d                 145 arch/alpha/include/asm/core_lca.h #define LCA_WRITE_PMR(d)    (*((volatile unsigned long *)LCA_PMR_ADDR) = (d))
d                  42 arch/alpha/include/asm/vga.h extern void scr_memcpyw(u16 *d, const u16 *s, unsigned int count);
d                  25 arch/alpha/kernel/err_impl.h #define SUBPACKET_ANNOTATION(c, t, r, d, a) {NULL, (c), (t), (r), (d), (a)}
d                 591 arch/alpha/kernel/io.c scr_memcpyw(u16 *d, const u16 *s, unsigned int count)
d                 594 arch/alpha/kernel/io.c 	u16 __iomem *iod = (u16 __iomem *) d;
d                 596 arch/alpha/kernel/io.c 	int d_isio = __is_ioaddr(d);
d                 610 arch/alpha/kernel/io.c 			memcpy_fromio(d, ios, count);
d                 615 arch/alpha/kernel/io.c 			memcpy(d, s, count);
d                  37 arch/alpha/kernel/irq_i8259.c i8259a_enable_irq(struct irq_data *d)
d                  40 arch/alpha/kernel/irq_i8259.c 	i8259_update_irq_hw(d->irq, cached_irq_mask &= ~(1 << d->irq));
d                  51 arch/alpha/kernel/irq_i8259.c i8259a_disable_irq(struct irq_data *d)
d                  54 arch/alpha/kernel/irq_i8259.c 	__i8259a_disable_irq(d->irq);
d                  59 arch/alpha/kernel/irq_i8259.c i8259a_mask_and_ack_irq(struct irq_data *d)
d                  61 arch/alpha/kernel/irq_i8259.c 	unsigned int irq = d->irq;
d                  35 arch/alpha/kernel/irq_impl.h extern void i8259a_enable_irq(struct irq_data *d);
d                  36 arch/alpha/kernel/irq_impl.h extern void i8259a_disable_irq(struct irq_data *d);
d                  37 arch/alpha/kernel/irq_impl.h extern void i8259a_mask_and_ack_irq(struct irq_data *d);
d                  33 arch/alpha/kernel/irq_pyxis.c pyxis_enable_irq(struct irq_data *d)
d                  35 arch/alpha/kernel/irq_pyxis.c 	pyxis_update_irq_hw(cached_irq_mask |= 1UL << (d->irq - 16));
d                  39 arch/alpha/kernel/irq_pyxis.c pyxis_disable_irq(struct irq_data *d)
d                  41 arch/alpha/kernel/irq_pyxis.c 	pyxis_update_irq_hw(cached_irq_mask &= ~(1UL << (d->irq - 16)));
d                  45 arch/alpha/kernel/irq_pyxis.c pyxis_mask_and_ack_irq(struct irq_data *d)
d                  47 arch/alpha/kernel/irq_pyxis.c 	unsigned long bit = 1UL << (d->irq - 16);
d                  22 arch/alpha/kernel/irq_srm.c srm_enable_irq(struct irq_data *d)
d                  25 arch/alpha/kernel/irq_srm.c 	cserve_ena(d->irq - 16);
d                  30 arch/alpha/kernel/irq_srm.c srm_disable_irq(struct irq_data *d)
d                  33 arch/alpha/kernel/irq_srm.c 	cserve_dis(d->irq - 16);
d                  47 arch/alpha/kernel/sys_alcor.c alcor_enable_irq(struct irq_data *d)
d                  49 arch/alpha/kernel/sys_alcor.c 	alcor_update_irq_hw(cached_irq_mask |= 1UL << (d->irq - 16));
d                  53 arch/alpha/kernel/sys_alcor.c alcor_disable_irq(struct irq_data *d)
d                  55 arch/alpha/kernel/sys_alcor.c 	alcor_update_irq_hw(cached_irq_mask &= ~(1UL << (d->irq - 16)));
d                  59 arch/alpha/kernel/sys_alcor.c alcor_mask_and_ack_irq(struct irq_data *d)
d                  61 arch/alpha/kernel/sys_alcor.c 	alcor_disable_irq(d);
d                  64 arch/alpha/kernel/sys_alcor.c 	*(vuip)GRU_INT_CLEAR = 1 << (d->irq - 16); mb();
d                  69 arch/alpha/kernel/sys_alcor.c alcor_isa_mask_and_ack_irq(struct irq_data *d)
d                  71 arch/alpha/kernel/sys_alcor.c 	i8259a_mask_and_ack_irq(d);
d                  49 arch/alpha/kernel/sys_cabriolet.c cabriolet_enable_irq(struct irq_data *d)
d                  51 arch/alpha/kernel/sys_cabriolet.c 	cabriolet_update_irq_hw(d->irq, cached_irq_mask &= ~(1UL << d->irq));
d                  55 arch/alpha/kernel/sys_cabriolet.c cabriolet_disable_irq(struct irq_data *d)
d                  57 arch/alpha/kernel/sys_cabriolet.c 	cabriolet_update_irq_hw(d->irq, cached_irq_mask |= 1UL << d->irq);
d                 101 arch/alpha/kernel/sys_dp264.c dp264_enable_irq(struct irq_data *d)
d                 104 arch/alpha/kernel/sys_dp264.c 	cached_irq_mask |= 1UL << d->irq;
d                 110 arch/alpha/kernel/sys_dp264.c dp264_disable_irq(struct irq_data *d)
d                 113 arch/alpha/kernel/sys_dp264.c 	cached_irq_mask &= ~(1UL << d->irq);
d                 119 arch/alpha/kernel/sys_dp264.c clipper_enable_irq(struct irq_data *d)
d                 122 arch/alpha/kernel/sys_dp264.c 	cached_irq_mask |= 1UL << (d->irq - 16);
d                 128 arch/alpha/kernel/sys_dp264.c clipper_disable_irq(struct irq_data *d)
d                 131 arch/alpha/kernel/sys_dp264.c 	cached_irq_mask &= ~(1UL << (d->irq - 16));
d                 152 arch/alpha/kernel/sys_dp264.c dp264_set_affinity(struct irq_data *d, const struct cpumask *affinity,
d                 156 arch/alpha/kernel/sys_dp264.c 	cpu_set_irq_affinity(d->irq, *affinity);
d                 164 arch/alpha/kernel/sys_dp264.c clipper_set_affinity(struct irq_data *d, const struct cpumask *affinity,
d                 168 arch/alpha/kernel/sys_dp264.c 	cpu_set_irq_affinity(d->irq - 16, *affinity);
d                  47 arch/alpha/kernel/sys_eb64p.c eb64p_enable_irq(struct irq_data *d)
d                  49 arch/alpha/kernel/sys_eb64p.c 	eb64p_update_irq_hw(d->irq, cached_irq_mask &= ~(1 << d->irq));
d                  53 arch/alpha/kernel/sys_eb64p.c eb64p_disable_irq(struct irq_data *d)
d                  55 arch/alpha/kernel/sys_eb64p.c 	eb64p_update_irq_hw(d->irq, cached_irq_mask |= 1 << d->irq);
d                  53 arch/alpha/kernel/sys_eiger.c eiger_enable_irq(struct irq_data *d)
d                  55 arch/alpha/kernel/sys_eiger.c 	unsigned int irq = d->irq;
d                  62 arch/alpha/kernel/sys_eiger.c eiger_disable_irq(struct irq_data *d)
d                  64 arch/alpha/kernel/sys_eiger.c 	unsigned int irq = d->irq;
d                  66 arch/alpha/kernel/sys_jensen.c jensen_local_enable(struct irq_data *d)
d                  69 arch/alpha/kernel/sys_jensen.c 	if (d->irq == 7)
d                  70 arch/alpha/kernel/sys_jensen.c 		i8259a_enable_irq(d);
d                  74 arch/alpha/kernel/sys_jensen.c jensen_local_disable(struct irq_data *d)
d                  77 arch/alpha/kernel/sys_jensen.c 	if (d->irq == 7)
d                  78 arch/alpha/kernel/sys_jensen.c 		i8259a_disable_irq(d);
d                  82 arch/alpha/kernel/sys_jensen.c jensen_local_mask_ack(struct irq_data *d)
d                  85 arch/alpha/kernel/sys_jensen.c 	if (d->irq == 7)
d                  86 arch/alpha/kernel/sys_jensen.c 		i8259a_mask_and_ack_irq(d);
d                 106 arch/alpha/kernel/sys_marvel.c io7_enable_irq(struct irq_data *d)
d                 109 arch/alpha/kernel/sys_marvel.c 	unsigned int irq = d->irq;
d                 127 arch/alpha/kernel/sys_marvel.c io7_disable_irq(struct irq_data *d)
d                 130 arch/alpha/kernel/sys_marvel.c 	unsigned int irq = d->irq;
d                 148 arch/alpha/kernel/sys_marvel.c marvel_irq_noop(struct irq_data *d)
d                  47 arch/alpha/kernel/sys_mikasa.c mikasa_enable_irq(struct irq_data *d)
d                  49 arch/alpha/kernel/sys_mikasa.c 	mikasa_update_irq_hw(cached_irq_mask |= 1 << (d->irq - 16));
d                  53 arch/alpha/kernel/sys_mikasa.c mikasa_disable_irq(struct irq_data *d)
d                  55 arch/alpha/kernel/sys_mikasa.c 	mikasa_update_irq_hw(cached_irq_mask &= ~(1 << (d->irq - 16)));
d                  52 arch/alpha/kernel/sys_noritake.c noritake_enable_irq(struct irq_data *d)
d                  54 arch/alpha/kernel/sys_noritake.c 	noritake_update_irq_hw(d->irq, cached_irq_mask |= 1 << (d->irq - 16));
d                  58 arch/alpha/kernel/sys_noritake.c noritake_disable_irq(struct irq_data *d)
d                  60 arch/alpha/kernel/sys_noritake.c 	noritake_update_irq_hw(d->irq, cached_irq_mask &= ~(1 << (d->irq - 16)));
d                  59 arch/alpha/kernel/sys_rawhide.c rawhide_enable_irq(struct irq_data *d)
d                  62 arch/alpha/kernel/sys_rawhide.c 	unsigned int irq = d->irq;
d                  80 arch/alpha/kernel/sys_rawhide.c rawhide_disable_irq(struct irq_data *d)
d                  83 arch/alpha/kernel/sys_rawhide.c 	unsigned int irq = d->irq;
d                 101 arch/alpha/kernel/sys_rawhide.c rawhide_mask_and_ack_irq(struct irq_data *d)
d                 104 arch/alpha/kernel/sys_rawhide.c 	unsigned int irq = d->irq;
d                  50 arch/alpha/kernel/sys_rx164.c rx164_enable_irq(struct irq_data *d)
d                  52 arch/alpha/kernel/sys_rx164.c 	rx164_update_irq_hw(cached_irq_mask |= 1UL << (d->irq - 16));
d                  56 arch/alpha/kernel/sys_rx164.c rx164_disable_irq(struct irq_data *d)
d                  58 arch/alpha/kernel/sys_rx164.c 	rx164_update_irq_hw(cached_irq_mask &= ~(1UL << (d->irq - 16)));
d                 446 arch/alpha/kernel/sys_sable.c sable_lynx_enable_irq(struct irq_data *d)
d                 450 arch/alpha/kernel/sys_sable.c 	bit = sable_lynx_irq_swizzle->irq_to_mask[d->irq];
d                 462 arch/alpha/kernel/sys_sable.c sable_lynx_disable_irq(struct irq_data *d)
d                 466 arch/alpha/kernel/sys_sable.c 	bit = sable_lynx_irq_swizzle->irq_to_mask[d->irq];
d                 478 arch/alpha/kernel/sys_sable.c sable_lynx_mask_and_ack_irq(struct irq_data *d)
d                 482 arch/alpha/kernel/sys_sable.c 	bit = sable_lynx_irq_swizzle->irq_to_mask[d->irq];
d                  48 arch/alpha/kernel/sys_takara.c takara_enable_irq(struct irq_data *d)
d                  50 arch/alpha/kernel/sys_takara.c 	unsigned int irq = d->irq;
d                  57 arch/alpha/kernel/sys_takara.c takara_disable_irq(struct irq_data *d)
d                  59 arch/alpha/kernel/sys_takara.c 	unsigned int irq = d->irq;
d                 116 arch/alpha/kernel/sys_titan.c titan_enable_irq(struct irq_data *d)
d                 118 arch/alpha/kernel/sys_titan.c 	unsigned int irq = d->irq;
d                 126 arch/alpha/kernel/sys_titan.c titan_disable_irq(struct irq_data *d)
d                 128 arch/alpha/kernel/sys_titan.c 	unsigned int irq = d->irq;
d                 150 arch/alpha/kernel/sys_titan.c titan_set_irq_affinity(struct irq_data *d, const struct cpumask *affinity,
d                 153 arch/alpha/kernel/sys_titan.c 	unsigned int irq = d->irq;
d                 107 arch/alpha/kernel/sys_wildfire.c wildfire_enable_irq(struct irq_data *d)
d                 109 arch/alpha/kernel/sys_wildfire.c 	unsigned int irq = d->irq;
d                 112 arch/alpha/kernel/sys_wildfire.c 		i8259a_enable_irq(d);
d                 121 arch/alpha/kernel/sys_wildfire.c wildfire_disable_irq(struct irq_data *d)
d                 123 arch/alpha/kernel/sys_wildfire.c 	unsigned int irq = d->irq;
d                 126 arch/alpha/kernel/sys_wildfire.c 		i8259a_disable_irq(d);
d                 135 arch/alpha/kernel/sys_wildfire.c wildfire_mask_and_ack_irq(struct irq_data *d)
d                 137 arch/alpha/kernel/sys_wildfire.c 	unsigned int irq = d->irq;
d                 140 arch/alpha/kernel/sys_wildfire.c 		i8259a_mask_and_ack_irq(d);
d                  26 arch/alpha/lib/memcpy.c #define ALIGN_DEST_TO8_UP(d,s,n) \
d                  27 arch/alpha/lib/memcpy.c 	while (d & 7) { \
d                  30 arch/alpha/lib/memcpy.c 		*(char *) d = *(char *) s; \
d                  31 arch/alpha/lib/memcpy.c 		d++; s++; \
d                  33 arch/alpha/lib/memcpy.c #define ALIGN_DEST_TO8_DN(d,s,n) \
d                  34 arch/alpha/lib/memcpy.c 	while (d & 7) { \
d                  37 arch/alpha/lib/memcpy.c 		d--; s--; \
d                  38 arch/alpha/lib/memcpy.c 		*(char *) d = *(char *) s; \
d                  45 arch/alpha/lib/memcpy.c #define DO_REST_UP(d,s,n) \
d                  48 arch/alpha/lib/memcpy.c 		*(char *) d = *(char *) s; \
d                  49 arch/alpha/lib/memcpy.c 		d++; s++; \
d                  51 arch/alpha/lib/memcpy.c #define DO_REST_DN(d,s,n) \
d                  54 arch/alpha/lib/memcpy.c 		d--; s--; \
d                  55 arch/alpha/lib/memcpy.c 		*(char *) d = *(char *) s; \
d                  62 arch/alpha/lib/memcpy.c #define DO_REST_ALIGNED_UP(d,s,n) DO_REST_UP(d,s,n)
d                  63 arch/alpha/lib/memcpy.c #define DO_REST_ALIGNED_DN(d,s,n) DO_REST_DN(d,s,n)
d                  72 arch/alpha/lib/memcpy.c static inline void __memcpy_unaligned_up (unsigned long d, unsigned long s,
d                  75 arch/alpha/lib/memcpy.c 	ALIGN_DEST_TO8_UP(d,s,n);
d                  91 arch/alpha/lib/memcpy.c 			*(unsigned long *) d = low_word | tmp;
d                  92 arch/alpha/lib/memcpy.c 			d += 8;
d                  97 arch/alpha/lib/memcpy.c 	DO_REST_UP(d,s,n);
d                 100 arch/alpha/lib/memcpy.c static inline void __memcpy_unaligned_dn (unsigned long d, unsigned long s,
d                 105 arch/alpha/lib/memcpy.c 	d += n;
d                 107 arch/alpha/lib/memcpy.c 		* (char *) --d = * (char *) --s;
d                 118 arch/alpha/lib/memcpy.c static inline void __memcpy_aligned_up (unsigned long d, unsigned long s,
d                 121 arch/alpha/lib/memcpy.c 	ALIGN_DEST_TO8_UP(d,s,n);
d                 128 arch/alpha/lib/memcpy.c 		*(unsigned long *) d = tmp;
d                 129 arch/alpha/lib/memcpy.c 		d += 8;
d                 132 arch/alpha/lib/memcpy.c 	DO_REST_ALIGNED_UP(d,s,n);
d                 134 arch/alpha/lib/memcpy.c static inline void __memcpy_aligned_dn (unsigned long d, unsigned long s,
d                 138 arch/alpha/lib/memcpy.c 	d += n;
d                 139 arch/alpha/lib/memcpy.c 	ALIGN_DEST_TO8_DN(d,s,n);
d                 146 arch/alpha/lib/memcpy.c 		d -= 8;
d                 147 arch/alpha/lib/memcpy.c 		*(unsigned long *) d = tmp;
d                 150 arch/alpha/lib/memcpy.c 	DO_REST_ALIGNED_DN(d,s,n);
d                  21 arch/alpha/math-emu/sfp-util.h #define udiv_qrnnd(q, r, n1, n0, d)				\
d                  23 arch/alpha/math-emu/sfp-util.h     (q) = __udiv_qrnnd (&__r, (n1), (n0), (d));			\
d                  88 arch/arc/include/asm/entry-compact.h 	b.d	66f
d                 207 arch/arc/include/asm/io.h #define readsb(p,d,l)		({ __raw_readsb(p,d,l); __iormb(); })
d                 208 arch/arc/include/asm/io.h #define readsw(p,d,l)		({ __raw_readsw(p,d,l); __iormb(); })
d                 209 arch/arc/include/asm/io.h #define readsl(p,d,l)		({ __raw_readsl(p,d,l); __iormb(); })
d                 214 arch/arc/include/asm/io.h #define writesb(p,d,l)		({ __iowmb(); __raw_writesb(p,d,l); })
d                 215 arch/arc/include/asm/io.h #define writesw(p,d,l)		({ __iowmb(); __raw_writesw(p,d,l); })
d                 216 arch/arc/include/asm/io.h #define writesl(p,d,l)		({ __iowmb(); __raw_writesl(p,d,l); })
d                  68 arch/arc/include/asm/tlb-mmu1.h b.d     89f
d                 726 arch/arc/include/asm/uaccess.h #define __clear_user(d, n)		__arc_clear_user(d, n)
d                 727 arch/arc/include/asm/uaccess.h #define __strncpy_from_user(d, s, n)	__arc_strncpy_from_user(d, s, n)
d                 736 arch/arc/include/asm/uaccess.h #define __clear_user(d, n)		arc_clear_user_noinline(d, n)
d                 737 arch/arc/include/asm/uaccess.h #define __strncpy_from_user(d, s, n)	arc_strncpy_from_user_noinline(d, s, n)
d                 132 arch/arc/kernel/intc-arcv2.c static int arcv2_irq_map(struct irq_domain *d, unsigned int irq,
d                  88 arch/arc/kernel/intc-compact.c static int arc_intc_domain_map(struct irq_domain *d, unsigned int irq,
d                 359 arch/arc/kernel/mcip.c static int idu_irq_map(struct irq_domain *d, unsigned int virq, irq_hw_number_t hwirq)
d                  79 arch/arc/plat-axs10x/axs10x.c 			unsigned int pad:11, y:12, m:4, d:5;
d                  81 arch/arc/plat-axs10x/axs10x.c 			unsigned int d:5, m:4, y:12, pad:11;
d                  88 arch/arc/plat-axs10x/axs10x.c 	pr_info("AXS: %s FPGA Date: %u-%u-%u\n", str, board.d, board.m,
d                  13 arch/arm/boot/compressed/string.c 	unsigned char *d = (unsigned char *)__dest, *s = (unsigned char *)__src;
d                  16 arch/arm/boot/compressed/string.c 		*d++ = *s++;
d                  17 arch/arm/boot/compressed/string.c 		*d++ = *s++;
d                  18 arch/arm/boot/compressed/string.c 		*d++ = *s++;
d                  19 arch/arm/boot/compressed/string.c 		*d++ = *s++;
d                  20 arch/arm/boot/compressed/string.c 		*d++ = *s++;
d                  21 arch/arm/boot/compressed/string.c 		*d++ = *s++;
d                  22 arch/arm/boot/compressed/string.c 		*d++ = *s++;
d                  23 arch/arm/boot/compressed/string.c 		*d++ = *s++;
d                  27 arch/arm/boot/compressed/string.c 		*d++ = *s++;
d                  28 arch/arm/boot/compressed/string.c 		*d++ = *s++;
d                  29 arch/arm/boot/compressed/string.c 		*d++ = *s++;
d                  30 arch/arm/boot/compressed/string.c 		*d++ = *s++;
d                  34 arch/arm/boot/compressed/string.c 		*d++ = *s++;
d                  35 arch/arm/boot/compressed/string.c 		*d++ = *s++;
d                  39 arch/arm/boot/compressed/string.c 		*d++ = *s++;
d                  46 arch/arm/boot/compressed/string.c 	unsigned char *d = __dest;
d                  56 arch/arm/boot/compressed/string.c 		d[count] = s[count];
d                  32 arch/arm/common/it8152.c static void it8152_mask_irq(struct irq_data *d)
d                  34 arch/arm/common/it8152.c 	unsigned int irq = d->irq;
d                  51 arch/arm/common/it8152.c static void it8152_unmask_irq(struct irq_data *d)
d                  53 arch/arm/common/it8152.c 	unsigned int irq = d->irq;
d                 163 arch/arm/common/locomo.c static void locomo_ack_irq(struct irq_data *d)
d                 167 arch/arm/common/locomo.c static void locomo_mask_irq(struct irq_data *d)
d                 169 arch/arm/common/locomo.c 	struct locomo *lchip = irq_data_get_irq_chip_data(d);
d                 172 arch/arm/common/locomo.c 	r &= ~(0x0010 << (d->irq - lchip->irq_base));
d                 176 arch/arm/common/locomo.c static void locomo_unmask_irq(struct irq_data *d)
d                 178 arch/arm/common/locomo.c 	struct locomo *lchip = irq_data_get_irq_chip_data(d);
d                 181 arch/arm/common/locomo.c 	r |= (0x0010 << (d->irq - lchip->irq_base));
d                 201 arch/arm/common/sa1111.c 	struct irq_desc *d = irq_to_desc(irq_linear_revmap(irqdomain, irq));
d                 203 arch/arm/common/sa1111.c 	if (d)
d                 204 arch/arm/common/sa1111.c 		generic_handle_irq_desc(d);
d                 247 arch/arm/common/sa1111.c static u32 sa1111_irqmask(struct irq_data *d)
d                 249 arch/arm/common/sa1111.c 	return BIT(irqd_to_hwirq(d) & 31);
d                 252 arch/arm/common/sa1111.c static int sa1111_irqbank(struct irq_data *d)
d                 254 arch/arm/common/sa1111.c 	return (irqd_to_hwirq(d) / 32) * 4;
d                 257 arch/arm/common/sa1111.c static void sa1111_ack_irq(struct irq_data *d)
d                 261 arch/arm/common/sa1111.c static void sa1111_mask_irq(struct irq_data *d)
d                 263 arch/arm/common/sa1111.c 	struct sa1111 *sachip = irq_data_get_irq_chip_data(d);
d                 264 arch/arm/common/sa1111.c 	void __iomem *mapbase = sachip->base + SA1111_INTC + sa1111_irqbank(d);
d                 268 arch/arm/common/sa1111.c 	ie &= ~sa1111_irqmask(d);
d                 272 arch/arm/common/sa1111.c static void sa1111_unmask_irq(struct irq_data *d)
d                 274 arch/arm/common/sa1111.c 	struct sa1111 *sachip = irq_data_get_irq_chip_data(d);
d                 275 arch/arm/common/sa1111.c 	void __iomem *mapbase = sachip->base + SA1111_INTC + sa1111_irqbank(d);
d                 279 arch/arm/common/sa1111.c 	ie |= sa1111_irqmask(d);
d                 290 arch/arm/common/sa1111.c static int sa1111_retrigger_irq(struct irq_data *d)
d                 292 arch/arm/common/sa1111.c 	struct sa1111 *sachip = irq_data_get_irq_chip_data(d);
d                 293 arch/arm/common/sa1111.c 	void __iomem *mapbase = sachip->base + SA1111_INTC + sa1111_irqbank(d);
d                 294 arch/arm/common/sa1111.c 	u32 ip, mask = sa1111_irqmask(d);
d                 307 arch/arm/common/sa1111.c 		       d->irq);
d                 311 arch/arm/common/sa1111.c static int sa1111_type_irq(struct irq_data *d, unsigned int flags)
d                 313 arch/arm/common/sa1111.c 	struct sa1111 *sachip = irq_data_get_irq_chip_data(d);
d                 314 arch/arm/common/sa1111.c 	void __iomem *mapbase = sachip->base + SA1111_INTC + sa1111_irqbank(d);
d                 315 arch/arm/common/sa1111.c 	u32 ip, mask = sa1111_irqmask(d);
d                 334 arch/arm/common/sa1111.c static int sa1111_wake_irq(struct irq_data *d, unsigned int on)
d                 336 arch/arm/common/sa1111.c 	struct sa1111 *sachip = irq_data_get_irq_chip_data(d);
d                 337 arch/arm/common/sa1111.c 	void __iomem *mapbase = sachip->base + SA1111_INTC + sa1111_irqbank(d);
d                 338 arch/arm/common/sa1111.c 	u32 we, mask = sa1111_irqmask(d);
d                 360 arch/arm/common/sa1111.c static int sa1111_irqdomain_map(struct irq_domain *d, unsigned int irq,
d                 363 arch/arm/common/sa1111.c 	struct sa1111 *sachip = d->host_data;
d                  30 arch/arm/common/sharpsl_param.c #define MAGIC_CHG(a,b,c,d) ( ( d << 24 ) | ( c << 16 )  | ( b << 8 ) | a )
d                 195 arch/arm/include/asm/assembler.h 	adr\c	\rd, \sym + 1
d                 197 arch/arm/include/asm/assembler.h 	adr\c	\rd, \sym
d                 206 arch/arm/include/asm/assembler.h  ARM(	mov	\rd, sp, lsr #THREAD_SIZE_ORDER + PAGE_SHIFT	)
d                 207 arch/arm/include/asm/assembler.h  THUMB(	mov	\rd, sp			)
d                 208 arch/arm/include/asm/assembler.h  THUMB(	lsr	\rd, \rd, #THREAD_SIZE_ORDER + PAGE_SHIFT	)
d                 209 arch/arm/include/asm/assembler.h 	mov	\rd, \rd, lsl #THREAD_SIZE_ORDER + PAGE_SHIFT
d                 176 arch/arm/include/asm/ecard.h 	} d;
d                 184 arch/arm/include/asm/hardware/locomo.h #define locomo_get_drvdata(d)	dev_get_drvdata(&(d)->dev)
d                 185 arch/arm/include/asm/hardware/locomo.h #define locomo_set_drvdata(d,p)	dev_set_drvdata(&(d)->dev, p)
d                 399 arch/arm/include/asm/hardware/sa1111.h #define sa1111_get_drvdata(d)	dev_get_drvdata(&(d)->dev)
d                 400 arch/arm/include/asm/hardware/sa1111.h #define sa1111_set_drvdata(d,p)	dev_set_drvdata(&(d)->dev, p)
d                 264 arch/arm/include/asm/io.h #define outsb(p,d,l)		__raw_writesb(__io(p),d,l)
d                 265 arch/arm/include/asm/io.h #define outsw(p,d,l)		__raw_writesw(__io(p),d,l)
d                 266 arch/arm/include/asm/io.h #define outsl(p,d,l)		__raw_writesl(__io(p),d,l)
d                 268 arch/arm/include/asm/io.h #define insb(p,d,l)		__raw_readsb(__io(p),d,l)
d                 269 arch/arm/include/asm/io.h #define insw(p,d,l)		__raw_readsw(__io(p),d,l)
d                 270 arch/arm/include/asm/io.h #define insl(p,d,l)		__raw_readsl(__io(p),d,l)
d                 309 arch/arm/include/asm/io.h #define readsb(p,d,l)		__raw_readsb(p,d,l)
d                 310 arch/arm/include/asm/io.h #define readsw(p,d,l)		__raw_readsw(p,d,l)
d                 311 arch/arm/include/asm/io.h #define readsl(p,d,l)		__raw_readsl(p,d,l)
d                 313 arch/arm/include/asm/io.h #define writesb(p,d,l)		__raw_writesb(p,d,l)
d                 314 arch/arm/include/asm/io.h #define writesw(p,d,l)		__raw_writesw(p,d,l)
d                 315 arch/arm/include/asm/io.h #define writesl(p,d,l)		__raw_writesl(p,d,l)
d                  13 arch/arm/include/asm/vfpmacros.h 	MRC\cond	p10, 7, \rd, \sysreg, cr0, 0	@ FMRX	\rd, \sysreg
d                  17 arch/arm/include/asm/vfpmacros.h 	MCR\cond	p10, 7, \rd, \sysreg, cr0, 0	@ FMXR	\sysreg, \rd
d                  21 arch/arm/kernel/return_address.c static int save_return_addr(struct stackframe *frame, void *d)
d                  23 arch/arm/kernel/return_address.c 	struct return_address_data *data = d;
d                  72 arch/arm/kernel/stacktrace.c static int save_trace(struct stackframe *frame, void *d)
d                  74 arch/arm/kernel/stacktrace.c 	struct stack_trace_data *data = d;
d                 389 arch/arm/mach-davinci/board-dm644x-evm.c sw_show(struct device *d, struct device_attribute *a, char *buf)
d                  32 arch/arm/mach-ebsa110/core.c static void ebsa110_mask_irq(struct irq_data *d)
d                  34 arch/arm/mach-ebsa110/core.c 	__raw_writeb(1 << d->irq, IRQ_MCLR);
d                  37 arch/arm/mach-ebsa110/core.c static void ebsa110_unmask_irq(struct irq_data *d)
d                  39 arch/arm/mach-ebsa110/core.c 	__raw_writeb(1 << d->irq, IRQ_MSET);
d                 136 arch/arm/mach-exynos/suspend.c static int exynos_pmu_domain_translate(struct irq_domain *d,
d                  78 arch/arm/mach-footbridge/common.c static void fb_mask_irq(struct irq_data *d)
d                  80 arch/arm/mach-footbridge/common.c 	*CSR_IRQ_DISABLE = fb_irq_mask[_DC21285_INR(d->irq)];
d                  83 arch/arm/mach-footbridge/common.c static void fb_unmask_irq(struct irq_data *d)
d                  85 arch/arm/mach-footbridge/common.c 	*CSR_IRQ_ENABLE = fb_irq_mask[_DC21285_INR(d->irq)];
d                  30 arch/arm/mach-footbridge/isa-irq.c static void isa_mask_pic_lo_irq(struct irq_data *d)
d                  32 arch/arm/mach-footbridge/isa-irq.c 	unsigned int mask = 1 << (d->irq & 7);
d                  37 arch/arm/mach-footbridge/isa-irq.c static void isa_ack_pic_lo_irq(struct irq_data *d)
d                  39 arch/arm/mach-footbridge/isa-irq.c 	unsigned int mask = 1 << (d->irq & 7);
d                  45 arch/arm/mach-footbridge/isa-irq.c static void isa_unmask_pic_lo_irq(struct irq_data *d)
d                  47 arch/arm/mach-footbridge/isa-irq.c 	unsigned int mask = 1 << (d->irq & 7);
d                  58 arch/arm/mach-footbridge/isa-irq.c static void isa_mask_pic_hi_irq(struct irq_data *d)
d                  60 arch/arm/mach-footbridge/isa-irq.c 	unsigned int mask = 1 << (d->irq & 7);
d                  65 arch/arm/mach-footbridge/isa-irq.c static void isa_ack_pic_hi_irq(struct irq_data *d)
d                  67 arch/arm/mach-footbridge/isa-irq.c 	unsigned int mask = 1 << (d->irq & 7);
d                  74 arch/arm/mach-footbridge/isa-irq.c static void isa_unmask_pic_hi_irq(struct irq_data *d)
d                  76 arch/arm/mach-footbridge/isa-irq.c 	unsigned int mask = 1 << (d->irq & 7);
d                 109 arch/arm/mach-imx/3ds_debugboard.c static void expio_mask_irq(struct irq_data *d)
d                 112 arch/arm/mach-imx/3ds_debugboard.c 	u32 expio = d->hwirq;
d                 119 arch/arm/mach-imx/3ds_debugboard.c static void expio_ack_irq(struct irq_data *d)
d                 121 arch/arm/mach-imx/3ds_debugboard.c 	u32 expio = d->hwirq;
d                 125 arch/arm/mach-imx/3ds_debugboard.c 	expio_mask_irq(d);
d                 128 arch/arm/mach-imx/3ds_debugboard.c static void expio_unmask_irq(struct irq_data *d)
d                 131 arch/arm/mach-imx/3ds_debugboard.c 	u32 expio = d->hwirq;
d                  81 arch/arm/mach-imx/avic.c static void avic_irq_suspend(struct irq_data *d)
d                  83 arch/arm/mach-imx/avic.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  85 arch/arm/mach-imx/avic.c 	int idx = d->hwirq >> 5;
d                  91 arch/arm/mach-imx/avic.c 		u8 offs = d->hwirq < AVIC_NUM_IRQS / 2 ?
d                 103 arch/arm/mach-imx/avic.c static void avic_irq_resume(struct irq_data *d)
d                 105 arch/arm/mach-imx/avic.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 107 arch/arm/mach-imx/avic.c 	int idx = d->hwirq >> 5;
d                 112 arch/arm/mach-imx/avic.c 		u8 offs = d->hwirq < AVIC_NUM_IRQS / 2 ?
d                  89 arch/arm/mach-imx/gpc.c static int imx_gpc_irq_set_wake(struct irq_data *d, unsigned int on)
d                  91 arch/arm/mach-imx/gpc.c 	unsigned int idx = d->hwirq / 32;
d                  94 arch/arm/mach-imx/gpc.c 	mask = 1 << d->hwirq % 32;
d                 148 arch/arm/mach-imx/gpc.c static void imx_gpc_irq_unmask(struct irq_data *d)
d                 150 arch/arm/mach-imx/gpc.c 	imx_gpc_hwirq_unmask(d->hwirq);
d                 151 arch/arm/mach-imx/gpc.c 	irq_chip_unmask_parent(d);
d                 154 arch/arm/mach-imx/gpc.c static void imx_gpc_irq_mask(struct irq_data *d)
d                 156 arch/arm/mach-imx/gpc.c 	imx_gpc_hwirq_mask(d->hwirq);
d                 157 arch/arm/mach-imx/gpc.c 	irq_chip_mask_parent(d);
d                 173 arch/arm/mach-imx/gpc.c static int imx_gpc_domain_translate(struct irq_domain *d,
d                  24 arch/arm/mach-imx/irq-common.c 			struct irq_data *d = irq_get_irq_data(irq);
d                  25 arch/arm/mach-imx/irq-common.c 			ret = exirq->set_irq_fiq(irqd_to_hwirq(d), type);
d                 170 arch/arm/mach-imx/mach-mx31ads.c static void expio_mask_irq(struct irq_data *d)
d                 172 arch/arm/mach-imx/mach-mx31ads.c 	u32 expio = d->hwirq;
d                 182 arch/arm/mach-imx/mach-mx31ads.c static void expio_ack_irq(struct irq_data *d)
d                 184 arch/arm/mach-imx/mach-mx31ads.c 	u32 expio = d->hwirq;
d                 193 arch/arm/mach-imx/mach-mx31ads.c static void expio_unmask_irq(struct irq_data *d)
d                 195 arch/arm/mach-imx/mach-mx31ads.c 	u32 expio = d->hwirq;
d                  73 arch/arm/mach-imx/tzic.c static void tzic_irq_suspend(struct irq_data *d)
d                  75 arch/arm/mach-imx/tzic.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  76 arch/arm/mach-imx/tzic.c 	int idx = d->hwirq >> 5;
d                  81 arch/arm/mach-imx/tzic.c static void tzic_irq_resume(struct irq_data *d)
d                  83 arch/arm/mach-imx/tzic.c 	int idx = d->hwirq >> 5;
d                 368 arch/arm/mach-integrator/impd1.c 		struct amba_device *d;
d                 423 arch/arm/mach-integrator/impd1.c 		d = amba_ahb_device_add_res(&dev->dev, devname, pc_base, SZ_4K,
d                 427 arch/arm/mach-integrator/impd1.c 		if (IS_ERR(d)) {
d                 428 arch/arm/mach-integrator/impd1.c 			dev_err(&dev->dev, "unable to register device: %ld\n", PTR_ERR(d));
d                  14 arch/arm/mach-integrator/lm.c #define to_lm_device(d)	container_of(d, struct lm_device, dev)
d                  15 arch/arm/mach-integrator/lm.c #define to_lm_driver(d)	container_of(d, struct lm_driver, drv)
d                  69 arch/arm/mach-integrator/lm.c 	struct lm_device *d = to_lm_device(dev);
d                  71 arch/arm/mach-integrator/lm.c 	kfree(d);
d                  24 arch/arm/mach-integrator/lm.h #define lm_set_drvdata(lm,d)	dev_set_drvdata(&(lm)->dev, d)
d                  33 arch/arm/mach-iop32x/irq.c iop32x_irq_mask(struct irq_data *d)
d                  35 arch/arm/mach-iop32x/irq.c 	iop32x_mask &= ~(1 << d->irq);
d                  40 arch/arm/mach-iop32x/irq.c iop32x_irq_unmask(struct irq_data *d)
d                  42 arch/arm/mach-iop32x/irq.c 	iop32x_mask |= 1 << d->irq;
d                  39 arch/arm/mach-mmp/mmp2.h 	struct pxa_device_desc *d = NULL;
d                  42 arch/arm/mach-mmp/mmp2.h 	case 1: d = &mmp2_device_uart1; break;
d                  43 arch/arm/mach-mmp/mmp2.h 	case 2: d = &mmp2_device_uart2; break;
d                  44 arch/arm/mach-mmp/mmp2.h 	case 3: d = &mmp2_device_uart3; break;
d                  45 arch/arm/mach-mmp/mmp2.h 	case 4: d = &mmp2_device_uart4; break;
d                  50 arch/arm/mach-mmp/mmp2.h 	return pxa_register_device(d, NULL, 0);
d                  56 arch/arm/mach-mmp/mmp2.h 	struct pxa_device_desc *d = NULL;
d                  60 arch/arm/mach-mmp/mmp2.h 	case 1: d = &mmp2_device_twsi1; break;
d                  61 arch/arm/mach-mmp/mmp2.h 	case 2: d = &mmp2_device_twsi2; break;
d                  62 arch/arm/mach-mmp/mmp2.h 	case 3: d = &mmp2_device_twsi3; break;
d                  63 arch/arm/mach-mmp/mmp2.h 	case 4: d = &mmp2_device_twsi4; break;
d                  64 arch/arm/mach-mmp/mmp2.h 	case 5: d = &mmp2_device_twsi5; break;
d                  65 arch/arm/mach-mmp/mmp2.h 	case 6: d = &mmp2_device_twsi6; break;
d                  74 arch/arm/mach-mmp/mmp2.h 	return pxa_register_device(d, data, sizeof(*data));
d                  79 arch/arm/mach-mmp/mmp2.h 	struct pxa_device_desc *d = NULL;
d                  82 arch/arm/mach-mmp/mmp2.h 	case 0: d = &mmp2_device_sdh0; break;
d                  83 arch/arm/mach-mmp/mmp2.h 	case 1: d = &mmp2_device_sdh1; break;
d                  84 arch/arm/mach-mmp/mmp2.h 	case 2: d = &mmp2_device_sdh2; break;
d                  85 arch/arm/mach-mmp/mmp2.h 	case 3: d = &mmp2_device_sdh3; break;
d                  90 arch/arm/mach-mmp/mmp2.h 	return pxa_register_device(d, data, sizeof(*data));
d                  26 arch/arm/mach-mmp/pm-mmp2.c int mmp2_set_wake(struct irq_data *d, unsigned int on)
d                  29 arch/arm/mach-mmp/pm-mmp2.c 	int irq = d->irq;
d                  58 arch/arm/mach-mmp/pm-mmp2.h extern int mmp2_set_wake(struct irq_data *d, unsigned int on);
d                  51 arch/arm/mach-mmp/pxa168.h 	struct pxa_device_desc *d = NULL;
d                  54 arch/arm/mach-mmp/pxa168.h 	case 1: d = &pxa168_device_uart1; break;
d                  55 arch/arm/mach-mmp/pxa168.h 	case 2: d = &pxa168_device_uart2; break;
d                  56 arch/arm/mach-mmp/pxa168.h 	case 3: d = &pxa168_device_uart3; break;
d                  59 arch/arm/mach-mmp/pxa168.h 	if (d == NULL)
d                  62 arch/arm/mach-mmp/pxa168.h 	return pxa_register_device(d, NULL, 0);
d                  68 arch/arm/mach-mmp/pxa168.h 	struct pxa_device_desc *d = NULL;
d                  72 arch/arm/mach-mmp/pxa168.h 	case 0: d = &pxa168_device_twsi0; break;
d                  73 arch/arm/mach-mmp/pxa168.h 	case 1: d = &pxa168_device_twsi1; break;
d                  82 arch/arm/mach-mmp/pxa168.h 	return pxa_register_device(d, data, sizeof(*data));
d                  87 arch/arm/mach-mmp/pxa168.h 	struct pxa_device_desc *d = NULL;
d                  90 arch/arm/mach-mmp/pxa168.h 	case 1: d = &pxa168_device_pwm1; break;
d                  91 arch/arm/mach-mmp/pxa168.h 	case 2: d = &pxa168_device_pwm2; break;
d                  92 arch/arm/mach-mmp/pxa168.h 	case 3: d = &pxa168_device_pwm3; break;
d                  93 arch/arm/mach-mmp/pxa168.h 	case 4: d = &pxa168_device_pwm4; break;
d                  98 arch/arm/mach-mmp/pxa168.h 	return pxa_register_device(d, NULL, 0);
d                 103 arch/arm/mach-mmp/pxa168.h 	struct pxa_device_desc *d = NULL;
d                 106 arch/arm/mach-mmp/pxa168.h 	case 1: d = &pxa168_device_ssp1; break;
d                 107 arch/arm/mach-mmp/pxa168.h 	case 2: d = &pxa168_device_ssp2; break;
d                 108 arch/arm/mach-mmp/pxa168.h 	case 3: d = &pxa168_device_ssp3; break;
d                 109 arch/arm/mach-mmp/pxa168.h 	case 4: d = &pxa168_device_ssp4; break;
d                 110 arch/arm/mach-mmp/pxa168.h 	case 5: d = &pxa168_device_ssp5; break;
d                 114 arch/arm/mach-mmp/pxa168.h 	return pxa_register_device(d, NULL, 0);
d                  37 arch/arm/mach-mmp/pxa910.h 	struct pxa_device_desc *d = NULL;
d                  40 arch/arm/mach-mmp/pxa910.h 	case 1: d = &pxa910_device_uart1; break;
d                  41 arch/arm/mach-mmp/pxa910.h 	case 2: d = &pxa910_device_uart2; break;
d                  44 arch/arm/mach-mmp/pxa910.h 	if (d == NULL)
d                  47 arch/arm/mach-mmp/pxa910.h 	return pxa_register_device(d, NULL, 0);
d                  53 arch/arm/mach-mmp/pxa910.h 	struct pxa_device_desc *d = NULL;
d                  57 arch/arm/mach-mmp/pxa910.h 	case 0: d = &pxa910_device_twsi0; break;
d                  58 arch/arm/mach-mmp/pxa910.h 	case 1: d = &pxa910_device_twsi1; break;
d                  67 arch/arm/mach-mmp/pxa910.h 	return pxa_register_device(d, data, sizeof(*data));
d                  72 arch/arm/mach-mmp/pxa910.h 	struct pxa_device_desc *d = NULL;
d                  75 arch/arm/mach-mmp/pxa910.h 	case 1: d = &pxa910_device_pwm1; break;
d                  76 arch/arm/mach-mmp/pxa910.h 	case 2: d = &pxa910_device_pwm2; break;
d                  77 arch/arm/mach-mmp/pxa910.h 	case 3: d = &pxa910_device_pwm3; break;
d                  78 arch/arm/mach-mmp/pxa910.h 	case 4: d = &pxa910_device_pwm4; break;
d                  83 arch/arm/mach-mmp/pxa910.h 	return pxa_register_device(d, NULL, 0);
d                  53 arch/arm/mach-omap1/ams-delta-fiq.c 	struct irq_data *d;
d                  62 arch/arm/mach-omap1/ams-delta-fiq.c 		d = irq_data[gpio];
d                  63 arch/arm/mach-omap1/ams-delta-fiq.c 		irq_num = d->irq;
d                  74 arch/arm/mach-omap1/ams-delta-fiq.c 				irq_chip->irq_unmask(d);
d                 992 arch/arm/mach-omap1/clock.c 	struct dentry *d;
d                 995 arch/arm/mach-omap1/clock.c 	d = debugfs_create_dir(c->name, pa ? pa->dent : clk_debugfs_root);
d                 996 arch/arm/mach-omap1/clock.c 	c->dent = d;
d                1017 arch/arm/mach-omap1/clock.c 	struct dentry *d;
d                1019 arch/arm/mach-omap1/clock.c 	d = debugfs_create_dir("clock", NULL);
d                1020 arch/arm/mach-omap1/clock.c 	clk_debugfs_root = d;
d                1025 arch/arm/mach-omap1/clock.c 	debugfs_create_file("summary", S_IRUGO, d, NULL, &debug_clock_fops);
d                 313 arch/arm/mach-omap1/dma.c 	struct omap_dma_dev_attr		*d;
d                 338 arch/arm/mach-omap1/dma.c 	d = kzalloc(sizeof(*d), GFP_KERNEL);
d                 339 arch/arm/mach-omap1/dma.c 	if (!d) {
d                 346 arch/arm/mach-omap1/dma.c 		d->dev_caps = ENABLE_1510_MODE;
d                 347 arch/arm/mach-omap1/dma.c 	enable_1510_mode = d->dev_caps & ENABLE_1510_MODE;
d                 350 arch/arm/mach-omap1/dma.c 		d->dev_caps = ENABLE_16XX_MODE;
d                 352 arch/arm/mach-omap1/dma.c 	d->dev_caps		|= SRC_PORT;
d                 353 arch/arm/mach-omap1/dma.c 	d->dev_caps		|= DST_PORT;
d                 354 arch/arm/mach-omap1/dma.c 	d->dev_caps		|= SRC_INDEX;
d                 355 arch/arm/mach-omap1/dma.c 	d->dev_caps		|= DST_INDEX;
d                 356 arch/arm/mach-omap1/dma.c 	d->dev_caps		|= IS_BURST_ONLY4;
d                 357 arch/arm/mach-omap1/dma.c 	d->dev_caps		|= CLEAR_CSR_ON_READ;
d                 358 arch/arm/mach-omap1/dma.c 	d->dev_caps		|= IS_WORD_16;
d                 362 arch/arm/mach-omap1/dma.c 		d->lch_count = 9;
d                 364 arch/arm/mach-omap1/dma.c 		if (d->dev_caps & ENABLE_1510_MODE)
d                 365 arch/arm/mach-omap1/dma.c 			d->lch_count = 9;
d                 367 arch/arm/mach-omap1/dma.c 			d->lch_count = 16;
d                 371 arch/arm/mach-omap1/dma.c 	p.dma_attr = d;
d                 407 arch/arm/mach-omap1/dma.c 	kfree(d);
d                  33 arch/arm/mach-omap1/fpga.c static void fpga_mask_irq(struct irq_data *d)
d                  35 arch/arm/mach-omap1/fpga.c 	unsigned int irq = d->irq - OMAP_FPGA_IRQ_BASE;
d                  61 arch/arm/mach-omap1/fpga.c static void fpga_ack_irq(struct irq_data *d)
d                  66 arch/arm/mach-omap1/fpga.c static void fpga_unmask_irq(struct irq_data *d)
d                  68 arch/arm/mach-omap1/fpga.c 	unsigned int irq = d->irq - OMAP_FPGA_IRQ_BASE;
d                  81 arch/arm/mach-omap1/fpga.c static void fpga_mask_ack_irq(struct irq_data *d)
d                  83 arch/arm/mach-omap1/fpga.c 	fpga_mask_irq(d);
d                  84 arch/arm/mach-omap1/fpga.c 	fpga_ack_irq(d);
d                  87 arch/arm/mach-omap1/irq.c static void omap_mask_ack_irq(struct irq_data *d)
d                  89 arch/arm/mach-omap1/irq.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                  91 arch/arm/mach-omap1/irq.c 	ct->chip.irq_mask(d);
d                  92 arch/arm/mach-omap1/irq.c 	omap_ack_irq(d->irq);
d                 195 arch/arm/mach-omap1/irq.c 	struct irq_data *d = NULL;
d                 271 arch/arm/mach-omap1/irq.c 	d = irq_get_irq_data(irq_find_mapping(domain, omap_l2_irq));
d                 272 arch/arm/mach-omap1/irq.c 	if (d) {
d                 273 arch/arm/mach-omap1/irq.c 		ct = irq_data_get_chip_type(d);
d                 274 arch/arm/mach-omap1/irq.c 		ct->chip.irq_unmask(d);
d                 539 arch/arm/mach-omap1/pm.c 	struct dentry *d;
d                 541 arch/arm/mach-omap1/pm.c 	d = debugfs_create_dir("pm_debug", NULL);
d                 542 arch/arm/mach-omap1/pm.c 	debugfs_create_file("omap_pm", S_IWUSR | S_IRUGO, d, NULL,
d                 234 arch/arm/mach-omap2/dma.c 	struct omap_dma_dev_attr		*d;
d                 270 arch/arm/mach-omap2/dma.c 	d = oh->dev_attr;
d                 273 arch/arm/mach-omap2/dma.c 		d->dev_caps |= HS_CHANNELS_RESERVED;
d                 276 arch/arm/mach-omap2/dma.c 		d->dev_caps |= DMA_ENGINE_HANDLE_IRQ;
d                 133 arch/arm/mach-omap2/omap-wakeupgen.c static void wakeupgen_mask(struct irq_data *d)
d                 138 arch/arm/mach-omap2/omap-wakeupgen.c 	_wakeupgen_clear(d->hwirq, irq_target_cpu[d->hwirq]);
d                 140 arch/arm/mach-omap2/omap-wakeupgen.c 	irq_chip_mask_parent(d);
d                 146 arch/arm/mach-omap2/omap-wakeupgen.c static void wakeupgen_unmask(struct irq_data *d)
d                 151 arch/arm/mach-omap2/omap-wakeupgen.c 	_wakeupgen_set(d->hwirq, irq_target_cpu[d->hwirq]);
d                 153 arch/arm/mach-omap2/omap-wakeupgen.c 	irq_chip_unmask_parent(d);
d                 160 arch/arm/mach-omap2/omap-wakeupgen.c static int wakeupgen_irq_set_type(struct irq_data *d, unsigned int type)
d                 179 arch/arm/mach-omap2/omap-wakeupgen.c 	if (inverted && d->hwirq != SYS_NIRQ1_EXT_SYS_IRQ_1 &&
d                 180 arch/arm/mach-omap2/omap-wakeupgen.c 	    d->hwirq != SYS_NIRQ2_EXT_SYS_IRQ_2)
d                 182 arch/arm/mach-omap2/omap-wakeupgen.c 			d->hwirq);
d                 184 arch/arm/mach-omap2/omap-wakeupgen.c 	return irq_chip_set_type_parent(d, type);
d                 487 arch/arm/mach-omap2/omap-wakeupgen.c static int wakeupgen_domain_translate(struct irq_domain *d,
d                 180 arch/arm/mach-omap2/pm-debug.c 	struct dentry *d;
d                 192 arch/arm/mach-omap2/pm-debug.c 	d = debugfs_create_dir(pwrdm->name, (struct dentry *)dir);
d                 193 arch/arm/mach-omap2/pm-debug.c 	debugfs_create_file("suspend", S_IRUGO|S_IWUSR, d, pwrdm,
d                 226 arch/arm/mach-omap2/pm-debug.c 	struct dentry *d;
d                 231 arch/arm/mach-omap2/pm-debug.c 	d = debugfs_create_dir("pm_debug", NULL);
d                 233 arch/arm/mach-omap2/pm-debug.c 	debugfs_create_file("count", 0444, d, NULL, &pm_dbg_counters_fops);
d                 234 arch/arm/mach-omap2/pm-debug.c 	debugfs_create_file("time", 0444, d, NULL, &pm_dbg_timers_fops);
d                 236 arch/arm/mach-omap2/pm-debug.c 	pwrdm_for_each(pwrdms_setup, (void *)d);
d                 238 arch/arm/mach-omap2/pm-debug.c 	debugfs_create_file("enable_off_mode", S_IRUGO | S_IWUSR, d,
d                 109 arch/arm/mach-orion5x/common.c void __init orion5x_eth_switch_init(struct dsa_chip_data *d)
d                 111 arch/arm/mach-orion5x/common.c 	orion_ge00_switch_init(d);
d                  45 arch/arm/mach-orion5x/common.h void orion5x_eth_switch_init(struct dsa_chip_data *d);
d                 472 arch/arm/mach-pxa/balloon3.c static void balloon3_mask_irq(struct irq_data *d)
d                 474 arch/arm/mach-pxa/balloon3.c 	int balloon3_irq = (d->irq - BALLOON3_IRQ(0));
d                 479 arch/arm/mach-pxa/balloon3.c static void balloon3_unmask_irq(struct irq_data *d)
d                 481 arch/arm/mach-pxa/balloon3.c 	int balloon3_irq = (d->irq - BALLOON3_IRQ(0));
d                 498 arch/arm/mach-pxa/balloon3.c 		struct irq_data *d = irq_desc_get_irq_data(desc);
d                 504 arch/arm/mach-pxa/balloon3.c 			chip->irq_ack(d);
d                  64 arch/arm/mach-pxa/irq.c void pxa_mask_irq(struct irq_data *d)
d                  66 arch/arm/mach-pxa/irq.c 	void __iomem *base = irq_data_get_irq_chip_data(d);
d                  67 arch/arm/mach-pxa/irq.c 	irq_hw_number_t irq = irqd_to_hwirq(d);
d                  74 arch/arm/mach-pxa/irq.c void pxa_unmask_irq(struct irq_data *d)
d                  76 arch/arm/mach-pxa/irq.c 	void __iomem *base = irq_data_get_irq_chip_data(d);
d                  77 arch/arm/mach-pxa/irq.c 	irq_hw_number_t irq = irqd_to_hwirq(d);
d                  96 arch/arm/mach-pxa/lpd270.c static void lpd270_mask_irq(struct irq_data *d)
d                  98 arch/arm/mach-pxa/lpd270.c 	int lpd270_irq = d->irq - LPD270_IRQ(0);
d                 106 arch/arm/mach-pxa/lpd270.c static void lpd270_unmask_irq(struct irq_data *d)
d                 108 arch/arm/mach-pxa/lpd270.c 	int lpd270_irq = d->irq - LPD270_IRQ(0);
d                 172 arch/arm/mach-pxa/mfp-pxa2xx.c 	struct gpio_desc *d;
d                 178 arch/arm/mach-pxa/mfp-pxa2xx.c 	d = &gpio_desc[gpio];
d                 179 arch/arm/mach-pxa/mfp-pxa2xx.c 	c = d->config;
d                 181 arch/arm/mach-pxa/mfp-pxa2xx.c 	if (!d->valid)
d                 187 arch/arm/mach-pxa/mfp-pxa2xx.c 	if (d->keypad_gpio && (MFP_AF(d->config) == 0) &&
d                 188 arch/arm/mach-pxa/mfp-pxa2xx.c 	    (d->config & MFP_LPM_CAN_WAKEUP)) {
d                 190 arch/arm/mach-pxa/mfp-pxa2xx.c 			PKWR |= d->mask;
d                 192 arch/arm/mach-pxa/mfp-pxa2xx.c 			PKWR &= ~d->mask;
d                 196 arch/arm/mach-pxa/mfp-pxa2xx.c 	mux_taken = (PWER & d->mux_mask) & (~d->mask);
d                 200 arch/arm/mach-pxa/mfp-pxa2xx.c 	if (d->can_wakeup && (c & MFP_LPM_CAN_WAKEUP)) {
d                 202 arch/arm/mach-pxa/mfp-pxa2xx.c 			PWER = (PWER & ~d->mux_mask) | d->mask;
d                 205 arch/arm/mach-pxa/mfp-pxa2xx.c 				PRER |= d->mask;
d                 207 arch/arm/mach-pxa/mfp-pxa2xx.c 				PRER &= ~d->mask;
d                 210 arch/arm/mach-pxa/mfp-pxa2xx.c 				PFER |= d->mask;
d                 212 arch/arm/mach-pxa/mfp-pxa2xx.c 				PFER &= ~d->mask;
d                 214 arch/arm/mach-pxa/mfp-pxa2xx.c 			PWER &= ~d->mask;
d                 215 arch/arm/mach-pxa/mfp-pxa2xx.c 			PRER &= ~d->mask;
d                 216 arch/arm/mach-pxa/mfp-pxa2xx.c 			PFER &= ~d->mask;
d                 260 arch/arm/mach-pxa/mfp-pxa2xx.c 	struct gpio_desc *d;
d                 265 arch/arm/mach-pxa/mfp-pxa2xx.c 		d = &gpio_desc[gpio];
d                 268 arch/arm/mach-pxa/mfp-pxa2xx.c 		if (MFP_AF(d->config) == 0)
d                 271 arch/arm/mach-pxa/mfp-pxa2xx.c 		if (d->config & MFP_LPM_CAN_WAKEUP)
d                 258 arch/arm/mach-pxa/pcm990-baseboard.c static void pcm990_mask_ack_irq(struct irq_data *d)
d                 260 arch/arm/mach-pxa/pcm990-baseboard.c 	int pcm990_irq = (d->irq - PCM027_IRQ(0));
d                 267 arch/arm/mach-pxa/pcm990-baseboard.c static void pcm990_unmask_irq(struct irq_data *d)
d                 269 arch/arm/mach-pxa/pcm990-baseboard.c 	int pcm990_irq = (d->irq - PCM027_IRQ(0));
d                 119 arch/arm/mach-pxa/pxa25x.c static int pxa25x_set_wake(struct irq_data *d, unsigned int on)
d                 121 arch/arm/mach-pxa/pxa25x.c 	int gpio = pxa_irq_to_gpio(d->irq);
d                 127 arch/arm/mach-pxa/pxa25x.c 	if (d->irq == IRQ_RTCAlrm) {
d                 202 arch/arm/mach-pxa/pxa27x.c static int pxa27x_set_wake(struct irq_data *d, unsigned int on)
d                 204 arch/arm/mach-pxa/pxa27x.c 	int gpio = pxa_irq_to_gpio(d->irq);
d                 210 arch/arm/mach-pxa/pxa27x.c 	if (d->irq == IRQ_KEYPAD)
d                 213 arch/arm/mach-pxa/pxa27x.c 	switch (d->irq) {
d                 199 arch/arm/mach-pxa/pxa3xx.c static int pxa3xx_set_wake(struct irq_data *d, unsigned int on)
d                 203 arch/arm/mach-pxa/pxa3xx.c 	switch (d->irq) {
d                 292 arch/arm/mach-pxa/pxa3xx.c static void pxa_ack_ext_wakeup(struct irq_data *d)
d                 294 arch/arm/mach-pxa/pxa3xx.c 	PECR |= PECR_IS(d->irq - IRQ_WAKEUP0);
d                 297 arch/arm/mach-pxa/pxa3xx.c static void pxa_mask_ext_wakeup(struct irq_data *d)
d                 299 arch/arm/mach-pxa/pxa3xx.c 	pxa_mask_irq(d);
d                 300 arch/arm/mach-pxa/pxa3xx.c 	PECR &= ~PECR_IE(d->irq - IRQ_WAKEUP0);
d                 303 arch/arm/mach-pxa/pxa3xx.c static void pxa_unmask_ext_wakeup(struct irq_data *d)
d                 305 arch/arm/mach-pxa/pxa3xx.c 	pxa_unmask_irq(d);
d                 306 arch/arm/mach-pxa/pxa3xx.c 	PECR |= PECR_IE(d->irq - IRQ_WAKEUP0);
d                 309 arch/arm/mach-pxa/pxa3xx.c static int pxa_set_ext_wakeup_type(struct irq_data *d, unsigned int flow_type)
d                 312 arch/arm/mach-pxa/pxa3xx.c 		PWER |= 1 << (d->irq - IRQ_WAKEUP0);
d                 315 arch/arm/mach-pxa/pxa3xx.c 		PWER |= 1 << (d->irq - IRQ_WAKEUP0 + 2);
d                  34 arch/arm/mach-pxa/pxa_cplds_irqs.c static irqreturn_t cplds_irq_handler(int in_irq, void *d)
d                  36 arch/arm/mach-pxa/pxa_cplds_irqs.c 	struct cplds *fpga = d;
d                  51 arch/arm/mach-pxa/pxa_cplds_irqs.c static void cplds_irq_mask(struct irq_data *d)
d                  53 arch/arm/mach-pxa/pxa_cplds_irqs.c 	struct cplds *fpga = irq_data_get_irq_chip_data(d);
d                  54 arch/arm/mach-pxa/pxa_cplds_irqs.c 	unsigned int cplds_irq = irqd_to_hwirq(d);
d                  61 arch/arm/mach-pxa/pxa_cplds_irqs.c static void cplds_irq_unmask(struct irq_data *d)
d                  63 arch/arm/mach-pxa/pxa_cplds_irqs.c 	struct cplds *fpga = irq_data_get_irq_chip_data(d);
d                  64 arch/arm/mach-pxa/pxa_cplds_irqs.c 	unsigned int cplds_irq = irqd_to_hwirq(d);
d                  82 arch/arm/mach-pxa/pxa_cplds_irqs.c static int cplds_irq_domain_map(struct irq_domain *d, unsigned int irq,
d                  85 arch/arm/mach-pxa/pxa_cplds_irqs.c 	struct cplds *fpga = d->host_data;
d                 252 arch/arm/mach-pxa/viper.c static void viper_ack_irq(struct irq_data *d)
d                 254 arch/arm/mach-pxa/viper.c 	int viper_irq = viper_irq_to_bitmask(d->irq);
d                 262 arch/arm/mach-pxa/viper.c static void viper_mask_irq(struct irq_data *d)
d                 264 arch/arm/mach-pxa/viper.c 	viper_irq_enabled_mask &= ~(viper_irq_to_bitmask(d->irq));
d                 267 arch/arm/mach-pxa/viper.c static void viper_unmask_irq(struct irq_data *d)
d                 269 arch/arm/mach-pxa/viper.c 	viper_irq_enabled_mask |= viper_irq_to_bitmask(d->irq);
d                  88 arch/arm/mach-pxa/zeus.c static void zeus_ack_irq(struct irq_data *d)
d                  90 arch/arm/mach-pxa/zeus.c 	__raw_writew(zeus_irq_to_bitmask(d->irq), ZEUS_CPLD_ISA_IRQ);
d                  93 arch/arm/mach-pxa/zeus.c static void zeus_mask_irq(struct irq_data *d)
d                  95 arch/arm/mach-pxa/zeus.c 	zeus_irq_enabled_mask &= ~(zeus_irq_to_bitmask(d->irq));
d                  98 arch/arm/mach-pxa/zeus.c static void zeus_unmask_irq(struct irq_data *d)
d                 100 arch/arm/mach-pxa/zeus.c 	zeus_irq_enabled_mask |= zeus_irq_to_bitmask(d->irq);
d                 370 arch/arm/mach-rpc/ecard.c 			ecard_readbytes((unsigned char *)excd.d.string, ec,
d                 379 arch/arm/mach-rpc/ecard.c 	memcpy(cd->d.string, excd.d.string, 256);
d                 428 arch/arm/mach-rpc/ecard.c static void ecard_irq_unmask(struct irq_data *d)
d                 430 arch/arm/mach-rpc/ecard.c 	ecard_t *ec = irq_data_get_irq_chip_data(d);
d                 437 arch/arm/mach-rpc/ecard.c 			ec->ops->irqenable(ec, d->irq);
d                 440 arch/arm/mach-rpc/ecard.c 				"enable IRQs for %d\n", d->irq);
d                 444 arch/arm/mach-rpc/ecard.c static void ecard_irq_mask(struct irq_data *d)
d                 446 arch/arm/mach-rpc/ecard.c 	ecard_t *ec = irq_data_get_irq_chip_data(d);
d                 453 arch/arm/mach-rpc/ecard.c 			ec->ops->irqdisable(ec, d->irq);
d                 637 arch/arm/mach-rpc/ecard.c 			ec->card_desc = kmalloc(strlen(incd.d.string)+1, GFP_KERNEL);
d                 640 arch/arm/mach-rpc/ecard.c 				strcpy((char *)ec->card_desc, incd.d.string);
d                  49 arch/arm/mach-rpc/ecard.h 	} d;
d                  49 arch/arm/mach-rpc/include/mach/acornfb.h 		int d;
d                  60 arch/arm/mach-rpc/include/mach/acornfb.h 		d = pixclk - p;
d                  62 arch/arm/mach-rpc/include/mach/acornfb.h 		if (d < 0)
d                  63 arch/arm/mach-rpc/include/mach/acornfb.h 			d = -d;
d                  65 arch/arm/mach-rpc/include/mach/acornfb.h 		if (d < best_d) {
d                  66 arch/arm/mach-rpc/include/mach/acornfb.h 			best_d = d;
d                  71 arch/arm/mach-rpc/include/mach/acornfb.h 		if (d == 0)
d                  17 arch/arm/mach-rpc/irq.c static void __iomem *iomd_get_base(struct irq_data *d)
d                  19 arch/arm/mach-rpc/irq.c 	void *cd = irq_data_get_irq_chip_data(d);
d                  26 arch/arm/mach-rpc/irq.c 	struct irq_data *d = irq_get_irq_data(irq);
d                  28 arch/arm/mach-rpc/irq.c 	d->mask = mask;
d                  32 arch/arm/mach-rpc/irq.c static void iomd_irq_mask_ack(struct irq_data *d)
d                  34 arch/arm/mach-rpc/irq.c 	void __iomem *base = iomd_get_base(d);
d                  35 arch/arm/mach-rpc/irq.c 	unsigned int val, mask = d->mask;
d                  42 arch/arm/mach-rpc/irq.c static void iomd_irq_mask(struct irq_data *d)
d                  44 arch/arm/mach-rpc/irq.c 	void __iomem *base = iomd_get_base(d);
d                  45 arch/arm/mach-rpc/irq.c 	unsigned int val, mask = d->mask;
d                  51 arch/arm/mach-rpc/irq.c static void iomd_irq_unmask(struct irq_data *d)
d                  53 arch/arm/mach-rpc/irq.c 	void __iomem *base = iomd_get_base(d);
d                  54 arch/arm/mach-rpc/irq.c 	unsigned int val, mask = d->mask;
d                 204 arch/arm/mach-s3c24xx/include/mach/io.h #define insb(p,d,l)	__raw_readsb(__ioaddr(p),d,l)
d                 205 arch/arm/mach-s3c24xx/include/mach/io.h #define insw(p,d,l)	__raw_readsw(__ioaddr(p),d,l)
d                 206 arch/arm/mach-s3c24xx/include/mach/io.h #define insl(p,d,l)	__raw_readsl(__ioaddr(p),d,l)
d                 208 arch/arm/mach-s3c24xx/include/mach/io.h #define outsb(p,d,l)	__raw_writesb(__ioaddr(p),d,l)
d                 209 arch/arm/mach-s3c24xx/include/mach/io.h #define outsw(p,d,l)	__raw_writesw(__ioaddr(p),d,l)
d                 210 arch/arm/mach-s3c24xx/include/mach/io.h #define outsl(p,d,l)	__raw_writesl(__ioaddr(p),d,l)
d                 143 arch/arm/mach-sa1100/neponset.c 	struct neponset_drvdata *d = irq_desc_get_handler_data(desc);
d                 157 arch/arm/mach-sa1100/neponset.c 		irr = readb_relaxed(d->base + IRR);
d                 180 arch/arm/mach-sa1100/neponset.c 				generic_handle_irq(d->irq_base + NEP_IRQ_SMC91X);
d                 183 arch/arm/mach-sa1100/neponset.c 				generic_handle_irq(d->irq_base + NEP_IRQ_USAR);
d                 189 arch/arm/mach-sa1100/neponset.c 			generic_handle_irq(d->irq_base + NEP_IRQ_SA1111);
d                 227 arch/arm/mach-sa1100/neponset.c 	struct neponset_drvdata *d;
d                 279 arch/arm/mach-sa1100/neponset.c 	d = kzalloc(sizeof(*d), GFP_KERNEL);
d                 280 arch/arm/mach-sa1100/neponset.c 	if (!d) {
d                 285 arch/arm/mach-sa1100/neponset.c 	d->base = ioremap(nep_res->start, SZ_4K);
d                 286 arch/arm/mach-sa1100/neponset.c 	if (!d->base) {
d                 291 arch/arm/mach-sa1100/neponset.c 	if (readb_relaxed(d->base + WHOAMI) != 0x11) {
d                 293 arch/arm/mach-sa1100/neponset.c 			 readb_relaxed(d->base + WHOAMI));
d                 307 arch/arm/mach-sa1100/neponset.c 	d->irq_base = ret;
d                 309 arch/arm/mach-sa1100/neponset.c 	irq_set_chip_and_handler(d->irq_base + NEP_IRQ_SMC91X, &nochip,
d                 311 arch/arm/mach-sa1100/neponset.c 	irq_clear_status_flags(d->irq_base + NEP_IRQ_SMC91X, IRQ_NOREQUEST | IRQ_NOPROBE);
d                 312 arch/arm/mach-sa1100/neponset.c 	irq_set_chip_and_handler(d->irq_base + NEP_IRQ_USAR, &nochip,
d                 314 arch/arm/mach-sa1100/neponset.c 	irq_clear_status_flags(d->irq_base + NEP_IRQ_USAR, IRQ_NOREQUEST | IRQ_NOPROBE);
d                 315 arch/arm/mach-sa1100/neponset.c 	irq_set_chip(d->irq_base + NEP_IRQ_SA1111, &nochip);
d                 318 arch/arm/mach-sa1100/neponset.c 	irq_set_chained_handler_and_data(irq, neponset_irq_handler, d);
d                 321 arch/arm/mach-sa1100/neponset.c 	writeb_relaxed(NCR_GP01_OFF, d->base + NCR_0);
d                 323 arch/arm/mach-sa1100/neponset.c 	neponset_init_gpio(&d->gpio[0], &dev->dev, "neponset-ncr",
d                 324 arch/arm/mach-sa1100/neponset.c 			   d->base + NCR_0, NCR_NGPIO, false,
d                 326 arch/arm/mach-sa1100/neponset.c 	neponset_init_gpio(&d->gpio[1], &dev->dev, "neponset-mdm-ctl0",
d                 327 arch/arm/mach-sa1100/neponset.c 			   d->base + MDM_CTL_0, MDM_CTL0_NGPIO, false,
d                 329 arch/arm/mach-sa1100/neponset.c 	neponset_init_gpio(&d->gpio[2], &dev->dev, "neponset-mdm-ctl1",
d                 330 arch/arm/mach-sa1100/neponset.c 			   d->base + MDM_CTL_1, MDM_CTL1_NGPIO, true,
d                 332 arch/arm/mach-sa1100/neponset.c 	neponset_init_gpio(&d->gpio[3], &dev->dev, "neponset-aud-ctl",
d                 333 arch/arm/mach-sa1100/neponset.c 			   d->base + AUD_CTL, AUD_NGPIO, false,
d                 349 arch/arm/mach-sa1100/neponset.c 		 d->irq_base, d->irq_base + NEP_IRQ_NR - 1);
d                 350 arch/arm/mach-sa1100/neponset.c 	nep = d;
d                 356 arch/arm/mach-sa1100/neponset.c 	sa1111_resources[1].start = d->irq_base + NEP_IRQ_SA1111;
d                 357 arch/arm/mach-sa1100/neponset.c 	sa1111_resources[1].end = d->irq_base + NEP_IRQ_SA1111;
d                 358 arch/arm/mach-sa1100/neponset.c 	d->sa1111 = platform_device_register_full(&sa1111_devinfo);
d                 362 arch/arm/mach-sa1100/neponset.c 	smc91x_resources[2].start = d->irq_base + NEP_IRQ_SMC91X;
d                 363 arch/arm/mach-sa1100/neponset.c 	smc91x_resources[2].end = d->irq_base + NEP_IRQ_SMC91X;
d                 364 arch/arm/mach-sa1100/neponset.c 	d->smc91x = platform_device_register_full(&smc91x_devinfo);
d                 366 arch/arm/mach-sa1100/neponset.c 	platform_set_drvdata(dev, d);
d                 372 arch/arm/mach-sa1100/neponset.c 	iounmap(d->base);
d                 374 arch/arm/mach-sa1100/neponset.c 	kfree(d);
d                 381 arch/arm/mach-sa1100/neponset.c 	struct neponset_drvdata *d = platform_get_drvdata(dev);
d                 384 arch/arm/mach-sa1100/neponset.c 	if (!IS_ERR(d->sa1111))
d                 385 arch/arm/mach-sa1100/neponset.c 		platform_device_unregister(d->sa1111);
d                 386 arch/arm/mach-sa1100/neponset.c 	if (!IS_ERR(d->smc91x))
d                 387 arch/arm/mach-sa1100/neponset.c 		platform_device_unregister(d->smc91x);
d                 394 arch/arm/mach-sa1100/neponset.c 	irq_free_descs(d->irq_base, NEP_IRQ_NR);
d                 396 arch/arm/mach-sa1100/neponset.c 	iounmap(d->base);
d                 397 arch/arm/mach-sa1100/neponset.c 	kfree(d);
d                 405 arch/arm/mach-sa1100/neponset.c 	struct neponset_drvdata *d = dev_get_drvdata(dev);
d                 408 arch/arm/mach-sa1100/neponset.c 	for (i = 0; i < ARRAY_SIZE(d->gpio); i++) {
d                 409 arch/arm/mach-sa1100/neponset.c 		ret = gpio_reg_resume(d->gpio[i]);
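
The neponset hits above are the chained-handler pattern: the parent interrupt reads the board's status register and forwards each pending source with generic_handle_irq(), bracketed by chained_irq_enter()/chained_irq_exit(), and the probe code installs it with irq_set_chained_handler_and_data(). A sketch of just the demux step, with made-up register names and bits:

#include <linux/bits.h>
#include <linux/io.h>
#include <linux/irq.h>
#include <linux/irqchip/chained_irq.h>

#define MYBOARD_IRR		0x24		/* hypothetical status register */
#define MYBOARD_IRR_SRC0	BIT(0)
#define MYBOARD_IRR_SRC1	BIT(1)

struct myboard_drvdata {
	void __iomem	*base;
	unsigned int	irq_base;
};

static void myboard_irq_handler(struct irq_desc *desc)
{
	struct myboard_drvdata *d = irq_desc_get_handler_data(desc);
	struct irq_chip *chip = irq_desc_get_chip(desc);
	unsigned int irr;

	chained_irq_enter(chip, desc);
	irr = readb_relaxed(d->base + MYBOARD_IRR);
	if (irr & MYBOARD_IRR_SRC0)
		generic_handle_irq(d->irq_base + 0);
	if (irr & MYBOARD_IRR_SRC1)
		generic_handle_irq(d->irq_base + 1);
	chained_irq_exit(chip, desc);
}
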
d                  53 arch/arm/mach-tegra/sleep.h 	subne	\rd, \rcpu, #1
d                  54 arch/arm/mach-tegra/sleep.h 	movne	\rd, \rd, lsl #3
d                  55 arch/arm/mach-tegra/sleep.h 	addne	\rd, \rd, #0x14
d                  56 arch/arm/mach-tegra/sleep.h 	moveq	\rd, #0
d                  62 arch/arm/mach-tegra/sleep.h 	subne	\rd, \rcpu, #1
d                  63 arch/arm/mach-tegra/sleep.h 	movne	\rd, \rd, lsl #3
d                  64 arch/arm/mach-tegra/sleep.h 	addne	\rd, \rd, #0x18
d                  65 arch/arm/mach-tegra/sleep.h 	moveq	\rd, #8
d                  70 arch/arm/mach-tegra/sleep.h 	mrc	p15, 0, \rd, c0, c0, 5
d                  71 arch/arm/mach-tegra/sleep.h 	and	\rd, \rd, #0xF
d                 327 arch/arm/mm/cache-feroceon-l2.c 		int i, d;
d                 331 arch/arm/mm/cache-feroceon-l2.c 		d = flush_and_disable_dcache();
d                 337 arch/arm/mm/cache-feroceon-l2.c 		if (d)
d                  62 arch/arm/oprofile/common.c static int report_trace(struct stackframe *frame, void *d)
d                  64 arch/arm/oprofile/common.c 	unsigned int *depth = d;
d                  66 arch/arm/plat-omap/dma.c static struct omap_dma_dev_attr *d;
d                 768 arch/arm/plat-omap/dma.c 	if (d->dev_caps & IS_RW_PRIORITY)
d                1299 arch/arm/plat-omap/dma.c 	d			= p->dma_attr;
d                1302 arch/arm/plat-omap/dma.c 	if ((d->dev_caps & RESERVE_CHANNEL) && omap_dma_reserve_channels
d                1303 arch/arm/plat-omap/dma.c 			&& (omap_dma_reserve_channels < d->lch_count))
d                1304 arch/arm/plat-omap/dma.c 		d->lch_count	= omap_dma_reserve_channels;
d                1306 arch/arm/plat-omap/dma.c 	dma_lch_count		= d->lch_count;
d                1308 arch/arm/plat-omap/dma.c 	enable_1510_mode	= d->dev_caps & ENABLE_1510_MODE;
d                1362 arch/arm/plat-omap/dma.c 	if (d->dev_caps & IS_RW_PRIORITY)
d                1366 arch/arm/plat-omap/dma.c 	if (dma_omap2plus() && !(d->dev_caps & DMA_ENGINE_HANDLE_IRQ)) {
d                1383 arch/arm/plat-omap/dma.c 	if (d->dev_caps & HS_CHANNELS_RESERVED) {
d                 480 arch/arm/plat-orion/common.c void __init orion_ge00_switch_init(struct dsa_chip_data *d)
d                 487 arch/arm/plat-orion/common.c 	for (i = 0; i < ARRAY_SIZE(d->port_names); i++) {
d                 488 arch/arm/plat-orion/common.c 		if (!strcmp(d->port_names[i], "cpu")) {
d                 489 arch/arm/plat-orion/common.c 			d->netdev[i] = &orion_ge00.dev;
d                 494 arch/arm/plat-orion/common.c 	orion_ge00_switch_board_info.mdio_addr = d->sw_addr;
d                 495 arch/arm/plat-orion/common.c 	orion_ge00_switch_board_info.platform_data = d;
d                 351 arch/arm/plat-orion/gpio.c static int gpio_irq_set_type(struct irq_data *d, u32 type)
d                 353 arch/arm/plat-orion/gpio.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 354 arch/arm/plat-orion/gpio.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                 359 arch/arm/plat-orion/gpio.c 	pin = d->hwirq - ochip->secondary_irq_base;
d                 372 arch/arm/plat-orion/gpio.c 		if (irq_setup_alt_chip(d, type))
d                 495 arch/arm/plat-orion/gpio.c static void orion_gpio_unmask_irq(struct irq_data *d)
d                 497 arch/arm/plat-orion/gpio.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 498 arch/arm/plat-orion/gpio.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                 500 arch/arm/plat-orion/gpio.c 	u32 mask = d->mask;
d                 509 arch/arm/plat-orion/gpio.c static void orion_gpio_mask_irq(struct irq_data *d)
d                 511 arch/arm/plat-orion/gpio.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 512 arch/arm/plat-orion/gpio.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                 513 arch/arm/plat-orion/gpio.c 	u32 mask = d->mask;
d                  60 arch/arm/plat-orion/include/plat/common.h void __init orion_ge00_switch_init(struct dsa_chip_data *d);
d                  39 arch/arm/plat-orion/pcie.c #define  PCIE_CONF_DEV(d)		(((d) & 0x1f) << 11)
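
PCIE_CONF_DEV() above packs the PCI device number into bits 15:11 of a configuration-cycle address. A stand-alone illustration of that packing; only the DEV macro appears in the listing, and the CONF_BUS/CONF_FUNC shifts here follow the conventional bus[23:16]/dev[15:11]/func[10:8] layout rather than being copied from the orion driver:

#include <stdint.h>
#include <stdio.h>

#define CONF_BUS(b)	(((uint32_t)(b) & 0xff) << 16)
#define CONF_DEV(d)	(((uint32_t)(d) & 0x1f) << 11)
#define CONF_FUNC(f)	(((uint32_t)(f) & 0x07) << 8)

int main(void)
{
	uint32_t addr = CONF_BUS(1) | CONF_DEV(3) | CONF_FUNC(0);

	printf("config address field: 0x%08x\n", addr);	/* 0x00011800 */
	return 0;
}
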
d                 464 arch/arm/probes/decode.c 			struct decode_table *d = (struct decode_table *)h;
d                 465 arch/arm/probes/decode.c 			next = (struct decode_header *)d->table.table;
d                 471 arch/arm/probes/decode.c 			struct decode_custom *d = (struct decode_custom *)h;
d                 472 arch/arm/probes/decode.c 			int action = d->decoder.action;
d                 482 arch/arm/probes/decode.c 			struct decode_simulate *d = (struct decode_simulate *)h;
d                 483 arch/arm/probes/decode.c 			int action = d->handler.action;
d                 494 arch/arm/probes/decode.c 			struct decode_emulate *d = (struct decode_emulate *)h;
d                 495 arch/arm/probes/decode.c 			int action = d->handler.action;
d                  69 arch/arm/probes/kprobes/actions-thumb.c 		const struct decode_header *d)
d                 145 arch/arm/probes/kprobes/actions-thumb.c 		const struct decode_header *d)
d                 147 arch/arm/probes/kprobes/actions-thumb.c 	enum probes_insn ret = kprobe_decode_ldmstm(insn, asi, d);
d                 405 arch/arm/probes/kprobes/actions-thumb.c 		const struct decode_header *d)
d                 423 arch/arm/probes/kprobes/actions-thumb.c 		const struct decode_header *d)
d                 513 arch/arm/probes/kprobes/actions-thumb.c 		const struct decode_header *d)
d                 541 arch/arm/probes/kprobes/actions-thumb.c 		const struct decode_header *d)
d                 596 arch/arm/probes/kprobes/actions-thumb.c 		const struct decode_header *d)
d                 648 arch/arm/probes/kprobes/test-core.c 		struct decode_table *d = (struct decode_table *)h;
d                 652 arch/arm/probes/kprobes/test-core.c 		return table_iter(d->table.table, table_test_fn, &args2);
d                 753 arch/arm/probes/kprobes/test-core.c 		struct decode_table *d = (struct decode_table *)h;
d                 756 arch/arm/probes/kprobes/test-core.c 		ret = table_iter(d->table.table, coverage_start_fn, coverage);
d                 106 arch/arm/probes/uprobes/actions-arm.c 	     const struct decode_header *d)
d                 110 arch/arm/probes/uprobes/actions-arm.c 	struct decode_emulate *decode = (struct decode_emulate *) d;
d                 130 arch/arm/probes/uprobes/actions-arm.c 	     const struct decode_header *d, bool alu)
d                 134 arch/arm/probes/uprobes/actions-arm.c 	enum probes_insn ret = decode_pc_ro(insn, asi, d);
d                 146 arch/arm/probes/uprobes/actions-arm.c 			      const struct decode_header *d)
d                 148 arch/arm/probes/uprobes/actions-arm.c 	return decode_wb_pc(insn, asi, d, true);
d                 153 arch/arm/probes/uprobes/actions-arm.c 	   const struct decode_header *d)
d                 155 arch/arm/probes/uprobes/actions-arm.c 	return decode_wb_pc(insn, asi, d, false);
d                 161 arch/arm/probes/uprobes/actions-arm.c 		     const struct decode_header *d)
d                  11 arch/arm/probes/uprobes/core.h 				      const struct decode_header *d);
d                  15 arch/arm/probes/uprobes/core.h 			    const struct decode_header *d);
d                  20 arch/arm/probes/uprobes/core.h 			      const struct decode_header *d);
d                  24 arch/arm/probes/uprobes/core.h 	     const struct decode_header *d, bool alu);
d                  28 arch/arm/probes/uprobes/core.h 	     const struct decode_header *d);
d                  48 arch/arm/vfp/vfpdouble.c static void vfp_double_dump(const char *str, struct vfp_double *d)
d                  51 arch/arm/vfp/vfpdouble.c 		 str, d->sign != 0, d->exponent, d->significand);
d                 194 arch/arm/vfp/vfpdouble.c 		s64 d = vfp_double_pack(vd);
d                 196 arch/arm/vfp/vfpdouble.c 			 dd, d, exceptions);
d                 197 arch/arm/vfp/vfpdouble.c 		vfp_put_double(d, dd);
d                 360 arch/arm/vfp/vfpdouble.c 	s64 d, m;
d                 373 arch/arm/vfp/vfpdouble.c 	d = vfp_get_double(dd);
d                 374 arch/arm/vfp/vfpdouble.c 	if (vfp_double_packed_exponent(d) == 2047 && vfp_double_packed_mantissa(d)) {
d                 376 arch/arm/vfp/vfpdouble.c 		if (signal_on_qnan || !(vfp_double_packed_mantissa(d) & (1ULL << (VFP_DOUBLE_MANTISSA_BITS - 1))))
d                 384 arch/arm/vfp/vfpdouble.c 		if (d == m || vfp_double_packed_abs(d | m) == 0) {
d                 389 arch/arm/vfp/vfpdouble.c 		} else if (vfp_double_packed_sign(d ^ m)) {
d                 393 arch/arm/vfp/vfpdouble.c 			if (vfp_double_packed_sign(d))
d                 403 arch/arm/vfp/vfpdouble.c 		} else if ((vfp_double_packed_sign(d) != 0) ^ (d < m)) {
d                 408 arch/arm/vfp/vfpdouble.c 		} else if ((vfp_double_packed_sign(d) != 0) ^ (d > m)) {
d                 509 arch/arm/vfp/vfpdouble.c 	u32 d, exceptions = 0;
d                 526 arch/arm/vfp/vfpdouble.c 		d = vdm.sign ? 0 : 0xffffffff;
d                 535 arch/arm/vfp/vfpdouble.c 		d = (vdm.significand << 1) >> shift;
d                 540 arch/arm/vfp/vfpdouble.c 			if ((d & 1) == 0)
d                 549 arch/arm/vfp/vfpdouble.c 			if (d < 0xffffffff)
d                 550 arch/arm/vfp/vfpdouble.c 				d += 1;
d                 555 arch/arm/vfp/vfpdouble.c 		if (d && vdm.sign) {
d                 556 arch/arm/vfp/vfpdouble.c 			d = 0;
d                 561 arch/arm/vfp/vfpdouble.c 		d = 0;
d                 565 arch/arm/vfp/vfpdouble.c 				d = 1;
d                 567 arch/arm/vfp/vfpdouble.c 				d = 0;
d                 573 arch/arm/vfp/vfpdouble.c 	pr_debug("VFP: ftoui: d(s%d)=%08x exceptions=%08x\n", sd, d, exceptions);
d                 575 arch/arm/vfp/vfpdouble.c 	vfp_put_float(d, sd);
d                 588 arch/arm/vfp/vfpdouble.c 	u32 d, exceptions = 0;
d                 603 arch/arm/vfp/vfpdouble.c 		d = 0;
d                 606 arch/arm/vfp/vfpdouble.c 		d = 0x7fffffff;
d                 608 arch/arm/vfp/vfpdouble.c 			d = ~d;
d                 614 arch/arm/vfp/vfpdouble.c 		d = (vdm.significand << 1) >> shift;
d                 619 arch/arm/vfp/vfpdouble.c 			if ((d & 1) == 0)
d                 627 arch/arm/vfp/vfpdouble.c 		if ((rem + incr) < rem && d < 0xffffffff)
d                 628 arch/arm/vfp/vfpdouble.c 			d += 1;
d                 629 arch/arm/vfp/vfpdouble.c 		if (d > 0x7fffffff + (vdm.sign != 0)) {
d                 630 arch/arm/vfp/vfpdouble.c 			d = 0x7fffffff + (vdm.sign != 0);
d                 636 arch/arm/vfp/vfpdouble.c 			d = -d;
d                 638 arch/arm/vfp/vfpdouble.c 		d = 0;
d                 642 arch/arm/vfp/vfpdouble.c 				d = 1;
d                 644 arch/arm/vfp/vfpdouble.c 				d = -1;
d                 648 arch/arm/vfp/vfpdouble.c 	pr_debug("VFP: ftosi: d(s%d)=%08x exceptions=%08x\n", sd, d, exceptions);
d                 650 arch/arm/vfp/vfpdouble.c 	vfp_put_float((s32)d, sd);
d                 199 arch/arm/vfp/vfpsingle.c 		s32 d = vfp_single_pack(vs);
d                 202 arch/arm/vfp/vfpsingle.c 			 sd, d, exceptions);
d                 204 arch/arm/vfp/vfpsingle.c 		vfp_put_float(d, sd);
d                 405 arch/arm/vfp/vfpsingle.c 	s32 d;
d                 408 arch/arm/vfp/vfpsingle.c 	d = vfp_get_float(sd);
d                 418 arch/arm/vfp/vfpsingle.c 	if (vfp_single_packed_exponent(d) == 255 && vfp_single_packed_mantissa(d)) {
d                 420 arch/arm/vfp/vfpsingle.c 		if (signal_on_qnan || !(vfp_single_packed_mantissa(d) & (1 << (VFP_SINGLE_MANTISSA_BITS - 1))))
d                 428 arch/arm/vfp/vfpsingle.c 		if (d == m || vfp_single_packed_abs(d | m) == 0) {
d                 433 arch/arm/vfp/vfpsingle.c 		} else if (vfp_single_packed_sign(d ^ m)) {
d                 437 arch/arm/vfp/vfpsingle.c 			if (vfp_single_packed_sign(d))
d                 447 arch/arm/vfp/vfpsingle.c 		} else if ((vfp_single_packed_sign(d) != 0) ^ (d < m)) {
d                 452 arch/arm/vfp/vfpsingle.c 		} else if ((vfp_single_packed_sign(d) != 0) ^ (d > m)) {
d                 550 arch/arm/vfp/vfpsingle.c 	u32 d, exceptions = 0;
d                 568 arch/arm/vfp/vfpsingle.c 		d = vsm.sign ? 0 : 0xffffffff;
d                 577 arch/arm/vfp/vfpsingle.c 		d = (vsm.significand << 1) >> shift;
d                 582 arch/arm/vfp/vfpsingle.c 			if ((d & 1) == 0)
d                 591 arch/arm/vfp/vfpsingle.c 			if (d < 0xffffffff)
d                 592 arch/arm/vfp/vfpsingle.c 				d += 1;
d                 597 arch/arm/vfp/vfpsingle.c 		if (d && vsm.sign) {
d                 598 arch/arm/vfp/vfpsingle.c 			d = 0;
d                 603 arch/arm/vfp/vfpsingle.c 		d = 0;
d                 607 arch/arm/vfp/vfpsingle.c 				d = 1;
d                 609 arch/arm/vfp/vfpsingle.c 				d = 0;
d                 615 arch/arm/vfp/vfpsingle.c 	pr_debug("VFP: ftoui: d(s%d)=%08x exceptions=%08x\n", sd, d, exceptions);
d                 617 arch/arm/vfp/vfpsingle.c 	vfp_put_float(d, sd);
d                 630 arch/arm/vfp/vfpsingle.c 	u32 d, exceptions = 0;
d                 645 arch/arm/vfp/vfpsingle.c 		d = 0;
d                 651 arch/arm/vfp/vfpsingle.c 		d = 0x7fffffff;
d                 653 arch/arm/vfp/vfpsingle.c 			d = ~d;
d                 660 arch/arm/vfp/vfpsingle.c 		d = (vsm.significand << 1) >> shift;
d                 665 arch/arm/vfp/vfpsingle.c 			if ((d & 1) == 0)
d                 673 arch/arm/vfp/vfpsingle.c 		if ((rem + incr) < rem && d < 0xffffffff)
d                 674 arch/arm/vfp/vfpsingle.c 			d += 1;
d                 675 arch/arm/vfp/vfpsingle.c 		if (d > 0x7fffffff + (vsm.sign != 0)) {
d                 676 arch/arm/vfp/vfpsingle.c 			d = 0x7fffffff + (vsm.sign != 0);
d                 682 arch/arm/vfp/vfpsingle.c 			d = -d;
d                 684 arch/arm/vfp/vfpsingle.c 		d = 0;
d                 688 arch/arm/vfp/vfpsingle.c 				d = 1;
d                 690 arch/arm/vfp/vfpsingle.c 				d = -1;
d                 694 arch/arm/vfp/vfpsingle.c 	pr_debug("VFP: ftosi: d(s%d)=%08x exceptions=%08x\n", sd, d, exceptions);
d                 696 arch/arm/vfp/vfpsingle.c 	vfp_put_float((s32)d, sd);
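
The ftoui/ftosi hits in vfpdouble.c and vfpsingle.c follow the same shape: shift the significand into integer position, round according to the selected mode, then saturate to the destination range while accumulating exception flags. A stand-alone sketch of only the round-and-saturate idea, using the C library instead of unpacked significands and ignoring exception flags (compile with -lm):

#include <math.h>
#include <stdint.h>
#include <stdio.h>

static uint32_t double_to_u32_sat(double x)
{
	double r;

	if (isnan(x))
		return 0;			/* the VFP code also flags an invalid operation */
	r = nearbyint(x);			/* current rounding mode; default is nearest-even */
	if (r <= 0.0)
		return 0;			/* negative results saturate to 0 */
	if (r >= 4294967295.0)
		return UINT32_MAX;		/* too large: saturate to 0xffffffff */
	return (uint32_t)r;
}

int main(void)
{
	printf("%u\n", double_to_u32_sat(2.5));		/* 2: ties round to even */
	printf("%u\n", double_to_u32_sat(-3.0));	/* 0 */
	printf("%u\n", double_to_u32_sat(1e20));	/* 4294967295 */
	return 0;
}
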
d                 190 arch/arm64/include/asm/assembler.h 	orr	\rd, \lbits, \hbits, lsl #32
d                 269 arch/arm64/include/asm/assembler.h 	ldr	\rd, [\rn, #VMA_VM_MM]
d                 276 arch/arm64/include/asm/assembler.h 	ldr	\rd, [\rn, #MM_CONTEXT_ID]
d                 528 arch/arm64/include/asm/assembler.h 	mrs	\rd, sp_el0
d                 103 arch/arm64/include/asm/mmu.h 	struct bp_hardening_data *d;
d                 108 arch/arm64/include/asm/mmu.h 	d = arm64_get_bp_hardening_data();
d                 109 arch/arm64/include/asm/mmu.h 	if (d->fn)
d                 110 arch/arm64/include/asm/mmu.h 		d->fn();
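
The mmu.h hit above is the usual guarded-callback idiom: fetch a descriptor and invoke ->fn only if one was installed. A trivial stand-alone version:

#include <stdio.h>

struct hardening_data {
	void (*fn)(void);			/* NULL means nothing to run */
};

static void do_mitigation(void)
{
	puts("callback ran");
}

static void invoke(const struct hardening_data *d)
{
	if (d->fn)
		d->fn();
}

int main(void)
{
	struct hardening_data none = { .fn = NULL };
	struct hardening_data some = { .fn = do_mitigation };

	invoke(&none);				/* silently skipped */
	invoke(&some);				/* prints "callback ran" */
	return 0;
}
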
d                  21 arch/arm64/kernel/return_address.c static int save_return_addr(struct stackframe *frame, void *d)
d                  23 arch/arm64/kernel/return_address.c 	struct return_address_data *data = d;
d                 139 arch/arm64/kernel/stacktrace.c static int save_trace(struct stackframe *frame, void *d)
d                 141 arch/arm64/kernel/stacktrace.c 	struct stack_trace_data *data = d;
d                 137 arch/csky/abiv2/fpu.c #define STW_FPU_REGS(a, b, c, d)	\
d                 141 arch/csky/abiv2/fpu.c 	"stw    %3, (%4, "#d")\n"
d                 143 arch/csky/abiv2/fpu.c #define LDW_FPU_REGS(a, b, c, d)	\
d                 147 arch/csky/abiv2/fpu.c 	"ldw    %3, (%4, "#d")\n"
d                  57 arch/h8300/boot/compressed/misc.c 	char *d = (char *)dest, *s = (char *)src;
d                  60 arch/h8300/boot/compressed/misc.c 		d[i] = s[i];
d                  14 arch/h8300/include/asm/string.h extern void *memcpy(void *d, const void *s, size_t count);
d                  39 arch/hexagon/include/asm/io.h #define readsw(p, d, l)	__raw_readsw(p, d, l)
d                  40 arch/hexagon/include/asm/io.h #define writesw(p, d, l) __raw_writesw(p, d, l)
d                  42 arch/hexagon/include/asm/io.h #define readsl(p, d, l)   __raw_readsl(p, d, l)
d                  43 arch/hexagon/include/asm/io.h #define writesl(p, d, l)  __raw_writesl(p, d, l)
d                  25 arch/hexagon/lib/checksum.c #define VR_NEGATE(a, b, c, d)	(SIGN(a, 48) + SIGN(b, 32) + SIGN(c, 16) \
d                  26 arch/hexagon/lib/checksum.c 	+ SIGN(d, 0))
d                  27 arch/hexagon/lib/checksum.c #define VR_CARRY(a, b, c, d)	(CARRY(a, 48) + CARRY(b, 32) + CARRY(c, 16) \
d                  28 arch/hexagon/lib/checksum.c 	+ CARRY(d, 0))
d                  29 arch/hexagon/lib/checksum.c #define VR_SELECT(a, b, c, d)	(SELECT(a, 48) + SELECT(b, 32) + SELECT(c, 16) \
d                  30 arch/hexagon/lib/checksum.c 	+ SELECT(d, 0))
d                 447 arch/ia64/hp/common/sba_iommu.c 	long double d = size - 1;
d                 450 arch/ia64/hp/common/sba_iommu.c 	order = ia64_getf_exp(d);
d                 654 arch/ia64/hp/common/sba_iommu.c 				struct sba_dma_pair *d;
d                 657 arch/ia64/hp/common/sba_iommu.c 				d = &(ioc->saved[ioc->saved_cnt - 1]);
d                 661 arch/ia64/hp/common/sba_iommu.c 					sba_mark_invalid(ioc, d->iova, d->size);
d                 662 arch/ia64/hp/common/sba_iommu.c 					sba_free_range(ioc, d->iova, d->size);
d                 663 arch/ia64/hp/common/sba_iommu.c 					d--;
d                1038 arch/ia64/hp/common/sba_iommu.c 	struct sba_dma_pair *d;
d                1077 arch/ia64/hp/common/sba_iommu.c 	d = &(ioc->saved[ioc->saved_cnt]);
d                1078 arch/ia64/hp/common/sba_iommu.c 	d->iova = iova;
d                1079 arch/ia64/hp/common/sba_iommu.c 	d->size = size;
d                1084 arch/ia64/hp/common/sba_iommu.c 			sba_mark_invalid(ioc, d->iova, d->size);
d                1085 arch/ia64/hp/common/sba_iommu.c 			sba_free_range(ioc, d->iova, d->size);
d                1086 arch/ia64/hp/common/sba_iommu.c 			d--;
d                 380 arch/ia64/include/asm/bitops.h 	long double d = x;
d                 383 arch/ia64/include/asm/bitops.h 	exp = ia64_getf_exp(d);
d                  90 arch/ia64/include/asm/mca_asm.h 	srlz.d;										\
d                 108 arch/ia64/include/asm/mca_asm.h 	srlz.d;										\
d                 173 arch/ia64/include/asm/mca_asm.h 	srlz.d;							\
d                  66 arch/ia64/include/asm/native/inst.h (pred)	itc.d reg
d                  70 arch/ia64/include/asm/native/inst.h (pred_d) itc.d reg
d                  84 arch/ia64/include/asm/native/inst.h 	srlz.d
d                 160 arch/ia64/include/asm/page.h 	long double d = size - 1;
d                 163 arch/ia64/include/asm/page.h 	order = ia64_getf_exp(d);
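
The sba_iommu.c and page.h hits use the same trick: load (size - 1) into a floating-point register and read its exponent with ia64_getf_exp(), which (after removing the exponent bias) gives ceil(log2(size)) without a loop. A user-space sketch with frexp() standing in for getf.exp (compile with -lm):

#include <math.h>
#include <stdio.h>

static int ceil_log2(unsigned long size)	/* valid for size >= 2 */
{
	int exp;

	frexp((double)(size - 1), &exp);	/* size-1 == m * 2^exp, 0.5 <= m < 1 */
	return exp;				/* == floor(log2(size-1)) + 1 == ceil(log2(size)) */
}

int main(void)
{
	printf("%d %d %d\n", ceil_log2(4096), ceil_log2(4097), ceil_log2(8192));
	/* prints: 12 13 13 */
	return 0;
}
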
d                 736 arch/ia64/kernel/palinfo.c 		unsigned long d:1;
d                 801 arch/ia64/kernel/palinfo.c 			   gr_reg->d);
d                1169 arch/ia64/kernel/unwind.c #define UNW_DEC_REG_GR(fmt,r,d,arg)		desc_reg_gr(r,d,arg)
d                1178 arch/ia64/kernel/unwind.c #define UNW_DEC_RP_BR(fmt,d,arg)		desc_rp_br(d,arg)
d                 133 arch/m68k/68000/ints.c static void intc_irq_unmask(struct irq_data *d)
d                 135 arch/m68k/68000/ints.c 	IMR &= ~(1 << d->irq);
d                 138 arch/m68k/68000/ints.c static void intc_irq_mask(struct irq_data *d)
d                 140 arch/m68k/68000/ints.c 	IMR |= (1 << d->irq);
d                  50 arch/m68k/coldfire/intc-2.c static void intc_irq_mask(struct irq_data *d)
d                  52 arch/m68k/coldfire/intc-2.c 	unsigned int irq = d->irq - MCFINT_VECBASE;
d                  68 arch/m68k/coldfire/intc-2.c static void intc_irq_unmask(struct irq_data *d)
d                  70 arch/m68k/coldfire/intc-2.c 	unsigned int irq = d->irq - MCFINT_VECBASE;
d                  96 arch/m68k/coldfire/intc-2.c static void intc_irq_ack(struct irq_data *d)
d                  98 arch/m68k/coldfire/intc-2.c 	unsigned int irq = d->irq;
d                 112 arch/m68k/coldfire/intc-2.c static unsigned int intc_irq_startup(struct irq_data *d)
d                 114 arch/m68k/coldfire/intc-2.c 	unsigned int irq = d->irq - MCFINT_VECBASE;
d                 126 arch/m68k/coldfire/intc-2.c 	irq = d->irq;
d                 141 arch/m68k/coldfire/intc-2.c 	intc_irq_unmask(d);
d                 145 arch/m68k/coldfire/intc-2.c static int intc_irq_set_type(struct irq_data *d, unsigned int type)
d                 147 arch/m68k/coldfire/intc-2.c 	unsigned int irq = d->irq;
d                  20 arch/m68k/coldfire/intc-5249.c static void intc2_irq_gpio_mask(struct irq_data *d)
d                  24 arch/m68k/coldfire/intc-5249.c 	imr &= ~(0x1 << (d->irq - MCF_IRQ_GPIO0));
d                  28 arch/m68k/coldfire/intc-5249.c static void intc2_irq_gpio_unmask(struct irq_data *d)
d                  32 arch/m68k/coldfire/intc-5249.c 	imr |= (0x1 << (d->irq - MCF_IRQ_GPIO0));
d                  36 arch/m68k/coldfire/intc-5249.c static void intc2_irq_gpio_ack(struct irq_data *d)
d                  38 arch/m68k/coldfire/intc-5249.c 	writel(0x1 << (d->irq - MCF_IRQ_GPIO0), MCFSIM2_GPIOINTCLEAR);
d                  21 arch/m68k/coldfire/intc-525x.c static void intc2_irq_gpio_mask(struct irq_data *d)
d                  24 arch/m68k/coldfire/intc-525x.c 	u32 type = irqd_get_trigger_type(d);
d                  25 arch/m68k/coldfire/intc-525x.c 	int irq = d->irq - MCF_IRQ_GPIO0;
d                  34 arch/m68k/coldfire/intc-525x.c static void intc2_irq_gpio_unmask(struct irq_data *d)
d                  37 arch/m68k/coldfire/intc-525x.c 	u32 type = irqd_get_trigger_type(d);
d                  38 arch/m68k/coldfire/intc-525x.c 	int irq = d->irq - MCF_IRQ_GPIO0;
d                  47 arch/m68k/coldfire/intc-525x.c static void intc2_irq_gpio_ack(struct irq_data *d)
d                  50 arch/m68k/coldfire/intc-525x.c 	u32 type = irqd_get_trigger_type(d);
d                  51 arch/m68k/coldfire/intc-525x.c 	int irq = d->irq - MCF_IRQ_GPIO0;
d                  60 arch/m68k/coldfire/intc-525x.c static int intc2_irq_gpio_set_type(struct irq_data *d, unsigned int f)
d                  81 arch/m68k/coldfire/intc-5272.c static void intc_irq_mask(struct irq_data *d)
d                  83 arch/m68k/coldfire/intc-5272.c 	unsigned int irq = d->irq;
d                  93 arch/m68k/coldfire/intc-5272.c static void intc_irq_unmask(struct irq_data *d)
d                  95 arch/m68k/coldfire/intc-5272.c 	unsigned int irq = d->irq;
d                 105 arch/m68k/coldfire/intc-5272.c static void intc_irq_ack(struct irq_data *d)
d                 107 arch/m68k/coldfire/intc-5272.c 	unsigned int irq = d->irq;
d                 122 arch/m68k/coldfire/intc-5272.c static int intc_irq_set_type(struct irq_data *d, unsigned int type)
d                 124 arch/m68k/coldfire/intc-5272.c 	unsigned int irq = d->irq;
d                  67 arch/m68k/coldfire/intc-simr.c static void intc_irq_mask(struct irq_data *d)
d                  69 arch/m68k/coldfire/intc-simr.c 	unsigned int irq = d->irq - MCFINT_VECBASE;
d                  79 arch/m68k/coldfire/intc-simr.c static void intc_irq_unmask(struct irq_data *d)
d                  81 arch/m68k/coldfire/intc-simr.c 	unsigned int irq = d->irq - MCFINT_VECBASE;
d                  91 arch/m68k/coldfire/intc-simr.c static void intc_irq_ack(struct irq_data *d)
d                  93 arch/m68k/coldfire/intc-simr.c 	unsigned int ebit = irq2ebit(d->irq);
d                  98 arch/m68k/coldfire/intc-simr.c static unsigned int intc_irq_startup(struct irq_data *d)
d                 100 arch/m68k/coldfire/intc-simr.c 	unsigned int irq = d->irq;
d                 125 arch/m68k/coldfire/intc-simr.c 	intc_irq_unmask(d);
d                 129 arch/m68k/coldfire/intc-simr.c static int intc_irq_set_type(struct irq_data *d, unsigned int type)
d                 131 arch/m68k/coldfire/intc-simr.c 	unsigned int ebit, irq = d->irq;
d                 114 arch/m68k/coldfire/intc.c static void intc_irq_mask(struct irq_data *d)
d                 116 arch/m68k/coldfire/intc.c 	if (mcf_irq2imr[d->irq])
d                 117 arch/m68k/coldfire/intc.c 		mcf_setimr(mcf_irq2imr[d->irq]);
d                 120 arch/m68k/coldfire/intc.c static void intc_irq_unmask(struct irq_data *d)
d                 122 arch/m68k/coldfire/intc.c 	if (mcf_irq2imr[d->irq])
d                 123 arch/m68k/coldfire/intc.c 		mcf_clrimr(mcf_irq2imr[d->irq]);
d                 126 arch/m68k/coldfire/intc.c static int intc_irq_set_type(struct irq_data *d, unsigned int type)
d                19792 arch/m68k/ifpsp060/src/fpsp.S #	    if (s || d || x) then check for SNAN,UNNORM,DENORM		#
d                 100 arch/m68k/include/asm/bootstd.h #define _bsc4(type,name,atype,a,btype,b,ctype,c,dtype,d) \
d                 101 arch/m68k/include/asm/bootstd.h type name(atype a, btype b, ctype c, dtype d) \
d                 107 arch/m68k/include/asm/bootstd.h    register long __d __asm__ ("%d4") = (long)d; \
d                 116 arch/m68k/include/asm/bootstd.h #define _bsc5(type,name,atype,a,btype,b,ctype,c,dtype,d,etype,e) \
d                 117 arch/m68k/include/asm/bootstd.h type name(atype a, btype b, ctype c, dtype d, etype e) \
d                 123 arch/m68k/include/asm/bootstd.h    register long __d __asm__ ("%d4") = (long)d; \
d                  40 arch/m68k/include/asm/openprom.h 	int (*v2_inst2pkg)(int d);	/* Convert ihandle to phandle */
d                  49 arch/m68k/include/asm/openprom.h 	void (*v2_dev_close)(int d);
d                  50 arch/m68k/include/asm/openprom.h 	int (*v2_dev_read)(int d, char *buf, int nbytes);
d                  51 arch/m68k/include/asm/openprom.h 	int (*v2_dev_write)(int d, char *buf, int nbytes);
d                  52 arch/m68k/include/asm/openprom.h 	int (*v2_dev_seek)(int d, int hi, int lo);
d                 237 arch/m68k/include/asm/openprom.h 	unsigned int (*pv_enaddr)(int d, char *enaddr);
d                  64 arch/m68k/include/asm/string.h #define memcmp(d, s, n) __builtin_memcmp(d, s, n)
d                  68 arch/m68k/include/asm/string.h #define memset(d, c, n) __builtin_memset(d, c, n)
d                  72 arch/m68k/include/asm/string.h #define memcpy(d, s, n) __builtin_memcpy(d, s, n)
d                  68 arch/m68k/include/asm/uaccess_mm.h 		__put_user_asm(__pu_err, __pu_val, ptr, b, d, -EFAULT);	\
d                 136 arch/m68k/include/asm/uaccess_mm.h 		__get_user_asm(__gu_err, x, ptr, u8, b, d, -EFAULT);	\
d                 322 arch/m68k/include/asm/uaccess_mm.h 		__put_user_asm(res, *(u8 *)from, (u8 __user *)to, b, d, 1);
d                  62 arch/microblaze/include/asm/hash.h 	unsigned int b, c, d;
d                  70 arch/microblaze/include/asm/hash.h 	d = c << 7;	/* 7 18 */
d                  71 arch/microblaze/include/asm/hash.h 	d += b;		/* 1 19 */
d                  72 arch/microblaze/include/asm/hash.h 	d <<= 8;	/* 8 27 */
d                  73 arch/microblaze/include/asm/hash.h 	d += a;		/* 1 28 */
d                  74 arch/microblaze/include/asm/hash.h 	d <<= 1;	/* 1 29 */
d                  75 arch/microblaze/include/asm/hash.h 	d += b;		/* 1 30 */
d                  76 arch/microblaze/include/asm/hash.h 	d <<= 6;	/* 6 36 */
d                  77 arch/microblaze/include/asm/hash.h 	return d + c;	/* 1 37 total instructions*/
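
The hash.h hits build a multiply-by-constant out of shifts and adds, the usual approach on cores without a fast multiplier (the inline comments count the instructions). A stand-alone illustration of the same decomposition with a small constant, 37 = 32 + 4 + 1:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

static uint32_t mul37(uint32_t x)
{
	uint32_t d;

	d = x << 5;		/* x * 32 */
	d += x << 2;		/* + x * 4 */
	d += x;			/* + x * 1  => x * 37 */
	return d;
}

int main(void)
{
	for (uint32_t x = 0; x < 100000; x++)
		assert(mul37(x) == x * 37u);
	puts("shift/add decomposition matches the multiply");
	return 0;
}
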
d                 258 arch/mips/alchemy/board-gpr.c static int gpr_map_pci_irq(const struct pci_dev *d, u8 slot, u8 pin)
d                 240 arch/mips/alchemy/board-mtx1.c static int mtx1_map_pci_irq(const struct pci_dev *d, u8 slot, u8 pin)
d                 192 arch/mips/alchemy/common/clock.c 	unsigned long d = rate;
d                 195 arch/mips/alchemy/common/clock.c 		d /= parent_rate;
d                 197 arch/mips/alchemy/common/clock.c 		d = 0;
d                 200 arch/mips/alchemy/common/clock.c 	if (((d < 7) && (d != 0)) || (d > a->maxmult))
d                 203 arch/mips/alchemy/common/clock.c 	alchemy_wrsys(d, a->reg);
d                 890 arch/mips/alchemy/common/clock.c 	unsigned long d, v, flags;
d                 896 arch/mips/alchemy/common/clock.c 	d = (parent_rate + (rate / 2)) / rate;
d                 897 arch/mips/alchemy/common/clock.c 	if (d > 4)
d                 899 arch/mips/alchemy/common/clock.c 	if ((d == 3) && (c->dt[2] != 3))
d                 900 arch/mips/alchemy/common/clock.c 		d = 4;
d                 903 arch/mips/alchemy/common/clock.c 		if (c->dt[i] == d)
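
The second clock.c hit above picks a divider by rounding to the nearest integer, d = (parent + rate/2) / rate, instead of truncating. A stand-alone comparison of the two:

#include <stdio.h>

static unsigned long div_nearest(unsigned long parent, unsigned long rate)
{
	return (parent + rate / 2) / rate;
}

int main(void)
{
	/* 100 MHz parent, 27 MHz target: exact ratio 3.70 */
	printf("truncated: %lu\n", 100000000UL / 27000000UL);			/* 3 */
	printf("rounded:   %lu\n", div_nearest(100000000UL, 27000000UL));	/* 4 */
	return 0;
}
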
d                  76 arch/mips/alchemy/common/irq.c static int au1x_ic_settype(struct irq_data *d, unsigned int type);
d                  77 arch/mips/alchemy/common/irq.c static int au1300_gpic_settype(struct irq_data *d, unsigned int type);
d                 288 arch/mips/alchemy/common/irq.c static void au1x_ic0_unmask(struct irq_data *d)
d                 290 arch/mips/alchemy/common/irq.c 	unsigned int bit = d->irq - AU1000_INTC0_INT_BASE;
d                 298 arch/mips/alchemy/common/irq.c static void au1x_ic1_unmask(struct irq_data *d)
d                 300 arch/mips/alchemy/common/irq.c 	unsigned int bit = d->irq - AU1000_INTC1_INT_BASE;
d                 308 arch/mips/alchemy/common/irq.c static void au1x_ic0_mask(struct irq_data *d)
d                 310 arch/mips/alchemy/common/irq.c 	unsigned int bit = d->irq - AU1000_INTC0_INT_BASE;
d                 318 arch/mips/alchemy/common/irq.c static void au1x_ic1_mask(struct irq_data *d)
d                 320 arch/mips/alchemy/common/irq.c 	unsigned int bit = d->irq - AU1000_INTC1_INT_BASE;
d                 328 arch/mips/alchemy/common/irq.c static void au1x_ic0_ack(struct irq_data *d)
d                 330 arch/mips/alchemy/common/irq.c 	unsigned int bit = d->irq - AU1000_INTC0_INT_BASE;
d                 342 arch/mips/alchemy/common/irq.c static void au1x_ic1_ack(struct irq_data *d)
d                 344 arch/mips/alchemy/common/irq.c 	unsigned int bit = d->irq - AU1000_INTC1_INT_BASE;
d                 356 arch/mips/alchemy/common/irq.c static void au1x_ic0_maskack(struct irq_data *d)
d                 358 arch/mips/alchemy/common/irq.c 	unsigned int bit = d->irq - AU1000_INTC0_INT_BASE;
d                 368 arch/mips/alchemy/common/irq.c static void au1x_ic1_maskack(struct irq_data *d)
d                 370 arch/mips/alchemy/common/irq.c 	unsigned int bit = d->irq - AU1000_INTC1_INT_BASE;
d                 380 arch/mips/alchemy/common/irq.c static int au1x_ic1_setwake(struct irq_data *d, unsigned int on)
d                 382 arch/mips/alchemy/common/irq.c 	int bit = d->irq - AU1000_INTC1_INT_BASE;
d                 426 arch/mips/alchemy/common/irq.c static int au1x_ic_settype(struct irq_data *d, unsigned int flow_type)
d                 429 arch/mips/alchemy/common/irq.c 	unsigned int bit, irq = d->irq;
d                 494 arch/mips/alchemy/common/irq.c 	irq_set_chip_handler_name_locked(d, chip, handler, name);
d                 600 arch/mips/alchemy/common/irq.c static void au1300_gpic_mask(struct irq_data *d)
d                 603 arch/mips/alchemy/common/irq.c 	unsigned long bit, irq = d->irq;
d                 614 arch/mips/alchemy/common/irq.c static void au1300_gpic_unmask(struct irq_data *d)
d                 617 arch/mips/alchemy/common/irq.c 	unsigned long bit, irq = d->irq;
d                 629 arch/mips/alchemy/common/irq.c static void au1300_gpic_maskack(struct irq_data *d)
d                 632 arch/mips/alchemy/common/irq.c 	unsigned long bit, irq = d->irq;
d                 644 arch/mips/alchemy/common/irq.c static void au1300_gpic_ack(struct irq_data *d)
d                 647 arch/mips/alchemy/common/irq.c 	unsigned long bit, irq = d->irq;
d                 665 arch/mips/alchemy/common/irq.c static int au1300_gpic_settype(struct irq_data *d, unsigned int type)
d                 706 arch/mips/alchemy/common/irq.c 	irq_set_chip_handler_name_locked(d, &au1300_gpic, hdl, name);
d                 708 arch/mips/alchemy/common/irq.c 	au1300_gpic_chgcfg(d->irq - ALCHEMY_GPIC_INT_BASE, GPIC_CFG_IC_MASK, s);
d                 733 arch/mips/alchemy/common/irq.c static inline void alchemy_ic_suspend_one(void __iomem *base, unsigned long *d)
d                 735 arch/mips/alchemy/common/irq.c 	d[0] = __raw_readl(base + IC_CFG0RD);
d                 736 arch/mips/alchemy/common/irq.c 	d[1] = __raw_readl(base + IC_CFG1RD);
d                 737 arch/mips/alchemy/common/irq.c 	d[2] = __raw_readl(base + IC_CFG2RD);
d                 738 arch/mips/alchemy/common/irq.c 	d[3] = __raw_readl(base + IC_SRCRD);
d                 739 arch/mips/alchemy/common/irq.c 	d[4] = __raw_readl(base + IC_ASSIGNRD);
d                 740 arch/mips/alchemy/common/irq.c 	d[5] = __raw_readl(base + IC_WAKERD);
d                 741 arch/mips/alchemy/common/irq.c 	d[6] = __raw_readl(base + IC_MASKRD);
d                 745 arch/mips/alchemy/common/irq.c static inline void alchemy_ic_resume_one(void __iomem *base, unsigned long *d)
d                 749 arch/mips/alchemy/common/irq.c 	__raw_writel(d[0], base + IC_CFG0SET);
d                 750 arch/mips/alchemy/common/irq.c 	__raw_writel(d[1], base + IC_CFG1SET);
d                 751 arch/mips/alchemy/common/irq.c 	__raw_writel(d[2], base + IC_CFG2SET);
d                 752 arch/mips/alchemy/common/irq.c 	__raw_writel(d[3], base + IC_SRCSET);
d                 753 arch/mips/alchemy/common/irq.c 	__raw_writel(d[4], base + IC_ASSIGNSET);
d                 754 arch/mips/alchemy/common/irq.c 	__raw_writel(d[5], base + IC_WAKESET);
d                 757 arch/mips/alchemy/common/irq.c 	__raw_writel(d[6], base + IC_MASKSET);
d                 854 arch/mips/alchemy/common/irq.c static void au1000_##name##_dispatch(struct irq_desc *d)		      \
d                 868 arch/mips/alchemy/common/irq.c static void alchemy_gpic_dispatch(struct irq_desc *d)
d                 197 arch/mips/alchemy/common/platform.c static int __init _new_usbres(struct resource **r, struct platform_device **d)
d                 202 arch/mips/alchemy/common/platform.c 	*d = kzalloc(sizeof(struct platform_device), GFP_KERNEL);
d                 203 arch/mips/alchemy/common/platform.c 	if (!*d) {
d                 208 arch/mips/alchemy/common/platform.c 	(*d)->dev.coherent_dma_mask = DMA_BIT_MASK(32);
d                 209 arch/mips/alchemy/common/platform.c 	(*d)->num_resources = 2;
d                 210 arch/mips/alchemy/common/platform.c 	(*d)->resource = *r;
d                  91 arch/mips/alchemy/devboards/bcsr.c static void bcsr_csc_handler(struct irq_desc *d)
d                  94 arch/mips/alchemy/devboards/bcsr.c 	struct irq_chip *chip = irq_desc_get_chip(d);
d                  96 arch/mips/alchemy/devboards/bcsr.c 	chained_irq_enter(chip, d);
d                  98 arch/mips/alchemy/devboards/bcsr.c 	chained_irq_exit(chip, d);
d                 101 arch/mips/alchemy/devboards/bcsr.c static void bcsr_irq_mask(struct irq_data *d)
d                 103 arch/mips/alchemy/devboards/bcsr.c 	unsigned short v = 1 << (d->irq - bcsr_csc_base);
d                 108 arch/mips/alchemy/devboards/bcsr.c static void bcsr_irq_maskack(struct irq_data *d)
d                 110 arch/mips/alchemy/devboards/bcsr.c 	unsigned short v = 1 << (d->irq - bcsr_csc_base);
d                 116 arch/mips/alchemy/devboards/bcsr.c static void bcsr_irq_unmask(struct irq_data *d)
d                 118 arch/mips/alchemy/devboards/bcsr.c 	unsigned short v = 1 << (d->irq - bcsr_csc_base);
d                  55 arch/mips/alchemy/devboards/db1000.c static int db1500_map_pci_irq(const struct pci_dev *d, u8 slot, u8 pin)
d                 417 arch/mips/alchemy/devboards/db1550.c static int db1550_map_pci_irq(const struct pci_dev *d, u8 slot, u8 pin)
d                 442 arch/mips/alchemy/devboards/db1550.c static int pb1550_map_pci_irq(const struct pci_dev *d, u8 slot, u8 pin)
d                  39 arch/mips/ar7/irq.c static void ar7_unmask_irq(struct irq_data *d)
d                  41 arch/mips/ar7/irq.c 	writel(1 << ((d->irq - ar7_irq_base) % 32),
d                  42 arch/mips/ar7/irq.c 	       REG(ESR_OFFSET(d->irq - ar7_irq_base)));
d                  45 arch/mips/ar7/irq.c static void ar7_mask_irq(struct irq_data *d)
d                  47 arch/mips/ar7/irq.c 	writel(1 << ((d->irq - ar7_irq_base) % 32),
d                  48 arch/mips/ar7/irq.c 	       REG(ECR_OFFSET(d->irq - ar7_irq_base)));
d                  51 arch/mips/ar7/irq.c static void ar7_ack_irq(struct irq_data *d)
d                  53 arch/mips/ar7/irq.c 	writel(1 << ((d->irq - ar7_irq_base) % 32),
d                  54 arch/mips/ar7/irq.c 	       REG(CR_OFFSET(d->irq - ar7_irq_base)));
d                  57 arch/mips/ar7/irq.c static void ar7_unmask_sec_irq(struct irq_data *d)
d                  59 arch/mips/ar7/irq.c 	writel(1 << (d->irq - ar7_irq_base - 40), REG(SEC_ESR_OFFSET));
d                  62 arch/mips/ar7/irq.c static void ar7_mask_sec_irq(struct irq_data *d)
d                  64 arch/mips/ar7/irq.c 	writel(1 << (d->irq - ar7_irq_base - 40), REG(SEC_ECR_OFFSET));
d                  67 arch/mips/ar7/irq.c static void ar7_ack_sec_irq(struct irq_data *d)
d                  69 arch/mips/ar7/irq.c 	writel(1 << (d->irq - ar7_irq_base - 40), REG(SEC_CR_OFFSET));
d                  96 arch/mips/ath25/ar2315.c static void ar2315_misc_irq_unmask(struct irq_data *d)
d                  98 arch/mips/ath25/ar2315.c 	ar2315_rst_reg_mask(AR2315_IMR, 0, BIT(d->hwirq));
d                 101 arch/mips/ath25/ar2315.c static void ar2315_misc_irq_mask(struct irq_data *d)
d                 103 arch/mips/ath25/ar2315.c 	ar2315_rst_reg_mask(AR2315_IMR, BIT(d->hwirq), 0);
d                 112 arch/mips/ath25/ar2315.c static int ar2315_misc_irq_map(struct irq_domain *d, unsigned irq,
d                  99 arch/mips/ath25/ar5312.c static void ar5312_misc_irq_unmask(struct irq_data *d)
d                 101 arch/mips/ath25/ar5312.c 	ar5312_rst_reg_mask(AR5312_IMR, 0, BIT(d->hwirq));
d                 105 arch/mips/ath25/ar5312.c static void ar5312_misc_irq_mask(struct irq_data *d)
d                 107 arch/mips/ath25/ar5312.c 	ar5312_rst_reg_mask(AR5312_IMR, BIT(d->hwirq), 0);
d                 117 arch/mips/ath25/ar5312.c static int ar5312_misc_irq_map(struct irq_domain *d, unsigned irq,
d                  33 arch/mips/bcm63xx/irq.c static void (*internal_irq_mask)(struct irq_data *d);
d                  34 arch/mips/bcm63xx/irq.c static void (*internal_irq_unmask)(struct irq_data *d, const struct cpumask *m);
d                  53 arch/mips/bcm63xx/irq.c static inline int enable_irq_for_cpu(int cpu, struct irq_data *d,
d                  61 arch/mips/bcm63xx/irq.c 	else if (irqd_affinity_was_set(d))
d                  62 arch/mips/bcm63xx/irq.c 		enable &= cpumask_test_cpu(cpu, irq_data_get_affinity_mask(d));
d                 112 arch/mips/bcm63xx/irq.c static void __internal_irq_mask_##width(struct irq_data *d)		\
d                 115 arch/mips/bcm63xx/irq.c 	unsigned irq = d->irq - IRQ_INTERNAL_BASE;			\
d                 133 arch/mips/bcm63xx/irq.c static void __internal_irq_unmask_##width(struct irq_data *d,		\
d                 137 arch/mips/bcm63xx/irq.c 	unsigned irq = d->irq - IRQ_INTERNAL_BASE;			\
d                 149 arch/mips/bcm63xx/irq.c 		if (enable_irq_for_cpu(cpu, d, m))			\
d                 199 arch/mips/bcm63xx/irq.c static void bcm63xx_internal_irq_mask(struct irq_data *d)
d                 201 arch/mips/bcm63xx/irq.c 	internal_irq_mask(d);
d                 204 arch/mips/bcm63xx/irq.c static void bcm63xx_internal_irq_unmask(struct irq_data *d)
d                 206 arch/mips/bcm63xx/irq.c 	internal_irq_unmask(d, NULL);
d                 213 arch/mips/bcm63xx/irq.c static void bcm63xx_external_irq_mask(struct irq_data *d)
d                 215 arch/mips/bcm63xx/irq.c 	unsigned int irq = d->irq - IRQ_EXTERNAL_BASE;
d                 235 arch/mips/bcm63xx/irq.c static void bcm63xx_external_irq_unmask(struct irq_data *d)
d                 237 arch/mips/bcm63xx/irq.c 	unsigned int irq = d->irq - IRQ_EXTERNAL_BASE;
d                 258 arch/mips/bcm63xx/irq.c static void bcm63xx_external_irq_clear(struct irq_data *d)
d                 260 arch/mips/bcm63xx/irq.c 	unsigned int irq = d->irq - IRQ_EXTERNAL_BASE;
d                 277 arch/mips/bcm63xx/irq.c static int bcm63xx_external_irq_set_type(struct irq_data *d,
d                 280 arch/mips/bcm63xx/irq.c 	unsigned int irq = d->irq - IRQ_EXTERNAL_BASE;
d                 365 arch/mips/bcm63xx/irq.c 	irqd_set_trigger_type(d, flow_type);
d                 367 arch/mips/bcm63xx/irq.c 		irq_set_handler_locked(d, handle_level_irq);
d                 369 arch/mips/bcm63xx/irq.c 		irq_set_handler_locked(d, handle_edge_irq);
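
The bcm63xx set_type hit ends with the common epilogue for an irq_set_type callback: record the trigger type on the irq_data and switch the flow handler to level or edge handling. A kernel-style sketch of that epilogue; the hardware-programming part is omitted and mychip_irq_set_type is an illustrative name:

#include <linux/irq.h>

static int mychip_irq_set_type(struct irq_data *d, unsigned int flow_type)
{
	/* ...program the controller's edge/level and polarity bits here... */

	irqd_set_trigger_type(d, flow_type);
	if (flow_type & IRQ_TYPE_LEVEL_MASK)
		irq_set_handler_locked(d, handle_level_irq);
	else
		irq_set_handler_locked(d, handle_edge_irq);

	return 0;
}
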
d                  14 arch/mips/boot/compressed/string.c 	char *d = dest;
d                  17 arch/mips/boot/compressed/string.c 		d[i] = s[i];
d                 118 arch/mips/cavium-octeon/octeon-irq.c static void octeon_irq_free_cd(struct irq_domain *d, unsigned int irq)
d                1146 arch/mips/cavium-octeon/octeon-irq.c static int octeon_irq_gpio_xlat(struct irq_domain *d,
d                1157 arch/mips/cavium-octeon/octeon-irq.c 	if (irq_domain_get_of_node(d) != node)
d                1195 arch/mips/cavium-octeon/octeon-irq.c static int octeon_irq_ciu_xlat(struct irq_domain *d,
d                1203 arch/mips/cavium-octeon/octeon-irq.c 	struct octeon_irq_ciu_domain_data *dd = d->host_data;
d                1221 arch/mips/cavium-octeon/octeon-irq.c static int octeon_irq_ciu_map(struct irq_domain *d,
d                1227 arch/mips/cavium-octeon/octeon-irq.c 	struct octeon_irq_ciu_domain_data *dd = d->host_data;
d                1254 arch/mips/cavium-octeon/octeon-irq.c static int octeon_irq_gpio_map(struct irq_domain *d,
d                1257 arch/mips/cavium-octeon/octeon-irq.c 	struct octeon_irq_gpio_domain_data *gpiod = d->host_data;
d                1890 arch/mips/cavium-octeon/octeon-irq.c static int octeon_irq_ciu2_xlat(struct irq_domain *d,
d                1934 arch/mips/cavium-octeon/octeon-irq.c static int octeon_irq_ciu2_map(struct irq_domain *d,
d                2160 arch/mips/cavium-octeon/octeon-irq.c static int octeon_irq_cib_xlat(struct irq_domain *d,
d                2189 arch/mips/cavium-octeon/octeon-irq.c static int octeon_irq_cib_map(struct irq_domain *d,
d                2192 arch/mips/cavium-octeon/octeon-irq.c 	struct octeon_irq_cib_host_data *host_data = d->host_data;
d                2197 arch/mips/cavium-octeon/octeon-irq.c 		       irq_domain_get_of_node(d)->name, (unsigned)hw);
d                2333 arch/mips/cavium-octeon/octeon-irq.c int octeon_irq_ciu3_xlat(struct irq_domain *d,
d                2340 arch/mips/cavium-octeon/octeon-irq.c 	struct octeon_ciu3_info *ciu3_info = d->host_data;
d                2542 arch/mips/cavium-octeon/octeon-irq.c int octeon_irq_ciu3_mapx(struct irq_domain *d, unsigned int virq,
d                2545 arch/mips/cavium-octeon/octeon-irq.c 	struct octeon_ciu3_info *ciu3_info = d->host_data;
d                2560 arch/mips/cavium-octeon/octeon-irq.c static int octeon_irq_ciu3_map(struct irq_domain *d,
d                2563 arch/mips/cavium-octeon/octeon-irq.c 	return octeon_irq_ciu3_mapx(d, virq, hw, &octeon_irq_chip_ciu3);
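
The octeon xlat/map hits show how irq_domain callbacks recover driver state: d->host_data is whatever pointer was supplied when the domain was created (the ciu3_info here), and .map attaches a chip and flow handler to the freshly allocated virq. A minimal kernel-style sketch with illustrative names:

#include <linux/irq.h>
#include <linux/irqdomain.h>

struct mychip_data;				/* opaque driver state */

static struct irq_chip mychip_irq_chip = {
	.name	= "mychip",
};

static int mychip_irq_map(struct irq_domain *d, unsigned int virq,
			  irq_hw_number_t hw)
{
	struct mychip_data *data = d->host_data;	/* set at domain creation */

	irq_set_chip_and_handler(virq, &mychip_irq_chip, handle_level_irq);
	irq_set_chip_data(virq, data);
	return 0;
}

static const struct irq_domain_ops mychip_domain_ops = {
	.map	= mychip_irq_map,
	.xlate	= irq_domain_xlate_onecell,
};
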
d                  78 arch/mips/crypto/crc32-mips.c 		CRC32(crc, value, d);
d                 118 arch/mips/crypto/crc32-mips.c 		CRC32C(crc, value, d);
d                  18 arch/mips/dec/ioasic-irq.c static void unmask_ioasic_irq(struct irq_data *d)
d                  23 arch/mips/dec/ioasic-irq.c 	simr |= (1 << (d->irq - ioasic_irq_base));
d                  27 arch/mips/dec/ioasic-irq.c static void mask_ioasic_irq(struct irq_data *d)
d                  32 arch/mips/dec/ioasic-irq.c 	simr &= ~(1 << (d->irq - ioasic_irq_base));
d                  36 arch/mips/dec/ioasic-irq.c static void ack_ioasic_irq(struct irq_data *d)
d                  38 arch/mips/dec/ioasic-irq.c 	mask_ioasic_irq(d);
d                  50 arch/mips/dec/ioasic-irq.c static void clear_ioasic_dma_irq(struct irq_data *d)
d                  54 arch/mips/dec/ioasic-irq.c 	sir = ~(1 << (d->irq - ioasic_irq_base));
d                  28 arch/mips/dec/kn02-irq.c static void unmask_kn02_irq(struct irq_data *d)
d                  33 arch/mips/dec/kn02-irq.c 	cached_kn02_csr |= (1 << (d->irq - kn02_irq_base + 16));
d                  37 arch/mips/dec/kn02-irq.c static void mask_kn02_irq(struct irq_data *d)
d                  42 arch/mips/dec/kn02-irq.c 	cached_kn02_csr &= ~(1 << (d->irq - kn02_irq_base + 16));
d                  46 arch/mips/dec/kn02-irq.c static void ack_kn02_irq(struct irq_data *d)
d                  48 arch/mips/dec/kn02-irq.c 	mask_kn02_irq(d);
d                  23 arch/mips/emma/markeins/irq.c static void emma2rh_irq_enable(struct irq_data *d)
d                  25 arch/mips/emma/markeins/irq.c 	unsigned int irq = d->irq - EMMA2RH_IRQ_BASE;
d                  35 arch/mips/emma/markeins/irq.c static void emma2rh_irq_disable(struct irq_data *d)
d                  37 arch/mips/emma/markeins/irq.c 	unsigned int irq = d->irq - EMMA2RH_IRQ_BASE;
d                  63 arch/mips/emma/markeins/irq.c static void emma2rh_sw_irq_enable(struct irq_data *d)
d                  65 arch/mips/emma/markeins/irq.c 	unsigned int irq = d->irq - EMMA2RH_SW_IRQ_BASE;
d                  73 arch/mips/emma/markeins/irq.c static void emma2rh_sw_irq_disable(struct irq_data *d)
d                  75 arch/mips/emma/markeins/irq.c 	unsigned int irq = d->irq - EMMA2RH_SW_IRQ_BASE;
d                  99 arch/mips/emma/markeins/irq.c static void emma2rh_gpio_irq_enable(struct irq_data *d)
d                 101 arch/mips/emma/markeins/irq.c 	unsigned int irq = d->irq - EMMA2RH_GPIO_IRQ_BASE;
d                 109 arch/mips/emma/markeins/irq.c static void emma2rh_gpio_irq_disable(struct irq_data *d)
d                 111 arch/mips/emma/markeins/irq.c 	unsigned int irq = d->irq - EMMA2RH_GPIO_IRQ_BASE;
d                 119 arch/mips/emma/markeins/irq.c static void emma2rh_gpio_irq_ack(struct irq_data *d)
d                 121 arch/mips/emma/markeins/irq.c 	unsigned int irq = d->irq - EMMA2RH_GPIO_IRQ_BASE;
d                 126 arch/mips/emma/markeins/irq.c static void emma2rh_gpio_irq_mask_ack(struct irq_data *d)
d                 128 arch/mips/emma/markeins/irq.c 	unsigned int irq = d->irq - EMMA2RH_GPIO_IRQ_BASE;
d                  20 arch/mips/include/asm/asmmacro-32.h 	s.d	$f0,  THREAD_FPR0(\thread)
d                  21 arch/mips/include/asm/asmmacro-32.h 	s.d	$f2,  THREAD_FPR2(\thread)
d                  22 arch/mips/include/asm/asmmacro-32.h 	s.d	$f4,  THREAD_FPR4(\thread)
d                  23 arch/mips/include/asm/asmmacro-32.h 	s.d	$f6,  THREAD_FPR6(\thread)
d                  24 arch/mips/include/asm/asmmacro-32.h 	s.d	$f8,  THREAD_FPR8(\thread)
d                  25 arch/mips/include/asm/asmmacro-32.h 	s.d	$f10, THREAD_FPR10(\thread)
d                  26 arch/mips/include/asm/asmmacro-32.h 	s.d	$f12, THREAD_FPR12(\thread)
d                  27 arch/mips/include/asm/asmmacro-32.h 	s.d	$f14, THREAD_FPR14(\thread)
d                  28 arch/mips/include/asm/asmmacro-32.h 	s.d	$f16, THREAD_FPR16(\thread)
d                  29 arch/mips/include/asm/asmmacro-32.h 	s.d	$f18, THREAD_FPR18(\thread)
d                  30 arch/mips/include/asm/asmmacro-32.h 	s.d	$f20, THREAD_FPR20(\thread)
d                  31 arch/mips/include/asm/asmmacro-32.h 	s.d	$f22, THREAD_FPR22(\thread)
d                  32 arch/mips/include/asm/asmmacro-32.h 	s.d	$f24, THREAD_FPR24(\thread)
d                  33 arch/mips/include/asm/asmmacro-32.h 	s.d	$f26, THREAD_FPR26(\thread)
d                  34 arch/mips/include/asm/asmmacro-32.h 	s.d	$f28, THREAD_FPR28(\thread)
d                  35 arch/mips/include/asm/asmmacro-32.h 	s.d	$f30, THREAD_FPR30(\thread)
d                  44 arch/mips/include/asm/asmmacro-32.h 	l.d	$f0,  THREAD_FPR0(\thread)
d                  45 arch/mips/include/asm/asmmacro-32.h 	l.d	$f2,  THREAD_FPR2(\thread)
d                  46 arch/mips/include/asm/asmmacro-32.h 	l.d	$f4,  THREAD_FPR4(\thread)
d                  47 arch/mips/include/asm/asmmacro-32.h 	l.d	$f6,  THREAD_FPR6(\thread)
d                  48 arch/mips/include/asm/asmmacro-32.h 	l.d	$f8,  THREAD_FPR8(\thread)
d                  49 arch/mips/include/asm/asmmacro-32.h 	l.d	$f10, THREAD_FPR10(\thread)
d                  50 arch/mips/include/asm/asmmacro-32.h 	l.d	$f12, THREAD_FPR12(\thread)
d                  51 arch/mips/include/asm/asmmacro-32.h 	l.d	$f14, THREAD_FPR14(\thread)
d                  52 arch/mips/include/asm/asmmacro-32.h 	l.d	$f16, THREAD_FPR16(\thread)
d                  53 arch/mips/include/asm/asmmacro-32.h 	l.d	$f18, THREAD_FPR18(\thread)
d                  54 arch/mips/include/asm/asmmacro-32.h 	l.d	$f20, THREAD_FPR20(\thread)
d                  55 arch/mips/include/asm/asmmacro-32.h 	l.d	$f22, THREAD_FPR22(\thread)
d                  56 arch/mips/include/asm/asmmacro-32.h 	l.d	$f24, THREAD_FPR24(\thread)
d                  57 arch/mips/include/asm/asmmacro-32.h 	l.d	$f26, THREAD_FPR26(\thread)
d                  58 arch/mips/include/asm/asmmacro-32.h 	l.d	$f28, THREAD_FPR28(\thread)
d                  59 arch/mips/include/asm/asmmacro-32.h 	l.d	$f30, THREAD_FPR30(\thread)
d                 205 arch/mips/include/asm/asmmacro.h 	ext	\rd, \rs, \p, \s
d                 209 arch/mips/include/asm/asmmacro.h 	srl	\rd, \rs, \p
d                 210 arch/mips/include/asm/asmmacro.h 	andi	\rd, \rd, (1 << \s) - 1
d                 234 arch/mips/include/asm/asmmacro.h 	 .word	0x41000000 | (\rt << 16) | (\rd << 11) | (\u << 5) | (\sel)
d                 238 arch/mips/include/asm/asmmacro.h 	 .word	0x41800000 | (\rt << 16) | (\rd << 11) | (\u << 5) | (\sel)
d                 247 arch/mips/include/asm/asmmacro.h 	cfcmsa	\rd, $\cs
d                 256 arch/mips/include/asm/asmmacro.h 	ctcmsa	$\cd, \rs
d                 265 arch/mips/include/asm/asmmacro.h 	ld.b	$w\wd, \off(\base)
d                 274 arch/mips/include/asm/asmmacro.h 	ld.h	$w\wd, \off(\base)
d                 283 arch/mips/include/asm/asmmacro.h 	ld.w	$w\wd, \off(\base)
d                 292 arch/mips/include/asm/asmmacro.h 	ld.d	$w\wd, \off(\base)
d                 301 arch/mips/include/asm/asmmacro.h 	st.b	$w\wd, \off(\base)
d                 310 arch/mips/include/asm/asmmacro.h 	st.h	$w\wd, \off(\base)
d                 319 arch/mips/include/asm/asmmacro.h 	st.w	$w\wd, \off(\base)
d                 328 arch/mips/include/asm/asmmacro.h 	st.d	$w\wd, \off(\base)
d                 346 arch/mips/include/asm/asmmacro.h 	copy_s.d $1, $w\ws[\n]
d                 355 arch/mips/include/asm/asmmacro.h 	insert.w $w\wd[\n], $1
d                 364 arch/mips/include/asm/asmmacro.h 	insert.d $w\wd[\n], $1
d                 378 arch/mips/include/asm/asmmacro.h 	move	\rd, $1
d                 387 arch/mips/include/asm/asmmacro.h 	insn_if_mips 0x783e0819 | (\cd << 6)
d                 388 arch/mips/include/asm/asmmacro.h 	insn32_if_mm 0x583e0816 | (\cd << 6)
d                 397 arch/mips/include/asm/asmmacro.h 	insn_if_mips 0x78000820 | (\wd << 6)
d                 398 arch/mips/include/asm/asmmacro.h 	insn32_if_mm 0x58000807 | (\wd << 6)
d                 407 arch/mips/include/asm/asmmacro.h 	insn_if_mips 0x78000821 | (\wd << 6)
d                 408 arch/mips/include/asm/asmmacro.h 	insn32_if_mm 0x58000817 | (\wd << 6)
d                 417 arch/mips/include/asm/asmmacro.h 	insn_if_mips 0x78000822 | (\wd << 6)
d                 418 arch/mips/include/asm/asmmacro.h 	insn32_if_mm 0x58000827 | (\wd << 6)
d                 427 arch/mips/include/asm/asmmacro.h 	insn_if_mips 0x78000823 | (\wd << 6)
d                 428 arch/mips/include/asm/asmmacro.h 	insn32_if_mm 0x58000837 | (\wd << 6)
d                 437 arch/mips/include/asm/asmmacro.h 	insn_if_mips 0x78000824 | (\wd << 6)
d                 438 arch/mips/include/asm/asmmacro.h 	insn32_if_mm 0x5800080f | (\wd << 6)
d                 447 arch/mips/include/asm/asmmacro.h 	insn_if_mips 0x78000825 | (\wd << 6)
d                 448 arch/mips/include/asm/asmmacro.h 	insn32_if_mm 0x5800081f | (\wd << 6)
d                 457 arch/mips/include/asm/asmmacro.h 	insn_if_mips 0x78000826 | (\wd << 6)
d                 458 arch/mips/include/asm/asmmacro.h 	insn32_if_mm 0x5800082f | (\wd << 6)
d                 467 arch/mips/include/asm/asmmacro.h 	insn_if_mips 0x78000827 | (\wd << 6)
d                 468 arch/mips/include/asm/asmmacro.h 	insn32_if_mm 0x5800083f | (\wd << 6)
d                 494 arch/mips/include/asm/asmmacro.h 	insn_if_mips 0x79300819 | (\n << 16) | (\wd << 6)
d                 495 arch/mips/include/asm/asmmacro.h 	insn32_if_mm 0x59300816 | (\n << 16) | (\wd << 6)
d                 503 arch/mips/include/asm/asmmacro.h 	insn_if_mips 0x79380819 | (\n << 16) | (\wd << 6)
d                 504 arch/mips/include/asm/asmmacro.h 	insn32_if_mm 0x59380816 | (\n << 16) | (\wd << 6)
d                 610 arch/mips/include/asm/asmmacro.h 	insert_d \wd, 1
d                 612 arch/mips/include/asm/asmmacro.h 	insert_w \wd, 2
d                 613 arch/mips/include/asm/asmmacro.h 	insert_w \wd, 3
d                  69 arch/mips/include/asm/cdmm.h #define to_mips_cdmm_device(d)	container_of(d, struct mips_cdmm_device, dev)
d                  71 arch/mips/include/asm/cdmm.h #define mips_cdmm_get_drvdata(d)	dev_get_drvdata(&d->dev)
d                  72 arch/mips/include/asm/cdmm.h #define mips_cdmm_set_drvdata(d, p)	dev_set_drvdata(&d->dev, p)
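
to_mips_cdmm_device() above is a container_of() wrapper: given a pointer to the embedded struct device, recover the enclosing device structure. A stand-alone version with a simplified local container_of (the kernel macro adds type checking) and an illustrative stand-in structure:

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct device {
	const char *name;
};

struct my_cdmm_device {				/* illustrative stand-in */
	int		drv_data;
	struct device	dev;
};

int main(void)
{
	struct my_cdmm_device cdev = { .drv_data = 42, .dev = { "cdmm0" } };
	struct device *dev = &cdev.dev;
	struct my_cdmm_device *back = container_of(dev, struct my_cdmm_device, dev);

	printf("%s -> drv_data %d\n", dev->name, back->drv_data);
	return 0;
}
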
d                  14 arch/mips/include/asm/compat-signal.h static inline int __copy_conv_sigset_to_user(compat_sigset_t __user *d,
d                  17 arch/mips/include/asm/compat-signal.h 	BUILD_BUG_ON(sizeof(*d) != sizeof(*s));
d                  20 arch/mips/include/asm/compat-signal.h 	return put_compat_sigset(d, s, sizeof(*d));
d                  23 arch/mips/include/asm/compat-signal.h static inline int __copy_conv_sigset_from_user(sigset_t *d,
d                  26 arch/mips/include/asm/compat-signal.h 	return get_compat_sigset(d, s);
d                  20 arch/mips/include/asm/gio_device.h #define to_gio_device(d) container_of(d, struct gio_device, dev)
d                 798 arch/mips/include/asm/mach-au1x00/au1000.h 	int (*board_map_irq)(const struct pci_dev *d, u8 slot, u8 pin);
d                 221 arch/mips/include/asm/mach-au1x00/au1xxx_dbdma.h #define DSCR_DEV2CUSTOM_ID(x, d) (((((x) & 0xFFFF) << 8) | 0x32000000) | \
d                 222 arch/mips/include/asm/mach-au1x00/au1xxx_dbdma.h 				  ((d) & 0xFF))
d                 288 arch/mips/include/asm/mach-au1x00/gpio-au1000.h 	unsigned long d = __raw_readl(base + AU1000_GPIO2_DIR);
d                 291 arch/mips/include/asm/mach-au1x00/gpio-au1000.h 		d |= mask;
d                 293 arch/mips/include/asm/mach-au1x00/gpio-au1000.h 		d &= ~mask;
d                 294 arch/mips/include/asm/mach-au1x00/gpio-au1000.h 	__raw_writel(d, base + AU1000_GPIO2_DIR);
d                 340 arch/mips/include/asm/mach-loongson64/loongson.h #define LOONGSON_ADDRWIN_CFG(s, d, w, src, dst, size) do {\
d                 342 arch/mips/include/asm/mach-loongson64/loongson.h 	s##_WIN##w##_MMAP = (dst) | ADDRWIN_MAP_DST_##d; \
d                 454 arch/mips/include/asm/mips-cm.h 	struct cpuinfo_mips *d = &cpu_data[cpu];
d                 456 arch/mips/include/asm/mips-cm.h 	mips_cm_lock_other(cpu_cluster(d), cpu_core(d), cpu_vpe_id(d), block);
d                  51 arch/mips/include/asm/netlogic/xlp-hal/iomap.h #define XLP_IO_PCI_OFFSET(b, d, f)	(((b) << 20) | ((d) << 15) | ((f) << 12))
d                 115 arch/mips/include/asm/netlogic/xlp-hal/iomap.h #define XLP9XX_HDR_OFFSET(n, d, f)	\
d                 116 arch/mips/include/asm/netlogic/xlp-hal/iomap.h 			XLP_IO_PCI_OFFSET(xlp9xx_get_socbus(n), d, f)
d                3518 arch/mips/include/asm/octeon/cvmx-mio-defs.h 		uint64_t d:32;
d                3520 arch/mips/include/asm/octeon/cvmx-mio-defs.h 		uint64_t d:32;
d                3542 arch/mips/include/asm/octeon/cvmx-mio-defs.h 		uint64_t d:32;
d                3544 arch/mips/include/asm/octeon/cvmx-mio-defs.h 		uint64_t d:32;
d                3557 arch/mips/include/asm/octeon/cvmx-mio-defs.h 		uint64_t d:32;
d                3559 arch/mips/include/asm/octeon/cvmx-mio-defs.h 		uint64_t d:32;
d                 312 arch/mips/include/asm/octeon/octeon.h int octeon_irq_ciu3_xlat(struct irq_domain *d,
d                 323 arch/mips/include/asm/octeon/octeon.h int octeon_irq_ciu3_mapx(struct irq_domain *d, unsigned int virq,
d                 209 arch/mips/include/asm/pci/bridge.h 		u64	d[0x1000 / 8];
d                 214 arch/mips/include/asm/pci/bridge.h 			u64	d[0x100 / 8];
d                 223 arch/mips/include/asm/pci/bridge.h 		u64	d[0x1000 / 8];
d                 233 arch/mips/include/asm/pci/bridge.h 		u64	d[8 / 8];
d                 249 arch/mips/include/asm/pci/bridge.h 		u64	d[0x100000 / 8];
d                 263 arch/mips/include/asm/pci/bridge.h 		u64	d[0x400000 / 8];	/* read-only */
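The bridge.h hits above all declare u64 d[SIZE / 8] members. They suggest the common register-block padding idiom, sketched below with hypothetical names and offsets (this is an assumption about the idiom, not a copy of the bridge.h layout): named registers overlay a fixed-size block inside a union, and the byte-count/8 array pins the block's overall size so the next block stays at its hardware offset.

#include <linux/types.h>

/*
 * Hypothetical register block -- a sketch of the padding idiom only.
 * The d[] array fixes the union's size at 0x100 bytes so adjacent
 * blocks in an enclosing struct keep their hardware offsets.
 */
union example_reg_block {
	struct {
		u64 control;		/* offset 0x00 */
		u64 status;		/* offset 0x08 */
	};
	u64 d[0x100 / 8];		/* pads the block to 0x100 bytes */
};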
d                 571 arch/mips/include/asm/r4kcache.h __BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 16, )
d                 574 arch/mips/include/asm/r4kcache.h __BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 32, )
d                 578 arch/mips/include/asm/r4kcache.h __BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 64, )
d                 581 arch/mips/include/asm/r4kcache.h __BUILD_BLAST_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D, 128, )
d                 604 arch/mips/include/asm/r4kcache.h __BUILD_BLAST_USER_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D,
d                 607 arch/mips/include/asm/r4kcache.h __BUILD_BLAST_USER_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D,
d                 610 arch/mips/include/asm/r4kcache.h __BUILD_BLAST_USER_CACHE(d, dcache, Index_Writeback_Inv_D, Hit_Writeback_Inv_D,
d                 633 arch/mips/include/asm/r4kcache.h __BUILD_BLAST_CACHE_RANGE(d, dcache, Hit_Writeback_Inv_D, protected_, )
d                 664 arch/mips/include/asm/r4kcache.h __BUILD_PROT_BLAST_CACHE_RANGE(d, dcache, Hit_Writeback_Inv_D)
d                 671 arch/mips/include/asm/r4kcache.h __BUILD_BLAST_CACHE_RANGE(d, dcache, Hit_Writeback_Inv_D, , )
d                  86 arch/mips/include/asm/txx9/jmr3927.h #define jmr3927_ioc_reg_out(d, a)	((*(volatile unsigned char *)(a)) = (d))
d                  89 arch/mips/include/asm/txx9/jmr3927.h #define jmr3927_ioc_reg_out(d, a)	((*(volatile unsigned char *)((a)^1)) = (d))
d                 139 arch/mips/include/asm/txx9/tx4927.h #define TX4927_PCFG_SDCLKDLY(d) ((d)<<28)
d                 144 arch/mips/include/asm/txx9/tx4938.h #define TX4938_PCFG_SDCLKDLY(d) ((d)<<28)
d                  50 arch/mips/include/asm/uasm.h 	   unsigned int d)
d                  50 arch/mips/include/asm/vga.h #define scr_memcpyw(d, s, c) memcpy(d, s, c)
d                  51 arch/mips/include/asm/vga.h #define scr_memmovew(d, s, c) memmove(d, s, c)
d                  27 arch/mips/jazz/irq.c static void enable_r4030_irq(struct irq_data *d)
d                  29 arch/mips/jazz/irq.c 	unsigned int mask = 1 << (d->irq - JAZZ_IRQ_START);
d                  38 arch/mips/jazz/irq.c void disable_r4030_irq(struct irq_data *d)
d                  40 arch/mips/jazz/irq.c 	unsigned int mask = ~(1 << (d->irq - JAZZ_IRQ_START));
d                  19 arch/mips/kernel/irq-gt641xx.c static void ack_gt641xx_irq(struct irq_data *d)
d                  26 arch/mips/kernel/irq-gt641xx.c 	cause &= ~GT641XX_IRQ_TO_BIT(d->irq);
d                  31 arch/mips/kernel/irq-gt641xx.c static void mask_gt641xx_irq(struct irq_data *d)
d                  38 arch/mips/kernel/irq-gt641xx.c 	mask &= ~GT641XX_IRQ_TO_BIT(d->irq);
d                  43 arch/mips/kernel/irq-gt641xx.c static void mask_ack_gt641xx_irq(struct irq_data *d)
d                  50 arch/mips/kernel/irq-gt641xx.c 	mask &= ~GT641XX_IRQ_TO_BIT(d->irq);
d                  54 arch/mips/kernel/irq-gt641xx.c 	cause &= ~GT641XX_IRQ_TO_BIT(d->irq);
d                  59 arch/mips/kernel/irq-gt641xx.c static void unmask_gt641xx_irq(struct irq_data *d)
d                  66 arch/mips/kernel/irq-gt641xx.c 	mask |= GT641XX_IRQ_TO_BIT(d->irq);
d                  27 arch/mips/kernel/irq-msc01.c static inline void mask_msc_irq(struct irq_data *d)
d                  29 arch/mips/kernel/irq-msc01.c 	unsigned int irq = d->irq;
d                  38 arch/mips/kernel/irq-msc01.c static inline void unmask_msc_irq(struct irq_data *d)
d                  40 arch/mips/kernel/irq-msc01.c 	unsigned int irq = d->irq;
d                  51 arch/mips/kernel/irq-msc01.c static void level_mask_and_ack_msc_irq(struct irq_data *d)
d                  53 arch/mips/kernel/irq-msc01.c 	mask_msc_irq(d);
d                  61 arch/mips/kernel/irq-msc01.c static void edge_mask_and_ack_msc_irq(struct irq_data *d)
d                  63 arch/mips/kernel/irq-msc01.c 	unsigned int irq = d->irq;
d                  65 arch/mips/kernel/irq-msc01.c 	mask_msc_irq(d);
d                  16 arch/mips/kernel/irq-rm7000.c static inline void unmask_rm7k_irq(struct irq_data *d)
d                  18 arch/mips/kernel/irq-rm7000.c 	set_c0_intcontrol(0x100 << (d->irq - RM7K_CPU_IRQ_BASE));
d                  21 arch/mips/kernel/irq-rm7000.c static inline void mask_rm7k_irq(struct irq_data *d)
d                  23 arch/mips/kernel/irq-rm7000.c 	clear_c0_intcontrol(0x100 << (d->irq - RM7K_CPU_IRQ_BASE));
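A large share of the hits in this section are irq_chip callbacks that take a struct irq_data *d, turn d->irq (or d->hwirq) into a controller-local bit, and update a mask register. A minimal sketch of that recurring pattern follows; EXAMPLE_IRQ_BASE, example_mask_reg and the chip itself are hypothetical names for illustration, not taken from any file listed here.

#include <linux/irq.h>
#include <linux/io.h>

#define EXAMPLE_IRQ_BASE	16		/* hypothetical Linux IRQ base */

static void __iomem *example_mask_reg;		/* hypothetical mask register */
static u32 example_cached_mask;

static void example_mask_irq(struct irq_data *d)
{
	/* clear this line's bit in the cached copy, then write it out */
	example_cached_mask &= ~(1u << (d->irq - EXAMPLE_IRQ_BASE));
	writel(example_cached_mask, example_mask_reg);
}

static void example_unmask_irq(struct irq_data *d)
{
	example_cached_mask |= 1u << (d->irq - EXAMPLE_IRQ_BASE);
	writel(example_cached_mask, example_mask_reg);
}

static struct irq_chip example_irq_chip = {
	.name		= "example",
	.irq_mask	= example_mask_irq,
	.irq_unmask	= example_unmask_irq,
};

The platform variants listed here differ mainly in how the bit is derived (d->irq minus a base versus d->hwirq) and in whether the mask lives in a cached variable, a coprocessor register or a memory-mapped register.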
d                  66 arch/mips/kernel/irq_txx9.c static void txx9_irq_unmask(struct irq_data *d)
d                  68 arch/mips/kernel/irq_txx9.c 	unsigned int irq_nr = d->irq - TXX9_IRQ_BASE;
d                  82 arch/mips/kernel/irq_txx9.c static inline void txx9_irq_mask(struct irq_data *d)
d                  84 arch/mips/kernel/irq_txx9.c 	unsigned int irq_nr = d->irq - TXX9_IRQ_BASE;
d                 102 arch/mips/kernel/irq_txx9.c static void txx9_irq_mask_ack(struct irq_data *d)
d                 104 arch/mips/kernel/irq_txx9.c 	unsigned int irq_nr = d->irq - TXX9_IRQ_BASE;
d                 106 arch/mips/kernel/irq_txx9.c 	txx9_irq_mask(d);
d                 112 arch/mips/kernel/irq_txx9.c static int txx9_irq_set_type(struct irq_data *d, unsigned int flow_type)
d                 114 arch/mips/kernel/irq_txx9.c 	unsigned int irq_nr = d->irq - TXX9_IRQ_BASE;
d                  77 arch/mips/lantiq/irq.c void ltq_disable_irq(struct irq_data *d)
d                  79 arch/mips/lantiq/irq.c 	unsigned long offset = d->hwirq - MIPS_CPU_IRQ_CASCADE;
d                  95 arch/mips/lantiq/irq.c void ltq_mask_and_ack_irq(struct irq_data *d)
d                  97 arch/mips/lantiq/irq.c 	unsigned long offset = d->hwirq - MIPS_CPU_IRQ_CASCADE;
d                 114 arch/mips/lantiq/irq.c static void ltq_ack_irq(struct irq_data *d)
d                 116 arch/mips/lantiq/irq.c 	unsigned long offset = d->hwirq - MIPS_CPU_IRQ_CASCADE;
d                 130 arch/mips/lantiq/irq.c void ltq_enable_irq(struct irq_data *d)
d                 132 arch/mips/lantiq/irq.c 	unsigned long offset = d->hwirq - MIPS_CPU_IRQ_CASCADE;
d                 139 arch/mips/lantiq/irq.c 	vpe = cpumask_first(irq_data_get_effective_affinity_mask(d));
d                 153 arch/mips/lantiq/irq.c static int ltq_eiu_settype(struct irq_data *d, unsigned int type)
d                 159 arch/mips/lantiq/irq.c 		if (d->hwirq == ltq_eiu_irq[i]) {
d                 186 arch/mips/lantiq/irq.c 					type, d->hwirq);
d                 191 arch/mips/lantiq/irq.c 				irq_set_handler(d->hwirq, handle_edge_irq);
d                 204 arch/mips/lantiq/irq.c static unsigned int ltq_startup_eiu_irq(struct irq_data *d)
d                 208 arch/mips/lantiq/irq.c 	ltq_enable_irq(d);
d                 210 arch/mips/lantiq/irq.c 		if (d->hwirq == ltq_eiu_irq[i]) {
d                 212 arch/mips/lantiq/irq.c 			ltq_eiu_settype(d, IRQF_TRIGGER_LOW);
d                 226 arch/mips/lantiq/irq.c static void ltq_shutdown_eiu_irq(struct irq_data *d)
d                 230 arch/mips/lantiq/irq.c 	ltq_disable_irq(d);
d                 232 arch/mips/lantiq/irq.c 		if (d->hwirq == ltq_eiu_irq[i]) {
d                 242 arch/mips/lantiq/irq.c static int ltq_icu_irq_set_affinity(struct irq_data *d,
d                 250 arch/mips/lantiq/irq.c 	irq_data_update_effective_affinity(d, &tmask);
d                 310 arch/mips/lantiq/irq.c static int icu_map(struct irq_domain *d, unsigned int irq, irq_hw_number_t hw)
d                  23 arch/mips/lasat/interrupt.c void disable_lasat_irq(struct irq_data *d)
d                  25 arch/mips/lasat/interrupt.c 	unsigned int irq_nr = d->irq - LASAT_IRQ_BASE;
d                  30 arch/mips/lasat/interrupt.c void enable_lasat_irq(struct irq_data *d)
d                  32 arch/mips/lasat/interrupt.c 	unsigned int irq_nr = d->irq - LASAT_IRQ_BASE;
d                  23 arch/mips/loongson32/common/irq.c static void ls1x_irq_ack(struct irq_data *d)
d                  25 arch/mips/loongson32/common/irq.c 	unsigned int bit = (d->irq - LS1X_IRQ_BASE) & 0x1f;
d                  26 arch/mips/loongson32/common/irq.c 	unsigned int n = (d->irq - LS1X_IRQ_BASE) >> 5;
d                  32 arch/mips/loongson32/common/irq.c static void ls1x_irq_mask(struct irq_data *d)
d                  34 arch/mips/loongson32/common/irq.c 	unsigned int bit = (d->irq - LS1X_IRQ_BASE) & 0x1f;
d                  35 arch/mips/loongson32/common/irq.c 	unsigned int n = (d->irq - LS1X_IRQ_BASE) >> 5;
d                  41 arch/mips/loongson32/common/irq.c static void ls1x_irq_mask_ack(struct irq_data *d)
d                  43 arch/mips/loongson32/common/irq.c 	unsigned int bit = (d->irq - LS1X_IRQ_BASE) & 0x1f;
d                  44 arch/mips/loongson32/common/irq.c 	unsigned int n = (d->irq - LS1X_IRQ_BASE) >> 5;
d                  52 arch/mips/loongson32/common/irq.c static void ls1x_irq_unmask(struct irq_data *d)
d                  54 arch/mips/loongson32/common/irq.c 	unsigned int bit = (d->irq - LS1X_IRQ_BASE) & 0x1f;
d                  55 arch/mips/loongson32/common/irq.c 	unsigned int n = (d->irq - LS1X_IRQ_BASE) >> 5;
d                  61 arch/mips/loongson32/common/irq.c static int ls1x_irq_settype(struct irq_data *d, unsigned int type)
d                  63 arch/mips/loongson32/common/irq.c 	unsigned int bit = (d->irq - LS1X_IRQ_BASE) & 0x1f;
d                  64 arch/mips/loongson32/common/irq.c 	unsigned int n = (d->irq - LS1X_IRQ_BASE) >> 5;
d                  15 arch/mips/loongson64/common/bonito-irq.c static inline void bonito_irq_enable(struct irq_data *d)
d                  17 arch/mips/loongson64/common/bonito-irq.c 	LOONGSON_INTENSET = (1 << (d->irq - LOONGSON_IRQ_BASE));
d                  21 arch/mips/loongson64/common/bonito-irq.c static inline void bonito_irq_disable(struct irq_data *d)
d                  23 arch/mips/loongson64/common/bonito-irq.c 	LOONGSON_INTENCLR = (1 << (d->irq - LOONGSON_IRQ_BASE));
d                  19 arch/mips/loongson64/loongson-3/irq.c int plat_set_irq_affinity(struct irq_data *d, const struct cpumask *affinity,
d                  34 arch/mips/loongson64/loongson-3/irq.c 	cpumask_copy(d->common->affinity, &new_affinity);
d                  99 arch/mips/loongson64/loongson-3/irq.c static inline void mask_loongson_irq(struct irq_data *d) { }
d                 100 arch/mips/loongson64/loongson-3/irq.c static inline void unmask_loongson_irq(struct irq_data *d) { }
d                1434 arch/mips/math-emu/cp1emu.c static union ieee754dp fpemu_dp_recip(union ieee754dp d)
d                1436 arch/mips/math-emu/cp1emu.c 	return ieee754dp_div(ieee754dp_one(0), d);
d                1439 arch/mips/math-emu/cp1emu.c static union ieee754dp fpemu_dp_rsqrt(union ieee754dp d)
d                1441 arch/mips/math-emu/cp1emu.c 	return ieee754dp_div(ieee754dp_one(0), ieee754dp_sqrt(d));
d                1667 arch/mips/math-emu/cp1emu.c 		union ieee754dp d;
d                1947 arch/mips/math-emu/cp1emu.c 			rv.d = ieee754dp_fsp(fs);
d                2113 arch/mips/math-emu/cp1emu.c 			DPFROMREG(rv.d, MIPSInst_FS(ir));
d                2121 arch/mips/math-emu/cp1emu.c 			DPFROMREG(rv.d, MIPSInst_FS(ir));
d                2129 arch/mips/math-emu/cp1emu.c 			DPFROMREG(rv.d, MIPSInst_FS(ir));
d                2137 arch/mips/math-emu/cp1emu.c 			DPFROMREG(rv.d, MIPSInst_FT(ir));
d                2141 arch/mips/math-emu/cp1emu.c 				DPFROMREG(rv.d, MIPSInst_FS(ir));
d                2149 arch/mips/math-emu/cp1emu.c 			DPFROMREG(rv.d, MIPSInst_FT(ir));
d                2151 arch/mips/math-emu/cp1emu.c 				DPFROMREG(rv.d, MIPSInst_FS(ir));
d                2166 arch/mips/math-emu/cp1emu.c 			rv.d = ieee754dp_maddf(fd, fs, ft);
d                2180 arch/mips/math-emu/cp1emu.c 			rv.d = ieee754dp_msubf(fd, fs, ft);
d                2192 arch/mips/math-emu/cp1emu.c 			rv.d = ieee754dp_rint(fs);
d                2218 arch/mips/math-emu/cp1emu.c 			rv.d = ieee754dp_fmin(fs, ft);
d                2231 arch/mips/math-emu/cp1emu.c 			rv.d = ieee754dp_fmina(fs, ft);
d                2244 arch/mips/math-emu/cp1emu.c 			rv.d = ieee754dp_fmax(fs, ft);
d                2257 arch/mips/math-emu/cp1emu.c 			rv.d = ieee754dp_fmaxa(fs, ft);
d                2274 arch/mips/math-emu/cp1emu.c 			DPFROMREG(rv.d, MIPSInst_FS(ir));
d                2282 arch/mips/math-emu/cp1emu.c 			rv.d = (*handler.b) (fs, ft);
d                2286 arch/mips/math-emu/cp1emu.c 			rv.d = (*handler.u) (fs);
d                2340 arch/mips/math-emu/cp1emu.c 				DPFROMREG(rv.d, MIPSInst_FT(ir));
d                2342 arch/mips/math-emu/cp1emu.c 				DPFROMREG(rv.d, MIPSInst_FS(ir));
d                2422 arch/mips/math-emu/cp1emu.c 			rv.d = ieee754dp_fint(fs.bits);
d                2587 arch/mips/math-emu/cp1emu.c 			rv.d = ieee754dp_flong(bits);
d                2763 arch/mips/math-emu/cp1emu.c 		DPTOREG(rv.d, MIPSInst_FD(ir));
d                  78 arch/mips/mm/page.c #define _uasm_i_pref(a, b, c, d)		\
d                  82 arch/mips/mm/page.c 			uasm_i_pref(a, b, c, d);\
d                  84 arch/mips/mm/page.c 		uasm_i_pref(a, b, c, d);	\
d                  32 arch/mips/mm/uasm-micromips.c #define M(a, b, c, d, e, f)					\
d                  36 arch/mips/mm/uasm-micromips.c 	 | (d) << RD_SH						\
d                  32 arch/mips/mm/uasm-mips.c #define M(a, b, c, d, e, f)					\
d                  36 arch/mips/mm/uasm-mips.c 	 | (d) << RD_SH						\
d                  41 arch/mips/mm/uasm-mips.c #define M6(a, b, c, d, e)					\
d                  45 arch/mips/mm/uasm-mips.c 	 | (d) << SIMM9_SH					\
d                 211 arch/mips/mm/uasm.c 	build_insn(buf, insn##op, b, a, c+d-1, c);	\
d                 218 arch/mips/mm/uasm.c 	build_insn(buf, insn##op, b, a, c+d-33, c);	\
d                 225 arch/mips/mm/uasm.c 	build_insn(buf, insn##op, b, a, c+d-33, c-32);	\
d                 232 arch/mips/mm/uasm.c 	build_insn(buf, insn##op, b, a, d-1, c);	\
d                  87 arch/mips/netlogic/common/irq.c static void xlp_pic_enable(struct irq_data *d)
d                  90 arch/mips/netlogic/common/irq.c 	struct nlm_pic_irq *pd = irq_data_get_irq_chip_data(d);
d                  98 arch/mips/netlogic/common/irq.c static void xlp_pic_disable(struct irq_data *d)
d                 100 arch/mips/netlogic/common/irq.c 	struct nlm_pic_irq *pd = irq_data_get_irq_chip_data(d);
d                 109 arch/mips/netlogic/common/irq.c static void xlp_pic_mask_ack(struct irq_data *d)
d                 111 arch/mips/netlogic/common/irq.c 	struct nlm_pic_irq *pd = irq_data_get_irq_chip_data(d);
d                 117 arch/mips/netlogic/common/irq.c static void xlp_pic_unmask(struct irq_data *d)
d                 119 arch/mips/netlogic/common/irq.c 	struct nlm_pic_irq *pd = irq_data_get_irq_chip_data(d);
d                 124 arch/mips/netlogic/common/irq.c 		pd->extra_ack(d);
d                 141 arch/mips/netlogic/common/irq.c static void cpuintr_disable(struct irq_data *d)
d                 143 arch/mips/netlogic/common/irq.c 	clear_c0_eimr(d->irq);
d                 146 arch/mips/netlogic/common/irq.c static void cpuintr_enable(struct irq_data *d)
d                 148 arch/mips/netlogic/common/irq.c 	set_c0_eimr(d->irq);
d                 151 arch/mips/netlogic/common/irq.c static void cpuintr_ack(struct irq_data *d)
d                 153 arch/mips/netlogic/common/irq.c 	ack_c0_eirr(d->irq);
d                 132 arch/mips/pci/msi-xlp.c static void xlp_msi_enable(struct irq_data *d)
d                 134 arch/mips/pci/msi-xlp.c 	struct xlp_msi_data *md = irq_data_get_irq_chip_data(d);
d                 138 arch/mips/pci/msi-xlp.c 	vec = nlm_irq_msivec(d->irq);
d                 149 arch/mips/pci/msi-xlp.c static void xlp_msi_disable(struct irq_data *d)
d                 151 arch/mips/pci/msi-xlp.c 	struct xlp_msi_data *md = irq_data_get_irq_chip_data(d);
d                 155 arch/mips/pci/msi-xlp.c 	vec = nlm_irq_msivec(d->irq);
d                 166 arch/mips/pci/msi-xlp.c static void xlp_msi_mask_ack(struct irq_data *d)
d                 168 arch/mips/pci/msi-xlp.c 	struct xlp_msi_data *md = irq_data_get_irq_chip_data(d);
d                 171 arch/mips/pci/msi-xlp.c 	link = nlm_irq_msilink(d->irq);
d                 172 arch/mips/pci/msi-xlp.c 	vec = nlm_irq_msivec(d->irq);
d                 173 arch/mips/pci/msi-xlp.c 	xlp_msi_disable(d);
d                 205 arch/mips/pci/msi-xlp.c static void xlp_msix_mask_ack(struct irq_data *d)
d                 211 arch/mips/pci/msi-xlp.c 	msixvec = nlm_irq_msixvec(d->irq);
d                 213 arch/mips/pci/msi-xlp.c 	pci_msi_mask_irq(d);
d                 214 arch/mips/pci/msi-xlp.c 	md = irq_data_get_irq_chip_data(d);
d                  45 arch/mips/pci/pci-alchemy.c 	int (*board_map_irq)(const struct pci_dev *d, u8 slot, u8 pin);
d                 350 arch/mips/pci/pci-ar2315.c static void ar2315_pci_irq_mask(struct irq_data *d)
d                 352 arch/mips/pci/pci-ar2315.c 	struct ar2315_pci_ctrl *apc = irq_data_get_irq_chip_data(d);
d                 354 arch/mips/pci/pci-ar2315.c 	ar2315_pci_reg_mask(apc, AR2315_PCI_IMR, BIT(d->hwirq), 0);
d                 357 arch/mips/pci/pci-ar2315.c static void ar2315_pci_irq_mask_ack(struct irq_data *d)
d                 359 arch/mips/pci/pci-ar2315.c 	struct ar2315_pci_ctrl *apc = irq_data_get_irq_chip_data(d);
d                 360 arch/mips/pci/pci-ar2315.c 	u32 m = BIT(d->hwirq);
d                 366 arch/mips/pci/pci-ar2315.c static void ar2315_pci_irq_unmask(struct irq_data *d)
d                 368 arch/mips/pci/pci-ar2315.c 	struct ar2315_pci_ctrl *apc = irq_data_get_irq_chip_data(d);
d                 370 arch/mips/pci/pci-ar2315.c 	ar2315_pci_reg_mask(apc, AR2315_PCI_IMR, 0, BIT(d->hwirq));
d                 380 arch/mips/pci/pci-ar2315.c static int ar2315_pci_irq_map(struct irq_domain *d, unsigned irq,
d                 384 arch/mips/pci/pci-ar2315.c 	irq_set_chip_data(irq, d->host_data);
d                 253 arch/mips/pci/pci-ar71xx.c static void ar71xx_pci_irq_unmask(struct irq_data *d)
d                 260 arch/mips/pci/pci-ar71xx.c 	apc = irq_data_get_irq_chip_data(d);
d                 261 arch/mips/pci/pci-ar71xx.c 	irq = d->irq - apc->irq_base;
d                 270 arch/mips/pci/pci-ar71xx.c static void ar71xx_pci_irq_mask(struct irq_data *d)
d                 277 arch/mips/pci/pci-ar71xx.c 	apc = irq_data_get_irq_chip_data(d);
d                 278 arch/mips/pci/pci-ar71xx.c 	irq = d->irq - apc->irq_base;
d                 248 arch/mips/pci/pci-ar724x.c static void ar724x_pci_irq_unmask(struct irq_data *d)
d                 255 arch/mips/pci/pci-ar724x.c 	apc = irq_data_get_irq_chip_data(d);
d                 257 arch/mips/pci/pci-ar724x.c 	offset = apc->irq_base - d->irq;
d                 269 arch/mips/pci/pci-ar724x.c static void ar724x_pci_irq_mask(struct irq_data *d)
d                 276 arch/mips/pci/pci-ar724x.c 	apc = irq_data_get_irq_chip_data(d);
d                 278 arch/mips/pci/pci-ar724x.c 	offset = apc->irq_base - d->irq;
d                 153 arch/mips/pci/pci-rt3883.c static void rt3883_pci_irq_unmask(struct irq_data *d)
d                 158 arch/mips/pci/pci-rt3883.c 	rpc = irq_data_get_irq_chip_data(d);
d                 161 arch/mips/pci/pci-rt3883.c 	rt3883_pci_w32(rpc, t | BIT(d->hwirq), RT3883_PCI_REG_PCIENA);
d                 166 arch/mips/pci/pci-rt3883.c static void rt3883_pci_irq_mask(struct irq_data *d)
d                 171 arch/mips/pci/pci-rt3883.c 	rpc = irq_data_get_irq_chip_data(d);
d                 174 arch/mips/pci/pci-rt3883.c 	rt3883_pci_w32(rpc, t & ~BIT(d->hwirq), RT3883_PCI_REG_PCIENA);
d                 186 arch/mips/pci/pci-rt3883.c static int rt3883_pci_irq_map(struct irq_domain *d, unsigned int irq,
d                 190 arch/mips/pci/pci-rt3883.c 	irq_set_chip_data(irq, d->host_data);
d                 269 arch/mips/pci/pci-xlr.c static void xlr_pci_ack(struct irq_data *d)
d                 277 arch/mips/pci/pci-xlr.c static void xls_pcie_ack(struct irq_data *d)
d                 281 arch/mips/pci/pci-xlr.c 	switch (d->irq) {
d                 298 arch/mips/pci/pci-xlr.c static void xls_pcie_ack_b(struct irq_data *d)
d                 302 arch/mips/pci/pci-xlr.c 	switch (d->irq) {
d                 275 arch/mips/pci/pci-xtalk-bridge.c static int bridge_set_affinity(struct irq_data *d, const struct cpumask *mask,
d                 279 arch/mips/pci/pci-xtalk-bridge.c 	struct bridge_irq_chip_data *data = d->chip_data;
d                 280 arch/mips/pci/pci-xtalk-bridge.c 	int bit = d->parent_data->hwirq;
d                 281 arch/mips/pci/pci-xtalk-bridge.c 	int pin = d->hwirq;
d                 284 arch/mips/pci/pci-xtalk-bridge.c 	ret = irq_chip_set_affinity_parent(d, mask, force);
d                 295 arch/mips/pci/pci-xtalk-bridge.c 	return irq_chip_set_affinity_parent(d, mask, force);
d                  75 arch/mips/pmcs-msp71xx/msp_irq_cic.c static void unmask_cic_irq(struct irq_data *d)
d                  88 arch/mips/pmcs-msp71xx/msp_irq_cic.c 			      irq_data_get_affinity_mask(d)))
d                  94 arch/mips/pmcs-msp71xx/msp_irq_cic.c 	cic_msk_reg[vpe] |= (1 << (d->irq - MSP_CIC_INTBASE));
d                  99 arch/mips/pmcs-msp71xx/msp_irq_cic.c static void mask_cic_irq(struct irq_data *d)
d                 107 arch/mips/pmcs-msp71xx/msp_irq_cic.c 	cic_msk_reg[vpe] &= ~(1 << (d->irq - MSP_CIC_INTBASE));
d                 111 arch/mips/pmcs-msp71xx/msp_irq_cic.c static void msp_cic_irq_ack(struct irq_data *d)
d                 113 arch/mips/pmcs-msp71xx/msp_irq_cic.c 	mask_cic_irq(d);
d                 119 arch/mips/pmcs-msp71xx/msp_irq_cic.c 	*CIC_STS_REG = (1 << (d->irq - MSP_CIC_INTBASE));
d                 125 arch/mips/pmcs-msp71xx/msp_irq_cic.c static int msp_cic_irq_set_affinity(struct irq_data *d,
d                 131 arch/mips/pmcs-msp71xx/msp_irq_cic.c 	unsigned long imask = (1 << (d->irq - MSP_CIC_INTBASE));
d                 135 arch/mips/pmcs-msp71xx/msp_irq_cic.c 	BUG_ON(d->irq == MSP_INT_VPE0_TIMER || d->irq == MSP_INT_VPE1_TIMER);
d                  46 arch/mips/pmcs-msp71xx/msp_irq_per.c static inline void unmask_per_irq(struct irq_data *d)
d                  51 arch/mips/pmcs-msp71xx/msp_irq_per.c 	*PER_INT_MSK_REG |= (1 << (d->irq - MSP_PER_INTBASE));
d                  54 arch/mips/pmcs-msp71xx/msp_irq_per.c 	*PER_INT_MSK_REG |= (1 << (d->irq - MSP_PER_INTBASE));
d                  59 arch/mips/pmcs-msp71xx/msp_irq_per.c static inline void mask_per_irq(struct irq_data *d)
d                  64 arch/mips/pmcs-msp71xx/msp_irq_per.c 	*PER_INT_MSK_REG &= ~(1 << (d->irq - MSP_PER_INTBASE));
d                  67 arch/mips/pmcs-msp71xx/msp_irq_per.c 	*PER_INT_MSK_REG &= ~(1 << (d->irq - MSP_PER_INTBASE));
d                  72 arch/mips/pmcs-msp71xx/msp_irq_per.c static inline void msp_per_irq_ack(struct irq_data *d)
d                  74 arch/mips/pmcs-msp71xx/msp_irq_per.c 	mask_per_irq(d);
d                  80 arch/mips/pmcs-msp71xx/msp_irq_per.c 	*PER_INT_STS_REG = (1 << (d->irq - MSP_PER_INTBASE));
d                  84 arch/mips/pmcs-msp71xx/msp_irq_per.c static int msp_per_irq_set_affinity(struct irq_data *d,
d                  88 arch/mips/pmcs-msp71xx/msp_irq_per.c 	unmask_per_irq(d);
d                  19 arch/mips/pmcs-msp71xx/msp_irq_slp.c static inline void unmask_msp_slp_irq(struct irq_data *d)
d                  21 arch/mips/pmcs-msp71xx/msp_irq_slp.c 	unsigned int irq = d->irq;
d                  30 arch/mips/pmcs-msp71xx/msp_irq_slp.c static inline void mask_msp_slp_irq(struct irq_data *d)
d                  32 arch/mips/pmcs-msp71xx/msp_irq_slp.c 	unsigned int irq = d->irq;
d                  45 arch/mips/pmcs-msp71xx/msp_irq_slp.c static inline void ack_msp_slp_irq(struct irq_data *d)
d                  47 arch/mips/pmcs-msp71xx/msp_irq_slp.c 	unsigned int irq = d->irq;
d                  48 arch/mips/pmcs-msp71xx/msp_serial.c 	struct msp_uart_data *d = p->private_data;
d                  51 arch/mips/pmcs-msp71xx/msp_serial.c 		d->last_lcr = value;
d                  66 arch/mips/pmcs-msp71xx/msp_serial.c 	struct msp_uart_data *d = p->private_data;
d                  83 arch/mips/pmcs-msp71xx/msp_serial.c 		writeb(d->last_lcr, p->membase + (UART_LCR << p->regshift));
d                 156 arch/mips/pnx833x/common/interrupts.c static void pnx833x_enable_pic_irq(struct irq_data *d)
d                 159 arch/mips/pnx833x/common/interrupts.c 	unsigned int pic_irq = d->irq - PNX833X_PIC_IRQ_BASE;
d                 166 arch/mips/pnx833x/common/interrupts.c static void pnx833x_disable_pic_irq(struct irq_data *d)
d                 169 arch/mips/pnx833x/common/interrupts.c 	unsigned int pic_irq = d->irq - PNX833X_PIC_IRQ_BASE;
d                 178 arch/mips/pnx833x/common/interrupts.c static void pnx833x_enable_gpio_irq(struct irq_data *d)
d                 180 arch/mips/pnx833x/common/interrupts.c 	int pin = d->irq - PNX833X_GPIO_IRQ_BASE;
d                 187 arch/mips/pnx833x/common/interrupts.c static void pnx833x_disable_gpio_irq(struct irq_data *d)
d                 189 arch/mips/pnx833x/common/interrupts.c 	int pin = d->irq - PNX833X_GPIO_IRQ_BASE;
d                 196 arch/mips/pnx833x/common/interrupts.c static int pnx833x_set_type_gpio_irq(struct irq_data *d, unsigned int flow_type)
d                 198 arch/mips/pnx833x/common/interrupts.c 	int pin = d->irq - PNX833X_GPIO_IRQ_BASE;
d                  69 arch/mips/ralink/irq.c static void ralink_intc_irq_unmask(struct irq_data *d)
d                  71 arch/mips/ralink/irq.c 	rt_intc_w32(BIT(d->hwirq), INTC_REG_ENABLE);
d                  74 arch/mips/ralink/irq.c static void ralink_intc_irq_mask(struct irq_data *d)
d                  76 arch/mips/ralink/irq.c 	rt_intc_w32(BIT(d->hwirq), INTC_REG_DISABLE);
d                 134 arch/mips/ralink/irq.c static int intc_map(struct irq_domain *d, unsigned int irq, irq_hw_number_t hw)
d                 112 arch/mips/rb532/irq.c static void rb532_enable_irq(struct irq_data *d)
d                 114 arch/mips/rb532/irq.c 	unsigned int group, intr_bit, irq_nr = d->irq;
d                 133 arch/mips/rb532/irq.c static void rb532_disable_irq(struct irq_data *d)
d                 135 arch/mips/rb532/irq.c 	unsigned int group, intr_bit, mask, irq_nr = d->irq;
d                 164 arch/mips/rb532/irq.c static void rb532_mask_and_ack_irq(struct irq_data *d)
d                 166 arch/mips/rb532/irq.c 	rb532_disable_irq(d);
d                 167 arch/mips/rb532/irq.c 	ack_local_irq(group_to_ip(irq_to_group(d->irq)));
d                 170 arch/mips/rb532/irq.c static int rb532_set_type(struct irq_data *d,  unsigned type)
d                 172 arch/mips/rb532/irq.c 	int gpio = d->irq - GPIO_MAPPED_IRQ_BASE;
d                 173 arch/mips/rb532/irq.c 	int group = irq_to_group(d->irq);
d                 175 arch/mips/rb532/irq.c 	if (group != GPIO_MAPPED_IRQ_GROUP || d->irq > (GROUP4_IRQ_BASE + 13))
d                  35 arch/mips/sgi-ip22/ip22-int.c static void enable_local0_irq(struct irq_data *d)
d                  39 arch/mips/sgi-ip22/ip22-int.c 	if (d->irq != SGI_MAP_0_IRQ)
d                  40 arch/mips/sgi-ip22/ip22-int.c 		sgint->imask0 |= (1 << (d->irq - SGINT_LOCAL0));
d                  43 arch/mips/sgi-ip22/ip22-int.c static void disable_local0_irq(struct irq_data *d)
d                  45 arch/mips/sgi-ip22/ip22-int.c 	sgint->imask0 &= ~(1 << (d->irq - SGINT_LOCAL0));
d                  54 arch/mips/sgi-ip22/ip22-int.c static void enable_local1_irq(struct irq_data *d)
d                  58 arch/mips/sgi-ip22/ip22-int.c 	if (d->irq != SGI_MAP_1_IRQ)
d                  59 arch/mips/sgi-ip22/ip22-int.c 		sgint->imask1 |= (1 << (d->irq - SGINT_LOCAL1));
d                  62 arch/mips/sgi-ip22/ip22-int.c static void disable_local1_irq(struct irq_data *d)
d                  64 arch/mips/sgi-ip22/ip22-int.c 	sgint->imask1 &= ~(1 << (d->irq - SGINT_LOCAL1));
d                  73 arch/mips/sgi-ip22/ip22-int.c static void enable_local2_irq(struct irq_data *d)
d                  76 arch/mips/sgi-ip22/ip22-int.c 	sgint->cmeimask0 |= (1 << (d->irq - SGINT_LOCAL2));
d                  79 arch/mips/sgi-ip22/ip22-int.c static void disable_local2_irq(struct irq_data *d)
d                  81 arch/mips/sgi-ip22/ip22-int.c 	sgint->cmeimask0 &= ~(1 << (d->irq - SGINT_LOCAL2));
d                  92 arch/mips/sgi-ip22/ip22-int.c static void enable_local3_irq(struct irq_data *d)
d                  95 arch/mips/sgi-ip22/ip22-int.c 	sgint->cmeimask1 |= (1 << (d->irq - SGINT_LOCAL3));
d                  98 arch/mips/sgi-ip22/ip22-int.c static void disable_local3_irq(struct irq_data *d)
d                 100 arch/mips/sgi-ip22/ip22-int.c 	sgint->cmeimask1 &= ~(1 << (d->irq - SGINT_LOCAL3));
d                  50 arch/mips/sgi-ip27/ip27-irq.c static void enable_hub_irq(struct irq_data *d)
d                  52 arch/mips/sgi-ip27/ip27-irq.c 	struct hub_irq_data *hd = irq_data_get_irq_chip_data(d);
d                  55 arch/mips/sgi-ip27/ip27-irq.c 	set_bit(d->hwirq, mask);
d                  60 arch/mips/sgi-ip27/ip27-irq.c static void disable_hub_irq(struct irq_data *d)
d                  62 arch/mips/sgi-ip27/ip27-irq.c 	struct hub_irq_data *hd = irq_data_get_irq_chip_data(d);
d                  65 arch/mips/sgi-ip27/ip27-irq.c 	clear_bit(d->hwirq, mask);
d                  90 arch/mips/sgi-ip27/ip27-irq.c static int set_affinity_hub_irq(struct irq_data *d, const struct cpumask *mask,
d                  93 arch/mips/sgi-ip27/ip27-irq.c 	struct hub_irq_data *hd = irq_data_get_irq_chip_data(d);
d                  98 arch/mips/sgi-ip27/ip27-irq.c 	if (irqd_is_started(d))
d                  99 arch/mips/sgi-ip27/ip27-irq.c 		disable_hub_irq(d);
d                 103 arch/mips/sgi-ip27/ip27-irq.c 	if (irqd_is_started(d))
d                 104 arch/mips/sgi-ip27/ip27-irq.c 		enable_hub_irq(d);
d                 106 arch/mips/sgi-ip27/ip27-irq.c 	irq_data_update_effective_affinity(d, cpumask_of(hd->cpu));
d                 131 arch/mips/sgi-ip32/ip32-irq.c static inline void crime_enable_irq(struct irq_data *d)
d                 133 arch/mips/sgi-ip32/ip32-irq.c 	unsigned int bit = d->irq - CRIME_IRQ_BASE;
d                 139 arch/mips/sgi-ip32/ip32-irq.c static inline void crime_disable_irq(struct irq_data *d)
d                 141 arch/mips/sgi-ip32/ip32-irq.c 	unsigned int bit = d->irq - CRIME_IRQ_BASE;
d                 154 arch/mips/sgi-ip32/ip32-irq.c static void crime_edge_mask_and_ack_irq(struct irq_data *d)
d                 156 arch/mips/sgi-ip32/ip32-irq.c 	unsigned int bit = d->irq - CRIME_IRQ_BASE;
d                 164 arch/mips/sgi-ip32/ip32-irq.c 	crime_disable_irq(d);
d                 183 arch/mips/sgi-ip32/ip32-irq.c static void enable_macepci_irq(struct irq_data *d)
d                 185 arch/mips/sgi-ip32/ip32-irq.c 	macepci_mask |= MACEPCI_CONTROL_INT(d->irq - MACEPCI_SCSI0_IRQ);
d                 187 arch/mips/sgi-ip32/ip32-irq.c 	crime_mask |= 1 << (d->irq - CRIME_IRQ_BASE);
d                 191 arch/mips/sgi-ip32/ip32-irq.c static void disable_macepci_irq(struct irq_data *d)
d                 193 arch/mips/sgi-ip32/ip32-irq.c 	crime_mask &= ~(1 << (d->irq - CRIME_IRQ_BASE));
d                 196 arch/mips/sgi-ip32/ip32-irq.c 	macepci_mask &= ~MACEPCI_CONTROL_INT(d->irq - MACEPCI_SCSI0_IRQ);
d                 246 arch/mips/sgi-ip32/ip32-irq.c static void enable_maceisa_irq(struct irq_data *d)
d                 250 arch/mips/sgi-ip32/ip32-irq.c 	pr_debug("maceisa enable: %u\n", d->irq);
d                 252 arch/mips/sgi-ip32/ip32-irq.c 	switch (d->irq) {
d                 266 arch/mips/sgi-ip32/ip32-irq.c 	maceisa_mask |= 1 << (d->irq - MACEISA_AUDIO_SW_IRQ);
d                 270 arch/mips/sgi-ip32/ip32-irq.c static void disable_maceisa_irq(struct irq_data *d)
d                 274 arch/mips/sgi-ip32/ip32-irq.c 	maceisa_mask &= ~(1 << (d->irq - MACEISA_AUDIO_SW_IRQ));
d                 288 arch/mips/sgi-ip32/ip32-irq.c static void mask_and_ack_maceisa_irq(struct irq_data *d)
d                 294 arch/mips/sgi-ip32/ip32-irq.c 	mace_int &= ~(1 << (d->irq - MACEISA_AUDIO_SW_IRQ));
d                 297 arch/mips/sgi-ip32/ip32-irq.c 	disable_maceisa_irq(d);
d                 318 arch/mips/sgi-ip32/ip32-irq.c static void enable_mace_irq(struct irq_data *d)
d                 320 arch/mips/sgi-ip32/ip32-irq.c 	unsigned int bit = d->irq - CRIME_IRQ_BASE;
d                 326 arch/mips/sgi-ip32/ip32-irq.c static void disable_mace_irq(struct irq_data *d)
d                 328 arch/mips/sgi-ip32/ip32-irq.c 	unsigned int bit = d->irq - CRIME_IRQ_BASE;
d                  77 arch/mips/sibyte/bcm1480/irq.c static int bcm1480_set_affinity(struct irq_data *d, const struct cpumask *mask,
d                  80 arch/mips/sibyte/bcm1480/irq.c 	unsigned int irq_dirty, irq = d->irq;
d                 125 arch/mips/sibyte/bcm1480/irq.c static void disable_bcm1480_irq(struct irq_data *d)
d                 127 arch/mips/sibyte/bcm1480/irq.c 	unsigned int irq = d->irq;
d                 132 arch/mips/sibyte/bcm1480/irq.c static void enable_bcm1480_irq(struct irq_data *d)
d                 134 arch/mips/sibyte/bcm1480/irq.c 	unsigned int irq = d->irq;
d                 140 arch/mips/sibyte/bcm1480/irq.c static void ack_bcm1480_irq(struct irq_data *d)
d                 142 arch/mips/sibyte/bcm1480/irq.c 	unsigned int irq_dirty, irq = d->irq;
d                  70 arch/mips/sibyte/sb1250/irq.c static int sb1250_set_affinity(struct irq_data *d, const struct cpumask *mask,
d                  74 arch/mips/sibyte/sb1250/irq.c 	unsigned int irq = d->irq;
d                 112 arch/mips/sibyte/sb1250/irq.c static void disable_sb1250_irq(struct irq_data *d)
d                 114 arch/mips/sibyte/sb1250/irq.c 	unsigned int irq = d->irq;
d                 119 arch/mips/sibyte/sb1250/irq.c static void enable_sb1250_irq(struct irq_data *d)
d                 121 arch/mips/sibyte/sb1250/irq.c 	unsigned int irq = d->irq;
d                 127 arch/mips/sibyte/sb1250/irq.c static void ack_sb1250_irq(struct irq_data *d)
d                 129 arch/mips/sibyte/sb1250/irq.c 	unsigned int irq = d->irq;
d                 181 arch/mips/sni/a20r.c static inline void unmask_a20r_irq(struct irq_data *d)
d                 183 arch/mips/sni/a20r.c 	set_c0_status(0x100 << (d->irq - SNI_A20R_IRQ_BASE));
d                 187 arch/mips/sni/a20r.c static inline void mask_a20r_irq(struct irq_data *d)
d                 189 arch/mips/sni/a20r.c 	clear_c0_status(0x100 << (d->irq - SNI_A20R_IRQ_BASE));
d                 199 arch/mips/sni/pcimt.c static void enable_pcimt_irq(struct irq_data *d)
d                 201 arch/mips/sni/pcimt.c 	unsigned int mask = 1 << (d->irq - PCIMT_IRQ_INT2);
d                 206 arch/mips/sni/pcimt.c void disable_pcimt_irq(struct irq_data *d)
d                 208 arch/mips/sni/pcimt.c 	unsigned int mask = ~(1 << (d->irq - PCIMT_IRQ_INT2));
d                 161 arch/mips/sni/pcit.c static void enable_pcit_irq(struct irq_data *d)
d                 163 arch/mips/sni/pcit.c 	u32 mask = 1 << (d->irq - SNI_PCIT_INT_START + 24);
d                 168 arch/mips/sni/pcit.c void disable_pcit_irq(struct irq_data *d)
d                 170 arch/mips/sni/pcit.c 	u32 mask = 1 << (d->irq - SNI_PCIT_INT_START + 24);
d                 158 arch/mips/sni/rm200.c static void sni_rm200_disable_8259A_irq(struct irq_data *d)
d                 160 arch/mips/sni/rm200.c 	unsigned int mask, irq = d->irq - RM200_I8259A_IRQ_BASE;
d                 173 arch/mips/sni/rm200.c static void sni_rm200_enable_8259A_irq(struct irq_data *d)
d                 175 arch/mips/sni/rm200.c 	unsigned int mask, irq = d->irq - RM200_I8259A_IRQ_BASE;
d                 211 arch/mips/sni/rm200.c void sni_rm200_mask_and_ack_8259A(struct irq_data *d)
d                 213 arch/mips/sni/rm200.c 	unsigned int irqmask, irq = d->irq - RM200_I8259A_IRQ_BASE;
d                 430 arch/mips/sni/rm200.c static void enable_rm200_irq(struct irq_data *d)
d                 432 arch/mips/sni/rm200.c 	unsigned int mask = 1 << (d->irq - SNI_RM200_INT_START);
d                 437 arch/mips/sni/rm200.c void disable_rm200_irq(struct irq_data *d)
d                 439 arch/mips/sni/rm200.c 	unsigned int mask = 1 << (d->irq - SNI_RM200_INT_START);
d                  53 arch/mips/txx9/generic/irq_tx4939.c static void tx4939_irq_unmask(struct irq_data *d)
d                  55 arch/mips/txx9/generic/irq_tx4939.c 	unsigned int irq_nr = d->irq - TXX9_IRQ_BASE;
d                  71 arch/mips/txx9/generic/irq_tx4939.c static inline void tx4939_irq_mask(struct irq_data *d)
d                  73 arch/mips/txx9/generic/irq_tx4939.c 	unsigned int irq_nr = d->irq - TXX9_IRQ_BASE;
d                  90 arch/mips/txx9/generic/irq_tx4939.c static void tx4939_irq_mask_ack(struct irq_data *d)
d                  92 arch/mips/txx9/generic/irq_tx4939.c 	unsigned int irq_nr = d->irq - TXX9_IRQ_BASE;
d                  94 arch/mips/txx9/generic/irq_tx4939.c 	tx4939_irq_mask(d);
d                 104 arch/mips/txx9/generic/irq_tx4939.c static int tx4939_irq_set_type(struct irq_data *d, unsigned int flow_type)
d                 106 arch/mips/txx9/generic/irq_tx4939.c 	unsigned int irq_nr = d->irq - TXX9_IRQ_BASE;
d                  50 arch/mips/txx9/jmr3927/irq.c static void mask_irq_ioc(struct irq_data *d)
d                  53 arch/mips/txx9/jmr3927/irq.c 	unsigned int irq_nr = d->irq - JMR3927_IRQ_IOC;
d                  60 arch/mips/txx9/jmr3927/irq.c static void unmask_irq_ioc(struct irq_data *d)
d                  63 arch/mips/txx9/jmr3927/irq.c 	unsigned int irq_nr = d->irq - JMR3927_IRQ_IOC;
d                 130 arch/mips/txx9/rbtx4927/irq.c static void toshiba_rbtx4927_irq_ioc_enable(struct irq_data *d)
d                 135 arch/mips/txx9/rbtx4927/irq.c 	v |= (1 << (d->irq - RBTX4927_IRQ_IOC));
d                 139 arch/mips/txx9/rbtx4927/irq.c static void toshiba_rbtx4927_irq_ioc_disable(struct irq_data *d)
d                 144 arch/mips/txx9/rbtx4927/irq.c 	v &= ~(1 << (d->irq - RBTX4927_IRQ_IOC));
d                  83 arch/mips/txx9/rbtx4938/irq.c static void toshiba_rbtx4938_irq_ioc_enable(struct irq_data *d)
d                  88 arch/mips/txx9/rbtx4938/irq.c 	v |= (1 << (d->irq - RBTX4938_IRQ_IOC));
d                  93 arch/mips/txx9/rbtx4938/irq.c static void toshiba_rbtx4938_irq_ioc_disable(struct irq_data *d)
d                  98 arch/mips/txx9/rbtx4938/irq.c 	v &= ~(1 << (d->irq - RBTX4938_IRQ_IOC));
d                  22 arch/mips/txx9/rbtx4939/irq.c static void rbtx4939_ioc_irq_unmask(struct irq_data *d)
d                  24 arch/mips/txx9/rbtx4939/irq.c 	int ioc_nr = d->irq - RBTX4939_IRQ_IOC;
d                  29 arch/mips/txx9/rbtx4939/irq.c static void rbtx4939_ioc_irq_mask(struct irq_data *d)
d                  31 arch/mips/txx9/rbtx4939/irq.c 	int ioc_nr = d->irq - RBTX4939_IRQ_IOC;
d                 432 arch/mips/vr41xx/common/icu.c static void disable_sysint1_irq(struct irq_data *d)
d                 434 arch/mips/vr41xx/common/icu.c 	icu1_clear(MSYSINT1REG, 1 << SYSINT1_IRQ_TO_PIN(d->irq));
d                 437 arch/mips/vr41xx/common/icu.c static void enable_sysint1_irq(struct irq_data *d)
d                 439 arch/mips/vr41xx/common/icu.c 	icu1_set(MSYSINT1REG, 1 << SYSINT1_IRQ_TO_PIN(d->irq));
d                 448 arch/mips/vr41xx/common/icu.c static void disable_sysint2_irq(struct irq_data *d)
d                 450 arch/mips/vr41xx/common/icu.c 	icu2_clear(MSYSINT2REG, 1 << SYSINT2_IRQ_TO_PIN(d->irq));
d                 453 arch/mips/vr41xx/common/icu.c static void enable_sysint2_irq(struct irq_data *d)
d                 455 arch/mips/vr41xx/common/icu.c 	icu2_set(MSYSINT2REG, 1 << SYSINT2_IRQ_TO_PIN(d->irq));
d                   8 arch/nds32/include/asm/assembler.h 	setgie.d
d                  19 arch/nds32/include/asm/assembler.h 	setgie.d
d                 128 arch/nds32/include/asm/sfp-machine.h #define udiv_qrnnd(q, r, n1, n0, d)				\
d                 131 arch/nds32/include/asm/sfp-machine.h 	__d1 = __ll_highpart(d);				\
d                 132 arch/nds32/include/asm/sfp-machine.h 	__d0 = __ll_lowpart(d);					\
d                 139 arch/nds32/include/asm/sfp-machine.h 		__q1--, __r1 += (d);				\
d                 140 arch/nds32/include/asm/sfp-machine.h 		if (__r1 >= (d))				\
d                 142 arch/nds32/include/asm/sfp-machine.h 				__q1--, __r1 += (d);		\
d                 150 arch/nds32/include/asm/sfp-machine.h 		__q0--, __r0 += (d);				\
d                 151 arch/nds32/include/asm/sfp-machine.h 		if (__r0 >= (d))				\
d                 153 arch/nds32/include/asm/sfp-machine.h 				__q0--, __r0 += (d);		\
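The udiv_qrnnd(q, r, n1, n0, d) entries above follow the usual longlong.h convention: divide the two-word value (n1:n0) by d, assuming n1 < d, and produce a one-word quotient and remainder. A small standalone C sketch of that contract for 32-bit words (the function name is mine, for illustration):

#include <stdint.h>

static void sketch_udiv_qrnnd(uint32_t *q, uint32_t *r,
			      uint32_t n1, uint32_t n0, uint32_t d)
{
	/* combine the high and low words into one 64-bit numerator */
	uint64_t n = ((uint64_t)n1 << 32) | n0;

	*q = (uint32_t)(n / d);	/* fits in 32 bits because n1 < d */
	*r = (uint32_t)(n % d);
}

The macro itself reaches the same result with half-word steps (__ll_highpart/__ll_lowpart and the __q1/__q0 corrections visible above), since the target only provides single-word division.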
d                  34 arch/nds32/math-emu/fpuemu.c DEF3OP(fmadd, d, fmuld, faddd);
d                  35 arch/nds32/math-emu/fpuemu.c DEF3OP(fmsub, d, fmuld, fsubd);
d                  38 arch/nds32/math-emu/fpuemu.c DEF3OPNEG(fnmadd, d, fmuld, faddd, fnegd);
d                  39 arch/nds32/math-emu/fpuemu.c DEF3OPNEG(fnmsub, d, fmuld, fsubd, fnegd);
d                 107 arch/nios2/boot/compressed/misc.c 	char *d = (char *)__dest, *s = (char *)__src;
d                 110 arch/nios2/boot/compressed/misc.c 		d[i] = s[i];
d                  19 arch/nios2/include/asm/string.h extern void *memcpy(void *d, const void *s, size_t count);
d                  20 arch/nios2/include/asm/string.h extern void *memmove(void *d, const void *s, size_t count);
d                  31 arch/nios2/kernel/irq.c static void chip_unmask(struct irq_data *d)
d                  33 arch/nios2/kernel/irq.c 	ienable |= (1 << d->hwirq);
d                  37 arch/nios2/kernel/irq.c static void chip_mask(struct irq_data *d)
d                  39 arch/nios2/kernel/irq.c 	ienable &= ~(1 << d->hwirq);
d                  13 arch/nios2/lib/memmove.c void *memmove(void *d, const void *s, size_t count)
d                  18 arch/nios2/lib/memmove.c 		return d;
d                  20 arch/nios2/lib/memmove.c 	if (d < s) {
d                  21 arch/nios2/lib/memmove.c 		dst = (unsigned long) d;
d                  47 arch/nios2/lib/memmove.c 		dst = (unsigned long) d + count;
d                  79 arch/nios2/lib/memmove.c 	return d;
d                  29 arch/openrisc/lib/memcpy.c 	unsigned char *d, *s;
d                  61 arch/openrisc/lib/memcpy.c 		d = (unsigned char *)dest_w;
d                  65 arch/openrisc/lib/memcpy.c 		d = (unsigned char *)dest_w;
d                  69 arch/openrisc/lib/memcpy.c 			*d++ = *s++;
d                  70 arch/openrisc/lib/memcpy.c 			*d++ = *s++;
d                  71 arch/openrisc/lib/memcpy.c 			*d++ = *s++;
d                  72 arch/openrisc/lib/memcpy.c 			*d++ = *s++;
d                  73 arch/openrisc/lib/memcpy.c 			*d++ = *s++;
d                  74 arch/openrisc/lib/memcpy.c 			*d++ = *s++;
d                  75 arch/openrisc/lib/memcpy.c 			*d++ = *s++;
d                  76 arch/openrisc/lib/memcpy.c 			*d++ = *s++;
d                  80 arch/openrisc/lib/memcpy.c 			*d++ = *s++;
d                  81 arch/openrisc/lib/memcpy.c 			*d++ = *s++;
d                  82 arch/openrisc/lib/memcpy.c 			*d++ = *s++;
d                  83 arch/openrisc/lib/memcpy.c 			*d++ = *s++;
d                  88 arch/openrisc/lib/memcpy.c 		*d++ = *s++;
d                  89 arch/openrisc/lib/memcpy.c 		*d++ = *s++;
d                  93 arch/openrisc/lib/memcpy.c 		*d++ = *s++;
d                 104 arch/openrisc/lib/memcpy.c 	unsigned char *d = (unsigned char *)dest, *s = (unsigned char *)src;
d                 113 arch/openrisc/lib/memcpy.c 	d = (unsigned char *)dest_w;
d                 118 arch/openrisc/lib/memcpy.c 		*d++ = *s++;
d                  68 arch/parisc/boot/compressed/misc.c 	char *d = dest;
d                  70 arch/parisc/boot/compressed/misc.c 	if (d <= s) {
d                  72 arch/parisc/boot/compressed/misc.c 			*d++ = *s++;
d                  74 arch/parisc/boot/compressed/misc.c 		d += n;
d                  77 arch/parisc/boot/compressed/misc.c 			*--d = *--s;
d                  91 arch/parisc/boot/compressed/misc.c void *memcpy(void *d, const void *s, size_t len)
d                  93 arch/parisc/boot/compressed/misc.c 	char *dest = (char *)d;
d                  98 arch/parisc/boot/compressed/misc.c 	return d;
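The nios2, openrisc and parisc decompressor hits above are hand-rolled memcpy/memmove implementations. The overlap-safe part of that pattern is worth spelling out: copy forward when the destination starts at or before the source, backward otherwise. A standalone sketch (sketch_memmove is an illustrative name, not one of the functions listed):

#include <stddef.h>

static void *sketch_memmove(void *dest, const void *src, size_t count)
{
	char *d = dest;
	const char *s = src;

	if (d <= s) {			/* forward copy cannot clobber unread bytes */
		while (count--)
			*d++ = *s++;
	} else {			/* copy backward so the overlapping tail survives */
		d += count;
		s += count;
		while (count--)
			*--d = *--s;
	}
	return dest;
}

The real implementations above add a word-at-a-time fast path once both pointers are aligned, which appears to be what the dest_w handling and unrolled copies in the openrisc memcpy.c lines are doing.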
d                 120 arch/parisc/include/asm/hash.h 	u64 b, c, d;
d                 132 arch/parisc/include/asm/hash.h 	b = (b << 2) + a;	_ASSIGN(d, a << 17);
d                 133 arch/parisc/include/asm/hash.h 	a = b + (a << 1);	c += d;
d                 134 arch/parisc/include/asm/hash.h 	d = a << 10;		_ASSIGN(a, a << 19);
d                 135 arch/parisc/include/asm/hash.h 	d = a - d;		_ASSIGN(a, a << 4, "X" (d));
d                 137 arch/parisc/include/asm/hash.h 	d -= c;			c += a << 1;
d                 138 arch/parisc/include/asm/hash.h 	a += c << 3;		_ASSIGN(b, b << (7+31), "X" (c), "X" (d));
d                 139 arch/parisc/include/asm/hash.h 	a <<= 31;		b += d;
d                  38 arch/parisc/include/asm/irq.h void cpu_ack_irq(struct irq_data *d);
d                  39 arch/parisc/include/asm/irq.h void cpu_eoi_irq(struct irq_data *d);
d                  48 arch/parisc/include/asm/irq.h extern int cpu_check_affinity(struct irq_data *d, const struct cpumask *dest);
d                  43 arch/parisc/include/asm/parisc-device.h #define to_parisc_device(d)	container_of(d, struct parisc_device, dev)
d                  44 arch/parisc/include/asm/parisc-device.h #define to_parisc_driver(d)	container_of(d, struct parisc_driver, drv)
d                  45 arch/parisc/include/asm/parisc-device.h #define parisc_parent(d)	to_parisc_device(d->dev.parent)
d                  47 arch/parisc/include/asm/parisc-device.h static inline const char *parisc_pathname(struct parisc_device *d)
d                  49 arch/parisc/include/asm/parisc-device.h 	return dev_name(&d->dev);
d                  53 arch/parisc/include/asm/parisc-device.h parisc_set_drvdata(struct parisc_device *d, void *p)
d                  55 arch/parisc/include/asm/parisc-device.h 	dev_set_drvdata(&d->dev, p);
d                  59 arch/parisc/include/asm/parisc-device.h parisc_get_drvdata(struct parisc_device *d)
d                  61 arch/parisc/include/asm/parisc-device.h 	return dev_get_drvdata(&d->dev);
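to_parisc_device(), to_macio_device(), to_mips_cdmm_device() and to_gio_device() in this listing are all the same idiom: a bus-specific device embeds a struct device, container_of() recovers the outer structure from a pointer to the embedded member, and thin drvdata wrappers sit on top. A sketch with hypothetical names:

#include <linux/device.h>

struct example_device {			/* hypothetical bus-specific device */
	int slot;
	struct device dev;		/* embedded generic device */
};

#define to_example_device(d)	container_of(d, struct example_device, dev)

static inline void example_set_drvdata(struct example_device *d, void *p)
{
	dev_set_drvdata(&d->dev, p);
}

static inline void *example_get_drvdata(struct example_device *d)
{
	return dev_get_drvdata(&d->dev);
}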
d                  92 arch/parisc/include/asm/psw.h 	unsigned int d:1;
d                  95 arch/parisc/include/asm/ropes.h static inline int IS_ASTRO(struct parisc_device *d) {
d                  96 arch/parisc/include/asm/ropes.h 	return d->id.hversion == ASTRO_RUNWAY_PORT;
d                  99 arch/parisc/include/asm/ropes.h static inline int IS_IKE(struct parisc_device *d) {
d                 100 arch/parisc/include/asm/ropes.h 	return d->id.hversion == IKE_MERCED_PORT;
d                 103 arch/parisc/include/asm/ropes.h static inline int IS_PLUTO(struct parisc_device *d) {
d                 104 arch/parisc/include/asm/ropes.h 	return d->id.hversion == PLUTO_MCKINLEY_PORT;
d                 225 arch/parisc/include/asm/ropes.h static inline int IS_ELROY(struct parisc_device *d) {
d                 226 arch/parisc/include/asm/ropes.h 	return (d->id.hversion == ELROY_HVERS);
d                 229 arch/parisc/include/asm/ropes.h static inline int IS_MERCURY(struct parisc_device *d) {
d                 230 arch/parisc/include/asm/ropes.h 	return (d->id.hversion == MERCURY_HVERS);
d                 233 arch/parisc/include/asm/ropes.h static inline int IS_QUICKSILVER(struct parisc_device *d) {
d                 234 arch/parisc/include/asm/ropes.h 	return (d->id.hversion == QUICKSILVER_HVERS);
d                 235 arch/parisc/kernel/drivers.c 	struct find_data * d = (struct find_data*)data;
d                 238 arch/parisc/kernel/drivers.c 		if (pdev->hpa.start == d->hpa) {
d                 239 arch/parisc/kernel/drivers.c 			d->dev = pdev;
d                 248 arch/parisc/kernel/drivers.c 	struct find_data d = {
d                 253 arch/parisc/kernel/drivers.c 	ret = for_each_padev(find_device, &d);
d                 254 arch/parisc/kernel/drivers.c 	return ret ? d.dev : NULL;
d                 452 arch/parisc/kernel/drivers.c 	struct match_id_data * d = data;
d                 454 arch/parisc/kernel/drivers.c 	if (pdev->hw_path == d->id) {
d                 455 arch/parisc/kernel/drivers.c 		d->dev = pdev;
d                 472 arch/parisc/kernel/drivers.c 	struct match_id_data d = {
d                 475 arch/parisc/kernel/drivers.c 	if (device_for_each_child(parent, &d, match_by_id))
d                 476 arch/parisc/kernel/drivers.c 		return d.dev;
d                 702 arch/parisc/kernel/drivers.c 	struct parse_tree_data * d = data;
d                 706 arch/parisc/kernel/drivers.c 			if (match_parisc_device(dev, d->index, d->modpath))
d                 707 arch/parisc/kernel/drivers.c 				d->dev = dev;
d                 709 arch/parisc/kernel/drivers.c 			if (match_pci_device(dev, d->index, d->modpath))
d                 710 arch/parisc/kernel/drivers.c 				d->dev = dev;
d                 713 arch/parisc/kernel/drivers.c 			struct device *new = parse_tree_node(dev, d->index, d->modpath);
d                 715 arch/parisc/kernel/drivers.c 				d->dev = new;
d                 718 arch/parisc/kernel/drivers.c 	return d->dev != NULL;
d                 734 arch/parisc/kernel/drivers.c 	struct parse_tree_data d = {
d                 740 arch/parisc/kernel/drivers.c 		.obj	= &d,
d                 747 arch/parisc/kernel/drivers.c 	return d.dev;
d                  42 arch/parisc/kernel/irq.c static void cpu_mask_irq(struct irq_data *d)
d                  44 arch/parisc/kernel/irq.c 	unsigned long eirr_bit = EIEM_MASK(d->irq);
d                  65 arch/parisc/kernel/irq.c static void cpu_unmask_irq(struct irq_data *d)
d                  67 arch/parisc/kernel/irq.c 	__cpu_unmask_irq(d->irq);
d                  70 arch/parisc/kernel/irq.c void cpu_ack_irq(struct irq_data *d)
d                  72 arch/parisc/kernel/irq.c 	unsigned long mask = EIEM_MASK(d->irq);
d                  85 arch/parisc/kernel/irq.c void cpu_eoi_irq(struct irq_data *d)
d                  87 arch/parisc/kernel/irq.c 	unsigned long mask = EIEM_MASK(d->irq);
d                  98 arch/parisc/kernel/irq.c int cpu_check_affinity(struct irq_data *d, const struct cpumask *dest)
d                 103 arch/parisc/kernel/irq.c 	if (irqd_is_per_cpu(d))
d                 107 arch/parisc/kernel/irq.c 	cpu_dest = cpumask_next_and(d->irq & (num_online_cpus()-1),
d                 115 arch/parisc/kernel/irq.c static int cpu_set_affinity_irq(struct irq_data *d, const struct cpumask *dest,
d                 120 arch/parisc/kernel/irq.c 	cpu_dest = cpu_check_affinity(d, dest);
d                 124 arch/parisc/kernel/irq.c 	cpumask_copy(irq_data_get_affinity_mask(d), dest);
d                 337 arch/parisc/kernel/irq.c 	struct irq_data *d = irq_get_irq_data(irq);
d                 338 arch/parisc/kernel/irq.c 	cpumask_copy(irq_data_get_affinity_mask(d), cpumask_of(cpu));
d                 425 arch/parisc/kernel/module.c 	int __maybe_unused d;
d                 479 arch/parisc/kernel/module.c 		d = get_got(me, value, addend);
d                 480 arch/parisc/kernel/module.c 		if (d <= 15) {
d                 483 arch/parisc/kernel/module.c 			stub->insns[0] |= low_sign_unext(d, 5) << 16;
d                 487 arch/parisc/kernel/module.c 			stub->insns[0] |= reassemble_16a(d);
d                  62 arch/parisc/kernel/pci.c #define EISA_OUT(size) if (EISA_bus && (b == 0)) return eisa_out##size(d, addr)
d                  84 arch/parisc/kernel/pci.c void out##type (u##size d, int addr) \
d                  89 arch/parisc/kernel/pci.c 	pci_port->out##type(parisc_pci_hba[b], PCI_PORT_ADDR(addr), d); \
d                  58 arch/powerpc/include/asm/io-defs.h DEF_PCI_AC_NORET(memcpy_fromio, (void *d, const PCI_IO_ADDR s, unsigned long n),
d                  59 arch/powerpc/include/asm/io-defs.h 		 (d, s, n), mem, s)
d                  60 arch/powerpc/include/asm/io-defs.h DEF_PCI_AC_NORET(memcpy_toio, (PCI_IO_ADDR d, const void *s, unsigned long n),
d                  61 arch/powerpc/include/asm/io-defs.h 		 (d, s, n), mem, d)
d                  49 arch/powerpc/include/asm/macio.h #define	to_macio_device(d) container_of(d, struct macio_dev, ofdev.dev)
d                  50 arch/powerpc/include/asm/macio.h #define	of_to_macio_device(d) container_of(d, struct macio_dev, ofdev)
d                 481 arch/powerpc/include/asm/mpic.h extern void mpic_unmask_irq(struct irq_data *d);
d                 483 arch/powerpc/include/asm/mpic.h extern void mpic_mask_irq(struct irq_data *d);
d                 485 arch/powerpc/include/asm/mpic.h extern void mpic_end_irq(struct irq_data *d);
d                  53 arch/powerpc/include/asm/pci.h #define set_pci_dma_ops(d)
d                 280 arch/powerpc/include/asm/sfp-machine.h #define udiv_qrnnd(q, r, n1, n0, d) \
d                 284 arch/powerpc/include/asm/sfp-machine.h     __d1 = __ll_highpart (d);						\
d                 285 arch/powerpc/include/asm/sfp-machine.h     __d0 = __ll_lowpart (d);						\
d                 293 arch/powerpc/include/asm/sfp-machine.h 	__q1--, __r1 += (d);						\
d                 294 arch/powerpc/include/asm/sfp-machine.h 	if (__r1 >= (d)) /* i.e. we didn't get carry when adding to __r1 */\
d                 296 arch/powerpc/include/asm/sfp-machine.h 	    __q1--, __r1 += (d);					\
d                 306 arch/powerpc/include/asm/sfp-machine.h 	__q0--, __r0 += (d);						\
d                 307 arch/powerpc/include/asm/sfp-machine.h 	if (__r0 >= (d))						\
d                 309 arch/powerpc/include/asm/sfp-machine.h 	    __q0--, __r0 += (d);					\
d                 119 arch/powerpc/include/asm/sstep.h 	unsigned long d[2];
d                  56 arch/powerpc/include/asm/xics.h 	void (*eoi)(struct irq_data *d);
d                 163 arch/powerpc/include/asm/xics.h extern void icp_native_eoi(struct irq_data *d);
d                 164 arch/powerpc/include/asm/xics.h extern int xics_set_irq_type(struct irq_data *d, unsigned int flow_type);
d                 102 arch/powerpc/include/asm/xive.h extern int xmon_xive_get_irq_config(u32 hw_irq, struct irq_data *d);
d                 972 arch/powerpc/kernel/dt_cpu_ftrs.c 			struct dt_cpu_feature *d = &dt_cpu_features[j];
d                 974 arch/powerpc/kernel/dt_cpu_ftrs.c 			if (of_get_flat_dt_phandle(d->node) == phandle) {
d                 975 arch/powerpc/kernel/dt_cpu_ftrs.c 				cpufeatures_deps_enable(d);
d                 976 arch/powerpc/kernel/dt_cpu_ftrs.c 				if (d->disabled) {
d                1154 arch/powerpc/kernel/time.c 	unsigned long a, b, c, d;
d                1161 arch/powerpc/kernel/time.c 	d = dividend_low & 0xffffffff;
d                1169 arch/powerpc/kernel/time.c 	rc = ((u64) do_div(rb, divisor) << 32) + d;
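
The time.c division lines above (and the udiv_qrnnd() macro excerpts from sfp-machine.h earlier in this listing) use the same schoolbook idea: divide the most-significant piece first and carry its remainder down in front of the next piece. A minimal userspace sketch of that idea at 32-bit granularity; div64_by_halves() is a name invented here, and unlike the kernel versions it leans on a wider native type for the per-piece divisions.

#include <stdint.h>

/* Divide hi:lo (a 64-bit value given as two 32-bit halves) by d.
 * The high half is divided first; its remainder is carried down in
 * front of the low half, exactly as in written long division. */
static void div64_by_halves(uint32_t hi, uint32_t lo, uint32_t d,
			    uint64_t *q, uint32_t *r)
{
	uint64_t part = hi;
	uint64_t q_hi = part / d;

	part = ((part % d) << 32) | lo;		/* carry remainder down */
	*q = (q_hi << 32) | (part / d);
	*r = (uint32_t)(part % d);
}
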
d                2272 arch/powerpc/kernel/traps.c 	struct dentry *dir, *d;
d                2284 arch/powerpc/kernel/traps.c 	d = debugfs_create_u32("do_warn", 0644, dir,
d                2286 arch/powerpc/kernel/traps.c 	if (!d)
d                2290 arch/powerpc/kernel/traps.c 		d = debugfs_create_u32(entries[i].name, 0644, dir,
d                2292 arch/powerpc/kernel/traps.c 		if (!d)
d                 482 arch/powerpc/kvm/book3s_paired_singles.c 	int d = inst & 0x8ff;
d                 484 arch/powerpc/kvm/book3s_paired_singles.c 	if (d & 0x800)
d                 485 arch/powerpc/kvm/book3s_paired_singles.c 		return -(d & 0x7ff);
d                 487 arch/powerpc/kvm/book3s_paired_singles.c 	return (d & 0x7ff);
d                 144 arch/powerpc/kvm/book3s_rtas.c 	struct rtas_token_definition *d, *tmp;
d                 148 arch/powerpc/kvm/book3s_rtas.c 	list_for_each_entry_safe(d, tmp, &kvm->arch.rtas_tokens, list) {
d                 149 arch/powerpc/kvm/book3s_rtas.c 		if (rtas_name_matches(d->handler->name, name)) {
d                 150 arch/powerpc/kvm/book3s_rtas.c 			list_del(&d->list);
d                 151 arch/powerpc/kvm/book3s_rtas.c 			kfree(d);
d                 162 arch/powerpc/kvm/book3s_rtas.c 	struct rtas_token_definition *d;
d                 169 arch/powerpc/kvm/book3s_rtas.c 	list_for_each_entry(d, &kvm->arch.rtas_tokens, list) {
d                 170 arch/powerpc/kvm/book3s_rtas.c 		if (d->token == token)
d                 186 arch/powerpc/kvm/book3s_rtas.c 	d = kzalloc(sizeof(*d), GFP_KERNEL);
d                 187 arch/powerpc/kvm/book3s_rtas.c 	if (!d)
d                 190 arch/powerpc/kvm/book3s_rtas.c 	d->handler = h;
d                 191 arch/powerpc/kvm/book3s_rtas.c 	d->token = token;
d                 193 arch/powerpc/kvm/book3s_rtas.c 	list_add_tail(&d->list, &kvm->arch.rtas_tokens);
d                 220 arch/powerpc/kvm/book3s_rtas.c 	struct rtas_token_definition *d;
d                 248 arch/powerpc/kvm/book3s_rtas.c 	list_for_each_entry(d, &vcpu->kvm->arch.rtas_tokens, list) {
d                 249 arch/powerpc/kvm/book3s_rtas.c 		if (d->token == be32_to_cpu(args.token)) {
d                 250 arch/powerpc/kvm/book3s_rtas.c 			d->handler->handler(vcpu, &args);
d                 280 arch/powerpc/kvm/book3s_rtas.c 	struct rtas_token_definition *d, *tmp;
d                 282 arch/powerpc/kvm/book3s_rtas.c 	list_for_each_entry_safe(d, tmp, &kvm->arch.rtas_tokens, list) {
d                 283 arch/powerpc/kvm/book3s_rtas.c 		list_del(&d->list);
d                 284 arch/powerpc/kvm/book3s_rtas.c 		kfree(d);
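
The book3s_rtas.c teardown lines above walk the token list and free each node. The general idiom is list_for_each_entry_safe(), which keeps a lookahead pointer so the current node can be unlinked and freed mid-walk. A minimal sketch; struct demo_token and demo_list are invented for illustration, the list helpers are the generic kernel API.

#include <linux/list.h>
#include <linux/slab.h>

struct demo_token {
	struct list_head list;
	int token;
};

static LIST_HEAD(demo_list);

static void demo_flush_tokens(void)
{
	struct demo_token *t, *tmp;

	list_for_each_entry_safe(t, tmp, &demo_list, list) {
		list_del(&t->list);	/* unlink before freeing */
		kfree(t);
	}
}
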
d                 228 arch/powerpc/kvm/book3s_xive.c 		struct irq_data *d = irq_get_irq_data(xc->esc_virq[prio]);
d                 229 arch/powerpc/kvm/book3s_xive.c 		struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
d                1136 arch/powerpc/kvm/book3s_xive.c 	struct irq_data *d = irq_get_irq_data(irq);
d                1137 arch/powerpc/kvm/book3s_xive.c 	struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
d                2067 arch/powerpc/kvm/book3s_xive.c 			struct irq_data *d = irq_get_irq_data(xc->esc_virq[i]);
d                2069 arch/powerpc/kvm/book3s_xive.c 				irq_data_get_irq_handler_data(d);
d                  30 arch/powerpc/lib/rheap.c static inline void fixup(unsigned long s, unsigned long e, int d,
d                  37 arch/powerpc/lib/rheap.c 		*pp += d;
d                  41 arch/powerpc/lib/rheap.c 		*pp += d;
d                 466 arch/powerpc/lib/sstep.c 		double d[2];
d                 486 arch/powerpc/lib/sstep.c 			conv_sp_to_dp(&u.f, &u.d[0]);
d                 493 arch/powerpc/lib/sstep.c 		put_fpr(rn, &u.d[0]);
d                 500 arch/powerpc/lib/sstep.c 			put_fpr(rn, &u.d[1]);
d                 516 arch/powerpc/lib/sstep.c 		double d[2];
d                 527 arch/powerpc/lib/sstep.c 		get_fpr(rn, &u.d[0]);
d                 532 arch/powerpc/lib/sstep.c 			conv_dp_to_sp(&u.d[0], &u.f);
d                 539 arch/powerpc/lib/sstep.c 			get_fpr(rn, &u.d[1]);
d                 666 arch/powerpc/lib/sstep.c 	reg->d[0] = reg->d[1] = 0;
d                 689 arch/powerpc/lib/sstep.c 				reg->d[IS_LE] = (signed int) reg->d[IS_LE];
d                 699 arch/powerpc/lib/sstep.c 				reg->d[IS_BE] = !rev ? v : byterev_8(v);
d                 701 arch/powerpc/lib/sstep.c 				reg->d[IS_BE] = reg->d[IS_LE];
d                 761 arch/powerpc/lib/sstep.c 			buf.d[0] = byterev_8(reg->d[1]);
d                 762 arch/powerpc/lib/sstep.c 			buf.d[1] = byterev_8(reg->d[0]);
d                 772 arch/powerpc/lib/sstep.c 			buf.d[0] = buf.d[1] = 0;
d                 780 arch/powerpc/lib/sstep.c 			memcpy(mem + 8, &reg->d[IS_BE], 8);
d                 835 arch/powerpc/lib/sstep.c 			current->thread.fp_state.fpr[reg][0] = buf.d[0];
d                 836 arch/powerpc/lib/sstep.c 			current->thread.fp_state.fpr[reg][1] = buf.d[1];
d                 866 arch/powerpc/lib/sstep.c 			buf.d[0] = current->thread.fp_state.fpr[reg][0];
d                 867 arch/powerpc/lib/sstep.c 			buf.d[1] = current->thread.fp_state.fpr[reg][1];
d                  31 arch/powerpc/net/bpf_jit.h #define PLANT_INSTR(d, idx, instr)					      \
d                  32 arch/powerpc/net/bpf_jit.h 	do { if (d) { (d)[idx] = instr; } idx++; } while (0)
d                  41 arch/powerpc/net/bpf_jit.h #define PPC_ADDI(d, a, i)	EMIT(PPC_INST_ADDI | ___PPC_RT(d) |	      \
d                  43 arch/powerpc/net/bpf_jit.h #define PPC_MR(d, a)		PPC_OR(d, a, a)
d                  45 arch/powerpc/net/bpf_jit.h #define PPC_ADDIS(d, a, i)	EMIT(PPC_INST_ADDIS |			      \
d                  46 arch/powerpc/net/bpf_jit.h 				     ___PPC_RT(d) | ___PPC_RA(a) | IMM_L(i))
d                 101 arch/powerpc/net/bpf_jit.h #define PPC_SUB(d, a, b)	EMIT(PPC_INST_SUB | ___PPC_RT(d) |	      \
d                 103 arch/powerpc/net/bpf_jit.h #define PPC_ADD(d, a, b)	EMIT(PPC_INST_ADD | ___PPC_RT(d) |	      \
d                 105 arch/powerpc/net/bpf_jit.h #define PPC_MULD(d, a, b)	EMIT(PPC_INST_MULLD | ___PPC_RT(d) |	      \
d                 107 arch/powerpc/net/bpf_jit.h #define PPC_MULW(d, a, b)	EMIT(PPC_INST_MULLW | ___PPC_RT(d) |	      \
d                 109 arch/powerpc/net/bpf_jit.h #define PPC_MULHWU(d, a, b)	EMIT(PPC_INST_MULHWU | ___PPC_RT(d) |	      \
d                 111 arch/powerpc/net/bpf_jit.h #define PPC_MULI(d, a, i)	EMIT(PPC_INST_MULLI | ___PPC_RT(d) |	      \
d                 113 arch/powerpc/net/bpf_jit.h #define PPC_DIVWU(d, a, b)	EMIT(PPC_INST_DIVWU | ___PPC_RT(d) |	      \
d                 115 arch/powerpc/net/bpf_jit.h #define PPC_DIVDU(d, a, b)	EMIT(PPC_INST_DIVDU | ___PPC_RT(d) |	      \
d                 117 arch/powerpc/net/bpf_jit.h #define PPC_AND(d, a, b)	EMIT(PPC_INST_AND | ___PPC_RA(d) |	      \
d                 119 arch/powerpc/net/bpf_jit.h #define PPC_ANDI(d, a, i)	EMIT(PPC_INST_ANDI | ___PPC_RA(d) |	      \
d                 121 arch/powerpc/net/bpf_jit.h #define PPC_AND_DOT(d, a, b)	EMIT(PPC_INST_ANDDOT | ___PPC_RA(d) |	      \
d                 123 arch/powerpc/net/bpf_jit.h #define PPC_OR(d, a, b)		EMIT(PPC_INST_OR | ___PPC_RA(d) |	      \
d                 125 arch/powerpc/net/bpf_jit.h #define PPC_MR(d, a)		PPC_OR(d, a, a)
d                 126 arch/powerpc/net/bpf_jit.h #define PPC_ORI(d, a, i)	EMIT(PPC_INST_ORI | ___PPC_RA(d) |	      \
d                 128 arch/powerpc/net/bpf_jit.h #define PPC_ORIS(d, a, i)	EMIT(PPC_INST_ORIS | ___PPC_RA(d) |	      \
d                 130 arch/powerpc/net/bpf_jit.h #define PPC_XOR(d, a, b)	EMIT(PPC_INST_XOR | ___PPC_RA(d) |	      \
d                 132 arch/powerpc/net/bpf_jit.h #define PPC_XORI(d, a, i)	EMIT(PPC_INST_XORI | ___PPC_RA(d) |	      \
d                 134 arch/powerpc/net/bpf_jit.h #define PPC_XORIS(d, a, i)	EMIT(PPC_INST_XORIS | ___PPC_RA(d) |	      \
d                 136 arch/powerpc/net/bpf_jit.h #define PPC_EXTSW(d, a)		EMIT(PPC_INST_EXTSW | ___PPC_RA(d) |	      \
d                 138 arch/powerpc/net/bpf_jit.h #define PPC_SLW(d, a, s)	EMIT(PPC_INST_SLW | ___PPC_RA(d) |	      \
d                 140 arch/powerpc/net/bpf_jit.h #define PPC_SLD(d, a, s)	EMIT(PPC_INST_SLD | ___PPC_RA(d) |	      \
d                 142 arch/powerpc/net/bpf_jit.h #define PPC_SRW(d, a, s)	EMIT(PPC_INST_SRW | ___PPC_RA(d) |	      \
d                 144 arch/powerpc/net/bpf_jit.h #define PPC_SRAW(d, a, s)	EMIT(PPC_INST_SRAW | ___PPC_RA(d) |	      \
d                 146 arch/powerpc/net/bpf_jit.h #define PPC_SRAWI(d, a, i)	EMIT(PPC_INST_SRAWI | ___PPC_RA(d) |	      \
d                 148 arch/powerpc/net/bpf_jit.h #define PPC_SRD(d, a, s)	EMIT(PPC_INST_SRD | ___PPC_RA(d) |	      \
d                 150 arch/powerpc/net/bpf_jit.h #define PPC_SRAD(d, a, s)	EMIT(PPC_INST_SRAD | ___PPC_RA(d) |	      \
d                 152 arch/powerpc/net/bpf_jit.h #define PPC_SRADI(d, a, i)	EMIT(PPC_INST_SRADI | ___PPC_RA(d) |	      \
d                 154 arch/powerpc/net/bpf_jit.h #define PPC_RLWINM(d, a, i, mb, me)	EMIT(PPC_INST_RLWINM | ___PPC_RA(d) | \
d                 157 arch/powerpc/net/bpf_jit.h #define PPC_RLWINM_DOT(d, a, i, mb, me)	EMIT(PPC_INST_RLWINM_DOT |	      \
d                 158 arch/powerpc/net/bpf_jit.h 					___PPC_RA(d) | ___PPC_RS(a) |	      \
d                 161 arch/powerpc/net/bpf_jit.h #define PPC_RLWIMI(d, a, i, mb, me)	EMIT(PPC_INST_RLWIMI | ___PPC_RA(d) | \
d                 164 arch/powerpc/net/bpf_jit.h #define PPC_RLDICL(d, a, i, mb)		EMIT(PPC_INST_RLDICL | ___PPC_RA(d) | \
d                 167 arch/powerpc/net/bpf_jit.h #define PPC_RLDICR(d, a, i, me)		EMIT(PPC_INST_RLDICR | ___PPC_RA(d) | \
d                 172 arch/powerpc/net/bpf_jit.h #define PPC_SLWI(d, a, i)	PPC_RLWINM(d, a, i, 0, 31-(i))
d                 174 arch/powerpc/net/bpf_jit.h #define PPC_SRWI(d, a, i)	PPC_RLWINM(d, a, 32-(i), i, 31)
d                 176 arch/powerpc/net/bpf_jit.h #define PPC_SLDI(d, a, i)	PPC_RLDICR(d, a, i, 63-(i))
d                 178 arch/powerpc/net/bpf_jit.h #define PPC_SRDI(d, a, i)	PPC_RLDICL(d, a, 64-(i), i)
d                 180 arch/powerpc/net/bpf_jit.h #define PPC_NEG(d, a)		EMIT(PPC_INST_NEG | ___PPC_RT(d) | ___PPC_RA(a))
d                 191 arch/powerpc/net/bpf_jit.h #define PPC_LI32(d, i)		do {					      \
d                 194 arch/powerpc/net/bpf_jit.h 			PPC_LI(d, i);					      \
d                 196 arch/powerpc/net/bpf_jit.h 			PPC_LIS(d, IMM_H(i));				      \
d                 198 arch/powerpc/net/bpf_jit.h 				PPC_ORI(d, d, IMM_L(i));		      \
d                 201 arch/powerpc/net/bpf_jit.h #define PPC_LI64(d, i)		do {					      \
d                 204 arch/powerpc/net/bpf_jit.h 			PPC_LI32(d, i);					      \
d                 207 arch/powerpc/net/bpf_jit.h 				PPC_LI(d, ((uintptr_t)(i) >> 32) & 0xffff);   \
d                 209 arch/powerpc/net/bpf_jit.h 				PPC_LIS(d, ((uintptr_t)(i) >> 48));	      \
d                 211 arch/powerpc/net/bpf_jit.h 					PPC_ORI(d, d,			      \
d                 214 arch/powerpc/net/bpf_jit.h 			PPC_SLDI(d, d, 32);				      \
d                 216 arch/powerpc/net/bpf_jit.h 				PPC_ORIS(d, d,				      \
d                 219 arch/powerpc/net/bpf_jit.h 				PPC_ORI(d, d, (uintptr_t)(i) & 0xffff);	      \
d                 223 arch/powerpc/net/bpf_jit.h #define PPC_FUNC_ADDR(d,i) do { PPC_LI64(d, i); } while(0)
d                 225 arch/powerpc/net/bpf_jit.h #define PPC_FUNC_ADDR(d,i) do { PPC_LI32(d, i); } while(0)
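
PLANT_INSTR() near the top of the bpf_jit.h block only stores the instruction when the buffer pointer is non-NULL but always advances the index, which is what lets a JIT run the same emitter twice: once with a NULL image to measure the code size, then again into real memory. A stripped-down sketch of that pattern; emit_prog() and PLANT() are invented here, and the two opcodes are an ori-encoded nop and a blr.

#include <stddef.h>
#include <stdint.h>

#define PLANT(buf, idx, insn) \
	do { if (buf) (buf)[(idx)] = (insn); (idx)++; } while (0)

/* Emit a trivial function body; returns the number of words "planted". */
static size_t emit_prog(uint32_t *buf)
{
	size_t idx = 0;

	PLANT(buf, idx, 0x60000000);	/* ori r0,r0,0  (nop) */
	PLANT(buf, idx, 0x4e800020);	/* blr                */
	return idx;
}

/* First pass:  len = emit_prog(NULL);  allocate len * 4 bytes;
 * second pass: emit_prog(image); */
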
d                 382 arch/powerpc/perf/hv-24x7.c 	struct dev_ext_attribute *d;
d                 384 arch/powerpc/perf/hv-24x7.c 	d = container_of(attr, struct dev_ext_attribute, attr);
d                 386 arch/powerpc/perf/hv-24x7.c 	return sprintf(buf, "%s\n", (char *)d->var);
d                 988 arch/powerpc/perf/hv-24x7.c 	int d, n, count = 0;
d                 991 arch/powerpc/perf/hv-24x7.c 	for (d = 0; d < HV_PERF_DOMAIN_MAX; d++) {
d                 992 arch/powerpc/perf/hv-24x7.c 		str = domain_name(d);
d                 996 arch/powerpc/perf/hv-24x7.c 		n = sprintf(page, "%d: %s\n", d, str);
d                  50 arch/powerpc/platforms/4xx/uic.c static void uic_unmask_irq(struct irq_data *d)
d                  52 arch/powerpc/platforms/4xx/uic.c 	struct uic *uic = irq_data_get_irq_chip_data(d);
d                  53 arch/powerpc/platforms/4xx/uic.c 	unsigned int src = irqd_to_hwirq(d);
d                  60 arch/powerpc/platforms/4xx/uic.c 	if (irqd_is_level_type(d))
d                  68 arch/powerpc/platforms/4xx/uic.c static void uic_mask_irq(struct irq_data *d)
d                  70 arch/powerpc/platforms/4xx/uic.c 	struct uic *uic = irq_data_get_irq_chip_data(d);
d                  71 arch/powerpc/platforms/4xx/uic.c 	unsigned int src = irqd_to_hwirq(d);
d                  82 arch/powerpc/platforms/4xx/uic.c static void uic_ack_irq(struct irq_data *d)
d                  84 arch/powerpc/platforms/4xx/uic.c 	struct uic *uic = irq_data_get_irq_chip_data(d);
d                  85 arch/powerpc/platforms/4xx/uic.c 	unsigned int src = irqd_to_hwirq(d);
d                  93 arch/powerpc/platforms/4xx/uic.c static void uic_mask_ack_irq(struct irq_data *d)
d                  95 arch/powerpc/platforms/4xx/uic.c 	struct uic *uic = irq_data_get_irq_chip_data(d);
d                  96 arch/powerpc/platforms/4xx/uic.c 	unsigned int src = irqd_to_hwirq(d);
d                 113 arch/powerpc/platforms/4xx/uic.c 	if (!irqd_is_level_type(d))
d                 118 arch/powerpc/platforms/4xx/uic.c static int uic_set_irq_type(struct irq_data *d, unsigned int flow_type)
d                 120 arch/powerpc/platforms/4xx/uic.c 	struct uic *uic = irq_data_get_irq_chip_data(d);
d                 121 arch/powerpc/platforms/4xx/uic.c 	unsigned int src = irqd_to_hwirq(d);
d                 128 arch/powerpc/platforms/4xx/uic.c 		uic_mask_irq(d);
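
Most of the entries in this part of the listing are irq_chip callbacks with the same shape: recover the controller state with irq_data_get_irq_chip_data(), the hardware source number with irqd_to_hwirq(), and update a mask register. A minimal sketch of that shape; struct demo_pic and its single enable register are invented for illustration, while the irq_chip fields and helpers are the generic kernel API.

#include <linux/irq.h>
#include <linux/io.h>
#include <linux/spinlock.h>

struct demo_pic {
	void __iomem *enable_reg;	/* hypothetical 32-bit enable mask */
	raw_spinlock_t lock;
};

static void demo_pic_mask(struct irq_data *d)
{
	struct demo_pic *pic = irq_data_get_irq_chip_data(d);
	unsigned int src = irqd_to_hwirq(d);

	raw_spin_lock(&pic->lock);
	out_be32(pic->enable_reg, in_be32(pic->enable_reg) & ~(1u << src));
	raw_spin_unlock(&pic->lock);
}

static void demo_pic_unmask(struct irq_data *d)
{
	struct demo_pic *pic = irq_data_get_irq_chip_data(d);
	unsigned int src = irqd_to_hwirq(d);

	raw_spin_lock(&pic->lock);
	out_be32(pic->enable_reg, in_be32(pic->enable_reg) | (1u << src));
	raw_spin_unlock(&pic->lock);
}

static struct irq_chip demo_pic_chip = {
	.name		= "DEMO-PIC",
	.irq_mask	= demo_pic_mask,
	.irq_unmask	= demo_pic_unmask,
};
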
d                  58 arch/powerpc/platforms/512x/mpc5121_ads_cpld.c cpld_mask_irq(struct irq_data *d)
d                  60 arch/powerpc/platforms/512x/mpc5121_ads_cpld.c 	unsigned int cpld_irq = (unsigned int)irqd_to_hwirq(d);
d                  68 arch/powerpc/platforms/512x/mpc5121_ads_cpld.c cpld_unmask_irq(struct irq_data *d)
d                  70 arch/powerpc/platforms/512x/mpc5121_ads_cpld.c 	unsigned int cpld_irq = (unsigned int)irqd_to_hwirq(d);
d                  47 arch/powerpc/platforms/52xx/media5200.c static void media5200_irq_unmask(struct irq_data *d)
d                  54 arch/powerpc/platforms/52xx/media5200.c 	val |= 1 << (MEDIA5200_IRQ_SHIFT + irqd_to_hwirq(d));
d                  59 arch/powerpc/platforms/52xx/media5200.c static void media5200_irq_mask(struct irq_data *d)
d                  66 arch/powerpc/platforms/52xx/media5200.c 	val &= ~(1 << (MEDIA5200_IRQ_SHIFT + irqd_to_hwirq(d)));
d                 135 arch/powerpc/platforms/52xx/mpc52xx_gpt.c static void mpc52xx_gpt_irq_unmask(struct irq_data *d)
d                 137 arch/powerpc/platforms/52xx/mpc52xx_gpt.c 	struct mpc52xx_gpt_priv *gpt = irq_data_get_irq_chip_data(d);
d                 145 arch/powerpc/platforms/52xx/mpc52xx_gpt.c static void mpc52xx_gpt_irq_mask(struct irq_data *d)
d                 147 arch/powerpc/platforms/52xx/mpc52xx_gpt.c 	struct mpc52xx_gpt_priv *gpt = irq_data_get_irq_chip_data(d);
d                 155 arch/powerpc/platforms/52xx/mpc52xx_gpt.c static void mpc52xx_gpt_irq_ack(struct irq_data *d)
d                 157 arch/powerpc/platforms/52xx/mpc52xx_gpt.c 	struct mpc52xx_gpt_priv *gpt = irq_data_get_irq_chip_data(d);
d                 162 arch/powerpc/platforms/52xx/mpc52xx_gpt.c static int mpc52xx_gpt_irq_set_type(struct irq_data *d, unsigned int flow_type)
d                 164 arch/powerpc/platforms/52xx/mpc52xx_gpt.c 	struct mpc52xx_gpt_priv *gpt = irq_data_get_irq_chip_data(d);
d                 168 arch/powerpc/platforms/52xx/mpc52xx_gpt.c 	dev_dbg(gpt->dev, "%s: virq=%i type=%x\n", __func__, d->irq, flow_type);
d                 158 arch/powerpc/platforms/52xx/mpc52xx_pic.c static void mpc52xx_extirq_mask(struct irq_data *d)
d                 160 arch/powerpc/platforms/52xx/mpc52xx_pic.c 	int l2irq = irqd_to_hwirq(d) & MPC52xx_IRQ_L2_MASK;
d                 164 arch/powerpc/platforms/52xx/mpc52xx_pic.c static void mpc52xx_extirq_unmask(struct irq_data *d)
d                 166 arch/powerpc/platforms/52xx/mpc52xx_pic.c 	int l2irq = irqd_to_hwirq(d) & MPC52xx_IRQ_L2_MASK;
d                 170 arch/powerpc/platforms/52xx/mpc52xx_pic.c static void mpc52xx_extirq_ack(struct irq_data *d)
d                 172 arch/powerpc/platforms/52xx/mpc52xx_pic.c 	int l2irq = irqd_to_hwirq(d) & MPC52xx_IRQ_L2_MASK;
d                 176 arch/powerpc/platforms/52xx/mpc52xx_pic.c static int mpc52xx_extirq_set_type(struct irq_data *d, unsigned int flow_type)
d                 179 arch/powerpc/platforms/52xx/mpc52xx_pic.c 	int l2irq = irqd_to_hwirq(d) & MPC52xx_IRQ_L2_MASK;
d                 183 arch/powerpc/platforms/52xx/mpc52xx_pic.c 		(int) irqd_to_hwirq(d), l2irq, flow_type);
d                 199 arch/powerpc/platforms/52xx/mpc52xx_pic.c 	irq_set_handler_locked(d, handler);
d                 215 arch/powerpc/platforms/52xx/mpc52xx_pic.c static int mpc52xx_null_set_type(struct irq_data *d, unsigned int flow_type)
d                 220 arch/powerpc/platforms/52xx/mpc52xx_pic.c static void mpc52xx_main_mask(struct irq_data *d)
d                 222 arch/powerpc/platforms/52xx/mpc52xx_pic.c 	int l2irq = irqd_to_hwirq(d) & MPC52xx_IRQ_L2_MASK;
d                 226 arch/powerpc/platforms/52xx/mpc52xx_pic.c static void mpc52xx_main_unmask(struct irq_data *d)
d                 228 arch/powerpc/platforms/52xx/mpc52xx_pic.c 	int l2irq = irqd_to_hwirq(d) & MPC52xx_IRQ_L2_MASK;
d                 243 arch/powerpc/platforms/52xx/mpc52xx_pic.c static void mpc52xx_periph_mask(struct irq_data *d)
d                 245 arch/powerpc/platforms/52xx/mpc52xx_pic.c 	int l2irq = irqd_to_hwirq(d) & MPC52xx_IRQ_L2_MASK;
d                 249 arch/powerpc/platforms/52xx/mpc52xx_pic.c static void mpc52xx_periph_unmask(struct irq_data *d)
d                 251 arch/powerpc/platforms/52xx/mpc52xx_pic.c 	int l2irq = irqd_to_hwirq(d) & MPC52xx_IRQ_L2_MASK;
d                 266 arch/powerpc/platforms/52xx/mpc52xx_pic.c static void mpc52xx_sdma_mask(struct irq_data *d)
d                 268 arch/powerpc/platforms/52xx/mpc52xx_pic.c 	int l2irq = irqd_to_hwirq(d) & MPC52xx_IRQ_L2_MASK;
d                 272 arch/powerpc/platforms/52xx/mpc52xx_pic.c static void mpc52xx_sdma_unmask(struct irq_data *d)
d                 274 arch/powerpc/platforms/52xx/mpc52xx_pic.c 	int l2irq = irqd_to_hwirq(d) & MPC52xx_IRQ_L2_MASK;
d                 278 arch/powerpc/platforms/52xx/mpc52xx_pic.c static void mpc52xx_sdma_ack(struct irq_data *d)
d                 280 arch/powerpc/platforms/52xx/mpc52xx_pic.c 	int l2irq = irqd_to_hwirq(d) & MPC52xx_IRQ_L2_MASK;
d                  38 arch/powerpc/platforms/82xx/pq2ads-pci-pic.c static void pq2ads_pci_mask_irq(struct irq_data *d)
d                  40 arch/powerpc/platforms/82xx/pq2ads-pci-pic.c 	struct pq2ads_pci_pic *priv = irq_data_get_irq_chip_data(d);
d                  41 arch/powerpc/platforms/82xx/pq2ads-pci-pic.c 	int irq = NUM_IRQS - irqd_to_hwirq(d) - 1;
d                  54 arch/powerpc/platforms/82xx/pq2ads-pci-pic.c static void pq2ads_pci_unmask_irq(struct irq_data *d)
d                  56 arch/powerpc/platforms/82xx/pq2ads-pci-pic.c 	struct pq2ads_pci_pic *priv = irq_data_get_irq_chip_data(d);
d                  57 arch/powerpc/platforms/82xx/pq2ads-pci-pic.c 	int irq = NUM_IRQS - irqd_to_hwirq(d) - 1;
d                  70 arch/powerpc/platforms/83xx/mcu_mpc8349emitx.c static ssize_t show_status(struct device *d,
d                 106 arch/powerpc/platforms/85xx/socrates_fpga_pic.c static void socrates_fpga_pic_ack(struct irq_data *d)
d                 109 arch/powerpc/platforms/85xx/socrates_fpga_pic.c 	unsigned int irq_line, hwirq = irqd_to_hwirq(d);
d                 121 arch/powerpc/platforms/85xx/socrates_fpga_pic.c static void socrates_fpga_pic_mask(struct irq_data *d)
d                 124 arch/powerpc/platforms/85xx/socrates_fpga_pic.c 	unsigned int hwirq = irqd_to_hwirq(d);
d                 137 arch/powerpc/platforms/85xx/socrates_fpga_pic.c static void socrates_fpga_pic_mask_ack(struct irq_data *d)
d                 140 arch/powerpc/platforms/85xx/socrates_fpga_pic.c 	unsigned int hwirq = irqd_to_hwirq(d);
d                 154 arch/powerpc/platforms/85xx/socrates_fpga_pic.c static void socrates_fpga_pic_unmask(struct irq_data *d)
d                 157 arch/powerpc/platforms/85xx/socrates_fpga_pic.c 	unsigned int hwirq = irqd_to_hwirq(d);
d                 170 arch/powerpc/platforms/85xx/socrates_fpga_pic.c static void socrates_fpga_pic_eoi(struct irq_data *d)
d                 173 arch/powerpc/platforms/85xx/socrates_fpga_pic.c 	unsigned int hwirq = irqd_to_hwirq(d);
d                 186 arch/powerpc/platforms/85xx/socrates_fpga_pic.c static int socrates_fpga_pic_set_type(struct irq_data *d,
d                 190 arch/powerpc/platforms/85xx/socrates_fpga_pic.c 	unsigned int hwirq = irqd_to_hwirq(d);
d                  59 arch/powerpc/platforms/8xx/cpm1.c static void cpm_mask_irq(struct irq_data *d)
d                  61 arch/powerpc/platforms/8xx/cpm1.c 	unsigned int cpm_vec = (unsigned int)irqd_to_hwirq(d);
d                  66 arch/powerpc/platforms/8xx/cpm1.c static void cpm_unmask_irq(struct irq_data *d)
d                  68 arch/powerpc/platforms/8xx/cpm1.c 	unsigned int cpm_vec = (unsigned int)irqd_to_hwirq(d);
d                  73 arch/powerpc/platforms/8xx/cpm1.c static void cpm_end_irq(struct irq_data *d)
d                  75 arch/powerpc/platforms/8xx/cpm1.c 	unsigned int cpm_vec = (unsigned int)irqd_to_hwirq(d);
d                  23 arch/powerpc/platforms/8xx/pic.c static inline unsigned long mpc8xx_irqd_to_bit(struct irq_data *d)
d                  25 arch/powerpc/platforms/8xx/pic.c 	return 0x80000000 >> irqd_to_hwirq(d);
d                  28 arch/powerpc/platforms/8xx/pic.c static void mpc8xx_unmask_irq(struct irq_data *d)
d                  30 arch/powerpc/platforms/8xx/pic.c 	mpc8xx_cached_irq_mask |= mpc8xx_irqd_to_bit(d);
d                  34 arch/powerpc/platforms/8xx/pic.c static void mpc8xx_mask_irq(struct irq_data *d)
d                  36 arch/powerpc/platforms/8xx/pic.c 	mpc8xx_cached_irq_mask &= ~mpc8xx_irqd_to_bit(d);
d                  40 arch/powerpc/platforms/8xx/pic.c static void mpc8xx_ack(struct irq_data *d)
d                  42 arch/powerpc/platforms/8xx/pic.c 	out_be32(&siu_reg->sc_sipend, mpc8xx_irqd_to_bit(d));
d                  45 arch/powerpc/platforms/8xx/pic.c static void mpc8xx_end_irq(struct irq_data *d)
d                  47 arch/powerpc/platforms/8xx/pic.c 	mpc8xx_cached_irq_mask |= mpc8xx_irqd_to_bit(d);
d                  51 arch/powerpc/platforms/8xx/pic.c static int mpc8xx_set_irq_type(struct irq_data *d, unsigned int flow_type)
d                  54 arch/powerpc/platforms/8xx/pic.c 	if ((flow_type & IRQ_TYPE_EDGE_FALLING) && !(irqd_to_hwirq(d) & 1)) {
d                  56 arch/powerpc/platforms/8xx/pic.c 		siel |= mpc8xx_irqd_to_bit(d);
d                  58 arch/powerpc/platforms/8xx/pic.c 		irq_set_handler_locked(d, handle_edge_irq);
d                  62 arch/powerpc/platforms/cell/interrupt.c static void iic_mask(struct irq_data *d)
d                  66 arch/powerpc/platforms/cell/interrupt.c static void iic_unmask(struct irq_data *d)
d                  70 arch/powerpc/platforms/cell/interrupt.c static void iic_eoi(struct irq_data *d)
d                  85 arch/powerpc/platforms/cell/interrupt.c static void iic_ioexc_eoi(struct irq_data *d)
d                  58 arch/powerpc/platforms/cell/spider-pic.c static struct spider_pic *spider_irq_data_to_pic(struct irq_data *d)
d                  60 arch/powerpc/platforms/cell/spider-pic.c 	return irq_data_get_irq_chip_data(d);
d                  69 arch/powerpc/platforms/cell/spider-pic.c static void spider_unmask_irq(struct irq_data *d)
d                  71 arch/powerpc/platforms/cell/spider-pic.c 	struct spider_pic *pic = spider_irq_data_to_pic(d);
d                  72 arch/powerpc/platforms/cell/spider-pic.c 	void __iomem *cfg = spider_get_irq_config(pic, irqd_to_hwirq(d));
d                  77 arch/powerpc/platforms/cell/spider-pic.c static void spider_mask_irq(struct irq_data *d)
d                  79 arch/powerpc/platforms/cell/spider-pic.c 	struct spider_pic *pic = spider_irq_data_to_pic(d);
d                  80 arch/powerpc/platforms/cell/spider-pic.c 	void __iomem *cfg = spider_get_irq_config(pic, irqd_to_hwirq(d));
d                  85 arch/powerpc/platforms/cell/spider-pic.c static void spider_ack_irq(struct irq_data *d)
d                  87 arch/powerpc/platforms/cell/spider-pic.c 	struct spider_pic *pic = spider_irq_data_to_pic(d);
d                  88 arch/powerpc/platforms/cell/spider-pic.c 	unsigned int src = irqd_to_hwirq(d);
d                  92 arch/powerpc/platforms/cell/spider-pic.c 	if (irqd_is_level_type(d))
d                 103 arch/powerpc/platforms/cell/spider-pic.c static int spider_set_irq_type(struct irq_data *d, unsigned int type)
d                 106 arch/powerpc/platforms/cell/spider-pic.c 	struct spider_pic *pic = spider_irq_data_to_pic(d);
d                 107 arch/powerpc/platforms/cell/spider-pic.c 	unsigned int hw = irqd_to_hwirq(d);
d                  45 arch/powerpc/platforms/embedded6xx/flipper-pic.c static void flipper_pic_mask_and_ack(struct irq_data *d)
d                  47 arch/powerpc/platforms/embedded6xx/flipper-pic.c 	int irq = irqd_to_hwirq(d);
d                  48 arch/powerpc/platforms/embedded6xx/flipper-pic.c 	void __iomem *io_base = irq_data_get_irq_chip_data(d);
d                  56 arch/powerpc/platforms/embedded6xx/flipper-pic.c static void flipper_pic_ack(struct irq_data *d)
d                  58 arch/powerpc/platforms/embedded6xx/flipper-pic.c 	int irq = irqd_to_hwirq(d);
d                  59 arch/powerpc/platforms/embedded6xx/flipper-pic.c 	void __iomem *io_base = irq_data_get_irq_chip_data(d);
d                  65 arch/powerpc/platforms/embedded6xx/flipper-pic.c static void flipper_pic_mask(struct irq_data *d)
d                  67 arch/powerpc/platforms/embedded6xx/flipper-pic.c 	int irq = irqd_to_hwirq(d);
d                  68 arch/powerpc/platforms/embedded6xx/flipper-pic.c 	void __iomem *io_base = irq_data_get_irq_chip_data(d);
d                  73 arch/powerpc/platforms/embedded6xx/flipper-pic.c static void flipper_pic_unmask(struct irq_data *d)
d                  75 arch/powerpc/platforms/embedded6xx/flipper-pic.c 	int irq = irqd_to_hwirq(d);
d                  76 arch/powerpc/platforms/embedded6xx/flipper-pic.c 	void __iomem *io_base = irq_data_get_irq_chip_data(d);
d                  42 arch/powerpc/platforms/embedded6xx/hlwd-pic.c static void hlwd_pic_mask_and_ack(struct irq_data *d)
d                  44 arch/powerpc/platforms/embedded6xx/hlwd-pic.c 	int irq = irqd_to_hwirq(d);
d                  45 arch/powerpc/platforms/embedded6xx/hlwd-pic.c 	void __iomem *io_base = irq_data_get_irq_chip_data(d);
d                  52 arch/powerpc/platforms/embedded6xx/hlwd-pic.c static void hlwd_pic_ack(struct irq_data *d)
d                  54 arch/powerpc/platforms/embedded6xx/hlwd-pic.c 	int irq = irqd_to_hwirq(d);
d                  55 arch/powerpc/platforms/embedded6xx/hlwd-pic.c 	void __iomem *io_base = irq_data_get_irq_chip_data(d);
d                  60 arch/powerpc/platforms/embedded6xx/hlwd-pic.c static void hlwd_pic_mask(struct irq_data *d)
d                  62 arch/powerpc/platforms/embedded6xx/hlwd-pic.c 	int irq = irqd_to_hwirq(d);
d                  63 arch/powerpc/platforms/embedded6xx/hlwd-pic.c 	void __iomem *io_base = irq_data_get_irq_chip_data(d);
d                  68 arch/powerpc/platforms/embedded6xx/hlwd-pic.c static void hlwd_pic_unmask(struct irq_data *d)
d                  70 arch/powerpc/platforms/embedded6xx/hlwd-pic.c 	int irq = irqd_to_hwirq(d);
d                  71 arch/powerpc/platforms/embedded6xx/hlwd-pic.c 	void __iomem *io_base = irq_data_get_irq_chip_data(d);
d                 177 arch/powerpc/platforms/fsl_uli1575.c 	unsigned int d;
d                 186 arch/powerpc/platforms/fsl_uli1575.c 	pci_read_config_dword(dev, PCI_CLASS_REVISION, &d);
d                 187 arch/powerpc/platforms/fsl_uli1575.c 	d = (d & 0xff) | (PCI_CLASS_STORAGE_SATA_AHCI << 8);
d                 188 arch/powerpc/platforms/fsl_uli1575.c 	pci_write_config_dword(dev, PCI_CLASS_REVISION, d);
d                  74 arch/powerpc/platforms/powermac/pic.c static void pmac_mask_and_ack_irq(struct irq_data *d)
d                  76 arch/powerpc/platforms/powermac/pic.c 	unsigned int src = irqd_to_hwirq(d);
d                  96 arch/powerpc/platforms/powermac/pic.c static void pmac_ack_irq(struct irq_data *d)
d                  98 arch/powerpc/platforms/powermac/pic.c 	unsigned int src = irqd_to_hwirq(d);
d                 141 arch/powerpc/platforms/powermac/pic.c static unsigned int pmac_startup_irq(struct irq_data *d)
d                 144 arch/powerpc/platforms/powermac/pic.c 	unsigned int src = irqd_to_hwirq(d);
d                 149 arch/powerpc/platforms/powermac/pic.c 	if (!irqd_is_level_type(d))
d                 158 arch/powerpc/platforms/powermac/pic.c static void pmac_mask_irq(struct irq_data *d)
d                 161 arch/powerpc/platforms/powermac/pic.c 	unsigned int src = irqd_to_hwirq(d);
d                 169 arch/powerpc/platforms/powermac/pic.c static void pmac_unmask_irq(struct irq_data *d)
d                 172 arch/powerpc/platforms/powermac/pic.c 	unsigned int src = irqd_to_hwirq(d);
d                 180 arch/powerpc/platforms/powermac/pic.c static int pmac_retrigger(struct irq_data *d)
d                 185 arch/powerpc/platforms/powermac/pic.c 	__pmac_retrigger(irqd_to_hwirq(d));
d                 162 arch/powerpc/platforms/powermac/smp.c static irqreturn_t psurge_ipi_intr(int irq, void *d)
d                  80 arch/powerpc/platforms/powernv/opal-irqchip.c static void opal_event_mask(struct irq_data *d)
d                  82 arch/powerpc/platforms/powernv/opal-irqchip.c 	clear_bit(d->hwirq, &opal_event_irqchip.mask);
d                  85 arch/powerpc/platforms/powernv/opal-irqchip.c static void opal_event_unmask(struct irq_data *d)
d                  87 arch/powerpc/platforms/powernv/opal-irqchip.c 	set_bit(d->hwirq, &opal_event_irqchip.mask);
d                  92 arch/powerpc/platforms/powernv/opal-irqchip.c static int opal_event_set_type(struct irq_data *d, unsigned int flow_type)
d                 115 arch/powerpc/platforms/powernv/opal-irqchip.c static int opal_event_map(struct irq_domain *d, unsigned int irq,
d                2753 arch/powerpc/platforms/powernv/pci-ioda.c static void pnv_ioda2_msi_eoi(struct irq_data *d)
d                2756 arch/powerpc/platforms/powernv/pci-ioda.c 	unsigned int hw_irq = (unsigned int)irqd_to_hwirq(d);
d                2757 arch/powerpc/platforms/powernv/pci-ioda.c 	struct irq_chip *chip = irq_data_get_irq_chip(d);
d                2762 arch/powerpc/platforms/powernv/pci-ioda.c 	icp_native_eoi(d);
d                 118 arch/powerpc/platforms/powernv/vas-debug.c 	struct dentry *f, *d;
d                 129 arch/powerpc/platforms/powernv/vas-debug.c 	d = debugfs_create_dir(window->dbgname, window->vinst->dbgdir);
d                 130 arch/powerpc/platforms/powernv/vas-debug.c 	if (IS_ERR(d))
d                 133 arch/powerpc/platforms/powernv/vas-debug.c 	window->dbgdir = d;
d                 135 arch/powerpc/platforms/powernv/vas-debug.c 	f = debugfs_create_file("info", 0444, d, window, &info_fops);
d                 139 arch/powerpc/platforms/powernv/vas-debug.c 	f = debugfs_create_file("hvwc", 0444, d, window, &hvwc_fops);
d                 156 arch/powerpc/platforms/powernv/vas-debug.c 	struct dentry *d;
d                 168 arch/powerpc/platforms/powernv/vas-debug.c 	d = debugfs_create_dir(vinst->dbgname, vas_debugfs);
d                 169 arch/powerpc/platforms/powernv/vas-debug.c 	if (IS_ERR(d))
d                 172 arch/powerpc/platforms/powernv/vas-debug.c 	vinst->dbgdir = d;
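
The vas-debug.c lines above follow the usual debugfs layout: one directory per object, files underneath, and the directory dentry kept for later cleanup. A compact sketch under invented names (demo_object, demo_show, demo_debugfs_init); DEFINE_SHOW_ATTRIBUTE() generates the demo_fops passed to debugfs_create_file().

#include <linux/debugfs.h>
#include <linux/seq_file.h>
#include <linux/err.h>

struct demo_object {
	struct dentry *dbgdir;
	int value;
};

static int demo_show(struct seq_file *s, void *unused)
{
	struct demo_object *obj = s->private;

	seq_printf(s, "value=%d\n", obj->value);
	return 0;
}
DEFINE_SHOW_ATTRIBUTE(demo);

static void demo_debugfs_init(struct demo_object *obj, struct dentry *parent)
{
	struct dentry *d = debugfs_create_dir("demo", parent);

	if (IS_ERR(d))
		return;
	obj->dbgdir = d;
	debugfs_create_file("info", 0444, d, obj, &demo_fops);
}
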
d                  94 arch/powerpc/platforms/ps3/interrupt.c static void ps3_chip_mask(struct irq_data *d)
d                  96 arch/powerpc/platforms/ps3/interrupt.c 	struct ps3_private *pd = irq_data_get_irq_chip_data(d);
d                 100 arch/powerpc/platforms/ps3/interrupt.c 		pd->thread_id, d->irq);
d                 103 arch/powerpc/platforms/ps3/interrupt.c 	clear_bit(63 - d->irq, &pd->bmp.mask);
d                 115 arch/powerpc/platforms/ps3/interrupt.c static void ps3_chip_unmask(struct irq_data *d)
d                 117 arch/powerpc/platforms/ps3/interrupt.c 	struct ps3_private *pd = irq_data_get_irq_chip_data(d);
d                 121 arch/powerpc/platforms/ps3/interrupt.c 		pd->thread_id, d->irq);
d                 124 arch/powerpc/platforms/ps3/interrupt.c 	set_bit(63 - d->irq, &pd->bmp.mask);
d                 136 arch/powerpc/platforms/ps3/interrupt.c static void ps3_chip_eoi(struct irq_data *d)
d                 138 arch/powerpc/platforms/ps3/interrupt.c 	const struct ps3_private *pd = irq_data_get_irq_chip_data(d);
d                 142 arch/powerpc/platforms/ps3/interrupt.c 	if (!test_bit(63 - d->irq, &pd->ipi_mask))
d                 143 arch/powerpc/platforms/ps3/interrupt.c 		lv1_end_of_interrupt_ext(pd->ppe_id, pd->thread_id, d->irq);
d                 363 arch/powerpc/platforms/pseries/lpar.c 	struct dtl_worker *d = container_of(work, struct dtl_worker, work.work);
d                 369 arch/powerpc/platforms/pseries/lpar.c 	if (d->cpu != smp_processor_id()) {
d                 384 arch/powerpc/platforms/pseries/lpar.c 				d->cpu,
d                 400 arch/powerpc/platforms/pseries/lpar.c 	schedule_delayed_work_on(d->cpu, to_delayed_work(work),
d                 406 arch/powerpc/platforms/pseries/lpar.c 	struct dtl_worker *d = &per_cpu(dtl_workers, cpu);
d                 408 arch/powerpc/platforms/pseries/lpar.c 	memset(d, 0, sizeof(*d));
d                 409 arch/powerpc/platforms/pseries/lpar.c 	INIT_DELAYED_WORK(&d->work, process_dtl_buffer);
d                 410 arch/powerpc/platforms/pseries/lpar.c 	d->cpu = cpu;
d                 419 arch/powerpc/platforms/pseries/lpar.c 	schedule_delayed_work_on(cpu, &d->work, HZ / vcpudispatch_stats_freq);
d                 425 arch/powerpc/platforms/pseries/lpar.c 	struct dtl_worker *d = &per_cpu(dtl_workers, cpu);
d                 427 arch/powerpc/platforms/pseries/lpar.c 	cancel_delayed_work_sync(&d->work);
d                2008 arch/powerpc/platforms/pseries/lpar.c 		struct dentry *d;
d                2012 arch/powerpc/platforms/pseries/lpar.c 		d = debugfs_create_file(name, 0400, vpa_dir, (void *)i,
d                2014 arch/powerpc/platforms/pseries/lpar.c 		if (!d) {
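
The dtl_workers entries a few lines up show the per-CPU delayed-work pattern: each worker is initialised with INIT_DELAYED_WORK(), queued on its own CPU with schedule_delayed_work_on(), re-armed from the work function, and torn down with cancel_delayed_work_sync(). A reduced sketch with invented names (demo_worker, demo_fn, demo_workers); the workqueue and per-CPU helpers are the generic kernel API.

#include <linux/workqueue.h>
#include <linux/percpu.h>
#include <linux/jiffies.h>

struct demo_worker {
	struct delayed_work work;
	int cpu;
};

static DEFINE_PER_CPU(struct demo_worker, demo_workers);

static void demo_fn(struct work_struct *work)
{
	struct demo_worker *w = container_of(work, struct demo_worker, work.work);

	/* ... per-CPU processing for w->cpu ... */

	/* re-arm on the same CPU */
	schedule_delayed_work_on(w->cpu, to_delayed_work(work), HZ);
}

static void demo_worker_start(int cpu)
{
	struct demo_worker *w = &per_cpu(demo_workers, cpu);

	w->cpu = cpu;
	INIT_DELAYED_WORK(&w->work, demo_fn);
	schedule_delayed_work_on(cpu, &w->work, HZ);
}

static void demo_worker_stop(int cpu)
{
	cancel_delayed_work_sync(&per_cpu(demo_workers, cpu).work);
}
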
d                  79 arch/powerpc/sysdev/cpm2_pic.c static void cpm2_mask_irq(struct irq_data *d)
d                  82 arch/powerpc/sysdev/cpm2_pic.c 	unsigned int irq_nr = irqd_to_hwirq(d);
d                  91 arch/powerpc/sysdev/cpm2_pic.c static void cpm2_unmask_irq(struct irq_data *d)
d                  94 arch/powerpc/sysdev/cpm2_pic.c 	unsigned int irq_nr = irqd_to_hwirq(d);
d                 103 arch/powerpc/sysdev/cpm2_pic.c static void cpm2_ack(struct irq_data *d)
d                 106 arch/powerpc/sysdev/cpm2_pic.c 	unsigned int irq_nr = irqd_to_hwirq(d);
d                 114 arch/powerpc/sysdev/cpm2_pic.c static void cpm2_end_irq(struct irq_data *d)
d                 117 arch/powerpc/sysdev/cpm2_pic.c 	unsigned int irq_nr = irqd_to_hwirq(d);
d                 132 arch/powerpc/sysdev/cpm2_pic.c static int cpm2_set_irq_type(struct irq_data *d, unsigned int flow_type)
d                 134 arch/powerpc/sysdev/cpm2_pic.c 	unsigned int src = irqd_to_hwirq(d);
d                 156 arch/powerpc/sysdev/cpm2_pic.c 	irqd_set_trigger_type(d, flow_type);
d                 158 arch/powerpc/sysdev/cpm2_pic.c 		irq_set_handler_locked(d, handle_level_irq);
d                 160 arch/powerpc/sysdev/cpm2_pic.c 		irq_set_handler_locked(d, handle_edge_irq);
d                  44 arch/powerpc/sysdev/ehv_pic.c void ehv_pic_unmask_irq(struct irq_data *d)
d                  46 arch/powerpc/sysdev/ehv_pic.c 	unsigned int src = virq_to_hw(d->irq);
d                  51 arch/powerpc/sysdev/ehv_pic.c void ehv_pic_mask_irq(struct irq_data *d)
d                  53 arch/powerpc/sysdev/ehv_pic.c 	unsigned int src = virq_to_hw(d->irq);
d                  58 arch/powerpc/sysdev/ehv_pic.c void ehv_pic_end_irq(struct irq_data *d)
d                  60 arch/powerpc/sysdev/ehv_pic.c 	unsigned int src = virq_to_hw(d->irq);
d                  65 arch/powerpc/sysdev/ehv_pic.c void ehv_pic_direct_end_irq(struct irq_data *d)
d                  70 arch/powerpc/sysdev/ehv_pic.c int ehv_pic_set_affinity(struct irq_data *d, const struct cpumask *dest,
d                  73 arch/powerpc/sysdev/ehv_pic.c 	unsigned int src = virq_to_hw(d->irq);
d                 111 arch/powerpc/sysdev/ehv_pic.c int ehv_pic_set_irq_type(struct irq_data *d, unsigned int flow_type)
d                 113 arch/powerpc/sysdev/ehv_pic.c 	unsigned int src = virq_to_hw(d->irq);
d                 120 arch/powerpc/sysdev/ehv_pic.c 	irqd_set_trigger_type(d, flow_type);
d                  32 arch/powerpc/sysdev/fsl_mpic_err.c static void fsl_mpic_mask_err(struct irq_data *d)
d                  35 arch/powerpc/sysdev/fsl_mpic_err.c 	struct mpic *mpic = irq_data_get_irq_chip_data(d);
d                  36 arch/powerpc/sysdev/fsl_mpic_err.c 	unsigned int src = virq_to_hw(d->irq) - mpic->err_int_vecs[0];
d                  43 arch/powerpc/sysdev/fsl_mpic_err.c static void fsl_mpic_unmask_err(struct irq_data *d)
d                  46 arch/powerpc/sysdev/fsl_mpic_err.c 	struct mpic *mpic = irq_data_get_irq_chip_data(d);
d                  47 arch/powerpc/sysdev/fsl_mpic_err.c 	unsigned int src = virq_to_hw(d->irq) - mpic->err_int_vecs[0];
d                  61 arch/powerpc/sysdev/fsl_msi.c static void fsl_msi_end_irq(struct irq_data *d)
d                 935 arch/powerpc/sysdev/fsl_pci.c 	unsigned int rd, ra, rb, d;
d                 940 arch/powerpc/sysdev/fsl_pci.c 	d = get_d(inst);
d                 994 arch/powerpc/sysdev/fsl_pci.c 		regs->gpr[ra] += (s16)d;
d                1003 arch/powerpc/sysdev/fsl_pci.c 		regs->gpr[ra] += (s16)d;
d                1012 arch/powerpc/sysdev/fsl_pci.c 		regs->gpr[ra] += (s16)d;
d                1021 arch/powerpc/sysdev/fsl_pci.c 		regs->gpr[ra] += (s16)d;
d                 111 arch/powerpc/sysdev/ge/ge_pic.c static void gef_pic_mask(struct irq_data *d)
d                 114 arch/powerpc/sysdev/ge/ge_pic.c 	unsigned int hwirq = irqd_to_hwirq(d);
d                 124 arch/powerpc/sysdev/ge/ge_pic.c static void gef_pic_mask_ack(struct irq_data *d)
d                 129 arch/powerpc/sysdev/ge/ge_pic.c 	gef_pic_mask(d);
d                 132 arch/powerpc/sysdev/ge/ge_pic.c static void gef_pic_unmask(struct irq_data *d)
d                 135 arch/powerpc/sysdev/ge/ge_pic.c 	unsigned int hwirq = irqd_to_hwirq(d);
d                  18 arch/powerpc/sysdev/grackle.c #define GRACKLE_CFA(b, d, o)	(0x80 | ((b) << 8) | ((d) << 16) \
d                  76 arch/powerpc/sysdev/i8259.c static void i8259_mask_and_ack_irq(struct irq_data *d)
d                  81 arch/powerpc/sysdev/i8259.c 	if (d->irq > 7) {
d                  82 arch/powerpc/sysdev/i8259.c 		cached_A1 |= 1 << (d->irq-8);
d                  88 arch/powerpc/sysdev/i8259.c 		cached_21 |= 1 << d->irq;
d                 102 arch/powerpc/sysdev/i8259.c static void i8259_mask_irq(struct irq_data *d)
d                 106 arch/powerpc/sysdev/i8259.c 	pr_debug("i8259_mask_irq(%d)\n", d->irq);
d                 109 arch/powerpc/sysdev/i8259.c 	if (d->irq < 8)
d                 110 arch/powerpc/sysdev/i8259.c 		cached_21 |= 1 << d->irq;
d                 112 arch/powerpc/sysdev/i8259.c 		cached_A1 |= 1 << (d->irq-8);
d                 113 arch/powerpc/sysdev/i8259.c 	i8259_set_irq_mask(d->irq);
d                 117 arch/powerpc/sysdev/i8259.c static void i8259_unmask_irq(struct irq_data *d)
d                 121 arch/powerpc/sysdev/i8259.c 	pr_debug("i8259_unmask_irq(%d)\n", d->irq);
d                 124 arch/powerpc/sysdev/i8259.c 	if (d->irq < 8)
d                 125 arch/powerpc/sysdev/i8259.c 		cached_21 &= ~(1 << d->irq);
d                 127 arch/powerpc/sysdev/i8259.c 		cached_A1 &= ~(1 << (d->irq-8));
d                 128 arch/powerpc/sysdev/i8259.c 	i8259_set_irq_mask(d->irq);
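
The i8259 handlers above keep software copies of the two mask registers and only touch hardware by writing the relevant cached byte back to the master (port 0x21) or slave (port 0xA1) PIC. A sketch mirroring that split; the demo_* names are invented, while outb() and the port numbers are the standard PC/AT ones.

#include <linux/io.h>

static unsigned char demo_cached_21 = 0xff;	/* master: IRQs 0-7, all masked */
static unsigned char demo_cached_A1 = 0xff;	/* slave:  IRQs 8-15, all masked */

static void demo_i8259_sync_mask(unsigned int irq)
{
	if (irq < 8)
		outb(demo_cached_21, 0x21);	/* master PIC mask register */
	else
		outb(demo_cached_A1, 0xA1);	/* slave PIC mask register  */
}

static void demo_i8259_mask(unsigned int irq)
{
	if (irq < 8)
		demo_cached_21 |= 1 << irq;	/* a set bit masks the IRQ */
	else
		demo_cached_A1 |= 1 << (irq - 8);
	demo_i8259_sync_mask(irq);
}
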
d                 520 arch/powerpc/sysdev/ipic.c static void ipic_unmask_irq(struct irq_data *d)
d                 522 arch/powerpc/sysdev/ipic.c 	struct ipic *ipic = ipic_from_irq(d->irq);
d                 523 arch/powerpc/sysdev/ipic.c 	unsigned int src = irqd_to_hwirq(d);
d                 536 arch/powerpc/sysdev/ipic.c static void ipic_mask_irq(struct irq_data *d)
d                 538 arch/powerpc/sysdev/ipic.c 	struct ipic *ipic = ipic_from_irq(d->irq);
d                 539 arch/powerpc/sysdev/ipic.c 	unsigned int src = irqd_to_hwirq(d);
d                 556 arch/powerpc/sysdev/ipic.c static void ipic_ack_irq(struct irq_data *d)
d                 558 arch/powerpc/sysdev/ipic.c 	struct ipic *ipic = ipic_from_irq(d->irq);
d                 559 arch/powerpc/sysdev/ipic.c 	unsigned int src = irqd_to_hwirq(d);
d                 575 arch/powerpc/sysdev/ipic.c static void ipic_mask_irq_and_ack(struct irq_data *d)
d                 577 arch/powerpc/sysdev/ipic.c 	struct ipic *ipic = ipic_from_irq(d->irq);
d                 578 arch/powerpc/sysdev/ipic.c 	unsigned int src = irqd_to_hwirq(d);
d                 598 arch/powerpc/sysdev/ipic.c static int ipic_set_irq_type(struct irq_data *d, unsigned int flow_type)
d                 600 arch/powerpc/sysdev/ipic.c 	struct ipic *ipic = ipic_from_irq(d->irq);
d                 601 arch/powerpc/sysdev/ipic.c 	unsigned int src = irqd_to_hwirq(d);
d                 622 arch/powerpc/sysdev/ipic.c 	irqd_set_trigger_type(d, flow_type);
d                 624 arch/powerpc/sysdev/ipic.c 		irq_set_handler_locked(d, handle_level_irq);
d                 625 arch/powerpc/sysdev/ipic.c 		d->chip = &ipic_level_irq_chip;
d                 627 arch/powerpc/sysdev/ipic.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 628 arch/powerpc/sysdev/ipic.c 		d->chip = &ipic_edge_irq_chip;
d                 636 arch/powerpc/sysdev/mpic.c static inline struct mpic * mpic_from_ipi(struct irq_data *d)
d                 638 arch/powerpc/sysdev/mpic.c 	return irq_data_get_irq_chip_data(d);
d                 649 arch/powerpc/sysdev/mpic.c static inline struct mpic * mpic_from_irq_data(struct irq_data *d)
d                 651 arch/powerpc/sysdev/mpic.c 	return irq_data_get_irq_chip_data(d);
d                 665 arch/powerpc/sysdev/mpic.c void mpic_unmask_irq(struct irq_data *d)
d                 668 arch/powerpc/sysdev/mpic.c 	struct mpic *mpic = mpic_from_irq_data(d);
d                 669 arch/powerpc/sysdev/mpic.c 	unsigned int src = irqd_to_hwirq(d);
d                 671 arch/powerpc/sysdev/mpic.c 	DBG("%p: %s: enable_irq: %d (src %d)\n", mpic, mpic->name, d->irq, src);
d                 686 arch/powerpc/sysdev/mpic.c void mpic_mask_irq(struct irq_data *d)
d                 689 arch/powerpc/sysdev/mpic.c 	struct mpic *mpic = mpic_from_irq_data(d);
d                 690 arch/powerpc/sysdev/mpic.c 	unsigned int src = irqd_to_hwirq(d);
d                 692 arch/powerpc/sysdev/mpic.c 	DBG("%s: disable_irq: %d (src %d)\n", mpic->name, d->irq, src);
d                 708 arch/powerpc/sysdev/mpic.c void mpic_end_irq(struct irq_data *d)
d                 710 arch/powerpc/sysdev/mpic.c 	struct mpic *mpic = mpic_from_irq_data(d);
d                 713 arch/powerpc/sysdev/mpic.c 	DBG("%s: end_irq: %d\n", mpic->name, d->irq);
d                 725 arch/powerpc/sysdev/mpic.c static void mpic_unmask_ht_irq(struct irq_data *d)
d                 727 arch/powerpc/sysdev/mpic.c 	struct mpic *mpic = mpic_from_irq_data(d);
d                 728 arch/powerpc/sysdev/mpic.c 	unsigned int src = irqd_to_hwirq(d);
d                 730 arch/powerpc/sysdev/mpic.c 	mpic_unmask_irq(d);
d                 732 arch/powerpc/sysdev/mpic.c 	if (irqd_is_level_type(d))
d                 736 arch/powerpc/sysdev/mpic.c static unsigned int mpic_startup_ht_irq(struct irq_data *d)
d                 738 arch/powerpc/sysdev/mpic.c 	struct mpic *mpic = mpic_from_irq_data(d);
d                 739 arch/powerpc/sysdev/mpic.c 	unsigned int src = irqd_to_hwirq(d);
d                 741 arch/powerpc/sysdev/mpic.c 	mpic_unmask_irq(d);
d                 742 arch/powerpc/sysdev/mpic.c 	mpic_startup_ht_interrupt(mpic, src, irqd_is_level_type(d));
d                 747 arch/powerpc/sysdev/mpic.c static void mpic_shutdown_ht_irq(struct irq_data *d)
d                 749 arch/powerpc/sysdev/mpic.c 	struct mpic *mpic = mpic_from_irq_data(d);
d                 750 arch/powerpc/sysdev/mpic.c 	unsigned int src = irqd_to_hwirq(d);
d                 753 arch/powerpc/sysdev/mpic.c 	mpic_mask_irq(d);
d                 756 arch/powerpc/sysdev/mpic.c static void mpic_end_ht_irq(struct irq_data *d)
d                 758 arch/powerpc/sysdev/mpic.c 	struct mpic *mpic = mpic_from_irq_data(d);
d                 759 arch/powerpc/sysdev/mpic.c 	unsigned int src = irqd_to_hwirq(d);
d                 762 arch/powerpc/sysdev/mpic.c 	DBG("%s: end_irq: %d\n", mpic->name, d->irq);
d                 769 arch/powerpc/sysdev/mpic.c 	if (irqd_is_level_type(d))
d                 777 arch/powerpc/sysdev/mpic.c static void mpic_unmask_ipi(struct irq_data *d)
d                 779 arch/powerpc/sysdev/mpic.c 	struct mpic *mpic = mpic_from_ipi(d);
d                 780 arch/powerpc/sysdev/mpic.c 	unsigned int src = virq_to_hw(d->irq) - mpic->ipi_vecs[0];
d                 782 arch/powerpc/sysdev/mpic.c 	DBG("%s: enable_ipi: %d (ipi %d)\n", mpic->name, d->irq, src);
d                 786 arch/powerpc/sysdev/mpic.c static void mpic_mask_ipi(struct irq_data *d)
d                 791 arch/powerpc/sysdev/mpic.c static void mpic_end_ipi(struct irq_data *d)
d                 793 arch/powerpc/sysdev/mpic.c 	struct mpic *mpic = mpic_from_ipi(d);
d                 805 arch/powerpc/sysdev/mpic.c static void mpic_unmask_tm(struct irq_data *d)
d                 807 arch/powerpc/sysdev/mpic.c 	struct mpic *mpic = mpic_from_irq_data(d);
d                 808 arch/powerpc/sysdev/mpic.c 	unsigned int src = virq_to_hw(d->irq) - mpic->timer_vecs[0];
d                 810 arch/powerpc/sysdev/mpic.c 	DBG("%s: enable_tm: %d (tm %d)\n", mpic->name, d->irq, src);
d                 815 arch/powerpc/sysdev/mpic.c static void mpic_mask_tm(struct irq_data *d)
d                 817 arch/powerpc/sysdev/mpic.c 	struct mpic *mpic = mpic_from_irq_data(d);
d                 818 arch/powerpc/sysdev/mpic.c 	unsigned int src = virq_to_hw(d->irq) - mpic->timer_vecs[0];
d                 824 arch/powerpc/sysdev/mpic.c int mpic_set_affinity(struct irq_data *d, const struct cpumask *cpumask,
d                 827 arch/powerpc/sysdev/mpic.c 	struct mpic *mpic = mpic_from_irq_data(d);
d                 828 arch/powerpc/sysdev/mpic.c 	unsigned int src = irqd_to_hwirq(d);
d                 867 arch/powerpc/sysdev/mpic.c int mpic_set_irq_type(struct irq_data *d, unsigned int flow_type)
d                 869 arch/powerpc/sysdev/mpic.c 	struct mpic *mpic = mpic_from_irq_data(d);
d                 870 arch/powerpc/sysdev/mpic.c 	unsigned int src = irqd_to_hwirq(d);
d                 874 arch/powerpc/sysdev/mpic.c 	    mpic, d->irq, src, flow_type);
d                 909 arch/powerpc/sysdev/mpic.c 	irqd_set_trigger_type(d, flow_type);
d                  32 arch/powerpc/sysdev/mpic.h extern int mpic_set_irq_type(struct irq_data *d, unsigned int flow_type);
d                  34 arch/powerpc/sysdev/mpic.h extern int mpic_set_affinity(struct irq_data *d,
d                 331 arch/powerpc/sysdev/tsi108_pci.c static void tsi108_pci_irq_unmask(struct irq_data *d)
d                 333 arch/powerpc/sysdev/tsi108_pci.c 	tsi108_pci_int_unmask(d->irq);
d                 343 arch/powerpc/sysdev/tsi108_pci.c static void tsi108_pci_irq_mask(struct irq_data *d)
d                 345 arch/powerpc/sysdev/tsi108_pci.c 	tsi108_pci_int_mask(d->irq);
d                 348 arch/powerpc/sysdev/tsi108_pci.c static void tsi108_pci_irq_ack(struct irq_data *d)
d                 350 arch/powerpc/sysdev/tsi108_pci.c 	tsi108_pci_int_mask(d->irq);
d                  74 arch/powerpc/sysdev/xics/icp-hv.c static void icp_hv_eoi(struct irq_data *d)
d                  76 arch/powerpc/sysdev/xics/icp-hv.c 	unsigned int hw_irq = (unsigned int)irqd_to_hwirq(d);
d                  86 arch/powerpc/sysdev/xics/icp-native.c void icp_native_eoi(struct irq_data *d)
d                  88 arch/powerpc/sysdev/xics/icp-native.c 	unsigned int hw_irq = (unsigned int)irqd_to_hwirq(d);
d                 105 arch/powerpc/sysdev/xics/icp-opal.c static void icp_opal_eoi(struct irq_data *d)
d                 107 arch/powerpc/sysdev/xics/icp-opal.c 	unsigned int hw_irq = (unsigned int)irqd_to_hwirq(d);
d                  42 arch/powerpc/sysdev/xics/ics-opal.c static void ics_opal_unmask_irq(struct irq_data *d)
d                  44 arch/powerpc/sysdev/xics/ics-opal.c 	unsigned int hw_irq = (unsigned int)irqd_to_hwirq(d);
d                  48 arch/powerpc/sysdev/xics/ics-opal.c 	pr_devel("ics-hal: unmask virq %d [hw 0x%x]\n", d->irq, hw_irq);
d                  53 arch/powerpc/sysdev/xics/ics-opal.c 	server = xics_get_irq_server(d->irq, irq_data_get_affinity_mask(d), 0);
d                  60 arch/powerpc/sysdev/xics/ics-opal.c 		       __func__, d->irq, hw_irq, server, rc);
d                  63 arch/powerpc/sysdev/xics/ics-opal.c static unsigned int ics_opal_startup(struct irq_data *d)
d                  71 arch/powerpc/sysdev/xics/ics-opal.c 	if (irq_data_get_msi_desc(d))
d                  72 arch/powerpc/sysdev/xics/ics-opal.c 		pci_msi_unmask_irq(d);
d                  76 arch/powerpc/sysdev/xics/ics-opal.c 	ics_opal_unmask_irq(d);
d                  95 arch/powerpc/sysdev/xics/ics-opal.c static void ics_opal_mask_irq(struct irq_data *d)
d                  97 arch/powerpc/sysdev/xics/ics-opal.c 	unsigned int hw_irq = (unsigned int)irqd_to_hwirq(d);
d                  99 arch/powerpc/sysdev/xics/ics-opal.c 	pr_devel("ics-hal: mask virq %d [hw 0x%x]\n", d->irq, hw_irq);
d                 106 arch/powerpc/sysdev/xics/ics-opal.c static int ics_opal_set_affinity(struct irq_data *d,
d                 110 arch/powerpc/sysdev/xics/ics-opal.c 	unsigned int hw_irq = (unsigned int)irqd_to_hwirq(d);
d                 123 arch/powerpc/sysdev/xics/ics-opal.c 		       __func__, d->irq, hw_irq, rc);
d                 128 arch/powerpc/sysdev/xics/ics-opal.c 	wanted_server = xics_get_irq_server(d->irq, cpumask, 1);
d                 131 arch/powerpc/sysdev/xics/ics-opal.c 			__func__, cpumask_pr_args(cpumask), d->irq);
d                 137 arch/powerpc/sysdev/xics/ics-opal.c 		 d->irq, hw_irq, wanted_server, server);
d                 143 arch/powerpc/sysdev/xics/ics-opal.c 		       __func__, d->irq, hw_irq, server, rc);
d                  40 arch/powerpc/sysdev/xics/ics-rtas.c static void ics_rtas_unmask_irq(struct irq_data *d)
d                  42 arch/powerpc/sysdev/xics/ics-rtas.c 	unsigned int hw_irq = (unsigned int)irqd_to_hwirq(d);
d                  46 arch/powerpc/sysdev/xics/ics-rtas.c 	pr_devel("xics: unmask virq %d [hw 0x%x]\n", d->irq, hw_irq);
d                  51 arch/powerpc/sysdev/xics/ics-rtas.c 	server = xics_get_irq_server(d->irq, irq_data_get_affinity_mask(d), 0);
d                  71 arch/powerpc/sysdev/xics/ics-rtas.c static unsigned int ics_rtas_startup(struct irq_data *d)
d                  79 arch/powerpc/sysdev/xics/ics-rtas.c 	if (irq_data_get_msi_desc(d))
d                  80 arch/powerpc/sysdev/xics/ics-rtas.c 		pci_msi_unmask_irq(d);
d                  83 arch/powerpc/sysdev/xics/ics-rtas.c 	ics_rtas_unmask_irq(d);
d                 111 arch/powerpc/sysdev/xics/ics-rtas.c static void ics_rtas_mask_irq(struct irq_data *d)
d                 113 arch/powerpc/sysdev/xics/ics-rtas.c 	unsigned int hw_irq = (unsigned int)irqd_to_hwirq(d);
d                 115 arch/powerpc/sysdev/xics/ics-rtas.c 	pr_devel("xics: mask virq %d [hw 0x%x]\n", d->irq, hw_irq);
d                 122 arch/powerpc/sysdev/xics/ics-rtas.c static int ics_rtas_set_affinity(struct irq_data *d,
d                 126 arch/powerpc/sysdev/xics/ics-rtas.c 	unsigned int hw_irq = (unsigned int)irqd_to_hwirq(d);
d                 142 arch/powerpc/sysdev/xics/ics-rtas.c 	irq_server = xics_get_irq_server(d->irq, cpumask, 1);
d                 145 arch/powerpc/sysdev/xics/ics-rtas.c 			__func__, cpumask_pr_args(cpumask), d->irq);
d                 320 arch/powerpc/sysdev/xics/xics-common.c static void xics_ipi_unmask(struct irq_data *d) { }
d                 321 arch/powerpc/sysdev/xics/xics-common.c static void xics_ipi_mask(struct irq_data *d) { }
d                 381 arch/powerpc/sysdev/xics/xics-common.c int xics_set_irq_type(struct irq_data *d, unsigned int flow_type)
d                 397 arch/powerpc/sysdev/xics/xics-common.c 	irqd_set_trigger_type(d, flow_type);
d                 260 arch/powerpc/sysdev/xive/common.c int xmon_xive_get_irq_config(u32 hw_irq, struct irq_data *d)
d                 262 arch/powerpc/sysdev/xive/common.c 	struct irq_chip *chip = irq_data_get_irq_chip(d);
d                 280 arch/powerpc/sysdev/xive/common.c 	if (d) {
d                 281 arch/powerpc/sysdev/xive/common.c 		struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
d                 399 arch/powerpc/sysdev/xive/common.c static void xive_irq_eoi(struct irq_data *d)
d                 401 arch/powerpc/sysdev/xive/common.c 	struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
d                 405 arch/powerpc/sysdev/xive/common.c 		    d->irq, irqd_to_hwirq(d), xc->pending_prio);
d                 411 arch/powerpc/sysdev/xive/common.c 	if (!irqd_irq_disabled(d) && !irqd_is_forwarded_to_vcpu(d) &&
d                 413 arch/powerpc/sysdev/xive/common.c 		xive_do_source_eoi(irqd_to_hwirq(d), xd);
d                 555 arch/powerpc/sysdev/xive/common.c static int xive_pick_irq_target(struct irq_data *d,
d                 559 arch/powerpc/sysdev/xive/common.c 	struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
d                 590 arch/powerpc/sysdev/xive/common.c static unsigned int xive_irq_startup(struct irq_data *d)
d                 592 arch/powerpc/sysdev/xive/common.c 	struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
d                 593 arch/powerpc/sysdev/xive/common.c 	unsigned int hw_irq = (unsigned int)irqd_to_hwirq(d);
d                 599 arch/powerpc/sysdev/xive/common.c 		 d->irq, hw_irq, d);
d                 607 arch/powerpc/sysdev/xive/common.c 	if (irq_data_get_msi_desc(d))
d                 608 arch/powerpc/sysdev/xive/common.c 		pci_msi_unmask_irq(d);
d                 612 arch/powerpc/sysdev/xive/common.c 	target = xive_pick_irq_target(d, irq_data_get_affinity_mask(d));
d                 615 arch/powerpc/sysdev/xive/common.c 		target = xive_pick_irq_target(d, cpu_online_mask);
d                 618 arch/powerpc/sysdev/xive/common.c 		pr_warn("irq %d started with broken affinity\n", d->irq);
d                 634 arch/powerpc/sysdev/xive/common.c 				     xive_irq_priority, d->irq);
d                 645 arch/powerpc/sysdev/xive/common.c static void xive_irq_shutdown(struct irq_data *d)
d                 647 arch/powerpc/sysdev/xive/common.c 	struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
d                 648 arch/powerpc/sysdev/xive/common.c 	unsigned int hw_irq = (unsigned int)irqd_to_hwirq(d);
d                 651 arch/powerpc/sysdev/xive/common.c 		 d->irq, hw_irq, d);
d                 671 arch/powerpc/sysdev/xive/common.c static void xive_irq_unmask(struct irq_data *d)
d                 673 arch/powerpc/sysdev/xive/common.c 	struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
d                 675 arch/powerpc/sysdev/xive/common.c 	pr_devel("xive_irq_unmask: irq %d data @%p\n", d->irq, xd);
d                 684 arch/powerpc/sysdev/xive/common.c 		unsigned int hw_irq = (unsigned int)irqd_to_hwirq(d);
d                 687 arch/powerpc/sysdev/xive/common.c 					xive_irq_priority, d->irq);
d                 694 arch/powerpc/sysdev/xive/common.c static void xive_irq_mask(struct irq_data *d)
d                 696 arch/powerpc/sysdev/xive/common.c 	struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
d                 698 arch/powerpc/sysdev/xive/common.c 	pr_devel("xive_irq_mask: irq %d data @%p\n", d->irq, xd);
d                 707 arch/powerpc/sysdev/xive/common.c 		unsigned int hw_irq = (unsigned int)irqd_to_hwirq(d);
d                 710 arch/powerpc/sysdev/xive/common.c 					0xff, d->irq);
d                 717 arch/powerpc/sysdev/xive/common.c static int xive_irq_set_affinity(struct irq_data *d,
d                 721 arch/powerpc/sysdev/xive/common.c 	struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
d                 722 arch/powerpc/sysdev/xive/common.c 	unsigned int hw_irq = (unsigned int)irqd_to_hwirq(d);
d                 726 arch/powerpc/sysdev/xive/common.c 	pr_devel("xive_irq_set_affinity: irq %d\n", d->irq);
d                 733 arch/powerpc/sysdev/xive/common.c 	if (!irqd_is_started(d))
d                 746 arch/powerpc/sysdev/xive/common.c 	target = xive_pick_irq_target(d, cpumask);
d                 762 arch/powerpc/sysdev/xive/common.c 	if (!irqd_is_forwarded_to_vcpu(d))
d                 765 arch/powerpc/sysdev/xive/common.c 					     xive_irq_priority, d->irq);
d                 767 arch/powerpc/sysdev/xive/common.c 		pr_err("Error %d reconfiguring irq %d\n", rc, d->irq);
d                 781 arch/powerpc/sysdev/xive/common.c static int xive_irq_set_type(struct irq_data *d, unsigned int flow_type)
d                 783 arch/powerpc/sysdev/xive/common.c 	struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
d                 799 arch/powerpc/sysdev/xive/common.c 	irqd_set_trigger_type(d, flow_type);
d                 812 arch/powerpc/sysdev/xive/common.c 			d->irq, (u32)irqd_to_hwirq(d),
d                 820 arch/powerpc/sysdev/xive/common.c static int xive_irq_retrigger(struct irq_data *d)
d                 822 arch/powerpc/sysdev/xive/common.c 	struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
d                 849 arch/powerpc/sysdev/xive/common.c static int xive_irq_set_vcpu_affinity(struct irq_data *d, void *state)
d                 851 arch/powerpc/sysdev/xive/common.c 	struct xive_irq_data *xd = irq_data_get_irq_handler_data(d);
d                 852 arch/powerpc/sysdev/xive/common.c 	unsigned int hw_irq = (unsigned int)irqd_to_hwirq(d);
d                 868 arch/powerpc/sysdev/xive/common.c 		irqd_set_forwarded_to_vcpu(d);
d                 919 arch/powerpc/sysdev/xive/common.c 		irqd_clr_forwarded_to_vcpu(d);
d                 945 arch/powerpc/sysdev/xive/common.c 					     xive_irq_priority, d->irq);
d                1095 arch/powerpc/sysdev/xive/common.c static void xive_ipi_eoi(struct irq_data *d)
d                1104 arch/powerpc/sysdev/xive/common.c 		    d->irq, irqd_to_hwirq(d), xc->hw_ipi, xc->pending_prio);
d                1110 arch/powerpc/sysdev/xive/common.c static void xive_ipi_do_nothing(struct irq_data *d)
d                1258 arch/powerpc/sysdev/xive/common.c static void xive_irq_domain_unmap(struct irq_domain *d, unsigned int virq)
d                1409 arch/powerpc/sysdev/xive/common.c 		struct irq_data *d = irq_desc_get_irq_data(desc);
d                1411 arch/powerpc/sysdev/xive/common.c 		unsigned int hw_irq = (unsigned int)irqd_to_hwirq(d);
d                1417 arch/powerpc/sysdev/xive/common.c 		if (d->domain != xive_irq_domain || hw_irq == 0)
d                1442 arch/powerpc/sysdev/xive/common.c 			xive_do_source_eoi(irqd_to_hwirq(d), xd);
d                1444 arch/powerpc/sysdev/xive/common.c 			xive_irq_retrigger(d);
d                 119 arch/powerpc/xmon/spu-insns.h #define _A4(a,b,c,d) {4,a,b,c,d}
d                 743 arch/powerpc/xmon/xmon.c irqreturn_t xmon_irq(int irq, void *d)
d                2623 arch/powerpc/xmon/xmon.c static void dump_one_xive_irq(u32 num, struct irq_data *d)
d                2625 arch/powerpc/xmon/xmon.c 	xmon_xive_get_irq_config(num, d);
d                2634 arch/powerpc/xmon/xmon.c 		struct irq_data *d = irq_desc_get_irq_data(desc);
d                2637 arch/powerpc/xmon/xmon.c 		if (!d)
d                2640 arch/powerpc/xmon/xmon.c 		hwirq = (unsigned int)irqd_to_hwirq(d);
d                2643 arch/powerpc/xmon/xmon.c 			dump_one_xive_irq(hwirq, d);
d                3314 arch/powerpc/xmon/xmon.c 	int c, d;
d                3352 arch/powerpc/xmon/xmon.c 			d = hexdigit(c);
d                3353 arch/powerpc/xmon/xmon.c 			if (d == EOF) {
d                3385 arch/powerpc/xmon/xmon.c 	d = hexdigit(c);
d                3386 arch/powerpc/xmon/xmon.c 	if (d == EOF) {
d                3392 arch/powerpc/xmon/xmon.c 		v = (v << 4) + d;
d                3394 arch/powerpc/xmon/xmon.c 		d = hexdigit(c);
d                3395 arch/powerpc/xmon/xmon.c 	} while (d != EOF);
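The xmon lines above read a number one hex digit at a time and accumulate it with a shift-and-add loop. A minimal userspace sketch of that idiom (hexdigit() is reimplemented here and -1 stands in for xmon's EOF sentinel; none of this code is taken from xmon):

	#include <ctype.h>
	#include <stdio.h>

	/* Illustrative stand-in for xmon's hexdigit(): -1 plays the role of EOF. */
	static int hexdigit(int c)
	{
		if (isdigit(c))
			return c - '0';
		if (c >= 'a' && c <= 'f')
			return c - 'a' + 10;
		if (c >= 'A' && c <= 'F')
			return c - 'A' + 10;
		return -1;
	}

	int main(void)
	{
		const char *s = "1f8";
		unsigned long v = 0;
		int d;

		/* Same shift-and-add accumulation as in the listing above. */
		while ((d = hexdigit(*s++)) != -1)
			v = (v << 4) + d;

		printf("0x%lx = %lu\n", v, v);	/* prints 0x1f8 = 504 */
		return 0;
	}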
d                  25 arch/riscv/include/asm/asm.h #define REG_SC		__REG_SEL(sc.d, sc.w)
d                  73 arch/riscv/include/asm/atomic.h         ATOMIC_OP (op, asm_op, I, d, s64, 64)
d                 136 arch/riscv/include/asm/atomic.h         ATOMIC_FETCH_OP( op, asm_op,       I, d, s64, 64)		\
d                 137 arch/riscv/include/asm/atomic.h         ATOMIC_OP_RETURN(op, asm_op, c_op, I, d, s64, 64)
d                 173 arch/riscv/include/asm/atomic.h         ATOMIC_FETCH_OP(op, asm_op, I, d, s64, 64)
d                  76 arch/riscv/include/uapi/asm/ptrace.h 	struct __riscv_d_ext_state d;
d                  32 arch/riscv/kernel/signal.c 	struct __riscv_d_ext_state __user *state = &sc_fpregs->d;
d                  59 arch/riscv/kernel/signal.c 	struct __riscv_d_ext_state __user *state = &sc_fpregs->d;
d                 158 arch/s390/include/asm/sysinfo.h 	unsigned char d:1;
d                 190 arch/s390/include/uapi/asm/ptrace.h 	double	d;
d                  51 arch/s390/include/uapi/asm/runtime_instr.h 	__u32 d			: 1;
d                  24 arch/s390/kernel/compat_linux.h 	__u64   d;
d                 137 arch/s390/kernel/topology.c 			topo->dedicated = tl_core->d;
d                2636 arch/s390/kvm/kvm-s390.c static inline void sca_copy_entry(struct esca_entry *d, struct bsca_entry *s)
d                2638 arch/s390/kvm/kvm-s390.c 	d->sda = s->sda;
d                2639 arch/s390/kvm/kvm-s390.c 	d->sigp_ctrl.c = s->sigp_ctrl.c;
d                2640 arch/s390/kvm/kvm-s390.c 	d->sigp_ctrl.scn = s->sigp_ctrl.scn;
d                2643 arch/s390/kvm/kvm-s390.c static void sca_copy_b_to_e(struct esca_block *d, struct bsca_block *s)
d                2647 arch/s390/kvm/kvm-s390.c 	d->ipte_control = s->ipte_control;
d                2648 arch/s390/kvm/kvm-s390.c 	d->mcn[0] = s->mcn;
d                2650 arch/s390/kvm/kvm-s390.c 		sca_copy_entry(&d->cpu[i], &s->cpu[i]);
d                 131 arch/sh/boards/mach-r2d/irq.c 	struct intc_desc *d;
d                 137 arch/sh/boards/mach-r2d/irq.c 		d = &intc_desc_r2d_plus;
d                 145 arch/sh/boards/mach-r2d/irq.c 		d = &intc_desc_r2d_1;
d                 155 arch/sh/boards/mach-r2d/irq.c 	register_intc_controller(d);
d                  92 arch/sh/boot/compressed/misc.c 	char *d = (char *)__dest, *s = (char *)__src;
d                  94 arch/sh/boot/compressed/misc.c 	for (i=0;i<__n;i++) d[i] = s[i];
d                  60 arch/sh/include/asm/io.h #define readsb(p,d,l)		__raw_readsb(p,d,l)
d                  61 arch/sh/include/asm/io.h #define readsw(p,d,l)		__raw_readsw(p,d,l)
d                  62 arch/sh/include/asm/io.h #define readsl(p,d,l)		__raw_readsl(p,d,l)
d                  64 arch/sh/include/asm/io.h #define writesb(p,d,l)		__raw_writesb(p,d,l)
d                  65 arch/sh/include/asm/io.h #define writesw(p,d,l)		__raw_writesw(p,d,l)
d                  66 arch/sh/include/asm/io.h #define writesl(p,d,l)		__raw_writesl(p,d,l)
d                   9 arch/sh/lib/div64-generic.c extern uint64_t __xdiv64_32(u64 n, u32 d);
d                  54 arch/sh/math-emu/math.c #define WRITE(d,a)	({if(put_user(d, (typeof (d)*)a)) return -EFAULT;})
d                  55 arch/sh/math-emu/math.c #define READ(d,a)	({if(get_user(d, (typeof (d)*)a)) return -EFAULT;})
d                  32 arch/sh/math-emu/sfp-util.h #define udiv_qrnnd(q, r, n1, n0, d) \
d                  36 arch/sh/math-emu/sfp-util.h     __d1 = __ll_highpart (d);						\
d                  37 arch/sh/math-emu/sfp-util.h     __d0 = __ll_lowpart (d);						\
d                  45 arch/sh/math-emu/sfp-util.h 	__q1--, __r1 += (d);						\
d                  46 arch/sh/math-emu/sfp-util.h 	if (__r1 >= (d)) /* i.e. we didn't get carry when adding to __r1 */\
d                  48 arch/sh/math-emu/sfp-util.h 	    __q1--, __r1 += (d);					\
d                  58 arch/sh/math-emu/sfp-util.h 	__q0--, __r0 += (d);						\
d                  59 arch/sh/math-emu/sfp-util.h 	if (__r0 >= (d))						\
d                  61 arch/sh/math-emu/sfp-util.h 	    __q0--, __r0 += (d);					\
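The udiv_qrnnd() macro listed above divides a two-word value (n1:n0) by a one-word divisor using half-word steps because the target lacks a native wide divide. A reference sketch of the same semantics, assuming a 64-bit type is available and relying on the macro's usual precondition that n1 < d (the function name is invented for illustration):

	#include <stdint.h>
	#include <stdio.h>

	/*
	 * What udiv_qrnnd(q, r, n1, n0, d) computes: divide the two-word
	 * value (n1:n0) by d, giving a one-word quotient and remainder.
	 * Requires n1 < d so the quotient fits in 32 bits.
	 */
	static void udiv_qrnnd_ref(uint32_t *q, uint32_t *r,
				   uint32_t n1, uint32_t n0, uint32_t d)
	{
		uint64_t n = ((uint64_t)n1 << 32) | n0;

		*q = (uint32_t)(n / d);
		*r = (uint32_t)(n % d);
	}

	int main(void)
	{
		uint32_t q, r;

		udiv_qrnnd_ref(&q, &r, 0x1, 0x00000000, 0x10);	/* 2^32 / 16 */
		printf("q=%#x r=%#x\n", q, r);			/* q=0x10000000 r=0 */
		return 0;
	}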
d                  33 arch/sparc/crypto/des_glue.c static void encrypt_to_decrypt(u64 *d, const u64 *e)
d                  39 arch/sparc/crypto/des_glue.c 		*d++ = *s--;
d                  30 arch/sparc/crypto/opcodes.h #define AES_EROUND01(a,b,c,d)	\
d                  31 arch/sparc/crypto/opcodes.h 	.word	(F3F(2, 0x19, 0)|RS1(a)|RS2(b)|RS3(c)|RD(d));
d                  32 arch/sparc/crypto/opcodes.h #define AES_EROUND23(a,b,c,d)	\
d                  33 arch/sparc/crypto/opcodes.h 	.word	(F3F(2, 0x19, 1)|RS1(a)|RS2(b)|RS3(c)|RD(d));
d                  34 arch/sparc/crypto/opcodes.h #define AES_DROUND01(a,b,c,d)	\
d                  35 arch/sparc/crypto/opcodes.h 	.word	(F3F(2, 0x19, 2)|RS1(a)|RS2(b)|RS3(c)|RD(d));
d                  36 arch/sparc/crypto/opcodes.h #define AES_DROUND23(a,b,c,d)	\
d                  37 arch/sparc/crypto/opcodes.h 	.word	(F3F(2, 0x19, 3)|RS1(a)|RS2(b)|RS3(c)|RD(d));
d                  38 arch/sparc/crypto/opcodes.h #define AES_EROUND01_L(a,b,c,d)	\
d                  39 arch/sparc/crypto/opcodes.h 	.word	(F3F(2, 0x19, 4)|RS1(a)|RS2(b)|RS3(c)|RD(d));
d                  40 arch/sparc/crypto/opcodes.h #define AES_EROUND23_L(a,b,c,d)	\
d                  41 arch/sparc/crypto/opcodes.h 	.word	(F3F(2, 0x19, 5)|RS1(a)|RS2(b)|RS3(c)|RD(d));
d                  42 arch/sparc/crypto/opcodes.h #define AES_DROUND01_L(a,b,c,d)	\
d                  43 arch/sparc/crypto/opcodes.h 	.word	(F3F(2, 0x19, 6)|RS1(a)|RS2(b)|RS3(c)|RD(d));
d                  44 arch/sparc/crypto/opcodes.h #define AES_DROUND23_L(a,b,c,d)	\
d                  45 arch/sparc/crypto/opcodes.h 	.word	(F3F(2, 0x19, 7)|RS1(a)|RS2(b)|RS3(c)|RD(d));
d                  46 arch/sparc/crypto/opcodes.h #define AES_KEXPAND1(a,b,c,d)	\
d                  47 arch/sparc/crypto/opcodes.h 	.word	(F3F(2, 0x19, 8)|RS1(a)|RS2(b)|IMM5_9(c)|RD(d));
d                  59 arch/sparc/crypto/opcodes.h #define DES_ROUND(a,b,c,d)	\
d                  60 arch/sparc/crypto/opcodes.h 	.word		(F3F(2, 0x19, 0x009)|RS1(a)|RS2(b)|RS3(c)|RD(d));
d                  62 arch/sparc/crypto/opcodes.h #define CAMELLIA_F(a,b,c,d)		\
d                  63 arch/sparc/crypto/opcodes.h 	.word		(F3F(2, 0x19, 0x00c)|RS1(a)|RS2(b)|RS3(c)|RD(d));
d                  48 arch/sparc/include/asm/checksum_32.h 	register char *d asm("o1") = dst;
d                  54 arch/sparc/include/asm/checksum_32.h 	: "=&r" (ret), "=&r" (d), "=&r" (l)
d                  55 arch/sparc/include/asm/checksum_32.h 	: "0" (ret), "1" (d), "2" (l), "r" (sum)
d                  67 arch/sparc/include/asm/checksum_32.h 	register char *d asm("o1") = dst;
d                  79 arch/sparc/include/asm/checksum_32.h 	: "=&r" (ret), "=&r" (d), "=&r" (l), "=&r" (s)
d                  80 arch/sparc/include/asm/checksum_32.h 	: "0" (ret), "1" (d), "2" (l), "3" (s), "r" (err)
d                  95 arch/sparc/include/asm/checksum_32.h 		register char __user *d asm("o1") = dst;
d                 107 arch/sparc/include/asm/checksum_32.h 		: "=&r" (ret), "=&r" (d), "=&r" (l), "=&r" (s)
d                 108 arch/sparc/include/asm/checksum_32.h 		: "0" (ret), "1" (d), "2" (l), "3" (s), "r" (err)
d                1907 arch/sparc/include/asm/hypervisor.h #define HV_PCI_DEVICE_BUILD(b,d,f)	\
d                1909 arch/sparc/include/asm/hypervisor.h 	 (((d) & 0x1f) << 11) | \
d                  10 arch/sparc/include/asm/io_32.h #define memset_io(d,c,sz)     _memset_io(d,c,sz)
d                  11 arch/sparc/include/asm/io_32.h #define memcpy_fromio(d,s,sz) _memcpy_fromio(d,s,sz)
d                  12 arch/sparc/include/asm/io_32.h #define memcpy_toio(d,s,sz)   _memcpy_toio(d,s,sz)
d                  19 arch/sparc/include/asm/io_32.h 	volatile void __iomem *d = dst;
d                  22 arch/sparc/include/asm/io_32.h 		writeb(c, d);
d                  23 arch/sparc/include/asm/io_32.h 		d++;
d                  30 arch/sparc/include/asm/io_32.h 	char *d = dst;
d                  34 arch/sparc/include/asm/io_32.h 		*d++ = tmp;
d                  43 arch/sparc/include/asm/io_32.h 	volatile void __iomem *d = dst;
d                  47 arch/sparc/include/asm/io_32.h 		writeb(tmp, d);
d                  48 arch/sparc/include/asm/io_32.h 		d++;
d                 101 arch/sparc/include/asm/io_32.h 	char *d = dst;
d                 105 arch/sparc/include/asm/io_32.h 		*d++ = tmp;
d                 115 arch/sparc/include/asm/io_32.h 	volatile void __iomem *d = dst;
d                 119 arch/sparc/include/asm/io_32.h 		sbus_writeb(tmp, d);
d                 120 arch/sparc/include/asm/io_32.h 		d++;
d                 275 arch/sparc/include/asm/io_64.h #define ioread8_rep(p,d,l)	readsb(p,d,l)
d                 276 arch/sparc/include/asm/io_64.h #define ioread16_rep(p,d,l)	readsw(p,d,l)
d                 277 arch/sparc/include/asm/io_64.h #define ioread32_rep(p,d,l)	readsl(p,d,l)
d                 278 arch/sparc/include/asm/io_64.h #define iowrite8_rep(p,d,l)	writesb(p,d,l)
d                 279 arch/sparc/include/asm/io_64.h #define iowrite16_rep(p,d,l)	writesw(p,d,l)
d                 280 arch/sparc/include/asm/io_64.h #define iowrite32_rep(p,d,l)	writesl(p,d,l)
d                 340 arch/sparc/include/asm/io_64.h 	volatile void __iomem *d = dst;
d                 343 arch/sparc/include/asm/io_64.h 		writeb(c, d);
d                 344 arch/sparc/include/asm/io_64.h 		d++;
d                 351 arch/sparc/include/asm/io_64.h 	char *d = dst;
d                 355 arch/sparc/include/asm/io_64.h 		*d++ = tmp;
d                 364 arch/sparc/include/asm/io_64.h 	char *d = dst;
d                 368 arch/sparc/include/asm/io_64.h 		*d++ = tmp;
d                 377 arch/sparc/include/asm/io_64.h 	volatile void __iomem *d = dst;
d                 381 arch/sparc/include/asm/io_64.h 		sbus_writeb(tmp, d);
d                 382 arch/sparc/include/asm/io_64.h 		d++;
d                 390 arch/sparc/include/asm/io_64.h 	volatile void __iomem *d = dst;
d                 394 arch/sparc/include/asm/io_64.h 		writeb(tmp, d);
d                 395 arch/sparc/include/asm/io_64.h 		d++;
d                  32 arch/sparc/include/asm/openprom.h 	phandle (*v2_inst2pkg)(int d);	/* Convert ihandle to phandle */
d                  41 arch/sparc/include/asm/openprom.h 	void (*v2_dev_close)(int d);
d                  42 arch/sparc/include/asm/openprom.h 	int (*v2_dev_read)(int d, char *buf, int nbytes);
d                  43 arch/sparc/include/asm/openprom.h 	int (*v2_dev_write)(int d, const char *buf, int nbytes);
d                  44 arch/sparc/include/asm/openprom.h 	int (*v2_dev_seek)(int d, int hi, int lo);
d                 134 arch/sparc/include/asm/openprom.h 	unsigned int (*pv_enaddr)(int d, char *enaddr);
d                 334 arch/sparc/include/asm/pgtable_32.h #define pte_offset_map(d, a)		pte_offset_kernel(d,a)
d                 121 arch/sparc/include/asm/uaccess_32.h 			__put_user_asm(x, d, addr, __pu_ret); \
d                 139 arch/sparc/include/asm/uaccess_32.h 	case 8: __put_user_asm(x, d, addr, __pu_ret); break;	\
d                 181 arch/sparc/include/asm/uaccess_32.h 			__get_user_asm(__gu_val, d, addr, __gu_ret); \
d                 203 arch/sparc/include/asm/uaccess_32.h 	case 8: __get_user_asm(__gu_val, d, addr, __gu_ret); break;	\
d                  44 arch/sparc/include/asm/vga.h static inline void scr_memcpyw(u16 *d, u16 *s, unsigned int n)
d                  46 arch/sparc/include/asm/vga.h 	BUG_ON((long) d >= 0);
d                  48 arch/sparc/include/asm/vga.h 	memcpy(d, s, n);
d                  51 arch/sparc/include/asm/vga.h static inline void scr_memmovew(u16 *d, u16 *s, unsigned int n)
d                  53 arch/sparc/include/asm/vga.h 	BUG_ON((long) d >= 0);
d                  55 arch/sparc/include/asm/vga.h 	memmove(d, s, n);
d                  63 arch/sparc/kernel/mdesc.c 	} d;
d                 641 arch/sparc/kernel/mdesc.c 		ret = ep[from_node].d.val;
d                 649 arch/sparc/kernel/mdesc.c 		ret = ep[ret].d.val;
d                 676 arch/sparc/kernel/mdesc.c 			val = &ep->d.val;
d                 682 arch/sparc/kernel/mdesc.c 			val = data + ep->d.data.data_offset;
d                 683 arch/sparc/kernel/mdesc.c 			len = ep->d.data.data_len;
d                 735 arch/sparc/kernel/mdesc.c 	return ep->d.val;
d                 271 arch/sparc/math-emu/math_32.c 	u64 d;
d                 163 arch/sparc/math-emu/math_64.c 	u64 d;
d                 469 arch/sparc/math-emu/math_64.c 		case FXTOQ: XR = rs2->d; FP_FROM_INT_Q (QR, XR, 64, long); break;
d                 471 arch/sparc/math-emu/math_64.c 		case FXTOS: XR = rs2->d; FP_FROM_INT_S (SR, XR, 64, long); break;
d                 472 arch/sparc/math-emu/math_64.c 		case FXTOD: XR = rs2->d; FP_FROM_INT_D (DR, XR, 64, long); break;
d                 508 arch/sparc/math-emu/math_64.c 			case 2: rd->d = XR; break;
d                  77 arch/sparc/math-emu/sfp-util_32.h #define udiv_qrnnd(q, r, n1, n0, d) \
d                 104 arch/sparc/math-emu/sfp-util_32.h 	   : "r" ((USItype)(d)),					\
d                  76 arch/sparc/math-emu/sfp-util_64.h #define udiv_qrnnd(q, r, n1, n0, d) 			\
d                  79 arch/sparc/math-emu/sfp-util_64.h     __d1 = (d >> 32);                                   \
d                  80 arch/sparc/math-emu/sfp-util_64.h     __d0 = (USItype)d;                                  \
d                  88 arch/sparc/math-emu/sfp-util_64.h         __q1--, __r1 += (d);                            \
d                  89 arch/sparc/math-emu/sfp-util_64.h         if (__r1 >= (d)) /* i.e. we didn't get carry when adding to __r1 */ \
d                  91 arch/sparc/math-emu/sfp-util_64.h             __q1--, __r1 += (d);                        \
d                 101 arch/sparc/math-emu/sfp-util_64.h         __q0--, __r0 += (d);                            \
d                 102 arch/sparc/math-emu/sfp-util_64.h         if (__r0 >= (d))                                \
d                 104 arch/sparc/math-emu/sfp-util_64.h             __q0--, __r0 += (d);                        \
d                 233 arch/sparc/prom/tree_32.c 	char *s = name, *d;
d                 242 arch/sparc/prom/tree_32.c 		for (d = nbuf; *s != 0 && *s != '@' && *s != '/';)
d                 243 arch/sparc/prom/tree_32.c 			*d++ = *s++;
d                 244 arch/sparc/prom/tree_32.c 		*d = 0;
d                 253 arch/sparc/prom/tree_32.c 				phys_addr = simple_strtoul(s+3, &d, 16);
d                 254 arch/sparc/prom/tree_32.c 				if (d != s + 3 && (!*d || *d == '/')
d                 255 arch/sparc/prom/tree_32.c 				    && d <= s + 3 + 8) {
d                  50 arch/um/drivers/fd.c static int fd_open(int input, int output, int primary, void *d, char **dev_out)
d                  52 arch/um/drivers/fd.c 	struct fd_chan *data = d;
d                  69 arch/um/drivers/fd.c static void fd_close(int fd, void *d)
d                  71 arch/um/drivers/fd.c 	struct fd_chan *data = d;
d                  17 arch/um/drivers/harddog_user.c static void pre_exec(void *d)
d                  19 arch/um/drivers/harddog_user.c 	struct dog_data *data = d;
d                 400 arch/um/drivers/line.c 			struct device *d = tty_port_register_device(&line->port,
d                 402 arch/um/drivers/line.c 			if (IS_ERR(d)) {
d                 404 arch/um/drivers/line.c 				err = PTR_ERR(d);
d                 834 arch/um/drivers/net_kern.c void iter_addresses(void *d, void (*cb)(unsigned char *, unsigned char *,
d                 838 arch/um/drivers/net_kern.c 	struct net_device *dev = d;
d                 853 arch/um/drivers/net_kern.c int dev_netmask(void *d, void *m)
d                 855 arch/um/drivers/net_kern.c 	struct net_device *dev = d;
d                  20 arch/um/drivers/null.c static int null_open(int input, int output, int primary, void *d,
d                  11 arch/um/drivers/port.h extern void port_kern_close(void *d);
d                  15 arch/um/drivers/port.h extern void port_kern_free(void *d);
d                  17 arch/um/drivers/port.h extern void port_remove_dev(void *d);
d                 271 arch/um/drivers/port_kern.c void port_remove_dev(void *d)
d                 273 arch/um/drivers/port_kern.c 	struct port_dev *dev = d;
d                 283 arch/um/drivers/port_kern.c void port_kern_free(void *d)
d                 285 arch/um/drivers/port_kern.c 	struct port_dev *dev = d;
d                  62 arch/um/drivers/port_user.c static void port_free(void *d)
d                  64 arch/um/drivers/port_user.c 	struct port_chan *data = d;
d                  70 arch/um/drivers/port_user.c static int port_open(int input, int output, int primary, void *d,
d                  73 arch/um/drivers/port_user.c 	struct port_chan *data = d;
d                  90 arch/um/drivers/port_user.c static void port_close(int fd, void *d)
d                  92 arch/um/drivers/port_user.c 	struct port_chan *data = d;
d                  40 arch/um/drivers/pty.c static int pts_open(int input, int output, int primary, void *d,
d                  43 arch/um/drivers/pty.c 	struct pty_chan *data = d;
d                 113 arch/um/drivers/pty.c static int pty_open(int input, int output, int primary, void *d,
d                 116 arch/um/drivers/pty.c 	struct pty_chan *data = d;
d                  47 arch/um/drivers/slip_common.h static inline int slip_esc(unsigned char *s, unsigned char *d, int len)
d                  49 arch/um/drivers/slip_common.h 	unsigned char *ptr = d;
d                  81 arch/um/drivers/slip_common.h 	return (ptr - d);
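slip_esc() above byte-stuffs a buffer using the SLIP escape rules. A standalone sketch of that escaping step, using the special byte values from RFC 1055 (END 0xC0, ESC 0xDB, ESC_END 0xDC, ESC_ESC 0xDD); the exact framing and bounds handling of the kernel helper are simplified here:

	#include <stdio.h>

	#define SLIP_END	0xC0
	#define SLIP_ESC	0xDB
	#define SLIP_ESC_END	0xDC
	#define SLIP_ESC_ESC	0xDD

	/* Copy s into d, stuffing escapes, and return the encoded length.
	 * A real encoder must also bound-check d; omitted for brevity. */
	static int slip_esc_sketch(const unsigned char *s, unsigned char *d, int len)
	{
		unsigned char *ptr = d;

		while (len-- > 0) {
			switch (*s) {
			case SLIP_END:
				*ptr++ = SLIP_ESC;
				*ptr++ = SLIP_ESC_END;
				break;
			case SLIP_ESC:
				*ptr++ = SLIP_ESC;
				*ptr++ = SLIP_ESC_ESC;
				break;
			default:
				*ptr++ = *s;
			}
			s++;
		}
		*ptr++ = SLIP_END;	/* frame terminator, per RFC 1055 */
		return ptr - d;
	}

	int main(void)
	{
		unsigned char in[] = { 0x01, 0xC0, 0x02 }, out[16];
		int n = slip_esc_sketch(in, out, sizeof(in));

		for (int i = 0; i < n; i++)
			printf("%02x ", out[i]);
		printf("\n");		/* 01 db dc 02 c0 */
		return 0;
	}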
d                  39 arch/um/drivers/tty.c static int tty_open(int input, int output, int primary, void *d,
d                  42 arch/um/drivers/tty.c 	struct tty_chan *data = d;
d                 949 arch/um/drivers/virtio_uml.c static void virtio_uml_release_dev(struct device *d)
d                 952 arch/um/drivers/virtio_uml.c 			container_of(d, struct virtio_device, dev);
d                1025 arch/um/drivers/virtio_uml.c static void vu_cmdline_release_dev(struct device *d)
d                  84 arch/um/drivers/xterm.c static int xterm_open(int input, int output, int primary, void *d,
d                  87 arch/um/drivers/xterm.c 	struct xterm_chan *data = d;
d                 197 arch/um/drivers/xterm.c static void xterm_close(int fd, void *d)
d                 199 arch/um/drivers/xterm.c 	struct xterm_chan *data = d;
d                  28 arch/um/include/shared/net_user.h extern void iter_addresses(void *d, void (*cb)(unsigned char *,
d                  51 arch/um/include/shared/net_user.h extern int dev_netmask(void *d, void *m);
d                 453 arch/um/kernel/irq.c static void dummy(struct irq_data *d)
d                  43 arch/unicore32/boot/compressed/misc.c 	unsigned char *d = (unsigned char *)dest, *s = (unsigned char *)src;
d                  46 arch/unicore32/boot/compressed/misc.c 		*d++ = *s++;
d                  47 arch/unicore32/boot/compressed/misc.c 		*d++ = *s++;
d                  48 arch/unicore32/boot/compressed/misc.c 		*d++ = *s++;
d                  49 arch/unicore32/boot/compressed/misc.c 		*d++ = *s++;
d                  50 arch/unicore32/boot/compressed/misc.c 		*d++ = *s++;
d                  51 arch/unicore32/boot/compressed/misc.c 		*d++ = *s++;
d                  52 arch/unicore32/boot/compressed/misc.c 		*d++ = *s++;
d                  53 arch/unicore32/boot/compressed/misc.c 		*d++ = *s++;
d                  57 arch/unicore32/boot/compressed/misc.c 		*d++ = *s++;
d                  58 arch/unicore32/boot/compressed/misc.c 		*d++ = *s++;
d                  59 arch/unicore32/boot/compressed/misc.c 		*d++ = *s++;
d                  60 arch/unicore32/boot/compressed/misc.c 		*d++ = *s++;
d                  64 arch/unicore32/boot/compressed/misc.c 		*d++ = *s++;
d                  65 arch/unicore32/boot/compressed/misc.c 		*d++ = *s++;
d                  69 arch/unicore32/boot/compressed/misc.c 		*d++ = *s++;
d                  40 arch/unicore32/kernel/irq.c static int puv3_gpio_type(struct irq_data *d, unsigned int type)
d                  44 arch/unicore32/kernel/irq.c 	if (d->irq < IRQ_GPIOHIGH)
d                  45 arch/unicore32/kernel/irq.c 		mask = 1 << d->irq;
d                  47 arch/unicore32/kernel/irq.c 		mask = GPIO_MASK(d->irq);
d                  73 arch/unicore32/kernel/irq.c static void puv3_low_gpio_ack(struct irq_data *d)
d                  75 arch/unicore32/kernel/irq.c 	writel((1 << d->irq), GPIO_GEDR);
d                  78 arch/unicore32/kernel/irq.c static void puv3_low_gpio_mask(struct irq_data *d)
d                  80 arch/unicore32/kernel/irq.c 	writel(readl(INTC_ICMR) & ~(1 << d->irq), INTC_ICMR);
d                  83 arch/unicore32/kernel/irq.c static void puv3_low_gpio_unmask(struct irq_data *d)
d                  85 arch/unicore32/kernel/irq.c 	writel(readl(INTC_ICMR) | (1 << d->irq), INTC_ICMR);
d                  88 arch/unicore32/kernel/irq.c static int puv3_low_gpio_wake(struct irq_data *d, unsigned int on)
d                  91 arch/unicore32/kernel/irq.c 		writel(readl(PM_PWER) | (1 << d->irq), PM_PWER);
d                  93 arch/unicore32/kernel/irq.c 		writel(readl(PM_PWER) & ~(1 << d->irq), PM_PWER);
d                 139 arch/unicore32/kernel/irq.c static void puv3_high_gpio_ack(struct irq_data *d)
d                 141 arch/unicore32/kernel/irq.c 	unsigned int mask = GPIO_MASK(d->irq);
d                 146 arch/unicore32/kernel/irq.c static void puv3_high_gpio_mask(struct irq_data *d)
d                 148 arch/unicore32/kernel/irq.c 	unsigned int mask = GPIO_MASK(d->irq);
d                 156 arch/unicore32/kernel/irq.c static void puv3_high_gpio_unmask(struct irq_data *d)
d                 158 arch/unicore32/kernel/irq.c 	unsigned int mask = GPIO_MASK(d->irq);
d                 166 arch/unicore32/kernel/irq.c static int puv3_high_gpio_wake(struct irq_data *d, unsigned int on)
d                 188 arch/unicore32/kernel/irq.c static void puv3_mask_irq(struct irq_data *d)
d                 190 arch/unicore32/kernel/irq.c 	writel(readl(INTC_ICMR) & ~(1 << d->irq), INTC_ICMR);
d                 193 arch/unicore32/kernel/irq.c static void puv3_unmask_irq(struct irq_data *d)
d                 195 arch/unicore32/kernel/irq.c 	writel(readl(INTC_ICMR) | (1 << d->irq), INTC_ICMR);
d                 201 arch/unicore32/kernel/irq.c static int puv3_set_wake(struct irq_data *d, unsigned int on)
d                 203 arch/unicore32/kernel/irq.c 	if (d->irq == IRQ_RTCAlarm) {
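The puv3, xics and xive entries above are all irq_chip callbacks: each hook receives a struct irq_data *d and derives the hardware line and per-chip state from it. A sketch of that shape, not buildable outside a kernel tree; the demo_* names, the private struct and the mask-register convention are invented, only the irq_data accessors and irq_chip fields are real kernel API:

	#include <linux/bits.h>
	#include <linux/io.h>
	#include <linux/irq.h>

	/* Per-chip state; invented for the sketch. */
	struct demo_chip_priv {
		void __iomem *mask_reg;		/* a set bit masks the line */
	};

	/* Assumes hw_irq numbers below 32 so they fit one 32-bit register. */
	static void demo_irq_mask(struct irq_data *d)
	{
		struct demo_chip_priv *p = irq_data_get_irq_chip_data(d);
		irq_hw_number_t hw = irqd_to_hwirq(d);

		writel(readl(p->mask_reg) | BIT(hw), p->mask_reg);
	}

	static void demo_irq_unmask(struct irq_data *d)
	{
		struct demo_chip_priv *p = irq_data_get_irq_chip_data(d);
		irq_hw_number_t hw = irqd_to_hwirq(d);

		writel(readl(p->mask_reg) & ~BIT(hw), p->mask_reg);
	}

	static struct irq_chip demo_chip = {
		.name		= "demo",
		.irq_mask	= demo_irq_mask,
		.irq_unmask	= demo_irq_unmask,
	};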
d                  77 arch/unicore32/kernel/stacktrace.c static int save_trace(struct stackframe *frame, void *d)
d                  79 arch/unicore32/kernel/stacktrace.c 	struct stack_trace_data *data = d;
d                 525 arch/x86/boot/compressed/eboot.c 		efi_memory_desc_t *d;
d                 533 arch/x86/boot/compressed/eboot.c 		d = efi_early_memdesc_ptr(m, efi->efi_memdesc_size, i);
d                 534 arch/x86/boot/compressed/eboot.c 		switch (d->type) {
d                 574 arch/x86/boot/compressed/eboot.c 		    (prev->addr + prev->size) == d->phys_addr) {
d                 575 arch/x86/boot/compressed/eboot.c 			prev->size += d->num_pages << 12;
d                 590 arch/x86/boot/compressed/eboot.c 		entry->addr = d->phys_addr;
d                 591 arch/x86/boot/compressed/eboot.c 		entry->size = d->num_pages << PAGE_SHIFT;
d                 852 arch/x86/boot/compressed/eboot.c 		desc->d		= SEG_OP_SIZE_32BIT;
d                 875 arch/x86/boot/compressed/eboot.c 		desc->d = 0;
d                 878 arch/x86/boot/compressed/eboot.c 		desc->d = SEG_OP_SIZE_32BIT;
d                 895 arch/x86/boot/compressed/eboot.c 	desc->d		= SEG_OP_SIZE_32BIT;
d                 912 arch/x86/boot/compressed/eboot.c 		desc->d		= 0;
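The eboot lines above fold an EFI memory descriptor into the previous e820-style entry when the previous range ends exactly where the new one starts (the real code also matches further attributes that the listing does not show). A minimal userspace sketch of that coalescing step, with invented types:

	#include <stdint.h>
	#include <stdio.h>

	struct range { uint64_t addr, size; };

	/* Merge adjacent ranges; returns the number of output entries. */
	static size_t coalesce(struct range *out, const struct range *in, size_t n)
	{
		size_t m = 0;

		for (size_t i = 0; i < n; i++) {
			if (m && out[m - 1].addr + out[m - 1].size == in[i].addr)
				out[m - 1].size += in[i].size;	/* extend previous */
			else
				out[m++] = in[i];		/* start a new entry */
		}
		return m;
	}

	int main(void)
	{
		struct range in[] = {
			{ 0x1000, 0x1000 }, { 0x2000, 0x3000 }, { 0x8000, 0x1000 },
		};
		struct range out[3];
		size_t m = coalesce(out, in, 3);

		for (size_t i = 0; i < m; i++)	/* prints two entries */
			printf("%#llx+%#llx\n",
			       (unsigned long long)out[i].addr,
			       (unsigned long long)out[i].size);
		return 0;
	}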
d                  55 arch/x86/boot/compressed/string.c 	unsigned char *d = dest;
d                  58 arch/x86/boot/compressed/string.c 	if (d <= s || d - s >= n)
d                  62 arch/x86/boot/compressed/string.c 		d[n] = s[n];
d                  56 arch/x86/boot/cpucheck.c #define A32(a, b, c, d) (((d) << 24)+((c) << 16)+((b) << 8)+(a))
d                  75 arch/x86/boot/cpuflags.c 		u32 *a, u32 *b, u32 *c, u32 *d)
d                  80 arch/x86/boot/cpuflags.c 		    : "=a" (*a), "=c" (*c), "=d" (*d), EBX_REG (*b)
d                  85 arch/x86/boot/cpuflags.c #define cpuid(id, a, b, c, d) cpuid_count(id, 0, a, b, c, d)
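cpuid_count() above wraps the CPUID instruction for the boot stub. Outside that environment the same leaves can be read with the compiler-provided <cpuid.h> helper; a small example that dumps leaf 0 (vendor string order EBX, EDX, ECX):

	#include <cpuid.h>
	#include <stdio.h>
	#include <string.h>

	int main(void)
	{
		unsigned int a, b, c, d;
		char vendor[13];

		/* Leaf 0: EAX = max basic leaf, EBX/EDX/ECX = vendor string. */
		if (!__get_cpuid(0, &a, &b, &c, &d))
			return 1;

		memcpy(vendor + 0, &b, 4);
		memcpy(vendor + 4, &d, 4);
		memcpy(vendor + 8, &c, 4);
		vendor[12] = '\0';

		printf("max leaf %u, vendor \"%s\"\n", a, vendor);
		return 0;
	}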
d                 207 arch/x86/boot/string.c 	} d = { dividend };
d                 210 arch/x86/boot/string.c 	upper = d.v32[1];
d                 211 arch/x86/boot/string.c 	d.v32[1] = 0;
d                 213 arch/x86/boot/string.c 		d.v32[1] = upper / divisor;
d                 216 arch/x86/boot/string.c 	asm ("divl %2" : "=a" (d.v32[0]), "=d" (*remainder) :
d                 217 arch/x86/boot/string.c 		"rm" (divisor), "0" (d.v32[0]), "1" (upper));
d                 218 arch/x86/boot/string.c 	return d.v64;
d                  18 arch/x86/boot/string.h #define memcpy(d,s,l) __builtin_memcpy(d,s,l)
d                  19 arch/x86/boot/string.h #define memset(d,c,l) __builtin_memset(d,c,l)
d                 313 arch/x86/crypto/glue_helper.c 		struct scatterlist s[2], d[2];
d                 318 arch/x86/crypto/glue_helper.c 			dst = scatterwalk_ffwd(d, req->dst, req->cryptlen);
d                2744 arch/x86/events/intel/core.c 	struct event_constraint *c = NULL, *d;
d                2755 arch/x86/events/intel/core.c 		d = __intel_shared_reg_get_constraints(cpuc, event, breg);
d                2756 arch/x86/events/intel/core.c 		if (d == &emptyconstraint) {
d                2758 arch/x86/events/intel/core.c 			c = d;
d                 818 arch/x86/events/intel/uncore_snb.c #define IMC_DEV(a, d) \
d                 819 arch/x86/events/intel/uncore_snb.c 	{ .pci_id = PCI_DEVICE_ID_INTEL_##a, .driver = (d) }
d                  32 arch/x86/include/asm/desc.h 	desc->d			= info->seg_32bit;
d                 165 arch/x86/include/asm/desc.h static inline void set_tssldt_descriptor(void *d, unsigned long addr,
d                 168 arch/x86/include/asm/desc.h 	struct ldttss_desc *desc = d;
d                 186 arch/x86/include/asm/desc.h 	struct desc_struct *d = get_cpu_gdt_rw(cpu);
d                 191 arch/x86/include/asm/desc.h 	write_gdt_entry(d, entry, &tss, DESC_TSS);
d                 289 arch/x86/include/asm/desc.h 	struct desc_struct *d = get_current_gdt_rw();
d                 292 arch/x86/include/asm/desc.h 	memcpy(&tss, &d[GDT_ENTRY_TSS], sizeof(tss_desc));
d                 299 arch/x86/include/asm/desc.h 	write_gdt_entry(d, GDT_ENTRY_TSS, &tss, DESC_TSS);
d                  20 arch/x86/include/asm/desc_defs.h 	u16	limit1: 4, avl: 1, l: 1, d: 1, g: 1, base2: 8;
d                  36 arch/x86/include/asm/desc_defs.h 		.d		= (flags >> 14) & 0x01,		\
d                  48 arch/x86/include/asm/div64.h 	} d = { dividend };
d                  51 arch/x86/include/asm/div64.h 	upper = d.v32[1];
d                  52 arch/x86/include/asm/div64.h 	d.v32[1] = 0;
d                  54 arch/x86/include/asm/div64.h 		d.v32[1] = upper / divisor;
d                  57 arch/x86/include/asm/div64.h 	asm ("divl %2" : "=a" (d.v32[0]), "=d" (*remainder) :
d                  58 arch/x86/include/asm/div64.h 		"rm" (divisor), "0" (d.v32[0]), "1" (upper));
d                  59 arch/x86/include/asm/div64.h 	return d.v64;
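Both divide-by-32-bit helpers above (boot/string.c and div64.h) use the same trick: divide the upper half first so its remainder keeps the second, hardware divide within range. A plain-C restatement of that pattern, assuming a 64-bit integer type (the function name is made up):

	#include <stdint.h>
	#include <stdio.h>

	static uint64_t div64_32_sketch(uint64_t dividend, uint32_t divisor,
					uint32_t *remainder)
	{
		uint32_t lo = (uint32_t)dividend;
		uint32_t hi = (uint32_t)(dividend >> 32);
		uint32_t q_hi = 0;

		if (hi >= divisor) {
			q_hi = hi / divisor;
			hi  %= divisor;	/* now hi < divisor, as divl requires */
		}

		/* Second step: divide (hi:lo) by divisor in one go. */
		uint64_t n = ((uint64_t)hi << 32) | lo;

		*remainder = (uint32_t)(n % divisor);
		return ((uint64_t)q_hi << 32) | (uint32_t)(n / divisor);
	}

	int main(void)
	{
		uint32_t rem;
		uint64_t q = div64_32_sketch(0x123456789abcdef0ULL, 1000000007u, &rem);

		printf("q=%llu rem=%u\n", (unsigned long long)q, rem);
		return 0;
	}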
d                  64 arch/x86/include/asm/dwarf2.h .macro cfi_ignore a=0, b=0, c=0, d=0
d                   8 arch/x86/include/asm/invpcid.h 	struct { u64 d[2]; } desc = { { pcid, addr } };
d                 345 arch/x86/include/asm/kvm_emulate.h 	u64 d;
d                  83 arch/x86/include/asm/microcode.h #define QCHAR(a, b, c, d) ((a) + ((b) << 8) + ((c) << 16) + ((d) << 24))
d                 126 arch/x86/include/asm/mpspec.h #define physids_shift_right(d, s, n)				\
d                 127 arch/x86/include/asm/mpspec.h 	bitmap_shift_right((d).mask, (s).mask, n, MAX_LOCAL_APIC)
d                 129 arch/x86/include/asm/mpspec.h #define physids_shift_left(d, s, n)				\
d                 130 arch/x86/include/asm/mpspec.h 	bitmap_shift_left((d).mask, (s).mask, n, MAX_LOCAL_APIC)
d                1322 arch/x86/kernel/acpi/boot.c static int __init disable_acpi_irq(const struct dmi_system_id *d)
d                1326 arch/x86/kernel/acpi/boot.c 		       d->ident);
d                1332 arch/x86/kernel/acpi/boot.c static int __init disable_acpi_pci(const struct dmi_system_id *d)
d                1336 arch/x86/kernel/acpi/boot.c 		       d->ident);
d                1342 arch/x86/kernel/acpi/boot.c static int __init dmi_disable_acpi(const struct dmi_system_id *d)
d                1345 arch/x86/kernel/acpi/boot.c 		printk(KERN_NOTICE "%s detected: acpi off\n", d->ident);
d                1357 arch/x86/kernel/acpi/boot.c static int __init dmi_ignore_irq0_timer_override(const struct dmi_system_id *d)
d                1361 arch/x86/kernel/acpi/boot.c 			d->ident);
d                2762 arch/x86/kernel/apic/apic.c static int set_multi(const struct dmi_system_id *d)
d                2766 arch/x86/kernel/apic/apic.c 	pr_info("APIC: %s detected, Multi Chassis\n", d->ident);
d                  87 arch/x86/kernel/apic/bigsmp_32.c static int hp_ht_bigsmp(const struct dmi_system_id *d)
d                  89 arch/x86/kernel/apic/bigsmp_32.c 	printk(KERN_NOTICE "%s detected: force use of apic=bigsmp\n", d->ident);
d                 297 arch/x86/kernel/apic/msi.c 	struct irq_domain *d;
d                 302 arch/x86/kernel/apic/msi.c 	d = pci_msi_create_irq_domain(fn, &pci_msi_ir_domain_info, parent);
d                 304 arch/x86/kernel/apic/msi.c 	return d;
d                 460 arch/x86/kernel/apic/msi.c 	struct irq_domain *parent, *d;
d                 490 arch/x86/kernel/apic/msi.c 	d = msi_create_irq_domain(fn, domain_info, parent);
d                 492 arch/x86/kernel/apic/msi.c 	return d;
d                 588 arch/x86/kernel/apic/vector.c static void x86_vector_debug_show(struct seq_file *m, struct irq_domain *d,
d                1923 arch/x86/kernel/apm_32.c static int __init print_if_true(const struct dmi_system_id *d)
d                1925 arch/x86/kernel/apm_32.c 	printk("%s\n", d->ident);
d                1933 arch/x86/kernel/apm_32.c static int __init broken_ps2_resume(const struct dmi_system_id *d)
d                1936 arch/x86/kernel/apm_32.c 	       "workaround hopefully not needed.\n", d->ident);
d                1941 arch/x86/kernel/apm_32.c static int __init set_realmode_power_off(const struct dmi_system_id *d)
d                1946 arch/x86/kernel/apm_32.c 		       "Using realmode poweroff only.\n", d->ident);
d                1952 arch/x86/kernel/apm_32.c static int __init set_apm_ints(const struct dmi_system_id *d)
d                1957 arch/x86/kernel/apm_32.c 		       "Enabling interrupts during APM calls.\n", d->ident);
d                1963 arch/x86/kernel/apm_32.c static int __init apm_is_horked(const struct dmi_system_id *d)
d                1968 arch/x86/kernel/apm_32.c 		       "Disabling APM.\n", d->ident);
d                1973 arch/x86/kernel/apm_32.c static int __init apm_is_horked_d850md(const struct dmi_system_id *d)
d                1978 arch/x86/kernel/apm_32.c 		       "Disabling APM.\n", d->ident);
d                1986 arch/x86/kernel/apm_32.c static int __init apm_likes_to_melt(const struct dmi_system_id *d)
d                1991 arch/x86/kernel/apm_32.c 		       "Disabling APM idle calls.\n", d->ident);
d                2011 arch/x86/kernel/apm_32.c static int __init broken_apm_power(const struct dmi_system_id *d)
d                2023 arch/x86/kernel/apm_32.c static int __init swab_apm_power_in_minutes(const struct dmi_system_id *d)
d                 135 arch/x86/kernel/cpu/amd.c 		u64 d, d2;
d                 147 arch/x86/kernel/cpu/amd.c 		d = rdtsc();
d                 151 arch/x86/kernel/cpu/amd.c 		d = d2-d;
d                 153 arch/x86/kernel/cpu/amd.c 		if (d > 20*K6_BUG_LOOP)
d                1827 arch/x86/kernel/cpu/common.c 	struct desc_struct d = { };
d                1833 arch/x86/kernel/cpu/common.c 	d.limit0 = cpudata;
d                1834 arch/x86/kernel/cpu/common.c 	d.limit1 = cpudata >> 16;
d                1836 arch/x86/kernel/cpu/common.c 	d.type = 5;		/* RO data, expand down, accessed */
d                1837 arch/x86/kernel/cpu/common.c 	d.dpl = 3;		/* Visible to user code */
d                1838 arch/x86/kernel/cpu/common.c 	d.s = 1;		/* Not a system segment */
d                1839 arch/x86/kernel/cpu/common.c 	d.p = 1;		/* Present */
d                1840 arch/x86/kernel/cpu/common.c 	d.d = 1;		/* 32-bit */
d                1842 arch/x86/kernel/cpu/common.c 	write_gdt_entry(get_cpu_gdt_rw(cpu), GDT_ENTRY_CPUNODE, &d, DESCTYPE_S);
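setup_getcpu() above fills a desc_struct field by field; the bitfield order in desc_defs.h (limit, base, type, S, DPL, P, AVL, L, D/B, G) is the architectural segment-descriptor layout. An illustrative userspace helper that packs the same fields into a raw 8-byte descriptor (the helper and its argument order are invented):

	#include <stdint.h>
	#include <stdio.h>

	static uint64_t pack_descriptor(uint32_t base, uint32_t limit,
					uint8_t type, uint8_t dpl,
					int s, int p, int l, int db, int g)
	{
		uint64_t desc = 0;

		desc |= (uint64_t)(limit & 0xffff);		/* limit 15:0   */
		desc |= (uint64_t)(base & 0xffffff) << 16;	/* base 23:0    */
		desc |= (uint64_t)(type & 0xf) << 40;		/* type         */
		desc |= (uint64_t)(s & 1) << 44;		/* S: code/data */
		desc |= (uint64_t)(dpl & 3) << 45;		/* DPL          */
		desc |= (uint64_t)(p & 1) << 47;		/* present      */
		desc |= (uint64_t)((limit >> 16) & 0xf) << 48;	/* limit 19:16  */
		desc |= (uint64_t)(l & 1) << 53;		/* L: 64-bit    */
		desc |= (uint64_t)(db & 1) << 54;		/* D/B          */
		desc |= (uint64_t)(g & 1) << 55;		/* granularity  */
		desc |= (uint64_t)((base >> 24) & 0xff) << 56;	/* base 31:24   */
		return desc;
	}

	int main(void)
	{
		/* RO data, expand-down, accessed (type 5), DPL 3, 32-bit,
		 * mirroring the setup_getcpu() entry in the listing. */
		printf("%#llx\n",
		       (unsigned long long)pack_descriptor(0, 0x12345, 5, 3,
							   1, 1, 0, 1, 0));
		return 0;
	}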
d                  96 arch/x86/kernel/cpu/cpuid-deps.c 	const struct cpuid_dep *d;
d                 111 arch/x86/kernel/cpu/cpuid-deps.c 		for (d = cpuid_deps; d->feature; d++) {
d                 112 arch/x86/kernel/cpu/cpuid-deps.c 			if (!test_bit(d->depends, disable))
d                 114 arch/x86/kernel/cpu/cpuid-deps.c 			if (__test_and_set_bit(d->feature, disable))
d                 118 arch/x86/kernel/cpu/cpuid-deps.c 			clear_feature(c, d->feature);
d                  52 arch/x86/kernel/cpu/resctrl/core.c mba_wrmsr_intel(struct rdt_domain *d, struct msr_param *m,
d                  55 arch/x86/kernel/cpu/resctrl/core.c cat_wrmsr(struct rdt_domain *d, struct msr_param *m, struct rdt_resource *r);
d                  57 arch/x86/kernel/cpu/resctrl/core.c mba_wrmsr_amd(struct rdt_domain *d, struct msr_param *m,
d                 363 arch/x86/kernel/cpu/resctrl/core.c mba_wrmsr_amd(struct rdt_domain *d, struct msr_param *m, struct rdt_resource *r)
d                 368 arch/x86/kernel/cpu/resctrl/core.c 		wrmsrl(r->msr_base + i, d->ctrl_val[i]);
d                 386 arch/x86/kernel/cpu/resctrl/core.c mba_wrmsr_intel(struct rdt_domain *d, struct msr_param *m,
d                 393 arch/x86/kernel/cpu/resctrl/core.c 		wrmsrl(r->msr_base + i, delay_bw_map(d->ctrl_val[i], r));
d                 397 arch/x86/kernel/cpu/resctrl/core.c cat_wrmsr(struct rdt_domain *d, struct msr_param *m, struct rdt_resource *r)
d                 402 arch/x86/kernel/cpu/resctrl/core.c 		wrmsrl(r->msr_base + cbm_idx(r, i), d->ctrl_val[i]);
d                 407 arch/x86/kernel/cpu/resctrl/core.c 	struct rdt_domain *d;
d                 409 arch/x86/kernel/cpu/resctrl/core.c 	list_for_each_entry(d, &r->domains, list) {
d                 411 arch/x86/kernel/cpu/resctrl/core.c 		if (cpumask_test_cpu(cpu, &d->cpu_mask))
d                 412 arch/x86/kernel/cpu/resctrl/core.c 			return d;
d                 423 arch/x86/kernel/cpu/resctrl/core.c 	struct rdt_domain *d;
d                 425 arch/x86/kernel/cpu/resctrl/core.c 	d = get_domain_from_cpu(cpu, r);
d                 426 arch/x86/kernel/cpu/resctrl/core.c 	if (d) {
d                 427 arch/x86/kernel/cpu/resctrl/core.c 		r->msr_update(d, m, r);
d                 445 arch/x86/kernel/cpu/resctrl/core.c 	struct rdt_domain *d;
d                 452 arch/x86/kernel/cpu/resctrl/core.c 		d = list_entry(l, struct rdt_domain, list);
d                 454 arch/x86/kernel/cpu/resctrl/core.c 		if (id == d->id)
d                 455 arch/x86/kernel/cpu/resctrl/core.c 			return d;
d                 457 arch/x86/kernel/cpu/resctrl/core.c 		if (id < d->id)
d                 483 arch/x86/kernel/cpu/resctrl/core.c static int domain_setup_ctrlval(struct rdt_resource *r, struct rdt_domain *d)
d                 488 arch/x86/kernel/cpu/resctrl/core.c 	dc = kmalloc_array(r->num_closid, sizeof(*d->ctrl_val), GFP_KERNEL);
d                 492 arch/x86/kernel/cpu/resctrl/core.c 	dm = kmalloc_array(r->num_closid, sizeof(*d->mbps_val), GFP_KERNEL);
d                 498 arch/x86/kernel/cpu/resctrl/core.c 	d->ctrl_val = dc;
d                 499 arch/x86/kernel/cpu/resctrl/core.c 	d->mbps_val = dm;
d                 504 arch/x86/kernel/cpu/resctrl/core.c 	r->msr_update(d, &m, r);
d                 508 arch/x86/kernel/cpu/resctrl/core.c static int domain_setup_mon_state(struct rdt_resource *r, struct rdt_domain *d)
d                 513 arch/x86/kernel/cpu/resctrl/core.c 		d->rmid_busy_llc = bitmap_zalloc(r->num_rmid, GFP_KERNEL);
d                 514 arch/x86/kernel/cpu/resctrl/core.c 		if (!d->rmid_busy_llc)
d                 516 arch/x86/kernel/cpu/resctrl/core.c 		INIT_DELAYED_WORK(&d->cqm_limbo, cqm_handle_limbo);
d                 519 arch/x86/kernel/cpu/resctrl/core.c 		tsize = sizeof(*d->mbm_total);
d                 520 arch/x86/kernel/cpu/resctrl/core.c 		d->mbm_total = kcalloc(r->num_rmid, tsize, GFP_KERNEL);
d                 521 arch/x86/kernel/cpu/resctrl/core.c 		if (!d->mbm_total) {
d                 522 arch/x86/kernel/cpu/resctrl/core.c 			bitmap_free(d->rmid_busy_llc);
d                 527 arch/x86/kernel/cpu/resctrl/core.c 		tsize = sizeof(*d->mbm_local);
d                 528 arch/x86/kernel/cpu/resctrl/core.c 		d->mbm_local = kcalloc(r->num_rmid, tsize, GFP_KERNEL);
d                 529 arch/x86/kernel/cpu/resctrl/core.c 		if (!d->mbm_local) {
d                 530 arch/x86/kernel/cpu/resctrl/core.c 			bitmap_free(d->rmid_busy_llc);
d                 531 arch/x86/kernel/cpu/resctrl/core.c 			kfree(d->mbm_total);
d                 537 arch/x86/kernel/cpu/resctrl/core.c 		INIT_DELAYED_WORK(&d->mbm_over, mbm_handle_overflow);
d                 538 arch/x86/kernel/cpu/resctrl/core.c 		mbm_setup_overflow_handler(d, MBM_OVERFLOW_INTERVAL);
d                 561 arch/x86/kernel/cpu/resctrl/core.c 	struct rdt_domain *d;
d                 563 arch/x86/kernel/cpu/resctrl/core.c 	d = rdt_find_domain(r, id, &add_pos);
d                 564 arch/x86/kernel/cpu/resctrl/core.c 	if (IS_ERR(d)) {
d                 569 arch/x86/kernel/cpu/resctrl/core.c 	if (d) {
d                 570 arch/x86/kernel/cpu/resctrl/core.c 		cpumask_set_cpu(cpu, &d->cpu_mask);
d                 574 arch/x86/kernel/cpu/resctrl/core.c 	d = kzalloc_node(sizeof(*d), GFP_KERNEL, cpu_to_node(cpu));
d                 575 arch/x86/kernel/cpu/resctrl/core.c 	if (!d)
d                 578 arch/x86/kernel/cpu/resctrl/core.c 	d->id = id;
d                 579 arch/x86/kernel/cpu/resctrl/core.c 	cpumask_set_cpu(cpu, &d->cpu_mask);
d                 583 arch/x86/kernel/cpu/resctrl/core.c 	if (r->alloc_capable && domain_setup_ctrlval(r, d)) {
d                 584 arch/x86/kernel/cpu/resctrl/core.c 		kfree(d);
d                 588 arch/x86/kernel/cpu/resctrl/core.c 	if (r->mon_capable && domain_setup_mon_state(r, d)) {
d                 589 arch/x86/kernel/cpu/resctrl/core.c 		kfree(d);
d                 593 arch/x86/kernel/cpu/resctrl/core.c 	list_add_tail(&d->list, add_pos);
d                 600 arch/x86/kernel/cpu/resctrl/core.c 		mkdir_mondata_subdir_allrdtgrp(r, d);
d                 606 arch/x86/kernel/cpu/resctrl/core.c 	struct rdt_domain *d;
d                 608 arch/x86/kernel/cpu/resctrl/core.c 	d = rdt_find_domain(r, id, NULL);
d                 609 arch/x86/kernel/cpu/resctrl/core.c 	if (IS_ERR_OR_NULL(d)) {
d                 614 arch/x86/kernel/cpu/resctrl/core.c 	cpumask_clear_cpu(cpu, &d->cpu_mask);
d                 615 arch/x86/kernel/cpu/resctrl/core.c 	if (cpumask_empty(&d->cpu_mask)) {
d                 621 arch/x86/kernel/cpu/resctrl/core.c 			rmdir_mondata_subdir_allrdtgrp(r, d->id);
d                 622 arch/x86/kernel/cpu/resctrl/core.c 		list_del(&d->list);
d                 624 arch/x86/kernel/cpu/resctrl/core.c 			cancel_delayed_work(&d->mbm_over);
d                 625 arch/x86/kernel/cpu/resctrl/core.c 		if (is_llc_occupancy_enabled() &&  has_busy_rmid(r, d)) {
d                 634 arch/x86/kernel/cpu/resctrl/core.c 			__check_limbo(d, true);
d                 635 arch/x86/kernel/cpu/resctrl/core.c 			cancel_delayed_work(&d->cqm_limbo);
d                 642 arch/x86/kernel/cpu/resctrl/core.c 		if (d->plr)
d                 643 arch/x86/kernel/cpu/resctrl/core.c 			d->plr->d = NULL;
d                 645 arch/x86/kernel/cpu/resctrl/core.c 		kfree(d->ctrl_val);
d                 646 arch/x86/kernel/cpu/resctrl/core.c 		kfree(d->mbps_val);
d                 647 arch/x86/kernel/cpu/resctrl/core.c 		bitmap_free(d->rmid_busy_llc);
d                 648 arch/x86/kernel/cpu/resctrl/core.c 		kfree(d->mbm_total);
d                 649 arch/x86/kernel/cpu/resctrl/core.c 		kfree(d->mbm_local);
d                 650 arch/x86/kernel/cpu/resctrl/core.c 		kfree(d);
d                 655 arch/x86/kernel/cpu/resctrl/core.c 		if (is_mbm_enabled() && cpu == d->mbm_work_cpu) {
d                 656 arch/x86/kernel/cpu/resctrl/core.c 			cancel_delayed_work(&d->mbm_over);
d                 657 arch/x86/kernel/cpu/resctrl/core.c 			mbm_setup_overflow_handler(d, 0);
d                 659 arch/x86/kernel/cpu/resctrl/core.c 		if (is_llc_occupancy_enabled() && cpu == d->cqm_work_cpu &&
d                 660 arch/x86/kernel/cpu/resctrl/core.c 		    has_busy_rmid(r, d)) {
d                 661 arch/x86/kernel/cpu/resctrl/core.c 			cancel_delayed_work(&d->cqm_limbo);
d                 662 arch/x86/kernel/cpu/resctrl/core.c 			cqm_setup_limbo_handler(d, 0);
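rdt_find_domain() in the core.c entries above walks a list kept sorted by domain id, returning a match or recording where a new domain would be inserted. A self-contained sketch of that search-or-insert-position idiom on a plain singly linked list (names and the list type are illustrative, not resctrl's):

	#include <stdio.h>
	#include <stdlib.h>

	struct dom {
		int id;
		struct dom *next;
	};

	/* Return the node with this id, or NULL; on a miss, *ins_pos points
	 * at the link where a new node keeps the list sorted. */
	static struct dom *find_dom(struct dom **head, int id, struct dom ***ins_pos)
	{
		struct dom **link;

		for (link = head; *link; link = &(*link)->next) {
			if ((*link)->id == id)
				return *link;
			if (id < (*link)->id)
				break;		/* sorted list: stop early */
		}
		if (ins_pos)
			*ins_pos = link;
		return NULL;
	}

	int main(void)
	{
		struct dom *head = NULL, **pos;
		int ids[] = { 3, 1, 2 };

		for (unsigned int i = 0; i < 3; i++) {
			if (find_dom(&head, ids[i], &pos))
				continue;	/* already present */
			struct dom *n = malloc(sizeof(*n));
			if (!n)
				return 1;
			n->id = ids[i];
			n->next = *pos;
			*pos = n;		/* splice in, keeping order */
		}

		for (struct dom *d = head; d; d = d->next)
			printf("%d ", d->id);	/* 1 2 3 */
		printf("\n");
		return 0;
	}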
d                  53 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 		 struct rdt_domain *d)
d                  57 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	if (d->have_new_ctrl) {
d                  58 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 		rdt_last_cmd_printf("Duplicate domain %d\n", d->id);
d                  65 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	d->new_ctrl = bw_val;
d                  66 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	d->have_new_ctrl = true;
d                 108 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 		   struct rdt_domain *d)
d                 112 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	if (d->have_new_ctrl) {
d                 113 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 		rdt_last_cmd_printf("Duplicate domain %d\n", d->id);
d                 119 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	d->new_ctrl = bw_val;
d                 120 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	d->have_new_ctrl = true;
d                 195 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	      struct rdt_domain *d)
d                 200 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	if (d->have_new_ctrl) {
d                 201 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 		rdt_last_cmd_printf("Duplicate domain %d\n", d->id);
d                 210 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	    rdtgroup_pseudo_locked_in_hierarchy(d)) {
d                 220 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	    rdtgroup_cbm_overlaps_pseudo_locked(d, cbm_val)) {
d                 229 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	if (rdtgroup_cbm_overlaps(r, d, cbm_val, rdtgrp->closid, true)) {
d                 234 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	if (rdtgroup_cbm_overlaps(r, d, cbm_val, rdtgrp->closid, false)) {
d                 242 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	d->new_ctrl = cbm_val;
d                 243 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	d->have_new_ctrl = true;
d                 259 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	struct rdt_domain *d;
d                 278 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	list_for_each_entry(d, &r->domains, list) {
d                 279 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 		if (d->id == dom_id) {
d                 282 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 			if (r->parse_ctrlval(&data, r, d))
d                 294 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 				rdtgrp->plr->d = d;
d                 295 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 				rdtgrp->plr->cbm = d->new_ctrl;
d                 296 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 				d->plr = rdtgrp->plr;
d                 309 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	struct rdt_domain *d;
d                 322 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	list_for_each_entry(d, &r->domains, list) {
d                 323 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 		dc = !mba_sc ? d->ctrl_val : d->mbps_val;
d                 324 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 		if (d->have_new_ctrl && d->new_ctrl != dc[closid]) {
d                 325 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 			cpumask_set_cpu(cpumask_any(&d->cpu_mask), cpu_mask);
d                 326 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 			dc[closid] = d->new_ctrl;
d                 474 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 			if (!rdtgrp->plr->d) {
d                 481 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 					   rdtgrp->plr->d->id,
d                 498 arch/x86/kernel/cpu/resctrl/ctrlmondata.c void mon_event_read(struct rmid_read *rr, struct rdt_domain *d,
d                 506 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	rr->d = d;
d                 510 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	smp_call_function_any(&d->cpu_mask, mon_event_count, rr, 1);
d                 520 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	struct rdt_domain *d;
d                 536 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	d = rdt_find_domain(r, domid, NULL);
d                 537 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	if (IS_ERR_OR_NULL(d)) {
d                 542 arch/x86/kernel/cpu/resctrl/ctrlmondata.c 	mon_event_read(&rr, d, rdtgrp, evtid, false);
d                  90 arch/x86/kernel/cpu/resctrl/internal.h 	struct rdt_domain	*d;
d                 173 arch/x86/kernel/cpu/resctrl/internal.h 	struct rdt_domain	*d;
d                 449 arch/x86/kernel/cpu/resctrl/internal.h 	void (*msr_update)	(struct rdt_domain *d, struct msr_param *m,
d                 458 arch/x86/kernel/cpu/resctrl/internal.h 			     struct rdt_domain *d);
d                 467 arch/x86/kernel/cpu/resctrl/internal.h 	      struct rdt_domain *d);
d                 469 arch/x86/kernel/cpu/resctrl/internal.h 		   struct rdt_domain *d);
d                 471 arch/x86/kernel/cpu/resctrl/internal.h 		 struct rdt_domain *d);
d                 563 arch/x86/kernel/cpu/resctrl/internal.h bool rdtgroup_cbm_overlaps(struct rdt_resource *r, struct rdt_domain *d,
d                 565 arch/x86/kernel/cpu/resctrl/internal.h unsigned int rdtgroup_cbm_to_size(struct rdt_resource *r, struct rdt_domain *d,
d                 571 arch/x86/kernel/cpu/resctrl/internal.h bool rdtgroup_cbm_overlaps_pseudo_locked(struct rdt_domain *d, unsigned long cbm);
d                 572 arch/x86/kernel/cpu/resctrl/internal.h bool rdtgroup_pseudo_locked_in_hierarchy(struct rdt_domain *d);
d                 589 arch/x86/kernel/cpu/resctrl/internal.h 				    struct rdt_domain *d);
d                 590 arch/x86/kernel/cpu/resctrl/internal.h void mon_event_read(struct rmid_read *rr, struct rdt_domain *d,
d                 600 arch/x86/kernel/cpu/resctrl/internal.h bool has_busy_rmid(struct rdt_resource *r, struct rdt_domain *d);
d                 601 arch/x86/kernel/cpu/resctrl/internal.h void __check_limbo(struct rdt_domain *d, bool force_free);
d                 108 arch/x86/kernel/cpu/resctrl/monitor.c void __check_limbo(struct rdt_domain *d, bool force_free)
d                 123 arch/x86/kernel/cpu/resctrl/monitor.c 		nrmid = find_next_bit(d->rmid_busy_llc, r->num_rmid, crmid);
d                 129 arch/x86/kernel/cpu/resctrl/monitor.c 			clear_bit(entry->rmid, d->rmid_busy_llc);
d                 139 arch/x86/kernel/cpu/resctrl/monitor.c bool has_busy_rmid(struct rdt_resource *r, struct rdt_domain *d)
d                 141 arch/x86/kernel/cpu/resctrl/monitor.c 	return find_first_bit(d->rmid_busy_llc, r->num_rmid) != r->num_rmid;
d                 168 arch/x86/kernel/cpu/resctrl/monitor.c 	struct rdt_domain *d;
d                 176 arch/x86/kernel/cpu/resctrl/monitor.c 	list_for_each_entry(d, &r->domains, list) {
d                 177 arch/x86/kernel/cpu/resctrl/monitor.c 		if (cpumask_test_cpu(cpu, &d->cpu_mask)) {
d                 187 arch/x86/kernel/cpu/resctrl/monitor.c 		if (!has_busy_rmid(r, d))
d                 188 arch/x86/kernel/cpu/resctrl/monitor.c 			cqm_setup_limbo_handler(d, CQM_LIMBOCHECK_INTERVAL);
d                 189 arch/x86/kernel/cpu/resctrl/monitor.c 		set_bit(entry->rmid, d->rmid_busy_llc);
d                 240 arch/x86/kernel/cpu/resctrl/monitor.c 		m = &rr->d->mbm_total[rmid];
d                 243 arch/x86/kernel/cpu/resctrl/monitor.c 		m = &rr->d->mbm_local[rmid];
d                 274 arch/x86/kernel/cpu/resctrl/monitor.c 	struct mbm_state *m = &rr->d->mbm_local[rmid];
d                 436 arch/x86/kernel/cpu/resctrl/monitor.c static void mbm_update(struct rdt_domain *d, int rmid)
d                 441 arch/x86/kernel/cpu/resctrl/monitor.c 	rr.d = d;
d                 475 arch/x86/kernel/cpu/resctrl/monitor.c 	struct rdt_domain *d;
d                 480 arch/x86/kernel/cpu/resctrl/monitor.c 	d = get_domain_from_cpu(cpu, r);
d                 482 arch/x86/kernel/cpu/resctrl/monitor.c 	if (!d) {
d                 487 arch/x86/kernel/cpu/resctrl/monitor.c 	__check_limbo(d, false);
d                 489 arch/x86/kernel/cpu/resctrl/monitor.c 	if (has_busy_rmid(r, d))
d                 490 arch/x86/kernel/cpu/resctrl/monitor.c 		schedule_delayed_work_on(cpu, &d->cqm_limbo, delay);
d                 513 arch/x86/kernel/cpu/resctrl/monitor.c 	struct rdt_domain *d;
d                 520 arch/x86/kernel/cpu/resctrl/monitor.c 	d = get_domain_from_cpu(cpu, &rdt_resources_all[RDT_RESOURCE_L3]);
d                 521 arch/x86/kernel/cpu/resctrl/monitor.c 	if (!d)
d                 525 arch/x86/kernel/cpu/resctrl/monitor.c 		mbm_update(d, prgrp->mon.rmid);
d                 529 arch/x86/kernel/cpu/resctrl/monitor.c 			mbm_update(d, crgrp->mon.rmid);
d                 532 arch/x86/kernel/cpu/resctrl/monitor.c 			update_mba_bw(prgrp, d);
d                 535 arch/x86/kernel/cpu/resctrl/monitor.c 	schedule_delayed_work_on(cpu, &d->mbm_over, delay);
d                 206 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 	for_each_cpu(cpu, &plr->d->cpu_mask) {
d                 250 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 	if (plr->d)
d                 251 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 		plr->d->plr = NULL;
d                 252 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 	plr->d = NULL;
d                 282 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 	plr->cpu = cpumask_first(&plr->d->cpu_mask);
d                 293 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 	plr->size = rdtgroup_cbm_to_size(plr->r, plr->d, plr->cbm);
d                 793 arch/x86/kernel/cpu/resctrl/pseudo_lock.c bool rdtgroup_cbm_overlaps_pseudo_locked(struct rdt_domain *d, unsigned long cbm)
d                 798 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 	if (d->plr) {
d                 799 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 		cbm_len = d->plr->r->cache.cbm_len;
d                 800 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 		cbm_b = d->plr->cbm;
d                 820 arch/x86/kernel/cpu/resctrl/pseudo_lock.c bool rdtgroup_pseudo_locked_in_hierarchy(struct rdt_domain *d)
d                 846 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 	if (cpumask_intersects(&d->cpu_mask, cpu_with_psl))
d                1166 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 	if (!plr->d) {
d                1172 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 	cpu = cpumask_first(&plr->d->cpu_mask);
d                1491 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 	if (!plr->d) {
d                1502 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 	if (!cpumask_subset(current->cpus_ptr, &plr->d->cpu_mask)) {
d                 272 arch/x86/kernel/cpu/resctrl/rdtgroup.c 			if (!rdtgrp->plr->d) {
d                 277 arch/x86/kernel/cpu/resctrl/rdtgroup.c 				mask = &rdtgrp->plr->d->cpu_mask;
d                 992 arch/x86/kernel/cpu/resctrl/rdtgroup.c static int rdt_cdp_peer_get(struct rdt_resource *r, struct rdt_domain *d,
d                1027 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	_d_cdp = rdt_find_domain(_r_cdp, d->id, NULL);
d                1060 arch/x86/kernel/cpu/resctrl/rdtgroup.c static bool __rdtgroup_cbm_overlaps(struct rdt_resource *r, struct rdt_domain *d,
d                1076 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	ctrl = d->ctrl_val;
d                1116 arch/x86/kernel/cpu/resctrl/rdtgroup.c bool rdtgroup_cbm_overlaps(struct rdt_resource *r, struct rdt_domain *d,
d                1122 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	if (__rdtgroup_cbm_overlaps(r, d, cbm, closid, exclusive))
d                1125 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	if (rdt_cdp_peer_get(r, d, &r_cdp, &d_cdp) < 0)
d                1148 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	struct rdt_domain *d;
d                1154 arch/x86/kernel/cpu/resctrl/rdtgroup.c 		list_for_each_entry(d, &r->domains, list) {
d                1155 arch/x86/kernel/cpu/resctrl/rdtgroup.c 			if (rdtgroup_cbm_overlaps(r, d, d->ctrl_val[closid],
d                1259 arch/x86/kernel/cpu/resctrl/rdtgroup.c 				  struct rdt_domain *d, unsigned long cbm)
d                1266 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	ci = get_cpu_cacheinfo(cpumask_any(&d->cpu_mask));
d                1289 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	struct rdt_domain *d;
d                1302 arch/x86/kernel/cpu/resctrl/rdtgroup.c 		if (!rdtgrp->plr->d) {
d                1310 arch/x86/kernel/cpu/resctrl/rdtgroup.c 						    rdtgrp->plr->d,
d                1312 arch/x86/kernel/cpu/resctrl/rdtgroup.c 			seq_printf(s, "%d=%u\n", rdtgrp->plr->d->id, size);
d                1320 arch/x86/kernel/cpu/resctrl/rdtgroup.c 		list_for_each_entry(d, &r->domains, list) {
d                1327 arch/x86/kernel/cpu/resctrl/rdtgroup.c 						d->ctrl_val[rdtgrp->closid] :
d                1328 arch/x86/kernel/cpu/resctrl/rdtgroup.c 						d->mbps_val[rdtgrp->closid]);
d                1332 arch/x86/kernel/cpu/resctrl/rdtgroup.c 					size = rdtgroup_cbm_to_size(r, d, ctrl);
d                1334 arch/x86/kernel/cpu/resctrl/rdtgroup.c 			seq_printf(s, "%d=%u", d->id, size);
d                1741 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	struct rdt_domain *d;
d                1755 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	list_for_each_entry(d, &r_l->domains, list) {
d                1757 arch/x86/kernel/cpu/resctrl/rdtgroup.c 		cpumask_set_cpu(cpumask_any(&d->cpu_mask), cpu_mask);
d                1794 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	struct rdt_domain *d;
d                1801 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	list_for_each_entry(d, &r->domains, list)
d                1802 arch/x86/kernel/cpu/resctrl/rdtgroup.c 		setup_default_ctrlval(r, d->ctrl_val, d->mbps_val);
d                2128 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	struct rdt_domain *d;
d                2143 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	list_for_each_entry(d, &r->domains, list) {
d                2144 arch/x86/kernel/cpu/resctrl/rdtgroup.c 		cpumask_set_cpu(cpumask_any(&d->cpu_mask), cpu_mask);
d                2147 arch/x86/kernel/cpu/resctrl/rdtgroup.c 			d->ctrl_val[i] = r->default_ctrl;
d                2351 arch/x86/kernel/cpu/resctrl/rdtgroup.c 				struct rdt_domain *d,
d                2361 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	sprintf(name, "mon_%s_%02d", r->name, d->id);
d                2382 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	priv.u.domid = d->id;
d                2390 arch/x86/kernel/cpu/resctrl/rdtgroup.c 			mon_event_read(&rr, d, prgrp, mevt->evtid, true);
d                2405 arch/x86/kernel/cpu/resctrl/rdtgroup.c 				    struct rdt_domain *d)
d                2416 arch/x86/kernel/cpu/resctrl/rdtgroup.c 		mkdir_mondata_subdir(parent_kn, d, r, prgrp);
d                2421 arch/x86/kernel/cpu/resctrl/rdtgroup.c 			mkdir_mondata_subdir(parent_kn, d, r, crgrp);
d                2532 arch/x86/kernel/cpu/resctrl/rdtgroup.c static int __init_one_rdt_domain(struct rdt_domain *d, struct rdt_resource *r,
d                2543 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	rdt_cdp_peer_get(r, d, &r_cdp, &d_cdp);
d                2544 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	d->have_new_ctrl = false;
d                2545 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	d->new_ctrl = r->cache.shareable_bits;
d                2547 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	ctrl = d->ctrl_val;
d                2569 arch/x86/kernel/cpu/resctrl/rdtgroup.c 				d->new_ctrl |= *ctrl | peer_ctl;
d                2572 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	if (d->plr && d->plr->cbm > 0)
d                2573 arch/x86/kernel/cpu/resctrl/rdtgroup.c 		used_b |= d->plr->cbm;
d                2576 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	d->new_ctrl |= unused_b;
d                2581 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	d->new_ctrl = cbm_ensure_valid(d->new_ctrl, r);
d                2586 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	tmp_cbm = d->new_ctrl;
d                2588 arch/x86/kernel/cpu/resctrl/rdtgroup.c 		rdt_last_cmd_printf("No space on %s:%d\n", r->name, d->id);
d                2591 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	d->have_new_ctrl = true;
d                2608 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	struct rdt_domain *d;
d                2611 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	list_for_each_entry(d, &r->domains, list) {
d                2612 arch/x86/kernel/cpu/resctrl/rdtgroup.c 		ret = __init_one_rdt_domain(d, r, closid);
d                2623 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	struct rdt_domain *d;
d                2625 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	list_for_each_entry(d, &r->domains, list) {
d                2626 arch/x86/kernel/cpu/resctrl/rdtgroup.c 		d->new_ctrl = is_mba_sc(r) ? MBA_MAX_MBPS : r->default_ctrl;
d                2627 arch/x86/kernel/cpu/resctrl/rdtgroup.c 		d->have_new_ctrl = true;
d                 127 arch/x86/kernel/cpu/vmware.c 	struct cyc2ns_data *d = &vmware_cyc2ns;
d                 130 arch/x86/kernel/cpu/vmware.c 	clocks_calc_mult_shift(&d->cyc2ns_mul, &d->cyc2ns_shift,
d                 132 arch/x86/kernel/cpu/vmware.c 	d->cyc2ns_offset = mul_u64_u32_shr(tsc_now, d->cyc2ns_mul,
d                 133 arch/x86/kernel/cpu/vmware.c 					   d->cyc2ns_shift);
d                 136 arch/x86/kernel/cpu/vmware.c 	pr_info("using sched offset of %llu ns\n", d->cyc2ns_offset);
d                 118 arch/x86/kernel/early-quirks.c 	u32 d;
d                 125 arch/x86/kernel/early-quirks.c 	d = read_pci_config(num, slot, func, 0x70);
d                 126 arch/x86/kernel/early-quirks.c 	d |= 1<<8;
d                 127 arch/x86/kernel/early-quirks.c 	write_pci_config(num, slot, func, 0x70, d);
d                 129 arch/x86/kernel/early-quirks.c 	d = read_pci_config(num, slot, func, 0x8);
d                 130 arch/x86/kernel/early-quirks.c 	d &= 0xff;
d                 131 arch/x86/kernel/early-quirks.c 	return d;
d                 136 arch/x86/kernel/early-quirks.c 	u32 d;
d                 142 arch/x86/kernel/early-quirks.c 	d = ati_ixp4x0_rev(num, slot, func);
d                 143 arch/x86/kernel/early-quirks.c 	if (d  < 0x82)
d                 153 arch/x86/kernel/early-quirks.c 		printk(KERN_INFO "SB4X0 revision 0x%x\n", d);
d                 162 arch/x86/kernel/early-quirks.c 	u32 d;
d                 164 arch/x86/kernel/early-quirks.c 	d = read_pci_config(num, slot, func, 0x8);
d                 165 arch/x86/kernel/early-quirks.c 	d &= 0xff;
d                 167 arch/x86/kernel/early-quirks.c 	return d;
d                 172 arch/x86/kernel/early-quirks.c 	u32 d, rev;
d                 190 arch/x86/kernel/early-quirks.c 	d = read_pci_config(num, slot, func, 0x64);
d                 191 arch/x86/kernel/early-quirks.c 	if (!(d & (1<<14)))
d                  80 arch/x86/kernel/hpet.c static inline void hpet_writel(unsigned int d, unsigned int a)
d                  82 arch/x86/kernel/hpet.c 	writel(d, hpet_virt_address + a);
d                 203 arch/x86/kernel/idt.c static inline void idt_init_desc(gate_desc *gate, const struct idt_data *d)
d                 205 arch/x86/kernel/idt.c 	unsigned long addr = (unsigned long) d->addr;
d                 208 arch/x86/kernel/idt.c 	gate->segment		= (u16) d->segment;
d                 209 arch/x86/kernel/idt.c 	gate->bits		= d->bits;
d                  74 arch/x86/kernel/kdebugfs.c 	struct dentry *d;
d                  78 arch/x86/kernel/kdebugfs.c 	d = debugfs_create_dir(buf, parent);
d                  80 arch/x86/kernel/kdebugfs.c 	debugfs_create_x32("type", S_IRUGO, d, &node->type);
d                  81 arch/x86/kernel/kdebugfs.c 	debugfs_create_file("data", S_IRUGO, d, node, &fops_setup_data);
d                  89 arch/x86/kernel/kdebugfs.c 	struct dentry *d;
d                  93 arch/x86/kernel/kdebugfs.c 	d = debugfs_create_dir("setup_data", parent);
d                 114 arch/x86/kernel/kdebugfs.c 		create_setup_data_node(d, no, node);
d                 124 arch/x86/kernel/kdebugfs.c 	debugfs_remove_recursive(d);
d                 217 arch/x86/kernel/mmconf-fam10h_64.c static int __init set_check_enable_amd_mmconf(const struct dmi_system_id *d)
d                   7 arch/x86/kernel/paravirt_patch.c #define PSTART(d, m)							\
d                   8 arch/x86/kernel/paravirt_patch.c 	patch_data_##d.m
d                  10 arch/x86/kernel/paravirt_patch.c #define PEND(d, m)							\
d                  11 arch/x86/kernel/paravirt_patch.c 	(PSTART(d, m) + sizeof(patch_data_##d.m))
d                  13 arch/x86/kernel/paravirt_patch.c #define PATCH(d, m, insn_buff, len)						\
d                  14 arch/x86/kernel/paravirt_patch.c 	paravirt_patch_insns(insn_buff, len, PSTART(d, m), PEND(d, m))
d                 359 arch/x86/kernel/quirks.c 	u32 d = 0;
d                 365 arch/x86/kernel/quirks.c 	err |= pci_read_config_dword(dev, 0x70, &d);
d                 366 arch/x86/kernel/quirks.c 	d |= 1<<8;
d                 367 arch/x86/kernel/quirks.c 	err |= pci_write_config_dword(dev, 0x70, d);
d                 368 arch/x86/kernel/quirks.c 	err |= pci_read_config_dword(dev, 0x8, &d);
d                 369 arch/x86/kernel/quirks.c 	d &= 0xff;
d                 370 arch/x86/kernel/quirks.c 	dev_printk(KERN_DEBUG, &dev->dev, "SB4X0 revision 0x%x\n", d);
d                 374 arch/x86/kernel/quirks.c 	return d;
d                 379 arch/x86/kernel/quirks.c 	u32 d, val;
d                 390 arch/x86/kernel/quirks.c 	d = ati_ixp4x0_rev(dev);
d                 391 arch/x86/kernel/quirks.c 	if (d  < 0x82)
d                 405 arch/x86/kernel/quirks.c 	pci_read_config_dword(dev, 0x64, &d);
d                 406 arch/x86/kernel/quirks.c 	d |= (1<<10);
d                 407 arch/x86/kernel/quirks.c 	pci_write_config_dword(dev, 0x64, d);
d                 408 arch/x86/kernel/quirks.c 	pci_read_config_dword(dev, 0x64, &d);
d                 409 arch/x86/kernel/quirks.c 	if (!(d & (1<<10)))
d                  60 arch/x86/kernel/reboot.c static int __init set_acpi_reboot(const struct dmi_system_id *d)
d                  65 arch/x86/kernel/reboot.c 			d->ident, "ACPI");
d                  74 arch/x86/kernel/reboot.c static int __init set_bios_reboot(const struct dmi_system_id *d)
d                  79 arch/x86/kernel/reboot.c 			d->ident, "BIOS");
d                  88 arch/x86/kernel/reboot.c static int __init set_efi_reboot(const struct dmi_system_id *d)
d                  92 arch/x86/kernel/reboot.c 		pr_info("%s series board detected. Selecting EFI-method for reboot.\n", d->ident);
d                 148 arch/x86/kernel/reboot.c static int __init set_pci_reboot(const struct dmi_system_id *d)
d                 153 arch/x86/kernel/reboot.c 			d->ident, "PCI");
d                 158 arch/x86/kernel/reboot.c static int __init set_kbd_reboot(const struct dmi_system_id *d)
d                 163 arch/x86/kernel/reboot.c 			d->ident, "KBD");
d                 161 arch/x86/kernel/setup_percpu.c 	struct desc_struct d = GDT_ENTRY_INIT(0x8092, per_cpu_offset(cpu),
d                 164 arch/x86/kernel/setup_percpu.c 	write_gdt_entry(get_cpu_gdt_rw(cpu), GDT_ENTRY_PERCPU, &d, DESCTYPE_S);
d                  45 arch/x86/kernel/step.c 			if (!desc->d)
d                 209 arch/x86/kernel/tls.c 	info->seg_32bit = desc->d;
d                 552 arch/x86/kvm/emulate.c 	return ~0U >> ((ss.d ^ 1) * 16);  /* d=0: 0xffff; d=1: 0xffffffff */
d                 685 arch/x86/kvm/emulate.c 	u64 alignment = ctxt->d & AlignMask;
d                 746 arch/x86/kvm/emulate.c 			lim = desc.d ? 0xffffffff : 0xffff;
d                 820 arch/x86/kvm/emulate.c 		mode = cs_desc->d ? X86EMUL_MODE_PROT32 : X86EMUL_MODE_PROT16;
d                1230 arch/x86/kvm/emulate.c 	if (!(ctxt->d & ModRM))
d                1233 arch/x86/kvm/emulate.c 	if (ctxt->d & Sse) {
d                1240 arch/x86/kvm/emulate.c 	if (ctxt->d & Mmx) {
d                1249 arch/x86/kvm/emulate.c 	op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
d                1250 arch/x86/kvm/emulate.c 	op->addr.reg = decode_register(ctxt, reg, ctxt->d & ByteOp);
d                1279 arch/x86/kvm/emulate.c 	if (ctxt->modrm_mod == 3 || (ctxt->d & NoMod)) {
d                1281 arch/x86/kvm/emulate.c 		op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
d                1283 arch/x86/kvm/emulate.c 				ctxt->d & ByteOp);
d                1284 arch/x86/kvm/emulate.c 		if (ctxt->d & Sse) {
d                1291 arch/x86/kvm/emulate.c 		if (ctxt->d & Mmx) {
d                1367 arch/x86/kvm/emulate.c 				if ((ctxt->d & IncSP) &&
d                1532 arch/x86/kvm/emulate.c 	if (ctxt->rep_prefix && (ctxt->d & String) &&
d                1694 arch/x86/kvm/emulate.c 			seg_desc.d = 1;
d                1747 arch/x86/kvm/emulate.c 		if (seg_desc.d && seg_desc.l) {
d                2394 arch/x86/kvm/emulate.c 	desc->d    = (flags >> 22) & 1;
d                2706 arch/x86/kvm/emulate.c 	cs->d = 1;
d                2714 arch/x86/kvm/emulate.c 	ss->d = 1;		/* 32bit stack segment */
d                2813 arch/x86/kvm/emulate.c 		cs.d = 0;
d                2880 arch/x86/kvm/emulate.c 		cs.d = 0;
d                2937 arch/x86/kvm/emulate.c 		cs.d = 0;
d                4971 arch/x86/kvm/emulate.c 	size = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
d                5018 arch/x86/kvm/emulate.c 			  unsigned d)
d                5022 arch/x86/kvm/emulate.c 	switch (d) {
d                5030 arch/x86/kvm/emulate.c 		ctxt->memop.bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
d                5034 arch/x86/kvm/emulate.c 		if (ctxt->d & BitOp)
d                5043 arch/x86/kvm/emulate.c 		op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
d                5050 arch/x86/kvm/emulate.c 		op->bytes = (ctxt->d & ByteOp) ? 2 : ctxt->op_bytes;
d                5056 arch/x86/kvm/emulate.c 		if (ctxt->d & ByteOp) {
d                5068 arch/x86/kvm/emulate.c 		op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
d                5122 arch/x86/kvm/emulate.c 		op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
d                5131 arch/x86/kvm/emulate.c 		op->bytes = (ctxt->d & ByteOp) ? 1 : ctxt->op_bytes;
d                5214 arch/x86/kvm/emulate.c 		if (desc.d)
d                5318 arch/x86/kvm/emulate.c 	ctxt->d = opcode.flags;
d                5320 arch/x86/kvm/emulate.c 	if (ctxt->d & ModRM)
d                5326 arch/x86/kvm/emulate.c 		ctxt->d = NotImpl;
d                5329 arch/x86/kvm/emulate.c 	while (ctxt->d & GroupMask) {
d                5330 arch/x86/kvm/emulate.c 		switch (ctxt->d & GroupMask) {
d                5384 arch/x86/kvm/emulate.c 		ctxt->d &= ~(u64)GroupMask;
d                5385 arch/x86/kvm/emulate.c 		ctxt->d |= opcode.flags;
d                5389 arch/x86/kvm/emulate.c 	if (ctxt->d == 0)
d                5394 arch/x86/kvm/emulate.c 	if (unlikely(ctxt->ud) && likely(!(ctxt->d & EmulateOnUD)))
d                5397 arch/x86/kvm/emulate.c 	if (unlikely(ctxt->d &
d                5407 arch/x86/kvm/emulate.c 		if (ctxt->d & NotImpl)
d                5411 arch/x86/kvm/emulate.c 			if (ctxt->op_bytes == 4 && (ctxt->d & Stack))
d                5413 arch/x86/kvm/emulate.c 			else if (ctxt->d & NearBranch)
d                5417 arch/x86/kvm/emulate.c 		if (ctxt->d & Op3264) {
d                5424 arch/x86/kvm/emulate.c 		if ((ctxt->d & No16) && ctxt->op_bytes == 2)
d                5427 arch/x86/kvm/emulate.c 		if (ctxt->d & Sse)
d                5429 arch/x86/kvm/emulate.c 		else if (ctxt->d & Mmx)
d                5434 arch/x86/kvm/emulate.c 	if (ctxt->d & ModRM) {
d                5440 arch/x86/kvm/emulate.c 	} else if (ctxt->d & MemAbs)
d                5454 arch/x86/kvm/emulate.c 	rc = decode_operand(ctxt, &ctxt->src, (ctxt->d >> SrcShift) & OpMask);
d                5462 arch/x86/kvm/emulate.c 	rc = decode_operand(ctxt, &ctxt->src2, (ctxt->d >> Src2Shift) & OpMask);
d                5467 arch/x86/kvm/emulate.c 	rc = decode_operand(ctxt, &ctxt->dst, (ctxt->d >> DstShift) & OpMask);
d                5481 arch/x86/kvm/emulate.c 	return ctxt->d & PageTable;
d                5529 arch/x86/kvm/emulate.c 	if (!(ctxt->d & ByteOp))
d                5563 arch/x86/kvm/emulate.c 	if (ctxt->lock_prefix && (!(ctxt->d & Lock) || ctxt->dst.type != OP_MEM)) {
d                5568 arch/x86/kvm/emulate.c 	if ((ctxt->d & SrcMask) == SrcMemFAddr && ctxt->src.type != OP_MEM) {
d                5574 arch/x86/kvm/emulate.c 	if (unlikely(ctxt->d &
d                5576 arch/x86/kvm/emulate.c 		if ((ctxt->mode == X86EMUL_MODE_PROT64 && (ctxt->d & No64)) ||
d                5577 arch/x86/kvm/emulate.c 				(ctxt->d & Undefined)) {
d                5582 arch/x86/kvm/emulate.c 		if (((ctxt->d & (Sse|Mmx)) && ((ops->get_cr(ctxt, 0) & X86_CR0_EM)))
d                5583 arch/x86/kvm/emulate.c 		    || ((ctxt->d & Sse) && !(ops->get_cr(ctxt, 4) & X86_CR4_OSFXSR))) {
d                5588 arch/x86/kvm/emulate.c 		if ((ctxt->d & (Sse|Mmx)) && (ops->get_cr(ctxt, 0) & X86_CR0_TS)) {
d                5593 arch/x86/kvm/emulate.c 		if (ctxt->d & Mmx) {
d                5603 arch/x86/kvm/emulate.c 			if (!(ctxt->d & Mov))
d                5615 arch/x86/kvm/emulate.c 		if ((ctxt->d & Prot) && ctxt->mode < X86EMUL_MODE_PROT16) {
d                5621 arch/x86/kvm/emulate.c 		if ((ctxt->d & Priv) && ops->cpl(ctxt)) {
d                5622 arch/x86/kvm/emulate.c 			if (ctxt->d & PrivUD)
d                5630 arch/x86/kvm/emulate.c 		if (ctxt->d & CheckPerm) {
d                5636 arch/x86/kvm/emulate.c 		if (unlikely(emul_flags & X86EMUL_GUEST_MASK) && (ctxt->d & Intercept)) {
d                5643 arch/x86/kvm/emulate.c 		if (ctxt->rep_prefix && (ctxt->d & String)) {
d                5654 arch/x86/kvm/emulate.c 	if ((ctxt->src.type == OP_MEM) && !(ctxt->d & NoAccess)) {
d                5669 arch/x86/kvm/emulate.c 	if ((ctxt->d & DstMask) == ImplicitOps)
d                5673 arch/x86/kvm/emulate.c 	if ((ctxt->dst.type == OP_MEM) && !(ctxt->d & Mov)) {
d                5678 arch/x86/kvm/emulate.c 			if (!(ctxt->d & NoWrite) &&
d                5690 arch/x86/kvm/emulate.c 	if (unlikely(emul_flags & X86EMUL_GUEST_MASK) && (ctxt->d & Intercept)) {
d                5697 arch/x86/kvm/emulate.c 	if (ctxt->rep_prefix && (ctxt->d & String))
d                5703 arch/x86/kvm/emulate.c 		if (ctxt->d & Fastop) {
d                5784 arch/x86/kvm/emulate.c 	if (ctxt->d & SrcWrite) {
d                5790 arch/x86/kvm/emulate.c 	if (!(ctxt->d & NoWrite)) {
d                5802 arch/x86/kvm/emulate.c 	if ((ctxt->d & SrcMask) == SrcSI)
d                5805 arch/x86/kvm/emulate.c 	if ((ctxt->d & DstMask) == DstDI)
d                5808 arch/x86/kvm/emulate.c 	if (ctxt->rep_prefix && (ctxt->d & String)) {
d                5811 arch/x86/kvm/emulate.c 		if ((ctxt->d & SrcMask) == SrcSI)
d                5919 arch/x86/kvm/emulate.c 	if (ctxt->rep_prefix && (ctxt->d & String))
d                5922 arch/x86/kvm/emulate.c 	if (ctxt->d & TwoMemOp)
d                 118 arch/x86/kvm/i8254.c 	s64 d, t;
d                 122 arch/x86/kvm/i8254.c 	d = mul_u64_u32_div(t, KVM_PIT_FREQ, NSEC_PER_SEC);
d                 129 arch/x86/kvm/i8254.c 		counter = (c->count - d) & 0xffff;
d                 133 arch/x86/kvm/i8254.c 		counter = c->count - (mod_64((2 * d), c->count));
d                 136 arch/x86/kvm/i8254.c 		counter = c->count - mod_64(d, c->count);
d                 145 arch/x86/kvm/i8254.c 	s64 d, t;
d                 149 arch/x86/kvm/i8254.c 	d = mul_u64_u32_div(t, KVM_PIT_FREQ, NSEC_PER_SEC);
d                 154 arch/x86/kvm/i8254.c 		out = (d >= c->count);
d                 157 arch/x86/kvm/i8254.c 		out = (d < c->count);
d                 160 arch/x86/kvm/i8254.c 		out = ((mod_64(d, c->count) == 0) && (d != 0));
d                 163 arch/x86/kvm/i8254.c 		out = (mod_64(d, c->count) < ((c->count + 1) >> 1));
d                 167 arch/x86/kvm/i8254.c 		out = (d == c->count);
d                6149 arch/x86/kvm/x86.c 	desc->d = var.db;
d                6172 arch/x86/kvm/x86.c 	var.db = desc->d;
d                 768 arch/x86/lib/insn-eval.c 	switch ((desc.l << 1) | desc.d) {
d                  10 arch/x86/lib/misc.c 	int d = 1;
d                  13 arch/x86/lib/misc.c 		d++;
d                  19 arch/x86/lib/misc.c 		d++;
d                  21 arch/x86/lib/misc.c 	return d;
d                 148 arch/x86/math-emu/fpu_entry.c 		if (code_descriptor.d) {
d                 103 arch/x86/math-emu/fpu_proto.h 		     FPU_REG * d);
d                 135 arch/x86/math-emu/fpu_proto.h 			      long double __user * d);
d                 141 arch/x86/math-emu/fpu_proto.h 			   long long __user * d);
d                 142 arch/x86/math-emu/fpu_proto.h extern int FPU_store_int32(FPU_REG *st0_ptr, u_char st0_tag, long __user *d);
d                 143 arch/x86/math-emu/fpu_proto.h extern int FPU_store_int16(FPU_REG *st0_ptr, u_char st0_tag, short __user *d);
d                 144 arch/x86/math-emu/fpu_proto.h extern int FPU_store_bcd(FPU_REG *st0_ptr, u_char st0_tag, u_char __user *d);
d                 148 arch/x86/math-emu/fpu_proto.h extern u_char __user *fstenv(fpu_addr_modes addr_modes, u_char __user *d);
d                  44 arch/x86/math-emu/fpu_system.h static inline unsigned long seg_get_base(struct desc_struct *d)
d                  46 arch/x86/math-emu/fpu_system.h 	unsigned long base = (unsigned long)d->base2 << 24;
d                  48 arch/x86/math-emu/fpu_system.h 	return base | ((unsigned long)d->base1 << 16) | d->base0;
d                  51 arch/x86/math-emu/fpu_system.h static inline unsigned long seg_get_limit(struct desc_struct *d)
d                  53 arch/x86/math-emu/fpu_system.h 	return ((unsigned long)d->limit1 << 16) | d->limit0;
d                  56 arch/x86/math-emu/fpu_system.h static inline unsigned long seg_get_granularity(struct desc_struct *d)
d                  58 arch/x86/math-emu/fpu_system.h 	return d->g ? 4096 : 1;
d                  61 arch/x86/math-emu/fpu_system.h static inline bool seg_expands_down(struct desc_struct *d)
d                  63 arch/x86/math-emu/fpu_system.h 	return (d->type & SEG_TYPE_EXPAND_MASK) == SEG_TYPE_EXPANDS_DOWN;
d                  66 arch/x86/math-emu/fpu_system.h static inline bool seg_execute_only(struct desc_struct *d)
d                  68 arch/x86/math-emu/fpu_system.h 	return (d->type & SEG_TYPE_EXECUTE_MASK) == SEG_TYPE_EXECUTE;
d                  71 arch/x86/math-emu/fpu_system.h static inline bool seg_writable(struct desc_struct *d)
d                  73 arch/x86/math-emu/fpu_system.h 	return (d->type & SEG_TYPE_EXECUTE_MASK) == SEG_TYPE_WRITABLE;
d                 338 arch/x86/math-emu/reg_ld_str.c 		       long double __user * d)
d                 348 arch/x86/math-emu/reg_ld_str.c 		FPU_access_ok(d, 10);
d                 350 arch/x86/math-emu/reg_ld_str.c 		FPU_put_user(st0_ptr->sigl, (unsigned long __user *)d);
d                 352 arch/x86/math-emu/reg_ld_str.c 			     (unsigned long __user *)((u_char __user *) d + 4));
d                 354 arch/x86/math-emu/reg_ld_str.c 			     (unsigned short __user *)((u_char __user *) d +
d                 367 arch/x86/math-emu/reg_ld_str.c 		FPU_access_ok(d, 10);
d                 368 arch/x86/math-emu/reg_ld_str.c 		FPU_put_user(0, (unsigned long __user *)d);
d                 369 arch/x86/math-emu/reg_ld_str.c 		FPU_put_user(0xc0000000, 1 + (unsigned long __user *)d);
d                 370 arch/x86/math-emu/reg_ld_str.c 		FPU_put_user(0xffff, 4 + (short __user *)d);
d                 753 arch/x86/math-emu/reg_ld_str.c int FPU_store_int64(FPU_REG *st0_ptr, u_char st0_tag, long long __user *d)
d                 794 arch/x86/math-emu/reg_ld_str.c 	FPU_access_ok(d, 8);
d                 795 arch/x86/math-emu/reg_ld_str.c 	if (copy_to_user(d, &tll, 8))
d                 803 arch/x86/math-emu/reg_ld_str.c int FPU_store_int32(FPU_REG *st0_ptr, u_char st0_tag, long __user *d)
d                 841 arch/x86/math-emu/reg_ld_str.c 	FPU_access_ok(d, 4);
d                 842 arch/x86/math-emu/reg_ld_str.c 	FPU_put_user(t.sigl, (unsigned long __user *)d);
d                 849 arch/x86/math-emu/reg_ld_str.c int FPU_store_int16(FPU_REG *st0_ptr, u_char st0_tag, short __user *d)
d                 887 arch/x86/math-emu/reg_ld_str.c 	FPU_access_ok(d, 2);
d                 888 arch/x86/math-emu/reg_ld_str.c 	FPU_put_user((short)t.sigl, d);
d                 895 arch/x86/math-emu/reg_ld_str.c int FPU_store_bcd(FPU_REG *st0_ptr, u_char st0_tag, u_char __user *d)
d                 928 arch/x86/math-emu/reg_ld_str.c 			FPU_access_ok(d, 10);
d                 930 arch/x86/math-emu/reg_ld_str.c 				FPU_put_user(0, d + i);	/* These bytes "undefined" */
d                 931 arch/x86/math-emu/reg_ld_str.c 			FPU_put_user(0xc0, d + 7);	/* This byte "undefined" */
d                 932 arch/x86/math-emu/reg_ld_str.c 			FPU_put_user(0xff, d + 8);
d                 933 arch/x86/math-emu/reg_ld_str.c 			FPU_put_user(0xff, d + 9);
d                 944 arch/x86/math-emu/reg_ld_str.c 	FPU_access_ok(d, 10);
d                 950 arch/x86/math-emu/reg_ld_str.c 		FPU_put_user(b, d + i);
d                 954 arch/x86/math-emu/reg_ld_str.c 	FPU_put_user(sign, d + 9);
d                1143 arch/x86/math-emu/reg_ld_str.c u_char __user *fstenv(fpu_addr_modes addr_modes, u_char __user *d)
d                1149 arch/x86/math-emu/reg_ld_str.c 		FPU_access_ok(d, 14);
d                1151 arch/x86/math-emu/reg_ld_str.c 		FPU_put_user(control_word & ~0xe080, (unsigned long __user *)d);
d                1153 arch/x86/math-emu/reg_ld_str.c 		FPU_put_user(control_word, (unsigned short __user *)d);
d                1155 arch/x86/math-emu/reg_ld_str.c 		FPU_put_user(status_word(), (unsigned short __user *)(d + 2));
d                1156 arch/x86/math-emu/reg_ld_str.c 		FPU_put_user(fpu_tag_word, (unsigned short __user *)(d + 4));
d                1158 arch/x86/math-emu/reg_ld_str.c 			     (unsigned short __user *)(d + 6));
d                1160 arch/x86/math-emu/reg_ld_str.c 			     (unsigned short __user *)(d + 0x0a));
d                1164 arch/x86/math-emu/reg_ld_str.c 				     (unsigned short __user *)(d + 8));
d                1166 arch/x86/math-emu/reg_ld_str.c 				     (unsigned short __user *)(d + 0x0c));
d                1169 arch/x86/math-emu/reg_ld_str.c 				     (unsigned short __user *)(d + 8));
d                1171 arch/x86/math-emu/reg_ld_str.c 				     (unsigned short __user *)(d + 0x0c));
d                1174 arch/x86/math-emu/reg_ld_str.c 		d += 0x0e;
d                1177 arch/x86/math-emu/reg_ld_str.c 		FPU_access_ok(d, 7 * 4);
d                1187 arch/x86/math-emu/reg_ld_str.c 		if (__copy_to_user(d, &control_word, 7 * 4))
d                1190 arch/x86/math-emu/reg_ld_str.c 		d += 0x1c;
d                1196 arch/x86/math-emu/reg_ld_str.c 	return d;
d                1201 arch/x86/math-emu/reg_ld_str.c 	u_char __user *d;
d                1204 arch/x86/math-emu/reg_ld_str.c 	d = fstenv(addr_modes, data_address);
d                1207 arch/x86/math-emu/reg_ld_str.c 	FPU_access_ok(d, 80);
d                1210 arch/x86/math-emu/reg_ld_str.c 	if (__copy_to_user(d, register_base + offset, other))
d                1213 arch/x86/math-emu/reg_ld_str.c 		if (__copy_to_user(d + other, register_base, offset))
d                  82 arch/x86/pci/common.c static int __init can_skip_ioresource_align(const struct dmi_system_id *d)
d                  85 arch/x86/pci/common.c 	printk(KERN_INFO "PCI: %s detected, can skip ISA alignment\n", d->ident);
d                 187 arch/x86/pci/common.c static int __init set_bf_sort(const struct dmi_system_id *d)
d                 191 arch/x86/pci/common.c 		printk(KERN_INFO "PCI: %s detected, enabling pci=bfsort.\n", d->ident);
d                 207 arch/x86/pci/common.c static int __init find_sort_method(const struct dmi_system_id *d)
d                 209 arch/x86/pci/common.c 	dmi_walk(read_dmi_type_b1, (void *)d);
d                 217 arch/x86/pci/common.c static int __init assign_all_busses(const struct dmi_system_id *d)
d                 221 arch/x86/pci/common.c 			" (pci=assign-busses)\n", d->ident);
d                 226 arch/x86/pci/common.c static int __init set_scan_all(const struct dmi_system_id *d)
d                 229 arch/x86/pci/common.c 	       d->ident);
d                  13 arch/x86/pci/fixup.c static void pci_fixup_i450nx(struct pci_dev *d)
d                  21 arch/x86/pci/fixup.c 	dev_warn(&d->dev, "Searching for i450NX host bridges\n");
d                  24 arch/x86/pci/fixup.c 		pci_read_config_byte(d, reg++, &busno);
d                  25 arch/x86/pci/fixup.c 		pci_read_config_byte(d, reg++, &suba);
d                  26 arch/x86/pci/fixup.c 		pci_read_config_byte(d, reg++, &subb);
d                  27 arch/x86/pci/fixup.c 		dev_dbg(&d->dev, "i450NX PXB %d: %02x/%02x/%02x\n", pxb, busno,
d                  38 arch/x86/pci/fixup.c static void pci_fixup_i450gx(struct pci_dev *d)
d                  45 arch/x86/pci/fixup.c 	pci_read_config_byte(d, 0x4a, &busno);
d                  46 arch/x86/pci/fixup.c 	dev_info(&d->dev, "i440KX/GX host bridge; secondary bus %02x\n", busno);
d                  52 arch/x86/pci/fixup.c static void pci_fixup_umc_ide(struct pci_dev *d)
d                  60 arch/x86/pci/fixup.c 	dev_warn(&d->dev, "Fixing base address flags\n");
d                  62 arch/x86/pci/fixup.c 		d->resource[i].flags |= PCI_BASE_ADDRESS_SPACE_IO;
d                  66 arch/x86/pci/fixup.c static void pci_fixup_latency(struct pci_dev *d)
d                  72 arch/x86/pci/fixup.c 	dev_dbg(&d->dev, "Setting max latency to 32\n");
d                  78 arch/x86/pci/fixup.c static void pci_fixup_piix4_acpi(struct pci_dev *d)
d                  83 arch/x86/pci/fixup.c 	d->irq = 9;
d                 107 arch/x86/pci/fixup.c static void pci_fixup_via_northbridge_bug(struct pci_dev *d)
d                 113 arch/x86/pci/fixup.c 	if (d->device == PCI_DEVICE_ID_VIA_8367_0) {
d                 117 arch/x86/pci/fixup.c 		pci_write_config_byte(d, PCI_LATENCY_TIMER, 0);
d                 121 arch/x86/pci/fixup.c 	} else if (d->device == PCI_DEVICE_ID_VIA_8363_0 &&
d                 122 arch/x86/pci/fixup.c 			(d->revision == VIA_8363_KL133_REVISION_ID ||
d                 123 arch/x86/pci/fixup.c 			d->revision == VIA_8363_KM133_REVISION_ID)) {
d                 128 arch/x86/pci/fixup.c 	pci_read_config_byte(d, where, &v);
d                 130 arch/x86/pci/fixup.c 		dev_warn(&d->dev, "Disabling VIA memory write queue (PCI ID %04x, rev %02x): [%02x] %02x & %02x -> %02x\n", \
d                 131 arch/x86/pci/fixup.c 			d->device, d->revision, where, v, mask, v & mask);
d                 133 arch/x86/pci/fixup.c 		pci_write_config_byte(d, where, v);
d                1073 arch/x86/pci/irq.c static int __init fix_broken_hp_bios_irq9(const struct dmi_system_id *d)
d                1078 arch/x86/pci/irq.c 			d->ident);
d                1087 arch/x86/pci/irq.c static int __init fix_acer_tm360_irqrouting(const struct dmi_system_id *d)
d                1092 arch/x86/pci/irq.c 			d->ident);
d                  62 arch/x86/platform/olpc/olpc.c #define wait_on_ibf(p, d) __wait_on_ibf(__LINE__, (p), (d))
d                  82 arch/x86/platform/olpc/olpc.c #define wait_on_obf(p, d) __wait_on_obf(__LINE__, (p), (d))
d                 444 arch/x86/power/cpu.c static int msr_initialize_bdw(const struct dmi_system_id *d)
d                 449 arch/x86/power/cpu.c 	pr_info("x86/pm: %s detected, MSR saving is needed during suspending.\n", d->ident);
d                  44 arch/xtensa/include/asm/asm-uaccess.h 	GET_CURRENT(\ad,\sp)
d                  46 arch/xtensa/include/asm/asm-uaccess.h 	addi	\ad, \ad, TASK_THREAD
d                  47 arch/xtensa/include/asm/asm-uaccess.h 	l32i	\ad, \ad, THREAD_CURRENT_DS - TASK_THREAD
d                  49 arch/xtensa/include/asm/asm-uaccess.h 	l32i	\ad, \ad, THREAD_CURRENT_DS
d                  38 arch/xtensa/include/asm/irq.h int xtensa_irq_map(struct irq_domain *d, unsigned int irq, irq_hw_number_t hw);
d                  88 arch/xtensa/kernel/irq.c int xtensa_irq_map(struct irq_domain *d, unsigned int irq,
d                  91 arch/xtensa/kernel/irq.c 	struct irq_chip *irq_chip = d->host_data;
d                  77 arch/xtensa/kernel/jump_label.c 	u32 d = (jump_entry_target(e) - (jump_entry_code(e) + 4));
d                  81 arch/xtensa/kernel/jump_label.c 	BUG_ON(!((d & J_SIGN_MASK) == 0 ||
d                  82 arch/xtensa/kernel/jump_label.c 		 (d & J_SIGN_MASK) == J_SIGN_MASK));
d                  86 arch/xtensa/kernel/jump_label.c 		insn = ((d & J_OFFSET_MASK) << 6) | J_INSN;
d                  88 arch/xtensa/kernel/jump_label.c 		insn = ((d & J_OFFSET_MASK) << 8) | J_INSN;
d                  67 arch/xtensa/platforms/iss/include/platform/simcall.h static inline int __simc(int a, int b, int c, int d)
d                  73 arch/xtensa/platforms/iss/include/platform/simcall.h 	register int d1 asm("a5") = d;
d                 292 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h 	.macro xchal_cp0_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 293 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h 	.macro xchal_cp0_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 294 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h 	.macro xchal_cp2_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 295 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h 	.macro xchal_cp2_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 296 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h 	.macro xchal_cp3_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 297 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h 	.macro xchal_cp3_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 298 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h 	.macro xchal_cp4_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 299 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h 	.macro xchal_cp4_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 300 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h 	.macro xchal_cp5_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 301 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h 	.macro xchal_cp5_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 302 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h 	.macro xchal_cp6_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 303 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h 	.macro xchal_cp6_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 304 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h 	.macro xchal_cp7_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 305 arch/xtensa/variants/test_kc705_be/include/variant/tie-asm.h 	.macro xchal_cp7_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 313 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h 	.macro xchal_cp0_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 314 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h 	.macro xchal_cp0_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 315 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h 	.macro xchal_cp2_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 316 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h 	.macro xchal_cp2_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 317 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h 	.macro xchal_cp3_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 318 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h 	.macro xchal_cp3_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 319 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h 	.macro xchal_cp4_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 320 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h 	.macro xchal_cp4_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 321 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h 	.macro xchal_cp5_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 322 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h 	.macro xchal_cp5_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 323 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h 	.macro xchal_cp6_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 324 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h 	.macro xchal_cp6_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 325 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h 	.macro xchal_cp7_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 326 arch/xtensa/variants/test_kc705_hifi/include/variant/tie-asm.h 	.macro xchal_cp7_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 167 arch/xtensa/variants/test_mmuhifi_c3/include/variant/tie-asm.h 	.macro xchal_cp0_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 168 arch/xtensa/variants/test_mmuhifi_c3/include/variant/tie-asm.h 	.macro xchal_cp0_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 169 arch/xtensa/variants/test_mmuhifi_c3/include/variant/tie-asm.h 	.macro xchal_cp2_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 170 arch/xtensa/variants/test_mmuhifi_c3/include/variant/tie-asm.h 	.macro xchal_cp2_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 171 arch/xtensa/variants/test_mmuhifi_c3/include/variant/tie-asm.h 	.macro xchal_cp3_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 172 arch/xtensa/variants/test_mmuhifi_c3/include/variant/tie-asm.h 	.macro xchal_cp3_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 173 arch/xtensa/variants/test_mmuhifi_c3/include/variant/tie-asm.h 	.macro xchal_cp4_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 174 arch/xtensa/variants/test_mmuhifi_c3/include/variant/tie-asm.h 	.macro xchal_cp4_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 175 arch/xtensa/variants/test_mmuhifi_c3/include/variant/tie-asm.h 	.macro xchal_cp5_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 176 arch/xtensa/variants/test_mmuhifi_c3/include/variant/tie-asm.h 	.macro xchal_cp5_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 177 arch/xtensa/variants/test_mmuhifi_c3/include/variant/tie-asm.h 	.macro xchal_cp6_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 178 arch/xtensa/variants/test_mmuhifi_c3/include/variant/tie-asm.h 	.macro xchal_cp6_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 179 arch/xtensa/variants/test_mmuhifi_c3/include/variant/tie-asm.h 	.macro xchal_cp7_store	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 180 arch/xtensa/variants/test_mmuhifi_c3/include/variant/tie-asm.h 	.macro xchal_cp7_load	p a b c d continue=0 ofs=-1 select=-1 ; .endm
d                 493 block/bfq-cgroup.c 	struct bfq_group_data *d = cpd_to_bfqgd(cpd);
d                 495 block/bfq-cgroup.c 	d->weight = cgroup_subsys_on_dfl(io_cgrp_subsys) ?
d                 529 block/bfq-cgroup.c 	struct bfq_group_data *d = blkcg_to_bfqgd(blkg->blkcg);
d                 531 block/bfq-cgroup.c 	entity->orig_weight = entity->weight = entity->new_weight = d->weight;
d                 280 block/bfq-wf2q.c 	u64 d = (u64)service << WFQ_SERVICE_SHIFT;
d                 282 block/bfq-wf2q.c 	do_div(d, weight);
d                 283 block/bfq-wf2q.c 	return d;
d                 555 block/blk-mq-debugfs.c 		unsigned int d = 1U << (i - 1);
d                 557 block/blk-mq-debugfs.c 		seq_printf(m, "%8u\t%lu\n", d, hctx->dispatched[i]);
d                 175 block/partitions/aix.c 	unsigned char *d;
d                 189 block/partitions/aix.c 	d = read_part_sector(state, 7, &sect);
d                 190 block/partitions/aix.c 	if (d) {
d                 191 block/partitions/aix.c 		struct lvm_rec *p = (struct lvm_rec *)d;
d                 213 block/partitions/aix.c 	if (vgda_sector && (d = read_part_sector(state, vgda_sector, &sect))) {
d                 214 block/partitions/aix.c 		struct vgda *p = (struct vgda *)d;
d                 222 block/partitions/aix.c 	if (numlvs && (d = read_part_sector(state, vgda_sector + 1, &sect))) {
d                 223 block/partitions/aix.c 		struct lvd *p = (struct lvd *)d;
d                  73 block/partitions/msdos.c 	unsigned char *d;
d                  90 block/partitions/msdos.c 	d = read_part_sector(state, 7, &sect);
d                  91 block/partitions/msdos.c 	if (d) {
d                  92 block/partitions/msdos.c 		if (d[0] == '_' && d[1] == 'L' && d[2] == 'V' && d[3] == 'M')
d                 983 crypto/algapi.c 		int d = (((unsigned long)dst ^ (unsigned long)src1) |
d                 987 crypto/algapi.c 		relalign = d ? 1 << __ffs(d) : size;
d                  26 crypto/async_tx/async_pq.c #define P(b, d) (b[d-2])
d                  27 crypto/async_tx/async_pq.c #define Q(b, d) (b[d-1])
d                  91 crypto/async_tx/async_raid6_recov.c 	u8 *d, *s;
d                 136 crypto/async_tx/async_raid6_recov.c 	d = page_address(dest);
d                 140 crypto/async_tx/async_raid6_recov.c 		*d++ = qmul[*s++];
d                  44 crypto/async_tx/raid6test.c static char disk_type(int d, int disks)
d                  46 crypto/async_tx/raid6test.c 	if (d == disks - 2)
d                  48 crypto/async_tx/raid6test.c 	else if (d == disks - 1)
d                 108 crypto/cts.c   	u8 d[MAX_CIPHER_BLOCKSIZE * 2] __aligned(__alignof__(u32));
d                 117 crypto/cts.c   	scatterwalk_map_and_copy(d + bsize, sg, 0, bsize, 0);
d                 119 crypto/cts.c   	memset(d, 0, bsize);
d                 120 crypto/cts.c   	scatterwalk_map_and_copy(d, req->src, offset, lastn, 0);
d                 122 crypto/cts.c   	scatterwalk_map_and_copy(d, sg, 0, bsize + lastn, 1);
d                 123 crypto/cts.c   	memzero_explicit(d, sizeof(d));
d                 189 crypto/cts.c   	u8 d[MAX_CIPHER_BLOCKSIZE * 2] __aligned(__alignof__(u32));
d                 201 crypto/cts.c   	scatterwalk_map_and_copy(d + bsize, sg, 0, bsize, 0);
d                 203 crypto/cts.c   	crypto_xor(d + bsize, space, bsize);
d                 205 crypto/cts.c   	memset(d, 0, bsize);
d                 206 crypto/cts.c   	scatterwalk_map_and_copy(d, req->src, offset, lastn, 0);
d                 209 crypto/cts.c   	crypto_xor(d + bsize, d, lastn);
d                 212 crypto/cts.c   	memcpy(d + lastn, d + bsize + lastn, bsize - lastn);
d                 215 crypto/cts.c   	scatterwalk_map_and_copy(d, sg, 0, bsize + lastn, 1);
d                 216 crypto/cts.c   	memzero_explicit(d, sizeof(d));
d                  63 crypto/md4.c   #define ROUND1(a,b,c,d,k,s) (a = lshift(a + F(b,c,d) + k, s))
d                  64 crypto/md4.c   #define ROUND2(a,b,c,d,k,s) (a = lshift(a + G(b,c,d) + k + (u32)0x5A827999,s))
d                  65 crypto/md4.c   #define ROUND3(a,b,c,d,k,s) (a = lshift(a + H(b,c,d) + k + (u32)0x6ED9EBA1,s))
d                  69 crypto/md4.c   	u32 a, b, c, d;
d                  74 crypto/md4.c   	d = hash[3];
d                  76 crypto/md4.c   	ROUND1(a, b, c, d, in[0], 3);
d                  77 crypto/md4.c   	ROUND1(d, a, b, c, in[1], 7);
d                  78 crypto/md4.c   	ROUND1(c, d, a, b, in[2], 11);
d                  79 crypto/md4.c   	ROUND1(b, c, d, a, in[3], 19);
d                  80 crypto/md4.c   	ROUND1(a, b, c, d, in[4], 3);
d                  81 crypto/md4.c   	ROUND1(d, a, b, c, in[5], 7);
d                  82 crypto/md4.c   	ROUND1(c, d, a, b, in[6], 11);
d                  83 crypto/md4.c   	ROUND1(b, c, d, a, in[7], 19);
d                  84 crypto/md4.c   	ROUND1(a, b, c, d, in[8], 3);
d                  85 crypto/md4.c   	ROUND1(d, a, b, c, in[9], 7);
d                  86 crypto/md4.c   	ROUND1(c, d, a, b, in[10], 11);
d                  87 crypto/md4.c   	ROUND1(b, c, d, a, in[11], 19);
d                  88 crypto/md4.c   	ROUND1(a, b, c, d, in[12], 3);
d                  89 crypto/md4.c   	ROUND1(d, a, b, c, in[13], 7);
d                  90 crypto/md4.c   	ROUND1(c, d, a, b, in[14], 11);
d                  91 crypto/md4.c   	ROUND1(b, c, d, a, in[15], 19);
d                  93 crypto/md4.c   	ROUND2(a, b, c, d,in[ 0], 3);
d                  94 crypto/md4.c   	ROUND2(d, a, b, c, in[4], 5);
d                  95 crypto/md4.c   	ROUND2(c, d, a, b, in[8], 9);
d                  96 crypto/md4.c   	ROUND2(b, c, d, a, in[12], 13);
d                  97 crypto/md4.c   	ROUND2(a, b, c, d, in[1], 3);
d                  98 crypto/md4.c   	ROUND2(d, a, b, c, in[5], 5);
d                  99 crypto/md4.c   	ROUND2(c, d, a, b, in[9], 9);
d                 100 crypto/md4.c   	ROUND2(b, c, d, a, in[13], 13);
d                 101 crypto/md4.c   	ROUND2(a, b, c, d, in[2], 3);
d                 102 crypto/md4.c   	ROUND2(d, a, b, c, in[6], 5);
d                 103 crypto/md4.c   	ROUND2(c, d, a, b, in[10], 9);
d                 104 crypto/md4.c   	ROUND2(b, c, d, a, in[14], 13);
d                 105 crypto/md4.c   	ROUND2(a, b, c, d, in[3], 3);
d                 106 crypto/md4.c   	ROUND2(d, a, b, c, in[7], 5);
d                 107 crypto/md4.c   	ROUND2(c, d, a, b, in[11], 9);
d                 108 crypto/md4.c   	ROUND2(b, c, d, a, in[15], 13);
d                 110 crypto/md4.c   	ROUND3(a, b, c, d,in[ 0], 3);
d                 111 crypto/md4.c   	ROUND3(d, a, b, c, in[8], 9);
d                 112 crypto/md4.c   	ROUND3(c, d, a, b, in[4], 11);
d                 113 crypto/md4.c   	ROUND3(b, c, d, a, in[12], 15);
d                 114 crypto/md4.c   	ROUND3(a, b, c, d, in[2], 3);
d                 115 crypto/md4.c   	ROUND3(d, a, b, c, in[10], 9);
d                 116 crypto/md4.c   	ROUND3(c, d, a, b, in[6], 11);
d                 117 crypto/md4.c   	ROUND3(b, c, d, a, in[14], 15);
d                 118 crypto/md4.c   	ROUND3(a, b, c, d, in[1], 3);
d                 119 crypto/md4.c   	ROUND3(d, a, b, c, in[9], 9);
d                 120 crypto/md4.c   	ROUND3(c, d, a, b, in[5], 11);
d                 121 crypto/md4.c   	ROUND3(b, c, d, a, in[13], 15);
d                 122 crypto/md4.c   	ROUND3(a, b, c, d, in[3], 3);
d                 123 crypto/md4.c   	ROUND3(d, a, b, c, in[11], 9);
d                 124 crypto/md4.c   	ROUND3(c, d, a, b, in[7], 11);
d                 125 crypto/md4.c   	ROUND3(b, c, d, a, in[15], 15);
d                 130 crypto/md4.c   	hash[3] += d;
d                  45 crypto/md5.c   	u32 a, b, c, d;
d                  50 crypto/md5.c   	d = hash[3];
d                  52 crypto/md5.c   	MD5STEP(F1, a, b, c, d, in[0] + 0xd76aa478, 7);
d                  53 crypto/md5.c   	MD5STEP(F1, d, a, b, c, in[1] + 0xe8c7b756, 12);
d                  54 crypto/md5.c   	MD5STEP(F1, c, d, a, b, in[2] + 0x242070db, 17);
d                  55 crypto/md5.c   	MD5STEP(F1, b, c, d, a, in[3] + 0xc1bdceee, 22);
d                  56 crypto/md5.c   	MD5STEP(F1, a, b, c, d, in[4] + 0xf57c0faf, 7);
d                  57 crypto/md5.c   	MD5STEP(F1, d, a, b, c, in[5] + 0x4787c62a, 12);
d                  58 crypto/md5.c   	MD5STEP(F1, c, d, a, b, in[6] + 0xa8304613, 17);
d                  59 crypto/md5.c   	MD5STEP(F1, b, c, d, a, in[7] + 0xfd469501, 22);
d                  60 crypto/md5.c   	MD5STEP(F1, a, b, c, d, in[8] + 0x698098d8, 7);
d                  61 crypto/md5.c   	MD5STEP(F1, d, a, b, c, in[9] + 0x8b44f7af, 12);
d                  62 crypto/md5.c   	MD5STEP(F1, c, d, a, b, in[10] + 0xffff5bb1, 17);
d                  63 crypto/md5.c   	MD5STEP(F1, b, c, d, a, in[11] + 0x895cd7be, 22);
d                  64 crypto/md5.c   	MD5STEP(F1, a, b, c, d, in[12] + 0x6b901122, 7);
d                  65 crypto/md5.c   	MD5STEP(F1, d, a, b, c, in[13] + 0xfd987193, 12);
d                  66 crypto/md5.c   	MD5STEP(F1, c, d, a, b, in[14] + 0xa679438e, 17);
d                  67 crypto/md5.c   	MD5STEP(F1, b, c, d, a, in[15] + 0x49b40821, 22);
d                  69 crypto/md5.c   	MD5STEP(F2, a, b, c, d, in[1] + 0xf61e2562, 5);
d                  70 crypto/md5.c   	MD5STEP(F2, d, a, b, c, in[6] + 0xc040b340, 9);
d                  71 crypto/md5.c   	MD5STEP(F2, c, d, a, b, in[11] + 0x265e5a51, 14);
d                  72 crypto/md5.c   	MD5STEP(F2, b, c, d, a, in[0] + 0xe9b6c7aa, 20);
d                  73 crypto/md5.c   	MD5STEP(F2, a, b, c, d, in[5] + 0xd62f105d, 5);
d                  74 crypto/md5.c   	MD5STEP(F2, d, a, b, c, in[10] + 0x02441453, 9);
d                  75 crypto/md5.c   	MD5STEP(F2, c, d, a, b, in[15] + 0xd8a1e681, 14);
d                  76 crypto/md5.c   	MD5STEP(F2, b, c, d, a, in[4] + 0xe7d3fbc8, 20);
d                  77 crypto/md5.c   	MD5STEP(F2, a, b, c, d, in[9] + 0x21e1cde6, 5);
d                  78 crypto/md5.c   	MD5STEP(F2, d, a, b, c, in[14] + 0xc33707d6, 9);
d                  79 crypto/md5.c   	MD5STEP(F2, c, d, a, b, in[3] + 0xf4d50d87, 14);
d                  80 crypto/md5.c   	MD5STEP(F2, b, c, d, a, in[8] + 0x455a14ed, 20);
d                  81 crypto/md5.c   	MD5STEP(F2, a, b, c, d, in[13] + 0xa9e3e905, 5);
d                  82 crypto/md5.c   	MD5STEP(F2, d, a, b, c, in[2] + 0xfcefa3f8, 9);
d                  83 crypto/md5.c   	MD5STEP(F2, c, d, a, b, in[7] + 0x676f02d9, 14);
d                  84 crypto/md5.c   	MD5STEP(F2, b, c, d, a, in[12] + 0x8d2a4c8a, 20);
d                  86 crypto/md5.c   	MD5STEP(F3, a, b, c, d, in[5] + 0xfffa3942, 4);
d                  87 crypto/md5.c   	MD5STEP(F3, d, a, b, c, in[8] + 0x8771f681, 11);
d                  88 crypto/md5.c   	MD5STEP(F3, c, d, a, b, in[11] + 0x6d9d6122, 16);
d                  89 crypto/md5.c   	MD5STEP(F3, b, c, d, a, in[14] + 0xfde5380c, 23);
d                  90 crypto/md5.c   	MD5STEP(F3, a, b, c, d, in[1] + 0xa4beea44, 4);
d                  91 crypto/md5.c   	MD5STEP(F3, d, a, b, c, in[4] + 0x4bdecfa9, 11);
d                  92 crypto/md5.c   	MD5STEP(F3, c, d, a, b, in[7] + 0xf6bb4b60, 16);
d                  93 crypto/md5.c   	MD5STEP(F3, b, c, d, a, in[10] + 0xbebfbc70, 23);
d                  94 crypto/md5.c   	MD5STEP(F3, a, b, c, d, in[13] + 0x289b7ec6, 4);
d                  95 crypto/md5.c   	MD5STEP(F3, d, a, b, c, in[0] + 0xeaa127fa, 11);
d                  96 crypto/md5.c   	MD5STEP(F3, c, d, a, b, in[3] + 0xd4ef3085, 16);
d                  97 crypto/md5.c   	MD5STEP(F3, b, c, d, a, in[6] + 0x04881d05, 23);
d                  98 crypto/md5.c   	MD5STEP(F3, a, b, c, d, in[9] + 0xd9d4d039, 4);
d                  99 crypto/md5.c   	MD5STEP(F3, d, a, b, c, in[12] + 0xe6db99e5, 11);
d                 100 crypto/md5.c   	MD5STEP(F3, c, d, a, b, in[15] + 0x1fa27cf8, 16);
d                 101 crypto/md5.c   	MD5STEP(F3, b, c, d, a, in[2] + 0xc4ac5665, 23);
d                 103 crypto/md5.c   	MD5STEP(F4, a, b, c, d, in[0] + 0xf4292244, 6);
d                 104 crypto/md5.c   	MD5STEP(F4, d, a, b, c, in[7] + 0x432aff97, 10);
d                 105 crypto/md5.c   	MD5STEP(F4, c, d, a, b, in[14] + 0xab9423a7, 15);
d                 106 crypto/md5.c   	MD5STEP(F4, b, c, d, a, in[5] + 0xfc93a039, 21);
d                 107 crypto/md5.c   	MD5STEP(F4, a, b, c, d, in[12] + 0x655b59c3, 6);
d                 108 crypto/md5.c   	MD5STEP(F4, d, a, b, c, in[3] + 0x8f0ccc92, 10);
d                 109 crypto/md5.c   	MD5STEP(F4, c, d, a, b, in[10] + 0xffeff47d, 15);
d                 110 crypto/md5.c   	MD5STEP(F4, b, c, d, a, in[1] + 0x85845dd1, 21);
d                 111 crypto/md5.c   	MD5STEP(F4, a, b, c, d, in[8] + 0x6fa87e4f, 6);
d                 112 crypto/md5.c   	MD5STEP(F4, d, a, b, c, in[15] + 0xfe2ce6e0, 10);
d                 113 crypto/md5.c   	MD5STEP(F4, c, d, a, b, in[6] + 0xa3014314, 15);
d                 114 crypto/md5.c   	MD5STEP(F4, b, c, d, a, in[13] + 0x4e0811a1, 21);
d                 115 crypto/md5.c   	MD5STEP(F4, a, b, c, d, in[4] + 0xf7537e82, 6);
d                 116 crypto/md5.c   	MD5STEP(F4, d, a, b, c, in[11] + 0xbd3af235, 10);
d                 117 crypto/md5.c   	MD5STEP(F4, c, d, a, b, in[2] + 0x2ad7d2bb, 15);
d                 118 crypto/md5.c   	MD5STEP(F4, b, c, d, a, in[9] + 0xeb86d391, 21);
d                 123 crypto/md5.c   	hash[3] += d;
d                  40 crypto/rmd128.c #define ROUND(a, b, c, d, f, k, x, s)  { \
d                  41 crypto/rmd128.c 	(a) += f((b), (c), (d)) + le32_to_cpup(&(x)) + (k);	\
d                  43 crypto/rmd160.c #define ROUND(a, b, c, d, e, f, k, x, s)  { \
d                  44 crypto/rmd160.c 	(a) += f((b), (c), (d)) + le32_to_cpup(&(x)) + (k); \
d                  40 crypto/rmd256.c #define ROUND(a, b, c, d, f, k, x, s)  { \
d                  41 crypto/rmd256.c 	(a) += f((b), (c), (d)) + le32_to_cpup(&(x)) + (k); \
d                  43 crypto/rmd320.c #define ROUND(a, b, c, d, e, f, k, x, s)  { \
d                  44 crypto/rmd320.c 	(a) += f((b), (c), (d)) + le32_to_cpup(&(x)) + (k); \
d                  18 crypto/rsa.c   	MPI d;
d                  46 crypto/rsa.c   	return mpi_powm(m, c, key->d, key->n);
d                 104 crypto/rsa.c   	if (unlikely(!pkey->n || !pkey->d)) {
d                 133 crypto/rsa.c   	mpi_free(key->d);
d                 136 crypto/rsa.c   	key->d = NULL;
d                 204 crypto/rsa.c   	mpi_key->d = mpi_read_raw_data(raw_key.d, raw_key.d_sz);
d                 205 crypto/rsa.c   	if (!mpi_key->d)
d                  70 crypto/rsa_helper.c 	key->d = value;
d                  29 crypto/serpent_generic.c #define keyiter(a, b, c, d, i, j) \
d                  30 crypto/serpent_generic.c 	({ b ^= d; b ^= c; b ^= a; b ^= PHI ^ i; b = rol32(b, 11); k[j] = b; })
d                 456 crypto/serpent_generic.c 	__le32	*d = (__le32 *)dst;
d                 503 crypto/serpent_generic.c 	d[0] = cpu_to_le32(r0);
d                 504 crypto/serpent_generic.c 	d[1] = cpu_to_le32(r1);
d                 505 crypto/serpent_generic.c 	d[2] = cpu_to_le32(r2);
d                 506 crypto/serpent_generic.c 	d[3] = cpu_to_le32(r3);
d                 521 crypto/serpent_generic.c 	__le32	*d = (__le32 *)dst;
d                 563 crypto/serpent_generic.c 	d[0] = cpu_to_le32(r2);
d                 564 crypto/serpent_generic.c 	d[1] = cpu_to_le32(r3);
d                 565 crypto/serpent_generic.c 	d[2] = cpu_to_le32(r1);
d                 566 crypto/serpent_generic.c 	d[3] = cpu_to_le32(r4);
d                 592 crypto/serpent_generic.c 	u32 * const d = (u32 * const)dst;
d                 603 crypto/serpent_generic.c 	d[0] = swab32(rd[3]);
d                 604 crypto/serpent_generic.c 	d[1] = swab32(rd[2]);
d                 605 crypto/serpent_generic.c 	d[2] = swab32(rd[1]);
d                 606 crypto/serpent_generic.c 	d[3] = swab32(rd[0]);
d                 612 crypto/serpent_generic.c 	u32 * const d = (u32 * const)dst;
d                 623 crypto/serpent_generic.c 	d[0] = swab32(rd[3]);
d                 624 crypto/serpent_generic.c 	d[1] = swab32(rd[2]);
d                 625 crypto/serpent_generic.c 	d[2] = swab32(rd[1]);
d                 626 crypto/serpent_generic.c 	d[3] = swab32(rd[0]);
d                 101 crypto/sha512_generic.c 	u64 a, b, c, d, e, f, g, h, t1, t2;
d                 107 crypto/sha512_generic.c 	a=state[0];   b=state[1];   c=state[2];   d=state[3];
d                 127 crypto/sha512_generic.c 		t2 = e0(a) + Maj(a,b,c);    d+=t1;    h=t1+t2;
d                 128 crypto/sha512_generic.c 		t1 = g + e1(d) + Ch(d,e,f) + sha512_K[i+1] + W[(i & 15) + 1];
d                 130 crypto/sha512_generic.c 		t1 = f + e1(c) + Ch(c,d,e) + sha512_K[i+2] + W[(i & 15) + 2];
d                 132 crypto/sha512_generic.c 		t1 = e + e1(b) + Ch(b,c,d) + sha512_K[i+3] + W[(i & 15) + 3];
d                 134 crypto/sha512_generic.c 		t1 = d + e1(a) + Ch(a,b,c) + sha512_K[i+4] + W[(i & 15) + 4];
d                 135 crypto/sha512_generic.c 		t2 = e0(e) + Maj(e,f,g);    h+=t1;    d=t1+t2;
d                 137 crypto/sha512_generic.c 		t2 = e0(d) + Maj(d,e,f);    g+=t1;    c=t1+t2;
d                 139 crypto/sha512_generic.c 		t2 = e0(c) + Maj(c,d,e);    f+=t1;    b=t1+t2;
d                 141 crypto/sha512_generic.c 		t2 = e0(b) + Maj(b,c,d);    e+=t1;    a=t1+t2;
d                 144 crypto/sha512_generic.c 	state[0] += a; state[1] += b; state[2] += c; state[3] += d;
d                 148 crypto/sha512_generic.c 	a = b = c = d = e = f = g = h = t1 = t2 = 0;
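The unrolled sha512_generic.c rounds above work by renaming: in one round d absorbs t1, in the next the same register is addressed as e, and so on through all eight working variables. The rolled form below is the more familiar way to write the identical schedule; it is a sketch for illustration (helper names follow the file, the test values in main are assumptions), not the kernel's implementation.

#include <stdint.h>
#include <stdio.h>

static inline uint64_t ror64(uint64_t x, int n) { return (x >> n) | (x << (64 - n)); }

static inline uint64_t Ch(uint64_t x, uint64_t y, uint64_t z)  { return z ^ (x & (y ^ z)); }
static inline uint64_t Maj(uint64_t x, uint64_t y, uint64_t z) { return (x & y) | (z & (x | y)); }
static inline uint64_t e0(uint64_t x) { return ror64(x, 28) ^ ror64(x, 34) ^ ror64(x, 39); }
static inline uint64_t e1(uint64_t x) { return ror64(x, 14) ^ ror64(x, 18) ^ ror64(x, 41); }

/* One rolled SHA-512 round: v[0..7] correspond to a..h. The unrolled kernel
 * code gets the same effect by renaming variables instead of shifting them. */
static void sha512_round(uint64_t v[8], uint64_t k, uint64_t w)
{
	uint64_t t1 = v[7] + e1(v[4]) + Ch(v[4], v[5], v[6]) + k + w;
	uint64_t t2 = e0(v[0]) + Maj(v[0], v[1], v[2]);
	int i;

	for (i = 7; i > 0; i--)		/* h = g, g = f, ..., b = a */
		v[i] = v[i - 1];
	v[4] += t1;			/* new e = d + t1 */
	v[0] = t1 + t2;			/* new a = t1 + t2 */
}

int main(void)
{
	uint64_t v[8] = { 0 };

	sha512_round(v, 0x428a2f98d728ae22ULL, 0);	/* first SHA-512 round constant */
	printf("%016llx\n", (unsigned long long)v[0]);
	return 0;
}

Unrolling, as the kernel source does, trades this explicit shift for renamed variables so no per-round copies are needed.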
d                  78 crypto/sm3_generic.c 	u32 a, b, c, d, e, f, g, h;
d                  84 crypto/sm3_generic.c 	d = m[3];
d                  96 crypto/sm3_generic.c 		tt1 = ff(i, a, b, c) + d + ss2 + *wt;
d                 102 crypto/sm3_generic.c 		d = c;
d                 115 crypto/sm3_generic.c 	m[3] = d ^ m[3];
d                 121 crypto/sm3_generic.c 	a = b = c = d = e = f = g = h = ss1 = ss2 = tt1 = tt2 = 0;
d                 468 crypto/twofish_common.c #define CALC_S(a, b, c, d, i, w, x, y, z) \
d                 474 crypto/twofish_common.c       (d) ^= exp_to_poly[tmp + (z)]; \
d                 529 crypto/twofish_common.c #define CALC_K_2(a, b, c, d, j) \
d                 533 crypto/twofish_common.c    ^ mds[3][q1[d ^ key[(j) + 11]] ^ key[(j) + 3]]
d                 542 crypto/twofish_common.c #define CALC_K192_2(a, b, c, d, j) \
d                 546 crypto/twofish_common.c 	     q1[d ^ key[(j) + 19]], j)
d                  53 crypto/twofish_generic.c #define ENCROUND(n, a, b, c, d) \
d                  58 crypto/twofish_generic.c    (d) = rol32((d), 1) ^ y
d                  60 crypto/twofish_generic.c #define DECROUND(n, a, b, c, d) \
d                  63 crypto/twofish_generic.c    (d) ^= y + ctx->k[2 * (n) + 1]; \
d                  64 crypto/twofish_generic.c    (d) = ror32((d), 1); \
d                  72 crypto/twofish_generic.c    ENCROUND (2 * (n), a, b, c, d); \
d                  73 crypto/twofish_generic.c    ENCROUND (2 * (n) + 1, c, d, a, b)
d                  76 crypto/twofish_generic.c    DECROUND (2 * (n) + 1, c, d, a, b); \
d                  77 crypto/twofish_generic.c    DECROUND (2 * (n), a, b, c, d)
d                 102 crypto/twofish_generic.c 	u32 a, b, c, d;
d                 111 crypto/twofish_generic.c 	INPACK (3, d, 3);
d                 125 crypto/twofish_generic.c 	OUTUNPACK (1, d, 5);
d                 139 crypto/twofish_generic.c 	u32 a, b, c, d;
d                 146 crypto/twofish_generic.c 	INPACK (1, d, 5);
d                 164 crypto/twofish_generic.c 	OUTUNPACK (3, d, 3);
d                 284 drivers/acpi/ac.c static int __init thinkpad_e530_quirk(const struct dmi_system_id *d)
d                 290 drivers/acpi/ac.c static int __init ac_do_not_check_pmic_quirk(const struct dmi_system_id *d)
d                 381 drivers/acpi/acpi_video.c static int video_set_bqc_offset(const struct dmi_system_id *d)
d                 388 drivers/acpi/acpi_video.c 	const struct dmi_system_id *d)
d                 395 drivers/acpi/acpi_video.c static int video_set_device_id_scheme(const struct dmi_system_id *d)
d                 401 drivers/acpi/acpi_video.c static int video_enable_only_lcd(const struct dmi_system_id *d)
d                 415 drivers/acpi/acpi_video.c 	const struct dmi_system_id *d)
d                  54 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_16_TO_16(d, s)        {((  u8 *)(void *)(d))[0] = ((u8 *)(void *)(s))[1];\
d                  55 drivers/acpi/acpica/acmacros.h 			  ((  u8 *)(void *)(d))[1] = ((u8 *)(void *)(s))[0];}
d                  57 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_16_TO_32(d, s)        {(*(u32 *)(void *)(d))=0;\
d                  58 drivers/acpi/acpica/acmacros.h 					  ((u8 *)(void *)(d))[2] = ((u8 *)(void *)(s))[1];\
d                  59 drivers/acpi/acpica/acmacros.h 					  ((u8 *)(void *)(d))[3] = ((u8 *)(void *)(s))[0];}
d                  61 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_16_TO_64(d, s)        {(*(u64 *)(void *)(d))=0;\
d                  62 drivers/acpi/acpica/acmacros.h 							   ((u8 *)(void *)(d))[6] = ((u8 *)(void *)(s))[1];\
d                  63 drivers/acpi/acpica/acmacros.h 							   ((u8 *)(void *)(d))[7] = ((u8 *)(void *)(s))[0];}
d                  67 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_32_TO_16(d, s)        ACPI_MOVE_16_TO_16(d, s)	/* Truncate to 16 */
d                  69 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_32_TO_32(d, s)        {((  u8 *)(void *)(d))[0] = ((u8 *)(void *)(s))[3];\
d                  70 drivers/acpi/acpica/acmacros.h 									  ((  u8 *)(void *)(d))[1] = ((u8 *)(void *)(s))[2];\
d                  71 drivers/acpi/acpica/acmacros.h 									  ((  u8 *)(void *)(d))[2] = ((u8 *)(void *)(s))[1];\
d                  72 drivers/acpi/acpica/acmacros.h 									  ((  u8 *)(void *)(d))[3] = ((u8 *)(void *)(s))[0];}
d                  74 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_32_TO_64(d, s)        {(*(u64 *)(void *)(d))=0;\
d                  75 drivers/acpi/acpica/acmacros.h 										   ((u8 *)(void *)(d))[4] = ((u8 *)(void *)(s))[3];\
d                  76 drivers/acpi/acpica/acmacros.h 										   ((u8 *)(void *)(d))[5] = ((u8 *)(void *)(s))[2];\
d                  77 drivers/acpi/acpica/acmacros.h 										   ((u8 *)(void *)(d))[6] = ((u8 *)(void *)(s))[1];\
d                  78 drivers/acpi/acpica/acmacros.h 										   ((u8 *)(void *)(d))[7] = ((u8 *)(void *)(s))[0];}
d                  82 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_64_TO_16(d, s)        ACPI_MOVE_16_TO_16(d, s)	/* Truncate to 16 */
d                  84 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_64_TO_32(d, s)        ACPI_MOVE_32_TO_32(d, s)	/* Truncate to 32 */
d                  86 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_64_TO_64(d, s)        {((  u8 *)(void *)(d))[0] = ((u8 *)(void *)(s))[7];\
d                  87 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[1] = ((u8 *)(void *)(s))[6];\
d                  88 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[2] = ((u8 *)(void *)(s))[5];\
d                  89 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[3] = ((u8 *)(void *)(s))[4];\
d                  90 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[4] = ((u8 *)(void *)(s))[3];\
d                  91 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[5] = ((u8 *)(void *)(s))[2];\
d                  92 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[6] = ((u8 *)(void *)(s))[1];\
d                  93 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[7] = ((u8 *)(void *)(s))[0];}
d                 105 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_16_TO_16(d, s)        *(u16 *)(void *)(d) = *(u16 *)(void *)(s)
d                 106 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_16_TO_32(d, s)        *(u32 *)(void *)(d) = *(u16 *)(void *)(s)
d                 107 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_16_TO_64(d, s)        *(u64 *)(void *)(d) = *(u16 *)(void *)(s)
d                 111 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_32_TO_16(d, s)        ACPI_MOVE_16_TO_16(d, s)	/* Truncate to 16 */
d                 112 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_32_TO_32(d, s)        *(u32 *)(void *)(d) = *(u32 *)(void *)(s)
d                 113 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_32_TO_64(d, s)        *(u64 *)(void *)(d) = *(u32 *)(void *)(s)
d                 117 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_64_TO_16(d, s)        ACPI_MOVE_16_TO_16(d, s)	/* Truncate to 16 */
d                 118 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_64_TO_32(d, s)        ACPI_MOVE_32_TO_32(d, s)	/* Truncate to 32 */
d                 119 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_64_TO_64(d, s)        *(u64 *)(void *)(d) = *(u64 *)(void *)(s)
d                 130 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_16_TO_16(d, s)        {((  u8 *)(void *)(d))[0] = ((u8 *)(void *)(s))[0];\
d                 131 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[1] = ((u8 *)(void *)(s))[1];}
d                 133 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_16_TO_32(d, s)        {(*(u32 *)(void *)(d)) = 0; ACPI_MOVE_16_TO_16(d, s);}
d                 134 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_16_TO_64(d, s)        {(*(u64 *)(void *)(d)) = 0; ACPI_MOVE_16_TO_16(d, s);}
d                 138 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_32_TO_16(d, s)        ACPI_MOVE_16_TO_16(d, s)	/* Truncate to 16 */
d                 140 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_32_TO_32(d, s)        {((  u8 *)(void *)(d))[0] = ((u8 *)(void *)(s))[0];\
d                 141 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[1] = ((u8 *)(void *)(s))[1];\
d                 142 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[2] = ((u8 *)(void *)(s))[2];\
d                 143 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[3] = ((u8 *)(void *)(s))[3];}
d                 145 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_32_TO_64(d, s)        {(*(u64 *)(void *)(d)) = 0; ACPI_MOVE_32_TO_32(d, s);}
d                 149 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_64_TO_16(d, s)        ACPI_MOVE_16_TO_16(d, s)	/* Truncate to 16 */
d                 150 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_64_TO_32(d, s)        ACPI_MOVE_32_TO_32(d, s)	/* Truncate to 32 */
d                 151 drivers/acpi/acpica/acmacros.h #define ACPI_MOVE_64_TO_64(d, s)        {((  u8 *)(void *)(d))[0] = ((u8 *)(void *)(s))[0];\
d                 152 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[1] = ((u8 *)(void *)(s))[1];\
d                 153 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[2] = ((u8 *)(void *)(s))[2];\
d                 154 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[3] = ((u8 *)(void *)(s))[3];\
d                 155 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[4] = ((u8 *)(void *)(s))[4];\
d                 156 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[5] = ((u8 *)(void *)(s))[5];\
d                 157 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[6] = ((u8 *)(void *)(s))[6];\
d                 158 drivers/acpi/acpica/acmacros.h 										 ((  u8 *)(void *)(d))[7] = ((u8 *)(void *)(s))[7];}
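The three ACPI_MOVE_* families above are selected by configuration: the first swaps bytes for big-endian hosts reading little-endian ACPI data, the second uses plain assignments on hardware that tolerates misaligned loads, and the third copies byte-by-byte with no swap for little-endian hosts that cannot. Below is a self-contained sketch of that last, byte-wise flavour, showing why the macros avoid dereferencing a possibly unaligned u32 directly; move_32_to_32 and the sample table bytes are invented for the example.

#include <stdint.h>
#include <stdio.h>

/* Illustration of the "no hardware misaligned support" variant of
 * ACPI_MOVE_32_TO_32: copy four bytes individually so the source pointer
 * never has to be 4-byte aligned. */
static void move_32_to_32(void *d, const void *s)
{
	((uint8_t *)d)[0] = ((const uint8_t *)s)[0];
	((uint8_t *)d)[1] = ((const uint8_t *)s)[1];
	((uint8_t *)d)[2] = ((const uint8_t *)s)[2];
	((uint8_t *)d)[3] = ((const uint8_t *)s)[3];
}

int main(void)
{
	/* A fake ACPI table fragment: a 32-bit field at odd offset 1. */
	uint8_t table[8] = { 0x00, 0x44, 0x33, 0x22, 0x11, 0x00, 0x00, 0x00 };
	uint32_t len;

	move_32_to_32(&len, &table[1]);		/* safe regardless of alignment */
	printf("0x%08x\n", len);		/* 0x11223344 on little-endian hosts */
	return 0;
}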
d                 374 drivers/acpi/acpica/acmacros.h #define ACPI_GET_DESCRIPTOR_PTR(d)      (((union acpi_descriptor *)(void *)(d))->common.common_pointer)
d                 375 drivers/acpi/acpica/acmacros.h #define ACPI_SET_DESCRIPTOR_PTR(d, p)   (((union acpi_descriptor *)(void *)(d))->common.common_pointer = (p))
d                 376 drivers/acpi/acpica/acmacros.h #define ACPI_GET_DESCRIPTOR_TYPE(d)     (((union acpi_descriptor *)(void *)(d))->common.descriptor_type)
d                 377 drivers/acpi/acpica/acmacros.h #define ACPI_SET_DESCRIPTOR_TYPE(d, t)  (((union acpi_descriptor *)(void *)(d))->common.descriptor_type = (t))
d                 401 drivers/acpi/acpica/acmacros.h #define ARGI_LIST4(a, b, c, d)          (ARG_1(d)|ARG_2(c)|ARG_3(b)|ARG_4(a))
d                 402 drivers/acpi/acpica/acmacros.h #define ARGI_LIST5(a, b, c, d, e)       (ARG_1(e)|ARG_2(d)|ARG_3(c)|ARG_4(b)|ARG_5(a))
d                 403 drivers/acpi/acpica/acmacros.h #define ARGI_LIST6(a, b, c, d, e, f)    (ARG_1(f)|ARG_2(e)|ARG_3(d)|ARG_4(c)|ARG_5(b)|ARG_6(a))
d                 408 drivers/acpi/acpica/acmacros.h #define ARGP_LIST4(a, b, c, d)          (ARG_1(a)|ARG_2(b)|ARG_3(c)|ARG_4(d))
d                 409 drivers/acpi/acpica/acmacros.h #define ARGP_LIST5(a, b, c, d, e)       (ARG_1(a)|ARG_2(b)|ARG_3(c)|ARG_4(d)|ARG_5(e))
d                 410 drivers/acpi/acpica/acmacros.h #define ARGP_LIST6(a, b, c, d, e, f)    (ARG_1(a)|ARG_2(b)|ARG_3(c)|ARG_4(d)|ARG_5(e)|ARG_6(f))
d                 462 drivers/acpi/acpica/acmacros.h #define ACPI_IS_OCTAL_DIGIT(d)              (((char)(d) >= '0') && ((char)(d) <= '7'))
d                 477 drivers/acpi/acpica/acmacros.h #define ASL_CV_PRINT_ONE_COMMENT(a,b,c,d) cv_print_one_comment_type (a,b,c,d);
d                 492 drivers/acpi/acpica/acmacros.h #define ASL_CV_PRINT_ONE_COMMENT(a,b,c,d)
d                 124 drivers/acpi/acpica/acpredef.h #define PACKAGE_INFO(a,b,c,d,e,f)       {{{(a),(b),(c),(d)}, ((((u16)(f)) << 8) | (e)), 0}}
d                  82 drivers/acpi/acpica/acresrc.h #define ACPI_RSC_TABLE_SIZE(d)          (sizeof (d) / sizeof (struct acpi_rsconvert_info))
d                1313 drivers/acpi/battery.c battery_bix_broken_package_quirk(const struct dmi_system_id *d)
d                1320 drivers/acpi/battery.c battery_notification_delay_quirk(const struct dmi_system_id *d)
d                1327 drivers/acpi/battery.c battery_ac_is_broken_quirk(const struct dmi_system_id *d)
d                1334 drivers/acpi/battery.c battery_do_not_check_pmic_quirk(const struct dmi_system_id *d)
d                  74 drivers/acpi/blacklist.c static int __init dmi_enable_rev_override(const struct dmi_system_id *d)
d                  77 drivers/acpi/blacklist.c 	       d->ident);
d                 369 drivers/acpi/device_sysfs.c acpi_eject_store(struct device *d, struct device_attribute *attr,
d                 372 drivers/acpi/device_sysfs.c 	struct acpi_device *acpi_device = to_acpi_device(d);
d                 850 drivers/acpi/ec.c 	u8 d;
d                 852 drivers/acpi/ec.c 				.wdata = NULL, .rdata = &d,
d                 871 drivers/acpi/ec.c 	u8 d;
d                 873 drivers/acpi/ec.c 				.wdata = &address, .rdata = &d,
d                 877 drivers/acpi/ec.c 	*data = d;
d                  29 drivers/acpi/irq.c 	struct irq_domain *d = irq_find_matching_fwnode(acpi_gsi_domain_id,
d                  32 drivers/acpi/irq.c 	*irq = irq_find_mapping(d, gsi);
d                  76 drivers/acpi/irq.c 	struct irq_domain *d = irq_find_matching_fwnode(acpi_gsi_domain_id,
d                  78 drivers/acpi/irq.c 	int irq = irq_find_mapping(d, gsi);
d                 311 drivers/acpi/irq.c 	struct irq_domain *d = irq_find_matching_fwnode(acpi_gsi_domain_id,
d                 314 drivers/acpi/irq.c 	if (!d)
d                 317 drivers/acpi/irq.c 	return irq_domain_create_hierarchy(d, flags, size, fwnode, ops,
d                 189 drivers/acpi/numa.c 	int d = slit->locality_count;
d                 190 drivers/acpi/numa.c 	for (i = 0; i < d; i++) {
d                 191 drivers/acpi/numa.c 		for (j = 0; j < d; j++)  {
d                 192 drivers/acpi/numa.c 			u8 val = slit->entry[d*i + j];
d                 280 drivers/acpi/osi.c 				      const struct dmi_system_id *d)
d                 282 drivers/acpi/osi.c 	pr_notice("DMI detected to setup _OSI(\"Linux\"): %s\n", d->ident);
d                 287 drivers/acpi/osi.c static int __init dmi_enable_osi_linux(const struct dmi_system_id *d)
d                 289 drivers/acpi/osi.c 	acpi_osi_dmi_linux(true, d);
d                 294 drivers/acpi/osi.c static int __init dmi_disable_osi_vista(const struct dmi_system_id *d)
d                 296 drivers/acpi/osi.c 	pr_notice("DMI detected: %s\n", d->ident);
d                 304 drivers/acpi/osi.c static int __init dmi_disable_osi_win7(const struct dmi_system_id *d)
d                 306 drivers/acpi/osi.c 	pr_notice("DMI detected: %s\n", d->ident);
d                 312 drivers/acpi/osi.c static int __init dmi_disable_osi_win8(const struct dmi_system_id *d)
d                 314 drivers/acpi/osi.c 	pr_notice("DMI detected: %s\n", d->ident);
d                 161 drivers/acpi/pci_slot.c static int do_sta_before_sun(const struct dmi_system_id *d)
d                 164 drivers/acpi/pci_slot.c 		d->ident);
d                  56 drivers/acpi/pmic/intel_pmic.c 	struct intel_pmic_opregion_data *d = opregion->data;
d                  65 drivers/acpi/pmic/intel_pmic.c 	result = pmic_get_reg_bit(address, d->power_table,
d                  66 drivers/acpi/pmic/intel_pmic.c 				  d->power_table_count, &reg, &bit);
d                  73 drivers/acpi/pmic/intel_pmic.c 		d->get_power(regmap, reg, bit, value64) :
d                  74 drivers/acpi/pmic/intel_pmic.c 		d->update_power(regmap, reg, bit, *value64 == 1);
d                 138 drivers/acpi/pmic/intel_pmic.c 	struct intel_pmic_opregion_data *d = opregion->data;
d                 141 drivers/acpi/pmic/intel_pmic.c 	if (!d->get_policy || !d->update_policy)
d                 145 drivers/acpi/pmic/intel_pmic.c 		return d->get_policy(regmap, reg, bit, value);
d                 150 drivers/acpi/pmic/intel_pmic.c 	return d->update_policy(regmap, reg, bit, *value);
d                 174 drivers/acpi/pmic/intel_pmic.c 	struct intel_pmic_opregion_data *d = opregion->data;
d                 180 drivers/acpi/pmic/intel_pmic.c 	result = pmic_get_reg_bit(address, d->thermal_table,
d                 181 drivers/acpi/pmic/intel_pmic.c 				  d->thermal_table_count, &reg, &bit);
d                 253 drivers/acpi/pmic/intel_pmic.c 					struct intel_pmic_opregion_data *d)
d                 259 drivers/acpi/pmic/intel_pmic.c 	if (!dev || !regmap || !d)
d                 299 drivers/acpi/pmic/intel_pmic.c 	opregion->data = d;
d                 336 drivers/acpi/pmic/intel_pmic.c 	struct intel_pmic_opregion_data *d;
d                 344 drivers/acpi/pmic/intel_pmic.c 	d = intel_pmic_opregion->data;
d                 348 drivers/acpi/pmic/intel_pmic.c 	if (d->exec_mipi_pmic_seq_element) {
d                 349 drivers/acpi/pmic/intel_pmic.c 		ret = d->exec_mipi_pmic_seq_element(intel_pmic_opregion->regmap,
d                 352 drivers/acpi/pmic/intel_pmic.c 	} else if (d->pmic_i2c_address) {
d                 353 drivers/acpi/pmic/intel_pmic.c 		if (i2c_address == d->pmic_i2c_address) {
d                  28 drivers/acpi/pmic/intel_pmic.h int intel_pmic_install_opregion_handler(struct device *dev, acpi_handle handle, struct regmap *regmap, struct intel_pmic_opregion_data *d);
d                1169 drivers/acpi/processor_idle.c 	struct acpi_device *d = NULL;
d                1189 drivers/acpi/processor_idle.c 		acpi_bus_get_device(pr_ahandle, &d);
d                1192 drivers/acpi/processor_idle.c 		if (strcmp(acpi_device_hid(d), ACPI_PROCESSOR_CONTAINER_HID))
d                 135 drivers/acpi/sleep.c static int __init init_nvs_save_s3(const struct dmi_system_id *d)
d                 153 drivers/acpi/sleep.c static int __init init_old_suspend_ordering(const struct dmi_system_id *d)
d                 159 drivers/acpi/sleep.c static int __init init_nvs_nosave(const struct dmi_system_id *d)
d                 167 drivers/acpi/sleep.c static int __init init_default_s3(const struct dmi_system_id *d)
d                1158 drivers/acpi/thermal.c static int thermal_act(const struct dmi_system_id *d) {
d                1162 drivers/acpi/thermal.c 			  "disabling all active thermal trip points\n", d->ident);
d                1167 drivers/acpi/thermal.c static int thermal_nocrt(const struct dmi_system_id *d) {
d                1170 drivers/acpi/thermal.c 		  "disabling all critical thermal trip point actions.\n", d->ident);
d                1174 drivers/acpi/thermal.c static int thermal_tzp(const struct dmi_system_id *d) {
d                1178 drivers/acpi/thermal.c 			  "enabling thermal zone polling\n", d->ident);
d                1183 drivers/acpi/thermal.c static int thermal_psv(const struct dmi_system_id *d) {
d                1187 drivers/acpi/thermal.c 			  "disabling all passive thermal trip points\n", d->ident);
d                  88 drivers/acpi/video_detect.c static int video_detect_force_vendor(const struct dmi_system_id *d)
d                  94 drivers/acpi/video_detect.c static int video_detect_force_video(const struct dmi_system_id *d)
d                 100 drivers/acpi/video_detect.c static int video_detect_force_native(const struct dmi_system_id *d)
d                 106 drivers/acpi/video_detect.c static int video_detect_force_none(const struct dmi_system_id *d)
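The many (const struct dmi_system_id *d) callbacks in the ACPI entries above (ac.c, acpi_video.c, battery.c, blacklist.c, osi.c, pci_slot.c, sleep.c, thermal.c, video_detect.c) share one pattern: a dmi_system_id table matches firmware identification strings and, on a hit, the callback flips a quirk flag, usually logging d->ident. The kernel-style sketch below illustrates that pattern; the flag, callback, table and matched strings are all hypothetical, not taken from any of those drivers.

#include <linux/dmi.h>
#include <linux/init.h>
#include <linux/printk.h>

/* Hypothetical quirk: every name below is illustrative. */
static bool example_quirk_enabled;

static int __init example_quirk_cb(const struct dmi_system_id *d)
{
	pr_notice("DMI detected: %s, enabling example quirk\n", d->ident);
	example_quirk_enabled = true;
	return 0;
}

static const struct dmi_system_id example_quirk_table[] __initconst = {
	{
		.callback = example_quirk_cb,
		.ident = "Example Vendor Example Board",
		.matches = {
			DMI_MATCH(DMI_SYS_VENDOR, "Example Vendor"),
			DMI_MATCH(DMI_PRODUCT_NAME, "Example Board"),
		},
	},
	{ }	/* terminator */
};

/* Typically invoked once from the driver's init path:
 *	dmi_check_system(example_quirk_table);
 */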
d                  25 drivers/amba/bus.c #define to_amba_driver(d)	container_of(d, struct amba_driver, drv)
d                 361 drivers/amba/bus.c 	struct amba_device *d = to_amba_device(dev);
d                 363 drivers/amba/bus.c 	if (d->res.parent)
d                 364 drivers/amba/bus.c 		release_resource(&d->res);
d                 365 drivers/amba/bus.c 	kfree(d);
d                 731 drivers/amba/bus.c 	struct find_data *d = data;
d                 735 drivers/amba/bus.c 	r = (pcdev->periphid & d->mask) == d->id;
d                 736 drivers/amba/bus.c 	if (d->parent)
d                 737 drivers/amba/bus.c 		r &= d->parent == dev->parent;
d                 738 drivers/amba/bus.c 	if (d->busid)
d                 739 drivers/amba/bus.c 		r &= strcmp(dev_name(dev), d->busid) == 0;
d                 743 drivers/amba/bus.c 		d->dev = pcdev;
d                  68 drivers/ata/libata-transport.c #define tdev_to_device(d)					\
d                  69 drivers/ata/libata-transport.c 	container_of((d), struct ata_device, tdev)
d                  73 drivers/ata/libata-transport.c #define tdev_to_link(d)						\
d                  74 drivers/ata/libata-transport.c 	container_of((d), struct ata_link, tdev)
d                  78 drivers/ata/libata-transport.c #define tdev_to_port(d)						\
d                  79 drivers/ata/libata-transport.c 	container_of((d), struct ata_port, tdev)
d                  88 drivers/ata/libata.h #define to_ata_port(d) container_of(d, struct ata_port, tdev)
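to_amba_driver(), tdev_to_device()/tdev_to_link()/tdev_to_port() and to_ata_port() above are thin container_of() wrappers: given a pointer to a member embedded in a larger structure, they recover the enclosing structure by subtracting the member's offset. Below is a self-contained illustration of the idiom outside the kernel; container_of is re-derived from offsetof, and example_port with its helpers is made up for the example.

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct example_port {
	int id;
	struct { int dummy; } tdev;	/* embedded "transport device" */
};

#define tdev_to_example_port(d) container_of((d), struct example_port, tdev)

int main(void)
{
	struct example_port port = { .id = 7 };
	void *embedded = &port.tdev;	/* what a callback typically receives */

	printf("%d\n", tdev_to_example_port(embedded)->id);	/* prints 7 */
	return 0;
}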
d                  34 drivers/ata/pata_pxa.c static void pxa_ata_dma_irq(void *d)
d                  36 drivers/ata/pata_pxa.c 	struct pata_pxa_data *pd = d;
d                 149 drivers/atm/eni.c #define NEPMOK(a0,d,b,c) NEPJOK(a0,(a0+d) & (c-1),b)
d                 150 drivers/atm/eni.c #define EEPMOK(a0,d,b,c) EEPJOK(a0,(a0+d) & (c-1),b)
d                 121 drivers/atm/eni.h #define ENI_DEV(d) ((struct eni_dev *) (d)->dev_data)
d                 122 drivers/atm/eni.h #define ENI_VCC(d) ((struct eni_vcc *) (d)->dev_data)
d                  65 drivers/atm/fore200e.h #define FORE200E_DEV(d)          ((struct fore200e*)((d)->dev_data))
d                  66 drivers/atm/fore200e.h #define FORE200E_VCC(d)          ((struct fore200e_vcc*)((d)->dev_data))
d                1607 drivers/atm/horizon.c     unsigned short d = 0;
d                1611 drivers/atm/horizon.c 		d = (d << 4) | hex_to_bin(*s++);
d                1612 drivers/atm/horizon.c       PRINTK (KERN_INFO, "debug bitmap is now %hx", debug = d);
d                1060 drivers/atm/iphase.h #define INPH_IA_DEV(d) ((IADEV *) (d)->dev_data)  
d                  93 drivers/atm/zatm.h #define ZATM_DEV(d) ((struct zatm_dev *) (d)->dev_data)
d                  94 drivers/atm/zatm.h #define ZATM_VCC(d) ((struct zatm_vcc *) (d)->dev_data)
d                2719 drivers/base/core.c static inline struct root_device *to_root_device(struct device *d)
d                2721 drivers/base/core.c 	return container_of(d, struct root_device, dev);
d                 438 drivers/base/platform.c 	void *d = NULL;
d                 441 drivers/base/platform.c 		d = kmemdup(data, size, GFP_KERNEL);
d                 442 drivers/base/platform.c 		if (!d)
d                 447 drivers/base/platform.c 	pdev->dev.platform_data = d;
d                 122 drivers/base/power/domain.c #define genpd_lock_nested(p, d)		p->lock_ops->lock_nested(p, d)
d                3014 drivers/base/power/domain.c 	struct dentry *d;
d                3023 drivers/base/power/domain.c 		d = debugfs_create_dir(genpd->name, genpd_debugfs_dir);
d                3026 drivers/base/power/domain.c 				d, genpd, &status_fops);
d                3028 drivers/base/power/domain.c 				d, genpd, &sub_domains_fops);
d                3030 drivers/base/power/domain.c 				d, genpd, &idle_states_fops);
d                3032 drivers/base/power/domain.c 				d, genpd, &active_time_fops);
d                3034 drivers/base/power/domain.c 				d, genpd, &total_idle_time_fops);
d                3036 drivers/base/power/domain.c 				d, genpd, &devices_fops);
d                3039 drivers/base/power/domain.c 					    d, genpd, &perf_state_fops);
d                  57 drivers/base/regmap/regmap-irq.c 	struct regmap_irq_chip_data *d = irq_data_get_irq_chip_data(data);
d                  59 drivers/base/regmap/regmap-irq.c 	mutex_lock(&d->lock);
d                  62 drivers/base/regmap/regmap-irq.c static int regmap_irq_update_bits(struct regmap_irq_chip_data *d,
d                  66 drivers/base/regmap/regmap-irq.c 	if (d->chip->mask_writeonly)
d                  67 drivers/base/regmap/regmap-irq.c 		return regmap_write_bits(d->map, reg, mask, val);
d                  69 drivers/base/regmap/regmap-irq.c 		return regmap_update_bits(d->map, reg, mask, val);
d                  74 drivers/base/regmap/regmap-irq.c 	struct regmap_irq_chip_data *d = irq_data_get_irq_chip_data(data);
d                  75 drivers/base/regmap/regmap-irq.c 	struct regmap *map = d->map;
d                  81 drivers/base/regmap/regmap-irq.c 	if (d->chip->runtime_pm) {
d                  88 drivers/base/regmap/regmap-irq.c 	if (d->clear_status) {
d                  89 drivers/base/regmap/regmap-irq.c 		for (i = 0; i < d->chip->num_regs; i++) {
d                  90 drivers/base/regmap/regmap-irq.c 			reg = d->chip->status_base +
d                  91 drivers/base/regmap/regmap-irq.c 				(i * map->reg_stride * d->irq_reg_stride);
d                  95 drivers/base/regmap/regmap-irq.c 				dev_err(d->map->dev,
d                  99 drivers/base/regmap/regmap-irq.c 		d->clear_status = false;
d                 107 drivers/base/regmap/regmap-irq.c 	for (i = 0; i < d->chip->num_regs; i++) {
d                 108 drivers/base/regmap/regmap-irq.c 		if (!d->chip->mask_base)
d                 111 drivers/base/regmap/regmap-irq.c 		reg = d->chip->mask_base +
d                 112 drivers/base/regmap/regmap-irq.c 			(i * map->reg_stride * d->irq_reg_stride);
d                 113 drivers/base/regmap/regmap-irq.c 		if (d->chip->mask_invert) {
d                 114 drivers/base/regmap/regmap-irq.c 			ret = regmap_irq_update_bits(d, reg,
d                 115 drivers/base/regmap/regmap-irq.c 					 d->mask_buf_def[i], ~d->mask_buf[i]);
d                 116 drivers/base/regmap/regmap-irq.c 		} else if (d->chip->unmask_base) {
d                 118 drivers/base/regmap/regmap-irq.c 			ret = regmap_irq_update_bits(d, reg,
d                 119 drivers/base/regmap/regmap-irq.c 					d->mask_buf_def[i], ~d->mask_buf[i]);
d                 121 drivers/base/regmap/regmap-irq.c 				dev_err(d->map->dev,
d                 124 drivers/base/regmap/regmap-irq.c 			unmask_offset = d->chip->unmask_base -
d                 125 drivers/base/regmap/regmap-irq.c 							d->chip->mask_base;
d                 127 drivers/base/regmap/regmap-irq.c 			ret = regmap_irq_update_bits(d,
d                 129 drivers/base/regmap/regmap-irq.c 					d->mask_buf_def[i],
d                 130 drivers/base/regmap/regmap-irq.c 					d->mask_buf[i]);
d                 132 drivers/base/regmap/regmap-irq.c 			ret = regmap_irq_update_bits(d, reg,
d                 133 drivers/base/regmap/regmap-irq.c 					 d->mask_buf_def[i], d->mask_buf[i]);
d                 136 drivers/base/regmap/regmap-irq.c 			dev_err(d->map->dev, "Failed to sync masks in %x\n",
d                 139 drivers/base/regmap/regmap-irq.c 		reg = d->chip->wake_base +
d                 140 drivers/base/regmap/regmap-irq.c 			(i * map->reg_stride * d->irq_reg_stride);
d                 141 drivers/base/regmap/regmap-irq.c 		if (d->wake_buf) {
d                 142 drivers/base/regmap/regmap-irq.c 			if (d->chip->wake_invert)
d                 143 drivers/base/regmap/regmap-irq.c 				ret = regmap_irq_update_bits(d, reg,
d                 144 drivers/base/regmap/regmap-irq.c 							 d->mask_buf_def[i],
d                 145 drivers/base/regmap/regmap-irq.c 							 ~d->wake_buf[i]);
d                 147 drivers/base/regmap/regmap-irq.c 				ret = regmap_irq_update_bits(d, reg,
d                 148 drivers/base/regmap/regmap-irq.c 							 d->mask_buf_def[i],
d                 149 drivers/base/regmap/regmap-irq.c 							 d->wake_buf[i]);
d                 151 drivers/base/regmap/regmap-irq.c 				dev_err(d->map->dev,
d                 156 drivers/base/regmap/regmap-irq.c 		if (!d->chip->init_ack_masked)
d                 163 drivers/base/regmap/regmap-irq.c 		if (d->mask_buf[i] && (d->chip->ack_base || d->chip->use_ack)) {
d                 164 drivers/base/regmap/regmap-irq.c 			reg = d->chip->ack_base +
d                 165 drivers/base/regmap/regmap-irq.c 				(i * map->reg_stride * d->irq_reg_stride);
d                 167 drivers/base/regmap/regmap-irq.c 			if (d->chip->ack_invert)
d                 168 drivers/base/regmap/regmap-irq.c 				ret = regmap_write(map, reg, ~d->mask_buf[i]);
d                 170 drivers/base/regmap/regmap-irq.c 				ret = regmap_write(map, reg, d->mask_buf[i]);
d                 172 drivers/base/regmap/regmap-irq.c 				dev_err(d->map->dev, "Failed to ack 0x%x: %d\n",
d                 178 drivers/base/regmap/regmap-irq.c 	if (!d->chip->type_in_mask) {
d                 179 drivers/base/regmap/regmap-irq.c 		for (i = 0; i < d->chip->num_type_reg; i++) {
d                 180 drivers/base/regmap/regmap-irq.c 			if (!d->type_buf_def[i])
d                 182 drivers/base/regmap/regmap-irq.c 			reg = d->chip->type_base +
d                 183 drivers/base/regmap/regmap-irq.c 				(i * map->reg_stride * d->type_reg_stride);
d                 184 drivers/base/regmap/regmap-irq.c 			if (d->chip->type_invert)
d                 185 drivers/base/regmap/regmap-irq.c 				ret = regmap_irq_update_bits(d, reg,
d                 186 drivers/base/regmap/regmap-irq.c 					d->type_buf_def[i], ~d->type_buf[i]);
d                 188 drivers/base/regmap/regmap-irq.c 				ret = regmap_irq_update_bits(d, reg,
d                 189 drivers/base/regmap/regmap-irq.c 					d->type_buf_def[i], d->type_buf[i]);
d                 191 drivers/base/regmap/regmap-irq.c 				dev_err(d->map->dev, "Failed to sync type in %x\n",
d                 196 drivers/base/regmap/regmap-irq.c 	if (d->chip->runtime_pm)
d                 200 drivers/base/regmap/regmap-irq.c 	if (d->wake_count < 0)
d                 201 drivers/base/regmap/regmap-irq.c 		for (i = d->wake_count; i < 0; i++)
d                 202 drivers/base/regmap/regmap-irq.c 			irq_set_irq_wake(d->irq, 0);
d                 203 drivers/base/regmap/regmap-irq.c 	else if (d->wake_count > 0)
d                 204 drivers/base/regmap/regmap-irq.c 		for (i = 0; i < d->wake_count; i++)
d                 205 drivers/base/regmap/regmap-irq.c 			irq_set_irq_wake(d->irq, 1);
d                 207 drivers/base/regmap/regmap-irq.c 	d->wake_count = 0;
d                 209 drivers/base/regmap/regmap-irq.c 	mutex_unlock(&d->lock);
d                 214 drivers/base/regmap/regmap-irq.c 	struct regmap_irq_chip_data *d = irq_data_get_irq_chip_data(data);
d                 215 drivers/base/regmap/regmap-irq.c 	struct regmap *map = d->map;
d                 216 drivers/base/regmap/regmap-irq.c 	const struct regmap_irq *irq_data = irq_to_regmap_irq(d, data->hwirq);
d                 232 drivers/base/regmap/regmap-irq.c 	if (d->chip->type_in_mask && type)
d                 233 drivers/base/regmap/regmap-irq.c 		mask = d->type_buf[irq_data->reg_offset / map->reg_stride];
d                 237 drivers/base/regmap/regmap-irq.c 	if (d->chip->clear_on_unmask)
d                 238 drivers/base/regmap/regmap-irq.c 		d->clear_status = true;
d                 240 drivers/base/regmap/regmap-irq.c 	d->mask_buf[irq_data->reg_offset / map->reg_stride] &= ~mask;
d                 245 drivers/base/regmap/regmap-irq.c 	struct regmap_irq_chip_data *d = irq_data_get_irq_chip_data(data);
d                 246 drivers/base/regmap/regmap-irq.c 	struct regmap *map = d->map;
d                 247 drivers/base/regmap/regmap-irq.c 	const struct regmap_irq *irq_data = irq_to_regmap_irq(d, data->hwirq);
d                 249 drivers/base/regmap/regmap-irq.c 	d->mask_buf[irq_data->reg_offset / map->reg_stride] |= irq_data->mask;
d                 254 drivers/base/regmap/regmap-irq.c 	struct regmap_irq_chip_data *d = irq_data_get_irq_chip_data(data);
d                 255 drivers/base/regmap/regmap-irq.c 	struct regmap *map = d->map;
d                 256 drivers/base/regmap/regmap-irq.c 	const struct regmap_irq *irq_data = irq_to_regmap_irq(d, data->hwirq);
d                 266 drivers/base/regmap/regmap-irq.c 		d->type_buf[reg] &= ~t->type_reg_mask;
d                 268 drivers/base/regmap/regmap-irq.c 		d->type_buf[reg] &= ~(t->type_falling_val |
d                 274 drivers/base/regmap/regmap-irq.c 		d->type_buf[reg] |= t->type_falling_val;
d                 278 drivers/base/regmap/regmap-irq.c 		d->type_buf[reg] |= t->type_rising_val;
d                 282 drivers/base/regmap/regmap-irq.c 		d->type_buf[reg] |= (t->type_falling_val |
d                 287 drivers/base/regmap/regmap-irq.c 		d->type_buf[reg] |= t->type_level_high_val;
d                 291 drivers/base/regmap/regmap-irq.c 		d->type_buf[reg] |= t->type_level_low_val;
d                 301 drivers/base/regmap/regmap-irq.c 	struct regmap_irq_chip_data *d = irq_data_get_irq_chip_data(data);
d                 302 drivers/base/regmap/regmap-irq.c 	struct regmap *map = d->map;
d                 303 drivers/base/regmap/regmap-irq.c 	const struct regmap_irq *irq_data = irq_to_regmap_irq(d, data->hwirq);
d                 306 drivers/base/regmap/regmap-irq.c 		if (d->wake_buf)
d                 307 drivers/base/regmap/regmap-irq.c 			d->wake_buf[irq_data->reg_offset / map->reg_stride]
d                 309 drivers/base/regmap/regmap-irq.c 		d->wake_count++;
d                 311 drivers/base/regmap/regmap-irq.c 		if (d->wake_buf)
d                 312 drivers/base/regmap/regmap-irq.c 			d->wake_buf[irq_data->reg_offset / map->reg_stride]
d                 314 drivers/base/regmap/regmap-irq.c 		d->wake_count--;
d                 356 drivers/base/regmap/regmap-irq.c static irqreturn_t regmap_irq_thread(int irq, void *d)
d                 358 drivers/base/regmap/regmap-irq.c 	struct regmap_irq_chip_data *data = d;
d                 563 drivers/base/regmap/regmap-irq.c 	struct regmap_irq_chip_data *d;
d                 593 drivers/base/regmap/regmap-irq.c 	d = kzalloc(sizeof(*d), GFP_KERNEL);
d                 594 drivers/base/regmap/regmap-irq.c 	if (!d)
d                 598 drivers/base/regmap/regmap-irq.c 		d->main_status_buf = kcalloc(chip->num_main_regs,
d                 602 drivers/base/regmap/regmap-irq.c 		if (!d->main_status_buf)
d                 606 drivers/base/regmap/regmap-irq.c 	d->status_buf = kcalloc(chip->num_regs, sizeof(unsigned int),
d                 608 drivers/base/regmap/regmap-irq.c 	if (!d->status_buf)
d                 611 drivers/base/regmap/regmap-irq.c 	d->mask_buf = kcalloc(chip->num_regs, sizeof(unsigned int),
d                 613 drivers/base/regmap/regmap-irq.c 	if (!d->mask_buf)
d                 616 drivers/base/regmap/regmap-irq.c 	d->mask_buf_def = kcalloc(chip->num_regs, sizeof(unsigned int),
d                 618 drivers/base/regmap/regmap-irq.c 	if (!d->mask_buf_def)
d                 622 drivers/base/regmap/regmap-irq.c 		d->wake_buf = kcalloc(chip->num_regs, sizeof(unsigned int),
d                 624 drivers/base/regmap/regmap-irq.c 		if (!d->wake_buf)
d                 630 drivers/base/regmap/regmap-irq.c 		d->type_buf_def = kcalloc(num_type_reg,
d                 632 drivers/base/regmap/regmap-irq.c 		if (!d->type_buf_def)
d                 635 drivers/base/regmap/regmap-irq.c 		d->type_buf = kcalloc(num_type_reg, sizeof(unsigned int),
d                 637 drivers/base/regmap/regmap-irq.c 		if (!d->type_buf)
d                 641 drivers/base/regmap/regmap-irq.c 	d->irq_chip = regmap_irq_chip;
d                 642 drivers/base/regmap/regmap-irq.c 	d->irq_chip.name = chip->name;
d                 643 drivers/base/regmap/regmap-irq.c 	d->irq = irq;
d                 644 drivers/base/regmap/regmap-irq.c 	d->map = map;
d                 645 drivers/base/regmap/regmap-irq.c 	d->chip = chip;
d                 646 drivers/base/regmap/regmap-irq.c 	d->irq_base = irq_base;
d                 649 drivers/base/regmap/regmap-irq.c 		d->irq_reg_stride = chip->irq_reg_stride;
d                 651 drivers/base/regmap/regmap-irq.c 		d->irq_reg_stride = 1;
d                 654 drivers/base/regmap/regmap-irq.c 		d->type_reg_stride = chip->type_reg_stride;
d                 656 drivers/base/regmap/regmap-irq.c 		d->type_reg_stride = 1;
d                 659 drivers/base/regmap/regmap-irq.c 	    d->irq_reg_stride == 1) {
d                 660 drivers/base/regmap/regmap-irq.c 		d->status_reg_buf = kmalloc_array(chip->num_regs,
d                 663 drivers/base/regmap/regmap-irq.c 		if (!d->status_reg_buf)
d                 667 drivers/base/regmap/regmap-irq.c 	mutex_init(&d->lock);
d                 670 drivers/base/regmap/regmap-irq.c 		d->mask_buf_def[chip->irqs[i].reg_offset / map->reg_stride]
d                 675 drivers/base/regmap/regmap-irq.c 		d->mask_buf[i] = d->mask_buf_def[i];
d                 680 drivers/base/regmap/regmap-irq.c 			(i * map->reg_stride * d->irq_reg_stride);
d                 682 drivers/base/regmap/regmap-irq.c 			ret = regmap_irq_update_bits(d, reg,
d                 683 drivers/base/regmap/regmap-irq.c 					 d->mask_buf[i], ~d->mask_buf[i]);
d                 684 drivers/base/regmap/regmap-irq.c 		else if (d->chip->unmask_base) {
d                 685 drivers/base/regmap/regmap-irq.c 			unmask_offset = d->chip->unmask_base -
d                 686 drivers/base/regmap/regmap-irq.c 					d->chip->mask_base;
d                 687 drivers/base/regmap/regmap-irq.c 			ret = regmap_irq_update_bits(d,
d                 689 drivers/base/regmap/regmap-irq.c 					d->mask_buf[i],
d                 690 drivers/base/regmap/regmap-irq.c 					d->mask_buf[i]);
d                 692 drivers/base/regmap/regmap-irq.c 			ret = regmap_irq_update_bits(d, reg,
d                 693 drivers/base/regmap/regmap-irq.c 					 d->mask_buf[i], d->mask_buf[i]);
d                 705 drivers/base/regmap/regmap-irq.c 			(i * map->reg_stride * d->irq_reg_stride);
d                 706 drivers/base/regmap/regmap-irq.c 		ret = regmap_read(map, reg, &d->status_buf[i]);
d                 713 drivers/base/regmap/regmap-irq.c 		if (d->status_buf[i] && (chip->ack_base || chip->use_ack)) {
d                 715 drivers/base/regmap/regmap-irq.c 				(i * map->reg_stride * d->irq_reg_stride);
d                 718 drivers/base/regmap/regmap-irq.c 					~(d->status_buf[i] & d->mask_buf[i]));
d                 721 drivers/base/regmap/regmap-irq.c 					d->status_buf[i] & d->mask_buf[i]);
d                 731 drivers/base/regmap/regmap-irq.c 	if (d->wake_buf) {
d                 733 drivers/base/regmap/regmap-irq.c 			d->wake_buf[i] = d->mask_buf_def[i];
d                 735 drivers/base/regmap/regmap-irq.c 				(i * map->reg_stride * d->irq_reg_stride);
d                 738 drivers/base/regmap/regmap-irq.c 				ret = regmap_irq_update_bits(d, reg,
d                 739 drivers/base/regmap/regmap-irq.c 							 d->mask_buf_def[i],
d                 742 drivers/base/regmap/regmap-irq.c 				ret = regmap_irq_update_bits(d, reg,
d                 743 drivers/base/regmap/regmap-irq.c 							 d->mask_buf_def[i],
d                 744 drivers/base/regmap/regmap-irq.c 							 d->wake_buf[i]);
d                 756 drivers/base/regmap/regmap-irq.c 				(i * map->reg_stride * d->type_reg_stride);
d                 758 drivers/base/regmap/regmap-irq.c 			ret = regmap_read(map, reg, &d->type_buf_def[i]);
d                 760 drivers/base/regmap/regmap-irq.c 			if (d->chip->type_invert)
d                 761 drivers/base/regmap/regmap-irq.c 				d->type_buf_def[i] = ~d->type_buf_def[i];
d                 772 drivers/base/regmap/regmap-irq.c 		d->domain = irq_domain_add_legacy(map->dev->of_node,
d                 774 drivers/base/regmap/regmap-irq.c 						  &regmap_domain_ops, d);
d                 776 drivers/base/regmap/regmap-irq.c 		d->domain = irq_domain_add_linear(map->dev->of_node,
d                 778 drivers/base/regmap/regmap-irq.c 						  &regmap_domain_ops, d);
d                 779 drivers/base/regmap/regmap-irq.c 	if (!d->domain) {
d                 787 drivers/base/regmap/regmap-irq.c 				   chip->name, d);
d                 794 drivers/base/regmap/regmap-irq.c 	*data = d;
d                 801 drivers/base/regmap/regmap-irq.c 	kfree(d->type_buf);
d                 802 drivers/base/regmap/regmap-irq.c 	kfree(d->type_buf_def);
d                 803 drivers/base/regmap/regmap-irq.c 	kfree(d->wake_buf);
d                 804 drivers/base/regmap/regmap-irq.c 	kfree(d->mask_buf_def);
d                 805 drivers/base/regmap/regmap-irq.c 	kfree(d->mask_buf);
d                 806 drivers/base/regmap/regmap-irq.c 	kfree(d->status_buf);
d                 807 drivers/base/regmap/regmap-irq.c 	kfree(d->status_reg_buf);
d                 808 drivers/base/regmap/regmap-irq.c 	kfree(d);
d                 821 drivers/base/regmap/regmap-irq.c void regmap_del_irq_chip(int irq, struct regmap_irq_chip_data *d)
d                 826 drivers/base/regmap/regmap-irq.c 	if (!d)
d                 829 drivers/base/regmap/regmap-irq.c 	free_irq(irq, d);
d                 832 drivers/base/regmap/regmap-irq.c 	for (hwirq = 0; hwirq < d->chip->num_irqs; hwirq++) {
d                 834 drivers/base/regmap/regmap-irq.c 		if (!d->chip->irqs[hwirq].mask)
d                 841 drivers/base/regmap/regmap-irq.c 		virq = irq_find_mapping(d->domain, hwirq);
d                 846 drivers/base/regmap/regmap-irq.c 	irq_domain_remove(d->domain);
d                 847 drivers/base/regmap/regmap-irq.c 	kfree(d->type_buf);
d                 848 drivers/base/regmap/regmap-irq.c 	kfree(d->type_buf_def);
d                 849 drivers/base/regmap/regmap-irq.c 	kfree(d->wake_buf);
d                 850 drivers/base/regmap/regmap-irq.c 	kfree(d->mask_buf_def);
d                 851 drivers/base/regmap/regmap-irq.c 	kfree(d->mask_buf);
d                 852 drivers/base/regmap/regmap-irq.c 	kfree(d->status_reg_buf);
d                 853 drivers/base/regmap/regmap-irq.c 	kfree(d->status_buf);
d                 854 drivers/base/regmap/regmap-irq.c 	kfree(d);
d                 860 drivers/base/regmap/regmap-irq.c 	struct regmap_irq_chip_data *d = *(struct regmap_irq_chip_data **)res;
d                 862 drivers/base/regmap/regmap-irq.c 	regmap_del_irq_chip(d->irq, d);
d                 898 drivers/base/regmap/regmap-irq.c 	struct regmap_irq_chip_data **ptr, *d;
d                 907 drivers/base/regmap/regmap-irq.c 				  chip, &d);
d                 913 drivers/base/regmap/regmap-irq.c 	*ptr = d;
d                 915 drivers/base/regmap/regmap-irq.c 	*data = d;
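The regmap-irq entries above show the chip data d caching mask, wake and type state and writing it all back in one place when the bus lock is released. From a driver's perspective that machinery is set up with a single registration call; the sketch below is a hedged, minimal example of such a registration, with the register addresses, bit assignments and every name invented for illustration.

#include <linux/bits.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/regmap.h>

/* Hypothetical device: one status register at 0x10, one mask register at
 * 0x11, two interrupt sources in bits 0 and 1. */
enum { EXAMPLE_IRQ_ALERT, EXAMPLE_IRQ_READY };

static const struct regmap_irq example_irqs[] = {
	REGMAP_IRQ_REG(EXAMPLE_IRQ_ALERT, 0, BIT(0)),
	REGMAP_IRQ_REG(EXAMPLE_IRQ_READY, 0, BIT(1)),
};

static const struct regmap_irq_chip example_irq_chip = {
	.name		= "example",
	.status_base	= 0x10,
	.mask_base	= 0x11,
	.num_regs	= 1,
	.irqs		= example_irqs,
	.num_irqs	= ARRAY_SIZE(example_irqs),
};

/* Called from probe(), assuming 'map' and the parent 'irq' already exist. */
static int example_setup_irqs(struct device *dev, struct regmap *map, int irq)
{
	struct regmap_irq_chip_data *irq_data;
	int ret;

	ret = devm_regmap_add_irq_chip(dev, map, irq,
				       IRQF_ONESHOT | IRQF_TRIGGER_LOW, 0,
				       &example_irq_chip, &irq_data);
	if (ret)
		return ret;

	/* Children can now use regmap_irq_get_virq(irq_data, EXAMPLE_IRQ_ALERT). */
	return 0;
}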
d                 343 drivers/base/swnode.c 	const char **d;
d                 344 drivers/base/swnode.c 	size_t nval = src->length / sizeof(*d);
d                 347 drivers/base/swnode.c 	d = kcalloc(nval, sizeof(*d), GFP_KERNEL);
d                 348 drivers/base/swnode.c 	if (!d)
d                 352 drivers/base/swnode.c 		d[i] = kstrdup(src->pointer.str[i], GFP_KERNEL);
d                 353 drivers/base/swnode.c 		if (!d[i] && src->pointer.str[i]) {
d                 355 drivers/base/swnode.c 				kfree(d[i]);
d                 356 drivers/base/swnode.c 			kfree(d);
d                 361 drivers/base/swnode.c 	dst->pointer.str = d;
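The swnode.c fragment above deep-copies an array of strings and, if any kstrdup() fails, walks back over the entries already duplicated before freeing the array itself. The same rollback shape is sketched below in plain userspace C with a hypothetical helper name, to make the cleanup order explicit.

#include <stdlib.h>
#include <string.h>

/* Duplicate an array of nval strings; on failure free everything already
 * allocated and return NULL (mirrors the kstrdup/kfree rollback above). */
static char **dup_string_array(const char *const *src, size_t nval)
{
	char **d = calloc(nval, sizeof(*d));
	size_t i;

	if (!d)
		return NULL;

	for (i = 0; i < nval; i++) {
		d[i] = src[i] ? strdup(src[i]) : NULL;
		if (!d[i] && src[i]) {		/* strdup failed */
			while (i--)		/* free entries 0 .. i-1 */
				free(d[i]);
			free(d);
			return NULL;
		}
	}
	return d;
}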
d                  76 drivers/bcma/driver_gpio.c static void bcma_gpio_irq_unmask(struct irq_data *d)
d                  78 drivers/bcma/driver_gpio.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                  80 drivers/bcma/driver_gpio.c 	int gpio = irqd_to_hwirq(d);
d                  87 drivers/bcma/driver_gpio.c static void bcma_gpio_irq_mask(struct irq_data *d)
d                  89 drivers/bcma/driver_gpio.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                  91 drivers/bcma/driver_gpio.c 	int gpio = irqd_to_hwirq(d);
d                 141 drivers/block/aoe/aoe.h 	struct aoedev *d;			/* parent device I belong to */
d                 211 drivers/block/aoe/aoe.h void aoedisk_rm_debugfs(struct aoedev *d);
d                 217 drivers/block/aoe/aoe.h void aoecmd_work(struct aoedev *d);
d                 237 drivers/block/aoe/aoe.h void aoedev_downdev(struct aoedev *d);
d                  38 drivers/block/aoe/aoeblk.c 	struct aoedev *d = disk->private_data;
d                  42 drivers/block/aoe/aoeblk.c 			(d->flags & DEVFL_UP) ? "up" : "down",
d                  43 drivers/block/aoe/aoeblk.c 			(d->flags & DEVFL_KICKME) ? ",kickme" :
d                  44 drivers/block/aoe/aoeblk.c 			(d->nopen && !(d->flags & DEVFL_UP)) ? ",closewait" : "");
d                  51 drivers/block/aoe/aoeblk.c 	struct aoedev *d = disk->private_data;
d                  52 drivers/block/aoe/aoeblk.c 	struct aoetgt *t = d->targets[0];
d                  62 drivers/block/aoe/aoeblk.c 	struct aoedev *d = disk->private_data;
d                  71 drivers/block/aoe/aoeblk.c 	t = d->targets;
d                  72 drivers/block/aoe/aoeblk.c 	te = t + d->ntargets;
d                 100 drivers/block/aoe/aoeblk.c 	struct aoedev *d = disk->private_data;
d                 102 drivers/block/aoe/aoeblk.c 	return snprintf(page, PAGE_SIZE, "0x%04x\n", (unsigned int) d->fw_ver);
d                 108 drivers/block/aoe/aoeblk.c 	struct aoedev *d = disk->private_data;
d                 110 drivers/block/aoe/aoeblk.c 	return snprintf(page, PAGE_SIZE, "%lu\n", d->maxbcnt);
d                 115 drivers/block/aoe/aoeblk.c 	struct aoedev *d;
d                 121 drivers/block/aoe/aoeblk.c 	d = s->private;
d                 123 drivers/block/aoe/aoeblk.c 		d->rttavg >> RTTSCALE,
d                 124 drivers/block/aoe/aoeblk.c 		d->rttdev >> RTTDSCALE);
d                 125 drivers/block/aoe/aoeblk.c 	seq_printf(s, "nskbpool: %d\n", skb_queue_len(&d->skbpool));
d                 126 drivers/block/aoe/aoeblk.c 	seq_printf(s, "kicked: %ld\n", d->kicked);
d                 127 drivers/block/aoe/aoeblk.c 	seq_printf(s, "maxbcnt: %ld\n", d->maxbcnt);
d                 128 drivers/block/aoe/aoeblk.c 	seq_printf(s, "ref: %ld\n", d->ref);
d                 130 drivers/block/aoe/aoeblk.c 	spin_lock_irqsave(&d->lock, flags);
d                 131 drivers/block/aoe/aoeblk.c 	t = d->targets;
d                 132 drivers/block/aoe/aoeblk.c 	te = t + d->ntargets;
d                 152 drivers/block/aoe/aoeblk.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 197 drivers/block/aoe/aoeblk.c aoedisk_add_debugfs(struct aoedev *d)
d                 203 drivers/block/aoe/aoeblk.c 	p = strchr(d->gd->disk_name, '/');
d                 205 drivers/block/aoe/aoeblk.c 		p = d->gd->disk_name;
d                 209 drivers/block/aoe/aoeblk.c 	d->debugfs = debugfs_create_file(p, 0444, aoe_debugfs_dir, d,
d                 213 drivers/block/aoe/aoeblk.c aoedisk_rm_debugfs(struct aoedev *d)
d                 215 drivers/block/aoe/aoeblk.c 	debugfs_remove(d->debugfs);
d                 216 drivers/block/aoe/aoeblk.c 	d->debugfs = NULL;
d                 222 drivers/block/aoe/aoeblk.c 	struct aoedev *d = bdev->bd_disk->private_data;
d                 225 drivers/block/aoe/aoeblk.c 	if (!virt_addr_valid(d)) {
d                 231 drivers/block/aoe/aoeblk.c 	if (!(d->flags & DEVFL_UP) || d->flags & DEVFL_TKILL)
d                 235 drivers/block/aoe/aoeblk.c 	spin_lock_irqsave(&d->lock, flags);
d                 236 drivers/block/aoe/aoeblk.c 	if (d->flags & DEVFL_UP && !(d->flags & DEVFL_TKILL)) {
d                 237 drivers/block/aoe/aoeblk.c 		d->nopen++;
d                 238 drivers/block/aoe/aoeblk.c 		spin_unlock_irqrestore(&d->lock, flags);
d                 242 drivers/block/aoe/aoeblk.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 250 drivers/block/aoe/aoeblk.c 	struct aoedev *d = disk->private_data;
d                 253 drivers/block/aoe/aoeblk.c 	spin_lock_irqsave(&d->lock, flags);
d                 255 drivers/block/aoe/aoeblk.c 	if (--d->nopen == 0) {
d                 256 drivers/block/aoe/aoeblk.c 		spin_unlock_irqrestore(&d->lock, flags);
d                 257 drivers/block/aoe/aoeblk.c 		aoecmd_cfg(d->aoemajor, d->aoeminor);
d                 260 drivers/block/aoe/aoeblk.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 266 drivers/block/aoe/aoeblk.c 	struct aoedev *d = hctx->queue->queuedata;
d                 268 drivers/block/aoe/aoeblk.c 	spin_lock_irq(&d->lock);
d                 270 drivers/block/aoe/aoeblk.c 	if ((d->flags & DEVFL_UP) == 0) {
d                 272 drivers/block/aoe/aoeblk.c 			d->aoemajor, d->aoeminor);
d                 273 drivers/block/aoe/aoeblk.c 		spin_unlock_irq(&d->lock);
d                 278 drivers/block/aoe/aoeblk.c 	list_add_tail(&bd->rq->queuelist, &d->rq_list);
d                 279 drivers/block/aoe/aoeblk.c 	aoecmd_work(d);
d                 280 drivers/block/aoe/aoeblk.c 	spin_unlock_irq(&d->lock);
d                 287 drivers/block/aoe/aoeblk.c 	struct aoedev *d = bdev->bd_disk->private_data;
d                 289 drivers/block/aoe/aoeblk.c 	if ((d->flags & DEVFL_UP) == 0) {
d                 294 drivers/block/aoe/aoeblk.c 	geo->cylinders = d->geo.cylinders;
d                 295 drivers/block/aoe/aoeblk.c 	geo->heads = d->geo.heads;
d                 296 drivers/block/aoe/aoeblk.c 	geo->sectors = d->geo.sectors;
d                 303 drivers/block/aoe/aoeblk.c 	struct aoedev *d;
d                 308 drivers/block/aoe/aoeblk.c 	d = bdev->bd_disk->private_data;
d                 309 drivers/block/aoe/aoeblk.c 	if ((d->flags & DEVFL_UP) == 0) {
d                 315 drivers/block/aoe/aoeblk.c 		if (!copy_to_user((void __user *) arg, &d->ident,
d                 316 drivers/block/aoe/aoeblk.c 			sizeof(d->ident)))
d                 344 drivers/block/aoe/aoeblk.c 	struct aoedev *d = vp;
d                 354 drivers/block/aoe/aoeblk.c 	spin_lock_irqsave(&d->lock, flags);
d                 355 drivers/block/aoe/aoeblk.c 	if (d->flags & DEVFL_GDALLOC
d                 356 drivers/block/aoe/aoeblk.c 	&& !(d->flags & DEVFL_TKILL)
d                 357 drivers/block/aoe/aoeblk.c 	&& !(d->flags & DEVFL_GD_NOW))
d                 358 drivers/block/aoe/aoeblk.c 		d->flags |= DEVFL_GD_NOW;
d                 361 drivers/block/aoe/aoeblk.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 368 drivers/block/aoe/aoeblk.c 			d->aoemajor, d->aoeminor);
d                 376 drivers/block/aoe/aoeblk.c 			d->aoemajor, d->aoeminor);
d                 380 drivers/block/aoe/aoeblk.c 	set = &d->tag_set;
d                 390 drivers/block/aoe/aoeblk.c 			d->aoemajor, d->aoeminor);
d                 397 drivers/block/aoe/aoeblk.c 			d->aoemajor, d->aoeminor);
d                 402 drivers/block/aoe/aoeblk.c 	spin_lock_irqsave(&d->lock, flags);
d                 403 drivers/block/aoe/aoeblk.c 	WARN_ON(!(d->flags & DEVFL_GD_NOW));
d                 404 drivers/block/aoe/aoeblk.c 	WARN_ON(!(d->flags & DEVFL_GDALLOC));
d                 405 drivers/block/aoe/aoeblk.c 	WARN_ON(d->flags & DEVFL_TKILL);
d                 406 drivers/block/aoe/aoeblk.c 	WARN_ON(d->gd);
d                 407 drivers/block/aoe/aoeblk.c 	WARN_ON(d->flags & DEVFL_UP);
d                 411 drivers/block/aoe/aoeblk.c 	d->bufpool = mp;
d                 412 drivers/block/aoe/aoeblk.c 	d->blkq = gd->queue = q;
d                 413 drivers/block/aoe/aoeblk.c 	q->queuedata = d;
d                 414 drivers/block/aoe/aoeblk.c 	d->gd = gd;
d                 418 drivers/block/aoe/aoeblk.c 	gd->first_minor = d->sysminor;
d                 420 drivers/block/aoe/aoeblk.c 	gd->private_data = d;
d                 421 drivers/block/aoe/aoeblk.c 	set_capacity(gd, d->ssize);
d                 423 drivers/block/aoe/aoeblk.c 		d->aoemajor, d->aoeminor);
d                 425 drivers/block/aoe/aoeblk.c 	d->flags &= ~DEVFL_GDALLOC;
d                 426 drivers/block/aoe/aoeblk.c 	d->flags |= DEVFL_UP;
d                 428 drivers/block/aoe/aoeblk.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 431 drivers/block/aoe/aoeblk.c 	aoedisk_add_debugfs(d);
d                 433 drivers/block/aoe/aoeblk.c 	spin_lock_irqsave(&d->lock, flags);
d                 434 drivers/block/aoe/aoeblk.c 	WARN_ON(!(d->flags & DEVFL_GD_NOW));
d                 435 drivers/block/aoe/aoeblk.c 	d->flags &= ~DEVFL_GD_NOW;
d                 436 drivers/block/aoe/aoeblk.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 444 drivers/block/aoe/aoeblk.c 	spin_lock_irqsave(&d->lock, flags);
d                 445 drivers/block/aoe/aoeblk.c 	d->flags &= ~DEVFL_GD_NOW;
d                 446 drivers/block/aoe/aoeblk.c 	schedule_work(&d->work);
d                 447 drivers/block/aoe/aoeblk.c 	spin_unlock_irqrestore(&d->lock, flags);
d                  84 drivers/block/aoe/aoechr.c 	struct aoedev *d;
d                  99 drivers/block/aoe/aoechr.c 	d = aoedev_by_aoeaddr(major, minor, 0);
d                 100 drivers/block/aoe/aoechr.c 	if (!d)
d                 102 drivers/block/aoe/aoechr.c 	spin_lock_irqsave(&d->lock, flags);
d                 103 drivers/block/aoe/aoechr.c 	aoecmd_cleanslate(d);
d                 106 drivers/block/aoe/aoechr.c 	skb = aoecmd_ata_id(d);
d                 107 drivers/block/aoe/aoechr.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 112 drivers/block/aoe/aoechr.c 		spin_lock_irqsave(&d->lock, flags);
d                 115 drivers/block/aoe/aoechr.c 	aoedev_put(d);
d                  25 drivers/block/aoe/aoecmd.c static int count_targets(struct aoedev *d, int *untainted);
d                  79 drivers/block/aoe/aoecmd.c getframe_deferred(struct aoedev *d, u32 tag)
d                  84 drivers/block/aoe/aoecmd.c 	head = &d->rexmitq;
d                  96 drivers/block/aoe/aoecmd.c getframe(struct aoedev *d, u32 tag)
d                 103 drivers/block/aoe/aoecmd.c 	head = &d->factive[n];
d                 120 drivers/block/aoe/aoecmd.c newtag(struct aoedev *d)
d                 125 drivers/block/aoe/aoecmd.c 	return n |= (++d->lasttag & 0x7fff) << 16;
d                 129 drivers/block/aoe/aoecmd.c aoehdr_atainit(struct aoedev *d, struct aoetgt *t, struct aoe_hdr *h)
d                 131 drivers/block/aoe/aoecmd.c 	u32 host_tag = newtag(d);
d                 137 drivers/block/aoe/aoecmd.c 	h->major = cpu_to_be16(d->aoemajor);
d                 138 drivers/block/aoe/aoecmd.c 	h->minor = d->aoeminor;
d                 171 drivers/block/aoe/aoecmd.c skb_pool_put(struct aoedev *d, struct sk_buff *skb)
d                 173 drivers/block/aoe/aoecmd.c 	__skb_queue_tail(&d->skbpool, skb);
d                 177 drivers/block/aoe/aoecmd.c skb_pool_get(struct aoedev *d)
d                 179 drivers/block/aoe/aoecmd.c 	struct sk_buff *skb = skb_peek(&d->skbpool);
d                 182 drivers/block/aoe/aoecmd.c 		__skb_unlink(skb, &d->skbpool);
d                 185 drivers/block/aoe/aoecmd.c 	if (skb_queue_len(&d->skbpool) < NSKBPOOLMAX &&
d                 206 drivers/block/aoe/aoecmd.c newtframe(struct aoedev *d, struct aoetgt *t)
d                 236 drivers/block/aoe/aoecmd.c 		skb = skb_pool_get(d);
d                 239 drivers/block/aoe/aoecmd.c 		skb_pool_put(d, f->skb);
d                 250 drivers/block/aoe/aoecmd.c newframe(struct aoedev *d)
d                 258 drivers/block/aoe/aoecmd.c 	if (!d->targets || !d->targets[0]) {
d                 262 drivers/block/aoe/aoecmd.c 	tt = d->tgt;	/* last used target */
d                 265 drivers/block/aoe/aoecmd.c 		if (tt >= &d->targets[d->ntargets] || !*tt)
d                 266 drivers/block/aoe/aoecmd.c 			tt = d->targets;
d                 275 drivers/block/aoe/aoecmd.c 			f = newtframe(d, t);
d                 278 drivers/block/aoe/aoecmd.c 				d->tgt = tt;
d                 282 drivers/block/aoe/aoecmd.c 		if (tt == d->tgt) {	/* we've looped and found nada */
d                 290 drivers/block/aoe/aoecmd.c 		d->kicked++;
d                 291 drivers/block/aoe/aoecmd.c 		d->flags |= DEVFL_KICKME;
d                 310 drivers/block/aoe/aoecmd.c 	struct aoedev *d = f->t->d;
d                 314 drivers/block/aoe/aoecmd.c 	list_add_tail(&f->head, &d->factive[n]);
d                 336 drivers/block/aoe/aoecmd.c 	f->tag = aoehdr_atainit(t->d, t, h);
d                 345 drivers/block/aoe/aoecmd.c 	if (t->d->flags & DEVFL_EXT) {
d                 369 drivers/block/aoe/aoecmd.c aoecmd_ata_rw(struct aoedev *d)
d                 376 drivers/block/aoe/aoecmd.c 	buf = nextbuf(d);
d                 379 drivers/block/aoe/aoecmd.c 	f = newframe(d);
d                 387 drivers/block/aoe/aoecmd.c 				d->maxbcnt ?: DEFAULTBCNT,
d                 392 drivers/block/aoe/aoecmd.c 		d->ip.buf = NULL;
d                 452 drivers/block/aoe/aoecmd.c resend(struct aoedev *d, struct frame *f)
d                 462 drivers/block/aoe/aoecmd.c 	n = newtag(d);
d                 475 drivers/block/aoe/aoecmd.c 			"retransmit", d->aoemajor, d->aoeminor,
d                 559 drivers/block/aoe/aoecmd.c 	nf = newframe(f->t->d);
d                 582 drivers/block/aoe/aoecmd.c 	struct aoedev *d;
d                 589 drivers/block/aoe/aoecmd.c 	d = t->d;
d                 590 drivers/block/aoe/aoecmd.c 	f = newtframe(d, t);
d                 595 drivers/block/aoe/aoecmd.c 			(long) d->aoemajor, d->aoeminor,
d                 601 drivers/block/aoe/aoecmd.c 	f->iter.bi_size = t->d->maxbcnt ? t->d->maxbcnt : DEFAULTBCNT;
d                 625 drivers/block/aoe/aoecmd.c rto(struct aoedev *d)
d                 629 drivers/block/aoe/aoecmd.c 	t = 2 * d->rttavg >> RTTSCALE;
d                 630 drivers/block/aoe/aoecmd.c 	t += 8 * d->rttdev >> RTTDSCALE;
d                 638 drivers/block/aoe/aoecmd.c rexmit_deferred(struct aoedev *d)
d                 647 drivers/block/aoe/aoecmd.c 	count_targets(d, &untainted);
d                 649 drivers/block/aoe/aoecmd.c 	head = &d->rexmitq;
d                 671 drivers/block/aoe/aoecmd.c 			} else if (tsince_hr(f) < t->taint * rto(d)) {
d                 680 drivers/block/aoe/aoecmd.c 			f->t->d->flags |= DEVFL_KICKME;
d                 692 drivers/block/aoe/aoecmd.c 		resend(d, f);
d                 713 drivers/block/aoe/aoecmd.c count_targets(struct aoedev *d, int *untainted)
d                 717 drivers/block/aoe/aoecmd.c 	for (i = good = 0; i < d->ntargets && d->targets[i]; ++i)
d                 718 drivers/block/aoe/aoecmd.c 		if (d->targets[i]->taint == 0)
d                 729 drivers/block/aoe/aoecmd.c 	struct aoedev *d;
d                 741 drivers/block/aoe/aoecmd.c 	d = from_timer(d, timer, timer);
d                 743 drivers/block/aoe/aoecmd.c 	spin_lock_irqsave(&d->lock, flags);
d                 746 drivers/block/aoe/aoecmd.c 	timeout = rto(d);
d                 748 drivers/block/aoe/aoecmd.c 	utgts = count_targets(d, NULL);
d                 750 drivers/block/aoe/aoecmd.c 	if (d->flags & DEVFL_TKILL) {
d                 751 drivers/block/aoe/aoecmd.c 		spin_unlock_irqrestore(&d->lock, flags);
d                 757 drivers/block/aoe/aoecmd.c 		head = &d->factive[i];
d                 781 drivers/block/aoe/aoecmd.c 			list_splice(&flist, &d->factive[0]);
d                 782 drivers/block/aoe/aoecmd.c 			aoedev_downdev(d);
d                 808 drivers/block/aoe/aoecmd.c 		list_move_tail(pos, &d->rexmitq);
d                 811 drivers/block/aoe/aoecmd.c 	rexmit_deferred(d);
d                 814 drivers/block/aoe/aoecmd.c 	if ((d->flags & DEVFL_KICKME) && d->blkq) {
d                 815 drivers/block/aoe/aoecmd.c 		d->flags &= ~DEVFL_KICKME;
d                 816 drivers/block/aoe/aoecmd.c 		blk_mq_run_hw_queues(d->blkq, true);
d                 819 drivers/block/aoe/aoecmd.c 	d->timer.expires = jiffies + TIMERTICK;
d                 820 drivers/block/aoe/aoecmd.c 	add_timer(&d->timer);
d                 822 drivers/block/aoe/aoecmd.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 835 drivers/block/aoe/aoecmd.c nextbuf(struct aoedev *d)
d                 843 drivers/block/aoe/aoecmd.c 	q = d->blkq;
d                 846 drivers/block/aoe/aoecmd.c 	if (d->ip.buf)
d                 847 drivers/block/aoe/aoecmd.c 		return d->ip.buf;
d                 848 drivers/block/aoe/aoecmd.c 	rq = d->ip.rq;
d                 850 drivers/block/aoe/aoecmd.c 		rq = list_first_entry_or_null(&d->rq_list, struct request,
d                 856 drivers/block/aoe/aoecmd.c 		d->ip.rq = rq;
d                 857 drivers/block/aoe/aoecmd.c 		d->ip.nxbio = rq->bio;
d                 864 drivers/block/aoe/aoecmd.c 	buf = mempool_alloc(d->bufpool, GFP_ATOMIC);
d                 869 drivers/block/aoe/aoecmd.c 	bio = d->ip.nxbio;
d                 872 drivers/block/aoe/aoecmd.c 	d->ip.nxbio = bio;
d                 874 drivers/block/aoe/aoecmd.c 		d->ip.rq = NULL;
d                 875 drivers/block/aoe/aoecmd.c 	return d->ip.buf = buf;
d                 880 drivers/block/aoe/aoecmd.c aoecmd_work(struct aoedev *d)
d                 882 drivers/block/aoe/aoecmd.c 	rexmit_deferred(d);
d                 883 drivers/block/aoe/aoecmd.c 	while (aoecmd_ata_rw(d))
d                 892 drivers/block/aoe/aoecmd.c 	struct aoedev *d = container_of(work, struct aoedev, work);
d                 896 drivers/block/aoe/aoecmd.c 	if (d->flags & DEVFL_GDALLOC)
d                 897 drivers/block/aoe/aoecmd.c 		aoeblk_gdalloc(d);
d                 899 drivers/block/aoe/aoecmd.c 	if (d->flags & DEVFL_NEWSIZE) {
d                 900 drivers/block/aoe/aoecmd.c 		ssize = get_capacity(d->gd);
d                 901 drivers/block/aoe/aoecmd.c 		bd = bdget_disk(d->gd, 0);
d                 908 drivers/block/aoe/aoecmd.c 		spin_lock_irq(&d->lock);
d                 909 drivers/block/aoe/aoecmd.c 		d->flags |= DEVFL_UP;
d                 910 drivers/block/aoe/aoecmd.c 		d->flags &= ~DEVFL_NEWSIZE;
d                 911 drivers/block/aoe/aoecmd.c 		spin_unlock_irq(&d->lock);
d                 927 drivers/block/aoe/aoecmd.c ataid_complete(struct aoedev *d, struct aoetgt *t, unsigned char *id)
d                 939 drivers/block/aoe/aoecmd.c 		d->flags |= DEVFL_EXT;
d                 945 drivers/block/aoe/aoecmd.c 		d->geo.cylinders = ssize;
d                 946 drivers/block/aoe/aoecmd.c 		d->geo.cylinders /= (255 * 63);
d                 947 drivers/block/aoe/aoecmd.c 		d->geo.heads = 255;
d                 948 drivers/block/aoe/aoecmd.c 		d->geo.sectors = 63;
d                 950 drivers/block/aoe/aoecmd.c 		d->flags &= ~DEVFL_EXT;
d                 956 drivers/block/aoe/aoecmd.c 		d->geo.cylinders = get_unaligned_le16(&id[54 << 1]);
d                 957 drivers/block/aoe/aoecmd.c 		d->geo.heads = get_unaligned_le16(&id[55 << 1]);
d                 958 drivers/block/aoe/aoecmd.c 		d->geo.sectors = get_unaligned_le16(&id[56 << 1]);
d                 964 drivers/block/aoe/aoecmd.c 	memcpy(d->ident, id, sizeof(d->ident));
d                 966 drivers/block/aoe/aoecmd.c 	if (d->ssize != ssize)
d                 970 drivers/block/aoe/aoecmd.c 			d->aoemajor, d->aoeminor,
d                 971 drivers/block/aoe/aoecmd.c 			d->fw_ver, (long long)ssize);
d                 972 drivers/block/aoe/aoecmd.c 	d->ssize = ssize;
d                 973 drivers/block/aoe/aoecmd.c 	d->geo.start = 0;
d                 974 drivers/block/aoe/aoecmd.c 	if (d->flags & (DEVFL_GDALLOC|DEVFL_NEWSIZE))
d                 976 drivers/block/aoe/aoecmd.c 	if (d->gd != NULL) {
d                 977 drivers/block/aoe/aoecmd.c 		set_capacity(d->gd, ssize);
d                 978 drivers/block/aoe/aoecmd.c 		d->flags |= DEVFL_NEWSIZE;
d                 980 drivers/block/aoe/aoecmd.c 		d->flags |= DEVFL_GDALLOC;
d                 981 drivers/block/aoe/aoecmd.c 	schedule_work(&d->work);
d                 985 drivers/block/aoe/aoecmd.c calc_rttavg(struct aoedev *d, struct aoetgt *t, int rtt)
d                 992 drivers/block/aoe/aoecmd.c 	n -= d->rttavg >> RTTSCALE;
d                 993 drivers/block/aoe/aoecmd.c 	d->rttavg += n;
d                 996 drivers/block/aoe/aoecmd.c 	n -= d->rttdev >> RTTDSCALE;
d                 997 drivers/block/aoe/aoecmd.c 	d->rttdev += n;
d                1010 drivers/block/aoe/aoecmd.c gettgt(struct aoedev *d, char *addr)
d                1014 drivers/block/aoe/aoecmd.c 	t = d->targets;
d                1015 drivers/block/aoe/aoecmd.c 	e = t + d->ntargets;
d                1039 drivers/block/aoe/aoecmd.c aoe_end_request(struct aoedev *d, struct request *rq, int fastfail)
d                1046 drivers/block/aoe/aoecmd.c 	q = d->blkq;
d                1047 drivers/block/aoe/aoecmd.c 	if (rq == d->ip.rq)
d                1048 drivers/block/aoe/aoecmd.c 		d->ip.rq = NULL;
d                1064 drivers/block/aoe/aoecmd.c aoe_end_buf(struct aoedev *d, struct buf *buf)
d                1069 drivers/block/aoe/aoecmd.c 	if (buf == d->ip.buf)
d                1070 drivers/block/aoe/aoecmd.c 		d->ip.buf = NULL;
d                1071 drivers/block/aoe/aoecmd.c 	mempool_free(buf, d->bufpool);
d                1073 drivers/block/aoe/aoecmd.c 		aoe_end_request(d, rq, 0);
d                1085 drivers/block/aoe/aoecmd.c 	struct aoedev *d;
d                1093 drivers/block/aoe/aoecmd.c 	d = t->d;
d                1111 drivers/block/aoe/aoecmd.c 			d->aoemajor, d->aoeminor);
d                1124 drivers/block/aoe/aoecmd.c 				(long) d->aoemajor, d->aoeminor,
d                1132 drivers/block/aoe/aoecmd.c 				(long) d->aoemajor, d->aoeminor,
d                1141 drivers/block/aoe/aoecmd.c 		spin_lock_irq(&d->lock);
d                1145 drivers/block/aoe/aoecmd.c 		spin_unlock_irq(&d->lock);
d                1151 drivers/block/aoe/aoecmd.c 				(long) d->aoemajor, d->aoeminor,
d                1157 drivers/block/aoe/aoecmd.c 		spin_lock_irq(&d->lock);
d                1158 drivers/block/aoe/aoecmd.c 		ataid_complete(d, t, skb->data);
d                1159 drivers/block/aoe/aoecmd.c 		spin_unlock_irq(&d->lock);
d                1168 drivers/block/aoe/aoecmd.c 	spin_lock_irq(&d->lock);
d                1172 drivers/block/aoe/aoecmd.c 		count_targets(d, &untainted);
d                1182 drivers/block/aoe/aoecmd.c 		aoe_end_buf(d, buf);
d                1184 drivers/block/aoe/aoecmd.c 	spin_unlock_irq(&d->lock);
d                1185 drivers/block/aoe/aoecmd.c 	aoedev_put(d);
d                1212 drivers/block/aoe/aoecmd.c 		actual_id = f->t->d->aoeminor % ncpus;
d                1285 drivers/block/aoe/aoecmd.c 	id = f->t->d->aoeminor % ncpus;
d                1304 drivers/block/aoe/aoecmd.c 	struct aoedev *d;
d                1314 drivers/block/aoe/aoecmd.c 	d = aoedev_by_aoeaddr(aoemajor, h->minor, 0);
d                1315 drivers/block/aoe/aoecmd.c 	if (d == NULL) {
d                1323 drivers/block/aoe/aoecmd.c 	spin_lock_irqsave(&d->lock, flags);
d                1326 drivers/block/aoe/aoecmd.c 	f = getframe(d, n);
d                1328 drivers/block/aoe/aoecmd.c 		calc_rttavg(d, f->t, tsince_hr(f));
d                1333 drivers/block/aoe/aoecmd.c 		f = getframe_deferred(d, n);
d                1335 drivers/block/aoe/aoecmd.c 			calc_rttavg(d, NULL, tsince_hr(f));
d                1337 drivers/block/aoe/aoecmd.c 			calc_rttavg(d, NULL, tsince(n));
d                1338 drivers/block/aoe/aoecmd.c 			spin_unlock_irqrestore(&d->lock, flags);
d                1339 drivers/block/aoe/aoecmd.c 			aoedev_put(d);
d                1353 drivers/block/aoe/aoecmd.c 	aoecmd_work(d);
d                1355 drivers/block/aoe/aoecmd.c 	spin_unlock_irqrestore(&d->lock, flags);
d                1377 drivers/block/aoe/aoecmd.c aoecmd_ata_id(struct aoedev *d)
d                1385 drivers/block/aoe/aoecmd.c 	f = newframe(d);
d                1389 drivers/block/aoe/aoecmd.c 	t = *d->tgt;
d                1397 drivers/block/aoe/aoecmd.c 	f->tag = aoehdr_atainit(d, t, h);
d                1410 drivers/block/aoe/aoecmd.c 	d->rttavg = RTTAVG_INIT;
d                1411 drivers/block/aoe/aoecmd.c 	d->rttdev = RTTDEV_INIT;
d                1412 drivers/block/aoe/aoecmd.c 	d->timer.function = rexmit_timer;
d                1422 drivers/block/aoe/aoecmd.c grow_targets(struct aoedev *d)
d                1427 drivers/block/aoe/aoecmd.c 	oldn = d->ntargets;
d                1429 drivers/block/aoe/aoecmd.c 	tt = kcalloc(newn, sizeof(*d->targets), GFP_ATOMIC);
d                1432 drivers/block/aoe/aoecmd.c 	memmove(tt, d->targets, sizeof(*d->targets) * oldn);
d                1433 drivers/block/aoe/aoecmd.c 	d->tgt = tt + (d->tgt - d->targets);
d                1434 drivers/block/aoe/aoecmd.c 	kfree(d->targets);
d                1435 drivers/block/aoe/aoecmd.c 	d->targets = tt;
d                1436 drivers/block/aoe/aoecmd.c 	d->ntargets = newn;
d                1438 drivers/block/aoe/aoecmd.c 	return &d->targets[oldn];
d                1442 drivers/block/aoe/aoecmd.c addtgt(struct aoedev *d, char *addr, ulong nframes)
d                1446 drivers/block/aoe/aoecmd.c 	tt = d->targets;
d                1447 drivers/block/aoe/aoecmd.c 	te = tt + d->ntargets;
d                1452 drivers/block/aoe/aoecmd.c 		tt = grow_targets(d);
d                1460 drivers/block/aoe/aoecmd.c 	t->d = d;
d                1474 drivers/block/aoe/aoecmd.c setdbcnt(struct aoedev *d)
d                1479 drivers/block/aoe/aoecmd.c 	t = d->targets;
d                1480 drivers/block/aoe/aoecmd.c 	e = t + d->ntargets;
d                1484 drivers/block/aoe/aoecmd.c 	if (bcnt != d->maxbcnt) {
d                1485 drivers/block/aoe/aoecmd.c 		d->maxbcnt = bcnt;
d                1487 drivers/block/aoe/aoecmd.c 			d->aoemajor, d->aoeminor, bcnt);
d                1494 drivers/block/aoe/aoecmd.c 	struct aoedev *d;
d                1498 drivers/block/aoe/aoecmd.c 	d = t->d;
d                1521 drivers/block/aoe/aoecmd.c 	setdbcnt(d);
d                1527 drivers/block/aoe/aoecmd.c 	struct aoedev *d;
d                1565 drivers/block/aoe/aoecmd.c 	d = aoedev_by_aoeaddr(aoemajor, h->minor, 1);
d                1566 drivers/block/aoe/aoecmd.c 	if (d == NULL) {
d                1571 drivers/block/aoe/aoecmd.c 	spin_lock_irqsave(&d->lock, flags);
d                1573 drivers/block/aoe/aoecmd.c 	t = gettgt(d, h->src);
d                1579 drivers/block/aoe/aoecmd.c 		t = addtgt(d, h->src, n);
d                1592 drivers/block/aoe/aoecmd.c 	if (d->nopen == 0) {
d                1593 drivers/block/aoe/aoecmd.c 		d->fw_ver = be16_to_cpu(ch->fwver);
d                1594 drivers/block/aoe/aoecmd.c 		sl = aoecmd_ata_id(d);
d                1597 drivers/block/aoe/aoecmd.c 	spin_unlock_irqrestore(&d->lock, flags);
d                1598 drivers/block/aoe/aoecmd.c 	aoedev_put(d);
d                1615 drivers/block/aoe/aoecmd.c aoecmd_cleanslate(struct aoedev *d)
d                1619 drivers/block/aoe/aoecmd.c 	d->rttavg = RTTAVG_INIT;
d                1620 drivers/block/aoe/aoecmd.c 	d->rttdev = RTTDEV_INIT;
d                1621 drivers/block/aoe/aoecmd.c 	d->maxbcnt = 0;
d                1623 drivers/block/aoe/aoecmd.c 	t = d->targets;
d                1624 drivers/block/aoe/aoecmd.c 	te = t + d->ntargets;
d                1630 drivers/block/aoe/aoecmd.c aoe_failbuf(struct aoedev *d, struct buf *buf)
d                1637 drivers/block/aoe/aoecmd.c 		aoe_end_buf(d, buf);
d                1655 drivers/block/aoe/aoecmd.c 	struct aoedev *d;
d                1668 drivers/block/aoe/aoecmd.c 		d = f->t->d;
d                1670 drivers/block/aoe/aoecmd.c 		spin_lock_irqsave(&d->lock, flags);
d                1673 drivers/block/aoe/aoecmd.c 			aoe_failbuf(d, f->buf);
d                1676 drivers/block/aoe/aoecmd.c 		spin_unlock_irqrestore(&d->lock, flags);
d                1678 drivers/block/aoe/aoecmd.c 		aoedev_put(d);
d                  18 drivers/block/aoe/aoedev.c static void freetgt(struct aoedev *d, struct aoetgt *t);
d                  19 drivers/block/aoe/aoedev.c static void skbpoolfree(struct aoedev *d);
d                 138 drivers/block/aoe/aoedev.c aoedev_put(struct aoedev *d)
d                 143 drivers/block/aoe/aoedev.c 	d->ref--;
d                 150 drivers/block/aoe/aoedev.c 	struct aoedev *d;
d                 152 drivers/block/aoe/aoedev.c 	d = from_timer(d, t, timer);
d                 153 drivers/block/aoe/aoedev.c 	if (d->flags & DEVFL_TKILL)
d                 155 drivers/block/aoe/aoedev.c 	d->timer.expires = jiffies + HZ;
d                 156 drivers/block/aoe/aoedev.c 	add_timer(&d->timer);
d                 160 drivers/block/aoe/aoedev.c aoe_failip(struct aoedev *d)
d                 166 drivers/block/aoe/aoedev.c 	aoe_failbuf(d, d->ip.buf);
d                 167 drivers/block/aoe/aoedev.c 	rq = d->ip.rq;
d                 172 drivers/block/aoe/aoedev.c 	while ((bio = d->ip.nxbio)) {
d                 174 drivers/block/aoe/aoedev.c 		d->ip.nxbio = bio->bi_next;
d                 179 drivers/block/aoe/aoedev.c 		aoe_end_request(d, rq, 0);
d                 191 drivers/block/aoe/aoedev.c 		aoe_failbuf(f->t->d, f->buf);
d                 197 drivers/block/aoe/aoedev.c aoedev_downdev(struct aoedev *d)
d                 203 drivers/block/aoe/aoedev.c 	d->flags &= ~DEVFL_UP;
d                 207 drivers/block/aoe/aoedev.c 		head = &d->factive[i];
d                 211 drivers/block/aoe/aoedev.c 	head = &d->rexmitq;
d                 216 drivers/block/aoe/aoedev.c 	tt = d->targets;
d                 217 drivers/block/aoe/aoedev.c 	te = tt + d->ntargets;
d                 224 drivers/block/aoe/aoedev.c 	aoe_failip(d);
d                 227 drivers/block/aoe/aoedev.c 	if (d->blkq) {
d                 229 drivers/block/aoe/aoedev.c 		blk_mq_freeze_queue(d->blkq);
d                 230 drivers/block/aoe/aoedev.c 		blk_mq_quiesce_queue(d->blkq);
d                 231 drivers/block/aoe/aoedev.c 		blk_mq_unquiesce_queue(d->blkq);
d                 232 drivers/block/aoe/aoedev.c 		blk_mq_unfreeze_queue(d->blkq);
d                 235 drivers/block/aoe/aoedev.c 	if (d->gd)
d                 236 drivers/block/aoe/aoedev.c 		set_capacity(d->gd, 0);
d                 243 drivers/block/aoe/aoedev.c user_req(char *s, size_t slen, struct aoedev *d)
d                 248 drivers/block/aoe/aoedev.c 	if (!d->gd)
d                 250 drivers/block/aoe/aoedev.c 	p = kbasename(d->gd->disk_name);
d                 251 drivers/block/aoe/aoedev.c 	lim = sizeof(d->gd->disk_name);
d                 252 drivers/block/aoe/aoedev.c 	lim -= p - d->gd->disk_name;
d                 260 drivers/block/aoe/aoedev.c freedev(struct aoedev *d)
d                 266 drivers/block/aoe/aoedev.c 	spin_lock_irqsave(&d->lock, flags);
d                 267 drivers/block/aoe/aoedev.c 	if (d->flags & DEVFL_TKILL
d                 268 drivers/block/aoe/aoedev.c 	&& !(d->flags & DEVFL_FREEING)) {
d                 269 drivers/block/aoe/aoedev.c 		d->flags |= DEVFL_FREEING;
d                 272 drivers/block/aoe/aoedev.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 276 drivers/block/aoe/aoedev.c 	del_timer_sync(&d->timer);
d                 277 drivers/block/aoe/aoedev.c 	if (d->gd) {
d                 278 drivers/block/aoe/aoedev.c 		aoedisk_rm_debugfs(d);
d                 279 drivers/block/aoe/aoedev.c 		del_gendisk(d->gd);
d                 280 drivers/block/aoe/aoedev.c 		put_disk(d->gd);
d                 281 drivers/block/aoe/aoedev.c 		blk_mq_free_tag_set(&d->tag_set);
d                 282 drivers/block/aoe/aoedev.c 		blk_cleanup_queue(d->blkq);
d                 284 drivers/block/aoe/aoedev.c 	t = d->targets;
d                 285 drivers/block/aoe/aoedev.c 	e = t + d->ntargets;
d                 287 drivers/block/aoe/aoedev.c 		freetgt(d, *t);
d                 289 drivers/block/aoe/aoedev.c 	mempool_destroy(d->bufpool);
d                 290 drivers/block/aoe/aoedev.c 	skbpoolfree(d);
d                 291 drivers/block/aoe/aoedev.c 	minor_free(d->sysminor);
d                 293 drivers/block/aoe/aoedev.c 	spin_lock_irqsave(&d->lock, flags);
d                 294 drivers/block/aoe/aoedev.c 	d->flags |= DEVFL_FREED;
d                 295 drivers/block/aoe/aoedev.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 307 drivers/block/aoe/aoedev.c 	struct aoedev *d, **dd;
d                 329 drivers/block/aoe/aoedev.c 	for (d = devlist; d; d = d->next) {
d                 330 drivers/block/aoe/aoedev.c 		spin_lock(&d->lock);
d                 331 drivers/block/aoe/aoedev.c 		if (d->flags & DEVFL_TKILL)
d                 337 drivers/block/aoe/aoedev.c 			if (!user_req(buf, cnt, d))
d                 339 drivers/block/aoe/aoedev.c 		} else if ((!all && (d->flags & DEVFL_UP))
d                 340 drivers/block/aoe/aoedev.c 		|| d->flags & skipflags
d                 341 drivers/block/aoe/aoedev.c 		|| d->nopen
d                 342 drivers/block/aoe/aoedev.c 		|| d->ref)
d                 345 drivers/block/aoe/aoedev.c 		spin_unlock(&d->lock);
d                 347 drivers/block/aoe/aoedev.c 		aoedev_downdev(d);
d                 348 drivers/block/aoe/aoedev.c 		d->flags |= DEVFL_TKILL;
d                 351 drivers/block/aoe/aoedev.c 		spin_unlock(&d->lock);
d                 360 drivers/block/aoe/aoedev.c 	for (d = devlist; d; d = d->next) {
d                 361 drivers/block/aoe/aoedev.c 		spin_lock(&d->lock);
d                 362 drivers/block/aoe/aoedev.c 		if (d->flags & DEVFL_TKILL
d                 363 drivers/block/aoe/aoedev.c 		&& !(d->flags & DEVFL_FREEING)) {
d                 364 drivers/block/aoe/aoedev.c 			spin_unlock(&d->lock);
d                 366 drivers/block/aoe/aoedev.c 			freedev(d);
d                 369 drivers/block/aoe/aoedev.c 		spin_unlock(&d->lock);
d                 373 drivers/block/aoe/aoedev.c 	for (dd = &devlist, d = *dd; d; d = *dd) {
d                 376 drivers/block/aoe/aoedev.c 		spin_lock(&d->lock);
d                 377 drivers/block/aoe/aoedev.c 		if (d->flags & DEVFL_FREED) {
d                 378 drivers/block/aoe/aoedev.c 			*dd = d->next;
d                 379 drivers/block/aoe/aoedev.c 			doomed = d;
d                 381 drivers/block/aoe/aoedev.c 			dd = &d->next;
d                 383 drivers/block/aoe/aoedev.c 		spin_unlock(&d->lock);
d                 428 drivers/block/aoe/aoedev.c skbpoolfree(struct aoedev *d)
d                 432 drivers/block/aoe/aoedev.c 	skb_queue_walk_safe(&d->skbpool, skb, tmp)
d                 435 drivers/block/aoe/aoedev.c 	__skb_queue_head_init(&d->skbpool);
d                 442 drivers/block/aoe/aoedev.c 	struct aoedev *d;
d                 449 drivers/block/aoe/aoedev.c 	for (d=devlist; d; d=d->next)
d                 450 drivers/block/aoe/aoedev.c 		if (d->aoemajor == maj && d->aoeminor == min) {
d                 451 drivers/block/aoe/aoedev.c 			spin_lock(&d->lock);
d                 452 drivers/block/aoe/aoedev.c 			if (d->flags & DEVFL_TKILL) {
d                 453 drivers/block/aoe/aoedev.c 				spin_unlock(&d->lock);
d                 454 drivers/block/aoe/aoedev.c 				d = NULL;
d                 457 drivers/block/aoe/aoedev.c 			d->ref++;
d                 458 drivers/block/aoe/aoedev.c 			spin_unlock(&d->lock);
d                 461 drivers/block/aoe/aoedev.c 	if (d || !do_alloc || minor_get(&sysminor, maj, min) < 0)
d                 463 drivers/block/aoe/aoedev.c 	d = kcalloc(1, sizeof *d, GFP_ATOMIC);
d                 464 drivers/block/aoe/aoedev.c 	if (!d)
d                 466 drivers/block/aoe/aoedev.c 	d->targets = kcalloc(NTARGETS, sizeof(*d->targets), GFP_ATOMIC);
d                 467 drivers/block/aoe/aoedev.c 	if (!d->targets) {
d                 468 drivers/block/aoe/aoedev.c 		kfree(d);
d                 469 drivers/block/aoe/aoedev.c 		d = NULL;
d                 472 drivers/block/aoe/aoedev.c 	d->ntargets = NTARGETS;
d                 473 drivers/block/aoe/aoedev.c 	INIT_WORK(&d->work, aoecmd_sleepwork);
d                 474 drivers/block/aoe/aoedev.c 	spin_lock_init(&d->lock);
d                 475 drivers/block/aoe/aoedev.c 	INIT_LIST_HEAD(&d->rq_list);
d                 476 drivers/block/aoe/aoedev.c 	skb_queue_head_init(&d->skbpool);
d                 477 drivers/block/aoe/aoedev.c 	timer_setup(&d->timer, dummy_timer, 0);
d                 478 drivers/block/aoe/aoedev.c 	d->timer.expires = jiffies + HZ;
d                 479 drivers/block/aoe/aoedev.c 	add_timer(&d->timer);
d                 480 drivers/block/aoe/aoedev.c 	d->bufpool = NULL;	/* defer to aoeblk_gdalloc */
d                 481 drivers/block/aoe/aoedev.c 	d->tgt = d->targets;
d                 482 drivers/block/aoe/aoedev.c 	d->ref = 1;
d                 484 drivers/block/aoe/aoedev.c 		INIT_LIST_HEAD(&d->factive[i]);
d                 485 drivers/block/aoe/aoedev.c 	INIT_LIST_HEAD(&d->rexmitq);
d                 486 drivers/block/aoe/aoedev.c 	d->sysminor = sysminor;
d                 487 drivers/block/aoe/aoedev.c 	d->aoemajor = maj;
d                 488 drivers/block/aoe/aoedev.c 	d->aoeminor = min;
d                 489 drivers/block/aoe/aoedev.c 	d->rttavg = RTTAVG_INIT;
d                 490 drivers/block/aoe/aoedev.c 	d->rttdev = RTTDEV_INIT;
d                 491 drivers/block/aoe/aoedev.c 	d->next = devlist;
d                 492 drivers/block/aoe/aoedev.c 	devlist = d;
d                 495 drivers/block/aoe/aoedev.c 	return d;
d                 499 drivers/block/aoe/aoedev.c freetgt(struct aoedev *d, struct aoetgt *t)
d                1839 drivers/block/drbd/drbd_receiver.c 			      struct drbd_peer_request *r, void *d,
d                1844 drivers/block/drbd/drbd_receiver.c 	drbd_csum_ee(h, r, d);
d                4094 drivers/block/drbd/drbd_receiver.c 	sector_t d;
d                4097 drivers/block/drbd/drbd_receiver.c 	d = (a > b) ? (a - b) : (b - a);
d                4098 drivers/block/drbd/drbd_receiver.c 	if (d > (a>>3) || d > (b>>3))
d                1093 drivers/block/drbd/drbd_worker.c 		unsigned long *d;
d                1095 drivers/block/drbd/drbd_worker.c 		d = kmap_atomic(page);
d                1097 drivers/block/drbd/drbd_worker.c 			if (d[i]) {
d                1098 drivers/block/drbd/drbd_worker.c 				kunmap_atomic(d);
d                1102 drivers/block/drbd/drbd_worker.c 		kunmap_atomic(d);
d                 765 drivers/block/loop.c static ssize_t loop_attr_do_show_##_name(struct device *d,		\
d                 768 drivers/block/loop.c 	return loop_attr_show(d, b, loop_attr_##_name##_show);		\
d                  72 drivers/block/paride/aten.c {	int  k, a, b, c, d;
d                  80 drivers/block/paride/aten.c 			w2(0); d = r1(); w0(0x48); c = r1();
d                  81 drivers/block/paride/aten.c 			buf[2*k] = j44(c,d);
d                 198 drivers/block/paride/epia.c {       int     ph, k, last, d;
d                 207 drivers/block/paride/epia.c                         d = buf[k];
d                 208 drivers/block/paride/epia.c                         if (d != last) { last = d; w0(d); }
d                  66 drivers/block/paride/fit2.c {	int  k, a, b, c, d;
d                  74 drivers/block/paride/fit2.c 		w0(3); c = r1(); w0(2); d = r1(); 
d                  76 drivers/block/paride/fit2.c 		buf[4*k+1] = j44(d,c);
d                  80 drivers/block/paride/fit2.c                 w0(1); c = r1(); w0(0); d = r1(); 
d                  81 drivers/block/paride/fit2.c                 buf[4*k+2] = j44(d,c);
d                  97 drivers/block/paride/fit3.c {	int  k, a, b, c, d;
d                 106 drivers/block/paride/fit3.c 		    w2(0xe); d = r1();
d                 108 drivers/block/paride/fit3.c 		    buf[2*k+1] = j44(c,d);
d                 129 drivers/block/paride/on26.c {       int     i, m, d, x=0, y=0;
d                 134 drivers/block/paride/on26.c         d = pi->delay;
d                 177 drivers/block/paride/on26.c         pi->delay = d;
d                 434 drivers/block/paride/pcd.c 	int r, d, p, n, k, j;
d                 444 drivers/block/paride/pcd.c 			d = read_reg(cd, 4) + 256 * read_reg(cd, 5);
d                 445 drivers/block/paride/pcd.c 			n = (d + 3) & 0xfffc;
d                 459 drivers/block/paride/pcd.c 					     cd->name, fun, p, d, k);
d                 359 drivers/block/paride/pg.c 	int r, d, n, p;
d                 367 drivers/block/paride/pg.c 		d = (read_reg(dev, 4) + 256 * read_reg(dev, 5));
d                 368 drivers/block/paride/pg.c 		n = ((d + 3) & 0xfffc);
d                 377 drivers/block/paride/pg.c 		dev->dlen += (1 - p) * d;
d                 378 drivers/block/paride/pg.c 		buf += d;
d                 388 drivers/block/paride/ppc6lnx.c 				u8 d;
d                 396 drivers/block/paride/ppc6lnx.c 				d = inb(ppc->lpt_addr + 1);
d                 398 drivers/block/paride/ppc6lnx.c 				d = ((d & 0x80) >> 1) | ((d & 0x38) >> 3);
d                 406 drivers/block/paride/ppc6lnx.c 				d |= inb(ppc->lpt_addr + 1) & 0xB8;
d                 408 drivers/block/paride/ppc6lnx.c 				*data++ = d;
d                  98 drivers/bluetooth/hci_bcsp.c static void bcsp_crc_update(u16 *crc, u8 d)
d                 102 drivers/bluetooth/hci_bcsp.c 	reg = (reg >> 4) ^ crc_table[(reg ^ d) & 0x000f];
d                 103 drivers/bluetooth/hci_bcsp.c 	reg = (reg >> 4) ^ crc_table[(reg ^ (d >> 4)) & 0x000f];
d                  38 drivers/bus/mips_cdmm.c #define to_mips_cdmm_driver(d)	container_of(d, struct mips_cdmm_driver, drv)
d                 598 drivers/bus/moxtet.c static int moxtet_irq_domain_map(struct irq_domain *d, unsigned int irq,
d                 601 drivers/bus/moxtet.c 	struct moxtet *moxtet = d->host_data;
d                 608 drivers/bus/moxtet.c 	irq_set_chip_data(irq, d->host_data);
d                 614 drivers/bus/moxtet.c static int moxtet_irq_domain_xlate(struct irq_domain *d,
d                 620 drivers/bus/moxtet.c 	struct moxtet *moxtet = d->host_data;
d                 641 drivers/bus/moxtet.c static void moxtet_irq_mask(struct irq_data *d)
d                 643 drivers/bus/moxtet.c 	struct moxtet *moxtet = irq_data_get_irq_chip_data(d);
d                 645 drivers/bus/moxtet.c 	moxtet->irq.masked |= BIT(d->hwirq);
d                 648 drivers/bus/moxtet.c static void moxtet_irq_unmask(struct irq_data *d)
d                 650 drivers/bus/moxtet.c 	struct moxtet *moxtet = irq_data_get_irq_chip_data(d);
d                 652 drivers/bus/moxtet.c 	moxtet->irq.masked &= ~BIT(d->hwirq);
d                 655 drivers/bus/moxtet.c static void moxtet_irq_print_chip(struct irq_data *d, struct seq_file *p)
d                 657 drivers/bus/moxtet.c 	struct moxtet *moxtet = irq_data_get_irq_chip_data(d);
d                 658 drivers/bus/moxtet.c 	struct moxtet_irqpos *pos = &moxtet->irq.position[d->hwirq];
d                 668 drivers/char/random.c 	__u32 c = f->pool[2],	d = f->pool[3];
d                 670 drivers/char/random.c 	a += b;			c += d;
d                 671 drivers/char/random.c 	b = rol32(b, 6);	d = rol32(d, 27);
d                 672 drivers/char/random.c 	d ^= a;			b ^= c;
d                 674 drivers/char/random.c 	a += b;			c += d;
d                 675 drivers/char/random.c 	b = rol32(b, 16);	d = rol32(d, 14);
d                 676 drivers/char/random.c 	d ^= a;			b ^= c;
d                 678 drivers/char/random.c 	a += b;			c += d;
d                 679 drivers/char/random.c 	b = rol32(b, 6);	d = rol32(d, 27);
d                 680 drivers/char/random.c 	d ^= a;			b ^= c;
d                 682 drivers/char/random.c 	a += b;			c += d;
d                 683 drivers/char/random.c 	b = rol32(b, 16);	d = rol32(d, 14);
d                 684 drivers/char/random.c 	d ^= a;			b ^= c;
d                 687 drivers/char/random.c 	f->pool[2] = c;  f->pool[3] = d;
d                1090 drivers/char/random.c 	__u32		*s, *d;
d                1100 drivers/char/random.c 	d = &crng->state[4];
d                1102 drivers/char/random.c 		*d++ ^= *s++;
d                 279 drivers/char/tlclk.c static ssize_t show_current_ref(struct device *d,
d                 295 drivers/char/tlclk.c static ssize_t show_telclock_version(struct device *d,
d                 311 drivers/char/tlclk.c static ssize_t show_alarms(struct device *d,
d                 326 drivers/char/tlclk.c static ssize_t store_received_ref_clk3a(struct device *d,
d                 334 drivers/char/tlclk.c 	dev_dbg(d, ": tmp = 0x%lX\n", tmp);
d                 348 drivers/char/tlclk.c static ssize_t store_received_ref_clk3b(struct device *d,
d                 356 drivers/char/tlclk.c 	dev_dbg(d, ": tmp = 0x%lX\n", tmp);
d                 370 drivers/char/tlclk.c static ssize_t store_enable_clk3b_output(struct device *d,
d                 378 drivers/char/tlclk.c 	dev_dbg(d, ": tmp = 0x%lX\n", tmp);
d                 391 drivers/char/tlclk.c static ssize_t store_enable_clk3a_output(struct device *d,
d                 399 drivers/char/tlclk.c 	dev_dbg(d, "tmp = 0x%lX\n", tmp);
d                 412 drivers/char/tlclk.c static ssize_t store_enable_clkb1_output(struct device *d,
d                 420 drivers/char/tlclk.c 	dev_dbg(d, "tmp = 0x%lX\n", tmp);
d                 434 drivers/char/tlclk.c static ssize_t store_enable_clka1_output(struct device *d,
d                 442 drivers/char/tlclk.c 	dev_dbg(d, "tmp = 0x%lX\n", tmp);
d                 455 drivers/char/tlclk.c static ssize_t store_enable_clkb0_output(struct device *d,
d                 463 drivers/char/tlclk.c 	dev_dbg(d, "tmp = 0x%lX\n", tmp);
d                 476 drivers/char/tlclk.c static ssize_t store_enable_clka0_output(struct device *d,
d                 484 drivers/char/tlclk.c 	dev_dbg(d, "tmp = 0x%lX\n", tmp);
d                 497 drivers/char/tlclk.c static ssize_t store_select_amcb2_transmit_clock(struct device *d,
d                 505 drivers/char/tlclk.c 	dev_dbg(d, "tmp = 0x%lX\n", tmp);
d                 539 drivers/char/tlclk.c static ssize_t store_select_amcb1_transmit_clock(struct device *d,
d                 547 drivers/char/tlclk.c 	dev_dbg(d, "tmp = 0x%lX\n", tmp);
d                 581 drivers/char/tlclk.c static ssize_t store_select_redundant_clock(struct device *d,
d                 589 drivers/char/tlclk.c 	dev_dbg(d, "tmp = 0x%lX\n", tmp);
d                 602 drivers/char/tlclk.c static ssize_t store_select_ref_frequency(struct device *d,
d                 610 drivers/char/tlclk.c 	dev_dbg(d, "tmp = 0x%lX\n", tmp);
d                 623 drivers/char/tlclk.c static ssize_t store_filter_select(struct device *d,
d                 631 drivers/char/tlclk.c 	dev_dbg(d, "tmp = 0x%lX\n", tmp);
d                 643 drivers/char/tlclk.c static ssize_t store_hardware_switching_mode(struct device *d,
d                 651 drivers/char/tlclk.c 	dev_dbg(d, "tmp = 0x%lX\n", tmp);
d                 664 drivers/char/tlclk.c static ssize_t store_hardware_switching(struct device *d,
d                 672 drivers/char/tlclk.c 	dev_dbg(d, "tmp = 0x%lX\n", tmp);
d                 685 drivers/char/tlclk.c static ssize_t store_refalign (struct device *d,
d                 692 drivers/char/tlclk.c 	dev_dbg(d, "tmp = 0x%lX\n", tmp);
d                 704 drivers/char/tlclk.c static ssize_t store_mode_select (struct device *d,
d                 712 drivers/char/tlclk.c 	dev_dbg(d, "tmp = 0x%lX\n", tmp);
d                 724 drivers/char/tlclk.c static ssize_t store_reset (struct device *d,
d                 732 drivers/char/tlclk.c 	dev_dbg(d, "tmp = 0x%lX\n", tmp);
d                 167 drivers/char/tpm/tpm.h #define to_tpm_chip(d) container_of(d, struct tpm_chip, dev)
d                 208 drivers/char/tpm/tpm.h 	__be32	d;
d                 372 drivers/char/tpm/tpm1-cmd.c 	timeout_chip[3] = be32_to_cpu(cap.timeout.d);
d                  58 drivers/clk/actions/owl-factor.h #define div_mask(d) ((1 << ((d)->width)) - 1)
d                 139 drivers/clk/clk-aspeed.c 		u32 d = val & 0xf;
d                 142 drivers/clk/clk-aspeed.c 		div = d + 1;
d                 107 drivers/clk/clk-axi-clkgen.c 	unsigned long d, d_min, d_max, _d_min, _d_max;
d                 129 drivers/clk/clk-axi-clkgen.c 		for (d = _d_min; d <= _d_max; d++) {
d                 130 drivers/clk/clk-axi-clkgen.c 			fvco = fin * m / d;
d                 137 drivers/clk/clk-axi-clkgen.c 				*best_d = d;
d                 253 drivers/clk/clk-axi-clkgen.c 	unsigned int d, m, dout;
d                 264 drivers/clk/clk-axi-clkgen.c 	axi_clkgen_calc_params(parent_rate, rate, &d, &m, &dout);
d                 266 drivers/clk/clk-axi-clkgen.c 	if (d == 0 || dout == 0 || m == 0)
d                 278 drivers/clk/clk-axi-clkgen.c 	axi_clkgen_calc_clk_params(d, &low, &high, &edge, &nocount);
d                 302 drivers/clk/clk-axi-clkgen.c 	unsigned int d, m, dout;
d                 305 drivers/clk/clk-axi-clkgen.c 	axi_clkgen_calc_params(*parent_rate, rate, &d, &m, &dout);
d                 307 drivers/clk/clk-axi-clkgen.c 	if (d == 0 || dout == 0 || m == 0)
d                 311 drivers/clk/clk-axi-clkgen.c 	tmp = DIV_ROUND_CLOSEST_ULL(tmp, dout * d);
d                 320 drivers/clk/clk-axi-clkgen.c 	unsigned int d, m, dout;
d                 334 drivers/clk/clk-axi-clkgen.c 		d = 1;
d                 336 drivers/clk/clk-axi-clkgen.c 		d = (reg & 0x3f) + ((reg >> 6) & 0x3f);
d                 346 drivers/clk/clk-axi-clkgen.c 	if (d == 0 || dout == 0)
d                 350 drivers/clk/clk-axi-clkgen.c 	tmp = DIV_ROUND_CLOSEST_ULL(tmp, dout * d);
d                 209 drivers/clk/clk-si514.c 	u32 d = settings->hs_div * BIT(settings->ls_div_bits);
d                 211 drivers/clk/clk-si514.c 	return ((u32)(((m * FXO) + (FXO / 2)) >> 29)) / d;
d                 280 drivers/clk/clk-si544.c 	u32 d = settings->hs_div * BIT(settings->ls_div_bits);
d                 292 drivers/clk/clk-si544.c 	do_div(vco, d);
d                 314 drivers/clk/clk-vt8500.c #define VT8500_BITS_TO_FREQ(r, m, d)					\
d                 315 drivers/clk/clk-vt8500.c 				((r / d) * m)
d                 317 drivers/clk/clk-vt8500.c #define VT8500_BITS_TO_VAL(m, d)					\
d                 318 drivers/clk/clk-vt8500.c 				((d == 2 ? 0 : 0x100) | ((m >> 1) & 0x1F))
d                  25 drivers/clk/davinci/psc.h #define LPSC_CLKDEV(c, d) {	\
d                  27 drivers/clk/davinci/psc.h 	.dev_id = (d)		\
d                  30 drivers/clk/davinci/psc.h #define LPSC_CLKDEV1(n, c, d) \
d                  32 drivers/clk/davinci/psc.h 	LPSC_CLKDEV((c), (d)),						\
d                  69 drivers/clk/davinci/psc.h #define LPSC(m, d, n, p, c, f)	\
d                  75 drivers/clk/davinci/psc.h 	.pd	= (d),		\
d                  12 drivers/clk/imx/clk-fixup-div.c #define div_mask(d)	((1 << (d->width)) - 1)
d                 194 drivers/clk/meson/axg-audio.c static AUD_MST_SCLK_PRE_EN(d, AUDIO_MST_D_SCLK_CTRL0);
d                 230 drivers/clk/meson/axg-audio.c static AUD_MST_SCLK_DIV(d, AUDIO_MST_D_SCLK_CTRL0);
d                 241 drivers/clk/meson/axg-audio.c static AUD_MST_SCLK_POST_EN(d, AUDIO_MST_D_SCLK_CTRL0);
d                 281 drivers/clk/meson/axg-audio.c static AUD_MST_SCLK(d, AUDIO_MST_D_SCLK_CTRL1);
d                 292 drivers/clk/meson/axg-audio.c static AUD_MST_LRCLK_DIV(d, AUDIO_MST_D_SCLK_CTRL0);
d                 303 drivers/clk/meson/axg-audio.c static AUD_MST_LRCLK(d, AUDIO_MST_D_SCLK_CTRL1);
d                 586 drivers/clk/nxp/clk-lpc32xx.c 	u64 m_i, o = rate, i = *parent_rate, d = (u64)rate << 6;
d                 608 drivers/clk/nxp/clk-lpc32xx.c 			if (o * n_i * (1 << p_i) - i * m_i <= d) {
d                 612 drivers/clk/nxp/clk-lpc32xx.c 				d = o * n_i * (1 << p_i) - i * m_i;
d                 617 drivers/clk/nxp/clk-lpc32xx.c 	if (d == (u64)rate << 6) {
d                 635 drivers/clk/nxp/clk-lpc32xx.c 	if (!d)
d                 331 drivers/clk/pxa/clk-pxa25x.c 	struct dummy_clk *d;
d                 341 drivers/clk/pxa/clk-pxa25x.c 		d = &dummy_clks[i];
d                 342 drivers/clk/pxa/clk-pxa25x.c 		name = d->dev_id ? d->dev_id : d->con_id;
d                 343 drivers/clk/pxa/clk-pxa25x.c 		clk = clk_register_fixed_factor(NULL, name, d->parent, 0, 1, 1);
d                 344 drivers/clk/pxa/clk-pxa25x.c 		clk_register_clkdev(clk, d->con_id, d->dev_id);
d                 470 drivers/clk/pxa/clk-pxa27x.c 	struct dummy_clk *d;
d                 475 drivers/clk/pxa/clk-pxa27x.c 		d = &dummy_clks[i];
d                 476 drivers/clk/pxa/clk-pxa27x.c 		name = d->dev_id ? d->dev_id : d->con_id;
d                 477 drivers/clk/pxa/clk-pxa27x.c 		clk = clk_register_fixed_factor(NULL, name, d->parent, 0, 1, 1);
d                 478 drivers/clk/pxa/clk-pxa27x.c 		clk_register_clkdev(clk, d->con_id, d->dev_id);
d                 316 drivers/clk/pxa/clk-pxa3xx.c 	struct dummy_clk *d;
d                 321 drivers/clk/pxa/clk-pxa3xx.c 		d = &dummy_clks[i];
d                 322 drivers/clk/pxa/clk-pxa3xx.c 		name = d->dev_id ? d->dev_id : d->con_id;
d                 323 drivers/clk/pxa/clk-pxa3xx.c 		clk = clk_register_fixed_factor(NULL, name, d->parent, 0, 1, 1);
d                 324 drivers/clk/pxa/clk-pxa3xx.c 		clk_register_clkdev(clk, d->con_id, d->dev_id);
d                  23 drivers/clk/qcom/clk-hfpll.c 	struct hfpll_data const *hd = h->d;
d                  56 drivers/clk/qcom/clk-hfpll.c 	struct hfpll_data const *hd = h->d;
d                  92 drivers/clk/qcom/clk-hfpll.c 	struct hfpll_data const *hd = h->d;
d                 107 drivers/clk/qcom/clk-hfpll.c 	struct hfpll_data const *hd = h->d;
d                 132 drivers/clk/qcom/clk-hfpll.c 	struct hfpll_data const *hd = h->d;
d                 152 drivers/clk/qcom/clk-hfpll.c 	struct hfpll_data const *hd = h->d;
d                 190 drivers/clk/qcom/clk-hfpll.c 	struct hfpll_data const *hd = h->d;
d                 202 drivers/clk/qcom/clk-hfpll.c 	struct hfpll_data const *hd = h->d;
d                 226 drivers/clk/qcom/clk-hfpll.c 	struct hfpll_data const *hd = h->d;
d                  32 drivers/clk/qcom/clk-hfpll.h 	struct hfpll_data const *d;
d                  89 drivers/clk/qcom/clk-krait.c 	struct krait_div2_clk *d = to_krait_div2_clk(hw);
d                  92 drivers/clk/qcom/clk-krait.c 	u32 mask = BIT(d->width) - 1;
d                  94 drivers/clk/qcom/clk-krait.c 	if (d->lpl)
d                  95 drivers/clk/qcom/clk-krait.c 		mask = mask << (d->shift + LPL_SHIFT) | mask << d->shift;
d                  98 drivers/clk/qcom/clk-krait.c 	val = krait_get_l2_indirect_reg(d->offset);
d                 100 drivers/clk/qcom/clk-krait.c 	krait_set_l2_indirect_reg(d->offset, val);
d                 109 drivers/clk/qcom/clk-krait.c 	struct krait_div2_clk *d = to_krait_div2_clk(hw);
d                 110 drivers/clk/qcom/clk-krait.c 	u32 mask = BIT(d->width) - 1;
d                 113 drivers/clk/qcom/clk-krait.c 	div = krait_get_l2_indirect_reg(d->offset);
d                 114 drivers/clk/qcom/clk-krait.c 	div >>= d->shift;
d                  62 drivers/clk/qcom/clk-regmap-mux-div.c 	u32 val, d, s;
d                  77 drivers/clk/qcom/clk-regmap-mux-div.c 	d = (val >> md->hid_shift);
d                  78 drivers/clk/qcom/clk-regmap-mux-div.c 	d &= BIT(md->hid_width) - 1;
d                  79 drivers/clk/qcom/clk-regmap-mux-div.c 	*div = d;
d                 124 drivers/clk/qcom/gcc-ipq806x.c 	.d = &hfpll0_data,
d                 150 drivers/clk/qcom/gcc-ipq806x.c 	.d = &hfpll1_data,
d                 176 drivers/clk/qcom/gcc-ipq806x.c 	.d = &hfpll_l2_data,
d                  97 drivers/clk/qcom/gcc-msm8960.c 	.d = &hfpll0_data,
d                 137 drivers/clk/qcom/gcc-msm8960.c 	.d = &hfpll1_data,
d                 163 drivers/clk/qcom/gcc-msm8960.c 	.d = &hfpll2_data,
d                 189 drivers/clk/qcom/gcc-msm8960.c 	.d = &hfpll3_data,
d                 229 drivers/clk/qcom/gcc-msm8960.c 	.d = &hfpll_l2_data,
d                3640 drivers/clk/qcom/gcc-msm8960.c 		hfpll1.d = &hfpll1_8064_data;
d                3641 drivers/clk/qcom/gcc-msm8960.c 		hfpll_l2.d = &hfpll_l2_8064_data;
d                  78 drivers/clk/qcom/hfpll.c 	h->d = &hdata;
d                  23 drivers/clk/renesas/clk-div6.c #define CPG_DIV6_DIV(d)		((d) & 0x3f)
d                 925 drivers/clk/renesas/r9a06g032-clocks.c 		const struct r9a06g032_clkdesc *d = &r9a06g032_clocks[i];
d                 926 drivers/clk/renesas/r9a06g032-clocks.c 		const char *parent_name = d->source ?
d                 927 drivers/clk/renesas/r9a06g032-clocks.c 			__clk_get_name(clocks->data.clks[d->source - 1]) :
d                 931 drivers/clk/renesas/r9a06g032-clocks.c 		switch (d->type) {
d                 933 drivers/clk/renesas/r9a06g032-clocks.c 			clk = clk_register_fixed_factor(NULL, d->name,
d                 935 drivers/clk/renesas/r9a06g032-clocks.c 							d->mul, d->div);
d                 938 drivers/clk/renesas/r9a06g032-clocks.c 			clk = r9a06g032_register_gate(clocks, parent_name, d);
d                 941 drivers/clk/renesas/r9a06g032-clocks.c 			clk = r9a06g032_register_div(clocks, parent_name, d);
d                 945 drivers/clk/renesas/r9a06g032-clocks.c 			uart_group_sel[d->dual.group] = d->dual.sel;
d                 946 drivers/clk/renesas/r9a06g032-clocks.c 			clk = r9a06g032_register_bitsel(clocks, parent_name, d);
d                 950 drivers/clk/renesas/r9a06g032-clocks.c 							  d,
d                 951 drivers/clk/renesas/r9a06g032-clocks.c 							  uart_group_sel[d->dual.group]);
d                 954 drivers/clk/renesas/r9a06g032-clocks.c 		clocks->data.clks[d->index] = clk;
d                  94 drivers/clk/samsung/clk.h #define FFACTOR(_id, cname, pname, m, d, f)		\
d                 100 drivers/clk/samsung/clk.h 		.div		= d,			\
d                 108 drivers/clk/sunxi/clk-sun4i-tcon-ch1.c 		u8 d;
d                 110 drivers/clk/sunxi/clk-sun4i-tcon-ch1.c 		for (d = 1; d < 3; d++) {
d                 113 drivers/clk/sunxi/clk-sun4i-tcon-ch1.c 			tmp_rate = parent_rate / m / d;
d                 122 drivers/clk/sunxi/clk-sun4i-tcon-ch1.c 				is_double = d;
d                  15 drivers/clk/tegra/clk-divider.c #define div_mask(d) ((1 << (d->width)) - 1)
d                  16 drivers/clk/tegra/clk-divider.c #define get_mul(d) (1 << d->frac_width)
d                  17 drivers/clk/tegra/clk-divider.c #define get_max_div(d) div_mask(d)
d                  25 drivers/clk/tegra/clk-sdmmc-mux.c #define get_max_div(d) DIV_MASK
d                 182 drivers/clk/ti/adpll.c static const char *ti_adpll_clk_get_name(struct ti_adpll_data *d,
d                 190 drivers/clk/ti/adpll.c 		err = of_property_read_string_index(d->np,
d                 200 drivers/clk/ti/adpll.c 		buf = devm_kzalloc(d->dev, 8 + 1 + strlen(base_name) + 1 +
d                 204 drivers/clk/ti/adpll.c 		sprintf(buf, "%08lx.%s.%s", d->pa, base_name, postfix);
d                 213 drivers/clk/ti/adpll.c static int ti_adpll_setup_clock(struct ti_adpll_data *d, struct clk *clock,
d                 221 drivers/clk/ti/adpll.c 	d->clocks[index].clk = clock;
d                 222 drivers/clk/ti/adpll.c 	d->clocks[index].unregister = unregister;
d                 228 drivers/clk/ti/adpll.c 			dev_warn(d->dev, "clock %s con_id lookup may fail\n",
d                 230 drivers/clk/ti/adpll.c 		snprintf(con_id, 16, "pll%03lx%s", d->pa & 0xfff, postfix + 1);
d                 234 drivers/clk/ti/adpll.c 		d->clocks[index].cl = cl;
d                 236 drivers/clk/ti/adpll.c 		dev_warn(d->dev, "no con_id for clock %s\n", name);
d                 242 drivers/clk/ti/adpll.c 	d->outputs.clks[output_index] = clock;
d                 243 drivers/clk/ti/adpll.c 	d->outputs.clk_num++;
d                 248 drivers/clk/ti/adpll.c static int ti_adpll_init_divider(struct ti_adpll_data *d,
d                 260 drivers/clk/ti/adpll.c 	child_name = ti_adpll_clk_get_name(d, output_index, name);
d                 265 drivers/clk/ti/adpll.c 	clock = clk_register_divider(d->dev, child_name, parent_name, 0,
d                 267 drivers/clk/ti/adpll.c 				     &d->lock);
d                 269 drivers/clk/ti/adpll.c 		dev_err(d->dev, "failed to register divider %s: %li\n",
d                 274 drivers/clk/ti/adpll.c 	return ti_adpll_setup_clock(d, clock, index, output_index, child_name,
d                 278 drivers/clk/ti/adpll.c static int ti_adpll_init_mux(struct ti_adpll_data *d,
d                 289 drivers/clk/ti/adpll.c 	child_name = ti_adpll_clk_get_name(d, -ENODEV, name);
d                 294 drivers/clk/ti/adpll.c 	clock = clk_register_mux(d->dev, child_name, parents, 2, 0,
d                 295 drivers/clk/ti/adpll.c 				 reg, shift, 1, 0, &d->lock);
d                 297 drivers/clk/ti/adpll.c 		dev_err(d->dev, "failed to register mux %s: %li\n",
d                 302 drivers/clk/ti/adpll.c 	return ti_adpll_setup_clock(d, clock, index, -ENODEV, child_name,
d                 306 drivers/clk/ti/adpll.c static int ti_adpll_init_gate(struct ti_adpll_data *d,
d                 318 drivers/clk/ti/adpll.c 	child_name = ti_adpll_clk_get_name(d, output_index, name);
d                 323 drivers/clk/ti/adpll.c 	clock = clk_register_gate(d->dev, child_name, parent_name, 0,
d                 325 drivers/clk/ti/adpll.c 				  &d->lock);
d                 327 drivers/clk/ti/adpll.c 		dev_err(d->dev, "failed to register gate %s: %li\n",
d                 332 drivers/clk/ti/adpll.c 	return ti_adpll_setup_clock(d, clock, index, output_index, child_name,
d                 336 drivers/clk/ti/adpll.c static int ti_adpll_init_fixed_factor(struct ti_adpll_data *d,
d                 347 drivers/clk/ti/adpll.c 	child_name = ti_adpll_clk_get_name(d, -ENODEV, name);
d                 352 drivers/clk/ti/adpll.c 	clock = clk_register_fixed_factor(d->dev, child_name, parent_name,
d                 357 drivers/clk/ti/adpll.c 	return ti_adpll_setup_clock(d, clock, index, -ENODEV, child_name,
d                 361 drivers/clk/ti/adpll.c static void ti_adpll_set_idle_bypass(struct ti_adpll_data *d)
d                 366 drivers/clk/ti/adpll.c 	spin_lock_irqsave(&d->lock, flags);
d                 367 drivers/clk/ti/adpll.c 	v = readl_relaxed(d->regs + ADPLL_CLKCTRL_OFFSET);
d                 369 drivers/clk/ti/adpll.c 	writel_relaxed(v, d->regs + ADPLL_CLKCTRL_OFFSET);
d                 370 drivers/clk/ti/adpll.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 373 drivers/clk/ti/adpll.c static void ti_adpll_clear_idle_bypass(struct ti_adpll_data *d)
d                 378 drivers/clk/ti/adpll.c 	spin_lock_irqsave(&d->lock, flags);
d                 379 drivers/clk/ti/adpll.c 	v = readl_relaxed(d->regs + ADPLL_CLKCTRL_OFFSET);
d                 381 drivers/clk/ti/adpll.c 	writel_relaxed(v, d->regs + ADPLL_CLKCTRL_OFFSET);
d                 382 drivers/clk/ti/adpll.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 385 drivers/clk/ti/adpll.c static bool ti_adpll_clock_is_bypass(struct ti_adpll_data *d)
d                 389 drivers/clk/ti/adpll.c 	v = readl_relaxed(d->regs + ADPLL_STATUS_OFFSET);
d                 399 drivers/clk/ti/adpll.c static bool ti_adpll_is_locked(struct ti_adpll_data *d)
d                 401 drivers/clk/ti/adpll.c 	u32 v = readl_relaxed(d->regs + ADPLL_STATUS_OFFSET);
d                 406 drivers/clk/ti/adpll.c static int ti_adpll_wait_lock(struct ti_adpll_data *d)
d                 411 drivers/clk/ti/adpll.c 		if (ti_adpll_is_locked(d))
d                 416 drivers/clk/ti/adpll.c 	dev_err(d->dev, "pll failed to lock\n");
d                 423 drivers/clk/ti/adpll.c 	struct ti_adpll_data *d = to_adpll(dco);
d                 425 drivers/clk/ti/adpll.c 	ti_adpll_clear_idle_bypass(d);
d                 426 drivers/clk/ti/adpll.c 	ti_adpll_wait_lock(d);
d                 434 drivers/clk/ti/adpll.c 	struct ti_adpll_data *d = to_adpll(dco);
d                 436 drivers/clk/ti/adpll.c 	ti_adpll_set_idle_bypass(d);
d                 442 drivers/clk/ti/adpll.c 	struct ti_adpll_data *d = to_adpll(dco);
d                 444 drivers/clk/ti/adpll.c 	return ti_adpll_is_locked(d);
d                 455 drivers/clk/ti/adpll.c 	struct ti_adpll_data *d = to_adpll(dco);
d                 460 drivers/clk/ti/adpll.c 	if (ti_adpll_clock_is_bypass(d))
d                 463 drivers/clk/ti/adpll.c 	spin_lock_irqsave(&d->lock, flags);
d                 464 drivers/clk/ti/adpll.c 	frac_m = readl_relaxed(d->regs + ADPLL_FRACDIV_OFFSET);
d                 466 drivers/clk/ti/adpll.c 	rate = (u64)readw_relaxed(d->regs + ADPLL_MN2DIV_OFFSET) << 18;
d                 469 drivers/clk/ti/adpll.c 	divider = (readw_relaxed(d->regs + ADPLL_M2NDIV_OFFSET) + 1) << 18;
d                 470 drivers/clk/ti/adpll.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 474 drivers/clk/ti/adpll.c 	if (d->c->is_type_s) {
d                 475 drivers/clk/ti/adpll.c 		v = readl_relaxed(d->regs + ADPLL_CLKCTRL_OFFSET);
d                 498 drivers/clk/ti/adpll.c static int ti_adpll_init_dco(struct ti_adpll_data *d)
d                 505 drivers/clk/ti/adpll.c 	d->outputs.clks = devm_kcalloc(d->dev,
d                 509 drivers/clk/ti/adpll.c 	if (!d->outputs.clks)
d                 512 drivers/clk/ti/adpll.c 	if (d->c->output_index < 0)
d                 517 drivers/clk/ti/adpll.c 	init.name = ti_adpll_clk_get_name(d, d->c->output_index, postfix);
d                 521 drivers/clk/ti/adpll.c 	init.parent_names = d->parent_names;
d                 522 drivers/clk/ti/adpll.c 	init.num_parents = d->c->nr_max_inputs;
d                 525 drivers/clk/ti/adpll.c 	d->dco.hw.init = &init;
d                 527 drivers/clk/ti/adpll.c 	if (d->c->is_type_s)
d                 533 drivers/clk/ti/adpll.c 	err = ti_adpll_init_divider(d, TI_ADPLL_N2, -ENODEV, "n2",
d                 534 drivers/clk/ti/adpll.c 				    d->parent_clocks[TI_ADPLL_CLKINP],
d                 535 drivers/clk/ti/adpll.c 				    d->regs + ADPLL_MN2DIV_OFFSET,
d                 540 drivers/clk/ti/adpll.c 	clock = devm_clk_register(d->dev, &d->dco.hw);
d                 544 drivers/clk/ti/adpll.c 	return ti_adpll_setup_clock(d, clock, TI_ADPLL_DCO, d->c->output_index,
d                 581 drivers/clk/ti/adpll.c 	struct ti_adpll_data *d = co->adpll;
d                 583 drivers/clk/ti/adpll.c 	return ti_adpll_clock_is_bypass(d);
d                 586 drivers/clk/ti/adpll.c static int ti_adpll_init_clkout(struct ti_adpll_data *d,
d                 600 drivers/clk/ti/adpll.c 	co = devm_kzalloc(d->dev, sizeof(*co), GFP_KERNEL);
d                 603 drivers/clk/ti/adpll.c 	co->adpll = d;
d                 605 drivers/clk/ti/adpll.c 	err = of_property_read_string_index(d->np,
d                 612 drivers/clk/ti/adpll.c 	ops = devm_kzalloc(d->dev, sizeof(*ops), GFP_KERNEL);
d                 628 drivers/clk/ti/adpll.c 		co->gate.lock = &d->lock;
d                 629 drivers/clk/ti/adpll.c 		co->gate.reg = d->regs + ADPLL_CLKCTRL_OFFSET;
d                 636 drivers/clk/ti/adpll.c 	clock = devm_clk_register(d->dev, &co->hw);
d                 638 drivers/clk/ti/adpll.c 		dev_err(d->dev, "failed to register output %s: %li\n",
d                 643 drivers/clk/ti/adpll.c 	return ti_adpll_setup_clock(d, clock, index, output_index, child_name,
d                 647 drivers/clk/ti/adpll.c static int ti_adpll_init_children_adpll_s(struct ti_adpll_data *d)
d                 651 drivers/clk/ti/adpll.c 	if (!d->c->is_type_s)
d                 655 drivers/clk/ti/adpll.c 	err = ti_adpll_init_mux(d, TI_ADPLL_BYPASS, "bypass",
d                 656 drivers/clk/ti/adpll.c 				d->clocks[TI_ADPLL_N2].clk,
d                 657 drivers/clk/ti/adpll.c 				d->parent_clocks[TI_ADPLL_CLKINPULOW],
d                 658 drivers/clk/ti/adpll.c 				d->regs + ADPLL_CLKCTRL_OFFSET,
d                 664 drivers/clk/ti/adpll.c 	err = ti_adpll_init_divider(d, TI_ADPLL_M2, -ENODEV, "m2",
d                 665 drivers/clk/ti/adpll.c 				    d->clocks[TI_ADPLL_DCO].clk,
d                 666 drivers/clk/ti/adpll.c 				    d->regs + ADPLL_M2NDIV_OFFSET,
d                 674 drivers/clk/ti/adpll.c 	err = ti_adpll_init_fixed_factor(d, TI_ADPLL_DIV2, "div2",
d                 675 drivers/clk/ti/adpll.c 					 d->clocks[TI_ADPLL_M2].clk,
d                 681 drivers/clk/ti/adpll.c 	err = ti_adpll_init_clkout(d, TI_ADPLL_CLKOUT, TI_ADPLL_S_CLKOUT,
d                 683 drivers/clk/ti/adpll.c 				   d->clocks[TI_ADPLL_DIV2].clk,
d                 684 drivers/clk/ti/adpll.c 				   d->clocks[TI_ADPLL_BYPASS].clk);
d                 689 drivers/clk/ti/adpll.c 	err = ti_adpll_init_clkout(d, TI_ADPLL_CLKOUT2, TI_ADPLL_S_CLKOUTX2, 0,
d                 690 drivers/clk/ti/adpll.c 				   "clkout2", d->clocks[TI_ADPLL_M2].clk,
d                 691 drivers/clk/ti/adpll.c 				   d->clocks[TI_ADPLL_BYPASS].clk);
d                 696 drivers/clk/ti/adpll.c 	if (d->parent_clocks[TI_ADPLL_CLKINPHIF]) {
d                 697 drivers/clk/ti/adpll.c 		err = ti_adpll_init_mux(d, TI_ADPLL_HIF, "hif",
d                 698 drivers/clk/ti/adpll.c 					d->clocks[TI_ADPLL_DCO].clk,
d                 699 drivers/clk/ti/adpll.c 					d->parent_clocks[TI_ADPLL_CLKINPHIF],
d                 700 drivers/clk/ti/adpll.c 					d->regs + ADPLL_CLKCTRL_OFFSET,
d                 707 drivers/clk/ti/adpll.c 	err = ti_adpll_init_divider(d, TI_ADPLL_M3, TI_ADPLL_S_CLKOUTHIF, "m3",
d                 708 drivers/clk/ti/adpll.c 				    d->clocks[TI_ADPLL_HIF].clk,
d                 709 drivers/clk/ti/adpll.c 				    d->regs + ADPLL_M3DIV_OFFSET,
d                 721 drivers/clk/ti/adpll.c static int ti_adpll_init_children_adpll_lj(struct ti_adpll_data *d)
d                 725 drivers/clk/ti/adpll.c 	if (d->c->is_type_s)
d                 729 drivers/clk/ti/adpll.c 	err = ti_adpll_init_gate(d, TI_ADPLL_DCO_GATE, TI_ADPLL_LJ_CLKDCOLDO,
d                 730 drivers/clk/ti/adpll.c 				 "clkdcoldo", d->clocks[TI_ADPLL_DCO].clk,
d                 731 drivers/clk/ti/adpll.c 				 d->regs + ADPLL_CLKCTRL_OFFSET,
d                 737 drivers/clk/ti/adpll.c 	err = ti_adpll_init_divider(d, TI_ADPLL_M2, -ENODEV,
d                 738 drivers/clk/ti/adpll.c 				    "m2", d->clocks[TI_ADPLL_DCO].clk,
d                 739 drivers/clk/ti/adpll.c 				    d->regs + ADPLL_M2NDIV_OFFSET,
d                 747 drivers/clk/ti/adpll.c 	err = ti_adpll_init_gate(d, TI_ADPLL_M2_GATE, TI_ADPLL_LJ_CLKOUTLDO,
d                 748 drivers/clk/ti/adpll.c 				 "clkoutldo", d->clocks[TI_ADPLL_M2].clk,
d                 749 drivers/clk/ti/adpll.c 				 d->regs + ADPLL_CLKCTRL_OFFSET,
d                 756 drivers/clk/ti/adpll.c 	err = ti_adpll_init_mux(d, TI_ADPLL_BYPASS, "bypass",
d                 757 drivers/clk/ti/adpll.c 				d->clocks[TI_ADPLL_N2].clk,
d                 758 drivers/clk/ti/adpll.c 				d->parent_clocks[TI_ADPLL_CLKINPULOW],
d                 759 drivers/clk/ti/adpll.c 				d->regs + ADPLL_CLKCTRL_OFFSET,
d                 765 drivers/clk/ti/adpll.c 	err = ti_adpll_init_clkout(d, TI_ADPLL_CLKOUT, TI_ADPLL_S_CLKOUT,
d                 767 drivers/clk/ti/adpll.c 				   d->clocks[TI_ADPLL_M2].clk,
d                 768 drivers/clk/ti/adpll.c 				   d->clocks[TI_ADPLL_BYPASS].clk);
d                 775 drivers/clk/ti/adpll.c static void ti_adpll_free_resources(struct ti_adpll_data *d)
d                 780 drivers/clk/ti/adpll.c 		struct ti_adpll_clock *ac = &d->clocks[i];
d                 801 drivers/clk/ti/adpll.c static int ti_adpll_init_registers(struct ti_adpll_data *d)
d                 805 drivers/clk/ti/adpll.c 	if (d->c->is_type_s) {
d                 807 drivers/clk/ti/adpll.c 		ti_adpll_unlock_all(d->iobase + ADPLL_PLLSS_MMR_LOCK_OFFSET);
d                 810 drivers/clk/ti/adpll.c 	d->regs = d->iobase + register_offset + ADPLL_PWRCTRL_OFFSET;
d                 815 drivers/clk/ti/adpll.c static int ti_adpll_init_inputs(struct ti_adpll_data *d)
d                 821 drivers/clk/ti/adpll.c 	nr_inputs = of_clk_get_parent_count(d->np);
d                 822 drivers/clk/ti/adpll.c 	if (nr_inputs < d->c->nr_max_inputs) {
d                 823 drivers/clk/ti/adpll.c 		dev_err(d->dev, error, nr_inputs);
d                 826 drivers/clk/ti/adpll.c 	of_clk_parent_fill(d->np, d->parent_names, nr_inputs);
d                 828 drivers/clk/ti/adpll.c 	clock = devm_clk_get(d->dev, d->parent_names[0]);
d                 830 drivers/clk/ti/adpll.c 		dev_err(d->dev, "could not get clkinp\n");
d                 833 drivers/clk/ti/adpll.c 	d->parent_clocks[TI_ADPLL_CLKINP] = clock;
d                 835 drivers/clk/ti/adpll.c 	clock = devm_clk_get(d->dev, d->parent_names[1]);
d                 837 drivers/clk/ti/adpll.c 		dev_err(d->dev, "could not get clkinpulow clock\n");
d                 840 drivers/clk/ti/adpll.c 	d->parent_clocks[TI_ADPLL_CLKINPULOW] = clock;
d                 842 drivers/clk/ti/adpll.c 	if (d->c->is_type_s) {
d                 843 drivers/clk/ti/adpll.c 		clock =  devm_clk_get(d->dev, d->parent_names[2]);
d                 845 drivers/clk/ti/adpll.c 			dev_err(d->dev, "could not get clkinphif clock\n");
d                 848 drivers/clk/ti/adpll.c 		d->parent_clocks[TI_ADPLL_CLKINPHIF] = clock;
d                 881 drivers/clk/ti/adpll.c 	struct ti_adpll_data *d;
d                 891 drivers/clk/ti/adpll.c 	d = devm_kzalloc(dev, sizeof(*d), GFP_KERNEL);
d                 892 drivers/clk/ti/adpll.c 	if (!d)
d                 894 drivers/clk/ti/adpll.c 	d->dev = dev;
d                 895 drivers/clk/ti/adpll.c 	d->np = node;
d                 896 drivers/clk/ti/adpll.c 	d->c = pdata;
d                 897 drivers/clk/ti/adpll.c 	dev_set_drvdata(d->dev, d);
d                 898 drivers/clk/ti/adpll.c 	spin_lock_init(&d->lock);
d                 903 drivers/clk/ti/adpll.c 	d->pa = res->start;
d                 905 drivers/clk/ti/adpll.c 	d->iobase = devm_ioremap_resource(dev, res);
d                 906 drivers/clk/ti/adpll.c 	if (IS_ERR(d->iobase)) {
d                 908 drivers/clk/ti/adpll.c 			PTR_ERR(d->iobase));
d                 909 drivers/clk/ti/adpll.c 		return PTR_ERR(d->iobase);
d                 912 drivers/clk/ti/adpll.c 	err = ti_adpll_init_registers(d);
d                 916 drivers/clk/ti/adpll.c 	err = ti_adpll_init_inputs(d);
d                 920 drivers/clk/ti/adpll.c 	d->clocks = devm_kcalloc(d->dev,
d                 924 drivers/clk/ti/adpll.c 	if (!d->clocks)
d                 927 drivers/clk/ti/adpll.c 	err = ti_adpll_init_dco(d);
d                 933 drivers/clk/ti/adpll.c 	err = ti_adpll_init_children_adpll_s(d);
d                 936 drivers/clk/ti/adpll.c 	err = ti_adpll_init_children_adpll_lj(d);
d                 940 drivers/clk/ti/adpll.c 	err = of_clk_add_provider(d->np, of_clk_src_onecell_get, &d->outputs);
d                 948 drivers/clk/ti/adpll.c 	ti_adpll_free_resources(d);
d                 955 drivers/clk/ti/adpll.c 	struct ti_adpll_data *d = dev_get_drvdata(&pdev->dev);
d                 957 drivers/clk/ti/adpll.c 	ti_adpll_free_resources(d);
d                  29 drivers/clk/ti/divider.c #define div_mask(d)	((1 << ((d)->width)) - 1)
d                 985 drivers/clk/ti/dpll3xxx.c 	const struct omap3_dpll5_settings *d;
d                 998 drivers/clk/ti/dpll3xxx.c 	d = &precomputed[i];
d                1002 drivers/clk/ti/dpll3xxx.c 	dd->last_rounded_m = d->m;
d                1003 drivers/clk/ti/dpll3xxx.c 	dd->last_rounded_n = d->n;
d                1004 drivers/clk/ti/dpll3xxx.c 	dd->last_rounded_rate = div_u64((u64)parent_rate * d->m, d->n);
d                 163 drivers/clocksource/timer-atmel-tcb.c static int tc_shutdown(struct clock_event_device *d)
d                 165 drivers/clocksource/timer-atmel-tcb.c 	struct tc_clkevt_device *tcd = to_tc_clkevt(d);
d                 170 drivers/clocksource/timer-atmel-tcb.c 	if (!clockevent_state_detached(d))
d                 176 drivers/clocksource/timer-atmel-tcb.c static int tc_set_oneshot(struct clock_event_device *d)
d                 178 drivers/clocksource/timer-atmel-tcb.c 	struct tc_clkevt_device *tcd = to_tc_clkevt(d);
d                 181 drivers/clocksource/timer-atmel-tcb.c 	if (clockevent_state_oneshot(d) || clockevent_state_periodic(d))
d                 182 drivers/clocksource/timer-atmel-tcb.c 		tc_shutdown(d);
d                 195 drivers/clocksource/timer-atmel-tcb.c static int tc_set_periodic(struct clock_event_device *d)
d                 197 drivers/clocksource/timer-atmel-tcb.c 	struct tc_clkevt_device *tcd = to_tc_clkevt(d);
d                 200 drivers/clocksource/timer-atmel-tcb.c 	if (clockevent_state_oneshot(d) || clockevent_state_periodic(d))
d                 201 drivers/clocksource/timer-atmel-tcb.c 		tc_shutdown(d);
d                 222 drivers/clocksource/timer-atmel-tcb.c static int tc_next_event(unsigned long delta, struct clock_event_device *d)
d                 590 drivers/cpufreq/acpi-cpufreq.c static int sw_any_bug_found(const struct dmi_system_id *d)
d                 672 drivers/cpufreq/longhaul.c 	struct acpi_device *d;
d                 674 drivers/cpufreq/longhaul.c 	if (acpi_bus_get_device(obj_handle, &d))
d                 677 drivers/cpufreq/longhaul.c 	*return_value = acpi_driver_data(d);
d                 567 drivers/cpufreq/powernow-k7.c static int acer_cpufreq_pst(const struct dmi_system_id *d)
d                 570 drivers/cpufreq/powernow-k7.c 		d->ident);
d                 548 drivers/crypto/axis/artpec6_crypto.c 	struct pdma_descr *d;
d                 556 drivers/crypto/axis/artpec6_crypto.c 	d = &dma->out[dma->out_cnt++];
d                 557 drivers/crypto/axis/artpec6_crypto.c 	memset(d, 0, sizeof(*d));
d                 559 drivers/crypto/axis/artpec6_crypto.c 	d->ctrl.short_descr = 0;
d                 560 drivers/crypto/axis/artpec6_crypto.c 	d->ctrl.eop = eop;
d                 561 drivers/crypto/axis/artpec6_crypto.c 	d->data.len = len;
d                 562 drivers/crypto/axis/artpec6_crypto.c 	d->data.buf = addr;
d                 581 drivers/crypto/axis/artpec6_crypto.c 	struct pdma_descr *d;
d                 590 drivers/crypto/axis/artpec6_crypto.c 	d = &dma->out[dma->out_cnt++];
d                 591 drivers/crypto/axis/artpec6_crypto.c 	memset(d, 0, sizeof(*d));
d                 593 drivers/crypto/axis/artpec6_crypto.c 	d->ctrl.short_descr = 1;
d                 594 drivers/crypto/axis/artpec6_crypto.c 	d->ctrl.short_len = len;
d                 595 drivers/crypto/axis/artpec6_crypto.c 	d->ctrl.eop = eop;
d                 596 drivers/crypto/axis/artpec6_crypto.c 	memcpy(d->shrt.data, dst, len);
d                 740 drivers/crypto/axis/artpec6_crypto.c 	struct pdma_descr *d;
d                 747 drivers/crypto/axis/artpec6_crypto.c 	d = &dma->in[dma->in_cnt++];
d                 748 drivers/crypto/axis/artpec6_crypto.c 	memset(d, 0, sizeof(*d));
d                 750 drivers/crypto/axis/artpec6_crypto.c 	d->ctrl.intr = intr;
d                 751 drivers/crypto/axis/artpec6_crypto.c 	d->data.len = len;
d                 752 drivers/crypto/axis/artpec6_crypto.c 	d->data.buf = addr;
d                 954 drivers/crypto/axis/artpec6_crypto.c 	struct pdma_descr *d;
d                 963 drivers/crypto/axis/artpec6_crypto.c 	d = &dma->out[dma->out_cnt-1];
d                 964 drivers/crypto/axis/artpec6_crypto.c 	d->ctrl.eop = 1;
d                 978 drivers/crypto/axis/artpec6_crypto.c 	struct pdma_descr *d;
d                 986 drivers/crypto/axis/artpec6_crypto.c 	d = &dma->in[dma->in_cnt-1];
d                 987 drivers/crypto/axis/artpec6_crypto.c 	d->ctrl.intr = 1;
d                 424 drivers/crypto/caam/caampkc.c 	pdb->d_dma = dma_map_single(dev, key->d, key->d_sz, DMA_TO_DEVICE);
d                 466 drivers/crypto/caam/caampkc.c 	pdb->d_dma = dma_map_single(dev, key->d, key->d_sz, DMA_TO_DEVICE);
d                 773 drivers/crypto/caam/caampkc.c 	if (unlikely(!key->n || !key->d))
d                 794 drivers/crypto/caam/caampkc.c 	kzfree(key->d);
d                 987 drivers/crypto/caam/caampkc.c 	rsa_key->d = kmemdup(raw_key.d, raw_key.d_sz, GFP_DMA | GFP_KERNEL);
d                 988 drivers/crypto/caam/caampkc.c 	if (!rsa_key->d)
d                  72 drivers/crypto/caam/caampkc.h 	u8 *d;
d                 463 drivers/crypto/caam/pdb.h 	u8 *d;
d                 476 drivers/crypto/caam/pdb.h 	u8 *d;
d                 165 drivers/crypto/ccp/ccp-crypto-rsa.c 						raw_key.d, raw_key.d_sz);
d                  97 drivers/crypto/ccp/ccp-dev.c void ccp_log_error(struct ccp_device *d, unsigned int e)
d                 103 drivers/crypto/ccp/ccp-dev.c 		dev_err(d->dev, "CCP error %d: %s\n", e, ccp_error_codes[e]);
d                 105 drivers/crypto/ccp/ccp-dev.c 		dev_err(d->dev, "CCP error %d: Unknown Error\n", e);
d                1918 drivers/crypto/hifn_795x.c 		struct hifn_desc *d = &dma->resr[i];
d                1922 drivers/crypto/hifn_795x.c 				(d->l & __cpu_to_le32(HIFN_D_VALID)) ? -ENODEV : 0);
d                 356 drivers/crypto/img-hash.c static void img_hash_dma_task(unsigned long d)
d                 358 drivers/crypto/img-hash.c 	struct img_hash_dev *hdev = (struct img_hash_dev *)d;
d                 326 drivers/crypto/omap-sham.c 	int i, d, big_endian = 0;
d                 333 drivers/crypto/omap-sham.c 		d = MD5_DIGEST_SIZE / sizeof(u32);
d                 339 drivers/crypto/omap-sham.c 		d = SHA1_DIGEST_SIZE / sizeof(u32);
d                 342 drivers/crypto/omap-sham.c 		d = SHA224_DIGEST_SIZE / sizeof(u32);
d                 345 drivers/crypto/omap-sham.c 		d = SHA256_DIGEST_SIZE / sizeof(u32);
d                 348 drivers/crypto/omap-sham.c 		d = SHA384_DIGEST_SIZE / sizeof(u32);
d                 351 drivers/crypto/omap-sham.c 		d = SHA512_DIGEST_SIZE / sizeof(u32);
d                 354 drivers/crypto/omap-sham.c 		d = 0;
d                 358 drivers/crypto/omap-sham.c 		for (i = 0; i < d; i++)
d                 361 drivers/crypto/omap-sham.c 		for (i = 0; i < d; i++)
d                 423 drivers/crypto/omap-sham.c 	int d;
d                 428 drivers/crypto/omap-sham.c 		d = SHA1_BLOCK_SIZE;
d                 432 drivers/crypto/omap-sham.c 		d = SHA256_BLOCK_SIZE;
d                 436 drivers/crypto/omap-sham.c 		d = SHA512_BLOCK_SIZE;
d                 439 drivers/crypto/omap-sham.c 		d = 0;
d                 442 drivers/crypto/omap-sham.c 	return d;
d                  76 drivers/crypto/qat/qat_common/qat_asym_algs.c 			dma_addr_t d;
d                 106 drivers/crypto/qat/qat_common/qat_asym_algs.c 	char *d;
d                 831 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (unlikely(!ctx->n || !ctx->d))
d                 862 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_req->in.rsa.dec.d = ctx->dma_d;
d                1047 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->d = dma_alloc_coherent(dev, ctx->key_sz, &ctx->dma_d, GFP_KERNEL);
d                1048 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (!ctx->d)
d                1051 drivers/crypto/qat/qat_common/qat_asym_algs.c 	memcpy(ctx->d + (ctx->key_sz - vlen), ptr, vlen);
d                1054 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->d = NULL;
d                1164 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->d) {
d                1165 drivers/crypto/qat/qat_common/qat_asym_algs.c 		memset(ctx->d, '\0', ctx->key_sz);
d                1166 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, ctx->key_sz, ctx->d, ctx->dma_d);
d                1191 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->d = NULL;
d                1225 drivers/crypto/qat/qat_common/qat_asym_algs.c 		ret = qat_rsa_set_d(ctx, rsa_key.d, rsa_key.d_sz);
d                1236 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (private && !ctx->d) {
d                1290 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->d) {
d                1291 drivers/crypto/qat/qat_common/qat_asym_algs.c 		memset(ctx->d, '\0', ctx->key_sz);
d                1292 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, ctx->key_sz, ctx->d, ctx->dma_d);
d                1297 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->d = NULL;
d                 160 drivers/crypto/qce/common.c 	__be32 *d = dst;
d                 166 drivers/crypto/qce/common.c 		*d = cpu_to_be32p((const __u32 *) s);
d                 168 drivers/crypto/qce/common.c 		d++;
d                 493 drivers/crypto/stm32/stm32-cryp.c 	u32 *d;
d                 519 drivers/crypto/stm32/stm32-cryp.c 	d = (u32 *)b0;
d                 523 drivers/crypto/stm32/stm32-cryp.c 			*d = cpu_to_be32(*d);
d                 524 drivers/crypto/stm32/stm32-cryp.c 		stm32_cryp_write(cryp, CRYP_DIN, *d++);
d                1196 drivers/devfreq/devfreq.c static ssize_t available_governors_show(struct device *d,
d                1200 drivers/devfreq/devfreq.c 	struct devfreq *df = to_devfreq(d);
d                1378 drivers/devfreq/devfreq.c static ssize_t available_frequencies_show(struct device *d,
d                1382 drivers/devfreq/devfreq.c 	struct devfreq *df = to_devfreq(d);
d                  30 drivers/dio/dio-driver.c 		   const struct dio_dev *d)
d                  36 drivers/dio/dio-driver.c 			if (ids->id == d->id)
d                  39 drivers/dio/dio-driver.c 			if ((ids->id & 0xff) == (d->id & 0xff))
d                  51 drivers/dio/dio-driver.c 	struct dio_dev *d = to_dio_dev(dev);
d                  53 drivers/dio/dio-driver.c 	if (!d->driver && drv->probe) {
d                  56 drivers/dio/dio-driver.c 		id = dio_match_device(drv->id_table, d);
d                  58 drivers/dio/dio-driver.c 			error = drv->probe(d, id);
d                  60 drivers/dio/dio-driver.c 			d->driver = drv;
d                 115 drivers/dio/dio-driver.c 	struct dio_dev *d = to_dio_dev(dev);
d                 122 drivers/dio/dio-driver.c 	return dio_match_device(ids, d) ? 1 : 0;
d                  22 drivers/dio/dio-sysfs.c 	struct dio_dev *d;
d                  24 drivers/dio/dio-sysfs.c 	d = to_dio_dev(dev);
d                  25 drivers/dio/dio-sysfs.c 	return sprintf(buf, "0x%02x\n", (d->id & 0xff));
d                  31 drivers/dio/dio-sysfs.c 	struct dio_dev *d;
d                  33 drivers/dio/dio-sysfs.c 	d = to_dio_dev(dev);
d                  34 drivers/dio/dio-sysfs.c 	return sprintf(buf, "0x%02x\n", d->ipl);
d                  40 drivers/dio/dio-sysfs.c 	struct dio_dev *d;
d                  42 drivers/dio/dio-sysfs.c 	d = to_dio_dev(dev);
d                  43 drivers/dio/dio-sysfs.c 	return sprintf(buf, "0x%02x\n", ((d->id >> 8)& 0xff));
d                  49 drivers/dio/dio-sysfs.c 	struct dio_dev *d;
d                  51 drivers/dio/dio-sysfs.c 	d = to_dio_dev(dev);
d                  52 drivers/dio/dio-sysfs.c 	return sprintf(buf, "%s\n", d->name);
d                  58 drivers/dio/dio-sysfs.c 	struct dio_dev *d = to_dio_dev(dev);
d                  61 drivers/dio/dio-sysfs.c 		       (unsigned long)dio_resource_start(d),
d                  62 drivers/dio/dio-sysfs.c 		       (unsigned long)dio_resource_end(d),
d                  63 drivers/dio/dio-sysfs.c 		       dio_resource_flags(d));
d                  67 drivers/dio/dio-sysfs.c int dio_create_sysfs_dev_files(struct dio_dev *d)
d                  69 drivers/dio/dio-sysfs.c 	struct device *dev = &d->dev;
d                1249 drivers/dma-buf/dma-buf.c 	struct dentry *d;
d                1252 drivers/dma-buf/dma-buf.c 	d = debugfs_create_dir("dma_buf", NULL);
d                1253 drivers/dma-buf/dma-buf.c 	if (IS_ERR(d))
d                1254 drivers/dma-buf/dma-buf.c 		return PTR_ERR(d);
d                1256 drivers/dma-buf/dma-buf.c 	dma_buf_debugfs_dir = d;
d                1258 drivers/dma-buf/dma-buf.c 	d = debugfs_create_file("bufinfo", S_IRUGO, dma_buf_debugfs_dir,
d                1260 drivers/dma-buf/dma-buf.c 	if (IS_ERR(d)) {
d                1264 drivers/dma-buf/dma-buf.c 		err = PTR_ERR(d);
d                 194 drivers/dma/bcm-sba-raid.c static inline u32 __pure sba_cmd_pq_c_mdata(u32 d, u32 b1, u32 b0)
d                 198 drivers/dma/bcm-sba-raid.c 	       ((d & SBA_C_MDATA_DNUM_MASK) << SBA_C_MDATA_DNUM_SHIFT);
d                 190 drivers/dma/bcm2835-dma.c static inline struct bcm2835_dmadev *to_bcm2835_dma_dev(struct dma_device *d)
d                 192 drivers/dma/bcm2835-dma.c 	return container_of(d, struct bcm2835_dmadev, ddev);
d                 311 drivers/dma/bcm2835-dma.c 	struct bcm2835_desc *d;
d                 319 drivers/dma/bcm2835-dma.c 	d = kzalloc(struct_size(d, cb_list, frames), gfp);
d                 320 drivers/dma/bcm2835-dma.c 	if (!d)
d                 323 drivers/dma/bcm2835-dma.c 	d->c = c;
d                 324 drivers/dma/bcm2835-dma.c 	d->dir = direction;
d                 325 drivers/dma/bcm2835-dma.c 	d->cyclic = cyclic;
d                 331 drivers/dma/bcm2835-dma.c 	for (frame = 0, total_len = 0; frame < frames; d->frames++, frame++) {
d                 332 drivers/dma/bcm2835-dma.c 		cb_entry = &d->cb_list[frame];
d                 359 drivers/dma/bcm2835-dma.c 			d->cb_list[frame - 1].cb->next = cb_entry->paddr;
d                 368 drivers/dma/bcm2835-dma.c 		d->size += control_block->length;
d                 372 drivers/dma/bcm2835-dma.c 	d->cb_list[d->frames - 1].cb->info |= finalextrainfo;
d                 375 drivers/dma/bcm2835-dma.c 	if (buf_len && (d->size != buf_len))
d                 378 drivers/dma/bcm2835-dma.c 	return d;
d                 380 drivers/dma/bcm2835-dma.c 	bcm2835_dma_free_cb_chain(d);
d                 443 drivers/dma/bcm2835-dma.c 	struct bcm2835_desc *d;
d                 452 drivers/dma/bcm2835-dma.c 	c->desc = d = to_bcm2835_dma_desc(&vd->tx);
d                 454 drivers/dma/bcm2835-dma.c 	writel(d->cb_list[0].paddr, c->chan_base + BCM2835_DMA_ADDR);
d                 461 drivers/dma/bcm2835-dma.c 	struct bcm2835_desc *d;
d                 485 drivers/dma/bcm2835-dma.c 	d = c->desc;
d                 487 drivers/dma/bcm2835-dma.c 	if (d) {
d                 488 drivers/dma/bcm2835-dma.c 		if (d->cyclic) {
d                 490 drivers/dma/bcm2835-dma.c 			vchan_cyclic_callback(&d->vd);
d                 535 drivers/dma/bcm2835-dma.c static size_t bcm2835_dma_desc_size(struct bcm2835_desc *d)
d                 537 drivers/dma/bcm2835-dma.c 	return d->size;
d                 540 drivers/dma/bcm2835-dma.c static size_t bcm2835_dma_desc_size_pos(struct bcm2835_desc *d, dma_addr_t addr)
d                 545 drivers/dma/bcm2835-dma.c 	for (size = i = 0; i < d->frames; i++) {
d                 546 drivers/dma/bcm2835-dma.c 		struct bcm2835_dma_cb *control_block = d->cb_list[i].cb;
d                 550 drivers/dma/bcm2835-dma.c 		if (d->dir == DMA_DEV_TO_MEM)
d                 582 drivers/dma/bcm2835-dma.c 		struct bcm2835_desc *d = c->desc;
d                 585 drivers/dma/bcm2835-dma.c 		if (d->dir == DMA_MEM_TO_DEV)
d                 587 drivers/dma/bcm2835-dma.c 		else if (d->dir == DMA_DEV_TO_MEM)
d                 592 drivers/dma/bcm2835-dma.c 		txstate->residue = bcm2835_dma_desc_size_pos(d, pos);
d                 619 drivers/dma/bcm2835-dma.c 	struct bcm2835_desc *d;
d                 633 drivers/dma/bcm2835-dma.c 	d = bcm2835_dma_create_cb_chain(chan, DMA_MEM_TO_MEM, false,
d                 636 drivers/dma/bcm2835-dma.c 	if (!d)
d                 639 drivers/dma/bcm2835-dma.c 	return vchan_tx_prep(&c->vc, &d->vd, flags);
d                 649 drivers/dma/bcm2835-dma.c 	struct bcm2835_desc *d;
d                 680 drivers/dma/bcm2835-dma.c 	d = bcm2835_dma_create_cb_chain(chan, direction, false,
d                 684 drivers/dma/bcm2835-dma.c 	if (!d)
d                 688 drivers/dma/bcm2835-dma.c 	bcm2835_dma_fill_cb_chain_with_sg(chan, direction, d->cb_list,
d                 691 drivers/dma/bcm2835-dma.c 	return vchan_tx_prep(&c->vc, &d->vd, flags);
d                 701 drivers/dma/bcm2835-dma.c 	struct bcm2835_desc *d;
d                 767 drivers/dma/bcm2835-dma.c 	d = bcm2835_dma_create_cb_chain(chan, direction, true,
d                 771 drivers/dma/bcm2835-dma.c 	if (!d)
d                 775 drivers/dma/bcm2835-dma.c 	d->cb_list[d->frames - 1].cb->next = d->cb_list[0].paddr;
d                 777 drivers/dma/bcm2835-dma.c 	return vchan_tx_prep(&c->vc, &d->vd, flags);
d                 822 drivers/dma/bcm2835-dma.c static int bcm2835_dma_chan_init(struct bcm2835_dmadev *d, int chan_id,
d                 827 drivers/dma/bcm2835-dma.c 	c = devm_kzalloc(d->ddev.dev, sizeof(*c), GFP_KERNEL);
d                 832 drivers/dma/bcm2835-dma.c 	vchan_init(&c->vc, &d->ddev);
d                 834 drivers/dma/bcm2835-dma.c 	c->chan_base = BCM2835_DMA_CHANIO(d->base, chan_id);
d                 870 drivers/dma/bcm2835-dma.c 	struct bcm2835_dmadev *d = ofdma->of_dma_data;
d                 873 drivers/dma/bcm2835-dma.c 	chan = dma_get_any_slave_channel(&d->ddev);
d                 696 drivers/dma/dmaengine.c 	struct dma_device *d, *_d;
d                 715 drivers/dma/dmaengine.c 	list_for_each_entry_safe(d, _d, &dma_device_list, global_node) {
d                 717 drivers/dma/dmaengine.c 		const struct dma_slave_map *map = dma_filter_match(d, name, dev);
d                 725 drivers/dma/dmaengine.c 		chan = find_candidate(d, &mask, d->filter.fn, map->param);
d                 491 drivers/dma/dmatest.c static void __dmatest_free_test_data(struct dmatest_data *d, unsigned int cnt)
d                 496 drivers/dma/dmatest.c 		kfree(d->raw[i]);
d                 498 drivers/dma/dmatest.c 	kfree(d->aligned);
d                 499 drivers/dma/dmatest.c 	kfree(d->raw);
d                 502 drivers/dma/dmatest.c static void dmatest_free_test_data(struct dmatest_data *d)
d                 504 drivers/dma/dmatest.c 	__dmatest_free_test_data(d, d->cnt);
d                 507 drivers/dma/dmatest.c static int dmatest_alloc_test_data(struct dmatest_data *d,
d                 512 drivers/dma/dmatest.c 	d->raw = kcalloc(d->cnt + 1, sizeof(u8 *), GFP_KERNEL);
d                 513 drivers/dma/dmatest.c 	if (!d->raw)
d                 516 drivers/dma/dmatest.c 	d->aligned = kcalloc(d->cnt + 1, sizeof(u8 *), GFP_KERNEL);
d                 517 drivers/dma/dmatest.c 	if (!d->aligned)
d                 520 drivers/dma/dmatest.c 	for (i = 0; i < d->cnt; i++) {
d                 521 drivers/dma/dmatest.c 		d->raw[i] = kmalloc(buf_size + align, GFP_KERNEL);
d                 522 drivers/dma/dmatest.c 		if (!d->raw[i])
d                 527 drivers/dma/dmatest.c 			d->aligned[i] = PTR_ALIGN(d->raw[i], align);
d                 529 drivers/dma/dmatest.c 			d->aligned[i] = d->raw[i];
d                 534 drivers/dma/dmatest.c 	__dmatest_free_test_data(d, i);
d                 386 drivers/dma/dw/regs.h #define lli_set(d, reg, v)		((d)->lli.reg |= cpu_to_le32(v))
d                 387 drivers/dma/dw/regs.h #define lli_clear(d, reg, v)		((d)->lli.reg &= ~cpu_to_le32(v))
d                 388 drivers/dma/dw/regs.h #define lli_read(d, reg)		le32_to_cpu((d)->lli.reg)
d                 389 drivers/dma/dw/regs.h #define lli_write(d, reg, v)		((d)->lli.reg = cpu_to_le32(v))
d                 247 drivers/dma/ep93xx_dma.c 		struct ep93xx_dma_desc *d = list_first_entry(&desc->tx_list,
d                 256 drivers/dma/ep93xx_dma.c 		d->txd.callback = desc->txd.callback;
d                 257 drivers/dma/ep93xx_dma.c 		d->txd.callback_param = desc->txd.callback_param;
d                 259 drivers/dma/ep93xx_dma.c 		list_move_tail(&d->node, &edmac->active);
d                 749 drivers/dma/ep93xx_dma.c 	struct ep93xx_dma_desc *desc, *d;
d                 776 drivers/dma/ep93xx_dma.c 	list_for_each_entry_safe(desc, d, &list, node) {
d                 952 drivers/dma/ep93xx_dma.c 	struct ep93xx_dma_desc *desc, *d;
d                 967 drivers/dma/ep93xx_dma.c 	list_for_each_entry_safe(desc, d, &list, node)
d                 256 drivers/dma/fsldma.h #define DMA_TO_CPU(fsl_chan, d, width)					\
d                 258 drivers/dma/fsldma.h 			be##width##_to_cpu((__force __be##width)(v##width)d) : \
d                 259 drivers/dma/fsldma.h 			le##width##_to_cpu((__force __le##width)(v##width)d))
d                 281 drivers/dma/imx-dma.c static inline void imxdma_sg_next(struct imxdma_desc *d)
d                 283 drivers/dma/imx-dma.c 	struct imxdma_channel *imxdmac = to_imxdma_chan(d->desc.chan);
d                 285 drivers/dma/imx-dma.c 	struct scatterlist *sg = d->sg;
d                 288 drivers/dma/imx-dma.c 	now = min_t(size_t, d->len, sg_dma_len(sg));
d                 289 drivers/dma/imx-dma.c 	if (d->len != IMX_DMA_LENGTH_LOOP)
d                 290 drivers/dma/imx-dma.c 		d->len -= now;
d                 292 drivers/dma/imx-dma.c 	if (d->direction == DMA_DEV_TO_MEM)
d                 308 drivers/dma/imx-dma.c static void imxdma_enable_hw(struct imxdma_desc *d)
d                 310 drivers/dma/imx-dma.c 	struct imxdma_channel *imxdmac = to_imxdma_chan(d->desc.chan);
d                 326 drivers/dma/imx-dma.c 			d->sg && imxdma_hw_chain(imxdmac)) {
d                 327 drivers/dma/imx-dma.c 		d->sg = sg_next(d->sg);
d                 328 drivers/dma/imx-dma.c 		if (d->sg) {
d                 330 drivers/dma/imx-dma.c 			imxdma_sg_next(d);
d                 511 drivers/dma/imx-dma.c static int imxdma_xfer_desc(struct imxdma_desc *d)
d                 513 drivers/dma/imx-dma.c 	struct imxdma_channel *imxdmac = to_imxdma_chan(d->desc.chan);
d                 519 drivers/dma/imx-dma.c 	switch (d->type) {
d                 524 drivers/dma/imx-dma.c 			((imxdma->slots_2d[i].xsr != d->x) ||
d                 525 drivers/dma/imx-dma.c 			(imxdma->slots_2d[i].ysr != d->y) ||
d                 526 drivers/dma/imx-dma.c 			(imxdma->slots_2d[i].wsr != d->w)))
d                 534 drivers/dma/imx-dma.c 		imxdma->slots_2d[slot].xsr = d->x;
d                 535 drivers/dma/imx-dma.c 		imxdma->slots_2d[slot].ysr = d->y;
d                 536 drivers/dma/imx-dma.c 		imxdma->slots_2d[slot].wsr = d->w;
d                 543 drivers/dma/imx-dma.c 			d->config_mem &= ~CCR_MSEL_B;
d                 544 drivers/dma/imx-dma.c 			d->config_port &= ~CCR_MSEL_B;
d                 545 drivers/dma/imx-dma.c 			imx_dmav1_writel(imxdma, d->x, DMA_XSRA);
d                 546 drivers/dma/imx-dma.c 			imx_dmav1_writel(imxdma, d->y, DMA_YSRA);
d                 547 drivers/dma/imx-dma.c 			imx_dmav1_writel(imxdma, d->w, DMA_WSRA);
d                 549 drivers/dma/imx-dma.c 			d->config_mem |= CCR_MSEL_B;
d                 550 drivers/dma/imx-dma.c 			d->config_port |= CCR_MSEL_B;
d                 551 drivers/dma/imx-dma.c 			imx_dmav1_writel(imxdma, d->x, DMA_XSRB);
d                 552 drivers/dma/imx-dma.c 			imx_dmav1_writel(imxdma, d->y, DMA_YSRB);
d                 553 drivers/dma/imx-dma.c 			imx_dmav1_writel(imxdma, d->w, DMA_WSRB);
d                 561 drivers/dma/imx-dma.c 		imx_dmav1_writel(imxdma, d->src, DMA_SAR(imxdmac->channel));
d                 562 drivers/dma/imx-dma.c 		imx_dmav1_writel(imxdma, d->dest, DMA_DAR(imxdmac->channel));
d                 563 drivers/dma/imx-dma.c 		imx_dmav1_writel(imxdma, d->config_mem | (d->config_port << 2),
d                 566 drivers/dma/imx-dma.c 		imx_dmav1_writel(imxdma, d->len, DMA_CNTR(imxdmac->channel));
d                 571 drivers/dma/imx-dma.c 			(unsigned long long)d->dest,
d                 572 drivers/dma/imx-dma.c 			(unsigned long long)d->src, d->len);
d                 578 drivers/dma/imx-dma.c 		if (d->direction == DMA_DEV_TO_MEM) {
d                 587 drivers/dma/imx-dma.c 				d->sg, d->sgcount, d->len,
d                 589 drivers/dma/imx-dma.c 		} else if (d->direction == DMA_MEM_TO_DEV) {
d                 598 drivers/dma/imx-dma.c 				d->sg, d->sgcount, d->len,
d                 606 drivers/dma/imx-dma.c 		imxdma_sg_next(d);
d                 612 drivers/dma/imx-dma.c 	imxdma_enable_hw(d);
d                 229 drivers/dma/ioat/dma.h #define dump_desc_dbg(c, d) \
d                 230 drivers/dma/ioat/dma.h 	({ if (d) __dump_desc_dbg(c, d->hw, &d->txd, desc_id(d)); 0; })
d                1329 drivers/dma/ioat/init.c 	struct ioatdma_device *d = devm_kzalloc(dev, sizeof(*d), GFP_KERNEL);
d                1331 drivers/dma/ioat/init.c 	if (!d)
d                1333 drivers/dma/ioat/init.c 	d->pdev = pdev;
d                1334 drivers/dma/ioat/init.c 	d->reg_base = iobase;
d                1335 drivers/dma/ioat/init.c 	return d;
d                 164 drivers/dma/ipu/ipu_intern.h #define to_idmac(d) container_of(d, struct idmac, dma)
d                  93 drivers/dma/ipu/ipu_irq.c static void ipu_irq_unmask(struct irq_data *d)
d                  95 drivers/dma/ipu/ipu_irq.c 	struct ipu_irq_map *map = irq_data_get_irq_chip_data(d);
d                 105 drivers/dma/ipu/ipu_irq.c 		pr_err("IPU: %s(%u) - unmapped!\n", __func__, d->irq);
d                 116 drivers/dma/ipu/ipu_irq.c static void ipu_irq_mask(struct irq_data *d)
d                 118 drivers/dma/ipu/ipu_irq.c 	struct ipu_irq_map *map = irq_data_get_irq_chip_data(d);
d                 128 drivers/dma/ipu/ipu_irq.c 		pr_err("IPU: %s(%u) - unmapped!\n", __func__, d->irq);
d                 139 drivers/dma/ipu/ipu_irq.c static void ipu_irq_ack(struct irq_data *d)
d                 141 drivers/dma/ipu/ipu_irq.c 	struct ipu_irq_map *map = irq_data_get_irq_chip_data(d);
d                 150 drivers/dma/ipu/ipu_irq.c 		pr_err("IPU: %s(%u) - unmapped!\n", __func__, d->irq);
d                 149 drivers/dma/k3dma.c static void k3_dma_terminate_chan(struct k3_dma_phy *phy, struct k3_dma_dev *d)
d                 156 drivers/dma/k3dma.c 	writel_relaxed(val, d->base + INT_TC1_RAW);
d                 157 drivers/dma/k3dma.c 	writel_relaxed(val, d->base + INT_TC2_RAW);
d                 158 drivers/dma/k3dma.c 	writel_relaxed(val, d->base + INT_ERR1_RAW);
d                 159 drivers/dma/k3dma.c 	writel_relaxed(val, d->base + INT_ERR2_RAW);
d                 171 drivers/dma/k3dma.c static u32 k3_dma_get_curr_cnt(struct k3_dma_dev *d, struct k3_dma_phy *phy)
d                 175 drivers/dma/k3dma.c 	cnt = readl_relaxed(d->base + CX_CUR_CNT + phy->idx * 0x10);
d                 185 drivers/dma/k3dma.c static u32 k3_dma_get_chan_stat(struct k3_dma_dev *d)
d                 187 drivers/dma/k3dma.c 	return readl_relaxed(d->base + CH_STAT);
d                 190 drivers/dma/k3dma.c static void k3_dma_enable_dma(struct k3_dma_dev *d, bool on)
d                 194 drivers/dma/k3dma.c 		writel_relaxed(0x0, d->base + CH_PRI);
d                 197 drivers/dma/k3dma.c 		writel_relaxed(0xffff, d->base + INT_TC1_MASK);
d                 198 drivers/dma/k3dma.c 		writel_relaxed(0xffff, d->base + INT_TC2_MASK);
d                 199 drivers/dma/k3dma.c 		writel_relaxed(0xffff, d->base + INT_ERR1_MASK);
d                 200 drivers/dma/k3dma.c 		writel_relaxed(0xffff, d->base + INT_ERR2_MASK);
d                 203 drivers/dma/k3dma.c 		writel_relaxed(0x0, d->base + INT_TC1_MASK);
d                 204 drivers/dma/k3dma.c 		writel_relaxed(0x0, d->base + INT_TC2_MASK);
d                 205 drivers/dma/k3dma.c 		writel_relaxed(0x0, d->base + INT_ERR1_MASK);
d                 206 drivers/dma/k3dma.c 		writel_relaxed(0x0, d->base + INT_ERR2_MASK);
d                 212 drivers/dma/k3dma.c 	struct k3_dma_dev *d = (struct k3_dma_dev *)dev_id;
d                 215 drivers/dma/k3dma.c 	u32 stat = readl_relaxed(d->base + INT_STAT);
d                 216 drivers/dma/k3dma.c 	u32 tc1  = readl_relaxed(d->base + INT_TC1);
d                 217 drivers/dma/k3dma.c 	u32 tc2  = readl_relaxed(d->base + INT_TC2);
d                 218 drivers/dma/k3dma.c 	u32 err1 = readl_relaxed(d->base + INT_ERR1);
d                 219 drivers/dma/k3dma.c 	u32 err2 = readl_relaxed(d->base + INT_ERR2);
d                 228 drivers/dma/k3dma.c 			p = &d->phy[i];
d                 248 drivers/dma/k3dma.c 			dev_warn(d->slave.dev, "DMA ERR\n");
d                 251 drivers/dma/k3dma.c 	writel_relaxed(irq_chan, d->base + INT_TC1_RAW);
d                 252 drivers/dma/k3dma.c 	writel_relaxed(irq_chan, d->base + INT_TC2_RAW);
d                 253 drivers/dma/k3dma.c 	writel_relaxed(err1, d->base + INT_ERR1_RAW);
d                 254 drivers/dma/k3dma.c 	writel_relaxed(err2, d->base + INT_ERR2_RAW);
d                 257 drivers/dma/k3dma.c 		tasklet_schedule(&d->task);
d                 267 drivers/dma/k3dma.c 	struct k3_dma_dev *d = to_k3_dma(c->vc.chan.device);
d                 273 drivers/dma/k3dma.c 	if (BIT(c->phy->idx) & k3_dma_get_chan_stat(d))
d                 302 drivers/dma/k3dma.c 	struct k3_dma_dev *d = (struct k3_dma_dev *)arg;
d                 308 drivers/dma/k3dma.c 	list_for_each_entry_safe(c, cn, &d->slave.channels, vc.chan.device_node) {
d                 314 drivers/dma/k3dma.c 				dev_dbg(d->slave.dev, "pchan %u: free\n", p->idx);
d                 324 drivers/dma/k3dma.c 	spin_lock_irq(&d->lock);
d                 325 drivers/dma/k3dma.c 	for (pch = 0; pch < d->dma_channels; pch++) {
d                 326 drivers/dma/k3dma.c 		if (!(d->dma_channel_mask & (1 << pch)))
d                 329 drivers/dma/k3dma.c 		p = &d->phy[pch];
d                 331 drivers/dma/k3dma.c 		if (p->vchan == NULL && !list_empty(&d->chan_pending)) {
d                 332 drivers/dma/k3dma.c 			c = list_first_entry(&d->chan_pending,
d                 340 drivers/dma/k3dma.c 			dev_dbg(d->slave.dev, "pchan %u: alloc vchan %p\n", pch, &c->vc);
d                 343 drivers/dma/k3dma.c 	spin_unlock_irq(&d->lock);
d                 345 drivers/dma/k3dma.c 	for (pch = 0; pch < d->dma_channels; pch++) {
d                 346 drivers/dma/k3dma.c 		if (!(d->dma_channel_mask & (1 << pch)))
d                 350 drivers/dma/k3dma.c 			p = &d->phy[pch];
d                 364 drivers/dma/k3dma.c 	struct k3_dma_dev *d = to_k3_dma(chan->device);
d                 367 drivers/dma/k3dma.c 	spin_lock_irqsave(&d->lock, flags);
d                 369 drivers/dma/k3dma.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 379 drivers/dma/k3dma.c 	struct k3_dma_dev *d = to_k3_dma(chan->device);
d                 407 drivers/dma/k3dma.c 		bytes = k3_dma_get_curr_cnt(d, p);
d                 426 drivers/dma/k3dma.c 	struct k3_dma_dev *d = to_k3_dma(chan->device);
d                 432 drivers/dma/k3dma.c 		spin_lock(&d->lock);
d                 436 drivers/dma/k3dma.c 				list_add_tail(&c->node, &d->chan_pending);
d                 438 drivers/dma/k3dma.c 				tasklet_schedule(&d->task);
d                 439 drivers/dma/k3dma.c 				dev_dbg(d->slave.dev, "vchan %p: issued\n", &c->vc);
d                 442 drivers/dma/k3dma.c 		spin_unlock(&d->lock);
d                 444 drivers/dma/k3dma.c 		dev_dbg(d->slave.dev, "vchan %p: nothing to issue\n", &c->vc);
d                 467 drivers/dma/k3dma.c 	struct k3_dma_dev *d = to_k3_dma(chan->device);
d                 480 drivers/dma/k3dma.c 	ds->desc_hw = dma_pool_zalloc(d->pool, GFP_NOWAIT, &ds->desc_hw_lli);
d                 715 drivers/dma/k3dma.c 	struct k3_dma_dev *d = to_k3_dma(vd->tx.chan->device);
d                 717 drivers/dma/k3dma.c 	dma_pool_free(d->pool, ds->desc_hw, ds->desc_hw_lli);
d                 724 drivers/dma/k3dma.c 	struct k3_dma_dev *d = to_k3_dma(chan->device);
d                 729 drivers/dma/k3dma.c 	dev_dbg(d->slave.dev, "vchan %p: terminate all\n", &c->vc);
d                 732 drivers/dma/k3dma.c 	spin_lock(&d->lock);
d                 734 drivers/dma/k3dma.c 	spin_unlock(&d->lock);
d                 741 drivers/dma/k3dma.c 		k3_dma_terminate_chan(p, d);
d                 766 drivers/dma/k3dma.c 	struct k3_dma_dev *d = to_k3_dma(chan->device);
d                 769 drivers/dma/k3dma.c 	dev_dbg(d->slave.dev, "vchan %p: pause\n", &c->vc);
d                 775 drivers/dma/k3dma.c 			spin_lock(&d->lock);
d                 777 drivers/dma/k3dma.c 			spin_unlock(&d->lock);
d                 787 drivers/dma/k3dma.c 	struct k3_dma_dev *d = to_k3_dma(chan->device);
d                 791 drivers/dma/k3dma.c 	dev_dbg(d->slave.dev, "vchan %p: resume\n", &c->vc);
d                 798 drivers/dma/k3dma.c 			spin_lock(&d->lock);
d                 799 drivers/dma/k3dma.c 			list_add_tail(&c->node, &d->chan_pending);
d                 800 drivers/dma/k3dma.c 			spin_unlock(&d->lock);
d                 830 drivers/dma/k3dma.c 	struct k3_dma_dev *d = ofdma->of_dma_data;
d                 833 drivers/dma/k3dma.c 	if (request >= d->dma_requests)
d                 836 drivers/dma/k3dma.c 	return dma_get_slave_channel(&(d->chans[request].vc.chan));
d                 842 drivers/dma/k3dma.c 	struct k3_dma_dev *d;
d                 851 drivers/dma/k3dma.c 	d = devm_kzalloc(&op->dev, sizeof(*d), GFP_KERNEL);
d                 852 drivers/dma/k3dma.c 	if (!d)
d                 859 drivers/dma/k3dma.c 	d->base = devm_ioremap_resource(&op->dev, iores);
d                 860 drivers/dma/k3dma.c 	if (IS_ERR(d->base))
d                 861 drivers/dma/k3dma.c 		return PTR_ERR(d->base);
d                 866 drivers/dma/k3dma.c 				"dma-channels", &d->dma_channels);
d                 868 drivers/dma/k3dma.c 				"dma-requests", &d->dma_requests);
d                 870 drivers/dma/k3dma.c 				"dma-channel-mask", &d->dma_channel_mask);
d                 874 drivers/dma/k3dma.c 			d->dma_channel_mask = (u32)~0UL;
d                 879 drivers/dma/k3dma.c 		d->clk = devm_clk_get(&op->dev, NULL);
d                 880 drivers/dma/k3dma.c 		if (IS_ERR(d->clk)) {
d                 882 drivers/dma/k3dma.c 			return PTR_ERR(d->clk);
d                 888 drivers/dma/k3dma.c 			k3_dma_int_handler, 0, DRIVER_NAME, d);
d                 892 drivers/dma/k3dma.c 	d->irq = irq;
d                 895 drivers/dma/k3dma.c 	d->pool = dmam_pool_create(DRIVER_NAME, &op->dev,
d                 897 drivers/dma/k3dma.c 	if (!d->pool)
d                 901 drivers/dma/k3dma.c 	d->phy = devm_kcalloc(&op->dev,
d                 902 drivers/dma/k3dma.c 		d->dma_channels, sizeof(struct k3_dma_phy), GFP_KERNEL);
d                 903 drivers/dma/k3dma.c 	if (d->phy == NULL)
d                 906 drivers/dma/k3dma.c 	for (i = 0; i < d->dma_channels; i++) {
d                 909 drivers/dma/k3dma.c 		if (!(d->dma_channel_mask & BIT(i)))
d                 912 drivers/dma/k3dma.c 		p = &d->phy[i];
d                 914 drivers/dma/k3dma.c 		p->base = d->base + i * 0x40;
d                 917 drivers/dma/k3dma.c 	INIT_LIST_HEAD(&d->slave.channels);
d                 918 drivers/dma/k3dma.c 	dma_cap_set(DMA_SLAVE, d->slave.cap_mask);
d                 919 drivers/dma/k3dma.c 	dma_cap_set(DMA_MEMCPY, d->slave.cap_mask);
d                 920 drivers/dma/k3dma.c 	dma_cap_set(DMA_CYCLIC, d->slave.cap_mask);
d                 921 drivers/dma/k3dma.c 	d->slave.dev = &op->dev;
d                 922 drivers/dma/k3dma.c 	d->slave.device_free_chan_resources = k3_dma_free_chan_resources;
d                 923 drivers/dma/k3dma.c 	d->slave.device_tx_status = k3_dma_tx_status;
d                 924 drivers/dma/k3dma.c 	d->slave.device_prep_dma_memcpy = k3_dma_prep_memcpy;
d                 925 drivers/dma/k3dma.c 	d->slave.device_prep_slave_sg = k3_dma_prep_slave_sg;
d                 926 drivers/dma/k3dma.c 	d->slave.device_prep_dma_cyclic = k3_dma_prep_dma_cyclic;
d                 927 drivers/dma/k3dma.c 	d->slave.device_issue_pending = k3_dma_issue_pending;
d                 928 drivers/dma/k3dma.c 	d->slave.device_config = k3_dma_config;
d                 929 drivers/dma/k3dma.c 	d->slave.device_pause = k3_dma_transfer_pause;
d                 930 drivers/dma/k3dma.c 	d->slave.device_resume = k3_dma_transfer_resume;
d                 931 drivers/dma/k3dma.c 	d->slave.device_terminate_all = k3_dma_terminate_all;
d                 932 drivers/dma/k3dma.c 	d->slave.device_synchronize = k3_dma_synchronize;
d                 933 drivers/dma/k3dma.c 	d->slave.copy_align = DMAENGINE_ALIGN_8_BYTES;
d                 936 drivers/dma/k3dma.c 	d->chans = devm_kcalloc(&op->dev,
d                 937 drivers/dma/k3dma.c 		d->dma_requests, sizeof(struct k3_dma_chan), GFP_KERNEL);
d                 938 drivers/dma/k3dma.c 	if (d->chans == NULL)
d                 941 drivers/dma/k3dma.c 	for (i = 0; i < d->dma_requests; i++) {
d                 942 drivers/dma/k3dma.c 		struct k3_dma_chan *c = &d->chans[i];
d                 947 drivers/dma/k3dma.c 		vchan_init(&c->vc, &d->slave);
d                 951 drivers/dma/k3dma.c 	ret = clk_prepare_enable(d->clk);
d                 957 drivers/dma/k3dma.c 	k3_dma_enable_dma(d, true);
d                 959 drivers/dma/k3dma.c 	ret = dma_async_device_register(&d->slave);
d                 964 drivers/dma/k3dma.c 					k3_of_dma_simple_xlate, d);
d                 968 drivers/dma/k3dma.c 	spin_lock_init(&d->lock);
d                 969 drivers/dma/k3dma.c 	INIT_LIST_HEAD(&d->chan_pending);
d                 970 drivers/dma/k3dma.c 	tasklet_init(&d->task, k3_dma_tasklet, (unsigned long)d);
d                 971 drivers/dma/k3dma.c 	platform_set_drvdata(op, d);
d                 977 drivers/dma/k3dma.c 	dma_async_device_unregister(&d->slave);
d                 979 drivers/dma/k3dma.c 	clk_disable_unprepare(d->clk);
d                 986 drivers/dma/k3dma.c 	struct k3_dma_dev *d = platform_get_drvdata(op);
d                 988 drivers/dma/k3dma.c 	dma_async_device_unregister(&d->slave);
d                 991 drivers/dma/k3dma.c 	devm_free_irq(&op->dev, d->irq, d);
d                 993 drivers/dma/k3dma.c 	list_for_each_entry_safe(c, cn, &d->slave.channels, vc.chan.device_node) {
d                 997 drivers/dma/k3dma.c 	tasklet_kill(&d->task);
d                 998 drivers/dma/k3dma.c 	clk_disable_unprepare(d->clk);
d                1005 drivers/dma/k3dma.c 	struct k3_dma_dev *d = dev_get_drvdata(dev);
d                1008 drivers/dma/k3dma.c 	stat = k3_dma_get_chan_stat(d);
d                1010 drivers/dma/k3dma.c 		dev_warn(d->slave.dev,
d                1014 drivers/dma/k3dma.c 	k3_dma_enable_dma(d, false);
d                1015 drivers/dma/k3dma.c 	clk_disable_unprepare(d->clk);
d                1021 drivers/dma/k3dma.c 	struct k3_dma_dev *d = dev_get_drvdata(dev);
d                1024 drivers/dma/k3dma.c 	ret = clk_prepare_enable(d->clk);
d                1026 drivers/dma/k3dma.c 		dev_err(d->slave.dev, "clk_prepare_enable failed: %d\n", ret);
d                1029 drivers/dma/k3dma.c 	k3_dma_enable_dma(d, true);
d                 105 drivers/dma/mediatek/mtk-uart-apdma.c to_mtk_uart_apdma_dev(struct dma_device *d)
d                 107 drivers/dma/mediatek/mtk-uart-apdma.c 	return container_of(d, struct mtk_uart_apdmadev, ddev);
d                 144 drivers/dma/mediatek/mtk-uart-apdma.c 	struct mtk_uart_apdma_desc *d = c->desc;
d                 149 drivers/dma/mediatek/mtk-uart-apdma.c 		mtk_uart_apdma_write(c, VFF_ADDR, d->addr);
d                 187 drivers/dma/mediatek/mtk-uart-apdma.c 	struct mtk_uart_apdma_desc *d = c->desc;
d                 192 drivers/dma/mediatek/mtk-uart-apdma.c 		mtk_uart_apdma_write(c, VFF_ADDR, d->addr);
d                 210 drivers/dma/mediatek/mtk-uart-apdma.c 	struct mtk_uart_apdma_desc *d = c->desc;
d                 216 drivers/dma/mediatek/mtk-uart-apdma.c 	list_del(&d->vd.node);
d                 217 drivers/dma/mediatek/mtk-uart-apdma.c 	vchan_cookie_complete(&d->vd);
d                 222 drivers/dma/mediatek/mtk-uart-apdma.c 	struct mtk_uart_apdma_desc *d = c->desc;
d                 246 drivers/dma/mediatek/mtk-uart-apdma.c 	c->rx_status = d->avail_len - cnt;
d                 249 drivers/dma/mediatek/mtk-uart-apdma.c 	list_del(&d->vd.node);
d                 250 drivers/dma/mediatek/mtk-uart-apdma.c 	vchan_cookie_complete(&d->vd);
d                 345 drivers/dma/mediatek/mtk-uart-apdma.c 	struct mtk_uart_apdma_desc *d;
d                 351 drivers/dma/mediatek/mtk-uart-apdma.c 	d = kzalloc(sizeof(*d), GFP_ATOMIC);
d                 352 drivers/dma/mediatek/mtk-uart-apdma.c 	if (!d)
d                 355 drivers/dma/mediatek/mtk-uart-apdma.c 	d->avail_len = sg_dma_len(sgl);
d                 356 drivers/dma/mediatek/mtk-uart-apdma.c 	d->addr = sg_dma_address(sgl);
d                 359 drivers/dma/mediatek/mtk-uart-apdma.c 	return vchan_tx_prep(&c->vc, &d->vd, tx_flags);
d                1013 drivers/dma/mmp_pdma.c 	struct mmp_pdma_device *d = ofdma->of_dma_data;
d                1016 drivers/dma/mmp_pdma.c 	chan = dma_get_any_slave_channel(&d->device);
d                 275 drivers/dma/moxart-dma.c 	struct moxart_desc *d;
d                 312 drivers/dma/moxart-dma.c 	d = kzalloc(struct_size(d, sg, sg_len), GFP_ATOMIC);
d                 313 drivers/dma/moxart-dma.c 	if (!d)
d                 316 drivers/dma/moxart-dma.c 	d->dma_dir = dir;
d                 317 drivers/dma/moxart-dma.c 	d->dev_addr = dev_addr;
d                 318 drivers/dma/moxart-dma.c 	d->es = es;
d                 321 drivers/dma/moxart-dma.c 		d->sg[i].addr = sg_dma_address(sgent);
d                 322 drivers/dma/moxart-dma.c 		d->sg[i].len = sg_dma_len(sgent);
d                 325 drivers/dma/moxart-dma.c 	d->sglen = sg_len;
d                 329 drivers/dma/moxart-dma.c 	return vchan_tx_prep(&ch->vc, &d->vd, tx_flags);
d                 380 drivers/dma/moxart-dma.c 	struct moxart_desc *d = ch->desc;
d                 381 drivers/dma/moxart-dma.c 	unsigned int sglen_div = es_bytes[d->es];
d                 383 drivers/dma/moxart-dma.c 	d->dma_cycles = len >> sglen_div;
d                 389 drivers/dma/moxart-dma.c 	writel(d->dma_cycles, ch->base + REG_OFF_CYCLES);
d                 392 drivers/dma/moxart-dma.c 		__func__, d->dma_cycles, len);
d                 406 drivers/dma/moxart-dma.c 	struct moxart_desc *d = ch->desc;
d                 410 drivers/dma/moxart-dma.c 		moxart_dma_set_params(ch, sg->addr, d->dev_addr);
d                 412 drivers/dma/moxart-dma.c 		moxart_dma_set_params(ch, d->dev_addr, sg->addr);
d                 450 drivers/dma/moxart-dma.c static size_t moxart_dma_desc_size(struct moxart_desc *d,
d                 456 drivers/dma/moxart-dma.c 	for (size = i = completed_sgs; i < d->sglen; i++)
d                 457 drivers/dma/moxart-dma.c 		size += d->sg[i].len;
d                 483 drivers/dma/moxart-dma.c 	struct moxart_desc *d;
d                 495 drivers/dma/moxart-dma.c 		d = to_moxart_dma_desc(&vd->tx);
d                 496 drivers/dma/moxart-dma.c 		txstate->residue = moxart_dma_desc_size(d, 0);
d                  54 drivers/dma/mxs-dma.c #define HW_APBHX_CHn_NXTCMDAR(d, n) \
d                  55 drivers/dma/mxs-dma.c 	(((dma_is_apbh(d) && apbh_is_old(d)) ? 0x050 : 0x110) + (n) * 0x70)
d                  56 drivers/dma/mxs-dma.c #define HW_APBHX_CHn_SEMA(d, n) \
d                  57 drivers/dma/mxs-dma.c 	(((dma_is_apbh(d) && apbh_is_old(d)) ? 0x080 : 0x140) + (n) * 0x70)
d                  58 drivers/dma/mxs-dma.c #define HW_APBHX_CHn_BAR(d, n) \
d                  59 drivers/dma/mxs-dma.c 	(((dma_is_apbh(d) && apbh_is_old(d)) ? 0x070 : 0x130) + (n) * 0x70)
d                  60 drivers/dma/mxs-dma.c #define HW_APBX_CHn_DEBUG1(d, n) (0x150 + (n) * 0x70)
d                 284 drivers/dma/nbpfaxi.c #define nbpf_to_chan(d) container_of(d, struct nbpf_channel, dma_chan)
d                1295 drivers/dma/pxa_dma.c 	struct pxad_device *d = ofdma->of_dma_data;
d                1298 drivers/dma/pxa_dma.c 	chan = dma_get_any_slave_channel(&d->slave);
d                 251 drivers/dma/sa11x0-dma.c 	struct sa11x0_dma_dev *d = p->dev;
d                 263 drivers/dma/sa11x0-dma.c 	dev_dbg(d->slave.dev, "pchan %u: irq: DCSR:%02x\n", p->num, dcsr);
d                 266 drivers/dma/sa11x0-dma.c 		dev_err(d->slave.dev, "pchan %u: error. DCSR:%02x DDAR:%08x DBSA:%08x DBTA:%08x DBSB:%08x DBTB:%08x\n",
d                 328 drivers/dma/sa11x0-dma.c 	struct sa11x0_dma_dev *d = (struct sa11x0_dma_dev *)arg;
d                 333 drivers/dma/sa11x0-dma.c 	dev_dbg(d->slave.dev, "tasklet enter\n");
d                 335 drivers/dma/sa11x0-dma.c 	list_for_each_entry(c, &d->slave.channels, vc.chan.device_node) {
d                 342 drivers/dma/sa11x0-dma.c 				dev_dbg(d->slave.dev, "pchan %u: free\n", p->num);
d                 352 drivers/dma/sa11x0-dma.c 	spin_lock_irq(&d->lock);
d                 354 drivers/dma/sa11x0-dma.c 		p = &d->phy[pch];
d                 356 drivers/dma/sa11x0-dma.c 		if (p->vchan == NULL && !list_empty(&d->chan_pending)) {
d                 357 drivers/dma/sa11x0-dma.c 			c = list_first_entry(&d->chan_pending,
d                 366 drivers/dma/sa11x0-dma.c 			dev_dbg(d->slave.dev, "pchan %u: alloc vchan %p\n", pch, &c->vc);
d                 369 drivers/dma/sa11x0-dma.c 	spin_unlock_irq(&d->lock);
d                 373 drivers/dma/sa11x0-dma.c 			p = &d->phy[pch];
d                 384 drivers/dma/sa11x0-dma.c 	dev_dbg(d->slave.dev, "tasklet exit\n");
d                 391 drivers/dma/sa11x0-dma.c 	struct sa11x0_dma_dev *d = to_sa11x0_dma(chan->device);
d                 394 drivers/dma/sa11x0-dma.c 	spin_lock_irqsave(&d->lock, flags);
d                 396 drivers/dma/sa11x0-dma.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 421 drivers/dma/sa11x0-dma.c 	struct sa11x0_dma_dev *d = to_sa11x0_dma(chan->device);
d                 462 drivers/dma/sa11x0-dma.c 			dev_vdbg(d->slave.dev, "tx_status: addr:%pad\n", &addr);
d                 465 drivers/dma/sa11x0-dma.c 				dev_vdbg(d->slave.dev, "tx_status: [%u] %x+%x\n",
d                 473 drivers/dma/sa11x0-dma.c 					dev_vdbg(d->slave.dev, "tx_status: [%u] +%x\n",
d                 481 drivers/dma/sa11x0-dma.c 				dev_vdbg(d->slave.dev, "tx_status: [%u] %x+%x ++\n",
d                 490 drivers/dma/sa11x0-dma.c 	dev_vdbg(d->slave.dev, "tx_status: bytes 0x%x\n", state->residue);
d                 503 drivers/dma/sa11x0-dma.c 	struct sa11x0_dma_dev *d = to_sa11x0_dma(chan->device);
d                 509 drivers/dma/sa11x0-dma.c 			spin_lock(&d->lock);
d                 511 drivers/dma/sa11x0-dma.c 				list_add_tail(&c->node, &d->chan_pending);
d                 512 drivers/dma/sa11x0-dma.c 				tasklet_schedule(&d->task);
d                 513 drivers/dma/sa11x0-dma.c 				dev_dbg(d->slave.dev, "vchan %p: issued\n", &c->vc);
d                 515 drivers/dma/sa11x0-dma.c 			spin_unlock(&d->lock);
d                 518 drivers/dma/sa11x0-dma.c 		dev_dbg(d->slave.dev, "vchan %p: nothing to issue\n", &c->vc);
d                 703 drivers/dma/sa11x0-dma.c 	struct sa11x0_dma_dev *d = to_sa11x0_dma(chan->device);
d                 707 drivers/dma/sa11x0-dma.c 	dev_dbg(d->slave.dev, "vchan %p: pause\n", &c->vc);
d                 716 drivers/dma/sa11x0-dma.c 			spin_lock(&d->lock);
d                 718 drivers/dma/sa11x0-dma.c 			spin_unlock(&d->lock);
d                 729 drivers/dma/sa11x0-dma.c 	struct sa11x0_dma_dev *d = to_sa11x0_dma(chan->device);
d                 733 drivers/dma/sa11x0-dma.c 	dev_dbg(d->slave.dev, "vchan %p: resume\n", &c->vc);
d                 742 drivers/dma/sa11x0-dma.c 			spin_lock(&d->lock);
d                 743 drivers/dma/sa11x0-dma.c 			list_add_tail(&c->node, &d->chan_pending);
d                 744 drivers/dma/sa11x0-dma.c 			spin_unlock(&d->lock);
d                 755 drivers/dma/sa11x0-dma.c 	struct sa11x0_dma_dev *d = to_sa11x0_dma(chan->device);
d                 760 drivers/dma/sa11x0-dma.c 	dev_dbg(d->slave.dev, "vchan %p: terminate all\n", &c->vc);
d                 767 drivers/dma/sa11x0-dma.c 		dev_dbg(d->slave.dev, "pchan %u: terminating\n", p->num);
d                 784 drivers/dma/sa11x0-dma.c 		spin_lock(&d->lock);
d                 786 drivers/dma/sa11x0-dma.c 		spin_unlock(&d->lock);
d                 787 drivers/dma/sa11x0-dma.c 		tasklet_schedule(&d->task);
d                 903 drivers/dma/sa11x0-dma.c 	struct sa11x0_dma_dev *d;
d                 912 drivers/dma/sa11x0-dma.c 	d = kzalloc(sizeof(*d), GFP_KERNEL);
d                 913 drivers/dma/sa11x0-dma.c 	if (!d) {
d                 918 drivers/dma/sa11x0-dma.c 	spin_lock_init(&d->lock);
d                 919 drivers/dma/sa11x0-dma.c 	INIT_LIST_HEAD(&d->chan_pending);
d                 921 drivers/dma/sa11x0-dma.c 	d->slave.filter.fn = sa11x0_dma_filter_fn;
d                 922 drivers/dma/sa11x0-dma.c 	d->slave.filter.mapcnt = ARRAY_SIZE(sa11x0_dma_map);
d                 923 drivers/dma/sa11x0-dma.c 	d->slave.filter.map = sa11x0_dma_map;
d                 925 drivers/dma/sa11x0-dma.c 	d->base = ioremap(res->start, resource_size(res));
d                 926 drivers/dma/sa11x0-dma.c 	if (!d->base) {
d                 931 drivers/dma/sa11x0-dma.c 	tasklet_init(&d->task, sa11x0_dma_tasklet, (unsigned long)d);
d                 934 drivers/dma/sa11x0-dma.c 		struct sa11x0_dma_phy *p = &d->phy[i];
d                 936 drivers/dma/sa11x0-dma.c 		p->dev = d;
d                 938 drivers/dma/sa11x0-dma.c 		p->base = d->base + i * DMA_SIZE;
d                 948 drivers/dma/sa11x0-dma.c 				sa11x0_dma_free_irq(pdev, i, &d->phy[i]);
d                 954 drivers/dma/sa11x0-dma.c 	dma_cap_set(DMA_SLAVE, d->slave.cap_mask);
d                 955 drivers/dma/sa11x0-dma.c 	dma_cap_set(DMA_CYCLIC, d->slave.cap_mask);
d                 956 drivers/dma/sa11x0-dma.c 	d->slave.device_prep_slave_sg = sa11x0_dma_prep_slave_sg;
d                 957 drivers/dma/sa11x0-dma.c 	d->slave.device_prep_dma_cyclic = sa11x0_dma_prep_dma_cyclic;
d                 958 drivers/dma/sa11x0-dma.c 	d->slave.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
d                 959 drivers/dma/sa11x0-dma.c 	d->slave.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
d                 960 drivers/dma/sa11x0-dma.c 	d->slave.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
d                 962 drivers/dma/sa11x0-dma.c 	d->slave.dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
d                 964 drivers/dma/sa11x0-dma.c 	ret = sa11x0_dma_init_dmadev(&d->slave, &pdev->dev);
d                 966 drivers/dma/sa11x0-dma.c 		dev_warn(d->slave.dev, "failed to register slave async device: %d\n",
d                 971 drivers/dma/sa11x0-dma.c 	platform_set_drvdata(pdev, d);
d                 975 drivers/dma/sa11x0-dma.c 	sa11x0_dma_free_channels(&d->slave);
d                 977 drivers/dma/sa11x0-dma.c 		sa11x0_dma_free_irq(pdev, i, &d->phy[i]);
d                 979 drivers/dma/sa11x0-dma.c 	tasklet_kill(&d->task);
d                 980 drivers/dma/sa11x0-dma.c 	iounmap(d->base);
d                 982 drivers/dma/sa11x0-dma.c 	kfree(d);
d                 989 drivers/dma/sa11x0-dma.c 	struct sa11x0_dma_dev *d = platform_get_drvdata(pdev);
d                 992 drivers/dma/sa11x0-dma.c 	dma_async_device_unregister(&d->slave);
d                 994 drivers/dma/sa11x0-dma.c 	sa11x0_dma_free_channels(&d->slave);
d                 996 drivers/dma/sa11x0-dma.c 		sa11x0_dma_free_irq(pdev, pch, &d->phy[pch]);
d                 997 drivers/dma/sa11x0-dma.c 	tasklet_kill(&d->task);
d                 998 drivers/dma/sa11x0-dma.c 	iounmap(d->base);
d                 999 drivers/dma/sa11x0-dma.c 	kfree(d);
d                1006 drivers/dma/sa11x0-dma.c 	struct sa11x0_dma_dev *d = dev_get_drvdata(dev);
d                1010 drivers/dma/sa11x0-dma.c 		struct sa11x0_dma_phy *p = &d->phy[pch];
d                1044 drivers/dma/sa11x0-dma.c 	struct sa11x0_dma_dev *d = dev_get_drvdata(dev);
d                1048 drivers/dma/sa11x0-dma.c 		struct sa11x0_dma_phy *p = &d->phy[pch];
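The sa11x0 probe entries above (and the zx_dma probe further down) follow the usual dmaengine registration sequence: set capability bits, fill in the slave's limits, then register. A minimal, hedged sketch of that sequence; struct mydma_dev and mydma_register are invented for illustration, not the sa11x0 code itself.

#include <linux/bits.h>
#include <linux/dmaengine.h>

struct mydma_dev {
	struct dma_device slave;
};

static int mydma_register(struct mydma_dev *d, struct device *dev)
{
	dma_cap_zero(d->slave.cap_mask);
	dma_cap_set(DMA_SLAVE, d->slave.cap_mask);
	dma_cap_set(DMA_CYCLIC, d->slave.cap_mask);

	d->slave.dev = dev;
	d->slave.directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
	d->slave.residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
	d->slave.src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
				   BIT(DMA_SLAVE_BUSWIDTH_2_BYTES);
	d->slave.dst_addr_widths = d->slave.src_addr_widths;

	/*
	 * A real driver must also assign the prep/issue/status/terminate
	 * callbacks (as the sa11x0 excerpt does) before registering.
	 */
	return dma_async_device_register(&d->slave);
}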
d                  94 drivers/dma/sh/rcar-dmac.c #define to_rcar_dmac_desc(d)	container_of(d, struct rcar_dmac_desc, async_tx)
d                 211 drivers/dma/sh/rcar-dmac.c #define to_rcar_dmac(d)		container_of(d, struct rcar_dmac, engine)
d                  37 drivers/dma/sh/shdma-base.c #define to_shdma_dev(d) container_of(d, struct shdma_dev, dma_dev)
d                 106 drivers/dma/sh/usb-dmac.c #define to_usb_dmac(d)		container_of(d, struct usb_dmac, engine)
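The to_rcar_dmac, to_shdma_dev, and to_usb_dmac entries above are all the same container_of accessor idiom: the driver embeds struct dma_device in its own state and walks back to the wrapper from the pointer the core hands it. A small sketch, with the foo_dmac names invented:

#include <linux/dmaengine.h>
#include <linux/kernel.h>

struct foo_dmac {
	struct dma_device engine;	/* embedded dmaengine device */
	void __iomem *base;		/* driver-private register base */
};

/* container_of() walks back from the embedded member to the wrapper. */
#define to_foo_dmac(d)	container_of(d, struct foo_dmac, engine)

static void __iomem *foo_dmac_regs(struct dma_chan *chan)
{
	struct foo_dmac *dmac = to_foo_dmac(chan->device);

	/*
	 * The wrapper's private fields are reachable even though the
	 * dmaengine core only handed out the embedded struct dma_device.
	 */
	return dmac->base;
}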
d                 771 drivers/dma/ste_dma40.c 		struct d40_desc *d;
d                 774 drivers/dma/ste_dma40.c 		list_for_each_entry_safe(d, _d, &d40c->client, node) {
d                 775 drivers/dma/ste_dma40.c 			if (async_tx_test_ack(&d->txd)) {
d                 776 drivers/dma/ste_dma40.c 				d40_desc_remove(d);
d                 777 drivers/dma/ste_dma40.c 				desc = d;
d                 263 drivers/dma/sun4i-dma.c 			    struct sun4i_dma_promise *d)
d                 270 drivers/dma/sun4i-dma.c 		writel_relaxed(d->src, pchan->base + SUN4I_DDMA_SRC_ADDR_REG);
d                 271 drivers/dma/sun4i-dma.c 		writel_relaxed(d->dst, pchan->base + SUN4I_DDMA_DST_ADDR_REG);
d                 272 drivers/dma/sun4i-dma.c 		writel_relaxed(d->len, pchan->base + SUN4I_DDMA_BYTE_COUNT_REG);
d                 273 drivers/dma/sun4i-dma.c 		writel_relaxed(d->para, pchan->base + SUN4I_DDMA_PARA_REG);
d                 274 drivers/dma/sun4i-dma.c 		writel_relaxed(d->cfg, pchan->base + SUN4I_DDMA_CFG_REG);
d                 276 drivers/dma/sun4i-dma.c 		writel_relaxed(d->src, pchan->base + SUN4I_NDMA_SRC_ADDR_REG);
d                 277 drivers/dma/sun4i-dma.c 		writel_relaxed(d->dst, pchan->base + SUN4I_NDMA_DST_ADDR_REG);
d                 278 drivers/dma/sun4i-dma.c 		writel_relaxed(d->len, pchan->base + SUN4I_NDMA_BYTE_COUNT_REG);
d                 279 drivers/dma/sun4i-dma.c 		writel_relaxed(d->cfg, pchan->base + SUN4I_NDMA_CFG_REG);
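The sun4i entries show a prepared descriptor ("promise") being pushed into per-channel registers with writel_relaxed. A hedged sketch of the same MMIO programming pattern, using placeholder register offsets rather than the real sun4i layout:

#include <linux/io.h>

#define XDMA_SRC_REG	0x00	/* placeholder offsets, not the sun4i map */
#define XDMA_DST_REG	0x04
#define XDMA_LEN_REG	0x08
#define XDMA_CFG_REG	0x0c

struct xdma_promise {
	u32 src, dst, len, cfg;
};

static void xdma_program(void __iomem *chan_base,
			 const struct xdma_promise *d)
{
	/*
	 * writel_relaxed() omits the barrier of writel(); the caller is
	 * responsible for ordering the final write that actually starts
	 * the transfer (e.g. by issuing it with writel()).
	 */
	writel_relaxed(d->src, chan_base + XDMA_SRC_REG);
	writel_relaxed(d->dst, chan_base + XDMA_DST_REG);
	writel_relaxed(d->len, chan_base + XDMA_LEN_REG);
	writel_relaxed(d->cfg, chan_base + XDMA_CFG_REG);
}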
d                 210 drivers/dma/sun6i-dma.c static inline struct sun6i_dma_dev *to_sun6i_dma_dev(struct dma_device *d)
d                 212 drivers/dma/sun6i-dma.c 	return container_of(d, struct sun6i_dma_dev, slave);
d                 591 drivers/dma/ti/cppi41.c 	struct cppi41_desc *d;
d                 606 drivers/dma/ti/cppi41.c 	d = c->desc;
d                 615 drivers/dma/ti/cppi41.c 		d->pd0 = get_host_pd0(len);
d                 616 drivers/dma/ti/cppi41.c 		d->pd1 = get_host_pd1(c);
d                 617 drivers/dma/ti/cppi41.c 		d->pd2 = get_host_pd2(c);
d                 618 drivers/dma/ti/cppi41.c 		d->pd3 = get_host_pd3(len);
d                 619 drivers/dma/ti/cppi41.c 		d->pd4 = get_host_pd4_or_7(addr);
d                 620 drivers/dma/ti/cppi41.c 		d->pd5 = get_host_pd5();
d                 621 drivers/dma/ti/cppi41.c 		d->pd6 = get_host_pd6(len);
d                 622 drivers/dma/ti/cppi41.c 		d->pd7 = get_host_pd4_or_7(addr);
d                 624 drivers/dma/ti/cppi41.c 		d++;
d                 636 drivers/dma/ti/cppi41.c static void cppi41_compute_td_desc(struct cppi41_desc *d)
d                 638 drivers/dma/ti/cppi41.c 	d->pd0 = DESC_TYPE_TEARD << DESC_TYPE;
d                 741 drivers/dma/ti/edma.c static inline struct edma_cc *to_edma_cc(struct dma_device *d)
d                 743 drivers/dma/ti/edma.c 	return container_of(d, struct edma_cc, dma_slave);
d                 211 drivers/dma/ti/omap-dma.c static inline struct omap_dmadev *to_omap_dma_dev(struct dma_device *d)
d                 213 drivers/dma/ti/omap-dma.c 	return container_of(d, struct omap_dmadev, ddev);
d                 228 drivers/dma/ti/omap-dma.c 	struct omap_desc *d = to_omap_dma_desc(&vd->tx);
d                 230 drivers/dma/ti/omap-dma.c 	if (d->using_ll) {
d                 234 drivers/dma/ti/omap-dma.c 		for (i = 0; i < d->sglen; i++) {
d                 235 drivers/dma/ti/omap-dma.c 			if (d->sg[i].t2_desc)
d                 236 drivers/dma/ti/omap-dma.c 				dma_pool_free(od->desc_pool, d->sg[i].t2_desc,
d                 237 drivers/dma/ti/omap-dma.c 					      d->sg[i].t2_desc_paddr);
d                 241 drivers/dma/ti/omap-dma.c 	kfree(d);
d                 244 drivers/dma/ti/omap-dma.c static void omap_dma_fill_type2_desc(struct omap_desc *d, int idx,
d                 247 drivers/dma/ti/omap-dma.c 	struct omap_sg *sg = &d->sg[idx];
d                 251 drivers/dma/ti/omap-dma.c 		d->sg[idx - 1].t2_desc->next_desc = sg->t2_desc_paddr;
d                 258 drivers/dma/ti/omap-dma.c 	t2_desc->cicr = d->cicr;
d                 265 drivers/dma/ti/omap-dma.c 		t2_desc->csei = d->ei;
d                 267 drivers/dma/ti/omap-dma.c 		t2_desc->csfi = d->fi;
d                 273 drivers/dma/ti/omap-dma.c 		t2_desc->cdei = d->ei;
d                 275 drivers/dma/ti/omap-dma.c 		t2_desc->cdfi = d->fi;
d                 387 drivers/dma/ti/omap-dma.c static void omap_dma_start(struct omap_chan *c, struct omap_desc *d)
d                 390 drivers/dma/ti/omap-dma.c 	uint16_t cicr = d->cicr;
d                 399 drivers/dma/ti/omap-dma.c 	if (d->using_ll) {
d                 402 drivers/dma/ti/omap-dma.c 		if (d->dir == DMA_DEV_TO_MEM)
d                 408 drivers/dma/ti/omap-dma.c 		omap_dma_chan_write(c, CNDP, d->sg[0].t2_desc_paddr);
d                 422 drivers/dma/ti/omap-dma.c 	omap_dma_chan_write(c, CCR, d->ccr | CCR_ENABLE);
d                 504 drivers/dma/ti/omap-dma.c static void omap_dma_start_sg(struct omap_chan *c, struct omap_desc *d)
d                 506 drivers/dma/ti/omap-dma.c 	struct omap_sg *sg = d->sg + c->sgidx;
d                 509 drivers/dma/ti/omap-dma.c 	if (d->dir == DMA_DEV_TO_MEM || d->dir == DMA_MEM_TO_MEM) {
d                 525 drivers/dma/ti/omap-dma.c 	omap_dma_start(c, d);
d                 532 drivers/dma/ti/omap-dma.c 	struct omap_desc *d;
d                 542 drivers/dma/ti/omap-dma.c 	c->desc = d = to_omap_dma_desc(&vd->tx);
d                 552 drivers/dma/ti/omap-dma.c 	omap_dma_chan_write(c, CCR, d->ccr);
d                 554 drivers/dma/ti/omap-dma.c 		omap_dma_chan_write(c, CCR2, d->ccr >> 16);
d                 556 drivers/dma/ti/omap-dma.c 	if (d->dir == DMA_DEV_TO_MEM || d->dir == DMA_MEM_TO_MEM) {
d                 566 drivers/dma/ti/omap-dma.c 	omap_dma_chan_write(c, cxsa, d->dev_addr);
d                 567 drivers/dma/ti/omap-dma.c 	omap_dma_chan_write(c, cxei, d->ei);
d                 568 drivers/dma/ti/omap-dma.c 	omap_dma_chan_write(c, cxfi, d->fi);
d                 569 drivers/dma/ti/omap-dma.c 	omap_dma_chan_write(c, CSDP, d->csdp);
d                 570 drivers/dma/ti/omap-dma.c 	omap_dma_chan_write(c, CLNK_CTRL, d->clnk_ctrl);
d                 572 drivers/dma/ti/omap-dma.c 	omap_dma_start_sg(c, d);
d                 578 drivers/dma/ti/omap-dma.c 	struct omap_desc *d;
d                 582 drivers/dma/ti/omap-dma.c 	d = c->desc;
d                 583 drivers/dma/ti/omap-dma.c 	if (d) {
d                 585 drivers/dma/ti/omap-dma.c 			vchan_cyclic_callback(&d->vd);
d                 586 drivers/dma/ti/omap-dma.c 		} else if (d->using_ll || c->sgidx == d->sglen) {
d                 588 drivers/dma/ti/omap-dma.c 			vchan_cookie_complete(&d->vd);
d                 590 drivers/dma/ti/omap-dma.c 			omap_dma_start_sg(c, d);
d                 717 drivers/dma/ti/omap-dma.c static size_t omap_dma_desc_size(struct omap_desc *d)
d                 722 drivers/dma/ti/omap-dma.c 	for (size = i = 0; i < d->sglen; i++)
d                 723 drivers/dma/ti/omap-dma.c 		size += omap_dma_sg_size(&d->sg[i]);
d                 725 drivers/dma/ti/omap-dma.c 	return size * es_bytes[d->es];
d                 728 drivers/dma/ti/omap-dma.c static size_t omap_dma_desc_size_pos(struct omap_desc *d, dma_addr_t addr)
d                 731 drivers/dma/ti/omap-dma.c 	size_t size, es_size = es_bytes[d->es];
d                 733 drivers/dma/ti/omap-dma.c 	for (size = i = 0; i < d->sglen; i++) {
d                 734 drivers/dma/ti/omap-dma.c 		size_t this_size = omap_dma_sg_size(&d->sg[i]) * es_size;
d                 738 drivers/dma/ti/omap-dma.c 		else if (addr >= d->sg[i].addr &&
d                 739 drivers/dma/ti/omap-dma.c 			 addr < d->sg[i].addr + this_size)
d                 740 drivers/dma/ti/omap-dma.c 			size += d->sg[i].addr + this_size - addr;
d                 819 drivers/dma/ti/omap-dma.c 	struct omap_desc *d = NULL;
d                 827 drivers/dma/ti/omap-dma.c 		d = c->desc;
d                 832 drivers/dma/ti/omap-dma.c 	if (d) {
d                 835 drivers/dma/ti/omap-dma.c 		if (d->dir == DMA_MEM_TO_DEV)
d                 837 drivers/dma/ti/omap-dma.c 		else if (d->dir == DMA_DEV_TO_MEM  || d->dir == DMA_MEM_TO_MEM)
d                 842 drivers/dma/ti/omap-dma.c 		txstate->residue = omap_dma_desc_size_pos(d, pos);
d                 856 drivers/dma/ti/omap-dma.c 	} else if (d && d->polled && c->running) {
d                 865 drivers/dma/ti/omap-dma.c 			vchan_cookie_complete(&d->vd);
d                 893 drivers/dma/ti/omap-dma.c 	struct omap_desc *d;
d                 931 drivers/dma/ti/omap-dma.c 	d = kzalloc(struct_size(d, sg, sglen), GFP_ATOMIC);
d                 932 drivers/dma/ti/omap-dma.c 	if (!d)
d                 935 drivers/dma/ti/omap-dma.c 	d->dir = dir;
d                 936 drivers/dma/ti/omap-dma.c 	d->dev_addr = dev_addr;
d                 937 drivers/dma/ti/omap-dma.c 	d->es = es;
d                 944 drivers/dma/ti/omap-dma.c 		d->ei = 1;
d                 951 drivers/dma/ti/omap-dma.c 		d->fi = -(port_window_bytes - 1);
d                 954 drivers/dma/ti/omap-dma.c 	d->ccr = c->ccr | CCR_SYNC_FRAME;
d                 956 drivers/dma/ti/omap-dma.c 		d->csdp = CSDP_DST_BURST_64 | CSDP_DST_PACKED;
d                 958 drivers/dma/ti/omap-dma.c 		d->ccr |= CCR_DST_AMODE_POSTINC;
d                 960 drivers/dma/ti/omap-dma.c 			d->ccr |= CCR_SRC_AMODE_DBLIDX;
d                 963 drivers/dma/ti/omap-dma.c 				d->csdp |= CSDP_SRC_BURST_64;
d                 965 drivers/dma/ti/omap-dma.c 				d->csdp |= CSDP_SRC_BURST_32;
d                 967 drivers/dma/ti/omap-dma.c 				d->csdp |= CSDP_SRC_BURST_16;
d                 970 drivers/dma/ti/omap-dma.c 			d->ccr |= CCR_SRC_AMODE_CONSTANT;
d                 973 drivers/dma/ti/omap-dma.c 		d->csdp = CSDP_SRC_BURST_64 | CSDP_SRC_PACKED;
d                 975 drivers/dma/ti/omap-dma.c 		d->ccr |= CCR_SRC_AMODE_POSTINC;
d                 977 drivers/dma/ti/omap-dma.c 			d->ccr |= CCR_DST_AMODE_DBLIDX;
d                 980 drivers/dma/ti/omap-dma.c 				d->csdp |= CSDP_DST_BURST_64;
d                 982 drivers/dma/ti/omap-dma.c 				d->csdp |= CSDP_DST_BURST_32;
d                 984 drivers/dma/ti/omap-dma.c 				d->csdp |= CSDP_DST_BURST_16;
d                 986 drivers/dma/ti/omap-dma.c 			d->ccr |= CCR_DST_AMODE_CONSTANT;
d                 990 drivers/dma/ti/omap-dma.c 	d->cicr = CICR_DROP_IE | CICR_BLOCK_IE;
d                 991 drivers/dma/ti/omap-dma.c 	d->csdp |= es;
d                 994 drivers/dma/ti/omap-dma.c 		d->cicr |= CICR_TOUT_IE;
d                 997 drivers/dma/ti/omap-dma.c 			d->csdp |= CSDP_DST_PORT_EMIFF | CSDP_SRC_PORT_TIPB;
d                 999 drivers/dma/ti/omap-dma.c 			d->csdp |= CSDP_DST_PORT_TIPB | CSDP_SRC_PORT_EMIFF;
d                1002 drivers/dma/ti/omap-dma.c 			d->ccr |= CCR_TRIGGER_SRC;
d                1004 drivers/dma/ti/omap-dma.c 		d->cicr |= CICR_MISALIGNED_ERR_IE | CICR_TRANS_ERR_IE;
d                1007 drivers/dma/ti/omap-dma.c 			d->csdp |= CSDP_WRITE_LAST_NON_POSTED;
d                1010 drivers/dma/ti/omap-dma.c 		d->clnk_ctrl = c->dma_ch;
d                1025 drivers/dma/ti/omap-dma.c 		d->using_ll = od->ll123_supported;
d                1028 drivers/dma/ti/omap-dma.c 		struct omap_sg *osg = &d->sg[i];
d                1034 drivers/dma/ti/omap-dma.c 		if (d->using_ll) {
d                1041 drivers/dma/ti/omap-dma.c 				d->using_ll = false;
d                1045 drivers/dma/ti/omap-dma.c 			omap_dma_fill_type2_desc(d, i, dir, (i == sglen - 1));
d                1049 drivers/dma/ti/omap-dma.c 	d->sglen = sglen;
d                1053 drivers/dma/ti/omap-dma.c 		for (i = 0; i < d->sglen; i++) {
d                1054 drivers/dma/ti/omap-dma.c 			struct omap_sg *osg = &d->sg[i];
d                1064 drivers/dma/ti/omap-dma.c 	return vchan_tx_prep(&c->vc, &d->vd, tx_flags);
d                1074 drivers/dma/ti/omap-dma.c 	struct omap_desc *d;
d                1108 drivers/dma/ti/omap-dma.c 	d = kzalloc(sizeof(*d) + sizeof(d->sg[0]), GFP_ATOMIC);
d                1109 drivers/dma/ti/omap-dma.c 	if (!d)
d                1112 drivers/dma/ti/omap-dma.c 	d->dir = dir;
d                1113 drivers/dma/ti/omap-dma.c 	d->dev_addr = dev_addr;
d                1114 drivers/dma/ti/omap-dma.c 	d->fi = burst;
d                1115 drivers/dma/ti/omap-dma.c 	d->es = es;
d                1116 drivers/dma/ti/omap-dma.c 	d->sg[0].addr = buf_addr;
d                1117 drivers/dma/ti/omap-dma.c 	d->sg[0].en = period_len / es_bytes[es];
d                1118 drivers/dma/ti/omap-dma.c 	d->sg[0].fn = buf_len / period_len;
d                1119 drivers/dma/ti/omap-dma.c 	d->sglen = 1;
d                1121 drivers/dma/ti/omap-dma.c 	d->ccr = c->ccr;
d                1123 drivers/dma/ti/omap-dma.c 		d->ccr |= CCR_DST_AMODE_POSTINC | CCR_SRC_AMODE_CONSTANT;
d                1125 drivers/dma/ti/omap-dma.c 		d->ccr |= CCR_DST_AMODE_CONSTANT | CCR_SRC_AMODE_POSTINC;
d                1127 drivers/dma/ti/omap-dma.c 	d->cicr = CICR_DROP_IE;
d                1129 drivers/dma/ti/omap-dma.c 		d->cicr |= CICR_FRAME_IE;
d                1131 drivers/dma/ti/omap-dma.c 	d->csdp = es;
d                1134 drivers/dma/ti/omap-dma.c 		d->cicr |= CICR_TOUT_IE;
d                1137 drivers/dma/ti/omap-dma.c 			d->csdp |= CSDP_DST_PORT_EMIFF | CSDP_SRC_PORT_MPUI;
d                1139 drivers/dma/ti/omap-dma.c 			d->csdp |= CSDP_DST_PORT_MPUI | CSDP_SRC_PORT_EMIFF;
d                1142 drivers/dma/ti/omap-dma.c 			d->ccr |= CCR_SYNC_PACKET;
d                1144 drivers/dma/ti/omap-dma.c 			d->ccr |= CCR_SYNC_ELEMENT;
d                1147 drivers/dma/ti/omap-dma.c 			d->ccr |= CCR_TRIGGER_SRC;
d                1148 drivers/dma/ti/omap-dma.c 			d->csdp |= CSDP_DST_PACKED;
d                1150 drivers/dma/ti/omap-dma.c 			d->csdp |= CSDP_SRC_PACKED;
d                1153 drivers/dma/ti/omap-dma.c 		d->cicr |= CICR_MISALIGNED_ERR_IE | CICR_TRANS_ERR_IE;
d                1155 drivers/dma/ti/omap-dma.c 		d->csdp |= CSDP_DST_BURST_64 | CSDP_SRC_BURST_64;
d                1159 drivers/dma/ti/omap-dma.c 		d->ccr |= CCR_AUTO_INIT | CCR_REPEAT;
d                1161 drivers/dma/ti/omap-dma.c 		d->clnk_ctrl = c->dma_ch | CLNK_CTRL_ENABLE_LNK;
d                1165 drivers/dma/ti/omap-dma.c 	return vchan_tx_prep(&c->vc, &d->vd, flags);
d                1173 drivers/dma/ti/omap-dma.c 	struct omap_desc *d;
d                1176 drivers/dma/ti/omap-dma.c 	d = kzalloc(sizeof(*d) + sizeof(d->sg[0]), GFP_ATOMIC);
d                1177 drivers/dma/ti/omap-dma.c 	if (!d)
d                1184 drivers/dma/ti/omap-dma.c 	d->dir = DMA_MEM_TO_MEM;
d                1185 drivers/dma/ti/omap-dma.c 	d->dev_addr = src;
d                1186 drivers/dma/ti/omap-dma.c 	d->fi = 0;
d                1187 drivers/dma/ti/omap-dma.c 	d->es = data_type;
d                1188 drivers/dma/ti/omap-dma.c 	d->sg[0].en = len / BIT(data_type);
d                1189 drivers/dma/ti/omap-dma.c 	d->sg[0].fn = 1;
d                1190 drivers/dma/ti/omap-dma.c 	d->sg[0].addr = dest;
d                1191 drivers/dma/ti/omap-dma.c 	d->sglen = 1;
d                1192 drivers/dma/ti/omap-dma.c 	d->ccr = c->ccr;
d                1193 drivers/dma/ti/omap-dma.c 	d->ccr |= CCR_DST_AMODE_POSTINC | CCR_SRC_AMODE_POSTINC;
d                1196 drivers/dma/ti/omap-dma.c 		d->cicr |= CICR_FRAME_IE;
d                1198 drivers/dma/ti/omap-dma.c 		d->polled = true;
d                1200 drivers/dma/ti/omap-dma.c 	d->csdp = data_type;
d                1203 drivers/dma/ti/omap-dma.c 		d->cicr |= CICR_TOUT_IE;
d                1204 drivers/dma/ti/omap-dma.c 		d->csdp |= CSDP_DST_PORT_EMIFF | CSDP_SRC_PORT_EMIFF;
d                1206 drivers/dma/ti/omap-dma.c 		d->csdp |= CSDP_DST_PACKED | CSDP_SRC_PACKED;
d                1207 drivers/dma/ti/omap-dma.c 		d->cicr |= CICR_MISALIGNED_ERR_IE | CICR_TRANS_ERR_IE;
d                1208 drivers/dma/ti/omap-dma.c 		d->csdp |= CSDP_DST_BURST_64 | CSDP_SRC_BURST_64;
d                1211 drivers/dma/ti/omap-dma.c 	return vchan_tx_prep(&c->vc, &d->vd, tx_flags);
d                1219 drivers/dma/ti/omap-dma.c 	struct omap_desc *d;
d                1231 drivers/dma/ti/omap-dma.c 	d = kzalloc(sizeof(*d) + sizeof(d->sg[0]), GFP_ATOMIC);
d                1232 drivers/dma/ti/omap-dma.c 	if (!d)
d                1239 drivers/dma/ti/omap-dma.c 	sg = &d->sg[0];
d                1240 drivers/dma/ti/omap-dma.c 	d->dir = DMA_MEM_TO_MEM;
d                1241 drivers/dma/ti/omap-dma.c 	d->dev_addr = xt->src_start;
d                1242 drivers/dma/ti/omap-dma.c 	d->es = data_type;
d                1246 drivers/dma/ti/omap-dma.c 	d->sglen = 1;
d                1247 drivers/dma/ti/omap-dma.c 	d->ccr = c->ccr;
d                1252 drivers/dma/ti/omap-dma.c 		d->ccr |= CCR_SRC_AMODE_DBLIDX;
d                1253 drivers/dma/ti/omap-dma.c 		d->ei = 1;
d                1254 drivers/dma/ti/omap-dma.c 		d->fi = src_icg + 1;
d                1256 drivers/dma/ti/omap-dma.c 		d->ccr |= CCR_SRC_AMODE_POSTINC;
d                1257 drivers/dma/ti/omap-dma.c 		d->fi = 0;
d                1262 drivers/dma/ti/omap-dma.c 		kfree(d);
d                1267 drivers/dma/ti/omap-dma.c 		d->ccr |= CCR_DST_AMODE_DBLIDX;
d                1271 drivers/dma/ti/omap-dma.c 		d->ccr |= CCR_DST_AMODE_POSTINC;
d                1277 drivers/dma/ti/omap-dma.c 		kfree(d);
d                1281 drivers/dma/ti/omap-dma.c 	d->cicr = CICR_DROP_IE | CICR_FRAME_IE;
d                1283 drivers/dma/ti/omap-dma.c 	d->csdp = data_type;
d                1286 drivers/dma/ti/omap-dma.c 		d->cicr |= CICR_TOUT_IE;
d                1287 drivers/dma/ti/omap-dma.c 		d->csdp |= CSDP_DST_PORT_EMIFF | CSDP_SRC_PORT_EMIFF;
d                1289 drivers/dma/ti/omap-dma.c 		d->csdp |= CSDP_DST_PACKED | CSDP_SRC_PACKED;
d                1290 drivers/dma/ti/omap-dma.c 		d->cicr |= CICR_MISALIGNED_ERR_IE | CICR_TRANS_ERR_IE;
d                1291 drivers/dma/ti/omap-dma.c 		d->csdp |= CSDP_DST_BURST_64 | CSDP_SRC_BURST_64;
d                1294 drivers/dma/ti/omap-dma.c 	return vchan_tx_prep(&c->vc, &d->vd, flags);
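Several omap-dma prep routines above allocate a descriptor with a trailing flexible scatterlist array, either via struct_size() or the open-coded sizeof(*d) + sizeof(d->sg[0]). A sketch of that allocation idiom; bar_desc and bar_sg are invented names:

#include <linux/overflow.h>
#include <linux/slab.h>
#include <linux/types.h>

struct bar_sg {
	dma_addr_t addr;
	u32 len;
};

struct bar_desc {
	unsigned int sglen;
	struct bar_sg sg[];		/* flexible array member */
};

static struct bar_desc *bar_desc_alloc(unsigned int sglen)
{
	struct bar_desc *d;

	/*
	 * struct_size() computes sizeof(*d) + sglen * sizeof(d->sg[0])
	 * with overflow checking; GFP_ATOMIC because dmaengine prep
	 * callbacks may run in non-sleepable context.
	 */
	d = kzalloc(struct_size(d, sg, sglen), GFP_ATOMIC);
	if (d)
		d->sglen = sglen;
	return d;
}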
d                 483 drivers/dma/txx9dmac.c 		struct txx9dmac_hwdesc32 *d = (struct txx9dmac_hwdesc32 *)desc;
d                 487 drivers/dma/txx9dmac.c 			 d->CHAR, d->SAR, d->DAR, d->CNTR);
d                 492 drivers/dma/txx9dmac.c 			 d->CHAR, d->SAR, d->DAR, d->CNTR,
d                 493 drivers/dma/txx9dmac.c 			 d->SAIR, d->DAIR, d->CCR, d->CSR);
d                 138 drivers/dma/zx_dma.c static void zx_dma_terminate_chan(struct zx_dma_phy *phy, struct zx_dma_dev *d)
d                 148 drivers/dma/zx_dma.c 	writel_relaxed(val, d->base + REG_ZX_TC_IRQ_RAW);
d                 149 drivers/dma/zx_dma.c 	writel_relaxed(val, d->base + REG_ZX_SRC_ERR_IRQ_RAW);
d                 150 drivers/dma/zx_dma.c 	writel_relaxed(val, d->base + REG_ZX_DST_ERR_IRQ_RAW);
d                 151 drivers/dma/zx_dma.c 	writel_relaxed(val, d->base + REG_ZX_CFG_ERR_IRQ_RAW);
d                 171 drivers/dma/zx_dma.c static u32 zx_dma_get_chan_stat(struct zx_dma_dev *d)
d                 173 drivers/dma/zx_dma.c 	return readl_relaxed(d->base + REG_ZX_STATUS);
d                 176 drivers/dma/zx_dma.c static void zx_dma_init_state(struct zx_dma_dev *d)
d                 179 drivers/dma/zx_dma.c 	writel_relaxed(0x0, d->base + REG_ZX_DMA_ARB);
d                 181 drivers/dma/zx_dma.c 	writel_relaxed(0xffffffff, d->base + REG_ZX_TC_IRQ_RAW);
d                 182 drivers/dma/zx_dma.c 	writel_relaxed(0xffffffff, d->base + REG_ZX_SRC_ERR_IRQ_RAW);
d                 183 drivers/dma/zx_dma.c 	writel_relaxed(0xffffffff, d->base + REG_ZX_DST_ERR_IRQ_RAW);
d                 184 drivers/dma/zx_dma.c 	writel_relaxed(0xffffffff, d->base + REG_ZX_CFG_ERR_IRQ_RAW);
d                 189 drivers/dma/zx_dma.c 	struct zx_dma_dev *d = to_zx_dma(c->vc.chan.device);
d                 195 drivers/dma/zx_dma.c 	if (BIT(c->phy->idx) & zx_dma_get_chan_stat(d))
d                 217 drivers/dma/zx_dma.c static void zx_dma_task(struct zx_dma_dev *d)
d                 225 drivers/dma/zx_dma.c 	list_for_each_entry_safe(c, cn, &d->slave.channels,
d                 231 drivers/dma/zx_dma.c 			dev_dbg(d->slave.dev, "pchan %u: free\n", p->idx);
d                 240 drivers/dma/zx_dma.c 	spin_lock_irqsave(&d->lock, flags);
d                 241 drivers/dma/zx_dma.c 	while (!list_empty(&d->chan_pending)) {
d                 242 drivers/dma/zx_dma.c 		c = list_first_entry(&d->chan_pending,
d                 244 drivers/dma/zx_dma.c 		p = &d->phy[c->id];
d                 253 drivers/dma/zx_dma.c 			dev_dbg(d->slave.dev, "pchan %u: busy!\n", c->id);
d                 256 drivers/dma/zx_dma.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 258 drivers/dma/zx_dma.c 	for (pch = 0; pch < d->dma_channels; pch++) {
d                 260 drivers/dma/zx_dma.c 			p = &d->phy[pch];
d                 273 drivers/dma/zx_dma.c 	struct zx_dma_dev *d = (struct zx_dma_dev *)dev_id;
d                 276 drivers/dma/zx_dma.c 	u32 tc = readl_relaxed(d->base + REG_ZX_TC_IRQ);
d                 277 drivers/dma/zx_dma.c 	u32 serr = readl_relaxed(d->base + REG_ZX_SRC_ERR_IRQ);
d                 278 drivers/dma/zx_dma.c 	u32 derr = readl_relaxed(d->base + REG_ZX_DST_ERR_IRQ);
d                 279 drivers/dma/zx_dma.c 	u32 cfg = readl_relaxed(d->base + REG_ZX_CFG_ERR_IRQ);
d                 285 drivers/dma/zx_dma.c 		p = &d->phy[i];
d                 304 drivers/dma/zx_dma.c 		dev_warn(d->slave.dev, "DMA ERR src 0x%x, dst 0x%x, cfg 0x%x\n",
d                 307 drivers/dma/zx_dma.c 	writel_relaxed(irq_chan, d->base + REG_ZX_TC_IRQ_RAW);
d                 308 drivers/dma/zx_dma.c 	writel_relaxed(serr, d->base + REG_ZX_SRC_ERR_IRQ_RAW);
d                 309 drivers/dma/zx_dma.c 	writel_relaxed(derr, d->base + REG_ZX_DST_ERR_IRQ_RAW);
d                 310 drivers/dma/zx_dma.c 	writel_relaxed(cfg, d->base + REG_ZX_CFG_ERR_IRQ_RAW);
d                 313 drivers/dma/zx_dma.c 		zx_dma_task(d);
d                 320 drivers/dma/zx_dma.c 	struct zx_dma_dev *d = to_zx_dma(chan->device);
d                 323 drivers/dma/zx_dma.c 	spin_lock_irqsave(&d->lock, flags);
d                 325 drivers/dma/zx_dma.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 382 drivers/dma/zx_dma.c 	struct zx_dma_dev *d = to_zx_dma(chan->device);
d                 389 drivers/dma/zx_dma.c 		spin_lock(&d->lock);
d                 392 drivers/dma/zx_dma.c 			list_add_tail(&c->node, &d->chan_pending);
d                 394 drivers/dma/zx_dma.c 			dev_dbg(d->slave.dev, "vchan %p: issued\n", &c->vc);
d                 396 drivers/dma/zx_dma.c 		spin_unlock(&d->lock);
d                 398 drivers/dma/zx_dma.c 		dev_dbg(d->slave.dev, "vchan %p: nothing to issue\n", &c->vc);
d                 403 drivers/dma/zx_dma.c 		zx_dma_task(d);
d                 423 drivers/dma/zx_dma.c 	struct zx_dma_dev *d = to_zx_dma(chan->device);
d                 436 drivers/dma/zx_dma.c 	ds->desc_hw = dma_pool_zalloc(d->pool, GFP_NOWAIT, &ds->desc_hw_lli);
d                 662 drivers/dma/zx_dma.c 	struct zx_dma_dev *d = to_zx_dma(chan->device);
d                 667 drivers/dma/zx_dma.c 	dev_dbg(d->slave.dev, "vchan %p: terminate all\n", &c->vc);
d                 670 drivers/dma/zx_dma.c 	spin_lock(&d->lock);
d                 672 drivers/dma/zx_dma.c 	spin_unlock(&d->lock);
d                 679 drivers/dma/zx_dma.c 		zx_dma_terminate_chan(p, d);
d                 719 drivers/dma/zx_dma.c 	struct zx_dma_dev *d = to_zx_dma(vd->tx.chan->device);
d                 721 drivers/dma/zx_dma.c 	dma_pool_free(d->pool, ds->desc_hw, ds->desc_hw_lli);
d                 734 drivers/dma/zx_dma.c 	struct zx_dma_dev *d = ofdma->of_dma_data;
d                 739 drivers/dma/zx_dma.c 	if (request >= d->dma_requests)
d                 742 drivers/dma/zx_dma.c 	chan = dma_get_any_slave_channel(&d->slave);
d                 744 drivers/dma/zx_dma.c 		dev_err(d->slave.dev, "get channel fail in %s.\n", __func__);
d                 749 drivers/dma/zx_dma.c 	dev_info(d->slave.dev, "zx_dma: pchan %u: alloc vchan %p\n",
d                 756 drivers/dma/zx_dma.c 	struct zx_dma_dev *d;
d                 764 drivers/dma/zx_dma.c 	d = devm_kzalloc(&op->dev, sizeof(*d), GFP_KERNEL);
d                 765 drivers/dma/zx_dma.c 	if (!d)
d                 768 drivers/dma/zx_dma.c 	d->base = devm_ioremap_resource(&op->dev, iores);
d                 769 drivers/dma/zx_dma.c 	if (IS_ERR(d->base))
d                 770 drivers/dma/zx_dma.c 		return PTR_ERR(d->base);
d                 773 drivers/dma/zx_dma.c 			     "dma-channels", &d->dma_channels);
d                 775 drivers/dma/zx_dma.c 			     "dma-requests", &d->dma_requests);
d                 776 drivers/dma/zx_dma.c 	if (!d->dma_requests || !d->dma_channels)
d                 779 drivers/dma/zx_dma.c 	d->clk = devm_clk_get(&op->dev, NULL);
d                 780 drivers/dma/zx_dma.c 	if (IS_ERR(d->clk)) {
d                 782 drivers/dma/zx_dma.c 		return PTR_ERR(d->clk);
d                 785 drivers/dma/zx_dma.c 	d->irq = platform_get_irq(op, 0);
d                 786 drivers/dma/zx_dma.c 	ret = devm_request_irq(&op->dev, d->irq, zx_dma_int_handler,
d                 787 drivers/dma/zx_dma.c 			       0, DRIVER_NAME, d);
d                 792 drivers/dma/zx_dma.c 	d->pool = dmam_pool_create(DRIVER_NAME, &op->dev,
d                 794 drivers/dma/zx_dma.c 	if (!d->pool)
d                 798 drivers/dma/zx_dma.c 	d->phy = devm_kcalloc(&op->dev,
d                 799 drivers/dma/zx_dma.c 		d->dma_channels, sizeof(struct zx_dma_phy), GFP_KERNEL);
d                 800 drivers/dma/zx_dma.c 	if (!d->phy)
d                 803 drivers/dma/zx_dma.c 	for (i = 0; i < d->dma_channels; i++) {
d                 804 drivers/dma/zx_dma.c 		struct zx_dma_phy *p = &d->phy[i];
d                 807 drivers/dma/zx_dma.c 		p->base = d->base + i * 0x40;
d                 810 drivers/dma/zx_dma.c 	INIT_LIST_HEAD(&d->slave.channels);
d                 811 drivers/dma/zx_dma.c 	dma_cap_set(DMA_SLAVE, d->slave.cap_mask);
d                 812 drivers/dma/zx_dma.c 	dma_cap_set(DMA_MEMCPY, d->slave.cap_mask);
d                 813 drivers/dma/zx_dma.c 	dma_cap_set(DMA_CYCLIC, d->slave.cap_mask);
d                 814 drivers/dma/zx_dma.c 	dma_cap_set(DMA_PRIVATE, d->slave.cap_mask);
d                 815 drivers/dma/zx_dma.c 	d->slave.dev = &op->dev;
d                 816 drivers/dma/zx_dma.c 	d->slave.device_free_chan_resources = zx_dma_free_chan_resources;
d                 817 drivers/dma/zx_dma.c 	d->slave.device_tx_status = zx_dma_tx_status;
d                 818 drivers/dma/zx_dma.c 	d->slave.device_prep_dma_memcpy = zx_dma_prep_memcpy;
d                 819 drivers/dma/zx_dma.c 	d->slave.device_prep_slave_sg = zx_dma_prep_slave_sg;
d                 820 drivers/dma/zx_dma.c 	d->slave.device_prep_dma_cyclic = zx_dma_prep_dma_cyclic;
d                 821 drivers/dma/zx_dma.c 	d->slave.device_issue_pending = zx_dma_issue_pending;
d                 822 drivers/dma/zx_dma.c 	d->slave.device_config = zx_dma_config;
d                 823 drivers/dma/zx_dma.c 	d->slave.device_terminate_all = zx_dma_terminate_all;
d                 824 drivers/dma/zx_dma.c 	d->slave.device_pause = zx_dma_transfer_pause;
d                 825 drivers/dma/zx_dma.c 	d->slave.device_resume = zx_dma_transfer_resume;
d                 826 drivers/dma/zx_dma.c 	d->slave.copy_align = DMA_ALIGN;
d                 827 drivers/dma/zx_dma.c 	d->slave.src_addr_widths = ZX_DMA_BUSWIDTHS;
d                 828 drivers/dma/zx_dma.c 	d->slave.dst_addr_widths = ZX_DMA_BUSWIDTHS;
d                 829 drivers/dma/zx_dma.c 	d->slave.directions = BIT(DMA_MEM_TO_MEM) | BIT(DMA_MEM_TO_DEV)
d                 831 drivers/dma/zx_dma.c 	d->slave.residue_granularity = DMA_RESIDUE_GRANULARITY_SEGMENT;
d                 834 drivers/dma/zx_dma.c 	d->chans = devm_kcalloc(&op->dev,
d                 835 drivers/dma/zx_dma.c 		d->dma_requests, sizeof(struct zx_dma_chan), GFP_KERNEL);
d                 836 drivers/dma/zx_dma.c 	if (!d->chans)
d                 839 drivers/dma/zx_dma.c 	for (i = 0; i < d->dma_requests; i++) {
d                 840 drivers/dma/zx_dma.c 		struct zx_dma_chan *c = &d->chans[i];
d                 845 drivers/dma/zx_dma.c 		vchan_init(&c->vc, &d->slave);
d                 849 drivers/dma/zx_dma.c 	ret = clk_prepare_enable(d->clk);
d                 855 drivers/dma/zx_dma.c 	zx_dma_init_state(d);
d                 857 drivers/dma/zx_dma.c 	spin_lock_init(&d->lock);
d                 858 drivers/dma/zx_dma.c 	INIT_LIST_HEAD(&d->chan_pending);
d                 859 drivers/dma/zx_dma.c 	platform_set_drvdata(op, d);
d                 861 drivers/dma/zx_dma.c 	ret = dma_async_device_register(&d->slave);
d                 866 drivers/dma/zx_dma.c 					 zx_of_dma_simple_xlate, d);
d                 874 drivers/dma/zx_dma.c 	dma_async_device_unregister(&d->slave);
d                 876 drivers/dma/zx_dma.c 	clk_disable_unprepare(d->clk);
d                 884 drivers/dma/zx_dma.c 	struct zx_dma_dev *d = platform_get_drvdata(op);
d                 887 drivers/dma/zx_dma.c 	devm_free_irq(&op->dev, d->irq, d);
d                 889 drivers/dma/zx_dma.c 	dma_async_device_unregister(&d->slave);
d                 892 drivers/dma/zx_dma.c 	list_for_each_entry_safe(c, cn, &d->slave.channels,
d                 896 drivers/dma/zx_dma.c 	clk_disable_unprepare(d->clk);
d                 897 drivers/dma/zx_dma.c 	dmam_pool_destroy(d->pool);
d                 905 drivers/dma/zx_dma.c 	struct zx_dma_dev *d = dev_get_drvdata(dev);
d                 908 drivers/dma/zx_dma.c 	stat = zx_dma_get_chan_stat(d);
d                 910 drivers/dma/zx_dma.c 		dev_warn(d->slave.dev,
d                 914 drivers/dma/zx_dma.c 	clk_disable_unprepare(d->clk);
d                 920 drivers/dma/zx_dma.c 	struct zx_dma_dev *d = dev_get_drvdata(dev);
d                 923 drivers/dma/zx_dma.c 	ret = clk_prepare_enable(d->clk);
d                 925 drivers/dma/zx_dma.c 		dev_err(d->slave.dev, "clk_prepare_enable failed: %d\n", ret);
d                 928 drivers/dma/zx_dma.c 	zx_dma_init_state(d);
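The zx_dma probe entries show a typical devm-managed platform probe: allocate the device state, map the MMIO region, and read the required DT properties before registering anything. A condensed, hedged sketch under invented "acme" names:

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/io.h>
#include <linux/of.h>
#include <linux/platform_device.h>

struct acme_dma_dev {
	void __iomem *base;
	u32 dma_channels;
	u32 dma_requests;
};

static int acme_dma_probe(struct platform_device *op)
{
	struct acme_dma_dev *d;
	struct resource *iores;

	d = devm_kzalloc(&op->dev, sizeof(*d), GFP_KERNEL);
	if (!d)
		return -ENOMEM;

	iores = platform_get_resource(op, IORESOURCE_MEM, 0);
	d->base = devm_ioremap_resource(&op->dev, iores);
	if (IS_ERR(d->base))
		return PTR_ERR(d->base);

	/*
	 * Both properties are required here, mirroring the
	 * "!d->dma_requests || !d->dma_channels" check in zx_dma.
	 */
	of_property_read_u32(op->dev.of_node, "dma-channels",
			     &d->dma_channels);
	of_property_read_u32(op->dev.of_node, "dma-requests",
			     &d->dma_requests);
	if (!d->dma_channels || !d->dma_requests)
		return -EINVAL;

	platform_set_drvdata(op, d);
	return 0;
}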
d                2083 drivers/edac/altera_edac.c static void a10_eccmgr_irq_mask(struct irq_data *d)
d                2085 drivers/edac/altera_edac.c 	struct altr_arria10_edac *edac = irq_data_get_irq_chip_data(d);
d                2088 drivers/edac/altera_edac.c 		     BIT(d->hwirq));
d                2091 drivers/edac/altera_edac.c static void a10_eccmgr_irq_unmask(struct irq_data *d)
d                2093 drivers/edac/altera_edac.c 	struct altr_arria10_edac *edac = irq_data_get_irq_chip_data(d);
d                2096 drivers/edac/altera_edac.c 		     BIT(d->hwirq));
d                2099 drivers/edac/altera_edac.c static int a10_eccmgr_irqdomain_map(struct irq_domain *d, unsigned int irq,
d                2102 drivers/edac/altera_edac.c 	struct altr_arria10_edac *edac = d->host_data;
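The altera_edac entries above are a textbook irq_chip mask/unmask pair: recover the driver data from the irq_data, then set or clear the per-line bit derived from d->hwirq. A hedged sketch using regmap set/clear registers; names are invented and the sketch assumes fewer than 32 lines per chip:

#include <linux/bits.h>
#include <linux/irq.h>
#include <linux/regmap.h>

struct baz_irqchip {
	struct regmap *map;
	unsigned int set_reg;	/* write 1 to mask a line */
	unsigned int clr_reg;	/* write 1 to unmask a line */
};

static void baz_irq_mask(struct irq_data *d)
{
	struct baz_irqchip *chip = irq_data_get_irq_chip_data(d);

	/*
	 * d->hwirq is the line number within this chip, so BIT(d->hwirq)
	 * selects the per-line bit in the mask register.
	 */
	regmap_write(chip->map, chip->set_reg, BIT(d->hwirq));
}

static void baz_irq_unmask(struct irq_data *d)
{
	struct baz_irqchip *chip = irq_data_get_irq_chip_data(d);

	regmap_write(chip->map, chip->clr_reg, BIT(d->hwirq));
}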
d                  22 drivers/edac/i10nm_base.c #define I10NM_GET_SCK_BAR(d, reg)		\
d                  23 drivers/edac/i10nm_base.c 	pci_read_config_dword((d)->uracu, 0xd0, &(reg))
d                  24 drivers/edac/i10nm_base.c #define I10NM_GET_IMC_BAR(d, i, reg)	\
d                  25 drivers/edac/i10nm_base.c 	pci_read_config_dword((d)->uracu, 0xd8 + (i) * 4, &(reg))
d                  66 drivers/edac/i10nm_base.c 	struct skx_dev *d;
d                  71 drivers/edac/i10nm_base.c 	list_for_each_entry(d, i10nm_edac_list, list) {
d                  72 drivers/edac/i10nm_base.c 		d->util_all = pci_get_dev_wrapper(d->seg, d->bus[1], 29, 1);
d                  73 drivers/edac/i10nm_base.c 		if (!d->util_all)
d                  76 drivers/edac/i10nm_base.c 		d->uracu = pci_get_dev_wrapper(d->seg, d->bus[0], 0, 1);
d                  77 drivers/edac/i10nm_base.c 		if (!d->uracu)
d                  80 drivers/edac/i10nm_base.c 		if (I10NM_GET_SCK_BAR(d, reg)) {
d                  90 drivers/edac/i10nm_base.c 			mdev = pci_get_dev_wrapper(d->seg, d->bus[0],
d                  99 drivers/edac/i10nm_base.c 			d->imc[i].mdev = mdev;
d                 101 drivers/edac/i10nm_base.c 			if (I10NM_GET_IMC_BAR(d, i, reg)) {
d                 118 drivers/edac/i10nm_base.c 			d->imc[i].mbase = mbase;
d                 239 drivers/edac/i10nm_base.c 	struct skx_dev *d;
d                 269 drivers/edac/i10nm_base.c 	list_for_each_entry(d, i10nm_edac_list, list) {
d                 270 drivers/edac/i10nm_base.c 		rc = skx_get_src_id(d, 0xf8, &src_id);
d                 274 drivers/edac/i10nm_base.c 		rc = skx_get_node_id(d, &node_id);
d                 280 drivers/edac/i10nm_base.c 			if (!d->imc[i].mdev)
d                 283 drivers/edac/i10nm_base.c 			d->imc[i].mc  = mc++;
d                 284 drivers/edac/i10nm_base.c 			d->imc[i].lmc = i;
d                 285 drivers/edac/i10nm_base.c 			d->imc[i].src_id  = src_id;
d                 286 drivers/edac/i10nm_base.c 			d->imc[i].node_id = node_id;
d                 288 drivers/edac/i10nm_base.c 			rc = skx_register_mci(&d->imc[i], d->imc[i].mdev,
d                 932 drivers/edac/pnd2_edac.c 	struct dimm_geometry *d = &dimms[g];
d                 944 drivers/edac/pnd2_edac.c 		type = d->bits[i + skiprs] & ~0xf;
d                 945 drivers/edac/pnd2_edac.c 		idx = d->bits[i + skiprs] & 0xf;
d                 953 drivers/edac/pnd2_edac.c 			type = d->bits[i + skiprs] & ~0xf;
d                 954 drivers/edac/pnd2_edac.c 			idx = d->bits[i + skiprs] & 0xf;
d                 964 drivers/edac/pnd2_edac.c 				bank ^= bank_hash(pmiaddr, idx, d->addrdec);
d                1095 drivers/edac/pnd2_edac.c #define DIMMS_PRESENT(d) ((d)->rken0 + (d)->rken1 + (d)->rken2 + (d)->rken3)
d                1099 drivers/edac/pnd2_edac.c 	struct d_cr_drp *d = &drp[ch];
d                1101 drivers/edac/pnd2_edac.c 	if (DIMMS_PRESENT(d) && !ecc_ctrl[ch].eccen) {
d                1226 drivers/edac/pnd2_edac.c 	struct d_cr_drp0 *d;
d                1240 drivers/edac/pnd2_edac.c 		d = &drp0[i];
d                1242 drivers/edac/pnd2_edac.c 			if (dimms[g].addrdec == d->addrdec &&
d                1243 drivers/edac/pnd2_edac.c 			    dimms[g].dden == d->dden &&
d                1244 drivers/edac/pnd2_edac.c 			    dimms[g].dwid == d->dwid)
d                1253 drivers/edac/pnd2_edac.c 		capacity = (d->rken0 + d->rken1) * 8 * (1ul << dimms[g].rowbits) *
d                1258 drivers/edac/pnd2_edac.c 		dimm->dtype = (d->dwid == 0) ? DEV_X8 : DEV_X16;
d                1273 drivers/edac/pnd2_edac.c 	struct d_cr_drp *d;
d                1304 drivers/edac/pnd2_edac.c 		d = &drp[i];
d                1306 drivers/edac/pnd2_edac.c 		ranks_of_dimm[0] = d->rken0 + d->rken1;
d                1308 drivers/edac/pnd2_edac.c 		ranks_of_dimm[1] = d->rken2 + d->rken3;
d                1324 drivers/edac/pnd2_edac.c 			dimm->dtype = dnv_dtypes[j ? d->dimmdwid0 : d->dimmdwid1];
d                  38 drivers/edac/skx_base.c 	struct skx_dev *d;
d                  40 drivers/edac/skx_base.c 	list_for_each_entry(d, skx_edac_list, list) {
d                  41 drivers/edac/skx_base.c 		if (d->seg == pci_domain_nr(bus) && d->bus[idx] == bus->number)
d                  42 drivers/edac/skx_base.c 			return d;
d                  78 drivers/edac/skx_base.c 	struct skx_dev *d;
d                  95 drivers/edac/skx_base.c 		d = get_skx_dev(pdev->bus, m->busidx);
d                  96 drivers/edac/skx_base.c 		if (!d)
d                 109 drivers/edac/skx_base.c 			d->imc[i].chan[m->mtype].cdev = pdev;
d                 113 drivers/edac/skx_base.c 			d->sad_all = pdev;
d                 117 drivers/edac/skx_base.c 			d->util_all = pdev;
d                 128 drivers/edac/skx_base.c 				if (d->mcroute == 0) {
d                 129 drivers/edac/skx_base.c 					d->mcroute = reg;
d                 130 drivers/edac/skx_base.c 				} else if (d->mcroute != reg) {
d                 199 drivers/edac/skx_base.c #define SKX_GET_SAD(d, i, reg)	\
d                 200 drivers/edac/skx_base.c 	pci_read_config_dword((d)->sad_all, 0x60 + 8 * (i), &(reg))
d                 201 drivers/edac/skx_base.c #define SKX_GET_ILV(d, i, reg)	\
d                 202 drivers/edac/skx_base.c 	pci_read_config_dword((d)->sad_all, 0x64 + 8 * (i), &(reg))
d                 217 drivers/edac/skx_base.c 	struct skx_dev *d = list_first_entry(skx_edac_list, typeof(*d), list);
d                 233 drivers/edac/skx_base.c 		SKX_GET_SAD(d, i, sad);
d                 245 drivers/edac/skx_base.c 	SKX_GET_ILV(d, i, ilv);
d                 271 drivers/edac/skx_base.c 		list_for_each_entry(d, skx_edac_list, list) {
d                 272 drivers/edac/skx_base.c 			if (d->imc[0].src_id == SKX_ILV_TARGET(tgt))
d                 314 drivers/edac/skx_base.c 	res->dev = d;
d                 315 drivers/edac/skx_base.c 	res->socket = d->imc[0].src_id;
d                 316 drivers/edac/skx_base.c 	res->imc = GET_BITFIELD(d->mcroute, lchan * 3, lchan * 3 + 2);
d                 317 drivers/edac/skx_base.c 	res->channel = GET_BITFIELD(d->mcroute, lchan * 2 + 18, lchan * 2 + 19);
d                 326 drivers/edac/skx_base.c #define SKX_GET_TADBASE(d, mc, i, reg)			\
d                 327 drivers/edac/skx_base.c 	pci_read_config_dword((d)->imc[mc].chan[0].cdev, 0x850 + 4 * (i), &(reg))
d                 328 drivers/edac/skx_base.c #define SKX_GET_TADWAYNESS(d, mc, i, reg)		\
d                 329 drivers/edac/skx_base.c 	pci_read_config_dword((d)->imc[mc].chan[0].cdev, 0x880 + 4 * (i), &(reg))
d                 330 drivers/edac/skx_base.c #define SKX_GET_TADCHNILVOFFSET(d, mc, ch, i, reg)	\
d                 331 drivers/edac/skx_base.c 	pci_read_config_dword((d)->imc[mc].chan[ch].cdev, 0x90 + 4 * (i), &(reg))
d                 401 drivers/edac/skx_base.c #define SKX_GET_RIRWAYNESS(d, mc, ch, i, reg)		\
d                 402 drivers/edac/skx_base.c 	pci_read_config_dword((d)->imc[mc].chan[ch].cdev,	\
d                 404 drivers/edac/skx_base.c #define SKX_GET_RIRILV(d, mc, ch, idx, i, reg)		\
d                 405 drivers/edac/skx_base.c 	pci_read_config_dword((d)->imc[mc].chan[ch].cdev,	\
d                 600 drivers/edac/skx_base.c 	struct skx_dev *d;
d                 637 drivers/edac/skx_base.c 	list_for_each_entry(d, skx_edac_list, list) {
d                 638 drivers/edac/skx_base.c 		rc = skx_get_src_id(d, 0xf0, &src_id);
d                 641 drivers/edac/skx_base.c 		rc = skx_get_node_id(d, &node_id);
d                 646 drivers/edac/skx_base.c 			d->imc[i].mc = mc++;
d                 647 drivers/edac/skx_base.c 			d->imc[i].lmc = i;
d                 648 drivers/edac/skx_base.c 			d->imc[i].src_id = src_id;
d                 649 drivers/edac/skx_base.c 			d->imc[i].node_id = node_id;
d                 650 drivers/edac/skx_base.c 			rc = skx_register_mci(&d->imc[i], d->imc[i].chan[0].cdev,
d                 139 drivers/edac/skx_common.c int skx_get_src_id(struct skx_dev *d, int off, u8 *id)
d                 143 drivers/edac/skx_common.c 	if (pci_read_config_dword(d->util_all, off, &reg)) {
d                 152 drivers/edac/skx_common.c int skx_get_node_id(struct skx_dev *d, u8 *id)
d                 156 drivers/edac/skx_common.c 	if (pci_read_config_dword(d->util_all, 0xf4, &reg)) {
d                 187 drivers/edac/skx_common.c 	struct skx_dev *d;
d                 197 drivers/edac/skx_common.c 		d = kzalloc(sizeof(*d), GFP_KERNEL);
d                 198 drivers/edac/skx_common.c 		if (!d) {
d                 204 drivers/edac/skx_common.c 			kfree(d);
d                 210 drivers/edac/skx_common.c 		d->bus[0] = GET_BITFIELD(reg, 0, 7);
d                 211 drivers/edac/skx_common.c 		d->bus[1] = GET_BITFIELD(reg, 8, 15);
d                 213 drivers/edac/skx_common.c 			d->seg = pci_domain_nr(pdev->bus);
d                 214 drivers/edac/skx_common.c 			d->bus[2] = GET_BITFIELD(reg, 16, 23);
d                 215 drivers/edac/skx_common.c 			d->bus[3] = GET_BITFIELD(reg, 24, 31);
d                 217 drivers/edac/skx_common.c 			d->seg = GET_BITFIELD(reg, 16, 23);
d                 221 drivers/edac/skx_common.c 			 d->bus[0], d->bus[1], d->bus[2], d->bus[3]);
d                 222 drivers/edac/skx_common.c 		list_add_tail(&d->list, &dev_edac_list);
d                 457 drivers/edac/skx_common.c 	struct skx_dev *d;
d                 464 drivers/edac/skx_common.c 	list_for_each_entry(d, &dev_edac_list, list) {
d                 465 drivers/edac/skx_common.c 		if (d->imc[0].src_id == src_id)
d                 466 drivers/edac/skx_common.c 			return d->imc[lmc].mci;
d                 624 drivers/edac/skx_common.c 	struct skx_dev *d, *tmp;
d                 628 drivers/edac/skx_common.c 	list_for_each_entry_safe(d, tmp, &dev_edac_list, list) {
d                 629 drivers/edac/skx_common.c 		list_del(&d->list);
d                 631 drivers/edac/skx_common.c 			if (d->imc[i].mci)
d                 632 drivers/edac/skx_common.c 				skx_unregister_mci(&d->imc[i]);
d                 634 drivers/edac/skx_common.c 			if (d->imc[i].mdev)
d                 635 drivers/edac/skx_common.c 				pci_dev_put(d->imc[i].mdev);
d                 637 drivers/edac/skx_common.c 			if (d->imc[i].mbase)
d                 638 drivers/edac/skx_common.c 				iounmap(d->imc[i].mbase);
d                 641 drivers/edac/skx_common.c 				if (d->imc[i].chan[j].cdev)
d                 642 drivers/edac/skx_common.c 					pci_dev_put(d->imc[i].chan[j].cdev);
d                 645 drivers/edac/skx_common.c 		if (d->util_all)
d                 646 drivers/edac/skx_common.c 			pci_dev_put(d->util_all);
d                 647 drivers/edac/skx_common.c 		if (d->sad_all)
d                 648 drivers/edac/skx_common.c 			pci_dev_put(d->sad_all);
d                 649 drivers/edac/skx_common.c 		if (d->uracu)
d                 650 drivers/edac/skx_common.c 			pci_dev_put(d->uracu);
d                 652 drivers/edac/skx_common.c 		kfree(d);
d                 121 drivers/edac/skx_common.h int skx_get_src_id(struct skx_dev *d, int off, u8 *id);
d                 122 drivers/edac/skx_common.h int skx_get_node_id(struct skx_dev *d, u8 *id);
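The skx teardown entries use list_for_each_entry_safe so each skx_dev can be unlinked and freed while the list is still being walked. A minimal sketch of that idiom with an invented widget type:

#include <linux/list.h>
#include <linux/slab.h>

struct widget {
	struct list_head list;
	int id;
};

static void widget_list_destroy(struct list_head *head)
{
	struct widget *w, *tmp;

	/*
	 * The _safe variant caches the next pointer in tmp, so deleting
	 * and freeing the current entry does not break the walk.
	 */
	list_for_each_entry_safe(w, tmp, head, list) {
		list_del(&w->list);
		kfree(w);
	}
}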
d                 390 drivers/firewire/core-transaction.c 	struct transaction_callback_data *d = data;
d                 393 drivers/firewire/core-transaction.c 		memcpy(d->payload, payload, length);
d                 394 drivers/firewire/core-transaction.c 	d->rcode = rcode;
d                 395 drivers/firewire/core-transaction.c 	complete(&d->done);
d                 418 drivers/firewire/core-transaction.c 	struct transaction_callback_data d;
d                 422 drivers/firewire/core-transaction.c 	init_completion(&d.done);
d                 423 drivers/firewire/core-transaction.c 	d.payload = payload;
d                 425 drivers/firewire/core-transaction.c 			offset, payload, length, transaction_callback, &d);
d                 426 drivers/firewire/core-transaction.c 	wait_for_completion(&d.done);
d                 429 drivers/firewire/core-transaction.c 	return d.rcode;
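The core-transaction entries show the completion pattern: an on-stack context holding a struct completion turns an asynchronous callback API into a blocking call. A self-contained sketch with invented qux_* names; the async submit is stubbed inline so the example stands alone:

#include <linux/completion.h>

struct qux_result {
	int status;
	struct completion done;
};

static void qux_callback(int status, void *data)
{
	struct qux_result *r = data;

	r->status = status;
	complete(&r->done);		/* wake the waiter */
}

/* Stand-in for a real async submit: invokes the callback inline so the
 * sketch is self-contained. */
static void qux_send_async(void (*cb)(int status, void *data), void *data)
{
	cb(0, data);
}

static int qux_send_sync(void)
{
	struct qux_result r;

	init_completion(&r.done);
	qux_send_async(qux_callback, &r);
	wait_for_completion(&r.done);	/* sleeps until the callback fires */

	return r.status;
}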
d                 102 drivers/firewire/ohci.c 				     struct descriptor *d,
d                 659 drivers/firewire/ohci.c 	struct descriptor *d;
d                 661 drivers/firewire/ohci.c 	d = &ctx->descriptors[index];
d                 662 drivers/firewire/ohci.c 	d->branch_address  &= cpu_to_le32(~0xf);
d                 663 drivers/firewire/ohci.c 	d->res_count       =  cpu_to_le16(PAGE_SIZE);
d                 664 drivers/firewire/ohci.c 	d->transfer_status =  0;
d                 667 drivers/firewire/ohci.c 	d = &ctx->descriptors[ctx->last_buffer_index];
d                 668 drivers/firewire/ohci.c 	d->branch_address  |= cpu_to_le32(1);
d                 976 drivers/firewire/ohci.c 	struct descriptor *d;
d                1008 drivers/firewire/ohci.c 		d = &ctx->descriptors[i];
d                1009 drivers/firewire/ohci.c 		d->req_count      = cpu_to_le16(PAGE_SIZE);
d                1010 drivers/firewire/ohci.c 		d->control        = cpu_to_le16(DESCRIPTOR_INPUT_MORE |
d                1013 drivers/firewire/ohci.c 		d->data_address   = cpu_to_le32(ar_buffer_bus(ctx, i));
d                1014 drivers/firewire/ohci.c 		d->branch_address = cpu_to_le32(ctx->descriptors_bus +
d                1039 drivers/firewire/ohci.c static struct descriptor *find_branch_descriptor(struct descriptor *d, int z)
d                1043 drivers/firewire/ohci.c 	branch = d->control & cpu_to_le16(DESCRIPTOR_BRANCH_ALWAYS);
d                1047 drivers/firewire/ohci.c 		return d;
d                1049 drivers/firewire/ohci.c 		return d + z - 1;
d                1055 drivers/firewire/ohci.c 	struct descriptor *d, *last;
d                1076 drivers/firewire/ohci.c 		d = desc->buffer + (address - desc->buffer_bus) / sizeof(*d);
d                1077 drivers/firewire/ohci.c 		last = find_branch_descriptor(d, z);
d                1079 drivers/firewire/ohci.c 		if (!ctx->callback(ctx, d, last))
d                1182 drivers/firewire/ohci.c 	struct descriptor *d = NULL;
d                1185 drivers/firewire/ohci.c 	if (z * sizeof(*d) > desc->buffer_size)
d                1188 drivers/firewire/ohci.c 	if (z * sizeof(*d) > desc->buffer_size - desc->used) {
d                1203 drivers/firewire/ohci.c 	d = desc->buffer + desc->used / sizeof(*d);
d                1204 drivers/firewire/ohci.c 	memset(d, 0, z * sizeof(*d));
d                1207 drivers/firewire/ohci.c 	return d;
d                1223 drivers/firewire/ohci.c 			   struct descriptor *d, int z, int extra)
d                1229 drivers/firewire/ohci.c 	d_bus = desc->buffer_bus + (d - desc->buffer) * sizeof(*d);
d                1231 drivers/firewire/ohci.c 	desc->used += (z + extra) * sizeof(*d);
d                1254 drivers/firewire/ohci.c 	ctx->prev = d;
d                1294 drivers/firewire/ohci.c 	struct descriptor *d, *last;
d                1298 drivers/firewire/ohci.c 	d = context_get_descriptors(ctx, 4, &d_bus);
d                1299 drivers/firewire/ohci.c 	if (d == NULL) {
d                1304 drivers/firewire/ohci.c 	d[0].control   = cpu_to_le16(DESCRIPTOR_KEY_IMMEDIATE);
d                1305 drivers/firewire/ohci.c 	d[0].res_count = cpu_to_le16(packet->timestamp);
d                1314 drivers/firewire/ohci.c 	header = (__le32 *) &d[1];
d                1336 drivers/firewire/ohci.c 		d[0].req_count = cpu_to_le16(packet->header_length);
d                1344 drivers/firewire/ohci.c 		d[0].req_count = cpu_to_le16(12);
d                1347 drivers/firewire/ohci.c 			d[0].control |= cpu_to_le16(DESCRIPTOR_PING);
d                1354 drivers/firewire/ohci.c 		d[0].req_count = cpu_to_le16(8);
d                1364 drivers/firewire/ohci.c 	driver_data = (struct driver_data *) &d[3];
d                1383 drivers/firewire/ohci.c 			payload_bus = d_bus + 3 * sizeof(*d);
d                1386 drivers/firewire/ohci.c 		d[2].req_count    = cpu_to_le16(packet->payload_length);
d                1387 drivers/firewire/ohci.c 		d[2].data_address = cpu_to_le32(payload_bus);
d                1388 drivers/firewire/ohci.c 		last = &d[2];
d                1391 drivers/firewire/ohci.c 		last = &d[0];
d                1408 drivers/firewire/ohci.c 	context_append(ctx, d, z, 4 - z);
d                1430 drivers/firewire/ohci.c 			    struct descriptor *d,
d                1442 drivers/firewire/ohci.c 	driver_data = (struct driver_data *) &d[3];
d                2762 drivers/firewire/ohci.c 				       struct descriptor *d,
d                2770 drivers/firewire/ohci.c 	for (pd = d; pd <= last; pd++)
d                2777 drivers/firewire/ohci.c 	while (!(d->control & cpu_to_le16(DESCRIPTOR_BRANCH_ALWAYS))) {
d                2778 drivers/firewire/ohci.c 		d++;
d                2779 drivers/firewire/ohci.c 		buffer_dma = le32_to_cpu(d->data_address);
d                2783 drivers/firewire/ohci.c 					      le16_to_cpu(d->req_count),
d                2797 drivers/firewire/ohci.c 				 struct descriptor *d,
d                2885 drivers/firewire/ohci.c 			    struct descriptor *d,
d                2893 drivers/firewire/ohci.c 	for (pd = d; pd <= last; pd++)
d                2900 drivers/firewire/ohci.c 	sync_it_packet_for_cpu(context, d);
d                3197 drivers/firewire/ohci.c 	struct descriptor *d, *last, *pd;
d                3225 drivers/firewire/ohci.c 	header_z = DIV_ROUND_UP(p->header_length, sizeof(*d));
d                3227 drivers/firewire/ohci.c 	d = context_get_descriptors(&ctx->context, z + header_z, &d_bus);
d                3228 drivers/firewire/ohci.c 	if (d == NULL)
d                3232 drivers/firewire/ohci.c 		d[0].control   = cpu_to_le16(DESCRIPTOR_KEY_IMMEDIATE);
d                3233 drivers/firewire/ohci.c 		d[0].req_count = cpu_to_le16(8);
d                3241 drivers/firewire/ohci.c 		d[0].branch_address = cpu_to_le32(d_bus | z);
d                3243 drivers/firewire/ohci.c 		header = (__le32 *) &d[1];
d                3255 drivers/firewire/ohci.c 		d[2].req_count    = cpu_to_le16(p->header_length);
d                3256 drivers/firewire/ohci.c 		d[2].data_address = cpu_to_le32(d_bus + z * sizeof(*d));
d                3257 drivers/firewire/ohci.c 		memcpy(&d[z], p->header, p->header_length);
d                3260 drivers/firewire/ohci.c 	pd = d + z - payload_z;
d                3285 drivers/firewire/ohci.c 	last = z == 2 ? d : d + z - 1;
d                3291 drivers/firewire/ohci.c 	context_append(&ctx->context, d, z, header_z);
d                3302 drivers/firewire/ohci.c 	struct descriptor *d, *pd;
d                3316 drivers/firewire/ohci.c 	header_z = DIV_ROUND_UP(header_size, sizeof(*d));
d                3324 drivers/firewire/ohci.c 		d = context_get_descriptors(&ctx->context,
d                3326 drivers/firewire/ohci.c 		if (d == NULL)
d                3329 drivers/firewire/ohci.c 		d->control      = cpu_to_le16(DESCRIPTOR_STATUS |
d                3332 drivers/firewire/ohci.c 			d->control |= cpu_to_le16(DESCRIPTOR_WAIT);
d                3333 drivers/firewire/ohci.c 		d->req_count    = cpu_to_le16(header_size);
d                3334 drivers/firewire/ohci.c 		d->res_count    = d->req_count;
d                3335 drivers/firewire/ohci.c 		d->transfer_status = 0;
d                3336 drivers/firewire/ohci.c 		d->data_address = cpu_to_le32(d_bus + (z * sizeof(*d)));
d                3339 drivers/firewire/ohci.c 		pd = d;
d                3371 drivers/firewire/ohci.c 		context_append(&ctx->context, d, z, header_z);
d                3382 drivers/firewire/ohci.c 	struct descriptor *d;
d                3397 drivers/firewire/ohci.c 		d = context_get_descriptors(&ctx->context, 1, &d_bus);
d                3398 drivers/firewire/ohci.c 		if (d == NULL)
d                3401 drivers/firewire/ohci.c 		d->control = cpu_to_le16(DESCRIPTOR_INPUT_MORE |
d                3404 drivers/firewire/ohci.c 			d->control |= cpu_to_le16(DESCRIPTOR_WAIT);
d                3406 drivers/firewire/ohci.c 			d->control |= cpu_to_le16(DESCRIPTOR_IRQ_ALWAYS);
d                3412 drivers/firewire/ohci.c 		d->req_count = cpu_to_le16(length);
d                3413 drivers/firewire/ohci.c 		d->res_count = d->req_count;
d                3414 drivers/firewire/ohci.c 		d->transfer_status = 0;
d                3417 drivers/firewire/ohci.c 		d->data_address = cpu_to_le32(page_bus + offset);
d                3427 drivers/firewire/ohci.c 		context_append(&ctx->context, d, 1, 0);
d                 648 drivers/firewire/sbp2.c 	__be32 d = 0;
d                 653 drivers/firewire/sbp2.c 			   &d, 4);
d                 666 drivers/firewire/sbp2.c 	static __be32 d;
d                 675 drivers/firewire/sbp2.c 			&d, 4, complete_agent_reset_write_no_wait, t);
d                 786 drivers/firewire/sbp2.c 	__be32 d = cpu_to_be32(SBP2_CYCLE_LIMIT | SBP2_RETRY_LIMIT);
d                 790 drivers/firewire/sbp2.c 			   CSR_REGISTER_BASE + CSR_BUSY_TIMEOUT, &d, 4);
d                  62 drivers/firmware/dmi-id.c static void ascii_filter(char *d, const char *s)
d                  67 drivers/firmware/dmi-id.c 			*(d++) = *s;
d                  69 drivers/firmware/dmi-id.c 	*d = 0;
d                 171 drivers/firmware/dmi_scan.c 	const char *d = (const char *) dm;
d                 177 drivers/firmware/dmi_scan.c 	p = dmi_string(dm, d[string]);
d                 187 drivers/firmware/dmi_scan.c 	const u8 *d;
d                 194 drivers/firmware/dmi_scan.c 	d = (u8 *) dm + index;
d                 196 drivers/firmware/dmi_scan.c 		if (d[i] != 0x00)
d                 198 drivers/firmware/dmi_scan.c 		if (d[i] != 0xFF)
d                 215 drivers/firmware/dmi_scan.c 		sprintf(s, "%pUl", d);
d                 217 drivers/firmware/dmi_scan.c 		sprintf(s, "%pUb", d);
d                 225 drivers/firmware/dmi_scan.c 	const u8 *d;
d                 235 drivers/firmware/dmi_scan.c 	d = (u8 *) dm + index;
d                 236 drivers/firmware/dmi_scan.c 	sprintf(s, "%u", *d & 0x7F);
d                 264 drivers/firmware/dmi_scan.c 		const char *d = (char *)(dm + 1) + (i * 2);
d                 267 drivers/firmware/dmi_scan.c 		if ((*d & 0x80) == 0)
d                 270 drivers/firmware/dmi_scan.c 		dmi_save_one_device(*d & 0x7f, dmi_string_nosave(dm, *(d + 1)));
d                 353 drivers/firmware/dmi_scan.c 	const u8 *d = (u8 *)dm;
d                 359 drivers/firmware/dmi_scan.c 	if ((d[0x5] & 0x80) == 0)
d                 362 drivers/firmware/dmi_scan.c 	name = dmi_string_nosave(dm, d[0x4]);
d                 363 drivers/firmware/dmi_scan.c 	dmi_save_dev_pciaddr(d[0x6], *(u16 *)(d + 0x7), d[0x9], d[0xA], name,
d                 365 drivers/firmware/dmi_scan.c 	dmi_save_one_device(d[0x5] & 0x7f, name);
d                 370 drivers/firmware/dmi_scan.c 	const u8 *d = (u8 *)dm;
d                 375 drivers/firmware/dmi_scan.c 	dmi_save_dev_pciaddr(*(u16 *)(d + 0x9), *(u16 *)(d + 0xD), d[0xF],
d                 376 drivers/firmware/dmi_scan.c 			     d[0x10], dmi_string_nosave(dm, d[0x4]),
d                 389 drivers/firmware/dmi_scan.c 	const char *d = (const char *)dm;
d                 401 drivers/firmware/dmi_scan.c 	dmi_memdev[nr].device = dmi_string(dm, d[0x10]);
d                 402 drivers/firmware/dmi_scan.c 	dmi_memdev[nr].bank = dmi_string(dm, d[0x11]);
d                 404 drivers/firmware/dmi_scan.c 	size = get_unaligned((u16 *)&d[0xC]);
d                 414 drivers/firmware/dmi_scan.c 		bytes = (u64)get_unaligned((u32 *)&d[0x1C]) << 20;
d                 852 drivers/firmware/dmi_scan.c 	const struct dmi_system_id *d;
d                 854 drivers/firmware/dmi_scan.c 	for (d = list; !dmi_is_end_of_table(d); d++)
d                 855 drivers/firmware/dmi_scan.c 		if (dmi_matches(d)) {
d                 857 drivers/firmware/dmi_scan.c 			if (d->callback && d->callback(d))
d                 881 drivers/firmware/dmi_scan.c 	const struct dmi_system_id *d;
d                 883 drivers/firmware/dmi_scan.c 	for (d = list; !dmi_is_end_of_table(d); d++)
d                 884 drivers/firmware/dmi_scan.c 		if (dmi_matches(d))
d                 885 drivers/firmware/dmi_scan.c 			return d;
d                 949 drivers/firmware/dmi_scan.c 	struct list_head *d;
d                 951 drivers/firmware/dmi_scan.c 	for (d = head->next; d != &dmi_devices; d = d->next) {
d                 953 drivers/firmware/dmi_scan.c 			list_entry(d, struct dmi_device, list);
d                  24 drivers/firmware/google/coreboot_table.c #define CB_DEV(d) container_of(d, struct coreboot_device, dev)
d                  25 drivers/firmware/google/coreboot_table.c #define CB_DRV(d) container_of(d, struct coreboot_driver, drv)
d                  29 drivers/firmware/meson/meson_sm.c #define CMD(d, s) { .index = (d), .smc_id = (s), }
d                  29 drivers/firmware/qcom_scm-64.c #define QCOM_SCM_ARGS_IMPL(num, a, b, c, d, e, f, g, h, i, j, ...) (\
d                  33 drivers/firmware/qcom_scm-64.c 			   (((d) & 0x3) << 10) | \
d                  77 drivers/firmware/qemu_fw_cfg.c static void fw_cfg_wait_for_control(struct fw_cfg_dma_access *d)
d                  80 drivers/firmware/qemu_fw_cfg.c 		u32 ctrl = be32_to_cpu(READ_ONCE(d->control));
d                  94 drivers/firmware/qemu_fw_cfg.c 	struct fw_cfg_dma_access *d = NULL;
d                  97 drivers/firmware/qemu_fw_cfg.c 	d = kmalloc(sizeof(*d), GFP_KERNEL);
d                  98 drivers/firmware/qemu_fw_cfg.c 	if (!d) {
d                 104 drivers/firmware/qemu_fw_cfg.c 	*d = (struct fw_cfg_dma_access) {
d                 110 drivers/firmware/qemu_fw_cfg.c 	dma = virt_to_phys(d);
d                 117 drivers/firmware/qemu_fw_cfg.c 	fw_cfg_wait_for_control(d);
d                 119 drivers/firmware/qemu_fw_cfg.c 	if (be32_to_cpu(READ_ONCE(d->control)) & FW_CFG_DMA_CTL_ERROR) {
d                 124 drivers/firmware/qemu_fw_cfg.c 	kfree(d);
d                 305 drivers/firmware/tegra/bpmp-debugfs.c 	uint32_t d, t;
d                 310 drivers/firmware/tegra/bpmp-debugfs.c 		err = seqbuf_read_u32(seqbuf, &d);
d                 314 drivers/firmware/tegra/bpmp-debugfs.c 		if (d < depth) {
d                 318 drivers/firmware/tegra/bpmp-debugfs.c 		} else if (d != depth) {
d                 122 drivers/firmware/ti_sci.c 	struct dentry *d;
d                 199 drivers/firmware/ti_sci.c 	info->d = debugfs_create_file(strncat(debug_name, dev_name(dev),
d                 203 drivers/firmware/ti_sci.c 	if (IS_ERR(info->d))
d                 204 drivers/firmware/ti_sci.c 		return PTR_ERR(info->d);
d                 222 drivers/firmware/ti_sci.c 	debugfs_remove(info->d);
d                3526 drivers/firmware/ti_sci.c 	debugfs_remove(info->d);
d                  26 drivers/fpga/xilinx-pr-decoupler.c static inline void xlnx_pr_decoupler_write(struct xlnx_pr_decoupler_data *d,
d                  29 drivers/fpga/xilinx-pr-decoupler.c 	writel(val, d->io_base + offset);
d                  32 drivers/fpga/xilinx-pr-decoupler.c static inline u32 xlnx_pr_decouple_read(const struct xlnx_pr_decoupler_data *d,
d                  35 drivers/fpga/xilinx-pr-decoupler.c 	return readl(d->io_base + offset);
d                  92 drivers/fsi/fsi-core.c #define to_fsi_master(d) container_of(d, struct fsi_master, dev)
d                  93 drivers/fsi/fsi-core.c #define to_fsi_slave(d) container_of(d, struct fsi_slave, dev)
d                  67 drivers/fsi/fsi-master.h #define dev_to_fsi_master(d) container_of(d, struct fsi_master, dev)
d                  34 drivers/gnss/core.c #define to_gnss_device(d) container_of((d), struct gnss_device, dev)
d                 331 drivers/gpio/gpio-adnp.c static void adnp_irq_mask(struct irq_data *d)
d                 333 drivers/gpio/gpio-adnp.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 335 drivers/gpio/gpio-adnp.c 	unsigned int reg = d->hwirq >> adnp->reg_shift;
d                 336 drivers/gpio/gpio-adnp.c 	unsigned int pos = d->hwirq & 7;
d                 341 drivers/gpio/gpio-adnp.c static void adnp_irq_unmask(struct irq_data *d)
d                 343 drivers/gpio/gpio-adnp.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 345 drivers/gpio/gpio-adnp.c 	unsigned int reg = d->hwirq >> adnp->reg_shift;
d                 346 drivers/gpio/gpio-adnp.c 	unsigned int pos = d->hwirq & 7;
d                 351 drivers/gpio/gpio-adnp.c static int adnp_irq_set_type(struct irq_data *d, unsigned int type)
d                 353 drivers/gpio/gpio-adnp.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 355 drivers/gpio/gpio-adnp.c 	unsigned int reg = d->hwirq >> adnp->reg_shift;
d                 356 drivers/gpio/gpio-adnp.c 	unsigned int pos = d->hwirq & 7;
d                 381 drivers/gpio/gpio-adnp.c static void adnp_irq_bus_lock(struct irq_data *d)
d                 383 drivers/gpio/gpio-adnp.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 389 drivers/gpio/gpio-adnp.c static void adnp_irq_bus_unlock(struct irq_data *d)
d                 391 drivers/gpio/gpio-adnp.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 149 drivers/gpio/gpio-adp5588.c static void adp5588_irq_bus_lock(struct irq_data *d)
d                 151 drivers/gpio/gpio-adp5588.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 165 drivers/gpio/gpio-adp5588.c static void adp5588_irq_bus_sync_unlock(struct irq_data *d)
d                 167 drivers/gpio/gpio-adp5588.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 191 drivers/gpio/gpio-adp5588.c static void adp5588_irq_mask(struct irq_data *d)
d                 193 drivers/gpio/gpio-adp5588.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 196 drivers/gpio/gpio-adp5588.c 	dev->irq_mask[ADP5588_BANK(d->hwirq)] &= ~ADP5588_BIT(d->hwirq);
d                 199 drivers/gpio/gpio-adp5588.c static void adp5588_irq_unmask(struct irq_data *d)
d                 201 drivers/gpio/gpio-adp5588.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 204 drivers/gpio/gpio-adp5588.c 	dev->irq_mask[ADP5588_BANK(d->hwirq)] |= ADP5588_BIT(d->hwirq);
d                 207 drivers/gpio/gpio-adp5588.c static int adp5588_irq_set_type(struct irq_data *d, unsigned int type)
d                 209 drivers/gpio/gpio-adp5588.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 211 drivers/gpio/gpio-adp5588.c 	uint16_t gpio = d->hwirq;
d                  36 drivers/gpio/gpio-altera.c static void altera_gpio_irq_unmask(struct irq_data *d)
d                  43 drivers/gpio/gpio-altera.c 	altera_gc = gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                  49 drivers/gpio/gpio-altera.c 	intmask |= BIT(irqd_to_hwirq(d));
d                  54 drivers/gpio/gpio-altera.c static void altera_gpio_irq_mask(struct irq_data *d)
d                  61 drivers/gpio/gpio-altera.c 	altera_gc = gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                  67 drivers/gpio/gpio-altera.c 	intmask &= ~BIT(irqd_to_hwirq(d));
d                  76 drivers/gpio/gpio-altera.c static int altera_gpio_irq_set_type(struct irq_data *d,
d                  81 drivers/gpio/gpio-altera.c 	altera_gc = gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                  84 drivers/gpio/gpio-altera.c 		irq_set_handler_locked(d, handle_bad_irq);
d                  89 drivers/gpio/gpio-altera.c 			irq_set_handler_locked(d, handle_level_irq);
d                  91 drivers/gpio/gpio-altera.c 			irq_set_handler_locked(d, handle_simple_irq);
d                  94 drivers/gpio/gpio-altera.c 	irq_set_handler_locked(d, handle_bad_irq);
d                  98 drivers/gpio/gpio-altera.c static unsigned int altera_gpio_irq_startup(struct irq_data *d)
d                 100 drivers/gpio/gpio-altera.c 	altera_gpio_irq_unmask(d);
d                 505 drivers/gpio/gpio-aspeed.c static inline int irqd_to_aspeed_gpio_data(struct irq_data *d,
d                 512 drivers/gpio/gpio-aspeed.c 	*offset = irqd_to_hwirq(d);
d                 514 drivers/gpio/gpio-aspeed.c 	internal = irq_data_get_irq_chip_data(d);
d                 527 drivers/gpio/gpio-aspeed.c static void aspeed_gpio_irq_ack(struct irq_data *d)
d                 537 drivers/gpio/gpio-aspeed.c 	rc = irqd_to_aspeed_gpio_data(d, &gpio, &bank, &bit, &offset);
d                 553 drivers/gpio/gpio-aspeed.c static void aspeed_gpio_irq_set_mask(struct irq_data *d, bool set)
d                 563 drivers/gpio/gpio-aspeed.c 	rc = irqd_to_aspeed_gpio_data(d, &gpio, &bank, &bit, &offset);
d                 584 drivers/gpio/gpio-aspeed.c static void aspeed_gpio_irq_mask(struct irq_data *d)
d                 586 drivers/gpio/gpio-aspeed.c 	aspeed_gpio_irq_set_mask(d, false);
d                 589 drivers/gpio/gpio-aspeed.c static void aspeed_gpio_irq_unmask(struct irq_data *d)
d                 591 drivers/gpio/gpio-aspeed.c 	aspeed_gpio_irq_set_mask(d, true);
d                 594 drivers/gpio/gpio-aspeed.c static int aspeed_gpio_set_type(struct irq_data *d, unsigned int type)
d                 608 drivers/gpio/gpio-aspeed.c 	rc = irqd_to_aspeed_gpio_data(d, &gpio, &bank, &bit, &offset);
d                 655 drivers/gpio/gpio-aspeed.c 	irq_set_handler_locked(d, handler);
d                 338 drivers/gpio/gpio-bcm-kona.c static void bcm_kona_gpio_irq_ack(struct irq_data *d)
d                 342 drivers/gpio/gpio-bcm-kona.c 	unsigned gpio = d->hwirq;
d                 348 drivers/gpio/gpio-bcm-kona.c 	kona_gpio = irq_data_get_irq_chip_data(d);
d                 359 drivers/gpio/gpio-bcm-kona.c static void bcm_kona_gpio_irq_mask(struct irq_data *d)
d                 363 drivers/gpio/gpio-bcm-kona.c 	unsigned gpio = d->hwirq;
d                 369 drivers/gpio/gpio-bcm-kona.c 	kona_gpio = irq_data_get_irq_chip_data(d);
d                 381 drivers/gpio/gpio-bcm-kona.c static void bcm_kona_gpio_irq_unmask(struct irq_data *d)
d                 385 drivers/gpio/gpio-bcm-kona.c 	unsigned gpio = d->hwirq;
d                 391 drivers/gpio/gpio-bcm-kona.c 	kona_gpio = irq_data_get_irq_chip_data(d);
d                 403 drivers/gpio/gpio-bcm-kona.c static int bcm_kona_gpio_irq_set_type(struct irq_data *d, unsigned int type)
d                 407 drivers/gpio/gpio-bcm-kona.c 	unsigned gpio = d->hwirq;
d                 412 drivers/gpio/gpio-bcm-kona.c 	kona_gpio = irq_data_get_irq_chip_data(d);
d                 487 drivers/gpio/gpio-bcm-kona.c static int bcm_kona_gpio_irq_reqres(struct irq_data *d)
d                 489 drivers/gpio/gpio-bcm-kona.c 	struct bcm_kona_gpio *kona_gpio = irq_data_get_irq_chip_data(d);
d                 491 drivers/gpio/gpio-bcm-kona.c 	return gpiochip_reqres_irq(&kona_gpio->gpio_chip, d->hwirq);
d                 494 drivers/gpio/gpio-bcm-kona.c static void bcm_kona_gpio_irq_relres(struct irq_data *d)
d                 496 drivers/gpio/gpio-bcm-kona.c 	struct bcm_kona_gpio *kona_gpio = irq_data_get_irq_chip_data(d);
d                 498 drivers/gpio/gpio-bcm-kona.c 	gpiochip_relres_irq(&kona_gpio->gpio_chip, d->hwirq);
d                 523 drivers/gpio/gpio-bcm-kona.c static int bcm_kona_gpio_irq_map(struct irq_domain *d, unsigned int irq,
d                 528 drivers/gpio/gpio-bcm-kona.c 	ret = irq_set_chip_data(irq, d->host_data);
d                 538 drivers/gpio/gpio-bcm-kona.c static void bcm_kona_gpio_irq_unmap(struct irq_domain *d, unsigned int irq)
d                 140 drivers/gpio/gpio-brcmstb.c static void brcmstb_gpio_irq_mask(struct irq_data *d)
d                 142 drivers/gpio/gpio-brcmstb.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 145 drivers/gpio/gpio-brcmstb.c 	brcmstb_gpio_set_imask(bank, d->hwirq, false);
d                 148 drivers/gpio/gpio-brcmstb.c static void brcmstb_gpio_irq_unmask(struct irq_data *d)
d                 150 drivers/gpio/gpio-brcmstb.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 153 drivers/gpio/gpio-brcmstb.c 	brcmstb_gpio_set_imask(bank, d->hwirq, true);
d                 156 drivers/gpio/gpio-brcmstb.c static void brcmstb_gpio_irq_ack(struct irq_data *d)
d                 158 drivers/gpio/gpio-brcmstb.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 161 drivers/gpio/gpio-brcmstb.c 	u32 mask = BIT(brcmstb_gpio_hwirq_to_offset(d->hwirq, bank));
d                 166 drivers/gpio/gpio-brcmstb.c static int brcmstb_gpio_irq_set_type(struct irq_data *d, unsigned int type)
d                 168 drivers/gpio/gpio-brcmstb.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 171 drivers/gpio/gpio-brcmstb.c 	u32 mask = BIT(brcmstb_gpio_hwirq_to_offset(d->hwirq, bank));
d                 242 drivers/gpio/gpio-brcmstb.c static int brcmstb_gpio_irq_set_wake(struct irq_data *d, unsigned int enable)
d                 244 drivers/gpio/gpio-brcmstb.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 247 drivers/gpio/gpio-brcmstb.c 	u32 mask = BIT(brcmstb_gpio_hwirq_to_offset(d->hwirq, bank));
d                 332 drivers/gpio/gpio-brcmstb.c static int brcmstb_gpio_irq_map(struct irq_domain *d, unsigned int irq,
d                 335 drivers/gpio/gpio-brcmstb.c 	struct brcmstb_gpio_priv *priv = d->host_data;
d                 356 drivers/gpio/gpio-brcmstb.c static void brcmstb_gpio_irq_unmap(struct irq_domain *d, unsigned int irq)
d                  67 drivers/gpio/gpio-cadence.c static void cdns_gpio_irq_mask(struct irq_data *d)
d                  69 drivers/gpio/gpio-cadence.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                  72 drivers/gpio/gpio-cadence.c 	iowrite32(BIT(d->hwirq), cgpio->regs + CDNS_GPIO_IRQ_DIS);
d                  75 drivers/gpio/gpio-cadence.c static void cdns_gpio_irq_unmask(struct irq_data *d)
d                  77 drivers/gpio/gpio-cadence.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                  80 drivers/gpio/gpio-cadence.c 	iowrite32(BIT(d->hwirq), cgpio->regs + CDNS_GPIO_IRQ_EN);
d                  83 drivers/gpio/gpio-cadence.c static int cdns_gpio_irq_set_type(struct irq_data *d, unsigned int type)
d                  85 drivers/gpio/gpio-cadence.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                  90 drivers/gpio/gpio-cadence.c 	u32 mask = BIT(d->hwirq);
d                  72 drivers/gpio/gpio-davinci.c static inline struct davinci_gpio_regs __iomem *irq2regs(struct irq_data *d)
d                  76 drivers/gpio/gpio-davinci.c 	g = (__force struct davinci_gpio_regs __iomem *)irq_data_get_irq_chip_data(d);
d                  89 drivers/gpio/gpio-davinci.c 	struct davinci_gpio_controller *d = gpiochip_get_data(chip);
d                  96 drivers/gpio/gpio-davinci.c 	g = d->regs[bank];
d                  97 drivers/gpio/gpio-davinci.c 	spin_lock_irqsave(&d->lock, flags);
d                 106 drivers/gpio/gpio-davinci.c 	spin_unlock_irqrestore(&d->lock, flags);
d                 131 drivers/gpio/gpio-davinci.c 	struct davinci_gpio_controller *d = gpiochip_get_data(chip);
d                 135 drivers/gpio/gpio-davinci.c 	g = d->regs[bank];
d                 146 drivers/gpio/gpio-davinci.c 	struct davinci_gpio_controller *d = gpiochip_get_data(chip);
d                 150 drivers/gpio/gpio-davinci.c 	g = d->regs[bank];
d                 298 drivers/gpio/gpio-davinci.c static void gpio_irq_disable(struct irq_data *d)
d                 300 drivers/gpio/gpio-davinci.c 	struct davinci_gpio_regs __iomem *g = irq2regs(d);
d                 301 drivers/gpio/gpio-davinci.c 	uintptr_t mask = (uintptr_t)irq_data_get_irq_handler_data(d);
d                 307 drivers/gpio/gpio-davinci.c static void gpio_irq_enable(struct irq_data *d)
d                 309 drivers/gpio/gpio-davinci.c 	struct davinci_gpio_regs __iomem *g = irq2regs(d);
d                 310 drivers/gpio/gpio-davinci.c 	uintptr_t mask = (uintptr_t)irq_data_get_irq_handler_data(d);
d                 311 drivers/gpio/gpio-davinci.c 	unsigned status = irqd_get_trigger_type(d);
d                 323 drivers/gpio/gpio-davinci.c static int gpio_irq_type(struct irq_data *d, unsigned trigger)
d                 344 drivers/gpio/gpio-davinci.c 	struct davinci_gpio_controller *d;
d                 350 drivers/gpio/gpio-davinci.c 	d = irqdata->chip;
d                 380 drivers/gpio/gpio-davinci.c 				irq_find_mapping(d->irq_domain, hw_irq));
d                 389 drivers/gpio/gpio-davinci.c 	struct davinci_gpio_controller *d = gpiochip_get_data(chip);
d                 391 drivers/gpio/gpio-davinci.c 	if (d->irq_domain)
d                 392 drivers/gpio/gpio-davinci.c 		return irq_create_mapping(d->irq_domain, offset);
d                 399 drivers/gpio/gpio-davinci.c 	struct davinci_gpio_controller *d = gpiochip_get_data(chip);
d                 405 drivers/gpio/gpio-davinci.c 	if (offset < d->gpio_unbanked)
d                 406 drivers/gpio/gpio-davinci.c 		return d->irqs[offset];
d                 413 drivers/gpio/gpio-davinci.c 	struct davinci_gpio_controller *d;
d                 417 drivers/gpio/gpio-davinci.c 	d = (struct davinci_gpio_controller *)irq_data_get_irq_handler_data(data);
d                 418 drivers/gpio/gpio-davinci.c 	g = (struct davinci_gpio_regs __iomem *)d->regs[0];
d                 420 drivers/gpio/gpio-davinci.c 		if (data->irq == d->irqs[i])
d                 440 drivers/gpio/gpio-davinci.c davinci_gpio_irq_map(struct irq_domain *d, unsigned int irq,
d                 444 drivers/gpio/gpio-davinci.c 				(struct davinci_gpio_controller *)d->host_data;
d                 221 drivers/gpio/gpio-dwapb.c static void dwapb_irq_enable(struct irq_data *d)
d                 223 drivers/gpio/gpio-dwapb.c 	struct irq_chip_generic *igc = irq_data_get_irq_chip_data(d);
d                 231 drivers/gpio/gpio-dwapb.c 	val |= BIT(d->hwirq);
d                 236 drivers/gpio/gpio-dwapb.c static void dwapb_irq_disable(struct irq_data *d)
d                 238 drivers/gpio/gpio-dwapb.c 	struct irq_chip_generic *igc = irq_data_get_irq_chip_data(d);
d                 246 drivers/gpio/gpio-dwapb.c 	val &= ~BIT(d->hwirq);
d                 251 drivers/gpio/gpio-dwapb.c static int dwapb_irq_reqres(struct irq_data *d)
d                 253 drivers/gpio/gpio-dwapb.c 	struct irq_chip_generic *igc = irq_data_get_irq_chip_data(d);
d                 258 drivers/gpio/gpio-dwapb.c 	ret = gpiochip_lock_as_irq(gc, irqd_to_hwirq(d));
d                 261 drivers/gpio/gpio-dwapb.c 			irqd_to_hwirq(d));
d                 267 drivers/gpio/gpio-dwapb.c static void dwapb_irq_relres(struct irq_data *d)
d                 269 drivers/gpio/gpio-dwapb.c 	struct irq_chip_generic *igc = irq_data_get_irq_chip_data(d);
d                 273 drivers/gpio/gpio-dwapb.c 	gpiochip_unlock_as_irq(gc, irqd_to_hwirq(d));
d                 276 drivers/gpio/gpio-dwapb.c static int dwapb_irq_set_type(struct irq_data *d, u32 type)
d                 278 drivers/gpio/gpio-dwapb.c 	struct irq_chip_generic *igc = irq_data_get_irq_chip_data(d);
d                 281 drivers/gpio/gpio-dwapb.c 	int bit = d->hwirq;
d                 315 drivers/gpio/gpio-dwapb.c 	irq_setup_alt_chip(d, type);
d                 326 drivers/gpio/gpio-dwapb.c static int dwapb_irq_set_wake(struct irq_data *d, unsigned int enable)
d                 328 drivers/gpio/gpio-dwapb.c 	struct irq_chip_generic *igc = irq_data_get_irq_chip_data(d);
d                 333 drivers/gpio/gpio-dwapb.c 		ctx->wake_en |= BIT(d->hwirq);
d                 335 drivers/gpio/gpio-dwapb.c 		ctx->wake_en &= ~BIT(d->hwirq);
d                  75 drivers/gpio/gpio-em.c static void em_gio_irq_disable(struct irq_data *d)
d                  77 drivers/gpio/gpio-em.c 	struct em_gio_priv *p = irq_data_get_irq_chip_data(d);
d                  79 drivers/gpio/gpio-em.c 	em_gio_write(p, GIO_IDS, BIT(irqd_to_hwirq(d)));
d                  82 drivers/gpio/gpio-em.c static void em_gio_irq_enable(struct irq_data *d)
d                  84 drivers/gpio/gpio-em.c 	struct em_gio_priv *p = irq_data_get_irq_chip_data(d);
d                  86 drivers/gpio/gpio-em.c 	em_gio_write(p, GIO_IEN, BIT(irqd_to_hwirq(d)));
d                  89 drivers/gpio/gpio-em.c static int em_gio_irq_reqres(struct irq_data *d)
d                  91 drivers/gpio/gpio-em.c 	struct em_gio_priv *p = irq_data_get_irq_chip_data(d);
d                  94 drivers/gpio/gpio-em.c 	ret = gpiochip_lock_as_irq(&p->gpio_chip, irqd_to_hwirq(d));
d                  98 drivers/gpio/gpio-em.c 			irqd_to_hwirq(d));
d                 104 drivers/gpio/gpio-em.c static void em_gio_irq_relres(struct irq_data *d)
d                 106 drivers/gpio/gpio-em.c 	struct em_gio_priv *p = irq_data_get_irq_chip_data(d);
d                 108 drivers/gpio/gpio-em.c 	gpiochip_unlock_as_irq(&p->gpio_chip, irqd_to_hwirq(d));
d                 122 drivers/gpio/gpio-em.c static int em_gio_irq_set_type(struct irq_data *d, unsigned int type)
d                 125 drivers/gpio/gpio-em.c 	struct em_gio_priv *p = irq_data_get_irq_chip_data(d);
d                 133 drivers/gpio/gpio-em.c 	offset = irqd_to_hwirq(d);
d                 153 drivers/gpio/gpio-ep93xx.c static void ep93xx_gpio_irq_ack(struct irq_data *d)
d                 155 drivers/gpio/gpio-ep93xx.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 158 drivers/gpio/gpio-ep93xx.c 	int port_mask = BIT(d->irq & 7);
d                 160 drivers/gpio/gpio-ep93xx.c 	if (irqd_get_trigger_type(d) == IRQ_TYPE_EDGE_BOTH) {
d                 168 drivers/gpio/gpio-ep93xx.c static void ep93xx_gpio_irq_mask_ack(struct irq_data *d)
d                 170 drivers/gpio/gpio-ep93xx.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 173 drivers/gpio/gpio-ep93xx.c 	int port_mask = BIT(d->irq & 7);
d                 175 drivers/gpio/gpio-ep93xx.c 	if (irqd_get_trigger_type(d) == IRQ_TYPE_EDGE_BOTH)
d                 184 drivers/gpio/gpio-ep93xx.c static void ep93xx_gpio_irq_mask(struct irq_data *d)
d                 186 drivers/gpio/gpio-ep93xx.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 190 drivers/gpio/gpio-ep93xx.c 	gpio_int_unmasked[port] &= ~BIT(d->irq & 7);
d                 194 drivers/gpio/gpio-ep93xx.c static void ep93xx_gpio_irq_unmask(struct irq_data *d)
d                 196 drivers/gpio/gpio-ep93xx.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 200 drivers/gpio/gpio-ep93xx.c 	gpio_int_unmasked[port] |= BIT(d->irq & 7);
d                 209 drivers/gpio/gpio-ep93xx.c static int ep93xx_gpio_irq_type(struct irq_data *d, unsigned int type)
d                 211 drivers/gpio/gpio-ep93xx.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 214 drivers/gpio/gpio-ep93xx.c 	int offset = d->irq & 7;
d                 254 drivers/gpio/gpio-ep93xx.c 	irq_set_handler_locked(d, handler);
d                  56 drivers/gpio/gpio-ftgpio010.c static void ftgpio_gpio_ack_irq(struct irq_data *d)
d                  58 drivers/gpio/gpio-ftgpio010.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                  61 drivers/gpio/gpio-ftgpio010.c 	writel(BIT(irqd_to_hwirq(d)), g->base + GPIO_INT_CLR);
d                  64 drivers/gpio/gpio-ftgpio010.c static void ftgpio_gpio_mask_irq(struct irq_data *d)
d                  66 drivers/gpio/gpio-ftgpio010.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                  71 drivers/gpio/gpio-ftgpio010.c 	val &= ~BIT(irqd_to_hwirq(d));
d                  75 drivers/gpio/gpio-ftgpio010.c static void ftgpio_gpio_unmask_irq(struct irq_data *d)
d                  77 drivers/gpio/gpio-ftgpio010.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                  82 drivers/gpio/gpio-ftgpio010.c 	val |= BIT(irqd_to_hwirq(d));
d                  86 drivers/gpio/gpio-ftgpio010.c static int ftgpio_gpio_set_irq_type(struct irq_data *d, unsigned int type)
d                  88 drivers/gpio/gpio-ftgpio010.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                  90 drivers/gpio/gpio-ftgpio010.c 	u32 mask = BIT(irqd_to_hwirq(d));
d                  99 drivers/gpio/gpio-ftgpio010.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 104 drivers/gpio/gpio-ftgpio010.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 110 drivers/gpio/gpio-ftgpio010.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 116 drivers/gpio/gpio-ftgpio010.c 		irq_set_handler_locked(d, handle_level_irq);
d                 121 drivers/gpio/gpio-ftgpio010.c 		irq_set_handler_locked(d, handle_level_irq);
d                 126 drivers/gpio/gpio-ftgpio010.c 		irq_set_handler_locked(d, handle_bad_irq);
d                 134 drivers/gpio/gpio-ftgpio010.c 	ftgpio_gpio_ack_irq(d);
d                 117 drivers/gpio/gpio-grgpio.c static int grgpio_irq_set_type(struct irq_data *d, unsigned int type)
d                 119 drivers/gpio/gpio-grgpio.c 	struct grgpio_priv *priv = irq_data_get_irq_chip_data(d);
d                 121 drivers/gpio/gpio-grgpio.c 	u32 mask = BIT(d->hwirq);
d                 161 drivers/gpio/gpio-grgpio.c static void grgpio_irq_mask(struct irq_data *d)
d                 163 drivers/gpio/gpio-grgpio.c 	struct grgpio_priv *priv = irq_data_get_irq_chip_data(d);
d                 164 drivers/gpio/gpio-grgpio.c 	int offset = d->hwirq;
d                 174 drivers/gpio/gpio-grgpio.c static void grgpio_irq_unmask(struct irq_data *d)
d                 176 drivers/gpio/gpio-grgpio.c 	struct grgpio_priv *priv = irq_data_get_irq_chip_data(d);
d                 177 drivers/gpio/gpio-grgpio.c 	int offset = d->hwirq;
d                 230 drivers/gpio/gpio-grgpio.c static int grgpio_irq_map(struct irq_domain *d, unsigned int irq,
d                 233 drivers/gpio/gpio-grgpio.c 	struct grgpio_priv *priv = d->host_data;
d                 280 drivers/gpio/gpio-grgpio.c static void grgpio_irq_unmap(struct irq_domain *d, unsigned int irq)
d                 282 drivers/gpio/gpio-grgpio.c 	struct grgpio_priv *priv = d->host_data;
d                 166 drivers/gpio/gpio-intel-mid.c static int intel_mid_irq_type(struct irq_data *d, unsigned type)
d                 168 drivers/gpio/gpio-intel-mid.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 170 drivers/gpio/gpio-intel-mid.c 	u32 gpio = irqd_to_hwirq(d);
d                 202 drivers/gpio/gpio-intel-mid.c static void intel_mid_irq_unmask(struct irq_data *d)
d                 206 drivers/gpio/gpio-intel-mid.c static void intel_mid_irq_mask(struct irq_data *d)
d                  62 drivers/gpio/gpio-ixp4xx.c static void ixp4xx_gpio_irq_ack(struct irq_data *d)
d                  64 drivers/gpio/gpio-ixp4xx.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                  67 drivers/gpio/gpio-ixp4xx.c 	__raw_writel(BIT(d->hwirq), g->base + IXP4XX_REG_GPIS);
d                  70 drivers/gpio/gpio-ixp4xx.c static void ixp4xx_gpio_irq_unmask(struct irq_data *d)
d                  72 drivers/gpio/gpio-ixp4xx.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                  76 drivers/gpio/gpio-ixp4xx.c 	if (!(g->irq_edge & BIT(d->hwirq)))
d                  77 drivers/gpio/gpio-ixp4xx.c 		ixp4xx_gpio_irq_ack(d);
d                  79 drivers/gpio/gpio-ixp4xx.c 	irq_chip_unmask_parent(d);
d                  82 drivers/gpio/gpio-ixp4xx.c static int ixp4xx_gpio_irq_set_type(struct irq_data *d, unsigned int type)
d                  84 drivers/gpio/gpio-ixp4xx.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                  86 drivers/gpio/gpio-ixp4xx.c 	int line = d->hwirq;
d                  94 drivers/gpio/gpio-ixp4xx.c 		irq_set_handler_locked(d, handle_edge_irq);
d                  96 drivers/gpio/gpio-ixp4xx.c 		g->irq_edge |= BIT(d->hwirq);
d                  99 drivers/gpio/gpio-ixp4xx.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 101 drivers/gpio/gpio-ixp4xx.c 		g->irq_edge |= BIT(d->hwirq);
d                 104 drivers/gpio/gpio-ixp4xx.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 106 drivers/gpio/gpio-ixp4xx.c 		g->irq_edge |= BIT(d->hwirq);
d                 109 drivers/gpio/gpio-ixp4xx.c 		irq_set_handler_locked(d, handle_level_irq);
d                 111 drivers/gpio/gpio-ixp4xx.c 		g->irq_edge &= ~BIT(d->hwirq);
d                 114 drivers/gpio/gpio-ixp4xx.c 		irq_set_handler_locked(d, handle_level_irq);
d                 116 drivers/gpio/gpio-ixp4xx.c 		g->irq_edge &= ~BIT(d->hwirq);
d                 147 drivers/gpio/gpio-ixp4xx.c 	val |= BIT(d->hwirq);
d                 153 drivers/gpio/gpio-ixp4xx.c 	return irq_chip_set_type_parent(d, IRQ_TYPE_LEVEL_HIGH);
d                  75 drivers/gpio/gpio-lpc18xx.c static void lpc18xx_gpio_pin_ic_mask(struct irq_data *d)
d                  77 drivers/gpio/gpio-lpc18xx.c 	struct lpc18xx_gpio_pin_ic *ic = d->chip_data;
d                  78 drivers/gpio/gpio-lpc18xx.c 	u32 type = irqd_get_trigger_type(d);
d                  83 drivers/gpio/gpio-lpc18xx.c 		lpc18xx_gpio_pin_ic_set(ic, d->hwirq,
d                  87 drivers/gpio/gpio-lpc18xx.c 		lpc18xx_gpio_pin_ic_set(ic, d->hwirq,
d                  92 drivers/gpio/gpio-lpc18xx.c 	irq_chip_mask_parent(d);
d                  95 drivers/gpio/gpio-lpc18xx.c static void lpc18xx_gpio_pin_ic_unmask(struct irq_data *d)
d                  97 drivers/gpio/gpio-lpc18xx.c 	struct lpc18xx_gpio_pin_ic *ic = d->chip_data;
d                  98 drivers/gpio/gpio-lpc18xx.c 	u32 type = irqd_get_trigger_type(d);
d                 103 drivers/gpio/gpio-lpc18xx.c 		lpc18xx_gpio_pin_ic_set(ic, d->hwirq,
d                 107 drivers/gpio/gpio-lpc18xx.c 		lpc18xx_gpio_pin_ic_set(ic, d->hwirq,
d                 112 drivers/gpio/gpio-lpc18xx.c 	irq_chip_unmask_parent(d);
d                 115 drivers/gpio/gpio-lpc18xx.c static void lpc18xx_gpio_pin_ic_eoi(struct irq_data *d)
d                 117 drivers/gpio/gpio-lpc18xx.c 	struct lpc18xx_gpio_pin_ic *ic = d->chip_data;
d                 118 drivers/gpio/gpio-lpc18xx.c 	u32 type = irqd_get_trigger_type(d);
d                 123 drivers/gpio/gpio-lpc18xx.c 		lpc18xx_gpio_pin_ic_set(ic, d->hwirq,
d                 128 drivers/gpio/gpio-lpc18xx.c 	irq_chip_eoi_parent(d);
d                 131 drivers/gpio/gpio-lpc18xx.c static int lpc18xx_gpio_pin_ic_set_type(struct irq_data *d, unsigned int type)
d                 133 drivers/gpio/gpio-lpc18xx.c 	struct lpc18xx_gpio_pin_ic *ic = d->chip_data;
d                 138 drivers/gpio/gpio-lpc18xx.c 		lpc18xx_gpio_pin_ic_isel(ic, d->hwirq, true);
d                 139 drivers/gpio/gpio-lpc18xx.c 		lpc18xx_gpio_pin_ic_set(ic, d->hwirq,
d                 142 drivers/gpio/gpio-lpc18xx.c 		lpc18xx_gpio_pin_ic_isel(ic, d->hwirq, true);
d                 143 drivers/gpio/gpio-lpc18xx.c 		lpc18xx_gpio_pin_ic_set(ic, d->hwirq,
d                 146 drivers/gpio/gpio-lpc18xx.c 		lpc18xx_gpio_pin_ic_isel(ic, d->hwirq, false);
d                 135 drivers/gpio/gpio-lynxpoint.c static int lp_irq_type(struct irq_data *d, unsigned type)
d                 137 drivers/gpio/gpio-lynxpoint.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 139 drivers/gpio/gpio-lynxpoint.c 	u32 hwirq = irqd_to_hwirq(d);
d                 169 drivers/gpio/gpio-lynxpoint.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 171 drivers/gpio/gpio-lynxpoint.c 		irq_set_handler_locked(d, handle_level_irq);
d                 259 drivers/gpio/gpio-lynxpoint.c static void lp_irq_unmask(struct irq_data *d)
d                 263 drivers/gpio/gpio-lynxpoint.c static void lp_irq_mask(struct irq_data *d)
d                 267 drivers/gpio/gpio-lynxpoint.c static void lp_irq_enable(struct irq_data *d)
d                 269 drivers/gpio/gpio-lynxpoint.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 271 drivers/gpio/gpio-lynxpoint.c 	u32 hwirq = irqd_to_hwirq(d);
d                 280 drivers/gpio/gpio-lynxpoint.c static void lp_irq_disable(struct irq_data *d)
d                 282 drivers/gpio/gpio-lynxpoint.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 284 drivers/gpio/gpio-lynxpoint.c 	u32 hwirq = irqd_to_hwirq(d);
d                 348 drivers/gpio/gpio-max732x.c static void max732x_irq_mask(struct irq_data *d)
d                 350 drivers/gpio/gpio-max732x.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 353 drivers/gpio/gpio-max732x.c 	chip->irq_mask_cur &= ~(1 << d->hwirq);
d                 356 drivers/gpio/gpio-max732x.c static void max732x_irq_unmask(struct irq_data *d)
d                 358 drivers/gpio/gpio-max732x.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 361 drivers/gpio/gpio-max732x.c 	chip->irq_mask_cur |= 1 << d->hwirq;
d                 364 drivers/gpio/gpio-max732x.c static void max732x_irq_bus_lock(struct irq_data *d)
d                 366 drivers/gpio/gpio-max732x.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 373 drivers/gpio/gpio-max732x.c static void max732x_irq_bus_sync_unlock(struct irq_data *d)
d                 375 drivers/gpio/gpio-max732x.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 392 drivers/gpio/gpio-max732x.c static int max732x_irq_set_type(struct irq_data *d, unsigned int type)
d                 394 drivers/gpio/gpio-max732x.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 396 drivers/gpio/gpio-max732x.c 	uint16_t off = d->hwirq;
d                 407 drivers/gpio/gpio-max732x.c 			d->irq, type);
d                 201 drivers/gpio/gpio-merrifield.c static void mrfld_irq_ack(struct irq_data *d)
d                 203 drivers/gpio/gpio-merrifield.c 	struct mrfld_gpio *priv = irq_data_get_irq_chip_data(d);
d                 204 drivers/gpio/gpio-merrifield.c 	u32 gpio = irqd_to_hwirq(d);
d                 215 drivers/gpio/gpio-merrifield.c static void mrfld_irq_unmask_mask(struct irq_data *d, bool unmask)
d                 217 drivers/gpio/gpio-merrifield.c 	struct mrfld_gpio *priv = irq_data_get_irq_chip_data(d);
d                 218 drivers/gpio/gpio-merrifield.c 	u32 gpio = irqd_to_hwirq(d);
d                 234 drivers/gpio/gpio-merrifield.c static void mrfld_irq_mask(struct irq_data *d)
d                 236 drivers/gpio/gpio-merrifield.c 	mrfld_irq_unmask_mask(d, false);
d                 239 drivers/gpio/gpio-merrifield.c static void mrfld_irq_unmask(struct irq_data *d)
d                 241 drivers/gpio/gpio-merrifield.c 	mrfld_irq_unmask_mask(d, true);
d                 244 drivers/gpio/gpio-merrifield.c static int mrfld_irq_set_type(struct irq_data *d, unsigned int type)
d                 246 drivers/gpio/gpio-merrifield.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 248 drivers/gpio/gpio-merrifield.c 	u32 gpio = irqd_to_hwirq(d);
d                 284 drivers/gpio/gpio-merrifield.c 		irq_set_handler_locked(d, handle_level_irq);
d                 289 drivers/gpio/gpio-merrifield.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 297 drivers/gpio/gpio-merrifield.c static int mrfld_irq_set_wake(struct irq_data *d, unsigned int on)
d                 299 drivers/gpio/gpio-merrifield.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 301 drivers/gpio/gpio-merrifield.c 	u32 gpio = irqd_to_hwirq(d);
d                 235 drivers/gpio/gpio-ml-ioh.c static int ioh_irq_type(struct irq_data *d, unsigned int type)
d                 244 drivers/gpio/gpio-ml-ioh.c 	int irq = d->irq;
d                 245 drivers/gpio/gpio-ml-ioh.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 304 drivers/gpio/gpio-ml-ioh.c static void ioh_irq_unmask(struct irq_data *d)
d                 306 drivers/gpio/gpio-ml-ioh.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 309 drivers/gpio/gpio-ml-ioh.c 	iowrite32(1 << (d->irq - chip->irq_base),
d                 313 drivers/gpio/gpio-ml-ioh.c static void ioh_irq_mask(struct irq_data *d)
d                 315 drivers/gpio/gpio-ml-ioh.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 318 drivers/gpio/gpio-ml-ioh.c 	iowrite32(1 << (d->irq - chip->irq_base),
d                 322 drivers/gpio/gpio-ml-ioh.c static void ioh_irq_disable(struct irq_data *d)
d                 324 drivers/gpio/gpio-ml-ioh.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 331 drivers/gpio/gpio-ml-ioh.c 	ien &= ~(1 << (d->irq - chip->irq_base));
d                 336 drivers/gpio/gpio-ml-ioh.c static void ioh_irq_enable(struct irq_data *d)
d                 338 drivers/gpio/gpio-ml-ioh.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 345 drivers/gpio/gpio-ml-ioh.c 	ien |= 1 << (d->irq - chip->irq_base);
d                 146 drivers/gpio/gpio-mpc8xxx.c static void mpc8xxx_irq_unmask(struct irq_data *d)
d                 148 drivers/gpio/gpio-mpc8xxx.c 	struct mpc8xxx_gpio_chip *mpc8xxx_gc = irq_data_get_irq_chip_data(d);
d                 156 drivers/gpio/gpio-mpc8xxx.c 		| mpc_pin2mask(irqd_to_hwirq(d)));
d                 161 drivers/gpio/gpio-mpc8xxx.c static void mpc8xxx_irq_mask(struct irq_data *d)
d                 163 drivers/gpio/gpio-mpc8xxx.c 	struct mpc8xxx_gpio_chip *mpc8xxx_gc = irq_data_get_irq_chip_data(d);
d                 171 drivers/gpio/gpio-mpc8xxx.c 		& ~mpc_pin2mask(irqd_to_hwirq(d)));
d                 176 drivers/gpio/gpio-mpc8xxx.c static void mpc8xxx_irq_ack(struct irq_data *d)
d                 178 drivers/gpio/gpio-mpc8xxx.c 	struct mpc8xxx_gpio_chip *mpc8xxx_gc = irq_data_get_irq_chip_data(d);
d                 182 drivers/gpio/gpio-mpc8xxx.c 		      mpc_pin2mask(irqd_to_hwirq(d)));
d                 185 drivers/gpio/gpio-mpc8xxx.c static int mpc8xxx_irq_set_type(struct irq_data *d, unsigned int flow_type)
d                 187 drivers/gpio/gpio-mpc8xxx.c 	struct mpc8xxx_gpio_chip *mpc8xxx_gc = irq_data_get_irq_chip_data(d);
d                 196 drivers/gpio/gpio-mpc8xxx.c 			| mpc_pin2mask(irqd_to_hwirq(d)));
d                 204 drivers/gpio/gpio-mpc8xxx.c 			& ~mpc_pin2mask(irqd_to_hwirq(d)));
d                 215 drivers/gpio/gpio-mpc8xxx.c static int mpc512x_irq_set_type(struct irq_data *d, unsigned int flow_type)
d                 217 drivers/gpio/gpio-mpc8xxx.c 	struct mpc8xxx_gpio_chip *mpc8xxx_gc = irq_data_get_irq_chip_data(d);
d                 219 drivers/gpio/gpio-mpc8xxx.c 	unsigned long gpio = irqd_to_hwirq(d);
d                 109 drivers/gpio/gpio-mt7621.c mediatek_gpio_irq_unmask(struct irq_data *d)
d                 111 drivers/gpio/gpio-mt7621.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 113 drivers/gpio/gpio-mt7621.c 	int pin = d->hwirq;
d                 130 drivers/gpio/gpio-mt7621.c mediatek_gpio_irq_mask(struct irq_data *d)
d                 132 drivers/gpio/gpio-mt7621.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 134 drivers/gpio/gpio-mt7621.c 	int pin = d->hwirq;
d                 151 drivers/gpio/gpio-mt7621.c mediatek_gpio_irq_type(struct irq_data *d, unsigned int type)
d                 153 drivers/gpio/gpio-mt7621.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 155 drivers/gpio/gpio-mt7621.c 	int pin = d->hwirq;
d                 400 drivers/gpio/gpio-mvebu.c static void mvebu_gpio_irq_ack(struct irq_data *d)
d                 402 drivers/gpio/gpio-mvebu.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 404 drivers/gpio/gpio-mvebu.c 	u32 mask = d->mask;
d                 411 drivers/gpio/gpio-mvebu.c static void mvebu_gpio_edge_irq_mask(struct irq_data *d)
d                 413 drivers/gpio/gpio-mvebu.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 415 drivers/gpio/gpio-mvebu.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                 416 drivers/gpio/gpio-mvebu.c 	u32 mask = d->mask;
d                 424 drivers/gpio/gpio-mvebu.c static void mvebu_gpio_edge_irq_unmask(struct irq_data *d)
d                 426 drivers/gpio/gpio-mvebu.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 428 drivers/gpio/gpio-mvebu.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                 429 drivers/gpio/gpio-mvebu.c 	u32 mask = d->mask;
d                 437 drivers/gpio/gpio-mvebu.c static void mvebu_gpio_level_irq_mask(struct irq_data *d)
d                 439 drivers/gpio/gpio-mvebu.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 441 drivers/gpio/gpio-mvebu.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                 442 drivers/gpio/gpio-mvebu.c 	u32 mask = d->mask;
d                 450 drivers/gpio/gpio-mvebu.c static void mvebu_gpio_level_irq_unmask(struct irq_data *d)
d                 452 drivers/gpio/gpio-mvebu.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 454 drivers/gpio/gpio-mvebu.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                 455 drivers/gpio/gpio-mvebu.c 	u32 mask = d->mask;
d                 489 drivers/gpio/gpio-mvebu.c static int mvebu_gpio_irq_set_type(struct irq_data *d, unsigned int type)
d                 491 drivers/gpio/gpio-mvebu.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 492 drivers/gpio/gpio-mvebu.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                 497 drivers/gpio/gpio-mvebu.c 	pin = d->hwirq;
d                 509 drivers/gpio/gpio-mvebu.c 		if (irq_setup_alt_chip(d, type))
d                 171 drivers/gpio/gpio-mxc.c static int gpio_set_irq_type(struct irq_data *d, u32 type)
d                 173 drivers/gpio/gpio-mxc.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 176 drivers/gpio/gpio-mxc.c 	u32 gpio_idx = d->hwirq;
d                 322 drivers/gpio/gpio-mxc.c static int gpio_set_wake_irq(struct irq_data *d, u32 enable)
d                 324 drivers/gpio/gpio-mxc.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 326 drivers/gpio/gpio-mxc.c 	u32 gpio_idx = d->hwirq;
d                  70 drivers/gpio/gpio-mxs.c static int mxs_gpio_set_irq_type(struct irq_data *d, unsigned int type)
d                  73 drivers/gpio/gpio-mxs.c 	u32 pin_mask = 1 << d->hwirq;
d                  74 drivers/gpio/gpio-mxs.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  75 drivers/gpio/gpio-mxs.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                  81 drivers/gpio/gpio-mxs.c 		if (irq_setup_alt_chip(d, type))
d                 179 drivers/gpio/gpio-mxs.c static int mxs_gpio_set_wake_irq(struct irq_data *d, unsigned int enable)
d                 181 drivers/gpio/gpio-mxs.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  84 drivers/gpio/gpio-omap.c static void omap_gpio_unmask_irq(struct irq_data *d);
d                  86 drivers/gpio/gpio-omap.c static inline struct gpio_bank *omap_irq_data_get_bank(struct irq_data *d)
d                  88 drivers/gpio/gpio-omap.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                 422 drivers/gpio/gpio-omap.c static int omap_gpio_irq_type(struct irq_data *d, unsigned type)
d                 424 drivers/gpio/gpio-omap.c 	struct gpio_bank *bank = omap_irq_data_get_bank(d);
d                 427 drivers/gpio/gpio-omap.c 	unsigned offset = d->hwirq;
d                 451 drivers/gpio/gpio-omap.c 		irq_set_handler_locked(d, handle_level_irq);
d                 459 drivers/gpio/gpio-omap.c 		irq_set_handler_locked(d, handle_simple_irq);
d                 540 drivers/gpio/gpio-omap.c static int omap_gpio_wake_enable(struct irq_data *d, unsigned int enable)
d                 542 drivers/gpio/gpio-omap.c 	struct gpio_bank *bank = omap_irq_data_get_bank(d);
d                 623 drivers/gpio/gpio-omap.c static unsigned int omap_gpio_irq_startup(struct irq_data *d)
d                 625 drivers/gpio/gpio-omap.c 	struct gpio_bank *bank = omap_irq_data_get_bank(d);
d                 627 drivers/gpio/gpio-omap.c 	unsigned offset = d->hwirq;
d                 637 drivers/gpio/gpio-omap.c 	omap_gpio_unmask_irq(d);
d                 642 drivers/gpio/gpio-omap.c static void omap_gpio_irq_shutdown(struct irq_data *d)
d                 644 drivers/gpio/gpio-omap.c 	struct gpio_bank *bank = omap_irq_data_get_bank(d);
d                 646 drivers/gpio/gpio-omap.c 	unsigned offset = d->hwirq;
d                 673 drivers/gpio/gpio-omap.c static void omap_gpio_mask_irq(struct irq_data *d)
d                 675 drivers/gpio/gpio-omap.c 	struct gpio_bank *bank = omap_irq_data_get_bank(d);
d                 676 drivers/gpio/gpio-omap.c 	unsigned offset = d->hwirq;
d                 685 drivers/gpio/gpio-omap.c static void omap_gpio_unmask_irq(struct irq_data *d)
d                 687 drivers/gpio/gpio-omap.c 	struct gpio_bank *bank = omap_irq_data_get_bank(d);
d                 688 drivers/gpio/gpio-omap.c 	unsigned offset = d->hwirq;
d                 689 drivers/gpio/gpio-omap.c 	u32 trigger = irqd_get_trigger_type(d);
d                 564 drivers/gpio/gpio-pca953x.c static void pca953x_irq_mask(struct irq_data *d)
d                 566 drivers/gpio/gpio-pca953x.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 569 drivers/gpio/gpio-pca953x.c 	chip->irq_mask[d->hwirq / BANK_SZ] &= ~BIT(d->hwirq % BANK_SZ);
d                 572 drivers/gpio/gpio-pca953x.c static void pca953x_irq_unmask(struct irq_data *d)
d                 574 drivers/gpio/gpio-pca953x.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 577 drivers/gpio/gpio-pca953x.c 	chip->irq_mask[d->hwirq / BANK_SZ] |= BIT(d->hwirq % BANK_SZ);
d                 580 drivers/gpio/gpio-pca953x.c static int pca953x_irq_set_wake(struct irq_data *d, unsigned int on)
d                 582 drivers/gpio/gpio-pca953x.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 593 drivers/gpio/gpio-pca953x.c static void pca953x_irq_bus_lock(struct irq_data *d)
d                 595 drivers/gpio/gpio-pca953x.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 601 drivers/gpio/gpio-pca953x.c static void pca953x_irq_bus_sync_unlock(struct irq_data *d)
d                 603 drivers/gpio/gpio-pca953x.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 639 drivers/gpio/gpio-pca953x.c static int pca953x_irq_set_type(struct irq_data *d, unsigned int type)
d                 641 drivers/gpio/gpio-pca953x.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 643 drivers/gpio/gpio-pca953x.c 	int bank_nb = d->hwirq / BANK_SZ;
d                 644 drivers/gpio/gpio-pca953x.c 	u8 mask = BIT(d->hwirq % BANK_SZ);
d                 648 drivers/gpio/gpio-pca953x.c 			d->irq, type);
d                 665 drivers/gpio/gpio-pca953x.c static void pca953x_irq_shutdown(struct irq_data *d)
d                 667 drivers/gpio/gpio-pca953x.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 669 drivers/gpio/gpio-pca953x.c 	u8 mask = BIT(d->hwirq % BANK_SZ);
d                 671 drivers/gpio/gpio-pca953x.c 	chip->irq_trig_raise[d->hwirq / BANK_SZ] &= ~mask;
d                 672 drivers/gpio/gpio-pca953x.c 	chip->irq_trig_fall[d->hwirq / BANK_SZ] &= ~mask;
d                 219 drivers/gpio/gpio-pch.c static int pch_irq_type(struct irq_data *d, unsigned int type)
d                 221 drivers/gpio/gpio-pch.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 226 drivers/gpio/gpio-pch.c 	int ch, irq = d->irq;
d                 266 drivers/gpio/gpio-pch.c 		irq_set_handler_locked(d, handle_level_irq);
d                 268 drivers/gpio/gpio-pch.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 275 drivers/gpio/gpio-pch.c static void pch_irq_unmask(struct irq_data *d)
d                 277 drivers/gpio/gpio-pch.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 280 drivers/gpio/gpio-pch.c 	iowrite32(1 << (d->irq - chip->irq_base), &chip->reg->imaskclr);
d                 283 drivers/gpio/gpio-pch.c static void pch_irq_mask(struct irq_data *d)
d                 285 drivers/gpio/gpio-pch.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 288 drivers/gpio/gpio-pch.c 	iowrite32(1 << (d->irq - chip->irq_base), &chip->reg->imask);
d                 291 drivers/gpio/gpio-pch.c static void pch_irq_ack(struct irq_data *d)
d                 293 drivers/gpio/gpio-pch.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 296 drivers/gpio/gpio-pch.c 	iowrite32(1 << (d->irq - chip->irq_base), &chip->reg->iclr);
d                 121 drivers/gpio/gpio-pl061.c static int pl061_irq_type(struct irq_data *d, unsigned trigger)
d                 123 drivers/gpio/gpio-pl061.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 125 drivers/gpio/gpio-pl061.c 	int offset = irqd_to_hwirq(d);
d                 162 drivers/gpio/gpio-pl061.c 		irq_set_handler_locked(d, handle_level_irq);
d                 171 drivers/gpio/gpio-pl061.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 186 drivers/gpio/gpio-pl061.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 195 drivers/gpio/gpio-pl061.c 		irq_set_handler_locked(d, handle_bad_irq);
d                 229 drivers/gpio/gpio-pl061.c static void pl061_irq_mask(struct irq_data *d)
d                 231 drivers/gpio/gpio-pl061.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 233 drivers/gpio/gpio-pl061.c 	u8 mask = BIT(irqd_to_hwirq(d) % PL061_GPIO_NR);
d                 242 drivers/gpio/gpio-pl061.c static void pl061_irq_unmask(struct irq_data *d)
d                 244 drivers/gpio/gpio-pl061.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 246 drivers/gpio/gpio-pl061.c 	u8 mask = BIT(irqd_to_hwirq(d) % PL061_GPIO_NR);
d                 263 drivers/gpio/gpio-pl061.c static void pl061_irq_ack(struct irq_data *d)
d                 265 drivers/gpio/gpio-pl061.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 267 drivers/gpio/gpio-pl061.c 	u8 mask = BIT(irqd_to_hwirq(d) % PL061_GPIO_NR);
d                 274 drivers/gpio/gpio-pl061.c static int pl061_irq_set_wake(struct irq_data *d, unsigned int state)
d                 276 drivers/gpio/gpio-pl061.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 399 drivers/gpio/gpio-pxa.c static int pxa_gpio_irq_type(struct irq_data *d, unsigned int type)
d                 401 drivers/gpio/gpio-pxa.c 	struct pxa_gpio_chip *pchip = irq_data_get_irq_chip_data(d);
d                 402 drivers/gpio/gpio-pxa.c 	unsigned int gpio = irqd_to_hwirq(d);
d                 438 drivers/gpio/gpio-pxa.c 	pr_debug("%s: IRQ%d (GPIO%d) - edge%s%s\n", __func__, d->irq, gpio,
d                 444 drivers/gpio/gpio-pxa.c static irqreturn_t pxa_gpio_demux_handler(int in_irq, void *d)
d                 448 drivers/gpio/gpio-pxa.c 	struct pxa_gpio_chip *pchip = d;
d                 472 drivers/gpio/gpio-pxa.c static irqreturn_t pxa_gpio_direct_handler(int in_irq, void *d)
d                 474 drivers/gpio/gpio-pxa.c 	struct pxa_gpio_chip *pchip = d;
d                 487 drivers/gpio/gpio-pxa.c static void pxa_ack_muxed_gpio(struct irq_data *d)
d                 489 drivers/gpio/gpio-pxa.c 	struct pxa_gpio_chip *pchip = irq_data_get_irq_chip_data(d);
d                 490 drivers/gpio/gpio-pxa.c 	unsigned int gpio = irqd_to_hwirq(d);
d                 496 drivers/gpio/gpio-pxa.c static void pxa_mask_muxed_gpio(struct irq_data *d)
d                 498 drivers/gpio/gpio-pxa.c 	struct pxa_gpio_chip *pchip = irq_data_get_irq_chip_data(d);
d                 499 drivers/gpio/gpio-pxa.c 	unsigned int gpio = irqd_to_hwirq(d);
d                 512 drivers/gpio/gpio-pxa.c static int pxa_gpio_set_wake(struct irq_data *d, unsigned int on)
d                 514 drivers/gpio/gpio-pxa.c 	struct pxa_gpio_chip *pchip = irq_data_get_irq_chip_data(d);
d                 515 drivers/gpio/gpio-pxa.c 	unsigned int gpio = irqd_to_hwirq(d);
d                 523 drivers/gpio/gpio-pxa.c static void pxa_unmask_muxed_gpio(struct irq_data *d)
d                 525 drivers/gpio/gpio-pxa.c 	struct pxa_gpio_chip *pchip = irq_data_get_irq_chip_data(d);
d                 526 drivers/gpio/gpio-pxa.c 	unsigned int gpio = irqd_to_hwirq(d);
d                 567 drivers/gpio/gpio-pxa.c static int pxa_irq_domain_map(struct irq_domain *d, unsigned int irq,
d                 572 drivers/gpio/gpio-pxa.c 	irq_set_chip_data(irq, d->host_data);
d                  88 drivers/gpio/gpio-rcar.c static void gpio_rcar_irq_disable(struct irq_data *d)
d                  90 drivers/gpio/gpio-rcar.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                  93 drivers/gpio/gpio-rcar.c 	gpio_rcar_write(p, INTMSK, ~BIT(irqd_to_hwirq(d)));
d                  96 drivers/gpio/gpio-rcar.c static void gpio_rcar_irq_enable(struct irq_data *d)
d                  98 drivers/gpio/gpio-rcar.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 101 drivers/gpio/gpio-rcar.c 	gpio_rcar_write(p, MSKCLR, BIT(irqd_to_hwirq(d)));
d                 139 drivers/gpio/gpio-rcar.c static int gpio_rcar_irq_set_type(struct irq_data *d, unsigned int type)
d                 141 drivers/gpio/gpio-rcar.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 143 drivers/gpio/gpio-rcar.c 	unsigned int hwirq = irqd_to_hwirq(d);
d                 176 drivers/gpio/gpio-rcar.c static int gpio_rcar_irq_set_wake(struct irq_data *d, unsigned int on)
d                 178 drivers/gpio/gpio-rcar.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 122 drivers/gpio/gpio-sa1100.c static int sa1100_gpio_type(struct irq_data *d, unsigned int type)
d                 124 drivers/gpio/gpio-sa1100.c 	struct sa1100_gpio_chip *sgc = irq_data_get_irq_chip_data(d);
d                 125 drivers/gpio/gpio-sa1100.c 	unsigned int mask = BIT(d->hwirq);
d                 150 drivers/gpio/gpio-sa1100.c static void sa1100_gpio_ack(struct irq_data *d)
d                 152 drivers/gpio/gpio-sa1100.c 	struct sa1100_gpio_chip *sgc = irq_data_get_irq_chip_data(d);
d                 154 drivers/gpio/gpio-sa1100.c 	writel_relaxed(BIT(d->hwirq), sgc->membase + R_GEDR);
d                 157 drivers/gpio/gpio-sa1100.c static void sa1100_gpio_mask(struct irq_data *d)
d                 159 drivers/gpio/gpio-sa1100.c 	struct sa1100_gpio_chip *sgc = irq_data_get_irq_chip_data(d);
d                 160 drivers/gpio/gpio-sa1100.c 	unsigned int mask = BIT(d->hwirq);
d                 167 drivers/gpio/gpio-sa1100.c static void sa1100_gpio_unmask(struct irq_data *d)
d                 169 drivers/gpio/gpio-sa1100.c 	struct sa1100_gpio_chip *sgc = irq_data_get_irq_chip_data(d);
d                 170 drivers/gpio/gpio-sa1100.c 	unsigned int mask = BIT(d->hwirq);
d                 177 drivers/gpio/gpio-sa1100.c static int sa1100_gpio_wake(struct irq_data *d, unsigned int on)
d                 179 drivers/gpio/gpio-sa1100.c 	struct sa1100_gpio_chip *sgc = irq_data_get_irq_chip_data(d);
d                 180 drivers/gpio/gpio-sa1100.c 	int ret = sa11x0_gpio_set_wake(d->hwirq, on);
d                 183 drivers/gpio/gpio-sa1100.c 			sgc->irqwake |= BIT(d->hwirq);
d                 185 drivers/gpio/gpio-sa1100.c 			sgc->irqwake &= ~BIT(d->hwirq);
d                 202 drivers/gpio/gpio-sa1100.c static int sa1100_gpio_irqdomain_map(struct irq_domain *d,
d                 205 drivers/gpio/gpio-sa1100.c 	struct sa1100_gpio_chip *sgc = d->host_data;
d                  98 drivers/gpio/gpio-siox.c static void gpio_siox_irq_ack(struct irq_data *d)
d                 100 drivers/gpio/gpio-siox.c 	struct irq_chip *ic = irq_data_get_irq_chip(d);
d                 105 drivers/gpio/gpio-siox.c 	ddata->irq_status &= ~(1 << d->hwirq);
d                 109 drivers/gpio/gpio-siox.c static void gpio_siox_irq_mask(struct irq_data *d)
d                 111 drivers/gpio/gpio-siox.c 	struct irq_chip *ic = irq_data_get_irq_chip(d);
d                 116 drivers/gpio/gpio-siox.c 	ddata->irq_enable &= ~(1 << d->hwirq);
d                 120 drivers/gpio/gpio-siox.c static void gpio_siox_irq_unmask(struct irq_data *d)
d                 122 drivers/gpio/gpio-siox.c 	struct irq_chip *ic = irq_data_get_irq_chip(d);
d                 127 drivers/gpio/gpio-siox.c 	ddata->irq_enable |= 1 << d->hwirq;
d                 131 drivers/gpio/gpio-siox.c static int gpio_siox_irq_set_type(struct irq_data *d, u32 type)
d                 133 drivers/gpio/gpio-siox.c 	struct irq_chip *ic = irq_data_get_irq_chip(d);
d                 138 drivers/gpio/gpio-siox.c 	ddata->irq_type[d->hwirq] = type;
d                  45 drivers/gpio/gpio-sodaville.c static int sdv_gpio_pub_set_type(struct irq_data *d, unsigned int type)
d                  47 drivers/gpio/gpio-sodaville.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  52 drivers/gpio/gpio-sodaville.c 	if (d->hwirq < 8)
d                  61 drivers/gpio/gpio-sodaville.c 		reg &= ~BIT(4 * (d->hwirq % 8));
d                  65 drivers/gpio/gpio-sodaville.c 		reg |= BIT(4 * (d->hwirq % 8));
d                 261 drivers/gpio/gpio-sta2x11.c static int gsta_irq_type(struct irq_data *d, unsigned int type)
d                 263 drivers/gpio/gpio-sta2x11.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 265 drivers/gpio/gpio-sta2x11.c 	int nr = d->irq - chip->irq_base;
d                 275 drivers/gpio/gpio-sta2x11.c 	gsta_irq_enable(d);
d                 137 drivers/gpio/gpio-stmpe.c static int stmpe_gpio_irq_set_type(struct irq_data *d, unsigned int type)
d                 139 drivers/gpio/gpio-stmpe.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 141 drivers/gpio/gpio-stmpe.c 	int offset = d->hwirq;
d                 166 drivers/gpio/gpio-stmpe.c static void stmpe_gpio_irq_lock(struct irq_data *d)
d                 168 drivers/gpio/gpio-stmpe.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 174 drivers/gpio/gpio-stmpe.c static void stmpe_gpio_irq_sync_unlock(struct irq_data *d)
d                 176 drivers/gpio/gpio-stmpe.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 225 drivers/gpio/gpio-stmpe.c static void stmpe_gpio_irq_mask(struct irq_data *d)
d                 227 drivers/gpio/gpio-stmpe.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 229 drivers/gpio/gpio-stmpe.c 	int offset = d->hwirq;
d                 236 drivers/gpio/gpio-stmpe.c static void stmpe_gpio_irq_unmask(struct irq_data *d)
d                 238 drivers/gpio/gpio-stmpe.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 240 drivers/gpio/gpio-stmpe.c 	int offset = d->hwirq;
d                 154 drivers/gpio/gpio-tc3589x.c static int tc3589x_gpio_irq_set_type(struct irq_data *d, unsigned int type)
d                 156 drivers/gpio/gpio-tc3589x.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 158 drivers/gpio/gpio-tc3589x.c 	int offset = d->hwirq;
d                 182 drivers/gpio/gpio-tc3589x.c static void tc3589x_gpio_irq_lock(struct irq_data *d)
d                 184 drivers/gpio/gpio-tc3589x.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 190 drivers/gpio/gpio-tc3589x.c static void tc3589x_gpio_irq_sync_unlock(struct irq_data *d)
d                 192 drivers/gpio/gpio-tc3589x.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 219 drivers/gpio/gpio-tc3589x.c static void tc3589x_gpio_irq_mask(struct irq_data *d)
d                 221 drivers/gpio/gpio-tc3589x.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 223 drivers/gpio/gpio-tc3589x.c 	int offset = d->hwirq;
d                 230 drivers/gpio/gpio-tc3589x.c static void tc3589x_gpio_irq_unmask(struct irq_data *d)
d                 232 drivers/gpio/gpio-tc3589x.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 234 drivers/gpio/gpio-tc3589x.c 	int offset = d->hwirq;
d                 273 drivers/gpio/gpio-tegra.c static void tegra_gpio_irq_ack(struct irq_data *d)
d                 275 drivers/gpio/gpio-tegra.c 	struct tegra_gpio_bank *bank = irq_data_get_irq_chip_data(d);
d                 277 drivers/gpio/gpio-tegra.c 	unsigned int gpio = d->hwirq;
d                 282 drivers/gpio/gpio-tegra.c static void tegra_gpio_irq_mask(struct irq_data *d)
d                 284 drivers/gpio/gpio-tegra.c 	struct tegra_gpio_bank *bank = irq_data_get_irq_chip_data(d);
d                 286 drivers/gpio/gpio-tegra.c 	unsigned int gpio = d->hwirq;
d                 291 drivers/gpio/gpio-tegra.c static void tegra_gpio_irq_unmask(struct irq_data *d)
d                 293 drivers/gpio/gpio-tegra.c 	struct tegra_gpio_bank *bank = irq_data_get_irq_chip_data(d);
d                 295 drivers/gpio/gpio-tegra.c 	unsigned int gpio = d->hwirq;
d                 300 drivers/gpio/gpio-tegra.c static int tegra_gpio_irq_set_type(struct irq_data *d, unsigned int type)
d                 302 drivers/gpio/gpio-tegra.c 	unsigned int gpio = d->hwirq, port = GPIO_PORT(gpio), lvl_type;
d                 303 drivers/gpio/gpio-tegra.c 	struct tegra_gpio_bank *bank = irq_data_get_irq_chip_data(d);
d                 355 drivers/gpio/gpio-tegra.c 		irq_set_handler_locked(d, handle_level_irq);
d                 357 drivers/gpio/gpio-tegra.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 362 drivers/gpio/gpio-tegra.c static void tegra_gpio_irq_shutdown(struct irq_data *d)
d                 364 drivers/gpio/gpio-tegra.c 	struct tegra_gpio_bank *bank = irq_data_get_irq_chip_data(d);
d                 366 drivers/gpio/gpio-tegra.c 	unsigned int gpio = d->hwirq;
d                 368 drivers/gpio/gpio-tegra.c 	tegra_gpio_irq_mask(d);
d                 493 drivers/gpio/gpio-tegra.c static int tegra_gpio_irq_set_wake(struct irq_data *d, unsigned int enable)
d                 495 drivers/gpio/gpio-tegra.c 	struct tegra_gpio_bank *bank = irq_data_get_irq_chip_data(d);
d                 496 drivers/gpio/gpio-tegra.c 	unsigned int gpio = d->hwirq;
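The gpio-tegra hits end in the usual .irq_set_type flow: decode the IRQ_TYPE_* flags, program the trigger for d->hwirq, then pick the flow handler with irq_set_handler_locked(). A hedged sketch of just the handler-selection step, with the hardware programming omitted:

    #include <linux/errno.h>
    #include <linux/irq.h>

    /* Sketch only: the real drivers also program per-port trigger registers. */
    static int demo_gpio_irq_set_type(struct irq_data *d, unsigned int type)
    {
        switch (type & IRQ_TYPE_SENSE_MASK) {
        case IRQ_TYPE_EDGE_RISING:
        case IRQ_TYPE_EDGE_FALLING:
        case IRQ_TYPE_EDGE_BOTH:
            /* edge interrupts want the edge flow handler */
            irq_set_handler_locked(d, handle_edge_irq);
            break;
        case IRQ_TYPE_LEVEL_HIGH:
        case IRQ_TYPE_LEVEL_LOW:
            /* level interrupts want the level flow handler */
            irq_set_handler_locked(d, handle_level_irq);
            break;
        default:
            return -EINVAL;
        }
        return 0;
    }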
d                 412 drivers/gpio/gpio-thunderx.c static int thunderx_gpio_irq_translate(struct irq_domain *d,
d                 417 drivers/gpio/gpio-thunderx.c 	struct thunderx_gpio *txgpio = d->host_data;
d                 428 drivers/gpio/gpio-thunderx.c static int thunderx_gpio_irq_alloc(struct irq_domain *d, unsigned int virq,
d                 433 drivers/gpio/gpio-thunderx.c 	return irq_domain_set_hwirq_and_chip(d, virq, txline->line,
d                 101 drivers/gpio/gpio-timberdale.c static void timbgpio_irq_disable(struct irq_data *d)
d                 103 drivers/gpio/gpio-timberdale.c 	struct timbgpio *tgpio = irq_data_get_irq_chip_data(d);
d                 104 drivers/gpio/gpio-timberdale.c 	int offset = d->irq - tgpio->irq_base;
d                 113 drivers/gpio/gpio-timberdale.c static void timbgpio_irq_enable(struct irq_data *d)
d                 115 drivers/gpio/gpio-timberdale.c 	struct timbgpio *tgpio = irq_data_get_irq_chip_data(d);
d                 116 drivers/gpio/gpio-timberdale.c 	int offset = d->irq - tgpio->irq_base;
d                 125 drivers/gpio/gpio-timberdale.c static int timbgpio_irq_type(struct irq_data *d, unsigned trigger)
d                 127 drivers/gpio/gpio-timberdale.c 	struct timbgpio *tgpio = irq_data_get_irq_chip_data(d);
d                 128 drivers/gpio/gpio-timberdale.c 	int offset = d->irq - tgpio->irq_base;
d                 158 drivers/gpio/gpio-vf610.c static void vf610_gpio_irq_ack(struct irq_data *d)
d                 161 drivers/gpio/gpio-vf610.c 		gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                 162 drivers/gpio/gpio-vf610.c 	int gpio = d->hwirq;
d                 167 drivers/gpio/gpio-vf610.c static int vf610_gpio_irq_set_type(struct irq_data *d, u32 type)
d                 170 drivers/gpio/gpio-vf610.c 		gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                 193 drivers/gpio/gpio-vf610.c 	port->irqc[d->hwirq] = irqc;
d                 196 drivers/gpio/gpio-vf610.c 		irq_set_handler_locked(d, handle_level_irq);
d                 198 drivers/gpio/gpio-vf610.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 203 drivers/gpio/gpio-vf610.c static void vf610_gpio_irq_mask(struct irq_data *d)
d                 206 drivers/gpio/gpio-vf610.c 		gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                 207 drivers/gpio/gpio-vf610.c 	void __iomem *pcr_base = port->base + PORT_PCR(d->hwirq);
d                 212 drivers/gpio/gpio-vf610.c static void vf610_gpio_irq_unmask(struct irq_data *d)
d                 215 drivers/gpio/gpio-vf610.c 		gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                 216 drivers/gpio/gpio-vf610.c 	void __iomem *pcr_base = port->base + PORT_PCR(d->hwirq);
d                 218 drivers/gpio/gpio-vf610.c 	vf610_gpio_writel(port->irqc[d->hwirq] << PORT_PCR_IRQC_OFFSET,
d                 222 drivers/gpio/gpio-vf610.c static int vf610_gpio_irq_set_wake(struct irq_data *d, u32 enable)
d                 225 drivers/gpio/gpio-vf610.c 		gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                 102 drivers/gpio/gpio-vr41xx.c static void ack_giuint_low(struct irq_data *d)
d                 104 drivers/gpio/gpio-vr41xx.c 	giu_write(GIUINTSTATL, 1 << GPIO_PIN_OF_IRQ(d->irq));
d                 107 drivers/gpio/gpio-vr41xx.c static void mask_giuint_low(struct irq_data *d)
d                 109 drivers/gpio/gpio-vr41xx.c 	giu_clear(GIUINTENL, 1 << GPIO_PIN_OF_IRQ(d->irq));
d                 112 drivers/gpio/gpio-vr41xx.c static void mask_ack_giuint_low(struct irq_data *d)
d                 116 drivers/gpio/gpio-vr41xx.c 	pin = GPIO_PIN_OF_IRQ(d->irq);
d                 121 drivers/gpio/gpio-vr41xx.c static void unmask_giuint_low(struct irq_data *d)
d                 123 drivers/gpio/gpio-vr41xx.c 	giu_set(GIUINTENL, 1 << GPIO_PIN_OF_IRQ(d->irq));
d                 159 drivers/gpio/gpio-vr41xx.c static void ack_giuint_high(struct irq_data *d)
d                 162 drivers/gpio/gpio-vr41xx.c 		  1 << (GPIO_PIN_OF_IRQ(d->irq) - GIUINT_HIGH_OFFSET));
d                 165 drivers/gpio/gpio-vr41xx.c static void mask_giuint_high(struct irq_data *d)
d                 167 drivers/gpio/gpio-vr41xx.c 	giu_clear(GIUINTENH, 1 << (GPIO_PIN_OF_IRQ(d->irq) - GIUINT_HIGH_OFFSET));
d                 170 drivers/gpio/gpio-vr41xx.c static void mask_ack_giuint_high(struct irq_data *d)
d                 174 drivers/gpio/gpio-vr41xx.c 	pin = GPIO_PIN_OF_IRQ(d->irq) - GIUINT_HIGH_OFFSET;
d                 179 drivers/gpio/gpio-vr41xx.c static void unmask_giuint_high(struct irq_data *d)
d                 181 drivers/gpio/gpio-vr41xx.c 	giu_set(GIUINTENH, 1 << (GPIO_PIN_OF_IRQ(d->irq) - GIUINT_HIGH_OFFSET));
d                  74 drivers/gpio/gpio-xgene-sb.c static int xgene_gpio_sb_irq_set_type(struct irq_data *d, unsigned int type)
d                  76 drivers/gpio/gpio-xgene-sb.c 	struct xgene_gpio_sb *priv = irq_data_get_irq_chip_data(d);
d                  77 drivers/gpio/gpio-xgene-sb.c 	int gpio = HWIRQ_TO_GPIO(priv, d->hwirq);
d                  96 drivers/gpio/gpio-xgene-sb.c 			d->hwirq, lvl_type);
d                 100 drivers/gpio/gpio-xgene-sb.c 		return irq_chip_set_type_parent(d, IRQ_TYPE_EDGE_RISING);
d                 102 drivers/gpio/gpio-xgene-sb.c 		return irq_chip_set_type_parent(d, IRQ_TYPE_LEVEL_HIGH);
d                 129 drivers/gpio/gpio-xgene-sb.c static int xgene_gpio_sb_domain_activate(struct irq_domain *d,
d                 133 drivers/gpio/gpio-xgene-sb.c 	struct xgene_gpio_sb *priv = d->host_data;
d                 150 drivers/gpio/gpio-xgene-sb.c static void xgene_gpio_sb_domain_deactivate(struct irq_domain *d,
d                 153 drivers/gpio/gpio-xgene-sb.c 	struct xgene_gpio_sb *priv = d->host_data;
d                 161 drivers/gpio/gpio-xgene-sb.c static int xgene_gpio_sb_domain_translate(struct irq_domain *d,
d                 166 drivers/gpio/gpio-xgene-sb.c 	struct xgene_gpio_sb *priv = d->host_data;
d                 122 drivers/gpio/gpio-xlp.c static void xlp_gpio_irq_disable(struct irq_data *d)
d                 124 drivers/gpio/gpio-xlp.c 	struct gpio_chip *gc  = irq_data_get_irq_chip_data(d);
d                 129 drivers/gpio/gpio-xlp.c 	xlp_gpio_set_reg(priv->gpio_intr_en, d->hwirq, 0x0);
d                 130 drivers/gpio/gpio-xlp.c 	__clear_bit(d->hwirq, priv->gpio_enabled_mask);
d                 134 drivers/gpio/gpio-xlp.c static void xlp_gpio_irq_mask_ack(struct irq_data *d)
d                 136 drivers/gpio/gpio-xlp.c 	struct gpio_chip *gc  = irq_data_get_irq_chip_data(d);
d                 141 drivers/gpio/gpio-xlp.c 	xlp_gpio_set_reg(priv->gpio_intr_en, d->hwirq, 0x0);
d                 142 drivers/gpio/gpio-xlp.c 	xlp_gpio_set_reg(priv->gpio_intr_stat, d->hwirq, 0x1);
d                 143 drivers/gpio/gpio-xlp.c 	__clear_bit(d->hwirq, priv->gpio_enabled_mask);
d                 147 drivers/gpio/gpio-xlp.c static void xlp_gpio_irq_unmask(struct irq_data *d)
d                 149 drivers/gpio/gpio-xlp.c 	struct gpio_chip *gc  = irq_data_get_irq_chip_data(d);
d                 154 drivers/gpio/gpio-xlp.c 	xlp_gpio_set_reg(priv->gpio_intr_en, d->hwirq, 0x1);
d                 155 drivers/gpio/gpio-xlp.c 	__set_bit(d->hwirq, priv->gpio_enabled_mask);
d                 159 drivers/gpio/gpio-xlp.c static int xlp_gpio_set_irq_type(struct irq_data *d, unsigned int type)
d                 161 drivers/gpio/gpio-xlp.c 	struct gpio_chip *gc  = irq_data_get_irq_chip_data(d);
d                 186 drivers/gpio/gpio-xlp.c 	xlp_gpio_set_reg(priv->gpio_intr_type, d->hwirq, irq_type);
d                 187 drivers/gpio/gpio-xlp.c 	xlp_gpio_set_reg(priv->gpio_intr_pol, d->hwirq, pol);
d                 106 drivers/gpio/gpio-zx.c static int zx_irq_type(struct irq_data *d, unsigned trigger)
d                 108 drivers/gpio/gpio-zx.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 110 drivers/gpio/gpio-zx.c 	int offset = irqd_to_hwirq(d);
d                 177 drivers/gpio/gpio-zx.c static void zx_irq_mask(struct irq_data *d)
d                 179 drivers/gpio/gpio-zx.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 181 drivers/gpio/gpio-zx.c 	u16 mask = BIT(irqd_to_hwirq(d) % ZX_GPIO_NR);
d                 192 drivers/gpio/gpio-zx.c static void zx_irq_unmask(struct irq_data *d)
d                 194 drivers/gpio/gpio-zx.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 196 drivers/gpio/gpio-zx.c 	u16 mask = BIT(irqd_to_hwirq(d) % ZX_GPIO_NR);
d                 554 drivers/gpio/gpio-zynq.c static int zynq_gpio_irq_reqres(struct irq_data *d)
d                 556 drivers/gpio/gpio-zynq.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                 563 drivers/gpio/gpio-zynq.c 	return gpiochip_reqres_irq(chip, d->hwirq);
d                 566 drivers/gpio/gpio-zynq.c static void zynq_gpio_irq_relres(struct irq_data *d)
d                 568 drivers/gpio/gpio-zynq.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                 570 drivers/gpio/gpio-zynq.c 	gpiochip_relres_irq(chip, d->hwirq);
d                  95 drivers/gpio/gpiolib.c static inline void desc_set_label(struct gpio_desc *d, const char *label)
d                  97 drivers/gpio/gpiolib.c 	d->label = label;
d                1845 drivers/gpio/gpiolib.c static int gpiochip_hierarchy_irq_domain_translate(struct irq_domain *d,
d                1852 drivers/gpio/gpiolib.c 		return irq_domain_translate_twocell(d, fwspec, hwirq, type);
d                1859 drivers/gpio/gpiolib.c 		ret = irq_domain_translate_twocell(d, fwspec, hwirq, type);
d                1868 drivers/gpio/gpiolib.c static int gpiochip_hierarchy_irq_domain_alloc(struct irq_domain *d,
d                1873 drivers/gpio/gpiolib.c 	struct gpio_chip *gc = d->host_data;
d                1889 drivers/gpio/gpiolib.c 	ret = gc->irq.child_irq_domain_ops.translate(d, fwspec, &hwirq, &type);
d                1907 drivers/gpio/gpiolib.c 	irq_domain_set_info(d,
d                1921 drivers/gpio/gpiolib.c 	parent_fwspec.fwnode = d->parent->fwnode;
d                1928 drivers/gpio/gpiolib.c 	ret = irq_domain_alloc_irqs_parent(d, irq, 1, &parent_fwspec);
d                2045 drivers/gpio/gpiolib.c int gpiochip_irq_map(struct irq_domain *d, unsigned int irq,
d                2048 drivers/gpio/gpiolib.c 	struct gpio_chip *chip = d->host_data;
d                2085 drivers/gpio/gpiolib.c void gpiochip_irq_unmap(struct irq_domain *d, unsigned int irq)
d                2087 drivers/gpio/gpiolib.c 	struct gpio_chip *chip = d->host_data;
d                2168 drivers/gpio/gpiolib.c static int gpiochip_irq_reqres(struct irq_data *d)
d                2170 drivers/gpio/gpiolib.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                2172 drivers/gpio/gpiolib.c 	return gpiochip_reqres_irq(chip, d->hwirq);
d                2175 drivers/gpio/gpiolib.c static void gpiochip_irq_relres(struct irq_data *d)
d                2177 drivers/gpio/gpiolib.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                2179 drivers/gpio/gpiolib.c 	gpiochip_relres_irq(chip, d->hwirq);
d                2182 drivers/gpio/gpiolib.c static void gpiochip_irq_enable(struct irq_data *d)
d                2184 drivers/gpio/gpiolib.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                2186 drivers/gpio/gpiolib.c 	gpiochip_enable_irq(chip, d->hwirq);
d                2188 drivers/gpio/gpiolib.c 		chip->irq.irq_enable(d);
d                2190 drivers/gpio/gpiolib.c 		chip->irq.chip->irq_unmask(d);
d                2193 drivers/gpio/gpiolib.c static void gpiochip_irq_disable(struct irq_data *d)
d                2195 drivers/gpio/gpiolib.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                2205 drivers/gpio/gpiolib.c 		chip->irq.irq_disable(d);
d                2207 drivers/gpio/gpiolib.c 		chip->irq.chip->irq_mask(d);
d                2208 drivers/gpio/gpiolib.c 	gpiochip_disable_irq(chip, d->hwirq);
d                 217 drivers/gpio/sgpio-aspeed.c static void irqd_to_aspeed_sgpio_data(struct irq_data *d,
d                 224 drivers/gpio/sgpio-aspeed.c 	*offset = irqd_to_hwirq(d);
d                 225 drivers/gpio/sgpio-aspeed.c 	internal = irq_data_get_irq_chip_data(d);
d                 233 drivers/gpio/sgpio-aspeed.c static void aspeed_sgpio_irq_ack(struct irq_data *d)
d                 242 drivers/gpio/sgpio-aspeed.c 	irqd_to_aspeed_sgpio_data(d, &gpio, &bank, &bit, &offset);
d                 253 drivers/gpio/sgpio-aspeed.c static void aspeed_sgpio_irq_set_mask(struct irq_data *d, bool set)
d                 262 drivers/gpio/sgpio-aspeed.c 	irqd_to_aspeed_sgpio_data(d, &gpio, &bank, &bit, &offset);
d                 278 drivers/gpio/sgpio-aspeed.c static void aspeed_sgpio_irq_mask(struct irq_data *d)
d                 280 drivers/gpio/sgpio-aspeed.c 	aspeed_sgpio_irq_set_mask(d, false);
d                 283 drivers/gpio/sgpio-aspeed.c static void aspeed_sgpio_irq_unmask(struct irq_data *d)
d                 285 drivers/gpio/sgpio-aspeed.c 	aspeed_sgpio_irq_set_mask(d, true);
d                 288 drivers/gpio/sgpio-aspeed.c static int aspeed_sgpio_set_type(struct irq_data *d, unsigned int type)
d                 301 drivers/gpio/sgpio-aspeed.c 	irqd_to_aspeed_sgpio_data(d, &gpio, &bank, &bit, &offset);
d                 343 drivers/gpio/sgpio-aspeed.c 	irq_set_handler_locked(d, handler);
d                1139 drivers/gpu/drm/amd/amdgpu/amdgpu.h #define amdgpu_asic_set_uvd_clocks(adev, v, d) (adev)->asic_funcs->set_uvd_clocks((adev), (v), (d))
d                 599 drivers/gpu/drm/amd/amdgpu/amdgpu_irq.c static int amdgpu_irqdomain_map(struct irq_domain *d,
d                 245 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h #define amdgpu_ring_emit_cntxcntl(r, d) (r)->funcs->emit_cntxcntl((r), (d))
d                 246 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h #define amdgpu_ring_emit_rreg(r, d) (r)->funcs->emit_rreg((r), (d))
d                 247 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h #define amdgpu_ring_emit_wreg(r, d, v) (r)->funcs->emit_wreg((r), (d), (v))
d                 248 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h #define amdgpu_ring_emit_reg_wait(r, d, v, m) (r)->funcs->emit_reg_wait((r), (d), (v), (m))
d                 100 drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.h #define amdgpu_emit_copy_buffer(adev, ib, s, d, b) (adev)->mman.buffer_funcs->emit_copy_buffer((ib),  (s), (d), (b))
d                 101 drivers/gpu/drm/amd/amdgpu/amdgpu_sdma.h #define amdgpu_emit_fill_buffer(adev, ib, s, d, b) (adev)->mman.buffer_funcs->emit_fill_buffer((ib), (s), (d), (b))
d                 343 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c #define AMDGPU_RAS_SUB_BLOCK(subblock, a, b, c, d, e, f, g, h)                             \
d                 347 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 		((a) | ((b) << 1) | ((c) << 2) | ((d) << 3)),                  \
d                2351 drivers/gpu/drm/amd/amdgpu/si_dpm.c 	u64 pwr_efficiency_ratio, n, d;
d                2357 drivers/gpu/drm/amd/amdgpu/si_dpm.c 	d = prev_vddc * prev_vddc;
d                2358 drivers/gpu/drm/amd/amdgpu/si_dpm.c 	pwr_efficiency_ratio = div64_u64(n, d);
d                  44 drivers/gpu/drm/amd/display/dc/basics/conversion.c 	uint16_t d = (uint16_t)dc_fixpt_floor(
d                  48 drivers/gpu/drm/amd/display/dc/basics/conversion.c 	if (d <= (uint16_t)(1 << integer_bits) - (1 / (uint16_t)divisor))
d                 588 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 		context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.cstate_exit_ns =
d                 590 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 		context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.cstate_enter_plus_exit_ns =
d                 592 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 		context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.pstate_change_ns =
d                 594 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 		context->bw_ctx.bw.dcn.watermarks.d.pte_meta_urgent_ns = v->ptemeta_urgent_watermark * 1000;
d                 595 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 		context->bw_ctx.bw.dcn.watermarks.d.urgent_ns = v->urgent_watermark * 1000;
d                 621 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 		context->bw_ctx.bw.dcn.watermarks.d = context->bw_ctx.bw.dcn.watermarks.a;
d                1127 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 		context->bw_ctx.bw.dcn.watermarks.d = context->bw_ctx.bw.dcn.watermarks.a;
d                 383 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 	if (safe_to_lower || watermarks->d.urgent_ns > hubbub1->watermarks.d.urgent_ns) {
d                 384 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 		hubbub1->watermarks.d.urgent_ns = watermarks->d.urgent_ns;
d                 385 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 		prog_wm_value = convert_and_clamp(watermarks->d.urgent_ns,
d                 392 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 			watermarks->d.urgent_ns, prog_wm_value);
d                 395 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 	if (safe_to_lower || watermarks->d.pte_meta_urgent_ns > hubbub1->watermarks.d.pte_meta_urgent_ns) {
d                 396 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 		hubbub1->watermarks.d.pte_meta_urgent_ns = watermarks->d.pte_meta_urgent_ns;
d                 397 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 		prog_wm_value = convert_and_clamp(watermarks->d.pte_meta_urgent_ns,
d                 402 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 			watermarks->d.pte_meta_urgent_ns, prog_wm_value);
d                 503 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 	if (safe_to_lower || watermarks->d.cstate_pstate.cstate_enter_plus_exit_ns
d                 504 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 			> hubbub1->watermarks.d.cstate_pstate.cstate_enter_plus_exit_ns) {
d                 505 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 		hubbub1->watermarks.d.cstate_pstate.cstate_enter_plus_exit_ns =
d                 506 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 				watermarks->d.cstate_pstate.cstate_enter_plus_exit_ns;
d                 508 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 				watermarks->d.cstate_pstate.cstate_enter_plus_exit_ns,
d                 514 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 			watermarks->d.cstate_pstate.cstate_enter_plus_exit_ns, prog_wm_value);
d                 517 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 	if (safe_to_lower || watermarks->d.cstate_pstate.cstate_exit_ns
d                 518 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 			> hubbub1->watermarks.d.cstate_pstate.cstate_exit_ns) {
d                 519 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 		hubbub1->watermarks.d.cstate_pstate.cstate_exit_ns =
d                 520 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 				watermarks->d.cstate_pstate.cstate_exit_ns;
d                 522 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 				watermarks->d.cstate_pstate.cstate_exit_ns,
d                 528 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 			watermarks->d.cstate_pstate.cstate_exit_ns, prog_wm_value);
d                 588 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 	if (safe_to_lower || watermarks->d.cstate_pstate.pstate_change_ns
d                 589 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 			> hubbub1->watermarks.d.cstate_pstate.pstate_change_ns) {
d                 590 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 		hubbub1->watermarks.d.cstate_pstate.pstate_change_ns =
d                 591 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 				watermarks->d.cstate_pstate.pstate_change_ns;
d                 593 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 				watermarks->d.cstate_pstate.pstate_change_ns,
d                 599 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 			watermarks->d.cstate_pstate.pstate_change_ns, prog_wm_value);
d                2108 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	uint16_t d = (uint16_t)dc_fixpt_floor(
d                2112 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	if (d <= (uint16_t)(1 << integer_bits) - (1 / (uint16_t)divisor))
d                2718 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	context->bw_ctx.bw.dcn.watermarks.d.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
d                2719 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
d                2720 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
d                2721 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
d                2722 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	context->bw_ctx.bw.dcn.watermarks.d.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
d                 237 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 	if (safe_to_lower || watermarks->d.urgent_ns > hubbub1->watermarks.d.urgent_ns) {
d                 238 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 		hubbub1->watermarks.d.urgent_ns = watermarks->d.urgent_ns;
d                 239 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 		prog_wm_value = convert_and_clamp(watermarks->d.urgent_ns,
d                 247 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 			watermarks->d.urgent_ns, prog_wm_value);
d                 371 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 	if (safe_to_lower || watermarks->d.cstate_pstate.cstate_enter_plus_exit_ns
d                 372 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 			> hubbub1->watermarks.d.cstate_pstate.cstate_enter_plus_exit_ns) {
d                 373 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 		hubbub1->watermarks.d.cstate_pstate.cstate_enter_plus_exit_ns =
d                 374 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 				watermarks->d.cstate_pstate.cstate_enter_plus_exit_ns;
d                 376 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 				watermarks->d.cstate_pstate.cstate_enter_plus_exit_ns,
d                 383 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 			watermarks->d.cstate_pstate.cstate_enter_plus_exit_ns, prog_wm_value);
d                 386 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 	if (safe_to_lower || watermarks->d.cstate_pstate.cstate_exit_ns
d                 387 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 			> hubbub1->watermarks.d.cstate_pstate.cstate_exit_ns) {
d                 388 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 		hubbub1->watermarks.d.cstate_pstate.cstate_exit_ns =
d                 389 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 				watermarks->d.cstate_pstate.cstate_exit_ns;
d                 391 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 				watermarks->d.cstate_pstate.cstate_exit_ns,
d                 398 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 			watermarks->d.cstate_pstate.cstate_exit_ns, prog_wm_value);
d                 460 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 	if (safe_to_lower || watermarks->d.cstate_pstate.pstate_change_ns
d                 461 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 			> hubbub1->watermarks.d.cstate_pstate.pstate_change_ns) {
d                 462 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 		hubbub1->watermarks.d.cstate_pstate.pstate_change_ns =
d                 463 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 				watermarks->d.cstate_pstate.pstate_change_ns;
d                 465 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 				watermarks->d.cstate_pstate.pstate_change_ns,
d                 472 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 			watermarks->d.cstate_pstate.pstate_change_ns, prog_wm_value);
d                1041 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	calculate_wm_set_for_vlevel(vlevel, table_entry, &context->bw_ctx.bw.dcn.watermarks.d,
d                  43 drivers/gpu/drm/amd/display/dc/dml/dml_inline_defs.h static inline double dml_min4(double a, double b, double c, double d)
d                  45 drivers/gpu/drm/amd/display/dc/dml/dml_inline_defs.h 	return dml_min(dml_min(a, b), dml_min(c, d));
d                  58 drivers/gpu/drm/amd/display/dc/dml/dml_inline_defs.h static inline double dml_max4(double a, double b, double c, double d)
d                  60 drivers/gpu/drm/amd/display/dc/dml/dml_inline_defs.h 	return dml_max(dml_max(a, b), dml_max(c, d));
d                  63 drivers/gpu/drm/amd/display/dc/dml/dml_inline_defs.h static inline double dml_max5(double a, double b, double c, double d, double e)
d                  65 drivers/gpu/drm/amd/display/dc/dml/dml_inline_defs.h 	return dml_max(dml_max4(a, b, c, d), e);
d                  54 drivers/gpu/drm/amd/display/dc/inc/hw/mem_input.h 	struct dcn_watermarks d;
d                 170 drivers/gpu/drm/amd/powerplay/inc/amdgpu_smu.h #define SMU_TABLE_INIT(tables, table_id, s, a, d)	\
d                 174 drivers/gpu/drm/amd/powerplay/inc/amdgpu_smu.h 		tables[table_id].domain = d;		\
d                  53 drivers/gpu/drm/amd/powerplay/inc/smu72.h 	int32_t d;
d                  45 drivers/gpu/drm/amd/powerplay/inc/smu73.h     int32_t d;
d                  78 drivers/gpu/drm/amd/powerplay/inc/smu74.h 	int32_t d;
d                 241 drivers/gpu/drm/armada/armada_drv.c 		struct device *d;
d                 254 drivers/gpu/drm/armada/armada_drv.c 			d = bus_find_device_by_name(&platform_bus_type, NULL,
d                 256 drivers/gpu/drm/armada/armada_drv.c 			if (d && d->of_node)
d                 257 drivers/gpu/drm/armada/armada_drv.c 				armada_add_endpoints(dev, &match, d->of_node);
d                 258 drivers/gpu/drm/armada/armada_drv.c 			put_device(d);
d                 463 drivers/gpu/drm/bridge/sii902x.c 		unsigned int d = abs(div - sii902x_mclk_div_table[i]);
d                 465 drivers/gpu/drm/bridge/sii902x.c 		if (d >= distance)
d                 469 drivers/gpu/drm/bridge/sii902x.c 		distance = d;
d                 470 drivers/gpu/drm/bridge/sii902x.c 		if (d == 0)
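The sii902x hits are a nearest-entry search over a divider table: keep the entry with the smallest distance to the target and stop early on an exact match. A standalone illustration of that search, using a made-up table:

    #include <stdio.h>
    #include <stdlib.h>

    /* assumed example divider table; the real one lives in the driver */
    static const int div_table[] = { 1, 2, 3, 4, 6, 8 };

    static int nearest_div(int target)
    {
        int best = div_table[0], distance = abs(target - div_table[0]);

        for (size_t i = 1; i < sizeof(div_table) / sizeof(div_table[0]); i++) {
            int d = abs(target - div_table[i]);

            if (d >= distance)
                continue;       /* not closer than what we already have */
            best = div_table[i];
            distance = d;
            if (d == 0)
                break;          /* exact match, stop searching */
        }
        return best;
    }

    int main(void)
    {
        printf("nearest divider to 5 is %d\n", nearest_div(5));  /* prints 4 */
        return 0;
    }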
d                 231 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 d[] = { arr }; \
d                 232 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_buf(ctx, addr, d, ARRAY_SIZE(d)); \
d                 245 drivers/gpu/drm/bridge/sil-sii8620.c 	const u16 d[] = { seq }; \
d                 246 drivers/gpu/drm/bridge/sil-sii8620.c 	__sii8620_write_seq(ctx, d, ARRAY_SIZE(d)); \
d                 251 drivers/gpu/drm/bridge/sil-sii8620.c 	static const u16 d[] = { seq }; \
d                 252 drivers/gpu/drm/bridge/sil-sii8620.c 	__sii8620_write_seq(ctx, d, ARRAY_SIZE(d)); \
d                 639 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 *d = ctx->burst.tx_buf;
d                 642 drivers/gpu/drm/bridge/sil-sii8620.c 		int len = d[1] + 2;
d                 646 drivers/gpu/drm/bridge/sil-sii8620.c 		d[0] = min(ctx->burst.rx_ack, 255);
d                 647 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->burst.rx_ack -= d[0];
d                 648 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_buf(ctx, REG_EMSC_XMIT_WRITE_PORT, d, len);
d                 651 drivers/gpu/drm/bridge/sil-sii8620.c 		d += len;
d                 669 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 buf[3], *d;
d                 688 drivers/gpu/drm/bridge/sil-sii8620.c 		d = sii8620_burst_get_rx_buf(ctx, len);
d                 689 drivers/gpu/drm/bridge/sil-sii8620.c 		if (!d)
d                 691 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_read_buf(ctx, REG_EMSC_RCV_READ_PORT, d, len);
d                 699 drivers/gpu/drm/bridge/sil-sii8620.c 	struct mhl_burst_blk_rcv_buffer_info *d =
d                 700 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_burst_get_tx_buf(ctx, sizeof(*d));
d                 701 drivers/gpu/drm/bridge/sil-sii8620.c 	if (!d)
d                 704 drivers/gpu/drm/bridge/sil-sii8620.c 	d->id = cpu_to_be16(MHL_BURST_ID_BLK_RCV_BUFFER_INFO);
d                 705 drivers/gpu/drm/bridge/sil-sii8620.c 	d->size = cpu_to_le16(size);
d                 710 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 *d = ptr, sum = 0;
d                 713 drivers/gpu/drm/bridge/sil-sii8620.c 		sum += *d++;
d                 728 drivers/gpu/drm/bridge/sil-sii8620.c 	struct mhl_burst_bits_per_pixel_fmt *d;
d                 729 drivers/gpu/drm/bridge/sil-sii8620.c 	const int size = sizeof(*d) + sizeof(d->desc[0]);
d                 731 drivers/gpu/drm/bridge/sil-sii8620.c 	d = sii8620_burst_get_tx_buf(ctx, size);
d                 732 drivers/gpu/drm/bridge/sil-sii8620.c 	if (!d)
d                 735 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mhl_burst_hdr_set(&d->hdr, MHL_BURST_ID_BITS_PER_PIXEL_FMT);
d                 736 drivers/gpu/drm/bridge/sil-sii8620.c 	d->num_entries = 1;
d                 737 drivers/gpu/drm/bridge/sil-sii8620.c 	d->desc[0].stream_id = 0;
d                 738 drivers/gpu/drm/bridge/sil-sii8620.c 	d->desc[0].pixel_format = fmt;
d                 739 drivers/gpu/drm/bridge/sil-sii8620.c 	d->hdr.checksum -= sii8620_checksum(d, size);
d                 744 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 *d = ctx->burst.rx_buf;
d                 748 drivers/gpu/drm/bridge/sil-sii8620.c 		int len = *d++;
d                 749 drivers/gpu/drm/bridge/sil-sii8620.c 		int id = get_unaligned_be16(&d[0]);
d                 753 drivers/gpu/drm/bridge/sil-sii8620.c 			ctx->burst.r_size = get_unaligned_le16(&d[2]);
d                 759 drivers/gpu/drm/bridge/sil-sii8620.c 		d += len;
d                1737 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mhl_burst_emsc_support_set(struct mhl_burst_emsc_support *d,
d                1740 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mhl_burst_hdr_set(&d->hdr, MHL_BURST_ID_EMSC_SUPPORT);
d                1741 drivers/gpu/drm/bridge/sil-sii8620.c 	d->num_entries = 1;
d                1742 drivers/gpu/drm/bridge/sil-sii8620.c 	d->burst_id[0] = cpu_to_be16(id);
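Earlier in the sil-sii8620 hits, sii8620_checksum() sums the bytes of a buffer and the burst header's checksum byte is then reduced by that sum, which forces the whole packet to sum to zero modulo 256. A standalone sketch of that additive checksum idea, not the driver's code:

    #include <stdint.h>
    #include <stdio.h>

    /* 8-bit additive checksum: sum of all bytes, truncated to a byte. */
    static uint8_t checksum8(const void *ptr, size_t len)
    {
        const uint8_t *d = ptr;
        uint8_t sum = 0;

        while (len--)
            sum += *d++;
        return sum;
    }

    int main(void)
    {
        /* hypothetical packet: byte 0 is the checksum field */
        uint8_t pkt[8] = { 0x00, 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde };

        /* adjust the checksum byte so the packet sums to zero (mod 256) */
        pkt[0] -= checksum8(pkt, sizeof(pkt));
        printf("total = 0x%02x\n", checksum8(pkt, sizeof(pkt)));  /* 0x00 */
        return 0;
    }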
d                 257 drivers/gpu/drm/drm_dp_aux_dev.c #define to_auxdev(d) container_of(d, struct drm_dp_aux_dev, aux)
d                1966 drivers/gpu/drm/drm_edid.c 	u8 d = ext[0x02];
d                1967 drivers/gpu/drm/drm_edid.c 	u8 *det_base = ext + d;
d                1969 drivers/gpu/drm/drm_edid.c 	n = (127 - d) / 18;
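The drm_edid.c hits read byte 0x02 of a 128-byte CEA extension block: it is the offset d at which the 18-byte detailed timing descriptors start, so at most (127 - d) / 18 descriptors fit before the trailing checksum byte. A small standalone check of that arithmetic (the block contents below are invented):

    #include <stdint.h>
    #include <stdio.h>

    #define EDID_BLOCK_SIZE       128
    #define DETAILED_TIMING_SIZE  18

    /* How many 18-byte DTDs can follow offset d, leaving byte 127 for the checksum. */
    static int max_dtd_count(const uint8_t *ext)
    {
        uint8_t d = ext[0x02];

        if (d == 0 || d >= EDID_BLOCK_SIZE)
            return 0;           /* no DTD section, or malformed offset */
        return (EDID_BLOCK_SIZE - 1 - d) / DETAILED_TIMING_SIZE;
    }

    int main(void)
    {
        uint8_t ext[EDID_BLOCK_SIZE] = { 0x02, 0x03, 0x20 };  /* DTDs start at 0x20 */

        printf("room for %d DTDs\n", max_dtd_count(ext));     /* (127 - 32) / 18 = 5 */
        return 0;
    }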
d                 592 drivers/gpu/drm/drm_ioc32.c 	struct drm_dma d;
d                 598 drivers/gpu/drm/drm_ioc32.c 	d.context = d32.context;
d                 599 drivers/gpu/drm/drm_ioc32.c 	d.send_count = d32.send_count;
d                 600 drivers/gpu/drm/drm_ioc32.c 	d.send_indices = compat_ptr(d32.send_indices);
d                 601 drivers/gpu/drm/drm_ioc32.c 	d.send_sizes = compat_ptr(d32.send_sizes);
d                 602 drivers/gpu/drm/drm_ioc32.c 	d.flags = d32.flags;
d                 603 drivers/gpu/drm/drm_ioc32.c 	d.request_count = d32.request_count;
d                 604 drivers/gpu/drm/drm_ioc32.c 	d.request_indices = compat_ptr(d32.request_indices);
d                 605 drivers/gpu/drm/drm_ioc32.c 	d.request_sizes = compat_ptr(d32.request_sizes);
d                 606 drivers/gpu/drm/drm_ioc32.c 	err = drm_ioctl_kernel(file, drm_legacy_dma_ioctl, &d, DRM_AUTH);
d                 610 drivers/gpu/drm/drm_ioc32.c 	if (put_user(d.request_size, &argp->request_size)
d                 611 drivers/gpu/drm/drm_ioc32.c 	    || put_user(d.granted_count, &argp->granted_count))
d                  62 drivers/gpu/drm/drm_legacy.h int drm_legacy_resctx(struct drm_device *d, void *v, struct drm_file *f);
d                  63 drivers/gpu/drm/drm_legacy.h int drm_legacy_addctx(struct drm_device *d, void *v, struct drm_file *f);
d                  64 drivers/gpu/drm/drm_legacy.h int drm_legacy_getctx(struct drm_device *d, void *v, struct drm_file *f);
d                  65 drivers/gpu/drm/drm_legacy.h int drm_legacy_switchctx(struct drm_device *d, void *v, struct drm_file *f);
d                  66 drivers/gpu/drm/drm_legacy.h int drm_legacy_newctx(struct drm_device *d, void *v, struct drm_file *f);
d                  67 drivers/gpu/drm/drm_legacy.h int drm_legacy_rmctx(struct drm_device *d, void *v, struct drm_file *f);
d                  69 drivers/gpu/drm/drm_legacy.h int drm_legacy_setsareactx(struct drm_device *d, void *v, struct drm_file *f);
d                  70 drivers/gpu/drm/drm_legacy.h int drm_legacy_getsareactx(struct drm_device *d, void *v, struct drm_file *f);
d                 102 drivers/gpu/drm/drm_legacy.h int drm_legacy_addmap_ioctl(struct drm_device *d, void *v, struct drm_file *f);
d                 103 drivers/gpu/drm/drm_legacy.h int drm_legacy_rmmap_ioctl(struct drm_device *d, void *v, struct drm_file *f);
d                 105 drivers/gpu/drm/drm_legacy.h int drm_legacy_addbufs(struct drm_device *d, void *v, struct drm_file *f);
d                 106 drivers/gpu/drm/drm_legacy.h int drm_legacy_infobufs(struct drm_device *d, void *v, struct drm_file *f);
d                 107 drivers/gpu/drm/drm_legacy.h int drm_legacy_markbufs(struct drm_device *d, void *v, struct drm_file *f);
d                 108 drivers/gpu/drm/drm_legacy.h int drm_legacy_freebufs(struct drm_device *d, void *v, struct drm_file *f);
d                 109 drivers/gpu/drm/drm_legacy.h int drm_legacy_mapbufs(struct drm_device *d, void *v, struct drm_file *f);
d                 110 drivers/gpu/drm/drm_legacy.h int drm_legacy_dma_ioctl(struct drm_device *d, void *v, struct drm_file *f);
d                 131 drivers/gpu/drm/drm_legacy.h void drm_legacy_vma_flush(struct drm_device *d);
d                 133 drivers/gpu/drm/drm_legacy.h static inline void drm_legacy_vma_flush(struct drm_device *d)
d                 153 drivers/gpu/drm/drm_legacy.h int drm_legacy_lock(struct drm_device *d, void *v, struct drm_file *f);
d                 154 drivers/gpu/drm/drm_legacy.h int drm_legacy_unlock(struct drm_device *d, void *v, struct drm_file *f);
d                  32 drivers/gpu/drm/drm_sysfs.c #define to_drm_minor(d) dev_get_drvdata(d)
d                  33 drivers/gpu/drm/drm_sysfs.c #define to_drm_connector(d) dev_get_drvdata(d)
d                 247 drivers/gpu/drm/exynos/exynos_drm_drv.c 		struct device *p = NULL, *d;
d                 252 drivers/gpu/drm/exynos/exynos_drm_drv.c 		while ((d = platform_find_device_by_driver(p, &info->driver->driver))) {
d                 256 drivers/gpu/drm/exynos/exynos_drm_drv.c 			    exynos_drm_check_fimc_device(d) == 0)
d                 258 drivers/gpu/drm/exynos/exynos_drm_drv.c 						    compare_dev, d);
d                 259 drivers/gpu/drm/exynos/exynos_drm_drv.c 			p = d;
d                 174 drivers/gpu/drm/i810/i810_dma.c static int i810_dma_get_buffer(struct drm_device *dev, drm_i810_dma_t *d,
d                 196 drivers/gpu/drm/i810/i810_dma.c 	d->granted = 1;
d                 197 drivers/gpu/drm/i810/i810_dma.c 	d->request_idx = buf->idx;
d                 198 drivers/gpu/drm/i810/i810_dma.c 	d->request_size = buf->total;
d                 199 drivers/gpu/drm/i810/i810_dma.c 	d->virtual = buf_priv->virtual;
d                1000 drivers/gpu/drm/i810/i810_dma.c 	drm_i810_dma_t *d = data;
d                1008 drivers/gpu/drm/i810/i810_dma.c 	d->granted = 0;
d                1010 drivers/gpu/drm/i810/i810_dma.c 	retcode = i810_dma_get_buffer(dev, d, file_priv);
d                1013 drivers/gpu/drm/i810/i810_dma.c 		  task_pid_nr(current), retcode, d->granted);
d                 385 drivers/gpu/drm/i915/display/dvo_ns2501.c #define NSPTR(d) ((NS2501Ptr)(d->DriverPrivate.ptr))
d                  67 drivers/gpu/drm/i915/display/dvo_sil164.c #define SILPTR(d) ((SIL164Ptr)(d->DriverPrivate.ptr))
d                1137 drivers/gpu/drm/i915/display/intel_display.c #define assert_fdi_tx_enabled(d, p) assert_fdi_tx(d, p, true)
d                1138 drivers/gpu/drm/i915/display/intel_display.c #define assert_fdi_tx_disabled(d, p) assert_fdi_tx(d, p, false)
d                1152 drivers/gpu/drm/i915/display/intel_display.c #define assert_fdi_rx_enabled(d, p) assert_fdi_rx(d, p, true)
d                1153 drivers/gpu/drm/i915/display/intel_display.c #define assert_fdi_rx_disabled(d, p) assert_fdi_rx(d, p, false)
d                 561 drivers/gpu/drm/i915/display/intel_display.h #define assert_pll_enabled(d, p) assert_pll(d, p, true)
d                 562 drivers/gpu/drm/i915/display/intel_display.h #define assert_pll_disabled(d, p) assert_pll(d, p, false)
d                 564 drivers/gpu/drm/i915/display/intel_display.h #define assert_dsi_pll_enabled(d) assert_dsi_pll(d, true)
d                 565 drivers/gpu/drm/i915/display/intel_display.h #define assert_dsi_pll_disabled(d) assert_dsi_pll(d, false)
d                 568 drivers/gpu/drm/i915/display/intel_display.h #define assert_fdi_rx_pll_enabled(d, p) assert_fdi_rx_pll(d, p, true)
d                 569 drivers/gpu/drm/i915/display/intel_display.h #define assert_fdi_rx_pll_disabled(d, p) assert_fdi_rx_pll(d, p, false)
d                 571 drivers/gpu/drm/i915/display/intel_display.h #define assert_pipe_enabled(d, p) assert_pipe(d, p, true)
d                 572 drivers/gpu/drm/i915/display/intel_display.h #define assert_pipe_disabled(d, p) assert_pipe(d, p, false)
d                2933 drivers/gpu/drm/i915/display/intel_dp.c #define assert_dp_port_disabled(d) assert_dp_port((d), false)
d                2943 drivers/gpu/drm/i915/display/intel_dp.c #define assert_edp_pll_enabled(d) assert_edp_pll((d), true)
d                2944 drivers/gpu/drm/i915/display/intel_dp.c #define assert_edp_pll_disabled(d) assert_edp_pll((d), false)
d                 692 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 	u64 a, b, c, d, diff, diff_best;
d                 722 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 	d = 1000000 * diff_best;
d                 724 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 	if (a < c && b < d) {
d                 731 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 	} else if (a >= c && b < d) {
d                 736 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 	} else if (a >= c && b >= d) {
d                1310 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 	unsigned int dco, d, i;
d                1315 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 	for (d = 0; d < ARRAY_SIZE(dividers); d++) {
d                1317 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 			for (i = 0; i < dividers[d].n_dividers; i++) {
d                1318 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 				unsigned int p = dividers[d].list[i];
d                1340 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 		if (d == 0 && ctx.p)
d                2289 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 	int d, best_div = 0, pdiv = 0, qdiv = 0, kdiv = 0;
d                2291 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 	for (d = 0; d < ARRAY_SIZE(dividers); d++) {
d                2292 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 		dco = afe_clock * dividers[d];
d                2299 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 				best_div = dividers[d];
d                 358 drivers/gpu/drm/i915/display/intel_dpll_mgr.h #define assert_shared_dpll_enabled(d, p) assert_shared_dpll(d, p, true)
d                 359 drivers/gpu/drm/i915/display/intel_dpll_mgr.h #define assert_shared_dpll_disabled(d, p) assert_shared_dpll(d, p, false)
d                 151 drivers/gpu/drm/i915/display/intel_lpe_audio.c static void lpe_audio_irq_unmask(struct irq_data *d)
d                 155 drivers/gpu/drm/i915/display/intel_lpe_audio.c static void lpe_audio_irq_mask(struct irq_data *d)
d                 153 drivers/gpu/drm/i915/display/intel_quirks.c 	struct pci_dev *d = i915->drm.pdev;
d                 159 drivers/gpu/drm/i915/display/intel_quirks.c 		if (d->device == q->device &&
d                 160 drivers/gpu/drm/i915/display/intel_quirks.c 		    (d->subsystem_vendor == q->subsystem_vendor ||
d                 162 drivers/gpu/drm/i915/display/intel_quirks.c 		    (d->subsystem_device == q->subsystem_device ||
d                  94 drivers/gpu/drm/i915/display/vlv_dsi_pll.c 			int d = abs(target_dsi_clk - calc_dsi_clk);
d                  95 drivers/gpu/drm/i915/display/vlv_dsi_pll.c 			if (d < delta) {
d                  96 drivers/gpu/drm/i915/display/vlv_dsi_pll.c 				delta = d;
d                  83 drivers/gpu/drm/i915/gt/intel_renderstate.c 	u32 *d;
d                  90 drivers/gpu/drm/i915/gt/intel_renderstate.c 	d = kmap_atomic(i915_gem_object_get_dirty_page(so->obj, 0));
d                 103 drivers/gpu/drm/i915/gt/intel_renderstate.c 				d[i++] = s;
d                 110 drivers/gpu/drm/i915/gt/intel_renderstate.c 		d[i++] = s;
d                 122 drivers/gpu/drm/i915/gt/intel_renderstate.c 		OUT_BATCH(d, i, MI_NOOP);
d                 145 drivers/gpu/drm/i915/gt/intel_renderstate.c 		OUT_BATCH(d, i, GEN9_MEDIA_POOL_STATE);
d                 146 drivers/gpu/drm/i915/gt/intel_renderstate.c 		OUT_BATCH(d, i, GEN9_MEDIA_POOL_ENABLE);
d                 147 drivers/gpu/drm/i915/gt/intel_renderstate.c 		OUT_BATCH(d, i, eu_pool_config);
d                 148 drivers/gpu/drm/i915/gt/intel_renderstate.c 		OUT_BATCH(d, i, 0);
d                 149 drivers/gpu/drm/i915/gt/intel_renderstate.c 		OUT_BATCH(d, i, 0);
d                 150 drivers/gpu/drm/i915/gt/intel_renderstate.c 		OUT_BATCH(d, i, 0);
d                 153 drivers/gpu/drm/i915/gt/intel_renderstate.c 	OUT_BATCH(d, i, MI_BATCH_BUFFER_END);
d                 163 drivers/gpu/drm/i915/gt/intel_renderstate.c 		drm_clflush_virt_range(d, i * sizeof(u32));
d                 164 drivers/gpu/drm/i915/gt/intel_renderstate.c 	kunmap_atomic(d);
d                 172 drivers/gpu/drm/i915/gt/intel_renderstate.c 	kunmap_atomic(d);
d                  51 drivers/gpu/drm/i915/gvt/firmware.c #define dev_to_drm_minor(d) dev_get_drvdata((d))
d                1810 drivers/gpu/drm/i915/gvt/handlers.c #define MMIO_F(reg, s, f, am, rm, d, r, w) do { \
d                1812 drivers/gpu/drm/i915/gvt/handlers.c 		f, s, am, rm, d, r, w); \
d                1817 drivers/gpu/drm/i915/gvt/handlers.c #define MMIO_D(reg, d) \
d                1818 drivers/gpu/drm/i915/gvt/handlers.c 	MMIO_F(reg, 4, 0, 0, 0, d, NULL, NULL)
d                1820 drivers/gpu/drm/i915/gvt/handlers.c #define MMIO_DH(reg, d, r, w) \
d                1821 drivers/gpu/drm/i915/gvt/handlers.c 	MMIO_F(reg, 4, 0, 0, 0, d, r, w)
d                1823 drivers/gpu/drm/i915/gvt/handlers.c #define MMIO_DFH(reg, d, f, r, w) \
d                1824 drivers/gpu/drm/i915/gvt/handlers.c 	MMIO_F(reg, 4, f, 0, 0, d, r, w)
d                1826 drivers/gpu/drm/i915/gvt/handlers.c #define MMIO_GM(reg, d, r, w) \
d                1827 drivers/gpu/drm/i915/gvt/handlers.c 	MMIO_F(reg, 4, F_GMADR, 0xFFFFF000, 0, d, r, w)
d                1829 drivers/gpu/drm/i915/gvt/handlers.c #define MMIO_GM_RDR(reg, d, r, w) \
d                1830 drivers/gpu/drm/i915/gvt/handlers.c 	MMIO_F(reg, 4, F_GMADR | F_CMD_ACCESS, 0xFFFFF000, 0, d, r, w)
d                1832 drivers/gpu/drm/i915/gvt/handlers.c #define MMIO_RO(reg, d, f, rm, r, w) \
d                1833 drivers/gpu/drm/i915/gvt/handlers.c 	MMIO_F(reg, 4, F_RO | f, 0, rm, d, r, w)
d                1835 drivers/gpu/drm/i915/gvt/handlers.c #define MMIO_RING_F(prefix, s, f, am, rm, d, r, w) do { \
d                1836 drivers/gpu/drm/i915/gvt/handlers.c 	MMIO_F(prefix(RENDER_RING_BASE), s, f, am, rm, d, r, w); \
d                1837 drivers/gpu/drm/i915/gvt/handlers.c 	MMIO_F(prefix(BLT_RING_BASE), s, f, am, rm, d, r, w); \
d                1838 drivers/gpu/drm/i915/gvt/handlers.c 	MMIO_F(prefix(GEN6_BSD_RING_BASE), s, f, am, rm, d, r, w); \
d                1839 drivers/gpu/drm/i915/gvt/handlers.c 	MMIO_F(prefix(VEBOX_RING_BASE), s, f, am, rm, d, r, w); \
d                1841 drivers/gpu/drm/i915/gvt/handlers.c 		MMIO_F(prefix(GEN8_BSD2_RING_BASE), s, f, am, rm, d, r, w); \
d                1844 drivers/gpu/drm/i915/gvt/handlers.c #define MMIO_RING_D(prefix, d) \
d                1845 drivers/gpu/drm/i915/gvt/handlers.c 	MMIO_RING_F(prefix, 4, 0, 0, 0, d, NULL, NULL)
d                1847 drivers/gpu/drm/i915/gvt/handlers.c #define MMIO_RING_DFH(prefix, d, f, r, w) \
d                1848 drivers/gpu/drm/i915/gvt/handlers.c 	MMIO_RING_F(prefix, 4, f, 0, 0, d, r, w)
d                1850 drivers/gpu/drm/i915/gvt/handlers.c #define MMIO_RING_GM(prefix, d, r, w) \
d                1851 drivers/gpu/drm/i915/gvt/handlers.c 	MMIO_RING_F(prefix, 4, F_GMADR, 0xFFFF0000, 0, d, r, w)
d                1853 drivers/gpu/drm/i915/gvt/handlers.c #define MMIO_RING_GM_RDR(prefix, d, r, w) \
d                1854 drivers/gpu/drm/i915/gvt/handlers.c 	MMIO_RING_F(prefix, 4, F_GMADR | F_CMD_ACCESS, 0xFFFF0000, 0, d, r, w)
d                1856 drivers/gpu/drm/i915/gvt/handlers.c #define MMIO_RING_RO(prefix, d, f, rm, r, w) \
d                1857 drivers/gpu/drm/i915/gvt/handlers.c 	MMIO_RING_F(prefix, 4, F_RO | f, 0, rm, d, r, w)
d                  70 drivers/gpu/drm/i915/i915_fixed.h 				       uint_fixed_16_16_t d)
d                  72 drivers/gpu/drm/i915/i915_fixed.h 	return DIV_ROUND_UP(val.val, d.val);
d                  97 drivers/gpu/drm/i915/i915_fixed.h static inline uint_fixed_16_16_t div_fixed16(u32 val, u32 d)
d                 102 drivers/gpu/drm/i915/i915_fixed.h 	tmp = DIV_ROUND_UP_ULL(tmp, d);
d                 107 drivers/gpu/drm/i915/i915_fixed.h static inline u32 div_round_up_u32_fixed16(u32 val, uint_fixed_16_16_t d)
d                 112 drivers/gpu/drm/i915/i915_fixed.h 	tmp = DIV_ROUND_UP_ULL(tmp, d.val);
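The i915_fixed.h hits divide 16.16 fixed-point values by widening to 64 bits, scaling, and rounding up with DIV_ROUND_UP_ULL. The same arithmetic in standalone C, mirroring the helper names from the hits but not the kernel's types:

    #include <stdint.h>
    #include <stdio.h>

    /* round-up division, the standalone equivalent of DIV_ROUND_UP_ULL */
    static uint64_t div_round_up_u64(uint64_t n, uint64_t d)
    {
        return (n + d - 1) / d;
    }

    /* divide an integer by a 16.16 fixed-point value, result as a plain u32 */
    static uint32_t div_round_up_u32_fixed16(uint32_t val, uint32_t d_fp16)
    {
        uint64_t tmp = (uint64_t)val << 16;  /* scale the numerator to 16.16 */

        return (uint32_t)div_round_up_u64(tmp, d_fp16);
    }

    int main(void)
    {
        uint32_t three_halves = 3 << 15;     /* 1.5 in 16.16 */

        /* 10 / 1.5 = 6.66..., rounded up to 7 */
        printf("%u\n", div_round_up_u32_fixed16(10, three_halves));
        return 0;
    }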
d                  88 drivers/gpu/drm/i915/intel_uncore.c #define fw_ack(d) readl((d)->reg_ack)
d                  89 drivers/gpu/drm/i915/intel_uncore.c #define fw_set(d, val) writel(_MASKED_BIT_ENABLE((val)), (d)->reg_set)
d                  90 drivers/gpu/drm/i915/intel_uncore.c #define fw_clear(d, val) writel(_MASKED_BIT_DISABLE((val)), (d)->reg_set)
d                  93 drivers/gpu/drm/i915/intel_uncore.c fw_domain_reset(const struct intel_uncore_forcewake_domain *d)
d                 101 drivers/gpu/drm/i915/intel_uncore.c 	fw_clear(d, 0xffff);
d                 105 drivers/gpu/drm/i915/intel_uncore.c fw_domain_arm_timer(struct intel_uncore_forcewake_domain *d)
d                 107 drivers/gpu/drm/i915/intel_uncore.c 	GEM_BUG_ON(d->uncore->fw_domains_timer & d->mask);
d                 108 drivers/gpu/drm/i915/intel_uncore.c 	d->uncore->fw_domains_timer |= d->mask;
d                 109 drivers/gpu/drm/i915/intel_uncore.c 	d->wake_count++;
d                 110 drivers/gpu/drm/i915/intel_uncore.c 	hrtimer_start_range_ns(&d->timer,
d                 117 drivers/gpu/drm/i915/intel_uncore.c __wait_for_ack(const struct intel_uncore_forcewake_domain *d,
d                 121 drivers/gpu/drm/i915/intel_uncore.c 	return wait_for_atomic((fw_ack(d) & ack) == value,
d                 126 drivers/gpu/drm/i915/intel_uncore.c wait_ack_clear(const struct intel_uncore_forcewake_domain *d,
d                 129 drivers/gpu/drm/i915/intel_uncore.c 	return __wait_for_ack(d, ack, 0);
d                 133 drivers/gpu/drm/i915/intel_uncore.c wait_ack_set(const struct intel_uncore_forcewake_domain *d,
d                 136 drivers/gpu/drm/i915/intel_uncore.c 	return __wait_for_ack(d, ack, ack);
d                 140 drivers/gpu/drm/i915/intel_uncore.c fw_domain_wait_ack_clear(const struct intel_uncore_forcewake_domain *d)
d                 142 drivers/gpu/drm/i915/intel_uncore.c 	if (wait_ack_clear(d, FORCEWAKE_KERNEL)) {
d                 144 drivers/gpu/drm/i915/intel_uncore.c 			  intel_uncore_forcewake_domain_to_str(d->id));
d                 155 drivers/gpu/drm/i915/intel_uncore.c fw_domain_wait_ack_with_fallback(const struct intel_uncore_forcewake_domain *d,
d                 179 drivers/gpu/drm/i915/intel_uncore.c 		wait_ack_clear(d, FORCEWAKE_KERNEL_FALLBACK);
d                 181 drivers/gpu/drm/i915/intel_uncore.c 		fw_set(d, FORCEWAKE_KERNEL_FALLBACK);
d                 184 drivers/gpu/drm/i915/intel_uncore.c 		wait_ack_set(d, FORCEWAKE_KERNEL_FALLBACK);
d                 186 drivers/gpu/drm/i915/intel_uncore.c 		ack_detected = (fw_ack(d) & ack_bit) == value;
d                 188 drivers/gpu/drm/i915/intel_uncore.c 		fw_clear(d, FORCEWAKE_KERNEL_FALLBACK);
d                 192 drivers/gpu/drm/i915/intel_uncore.c 			 intel_uncore_forcewake_domain_to_str(d->id),
d                 194 drivers/gpu/drm/i915/intel_uncore.c 			 fw_ack(d),
d                 201 drivers/gpu/drm/i915/intel_uncore.c fw_domain_wait_ack_clear_fallback(const struct intel_uncore_forcewake_domain *d)
d                 203 drivers/gpu/drm/i915/intel_uncore.c 	if (likely(!wait_ack_clear(d, FORCEWAKE_KERNEL)))
d                 206 drivers/gpu/drm/i915/intel_uncore.c 	if (fw_domain_wait_ack_with_fallback(d, ACK_CLEAR))
d                 207 drivers/gpu/drm/i915/intel_uncore.c 		fw_domain_wait_ack_clear(d);
d                 211 drivers/gpu/drm/i915/intel_uncore.c fw_domain_get(const struct intel_uncore_forcewake_domain *d)
d                 213 drivers/gpu/drm/i915/intel_uncore.c 	fw_set(d, FORCEWAKE_KERNEL);
d                 217 drivers/gpu/drm/i915/intel_uncore.c fw_domain_wait_ack_set(const struct intel_uncore_forcewake_domain *d)
d                 219 drivers/gpu/drm/i915/intel_uncore.c 	if (wait_ack_set(d, FORCEWAKE_KERNEL)) {
d                 221 drivers/gpu/drm/i915/intel_uncore.c 			  intel_uncore_forcewake_domain_to_str(d->id));
d                 227 drivers/gpu/drm/i915/intel_uncore.c fw_domain_wait_ack_set_fallback(const struct intel_uncore_forcewake_domain *d)
d                 229 drivers/gpu/drm/i915/intel_uncore.c 	if (likely(!wait_ack_set(d, FORCEWAKE_KERNEL)))
d                 232 drivers/gpu/drm/i915/intel_uncore.c 	if (fw_domain_wait_ack_with_fallback(d, ACK_SET))
d                 233 drivers/gpu/drm/i915/intel_uncore.c 		fw_domain_wait_ack_set(d);
d                 237 drivers/gpu/drm/i915/intel_uncore.c fw_domain_put(const struct intel_uncore_forcewake_domain *d)
d                 239 drivers/gpu/drm/i915/intel_uncore.c 	fw_clear(d, FORCEWAKE_KERNEL);
d                 245 drivers/gpu/drm/i915/intel_uncore.c 	struct intel_uncore_forcewake_domain *d;
d                 250 drivers/gpu/drm/i915/intel_uncore.c 	for_each_fw_domain_masked(d, fw_domains, uncore, tmp) {
d                 251 drivers/gpu/drm/i915/intel_uncore.c 		fw_domain_wait_ack_clear(d);
d                 252 drivers/gpu/drm/i915/intel_uncore.c 		fw_domain_get(d);
d                 255 drivers/gpu/drm/i915/intel_uncore.c 	for_each_fw_domain_masked(d, fw_domains, uncore, tmp)
d                 256 drivers/gpu/drm/i915/intel_uncore.c 		fw_domain_wait_ack_set(d);
d                 265 drivers/gpu/drm/i915/intel_uncore.c 	struct intel_uncore_forcewake_domain *d;
d                 270 drivers/gpu/drm/i915/intel_uncore.c 	for_each_fw_domain_masked(d, fw_domains, uncore, tmp) {
d                 271 drivers/gpu/drm/i915/intel_uncore.c 		fw_domain_wait_ack_clear_fallback(d);
d                 272 drivers/gpu/drm/i915/intel_uncore.c 		fw_domain_get(d);
d                 275 drivers/gpu/drm/i915/intel_uncore.c 	for_each_fw_domain_masked(d, fw_domains, uncore, tmp)
d                 276 drivers/gpu/drm/i915/intel_uncore.c 		fw_domain_wait_ack_set_fallback(d);
d                 284 drivers/gpu/drm/i915/intel_uncore.c 	struct intel_uncore_forcewake_domain *d;
d                 289 drivers/gpu/drm/i915/intel_uncore.c 	for_each_fw_domain_masked(d, fw_domains, uncore, tmp)
d                 290 drivers/gpu/drm/i915/intel_uncore.c 		fw_domain_put(d);
d                 299 drivers/gpu/drm/i915/intel_uncore.c 	struct intel_uncore_forcewake_domain *d;
d                 307 drivers/gpu/drm/i915/intel_uncore.c 	for_each_fw_domain_masked(d, fw_domains, uncore, tmp)
d                 308 drivers/gpu/drm/i915/intel_uncore.c 		fw_domain_reset(d);
d                 878 drivers/gpu/drm/i915/intel_uncore.c #define GEN_FW_RANGE(s, e, d) \
d                 879 drivers/gpu/drm/i915/intel_uncore.c 	{ .start = (s), .end = (e), .domains = (d) }
d                1362 drivers/gpu/drm/i915/intel_uncore.c 	struct intel_uncore_forcewake_domain *d;
d                1370 drivers/gpu/drm/i915/intel_uncore.c 	d = kzalloc(sizeof(*d), GFP_KERNEL);
d                1371 drivers/gpu/drm/i915/intel_uncore.c 	if (!d)
d                1377 drivers/gpu/drm/i915/intel_uncore.c 	d->uncore = uncore;
d                1378 drivers/gpu/drm/i915/intel_uncore.c 	d->wake_count = 0;
d                1379 drivers/gpu/drm/i915/intel_uncore.c 	d->reg_set = uncore->regs + i915_mmio_reg_offset(reg_set);
d                1380 drivers/gpu/drm/i915/intel_uncore.c 	d->reg_ack = uncore->regs + i915_mmio_reg_offset(reg_ack);
d                1382 drivers/gpu/drm/i915/intel_uncore.c 	d->id = domain_id;
d                1394 drivers/gpu/drm/i915/intel_uncore.c 	d->mask = BIT(domain_id);
d                1396 drivers/gpu/drm/i915/intel_uncore.c 	hrtimer_init(&d->timer, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
d                1397 drivers/gpu/drm/i915/intel_uncore.c 	d->timer.function = intel_uncore_fw_release_timer;
d                1401 drivers/gpu/drm/i915/intel_uncore.c 	fw_domain_reset(d);
d                1403 drivers/gpu/drm/i915/intel_uncore.c 	uncore->fw_domain[domain_id] = d;
d                1411 drivers/gpu/drm/i915/intel_uncore.c 	struct intel_uncore_forcewake_domain *d;
d                1415 drivers/gpu/drm/i915/intel_uncore.c 	d = fetch_and_zero(&uncore->fw_domain[domain_id]);
d                1416 drivers/gpu/drm/i915/intel_uncore.c 	if (!d)
d                1420 drivers/gpu/drm/i915/intel_uncore.c 	WARN_ON(d->wake_count);
d                1421 drivers/gpu/drm/i915/intel_uncore.c 	WARN_ON(hrtimer_cancel(&d->timer));
d                1422 drivers/gpu/drm/i915/intel_uncore.c 	kfree(d);
d                1427 drivers/gpu/drm/i915/intel_uncore.c 	struct intel_uncore_forcewake_domain *d;
d                1430 drivers/gpu/drm/i915/intel_uncore.c 	for_each_fw_domain(d, uncore, tmp)
d                1431 drivers/gpu/drm/i915/intel_uncore.c 		fw_domain_fini(uncore, d->id);
d                1560 drivers/gpu/drm/i915/intel_uncore.c #define ASSIGN_FW_DOMAINS_TABLE(uncore, d) \
d                1563 drivers/gpu/drm/i915/intel_uncore.c 			(struct intel_forcewake_range *)(d); \
d                1564 drivers/gpu/drm/i915/intel_uncore.c 	(uncore)->fw_domains_table_entries = ARRAY_SIZE((d)); \
d                  39 drivers/gpu/drm/i915/selftests/i915_syncmap.c 		unsigned int d;
d                  41 drivers/gpu/drm/i915/selftests/i915_syncmap.c 		for (d = 0; d < depth - 1; d++) {
d                  42 drivers/gpu/drm/i915/selftests/i915_syncmap.c 			if (last & BIT(depth - d - 1))
d                 592 drivers/gpu/drm/ingenic/ingenic-drm.c static void ingenic_drm_free_dma_hwdesc(void *d)
d                 594 drivers/gpu/drm/ingenic/ingenic-drm.c 	struct ingenic_drm *priv = d;
d                 476 drivers/gpu/drm/mcde/mcde_drv.c 		struct device *p = NULL, *d;
d                 478 drivers/gpu/drm/mcde/mcde_drv.c 		while ((d = platform_find_device_by_driver(p, drv))) {
d                 480 drivers/gpu/drm/mcde/mcde_drv.c 			component_match_add(dev, &match, mcde_compare_dev, d);
d                 481 drivers/gpu/drm/mcde/mcde_drv.c 			p = d;
d                  74 drivers/gpu/drm/mcde/mcde_dsi.c 	struct mcde_dsi *d;
d                  78 drivers/gpu/drm/mcde/mcde_dsi.c 	d = host_to_mcde_dsi(mdsi->host);
d                  80 drivers/gpu/drm/mcde/mcde_dsi.c 	dev_dbg(d->dev, "%s called\n", __func__);
d                  82 drivers/gpu/drm/mcde/mcde_dsi.c 	val = readl(d->regs + DSI_DIRECT_CMD_STS_FLAG);
d                  84 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_dbg(d->dev, "DSI_DIRECT_CMD_STS_FLAG = %08x\n", val);
d                  86 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_dbg(d->dev, "direct command write completed\n");
d                  89 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_dbg(d->dev, "direct command TE received\n");
d                  92 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev, "direct command ACK ERR received\n");
d                  94 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev, "direct command read ERR received\n");
d                  96 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_DIRECT_CMD_STS_CLR);
d                  98 drivers/gpu/drm/mcde/mcde_dsi.c 	val = readl(d->regs + DSI_CMD_MODE_STS_FLAG);
d                 100 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_dbg(d->dev, "DSI_CMD_MODE_STS_FLAG = %08x\n", val);
d                 103 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_dbg(d->dev, "CMD mode no TE\n");
d                 106 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_dbg(d->dev, "CMD mode TE miss\n");
d                 108 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev, "CMD mode SD1 underrun\n");
d                 110 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev, "CMD mode SD2 underrun\n");
d                 112 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev, "CMD mode unwanted RD\n");
d                 113 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_CMD_MODE_STS_CLR);
d                 115 drivers/gpu/drm/mcde/mcde_dsi.c 	val = readl(d->regs + DSI_DIRECT_CMD_RD_STS_FLAG);
d                 117 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_dbg(d->dev, "DSI_DIRECT_CMD_RD_STS_FLAG = %08x\n", val);
d                 118 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_DIRECT_CMD_RD_STS_CLR);
d                 120 drivers/gpu/drm/mcde/mcde_dsi.c 	val = readl(d->regs + DSI_TG_STS_FLAG);
d                 122 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_dbg(d->dev, "DSI_TG_STS_FLAG = %08x\n", val);
d                 123 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_TG_STS_CLR);
d                 125 drivers/gpu/drm/mcde/mcde_dsi.c 	val = readl(d->regs + DSI_VID_MODE_STS_FLAG);
d                 127 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev, "some video mode error status\n");
d                 128 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_VID_MODE_STS_CLR);
d                 136 drivers/gpu/drm/mcde/mcde_dsi.c 	struct mcde_dsi *d = host_to_mcde_dsi(host);
d                 143 drivers/gpu/drm/mcde/mcde_dsi.c 	dev_info(d->dev, "attached DSI device with %d lanes\n", mdsi->lanes);
d                 145 drivers/gpu/drm/mcde/mcde_dsi.c 	dev_info(d->dev, "format %08x, %dbpp\n", mdsi->format,
d                 147 drivers/gpu/drm/mcde/mcde_dsi.c 	dev_info(d->dev, "mode flags: %08lx\n", mdsi->mode_flags);
d                 149 drivers/gpu/drm/mcde/mcde_dsi.c 	d->mdsi = mdsi;
d                 150 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->mcde)
d                 151 drivers/gpu/drm/mcde/mcde_dsi.c 		d->mcde->mdsi = mdsi;
d                 159 drivers/gpu/drm/mcde/mcde_dsi.c 	struct mcde_dsi *d = host_to_mcde_dsi(host);
d                 161 drivers/gpu/drm/mcde/mcde_dsi.c 	d->mdsi = NULL;
d                 162 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->mcde)
d                 163 drivers/gpu/drm/mcde/mcde_dsi.c 		d->mcde->mdsi = NULL;
d                 177 drivers/gpu/drm/mcde/mcde_dsi.c 	struct mcde_dsi *d = host_to_mcde_dsi(host);
d                 188 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev,
d                 193 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev,
d                 198 drivers/gpu/drm/mcde/mcde_dsi.c 	dev_dbg(d->dev,
d                 220 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_DIRECT_CMD_MAIN_SETTINGS);
d                 228 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_DIRECT_CMD_WRDAT0);
d                 233 drivers/gpu/drm/mcde/mcde_dsi.c 		writel(val, d->regs + DSI_DIRECT_CMD_WRDAT1);
d                 239 drivers/gpu/drm/mcde/mcde_dsi.c 		writel(val, d->regs + DSI_DIRECT_CMD_WRDAT2);
d                 245 drivers/gpu/drm/mcde/mcde_dsi.c 		writel(val, d->regs + DSI_DIRECT_CMD_WRDAT3);
d                 248 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(~0, d->regs + DSI_DIRECT_CMD_STS_CLR);
d                 249 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(~0, d->regs + DSI_CMD_MODE_STS_CLR);
d                 251 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(1, d->regs + DSI_DIRECT_CMD_SEND);
d                 256 drivers/gpu/drm/mcde/mcde_dsi.c 		while (!(readl(d->regs + DSI_DIRECT_CMD_STS) &
d                 262 drivers/gpu/drm/mcde/mcde_dsi.c 			dev_err(d->dev, "DSI read timeout!\n");
d                 267 drivers/gpu/drm/mcde/mcde_dsi.c 		while (!(readl(d->regs + DSI_DIRECT_CMD_STS) &
d                 273 drivers/gpu/drm/mcde/mcde_dsi.c 			dev_err(d->dev, "DSI write timeout!\n");
d                 278 drivers/gpu/drm/mcde/mcde_dsi.c 	val = readl(d->regs + DSI_DIRECT_CMD_STS);
d                 280 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev, "read completed with error\n");
d                 281 drivers/gpu/drm/mcde/mcde_dsi.c 		writel(1, d->regs + DSI_DIRECT_CMD_RD_INIT);
d                 286 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev, "error during transmission: %04x\n",
d                 300 drivers/gpu/drm/mcde/mcde_dsi.c 		rdsz = readl(d->regs + DSI_DIRECT_CMD_RD_PROPERTY);
d                 302 drivers/gpu/drm/mcde/mcde_dsi.c 		rddat = readl(d->regs + DSI_DIRECT_CMD_RDDAT);
d                 304 drivers/gpu/drm/mcde/mcde_dsi.c 			dev_err(d->dev, "read error, requested %zd got %d\n",
d                 314 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(~0, d->regs + DSI_DIRECT_CMD_STS_CLR);
d                 315 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(~0, d->regs + DSI_CMD_MODE_STS_CLR);
d                 329 drivers/gpu/drm/mcde/mcde_dsi.c 	struct mcde_dsi *d;
d                 332 drivers/gpu/drm/mcde/mcde_dsi.c 	d = host_to_mcde_dsi(mdsi->host);
d                 341 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_DIRECT_CMD_MAIN_SETTINGS);
d                 346 drivers/gpu/drm/mcde/mcde_dsi.c 	       d->regs + DSI_DIRECT_CMD_STS_CLR);
d                 347 drivers/gpu/drm/mcde/mcde_dsi.c 	val = readl(d->regs + DSI_DIRECT_CMD_STS_CTL);
d                 350 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_DIRECT_CMD_STS_CTL);
d                 355 drivers/gpu/drm/mcde/mcde_dsi.c 	       d->regs + DSI_CMD_MODE_STS_CLR);
d                 356 drivers/gpu/drm/mcde/mcde_dsi.c 	val = readl(d->regs + DSI_CMD_MODE_STS_CTL);
d                 359 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_CMD_MODE_STS_CTL);
d                 362 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(1, d->regs + DSI_DIRECT_CMD_SEND);
d                 365 drivers/gpu/drm/mcde/mcde_dsi.c static void mcde_dsi_setup_video_mode(struct mcde_dsi *d,
d                 368 drivers/gpu/drm/mcde/mcde_dsi.c 	u8 bpp = mipi_dsi_pixel_format_to_bpp(d->mdsi->format);
d                 378 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->mdsi->mode_flags & MIPI_DSI_MODE_VIDEO_BURST)
d                 380 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->mdsi->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE) {
d                 385 drivers/gpu/drm/mcde/mcde_dsi.c 	switch (d->mdsi->format) {
d                 407 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev, "unknown pixel mode\n");
d                 420 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_VID_MAIN_CTL);
d                 433 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_VID_VSIZE);
d                 443 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->mdsi->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE) {
d                 466 drivers/gpu/drm/mcde/mcde_dsi.c 	dev_dbg(d->dev, "hfp: %u, hbp: %u, hsa: %u\n",
d                 475 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_VID_HSIZE1);
d                 479 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_VID_HSIZE2);
d                 489 drivers/gpu/drm/mcde/mcde_dsi.c 	bpl *= (d->hs_freq / 8);
d                 492 drivers/gpu/drm/mcde/mcde_dsi.c 	bpl *= d->mdsi->lanes;
d                 493 drivers/gpu/drm/mcde/mcde_dsi.c 	dev_dbg(d->dev, "calculated bytes per line: %llu\n", bpl);
d                 498 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->mdsi->mode_flags & MIPI_DSI_MODE_VIDEO_SYNC_PULSE) {
d                 502 drivers/gpu/drm/mcde/mcde_dsi.c 		writel(val, d->regs + DSI_VID_BLKSIZE2);
d                 506 drivers/gpu/drm/mcde/mcde_dsi.c 		writel(val, d->regs + DSI_VID_BLKSIZE1);
d                 509 drivers/gpu/drm/mcde/mcde_dsi.c 	line_duration = (blkline_pck + 6) / d->mdsi->lanes;
d                 510 drivers/gpu/drm/mcde/mcde_dsi.c 	dev_dbg(d->dev, "line duration %u\n", line_duration);
d                 517 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_VID_DPHY_TIME);
d                 521 drivers/gpu/drm/mcde/mcde_dsi.c 	blkeol_duration = (blkeol_pck + 6) / d->mdsi->lanes;
d                 522 drivers/gpu/drm/mcde/mcde_dsi.c 	dev_dbg(d->dev, "blkeol pck: %u, duration: %u\n",
d                 525 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->mdsi->mode_flags & MIPI_DSI_MODE_VIDEO_BURST) {
d                 527 drivers/gpu/drm/mcde/mcde_dsi.c 		val = readl(d->regs + DSI_VID_BLKSIZE1);
d                 529 drivers/gpu/drm/mcde/mcde_dsi.c 		writel(val, d->regs + DSI_VID_BLKSIZE1);
d                 530 drivers/gpu/drm/mcde/mcde_dsi.c 		writel(blkeol_pck, d->regs + DSI_VID_VCA_SETTING2);
d                 532 drivers/gpu/drm/mcde/mcde_dsi.c 		writel(blkeol_duration, d->regs + DSI_VID_PCK_TIME);
d                 533 drivers/gpu/drm/mcde/mcde_dsi.c 		writel(blkeol_duration - 6, d->regs + DSI_VID_VCA_SETTING1);
d                 537 drivers/gpu/drm/mcde/mcde_dsi.c 	val = readl(d->regs + DSI_VID_VCA_SETTING2);
d                 540 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_VID_VCA_SETTING2);
d                 543 drivers/gpu/drm/mcde/mcde_dsi.c 	val = readl(d->regs + DSI_MCTL_MAIN_DATA_CTL);
d                 545 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_MCTL_MAIN_DATA_CTL);
d                 548 drivers/gpu/drm/mcde/mcde_dsi.c 	val = readl(d->regs + DSI_CMD_MODE_CTL);
d                 550 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_CMD_MODE_CTL);
d                 553 drivers/gpu/drm/mcde/mcde_dsi.c 	val = readl(d->regs + DSI_VID_MODE_STS_CTL);
d                 556 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_VID_MODE_STS_CTL);
d                 559 drivers/gpu/drm/mcde/mcde_dsi.c 	val = readl(d->regs + DSI_MCTL_MAIN_DATA_CTL);
d                 561 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_MCTL_MAIN_DATA_CTL);
d                 564 drivers/gpu/drm/mcde/mcde_dsi.c static void mcde_dsi_start(struct mcde_dsi *d)
d                 571 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(0, d->regs + DSI_MCTL_INTEGRATION_MODE);
d                 578 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->mdsi->mode_flags & MIPI_DSI_MODE_EOT_PACKET)
d                 580 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_MCTL_MAIN_DATA_CTL);
d                 584 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_CMD_MODE_CTL);
d                 591 drivers/gpu/drm/mcde/mcde_dsi.c 	hs_freq = clk_get_rate(d->hs_clk);
d                 594 drivers/gpu/drm/mcde/mcde_dsi.c 	dev_dbg(d->dev, "UI value: %d\n", val);
d                 597 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_MCTL_DPHY_STATIC);
d                 606 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->mdsi->lanes == 2)
d                 608 drivers/gpu/drm/mcde/mcde_dsi.c 	if (!(d->mdsi->mode_flags & MIPI_DSI_CLOCK_NON_CONTINUOUS))
d                 613 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_MCTL_MAIN_PHY_CTL);
d                 617 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_MCTL_ULPOUT_TIME);
d                 620 drivers/gpu/drm/mcde/mcde_dsi.c 	       d->regs + DSI_DPHY_LANES_TRIM);
d                 626 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_MCTL_DPHY_TIMEOUT);
d                 632 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->mdsi->lanes == 2)
d                 634 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_MCTL_MAIN_EN);
d                 641 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->mdsi->lanes == 2)
d                 643 drivers/gpu/drm/mcde/mcde_dsi.c 	while ((readl(d->regs + DSI_MCTL_MAIN_STS) & val) != val) {
d                 647 drivers/gpu/drm/mcde/mcde_dsi.c 			dev_warn(d->dev, "DSI lanes did not start up\n");
d                 655 drivers/gpu/drm/mcde/mcde_dsi.c 	val = readl(d->regs + DSI_CMD_MODE_CTL);
d                 662 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(val, d->regs + DSI_CMD_MODE_CTL);
d                 666 drivers/gpu/drm/mcde/mcde_dsi.c 	dev_info(d->dev, "DSI link enabled\n");
d                 672 drivers/gpu/drm/mcde/mcde_dsi.c 	struct mcde_dsi *d = bridge_to_mcde_dsi(bridge);
d                 674 drivers/gpu/drm/mcde/mcde_dsi.c 	dev_info(d->dev, "enable DSI master\n");
d                 681 drivers/gpu/drm/mcde/mcde_dsi.c 	struct mcde_dsi *d = bridge_to_mcde_dsi(bridge);
d                 687 drivers/gpu/drm/mcde/mcde_dsi.c 	if (!d->mdsi) {
d                 688 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev, "no DSI device attached to encoder!\n");
d                 692 drivers/gpu/drm/mcde/mcde_dsi.c 	dev_info(d->dev, "set DSI master to %dx%d %lu Hz %s mode\n",
d                 694 drivers/gpu/drm/mcde/mcde_dsi.c 		 (d->mdsi->mode_flags & MIPI_DSI_MODE_VIDEO) ? "VIDEO" : "CMD"
d                 698 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->mdsi->lp_rate)
d                 699 drivers/gpu/drm/mcde/mcde_dsi.c 		lp_freq = d->mdsi->lp_rate;
d                 702 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->mdsi->hs_rate)
d                 703 drivers/gpu/drm/mcde/mcde_dsi.c 		hs_freq = d->mdsi->hs_rate;
d                 708 drivers/gpu/drm/mcde/mcde_dsi.c 	d->lp_freq = clk_round_rate(d->lp_clk, lp_freq);
d                 709 drivers/gpu/drm/mcde/mcde_dsi.c 	ret = clk_set_rate(d->lp_clk, d->lp_freq);
d                 711 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev, "failed to set LP clock rate %lu Hz\n",
d                 712 drivers/gpu/drm/mcde/mcde_dsi.c 			d->lp_freq);
d                 714 drivers/gpu/drm/mcde/mcde_dsi.c 	d->hs_freq = clk_round_rate(d->hs_clk, hs_freq);
d                 715 drivers/gpu/drm/mcde/mcde_dsi.c 	ret = clk_set_rate(d->hs_clk, d->hs_freq);
d                 717 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev, "failed to set HS clock rate %lu Hz\n",
d                 718 drivers/gpu/drm/mcde/mcde_dsi.c 			d->hs_freq);
d                 721 drivers/gpu/drm/mcde/mcde_dsi.c 	ret = clk_prepare_enable(d->lp_clk);
d                 723 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev, "failed to enable LP clock\n");
d                 725 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_info(d->dev, "DSI LP clock rate %lu Hz\n",
d                 726 drivers/gpu/drm/mcde/mcde_dsi.c 			 d->lp_freq);
d                 727 drivers/gpu/drm/mcde/mcde_dsi.c 	ret = clk_prepare_enable(d->hs_clk);
d                 729 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev, "failed to enable HS clock\n");
d                 731 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_info(d->dev, "DSI HS clock rate %lu Hz\n",
d                 732 drivers/gpu/drm/mcde/mcde_dsi.c 			 d->hs_freq);
d                 734 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->mdsi->mode_flags & MIPI_DSI_MODE_VIDEO) {
d                 735 drivers/gpu/drm/mcde/mcde_dsi.c 		mcde_dsi_setup_video_mode(d, mode);
d                 738 drivers/gpu/drm/mcde/mcde_dsi.c 		val = readl(d->regs + DSI_CMD_MODE_CTL);
d                 745 drivers/gpu/drm/mcde/mcde_dsi.c 		writel(val, d->regs + DSI_CMD_MODE_CTL);
d                 749 drivers/gpu/drm/mcde/mcde_dsi.c static void mcde_dsi_wait_for_command_mode_stop(struct mcde_dsi *d)
d                 760 drivers/gpu/drm/mcde/mcde_dsi.c 	while ((readl(d->regs + DSI_CMD_MODE_STS) & val) == val) {
d                 764 drivers/gpu/drm/mcde/mcde_dsi.c 			dev_warn(d->dev,
d                 771 drivers/gpu/drm/mcde/mcde_dsi.c static void mcde_dsi_wait_for_video_mode_stop(struct mcde_dsi *d)
d                 779 drivers/gpu/drm/mcde/mcde_dsi.c 	while ((readl(d->regs + DSI_VID_MODE_STS) & val) == val) {
d                 783 drivers/gpu/drm/mcde/mcde_dsi.c 			dev_warn(d->dev,
d                 792 drivers/gpu/drm/mcde/mcde_dsi.c 	struct mcde_dsi *d = bridge_to_mcde_dsi(bridge);
d                 796 drivers/gpu/drm/mcde/mcde_dsi.c 	writel(0, d->regs + DSI_VID_MODE_STS_CTL);
d                 798 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->mdsi->mode_flags & MIPI_DSI_MODE_VIDEO) {
d                 800 drivers/gpu/drm/mcde/mcde_dsi.c 		val = readl(d->regs + DSI_MCTL_MAIN_DATA_CTL);
d                 802 drivers/gpu/drm/mcde/mcde_dsi.c 		writel(val, d->regs + DSI_MCTL_MAIN_DATA_CTL);
d                 803 drivers/gpu/drm/mcde/mcde_dsi.c 		mcde_dsi_wait_for_video_mode_stop(d);
d                 806 drivers/gpu/drm/mcde/mcde_dsi.c 		mcde_dsi_wait_for_command_mode_stop(d);
d                 810 drivers/gpu/drm/mcde/mcde_dsi.c 	clk_disable_unprepare(d->hs_clk);
d                 811 drivers/gpu/drm/mcde/mcde_dsi.c 	clk_disable_unprepare(d->lp_clk);
d                 828 drivers/gpu/drm/mcde/mcde_dsi.c 	struct mcde_dsi *d = connector_to_mcde_dsi(connector);
d                 831 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->panel)
d                 832 drivers/gpu/drm/mcde/mcde_dsi.c 		return drm_panel_get_modes(d->panel);
d                 846 drivers/gpu/drm/mcde/mcde_dsi.c 	struct mcde_dsi *d = bridge_to_mcde_dsi(bridge);
d                 850 drivers/gpu/drm/mcde/mcde_dsi.c 	drm_connector_helper_add(&d->connector,
d                 854 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev, "we need atomic updates\n");
d                 858 drivers/gpu/drm/mcde/mcde_dsi.c 	ret = drm_connector_init(drm, &d->connector,
d                 862 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev, "failed to initialize DSI bridge connector\n");
d                 865 drivers/gpu/drm/mcde/mcde_dsi.c 	d->connector.polled = DRM_CONNECTOR_POLL_CONNECT;
d                 867 drivers/gpu/drm/mcde/mcde_dsi.c 	drm_connector_attach_encoder(&d->connector, bridge->encoder);
d                 869 drivers/gpu/drm/mcde/mcde_dsi.c 	ret = drm_bridge_attach(bridge->encoder, d->bridge_out, bridge);
d                 871 drivers/gpu/drm/mcde/mcde_dsi.c 		dev_err(d->dev, "failed to attach the DSI bridge\n");
d                 874 drivers/gpu/drm/mcde/mcde_dsi.c 	d->connector.status = connector_status_connected;
d                 891 drivers/gpu/drm/mcde/mcde_dsi.c 	struct mcde_dsi *d = dev_get_drvdata(dev);
d                 898 drivers/gpu/drm/mcde/mcde_dsi.c 		d->unused = true;
d                 901 drivers/gpu/drm/mcde/mcde_dsi.c 	d->mcde = mcde;
d                 903 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->mdsi)
d                 904 drivers/gpu/drm/mcde/mcde_dsi.c 		d->mcde->mdsi = d->mdsi;
d                 907 drivers/gpu/drm/mcde/mcde_dsi.c 	d->hs_clk = devm_clk_get(dev, "hs");
d                 908 drivers/gpu/drm/mcde/mcde_dsi.c 	if (IS_ERR(d->hs_clk)) {
d                 910 drivers/gpu/drm/mcde/mcde_dsi.c 		return PTR_ERR(d->hs_clk);
d                 913 drivers/gpu/drm/mcde/mcde_dsi.c 	d->lp_clk = devm_clk_get(dev, "lp");
d                 914 drivers/gpu/drm/mcde/mcde_dsi.c 	if (IS_ERR(d->lp_clk)) {
d                 916 drivers/gpu/drm/mcde/mcde_dsi.c 		return PTR_ERR(d->lp_clk);
d                 921 drivers/gpu/drm/mcde/mcde_dsi.c 	regmap_update_bits(d->prcmu, PRCM_DSI_SW_RESET,
d                 927 drivers/gpu/drm/mcde/mcde_dsi.c 	regmap_update_bits(d->prcmu, PRCM_DSI_SW_RESET,
d                 932 drivers/gpu/drm/mcde/mcde_dsi.c 	mcde_dsi_start(d);
d                 958 drivers/gpu/drm/mcde/mcde_dsi.c 		d->panel = panel;
d                 968 drivers/gpu/drm/mcde/mcde_dsi.c 	d->bridge_out = bridge;
d                 971 drivers/gpu/drm/mcde/mcde_dsi.c 	d->bridge.funcs = &mcde_dsi_bridge_funcs;
d                 972 drivers/gpu/drm/mcde/mcde_dsi.c 	d->bridge.of_node = dev->of_node;
d                 973 drivers/gpu/drm/mcde/mcde_dsi.c 	drm_bridge_add(&d->bridge);
d                 976 drivers/gpu/drm/mcde/mcde_dsi.c 	mcde->bridge = &d->bridge;
d                 986 drivers/gpu/drm/mcde/mcde_dsi.c 	struct mcde_dsi *d = dev_get_drvdata(dev);
d                 988 drivers/gpu/drm/mcde/mcde_dsi.c 	if (d->panel)
d                 989 drivers/gpu/drm/mcde/mcde_dsi.c 		drm_panel_bridge_remove(d->bridge_out);
d                 990 drivers/gpu/drm/mcde/mcde_dsi.c 	regmap_update_bits(d->prcmu, PRCM_DSI_SW_RESET,
d                1002 drivers/gpu/drm/mcde/mcde_dsi.c 	struct mcde_dsi *d;
d                1008 drivers/gpu/drm/mcde/mcde_dsi.c 	d = devm_kzalloc(dev, sizeof(*d), GFP_KERNEL);
d                1009 drivers/gpu/drm/mcde/mcde_dsi.c 	if (!d)
d                1011 drivers/gpu/drm/mcde/mcde_dsi.c 	d->dev = dev;
d                1012 drivers/gpu/drm/mcde/mcde_dsi.c 	platform_set_drvdata(pdev, d);
d                1015 drivers/gpu/drm/mcde/mcde_dsi.c 	d->prcmu =
d                1017 drivers/gpu/drm/mcde/mcde_dsi.c 	if (IS_ERR(d->prcmu)) {
d                1019 drivers/gpu/drm/mcde/mcde_dsi.c 		return PTR_ERR(d->prcmu);
d                1023 drivers/gpu/drm/mcde/mcde_dsi.c 	d->regs = devm_ioremap_resource(dev, res);
d                1024 drivers/gpu/drm/mcde/mcde_dsi.c 	if (IS_ERR(d->regs)) {
d                1026 drivers/gpu/drm/mcde/mcde_dsi.c 		return PTR_ERR(d->regs);
d                1029 drivers/gpu/drm/mcde/mcde_dsi.c 	dsi_id = readl(d->regs + DSI_ID_REG);
d                1032 drivers/gpu/drm/mcde/mcde_dsi.c 	host = &d->dsi_host;
d                1042 drivers/gpu/drm/mcde/mcde_dsi.c 	platform_set_drvdata(pdev, d);
d                1048 drivers/gpu/drm/mcde/mcde_dsi.c 	struct mcde_dsi *d = platform_get_drvdata(pdev);
d                1051 drivers/gpu/drm/mcde/mcde_dsi.c 	mipi_dsi_host_unregister(&d->dsi_host);
d                1085 drivers/gpu/drm/mga/mga_dma.c 			       struct drm_file *file_priv, struct drm_dma *d)
d                1090 drivers/gpu/drm/mga/mga_dma.c 	for (i = d->granted_count; i < d->request_count; i++) {
d                1097 drivers/gpu/drm/mga/mga_dma.c 		if (copy_to_user(&d->request_indices[i],
d                1100 drivers/gpu/drm/mga/mga_dma.c 		if (copy_to_user(&d->request_sizes[i],
d                1104 drivers/gpu/drm/mga/mga_dma.c 		d->granted_count++;
d                1114 drivers/gpu/drm/mga/mga_dma.c 	struct drm_dma *d = data;
d                1121 drivers/gpu/drm/mga/mga_dma.c 	if (d->send_count != 0) {
d                1123 drivers/gpu/drm/mga/mga_dma.c 			  task_pid_nr(current), d->send_count);
d                1129 drivers/gpu/drm/mga/mga_dma.c 	if (d->request_count < 0 || d->request_count > dma->buf_count) {
d                1131 drivers/gpu/drm/mga/mga_dma.c 			  task_pid_nr(current), d->request_count,
d                1138 drivers/gpu/drm/mga/mga_dma.c 	d->granted_count = 0;
d                1140 drivers/gpu/drm/mga/mga_dma.c 	if (d->request_count)
d                1141 drivers/gpu/drm/mga/mga_dma.c 		ret = mga_dma_get_buffers(dev, file_priv, d);
d                  96 drivers/gpu/drm/msm/disp/mdp5/mdp5_mdss.c static int mdss_hw_irqdomain_map(struct irq_domain *d, unsigned int irq,
d                  99 drivers/gpu/drm/msm/disp/mdp5/mdp5_mdss.c 	struct mdp5_mdss *mdp5_mdss = d->host_data;
d                 119 drivers/gpu/drm/msm/disp/mdp5/mdp5_mdss.c 	struct irq_domain *d;
d                 121 drivers/gpu/drm/msm/disp/mdp5/mdp5_mdss.c 	d = irq_domain_add_linear(dev->of_node, 32, &mdss_hw_irqdomain_ops,
d                 123 drivers/gpu/drm/msm/disp/mdp5/mdp5_mdss.c 	if (!d) {
d                 129 drivers/gpu/drm/msm/disp/mdp5/mdp5_mdss.c 	mdp5_mdss->irqcontroller.domain = d;
d                  10 drivers/gpu/drm/msm/dsi/phy/dsi_phy.c #define S_DIV_ROUND_UP(n, d)	\
d                  11 drivers/gpu/drm/msm/dsi/phy/dsi_phy.c 	(((n) >= 0) ? (((n) + (d) - 1) / (d)) : (((n) - (d) + 1) / (d)))
d                  86 drivers/gpu/drm/nouveau/dispnv50/disp.h #define evo_data(p, d) do {						\
d                  87 drivers/gpu/drm/nouveau/dispnv50/disp.h 	const u32 _d = (d);						\
d                  31 drivers/gpu/drm/nouveau/include/nvif/device.h #define nvif_nsec(d,n,cond...) ({                                              \
d                  32 drivers/gpu/drm/nouveau/include/nvif/device.h 	struct nvif_device *_device = (d);                                     \
d                  44 drivers/gpu/drm/nouveau/include/nvif/device.h #define nvif_usec(d,u,cond...) nvif_nsec((d), (u) * 1000, ##cond)
d                  45 drivers/gpu/drm/nouveau/include/nvif/device.h #define nvif_msec(d,m,cond...) nvif_usec((d), (m) * 1000, ##cond)
d                  51 drivers/gpu/drm/nouveau/include/nvif/object.h #define nvif_wr(a,f,b,c,d) ({                                                  \
d                  54 drivers/gpu/drm/nouveau/include/nvif/object.h 		f((d), (u8 __iomem *)_object->map.ptr + (c));                  \
d                  56 drivers/gpu/drm/nouveau/include/nvif/object.h 		nvif_object_wr(_object, (b), (c), (d));                        \
d                  64 drivers/gpu/drm/nouveau/include/nvif/object.h #define nvif_mask(a,b,c,d) ({                                                  \
d                  67 drivers/gpu/drm/nouveau/include/nvif/object.h 	nvif_wr32(__object, _addr, (_data & ~(c)) | (d));                      \
d                  71 drivers/gpu/drm/nouveau/include/nvif/object.h #define nvif_mthd(a,b,c,d) nvif_object_mthd((a), (b), (c), (d))
d                   5 drivers/gpu/drm/nouveau/include/nvif/unpack.h #define nvif_unvers(r,d,s,m) ({                                                \
d                   6 drivers/gpu/drm/nouveau/include/nvif/unpack.h 	void **_data = (d); __u32 *_size = (s); int _ret = (r);                \
d                  14 drivers/gpu/drm/nouveau/include/nvif/unpack.h #define nvif_unpack(r,d,s,m,vl,vh,x) ({                                        \
d                  15 drivers/gpu/drm/nouveau/include/nvif/unpack.h 	void **_data = (d); __u32 *_size = (s);                                \
d                 259 drivers/gpu/drm/nouveau/include/nvkm/core/device.h #define nvkm_rd08(d,a) ioread8((d)->pri + (a))
d                 260 drivers/gpu/drm/nouveau/include/nvkm/core/device.h #define nvkm_rd16(d,a) ioread16_native((d)->pri + (a))
d                 261 drivers/gpu/drm/nouveau/include/nvkm/core/device.h #define nvkm_rd32(d,a) ioread32_native((d)->pri + (a))
d                 262 drivers/gpu/drm/nouveau/include/nvkm/core/device.h #define nvkm_wr08(d,a,v) iowrite8((v), (d)->pri + (a))
d                 263 drivers/gpu/drm/nouveau/include/nvkm/core/device.h #define nvkm_wr16(d,a,v) iowrite16_native((v), (d)->pri + (a))
d                 264 drivers/gpu/drm/nouveau/include/nvkm/core/device.h #define nvkm_wr32(d,a,v) iowrite32_native((v), (d)->pri + (a))
d                 265 drivers/gpu/drm/nouveau/include/nvkm/core/device.h #define nvkm_mask(d,a,m,v) ({                                                  \
d                 266 drivers/gpu/drm/nouveau/include/nvkm/core/device.h 	struct nvkm_device *_device = (d);                                     \
d                 283 drivers/gpu/drm/nouveau/include/nvkm/core/device.h #define nvdev_printk_(d,l,p,f,a...) do {                                       \
d                 284 drivers/gpu/drm/nouveau/include/nvkm/core/device.h 	const struct nvkm_device *_device = (d);                               \
d                 288 drivers/gpu/drm/nouveau/include/nvkm/core/device.h #define nvdev_printk(d,l,p,f,a...) nvdev_printk_((d), NV_DBG_##l, p, f, ##a)
d                 289 drivers/gpu/drm/nouveau/include/nvkm/core/device.h #define nvdev_fatal(d,f,a...) nvdev_printk((d), FATAL,   crit, f, ##a)
d                 290 drivers/gpu/drm/nouveau/include/nvkm/core/device.h #define nvdev_error(d,f,a...) nvdev_printk((d), ERROR,    err, f, ##a)
d                 291 drivers/gpu/drm/nouveau/include/nvkm/core/device.h #define nvdev_warn(d,f,a...)  nvdev_printk((d),  WARN, notice, f, ##a)
d                 292 drivers/gpu/drm/nouveau/include/nvkm/core/device.h #define nvdev_info(d,f,a...)  nvdev_printk((d),  INFO,   info, f, ##a)
d                 293 drivers/gpu/drm/nouveau/include/nvkm/core/device.h #define nvdev_debug(d,f,a...) nvdev_printk((d), DEBUG,   info, f, ##a)
d                 294 drivers/gpu/drm/nouveau/include/nvkm/core/device.h #define nvdev_trace(d,f,a...) nvdev_printk((d), TRACE,   info, f, ##a)
d                 295 drivers/gpu/drm/nouveau/include/nvkm/core/device.h #define nvdev_spam(d,f,a...)  nvdev_printk((d),  SPAM,    dbg, f, ##a)
d                  74 drivers/gpu/drm/nouveau/include/nvkm/core/memory.h #define nvkm_wo32(o,a,d) (o)->ptrs->wr32((o), (a), (d))
d                  75 drivers/gpu/drm/nouveau/include/nvkm/core/memory.h #define nvkm_mo32(o,a,m,d) ({                                                  \
d                  77 drivers/gpu/drm/nouveau/include/nvkm/core/memory.h 	nvkm_wo32((o), _addr, (_data & ~(m)) | (d));                           \
d                  81 drivers/gpu/drm/nouveau/include/nvkm/core/memory.h #define nvkm_wo64(o,a,d) do {                                                  \
d                  82 drivers/gpu/drm/nouveau/include/nvkm/core/memory.h 	u64 __a = (a), __d = (d);                                              \
d                  87 drivers/gpu/drm/nouveau/include/nvkm/core/memory.h #define nvkm_fill(t,s,o,a,d,c) do {                                            \
d                  88 drivers/gpu/drm/nouveau/include/nvkm/core/memory.h 	u64 _a = (a), _c = (c), _d = (d), _o = _a >> s, _s = _c << s;          \
d                 103 drivers/gpu/drm/nouveau/include/nvkm/core/memory.h #define nvkm_fo32(o,a,d,c) nvkm_fill(32, 2, (o), (a), (d), (c))
d                 104 drivers/gpu/drm/nouveau/include/nvkm/core/memory.h #define nvkm_fo64(o,a,d,c) nvkm_fill(64, 3, (o), (a), (d), (c))
d                  52 drivers/gpu/drm/nouveau/include/nvkm/subdev/timer.h #define nvkm_nsec(d,n,cond...) ({                                              \
d                  57 drivers/gpu/drm/nouveau/include/nvkm/subdev/timer.h 	nvkm_timer_wait_init((d), (n), &_wait);                                \
d                  66 drivers/gpu/drm/nouveau/include/nvkm/subdev/timer.h #define nvkm_usec(d,u,cond...) nvkm_nsec((d), (u) * 1000, ##cond)
d                  67 drivers/gpu/drm/nouveau/include/nvkm/subdev/timer.h #define nvkm_msec(d,m,cond...) nvkm_usec((d), (m) * 1000, ##cond)
d                  69 drivers/gpu/drm/nouveau/include/nvkm/subdev/timer.h #define nvkm_wait_nsec(d,n,addr,mask,data)                                     \
d                  70 drivers/gpu/drm/nouveau/include/nvkm/subdev/timer.h 	nvkm_nsec(d, n,                                                        \
d                  71 drivers/gpu/drm/nouveau/include/nvkm/subdev/timer.h 		if ((nvkm_rd32(d, (addr)) & (mask)) == (data))                 \
d                  74 drivers/gpu/drm/nouveau/include/nvkm/subdev/timer.h #define nvkm_wait_usec(d,u,addr,mask,data)                                     \
d                  75 drivers/gpu/drm/nouveau/include/nvkm/subdev/timer.h 	nvkm_wait_nsec((d), (u) * 1000, (addr), (mask), (data))
d                  76 drivers/gpu/drm/nouveau/include/nvkm/subdev/timer.h #define nvkm_wait_msec(d,m,addr,mask,data)                                     \
d                  77 drivers/gpu/drm/nouveau/include/nvkm/subdev/timer.h 	nvkm_wait_usec((d), (m) * 1000, (addr), (mask), (data))
d                  36 drivers/gpu/drm/nouveau/nouveau_bios.h #define ROMPTR(d,x) ({            \
d                  37 drivers/gpu/drm/nouveau/nouveau_bios.h 	struct nouveau_drm *drm = nouveau_drm((d)); \
d                  41 drivers/gpu/drm/nouveau/nouveau_hwmon.c nouveau_hwmon_show_temp1_auto_point1_pwm(struct device *d,
d                  50 drivers/gpu/drm/nouveau/nouveau_hwmon.c nouveau_hwmon_temp1_auto_point1_temp(struct device *d,
d                  53 drivers/gpu/drm/nouveau/nouveau_hwmon.c 	struct drm_device *dev = dev_get_drvdata(d);
d                  61 drivers/gpu/drm/nouveau/nouveau_hwmon.c nouveau_hwmon_set_temp1_auto_point1_temp(struct device *d,
d                  65 drivers/gpu/drm/nouveau/nouveau_hwmon.c 	struct drm_device *dev = dev_get_drvdata(d);
d                  83 drivers/gpu/drm/nouveau/nouveau_hwmon.c nouveau_hwmon_temp1_auto_point1_temp_hyst(struct device *d,
d                  86 drivers/gpu/drm/nouveau/nouveau_hwmon.c 	struct drm_device *dev = dev_get_drvdata(d);
d                  94 drivers/gpu/drm/nouveau/nouveau_hwmon.c nouveau_hwmon_set_temp1_auto_point1_temp_hyst(struct device *d,
d                  98 drivers/gpu/drm/nouveau/nouveau_hwmon.c 	struct drm_device *dev = dev_get_drvdata(d);
d                 116 drivers/gpu/drm/nouveau/nouveau_hwmon.c nouveau_hwmon_get_pwm1_max(struct device *d,
d                 119 drivers/gpu/drm/nouveau/nouveau_hwmon.c 	struct drm_device *dev = dev_get_drvdata(d);
d                 132 drivers/gpu/drm/nouveau/nouveau_hwmon.c nouveau_hwmon_get_pwm1_min(struct device *d,
d                 135 drivers/gpu/drm/nouveau/nouveau_hwmon.c 	struct drm_device *dev = dev_get_drvdata(d);
d                 148 drivers/gpu/drm/nouveau/nouveau_hwmon.c nouveau_hwmon_set_pwm1_min(struct device *d, struct device_attribute *a,
d                 151 drivers/gpu/drm/nouveau/nouveau_hwmon.c 	struct drm_device *dev = dev_get_drvdata(d);
d                 171 drivers/gpu/drm/nouveau/nouveau_hwmon.c nouveau_hwmon_set_pwm1_max(struct device *d, struct device_attribute *a,
d                 174 drivers/gpu/drm/nouveau/nouveau_hwmon.c 	struct drm_device *dev = dev_get_drvdata(d);
d                  18 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.h #define mmio_vram(a,b,c,d) gf100_grctx_mmio_data((a), (b), (c), (d))
d                  19 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.h #define mmio_refn(a,b,c,d,e) gf100_grctx_mmio_item((a), (b), (c), (d), (e))
d                 958 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c 	int index, c = ltc->zbc_min, d = ltc->zbc_min, s = ltc->zbc_min;
d                 965 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c 		gf100_gr_zbc_depth_get(gr, 1, 0x00000000, 0x00000000); d++;
d                 966 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c 		gf100_gr_zbc_depth_get(gr, 1, 0x3f800000, 0x3f800000); d++;
d                 976 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c 	for (index = d; index <= ltc->zbc_max; index++)
d                  31 drivers/gpu/drm/nouveau/nvkm/subdev/bios/dp.c 	struct bit_entry d;
d                  33 drivers/gpu/drm/nouveau/nvkm/subdev/bios/dp.c 	if (!bit_entry(bios, 'd', &d)) {
d                  34 drivers/gpu/drm/nouveau/nvkm/subdev/bios/dp.c 		if (d.version == 1 && d.length >= 2) {
d                  35 drivers/gpu/drm/nouveau/nvkm/subdev/bios/dp.c 			u16 data = nvbios_rd16(bios, d.offset);
d                  10 drivers/gpu/drm/nouveau/nvkm/subdev/clk/seq.h #define clk_wr32(s,r,d)     hwsq_wr32(&(s)->base, &(s)->r_##r, (d))
d                  11 drivers/gpu/drm/nouveau/nvkm/subdev/clk/seq.h #define clk_mask(s,r,m,d)   hwsq_mask(&(s)->base, &(s)->r_##r, (m), (d))
d                  12 drivers/gpu/drm/nouveau/nvkm/subdev/clk/seq.h #define clk_setf(s,f,d)     hwsq_setf(&(s)->base, (f), (d))
d                  13 drivers/gpu/drm/nouveau/nvkm/subdev/clk/seq.h #define clk_wait(s,f,d)     hwsq_wait(&(s)->base, (f), (d))
d                 168 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramfuc.h #define ram_wr32(s,r,d)      ramfuc_wr32(&(s)->base, &(s)->r_##r, (d))
d                 170 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramfuc.h #define ram_mask(s,r,m,d)    ramfuc_mask(&(s)->base, &(s)->r_##r, (m), (d))
d                 171 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramfuc.h #define ram_wait(s,r,m,d,n)  ramfuc_wait(&(s)->base, (r), (m), (d), (n))
d                 248 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c #define ram_nuts(s,r,m,d,c)                                                    \
d                 249 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	gk104_ram_nuts((s), &(s)->fuc.r_##r, (m), (d), (c))
d                1443 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	struct nvbios_ramcfg *d = &ram->diff;
d                1484 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	d->rammap_11_0a_03fe |= p->rammap_11_0a_03fe != n->rammap_11_0a_03fe;
d                1485 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	d->rammap_11_09_01ff |= p->rammap_11_09_01ff != n->rammap_11_09_01ff;
d                1486 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	d->rammap_11_0a_0400 |= p->rammap_11_0a_0400 != n->rammap_11_0a_0400;
d                1487 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	d->rammap_11_0a_0800 |= p->rammap_11_0a_0800 != n->rammap_11_0a_0800;
d                1488 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	d->rammap_11_0b_01f0 |= p->rammap_11_0b_01f0 != n->rammap_11_0b_01f0;
d                1489 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	d->rammap_11_0b_0200 |= p->rammap_11_0b_0200 != n->rammap_11_0b_0200;
d                1490 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	d->rammap_11_0d |= p->rammap_11_0d != n->rammap_11_0d;
d                1491 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	d->rammap_11_0f |= p->rammap_11_0f != n->rammap_11_0f;
d                1492 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	d->rammap_11_0e |= p->rammap_11_0e != n->rammap_11_0e;
d                1493 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	d->rammap_11_0b_0800 |= p->rammap_11_0b_0800 != n->rammap_11_0b_0800;
d                1494 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	d->rammap_11_0b_0400 |= p->rammap_11_0b_0400 != n->rammap_11_0b_0400;
d                1495 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	d->ramcfg_11_01_01 |= p->ramcfg_11_01_01 != n->ramcfg_11_01_01;
d                1496 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	d->ramcfg_11_01_02 |= p->ramcfg_11_01_02 != n->ramcfg_11_01_02;
d                1497 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	d->ramcfg_11_01_10 |= p->ramcfg_11_01_10 != n->ramcfg_11_01_10;
d                1498 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	d->ramcfg_11_02_03 |= p->ramcfg_11_02_03 != n->ramcfg_11_02_03;
d                1499 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	d->ramcfg_11_08_20 |= p->ramcfg_11_08_20 != n->ramcfg_11_08_20;
d                1500 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	d->timing_20_30_07 |= p->timing_20_30_07 != n->timing_20_30_07;
d                  10 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramseq.h #define ram_wr32(s,r,d)     hwsq_wr32(&(s)->base, &(s)->r_##r, (d))
d                  12 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramseq.h #define ram_mask(s,r,m,d)   hwsq_mask(&(s)->base, &(s)->r_##r, (m), (d))
d                  13 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramseq.h #define ram_setf(s,f,d)     hwsq_setf(&(s)->base, (f), (d))
d                  14 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramseq.h #define ram_wait(s,f,d)     hwsq_wait(&(s)->base, (f), (d))
d                 324 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h #define VMM_FO(m,o,d,c,b) nvkm_fo##b((m)->memory, (o), (d), (c))
d                 325 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h #define VMM_WO(m,o,d,c,b) nvkm_wo##b((m)->memory, (o), (d))
d                 326 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h #define VMM_XO(m,v,o,d,c,b,fn,f,a...) do {                                     \
d                 327 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h 	const u32 _pteo = (o); u##b _data = (d);                               \
d                 332 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h #define VMM_WO032(m,v,o,d) VMM_XO((m),(v),(o),(d),  1, 32, WO, "%08x")
d                 333 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h #define VMM_FO032(m,v,o,d,c)                                                   \
d                 334 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h 	VMM_XO((m),(v),(o),(d),(c), 32, FO, "%08x %08x", (c))
d                 336 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h #define VMM_WO064(m,v,o,d) VMM_XO((m),(v),(o),(d),  1, 64, WO, "%016llx")
d                 337 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h #define VMM_FO064(m,v,o,d,c)                                                   \
d                 338 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h 	VMM_XO((m),(v),(o),(d),(c), 64, FO, "%016llx %08x", (c))
d                  96 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv04.c 	u32 n, d;
d                  99 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv04.c 	d = 1000000 / 32;
d                 104 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv04.c 		d = nvkm_rd32(device, NV04_PTIMER_DENOMINATOR);
d                 105 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv04.c 		if (!n || !d) {
d                 107 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv04.c 			d = 1;
d                 113 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv04.c 	while (((n % 5) == 0) && ((d % 5) == 0)) {
d                 115 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv04.c 		d /= 5;
d                 118 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv04.c 	while (((n % 2) == 0) && ((d % 2) == 0)) {
d                 120 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv04.c 		d /= 2;
d                 123 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv04.c 	while (n > 0xffff || d > 0xffff) {
d                 125 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv04.c 		d >>= 1;
d                 130 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv04.c 	nvkm_debug(subdev, "denominator     : %08x\n", d);
d                 131 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv04.c 	nvkm_debug(subdev, "timer frequency : %dHz\n", f * d / n);
d                 134 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv04.c 	nvkm_wr32(device, NV04_PTIMER_DENOMINATOR, d);
d                  33 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv40.c 	u32 n, d;
d                  36 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv40.c 	d = 1000000 / 32;
d                  41 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv40.c 		d = nvkm_rd32(device, NV04_PTIMER_DENOMINATOR);
d                  42 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv40.c 		if (!n || !d) {
d                  44 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv40.c 			d = 1;
d                  50 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv40.c 	while (((n % 5) == 0) && ((d % 5) == 0)) {
d                  52 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv40.c 		d /= 5;
d                  55 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv40.c 	while (((n % 2) == 0) && ((d % 2) == 0)) {
d                  57 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv40.c 		d /= 2;
d                  60 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv40.c 	while (n > 0xffff || d > 0xffff) {
d                  62 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv40.c 		d >>= 1;
d                  67 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv40.c 	nvkm_debug(subdev, "denominator     : %08x\n", d);
d                  68 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv40.c 	nvkm_debug(subdev, "timer frequency : %dHz\n", f * d / n);
d                  71 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv40.c 	nvkm_wr32(device, NV04_PTIMER_DENOMINATOR, d);
d                  33 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv41.c 	u32 m = 1, n, d;
d                  36 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv41.c 	d = 1000000 / 32;
d                  39 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv41.c 	while (n < (d * 2)) {
d                  45 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv41.c 	while (((n % 5) == 0) && ((d % 5) == 0)) {
d                  47 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv41.c 		d /= 5;
d                  50 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv41.c 	while (((n % 2) == 0) && ((d % 2) == 0)) {
d                  52 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv41.c 		d /= 2;
d                  55 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv41.c 	while (n > 0xffff || d > 0xffff) {
d                  57 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv41.c 		d >>= 1;
d                  63 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv41.c 	nvkm_debug(subdev, "denominator     : %08x\n", d);
d                  64 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv41.c 	nvkm_debug(subdev, "timer frequency : %dHz\n", (f * m) * d / n);
d                  68 drivers/gpu/drm/nouveau/nvkm/subdev/timer/nv41.c 	nvkm_wr32(device, NV04_PTIMER_DENOMINATOR, d);
d                5265 drivers/gpu/drm/omapdrm/dss/dsi.c 	const struct dsi_module_id_data *d;
d                5340 drivers/gpu/drm/omapdrm/dss/dsi.c 	d = dsi->data->modules;
d                5341 drivers/gpu/drm/omapdrm/dss/dsi.c 	while (d->address != 0 && d->address != dsi_mem->start)
d                5342 drivers/gpu/drm/omapdrm/dss/dsi.c 		d++;
d                5344 drivers/gpu/drm/omapdrm/dss/dsi.c 	if (d->address == 0) {
d                5349 drivers/gpu/drm/omapdrm/dss/dsi.c 	dsi->module_id = d->id;
d                 487 drivers/gpu/drm/omapdrm/dss/omapdss.h #define for_each_dss_output(d) \
d                 488 drivers/gpu/drm/omapdrm/dss/omapdss.h 	while ((d = omapdss_device_next_output(d)) != NULL)
d                 100 drivers/gpu/drm/omapdrm/omap_connector.c 	struct omap_dss_device *d;
d                 102 drivers/gpu/drm/omapdrm/omap_connector.c 	for (d = omap_connector->output; d; d = d->next) {
d                 103 drivers/gpu/drm/omapdrm/omap_connector.c 		if (d->ops_flags & op)
d                 104 drivers/gpu/drm/omapdrm/omap_connector.c 			dssdev = d;
d                 120 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	static const u8 d[] = { seq };			\
d                 121 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	otm8009a_dcs_write_buf(ctx, d, ARRAY_SIZE(d));	\
d                 131 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	static const u8 d[] = { seq };				\
d                 133 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	rm68200_dcs_write_buf(ctx, d, ARRAY_SIZE(d));		\
d                 142 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	static const u8 d[] = { seq };				\
d                 145 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	for (i = 0; i < ARRAY_SIZE(d) ; i++)			\
d                 146 drivers/gpu/drm/panel/panel-raydium-rm68200.c 		rm68200_dcs_write_cmd(ctx, cmd + i, d[i]);	\
d                  64 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 		static const u8 d[] = { seq };				\
d                  66 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 		ret = mipi_dsi_generic_write(dsi, d, ARRAY_SIZE(d));	\
d                 168 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	static const u8 d[] = { seq };\
d                 169 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_dcs_write(ctx, d, ARRAY_SIZE(d));\
d                 253 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	static const u8 d[] = { seq };			\
d                 255 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ret = s6e3ha2_dcs_write(ctx, d, ARRAY_SIZE(d));	\
d                 131 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 		static const u8 d[] = { seq };				\
d                 132 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 		s6e63j0x03_dcs_write_seq(ctx, d, ARRAY_SIZE(d));	\
d                 180 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 		static const u8 d[] = { seq }; \
d                 181 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 		s6e63m0_dcs_write(ctx, d, ARRAY_SIZE(d)); \
d                 172 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	const u8 d[] = { seq };\
d                 173 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	BUILD_BUG_ON_MSG(ARRAY_SIZE(d) > 64, "DCS sequence too big for stack");\
d                 174 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_dcs_write(ctx, d, ARRAY_SIZE(d));\
d                 179 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	static const u8 d[] = { seq };\
d                 180 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_dcs_write(ctx, d, ARRAY_SIZE(d));\
d                 125 drivers/gpu/drm/panel/panel-sitronix-st7701.c 		const u8 d[] = { seq };				\
d                 126 drivers/gpu/drm/panel/panel-sitronix-st7701.c 		st7701_dsi_write(st7701, d, ARRAY_SIZE(d));	\
d                 891 drivers/gpu/drm/r128/r128_cce.c 				struct drm_dma *d)
d                 896 drivers/gpu/drm/r128/r128_cce.c 	for (i = d->granted_count; i < d->request_count; i++) {
d                 903 drivers/gpu/drm/r128/r128_cce.c 		if (copy_to_user(&d->request_indices[i], &buf->idx,
d                 906 drivers/gpu/drm/r128/r128_cce.c 		if (copy_to_user(&d->request_sizes[i], &buf->total,
d                 910 drivers/gpu/drm/r128/r128_cce.c 		d->granted_count++;
d                 919 drivers/gpu/drm/r128/r128_cce.c 	struct drm_dma *d = data;
d                 925 drivers/gpu/drm/r128/r128_cce.c 	if (d->send_count != 0) {
d                 927 drivers/gpu/drm/r128/r128_cce.c 			  task_pid_nr(current), d->send_count);
d                 933 drivers/gpu/drm/r128/r128_cce.c 	if (d->request_count < 0 || d->request_count > dma->buf_count) {
d                 935 drivers/gpu/drm/r128/r128_cce.c 			  task_pid_nr(current), d->request_count, dma->buf_count);
d                 939 drivers/gpu/drm/r128/r128_cce.c 	d->granted_count = 0;
d                 941 drivers/gpu/drm/r128/r128_cce.c 	if (d->request_count)
d                 942 drivers/gpu/drm/r128/r128_cce.c 		ret = r128_cce_get_buffers(dev, file_priv, d);
d                 874 drivers/gpu/drm/radeon/evergreen_cs.c 		unsigned w, h, d;
d                 878 drivers/gpu/drm/radeon/evergreen_cs.c 		d = r600_mip_minify(depth, i);
d                 908 drivers/gpu/drm/radeon/evergreen_cs.c 			moffset += surf.layer_size * d;
d                 918 drivers/gpu/drm/radeon/evergreen_cs.c 					d, radeon_bo_size(mipmap),
d                1397 drivers/gpu/drm/radeon/ni_dpm.c 		u64 tmp, n, d;
d                1423 drivers/gpu/drm/radeon/ni_dpm.c 		d = ((u64)std_vddc_high * (u64)std_vddc_high * 100);
d                1424 drivers/gpu/drm/radeon/ni_dpm.c 		tmp = div64_u64(n, d);
d                2160 drivers/gpu/drm/radeon/r100.c 	unsigned u, i, w, h, d;
d                2195 drivers/gpu/drm/radeon/r100.c 				d = (1 << track->textures[u].txdepth) / (1 << i);
d                2196 drivers/gpu/drm/radeon/r100.c 				if (!d)
d                2197 drivers/gpu/drm/radeon/r100.c 					d = 1;
d                2199 drivers/gpu/drm/radeon/r100.c 				d = 1;
d                2203 drivers/gpu/drm/radeon/r100.c 				size += r100_track_compress_size(track->textures[u].compress_format, w, h) * d;
d                2206 drivers/gpu/drm/radeon/r100.c 				size += w * h * d;
d                2734 drivers/gpu/drm/radeon/radeon.h #define radeon_copy_blit(rdev, s, d, np, resv) (rdev)->asic->copy.blit((rdev), (s), (d), (np), (resv))
d                2735 drivers/gpu/drm/radeon/radeon.h #define radeon_copy_dma(rdev, s, d, np, resv) (rdev)->asic->copy.dma((rdev), (s), (d), (np), (resv))
d                2736 drivers/gpu/drm/radeon/radeon.h #define radeon_copy(rdev, s, d, np, resv) (rdev)->asic->copy.copy((rdev), (s), (d), (np), (resv))
d                2747 drivers/gpu/drm/radeon/radeon.h #define radeon_set_uvd_clocks(rdev, v, d) (rdev)->asic->pm.set_uvd_clocks((rdev), (v), (d))
d                1098 drivers/gpu/drm/radeon/radeon_display.c static inline uint32_t radeon_div(uint64_t n, uint32_t d)
d                1102 drivers/gpu/drm/radeon/radeon_display.c 	n += d / 2;
d                1104 drivers/gpu/drm/radeon/radeon_display.c 	mod = do_div(n, d);
d                2261 drivers/gpu/drm/radeon/si_dpm.c 	u64 pwr_efficiency_ratio, n, d;
d                2267 drivers/gpu/drm/radeon/si_dpm.c 	d = prev_vddc * prev_vddc;
d                2268 drivers/gpu/drm/radeon/si_dpm.c 	pwr_efficiency_ratio = div64_u64(n, d);
d                 329 drivers/gpu/drm/rockchip/rockchip_drm_drv.c 		struct device *p = NULL, *d;
d                 332 drivers/gpu/drm/rockchip/rockchip_drm_drv.c 			d = platform_find_device_by_driver(p, &drv->driver);
d                 334 drivers/gpu/drm/rockchip/rockchip_drm_drv.c 			p = d;
d                 336 drivers/gpu/drm/rockchip/rockchip_drm_drv.c 			if (!d)
d                 339 drivers/gpu/drm/rockchip/rockchip_drm_drv.c 			device_link_add(dev, d, DL_FLAG_STATELESS);
d                 340 drivers/gpu/drm/rockchip/rockchip_drm_drv.c 			component_match_add(dev, &match, compare_dev, d);
d                 990 drivers/gpu/drm/savage/savage_bci.c 				  struct drm_dma *d)
d                 995 drivers/gpu/drm/savage/savage_bci.c 	for (i = d->granted_count; i < d->request_count; i++) {
d                1002 drivers/gpu/drm/savage/savage_bci.c 		if (copy_to_user(&d->request_indices[i],
d                1005 drivers/gpu/drm/savage/savage_bci.c 		if (copy_to_user(&d->request_sizes[i],
d                1009 drivers/gpu/drm/savage/savage_bci.c 		d->granted_count++;
d                1017 drivers/gpu/drm/savage/savage_bci.c 	struct drm_dma *d = data;
d                1024 drivers/gpu/drm/savage/savage_bci.c 	if (d->send_count != 0) {
d                1026 drivers/gpu/drm/savage/savage_bci.c 			  task_pid_nr(current), d->send_count);
d                1032 drivers/gpu/drm/savage/savage_bci.c 	if (d->request_count < 0 || d->request_count > dma->buf_count) {
d                1034 drivers/gpu/drm/savage/savage_bci.c 			  task_pid_nr(current), d->request_count, dma->buf_count);
d                1038 drivers/gpu/drm/savage/savage_bci.c 	d->granted_count = 0;
d                1040 drivers/gpu/drm/savage/savage_bci.c 	if (d->request_count) {
d                1041 drivers/gpu/drm/savage/savage_bci.c 		ret = savage_bci_get_buffers(dev, file_priv, d);
d                  38 drivers/gpu/drm/sun4i/sun4i_hdmi_tmds_clk.c 		u8 d;
d                  40 drivers/gpu/drm/sun4i/sun4i_hdmi_tmds_clk.c 		for (d = 1; d < 3; d++) {
d                  43 drivers/gpu/drm/sun4i/sun4i_hdmi_tmds_clk.c 			tmp_rate = parent_rate / m / d;
d                  52 drivers/gpu/drm/sun4i/sun4i_hdmi_tmds_clk.c 				is_double = (d == 2);
d                  74 drivers/gpu/drm/sun4i/sun8i_mixer.h #define SUN50I_MIXER_BLEND_CSC_CONST_VAL(d, c)	(((d) << 16) | ((c) & 0xffff))
d                 316 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct page *d = ttm->pages[page];
d                 319 drivers/gpu/drm/ttm/ttm_bo_util.c 	if (!d)
d                 323 drivers/gpu/drm/ttm/ttm_bo_util.c 	dst = ttm_kmap_atomic_prot(d, prot);
d                 241 drivers/gpu/drm/vc4/vc4_drv.c 		struct device *p = NULL, *d;
d                 243 drivers/gpu/drm/vc4/vc4_drv.c 		while ((d = platform_find_device_by_driver(p, drv))) {
d                 245 drivers/gpu/drm/vc4/vc4_drv.c 			component_match_add(dev, match, compare_dev, d);
d                 246 drivers/gpu/drm/vc4/vc4_drv.c 			p = d;
d                  45 drivers/gpu/drm/virtio/virtgpu_ioctl.c 	dst->d = cpu_to_le32(src->d);
d                  84 drivers/gpu/drm/vmwgfx/device_include/svga3d_types.h    uint32               d;
d                 111 drivers/gpu/drm/vmwgfx/device_include/svga3d_types.h    uint32               d;
d                 356 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c static int vmw_bo_cpu_blit_line(struct vmw_bo_blit_line_data *d,
d                 361 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	struct vmw_diff_cpy *diff = d->diff;
d                 369 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 		bool unmap_dst = d->dst_addr && dst_page != d->mapped_dst;
d                 370 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 		bool unmap_src = d->src_addr && (src_page != d->mapped_src ||
d                 377 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 			ttm_kunmap_atomic_prot(d->src_addr, d->src_prot);
d                 378 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 			d->src_addr = NULL;
d                 382 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 			ttm_kunmap_atomic_prot(d->dst_addr, d->dst_prot);
d                 383 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 			d->dst_addr = NULL;
d                 386 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 		if (!d->dst_addr) {
d                 387 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 			if (WARN_ON_ONCE(dst_page >= d->dst_num_pages))
d                 390 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 			d->dst_addr =
d                 391 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 				ttm_kmap_atomic_prot(d->dst_pages[dst_page],
d                 392 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 						     d->dst_prot);
d                 393 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 			if (!d->dst_addr)
d                 396 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 			d->mapped_dst = dst_page;
d                 399 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 		if (!d->src_addr) {
d                 400 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 			if (WARN_ON_ONCE(src_page >= d->src_num_pages))
d                 403 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 			d->src_addr =
d                 404 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 				ttm_kmap_atomic_prot(d->src_pages[src_page],
d                 405 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 						     d->src_prot);
d                 406 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 			if (!d->src_addr)
d                 409 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 			d->mapped_src = src_page;
d                 411 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 		diff->do_cpy(diff, d->dst_addr + dst_page_offset,
d                 412 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 			     d->src_addr + src_page_offset, copy_size);
d                 457 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	struct vmw_bo_blit_line_data d;
d                 478 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	d.mapped_dst = 0;
d                 479 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	d.mapped_src = 0;
d                 480 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	d.dst_addr = NULL;
d                 481 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	d.src_addr = NULL;
d                 482 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	d.dst_pages = dst->ttm->pages;
d                 483 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	d.src_pages = src->ttm->pages;
d                 484 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	d.dst_num_pages = dst->num_pages;
d                 485 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	d.src_num_pages = src->num_pages;
d                 486 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	d.dst_prot = ttm_io_prot(dst->mem.placement, PAGE_KERNEL);
d                 487 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	d.src_prot = ttm_io_prot(src->mem.placement, PAGE_KERNEL);
d                 488 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	d.diff = diff;
d                 493 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 		ret = vmw_bo_cpu_blit_line(&d, dst_offset, src_offset, w);
d                 501 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	if (d.src_addr)
d                 502 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 		ttm_kunmap_atomic_prot(d.src_addr, d.src_prot);
d                 503 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	if (d.dst_addr)
d                 504 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 		ttm_kunmap_atomic_prot(d.dst_addr, d.dst_prot);
d                 185 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	    box->d != 1    || box_count != 1) {
d                 193 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 			  box->w, box->h, box->d, box_count,
d                2636 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 		box->d = 1;
d                 466 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 	blit->d = 1;
d                 763 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 		blit->d = 1;
d                1237 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 	box->d = 1;
d                1337 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 		box->d = 1;
d                1466 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 		box->d = 1;
d                1510 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 	box->d = 1;
d                 312 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c 		cb->d = cur_size->depth;
d                1067 drivers/hid/hid-cp2112.c static void cp2112_gpio_irq_ack(struct irq_data *d)
d                1071 drivers/hid/hid-cp2112.c static void cp2112_gpio_irq_mask(struct irq_data *d)
d                1073 drivers/hid/hid-cp2112.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1076 drivers/hid/hid-cp2112.c 	__clear_bit(d->hwirq, &dev->irq_mask);
d                1079 drivers/hid/hid-cp2112.c static void cp2112_gpio_irq_unmask(struct irq_data *d)
d                1081 drivers/hid/hid-cp2112.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1084 drivers/hid/hid-cp2112.c 	__set_bit(d->hwirq, &dev->irq_mask);
d                1091 drivers/hid/hid-cp2112.c 	struct irq_data *d;
d                1114 drivers/hid/hid-cp2112.c 		d = irq_get_irq_data(irq);
d                1115 drivers/hid/hid-cp2112.c 		if (!d)
d                1118 drivers/hid/hid-cp2112.c 		irq_type = irqd_get_trigger_type(d);
d                1149 drivers/hid/hid-cp2112.c static unsigned int cp2112_gpio_irq_startup(struct irq_data *d)
d                1151 drivers/hid/hid-cp2112.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1161 drivers/hid/hid-cp2112.c 	cp2112_gpio_irq_unmask(d);
d                1165 drivers/hid/hid-cp2112.c static void cp2112_gpio_irq_shutdown(struct irq_data *d)
d                1167 drivers/hid/hid-cp2112.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1173 drivers/hid/hid-cp2112.c static int cp2112_gpio_irq_type(struct irq_data *d, unsigned int type)
d                  98 drivers/hid/hid-ntrig.c 	__u8 d = ((raw[3] & 0x07) << 3) | ((raw[2] & 0xe0) >> 5);
d                 106 drivers/hid/hid-ntrig.c 	return sprintf(buf, "%u.%u.%u.%u.%u", a, b, c, d, e);
d                 181 drivers/hid/hid-picolcd.h #define picolcd_fbinfo(d) ((d)->fb_info)
d                 197 drivers/hid/hid-picolcd.h #define picolcd_fbinfo(d) NULL
d                  23 drivers/hid/intel-ish-hid/ishtp/bus.c #define to_ishtp_cl_driver(d) container_of(d, struct ishtp_cl_driver, driver)
d                  24 drivers/hid/intel-ish-hid/ishtp/bus.c #define to_ishtp_cl_device(d) container_of(d, struct ishtp_cl_device, dev)
d                2972 drivers/hid/wacom_wac.c 	int x = 0, y = 0, p = 0, d = 0;
d                3014 drivers/hid/wacom_wac.c 			d = features->distance_max - data[8];
d                3029 drivers/hid/wacom_wac.c 		input_report_abs(input, ABS_DISTANCE, d);
d                 958 drivers/hwmon/acpi_power_meter.c static int __init enable_cap_knobs(const struct dmi_system_id *d)
d                  74 drivers/hwmon/adt7x10.c 	struct adt7x10_data *d = dev_get_drvdata(dev);
d                  75 drivers/hwmon/adt7x10.c 	return d->ops->read_byte(dev, reg);
d                  80 drivers/hwmon/adt7x10.c 	struct adt7x10_data *d = dev_get_drvdata(dev);
d                  81 drivers/hwmon/adt7x10.c 	return d->ops->write_byte(dev, reg, data);
d                  86 drivers/hwmon/adt7x10.c 	struct adt7x10_data *d = dev_get_drvdata(dev);
d                  87 drivers/hwmon/adt7x10.c 	return d->ops->read_word(dev, reg);
d                  92 drivers/hwmon/adt7x10.c 	struct adt7x10_data *d = dev_get_drvdata(dev);
d                  93 drivers/hwmon/adt7x10.c 	return d->ops->write_word(dev, reg, data);
d                 149 drivers/hwmon/asc7621.c #define SETUP_SHOW_DATA_PARAM(d, a) \
d                 151 drivers/hwmon/asc7621.c 	struct asc7621_data *data = asc7621_update_device(d); \
d                 154 drivers/hwmon/asc7621.c #define SETUP_STORE_DATA_PARAM(d, a) \
d                 156 drivers/hwmon/asc7621.c 	struct i2c_client *client = to_i2c_client(d); \
d                 791 drivers/hwmon/asus_atk0110.c 	struct dentry *d;
d                 795 drivers/hwmon/asus_atk0110.c 	d = debugfs_create_dir("asus_atk0110", NULL);
d                 797 drivers/hwmon/asus_atk0110.c 	debugfs_create_x32("id", 0600, d, &data->debugfs.id);
d                 798 drivers/hwmon/asus_atk0110.c 	debugfs_create_file_unsafe("gitm", 0400, d, data, &atk_debugfs_gitm);
d                 799 drivers/hwmon/asus_atk0110.c 	debugfs_create_file("ggrp", 0400, d, data, &atk_debugfs_ggrp_fops);
d                 801 drivers/hwmon/asus_atk0110.c 	data->debugfs.root = d;
d                  39 drivers/hwmon/hwmon.c #define to_hwmon_device(d) container_of(d, struct hwmon_device, dev)
d                  52 drivers/hwmon/hwmon.c #define to_hwmon_attr(d) \
d                  53 drivers/hwmon/hwmon.c 	container_of(d, struct hwmon_device_attribute, dev_attr)
d                 252 drivers/hwmon/i5k_amb.c 	int i, j, k, d = 0;
d                 279 drivers/hwmon/i5k_amb.c 			d++;
d                 284 drivers/hwmon/i5k_amb.c 				 "temp%d_label", d);
d                 299 drivers/hwmon/i5k_amb.c 				 "temp%d_input", d);
d                 314 drivers/hwmon/i5k_amb.c 				 "temp%d_min", d);
d                 330 drivers/hwmon/i5k_amb.c 				 "temp%d_mid", d);
d                 346 drivers/hwmon/i5k_amb.c 				 "temp%d_max", d);
d                 362 drivers/hwmon/i5k_amb.c 				 "temp%d_alarm", d);
d                  54 drivers/hwmon/mlxreg-fan.c #define MLXREG_FAN_GET_RPM(rval, d, s)	(DIV_ROUND_CLOSEST(15000000 * 100, \
d                  55 drivers/hwmon/mlxreg-fan.c 					 ((rval) + (s)) * (d)))
d                 113 drivers/hwmon/pmbus/ltc2978.c #define has_clear_peaks(d)	((d)->features & FEAT_CLEAR_PEAKS)
d                 114 drivers/hwmon/pmbus/ltc2978.c #define needs_polling(d)	((d)->features & FEAT_NEEDS_POLLING)
d                 799 drivers/hwmon/sht15.c static irqreturn_t sht15_interrupt_fired(int irq, void *d)
d                 801 drivers/hwmon/sht15.c 	struct sht15_data *data = d;
d                 811 drivers/hwmon/sis5595.c 		struct pci_dev *d;
d                 812 drivers/hwmon/sis5595.c 		d = pci_get_device(PCI_VENDOR_ID_SI, *i, NULL);
d                 813 drivers/hwmon/sis5595.c 		if (d) {
d                 814 drivers/hwmon/sis5595.c 			dev_err(&d->dev,
d                 817 drivers/hwmon/sis5595.c 			pci_dev_put(d);
d                 825 drivers/hwtracing/intel_th/core.c 	struct intel_th_driver *d;
d                 832 drivers/hwtracing/intel_th/core.c 		d = to_intel_th_driver(th->thdev[i]->dev.driver);
d                 833 drivers/hwtracing/intel_th/core.c 		if (d && d->irq)
d                 834 drivers/hwtracing/intel_th/core.c 			ret |= d->irq(th->thdev[i]);
d                 174 drivers/i2c/busses/i2c-davinci.c 	u32 d;
d                 210 drivers/i2c/busses/i2c-davinci.c 	d = (psc >= 2) ? 5 : 7 - psc;
d                 213 drivers/i2c/busses/i2c-davinci.c 		d = 6;
d                 236 drivers/i2c/busses/i2c-davinci.c 	if (clk > clkl + d) {
d                 237 drivers/i2c/busses/i2c-davinci.c 		clkh = clk - clkl - d;
d                 238 drivers/i2c/busses/i2c-davinci.c 		clkl -= d;
d                 241 drivers/i2c/busses/i2c-davinci.c 		clkl = clk - (d << 1);
d                1151 drivers/i2c/busses/i2c-i801.c 		const u8 *d = (char *)(dm + 1) + (i * 2);
d                1153 drivers/i2c/busses/i2c-i801.c 		u8 type = d[0];
d                1154 drivers/i2c/busses/i2c-i801.c 		u8 s = d[1];
d                  42 drivers/i2c/busses/i2c-parport-light.c static inline void port_write(unsigned char p, unsigned char d)
d                  44 drivers/i2c/busses/i2c-parport-light.c 	outb(d, base+p);
d                  49 drivers/i2c/busses/i2c-parport.c static void port_write_data(struct parport *p, unsigned char d)
d                  51 drivers/i2c/busses/i2c-parport.c 	parport_write_data(p, d);
d                  54 drivers/i2c/busses/i2c-parport.c static void port_write_control(struct parport *p, unsigned char d)
d                  56 drivers/i2c/busses/i2c-parport.c 	parport_write_control(p, d);
d                1233 drivers/i2c/i2c-core-base.c static int __process_new_adapter(struct device_driver *d, void *data)
d                1235 drivers/i2c/i2c-core-base.c 	return i2c_do_add_adapter(to_i2c_driver(d), data);
d                1536 drivers/i2c/i2c-core-base.c static int __process_removed_adapter(struct device_driver *d, void *data)
d                1538 drivers/i2c/i2c-core-base.c 	i2c_do_del_adapter(to_i2c_driver(d), data);
d                  65 drivers/i2c/i2c-smbus.c static irqreturn_t smbus_alert(int irq, void *d)
d                  67 drivers/i2c/i2c-smbus.c 	struct i2c_smbus_alert *alert = d;
d                 282 drivers/i3c/master/i3c-master-cdns.c #define DEV_ID_RR0(d)			(0xc0 + ((d) * 0x10))
d                 292 drivers/i3c/master/i3c-master-cdns.c #define DEV_ID_RR1(d)			(0xc4 + ((d) * 0x10))
d                 295 drivers/i3c/master/i3c-master-cdns.c #define DEV_ID_RR2(d)			(0xc8 + ((d) * 0x10))
d                 302 drivers/i3c/master/i3c-master-cdns.c #define SIR_MAP_DEV_REG(d)		SIR_MAP((d) / 2)
d                 303 drivers/i3c/master/i3c-master-cdns.c #define SIR_MAP_DEV_SHIFT(d, fs)	((fs) + (((d) % 2) ? 16 : 0))
d                 304 drivers/i3c/master/i3c-master-cdns.c #define SIR_MAP_DEV_CONF_MASK(d)	(GENMASK(15, 0) << (((d) % 2) ? 16 : 0))
d                 305 drivers/i3c/master/i3c-master-cdns.c #define SIR_MAP_DEV_CONF(d, c)		((c) << (((d) % 2) ? 16 : 0))
d                 258 drivers/ide/aec62xx.c 	struct ide_port_info d;
d                 272 drivers/ide/aec62xx.c 	d = aec62xx_chipsets[idx];
d                 280 drivers/ide/aec62xx.c 			d.udma_mask = ATA_UDMA6;
d                 284 drivers/ide/aec62xx.c 	err = ide_pci_init_one(dev, &d, (void *)bus_clock);
d                 467 drivers/ide/alim15x3.c static int init_dma_ali15x3(ide_hwif_t *hwif, const struct ide_port_info *d)
d                 470 drivers/ide/alim15x3.c 	unsigned long base = ide_pci_dma_base(hwif, d);
d                 477 drivers/ide/alim15x3.c 	if (ide_pci_check_simplex(hwif, d) < 0)
d                 480 drivers/ide/alim15x3.c 	if (ide_pci_set_master(dev, d->name) < 0)
d                 537 drivers/ide/alim15x3.c 	struct ide_port_info d = ali15x3_chipset;
d                 542 drivers/ide/alim15x3.c 		d.host_flags |= IDE_HFLAG_NO_LBA48_DMA;
d                 546 drivers/ide/alim15x3.c 			d.host_flags |= IDE_HFLAG_NO_ATAPI_DMA;
d                 549 drivers/ide/alim15x3.c 			d.udma_mask = ATA_UDMA2;
d                 551 drivers/ide/alim15x3.c 			d.udma_mask = ATA_UDMA4;
d                 553 drivers/ide/alim15x3.c 			d.udma_mask = ATA_UDMA5;
d                 555 drivers/ide/alim15x3.c 			d.udma_mask = ATA_UDMA6;
d                 557 drivers/ide/alim15x3.c 		d.dma_ops = &ali_dma_ops;
d                 559 drivers/ide/alim15x3.c 		d.host_flags |= IDE_HFLAG_NO_DMA;
d                 561 drivers/ide/alim15x3.c 		d.mwdma_mask = d.swdma_mask = 0;
d                 565 drivers/ide/alim15x3.c 		d.host_flags |= IDE_HFLAG_CLEAR_SIMPLEX;
d                 567 drivers/ide/alim15x3.c 	return ide_pci_init_one(dev, &d, NULL);
d                 236 drivers/ide/amd74xx.c 	struct ide_port_info d;
d                 239 drivers/ide/amd74xx.c 	d = amd74xx_chipsets[idx];
d                 246 drivers/ide/amd74xx.c 			d.swdma_mask = 0;
d                 247 drivers/ide/amd74xx.c 		d.host_flags |= IDE_HFLAG_CLEAR_SIMPLEX;
d                 251 drivers/ide/amd74xx.c 			d.udma_mask = ATA_UDMA5;
d                 261 drivers/ide/amd74xx.c 		d.host_flags |= IDE_HFLAG_BROKEN_ALTSTATUS;
d                 264 drivers/ide/amd74xx.c 		d.name, pci_name(dev), amd_dma[fls(d.udma_mask) - 1]);
d                 280 drivers/ide/amd74xx.c 				    d.name, amd_clock);
d                 284 drivers/ide/amd74xx.c 	return ide_pci_init_one(dev, &d, NULL);
d                 346 drivers/ide/au1xxx-ide.c static int auide_ddma_init(ide_hwif_t *hwif, const struct ide_port_info *d)
d                 403 drivers/ide/au1xxx-ide.c static int auide_ddma_init(ide_hwif_t *hwif, const struct ide_port_info *d)
d                 169 drivers/ide/buddha.c 		struct ide_port_info d = buddha_port_info;
d                 180 drivers/ide/buddha.c 			d.port_ops = &xsurf_port_ops;
d                 230 drivers/ide/buddha.c 		ide_host_add(&d, hws, i, NULL);
d                 382 drivers/ide/cmd64x.c 	struct ide_port_info d;
d                 385 drivers/ide/cmd64x.c 	d = cmd64x_chipsets[idx];
d                 401 drivers/ide/cmd64x.c 			d.udma_mask = 0x00;
d                 408 drivers/ide/cmd64x.c 				d.enablebits[0].reg = 0;
d                 409 drivers/ide/cmd64x.c 				d.port_ops = &cmd64x_port_ops;
d                 411 drivers/ide/cmd64x.c 					d.dma_ops = &cmd646_rev1_dma_ops;
d                 416 drivers/ide/cmd64x.c 	return ide_pci_init_one(dev, &d, NULL);
d                 113 drivers/ide/cs5520.c 	const struct ide_port_info *d = &cyrix_chipset;
d                 116 drivers/ide/cs5520.c 	ide_setup_pci_noise(dev, d);
d                 122 drivers/ide/cs5520.c 		printk(KERN_WARNING "%s: Unable to enable 55x0.\n", d->name);
d                 128 drivers/ide/cs5520.c 			d->name);
d                 137 drivers/ide/cs5520.c 	ide_pci_setup_ports(dev, d, &hw[0], &hws[0]);
d                 141 drivers/ide/cs5520.c 	return ide_host_add(d, hws, 2, NULL);
d                 125 drivers/ide/gayle.c 	struct ide_port_info d = gayle_port_info;
d                 144 drivers/ide/gayle.c 		d.port_ops = &gayle_a1200_port_ops;
d                 146 drivers/ide/gayle.c 		d.port_ops = &gayle_a4000_port_ops;
d                 156 drivers/ide/gayle.c 	error = ide_host_add(&d, hws, i, &host);
d                1222 drivers/ide/hpt366.c 				     const struct ide_port_info *d)
d                1225 drivers/ide/hpt366.c 	unsigned long flags, base = ide_pci_dma_base(hwif, d);
d                1233 drivers/ide/hpt366.c 	if (ide_pci_check_simplex(hwif, d) < 0)
d                1236 drivers/ide/hpt366.c 	if (ide_pci_set_master(dev, d->name) < 0)
d                1407 drivers/ide/hpt366.c 	struct ide_port_info d;
d                1449 drivers/ide/hpt366.c 	d = hpt366_chipsets[min_t(u8, idx, 1)];
d                1451 drivers/ide/hpt366.c 	d.udma_mask = info->udma_mask;
d                1455 drivers/ide/hpt366.c 		d.dma_ops = &hpt370_dma_ops;
d                1463 drivers/ide/hpt366.c 			d.name, pci_name(dev));
d                1481 drivers/ide/hpt366.c 				d.host_flags &= ~IDE_HFLAG_NON_BOOTABLE;
d                1484 drivers/ide/hpt366.c 		ret = ide_pci_init_two(dev, dev2, &d, dyn_info);
d                1492 drivers/ide/hpt366.c 	ret = ide_pci_init_one(dev, &d, dyn_info);
d                 363 drivers/ide/icside.c static int icside_dma_init(ide_hwif_t *hwif, const struct ide_port_info *d)
d                 381 drivers/ide/icside.c static int icside_dma_off_init(ide_hwif_t *hwif, const struct ide_port_info *d)
d                 472 drivers/ide/icside.c 	struct ide_port_info d = icside_v6_port_info;
d                 511 drivers/ide/icside.c 	host = ide_host_alloc(&d, hws, 2);
d                 521 drivers/ide/icside.c 		d.init_dma = icside_dma_init;
d                 522 drivers/ide/icside.c 		d.port_ops = &icside_v6_port_ops;
d                 523 drivers/ide/icside.c 		d.dma_ops  = &icside_v6_dma_ops;
d                 527 drivers/ide/icside.c 	ret = ide_host_register(host, &d, hws);
d                 534 drivers/ide/icside.c 	if (d.dma_ops)
d                   7 drivers/ide/ide-legacy.c 				u8 port_no, const struct ide_port_info *d,
d                  23 drivers/ide/ide-legacy.c 	if (!request_region(base, 8, d->name)) {
d                  25 drivers/ide/ide-legacy.c 				d->name, base, base + 7);
d                  29 drivers/ide/ide-legacy.c 	if (!request_region(ctl, 1, d->name)) {
d                  31 drivers/ide/ide-legacy.c 				d->name, ctl);
d                  43 drivers/ide/ide-legacy.c int ide_legacy_device_add(const struct ide_port_info *d, unsigned long config)
d                  49 drivers/ide/ide-legacy.c 	if ((d->host_flags & IDE_HFLAG_QD_2ND_PORT) == 0)
d                  50 drivers/ide/ide-legacy.c 		ide_legacy_init_one(hws, &hw[0], 0, d, config);
d                  51 drivers/ide/ide-legacy.c 	ide_legacy_init_one(hws, &hw[1], 1, d, config);
d                  54 drivers/ide/ide-legacy.c 	    (d->host_flags & IDE_HFLAG_SINGLE))
d                  57 drivers/ide/ide-legacy.c 	return ide_host_add(d, hws, 2, NULL);
d                 108 drivers/ide/ide-pci-generic.c 	const struct ide_port_info *d = &generic_chipsets[id->driver_data];
d                 143 drivers/ide/ide-pci-generic.c 				"controller\n", d->name, pci_name(dev));
d                 147 drivers/ide/ide-pci-generic.c 	ret = ide_pci_init_one(dev, d, NULL);
d                1081 drivers/ide/ide-probe.c 			  const struct ide_port_info *d)
d                1085 drivers/ide/ide-probe.c 	hwif->chipset = d->chipset ? d->chipset : ide_pci;
d                1087 drivers/ide/ide-probe.c 	if (d->init_iops)
d                1088 drivers/ide/ide-probe.c 		d->init_iops(hwif);
d                1091 drivers/ide/ide-probe.c 	hwif->host_flags |= d->host_flags;
d                1092 drivers/ide/ide-probe.c 	hwif->pio_mask = d->pio_mask;
d                1094 drivers/ide/ide-probe.c 	if (d->tp_ops)
d                1095 drivers/ide/ide-probe.c 		hwif->tp_ops = d->tp_ops;
d                1099 drivers/ide/ide-probe.c 		hwif->port_ops = d->port_ops;
d                1101 drivers/ide/ide-probe.c 	hwif->swdma_mask = d->swdma_mask;
d                1102 drivers/ide/ide-probe.c 	hwif->mwdma_mask = d->mwdma_mask;
d                1103 drivers/ide/ide-probe.c 	hwif->ultra_mask = d->udma_mask;
d                1105 drivers/ide/ide-probe.c 	if ((d->host_flags & IDE_HFLAG_NO_DMA) == 0) {
d                1108 drivers/ide/ide-probe.c 		hwif->dma_ops = d->dma_ops;
d                1110 drivers/ide/ide-probe.c 		if (d->init_dma)
d                1111 drivers/ide/ide-probe.c 			rc = d->init_dma(hwif, d);
d                1113 drivers/ide/ide-probe.c 			rc = ide_hwif_setup_dma(hwif, d);
d                1126 drivers/ide/ide-probe.c 	if ((d->host_flags & IDE_HFLAG_SERIALIZE) ||
d                1127 drivers/ide/ide-probe.c 	    ((d->host_flags & IDE_HFLAG_SERIALIZE_DMA) && hwif->dma_base))
d                1130 drivers/ide/ide-probe.c 	if (d->max_sectors)
d                1131 drivers/ide/ide-probe.c 		hwif->rqsize = d->max_sectors;
d                1141 drivers/ide/ide-probe.c 	if (d->init_hwif)
d                1142 drivers/ide/ide-probe.c 		d->init_hwif(hwif);
d                1263 drivers/ide/ide-probe.c static int ide_find_port_slot(const struct ide_port_info *d)
d                1266 drivers/ide/ide-probe.c 	u8 bootable = (d && (d->host_flags & IDE_HFLAG_NON_BOOTABLE)) ? 0 : 1;
d                1267 drivers/ide/ide-probe.c 	u8 i = (d && (d->host_flags & IDE_HFLAG_QD_2ND_PORT)) ? 1 : 0;
d                1347 drivers/ide/ide-probe.c struct ide_host *ide_host_alloc(const struct ide_port_info *d,
d                1375 drivers/ide/ide-probe.c 		idx = ide_find_port_slot(d);
d                1378 drivers/ide/ide-probe.c 					d ? d->name : "ide");
d                1399 drivers/ide/ide-probe.c 	if (d) {
d                1400 drivers/ide/ide-probe.c 		host->init_chipset = d->init_chipset;
d                1401 drivers/ide/ide-probe.c 		host->get_lock     = d->get_lock;
d                1402 drivers/ide/ide-probe.c 		host->release_lock = d->release_lock;
d                1403 drivers/ide/ide-probe.c 		host->host_flags = d->host_flags;
d                1404 drivers/ide/ide-probe.c 		host->irq_flags = d->irq_flags;
d                1435 drivers/ide/ide-probe.c int ide_host_register(struct ide_host *host, const struct ide_port_info *d,
d                1460 drivers/ide/ide-probe.c 		ide_init_port(hwif, i & 1, d);
d                1535 drivers/ide/ide-probe.c int ide_host_add(const struct ide_port_info *d, struct ide_hw **hws,
d                1541 drivers/ide/ide-probe.c 	host = ide_host_alloc(d, hws, n_ports);
d                1545 drivers/ide/ide-probe.c 	rc = ide_host_register(host, d, hws);
d                 232 drivers/ide/ide-proc.c 	const struct ide_proc_devset *setting, *g, *d;
d                 241 drivers/ide/ide-proc.c 	d = drive->settings;
d                 244 drivers/ide/ide-proc.c 	while (g->name || (d && d->name)) {
d                 246 drivers/ide/ide-proc.c 		if (g->name && d && d->name) {
d                 247 drivers/ide/ide-proc.c 			if (strcmp(d->name, g->name) < 0)
d                 248 drivers/ide/ide-proc.c 				setting = d++;
d                 251 drivers/ide/ide-proc.c 		} else if (d && d->name) {
d                 252 drivers/ide/ide-proc.c 			setting = d++;
d                  58 drivers/ide/ide-scan-pci.c 	struct pci_driver *d;
d                  62 drivers/ide/ide-scan-pci.c 		d = list_entry(l, struct pci_driver, node);
d                  63 drivers/ide/ide-scan-pci.c 		if (d->id_table) {
d                  65 drivers/ide/ide-scan-pci.c 				pci_match_id(d->id_table, dev);
d                  69 drivers/ide/ide-scan-pci.c 				ret = d->probe(dev, id);
d                  71 drivers/ide/ide-scan-pci.c 					dev->driver = d;
d                  92 drivers/ide/ide-scan-pci.c 	struct pci_driver *d;
d                 106 drivers/ide/ide-scan-pci.c 		d = list_entry(l, struct pci_driver, node);
d                 107 drivers/ide/ide-scan-pci.c 		if (__pci_register_driver(d, d->driver.owner,
d                 108 drivers/ide/ide-scan-pci.c 					  d->driver.mod_name))
d                 110 drivers/ide/ide-scan-pci.c 					__func__, d->driver.mod_name);
d                  53 drivers/ide/ide_platform.c 	struct ide_port_info d = platform_ide_port_info;
d                  93 drivers/ide/ide_platform.c 	d.irq_flags = res_irq->flags & IRQF_TRIGGER_MASK;
d                  95 drivers/ide/ide_platform.c 		d.irq_flags |= IRQF_SHARED;
d                  98 drivers/ide/ide_platform.c 		d.host_flags |= IDE_HFLAG_MMIO;
d                 100 drivers/ide/ide_platform.c 	ret = ide_host_add(&d, hws, 1, &host);
d                 108 drivers/ide/macide.c 	struct ide_port_info d = macide_port_info;
d                 124 drivers/ide/macide.c 		d.port_ops = NULL;
d                 136 drivers/ide/macide.c 	return ide_host_add(&d, hws, 1, NULL);
d                 308 drivers/ide/ns87415.c 	struct ide_port_info d = ns87415_chipset;
d                 313 drivers/ide/ns87415.c 		d.init_iops = superio_init_iops;
d                 314 drivers/ide/ns87415.c 		d.tp_ops = &superio_tp_ops;
d                 317 drivers/ide/ns87415.c 	return ide_pci_init_one(dev, &d, NULL);
d                 272 drivers/ide/palm_bk3710.c static int palm_bk3710_init_dma(ide_hwif_t *hwif, const struct ide_port_info *d)
d                 479 drivers/ide/pdc202xx_new.c 	const struct ide_port_info *d = &pdcnew_chipsets[id->driver_data];
d                 493 drivers/ide/pdc202xx_new.c 			int ret = ide_pci_init_two(dev, dev2, d, NULL);
d                 509 drivers/ide/pdc202xx_new.c 	return ide_pci_init_one(dev, d, NULL);
d                 303 drivers/ide/pdc202xx_old.c 	const struct ide_port_info *d;
d                 306 drivers/ide/pdc202xx_old.c 	d = &pdc202xx_chipsets[idx];
d                 309 drivers/ide/pdc202xx_old.c 		pdc202ata4_fixup_irq(dev, d->name);
d                 325 drivers/ide/pdc202xx_old.c 	return ide_pci_init_one(dev, d, NULL);
d                1024 drivers/ide/pmac.c 	struct ide_port_info d = pmac_port_info;
d                1030 drivers/ide/pmac.c 		d.tp_ops = &pmac_ata6_tp_ops;
d                1031 drivers/ide/pmac.c 		d.port_ops = &pmac_ide_ata4_port_ops;
d                1032 drivers/ide/pmac.c 		d.udma_mask = ATA_UDMA6;
d                1035 drivers/ide/pmac.c 		d.tp_ops = &pmac_ata6_tp_ops;
d                1036 drivers/ide/pmac.c 		d.port_ops = &pmac_ide_ata4_port_ops;
d                1037 drivers/ide/pmac.c 		d.udma_mask = ATA_UDMA5;
d                1040 drivers/ide/pmac.c 		d.tp_ops = &pmac_ata6_tp_ops;
d                1041 drivers/ide/pmac.c 		d.port_ops = &pmac_ide_ata4_port_ops;
d                1042 drivers/ide/pmac.c 		d.udma_mask = ATA_UDMA5;
d                1046 drivers/ide/pmac.c 			d.port_ops = &pmac_ide_ata4_port_ops;
d                1047 drivers/ide/pmac.c 			d.udma_mask = ATA_UDMA4;
d                1073 drivers/ide/pmac.c 	host = ide_host_alloc(&d, hws, 1);
d                1104 drivers/ide/pmac.c 	rc = ide_host_register(host, &d, hws);
d                1673 drivers/ide/pmac.c static int pmac_ide_init_dma(ide_hwif_t *hwif, const struct ide_port_info *d)
d                 359 drivers/ide/qd65xx.c 	struct ide_port_info d = qd65xx_port_info;
d                 369 drivers/ide/qd65xx.c 		d.host_flags |= IDE_HFLAG_QD_2ND_PORT;
d                 385 drivers/ide/qd65xx.c 		d.port_ops = &qd6500_port_ops;
d                 386 drivers/ide/qd65xx.c 		d.host_flags |= IDE_HFLAG_SINGLE;
d                 401 drivers/ide/qd65xx.c 		d.port_ops = &qd6580_port_ops;
d                 403 drivers/ide/qd65xx.c 			d.host_flags |= IDE_HFLAG_SINGLE;
d                 412 drivers/ide/qd65xx.c 	rc = ide_legacy_device_add(&d, (base << 8) | config);
d                 414 drivers/ide/qd65xx.c 	if (d.host_flags & IDE_HFLAG_SINGLE)
d                  49 drivers/ide/rz1000.c 	struct ide_port_info d = rz1000_chipset;
d                  57 drivers/ide/rz1000.c 		d.host_flags |= IDE_HFLAG_SERIALIZE;
d                  58 drivers/ide/rz1000.c 		d.host_flags |= IDE_HFLAG_NO_UNMASK_IRQS;
d                  61 drivers/ide/rz1000.c 	return ide_pci_init_one(dev, &d, NULL);
d                 403 drivers/ide/serverworks.c 	struct ide_port_info d;
d                 406 drivers/ide/serverworks.c 	d = serverworks_chipsets[idx];
d                 409 drivers/ide/serverworks.c 		d.host_flags |= IDE_HFLAG_CLEAR_SIMPLEX;
d                 413 drivers/ide/serverworks.c 				d.host_flags |= IDE_HFLAG_NON_BOOTABLE;
d                 414 drivers/ide/serverworks.c 			d.host_flags |= IDE_HFLAG_SINGLE;
d                 416 drivers/ide/serverworks.c 			d.host_flags &= ~IDE_HFLAG_SINGLE;
d                 419 drivers/ide/serverworks.c 	return ide_pci_init_one(dev, &d, NULL);
d                  80 drivers/ide/setup-pci.c unsigned long ide_pci_dma_base(ide_hwif_t *hwif, const struct ide_port_info *d)
d                  91 drivers/ide/setup-pci.c 		u8 baridx = (d->host_flags & IDE_HFLAG_CS5520) ? 2 : 4;
d                  97 drivers/ide/setup-pci.c 				d->name, pci_name(dev));
d                 109 drivers/ide/setup-pci.c int ide_pci_check_simplex(ide_hwif_t *hwif, const struct ide_port_info *d)
d                 114 drivers/ide/setup-pci.c 	if (d->host_flags & (IDE_HFLAG_MMIO | IDE_HFLAG_CS5520))
d                 117 drivers/ide/setup-pci.c 	if (d->host_flags & IDE_HFLAG_CLEAR_SIMPLEX) {
d                 118 drivers/ide/setup-pci.c 		if (ide_pci_clear_simplex(hwif->dma_base, d->name))
d                 120 drivers/ide/setup-pci.c 				d->name, pci_name(dev));
d                 137 drivers/ide/setup-pci.c 			d->name, pci_name(dev));
d                 170 drivers/ide/setup-pci.c void ide_setup_pci_noise(struct pci_dev *dev, const struct ide_port_info *d)
d                 173 drivers/ide/setup-pci.c 		d->name, pci_name(dev),
d                 194 drivers/ide/setup-pci.c 			  const struct ide_port_info *d)
d                 202 drivers/ide/setup-pci.c 				d->name, pci_name(dev));
d                 206 drivers/ide/setup-pci.c 			d->name, pci_name(dev));
d                 217 drivers/ide/setup-pci.c 			d->name, pci_name(dev));
d                 221 drivers/ide/setup-pci.c 	ret = pci_request_selected_regions(dev, bars, d->name);
d                 224 drivers/ide/setup-pci.c 			d->name, pci_name(dev));
d                 238 drivers/ide/setup-pci.c static int ide_pci_configure(struct pci_dev *dev, const struct ide_port_info *d)
d                 248 drivers/ide/setup-pci.c 	if (ide_setup_pci_baseregs(dev, d->name) ||
d                 251 drivers/ide/setup-pci.c 			d->name, pci_name(dev));
d                 256 drivers/ide/setup-pci.c 			d->name, pci_name(dev));
d                 261 drivers/ide/setup-pci.c 			d->name, pci_name(dev));
d                 277 drivers/ide/setup-pci.c static int ide_pci_check_iomem(struct pci_dev *dev, const struct ide_port_info *d,
d                 308 drivers/ide/setup-pci.c static int ide_hw_configure(struct pci_dev *dev, const struct ide_port_info *d,
d                 313 drivers/ide/setup-pci.c 	if ((d->host_flags & IDE_HFLAG_ISA_PORTS) == 0) {
d                 314 drivers/ide/setup-pci.c 		if (ide_pci_check_iomem(dev, d, 2 * port) ||
d                 315 drivers/ide/setup-pci.c 		    ide_pci_check_iomem(dev, d, 2 * port + 1)) {
d                 318 drivers/ide/setup-pci.c 				d->name, pci_name(dev), port);
d                 332 drivers/ide/setup-pci.c 			d->name, pci_name(dev), port);
d                 354 drivers/ide/setup-pci.c int ide_hwif_setup_dma(ide_hwif_t *hwif, const struct ide_port_info *d)
d                 358 drivers/ide/setup-pci.c 	if ((d->host_flags & IDE_HFLAG_NO_AUTODMA) == 0 ||
d                 361 drivers/ide/setup-pci.c 		unsigned long base = ide_pci_dma_base(hwif, d);
d                 371 drivers/ide/setup-pci.c 		if (ide_pci_check_simplex(hwif, d) < 0)
d                 374 drivers/ide/setup-pci.c 		if (ide_pci_set_master(dev, d->name) < 0)
d                 406 drivers/ide/setup-pci.c 				    const struct ide_port_info *d, int noisy)
d                 412 drivers/ide/setup-pci.c 		ide_setup_pci_noise(dev, d);
d                 414 drivers/ide/setup-pci.c 	ret = ide_pci_enable(dev, bars, d);
d                 421 drivers/ide/setup-pci.c 			d->name, pci_name(dev));
d                 425 drivers/ide/setup-pci.c 		ret = ide_pci_configure(dev, d);
d                 429 drivers/ide/setup-pci.c 			d->name, pci_name(dev));
d                 456 drivers/ide/setup-pci.c void ide_pci_setup_ports(struct pci_dev *dev, const struct ide_port_info *d,
d                 459 drivers/ide/setup-pci.c 	int channels = (d->host_flags & IDE_HFLAG_SINGLE) ? 1 : 2, port;
d                 467 drivers/ide/setup-pci.c 		const struct ide_pci_enablebit *e = &d->enablebits[port];
d                 472 drivers/ide/setup-pci.c 				d->name, pci_name(dev));
d                 476 drivers/ide/setup-pci.c 		if (ide_hw_configure(dev, d, port, hw + port))
d                 495 drivers/ide/setup-pci.c 				   const struct ide_port_info *d,
d                 511 drivers/ide/setup-pci.c 	ret = d->init_chipset ? d->init_chipset(dev) : 0;
d                 518 drivers/ide/setup-pci.c 				"probe irqs later\n", d->name, pci_name(dev));
d                 522 drivers/ide/setup-pci.c 			d->name, pci_name(dev), pciirq);
d                 525 drivers/ide/setup-pci.c 			d->name, pci_name(dev), pciirq);
d                 534 drivers/ide/setup-pci.c 		     const struct ide_port_info *d, void *priv)
d                 541 drivers/ide/setup-pci.c 	if (d->host_flags & IDE_HFLAG_SINGLE)
d                 546 drivers/ide/setup-pci.c 	if ((d->host_flags & IDE_HFLAG_NO_DMA) == 0) {
d                 547 drivers/ide/setup-pci.c 		if (d->host_flags & IDE_HFLAG_CS5520)
d                 554 drivers/ide/setup-pci.c 		ret = ide_setup_pci_controller(pdev[i], bars, d, !i);
d                 561 drivers/ide/setup-pci.c 		ide_pci_setup_ports(pdev[i], d, &hw[i*2], &hws[i*2]);
d                 564 drivers/ide/setup-pci.c 	host = ide_host_alloc(d, hws, n_ports);
d                 582 drivers/ide/setup-pci.c 		ret = do_ide_setup_pci_device(pdev[i], d, !i);
d                 599 drivers/ide/setup-pci.c 	ret = ide_host_register(host, d, hws);
d                 614 drivers/ide/setup-pci.c int ide_pci_init_one(struct pci_dev *dev, const struct ide_port_info *d,
d                 617 drivers/ide/setup-pci.c 	return ide_pci_init_two(dev, NULL, d, priv);
d                 742 drivers/ide/siimage.c 	struct ide_port_info d;
d                 746 drivers/ide/siimage.c 	d = siimage_chipsets[idx];
d                 757 drivers/ide/siimage.c 		d.host_flags |= IDE_HFLAG_NO_ATAPI_DMA;
d                 770 drivers/ide/siimage.c 		if (!request_mem_region(bar5, barsize, d.name)) {
d                 780 drivers/ide/siimage.c 	rc = ide_pci_init_one(dev, &d, ioaddr);
d                 578 drivers/ide/sis5513.c 	struct ide_port_info d = sis5513_chipset;
d                 590 drivers/ide/sis5513.c 		d.port_ops = &sis_ata133_port_ops;
d                 592 drivers/ide/sis5513.c 		d.port_ops = &sis_port_ops;
d                 594 drivers/ide/sis5513.c 	d.udma_mask = udma_rates[chipset_family];
d                 596 drivers/ide/sis5513.c 	return ide_pci_init_one(dev, &d, NULL);
d                 320 drivers/ide/sl82c105.c 	struct ide_port_info d = sl82c105_chipset;
d                 330 drivers/ide/sl82c105.c 		d.dma_ops = NULL;
d                 331 drivers/ide/sl82c105.c 		d.mwdma_mask = 0;
d                 332 drivers/ide/sl82c105.c 		d.host_flags &= ~IDE_HFLAG_SERIALIZE_DMA;
d                 335 drivers/ide/sl82c105.c 	return ide_pci_init_one(dev, &d, NULL);
d                 138 drivers/ide/tx4938ide.c 	struct ide_port_info d = tx4938ide_port_info;
d                 183 drivers/ide/tx4938ide.c 		d.port_ops = NULL;
d                 184 drivers/ide/tx4938ide.c 	ret = ide_host_add(&d, hws, 1, &host);
d                 409 drivers/ide/tx4939ide.c static int tx4939ide_init_dma(ide_hwif_t *hwif, const struct ide_port_info *d)
d                 422 drivers/ide/via82cxxx.c 	struct ide_port_info d;
d                 424 drivers/ide/via82cxxx.c 	d = via82cxxx_chipset;
d                 460 drivers/ide/via82cxxx.c 		d.enablebits[1].reg = d.enablebits[0].reg = 0;
d                 462 drivers/ide/via82cxxx.c 		d.host_flags |= IDE_HFLAG_NO_AUTODMA;
d                 465 drivers/ide/via82cxxx.c 		d.host_flags |= IDE_HFLAG_SINGLE;
d                 468 drivers/ide/via82cxxx.c 		d.host_flags |= IDE_HFLAG_UNMASK_IRQS;
d                 470 drivers/ide/via82cxxx.c 	d.udma_mask = via_config->udma_mask;
d                 481 drivers/ide/via82cxxx.c 	rc = ide_pci_init_one(dev, &d, vdev);
d                 414 drivers/iio/adc/qcom-pm8xxx-xoadc.c static irqreturn_t pm8xxx_eoc_irq(int irq, void *d)
d                 416 drivers/iio/adc/qcom-pm8xxx-xoadc.c 	struct iio_dev *indio_dev = d;
d                  65 drivers/iio/adc/sc27xx_adc.c #define SC27XX_VOLT_RATIO(n, d)		\
d                  66 drivers/iio/adc/sc27xx_adc.c 	(((n) << SC27XX_RATIO_NUMERATOR_OFFSET) | (d))
d                 350 drivers/iio/adc/stm32-adc-core.c static int stm32_adc_domain_map(struct irq_domain *d, unsigned int irq,
d                 353 drivers/iio/adc/stm32-adc-core.c 	irq_set_chip_data(irq, d->host_data);
d                 359 drivers/iio/adc/stm32-adc-core.c static void stm32_adc_domain_unmap(struct irq_domain *d, unsigned int irq)
d                 194 drivers/iio/adc/stm32-dfsdm-adc.c 	unsigned int i, d, fosr, iosr;
d                 223 drivers/iio/adc/stm32-dfsdm-adc.c 				d = fosr * iosr;
d                 225 drivers/iio/adc/stm32-dfsdm-adc.c 				d = fosr * (iosr + 3) + 2;
d                 227 drivers/iio/adc/stm32-dfsdm-adc.c 				d = fosr * (iosr - 1 + p) + p;
d                 229 drivers/iio/adc/stm32-dfsdm-adc.c 			if (d > oversamp)
d                 231 drivers/iio/adc/stm32-dfsdm-adc.c 			else if (d != oversamp)
d                 594 drivers/iio/adc/twl6030-gpadc.c static inline int twl6030_gpadc_get_trim_offset(s8 d)
d                 603 drivers/iio/adc/twl6030-gpadc.c 	__u32 temp = ((d & 0x7f) >> 1) | ((d & 1) << 6);
d                  22 drivers/iio/imu/inv_mpu6050/inv_mpu_acpi.c static int __init asus_t100_matched(const struct dmi_system_id *d)
d                 188 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	unsigned int d, mgmt_1;
d                 214 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	result = regmap_read(st->map, st->reg->pwr_mgmt_2, &d);
d                 218 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 		d &= ~mask;
d                 220 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 		d |= mask;
d                 221 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	result = regmap_write(st->map, st->reg->pwr_mgmt_2, d);
d                 314 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	u8 d;
d                 320 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	d = (INV_MPU6050_FSR_2000DPS << INV_MPU6050_GYRO_CONFIG_FSR_SHIFT);
d                 321 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	result = regmap_write(st->map, st->reg->gyro_config, d);
d                 329 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	d = INV_MPU6050_FIFO_RATE_TO_DIVIDER(INV_MPU6050_INIT_FIFO_RATE);
d                 330 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	result = regmap_write(st->map, st->reg->sample_rate_div, d);
d                 334 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	d = (INV_MPU6050_FS_02G << INV_MPU6050_ACCL_CONFIG_FSR_SHIFT);
d                 335 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	result = regmap_write(st->map, st->reg->accl_config, d);
d                 364 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	__be16 d = cpu_to_be16(val);
d                 367 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	result = regmap_bulk_write(st->map, reg + ind, (u8 *)&d, 2);
d                 378 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	__be16 d;
d                 381 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	result = regmap_bulk_read(st->map, reg + ind, (u8 *)&d, 2);
d                 384 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	*val = (short)be16_to_cpup(&d);
d                 523 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	u8 d;
d                 527 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 			d = (i << INV_MPU6050_GYRO_CONFIG_FSR_SHIFT);
d                 528 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 			result = regmap_write(st->map, st->reg->gyro_config, d);
d                 561 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	u8 d;
d                 565 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 			d = (i << INV_MPU6050_ACCL_CONFIG_FSR_SHIFT);
d                 566 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 			result = regmap_write(st->map, st->reg->accl_config, d);
d                 656 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	static const int d[] = {
d                 666 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	while ((h < hz[i]) && (i < ARRAY_SIZE(d) - 1))
d                 668 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	data = d[i];
d                 685 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	u8 d;
d                 701 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	d = INV_MPU6050_FIFO_RATE_TO_DIVIDER(fifo_rate);
d                 703 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	fifo_rate = INV_MPU6050_DIVIDER_TO_FIFO_RATE(d);
d                 706 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	if (d == st->chip_config.divider) {
d                 714 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	result = regmap_write(st->map, st->reg->sample_rate_div, d);
d                 717 drivers/iio/imu/inv_mpu6050/inv_mpu_core.c 	st->chip_config.divider = d;
d                  96 drivers/iio/imu/inv_mpu6050/inv_mpu_ring.c 	u8 d;
d                 120 drivers/iio/imu/inv_mpu6050/inv_mpu_ring.c 	d = st->chip_config.user_ctrl | INV_MPU6050_BIT_FIFO_RST;
d                 121 drivers/iio/imu/inv_mpu6050/inv_mpu_ring.c 	result = regmap_write(st->map, st->reg->user_ctrl, d);
d                 134 drivers/iio/imu/inv_mpu6050/inv_mpu_ring.c 	d = st->chip_config.user_ctrl | INV_MPU6050_BIT_FIFO_EN;
d                 135 drivers/iio/imu/inv_mpu6050/inv_mpu_ring.c 	result = regmap_write(st->map, st->reg->user_ctrl, d);
d                 139 drivers/iio/imu/inv_mpu6050/inv_mpu_ring.c 	d = 0;
d                 141 drivers/iio/imu/inv_mpu6050/inv_mpu_ring.c 		d |= INV_MPU6050_BITS_GYRO_OUT;
d                 143 drivers/iio/imu/inv_mpu6050/inv_mpu_ring.c 		d |= INV_MPU6050_BIT_ACCEL_OUT;
d                 144 drivers/iio/imu/inv_mpu6050/inv_mpu_ring.c 	result = regmap_write(st->map, st->reg->fifo_en, d);
d                  32 drivers/iio/industrialio-sw-device.c 	struct iio_sw_device_type *d = NULL, *iter;
d                  36 drivers/iio/industrialio-sw-device.c 			d = iter;
d                  40 drivers/iio/industrialio-sw-device.c 	return d;
d                  43 drivers/iio/industrialio-sw-device.c int iio_register_sw_device_type(struct iio_sw_device_type *d)
d                  49 drivers/iio/industrialio-sw-device.c 	iter = __iio_find_sw_device_type(d->name, strlen(d->name));
d                  53 drivers/iio/industrialio-sw-device.c 		list_add_tail(&d->list, &iio_device_types_list);
d                  59 drivers/iio/industrialio-sw-device.c 	d->group = configfs_register_default_group(iio_devices_group, d->name,
d                  61 drivers/iio/industrialio-sw-device.c 	if (IS_ERR(d->group))
d                  62 drivers/iio/industrialio-sw-device.c 		ret = PTR_ERR(d->group);
d                  98 drivers/iio/industrialio-sw-device.c 	struct iio_sw_device *d;
d                 106 drivers/iio/industrialio-sw-device.c 	d = dt->ops->probe(name);
d                 107 drivers/iio/industrialio-sw-device.c 	if (IS_ERR(d))
d                 110 drivers/iio/industrialio-sw-device.c 	d->device_type = dt;
d                 112 drivers/iio/industrialio-sw-device.c 	return d;
d                 115 drivers/iio/industrialio-sw-device.c 	return d;
d                 119 drivers/iio/industrialio-sw-device.c void iio_sw_device_destroy(struct iio_sw_device *d)
d                 121 drivers/iio/industrialio-sw-device.c 	struct iio_sw_device_type *dt = d->device_type;
d                 123 drivers/iio/industrialio-sw-device.c 	dt->ops->remove(d);
d                 131 drivers/iio/industrialio-sw-device.c 	struct iio_sw_device *d;
d                 133 drivers/iio/industrialio-sw-device.c 	d = iio_sw_device_create(group->cg_item.ci_name, name);
d                 134 drivers/iio/industrialio-sw-device.c 	if (IS_ERR(d))
d                 135 drivers/iio/industrialio-sw-device.c 		return ERR_CAST(d);
d                 137 drivers/iio/industrialio-sw-device.c 	config_item_set_name(&d->group.cg_item, "%s", name);
d                 139 drivers/iio/industrialio-sw-device.c 	return &d->group;
d                 145 drivers/iio/industrialio-sw-device.c 	struct iio_sw_device *d = to_iio_sw_device(item);
d                 147 drivers/iio/industrialio-sw-device.c 	iio_sw_device_destroy(d);
d                 501 drivers/iio/industrialio-trigger.c static void iio_trig_subirqmask(struct irq_data *d)
d                 503 drivers/iio/industrialio-trigger.c 	struct irq_chip *chip = irq_data_get_irq_chip(d);
d                 507 drivers/iio/industrialio-trigger.c 	trig->subirqs[d->irq - trig->subirq_base].enabled = false;
d                 510 drivers/iio/industrialio-trigger.c static void iio_trig_subirqunmask(struct irq_data *d)
d                 512 drivers/iio/industrialio-trigger.c 	struct irq_chip *chip = irq_data_get_irq_chip(d);
d                 516 drivers/iio/industrialio-trigger.c 	trig->subirqs[d->irq - trig->subirq_base].enabled = true;
d                  54 drivers/iio/light/cm3605.c static irqreturn_t cm3605_prox_irq(int irq, void *d)
d                  56 drivers/iio/light/cm3605.c 	struct iio_dev *indio_dev = d;
d                 370 drivers/iio/magnetometer/ak8974.c static irqreturn_t ak8974_drdy_irq(int irq, void *d)
d                 372 drivers/iio/magnetometer/ak8974.c 	struct ak8974 *ak8974 = d;
d                 381 drivers/iio/magnetometer/ak8974.c static irqreturn_t ak8974_drdy_irq_thread(int irq, void *d)
d                 383 drivers/iio/magnetometer/ak8974.c 	struct ak8974 *ak8974 = d;
d                 122 drivers/iio/magnetometer/rm3100-core.c static irqreturn_t rm3100_thread_fn(int irq, void *d)
d                 124 drivers/iio/magnetometer/rm3100-core.c 	struct iio_dev *indio_dev = d;
d                 136 drivers/iio/magnetometer/rm3100-core.c static irqreturn_t rm3100_irq_handler(int irq, void *d)
d                 138 drivers/iio/magnetometer/rm3100-core.c 	struct iio_dev *indio_dev = d;
d                 942 drivers/iio/pressure/bmp280-core.c static irqreturn_t bmp085_eoc_irq(int irq, void *d)
d                 944 drivers/iio/pressure/bmp280-core.c 	struct bmp280_data *data = d;
d                 527 drivers/infiniband/core/device.c static const void *net_namespace(struct device *d)
d                 530 drivers/infiniband/core/device.c 			container_of(d, struct ib_core_device, dev);
d                 349 drivers/infiniband/hw/cxgb3/iwch_cm.c static unsigned int find_best_mtu(const struct t3c_data *d, unsigned short mtu)
d                 353 drivers/infiniband/hw/cxgb3/iwch_cm.c 	while (i < d->nmtus - 1 && d->mtus[i + 1] <= mtu)
d                  87 drivers/infiniband/hw/cxgb4/device.c 	struct c4iw_debugfs_data *d = file->private_data;
d                  89 drivers/infiniband/hw/cxgb4/device.c 	return simple_read_from_buffer(buf, count, ppos, d->buf, d->pos);
d                  71 drivers/infiniband/hw/hfi1/debugfs.c 	struct dentry *d = file->f_path.dentry;
d                  74 drivers/infiniband/hw/hfi1/debugfs.c 	r = debugfs_file_get(d);
d                  78 drivers/infiniband/hw/hfi1/debugfs.c 	debugfs_file_put(d);
d                  84 drivers/infiniband/hw/hfi1/debugfs.c 	struct dentry *d = file->f_path.dentry;
d                  87 drivers/infiniband/hw/hfi1/debugfs.c 	r = debugfs_file_get(d);
d                  91 drivers/infiniband/hw/hfi1/debugfs.c 	debugfs_file_put(d);
d                 243 drivers/infiniband/hw/hfi1/eprom.c #define MAGIC4(a, b, c, d) ((d) << 24 | (c) << 16 | (b) << 8 | (a))
d                 617 drivers/infiniband/hw/hfi1/sdma.h static inline int sdma_mapping_type(struct sdma_desc *d)
d                 619 drivers/infiniband/hw/hfi1/sdma.h 	return (d->qw[1] & SDMA_DESC1_GENERATION_SMASK)
d                 623 drivers/infiniband/hw/hfi1/sdma.h static inline size_t sdma_mapping_len(struct sdma_desc *d)
d                 625 drivers/infiniband/hw/hfi1/sdma.h 	return (d->qw[0] & SDMA_DESC0_BYTE_COUNT_SMASK)
d                 629 drivers/infiniband/hw/hfi1/sdma.h static inline dma_addr_t sdma_mapping_addr(struct sdma_desc *d)
d                 631 drivers/infiniband/hw/hfi1/sdma.h 	return (d->qw[0] & SDMA_DESC0_PHY_ADDR_SMASK)
d                  79 drivers/infiniband/hw/mthca/mthca_catas.c 			struct mthca_dev *d = pci_get_drvdata(pdev);
d                  80 drivers/infiniband/hw/mthca/mthca_catas.c 			mthca_dbg(d, "Reset succeeded\n");
d                1427 drivers/infiniband/hw/qib/qib.h int qib_map_page(struct pci_dev *d, struct page *p, dma_addr_t *daddr);
d                1510 drivers/infiniband/ulp/ipoib/ipoib_cm.c static ssize_t show_mode(struct device *d, struct device_attribute *attr,
d                1513 drivers/infiniband/ulp/ipoib/ipoib_cm.c 	struct net_device *dev = to_net_dev(d);
d                1522 drivers/infiniband/ulp/ipoib/ipoib_cm.c static ssize_t set_mode(struct device *d, struct device_attribute *attr,
d                1525 drivers/infiniband/ulp/ipoib/ipoib_cm.c 	struct net_device *dev = to_net_dev(d);
d                  43 drivers/infiniband/ulp/ipoib/ipoib_vlan.c static ssize_t show_parent(struct device *d, struct device_attribute *attr,
d                  46 drivers/infiniband/ulp/ipoib/ipoib_vlan.c 	struct net_device *dev = to_net_dev(d);
d                 416 drivers/infiniband/ulp/srp/ib_srp.c 	struct srp_fr_desc *d;
d                 421 drivers/infiniband/ulp/srp/ib_srp.c 	for (i = 0, d = &pool->desc[0]; i < pool->size; i++, d++) {
d                 422 drivers/infiniband/ulp/srp/ib_srp.c 		if (d->mr)
d                 423 drivers/infiniband/ulp/srp/ib_srp.c 			ib_dereg_mr(d->mr);
d                 440 drivers/infiniband/ulp/srp/ib_srp.c 	struct srp_fr_desc *d;
d                 461 drivers/infiniband/ulp/srp/ib_srp.c 	for (i = 0, d = &pool->desc[0]; i < pool->size; i++, d++) {
d                 470 drivers/infiniband/ulp/srp/ib_srp.c 		d->mr = mr;
d                 471 drivers/infiniband/ulp/srp/ib_srp.c 		list_add_tail(&d->entry, &pool->free_list);
d                 491 drivers/infiniband/ulp/srp/ib_srp.c 	struct srp_fr_desc *d = NULL;
d                 496 drivers/infiniband/ulp/srp/ib_srp.c 		d = list_first_entry(&pool->free_list, typeof(*d), entry);
d                 497 drivers/infiniband/ulp/srp/ib_srp.c 		list_del(&d->entry);
d                 501 drivers/infiniband/ulp/srp/ib_srp.c 	return d;
d                 255 drivers/input/keyboard/atkbd.c static ssize_t atkbd_do_show_##_name(struct device *d,				\
d                 258 drivers/input/keyboard/atkbd.c 	return atkbd_attr_show_helper(d, b, atkbd_show_##_name);		\
d                 260 drivers/input/keyboard/atkbd.c static ssize_t atkbd_do_set_##_name(struct device *d,				\
d                 263 drivers/input/keyboard/atkbd.c 	return atkbd_attr_set_helper(d, b, s, atkbd_set_##_name);		\
d                 277 drivers/input/keyboard/atkbd.c static ssize_t atkbd_do_show_##_name(struct device *d,				\
d                 280 drivers/input/keyboard/atkbd.c 	return atkbd_attr_show_helper(d, b, atkbd_show_##_name);		\
d                 151 drivers/input/keyboard/lm8323.c #define dev_to_lm8323(d)	container_of(d, struct lm8323_chip, client->dev)
d                  61 drivers/input/misc/yealink.c #define _SEG(t, a, am, b, bm, c, cm, d, dm, e, em, f, fm, g, gm)	\
d                  64 drivers/input/misc/yealink.c 		        _LOC(d, dm), _LOC(e, em), _LOC(g, gm),		\
d                  31 drivers/input/mouse/lifebook.c static int lifebook_limit_serio3(const struct dmi_system_id *d)
d                  39 drivers/input/mouse/lifebook.c static int lifebook_set_6byte_proto(const struct dmi_system_id *d)
d                  48 drivers/input/rmi4/rmi_bus.h #define to_rmi_function(d)	container_of(d, struct rmi_function, dev)
d                  89 drivers/input/rmi4/rmi_bus.h #define to_rmi_function_handler(d) \
d                  90 drivers/input/rmi4/rmi_bus.h 		container_of(d, struct rmi_function_handler, driver)
d                  99 drivers/input/rmi4/rmi_bus.h #define to_rmi_driver(d) \
d                 100 drivers/input/rmi4/rmi_bus.h 	container_of(d, struct rmi_driver, driver)
d                 102 drivers/input/rmi4/rmi_bus.h #define to_rmi_device(d) container_of(d, struct rmi_device, dev)
d                 105 drivers/input/rmi4/rmi_bus.h rmi_get_platform_data(struct rmi_device *d)
d                 107 drivers/input/rmi4/rmi_bus.h 	return &d->xport->pdata;
d                 119 drivers/input/rmi4/rmi_bus.h static inline int rmi_reset(struct rmi_device *d)
d                 121 drivers/input/rmi4/rmi_bus.h 	return d->driver->reset_handler(d);
d                 134 drivers/input/rmi4/rmi_bus.h static inline int rmi_read(struct rmi_device *d, u16 addr, u8 *buf)
d                 136 drivers/input/rmi4/rmi_bus.h 	return d->xport->ops->read_block(d->xport, addr, buf, 1);
d                 150 drivers/input/rmi4/rmi_bus.h static inline int rmi_read_block(struct rmi_device *d, u16 addr,
d                 153 drivers/input/rmi4/rmi_bus.h 	return d->xport->ops->read_block(d->xport, addr, buf, len);
d                 165 drivers/input/rmi4/rmi_bus.h static inline int rmi_write(struct rmi_device *d, u16 addr, u8 data)
d                 167 drivers/input/rmi4/rmi_bus.h 	return d->xport->ops->write_block(d->xport, addr, &data, 1);
d                 180 drivers/input/rmi4/rmi_bus.h static inline int rmi_write_block(struct rmi_device *d, u16 addr,
d                 183 drivers/input/rmi4/rmi_bus.h 	return d->xport->ops->write_block(d->xport, addr, buf, len);
d                 560 drivers/input/rmi4/rmi_driver.c int rmi_read_register_desc(struct rmi_device *d, u16 addr,
d                 578 drivers/input/rmi4/rmi_driver.c 	ret = rmi_read(d, addr, &size_presence_reg);
d                 593 drivers/input/rmi4/rmi_driver.c 	ret = rmi_read_block(d, addr, buf, size_presence_reg);
d                 616 drivers/input/rmi4/rmi_driver.c 	rdesc->registers = devm_kcalloc(&d->dev,
d                 638 drivers/input/rmi4/rmi_driver.c 	ret = rmi_read_block(d, addr, struct_buf, rdesc->struct_size);
d                 679 drivers/input/rmi4/rmi_driver.c 		rmi_dbg(RMI_DEBUG_CORE, &d->dev,
d                  72 drivers/input/rmi4/rmi_driver.h int rmi_read_register_desc(struct rmi_device *d, u16 addr,
d                 817 drivers/input/serio/hp_sdc.c static int __init hp_sdc_init_hppa(struct parisc_device *d);
d                 934 drivers/input/serio/hp_sdc.c static int __init hp_sdc_init_hppa(struct parisc_device *d)
d                 938 drivers/input/serio/hp_sdc.c 	if (!d)
d                 943 drivers/input/serio/hp_sdc.c 	hp_sdc.dev		= d;
d                 944 drivers/input/serio/hp_sdc.c 	hp_sdc.irq		= d->irq;
d                 945 drivers/input/serio/hp_sdc.c 	hp_sdc.nmi		= d->aux_irq;
d                 946 drivers/input/serio/hp_sdc.c 	hp_sdc.base_io		= d->hpa.start;
d                 947 drivers/input/serio/hp_sdc.c 	hp_sdc.data_io		= d->hpa.start + 0x800;
d                 948 drivers/input/serio/hp_sdc.c 	hp_sdc.status_io	= d->hpa.start + 0x801;
d                 359 drivers/input/serio/libps2.c 		u8 d = (command >> i) & 3;
d                 360 drivers/input/serio/libps2.c 		retval = __ps2_command(ps2dev, &d, PS2_CMD_SETRES);
d                1297 drivers/input/tablet/aiptek.c 	int err, d;
d                1299 drivers/input/tablet/aiptek.c 	err = kstrtoint(buf, 10, &d);
d                1303 drivers/input/tablet/aiptek.c 	aiptek->newSetting.programmableDelay = d;
d                 125 drivers/input/touchscreen/wm9705.c static inline void poll_delay(int d)
d                 127 drivers/input/touchscreen/wm9705.c 	udelay(3 * AC97_LINK_FRAME + delay_table[d]);
d                 143 drivers/input/touchscreen/wm9712.c static inline void poll_delay(int d)
d                 145 drivers/input/touchscreen/wm9712.c 	udelay(3 * AC97_LINK_FRAME + delay_table[d]);
d                 143 drivers/input/touchscreen/wm9713.c static inline void poll_delay(int d)
d                 145 drivers/input/touchscreen/wm9713.c 	udelay(3 * AC97_LINK_FRAME + delay_table[d]);
d                  47 drivers/iommu/amd_iommu_quirks.c static int __init ivrs_ioapic_quirk_cb(const struct dmi_system_id *d)
d                  51 drivers/iommu/amd_iommu_quirks.c 	for (i = d->driver_data; i->id != 0 && i->devid != 0; i++)
d                 383 drivers/iommu/intel-iommu.c #define device_needs_bounce(d) (!intel_no_bounce && dev_is_pci(d) &&	\
d                 384 drivers/iommu/intel-iommu.c 				to_pci_dev(d)->untrusted)
d                  24 drivers/iommu/intel-svm.c static irqreturn_t prq_event_thread(int irq, void *d);
d                 531 drivers/iommu/intel-svm.c static irqreturn_t prq_event_thread(int irq, void *d)
d                 533 drivers/iommu/intel-svm.c 	struct intel_iommu *iommu = d;
d                  38 drivers/iommu/io-pgtable-arm.c #define ARM_LPAE_START_LVL(d)		(ARM_LPAE_MAX_LEVELS - (d)->levels)
d                  44 drivers/iommu/io-pgtable-arm.c #define ARM_LPAE_LVL_SHIFT(l,d)						\
d                  45 drivers/iommu/io-pgtable-arm.c 	((((d)->levels - ((l) - ARM_LPAE_START_LVL(d) + 1))		\
d                  46 drivers/iommu/io-pgtable-arm.c 	  * (d)->bits_per_level) + (d)->pg_shift)
d                  48 drivers/iommu/io-pgtable-arm.c #define ARM_LPAE_GRANULE(d)		(1UL << (d)->pg_shift)
d                  50 drivers/iommu/io-pgtable-arm.c #define ARM_LPAE_PAGES_PER_PGD(d)					\
d                  51 drivers/iommu/io-pgtable-arm.c 	DIV_ROUND_UP((d)->pgd_size, ARM_LPAE_GRANULE(d))
d                  57 drivers/iommu/io-pgtable-arm.c #define ARM_LPAE_PGD_IDX(l,d)						\
d                  58 drivers/iommu/io-pgtable-arm.c 	((l) == ARM_LPAE_START_LVL(d) ? ilog2(ARM_LPAE_PAGES_PER_PGD(d)) : 0)
d                  60 drivers/iommu/io-pgtable-arm.c #define ARM_LPAE_LVL_IDX(a,l,d)						\
d                  61 drivers/iommu/io-pgtable-arm.c 	(((u64)(a) >> ARM_LPAE_LVL_SHIFT(l,d)) &			\
d                  62 drivers/iommu/io-pgtable-arm.c 	 ((1 << ((d)->bits_per_level + ARM_LPAE_PGD_IDX(l,d))) - 1))
d                  65 drivers/iommu/io-pgtable-arm.c #define ARM_LPAE_BLOCK_SIZE(l,d)					\
d                  67 drivers/iommu/io-pgtable-arm.c 		((ARM_LPAE_MAX_LEVELS - (l)) * (d)->bits_per_level)))
d                 173 drivers/iommu/io-pgtable-arm.c #define iopte_deref(pte,d) __va(iopte_to_paddr(pte, d))
d                 241 drivers/iommu/omap-iommu-debug.c 	struct dentry *d;
d                 246 drivers/iommu/omap-iommu-debug.c 	d = debugfs_create_dir(obj->name, iommu_debug_root);
d                 247 drivers/iommu/omap-iommu-debug.c 	obj->debug_dir = d;
d                 249 drivers/iommu/omap-iommu-debug.c 	debugfs_create_u32("nr_tlb_entries", 0400, d, &obj->nr_tlb_entries);
d                 250 drivers/iommu/omap-iommu-debug.c 	debugfs_create_file("regs", 0400, d, obj, &regs_fops);
d                 251 drivers/iommu/omap-iommu-debug.c 	debugfs_create_file("tlb", 0400, d, obj, &tlb_fops);
d                 252 drivers/iommu/omap-iommu-debug.c 	debugfs_create_file("pagetable", 0400, d, obj, &pagetable_fops);
d                  66 drivers/iommu/omap-iopgtable.h static inline phys_addr_t omap_iommu_translate(u32 d, u32 va, u32 mask)
d                  68 drivers/iommu/omap-iopgtable.h 	return (d & mask) | (va & (~mask));
d                  95 drivers/irqchip/exynos-combiner.c static int combiner_set_affinity(struct irq_data *d,
d                  98 drivers/irqchip/exynos-combiner.c 	struct combiner_chip_data *chip_data = irq_data_get_irq_chip_data(d);
d                 138 drivers/irqchip/exynos-combiner.c static int combiner_irq_domain_xlate(struct irq_domain *d,
d                 144 drivers/irqchip/exynos-combiner.c 	if (irq_domain_get_of_node(d) != controller)
d                 156 drivers/irqchip/exynos-combiner.c static int combiner_irq_domain_map(struct irq_domain *d, unsigned int irq,
d                 159 drivers/irqchip/exynos-combiner.c 	struct combiner_chip_data *combiner_data = d->host_data;
d                  39 drivers/irqchip/irq-alpine-msi.c static void alpine_msix_mask_msi_irq(struct irq_data *d)
d                  41 drivers/irqchip/irq-alpine-msi.c 	pci_msi_mask_irq(d);
d                  42 drivers/irqchip/irq-alpine-msi.c 	irq_chip_mask_parent(d);
d                  45 drivers/irqchip/irq-alpine-msi.c static void alpine_msix_unmask_msi_irq(struct irq_data *d)
d                  47 drivers/irqchip/irq-alpine-msi.c 	pci_msi_unmask_irq(d);
d                  48 drivers/irqchip/irq-alpine-msi.c 	irq_chip_unmask_parent(d);
d                 123 drivers/irqchip/irq-alpine-msi.c 	struct irq_data *d;
d                 139 drivers/irqchip/irq-alpine-msi.c 	d = irq_domain_get_irq_data(domain->parent, virq);
d                 140 drivers/irqchip/irq-alpine-msi.c 	d->chip->irq_set_type(d, IRQ_TYPE_EDGE_RISING);
d                 178 drivers/irqchip/irq-alpine-msi.c 	struct irq_data *d = irq_domain_get_irq_data(domain, virq);
d                 179 drivers/irqchip/irq-alpine-msi.c 	struct alpine_msix_data *priv = irq_data_get_irq_chip_data(d);
d                 182 drivers/irqchip/irq-alpine-msi.c 	alpine_msix_free_sgi(priv, d->hwirq, nr_irqs);
d                 172 drivers/irqchip/irq-armada-370-xp.c static void armada_370_xp_irq_mask(struct irq_data *d)
d                 174 drivers/irqchip/irq-armada-370-xp.c 	irq_hw_number_t hwirq = irqd_to_hwirq(d);
d                 184 drivers/irqchip/irq-armada-370-xp.c static void armada_370_xp_irq_unmask(struct irq_data *d)
d                 186 drivers/irqchip/irq-armada-370-xp.c 	irq_hw_number_t hwirq = irqd_to_hwirq(d);
d                 259 drivers/irqchip/irq-armada-370-xp.c 	struct irq_data *d = irq_domain_get_irq_data(domain, virq);
d                 262 drivers/irqchip/irq-armada-370-xp.c 	bitmap_clear(msi_used, d->hwirq, nr_irqs);
d                 316 drivers/irqchip/irq-armada-370-xp.c static int armada_xp_set_affinity(struct irq_data *d,
d                 319 drivers/irqchip/irq-armada-370-xp.c 	irq_hw_number_t hwirq = irqd_to_hwirq(d);
d                 333 drivers/irqchip/irq-armada-370-xp.c 	irq_data_update_effective_affinity(d, cpumask_of(cpu));
d                 107 drivers/irqchip/irq-aspeed-vic.c static void avic_ack_irq(struct irq_data *d)
d                 109 drivers/irqchip/irq-aspeed-vic.c 	struct aspeed_vic *vic = irq_data_get_irq_chip_data(d);
d                 110 drivers/irqchip/irq-aspeed-vic.c 	unsigned int sidx = d->hwirq >> 5;
d                 111 drivers/irqchip/irq-aspeed-vic.c 	unsigned int sbit = 1u << (d->hwirq & 0x1f);
d                 118 drivers/irqchip/irq-aspeed-vic.c static void avic_mask_irq(struct irq_data *d)
d                 120 drivers/irqchip/irq-aspeed-vic.c 	struct aspeed_vic *vic = irq_data_get_irq_chip_data(d);
d                 121 drivers/irqchip/irq-aspeed-vic.c 	unsigned int sidx = d->hwirq >> 5;
d                 122 drivers/irqchip/irq-aspeed-vic.c 	unsigned int sbit = 1u << (d->hwirq & 0x1f);
d                 127 drivers/irqchip/irq-aspeed-vic.c static void avic_unmask_irq(struct irq_data *d)
d                 129 drivers/irqchip/irq-aspeed-vic.c 	struct aspeed_vic *vic = irq_data_get_irq_chip_data(d);
d                 130 drivers/irqchip/irq-aspeed-vic.c 	unsigned int sidx = d->hwirq >> 5;
d                 131 drivers/irqchip/irq-aspeed-vic.c 	unsigned int sbit = 1u << (d->hwirq & 0x1f);
d                 137 drivers/irqchip/irq-aspeed-vic.c static void avic_mask_ack_irq(struct irq_data *d)
d                 139 drivers/irqchip/irq-aspeed-vic.c 	struct aspeed_vic *vic = irq_data_get_irq_chip_data(d);
d                 140 drivers/irqchip/irq-aspeed-vic.c 	unsigned int sidx = d->hwirq >> 5;
d                 141 drivers/irqchip/irq-aspeed-vic.c 	unsigned int sbit = 1u << (d->hwirq & 0x1f);
d                 159 drivers/irqchip/irq-aspeed-vic.c static int avic_map(struct irq_domain *d, unsigned int irq,
d                 162 drivers/irqchip/irq-aspeed-vic.c 	struct aspeed_vic *vic = d->host_data;
d                  60 drivers/irqchip/irq-ath79-misc.c static void ar71xx_misc_irq_unmask(struct irq_data *d)
d                  62 drivers/irqchip/irq-ath79-misc.c 	void __iomem *base = irq_data_get_irq_chip_data(d);
d                  63 drivers/irqchip/irq-ath79-misc.c 	unsigned int irq = d->hwirq;
d                  73 drivers/irqchip/irq-ath79-misc.c static void ar71xx_misc_irq_mask(struct irq_data *d)
d                  75 drivers/irqchip/irq-ath79-misc.c 	void __iomem *base = irq_data_get_irq_chip_data(d);
d                  76 drivers/irqchip/irq-ath79-misc.c 	unsigned int irq = d->hwirq;
d                  86 drivers/irqchip/irq-ath79-misc.c static void ar724x_misc_irq_ack(struct irq_data *d)
d                  88 drivers/irqchip/irq-ath79-misc.c 	void __iomem *base = irq_data_get_irq_chip_data(d);
d                  89 drivers/irqchip/irq-ath79-misc.c 	unsigned int irq = d->hwirq;
d                 105 drivers/irqchip/irq-ath79-misc.c static int misc_map(struct irq_domain *d, unsigned int irq, irq_hw_number_t hw)
d                 108 drivers/irqchip/irq-ath79-misc.c 	irq_set_chip_data(irq, d->host_data);
d                  41 drivers/irqchip/irq-atmel-aic-common.c static void aic_common_shutdown(struct irq_data *d)
d                  43 drivers/irqchip/irq-atmel-aic-common.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                  45 drivers/irqchip/irq-atmel-aic-common.c 	ct->chip.irq_mask(d);
d                  48 drivers/irqchip/irq-atmel-aic-common.c int aic_common_set_type(struct irq_data *d, unsigned type, unsigned *val)
d                  50 drivers/irqchip/irq-atmel-aic-common.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  62 drivers/irqchip/irq-atmel-aic-common.c 		if (!(d->mask & aic->ext_irqs))
d                  68 drivers/irqchip/irq-atmel-aic-common.c 		if (!(d->mask & aic->ext_irqs))
d                  89 drivers/irqchip/irq-atmel-aic-common.c int aic_common_irq_domain_xlate(struct irq_domain *d,
d                  20 drivers/irqchip/irq-atmel-aic-common.h int aic_common_set_type(struct irq_data *d, unsigned type, unsigned *val);
d                  24 drivers/irqchip/irq-atmel-aic-common.h int aic_common_irq_domain_xlate(struct irq_domain *d,
d                  77 drivers/irqchip/irq-atmel-aic.c static int aic_retrigger(struct irq_data *d)
d                  79 drivers/irqchip/irq-atmel-aic.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  83 drivers/irqchip/irq-atmel-aic.c 	irq_reg_writel(gc, d->mask, AT91_AIC_ISCR);
d                  89 drivers/irqchip/irq-atmel-aic.c static int aic_set_type(struct irq_data *d, unsigned type)
d                  91 drivers/irqchip/irq-atmel-aic.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  95 drivers/irqchip/irq-atmel-aic.c 	smr = irq_reg_readl(gc, AT91_AIC_SMR(d->hwirq));
d                  96 drivers/irqchip/irq-atmel-aic.c 	ret = aic_common_set_type(d, type, &smr);
d                 100 drivers/irqchip/irq-atmel-aic.c 	irq_reg_writel(gc, smr, AT91_AIC_SMR(d->hwirq));
d                 106 drivers/irqchip/irq-atmel-aic.c static void aic_suspend(struct irq_data *d)
d                 108 drivers/irqchip/irq-atmel-aic.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 116 drivers/irqchip/irq-atmel-aic.c static void aic_resume(struct irq_data *d)
d                 118 drivers/irqchip/irq-atmel-aic.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 126 drivers/irqchip/irq-atmel-aic.c static void aic_pm_shutdown(struct irq_data *d)
d                 128 drivers/irqchip/irq-atmel-aic.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 171 drivers/irqchip/irq-atmel-aic.c static int aic_irq_domain_xlate(struct irq_domain *d,
d                 177 drivers/irqchip/irq-atmel-aic.c 	struct irq_domain_chip_generic *dgc = d->gc;
d                 187 drivers/irqchip/irq-atmel-aic.c 	ret = aic_common_irq_domain_xlate(d, ctrlr, intspec, intsize,
d                  86 drivers/irqchip/irq-atmel-aic5.c static void aic5_mask(struct irq_data *d)
d                  88 drivers/irqchip/irq-atmel-aic5.c 	struct irq_domain *domain = d->domain;
d                  90 drivers/irqchip/irq-atmel-aic5.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  97 drivers/irqchip/irq-atmel-aic5.c 	irq_reg_writel(gc, d->hwirq, AT91_AIC5_SSR);
d                  99 drivers/irqchip/irq-atmel-aic5.c 	gc->mask_cache &= ~d->mask;
d                 103 drivers/irqchip/irq-atmel-aic5.c static void aic5_unmask(struct irq_data *d)
d                 105 drivers/irqchip/irq-atmel-aic5.c 	struct irq_domain *domain = d->domain;
d                 107 drivers/irqchip/irq-atmel-aic5.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 114 drivers/irqchip/irq-atmel-aic5.c 	irq_reg_writel(gc, d->hwirq, AT91_AIC5_SSR);
d                 116 drivers/irqchip/irq-atmel-aic5.c 	gc->mask_cache |= d->mask;
d                 120 drivers/irqchip/irq-atmel-aic5.c static int aic5_retrigger(struct irq_data *d)
d                 122 drivers/irqchip/irq-atmel-aic5.c 	struct irq_domain *domain = d->domain;
d                 127 drivers/irqchip/irq-atmel-aic5.c 	irq_reg_writel(bgc, d->hwirq, AT91_AIC5_SSR);
d                 134 drivers/irqchip/irq-atmel-aic5.c static int aic5_set_type(struct irq_data *d, unsigned type)
d                 136 drivers/irqchip/irq-atmel-aic5.c 	struct irq_domain *domain = d->domain;
d                 142 drivers/irqchip/irq-atmel-aic5.c 	irq_reg_writel(bgc, d->hwirq, AT91_AIC5_SSR);
d                 144 drivers/irqchip/irq-atmel-aic5.c 	ret = aic_common_set_type(d, type, &smr);
d                 155 drivers/irqchip/irq-atmel-aic5.c static void aic5_suspend(struct irq_data *d)
d                 157 drivers/irqchip/irq-atmel-aic5.c 	struct irq_domain *domain = d->domain;
d                 160 drivers/irqchip/irq-atmel-aic5.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 185 drivers/irqchip/irq-atmel-aic5.c static void aic5_resume(struct irq_data *d)
d                 187 drivers/irqchip/irq-atmel-aic5.c 	struct irq_domain *domain = d->domain;
d                 190 drivers/irqchip/irq-atmel-aic5.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 221 drivers/irqchip/irq-atmel-aic5.c static void aic5_pm_shutdown(struct irq_data *d)
d                 223 drivers/irqchip/irq-atmel-aic5.c 	struct irq_domain *domain = d->domain;
d                 226 drivers/irqchip/irq-atmel-aic5.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 274 drivers/irqchip/irq-atmel-aic5.c static int aic5_irq_domain_xlate(struct irq_domain *d,
d                 280 drivers/irqchip/irq-atmel-aic5.c 	struct irq_chip_generic *bgc = irq_get_domain_generic_chip(d, 0);
d                 288 drivers/irqchip/irq-atmel-aic5.c 	ret = aic_common_irq_domain_xlate(d, ctrlr, intspec, intsize,
d                  91 drivers/irqchip/irq-bcm2835.c static void armctrl_mask_irq(struct irq_data *d)
d                  93 drivers/irqchip/irq-bcm2835.c 	writel_relaxed(HWIRQ_BIT(d->hwirq), intc.disable[HWIRQ_BANK(d->hwirq)]);
d                  96 drivers/irqchip/irq-bcm2835.c static void armctrl_unmask_irq(struct irq_data *d)
d                  98 drivers/irqchip/irq-bcm2835.c 	writel_relaxed(HWIRQ_BIT(d->hwirq), intc.enable[HWIRQ_BANK(d->hwirq)]);
d                 107 drivers/irqchip/irq-bcm2835.c static int armctrl_xlate(struct irq_domain *d, struct device_node *ctrlr,
d                  42 drivers/irqchip/irq-bcm2836.c static void bcm2836_arm_irqchip_mask_timer_irq(struct irq_data *d)
d                  45 drivers/irqchip/irq-bcm2836.c 					     d->hwirq - LOCAL_IRQ_CNTPSIRQ,
d                  49 drivers/irqchip/irq-bcm2836.c static void bcm2836_arm_irqchip_unmask_timer_irq(struct irq_data *d)
d                  52 drivers/irqchip/irq-bcm2836.c 					       d->hwirq - LOCAL_IRQ_CNTPSIRQ,
d                  62 drivers/irqchip/irq-bcm2836.c static void bcm2836_arm_irqchip_mask_pmu_irq(struct irq_data *d)
d                  67 drivers/irqchip/irq-bcm2836.c static void bcm2836_arm_irqchip_unmask_pmu_irq(struct irq_data *d)
d                  78 drivers/irqchip/irq-bcm2836.c static void bcm2836_arm_irqchip_mask_gpu_irq(struct irq_data *d)
d                  82 drivers/irqchip/irq-bcm2836.c static void bcm2836_arm_irqchip_unmask_gpu_irq(struct irq_data *d)
d                  92 drivers/irqchip/irq-bcm2836.c static int bcm2836_map(struct irq_domain *d, unsigned int irq,
d                 116 drivers/irqchip/irq-bcm2836.c 	irq_domain_set_info(d, irq, hw, chip, d->host_data,
d                 111 drivers/irqchip/irq-bcm6345-l1.c 					struct irq_data *d)
d                 113 drivers/irqchip/irq-bcm6345-l1.c 	return cpumask_first_and(&intc->cpumask, irq_data_get_affinity_mask(d));
d                 152 drivers/irqchip/irq-bcm6345-l1.c static inline void __bcm6345_l1_unmask(struct irq_data *d)
d                 154 drivers/irqchip/irq-bcm6345-l1.c 	struct bcm6345_l1_chip *intc = irq_data_get_irq_chip_data(d);
d                 155 drivers/irqchip/irq-bcm6345-l1.c 	u32 word = d->hwirq / IRQS_PER_WORD;
d                 156 drivers/irqchip/irq-bcm6345-l1.c 	u32 mask = BIT(d->hwirq % IRQS_PER_WORD);
d                 157 drivers/irqchip/irq-bcm6345-l1.c 	unsigned int cpu_idx = cpu_for_irq(intc, d);
d                 164 drivers/irqchip/irq-bcm6345-l1.c static inline void __bcm6345_l1_mask(struct irq_data *d)
d                 166 drivers/irqchip/irq-bcm6345-l1.c 	struct bcm6345_l1_chip *intc = irq_data_get_irq_chip_data(d);
d                 167 drivers/irqchip/irq-bcm6345-l1.c 	u32 word = d->hwirq / IRQS_PER_WORD;
d                 168 drivers/irqchip/irq-bcm6345-l1.c 	u32 mask = BIT(d->hwirq % IRQS_PER_WORD);
d                 169 drivers/irqchip/irq-bcm6345-l1.c 	unsigned int cpu_idx = cpu_for_irq(intc, d);
d                 176 drivers/irqchip/irq-bcm6345-l1.c static void bcm6345_l1_unmask(struct irq_data *d)
d                 178 drivers/irqchip/irq-bcm6345-l1.c 	struct bcm6345_l1_chip *intc = irq_data_get_irq_chip_data(d);
d                 182 drivers/irqchip/irq-bcm6345-l1.c 	__bcm6345_l1_unmask(d);
d                 186 drivers/irqchip/irq-bcm6345-l1.c static void bcm6345_l1_mask(struct irq_data *d)
d                 188 drivers/irqchip/irq-bcm6345-l1.c 	struct bcm6345_l1_chip *intc = irq_data_get_irq_chip_data(d);
d                 192 drivers/irqchip/irq-bcm6345-l1.c 	__bcm6345_l1_mask(d);
d                 196 drivers/irqchip/irq-bcm6345-l1.c static int bcm6345_l1_set_affinity(struct irq_data *d,
d                 200 drivers/irqchip/irq-bcm6345-l1.c 	struct bcm6345_l1_chip *intc = irq_data_get_irq_chip_data(d);
d                 201 drivers/irqchip/irq-bcm6345-l1.c 	u32 word = d->hwirq / IRQS_PER_WORD;
d                 202 drivers/irqchip/irq-bcm6345-l1.c 	u32 mask = BIT(d->hwirq % IRQS_PER_WORD);
d                 203 drivers/irqchip/irq-bcm6345-l1.c 	unsigned int old_cpu = cpu_for_irq(intc, d);
d                 222 drivers/irqchip/irq-bcm6345-l1.c 			__bcm6345_l1_mask(d);
d                 223 drivers/irqchip/irq-bcm6345-l1.c 		cpumask_copy(irq_data_get_affinity_mask(d), dest);
d                 225 drivers/irqchip/irq-bcm6345-l1.c 			__bcm6345_l1_unmask(d);
d                 227 drivers/irqchip/irq-bcm6345-l1.c 		cpumask_copy(irq_data_get_affinity_mask(d), dest);
d                 231 drivers/irqchip/irq-bcm6345-l1.c 	irq_data_update_effective_affinity(d, cpumask_of(new_cpu));
d                 287 drivers/irqchip/irq-bcm6345-l1.c static int bcm6345_l1_map(struct irq_domain *d, unsigned int virq,
d                 292 drivers/irqchip/irq-bcm6345-l1.c 	irq_set_chip_data(virq, d->host_data);
d                 148 drivers/irqchip/irq-bcm7038-l1.c static void __bcm7038_l1_unmask(struct irq_data *d, unsigned int cpu_idx)
d                 150 drivers/irqchip/irq-bcm7038-l1.c 	struct bcm7038_l1_chip *intc = irq_data_get_irq_chip_data(d);
d                 151 drivers/irqchip/irq-bcm7038-l1.c 	u32 word = d->hwirq / IRQS_PER_WORD;
d                 152 drivers/irqchip/irq-bcm7038-l1.c 	u32 mask = BIT(d->hwirq % IRQS_PER_WORD);
d                 159 drivers/irqchip/irq-bcm7038-l1.c static void __bcm7038_l1_mask(struct irq_data *d, unsigned int cpu_idx)
d                 161 drivers/irqchip/irq-bcm7038-l1.c 	struct bcm7038_l1_chip *intc = irq_data_get_irq_chip_data(d);
d                 162 drivers/irqchip/irq-bcm7038-l1.c 	u32 word = d->hwirq / IRQS_PER_WORD;
d                 163 drivers/irqchip/irq-bcm7038-l1.c 	u32 mask = BIT(d->hwirq % IRQS_PER_WORD);
d                 170 drivers/irqchip/irq-bcm7038-l1.c static void bcm7038_l1_unmask(struct irq_data *d)
d                 172 drivers/irqchip/irq-bcm7038-l1.c 	struct bcm7038_l1_chip *intc = irq_data_get_irq_chip_data(d);
d                 176 drivers/irqchip/irq-bcm7038-l1.c 	__bcm7038_l1_unmask(d, intc->affinity[d->hwirq]);
d                 180 drivers/irqchip/irq-bcm7038-l1.c static void bcm7038_l1_mask(struct irq_data *d)
d                 182 drivers/irqchip/irq-bcm7038-l1.c 	struct bcm7038_l1_chip *intc = irq_data_get_irq_chip_data(d);
d                 186 drivers/irqchip/irq-bcm7038-l1.c 	__bcm7038_l1_mask(d, intc->affinity[d->hwirq]);
d                 190 drivers/irqchip/irq-bcm7038-l1.c static int bcm7038_l1_set_affinity(struct irq_data *d,
d                 194 drivers/irqchip/irq-bcm7038-l1.c 	struct bcm7038_l1_chip *intc = irq_data_get_irq_chip_data(d);
d                 196 drivers/irqchip/irq-bcm7038-l1.c 	irq_hw_number_t hw = d->hwirq;
d                 206 drivers/irqchip/irq-bcm7038-l1.c 	__bcm7038_l1_mask(d, intc->affinity[hw]);
d                 209 drivers/irqchip/irq-bcm7038-l1.c 		__bcm7038_l1_unmask(d, first_cpu);
d                 212 drivers/irqchip/irq-bcm7038-l1.c 	irq_data_update_effective_affinity(d, cpumask_of(first_cpu));
d                 218 drivers/irqchip/irq-bcm7038-l1.c static void bcm7038_l1_cpu_offline(struct irq_data *d)
d                 220 drivers/irqchip/irq-bcm7038-l1.c 	struct cpumask *mask = irq_data_get_affinity_mask(d);
d                 240 drivers/irqchip/irq-bcm7038-l1.c 	irq_set_affinity_locked(d, &new_affinity, false);
d                 304 drivers/irqchip/irq-bcm7038-l1.c static int bcm7038_l1_map(struct irq_domain *d, unsigned int virq,
d                 308 drivers/irqchip/irq-bcm7038-l1.c 	irq_set_chip_data(virq, d->host_data);
d                  78 drivers/irqchip/irq-brcmstb-l2.c static void brcmstb_l2_mask_and_ack(struct irq_data *d)
d                  80 drivers/irqchip/irq-brcmstb-l2.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  81 drivers/irqchip/irq-brcmstb-l2.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                  82 drivers/irqchip/irq-brcmstb-l2.c 	u32 mask = d->mask;
d                 119 drivers/irqchip/irq-brcmstb-l2.c static void brcmstb_l2_intc_suspend(struct irq_data *d)
d                 121 drivers/irqchip/irq-brcmstb-l2.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 122 drivers/irqchip/irq-brcmstb-l2.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                 138 drivers/irqchip/irq-brcmstb-l2.c static void brcmstb_l2_intc_resume(struct irq_data *d)
d                 140 drivers/irqchip/irq-brcmstb-l2.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 141 drivers/irqchip/irq-brcmstb-l2.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                  91 drivers/irqchip/irq-clps711x.c static void clps711x_intc_eoi(struct irq_data *d)
d                  93 drivers/irqchip/irq-clps711x.c 	irq_hw_number_t hwirq = irqd_to_hwirq(d);
d                  98 drivers/irqchip/irq-clps711x.c static void clps711x_intc_mask(struct irq_data *d)
d                 100 drivers/irqchip/irq-clps711x.c 	irq_hw_number_t hwirq = irqd_to_hwirq(d);
d                 109 drivers/irqchip/irq-clps711x.c static void clps711x_intc_unmask(struct irq_data *d)
d                 111 drivers/irqchip/irq-clps711x.c 	irq_hw_number_t hwirq = irqd_to_hwirq(d);
d                 111 drivers/irqchip/irq-crossbar.c static int crossbar_domain_alloc(struct irq_domain *d, unsigned int virq,
d                 128 drivers/irqchip/irq-crossbar.c 		int err = allocate_gic_irq(d, virq + i, hwirq + i);
d                 133 drivers/irqchip/irq-crossbar.c 		irq_domain_set_hwirq_and_chip(d, virq + i, hwirq + i,
d                 159 drivers/irqchip/irq-crossbar.c 		struct irq_data *d = irq_domain_get_irq_data(domain, virq + i);
d                 161 drivers/irqchip/irq-crossbar.c 		irq_domain_reset_irq_data(d);
d                 162 drivers/irqchip/irq-crossbar.c 		cb->irq_map[d->hwirq] = IRQ_FREE;
d                 163 drivers/irqchip/irq-crossbar.c 		cb->write(d->hwirq, cb->safe_map);
d                 168 drivers/irqchip/irq-crossbar.c static int crossbar_domain_translate(struct irq_domain *d,
d                  46 drivers/irqchip/irq-csky-apb-intc.c static void irq_ck_mask_set_bit(struct irq_data *d)
d                  48 drivers/irqchip/irq-csky-apb-intc.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  49 drivers/irqchip/irq-csky-apb-intc.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                  51 drivers/irqchip/irq-csky-apb-intc.c 	u32 mask = d->mask;
d                  81 drivers/irqchip/irq-csky-mpintc.c static void csky_mpintc_enable(struct irq_data *d)
d                  85 drivers/irqchip/irq-csky-mpintc.c 	setup_trigger(d->hwirq, __trigger[d->hwirq]);
d                  87 drivers/irqchip/irq-csky-mpintc.c 	writel_relaxed(d->hwirq, reg_base + INTCL_SENR);
d                  90 drivers/irqchip/irq-csky-mpintc.c static void csky_mpintc_disable(struct irq_data *d)
d                  94 drivers/irqchip/irq-csky-mpintc.c 	writel_relaxed(d->hwirq, reg_base + INTCL_CENR);
d                  97 drivers/irqchip/irq-csky-mpintc.c static void csky_mpintc_eoi(struct irq_data *d)
d                 101 drivers/irqchip/irq-csky-mpintc.c 	writel_relaxed(d->hwirq, reg_base + INTCL_CACR);
d                 104 drivers/irqchip/irq-csky-mpintc.c static int csky_mpintc_set_type(struct irq_data *d, unsigned int type)
d                 108 drivers/irqchip/irq-csky-mpintc.c 		__trigger[d->hwirq] = 0;
d                 111 drivers/irqchip/irq-csky-mpintc.c 		__trigger[d->hwirq] = 1;
d                 114 drivers/irqchip/irq-csky-mpintc.c 		__trigger[d->hwirq] = 2;
d                 117 drivers/irqchip/irq-csky-mpintc.c 		__trigger[d->hwirq] = 3;
d                 127 drivers/irqchip/irq-csky-mpintc.c static int csky_irq_set_affinity(struct irq_data *d,
d                 132 drivers/irqchip/irq-csky-mpintc.c 	unsigned int offset = 4 * (d->hwirq - COMM_IRQ_BASE);
d                 158 drivers/irqchip/irq-csky-mpintc.c 	irq_data_update_effective_affinity(d, cpumask_of(cpu));
d                 175 drivers/irqchip/irq-csky-mpintc.c static int csky_irqdomain_map(struct irq_domain *d, unsigned int irq,
d                 190 drivers/irqchip/irq-csky-mpintc.c static int csky_irq_domain_xlate_cells(struct irq_domain *d,
d                  55 drivers/irqchip/irq-davinci-cp-intc.c static void davinci_cp_intc_ack_irq(struct irq_data *d)
d                  57 drivers/irqchip/irq-davinci-cp-intc.c 	davinci_cp_intc_write(d->hwirq, DAVINCI_CP_INTC_SYS_STAT_IDX_CLR);
d                  60 drivers/irqchip/irq-davinci-cp-intc.c static void davinci_cp_intc_mask_irq(struct irq_data *d)
d                  64 drivers/irqchip/irq-davinci-cp-intc.c 	davinci_cp_intc_write(d->hwirq, DAVINCI_CP_INTC_SYS_ENABLE_IDX_CLR);
d                  68 drivers/irqchip/irq-davinci-cp-intc.c static void davinci_cp_intc_unmask_irq(struct irq_data *d)
d                  70 drivers/irqchip/irq-davinci-cp-intc.c 	davinci_cp_intc_write(d->hwirq, DAVINCI_CP_INTC_SYS_ENABLE_IDX_SET);
d                  73 drivers/irqchip/irq-davinci-cp-intc.c static int davinci_cp_intc_set_irq_type(struct irq_data *d,
d                  78 drivers/irqchip/irq-davinci-cp-intc.c 	reg = BIT_WORD(d->hwirq);
d                  79 drivers/irqchip/irq-davinci-cp-intc.c 	mask = BIT_MASK(d->hwirq);
d                  31 drivers/irqchip/irq-dw-apb-ictl.c 	struct irq_domain *d = irq_desc_get_handler_data(desc);
d                  37 drivers/irqchip/irq-dw-apb-ictl.c 	for (n = 0; n < d->revmap_size; n += 32) {
d                  38 drivers/irqchip/irq-dw-apb-ictl.c 		struct irq_chip_generic *gc = irq_get_domain_generic_chip(d, n);
d                  43 drivers/irqchip/irq-dw-apb-ictl.c 			u32 virq = irq_find_mapping(d, gc->irq_base + hwirq);
d                  54 drivers/irqchip/irq-dw-apb-ictl.c static void dw_apb_ictl_resume(struct irq_data *d)
d                  56 drivers/irqchip/irq-dw-apb-ictl.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  57 drivers/irqchip/irq-dw-apb-ictl.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                 109 drivers/irqchip/irq-eznps.c static int nps400_irq_map(struct irq_domain *d, unsigned int virq,
d                  54 drivers/irqchip/irq-ftintc010.c static void ft010_irq_mask(struct irq_data *d)
d                  56 drivers/irqchip/irq-ftintc010.c 	struct ft010_irq_data *f = irq_data_get_irq_chip_data(d);
d                  60 drivers/irqchip/irq-ftintc010.c 	mask &= ~BIT(irqd_to_hwirq(d));
d                  64 drivers/irqchip/irq-ftintc010.c static void ft010_irq_unmask(struct irq_data *d)
d                  66 drivers/irqchip/irq-ftintc010.c 	struct ft010_irq_data *f = irq_data_get_irq_chip_data(d);
d                  70 drivers/irqchip/irq-ftintc010.c 	mask |= BIT(irqd_to_hwirq(d));
d                  74 drivers/irqchip/irq-ftintc010.c static void ft010_irq_ack(struct irq_data *d)
d                  76 drivers/irqchip/irq-ftintc010.c 	struct ft010_irq_data *f = irq_data_get_irq_chip_data(d);
d                  78 drivers/irqchip/irq-ftintc010.c 	writel(BIT(irqd_to_hwirq(d)), FT010_IRQ_CLEAR(f->base));
d                  81 drivers/irqchip/irq-ftintc010.c static int ft010_irq_set_type(struct irq_data *d, unsigned int trigger)
d                  83 drivers/irqchip/irq-ftintc010.c 	struct ft010_irq_data *f = irq_data_get_irq_chip_data(d);
d                  84 drivers/irqchip/irq-ftintc010.c 	int offset = irqd_to_hwirq(d);
d                  91 drivers/irqchip/irq-ftintc010.c 		irq_set_handler_locked(d, handle_level_irq);
d                  95 drivers/irqchip/irq-ftintc010.c 		irq_set_handler_locked(d, handle_level_irq);
d                  99 drivers/irqchip/irq-ftintc010.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 103 drivers/irqchip/irq-ftintc010.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 107 drivers/irqchip/irq-ftintc010.c 		irq_set_handler_locked(d, handle_bad_irq);
d                 141 drivers/irqchip/irq-ftintc010.c static int ft010_irqdomain_map(struct irq_domain *d, unsigned int irq,
d                 144 drivers/irqchip/irq-ftintc010.c 	struct ft010_irq_data *f = d->host_data;
d                 154 drivers/irqchip/irq-ftintc010.c static void ft010_irqdomain_unmap(struct irq_domain *d, unsigned int irq)
d                  73 drivers/irqchip/irq-gic-v2m.c static void gicv2m_mask_msi_irq(struct irq_data *d)
d                  75 drivers/irqchip/irq-gic-v2m.c 	pci_msi_mask_irq(d);
d                  76 drivers/irqchip/irq-gic-v2m.c 	irq_chip_mask_parent(d);
d                  79 drivers/irqchip/irq-gic-v2m.c static void gicv2m_unmask_msi_irq(struct irq_data *d)
d                  81 drivers/irqchip/irq-gic-v2m.c 	pci_msi_unmask_irq(d);
d                  82 drivers/irqchip/irq-gic-v2m.c 	irq_chip_unmask_parent(d);
d                 139 drivers/irqchip/irq-gic-v2m.c 	struct irq_data *d;
d                 162 drivers/irqchip/irq-gic-v2m.c 	d = irq_domain_get_irq_data(domain->parent, virq);
d                 163 drivers/irqchip/irq-gic-v2m.c 	d->chip->irq_set_type(d, IRQ_TYPE_EDGE_RISING);
d                 224 drivers/irqchip/irq-gic-v2m.c 	struct irq_data *d = irq_domain_get_irq_data(domain, virq);
d                 225 drivers/irqchip/irq-gic-v2m.c 	struct v2m_data *v2m = irq_data_get_irq_chip_data(d);
d                 227 drivers/irqchip/irq-gic-v2m.c 	gicv2m_unalloc_msi(v2m, d->hwirq, nr_irqs);
d                  13 drivers/irqchip/irq-gic-v3-its-pci-msi.c static void its_mask_msi_irq(struct irq_data *d)
d                  15 drivers/irqchip/irq-gic-v3-its-pci-msi.c 	pci_msi_mask_irq(d);
d                  16 drivers/irqchip/irq-gic-v3-its-pci-msi.c 	irq_chip_mask_parent(d);
d                  19 drivers/irqchip/irq-gic-v3-its-pci-msi.c static void its_unmask_msi_irq(struct irq_data *d)
d                  21 drivers/irqchip/irq-gic-v3-its-pci-msi.c 	pci_msi_unmask_irq(d);
d                  22 drivers/irqchip/irq-gic-v3-its-pci-msi.c 	irq_chip_unmask_parent(d);
d                1049 drivers/irqchip/irq-gic-v3-its.c static inline u32 its_get_event_id(struct irq_data *d)
d                1051 drivers/irqchip/irq-gic-v3-its.c 	struct its_device *its_dev = irq_data_get_irq_chip_data(d);
d                1052 drivers/irqchip/irq-gic-v3-its.c 	return d->hwirq - its_dev->event_map.lpi_base;
d                1055 drivers/irqchip/irq-gic-v3-its.c static void lpi_write_config(struct irq_data *d, u8 clr, u8 set)
d                1061 drivers/irqchip/irq-gic-v3-its.c 	if (irqd_is_forwarded_to_vcpu(d)) {
d                1062 drivers/irqchip/irq-gic-v3-its.c 		struct its_device *its_dev = irq_data_get_irq_chip_data(d);
d                1063 drivers/irqchip/irq-gic-v3-its.c 		u32 event = its_get_event_id(d);
d                1075 drivers/irqchip/irq-gic-v3-its.c 		hwirq = d->hwirq;
d                1093 drivers/irqchip/irq-gic-v3-its.c static void lpi_update_config(struct irq_data *d, u8 clr, u8 set)
d                1095 drivers/irqchip/irq-gic-v3-its.c 	struct its_device *its_dev = irq_data_get_irq_chip_data(d);
d                1097 drivers/irqchip/irq-gic-v3-its.c 	lpi_write_config(d, clr, set);
d                1098 drivers/irqchip/irq-gic-v3-its.c 	its_send_inv(its_dev, its_get_event_id(d));
d                1101 drivers/irqchip/irq-gic-v3-its.c static void its_vlpi_set_doorbell(struct irq_data *d, bool enable)
d                1103 drivers/irqchip/irq-gic-v3-its.c 	struct its_device *its_dev = irq_data_get_irq_chip_data(d);
d                1104 drivers/irqchip/irq-gic-v3-its.c 	u32 event = its_get_event_id(d);
d                1124 drivers/irqchip/irq-gic-v3-its.c static void its_mask_irq(struct irq_data *d)
d                1126 drivers/irqchip/irq-gic-v3-its.c 	if (irqd_is_forwarded_to_vcpu(d))
d                1127 drivers/irqchip/irq-gic-v3-its.c 		its_vlpi_set_doorbell(d, false);
d                1129 drivers/irqchip/irq-gic-v3-its.c 	lpi_update_config(d, LPI_PROP_ENABLED, 0);
d                1132 drivers/irqchip/irq-gic-v3-its.c static void its_unmask_irq(struct irq_data *d)
d                1134 drivers/irqchip/irq-gic-v3-its.c 	if (irqd_is_forwarded_to_vcpu(d))
d                1135 drivers/irqchip/irq-gic-v3-its.c 		its_vlpi_set_doorbell(d, true);
d                1137 drivers/irqchip/irq-gic-v3-its.c 	lpi_update_config(d, 0, LPI_PROP_ENABLED);
d                1140 drivers/irqchip/irq-gic-v3-its.c static int its_set_affinity(struct irq_data *d, const struct cpumask *mask_val,
d                1145 drivers/irqchip/irq-gic-v3-its.c 	struct its_device *its_dev = irq_data_get_irq_chip_data(d);
d                1147 drivers/irqchip/irq-gic-v3-its.c 	u32 id = its_get_event_id(d);
d                1150 drivers/irqchip/irq-gic-v3-its.c 	if (irqd_is_forwarded_to_vcpu(d))
d                1172 drivers/irqchip/irq-gic-v3-its.c 		irq_data_update_effective_affinity(d, cpumask_of(cpu));
d                1185 drivers/irqchip/irq-gic-v3-its.c static void its_irq_compose_msi_msg(struct irq_data *d, struct msi_msg *msg)
d                1187 drivers/irqchip/irq-gic-v3-its.c 	struct its_device *its_dev = irq_data_get_irq_chip_data(d);
d                1196 drivers/irqchip/irq-gic-v3-its.c 	msg->data		= its_get_event_id(d);
d                1198 drivers/irqchip/irq-gic-v3-its.c 	iommu_dma_compose_msi_msg(irq_data_get_msi_desc(d), msg);
d                1201 drivers/irqchip/irq-gic-v3-its.c static int its_irq_set_irqchip_state(struct irq_data *d,
d                1205 drivers/irqchip/irq-gic-v3-its.c 	struct its_device *its_dev = irq_data_get_irq_chip_data(d);
d                1206 drivers/irqchip/irq-gic-v3-its.c 	u32 event = its_get_event_id(d);
d                1240 drivers/irqchip/irq-gic-v3-its.c 			struct irq_data *d = irq_get_irq_data(vpe->irq);
d                1246 drivers/irqchip/irq-gic-v3-its.c 			irq_data_update_effective_affinity(d, cpumask_of(vpe->col_idx));
d                1273 drivers/irqchip/irq-gic-v3-its.c static int its_vlpi_map(struct irq_data *d, struct its_cmd_info *info)
d                1275 drivers/irqchip/irq-gic-v3-its.c 	struct its_device *its_dev = irq_data_get_irq_chip_data(d);
d                1276 drivers/irqchip/irq-gic-v3-its.c 	u32 event = its_get_event_id(d);
d                1304 drivers/irqchip/irq-gic-v3-its.c 	if (irqd_is_forwarded_to_vcpu(d)) {
d                1315 drivers/irqchip/irq-gic-v3-its.c 		irqd_set_forwarded_to_vcpu(d);
d                1318 drivers/irqchip/irq-gic-v3-its.c 		lpi_write_config(d, 0xff, info->map->properties);
d                1335 drivers/irqchip/irq-gic-v3-its.c static int its_vlpi_get(struct irq_data *d, struct its_cmd_info *info)
d                1337 drivers/irqchip/irq-gic-v3-its.c 	struct its_device *its_dev = irq_data_get_irq_chip_data(d);
d                1338 drivers/irqchip/irq-gic-v3-its.c 	u32 event = its_get_event_id(d);
d                1357 drivers/irqchip/irq-gic-v3-its.c static int its_vlpi_unmap(struct irq_data *d)
d                1359 drivers/irqchip/irq-gic-v3-its.c 	struct its_device *its_dev = irq_data_get_irq_chip_data(d);
d                1360 drivers/irqchip/irq-gic-v3-its.c 	u32 event = its_get_event_id(d);
d                1365 drivers/irqchip/irq-gic-v3-its.c 	if (!its_dev->event_map.vm || !irqd_is_forwarded_to_vcpu(d)) {
d                1374 drivers/irqchip/irq-gic-v3-its.c 	irqd_clr_forwarded_to_vcpu(d);
d                1375 drivers/irqchip/irq-gic-v3-its.c 	its_send_mapti(its_dev, d->hwirq, event);
d                1376 drivers/irqchip/irq-gic-v3-its.c 	lpi_update_config(d, 0xff, (LPI_PROP_DEFAULT_PRIO |
d                1397 drivers/irqchip/irq-gic-v3-its.c static int its_vlpi_prop_update(struct irq_data *d, struct its_cmd_info *info)
d                1399 drivers/irqchip/irq-gic-v3-its.c 	struct its_device *its_dev = irq_data_get_irq_chip_data(d);
d                1401 drivers/irqchip/irq-gic-v3-its.c 	if (!its_dev->event_map.vm || !irqd_is_forwarded_to_vcpu(d))
d                1405 drivers/irqchip/irq-gic-v3-its.c 		lpi_update_config(d, 0xff, info->config);
d                1407 drivers/irqchip/irq-gic-v3-its.c 		lpi_write_config(d, 0xff, info->config);
d                1408 drivers/irqchip/irq-gic-v3-its.c 	its_vlpi_set_doorbell(d, !!(info->config & LPI_PROP_ENABLED));
d                1413 drivers/irqchip/irq-gic-v3-its.c static int its_irq_set_vcpu_affinity(struct irq_data *d, void *vcpu_info)
d                1415 drivers/irqchip/irq-gic-v3-its.c 	struct its_device *its_dev = irq_data_get_irq_chip_data(d);
d                1424 drivers/irqchip/irq-gic-v3-its.c 		return its_vlpi_unmap(d);
d                1428 drivers/irqchip/irq-gic-v3-its.c 		return its_vlpi_map(d, info);
d                1431 drivers/irqchip/irq-gic-v3-its.c 		return its_vlpi_get(d, info);
d                1435 drivers/irqchip/irq-gic-v3-its.c 		return its_vlpi_prop_update(d, info);
d                2613 drivers/irqchip/irq-gic-v3-its.c 				   struct irq_data *d, bool reserve)
d                2615 drivers/irqchip/irq-gic-v3-its.c 	struct its_device *its_dev = irq_data_get_irq_chip_data(d);
d                2616 drivers/irqchip/irq-gic-v3-its.c 	u32 event = its_get_event_id(d);
d                2634 drivers/irqchip/irq-gic-v3-its.c 	irq_data_update_effective_affinity(d, cpumask_of(cpu));
d                2637 drivers/irqchip/irq-gic-v3-its.c 	its_send_mapti(its_dev, d->hwirq, event);
d                2642 drivers/irqchip/irq-gic-v3-its.c 				      struct irq_data *d)
d                2644 drivers/irqchip/irq-gic-v3-its.c 	struct its_device *its_dev = irq_data_get_irq_chip_data(d);
d                2645 drivers/irqchip/irq-gic-v3-its.c 	u32 event = its_get_event_id(d);
d                2654 drivers/irqchip/irq-gic-v3-its.c 	struct irq_data *d = irq_domain_get_irq_data(domain, virq);
d                2655 drivers/irqchip/irq-gic-v3-its.c 	struct its_device *its_dev = irq_data_get_irq_chip_data(d);
d                2792 drivers/irqchip/irq-gic-v3-its.c static int its_vpe_set_affinity(struct irq_data *d,
d                2796 drivers/irqchip/irq-gic-v3-its.c 	struct its_vpe *vpe = irq_data_get_irq_chip_data(d);
d                2813 drivers/irqchip/irq-gic-v3-its.c 	irq_data_update_effective_affinity(d, cpumask_of(cpu));
d                2887 drivers/irqchip/irq-gic-v3-its.c static int its_vpe_set_vcpu_affinity(struct irq_data *d, void *vcpu_info)
d                2889 drivers/irqchip/irq-gic-v3-its.c 	struct its_vpe *vpe = irq_data_get_irq_chip_data(d);
d                2923 drivers/irqchip/irq-gic-v3-its.c static void its_vpe_send_inv(struct irq_data *d)
d                2925 drivers/irqchip/irq-gic-v3-its.c 	struct its_vpe *vpe = irq_data_get_irq_chip_data(d);
d                2939 drivers/irqchip/irq-gic-v3-its.c static void its_vpe_mask_irq(struct irq_data *d)
d                2947 drivers/irqchip/irq-gic-v3-its.c 	lpi_write_config(d->parent_data, LPI_PROP_ENABLED, 0);
d                2948 drivers/irqchip/irq-gic-v3-its.c 	its_vpe_send_inv(d);
d                2951 drivers/irqchip/irq-gic-v3-its.c static void its_vpe_unmask_irq(struct irq_data *d)
d                2954 drivers/irqchip/irq-gic-v3-its.c 	lpi_write_config(d->parent_data, 0, LPI_PROP_ENABLED);
d                2955 drivers/irqchip/irq-gic-v3-its.c 	its_vpe_send_inv(d);
d                2958 drivers/irqchip/irq-gic-v3-its.c static int its_vpe_set_irqchip_state(struct irq_data *d,
d                2962 drivers/irqchip/irq-gic-v3-its.c 	struct its_vpe *vpe = irq_data_get_irq_chip_data(d);
d                2988 drivers/irqchip/irq-gic-v3-its.c static int its_vpe_retrigger(struct irq_data *d)
d                2990 drivers/irqchip/irq-gic-v3-its.c 	return !its_vpe_set_irqchip_state(d, IRQCHIP_STATE_PENDING, true);
d                3134 drivers/irqchip/irq-gic-v3-its.c 				       struct irq_data *d, bool reserve)
d                3136 drivers/irqchip/irq-gic-v3-its.c 	struct its_vpe *vpe = irq_data_get_irq_chip_data(d);
d                3154 drivers/irqchip/irq-gic-v3-its.c 	irq_data_update_effective_affinity(d, cpumask_of(vpe->col_idx));
d                3160 drivers/irqchip/irq-gic-v3-its.c 					  struct irq_data *d)
d                3162 drivers/irqchip/irq-gic-v3-its.c 	struct its_vpe *vpe = irq_data_get_irq_chip_data(d);
d                  46 drivers/irqchip/irq-gic-v3-mbi.c 	struct irq_data *d;
d                  71 drivers/irqchip/irq-gic-v3-mbi.c 	d = irq_domain_get_irq_data(domain->parent, virq);
d                  72 drivers/irqchip/irq-gic-v3-mbi.c 	return d->chip->irq_set_type(d, IRQ_TYPE_EDGE_RISING);
d                 133 drivers/irqchip/irq-gic-v3-mbi.c 	struct irq_data *d = irq_domain_get_irq_data(domain, virq);
d                 134 drivers/irqchip/irq-gic-v3-mbi.c 	struct mbi_range *mbi = irq_data_get_irq_chip_data(d);
d                 136 drivers/irqchip/irq-gic-v3-mbi.c 	mbi_free_msi(mbi, d->hwirq, nr_irqs);
d                 156 drivers/irqchip/irq-gic-v3-mbi.c static void mbi_mask_msi_irq(struct irq_data *d)
d                 158 drivers/irqchip/irq-gic-v3-mbi.c 	pci_msi_mask_irq(d);
d                 159 drivers/irqchip/irq-gic-v3-mbi.c 	irq_chip_mask_parent(d);
d                 162 drivers/irqchip/irq-gic-v3-mbi.c static void mbi_unmask_msi_irq(struct irq_data *d)
d                 164 drivers/irqchip/irq-gic-v3-mbi.c 	pci_msi_unmask_irq(d);
d                 165 drivers/irqchip/irq-gic-v3-mbi.c 	irq_chip_unmask_parent(d);
d                 131 drivers/irqchip/irq-gic-v3.c static enum gic_intid_range get_intid_range(struct irq_data *d)
d                 133 drivers/irqchip/irq-gic-v3.c 	return __get_intid_range(d->hwirq);
d                 136 drivers/irqchip/irq-gic-v3.c static inline unsigned int gic_irq(struct irq_data *d)
d                 138 drivers/irqchip/irq-gic-v3.c 	return d->hwirq;
d                 141 drivers/irqchip/irq-gic-v3.c static inline int gic_irq_in_rdist(struct irq_data *d)
d                 143 drivers/irqchip/irq-gic-v3.c 	enum gic_intid_range range = get_intid_range(d);
d                 147 drivers/irqchip/irq-gic-v3.c static inline void __iomem *gic_dist_base(struct irq_data *d)
d                 149 drivers/irqchip/irq-gic-v3.c 	switch (get_intid_range(d)) {
d                 243 drivers/irqchip/irq-gic-v3.c static u32 convert_offset_index(struct irq_data *d, u32 offset, u32 *index)
d                 245 drivers/irqchip/irq-gic-v3.c 	switch (get_intid_range(d)) {
d                 248 drivers/irqchip/irq-gic-v3.c 		*index = d->hwirq;
d                 256 drivers/irqchip/irq-gic-v3.c 		*index = d->hwirq - EPPI_BASE_INTID + 32;
d                 259 drivers/irqchip/irq-gic-v3.c 		*index = d->hwirq - ESPI_BASE_INTID;
d                 288 drivers/irqchip/irq-gic-v3.c 	*index = d->hwirq;
d                 292 drivers/irqchip/irq-gic-v3.c static int gic_peek_irq(struct irq_data *d, u32 offset)
d                 297 drivers/irqchip/irq-gic-v3.c 	offset = convert_offset_index(d, offset, &index);
d                 300 drivers/irqchip/irq-gic-v3.c 	if (gic_irq_in_rdist(d))
d                 308 drivers/irqchip/irq-gic-v3.c static void gic_poke_irq(struct irq_data *d, u32 offset)
d                 314 drivers/irqchip/irq-gic-v3.c 	offset = convert_offset_index(d, offset, &index);
d                 317 drivers/irqchip/irq-gic-v3.c 	if (gic_irq_in_rdist(d)) {
d                 329 drivers/irqchip/irq-gic-v3.c static void gic_mask_irq(struct irq_data *d)
d                 331 drivers/irqchip/irq-gic-v3.c 	gic_poke_irq(d, GICD_ICENABLER);
d                 334 drivers/irqchip/irq-gic-v3.c static void gic_eoimode1_mask_irq(struct irq_data *d)
d                 336 drivers/irqchip/irq-gic-v3.c 	gic_mask_irq(d);
d                 345 drivers/irqchip/irq-gic-v3.c 	if (irqd_is_forwarded_to_vcpu(d))
d                 346 drivers/irqchip/irq-gic-v3.c 		gic_poke_irq(d, GICD_ICACTIVER);
d                 349 drivers/irqchip/irq-gic-v3.c static void gic_unmask_irq(struct irq_data *d)
d                 351 drivers/irqchip/irq-gic-v3.c 	gic_poke_irq(d, GICD_ISENABLER);
d                 360 drivers/irqchip/irq-gic-v3.c static int gic_irq_set_irqchip_state(struct irq_data *d,
d                 365 drivers/irqchip/irq-gic-v3.c 	if (d->hwirq >= 8192) /* PPI/SPI only */
d                 385 drivers/irqchip/irq-gic-v3.c 	gic_poke_irq(d, reg);
d                 389 drivers/irqchip/irq-gic-v3.c static int gic_irq_get_irqchip_state(struct irq_data *d,
d                 392 drivers/irqchip/irq-gic-v3.c 	if (d->hwirq >= 8192) /* PPI/SPI only */
d                 397 drivers/irqchip/irq-gic-v3.c 		*val = gic_peek_irq(d, GICD_ISPENDR);
d                 401 drivers/irqchip/irq-gic-v3.c 		*val = gic_peek_irq(d, GICD_ISACTIVER);
d                 405 drivers/irqchip/irq-gic-v3.c 		*val = !gic_peek_irq(d, GICD_ISENABLER);
d                 415 drivers/irqchip/irq-gic-v3.c static void gic_irq_set_prio(struct irq_data *d, u8 prio)
d                 417 drivers/irqchip/irq-gic-v3.c 	void __iomem *base = gic_dist_base(d);
d                 420 drivers/irqchip/irq-gic-v3.c 	offset = convert_offset_index(d, GICD_IPRIORITYR, &index);
d                 425 drivers/irqchip/irq-gic-v3.c static u32 gic_get_ppi_index(struct irq_data *d)
d                 427 drivers/irqchip/irq-gic-v3.c 	switch (get_intid_range(d)) {
d                 429 drivers/irqchip/irq-gic-v3.c 		return d->hwirq - 16;
d                 431 drivers/irqchip/irq-gic-v3.c 		return d->hwirq - EPPI_BASE_INTID + 16;
d                 437 drivers/irqchip/irq-gic-v3.c static int gic_irq_nmi_setup(struct irq_data *d)
d                 439 drivers/irqchip/irq-gic-v3.c 	struct irq_desc *desc = irq_to_desc(d->irq);
d                 444 drivers/irqchip/irq-gic-v3.c 	if (gic_peek_irq(d, GICD_ISENABLER)) {
d                 445 drivers/irqchip/irq-gic-v3.c 		pr_err("Cannot set NMI property of enabled IRQ %u\n", d->irq);
d                 453 drivers/irqchip/irq-gic-v3.c 	if (WARN_ON(gic_irq(d) >= 8192))
d                 457 drivers/irqchip/irq-gic-v3.c 	if (gic_irq_in_rdist(d)) {
d                 458 drivers/irqchip/irq-gic-v3.c 		u32 idx = gic_get_ppi_index(d);
d                 469 drivers/irqchip/irq-gic-v3.c 	gic_irq_set_prio(d, GICD_INT_NMI_PRI);
d                 474 drivers/irqchip/irq-gic-v3.c static void gic_irq_nmi_teardown(struct irq_data *d)
d                 476 drivers/irqchip/irq-gic-v3.c 	struct irq_desc *desc = irq_to_desc(d->irq);
d                 481 drivers/irqchip/irq-gic-v3.c 	if (gic_peek_irq(d, GICD_ISENABLER)) {
d                 482 drivers/irqchip/irq-gic-v3.c 		pr_err("Cannot set NMI property of enabled IRQ %u\n", d->irq);
d                 490 drivers/irqchip/irq-gic-v3.c 	if (WARN_ON(gic_irq(d) >= 8192))
d                 494 drivers/irqchip/irq-gic-v3.c 	if (gic_irq_in_rdist(d)) {
d                 495 drivers/irqchip/irq-gic-v3.c 		u32 idx = gic_get_ppi_index(d);
d                 504 drivers/irqchip/irq-gic-v3.c 	gic_irq_set_prio(d, GICD_INT_DEF_PRI);
d                 507 drivers/irqchip/irq-gic-v3.c static void gic_eoi_irq(struct irq_data *d)
d                 509 drivers/irqchip/irq-gic-v3.c 	gic_write_eoir(gic_irq(d));
d                 512 drivers/irqchip/irq-gic-v3.c static void gic_eoimode1_eoi_irq(struct irq_data *d)
d                 518 drivers/irqchip/irq-gic-v3.c 	if (gic_irq(d) >= 8192 || irqd_is_forwarded_to_vcpu(d))
d                 520 drivers/irqchip/irq-gic-v3.c 	gic_write_dir(gic_irq(d));
d                 523 drivers/irqchip/irq-gic-v3.c static int gic_set_type(struct irq_data *d, unsigned int type)
d                 526 drivers/irqchip/irq-gic-v3.c 	unsigned int irq = gic_irq(d);
d                 536 drivers/irqchip/irq-gic-v3.c 	range = get_intid_range(d);
d                 543 drivers/irqchip/irq-gic-v3.c 	if (gic_irq_in_rdist(d)) {
d                 551 drivers/irqchip/irq-gic-v3.c 	offset = convert_offset_index(d, GICD_ICFGR, &index);
d                 563 drivers/irqchip/irq-gic-v3.c static int gic_irq_set_vcpu_affinity(struct irq_data *d, void *vcpu)
d                 566 drivers/irqchip/irq-gic-v3.c 		irqd_set_forwarded_to_vcpu(d);
d                 568 drivers/irqchip/irq-gic-v3.c 		irqd_clr_forwarded_to_vcpu(d);
d                1128 drivers/irqchip/irq-gic-v3.c static int gic_set_affinity(struct irq_data *d, const struct cpumask *mask_val,
d                1145 drivers/irqchip/irq-gic-v3.c 	if (gic_irq_in_rdist(d))
d                1149 drivers/irqchip/irq-gic-v3.c 	enabled = gic_peek_irq(d, GICD_ISENABLER);
d                1151 drivers/irqchip/irq-gic-v3.c 		gic_mask_irq(d);
d                1153 drivers/irqchip/irq-gic-v3.c 	offset = convert_offset_index(d, GICD_IROUTER, &index);
d                1154 drivers/irqchip/irq-gic-v3.c 	reg = gic_dist_base(d) + offset + (index * 8);
d                1164 drivers/irqchip/irq-gic-v3.c 		gic_unmask_irq(d);
d                1168 drivers/irqchip/irq-gic-v3.c 	irq_data_update_effective_affinity(d, cpumask_of(cpu));
d                1238 drivers/irqchip/irq-gic-v3.c static int gic_irq_domain_map(struct irq_domain *d, unsigned int irq,
d                1250 drivers/irqchip/irq-gic-v3.c 		irq_domain_set_info(d, irq, hw, chip, d->host_data,
d                1257 drivers/irqchip/irq-gic-v3.c 		irq_domain_set_info(d, irq, hw, chip, d->host_data,
d                1266 drivers/irqchip/irq-gic-v3.c 		irq_domain_set_info(d, irq, hw, chip, d->host_data,
d                1279 drivers/irqchip/irq-gic-v3.c static int gic_irq_domain_translate(struct irq_domain *d,
d                1367 drivers/irqchip/irq-gic-v3.c 		struct irq_data *d = irq_domain_get_irq_data(domain, virq + i);
d                1369 drivers/irqchip/irq-gic-v3.c 		irq_domain_reset_irq_data(d);
d                1373 drivers/irqchip/irq-gic-v3.c static int gic_irq_domain_select(struct irq_domain *d,
d                1378 drivers/irqchip/irq-gic-v3.c         if (fwspec->fwnode != d->fwnode)
d                1392 drivers/irqchip/irq-gic-v3.c 		return d == partition_get_domain(gic_data.ppi_descs[fwspec->param[1]]);
d                1394 drivers/irqchip/irq-gic-v3.c 	return d == gic_data.domain;
d                1404 drivers/irqchip/irq-gic-v3.c static int partition_domain_translate(struct irq_domain *d,
d                1437 drivers/irqchip/irq-gic-v3.c 	struct gic_chip_data *d = data;
d                1439 drivers/irqchip/irq-gic-v3.c 	d->flags |= FLAGS_WORKAROUND_GICR_WAKER_MSM8996;
d                1446 drivers/irqchip/irq-gic-v3.c 	struct gic_chip_data *d = data;
d                1455 drivers/irqchip/irq-gic-v3.c 	if (d->rdists.gicd_typer & GICD_TYPER_ESPI) {
d                1457 drivers/irqchip/irq-gic-v3.c 		d->rdists.gicd_typer &= ~GENMASK(9, 8);
d                 154 drivers/irqchip/irq-gic.c #define gic_data_dist_base(d)	((d)->dist_base.common_base)
d                 155 drivers/irqchip/irq-gic.c #define gic_data_cpu_base(d)	((d)->cpu_base.common_base)
d                 156 drivers/irqchip/irq-gic.c #define gic_set_base_accessor(d, f)
d                 159 drivers/irqchip/irq-gic.c static inline void __iomem *gic_dist_base(struct irq_data *d)
d                 161 drivers/irqchip/irq-gic.c 	struct gic_chip_data *gic_data = irq_data_get_irq_chip_data(d);
d                 165 drivers/irqchip/irq-gic.c static inline void __iomem *gic_cpu_base(struct irq_data *d)
d                 167 drivers/irqchip/irq-gic.c 	struct gic_chip_data *gic_data = irq_data_get_irq_chip_data(d);
d                 171 drivers/irqchip/irq-gic.c static inline unsigned int gic_irq(struct irq_data *d)
d                 173 drivers/irqchip/irq-gic.c 	return d->hwirq;
d                 176 drivers/irqchip/irq-gic.c static inline bool cascading_gic_irq(struct irq_data *d)
d                 178 drivers/irqchip/irq-gic.c 	void *data = irq_data_get_irq_handler_data(d);
d                 190 drivers/irqchip/irq-gic.c static void gic_poke_irq(struct irq_data *d, u32 offset)
d                 192 drivers/irqchip/irq-gic.c 	u32 mask = 1 << (gic_irq(d) % 32);
d                 193 drivers/irqchip/irq-gic.c 	writel_relaxed(mask, gic_dist_base(d) + offset + (gic_irq(d) / 32) * 4);
d                 196 drivers/irqchip/irq-gic.c static int gic_peek_irq(struct irq_data *d, u32 offset)
d                 198 drivers/irqchip/irq-gic.c 	u32 mask = 1 << (gic_irq(d) % 32);
d                 199 drivers/irqchip/irq-gic.c 	return !!(readl_relaxed(gic_dist_base(d) + offset + (gic_irq(d) / 32) * 4) & mask);
d                 202 drivers/irqchip/irq-gic.c static void gic_mask_irq(struct irq_data *d)
d                 204 drivers/irqchip/irq-gic.c 	gic_poke_irq(d, GIC_DIST_ENABLE_CLEAR);
d                 207 drivers/irqchip/irq-gic.c static void gic_eoimode1_mask_irq(struct irq_data *d)
d                 209 drivers/irqchip/irq-gic.c 	gic_mask_irq(d);
d                 218 drivers/irqchip/irq-gic.c 	if (irqd_is_forwarded_to_vcpu(d))
d                 219 drivers/irqchip/irq-gic.c 		gic_poke_irq(d, GIC_DIST_ACTIVE_CLEAR);
d                 222 drivers/irqchip/irq-gic.c static void gic_unmask_irq(struct irq_data *d)
d                 224 drivers/irqchip/irq-gic.c 	gic_poke_irq(d, GIC_DIST_ENABLE_SET);
d                 227 drivers/irqchip/irq-gic.c static void gic_eoi_irq(struct irq_data *d)
d                 229 drivers/irqchip/irq-gic.c 	writel_relaxed(gic_irq(d), gic_cpu_base(d) + GIC_CPU_EOI);
d                 232 drivers/irqchip/irq-gic.c static void gic_eoimode1_eoi_irq(struct irq_data *d)
d                 235 drivers/irqchip/irq-gic.c 	if (irqd_is_forwarded_to_vcpu(d))
d                 238 drivers/irqchip/irq-gic.c 	writel_relaxed(gic_irq(d), gic_cpu_base(d) + GIC_CPU_DEACTIVATE);
d                 241 drivers/irqchip/irq-gic.c static int gic_irq_set_irqchip_state(struct irq_data *d,
d                 263 drivers/irqchip/irq-gic.c 	gic_poke_irq(d, reg);
d                 267 drivers/irqchip/irq-gic.c static int gic_irq_get_irqchip_state(struct irq_data *d,
d                 272 drivers/irqchip/irq-gic.c 		*val = gic_peek_irq(d, GIC_DIST_PENDING_SET);
d                 276 drivers/irqchip/irq-gic.c 		*val = gic_peek_irq(d, GIC_DIST_ACTIVE_SET);
d                 280 drivers/irqchip/irq-gic.c 		*val = !gic_peek_irq(d, GIC_DIST_ENABLE_SET);
d                 290 drivers/irqchip/irq-gic.c static int gic_set_type(struct irq_data *d, unsigned int type)
d                 292 drivers/irqchip/irq-gic.c 	void __iomem *base = gic_dist_base(d);
d                 293 drivers/irqchip/irq-gic.c 	unsigned int gicirq = gic_irq(d);
d                 315 drivers/irqchip/irq-gic.c static int gic_irq_set_vcpu_affinity(struct irq_data *d, void *vcpu)
d                 318 drivers/irqchip/irq-gic.c 	if (cascading_gic_irq(d))
d                 322 drivers/irqchip/irq-gic.c 		irqd_set_forwarded_to_vcpu(d);
d                 324 drivers/irqchip/irq-gic.c 		irqd_clr_forwarded_to_vcpu(d);
d                 329 drivers/irqchip/irq-gic.c static int gic_set_affinity(struct irq_data *d, const struct cpumask *mask_val,
d                 332 drivers/irqchip/irq-gic.c 	void __iomem *reg = gic_dist_base(d) + GIC_DIST_TARGET + (gic_irq(d) & ~3);
d                 333 drivers/irqchip/irq-gic.c 	unsigned int cpu, shift = (gic_irq(d) % 4) * 8;
d                 352 drivers/irqchip/irq-gic.c 	irq_data_update_effective_affinity(d, cpumask_of(cpu));
d                 976 drivers/irqchip/irq-gic.c static int gic_irq_domain_map(struct irq_domain *d, unsigned int irq,
d                 979 drivers/irqchip/irq-gic.c 	struct gic_chip_data *gic = d->host_data;
d                 983 drivers/irqchip/irq-gic.c 		irq_domain_set_info(d, irq, hw, &gic->chip, d->host_data,
d                 987 drivers/irqchip/irq-gic.c 		irq_domain_set_info(d, irq, hw, &gic->chip, d->host_data,
d                 995 drivers/irqchip/irq-gic.c static void gic_irq_domain_unmap(struct irq_domain *d, unsigned int irq)
d                 999 drivers/irqchip/irq-gic.c static int gic_irq_domain_translate(struct irq_domain *d,
d                  71 drivers/irqchip/irq-hip04.c static inline void __iomem *hip04_dist_base(struct irq_data *d)
d                  73 drivers/irqchip/irq-hip04.c 	struct hip04_irq_data *hip04_data = irq_data_get_irq_chip_data(d);
d                  77 drivers/irqchip/irq-hip04.c static inline void __iomem *hip04_cpu_base(struct irq_data *d)
d                  79 drivers/irqchip/irq-hip04.c 	struct hip04_irq_data *hip04_data = irq_data_get_irq_chip_data(d);
d                  83 drivers/irqchip/irq-hip04.c static inline unsigned int hip04_irq(struct irq_data *d)
d                  85 drivers/irqchip/irq-hip04.c 	return d->hwirq;
d                  91 drivers/irqchip/irq-hip04.c static void hip04_mask_irq(struct irq_data *d)
d                  93 drivers/irqchip/irq-hip04.c 	u32 mask = 1 << (hip04_irq(d) % 32);
d                  96 drivers/irqchip/irq-hip04.c 	writel_relaxed(mask, hip04_dist_base(d) + GIC_DIST_ENABLE_CLEAR +
d                  97 drivers/irqchip/irq-hip04.c 		       (hip04_irq(d) / 32) * 4);
d                 101 drivers/irqchip/irq-hip04.c static void hip04_unmask_irq(struct irq_data *d)
d                 103 drivers/irqchip/irq-hip04.c 	u32 mask = 1 << (hip04_irq(d) % 32);
d                 106 drivers/irqchip/irq-hip04.c 	writel_relaxed(mask, hip04_dist_base(d) + GIC_DIST_ENABLE_SET +
d                 107 drivers/irqchip/irq-hip04.c 		       (hip04_irq(d) / 32) * 4);
d                 111 drivers/irqchip/irq-hip04.c static void hip04_eoi_irq(struct irq_data *d)
d                 113 drivers/irqchip/irq-hip04.c 	writel_relaxed(hip04_irq(d), hip04_cpu_base(d) + GIC_CPU_EOI);
d                 116 drivers/irqchip/irq-hip04.c static int hip04_irq_set_type(struct irq_data *d, unsigned int type)
d                 118 drivers/irqchip/irq-hip04.c 	void __iomem *base = hip04_dist_base(d);
d                 119 drivers/irqchip/irq-hip04.c 	unsigned int irq = hip04_irq(d);
d                 146 drivers/irqchip/irq-hip04.c static int hip04_irq_set_affinity(struct irq_data *d,
d                 151 drivers/irqchip/irq-hip04.c 	unsigned int cpu, shift = (hip04_irq(d) % 2) * 16;
d                 163 drivers/irqchip/irq-hip04.c 	reg = hip04_dist_base(d) + GIC_DIST_TARGET + ((hip04_irq(d) * 2) & ~3);
d                 170 drivers/irqchip/irq-hip04.c 	irq_data_update_effective_affinity(d, cpumask_of(cpu));
d                 307 drivers/irqchip/irq-hip04.c static int hip04_irq_domain_map(struct irq_domain *d, unsigned int irq,
d                 321 drivers/irqchip/irq-hip04.c 	irq_set_chip_data(irq, d->host_data);
d                 325 drivers/irqchip/irq-hip04.c static int hip04_irq_domain_xlate(struct irq_domain *d,
d                 333 drivers/irqchip/irq-hip04.c 	if (irq_domain_get_of_node(d) != controller)
d                  37 drivers/irqchip/irq-i8259.c static void disable_8259A_irq(struct irq_data *d);
d                  38 drivers/irqchip/irq-i8259.c static void enable_8259A_irq(struct irq_data *d);
d                  39 drivers/irqchip/irq-i8259.c static void mask_and_ack_8259A(struct irq_data *d);
d                  68 drivers/irqchip/irq-i8259.c static void disable_8259A_irq(struct irq_data *d)
d                  70 drivers/irqchip/irq-i8259.c 	unsigned int mask, irq = d->irq - I8259A_IRQ_BASE;
d                  83 drivers/irqchip/irq-i8259.c static void enable_8259A_irq(struct irq_data *d)
d                  85 drivers/irqchip/irq-i8259.c 	unsigned int mask, irq = d->irq - I8259A_IRQ_BASE;
d                 134 drivers/irqchip/irq-i8259.c static void mask_and_ack_8259A(struct irq_data *d)
d                 136 drivers/irqchip/irq-i8259.c 	unsigned int irqmask, irq = d->irq - I8259A_IRQ_BASE;
d                 294 drivers/irqchip/irq-i8259.c static int i8259A_irq_domain_map(struct irq_domain *d, unsigned int virq,
d                  73 drivers/irqchip/irq-imx-gpcv2.c static int imx_gpcv2_irq_set_wake(struct irq_data *d, unsigned int on)
d                  75 drivers/irqchip/irq-imx-gpcv2.c 	struct gpcv2_irqchip_data *cd = d->chip_data;
d                  76 drivers/irqchip/irq-imx-gpcv2.c 	unsigned int idx = d->hwirq / 32;
d                  81 drivers/irqchip/irq-imx-gpcv2.c 	mask = BIT(d->hwirq % 32);
d                  95 drivers/irqchip/irq-imx-gpcv2.c static void imx_gpcv2_irq_unmask(struct irq_data *d)
d                  97 drivers/irqchip/irq-imx-gpcv2.c 	struct gpcv2_irqchip_data *cd = d->chip_data;
d                 102 drivers/irqchip/irq-imx-gpcv2.c 	reg = gpcv2_idx_to_reg(cd, d->hwirq / 32);
d                 104 drivers/irqchip/irq-imx-gpcv2.c 	val &= ~BIT(d->hwirq % 32);
d                 108 drivers/irqchip/irq-imx-gpcv2.c 	irq_chip_unmask_parent(d);
d                 111 drivers/irqchip/irq-imx-gpcv2.c static void imx_gpcv2_irq_mask(struct irq_data *d)
d                 113 drivers/irqchip/irq-imx-gpcv2.c 	struct gpcv2_irqchip_data *cd = d->chip_data;
d                 118 drivers/irqchip/irq-imx-gpcv2.c 	reg = gpcv2_idx_to_reg(cd, d->hwirq / 32);
d                 120 drivers/irqchip/irq-imx-gpcv2.c 	val |= BIT(d->hwirq % 32);
d                 124 drivers/irqchip/irq-imx-gpcv2.c 	irq_chip_mask_parent(d);
d                 140 drivers/irqchip/irq-imx-gpcv2.c static int imx_gpcv2_domain_translate(struct irq_domain *d,
d                  45 drivers/irqchip/irq-imx-irqsteer.c static void imx_irqsteer_irq_unmask(struct irq_data *d)
d                  47 drivers/irqchip/irq-imx-irqsteer.c 	struct irqsteer_data *data = d->chip_data;
d                  48 drivers/irqchip/irq-imx-irqsteer.c 	int idx = imx_irqsteer_get_reg_index(data, d->hwirq);
d                  54 drivers/irqchip/irq-imx-irqsteer.c 	val |= BIT(d->hwirq % 32);
d                  59 drivers/irqchip/irq-imx-irqsteer.c static void imx_irqsteer_irq_mask(struct irq_data *d)
d                  61 drivers/irqchip/irq-imx-irqsteer.c 	struct irqsteer_data *data = d->chip_data;
d                  62 drivers/irqchip/irq-imx-irqsteer.c 	int idx = imx_irqsteer_get_reg_index(data, d->hwirq);
d                  68 drivers/irqchip/irq-imx-irqsteer.c 	val &= ~BIT(d->hwirq % 32);
d                  46 drivers/irqchip/irq-ingenic-tcu.c static void ingenic_tcu_gc_unmask_enable_reg(struct irq_data *d)
d                  48 drivers/irqchip/irq-ingenic-tcu.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  49 drivers/irqchip/irq-ingenic-tcu.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                  51 drivers/irqchip/irq-ingenic-tcu.c 	u32 mask = d->mask;
d                  60 drivers/irqchip/irq-ingenic-tcu.c static void ingenic_tcu_gc_mask_disable_reg(struct irq_data *d)
d                  62 drivers/irqchip/irq-ingenic-tcu.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  63 drivers/irqchip/irq-ingenic-tcu.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                  65 drivers/irqchip/irq-ingenic-tcu.c 	u32 mask = d->mask;
d                  73 drivers/irqchip/irq-ingenic-tcu.c static void ingenic_tcu_gc_mask_disable_reg_and_ack(struct irq_data *d)
d                  75 drivers/irqchip/irq-ingenic-tcu.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  76 drivers/irqchip/irq-ingenic-tcu.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                  78 drivers/irqchip/irq-ingenic-tcu.c 	u32 mask = d->mask;
d                  65 drivers/irqchip/irq-ixp4xx.c static int ixp4xx_set_irq_type(struct irq_data *d, unsigned int type)
d                  73 drivers/irqchip/irq-ixp4xx.c static void ixp4xx_irq_mask(struct irq_data *d)
d                  75 drivers/irqchip/irq-ixp4xx.c 	struct ixp4xx_irq *ixi = irq_data_get_irq_chip_data(d);
d                  78 drivers/irqchip/irq-ixp4xx.c 	if (ixi->is_356 && d->hwirq >= 32) {
d                  80 drivers/irqchip/irq-ixp4xx.c 		val &= ~BIT(d->hwirq - 32);
d                  84 drivers/irqchip/irq-ixp4xx.c 		val &= ~BIT(d->hwirq);
d                  93 drivers/irqchip/irq-ixp4xx.c static void ixp4xx_irq_unmask(struct irq_data *d)
d                  95 drivers/irqchip/irq-ixp4xx.c 	struct ixp4xx_irq *ixi = irq_data_get_irq_chip_data(d);
d                  98 drivers/irqchip/irq-ixp4xx.c 	if (ixi->is_356 && d->hwirq >= 32) {
d                 100 drivers/irqchip/irq-ixp4xx.c 		val |= BIT(d->hwirq - 32);
d                 104 drivers/irqchip/irq-ixp4xx.c 		val |= BIT(d->hwirq);
d                 153 drivers/irqchip/irq-ixp4xx.c static int ixp4xx_irq_domain_alloc(struct irq_domain *d,
d                 157 drivers/irqchip/irq-ixp4xx.c 	struct ixp4xx_irq *ixi = d->host_data;
d                 164 drivers/irqchip/irq-ixp4xx.c 	ret = ixp4xx_irq_domain_translate(d, fwspec, &hwirq, &type);
d                 175 drivers/irqchip/irq-ixp4xx.c 		irq_domain_set_info(d,
d                  46 drivers/irqchip/irq-jcore-aic.c static int jcore_aic_irqdomain_map(struct irq_domain *d, unsigned int irq,
d                  49 drivers/irqchip/irq-jcore-aic.c 	struct irq_chip *aic = d->host_data;
d                  66 drivers/irqchip/irq-keystone.c static void keystone_irq_setmask(struct irq_data *d)
d                  68 drivers/irqchip/irq-keystone.c 	struct keystone_irq_device *kirq = irq_data_get_irq_chip_data(d);
d                  70 drivers/irqchip/irq-keystone.c 	kirq->mask |= BIT(d->hwirq);
d                  71 drivers/irqchip/irq-keystone.c 	dev_dbg(kirq->dev, "mask %lu [%x]\n", d->hwirq, kirq->mask);
d                  74 drivers/irqchip/irq-keystone.c static void keystone_irq_unmask(struct irq_data *d)
d                  76 drivers/irqchip/irq-keystone.c 	struct keystone_irq_device *kirq = irq_data_get_irq_chip_data(d);
d                  78 drivers/irqchip/irq-keystone.c 	kirq->mask &= ~BIT(d->hwirq);
d                  79 drivers/irqchip/irq-keystone.c 	dev_dbg(kirq->dev, "unmask %lu [%x]\n", d->hwirq, kirq->mask);
d                  82 drivers/irqchip/irq-keystone.c static void keystone_irq_ack(struct irq_data *d)
d                  45 drivers/irqchip/irq-lpc32xx.c static void lpc32xx_irq_mask(struct irq_data *d)
d                  47 drivers/irqchip/irq-lpc32xx.c 	struct lpc32xx_irq_chip *ic = irq_data_get_irq_chip_data(d);
d                  48 drivers/irqchip/irq-lpc32xx.c 	u32 val, mask = BIT(d->hwirq);
d                  54 drivers/irqchip/irq-lpc32xx.c static void lpc32xx_irq_unmask(struct irq_data *d)
d                  56 drivers/irqchip/irq-lpc32xx.c 	struct lpc32xx_irq_chip *ic = irq_data_get_irq_chip_data(d);
d                  57 drivers/irqchip/irq-lpc32xx.c 	u32 val, mask = BIT(d->hwirq);
d                  63 drivers/irqchip/irq-lpc32xx.c static void lpc32xx_irq_ack(struct irq_data *d)
d                  65 drivers/irqchip/irq-lpc32xx.c 	struct lpc32xx_irq_chip *ic = irq_data_get_irq_chip_data(d);
d                  66 drivers/irqchip/irq-lpc32xx.c 	u32 mask = BIT(d->hwirq);
d                  71 drivers/irqchip/irq-lpc32xx.c static int lpc32xx_irq_set_type(struct irq_data *d, unsigned int type)
d                  73 drivers/irqchip/irq-lpc32xx.c 	struct lpc32xx_irq_chip *ic = irq_data_get_irq_chip_data(d);
d                  74 drivers/irqchip/irq-lpc32xx.c 	u32 val, mask = BIT(d->hwirq);
d                  99 drivers/irqchip/irq-lpc32xx.c 	irqd_set_trigger_type(d, type);
d                 111 drivers/irqchip/irq-lpc32xx.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 114 drivers/irqchip/irq-lpc32xx.c 		irq_set_handler_locked(d, handle_level_irq);
d                 172 drivers/irqchip/irq-ls-scfg-msi.c 	struct irq_data *d = irq_domain_get_irq_data(domain, virq);
d                 173 drivers/irqchip/irq-ls-scfg-msi.c 	struct ls_scfg_msi *msi_data = irq_data_get_irq_chip_data(d);
d                 176 drivers/irqchip/irq-ls-scfg-msi.c 	pos = d->hwirq;
d                 149 drivers/irqchip/irq-mbigen.c 	struct irq_data *d = irq_get_irq_data(desc->irq);
d                 150 drivers/irqchip/irq-mbigen.c 	void __iomem *base = d->chip_data;
d                 156 drivers/irqchip/irq-mbigen.c 	base += get_mbigen_vec_reg(d->hwirq);
d                 168 drivers/irqchip/irq-mbigen.c static int mbigen_domain_translate(struct irq_domain *d,
d                  40 drivers/irqchip/irq-mips-cpu.c static inline void unmask_mips_irq(struct irq_data *d)
d                  42 drivers/irqchip/irq-mips-cpu.c 	set_c0_status(IE_SW0 << d->hwirq);
d                  46 drivers/irqchip/irq-mips-cpu.c static inline void mask_mips_irq(struct irq_data *d)
d                  48 drivers/irqchip/irq-mips-cpu.c 	clear_c0_status(IE_SW0 << d->hwirq);
d                  67 drivers/irqchip/irq-mips-cpu.c static unsigned int mips_mt_cpu_irq_startup(struct irq_data *d)
d                  71 drivers/irqchip/irq-mips-cpu.c 	clear_c0_cause(C_SW0 << d->hwirq);
d                  73 drivers/irqchip/irq-mips-cpu.c 	unmask_mips_irq(d);
d                  81 drivers/irqchip/irq-mips-cpu.c static void mips_mt_cpu_irq_ack(struct irq_data *d)
d                  84 drivers/irqchip/irq-mips-cpu.c 	clear_c0_cause(C_SW0 << d->hwirq);
d                  86 drivers/irqchip/irq-mips-cpu.c 	mask_mips_irq(d);
d                  91 drivers/irqchip/irq-mips-cpu.c static void mips_mt_send_ipi(struct irq_data *d, unsigned int cpu)
d                  93 drivers/irqchip/irq-mips-cpu.c 	irq_hw_number_t hwirq = irqd_to_hwirq(d);
d                 150 drivers/irqchip/irq-mips-cpu.c static int mips_cpu_intc_map(struct irq_domain *d, unsigned int irq,
d                 208 drivers/irqchip/irq-mips-cpu.c static int mips_cpu_ipi_match(struct irq_domain *d, struct device_node *node,
d                 215 drivers/irqchip/irq-mips-cpu.c 		is_ipi = d->bus_token == bus_token;
d                 216 drivers/irqchip/irq-mips-cpu.c 		return (!node || (to_of_node(d->fwnode) == node)) && is_ipi;
d                 108 drivers/irqchip/irq-mips-gic.c static void gic_send_ipi(struct irq_data *d, unsigned int cpu)
d                 110 drivers/irqchip/irq-mips-gic.c 	irq_hw_number_t hwirq = GIC_HWIRQ_TO_SHARED(irqd_to_hwirq(d));
d                 176 drivers/irqchip/irq-mips-gic.c static void gic_mask_irq(struct irq_data *d)
d                 178 drivers/irqchip/irq-mips-gic.c 	unsigned int intr = GIC_HWIRQ_TO_SHARED(d->hwirq);
d                 184 drivers/irqchip/irq-mips-gic.c static void gic_unmask_irq(struct irq_data *d)
d                 186 drivers/irqchip/irq-mips-gic.c 	unsigned int intr = GIC_HWIRQ_TO_SHARED(d->hwirq);
d                 192 drivers/irqchip/irq-mips-gic.c 	cpu = cpumask_first(irq_data_get_effective_affinity_mask(d));
d                 196 drivers/irqchip/irq-mips-gic.c static void gic_ack_irq(struct irq_data *d)
d                 198 drivers/irqchip/irq-mips-gic.c 	unsigned int irq = GIC_HWIRQ_TO_SHARED(d->hwirq);
d                 203 drivers/irqchip/irq-mips-gic.c static int gic_set_type(struct irq_data *d, unsigned int type)
d                 208 drivers/irqchip/irq-mips-gic.c 	irq = GIC_HWIRQ_TO_SHARED(d->hwirq);
d                 245 drivers/irqchip/irq-mips-gic.c 		irq_set_chip_handler_name_locked(d, &gic_edge_irq_controller,
d                 248 drivers/irqchip/irq-mips-gic.c 		irq_set_chip_handler_name_locked(d, &gic_level_irq_controller,
d                 256 drivers/irqchip/irq-mips-gic.c static int gic_set_affinity(struct irq_data *d, const struct cpumask *cpumask,
d                 259 drivers/irqchip/irq-mips-gic.c 	unsigned int irq = GIC_HWIRQ_TO_SHARED(d->hwirq);
d                 278 drivers/irqchip/irq-mips-gic.c 	irq_data_update_effective_affinity(d, cpumask_of(cpu));
d                 327 drivers/irqchip/irq-mips-gic.c static void gic_mask_local_irq(struct irq_data *d)
d                 329 drivers/irqchip/irq-mips-gic.c 	int intr = GIC_HWIRQ_TO_LOCAL(d->hwirq);
d                 334 drivers/irqchip/irq-mips-gic.c static void gic_unmask_local_irq(struct irq_data *d)
d                 336 drivers/irqchip/irq-mips-gic.c 	int intr = GIC_HWIRQ_TO_LOCAL(d->hwirq);
d                 347 drivers/irqchip/irq-mips-gic.c static void gic_mask_local_irq_all_vpes(struct irq_data *d)
d                 353 drivers/irqchip/irq-mips-gic.c 	intr = GIC_HWIRQ_TO_LOCAL(d->hwirq);
d                 354 drivers/irqchip/irq-mips-gic.c 	cd = irq_data_get_irq_chip_data(d);
d                 365 drivers/irqchip/irq-mips-gic.c static void gic_unmask_local_irq_all_vpes(struct irq_data *d)
d                 371 drivers/irqchip/irq-mips-gic.c 	intr = GIC_HWIRQ_TO_LOCAL(d->hwirq);
d                 372 drivers/irqchip/irq-mips-gic.c 	cd = irq_data_get_irq_chip_data(d);
d                 383 drivers/irqchip/irq-mips-gic.c static void gic_all_vpes_irq_cpu_online(struct irq_data *d)
d                 388 drivers/irqchip/irq-mips-gic.c 	intr = GIC_HWIRQ_TO_LOCAL(d->hwirq);
d                 389 drivers/irqchip/irq-mips-gic.c 	cd = irq_data_get_irq_chip_data(d);
d                 415 drivers/irqchip/irq-mips-gic.c static int gic_shared_irq_domain_map(struct irq_domain *d, unsigned int virq,
d                 433 drivers/irqchip/irq-mips-gic.c static int gic_irq_domain_xlate(struct irq_domain *d, struct device_node *ctrlr,
d                 452 drivers/irqchip/irq-mips-gic.c static int gic_irq_domain_map(struct irq_domain *d, unsigned int virq,
d                 466 drivers/irqchip/irq-mips-gic.c 		err = irq_domain_set_hwirq_and_chip(d, virq, hwirq,
d                 473 drivers/irqchip/irq-mips-gic.c 		return gic_shared_irq_domain_map(d, virq, hwirq, 0);
d                 493 drivers/irqchip/irq-mips-gic.c 		err = irq_domain_set_hwirq_and_chip(d, virq, hwirq,
d                 503 drivers/irqchip/irq-mips-gic.c 		err = irq_domain_set_hwirq_and_chip(d, virq, hwirq,
d                 527 drivers/irqchip/irq-mips-gic.c static int gic_irq_domain_alloc(struct irq_domain *d, unsigned int virq,
d                 538 drivers/irqchip/irq-mips-gic.c 	return gic_irq_domain_map(d, virq, hwirq);
d                 541 drivers/irqchip/irq-mips-gic.c void gic_irq_domain_free(struct irq_domain *d, unsigned int virq,
d                 553 drivers/irqchip/irq-mips-gic.c static int gic_ipi_domain_xlate(struct irq_domain *d, struct device_node *ctrlr,
d                 568 drivers/irqchip/irq-mips-gic.c static int gic_ipi_domain_alloc(struct irq_domain *d, unsigned int virq,
d                 591 drivers/irqchip/irq-mips-gic.c 		ret = irq_domain_set_hwirq_and_chip(d, virq + i, hwirq,
d                 597 drivers/irqchip/irq-mips-gic.c 		ret = irq_domain_set_hwirq_and_chip(d->parent, virq + i, hwirq,
d                 607 drivers/irqchip/irq-mips-gic.c 		ret = gic_shared_irq_domain_map(d, virq + i, hwirq, cpu);
d                 620 drivers/irqchip/irq-mips-gic.c void gic_ipi_domain_free(struct irq_domain *d, unsigned int virq,
d                 634 drivers/irqchip/irq-mips-gic.c int gic_ipi_domain_match(struct irq_domain *d, struct device_node *node,
d                 641 drivers/irqchip/irq-mips-gic.c 		is_ipi = d->bus_token == bus_token;
d                 642 drivers/irqchip/irq-mips-gic.c 		return (!node || to_of_node(d->fwnode) == node) && is_ipi;
d                  67 drivers/irqchip/irq-mmp.c static void icu_mask_ack_irq(struct irq_data *d)
d                  69 drivers/irqchip/irq-mmp.c 	struct irq_domain *domain = d->domain;
d                  74 drivers/irqchip/irq-mmp.c 	hwirq = d->irq - data->virq_base;
d                  91 drivers/irqchip/irq-mmp.c static void icu_mask_irq(struct irq_data *d)
d                  93 drivers/irqchip/irq-mmp.c 	struct irq_domain *domain = d->domain;
d                  98 drivers/irqchip/irq-mmp.c 	hwirq = d->irq - data->virq_base;
d                 120 drivers/irqchip/irq-mmp.c static void icu_unmask_irq(struct irq_data *d)
d                 122 drivers/irqchip/irq-mmp.c 	struct irq_domain *domain = d->domain;
d                 127 drivers/irqchip/irq-mmp.c 	hwirq = d->irq - data->virq_base;
d                 183 drivers/irqchip/irq-mmp.c static int mmp_irq_domain_map(struct irq_domain *d, unsigned int irq,
d                 190 drivers/irqchip/irq-mmp.c static int mmp_irq_domain_xlate(struct irq_domain *d, struct device_node *node,
d                  45 drivers/irqchip/irq-mscc-ocelot.c 	struct irq_domain *d = irq_desc_get_handler_data(desc);
d                  46 drivers/irqchip/irq-mscc-ocelot.c 	struct irq_chip_generic *gc = irq_get_domain_generic_chip(d, 0);
d                  54 drivers/irqchip/irq-mscc-ocelot.c 		generic_handle_irq(irq_find_mapping(d, hwirq));
d                 103 drivers/irqchip/irq-mtk-cirq.c static int mtk_cirq_domain_translate(struct irq_domain *d,
d                  70 drivers/irqchip/irq-mtk-sysirq.c static int mtk_sysirq_domain_translate(struct irq_domain *d,
d                 133 drivers/irqchip/irq-mvebu-gicp.c 	struct irq_data *d = irq_domain_get_irq_data(domain, virq);
d                 135 drivers/irqchip/irq-mvebu-gicp.c 	if (d->hwirq >= gicp->spi_cnt) {
d                 136 drivers/irqchip/irq-mvebu-gicp.c 		dev_err(gicp->dev, "Invalid hwirq %lu\n", d->hwirq);
d                 143 drivers/irqchip/irq-mvebu-gicp.c 	__clear_bit(d->hwirq, gicp->spi_bitmap);
d                  94 drivers/irqchip/irq-mvebu-icu.c 	struct irq_data *d = irq_get_irq_data(desc->irq);
d                  95 drivers/irqchip/irq-mvebu-icu.c 	struct mvebu_icu_msi_data *msi_data = platform_msi_get_host_data(d->domain);
d                  96 drivers/irqchip/irq-mvebu-icu.c 	struct mvebu_icu_irq_data *icu_irqd = d->chip_data;
d                 113 drivers/irqchip/irq-mvebu-icu.c 	writel_relaxed(icu_int, icu->base + ICU_INT_CFG(d->hwirq));
d                 124 drivers/irqchip/irq-mvebu-icu.c 	if (d->hwirq == ICU_SATA0_ICU_ID || d->hwirq == ICU_SATA1_ICU_ID) {
d                 151 drivers/irqchip/irq-mvebu-icu.c mvebu_icu_irq_domain_translate(struct irq_domain *d, struct irq_fwspec *fwspec,
d                 154 drivers/irqchip/irq-mvebu-icu.c 	struct mvebu_icu_msi_data *msi_data = platform_msi_get_host_data(d);
d                 155 drivers/irqchip/irq-mvebu-icu.c 	struct mvebu_icu *icu = platform_msi_get_host_data(d);
d                 258 drivers/irqchip/irq-mvebu-icu.c 	struct irq_data *d = irq_get_irq_data(virq);
d                 259 drivers/irqchip/irq-mvebu-icu.c 	struct mvebu_icu_irq_data *icu_irqd = d->chip_data;
d                  50 drivers/irqchip/irq-mvebu-odmi.c static void odmi_compose_msi_msg(struct irq_data *d, struct msi_msg *msg)
d                  56 drivers/irqchip/irq-mvebu-odmi.c 	if (WARN_ON(d->hwirq >= odmis_count * NODMIS_PER_FRAME))
d                  59 drivers/irqchip/irq-mvebu-odmi.c 	odmi = &odmis[d->hwirq >> NODMIS_SHIFT];
d                  60 drivers/irqchip/irq-mvebu-odmi.c 	odmin = d->hwirq & NODMIS_MASK;
d                  83 drivers/irqchip/irq-mvebu-odmi.c 	struct irq_data *d;
d                 116 drivers/irqchip/irq-mvebu-odmi.c 	d = irq_domain_get_irq_data(domain->parent, virq);
d                 117 drivers/irqchip/irq-mvebu-odmi.c 	d->chip->irq_set_type(d, IRQ_TYPE_EDGE_RISING);
d                 128 drivers/irqchip/irq-mvebu-odmi.c 	struct irq_data *d = irq_domain_get_irq_data(domain, virq);
d                 130 drivers/irqchip/irq-mvebu-odmi.c 	if (d->hwirq >= odmis_count * NODMIS_PER_FRAME) {
d                 131 drivers/irqchip/irq-mvebu-odmi.c 		pr_err("Failed to teardown msi. Invalid hwirq %lu\n", d->hwirq);
d                 139 drivers/irqchip/irq-mvebu-odmi.c 	__clear_bit(d->hwirq, odmis_bm);
d                  42 drivers/irqchip/irq-mvebu-pic.c static void mvebu_pic_eoi_irq(struct irq_data *d)
d                  44 drivers/irqchip/irq-mvebu-pic.c 	struct mvebu_pic *pic = irq_data_get_irq_chip_data(d);
d                  46 drivers/irqchip/irq-mvebu-pic.c 	writel(1 << d->hwirq, pic->base + PIC_CAUSE);
d                  49 drivers/irqchip/irq-mvebu-pic.c static void mvebu_pic_mask_irq(struct irq_data *d)
d                  51 drivers/irqchip/irq-mvebu-pic.c 	struct mvebu_pic *pic = irq_data_get_irq_chip_data(d);
d                  55 drivers/irqchip/irq-mvebu-pic.c 	reg |= (1 << d->hwirq);
d                  59 drivers/irqchip/irq-mvebu-pic.c static void mvebu_pic_unmask_irq(struct irq_data *d)
d                  61 drivers/irqchip/irq-mvebu-pic.c 	struct mvebu_pic *pic = irq_data_get_irq_chip_data(d);
d                  65 drivers/irqchip/irq-mvebu-pic.c 	reg &= ~(1 << d->hwirq);
d                  56 drivers/irqchip/irq-mvebu-sei.c static void mvebu_sei_ack_irq(struct irq_data *d)
d                  58 drivers/irqchip/irq-mvebu-sei.c 	struct mvebu_sei *sei = irq_data_get_irq_chip_data(d);
d                  59 drivers/irqchip/irq-mvebu-sei.c 	u32 reg_idx = SEI_IRQ_REG_IDX(d->hwirq);
d                  61 drivers/irqchip/irq-mvebu-sei.c 	writel_relaxed(BIT(SEI_IRQ_REG_BIT(d->hwirq)),
d                  65 drivers/irqchip/irq-mvebu-sei.c static void mvebu_sei_mask_irq(struct irq_data *d)
d                  67 drivers/irqchip/irq-mvebu-sei.c 	struct mvebu_sei *sei = irq_data_get_irq_chip_data(d);
d                  68 drivers/irqchip/irq-mvebu-sei.c 	u32 reg, reg_idx = SEI_IRQ_REG_IDX(d->hwirq);
d                  74 drivers/irqchip/irq-mvebu-sei.c 	reg |= BIT(SEI_IRQ_REG_BIT(d->hwirq));
d                  79 drivers/irqchip/irq-mvebu-sei.c static void mvebu_sei_unmask_irq(struct irq_data *d)
d                  81 drivers/irqchip/irq-mvebu-sei.c 	struct mvebu_sei *sei = irq_data_get_irq_chip_data(d);
d                  82 drivers/irqchip/irq-mvebu-sei.c 	u32 reg, reg_idx = SEI_IRQ_REG_IDX(d->hwirq);
d                  88 drivers/irqchip/irq-mvebu-sei.c 	reg &= ~BIT(SEI_IRQ_REG_BIT(d->hwirq));
d                  93 drivers/irqchip/irq-mvebu-sei.c static int mvebu_sei_set_affinity(struct irq_data *d,
d                 100 drivers/irqchip/irq-mvebu-sei.c static int mvebu_sei_set_irqchip_state(struct irq_data *d,
d                 108 drivers/irqchip/irq-mvebu-sei.c 	mvebu_sei_ack_irq(d);
d                 186 drivers/irqchip/irq-mvebu-sei.c 		struct irq_data *d = irq_domain_get_irq_data(domain, virq + i);
d                 188 drivers/irqchip/irq-mvebu-sei.c 		irq_domain_reset_irq_data(d);
d                 294 drivers/irqchip/irq-mvebu-sei.c 	struct irq_data *d = irq_domain_get_irq_data(domain, virq);
d                 296 drivers/irqchip/irq-mvebu-sei.c 	if (nr_irqs != 1 || d->hwirq >= sei->caps->cp_range.size) {
d                 297 drivers/irqchip/irq-mvebu-sei.c 		dev_err(sei->dev, "Invalid hwirq %lu\n", d->hwirq);
d                 301 drivers/irqchip/irq-mvebu-sei.c 	mvebu_sei_cp_release_irq(sei, d->hwirq);
d                  62 drivers/irqchip/irq-mxs.c static u32 icoll_intr_bitshift(struct irq_data *d, u32 bit)
d                  68 drivers/irqchip/irq-mxs.c 	return bit << ((d->hwirq & 3) << 3);
d                  72 drivers/irqchip/irq-mxs.c static void __iomem *icoll_intr_reg(struct irq_data *d)
d                  75 drivers/irqchip/irq-mxs.c 	return icoll_priv.intr + ((d->hwirq >> 2) * 0x10);
d                  78 drivers/irqchip/irq-mxs.c static void icoll_ack_irq(struct irq_data *d)
d                  89 drivers/irqchip/irq-mxs.c static void icoll_mask_irq(struct irq_data *d)
d                  92 drivers/irqchip/irq-mxs.c 			icoll_priv.intr + CLR_REG + HW_ICOLL_INTERRUPTn(d->hwirq));
d                  95 drivers/irqchip/irq-mxs.c static void icoll_unmask_irq(struct irq_data *d)
d                  98 drivers/irqchip/irq-mxs.c 			icoll_priv.intr + SET_REG + HW_ICOLL_INTERRUPTn(d->hwirq));
d                 101 drivers/irqchip/irq-mxs.c static void asm9260_mask_irq(struct irq_data *d)
d                 103 drivers/irqchip/irq-mxs.c 	__raw_writel(icoll_intr_bitshift(d, BM_ICOLL_INTR_ENABLE),
d                 104 drivers/irqchip/irq-mxs.c 			icoll_intr_reg(d) + CLR_REG);
d                 107 drivers/irqchip/irq-mxs.c static void asm9260_unmask_irq(struct irq_data *d)
d                 109 drivers/irqchip/irq-mxs.c 	__raw_writel(ASM9260_BM_CLEAR_BIT(d->hwirq),
d                 111 drivers/irqchip/irq-mxs.c 		     ASM9260_HW_ICOLL_CLEARn(d->hwirq));
d                 113 drivers/irqchip/irq-mxs.c 	__raw_writel(icoll_intr_bitshift(d, BM_ICOLL_INTR_ENABLE),
d                 114 drivers/irqchip/irq-mxs.c 			icoll_intr_reg(d) + SET_REG);
d                 142 drivers/irqchip/irq-mxs.c static int icoll_irq_domain_map(struct irq_domain *d, unsigned int virq,
d                  48 drivers/irqchip/irq-nvic.c static int nvic_irq_domain_translate(struct irq_domain *d,
d                 140 drivers/irqchip/irq-omap-intc.c static void omap_ack_irq(struct irq_data *d)
d                 145 drivers/irqchip/irq-omap-intc.c static void omap_mask_ack_irq(struct irq_data *d)
d                 147 drivers/irqchip/irq-omap-intc.c 	irq_gc_mask_disable_reg(d);
d                 148 drivers/irqchip/irq-omap-intc.c 	omap_ack_irq(d);
d                 187 drivers/irqchip/irq-omap-intc.c static int __init omap_alloc_gc_of(struct irq_domain *d, void __iomem *base)
d                 192 drivers/irqchip/irq-omap-intc.c 	ret = irq_alloc_domain_generic_chips(d, 32, 1, "INTC",
d                 204 drivers/irqchip/irq-omap-intc.c 		gc = irq_get_domain_generic_chip(d, 32 * i);
d                 122 drivers/irqchip/irq-or1k-pic.c static int or1k_map(struct irq_domain *d, unsigned int irq, irq_hw_number_t hw)
d                 124 drivers/irqchip/irq-or1k-pic.c 	struct or1k_pic_dev *pic = d->host_data;
d                 111 drivers/irqchip/irq-orion.c 	struct irq_domain *d = irq_desc_get_handler_data(desc);
d                 113 drivers/irqchip/irq-orion.c 	struct irq_chip_generic *gc = irq_get_domain_generic_chip(d, 0);
d                 120 drivers/irqchip/irq-orion.c 		generic_handle_irq(irq_find_mapping(d, gc->irq_base + hwirq));
d                 129 drivers/irqchip/irq-orion.c static unsigned int orion_bridge_irq_startup(struct irq_data *d)
d                 131 drivers/irqchip/irq-orion.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                 133 drivers/irqchip/irq-orion.c 	ct->chip.irq_ack(d);
d                 134 drivers/irqchip/irq-orion.c 	ct->chip.irq_unmask(d);
d                  31 drivers/irqchip/irq-partition-percpu.c static void partition_irq_mask(struct irq_data *d)
d                  33 drivers/irqchip/irq-partition-percpu.c 	struct partition_desc *part = irq_data_get_irq_chip_data(d);
d                  37 drivers/irqchip/irq-partition-percpu.c 	if (partition_check_cpu(part, smp_processor_id(), d->hwirq) &&
d                  42 drivers/irqchip/irq-partition-percpu.c static void partition_irq_unmask(struct irq_data *d)
d                  44 drivers/irqchip/irq-partition-percpu.c 	struct partition_desc *part = irq_data_get_irq_chip_data(d);
d                  48 drivers/irqchip/irq-partition-percpu.c 	if (partition_check_cpu(part, smp_processor_id(), d->hwirq) &&
d                  53 drivers/irqchip/irq-partition-percpu.c static int partition_irq_set_irqchip_state(struct irq_data *d,
d                  57 drivers/irqchip/irq-partition-percpu.c 	struct partition_desc *part = irq_data_get_irq_chip_data(d);
d                  61 drivers/irqchip/irq-partition-percpu.c 	if (partition_check_cpu(part, smp_processor_id(), d->hwirq) &&
d                  68 drivers/irqchip/irq-partition-percpu.c static int partition_irq_get_irqchip_state(struct irq_data *d,
d                  72 drivers/irqchip/irq-partition-percpu.c 	struct partition_desc *part = irq_data_get_irq_chip_data(d);
d                  76 drivers/irqchip/irq-partition-percpu.c 	if (partition_check_cpu(part, smp_processor_id(), d->hwirq) &&
d                  83 drivers/irqchip/irq-partition-percpu.c static int partition_irq_set_type(struct irq_data *d, unsigned int type)
d                  85 drivers/irqchip/irq-partition-percpu.c 	struct partition_desc *part = irq_data_get_irq_chip_data(d);
d                  95 drivers/irqchip/irq-partition-percpu.c static void partition_irq_print_chip(struct irq_data *d, struct seq_file *p)
d                  97 drivers/irqchip/irq-partition-percpu.c 	struct partition_desc *part = irq_data_get_irq_chip_data(d);
d                 168 drivers/irqchip/irq-partition-percpu.c 	struct irq_data *d;
d                 172 drivers/irqchip/irq-partition-percpu.c 	d = irq_domain_get_irq_data(domain, virq);
d                 174 drivers/irqchip/irq-partition-percpu.c 	irq_domain_reset_irq_data(d);
d                 204 drivers/irqchip/irq-partition-percpu.c 	struct irq_domain *d;
d                 216 drivers/irqchip/irq-partition-percpu.c 	d = irq_domain_create_linear(fwnode, nr_parts, &desc->ops, desc);
d                 217 drivers/irqchip/irq-partition-percpu.c 	if (!d)
d                 219 drivers/irqchip/irq-partition-percpu.c 	desc->domain = d;
d                 232 drivers/irqchip/irq-partition-percpu.c 	if (d)
d                 233 drivers/irqchip/irq-partition-percpu.c 		irq_domain_remove(d);
d                 125 drivers/irqchip/irq-pic32-evic.c static int pic32_irq_domain_map(struct irq_domain *d, unsigned int virq,
d                 128 drivers/irqchip/irq-pic32-evic.c 	struct evic_chip_data *priv = d->host_data;
d                 134 drivers/irqchip/irq-pic32-evic.c 	ret = irq_map_generic_chip(d, virq, hw);
d                 145 drivers/irqchip/irq-pic32-evic.c 		data = irq_domain_get_irq_data(d, virq);
d                 165 drivers/irqchip/irq-pic32-evic.c int pic32_irq_domain_xlate(struct irq_domain *d, struct device_node *ctrlr,
d                 169 drivers/irqchip/irq-pic32-evic.c 	struct evic_chip_data *priv = d->host_data;
d                  30 drivers/irqchip/irq-rda-intc.c static void rda_intc_mask_irq(struct irq_data *d)
d                  32 drivers/irqchip/irq-rda-intc.c 	writel_relaxed(BIT(d->hwirq), rda_intc_base + RDA_INTC_MASK_CLR);
d                  35 drivers/irqchip/irq-rda-intc.c static void rda_intc_unmask_irq(struct irq_data *d)
d                  37 drivers/irqchip/irq-rda-intc.c 	writel_relaxed(BIT(d->hwirq), rda_intc_base + RDA_INTC_MASK_SET);
d                  68 drivers/irqchip/irq-rda-intc.c static int rda_irq_map(struct irq_domain *d,
d                  73 drivers/irqchip/irq-rda-intc.c 	irq_set_chip_data(virq, d->host_data);
d                 179 drivers/irqchip/irq-renesas-intc-irqpin.c static void intc_irqpin_irq_enable(struct irq_data *d)
d                 181 drivers/irqchip/irq-renesas-intc-irqpin.c 	struct intc_irqpin_priv *p = irq_data_get_irq_chip_data(d);
d                 182 drivers/irqchip/irq-renesas-intc-irqpin.c 	int hw_irq = irqd_to_hwirq(d);
d                 188 drivers/irqchip/irq-renesas-intc-irqpin.c static void intc_irqpin_irq_disable(struct irq_data *d)
d                 190 drivers/irqchip/irq-renesas-intc-irqpin.c 	struct intc_irqpin_priv *p = irq_data_get_irq_chip_data(d);
d                 191 drivers/irqchip/irq-renesas-intc-irqpin.c 	int hw_irq = irqd_to_hwirq(d);
d                 197 drivers/irqchip/irq-renesas-intc-irqpin.c static void intc_irqpin_shared_irq_enable(struct irq_data *d)
d                 199 drivers/irqchip/irq-renesas-intc-irqpin.c 	struct intc_irqpin_priv *p = irq_data_get_irq_chip_data(d);
d                 200 drivers/irqchip/irq-renesas-intc-irqpin.c 	int hw_irq = irqd_to_hwirq(d);
d                 208 drivers/irqchip/irq-renesas-intc-irqpin.c static void intc_irqpin_shared_irq_disable(struct irq_data *d)
d                 210 drivers/irqchip/irq-renesas-intc-irqpin.c 	struct intc_irqpin_priv *p = irq_data_get_irq_chip_data(d);
d                 211 drivers/irqchip/irq-renesas-intc-irqpin.c 	int hw_irq = irqd_to_hwirq(d);
d                 219 drivers/irqchip/irq-renesas-intc-irqpin.c static void intc_irqpin_irq_enable_force(struct irq_data *d)
d                 221 drivers/irqchip/irq-renesas-intc-irqpin.c 	struct intc_irqpin_priv *p = irq_data_get_irq_chip_data(d);
d                 222 drivers/irqchip/irq-renesas-intc-irqpin.c 	int irq = p->irq[irqd_to_hwirq(d)].requested_irq;
d                 224 drivers/irqchip/irq-renesas-intc-irqpin.c 	intc_irqpin_irq_enable(d);
d                 233 drivers/irqchip/irq-renesas-intc-irqpin.c static void intc_irqpin_irq_disable_force(struct irq_data *d)
d                 235 drivers/irqchip/irq-renesas-intc-irqpin.c 	struct intc_irqpin_priv *p = irq_data_get_irq_chip_data(d);
d                 236 drivers/irqchip/irq-renesas-intc-irqpin.c 	int irq = p->irq[irqd_to_hwirq(d)].requested_irq;
d                 243 drivers/irqchip/irq-renesas-intc-irqpin.c 	intc_irqpin_irq_disable(d);
d                 257 drivers/irqchip/irq-renesas-intc-irqpin.c static int intc_irqpin_irq_set_type(struct irq_data *d, unsigned int type)
d                 260 drivers/irqchip/irq-renesas-intc-irqpin.c 	struct intc_irqpin_priv *p = irq_data_get_irq_chip_data(d);
d                 265 drivers/irqchip/irq-renesas-intc-irqpin.c 	return intc_irqpin_set_sense(p, irqd_to_hwirq(d),
d                 269 drivers/irqchip/irq-renesas-intc-irqpin.c static int intc_irqpin_irq_set_wake(struct irq_data *d, unsigned int on)
d                 271 drivers/irqchip/irq-renesas-intc-irqpin.c 	struct intc_irqpin_priv *p = irq_data_get_irq_chip_data(d);
d                 272 drivers/irqchip/irq-renesas-intc-irqpin.c 	int hw_irq = irqd_to_hwirq(d);
d                 382 drivers/irqchip/irq-renesas-intc-irqpin.c 	void (*enable_fn)(struct irq_data *d);
d                 383 drivers/irqchip/irq-renesas-intc-irqpin.c 	void (*disable_fn)(struct irq_data *d);
d                  74 drivers/irqchip/irq-renesas-irqc.c static int irqc_irq_set_type(struct irq_data *d, unsigned int type)
d                  76 drivers/irqchip/irq-renesas-irqc.c 	struct irqc_priv *p = irq_data_to_priv(d);
d                  77 drivers/irqchip/irq-renesas-irqc.c 	int hw_irq = irqd_to_hwirq(d);
d                  93 drivers/irqchip/irq-renesas-irqc.c static int irqc_irq_set_wake(struct irq_data *d, unsigned int on)
d                  95 drivers/irqchip/irq-renesas-irqc.c 	struct irqc_priv *p = irq_data_to_priv(d);
d                  96 drivers/irqchip/irq-renesas-irqc.c 	int hw_irq = irqd_to_hwirq(d);
d                  54 drivers/irqchip/irq-renesas-rza1.c static void rza1_irqc_eoi(struct irq_data *d)
d                  56 drivers/irqchip/irq-renesas-rza1.c 	struct rza1_irqc_priv *priv = irq_data_to_priv(d);
d                  57 drivers/irqchip/irq-renesas-rza1.c 	u16 bit = BIT(irqd_to_hwirq(d));
d                  65 drivers/irqchip/irq-renesas-rza1.c 	irq_chip_eoi_parent(d);
d                  68 drivers/irqchip/irq-renesas-rza1.c static int rza1_irqc_set_type(struct irq_data *d, unsigned int type)
d                  70 drivers/irqchip/irq-renesas-rza1.c 	struct rza1_irqc_priv *priv = irq_data_to_priv(d);
d                  71 drivers/irqchip/irq-renesas-rza1.c 	unsigned int hw_irq = irqd_to_hwirq(d);
d                1166 drivers/irqchip/irq-s3c24xx.c static int s3c24xx_irq_xlate_of(struct irq_domain *d, struct device_node *n,
d                  34 drivers/irqchip/irq-sa11x0.c static void sa1100_mask_irq(struct irq_data *d)
d                  39 drivers/irqchip/irq-sa11x0.c 	reg &= ~BIT(d->hwirq);
d                  43 drivers/irqchip/irq-sa11x0.c static void sa1100_unmask_irq(struct irq_data *d)
d                  48 drivers/irqchip/irq-sa11x0.c 	reg |= BIT(d->hwirq);
d                  52 drivers/irqchip/irq-sa11x0.c static int sa1100_set_wake(struct irq_data *d, unsigned int on)
d                  54 drivers/irqchip/irq-sa11x0.c 	return sa11x0_sc_set_wake(d->hwirq, on);
d                  65 drivers/irqchip/irq-sa11x0.c static int sa1100_normal_irqdomain_map(struct irq_domain *d,
d                 100 drivers/irqchip/irq-sifive-plic.c static void plic_irq_unmask(struct irq_data *d)
d                 102 drivers/irqchip/irq-sifive-plic.c 	unsigned int cpu = cpumask_any_and(irq_data_get_affinity_mask(d),
d                 106 drivers/irqchip/irq-sifive-plic.c 	plic_irq_toggle(cpumask_of(cpu), d->hwirq, 1);
d                 109 drivers/irqchip/irq-sifive-plic.c static void plic_irq_mask(struct irq_data *d)
d                 111 drivers/irqchip/irq-sifive-plic.c 	plic_irq_toggle(cpu_possible_mask, d->hwirq, 0);
d                 115 drivers/irqchip/irq-sifive-plic.c static int plic_set_affinity(struct irq_data *d,
d                 128 drivers/irqchip/irq-sifive-plic.c 	plic_irq_toggle(cpu_possible_mask, d->hwirq, 0);
d                 129 drivers/irqchip/irq-sifive-plic.c 	plic_irq_toggle(cpumask_of(cpu), d->hwirq, 1);
d                 131 drivers/irqchip/irq-sifive-plic.c 	irq_data_update_effective_affinity(d, cpumask_of(cpu));
d                 137 drivers/irqchip/irq-sifive-plic.c static void plic_irq_eoi(struct irq_data *d)
d                 141 drivers/irqchip/irq-sifive-plic.c 	writel(d->hwirq, handler->hart_base + CONTEXT_CLAIM);
d                 154 drivers/irqchip/irq-sifive-plic.c static int plic_irqdomain_map(struct irq_domain *d, unsigned int irq,
d                  40 drivers/irqchip/irq-sni-exiu.c static void exiu_irq_eoi(struct irq_data *d)
d                  42 drivers/irqchip/irq-sni-exiu.c 	struct exiu_irq_data *data = irq_data_get_irq_chip_data(d);
d                  44 drivers/irqchip/irq-sni-exiu.c 	writel(BIT(d->hwirq), data->base + EIREQCLR);
d                  45 drivers/irqchip/irq-sni-exiu.c 	irq_chip_eoi_parent(d);
d                  48 drivers/irqchip/irq-sni-exiu.c static void exiu_irq_mask(struct irq_data *d)
d                  50 drivers/irqchip/irq-sni-exiu.c 	struct exiu_irq_data *data = irq_data_get_irq_chip_data(d);
d                  53 drivers/irqchip/irq-sni-exiu.c 	val = readl_relaxed(data->base + EIMASK) | BIT(d->hwirq);
d                  55 drivers/irqchip/irq-sni-exiu.c 	irq_chip_mask_parent(d);
d                  58 drivers/irqchip/irq-sni-exiu.c static void exiu_irq_unmask(struct irq_data *d)
d                  60 drivers/irqchip/irq-sni-exiu.c 	struct exiu_irq_data *data = irq_data_get_irq_chip_data(d);
d                  63 drivers/irqchip/irq-sni-exiu.c 	val = readl_relaxed(data->base + EIMASK) & ~BIT(d->hwirq);
d                  65 drivers/irqchip/irq-sni-exiu.c 	irq_chip_unmask_parent(d);
d                  68 drivers/irqchip/irq-sni-exiu.c static void exiu_irq_enable(struct irq_data *d)
d                  70 drivers/irqchip/irq-sni-exiu.c 	struct exiu_irq_data *data = irq_data_get_irq_chip_data(d);
d                  74 drivers/irqchip/irq-sni-exiu.c 	writel_relaxed(BIT(d->hwirq), data->base + EIREQCLR);
d                  76 drivers/irqchip/irq-sni-exiu.c 	val = readl_relaxed(data->base + EIMASK) & ~BIT(d->hwirq);
d                  78 drivers/irqchip/irq-sni-exiu.c 	irq_chip_enable_parent(d);
d                  81 drivers/irqchip/irq-sni-exiu.c static int exiu_irq_set_type(struct irq_data *d, unsigned int type)
d                  83 drivers/irqchip/irq-sni-exiu.c 	struct exiu_irq_data *data = irq_data_get_irq_chip_data(d);
d                  88 drivers/irqchip/irq-sni-exiu.c 		val |= BIT(d->hwirq);
d                  90 drivers/irqchip/irq-sni-exiu.c 		val &= ~BIT(d->hwirq);
d                  95 drivers/irqchip/irq-sni-exiu.c 		val &= ~BIT(d->hwirq);
d                  97 drivers/irqchip/irq-sni-exiu.c 		val |= BIT(d->hwirq);
d                 100 drivers/irqchip/irq-sni-exiu.c 	writel_relaxed(BIT(d->hwirq), data->base + EIREQCLR);
d                 102 drivers/irqchip/irq-sni-exiu.c 	return irq_chip_set_type_parent(d, IRQ_TYPE_LEVEL_HIGH);
d                 255 drivers/irqchip/irq-stm32-exti.c static int stm32_exti_set_type(struct irq_data *d,
d                 258 drivers/irqchip/irq-stm32-exti.c 	u32 mask = BIT(d->hwirq % IRQS_PER_BANK);
d                 316 drivers/irqchip/irq-stm32-exti.c static int stm32_irq_set_type(struct irq_data *d, unsigned int type)
d                 318 drivers/irqchip/irq-stm32-exti.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 333 drivers/irqchip/irq-stm32-exti.c 	err = stm32_exti_set_type(d, type, &rtsr, &ftsr);
d                 392 drivers/irqchip/irq-stm32-exti.c static int stm32_exti_alloc(struct irq_domain *d, unsigned int virq,
d                 400 drivers/irqchip/irq-stm32-exti.c 	irq_map_generic_chip(d, virq, hwirq);
d                 405 drivers/irqchip/irq-stm32-exti.c static void stm32_exti_free(struct irq_domain *d, unsigned int virq,
d                 408 drivers/irqchip/irq-stm32-exti.c 	struct irq_data *data = irq_domain_get_irq_data(d, virq);
d                 419 drivers/irqchip/irq-stm32-exti.c static void stm32_irq_ack(struct irq_data *d)
d                 421 drivers/irqchip/irq-stm32-exti.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 427 drivers/irqchip/irq-stm32-exti.c 	irq_reg_writel(gc, d->mask, stm32_bank->rpr_ofst);
d                 429 drivers/irqchip/irq-stm32-exti.c 		irq_reg_writel(gc, d->mask, stm32_bank->fpr_ofst);
d                 434 drivers/irqchip/irq-stm32-exti.c static inline u32 stm32_exti_set_bit(struct irq_data *d, u32 reg)
d                 436 drivers/irqchip/irq-stm32-exti.c 	struct stm32_exti_chip_data *chip_data = irq_data_get_irq_chip_data(d);
d                 441 drivers/irqchip/irq-stm32-exti.c 	val |= BIT(d->hwirq % IRQS_PER_BANK);
d                 447 drivers/irqchip/irq-stm32-exti.c static inline u32 stm32_exti_clr_bit(struct irq_data *d, u32 reg)
d                 449 drivers/irqchip/irq-stm32-exti.c 	struct stm32_exti_chip_data *chip_data = irq_data_get_irq_chip_data(d);
d                 454 drivers/irqchip/irq-stm32-exti.c 	val &= ~BIT(d->hwirq % IRQS_PER_BANK);
d                 460 drivers/irqchip/irq-stm32-exti.c static void stm32_exti_h_eoi(struct irq_data *d)
d                 462 drivers/irqchip/irq-stm32-exti.c 	struct stm32_exti_chip_data *chip_data = irq_data_get_irq_chip_data(d);
d                 467 drivers/irqchip/irq-stm32-exti.c 	stm32_exti_set_bit(d, stm32_bank->rpr_ofst);
d                 469 drivers/irqchip/irq-stm32-exti.c 		stm32_exti_set_bit(d, stm32_bank->fpr_ofst);
d                 473 drivers/irqchip/irq-stm32-exti.c 	if (d->parent_data->chip)
d                 474 drivers/irqchip/irq-stm32-exti.c 		irq_chip_eoi_parent(d);
d                 477 drivers/irqchip/irq-stm32-exti.c static void stm32_exti_h_mask(struct irq_data *d)
d                 479 drivers/irqchip/irq-stm32-exti.c 	struct stm32_exti_chip_data *chip_data = irq_data_get_irq_chip_data(d);
d                 483 drivers/irqchip/irq-stm32-exti.c 	chip_data->mask_cache = stm32_exti_clr_bit(d, stm32_bank->imr_ofst);
d                 486 drivers/irqchip/irq-stm32-exti.c 	if (d->parent_data->chip)
d                 487 drivers/irqchip/irq-stm32-exti.c 		irq_chip_mask_parent(d);
d                 490 drivers/irqchip/irq-stm32-exti.c static void stm32_exti_h_unmask(struct irq_data *d)
d                 492 drivers/irqchip/irq-stm32-exti.c 	struct stm32_exti_chip_data *chip_data = irq_data_get_irq_chip_data(d);
d                 496 drivers/irqchip/irq-stm32-exti.c 	chip_data->mask_cache = stm32_exti_set_bit(d, stm32_bank->imr_ofst);
d                 499 drivers/irqchip/irq-stm32-exti.c 	if (d->parent_data->chip)
d                 500 drivers/irqchip/irq-stm32-exti.c 		irq_chip_unmask_parent(d);
d                 503 drivers/irqchip/irq-stm32-exti.c static int stm32_exti_h_set_type(struct irq_data *d, unsigned int type)
d                 505 drivers/irqchip/irq-stm32-exti.c 	struct stm32_exti_chip_data *chip_data = irq_data_get_irq_chip_data(d);
d                 520 drivers/irqchip/irq-stm32-exti.c 	err = stm32_exti_set_type(d, type, &rtsr, &ftsr);
d                 535 drivers/irqchip/irq-stm32-exti.c static int stm32_exti_h_set_wake(struct irq_data *d, unsigned int on)
d                 537 drivers/irqchip/irq-stm32-exti.c 	struct stm32_exti_chip_data *chip_data = irq_data_get_irq_chip_data(d);
d                 538 drivers/irqchip/irq-stm32-exti.c 	u32 mask = BIT(d->hwirq % IRQS_PER_BANK);
d                 552 drivers/irqchip/irq-stm32-exti.c static int stm32_exti_h_set_affinity(struct irq_data *d,
d                 555 drivers/irqchip/irq-stm32-exti.c 	if (d->parent_data->chip)
d                 556 drivers/irqchip/irq-stm32-exti.c 		return irq_chip_set_affinity_parent(d, dest, force);
d                  93 drivers/irqchip/irq-sun4i.c static int sun4i_irq_map(struct irq_domain *d, unsigned int virq,
d                  88 drivers/irqchip/irq-tango.c static int tangox_irq_set_type(struct irq_data *d, unsigned int flow_type)
d                  90 drivers/irqchip/irq-tango.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  96 drivers/irqchip/irq-tango.c 		intc_writel(chip, regs->type + EDGE_CFG_RISE_SET, d->mask);
d                  97 drivers/irqchip/irq-tango.c 		intc_writel(chip, regs->type + EDGE_CFG_FALL_CLR, d->mask);
d                 101 drivers/irqchip/irq-tango.c 		intc_writel(chip, regs->type + EDGE_CFG_RISE_CLR, d->mask);
d                 102 drivers/irqchip/irq-tango.c 		intc_writel(chip, regs->type + EDGE_CFG_FALL_SET, d->mask);
d                 106 drivers/irqchip/irq-tango.c 		intc_writel(chip, regs->type + EDGE_CFG_RISE_CLR, d->mask);
d                 107 drivers/irqchip/irq-tango.c 		intc_writel(chip, regs->type + EDGE_CFG_FALL_CLR, d->mask);
d                 111 drivers/irqchip/irq-tango.c 		intc_writel(chip, regs->type + EDGE_CFG_RISE_SET, d->mask);
d                 112 drivers/irqchip/irq-tango.c 		intc_writel(chip, regs->type + EDGE_CFG_FALL_SET, d->mask);
d                 117 drivers/irqchip/irq-tango.c 		       flow_type, d->irq);
d                 121 drivers/irqchip/irq-tango.c 	return irq_setup_alt_chip(d, flow_type);
d                  82 drivers/irqchip/irq-tegra.c static inline void tegra_ictlr_write_mask(struct irq_data *d, unsigned long reg)
d                  84 drivers/irqchip/irq-tegra.c 	void __iomem *base = (void __iomem __force *)d->chip_data;
d                  87 drivers/irqchip/irq-tegra.c 	mask = BIT(d->hwirq % 32);
d                  91 drivers/irqchip/irq-tegra.c static void tegra_mask(struct irq_data *d)
d                  93 drivers/irqchip/irq-tegra.c 	tegra_ictlr_write_mask(d, ICTLR_CPU_IER_CLR);
d                  94 drivers/irqchip/irq-tegra.c 	irq_chip_mask_parent(d);
d                  97 drivers/irqchip/irq-tegra.c static void tegra_unmask(struct irq_data *d)
d                  99 drivers/irqchip/irq-tegra.c 	tegra_ictlr_write_mask(d, ICTLR_CPU_IER_SET);
d                 100 drivers/irqchip/irq-tegra.c 	irq_chip_unmask_parent(d);
d                 103 drivers/irqchip/irq-tegra.c static void tegra_eoi(struct irq_data *d)
d                 105 drivers/irqchip/irq-tegra.c 	tegra_ictlr_write_mask(d, ICTLR_CPU_IEP_FIR_CLR);
d                 106 drivers/irqchip/irq-tegra.c 	irq_chip_eoi_parent(d);
d                 109 drivers/irqchip/irq-tegra.c static int tegra_retrigger(struct irq_data *d)
d                 111 drivers/irqchip/irq-tegra.c 	tegra_ictlr_write_mask(d, ICTLR_CPU_IEP_FIR_SET);
d                 112 drivers/irqchip/irq-tegra.c 	return irq_chip_retrigger_hierarchy(d);
d                 116 drivers/irqchip/irq-tegra.c static int tegra_set_wake(struct irq_data *d, unsigned int enable)
d                 118 drivers/irqchip/irq-tegra.c 	u32 irq = d->hwirq;
d                 215 drivers/irqchip/irq-tegra.c static int tegra_ictlr_domain_translate(struct irq_domain *d,
d                 405 drivers/irqchip/irq-ti-sci-inta.c static int ti_sci_inta_set_affinity(struct irq_data *d,
d                  32 drivers/irqchip/irq-ts4800.c static void ts4800_irq_mask(struct irq_data *d)
d                  34 drivers/irqchip/irq-ts4800.c 	struct ts4800_irq_data *data = irq_data_get_irq_chip_data(d);
d                  36 drivers/irqchip/irq-ts4800.c 	u16 mask = 1 << d->hwirq;
d                  41 drivers/irqchip/irq-ts4800.c static void ts4800_irq_unmask(struct irq_data *d)
d                  43 drivers/irqchip/irq-ts4800.c 	struct ts4800_irq_data *data = irq_data_get_irq_chip_data(d);
d                  45 drivers/irqchip/irq-ts4800.c 	u16 mask = 1 << d->hwirq;
d                  50 drivers/irqchip/irq-ts4800.c static int ts4800_irqdomain_map(struct irq_domain *d, unsigned int irq,
d                  53 drivers/irqchip/irq-ts4800.c 	struct ts4800_irq_data *data = d->host_data;
d                  54 drivers/irqchip/irq-versatile-fpga.c static void fpga_irq_mask(struct irq_data *d)
d                  56 drivers/irqchip/irq-versatile-fpga.c 	struct fpga_irq_data *f = irq_data_get_irq_chip_data(d);
d                  57 drivers/irqchip/irq-versatile-fpga.c 	u32 mask = 1 << d->hwirq;
d                  62 drivers/irqchip/irq-versatile-fpga.c static void fpga_irq_unmask(struct irq_data *d)
d                  64 drivers/irqchip/irq-versatile-fpga.c 	struct fpga_irq_data *f = irq_data_get_irq_chip_data(d);
d                  65 drivers/irqchip/irq-versatile-fpga.c 	u32 mask = 1 << d->hwirq;
d                 129 drivers/irqchip/irq-versatile-fpga.c static int fpga_irqdomain_map(struct irq_domain *d, unsigned int irq,
d                 132 drivers/irqchip/irq-versatile-fpga.c 	struct fpga_irq_data *f = d->host_data;
d                 160 drivers/irqchip/irq-vf610-mscm-ir.c static int vf610_mscm_ir_domain_translate(struct irq_domain *d,
d                 181 drivers/irqchip/irq-vic.c static int vic_irqdomain_map(struct irq_domain *d, unsigned int irq,
d                 184 drivers/irqchip/irq-vic.c 	struct vic_device *v = d->host_data;
d                 303 drivers/irqchip/irq-vic.c static void vic_ack_irq(struct irq_data *d)
d                 305 drivers/irqchip/irq-vic.c 	void __iomem *base = irq_data_get_irq_chip_data(d);
d                 306 drivers/irqchip/irq-vic.c 	unsigned int irq = d->hwirq;
d                 312 drivers/irqchip/irq-vic.c static void vic_mask_irq(struct irq_data *d)
d                 314 drivers/irqchip/irq-vic.c 	void __iomem *base = irq_data_get_irq_chip_data(d);
d                 315 drivers/irqchip/irq-vic.c 	unsigned int irq = d->hwirq;
d                 319 drivers/irqchip/irq-vic.c static void vic_unmask_irq(struct irq_data *d)
d                 321 drivers/irqchip/irq-vic.c 	void __iomem *base = irq_data_get_irq_chip_data(d);
d                 322 drivers/irqchip/irq-vic.c 	unsigned int irq = d->hwirq;
d                 341 drivers/irqchip/irq-vic.c static int vic_set_wake(struct irq_data *d, unsigned int on)
d                 343 drivers/irqchip/irq-vic.c 	struct vic_device *v = vic_from_irq(d->irq);
d                 344 drivers/irqchip/irq-vic.c 	unsigned int off = d->hwirq;
d                  70 drivers/irqchip/irq-vt8500.c static void vt8500_irq_mask(struct irq_data *d)
d                  72 drivers/irqchip/irq-vt8500.c 	struct vt8500_irq_data *priv = d->domain->host_data;
d                  74 drivers/irqchip/irq-vt8500.c 	void __iomem *stat_reg = base + VT8500_ICIS + (d->hwirq < 32 ? 0 : 4);
d                  78 drivers/irqchip/irq-vt8500.c 	edge = readb(base + VT8500_ICDC + d->hwirq) & VT8500_EDGE;
d                  82 drivers/irqchip/irq-vt8500.c 		status |= (1 << (d->hwirq & 0x1f));
d                  85 drivers/irqchip/irq-vt8500.c 		dctr = readb(base + VT8500_ICDC + d->hwirq);
d                  87 drivers/irqchip/irq-vt8500.c 		writeb(dctr, base + VT8500_ICDC + d->hwirq);
d                  91 drivers/irqchip/irq-vt8500.c static void vt8500_irq_unmask(struct irq_data *d)
d                  93 drivers/irqchip/irq-vt8500.c 	struct vt8500_irq_data *priv = d->domain->host_data;
d                  97 drivers/irqchip/irq-vt8500.c 	dctr = readb(base + VT8500_ICDC + d->hwirq);
d                  99 drivers/irqchip/irq-vt8500.c 	writeb(dctr, base + VT8500_ICDC + d->hwirq);
d                 102 drivers/irqchip/irq-vt8500.c static int vt8500_irq_set_type(struct irq_data *d, unsigned int flow_type)
d                 104 drivers/irqchip/irq-vt8500.c 	struct vt8500_irq_data *priv = d->domain->host_data;
d                 108 drivers/irqchip/irq-vt8500.c 	dctr = readb(base + VT8500_ICDC + d->hwirq);
d                 116 drivers/irqchip/irq-vt8500.c 		irq_set_handler_locked(d, handle_level_irq);
d                 120 drivers/irqchip/irq-vt8500.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 124 drivers/irqchip/irq-vt8500.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 127 drivers/irqchip/irq-vt8500.c 	writeb(dctr, base + VT8500_ICDC + d->hwirq);
d                  61 drivers/irqchip/irq-xilinx-intc.c static void intc_enable_or_unmask(struct irq_data *d)
d                  63 drivers/irqchip/irq-xilinx-intc.c 	unsigned long mask = 1 << d->hwirq;
d                  65 drivers/irqchip/irq-xilinx-intc.c 	pr_debug("irq-xilinx: enable_or_unmask: %ld\n", d->hwirq);
d                  71 drivers/irqchip/irq-xilinx-intc.c 	if (irqd_is_level_type(d))
d                  77 drivers/irqchip/irq-xilinx-intc.c static void intc_disable_or_mask(struct irq_data *d)
d                  79 drivers/irqchip/irq-xilinx-intc.c 	pr_debug("irq-xilinx: disable: %ld\n", d->hwirq);
d                  80 drivers/irqchip/irq-xilinx-intc.c 	xintc_write(CIE, 1 << d->hwirq);
d                  83 drivers/irqchip/irq-xilinx-intc.c static void intc_ack(struct irq_data *d)
d                  85 drivers/irqchip/irq-xilinx-intc.c 	pr_debug("irq-xilinx: ack: %ld\n", d->hwirq);
d                  86 drivers/irqchip/irq-xilinx-intc.c 	xintc_write(IAR, 1 << d->hwirq);
d                  89 drivers/irqchip/irq-xilinx-intc.c static void intc_mask_ack(struct irq_data *d)
d                  91 drivers/irqchip/irq-xilinx-intc.c 	unsigned long mask = 1 << d->hwirq;
d                  93 drivers/irqchip/irq-xilinx-intc.c 	pr_debug("irq-xilinx: disable_and_ack: %ld\n", d->hwirq);
d                 119 drivers/irqchip/irq-xilinx-intc.c static int xintc_map(struct irq_domain *d, unsigned int irq, irq_hw_number_t hw)
d                  25 drivers/irqchip/irq-xtensa-mx.c static int xtensa_mx_irq_map(struct irq_domain *d, unsigned int irq,
d                  29 drivers/irqchip/irq-xtensa-mx.c 		struct irq_chip *irq_chip = d->host_data;
d                  36 drivers/irqchip/irq-xtensa-mx.c 	return xtensa_irq_map(d, irq, hw);
d                  45 drivers/irqchip/irq-xtensa-mx.c static int xtensa_mx_irq_domain_xlate(struct irq_domain *d,
d                  69 drivers/irqchip/irq-xtensa-mx.c static void xtensa_mx_irq_mask(struct irq_data *d)
d                  71 drivers/irqchip/irq-xtensa-mx.c 	unsigned int mask = 1u << d->hwirq;
d                  75 drivers/irqchip/irq-xtensa-mx.c 		unsigned int ext_irq = xtensa_get_ext_irq_no(d->hwirq);
d                  87 drivers/irqchip/irq-xtensa-mx.c static void xtensa_mx_irq_unmask(struct irq_data *d)
d                  89 drivers/irqchip/irq-xtensa-mx.c 	unsigned int mask = 1u << d->hwirq;
d                  93 drivers/irqchip/irq-xtensa-mx.c 		unsigned int ext_irq = xtensa_get_ext_irq_no(d->hwirq);
d                 105 drivers/irqchip/irq-xtensa-mx.c static void xtensa_mx_irq_enable(struct irq_data *d)
d                 107 drivers/irqchip/irq-xtensa-mx.c 	xtensa_mx_irq_unmask(d);
d                 110 drivers/irqchip/irq-xtensa-mx.c static void xtensa_mx_irq_disable(struct irq_data *d)
d                 112 drivers/irqchip/irq-xtensa-mx.c 	xtensa_mx_irq_mask(d);
d                 115 drivers/irqchip/irq-xtensa-mx.c static void xtensa_mx_irq_ack(struct irq_data *d)
d                 117 drivers/irqchip/irq-xtensa-mx.c 	xtensa_set_sr(1 << d->hwirq, intclear);
d                 120 drivers/irqchip/irq-xtensa-mx.c static int xtensa_mx_irq_retrigger(struct irq_data *d)
d                 122 drivers/irqchip/irq-xtensa-mx.c 	unsigned int mask = 1u << d->hwirq;
d                 130 drivers/irqchip/irq-xtensa-mx.c static int xtensa_mx_irq_set_affinity(struct irq_data *d,
d                 136 drivers/irqchip/irq-xtensa-mx.c 	set_er(mask, MIROUT(d->hwirq - HW_IRQ_MX_BASE));
d                 137 drivers/irqchip/irq-xtensa-mx.c 	irq_data_update_effective_affinity(d, cpumask_of(cpu));
d                  29 drivers/irqchip/irq-xtensa-pic.c static int xtensa_pic_irq_domain_xlate(struct irq_domain *d,
d                  44 drivers/irqchip/irq-xtensa-pic.c static void xtensa_irq_mask(struct irq_data *d)
d                  46 drivers/irqchip/irq-xtensa-pic.c 	cached_irq_mask &= ~(1 << d->hwirq);
d                  50 drivers/irqchip/irq-xtensa-pic.c static void xtensa_irq_unmask(struct irq_data *d)
d                  52 drivers/irqchip/irq-xtensa-pic.c 	cached_irq_mask |= 1 << d->hwirq;
d                  56 drivers/irqchip/irq-xtensa-pic.c static void xtensa_irq_enable(struct irq_data *d)
d                  58 drivers/irqchip/irq-xtensa-pic.c 	xtensa_irq_unmask(d);
d                  61 drivers/irqchip/irq-xtensa-pic.c static void xtensa_irq_disable(struct irq_data *d)
d                  63 drivers/irqchip/irq-xtensa-pic.c 	xtensa_irq_mask(d);
d                  66 drivers/irqchip/irq-xtensa-pic.c static void xtensa_irq_ack(struct irq_data *d)
d                  68 drivers/irqchip/irq-xtensa-pic.c 	xtensa_set_sr(1 << d->hwirq, intclear);
d                  71 drivers/irqchip/irq-xtensa-pic.c static int xtensa_irq_retrigger(struct irq_data *d)
d                  73 drivers/irqchip/irq-xtensa-pic.c 	unsigned int mask = 1u << d->hwirq;
d                 119 drivers/irqchip/qcom-irq-combiner.c static int combiner_irq_translate(struct irq_domain *d, struct irq_fwspec *fws,
d                 122 drivers/irqchip/qcom-irq-combiner.c 	struct combiner *combiner = d->host_data;
d                  50 drivers/irqchip/qcom-pdc.c static void pdc_enable_intr(struct irq_data *d, bool on)
d                  52 drivers/irqchip/qcom-pdc.c 	int pin_out = d->hwirq;
d                  66 drivers/irqchip/qcom-pdc.c static void qcom_pdc_gic_mask(struct irq_data *d)
d                  68 drivers/irqchip/qcom-pdc.c 	pdc_enable_intr(d, false);
d                  69 drivers/irqchip/qcom-pdc.c 	irq_chip_mask_parent(d);
d                  72 drivers/irqchip/qcom-pdc.c static void qcom_pdc_gic_unmask(struct irq_data *d)
d                  74 drivers/irqchip/qcom-pdc.c 	pdc_enable_intr(d, true);
d                  75 drivers/irqchip/qcom-pdc.c 	irq_chip_unmask_parent(d);
d                 112 drivers/irqchip/qcom-pdc.c static int qcom_pdc_gic_set_type(struct irq_data *d, unsigned int type)
d                 114 drivers/irqchip/qcom-pdc.c 	int pin_out = d->hwirq;
d                 143 drivers/irqchip/qcom-pdc.c 	return irq_chip_set_type_parent(d, type);
d                 176 drivers/irqchip/qcom-pdc.c static int qcom_pdc_translate(struct irq_domain *d, struct irq_fwspec *fwspec,
d                  58 drivers/irqchip/spear-shirq.c static void shirq_irq_mask(struct irq_data *d)
d                  60 drivers/irqchip/spear-shirq.c 	struct spear_shirq *shirq = irq_data_get_irq_chip_data(d);
d                  61 drivers/irqchip/spear-shirq.c 	u32 val, shift = d->irq - shirq->virq_base + shirq->offset;
d                  70 drivers/irqchip/spear-shirq.c static void shirq_irq_unmask(struct irq_data *d)
d                  72 drivers/irqchip/spear-shirq.c 	struct spear_shirq *shirq = irq_data_get_irq_chip_data(d);
d                  73 drivers/irqchip/spear-shirq.c 	u32 val, shift = d->irq - shirq->virq_base + shirq->offset;
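
The irqchip callbacks listed above (ts4800, versatile-fpga, xtensa-pic, spear-shirq and others) share one cached-mask pattern: derive a bit from d->hwirq, update a software copy of the enable mask, and write the whole word back to the hardware register. A minimal standalone sketch of that pattern follows; the register is simulated and every name is hypothetical rather than taken from any of the drivers above.

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t cached_mask;      /* software copy of the enable register */
    static uint32_t fake_enable_reg;  /* stands in for a memory-mapped register */

    static void write_enable_reg(uint32_t val)
    {
        fake_enable_reg = val;        /* the drivers above use writel()/intc_writel() here */
    }

    static void demo_irq_mask(unsigned int hwirq)
    {
        cached_mask &= ~(1u << (hwirq % 32));
        write_enable_reg(cached_mask);
    }

    static void demo_irq_unmask(unsigned int hwirq)
    {
        cached_mask |= 1u << (hwirq % 32);
        write_enable_reg(cached_mask);
    }

    int main(void)
    {
        demo_irq_unmask(5);
        demo_irq_mask(5);
        printf("enable reg = 0x%08x\n", (unsigned int)fake_enable_reg);
        return 0;
    }
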
d                1937 drivers/isdn/hardware/mISDN/hfcmulti.c 	u_char *d;
d                2128 drivers/isdn/hardware/mISDN/hfcmulti.c 	d = (*sp)->data + i;
d                2138 drivers/isdn/hardware/mISDN/hfcmulti.c 	hc->write_fifo(hc, d, ii - i);
d                1184 drivers/isdn/mISDN/dsp_cmx.c 	u8 *d, *p;
d                1288 drivers/isdn/mISDN/dsp_cmx.c 	d = dsp->rx_buff;
d                1293 drivers/isdn/mISDN/dsp_cmx.c 		d[w++ & CMX_BUFF_MASK] = *p++;
d                1314 drivers/isdn/mISDN/dsp_cmx.c 	u8 *d, *p, *q, *o_q;
d                1370 drivers/isdn/mISDN/dsp_cmx.c 	d = skb_put(nskb, preload + len); /* result */
d                1378 drivers/isdn/mISDN/dsp_cmx.c 		memset(d, dsp_silence, preload);
d                1379 drivers/isdn/mISDN/dsp_cmx.c 		d += preload;
d                1385 drivers/isdn/mISDN/dsp_cmx.c 		dsp_tone_copy(dsp, d, len);
d                1402 drivers/isdn/mISDN/dsp_cmx.c 			*d++ = p[t]; /* write tx_buff */
d                1424 drivers/isdn/mISDN/dsp_cmx.c 				*d++ = p[t]; /* write tx_buff */
d                1432 drivers/isdn/mISDN/dsp_cmx.c 				memset(d, dsp_silence, (rr - r) & CMX_BUFF_MASK);
d                1441 drivers/isdn/mISDN/dsp_cmx.c 				*d++ = dsp_audio_mix_law[(p[t] << 8) | q[r]];
d                1446 drivers/isdn/mISDN/dsp_cmx.c 				*d++ = q[r]; /* echo */
d                1477 drivers/isdn/mISDN/dsp_cmx.c 				*d++ = dsp_audio_mix_law[(p[t] << 8) | o_q[o_r]];
d                1482 drivers/isdn/mISDN/dsp_cmx.c 				*d++ = o_q[o_r];
d                1499 drivers/isdn/mISDN/dsp_cmx.c 				*d++ = dsp_audio_s16_to_law[sample & 0xffff];
d                1506 drivers/isdn/mISDN/dsp_cmx.c 				*d++ = dsp_audio_mix_law[(q[r] << 8) | o_q[o_r]];
d                1528 drivers/isdn/mISDN/dsp_cmx.c 			*d++ = dsp_audio_s16_to_law[sample & 0xffff];
d                1539 drivers/isdn/mISDN/dsp_cmx.c 			*d++ = dsp_audio_s16_to_law[sample & 0xffff];
d                1555 drivers/isdn/mISDN/dsp_cmx.c 			*d++ = dsp_audio_s16_to_law[sample & 0xffff];
d                1566 drivers/isdn/mISDN/dsp_cmx.c 			*d++ = dsp_audio_s16_to_law[sample & 0xffff];
d                1863 drivers/isdn/mISDN/dsp_cmx.c 	u8 *d, *p;
d                1873 drivers/isdn/mISDN/dsp_cmx.c 	d = skb->data;
d                1901 drivers/isdn/mISDN/dsp_cmx.c 			sprintf(debugbuf + strlen(debugbuf), " %02x", *d);
d                1903 drivers/isdn/mISDN/dsp_cmx.c 		p[w] = *d++;
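
The dsp_cmx lines above write samples with d[w++ & CMX_BUFF_MASK] = *p++, i.e. a power-of-two ring buffer whose write index is never reset, only masked. A small standalone model of that indexing, with an invented buffer size:

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    #define BUFF_SIZE 256u                 /* must be a power of two */
    #define BUFF_MASK (BUFF_SIZE - 1u)

    static uint8_t rx_buff[BUFF_SIZE];

    static unsigned int ring_write(unsigned int w, const uint8_t *p, size_t len)
    {
        while (len--)
            rx_buff[w++ & BUFF_MASK] = *p++;  /* wraps automatically via the mask */
        return w;                              /* caller keeps the running index */
    }

    int main(void)
    {
        uint8_t frame[300];
        memset(frame, 0x55, sizeof(frame));
        unsigned int w = ring_write(0, frame, sizeof(frame));
        printf("write index now %u (slot %u)\n", w, w & BUFF_MASK);
        return 0;
    }
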
d                 465 drivers/isdn/mISDN/layer2.c 	register u_char d = *data;
d                 468 drivers/isdn/mISDN/layer2.c 		d &= 0xf;
d                 469 drivers/isdn/mISDN/layer2.c 	return ((d & 0xf3) == 1) && ((d & 0x0c) != 0x0c);
d                 475 drivers/isdn/mISDN/layer2.c 	u_char d = data[0] & ~0x10;
d                 477 drivers/isdn/mISDN/layer2.c 	return test_bit(FLG_MOD128, &l2->flag) ? d == SABME : d == SABM;
d                  28 drivers/leds/leds-sunfire.c #define	to_sunfire_led(d) container_of(d, struct sunfire_led, led_cdev)
d                 235 drivers/leds/leds-tca6507.c 			int d;
d                 241 drivers/leds/leds-tca6507.c 			d = abs(msec - tt);
d                 242 drivers/leds/leds-tca6507.c 			if (d >= diff)
d                 247 drivers/leds/leds-tca6507.c 			diff = d;
d                 248 drivers/leds/leds-tca6507.c 			if (d == 0)
d                 423 drivers/leds/leds-tca6507.c 			int d;
d                 429 drivers/leds/leds-tca6507.c 			d = abs(level - tca->bank[i].level);
d                 430 drivers/leds/leds-tca6507.c 			if (d < diff) {
d                 431 drivers/leds/leds-tca6507.c 				diff = d;
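
leds-tca6507.c above searches for the table entry closest to a requested value by tracking the smallest absolute difference and stopping early on an exact match. A compact standalone version of that search, with a made-up candidate table:

    #include <stdio.h>
    #include <stdlib.h>
    #include <limits.h>

    static int pick_closest(int target, const int *candidates, int n)
    {
        int best = -1, diff = INT_MAX;

        for (int i = 0; i < n; i++) {
            int d = abs(target - candidates[i]);
            if (d >= diff)
                continue;
            best = i;
            diff = d;
            if (d == 0)
                break;           /* exact match, nothing can beat it */
        }
        return best;
    }

    int main(void)
    {
        int times_ms[] = { 64, 128, 192, 256, 384, 512 };
        int i = pick_closest(300, times_ms, 6);
        printf("closest to 300 ms: %d ms\n", times_ms[i]);
        return 0;
    }
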
d                 408 drivers/mailbox/bcm-flexrm-mailbox.c 	u64 d;
d                 451 drivers/mailbox/bcm-flexrm-mailbox.c 		d = flexrm_header_desc(_toggle, _startpkt, _endpkt,
d                 455 drivers/mailbox/bcm-flexrm-mailbox.c 		flexrm_write_desc(*desc_ptr, d);
d                 661 drivers/mailbox/bcm-flexrm-mailbox.c 	u64 d;
d                 670 drivers/mailbox/bcm-flexrm-mailbox.c 				d = flexrm_src_desc(sg_dma_address(src_sg),
d                 673 drivers/mailbox/bcm-flexrm-mailbox.c 				d = flexrm_msrc_desc(sg_dma_address(src_sg),
d                 676 drivers/mailbox/bcm-flexrm-mailbox.c 					     d, &desc_ptr, &toggle,
d                 686 drivers/mailbox/bcm-flexrm-mailbox.c 				d = flexrm_dst_desc(sg_dma_address(dst_sg),
d                 689 drivers/mailbox/bcm-flexrm-mailbox.c 				d = flexrm_mdst_desc(sg_dma_address(dst_sg),
d                 692 drivers/mailbox/bcm-flexrm-mailbox.c 					     d, &desc_ptr, &toggle,
d                 770 drivers/mailbox/bcm-flexrm-mailbox.c 	u64 d;
d                 782 drivers/mailbox/bcm-flexrm-mailbox.c 			d = flexrm_dst_desc(c->resp, c->resp_len);
d                 784 drivers/mailbox/bcm-flexrm-mailbox.c 					     d, &desc_ptr, &toggle,
d                 789 drivers/mailbox/bcm-flexrm-mailbox.c 			d = flexrm_dstt_desc(c->resp, c->resp_len);
d                 791 drivers/mailbox/bcm-flexrm-mailbox.c 					     d, &desc_ptr, &toggle,
d                 798 drivers/mailbox/bcm-flexrm-mailbox.c 			d = flexrm_dstt_desc(c->data, c->data_len);
d                 800 drivers/mailbox/bcm-flexrm-mailbox.c 					     d, &desc_ptr, &toggle,
d                 807 drivers/mailbox/bcm-flexrm-mailbox.c 			d = flexrm_imm_desc(c->cmd);
d                 809 drivers/mailbox/bcm-flexrm-mailbox.c 					     d, &desc_ptr, &toggle,
d                 814 drivers/mailbox/bcm-flexrm-mailbox.c 			d = flexrm_immt_desc(c->cmd);
d                 816 drivers/mailbox/bcm-flexrm-mailbox.c 					     d, &desc_ptr, &toggle,
d                 824 drivers/mailbox/bcm-flexrm-mailbox.c 			d = flexrm_srct_desc(c->data, c->data_len);
d                 826 drivers/mailbox/bcm-flexrm-mailbox.c 					     d, &desc_ptr, &toggle,
d                1235 drivers/mailbox/bcm-flexrm-mailbox.c 	u64 d;
d                1259 drivers/mailbox/bcm-flexrm-mailbox.c 			d = flexrm_next_table_desc(RING_BD_TOGGLE_VALID(off),
d                1262 drivers/mailbox/bcm-flexrm-mailbox.c 			d = flexrm_null_desc(RING_BD_TOGGLE_INVALID(off));
d                1263 drivers/mailbox/bcm-flexrm-mailbox.c 		flexrm_write_desc(ring->bd_base + off, d);
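
bcm-flexrm-mailbox.c above builds each ring entry as a single 64-bit descriptor (flexrm_src_desc, flexrm_dst_desc, ...) and then writes it with flexrm_write_desc. The sketch below shows only the shift-and-mask packing technique; the field layout is invented and does not reflect the real FlexRM descriptor format.

    #include <stdint.h>
    #include <stdio.h>

    #define DESC_TYPE_SHIFT  60
    #define DESC_LEN_SHIFT   44
    #define DESC_ADDR_MASK   ((1ull << 44) - 1)

    static uint64_t make_desc(unsigned int type, uint64_t addr, unsigned int len)
    {
        return ((uint64_t)type << DESC_TYPE_SHIFT) |
               ((uint64_t)(len & 0xffff) << DESC_LEN_SHIFT) |
               (addr & DESC_ADDR_MASK);
    }

    static void write_desc(uint64_t *slot, uint64_t d)
    {
        *slot = d;                      /* the driver writes this to the ring memory */
    }

    int main(void)
    {
        uint64_t ring[4] = { 0 };
        write_desc(&ring[0], make_desc(0x1, 0x1000, 64));
        printf("desc[0] = 0x%016llx\n", (unsigned long long)ring[0]);
        return 0;
    }
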
d                 152 drivers/mailbox/ti-msgmgr.c ti_msgmgr_queue_get_num_messages(const struct ti_msgmgr_desc *d,
d                 156 drivers/mailbox/ti-msgmgr.c 	u32 status_cnt_mask = d->status_cnt_mask;
d                 175 drivers/mailbox/ti-msgmgr.c static inline bool ti_msgmgr_queue_is_error(const struct ti_msgmgr_desc *d,
d                 181 drivers/mailbox/ti-msgmgr.c 	if (!d->is_sproxy)
d                 188 drivers/mailbox/ti-msgmgr.c 	val = readl(qinst->queue_state) & d->status_err_mask;
d                 408 drivers/mailbox/ti-msgmgr.c 				      const struct ti_msgmgr_desc *d,
d                 417 drivers/mailbox/ti-msgmgr.c 		 "rx_%03d", d->is_sproxy ? qinst->proxy_id : qinst->queue_id);
d                 458 drivers/mailbox/ti-msgmgr.c 	const struct ti_msgmgr_desc *d = inst->desc;
d                 466 drivers/mailbox/ti-msgmgr.c 	if (d->is_sproxy) {
d                 470 drivers/mailbox/ti-msgmgr.c 		msg_count = ti_msgmgr_queue_get_num_messages(d, qinst);
d                 481 drivers/mailbox/ti-msgmgr.c 		qinst->rx_buff = kzalloc(d->max_message_size, GFP_KERNEL);
d                 485 drivers/mailbox/ti-msgmgr.c 		ret = ti_msgmgr_queue_rx_irq_req(dev, d, qinst, chan);
d                 523 drivers/mailbox/ti-msgmgr.c 	const struct ti_msgmgr_desc *d;
d                 530 drivers/mailbox/ti-msgmgr.c 	d = inst->desc;
d                 532 drivers/mailbox/ti-msgmgr.c 	if (d->is_sproxy)
d                 549 drivers/mailbox/ti-msgmgr.c 	if (d->is_sproxy) {
d                 550 drivers/mailbox/ti-msgmgr.c 		if (req_pid >= d->num_valid_queues)
d                 584 drivers/mailbox/ti-msgmgr.c 				 const struct ti_msgmgr_desc *d,
d                 594 drivers/mailbox/ti-msgmgr.c 	if (qinst->queue_id > d->queue_count) {
d                 596 drivers/mailbox/ti-msgmgr.c 			idx, qinst->queue_id, d->queue_count);
d                 600 drivers/mailbox/ti-msgmgr.c 	if (d->is_sproxy) {
d                 603 drivers/mailbox/ti-msgmgr.c 					      d->data_first_reg);
d                 606 drivers/mailbox/ti-msgmgr.c 					      d->data_last_reg);
d                 619 drivers/mailbox/ti-msgmgr.c 				  d->data_first_reg);
d                 622 drivers/mailbox/ti-msgmgr.c 				  d->data_last_reg);
d                 278 drivers/md/bcache/bcache.h 	int (*ioctl)(struct bcache_device *d, fmode_t mode,
d                1011 drivers/md/bcache/bcache.h void bcache_device_stop(struct bcache_device *d);
d                  29 drivers/md/bcache/bset.c 		       (unsigned int) ((u64 *) k - i->d), i->keys);
d                1237 drivers/md/bcache/bset.c 	out->keys = last ? (uint64_t *) bkey_next(last) - out->d : 0;
d                 400 drivers/md/bcache/bset.h #define bset_bkey_last(i)	bkey_idx((struct bkey *) (i)->d, \
d                1780 drivers/md/bcache/btree.c 		struct bcache_device *d = c->devices[i];
d                1785 drivers/md/bcache/btree.c 		if (!d || UUID_FLASH_ONLY(&c->uuids[i]))
d                1787 drivers/md/bcache/btree.c 		dc = container_of(d, struct cached_dev, disk);
d                1805 drivers/md/bcache/btree.c 		for (i = ca->sb.d; i < ca->sb.d + ca->sb.keys; i++)
d                  95 drivers/md/bcache/debug.c 			if (inmemory->d[j] != sorted->d[j])
d                  16 drivers/md/bcache/debug.h #define bypass_torture_test(d)		((d)->bypass_torture_test)
d                  25 drivers/md/bcache/debug.h #define bypass_torture_test(d)		0
d                  46 drivers/md/bcache/journal.c 	sector_t bucket = bucket_to_sector(ca->set, ca->sb.d[bucket_index]);
d                 628 drivers/md/bcache/journal.c 						ca->sb.d[ja->discard_idx]);
d                 686 drivers/md/bcache/journal.c 				  bucket_to_sector(c, ca->sb.d[ja->cur_idx]),
d                  47 drivers/md/bcache/request.c 		void *d = kmap(bv.bv_page) + bv.bv_offset;
d                  49 drivers/md/bcache/request.c 		csum = bch_crc64_update(csum, d, bv.bv_len);
d                 482 drivers/md/bcache/request.c 	struct bcache_device	*d;
d                 541 drivers/md/bcache/request.c 		int ret = s->d->cache_miss(b, s, bio, sectors);
d                 563 drivers/md/bcache/request.c 			   GFP_NOIO, &s->d->bio_split);
d                 617 drivers/md/bcache/request.c 		if (s->d && s->d->c &&
d                 618 drivers/md/bcache/request.c 				!UUID_FLASH_ONLY(&s->d->c->uuids[s->d->id])) {
d                 619 drivers/md/bcache/request.c 			dc = container_of(s->d, struct cached_dev, disk);
d                 654 drivers/md/bcache/request.c 		struct cached_dev *dc = container_of(s->d,
d                 683 drivers/md/bcache/request.c 		generic_end_io_acct(s->d->disk->queue, bio_op(s->orig_bio),
d                 684 drivers/md/bcache/request.c 				    &s->d->disk->part0, s->start_time);
d                 686 drivers/md/bcache/request.c 		trace_bcache_request_end(s->d, s->orig_bio);
d                 728 drivers/md/bcache/request.c 					  struct bcache_device *d)
d                 732 drivers/md/bcache/request.c 	s = mempool_alloc(&d->c->search, GFP_NOIO);
d                 736 drivers/md/bcache/request.c 	atomic_inc(&d->c->search_inflight);
d                 741 drivers/md/bcache/request.c 	s->d			= d;
d                 747 drivers/md/bcache/request.c 	s->iop.c		= d->c;
d                 749 drivers/md/bcache/request.c 	s->iop.inode		= d->id;
d                 765 drivers/md/bcache/request.c 	struct cached_dev *dc = container_of(s->d, struct cached_dev, disk);
d                 778 drivers/md/bcache/request.c 		bch_mark_cache_miss_collision(s->iop.c, s->d);
d                 817 drivers/md/bcache/request.c 	struct bcache_device *d = s->d;
d                 820 drivers/md/bcache/request.c 		bch_mark_cache_miss_collision(s->iop.c, s->d);
d                 826 drivers/md/bcache/request.c 	closure_put(&d->cl);
d                 832 drivers/md/bcache/request.c 	struct cached_dev *dc = container_of(s->d, struct cached_dev, disk);
d                 874 drivers/md/bcache/request.c 	struct cached_dev *dc = container_of(s->d, struct cached_dev, disk);
d                 876 drivers/md/bcache/request.c 	bch_mark_cache_accounting(s->iop.c, s->d,
d                 893 drivers/md/bcache/request.c 	struct cached_dev *dc = container_of(s->d, struct cached_dev, disk);
d                 899 drivers/md/bcache/request.c 		miss = bio_next_split(bio, sectors, GFP_NOIO, &s->d->bio_split);
d                 922 drivers/md/bcache/request.c 	miss = bio_next_split(bio, sectors, GFP_NOIO, &s->d->bio_split);
d                 945 drivers/md/bcache/request.c 		bch_mark_cache_readahead(s->iop.c, s->d);
d                 977 drivers/md/bcache/request.c 	struct cached_dev *dc = container_of(s->d, struct cached_dev, disk);
d                1083 drivers/md/bcache/request.c 	struct bcache_device	*d;
d                1097 drivers/md/bcache/request.c 	generic_end_io_acct(ddip->d->disk->queue, bio_op(bio),
d                1098 drivers/md/bcache/request.c 			    &ddip->d->disk->part0, ddip->start_time);
d                1101 drivers/md/bcache/request.c 		struct cached_dev *dc = container_of(ddip->d,
d                1111 drivers/md/bcache/request.c static void detached_dev_do_request(struct bcache_device *d, struct bio *bio)
d                1114 drivers/md/bcache/request.c 	struct cached_dev *dc = container_of(d, struct cached_dev, disk);
d                1122 drivers/md/bcache/request.c 	ddip->d = d;
d                1140 drivers/md/bcache/request.c 	struct bcache_device *d;
d                1160 drivers/md/bcache/request.c 			d = c->devices[i];
d                1161 drivers/md/bcache/request.c 			dc = container_of(d, struct cached_dev, disk);
d                1180 drivers/md/bcache/request.c 	struct bcache_device *d = bio->bi_disk->private_data;
d                1181 drivers/md/bcache/request.c 	struct cached_dev *dc = container_of(d, struct cached_dev, disk);
d                1184 drivers/md/bcache/request.c 	if (unlikely((d->c && test_bit(CACHE_SET_IO_DISABLE, &d->c->flags)) ||
d                1191 drivers/md/bcache/request.c 	if (likely(d->c)) {
d                1192 drivers/md/bcache/request.c 		if (atomic_read(&d->c->idle_counter))
d                1193 drivers/md/bcache/request.c 			atomic_set(&d->c->idle_counter, 0);
d                1200 drivers/md/bcache/request.c 		if (unlikely(atomic_read(&d->c->at_max_writeback_rate) == 1)) {
d                1201 drivers/md/bcache/request.c 			atomic_set(&d->c->at_max_writeback_rate, 0);
d                1202 drivers/md/bcache/request.c 			quit_max_writeback_rate(d->c, dc);
d                1209 drivers/md/bcache/request.c 			      &d->disk->part0);
d                1215 drivers/md/bcache/request.c 		s = search_alloc(bio, d);
d                1216 drivers/md/bcache/request.c 		trace_bcache_request_start(s->d, bio);
d                1236 drivers/md/bcache/request.c 		detached_dev_do_request(d, bio);
d                1241 drivers/md/bcache/request.c static int cached_dev_ioctl(struct bcache_device *d, fmode_t mode,
d                1244 drivers/md/bcache/request.c 	struct cached_dev *dc = container_of(d, struct cached_dev, disk);
d                1254 drivers/md/bcache/request.c 	struct bcache_device *d = data;
d                1255 drivers/md/bcache/request.c 	struct cached_dev *dc = container_of(d, struct cached_dev, disk);
d                1266 drivers/md/bcache/request.c 		for_each_cache(ca, d->c, i) {
d                1321 drivers/md/bcache/request.c 	struct bcache_device *d = bio->bi_disk->private_data;
d                1323 drivers/md/bcache/request.c 	if (unlikely(d->c && test_bit(CACHE_SET_IO_DISABLE, &d->c->flags))) {
d                1329 drivers/md/bcache/request.c 	generic_start_io_acct(q, bio_op(bio), bio_sectors(bio), &d->disk->part0);
d                1331 drivers/md/bcache/request.c 	s = search_alloc(bio, d);
d                1335 drivers/md/bcache/request.c 	trace_bcache_request_start(s->d, bio);
d                1348 drivers/md/bcache/request.c 					&KEY(d->id, bio->bi_iter.bi_sector, 0),
d                1349 drivers/md/bcache/request.c 					&KEY(d->id, bio_end_sector(bio), 0));
d                1364 drivers/md/bcache/request.c static int flash_dev_ioctl(struct bcache_device *d, fmode_t mode,
d                1372 drivers/md/bcache/request.c 	struct bcache_device *d = data;
d                1378 drivers/md/bcache/request.c 	for_each_cache(ca, d->c, i) {
d                1386 drivers/md/bcache/request.c void bch_flash_dev_request_init(struct bcache_device *d)
d                1388 drivers/md/bcache/request.c 	struct gendisk *g = d->disk;
d                1392 drivers/md/bcache/request.c 	d->cache_miss				= flash_dev_cache_miss;
d                1393 drivers/md/bcache/request.c 	d->ioctl				= flash_dev_ioctl;
d                  40 drivers/md/bcache/request.h void bch_flash_dev_request_init(struct bcache_device *d);
d                 203 drivers/md/bcache/stats.c void bch_mark_cache_accounting(struct cache_set *c, struct bcache_device *d,
d                 206 drivers/md/bcache/stats.c 	struct cached_dev *dc = container_of(d, struct cached_dev, disk);
d                 212 drivers/md/bcache/stats.c void bch_mark_cache_readahead(struct cache_set *c, struct bcache_device *d)
d                 214 drivers/md/bcache/stats.c 	struct cached_dev *dc = container_of(d, struct cached_dev, disk);
d                 220 drivers/md/bcache/stats.c void bch_mark_cache_miss_collision(struct cache_set *c, struct bcache_device *d)
d                 222 drivers/md/bcache/stats.c 	struct cached_dev *dc = container_of(d, struct cached_dev, disk);
d                  56 drivers/md/bcache/stats.h void bch_mark_cache_accounting(struct cache_set *c, struct bcache_device *d,
d                  58 drivers/md/bcache/stats.h void bch_mark_cache_readahead(struct cache_set *c, struct bcache_device *d);
d                  60 drivers/md/bcache/stats.h 				   struct bcache_device *d);
d                  90 drivers/md/bcache/super.c 		sb->d[i] = le64_to_cpu(s->d[i]);
d                 172 drivers/md/bcache/super.c 			if (sb->d[i] != sb->first_bucket + i)
d                 234 drivers/md/bcache/super.c 		out->d[i] = cpu_to_le64(sb->d[i]);
d                 567 drivers/md/bcache/super.c 		struct bucket_disk *d = p->data;
d                 568 drivers/md/bcache/super.c 		struct bucket_disk *end = d + prios_per_bucket(ca);
d                 571 drivers/md/bcache/super.c 		     b < ca->buckets + ca->sb.nbuckets && d < end;
d                 572 drivers/md/bcache/super.c 		     b++, d++) {
d                 573 drivers/md/bcache/super.c 			d->prio = cpu_to_le16(b->prio);
d                 574 drivers/md/bcache/super.c 			d->gen = b->gen;
d                 616 drivers/md/bcache/super.c 	struct bucket_disk *d = p->data + prios_per_bucket(ca), *end = d;
d                 622 drivers/md/bcache/super.c 	     b++, d++) {
d                 623 drivers/md/bcache/super.c 		if (d == end) {
d                 638 drivers/md/bcache/super.c 			d = p->data;
d                 641 drivers/md/bcache/super.c 		b->prio = le16_to_cpu(d->prio);
d                 642 drivers/md/bcache/super.c 		b->gen = b->last_gc = d->gen;
d                 650 drivers/md/bcache/super.c 	struct bcache_device *d = b->bd_disk->private_data;
d                 652 drivers/md/bcache/super.c 	if (test_bit(BCACHE_DEV_CLOSING, &d->flags))
d                 655 drivers/md/bcache/super.c 	closure_get(&d->cl);
d                 661 drivers/md/bcache/super.c 	struct bcache_device *d = b->private_data;
d                 663 drivers/md/bcache/super.c 	closure_put(&d->cl);
d                 669 drivers/md/bcache/super.c 	struct bcache_device *d = b->bd_disk->private_data;
d                 671 drivers/md/bcache/super.c 	return d->ioctl(d, mode, cmd, arg);
d                 681 drivers/md/bcache/super.c void bcache_device_stop(struct bcache_device *d)
d                 683 drivers/md/bcache/super.c 	if (!test_and_set_bit(BCACHE_DEV_CLOSING, &d->flags))
d                 689 drivers/md/bcache/super.c 		closure_queue(&d->cl);
d                 692 drivers/md/bcache/super.c static void bcache_device_unlink(struct bcache_device *d)
d                 696 drivers/md/bcache/super.c 	if (d->c && !test_and_set_bit(BCACHE_DEV_UNLINK_DONE, &d->flags)) {
d                 700 drivers/md/bcache/super.c 		sysfs_remove_link(&d->c->kobj, d->name);
d                 701 drivers/md/bcache/super.c 		sysfs_remove_link(&d->kobj, "cache");
d                 703 drivers/md/bcache/super.c 		for_each_cache(ca, d->c, i)
d                 704 drivers/md/bcache/super.c 			bd_unlink_disk_holder(ca->bdev, d->disk);
d                 708 drivers/md/bcache/super.c static void bcache_device_link(struct bcache_device *d, struct cache_set *c,
d                 715 drivers/md/bcache/super.c 	for_each_cache(ca, d->c, i)
d                 716 drivers/md/bcache/super.c 		bd_link_disk_holder(ca->bdev, d->disk);
d                 718 drivers/md/bcache/super.c 	snprintf(d->name, BCACHEDEVNAME_SIZE,
d                 719 drivers/md/bcache/super.c 		 "%s%u", name, d->id);
d                 721 drivers/md/bcache/super.c 	ret = sysfs_create_link(&d->kobj, &c->kobj, "cache");
d                 725 drivers/md/bcache/super.c 	ret = sysfs_create_link(&c->kobj, &d->kobj, d->name);
d                 729 drivers/md/bcache/super.c 	clear_bit(BCACHE_DEV_UNLINK_DONE, &d->flags);
d                 732 drivers/md/bcache/super.c static void bcache_device_detach(struct bcache_device *d)
d                 736 drivers/md/bcache/super.c 	atomic_dec(&d->c->attached_dev_nr);
d                 738 drivers/md/bcache/super.c 	if (test_bit(BCACHE_DEV_DETACHING, &d->flags)) {
d                 739 drivers/md/bcache/super.c 		struct uuid_entry *u = d->c->uuids + d->id;
d                 744 drivers/md/bcache/super.c 		bch_uuid_write(d->c);
d                 747 drivers/md/bcache/super.c 	bcache_device_unlink(d);
d                 749 drivers/md/bcache/super.c 	d->c->devices[d->id] = NULL;
d                 750 drivers/md/bcache/super.c 	closure_put(&d->c->caching);
d                 751 drivers/md/bcache/super.c 	d->c = NULL;
d                 754 drivers/md/bcache/super.c static void bcache_device_attach(struct bcache_device *d, struct cache_set *c,
d                 757 drivers/md/bcache/super.c 	d->id = id;
d                 758 drivers/md/bcache/super.c 	d->c = c;
d                 759 drivers/md/bcache/super.c 	c->devices[id] = d;
d                 777 drivers/md/bcache/super.c static void bcache_device_free(struct bcache_device *d)
d                 779 drivers/md/bcache/super.c 	struct gendisk *disk = d->disk;
d                 788 drivers/md/bcache/super.c 	if (d->c)
d                 789 drivers/md/bcache/super.c 		bcache_device_detach(d);
d                 803 drivers/md/bcache/super.c 	bioset_exit(&d->bio_split);
d                 804 drivers/md/bcache/super.c 	kvfree(d->full_dirty_stripes);
d                 805 drivers/md/bcache/super.c 	kvfree(d->stripe_sectors_dirty);
d                 807 drivers/md/bcache/super.c 	closure_debug_destroy(&d->cl);
d                 810 drivers/md/bcache/super.c static int bcache_device_init(struct bcache_device *d, unsigned int block_size,
d                 819 drivers/md/bcache/super.c 	if (!d->stripe_size)
d                 820 drivers/md/bcache/super.c 		d->stripe_size = 1 << 31;
d                 822 drivers/md/bcache/super.c 	d->nr_stripes = DIV_ROUND_UP_ULL(sectors, d->stripe_size);
d                 824 drivers/md/bcache/super.c 	if (!d->nr_stripes || d->nr_stripes > max_stripes) {
d                 826 drivers/md/bcache/super.c 			(unsigned int)d->nr_stripes);
d                 830 drivers/md/bcache/super.c 	n = d->nr_stripes * sizeof(atomic_t);
d                 831 drivers/md/bcache/super.c 	d->stripe_sectors_dirty = kvzalloc(n, GFP_KERNEL);
d                 832 drivers/md/bcache/super.c 	if (!d->stripe_sectors_dirty)
d                 835 drivers/md/bcache/super.c 	n = BITS_TO_LONGS(d->nr_stripes) * sizeof(unsigned long);
d                 836 drivers/md/bcache/super.c 	d->full_dirty_stripes = kvzalloc(n, GFP_KERNEL);
d                 837 drivers/md/bcache/super.c 	if (!d->full_dirty_stripes)
d                 845 drivers/md/bcache/super.c 	if (bioset_init(&d->bio_split, 4, offsetof(struct bbio, bio),
d                 849 drivers/md/bcache/super.c 	d->disk = alloc_disk(BCACHE_MINORS);
d                 850 drivers/md/bcache/super.c 	if (!d->disk)
d                 853 drivers/md/bcache/super.c 	set_capacity(d->disk, sectors);
d                 854 drivers/md/bcache/super.c 	snprintf(d->disk->disk_name, DISK_NAME_LEN, "bcache%i", idx);
d                 856 drivers/md/bcache/super.c 	d->disk->major		= bcache_major;
d                 857 drivers/md/bcache/super.c 	d->disk->first_minor	= idx_to_first_minor(idx);
d                 858 drivers/md/bcache/super.c 	d->disk->fops		= &bcache_ops;
d                 859 drivers/md/bcache/super.c 	d->disk->private_data	= d;
d                 866 drivers/md/bcache/super.c 	d->disk->queue			= q;
d                 867 drivers/md/bcache/super.c 	q->queuedata			= d;
d                 868 drivers/md/bcache/super.c 	q->backing_dev_info->congested_data = d;
d                 878 drivers/md/bcache/super.c 	blk_queue_flag_set(QUEUE_FLAG_NONROT, d->disk->queue);
d                 879 drivers/md/bcache/super.c 	blk_queue_flag_clear(QUEUE_FLAG_ADD_RANDOM, d->disk->queue);
d                 880 drivers/md/bcache/super.c 	blk_queue_flag_set(QUEUE_FLAG_DISCARD, d->disk->queue);
d                 945 drivers/md/bcache/super.c 	struct bcache_device *d = &dc->disk;
d                 972 drivers/md/bcache/super.c 	if (!d->c &&
d                 983 drivers/md/bcache/super.c 	add_disk(d->disk);
d                 989 drivers/md/bcache/super.c 	kobject_uevent_env(&disk_to_dev(d->disk)->kobj, KOBJ_CHANGE, env);
d                 994 drivers/md/bcache/super.c 	if (sysfs_create_link(&d->kobj, &disk_to_dev(d->disk)->kobj, "dev") ||
d                 995 drivers/md/bcache/super.c 	    sysfs_create_link(&disk_to_dev(d->disk)->kobj,
d                 996 drivers/md/bcache/super.c 			      &d->kobj, "bcache")) {
d                1291 drivers/md/bcache/super.c 	struct bcache_device *d = &dc->disk;
d                1294 drivers/md/bcache/super.c 	bcache_device_unlink(d);
d                1298 drivers/md/bcache/super.c 	kobject_del(&d->kobj);
d                1410 drivers/md/bcache/super.c 	struct bcache_device *d = container_of(kobj, struct bcache_device,
d                1412 drivers/md/bcache/super.c 	kfree(d);
d                1417 drivers/md/bcache/super.c 	struct bcache_device *d = container_of(cl, struct bcache_device, cl);
d                1420 drivers/md/bcache/super.c 	atomic_long_sub(bcache_dev_sectors_dirty(d),
d                1421 drivers/md/bcache/super.c 			&d->c->flash_dev_dirty_sectors);
d                1422 drivers/md/bcache/super.c 	bcache_device_free(d);
d                1424 drivers/md/bcache/super.c 	kobject_put(&d->kobj);
d                1429 drivers/md/bcache/super.c 	struct bcache_device *d = container_of(cl, struct bcache_device, cl);
d                1432 drivers/md/bcache/super.c 	bcache_device_unlink(d);
d                1434 drivers/md/bcache/super.c 	kobject_del(&d->kobj);
d                1440 drivers/md/bcache/super.c 	struct bcache_device *d = kzalloc(sizeof(struct bcache_device),
d                1442 drivers/md/bcache/super.c 	if (!d)
d                1445 drivers/md/bcache/super.c 	closure_init(&d->cl, NULL);
d                1446 drivers/md/bcache/super.c 	set_closure_fn(&d->cl, flash_dev_flush, system_wq);
d                1448 drivers/md/bcache/super.c 	kobject_init(&d->kobj, &bch_flash_dev_ktype);
d                1450 drivers/md/bcache/super.c 	if (bcache_device_init(d, block_bytes(c), u->sectors))
d                1453 drivers/md/bcache/super.c 	bcache_device_attach(d, c, u - c->uuids);
d                1454 drivers/md/bcache/super.c 	bch_sectors_dirty_init(d);
d                1455 drivers/md/bcache/super.c 	bch_flash_dev_request_init(d);
d                1456 drivers/md/bcache/super.c 	add_disk(d->disk);
d                1458 drivers/md/bcache/super.c 	if (kobject_add(&d->kobj, &disk_to_dev(d->disk)->kobj, "bcache"))
d                1461 drivers/md/bcache/super.c 	bcache_device_link(d, c, "volume");
d                1465 drivers/md/bcache/super.c 	kobject_put(&d->kobj);
d                1671 drivers/md/bcache/super.c 					   struct bcache_device *d,
d                1676 drivers/md/bcache/super.c 			d->disk->disk_name, c->sb.set_uuid);
d                1677 drivers/md/bcache/super.c 		bcache_device_stop(d);
d                1684 drivers/md/bcache/super.c 			d->disk->disk_name);
d                1699 drivers/md/bcache/super.c 		bcache_device_stop(d);
d                1706 drivers/md/bcache/super.c 			d->disk->disk_name);
d                1714 drivers/md/bcache/super.c 	struct bcache_device *d;
d                1720 drivers/md/bcache/super.c 		d = c->devices[i];
d                1721 drivers/md/bcache/super.c 		if (!d)
d                1726 drivers/md/bcache/super.c 			dc = container_of(d, struct cached_dev, disk);
d                1729 drivers/md/bcache/super.c 				conditional_stop_bcache_device(c, d, dc);
d                1731 drivers/md/bcache/super.c 			bcache_device_stop(d);
d                1971 drivers/md/bcache/super.c 				ca->sb.d[j] = ca->sb.first_bucket + j;
d                 526 drivers/md/bcache/sysfs.c 	struct bcache_device *d = container_of(kobj, struct bcache_device,
d                 528 drivers/md/bcache/sysfs.c 	struct uuid_entry *u = &d->c->uuids[d->id];
d                 530 drivers/md/bcache/sysfs.c 	sysfs_printf(data_csum,	"%i", d->data_csum);
d                 545 drivers/md/bcache/sysfs.c 	struct bcache_device *d = container_of(kobj, struct bcache_device,
d                 547 drivers/md/bcache/sysfs.c 	struct uuid_entry *u = &d->c->uuids[d->id];
d                 553 drivers/md/bcache/sysfs.c 	sysfs_strtoul(data_csum,	d->data_csum);
d                 561 drivers/md/bcache/sysfs.c 		bch_uuid_write(d->c);
d                 562 drivers/md/bcache/sysfs.c 		set_capacity(d->disk, u->sectors);
d                 567 drivers/md/bcache/sysfs.c 		bch_uuid_write(d->c);
d                 571 drivers/md/bcache/sysfs.c 		set_bit(BCACHE_DEV_DETACHING, &d->flags);
d                 572 drivers/md/bcache/sysfs.c 		bcache_device_stop(d);
d                 201 drivers/md/bcache/util.c uint64_t bch_next_delay(struct bch_ratelimit *d, uint64_t done)
d                 205 drivers/md/bcache/util.c 	d->next += div_u64(done * NSEC_PER_SEC, atomic_long_read(&d->rate));
d                 213 drivers/md/bcache/util.c 	if (time_before64(now + NSEC_PER_SEC * 5LLU / 2LLU, d->next))
d                 214 drivers/md/bcache/util.c 		d->next = now + NSEC_PER_SEC * 5LLU / 2LLU;
d                 216 drivers/md/bcache/util.c 	if (time_after64(now - NSEC_PER_SEC * 2, d->next))
d                 217 drivers/md/bcache/util.c 		d->next = now - NSEC_PER_SEC * 2;
d                 219 drivers/md/bcache/util.c 	return time_after64(d->next, now)
d                 220 drivers/md/bcache/util.c 		? div_u64(d->next - now, NSEC_PER_SEC / HZ)
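
bch_next_delay() above advances a virtual deadline by done/rate seconds, clamps it so it stays close to the current time, and converts any remaining lead into a delay. A standalone model of that arithmetic, using a fake clock and illustrative constants:

    #include <stdint.h>
    #include <stdio.h>

    #define NSEC_PER_SEC 1000000000ull
    #define HZ 100ull                         /* pretend scheduler tick rate */

    struct ratelimit { uint64_t next; uint64_t rate; };   /* rate: units per second */

    static uint64_t next_delay(struct ratelimit *r, uint64_t done, uint64_t now)
    {
        /* push the virtual deadline forward by done/rate seconds */
        r->next += done * NSEC_PER_SEC / r->rate;

        /* keep the deadline within (now - 2s, now + 2.5s), as in the listing */
        if (now + NSEC_PER_SEC * 5 / 2 < r->next)
            r->next = now + NSEC_PER_SEC * 5 / 2;
        if (r->next + NSEC_PER_SEC * 2 < now)
            r->next = now - NSEC_PER_SEC * 2;

        /* any remaining lead becomes a delay, expressed in ticks */
        return r->next > now ? (r->next - now) / (NSEC_PER_SEC / HZ) : 0;
    }

    int main(void)
    {
        uint64_t now = 10 * NSEC_PER_SEC;
        struct ratelimit r = { .next = now, .rate = 1000 };

        /* 5000 units at 1000/s is 5s of work; the clamp caps the wait at 2.5s */
        printf("delay = %llu ticks\n",
               (unsigned long long)next_delay(&r, 5000, now));
        return 0;
    }
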
d                  87 drivers/md/bcache/util.h #define heap_add(h, d, cmp)						\
d                  92 drivers/md/bcache/util.h 		(h)->data[_i] = d;					\
d                 100 drivers/md/bcache/util.h #define heap_pop(h, d, cmp)						\
d                 104 drivers/md/bcache/util.h 		(d) = (h)->data[0];					\
d                 448 drivers/md/bcache/util.h static inline void bch_ratelimit_reset(struct bch_ratelimit *d)
d                 450 drivers/md/bcache/util.h 	d->next = local_clock();
d                 453 drivers/md/bcache/util.h uint64_t bch_next_delay(struct bch_ratelimit *d, uint64_t done);
d                 455 drivers/md/bcache/util.h #define __DIV_SAFE(n, d, zero)						\
d                 458 drivers/md/bcache/util.h 	typeof(d) _d = (d);						\
d                 462 drivers/md/bcache/util.h #define DIV_SAFE(n, d)	__DIV_SAFE(n, d, 0)
d                 521 drivers/md/bcache/writeback.c 	struct bcache_device *d = c->devices[inode];
d                 524 drivers/md/bcache/writeback.c 	if (!d)
d                 530 drivers/md/bcache/writeback.c 	stripe = offset_to_stripe(d, offset);
d                 531 drivers/md/bcache/writeback.c 	stripe_offset = offset & (d->stripe_size - 1);
d                 535 drivers/md/bcache/writeback.c 			      d->stripe_size - stripe_offset);
d                 540 drivers/md/bcache/writeback.c 		if (stripe >= d->nr_stripes)
d                 544 drivers/md/bcache/writeback.c 					d->stripe_sectors_dirty + stripe);
d                 545 drivers/md/bcache/writeback.c 		if (sectors_dirty == d->stripe_size)
d                 546 drivers/md/bcache/writeback.c 			set_bit(stripe, d->full_dirty_stripes);
d                 548 drivers/md/bcache/writeback.c 			clear_bit(stripe, d->full_dirty_stripes);
d                 784 drivers/md/bcache/writeback.c void bch_sectors_dirty_init(struct bcache_device *d)
d                 790 drivers/md/bcache/writeback.c 	op.inode = d->id;
d                 795 drivers/md/bcache/writeback.c 		ret = bch_btree_map_keys(&op.op, d->c, &op.start,
d                  26 drivers/md/bcache/writeback.h static inline uint64_t bcache_dev_sectors_dirty(struct bcache_device *d)
d                  30 drivers/md/bcache/writeback.h 	for (i = 0; i < d->nr_stripes; i++)
d                  31 drivers/md/bcache/writeback.h 		ret += atomic_read(d->stripe_sectors_dirty + i);
d                  36 drivers/md/bcache/writeback.h static inline unsigned int offset_to_stripe(struct bcache_device *d,
d                  39 drivers/md/bcache/writeback.h 	do_div(offset, d->stripe_size);
d                 113 drivers/md/bcache/writeback.h void bch_sectors_dirty_init(struct bcache_device *d);
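
The writeback lines above split a sector offset into a stripe index plus an offset within the stripe, accumulate per-stripe dirty counts, and flag a stripe once it is completely dirty. A self-contained model of that bookkeeping, with illustrative sizes and a power-of-two stripe size assumed:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define STRIPE_SIZE   16u     /* sectors per stripe */
    #define NR_STRIPES    8u

    static unsigned int stripe_sectors_dirty[NR_STRIPES];
    static bool full_dirty_stripes[NR_STRIPES];

    static void mark_dirty(uint64_t offset, unsigned int nr_sectors)
    {
        while (nr_sectors) {
            unsigned int stripe = offset / STRIPE_SIZE;
            unsigned int stripe_offset = offset & (STRIPE_SIZE - 1);
            unsigned int s = nr_sectors < STRIPE_SIZE - stripe_offset
                           ? nr_sectors : STRIPE_SIZE - stripe_offset;

            if (stripe >= NR_STRIPES)
                return;
            stripe_sectors_dirty[stripe] += s;
            full_dirty_stripes[stripe] =
                (stripe_sectors_dirty[stripe] == STRIPE_SIZE);

            offset += s;
            nr_sectors -= s;
        }
    }

    int main(void)
    {
        mark_dirty(12, 24);       /* spans stripes 0, 1 and 2 */
        for (unsigned int i = 0; i < 3; i++)
            printf("stripe %u: %u dirty, full=%d\n",
                   i, stripe_sectors_dirty[i], (int)full_dirty_stripes[i]);
        return 0;
    }
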
d                  26 drivers/md/dm-cache-policy-smq.c static unsigned safe_div(unsigned n, unsigned d)
d                  28 drivers/md/dm-cache-policy-smq.c 	return d ? n / d : 0u;
d                  31 drivers/md/dm-cache-policy-smq.c static unsigned safe_mod(unsigned n, unsigned d)
d                  33 drivers/md/dm-cache-policy-smq.c 	return d ? n % d : 0u;
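
safe_div()/safe_mod() above simply guard against a zero denominator; a tiny standalone equivalent and usage:

    #include <stdio.h>

    /* a zero denominator yields 0 instead of a fault */
    static unsigned safe_div(unsigned n, unsigned d) { return d ? n / d : 0u; }
    static unsigned safe_mod(unsigned n, unsigned d) { return d ? n % d : 0u; }

    int main(void)
    {
        printf("%u %u\n", safe_div(10, 0), safe_mod(10, 3));  /* prints: 0 1 */
        return 0;
    }
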
d                 665 drivers/md/dm-era-target.c 					   struct digest *d);
d                 668 drivers/md/dm-era-target.c 					   struct digest *d)
d                 671 drivers/md/dm-era-target.c 	uint64_t key = d->era;
d                 680 drivers/md/dm-era-target.c 	d->step = metadata_digest_lookup_writeset;
d                 687 drivers/md/dm-era-target.c 					       struct digest *d)
d                 691 drivers/md/dm-era-target.c 	unsigned b, e = min(d->current_bit + INSERTS_PER_STEP, d->nr_bits);
d                 693 drivers/md/dm-era-target.c 	for (b = d->current_bit; b < e; b++) {
d                 694 drivers/md/dm-era-target.c 		r = writeset_marked_on_disk(&d->info, &d->writeset, b, &marked);
d                 703 drivers/md/dm-era-target.c 		__dm_bless_for_disk(&d->value);
d                 705 drivers/md/dm-era-target.c 				       b, &d->value, &md->era_array_root);
d                 712 drivers/md/dm-era-target.c 	if (b == d->nr_bits)
d                 713 drivers/md/dm-era-target.c 		d->step = metadata_digest_remove_writeset;
d                 715 drivers/md/dm-era-target.c 		d->current_bit = b;
d                 721 drivers/md/dm-era-target.c 					   struct digest *d)
d                 732 drivers/md/dm-era-target.c 	d->era = key;
d                 738 drivers/md/dm-era-target.c 			d->step = NULL;
d                 746 drivers/md/dm-era-target.c 	ws_unpack(&disk, &d->writeset);
d                 747 drivers/md/dm-era-target.c 	d->value = cpu_to_le32(key);
d                 749 drivers/md/dm-era-target.c 	d->nr_bits = min(d->writeset.nr_bits, md->nr_blocks);
d                 750 drivers/md/dm-era-target.c 	d->current_bit = 0;
d                 751 drivers/md/dm-era-target.c 	d->step = metadata_digest_transcribe_writeset;
d                 756 drivers/md/dm-era-target.c static int metadata_digest_start(struct era_metadata *md, struct digest *d)
d                 758 drivers/md/dm-era-target.c 	if (d->step)
d                 761 drivers/md/dm-era-target.c 	memset(d, 0, sizeof(*d));
d                 767 drivers/md/dm-era-target.c 	dm_disk_bitset_init(md->tm, &d->info);
d                 768 drivers/md/dm-era-target.c 	d->step = metadata_digest_lookup_writeset;
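
The dm-era digest above is driven as a resumable state machine: d->step points at the current stage, each stage does a bounded chunk of work (INSERTS_PER_STEP bits) and installs the next stage. A standalone model of that step-function pattern, with invented stage names and work items:

    #include <stdio.h>

    struct digest;
    typedef int (*digest_step_fn)(struct digest *d);

    struct digest {
        unsigned int current_bit;
        unsigned int nr_bits;
        digest_step_fn step;
    };

    #define INSERTS_PER_STEP 4

    static int step_done(struct digest *d)
    {
        d->step = NULL;                         /* nothing left to do */
        return 0;
    }

    static int step_transcribe(struct digest *d)
    {
        unsigned int e = d->current_bit + INSERTS_PER_STEP;

        if (e > d->nr_bits)
            e = d->nr_bits;
        for (unsigned int b = d->current_bit; b < e; b++)
            printf("transcribe bit %u\n", b);   /* bounded chunk of work */

        if (e == d->nr_bits)
            d->step = step_done;                /* advance the state machine */
        else
            d->current_bit = e;
        return 0;
    }

    int main(void)
    {
        struct digest d = { .nr_bits = 10, .step = step_transcribe };

        while (d.step)                          /* resumable: one step per call */
            d.step(&d);
        return 0;
    }
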
d                2226 drivers/md/dm-raid.c 	unsigned int d;
d                2341 drivers/md/dm-raid.c 	d = 0;
d                2358 drivers/md/dm-raid.c 		d++;
d                2729 drivers/md/dm-raid.c 	unsigned int d = mddev->raid_disks = rs->raid_disks;
d                2752 drivers/md/dm-raid.c 	while (d--) {
d                2753 drivers/md/dm-raid.c 		rdev = &rs->dev[d].rdev;
d                2755 drivers/md/dm-raid.c 		if (test_bit(d, (void *) rs->rebuild_disks)) {
d                2855 drivers/md/dm-raid.c 	unsigned int cur_raid_devs, d;
d                2895 drivers/md/dm-raid.c 		for (d = cur_raid_devs; d < rs->raid_disks; d++) {
d                2896 drivers/md/dm-raid.c 			rdev = &rs->dev[d].rdev;
d                2904 drivers/md/dm-raid.c 			rdev->raid_disk = d;
d                 362 drivers/md/dm-switch.c 	unsigned char d;
d                 365 drivers/md/dm-switch.c 	while ((d = hex_table[(unsigned char)**string]) < 16) {
d                 366 drivers/md/dm-switch.c 		r = (r << 4) | d;
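
dm-switch.c above parses hexadecimal one nibble at a time through a lookup table, shifting each digit into the accumulator with r = (r << 4) | d. A standalone version of that parse; the table here is built at runtime rather than being a static array as in the driver:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    static unsigned char hex_table[256];

    static void hex_table_init(void)
    {
        memset(hex_table, 255, sizeof(hex_table));     /* >= 16 means "not hex" */
        for (int i = 0; i < 10; i++) hex_table['0' + i] = i;
        for (int i = 0; i < 6; i++) {
            hex_table['a' + i] = 10 + i;
            hex_table['A' + i] = 10 + i;
        }
    }

    static uint64_t parse_hex(const char **string)
    {
        uint64_t r = 0;
        unsigned char d;

        while ((d = hex_table[(unsigned char)**string]) < 16) {
            r = (r << 4) | d;
            (*string)++;
        }
        return r;
    }

    int main(void)
    {
        hex_table_init();
        const char *s = "1f4:rest";
        printf("0x%llx\n", (unsigned long long)parse_hex(&s)); /* prints 0x1f4 */
        return 0;
    }
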
d                 499 drivers/md/dm-table.c void dm_put_device(struct dm_target *ti, struct dm_dev *d)
d                 506 drivers/md/dm-table.c 		if (dd->dm_dev == d) {
d                 513 drivers/md/dm-table.c 		       dm_device_name(ti->table->md), d->name);
d                 517 drivers/md/dm-table.c 		dm_put_table_device(ti->table->md, d);
d                 831 drivers/md/dm.c void dm_put_table_device(struct mapped_device *md, struct dm_dev *d)
d                 833 drivers/md/dm.c 	struct table_device *td = container_of(d, struct table_device, dm_dev);
d                 187 drivers/md/dm.h void dm_put_table_device(struct mapped_device *md, struct dm_dev *d);
d                1415 drivers/md/md.c 		mdp_disk_t *d;
d                1434 drivers/md/md.c 		d = &sb->disks[rdev2->desc_nr];
d                1436 drivers/md/md.c 		d->number = rdev2->desc_nr;
d                1437 drivers/md/md.c 		d->major = MAJOR(rdev2->bdev->bd_dev);
d                1438 drivers/md/md.c 		d->minor = MINOR(rdev2->bdev->bd_dev);
d                1440 drivers/md/md.c 			d->raid_disk = rdev2->raid_disk;
d                1442 drivers/md/md.c 			d->raid_disk = rdev2->desc_nr; /* compatibility */
d                1444 drivers/md/md.c 			d->state = (1<<MD_DISK_FAULTY);
d                1446 drivers/md/md.c 			d->state = (1<<MD_DISK_ACTIVE);
d                1448 drivers/md/md.c 				d->state |= (1<<MD_DISK_SYNC);
d                1452 drivers/md/md.c 			d->state = 0;
d                1457 drivers/md/md.c 			d->state |= (1<<MD_DISK_WRITEMOSTLY);
d                1459 drivers/md/md.c 			d->state |= (1<<MD_DISK_FAILFAST);
d                1463 drivers/md/md.c 		mdp_disk_t *d = &sb->disks[i];
d                1464 drivers/md/md.c 		if (d->state == 0 && d->number == 0) {
d                1465 drivers/md/md.c 			d->number = i;
d                1466 drivers/md/md.c 			d->raid_disk = i;
d                1467 drivers/md/md.c 			d->state = (1<<MD_DISK_REMOVED);
d                1468 drivers/md/md.c 			d->state |= (1<<MD_DISK_FAULTY);
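
The md.c lines above translate the runtime view of each member disk into flag bits of an on-disk descriptor (faulty, active, in-sync, write-mostly). A simplified standalone sketch of that encoding; the struct and flag layout are stand-ins rather than the real mdp_disk_t:

    #include <stdio.h>

    enum { DISK_FAULTY = 0, DISK_ACTIVE = 1, DISK_SYNC = 2, DISK_WRITEMOSTLY = 9 };

    struct disk_desc { int number; int raid_disk; unsigned int state; };

    static void fill_desc(struct disk_desc *d, int nr, int raid_disk,
                          int faulty, int in_sync, int write_mostly)
    {
        d->number = nr;
        d->raid_disk = raid_disk;

        if (faulty) {
            d->state = 1u << DISK_FAULTY;        /* faulty overrides everything */
        } else {
            d->state = 1u << DISK_ACTIVE;
            if (in_sync)
                d->state |= 1u << DISK_SYNC;
        }
        if (write_mostly)
            d->state |= 1u << DISK_WRITEMOSTLY;
    }

    int main(void)
    {
        struct disk_desc d;
        fill_desc(&d, 0, 0, 0, 1, 0);
        printf("disk 0 state = 0x%x\n", d.state);   /* ACTIVE|SYNC -> 0x6 */
        return 0;
    }
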
d                2008 drivers/md/raid1.c 		int d = r1_bio->read_disk;
d                2015 drivers/md/raid1.c 			if (r1_bio->bios[d]->bi_end_io == end_sync_read) {
d                2020 drivers/md/raid1.c 				rdev = conf->mirrors[d].rdev;
d                2028 drivers/md/raid1.c 			d++;
d                2029 drivers/md/raid1.c 			if (d == conf->raid_disks * 2)
d                2030 drivers/md/raid1.c 				d = 0;
d                2031 drivers/md/raid1.c 		} while (!success && d != r1_bio->read_disk);
d                2044 drivers/md/raid1.c 			for (d = 0; d < conf->raid_disks * 2; d++) {
d                2045 drivers/md/raid1.c 				rdev = conf->mirrors[d].rdev;
d                2066 drivers/md/raid1.c 		start = d;
d                2068 drivers/md/raid1.c 		while (d != r1_bio->read_disk) {
d                2069 drivers/md/raid1.c 			if (d == 0)
d                2070 drivers/md/raid1.c 				d = conf->raid_disks * 2;
d                2071 drivers/md/raid1.c 			d--;
d                2072 drivers/md/raid1.c 			if (r1_bio->bios[d]->bi_end_io != end_sync_read)
d                2074 drivers/md/raid1.c 			rdev = conf->mirrors[d].rdev;
d                2078 drivers/md/raid1.c 				r1_bio->bios[d]->bi_end_io = NULL;
d                2082 drivers/md/raid1.c 		d = start;
d                2083 drivers/md/raid1.c 		while (d != r1_bio->read_disk) {
d                2084 drivers/md/raid1.c 			if (d == 0)
d                2085 drivers/md/raid1.c 				d = conf->raid_disks * 2;
d                2086 drivers/md/raid1.c 			d--;
d                2087 drivers/md/raid1.c 			if (r1_bio->bios[d]->bi_end_io != end_sync_read)
d                2089 drivers/md/raid1.c 			rdev = conf->mirrors[d].rdev;
d                2250 drivers/md/raid1.c 		int d = read_disk;
d                2263 drivers/md/raid1.c 			rdev = rcu_dereference(conf->mirrors[d].rdev);
d                2280 drivers/md/raid1.c 			d++;
d                2281 drivers/md/raid1.c 			if (d == conf->raid_disks * 2)
d                2282 drivers/md/raid1.c 				d = 0;
d                2283 drivers/md/raid1.c 		} while (!success && d != read_disk);
d                2293 drivers/md/raid1.c 		start = d;
d                2294 drivers/md/raid1.c 		while (d != read_disk) {
d                2295 drivers/md/raid1.c 			if (d==0)
d                2296 drivers/md/raid1.c 				d = conf->raid_disks * 2;
d                2297 drivers/md/raid1.c 			d--;
d                2299 drivers/md/raid1.c 			rdev = rcu_dereference(conf->mirrors[d].rdev);
d                2310 drivers/md/raid1.c 		d = start;
d                2311 drivers/md/raid1.c 		while (d != read_disk) {
d                2313 drivers/md/raid1.c 			if (d==0)
d                2314 drivers/md/raid1.c 				d = conf->raid_disks * 2;
d                2315 drivers/md/raid1.c 			d--;
d                2317 drivers/md/raid1.c 			rdev = rcu_dereference(conf->mirrors[d].rdev);
d                3244 drivers/md/raid1.c 	int d, d2;
d                3267 drivers/md/raid1.c 		for (d= 0; d < conf->raid_disks; d++)
d                3268 drivers/md/raid1.c 			if (conf->mirrors[d].rdev)
d                3301 drivers/md/raid1.c 	for (d = d2 = 0; d < conf->raid_disks; d++) {
d                3302 drivers/md/raid1.c 		struct md_rdev *rdev = conf->mirrors[d].rdev;
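
Several raid1.c excerpts above perform the same wrap-around scan: start at the current read disk, step forward modulo the number of copies, and stop on success or after a full lap. A standalone model of that scan with a stubbed-out read attempt:

    #include <stdbool.h>
    #include <stdio.h>

    #define NDISKS 4

    static bool try_read(int disk) { return disk == 2; }  /* pretend disk 2 works */

    static int find_working_disk(int read_disk)
    {
        int d = read_disk;
        bool success = false;

        do {
            success = try_read(d);
            if (success)
                break;
            d++;
            if (d == NDISKS)
                d = 0;                    /* wrap around the mirror set */
        } while (d != read_disk);

        return success ? d : -1;
    }

    int main(void)
    {
        printf("readable copy on disk %d\n", find_working_disk(3));
        return 0;
    }
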
d                 591 drivers/md/raid10.c 		int d = dev;
d                 594 drivers/md/raid10.c 		r10bio->devs[slot].devnum = d;
d                 599 drivers/md/raid10.c 			set = d / geo->far_set_size;
d                 600 drivers/md/raid10.c 			d += geo->near_copies;
d                 603 drivers/md/raid10.c 			    (d > last_far_set_start)) {
d                 604 drivers/md/raid10.c 				d -= last_far_set_start;
d                 605 drivers/md/raid10.c 				d %= last_far_set_size;
d                 606 drivers/md/raid10.c 				d += last_far_set_start;
d                 608 drivers/md/raid10.c 				d %= geo->far_set_size;
d                 609 drivers/md/raid10.c 				d += geo->far_set_size * set;
d                 612 drivers/md/raid10.c 			r10bio->devs[slot].devnum = d;
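
__raid10_find_phys() above (lines 591-612 of raid10.c) places a far copy by rotating the device number within its far set. The standalone model below keeps only that rotation; the undersized-last-set handling in the real code is deliberately omitted and the geometry values are illustrative:

    #include <stdio.h>

    static int rotate_within_set(int d, int set_size, int offset)
    {
        int set = d / set_size;           /* which far set the device is in */

        d += offset;                      /* advance by the near-copy offset */
        d %= set_size;                    /* wrap inside one set ... */
        return d + set_size * set;        /* ... then put the set base back */
    }

    int main(void)
    {
        /* 8 devices split into far sets of 4, replicas rotated by 1 */
        for (int dev = 0; dev < 8; dev++)
            printf("data on %d -> far copy on %d\n",
                   dev, rotate_within_set(dev, 4, 1));
        return 0;
    }
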
d                1365 drivers/md/raid10.c 		int d = r10_bio->devs[i].devnum;
d                1366 drivers/md/raid10.c 		struct md_rdev *rdev = rcu_dereference(conf->mirrors[d].rdev);
d                1368 drivers/md/raid10.c 			conf->mirrors[d].replacement);
d                1448 drivers/md/raid10.c 		int d;
d                1452 drivers/md/raid10.c 				d = r10_bio->devs[j].devnum;
d                1453 drivers/md/raid10.c 				rdev_dec_pending(conf->mirrors[d].rdev, mddev);
d                1457 drivers/md/raid10.c 				d = r10_bio->devs[j].devnum;
d                1458 drivers/md/raid10.c 				rdev = conf->mirrors[d].replacement;
d                1462 drivers/md/raid10.c 					rdev = conf->mirrors[d].rdev;
d                1883 drivers/md/raid10.c static void __end_sync_read(struct r10bio *r10_bio, struct bio *bio, int d)
d                1894 drivers/md/raid10.c 			   &conf->mirrors[d].rdev->corrected_errors);
d                1899 drivers/md/raid10.c 	rdev_dec_pending(conf->mirrors[d].rdev, conf->mddev);
d                1913 drivers/md/raid10.c 	int d = find_bio_disk(conf, r10_bio, bio, NULL, NULL);
d                1915 drivers/md/raid10.c 	__end_sync_read(r10_bio, bio, d);
d                1958 drivers/md/raid10.c 	int d;
d                1965 drivers/md/raid10.c 	d = find_bio_disk(conf, r10_bio, bio, &slot, &repl);
d                1967 drivers/md/raid10.c 		rdev = conf->mirrors[d].replacement;
d                1969 drivers/md/raid10.c 		rdev = conf->mirrors[d].rdev;
d                2035 drivers/md/raid10.c 		int  j, d;
d                2047 drivers/md/raid10.c 		d = r10_bio->devs[i].devnum;
d                2048 drivers/md/raid10.c 		rdev = conf->mirrors[d].rdev;
d                2094 drivers/md/raid10.c 		atomic_inc(&conf->mirrors[d].rdev->nr_pending);
d                2096 drivers/md/raid10.c 		md_sync_acct(conf->mirrors[d].rdev->bdev, bio_sectors(tbio));
d                2098 drivers/md/raid10.c 		if (test_bit(FailFast, &conf->mirrors[d].rdev->flags))
d                2100 drivers/md/raid10.c 		tbio->bi_iter.bi_sector += conf->mirrors[d].rdev->data_offset;
d                2101 drivers/md/raid10.c 		bio_set_dev(tbio, conf->mirrors[d].rdev->bdev);
d                2109 drivers/md/raid10.c 		int d;
d                2117 drivers/md/raid10.c 		d = r10_bio->devs[i].devnum;
d                2119 drivers/md/raid10.c 		md_sync_acct(conf->mirrors[d].replacement->bdev,
d                2227 drivers/md/raid10.c 	int d;
d                2240 drivers/md/raid10.c 	d = r10_bio->devs[1].devnum;
d                2250 drivers/md/raid10.c 		atomic_inc(&conf->mirrors[d].rdev->nr_pending);
d                2251 drivers/md/raid10.c 		md_sync_acct(conf->mirrors[d].rdev->bdev, bio_sectors(wbio));
d                2255 drivers/md/raid10.c 		atomic_inc(&conf->mirrors[d].replacement->nr_pending);
d                2256 drivers/md/raid10.c 		md_sync_acct(conf->mirrors[d].replacement->bdev,
d                2336 drivers/md/raid10.c 	int d = r10_bio->devs[r10_bio->read_slot].devnum;
d                2341 drivers/md/raid10.c 	rdev = conf->mirrors[d].rdev;
d                2378 drivers/md/raid10.c 			d = r10_bio->devs[sl].devnum;
d                2379 drivers/md/raid10.c 			rdev = rcu_dereference(conf->mirrors[d].rdev);
d                2433 drivers/md/raid10.c 			d = r10_bio->devs[sl].devnum;
d                2434 drivers/md/raid10.c 			rdev = rcu_dereference(conf->mirrors[d].rdev);
d                2469 drivers/md/raid10.c 			d = r10_bio->devs[sl].devnum;
d                2470 drivers/md/raid10.c 			rdev = rcu_dereference(conf->mirrors[d].rdev);
d                3140 drivers/md/raid10.c 				int d = r10_bio->devs[j].devnum;
d                3143 drivers/md/raid10.c 					rcu_dereference(conf->mirrors[d].rdev);
d                3184 drivers/md/raid10.c 				r10_bio->devs[0].devnum = d;
d                3275 drivers/md/raid10.c 					int d = r10_bio->devs[j].devnum;
d                3276 drivers/md/raid10.c 					if (conf->mirrors[d].rdev &&
d                3278 drivers/md/raid10.c 						      &conf->mirrors[d].rdev->flags))
d                3335 drivers/md/raid10.c 			int d = r10_bio->devs[i].devnum;
d                3346 drivers/md/raid10.c 			rdev = rcu_dereference(conf->mirrors[d].rdev);
d                3376 drivers/md/raid10.c 			rdev = rcu_dereference(conf->mirrors[d].replacement);
d                3402 drivers/md/raid10.c 				int d = r10_bio->devs[i].devnum;
d                3404 drivers/md/raid10.c 					rdev_dec_pending(conf->mirrors[d].rdev,
d                3409 drivers/md/raid10.c 						conf->mirrors[d].replacement,
d                4610 drivers/md/raid10.c 		int d = r10_bio->devs[s/2].devnum;
d                4613 drivers/md/raid10.c 			rdev2 = rcu_dereference(conf->mirrors[d].replacement);
d                4616 drivers/md/raid10.c 			rdev2 = rcu_dereference(conf->mirrors[d].rdev);
d                4701 drivers/md/raid10.c 		int d = r10_bio->devs[s/2].devnum;
d                4705 drivers/md/raid10.c 			rdev = rcu_dereference(conf->mirrors[d].replacement);
d                4708 drivers/md/raid10.c 			rdev = rcu_dereference(conf->mirrors[d].rdev);
d                4797 drivers/md/raid10.c 			int d = r10b->devs[slot].devnum;
d                4798 drivers/md/raid10.c 			struct md_rdev *rdev = rcu_dereference(conf->mirrors[d].rdev);
d                4844 drivers/md/raid10.c 	int d;
d                4849 drivers/md/raid10.c 	d = find_bio_disk(conf, r10_bio, bio, &slot, &repl);
d                4851 drivers/md/raid10.c 		rdev = conf->mirrors[d].replacement;
d                4854 drivers/md/raid10.c 		rdev = conf->mirrors[d].rdev;
d                4889 drivers/md/raid10.c 		int d;
d                4891 drivers/md/raid10.c 		for (d = conf->geo.raid_disks ;
d                4892 drivers/md/raid10.c 		     d < conf->geo.raid_disks - mddev->delta_disks;
d                4893 drivers/md/raid10.c 		     d++) {
d                4894 drivers/md/raid10.c 			struct md_rdev *rdev = rcu_dereference(conf->mirrors[d].rdev);
d                4897 drivers/md/raid10.c 			rdev = rcu_dereference(conf->mirrors[d].replacement);
d                5518 drivers/md/raid5.c 		int d;
d                5531 drivers/md/raid5.c 		for (d = 0; d < conf->raid_disks; d++) {
d                5532 drivers/md/raid5.c 			if (d == sh->pd_idx || d == sh->qd_idx)
d                5534 drivers/md/raid5.c 			if (sh->dev[d].towrite || sh->dev[d].toread) {
d                5535 drivers/md/raid5.c 				set_bit(R5_Overlap, &sh->dev[d].flags);
d                5545 drivers/md/raid5.c 		for (d = 0; d < conf->raid_disks; d++) {
d                5546 drivers/md/raid5.c 			if (d == sh->pd_idx || d == sh->qd_idx)
d                5548 drivers/md/raid5.c 			sh->dev[d].towrite = bi;
d                5549 drivers/md/raid5.c 			set_bit(R5_OVERWRITE, &sh->dev[d].flags);
d                5556 drivers/md/raid5.c 			for (d = 0;
d                5557 drivers/md/raid5.c 			     d < conf->raid_disks - conf->max_degraded;
d                5558 drivers/md/raid5.c 			     d++)
d                8046 drivers/md/raid5.c 			int d;
d                8050 drivers/md/raid5.c 			for (d = conf->raid_disks ;
d                8051 drivers/md/raid5.c 			     d < conf->raid_disks - mddev->delta_disks;
d                8052 drivers/md/raid5.c 			     d++) {
d                8053 drivers/md/raid5.c 				struct md_rdev *rdev = conf->disks[d].rdev;
d                8056 drivers/md/raid5.c 				rdev = conf->disks[d].replacement;
d                 119 drivers/media/common/b2c2/flexcop-fe-tuner.c 	int i, par = 1, d;
d                 121 drivers/media/common/b2c2/flexcop-fe-tuner.c 		d = (data >> i) & 1;
d                 122 drivers/media/common/b2c2/flexcop-fe-tuner.c 		par ^= d;
d                 123 drivers/media/common/b2c2/flexcop-fe-tuner.c 		flexcop_diseqc_send_bit(fe, d);
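The flexcop-fe-tuner.c entries above (and the matching budget.c / budget-patch.c entries further down) show the DiSEqC byte transmitter keeping a running parity while it clocks out data bits: each bit is XORed into par before being sent. A standalone sketch of that pattern; the MSB-first loop bounds and the trailing parity bit are assumptions, since only the lines mentioning d appear in this listing, and send_bit() merely stands in for flexcop_diseqc_send_bit().

#include <stdint.h>

/* Sketch of the odd-parity byte transmit pattern above.  The loop header
 * and the trailing parity bit are assumptions; send_bit() is a stand-in. */
static void diseqc_send_byte(uint8_t data, void (*send_bit)(int bit))
{
	int i, par = 1, d;

	for (i = 7; i >= 0; i--) {
		d = (data >> i) & 1;
		par ^= d;		/* running parity over the data bits */
		send_bit(d);
	}
	send_bit(par);			/* assumed trailing parity bit */
}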
d                 450 drivers/media/common/siano/smsdvb-debugfs.c 	struct dentry *d;
d                 464 drivers/media/common/siano/smsdvb-debugfs.c 	d = debugfs_create_file("stats", S_IRUGO | S_IWUSR, client->debugfs,
d                 466 drivers/media/common/siano/smsdvb-debugfs.c 	if (!d) {
d                 505 drivers/media/common/siano/smsdvb-debugfs.c 	struct dentry *d;
d                 517 drivers/media/common/siano/smsdvb-debugfs.c 	d = debugfs_create_dir("smsdvb", usb_debug_root);
d                 518 drivers/media/common/siano/smsdvb-debugfs.c 	if (IS_ERR_OR_NULL(d)) {
d                 522 drivers/media/common/siano/smsdvb-debugfs.c 	smsdvb_debugfs_usb_root = d;
d                 103 drivers/media/dvb-core/dvb_demux.c static void dvb_dmx_memcopy(struct dvb_demux_feed *f, u8 *d, const u8 *s,
d                 106 drivers/media/dvb-core/dvb_demux.c 	memcpy(d, s, len);
d                 402 drivers/media/dvb-frontends/cx24116.c 	enum fe_delivery_system d, enum fe_modulation m, enum fe_code_rate f)
d                 409 drivers/media/dvb-frontends/cx24116.c 		if ((d == CX24116_MODFEC_MODES[i].delivery_system) &&
d                 947 drivers/media/dvb-frontends/cx24116.c 	struct dvb_diseqc_master_cmd *d)
d                 953 drivers/media/dvb-frontends/cx24116.c 	if (d->msg_len > sizeof(d->msg))
d                 959 drivers/media/dvb-frontends/cx24116.c 		for (i = 0 ; i < d->msg_len ;) {
d                 960 drivers/media/dvb-frontends/cx24116.c 			printk(KERN_INFO "0x%02x", d->msg[i]);
d                 961 drivers/media/dvb-frontends/cx24116.c 			if (++i < d->msg_len)
d                 968 drivers/media/dvb-frontends/cx24116.c 	for (i = 0; i < d->msg_len; i++)
d                 969 drivers/media/dvb-frontends/cx24116.c 		state->dsec_cmd.args[CX24116_DISEQC_MSGOFS + i] = d->msg[i];
d                 972 drivers/media/dvb-frontends/cx24116.c 	state->dsec_cmd.args[CX24116_DISEQC_MSGLEN] = d->msg_len;
d                1006 drivers/media/dvb-frontends/cx24116.c 		if (d->msg_len >= 4 && d->msg[2] == 0x38)
d                1008 drivers/media/dvb-frontends/cx24116.c 				((d->msg[3] & 4) >> 2);
d                 378 drivers/media/dvb-frontends/cx24117.c 	enum fe_delivery_system d, enum fe_modulation m, enum fe_code_rate f)
d                 387 drivers/media/dvb-frontends/cx24117.c 		if ((d == cx24117_modfec_modes[i].delivery_system) &&
d                1025 drivers/media/dvb-frontends/cx24117.c 	struct dvb_diseqc_master_cmd *d)
d                1033 drivers/media/dvb-frontends/cx24117.c 	for (i = 0; i < d->msg_len; i++)
d                1034 drivers/media/dvb-frontends/cx24117.c 		dev_dbg(&state->priv->i2c->dev, "0x%02x ", d->msg[i]);
d                1038 drivers/media/dvb-frontends/cx24117.c 	if (d->msg_len > sizeof(d->msg))
d                1042 drivers/media/dvb-frontends/cx24117.c 	for (i = 0; i < d->msg_len; i++)
d                1043 drivers/media/dvb-frontends/cx24117.c 		state->dsec_cmd.args[CX24117_DISEQC_MSGOFS + i] = d->msg[i];
d                1046 drivers/media/dvb-frontends/cx24117.c 	state->dsec_cmd.args[CX24117_DISEQC_MSGLEN] = d->msg_len;
d                1070 drivers/media/dvb-frontends/cx24117.c 	if (d->msg_len >= 4 && d->msg[2] == 0x38)
d                1072 drivers/media/dvb-frontends/cx24117.c 			((d->msg[3] & 4) >> 2);
d                 547 drivers/media/dvb-frontends/cx24120.c 				   struct dvb_diseqc_master_cmd *d)
d                 575 drivers/media/dvb-frontends/cx24120.c 	cmd.len = d->msg_len + 6;
d                 581 drivers/media/dvb-frontends/cx24120.c 	cmd.arg[5] = d->msg_len;
d                 583 drivers/media/dvb-frontends/cx24120.c 	memcpy(&cmd.arg[6], &d->msg, d->msg_len);
d                 529 drivers/media/dvb-frontends/dib9000.c 	u8 *d, b[2];
d                 563 drivers/media/dvb-frontends/dib9000.c 	d = (u8 *) data;
d                 566 drivers/media/dvb-frontends/dib9000.c 		*d++ = tmp >> 8;
d                 567 drivers/media/dvb-frontends/dib9000.c 		*d++ = tmp & 0xff;
d                 590 drivers/media/dvb-frontends/dib9000.c 	u16 *d = data;
d                 629 drivers/media/dvb-frontends/dib9000.c 			dprintk("%04x\n", d[i]);
d                2209 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_MCRECORD(d)        ((d)->my_common_attr->mcversion)
d                2210 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_MIRRORFREQSPECT(d) ((d)->my_common_attr->mirror_freq_spect)
d                2211 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_CURRENTPOWERMODE(d)((d)->my_common_attr->current_power_mode)
d                2212 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_ISOPENED(d)        ((d)->my_common_attr->is_opened)
d                2213 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_USEBOOTLOADER(d)   ((d)->my_common_attr->use_bootloader)
d                2214 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_CURRENTSTANDARD(d) ((d)->my_common_attr->current_standard)
d                2215 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_PREVSTANDARD(d)    ((d)->my_common_attr->prev_standard)
d                2216 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_CACHESTANDARD(d)   ((d)->my_common_attr->di_cache_standard)
d                2217 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_CURRENTCHANNEL(d)  ((d)->my_common_attr->current_channel)
d                2218 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_MICROCODE(d)       ((d)->my_common_attr->microcode)
d                2219 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_VERIFYMICROCODE(d) ((d)->my_common_attr->verify_microcode)
d                2220 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_CAPABILITIES(d)    ((d)->my_common_attr->capabilities)
d                2221 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_PRODUCTID(d)       ((d)->my_common_attr->product_id)
d                2222 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_INTERMEDIATEFREQ(d) ((d)->my_common_attr->intermediate_freq)
d                2223 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_SYSCLOCKFREQ(d)     ((d)->my_common_attr->sys_clock_freq)
d                2224 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_TUNERRFAGCPOL(d)   ((d)->my_common_attr->tuner_rf_agc_pol)
d                2225 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_TUNERIFAGCPOL(d)    ((d)->my_common_attr->tuner_if_agc_pol)
d                2226 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_TUNERSLOWMODE(d)    ((d)->my_common_attr->tuner_slow_mode)
d                2227 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_TUNERSPORTNR(d)     ((d)->my_common_attr->tuner_port_nr)
d                2228 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_I2CADDR(d)         ((d)->my_i2c_dev_addr->i2c_addr)
d                2229 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_ATTR_I2CDEVID(d)        ((d)->my_i2c_dev_addr->i2c_dev_id)
d                2273 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_SET_PRESET(d, x) \
d                2274 drivers/media/dvb-frontends/drx39xyj/drx_driver.h 	DRX_ACCESSMACRO_SET((d), (x), DRX_XS_CFG_PRESET, char*)
d                2275 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_GET_PRESET(d, x) \
d                2276 drivers/media/dvb-frontends/drx39xyj/drx_driver.h 	DRX_ACCESSMACRO_GET((d), (x), DRX_XS_CFG_PRESET, char*, "ERROR")
d                2278 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_SET_AUD_BTSC_DETECT(d, x) DRX_ACCESSMACRO_SET((d), (x), \
d                2280 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_GET_AUD_BTSC_DETECT(d, x) DRX_ACCESSMACRO_GET((d), (x), \
d                2283 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_SET_QAM_LOCKRANGE(d, x) DRX_ACCESSMACRO_SET((d), (x), \
d                2285 drivers/media/dvb-frontends/drx39xyj/drx_driver.h #define DRX_GET_QAM_LOCKRANGE(d, x) DRX_ACCESSMACRO_GET((d), (x), \
d                1152 drivers/media/dvb-frontends/drx39xyj/drxj.c 	u32 d = 0;
d                1186 drivers/media/dvb-frontends/drx39xyj/drxj.c 	d = x & ((((u32) 1) << (scale - index_width)) - 1);
d                1189 drivers/media/dvb-frontends/drx39xyj/drxj.c 	    ((d * (log2lut[i + 1] - log2lut[i])) >> (scale - index_width));
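The drxj.c entries at source lines 1152-1189 belong to a fixed-point logarithm: d collects the low bits of x that fall below the lookup table's resolution, and the result is log2lut[i] plus a linear interpolation toward log2lut[i+1], scaled back down by the same number of bits. A standalone sketch of just that interpolation step, with lut, scale and index_width as illustrative parameters (scale is assumed to be larger than index_width).

#include <stdint.h>

/* Table-plus-linear-interpolation step as in the drxj.c entries above:
 * i indexes the entry at or below x, and d is the remainder below the
 * table resolution that gets interpolated away. */
static uint32_t lut_interpolate(const uint32_t *lut, uint32_t i, uint32_t x,
				uint32_t scale, uint32_t index_width)
{
	uint32_t d = x & ((((uint32_t)1) << (scale - index_width)) - 1);

	return lut[i] +
	       ((d * (lut[i + 1] - lut[i])) >> (scale - index_width));
}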
d                 550 drivers/media/dvb-frontends/drx39xyj/drxj.h #define DRXJ_ATTR_BTSC_DETECT(d)                       \
d                 551 drivers/media/dvb-frontends/drx39xyj/drxj.h 			(((struct drxj_data *)(d)->my_ext_attr)->aud_data.btsc_detect)
d                  24 drivers/media/dvb-frontends/drxd_firm.c #define WR16(a, d) ADDRESS(a), LENGTH(1), DATA16(d)
d                 691 drivers/media/dvb-frontends/ds3000.c 				struct dvb_diseqc_master_cmd *d)
d                 699 drivers/media/dvb-frontends/ds3000.c 	for (i = 0 ; i < d->msg_len;) {
d                 700 drivers/media/dvb-frontends/ds3000.c 		dprintk("0x%02x", d->msg[i]);
d                 701 drivers/media/dvb-frontends/ds3000.c 		if (++i < d->msg_len)
d                 711 drivers/media/dvb-frontends/ds3000.c 	for (i = 0; i < d->msg_len; i++)
d                 712 drivers/media/dvb-frontends/ds3000.c 		ds3000_writereg(state, 0xa3 + i, d->msg[i]);
d                 720 drivers/media/dvb-frontends/ds3000.c 	data |= ((d->msg_len - 1) << 3) | 0x07;
d                 294 drivers/media/dvb-frontends/mn88443x.c 		u32 cnr = 0, x, y, d;
d                 306 drivers/media/dvb-frontends/mn88443x.c 		d = (y << 15) - x * x;
d                 307 drivers/media/dvb-frontends/mn88443x.c 		if (d > 0) {
d                 310 drivers/media/dvb-frontends/mn88443x.c 			d_3 = div_u64(16 * x * x, d);
d                 318 drivers/media/dvb-frontends/mn88443x.c 			tmp = (s64)2 * intlog10(x) - intlog10(abs(d)) - d_3
d                  94 drivers/media/dvb-frontends/mxl5xx.c static void convert_endian(u8 flag, u32 size, u8 *d)
d                 101 drivers/media/dvb-frontends/mxl5xx.c 		d[i + 0] ^= d[i + 3];
d                 102 drivers/media/dvb-frontends/mxl5xx.c 		d[i + 3] ^= d[i + 0];
d                 103 drivers/media/dvb-frontends/mxl5xx.c 		d[i + 0] ^= d[i + 3];
d                 105 drivers/media/dvb-frontends/mxl5xx.c 		d[i + 1] ^= d[i + 2];
d                 106 drivers/media/dvb-frontends/mxl5xx.c 		d[i + 2] ^= d[i + 1];
d                 107 drivers/media/dvb-frontends/mxl5xx.c 		d[i + 1] ^= d[i + 2];
d                 116 drivers/media/dvb-frontends/mxl5xx.c 		d[i + 0] ^= d[i + 1];
d                 117 drivers/media/dvb-frontends/mxl5xx.c 		d[i + 1] ^= d[i + 0];
d                 118 drivers/media/dvb-frontends/mxl5xx.c 		d[i + 0] ^= d[i + 1];
d                 122 drivers/media/dvb-frontends/mxl5xx.c 		d[i + 0] ^= d[i + 2];
d                 123 drivers/media/dvb-frontends/mxl5xx.c 		d[i + 2] ^= d[i + 0];
d                 124 drivers/media/dvb-frontends/mxl5xx.c 		d[i + 0] ^= d[i + 2];
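The mxl5xx.c convert_endian() entries above reverse byte order in place with the classic three-XOR swap, choosing which byte pairs to swap from the word-size flag. A standalone sketch of the 32-bit case (bytes 0<->3 and 1<->2 within each 4-byte group); size is assumed here to be a multiple of 4.

#include <stdint.h>

/* In-place byte reversal with three XORs, 32-bit case, mirroring the
 * mxl5xx.c lines above. */
static void swap32_endian(uint8_t *d, uint32_t size)
{
	uint32_t i;

	for (i = 0; i < size; i += 4) {
		/* swap d[i] and d[i+3] without a temporary */
		d[i + 0] ^= d[i + 3];
		d[i + 3] ^= d[i + 0];
		d[i + 0] ^= d[i + 3];
		/* swap d[i+1] and d[i+2] */
		d[i + 1] ^= d[i + 2];
		d[i + 2] ^= d[i + 1];
		d[i + 1] ^= d[i + 2];
	}
}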
d                 849 drivers/media/dvb-frontends/rtl2832_sdr.c 	struct dvb_usb_device *d = pdata->dvb_usb_device;
d                 860 drivers/media/dvb-frontends/rtl2832_sdr.c 	if (d->props->power_ctrl)
d                 861 drivers/media/dvb-frontends/rtl2832_sdr.c 		d->props->power_ctrl(d, 1);
d                 864 drivers/media/dvb-frontends/rtl2832_sdr.c 	if (d->props->frontend_ctrl)
d                 865 drivers/media/dvb-frontends/rtl2832_sdr.c 		d->props->frontend_ctrl(pdata->dvb_frontend, 1);
d                 910 drivers/media/dvb-frontends/rtl2832_sdr.c 	struct dvb_usb_device *d = pdata->dvb_usb_device;
d                 931 drivers/media/dvb-frontends/rtl2832_sdr.c 	if (d->props->frontend_ctrl)
d                 932 drivers/media/dvb-frontends/rtl2832_sdr.c 		d->props->frontend_ctrl(pdata->dvb_frontend, 0);
d                 934 drivers/media/dvb-frontends/rtl2832_sdr.c 	if (d->props->power_ctrl)
d                 935 drivers/media/dvb-frontends/rtl2832_sdr.c 		d->props->power_ctrl(d, 0);
d                 287 drivers/media/dvb-frontends/sp887x.c static void divide (int n, int d, int *quotient_i, int *quotient_f)
d                 291 drivers/media/dvb-frontends/sp887x.c 	r = (n % d) << 8;
d                 292 drivers/media/dvb-frontends/sp887x.c 	q = (r / d);
d                 298 drivers/media/dvb-frontends/sp887x.c 		r = (r % d) << 8;
d                 299 drivers/media/dvb-frontends/sp887x.c 		q = (q << 8) | (r / d);
d                 300 drivers/media/dvb-frontends/sp887x.c 		r = (r % d) << 8;
d                 301 drivers/media/dvb-frontends/sp887x.c 		*quotient_f = (q << 8) | (r / d);
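The sp887x.c divide() entries show a schoolbook fixed-point division: after the integer quotient, the remainder is shifted up 8 bits and divided again, three times over, building 24 fractional bits one byte per step. A standalone illustration under that reading; it is not a reconstruction of the full kernel function, and it assumes n >= 0 and a small positive d so the shifted remainders fit in 32 bits.

#include <stdint.h>

/* Fixed-point long division as suggested by the sp887x.c lines above:
 * integer part first, then three shift-and-divide steps for the fraction. */
static void divide_fixed(int n, int d, int *quotient_i, uint32_t *quotient_f)
{
	uint32_t q, r;

	*quotient_i = n / d;

	r = (n % d) << 8;
	q = r / d;			/* fraction bits 23..16 */
	r = (r % d) << 8;
	q = (q << 8) | (r / d);		/* fraction bits 15..8 */
	r = (r % d) << 8;
	*quotient_f = (q << 8) | (r / d);	/* fraction bits 7..0 */
}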
d                  15 drivers/media/dvb-frontends/stb0899_algo.c static inline u32 stb0899_do_div(u64 n, u32 d)
d                  19 drivers/media/dvb-frontends/stb0899_algo.c 	do_div(n, d);
d                  47 drivers/media/dvb-frontends/stb0899_priv.h #define MAKEWORD32(a, b, c, d)			(((a) << 24) | ((b) << 16) | ((c) << 8) | (d))
d                 508 drivers/media/dvb-frontends/stv0900_sw.c 		d = demod;
d                 520 drivers/media/dvb-frontends/stv0900_sw.c 	srate = intp->symbol_rate[d];
d                 521 drivers/media/dvb-frontends/stv0900_sw.c 	search_range = intp->srch_range[d];
d                 528 drivers/media/dvb-frontends/stv0900_sw.c 	lock = stv0900_get_demod_lock(intp, d, locktimeout);
d                 534 drivers/media/dvb-frontends/stv0900_sw.c 		if (stv0900_check_timing_lock(intp, d) == TRUE) {
d                 537 drivers/media/dvb-frontends/stv0900_sw.c 			lock = stv0900_get_demod_lock(intp, d, demod_timeout);
d                 582 drivers/media/dvb-frontends/stv0900_sw.c 		tuner_freq = intp->freq[d];
d                 583 drivers/media/dvb-frontends/stv0900_sw.c 		intp->bw[d] = stv0900_carrier_width(intp->symbol_rate[d],
d                 584 drivers/media/dvb-frontends/stv0900_sw.c 				intp->rolloff) + intp->symbol_rate[d];
d                 595 drivers/media/dvb-frontends/stv0900_sw.c 			if (intp->tuner_type[d] == 3)
d                 597 drivers/media/dvb-frontends/stv0900_sw.c 						intp->bw[d], demod);
d                 599 drivers/media/dvb-frontends/stv0900_sw.c 				stv0900_set_tuner(fe, tuner_freq, intp->bw[d]);
d                 615 drivers/media/dvb-frontends/stv0900_sw.c 		lock = stv0900_get_demod_lock(intp, d, timeout);
d                1181 drivers/media/dvb-frontends/stv0900_sw.c 		d = demod;
d                1186 drivers/media/dvb-frontends/stv0900_sw.c 	if (intp->srch_algo[d] == STV0900_BLIND_SEARCH) {
d                1198 drivers/media/dvb-frontends/stv0900_sw.c 	result->standard = stv0900_get_standard(fe, d);
d                1200 drivers/media/dvb-frontends/stv0900_sw.c 		result->frequency = stv0900_get_freq_auto(intp, d);
d                1204 drivers/media/dvb-frontends/stv0900_sw.c 	offsetFreq = stv0900_get_carr_freq(intp, intp->mclk, d) / 1000;
d                1206 drivers/media/dvb-frontends/stv0900_sw.c 	result->symbol_rate = stv0900_get_symbol_rate(intp, intp->mclk, d);
d                1207 drivers/media/dvb-frontends/stv0900_sw.c 	srate_offset = stv0900_get_timing_offst(intp, result->symbol_rate, d);
d                1209 drivers/media/dvb-frontends/stv0900_sw.c 	result->fec = stv0900_get_vit_fec(intp, d);
d                1240 drivers/media/dvb-frontends/stv0900_sw.c 	if ((intp->srch_algo[d] == STV0900_BLIND_SEARCH) ||
d                1241 drivers/media/dvb-frontends/stv0900_sw.c 				(intp->symbol_rate[d] < 10000000)) {
d                1242 drivers/media/dvb-frontends/stv0900_sw.c 		offsetFreq = result->frequency - intp->freq[d];
d                1244 drivers/media/dvb-frontends/stv0900_sw.c 			intp->freq[d] = stv0900_get_freq_auto(intp, d);
d                1246 drivers/media/dvb-frontends/stv0900_sw.c 			intp->freq[d] = stv0900_get_tuner_freq(fe);
d                1248 drivers/media/dvb-frontends/stv0900_sw.c 		if (abs(offsetFreq) <= ((intp->srch_range[d] / 2000) + 500))
d                1255 drivers/media/dvb-frontends/stv0900_sw.c 	} else if (abs(offsetFreq) <= ((intp->srch_range[d] / 2000) + 500))
d                1340 drivers/media/dvb-frontends/stv0910.c 	u32 n, d;
d                1342 drivers/media/dvb-frontends/stv0910.c 	get_bit_error_rate(state, &n, &d);
d                1347 drivers/media/dvb-frontends/stv0910.c 	p->pre_bit_count.stat[0].uvalue = d;
d                 332 drivers/media/dvb-frontends/stv6111.c 	u8 d[12];
d                 334 drivers/media/dvb-frontends/stv6111.c 	memcpy(&d[1], &state->reg[reg], len);
d                 335 drivers/media/dvb-frontends/stv6111.c 	d[0] = reg;
d                 336 drivers/media/dvb-frontends/stv6111.c 	return i2c_write(state->i2c, state->adr, d, len + 1);
d                 341 drivers/media/dvb-frontends/stv6111.c 	u8 d[2] = {reg, val};
d                 343 drivers/media/dvb-frontends/stv6111.c 	return i2c_write(state->i2c, state->adr, d, 2);
d                 432 drivers/media/dvb-frontends/tda10023.c 	u8 a,b,c,d;
d                 436 drivers/media/dvb-frontends/tda10023.c 	d= tda10023_readreg (state, 0x77);
d                 437 drivers/media/dvb-frontends/tda10023.c 	*ucblocks = a | (b<<8)|(c<<16)|(d<<24);
d                1860 drivers/media/i2c/adv7604.c #define _SEL(a,b,c,d,e,f)	{ \
d                1862 drivers/media/i2c/adv7604.c 	ADV76XX_OP_CH_SEL_##d, ADV76XX_OP_CH_SEL_##e, ADV76XX_OP_CH_SEL_##f }
d                2042 drivers/media/i2c/adv7842.c #define _SEL(a, b, c, d, e, f)	{ \
d                2044 drivers/media/i2c/adv7842.c 	ADV7842_OP_CH_SEL_##d, ADV7842_OP_CH_SEL_##e, ADV7842_OP_CH_SEL_##f }
d                 128 drivers/media/i2c/cx25840/cx25840-ir.c static inline u16 count_to_clock_divider(unsigned int d)
d                 130 drivers/media/i2c/cx25840/cx25840-ir.c 	if (d > RXCLK_RCD + 1)
d                 131 drivers/media/i2c/cx25840/cx25840-ir.c 		d = RXCLK_RCD;
d                 132 drivers/media/i2c/cx25840/cx25840-ir.c 	else if (d < 2)
d                 133 drivers/media/i2c/cx25840/cx25840-ir.c 		d = 1;
d                 135 drivers/media/i2c/cx25840/cx25840-ir.c 		d--;
d                 136 drivers/media/i2c/cx25840/cx25840-ir.c 	return (u16) d;
d                 184 drivers/media/i2c/cx25840/cx25840-ir.c static inline u16 count_to_lpf_count(unsigned int d)
d                 186 drivers/media/i2c/cx25840/cx25840-ir.c 	if (d > FILTR_LPF)
d                 187 drivers/media/i2c/cx25840/cx25840-ir.c 		d = FILTR_LPF;
d                 188 drivers/media/i2c/cx25840/cx25840-ir.c 	else if (d < 4)
d                 189 drivers/media/i2c/cx25840/cx25840-ir.c 		d = 0;
d                 190 drivers/media/i2c/cx25840/cx25840-ir.c 	return (u16) d;
d                 247 drivers/media/i2c/cx25840/cx25840-ir.c 	u32 d;
d                 255 drivers/media/i2c/cx25840/cx25840-ir.c 	d = (1 << 2) * ((u32) divider + 1) * 1000; /* millicycles/count */
d                 256 drivers/media/i2c/cx25840/cx25840-ir.c 	rem = do_div(n, d);
d                 257 drivers/media/i2c/cx25840/cx25840-ir.c 	if (rem >= d / 2)
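The cx25840-ir.c entries above (duplicated later for cx23888-ir.c) clamp a computed count into the range a hardware register field can hold, storing the clock divider as count minus one. A standalone sketch with MAX_RCD as an illustrative stand-in for RXCLK_RCD; the numeric limit here is an assumption, not the chip's actual field width.

#include <stdint.h>

/* Clamp-and-bias conversion as in the cx25840/cx23888 IR lines above.
 * MAX_RCD is illustrative; the register stores the divider as (count - 1),
 * so inputs 2..MAX_RCD+1 map to 1..MAX_RCD. */
#define MAX_RCD 0xffff

static uint16_t count_to_clock_divider(unsigned int d)
{
	if (d > MAX_RCD + 1)
		d = MAX_RCD;
	else if (d < 2)
		d = 1;
	else
		d--;
	return (uint16_t)d;
}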
d                 547 drivers/media/i2c/ir-kbd-i2c.c 		u16 d = get_unaligned_be16(&code_block->space[i]);
d                 549 drivers/media/i2c/ir-kbd-i2c.c 		if (get_unaligned_be16(&code_block->space[s]) > d)
d                 356 drivers/media/i2c/s5k5baf.c 	u16 *d, i, *end;
d                 373 drivers/media/i2c/s5k5baf.c 	d = devm_kcalloc(dev, count, sizeof(u16), GFP_KERNEL);
d                 374 drivers/media/i2c/s5k5baf.c 	if (!d)
d                 378 drivers/media/i2c/s5k5baf.c 		d[i] = le16_to_cpu(data[i]);
d                 380 drivers/media/i2c/s5k5baf.c 	f = (struct s5k5baf_fw *)d;
d                 386 drivers/media/i2c/s5k5baf.c 	end = d + count;
d                 387 drivers/media/i2c/s5k5baf.c 	d += 1 + 2 * f->count;
d                 390 drivers/media/i2c/s5k5baf.c 		if (f->seq[i].offset + d <= end)
d                 850 drivers/media/i2c/s5k5baf.c 			    const struct v4l2_rect *d)
d                 852 drivers/media/i2c/s5k5baf.c 	r->left = v->left * n->width / d->width;
d                 853 drivers/media/i2c/s5k5baf.c 	r->top = v->top * n->height / d->height;
d                 854 drivers/media/i2c/s5k5baf.c 	r->width = v->width * n->width / d->width;
d                 855 drivers/media/i2c/s5k5baf.c 	r->height = v->height * n->height / d->height;
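The s5k5baf.c entries at source lines 850-855 rescale a rectangle from the coordinate space of reference rectangle d into that of n, multiplying before dividing so the integer ratio keeps its precision. A standalone sketch with a local rect type standing in for struct v4l2_rect.

/* Rectangle rescaling as in the s5k5baf.c lines above; struct rect is a
 * local stand-in for struct v4l2_rect. */
struct rect {
	int left, top, width, height;
};

static void rescale_rect(struct rect *r, const struct rect *v,
			 const struct rect *n, const struct rect *d)
{
	/* multiply first, then divide, to avoid losing the fraction */
	r->left   = v->left   * n->width  / d->width;
	r->top    = v->top    * n->height / d->height;
	r->width  = v->width  * n->width  / d->width;
	r->height = v->height * n->height / d->height;
}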
d                1576 drivers/media/i2c/tda1997x.c static irqreturn_t tda1997x_isr_thread(int irq, void *d)
d                1578 drivers/media/i2c/tda1997x.c 	struct tda1997x_state *state = d;
d                 244 drivers/media/pci/cobalt/cobalt-cpld.c 		u32 d;
d                 249 drivers/media/pci/cobalt/cobalt-cpld.c 		div_u64_rem((dco << 28) + f_xtal / 2, f_xtal, &d);
d                 250 drivers/media/pci/cobalt/cobalt-cpld.c 		if (d < delta) {
d                 253 drivers/media/pci/cobalt/cobalt-cpld.c 			delta = d;
d                 154 drivers/media/pci/cobalt/cobalt-omnitek.c 	struct sg_dma_descriptor *d = (struct sg_dma_descriptor *)desc->virt;
d                 183 drivers/media/pci/cobalt/cobalt-omnitek.c 		d->pci_l = addr & 0xffffffff;
d                 186 drivers/media/pci/cobalt/cobalt-omnitek.c 		d->pci_h = (u64)addr >> 32;
d                 189 drivers/media/pci/cobalt/cobalt-omnitek.c 		d->local = 0;
d                 190 drivers/media/pci/cobalt/cobalt-omnitek.c 		d->reserved0 = 0;
d                 198 drivers/media/pci/cobalt/cobalt-omnitek.c 				d->local = 0x11111111;
d                 203 drivers/media/pci/cobalt/cobalt-omnitek.c 				d->bytes = (bytes / 2) & ~3;
d                 204 drivers/media/pci/cobalt/cobalt-omnitek.c 				d->reserved1 = 0;
d                 205 drivers/media/pci/cobalt/cobalt-omnitek.c 				size -= d->bytes;
d                 206 drivers/media/pci/cobalt/cobalt-omnitek.c 				copied += d->bytes;
d                 207 drivers/media/pci/cobalt/cobalt-omnitek.c 				offset += d->bytes;
d                 208 drivers/media/pci/cobalt/cobalt-omnitek.c 				addr += d->bytes;
d                 210 drivers/media/pci/cobalt/cobalt-omnitek.c 				d->next_h = (u32)((u64)next >> 32);
d                 211 drivers/media/pci/cobalt/cobalt-omnitek.c 				d->next_l = (u32)next |
d                 213 drivers/media/pci/cobalt/cobalt-omnitek.c 				bytes -= d->bytes;
d                 214 drivers/media/pci/cobalt/cobalt-omnitek.c 				d++;
d                 216 drivers/media/pci/cobalt/cobalt-omnitek.c 				d->pci_l = addr & 0xffffffff;
d                 220 drivers/media/pci/cobalt/cobalt-omnitek.c 				d->pci_h = (u64)addr >> 32;
d                 223 drivers/media/pci/cobalt/cobalt-omnitek.c 				d->local = 0;
d                 224 drivers/media/pci/cobalt/cobalt-omnitek.c 				d->reserved0 = 0;
d                 228 drivers/media/pci/cobalt/cobalt-omnitek.c 		d->bytes = bytes;
d                 229 drivers/media/pci/cobalt/cobalt-omnitek.c 		d->reserved1 = 0;
d                 256 drivers/media/pci/cobalt/cobalt-omnitek.c 			d->next_h = (u32)((u64)desc->bus >> 32);
d                 257 drivers/media/pci/cobalt/cobalt-omnitek.c 			d->next_l = (u32)desc->bus |
d                 260 drivers/media/pci/cobalt/cobalt-omnitek.c 				d->local = 0x22222222;
d                 261 drivers/media/pci/cobalt/cobalt-omnitek.c 			desc->last_desc_virt = d;
d                 263 drivers/media/pci/cobalt/cobalt-omnitek.c 			d->next_h = (u32)((u64)next >> 32);
d                 264 drivers/media/pci/cobalt/cobalt-omnitek.c 			d->next_l = (u32)next | (to_pci ? WRITE_TO_PCI : 0);
d                 266 drivers/media/pci/cobalt/cobalt-omnitek.c 		d++;
d                 274 drivers/media/pci/cobalt/cobalt-omnitek.c 	struct sg_dma_descriptor *d = this->last_desc_virt;
d                 275 drivers/media/pci/cobalt/cobalt-omnitek.c 	u32 direction = d->next_l & WRITE_TO_PCI;
d                 278 drivers/media/pci/cobalt/cobalt-omnitek.c 		d->next_h = 0;
d                 279 drivers/media/pci/cobalt/cobalt-omnitek.c 		d->next_l = direction | INTERRUPT_ENABLE | END_OF_CHAIN;
d                 281 drivers/media/pci/cobalt/cobalt-omnitek.c 		d->next_h = (u32)((u64)next->bus >> 32);
d                 282 drivers/media/pci/cobalt/cobalt-omnitek.c 		d->next_l = (u32)next->bus | direction | INTERRUPT_ENABLE;
d                 304 drivers/media/pci/cobalt/cobalt-omnitek.c 	struct sg_dma_descriptor *d = desc->last_desc_virt;
d                 306 drivers/media/pci/cobalt/cobalt-omnitek.c 	d->next_l |= INTERRUPT_ENABLE;
d                 311 drivers/media/pci/cobalt/cobalt-omnitek.c 	struct sg_dma_descriptor *d = desc->last_desc_virt;
d                 313 drivers/media/pci/cobalt/cobalt-omnitek.c 	d->next_l &= ~INTERRUPT_ENABLE;
d                 318 drivers/media/pci/cobalt/cobalt-omnitek.c 	struct sg_dma_descriptor *d = desc->last_desc_virt;
d                 320 drivers/media/pci/cobalt/cobalt-omnitek.c 	d->next_h = (u32)((u64)desc->bus >> 32);
d                 321 drivers/media/pci/cobalt/cobalt-omnitek.c 	d->next_l = (u32)desc->bus | (d->next_l & DESCRIPTOR_FLAG_MSK);
d                 326 drivers/media/pci/cobalt/cobalt-omnitek.c 	struct sg_dma_descriptor *d = desc->last_desc_virt;
d                 328 drivers/media/pci/cobalt/cobalt-omnitek.c 	d->next_l |= END_OF_CHAIN;
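The cobalt-omnitek.c entries above build and patch a chain of scatter-gather DMA descriptors: each descriptor's next pointer is split into next_h/next_l, and control bits (WRITE_TO_PCI, INTERRUPT_ENABLE, END_OF_CHAIN) live in the low bits of next_l, which works because descriptors sit at bus addresses whose low bits are zero. A standalone sketch of that linking scheme; the struct layout and the flag values are illustrative, not the hardware's actual encoding.

#include <stdint.h>

/* Illustrative flag values and layout only; the real ones live in the
 * cobalt driver headers. */
#define WRITE_TO_PCI		0x1
#define INTERRUPT_ENABLE	0x2
#define END_OF_CHAIN		0x4
#define DESCRIPTOR_FLAG_MSK	(WRITE_TO_PCI | INTERRUPT_ENABLE | END_OF_CHAIN)

struct sg_dma_desc {
	uint32_t pci_l, pci_h;		/* bus address of the data buffer */
	uint32_t bytes;			/* transfer length */
	uint32_t next_l, next_h;	/* next descriptor's bus address + flags */
};

/* Point d at the descriptor located at next_bus, keeping the given flags
 * in the low bits (assumes next_bus is aligned so those bits are free). */
static void desc_link(struct sg_dma_desc *d, uint64_t next_bus, uint32_t flags)
{
	d->next_h = (uint32_t)(next_bus >> 32);
	d->next_l = (uint32_t)next_bus | (flags & DESCRIPTOR_FLAG_MSK);
}

/* Terminate the chain at d: keep the direction bit, raise an interrupt and
 * mark end-of-chain, mirroring the pattern visible in the listing. */
static void desc_terminate(struct sg_dma_desc *d)
{
	uint32_t direction = d->next_l & WRITE_TO_PCI;

	d->next_h = 0;
	d->next_l = direction | INTERRUPT_ENABLE | END_OF_CHAIN;
}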
d                 167 drivers/media/pci/cx23885/cx23888-ir.c static inline u16 count_to_clock_divider(unsigned int d)
d                 169 drivers/media/pci/cx23885/cx23888-ir.c 	if (d > RXCLK_RCD + 1)
d                 170 drivers/media/pci/cx23885/cx23888-ir.c 		d = RXCLK_RCD;
d                 171 drivers/media/pci/cx23885/cx23888-ir.c 	else if (d < 2)
d                 172 drivers/media/pci/cx23885/cx23888-ir.c 		d = 1;
d                 174 drivers/media/pci/cx23885/cx23888-ir.c 		d--;
d                 175 drivers/media/pci/cx23885/cx23888-ir.c 	return (u16) d;
d                 223 drivers/media/pci/cx23885/cx23888-ir.c static inline u16 count_to_lpf_count(unsigned int d)
d                 225 drivers/media/pci/cx23885/cx23888-ir.c 	if (d > FILTR_LPF)
d                 226 drivers/media/pci/cx23885/cx23888-ir.c 		d = FILTR_LPF;
d                 227 drivers/media/pci/cx23885/cx23888-ir.c 	else if (d < 4)
d                 228 drivers/media/pci/cx23885/cx23888-ir.c 		d = 0;
d                 229 drivers/media/pci/cx23885/cx23888-ir.c 	return (u16) d;
d                 501 drivers/media/pci/cx88/cx88-mpeg.c 	struct cx8802_driver *d;
d                 503 drivers/media/pci/cx88/cx88-mpeg.c 	list_for_each_entry(d, &dev->drvlist, drvlist)
d                 504 drivers/media/pci/cx88/cx88-mpeg.c 		if (d->type_id == btype)
d                 505 drivers/media/pci/cx88/cx88-mpeg.c 			return d;
d                 655 drivers/media/pci/cx88/cx88-mpeg.c 	struct cx8802_driver *d, *dtmp;
d                 673 drivers/media/pci/cx88/cx88-mpeg.c 		list_for_each_entry_safe(d, dtmp, &dev->drvlist, drvlist) {
d                 675 drivers/media/pci/cx88/cx88-mpeg.c 			if (d->type_id != drv->type_id)
d                 678 drivers/media/pci/cx88/cx88-mpeg.c 			err = d->remove(d);
d                 680 drivers/media/pci/cx88/cx88-mpeg.c 				list_del(&d->drvlist);
d                 681 drivers/media/pci/cx88/cx88-mpeg.c 				kfree(d);
d                 593 drivers/media/pci/cx88/cx88.h #define cx_wait(d) { if (need_resched()) schedule(); else udelay(d); }
d                2778 drivers/media/pci/ddbridge/ddbridge-core.c static ssize_t fan_store(struct device *device, struct device_attribute *d,
d                 679 drivers/media/pci/intel/ipu3/ipu3-cio2.c 		unsigned int d;
d                 681 drivers/media/pci/intel/ipu3/ipu3-cio2.c 		for (d = 0; d < CIO2_NUM_DMA_CHAN; d++)
d                 682 drivers/media/pci/intel/ipu3/ipu3-cio2.c 			if (int_status & CIO2_INT_IOC(d)) {
d                 683 drivers/media/pci/intel/ipu3/ipu3-cio2.c 				clr |= CIO2_INT_IOC(d);
d                 684 drivers/media/pci/intel/ipu3/ipu3-cio2.c 				cio2_buffer_done(cio2, d);
d                 692 drivers/media/pci/intel/ipu3/ipu3-cio2.c 		unsigned int d;
d                 694 drivers/media/pci/intel/ipu3/ipu3-cio2.c 		for (d = 0; d < CIO2_NUM_DMA_CHAN; d++)
d                 695 drivers/media/pci/intel/ipu3/ipu3-cio2.c 			if (int_status & CIO2_INT_IOS_IOLN(d)) {
d                 696 drivers/media/pci/intel/ipu3/ipu3-cio2.c 				clr |= CIO2_INT_IOS_IOLN(d);
d                 697 drivers/media/pci/intel/ipu3/ipu3-cio2.c 				if (d == CIO2_DMA_CHAN)
d                1932 drivers/media/pci/intel/ipu3/ipu3-cio2.c 			u8 *d = ptr + elem_size * (arr[1].begin + i);
d                1937 drivers/media/pci/intel/ipu3/ipu3-cio2.c 				swap(d[j], s[j]);
d                  84 drivers/media/pci/ivtv/ivtv-vbi.c 				const struct v4l2_sliced_vbi_data *d,
d                  89 drivers/media/pci/ivtv/ivtv-vbi.c 	if (d->id == V4L2_SLICED_CAPTION_525 && d->line == 21) {
d                  90 drivers/media/pci/ivtv/ivtv-vbi.c 		if (d->field) {
d                  91 drivers/media/pci/ivtv/ivtv-vbi.c 			cc->even[0] = d->data[0];
d                  92 drivers/media/pci/ivtv/ivtv-vbi.c 			cc->even[1] = d->data[1];
d                  94 drivers/media/pci/ivtv/ivtv-vbi.c 			cc->odd[0] = d->data[0];
d                  95 drivers/media/pci/ivtv/ivtv-vbi.c 			cc->odd[1] = d->data[1];
d                  98 drivers/media/pci/ivtv/ivtv-vbi.c 	} else if (d->id == V4L2_SLICED_VPS && d->line == 16 && d->field == 0) {
d                 101 drivers/media/pci/ivtv/ivtv-vbi.c 		vps.data[0] = d->data[2];
d                 102 drivers/media/pci/ivtv/ivtv-vbi.c 		vps.data[1] = d->data[8];
d                 103 drivers/media/pci/ivtv/ivtv-vbi.c 		vps.data[2] = d->data[9];
d                 104 drivers/media/pci/ivtv/ivtv-vbi.c 		vps.data[3] = d->data[10];
d                 105 drivers/media/pci/ivtv/ivtv-vbi.c 		vps.data[4] = d->data[11];
d                 110 drivers/media/pci/ivtv/ivtv-vbi.c 	} else if (d->id == V4L2_SLICED_WSS_625 &&
d                 111 drivers/media/pci/ivtv/ivtv-vbi.c 		   d->line == 23 && d->field == 0) {
d                 112 drivers/media/pci/ivtv/ivtv-vbi.c 		int wss = d->data[0] | d->data[1] << 8;
d                 156 drivers/media/pci/ivtv/ivtv-vbi.c 	struct v4l2_sliced_vbi_data d;
d                 160 drivers/media/pci/ivtv/ivtv-vbi.c 		if (copy_from_user(&d, sliced + i,
d                 165 drivers/media/pci/ivtv/ivtv-vbi.c 		ivtv_write_vbi_line(itv, &d, &cc, &found_cc);
d                 153 drivers/media/pci/pt3/pt3_dma.c 	struct xfer_desc *d;
d                 183 drivers/media/pci/pt3/pt3_dma.c 			d = &adap->desc_buf[i - 1].descs[DESCS_IN_PAGE - 1];
d                 184 drivers/media/pci/pt3/pt3_dma.c 			d->next_l = lower_32_bits(desc_addr);
d                 185 drivers/media/pci/pt3/pt3_dma.c 			d->next_h = upper_32_bits(desc_addr);
d                 189 drivers/media/pci/pt3/pt3_dma.c 			d = &adap->desc_buf[i].descs[j];
d                 190 drivers/media/pci/pt3/pt3_dma.c 			d->addr_l = lower_32_bits(data_addr);
d                 191 drivers/media/pci/pt3/pt3_dma.c 			d->addr_h = upper_32_bits(data_addr);
d                 192 drivers/media/pci/pt3/pt3_dma.c 			d->size = DATA_XFER_SZ;
d                 195 drivers/media/pci/pt3/pt3_dma.c 			d->next_l = lower_32_bits(desc_addr);
d                 196 drivers/media/pci/pt3/pt3_dma.c 			d->next_h = upper_32_bits(desc_addr);
d                 204 drivers/media/pci/pt3/pt3_dma.c 					d->next_l = lower_32_bits(desc_addr);
d                 205 drivers/media/pci/pt3/pt3_dma.c 					d->next_h = upper_32_bits(desc_addr);
d                  39 drivers/media/pci/saa7164/saa7164-api.c 	struct tmComResDebugGetData d;
d                  47 drivers/media/pci/saa7164/saa7164-api.c 		memset(&d, 0, sizeof(d));
d                  50 drivers/media/pci/saa7164/saa7164-api.c 			GET_DEBUG_DATA_CONTROL, sizeof(d), &d);
d                  55 drivers/media/pci/saa7164/saa7164-api.c 		if (d.dwResult != SAA_OK)
d                  59 drivers/media/pci/saa7164/saa7164-api.c 			d.ucDebugData);
d                1032 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c static inline int calc_interval(u8 fps, u32 n, u32 d)
d                1034 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c 	if (!n || !d)
d                1036 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c 	if (d == fps)
d                1039 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c 	return min(15U, n / d + (n % d >= (fps >> 1)));
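The solo6x10 calc_interval() entries show only the lines that mention d, so the body is incomplete here; what is visible is the final expression, an integer division bumped by one when the remainder reaches a rounding threshold (half the frame rate) and clamped to the hardware maximum of 15. A standalone sketch of that rounded-and-clamped division only; the early-return values and any intermediate scaling are not part of the listing and are not reproduced here.

/* Divide n by d, rounding up once the remainder reaches 'half', and clamp
 * the result to 'max'; mirrors the visible expression
 * min(15U, n / d + (n % d >= (fps >> 1))). */
static unsigned int div_round_clamp(unsigned int n, unsigned int d,
				    unsigned int half, unsigned int max)
{
	unsigned int q = n / d + (n % d >= half);

	return q < max ? q : max;
}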
d                 859 drivers/media/pci/ttpci/av7110_hw.c 	int d, delta;
d                 895 drivers/media/pci/ttpci/av7110_hw.c 			for (d = delta - 2; d >= 0; d--) {
d                 896 drivers/media/pci/ttpci/av7110_hw.c 				c |= (((u8 *)av7110->bmpbuf)[1024 + i * delta + d]
d                 897 drivers/media/pci/ttpci/av7110_hw.c 				      << ((delta - d - 1) * bpp));
d                 609 drivers/media/pci/ttpci/av7110_v4l.c 	struct v4l2_sliced_vbi_data d;
d                 613 drivers/media/pci/ttpci/av7110_v4l.c 	if (FW_VERSION(av7110->arm_app) < 0x2623 || !av7110->wssMode || count != sizeof d)
d                 615 drivers/media/pci/ttpci/av7110_v4l.c 	if (copy_from_user(&d, data, count))
d                 617 drivers/media/pci/ttpci/av7110_v4l.c 	if ((d.id != 0 && d.id != V4L2_SLICED_WSS_625) || d.field != 0 || d.line != 23)
d                 619 drivers/media/pci/ttpci/av7110_v4l.c 	if (d.id)
d                 620 drivers/media/pci/ttpci/av7110_v4l.c 		av7110->wssData = ((d.data[1] << 8) & 0x3f00) | d.data[0];
d                  71 drivers/media/pci/ttpci/budget-patch.c 	int i, par=1, d;
d                  76 drivers/media/pci/ttpci/budget-patch.c 		d = (data>>i)&1;
d                  77 drivers/media/pci/ttpci/budget-patch.c 		par ^= d;
d                  78 drivers/media/pci/ttpci/budget-patch.c 		DiseqcSendBit(budget, d);
d                  69 drivers/media/pci/ttpci/budget.c 	int i, par=1, d;
d                  74 drivers/media/pci/ttpci/budget.c 		d = (data>>i)&1;
d                  75 drivers/media/pci/ttpci/budget.c 		par ^= d;
d                  76 drivers/media/pci/ttpci/budget.c 		DiseqcSendBit(budget, d);
d                 231 drivers/media/platform/cros-ec-cec/cros-ec-cec.c 			struct device *d;
d                 234 drivers/media/platform/cros-ec-cec/cros-ec-cec.c 			d = bus_find_device_by_name(&pci_bus_type, NULL,
d                 236 drivers/media/platform/cros-ec-cec/cros-ec-cec.c 			if (!d)
d                 238 drivers/media/platform/cros-ec-cec/cros-ec-cec.c 			put_device(d);
d                 240 drivers/media/platform/cros-ec-cec/cros-ec-cec.c 			return d;
d                  45 drivers/media/platform/rcar-vin/rcar-core.c #define v4l2_dev_to_vin(d)	container_of(d, struct rvin_dev, v4l2_dev)
d                 227 drivers/media/platform/rcar-vin/rcar-vin.h #define vin_dbg(d, fmt, arg...)		dev_dbg(d->dev, fmt, ##arg)
d                 228 drivers/media/platform/rcar-vin/rcar-vin.h #define vin_info(d, fmt, arg...)	dev_info(d->dev, fmt, ##arg)
d                 229 drivers/media/platform/rcar-vin/rcar-vin.h #define vin_warn(d, fmt, arg...)	dev_warn(d->dev, fmt, ##arg)
d                 230 drivers/media/platform/rcar-vin/rcar-vin.h #define vin_err(d, fmt, arg...)		dev_err(d->dev, fmt, ##arg)
d                  14 drivers/media/platform/s5p-g2d/g2d-hw.c #define w(x, a)	writel((x), d->regs + (a))
d                  15 drivers/media/platform/s5p-g2d/g2d-hw.c #define r(a)	readl(d->regs + (a))
d                  18 drivers/media/platform/s5p-g2d/g2d-hw.c void g2d_reset(struct g2d_dev *d)
d                  23 drivers/media/platform/s5p-g2d/g2d-hw.c void g2d_set_src_size(struct g2d_dev *d, struct g2d_frame *f)
d                  43 drivers/media/platform/s5p-g2d/g2d-hw.c void g2d_set_src_addr(struct g2d_dev *d, dma_addr_t a)
d                  48 drivers/media/platform/s5p-g2d/g2d-hw.c void g2d_set_dst_size(struct g2d_dev *d, struct g2d_frame *f)
d                  68 drivers/media/platform/s5p-g2d/g2d-hw.c void g2d_set_dst_addr(struct g2d_dev *d, dma_addr_t a)
d                  73 drivers/media/platform/s5p-g2d/g2d-hw.c void g2d_set_rop4(struct g2d_dev *d, u32 r)
d                  78 drivers/media/platform/s5p-g2d/g2d-hw.c void g2d_set_flip(struct g2d_dev *d, u32 r)
d                  83 drivers/media/platform/s5p-g2d/g2d-hw.c void g2d_set_v41_stretch(struct g2d_dev *d, struct g2d_frame *src,
d                  93 drivers/media/platform/s5p-g2d/g2d-hw.c void g2d_set_cmd(struct g2d_dev *d, u32 c)
d                  98 drivers/media/platform/s5p-g2d/g2d-hw.c void g2d_start(struct g2d_dev *d)
d                 101 drivers/media/platform/s5p-g2d/g2d-hw.c 	if (d->variant->hw_rev == TYPE_G2D_3X)
d                 110 drivers/media/platform/s5p-g2d/g2d-hw.c void g2d_clear_int(struct g2d_dev *d)
d                  73 drivers/media/platform/s5p-g2d/g2d.h void g2d_reset(struct g2d_dev *d);
d                  74 drivers/media/platform/s5p-g2d/g2d.h void g2d_set_src_size(struct g2d_dev *d, struct g2d_frame *f);
d                  75 drivers/media/platform/s5p-g2d/g2d.h void g2d_set_src_addr(struct g2d_dev *d, dma_addr_t a);
d                  76 drivers/media/platform/s5p-g2d/g2d.h void g2d_set_dst_size(struct g2d_dev *d, struct g2d_frame *f);
d                  77 drivers/media/platform/s5p-g2d/g2d.h void g2d_set_dst_addr(struct g2d_dev *d, dma_addr_t a);
d                  78 drivers/media/platform/s5p-g2d/g2d.h void g2d_start(struct g2d_dev *d);
d                  79 drivers/media/platform/s5p-g2d/g2d.h void g2d_clear_int(struct g2d_dev *d);
d                  80 drivers/media/platform/s5p-g2d/g2d.h void g2d_set_rop4(struct g2d_dev *d, u32 r);
d                  81 drivers/media/platform/s5p-g2d/g2d.h void g2d_set_flip(struct g2d_dev *d, u32 r);
d                  82 drivers/media/platform/s5p-g2d/g2d.h void g2d_set_v41_stretch(struct g2d_dev *d,
d                  84 drivers/media/platform/s5p-g2d/g2d.h void g2d_set_cmd(struct g2d_dev *d, u32 c);
d                 531 drivers/media/platform/seco-cec/seco-cec.c 			struct device *d;
d                 534 drivers/media/platform/seco-cec/seco-cec.c 			d = bus_find_device_by_name(&pci_bus_type, NULL,
d                 536 drivers/media/platform/seco-cec/seco-cec.c 			if (!d)
d                 539 drivers/media/platform/seco-cec/seco-cec.c 			put_device(d);
d                 541 drivers/media/platform/seco-cec/seco-cec.c 			return d;
d                 543 drivers/media/platform/vicodec/codec-fwht.c 			int d;
d                 554 drivers/media/platform/vicodec/codec-fwht.c 			for (d = 0; d < 8; d++)
d                 555 drivers/media/platform/vicodec/codec-fwht.c 				out[8 * d] >>= 6;
d                 557 drivers/media/platform/vicodec/codec-fwht.c 			int d;
d                 568 drivers/media/platform/vicodec/codec-fwht.c 			for (d = 0; d < 8; d++) {
d                 569 drivers/media/platform/vicodec/codec-fwht.c 				out[8 * d] >>= 6;
d                 570 drivers/media/platform/vicodec/codec-fwht.c 				out[8 * d] += 128;
d                  58 drivers/media/platform/vivid/vivid-osd.c 		u16 *d = p;
d                  61 drivers/media/platform/vivid/vivid-osd.c 			d[x] = rgb[(y / 16 + x / 16) % 16];
d                  59 drivers/media/radio/dsbr100.c #define v4l2_dev_to_radio(d) container_of(d, struct dsbr100_device, v4l2_dev)
d                  56 drivers/media/radio/radio-shark.c #define v4l2_dev_to_shark(d) container_of(d, struct shark_device, v4l2_dev)
d                  53 drivers/media/radio/radio-shark2.c #define v4l2_dev_to_shark(d) container_of(d, struct shark_device, v4l2_dev)
d                 315 drivers/media/radio/radio-si476x.c v4l2_dev_to_radio(struct v4l2_device *d)
d                 317 drivers/media/radio/radio-si476x.c 	return container_of(d, struct si476x_radio, v4l2dev);
d                 321 drivers/media/radio/radio-si476x.c v4l2_ctrl_handler_to_radio(struct v4l2_ctrl_handler *d)
d                 323 drivers/media/radio/radio-si476x.c 	return container_of(d, struct si476x_radio, ctrl_handler);
d                 755 drivers/media/rc/imon.c static ssize_t show_associate_remote(struct device *d,
d                 759 drivers/media/rc/imon.c 	struct imon_context *ictx = dev_get_drvdata(d);
d                 770 drivers/media/rc/imon.c 	dev_info(d, "Visit http://www.lirc.org/html/imon-24g.html for instructions on how to associate your iMON 2.4G DT/LT remote\n");
d                 775 drivers/media/rc/imon.c static ssize_t store_associate_remote(struct device *d,
d                 781 drivers/media/rc/imon.c 	ictx = dev_get_drvdata(d);
d                 797 drivers/media/rc/imon.c static ssize_t show_imon_clock(struct device *d,
d                 800 drivers/media/rc/imon.c 	struct imon_context *ictx = dev_get_drvdata(d);
d                 823 drivers/media/rc/imon.c static ssize_t store_imon_clock(struct device *d,
d                 827 drivers/media/rc/imon.c 	struct imon_context *ictx = dev_get_drvdata(d);
d                 354 drivers/media/tuners/mt2063.c #define ceil(n, d) (((n) < 0) ? (-((-(n))/(d))) : (n)/(d) + ((n)%(d) != 0))
d                 355 drivers/media/tuners/mt2063.c #define floor(n, d) (((n) < 0) ? (-((-(n))/(d))) - ((n)%(d) != 0) : (n)/(d))
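The two mt2063.c macros above compute ceil(n/d) and floor(n/d) for signed n and positive d, compensating for C's truncation toward zero on negative operands. A quick standalone check of the boundary cases, with the macros renamed so they do not collide with math.h.

#include <assert.h>

#define ceil_div(n, d)  (((n) < 0) ? (-((-(n))/(d))) : (n)/(d) + ((n)%(d) != 0))
#define floor_div(n, d) (((n) < 0) ? (-((-(n))/(d))) - ((n)%(d) != 0) : (n)/(d))

int main(void)
{
	assert(ceil_div(7, 2) == 4);	/* ceil(3.5)  ==  4 */
	assert(ceil_div(-7, 2) == -3);	/* ceil(-3.5) == -3 */
	assert(floor_div(7, 2) == 3);	/* floor(3.5)  ==  3 */
	assert(floor_div(-7, 2) == -4);	/* floor(-3.5) == -4 */
	return 0;
}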
d                 687 drivers/media/tuners/mt2063.c 	const u32 d = pAS_Info->f_out + pAS_Info->f_out_bw / 2;
d                 688 drivers/media/tuners/mt2063.c 	const u32 c = d - pAS_Info->f_out_bw;
d                 706 drivers/media/tuners/mt2063.c 	gd_Scale = max((u32) gcd(lo_gcd, d), f_Scale);
d                 713 drivers/media/tuners/mt2063.c 	n0 = DIV_ROUND_UP(f_LO2 - d, f_LO1 - f_LO2);
d                 718 drivers/media/tuners/mt2063.c 		      ((d + hgds) / gd_Scale)) / ((f_LO2 + hgds) / gd_Scale);
d                 725 drivers/media/tuners/mt2063.c 		      ((d + hgds) / gd_Scale)) / ((f_LO2 + hgds) / gd_Scale);
d                 741 drivers/media/tuners/mt2063.c 			*fm = (((s32) d - f_Spur) / (mc - n)) + 1;
d                 771 drivers/media/tuners/mt2063.c 			*fp = (((s32) d + f_Spur) / (ma - n)) + 1;
d                 498 drivers/media/tuners/mxl5007t.c 	u8 d = 0xff;
d                 501 drivers/media/tuners/mxl5007t.c 		.buf = &d, .len = 1
d                 551 drivers/media/tuners/mxl5007t.c 	u8 d;
d                 557 drivers/media/tuners/mxl5007t.c 	ret = mxl5007t_read_reg(state, 0xd8, &d);
d                 561 drivers/media/tuners/mxl5007t.c 	if ((d & 0x0c) == 0x0c)
d                 564 drivers/media/tuners/mxl5007t.c 	if ((d & 0x03) == 0x03)
d                 555 drivers/media/tuners/tda18271-common.c 	u8 d, pd;
d                 558 drivers/media/tuners/tda18271-common.c 	int ret = tda18271_lookup_pll_map(fe, MAIN_PLL, &freq, &pd, &d);
d                 564 drivers/media/tuners/tda18271-common.c 	div =  ((d * (freq / 1000)) << 7) / 125;
d                 578 drivers/media/tuners/tda18271-common.c 	u8 d, pd;
d                 581 drivers/media/tuners/tda18271-common.c 	int ret = tda18271_lookup_pll_map(fe, CAL_PLL, &freq, &pd, &d);
d                 587 drivers/media/tuners/tda18271-common.c 	div =  ((d * (freq / 1000)) << 7) / 125;
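The tda18271-common.c entries above fetch a post-divider byte (pd) and a divider (d) for a given LO frequency and then form the PLL word as ((d * (freq / 1000)) << 7) / 125. The entries that follow, from tda18271-maps.c, are the lookup tables themselves, keyed by an upper LO bound lomax (apparently in kHz, given the freq / 1000 above). A minimal sketch of scanning such a table; the real helper (tda18271_lookup_pll_map) is not shown in the listing, so this loop is an assumption about its shape, not a copy of it.

#include <stdint.h>

/* Assumed shape of a lomax-keyed map lookup: entries sorted by ascending
 * lomax, a zero lomax terminating the table. */
struct pll_map_entry {
	uint32_t lomax;		/* upper bound of the band; 0 ends the table */
	uint8_t pd;		/* post divider register value */
	uint8_t d;		/* divider used to scale the LO frequency */
};

static int pll_map_lookup(const struct pll_map_entry *map, uint32_t freq_khz,
			  uint8_t *pd, uint8_t *d)
{
	while (map->lomax && freq_khz > map->lomax)
		map++;
	if (!map->lomax)
		return -1;	/* frequency above the last band */
	*pd = map->pd;
	*d = map->d;
	return 0;
}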
d                  14 drivers/media/tuners/tda18271-maps.c 	u8 d;  /*      div */
d                  25 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  32000, .pd = 0x5f, .d = 0xf0 },
d                  26 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  35000, .pd = 0x5e, .d = 0xe0 },
d                  27 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  37000, .pd = 0x5d, .d = 0xd0 },
d                  28 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  41000, .pd = 0x5c, .d = 0xc0 },
d                  29 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  44000, .pd = 0x5b, .d = 0xb0 },
d                  30 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  49000, .pd = 0x5a, .d = 0xa0 },
d                  31 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  54000, .pd = 0x59, .d = 0x90 },
d                  32 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  61000, .pd = 0x58, .d = 0x80 },
d                  33 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  65000, .pd = 0x4f, .d = 0x78 },
d                  34 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  70000, .pd = 0x4e, .d = 0x70 },
d                  35 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  75000, .pd = 0x4d, .d = 0x68 },
d                  36 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  82000, .pd = 0x4c, .d = 0x60 },
d                  37 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  89000, .pd = 0x4b, .d = 0x58 },
d                  38 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  98000, .pd = 0x4a, .d = 0x50 },
d                  39 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 109000, .pd = 0x49, .d = 0x48 },
d                  40 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 123000, .pd = 0x48, .d = 0x40 },
d                  41 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 131000, .pd = 0x3f, .d = 0x3c },
d                  42 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 141000, .pd = 0x3e, .d = 0x38 },
d                  43 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 151000, .pd = 0x3d, .d = 0x34 },
d                  44 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 164000, .pd = 0x3c, .d = 0x30 },
d                  45 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 179000, .pd = 0x3b, .d = 0x2c },
d                  46 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 197000, .pd = 0x3a, .d = 0x28 },
d                  47 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 219000, .pd = 0x39, .d = 0x24 },
d                  48 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 246000, .pd = 0x38, .d = 0x20 },
d                  49 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 263000, .pd = 0x2f, .d = 0x1e },
d                  50 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 282000, .pd = 0x2e, .d = 0x1c },
d                  51 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 303000, .pd = 0x2d, .d = 0x1a },
d                  52 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 329000, .pd = 0x2c, .d = 0x18 },
d                  53 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 359000, .pd = 0x2b, .d = 0x16 },
d                  54 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 395000, .pd = 0x2a, .d = 0x14 },
d                  55 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 438000, .pd = 0x29, .d = 0x12 },
d                  56 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 493000, .pd = 0x28, .d = 0x10 },
d                  57 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 526000, .pd = 0x1f, .d = 0x0f },
d                  58 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 564000, .pd = 0x1e, .d = 0x0e },
d                  59 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 607000, .pd = 0x1d, .d = 0x0d },
d                  60 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 658000, .pd = 0x1c, .d = 0x0c },
d                  61 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 718000, .pd = 0x1b, .d = 0x0b },
d                  62 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 790000, .pd = 0x1a, .d = 0x0a },
d                  63 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 877000, .pd = 0x19, .d = 0x09 },
d                  64 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 987000, .pd = 0x18, .d = 0x08 },
d                  65 drivers/media/tuners/tda18271-maps.c 	{ .lomax =      0, .pd = 0x00, .d = 0x00 }, /* end */
d                  69 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  33125, .pd = 0x57, .d = 0xf0 },
d                  70 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  35500, .pd = 0x56, .d = 0xe0 },
d                  71 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  38188, .pd = 0x55, .d = 0xd0 },
d                  72 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  41375, .pd = 0x54, .d = 0xc0 },
d                  73 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  45125, .pd = 0x53, .d = 0xb0 },
d                  74 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  49688, .pd = 0x52, .d = 0xa0 },
d                  75 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  55188, .pd = 0x51, .d = 0x90 },
d                  76 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  62125, .pd = 0x50, .d = 0x80 },
d                  77 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  66250, .pd = 0x47, .d = 0x78 },
d                  78 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  71000, .pd = 0x46, .d = 0x70 },
d                  79 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  76375, .pd = 0x45, .d = 0x68 },
d                  80 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  82750, .pd = 0x44, .d = 0x60 },
d                  81 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  90250, .pd = 0x43, .d = 0x58 },
d                  82 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  99375, .pd = 0x42, .d = 0x50 },
d                  83 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 110375, .pd = 0x41, .d = 0x48 },
d                  84 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 124250, .pd = 0x40, .d = 0x40 },
d                  85 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 132500, .pd = 0x37, .d = 0x3c },
d                  86 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 142000, .pd = 0x36, .d = 0x38 },
d                  87 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 152750, .pd = 0x35, .d = 0x34 },
d                  88 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 165500, .pd = 0x34, .d = 0x30 },
d                  89 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 180500, .pd = 0x33, .d = 0x2c },
d                  90 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 198750, .pd = 0x32, .d = 0x28 },
d                  91 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 220750, .pd = 0x31, .d = 0x24 },
d                  92 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 248500, .pd = 0x30, .d = 0x20 },
d                  93 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 265000, .pd = 0x27, .d = 0x1e },
d                  94 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 284000, .pd = 0x26, .d = 0x1c },
d                  95 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 305500, .pd = 0x25, .d = 0x1a },
d                  96 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 331000, .pd = 0x24, .d = 0x18 },
d                  97 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 361000, .pd = 0x23, .d = 0x16 },
d                  98 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 397500, .pd = 0x22, .d = 0x14 },
d                  99 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 441500, .pd = 0x21, .d = 0x12 },
d                 100 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 497000, .pd = 0x20, .d = 0x10 },
d                 101 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 530000, .pd = 0x17, .d = 0x0f },
d                 102 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 568000, .pd = 0x16, .d = 0x0e },
d                 103 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 611000, .pd = 0x15, .d = 0x0d },
d                 104 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 662000, .pd = 0x14, .d = 0x0c },
d                 105 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 722000, .pd = 0x13, .d = 0x0b },
d                 106 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 795000, .pd = 0x12, .d = 0x0a },
d                 107 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 883000, .pd = 0x11, .d = 0x09 },
d                 108 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 994000, .pd = 0x10, .d = 0x08 },
d                 109 drivers/media/tuners/tda18271-maps.c 	{ .lomax =      0, .pd = 0x00, .d = 0x00 }, /* end */
d                 113 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   33000, .pd = 0xdd, .d = 0xd0 },
d                 114 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   36000, .pd = 0xdc, .d = 0xc0 },
d                 115 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   40000, .pd = 0xdb, .d = 0xb0 },
d                 116 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   44000, .pd = 0xda, .d = 0xa0 },
d                 117 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   49000, .pd = 0xd9, .d = 0x90 },
d                 118 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   55000, .pd = 0xd8, .d = 0x80 },
d                 119 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   63000, .pd = 0xd3, .d = 0x70 },
d                 120 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   67000, .pd = 0xcd, .d = 0x68 },
d                 121 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   73000, .pd = 0xcc, .d = 0x60 },
d                 122 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   80000, .pd = 0xcb, .d = 0x58 },
d                 123 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   88000, .pd = 0xca, .d = 0x50 },
d                 124 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   98000, .pd = 0xc9, .d = 0x48 },
d                 125 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  110000, .pd = 0xc8, .d = 0x40 },
d                 126 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  126000, .pd = 0xc3, .d = 0x38 },
d                 127 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  135000, .pd = 0xbd, .d = 0x34 },
d                 128 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  147000, .pd = 0xbc, .d = 0x30 },
d                 129 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  160000, .pd = 0xbb, .d = 0x2c },
d                 130 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  176000, .pd = 0xba, .d = 0x28 },
d                 131 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  196000, .pd = 0xb9, .d = 0x24 },
d                 132 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  220000, .pd = 0xb8, .d = 0x20 },
d                 133 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  252000, .pd = 0xb3, .d = 0x1c },
d                 134 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  271000, .pd = 0xad, .d = 0x1a },
d                 135 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  294000, .pd = 0xac, .d = 0x18 },
d                 136 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  321000, .pd = 0xab, .d = 0x16 },
d                 137 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  353000, .pd = 0xaa, .d = 0x14 },
d                 138 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  392000, .pd = 0xa9, .d = 0x12 },
d                 139 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  441000, .pd = 0xa8, .d = 0x10 },
d                 140 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  505000, .pd = 0xa3, .d = 0x0e },
d                 141 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  543000, .pd = 0x9d, .d = 0x0d },
d                 142 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  589000, .pd = 0x9c, .d = 0x0c },
d                 143 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  642000, .pd = 0x9b, .d = 0x0b },
d                 144 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  707000, .pd = 0x9a, .d = 0x0a },
d                 145 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  785000, .pd = 0x99, .d = 0x09 },
d                 146 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  883000, .pd = 0x98, .d = 0x08 },
d                 147 drivers/media/tuners/tda18271-maps.c 	{ .lomax = 1010000, .pd = 0x93, .d = 0x07 },
d                 148 drivers/media/tuners/tda18271-maps.c 	{ .lomax =       0, .pd = 0x00, .d = 0x00 }, /* end */
d                 152 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   33813, .pd = 0xdd, .d = 0xd0 },
d                 153 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   36625, .pd = 0xdc, .d = 0xc0 },
d                 154 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   39938, .pd = 0xdb, .d = 0xb0 },
d                 155 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   43938, .pd = 0xda, .d = 0xa0 },
d                 156 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   48813, .pd = 0xd9, .d = 0x90 },
d                 157 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   54938, .pd = 0xd8, .d = 0x80 },
d                 158 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   62813, .pd = 0xd3, .d = 0x70 },
d                 159 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   67625, .pd = 0xcd, .d = 0x68 },
d                 160 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   73250, .pd = 0xcc, .d = 0x60 },
d                 161 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   79875, .pd = 0xcb, .d = 0x58 },
d                 162 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   87875, .pd = 0xca, .d = 0x50 },
d                 163 drivers/media/tuners/tda18271-maps.c 	{ .lomax =   97625, .pd = 0xc9, .d = 0x48 },
d                 164 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  109875, .pd = 0xc8, .d = 0x40 },
d                 165 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  125625, .pd = 0xc3, .d = 0x38 },
d                 166 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  135250, .pd = 0xbd, .d = 0x34 },
d                 167 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  146500, .pd = 0xbc, .d = 0x30 },
d                 168 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  159750, .pd = 0xbb, .d = 0x2c },
d                 169 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  175750, .pd = 0xba, .d = 0x28 },
d                 170 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  195250, .pd = 0xb9, .d = 0x24 },
d                 171 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  219750, .pd = 0xb8, .d = 0x20 },
d                 172 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  251250, .pd = 0xb3, .d = 0x1c },
d                 173 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  270500, .pd = 0xad, .d = 0x1a },
d                 174 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  293000, .pd = 0xac, .d = 0x18 },
d                 175 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  319500, .pd = 0xab, .d = 0x16 },
d                 176 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  351500, .pd = 0xaa, .d = 0x14 },
d                 177 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  390500, .pd = 0xa9, .d = 0x12 },
d                 178 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  439500, .pd = 0xa8, .d = 0x10 },
d                 179 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  502500, .pd = 0xa3, .d = 0x0e },
d                 180 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  541000, .pd = 0x9d, .d = 0x0d },
d                 181 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  586000, .pd = 0x9c, .d = 0x0c },
d                 182 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  639000, .pd = 0x9b, .d = 0x0b },
d                 183 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  703000, .pd = 0x9a, .d = 0x0a },
d                 184 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  781000, .pd = 0x99, .d = 0x09 },
d                 185 drivers/media/tuners/tda18271-maps.c 	{ .lomax =  879000, .pd = 0x98, .d = 0x08 },
d                 186 drivers/media/tuners/tda18271-maps.c 	{ .lomax =       0, .pd = 0x00, .d = 0x00 }, /* end */
d                 895 drivers/media/tuners/tda18271-maps.c 	u8 d;
d                 901 drivers/media/tuners/tda18271-maps.c 	{ .d = 0x00, .r0 = 60, .r1 =  92 },
d                 902 drivers/media/tuners/tda18271-maps.c 	{ .d = 0x01, .r0 = 62, .r1 =  94 },
d                 903 drivers/media/tuners/tda18271-maps.c 	{ .d = 0x02, .r0 = 66, .r1 =  98 },
d                 904 drivers/media/tuners/tda18271-maps.c 	{ .d = 0x03, .r0 = 64, .r1 =  96 },
d                 905 drivers/media/tuners/tda18271-maps.c 	{ .d = 0x04, .r0 = 74, .r1 = 106 },
d                 906 drivers/media/tuners/tda18271-maps.c 	{ .d = 0x05, .r0 = 72, .r1 = 104 },
d                 907 drivers/media/tuners/tda18271-maps.c 	{ .d = 0x06, .r0 = 68, .r1 = 100 },
d                 908 drivers/media/tuners/tda18271-maps.c 	{ .d = 0x07, .r0 = 70, .r1 = 102 },
d                 909 drivers/media/tuners/tda18271-maps.c 	{ .d = 0x08, .r0 = 90, .r1 = 122 },
d                 910 drivers/media/tuners/tda18271-maps.c 	{ .d = 0x09, .r0 = 88, .r1 = 120 },
d                 911 drivers/media/tuners/tda18271-maps.c 	{ .d = 0x0a, .r0 = 84, .r1 = 116 },
d                 912 drivers/media/tuners/tda18271-maps.c 	{ .d = 0x0b, .r0 = 86, .r1 = 118 },
d                 913 drivers/media/tuners/tda18271-maps.c 	{ .d = 0x0c, .r0 = 76, .r1 = 108 },
d                 914 drivers/media/tuners/tda18271-maps.c 	{ .d = 0x0d, .r0 = 78, .r1 = 110 },
d                 915 drivers/media/tuners/tda18271-maps.c 	{ .d = 0x0e, .r0 = 82, .r1 = 114 },
d                 916 drivers/media/tuners/tda18271-maps.c 	{ .d = 0x0f, .r0 = 80, .r1 = 112 },
d                 917 drivers/media/tuners/tda18271-maps.c 	{ .d = 0x00, .r0 =  0, .r1 =   0 }, /* end */
d                 926 drivers/media/tuners/tda18271-maps.c 	while (tda18271_thermometer[i].d < (regs[R_TM] & 0x0f)) {
d                 927 drivers/media/tuners/tda18271-maps.c 		if (tda18271_thermometer[i + 1].d == 0)
d                1094 drivers/media/tuners/tda18271-maps.c 	*div      = map[i].d;
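The .lomax/.pd/.d rows listed above are the tda18271 main post-divider maps: each row pairs an upper LO-frequency bound (in kHz) with the post-divider and divider bytes to program, and an all-zero row terminates the table. The lookup lines in this same group (the while-scan with the ".d == 0" sentinel check and the final "*div = map[i].d") suggest a simple first-match walk. Below is a minimal sketch of that pattern; the struct layout, the lookup_pll_map() name and the error handling are assumptions for illustration, not the driver's exact API.

#include <stdint.h>
#include <errno.h>

/* Illustrative first-match scan over a { .lomax, .pd, .d } table like the
 * ones listed above.  Names and types are stand-ins, not the driver's API. */
struct pll_map_row {
	uint32_t lomax;	/* upper LO frequency bound, in kHz */
	uint8_t pd;	/* post-divider byte */
	uint8_t d;	/* divider byte */
};

static int lookup_pll_map(const struct pll_map_row *map, uint32_t freq_khz,
			  uint8_t *post_div, uint8_t *div)
{
	int i = 0;

	/* advance until the first row whose bound covers the request;
	 * the all-zero row marks the end of the table */
	while (map[i].lomax < freq_khz) {
		if (map[i + 1].lomax == 0)
			return -EINVAL;	/* ran past the last real row */
		i++;
	}

	*post_div = map[i].pd;
	*div      = map[i].d;
	return 0;
}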
d                 745 drivers/media/usb/au0828/au0828-video.c static int au0828_analog_stream_enable(struct au0828_dev *d)
d                 752 drivers/media/usb/au0828/au0828-video.c 	if (test_bit(DEV_DISCONNECTED, &d->dev_state))
d                 755 drivers/media/usb/au0828/au0828-video.c 	iface = usb_ifnum_to_if(d->usbdev, 0);
d                 759 drivers/media/usb/au0828/au0828-video.c 		ret = usb_set_interface(d->usbdev, 0, 5);
d                 766 drivers/media/usb/au0828/au0828-video.c 	h = d->height / 2 + 2;
d                 767 drivers/media/usb/au0828/au0828-video.c 	w = d->width * 2;
d                 769 drivers/media/usb/au0828/au0828-video.c 	au0828_writereg(d, AU0828_SENSORCTRL_VBI_103, 0x00);
d                 770 drivers/media/usb/au0828/au0828-video.c 	au0828_writereg(d, 0x106, 0x00);
d                 772 drivers/media/usb/au0828/au0828-video.c 	au0828_writereg(d, 0x110, 0x00);
d                 773 drivers/media/usb/au0828/au0828-video.c 	au0828_writereg(d, 0x111, 0x00);
d                 774 drivers/media/usb/au0828/au0828-video.c 	au0828_writereg(d, 0x114, w & 0xff);
d                 775 drivers/media/usb/au0828/au0828-video.c 	au0828_writereg(d, 0x115, w >> 8);
d                 777 drivers/media/usb/au0828/au0828-video.c 	au0828_writereg(d, 0x112, 0x00);
d                 778 drivers/media/usb/au0828/au0828-video.c 	au0828_writereg(d, 0x113, 0x00);
d                 779 drivers/media/usb/au0828/au0828-video.c 	au0828_writereg(d, 0x116, h & 0xff);
d                 780 drivers/media/usb/au0828/au0828-video.c 	au0828_writereg(d, 0x117, h >> 8);
d                 781 drivers/media/usb/au0828/au0828-video.c 	au0828_writereg(d, AU0828_SENSORCTRL_100, 0xb3);
d                 786 drivers/media/usb/au0828/au0828-video.c static int au0828_analog_stream_disable(struct au0828_dev *d)
d                 789 drivers/media/usb/au0828/au0828-video.c 	au0828_writereg(d, AU0828_SENSORCTRL_100, 0x0);
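The au0828 lines above sketch analog streaming bring-up: the capture window is derived from the configured picture size (the driver computes h = height / 2 + 2 and w = width * 2), each 16-bit dimension is split across a low/high register pair (0x114/0x115 for width, 0x116/0x117 for height), and writing 0xb3 to AU0828_SENSORCTRL_100 starts the stream while 0x00 stops it. A small sketch of the same low/high split follows; program_window() and reg_write() are illustrative stand-ins, and the 0x100 register number is assumed from the AU0828_SENSORCTRL_100 macro name.

#include <stdint.h>

/* Illustration only: mirror the width/height programming listed above.
 * reg_write() stands in for au0828_writereg(); register numbers follow
 * the listing (0x100 assumed from the SENSORCTRL_100 name). */
static void program_window(uint16_t width, uint16_t height,
			   void (*reg_write)(uint16_t reg, uint8_t val))
{
	uint16_t h = height / 2 + 2;	/* as computed in the listed code */
	uint16_t w = width * 2;

	reg_write(0x114, w & 0xff);	/* width: low byte ... */
	reg_write(0x115, w >> 8);	/* ... then high byte */
	reg_write(0x116, h & 0xff);	/* height: low byte ... */
	reg_write(0x117, h >> 8);	/* ... then high byte */
	reg_write(0x100, 0xb3);		/* start streaming; 0x00 stops */
}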
d                1693 drivers/media/usb/cx231xx/cx231xx-cards.c 	struct device *d = &interface->dev;
d                1717 drivers/media/usb/cx231xx/cx231xx-cards.c 			dev_err(d,
d                1737 drivers/media/usb/cx231xx/cx231xx-cards.c 	dev->dev = d;
d                1778 drivers/media/usb/cx231xx/cx231xx-cards.c 	dev_info(d,
d                1795 drivers/media/usb/cx231xx/cx231xx-cards.c 		dev_err(d, "Not found matching IAD interface\n");
d                1800 drivers/media/usb/cx231xx/cx231xx-cards.c 	dev_dbg(d, "registering interface %d\n", ifnum);
d                1808 drivers/media/usb/cx231xx/cx231xx-cards.c 		dev_err(d, "cx231xx_media_device_init failed\n");
d                1818 drivers/media/usb/cx231xx/cx231xx-cards.c 		dev_err(d, "v4l2_device_register failed\n");
d                1835 drivers/media/usb/cx231xx/cx231xx-cards.c 			dev_err(d, "TS1 PCB interface #%d doesn't exist\n",
d                1852 drivers/media/usb/cx231xx/cx231xx-cards.c 		dev_info(d,
d                1876 drivers/media/usb/cx231xx/cx231xx-cards.c 			dev_dbg(d, "Alternate setting %i, max size= %i\n",
d                  17 drivers/media/usb/dvb-usb-v2/af9015.c static int af9015_ctrl_msg(struct dvb_usb_device *d, struct req_t *req)
d                  21 drivers/media/usb/dvb-usb-v2/af9015.c 	struct af9015_state *state = d_to_priv(d);
d                  22 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                  26 drivers/media/usb/dvb-usb-v2/af9015.c 	mutex_lock(&d->usb_mutex);
d                  91 drivers/media/usb/dvb-usb-v2/af9015.c 	ret = dvb_usbv2_generic_rw_locked(d, state->buf, wlen,
d                 107 drivers/media/usb/dvb-usb-v2/af9015.c 	mutex_unlock(&d->usb_mutex);
d                 112 drivers/media/usb/dvb-usb-v2/af9015.c static int af9015_write_reg_i2c(struct dvb_usb_device *d, u8 addr, u16 reg,
d                 115 drivers/media/usb/dvb-usb-v2/af9015.c 	struct af9015_state *state = d_to_priv(d);
d                 122 drivers/media/usb/dvb-usb-v2/af9015.c 	return af9015_ctrl_msg(d, &req);
d                 125 drivers/media/usb/dvb-usb-v2/af9015.c static int af9015_read_reg_i2c(struct dvb_usb_device *d, u8 addr, u16 reg,
d                 128 drivers/media/usb/dvb-usb-v2/af9015.c 	struct af9015_state *state = d_to_priv(d);
d                 135 drivers/media/usb/dvb-usb-v2/af9015.c 	return af9015_ctrl_msg(d, &req);
d                 141 drivers/media/usb/dvb-usb-v2/af9015.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 142 drivers/media/usb/dvb-usb-v2/af9015.c 	struct af9015_state *state = d_to_priv(d);
d                 143 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                 207 drivers/media/usb/dvb-usb-v2/af9015.c 		ret = af9015_ctrl_msg(d, &req);
d                 225 drivers/media/usb/dvb-usb-v2/af9015.c 		ret = af9015_ctrl_msg(d, &req);
d                 243 drivers/media/usb/dvb-usb-v2/af9015.c 		ret = af9015_ctrl_msg(d, &req);
d                 267 drivers/media/usb/dvb-usb-v2/af9015.c static int af9015_identify_state(struct dvb_usb_device *d, const char **name)
d                 269 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                 274 drivers/media/usb/dvb-usb-v2/af9015.c 	ret = af9015_ctrl_msg(d, &req);
d                 288 drivers/media/usb/dvb-usb-v2/af9015.c static int af9015_download_firmware(struct dvb_usb_device *d,
d                 291 drivers/media/usb/dvb-usb-v2/af9015.c 	struct af9015_state *state = d_to_priv(d);
d                 292 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                 311 drivers/media/usb/dvb-usb-v2/af9015.c 		ret = af9015_ctrl_msg(d, &req);
d                 321 drivers/media/usb/dvb-usb-v2/af9015.c 	ret = af9015_ctrl_msg(d, &req);
d                 338 drivers/media/usb/dvb-usb-v2/af9015.c static int af9015_eeprom_hash(struct dvb_usb_device *d)
d                 340 drivers/media/usb/dvb-usb-v2/af9015.c 	struct af9015_state *state = d_to_priv(d);
d                 341 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                 350 drivers/media/usb/dvb-usb-v2/af9015.c 		ret = af9015_ctrl_msg(d, &req);
d                 371 drivers/media/usb/dvb-usb-v2/af9015.c static int af9015_read_config(struct dvb_usb_device *d)
d                 373 drivers/media/usb/dvb-usb-v2/af9015.c 	struct af9015_state *state = d_to_priv(d);
d                 374 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                 385 drivers/media/usb/dvb-usb-v2/af9015.c 		ret = af9015_ctrl_msg(d, &req);
d                 392 drivers/media/usb/dvb-usb-v2/af9015.c 	ret = af9015_eeprom_hash(d);
d                 401 drivers/media/usb/dvb-usb-v2/af9015.c 	ret = af9015_ctrl_msg(d, &req);
d                 413 drivers/media/usb/dvb-usb-v2/af9015.c 		ret = af9015_ctrl_msg(d, &req);
d                 425 drivers/media/usb/dvb-usb-v2/af9015.c 		ret = af9015_ctrl_msg(d, &req);
d                 447 drivers/media/usb/dvb-usb-v2/af9015.c 		ret = af9015_ctrl_msg(d, &req);
d                 454 drivers/media/usb/dvb-usb-v2/af9015.c 		ret = af9015_ctrl_msg(d, &req);
d                 465 drivers/media/usb/dvb-usb-v2/af9015.c 		ret = af9015_ctrl_msg(d, &req);
d                 470 drivers/media/usb/dvb-usb-v2/af9015.c 		ret = af9015_ctrl_msg(d, &req);
d                 479 drivers/media/usb/dvb-usb-v2/af9015.c 		ret = af9015_ctrl_msg(d, &req);
d                 523 drivers/media/usb/dvb-usb-v2/af9015.c 	if (le16_to_cpu(d->udev->descriptor.idVendor) == USB_VID_AVERMEDIA &&
d                 524 drivers/media/usb/dvb-usb-v2/af9015.c 	    ((le16_to_cpu(d->udev->descriptor.idProduct) == USB_PID_AVERMEDIA_A850) ||
d                 525 drivers/media/usb/dvb-usb-v2/af9015.c 	    (le16_to_cpu(d->udev->descriptor.idProduct) == USB_PID_AVERMEDIA_A850T))) {
d                 540 drivers/media/usb/dvb-usb-v2/af9015.c 	struct dvb_usb_device *d = fe_to_d(fe);
d                 541 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                 545 drivers/media/usb/dvb-usb-v2/af9015.c 	if (d->udev->speed == USB_SPEED_FULL)
d                 553 drivers/media/usb/dvb-usb-v2/af9015.c 	struct dvb_usb_device *d = fe_to_d(fe);
d                 554 drivers/media/usb/dvb-usb-v2/af9015.c 	struct af9015_state *state = d_to_priv(d);
d                 555 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                 567 drivers/media/usb/dvb-usb-v2/af9015.c 		utmp1 = (d->udev->speed == USB_SPEED_FULL ? 5 : 87) * 188 / 4;
d                 568 drivers/media/usb/dvb-usb-v2/af9015.c 		utmp2 = (d->udev->speed == USB_SPEED_FULL ? 64 : 512) / 4;
d                 658 drivers/media/usb/dvb-usb-v2/af9015.c static int af9015_get_adapter_count(struct dvb_usb_device *d)
d                 660 drivers/media/usb/dvb-usb-v2/af9015.c 	struct af9015_state *state = d_to_priv(d);
d                 762 drivers/media/usb/dvb-usb-v2/af9015.c static int af9015_copy_firmware(struct dvb_usb_device *d)
d                 764 drivers/media/usb/dvb-usb-v2/af9015.c 	struct af9015_state *state = d_to_priv(d);
d                 765 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                 779 drivers/media/usb/dvb-usb-v2/af9015.c 	ret = af9015_read_reg_i2c(d, state->af9013_i2c_addr[1], 0x98be, &val);
d                 794 drivers/media/usb/dvb-usb-v2/af9015.c 	ret = af9015_ctrl_msg(d, &req);
d                 806 drivers/media/usb/dvb-usb-v2/af9015.c 	ret = af9015_write_reg_i2c(d, state->af9013_i2c_addr[1], 0xe205, 0x01);
d                 816 drivers/media/usb/dvb-usb-v2/af9015.c 		ret = af9015_read_reg_i2c(d, state->af9013_i2c_addr[1],
d                 846 drivers/media/usb/dvb-usb-v2/af9015.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 847 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                 884 drivers/media/usb/dvb-usb-v2/af9015.c 	client = dvb_module_probe("af9013", NULL, &d->i2c_adap,
d                 921 drivers/media/usb/dvb-usb-v2/af9015.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 922 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                 999 drivers/media/usb/dvb-usb-v2/af9015.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                1000 drivers/media/usb/dvb-usb-v2/af9015.c 	struct af9015_state *state = d_to_priv(d);
d                1001 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                1101 drivers/media/usb/dvb-usb-v2/af9015.c static int af9015_init(struct dvb_usb_device *d)
d                1103 drivers/media/usb/dvb-usb-v2/af9015.c 	struct af9015_state *state = d_to_priv(d);
d                1104 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                1152 drivers/media/usb/dvb-usb-v2/af9015.c static int af9015_rc_query(struct dvb_usb_device *d)
d                1154 drivers/media/usb/dvb-usb-v2/af9015.c 	struct af9015_state *state = d_to_priv(d);
d                1155 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                1174 drivers/media/usb/dvb-usb-v2/af9015.c 		rc_repeat(d->rc_dev);
d                1213 drivers/media/usb/dvb-usb-v2/af9015.c 		rc_keydown(d->rc_dev, proto, state->rc_keycode, 0);
d                1238 drivers/media/usb/dvb-usb-v2/af9015.c static int af9015_get_rc_config(struct dvb_usb_device *d, struct dvb_usb_rc *rc)
d                1240 drivers/media/usb/dvb-usb-v2/af9015.c 	struct af9015_state *state = d_to_priv(d);
d                1241 drivers/media/usb/dvb-usb-v2/af9015.c 	u16 vid = le16_to_cpu(d->udev->descriptor.idVendor);
d                1267 drivers/media/usb/dvb-usb-v2/af9015.c 		usb_string(d->udev, d->udev->descriptor.iManufacturer,
d                1296 drivers/media/usb/dvb-usb-v2/af9015.c 	struct dvb_usb_device *d = context;
d                1297 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                1304 drivers/media/usb/dvb-usb-v2/af9015.c 	ret = af9015_ctrl_msg(d, &req);
d                1317 drivers/media/usb/dvb-usb-v2/af9015.c 	struct dvb_usb_device *d = context;
d                1318 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                1325 drivers/media/usb/dvb-usb-v2/af9015.c 	ret = af9015_ctrl_msg(d, &req);
d                1335 drivers/media/usb/dvb-usb-v2/af9015.c static int af9015_probe(struct dvb_usb_device *d)
d                1337 drivers/media/usb/dvb-usb-v2/af9015.c 	struct af9015_state *state = d_to_priv(d);
d                1338 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                1382 drivers/media/usb/dvb-usb-v2/af9015.c 	state->regmap = regmap_init(&intf->dev, &regmap_bus, d, &regmap_config);
d                1394 drivers/media/usb/dvb-usb-v2/af9015.c static void af9015_disconnect(struct dvb_usb_device *d)
d                1396 drivers/media/usb/dvb-usb-v2/af9015.c 	struct af9015_state *state = d_to_priv(d);
d                1397 drivers/media/usb/dvb-usb-v2/af9015.c 	struct usb_interface *intf = d->intf;
d                  32 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_ctrl_msg(struct dvb_usb_device *d, struct usb_req *req)
d                  38 drivers/media/usb/dvb-usb-v2/af9035.c 	struct state *state = d_to_priv(d);
d                  39 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                  43 drivers/media/usb/dvb-usb-v2/af9035.c 	mutex_lock(&d->usb_mutex);
d                  72 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = dvb_usbv2_generic_rw_locked(d,
d                 109 drivers/media/usb/dvb-usb-v2/af9035.c 	mutex_unlock(&d->usb_mutex);
d                 114 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_wr_regs(struct dvb_usb_device *d, u32 reg, u8 *val, int len)
d                 116 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                 134 drivers/media/usb/dvb-usb-v2/af9035.c 	return af9035_ctrl_msg(d, &req);
d                 138 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_rd_regs(struct dvb_usb_device *d, u32 reg, u8 *val, int len)
d                 144 drivers/media/usb/dvb-usb-v2/af9035.c 	return af9035_ctrl_msg(d, &req);
d                 148 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_wr_reg(struct dvb_usb_device *d, u32 reg, u8 val)
d                 150 drivers/media/usb/dvb-usb-v2/af9035.c 	return af9035_wr_regs(d, reg, &val, 1);
d                 154 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_rd_reg(struct dvb_usb_device *d, u32 reg, u8 *val)
d                 156 drivers/media/usb/dvb-usb-v2/af9035.c 	return af9035_rd_regs(d, reg, val, 1);
d                 160 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_wr_reg_mask(struct dvb_usb_device *d, u32 reg, u8 val,
d                 168 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_rd_regs(d, reg, &tmp, 1);
d                 177 drivers/media/usb/dvb-usb-v2/af9035.c 	return af9035_wr_regs(d, reg, &val, 1);
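af9035_wr_reg_mask() above is a read-modify-write helper: for a partial mask it first reads the current register byte (the af9035_rd_regs() call), merges the new bits under the mask, and writes the result back with af9035_wr_regs(); the same shape recurs for anysee_wr_reg_mask() further down. The merge step itself is not visible in this listing, so the sketch below fills it in with the conventional expression; treat it as an assumed illustration of the pattern, not a quote of the driver.

#include <stdint.h>

/* Conventional read-modify-write under a bit mask, mirroring the
 * rd_regs()/wr_regs() pairing listed above.  read_reg()/write_reg()
 * are stand-ins; the merge expression is the standard idiom, assumed. */
static int write_reg_mask(int (*read_reg)(uint32_t reg, uint8_t *val),
			  int (*write_reg)(uint32_t reg, uint8_t val),
			  uint32_t reg, uint8_t val, uint8_t mask)
{
	uint8_t tmp;
	int ret;

	if (mask != 0xff) {		/* partial update: need the old bits */
		ret = read_reg(reg, &tmp);
		if (ret)
			return ret;

		val &= mask;		/* keep only the bits being written */
		tmp &= ~mask;		/* clear them in the current value */
		val |= tmp;		/* merge old and new */
	}

	return write_reg(reg, val);	/* full-mask writes skip the read */
}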
d                 180 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_add_i2c_dev(struct dvb_usb_device *d, const char *type,
d                 184 drivers/media/usb/dvb-usb-v2/af9035.c 	struct state *state = d_to_priv(d);
d                 185 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                 231 drivers/media/usb/dvb-usb-v2/af9035.c static void af9035_del_i2c_dev(struct dvb_usb_device *d)
d                 234 drivers/media/usb/dvb-usb-v2/af9035.c 	struct state *state = d_to_priv(d);
d                 235 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                 269 drivers/media/usb/dvb-usb-v2/af9035.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 270 drivers/media/usb/dvb-usb-v2/af9035.c 	struct state *state = d_to_priv(d);
d                 273 drivers/media/usb/dvb-usb-v2/af9035.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 331 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_rd_regs(d, reg, &msg[1].buf[0],
d                 375 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_ctrl_msg(d, &req);
d                 390 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = (msg[0].len >= 3) ? af9035_wr_regs(d, reg,
d                 418 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_ctrl_msg(d, &req);
d                 448 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_ctrl_msg(d, &req);
d                 460 drivers/media/usb/dvb-usb-v2/af9035.c 	mutex_unlock(&d->i2c_mutex);
d                 478 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_identify_state(struct dvb_usb_device *d, const char **name)
d                 480 drivers/media/usb/dvb-usb-v2/af9035.c 	struct state *state = d_to_priv(d);
d                 481 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                 490 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_rd_regs(d, 0x1222, rbuf, 3);
d                 497 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_rd_reg(d, 0x384f, &state->prechip_version);
d                 514 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_rd_reg(d, utmp, &tmp);
d                 536 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_rd_regs(d, eeprom_addr + i, &state->eeprom[i], 32);
d                 571 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_ctrl_msg(d, &req);
d                 589 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_download_firmware_old(struct dvb_usb_device *d,
d                 592 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                 634 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_ctrl_msg(d, &req);
d                 646 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_ctrl_msg(d, &req_fw_dl);
d                 653 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_ctrl_msg(d, &req);
d                 674 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_download_firmware_new(struct dvb_usb_device *d,
d                 677 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                 703 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_ctrl_msg(d, &req_fw_dl);
d                 719 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_download_firmware(struct dvb_usb_device *d,
d                 722 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                 723 drivers/media/usb/dvb-usb-v2/af9035.c 	struct state *state = d_to_priv(d);
d                 741 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0x00d8b0, 0x01, 0x01);
d                 745 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0x00d8b1, 0x01, 0x01);
d                 749 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0x00d8af, 0x00, 0x01);
d                 755 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0x00d8af, 0x01, 0x01);
d                 768 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_wr_reg(d, 0x004bfb, tmp);
d                 772 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_wr_reg(d, 0x00417f, tmp);
d                 777 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_wr_reg_mask(d, 0x00d81a, 0x01, 0x01);
d                 784 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_download_firmware_old(d, fw);
d                 786 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_download_firmware_new(d, fw);
d                 792 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_ctrl_msg(d, &req);
d                 798 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_ctrl_msg(d, &req_fw_ver);
d                 819 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_read_config(struct dvb_usb_device *d)
d                 821 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                 822 drivers/media/usb/dvb-usb-v2/af9035.c 	struct state *state = d_to_priv(d);
d                 861 drivers/media/usb/dvb-usb-v2/af9035.c 		if ((le16_to_cpu(d->udev->descriptor.idVendor) == USB_VID_AVERMEDIA) &&
d                 862 drivers/media/usb/dvb-usb-v2/af9035.c 		    (le16_to_cpu(d->udev->descriptor.idProduct) == USB_PID_AVERMEDIA_TD310)) {
d                 970 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_rd_reg(d, 0x00d800, &tmp);
d                 986 drivers/media/usb/dvb-usb-v2/af9035.c 		le16_to_cpu(d->udev->descriptor.idVendor) == USB_VID_AVERMEDIA)
d                 988 drivers/media/usb/dvb-usb-v2/af9035.c 		switch (le16_to_cpu(d->udev->descriptor.idProduct)) {
d                1005 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_tua9001_tuner_callback(struct dvb_usb_device *d,
d                1008 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                1027 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0x00d8e7, val, 0x01);
d                1037 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0x00d8eb, val, 0x01);
d                1052 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_fc0011_tuner_callback(struct dvb_usb_device *d,
d                1055 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                1061 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0xd8eb, 1, 1);
d                1065 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0xd8ec, 1, 1);
d                1069 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0xd8ed, 1, 1);
d                1074 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0xd8d0, 1, 1);
d                1078 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0xd8d1, 1, 1);
d                1085 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg(d, 0xd8e9, 1);
d                1089 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg(d, 0xd8e8, 1);
d                1093 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg(d, 0xd8e7, 1);
d                1099 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg(d, 0xd8e7, 0);
d                1118 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_tuner_callback(struct dvb_usb_device *d, int cmd, int arg)
d                1120 drivers/media/usb/dvb-usb-v2/af9035.c 	struct state *state = d_to_priv(d);
d                1124 drivers/media/usb/dvb-usb-v2/af9035.c 		return af9035_fc0011_tuner_callback(d, cmd, arg);
d                1126 drivers/media/usb/dvb-usb-v2/af9035.c 		return af9035_tua9001_tuner_callback(d, cmd, arg);
d                1138 drivers/media/usb/dvb-usb-v2/af9035.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                1139 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                1146 drivers/media/usb/dvb-usb-v2/af9035.c 		return af9035_tuner_callback(d, cmd, arg);
d                1154 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_get_adapter_count(struct dvb_usb_device *d)
d                1156 drivers/media/usb/dvb-usb-v2/af9035.c 	struct state *state = d_to_priv(d);
d                1164 drivers/media/usb/dvb-usb-v2/af9035.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                1165 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                1178 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_add_i2c_dev(d, "af9033", state->af9033_i2c_addr[adap->id],
d                1179 drivers/media/usb/dvb-usb-v2/af9035.c 			&state->af9033_config[adap->id], &d->i2c_adap);
d                1203 drivers/media/usb/dvb-usb-v2/af9035.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                1204 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                1212 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_wr_reg(d, 0x00f6a7, 0x07);
d                1217 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_wr_reg(d, 0x00f103, 0x07);
d                1222 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_wr_reg_mask(d, 0xd8d4, 0x01, 0x01);
d                1226 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_wr_reg_mask(d, 0xd8d5, 0x01, 0x01);
d                1230 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_wr_reg_mask(d, 0xd8d3, 0x01, 0x01);
d                1235 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_wr_reg_mask(d, 0xd8b8, 0x01, 0x01);
d                1239 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_wr_reg_mask(d, 0xd8b9, 0x01, 0x01);
d                1243 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_wr_reg_mask(d, 0xd8b7, 0x00, 0x01);
d                1249 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_wr_reg_mask(d, 0xd8b7, 0x01, 0x01);
d                1260 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_add_i2c_dev(d, "si2168",
d                1262 drivers/media/usb/dvb-usb-v2/af9035.c 				 &si2168_config, &d->i2c_adap);
d                1283 drivers/media/usb/dvb-usb-v2/af9035.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                1284 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                1290 drivers/media/usb/dvb-usb-v2/af9035.c 			af9035_del_i2c_dev(d);
d                1293 drivers/media/usb/dvb-usb-v2/af9035.c 			af9035_del_i2c_dev(d);
d                1343 drivers/media/usb/dvb-usb-v2/af9035.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                1344 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                1369 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0x00d8ec, 0x01, 0x01);
d                1373 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0x00d8ed, 0x01, 0x01);
d                1377 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0x00d8e8, 0x01, 0x01);
d                1381 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0x00d8e9, 0x01, 0x01);
d                1386 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_add_i2c_dev(d, "tua9001", 0x60, &tua9001_pdata,
d                1387 drivers/media/usb/dvb-usb-v2/af9035.c 					 &d->i2c_adap);
d                1396 drivers/media/usb/dvb-usb-v2/af9035.c 				&d->i2c_adap, &af9035_fc0011_config);
d                1400 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_wr_reg(d, 0x00d8e0, 1);
d                1404 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_wr_reg(d, 0x00d8e1, 1);
d                1408 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_wr_reg(d, 0x00d8df, 0);
d                1414 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_wr_reg(d, 0x00d8df, 1);
d                1420 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_wr_reg(d, 0x00d8c0, 1);
d                1424 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_wr_reg(d, 0x00d8c1, 1);
d                1428 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_wr_reg(d, 0x00d8bf, 0);
d                1432 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_wr_reg(d, 0x00d8b4, 1);
d                1436 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_wr_reg(d, 0x00d8b5, 1);
d                1440 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_wr_reg(d, 0x00d8b3, 1);
d                1450 drivers/media/usb/dvb-usb-v2/af9035.c 		fe = dvb_attach(mxl5007t_attach, adap->fe[0], &d->i2c_adap,
d                1456 drivers/media/usb/dvb-usb-v2/af9035.c 				&d->i2c_adap, &af9035_tda18218_config);
d                1464 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0xd8eb, 0x01, 0x01);
d                1468 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0xd8ec, 0x01, 0x01);
d                1472 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, 0xd8ed, 0x01, 0x01);
d                1478 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_add_i2c_dev(d, "fc2580", 0x56, &fc2580_pdata,
d                1479 drivers/media/usb/dvb-usb-v2/af9035.c 					 &d->i2c_adap);
d                1495 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_wr_reg_mask(d, 0xd8eb, 0x01, 0x01);
d                1499 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_wr_reg_mask(d, 0xd8ec, 0x01, 0x01);
d                1503 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = af9035_wr_reg_mask(d, 0xd8ed, 0x01, 0x01);
d                1516 drivers/media/usb/dvb-usb-v2/af9035.c 			ret = i2c_transfer(&d->i2c_adap, msg, 1);
d                1523 drivers/media/usb/dvb-usb-v2/af9035.c 		fe = dvb_attach(fc0012_attach, adap->fe[0], &d->i2c_adap,
d                1566 drivers/media/usb/dvb-usb-v2/af9035.c 		pdev = platform_device_register_data(&d->intf->dev, name,
d                1604 drivers/media/usb/dvb-usb-v2/af9035.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                1605 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                1614 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_add_i2c_dev(d, "si2157",
d                1632 drivers/media/usb/dvb-usb-v2/af9035.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                1633 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                1639 drivers/media/usb/dvb-usb-v2/af9035.c 			af9035_del_i2c_dev(d);
d                1642 drivers/media/usb/dvb-usb-v2/af9035.c 			af9035_del_i2c_dev(d);
d                1652 drivers/media/usb/dvb-usb-v2/af9035.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                1653 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                1662 drivers/media/usb/dvb-usb-v2/af9035.c 				af9035_del_i2c_dev(d);
d                1665 drivers/media/usb/dvb-usb-v2/af9035.c 				af9035_del_i2c_dev(d);
d                1689 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_init(struct dvb_usb_device *d)
d                1691 drivers/media/usb/dvb-usb-v2/af9035.c 	struct state *state = d_to_priv(d);
d                1692 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                1694 drivers/media/usb/dvb-usb-v2/af9035.c 	u16 frame_size = (d->udev->speed == USB_SPEED_FULL ? 5 : 87) * 188 / 4;
d                1695 drivers/media/usb/dvb-usb-v2/af9035.c 	u8 packet_size = (d->udev->speed == USB_SPEED_FULL ? 64 : 512) / 4;
d                1718 drivers/media/usb/dvb-usb-v2/af9035.c 		d->udev->speed, frame_size, packet_size);
d                1722 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, tab[i].reg, tab[i].val,
d                1736 drivers/media/usb/dvb-usb-v2/af9035.c static int it930x_init(struct dvb_usb_device *d)
d                1738 drivers/media/usb/dvb-usb-v2/af9035.c 	struct state *state = d_to_priv(d);
d                1739 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                1741 drivers/media/usb/dvb-usb-v2/af9035.c 	u16 frame_size = (d->udev->speed == USB_SPEED_FULL ? 5 : 816) * 188 / 4;
d                1742 drivers/media/usb/dvb-usb-v2/af9035.c 	u8 packet_size = (d->udev->speed == USB_SPEED_FULL ? 64 : 512) / 4;
d                1800 drivers/media/usb/dvb-usb-v2/af9035.c 		d->udev->speed, frame_size, packet_size);
d                1804 drivers/media/usb/dvb-usb-v2/af9035.c 		ret = af9035_wr_reg_mask(d, tab[i].reg,
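The af9035_init() and it930x_init() entries above size the USB transport from the bus speed: frame_size is (5 for full speed, otherwise 87 or 816 TS packets) * 188 / 4 and packet_size is (64 or 512) / 4, i.e. counts expressed in 32-bit words. Worked out, full speed gives frame_size 235 and packet_size 16; high speed gives frame_size 4089 (af9035) or 38352 (it930x) and packet_size 128. The snippet below simply re-derives those numbers from the listed expressions, as an arithmetic check only.

#include <stdio.h>

/* Re-derive frame_size/packet_size from the expressions in the listing;
 * "full" mirrors the USB_SPEED_FULL test, ts_packets_high is 87 (af9035)
 * or 816 (it930x). */
static void show(const char *name, int full, unsigned int ts_packets_high)
{
	unsigned int frame_size  = (full ? 5 : ts_packets_high) * 188 / 4;
	unsigned int packet_size = (full ? 64 : 512) / 4;

	printf("%s: frame_size=%u packet_size=%u\n", name, frame_size, packet_size);
}

int main(void)
{
	show("af9035 full speed", 1, 87);	/* 235, 16 */
	show("af9035 high speed", 0, 87);	/* 4089, 128 */
	show("it930x high speed", 0, 816);	/* 38352, 128 */
	return 0;
}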
d                1820 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_rc_query(struct dvb_usb_device *d)
d                1822 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                1829 drivers/media/usb/dvb-usb-v2/af9035.c 	ret = af9035_ctrl_msg(d, &req);
d                1854 drivers/media/usb/dvb-usb-v2/af9035.c 	rc_keydown(d->rc_dev, proto, key, 0);
d                1864 drivers/media/usb/dvb-usb-v2/af9035.c static int af9035_get_rc_config(struct dvb_usb_device *d, struct dvb_usb_rc *rc)
d                1866 drivers/media/usb/dvb-usb-v2/af9035.c 	struct state *state = d_to_priv(d);
d                1867 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                1902 drivers/media/usb/dvb-usb-v2/af9035.c 	struct dvb_usb_device *d = fe_to_d(fe);
d                1903 drivers/media/usb/dvb-usb-v2/af9035.c 	struct usb_interface *intf = d->intf;
d                1907 drivers/media/usb/dvb-usb-v2/af9035.c 	if (d->udev->speed == USB_SPEED_FULL)
d                  36 drivers/media/usb/dvb-usb-v2/anysee.c static int anysee_ctrl_msg(struct dvb_usb_device *d,
d                  39 drivers/media/usb/dvb-usb-v2/anysee.c 	struct anysee_state *state = d_to_priv(d);
d                  42 drivers/media/usb/dvb-usb-v2/anysee.c 	mutex_lock(&d->usb_mutex);
d                  47 drivers/media/usb/dvb-usb-v2/anysee.c 	dev_dbg(&d->udev->dev, "%s: >>> %*ph\n", __func__, slen, state->buf);
d                  51 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = dvb_usbv2_generic_rw_locked(d, state->buf, sizeof(state->buf),
d                  70 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = usb_bulk_msg(d->udev, usb_rcvbulkpipe(d->udev,
d                  71 drivers/media/usb/dvb-usb-v2/anysee.c 				d->props->generic_bulk_ctrl_endpoint),
d                  74 drivers/media/usb/dvb-usb-v2/anysee.c 			dev_dbg(&d->udev->dev,
d                  78 drivers/media/usb/dvb-usb-v2/anysee.c 			dev_dbg(&d->udev->dev, "%s: <<< %*ph\n", __func__,
d                  82 drivers/media/usb/dvb-usb-v2/anysee.c 				dev_dbg(&d->udev->dev,
d                  90 drivers/media/usb/dvb-usb-v2/anysee.c 		dev_err(&d->udev->dev, "%s: recv bulk message failed=%d\n",
d                 100 drivers/media/usb/dvb-usb-v2/anysee.c 	mutex_unlock(&d->usb_mutex);
d                 104 drivers/media/usb/dvb-usb-v2/anysee.c static int anysee_read_reg(struct dvb_usb_device *d, u16 reg, u8 *val)
d                 108 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_ctrl_msg(d, buf, sizeof(buf), val, 1);
d                 109 drivers/media/usb/dvb-usb-v2/anysee.c 	dev_dbg(&d->udev->dev, "%s: reg=%04x val=%02x\n", __func__, reg, *val);
d                 113 drivers/media/usb/dvb-usb-v2/anysee.c static int anysee_write_reg(struct dvb_usb_device *d, u16 reg, u8 val)
d                 116 drivers/media/usb/dvb-usb-v2/anysee.c 	dev_dbg(&d->udev->dev, "%s: reg=%04x val=%02x\n", __func__, reg, val);
d                 117 drivers/media/usb/dvb-usb-v2/anysee.c 	return anysee_ctrl_msg(d, buf, sizeof(buf), NULL, 0);
d                 121 drivers/media/usb/dvb-usb-v2/anysee.c static int anysee_wr_reg_mask(struct dvb_usb_device *d, u16 reg, u8 val,
d                 129 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_read_reg(d, reg, &tmp);
d                 138 drivers/media/usb/dvb-usb-v2/anysee.c 	return anysee_write_reg(d, reg, val);
d                 142 drivers/media/usb/dvb-usb-v2/anysee.c static int anysee_rd_reg_mask(struct dvb_usb_device *d, u16 reg, u8 *val,
d                 148 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_read_reg(d, reg, &tmp);
d                 164 drivers/media/usb/dvb-usb-v2/anysee.c static int anysee_get_hw_info(struct dvb_usb_device *d, u8 *id)
d                 167 drivers/media/usb/dvb-usb-v2/anysee.c 	return anysee_ctrl_msg(d, buf, sizeof(buf), id, 3);
d                 177 drivers/media/usb/dvb-usb-v2/anysee.c static int anysee_led_ctrl(struct dvb_usb_device *d, u8 mode, u8 interval)
d                 180 drivers/media/usb/dvb-usb-v2/anysee.c 	dev_dbg(&d->udev->dev, "%s: state=%d interval=%d\n", __func__,
d                 182 drivers/media/usb/dvb-usb-v2/anysee.c 	return anysee_ctrl_msg(d, buf, sizeof(buf), NULL, 0);
d                 185 drivers/media/usb/dvb-usb-v2/anysee.c static int anysee_ir_ctrl(struct dvb_usb_device *d, u8 onoff)
d                 188 drivers/media/usb/dvb-usb-v2/anysee.c 	dev_dbg(&d->udev->dev, "%s: onoff=%d\n", __func__, onoff);
d                 189 drivers/media/usb/dvb-usb-v2/anysee.c 	return anysee_ctrl_msg(d, buf, sizeof(buf), NULL, 0);
d                 196 drivers/media/usb/dvb-usb-v2/anysee.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 200 drivers/media/usb/dvb-usb-v2/anysee.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 215 drivers/media/usb/dvb-usb-v2/anysee.c 			ret = anysee_ctrl_msg(d, buf, 6, msg[i+1].buf,
d                 228 drivers/media/usb/dvb-usb-v2/anysee.c 			ret = anysee_ctrl_msg(d, buf, 4 + msg[i].len, NULL, 0);
d                 237 drivers/media/usb/dvb-usb-v2/anysee.c 	mutex_unlock(&d->i2c_mutex);
d                 489 drivers/media/usb/dvb-usb-v2/anysee.c static int anysee_read_config(struct dvb_usb_device *d)
d                 491 drivers/media/usb/dvb-usb-v2/anysee.c 	struct anysee_state *state = d_to_priv(d);
d                 499 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_get_hw_info(d, hw_info);
d                 503 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_get_hw_info(d, hw_info);
d                 510 drivers/media/usb/dvb-usb-v2/anysee.c 	dev_info(&d->udev->dev, "%s: firmware version %d.%d hardware id %d\n",
d                 528 drivers/media/usb/dvb-usb-v2/anysee.c 	struct dvb_usb_device *d = fe_to_d(fe);
d                 530 drivers/media/usb/dvb-usb-v2/anysee.c 	dev_dbg(&d->udev->dev, "%s: fe=%d onoff=%d\n", __func__, fe->id, onoff);
d                 543 drivers/media/usb/dvb-usb-v2/anysee.c 			ret = anysee_wr_reg_mask(d, REG_IOD, (0 << 0), 0x01);
d                 548 drivers/media/usb/dvb-usb-v2/anysee.c 			ret = anysee_wr_reg_mask(d, REG_IOD, (1 << 5), 0x20);
d                 553 drivers/media/usb/dvb-usb-v2/anysee.c 			ret = anysee_wr_reg_mask(d, REG_IOE, (1 << 0), 0x01);
d                 558 drivers/media/usb/dvb-usb-v2/anysee.c 			ret = anysee_wr_reg_mask(d, REG_IOD, (0 << 5), 0x20);
d                 563 drivers/media/usb/dvb-usb-v2/anysee.c 			ret = anysee_wr_reg_mask(d, REG_IOD, (1 << 0), 0x01);
d                 568 drivers/media/usb/dvb-usb-v2/anysee.c 			ret = anysee_wr_reg_mask(d, REG_IOE, (0 << 0), 0x01);
d                 581 drivers/media/usb/dvb-usb-v2/anysee.c 			ret = anysee_wr_reg_mask(d, REG_IOD, (0 << 6), 0x40);
d                 586 drivers/media/usb/dvb-usb-v2/anysee.c 			ret = anysee_wr_reg_mask(d, REG_IOD, (1 << 5), 0x20);
d                 591 drivers/media/usb/dvb-usb-v2/anysee.c 			ret = anysee_wr_reg_mask(d, REG_IOE, (1 << 0), 0x01);
d                 596 drivers/media/usb/dvb-usb-v2/anysee.c 			ret = anysee_wr_reg_mask(d, REG_IOD, (0 << 5), 0x20);
d                 601 drivers/media/usb/dvb-usb-v2/anysee.c 			ret = anysee_wr_reg_mask(d, REG_IOD, (1 << 6), 0x40);
d                 606 drivers/media/usb/dvb-usb-v2/anysee.c 			ret = anysee_wr_reg_mask(d, REG_IOE, (0 << 0), 0x01);
d                 620 drivers/media/usb/dvb-usb-v2/anysee.c static int anysee_add_i2c_dev(struct dvb_usb_device *d, const char *type,
d                 624 drivers/media/usb/dvb-usb-v2/anysee.c 	struct anysee_state *state = d_to_priv(d);
d                 626 drivers/media/usb/dvb-usb-v2/anysee.c 	struct i2c_adapter *adapter = &d->i2c_adap;
d                 640 drivers/media/usb/dvb-usb-v2/anysee.c 	dev_dbg(&d->udev->dev, "%s: num=%d\n", __func__, num);
d                 643 drivers/media/usb/dvb-usb-v2/anysee.c 		dev_err(&d->udev->dev, "%s: I2C client out of index\n",
d                 668 drivers/media/usb/dvb-usb-v2/anysee.c 	dev_dbg(&d->udev->dev, "%s: failed=%d\n", __func__, ret);
d                 672 drivers/media/usb/dvb-usb-v2/anysee.c static void anysee_del_i2c_dev(struct dvb_usb_device *d)
d                 675 drivers/media/usb/dvb-usb-v2/anysee.c 	struct anysee_state *state = d_to_priv(d);
d                 685 drivers/media/usb/dvb-usb-v2/anysee.c 	dev_dbg(&d->udev->dev, "%s: num=%d\n", __func__, num);
d                 688 drivers/media/usb/dvb-usb-v2/anysee.c 		dev_err(&d->udev->dev, "%s: I2C client out of index\n",
d                 703 drivers/media/usb/dvb-usb-v2/anysee.c 	dev_dbg(&d->udev->dev, "%s: failed\n", __func__);
d                 709 drivers/media/usb/dvb-usb-v2/anysee.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 732 drivers/media/usb/dvb-usb-v2/anysee.c 				&d->i2c_adap);
d                 738 drivers/media/usb/dvb-usb-v2/anysee.c 				&d->i2c_adap);
d                 745 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_wr_reg_mask(d, REG_IOD, (1 << 0), 0x01);
d                 750 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_wr_reg_mask(d, REG_IOA, (0 << 7), 0x80);
d                 756 drivers/media/usb/dvb-usb-v2/anysee.c 				&d->i2c_adap);
d                 763 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_wr_reg_mask(d, REG_IOD, (1 << 0), 0x01);
d                 769 drivers/media/usb/dvb-usb-v2/anysee.c 				&anysee_tda10023_config, &d->i2c_adap, 0x48);
d                 776 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_wr_reg_mask(d, REG_IOD, (1 << 0), 0x01);
d                 782 drivers/media/usb/dvb-usb-v2/anysee.c 				&d->i2c_adap);
d                 790 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_wr_reg_mask(d, REG_IOE, (1 << 4), 0x10);
d                 796 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = i2c_transfer(&d->i2c_adap, msg, 2);
d                 798 drivers/media/usb/dvb-usb-v2/anysee.c 			dev_dbg(&d->udev->dev, "%s: TDA18212 found\n",
d                 806 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_wr_reg_mask(d, REG_IOE, (0 << 4), 0x10);
d                 811 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_wr_reg_mask(d, REG_IOD, (0 << 0), 0x01);
d                 816 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_wr_reg_mask(d, REG_IOD, (1 << 5), 0x20);
d                 825 drivers/media/usb/dvb-usb-v2/anysee.c 					&d->i2c_adap, 0x48);
d                 835 drivers/media/usb/dvb-usb-v2/anysee.c 					&d->i2c_adap, 0x48);
d                 843 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_wr_reg_mask(d, REG_IOD, (0 << 5), 0x20);
d                 848 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_wr_reg_mask(d, REG_IOD, (1 << 0), 0x01);
d                 857 drivers/media/usb/dvb-usb-v2/anysee.c 					&d->i2c_adap);
d                 867 drivers/media/usb/dvb-usb-v2/anysee.c 					&d->i2c_adap);
d                 877 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_wr_reg_mask(d, REG_IOD, (0 << 6), 0x40);
d                 882 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_wr_reg_mask(d, REG_IOD, (1 << 5), 0x20);
d                 889 drivers/media/usb/dvb-usb-v2/anysee.c 				&d->i2c_adap, 0x48);
d                 900 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_wr_reg_mask(d, REG_IOD, (0 << 5), 0x20);
d                 905 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_wr_reg_mask(d, REG_IOD, (1 << 6), 0x40);
d                 912 drivers/media/usb/dvb-usb-v2/anysee.c 				&d->i2c_adap);
d                 927 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_wr_reg_mask(d, REG_IOE, (1 << 5), 0x20);
d                 933 drivers/media/usb/dvb-usb-v2/anysee.c 				&anysee_stv0900_config, &d->i2c_adap, 0);
d                 942 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_wr_reg_mask(d, REG_IOE, (1 << 5), 0x20);
d                 948 drivers/media/usb/dvb-usb-v2/anysee.c 				&anysee_cxd2820r_config, &d->i2c_adap, NULL);
d                 958 drivers/media/usb/dvb-usb-v2/anysee.c 		dev_err(&d->udev->dev,
d                 969 drivers/media/usb/dvb-usb-v2/anysee.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 972 drivers/media/usb/dvb-usb-v2/anysee.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                 988 drivers/media/usb/dvb-usb-v2/anysee.c 				&d->i2c_adap, DVB_PLL_THOMSON_DTT7579);
d                 996 drivers/media/usb/dvb-usb-v2/anysee.c 				&d->i2c_adap, DVB_PLL_SAMSUNG_DTOS403IH102A);
d                1003 drivers/media/usb/dvb-usb-v2/anysee.c 		fe = dvb_attach(isl6423_attach, adap->fe[0], &d->i2c_adap,
d                1020 drivers/media/usb/dvb-usb-v2/anysee.c 			ret = anysee_add_i2c_dev(d, "tda18212", 0x60,
d                1038 drivers/media/usb/dvb-usb-v2/anysee.c 					(0xc0 >> 1), &d->i2c_adap,
d                1044 drivers/media/usb/dvb-usb-v2/anysee.c 						(0xc0 >> 1), &d->i2c_adap,
d                1058 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_add_i2c_dev(d, "tda18212", 0x60, &tda18212_config);
d                1079 drivers/media/usb/dvb-usb-v2/anysee.c 				&anysee_stv6110_config, &d->i2c_adap);
d                1084 drivers/media/usb/dvb-usb-v2/anysee.c 					&d->i2c_adap, &anysee_isl6423_config);
d                1096 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_add_i2c_dev(d, "tda18212", 0x60, &tda18212_config);
d                1115 drivers/media/usb/dvb-usb-v2/anysee.c static int anysee_rc_query(struct dvb_usb_device *d)
d                1129 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_ctrl_msg(d, buf, sizeof(buf), ircode, sizeof(ircode));
d                1134 drivers/media/usb/dvb-usb-v2/anysee.c 		dev_dbg(&d->udev->dev, "%s: key pressed %02x\n", __func__,
d                1136 drivers/media/usb/dvb-usb-v2/anysee.c 		rc_keydown(d->rc_dev, RC_PROTO_NEC,
d                1143 drivers/media/usb/dvb-usb-v2/anysee.c static int anysee_get_rc_config(struct dvb_usb_device *d, struct dvb_usb_rc *rc)
d                1158 drivers/media/usb/dvb-usb-v2/anysee.c 	struct dvb_usb_device *d = ci->data;
d                1163 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_ctrl_msg(d, buf, sizeof(buf), &val, 1);
d                1173 drivers/media/usb/dvb-usb-v2/anysee.c 	struct dvb_usb_device *d = ci->data;
d                1177 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_ctrl_msg(d, buf, sizeof(buf), NULL, 0);
d                1187 drivers/media/usb/dvb-usb-v2/anysee.c 	struct dvb_usb_device *d = ci->data;
d                1192 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_ctrl_msg(d, buf, sizeof(buf), &val, 1);
d                1202 drivers/media/usb/dvb-usb-v2/anysee.c 	struct dvb_usb_device *d = ci->data;
d                1206 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_ctrl_msg(d, buf, sizeof(buf), NULL, 0);
d                1215 drivers/media/usb/dvb-usb-v2/anysee.c 	struct dvb_usb_device *d = ci->data;
d                1217 drivers/media/usb/dvb-usb-v2/anysee.c 	struct anysee_state *state = d_to_priv(d);
d                1221 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_wr_reg_mask(d, REG_IOA, (0 << 7), 0x80);
d                1227 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_wr_reg_mask(d, REG_IOA, (1 << 7), 0x80);
d                1236 drivers/media/usb/dvb-usb-v2/anysee.c 	struct dvb_usb_device *d = ci->data;
d                1239 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_wr_reg_mask(d, REG_IOA, (0 << 7), 0x80);
d                1245 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_wr_reg_mask(d, REG_IOA, (1 << 7), 0x80);
d                1254 drivers/media/usb/dvb-usb-v2/anysee.c 	struct dvb_usb_device *d = ci->data;
d                1257 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_wr_reg_mask(d, REG_IOD, (0 << 1), 0x02);
d                1267 drivers/media/usb/dvb-usb-v2/anysee.c 	struct dvb_usb_device *d = ci->data;
d                1268 drivers/media/usb/dvb-usb-v2/anysee.c 	struct anysee_state *state = d_to_priv(d);
d                1272 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_rd_reg_mask(d, REG_IOC, &tmp, 0x40);
d                1285 drivers/media/usb/dvb-usb-v2/anysee.c static int anysee_ci_init(struct dvb_usb_device *d)
d                1287 drivers/media/usb/dvb-usb-v2/anysee.c 	struct anysee_state *state = d_to_priv(d);
d                1299 drivers/media/usb/dvb-usb-v2/anysee.c 	state->ci.data                = d;
d                1301 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_wr_reg_mask(d, REG_IOA, (1 << 7), 0x80);
d                1305 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_wr_reg_mask(d, REG_IOD, (0 << 2)|(0 << 1)|(0 << 0), 0x07);
d                1309 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_wr_reg_mask(d, REG_IOD, (1 << 2)|(1 << 1)|(1 << 0), 0x07);
d                1313 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = dvb_ca_en50221_init(&d->adapter[0].dvb_adap, &state->ci, 0, 1);
d                1322 drivers/media/usb/dvb-usb-v2/anysee.c static void anysee_ci_release(struct dvb_usb_device *d)
d                1324 drivers/media/usb/dvb-usb-v2/anysee.c 	struct anysee_state *state = d_to_priv(d);
d                1333 drivers/media/usb/dvb-usb-v2/anysee.c static int anysee_init(struct dvb_usb_device *d)
d                1335 drivers/media/usb/dvb-usb-v2/anysee.c 	struct anysee_state *state = d_to_priv(d);
d                1342 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = usb_set_interface(d->udev, 0, 0);
d                1347 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_led_ctrl(d, 0x01, 0x03);
d                1352 drivers/media/usb/dvb-usb-v2/anysee.c 	ret = anysee_ir_ctrl(d, 1);
d                1358 drivers/media/usb/dvb-usb-v2/anysee.c 		ret = anysee_ci_init(d);
d                1366 drivers/media/usb/dvb-usb-v2/anysee.c static void anysee_exit(struct dvb_usb_device *d)
d                1368 drivers/media/usb/dvb-usb-v2/anysee.c 	struct anysee_state *state = d_to_priv(d);
d                1371 drivers/media/usb/dvb-usb-v2/anysee.c 		anysee_del_i2c_dev(d);
d                1373 drivers/media/usb/dvb-usb-v2/anysee.c 	return anysee_ci_release(d);
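
The anysee entries above toggle the CI slot and frontend wiring through masked writes to the bridge's GPIO registers (REG_IOA, REG_IOD, REG_IOE). A minimal sketch of that reset-pulse idiom follows, assuming a helper with the four-argument shape seen in the calls (device, register, value, bit mask); wr_reg_mask() is a stand-in name and the msleep() timing is illustrative only, not taken from the driver.

/*
 * Hypothetical sketch of the masked-write reset pulse used around the CI
 * slot in the anysee entries; wr_reg_mask() stands in for a driver helper
 * of the form wr_reg_mask(d, reg, val, mask).
 */
static int demo_ci_slot_reset(struct dvb_usb_device *d)
{
	int ret;

	ret = wr_reg_mask(d, REG_IOA, 0 << 7, 0x80);	/* drive reset low */
	if (ret)
		return ret;

	msleep(100);			/* hold time illustrative only */

	return wr_reg_mask(d, REG_IOA, 1 << 7, 0x80);	/* release reset */
}
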
d                  14 drivers/media/usb/dvb-usb-v2/au6610.c static int au6610_usb_msg(struct dvb_usb_device *d, u8 operation, u8 addr,
d                  38 drivers/media/usb/dvb-usb-v2/au6610.c 		dev_err(&d->udev->dev, "%s: wlen=%d, aborting\n",
d                  44 drivers/media/usb/dvb-usb-v2/au6610.c 	ret = usb_control_msg(d->udev, usb_rcvctrlpipe(d->udev, 0), operation,
d                  48 drivers/media/usb/dvb-usb-v2/au6610.c 	dvb_usb_dbg_usb_control_msg(d->udev, operation,
d                  66 drivers/media/usb/dvb-usb-v2/au6610.c static int au6610_i2c_msg(struct dvb_usb_device *d, u8 addr,
d                  78 drivers/media/usb/dvb-usb-v2/au6610.c 	return au6610_usb_msg(d, request, addr, wbuf, wlen, rbuf, rlen);
d                  86 drivers/media/usb/dvb-usb-v2/au6610.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                  92 drivers/media/usb/dvb-usb-v2/au6610.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                  98 drivers/media/usb/dvb-usb-v2/au6610.c 			if (au6610_i2c_msg(d, msg[i].addr, msg[i].buf,
d                 103 drivers/media/usb/dvb-usb-v2/au6610.c 		} else if (au6610_i2c_msg(d, msg[i].addr, msg[i].buf,
d                 108 drivers/media/usb/dvb-usb-v2/au6610.c 	mutex_unlock(&d->i2c_mutex);
d                 151 drivers/media/usb/dvb-usb-v2/au6610.c static int au6610_init(struct dvb_usb_device *d)
d                 155 drivers/media/usb/dvb-usb-v2/au6610.c 	return usb_set_interface(d->udev, 0, 5);
d                 125 drivers/media/usb/dvb-usb-v2/az6007.c static int az6007_read(struct dvb_usb_device *d, u8 req, u16 value,
d                 128 drivers/media/usb/dvb-usb-v2/az6007.c 	struct az6007_device_state *st = d->priv;
d                 134 drivers/media/usb/dvb-usb-v2/az6007.c 	ret = __az6007_read(d->udev, req, value, index, b, blen);
d                 172 drivers/media/usb/dvb-usb-v2/az6007.c static int az6007_write(struct dvb_usb_device *d, u8 req, u16 value,
d                 175 drivers/media/usb/dvb-usb-v2/az6007.c 	struct az6007_device_state *st = d->priv;
d                 181 drivers/media/usb/dvb-usb-v2/az6007.c 	ret = __az6007_write(d->udev, req, value, index, b, blen);
d                 190 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = fe_to_d(fe);
d                 194 drivers/media/usb/dvb-usb-v2/az6007.c 	return az6007_write(d, 0xbc, onoff, 0, NULL, 0);
d                 199 drivers/media/usb/dvb-usb-v2/az6007.c static int az6007_rc_query(struct dvb_usb_device *d)
d                 201 drivers/media/usb/dvb-usb-v2/az6007.c 	struct az6007_device_state *st = d_to_priv(d);
d                 205 drivers/media/usb/dvb-usb-v2/az6007.c 	az6007_read(d, AZ6007_READ_IR, 0, 0, st->data, 10);
d                 227 drivers/media/usb/dvb-usb-v2/az6007.c 	rc_keydown(d->rc_dev, proto, code, st->data[5]);
d                 232 drivers/media/usb/dvb-usb-v2/az6007.c static int az6007_get_rc_config(struct dvb_usb_device *d, struct dvb_usb_rc *rc)
d                 251 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 252 drivers/media/usb/dvb-usb-v2/az6007.c 	struct az6007_device_state *state = d_to_priv(d);
d                 275 drivers/media/usb/dvb-usb-v2/az6007.c 	ret = az6007_read(d, req, value, index, b, blen);
d                 293 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 294 drivers/media/usb/dvb-usb-v2/az6007.c 	struct az6007_device_state *state = d_to_priv(d);
d                 312 drivers/media/usb/dvb-usb-v2/az6007.c 	ret = az6007_write(d, req, value1, index, NULL, blen);
d                 324 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 325 drivers/media/usb/dvb-usb-v2/az6007.c 	struct az6007_device_state *state = d_to_priv(d);
d                 348 drivers/media/usb/dvb-usb-v2/az6007.c 	ret = az6007_read(d, req, value, index, b, blen);
d                 370 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 371 drivers/media/usb/dvb-usb-v2/az6007.c 	struct az6007_device_state *state = d_to_priv(d);
d                 388 drivers/media/usb/dvb-usb-v2/az6007.c 	ret = az6007_write(d, req, value1, index, NULL, blen);
d                 401 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 419 drivers/media/usb/dvb-usb-v2/az6007.c 	ret = az6007_read(d, req, value, index, b, blen);
d                 432 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 433 drivers/media/usb/dvb-usb-v2/az6007.c 	struct az6007_device_state *state = d_to_priv(d);
d                 448 drivers/media/usb/dvb-usb-v2/az6007.c 	ret = az6007_write(d, req, value, index, NULL, blen);
d                 460 drivers/media/usb/dvb-usb-v2/az6007.c 	ret = az6007_write(d, req, value, index, NULL, blen);
d                 488 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 489 drivers/media/usb/dvb-usb-v2/az6007.c 	struct az6007_device_state *state = d_to_priv(d);
d                 504 drivers/media/usb/dvb-usb-v2/az6007.c 	ret = az6007_write(d, req, value, index, NULL, blen);
d                 517 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 518 drivers/media/usb/dvb-usb-v2/az6007.c 	struct az6007_device_state *state = d_to_priv(d);
d                 536 drivers/media/usb/dvb-usb-v2/az6007.c 	ret = az6007_read(d, req, value, index, b, blen);
d                 554 drivers/media/usb/dvb-usb-v2/az6007.c static void az6007_ci_uninit(struct dvb_usb_device *d)
d                 560 drivers/media/usb/dvb-usb-v2/az6007.c 	if (NULL == d)
d                 563 drivers/media/usb/dvb-usb-v2/az6007.c 	state = d_to_priv(d);
d                 578 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 594 drivers/media/usb/dvb-usb-v2/az6007.c 	state->ca.data			= d;
d                 613 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 617 drivers/media/usb/dvb-usb-v2/az6007.c 	ret = az6007_read(d, AZ6007_READ_DATA, 6, 0, st->data, 6);
d                 629 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 634 drivers/media/usb/dvb-usb-v2/az6007.c 				 &d->i2c_adap);
d                 650 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 655 drivers/media/usb/dvb-usb-v2/az6007.c 				 &d->i2c_adap);
d                 670 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 679 drivers/media/usb/dvb-usb-v2/az6007.c 			&d->i2c_adap))
d                 688 drivers/media/usb/dvb-usb-v2/az6007.c static int az6007_power_ctrl(struct dvb_usb_device *d, int onoff)
d                 690 drivers/media/usb/dvb-usb-v2/az6007.c 	struct az6007_device_state *state = d_to_priv(d);
d                 698 drivers/media/usb/dvb-usb-v2/az6007.c 		ret = az6007_write(d, AZ6007_POWER, 0, 2, NULL, 0);
d                 702 drivers/media/usb/dvb-usb-v2/az6007.c 		ret = az6007_write(d, AZ6007_POWER, 1, 4, NULL, 0);
d                 706 drivers/media/usb/dvb-usb-v2/az6007.c 		ret = az6007_write(d, AZ6007_POWER, 1, 3, NULL, 0);
d                 710 drivers/media/usb/dvb-usb-v2/az6007.c 		ret = az6007_write(d, AZ6007_POWER, 1, 4, NULL, 0);
d                 715 drivers/media/usb/dvb-usb-v2/az6007.c 		ret = az6007_write(d, FX2_SCON1, 0, 3, NULL, 0);
d                 719 drivers/media/usb/dvb-usb-v2/az6007.c 		ret = az6007_write(d, FX2_SCON1, 1, 3, NULL, 0);
d                 723 drivers/media/usb/dvb-usb-v2/az6007.c 		ret = az6007_write(d, AZ6007_POWER, 0, 0, NULL, 0);
d                 735 drivers/media/usb/dvb-usb-v2/az6007.c 	az6007_write(d, AZ6007_POWER, 0, 0, NULL, 0);
d                 736 drivers/media/usb/dvb-usb-v2/az6007.c 	az6007_write(d, AZ6007_TS_THROUGH, 0, 0, NULL, 0);
d                 745 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 746 drivers/media/usb/dvb-usb-v2/az6007.c 	struct az6007_device_state *st = d_to_priv(d);
d                 777 drivers/media/usb/dvb-usb-v2/az6007.c 			ret = __az6007_read(d->udev, req, value, index,
d                 797 drivers/media/usb/dvb-usb-v2/az6007.c 			ret =  __az6007_write(d->udev, req, value, index,
d                 809 drivers/media/usb/dvb-usb-v2/az6007.c 			ret = __az6007_read(d->udev, req, value, index,
d                 837 drivers/media/usb/dvb-usb-v2/az6007.c static int az6007_identify_state(struct dvb_usb_device *d, const char **name)
d                 849 drivers/media/usb/dvb-usb-v2/az6007.c 	ret = __az6007_read(d->udev, AZ6007_READ_DATA, 6, 0, mac, 6);
d                 858 drivers/media/usb/dvb-usb-v2/az6007.c 		__az6007_write(d->udev, 0x09, 1, 0, NULL, 0);
d                 859 drivers/media/usb/dvb-usb-v2/az6007.c 		__az6007_write(d->udev, 0x00, 0, 0, NULL, 0);
d                 860 drivers/media/usb/dvb-usb-v2/az6007.c 		__az6007_write(d->udev, 0x00, 0, 0, NULL, 0);
d                 870 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = usb_get_intfdata(intf);
d                 871 drivers/media/usb/dvb-usb-v2/az6007.c 	az6007_ci_uninit(d);
d                 875 drivers/media/usb/dvb-usb-v2/az6007.c static int az6007_download_firmware(struct dvb_usb_device *d,
d                 880 drivers/media/usb/dvb-usb-v2/az6007.c 	return cypress_load_firmware(d->udev, fw, CYPRESS_FX2);
d                 945 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = usb_get_intfdata(intf);
d                 947 drivers/media/usb/dvb-usb-v2/az6007.c 	az6007_ci_uninit(d);
d                 953 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_device *d = usb_get_intfdata(intf);
d                 954 drivers/media/usb/dvb-usb-v2/az6007.c 	struct dvb_usb_adapter *adap = &d->adapter[0];
d                  12 drivers/media/usb/dvb-usb-v2/ce6230.c static int ce6230_ctrl_msg(struct dvb_usb_device *d, struct usb_req *req)
d                  38 drivers/media/usb/dvb-usb-v2/ce6230.c 		dev_err(&d->udev->dev, "%s: unknown command=%02x\n",
d                  53 drivers/media/usb/dvb-usb-v2/ce6230.c 		pipe = usb_sndctrlpipe(d->udev, 0);
d                  56 drivers/media/usb/dvb-usb-v2/ce6230.c 		pipe = usb_rcvctrlpipe(d->udev, 0);
d                  61 drivers/media/usb/dvb-usb-v2/ce6230.c 	ret = usb_control_msg(d->udev, pipe, request, requesttype, value, index,
d                  64 drivers/media/usb/dvb-usb-v2/ce6230.c 	dvb_usb_dbg_usb_control_msg(d->udev, request, requesttype, value, index,
d                  68 drivers/media/usb/dvb-usb-v2/ce6230.c 		dev_err(&d->udev->dev, "%s: usb_control_msg() failed=%d\n",
d                  88 drivers/media/usb/dvb-usb-v2/ce6230.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                  97 drivers/media/usb/dvb-usb-v2/ce6230.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 109 drivers/media/usb/dvb-usb-v2/ce6230.c 				ret = ce6230_ctrl_msg(d, &req);
d                 111 drivers/media/usb/dvb-usb-v2/ce6230.c 				dev_err(&d->udev->dev, "%s: I2C read not " \
d                 125 drivers/media/usb/dvb-usb-v2/ce6230.c 				ret = ce6230_ctrl_msg(d, &req);
d                 132 drivers/media/usb/dvb-usb-v2/ce6230.c 				ret = ce6230_ctrl_msg(d, &req);
d                 140 drivers/media/usb/dvb-usb-v2/ce6230.c 	mutex_unlock(&d->i2c_mutex);
d                 167 drivers/media/usb/dvb-usb-v2/ce6230.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 169 drivers/media/usb/dvb-usb-v2/ce6230.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                 172 drivers/media/usb/dvb-usb-v2/ce6230.c 			&d->i2c_adap);
d                 198 drivers/media/usb/dvb-usb-v2/ce6230.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 201 drivers/media/usb/dvb-usb-v2/ce6230.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                 203 drivers/media/usb/dvb-usb-v2/ce6230.c 	ret = dvb_attach(mxl5005s_attach, adap->fe[0], &d->i2c_adap,
d                 208 drivers/media/usb/dvb-usb-v2/ce6230.c static int ce6230_power_ctrl(struct dvb_usb_device *d, int onoff)
d                 212 drivers/media/usb/dvb-usb-v2/ce6230.c 	dev_dbg(&d->udev->dev, "%s: onoff=%d\n", __func__, onoff);
d                 216 drivers/media/usb/dvb-usb-v2/ce6230.c 	ret = usb_set_interface(d->udev, 1, onoff);
d                 218 drivers/media/usb/dvb-usb-v2/ce6230.c 		dev_err(&d->udev->dev, "%s: usb_set_interface() failed=%d\n",
d                  52 drivers/media/usb/dvb-usb-v2/dvb_usb.h #define d_to_priv(d) (d->priv)
d                 129 drivers/media/usb/dvb-usb-v2/dvb_usb.h 	int (*query) (struct dvb_usb_device *d);
d                 247 drivers/media/usb/dvb-usb-v2/dvb_usb.h 	int (*read_config) (struct dvb_usb_device *d);
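
The dvb_usb.h lines show the d_to_priv() accessor together with the per-device callback prototypes (remote query, read_config). As a hedged illustration of how a sub-driver pairs a private state structure with that macro: the demo_state type and demo_read_config() below are hypothetical, and the core only allocates d->priv when props->size_of_priv is set (see the kzalloc() in the dvb_usb_core.c entries further down).

/* Hypothetical driver state; only d_to_priv() and struct dvb_usb_device
 * come from the header lines above. */
struct demo_state {
	u8 chip_id;
};

static int demo_read_config(struct dvb_usb_device *d)
{
	/* d_to_priv(d) expands to d->priv, which the core allocated from
	 * props->size_of_priv = sizeof(struct demo_state). */
	struct demo_state *st = d_to_priv(d);

	st->chip_id = 0x01;	/* would normally be read back from the device */
	return 0;
}
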
d                  22 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c static int dvb_usbv2_download_firmware(struct dvb_usb_device *d,
d                  27 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                  29 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (!d->props->download_firmware) {
d                  34 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	ret = request_firmware(&fw, name, &d->udev->dev);
d                  36 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		dev_err(&d->udev->dev,
d                  42 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_info(&d->udev->dev, "%s: downloading firmware from file '%s'\n",
d                  45 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	ret = d->props->download_firmware(d, fw);
d                  52 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s: failed=%d\n", __func__, ret);
d                  56 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c static int dvb_usbv2_i2c_init(struct dvb_usb_device *d)
d                  59 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                  61 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (!d->props->i2c_algo)
d                  64 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	strscpy(d->i2c_adap.name, d->name, sizeof(d->i2c_adap.name));
d                  65 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	d->i2c_adap.algo = d->props->i2c_algo;
d                  66 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	d->i2c_adap.dev.parent = &d->udev->dev;
d                  67 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	i2c_set_adapdata(&d->i2c_adap, d);
d                  69 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	ret = i2c_add_adapter(&d->i2c_adap);
d                  71 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		d->i2c_adap.algo = NULL;
d                  77 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s: failed=%d\n", __func__, ret);
d                  81 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c static int dvb_usbv2_i2c_exit(struct dvb_usb_device *d)
d                  83 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                  85 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->i2c_adap.algo)
d                  86 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		i2c_del_adapter(&d->i2c_adap);
d                  94 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	struct dvb_usb_device *d = container_of(work,
d                 102 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (dvb_usbv2_disable_rc_polling || d->rc.bulk_mode) {
d                 103 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		d->rc_polling_active = false;
d                 107 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	ret = d->rc.query(d);
d                 109 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		dev_err(&d->udev->dev, "%s: rc.query() failed=%d\n",
d                 111 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		d->rc_polling_active = false;
d                 115 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	schedule_delayed_work(&d->rc_query_work,
d                 116 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			msecs_to_jiffies(d->rc.interval));
d                 119 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c static int dvb_usbv2_remote_init(struct dvb_usb_device *d)
d                 123 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                 125 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (dvb_usbv2_disable_rc_polling || !d->props->get_rc_config)
d                 128 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	d->rc.map_name = d->rc_map;
d                 129 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	ret = d->props->get_rc_config(d, &d->rc);
d                 134 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (!d->rc.map_name)
d                 137 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev = rc_allocate_device(d->rc.driver_type);
d                 143 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev->dev.parent = &d->udev->dev;
d                 144 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev->device_name = d->name;
d                 145 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	usb_make_path(d->udev, d->rc_phys, sizeof(d->rc_phys));
d                 146 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	strlcat(d->rc_phys, "/ir0", sizeof(d->rc_phys));
d                 147 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev->input_phys = d->rc_phys;
d                 148 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	usb_to_input_id(d->udev, &dev->input_id);
d                 149 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev->driver_name = d->props->driver_name;
d                 150 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev->map_name = d->rc.map_name;
d                 151 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev->allowed_protocols = d->rc.allowed_protos;
d                 152 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev->change_protocol = d->rc.change_protocol;
d                 153 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev->priv = d;
d                 161 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	d->rc_dev = dev;
d                 164 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->rc.query && !d->rc.bulk_mode) {
d                 166 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		INIT_DELAYED_WORK(&d->rc_query_work,
d                 168 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		dev_info(&d->udev->dev,
d                 170 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 				KBUILD_MODNAME, d->rc.interval);
d                 171 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		schedule_delayed_work(&d->rc_query_work,
d                 172 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 				msecs_to_jiffies(d->rc.interval));
d                 173 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		d->rc_polling_active = true;
d                 178 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s: failed=%d\n", __func__, ret);
d                 182 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c static int dvb_usbv2_remote_exit(struct dvb_usb_device *d)
d                 184 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                 186 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->rc_dev) {
d                 187 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		cancel_delayed_work_sync(&d->rc_query_work);
d                 188 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		rc_unregister_device(d->rc_dev);
d                 189 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		d->rc_dev = NULL;
d                 243 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 246 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev,
d                 266 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->get_stream_config) {
d                 269 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		ret = d->props->get_stream_config(adap->fe[adap->active_fe],
d                 272 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_err(&d->udev->dev,
d                 299 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_err(&d->udev->dev,
d                 305 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->streaming_ctrl) {
d                 306 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		ret = d->props->streaming_ctrl(adap->fe[adap->active_fe], 1);
d                 308 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_err(&d->udev->dev,
d                 319 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_err(&d->udev->dev, "%s: pid_filter() failed=%d\n",
d                 324 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		dev_dbg(&d->udev->dev, "%s: failed=%d\n", __func__, ret);
d                 331 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 333 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev,
d                 347 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_err(&d->udev->dev, "%s: pid_filter() failed=%d\n",
d                 356 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->streaming_ctrl) {
d                 357 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		ret = d->props->streaming_ctrl(adap->fe[adap->active_fe], 0);
d                 359 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_err(&d->udev->dev,
d                 368 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_err(&d->udev->dev,
d                 383 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		dev_dbg(&d->udev->dev, "%s: failed=%d\n", __func__, ret);
d                 391 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 392 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	struct usb_device *udev = d->udev;
d                 398 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	media_device_usb_init(mdev, udev, d->name);
d                 402 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_info(&d->udev->dev, "media controller created\n");
d                 434 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 436 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s: adap=%d\n", __func__, adap->id);
d                 438 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	ret = dvb_register_adapter(&adap->dvb_adap, d->name, d->props->owner,
d                 439 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			&d->udev->dev, d->props->adapter_nr);
d                 441 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		dev_dbg(&d->udev->dev, "%s: dvb_register_adapter() failed=%d\n",
d                 450 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		dev_dbg(&d->udev->dev, "%s: dvb_usbv2_media_device_init() failed=%d\n",
d                 455 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->read_mac_address) {
d                 456 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		ret = d->props->read_mac_address(adap,
d                 461 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		dev_info(&d->udev->dev, "%s: MAC address: %pM\n",
d                 475 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		dev_err(&d->udev->dev, "%s: dvb_dmx_init() failed=%d\n",
d                 485 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		dev_err(&d->udev->dev, "%s: dvb_dmxdev_init() failed=%d\n",
d                 492 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		dev_err(&d->udev->dev, "%s: dvb_net_init() failed=%d\n",
d                 527 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c static int dvb_usbv2_device_power_ctrl(struct dvb_usb_device *d, int onoff)
d                 532 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		d->powered++;
d                 534 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		d->powered--;
d                 536 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->powered == 0 || (onoff && d->powered == 1)) {
d                 538 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		dev_dbg(&d->udev->dev, "%s: power=%d\n", __func__, onoff);
d                 539 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		if (d->props->power_ctrl) {
d                 540 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			ret = d->props->power_ctrl(d, onoff);
d                 548 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s: failed=%d\n", __func__, ret);
d                 556 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 557 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s: adap=%d fe=%d\n", __func__, adap->id,
d                 565 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	ret = dvb_usbv2_device_power_ctrl(d, 1);
d                 569 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->frontend_ctrl) {
d                 570 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		ret = d->props->frontend_ctrl(fe, 1);
d                 587 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s: ret=%d\n", __func__, ret);
d                 595 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 596 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s: adap=%d fe=%d\n", __func__, adap->id,
d                 611 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->frontend_ctrl) {
d                 612 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		ret = d->props->frontend_ctrl(fe, 0);
d                 617 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	ret = dvb_usbv2_device_power_ctrl(d, 0);
d                 627 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s: ret=%d\n", __func__, ret);
d                 634 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 635 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s: adap=%d\n", __func__, adap->id);
d                 640 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->frontend_attach) {
d                 641 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		ret = d->props->frontend_attach(adap);
d                 643 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_dbg(&d->udev->dev,
d                 649 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		dev_dbg(&d->udev->dev, "%s: frontend_attach() do not exists\n",
d                 665 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_err(&d->udev->dev,
d                 674 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->tuner_attach) {
d                 675 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		ret = d->props->tuner_attach(adap);
d                 677 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_dbg(&d->udev->dev, "%s: tuner_attach() failed=%d\n",
d                 704 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s: failed=%d\n", __func__, ret);
d                 711 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 713 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s: adap=%d\n", __func__, adap->id);
d                 722 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->tuner_detach) {
d                 723 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		ret = d->props->tuner_detach(adap);
d                 725 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_dbg(&d->udev->dev, "%s: tuner_detach() failed=%d\n",
d                 730 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->frontend_detach) {
d                 731 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		ret = d->props->frontend_detach(adap);
d                 733 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_dbg(&d->udev->dev,
d                 742 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c static int dvb_usbv2_adapter_init(struct dvb_usb_device *d)
d                 748 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	adapter_count = d->props->num_adapters;
d                 749 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->get_adapter_count) {
d                 750 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		ret = d->props->get_adapter_count(d);
d                 758 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		adap = &d->adapter[i];
d                 760 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		adap->props = &d->props->adapter[i];
d                 763 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		if (d->udev->speed == USB_SPEED_FULL &&
d                 765 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_err(&d->udev->dev,
d                 770 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		} else if ((d->udev->speed == USB_SPEED_FULL &&
d                 773 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_info(&d->udev->dev,
d                 780 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_info(&d->udev->dev,
d                 789 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_info(&d->udev->dev,
d                 815 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s: failed=%d\n", __func__, ret);
d                 819 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c static int dvb_usbv2_adapter_exit(struct dvb_usb_device *d)
d                 822 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                 825 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		if (d->adapter[i].props) {
d                 826 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dvb_usbv2_adapter_dvb_exit(&d->adapter[i]);
d                 827 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dvb_usbv2_adapter_stream_exit(&d->adapter[i]);
d                 828 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dvb_usbv2_adapter_frontend_exit(&d->adapter[i]);
d                 829 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dvb_usbv2_media_device_unregister(&d->adapter[i]);
d                 837 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c static int dvb_usbv2_exit(struct dvb_usb_device *d)
d                 839 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                 841 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dvb_usbv2_remote_exit(d);
d                 842 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dvb_usbv2_adapter_exit(d);
d                 843 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dvb_usbv2_i2c_exit(d);
d                 848 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c static int dvb_usbv2_init(struct dvb_usb_device *d)
d                 851 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                 853 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dvb_usbv2_device_power_ctrl(d, 1);
d                 855 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->read_config) {
d                 856 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		ret = d->props->read_config(d);
d                 861 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	ret = dvb_usbv2_i2c_init(d);
d                 865 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	ret = dvb_usbv2_adapter_init(d);
d                 869 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->init) {
d                 870 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		ret = d->props->init(d);
d                 875 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	ret = dvb_usbv2_remote_init(d);
d                 879 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dvb_usbv2_device_power_ctrl(d, 0);
d                 883 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dvb_usbv2_device_power_ctrl(d, 0);
d                 884 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s: failed=%d\n", __func__, ret);
d                 892 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	struct dvb_usb_device *d;
d                 906 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	d = kzalloc(sizeof(struct dvb_usb_device), GFP_KERNEL);
d                 907 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (!d) {
d                 913 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	d->intf = intf;
d                 914 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	d->name = driver_info->name;
d                 915 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	d->rc_map = driver_info->rc_map;
d                 916 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	d->udev = udev;
d                 917 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	d->props = driver_info->props;
d                 920 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			d->props->bInterfaceNumber) {
d                 925 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	mutex_init(&d->usb_mutex);
d                 926 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	mutex_init(&d->i2c_mutex);
d                 928 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->size_of_priv) {
d                 929 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		d->priv = kzalloc(d->props->size_of_priv, GFP_KERNEL);
d                 930 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		if (!d->priv) {
d                 931 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_err(&d->udev->dev, "%s: kzalloc() failed\n",
d                 938 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->probe) {
d                 939 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		ret = d->props->probe(d);
d                 944 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->identify_state) {
d                 946 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		ret = d->props->identify_state(d, &name);
d                 948 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			dev_info(&d->udev->dev,
d                 950 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 					KBUILD_MODNAME, d->name);
d                 953 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 				name = d->props->firmware;
d                 955 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			ret = dvb_usbv2_download_firmware(d, name);
d                 977 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_info(&d->udev->dev, "%s: found a '%s' in warm state\n",
d                 978 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			KBUILD_MODNAME, d->name);
d                 980 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	ret = dvb_usbv2_init(d);
d                 984 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_info(&d->udev->dev,
d                 986 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			KBUILD_MODNAME, d->name);
d                 988 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	usb_set_intfdata(intf, d);
d                 992 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dvb_usbv2_exit(d);
d                 993 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->disconnect)
d                 994 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		d->props->disconnect(d);
d                 996 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	kfree(d->priv);
d                 998 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	kfree(d);
d                1007 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	struct dvb_usb_device *d = usb_get_intfdata(intf);
d                1008 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	const char *devname = kstrdup(dev_name(&d->udev->dev), GFP_KERNEL);
d                1009 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	const char *drvname = d->name;
d                1011 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s: bInterfaceNumber=%d\n", __func__,
d                1014 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->exit)
d                1015 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		d->props->exit(d);
d                1017 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dvb_usbv2_exit(d);
d                1019 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->disconnect)
d                1020 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		d->props->disconnect(d);
d                1022 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	kfree(d->priv);
d                1023 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	kfree(d);
d                1033 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	struct dvb_usb_device *d = usb_get_intfdata(intf);
d                1036 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                1039 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->rc_polling_active)
d                1040 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		cancel_delayed_work_sync(&d->rc_query_work);
d                1043 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		active_fe = d->adapter[i].active_fe;
d                1044 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		if (d->adapter[i].dvb_adap.priv && active_fe != -1) {
d                1045 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			fe = d->adapter[i].fe[active_fe];
d                1046 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			d->adapter[i].suspend_resume_active = true;
d                1048 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			if (d->props->streaming_ctrl)
d                1049 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 				d->props->streaming_ctrl(fe, 0);
d                1052 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			usb_urb_killv2(&d->adapter[i].stream);
d                1062 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c static int dvb_usbv2_resume_common(struct dvb_usb_device *d)
d                1066 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                1069 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		active_fe = d->adapter[i].active_fe;
d                1070 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		if (d->adapter[i].dvb_adap.priv && active_fe != -1) {
d                1071 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			fe = d->adapter[i].fe[active_fe];
d                1076 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			usb_urb_submitv2(&d->adapter[i].stream, NULL);
d                1078 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			if (d->props->streaming_ctrl)
d                1079 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 				d->props->streaming_ctrl(fe, 1);
d                1081 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 			d->adapter[i].suspend_resume_active = false;
d                1086 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->rc_polling_active)
d                1087 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		schedule_delayed_work(&d->rc_query_work,
d                1088 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 				msecs_to_jiffies(d->rc.interval));
d                1095 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	struct dvb_usb_device *d = usb_get_intfdata(intf);
d                1096 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                1098 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	return dvb_usbv2_resume_common(d);
d                1104 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	struct dvb_usb_device *d = usb_get_intfdata(intf);
d                1106 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                1108 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dvb_usbv2_device_power_ctrl(d, 1);
d                1110 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	if (d->props->init)
d                1111 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 		d->props->init(d);
d                1113 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	ret = dvb_usbv2_resume_common(d);
d                1115 drivers/media/usb/dvb-usb-v2/dvb_usb_core.c 	dvb_usbv2_device_power_ctrl(d, 0);
d                  11 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c static int dvb_usb_v2_generic_io(struct dvb_usb_device *d,
d                  16 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 	if (!wbuf || !wlen || !d->props->generic_bulk_ctrl_endpoint ||
d                  17 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 			!d->props->generic_bulk_ctrl_endpoint_response) {
d                  18 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 		dev_dbg(&d->udev->dev, "%s: failed=%d\n", __func__, -EINVAL);
d                  22 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 	dev_dbg(&d->udev->dev, "%s: >>> %*ph\n", __func__, wlen, wbuf);
d                  24 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 	ret = usb_bulk_msg(d->udev, usb_sndbulkpipe(d->udev,
d                  25 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 			d->props->generic_bulk_ctrl_endpoint), wbuf, wlen,
d                  28 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 		dev_err(&d->udev->dev, "%s: usb_bulk_msg() failed=%d\n",
d                  33 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 		dev_err(&d->udev->dev, "%s: usb_bulk_msg() write length=%d, actual=%d\n",
d                  40 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 		if (d->props->generic_bulk_ctrl_delay)
d                  41 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 			usleep_range(d->props->generic_bulk_ctrl_delay,
d                  42 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 					d->props->generic_bulk_ctrl_delay
d                  45 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 		ret = usb_bulk_msg(d->udev, usb_rcvbulkpipe(d->udev,
d                  46 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 				d->props->generic_bulk_ctrl_endpoint_response),
d                  49 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 			dev_err(&d->udev->dev,
d                  53 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 		dev_dbg(&d->udev->dev, "%s: <<< %*ph\n", __func__,
d                  60 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c int dvb_usbv2_generic_rw(struct dvb_usb_device *d,
d                  65 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 	mutex_lock(&d->usb_mutex);
d                  66 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 	ret = dvb_usb_v2_generic_io(d, wbuf, wlen, rbuf, rlen);
d                  67 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 	mutex_unlock(&d->usb_mutex);
d                  73 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c int dvb_usbv2_generic_write(struct dvb_usb_device *d, u8 *buf, u16 len)
d                  77 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 	mutex_lock(&d->usb_mutex);
d                  78 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 	ret = dvb_usb_v2_generic_io(d, buf, len, NULL, 0);
d                  79 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 	mutex_unlock(&d->usb_mutex);
d                  85 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c int dvb_usbv2_generic_rw_locked(struct dvb_usb_device *d,
d                  88 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 	return dvb_usb_v2_generic_io(d, wbuf, wlen, rbuf, rlen);
d                  92 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c int dvb_usbv2_generic_write_locked(struct dvb_usb_device *d, u8 *buf, u16 len)
d                  94 drivers/media/usb/dvb-usb-v2/dvb_usb_urb.c 	return dvb_usb_v2_generic_io(d, buf, len, NULL, 0);
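
The dvb_usb_urb.c entries export the generic bulk-control helpers in two locking variants: dvb_usbv2_generic_rw() and dvb_usbv2_generic_write() take d->usb_mutex themselves, while the *_locked() forms expect the caller to already hold it (the dvbsky wrapper that follows does exactly that around a shared state buffer). A small hedged sketch of a driver-side command helper built on the unlocked variant; demo_ctrl_msg() and its one-byte command format are invented for illustration.

/* Illustrative wrapper over the exported helper listed above:
 * dvb_usbv2_generic_rw() serializes on d->usb_mutex and performs a bulk
 * write followed by an optional bulk read on the control endpoints. */
static int demo_ctrl_msg(struct dvb_usb_device *d, u8 cmd,
			 u8 *rbuf, u16 rlen)
{
	u8 wbuf[1] = { cmd };	/* hypothetical single-byte command */

	return dvb_usbv2_generic_rw(d, wbuf, sizeof(wbuf), rbuf, rlen);
}
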
d                  40 drivers/media/usb/dvb-usb-v2/dvbsky.c static int dvbsky_usb_generic_rw(struct dvb_usb_device *d,
d                  44 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvbsky_state *state = d_to_priv(d);
d                  46 drivers/media/usb/dvb-usb-v2/dvbsky.c 	mutex_lock(&d->usb_mutex);
d                  50 drivers/media/usb/dvb-usb-v2/dvbsky.c 	ret = dvb_usbv2_generic_rw_locked(d, state->obuf, wlen,
d                  56 drivers/media/usb/dvb-usb-v2/dvbsky.c 	mutex_unlock(&d->usb_mutex);
d                  60 drivers/media/usb/dvb-usb-v2/dvbsky.c static int dvbsky_stream_ctrl(struct dvb_usb_device *d, u8 onoff)
d                  62 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvbsky_state *state = d_to_priv(d);
d                  68 drivers/media/usb/dvb-usb-v2/dvbsky.c 	ret = dvbsky_usb_generic_rw(d, obuf_pre, 3, NULL, 0);
d                  71 drivers/media/usb/dvb-usb-v2/dvbsky.c 		ret = dvbsky_usb_generic_rw(d, obuf_post, 3, NULL, 0);
d                  79 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvb_usb_device *d = fe_to_d(fe);
d                  81 drivers/media/usb/dvb-usb-v2/dvbsky.c 	return dvbsky_stream_ctrl(d, (onoff == 0) ? 0 : 1);
d                  85 drivers/media/usb/dvb-usb-v2/dvbsky.c static int dvbsky_gpio_ctrl(struct dvb_usb_device *d, u8 gport, u8 value)
d                  93 drivers/media/usb/dvb-usb-v2/dvbsky.c 	ret = dvbsky_usb_generic_rw(d, obuf, 3, ibuf, 1);
d                 101 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 105 drivers/media/usb/dvb-usb-v2/dvbsky.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 109 drivers/media/usb/dvb-usb-v2/dvbsky.c 		dev_err(&d->udev->dev,
d                 117 drivers/media/usb/dvb-usb-v2/dvbsky.c 			dev_err(&d->udev->dev,
d                 129 drivers/media/usb/dvb-usb-v2/dvbsky.c 			ret = dvbsky_usb_generic_rw(d, obuf, 4,
d                 139 drivers/media/usb/dvb-usb-v2/dvbsky.c 			ret = dvbsky_usb_generic_rw(d, obuf,
d                 144 drivers/media/usb/dvb-usb-v2/dvbsky.c 			dev_err(&d->udev->dev,
d                 156 drivers/media/usb/dvb-usb-v2/dvbsky.c 		ret = dvbsky_usb_generic_rw(d, obuf,
d                 162 drivers/media/usb/dvb-usb-v2/dvbsky.c 	mutex_unlock(&d->i2c_mutex);
d                 177 drivers/media/usb/dvb-usb-v2/dvbsky.c static int dvbsky_rc_query(struct dvb_usb_device *d)
d                 185 drivers/media/usb/dvb-usb-v2/dvbsky.c 	ret = dvbsky_usb_generic_rw(d, obuf, 1, ibuf, 2);
d                 189 drivers/media/usb/dvb-usb-v2/dvbsky.c 		dev_dbg(&d->udev->dev, "rc code: %x\n", code);
d                 194 drivers/media/usb/dvb-usb-v2/dvbsky.c 		rc_keydown(d->rc_dev, RC_PROTO_RC5, scancode, toggle);
d                 199 drivers/media/usb/dvb-usb-v2/dvbsky.c static int dvbsky_get_rc_config(struct dvb_usb_device *d, struct dvb_usb_rc *rc)
d                 218 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvb_usb_device *d = fe_to_d(fe);
d                 219 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvbsky_state *state = d_to_priv(d);
d                 226 drivers/media/usb/dvb-usb-v2/dvbsky.c 	dvbsky_gpio_ctrl(d, 0x80, value);
d                 233 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 250 drivers/media/usb/dvb-usb-v2/dvbsky.c 	if (i2c_transfer(&d->i2c_adap, msg, 2) == 2)
d                 259 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvb_usb_device *d = fe_to_d(fe);
d                 260 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvbsky_state *state = d_to_priv(d);
d                 267 drivers/media/usb/dvb-usb-v2/dvbsky.c 		dvbsky_stream_ctrl(d, 1);
d                 276 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 293 drivers/media/usb/dvb-usb-v2/dvbsky.c 						   &d->i2c_adap,
d                 331 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvb_usb_device *d = fe_to_d(fe);
d                 332 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvbsky_state *state = d_to_priv(d);
d                 339 drivers/media/usb/dvb-usb-v2/dvbsky.c 	dvbsky_gpio_ctrl(d, 0x00, value);
d                 347 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvb_usb_device *d = priv;
d                 363 drivers/media/usb/dvb-usb-v2/dvbsky.c 	ret = dvbsky_usb_generic_rw(d, command, command_size,
d                 371 drivers/media/usb/dvb-usb-v2/dvbsky.c 	dev_err(&d->udev->dev, "ci control failed=%d\n", ret);
d                 378 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 396 drivers/media/usb/dvb-usb-v2/dvbsky.c 						   &d->i2c_adap,
d                 418 drivers/media/usb/dvb-usb-v2/dvbsky.c 	sp2_config.priv = d;
d                 422 drivers/media/usb/dvb-usb-v2/dvbsky.c 						&d->i2c_adap,
d                 449 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 461 drivers/media/usb/dvb-usb-v2/dvbsky.c 						   &d->i2c_adap,
d                 480 drivers/media/usb/dvb-usb-v2/dvbsky.c 	sp2_config.priv = d;
d                 484 drivers/media/usb/dvb-usb-v2/dvbsky.c 						&d->i2c_adap,
d                 499 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 511 drivers/media/usb/dvb-usb-v2/dvbsky.c 						   &d->i2c_adap,
d                 534 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 543 drivers/media/usb/dvb-usb-v2/dvbsky.c 	if (le16_to_cpu(d->udev->descriptor.idProduct) == USB_PID_MYGICA_T230C2)
d                 548 drivers/media/usb/dvb-usb-v2/dvbsky.c 						   &d->i2c_adap,
d                 555 drivers/media/usb/dvb-usb-v2/dvbsky.c 	if (le16_to_cpu(d->udev->descriptor.idProduct) == USB_PID_MYGICA_T230) {
d                 577 drivers/media/usb/dvb-usb-v2/dvbsky.c static int dvbsky_identify_state(struct dvb_usb_device *d, const char **name)
d                 579 drivers/media/usb/dvb-usb-v2/dvbsky.c 	dvbsky_gpio_ctrl(d, 0x04, 1);
d                 581 drivers/media/usb/dvb-usb-v2/dvbsky.c 	dvbsky_gpio_ctrl(d, 0x83, 0);
d                 582 drivers/media/usb/dvb-usb-v2/dvbsky.c 	dvbsky_gpio_ctrl(d, 0xc0, 1);
d                 584 drivers/media/usb/dvb-usb-v2/dvbsky.c 	dvbsky_gpio_ctrl(d, 0x83, 1);
d                 585 drivers/media/usb/dvb-usb-v2/dvbsky.c 	dvbsky_gpio_ctrl(d, 0xc0, 0);
d                 591 drivers/media/usb/dvb-usb-v2/dvbsky.c static int dvbsky_init(struct dvb_usb_device *d)
d                 593 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvbsky_state *state = d_to_priv(d);
d                 610 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 611 drivers/media/usb/dvb-usb-v2/dvbsky.c 	struct dvbsky_state *state = d_to_priv(d);
d                 613 drivers/media/usb/dvb-usb-v2/dvbsky.c 	dev_dbg(&d->udev->dev, "%s: adap=%d\n", __func__, adap->id);
d                  14 drivers/media/usb/dvb-usb-v2/ec168.c static int ec168_ctrl_msg(struct dvb_usb_device *d, struct ec168_req *req)
d                  50 drivers/media/usb/dvb-usb-v2/ec168.c 		dev_err(&d->udev->dev, "%s: unknown command=%02x\n",
d                  65 drivers/media/usb/dvb-usb-v2/ec168.c 		pipe = usb_sndctrlpipe(d->udev, 0);
d                  68 drivers/media/usb/dvb-usb-v2/ec168.c 		pipe = usb_rcvctrlpipe(d->udev, 0);
d                  73 drivers/media/usb/dvb-usb-v2/ec168.c 	ret = usb_control_msg(d->udev, pipe, request, requesttype, req->value,
d                  76 drivers/media/usb/dvb-usb-v2/ec168.c 	dvb_usb_dbg_usb_control_msg(d->udev, request, requesttype, req->value,
d                  94 drivers/media/usb/dvb-usb-v2/ec168.c 	dev_dbg(&d->udev->dev, "%s: failed=%d\n", __func__, ret);
d                 104 drivers/media/usb/dvb-usb-v2/ec168.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 112 drivers/media/usb/dvb-usb-v2/ec168.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 123 drivers/media/usb/dvb-usb-v2/ec168.c 				ret = ec168_ctrl_msg(d, &req);
d                 126 drivers/media/usb/dvb-usb-v2/ec168.c 				dev_err(&d->udev->dev, "%s: I2C read not " \
d                 139 drivers/media/usb/dvb-usb-v2/ec168.c 				ret = ec168_ctrl_msg(d, &req);
d                 147 drivers/media/usb/dvb-usb-v2/ec168.c 				ret = ec168_ctrl_msg(d, &req);
d                 158 drivers/media/usb/dvb-usb-v2/ec168.c 	mutex_unlock(&d->i2c_mutex);
d                 173 drivers/media/usb/dvb-usb-v2/ec168.c static int ec168_identify_state(struct dvb_usb_device *d, const char **name)
d                 178 drivers/media/usb/dvb-usb-v2/ec168.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                 180 drivers/media/usb/dvb-usb-v2/ec168.c 	ret = ec168_ctrl_msg(d, &req);
d                 184 drivers/media/usb/dvb-usb-v2/ec168.c 	dev_dbg(&d->udev->dev, "%s: reply=%02x\n", __func__, reply);
d                 193 drivers/media/usb/dvb-usb-v2/ec168.c 	dev_dbg(&d->udev->dev, "%s: failed=%d\n", __func__, ret);
d                 197 drivers/media/usb/dvb-usb-v2/ec168.c static int ec168_download_firmware(struct dvb_usb_device *d,
d                 202 drivers/media/usb/dvb-usb-v2/ec168.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                 214 drivers/media/usb/dvb-usb-v2/ec168.c 		ret = ec168_ctrl_msg(d, &req);
d                 216 drivers/media/usb/dvb-usb-v2/ec168.c 			dev_err(&d->udev->dev,
d                 229 drivers/media/usb/dvb-usb-v2/ec168.c 	ret = ec168_ctrl_msg(d, &req);
d                 237 drivers/media/usb/dvb-usb-v2/ec168.c 	ret = ec168_ctrl_msg(d, &req);
d                 245 drivers/media/usb/dvb-usb-v2/ec168.c 	ret = ec168_ctrl_msg(d, &req);
d                 251 drivers/media/usb/dvb-usb-v2/ec168.c 	dev_dbg(&d->udev->dev, "%s: failed=%d\n", __func__, ret);
d                 261 drivers/media/usb/dvb-usb-v2/ec168.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 262 drivers/media/usb/dvb-usb-v2/ec168.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                 265 drivers/media/usb/dvb-usb-v2/ec168.c 			&d->i2c_adap);
d                 291 drivers/media/usb/dvb-usb-v2/ec168.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 292 drivers/media/usb/dvb-usb-v2/ec168.c 	dev_dbg(&d->udev->dev, "%s:\n", __func__);
d                 294 drivers/media/usb/dvb-usb-v2/ec168.c 	return dvb_attach(mxl5005s_attach, adap->fe[0], &d->i2c_adap,
d                 300 drivers/media/usb/dvb-usb-v2/ec168.c 	struct dvb_usb_device *d = fe_to_d(fe);
d                 302 drivers/media/usb/dvb-usb-v2/ec168.c 	dev_dbg(&d->udev->dev, "%s: onoff=%d\n", __func__, onoff);
d                 306 drivers/media/usb/dvb-usb-v2/ec168.c 	return ec168_ctrl_msg(d, &req);
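
The ec168.c hits above all funnel through ec168_ctrl_msg(), which selects a send or receive control pipe based on the command before issuing a single usb_control_msg(). A minimal sketch of that direction-aware helper follows; the request struct, the read flag, and the 1000 ms timeout are illustrative assumptions, not the driver's actual definitions.

/*
 * Hedged sketch of a direction-aware control-message helper, modelled on
 * the ec168_ctrl_msg() hits above. struct example_req and the timeout are
 * assumptions; only usb_control_msg() and the pipe helpers are real APIs.
 */
#include <linux/usb.h>

struct example_req {
	u8  cmd;
	u16 value;
	u16 index;
	u8  *data;	/* must be a DMA-able (kmalloc'd) buffer */
	u16 size;
};

static int example_ctrl_msg(struct usb_device *udev, struct example_req *req,
			    bool read)
{
	unsigned int pipe;
	u8 requesttype;

	if (read) {
		requesttype = USB_TYPE_VENDOR | USB_DIR_IN;
		pipe = usb_rcvctrlpipe(udev, 0);
	} else {
		requesttype = USB_TYPE_VENDOR | USB_DIR_OUT;
		pipe = usb_sndctrlpipe(udev, 0);
	}

	/* Returns bytes transferred or a negative error code. */
	return usb_control_msg(udev, pipe, req->cmd, requesttype, req->value,
			       req->index, req->data, req->size, 1000);
}
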
d                  17 drivers/media/usb/dvb-usb-v2/gl861.c static int gl861_i2c_msg(struct dvb_usb_device *d, u8 addr,
d                  48 drivers/media/usb/dvb-usb-v2/gl861.c 		dev_err(&d->udev->dev, "%s: wlen=%d, aborting\n",
d                  56 drivers/media/usb/dvb-usb-v2/gl861.c 	ret = usb_control_msg(d->udev, usb_rcvctrlpipe(d->udev, 0), req, type,
d                  70 drivers/media/usb/dvb-usb-v2/gl861.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                  76 drivers/media/usb/dvb-usb-v2/gl861.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                  82 drivers/media/usb/dvb-usb-v2/gl861.c 			if (gl861_i2c_msg(d, msg[i].addr, msg[i].buf,
d                  87 drivers/media/usb/dvb-usb-v2/gl861.c 			if (gl861_i2c_msg(d, msg[i].addr, msg[i].buf,
d                  92 drivers/media/usb/dvb-usb-v2/gl861.c 	mutex_unlock(&d->i2c_mutex);
d                 135 drivers/media/usb/dvb-usb-v2/gl861.c static int gl861_init(struct dvb_usb_device *d)
d                 143 drivers/media/usb/dvb-usb-v2/gl861.c 	return usb_set_interface(d->udev, 0, 0);
d                 195 drivers/media/usb/dvb-usb-v2/gl861.c gl861_i2c_write_ex(struct dvb_usb_device *d, u8 addr, u8 *wbuf, u16 wlen)
d                 204 drivers/media/usb/dvb-usb-v2/gl861.c 	ret = usb_control_msg(d->udev, usb_sndctrlpipe(d->udev, 0),
d                 212 drivers/media/usb/dvb-usb-v2/gl861.c gl861_i2c_read_ex(struct dvb_usb_device *d, u8 addr, u8 *rbuf, u16 rlen)
d                 221 drivers/media/usb/dvb-usb-v2/gl861.c 	ret = usb_control_msg(d->udev, usb_rcvctrlpipe(d->udev, 0),
d                 243 drivers/media/usb/dvb-usb-v2/gl861.c friio_i2c_tuner_read(struct dvb_usb_device *d, struct i2c_msg *msg)
d                 248 drivers/media/usb/dvb-usb-v2/gl861.c 	priv = d_to_priv(d);
d                 250 drivers/media/usb/dvb-usb-v2/gl861.c 	return gl861_i2c_read_ex(d, addr, msg->buf, msg->len);
d                 254 drivers/media/usb/dvb-usb-v2/gl861.c friio_i2c_tuner_write(struct dvb_usb_device *d, struct i2c_msg *msg)
d                 260 drivers/media/usb/dvb-usb-v2/gl861.c 	priv = d_to_priv(d);
d                 271 drivers/media/usb/dvb-usb-v2/gl861.c 	ret = usb_control_msg(d->udev, usb_sndctrlpipe(d->udev, 0),
d                 282 drivers/media/usb/dvb-usb-v2/gl861.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 288 drivers/media/usb/dvb-usb-v2/gl861.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 295 drivers/media/usb/dvb-usb-v2/gl861.c 			ret = friio_i2c_tuner_read(d, &msg[i]);
d                 297 drivers/media/usb/dvb-usb-v2/gl861.c 			ret = friio_i2c_tuner_write(d, &msg[i]);
d                 305 drivers/media/usb/dvb-usb-v2/gl861.c 	mutex_unlock(&d->i2c_mutex);
d                 325 drivers/media/usb/dvb-usb-v2/gl861.c static int friio_ext_ctl(struct dvb_usb_device *d,
d                 346 drivers/media/usb/dvb-usb-v2/gl861.c 	ret = i2c_transfer(&d->i2c_adap, &msg, 1);
d                 348 drivers/media/usb/dvb-usb-v2/gl861.c 	ret += i2c_transfer(&d->i2c_adap, &msg, 1);
d                 351 drivers/media/usb/dvb-usb-v2/gl861.c 	ret += i2c_transfer(&d->i2c_adap, &msg, 1);
d                 353 drivers/media/usb/dvb-usb-v2/gl861.c 	ret += i2c_transfer(&d->i2c_adap, &msg, 1);
d                 361 drivers/media/usb/dvb-usb-v2/gl861.c 		ret += i2c_transfer(&d->i2c_adap, &msg, 1);
d                 363 drivers/media/usb/dvb-usb-v2/gl861.c 		ret += i2c_transfer(&d->i2c_adap, &msg, 1);
d                 369 drivers/media/usb/dvb-usb-v2/gl861.c 	ret += i2c_transfer(&d->i2c_adap, &msg, 1);
d                 371 drivers/media/usb/dvb-usb-v2/gl861.c 	ret += i2c_transfer(&d->i2c_adap, &msg, 1);
d                 389 drivers/media/usb/dvb-usb-v2/gl861.c static int friio_reset(struct dvb_usb_device *d)
d                 400 drivers/media/usb/dvb-usb-v2/gl861.c 	ret = usb_set_interface(d->udev, 0, 0);
d                 406 drivers/media/usb/dvb-usb-v2/gl861.c 	ret = gl861_i2c_msg(d, 0x00, wbuf, 2, NULL, 0);
d                 413 drivers/media/usb/dvb-usb-v2/gl861.c 	ret = gl861_i2c_msg(d, 0x00, wbuf, 2, NULL, 0);
d                 425 drivers/media/usb/dvb-usb-v2/gl861.c 	ret = gl861_i2c_write_ex(d, 0x09, wbuf, 2);
d                 430 drivers/media/usb/dvb-usb-v2/gl861.c 	ret = gl861_i2c_read_ex(d, 0x09, rbuf, 2);
d                 438 drivers/media/usb/dvb-usb-v2/gl861.c 	ret = gl861_i2c_write_ex(d, 0x48, wbuf, 2);
d                 443 drivers/media/usb/dvb-usb-v2/gl861.c 	ret = gl861_i2c_read_ex(d, 0x48, rbuf, 2);
d                 451 drivers/media/usb/dvb-usb-v2/gl861.c 	ret = gl861_i2c_msg(d, 0x00, wbuf, 2, NULL, 0);
d                 457 drivers/media/usb/dvb-usb-v2/gl861.c 	ret = gl861_i2c_msg(d, 0x00, wbuf, 2, NULL, 0);
d                 463 drivers/media/usb/dvb-usb-v2/gl861.c 	ret = gl861_i2c_msg(d, 0x00, wbuf, 2, NULL, 0);
d                 468 drivers/media/usb/dvb-usb-v2/gl861.c 		ret = gl861_i2c_msg(d, 0x00, (u8 *)friio_init_cmds[i], 2,
d                 480 drivers/media/usb/dvb-usb-v2/gl861.c static int friio_power_ctrl(struct dvb_usb_device *d, int onoff)
d                 482 drivers/media/usb/dvb-usb-v2/gl861.c 	return onoff ? friio_reset(d) : 0;
d                 488 drivers/media/usb/dvb-usb-v2/gl861.c 	struct dvb_usb_device *d;
d                 494 drivers/media/usb/dvb-usb-v2/gl861.c 	d = adap_to_d(adap);
d                 496 drivers/media/usb/dvb-usb-v2/gl861.c 			      &d->i2c_adap, info->addr, &cfg);
d                 505 drivers/media/usb/dvb-usb-v2/gl861.c 	priv->tuner_adap.dev.parent = &d->udev->dev;
d                 506 drivers/media/usb/dvb-usb-v2/gl861.c 	strscpy(priv->tuner_adap.name, d->name, sizeof(priv->tuner_adap.name));
d                 509 drivers/media/usb/dvb-usb-v2/gl861.c 	i2c_set_adapdata(&priv->tuner_adap, d);
d                 553 drivers/media/usb/dvb-usb-v2/gl861.c static int friio_init(struct dvb_usb_device *d)
d                 572 drivers/media/usb/dvb-usb-v2/gl861.c 	ret = friio_ext_ctl(d, FRIIO_LED_STOPPED, true);
d                 578 drivers/media/usb/dvb-usb-v2/gl861.c 	priv = d_to_priv(d);
d                 590 drivers/media/usb/dvb-usb-v2/gl861.c static void friio_exit(struct dvb_usb_device *d)
d                 592 drivers/media/usb/dvb-usb-v2/gl861.c 	friio_ext_ctl(d, FRIIO_LED_STOPPED, false);
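
The gl861.c hits above (like the ec168.c ones) show the common dvb-usb-v2 I2C adapter shape: master_xfer takes d->i2c_mutex interruptibly, walks the message array, and hands each message to a device-specific USB helper. A sketch of that dispatch loop follows; struct example_dev and example_i2c_msg() are stand-ins for struct dvb_usb_device and gl861_i2c_msg(), not the real definitions.

/*
 * Hedged sketch of the i2c_mutex dispatch pattern in the gl861.c and
 * ec168.c hits above. The stand-in helper just returns success; the real
 * one issues a USB control message per I2C message.
 */
#include <linux/i2c.h>
#include <linux/mutex.h>

struct example_dev {
	struct mutex i2c_mutex;
};

static int example_i2c_msg(struct example_dev *d, u8 addr, u8 *buf, int len,
			   bool read)
{
	return 0;	/* stub for the device-specific USB transfer */
}

static int example_i2c_xfer(struct i2c_adapter *adap, struct i2c_msg msg[],
			    int num)
{
	struct example_dev *d = i2c_get_adapdata(adap);
	int i, ret = 0;

	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
		return -EAGAIN;

	for (i = 0; i < num; i++) {
		ret = example_i2c_msg(d, msg[i].addr, msg[i].buf, msg[i].len,
				      msg[i].flags & I2C_M_RD);
		if (ret < 0)
			break;
	}

	mutex_unlock(&d->i2c_mutex);
	return ret < 0 ? ret : i;	/* master_xfer returns messages handled */
}
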
d                 140 drivers/media/usb/dvb-usb-v2/lmedm04.c static int lme2510_usb_talk(struct dvb_usb_device *d,
d                 143 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct lme2510_state *st = d->priv;
d                 149 drivers/media/usb/dvb-usb-v2/lmedm04.c 	ret = mutex_lock_interruptible(&d->usb_mutex);
d                 155 drivers/media/usb/dvb-usb-v2/lmedm04.c 	ret = dvb_usbv2_generic_rw_locked(d, st->usb_buffer, wlen,
d                 161 drivers/media/usb/dvb-usb-v2/lmedm04.c 	mutex_unlock(&d->usb_mutex);
d                 166 drivers/media/usb/dvb-usb-v2/lmedm04.c static int lme2510_stream_restart(struct dvb_usb_device *d)
d                 168 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct lme2510_state *st = d->priv;
d                 173 drivers/media/usb/dvb-usb-v2/lmedm04.c 		lme2510_usb_talk(d, all_pids, sizeof(all_pids),
d                 176 drivers/media/usb/dvb-usb-v2/lmedm04.c 	return lme2510_usb_talk(d, stream_on, sizeof(stream_on),
d                 180 drivers/media/usb/dvb-usb-v2/lmedm04.c static int lme2510_enable_pid(struct dvb_usb_device *d, u8 index, u16 pid_out)
d                 182 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct lme2510_state *st = d->priv;
d                 191 drivers/media/usb/dvb-usb-v2/lmedm04.c 		ret |= lme2510_stream_restart(d);
d                 202 drivers/media/usb/dvb-usb-v2/lmedm04.c 	ret |= lme2510_usb_talk(d, pid_buff ,
d                 206 drivers/media/usb/dvb-usb-v2/lmedm04.c 		ret |= lme2510_stream_restart(d);
d                 372 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 382 drivers/media/usb/dvb-usb-v2/lmedm04.c 			 d->udev,
d                 383 drivers/media/usb/dvb-usb-v2/lmedm04.c 			 usb_rcvintpipe(d->udev, 0xa),
d                 391 drivers/media/usb/dvb-usb-v2/lmedm04.c 	ep = usb_pipe_endpoint(d->udev, lme_int->lme_urb->pipe);
d                 394 drivers/media/usb/dvb-usb-v2/lmedm04.c 		lme_int->lme_urb->pipe = usb_rcvbulkpipe(d->udev, 0xa),
d                 404 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 412 drivers/media/usb/dvb-usb-v2/lmedm04.c 	mutex_lock(&d->i2c_mutex);
d                 415 drivers/media/usb/dvb-usb-v2/lmedm04.c 		ret |= lme2510_usb_talk(d, clear_pid_reg,
d                 423 drivers/media/usb/dvb-usb-v2/lmedm04.c 	mutex_unlock(&d->i2c_mutex);
d                 431 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 438 drivers/media/usb/dvb-usb-v2/lmedm04.c 		mutex_lock(&d->i2c_mutex);
d                 439 drivers/media/usb/dvb-usb-v2/lmedm04.c 		ret |= lme2510_enable_pid(d, index, pid);
d                 440 drivers/media/usb/dvb-usb-v2/lmedm04.c 		mutex_unlock(&d->i2c_mutex);
d                 448 drivers/media/usb/dvb-usb-v2/lmedm04.c static int lme2510_return_status(struct dvb_usb_device *d)
d                 457 drivers/media/usb/dvb-usb-v2/lmedm04.c 	ret = usb_control_msg(d->udev, usb_rcvctrlpipe(d->udev, 0),
d                 471 drivers/media/usb/dvb-usb-v2/lmedm04.c static int lme2510_msg(struct dvb_usb_device *d,
d                 474 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct lme2510_state *st = d->priv;
d                 478 drivers/media/usb/dvb-usb-v2/lmedm04.c 	return lme2510_usb_talk(d, wbuf, wlen, rbuf, rlen);
d                 484 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 485 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct lme2510_state *st = d->priv;
d                 491 drivers/media/usb/dvb-usb-v2/lmedm04.c 	mutex_lock(&d->i2c_mutex);
d                 526 drivers/media/usb/dvb-usb-v2/lmedm04.c 		if (lme2510_msg(d, obuf, len, ibuf, 64) < 0) {
d                 528 drivers/media/usb/dvb-usb-v2/lmedm04.c 			mutex_unlock(&d->i2c_mutex);
d                 542 drivers/media/usb/dvb-usb-v2/lmedm04.c 	mutex_unlock(&d->i2c_mutex);
d                 559 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 573 drivers/media/usb/dvb-usb-v2/lmedm04.c 		mutex_lock(&d->i2c_mutex);
d                 575 drivers/media/usb/dvb-usb-v2/lmedm04.c 		ret = lme2510_usb_talk(d, clear_reg_3,
d                 580 drivers/media/usb/dvb-usb-v2/lmedm04.c 		mutex_unlock(&d->i2c_mutex);
d                 594 drivers/media/usb/dvb-usb-v2/lmedm04.c static int lme2510_download_firmware(struct dvb_usb_device *d,
d                 633 drivers/media/usb/dvb-usb-v2/lmedm04.c 			lme2510_usb_talk(d, data, wlen, data, len_in);
d                 641 drivers/media/usb/dvb-usb-v2/lmedm04.c 	lme2510_usb_talk(d, data, len_in, data, len_in);
d                 653 drivers/media/usb/dvb-usb-v2/lmedm04.c static void lme_coldreset(struct dvb_usb_device *d)
d                 659 drivers/media/usb/dvb-usb-v2/lmedm04.c 	lme2510_usb_talk(d, data, sizeof(data), data, sizeof(data));
d                 671 drivers/media/usb/dvb-usb-v2/lmedm04.c static const char *lme_firmware_switch(struct dvb_usb_device *d, int cold)
d                 673 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct lme2510_state *st = d->priv;
d                 674 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct usb_device *udev = d->udev;
d                 750 drivers/media/usb/dvb-usb-v2/lmedm04.c 		lme_coldreset(d);
d                 816 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct dvb_usb_device *d = fe_to_d(fe);
d                 823 drivers/media/usb/dvb-usb-v2/lmedm04.c 	mutex_lock(&d->i2c_mutex);
d                 827 drivers/media/usb/dvb-usb-v2/lmedm04.c 		ret |= lme2510_usb_talk(d,
d                 834 drivers/media/usb/dvb-usb-v2/lmedm04.c 		ret |= lme2510_usb_talk(d,
d                 839 drivers/media/usb/dvb-usb-v2/lmedm04.c 	mutex_unlock(&d->i2c_mutex);
d                 851 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct dvb_usb_device *d = fe_to_d(fe);
d                 852 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct lme2510_state *st = d->priv;
d                 865 drivers/media/usb/dvb-usb-v2/lmedm04.c 			mutex_lock(&d->i2c_mutex);
d                 868 drivers/media/usb/dvb-usb-v2/lmedm04.c 			ret = lme2510_stream_restart(d);
d                 870 drivers/media/usb/dvb-usb-v2/lmedm04.c 			mutex_unlock(&d->i2c_mutex);
d                 952 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 954 drivers/media/usb/dvb-usb-v2/lmedm04.c 	const char *desc = d->name;
d                 968 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 969 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct lme2510_state *st = d->priv;
d                 973 drivers/media/usb/dvb-usb-v2/lmedm04.c 	switch (le16_to_cpu(d->udev->descriptor.idProduct)) {
d                 978 drivers/media/usb/dvb-usb-v2/lmedm04.c 			&tda10086_config, &d->i2c_adap);
d                 987 drivers/media/usb/dvb-usb-v2/lmedm04.c 				ret = lme_firmware_switch(d, 1) ? 0 : -ENODEV;
d                 994 drivers/media/usb/dvb-usb-v2/lmedm04.c 				&sharp_z0194_config, &d->i2c_adap);
d                1003 drivers/media/usb/dvb-usb-v2/lmedm04.c 				ret = lme_firmware_switch(d, 1) ? 0 : -ENODEV;
d                1010 drivers/media/usb/dvb-usb-v2/lmedm04.c 			&d->i2c_adap);
d                1020 drivers/media/usb/dvb-usb-v2/lmedm04.c 				ret = lme_firmware_switch(d, 1) ? 0 : -ENODEV;
d                1028 drivers/media/usb/dvb-usb-v2/lmedm04.c 			&m88rs2000_config, &d->i2c_adap);
d                1052 drivers/media/usb/dvb-usb-v2/lmedm04.c 		d->rc_map = NULL;
d                1075 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                1083 drivers/media/usb/dvb-usb-v2/lmedm04.c 			&d->i2c_adap, 1))
d                1088 drivers/media/usb/dvb-usb-v2/lmedm04.c 			&d->i2c_adap))
d                1093 drivers/media/usb/dvb-usb-v2/lmedm04.c 			&d->i2c_adap, DVB_PLL_OPERA1))
d                1098 drivers/media/usb/dvb-usb-v2/lmedm04.c 			       &ts2020_config, &d->i2c_adap))
d                1122 drivers/media/usb/dvb-usb-v2/lmedm04.c static int lme2510_powerup(struct dvb_usb_device *d, int onoff)
d                1124 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct lme2510_state *st = d->priv;
d                1130 drivers/media/usb/dvb-usb-v2/lmedm04.c 	mutex_lock(&d->i2c_mutex);
d                1132 drivers/media/usb/dvb-usb-v2/lmedm04.c 	ret = lme2510_usb_talk(d, onoff ? lnb_on : lnb_off, len, rbuf, rlen);
d                1136 drivers/media/usb/dvb-usb-v2/lmedm04.c 	mutex_unlock(&d->i2c_mutex);
d                1141 drivers/media/usb/dvb-usb-v2/lmedm04.c static int lme2510_get_adapter_count(struct dvb_usb_device *d)
d                1146 drivers/media/usb/dvb-usb-v2/lmedm04.c static int lme2510_identify_state(struct dvb_usb_device *d, const char **name)
d                1148 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct lme2510_state *st = d->priv;
d                1151 drivers/media/usb/dvb-usb-v2/lmedm04.c 	usb_reset_configuration(d->udev);
d                1153 drivers/media/usb/dvb-usb-v2/lmedm04.c 	usb_set_interface(d->udev,
d                1154 drivers/media/usb/dvb-usb-v2/lmedm04.c 		d->props->bInterfaceNumber, 1);
d                1158 drivers/media/usb/dvb-usb-v2/lmedm04.c 	status = lme2510_return_status(d);
d                1160 drivers/media/usb/dvb-usb-v2/lmedm04.c 		*name = lme_firmware_switch(d, 0);
d                1174 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct dvb_usb_device *d;
d                1179 drivers/media/usb/dvb-usb-v2/lmedm04.c 	d = adap_to_d(adap);
d                1187 drivers/media/usb/dvb-usb-v2/lmedm04.c 	if (!(le16_to_cpu(d->udev->descriptor.idProduct)
d                1194 drivers/media/usb/dvb-usb-v2/lmedm04.c static int lme2510_get_rc_config(struct dvb_usb_device *d,
d                1201 drivers/media/usb/dvb-usb-v2/lmedm04.c static void lme2510_exit(struct dvb_usb_device *d)
d                1203 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct lme2510_state *st = d->priv;
d                1204 drivers/media/usb/dvb-usb-v2/lmedm04.c 	struct dvb_usb_adapter *adap = &d->adapter[0];
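
The lmedm04.c hits above route every transfer through lme2510_usb_talk(), which copies the caller's message into a persistent state buffer while d->usb_mutex is held and then calls the dvb-usb-v2 generic transfer helper. A sketch of that wrapper follows; the 64-byte bound and the rbuf/rlen tail of dvb_usbv2_generic_rw_locked()'s signature are assumptions inferred from the listing.

/*
 * Hedged sketch of the lme2510_usb_talk() pattern above. state_buf stands
 * in for the driver's st->usb_buffer; error codes are illustrative.
 */
#include <linux/string.h>
#include "dvb_usb.h"	/* dvb-usb-v2 core: struct dvb_usb_device, generic xfer */

static int example_usb_talk(struct dvb_usb_device *d, u8 *state_buf,
			    u8 *wbuf, int wlen, u8 *rbuf, int rlen)
{
	int ret;

	if (wlen > 64)			/* assumed state-buffer bound */
		return -EINVAL;

	ret = mutex_lock_interruptible(&d->usb_mutex);
	if (ret < 0)
		return -EAGAIN;

	memcpy(state_buf, wbuf, wlen);	/* USB core needs a DMA-able buffer */
	ret = dvb_usbv2_generic_rw_locked(d, state_buf, wlen, rbuf, rlen);

	mutex_unlock(&d->usb_mutex);
	return ret;
}
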
d                 584 drivers/media/usb/dvb-usb-v2/mxl111sf-gpio.c 	i2c_transfer(&state->d->i2c_adap, msg, 2);
d                 600 drivers/media/usb/dvb-usb-v2/mxl111sf-gpio.c 	i2c_transfer(&state->d->i2c_adap, &msg[0], 1);
d                 616 drivers/media/usb/dvb-usb-v2/mxl111sf-gpio.c 	i2c_transfer(&state->d->i2c_adap, &msg, 1);
d                 622 drivers/media/usb/dvb-usb-v2/mxl111sf-gpio.c 	i2c_transfer(&state->d->i2c_adap, &msg, 1);
d                 659 drivers/media/usb/dvb-usb-v2/mxl111sf-gpio.c 	ret = i2c_transfer(&state->d->i2c_adap, msg, 2);
d                 671 drivers/media/usb/dvb-usb-v2/mxl111sf-gpio.c 	ret = i2c_transfer(&state->d->i2c_adap, msg, 2);
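
The mxl111sf-gpio.c hits above use two-message i2c_transfer() calls against state->d->i2c_adap: a register-address write immediately followed by a read of the value. A sketch of that write-then-read transaction follows; the one-byte register width is an assumption.

/*
 * Hedged sketch of the two-message i2c_transfer() pattern in the
 * mxl111sf-gpio.c hits above.
 */
#include <linux/errno.h>
#include <linux/i2c.h>

static int example_i2c_read_reg(struct i2c_adapter *adap, u8 addr, u8 reg,
				u8 *val)
{
	struct i2c_msg msg[] = {
		{ .addr = addr, .flags = 0,        .len = 1, .buf = &reg },
		{ .addr = addr, .flags = I2C_M_RD, .len = 1, .buf = val  },
	};
	int ret;

	ret = i2c_transfer(adap, msg, 2);
	if (ret < 0)
		return ret;

	return ret == 2 ? 0 : -EREMOTEIO;	/* both messages must complete */
}
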
d                 802 drivers/media/usb/dvb-usb-v2/mxl111sf-i2c.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 803 drivers/media/usb/dvb-usb-v2/mxl111sf-i2c.c 	struct mxl111sf_state *state = d->priv;
d                 807 drivers/media/usb/dvb-usb-v2/mxl111sf-i2c.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 825 drivers/media/usb/dvb-usb-v2/mxl111sf-i2c.c 	mutex_unlock(&d->i2c_mutex);
d                  55 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	struct dvb_usb_device *d = state->d;
d                  73 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	ret = (wo) ? dvb_usbv2_generic_write(d, state->sndbuf, 1+wlen) :
d                  74 drivers/media/usb/dvb-usb-v2/mxl111sf.c 		dvb_usbv2_generic_rw(d, state->sndbuf, 1+wlen, state->rcvbuf,
d                 257 drivers/media/usb/dvb-usb-v2/mxl111sf.c static int mxl111sf_power_ctrl(struct dvb_usb_device *d, int onoff)
d                 267 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	struct dvb_usb_device *d = fe_to_d(fe);
d                 284 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	if (usb_set_interface(d->udev, 0, state->alt_mode) < 0)
d                 427 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 428 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	struct mxl111sf_state *state = d_to_priv(d);
d                 435 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	state->d = d;
d                 439 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	if (usb_set_interface(d->udev, 0, state->alt_mode) < 0)
d                 474 drivers/media/usb/dvb-usb-v2/mxl111sf.c 				 &d->i2c_adap);
d                 498 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 499 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	struct mxl111sf_state *state = d_to_priv(d);
d                 506 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	state->d = d;
d                 510 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	if (usb_set_interface(d->udev, 0, state->alt_mode) < 0)
d                 549 drivers/media/usb/dvb-usb-v2/mxl111sf.c 			      &d->i2c_adap);
d                 583 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 584 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	struct mxl111sf_state *state = d_to_priv(d);
d                 591 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	state->d = d;
d                 595 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	if (usb_set_interface(d->udev, 0, state->alt_mode) < 0)
d                 636 drivers/media/usb/dvb-usb-v2/mxl111sf.c 			      &d->i2c_adap);
d                 670 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 671 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	struct mxl111sf_state *state = d_to_priv(d);
d                 678 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	state->d = d;
d                 682 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	if (usb_set_interface(d->udev, 0, state->alt_mode) < 0)
d                 723 drivers/media/usb/dvb-usb-v2/mxl111sf.c 			      &d->i2c_adap);
d                 745 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 746 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	struct mxl111sf_state *state = d_to_priv(d);
d                 753 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	state->d = d;
d                 757 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	if (usb_set_interface(d->udev, 0, state->alt_mode) < 0)
d                 922 drivers/media/usb/dvb-usb-v2/mxl111sf.c static int mxl111sf_init(struct dvb_usb_device *d)
d                 924 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	struct mxl111sf_state *state = d_to_priv(d);
d                 945 drivers/media/usb/dvb-usb-v2/mxl111sf.c 	ret = i2c_transfer(&d->i2c_adap, msg, 2);
d                  59 drivers/media/usb/dvb-usb-v2/mxl111sf.h 	struct dvb_usb_device *d;
d                  17 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl28xxu_ctrl_msg(struct dvb_usb_device *d, struct rtl28xxu_req *req)
d                  19 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct rtl28xxu_dev *dev = d->priv;
d                  24 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	mutex_lock(&d->usb_mutex);
d                  27 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		dev_err(&d->intf->dev, "too large message %u\n", req->size);
d                  36 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		pipe = usb_sndctrlpipe(d->udev, 0);
d                  40 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		pipe = usb_rcvctrlpipe(d->udev, 0);
d                  43 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = usb_control_msg(d->udev, pipe, 0, requesttype, req->value,
d                  45 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dvb_usb_dbg_usb_control_msg(d->udev, 0, requesttype, req->value,
d                  54 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	mutex_unlock(&d->usb_mutex);
d                  58 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	mutex_unlock(&d->usb_mutex);
d                  59 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "failed=%d\n", ret);
d                  63 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl28xxu_wr_regs(struct dvb_usb_device *d, u16 reg, u8 *val, int len)
d                  78 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	return rtl28xxu_ctrl_msg(d, &req);
d                  81 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl28xxu_rd_regs(struct dvb_usb_device *d, u16 reg, u8 *val, int len)
d                  96 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	return rtl28xxu_ctrl_msg(d, &req);
d                  99 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl28xxu_wr_reg(struct dvb_usb_device *d, u16 reg, u8 val)
d                 101 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	return rtl28xxu_wr_regs(d, reg, &val, 1);
d                 104 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl28xxu_rd_reg(struct dvb_usb_device *d, u16 reg, u8 *val)
d                 106 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	return rtl28xxu_rd_regs(d, reg, val, 1);
d                 109 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl28xxu_wr_reg_mask(struct dvb_usb_device *d, u16 reg, u8 val,
d                 117 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_rd_reg(d, reg, &tmp);
d                 126 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	return rtl28xxu_wr_reg(d, reg, val);
d                 134 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 135 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct rtl28xxu_dev *dev = d->priv;
d                 160 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 180 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 				ret = rtl28xxu_ctrl_msg(d, &req);
d                 188 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 			ret = rtl28xxu_ctrl_msg(d, &req);
d                 195 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 			ret = rtl28xxu_ctrl_msg(d, &req);
d                 203 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 			ret = rtl28xxu_ctrl_msg(d, &req);
d                 222 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 				ret = rtl28xxu_ctrl_msg(d, &req);
d                 230 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 			ret = rtl28xxu_ctrl_msg(d, &req);
d                 237 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 			ret = rtl28xxu_ctrl_msg(d, &req);
d                 244 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_ctrl_msg(d, &req);
d                 254 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	mutex_unlock(&d->i2c_mutex);
d                 269 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl2831u_read_config(struct dvb_usb_device *d)
d                 271 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct rtl28xxu_dev *dev = d_to_priv(d);
d                 280 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "\n");
d                 291 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_wr_reg(d, SYS_GPIO_DIR, 0x0a);
d                 296 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_wr_reg(d, SYS_GPIO_OUT_EN, 0x15);
d                 311 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_gate_open);
d                 316 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_qt1010);
d                 324 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_gate_open);
d                 329 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_mt2060);
d                 342 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "tuner=%s\n", dev->tuner_name);
d                 346 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "failed=%d\n", ret);
d                 350 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl2832u_read_config(struct dvb_usb_device *d)
d                 352 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct rtl28xxu_dev *dev = d_to_priv(d);
d                 378 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "\n");
d                 381 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_wr_reg_mask(d, SYS_GPIO_DIR, 0x00, 0x40);
d                 385 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_wr_reg_mask(d, SYS_GPIO_OUT_EN, 0x48, 0x48);
d                 395 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_gate_open);
d                 402 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_fc0012);
d                 410 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_fc0013);
d                 418 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_mt2266);
d                 426 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_fc2580);
d                 434 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_mt2063);
d                 442 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_max3543);
d                 450 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_tua9001);
d                 458 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_mxl5007t);
d                 466 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_e4000);
d                 474 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_tda18272);
d                 482 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_r820t);
d                 490 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_r828d);
d                 498 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_wr_reg_mask(d, SYS_GPIO_OUT_VAL, 0x00, 0x21);
d                 502 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_wr_reg_mask(d, SYS_GPIO_OUT_EN, 0x00, 0x21);
d                 508 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_wr_reg_mask(d, SYS_GPIO_OUT_VAL, 0x21, 0x21);
d                 512 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_wr_reg_mask(d, SYS_GPIO_OUT_EN, 0x21, 0x21);
d                 519 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_si2157);
d                 527 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "tuner=%s\n", dev->tuner_name);
d                 532 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg_mask(d, SYS_GPIO_OUT_VAL, 0x00, 0x01);
d                 536 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg_mask(d, SYS_GPIO_OUT_EN, 0x00, 0x01);
d                 543 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg_mask(d, SYS_GPIO_OUT_VAL, 0x01, 0x01);
d                 547 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg_mask(d, SYS_GPIO_DIR, 0x00, 0x01);
d                 551 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg_mask(d, SYS_GPIO_OUT_EN, 0x01, 0x01);
d                 556 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_ctrl_msg(d, &req_mn88472);
d                 558 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 			dev_dbg(&d->intf->dev, "MN88472 found\n");
d                 563 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_ctrl_msg(d, &req_mn88473);
d                 565 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 			dev_dbg(&d->intf->dev, "MN88473 found\n");
d                 570 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_ctrl_msg(d, &req_cxd2837er);
d                 572 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 			dev_dbg(&d->intf->dev, "CXD2837ER found\n");
d                 579 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_ctrl_msg(d, &req_si2168);
d                 581 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 			dev_dbg(&d->intf->dev, "Si2168 found\n");
d                 589 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_gate_close);
d                 595 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "failed=%d\n", ret);
d                 599 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl28xxu_read_config(struct dvb_usb_device *d)
d                 601 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct rtl28xxu_dev *dev = d_to_priv(d);
d                 604 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		return rtl2831u_read_config(d);
d                 606 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		return rtl2832u_read_config(d);
d                 609 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl28xxu_identify_state(struct dvb_usb_device *d, const char **name)
d                 611 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct rtl28xxu_dev *dev = d_to_priv(d);
d                 615 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "\n");
d                 621 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_ctrl_msg(d, &req_demod_i2c);
d                 627 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		dev_err(&d->intf->dev, "chip type detection failed %d\n", ret);
d                 630 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "chip_id=%u\n", dev->chip_id);
d                 633 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	d->i2c_adap.retries = 3;
d                 634 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	d->i2c_adap.timeout = msecs_to_jiffies(10);
d                 638 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "failed=%d\n", ret);
d                 669 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 670 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct rtl28xxu_dev *dev = d_to_priv(d);
d                 676 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "\n");
d                 689 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		dev_err(&d->intf->dev, "unknown tuner %s\n", dev->tuner_name);
d                 700 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	client = i2c_new_device(&d->i2c_adap, &board_info);
d                 719 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "failed=%d\n", ret);
d                 758 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl2832u_fc0012_tuner_callback(struct dvb_usb_device *d,
d                 764 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "cmd=%d arg=%d\n", cmd, arg);
d                 769 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_rd_reg(d, SYS_GPIO_OUT_VAL, &val);
d                 779 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg(d, SYS_GPIO_OUT_VAL, val);
d                 789 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "failed=%d\n", ret);
d                 793 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl2832u_tua9001_tuner_callback(struct dvb_usb_device *d,
d                 799 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "cmd=%d arg=%d\n", cmd, arg);
d                 814 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg_mask(d, SYS_GPIO_OUT_VAL, val, 0x10);
d                 824 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg_mask(d, SYS_GPIO_OUT_VAL, val, 0x02);
d                 832 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "failed=%d\n", ret);
d                 842 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct dvb_usb_device *d;
d                 856 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	d = i2c_get_adapdata(parent_adapter);
d                 857 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev = d->priv;
d                 859 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "component=%d cmd=%d arg=%d\n",
d                 866 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 			return rtl2832u_fc0012_tuner_callback(d, cmd, arg);
d                 868 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 			return rtl2832u_tua9001_tuner_callback(d, cmd, arg);
d                 877 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 878 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct rtl28xxu_dev *dev = d_to_priv(d);
d                 884 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "\n");
d                 910 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		dev_err(&d->intf->dev, "unknown tuner %s\n", dev->tuner_name);
d                 921 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	client = i2c_new_device(&d->i2c_adap, &board_info);
d                 963 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 			client = i2c_new_device(&d->i2c_adap, &info);
d                 985 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 			client = i2c_new_device(&d->i2c_adap, &info);
d                1008 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 						 &d->i2c_adap);
d                1028 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 			client = i2c_new_device(&d->i2c_adap, &info);
d                1049 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "failed=%d\n", ret);
d                1065 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                1066 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct rtl28xxu_dev *dev = d_to_priv(d);
d                1069 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "\n");
d                1117 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                1118 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct rtl28xxu_dev *dev = d_to_priv(d);
d                1121 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "\n");
d                1141 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		dev_err(&d->intf->dev, "unknown tuner %d\n", dev->tuner);
d                1151 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "failed=%d\n", ret);
d                1177 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                1178 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct rtl28xxu_dev *dev = d_to_priv(d);
d                1186 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "\n");
d                1262 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg_mask(d, SYS_GPIO_DIR, 0x00, 0x12);
d                1266 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg_mask(d, SYS_GPIO_OUT_EN, 0x12, 0x12);
d                1319 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 			client = i2c_new_device(&d->i2c_adap, &info);
d                1342 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		dev_err(&d->intf->dev, "unknown tuner %d\n", dev->tuner);
d                1361 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		pdata.dvb_usb_device = d;
d                1365 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		pdev = platform_device_register_data(&d->intf->dev,
d                1374 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		dev_dbg(&d->intf->dev, "no SDR for tuner=%d\n", dev->tuner);
d                1379 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "failed=%d\n", ret);
d                1395 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                1396 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct rtl28xxu_dev *dev = d_to_priv(d);
d                1400 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "\n");
d                1417 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl28xxu_init(struct dvb_usb_device *d)
d                1422 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "\n");
d                1425 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_rd_reg(d, USB_SYSCTL_0, &val);
d                1431 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_wr_reg(d, USB_SYSCTL_0, val);
d                1436 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_wr_regs(d, USB_EPA_MAXPKT, "\x00\x02\x00\x00", 4);
d                1441 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_wr_regs(d, USB_EPA_FIFO_CFG, "\x14\x00\x00\x00", 4);
d                1447 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "failed=%d\n", ret);
d                1451 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl2831u_power_ctrl(struct dvb_usb_device *d, int onoff)
d                1456 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "onoff=%d\n", onoff);
d                1459 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_rd_reg(d, SYS_SYS0, &sys0);
d                1464 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_rd_reg(d, SYS_GPIO_OUT_VAL, &gpio);
d                1468 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "RD SYS0=%02x GPIO_OUT_VAL=%02x\n", sys0, gpio);
d                1487 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "WR SYS0=%02x GPIO_OUT_VAL=%02x\n", sys0, gpio);
d                1490 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_wr_reg(d, SYS_SYS0, sys0);
d                1495 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_wr_reg(d, SYS_GPIO_OUT_VAL, gpio);
d                1500 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_wr_regs(d, USB_EPA_CTL, epa_ctl, 2);
d                1505 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		usb_clear_halt(d->udev, usb_rcvbulkpipe(d->udev, 0x81));
d                1509 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "failed=%d\n", ret);
d                1513 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl2832u_power_ctrl(struct dvb_usb_device *d, int onoff)
d                1517 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "onoff=%d\n", onoff);
d                1521 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg_mask(d, SYS_GPIO_OUT_VAL, 0x08, 0x18);
d                1526 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg_mask(d, SYS_DEMOD_CTL1, 0x00, 0x10);
d                1531 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg_mask(d, SYS_DEMOD_CTL, 0x80, 0x80);
d                1536 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg_mask(d, SYS_DEMOD_CTL, 0x20, 0x20);
d                1541 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_regs(d, USB_EPA_CTL, "\x00\x00", 2);
d                1545 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = usb_clear_halt(d->udev, usb_rcvbulkpipe(d->udev, 0x81));
d                1550 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg_mask(d, SYS_GPIO_OUT_VAL, 0x10, 0x10);
d                1555 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg_mask(d, SYS_DEMOD_CTL, 0x00, 0x80);
d                1560 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_regs(d, USB_EPA_CTL, "\x10\x02", 2);
d                1567 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "failed=%d\n", ret);
d                1571 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl28xxu_power_ctrl(struct dvb_usb_device *d, int onoff)
d                1573 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct rtl28xxu_dev *dev = d_to_priv(d);
d                1576 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		return rtl2831u_power_ctrl(d, onoff);
d                1578 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		return rtl2832u_power_ctrl(d, onoff);
d                1583 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct dvb_usb_device *d = fe_to_d(fe);
d                1589 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "fe=%d onoff=%d\n", fe->id, onoff);
d                1601 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg_mask(d, SYS_DEMOD_CTL, val, 0x48);
d                1613 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "failed=%d\n", ret);
d                1618 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl2831u_rc_query(struct dvb_usb_device *d)
d                1621 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct rtl28xxu_dev *dev = d->priv;
d                1644 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 			ret = rtl28xxu_wr_reg(d, rc_nec_tab[i].reg,
d                1652 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_rd_regs(d, SYS_IRRC_RP, buf, 5);
d                1677 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		rc_keydown(d->rc_dev, proto, rc_code, 0);
d                1679 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg(d, SYS_IRRC_SR, 1);
d                1684 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg(d, SYS_IRRC_SR, 1);
d                1691 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "failed=%d\n", ret);
d                1695 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl2831u_get_rc_config(struct dvb_usb_device *d,
d                1707 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl2832u_rc_query(struct dvb_usb_device *d)
d                1710 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct rtl28xxu_dev *dev = d->priv;
d                1741 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 			ret = rtl28xxu_wr_reg_mask(d, init_tab[i].reg,
d                1750 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_rd_reg(d, IR_RX_IF, &buf[0]);
d                1757 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_rd_reg(d, IR_RX_BC, &buf[0]);
d                1764 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ret = rtl28xxu_rd_regs(d, IR_RX_BUF, buf, len);
d                1770 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ret = rtl28xxu_wr_reg_mask(d, refresh_tab[i].reg,
d                1780 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		ir_raw_event_store_with_filter(d->rc_dev, &ev);
d                1784 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ir_raw_event_set_idle(d->rc_dev, true);
d                1785 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	ir_raw_event_handle(d->rc_dev);
d                1789 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	dev_dbg(&d->intf->dev, "failed=%d\n", ret);
d                1793 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl2832u_get_rc_config(struct dvb_usb_device *d,
d                1798 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		return rtl28xxu_wr_reg(d, IR_RX_IE, 0x00);
d                1811 drivers/media/usb/dvb-usb-v2/rtl28xxu.c static int rtl28xxu_get_rc_config(struct dvb_usb_device *d,
d                1814 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 	struct rtl28xxu_dev *dev = d_to_priv(d);
d                1817 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		return rtl2831u_get_rc_config(d, rc);
d                1819 drivers/media/usb/dvb-usb-v2/rtl28xxu.c 		return rtl2832u_get_rc_config(d, rc);
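
The rtl28xxu.c hits above show the driver's register-access layering: single-byte wrappers over multi-byte accessors, plus rtl28xxu_wr_reg_mask() doing a read-modify-write that skips the read when the mask covers the whole byte. A sketch of that layering follows; example_wr_regs()/example_rd_regs() are stubs standing in for the control-message backends.

/*
 * Hedged sketch of the wr_reg/rd_reg/wr_reg_mask layering visible in the
 * rtl28xxu.c hits above. Only bits set in 'mask' are changed.
 */
#include <linux/types.h>

static int example_wr_regs(u16 reg, u8 *val, int len) { return 0; } /* stub */
static int example_rd_regs(u16 reg, u8 *val, int len) { return 0; } /* stub */

static int example_wr_reg(u16 reg, u8 val)
{
	return example_wr_regs(reg, &val, 1);
}

static int example_rd_reg(u16 reg, u8 *val)
{
	return example_rd_regs(reg, val, 1);
}

static int example_wr_reg_mask(u16 reg, u8 val, u8 mask)
{
	int ret;
	u8 tmp;

	/* A full mask needs no read-back. */
	if (mask != 0xff) {
		ret = example_rd_reg(reg, &tmp);
		if (ret)
			return ret;
		val &= mask;
		tmp &= ~mask;
		val |= tmp;
	}
	return example_wr_reg(reg, val);
}
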
d                  25 drivers/media/usb/dvb-usb-v2/zd1301.c static int zd1301_ctrl_msg(struct dvb_usb_device *d, const u8 *wbuf,
d                  28 drivers/media/usb/dvb-usb-v2/zd1301.c 	struct zd1301_dev *dev = d_to_priv(d);
d                  29 drivers/media/usb/dvb-usb-v2/zd1301.c 	struct usb_interface *intf = d->intf;
d                  32 drivers/media/usb/dvb-usb-v2/zd1301.c 	mutex_lock(&d->usb_mutex);
d                  38 drivers/media/usb/dvb-usb-v2/zd1301.c 	ret = usb_bulk_msg(d->udev, usb_sndbulkpipe(d->udev, 0x04), dev->buf,
d                  46 drivers/media/usb/dvb-usb-v2/zd1301.c 		ret = usb_bulk_msg(d->udev, usb_rcvbulkpipe(d->udev, 0x83),
d                  63 drivers/media/usb/dvb-usb-v2/zd1301.c 			ret = usb_bulk_msg(d->udev,
d                  64 drivers/media/usb/dvb-usb-v2/zd1301.c 					   usb_rcvbulkpipe(d->udev, 0x83),
d                  81 drivers/media/usb/dvb-usb-v2/zd1301.c 	mutex_unlock(&d->usb_mutex);
d                  87 drivers/media/usb/dvb-usb-v2/zd1301.c 	struct dvb_usb_device *d = reg_priv;
d                  88 drivers/media/usb/dvb-usb-v2/zd1301.c 	struct usb_interface *intf = d->intf;
d                  93 drivers/media/usb/dvb-usb-v2/zd1301.c 	ret = zd1301_ctrl_msg(d, buf, 7, NULL, 0);
d                 105 drivers/media/usb/dvb-usb-v2/zd1301.c 	struct dvb_usb_device *d = reg_priv;
d                 106 drivers/media/usb/dvb-usb-v2/zd1301.c 	struct usb_interface *intf = d->intf;
d                 111 drivers/media/usb/dvb-usb-v2/zd1301.c 	ret = zd1301_ctrl_msg(d, buf, 7, buf, 7);
d                 125 drivers/media/usb/dvb-usb-v2/zd1301.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 127 drivers/media/usb/dvb-usb-v2/zd1301.c 	struct usb_interface *intf = d->intf;
d                 138 drivers/media/usb/dvb-usb-v2/zd1301.c 	dev->demod_pdata.reg_priv = d;
d                 203 drivers/media/usb/dvb-usb-v2/zd1301.c 	struct dvb_usb_device *d = adap_to_d(adap);
d                 204 drivers/media/usb/dvb-usb-v2/zd1301.c 	struct zd1301_dev *dev = d_to_priv(d);
d                 205 drivers/media/usb/dvb-usb-v2/zd1301.c 	struct usb_interface *intf = d->intf;
d                 231 drivers/media/usb/dvb-usb-v2/zd1301.c 	struct dvb_usb_device *d = fe_to_d(fe);
d                 232 drivers/media/usb/dvb-usb-v2/zd1301.c 	struct usb_interface *intf = d->intf;
d                 238 drivers/media/usb/dvb-usb-v2/zd1301.c 	ret = zd1301_ctrl_msg(d, buf, 3, NULL, 0);
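
The zd1301.c hits above show zd1301_ctrl_msg() doing a bulk write on endpoint 0x04, optionally followed by a bulk read on endpoint 0x83, all serialised by d->usb_mutex. A sketch of that write-then-read bulk exchange follows; the shared buffer reuse and the 1000 ms timeout are assumptions.

/*
 * Hedged sketch of the bulk control-message pattern in the zd1301.c hits
 * above. Endpoint numbers come from the listing; everything else is
 * illustrative.
 */
#include <linux/mutex.h>
#include <linux/usb.h>

static int example_bulk_talk(struct usb_device *udev, struct mutex *usb_mutex,
			     u8 *buf, int wlen, int rlen)
{
	int ret, actual;

	mutex_lock(usb_mutex);

	ret = usb_bulk_msg(udev, usb_sndbulkpipe(udev, 0x04), buf, wlen,
			   &actual, 1000);
	if (ret)
		goto err;

	if (rlen)
		ret = usb_bulk_msg(udev, usb_rcvbulkpipe(udev, 0x83), buf,
				   rlen, &actual, 1000);
err:
	mutex_unlock(usb_mutex);
	return ret;
}
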
d                  23 drivers/media/usb/dvb-usb/a800.c static int a800_power_ctrl(struct dvb_usb_device *d, int onoff)
d                  37 drivers/media/usb/dvb-usb/a800.c static int a800_rc_query(struct dvb_usb_device *d)
d                  44 drivers/media/usb/dvb-usb/a800.c 	if (usb_control_msg(d->udev,usb_rcvctrlpipe(d->udev,0),
d                  53 drivers/media/usb/dvb-usb/a800.c 		rc_keydown(d->rc_dev, RC_PROTO_NEC,
d                  56 drivers/media/usb/dvb-usb/a800.c 		rc_repeat(d->rc_dev);
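
The a800.c hits above poll a vendor control endpoint for remote-control data and feed the result to the RC core, distinguishing a fresh NEC keypress (rc_keydown) from a repeat frame (rc_repeat). A sketch of that hand-off follows; the key-byte offsets and the repeat marker are assumptions.

/*
 * Hedged sketch of the a800_rc_query() hand-off to the RC core seen above.
 */
#include <media/rc-core.h>

static void example_handle_rc(struct rc_dev *rc_dev, const u8 *key, int len)
{
	if (len < 5)
		return;

	if (key[1] == 0xff)		/* assumed NEC repeat marker */
		rc_repeat(rc_dev);
	else
		rc_keydown(rc_dev, RC_PROTO_NEC,
			   RC_SCANCODE_NEC(key[1], key[3]), 0);
}
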
d                  18 drivers/media/usb/dvb-usb/af9005-fe.c 	struct dvb_usb_device *d;
d                  49 drivers/media/usb/dvb-usb/af9005-fe.c static int af9005_write_word_agc(struct dvb_usb_device *d, u16 reghi,
d                  54 drivers/media/usb/dvb-usb/af9005-fe.c 	if ((ret = af9005_write_ofdm_register(d, reglo, (u8) (value & 0xff))))
d                  56 drivers/media/usb/dvb-usb/af9005-fe.c 	return af9005_write_register_bits(d, reghi, pos, len,
d                  60 drivers/media/usb/dvb-usb/af9005-fe.c static int af9005_read_word_agc(struct dvb_usb_device *d, u16 reghi,
d                  66 drivers/media/usb/dvb-usb/af9005-fe.c 	if ((ret = af9005_read_ofdm_register(d, reglo, &temp0)))
d                  68 drivers/media/usb/dvb-usb/af9005-fe.c 	if ((ret = af9005_read_ofdm_register(d, reghi, &temp1)))
d                  99 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_read_register_bits(state->d, xd_p_fec_vtb_rsd_mon_en,
d                 106 drivers/media/usb/dvb-usb/af9005-fe.c 		    af9005_read_register_bits(state->d,
d                 136 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_register_bits(state->d, xd_r_fec_rsd_ber_rdy,
d                 147 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_ofdm_register(state->d,
d                 153 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_ofdm_register(state->d,
d                 162 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_ofdm_register(state->d, xd_r_fec_rsd_bit_err_cnt_7_0,
d                 167 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_ofdm_register(state->d, xd_r_fec_rsd_bit_err_cnt_15_8,
d                 172 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_ofdm_register(state->d, xd_r_fec_rsd_bit_err_cnt_23_16,
d                 181 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_ofdm_register(state->d, xd_p_fec_rsd_packet_unit_7_0,
d                 186 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_ofdm_register(state->d, xd_p_fec_rsd_packet_unit_15_8,
d                 233 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_register_bits(state->d, xd_r_fec_vtb_ber_rdy,
d                 243 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_ofdm_register(state->d, xd_r_fec_vtb_err_bit_cnt_7_0,
d                 248 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_ofdm_register(state->d, xd_r_fec_vtb_err_bit_cnt_15_8,
d                 253 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_ofdm_register(state->d, xd_r_fec_vtb_err_bit_cnt_23_16,
d                 260 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_ofdm_register(state->d, xd_p_fec_super_frm_unit_7_0,
d                 265 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_ofdm_register(state->d, xd_p_fec_super_frm_unit_15_8,
d                 277 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_register_bits(state->d, xd_g_reg_tpsd_txmod,
d                 295 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_register_bits(state->d, xd_g_reg_tpsd_const,
d                 327 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_write_ofdm_register(state->d, xd_p_fec_super_frm_unit_7_0,
d                 331 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(state->d, xd_p_fec_super_frm_unit_15_8,
d                 337 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_write_register_bits(state->d, xd_p_fec_vtb_ber_rst,
d                 351 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_write_ofdm_register(state->d, xd_p_fec_rsd_packet_unit_7_0,
d                 356 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_write_ofdm_register(state->d, xd_p_fec_rsd_packet_unit_15_8,
d                 362 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_write_register_bits(state->d, xd_p_fec_rsd_ber_rst,
d                 438 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_read_register_bits(state->d, xd_p_agc_lock,
d                 445 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_read_register_bits(state->d, xd_p_fd_tpsd_lock,
d                 453 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_read_register_bits(state->d,
d                 462 drivers/media/usb/dvb-usb/af9005-fe.c 		af9005_led_control(state->d, *stat & FE_HAS_LOCK);
d                 465 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_register_bits(state->d, xd_p_reg_strong_sginal_detected,
d                 507 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_ofdm_register(state->d, xd_r_reg_aagc_rf_gain,
d                 512 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_ofdm_register(state->d, xd_r_reg_aagc_if_gain,
d                 530 drivers/media/usb/dvb-usb/af9005-fe.c static int af9005_fe_program_cfoe(struct dvb_usb_device *d, u32 bw)
d                 589 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE00, buf[0]);
d                 594 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE01, buf[1]);
d                 599 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE02, buf[2]);
d                 604 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE03, buf[3]);
d                 623 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE04, buf[0]);
d                 627 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE05, buf[1]);
d                 631 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE06, buf[2]);
d                 635 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE07, buf[3]);
d                 654 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE08, buf[0]);
d                 658 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE09, buf[1]);
d                 662 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE0A, buf[2]);
d                 666 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE0B, buf[3]);
d                 685 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE0C, buf[0]);
d                 689 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE0D, buf[1]);
d                 693 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE0E, buf[2]);
d                 697 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE0F, buf[3]);
d                 716 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE10, buf[0]);
d                 720 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE11, buf[1]);
d                 724 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE12, buf[2]);
d                 728 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE13, buf[3]);
d                 747 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE14, buf[0]);
d                 751 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE15, buf[1]);
d                 755 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE16, buf[2]);
d                 759 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(d, 0xAE17, buf[3]);
d                 764 drivers/media/usb/dvb-usb/af9005-fe.c static int af9005_fe_select_bw(struct dvb_usb_device *d, u32 bw)
d                 781 drivers/media/usb/dvb-usb/af9005-fe.c 	return af9005_write_register_bits(d, xd_g_reg_bw, reg_bw_pos,
d                 791 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_send_command(state->d, 0x03, &temp, 1, NULL, 0);
d                 817 drivers/media/usb/dvb-usb/af9005-fe.c 	     af9005_write_register_bits(state->d, xd_I2C_reg_ofdm_rst_en,
d                 820 drivers/media/usb/dvb-usb/af9005-fe.c 	if ((ret = af9005_write_ofdm_register(state->d, APO_REG_RESET, 0)))
d                 826 drivers/media/usb/dvb-usb/af9005-fe.c 		     af9005_read_ofdm_register(state->d,
d                 846 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(state->d, 0xb200, 0xa9);
d                 849 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(state->d, xd_g_reg_ofsm_clk, 0x07);
d                 853 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_send_command(state->d, 0x03, &temp, 1, NULL, 0);
d                 856 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(state->d, xd_g_reg_ofsm_clk, 0x00);
d                 859 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(state->d, 0xb200, 0xa1);
d                 865 drivers/media/usb/dvb-usb/af9005-fe.c 	     af9005_write_register_bits(state->d, xd_I2C_reg_ofdm_rst,
d                 868 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_register_bits(state->d, xd_I2C_reg_ofdm_rst,
d                 874 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(state->d, 0xaefc, 0);
d                 881 drivers/media/usb/dvb-usb/af9005-fe.c 	     af9005_write_register_bits(state->d, xd_p_reg_dca_stand_alone,
d                 889 drivers/media/usb/dvb-usb/af9005-fe.c 	     af9005_write_register_bits(state->d, xd_p_reg_dca_upper_chip,
d                 894 drivers/media/usb/dvb-usb/af9005-fe.c 	     af9005_write_register_bits(state->d, xd_p_reg_dca_lower_chip,
d                 902 drivers/media/usb/dvb-usb/af9005-fe.c 	     af9005_write_ofdm_register(state->d, xd_I2C_i2c_m_period, 0x14)))
d                 908 drivers/media/usb/dvb-usb/af9005-fe.c 	     af9005_write_register_bits(state->d, xd_p_reg_dca_en,
d                 912 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(state->d, 0xa16c, 1);
d                 915 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(state->d, 0xa3c1, 0);
d                 921 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_fe_program_cfoe(state->d, 6000000);
d                 927 drivers/media/usb/dvb-usb/af9005-fe.c 	     af9005_write_register_bits(state->d, xd_p_reg_feq_read_update,
d                 938 drivers/media/usb/dvb-usb/af9005-fe.c 	     af9005_write_register_bits(state->d, xd_p_reg_dca_read_update,
d                 946 drivers/media/usb/dvb-usb/af9005-fe.c 	     af9005_write_register_bits(state->d, xd_p_fec_vtb_rsd_mon_en,
d                 952 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(state->d, 0xa601, 0);
d                 956 drivers/media/usb/dvb-usb/af9005-fe.c 	if ((ret = af9005_write_ofdm_register(state->d, 0xaefb, 0x01)))
d                 964 drivers/media/usb/dvb-usb/af9005-fe.c 		     af9005_write_register_bits(state->d, script[i].reg,
d                 996 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_word_agc(state->d,
d                1005 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_word_agc(state->d,
d                1014 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_word_agc(state->d, 0xA60E, 0xA60A, 4, 2,
d                1021 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_word_agc(state->d, 0xA60E, 0xA60B, 6, 2,
d                1086 drivers/media/usb/dvb-usb/af9005-fe.c 			af9005_led_control(state->d, 0);
d                1107 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_led_control(state->d, 0);
d                1111 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_register_bits(state->d, XD_MP2IF_MISC, 2, 1, 0);
d                1120 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(state->d, 0xae1a, temp0);
d                1123 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(state->d, 0xae19, temp1);
d                1126 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(state->d, 0xae18, temp2);
d                1133 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_write_word_agc(state->d,
d                1140 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_write_word_agc(state->d,
d                1147 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_write_word_agc(state->d, 0xA60E, 0xA60A, 4, 2,
d                1152 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_write_word_agc(state->d, 0xA60E, 0xA60B, 6, 2,
d                1159 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_fe_select_bw(state->d, fep->bandwidth_hz);
d                1162 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_fe_program_cfoe(state->d, fep->bandwidth_hz);
d                1168 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_ofdm_register(state->d, 0xaefd, 0);
d                1175 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_write_ofdm_register(state->d, xd_p_reg_unplug_th,
d                1188 drivers/media/usb/dvb-usb/af9005-fe.c 	ret = af9005_write_tuner_registers(state->d, 0xffff, &temp, 1);
d                1195 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_write_register_bits(state->d,
d                1226 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_register_bits(state->d, xd_g_reg_tpsd_const,
d                1250 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_register_bits(state->d, xd_g_reg_tpsd_hier,
d                1277 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_register_bits(state->d, xd_g_reg_dec_pri,
d                1286 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_register_bits(state->d, xd_g_reg_tpsd_hpcr,
d                1317 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_register_bits(state->d, xd_g_reg_tpsd_lpcr,
d                1348 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_register_bits(state->d, xd_g_reg_tpsd_gi,
d                1374 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_register_bits(state->d, xd_g_reg_tpsd_txmod,
d                1393 drivers/media/usb/dvb-usb/af9005-fe.c 	    af9005_read_register_bits(state->d, xd_g_reg_bw, reg_bw_pos,
d                1422 drivers/media/usb/dvb-usb/af9005-fe.c struct dvb_frontend *af9005_fe_attach(struct dvb_usb_device *d)
d                1433 drivers/media/usb/dvb-usb/af9005-fe.c 	state->d = d;
d                  73 drivers/media/usb/dvb-usb/af9005-remote.c int af9005_rc_decode(struct dvb_usb_device *d, u8 * data, int len, u32 * event,
d                  86 drivers/media/usb/dvb-usb/af9005-remote.c 				if (d->last_event == repeatable_keys[i]) {
d                  88 drivers/media/usb/dvb-usb/af9005-remote.c 					*event = d->last_event;
d                  32 drivers/media/usb/dvb-usb/af9005.c static int (*rc_decode) (struct dvb_usb_device *d, u8 *data, int len,
d                  45 drivers/media/usb/dvb-usb/af9005.c static int af9005_generic_read_write(struct dvb_usb_device *d, u16 reg,
d                  48 drivers/media/usb/dvb-usb/af9005.c 	struct af9005_device_state *st = d->priv;
d                  61 drivers/media/usb/dvb-usb/af9005.c 	mutex_lock(&d->data_mutex);
d                  91 drivers/media/usb/dvb-usb/af9005.c 	ret = dvb_usb_generic_rw(d, st->data, 16, st->data, 17, 0);
d                 129 drivers/media/usb/dvb-usb/af9005.c 	mutex_unlock(&d->data_mutex);
d                 134 drivers/media/usb/dvb-usb/af9005.c int af9005_read_ofdm_register(struct dvb_usb_device *d, u16 reg, u8 * value)
d                 138 drivers/media/usb/dvb-usb/af9005.c 	ret = af9005_generic_read_write(d, reg,
d                 148 drivers/media/usb/dvb-usb/af9005.c int af9005_read_ofdm_registers(struct dvb_usb_device *d, u16 reg,
d                 153 drivers/media/usb/dvb-usb/af9005.c 	ret = af9005_generic_read_write(d, reg,
d                 163 drivers/media/usb/dvb-usb/af9005.c int af9005_write_ofdm_register(struct dvb_usb_device *d, u16 reg, u8 value)
d                 168 drivers/media/usb/dvb-usb/af9005.c 	ret = af9005_generic_read_write(d, reg,
d                 178 drivers/media/usb/dvb-usb/af9005.c int af9005_write_ofdm_registers(struct dvb_usb_device *d, u16 reg,
d                 185 drivers/media/usb/dvb-usb/af9005.c 	ret = af9005_generic_read_write(d, reg,
d                 195 drivers/media/usb/dvb-usb/af9005.c int af9005_read_register_bits(struct dvb_usb_device *d, u16 reg, u8 pos,
d                 201 drivers/media/usb/dvb-usb/af9005.c 	ret = af9005_read_ofdm_register(d, reg, &temp);
d                 212 drivers/media/usb/dvb-usb/af9005.c int af9005_write_register_bits(struct dvb_usb_device *d, u16 reg, u8 pos,
d                 219 drivers/media/usb/dvb-usb/af9005.c 		return af9005_write_ofdm_register(d, reg, value);
d                 220 drivers/media/usb/dvb-usb/af9005.c 	ret = af9005_read_ofdm_register(d, reg, &temp);
d                 225 drivers/media/usb/dvb-usb/af9005.c 	return af9005_write_ofdm_register(d, reg, temp);
d                 229 drivers/media/usb/dvb-usb/af9005.c static int af9005_usb_read_tuner_registers(struct dvb_usb_device *d,
d                 232 drivers/media/usb/dvb-usb/af9005.c 	return af9005_generic_read_write(d, reg,
d                 237 drivers/media/usb/dvb-usb/af9005.c static int af9005_usb_write_tuner_registers(struct dvb_usb_device *d,
d                 240 drivers/media/usb/dvb-usb/af9005.c 	return af9005_generic_read_write(d, reg,
d                 245 drivers/media/usb/dvb-usb/af9005.c int af9005_write_tuner_registers(struct dvb_usb_device *d, u16 reg,
d                 253 drivers/media/usb/dvb-usb/af9005.c 	ret = af9005_usb_write_tuner_registers(d, reg, values, len);
d                 260 drivers/media/usb/dvb-usb/af9005.c 			    af9005_read_ofdm_register(d,
d                 279 drivers/media/usb/dvb-usb/af9005.c 			af9005_write_register_bits(d,
d                 288 drivers/media/usb/dvb-usb/af9005.c 		    af9005_write_register_bits(d,
d                 298 drivers/media/usb/dvb-usb/af9005.c int af9005_read_tuner_registers(struct dvb_usb_device *d, u16 reg, u8 addr,
d                 314 drivers/media/usb/dvb-usb/af9005.c 		ret = af9005_write_tuner_registers(d, 0x00c0, buf, 2);
d                 320 drivers/media/usb/dvb-usb/af9005.c 	ret = af9005_usb_read_tuner_registers(d, reg, values, 1);
d                 326 drivers/media/usb/dvb-usb/af9005.c 		ret = af9005_read_ofdm_register(d, 0xa408, &temp);
d                 337 drivers/media/usb/dvb-usb/af9005.c 	ret = af9005_write_ofdm_register(d, xd_I2C_i2c_m_data8, 1);
d                 343 drivers/media/usb/dvb-usb/af9005.c 		ret = af9005_read_ofdm_register(d, 0xa400 + i, &temp);
d                 351 drivers/media/usb/dvb-usb/af9005.c static int af9005_i2c_write(struct dvb_usb_device *d, u8 i2caddr, u8 reg,
d                 365 drivers/media/usb/dvb-usb/af9005.c 		    af9005_write_tuner_registers(d,
d                 377 drivers/media/usb/dvb-usb/af9005.c static int af9005_i2c_read(struct dvb_usb_device *d, u8 i2caddr, u8 reg,
d                 386 drivers/media/usb/dvb-usb/af9005.c 		    af9005_read_tuner_registers(d,
d                 405 drivers/media/usb/dvb-usb/af9005.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 410 drivers/media/usb/dvb-usb/af9005.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 421 drivers/media/usb/dvb-usb/af9005.c 		ret = af9005_i2c_read(d, addr, reg, value, 1);
d                 429 drivers/media/usb/dvb-usb/af9005.c 		ret = af9005_i2c_write(d, addr, reg, value, msg[0].len - 1);
d                 434 drivers/media/usb/dvb-usb/af9005.c 	mutex_unlock(&d->i2c_mutex);
d                 448 drivers/media/usb/dvb-usb/af9005.c int af9005_send_command(struct dvb_usb_device *d, u8 command, u8 * wbuf,
d                 451 drivers/media/usb/dvb-usb/af9005.c 	struct af9005_device_state *st = d->priv;
d                 470 drivers/media/usb/dvb-usb/af9005.c 	mutex_lock(&d->data_mutex);
d                 482 drivers/media/usb/dvb-usb/af9005.c 	ret = dvb_usb_generic_rw(d, st->data, wlen + 7, st->data, rlen + 7, 0);
d                 501 drivers/media/usb/dvb-usb/af9005.c 	mutex_unlock(&d->data_mutex);
d                 505 drivers/media/usb/dvb-usb/af9005.c int af9005_read_eeprom(struct dvb_usb_device *d, u8 address, u8 * values,
d                 508 drivers/media/usb/dvb-usb/af9005.c 	struct af9005_device_state *st = d->priv;
d                 512 drivers/media/usb/dvb-usb/af9005.c 	mutex_lock(&d->data_mutex);
d                 529 drivers/media/usb/dvb-usb/af9005.c 	ret = dvb_usb_generic_rw(d, st->data, 16, st->data, 14, 0);
d                 548 drivers/media/usb/dvb-usb/af9005.c 	mutex_unlock(&d->data_mutex);
d                 773 drivers/media/usb/dvb-usb/af9005.c int af9005_led_control(struct dvb_usb_device *d, int onoff)
d                 775 drivers/media/usb/dvb-usb/af9005.c 	struct af9005_device_state *st = d->priv;
d                 784 drivers/media/usb/dvb-usb/af9005.c 		    af9005_write_register_bits(d, xd_p_reg_top_locken1,
d                 790 drivers/media/usb/dvb-usb/af9005.c 		    af9005_write_register_bits(d, xd_p_reg_top_lock1,
d                 823 drivers/media/usb/dvb-usb/af9005.c static int af9005_rc_query(struct dvb_usb_device *d, u32 * event, int *state)
d                 825 drivers/media/usb/dvb-usb/af9005.c 	struct af9005_device_state *st = d->priv;
d                 835 drivers/media/usb/dvb-usb/af9005.c 	mutex_lock(&d->data_mutex);
d                 843 drivers/media/usb/dvb-usb/af9005.c 	ret = dvb_usb_generic_rw(d, st->data, 5, st->data, 256, 0);
d                 866 drivers/media/usb/dvb-usb/af9005.c 		ret = rc_decode(d, &st->data[6], len, event, state);
d                 873 drivers/media/usb/dvb-usb/af9005.c 				*event = d->last_event;
d                 878 drivers/media/usb/dvb-usb/af9005.c 	mutex_unlock(&d->data_mutex);
d                 882 drivers/media/usb/dvb-usb/af9005.c static int af9005_power_ctrl(struct dvb_usb_device *d, int onoff)
d                3451 drivers/media/usb/dvb-usb/af9005.h extern struct dvb_frontend *af9005_fe_attach(struct dvb_usb_device *d);
d                3452 drivers/media/usb/dvb-usb/af9005.h extern int af9005_read_ofdm_register(struct dvb_usb_device *d, u16 reg,
d                3454 drivers/media/usb/dvb-usb/af9005.h extern int af9005_read_ofdm_registers(struct dvb_usb_device *d, u16 reg,
d                3456 drivers/media/usb/dvb-usb/af9005.h extern int af9005_write_ofdm_register(struct dvb_usb_device *d, u16 reg,
d                3458 drivers/media/usb/dvb-usb/af9005.h extern int af9005_write_ofdm_registers(struct dvb_usb_device *d, u16 reg,
d                3460 drivers/media/usb/dvb-usb/af9005.h extern int af9005_read_tuner_registers(struct dvb_usb_device *d, u16 reg,
d                3462 drivers/media/usb/dvb-usb/af9005.h extern int af9005_write_tuner_registers(struct dvb_usb_device *d, u16 reg,
d                3464 drivers/media/usb/dvb-usb/af9005.h extern int af9005_read_register_bits(struct dvb_usb_device *d, u16 reg,
d                3466 drivers/media/usb/dvb-usb/af9005.h extern int af9005_write_register_bits(struct dvb_usb_device *d, u16 reg,
d                3468 drivers/media/usb/dvb-usb/af9005.h extern int af9005_send_command(struct dvb_usb_device *d, u8 command,
d                3470 drivers/media/usb/dvb-usb/af9005.h extern int af9005_read_eeprom(struct dvb_usb_device *d, u8 address,
d                3473 drivers/media/usb/dvb-usb/af9005.h extern int af9005_led_control(struct dvb_usb_device *d, int onoff);
d                3478 drivers/media/usb/dvb-usb/af9005.h extern int af9005_rc_decode(struct dvb_usb_device *d, u8 * data, int len,
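
The af9005 entries above show a read-modify-write helper layered on af9005_read_ofdm_register()/af9005_write_ofdm_register(): whole-byte writes go straight through, while bit-field writes read the register, patch the field, and write it back. A minimal sketch of that pattern follows; the function name and the mask arithmetic are illustrative assumptions, not copied from the driver.

/* Sketch only: read-modify-write of a bit field, in the shape suggested by
 * the af9005_write_register_bits() entries above.  The mask computation is
 * an assumption for illustration. */
static int example_write_register_bits(struct dvb_usb_device *d, u16 reg,
                                       u8 pos, u8 len, u8 value)
{
        u8 temp, mask;
        int ret;

        if (pos == 0 && len == 8)               /* whole register: plain write */
                return af9005_write_ofdm_register(d, reg, value);

        ret = af9005_read_ofdm_register(d, reg, &temp);
        if (ret)
                return ret;

        mask = ((1 << len) - 1) << pos;         /* field of 'len' bits at 'pos' */
        temp = (temp & ~mask) | ((value << pos) & mask);

        return af9005_write_ofdm_register(d, reg, temp);
}
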
d                 298 drivers/media/usb/dvb-usb/az6027.c static int az6027_usb_in_op(struct dvb_usb_device *d, u8 req,
d                 302 drivers/media/usb/dvb-usb/az6027.c 	if (mutex_lock_interruptible(&d->usb_mutex))
d                 305 drivers/media/usb/dvb-usb/az6027.c 	ret = usb_control_msg(d->udev,
d                 306 drivers/media/usb/dvb-usb/az6027.c 			      usb_rcvctrlpipe(d->udev, 0),
d                 324 drivers/media/usb/dvb-usb/az6027.c 	mutex_unlock(&d->usb_mutex);
d                 328 drivers/media/usb/dvb-usb/az6027.c static int az6027_usb_out_op(struct dvb_usb_device *d,
d                 340 drivers/media/usb/dvb-usb/az6027.c 	if (mutex_lock_interruptible(&d->usb_mutex))
d                 343 drivers/media/usb/dvb-usb/az6027.c 	ret = usb_control_msg(d->udev,
d                 344 drivers/media/usb/dvb-usb/az6027.c 			      usb_sndctrlpipe(d->udev, 0),
d                 355 drivers/media/usb/dvb-usb/az6027.c 		mutex_unlock(&d->usb_mutex);
d                 358 drivers/media/usb/dvb-usb/az6027.c 		mutex_unlock(&d->usb_mutex);
d                 392 drivers/media/usb/dvb-usb/az6027.c static int az6027_rc_query(struct dvb_usb_device *d, u32 *event, int *state)
d                 409 drivers/media/usb/dvb-usb/az6027.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 410 drivers/media/usb/dvb-usb/az6027.c 	struct az6027_device_state *state = (struct az6027_device_state *)d->priv;
d                 433 drivers/media/usb/dvb-usb/az6027.c 	ret = az6027_usb_in_op(d, req, value, index, b, blen);
d                 451 drivers/media/usb/dvb-usb/az6027.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 452 drivers/media/usb/dvb-usb/az6027.c 	struct az6027_device_state *state = (struct az6027_device_state *)d->priv;
d                 470 drivers/media/usb/dvb-usb/az6027.c 	ret = az6027_usb_out_op(d, req, value1, index, NULL, blen);
d                 482 drivers/media/usb/dvb-usb/az6027.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 483 drivers/media/usb/dvb-usb/az6027.c 	struct az6027_device_state *state = (struct az6027_device_state *)d->priv;
d                 506 drivers/media/usb/dvb-usb/az6027.c 	ret = az6027_usb_in_op(d, req, value, index, b, blen);
d                 528 drivers/media/usb/dvb-usb/az6027.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 529 drivers/media/usb/dvb-usb/az6027.c 	struct az6027_device_state *state = (struct az6027_device_state *)d->priv;
d                 546 drivers/media/usb/dvb-usb/az6027.c 	ret = az6027_usb_out_op(d, req, value1, index, NULL, blen);
d                 559 drivers/media/usb/dvb-usb/az6027.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 577 drivers/media/usb/dvb-usb/az6027.c 	ret = az6027_usb_in_op(d, req, value, index, b, blen);
d                 590 drivers/media/usb/dvb-usb/az6027.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 591 drivers/media/usb/dvb-usb/az6027.c 	struct az6027_device_state *state = (struct az6027_device_state *)d->priv;
d                 606 drivers/media/usb/dvb-usb/az6027.c 	ret = az6027_usb_out_op(d, req, value, index, NULL, blen);
d                 618 drivers/media/usb/dvb-usb/az6027.c 	ret = az6027_usb_out_op(d, req, value, index, NULL, blen);
d                 646 drivers/media/usb/dvb-usb/az6027.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 647 drivers/media/usb/dvb-usb/az6027.c 	struct az6027_device_state *state = (struct az6027_device_state *)d->priv;
d                 662 drivers/media/usb/dvb-usb/az6027.c 	ret = az6027_usb_out_op(d, req, value, index, NULL, blen);
d                 675 drivers/media/usb/dvb-usb/az6027.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 676 drivers/media/usb/dvb-usb/az6027.c 	struct az6027_device_state *state = (struct az6027_device_state *)d->priv;
d                 694 drivers/media/usb/dvb-usb/az6027.c 	ret = az6027_usb_in_op(d, req, value, index, b, blen);
d                 712 drivers/media/usb/dvb-usb/az6027.c static void az6027_ci_uninit(struct dvb_usb_device *d)
d                 718 drivers/media/usb/dvb-usb/az6027.c 	if (NULL == d)
d                 721 drivers/media/usb/dvb-usb/az6027.c 	state = (struct az6027_device_state *)d->priv;
d                 736 drivers/media/usb/dvb-usb/az6027.c 	struct dvb_usb_device *d = a->dev;
d                 737 drivers/media/usb/dvb-usb/az6027.c 	struct az6027_device_state *state = (struct az6027_device_state *)d->priv;
d                 753 drivers/media/usb/dvb-usb/az6027.c 	state->ca.data			= d;
d                 933 drivers/media/usb/dvb-usb/az6027.c 	struct dvb_usb_device *d = usb_get_intfdata(intf);
d                 934 drivers/media/usb/dvb-usb/az6027.c 	az6027_ci_uninit(d);
d                 952 drivers/media/usb/dvb-usb/az6027.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 964 drivers/media/usb/dvb-usb/az6027.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0) {
d                 979 drivers/media/usb/dvb-usb/az6027.c 			az6027_usb_out_op(d, req, value, index, data, length);
d                 989 drivers/media/usb/dvb-usb/az6027.c 				az6027_usb_in_op(d, req, value, index, data, length);
d                1005 drivers/media/usb/dvb-usb/az6027.c 				az6027_usb_out_op(d, req, value, index, data, length);
d                1016 drivers/media/usb/dvb-usb/az6027.c 				az6027_usb_in_op(d, req, value, index, data, length);
d                1032 drivers/media/usb/dvb-usb/az6027.c 				az6027_usb_out_op(d, req, value, index, data, length);
d                1036 drivers/media/usb/dvb-usb/az6027.c 	mutex_unlock(&d->i2c_mutex);
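
The az6027 entries above wrap vendor control transfers in d->usb_mutex so only one control request uses the default pipe at a time. A minimal sketch of that pattern for an IN request; the request-type flags and the 2000 ms timeout are assumptions, not values taken from the driver.

/* Sketch of an az6027-style vendor control read guarded by d->usb_mutex.
 * USB_TYPE_VENDOR | USB_DIR_IN and the timeout are illustrative assumptions. */
static int example_usb_in_op(struct dvb_usb_device *d, u8 req,
                             u16 value, u16 index, u8 *b, int blen)
{
        int ret;

        if (mutex_lock_interruptible(&d->usb_mutex))
                return -EAGAIN;

        ret = usb_control_msg(d->udev,
                              usb_rcvctrlpipe(d->udev, 0),
                              req,
                              USB_TYPE_VENDOR | USB_DIR_IN,
                              value, index, b, blen, 2000);

        mutex_unlock(&d->usb_mutex);

        return ret < 0 ? ret : 0;
}
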
d                  39 drivers/media/usb/dvb-usb/cinergyT2-core.c 	struct dvb_usb_device *d = adap->dev;
d                  40 drivers/media/usb/dvb-usb/cinergyT2-core.c 	struct cinergyt2_state *st = d->priv;
d                  43 drivers/media/usb/dvb-usb/cinergyT2-core.c 	mutex_lock(&d->data_mutex);
d                  47 drivers/media/usb/dvb-usb/cinergyT2-core.c 	ret = dvb_usb_generic_rw(d, st->data, 2, st->data, 64, 0);
d                  48 drivers/media/usb/dvb-usb/cinergyT2-core.c 	mutex_unlock(&d->data_mutex);
d                  53 drivers/media/usb/dvb-usb/cinergyT2-core.c static int cinergyt2_power_ctrl(struct dvb_usb_device *d, int enable)
d                  55 drivers/media/usb/dvb-usb/cinergyT2-core.c 	struct cinergyt2_state *st = d->priv;
d                  58 drivers/media/usb/dvb-usb/cinergyT2-core.c 	mutex_lock(&d->data_mutex);
d                  62 drivers/media/usb/dvb-usb/cinergyT2-core.c 	ret = dvb_usb_generic_rw(d, st->data, 2, st->data, 3, 0);
d                  63 drivers/media/usb/dvb-usb/cinergyT2-core.c 	mutex_unlock(&d->data_mutex);
d                  70 drivers/media/usb/dvb-usb/cinergyT2-core.c 	struct dvb_usb_device *d = adap->dev;
d                  71 drivers/media/usb/dvb-usb/cinergyT2-core.c 	struct cinergyt2_state *st = d->priv;
d                  76 drivers/media/usb/dvb-usb/cinergyT2-core.c 	mutex_lock(&d->data_mutex);
d                  79 drivers/media/usb/dvb-usb/cinergyT2-core.c 	ret = dvb_usb_generic_rw(d, st->data, 1, st->data, 3, 0);
d                  83 drivers/media/usb/dvb-usb/cinergyT2-core.c 	mutex_unlock(&d->data_mutex);
d                 145 drivers/media/usb/dvb-usb/cinergyT2-core.c static int cinergyt2_rc_query(struct dvb_usb_device *d, u32 *event, int *state)
d                 147 drivers/media/usb/dvb-usb/cinergyT2-core.c 	struct cinergyt2_state *st = d->priv;
d                 152 drivers/media/usb/dvb-usb/cinergyT2-core.c 	mutex_lock(&d->data_mutex);
d                 155 drivers/media/usb/dvb-usb/cinergyT2-core.c 	ret = dvb_usb_generic_rw(d, st->data, 1, st->data, 5, 0);
d                 164 drivers/media/usb/dvb-usb/cinergyT2-core.c 				if (d->last_event == repeatable_keys[i]) {
d                 166 drivers/media/usb/dvb-usb/cinergyT2-core.c 					*event = d->last_event;
d                 179 drivers/media/usb/dvb-usb/cinergyT2-core.c 	dvb_usb_nec_rc_key_to_event(d, st->data, event, state);
d                 181 drivers/media/usb/dvb-usb/cinergyT2-core.c 		if (*event != d->last_event)
d                 188 drivers/media/usb/dvb-usb/cinergyT2-core.c 	mutex_unlock(&d->data_mutex);
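
The af9005 and cinergyT2 entries above (and the cxusb and dtt200u entries further down) share one command pattern: build the request in a buffer held in d->priv, then call dvb_usb_generic_rw() with d->data_mutex held so concurrent callers cannot clobber that shared buffer. A minimal sketch of the pattern; example_state and the one-byte command framing are assumptions for illustration.

/* Sketch of the shared command pattern: per-device state and command
 * framing are illustrative assumptions, not a specific driver's layout. */
struct example_state {
        u8 data[64];
};

static int example_ctrl_msg(struct dvb_usb_device *d, u8 cmd,
                            const u8 *wbuf, int wlen, u8 *rbuf, int rlen)
{
        struct example_state *st = d->priv;
        int ret;

        if (wlen < 0 || 1 + wlen > sizeof(st->data) ||
            rlen < 0 || rlen > sizeof(st->data))
                return -EINVAL;

        mutex_lock(&d->data_mutex);             /* st->data is shared */

        st->data[0] = cmd;
        if (wlen)
                memcpy(&st->data[1], wbuf, wlen);

        ret = dvb_usb_generic_rw(d, st->data, 1 + wlen, st->data, rlen, 0);
        if (ret >= 0 && rbuf && rlen)
                memcpy(rbuf, st->data, rlen);

        mutex_unlock(&d->data_mutex);

        return ret;
}
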
d                 127 drivers/media/usb/dvb-usb/cinergyT2-fe.c 	struct dvb_usb_device *d;
d                 144 drivers/media/usb/dvb-usb/cinergyT2-fe.c 	ret = dvb_usb_generic_rw(state->d, state->data, 1,
d                 252 drivers/media/usb/dvb-usb/cinergyT2-fe.c 	err = dvb_usb_generic_rw(state->d, state->data, sizeof(*param),
d                 269 drivers/media/usb/dvb-usb/cinergyT2-fe.c struct dvb_frontend *cinergyt2_fe_attach(struct dvb_usb_device *d)
d                 276 drivers/media/usb/dvb-usb/cinergyT2-fe.c 	s->d = d;
d                  78 drivers/media/usb/dvb-usb/cinergyT2.h extern struct dvb_frontend *cinergyt2_fe_attach(struct dvb_usb_device *d);
d                  86 drivers/media/usb/dvb-usb/cxusb.c int cxusb_ctrl_msg(struct dvb_usb_device *d,
d                  89 drivers/media/usb/dvb-usb/cxusb.c 	struct cxusb_state *st = d->priv;
d                 102 drivers/media/usb/dvb-usb/cxusb.c 	mutex_lock(&d->data_mutex);
d                 105 drivers/media/usb/dvb-usb/cxusb.c 	ret = dvb_usb_generic_rw(d, st->data, 1 + wlen, st->data, rlen, 0);
d                 109 drivers/media/usb/dvb-usb/cxusb.c 	mutex_unlock(&d->data_mutex);
d                 114 drivers/media/usb/dvb-usb/cxusb.c static void cxusb_gpio_tuner(struct dvb_usb_device *d, int onoff)
d                 116 drivers/media/usb/dvb-usb/cxusb.c 	struct cxusb_state *st = d->priv;
d                 125 drivers/media/usb/dvb-usb/cxusb.c 	cxusb_ctrl_msg(d, CMD_GPIO_WRITE, o, 2, &i, 1);
d                 134 drivers/media/usb/dvb-usb/cxusb.c static int cxusb_bluebird_gpio_rw(struct dvb_usb_device *d, u8 changemask,
d                 143 drivers/media/usb/dvb-usb/cxusb.c 	rc = cxusb_ctrl_msg(d, CMD_BLUEBIRD_GPIO_RW, o, 2, &gpio_state, 1);
d                 150 drivers/media/usb/dvb-usb/cxusb.c static void cxusb_bluebird_gpio_pulse(struct dvb_usb_device *d, u8 pin, int low)
d                 152 drivers/media/usb/dvb-usb/cxusb.c 	cxusb_bluebird_gpio_rw(d, pin, low ? 0 : pin);
d                 154 drivers/media/usb/dvb-usb/cxusb.c 	cxusb_bluebird_gpio_rw(d, pin, low ? pin : 0);
d                 157 drivers/media/usb/dvb-usb/cxusb.c static void cxusb_nano2_led(struct dvb_usb_device *d, int onoff)
d                 159 drivers/media/usb/dvb-usb/cxusb.c 	cxusb_bluebird_gpio_rw(d, 0x40, onoff ? 0 : 0x40);
d                 162 drivers/media/usb/dvb-usb/cxusb.c static int cxusb_d680_dmb_gpio_tuner(struct dvb_usb_device *d,
d                 169 drivers/media/usb/dvb-usb/cxusb.c 	rc = cxusb_ctrl_msg(d, CMD_GPIO_WRITE, o, 2, &i, 1);
d                 185 drivers/media/usb/dvb-usb/cxusb.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 189 drivers/media/usb/dvb-usb/cxusb.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 193 drivers/media/usb/dvb-usb/cxusb.c 		if (le16_to_cpu(d->udev->descriptor.idVendor) == USB_VID_MEDION)
d                 196 drivers/media/usb/dvb-usb/cxusb.c 				cxusb_gpio_tuner(d, 0);
d                 199 drivers/media/usb/dvb-usb/cxusb.c 				cxusb_gpio_tuner(d, 1);
d                 216 drivers/media/usb/dvb-usb/cxusb.c 			if (cxusb_ctrl_msg(d, CMD_I2C_READ,
d                 245 drivers/media/usb/dvb-usb/cxusb.c 			if (cxusb_ctrl_msg(d, CMD_I2C_READ,
d                 270 drivers/media/usb/dvb-usb/cxusb.c 			if (cxusb_ctrl_msg(d, CMD_I2C_WRITE, obuf,
d                 284 drivers/media/usb/dvb-usb/cxusb.c 	mutex_unlock(&d->i2c_mutex);
d                 298 drivers/media/usb/dvb-usb/cxusb.c static int _cxusb_power_ctrl(struct dvb_usb_device *d, int onoff)
d                 305 drivers/media/usb/dvb-usb/cxusb.c 		return cxusb_ctrl_msg(d, CMD_POWER_ON, &b, 1, NULL, 0);
d                 307 drivers/media/usb/dvb-usb/cxusb.c 		return cxusb_ctrl_msg(d, CMD_POWER_OFF, &b, 1, NULL, 0);
d                 310 drivers/media/usb/dvb-usb/cxusb.c static int cxusb_power_ctrl(struct dvb_usb_device *d, int onoff)
d                 312 drivers/media/usb/dvb-usb/cxusb.c 	bool is_medion = d->props.devices[0].warm_ids[0] == &cxusb_table[MEDION_MD95700];
d                 316 drivers/media/usb/dvb-usb/cxusb.c 		struct cxusb_medion_dev *cxdev = d->priv;
d                 327 drivers/media/usb/dvb-usb/cxusb.c 	ret = _cxusb_power_ctrl(d, onoff);
d                 331 drivers/media/usb/dvb-usb/cxusb.c 		struct cxusb_medion_dev *cxdev = d->priv;
d                 339 drivers/media/usb/dvb-usb/cxusb.c static int cxusb_aver_power_ctrl(struct dvb_usb_device *d, int onoff)
d                 344 drivers/media/usb/dvb-usb/cxusb.c 		return cxusb_ctrl_msg(d, CMD_POWER_OFF, NULL, 0, NULL, 0);
d                 345 drivers/media/usb/dvb-usb/cxusb.c 	if (d->state == DVB_USB_STATE_INIT &&
d                 346 drivers/media/usb/dvb-usb/cxusb.c 	    usb_set_interface(d->udev, 0, 0) < 0)
d                 350 drivers/media/usb/dvb-usb/cxusb.c 	} while (!(ret = cxusb_ctrl_msg(d, CMD_POWER_ON, NULL, 0, NULL, 0)) &&
d                 351 drivers/media/usb/dvb-usb/cxusb.c 		 !(ret = cxusb_ctrl_msg(d, 0x15, NULL, 0, NULL, 0)) &&
d                 352 drivers/media/usb/dvb-usb/cxusb.c 		 !(ret = cxusb_ctrl_msg(d, 0x17, NULL, 0, NULL, 0)) && 0);
d                 375 drivers/media/usb/dvb-usb/cxusb.c 			ret = cxusb_ctrl_msg(d, CMD_I2C_WRITE,
d                 386 drivers/media/usb/dvb-usb/cxusb.c static int cxusb_bluebird_power_ctrl(struct dvb_usb_device *d, int onoff)
d                 391 drivers/media/usb/dvb-usb/cxusb.c 		return cxusb_ctrl_msg(d, CMD_POWER_ON, &b, 1, NULL, 0);
d                 396 drivers/media/usb/dvb-usb/cxusb.c static int cxusb_nano2_power_ctrl(struct dvb_usb_device *d, int onoff)
d                 400 drivers/media/usb/dvb-usb/cxusb.c 	rc = cxusb_power_ctrl(d, onoff);
d                 402 drivers/media/usb/dvb-usb/cxusb.c 		cxusb_nano2_led(d, 0);
d                 407 drivers/media/usb/dvb-usb/cxusb.c static int cxusb_d680_dmb_power_ctrl(struct dvb_usb_device *d, int onoff)
d                 412 drivers/media/usb/dvb-usb/cxusb.c 	ret = cxusb_power_ctrl(d, onoff);
d                 417 drivers/media/usb/dvb-usb/cxusb.c 	cxusb_ctrl_msg(d, CMD_DIGITAL, NULL, 0, &b, 1);
d                 458 drivers/media/usb/dvb-usb/cxusb.c static void cxusb_d680_dmb_drain_message(struct dvb_usb_device *d)
d                 460 drivers/media/usb/dvb-usb/cxusb.c 	int       ep = d->props.generic_bulk_ctrl_endpoint;
d                 471 drivers/media/usb/dvb-usb/cxusb.c 		if (usb_bulk_msg(d->udev,
d                 472 drivers/media/usb/dvb-usb/cxusb.c 				 usb_rcvbulkpipe(d->udev, ep),
d                 481 drivers/media/usb/dvb-usb/cxusb.c static void cxusb_d680_dmb_drain_video(struct dvb_usb_device *d)
d                 483 drivers/media/usb/dvb-usb/cxusb.c 	struct usb_data_stream_properties *p = &d->props.adapter[0].fe[0].stream;
d                 494 drivers/media/usb/dvb-usb/cxusb.c 		if (usb_bulk_msg(d->udev,
d                 495 drivers/media/usb/dvb-usb/cxusb.c 				 usb_rcvbulkpipe(d->udev, p->endpoint),
d                 520 drivers/media/usb/dvb-usb/cxusb.c static int cxusb_rc_query(struct dvb_usb_device *d)
d                 524 drivers/media/usb/dvb-usb/cxusb.c 	if (cxusb_ctrl_msg(d, CMD_GET_IR_CODE, NULL, 0, ircode, 4) < 0)
d                 528 drivers/media/usb/dvb-usb/cxusb.c 		rc_keydown(d->rc_dev, RC_PROTO_NEC,
d                 533 drivers/media/usb/dvb-usb/cxusb.c static int cxusb_bluebird2_rc_query(struct dvb_usb_device *d)
d                 543 drivers/media/usb/dvb-usb/cxusb.c 	if (cxusb_i2c_xfer(&d->i2c_adap, &msg, 1) != 1)
d                 547 drivers/media/usb/dvb-usb/cxusb.c 		rc_keydown(d->rc_dev, RC_PROTO_NEC,
d                 552 drivers/media/usb/dvb-usb/cxusb.c static int cxusb_d680_dmb_rc_query(struct dvb_usb_device *d)
d                 556 drivers/media/usb/dvb-usb/cxusb.c 	if (cxusb_ctrl_msg(d, 0x10, NULL, 0, ircode, 2) < 0)
d                 560 drivers/media/usb/dvb-usb/cxusb.c 		rc_keydown(d->rc_dev, RC_PROTO_UNKNOWN,
d                 753 drivers/media/usb/dvb-usb/cxusb.c 	struct dvb_usb_device *d = adap->dev;
d                 758 drivers/media/usb/dvb-usb/cxusb.c 		cxusb_bluebird_gpio_pulse(d, 0x01, 1);
d                1256 drivers/media/usb/dvb-usb/cxusb.c 	struct dvb_usb_device *d = adap->dev;
d                1260 drivers/media/usb/dvb-usb/cxusb.c 	if (usb_set_interface(d->udev, 0, 0) < 0)
d                1264 drivers/media/usb/dvb-usb/cxusb.c 	usb_clear_halt(d->udev,
d                1265 drivers/media/usb/dvb-usb/cxusb.c 		       usb_sndbulkpipe(d->udev,
d                1266 drivers/media/usb/dvb-usb/cxusb.c 				       d->props.generic_bulk_ctrl_endpoint));
d                1267 drivers/media/usb/dvb-usb/cxusb.c 	usb_clear_halt(d->udev,
d                1268 drivers/media/usb/dvb-usb/cxusb.c 		       usb_rcvbulkpipe(d->udev,
d                1269 drivers/media/usb/dvb-usb/cxusb.c 				       d->props.generic_bulk_ctrl_endpoint));
d                1270 drivers/media/usb/dvb-usb/cxusb.c 	usb_clear_halt(d->udev,
d                1271 drivers/media/usb/dvb-usb/cxusb.c 		       usb_rcvbulkpipe(d->udev,
d                1272 drivers/media/usb/dvb-usb/cxusb.c 				       d->props.adapter[0].fe[0].stream.endpoint));
d                1276 drivers/media/usb/dvb-usb/cxusb.c 		cxusb_d680_dmb_drain_message(d);
d                1277 drivers/media/usb/dvb-usb/cxusb.c 		cxusb_d680_dmb_drain_video(d);
d                1282 drivers/media/usb/dvb-usb/cxusb.c 	if (cxusb_d680_dmb_gpio_tuner(d, 0x07, 0) < 0) {
d                1287 drivers/media/usb/dvb-usb/cxusb.c 	if (cxusb_d680_dmb_gpio_tuner(d, 0x07, 1) < 0) {
d                1295 drivers/media/usb/dvb-usb/cxusb.c 					 &d680_lgs8gl5_cfg, &d->i2c_adap);
d                1318 drivers/media/usb/dvb-usb/cxusb.c 	struct dvb_usb_device *d = adap->dev;
d                1321 drivers/media/usb/dvb-usb/cxusb.c 	if (usb_set_interface(d->udev, 0, 0) < 0)
d                1325 drivers/media/usb/dvb-usb/cxusb.c 	usb_clear_halt(d->udev,
d                1326 drivers/media/usb/dvb-usb/cxusb.c 		       usb_sndbulkpipe(d->udev,
d                1327 drivers/media/usb/dvb-usb/cxusb.c 				       d->props.generic_bulk_ctrl_endpoint));
d                1328 drivers/media/usb/dvb-usb/cxusb.c 	usb_clear_halt(d->udev,
d                1329 drivers/media/usb/dvb-usb/cxusb.c 		       usb_rcvbulkpipe(d->udev,
d                1330 drivers/media/usb/dvb-usb/cxusb.c 				       d->props.generic_bulk_ctrl_endpoint));
d                1331 drivers/media/usb/dvb-usb/cxusb.c 	usb_clear_halt(d->udev,
d                1332 drivers/media/usb/dvb-usb/cxusb.c 		       usb_rcvbulkpipe(d->udev,
d                1333 drivers/media/usb/dvb-usb/cxusb.c 				       d->props.adapter[0].fe[0].stream.endpoint));
d                1336 drivers/media/usb/dvb-usb/cxusb.c 	if (cxusb_d680_dmb_gpio_tuner(d, 0x07, 0) < 0) {
d                1341 drivers/media/usb/dvb-usb/cxusb.c 	if (cxusb_d680_dmb_gpio_tuner(d, 0x07, 1) < 0) {
d                1350 drivers/media/usb/dvb-usb/cxusb.c 					 &d->i2c_adap);
d                1674 drivers/media/usb/dvb-usb/cxusb.c 	struct dvb_usb_device *d = usb_get_intfdata(intf);
d                1675 drivers/media/usb/dvb-usb/cxusb.c 	struct cxusb_state *st = d->priv;
d                1678 drivers/media/usb/dvb-usb/cxusb.c 	if (d->props.devices[0].warm_ids[0] == &cxusb_table[MEDION_MD95700])
d                1679 drivers/media/usb/dvb-usb/cxusb.c 		cxusb_medion_unregister_analog(d);
d                 177 drivers/media/usb/dvb-usb/cxusb.h int cxusb_ctrl_msg(struct dvb_usb_device *d,
d                  55 drivers/media/usb/dvb-usb/dib0700.h extern int dib0700_get_version(struct dvb_usb_device *d, u32 *hwversion,
d                  58 drivers/media/usb/dvb-usb/dib0700.h extern int dib0700_ctrl_clock(struct dvb_usb_device *d, u32 clk_MHz, u8 clock_out_gp3);
d                  59 drivers/media/usb/dvb-usb/dib0700.h extern int dib0700_ctrl_rd(struct dvb_usb_device *d, u8 *tx, u8 txlen, u8 *rx, u8 rxlen);
d                  61 drivers/media/usb/dvb-usb/dib0700.h extern int dib0700_rc_setup(struct dvb_usb_device *d, struct usb_interface *intf);
d                  67 drivers/media/usb/dvb-usb/dib0700.h extern int dib0700_set_i2c_speed(struct dvb_usb_device *d, u16 scl_kHz);
d                  21 drivers/media/usb/dvb-usb/dib0700_core.c int dib0700_get_version(struct dvb_usb_device *d, u32 *hwversion,
d                  24 drivers/media/usb/dvb-usb/dib0700_core.c 	struct dib0700_state *st = d->priv;
d                  27 drivers/media/usb/dvb-usb/dib0700_core.c 	if (mutex_lock_interruptible(&d->usb_mutex) < 0) {
d                  32 drivers/media/usb/dvb-usb/dib0700_core.c 	ret = usb_control_msg(d->udev, usb_rcvctrlpipe(d->udev, 0),
d                  48 drivers/media/usb/dvb-usb/dib0700_core.c 	mutex_unlock(&d->usb_mutex);
d                  53 drivers/media/usb/dvb-usb/dib0700_core.c static int dib0700_ctrl_wr(struct dvb_usb_device *d, u8 *tx, u8 txlen)
d                  60 drivers/media/usb/dvb-usb/dib0700_core.c 	status = usb_control_msg(d->udev, usb_sndctrlpipe(d->udev,0),
d                  71 drivers/media/usb/dvb-usb/dib0700_core.c int dib0700_ctrl_rd(struct dvb_usb_device *d, u8 *tx, u8 txlen, u8 *rx, u8 rxlen)
d                  95 drivers/media/usb/dvb-usb/dib0700_core.c 	status = usb_control_msg(d->udev, usb_rcvctrlpipe(d->udev,0), tx[0],
d                 108 drivers/media/usb/dvb-usb/dib0700_core.c int dib0700_set_gpio(struct dvb_usb_device *d, enum dib07x0_gpios gpio, u8 gpio_dir, u8 gpio_val)
d                 110 drivers/media/usb/dvb-usb/dib0700_core.c 	struct dib0700_state *st = d->priv;
d                 113 drivers/media/usb/dvb-usb/dib0700_core.c 	if (mutex_lock_interruptible(&d->usb_mutex) < 0) {
d                 122 drivers/media/usb/dvb-usb/dib0700_core.c 	ret = dib0700_ctrl_wr(d, st->buf, 3);
d                 124 drivers/media/usb/dvb-usb/dib0700_core.c 	mutex_unlock(&d->usb_mutex);
d                 128 drivers/media/usb/dvb-usb/dib0700_core.c static int dib0700_set_usb_xfer_len(struct dvb_usb_device *d, u16 nb_ts_packets)
d                 130 drivers/media/usb/dvb-usb/dib0700_core.c 	struct dib0700_state *st = d->priv;
d                 134 drivers/media/usb/dvb-usb/dib0700_core.c 		if (mutex_lock_interruptible(&d->usb_mutex) < 0) {
d                 145 drivers/media/usb/dvb-usb/dib0700_core.c 		ret = dib0700_ctrl_wr(d, st->buf, 3);
d                 146 drivers/media/usb/dvb-usb/dib0700_core.c 		mutex_unlock(&d->usb_mutex);
d                 164 drivers/media/usb/dvb-usb/dib0700_core.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 165 drivers/media/usb/dvb-usb/dib0700_core.c 	struct dib0700_state *st = d->priv;
d                 174 drivers/media/usb/dvb-usb/dib0700_core.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 206 drivers/media/usb/dvb-usb/dib0700_core.c 			result = usb_control_msg(d->udev,
d                 207 drivers/media/usb/dvb-usb/dib0700_core.c 						 usb_rcvctrlpipe(d->udev, 0),
d                 232 drivers/media/usb/dvb-usb/dib0700_core.c 			if (mutex_lock_interruptible(&d->usb_mutex) < 0) {
d                 248 drivers/media/usb/dvb-usb/dib0700_core.c 				mutex_unlock(&d->usb_mutex);
d                 259 drivers/media/usb/dvb-usb/dib0700_core.c 			result = usb_control_msg(d->udev,
d                 260 drivers/media/usb/dvb-usb/dib0700_core.c 						 usb_sndctrlpipe(d->udev, 0),
d                 265 drivers/media/usb/dvb-usb/dib0700_core.c 			mutex_unlock(&d->usb_mutex);
d                 275 drivers/media/usb/dvb-usb/dib0700_core.c 	mutex_unlock(&d->i2c_mutex);
d                 285 drivers/media/usb/dvb-usb/dib0700_core.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 286 drivers/media/usb/dvb-usb/dib0700_core.c 	struct dib0700_state *st = d->priv;
d                 289 drivers/media/usb/dvb-usb/dib0700_core.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 291 drivers/media/usb/dvb-usb/dib0700_core.c 	if (mutex_lock_interruptible(&d->usb_mutex) < 0) {
d                 293 drivers/media/usb/dvb-usb/dib0700_core.c 		mutex_unlock(&d->i2c_mutex);
d                 315 drivers/media/usb/dvb-usb/dib0700_core.c 			len = dib0700_ctrl_rd(d, st->buf, msg[i].len + 2,
d                 337 drivers/media/usb/dvb-usb/dib0700_core.c 			result = dib0700_ctrl_wr(d, st->buf, msg[i].len + 2);
d                 344 drivers/media/usb/dvb-usb/dib0700_core.c 	mutex_unlock(&d->usb_mutex);
d                 345 drivers/media/usb/dvb-usb/dib0700_core.c 	mutex_unlock(&d->i2c_mutex);
d                 353 drivers/media/usb/dvb-usb/dib0700_core.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 354 drivers/media/usb/dvb-usb/dib0700_core.c 	struct dib0700_state *st = d->priv;
d                 398 drivers/media/usb/dvb-usb/dib0700_core.c static int dib0700_set_clock(struct dvb_usb_device *d, u8 en_pll,
d                 402 drivers/media/usb/dvb-usb/dib0700_core.c 	struct dib0700_state *st = d->priv;
d                 405 drivers/media/usb/dvb-usb/dib0700_core.c 	if (mutex_lock_interruptible(&d->usb_mutex) < 0) {
d                 422 drivers/media/usb/dvb-usb/dib0700_core.c 	ret = dib0700_ctrl_wr(d, st->buf, 10);
d                 423 drivers/media/usb/dvb-usb/dib0700_core.c 	mutex_unlock(&d->usb_mutex);
d                 428 drivers/media/usb/dvb-usb/dib0700_core.c int dib0700_set_i2c_speed(struct dvb_usb_device *d, u16 scl_kHz)
d                 430 drivers/media/usb/dvb-usb/dib0700_core.c 	struct dib0700_state *st = d->priv;
d                 437 drivers/media/usb/dvb-usb/dib0700_core.c 	if (mutex_lock_interruptible(&d->usb_mutex) < 0) {
d                 458 drivers/media/usb/dvb-usb/dib0700_core.c 	ret = dib0700_ctrl_wr(d, st->buf, 8);
d                 459 drivers/media/usb/dvb-usb/dib0700_core.c 	mutex_unlock(&d->usb_mutex);
d                 465 drivers/media/usb/dvb-usb/dib0700_core.c int dib0700_ctrl_clock(struct dvb_usb_device *d, u32 clk_MHz, u8 clock_out_gp3)
d                 468 drivers/media/usb/dvb-usb/dib0700_core.c 		case 72: dib0700_set_clock(d, 1, 0, 1, clock_out_gp3, 2, 24, 0, 0x4c); break;
d                 640 drivers/media/usb/dvb-usb/dib0700_core.c 	struct dvb_usb_device *d = rc->priv;
d                 641 drivers/media/usb/dvb-usb/dib0700_core.c 	struct dib0700_state *st = d->priv;
d                 644 drivers/media/usb/dvb-usb/dib0700_core.c 	if (mutex_lock_interruptible(&d->usb_mutex) < 0) {
d                 674 drivers/media/usb/dvb-usb/dib0700_core.c 	ret = dib0700_ctrl_wr(d, st->buf, 3);
d                 680 drivers/media/usb/dvb-usb/dib0700_core.c 	d->props.rc.core.protocol = *rc_proto;
d                 683 drivers/media/usb/dvb-usb/dib0700_core.c 	mutex_unlock(&d->usb_mutex);
d                 710 drivers/media/usb/dvb-usb/dib0700_core.c 	struct dvb_usb_device *d = purb->context;
d                 717 drivers/media/usb/dvb-usb/dib0700_core.c 	if (d->rc_dev == NULL) {
d                 744 drivers/media/usb/dvb-usb/dib0700_core.c 	switch (d->props.rc.core.protocol) {
d                 754 drivers/media/usb/dvb-usb/dib0700_core.c 			rc_repeat(d->rc_dev);
d                 797 drivers/media/usb/dvb-usb/dib0700_core.c 	rc_keydown(d->rc_dev, protocol, keycode, toggle);
d                 807 drivers/media/usb/dvb-usb/dib0700_core.c int dib0700_rc_setup(struct dvb_usb_device *d, struct usb_interface *intf)
d                 809 drivers/media/usb/dvb-usb/dib0700_core.c 	struct dib0700_state *st = d->priv;
d                 844 drivers/media/usb/dvb-usb/dib0700_core.c 			pipe = usb_rcvbulkpipe(d->udev, rc_ep);
d                 845 drivers/media/usb/dvb-usb/dib0700_core.c 			usb_fill_bulk_urb(purb, d->udev, pipe,
d                 848 drivers/media/usb/dvb-usb/dib0700_core.c 					  dib0700_rc_urb_completion, d);
d                 851 drivers/media/usb/dvb-usb/dib0700_core.c 			pipe = usb_rcvintpipe(d->udev, rc_ep);
d                 852 drivers/media/usb/dvb-usb/dib0700_core.c 			usb_fill_int_urb(purb, d->udev, pipe,
d                 855 drivers/media/usb/dvb-usb/dib0700_core.c 					  dib0700_rc_urb_completion, d, 1);
d                 913 drivers/media/usb/dvb-usb/dib0700_core.c 	struct dvb_usb_device *d = usb_get_intfdata(intf);
d                 914 drivers/media/usb/dvb-usb/dib0700_core.c 	struct dib0700_state *st = d->priv;
d                 516 drivers/media/usb/dvb-usb/dib0700_devices.c static int dib0700_rc_query_old_firmware(struct dvb_usb_device *d)
d                 522 drivers/media/usb/dvb-usb/dib0700_devices.c 	struct dib0700_state *st = d->priv;
d                 535 drivers/media/usb/dvb-usb/dib0700_devices.c 	i = dib0700_ctrl_rd(d, st->buf, 2, st->buf, 4);
d                 548 drivers/media/usb/dvb-usb/dib0700_devices.c 	dib0700_rc_setup(d, NULL); /* reset ir sensor data to prevent false events */
d                 550 drivers/media/usb/dvb-usb/dib0700_devices.c 	switch (d->props.rc.core.protocol) {
d                 555 drivers/media/usb/dvb-usb/dib0700_devices.c 			rc_repeat(d->rc_dev);
d                 572 drivers/media/usb/dvb-usb/dib0700_devices.c 	rc_keydown(d->rc_dev, protocol, scancode, toggle);
d                3742 drivers/media/usb/dvb-usb/dib0700_devices.c 	struct dvb_usb_device *d = adap->dev;
d                3775 drivers/media/usb/dvb-usb/dib0700_devices.c 	client_demod = i2c_new_device(&d->i2c_adap, &info);
d                  60 drivers/media/usb/dvb-usb/dibusb-common.c int dibusb_power_ctrl(struct dvb_usb_device *d, int onoff)
d                  73 drivers/media/usb/dvb-usb/dibusb-common.c 	ret = dvb_usb_generic_write(d, b, 3);
d                 113 drivers/media/usb/dvb-usb/dibusb-common.c int dibusb2_0_power_ctrl(struct dvb_usb_device *d, int onoff)
d                 129 drivers/media/usb/dvb-usb/dibusb-common.c 	ret = dvb_usb_generic_write(d, b, 3);
d                 137 drivers/media/usb/dvb-usb/dibusb-common.c static int dibusb_i2c_msg(struct dvb_usb_device *d, u8 addr,
d                 168 drivers/media/usb/dvb-usb/dibusb-common.c 	ret = dvb_usb_generic_rw(d, sndbuf, len, rbuf, rlen, 0);
d                 180 drivers/media/usb/dvb-usb/dibusb-common.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 183 drivers/media/usb/dvb-usb/dibusb-common.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 190 drivers/media/usb/dvb-usb/dibusb-common.c 			if (dibusb_i2c_msg(d, msg[i].addr, msg[i].buf,msg[i].len,
d                 195 drivers/media/usb/dvb-usb/dibusb-common.c 			if (dibusb_i2c_msg(d, msg[i].addr, msg[i].buf,msg[i].len,NULL,0) < 0)
d                 201 drivers/media/usb/dvb-usb/dibusb-common.c 			if (dibusb_i2c_msg(d, msg[i].addr, NULL, 0, msg[i].buf, msg[i].len) < 0)
d                 206 drivers/media/usb/dvb-usb/dibusb-common.c 	mutex_unlock(&d->i2c_mutex);
d                 221 drivers/media/usb/dvb-usb/dibusb-common.c int dibusb_read_eeprom_byte(struct dvb_usb_device *d, u8 offs, u8 *val)
d                 232 drivers/media/usb/dvb-usb/dibusb-common.c 	rc = dibusb_i2c_msg(d, 0x50, &buf[0], 1, &buf[1], 1);
d                 374 drivers/media/usb/dvb-usb/dibusb-common.c int dibusb_rc_query(struct dvb_usb_device *d, u32 *event, int *state)
d                 385 drivers/media/usb/dvb-usb/dibusb-common.c 	ret = dvb_usb_generic_rw(d, buf, 1, buf, 5, 0);
d                 389 drivers/media/usb/dvb-usb/dibusb-common.c 	dvb_usb_nec_rc_key_to_event(d, buf, event, state);
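
The az6027, cxusb and dibusb entries above (and the digitv and dtv5100 entries below) expose the device as an I2C adapter in the same way: master_xfer recovers the dvb_usb_device with i2c_get_adapdata(), takes d->i2c_mutex, and forwards each i2c_msg over USB. A minimal sketch of that shape; example_i2c_msg() is a hypothetical stand-in for the per-driver USB transport helper.

/* Sketch of the common master_xfer shape.  example_i2c_msg() is a
 * hypothetical transport helper, implemented differently per driver. */
static int example_i2c_msg(struct dvb_usb_device *d, u8 addr,
                           u8 *wbuf, int wlen, u8 *rbuf, int rlen);

static int example_i2c_xfer(struct i2c_adapter *adap,
                            struct i2c_msg msg[], int num)
{
        struct dvb_usb_device *d = i2c_get_adapdata(adap);
        int i;

        if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
                return -EAGAIN;

        for (i = 0; i < num; i++) {
                if (msg[i].flags & I2C_M_RD) {
                        if (example_i2c_msg(d, msg[i].addr, NULL, 0,
                                            msg[i].buf, msg[i].len) < 0)
                                break;
                } else {
                        if (example_i2c_msg(d, msg[i].addr, msg[i].buf,
                                            msg[i].len, NULL, 0) < 0)
                                break;
                }
        }

        mutex_unlock(&d->i2c_mutex);
        return i;
}
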
d                  25 drivers/media/usb/dvb-usb/digitv.c static int digitv_ctrl_msg(struct dvb_usb_device *d,
d                  28 drivers/media/usb/dvb-usb/digitv.c 	struct digitv_state *st = d->priv;
d                  45 drivers/media/usb/dvb-usb/digitv.c 		ret = dvb_usb_generic_write(d, st->sndbuf, 7);
d                  47 drivers/media/usb/dvb-usb/digitv.c 		ret = dvb_usb_generic_rw(d, st->sndbuf, 7, st->rcvbuf, 7, 10);
d                  56 drivers/media/usb/dvb-usb/digitv.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                  59 drivers/media/usb/dvb-usb/digitv.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                  68 drivers/media/usb/dvb-usb/digitv.c 			if (digitv_ctrl_msg(d, USB_READ_COFDM, msg[i].buf[0], NULL, 0,
d                  73 drivers/media/usb/dvb-usb/digitv.c 			if (digitv_ctrl_msg(d,USB_WRITE_COFDM, msg[i].buf[0],
d                  78 drivers/media/usb/dvb-usb/digitv.c 	mutex_unlock(&d->i2c_mutex);
d                 231 drivers/media/usb/dvb-usb/digitv.c static int digitv_rc_query(struct dvb_usb_device *d, u32 *event, int *state)
d                 240 drivers/media/usb/dvb-usb/digitv.c 	ret = digitv_ctrl_msg(d, USB_READ_REMOTE, 0, NULL, 0, &key[1], 4);
d                 246 drivers/media/usb/dvb-usb/digitv.c 	ret = digitv_ctrl_msg(d, USB_WRITE_REMOTE, 0, b, 4, NULL, 0);
d                 253 drivers/media/usb/dvb-usb/digitv.c 		  for (i = 0; i < d->props.rc.legacy.rc_map_size; i++) {
d                 254 drivers/media/usb/dvb-usb/digitv.c 			if (rc5_custom(&d->props.rc.legacy.rc_map_table[i]) == key[1] &&
d                 255 drivers/media/usb/dvb-usb/digitv.c 			    rc5_data(&d->props.rc.legacy.rc_map_table[i]) == key[2]) {
d                 256 drivers/media/usb/dvb-usb/digitv.c 				*event = d->props.rc.legacy.rc_map_table[i].keycode;
d                 274 drivers/media/usb/dvb-usb/digitv.c 	struct dvb_usb_device *d;
d                 275 drivers/media/usb/dvb-usb/digitv.c 	int ret = dvb_usb_device_init(intf, &digitv_properties, THIS_MODULE, &d,
d                 280 drivers/media/usb/dvb-usb/digitv.c 		if (d != NULL) { /* do that only when the firmware is loaded */
d                 282 drivers/media/usb/dvb-usb/digitv.c 			digitv_ctrl_msg(d,USB_WRITE_REMOTE_TYPE,0,b,4,NULL,0);
d                 285 drivers/media/usb/dvb-usb/digitv.c 			digitv_ctrl_msg(d,USB_WRITE_REMOTE,0,b,4,NULL,0);
d                  12 drivers/media/usb/dvb-usb/dtt200u-fe.c 	struct dvb_usb_device *d;
d                  32 drivers/media/usb/dvb-usb/dtt200u-fe.c 	ret = dvb_usb_generic_rw(state->d, state->data, 1, state->data, 3, 0);
d                  64 drivers/media/usb/dvb-usb/dtt200u-fe.c 	ret = dvb_usb_generic_rw(state->d, state->data, 1, state->data, 3, 0);
d                  80 drivers/media/usb/dvb-usb/dtt200u-fe.c 	ret = dvb_usb_generic_rw(state->d, state->data, 1, state->data, 2, 0);
d                  96 drivers/media/usb/dvb-usb/dtt200u-fe.c 	ret = dvb_usb_generic_rw(state->d, state->data, 1, state->data, 1, 0);
d                 112 drivers/media/usb/dvb-usb/dtt200u-fe.c 	ret = dvb_usb_generic_rw(state->d, state->data, 1, state->data, 1, 0);
d                 128 drivers/media/usb/dvb-usb/dtt200u-fe.c 	ret = dvb_usb_generic_write(state->d, state->data, 1);
d                 171 drivers/media/usb/dvb-usb/dtt200u-fe.c 	ret = dvb_usb_generic_write(state->d, state->data, 2);
d                 178 drivers/media/usb/dvb-usb/dtt200u-fe.c 	ret = dvb_usb_generic_write(state->d, state->data, 3);
d                 204 drivers/media/usb/dvb-usb/dtt200u-fe.c struct dvb_frontend* dtt200u_fe_attach(struct dvb_usb_device *d)
d                 215 drivers/media/usb/dvb-usb/dtt200u-fe.c 	state->d = d;
d                  24 drivers/media/usb/dvb-usb/dtt200u.c static int dtt200u_power_ctrl(struct dvb_usb_device *d, int onoff)
d                  26 drivers/media/usb/dvb-usb/dtt200u.c 	struct dtt200u_state *st = d->priv;
d                  29 drivers/media/usb/dvb-usb/dtt200u.c 	mutex_lock(&d->data_mutex);
d                  34 drivers/media/usb/dvb-usb/dtt200u.c 		ret = dvb_usb_generic_write(d, st->data, 2);
d                  36 drivers/media/usb/dvb-usb/dtt200u.c 	mutex_unlock(&d->data_mutex);
d                  42 drivers/media/usb/dvb-usb/dtt200u.c 	struct dvb_usb_device *d = adap->dev;
d                  43 drivers/media/usb/dvb-usb/dtt200u.c 	struct dtt200u_state *st = d->priv;
d                  46 drivers/media/usb/dvb-usb/dtt200u.c 	mutex_lock(&d->data_mutex);
d                  61 drivers/media/usb/dvb-usb/dtt200u.c 	mutex_unlock(&d->data_mutex);
d                  68 drivers/media/usb/dvb-usb/dtt200u.c 	struct dvb_usb_device *d = adap->dev;
d                  69 drivers/media/usb/dvb-usb/dtt200u.c 	struct dtt200u_state *st = d->priv;
d                  74 drivers/media/usb/dvb-usb/dtt200u.c 	mutex_lock(&d->data_mutex);
d                  81 drivers/media/usb/dvb-usb/dtt200u.c 	mutex_unlock(&d->data_mutex);
d                  86 drivers/media/usb/dvb-usb/dtt200u.c static int dtt200u_rc_query(struct dvb_usb_device *d)
d                  88 drivers/media/usb/dvb-usb/dtt200u.c 	struct dtt200u_state *st = d->priv;
d                  92 drivers/media/usb/dvb-usb/dtt200u.c 	mutex_lock(&d->data_mutex);
d                  95 drivers/media/usb/dvb-usb/dtt200u.c 	ret = dvb_usb_generic_rw(d, st->data, 1, st->data, 5, 0);
d                 114 drivers/media/usb/dvb-usb/dtt200u.c 			rc_keydown(d->rc_dev, proto, scancode, 0);
d                 116 drivers/media/usb/dvb-usb/dtt200u.c 			rc_keyup(d->rc_dev);
d                 118 drivers/media/usb/dvb-usb/dtt200u.c 		rc_repeat(d->rc_dev);
d                 120 drivers/media/usb/dvb-usb/dtt200u.c 		rc_keyup(d->rc_dev);
d                 127 drivers/media/usb/dvb-usb/dtt200u.c 	mutex_unlock(&d->data_mutex);
d                  52 drivers/media/usb/dvb-usb/dtt200u.h extern struct dvb_frontend * dtt200u_fe_attach(struct dvb_usb_device *d);
d                  25 drivers/media/usb/dvb-usb/dtv5100.c static int dtv5100_i2c_msg(struct dvb_usb_device *d, u8 addr,
d                  28 drivers/media/usb/dvb-usb/dtv5100.c 	struct dtv5100_state *st = d->priv;
d                  57 drivers/media/usb/dvb-usb/dtv5100.c 	return usb_control_msg(d->udev, usb_rcvctrlpipe(d->udev, 0), request,
d                  66 drivers/media/usb/dvb-usb/dtv5100.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                  72 drivers/media/usb/dvb-usb/dtv5100.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                  78 drivers/media/usb/dvb-usb/dtv5100.c 			if (dtv5100_i2c_msg(d, msg[i].addr, msg[i].buf,
d                  83 drivers/media/usb/dvb-usb/dtv5100.c 		} else if (dtv5100_i2c_msg(d, msg[i].addr, msg[i].buf,
d                  88 drivers/media/usb/dvb-usb/dtv5100.c 	mutex_unlock(&d->i2c_mutex);
d                  31 drivers/media/usb/dvb-usb/dvb-usb-common.h extern int dvb_usb_device_power_ctrl(struct dvb_usb_device *d, int onoff);
d                 103 drivers/media/usb/dvb-usb/dvb-usb-dvb.c 	struct dvb_usb_device *d = adap->dev;
d                 104 drivers/media/usb/dvb-usb/dvb-usb-dvb.c 	struct usb_device *udev = d->udev;
d                 110 drivers/media/usb/dvb-usb/dvb-usb-dvb.c 	media_device_usb_init(mdev, udev, d->desc->name);
d                 114 drivers/media/usb/dvb-usb/dvb-usb-dvb.c 	dev_info(&d->udev->dev, "media controller created\n");
d                  11 drivers/media/usb/dvb-usb/dvb-usb-i2c.c int dvb_usb_i2c_init(struct dvb_usb_device *d)
d                  15 drivers/media/usb/dvb-usb/dvb-usb-i2c.c 	if (!(d->props.caps & DVB_USB_IS_AN_I2C_ADAPTER))
d                  18 drivers/media/usb/dvb-usb/dvb-usb-i2c.c 	if (d->props.i2c_algo == NULL) {
d                  23 drivers/media/usb/dvb-usb/dvb-usb-i2c.c 	strscpy(d->i2c_adap.name, d->desc->name, sizeof(d->i2c_adap.name));
d                  24 drivers/media/usb/dvb-usb/dvb-usb-i2c.c 	d->i2c_adap.algo      = d->props.i2c_algo;
d                  25 drivers/media/usb/dvb-usb/dvb-usb-i2c.c 	d->i2c_adap.algo_data = NULL;
d                  26 drivers/media/usb/dvb-usb/dvb-usb-i2c.c 	d->i2c_adap.dev.parent = &d->udev->dev;
d                  28 drivers/media/usb/dvb-usb/dvb-usb-i2c.c 	i2c_set_adapdata(&d->i2c_adap, d);
d                  30 drivers/media/usb/dvb-usb/dvb-usb-i2c.c 	if ((ret = i2c_add_adapter(&d->i2c_adap)) < 0)
d                  33 drivers/media/usb/dvb-usb/dvb-usb-i2c.c 	d->state |= DVB_USB_STATE_I2C;
d                  38 drivers/media/usb/dvb-usb/dvb-usb-i2c.c int dvb_usb_i2c_exit(struct dvb_usb_device *d)
d                  40 drivers/media/usb/dvb-usb/dvb-usb-i2c.c 	if (d->state & DVB_USB_STATE_I2C)
d                  41 drivers/media/usb/dvb-usb/dvb-usb-i2c.c 		i2c_del_adapter(&d->i2c_adap);
d                  42 drivers/media/usb/dvb-usb/dvb-usb-i2c.c 	d->state &= ~DVB_USB_STATE_I2C;
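
The dvb-usb-i2c.c entries above show the core side of that mechanism: dvb_usb_i2c_init() fills in d->i2c_adap from d->props.i2c_algo, stores d itself via i2c_set_adapdata(), and registers the adapter, which is what later lets the drivers' xfer callbacks call i2c_get_adapdata(). A condensed sketch of that registration flow, following the listed lines; error labels and debug output are omitted.

/* Condensed sketch of the registration flow visible above (dvb-usb-i2c.c). */
static int example_i2c_init(struct dvb_usb_device *d)
{
        int ret;

        if (!(d->props.caps & DVB_USB_IS_AN_I2C_ADAPTER))
                return 0;                       /* device has no I2C bridge */
        if (d->props.i2c_algo == NULL)
                return -EINVAL;

        strscpy(d->i2c_adap.name, d->desc->name, sizeof(d->i2c_adap.name));
        d->i2c_adap.algo       = d->props.i2c_algo;
        d->i2c_adap.algo_data  = NULL;
        d->i2c_adap.dev.parent = &d->udev->dev;

        i2c_set_adapdata(&d->i2c_adap, d);      /* what i2c_get_adapdata() returns */

        ret = i2c_add_adapter(&d->i2c_adap);
        if (ret < 0)
                return ret;

        d->state |= DVB_USB_STATE_I2C;
        return 0;
}
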
d                  26 drivers/media/usb/dvb-usb/dvb-usb-init.c static int dvb_usb_adapter_init(struct dvb_usb_device *d, short *adapter_nrs)
d                  31 drivers/media/usb/dvb-usb/dvb-usb-init.c 	for (n = 0; n < d->props.num_adapters; n++) {
d                  32 drivers/media/usb/dvb-usb/dvb-usb-init.c 		adap = &d->adapter[n];
d                  33 drivers/media/usb/dvb-usb/dvb-usb-init.c 		adap->dev = d;
d                  36 drivers/media/usb/dvb-usb/dvb-usb-init.c 		memcpy(&adap->props, &d->props.adapter[n], sizeof(struct dvb_usb_adapter_properties));
d                  41 drivers/media/usb/dvb-usb/dvb-usb-init.c 			if (d->udev->speed == USB_SPEED_FULL && !(props->caps & DVB_USB_ADAP_HAS_PID_FILTER)) {
d                  46 drivers/media/usb/dvb-usb/dvb-usb-init.c 			if ((d->udev->speed == USB_SPEED_FULL && props->caps & DVB_USB_ADAP_HAS_PID_FILTER) ||
d                  92 drivers/media/usb/dvb-usb/dvb-usb-init.c 		d->num_adapters_initialized++;
d                  93 drivers/media/usb/dvb-usb/dvb-usb-init.c 		d->state |= DVB_USB_STATE_DVB;
d                 100 drivers/media/usb/dvb-usb/dvb-usb-init.c 	if (d->props.generic_bulk_ctrl_endpoint != 0) {
d                 101 drivers/media/usb/dvb-usb/dvb-usb-init.c 		usb_clear_halt(d->udev, usb_sndbulkpipe(d->udev, d->props.generic_bulk_ctrl_endpoint));
d                 102 drivers/media/usb/dvb-usb/dvb-usb-init.c 		usb_clear_halt(d->udev, usb_rcvbulkpipe(d->udev, d->props.generic_bulk_ctrl_endpoint));
d                 108 drivers/media/usb/dvb-usb/dvb-usb-init.c static int dvb_usb_adapter_exit(struct dvb_usb_device *d)
d                 112 drivers/media/usb/dvb-usb/dvb-usb-init.c 	for (n = 0; n < d->num_adapters_initialized; n++) {
d                 113 drivers/media/usb/dvb-usb/dvb-usb-init.c 		dvb_usb_adapter_frontend_exit(&d->adapter[n]);
d                 114 drivers/media/usb/dvb-usb/dvb-usb-init.c 		dvb_usb_adapter_dvb_exit(&d->adapter[n]);
d                 115 drivers/media/usb/dvb-usb/dvb-usb-init.c 		dvb_usb_adapter_stream_exit(&d->adapter[n]);
d                 116 drivers/media/usb/dvb-usb/dvb-usb-init.c 		kfree(d->adapter[n].priv);
d                 118 drivers/media/usb/dvb-usb/dvb-usb-init.c 	d->num_adapters_initialized = 0;
d                 119 drivers/media/usb/dvb-usb/dvb-usb-init.c 	d->state &= ~DVB_USB_STATE_DVB;
d                 125 drivers/media/usb/dvb-usb/dvb-usb-init.c static int dvb_usb_exit(struct dvb_usb_device *d)
d                 127 drivers/media/usb/dvb-usb/dvb-usb-init.c 	deb_info("state before exiting everything: %x\n", d->state);
d                 128 drivers/media/usb/dvb-usb/dvb-usb-init.c 	dvb_usb_remote_exit(d);
d                 129 drivers/media/usb/dvb-usb/dvb-usb-init.c 	dvb_usb_adapter_exit(d);
d                 130 drivers/media/usb/dvb-usb/dvb-usb-init.c 	dvb_usb_i2c_exit(d);
d                 131 drivers/media/usb/dvb-usb/dvb-usb-init.c 	deb_info("state should be zero now: %x\n", d->state);
d                 132 drivers/media/usb/dvb-usb/dvb-usb-init.c 	d->state = DVB_USB_STATE_INIT;
d                 134 drivers/media/usb/dvb-usb/dvb-usb-init.c 	if (d->priv != NULL && d->props.priv_destroy != NULL)
d                 135 drivers/media/usb/dvb-usb/dvb-usb-init.c 		d->props.priv_destroy(d);
d                 137 drivers/media/usb/dvb-usb/dvb-usb-init.c 	kfree(d->priv);
d                 138 drivers/media/usb/dvb-usb/dvb-usb-init.c 	kfree(d);
d                 142 drivers/media/usb/dvb-usb/dvb-usb-init.c static int dvb_usb_init(struct dvb_usb_device *d, short *adapter_nums)
d                 146 drivers/media/usb/dvb-usb/dvb-usb-init.c 	mutex_init(&d->data_mutex);
d                 147 drivers/media/usb/dvb-usb/dvb-usb-init.c 	mutex_init(&d->usb_mutex);
d                 148 drivers/media/usb/dvb-usb/dvb-usb-init.c 	mutex_init(&d->i2c_mutex);
d                 150 drivers/media/usb/dvb-usb/dvb-usb-init.c 	d->state = DVB_USB_STATE_INIT;
d                 152 drivers/media/usb/dvb-usb/dvb-usb-init.c 	if (d->props.size_of_priv > 0) {
d                 153 drivers/media/usb/dvb-usb/dvb-usb-init.c 		d->priv = kzalloc(d->props.size_of_priv, GFP_KERNEL);
d                 154 drivers/media/usb/dvb-usb/dvb-usb-init.c 		if (d->priv == NULL) {
d                 159 drivers/media/usb/dvb-usb/dvb-usb-init.c 		if (d->props.priv_init != NULL) {
d                 160 drivers/media/usb/dvb-usb/dvb-usb-init.c 			ret = d->props.priv_init(d);
d                 162 drivers/media/usb/dvb-usb/dvb-usb-init.c 				kfree(d->priv);
d                 163 drivers/media/usb/dvb-usb/dvb-usb-init.c 				d->priv = NULL;
d                 170 drivers/media/usb/dvb-usb/dvb-usb-init.c 	dvb_usb_device_power_ctrl(d, 1);
d                 172 drivers/media/usb/dvb-usb/dvb-usb-init.c 	if ((ret = dvb_usb_i2c_init(d)) ||
d                 173 drivers/media/usb/dvb-usb/dvb-usb-init.c 		(ret = dvb_usb_adapter_init(d, adapter_nums))) {
d                 174 drivers/media/usb/dvb-usb/dvb-usb-init.c 		dvb_usb_exit(d);
d                 178 drivers/media/usb/dvb-usb/dvb-usb-init.c 	if ((ret = dvb_usb_remote_init(d)))
d                 181 drivers/media/usb/dvb-usb/dvb-usb-init.c 	dvb_usb_device_power_ctrl(d, 0);
d                 226 drivers/media/usb/dvb-usb/dvb-usb-init.c int dvb_usb_device_power_ctrl(struct dvb_usb_device *d, int onoff)
d                 229 drivers/media/usb/dvb-usb/dvb-usb-init.c 		d->powered++;
d                 231 drivers/media/usb/dvb-usb/dvb-usb-init.c 		d->powered--;
d                 233 drivers/media/usb/dvb-usb/dvb-usb-init.c 	if (d->powered == 0 || (onoff && d->powered == 1)) { /* when switching from 1 to 0 or from 0 to 1 */
d                 235 drivers/media/usb/dvb-usb/dvb-usb-init.c 		if (d->props.power_ctrl)
d                 236 drivers/media/usb/dvb-usb/dvb-usb-init.c 			return d->props.power_ctrl(d, onoff);
d                 250 drivers/media/usb/dvb-usb/dvb-usb-init.c 	struct dvb_usb_device *d = NULL;
d                 271 drivers/media/usb/dvb-usb/dvb-usb-init.c 	d = kzalloc(sizeof(struct dvb_usb_device), GFP_KERNEL);
d                 272 drivers/media/usb/dvb-usb/dvb-usb-init.c 	if (d == NULL) {
d                 277 drivers/media/usb/dvb-usb/dvb-usb-init.c 	d->udev = udev;
d                 278 drivers/media/usb/dvb-usb/dvb-usb-init.c 	memcpy(&d->props, props, sizeof(struct dvb_usb_device_properties));
d                 279 drivers/media/usb/dvb-usb/dvb-usb-init.c 	d->desc = desc;
d                 280 drivers/media/usb/dvb-usb/dvb-usb-init.c 	d->owner = owner;
d                 282 drivers/media/usb/dvb-usb/dvb-usb-init.c 	usb_set_intfdata(intf, d);
d                 285 drivers/media/usb/dvb-usb/dvb-usb-init.c 		*du = d;
d                 287 drivers/media/usb/dvb-usb/dvb-usb-init.c 	ret = dvb_usb_init(d, adapter_nums);
d                 299 drivers/media/usb/dvb-usb/dvb-usb-init.c 	struct dvb_usb_device *d = usb_get_intfdata(intf);
d                 304 drivers/media/usb/dvb-usb/dvb-usb-init.c 	if (d != NULL && d->desc != NULL) {
d                 305 drivers/media/usb/dvb-usb/dvb-usb-init.c 		strscpy(name, d->desc->name, sizeof(name));
d                 306 drivers/media/usb/dvb-usb/dvb-usb-init.c 		dvb_usb_exit(d);
d                  48 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	struct dvb_usb_device *d = input_get_drvdata(dev);
d                  49 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	struct rc_map_table *keymap = d->props.rc.legacy.rc_map_table;
d                  50 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	unsigned int keymap_size = d->props.rc.legacy.rc_map_size;
d                  71 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	struct dvb_usb_device *d = input_get_drvdata(dev);
d                  72 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	struct rc_map_table *keymap = d->props.rc.legacy.rc_map_table;
d                  73 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	unsigned int keymap_size = d->props.rc.legacy.rc_map_size;
d                 111 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	struct dvb_usb_device *d =
d                 123 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	if (d->props.rc.legacy.rc_query(d,&event,&state)) {
d                 134 drivers/media/usb/dvb-usb/dvb-usb-remote.c 			d->last_event = event;
d                 135 drivers/media/usb/dvb-usb/dvb-usb-remote.c 			input_event(d->input_dev, EV_KEY, event, 1);
d                 136 drivers/media/usb/dvb-usb/dvb-usb-remote.c 			input_sync(d->input_dev);
d                 137 drivers/media/usb/dvb-usb/dvb-usb-remote.c 			input_event(d->input_dev, EV_KEY, d->last_event, 0);
d                 138 drivers/media/usb/dvb-usb/dvb-usb-remote.c 			input_sync(d->input_dev);
d                 142 drivers/media/usb/dvb-usb/dvb-usb-remote.c 			input_event(d->input_dev, EV_KEY, event, 1);
d                 143 drivers/media/usb/dvb-usb/dvb-usb-remote.c 			input_sync(d->input_dev);
d                 144 drivers/media/usb/dvb-usb/dvb-usb-remote.c 			input_event(d->input_dev, EV_KEY, d->last_event, 0);
d                 145 drivers/media/usb/dvb-usb/dvb-usb-remote.c 			input_sync(d->input_dev);
d                 187 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	schedule_delayed_work(&d->rc_query_work,msecs_to_jiffies(d->props.rc.legacy.rc_interval));
d                 190 drivers/media/usb/dvb-usb/dvb-usb-remote.c static int legacy_dvb_usb_remote_init(struct dvb_usb_device *d)
d                 201 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	input_dev->phys = d->rc_phys;
d                 202 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	usb_to_input_id(d->udev, &input_dev->id);
d                 203 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	input_dev->dev.parent = &d->udev->dev;
d                 204 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	d->input_dev = input_dev;
d                 205 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	d->rc_dev = NULL;
d                 211 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	deb_rc("key map size: %d\n", d->props.rc.legacy.rc_map_size);
d                 212 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	for (i = 0; i < d->props.rc.legacy.rc_map_size; i++) {
d                 214 drivers/media/usb/dvb-usb/dvb-usb-remote.c 			d->props.rc.legacy.rc_map_table[i].keycode, i);
d                 215 drivers/media/usb/dvb-usb/dvb-usb-remote.c 		set_bit(d->props.rc.legacy.rc_map_table[i].keycode, input_dev->keybit);
d                 219 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	input_dev->rep[REP_PERIOD] = d->props.rc.legacy.rc_interval;
d                 220 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	input_dev->rep[REP_DELAY]  = d->props.rc.legacy.rc_interval + 150;
d                 222 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	input_set_drvdata(input_dev, d);
d                 228 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	rc_interval = d->props.rc.legacy.rc_interval;
d                 230 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	INIT_DELAYED_WORK(&d->rc_query_work, legacy_dvb_usb_read_remote_control);
d                 233 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	schedule_delayed_work(&d->rc_query_work,
d                 236 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	d->state |= DVB_USB_STATE_REMOTE;
d                 248 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	struct dvb_usb_device *d =
d                 258 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	if (dvb_usb_disable_rc_polling || d->props.rc.core.bulk_mode)
d                 261 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	err = d->props.rc.core.rc_query(d);
d                 265 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	schedule_delayed_work(&d->rc_query_work,
d                 266 drivers/media/usb/dvb-usb/dvb-usb-remote.c 			      msecs_to_jiffies(d->props.rc.core.rc_interval));
d                 269 drivers/media/usb/dvb-usb/dvb-usb-remote.c static int rc_core_dvb_usb_remote_init(struct dvb_usb_device *d)
d                 274 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	dev = rc_allocate_device(d->props.rc.core.driver_type);
d                 278 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	dev->driver_name = d->props.rc.core.module_name;
d                 279 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	dev->map_name = d->props.rc.core.rc_codes;
d                 280 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	dev->change_protocol = d->props.rc.core.change_protocol;
d                 281 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	dev->allowed_protocols = d->props.rc.core.allowed_protos;
d                 282 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	usb_to_input_id(d->udev, &dev->input_id);
d                 283 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	dev->device_name = d->desc->name;
d                 284 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	dev->input_phys = d->rc_phys;
d                 285 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	dev->dev.parent = &d->udev->dev;
d                 286 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	dev->priv = d;
d                 287 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	dev->scancode_mask = d->props.rc.core.scancode_mask;
d                 295 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	d->input_dev = NULL;
d                 296 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	d->rc_dev = dev;
d                 298 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	if (!d->props.rc.core.rc_query || d->props.rc.core.bulk_mode)
d                 302 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	INIT_DELAYED_WORK(&d->rc_query_work, dvb_usb_read_remote_control);
d                 304 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	rc_interval = d->props.rc.core.rc_interval;
d                 307 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	schedule_delayed_work(&d->rc_query_work,
d                 313 drivers/media/usb/dvb-usb/dvb-usb-remote.c int dvb_usb_remote_init(struct dvb_usb_device *d)
d                 320 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	if (d->props.rc.legacy.rc_map_table && d->props.rc.legacy.rc_query)
d                 321 drivers/media/usb/dvb-usb/dvb-usb-remote.c 		d->props.rc.mode = DVB_RC_LEGACY;
d                 322 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	else if (d->props.rc.core.rc_codes)
d                 323 drivers/media/usb/dvb-usb/dvb-usb-remote.c 		d->props.rc.mode = DVB_RC_CORE;
d                 327 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	usb_make_path(d->udev, d->rc_phys, sizeof(d->rc_phys));
d                 328 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	strlcat(d->rc_phys, "/ir0", sizeof(d->rc_phys));
d                 331 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	if (d->props.rc.legacy.rc_interval < 40)
d                 332 drivers/media/usb/dvb-usb/dvb-usb-remote.c 		d->props.rc.legacy.rc_interval = 100; /* default */
d                 334 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	if (d->props.rc.mode == DVB_RC_LEGACY)
d                 335 drivers/media/usb/dvb-usb/dvb-usb-remote.c 		err = legacy_dvb_usb_remote_init(d);
d                 337 drivers/media/usb/dvb-usb/dvb-usb-remote.c 		err = rc_core_dvb_usb_remote_init(d);
d                 341 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	d->state |= DVB_USB_STATE_REMOTE;
d                 346 drivers/media/usb/dvb-usb/dvb-usb-remote.c int dvb_usb_remote_exit(struct dvb_usb_device *d)
d                 348 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	if (d->state & DVB_USB_STATE_REMOTE) {
d                 349 drivers/media/usb/dvb-usb/dvb-usb-remote.c 		cancel_delayed_work_sync(&d->rc_query_work);
d                 350 drivers/media/usb/dvb-usb/dvb-usb-remote.c 		if (d->props.rc.mode == DVB_RC_LEGACY)
d                 351 drivers/media/usb/dvb-usb/dvb-usb-remote.c 			input_unregister_device(d->input_dev);
d                 353 drivers/media/usb/dvb-usb/dvb-usb-remote.c 			rc_unregister_device(d->rc_dev);
d                 355 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	d->state &= ~DVB_USB_STATE_REMOTE;
d                 362 drivers/media/usb/dvb-usb/dvb-usb-remote.c int dvb_usb_nec_rc_key_to_event(struct dvb_usb_device *d,
d                 366 drivers/media/usb/dvb-usb/dvb-usb-remote.c 	struct rc_map_table *keymap = d->props.rc.legacy.rc_map_table;
d                 379 drivers/media/usb/dvb-usb/dvb-usb-remote.c 			for (i = 0; i < d->props.rc.legacy.rc_map_size; i++)
d                  12 drivers/media/usb/dvb-usb/dvb-usb-urb.c int dvb_usb_generic_rw(struct dvb_usb_device *d, u8 *wbuf, u16 wlen, u8 *rbuf,
d                  17 drivers/media/usb/dvb-usb/dvb-usb-urb.c 	if (!d || wbuf == NULL || wlen == 0)
d                  20 drivers/media/usb/dvb-usb/dvb-usb-urb.c 	if (d->props.generic_bulk_ctrl_endpoint == 0) {
d                  25 drivers/media/usb/dvb-usb/dvb-usb-urb.c 	if ((ret = mutex_lock_interruptible(&d->usb_mutex)))
d                  31 drivers/media/usb/dvb-usb/dvb-usb-urb.c 	ret = usb_bulk_msg(d->udev,usb_sndbulkpipe(d->udev,
d                  32 drivers/media/usb/dvb-usb/dvb-usb-urb.c 			d->props.generic_bulk_ctrl_endpoint), wbuf,wlen,&actlen,
d                  45 drivers/media/usb/dvb-usb/dvb-usb-urb.c 		ret = usb_bulk_msg(d->udev,usb_rcvbulkpipe(d->udev,
d                  46 drivers/media/usb/dvb-usb/dvb-usb-urb.c 				d->props.generic_bulk_ctrl_endpoint_response ?
d                  47 drivers/media/usb/dvb-usb/dvb-usb-urb.c 				d->props.generic_bulk_ctrl_endpoint_response :
d                  48 drivers/media/usb/dvb-usb/dvb-usb-urb.c 				d->props.generic_bulk_ctrl_endpoint),rbuf,rlen,&actlen,
d                  59 drivers/media/usb/dvb-usb/dvb-usb-urb.c 	mutex_unlock(&d->usb_mutex);
d                  64 drivers/media/usb/dvb-usb/dvb-usb-urb.c int dvb_usb_generic_write(struct dvb_usb_device *d, u8 *buf, u16 len)
d                  66 drivers/media/usb/dvb-usb/dvb-usb-urb.c 	return dvb_usb_generic_rw(d,buf,len,NULL,0,0);
d                 211 drivers/media/usb/dvb-usb/dvb-usb.h 	int (*rc_query) (struct dvb_usb_device *d);
d                 119 drivers/media/usb/dvb-usb/dw2102.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 124 drivers/media/usb/dvb-usb/dw2102.c 	if (!d)
d                 126 drivers/media/usb/dvb-usb/dw2102.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 134 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0xb5, value + i, 0,
d                 146 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0xb2, 0, 0,
d                 159 drivers/media/usb/dvb-usb/dw2102.c 				dw210x_op_rw(d->udev, 0xb2, 0, 0,
d                 163 drivers/media/usb/dvb-usb/dw2102.c 				dw210x_op_rw(d->udev, 0xb5, 0, 0,
d                 169 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0xb8, 0, 0,
d                 177 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0xb2, 0, 0,
d                 185 drivers/media/usb/dvb-usb/dw2102.c 	mutex_unlock(&d->i2c_mutex);
d                 192 drivers/media/usb/dvb-usb/dw2102.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 195 drivers/media/usb/dvb-usb/dw2102.c 	if (!d)
d                 197 drivers/media/usb/dvb-usb/dw2102.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 220 drivers/media/usb/dvb-usb/dw2102.c 		dw210x_op_rw(d->udev, 0xc2, 0, 0,
d                 223 drivers/media/usb/dvb-usb/dw2102.c 		dw210x_op_rw(d->udev, 0xc3, 0xd0, 0,
d                 242 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0xc2, 0, 0, buf6,
d                 246 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0xb8, 0, 0,
d                 254 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0xb2, 0, 0,
d                 261 drivers/media/usb/dvb-usb/dw2102.c 	mutex_unlock(&d->i2c_mutex);
d                 267 drivers/media/usb/dvb-usb/dw2102.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 270 drivers/media/usb/dvb-usb/dw2102.c 	if (!d)
d                 272 drivers/media/usb/dvb-usb/dw2102.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 298 drivers/media/usb/dvb-usb/dw2102.c 		dw210x_op_rw(d->udev, 0xc2, 0, 0,
d                 301 drivers/media/usb/dvb-usb/dw2102.c 		dw210x_op_rw(d->udev, 0xc3, 0xd1 , 0,
d                 323 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0xc2, 0, 0,
d                 341 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0xc2, 0, 0,
d                 347 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0xb8, 0, 0,
d                 356 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0xb2, 0, 0,
d                 367 drivers/media/usb/dvb-usb/dw2102.c 	mutex_unlock(&d->i2c_mutex);
d                 373 drivers/media/usb/dvb-usb/dw2102.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 376 drivers/media/usb/dvb-usb/dw2102.c 	if (!d)
d                 378 drivers/media/usb/dvb-usb/dw2102.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 385 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0xb8, 0, 0,
d                 394 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0xb2, 0, 0,
d                 414 drivers/media/usb/dvb-usb/dw2102.c 				dw210x_op_rw(d->udev, 0xc3,
d                 434 drivers/media/usb/dvb-usb/dw2102.c 					dw210x_op_rw(d->udev, 0xc2, 0, 0,
d                 454 drivers/media/usb/dvb-usb/dw2102.c 				dw210x_op_rw(d->udev, 0xc2, 0, 0,
d                 466 drivers/media/usb/dvb-usb/dw2102.c 	mutex_unlock(&d->i2c_mutex);
d                 473 drivers/media/usb/dvb-usb/dw2102.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 477 drivers/media/usb/dvb-usb/dw2102.c 	if (!d)
d                 479 drivers/media/usb/dvb-usb/dw2102.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 503 drivers/media/usb/dvb-usb/dw2102.c 		dw210x_op_rw(d->udev, 0xc2, 0, 0,
d                 506 drivers/media/usb/dvb-usb/dw2102.c 		dw210x_op_rw(d->udev, 0xc3, 0x19 , 0,
d                 528 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0xc2, 0, 0,
d                 534 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0xb8, 0, 0,
d                 552 drivers/media/usb/dvb-usb/dw2102.c 	mutex_unlock(&d->i2c_mutex);
d                 559 drivers/media/usb/dvb-usb/dw2102.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 563 drivers/media/usb/dvb-usb/dw2102.c 	if (!d)
d                 565 drivers/media/usb/dvb-usb/dw2102.c 	udev = d->udev;
d                 566 drivers/media/usb/dvb-usb/dw2102.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 573 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0xb8, 0, 0,
d                 583 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0x8a, 0, 0,
d                 587 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0x8a, 0, 0,
d                 596 drivers/media/usb/dvb-usb/dw2102.c 			dw210x_op_rw(d->udev, 0x8a, 0, 0,
d                 617 drivers/media/usb/dvb-usb/dw2102.c 				dw210x_op_rw(d->udev, 0x91, 0, 0,
d                 635 drivers/media/usb/dvb-usb/dw2102.c 					dw210x_op_rw(d->udev, 0x80, 0, 0,
d                 655 drivers/media/usb/dvb-usb/dw2102.c 				dw210x_op_rw(d->udev,
d                 674 drivers/media/usb/dvb-usb/dw2102.c 				dw210x_op_rw(d->udev, 0x80, 0, 0,
d                 686 drivers/media/usb/dvb-usb/dw2102.c 	mutex_unlock(&d->i2c_mutex);
d                 693 drivers/media/usb/dvb-usb/dw2102.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 696 drivers/media/usb/dvb-usb/dw2102.c 	if (!d)
d                 699 drivers/media/usb/dvb-usb/dw2102.c 	state = d->priv;
d                 701 drivers/media/usb/dvb-usb/dw2102.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 703 drivers/media/usb/dvb-usb/dw2102.c 	if (mutex_lock_interruptible(&d->data_mutex) < 0) {
d                 704 drivers/media/usb/dvb-usb/dw2102.c 		mutex_unlock(&d->i2c_mutex);
d                 715 drivers/media/usb/dvb-usb/dw2102.c 			if (dvb_usb_generic_rw(d, state->data, 3,
d                 721 drivers/media/usb/dvb-usb/dw2102.c 			if (dvb_usb_generic_rw(d, state->data, 1,
d                 742 drivers/media/usb/dvb-usb/dw2102.c 			if (dvb_usb_generic_rw(d, state->data, msg[0].len + 3,
d                 769 drivers/media/usb/dvb-usb/dw2102.c 		if (dvb_usb_generic_rw(d, state->data, msg[0].len + 4,
d                 779 drivers/media/usb/dvb-usb/dw2102.c 	mutex_unlock(&d->data_mutex);
d                 780 drivers/media/usb/dvb-usb/dw2102.c 	mutex_unlock(&d->i2c_mutex);
d                 824 drivers/media/usb/dvb-usb/dw2102.c static int dw210x_read_mac_address(struct dvb_usb_device *d, u8 mac[6])
d                 831 drivers/media/usb/dvb-usb/dw2102.c 		if (dw210x_op_rw(d->udev, 0xb6, 0xa0 , i, ibuf, 2, DW210X_READ_MSG) < 0) {
d                 848 drivers/media/usb/dvb-usb/dw2102.c static int s6x0_read_mac_address(struct dvb_usb_device *d, u8 mac[6])
d                 869 drivers/media/usb/dvb-usb/dw2102.c 		ret = s6x0_i2c_transfer(&d->i2c_adap, msg, 2);
d                 904 drivers/media/usb/dvb-usb/dw2102.c static int su3000_power_ctrl(struct dvb_usb_device *d, int i)
d                 906 drivers/media/usb/dvb-usb/dw2102.c 	struct dw2102_state *state = (struct dw2102_state *)d->priv;
d                 912 drivers/media/usb/dvb-usb/dw2102.c 		mutex_lock(&d->data_mutex);
d                 919 drivers/media/usb/dvb-usb/dw2102.c 		ret = dvb_usb_generic_rw(d, state->data, 2, NULL, 0, 0);
d                 920 drivers/media/usb/dvb-usb/dw2102.c 		mutex_unlock(&d->data_mutex);
d                 926 drivers/media/usb/dvb-usb/dw2102.c static int su3000_read_mac_address(struct dvb_usb_device *d, u8 mac[6])
d                 948 drivers/media/usb/dvb-usb/dw2102.c 		if (i2c_transfer(&d->i2c_adap, msg, 2) != 2)
d                 996 drivers/media/usb/dvb-usb/dw2102.c 	struct dvb_usb_adapter *d =
d                 998 drivers/media/usb/dvb-usb/dw2102.c 	struct dw2102_state *st = (struct dw2102_state *)d->dev->priv;
d                1028 drivers/media/usb/dvb-usb/dw2102.c 	struct dvb_usb_adapter *d =
d                1030 drivers/media/usb/dvb-usb/dw2102.c 	struct dw2102_state *st = (struct dw2102_state *)d->dev->priv;
d                1037 drivers/media/usb/dvb-usb/dw2102.c 		su3000_streaming_ctrl(d, 1);
d                1183 drivers/media/usb/dvb-usb/dw2102.c static int dw2104_frontend_attach(struct dvb_usb_adapter *d)
d                1188 drivers/media/usb/dvb-usb/dw2102.c 		d->fe_adap[0].fe = dvb_attach(stv0900_attach, &dw2104a_stv0900_config,
d                1189 drivers/media/usb/dvb-usb/dw2102.c 				&d->dev->i2c_adap, 0);
d                1190 drivers/media/usb/dvb-usb/dw2102.c 		if (d->fe_adap[0].fe != NULL) {
d                1191 drivers/media/usb/dvb-usb/dw2102.c 			if (dvb_attach(stb6100_attach, d->fe_adap[0].fe,
d                1193 drivers/media/usb/dvb-usb/dw2102.c 					&d->dev->i2c_adap)) {
d                1194 drivers/media/usb/dvb-usb/dw2102.c 				tuner_ops = &d->fe_adap[0].fe->ops.tuner_ops;
d                1199 drivers/media/usb/dvb-usb/dw2102.c 				d->fe_adap[0].fe->ops.set_voltage = dw210x_set_voltage;
d                1207 drivers/media/usb/dvb-usb/dw2102.c 		d->fe_adap[0].fe = dvb_attach(stv0900_attach, &dw2104_stv0900_config,
d                1208 drivers/media/usb/dvb-usb/dw2102.c 				&d->dev->i2c_adap, 0);
d                1209 drivers/media/usb/dvb-usb/dw2102.c 		if (d->fe_adap[0].fe != NULL) {
d                1210 drivers/media/usb/dvb-usb/dw2102.c 			if (dvb_attach(stv6110_attach, d->fe_adap[0].fe,
d                1212 drivers/media/usb/dvb-usb/dw2102.c 					&d->dev->i2c_adap)) {
d                1213 drivers/media/usb/dvb-usb/dw2102.c 				d->fe_adap[0].fe->ops.set_voltage = dw210x_set_voltage;
d                1221 drivers/media/usb/dvb-usb/dw2102.c 		d->fe_adap[0].fe = dvb_attach(cx24116_attach, &dw2104_config,
d                1222 drivers/media/usb/dvb-usb/dw2102.c 				&d->dev->i2c_adap);
d                1223 drivers/media/usb/dvb-usb/dw2102.c 		if (d->fe_adap[0].fe != NULL) {
d                1224 drivers/media/usb/dvb-usb/dw2102.c 			d->fe_adap[0].fe->ops.set_voltage = dw210x_set_voltage;
d                1230 drivers/media/usb/dvb-usb/dw2102.c 	d->fe_adap[0].fe = dvb_attach(ds3000_attach, &dw2104_ds3000_config,
d                1231 drivers/media/usb/dvb-usb/dw2102.c 			&d->dev->i2c_adap);
d                1232 drivers/media/usb/dvb-usb/dw2102.c 	if (d->fe_adap[0].fe != NULL) {
d                1233 drivers/media/usb/dvb-usb/dw2102.c 		dvb_attach(ts2020_attach, d->fe_adap[0].fe,
d                1234 drivers/media/usb/dvb-usb/dw2102.c 			&dw2104_ts2020_config, &d->dev->i2c_adap);
d                1235 drivers/media/usb/dvb-usb/dw2102.c 		d->fe_adap[0].fe->ops.set_voltage = dw210x_set_voltage;
d                1247 drivers/media/usb/dvb-usb/dw2102.c static int dw2102_frontend_attach(struct dvb_usb_adapter *d)
d                1251 drivers/media/usb/dvb-usb/dw2102.c 		d->fe_adap[0].fe = dvb_attach(si21xx_attach, &serit_sp1511lhb_config,
d                1252 drivers/media/usb/dvb-usb/dw2102.c 					&d->dev->i2c_adap);
d                1253 drivers/media/usb/dvb-usb/dw2102.c 		if (d->fe_adap[0].fe != NULL) {
d                1254 drivers/media/usb/dvb-usb/dw2102.c 			d->fe_adap[0].fe->ops.set_voltage = dw210x_set_voltage;
d                1261 drivers/media/usb/dvb-usb/dw2102.c 		d->fe_adap[0].fe = dvb_attach(stv0288_attach, &earda_config,
d                1262 drivers/media/usb/dvb-usb/dw2102.c 					&d->dev->i2c_adap);
d                1263 drivers/media/usb/dvb-usb/dw2102.c 		if (d->fe_adap[0].fe != NULL) {
d                1264 drivers/media/usb/dvb-usb/dw2102.c 			if (dvb_attach(stb6000_attach, d->fe_adap[0].fe, 0x61,
d                1265 drivers/media/usb/dvb-usb/dw2102.c 					&d->dev->i2c_adap)) {
d                1266 drivers/media/usb/dvb-usb/dw2102.c 				d->fe_adap[0].fe->ops.set_voltage = dw210x_set_voltage;
d                1275 drivers/media/usb/dvb-usb/dw2102.c 		d->fe_adap[0].fe = dvb_attach(stv0299_attach, &sharp_z0194a_config,
d                1276 drivers/media/usb/dvb-usb/dw2102.c 					&d->dev->i2c_adap);
d                1277 drivers/media/usb/dvb-usb/dw2102.c 		if (d->fe_adap[0].fe != NULL) {
d                1278 drivers/media/usb/dvb-usb/dw2102.c 			d->fe_adap[0].fe->ops.set_voltage = dw210x_set_voltage;
d                1286 drivers/media/usb/dvb-usb/dw2102.c static int dw3101_frontend_attach(struct dvb_usb_adapter *d)
d                1288 drivers/media/usb/dvb-usb/dw2102.c 	d->fe_adap[0].fe = dvb_attach(tda10023_attach, &dw3101_tda10023_config,
d                1289 drivers/media/usb/dvb-usb/dw2102.c 				&d->dev->i2c_adap, 0x48);
d                1290 drivers/media/usb/dvb-usb/dw2102.c 	if (d->fe_adap[0].fe != NULL) {
d                1297 drivers/media/usb/dvb-usb/dw2102.c static int zl100313_frontend_attach(struct dvb_usb_adapter *d)
d                1299 drivers/media/usb/dvb-usb/dw2102.c 	d->fe_adap[0].fe = dvb_attach(mt312_attach, &zl313_config,
d                1300 drivers/media/usb/dvb-usb/dw2102.c 			&d->dev->i2c_adap);
d                1301 drivers/media/usb/dvb-usb/dw2102.c 	if (d->fe_adap[0].fe != NULL) {
d                1302 drivers/media/usb/dvb-usb/dw2102.c 		if (dvb_attach(zl10039_attach, d->fe_adap[0].fe, 0x60,
d                1303 drivers/media/usb/dvb-usb/dw2102.c 				&d->dev->i2c_adap)) {
d                1304 drivers/media/usb/dvb-usb/dw2102.c 			d->fe_adap[0].fe->ops.set_voltage = dw210x_set_voltage;
d                1313 drivers/media/usb/dvb-usb/dw2102.c static int stv0288_frontend_attach(struct dvb_usb_adapter *d)
d                1317 drivers/media/usb/dvb-usb/dw2102.c 	d->fe_adap[0].fe = dvb_attach(stv0288_attach, &earda_config,
d                1318 drivers/media/usb/dvb-usb/dw2102.c 			&d->dev->i2c_adap);
d                1320 drivers/media/usb/dvb-usb/dw2102.c 	if (d->fe_adap[0].fe == NULL)
d                1323 drivers/media/usb/dvb-usb/dw2102.c 	if (NULL == dvb_attach(stb6000_attach, d->fe_adap[0].fe, 0x61, &d->dev->i2c_adap))
d                1326 drivers/media/usb/dvb-usb/dw2102.c 	d->fe_adap[0].fe->ops.set_voltage = dw210x_set_voltage;
d                1328 drivers/media/usb/dvb-usb/dw2102.c 	dw210x_op_rw(d->dev->udev, 0x8a, 0, 0, obuf, 2, DW210X_WRITE_MSG);
d                1336 drivers/media/usb/dvb-usb/dw2102.c static int ds3000_frontend_attach(struct dvb_usb_adapter *d)
d                1338 drivers/media/usb/dvb-usb/dw2102.c 	struct dw2102_state *st = d->dev->priv;
d                1341 drivers/media/usb/dvb-usb/dw2102.c 	d->fe_adap[0].fe = dvb_attach(ds3000_attach, &s660_ds3000_config,
d                1342 drivers/media/usb/dvb-usb/dw2102.c 			&d->dev->i2c_adap);
d                1344 drivers/media/usb/dvb-usb/dw2102.c 	if (d->fe_adap[0].fe == NULL)
d                1347 drivers/media/usb/dvb-usb/dw2102.c 	dvb_attach(ts2020_attach, d->fe_adap[0].fe, &s660_ts2020_config,
d                1348 drivers/media/usb/dvb-usb/dw2102.c 		&d->dev->i2c_adap);
d                1350 drivers/media/usb/dvb-usb/dw2102.c 	st->old_set_voltage = d->fe_adap[0].fe->ops.set_voltage;
d                1351 drivers/media/usb/dvb-usb/dw2102.c 	d->fe_adap[0].fe->ops.set_voltage = s660_set_voltage;
d                1353 drivers/media/usb/dvb-usb/dw2102.c 	dw210x_op_rw(d->dev->udev, 0x8a, 0, 0, obuf, 2, DW210X_WRITE_MSG);
d                1360 drivers/media/usb/dvb-usb/dw2102.c static int prof_7500_frontend_attach(struct dvb_usb_adapter *d)
d                1364 drivers/media/usb/dvb-usb/dw2102.c 	d->fe_adap[0].fe = dvb_attach(stv0900_attach, &prof_7500_stv0900_config,
d                1365 drivers/media/usb/dvb-usb/dw2102.c 					&d->dev->i2c_adap, 0);
d                1366 drivers/media/usb/dvb-usb/dw2102.c 	if (d->fe_adap[0].fe == NULL)
d                1369 drivers/media/usb/dvb-usb/dw2102.c 	d->fe_adap[0].fe->ops.set_voltage = dw210x_set_voltage;
d                1371 drivers/media/usb/dvb-usb/dw2102.c 	dw210x_op_rw(d->dev->udev, 0x8a, 0, 0, obuf, 2, DW210X_WRITE_MSG);
d                1380 drivers/media/usb/dvb-usb/dw2102.c 	struct dvb_usb_device *d = adap->dev;
d                1381 drivers/media/usb/dvb-usb/dw2102.c 	struct dw2102_state *state = d->priv;
d                1383 drivers/media/usb/dvb-usb/dw2102.c 	mutex_lock(&d->data_mutex);
d                1389 drivers/media/usb/dvb-usb/dw2102.c 	if (dvb_usb_generic_rw(d, state->data, 3, state->data, 1, 0) < 0)
d                1396 drivers/media/usb/dvb-usb/dw2102.c 	if (dvb_usb_generic_rw(d, state->data, 3, state->data, 1, 0) < 0)
d                1404 drivers/media/usb/dvb-usb/dw2102.c 	if (dvb_usb_generic_rw(d, state->data, 3, state->data, 1, 0) < 0)
d                1411 drivers/media/usb/dvb-usb/dw2102.c 	if (dvb_usb_generic_rw(d, state->data, 3, state->data, 1, 0) < 0)
d                1416 drivers/media/usb/dvb-usb/dw2102.c 	if (dvb_usb_generic_rw(d, state->data, 1, state->data, 1, 0) < 0)
d                1419 drivers/media/usb/dvb-usb/dw2102.c 	mutex_unlock(&d->data_mutex);
d                1422 drivers/media/usb/dvb-usb/dw2102.c 					&d->i2c_adap);
d                1428 drivers/media/usb/dvb-usb/dw2102.c 				&d->i2c_adap)) {
d                1439 drivers/media/usb/dvb-usb/dw2102.c 	struct dvb_usb_device *d = adap->dev;
d                1440 drivers/media/usb/dvb-usb/dw2102.c 	struct dw2102_state *state = d->priv;
d                1442 drivers/media/usb/dvb-usb/dw2102.c 	mutex_lock(&d->data_mutex);
d                1448 drivers/media/usb/dvb-usb/dw2102.c 	if (dvb_usb_generic_rw(d, state->data, 3, state->data, 1, 0) < 0)
d                1455 drivers/media/usb/dvb-usb/dw2102.c 	if (dvb_usb_generic_rw(d, state->data, 3, state->data, 1, 0) < 0)
d                1462 drivers/media/usb/dvb-usb/dw2102.c 	if (dvb_usb_generic_rw(d, state->data, 3, state->data, 1, 0) < 0)
d                1471 drivers/media/usb/dvb-usb/dw2102.c 	if (dvb_usb_generic_rw(d, state->data, 3, state->data, 1, 0) < 0)
d                1476 drivers/media/usb/dvb-usb/dw2102.c 	if (dvb_usb_generic_rw(d, state->data, 1, state->data, 1, 0) < 0)
d                1479 drivers/media/usb/dvb-usb/dw2102.c 	mutex_unlock(&d->data_mutex);
d                1482 drivers/media/usb/dvb-usb/dw2102.c 					&d->i2c_adap, NULL);
d                1485 drivers/media/usb/dvb-usb/dw2102.c 					&d->i2c_adap, &tda18271_config)) {
d                1497 drivers/media/usb/dvb-usb/dw2102.c 	struct dvb_usb_device *d = adap->dev;
d                1498 drivers/media/usb/dvb-usb/dw2102.c 	struct dw2102_state *state = d->priv;
d                1500 drivers/media/usb/dvb-usb/dw2102.c 	mutex_lock(&d->data_mutex);
d                1504 drivers/media/usb/dvb-usb/dw2102.c 	if (dvb_usb_generic_rw(d, state->data, 1, state->data, 1, 0) < 0)
d                1507 drivers/media/usb/dvb-usb/dw2102.c 	mutex_unlock(&d->data_mutex);
d                1511 drivers/media/usb/dvb-usb/dw2102.c 					&d->i2c_adap);
d                1518 drivers/media/usb/dvb-usb/dw2102.c 				&d->i2c_adap)) {
d                1529 drivers/media/usb/dvb-usb/dw2102.c 	struct dvb_usb_device *d = adap->dev;
d                1530 drivers/media/usb/dvb-usb/dw2102.c 	struct dw2102_state *state = d->priv;
d                1537 drivers/media/usb/dvb-usb/dw2102.c 	mutex_lock(&d->data_mutex);
d                1543 drivers/media/usb/dvb-usb/dw2102.c 	if (dvb_usb_generic_rw(d, state->data, 3, state->data, 1, 0) < 0)
d                1550 drivers/media/usb/dvb-usb/dw2102.c 	if (dvb_usb_generic_rw(d, state->data, 3, state->data, 1, 0) < 0)
d                1558 drivers/media/usb/dvb-usb/dw2102.c 	if (dvb_usb_generic_rw(d, state->data, 3, state->data, 1, 0) < 0)
d                1565 drivers/media/usb/dvb-usb/dw2102.c 	if (dvb_usb_generic_rw(d, state->data, 3, state->data, 1, 0) < 0)
d                1570 drivers/media/usb/dvb-usb/dw2102.c 	if (dvb_usb_generic_rw(d, state->data, 1, state->data, 1, 0) < 0)
d                1573 drivers/media/usb/dvb-usb/dw2102.c 	mutex_unlock(&d->data_mutex);
d                1593 drivers/media/usb/dvb-usb/dw2102.c 	client = i2c_new_device(&d->i2c_adap, &board_info);
d                1655 drivers/media/usb/dvb-usb/dw2102.c static int dw2102_rc_query(struct dvb_usb_device *d)
d                1665 drivers/media/usb/dvb-usb/dw2102.c 	if (d->props.i2c_algo->master_xfer(&d->i2c_adap, &msg, 1) == 1) {
d                1669 drivers/media/usb/dvb-usb/dw2102.c 			rc_keydown(d->rc_dev, RC_PROTO_UNKNOWN, key[0], 0);
d                1676 drivers/media/usb/dvb-usb/dw2102.c static int prof_rc_query(struct dvb_usb_device *d)
d                1686 drivers/media/usb/dvb-usb/dw2102.c 	if (d->props.i2c_algo->master_xfer(&d->i2c_adap, &msg, 1) == 1) {
d                1690 drivers/media/usb/dvb-usb/dw2102.c 			rc_keydown(d->rc_dev, RC_PROTO_UNKNOWN, key[0] ^ 0xff,
d                1698 drivers/media/usb/dvb-usb/dw2102.c static int su3000_rc_query(struct dvb_usb_device *d)
d                1708 drivers/media/usb/dvb-usb/dw2102.c 	if (d->props.i2c_algo->master_xfer(&d->i2c_adap, &msg, 1) == 1) {
d                1712 drivers/media/usb/dvb-usb/dw2102.c 			rc_keydown(d->rc_dev, RC_PROTO_RC5,
d                2428 drivers/media/usb/dvb-usb/dw2102.c 	struct dvb_usb_device *d = usb_get_intfdata(intf);
d                2429 drivers/media/usb/dvb-usb/dw2102.c 	struct dw2102_state *st = (struct dw2102_state *)d->priv;
d                  29 drivers/media/usb/dvb-usb/gp8psk.c static int gp8psk_usb_in_op(struct dvb_usb_device *d, u8 req, u16 value,
d                  32 drivers/media/usb/dvb-usb/gp8psk.c 	struct gp8psk_state *st = d->priv;
d                  38 drivers/media/usb/dvb-usb/gp8psk.c 	if ((ret = mutex_lock_interruptible(&d->usb_mutex)))
d                  42 drivers/media/usb/dvb-usb/gp8psk.c 		ret = usb_control_msg(d->udev,
d                  43 drivers/media/usb/dvb-usb/gp8psk.c 			usb_rcvctrlpipe(d->udev,0),
d                  63 drivers/media/usb/dvb-usb/gp8psk.c 	mutex_unlock(&d->usb_mutex);
d                  68 drivers/media/usb/dvb-usb/gp8psk.c static int gp8psk_usb_out_op(struct dvb_usb_device *d, u8 req, u16 value,
d                  71 drivers/media/usb/dvb-usb/gp8psk.c 	struct gp8psk_state *st = d->priv;
d                  80 drivers/media/usb/dvb-usb/gp8psk.c 	if ((ret = mutex_lock_interruptible(&d->usb_mutex)))
d                  84 drivers/media/usb/dvb-usb/gp8psk.c 	if (usb_control_msg(d->udev,
d                  85 drivers/media/usb/dvb-usb/gp8psk.c 			usb_sndctrlpipe(d->udev,0),
d                  94 drivers/media/usb/dvb-usb/gp8psk.c 	mutex_unlock(&d->usb_mutex);
d                 100 drivers/media/usb/dvb-usb/gp8psk.c static int gp8psk_get_fw_version(struct dvb_usb_device *d, u8 *fw_vers)
d                 102 drivers/media/usb/dvb-usb/gp8psk.c 	return gp8psk_usb_in_op(d, GET_FW_VERS, 0, 0, fw_vers, 6);
d                 105 drivers/media/usb/dvb-usb/gp8psk.c static int gp8psk_get_fpga_version(struct dvb_usb_device *d, u8 *fpga_vers)
d                 107 drivers/media/usb/dvb-usb/gp8psk.c 	return gp8psk_usb_in_op(d, GET_FPGA_VERS, 0, 0, fpga_vers, 1);
d                 110 drivers/media/usb/dvb-usb/gp8psk.c static void gp8psk_info(struct dvb_usb_device *d)
d                 114 drivers/media/usb/dvb-usb/gp8psk.c 	if (!gp8psk_get_fw_version(d, fw_vers))
d                 121 drivers/media/usb/dvb-usb/gp8psk.c 	if (!gp8psk_get_fpga_version(d, &fpga_vers))
d                 127 drivers/media/usb/dvb-usb/gp8psk.c static int gp8psk_load_bcm4500fw(struct dvb_usb_device *d)
d                 134 drivers/media/usb/dvb-usb/gp8psk.c 					&d->udev->dev)) != 0) {
d                 142 drivers/media/usb/dvb-usb/gp8psk.c 	if (gp8psk_usb_out_op(d, LOAD_BCM4500,1,0,NULL, 0))
d                 166 drivers/media/usb/dvb-usb/gp8psk.c 		if (dvb_usb_generic_write(d, buf, buflen)) {
d                 183 drivers/media/usb/dvb-usb/gp8psk.c static int gp8psk_power_ctrl(struct dvb_usb_device *d, int onoff)
d                 186 drivers/media/usb/dvb-usb/gp8psk.c 	int gp_product_id = le16_to_cpu(d->udev->descriptor.idProduct);
d                 189 drivers/media/usb/dvb-usb/gp8psk.c 		gp8psk_usb_in_op(d, GET_8PSK_CONFIG,0,0,&status,1);
d                 192 drivers/media/usb/dvb-usb/gp8psk.c 				gp8psk_usb_out_op(d, CW3K_INIT, 1, 0, NULL, 0);
d                 193 drivers/media/usb/dvb-usb/gp8psk.c 			if (gp8psk_usb_in_op(d, BOOT_8PSK, 1, 0, &buf, 1))
d                 195 drivers/media/usb/dvb-usb/gp8psk.c 			gp8psk_info(d);
d                 200 drivers/media/usb/dvb-usb/gp8psk.c 				if(gp8psk_load_bcm4500fw(d))
d                 204 drivers/media/usb/dvb-usb/gp8psk.c 			if (gp8psk_usb_in_op(d, START_INTERSIL, 1, 0,
d                 210 drivers/media/usb/dvb-usb/gp8psk.c 			if (gp8psk_usb_out_op(d, SET_DVB_MODE, 1, 0, NULL, 0))
d                 213 drivers/media/usb/dvb-usb/gp8psk.c 		if (gp8psk_usb_out_op(d, ARM_TRANSFER, 0, 0, NULL, 0))
d                 217 drivers/media/usb/dvb-usb/gp8psk.c 		if (gp8psk_usb_in_op(d, START_INTERSIL, 0, 0, &buf, 1))
d                 220 drivers/media/usb/dvb-usb/gp8psk.c 		if (gp8psk_usb_in_op(d, BOOT_8PSK, 0, 0, &buf, 1))
d                 223 drivers/media/usb/dvb-usb/gp8psk.c 			gp8psk_usb_out_op(d, CW3K_INIT, 0, 0, NULL, 0);
d                 228 drivers/media/usb/dvb-usb/gp8psk.c static int gp8psk_bcm4500_reload(struct dvb_usb_device *d)
d                 231 drivers/media/usb/dvb-usb/gp8psk.c 	int gp_product_id = le16_to_cpu(d->udev->descriptor.idProduct);
d                 236 drivers/media/usb/dvb-usb/gp8psk.c 	if (gp8psk_usb_in_op(d, BOOT_8PSK, 0, 0, &buf, 1))
d                 239 drivers/media/usb/dvb-usb/gp8psk.c 	if (gp8psk_usb_in_op(d, BOOT_8PSK, 1, 0, &buf, 1))
d                 243 drivers/media/usb/dvb-usb/gp8psk.c 		if (gp8psk_load_bcm4500fw(d))
d                 258 drivers/media/usb/dvb-usb/gp8psk.c 	struct dvb_usb_device *d = priv;
d                 260 drivers/media/usb/dvb-usb/gp8psk.c 	return gp8psk_usb_in_op(d, req, value, index, b, blen);
d                 266 drivers/media/usb/dvb-usb/gp8psk.c 	struct dvb_usb_device *d = priv;
d                 268 drivers/media/usb/dvb-usb/gp8psk.c 	return gp8psk_usb_out_op(d, req, value, index, b, blen);
d                 273 drivers/media/usb/dvb-usb/gp8psk.c 	struct dvb_usb_device *d = priv;
d                 275 drivers/media/usb/dvb-usb/gp8psk.c 	return gp8psk_bcm4500_reload(d);
d                 286 drivers/media/usb/dvb-usb/gp8psk.c 	struct dvb_usb_device *d = adap->dev;
d                 287 drivers/media/usb/dvb-usb/gp8psk.c 	int id = le16_to_cpu(d->udev->descriptor.idProduct);
d                 293 drivers/media/usb/dvb-usb/gp8psk.c 					 &gp8psk_fe_ops, d, is_rev1);
d                  29 drivers/media/usb/dvb-usb/m920x.c static int m920x_set_filter(struct dvb_usb_device *d, int type, int idx, int pid);
d                  75 drivers/media/usb/dvb-usb/m920x.c static int m920x_init(struct dvb_usb_device *d, struct m920x_inits *rc_seq)
d                  81 drivers/media/usb/dvb-usb/m920x.c 	if (d->props.rc.legacy.rc_query || d->props.rc.core.rc_query) {
d                  83 drivers/media/usb/dvb-usb/m920x.c 		ret = m920x_write_seq(d->udev, M9206_CORE, rc_seq);
d                  92 drivers/media/usb/dvb-usb/m920x.c 	for (i = 0; i < d->props.num_adapters; i++)
d                  93 drivers/media/usb/dvb-usb/m920x.c 		flags |= d->adapter[i].props.fe[0].caps;
d                  97 drivers/media/usb/dvb-usb/m920x.c 		for (i = 0; i < d->props.num_adapters; i++) {
d                  98 drivers/media/usb/dvb-usb/m920x.c 			epi = d->adapter[i].props.fe[0].stream.endpoint - 0x81;
d                 112 drivers/media/usb/dvb-usb/m920x.c 			if ((ret = m920x_set_filter(d, 0x81 + i, 0, 0x0)) != 0)
d                 115 drivers/media/usb/dvb-usb/m920x.c 			if ((ret = m920x_set_filter(d, 0x81 + i, 0, 0x02f5)) != 0)
d                 137 drivers/media/usb/dvb-usb/m920x.c static inline void m920x_parse_rc_state(struct dvb_usb_device *d, u8 rc_state,
d                 140 drivers/media/usb/dvb-usb/m920x.c 	struct m920x_state *m = d->priv;
d                 179 drivers/media/usb/dvb-usb/m920x.c static int m920x_rc_query(struct dvb_usb_device *d, u32 *event, int *state)
d                 188 drivers/media/usb/dvb-usb/m920x.c 	ret = m920x_read(d->udev, M9206_CORE, 0x0, M9206_RC_STATE,
d                 193 drivers/media/usb/dvb-usb/m920x.c 	ret = m920x_read(d->udev, M9206_CORE, 0x0, M9206_RC_KEY,
d                 198 drivers/media/usb/dvb-usb/m920x.c 	m920x_parse_rc_state(d, rc_state[0], state);
d                 200 drivers/media/usb/dvb-usb/m920x.c 	for (i = 0; i < d->props.rc.legacy.rc_map_size; i++)
d                 201 drivers/media/usb/dvb-usb/m920x.c 		if (rc5_data(&d->props.rc.legacy.rc_map_table[i]) == rc_state[1]) {
d                 202 drivers/media/usb/dvb-usb/m920x.c 			*event = d->props.rc.legacy.rc_map_table[i].keycode;
d                 216 drivers/media/usb/dvb-usb/m920x.c static int m920x_rc_core_query(struct dvb_usb_device *d)
d                 226 drivers/media/usb/dvb-usb/m920x.c 	if ((ret = m920x_read(d->udev, M9206_CORE, 0x0, M9206_RC_STATE, &rc_state[0], 1)) != 0)
d                 229 drivers/media/usb/dvb-usb/m920x.c 	if ((ret = m920x_read(d->udev, M9206_CORE, 0x0, M9206_RC_KEY, &rc_state[1], 1)) != 0)
d                 234 drivers/media/usb/dvb-usb/m920x.c 	m920x_parse_rc_state(d, rc_state[0], &state);
d                 237 drivers/media/usb/dvb-usb/m920x.c 		rc_keyup(d->rc_dev);
d                 239 drivers/media/usb/dvb-usb/m920x.c 		rc_repeat(d->rc_dev);
d                 241 drivers/media/usb/dvb-usb/m920x.c 		rc_keydown(d->rc_dev, RC_PROTO_UNKNOWN, rc_state[1], 0);
d                 251 drivers/media/usb/dvb-usb/m920x.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 255 drivers/media/usb/dvb-usb/m920x.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 270 drivers/media/usb/dvb-usb/m920x.c 			if ((ret = m920x_write(d->udev, M9206_I2C,
d                 282 drivers/media/usb/dvb-usb/m920x.c 				if ((ret = m920x_read(d->udev, M9206_I2C, 0x0,
d                 292 drivers/media/usb/dvb-usb/m920x.c 				if ((ret = m920x_write(d->udev, M9206_I2C, msg[i].buf[j], stop)) != 0)
d                 301 drivers/media/usb/dvb-usb/m920x.c 	mutex_unlock(&d->i2c_mutex);
d                 317 drivers/media/usb/dvb-usb/m920x.c static int m920x_set_filter(struct dvb_usb_device *d, int type, int idx, int pid)
d                 326 drivers/media/usb/dvb-usb/m920x.c 	if ((ret = m920x_write(d->udev, M9206_FILTER, pid, (type << 8) | (idx * 4) )) != 0)
d                 329 drivers/media/usb/dvb-usb/m920x.c 	if ((ret = m920x_write(d->udev, M9206_FILTER, 0, (type << 8) | (idx * 4) )) != 0)
d                 815 drivers/media/usb/dvb-usb/m920x.c 	struct dvb_usb_device *d = NULL;
d                 828 drivers/media/usb/dvb-usb/m920x.c 					  THIS_MODULE, &d, adapter_nr);
d                 835 drivers/media/usb/dvb-usb/m920x.c 					  THIS_MODULE, &d, adapter_nr);
d                 843 drivers/media/usb/dvb-usb/m920x.c 					  THIS_MODULE, &d, adapter_nr);
d                 850 drivers/media/usb/dvb-usb/m920x.c 					  THIS_MODULE, &d, adapter_nr);
d                 857 drivers/media/usb/dvb-usb/m920x.c 					  THIS_MODULE, &d, adapter_nr);
d                 864 drivers/media/usb/dvb-usb/m920x.c 					  THIS_MODULE, &d, adapter_nr);
d                 884 drivers/media/usb/dvb-usb/m920x.c 	if (d && (ret = m920x_init(d, rc_init_seq)) != 0)
d                  72 drivers/media/usb/dvb-usb/nova-t-usb2.c static int nova_t_rc_query(struct dvb_usb_device *d, u32 *event, int *state)
d                  77 drivers/media/usb/dvb-usb/nova-t-usb2.c 	struct dibusb_device_state *st = d->priv;
d                  85 drivers/media/usb/dvb-usb/nova-t-usb2.c 	ret = dvb_usb_generic_rw(d, buf, 2, buf, 5, 0);
d                 131 drivers/media/usb/dvb-usb/nova-t-usb2.c static int nova_t_read_mac_address (struct dvb_usb_device *d, u8 mac[6])
d                 142 drivers/media/usb/dvb-usb/nova-t-usb2.c 		dibusb_read_eeprom_byte(d,i, &b);
d                 134 drivers/media/usb/dvb-usb/opera1.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 137 drivers/media/usb/dvb-usb/opera1.c 	if (!d)
d                 139 drivers/media/usb/dvb-usb/opera1.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 143 drivers/media/usb/dvb-usb/opera1.c 		if ((tmp = opera1_usb_i2c_msgxfer(d,
d                 153 drivers/media/usb/dvb-usb/opera1.c 	mutex_unlock(&d->i2c_mutex);
d                 262 drivers/media/usb/dvb-usb/opera1.c static int opera1_frontend_attach(struct dvb_usb_adapter *d)
d                 264 drivers/media/usb/dvb-usb/opera1.c 	d->fe_adap[0].fe = dvb_attach(stv0299_attach, &opera1_stv0299_config,
d                 265 drivers/media/usb/dvb-usb/opera1.c 				      &d->dev->i2c_adap);
d                 266 drivers/media/usb/dvb-usb/opera1.c 	if ((d->fe_adap[0].fe) != NULL) {
d                 267 drivers/media/usb/dvb-usb/opera1.c 		d->fe_adap[0].fe->ops.set_voltage = opera1_set_voltage;
d                 283 drivers/media/usb/dvb-usb/opera1.c static int opera1_power_ctrl(struct dvb_usb_device *d, int onoff)
d                 289 drivers/media/usb/dvb-usb/opera1.c 	return opera1_xilinx_rw(d->udev, 0xb7, val,
d                 436 drivers/media/usb/dvb-usb/opera1.c static int opera1_read_mac_address(struct dvb_usb_device *d, u8 mac[6])
d                 439 drivers/media/usb/dvb-usb/opera1.c 	opera1_xilinx_rw(d->udev, 0xb1, 0xa0, command, 1, OPERA_WRITE_MSG);
d                 440 drivers/media/usb/dvb-usb/opera1.c 	opera1_xilinx_rw(d->udev, 0xb1, 0xa1, mac, 6, OPERA_READ_MSG);
d                  98 drivers/media/usb/dvb-usb/pctv452e.c static int tt3650_ci_msg(struct dvb_usb_device *d, u8 cmd, u8 *data,
d                 101 drivers/media/usb/dvb-usb/pctv452e.c 	struct pctv452e_state *state = (struct pctv452e_state *)d->priv;
d                 126 drivers/media/usb/dvb-usb/pctv452e.c 	ret = dvb_usb_generic_rw(d, buf, 4 + write_len,
d                 152 drivers/media/usb/dvb-usb/pctv452e.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 153 drivers/media/usb/dvb-usb/pctv452e.c 	struct pctv452e_state *state = (struct pctv452e_state *)d->priv;
d                 157 drivers/media/usb/dvb-usb/pctv452e.c 	ret = tt3650_ci_msg(d, cmd, data, write_len, read_len);
d                 285 drivers/media/usb/dvb-usb/pctv452e.c 	struct dvb_usb_device *d = (struct dvb_usb_device *)ca->data;
d                 286 drivers/media/usb/dvb-usb/pctv452e.c 	struct pctv452e_state *state = (struct pctv452e_state *)d->priv;
d                 299 drivers/media/usb/dvb-usb/pctv452e.c 	ret = tt3650_ci_msg(d, TT3650_CMD_CI_RESET, buf, 1, 1);
d                 307 drivers/media/usb/dvb-usb/pctv452e.c 	ret = tt3650_ci_msg(d, TT3650_CMD_CI_RESET, buf, 1, 1);
d                 315 drivers/media/usb/dvb-usb/pctv452e.c 	ret = tt3650_ci_msg(d, TT3650_CMD_CI_SET_VIDEO_PORT, buf, 1, 1);
d                 345 drivers/media/usb/dvb-usb/pctv452e.c static void tt3650_ci_uninit(struct dvb_usb_device *d)
d                 351 drivers/media/usb/dvb-usb/pctv452e.c 	if (NULL == d)
d                 354 drivers/media/usb/dvb-usb/pctv452e.c 	state = (struct pctv452e_state *)d->priv;
d                 371 drivers/media/usb/dvb-usb/pctv452e.c 	struct dvb_usb_device *d = a->dev;
d                 372 drivers/media/usb/dvb-usb/pctv452e.c 	struct pctv452e_state *state = (struct pctv452e_state *)d->priv;
d                 388 drivers/media/usb/dvb-usb/pctv452e.c 	state->ca.data = d;
d                 406 drivers/media/usb/dvb-usb/pctv452e.c static int pctv452e_i2c_msg(struct dvb_usb_device *d, u8 addr,
d                 410 drivers/media/usb/dvb-usb/pctv452e.c 	struct pctv452e_state *state = (struct pctv452e_state *)d->priv;
d                 435 drivers/media/usb/dvb-usb/pctv452e.c 	ret = dvb_usb_generic_rw(d, buf, 7 + snd_len,
d                 468 drivers/media/usb/dvb-usb/pctv452e.c 	struct dvb_usb_device *d = i2c_get_adapdata(adapter);
d                 471 drivers/media/usb/dvb-usb/pctv452e.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 492 drivers/media/usb/dvb-usb/pctv452e.c 		ret = pctv452e_i2c_msg(d, addr, snd_buf, snd_len, rcv_buf,
d                 498 drivers/media/usb/dvb-usb/pctv452e.c 	mutex_unlock(&d->i2c_mutex);
d                 507 drivers/media/usb/dvb-usb/pctv452e.c static int pctv452e_power_ctrl(struct dvb_usb_device *d, int i)
d                 509 drivers/media/usb/dvb-usb/pctv452e.c 	struct pctv452e_state *state = (struct pctv452e_state *)d->priv;
d                 528 drivers/media/usb/dvb-usb/pctv452e.c 	ret = usb_set_interface(d->udev, 0, ISOC_INTERFACE_ALTERNATIVE);
d                 540 drivers/media/usb/dvb-usb/pctv452e.c 	ret = dvb_usb_generic_rw(d, b0, 5, rx, PCTV_ANSWER_LEN, 0);
d                 547 drivers/media/usb/dvb-usb/pctv452e.c 	ret = dvb_usb_generic_rw(d, b0, 5, rx, PCTV_ANSWER_LEN, 0);
d                 558 drivers/media/usb/dvb-usb/pctv452e.c static int pctv452e_rc_query(struct dvb_usb_device *d)
d                 560 drivers/media/usb/dvb-usb/pctv452e.c 	struct pctv452e_state *state = (struct pctv452e_state *)d->priv;
d                 580 drivers/media/usb/dvb-usb/pctv452e.c 	ret = dvb_usb_generic_rw(d, b, 4, rx, PCTV_ANSWER_LEN, 0);
d                 599 drivers/media/usb/dvb-usb/pctv452e.c 		rc_keydown(d->rc_dev, RC_PROTO_RC5, state->last_rc_key, 0);
d                 601 drivers/media/usb/dvb-usb/pctv452e.c 		rc_keyup(d->rc_dev);
d                 609 drivers/media/usb/dvb-usb/pctv452e.c static int pctv452e_read_mac_address(struct dvb_usb_device *d, u8 mac[6])
d                 616 drivers/media/usb/dvb-usb/pctv452e.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 619 drivers/media/usb/dvb-usb/pctv452e.c 	ret = pctv452e_i2c_msg(d, I2C_ADDR_24C16,
d                 625 drivers/media/usb/dvb-usb/pctv452e.c 		ret = pctv452e_i2c_msg(d, I2C_ADDR_24C64,
d                 629 drivers/media/usb/dvb-usb/pctv452e.c 	mutex_unlock(&d->i2c_mutex);
d                1059 drivers/media/usb/dvb-usb/pctv452e.c 	struct dvb_usb_device *d = usb_get_intfdata(intf);
d                1061 drivers/media/usb/dvb-usb/pctv452e.c 	tt3650_ci_uninit(d);
d                 165 drivers/media/usb/dvb-usb/technisat-usb2.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 169 drivers/media/usb/dvb-usb/technisat-usb2.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 174 drivers/media/usb/dvb-usb/technisat-usb2.c 			ret = technisat_usb2_i2c_access(d->udev, msg[i+1].addr,
d                 181 drivers/media/usb/dvb-usb/technisat-usb2.c 			ret = technisat_usb2_i2c_access(d->udev, msg[i].addr,
d                 192 drivers/media/usb/dvb-usb/technisat-usb2.c 	mutex_unlock(&d->i2c_mutex);
d                 226 drivers/media/usb/dvb-usb/technisat-usb2.c static int technisat_usb2_set_led(struct dvb_usb_device *d, int red,
d                 229 drivers/media/usb/dvb-usb/technisat-usb2.c 	struct technisat_usb2_state *state = d->priv;
d                 262 drivers/media/usb/dvb-usb/technisat-usb2.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 265 drivers/media/usb/dvb-usb/technisat-usb2.c 	ret = usb_control_msg(d->udev, usb_sndctrlpipe(d->udev, 0),
d                 271 drivers/media/usb/dvb-usb/technisat-usb2.c 	mutex_unlock(&d->i2c_mutex);
d                 275 drivers/media/usb/dvb-usb/technisat-usb2.c static int technisat_usb2_set_led_timer(struct dvb_usb_device *d, u8 red, u8 green)
d                 277 drivers/media/usb/dvb-usb/technisat-usb2.c 	struct technisat_usb2_state *state = d->priv;
d                 283 drivers/media/usb/dvb-usb/technisat-usb2.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 286 drivers/media/usb/dvb-usb/technisat-usb2.c 	ret = usb_control_msg(d->udev, usb_sndctrlpipe(d->udev, 0),
d                 292 drivers/media/usb/dvb-usb/technisat-usb2.c 	mutex_unlock(&d->i2c_mutex);
d                 370 drivers/media/usb/dvb-usb/technisat-usb2.c static int technisat_usb2_power_ctrl(struct dvb_usb_device *d, int level)
d                 372 drivers/media/usb/dvb-usb/technisat-usb2.c 	struct technisat_usb2_state *state = d->priv;
d                 380 drivers/media/usb/dvb-usb/technisat-usb2.c 	technisat_usb2_set_led(d, 0, TECH_LED_OFF);
d                 382 drivers/media/usb/dvb-usb/technisat-usb2.c 	technisat_usb2_set_led(d, 1, TECH_LED_ON);
d                 388 drivers/media/usb/dvb-usb/technisat-usb2.c static void technisat_usb2_eeprom_dump(struct dvb_usb_device *d)
d                 397 drivers/media/usb/dvb-usb/technisat-usb2.c 		if (technisat_usb2_i2c_access(d->udev, 0x50 + j / 256, &reg, 1, b, 16) != 0)
d                 416 drivers/media/usb/dvb-usb/technisat-usb2.c static int technisat_usb2_eeprom_lrc_read(struct dvb_usb_device *d,
d                 436 drivers/media/usb/dvb-usb/technisat-usb2.c 		if (i2c_transfer(&d->i2c_adap, msg, 2) != 2)
d                 451 drivers/media/usb/dvb-usb/technisat-usb2.c static int technisat_usb2_read_mac_address(struct dvb_usb_device *d,
d                 456 drivers/media/usb/dvb-usb/technisat-usb2.c 	if (technisat_usb2_eeprom_lrc_read(d, EEPROM_MAC_START,
d                 608 drivers/media/usb/dvb-usb/technisat-usb2.c static int technisat_usb2_get_ir(struct dvb_usb_device *d)
d                 610 drivers/media/usb/dvb-usb/technisat-usb2.c 	struct technisat_usb2_state *state = d->priv;
d                 621 drivers/media/usb/dvb-usb/technisat-usb2.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 623 drivers/media/usb/dvb-usb/technisat-usb2.c 	ret = usb_control_msg(d->udev, usb_sndctrlpipe(d->udev, 0),
d                 633 drivers/media/usb/dvb-usb/technisat-usb2.c 	ret = usb_control_msg(d->udev, usb_rcvctrlpipe(d->udev, 0),
d                 640 drivers/media/usb/dvb-usb/technisat-usb2.c 	mutex_unlock(&d->i2c_mutex);
d                 660 drivers/media/usb/dvb-usb/technisat-usb2.c 			ir_raw_event_store(d->rc_dev, &ev);
d                 667 drivers/media/usb/dvb-usb/technisat-usb2.c 		ir_raw_event_store(d->rc_dev, &ev);
d                 670 drivers/media/usb/dvb-usb/technisat-usb2.c 	ir_raw_event_handle(d->rc_dev);
d                 675 drivers/media/usb/dvb-usb/technisat-usb2.c static int technisat_usb2_rc_query(struct dvb_usb_device *d)
d                 677 drivers/media/usb/dvb-usb/technisat-usb2.c 	int ret = technisat_usb2_get_ir(d);
d                 686 drivers/media/usb/dvb-usb/technisat-usb2.c 		technisat_usb2_set_led(d, 1, TECH_LED_BLINK);
d                  71 drivers/media/usb/dvb-usb/ttusb2.c static int ttusb2_msg(struct dvb_usb_device *d, u8 cmd,
d                  74 drivers/media/usb/dvb-usb/ttusb2.c 	struct ttusb2_state *st = d->priv;
d                  97 drivers/media/usb/dvb-usb/ttusb2.c 	ret = dvb_usb_generic_rw(d, s, wlen+4, r, 64, 0);
d                 120 drivers/media/usb/dvb-usb/ttusb2.c static int tt3650_ci_msg(struct dvb_usb_device *d, u8 cmd, u8 *data, unsigned int write_len, unsigned int read_len)
d                 124 drivers/media/usb/dvb-usb/ttusb2.c 	ret = ttusb2_msg(d, cmd, data, write_len, rx, read_len);
d                 132 drivers/media/usb/dvb-usb/ttusb2.c 	struct dvb_usb_device *d = ca->data;
d                 133 drivers/media/usb/dvb-usb/ttusb2.c 	struct ttusb2_state *state = d->priv;
d                 137 drivers/media/usb/dvb-usb/ttusb2.c 	ret = tt3650_ci_msg(d, cmd, data, write_len, read_len);
d                 252 drivers/media/usb/dvb-usb/ttusb2.c 	struct dvb_usb_device *d = ca->data;
d                 253 drivers/media/usb/dvb-usb/ttusb2.c 	struct ttusb2_state *state = d->priv;
d                 266 drivers/media/usb/dvb-usb/ttusb2.c 	ret = tt3650_ci_msg(d, TT3650_CMD_CI_RESET, buf, 1, 1);
d                 274 drivers/media/usb/dvb-usb/ttusb2.c 	ret = tt3650_ci_msg(d, TT3650_CMD_CI_RESET, buf, 1, 1);
d                 282 drivers/media/usb/dvb-usb/ttusb2.c 	ret = tt3650_ci_msg(d, TT3650_CMD_CI_SET_VIDEO_PORT, buf, 1, 1);
d                 311 drivers/media/usb/dvb-usb/ttusb2.c static void tt3650_ci_uninit(struct dvb_usb_device *d)
d                 317 drivers/media/usb/dvb-usb/ttusb2.c 	if (NULL == d)
d                 320 drivers/media/usb/dvb-usb/ttusb2.c 	state = d->priv;
d                 334 drivers/media/usb/dvb-usb/ttusb2.c 	struct dvb_usb_device *d = a->dev;
d                 335 drivers/media/usb/dvb-usb/ttusb2.c 	struct ttusb2_state *state = d->priv;
d                 351 drivers/media/usb/dvb-usb/ttusb2.c 	state->ca.data = d;
d                 370 drivers/media/usb/dvb-usb/ttusb2.c 	struct dvb_usb_device *d = i2c_get_adapdata(adap);
d                 374 drivers/media/usb/dvb-usb/ttusb2.c 	if (mutex_lock_interruptible(&d->i2c_mutex) < 0)
d                 416 drivers/media/usb/dvb-usb/ttusb2.c 		if (ttusb2_msg(d, CMD_I2C_XFER, obuf, obuf[1]+3, ibuf, obuf[2] + 3) < 0) {
d                 428 drivers/media/usb/dvb-usb/ttusb2.c 	mutex_unlock(&d->i2c_mutex);
d                 446 drivers/media/usb/dvb-usb/ttusb2.c static int tt3650_rc_query(struct dvb_usb_device *d)
d                 450 drivers/media/usb/dvb-usb/ttusb2.c 	struct ttusb2_state *st = d->priv;
d                 451 drivers/media/usb/dvb-usb/ttusb2.c 	ret = ttusb2_msg(d, CMD_GET_IR_CODE, NULL, 0, rx, sizeof(rx));
d                 459 drivers/media/usb/dvb-usb/ttusb2.c 		rc_keydown(d->rc_dev, RC_PROTO_RC5, st->last_rc_key, rx[1]);
d                 461 drivers/media/usb/dvb-usb/ttusb2.c 		rc_keyup(d->rc_dev);
d                 478 drivers/media/usb/dvb-usb/ttusb2.c static int ttusb2_power_ctrl(struct dvb_usb_device *d, int onoff)
d                 481 drivers/media/usb/dvb-usb/ttusb2.c 	ttusb2_msg(d, CMD_POWER, &b, 0, NULL, 0);
d                 482 drivers/media/usb/dvb-usb/ttusb2.c 	return ttusb2_msg(d, CMD_POWER, &b, 1, NULL, 0);
d                 613 drivers/media/usb/dvb-usb/ttusb2.c 	struct dvb_usb_device *d = usb_get_intfdata(intf);
d                 615 drivers/media/usb/dvb-usb/ttusb2.c 	tt3650_ci_uninit(d);
d                  21 drivers/media/usb/dvb-usb/vp702x-fe.c 	struct dvb_usb_device *d;
d                  40 drivers/media/usb/dvb-usb/vp702x-fe.c 	struct vp702x_device_state *dst = st->d->priv;
d                  47 drivers/media/usb/dvb-usb/vp702x-fe.c 		vp702x_usb_in_op(st->d, READ_STATUS, 0, 0, buf, 10);
d                  50 drivers/media/usb/dvb-usb/vp702x-fe.c 		vp702x_usb_in_op(st->d, READ_TUNER_REG_REQ, 0x11, 0, buf, 1);
d                  53 drivers/media/usb/dvb-usb/vp702x-fe.c 		vp702x_usb_in_op(st->d, READ_TUNER_REG_REQ, 0x15, 0, buf, 1);
d                 139 drivers/media/usb/dvb-usb/vp702x-fe.c 	struct vp702x_device_state *dst = st->d->priv;
d                 185 drivers/media/usb/dvb-usb/vp702x-fe.c 	vp702x_usb_inout_op(st->d, cmd, 8, cmd, 10, 100);
d                 201 drivers/media/usb/dvb-usb/vp702x-fe.c 	vp702x_usb_in_op(st->d, RESET_TUNER, 0, 0, NULL, 0);
d                 216 drivers/media/usb/dvb-usb/vp702x-fe.c 	struct vp702x_device_state *dst = st->d->priv;
d                 231 drivers/media/usb/dvb-usb/vp702x-fe.c 	vp702x_usb_inout_op(st->d, cmd, 8, cmd, 10, 100);
d                 254 drivers/media/usb/dvb-usb/vp702x-fe.c 	struct vp702x_device_state *dst = st->d->priv;
d                 273 drivers/media/usb/dvb-usb/vp702x-fe.c 	vp702x_usb_inout_op(st->d, buf, 8, buf, 10, 100);
d                 288 drivers/media/usb/dvb-usb/vp702x-fe.c 	struct vp702x_device_state *dst = st->d->priv;
d                 306 drivers/media/usb/dvb-usb/vp702x-fe.c 	vp702x_usb_inout_op(st->d, buf, 8, buf, 10, 100);
d                 324 drivers/media/usb/dvb-usb/vp702x-fe.c struct dvb_frontend * vp702x_fe_attach(struct dvb_usb_device *d)
d                 330 drivers/media/usb/dvb-usb/vp702x-fe.c 	s->d = d;
d                  30 drivers/media/usb/dvb-usb/vp702x.c static int vp702x_usb_in_op_unlocked(struct dvb_usb_device *d, u8 req,
d                  35 drivers/media/usb/dvb-usb/vp702x.c 	ret = usb_control_msg(d->udev,
d                  36 drivers/media/usb/dvb-usb/vp702x.c 		usb_rcvctrlpipe(d->udev, 0),
d                  55 drivers/media/usb/dvb-usb/vp702x.c int vp702x_usb_in_op(struct dvb_usb_device *d, u8 req, u16 value,
d                  60 drivers/media/usb/dvb-usb/vp702x.c 	mutex_lock(&d->usb_mutex);
d                  61 drivers/media/usb/dvb-usb/vp702x.c 	ret = vp702x_usb_in_op_unlocked(d, req, value, index, b, blen);
d                  62 drivers/media/usb/dvb-usb/vp702x.c 	mutex_unlock(&d->usb_mutex);
d                  67 drivers/media/usb/dvb-usb/vp702x.c static int vp702x_usb_out_op_unlocked(struct dvb_usb_device *d, u8 req,
d                  74 drivers/media/usb/dvb-usb/vp702x.c 	if ((ret = usb_control_msg(d->udev,
d                  75 drivers/media/usb/dvb-usb/vp702x.c 			usb_sndctrlpipe(d->udev,0),
d                  86 drivers/media/usb/dvb-usb/vp702x.c static int vp702x_usb_out_op(struct dvb_usb_device *d, u8 req, u16 value,
d                  91 drivers/media/usb/dvb-usb/vp702x.c 	mutex_lock(&d->usb_mutex);
d                  92 drivers/media/usb/dvb-usb/vp702x.c 	ret = vp702x_usb_out_op_unlocked(d, req, value, index, b, blen);
d                  93 drivers/media/usb/dvb-usb/vp702x.c 	mutex_unlock(&d->usb_mutex);
d                  98 drivers/media/usb/dvb-usb/vp702x.c int vp702x_usb_inout_op(struct dvb_usb_device *d, u8 *o, int olen, u8 *i, int ilen, int msec)
d                 102 drivers/media/usb/dvb-usb/vp702x.c 	if ((ret = mutex_lock_interruptible(&d->usb_mutex)))
d                 105 drivers/media/usb/dvb-usb/vp702x.c 	ret = vp702x_usb_out_op_unlocked(d, REQUEST_OUT, 0, 0, o, olen);
d                 107 drivers/media/usb/dvb-usb/vp702x.c 	ret = vp702x_usb_in_op_unlocked(d, REQUEST_IN, 0, 0, i, ilen);
d                 109 drivers/media/usb/dvb-usb/vp702x.c 	mutex_unlock(&d->usb_mutex);
d                 113 drivers/media/usb/dvb-usb/vp702x.c static int vp702x_usb_inout_cmd(struct dvb_usb_device *d, u8 cmd, u8 *o,
d                 116 drivers/media/usb/dvb-usb/vp702x.c 	struct vp702x_device_state *st = d->priv;
d                 143 drivers/media/usb/dvb-usb/vp702x.c 	ret = vp702x_usb_inout_op(d, buf, olen+2, buf, ilen+1, msec);
d                 257 drivers/media/usb/dvb-usb/vp702x.c static int vp702x_rc_query(struct dvb_usb_device *d, u32 *event, int *state)
d                 268 drivers/media/usb/dvb-usb/vp702x.c 	vp702x_usb_in_op(d,READ_REMOTE_REQ,0,0,key,10);
d                 291 drivers/media/usb/dvb-usb/vp702x.c static int vp702x_read_mac_addr(struct dvb_usb_device *d,u8 mac[6])
d                 294 drivers/media/usb/dvb-usb/vp702x.c 	struct vp702x_device_state *st = d->priv;
d                 299 drivers/media/usb/dvb-usb/vp702x.c 		vp702x_usb_in_op(d, READ_EEPROM_REQ, i, 1, &buf[i - 6], 1);
d                 332 drivers/media/usb/dvb-usb/vp702x.c 	struct dvb_usb_device *d;
d                 337 drivers/media/usb/dvb-usb/vp702x.c 				   THIS_MODULE, &d, adapter_nr);
d                 341 drivers/media/usb/dvb-usb/vp702x.c 	st = d->priv;
d                 358 drivers/media/usb/dvb-usb/vp702x.c 	struct dvb_usb_device *d = usb_get_intfdata(intf);
d                 359 drivers/media/usb/dvb-usb/vp702x.c 	struct vp702x_device_state *st = d->priv;
d                 109 drivers/media/usb/dvb-usb/vp702x.h extern struct dvb_frontend * vp702x_fe_attach(struct dvb_usb_device *d);
d                 111 drivers/media/usb/dvb-usb/vp702x.h extern int vp702x_usb_inout_op(struct dvb_usb_device *d, u8 *o, int olen, u8 *i, int ilen, int msec);
d                 112 drivers/media/usb/dvb-usb/vp702x.h extern int vp702x_usb_in_op(struct dvb_usb_device *d, u8 req, u16 value, u16 index, u8 *b, int blen);
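
A note on the vp702x lines above: the raw control transfers are the *_unlocked helpers, vp702x_usb_in_op()/vp702x_usb_out_op() wrap a single transfer in d->usb_mutex, and vp702x_usb_inout_op() holds that same mutex across the command write and the reply read so another caller cannot interleave a transfer between them. A minimal user-space sketch of the same locking layout, assuming made-up dev_out()/dev_in() helpers in place of the real usb_control_msg() calls:

	#include <pthread.h>
	#include <stdio.h>

	/* hypothetical stand-ins for the device's control transfers */
	static int dev_out(const unsigned char *o, int olen) { (void)o; return olen; }
	static int dev_in(unsigned char *i, int ilen) { (void)i; return ilen; }

	static pthread_mutex_t usb_mutex = PTHREAD_MUTEX_INITIALIZER;

	/* single transfer: take the lock around one unlocked call */
	static int usb_out_op(const unsigned char *o, int olen)
	{
		int ret;

		pthread_mutex_lock(&usb_mutex);
		ret = dev_out(o, olen);
		pthread_mutex_unlock(&usb_mutex);
		return ret;
	}

	/* command/response pair: hold the lock across both transfers */
	static int usb_inout_op(const unsigned char *o, int olen,
				unsigned char *i, int ilen)
	{
		int ret;

		pthread_mutex_lock(&usb_mutex);
		ret = dev_out(o, olen);
		if (ret >= 0)
			ret = dev_in(i, ilen);
		pthread_mutex_unlock(&usb_mutex);
		return ret;
	}

	int main(void)
	{
		unsigned char cmd[8] = { 0 }, reply[10];

		printf("%d %d\n", usb_out_op(cmd, sizeof(cmd)),
		       usb_inout_op(cmd, sizeof(cmd), reply, sizeof(reply)));
		return 0;
	}
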
d                  22 drivers/media/usb/dvb-usb/vp7045-fe.c 	struct dvb_usb_device *d;
d                  29 drivers/media/usb/dvb-usb/vp7045-fe.c 	u8 s0 = vp7045_read_reg(state->d,0x00),
d                  30 drivers/media/usb/dvb-usb/vp7045-fe.c 	   s1 = vp7045_read_reg(state->d,0x01),
d                  31 drivers/media/usb/dvb-usb/vp7045-fe.c 	   s3 = vp7045_read_reg(state->d,0x03);
d                  55 drivers/media/usb/dvb-usb/vp7045-fe.c 	*ber = (vp7045_read_reg(state->d, 0x0D) << 16) |
d                  56 drivers/media/usb/dvb-usb/vp7045-fe.c 	       (vp7045_read_reg(state->d, 0x0E) << 8) |
d                  57 drivers/media/usb/dvb-usb/vp7045-fe.c 		vp7045_read_reg(state->d, 0x0F);
d                  64 drivers/media/usb/dvb-usb/vp7045-fe.c 	*unc = (vp7045_read_reg(state->d, 0x10) << 8) |
d                  65 drivers/media/usb/dvb-usb/vp7045-fe.c 		    vp7045_read_reg(state->d, 0x11);
d                  72 drivers/media/usb/dvb-usb/vp7045-fe.c 	u16 signal = (vp7045_read_reg(state->d, 0x14) << 8) |
d                  73 drivers/media/usb/dvb-usb/vp7045-fe.c 		vp7045_read_reg(state->d, 0x15);
d                  82 drivers/media/usb/dvb-usb/vp7045-fe.c 	u8 _snr = vp7045_read_reg(state->d, 0x09);
d                 129 drivers/media/usb/dvb-usb/vp7045-fe.c 	vp7045_usb_op(state->d,LOCK_TUNER_COMMAND,buf,5,NULL,0,200);
d                 141 drivers/media/usb/dvb-usb/vp7045-fe.c struct dvb_frontend * vp7045_fe_attach(struct dvb_usb_device *d)
d                 147 drivers/media/usb/dvb-usb/vp7045-fe.c 	s->d = d;
d                  25 drivers/media/usb/dvb-usb/vp7045.c int vp7045_usb_op(struct dvb_usb_device *d, u8 cmd, u8 *out, int outlen, u8 *in, int inlen, int msec)
d                  28 drivers/media/usb/dvb-usb/vp7045.c 	u8 *buf = d->priv;
d                  38 drivers/media/usb/dvb-usb/vp7045.c 	ret = mutex_lock_interruptible(&d->usb_mutex);
d                  49 drivers/media/usb/dvb-usb/vp7045.c 	if (usb_control_msg(d->udev,
d                  50 drivers/media/usb/dvb-usb/vp7045.c 			usb_sndctrlpipe(d->udev,0),
d                  60 drivers/media/usb/dvb-usb/vp7045.c 	if (usb_control_msg(d->udev,
d                  61 drivers/media/usb/dvb-usb/vp7045.c 			usb_rcvctrlpipe(d->udev,0),
d                  76 drivers/media/usb/dvb-usb/vp7045.c 	mutex_unlock(&d->usb_mutex);
d                  81 drivers/media/usb/dvb-usb/vp7045.c u8 vp7045_read_reg(struct dvb_usb_device *d, u8 reg)
d                  86 drivers/media/usb/dvb-usb/vp7045.c 	vp7045_usb_op(d,TUNER_REG_READ,obuf,2,&v,1,30);
d                  91 drivers/media/usb/dvb-usb/vp7045.c static int vp7045_power_ctrl(struct dvb_usb_device *d, int onoff)
d                  94 drivers/media/usb/dvb-usb/vp7045.c 	return vp7045_usb_op(d,SET_TUNER_POWER,&v,1,NULL,0,150);
d                  97 drivers/media/usb/dvb-usb/vp7045.c static int vp7045_rc_query(struct dvb_usb_device *d)
d                 102 drivers/media/usb/dvb-usb/vp7045.c 	ret = vp7045_usb_op(d, RC_VAL_READ, NULL, 0, &key, 1, 20);
d                 114 drivers/media/usb/dvb-usb/vp7045.c 		rc_keydown(d->rc_dev, RC_PROTO_NEC, RC_SCANCODE_NEC(0, key), 0);
d                 120 drivers/media/usb/dvb-usb/vp7045.c static int vp7045_read_eeprom(struct dvb_usb_device *d,u8 *buf, int len, int offset)
d                 126 drivers/media/usb/dvb-usb/vp7045.c 		ret = vp7045_usb_op(d, GET_EE_VALUE, &v, 1, br, 2, 5);
d                 137 drivers/media/usb/dvb-usb/vp7045.c static int vp7045_read_mac_addr(struct dvb_usb_device *d,u8 mac[6])
d                 139 drivers/media/usb/dvb-usb/vp7045.c 	return vp7045_read_eeprom(d,mac, 6, MAC_0_ADDR);
d                  63 drivers/media/usb/dvb-usb/vp7045.h extern struct dvb_frontend * vp7045_fe_attach(struct dvb_usb_device *d);
d                  64 drivers/media/usb/dvb-usb/vp7045.h extern int vp7045_usb_op(struct dvb_usb_device *d, u8 cmd, u8 *out, int outlen, u8 *in, int inlen,int msec);
d                  65 drivers/media/usb/dvb-usb/vp7045.h extern u8 vp7045_read_reg(struct dvb_usb_device *d, u8 reg);
d                 784 drivers/media/usb/em28xx/em28xx.h #define kref_to_dev(d) container_of(d, struct em28xx, ref)
d                 486 drivers/media/usb/go7007/go7007-v4l2.c 	unsigned int n, d;
d                 493 drivers/media/usb/go7007/go7007-v4l2.c 	d = 1001 * parm->parm.capture.timeperframe.denominator;
d                 494 drivers/media/usb/go7007/go7007-v4l2.c 	if (n != 0 && d != 0 && n > d)
d                 495 drivers/media/usb/go7007/go7007-v4l2.c 		go->fps_scale = (n + d/2) / d;
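
The go7007 line above computes go->fps_scale with the usual rounded integer division, (n + d/2) / d. A tiny illustration with arbitrary numbers:

	#include <stdio.h>

	/* integer division rounded to the nearest whole number */
	static unsigned int div_round(unsigned int n, unsigned int d)
	{
		return (n + d / 2) / d;
	}

	int main(void)
	{
		printf("%u %u\n", div_round(7, 2), div_round(9, 4));	/* 4 and 2 */
		return 0;
	}
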
d                 438 drivers/media/usb/gspca/cpia1.c 		      u8 a, u8 b, u8 c, u8 d)
d                 468 drivers/media/usb/gspca/cpia1.c 	cmd[5] = d;
d                 560 drivers/media/usb/gspca/cpia1.c 			       u8 a, u8 b, u8 c, u8 d,
d                 571 drivers/media/usb/gspca/cpia1.c 	cmd[5] = d;
d                  77 drivers/media/usb/gspca/spca501.c #define SPCA501_PROP_COMP_ENABLE(d) ((d) & 1)
d                  78 drivers/media/usb/gspca/spca501.c #define SPCA501_PROP_SNAP(d) ((d) & 0x40)
d                  79 drivers/media/usb/gspca/spca501.c #define SPCA501_PROP_SNAP_CTRL(d) ((d) & 0x10)
d                  80 drivers/media/usb/gspca/spca501.c #define SPCA501_PROP_COMP_THRESH(d) (((d) & 0x0e) >> 1)
d                  81 drivers/media/usb/gspca/spca501.c #define SPCA501_PROP_COMP_QUANT(d) (((d) & 0x70) >> 4)
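
The spca501 property macros above are plain mask-and-shift decoders of a single property byte d. A standalone check of what they extract (0x5a is an arbitrary example value, not taken from the driver):

	#include <stdio.h>

	#define SPCA501_PROP_COMP_ENABLE(d)	((d) & 1)
	#define SPCA501_PROP_COMP_THRESH(d)	(((d) & 0x0e) >> 1)
	#define SPCA501_PROP_COMP_QUANT(d)	(((d) & 0x70) >> 4)

	int main(void)
	{
		unsigned char d = 0x5a;	/* 0101 1010 */

		printf("enable=%u thresh=%u quant=%u\n",
		       SPCA501_PROP_COMP_ENABLE(d),
		       SPCA501_PROP_COMP_THRESH(d),
		       SPCA501_PROP_COMP_QUANT(d));	/* enable=0 thresh=5 quant=5 */
		return 0;
	}
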
d                   9 drivers/media/usb/pvrusb2/pvrusb2-util.h #define PVR2_DECOMPOSE_LE(t,i,d) \
d                  11 drivers/media/usb/pvrusb2/pvrusb2-util.h 	(t)[i] = (d) & 0xff;\
d                  12 drivers/media/usb/pvrusb2/pvrusb2-util.h 	(t)[i+1] = ((d) >> 8) & 0xff;\
d                  13 drivers/media/usb/pvrusb2/pvrusb2-util.h 	(t)[i+2] = ((d) >> 16) & 0xff;\
d                  14 drivers/media/usb/pvrusb2/pvrusb2-util.h 	(t)[i+3] = ((d) >> 24) & 0xff;\
d                  17 drivers/media/usb/pvrusb2/pvrusb2-util.h #define PVR2_DECOMPOSE_BE(t,i,d) \
d                  19 drivers/media/usb/pvrusb2/pvrusb2-util.h 	(t)[i+3] = (d) & 0xff;\
d                  20 drivers/media/usb/pvrusb2/pvrusb2-util.h 	(t)[i+2] = ((d) >> 8) & 0xff;\
d                  21 drivers/media/usb/pvrusb2/pvrusb2-util.h 	(t)[i+1] = ((d) >> 16) & 0xff;\
d                  22 drivers/media/usb/pvrusb2/pvrusb2-util.h 	(t)[i] = ((d) >> 24) & 0xff;\
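
The two pvrusb2 macros above split a 32-bit value d into four bytes of a command buffer, little-endian and big-endian respectively. A small standalone check of what they produce (the macros are re-declared with a do/while wrapper so the sketch compiles on its own; the buffer and value are arbitrary examples):

	#include <stdio.h>

	#define PVR2_DECOMPOSE_LE(t, i, d) do {		\
		(t)[i]   = (d) & 0xff;			\
		(t)[i+1] = ((d) >> 8) & 0xff;		\
		(t)[i+2] = ((d) >> 16) & 0xff;		\
		(t)[i+3] = ((d) >> 24) & 0xff;		\
	} while (0)

	#define PVR2_DECOMPOSE_BE(t, i, d) do {		\
		(t)[i+3] = (d) & 0xff;			\
		(t)[i+2] = ((d) >> 8) & 0xff;		\
		(t)[i+1] = ((d) >> 16) & 0xff;		\
		(t)[i]   = ((d) >> 24) & 0xff;		\
	} while (0)

	int main(void)
	{
		unsigned char le[4], be[4];
		unsigned int d = 0x12345678;	/* arbitrary example value */

		PVR2_DECOMPOSE_LE(le, 0, d);
		PVR2_DECOMPOSE_BE(be, 0, d);
		printf("LE: %02x %02x %02x %02x\n", le[0], le[1], le[2], le[3]);	/* 78 56 34 12 */
		printf("BE: %02x %02x %02x %02x\n", be[0], be[1], be[2], be[3]);	/* 12 34 56 78 */
		return 0;
	}
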
d                 354 drivers/media/usb/pwc/pwc-dec23.c 	unsigned char *d = dst;
d                 356 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[0] >> scalebits];
d                 357 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[1] >> scalebits];
d                 358 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[2] >> scalebits];
d                 359 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[3] >> scalebits];
d                 361 drivers/media/usb/pwc/pwc-dec23.c 	d = dst + bytes_per_line;
d                 362 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[4] >> scalebits];
d                 363 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[5] >> scalebits];
d                 364 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[6] >> scalebits];
d                 365 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[7] >> scalebits];
d                 367 drivers/media/usb/pwc/pwc-dec23.c 	d = dst + bytes_per_line*2;
d                 368 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[8] >> scalebits];
d                 369 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[9] >> scalebits];
d                 370 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[10] >> scalebits];
d                 371 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[11] >> scalebits];
d                 373 drivers/media/usb/pwc/pwc-dec23.c 	d = dst + bytes_per_line*3;
d                 374 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[12] >> scalebits];
d                 375 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[13] >> scalebits];
d                 376 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[14] >> scalebits];
d                 377 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[15] >> scalebits];
d                 381 drivers/media/usb/pwc/pwc-dec23.c 	unsigned char *d = dst;
d                 383 drivers/media/usb/pwc/pwc-dec23.c 		*d++ = CLAMP((*c) >> scalebits);
d                 385 drivers/media/usb/pwc/pwc-dec23.c 	d = dst + bytes_per_line;
d                 387 drivers/media/usb/pwc/pwc-dec23.c 		*d++ = CLAMP((*c) >> scalebits);
d                 389 drivers/media/usb/pwc/pwc-dec23.c 	d = dst + bytes_per_line*2;
d                 391 drivers/media/usb/pwc/pwc-dec23.c 		*d++ = CLAMP((*c) >> scalebits);
d                 393 drivers/media/usb/pwc/pwc-dec23.c 	d = dst + bytes_per_line*3;
d                 395 drivers/media/usb/pwc/pwc-dec23.c 		*d++ = CLAMP((*c) >> scalebits);
d                 409 drivers/media/usb/pwc/pwc-dec23.c 	unsigned char *d = dst;
d                 411 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[0] >> scalebits];
d                 412 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[4] >> scalebits];
d                 413 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[1] >> scalebits];
d                 414 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[5] >> scalebits];
d                 415 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[2] >> scalebits];
d                 416 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[6] >> scalebits];
d                 417 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[3] >> scalebits];
d                 418 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[7] >> scalebits];
d                 420 drivers/media/usb/pwc/pwc-dec23.c 	d = dst + bytes_per_line;
d                 421 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[12] >> scalebits];
d                 422 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[8] >> scalebits];
d                 423 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[13] >> scalebits];
d                 424 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[9] >> scalebits];
d                 425 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[14] >> scalebits];
d                 426 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[10] >> scalebits];
d                 427 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[15] >> scalebits];
d                 428 drivers/media/usb/pwc/pwc-dec23.c 	*d++ = cm[c[11] >> scalebits];
d                 433 drivers/media/usb/pwc/pwc-dec23.c 	unsigned char *d = dst;
d                 436 drivers/media/usb/pwc/pwc-dec23.c 		*d++ = CLAMP((*c1) >> scalebits);
d                 437 drivers/media/usb/pwc/pwc-dec23.c 		*d++ = CLAMP((*c2) >> scalebits);
d                 440 drivers/media/usb/pwc/pwc-dec23.c 	d = dst + bytes_per_line;
d                 442 drivers/media/usb/pwc/pwc-dec23.c 		*d++ = CLAMP((*c1) >> scalebits);
d                 443 drivers/media/usb/pwc/pwc-dec23.c 		*d++ = CLAMP((*c2) >> scalebits);
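
The pwc-dec23 excerpts above write one decoded 4x4 block into the output image: each of the four rows starts at dst + n*bytes_per_line, and every coefficient is downshifted by scalebits and clamped to 0..255 (via the cm[] lookup table or CLAMP). A compact sketch of the plain, non-interleaved variant, with a simple clamp function standing in for the driver's table (buffer sizes and coefficients are arbitrary):

	#include <stdio.h>

	static unsigned char clamp8(int v)
	{
		return v < 0 ? 0 : v > 255 ? 255 : (unsigned char)v;
	}

	/* write a 4x4 block of downshifted, clamped coefficients into an image */
	static void copy_block(unsigned char *dst, unsigned int bytes_per_line,
			       const int *c, unsigned int scalebits)
	{
		unsigned int row, col;

		for (row = 0; row < 4; row++) {
			unsigned char *d = dst + row * bytes_per_line;

			for (col = 0; col < 4; col++)
				*d++ = clamp8(c[4 * row + col] >> scalebits);
		}
	}

	int main(void)
	{
		unsigned char image[4 * 8] = { 0 };	/* 8 bytes per line, 4 lines */
		int coeffs[16] = { 0, 512, 1024, 9999, 5, 256, 128, 64,
				   32, 16, 8, 4, 2, 1, 300, 700 };

		copy_block(image, 8, coeffs, 2);	/* scalebits == 2 */
		printf("first row: %u %u %u %u\n",
		       image[0], image[1], image[2], image[3]);	/* 0 128 255 255 */
		return 0;
	}
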
d                 110 drivers/media/usb/stkwebcam/stk-webcam.h #define vdev_to_camera(d) container_of(d, struct stk_camera, vdev)
d                 748 drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c 	struct usb_iso_packet_descriptor *d;
d                 773 drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c 			d = &urb->iso_frame_desc[i];
d                 774 drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c 			data = urb->transfer_buffer + d->offset;
d                 775 drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c 			len = d->actual_length;
d                 776 drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c 			d->actual_length = 0;
d                 777 drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c 			d->status = 0;
d                 804 drivers/media/usb/ttusb-dec/ttusb_dec.c 			struct usb_iso_packet_descriptor *d;
d                 809 drivers/media/usb/ttusb-dec/ttusb_dec.c 			d = &urb->iso_frame_desc[i];
d                 810 drivers/media/usb/ttusb-dec/ttusb_dec.c 			b = urb->transfer_buffer + d->offset;
d                 811 drivers/media/usb/ttusb-dec/ttusb_dec.c 			length = d->actual_length;
d                 157 drivers/media/usb/uvc/uvc_v4l2.c 	unsigned int d, maxd;
d                 198 drivers/media/usb/uvc/uvc_v4l2.c 		d = min(w, rw) * min(h, rh);
d                 199 drivers/media/usb/uvc/uvc_v4l2.c 		d = w*h + rw*rh - 2*d;
d                 200 drivers/media/usb/uvc/uvc_v4l2.c 		if (d < maxd) {
d                 201 drivers/media/usb/uvc/uvc_v4l2.c 			maxd = d;
d                 403 drivers/media/usb/uvc/uvc_v4l2.c 		u32 d, ival;
d                 413 drivers/media/usb/uvc/uvc_v4l2.c 		d = abs((s32)ival - interval);
d                 414 drivers/media/usb/uvc/uvc_v4l2.c 		if (d >= maxd)
d                 420 drivers/media/usb/uvc/uvc_v4l2.c 		maxd = d;
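
Both uvc_v4l2.c loops above pick the closest supported mode by minimising a distance d: for resolutions, d = w*h + rw*rh - 2*min(w,rw)*min(h,rh), which is zero on an exact match, and for frame intervals simply |ival - interval|. A small worked example of the resolution metric (the candidate sizes are arbitrary):

	#include <stdio.h>

	static unsigned int umin(unsigned int a, unsigned int b)
	{
		return a < b ? a : b;
	}

	/* distance between a requested size (rw x rh) and a supported size (w x h) */
	static unsigned int res_dist(unsigned int w, unsigned int h,
				     unsigned int rw, unsigned int rh)
	{
		unsigned int overlap = umin(w, rw) * umin(h, rh);

		return w * h + rw * rh - 2 * overlap;
	}

	int main(void)
	{
		/* request 640x480, compare against a few candidates */
		printf("%u\n", res_dist(640, 480, 640, 480));	/* 0: exact match */
		printf("%u\n", res_dist(1280, 720, 640, 480));	/* 614400 */
		printf("%u\n", res_dist(320, 240, 640, 480));	/* 230400 */
		return 0;
	}
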
d                 359 drivers/media/v4l2-core/v4l2-dv-timings.c 	unsigned long n, d;
d                 370 drivers/media/v4l2-core/v4l2-dv-timings.c 				    ratio.numerator, ratio.denominator, &n, &d);
d                 372 drivers/media/v4l2-core/v4l2-dv-timings.c 	ratio.denominator = d;
d                 389 drivers/media/v4l2-core/v4l2-dv-timings.c 	unsigned long n, d;
d                 408 drivers/media/v4l2-core/v4l2-dv-timings.c 	rational_best_approximation(fps, 100, fps, 100, &n, &d);
d                 410 drivers/media/v4l2-core/v4l2-dv-timings.c 	fps_fract.numerator = d;
d                1288 drivers/memory/omap-gpmc.c static void gpmc_irq_mask(struct irq_data *d)
d                1290 drivers/memory/omap-gpmc.c 	gpmc_irq_endis(d->hwirq, false);
d                1293 drivers/memory/omap-gpmc.c static void gpmc_irq_unmask(struct irq_data *d)
d                1295 drivers/memory/omap-gpmc.c 	gpmc_irq_endis(d->hwirq, true);
d                1318 drivers/memory/omap-gpmc.c static void gpmc_irq_ack(struct irq_data *d)
d                1320 drivers/memory/omap-gpmc.c 	unsigned int hwirq = d->hwirq;
d                1330 drivers/memory/omap-gpmc.c static int gpmc_irq_set_type(struct irq_data *d, unsigned int trigger)
d                1333 drivers/memory/omap-gpmc.c 	if (d->hwirq < GPMC_NR_NAND_IRQS)
d                1338 drivers/memory/omap-gpmc.c 		gpmc_irq_edge_config(d->hwirq, false);
d                1340 drivers/memory/omap-gpmc.c 		gpmc_irq_edge_config(d->hwirq, true);
d                1347 drivers/memory/omap-gpmc.c static int gpmc_irq_map(struct irq_domain *d, unsigned int virq,
d                1350 drivers/memory/omap-gpmc.c 	struct gpmc_device *gpmc = d->host_data;
d                5704 drivers/message/fusion/mptbase.c 		component_info->d.PhysDiskNum = phys_disk.PhysDiskNum;
d                5705 drivers/message/fusion/mptbase.c 		component_info->d.PhysDiskBus = phys_disk.PhysDiskBus;
d                5706 drivers/message/fusion/mptbase.c 		component_info->d.PhysDiskID = phys_disk.PhysDiskID;
d                5707 drivers/message/fusion/mptbase.c 		component_info->d.PhysDiskIOC = phys_disk.PhysDiskIOC;
d                 522 drivers/message/fusion/mptbase.h 	IOC_3_PHYS_DISK	 d;			/* phys disk info */
d                 123 drivers/message/fusion/mptlan.h #define NETDEV_TO_LANPRIV_PTR(d)	((struct mpt_lan_priv *)netdev_priv(d))
d                 124 drivers/message/fusion/mptlan.h #define NETDEV_PTR_TO_IOC_NAME_s(d)	(NETDEV_TO_LANPRIV_PTR(d)->mpt_dev->name)
d                 125 drivers/message/fusion/mptlan.h #define IOC_AND_NETDEV_NAMES_s_s(d)	NETDEV_PTR_TO_IOC_NAME_s(d), (d)->name
d                2190 drivers/message/fusion/mptscsih.c 		if ((component_info->d.PhysDiskID == id) &&
d                2191 drivers/message/fusion/mptscsih.c 		    (component_info->d.PhysDiskBus == channel))
d                2267 drivers/message/fusion/mptscsih.c 		if ((component_info->d.PhysDiskID == id) &&
d                2268 drivers/message/fusion/mptscsih.c 		    (component_info->d.PhysDiskBus == channel))
d                2269 drivers/message/fusion/mptscsih.c 			rc = component_info->d.PhysDiskNum;
d                 552 drivers/mfd/88pm860x-core.c static int pm860x_irq_domain_map(struct irq_domain *d, unsigned int virq,
d                 555 drivers/mfd/88pm860x-core.c 	irq_set_chip_data(virq, d->host_data);
d                 556 drivers/mfd/ab8500-core.c static int ab8500_irq_map(struct irq_domain *d, unsigned int virq,
d                 559 drivers/mfd/ab8500-core.c 	struct ab8500 *ab8500 = d->host_data;
d                1553 drivers/mfd/ab8500-debugfs.c static int ab8500_hwreg_print(struct seq_file *s, void *d)
d                1475 drivers/mfd/db8500-prcmu.c 	u32 d;
d                1483 drivers/mfd/db8500-prcmu.c 	d = ((val & PRCM_PLL_FREQ_N_MASK) >> PRCM_PLL_FREQ_N_SHIFT);
d                1484 drivers/mfd/db8500-prcmu.c 	if (d > 1)
d                1485 drivers/mfd/db8500-prcmu.c 		div *= d;
d                1487 drivers/mfd/db8500-prcmu.c 	d = ((val & PRCM_PLL_FREQ_R_MASK) >> PRCM_PLL_FREQ_R_SHIFT);
d                1488 drivers/mfd/db8500-prcmu.c 	if (d > 1)
d                1489 drivers/mfd/db8500-prcmu.c 		div *= d;
d                1754 drivers/mfd/db8500-prcmu.c 		u64 d;
d                1756 drivers/mfd/db8500-prcmu.c 		d = (r * rate);
d                1757 drivers/mfd/db8500-prcmu.c 		(void)do_div(d, src_rate);
d                1758 drivers/mfd/db8500-prcmu.c 		if (d < 6)
d                1759 drivers/mfd/db8500-prcmu.c 			d = 6;
d                1760 drivers/mfd/db8500-prcmu.c 		else if (d > 255)
d                1761 drivers/mfd/db8500-prcmu.c 			d = 255;
d                1762 drivers/mfd/db8500-prcmu.c 		d *= src_rate;
d                1763 drivers/mfd/db8500-prcmu.c 		if (((2 * d) < (r * MIN_PLL_VCO_RATE)) ||
d                1764 drivers/mfd/db8500-prcmu.c 			((r * MAX_PLL_VCO_RATE) < (2 * d)))
d                1766 drivers/mfd/db8500-prcmu.c 		(void)do_div(d, r);
d                1767 drivers/mfd/db8500-prcmu.c 		if (rate < d) {
d                1769 drivers/mfd/db8500-prcmu.c 				rounded_rate = (long)d;
d                1772 drivers/mfd/db8500-prcmu.c 		if ((rate - d) < rem) {
d                1773 drivers/mfd/db8500-prcmu.c 			rem = (rate - d);
d                1774 drivers/mfd/db8500-prcmu.c 			rounded_rate = (long)d;
d                1914 drivers/mfd/db8500-prcmu.c 		u64 d;
d                1917 drivers/mfd/db8500-prcmu.c 		d = (r * rate);
d                1918 drivers/mfd/db8500-prcmu.c 		(void)do_div(d, src_rate);
d                1919 drivers/mfd/db8500-prcmu.c 		if (d < 6)
d                1920 drivers/mfd/db8500-prcmu.c 			d = 6;
d                1921 drivers/mfd/db8500-prcmu.c 		else if (d > 255)
d                1922 drivers/mfd/db8500-prcmu.c 			d = 255;
d                1923 drivers/mfd/db8500-prcmu.c 		hwrate = (d * src_rate);
d                1930 drivers/mfd/db8500-prcmu.c 				pll_freq = (((u32)d << PRCM_PLL_FREQ_D_SHIFT) |
d                1936 drivers/mfd/db8500-prcmu.c 			pll_freq = (((u32)d << PRCM_PLL_FREQ_D_SHIFT) |
d                2580 drivers/mfd/db8500-prcmu.c static void prcmu_irq_mask(struct irq_data *d)
d                2586 drivers/mfd/db8500-prcmu.c 	mb0_transfer.req.dbb_irqs &= ~prcmu_irq_bit[d->hwirq];
d                2590 drivers/mfd/db8500-prcmu.c 	if (d->irq != IRQ_PRCMU_CA_SLEEP)
d                2594 drivers/mfd/db8500-prcmu.c static void prcmu_irq_unmask(struct irq_data *d)
d                2600 drivers/mfd/db8500-prcmu.c 	mb0_transfer.req.dbb_irqs |= prcmu_irq_bit[d->hwirq];
d                2604 drivers/mfd/db8500-prcmu.c 	if (d->irq != IRQ_PRCMU_CA_SLEEP)
d                2608 drivers/mfd/db8500-prcmu.c static void noop(struct irq_data *d)
d                2662 drivers/mfd/db8500-prcmu.c static int db8500_irq_map(struct irq_domain *d, unsigned int virq,
d                 146 drivers/mfd/ezx-pcap.c static void pcap_mask_irq(struct irq_data *d)
d                 148 drivers/mfd/ezx-pcap.c 	struct pcap_chip *pcap = irq_data_get_irq_chip_data(d);
d                 150 drivers/mfd/ezx-pcap.c 	pcap->msr |= 1 << irq_to_pcap(pcap, d->irq);
d                 154 drivers/mfd/ezx-pcap.c static void pcap_unmask_irq(struct irq_data *d)
d                 156 drivers/mfd/ezx-pcap.c 	struct pcap_chip *pcap = irq_data_get_irq_chip_data(d);
d                 158 drivers/mfd/ezx-pcap.c 	pcap->msr &= ~(1 << irq_to_pcap(pcap, d->irq));
d                  46 drivers/mfd/fsl-imx25-tsadc.c static int mx25_tsadc_domain_map(struct irq_domain *d, unsigned int irq,
d                  49 drivers/mfd/fsl-imx25-tsadc.c 	struct mx25_tsadc *tsadc = d->host_data;
d                 131 drivers/mfd/lp8788-irq.c static int lp8788_irq_map(struct irq_domain *d, unsigned int virq,
d                 134 drivers/mfd/lp8788-irq.c 	struct lp8788_irq_data *irqd = d->host_data;
d                 644 drivers/mfd/max8925-core.c static int max8925_irq_domain_map(struct irq_domain *d, unsigned int virq,
d                 647 drivers/mfd/max8925-core.c 	irq_set_chip_data(virq, d->host_data);
d                 275 drivers/mfd/max8997-irq.c static int max8997_irq_domain_map(struct irq_domain *d, unsigned int irq,
d                 278 drivers/mfd/max8997-irq.c 	struct max8997_dev *max8997 = d->host_data;
d                 192 drivers/mfd/max8998-irq.c static int max8998_irq_domain_map(struct irq_domain *d, unsigned int irq,
d                 195 drivers/mfd/max8998-irq.c 	struct max8997_dev *max8998 = d->host_data;
d                  20 drivers/mfd/mcp-core.c #define to_mcp(d)		container_of(d, struct mcp, attached_device)
d                  21 drivers/mfd/mcp-core.c #define to_mcp_driver(d)	container_of(d, struct mcp_driver, drv)
d                  54 drivers/mfd/motorola-cpcap.c 	struct regmap_irq_chip_data *d = irq_get_chip_data(virq);
d                  55 drivers/mfd/motorola-cpcap.c 	int irq_base = regmap_irq_chip_get_base(d);
d                 114 drivers/mfd/mt6397-irq.c static int mt6397_irq_domain_map(struct irq_domain *d, unsigned int irq,
d                 117 drivers/mfd/mt6397-irq.c 	struct mt6397_chip *mt6397 = d->host_data;
d                 271 drivers/mfd/qcom-pm8xxx.c static void pm8xxx_irq_mask_ack(struct irq_data *d)
d                 273 drivers/mfd/qcom-pm8xxx.c 	struct pm_irq_chip *chip = irq_data_get_irq_chip_data(d);
d                 274 drivers/mfd/qcom-pm8xxx.c 	unsigned int pmirq = irqd_to_hwirq(d);
d                 283 drivers/mfd/qcom-pm8xxx.c static void pm8xxx_irq_unmask(struct irq_data *d)
d                 285 drivers/mfd/qcom-pm8xxx.c 	struct pm_irq_chip *chip = irq_data_get_irq_chip_data(d);
d                 286 drivers/mfd/qcom-pm8xxx.c 	unsigned int pmirq = irqd_to_hwirq(d);
d                 295 drivers/mfd/qcom-pm8xxx.c static int pm8xxx_irq_set_type(struct irq_data *d, unsigned int flow_type)
d                 297 drivers/mfd/qcom-pm8xxx.c 	struct pm_irq_chip *chip = irq_data_get_irq_chip_data(d);
d                 298 drivers/mfd/qcom-pm8xxx.c 	unsigned int pmirq = irqd_to_hwirq(d);
d                 325 drivers/mfd/qcom-pm8xxx.c static int pm8xxx_irq_get_irqchip_state(struct irq_data *d,
d                 329 drivers/mfd/qcom-pm8xxx.c 	struct pm_irq_chip *chip = irq_data_get_irq_chip_data(d);
d                 330 drivers/mfd/qcom-pm8xxx.c 	unsigned int pmirq = irqd_to_hwirq(d);
d                 405 drivers/mfd/qcom-pm8xxx.c static void pm8821_irq_mask_ack(struct irq_data *d)
d                 407 drivers/mfd/qcom-pm8xxx.c 	struct pm_irq_chip *chip = irq_data_get_irq_chip_data(d);
d                 408 drivers/mfd/qcom-pm8xxx.c 	unsigned int pmirq = irqd_to_hwirq(d);
d                 432 drivers/mfd/qcom-pm8xxx.c static void pm8821_irq_unmask(struct irq_data *d)
d                 434 drivers/mfd/qcom-pm8xxx.c 	struct pm_irq_chip *chip = irq_data_get_irq_chip_data(d);
d                 435 drivers/mfd/qcom-pm8xxx.c 	unsigned int pmirq = irqd_to_hwirq(d);
d                 452 drivers/mfd/qcom-pm8xxx.c static int pm8821_irq_get_irqchip_state(struct irq_data *d,
d                 456 drivers/mfd/qcom-pm8xxx.c 	struct pm_irq_chip *chip = irq_data_get_irq_chip_data(d);
d                 457 drivers/mfd/qcom-pm8xxx.c 	int rc, pmirq = irqd_to_hwirq(d);
d                 233 drivers/mfd/stmfx.c static int stmfx_irq_map(struct irq_domain *d, unsigned int virq,
d                 236 drivers/mfd/stmfx.c 	irq_set_chip_data(virq, d->host_data);
d                 244 drivers/mfd/stmfx.c static void stmfx_irq_unmap(struct irq_domain *d, unsigned int virq)
d                1190 drivers/mfd/stmpe.c static int stmpe_irq_map(struct irq_domain *d, unsigned int virq,
d                1193 drivers/mfd/stmpe.c 	struct stmpe *stmpe = d->host_data;
d                1207 drivers/mfd/stmpe.c static void stmpe_irq_unmap(struct irq_domain *d, unsigned int virq)
d                 209 drivers/mfd/tc3589x.c static int tc3589x_irq_map(struct irq_domain *d, unsigned int virq,
d                 212 drivers/mfd/tc3589x.c 	struct tc3589x *tc3589x = d->host_data;
d                 223 drivers/mfd/tc3589x.c static void tc3589x_irq_unmap(struct irq_domain *d, unsigned int virq)
d                 219 drivers/mfd/twl6030-irq.c static int twl6030_irq_set_wake(struct irq_data *d, unsigned int on)
d                 221 drivers/mfd/twl6030-irq.c 	struct twl6030_irq *pdata = irq_data_get_irq_chip_data(d);
d                 333 drivers/mfd/twl6030-irq.c static int twl6030_irq_map(struct irq_domain *d, unsigned int virq,
d                 336 drivers/mfd/twl6030-irq.c 	struct twl6030_irq *pdata = d->host_data;
d                 347 drivers/mfd/twl6030-irq.c static void twl6030_irq_unmap(struct irq_domain *d, unsigned int virq)
d                 441 drivers/misc/cxl/cxl.h #define to_cxl_adapter(d) container_of(d, struct cxl, dev)
d                 442 drivers/misc/cxl/cxl.h #define to_cxl_afu(d) container_of(d, struct cxl_afu, dev)
d                  13 drivers/misc/cxl/sysfs.c #define to_afu_chardev_m(d) dev_get_drvdata(d)
d                  73 drivers/misc/fastrpc.c #define miscdev_to_cctx(d) container_of(d, struct fastrpc_channel_ctx, miscdev)
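
The kref_to_dev(), vdev_to_camera(), to_cxl_adapter() and miscdev_to_cctx() macros scattered through this listing are all the same idiom: a driver-private structure embeds a generic object (a struct device, kref, miscdevice, ...), and container_of() walks back from a pointer to that member to the enclosing structure. A standalone sketch of what such a macro expands to, using offsetof() and a made-up struct my_card:

	#include <stdio.h>
	#include <stddef.h>

	/* simplified stand-in for the kernel type (hypothetical) */
	struct device { const char *name; };

	struct my_card {
		int id;
		struct device dev;	/* embedded generic object */
	};

	/* same idea as the kernel's container_of(): member pointer -> container */
	#define to_my_card(d) \
		((struct my_card *)((char *)(d) - offsetof(struct my_card, dev)))

	static void show(struct device *d)
	{
		struct my_card *card = to_my_card(d);

		printf("%s is card %d\n", d->name, card->id);
	}

	int main(void)
	{
		struct my_card card = { .id = 7, .dev = { .name = "card0" } };

		show(&card.dev);	/* callbacks only receive the embedded member */
		return 0;
	}
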
d                 478 drivers/misc/genwqe/card_base.h 			    struct genwqe_debug_data *d);
d                 720 drivers/misc/genwqe/card_ddcb.c int genwqe_init_debug_data(struct genwqe_dev *cd, struct genwqe_debug_data *d)
d                 725 drivers/misc/genwqe/card_ddcb.c 	if (d == NULL) {
d                 732 drivers/misc/genwqe/card_ddcb.c 	len  = sizeof(d->driver_version);
d                 733 drivers/misc/genwqe/card_ddcb.c 	snprintf(d->driver_version, len, "%s", DRV_VERSION);
d                 734 drivers/misc/genwqe/card_ddcb.c 	d->slu_unitcfg = cd->slu_unitcfg;
d                 735 drivers/misc/genwqe/card_ddcb.c 	d->app_unitcfg = cd->app_unitcfg;
d                  21 drivers/misc/mei/bus.c #define to_mei_cl_driver(d) container_of(d, struct mei_cl_driver, driver)
d                  17 drivers/misc/mic/bus/cosm_bus.c static int cosm_dev_probe(struct device *d)
d                  19 drivers/misc/mic/bus/cosm_bus.c 	struct cosm_device *dev = dev_to_cosm(d);
d                  25 drivers/misc/mic/bus/cosm_bus.c static int cosm_dev_remove(struct device *d)
d                  27 drivers/misc/mic/bus/cosm_bus.c 	struct cosm_device *dev = dev_to_cosm(d);
d                  53 drivers/misc/mic/bus/cosm_bus.c static inline void cosm_release_dev(struct device *d)
d                  55 drivers/misc/mic/bus/cosm_bus.c 	struct cosm_device *cdev = dev_to_cosm(d);
d                  17 drivers/misc/mic/bus/mic_bus.c static ssize_t device_show(struct device *d,
d                  20 drivers/misc/mic/bus/mic_bus.c 	struct mbus_device *dev = dev_to_mbus(d);
d                  25 drivers/misc/mic/bus/mic_bus.c static ssize_t vendor_show(struct device *d,
d                  28 drivers/misc/mic/bus/mic_bus.c 	struct mbus_device *dev = dev_to_mbus(d);
d                  33 drivers/misc/mic/bus/mic_bus.c static ssize_t modalias_show(struct device *d,
d                  36 drivers/misc/mic/bus/mic_bus.c 	struct mbus_device *dev = dev_to_mbus(d);
d                  84 drivers/misc/mic/bus/mic_bus.c static int mbus_dev_probe(struct device *d)
d                  87 drivers/misc/mic/bus/mic_bus.c 	struct mbus_device *dev = dev_to_mbus(d);
d                  97 drivers/misc/mic/bus/mic_bus.c static int mbus_dev_remove(struct device *d)
d                  99 drivers/misc/mic/bus/mic_bus.c 	struct mbus_device *dev = dev_to_mbus(d);
d                 128 drivers/misc/mic/bus/mic_bus.c static void mbus_release_dev(struct device *d)
d                 130 drivers/misc/mic/bus/mic_bus.c 	struct mbus_device *mbdev = dev_to_mbus(d);
d                  16 drivers/misc/mic/bus/scif_bus.c static ssize_t device_show(struct device *d,
d                  19 drivers/misc/mic/bus/scif_bus.c 	struct scif_hw_dev *dev = dev_to_scif(d);
d                  25 drivers/misc/mic/bus/scif_bus.c static ssize_t vendor_show(struct device *d,
d                  28 drivers/misc/mic/bus/scif_bus.c 	struct scif_hw_dev *dev = dev_to_scif(d);
d                  34 drivers/misc/mic/bus/scif_bus.c static ssize_t modalias_show(struct device *d,
d                  37 drivers/misc/mic/bus/scif_bus.c 	struct scif_hw_dev *dev = dev_to_scif(d);
d                  86 drivers/misc/mic/bus/scif_bus.c static int scif_dev_probe(struct device *d)
d                  88 drivers/misc/mic/bus/scif_bus.c 	struct scif_hw_dev *dev = dev_to_scif(d);
d                  94 drivers/misc/mic/bus/scif_bus.c static int scif_dev_remove(struct device *d)
d                  96 drivers/misc/mic/bus/scif_bus.c 	struct scif_hw_dev *dev = dev_to_scif(d);
d                 125 drivers/misc/mic/bus/scif_bus.c static void scif_release_dev(struct device *d)
d                 127 drivers/misc/mic/bus/scif_bus.c 	struct scif_hw_dev *sdev = dev_to_scif(d);
d                  16 drivers/misc/mic/bus/vop_bus.c static ssize_t device_show(struct device *d,
d                  19 drivers/misc/mic/bus/vop_bus.c 	struct vop_device *dev = dev_to_vop(d);
d                  25 drivers/misc/mic/bus/vop_bus.c static ssize_t vendor_show(struct device *d,
d                  28 drivers/misc/mic/bus/vop_bus.c 	struct vop_device *dev = dev_to_vop(d);
d                  34 drivers/misc/mic/bus/vop_bus.c static ssize_t modalias_show(struct device *d,
d                  37 drivers/misc/mic/bus/vop_bus.c 	struct vop_device *dev = dev_to_vop(d);
d                  86 drivers/misc/mic/bus/vop_bus.c static int vop_dev_probe(struct device *d)
d                  88 drivers/misc/mic/bus/vop_bus.c 	struct vop_device *dev = dev_to_vop(d);
d                  94 drivers/misc/mic/bus/vop_bus.c static int vop_dev_remove(struct device *d)
d                  96 drivers/misc/mic/bus/vop_bus.c 	struct vop_device *dev = dev_to_vop(d);
d                 125 drivers/misc/mic/bus/vop_bus.c static void vop_release_dev(struct device *d)
d                 127 drivers/misc/mic/bus/vop_bus.c 	struct vop_device *dev = dev_to_vop(d);
d                  23 drivers/misc/mic/scif/scif_peer_bus.c static void scif_peer_release_dev(struct device *d)
d                  25 drivers/misc/mic/scif/scif_peer_bus.c 	struct scif_peer_dev *sdev = dev_to_scif_peer(d);
d                  16 drivers/misc/mic/vop/vop_debugfs.c 	struct mic_device_desc *d;
d                  42 drivers/misc/mic/vop/vop_debugfs.c 		j < MIC_DP_SIZE; j += mic_total_desc_size(d)) {
d                  43 drivers/misc/mic/vop/vop_debugfs.c 		d = (void *)bootparam + j;
d                  44 drivers/misc/mic/vop/vop_debugfs.c 		dc = (void *)d + mic_aligned_desc_size(d);
d                  47 drivers/misc/mic/vop/vop_debugfs.c 		if (d->type == 0)
d                  50 drivers/misc/mic/vop/vop_debugfs.c 		if (d->type == -1)
d                  53 drivers/misc/mic/vop/vop_debugfs.c 		seq_printf(s, "Type %d ", d->type);
d                  54 drivers/misc/mic/vop/vop_debugfs.c 		seq_printf(s, "Num VQ %d ", d->num_vq);
d                  55 drivers/misc/mic/vop/vop_debugfs.c 		seq_printf(s, "Feature Len %d\n", d->feature_len);
d                  56 drivers/misc/mic/vop/vop_debugfs.c 		seq_printf(s, "Config Len %d ", d->config_len);
d                  57 drivers/misc/mic/vop/vop_debugfs.c 		seq_printf(s, "Shutdown Status %d\n", d->status);
d                  59 drivers/misc/mic/vop/vop_debugfs.c 		for (k = 0; k < d->num_vq; k++) {
d                  60 drivers/misc/mic/vop/vop_debugfs.c 			vqconfig = mic_vq_config(d) + k;
d                  69 drivers/misc/mic/vop/vop_debugfs.c 		features = (__u32 *)mic_vq_features(d);
d                  73 drivers/misc/mic/vop/vop_debugfs.c 		config = mic_vq_configspace(d);
d                  74 drivers/misc/mic/vop/vop_debugfs.c 		for (k = 0; k < d->config_len; k++)
d                  62 drivers/misc/mic/vop/vop_main.c #define _vop_aligned_desc_size(d) __mic_align(_vop_desc_size(d), 8)
d                 480 drivers/misc/mic/vop/vop_main.c static int _vop_add_device(struct mic_device_desc __iomem *d,
d                 486 drivers/misc/mic/vop/vop_main.c 	u8 type = ioread8(&d->type);
d                 497 drivers/misc/mic/vop/vop_main.c 	vdev->desc = d;
d                 498 drivers/misc/mic/vop/vop_main.c 	vdev->dc = (void __iomem *)d + _vop_aligned_desc_size(d);
d                 554 drivers/misc/mic/vop/vop_main.c static void _vop_handle_config_change(struct mic_device_desc __iomem *d,
d                 559 drivers/misc/mic/vop/vop_main.c 		= (void __iomem *)d + _vop_aligned_desc_size(d);
d                 574 drivers/misc/mic/vop/vop_main.c static int _vop_remove_device(struct mic_device_desc __iomem *d,
d                 578 drivers/misc/mic/vop/vop_main.c 		= (void __iomem *)d + _vop_aligned_desc_size(d);
d                 589 drivers/misc/mic/vop/vop_main.c 			ioread8(&dc->config_change), ioread8(&d->type), vdev);
d                 590 drivers/misc/mic/vop/vop_main.c 		status = ioread8(&d->status);
d                 601 drivers/misc/mic/vop/vop_main.c 		iowrite8(-1, &d->type);
d                 614 drivers/misc/mic/vop/vop_main.c 	struct mic_device_desc __iomem *d;
d                 620 drivers/misc/mic/vop/vop_main.c 			i < MIC_DP_SIZE; i += _vop_total_desc_size(d)) {
d                 621 drivers/misc/mic/vop/vop_main.c 		d = dp + i;
d                 622 drivers/misc/mic/vop/vop_main.c 		dc = (void __iomem *)d + _vop_aligned_desc_size(d);
d                 629 drivers/misc/mic/vop/vop_main.c 		type = ioread8(&d->type);
d                 639 drivers/misc/mic/vop/vop_main.c 		dev = device_find_child(&vpdev->dev, (void __force *)d,
d                 646 drivers/misc/mic/vop/vop_main.c 			_vop_handle_config_change(d, i, vpdev);
d                 647 drivers/misc/mic/vop/vop_main.c 			ret = _vop_remove_device(d, i, vpdev);
d                 657 drivers/misc/mic/vop/vop_main.c 			__func__, __LINE__, d);
d                 659 drivers/misc/mic/vop/vop_main.c 			_vop_add_device(d, i, vpdev, dnode);
d                 343 drivers/misc/sgi-gru/grufile.c static void gru_noop(struct irq_data *d)
d                 457 drivers/misc/sgi-gru/grumain.c static inline long gru_copy_handle(void *d, void *s)
d                 459 drivers/misc/sgi-gru/grumain.c 	memcpy(d, s, GRU_HANDLE_BYTES);
d                  28 drivers/mmc/core/bus.c #define to_mmc_driver(d)	container_of(d, struct mmc_driver, drv)
d                  17 drivers/mmc/core/card.h #define mmc_dev_to_card(d)	container_of(d, struct mmc_card, dev)
d                  32 drivers/mmc/core/host.c #define cls_dev_to_mmc_host(d)	container_of(d, struct mmc_host, class_dev)
d                  28 drivers/mmc/core/sdio_bus.c #define to_sdio_driver(d)	container_of(d, struct sdio_driver, drv)
d                 569 drivers/mmc/host/alcor.c static irqreturn_t alcor_irq_thread(int irq, void *d)
d                 571 drivers/mmc/host/alcor.c 	struct alcor_sdmmc_host *host = d;
d                 618 drivers/mmc/host/alcor.c static irqreturn_t alcor_irq(int irq, void *d)
d                 620 drivers/mmc/host/alcor.c 	struct alcor_sdmmc_host *host = d;
d                 963 drivers/mmc/host/alcor.c 	struct delayed_work *d = to_delayed_work(work);
d                 964 drivers/mmc/host/alcor.c 	struct alcor_sdmmc_host *host = container_of(d, struct alcor_sdmmc_host,
d                 828 drivers/mmc/host/bcm2835.c 	struct delayed_work *d = to_delayed_work(work);
d                 830 drivers/mmc/host/bcm2835.c 		container_of(d, struct bcm2835_host, timeout_work);
d                  71 drivers/mmc/host/dw_mmc.c #define IDMAC_64ADDR_SET_BUFFER1_SIZE(d, s) \
d                  72 drivers/mmc/host/dw_mmc.c 	((d)->des2 = ((d)->des2 & cpu_to_le32(0x03ffe000)) | \
d                  95 drivers/mmc/host/dw_mmc.c #define IDMAC_SET_BUFFER1_SIZE(d, s) \
d                  96 drivers/mmc/host/dw_mmc.c 	((d)->des1 = ((d)->des1 & cpu_to_le32(0x03ffe000)) | (cpu_to_le32((s) & 0x1fff)))
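
The dw_mmc IDMAC macros above set the 13-bit buffer-1 size field of a DMA descriptor word while preserving the adjacent buffer-2 size field (mask 0x03ffe000, bits 13..25); the real code does this on little-endian __le32 fields via cpu_to_le32(). A host-endian sketch of the same read-modify-write:

	#include <stdio.h>
	#include <stdint.h>

	#define BUF2_SIZE_MASK	0x03ffe000u	/* bits 13..25: buffer-2 size */
	#define BUF1_SIZE_MASK	0x00001fffu	/* bits 0..12:  buffer-1 size */

	/* keep the buffer-2 field, replace the buffer-1 field */
	static uint32_t set_buffer1_size(uint32_t des1, uint32_t size)
	{
		return (des1 & BUF2_SIZE_MASK) | (size & BUF1_SIZE_MASK);
	}

	int main(void)
	{
		uint32_t des1 = 0xffffffffu;	/* arbitrary old descriptor word */

		des1 = set_buffer1_size(des1, 512);
		printf("des1 = 0x%08x\n", des1);	/* 0x03ffe200 */
		return 0;
	}
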
d                 526 drivers/mmc/host/jz4740_mmc.c 	uint32_t d;
d                 564 drivers/mmc/host/jz4740_mmc.c 				d = readl(fifo_addr);
d                 565 drivers/mmc/host/jz4740_mmc.c 				memcpy(buf, &d, i);
d                 581 drivers/mmc/host/jz4740_mmc.c 		d = readl(fifo_addr);
d                1316 drivers/mmc/host/sh_mmcif.c 	struct delayed_work *d = to_delayed_work(work);
d                1317 drivers/mmc/host/sh_mmcif.c 	struct sh_mmcif_host *host = container_of(d, struct sh_mmcif_host, timeout_work);
d                1274 drivers/mmc/host/usdhi6rol0.c 		u16 d = usdhi6_read16(host, USDHI6_SD_BUF0);
d                1275 drivers/mmc/host/usdhi6rol0.c 		((u8 *)p)[2 * i] = ((u8 *)&d)[0];
d                1277 drivers/mmc/host/usdhi6rol0.c 			((u8 *)p)[2 * i + 1] = ((u8 *)&d)[1];
d                1314 drivers/mmc/host/usdhi6rol0.c 		u16 d;
d                1315 drivers/mmc/host/usdhi6rol0.c 		((u8 *)&d)[0] = ((u8 *)p)[2 * i];
d                1317 drivers/mmc/host/usdhi6rol0.c 			((u8 *)&d)[1] = ((u8 *)p)[2 * i + 1];
d                1319 drivers/mmc/host/usdhi6rol0.c 			((u8 *)&d)[1] = 0;
d                1320 drivers/mmc/host/usdhi6rol0.c 		usdhi6_write16(host, USDHI6_SD_BUF0, d);
d                1669 drivers/mmc/host/usdhi6rol0.c 	struct delayed_work *d = to_delayed_work(work);
d                1670 drivers/mmc/host/usdhi6rol0.c 	struct usdhi6_host *host = container_of(d, struct usdhi6_host, timeout_work);
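
The usdhi6rol0 SD_BUF0 excerpts above move data through a 16-bit FIFO register by viewing each u16 as two bytes ((u8 *)&d), with the write path zero-padding an odd trailing byte. A host-side sketch of the same packing and unpacking, with arrays standing in for the usdhi6_read16()/usdhi6_write16() register accesses (buffer contents are arbitrary):

	#include <stdio.h>
	#include <stdint.h>
	#include <string.h>

	/* pack a byte buffer into 16-bit FIFO words, zero-padding an odd tail */
	static size_t pack16(const uint8_t *src, size_t len, uint16_t *out)
	{
		size_t i, words = (len + 1) / 2;

		for (i = 0; i < words; i++) {
			uint16_t d = 0;

			((uint8_t *)&d)[0] = src[2 * i];
			if (2 * i + 1 < len)
				((uint8_t *)&d)[1] = src[2 * i + 1];
			out[i] = d;	/* would be usdhi6_write16(..., d) */
		}
		return words;
	}

	/* unpack FIFO words back into bytes, dropping the padding byte if any */
	static void unpack16(const uint16_t *in, size_t len, uint8_t *dst)
	{
		size_t i;

		for (i = 0; i < (len + 1) / 2; i++) {
			uint16_t d = in[i];	/* would be usdhi6_read16(...) */

			dst[2 * i] = ((const uint8_t *)&d)[0];
			if (2 * i + 1 < len)
				dst[2 * i + 1] = ((const uint8_t *)&d)[1];
		}
	}

	int main(void)
	{
		uint8_t src[5] = { 0x11, 0x22, 0x33, 0x44, 0x55 }, dst[5];
		uint16_t fifo[3];

		pack16(src, sizeof(src), fifo);
		unpack16(fifo, sizeof(dst), dst);
		printf("%s\n", memcmp(src, dst, sizeof(src)) ? "mismatch" : "round-trip ok");
		return 0;
	}
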
d                 355 drivers/mmc/host/vub300.c #define kref_to_vub300_mmc_host(d) container_of(d, struct vub300_mmc_host, kref)
d                 816 drivers/mtd/chips/cfi_cmdset_0002.c 	map_word d, t;
d                 826 drivers/mtd/chips/cfi_cmdset_0002.c 		d = map_read(map, addr);
d                 828 drivers/mtd/chips/cfi_cmdset_0002.c 		return map_word_andequal(map, d, ready, ready);
d                 831 drivers/mtd/chips/cfi_cmdset_0002.c 	d = map_read(map, addr);
d                 834 drivers/mtd/chips/cfi_cmdset_0002.c 	return map_word_equal(map, d, t);
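
The cfi_cmdset_0002 lines above are the AMD/Fujitsu-style ready test: the chip is treated as ready either when a read matches the expected "ready" pattern (the map_word_andequal() path) or when two consecutive reads of the same address return identical values (the map_word_equal(d, t) path), since the status toggle bit keeps flipping while a program or erase is still running. A simple polling sketch of the two-reads idea, with a hypothetical read_status() standing in for map_read() and simulating a chip that stays busy for the first few reads:

	#include <stdio.h>
	#include <stdint.h>

	/* hypothetical status source: toggles a bit while "busy", then goes stable */
	static uint16_t read_status(void)
	{
		static int reads;
		int n = reads++;

		return n < 6 ? (uint16_t)(0x80 ^ ((n & 1) << 6)) : 0x80;
	}

	/* ready when two consecutive reads return the same value */
	static int chip_ready(void)
	{
		uint16_t d = read_status();
		uint16_t t = read_status();

		return d == t;
	}

	int main(void)
	{
		int polls = 0;

		while (!chip_ready())
			polls++;
		printf("ready after %d extra polls\n", polls);	/* 3 with the fake status */
		return 0;
	}
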
d                 527 drivers/mtd/chips/cfi_cmdset_0020.c 		map_word d;
d                 528 drivers/mtd/chips/cfi_cmdset_0020.c 		d = map_word_load(map, buf);
d                 529 drivers/mtd/chips/cfi_cmdset_0020.c 		map_write(map, d, adr+z);
d                  32 drivers/mtd/hyperbus/hyperbus-core.c static void hyperbus_write16(struct map_info *map, map_word d,
d                  38 drivers/mtd/hyperbus/hyperbus-core.c 	ctlr->ops->write16(hbdev, addr, d.x[0]);
d                  80 drivers/mtd/maps/dc21285.c static void dc21285_write8(struct map_info *map, const map_word d, unsigned long adr)
d                  86 drivers/mtd/maps/dc21285.c 	*(uint8_t*)(map->virt + adr) = d.x[0];
d                  89 drivers/mtd/maps/dc21285.c static void dc21285_write16(struct map_info *map, const map_word d, unsigned long adr)
d                  95 drivers/mtd/maps/dc21285.c 	*(uint16_t*)(map->virt + adr) = d.x[0];
d                  98 drivers/mtd/maps/dc21285.c static void dc21285_write32(struct map_info *map, const map_word d, unsigned long adr)
d                 102 drivers/mtd/maps/dc21285.c 	*(uint32_t*)(map->virt + adr) = d.x[0];
d                 108 drivers/mtd/maps/dc21285.c 		map_word d;
d                 109 drivers/mtd/maps/dc21285.c 		d.x[0] = *((uint32_t*)from);
d                 110 drivers/mtd/maps/dc21285.c 		dc21285_write32(map, d, to);
d                 120 drivers/mtd/maps/dc21285.c 		map_word d;
d                 121 drivers/mtd/maps/dc21285.c 		d.x[0] = *((uint16_t*)from);
d                 122 drivers/mtd/maps/dc21285.c 		dc21285_write16(map, d, to);
d                 131 drivers/mtd/maps/dc21285.c 	map_word d;
d                 132 drivers/mtd/maps/dc21285.c 	d.x[0] = *((uint8_t*)from);
d                 133 drivers/mtd/maps/dc21285.c 	dc21285_write8(map, d, to);
d                  65 drivers/mtd/maps/ixp4xx.c static inline void flash_write16(u16 d, void __iomem *addr)
d                  67 drivers/mtd/maps/ixp4xx.c 	__raw_writew(cpu_to_be16(d), (void __iomem *)((unsigned long)addr ^ 0x2));
d                  80 drivers/mtd/maps/ixp4xx.c static inline void flash_write16(u16 d, void __iomem *addr)
d                  82 drivers/mtd/maps/ixp4xx.c 	__raw_writew(d, addr);
d                 132 drivers/mtd/maps/ixp4xx.c static void ixp4xx_probe_write16(struct map_info *map, map_word d, unsigned long adr)
d                 135 drivers/mtd/maps/ixp4xx.c 		flash_write16(d.x[0], map->virt + adr);
d                 141 drivers/mtd/maps/ixp4xx.c static void ixp4xx_write16(struct map_info *map, map_word d, unsigned long adr)
d                 143 drivers/mtd/maps/ixp4xx.c 	flash_write16(d.x[0], map->virt + adr);
d                  62 drivers/mtd/maps/lantiq-flash.c ltq_write16(struct map_info *map, map_word d, unsigned long adr)
d                  69 drivers/mtd/maps/lantiq-flash.c 	*(u16 *)(map->virt + adr) = d.x[0];
d                 111 drivers/mtd/maps/pcmciamtd.c 	map_word d = {{0}};
d                 115 drivers/mtd/maps/pcmciamtd.c 		return d;
d                 117 drivers/mtd/maps/pcmciamtd.c 	d.x[0] = readb(addr);
d                 118 drivers/mtd/maps/pcmciamtd.c 	pr_debug("ofs = 0x%08lx (%p) data = 0x%02lx\n", ofs, addr, d.x[0]);
d                 119 drivers/mtd/maps/pcmciamtd.c 	return d;
d                 126 drivers/mtd/maps/pcmciamtd.c 	map_word d = {{0}};
d                 130 drivers/mtd/maps/pcmciamtd.c 		return d;
d                 132 drivers/mtd/maps/pcmciamtd.c 	d.x[0] = readw(addr);
d                 133 drivers/mtd/maps/pcmciamtd.c 	pr_debug("ofs = 0x%08lx (%p) data = 0x%04lx\n", ofs, addr, d.x[0]);
d                 134 drivers/mtd/maps/pcmciamtd.c 	return d;
d                 164 drivers/mtd/maps/pcmciamtd.c static void pcmcia_write8_remap(struct map_info *map, map_word d, unsigned long adr)
d                 171 drivers/mtd/maps/pcmciamtd.c 	pr_debug("adr = 0x%08lx (%p)  data = 0x%02lx\n", adr, addr, d.x[0]);
d                 172 drivers/mtd/maps/pcmciamtd.c 	writeb(d.x[0], addr);
d                 176 drivers/mtd/maps/pcmciamtd.c static void pcmcia_write16_remap(struct map_info *map, map_word d, unsigned long adr)
d                 182 drivers/mtd/maps/pcmciamtd.c 	pr_debug("adr = 0x%08lx (%p)  data = 0x%04lx\n", adr, addr, d.x[0]);
d                 183 drivers/mtd/maps/pcmciamtd.c 	writew(d.x[0], addr);
d                 220 drivers/mtd/maps/pcmciamtd.c 	map_word d = {{0}};
d                 223 drivers/mtd/maps/pcmciamtd.c 		return d;
d                 225 drivers/mtd/maps/pcmciamtd.c 	d.x[0] = readb(win_base + ofs);
d                 227 drivers/mtd/maps/pcmciamtd.c 	      ofs, win_base + ofs, d.x[0]);
d                 228 drivers/mtd/maps/pcmciamtd.c 	return d;
d                 235 drivers/mtd/maps/pcmciamtd.c 	map_word d = {{0}};
d                 238 drivers/mtd/maps/pcmciamtd.c 		return d;
d                 240 drivers/mtd/maps/pcmciamtd.c 	d.x[0] = readw(win_base + ofs);
d                 242 drivers/mtd/maps/pcmciamtd.c 	      ofs, win_base + ofs, d.x[0]);
d                 243 drivers/mtd/maps/pcmciamtd.c 	return d;
d                 259 drivers/mtd/maps/pcmciamtd.c static void pcmcia_write8(struct map_info *map, map_word d, unsigned long adr)
d                 267 drivers/mtd/maps/pcmciamtd.c 	      adr, win_base + adr, d.x[0]);
d                 268 drivers/mtd/maps/pcmciamtd.c 	writeb(d.x[0], win_base + adr);
d                 272 drivers/mtd/maps/pcmciamtd.c static void pcmcia_write16(struct map_info *map, map_word d, unsigned long adr)
d                 280 drivers/mtd/maps/pcmciamtd.c 	      adr, win_base + adr, d.x[0]);
d                 281 drivers/mtd/maps/pcmciamtd.c 	writew(d.x[0], win_base + adr);
d                 129 drivers/mtd/maps/sbc_gxx.c static void sbc_gxx_write8(struct map_info *map, map_word d, unsigned long adr)
d                 133 drivers/mtd/maps/sbc_gxx.c 	writeb(d.x[0], iomapadr + (adr & WINDOW_MASK));
d                 343 drivers/mtd/mtd_blkdevs.c 	struct mtd_blktrans_dev *d;
d                 354 drivers/mtd/mtd_blkdevs.c 	list_for_each_entry(d, &tr->devs, list) {
d                 357 drivers/mtd/mtd_blkdevs.c 			if (d->devnum != last_devnum+1) {
d                 360 drivers/mtd/mtd_blkdevs.c 				list_add_tail(&new->list, &d->list);
d                 363 drivers/mtd/mtd_blkdevs.c 		} else if (d->devnum == new->devnum) {
d                 367 drivers/mtd/mtd_blkdevs.c 		} else if (d->devnum > new->devnum) {
d                 369 drivers/mtd/mtd_blkdevs.c 			list_add_tail(&new->list, &d->list);
d                 372 drivers/mtd/mtd_blkdevs.c 		last_devnum = d->devnum;
d                 160 drivers/mtd/mtdswap.c #define TREE_ROOT(d, name) (&d->trees[MTDSWAP_ ## name].root)
d                 161 drivers/mtd/mtdswap.c #define TREE_EMPTY(d, name) (TREE_ROOT(d, name)->rb_node == NULL)
d                 162 drivers/mtd/mtdswap.c #define TREE_NONEMPTY(d, name) (!TREE_EMPTY(d, name))
d                 163 drivers/mtd/mtdswap.c #define TREE_COUNT(d, name) (d->trees[MTDSWAP_ ## name].count)
d                 182 drivers/mtd/mtdswap.c static int mtdswap_gc(struct mtdswap_dev *d, unsigned int background);
d                 184 drivers/mtd/mtdswap.c static loff_t mtdswap_eb_offset(struct mtdswap_dev *d, struct swap_eb *eb)
d                 186 drivers/mtd/mtdswap.c 	return (loff_t)(eb - d->eb_data) * d->mtd->erasesize;
d                 189 drivers/mtd/mtdswap.c static void mtdswap_eb_detach(struct mtdswap_dev *d, struct swap_eb *eb)
d                 196 drivers/mtd/mtdswap.c 		oldidx = tp - &d->trees[0];
d                 198 drivers/mtd/mtdswap.c 		d->trees[oldidx].count--;
d                 222 drivers/mtd/mtdswap.c static void mtdswap_rb_add(struct mtdswap_dev *d, struct swap_eb *eb, int idx)
d                 226 drivers/mtd/mtdswap.c 	if (eb->root == &d->trees[idx].root)
d                 229 drivers/mtd/mtdswap.c 	mtdswap_eb_detach(d, eb);
d                 230 drivers/mtd/mtdswap.c 	root = &d->trees[idx].root;
d                 233 drivers/mtd/mtdswap.c 	d->trees[idx].count++;
d                 251 drivers/mtd/mtdswap.c static int mtdswap_handle_badblock(struct mtdswap_dev *d, struct swap_eb *eb)
d                 256 drivers/mtd/mtdswap.c 	d->spare_eblks--;
d                 258 drivers/mtd/mtdswap.c 	mtdswap_eb_detach(d, eb);
d                 262 drivers/mtd/mtdswap.c 	if (!mtd_can_have_bb(d->mtd))
d                 265 drivers/mtd/mtdswap.c 	offset = mtdswap_eb_offset(d, eb);
d                 266 drivers/mtd/mtdswap.c 	dev_warn(d->dev, "Marking bad block at %08llx\n", offset);
d                 267 drivers/mtd/mtdswap.c 	ret = mtd_block_markbad(d->mtd, offset);
d                 270 drivers/mtd/mtdswap.c 		dev_warn(d->dev, "Mark block bad failed for block at %08llx "
d                 279 drivers/mtd/mtdswap.c static int mtdswap_handle_write_error(struct mtdswap_dev *d, struct swap_eb *eb)
d                 282 drivers/mtd/mtdswap.c 	struct swap_eb *curr_write = d->curr_write;
d                 286 drivers/mtd/mtdswap.c 		d->curr_write = NULL;
d                 288 drivers/mtd/mtdswap.c 		if (!marked && d->curr_write_pos != 0) {
d                 289 drivers/mtd/mtdswap.c 			mtdswap_rb_add(d, eb, MTDSWAP_FAILING);
d                 294 drivers/mtd/mtdswap.c 	return mtdswap_handle_badblock(d, eb);
d                 297 drivers/mtd/mtdswap.c static int mtdswap_read_oob(struct mtdswap_dev *d, loff_t from,
d                 300 drivers/mtd/mtdswap.c 	int ret = mtd_read_oob(d->mtd, from, ops);
d                 306 drivers/mtd/mtdswap.c 		dev_warn(d->dev, "Read OOB failed %d for block at %08llx\n",
d                 312 drivers/mtd/mtdswap.c 		dev_warn(d->dev, "Read OOB return short read (%zd bytes not "
d                 321 drivers/mtd/mtdswap.c static int mtdswap_read_markers(struct mtdswap_dev *d, struct swap_eb *eb)
d                 328 drivers/mtd/mtdswap.c 	offset = mtdswap_eb_offset(d, eb);
d                 331 drivers/mtd/mtdswap.c 	if (mtd_can_have_bb(d->mtd) && mtd_block_isbad(d->mtd, offset))
d                 334 drivers/mtd/mtdswap.c 	ops.ooblen = 2 * d->mtd->oobavail;
d                 335 drivers/mtd/mtdswap.c 	ops.oobbuf = d->oob_buf;
d                 340 drivers/mtd/mtdswap.c 	ret = mtdswap_read_oob(d, offset, &ops);
d                 345 drivers/mtd/mtdswap.c 	data = (struct mtdswap_oobdata *)d->oob_buf;
d                 347 drivers/mtd/mtdswap.c 		(d->oob_buf + d->mtd->oobavail);
d                 367 drivers/mtd/mtdswap.c static int mtdswap_write_marker(struct mtdswap_dev *d, struct swap_eb *eb,
d                 384 drivers/mtd/mtdswap.c 		offset = mtdswap_eb_offset(d, eb);
d                 388 drivers/mtd/mtdswap.c 		offset = mtdswap_eb_offset(d, eb) + d->mtd->writesize;
d                 391 drivers/mtd/mtdswap.c 	ret = mtd_write_oob(d->mtd, offset, &ops);
d                 394 drivers/mtd/mtdswap.c 		dev_warn(d->dev, "Write OOB failed for block at %08llx "
d                 397 drivers/mtd/mtdswap.c 			mtdswap_handle_write_error(d, eb);
d                 402 drivers/mtd/mtdswap.c 		dev_warn(d->dev, "Short OOB write for block at %08llx: "
d                 416 drivers/mtd/mtdswap.c static void mtdswap_check_counts(struct mtdswap_dev *d)
d                 424 drivers/mtd/mtdswap.c 	for (i = 0; i < d->eblks; i++) {
d                 425 drivers/mtd/mtdswap.c 		eb = d->eb_data + i;
d                 440 drivers/mtd/mtdswap.c 	d->max_erase_count = MTDSWAP_ECNT_MAX(&hist_root);
d                 442 drivers/mtd/mtdswap.c 	for (i = 0; i < d->eblks; i++) {
d                 443 drivers/mtd/mtdswap.c 		eb = d->eb_data + i;
d                 455 drivers/mtd/mtdswap.c static void mtdswap_scan_eblks(struct mtdswap_dev *d)
d                 461 drivers/mtd/mtdswap.c 	for (i = 0; i < d->eblks; i++) {
d                 462 drivers/mtd/mtdswap.c 		eb = d->eb_data + i;
d                 464 drivers/mtd/mtdswap.c 		status = mtdswap_read_markers(d, eb);
d                 487 drivers/mtd/mtdswap.c 	mtdswap_check_counts(d);
d                 489 drivers/mtd/mtdswap.c 	for (i = 0; i < d->eblks; i++) {
d                 490 drivers/mtd/mtdswap.c 		eb = d->eb_data + i;
d                 496 drivers/mtd/mtdswap.c 		mtdswap_rb_add(d, eb, idx);
d                 504 drivers/mtd/mtdswap.c static void mtdswap_store_eb(struct mtdswap_dev *d, struct swap_eb *eb)
d                 507 drivers/mtd/mtdswap.c 	unsigned int maxweight = d->pages_per_eblk;
d                 509 drivers/mtd/mtdswap.c 	if (eb == d->curr_write)
d                 513 drivers/mtd/mtdswap.c 		mtdswap_rb_add(d, eb, MTDSWAP_BITFLIP);
d                 515 drivers/mtd/mtdswap.c 		mtdswap_rb_add(d, eb, MTDSWAP_FAILING);
d                 517 drivers/mtd/mtdswap.c 		mtdswap_rb_add(d, eb, MTDSWAP_USED);
d                 519 drivers/mtd/mtdswap.c 		mtdswap_rb_add(d, eb, MTDSWAP_DIRTY);
d                 521 drivers/mtd/mtdswap.c 		mtdswap_rb_add(d, eb, MTDSWAP_LOWFRAG);
d                 523 drivers/mtd/mtdswap.c 		mtdswap_rb_add(d, eb, MTDSWAP_HIFRAG);
d                 526 drivers/mtd/mtdswap.c static int mtdswap_erase_block(struct mtdswap_dev *d, struct swap_eb *eb)
d                 528 drivers/mtd/mtdswap.c 	struct mtd_info *mtd = d->mtd;
d                 534 drivers/mtd/mtdswap.c 	if (eb->erase_count > d->max_erase_count)
d                 535 drivers/mtd/mtdswap.c 		d->max_erase_count = eb->erase_count;
d                 539 drivers/mtd/mtdswap.c 	erase.addr	= mtdswap_eb_offset(d, eb);
d                 545 drivers/mtd/mtdswap.c 			dev_warn(d->dev,
d                 552 drivers/mtd/mtdswap.c 		dev_err(d->dev, "Cannot erase erase block %#llx on %s\n",
d                 555 drivers/mtd/mtdswap.c 		mtdswap_handle_badblock(d, eb);
d                 562 drivers/mtd/mtdswap.c static int mtdswap_map_free_block(struct mtdswap_dev *d, unsigned int page,
d                 566 drivers/mtd/mtdswap.c 	struct swap_eb *old_eb = d->curr_write;
d                 570 drivers/mtd/mtdswap.c 	if (old_eb == NULL || d->curr_write_pos >= d->pages_per_eblk) {
d                 572 drivers/mtd/mtdswap.c 			if (TREE_EMPTY(d, CLEAN))
d                 575 drivers/mtd/mtdswap.c 			clean_root = TREE_ROOT(d, CLEAN);
d                 579 drivers/mtd/mtdswap.c 			TREE_COUNT(d, CLEAN)--;
d                 581 drivers/mtd/mtdswap.c 			ret = mtdswap_write_marker(d, eb, MTDSWAP_TYPE_DIRTY);
d                 587 drivers/mtd/mtdswap.c 		d->curr_write_pos = 0;
d                 588 drivers/mtd/mtdswap.c 		d->curr_write = eb;
d                 590 drivers/mtd/mtdswap.c 			mtdswap_store_eb(d, old_eb);
d                 593 drivers/mtd/mtdswap.c 	*block = (d->curr_write - d->eb_data) * d->pages_per_eblk +
d                 594 drivers/mtd/mtdswap.c 		d->curr_write_pos;
d                 596 drivers/mtd/mtdswap.c 	d->curr_write->active_count++;
d                 597 drivers/mtd/mtdswap.c 	d->revmap[*block] = page;
d                 598 drivers/mtd/mtdswap.c 	d->curr_write_pos++;
d                 603 drivers/mtd/mtdswap.c static unsigned int mtdswap_free_page_cnt(struct mtdswap_dev *d)
d                 605 drivers/mtd/mtdswap.c 	return TREE_COUNT(d, CLEAN) * d->pages_per_eblk +
d                 606 drivers/mtd/mtdswap.c 		d->pages_per_eblk - d->curr_write_pos;
d                 609 drivers/mtd/mtdswap.c static unsigned int mtdswap_enough_free_pages(struct mtdswap_dev *d)
d                 611 drivers/mtd/mtdswap.c 	return mtdswap_free_page_cnt(d) > d->pages_per_eblk;
d                 614 drivers/mtd/mtdswap.c static int mtdswap_write_block(struct mtdswap_dev *d, char *buf,
d                 617 drivers/mtd/mtdswap.c 	struct mtd_info *mtd = d->mtd;
d                 625 drivers/mtd/mtdswap.c 		while (!mtdswap_enough_free_pages(d))
d                 626 drivers/mtd/mtdswap.c 			if (mtdswap_gc(d, 0) > 0)
d                 629 drivers/mtd/mtdswap.c 	ret = mtdswap_map_free_block(d, page, bp);
d                 630 drivers/mtd/mtdswap.c 	eb = d->eb_data + (*bp / d->pages_per_eblk);
d                 633 drivers/mtd/mtdswap.c 		d->curr_write = NULL;
d                 635 drivers/mtd/mtdswap.c 		d->revmap[*bp] = PAGE_UNDEF;
d                 645 drivers/mtd/mtdswap.c 		d->curr_write_pos--;
d                 647 drivers/mtd/mtdswap.c 		d->revmap[*bp] = PAGE_UNDEF;
d                 648 drivers/mtd/mtdswap.c 		mtdswap_handle_write_error(d, eb);
d                 653 drivers/mtd/mtdswap.c 		dev_err(d->dev, "Write to MTD device failed: %d (%zd written)",
d                 659 drivers/mtd/mtdswap.c 		dev_err(d->dev, "Short write to MTD device: %zd written",
d                 668 drivers/mtd/mtdswap.c 	d->curr_write_pos--;
d                 670 drivers/mtd/mtdswap.c 	d->revmap[*bp] = PAGE_UNDEF;
d                 675 drivers/mtd/mtdswap.c static int mtdswap_move_block(struct mtdswap_dev *d, unsigned int oldblock,
d                 678 drivers/mtd/mtdswap.c 	struct mtd_info *mtd = d->mtd;
d                 685 drivers/mtd/mtdswap.c 	page = d->revmap[oldblock];
d                 690 drivers/mtd/mtdswap.c 	ret = mtd_read(mtd, readpos, PAGE_SIZE, &retlen, d->page_buf);
d                 693 drivers/mtd/mtdswap.c 		oldeb = d->eb_data + oldblock / d->pages_per_eblk;
d                 696 drivers/mtd/mtdswap.c 		dev_err(d->dev, "Read Error: %d (block %u)\n", ret,
d                 706 drivers/mtd/mtdswap.c 		dev_err(d->dev, "Short read: %zd (block %u)\n", retlen,
d                 712 drivers/mtd/mtdswap.c 	ret = mtdswap_write_block(d, d->page_buf, page, newblock, 1);
d                 714 drivers/mtd/mtdswap.c 		d->page_data[page] = BLOCK_ERROR;
d                 715 drivers/mtd/mtdswap.c 		dev_err(d->dev, "Write error: %d\n", ret);
d                 719 drivers/mtd/mtdswap.c 	eb = d->eb_data + *newblock / d->pages_per_eblk;
d                 720 drivers/mtd/mtdswap.c 	d->page_data[page] = *newblock;
d                 721 drivers/mtd/mtdswap.c 	d->revmap[oldblock] = PAGE_UNDEF;
d                 722 drivers/mtd/mtdswap.c 	eb = d->eb_data + oldblock / d->pages_per_eblk;
d                 728 drivers/mtd/mtdswap.c 	d->page_data[page] = BLOCK_ERROR;
d                 729 drivers/mtd/mtdswap.c 	d->revmap[oldblock] = PAGE_UNDEF;
d                 733 drivers/mtd/mtdswap.c static int mtdswap_gc_eblock(struct mtdswap_dev *d, struct swap_eb *eb)
d                 739 drivers/mtd/mtdswap.c 	eblk_base = (eb - d->eb_data) * d->pages_per_eblk;
d                 741 drivers/mtd/mtdswap.c 	for (i = 0; i < d->pages_per_eblk; i++) {
d                 742 drivers/mtd/mtdswap.c 		if (d->spare_eblks < MIN_SPARE_EBLOCKS)
d                 746 drivers/mtd/mtdswap.c 		if (d->revmap[block] == PAGE_UNDEF)
d                 749 drivers/mtd/mtdswap.c 		ret = mtdswap_move_block(d, block, &newblock);
d                 757 drivers/mtd/mtdswap.c static int __mtdswap_choose_gc_tree(struct mtdswap_dev *d)
d                 761 drivers/mtd/mtdswap.c 	if (TREE_COUNT(d, CLEAN) < LOW_FRAG_GC_THRESHOLD)
d                 767 drivers/mtd/mtdswap.c 		if (d->trees[idx].root.rb_node != NULL)
d                 801 drivers/mtd/mtdswap.c static int mtdswap_choose_wl_tree(struct mtdswap_dev *d)
d                 809 drivers/mtd/mtdswap.c 		root = &d->trees[i].root;
d                 813 drivers/mtd/mtdswap.c 		wear = d->max_erase_count - MTDSWAP_ECNT_MIN(root);
d                 829 drivers/mtd/mtdswap.c static int mtdswap_choose_gc_tree(struct mtdswap_dev *d,
d                 834 drivers/mtd/mtdswap.c 	if (TREE_NONEMPTY(d, FAILING) &&
d                 835 drivers/mtd/mtdswap.c 		(background || (TREE_EMPTY(d, CLEAN) && TREE_EMPTY(d, DIRTY))))
d                 838 drivers/mtd/mtdswap.c 	idx = mtdswap_choose_wl_tree(d);
d                 842 drivers/mtd/mtdswap.c 	return __mtdswap_choose_gc_tree(d);
d                 845 drivers/mtd/mtdswap.c static struct swap_eb *mtdswap_pick_gc_eblk(struct mtdswap_dev *d,
d                 852 drivers/mtd/mtdswap.c 	if (background && TREE_COUNT(d, CLEAN) > CLEAN_BLOCK_THRESHOLD &&
d                 853 drivers/mtd/mtdswap.c 		TREE_EMPTY(d, DIRTY) && TREE_EMPTY(d, FAILING))
d                 856 drivers/mtd/mtdswap.c 	idx = mtdswap_choose_gc_tree(d, background);
d                 860 drivers/mtd/mtdswap.c 	rp = &d->trees[idx].root;
d                 865 drivers/mtd/mtdswap.c 	d->trees[idx].count--;
d                 874 drivers/mtd/mtdswap.c static unsigned int mtdswap_eblk_passes(struct mtdswap_dev *d,
d                 877 drivers/mtd/mtdswap.c 	struct mtd_info *mtd = d->mtd;
d                 880 drivers/mtd/mtdswap.c 	unsigned int *p1 = (unsigned int *)d->page_buf;
d                 881 drivers/mtd/mtdswap.c 	unsigned char *p2 = (unsigned char *)d->oob_buf;
d                 889 drivers/mtd/mtdswap.c 	ops.datbuf = d->page_buf;
d                 890 drivers/mtd/mtdswap.c 	ops.oobbuf = d->oob_buf;
d                 891 drivers/mtd/mtdswap.c 	base = mtdswap_eb_offset(d, eb);
d                 892 drivers/mtd/mtdswap.c 	mtd_pages = d->pages_per_eblk * PAGE_SIZE / mtd->writesize;
d                 898 drivers/mtd/mtdswap.c 			memset(d->page_buf, patt, mtd->writesize);
d                 899 drivers/mtd/mtdswap.c 			memset(d->oob_buf, patt, mtd->oobavail);
d                 925 drivers/mtd/mtdswap.c 		ret = mtdswap_erase_block(d, eb);
d                 934 drivers/mtd/mtdswap.c 	mtdswap_handle_badblock(d, eb);
d                 938 drivers/mtd/mtdswap.c static int mtdswap_gc(struct mtdswap_dev *d, unsigned int background)
d                 943 drivers/mtd/mtdswap.c 	if (d->spare_eblks < MIN_SPARE_EBLOCKS)
d                 946 drivers/mtd/mtdswap.c 	eb = mtdswap_pick_gc_eblk(d, background);
d                 950 drivers/mtd/mtdswap.c 	ret = mtdswap_gc_eblock(d, eb);
d                 955 drivers/mtd/mtdswap.c 		mtdswap_handle_badblock(d, eb);
d                 960 drivers/mtd/mtdswap.c 	ret = mtdswap_erase_block(d, eb);
d                 962 drivers/mtd/mtdswap.c 		(ret || !mtdswap_eblk_passes(d, eb)))
d                 966 drivers/mtd/mtdswap.c 		ret = mtdswap_write_marker(d, eb, MTDSWAP_TYPE_CLEAN);
d                 969 drivers/mtd/mtdswap.c 		mtdswap_rb_add(d, eb, MTDSWAP_CLEAN);
d                 971 drivers/mtd/mtdswap.c 		mtdswap_rb_add(d, eb, MTDSWAP_DIRTY);
d                 978 drivers/mtd/mtdswap.c 	struct mtdswap_dev *d = MTDSWAP_MBD_TO_MTDSWAP(dev);
d                 982 drivers/mtd/mtdswap.c 		ret = mtdswap_gc(d, 1);
d                 988 drivers/mtd/mtdswap.c static void mtdswap_cleanup(struct mtdswap_dev *d)
d                 990 drivers/mtd/mtdswap.c 	vfree(d->eb_data);
d                 991 drivers/mtd/mtdswap.c 	vfree(d->revmap);
d                 992 drivers/mtd/mtdswap.c 	vfree(d->page_data);
d                 993 drivers/mtd/mtdswap.c 	kfree(d->oob_buf);
d                 994 drivers/mtd/mtdswap.c 	kfree(d->page_buf);
d                 999 drivers/mtd/mtdswap.c 	struct mtdswap_dev *d = MTDSWAP_MBD_TO_MTDSWAP(dev);
d                1001 drivers/mtd/mtdswap.c 	mtd_sync(d->mtd);
d                1023 drivers/mtd/mtdswap.c 	struct mtdswap_dev *d = MTDSWAP_MBD_TO_MTDSWAP(dev);
d                1028 drivers/mtd/mtdswap.c 	d->sect_write_count++;
d                1030 drivers/mtd/mtdswap.c 	if (d->spare_eblks < MIN_SPARE_EBLOCKS)
d                1041 drivers/mtd/mtdswap.c 	mapped = d->page_data[page];
d                1043 drivers/mtd/mtdswap.c 		eb = d->eb_data + (mapped / d->pages_per_eblk);
d                1045 drivers/mtd/mtdswap.c 		mtdswap_store_eb(d, eb);
d                1046 drivers/mtd/mtdswap.c 		d->page_data[page] = BLOCK_UNDEF;
d                1047 drivers/mtd/mtdswap.c 		d->revmap[mapped] = PAGE_UNDEF;
d                1050 drivers/mtd/mtdswap.c 	ret = mtdswap_write_block(d, buf, page, &newblock, 0);
d                1051 drivers/mtd/mtdswap.c 	d->mtd_write_count++;
d                1056 drivers/mtd/mtdswap.c 	eb = d->eb_data + (newblock / d->pages_per_eblk);
d                1057 drivers/mtd/mtdswap.c 	d->page_data[page] = newblock;
d                1063 drivers/mtd/mtdswap.c static int mtdswap_auto_header(struct mtdswap_dev *d, char *buf)
d                1070 drivers/mtd/mtdswap.c 	hd->info.last_page = d->mbd_dev->size - 1;
d                1081 drivers/mtd/mtdswap.c 	struct mtdswap_dev *d = MTDSWAP_MBD_TO_MTDSWAP(dev);
d                1082 drivers/mtd/mtdswap.c 	struct mtd_info *mtd = d->mtd;
d                1089 drivers/mtd/mtdswap.c 	d->sect_read_count++;
d                1093 drivers/mtd/mtdswap.c 			return mtdswap_auto_header(d, buf);
d                1098 drivers/mtd/mtdswap.c 	realblock = d->page_data[page];
d                1107 drivers/mtd/mtdswap.c 	eb = d->eb_data + (realblock / d->pages_per_eblk);
d                1108 drivers/mtd/mtdswap.c 	BUG_ON(d->revmap[realblock] == PAGE_UNDEF);
d                1116 drivers/mtd/mtdswap.c 	d->mtd_read_count++;
d                1119 drivers/mtd/mtdswap.c 		mtdswap_rb_add(d, eb, MTDSWAP_BITFLIP);
d                1124 drivers/mtd/mtdswap.c 		dev_err(d->dev, "Read error %d\n", ret);
d                1126 drivers/mtd/mtdswap.c 		mtdswap_rb_add(d, eb, MTDSWAP_FAILING);
d                1135 drivers/mtd/mtdswap.c 		dev_err(d->dev, "Short read %zd\n", retlen);
d                1145 drivers/mtd/mtdswap.c 	struct mtdswap_dev *d = MTDSWAP_MBD_TO_MTDSWAP(dev);
d                1150 drivers/mtd/mtdswap.c 	d->discard_count++;
d                1153 drivers/mtd/mtdswap.c 		mapped = d->page_data[page];
d                1155 drivers/mtd/mtdswap.c 			eb = d->eb_data + (mapped / d->pages_per_eblk);
d                1157 drivers/mtd/mtdswap.c 			mtdswap_store_eb(d, eb);
d                1158 drivers/mtd/mtdswap.c 			d->page_data[page] = BLOCK_UNDEF;
d                1159 drivers/mtd/mtdswap.c 			d->revmap[mapped] = PAGE_UNDEF;
d                1160 drivers/mtd/mtdswap.c 			d->discard_page_count++;
d                1162 drivers/mtd/mtdswap.c 			d->page_data[page] = BLOCK_UNDEF;
d                1163 drivers/mtd/mtdswap.c 			d->discard_page_count++;
d                1172 drivers/mtd/mtdswap.c 	struct mtdswap_dev *d = (struct mtdswap_dev *) s->private;
d                1183 drivers/mtd/mtdswap.c 	mutex_lock(&d->mbd_dev->lock);
d                1186 drivers/mtd/mtdswap.c 		struct rb_root *root = &d->trees[i].root;
d                1189 drivers/mtd/mtdswap.c 			count[i] = d->trees[i].count;
d                1196 drivers/mtd/mtdswap.c 	if (d->curr_write) {
d                1198 drivers/mtd/mtdswap.c 		cwp = d->curr_write_pos;
d                1199 drivers/mtd/mtdswap.c 		cwecount = d->curr_write->erase_count;
d                1203 drivers/mtd/mtdswap.c 	for (i = 0; i < d->eblks; i++)
d                1204 drivers/mtd/mtdswap.c 		sum += d->eb_data[i].erase_count;
d                1206 drivers/mtd/mtdswap.c 	use_size = (uint64_t)d->eblks * d->mtd->erasesize;
d                1207 drivers/mtd/mtdswap.c 	bb_cnt = mtdswap_badblocks(d->mtd, use_size);
d                1210 drivers/mtd/mtdswap.c 	pages = d->mbd_dev->size;
d                1212 drivers/mtd/mtdswap.c 		if (d->page_data[i] != BLOCK_UNDEF)
d                1215 drivers/mtd/mtdswap.c 	mutex_unlock(&d->mbd_dev->lock);
d                1236 drivers/mtd/mtdswap.c 			cwp, d->pages_per_eblk - cwp, cwecount);
d                1242 drivers/mtd/mtdswap.c 	seq_printf(s, "mtdswap_readsect count: %llu\n", d->sect_read_count);
d                1243 drivers/mtd/mtdswap.c 	seq_printf(s, "mtdswap_writesect count: %llu\n", d->sect_write_count);
d                1244 drivers/mtd/mtdswap.c 	seq_printf(s, "mtdswap_discard count: %llu\n", d->discard_count);
d                1245 drivers/mtd/mtdswap.c 	seq_printf(s, "mtd read count: %llu\n", d->mtd_read_count);
d                1246 drivers/mtd/mtdswap.c 	seq_printf(s, "mtd write count: %llu\n", d->mtd_write_count);
d                1247 drivers/mtd/mtdswap.c 	seq_printf(s, "discarded pages count: %llu\n", d->discard_page_count);
d                1257 drivers/mtd/mtdswap.c static int mtdswap_add_debugfs(struct mtdswap_dev *d)
d                1259 drivers/mtd/mtdswap.c 	struct dentry *root = d->mtd->dbg.dfs_dir;
d                1268 drivers/mtd/mtdswap.c 	dent = debugfs_create_file("mtdswap_stats", S_IRUSR, root, d,
d                1271 drivers/mtd/mtdswap.c 		dev_err(d->dev, "debugfs_create_file failed\n");
d                1278 drivers/mtd/mtdswap.c static int mtdswap_init(struct mtdswap_dev *d, unsigned int eblocks,
d                1281 drivers/mtd/mtdswap.c 	struct mtd_info *mtd = d->mbd_dev->mtd;
d                1285 drivers/mtd/mtdswap.c 	d->mtd = mtd;
d                1286 drivers/mtd/mtdswap.c 	d->eblks = eblocks;
d                1287 drivers/mtd/mtdswap.c 	d->spare_eblks = spare_cnt;
d                1288 drivers/mtd/mtdswap.c 	d->pages_per_eblk = mtd->erasesize >> PAGE_SHIFT;
d                1290 drivers/mtd/mtdswap.c 	pages = d->mbd_dev->size;
d                1291 drivers/mtd/mtdswap.c 	blocks = eblocks * d->pages_per_eblk;
d                1294 drivers/mtd/mtdswap.c 		d->trees[i].root = RB_ROOT;
d                1296 drivers/mtd/mtdswap.c 	d->page_data = vmalloc(array_size(pages, sizeof(int)));
d                1297 drivers/mtd/mtdswap.c 	if (!d->page_data)
d                1300 drivers/mtd/mtdswap.c 	d->revmap = vmalloc(array_size(blocks, sizeof(int)));
d                1301 drivers/mtd/mtdswap.c 	if (!d->revmap)
d                1304 drivers/mtd/mtdswap.c 	eblk_bytes = sizeof(struct swap_eb)*d->eblks;
d                1305 drivers/mtd/mtdswap.c 	d->eb_data = vzalloc(eblk_bytes);
d                1306 drivers/mtd/mtdswap.c 	if (!d->eb_data)
d                1310 drivers/mtd/mtdswap.c 		d->page_data[i] = BLOCK_UNDEF;
d                1313 drivers/mtd/mtdswap.c 		d->revmap[i] = PAGE_UNDEF;
d                1315 drivers/mtd/mtdswap.c 	d->page_buf = kmalloc(PAGE_SIZE, GFP_KERNEL);
d                1316 drivers/mtd/mtdswap.c 	if (!d->page_buf)
d                1319 drivers/mtd/mtdswap.c 	d->oob_buf = kmalloc_array(2, mtd->oobavail, GFP_KERNEL);
d                1320 drivers/mtd/mtdswap.c 	if (!d->oob_buf)
d                1323 drivers/mtd/mtdswap.c 	mtdswap_scan_eblks(d);
d                1328 drivers/mtd/mtdswap.c 	kfree(d->page_buf);
d                1330 drivers/mtd/mtdswap.c 	vfree(d->eb_data);
d                1332 drivers/mtd/mtdswap.c 	vfree(d->revmap);
d                1334 drivers/mtd/mtdswap.c 	vfree(d->page_data);
d                1342 drivers/mtd/mtdswap.c 	struct mtdswap_dev *d;
d                1424 drivers/mtd/mtdswap.c 	d = kzalloc(sizeof(struct mtdswap_dev), GFP_KERNEL);
d                1425 drivers/mtd/mtdswap.c 	if (!d)
d                1430 drivers/mtd/mtdswap.c 		kfree(d);
d                1434 drivers/mtd/mtdswap.c 	d->mbd_dev = mbd_dev;
d                1435 drivers/mtd/mtdswap.c 	mbd_dev->priv = d;
d                1445 drivers/mtd/mtdswap.c 	if (mtdswap_init(d, eblocks, spare_cnt) < 0)
d                1451 drivers/mtd/mtdswap.c 	d->dev = disk_to_dev(mbd_dev->disk);
d                1453 drivers/mtd/mtdswap.c 	ret = mtdswap_add_debugfs(d);
d                1463 drivers/mtd/mtdswap.c 	mtdswap_cleanup(d);
d                1467 drivers/mtd/mtdswap.c 	kfree(d);
d                1472 drivers/mtd/mtdswap.c 	struct mtdswap_dev *d = MTDSWAP_MBD_TO_MTDSWAP(dev);
d                1475 drivers/mtd/mtdswap.c 	mtdswap_cleanup(d);
d                1476 drivers/mtd/mtdswap.c 	kfree(d);
d                 152 drivers/mtd/nand/raw/cafe_nand.c 	uint8_t d;
d                 154 drivers/mtd/nand/raw/cafe_nand.c 	cafe_read_buf(chip, &d, 1);
d                 155 drivers/mtd/nand/raw/cafe_nand.c 	cafe_dev_dbg(&cafe->pdev->dev, "Read %02x\n", d);
d                 157 drivers/mtd/nand/raw/cafe_nand.c 	return d;
d                 257 drivers/mtd/nand/raw/mxc_nand.c 	u8 *d = buf;
d                 266 drivers/mtd/nand/raw/mxc_nand.c 			memcpy16_fromio(d + i * oob_chunk_size,
d                 271 drivers/mtd/nand/raw/mxc_nand.c 		memcpy16_fromio(d + i * oob_chunk_size,
d                 277 drivers/mtd/nand/raw/mxc_nand.c 				      &d[i * oob_chunk_size],
d                 282 drivers/mtd/nand/raw/mxc_nand.c 			      &d[i * oob_chunk_size],
d                2486 drivers/mtd/nand/raw/nand_base.c 		unsigned long d = *((unsigned long *)bitmap);
d                2487 drivers/mtd/nand/raw/nand_base.c 		if (d == ~0UL)
d                2489 drivers/mtd/nand/raw/nand_base.c 		weight = hweight_long(d);
d                 458 drivers/mtd/nand/raw/nandsim.c 		int d;
d                 460 drivers/mtd/nand/raw/nandsim.c 		for (d = 0; d < 10; ++d)
d                 461 drivers/mtd/nand/raw/nandsim.c 			if (wear <= decile_max[d]) {
d                 462 drivers/mtd/nand/raw/nandsim.c 				deciles[d] += 1;
d                 253 drivers/mtd/ubi/debug.c 	struct ubi_debug_info *d;
d                 260 drivers/mtd/ubi/debug.c 	d = &ubi->dbg;
d                 262 drivers/mtd/ubi/debug.c 	if (dent == d->dfs_chk_gen)
d                 263 drivers/mtd/ubi/debug.c 		val = d->chk_gen;
d                 264 drivers/mtd/ubi/debug.c 	else if (dent == d->dfs_chk_io)
d                 265 drivers/mtd/ubi/debug.c 		val = d->chk_io;
d                 266 drivers/mtd/ubi/debug.c 	else if (dent == d->dfs_chk_fastmap)
d                 267 drivers/mtd/ubi/debug.c 		val = d->chk_fastmap;
d                 268 drivers/mtd/ubi/debug.c 	else if (dent == d->dfs_disable_bgt)
d                 269 drivers/mtd/ubi/debug.c 		val = d->disable_bgt;
d                 270 drivers/mtd/ubi/debug.c 	else if (dent == d->dfs_emulate_bitflips)
d                 271 drivers/mtd/ubi/debug.c 		val = d->emulate_bitflips;
d                 272 drivers/mtd/ubi/debug.c 	else if (dent == d->dfs_emulate_io_failures)
d                 273 drivers/mtd/ubi/debug.c 		val = d->emulate_io_failures;
d                 274 drivers/mtd/ubi/debug.c 	else if (dent == d->dfs_emulate_power_cut) {
d                 275 drivers/mtd/ubi/debug.c 		snprintf(buf, sizeof(buf), "%u\n", d->emulate_power_cut);
d                 279 drivers/mtd/ubi/debug.c 	} else if (dent == d->dfs_power_cut_min) {
d                 280 drivers/mtd/ubi/debug.c 		snprintf(buf, sizeof(buf), "%u\n", d->power_cut_min);
d                 284 drivers/mtd/ubi/debug.c 	} else if (dent == d->dfs_power_cut_max) {
d                 285 drivers/mtd/ubi/debug.c 		snprintf(buf, sizeof(buf), "%u\n", d->power_cut_max);
d                 316 drivers/mtd/ubi/debug.c 	struct ubi_debug_info *d;
d                 324 drivers/mtd/ubi/debug.c 	d = &ubi->dbg;
d                 332 drivers/mtd/ubi/debug.c 	if (dent == d->dfs_power_cut_min) {
d                 333 drivers/mtd/ubi/debug.c 		if (kstrtouint(buf, 0, &d->power_cut_min) != 0)
d                 336 drivers/mtd/ubi/debug.c 	} else if (dent == d->dfs_power_cut_max) {
d                 337 drivers/mtd/ubi/debug.c 		if (kstrtouint(buf, 0, &d->power_cut_max) != 0)
d                 340 drivers/mtd/ubi/debug.c 	} else if (dent == d->dfs_emulate_power_cut) {
d                 344 drivers/mtd/ubi/debug.c 			d->emulate_power_cut = val;
d                 357 drivers/mtd/ubi/debug.c 	if (dent == d->dfs_chk_gen)
d                 358 drivers/mtd/ubi/debug.c 		d->chk_gen = val;
d                 359 drivers/mtd/ubi/debug.c 	else if (dent == d->dfs_chk_io)
d                 360 drivers/mtd/ubi/debug.c 		d->chk_io = val;
d                 361 drivers/mtd/ubi/debug.c 	else if (dent == d->dfs_chk_fastmap)
d                 362 drivers/mtd/ubi/debug.c 		d->chk_fastmap = val;
d                 363 drivers/mtd/ubi/debug.c 	else if (dent == d->dfs_disable_bgt)
d                 364 drivers/mtd/ubi/debug.c 		d->disable_bgt = val;
d                 365 drivers/mtd/ubi/debug.c 	else if (dent == d->dfs_emulate_bitflips)
d                 366 drivers/mtd/ubi/debug.c 		d->emulate_bitflips = val;
d                 367 drivers/mtd/ubi/debug.c 	else if (dent == d->dfs_emulate_io_failures)
d                 368 drivers/mtd/ubi/debug.c 		d->emulate_io_failures = val;
d                 508 drivers/mtd/ubi/debug.c 	struct ubi_debug_info *d = &ubi->dbg;
d                 513 drivers/mtd/ubi/debug.c 	n = snprintf(d->dfs_dir_name, UBI_DFS_DIR_LEN + 1, UBI_DFS_DIR_NAME,
d                 522 drivers/mtd/ubi/debug.c 	fname = d->dfs_dir_name;
d                 526 drivers/mtd/ubi/debug.c 	d->dfs_dir = dent;
d                 529 drivers/mtd/ubi/debug.c 	dent = debugfs_create_file(fname, S_IWUSR, d->dfs_dir, (void *)ubi_num,
d                 533 drivers/mtd/ubi/debug.c 	d->dfs_chk_gen = dent;
d                 536 drivers/mtd/ubi/debug.c 	dent = debugfs_create_file(fname, S_IWUSR, d->dfs_dir, (void *)ubi_num,
d                 540 drivers/mtd/ubi/debug.c 	d->dfs_chk_io = dent;
d                 543 drivers/mtd/ubi/debug.c 	dent = debugfs_create_file(fname, S_IWUSR, d->dfs_dir, (void *)ubi_num,
d                 547 drivers/mtd/ubi/debug.c 	d->dfs_chk_fastmap = dent;
d                 550 drivers/mtd/ubi/debug.c 	dent = debugfs_create_file(fname, S_IWUSR, d->dfs_dir, (void *)ubi_num,
d                 554 drivers/mtd/ubi/debug.c 	d->dfs_disable_bgt = dent;
d                 557 drivers/mtd/ubi/debug.c 	dent = debugfs_create_file(fname, S_IWUSR, d->dfs_dir, (void *)ubi_num,
d                 561 drivers/mtd/ubi/debug.c 	d->dfs_emulate_bitflips = dent;
d                 564 drivers/mtd/ubi/debug.c 	dent = debugfs_create_file(fname, S_IWUSR, d->dfs_dir, (void *)ubi_num,
d                 568 drivers/mtd/ubi/debug.c 	d->dfs_emulate_io_failures = dent;
d                 571 drivers/mtd/ubi/debug.c 	dent = debugfs_create_file(fname, S_IWUSR, d->dfs_dir, (void *)ubi_num,
d                 575 drivers/mtd/ubi/debug.c 	d->dfs_emulate_power_cut = dent;
d                 578 drivers/mtd/ubi/debug.c 	dent = debugfs_create_file(fname, S_IWUSR, d->dfs_dir, (void *)ubi_num,
d                 582 drivers/mtd/ubi/debug.c 	d->dfs_power_cut_min = dent;
d                 585 drivers/mtd/ubi/debug.c 	dent = debugfs_create_file(fname, S_IWUSR, d->dfs_dir, (void *)ubi_num,
d                 589 drivers/mtd/ubi/debug.c 	d->dfs_power_cut_max = dent;
d                 592 drivers/mtd/ubi/debug.c 	dent = debugfs_create_file(fname, S_IRUSR, d->dfs_dir, (void *)ubi_num,
d                 600 drivers/mtd/ubi/debug.c 	debugfs_remove_recursive(d->dfs_dir);
d                  72 drivers/net/bonding/bond_debugfs.c 	struct dentry *d;
d                  77 drivers/net/bonding/bond_debugfs.c 	d = debugfs_rename(bonding_debug_root, bond->debug_dir,
d                  79 drivers/net/bonding/bond_debugfs.c 	if (d) {
d                  80 drivers/net/bonding/bond_debugfs.c 		bond->debug_dir = d;
d                 141 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_sysfs_store_option(struct device *d,
d                 145 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 165 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_slaves(struct device *d,
d                 168 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 198 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_mode(struct device *d,
d                 201 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 211 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_xmit_hash(struct device *d,
d                 215 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 226 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_arp_validate(struct device *d,
d                 230 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 242 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_arp_all_targets(struct device *d,
d                 246 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 258 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_fail_over_mac(struct device *d,
d                 262 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 274 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_arp_interval(struct device *d,
d                 278 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 286 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_arp_targets(struct device *d,
d                 290 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 307 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_downdelay(struct device *d,
d                 311 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 318 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_updelay(struct device *d,
d                 322 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 330 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_peer_notif_delay(struct device *d,
d                 334 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 343 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_lacp(struct device *d,
d                 347 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 357 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_min_links(struct device *d,
d                 361 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 368 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_ad_select(struct device *d,
d                 372 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 383 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_num_peer_notif(struct device *d,
d                 387 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 396 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_miimon(struct device *d,
d                 400 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 408 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_primary(struct device *d,
d                 412 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 428 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_primary_reselect(struct device *d,
d                 432 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 445 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_carrier(struct device *d,
d                 449 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 458 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_active_slave(struct device *d,
d                 462 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 478 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_mii_status(struct device *d,
d                 482 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 490 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_ad_aggregator(struct device *d,
d                 495 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 510 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_ad_num_ports(struct device *d,
d                 515 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 530 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_ad_actor_key(struct device *d,
d                 535 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 550 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_ad_partner_key(struct device *d,
d                 555 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 570 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_ad_partner_mac(struct device *d,
d                 575 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 588 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_queue_id(struct device *d,
d                 592 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 623 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_slaves_active(struct device *d,
d                 627 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 635 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_resend_igmp(struct device *d,
d                 639 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 647 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_lp_interval(struct device *d,
d                 651 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 658 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_tlb_dynamic_lb(struct device *d,
d                 662 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 668 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_packets_per_slave(struct device *d,
d                 672 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 680 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_ad_actor_sys_prio(struct device *d,
d                 684 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 694 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_ad_actor_system(struct device *d,
d                 698 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 709 drivers/net/bonding/bond_sysfs.c static ssize_t bonding_show_ad_user_port_key(struct device *d,
d                 713 drivers/net/bonding/bond_sysfs.c 	struct bonding *bond = to_bond(d);
d                 297 drivers/net/can/peak_canfd/peak_canfd.c 		memcpy(cf->data, msg->d, cf->len);
d                 693 drivers/net/can/peak_canfd/peak_canfd.c 	memcpy(msg->d, cf->data, cf->len);
d                 476 drivers/net/can/usb/esd_usb2.c static ssize_t show_firmware(struct device *d,
d                 479 drivers/net/can/usb/esd_usb2.c 	struct usb_interface *intf = to_usb_interface(d);
d                 489 drivers/net/can/usb/esd_usb2.c static ssize_t show_hardware(struct device *d,
d                 492 drivers/net/can/usb/esd_usb2.c 	struct usb_interface *intf = to_usb_interface(d);
d                 502 drivers/net/can/usb/esd_usb2.c static ssize_t show_nets(struct device *d,
d                 505 drivers/net/can/usb/esd_usb2.c 	struct usb_interface *intf = to_usb_interface(d);
d                 939 drivers/net/can/usb/peak_usb/pcan_usb_core.c static int peak_usb_do_device_exit(struct device *d, void *arg)
d                 941 drivers/net/can/usb/peak_usb/pcan_usb_core.c 	struct usb_interface *intf = to_usb_interface(d);
d                 507 drivers/net/can/usb/peak_usb/pcan_usb_fd.c 		memcpy(cfd->data, rm->d, cfd->len);
d                 752 drivers/net/can/usb/peak_usb/pcan_usb_fd.c 	memcpy(tx_msg->d, cfd->data, cfd->len);
d                 126 drivers/net/dsa/mv88e6xxx/chip.c static void mv88e6xxx_g1_irq_mask(struct irq_data *d)
d                 128 drivers/net/dsa/mv88e6xxx/chip.c 	struct mv88e6xxx_chip *chip = irq_data_get_irq_chip_data(d);
d                 129 drivers/net/dsa/mv88e6xxx/chip.c 	unsigned int n = d->hwirq;
d                 134 drivers/net/dsa/mv88e6xxx/chip.c static void mv88e6xxx_g1_irq_unmask(struct irq_data *d)
d                 136 drivers/net/dsa/mv88e6xxx/chip.c 	struct mv88e6xxx_chip *chip = irq_data_get_irq_chip_data(d);
d                 137 drivers/net/dsa/mv88e6xxx/chip.c 	unsigned int n = d->hwirq;
d                 191 drivers/net/dsa/mv88e6xxx/chip.c static void mv88e6xxx_g1_irq_bus_lock(struct irq_data *d)
d                 193 drivers/net/dsa/mv88e6xxx/chip.c 	struct mv88e6xxx_chip *chip = irq_data_get_irq_chip_data(d);
d                 198 drivers/net/dsa/mv88e6xxx/chip.c static void mv88e6xxx_g1_irq_bus_sync_unlock(struct irq_data *d)
d                 200 drivers/net/dsa/mv88e6xxx/chip.c 	struct mv88e6xxx_chip *chip = irq_data_get_irq_chip_data(d);
d                 228 drivers/net/dsa/mv88e6xxx/chip.c static int mv88e6xxx_g1_irq_domain_map(struct irq_domain *d,
d                 232 drivers/net/dsa/mv88e6xxx/chip.c 	struct mv88e6xxx_chip *chip = d->host_data;
d                 234 drivers/net/dsa/mv88e6xxx/chip.c 	irq_set_chip_data(irq, d->host_data);
d                 978 drivers/net/dsa/mv88e6xxx/global2.c static void mv88e6xxx_g2_irq_mask(struct irq_data *d)
d                 980 drivers/net/dsa/mv88e6xxx/global2.c 	struct mv88e6xxx_chip *chip = irq_data_get_irq_chip_data(d);
d                 981 drivers/net/dsa/mv88e6xxx/global2.c 	unsigned int n = d->hwirq;
d                 986 drivers/net/dsa/mv88e6xxx/global2.c static void mv88e6xxx_g2_irq_unmask(struct irq_data *d)
d                 988 drivers/net/dsa/mv88e6xxx/global2.c 	struct mv88e6xxx_chip *chip = irq_data_get_irq_chip_data(d);
d                 989 drivers/net/dsa/mv88e6xxx/global2.c 	unsigned int n = d->hwirq;
d                1020 drivers/net/dsa/mv88e6xxx/global2.c static void mv88e6xxx_g2_irq_bus_lock(struct irq_data *d)
d                1022 drivers/net/dsa/mv88e6xxx/global2.c 	struct mv88e6xxx_chip *chip = irq_data_get_irq_chip_data(d);
d                1027 drivers/net/dsa/mv88e6xxx/global2.c static void mv88e6xxx_g2_irq_bus_sync_unlock(struct irq_data *d)
d                1029 drivers/net/dsa/mv88e6xxx/global2.c 	struct mv88e6xxx_chip *chip = irq_data_get_irq_chip_data(d);
d                1047 drivers/net/dsa/mv88e6xxx/global2.c static int mv88e6xxx_g2_irq_domain_map(struct irq_domain *d,
d                1051 drivers/net/dsa/mv88e6xxx/global2.c 	struct mv88e6xxx_chip *chip = d->host_data;
d                1053 drivers/net/dsa/mv88e6xxx/global2.c 	irq_set_chip_data(irq, d->host_data);
d                 392 drivers/net/dsa/rtl8366rb.c static u32 rtl8366rb_get_irqmask(struct irq_data *d)
d                 394 drivers/net/dsa/rtl8366rb.c 	int line = irqd_to_hwirq(d);
d                 407 drivers/net/dsa/rtl8366rb.c static void rtl8366rb_mask_irq(struct irq_data *d)
d                 409 drivers/net/dsa/rtl8366rb.c 	struct realtek_smi *smi = irq_data_get_irq_chip_data(d);
d                 413 drivers/net/dsa/rtl8366rb.c 				 rtl8366rb_get_irqmask(d), 0);
d                 418 drivers/net/dsa/rtl8366rb.c static void rtl8366rb_unmask_irq(struct irq_data *d)
d                 420 drivers/net/dsa/rtl8366rb.c 	struct realtek_smi *smi = irq_data_get_irq_chip_data(d);
d                 424 drivers/net/dsa/rtl8366rb.c 				 rtl8366rb_get_irqmask(d),
d                 425 drivers/net/dsa/rtl8366rb.c 				 rtl8366rb_get_irqmask(d));
d                 479 drivers/net/dsa/rtl8366rb.c static void rtl8366rb_irq_unmap(struct irq_domain *d, unsigned int irq)
d                2001 drivers/net/dsa/sja1105/sja1105_main.c #define to_tagger(d) \
d                2002 drivers/net/dsa/sja1105/sja1105_main.c 	container_of((d), struct sja1105_tagger_data, rxtstamp_work)
d                2003 drivers/net/dsa/sja1105/sja1105_main.c #define to_sja1105(d) \
d                2004 drivers/net/dsa/sja1105/sja1105_main.c 	container_of((d), struct sja1105_private, tagger_data)
d                  53 drivers/net/dsa/sja1105/sja1105_ptp.c #define ptp_to_sja1105(d) container_of((d), struct sja1105_private, ptp_caps)
d                  54 drivers/net/dsa/sja1105/sja1105_ptp.c #define cc_to_sja1105(d) container_of((d), struct sja1105_private, tstamp_cc)
d                  55 drivers/net/dsa/sja1105/sja1105_ptp.c #define dw_to_sja1105(d) container_of((d), struct sja1105_private, refresh_work)
d                 514 drivers/net/ethernet/8390/etherh.c 	s = strchr(cd.d.string, '(');
d                 529 drivers/net/ethernet/8390/etherh.c 	       dev_name(&ec->dev), cd.d.string);
d                1318 drivers/net/ethernet/8390/pcnet_cs.c     u_short *d = dest;
d                1327 drivers/net/ethernet/8390/pcnet_cs.c 	do { *d++ = __raw_readw(s++); } while (--c);
d                1331 drivers/net/ethernet/8390/pcnet_cs.c 	*((u_char *)d) = readw(s) & 0xff;
d                1336 drivers/net/ethernet/8390/pcnet_cs.c     u_short __iomem *d = dest;
d                1345 drivers/net/ethernet/8390/pcnet_cs.c 	do { __raw_writew(*s++, d++); } while (--c);
d                1349 drivers/net/ethernet/8390/pcnet_cs.c 	writew((readw(d) & 0xff00) | *(u_char *)s, d);
d                 648 drivers/net/ethernet/adaptec/starfire.c 	struct device *d = &pdev->dev;
d                 670 drivers/net/ethernet/adaptec/starfire.c 		dev_err(d, "no PCI MEM resources, aborting\n");
d                 683 drivers/net/ethernet/adaptec/starfire.c 		dev_err(d, "cannot reserve PCI resources, aborting\n");
d                 689 drivers/net/ethernet/adaptec/starfire.c 		dev_err(d, "cannot remap %#x @ %#lx, aborting\n",
d                  50 drivers/net/ethernet/amd/hplance.c static int hplance_init_one(struct dio_dev *d, const struct dio_device_id *ent);
d                  51 drivers/net/ethernet/amd/hplance.c static void hplance_init(struct net_device *dev, struct dio_dev *d);
d                  52 drivers/net/ethernet/amd/hplance.c static void hplance_remove_one(struct dio_dev *d);
d                  84 drivers/net/ethernet/amd/hplance.c static int hplance_init_one(struct dio_dev *d, const struct dio_device_id *ent)
d                  94 drivers/net/ethernet/amd/hplance.c 	if (!request_mem_region(dio_resource_start(d),
d                  95 drivers/net/ethernet/amd/hplance.c 				dio_resource_len(d), d->name))
d                  98 drivers/net/ethernet/amd/hplance.c 	hplance_init(dev, d);
d                 103 drivers/net/ethernet/amd/hplance.c 	dio_set_drvdata(d, dev);
d                 106 drivers/net/ethernet/amd/hplance.c 	       dev->name, d->name, d->scode, dev->dev_addr, d->ipl);
d                 111 drivers/net/ethernet/amd/hplance.c 	release_mem_region(dio_resource_start(d), dio_resource_len(d));
d                 118 drivers/net/ethernet/amd/hplance.c static void hplance_remove_one(struct dio_dev *d)
d                 120 drivers/net/ethernet/amd/hplance.c 	struct net_device *dev = dio_get_drvdata(d);
d                 123 drivers/net/ethernet/amd/hplance.c 	release_mem_region(dio_resource_start(d), dio_resource_len(d));
d                 128 drivers/net/ethernet/amd/hplance.c static void hplance_init(struct net_device *dev, struct dio_dev *d)
d                 130 drivers/net/ethernet/amd/hplance.c 	unsigned long va = (d->resource.start + DIO_VIRADDRBASE);
d                 152 drivers/net/ethernet/amd/hplance.c 	lp->lance.name = d->name;
d                 157 drivers/net/ethernet/amd/hplance.c 	lp->lance.irq = d->ipl;
d                 106 drivers/net/ethernet/broadcom/bcmsysport.c 				     void __iomem *d,
d                 111 drivers/net/ethernet/broadcom/bcmsysport.c 		     d + DESC_ADDR_HI_STATUS_LEN);
d                 113 drivers/net/ethernet/broadcom/bcmsysport.c 	writel_relaxed(lower_32_bits(addr), d + DESC_ADDR_LO);
d                2658 drivers/net/ethernet/broadcom/bcmsysport.c static int __maybe_unused bcm_sysport_suspend(struct device *d)
d                2660 drivers/net/ethernet/broadcom/bcmsysport.c 	struct net_device *dev = dev_get_drvdata(d);
d                2714 drivers/net/ethernet/broadcom/bcmsysport.c 	if (device_may_wakeup(d) && priv->wolopts)
d                2720 drivers/net/ethernet/broadcom/bcmsysport.c static int __maybe_unused bcm_sysport_resume(struct device *d)
d                2722 drivers/net/ethernet/broadcom/bcmsysport.c 	struct net_device *dev = dev_get_drvdata(d);
d                  92 drivers/net/ethernet/broadcom/genet/bcmgenet.c 					     void __iomem *d, u32 value)
d                  94 drivers/net/ethernet/broadcom/genet/bcmgenet.c 	bcmgenet_writel(value, d + DMA_DESC_LENGTH_STATUS);
d                  98 drivers/net/ethernet/broadcom/genet/bcmgenet.c 					    void __iomem *d)
d                 100 drivers/net/ethernet/broadcom/genet/bcmgenet.c 	return bcmgenet_readl(d + DMA_DESC_LENGTH_STATUS);
d                 104 drivers/net/ethernet/broadcom/genet/bcmgenet.c 				    void __iomem *d,
d                 107 drivers/net/ethernet/broadcom/genet/bcmgenet.c 	bcmgenet_writel(lower_32_bits(addr), d + DMA_DESC_ADDRESS_LO);
d                 115 drivers/net/ethernet/broadcom/genet/bcmgenet.c 		bcmgenet_writel(upper_32_bits(addr), d + DMA_DESC_ADDRESS_HI);
d                 121 drivers/net/ethernet/broadcom/genet/bcmgenet.c 			       void __iomem *d, dma_addr_t addr, u32 val)
d                 123 drivers/net/ethernet/broadcom/genet/bcmgenet.c 	dmadesc_set_addr(priv, d, addr);
d                 124 drivers/net/ethernet/broadcom/genet/bcmgenet.c 	dmadesc_set_length_status(priv, d, val);
d                 128 drivers/net/ethernet/broadcom/genet/bcmgenet.c 					  void __iomem *d)
d                 132 drivers/net/ethernet/broadcom/genet/bcmgenet.c 	addr = bcmgenet_readl(d + DMA_DESC_ADDRESS_LO);
d                 140 drivers/net/ethernet/broadcom/genet/bcmgenet.c 		addr |= (u64)bcmgenet_readl(d + DMA_DESC_ADDRESS_HI) << 32;
d                3610 drivers/net/ethernet/broadcom/genet/bcmgenet.c static int bcmgenet_resume(struct device *d)
d                3612 drivers/net/ethernet/broadcom/genet/bcmgenet.c 	struct net_device *dev = dev_get_drvdata(d);
d                3670 drivers/net/ethernet/broadcom/genet/bcmgenet.c 	if (!device_may_wakeup(d))
d                3689 drivers/net/ethernet/broadcom/genet/bcmgenet.c static int bcmgenet_suspend(struct device *d)
d                3691 drivers/net/ethernet/broadcom/genet/bcmgenet.c 	struct net_device *dev = dev_get_drvdata(d);
d                3702 drivers/net/ethernet/broadcom/genet/bcmgenet.c 	if (!device_may_wakeup(d))
d                3706 drivers/net/ethernet/broadcom/genet/bcmgenet.c 	if (device_may_wakeup(d) && priv->wolopts) {
d                3717 drivers/net/ethernet/broadcom/genet/bcmgenet.c 		bcmgenet_resume(d);
d                 142 drivers/net/ethernet/broadcom/sb1250-mac.c #define SBDMA_NEXTBUF(d,f) ((((d)->f+1) == (d)->sbdma_dscrtable_end) ? \
d                 143 drivers/net/ethernet/broadcom/sb1250-mac.c 			  (d)->sbdma_dscrtable : (d)->f+1)
d                 269 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbdma_initctx(struct sbmacdma *d, struct sbmac_softc *s, int chan,
d                 271 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbdma_channel_start(struct sbmacdma *d, int rxtx);
d                 272 drivers/net/ethernet/broadcom/sb1250-mac.c static int sbdma_add_rcvbuffer(struct sbmac_softc *sc, struct sbmacdma *d,
d                 274 drivers/net/ethernet/broadcom/sb1250-mac.c static int sbdma_add_txbuffer(struct sbmacdma *d, struct sk_buff *m);
d                 275 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbdma_emptyring(struct sbmacdma *d);
d                 276 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbdma_fillring(struct sbmac_softc *sc, struct sbmacdma *d);
d                 277 drivers/net/ethernet/broadcom/sb1250-mac.c static int sbdma_rx_process(struct sbmac_softc *sc, struct sbmacdma *d,
d                 279 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbdma_tx_process(struct sbmac_softc *sc, struct sbmacdma *d,
d                 557 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbdma_initctx(struct sbmacdma *d, struct sbmac_softc *s, int chan,
d                 568 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_eth       = s;
d                 569 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_channel   = chan;
d                 570 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_txdir     = txrx;
d                 603 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_config0 =
d                 605 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_config1 =
d                 607 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_dscrbase =
d                 609 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_dscrcnt =
d                 611 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_curdscr =
d                 613 drivers/net/ethernet/broadcom/sb1250-mac.c 	if (d->sbdma_txdir)
d                 614 drivers/net/ethernet/broadcom/sb1250-mac.c 		d->sbdma_oodpktlost = NULL;
d                 616 drivers/net/ethernet/broadcom/sb1250-mac.c 		d->sbdma_oodpktlost =
d                 623 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_maxdescr = maxdescr;
d                 625 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_dscrtable_unaligned = kcalloc(d->sbdma_maxdescr + 1,
d                 626 drivers/net/ethernet/broadcom/sb1250-mac.c 					       sizeof(*d->sbdma_dscrtable),
d                 633 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_dscrtable = (struct sbdmadscr *)
d                 634 drivers/net/ethernet/broadcom/sb1250-mac.c 			     ALIGN((unsigned long)d->sbdma_dscrtable_unaligned,
d                 635 drivers/net/ethernet/broadcom/sb1250-mac.c 				   sizeof(*d->sbdma_dscrtable));
d                 637 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_dscrtable_end = d->sbdma_dscrtable + d->sbdma_maxdescr;
d                 639 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_dscrtable_phys = virt_to_phys(d->sbdma_dscrtable);
d                 645 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_ctxtable = kcalloc(d->sbdma_maxdescr,
d                 646 drivers/net/ethernet/broadcom/sb1250-mac.c 				    sizeof(*d->sbdma_ctxtable), GFP_KERNEL);
d                 655 drivers/net/ethernet/broadcom/sb1250-mac.c 		d->sbdma_int_pktcnt = int_pktcnt;
d                 657 drivers/net/ethernet/broadcom/sb1250-mac.c 		d->sbdma_int_pktcnt = 1;
d                 662 drivers/net/ethernet/broadcom/sb1250-mac.c 		d->sbdma_int_timeout = int_timeout;
d                 664 drivers/net/ethernet/broadcom/sb1250-mac.c 		d->sbdma_int_timeout = 0;
d                 683 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbdma_channel_start(struct sbmacdma *d, int rxtx)
d                 690 drivers/net/ethernet/broadcom/sb1250-mac.c 	__raw_writeq(V_DMA_INT_TIMEOUT(d->sbdma_int_timeout) |
d                 691 drivers/net/ethernet/broadcom/sb1250-mac.c 		       0, d->sbdma_config1);
d                 693 drivers/net/ethernet/broadcom/sb1250-mac.c 		       V_DMA_RINGSZ(d->sbdma_maxdescr) |
d                 694 drivers/net/ethernet/broadcom/sb1250-mac.c 		       V_DMA_INT_PKTCNT(d->sbdma_int_pktcnt) |
d                 695 drivers/net/ethernet/broadcom/sb1250-mac.c 		       0, d->sbdma_config0);
d                 697 drivers/net/ethernet/broadcom/sb1250-mac.c 	__raw_writeq(0, d->sbdma_config1);
d                 698 drivers/net/ethernet/broadcom/sb1250-mac.c 	__raw_writeq(V_DMA_RINGSZ(d->sbdma_maxdescr) |
d                 699 drivers/net/ethernet/broadcom/sb1250-mac.c 		       0, d->sbdma_config0);
d                 702 drivers/net/ethernet/broadcom/sb1250-mac.c 	__raw_writeq(d->sbdma_dscrtable_phys, d->sbdma_dscrbase);
d                 708 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_addptr = d->sbdma_dscrtable;
d                 709 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_remptr = d->sbdma_dscrtable;
d                 724 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbdma_channel_stop(struct sbmacdma *d)
d                 730 drivers/net/ethernet/broadcom/sb1250-mac.c 	__raw_writeq(0, d->sbdma_config1);
d                 732 drivers/net/ethernet/broadcom/sb1250-mac.c 	__raw_writeq(0, d->sbdma_dscrbase);
d                 734 drivers/net/ethernet/broadcom/sb1250-mac.c 	__raw_writeq(0, d->sbdma_config0);
d                 740 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_addptr = NULL;
d                 741 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_remptr = NULL;
d                 771 drivers/net/ethernet/broadcom/sb1250-mac.c static int sbdma_add_rcvbuffer(struct sbmac_softc *sc, struct sbmacdma *d,
d                 782 drivers/net/ethernet/broadcom/sb1250-mac.c 	dsc = d->sbdma_addptr;
d                 783 drivers/net/ethernet/broadcom/sb1250-mac.c 	nextdsc = SBDMA_NEXTBUF(d,sbdma_addptr);
d                 791 drivers/net/ethernet/broadcom/sb1250-mac.c 	if (nextdsc == d->sbdma_remptr) {
d                 854 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_ctxtable[dsc-d->sbdma_dscrtable] = sb_new;
d                 860 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_addptr = nextdsc;
d                 866 drivers/net/ethernet/broadcom/sb1250-mac.c 	__raw_writeq(1, d->sbdma_dscrcnt);
d                 887 drivers/net/ethernet/broadcom/sb1250-mac.c static int sbdma_add_txbuffer(struct sbmacdma *d, struct sk_buff *sb)
d                 897 drivers/net/ethernet/broadcom/sb1250-mac.c 	dsc = d->sbdma_addptr;
d                 898 drivers/net/ethernet/broadcom/sb1250-mac.c 	nextdsc = SBDMA_NEXTBUF(d,sbdma_addptr);
d                 906 drivers/net/ethernet/broadcom/sb1250-mac.c 	if (nextdsc == d->sbdma_remptr) {
d                 944 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_ctxtable[dsc-d->sbdma_dscrtable] = sb;
d                 950 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_addptr = nextdsc;
d                 956 drivers/net/ethernet/broadcom/sb1250-mac.c 	__raw_writeq(1, d->sbdma_dscrcnt);
d                 976 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbdma_emptyring(struct sbmacdma *d)
d                 981 drivers/net/ethernet/broadcom/sb1250-mac.c 	for (idx = 0; idx < d->sbdma_maxdescr; idx++) {
d                 982 drivers/net/ethernet/broadcom/sb1250-mac.c 		sb = d->sbdma_ctxtable[idx];
d                 985 drivers/net/ethernet/broadcom/sb1250-mac.c 			d->sbdma_ctxtable[idx] = NULL;
d                1005 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbdma_fillring(struct sbmac_softc *sc, struct sbmacdma *d)
d                1010 drivers/net/ethernet/broadcom/sb1250-mac.c 		if (sbdma_add_rcvbuffer(sc, d, NULL) != 0)
d                1052 drivers/net/ethernet/broadcom/sb1250-mac.c static int sbdma_rx_process(struct sbmac_softc *sc, struct sbmacdma *d,
d                1064 drivers/net/ethernet/broadcom/sb1250-mac.c 	prefetch(d);
d                1084 drivers/net/ethernet/broadcom/sb1250-mac.c 		dsc = d->sbdma_remptr;
d                1085 drivers/net/ethernet/broadcom/sb1250-mac.c 		curidx = dsc - d->sbdma_dscrtable;
d                1088 drivers/net/ethernet/broadcom/sb1250-mac.c 		prefetch(&d->sbdma_ctxtable[curidx]);
d                1090 drivers/net/ethernet/broadcom/sb1250-mac.c 		hwidx = ((__raw_readq(d->sbdma_curdscr) & M_DMA_CURDSCR_ADDR) -
d                1091 drivers/net/ethernet/broadcom/sb1250-mac.c 			 d->sbdma_dscrtable_phys) /
d                1092 drivers/net/ethernet/broadcom/sb1250-mac.c 			sizeof(*d->sbdma_dscrtable);
d                1107 drivers/net/ethernet/broadcom/sb1250-mac.c 		sb = d->sbdma_ctxtable[curidx];
d                1108 drivers/net/ethernet/broadcom/sb1250-mac.c 		d->sbdma_ctxtable[curidx] = NULL;
d                1126 drivers/net/ethernet/broadcom/sb1250-mac.c 			if (unlikely(sbdma_add_rcvbuffer(sc, d, NULL) ==
d                1130 drivers/net/ethernet/broadcom/sb1250-mac.c 				sbdma_add_rcvbuffer(sc, d, sb);
d                1133 drivers/net/ethernet/broadcom/sb1250-mac.c 				d->sbdma_remptr = SBDMA_NEXTBUF(d,sbdma_remptr);
d                1146 drivers/net/ethernet/broadcom/sb1250-mac.c 				sb->protocol = eth_type_trans(sb,d->sbdma_eth->sbm_dev);
d                1166 drivers/net/ethernet/broadcom/sb1250-mac.c 					d->sbdma_remptr = SBDMA_NEXTBUF(d,sbdma_remptr);
d                1180 drivers/net/ethernet/broadcom/sb1250-mac.c 			sbdma_add_rcvbuffer(sc, d, sb);
d                1188 drivers/net/ethernet/broadcom/sb1250-mac.c 		d->sbdma_remptr = SBDMA_NEXTBUF(d,sbdma_remptr);
d                1217 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbdma_tx_process(struct sbmac_softc *sc, struct sbmacdma *d,
d                1230 drivers/net/ethernet/broadcom/sb1250-mac.c 	if (d->sbdma_remptr == d->sbdma_addptr)
d                1233 drivers/net/ethernet/broadcom/sb1250-mac.c 	hwidx = ((__raw_readq(d->sbdma_curdscr) & M_DMA_CURDSCR_ADDR) -
d                1234 drivers/net/ethernet/broadcom/sb1250-mac.c 		 d->sbdma_dscrtable_phys) / sizeof(*d->sbdma_dscrtable);
d                1248 drivers/net/ethernet/broadcom/sb1250-mac.c 		curidx = d->sbdma_remptr - d->sbdma_dscrtable;
d                1263 drivers/net/ethernet/broadcom/sb1250-mac.c 		dsc = &(d->sbdma_dscrtable[curidx]);
d                1264 drivers/net/ethernet/broadcom/sb1250-mac.c 		sb = d->sbdma_ctxtable[curidx];
d                1265 drivers/net/ethernet/broadcom/sb1250-mac.c 		d->sbdma_ctxtable[curidx] = NULL;
d                1284 drivers/net/ethernet/broadcom/sb1250-mac.c 		d->sbdma_remptr = SBDMA_NEXTBUF(d,sbdma_remptr);
d                1297 drivers/net/ethernet/broadcom/sb1250-mac.c 		netif_wake_queue(d->sbdma_eth->sbm_dev);
d                1355 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbdma_uninitctx(struct sbmacdma *d)
d                1357 drivers/net/ethernet/broadcom/sb1250-mac.c 	kfree(d->sbdma_dscrtable_unaligned);
d                1358 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_dscrtable_unaligned = d->sbdma_dscrtable = NULL;
d                1360 drivers/net/ethernet/broadcom/sb1250-mac.c 	kfree(d->sbdma_ctxtable);
d                1361 drivers/net/ethernet/broadcom/sb1250-mac.c 	d->sbdma_ctxtable = NULL;
d                 299 drivers/net/ethernet/chelsio/cxgb3/adapter.h #define tdev2adap(d) container_of(d, struct adapter, tdev)
d                 390 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c 		struct net_device *d = adap->port[j];
d                 391 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c 		const struct port_info *pi = netdev_priv(d);
d                 395 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c 				 "%s-%d", d->name, pi->first_qset + i);
d                 684 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c static ssize_t attr_show(struct device *d, char *buf,
d                 691 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c 	len = (*format) (to_net_dev(d), buf);
d                 696 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c static ssize_t attr_store(struct device *d,
d                 714 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c 	ret = (*set) (to_net_dev(d), val);
d                 728 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c static ssize_t show_##name(struct device *d, struct device_attribute *attr, \
d                 731 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c 	return attr_show(d, buf, format_##name); \
d                 751 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c static ssize_t store_nfilters(struct device *d, struct device_attribute *attr,
d                 754 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c 	return attr_store(d, buf, len, set_nfilters, 0, ~0);
d                 771 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c static ssize_t store_nservers(struct device *d, struct device_attribute *attr,
d                 774 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c 	return attr_store(d, buf, len, set_nservers, 0, ~0);
d                 800 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c static ssize_t tm_attr_show(struct device *d,
d                 803 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c 	struct port_info *pi = netdev_priv(to_net_dev(d));
d                 826 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c static ssize_t tm_attr_store(struct device *d,
d                 829 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c 	struct port_info *pi = netdev_priv(to_net_dev(d));
d                 852 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c static ssize_t show_##name(struct device *d, struct device_attribute *attr, \
d                 855 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c 	return tm_attr_show(d, buf, sched); \
d                 857 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c static ssize_t store_##name(struct device *d, struct device_attribute *attr, \
d                 860 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c 	return tm_attr_store(d, buf, len, sched); \
d                1283 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 	struct l2t_data *d = container_of(head, struct l2t_data, rcu_head);
d                1284 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 	kvfree(d);
d                1292 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 	struct l2t_data *d;
d                1301 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 	d = L2DATA(tdev);
d                1304 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 	call_rcu(&d->rcu_head, clean_l2_data);
d                  69 drivers/net/ethernet/chelsio/cxgb3/l2t.c 				    const struct l2t_data *d)
d                  71 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	return jhash_2words(key, ifindex, 0) & (d->nentries - 1);
d                 219 drivers/net/ethernet/chelsio/cxgb3/l2t.c static struct l2t_entry *alloc_l2e(struct l2t_data *d)
d                 223 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	if (!atomic_read(&d->nfree))
d                 227 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	for (e = d->rover, end = &d->l2tab[d->nentries]; e != end; ++e)
d                 231 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	for (e = &d->l2tab[1]; atomic_read(&e->refcnt); ++e) ;
d                 233 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	d->rover = e + 1;
d                 234 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	atomic_dec(&d->nfree);
d                 241 drivers/net/ethernet/chelsio/cxgb3/l2t.c 		int hash = arp_hash(e->addr, e->ifindex, d);
d                 243 drivers/net/ethernet/chelsio/cxgb3/l2t.c 		for (p = &d->l2tab[hash].first; *p; p = &(*p)->next)
d                 264 drivers/net/ethernet/chelsio/cxgb3/l2t.c void t3_l2e_free(struct l2t_data *d, struct l2t_entry *e)
d                 274 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	atomic_inc(&d->nfree);
d                 308 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	struct l2t_data *d;
d                 327 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	d = L2DATA(cdev);
d                 328 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	if (!d)
d                 331 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	hash = arp_hash(addr, ifidx, d);
d                 333 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	write_lock_bh(&d->lock);
d                 334 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	for (e = d->l2tab[hash].first; e; e = e->next)
d                 337 drivers/net/ethernet/chelsio/cxgb3/l2t.c 			l2t_hold(d, e);
d                 344 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	e = alloc_l2e(d);
d                 347 drivers/net/ethernet/chelsio/cxgb3/l2t.c 		e->next = d->l2tab[hash].first;
d                 348 drivers/net/ethernet/chelsio/cxgb3/l2t.c 		d->l2tab[hash].first = e;
d                 362 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	write_unlock_bh(&d->lock);
d                 403 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	struct l2t_data *d = L2DATA(dev);
d                 406 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	int hash = arp_hash(addr, ifidx, d);
d                 408 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	read_lock_bh(&d->lock);
d                 409 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	for (e = d->l2tab[hash].first; e; e = e->next)
d                 414 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	read_unlock_bh(&d->lock);
d                 420 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	read_unlock(&d->lock);
d                 445 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	struct l2t_data *d;
d                 448 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	d = kvzalloc(struct_size(d, l2tab, l2t_capacity), GFP_KERNEL);
d                 449 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	if (!d)
d                 452 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	d->nentries = l2t_capacity;
d                 453 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	d->rover = &d->l2tab[1];	/* entry 0 is not used */
d                 454 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	atomic_set(&d->nfree, l2t_capacity - 1);
d                 455 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	rwlock_init(&d->lock);
d                 458 drivers/net/ethernet/chelsio/cxgb3/l2t.c 		d->l2tab[i].idx = i;
d                 459 drivers/net/ethernet/chelsio/cxgb3/l2t.c 		d->l2tab[i].state = L2T_STATE_UNUSED;
d                 460 drivers/net/ethernet/chelsio/cxgb3/l2t.c 		__skb_queue_head_init(&d->l2tab[i].arpq);
d                 461 drivers/net/ethernet/chelsio/cxgb3/l2t.c 		spin_lock_init(&d->l2tab[i].lock);
d                 462 drivers/net/ethernet/chelsio/cxgb3/l2t.c 		atomic_set(&d->l2tab[i].refcnt, 0);
d                 464 drivers/net/ethernet/chelsio/cxgb3/l2t.c 	return d;
d                 110 drivers/net/ethernet/chelsio/cxgb3/l2t.h void t3_l2e_free(struct l2t_data *d, struct l2t_entry *e);
d                 131 drivers/net/ethernet/chelsio/cxgb3/l2t.h 	struct l2t_data *d;
d                 134 drivers/net/ethernet/chelsio/cxgb3/l2t.h 	d = L2DATA(t);
d                 136 drivers/net/ethernet/chelsio/cxgb3/l2t.h 	if (atomic_dec_and_test(&e->refcnt) && d)
d                 137 drivers/net/ethernet/chelsio/cxgb3/l2t.h 		t3_l2e_free(d, e);
d                 142 drivers/net/ethernet/chelsio/cxgb3/l2t.h static inline void l2t_hold(struct l2t_data *d, struct l2t_entry *e)
d                 144 drivers/net/ethernet/chelsio/cxgb3/l2t.h 	if (d && atomic_add_return(1, &e->refcnt) == 1)	/* 0 -> 1 transition */
d                 145 drivers/net/ethernet/chelsio/cxgb3/l2t.h 		atomic_dec(&d->nfree);
d                 240 drivers/net/ethernet/chelsio/cxgb3/sge.c 	struct tx_sw_desc *d = &q->sdesc[cidx];
d                 241 drivers/net/ethernet/chelsio/cxgb3/sge.c 	int nfrags, frag_idx, curflit, j = d->addr_idx;
d                 243 drivers/net/ethernet/chelsio/cxgb3/sge.c 	sgp = (struct sg_ent *)&q->desc[cidx].flit[d->sflit];
d                 244 drivers/net/ethernet/chelsio/cxgb3/sge.c 	frag_idx = d->fragidx;
d                 252 drivers/net/ethernet/chelsio/cxgb3/sge.c 	curflit = d->sflit + 1 + j;
d                 269 drivers/net/ethernet/chelsio/cxgb3/sge.c 		d = cidx + 1 == q->size ? q->sdesc : d + 1;
d                 270 drivers/net/ethernet/chelsio/cxgb3/sge.c 		d->fragidx = frag_idx;
d                 271 drivers/net/ethernet/chelsio/cxgb3/sge.c 		d->addr_idx = j;
d                 272 drivers/net/ethernet/chelsio/cxgb3/sge.c 		d->sflit = curflit - WR_FLITS - j; /* sflit can be -1 */
d                 288 drivers/net/ethernet/chelsio/cxgb3/sge.c 	struct tx_sw_desc *d;
d                 295 drivers/net/ethernet/chelsio/cxgb3/sge.c 	d = &q->sdesc[cidx];
d                 297 drivers/net/ethernet/chelsio/cxgb3/sge.c 		if (d->skb) {	/* an SGL is present */
d                 299 drivers/net/ethernet/chelsio/cxgb3/sge.c 				unmap_skb(d->skb, q, cidx, pdev);
d                 300 drivers/net/ethernet/chelsio/cxgb3/sge.c 			if (d->eop) {
d                 301 drivers/net/ethernet/chelsio/cxgb3/sge.c 				dev_consume_skb_any(d->skb);
d                 302 drivers/net/ethernet/chelsio/cxgb3/sge.c 				d->skb = NULL;
d                 305 drivers/net/ethernet/chelsio/cxgb3/sge.c 		++d;
d                 308 drivers/net/ethernet/chelsio/cxgb3/sge.c 			d = q->sdesc;
d                 353 drivers/net/ethernet/chelsio/cxgb3/sge.c 			  struct rx_sw_desc *d)
d                 355 drivers/net/ethernet/chelsio/cxgb3/sge.c 	if (q->use_pages && d->pg_chunk.page) {
d                 356 drivers/net/ethernet/chelsio/cxgb3/sge.c 		(*d->pg_chunk.p_cnt)--;
d                 357 drivers/net/ethernet/chelsio/cxgb3/sge.c 		if (!*d->pg_chunk.p_cnt)
d                 359 drivers/net/ethernet/chelsio/cxgb3/sge.c 				       d->pg_chunk.mapping,
d                 362 drivers/net/ethernet/chelsio/cxgb3/sge.c 		put_page(d->pg_chunk.page);
d                 363 drivers/net/ethernet/chelsio/cxgb3/sge.c 		d->pg_chunk.page = NULL;
d                 365 drivers/net/ethernet/chelsio/cxgb3/sge.c 		pci_unmap_single(pdev, dma_unmap_addr(d, dma_addr),
d                 367 drivers/net/ethernet/chelsio/cxgb3/sge.c 		kfree_skb(d->skb);
d                 368 drivers/net/ethernet/chelsio/cxgb3/sge.c 		d->skb = NULL;
d                 385 drivers/net/ethernet/chelsio/cxgb3/sge.c 		struct rx_sw_desc *d = &q->sdesc[cidx];
d                 388 drivers/net/ethernet/chelsio/cxgb3/sge.c 		clear_rx_desc(pdev, q, d);
d                 412 drivers/net/ethernet/chelsio/cxgb3/sge.c 				 struct rx_desc *d, struct rx_sw_desc *sd,
d                 423 drivers/net/ethernet/chelsio/cxgb3/sge.c 	d->addr_lo = cpu_to_be32(mapping);
d                 424 drivers/net/ethernet/chelsio/cxgb3/sge.c 	d->addr_hi = cpu_to_be32((u64) mapping >> 32);
d                 426 drivers/net/ethernet/chelsio/cxgb3/sge.c 	d->len_gen = cpu_to_be32(V_FLD_GEN1(gen));
d                 427 drivers/net/ethernet/chelsio/cxgb3/sge.c 	d->gen2 = cpu_to_be32(V_FLD_GEN2(gen));
d                 431 drivers/net/ethernet/chelsio/cxgb3/sge.c static inline int add_one_rx_chunk(dma_addr_t mapping, struct rx_desc *d,
d                 434 drivers/net/ethernet/chelsio/cxgb3/sge.c 	d->addr_lo = cpu_to_be32(mapping);
d                 435 drivers/net/ethernet/chelsio/cxgb3/sge.c 	d->addr_hi = cpu_to_be32((u64) mapping >> 32);
d                 437 drivers/net/ethernet/chelsio/cxgb3/sge.c 	d->len_gen = cpu_to_be32(V_FLD_GEN1(gen));
d                 438 drivers/net/ethernet/chelsio/cxgb3/sge.c 	d->gen2 = cpu_to_be32(V_FLD_GEN2(gen));
d                 508 drivers/net/ethernet/chelsio/cxgb3/sge.c 	struct rx_desc *d = &q->desc[q->pidx];
d                 524 drivers/net/ethernet/chelsio/cxgb3/sge.c 			add_one_rx_chunk(mapping, d, q->gen);
d                 537 drivers/net/ethernet/chelsio/cxgb3/sge.c 			err = add_one_rx_buf(buf_start, q->buf_size, d, sd,
d                 545 drivers/net/ethernet/chelsio/cxgb3/sge.c 		d++;
d                 551 drivers/net/ethernet/chelsio/cxgb3/sge.c 			d = q->desc;
d                1065 drivers/net/ethernet/chelsio/cxgb3/sge.c static inline void wr_gen2(struct tx_desc *d, unsigned int gen)
d                1068 drivers/net/ethernet/chelsio/cxgb3/sge.c 	d->flit[TX_DESC_FLITS - 1] = cpu_to_be64(gen);
d                1092 drivers/net/ethernet/chelsio/cxgb3/sge.c 			     struct tx_desc *d, unsigned int pidx,
d                1099 drivers/net/ethernet/chelsio/cxgb3/sge.c 	struct work_request_hdr *wrp = (struct work_request_hdr *)d;
d                1116 drivers/net/ethernet/chelsio/cxgb3/sge.c 		wr_gen2(d, gen);
d                1130 drivers/net/ethernet/chelsio/cxgb3/sge.c 			memcpy(&d->flit[flits], fp, avail * sizeof(*fp));
d                1137 drivers/net/ethernet/chelsio/cxgb3/sge.c 			d++;
d                1143 drivers/net/ethernet/chelsio/cxgb3/sge.c 				d = q->desc;
d                1148 drivers/net/ethernet/chelsio/cxgb3/sge.c 			wrp = (struct work_request_hdr *)d;
d                1154 drivers/net/ethernet/chelsio/cxgb3/sge.c 			wr_gen2(d, gen);
d                1187 drivers/net/ethernet/chelsio/cxgb3/sge.c 	struct tx_desc *d = &q->desc[pidx];
d                1188 drivers/net/ethernet/chelsio/cxgb3/sge.c 	struct cpl_tx_pkt *cpl = (struct cpl_tx_pkt *)d;
d                1201 drivers/net/ethernet/chelsio/cxgb3/sge.c 		d->flit[2] = 0;
d                1220 drivers/net/ethernet/chelsio/cxgb3/sge.c 				skb_copy_from_linear_data(skb, &d->flit[2],
d                1223 drivers/net/ethernet/chelsio/cxgb3/sge.c 				skb_copy_bits(skb, 0, &d->flit[2], skb->len);
d                1232 drivers/net/ethernet/chelsio/cxgb3/sge.c 			wr_gen2(d, gen);
d                1240 drivers/net/ethernet/chelsio/cxgb3/sge.c 	sgp = ndesc == 1 ? (struct sg_ent *)&d->flit[flits] : sgl;
d                1243 drivers/net/ethernet/chelsio/cxgb3/sge.c 	write_wr_hdr_sgl(ndesc, skb, d, pidx, q, sgl, flits, sgl_flits, gen,
d                1383 drivers/net/ethernet/chelsio/cxgb3/sge.c static inline void write_imm(struct tx_desc *d, struct sk_buff *skb,
d                1387 drivers/net/ethernet/chelsio/cxgb3/sge.c 	struct work_request_hdr *to = (struct work_request_hdr *)d;
d                1399 drivers/net/ethernet/chelsio/cxgb3/sge.c 	wr_gen2(d, gen);
d                1637 drivers/net/ethernet/chelsio/cxgb3/sge.c 	struct tx_desc *d = &q->desc[pidx];
d                1641 drivers/net/ethernet/chelsio/cxgb3/sge.c 		write_imm(d, skb, skb->len, gen);
d                1648 drivers/net/ethernet/chelsio/cxgb3/sge.c 	memcpy(&d->flit[1], &from[1],
d                1652 drivers/net/ethernet/chelsio/cxgb3/sge.c 	sgp = ndesc == 1 ? (struct sg_ent *)&d->flit[flits] : sgl;
d                1661 drivers/net/ethernet/chelsio/cxgb3/sge.c 	write_wr_hdr_sgl(ndesc, skb, d, pidx, q, sgl, flits, sgl_flits,
d                  28 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c static inline unsigned int ipv6_clip_hash(struct clip_tbl *d, const u32 *key)
d                  30 drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c 	unsigned int clipt_size_half = d->clipt_size / 2;
d                 702 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c 		struct net_device *d = adap->port[j];
d                 703 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c 		const struct port_info *pi = netdev_priv(d);
d                 707 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c 				 d->name, i);
d                  70 drivers/net/ethernet/chelsio/cxgb4/l2t.c static inline void l2t_hold(struct l2t_data *d, struct l2t_entry *e)
d                  73 drivers/net/ethernet/chelsio/cxgb4/l2t.c 		atomic_dec(&d->nfree);
d                  86 drivers/net/ethernet/chelsio/cxgb4/l2t.c static inline unsigned int arp_hash(struct l2t_data *d, const u32 *key,
d                  89 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	unsigned int l2t_size_half = d->l2t_size / 2;
d                  94 drivers/net/ethernet/chelsio/cxgb4/l2t.c static inline unsigned int ipv6_hash(struct l2t_data *d, const u32 *key,
d                  97 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	unsigned int l2t_size_half = d->l2t_size / 2;
d                 104 drivers/net/ethernet/chelsio/cxgb4/l2t.c static unsigned int addr_hash(struct l2t_data *d, const u32 *addr,
d                 107 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	return addr_len == 4 ? arp_hash(d, addr, ifindex) :
d                 108 drivers/net/ethernet/chelsio/cxgb4/l2t.c 			       ipv6_hash(d, addr, ifindex);
d                 140 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	struct l2t_data *d = adap->l2t;
d                 141 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	unsigned int l2t_idx = e->idx + d->l2t_start;
d                 188 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	struct l2t_data *d = adap->l2t;
d                 200 drivers/net/ethernet/chelsio/cxgb4/l2t.c 		struct l2t_entry *e = &d->l2tab[l2t_idx - d->l2t_start];
d                 264 drivers/net/ethernet/chelsio/cxgb4/l2t.c static struct l2t_entry *alloc_l2e(struct l2t_data *d)
d                 268 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	if (!atomic_read(&d->nfree))
d                 272 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	for (e = d->rover, end = &d->l2tab[d->l2t_size]; e != end; ++e)
d                 276 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	for (e = d->l2tab; atomic_read(&e->refcnt); ++e)
d                 279 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	d->rover = e + 1;
d                 280 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	atomic_dec(&d->nfree);
d                 287 drivers/net/ethernet/chelsio/cxgb4/l2t.c 		for (p = &d->l2tab[e->hash].first; *p; p = &(*p)->next)
d                 298 drivers/net/ethernet/chelsio/cxgb4/l2t.c static struct l2t_entry *find_or_alloc_l2e(struct l2t_data *d, u16 vlan,
d                 304 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	for (e = &d->l2tab[0], end = &d->l2tab[d->l2t_size]; e != end; ++e) {
d                 329 drivers/net/ethernet/chelsio/cxgb4/l2t.c 		for (p = &d->l2tab[e->hash].first; *p; p = &(*p)->next)
d                 353 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	struct l2t_data *d;
d                 365 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	d = container_of(e, struct l2t_data, l2tab[e->idx]);
d                 366 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	atomic_inc(&d->nfree);
d                 372 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	struct l2t_data *d;
d                 386 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	d = container_of(e, struct l2t_data, l2tab[e->idx]);
d                 387 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	atomic_inc(&d->nfree);
d                 419 drivers/net/ethernet/chelsio/cxgb4/l2t.c struct l2t_entry *cxgb4_l2t_get(struct l2t_data *d, struct neighbour *neigh,
d                 429 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	int hash = addr_hash(d, addr, addr_len, ifidx);
d                 443 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	write_lock_bh(&d->lock);
d                 444 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	for (e = d->l2tab[hash].first; e; e = e->next)
d                 447 drivers/net/ethernet/chelsio/cxgb4/l2t.c 			l2t_hold(d, e);
d                 454 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	e = alloc_l2e(d);
d                 468 drivers/net/ethernet/chelsio/cxgb4/l2t.c 		e->next = d->l2tab[hash].first;
d                 469 drivers/net/ethernet/chelsio/cxgb4/l2t.c 		d->l2tab[hash].first = e;
d                 473 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	write_unlock_bh(&d->lock);
d                 538 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	struct l2t_data *d = adap->l2t;
d                 542 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	int hash = addr_hash(d, addr, addr_len, ifidx);
d                 544 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	read_lock_bh(&d->lock);
d                 545 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	for (e = d->l2tab[hash].first; e; e = e->next)
d                 553 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	read_unlock_bh(&d->lock);
d                 557 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	read_unlock(&d->lock);
d                 588 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	struct l2t_data *d = adap->l2t;
d                 592 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	write_lock_bh(&d->lock);
d                 593 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	e = find_or_alloc_l2e(d, vlan, port, eth_addr);
d                 606 drivers/net/ethernet/chelsio/cxgb4/l2t.c 				write_unlock_bh(&d->lock);
d                 615 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	write_unlock_bh(&d->lock);
d                 641 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	struct l2t_data *d;
d                 649 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	d = kvzalloc(struct_size(d, l2tab, l2t_size), GFP_KERNEL);
d                 650 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	if (!d)
d                 653 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	d->l2t_start = l2t_start;
d                 654 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	d->l2t_size = l2t_size;
d                 656 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	d->rover = d->l2tab;
d                 657 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	atomic_set(&d->nfree, l2t_size);
d                 658 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	rwlock_init(&d->lock);
d                 660 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	for (i = 0; i < d->l2t_size; ++i) {
d                 661 drivers/net/ethernet/chelsio/cxgb4/l2t.c 		d->l2tab[i].idx = i;
d                 662 drivers/net/ethernet/chelsio/cxgb4/l2t.c 		d->l2tab[i].state = L2T_STATE_UNUSED;
d                 663 drivers/net/ethernet/chelsio/cxgb4/l2t.c 		spin_lock_init(&d->l2tab[i].lock);
d                 664 drivers/net/ethernet/chelsio/cxgb4/l2t.c 		atomic_set(&d->l2tab[i].refcnt, 0);
d                 665 drivers/net/ethernet/chelsio/cxgb4/l2t.c 		skb_queue_head_init(&d->l2tab[i].arpq);
d                 667 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	return d;
d                 672 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	struct l2t_data *d = seq->private;
d                 674 drivers/net/ethernet/chelsio/cxgb4/l2t.c 	return pos >= d->l2t_size ? NULL : &d->l2tab[pos];
d                 714 drivers/net/ethernet/chelsio/cxgb4/l2t.c 		struct l2t_data *d = seq->private;
d                 723 drivers/net/ethernet/chelsio/cxgb4/l2t.c 			   e->idx + d->l2t_start, ip, e->dmac,
d                 113 drivers/net/ethernet/chelsio/cxgb4/l2t.h struct l2t_entry *cxgb4_l2t_get(struct l2t_data *d, struct neighbour *neigh,
d                 189 drivers/net/ethernet/chelsio/cxgb4/sge.c static inline dma_addr_t get_buf_addr(const struct rx_sw_desc *d)
d                 191 drivers/net/ethernet/chelsio/cxgb4/sge.c 	return d->dma_addr & ~(dma_addr_t)RX_BUF_FLAGS;
d                 194 drivers/net/ethernet/chelsio/cxgb4/sge.c static inline bool is_buf_mapped(const struct rx_sw_desc *d)
d                 196 drivers/net/ethernet/chelsio/cxgb4/sge.c 	return !(d->dma_addr & RX_UNMAPPED_BUF);
d                 373 drivers/net/ethernet/chelsio/cxgb4/sge.c 	struct tx_sw_desc *d;
d                 377 drivers/net/ethernet/chelsio/cxgb4/sge.c 	d = &q->sdesc[cidx];
d                 379 drivers/net/ethernet/chelsio/cxgb4/sge.c 		if (d->skb) {                       /* an SGL is present */
d                 381 drivers/net/ethernet/chelsio/cxgb4/sge.c 				unmap_sgl(dev, d->skb, d->sgl, q);
d                 382 drivers/net/ethernet/chelsio/cxgb4/sge.c 			dev_consume_skb_any(d->skb);
d                 383 drivers/net/ethernet/chelsio/cxgb4/sge.c 			d->skb = NULL;
d                 385 drivers/net/ethernet/chelsio/cxgb4/sge.c 		++d;
d                 388 drivers/net/ethernet/chelsio/cxgb4/sge.c 			d = q->sdesc;
d                 455 drivers/net/ethernet/chelsio/cxgb4/sge.c 			       const struct rx_sw_desc *d)
d                 458 drivers/net/ethernet/chelsio/cxgb4/sge.c 	unsigned int rx_buf_size_idx = d->dma_addr & RX_BUF_SIZE;
d                 497 drivers/net/ethernet/chelsio/cxgb4/sge.c 		struct rx_sw_desc *d = &q->sdesc[q->cidx];
d                 499 drivers/net/ethernet/chelsio/cxgb4/sge.c 		if (is_buf_mapped(d))
d                 500 drivers/net/ethernet/chelsio/cxgb4/sge.c 			dma_unmap_page(adap->pdev_dev, get_buf_addr(d),
d                 501 drivers/net/ethernet/chelsio/cxgb4/sge.c 				       get_buf_size(adap, d),
d                 503 drivers/net/ethernet/chelsio/cxgb4/sge.c 		put_page(d->page);
d                 504 drivers/net/ethernet/chelsio/cxgb4/sge.c 		d->page = NULL;
d                 524 drivers/net/ethernet/chelsio/cxgb4/sge.c 	struct rx_sw_desc *d = &q->sdesc[q->cidx];
d                 526 drivers/net/ethernet/chelsio/cxgb4/sge.c 	if (is_buf_mapped(d))
d                 527 drivers/net/ethernet/chelsio/cxgb4/sge.c 		dma_unmap_page(adap->pdev_dev, get_buf_addr(d),
d                 528 drivers/net/ethernet/chelsio/cxgb4/sge.c 			       get_buf_size(adap, d), PCI_DMA_FROMDEVICE);
d                 529 drivers/net/ethernet/chelsio/cxgb4/sge.c 	d->page = NULL;
d                 598 drivers/net/ethernet/chelsio/cxgb4/sge.c 	__be64 *d = &q->desc[q->pidx];
d                 632 drivers/net/ethernet/chelsio/cxgb4/sge.c 		*d++ = cpu_to_be64(mapping);
d                 641 drivers/net/ethernet/chelsio/cxgb4/sge.c 			d = q->desc;
d                 661 drivers/net/ethernet/chelsio/cxgb4/sge.c 		*d++ = cpu_to_be64(mapping);
d                 670 drivers/net/ethernet/chelsio/cxgb4/sge.c 			d = q->desc;
d                3077 drivers/net/ethernet/chelsio/cxgb4/sge.c 	struct rx_sw_desc *d;
d                3084 drivers/net/ethernet/chelsio/cxgb4/sge.c 		d = &q->sdesc[q->cidx];
d                3085 drivers/net/ethernet/chelsio/cxgb4/sge.c 		d->page = si->frags[frags].page;
d                3086 drivers/net/ethernet/chelsio/cxgb4/sge.c 		d->dma_addr |= RX_UNMAPPED_BUF;
d                3546 drivers/net/ethernet/chelsio/cxgb4/t4_hw.c 		uint32_t d, c, k;
d                3548 drivers/net/ethernet/chelsio/cxgb4/t4_hw.c 		d = be32_to_cpu(drv_fw->fw_ver);
d                3557 drivers/net/ethernet/chelsio/cxgb4/t4_hw.c 			FW_HDR_FW_VER_MAJOR_G(d), FW_HDR_FW_VER_MINOR_G(d),
d                3558 drivers/net/ethernet/chelsio/cxgb4/t4_hw.c 			FW_HDR_FW_VER_MICRO_G(d), FW_HDR_FW_VER_BUILD_G(d),
d                 611 drivers/net/ethernet/chelsio/cxgb4vf/sge.c 	__be64 *d = &fl->desc[fl->pidx];
d                 661 drivers/net/ethernet/chelsio/cxgb4vf/sge.c 		*d++ = cpu_to_be64(dma_addr);
d                 670 drivers/net/ethernet/chelsio/cxgb4vf/sge.c 			d = fl->desc;
d                 690 drivers/net/ethernet/chelsio/cxgb4vf/sge.c 		*d++ = cpu_to_be64(dma_addr);
d                 699 drivers/net/ethernet/chelsio/cxgb4vf/sge.c 			d = fl->desc;
d                 454 drivers/net/ethernet/cirrus/ep93xx_eth.c 		dma_addr_t d;
d                 456 drivers/net/ethernet/cirrus/ep93xx_eth.c 		d = ep->descs->rdesc[i].buf_addr;
d                 457 drivers/net/ethernet/cirrus/ep93xx_eth.c 		if (d)
d                 458 drivers/net/ethernet/cirrus/ep93xx_eth.c 			dma_unmap_single(dev, d, PKT_BUF_SIZE, DMA_FROM_DEVICE);
d                 464 drivers/net/ethernet/cirrus/ep93xx_eth.c 		dma_addr_t d;
d                 466 drivers/net/ethernet/cirrus/ep93xx_eth.c 		d = ep->descs->tdesc[i].buf_addr;
d                 467 drivers/net/ethernet/cirrus/ep93xx_eth.c 		if (d)
d                 468 drivers/net/ethernet/cirrus/ep93xx_eth.c 			dma_unmap_single(dev, d, PKT_BUF_SIZE, DMA_TO_DEVICE);
d                 490 drivers/net/ethernet/cirrus/ep93xx_eth.c 		dma_addr_t d;
d                 496 drivers/net/ethernet/cirrus/ep93xx_eth.c 		d = dma_map_single(dev, buf, PKT_BUF_SIZE, DMA_FROM_DEVICE);
d                 497 drivers/net/ethernet/cirrus/ep93xx_eth.c 		if (dma_mapping_error(dev, d)) {
d                 503 drivers/net/ethernet/cirrus/ep93xx_eth.c 		ep->descs->rdesc[i].buf_addr = d;
d                 509 drivers/net/ethernet/cirrus/ep93xx_eth.c 		dma_addr_t d;
d                 515 drivers/net/ethernet/cirrus/ep93xx_eth.c 		d = dma_map_single(dev, buf, PKT_BUF_SIZE, DMA_TO_DEVICE);
d                 516 drivers/net/ethernet/cirrus/ep93xx_eth.c 		if (dma_mapping_error(dev, d)) {
d                 522 drivers/net/ethernet/cirrus/ep93xx_eth.c 		ep->descs->tdesc[i].buf_addr = d;
d                 234 drivers/net/ethernet/cisco/enic/enic_clsf.c 				struct enic_rfs_fltr_node *d;
d                 238 drivers/net/ethernet/cisco/enic/enic_clsf.c 				d = kmalloc(sizeof(*d), GFP_ATOMIC);
d                 239 drivers/net/ethernet/cisco/enic/enic_clsf.c 				if (d) {
d                 240 drivers/net/ethernet/cisco/enic/enic_clsf.c 					d->fltr_id = n->fltr_id;
d                 241 drivers/net/ethernet/cisco/enic/enic_clsf.c 					INIT_HLIST_NODE(&d->node);
d                 242 drivers/net/ethernet/cisco/enic/enic_clsf.c 					hlist_add_head(&d->node, head);
d                1233 drivers/net/ethernet/cortina/gemini.c 	unsigned short r, w, d;
d                1252 drivers/net/ethernet/cortina/gemini.c 	d = txq->cptr - w - 1;
d                1253 drivers/net/ethernet/cortina/gemini.c 	d &= m;
d                1255 drivers/net/ethernet/cortina/gemini.c 	if (d < nfrags + 2) {
d                1257 drivers/net/ethernet/cortina/gemini.c 		d = txq->cptr - w - 1;
d                1258 drivers/net/ethernet/cortina/gemini.c 		d &= m;
d                1260 drivers/net/ethernet/cortina/gemini.c 		if (d < nfrags + 2) {
d                1263 drivers/net/ethernet/cortina/gemini.c 			d = txq->cptr + nfrags + 16;
d                1264 drivers/net/ethernet/cortina/gemini.c 			d &= m;
d                1265 drivers/net/ethernet/cortina/gemini.c 			txq->ring[d].word3.bits.eofie = 1;
d                 193 drivers/net/ethernet/dec/tulip/xircom_cb.c 	struct device *d = &pdev->dev;
d                 234 drivers/net/ethernet/dec/tulip/xircom_cb.c 	private->rx_buffer = dma_alloc_coherent(d, 8192,
d                 240 drivers/net/ethernet/dec/tulip/xircom_cb.c 	private->tx_buffer = dma_alloc_coherent(d, 8192,
d                 290 drivers/net/ethernet/dec/tulip/xircom_cb.c 	dma_free_coherent(d, 8192, private->tx_buffer, private->tx_dma_handle);
d                 292 drivers/net/ethernet/dec/tulip/xircom_cb.c 	dma_free_coherent(d, 8192, private->rx_buffer, private->rx_dma_handle);
d                 313 drivers/net/ethernet/dec/tulip/xircom_cb.c 	struct device *d = &pdev->dev;
d                 317 drivers/net/ethernet/dec/tulip/xircom_cb.c 	dma_free_coherent(d, 8192, card->tx_buffer, card->tx_dma_handle);
d                 318 drivers/net/ethernet/dec/tulip/xircom_cb.c 	dma_free_coherent(d, 8192, card->rx_buffer, card->rx_dma_handle);
d                 862 drivers/net/ethernet/hisilicon/hip04_eth.c static int hip04_alloc_ring(struct net_device *ndev, struct device *d)
d                 867 drivers/net/ethernet/hisilicon/hip04_eth.c 	priv->tx_desc = dma_alloc_coherent(d,
d                 884 drivers/net/ethernet/hisilicon/hip04_eth.c static void hip04_free_ring(struct net_device *ndev, struct device *d)
d                 897 drivers/net/ethernet/hisilicon/hip04_eth.c 	dma_free_coherent(d, TX_DESC_NUM * sizeof(struct tx_desc),
d                 903 drivers/net/ethernet/hisilicon/hip04_eth.c 	struct device *d = &pdev->dev;
d                 904 drivers/net/ethernet/hisilicon/hip04_eth.c 	struct device_node *node = d->of_node;
d                 916 drivers/net/ethernet/hisilicon/hip04_eth.c 	priv->dev = d;
d                 937 drivers/net/ethernet/hisilicon/hip04_eth.c 		dev_warn(d, "no port-handle\n");
d                 959 drivers/net/ethernet/hisilicon/hip04_eth.c 		dev_warn(d, "no syscon hisilicon,hip04-ppe\n");
d                 966 drivers/net/ethernet/hisilicon/hip04_eth.c 		dev_warn(d, "not find phy-mode\n");
d                 977 drivers/net/ethernet/hisilicon/hip04_eth.c 	ret = devm_request_irq(d, irq, hip04_mac_interrupt,
d                1013 drivers/net/ethernet/hisilicon/hip04_eth.c 	ret = hip04_alloc_ring(ndev, d);
d                1026 drivers/net/ethernet/hisilicon/hip04_eth.c 	hip04_free_ring(ndev, d);
d                1037 drivers/net/ethernet/hisilicon/hip04_eth.c 	struct device *d = &pdev->dev;
d                1042 drivers/net/ethernet/hisilicon/hip04_eth.c 	hip04_free_ring(ndev, d);
d                2810 drivers/net/ethernet/hp/hp100.c static void cleanup_dev(struct net_device *d)
d                2812 drivers/net/ethernet/hp/hp100.c 	struct hp100_private *p = netdev_priv(d);
d                2814 drivers/net/ethernet/hp/hp100.c 	unregister_netdev(d);
d                2815 drivers/net/ethernet/hp/hp100.c 	release_region(d->base_addr, HP100_REGION_SIZE);
d                2820 drivers/net/ethernet/hp/hp100.c 				    virt_to_whatever(d, p->page_vaddr_algn));
d                2824 drivers/net/ethernet/hp/hp100.c 	free_netdev(d);
d                  28 drivers/net/ethernet/ibm/emac/debug.h #define EMAC_DBG(d, name, fmt, arg...) \
d                  29 drivers/net/ethernet/ibm/emac/debug.h 	printk(KERN_DEBUG #name "%pOF: " fmt, d->ofdev->dev.of_node, ## arg)
d                  32 drivers/net/ethernet/ibm/emac/debug.h #  define DBG(d,f,x...)		EMAC_DBG(d, emac, f, ##x)
d                  33 drivers/net/ethernet/ibm/emac/debug.h #  define MAL_DBG(d,f,x...)	EMAC_DBG(d, mal, f, ##x)
d                  34 drivers/net/ethernet/ibm/emac/debug.h #  define ZMII_DBG(d,f,x...)	EMAC_DBG(d, zmii, f, ##x)
d                  35 drivers/net/ethernet/ibm/emac/debug.h #  define RGMII_DBG(d,f,x...)	EMAC_DBG(d, rgmii, f, ##x)
d                  39 drivers/net/ethernet/ibm/emac/debug.h #  define MAL_DBG(d,f,x...)	((void)0)
d                  40 drivers/net/ethernet/ibm/emac/debug.h #  define ZMII_DBG(d,f,x...)	((void)0)
d                  41 drivers/net/ethernet/ibm/emac/debug.h #  define RGMII_DBG(d,f,x...)	((void)0)
d                  44 drivers/net/ethernet/ibm/emac/debug.h #  define DBG2(d,f,x...) 	DBG(d,f, ##x)
d                  45 drivers/net/ethernet/ibm/emac/debug.h #  define MAL_DBG2(d,f,x...) 	MAL_DBG(d,f, ##x)
d                  46 drivers/net/ethernet/ibm/emac/debug.h #  define ZMII_DBG2(d,f,x...) 	ZMII_DBG(d,f, ##x)
d                  47 drivers/net/ethernet/ibm/emac/debug.h #  define RGMII_DBG2(d,f,x...) 	RGMII_DBG(d,f, ##x)
d                  50 drivers/net/ethernet/ibm/emac/debug.h #  define MAL_DBG2(d,f,x...) 	((void)0)
d                  51 drivers/net/ethernet/ibm/emac/debug.h #  define ZMII_DBG2(d,f,x...) 	((void)0)
d                  52 drivers/net/ethernet/ibm/emac/debug.h #  define RGMII_DBG2(d,f,x...) 	((void)0)
d                 215 drivers/net/ethernet/intel/e1000e/netdev.c 		__le64 d;
d                 382 drivers/net/ethernet/intel/e1000e/netdev.c 					(unsigned long long)le64_to_cpu(u1->d),
d                 390 drivers/net/ethernet/intel/e1000e/netdev.c 					(unsigned long long)le64_to_cpu(u1->d),
d                 418 drivers/net/ethernet/intel/fm10k/fm10k.h 	return rx_desc->d.staterr & cpu_to_le32(stat_err_bits);
d                 458 drivers/net/ethernet/intel/fm10k/fm10k.h 	} d;
d                  91 drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c 			   i, rxd->d.data, rxd->d.rss, rxd->d.staterr,
d                 145 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		rx_desc->d.staterr = 0;
d                 409 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	skb_set_hash(skb, le32_to_cpu(rx_desc->d.rss),
d                 466 drivers/net/ethernet/intel/fm10k/fm10k_main.c 	FM10K_CB(skb)->fi.d.glort = rx_desc->d.glort;
d                 584 drivers/net/ethernet/intel/fm10k/fm10k_main.c 		if (!rx_desc->d.staterr)
d                 708 drivers/net/ethernet/intel/fm10k/fm10k_type.h 	} d; /* Writeback, 32b double-words */
d                 461 drivers/net/ethernet/intel/i40e/i40e_debugfs.c 		struct i40e_aq_desc *d = I40E_ADMINQ_DESC(*ring, i);
d                 465 drivers/net/ethernet/intel/i40e/i40e_debugfs.c 			 i, d->flags, d->opcode, d->datalen, d->retval,
d                 466 drivers/net/ethernet/intel/i40e/i40e_debugfs.c 			 d->cookie_high, d->cookie_low);
d                 468 drivers/net/ethernet/intel/i40e/i40e_debugfs.c 			       16, 1, d->params.raw, 16, 0);
d                 474 drivers/net/ethernet/intel/i40e/i40e_debugfs.c 		struct i40e_aq_desc *d = I40E_ADMINQ_DESC(*ring, i);
d                 478 drivers/net/ethernet/intel/i40e/i40e_debugfs.c 			 i, d->flags, d->opcode, d->datalen, d->retval,
d                 479 drivers/net/ethernet/intel/i40e/i40e_debugfs.c 			 d->cookie_high, d->cookie_low);
d                 481 drivers/net/ethernet/intel/i40e/i40e_debugfs.c 			       16, 1, d->params.raw, 16, 0);
d                  33 drivers/net/ethernet/intel/i40e/i40e_devids.h #define i40e_is_40G_device(d)		((d) == I40E_DEV_ID_QSFP_A  || \
d                  34 drivers/net/ethernet/intel/i40e/i40e_devids.h 					 (d) == I40E_DEV_ID_QSFP_B  || \
d                  35 drivers/net/ethernet/intel/i40e/i40e_devids.h 					 (d) == I40E_DEV_ID_QSFP_C)
d                 468 drivers/net/ethernet/marvell/octeontx2/af/rvu_reg.h #define NPC_AF_INTFX_LIDX_LTX_LDX_CFG(a, b, c, d) \
d                 469 drivers/net/ethernet/marvell/octeontx2/af/rvu_reg.h 		(0x900000 | (a) << 16 | (b) << 12 | (c) << 5 | (d) << 3)
d                 911 drivers/net/ethernet/marvell/skge.c 	struct skge_tx_desc *d;
d                 919 drivers/net/ethernet/marvell/skge.c 	for (i = 0, e = ring->start, d = vaddr; i < ring->count; i++, e++, d++) {
d                 920 drivers/net/ethernet/marvell/skge.c 		e->desc = d;
d                 923 drivers/net/ethernet/marvell/skge.c 			d->next_offset = base;
d                 926 drivers/net/ethernet/marvell/skge.c 			d->next_offset = base + (i+1) * sizeof(*d);
d                 116 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	u32 d;
d                 129 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	d = mtk_r32(eth, MTK_PHY_IAC) & 0xffff;
d                 131 drivers/net/ethernet/mediatek/mtk_eth_soc.c 	return d;
d                 375 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	struct mlx5_rsc_debug *d;
d                 382 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	d = (void *)(desc - desc->i) - sizeof(*d);
d                 383 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	switch (d->type) {
d                 385 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		field = qp_read_field(d->dev, d->object, desc->i, &is_str);
d                 389 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		field = eq_read_field(d->dev, d->object, desc->i);
d                 393 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		field = cq_read_field(d->dev, d->object, desc->i);
d                 397 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		mlx5_core_warn(d->dev, "invalid resource type %d\n", d->type);
d                 419 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	struct mlx5_rsc_debug *d;
d                 423 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	d = kzalloc(struct_size(d, fields, nfile), GFP_KERNEL);
d                 424 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	if (!d)
d                 427 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	d->dev = dev;
d                 428 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	d->object = data;
d                 429 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	d->type = type;
d                 431 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	d->root = debugfs_create_dir(resn,  root);
d                 434 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		d->fields[i].i = i;
d                 435 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		debugfs_create_file(field[i], 0400, d->root, &d->fields[i],
d                 438 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	*dbg = d;
d                 443 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c static void rem_res_tree(struct mlx5_rsc_debug *d)
d                 445 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	debugfs_remove_recursive(d->root);
d                 446 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	kfree(d);
d                 992 drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c 	struct net_device *d = __mlxsw_sp_ipip_netdev_ul_dev_get(ol_dev);
d                 994 drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c 	if (d)
d                 995 drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c 		return l3mdev_fib_table(d) ? : RT_TABLE_MAIN;
d                  20 drivers/net/ethernet/neterion/s2io.h #define INV(d)  ((d&0xff)<<24) | (((d>>8)&0xff)<<16) | (((d>>16)&0xff)<<8)| ((d>>24)&0xff)
d                1869 drivers/net/ethernet/packetengines/hamachi.c 		u32 *d = (u32 *)&rq->ifr_ifru;
d                1877 drivers/net/ethernet/packetengines/hamachi.c 		writel(d[0], np->base + TxIntrCtrl);
d                1878 drivers/net/ethernet/packetengines/hamachi.c 		writel(d[1], np->base + RxIntrCtrl);
d                 116 drivers/net/ethernet/pasemi/pasemi_mac.c 	const void *d = skb;
d                 118 drivers/net/ethernet/pasemi/pasemi_mac.c 	prefetch(d);
d                 119 drivers/net/ethernet/pasemi/pasemi_mac.c 	prefetch(d+64);
d                 120 drivers/net/ethernet/pasemi/pasemi_mac.c 	prefetch(d+128);
d                 121 drivers/net/ethernet/pasemi/pasemi_mac.c 	prefetch(d+192);
d                1115 drivers/net/ethernet/realtek/8139cp.c 	struct device *d = &cp->pdev->dev;
d                1119 drivers/net/ethernet/realtek/8139cp.c 	mem = dma_alloc_coherent(d, CP_RING_BYTES, &cp->ring_dma, GFP_KERNEL);
d                1128 drivers/net/ethernet/realtek/8139cp.c 		dma_free_coherent(d, CP_RING_BYTES, cp->rx_ring, cp->ring_dma);
d                 756 drivers/net/ethernet/realtek/8139too.c 	struct device *d = &pdev->dev;
d                 806 drivers/net/ethernet/realtek/8139too.c 	dev_dbg(d, "%s region size = 0x%02lX\n", res[bar].type, io_len);
d                 809 drivers/net/ethernet/realtek/8139too.c 		dev_err(d, "region #%d not a %s resource, aborting\n", bar,
d                 815 drivers/net/ethernet/realtek/8139too.c 		dev_err(d, "Invalid PCI %s region size(s), aborting\n",
d                 823 drivers/net/ethernet/realtek/8139too.c 		dev_err(d, "cannot map %s\n", res[bar].type);
d                 783 drivers/net/ethernet/realtek/r8169_main.c static void rtl_udelay(unsigned int d)
d                 785 drivers/net/ethernet/realtek/r8169_main.c 	udelay(d);
d                 789 drivers/net/ethernet/realtek/r8169_main.c 			  void (*delay)(unsigned int), unsigned int d, int n,
d                 797 drivers/net/ethernet/realtek/r8169_main.c 		delay(d);
d                 800 drivers/net/ethernet/realtek/r8169_main.c 		  c->msg, !high, n, d);
d                 806 drivers/net/ethernet/realtek/r8169_main.c 				      unsigned int d, int n)
d                 808 drivers/net/ethernet/realtek/r8169_main.c 	return rtl_loop_wait(tp, c, rtl_udelay, d, n, true);
d                 813 drivers/net/ethernet/realtek/r8169_main.c 				     unsigned int d, int n)
d                 815 drivers/net/ethernet/realtek/r8169_main.c 	return rtl_loop_wait(tp, c, rtl_udelay, d, n, false);
d                 820 drivers/net/ethernet/realtek/r8169_main.c 				      unsigned int d, int n)
d                 822 drivers/net/ethernet/realtek/r8169_main.c 	return rtl_loop_wait(tp, c, msleep, d, n, true);
d                 827 drivers/net/ethernet/realtek/r8169_main.c 				     unsigned int d, int n)
d                 829 drivers/net/ethernet/realtek/r8169_main.c 	return rtl_loop_wait(tp, c, msleep, d, n, false);
d                1526 drivers/net/ethernet/realtek/r8169_main.c 	struct device *d = tp_to_dev(tp);
d                1531 drivers/net/ethernet/realtek/r8169_main.c 	pm_runtime_get_noresume(d);
d                1537 drivers/net/ethernet/realtek/r8169_main.c 	if (pm_runtime_active(d))
d                1542 drivers/net/ethernet/realtek/r8169_main.c 	pm_runtime_put_noidle(d);
d                1779 drivers/net/ethernet/realtek/r8169_main.c 	struct device *d = tp_to_dev(tp);
d                1784 drivers/net/ethernet/realtek/r8169_main.c 	pm_runtime_get_noresume(d);
d                1786 drivers/net/ethernet/realtek/r8169_main.c 	if (pm_runtime_active(d))
d                1789 drivers/net/ethernet/realtek/r8169_main.c 	pm_runtime_put_noidle(d);
d                2037 drivers/net/ethernet/realtek/r8169_main.c 	struct device *d = tp_to_dev(tp);
d                2043 drivers/net/ethernet/realtek/r8169_main.c 	pm_runtime_get_noresume(d);
d                2045 drivers/net/ethernet/realtek/r8169_main.c 	if (!pm_runtime_active(d)) {
d                2051 drivers/net/ethernet/realtek/r8169_main.c 	pm_runtime_put_noidle(d);
d                2059 drivers/net/ethernet/realtek/r8169_main.c 	struct device *d = tp_to_dev(tp);
d                2065 drivers/net/ethernet/realtek/r8169_main.c 	pm_runtime_get_noresume(d);
d                2067 drivers/net/ethernet/realtek/r8169_main.c 	if (!pm_runtime_active(d)) {
d                2084 drivers/net/ethernet/realtek/r8169_main.c 	pm_runtime_put_noidle(d);
d                3899 drivers/net/ethernet/realtek/r8169_main.c 	struct device *d = tp_to_dev(tp);
d                3906 drivers/net/ethernet/realtek/r8169_main.c 	pm_runtime_get_noresume(d);
d                3908 drivers/net/ethernet/realtek/r8169_main.c 	if (pm_runtime_active(d))
d                3911 drivers/net/ethernet/realtek/r8169_main.c 	pm_runtime_put_noidle(d);
d                5539 drivers/net/ethernet/realtek/r8169_main.c 	struct device *d = tp_to_dev(tp);
d                5540 drivers/net/ethernet/realtek/r8169_main.c 	int node = dev_to_node(d);
d                5548 drivers/net/ethernet/realtek/r8169_main.c 	mapping = dma_map_page(d, data, 0, R8169_RX_BUF_SIZE, DMA_FROM_DEVICE);
d                5549 drivers/net/ethernet/realtek/r8169_main.c 	if (unlikely(dma_mapping_error(d, mapping))) {
d                5614 drivers/net/ethernet/realtek/r8169_main.c static void rtl8169_unmap_tx_skb(struct device *d, struct ring_info *tx_skb,
d                5619 drivers/net/ethernet/realtek/r8169_main.c 	dma_unmap_single(d, le64_to_cpu(desc->addr), len, DMA_TO_DEVICE);
d                5702 drivers/net/ethernet/realtek/r8169_main.c 	struct device *d = tp_to_dev(tp);
d                5716 drivers/net/ethernet/realtek/r8169_main.c 		mapping = dma_map_single(d, addr, len, DMA_TO_DEVICE);
d                5717 drivers/net/ethernet/realtek/r8169_main.c 		if (unlikely(dma_mapping_error(d, mapping))) {
d                5886 drivers/net/ethernet/realtek/r8169_main.c 	struct device *d = tp_to_dev(tp);
d                5912 drivers/net/ethernet/realtek/r8169_main.c 	mapping = dma_map_single(d, skb->data, len, DMA_TO_DEVICE);
d                5913 drivers/net/ethernet/realtek/r8169_main.c 	if (unlikely(dma_mapping_error(d, mapping))) {
d                5977 drivers/net/ethernet/realtek/r8169_main.c 	rtl8169_unmap_tx_skb(d, tp->tx_skb + entry, txd);
d                7021 drivers/net/ethernet/realtek/r8169_main.c 	struct device *d = tp_to_dev(tp);
d                7025 drivers/net/ethernet/realtek/r8169_main.c 	clk = devm_clk_get(d, "ether_clk");
d                7032 drivers/net/ethernet/realtek/r8169_main.c 			dev_err(d, "failed to get clk: %d\n", rc);
d                7037 drivers/net/ethernet/realtek/r8169_main.c 			dev_err(d, "failed to enable clk: %d\n", rc);
d                7039 drivers/net/ethernet/realtek/r8169_main.c 			rc = devm_add_action_or_reset(d, rtl_disable_clk, clk);
d                 201 drivers/net/ethernet/seeq/ether3.c 	if (ecard_readchunk(&cd, ec, 0xf5, 0) && (s = strchr(cd.d.string, '('))) {
d                 538 drivers/net/ethernet/sfc/bitfield.h #define EFX_OWORD32(a, b, c, d)				\
d                 540 drivers/net/ethernet/sfc/bitfield.h 		   cpu_to_le32(c), cpu_to_le32(d) } }
d                 535 drivers/net/ethernet/sfc/falcon/bitfield.h #define EF4_OWORD32(a, b, c, d)				\
d                 537 drivers/net/ethernet/sfc/falcon/bitfield.h 		   cpu_to_le32(c), cpu_to_le32(d) } }
d                 138 drivers/net/ethernet/sgi/ioc3-eth.c 	dma_addr_t d;
d                 150 drivers/net/ethernet/sgi/ioc3-eth.c 	d = dma_map_single(ip->dma_dev, new_skb->data,
d                 153 drivers/net/ethernet/sgi/ioc3-eth.c 	if (dma_mapping_error(ip->dma_dev, d)) {
d                 157 drivers/net/ethernet/sgi/ioc3-eth.c 	*rxb_dma = d;
d                 547 drivers/net/ethernet/sgi/ioc3-eth.c 	dma_addr_t d;
d                 565 drivers/net/ethernet/sgi/ioc3-eth.c 			if (ioc3_alloc_skb(ip, &new_skb, &rxb, &d)) {
d                 571 drivers/net/ethernet/sgi/ioc3-eth.c 				d = rxr[rx_entry];
d                 595 drivers/net/ethernet/sgi/ioc3-eth.c 			d = rxr[rx_entry];
d                 605 drivers/net/ethernet/sgi/ioc3-eth.c 		rxr[n_entry] = cpu_to_be64(ioc3_map(d, PCI64_ATTR_BAR));
d                 864 drivers/net/ethernet/sgi/ioc3-eth.c 	dma_addr_t d;
d                 872 drivers/net/ethernet/sgi/ioc3-eth.c 		if (ioc3_alloc_skb(ip, &ip->rx_skbs[i], &rxb, &d))
d                 876 drivers/net/ethernet/sgi/ioc3-eth.c 		ip->rxr[i] = cpu_to_be64(ioc3_map(d, PCI64_ATTR_BAR));
d                1460 drivers/net/ethernet/sgi/ioc3-eth.c 		dma_addr_t d;
d                1465 drivers/net/ethernet/sgi/ioc3-eth.c 		d = dma_map_single(ip->dma_dev, skb->data, len, DMA_TO_DEVICE);
d                1466 drivers/net/ethernet/sgi/ioc3-eth.c 		if (dma_mapping_error(ip->dma_dev, d))
d                1468 drivers/net/ethernet/sgi/ioc3-eth.c 		desc->p1     = cpu_to_be64(ioc3_map(d, PCI64_ATTR_PREF));
d                  46 drivers/net/ethernet/stmicro/stmmac/dwmac-oxnas.c #define DWMAC_TX_VARDELAY(d)		((d) << DWMAC_TX_VARDELAY_SHIFT)
d                  47 drivers/net/ethernet/stmicro/stmmac/dwmac-oxnas.c #define DWMAC_TXN_VARDELAY(d)		((d) << DWMAC_TXN_VARDELAY_SHIFT)
d                  48 drivers/net/ethernet/stmicro/stmmac/dwmac-oxnas.c #define DWMAC_RX_VARDELAY(d)		((d) << DWMAC_RX_VARDELAY_SHIFT)
d                  49 drivers/net/ethernet/stmicro/stmmac/dwmac-oxnas.c #define DWMAC_RXN_VARDELAY(d)		((d) << DWMAC_RXN_VARDELAY_SHIFT)
d                 999 drivers/net/ethernet/sun/sunvnet_common.c 		struct vio_net_desc *d;
d                1005 drivers/net/ethernet/sun/sunvnet_common.c 		d = vio_dring_entry(dr, txi);
d                1007 drivers/net/ethernet/sun/sunvnet_common.c 		if (d->hdr.state == VIO_DESC_READY) {
d                1012 drivers/net/ethernet/sun/sunvnet_common.c 			if (d->hdr.state != VIO_DESC_DONE)
d                1014 drivers/net/ethernet/sun/sunvnet_common.c 					  d->hdr.state);
d                1024 drivers/net/ethernet/sun/sunvnet_common.c 		} else if (d->hdr.state == VIO_DESC_FREE) {
d                1027 drivers/net/ethernet/sun/sunvnet_common.c 		d->hdr.state = VIO_DESC_FREE;
d                1332 drivers/net/ethernet/sun/sunvnet_common.c 	struct vio_net_desc *d;
d                1410 drivers/net/ethernet/sun/sunvnet_common.c 	d = vio_dring_cur(dr);
d                1441 drivers/net/ethernet/sun/sunvnet_common.c 	d->hdr.ack = VIO_ACK_DISABLE;
d                1442 drivers/net/ethernet/sun/sunvnet_common.c 	d->size = len;
d                1443 drivers/net/ethernet/sun/sunvnet_common.c 	d->ncookies = port->tx_bufs[txi].ncookies;
d                1444 drivers/net/ethernet/sun/sunvnet_common.c 	for (i = 0; i < d->ncookies; i++)
d                1445 drivers/net/ethernet/sun/sunvnet_common.c 		d->cookies[i] = port->tx_bufs[txi].cookies[i];
d                1447 drivers/net/ethernet/sun/sunvnet_common.c 		struct vio_net_dext *dext = vio_net_ext(d);
d                1467 drivers/net/ethernet/sun/sunvnet_common.c 	d->hdr.state = VIO_DESC_READY;
d                1498 drivers/net/ethernet/sun/sunvnet_common.c 		d->hdr.state = VIO_DESC_FREE;
d                1699 drivers/net/ethernet/sun/sunvnet_common.c 		struct vio_net_desc *d;
d                1705 drivers/net/ethernet/sun/sunvnet_common.c 		d = vio_dring_entry(dr, i);
d                1712 drivers/net/ethernet/sun/sunvnet_common.c 		d->hdr.state = VIO_DESC_FREE;
d                1771 drivers/net/ethernet/sun/sunvnet_common.c 		struct vio_net_desc *d;
d                1773 drivers/net/ethernet/sun/sunvnet_common.c 		d = vio_dring_entry(dr, i);
d                1774 drivers/net/ethernet/sun/sunvnet_common.c 		d->hdr.state = VIO_DESC_FREE;
d                1425 drivers/net/ethernet/tehuti/tehuti.c static int bdx_tx_db_init(struct txdb *d, int sz_type)
d                1429 drivers/net/ethernet/tehuti/tehuti.c 	d->start = vmalloc(memsz);
d                1430 drivers/net/ethernet/tehuti/tehuti.c 	if (!d->start)
d                1438 drivers/net/ethernet/tehuti/tehuti.c 	d->size = memsz / sizeof(struct tx_map) - 1;
d                1439 drivers/net/ethernet/tehuti/tehuti.c 	d->end = d->start + d->size + 1;	/* just after last element */
d                1442 drivers/net/ethernet/tehuti/tehuti.c 	d->rptr = d->start;
d                1443 drivers/net/ethernet/tehuti/tehuti.c 	d->wptr = d->start;
d                1452 drivers/net/ethernet/tehuti/tehuti.c static void bdx_tx_db_close(struct txdb *d)
d                1454 drivers/net/ethernet/tehuti/tehuti.c 	BDX_ASSERT(d == NULL);
d                1456 drivers/net/ethernet/tehuti/tehuti.c 	vfree(d->start);
d                1457 drivers/net/ethernet/tehuti/tehuti.c 	d->start = NULL;
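The tehuti.c hits show bdx_tx_db_init() carving a vmalloc()'d area into a simple circular buffer with start/end/rptr/wptr bookkeeping. A hedged sketch of the same bookkeeping, with a hypothetical element type and structure (names and sizes are assumptions, not tehuti's):

#include <linux/vmalloc.h>
#include <linux/errno.h>

struct example_map_entry { void *skb; };	/* hypothetical ring element */

struct example_db {
	struct example_map_entry *start, *end, *rptr, *wptr;
	int size;
};

/* Illustrative ring init, loosely modeled on the bdx_tx_db_init() hits. */
static int example_db_init(struct example_db *d, size_t memsz)
{
	d->start = vmalloc(memsz);
	if (!d->start)
		return -ENOMEM;

	d->size = memsz / sizeof(struct example_map_entry) - 1;
	d->end  = d->start + d->size + 1;	/* just after the last element */
	d->rptr = d->start;			/* ring starts empty */
	d->wptr = d->start;
	return 0;
}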
d                  61 drivers/net/ethernet/ti/netcp_ethss.c #define IS_SS_ID_MU(d) \
d                  62 drivers/net/ethernet/ti/netcp_ethss.c 	((GBE_IDENT((d)->ss_version) == GBE_SS_ID_NU) || \
d                  63 drivers/net/ethernet/ti/netcp_ethss.c 	 (GBE_IDENT((d)->ss_version) == GBE_SS_ID_2U))
d                  65 drivers/net/ethernet/ti/netcp_ethss.c #define IS_SS_ID_NU(d) \
d                  66 drivers/net/ethernet/ti/netcp_ethss.c 	(GBE_IDENT((d)->ss_version) == GBE_SS_ID_NU)
d                  68 drivers/net/ethernet/ti/netcp_ethss.c #define IS_SS_ID_VER_14(d) \
d                  69 drivers/net/ethernet/ti/netcp_ethss.c 	(GBE_IDENT((d)->ss_version) == GBE_SS_VERSION_14)
d                  70 drivers/net/ethernet/ti/netcp_ethss.c #define IS_SS_ID_2U(d) \
d                  71 drivers/net/ethernet/ti/netcp_ethss.c 	(GBE_IDENT((d)->ss_version) == GBE_SS_ID_2U)
d                  95 drivers/net/ethernet/ti/netcp_ethss.c #define IS_SS_ID_XGBE(d)		((d)->ss_version == XGBE_SS_VERSION_10)
d                 159 drivers/net/ethernet/ti/netcp_ethss.c #define SGMII_BASE(d, s) \
d                 160 drivers/net/ethernet/ti/netcp_ethss.c 	(((s) < 2) ? (d)->sgmii_port_regs : (d)->sgmii_port34_regs)
d                 322 drivers/net/ethernet/toshiba/ps3_gelic_net.h static inline struct gelic_card *netdev_card(struct net_device *d)
d                 324 drivers/net/ethernet/toshiba/ps3_gelic_net.h 	return ((struct gelic_port *)netdev_priv(d))->card;
d                 326 drivers/net/ethernet/toshiba/ps3_gelic_net.h static inline struct gelic_port *netdev_port(struct net_device *d)
d                 328 drivers/net/ethernet/toshiba/ps3_gelic_net.h 	return (struct gelic_port *)netdev_priv(d);
d                 380 drivers/net/ethernet/toshiba/tc35815.c #define tc_writel(d, addr)	iowrite32(d, addr)
d                 401 drivers/net/ethernet/xircom/xirc2ps_cs.c     unsigned d;
d                 405 drivers/net/ethernet/xircom/xirc2ps_cs.c     d = GetByte(XIRCREG2_GPR2); /* read MDIO */
d                 408 drivers/net/ethernet/xircom/xirc2ps_cs.c     return d & 0x20; /* read MDIO */
d                 160 drivers/net/hamradio/mkiss.c static int kiss_esc(unsigned char *s, unsigned char *d, int len)
d                 162 drivers/net/hamradio/mkiss.c 	unsigned char *ptr = d;
d                 190 drivers/net/hamradio/mkiss.c 	return ptr - d;
d                 198 drivers/net/hamradio/mkiss.c static int kiss_esc_crc(unsigned char *s, unsigned char *d, unsigned short crc,
d                 201 drivers/net/hamradio/mkiss.c 	unsigned char *ptr = d;
d                 231 drivers/net/hamradio/mkiss.c 	return ptr - d;
d                1118 drivers/net/hyperv/netvsc.c 		= container_of(desc, const struct vmtransfer_page_packet_header, d);
d                1175 drivers/net/hyperv/netvsc.c 			     vmxferpage_packet->d.trans_id, status);
d                 108 drivers/net/ipvlan/ipvlan.h static inline struct ipvl_port *ipvlan_port_get_rcu(const struct net_device *d)
d                 110 drivers/net/ipvlan/ipvlan.h 	return rcu_dereference(d->rx_handler_data);
d                 113 drivers/net/ipvlan/ipvlan.h static inline struct ipvl_port *ipvlan_port_get_rcu_bh(const struct net_device *d)
d                 115 drivers/net/ipvlan/ipvlan.h 	return rcu_dereference_bh(d->rx_handler_data);
d                 118 drivers/net/ipvlan/ipvlan.h static inline struct ipvl_port *ipvlan_port_get_rtnl(const struct net_device *d)
d                 120 drivers/net/ipvlan/ipvlan.h 	return rtnl_dereference(d->rx_handler_data);
d                  33 drivers/net/ipvlan/ipvtap.c static const void *ipvtap_net_namespace(struct device *d)
d                  35 drivers/net/ipvlan/ipvtap.c 	struct net_device *dev = to_net_dev(d->parent);
d                2604 drivers/net/macsec.c 	int dev_idx, d;
d                2608 drivers/net/macsec.c 	d = 0;
d                2616 drivers/net/macsec.c 		if (d < dev_idx)
d                2626 drivers/net/macsec.c 		d++;
d                2631 drivers/net/macsec.c 	cb->args[0] = d;
d                  38 drivers/net/macvtap.c static const void *macvtap_net_namespace(struct device *d)
d                  40 drivers/net/macvtap.c 	struct net_device *dev = to_net_dev(d->parent);
d                 312 drivers/net/phy/mdio-bcm-unimac.c static int __maybe_unused unimac_mdio_suspend(struct device *d)
d                 314 drivers/net/phy/mdio-bcm-unimac.c 	struct unimac_mdio_priv *priv = dev_get_drvdata(d);
d                 321 drivers/net/phy/mdio-bcm-unimac.c static int __maybe_unused unimac_mdio_resume(struct device *d)
d                 323 drivers/net/phy/mdio-bcm-unimac.c 	struct unimac_mdio_priv *priv = dev_get_drvdata(d);
d                 249 drivers/net/phy/mdio_bus.c static void mdiobus_release(struct device *d)
d                 251 drivers/net/phy/mdio_bus.c 	struct mii_bus *bus = to_mii_bus(d);
d                 279 drivers/net/phy/mdio_bus.c 	struct device *d;
d                 284 drivers/net/phy/mdio_bus.c 	d = class_find_device_by_of_node(&mdio_bus_class, mdio_bus_np);
d                 285 drivers/net/phy/mdio_bus.c 	return d ? to_mii_bus(d) : NULL;
d                 118 drivers/net/phy/mdio_device.c 	unsigned int d;
d                 133 drivers/net/phy/mdio_device.c 	d = value ? mdiodev->reset_assert_delay : mdiodev->reset_deassert_delay;
d                 134 drivers/net/phy/mdio_device.c 	if (d)
d                 135 drivers/net/phy/mdio_device.c 		usleep_range(d, d + max_t(unsigned int, d / 10, 100));
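The mdio_device.c hit sleeps for the configured reset delay with roughly 10% of slack, but never less than 100 us. A tiny sketch of that usleep_range() window, assuming the delay is already known in microseconds:

#include <linux/delay.h>
#include <linux/kernel.h>

/* Illustrative: sleep for delay_us with ~10% slack (at least 100 us),
 * mirroring the usleep_range() window in the mdio_device.c hit above. */
static void example_reset_delay(unsigned int delay_us)
{
	if (delay_us)
		usleep_range(delay_us,
			     delay_us + max_t(unsigned int, delay_us / 10, 100));
}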
d                  69 drivers/net/phy/phy-core.c #define PHY_SETTING(s, d, b) { .speed = SPEED_ ## s, .duplex = DUPLEX_ ## d, \
d                 994 drivers/net/phy/phy_device.c 	struct device *d;
d                1000 drivers/net/phy/phy_device.c 	d = bus_find_device_by_name(&mdio_bus_type, NULL, bus_id);
d                1001 drivers/net/phy/phy_device.c 	if (!d) {
d                1005 drivers/net/phy/phy_device.c 	phydev = to_phy_device(d);
d                1008 drivers/net/phy/phy_device.c 	put_device(d);
d                1204 drivers/net/phy/phy_device.c 	struct device *d = &phydev->mdio.dev;
d                1221 drivers/net/phy/phy_device.c 	get_device(d);
d                1226 drivers/net/phy/phy_device.c 	if (!d->driver) {
d                1228 drivers/net/phy/phy_device.c 			d->driver = &genphy_c45_driver.mdiodrv.driver;
d                1230 drivers/net/phy/phy_device.c 			d->driver = &genphy_driver.mdiodrv.driver;
d                1235 drivers/net/phy/phy_device.c 	if (!try_module_get(d->driver->owner)) {
d                1242 drivers/net/phy/phy_device.c 		err = d->driver->probe(d);
d                1244 drivers/net/phy/phy_device.c 			err = device_bind_driver(d);
d                1312 drivers/net/phy/phy_device.c 	module_put(d->driver->owner);
d                1314 drivers/net/phy/phy_device.c 	put_device(d);
d                1335 drivers/net/phy/phy_device.c 	struct device *d;
d                1344 drivers/net/phy/phy_device.c 	d = bus_find_device_by_name(bus, NULL, bus_id);
d                1345 drivers/net/phy/phy_device.c 	if (!d) {
d                1349 drivers/net/phy/phy_device.c 	phydev = to_phy_device(d);
d                1352 drivers/net/phy/phy_device.c 	put_device(d);
d                1363 drivers/net/phy/phy_device.c 	struct device *d = &phydev->mdio.dev;
d                1369 drivers/net/phy/phy_device.c 	get_device(d);
d                1370 drivers/net/phy/phy_device.c 	ret = d->driver == driver;
d                1371 drivers/net/phy/phy_device.c 	put_device(d);
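The phy_device.c hits look a PHY up by bus id: bus_find_device_by_name() returns the struct device with a reference held, to_phy_device() converts it, and put_device() drops that reference once the caller has its own handle. A minimal sketch of that lookup, with error handling trimmed:

#include <linux/device.h>
#include <linux/phy.h>

/* Illustrative lookup following the phy_device.c hits above. */
static struct phy_device *example_find_phy(const char *bus_id)
{
	struct device *d;
	struct phy_device *phydev;

	d = bus_find_device_by_name(&mdio_bus_type, NULL, bus_id);
	if (!d)
		return NULL;

	phydev = to_phy_device(d);
	put_device(d);		/* bus_find_device_by_name() took a reference */

	return phydev;
}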
d                1281 drivers/net/ppp/ppp_generic.c 			ppp->rcomp->decomp_stat(ppp->rc_state, &cstats.d);
d                 102 drivers/net/slip/slip.c static int slip_esc(unsigned char *p, unsigned char *d, int len);
d                 105 drivers/net/slip/slip.c static int slip_esc6(unsigned char *p, unsigned char *d, int len);
d                 921 drivers/net/slip/slip.c static int slip_esc(unsigned char *s, unsigned char *d, int len)
d                 923 drivers/net/slip/slip.c 	unsigned char *ptr = d;
d                 955 drivers/net/slip/slip.c 	return ptr - d;
d                1004 drivers/net/slip/slip.c static int slip_esc6(unsigned char *s, unsigned char *d, int len)
d                1006 drivers/net/slip/slip.c 	unsigned char *ptr = d;
d                1038 drivers/net/slip/slip.c 	return ptr - d;
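The slip_esc(), kiss_esc() and x25_asy_esc() hits all share one shape: walk the source buffer, copy or escape each byte into d, and return ptr - d as the encoded length. A hedged, generic sketch of SLIP-style byte stuffing in that shape (framing constants per RFC 1055; this is not the kernel's exact code, and the caller is assumed to size d for the worst case of 2 * len + 2):

/* SLIP framing constants (RFC 1055). */
#define SLIP_END	0300	/* 0xC0: frame delimiter */
#define SLIP_ESC	0333	/* 0xDB: escape */
#define SLIP_ESC_END	0334	/* 0xDC: escaped END */
#define SLIP_ESC_ESC	0335	/* 0xDD: escaped ESC */

/* Illustrative escape routine; returns the number of bytes written to d. */
static int example_slip_esc(const unsigned char *s, unsigned char *d, int len)
{
	unsigned char *ptr = d;

	*ptr++ = SLIP_END;			/* flush any line noise */
	while (len-- > 0) {
		unsigned char c = *s++;

		if (c == SLIP_END) {
			*ptr++ = SLIP_ESC;
			*ptr++ = SLIP_ESC_END;
		} else if (c == SLIP_ESC) {
			*ptr++ = SLIP_ESC;
			*ptr++ = SLIP_ESC_ESC;
		} else {
			*ptr++ = c;
		}
	}
	*ptr++ = SLIP_END;			/* close the frame */

	return ptr - d;
}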
d                 105 drivers/net/usb/cdc_ether.c 	struct usb_interface_descriptor	*d;
d                 203 drivers/net/usb/cdc_ether.c 	d = &info->data->cur_altsetting->desc;
d                 204 drivers/net/usb/cdc_ether.c 	if (d->bInterfaceClass != USB_CLASS_CDC_DATA) {
d                 205 drivers/net/usb/cdc_ether.c 		dev_dbg(&intf->dev, "slave class %u\n", d->bInterfaceClass);
d                 191 drivers/net/usb/cdc_ncm.c static ssize_t cdc_ncm_show_min_tx_pkt(struct device *d, struct device_attribute *attr, char *buf)
d                 193 drivers/net/usb/cdc_ncm.c 	struct usbnet *dev = netdev_priv(to_net_dev(d));
d                 199 drivers/net/usb/cdc_ncm.c static ssize_t cdc_ncm_show_rx_max(struct device *d, struct device_attribute *attr, char *buf)
d                 201 drivers/net/usb/cdc_ncm.c 	struct usbnet *dev = netdev_priv(to_net_dev(d));
d                 207 drivers/net/usb/cdc_ncm.c static ssize_t cdc_ncm_show_tx_max(struct device *d, struct device_attribute *attr, char *buf)
d                 209 drivers/net/usb/cdc_ncm.c 	struct usbnet *dev = netdev_priv(to_net_dev(d));
d                 215 drivers/net/usb/cdc_ncm.c static ssize_t cdc_ncm_show_tx_timer_usecs(struct device *d, struct device_attribute *attr, char *buf)
d                 217 drivers/net/usb/cdc_ncm.c 	struct usbnet *dev = netdev_priv(to_net_dev(d));
d                 223 drivers/net/usb/cdc_ncm.c static ssize_t cdc_ncm_store_min_tx_pkt(struct device *d,  struct device_attribute *attr, const char *buf, size_t len)
d                 225 drivers/net/usb/cdc_ncm.c 	struct usbnet *dev = netdev_priv(to_net_dev(d));
d                 237 drivers/net/usb/cdc_ncm.c static ssize_t cdc_ncm_store_rx_max(struct device *d,  struct device_attribute *attr, const char *buf, size_t len)
d                 239 drivers/net/usb/cdc_ncm.c 	struct usbnet *dev = netdev_priv(to_net_dev(d));
d                 250 drivers/net/usb/cdc_ncm.c static ssize_t cdc_ncm_store_tx_max(struct device *d,  struct device_attribute *attr, const char *buf, size_t len)
d                 252 drivers/net/usb/cdc_ncm.c 	struct usbnet *dev = netdev_priv(to_net_dev(d));
d                 263 drivers/net/usb/cdc_ncm.c static ssize_t cdc_ncm_store_tx_timer_usecs(struct device *d,  struct device_attribute *attr, const char *buf, size_t len)
d                 265 drivers/net/usb/cdc_ncm.c 	struct usbnet *dev = netdev_priv(to_net_dev(d));
d                 289 drivers/net/usb/cdc_ncm.c static ssize_t ndp_to_end_show(struct device *d, struct device_attribute *attr, char *buf)
d                 291 drivers/net/usb/cdc_ncm.c 	struct usbnet *dev = netdev_priv(to_net_dev(d));
d                 297 drivers/net/usb/cdc_ncm.c static ssize_t ndp_to_end_store(struct device *d,  struct device_attribute *attr, const char *buf, size_t len)
d                 299 drivers/net/usb/cdc_ncm.c 	struct usbnet *dev = netdev_priv(to_net_dev(d));
d                 332 drivers/net/usb/cdc_ncm.c static ssize_t cdc_ncm_show_##name(struct device *d, struct device_attribute *attr, char *buf) \
d                 334 drivers/net/usb/cdc_ncm.c 	struct usbnet *dev = netdev_priv(to_net_dev(d)); \
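The cdc_ncm (and later qmi_wwan) sysfs hits all recover the driver context from the sysfs struct device via to_net_dev() + netdev_priv(). A minimal read-only attribute in that style; the attribute name is made up and the field shown is just any per-device value, not cdc_ncm's:

#include <linux/device.h>
#include <linux/netdevice.h>
#include <linux/usb/usbnet.h>

/* Illustrative sysfs attribute in the same shape as the cdc_ncm hits. */
static ssize_t example_value_show(struct device *d,
				  struct device_attribute *attr, char *buf)
{
	struct usbnet *dev = netdev_priv(to_net_dev(d));

	return sprintf(buf, "%u\n", dev->hard_mtu);	/* any per-device field */
}
static DEVICE_ATTR_RO(example_value);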
d                1882 drivers/net/usb/lan78xx.c static int irq_map(struct irq_domain *d, unsigned int irq,
d                1885 drivers/net/usb/lan78xx.c 	struct irq_domain_data *data = d->host_data;
d                1894 drivers/net/usb/lan78xx.c static void irq_unmap(struct irq_domain *d, unsigned int irq)
d                 333 drivers/net/usb/pegasus.c 	__u8 tmp, d[4] = { 0x3f, 0, 0, EPROM_WRITE };
d                 337 drivers/net/usb/pegasus.c 	set_registers(pegasus, EpromOffset, 4, d);
d                 665 drivers/net/usb/pegasus.c 		u8 *d = urb->transfer_buffer;
d                 668 drivers/net/usb/pegasus.c 		if (d[0] & (TX_UNDERRUN|EXCESSIVE_COL
d                 671 drivers/net/usb/pegasus.c 			if (d[0] & TX_UNDERRUN)
d                 673 drivers/net/usb/pegasus.c 			if (d[0] & (EXCESSIVE_COL | JABBER_TIMEOUT))
d                 675 drivers/net/usb/pegasus.c 			if (d[0] & LATE_COL)
d                 685 drivers/net/usb/pegasus.c 		net->stats.rx_missed_errors += ((d[3] & 0x7f) << 8) | d[4];
d                 351 drivers/net/usb/qmi_wwan.c static ssize_t raw_ip_show(struct device *d, struct device_attribute *attr, char *buf)
d                 353 drivers/net/usb/qmi_wwan.c 	struct usbnet *dev = netdev_priv(to_net_dev(d));
d                 359 drivers/net/usb/qmi_wwan.c static ssize_t raw_ip_store(struct device *d,  struct device_attribute *attr, const char *buf, size_t len)
d                 361 drivers/net/usb/qmi_wwan.c 	struct usbnet *dev = netdev_priv(to_net_dev(d));
d                 403 drivers/net/usb/qmi_wwan.c static ssize_t add_mux_show(struct device *d, struct device_attribute *attr, char *buf)
d                 405 drivers/net/usb/qmi_wwan.c 	struct net_device *dev = to_net_dev(d);
d                 421 drivers/net/usb/qmi_wwan.c static ssize_t add_mux_store(struct device *d,  struct device_attribute *attr, const char *buf, size_t len)
d                 423 drivers/net/usb/qmi_wwan.c 	struct usbnet *dev = netdev_priv(to_net_dev(d));
d                 461 drivers/net/usb/qmi_wwan.c static ssize_t del_mux_show(struct device *d, struct device_attribute *attr, char *buf)
d                 463 drivers/net/usb/qmi_wwan.c 	return add_mux_show(d, attr, buf);
d                 466 drivers/net/usb/qmi_wwan.c static ssize_t del_mux_store(struct device *d,  struct device_attribute *attr, const char *buf, size_t len)
d                 468 drivers/net/usb/qmi_wwan.c 	struct usbnet *dev = netdev_priv(to_net_dev(d));
d                1455 drivers/net/usb/r8152.c 	__le16 *d;
d                1490 drivers/net/usb/r8152.c 	d = urb->transfer_buffer;
d                1491 drivers/net/usb/r8152.c 	if (INTR_LINK & __le16_to_cpu(d[0])) {
d                5288 drivers/net/usb/r8152.c 			       const struct ethtool_tunable *tunable, void *d)
d                5294 drivers/net/usb/r8152.c 		*(u32 *)d = tp->rx_copybreak;
d                5305 drivers/net/usb/r8152.c 			       const void *d)
d                5312 drivers/net/usb/r8152.c 		val = *(u32 *)d;
d                 477 drivers/net/usb/rtl8150.c 	__u8 *d;
d                 498 drivers/net/usb/rtl8150.c 	d = urb->transfer_buffer;
d                 499 drivers/net/usb/rtl8150.c 	if (d[0] & TSR_ERRORS) {
d                 501 drivers/net/usb/rtl8150.c 		if (d[INT_TSR] & (TSR_ECOL | TSR_JBR))
d                 503 drivers/net/usb/rtl8150.c 		if (d[INT_TSR] & TSR_LCOL)
d                 505 drivers/net/usb/rtl8150.c 		if (d[INT_TSR] & TSR_LOSS_CRS)
d                 509 drivers/net/usb/rtl8150.c 	if ((d[INT_MSR] & MSR_LINK) == 0) {
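The pegasus and rtl8150 hits read the interrupt URB's transfer_buffer into d and fold its status bits into the net_device statistics. A hedged sketch of that decode with made-up status bits (the real drivers' bit definitions differ):

#include <linux/netdevice.h>

/* Hypothetical status bits, for illustration only. */
#define EX_TX_UNDERRUN	0x01
#define EX_LATE_COL	0x02
#define EX_JABBER	0x04

/* Illustrative decode of a TX status byte into netdev stats,
 * in the same shape as the pegasus/rtl8150 hits above. */
static void example_decode_tx_status(struct net_device *net, const u8 *d)
{
	if (!(d[0] & (EX_TX_UNDERRUN | EX_LATE_COL | EX_JABBER)))
		return;

	net->stats.tx_errors++;
	if (d[0] & EX_TX_UNDERRUN)
		net->stats.tx_fifo_errors++;
	if (d[0] & EX_LATE_COL)
		net->stats.tx_window_errors++;
	if (d[0] & EX_JABBER)
		net->stats.tx_aborted_errors++;
}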
d                 269 drivers/net/wan/cosa.c static int cosa_net_open(struct net_device *d);
d                 270 drivers/net/wan/cosa.c static int cosa_net_close(struct net_device *d);
d                 271 drivers/net/wan/cosa.c static void cosa_net_timeout(struct net_device *d);
d                 272 drivers/net/wan/cosa.c static netdev_tx_t cosa_net_tx(struct sk_buff *skb, struct net_device *d);
d                1016 drivers/net/wan/cosa.c 	struct cosa_download d;
d                1028 drivers/net/wan/cosa.c 	if (copy_from_user(&d, arg, sizeof(d)))
d                1031 drivers/net/wan/cosa.c 	if (d.addr < 0 || d.addr > COSA_MAX_FIRMWARE_SIZE)
d                1033 drivers/net/wan/cosa.c 	if (d.len < 0 || d.len > COSA_MAX_FIRMWARE_SIZE)
d                1040 drivers/net/wan/cosa.c 	i = download(cosa, d.code, d.len, d.addr);
d                1047 drivers/net/wan/cosa.c 		cosa->num, d.len, d.addr);
d                1055 drivers/net/wan/cosa.c 	struct cosa_download d;
d                1067 drivers/net/wan/cosa.c 	if (copy_from_user(&d, arg, sizeof(d)))
d                1073 drivers/net/wan/cosa.c 	i = readmem(cosa, d.code, d.len, d.addr);
d                1079 drivers/net/wan/cosa.c 		cosa->num, d.len, d.addr);
d                  79 drivers/net/wan/hostess_sv11.c static int hostess_open(struct net_device *d)
d                  81 drivers/net/wan/hostess_sv11.c 	struct z8530_dev *sv11 = dev_to_sv(d);
d                  89 drivers/net/wan/hostess_sv11.c 			err = z8530_sync_open(d, &sv11->chanA);
d                  92 drivers/net/wan/hostess_sv11.c 			err = z8530_sync_dma_open(d, &sv11->chanA);
d                  95 drivers/net/wan/hostess_sv11.c 			err = z8530_sync_txdma_open(d, &sv11->chanA);
d                 102 drivers/net/wan/hostess_sv11.c 	err = hdlc_open(d);
d                 106 drivers/net/wan/hostess_sv11.c 				z8530_sync_close(d, &sv11->chanA);
d                 109 drivers/net/wan/hostess_sv11.c 				z8530_sync_dma_close(d, &sv11->chanA);
d                 112 drivers/net/wan/hostess_sv11.c 				z8530_sync_txdma_close(d, &sv11->chanA);
d                 123 drivers/net/wan/hostess_sv11.c 	netif_start_queue(d);
d                 127 drivers/net/wan/hostess_sv11.c static int hostess_close(struct net_device *d)
d                 129 drivers/net/wan/hostess_sv11.c 	struct z8530_dev *sv11 = dev_to_sv(d);
d                 135 drivers/net/wan/hostess_sv11.c 	hdlc_close(d);
d                 136 drivers/net/wan/hostess_sv11.c 	netif_stop_queue(d);
d                 140 drivers/net/wan/hostess_sv11.c 			z8530_sync_close(d, &sv11->chanA);
d                 143 drivers/net/wan/hostess_sv11.c 			z8530_sync_dma_close(d, &sv11->chanA);
d                 146 drivers/net/wan/hostess_sv11.c 			z8530_sync_txdma_close(d, &sv11->chanA);
d                 152 drivers/net/wan/hostess_sv11.c static int hostess_ioctl(struct net_device *d, struct ifreq *ifr, int cmd)
d                 156 drivers/net/wan/hostess_sv11.c 	return hdlc_ioctl(d, ifr, cmd);
d                 164 drivers/net/wan/hostess_sv11.c 					    struct net_device *d)
d                 166 drivers/net/wan/hostess_sv11.c 	return z8530_queue_xmit(&dev_to_sv(d)->chanA, skb);
d                 899 drivers/net/wan/lmc/lmc_media.c lmc_t1_write (lmc_softc_t * const sc, int a, int d)
d                 902 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 18, d);
d                  75 drivers/net/wan/sealevel.c static int sealevel_open(struct net_device *d)
d                  77 drivers/net/wan/sealevel.c 	struct slvl_device *slvl = dev_to_chan(d);
d                  87 drivers/net/wan/sealevel.c 			err = z8530_sync_dma_open(d, slvl->chan);
d                  90 drivers/net/wan/sealevel.c 			err = z8530_sync_open(d, slvl->chan);
d                  97 drivers/net/wan/sealevel.c 	err = hdlc_open(d);
d                 101 drivers/net/wan/sealevel.c 				z8530_sync_dma_close(d, slvl->chan);
d                 104 drivers/net/wan/sealevel.c 				z8530_sync_close(d, slvl->chan);
d                 115 drivers/net/wan/sealevel.c 	netif_start_queue(d);
d                 119 drivers/net/wan/sealevel.c static int sealevel_close(struct net_device *d)
d                 121 drivers/net/wan/sealevel.c 	struct slvl_device *slvl = dev_to_chan(d);
d                 130 drivers/net/wan/sealevel.c 	hdlc_close(d);
d                 131 drivers/net/wan/sealevel.c 	netif_stop_queue(d);
d                 135 drivers/net/wan/sealevel.c 			z8530_sync_dma_close(d, slvl->chan);
d                 138 drivers/net/wan/sealevel.c 			z8530_sync_close(d, slvl->chan);
d                 144 drivers/net/wan/sealevel.c static int sealevel_ioctl(struct net_device *d, struct ifreq *ifr, int cmd)
d                 148 drivers/net/wan/sealevel.c 	return hdlc_ioctl(d, ifr, cmd);
d                 156 drivers/net/wan/sealevel.c 					     struct net_device *d)
d                 158 drivers/net/wan/sealevel.c 	return z8530_queue_xmit(dev_to_chan(d)->chan, skb);
d                 340 drivers/net/wan/sealevel.c 		struct net_device *d = b->dev[u].chan->netdevice;
d                 341 drivers/net/wan/sealevel.c 		unregister_hdlc_device(d);
d                 342 drivers/net/wan/sealevel.c 		free_netdev(d);
d                  47 drivers/net/wan/x25_asy.c static int x25_asy_esc(unsigned char *p, unsigned char *d, int len);
d                 617 drivers/net/wan/x25_asy.c static int x25_asy_esc(unsigned char *s, unsigned char *d, int len)
d                 619 drivers/net/wan/x25_asy.c 	unsigned char *ptr = d;
d                 651 drivers/net/wan/x25_asy.c 	return ptr - d;
d                  99 drivers/net/wan/z85230.c static inline void z8530_write_port(unsigned long p, u8 d)
d                 101 drivers/net/wan/z85230.c 	outb(d,Z8530_PORT_OF(p));
d                 201 drivers/net/wimax/i2400m/i2400m.h #define I2400M_FW_POKE(a, d) {		\
d                 203 drivers/net/wimax/i2400m/i2400m.h 	.data = cpu_to_le32(d)		\
d                  91 drivers/net/wireless/ath/carl9170/fwdesc.h #define CARL9170FW_SET_DAY(d) (((d) - 1) % 31)
d                  95 drivers/net/wireless/ath/carl9170/fwdesc.h #define CARL9170FW_GET_DAY(d) (((d) % 31) + 1)
d                 141 drivers/net/wireless/ath/wcn36xx/debug.c 		struct dentry *d;				\
d                 142 drivers/net/wireless/ath/wcn36xx/debug.c 		d = debugfs_create_file(__stringify(name),	\
d                 145 drivers/net/wireless/ath/wcn36xx/debug.c 		dfs->file_##name.dentry = d;			\
d                 146 drivers/net/wireless/ath/wcn36xx/debug.c 		if (IS_ERR(d)) {				\
d                  71 drivers/net/wireless/ath/wil6210/debugfs.c 		struct wil_tx_enhanced_desc *d =
d                  75 drivers/net/wireless/ath/wil6210/debugfs.c 		num_of_descs = (u8)d->mac.d[2];
d                 132 drivers/net/wireless/ath/wil6210/debugfs.c 				volatile struct vring_tx_desc *d =
d                 134 drivers/net/wireless/ath/wil6210/debugfs.c 				seq_printf(s, "%c", (d->dma.status & BIT(0)) ?
d                 325 drivers/net/wireless/ath/wil6210/debugfs.c 		struct wil6210_mbox_ring_desc d;
d                 327 drivers/net/wireless/ath/wil6210/debugfs.c 		size_t delta = i * sizeof(d);
d                 330 drivers/net/wireless/ath/wil6210/debugfs.c 		wil_memcpy_fromio_32(&d, x, sizeof(d));
d                 333 drivers/net/wireless/ath/wil6210/debugfs.c 			   d.sync ? "F" : "E",
d                 336 drivers/net/wireless/ath/wil6210/debugfs.c 			   le32_to_cpu(d.addr));
d                 337 drivers/net/wireless/ath/wil6210/debugfs.c 		if (0 == wmi_read_hdr(wil, d.addr, &hdr)) {
d                 345 drivers/net/wireless/ath/wil6210/debugfs.c 				void __iomem *src = wmi_buffer(wil, d.addr) +
d                 387 drivers/net/wireless/ath/wil6210/debugfs.c 	struct wil_debugfs_iomem_data *d = (struct
d                 389 drivers/net/wireless/ath/wil6210/debugfs.c 	struct wil6210_priv *wil = d->wil;
d                 396 drivers/net/wireless/ath/wil6210/debugfs.c 	writel_relaxed(val, (void __iomem *)d->offset);
d                 407 drivers/net/wireless/ath/wil6210/debugfs.c 	struct wil_debugfs_iomem_data *d = (struct
d                 409 drivers/net/wireless/ath/wil6210/debugfs.c 	struct wil6210_priv *wil = d->wil;
d                 416 drivers/net/wireless/ath/wil6210/debugfs.c 	*val = readl((void __iomem *)d->offset);
d                 513 drivers/net/wireless/ath/wil6210/debugfs.c 	struct dentry *d = debugfs_create_dir(name, parent);
d                 515 drivers/net/wireless/ath/wil6210/debugfs.c 	wil6210_debugfs_init_offset(wil, d, (void * __force)wil->csr + off,
d                 529 drivers/net/wireless/ath/wil6210/debugfs.c 	struct dentry *d = debugfs_create_dir("PSEUDO_ISR", parent);
d                 531 drivers/net/wireless/ath/wil6210/debugfs.c 	wil6210_debugfs_init_offset(wil, d, (void * __force)wil->csr,
d                 577 drivers/net/wireless/ath/wil6210/debugfs.c 	struct dentry *d, *dtx, *drx;
d                 579 drivers/net/wireless/ath/wil6210/debugfs.c 	d = debugfs_create_dir("ITR_CNT", parent);
d                 581 drivers/net/wireless/ath/wil6210/debugfs.c 	dtx = debugfs_create_dir("TX", d);
d                 582 drivers/net/wireless/ath/wil6210/debugfs.c 	drx = debugfs_create_dir("RX", d);
d                 584 drivers/net/wireless/ath/wil6210/debugfs.c 	wil6210_debugfs_init_offset(wil, d, (void * __force)wil->csr,
d                1086 drivers/net/wireless/ath/wil6210/debugfs.c 	volatile struct vring_tx_desc *d;
d                1129 drivers/net/wireless/ath/wil6210/debugfs.c 	d = &ring->va[txdesc_idx].tx.legacy;
d                1130 drivers/net/wireless/ath/wil6210/debugfs.c 	u = (volatile u32 *)d;
d                  33 drivers/net/wireless/ath/wil6210/fw.c 	volatile u32 __iomem *d = dst;
d                  36 drivers/net/wireless/ath/wil6210/fw.c 		__raw_writel(val, d++);
d                 284 drivers/net/wireless/ath/wil6210/fw_inc.c 	const struct wil_fw_record_data *d = data;
d                 286 drivers/net/wireless/ath/wil6210/fw_inc.c 	size_t s = size - sizeof(*d);
d                 288 drivers/net/wireless/ath/wil6210/fw_inc.c 	if (size < sizeof(*d) + sizeof(u32)) {
d                 296 drivers/net/wireless/ath/wil6210/fw_inc.c 	wil_memcpy_toio_32(dst, d->data, s);
d                 305 drivers/net/wireless/ath/wil6210/fw_inc.c 	const struct wil_fw_record_data *d = data;
d                 307 drivers/net/wireless/ath/wil6210/fw_inc.c 	return __fw_handle_data(wil, data, size, d->addr);
d                 313 drivers/net/wireless/ath/wil6210/fw_inc.c 	const struct wil_fw_record_fill *d = data;
d                 316 drivers/net/wireless/ath/wil6210/fw_inc.c 	size_t s = (size_t)le32_to_cpu(d->size);
d                 318 drivers/net/wireless/ath/wil6210/fw_inc.c 	if (size != sizeof(*d)) {
d                 333 drivers/net/wireless/ath/wil6210/fw_inc.c 	if (!wil_fw_addr_check(wil, &dst, d->addr, s, "address"))
d                 336 drivers/net/wireless/ath/wil6210/fw_inc.c 	v = le32_to_cpu(d->value);
d                 338 drivers/net/wireless/ath/wil6210/fw_inc.c 		   le32_to_cpu(d->addr), v, s);
d                 348 drivers/net/wireless/ath/wil6210/fw_inc.c 	const struct wil_fw_record_file_header *d = data;
d                 350 drivers/net/wireless/ath/wil6210/fw_inc.c 	if (size != sizeof(*d)) {
d                 356 drivers/net/wireless/ath/wil6210/fw_inc.c 		   d->version, d->data_len);
d                 357 drivers/net/wireless/ath/wil6210/fw_inc.c 	wil_hex_dump_fw("", DUMP_PREFIX_OFFSET, 16, 1, d->comment,
d                 358 drivers/net/wireless/ath/wil6210/fw_inc.c 			sizeof(d->comment), true);
d                 360 drivers/net/wireless/ath/wil6210/fw_inc.c 	if (!memcmp(d->comment, WIL_FW_VERSION_PREFIX,
d                 363 drivers/net/wireless/ath/wil6210/fw_inc.c 		       d->comment + WIL_FW_VERSION_PREFIX_LEN,
d                 364 drivers/net/wireless/ath/wil6210/fw_inc.c 		       min(sizeof(d->comment) - WIL_FW_VERSION_PREFIX_LEN,
d                 373 drivers/net/wireless/ath/wil6210/fw_inc.c 	const struct wil_fw_record_direct_write *d = data;
d                 374 drivers/net/wireless/ath/wil6210/fw_inc.c 	const struct wil_fw_data_dwrite *block = d->data;
d                 430 drivers/net/wireless/ath/wil6210/fw_inc.c 	const struct wil_fw_record_gateway_data *d = data;
d                 431 drivers/net/wireless/ath/wil6210/fw_inc.c 	const struct wil_fw_data_gw *block = d->data;
d                 439 drivers/net/wireless/ath/wil6210/fw_inc.c 	if (size < sizeof(*d) + sizeof(*block)) {
d                 444 drivers/net/wireless/ath/wil6210/fw_inc.c 	if ((size - sizeof(*d)) % sizeof(*block)) {
d                 447 drivers/net/wireless/ath/wil6210/fw_inc.c 			   sizeof(*block), size - sizeof(*d));
d                 450 drivers/net/wireless/ath/wil6210/fw_inc.c 	n = (size - sizeof(*d)) / sizeof(*block);
d                 452 drivers/net/wireless/ath/wil6210/fw_inc.c 	gw_cmd = le32_to_cpu(d->command);
d                 457 drivers/net/wireless/ath/wil6210/fw_inc.c 	if (!wil_fw_addr_check(wil, &gwa_addr, d->gateway_addr_addr, 0,
d                 459 drivers/net/wireless/ath/wil6210/fw_inc.c 	    !wil_fw_addr_check(wil, &gwa_val, d->gateway_value_addr, 0,
d                 461 drivers/net/wireless/ath/wil6210/fw_inc.c 	    !wil_fw_addr_check(wil, &gwa_cmd, d->gateway_cmd_addr, 0,
d                 463 drivers/net/wireless/ath/wil6210/fw_inc.c 	    !wil_fw_addr_check(wil, &gwa_ctl, d->gateway_ctrl_address, 0,
d                 469 drivers/net/wireless/ath/wil6210/fw_inc.c 		   le32_to_cpu(d->gateway_addr_addr),
d                 470 drivers/net/wireless/ath/wil6210/fw_inc.c 		   le32_to_cpu(d->gateway_value_addr),
d                 471 drivers/net/wireless/ath/wil6210/fw_inc.c 		   le32_to_cpu(d->gateway_cmd_addr),
d                 472 drivers/net/wireless/ath/wil6210/fw_inc.c 		   le32_to_cpu(d->gateway_ctrl_address));
d                 494 drivers/net/wireless/ath/wil6210/fw_inc.c 	const struct wil_fw_record_gateway_data4 *d = data;
d                 495 drivers/net/wireless/ath/wil6210/fw_inc.c 	const struct wil_fw_data_gw4 *block = d->data;
d                 503 drivers/net/wireless/ath/wil6210/fw_inc.c 	if (size < sizeof(*d) + sizeof(*block)) {
d                 508 drivers/net/wireless/ath/wil6210/fw_inc.c 	if ((size - sizeof(*d)) % sizeof(*block)) {
d                 511 drivers/net/wireless/ath/wil6210/fw_inc.c 			   sizeof(*block), size - sizeof(*d));
d                 514 drivers/net/wireless/ath/wil6210/fw_inc.c 	n = (size - sizeof(*d)) / sizeof(*block);
d                 516 drivers/net/wireless/ath/wil6210/fw_inc.c 	gw_cmd = le32_to_cpu(d->command);
d                 521 drivers/net/wireless/ath/wil6210/fw_inc.c 	if (!wil_fw_addr_check(wil, &gwa_addr, d->gateway_addr_addr, 0,
d                 526 drivers/net/wireless/ath/wil6210/fw_inc.c 				       d->gateway_value_addr[k],
d                 529 drivers/net/wireless/ath/wil6210/fw_inc.c 	if (!wil_fw_addr_check(wil, &gwa_cmd, d->gateway_cmd_addr, 0,
d                 531 drivers/net/wireless/ath/wil6210/fw_inc.c 	    !wil_fw_addr_check(wil, &gwa_ctl, d->gateway_ctrl_address, 0,
d                 536 drivers/net/wireless/ath/wil6210/fw_inc.c 		   le32_to_cpu(d->gateway_addr_addr),
d                 537 drivers/net/wireless/ath/wil6210/fw_inc.c 		   le32_to_cpu(d->gateway_cmd_addr),
d                 538 drivers/net/wireless/ath/wil6210/fw_inc.c 		   le32_to_cpu(d->gateway_ctrl_address));
d                 540 drivers/net/wireless/ath/wil6210/fw_inc.c 			d->gateway_value_addr, sizeof(d->gateway_value_addr),
d                 663 drivers/net/wireless/ath/wil6210/fw_inc.c 	const void *d;
d                 677 drivers/net/wireless/ath/wil6210/fw_inc.c 	for (sz = fw->size, d = fw->data; sz; sz -= rc1, d += rc1) {
d                 678 drivers/net/wireless/ath/wil6210/fw_inc.c 		rc1 = wil_fw_verify(wil, d, sz);
d                 683 drivers/net/wireless/ath/wil6210/fw_inc.c 		rc = wil_fw_process(wil, d, rc1, load);
d                 155 drivers/net/wireless/ath/wil6210/main.c 	u32 *d = dst;
d                 159 drivers/net/wireless/ath/wil6210/main.c 		*d++ = __raw_readl(s++);
d                 165 drivers/net/wireless/ath/wil6210/main.c 		memcpy(d, &tmp, count);
d                 172 drivers/net/wireless/ath/wil6210/main.c 	volatile u32 __iomem *d = dst;
d                 176 drivers/net/wireless/ath/wil6210/main.c 		__raw_writel(*s++, d++);
d                 183 drivers/net/wireless/ath/wil6210/main.c 		__raw_writel(tmp, d);
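The wil6210 main.c hits copy to and from device memory in aligned 32-bit words using __raw_readl()/__raw_writel(). A hedged sketch of the "to io" direction for whole words only; the driver's wil_memcpy_toio_32() additionally handles a partial last word, which is omitted here:

#include <linux/io.h>

/* Illustrative 32-bit-wide copy into MMIO space; assumes count is a
 * multiple of 4 (the real driver also copies the unaligned tail). */
static void example_memcpy_toio_32(void __iomem *dst, const void *src,
				   size_t count)
{
	volatile u32 __iomem *d = dst;
	const u32 *s = src;

	for (; count >= 4; count -= 4)
		__raw_writel(*s++, d++);
}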
d                 870 drivers/net/wireless/ath/wil6210/main.c static inline u32 ARC_me_imm32(u32 d)
d                 872 drivers/net/wireless/ath/wil6210/main.c 	return ((d & 0xffff0000) >> 16) | ((d & 0x0000ffff) << 16);
d                 154 drivers/net/wireless/ath/wil6210/pmc.c 		struct vring_tx_desc dd = {}, *d = &dd;
d                 173 drivers/net/wireless/ath/wil6210/pmc.c 		d->dma.addr.addr_low =
d                 175 drivers/net/wireless/ath/wil6210/pmc.c 		d->dma.addr.addr_high =
d                 177 drivers/net/wireless/ath/wil6210/pmc.c 		d->dma.status = 0; /* 0 = HW_OWNED */
d                 178 drivers/net/wireless/ath/wil6210/pmc.c 		d->dma.length = cpu_to_le16(descriptor_size);
d                 179 drivers/net/wireless/ath/wil6210/pmc.c 		d->dma.d0 = BIT(9) | RX_DMA_D0_CMD_DMA_IT;
d                 180 drivers/net/wireless/ath/wil6210/pmc.c 		*_d = *d;
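Many of the wil6210 hits (pmc.c here, txrx.c and txrx_edma.c below) build a descriptor in an on-stack shadow ("struct vring_tx_desc dd, *d = &dd") and only then copy the finished result into the DMA ring slot with "*_d = *d", so the device-visible slot is never left half-written field by field. A minimal sketch of that idiom with a hypothetical descriptor layout:

#include <linux/types.h>

/* Hypothetical descriptor layout, for illustration only. */
struct example_desc {
	u64 addr;
	u16 length;
	u8  status;		/* 0 = owned by hardware */
};

/* Fill a ring slot via an on-stack shadow, then copy the completed
 * descriptor into the shared ring, mirroring the "*_d = *d" idiom above. */
static void example_fill_slot(volatile struct example_desc *_d,
			      u64 pa, u16 len)
{
	struct example_desc dd = {}, *d = &dd;

	d->addr   = pa;
	d->length = len;
	d->status = 0;		/* hand ownership to the hardware */

	*_d = *d;		/* struct copy into the DMA ring slot */
}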
d                 161 drivers/net/wireless/ath/wil6210/trace.h 	TP_PROTO(u16 index, struct vring_rx_desc *d),
d                 162 drivers/net/wireless/ath/wil6210/trace.h 	TP_ARGS(index, d),
d                 176 drivers/net/wireless/ath/wil6210/trace.h 		__entry->len = d->dma.length;
d                 177 drivers/net/wireless/ath/wil6210/trace.h 		__entry->mid = wil_rxdesc_mid(d);
d                 178 drivers/net/wireless/ath/wil6210/trace.h 		__entry->cid = wil_rxdesc_cid(d);
d                 179 drivers/net/wireless/ath/wil6210/trace.h 		__entry->tid = wil_rxdesc_tid(d);
d                 180 drivers/net/wireless/ath/wil6210/trace.h 		__entry->type = wil_rxdesc_ftype(d);
d                 181 drivers/net/wireless/ath/wil6210/trace.h 		__entry->subtype = wil_rxdesc_subtype(d);
d                 182 drivers/net/wireless/ath/wil6210/trace.h 		__entry->seq = wil_rxdesc_seq(d);
d                 183 drivers/net/wireless/ath/wil6210/trace.h 		__entry->mcs = wil_rxdesc_mcs(d);
d                 184 drivers/net/wireless/ath/wil6210/txrx.c 	struct vring_tx_desc *d = &desc->legacy;
d                 185 drivers/net/wireless/ath/wil6210/txrx.c 	dma_addr_t pa = wil_desc_addr(&d->dma.addr);
d                 186 drivers/net/wireless/ath/wil6210/txrx.c 	u16 dmalen = le16_to_cpu(d->dma.length);
d                 224 drivers/net/wireless/ath/wil6210/txrx.c 			struct vring_tx_desc dd, *d = &dd;
d                 236 drivers/net/wireless/ath/wil6210/txrx.c 			*d = *_d;
d                 237 drivers/net/wireless/ath/wil6210/txrx.c 			wil_txdesc_unmap(dev, (union wil_tx_desc *)d, ctx);
d                 242 drivers/net/wireless/ath/wil6210/txrx.c 			struct vring_rx_desc dd, *d = &dd;
d                 247 drivers/net/wireless/ath/wil6210/txrx.c 			*d = *_d;
d                 248 drivers/net/wireless/ath/wil6210/txrx.c 			pa = wil_desc_addr(&d->dma.addr);
d                 249 drivers/net/wireless/ath/wil6210/txrx.c 			dmalen = le16_to_cpu(d->dma.length);
d                 272 drivers/net/wireless/ath/wil6210/txrx.c 	struct vring_rx_desc dd, *d = &dd;
d                 295 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 = RX_DMA_D0_CMD_DMA_RT | RX_DMA_D0_CMD_DMA_IT;
d                 296 drivers/net/wireless/ath/wil6210/txrx.c 	wil_desc_addr_set(&d->dma.addr, pa);
d                 300 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.status = 0; /* BIT(0) should be 0 for HW_OWNED */
d                 301 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.length = cpu_to_le16(sz);
d                 302 drivers/net/wireless/ath/wil6210/txrx.c 	*_d = *d;
d                 333 drivers/net/wireless/ath/wil6210/txrx.c 	struct vring_rx_desc *d = wil_skb_rxdesc(skb);
d                 352 drivers/net/wireless/ath/wil6210/txrx.c 	if (d->dma.status & RX_DMA_STATUS_ERROR)
d                 360 drivers/net/wireless/ath/wil6210/txrx.c 	rtap->mcs_index = wil_rxdesc_mcs(d);
d                 377 drivers/net/wireless/ath/wil6210/txrx.c 	struct vring_rx_desc *d = wil_skb_rxdesc(skb);
d                 378 drivers/net/wireless/ath/wil6210/txrx.c 	int mid = wil_rxdesc_mid(d);
d                 384 drivers/net/wireless/ath/wil6210/txrx.c 	int cid = wil_rxdesc_cid(d);
d                 395 drivers/net/wireless/ath/wil6210/txrx.c 	ftype = wil_rxdesc_ftype(d) << 2;
d                 459 drivers/net/wireless/ath/wil6210/txrx.c 	struct vring_rx_desc *d;
d                 490 drivers/net/wireless/ath/wil6210/txrx.c 	d = wil_skb_rxdesc(skb);
d                 491 drivers/net/wireless/ath/wil6210/txrx.c 	*d = *_d;
d                 492 drivers/net/wireless/ath/wil6210/txrx.c 	pa = wil_desc_addr(&d->dma.addr);
d                 495 drivers/net/wireless/ath/wil6210/txrx.c 	dmalen = le16_to_cpu(d->dma.length);
d                 497 drivers/net/wireless/ath/wil6210/txrx.c 	trace_wil6210_rx(i, d);
d                 500 drivers/net/wireless/ath/wil6210/txrx.c 			  (const void *)d, sizeof(*d), false);
d                 502 drivers/net/wireless/ath/wil6210/txrx.c 	mid = wil_rxdesc_mid(d);
d                 533 drivers/net/wireless/ath/wil6210/txrx.c 	stats->last_mcs_rx = wil_rxdesc_mcs(d);
d                 548 drivers/net/wireless/ath/wil6210/txrx.c 	ftype = wil_rxdesc_ftype(d) << 2;
d                 550 drivers/net/wireless/ath/wil6210/txrx.c 		u8 fc1 = wil_rxdesc_fc1(d);
d                 551 drivers/net/wireless/ath/wil6210/txrx.c 		int tid = wil_rxdesc_tid(d);
d                 552 drivers/net/wireless/ath/wil6210/txrx.c 		u16 seq = wil_rxdesc_seq(d);
d                 571 drivers/net/wireless/ath/wil6210/txrx.c 					  (const void *)d, sizeof(*d), false);
d                 583 drivers/net/wireless/ath/wil6210/txrx.c 	if (likely(d->dma.status & RX_DMA_STATUS_L4I)) {
d                 585 drivers/net/wireless/ath/wil6210/txrx.c 		if (likely((d->dma.error & RX_DMA_ERROR_L4_ERR) == 0))
d                 674 drivers/net/wireless/ath/wil6210/txrx.c 	struct vring_rx_desc *d = wil_skb_rxdesc(skb);
d                 676 drivers/net/wireless/ath/wil6210/txrx.c 	int tid = wil_rxdesc_tid(d);
d                 677 drivers/net/wireless/ath/wil6210/txrx.c 	int key_id = wil_rxdesc_key_id(d);
d                 678 drivers/net/wireless/ath/wil6210/txrx.c 	int mc = wil_rxdesc_mcast(d);
d                 683 drivers/net/wireless/ath/wil6210/txrx.c 	const u8 *pn = (u8 *)&d->mac.pn_15_0;
d                 706 drivers/net/wireless/ath/wil6210/txrx.c 	struct vring_rx_desc *d = wil_skb_rxdesc(skb);
d                 708 drivers/net/wireless/ath/wil6210/txrx.c 	if ((d->dma.status & RX_DMA_STATUS_ERROR) &&
d                 709 drivers/net/wireless/ath/wil6210/txrx.c 	    (d->dma.error & RX_DMA_ERROR_MIC)) {
d                 721 drivers/net/wireless/ath/wil6210/txrx.c 	struct vring_rx_desc *d = wil_skb_rxdesc(skb);
d                 724 drivers/net/wireless/ath/wil6210/txrx.c 	*security = wil_rxdesc_security(d);
d                1129 drivers/net/wireless/ath/wil6210/txrx.c 	struct vring_tx_desc *d = &desc->legacy;
d                1131 drivers/net/wireless/ath/wil6210/txrx.c 	wil_desc_addr_set(&d->dma.addr, pa);
d                1132 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.ip_length = 0;
d                1134 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.b11 = 0/*14 | BIT(7)*/;
d                1135 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.error = 0;
d                1136 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.status = 0; /* BIT(0) should be 0 for HW_OWNED */
d                1137 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.length = cpu_to_le16((u16)len);
d                1138 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 = (vring_index << DMA_CFG_DESC_TX_0_QID_POS);
d                1139 drivers/net/wireless/ath/wil6210/txrx.c 	d->mac.d[0] = 0;
d                1140 drivers/net/wireless/ath/wil6210/txrx.c 	d->mac.d[1] = 0;
d                1141 drivers/net/wireless/ath/wil6210/txrx.c 	d->mac.d[2] = 0;
d                1142 drivers/net/wireless/ath/wil6210/txrx.c 	d->mac.ucode_cmd = 0;
d                1144 drivers/net/wireless/ath/wil6210/txrx.c 	d->mac.d[2] = BIT(MAC_CFG_DESC_TX_2_SNAP_HDR_INSERTION_EN_POS) |
d                1625 drivers/net/wireless/ath/wil6210/txrx.c void wil_tx_desc_set_nr_frags(struct vring_tx_desc *d, int nr_frags)
d                1627 drivers/net/wireless/ath/wil6210/txrx.c 	d->mac.d[2] |= (nr_frags << MAC_CFG_DESC_TX_2_NUM_OF_DESCRIPTORS_POS);
d                1637 drivers/net/wireless/ath/wil6210/txrx.c static void wil_tx_desc_offload_setup_tso(struct vring_tx_desc *d,
d                1642 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.b11 = ETH_HLEN; /* MAC header length */
d                1643 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.b11 |= is_ipv4 << DMA_CFG_DESC_TX_OFFLOAD_CFG_L3T_IPV4_POS;
d                1645 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= (2 << DMA_CFG_DESC_TX_0_L4_TYPE_POS);
d                1647 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= (tcp_hdr_len & DMA_CFG_DESC_TX_0_L4_LENGTH_MSK);
d                1650 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= (BIT(DMA_CFG_DESC_TX_0_TCP_SEG_EN_POS)) |
d                1652 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= (is_ipv4 << DMA_CFG_DESC_TX_0_IPV4_CHECKSUM_EN_POS);
d                1654 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.ip_length = skb_net_hdr_len;
d                1656 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= BIT(DMA_CFG_DESC_TX_0_TCP_UDP_CHECKSUM_EN_POS);
d                1658 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= BIT(DMA_CFG_DESC_TX_0_PSEUDO_HEADER_CALC_EN_POS);
d                1671 drivers/net/wireless/ath/wil6210/txrx.c static int wil_tx_desc_offload_setup(struct vring_tx_desc *d,
d                1678 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.b11 = ETH_HLEN; /* MAC header length */
d                1683 drivers/net/wireless/ath/wil6210/txrx.c 		d->dma.b11 |= BIT(DMA_CFG_DESC_TX_OFFLOAD_CFG_L3T_IPV4_POS);
d                1694 drivers/net/wireless/ath/wil6210/txrx.c 		d->dma.d0 |= (2 << DMA_CFG_DESC_TX_0_L4_TYPE_POS);
d                1696 drivers/net/wireless/ath/wil6210/txrx.c 		d->dma.d0 |=
d                1701 drivers/net/wireless/ath/wil6210/txrx.c 		d->dma.d0 |=
d                1708 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.ip_length = skb_network_header_len(skb);
d                1710 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= BIT(DMA_CFG_DESC_TX_0_TCP_UDP_CHECKSUM_EN_POS);
d                1712 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= BIT(DMA_CFG_DESC_TX_0_PSEUDO_HEADER_CALC_EN_POS);
d                1717 drivers/net/wireless/ath/wil6210/txrx.c static inline void wil_tx_last_desc(struct vring_tx_desc *d)
d                1719 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= BIT(DMA_CFG_DESC_TX_0_CMD_EOP_POS) |
d                1724 drivers/net/wireless/ath/wil6210/txrx.c static inline void wil_set_tx_desc_last_tso(volatile struct vring_tx_desc *d)
d                1726 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= wil_tso_type_lst <<
d                1741 drivers/net/wireless/ath/wil6210/txrx.c 			     *d = &hdr_desc_mem, *hdr_desc = &hdr_desc_mem,
d                1896 drivers/net/wireless/ath/wil6210/txrx.c 				d = first_desc;
d                1898 drivers/net/wireless/ath/wil6210/txrx.c 				d = &desc_mem;
d                1901 drivers/net/wireless/ath/wil6210/txrx.c 			wil->txrx_ops.tx_desc_map((union wil_tx_desc *)d,
d                1903 drivers/net/wireless/ath/wil6210/txrx.c 			wil_tx_desc_offload_setup_tso(d, skb, desc_tso_type,
d                1936 drivers/net/wireless/ath/wil6210/txrx.c 				wil_tx_last_desc(d);
d                1942 drivers/net/wireless/ath/wil6210/txrx.c 				if (first_desc != d)
d                1949 drivers/net/wireless/ath/wil6210/txrx.c 					*_desc = *d;
d                1954 drivers/net/wireless/ath/wil6210/txrx.c 			} else if (first_desc != d) /* update mid descriptor */
d                1955 drivers/net/wireless/ath/wil6210/txrx.c 					*_desc = *d;
d                1966 drivers/net/wireless/ath/wil6210/txrx.c 		d = first_desc;
d                1969 drivers/net/wireless/ath/wil6210/txrx.c 	wil_set_tx_desc_last_tso(d);
d                1970 drivers/net/wireless/ath/wil6210/txrx.c 	*_desc = *d;
d                2020 drivers/net/wireless/ath/wil6210/txrx.c 		d = (struct vring_tx_desc *)&vring->va[i].tx.legacy;
d                2022 drivers/net/wireless/ath/wil6210/txrx.c 		*d = *_desc;
d                2025 drivers/net/wireless/ath/wil6210/txrx.c 		wil_txdesc_unmap(dev, (union wil_tx_desc *)d, ctx);
d                2037 drivers/net/wireless/ath/wil6210/txrx.c 	struct vring_tx_desc dd, *d = &dd;
d                2076 drivers/net/wireless/ath/wil6210/txrx.c 	wil->txrx_ops.tx_desc_map((union wil_tx_desc *)d, pa, len,
d                2079 drivers/net/wireless/ath/wil6210/txrx.c 		d->mac.d[0] |= BIT(MAC_CFG_DESC_TX_0_MCS_EN_POS); /* MCS 0 */
d                2081 drivers/net/wireless/ath/wil6210/txrx.c 			d->mac.d[0] |= (1 << MAC_CFG_DESC_TX_0_MCS_INDEX_POS);
d                2084 drivers/net/wireless/ath/wil6210/txrx.c 	if (unlikely(wil_tx_desc_offload_setup(d, skb))) {
d                2091 drivers/net/wireless/ath/wil6210/txrx.c 	wil_tx_desc_set_nr_frags(d, nr_frags + 1);
d                2098 drivers/net/wireless/ath/wil6210/txrx.c 		*_d = *d;
d                2101 drivers/net/wireless/ath/wil6210/txrx.c 				  (const void *)d, sizeof(*d), false);
d                2112 drivers/net/wireless/ath/wil6210/txrx.c 		wil->txrx_ops.tx_desc_map((union wil_tx_desc *)d,
d                2118 drivers/net/wireless/ath/wil6210/txrx.c 		wil_tx_desc_offload_setup(d, skb);
d                2121 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= BIT(DMA_CFG_DESC_TX_0_CMD_EOP_POS);
d                2122 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= BIT(DMA_CFG_DESC_TX_0_CMD_MARK_WB_POS);
d                2123 drivers/net/wireless/ath/wil6210/txrx.c 	d->dma.d0 |= BIT(DMA_CFG_DESC_TX_0_CMD_DMA_IT_POS);
d                2124 drivers/net/wireless/ath/wil6210/txrx.c 	*_d = *d;
d                2127 drivers/net/wireless/ath/wil6210/txrx.c 			  (const void *)d, sizeof(*d), false);
d                2179 drivers/net/wireless/ath/wil6210/txrx.c 		*d = *_d;
d                2182 drivers/net/wireless/ath/wil6210/txrx.c 					    (union wil_tx_desc *)d,
d                2475 drivers/net/wireless/ath/wil6210/txrx.c 			struct vring_tx_desc dd, *d = &dd;
d                2483 drivers/net/wireless/ath/wil6210/txrx.c 			*d = *_d;
d                2485 drivers/net/wireless/ath/wil6210/txrx.c 			dmalen = le16_to_cpu(d->dma.length);
d                2487 drivers/net/wireless/ath/wil6210/txrx.c 					      d->dma.error);
d                2491 drivers/net/wireless/ath/wil6210/txrx.c 				     d->dma.status, d->dma.error);
d                2493 drivers/net/wireless/ath/wil6210/txrx.c 					  (const void *)d, sizeof(*d), false);
d                2496 drivers/net/wireless/ath/wil6210/txrx.c 						    (union wil_tx_desc *)d,
d                2500 drivers/net/wireless/ath/wil6210/txrx.c 				if (likely(d->dma.error == 0)) {
d                2519 drivers/net/wireless/ath/wil6210/txrx.c 				wil_consume_skb(skb, d->dma.error == 0);
d                2565 drivers/net/wireless/ath/wil6210/txrx.c 	struct vring_rx_desc *d = wil_skb_rxdesc(skb);
d                2567 drivers/net/wireless/ath/wil6210/txrx.c 	*tid = wil_rxdesc_tid(d);
d                2569 drivers/net/wireless/ath/wil6210/txrx.c 	*mid = wil_rxdesc_mid(d);
d                2570 drivers/net/wireless/ath/wil6210/txrx.c 	*seq = wil_rxdesc_seq(d);
d                2571 drivers/net/wireless/ath/wil6210/txrx.c 	*mcast = wil_rxdesc_mcast(d);
d                2572 drivers/net/wireless/ath/wil6210/txrx.c 	*retry = wil_rxdesc_retry(d);
d                  85 drivers/net/wireless/ath/wil6210/txrx.h 	u32 d[3];
d                 513 drivers/net/wireless/ath/wil6210/txrx.h static inline int wil_rxdesc_tid(struct vring_rx_desc *d)
d                 515 drivers/net/wireless/ath/wil6210/txrx.h 	return WIL_GET_BITS(d->mac.d0, 0, 3);
d                 518 drivers/net/wireless/ath/wil6210/txrx.h static inline int wil_rxdesc_cid(struct vring_rx_desc *d)
d                 520 drivers/net/wireless/ath/wil6210/txrx.h 	return WIL_GET_BITS(d->mac.d0, 4, 6);
d                 523 drivers/net/wireless/ath/wil6210/txrx.h static inline int wil_rxdesc_mid(struct vring_rx_desc *d)
d                 525 drivers/net/wireless/ath/wil6210/txrx.h 	return (d->mac.d0 & RX_MAC_D0_MAC_ID_VALID) ?
d                 526 drivers/net/wireless/ath/wil6210/txrx.h 		WIL_GET_BITS(d->mac.d0, 8, 9) : 0;
d                 529 drivers/net/wireless/ath/wil6210/txrx.h static inline int wil_rxdesc_ftype(struct vring_rx_desc *d)
d                 531 drivers/net/wireless/ath/wil6210/txrx.h 	return WIL_GET_BITS(d->mac.d0, 10, 11);
d                 534 drivers/net/wireless/ath/wil6210/txrx.h static inline int wil_rxdesc_subtype(struct vring_rx_desc *d)
d                 536 drivers/net/wireless/ath/wil6210/txrx.h 	return WIL_GET_BITS(d->mac.d0, 12, 15);
d                 540 drivers/net/wireless/ath/wil6210/txrx.h static inline u8 wil_rxdesc_fc1(struct vring_rx_desc *d)
d                 542 drivers/net/wireless/ath/wil6210/txrx.h 	return (u8)(WIL_GET_BITS(d->mac.d0, 10, 15) << 2);
d                 545 drivers/net/wireless/ath/wil6210/txrx.h static inline int wil_rxdesc_seq(struct vring_rx_desc *d)
d                 547 drivers/net/wireless/ath/wil6210/txrx.h 	return WIL_GET_BITS(d->mac.d0, 16, 27);
d                 550 drivers/net/wireless/ath/wil6210/txrx.h static inline int wil_rxdesc_ext_subtype(struct vring_rx_desc *d)
d                 552 drivers/net/wireless/ath/wil6210/txrx.h 	return WIL_GET_BITS(d->mac.d0, 28, 31);
d                 555 drivers/net/wireless/ath/wil6210/txrx.h static inline int wil_rxdesc_retry(struct vring_rx_desc *d)
d                 557 drivers/net/wireless/ath/wil6210/txrx.h 	return WIL_GET_BITS(d->mac.d0, 31, 31);
d                 560 drivers/net/wireless/ath/wil6210/txrx.h static inline int wil_rxdesc_key_id(struct vring_rx_desc *d)
d                 562 drivers/net/wireless/ath/wil6210/txrx.h 	return WIL_GET_BITS(d->mac.d1, 4, 5);
d                 565 drivers/net/wireless/ath/wil6210/txrx.h static inline int wil_rxdesc_security(struct vring_rx_desc *d)
d                 567 drivers/net/wireless/ath/wil6210/txrx.h 	return WIL_GET_BITS(d->mac.d1, 7, 7);
d                 570 drivers/net/wireless/ath/wil6210/txrx.h static inline int wil_rxdesc_ds_bits(struct vring_rx_desc *d)
d                 572 drivers/net/wireless/ath/wil6210/txrx.h 	return WIL_GET_BITS(d->mac.d1, 8, 9);
d                 575 drivers/net/wireless/ath/wil6210/txrx.h static inline int wil_rxdesc_mcs(struct vring_rx_desc *d)
d                 577 drivers/net/wireless/ath/wil6210/txrx.h 	return WIL_GET_BITS(d->mac.d1, 21, 24);
d                 580 drivers/net/wireless/ath/wil6210/txrx.h static inline int wil_rxdesc_mcast(struct vring_rx_desc *d)
d                 582 drivers/net/wireless/ath/wil6210/txrx.h 	return WIL_GET_BITS(d->mac.d1, 13, 14);
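The txrx.h accessors above all pull a contiguous bit range out of d->mac.d0 or d->mac.d1 with WIL_GET_BITS(x, b0, b1). A plausible definition of such a helper (an assumption; the kernel's actual macro may differ) plus one accessor written in the same style:

#include <linux/types.h>

/* Extract bits b0..b1 (inclusive, b0 <= b1, range narrower than 32 bits).
 * Assumed definition matching how the accessors above use it. */
#define EXAMPLE_GET_BITS(x, b0, b1) \
	(((x) >> (b0)) & ((1U << ((b1) - (b0) + 1)) - 1))

struct example_rx_mac { u32 d0, d1; };	/* hypothetical descriptor words */

/* Same shape as wil_rxdesc_tid(): TID lives in bits 0..3 of d0. */
static inline int example_rxdesc_tid(const struct example_rx_mac *m)
{
	return EXAMPLE_GET_BITS(m->d0, 0, 3);
}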
d                  42 drivers/net/wireless/ath/wil6210/txrx_edma.c 	struct wil_tx_enhanced_desc *d = (struct wil_tx_enhanced_desc *)desc;
d                  43 drivers/net/wireless/ath/wil6210/txrx_edma.c 	dma_addr_t pa = wil_tx_desc_get_addr_edma(&d->dma);
d                  44 drivers/net/wireless/ath/wil6210/txrx_edma.c 	u16 dmalen = le16_to_cpu(d->dma.length);
d                 176 drivers/net/wireless/ath/wil6210/txrx_edma.c 	struct wil_rx_enhanced_desc dd, *d = &dd;
d                 212 drivers/net/wireless/ath/wil6210/txrx_edma.c 	wil_desc_set_addr_edma(&d->dma.addr, &d->dma.addr_high_high, pa);
d                 213 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->dma.length = cpu_to_le16(sz);
d                 214 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->mac.buff_id = cpu_to_le16(buff_id);
d                 215 drivers/net/wireless/ath/wil6210/txrx_edma.c 	*_d = *d;
d                 465 drivers/net/wireless/ath/wil6210/txrx_edma.c 		struct wil_tx_enhanced_desc dd, *d = &dd;
d                 478 drivers/net/wireless/ath/wil6210/txrx_edma.c 		*d = *_d;
d                 479 drivers/net/wireless/ath/wil6210/txrx_edma.c 		wil_tx_desc_unmap_edma(dev, (union wil_tx_desc *)d, ctx);
d                1124 drivers/net/wireless/ath/wil6210/txrx_edma.c 	struct wil_tx_enhanced_desc *d =
d                1127 drivers/net/wireless/ath/wil6210/txrx_edma.c 	memset(d, 0, sizeof(struct wil_tx_enhanced_desc));
d                1129 drivers/net/wireless/ath/wil6210/txrx_edma.c 	wil_desc_set_addr_edma(&d->dma.addr, &d->dma.addr_high_high, pa);
d                1132 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->dma.length = cpu_to_le16((u16)len);
d                1133 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->mac.d[0] = (ring_index << WIL_EDMA_DESC_TX_MAC_CFG_0_QID_POS);
d                1137 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->mac.d[2] = BIT(MAC_CFG_DESC_TX_2_SNAP_HDR_INSERTION_EN_POS) |
d                1230 drivers/net/wireless/ath/wil6210/txrx_edma.c 			struct wil_tx_enhanced_desc dd, *d = &dd;
d                1236 drivers/net/wireless/ath/wil6210/txrx_edma.c 			*d = *_d;
d                1238 drivers/net/wireless/ath/wil6210/txrx_edma.c 			dmalen = le16_to_cpu(d->dma.length);
d                1249 drivers/net/wireless/ath/wil6210/txrx_edma.c 					       (union wil_tx_desc *)d,
d                1324 drivers/net/wireless/ath/wil6210/txrx_edma.c static void wil_tx_desc_offload_setup_tso_edma(struct wil_tx_enhanced_desc *d,
d                1331 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->mac.d[2] |= 1;
d                1333 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->mac.tso_mss |= cpu_to_le16(mss >> 2);
d                1335 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->dma.l4_hdr_len |= tcp_hdr_len & DMA_CFG_DESC_TX_0_L4_LENGTH_MSK;
d                1339 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->dma.cmd |= BIT(WIL_EDMA_DESC_TX_CFG_EOP_POS) |
d                1345 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->dma.w1 |= BIT(WIL_EDMA_DESC_TX_CFG_PSEUDO_HEADER_CALC_EN_POS) |
d                1348 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->dma.ip_length |= skb_net_hdr_len;
d                1350 drivers/net/wireless/ath/wil6210/txrx_edma.c 	d->dma.b11 |= ETH_HLEN |
d                1364 drivers/net/wireless/ath/wil6210/txrx_edma.c 	struct wil_tx_enhanced_desc desc_mem, *d = &desc_mem;
d                1383 drivers/net/wireless/ath/wil6210/txrx_edma.c 	wil->txrx_ops.tx_desc_map((union wil_tx_desc *)d, pa,
d                1385 drivers/net/wireless/ath/wil6210/txrx_edma.c 	wil_tx_desc_offload_setup_tso_edma(d, tso_desc_type, is_ipv4,
d                1397 drivers/net/wireless/ath/wil6210/txrx_edma.c 			  (const void *)d, sizeof(*d), false);
d                1399 drivers/net/wireless/ath/wil6210/txrx_edma.c 	*_desc = *d;
d                1535 drivers/net/wireless/ath/wil6210/txrx_edma.c 		struct wil_tx_enhanced_desc dd, *d = &dd;
d                1540 drivers/net/wireless/ath/wil6210/txrx_edma.c 		*d = *_desc;
d                1542 drivers/net/wireless/ath/wil6210/txrx_edma.c 		wil_tx_desc_unmap_edma(dev, (union wil_tx_desc *)d, ctx);
d                  95 drivers/net/wireless/ath/wil6210/txrx_edma.h 	u32 d[3];
d                 188 drivers/net/wireless/ath/wil6210/txrx_edma.h 	u32 d[3];
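The txrx_edma entries above repeat one pattern: the enhanced TX descriptor is assembled in a local stack copy (dd / desc_mem, aliased through d) and only then published to the ring slot with a single structure assignment (*_d = *d, *_desc = *d). A minimal sketch of that idea in kernel C; the demo_* type and field names are illustrative, not the wil6210 layout:

#include <linux/string.h>
#include <linux/types.h>

/*
 * Hedged sketch: build the descriptor in a private stack copy, then
 * publish it to the ring slot with one structure assignment so the DMA
 * engine never observes a half-initialized entry.  demo_tx_desc and its
 * fields are illustrative, not the wil6210 layout.
 */
struct demo_tx_desc {
	u64 addr;
	u16 len;
	u16 flags;
};

static void demo_ring_push(struct demo_tx_desc *ring_slot, u64 pa, u16 len)
{
	struct demo_tx_desc dd, *d = &dd;	/* shadow copy on the stack */

	memset(d, 0, sizeof(*d));
	d->addr = pa;
	d->len = len;

	*ring_slot = *d;			/* single publish into the ring */
}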
d                 777 drivers/net/wireless/ath/wil6210/wmi.c static void wmi_evt_ready(struct wil6210_vif *vif, int id, void *d, int len)
d                 781 drivers/net/wireless/ath/wil6210/wmi.c 	struct wmi_ready_event *evt = d;
d                 825 drivers/net/wireless/ath/wil6210/wmi.c static void wmi_evt_rx_mgmt(struct wil6210_vif *vif, int id, void *d, int len)
d                 828 drivers/net/wireless/ath/wil6210/wmi.c 	struct wmi_rx_mgmt_packet_event *data = d;
d                 919 drivers/net/wireless/ath/wil6210/wmi.c static void wmi_evt_tx_mgmt(struct wil6210_vif *vif, int id, void *d, int len)
d                 921 drivers/net/wireless/ath/wil6210/wmi.c 	struct wmi_tx_mgmt_packet_event *data = d;
d                 931 drivers/net/wireless/ath/wil6210/wmi.c 				  void *d, int len)
d                 937 drivers/net/wireless/ath/wil6210/wmi.c 		struct wmi_scan_complete_event *data = d;
d                 963 drivers/net/wireless/ath/wil6210/wmi.c static void wmi_evt_connect(struct wil6210_vif *vif, int id, void *d, int len)
d                 968 drivers/net/wireless/ath/wil6210/wmi.c 	struct wmi_connect_event *evt = d;
d                1132 drivers/net/wireless/ath/wil6210/wmi.c 			       void *d, int len)
d                1135 drivers/net/wireless/ath/wil6210/wmi.c 	struct wmi_disconnect_event *evt = d;
d                1176 drivers/net/wireless/ath/wil6210/wmi.c static void wmi_evt_eapol_rx(struct wil6210_vif *vif, int id, void *d, int len)
d                1180 drivers/net/wireless/ath/wil6210/wmi.c 	struct wmi_eapol_rx_event *evt = d;
d                1226 drivers/net/wireless/ath/wil6210/wmi.c static void wmi_evt_ring_en(struct wil6210_vif *vif, int id, void *d, int len)
d                1229 drivers/net/wireless/ath/wil6210/wmi.c 	struct wmi_ring_en_event *evt = d;
d                1275 drivers/net/wireless/ath/wil6210/wmi.c 			      void *d, int len)
d                1278 drivers/net/wireless/ath/wil6210/wmi.c 	struct wmi_ba_status_event *evt = d;
d                1307 drivers/net/wireless/ath/wil6210/wmi.c 				 void *d, int len)
d                1311 drivers/net/wireless/ath/wil6210/wmi.c 	struct wmi_rcp_addba_req_event *evt = d;
d                1324 drivers/net/wireless/ath/wil6210/wmi.c static void wmi_evt_delba(struct wil6210_vif *vif, int id, void *d, int len)
d                1328 drivers/net/wireless/ath/wil6210/wmi.c 	struct wmi_delba_event *evt = d;
d                1386 drivers/net/wireless/ath/wil6210/wmi.c wmi_evt_sched_scan_result(struct wil6210_vif *vif, int id, void *d, int len)
d                1389 drivers/net/wireless/ath/wil6210/wmi.c 	struct wmi_sched_scan_result_event *data = d;
d                1565 drivers/net/wireless/ath/wil6210/wmi.c wmi_evt_link_stats(struct wil6210_vif *vif, int id, void *d, int len)
d                1568 drivers/net/wireless/ath/wil6210/wmi.c 	struct wmi_link_stats_event *evt = d;
d                1633 drivers/net/wireless/ath/wil6210/wmi.c wmi_evt_auth_status(struct wil6210_vif *vif, int id, void *d, int len)
d                1637 drivers/net/wireless/ath/wil6210/wmi.c 	struct wmi_ft_auth_status_event *data = d;
d                1722 drivers/net/wireless/ath/wil6210/wmi.c wmi_evt_reassoc_status(struct wil6210_vif *vif, int id, void *d, int len)
d                1727 drivers/net/wireless/ath/wil6210/wmi.c 	struct wmi_ft_reassoc_status_event *data = d;
d                1854 drivers/net/wireless/ath/wil6210/wmi.c static void wmi_evt_ignore(struct wil6210_vif *vif, int id, void *d, int len)
d                3287 drivers/net/wireless/ath/wil6210/wmi.c 				 void *d, int len)
d                3293 drivers/net/wireless/ath/wil6210/wmi.c 			wmi_evt_handlers[i].handler(vif, id, d, len);
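The wmi.c handlers above all share the signature (vif, id, void *d, len): the dispatcher at the end of that list walks an id-keyed table and each handler casts the opaque payload d to its own event structure. A hedged sketch of that dispatch shape, with demo_* names and the event id standing in for the driver's own:

#include <linux/kernel.h>
#include <linux/types.h>

/*
 * Hedged sketch of the id-keyed dispatch used by the wmi_evt_* handlers
 * above: the payload arrives as an opaque void *d and each handler casts
 * it to its own event structure after a length check.  All demo_* names
 * and the event id are illustrative.
 */
struct demo_ready_event {
	__le32 version;
};

typedef void (*demo_evt_handler)(void *ctx, int id, void *d, int len);

static void demo_evt_ready(void *ctx, int id, void *d, int len)
{
	struct demo_ready_event *evt = d;

	if (len < (int)sizeof(*evt))
		return;		/* reject truncated payloads */
	/* ... use le32_to_cpu(evt->version) here ... */
}

static const struct {
	int id;
	demo_evt_handler handler;
} demo_evt_handlers[] = {
	{ 0x1001, demo_evt_ready },
};

static void demo_dispatch(void *ctx, int id, void *d, int len)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(demo_evt_handlers); i++)
		if (demo_evt_handlers[i].id == id)
			demo_evt_handlers[i].handler(ctx, id, d, len);
}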
d                 565 drivers/net/wireless/broadcom/b43/lo.c 				    struct b43_lo_g_statemachine *d)
d                 591 drivers/net/wireless/broadcom/b43/lo.c 	if (d->current_state == 0) {
d                 594 drivers/net/wireless/broadcom/b43/lo.c 	} else if (d->current_state % 2 == 0) {
d                 595 drivers/net/wireless/broadcom/b43/lo.c 		begin = d->current_state - 1;
d                 596 drivers/net/wireless/broadcom/b43/lo.c 		end = d->current_state + 1;
d                 598 drivers/net/wireless/broadcom/b43/lo.c 		begin = d->current_state - 2;
d                 599 drivers/net/wireless/broadcom/b43/lo.c 		end = d->current_state + 2;
d                 608 drivers/net/wireless/broadcom/b43/lo.c 	d->current_state = i;
d                 612 drivers/net/wireless/broadcom/b43/lo.c 		test_loctl.i += modifiers[i - 1].i * d->state_val_multiplier;
d                 613 drivers/net/wireless/broadcom/b43/lo.c 		test_loctl.q += modifiers[i - 1].q * d->state_val_multiplier;
d                 621 drivers/net/wireless/broadcom/b43/lo.c 			if (feedth < d->lowest_feedth) {
d                 625 drivers/net/wireless/broadcom/b43/lo.c 				d->lowest_feedth = feedth;
d                 626 drivers/net/wireless/broadcom/b43/lo.c 				if ((d->nr_measured < 2) &&
d                 638 drivers/net/wireless/broadcom/b43/lo.c 		d->current_state = i;
d                 650 drivers/net/wireless/broadcom/b43/lo.c 	struct b43_lo_g_statemachine d;
d                 656 drivers/net/wireless/broadcom/b43/lo.c 	d.nr_measured = 0;
d                 657 drivers/net/wireless/broadcom/b43/lo.c 	d.state_val_multiplier = 1;
d                 659 drivers/net/wireless/broadcom/b43/lo.c 		d.state_val_multiplier = 3;
d                 661 drivers/net/wireless/broadcom/b43/lo.c 	memcpy(&d.min_loctl, loctl, sizeof(struct b43_loctl));
d                 665 drivers/net/wireless/broadcom/b43/lo.c 		b43_lo_write(dev, &d.min_loctl);
d                 678 drivers/net/wireless/broadcom/b43/lo.c 		d.lowest_feedth = feedth;
d                 680 drivers/net/wireless/broadcom/b43/lo.c 		d.current_state = 0;
d                 683 drivers/net/wireless/broadcom/b43/lo.c 				    (d.current_state >= 0
d                 684 drivers/net/wireless/broadcom/b43/lo.c 				     && d.current_state <= 8));
d                 685 drivers/net/wireless/broadcom/b43/lo.c 			memcpy(&probe_loctl, &d.min_loctl,
d                 688 drivers/net/wireless/broadcom/b43/lo.c 			    lo_probe_possible_loctls(dev, &probe_loctl, &d);
d                 691 drivers/net/wireless/broadcom/b43/lo.c 			if ((probe_loctl.i == d.min_loctl.i) &&
d                 692 drivers/net/wireless/broadcom/b43/lo.c 			    (probe_loctl.q == d.min_loctl.q))
d                 694 drivers/net/wireless/broadcom/b43/lo.c 			memcpy(&d.min_loctl, &probe_loctl,
d                 696 drivers/net/wireless/broadcom/b43/lo.c 			d.nr_measured++;
d                 697 drivers/net/wireless/broadcom/b43/lo.c 		} while (d.nr_measured < 24);
d                 698 drivers/net/wireless/broadcom/b43/lo.c 		memcpy(loctl, &d.min_loctl, sizeof(struct b43_loctl));
d                 701 drivers/net/wireless/broadcom/b43/lo.c 			if (d.lowest_feedth > 0x1194)
d                 703 drivers/net/wireless/broadcom/b43/lo.c 			else if (d.lowest_feedth < 0x5DC)
d                 706 drivers/net/wireless/broadcom/b43/lo.c 				if (d.lowest_feedth <= 0x5DC) {
d                 707 drivers/net/wireless/broadcom/b43/lo.c 					d.state_val_multiplier = 1;
d                 710 drivers/net/wireless/broadcom/b43/lo.c 					d.state_val_multiplier = 2;
d                 712 drivers/net/wireless/broadcom/b43/lo.c 				d.state_val_multiplier = 1;
d                 513 drivers/net/wireless/broadcom/b43/phy_common.c 	unsigned int a, b, c, d;
d                 521 drivers/net/wireless/broadcom/b43/phy_common.c 	d = (tmp >> 24) & 0xFF;
d                 525 drivers/net/wireless/broadcom/b43/phy_common.c 	    d == 0 || d == B43_TSSI_MAX)
d                 536 drivers/net/wireless/broadcom/b43/phy_common.c 		d = (d + 32) & 0x3F;
d                 540 drivers/net/wireless/broadcom/b43/phy_common.c 	average = (a + b + c + d + 2) / 4;
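A hedged note on the b43 TSSI averaging above: adding half the divisor before an integer division rounds to nearest instead of truncating, as in this small sketch with a worked value:

/*
 * Hedged note on the TSSI averaging above: adding half the divisor before
 * an integer division rounds to nearest instead of truncating.  For the
 * samples a=10, b=11, c=11, d=11 the truncated mean is 43/4 = 10, while
 * (43 + 2)/4 = 11, which is closer to the exact 10.75.
 */
static inline unsigned int demo_avg4(unsigned int a, unsigned int b,
				     unsigned int c, unsigned int d)
{
	return (a + b + c + d + 2) / 4;
}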
d                 142 drivers/net/wireless/broadcom/b43/xmit.c 		u32 d;
d                 144 drivers/net/wireless/broadcom/b43/xmit.c 		d = b43_plcp_get_ratecode_ofdm(bitrate);
d                 146 drivers/net/wireless/broadcom/b43/xmit.c 		d |= (octets << 5);
d                 147 drivers/net/wireless/broadcom/b43/xmit.c 		plcp->data = cpu_to_le32(d);
d                2544 drivers/net/wireless/broadcom/b43legacy/main.c 	struct b43legacy_wldev *d;
d                2546 drivers/net/wireless/broadcom/b43legacy/main.c 	list_for_each_entry(d, &wl->devlist, list) {
d                2547 drivers/net/wireless/broadcom/b43legacy/main.c 		if (d->phy.possible_phymodes & phymode) {
d                2551 drivers/net/wireless/broadcom/b43legacy/main.c 			*dev = d;
d                 267 drivers/net/wireless/broadcom/b43legacy/pio.c static void tx_tasklet(unsigned long d)
d                 269 drivers/net/wireless/broadcom/b43legacy/pio.c 	struct b43legacy_pioqueue *queue = (struct b43legacy_pioqueue *)d;
d                 118 drivers/net/wireless/broadcom/b43legacy/xmit.c 		u16 d;
d                 120 drivers/net/wireless/broadcom/b43legacy/xmit.c 		d = b43legacy_plcp_get_ratecode_ofdm(bitrate);
d                 122 drivers/net/wireless/broadcom/b43legacy/xmit.c 		d |= (octets << 5);
d                 123 drivers/net/wireless/broadcom/b43legacy/xmit.c 		*data = cpu_to_le32(d);
d                1049 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_int.h 			     void *d);
d                1095 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_int.h 			  u8 type, bool d);
d                2351 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	u8 *s, *d;
d                2365 drivers/net/wireless/intel/ipw2x00/ipw2100.c 			d = (u8 *) & tmp;
d                2367 drivers/net/wireless/intel/ipw2x00/ipw2100.c 				if (*s != *d) {
d                2373 drivers/net/wireless/intel/ipw2x00/ipw2100.c 				d++;
d                3509 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t show_pci(struct device *d, struct device_attribute *attr,
d                3512 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct pci_dev *pci_dev = to_pci_dev(d);
d                3531 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t show_cfg(struct device *d, struct device_attribute *attr,
d                3534 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_priv *p = dev_get_drvdata(d);
d                3540 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t show_status(struct device *d, struct device_attribute *attr,
d                3543 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_priv *p = dev_get_drvdata(d);
d                3549 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t show_capability(struct device *d, struct device_attribute *attr,
d                3552 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_priv *p = dev_get_drvdata(d);
d                3575 drivers/net/wireless/intel/ipw2x00/ipw2100.c #define IPW2100_ORD(x, d) { IPW_ORD_ ##x, #x, d }
d                3793 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t show_registers(struct device *d, struct device_attribute *attr,
d                3797 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_priv *priv = dev_get_drvdata(d);
d                3815 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t show_hardware(struct device *d, struct device_attribute *attr,
d                3818 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_priv *priv = dev_get_drvdata(d);
d                3856 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t show_memory(struct device *d, struct device_attribute *attr,
d                3859 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_priv *priv = dev_get_drvdata(d);
d                3913 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t store_memory(struct device *d, struct device_attribute *attr,
d                3916 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_priv *priv = dev_get_drvdata(d);
d                3950 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t show_ordinals(struct device *d, struct device_attribute *attr,
d                3953 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_priv *priv = dev_get_drvdata(d);
d                3986 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t show_stats(struct device *d, struct device_attribute *attr,
d                3989 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_priv *priv = dev_get_drvdata(d);
d                4051 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t show_internals(struct device *d, struct device_attribute *attr,
d                4054 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_priv *priv = dev_get_drvdata(d);
d                4105 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t show_bssinfo(struct device *d, struct device_attribute *attr,
d                4108 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_priv *priv = dev_get_drvdata(d);
d                4151 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t debug_level_show(struct device_driver *d, char *buf)
d                4156 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t debug_level_store(struct device_driver *d,
d                4173 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t show_fatal_error(struct device *d,
d                4176 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_priv *priv = dev_get_drvdata(d);
d                4198 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t store_fatal_error(struct device *d,
d                4202 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_priv *priv = dev_get_drvdata(d);
d                4209 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t show_scan_age(struct device *d, struct device_attribute *attr,
d                4212 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_priv *priv = dev_get_drvdata(d);
d                4216 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t store_scan_age(struct device *d, struct device_attribute *attr,
d                4219 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_priv *priv = dev_get_drvdata(d);
d                4242 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t show_rf_kill(struct device *d, struct device_attribute *attr,
d                4249 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_priv *priv = dev_get_drvdata(d);
d                4286 drivers/net/wireless/intel/ipw2x00/ipw2100.c static ssize_t store_rf_kill(struct device *d, struct device_attribute *attr,
d                4289 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_priv *priv = dev_get_drvdata(d);
d                 408 drivers/net/wireless/intel/ipw2x00/ipw2200.c #define ipw_read_indirect(a, b, c, d) ({ \
d                 410 drivers/net/wireless/intel/ipw2x00/ipw2200.c 			__LINE__, (u32)(b), (u32)(d)); \
d                 411 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	_ipw_read_indirect(a, b, c, d); \
d                 417 drivers/net/wireless/intel/ipw2x00/ipw2200.c #define ipw_write_indirect(a, b, c, d) do { \
d                 419 drivers/net/wireless/intel/ipw2x00/ipw2200.c 			__LINE__, (u32)(b), (u32)(d)); \
d                 420 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	_ipw_write_indirect(a, b, c, d); \
d                1184 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t debug_level_show(struct device_driver *d, char *buf)
d                1189 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t debug_level_store(struct device_driver *d, const char *buf,
d                1262 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_event_log(struct device *d,
d                1265 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1294 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_error(struct device *d,
d                1297 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1329 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t clear_error(struct device *d,
d                1333 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1342 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_cmd_log(struct device *d,
d                1345 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1372 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t store_rtap_iface(struct device *d,
d                1376 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1417 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_rtap_iface(struct device *d,
d                1421 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1434 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t store_rtap_filter(struct device *d,
d                1438 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1454 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_rtap_filter(struct device *d,
d                1458 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1466 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_scan_age(struct device *d, struct device_attribute *attr,
d                1469 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1473 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t store_scan_age(struct device *d, struct device_attribute *attr,
d                1476 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1509 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_led(struct device *d, struct device_attribute *attr,
d                1512 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1516 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t store_led(struct device *d, struct device_attribute *attr,
d                1519 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1542 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_status(struct device *d,
d                1545 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *p = dev_get_drvdata(d);
d                1551 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_cfg(struct device *d, struct device_attribute *attr,
d                1554 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *p = dev_get_drvdata(d);
d                1560 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_nic_type(struct device *d,
d                1563 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1569 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_ucode_version(struct device *d,
d                1573 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *p = dev_get_drvdata(d);
d                1583 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_rtc(struct device *d, struct device_attribute *attr,
d                1587 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *p = dev_get_drvdata(d);
d                1601 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_eeprom_delay(struct device *d,
d                1604 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *p = dev_get_drvdata(d);
d                1608 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t store_eeprom_delay(struct device *d,
d                1612 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *p = dev_get_drvdata(d);
d                1619 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_command_event_reg(struct device *d,
d                1623 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *p = dev_get_drvdata(d);
d                1628 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t store_command_event_reg(struct device *d,
d                1633 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *p = dev_get_drvdata(d);
d                1643 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_mem_gpio_reg(struct device *d,
d                1647 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *p = dev_get_drvdata(d);
d                1652 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t store_mem_gpio_reg(struct device *d,
d                1657 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *p = dev_get_drvdata(d);
d                1666 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_indirect_dword(struct device *d,
d                1670 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1679 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t store_indirect_dword(struct device *d,
d                1683 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1693 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_indirect_byte(struct device *d,
d                1697 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1706 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t store_indirect_byte(struct device *d,
d                1710 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1720 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_direct_dword(struct device *d,
d                1724 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1733 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t store_direct_dword(struct device *d,
d                1737 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1759 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_rf_kill(struct device *d, struct device_attribute *attr,
d                1766 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1805 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t store_rf_kill(struct device *d, struct device_attribute *attr,
d                1808 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1817 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_speed_scan(struct device *d, struct device_attribute *attr,
d                1820 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1832 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t store_speed_scan(struct device *d, struct device_attribute *attr,
d                1835 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1870 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_net_stats(struct device *d, struct device_attribute *attr,
d                1873 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1877 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t store_net_stats(struct device *d, struct device_attribute *attr,
d                1880 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                1891 drivers/net/wireless/intel/ipw2x00/ipw2200.c static ssize_t show_channels(struct device *d,
d                1895 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	struct ipw_priv *priv = dev_get_drvdata(d);
d                2628 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	int d = (bit ? EEPROM_BIT_DI : 0);
d                2629 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	eeprom_write_reg(p, EEPROM_BIT_CS | d);
d                2630 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	eeprom_write_reg(p, EEPROM_BIT_CS | d | EEPROM_BIT_SK);
d                3076 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_show_debug_level(struct device *d, struct device_attribute *attr,
d                3079 drivers/net/wireless/intel/iwlegacy/3945-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                3084 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_store_debug_level(struct device *d, struct device_attribute *attr,
d                3087 drivers/net/wireless/intel/iwlegacy/3945-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                3106 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_show_temperature(struct device *d, struct device_attribute *attr,
d                3109 drivers/net/wireless/intel/iwlegacy/3945-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                3120 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_show_tx_power(struct device *d, struct device_attribute *attr, char *buf)
d                3122 drivers/net/wireless/intel/iwlegacy/3945-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                3127 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_store_tx_power(struct device *d, struct device_attribute *attr,
d                3130 drivers/net/wireless/intel/iwlegacy/3945-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                3146 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_show_flags(struct device *d, struct device_attribute *attr, char *buf)
d                3148 drivers/net/wireless/intel/iwlegacy/3945-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                3154 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_store_flags(struct device *d, struct device_attribute *attr,
d                3157 drivers/net/wireless/intel/iwlegacy/3945-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                3179 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_show_filter_flags(struct device *d, struct device_attribute *attr,
d                3182 drivers/net/wireless/intel/iwlegacy/3945-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                3188 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_store_filter_flags(struct device *d, struct device_attribute *attr,
d                3191 drivers/net/wireless/intel/iwlegacy/3945-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                3215 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_show_measurement(struct device *d, struct device_attribute *attr,
d                3218 drivers/net/wireless/intel/iwlegacy/3945-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                3248 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_store_measurement(struct device *d, struct device_attribute *attr,
d                3251 drivers/net/wireless/intel/iwlegacy/3945-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                3286 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_store_retry_rate(struct device *d, struct device_attribute *attr,
d                3289 drivers/net/wireless/intel/iwlegacy/3945-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                3299 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_show_retry_rate(struct device *d, struct device_attribute *attr,
d                3302 drivers/net/wireless/intel/iwlegacy/3945-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                3310 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_show_channels(struct device *d, struct device_attribute *attr, char *buf)
d                3319 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_show_antenna(struct device *d, struct device_attribute *attr, char *buf)
d                3321 drivers/net/wireless/intel/iwlegacy/3945-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                3330 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_store_antenna(struct device *d, struct device_attribute *attr,
d                3333 drivers/net/wireless/intel/iwlegacy/3945-mac.c 	struct il_priv *il __maybe_unused = dev_get_drvdata(d);
d                3356 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_show_status(struct device *d, struct device_attribute *attr, char *buf)
d                3358 drivers/net/wireless/intel/iwlegacy/3945-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                3367 drivers/net/wireless/intel/iwlegacy/3945-mac.c il3945_dump_error_log(struct device *d, struct device_attribute *attr,
d                3370 drivers/net/wireless/intel/iwlegacy/3945-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                 289 drivers/net/wireless/intel/iwlegacy/3945.h 	s32 a, b, c, d, e;	/* coefficients for voltage->power
d                4552 drivers/net/wireless/intel/iwlegacy/4965-mac.c il4965_show_debug_level(struct device *d, struct device_attribute *attr,
d                4555 drivers/net/wireless/intel/iwlegacy/4965-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                4560 drivers/net/wireless/intel/iwlegacy/4965-mac.c il4965_store_debug_level(struct device *d, struct device_attribute *attr,
d                4563 drivers/net/wireless/intel/iwlegacy/4965-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                4582 drivers/net/wireless/intel/iwlegacy/4965-mac.c il4965_show_temperature(struct device *d, struct device_attribute *attr,
d                4585 drivers/net/wireless/intel/iwlegacy/4965-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                4596 drivers/net/wireless/intel/iwlegacy/4965-mac.c il4965_show_tx_power(struct device *d, struct device_attribute *attr, char *buf)
d                4598 drivers/net/wireless/intel/iwlegacy/4965-mac.c 	struct il_priv *il = dev_get_drvdata(d);
d                4607 drivers/net/wireless/intel/iwlegacy/4965-mac.c il4965_store_tx_power(struct device *d, struct device_attribute *attr,
d                4610 drivers/net/wireless/intel/iwlegacy/4965-mac.c 	struct il_priv *il = dev_get_drvdata(d);
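The ipw2100/ipw2200 and iwlegacy entries above are all instances of the same sysfs idiom: the struct device *d argument exists only so the handler can recover its driver-private data with dev_get_drvdata(d). A minimal sketch of a matching show/store pair; demo_priv and the scan_age attribute are illustrative, not taken from those drivers:

#include <linux/device.h>
#include <linux/kernel.h>

/*
 * Hedged sketch of the sysfs idiom above: struct device *d is only used
 * to recover the driver's private data via dev_get_drvdata().  demo_priv
 * and the scan_age attribute are illustrative.
 */
struct demo_priv {
	unsigned int scan_age;
};

static ssize_t scan_age_show(struct device *d, struct device_attribute *attr,
			     char *buf)
{
	struct demo_priv *priv = dev_get_drvdata(d);

	return sprintf(buf, "%u\n", priv->scan_age);
}

static ssize_t scan_age_store(struct device *d, struct device_attribute *attr,
			      const char *buf, size_t count)
{
	struct demo_priv *priv = dev_get_drvdata(d);
	unsigned int val;

	if (kstrtouint(buf, 0, &val))
		return -EINVAL;
	priv->scan_age = val;
	return count;
}
static DEVICE_ATTR_RW(scan_age);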
d                  41 drivers/net/wireless/intel/iwlwifi/iwl-debug.h #define IWL_ERR_DEV(d, f, a...)						\
d                  44 drivers/net/wireless/intel/iwlwifi/iwl-debug.h 		__iwl_err((d), false, false, f, ## a);			\
d                 182 drivers/net/wireless/intel/iwlwifi/iwl-debug.h #define IWL_DEBUG_EEPROM(d, f, a...)	IWL_DEBUG_DEV(d, IWL_DL_EEPROM, f, ## a)
d                 140 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c #define LWNG_SETVAL(f,i,s,l,d) \
d                 142 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c hdr->f.status = s; hdr->f.len = l; hdr->f.data = d
d                3161 drivers/net/wireless/intersil/hostap/hostap_hw.c #define HOSTAP_TASKLET_INIT(q, f, d) \
d                3162 drivers/net/wireless/intersil/hostap/hostap_hw.c do { memset((q), 0, sizeof(*(q))); (q)->func = (f); (q)->data = (d); } \
d                1044 drivers/net/wireless/intersil/hostap/hostap_wlan.h #define prism2_callback(d, e) do { } while (0)
d                 814 drivers/net/wireless/marvell/libertas/debugfs.c 	struct debug_data *d;
d                 822 drivers/net/wireless/marvell/libertas/debugfs.c 	d = file->private_data;
d                 825 drivers/net/wireless/marvell/libertas/debugfs.c 		if (d[i].size == 1)
d                 826 drivers/net/wireless/marvell/libertas/debugfs.c 			val = *((u8 *) d[i].addr);
d                 827 drivers/net/wireless/marvell/libertas/debugfs.c 		else if (d[i].size == 2)
d                 828 drivers/net/wireless/marvell/libertas/debugfs.c 			val = *((u16 *) d[i].addr);
d                 829 drivers/net/wireless/marvell/libertas/debugfs.c 		else if (d[i].size == 4)
d                 830 drivers/net/wireless/marvell/libertas/debugfs.c 			val = *((u32 *) d[i].addr);
d                 831 drivers/net/wireless/marvell/libertas/debugfs.c 		else if (d[i].size == 8)
d                 832 drivers/net/wireless/marvell/libertas/debugfs.c 			val = *((u64 *) d[i].addr);
d                 834 drivers/net/wireless/marvell/libertas/debugfs.c 		pos += sprintf(p + pos, "%s=%d\n", d[i].name, val);
d                 862 drivers/net/wireless/marvell/libertas/debugfs.c 	struct debug_data *d = f->private_data;
d                 874 drivers/net/wireless/marvell/libertas/debugfs.c 			p = strstr(p0, d[i].name);
d                 886 drivers/net/wireless/marvell/libertas/debugfs.c 			if (d[i].size == 1)
d                 887 drivers/net/wireless/marvell/libertas/debugfs.c 				*((u8 *) d[i].addr) = (u8) r;
d                 888 drivers/net/wireless/marvell/libertas/debugfs.c 			else if (d[i].size == 2)
d                 889 drivers/net/wireless/marvell/libertas/debugfs.c 				*((u16 *) d[i].addr) = (u16) r;
d                 890 drivers/net/wireless/marvell/libertas/debugfs.c 			else if (d[i].size == 4)
d                 891 drivers/net/wireless/marvell/libertas/debugfs.c 				*((u32 *) d[i].addr) = (u32) r;
d                 892 drivers/net/wireless/marvell/libertas/debugfs.c 			else if (d[i].size == 8)
d                 893 drivers/net/wireless/marvell/libertas/debugfs.c 				*((u64 *) d[i].addr) = (u64) r;
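The libertas debugfs entries above read and write registered variables through a (name, address, size) table, casting the stored pointer by the recorded width. A hedged sketch of the read side, with demo_debug_data as an illustrative stand-in for the driver's table entry type:

#include <linux/types.h>

/*
 * Hedged sketch of the size-dispatched access above: each table entry
 * records the address and width of a debug variable, and the reader
 * casts the stored pointer by that width.  demo_debug_data is an
 * illustrative stand-in.
 */
struct demo_debug_data {
	const char *name;
	void *addr;
	unsigned int size;	/* 1, 2, 4 or 8 bytes */
};

static u64 demo_debug_read(const struct demo_debug_data *d)
{
	switch (d->size) {
	case 1:
		return *(u8 *)d->addr;
	case 2:
		return *(u16 *)d->addr;
	case 4:
		return *(u32 *)d->addr;
	case 8:
		return *(u64 *)d->addr;
	default:
		return 0;
	}
}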
d                1469 drivers/net/wireless/marvell/mwifiex/sta_cmd.c 	u8 *s = src, *d = dst;
d                1477 drivers/net/wireless/marvell/mwifiex/sta_cmd.c 			*d++ = simple_strtol(s, NULL, 16);
d                1484 drivers/net/wireless/marvell/mwifiex/sta_cmd.c 	return d - dst;
d                 255 drivers/net/wireless/marvell/mwifiex/util.c 	struct mwifiex_debug_data *d = &items[0];
d                 264 drivers/net/wireless/marvell/mwifiex/util.c 		p += sprintf(p, "%s=", d[i].name);
d                 266 drivers/net/wireless/marvell/mwifiex/util.c 		size = d[i].size / d[i].num;
d                 269 drivers/net/wireless/marvell/mwifiex/util.c 			addr = d[i].addr + (size_t)info;
d                 271 drivers/net/wireless/marvell/mwifiex/util.c 			addr = d[i].addr + (size_t)priv->adapter;
d                 273 drivers/net/wireless/marvell/mwifiex/util.c 		for (j = 0; j < d[i].num; j++) {
d                 332 drivers/net/wireless/mediatek/mt7601u/eeprom.c 	struct tssi_data *d = &dev->ee->tssi_data;
d                 337 drivers/net/wireless/mediatek/mt7601u/eeprom.c 	d->slope = eeprom[MT_EE_TX_TSSI_SLOPE];
d                 338 drivers/net/wireless/mediatek/mt7601u/eeprom.c 	d->tx0_delta_offset = eeprom[MT_EE_TX_TSSI_OFFSET] * 1024;
d                 339 drivers/net/wireless/mediatek/mt7601u/eeprom.c 	d->offset[0] = eeprom[MT_EE_TX_TSSI_OFFSET_GROUP];
d                 340 drivers/net/wireless/mediatek/mt7601u/eeprom.c 	d->offset[1] = eeprom[MT_EE_TX_TSSI_OFFSET_GROUP + 1];
d                 341 drivers/net/wireless/mediatek/mt7601u/eeprom.c 	d->offset[2] = eeprom[MT_EE_TX_TSSI_OFFSET_GROUP + 2];
d                 629 drivers/net/wireless/mediatek/mt7601u/phy.c 	struct tssi_data *d = &dev->ee->tssi_data;
d                 632 drivers/net/wireless/mediatek/mt7601u/phy.c 	init_offset = -((tssi_db * d->slope + d->offset[1]) / 4096) + 10;
d                  18 drivers/net/wireless/ralink/rt2x00/rt2x00usb.h #define to_usb_device_intf(d) \
d                  20 drivers/net/wireless/ralink/rt2x00/rt2x00usb.h 	struct usb_interface *intf = to_usb_interface(d); \
d                 165 drivers/net/wireless/realtek/rtlwifi/debug.c RTL_DEBUG_IMPL_BB_SERIES(d, 0x0d00);
d                 604 drivers/net/wireless/realtek/rtw88/debug.c rtw_debug_impl_bb(d, 0x0d00);
d                 103 drivers/net/wireless/st/cw1200/debug.c 	struct cw1200_debug_priv *d = priv->debug;
d                 266 drivers/net/wireless/st/cw1200/debug.c 		   d->tx);
d                 268 drivers/net/wireless/st/cw1200/debug.c 		   d->tx_agg);
d                 270 drivers/net/wireless/st/cw1200/debug.c 		   d->tx_multi, d->tx_multi_frames);
d                 272 drivers/net/wireless/st/cw1200/debug.c 		   d->rx);
d                 274 drivers/net/wireless/st/cw1200/debug.c 		   d->rx_agg);
d                 276 drivers/net/wireless/st/cw1200/debug.c 		   d->tx_cache_miss);
d                 278 drivers/net/wireless/st/cw1200/debug.c 		   d->tx_align);
d                 280 drivers/net/wireless/st/cw1200/debug.c 		   d->tx_burst);
d                 282 drivers/net/wireless/st/cw1200/debug.c 		   d->tx_ttl);
d                 363 drivers/net/wireless/st/cw1200/debug.c 	struct cw1200_debug_priv *d = kzalloc(sizeof(struct cw1200_debug_priv),
d                 365 drivers/net/wireless/st/cw1200/debug.c 	priv->debug = d;
d                 366 drivers/net/wireless/st/cw1200/debug.c 	if (!d)
d                 369 drivers/net/wireless/st/cw1200/debug.c 	d->debugfs_phy = debugfs_create_dir("cw1200",
d                 371 drivers/net/wireless/st/cw1200/debug.c 	debugfs_create_file("status", 0400, d->debugfs_phy, priv,
d                 373 drivers/net/wireless/st/cw1200/debug.c 	debugfs_create_file("counters", 0400, d->debugfs_phy, priv,
d                 375 drivers/net/wireless/st/cw1200/debug.c 	debugfs_create_file("wsm_dumps", 0200, d->debugfs_phy, priv,
d                 383 drivers/net/wireless/st/cw1200/debug.c 	struct cw1200_debug_priv *d = priv->debug;
d                 384 drivers/net/wireless/st/cw1200/debug.c 	if (d) {
d                 385 drivers/net/wireless/st/cw1200/debug.c 		debugfs_remove_recursive(d->debugfs_phy);
d                 387 drivers/net/wireless/st/cw1200/debug.c 		kfree(d);
d                  16 drivers/nfc/nfcsim.c #define NFCSIM_ERR(d, fmt, args...) nfc_err(&d->nfc_digital_dev->nfc_dev->dev, \
d                  19 drivers/nfc/nfcsim.c #define NFCSIM_DBG(d, fmt, args...) dev_dbg(&d->nfc_digital_dev->nfc_dev->dev, \
d                  14 drivers/nubus/bus.c #define to_nubus_board(d)       container_of(d, struct nubus_board, dev)
d                  15 drivers/nubus/bus.c #define to_nubus_driver(d)      container_of(d, struct nubus_driver, driver)
d                 220 drivers/nvdimm/btt.c 	struct dentry *d;
d                 227 drivers/nvdimm/btt.c 	d = debugfs_create_dir(dirname, parent);
d                 228 drivers/nvdimm/btt.c 	if (IS_ERR_OR_NULL(d))
d                 230 drivers/nvdimm/btt.c 	a->debugfs_dir = d;
d                 232 drivers/nvdimm/btt.c 	debugfs_create_x64("size", S_IRUGO, d, &a->size);
d                 233 drivers/nvdimm/btt.c 	debugfs_create_x64("external_lba_start", S_IRUGO, d,
d                 235 drivers/nvdimm/btt.c 	debugfs_create_x32("internal_nlba", S_IRUGO, d, &a->internal_nlba);
d                 236 drivers/nvdimm/btt.c 	debugfs_create_u32("internal_lbasize", S_IRUGO, d,
d                 238 drivers/nvdimm/btt.c 	debugfs_create_x32("external_nlba", S_IRUGO, d, &a->external_nlba);
d                 239 drivers/nvdimm/btt.c 	debugfs_create_u32("external_lbasize", S_IRUGO, d,
d                 241 drivers/nvdimm/btt.c 	debugfs_create_u32("nfree", S_IRUGO, d, &a->nfree);
d                 242 drivers/nvdimm/btt.c 	debugfs_create_u16("version_major", S_IRUGO, d, &a->version_major);
d                 243 drivers/nvdimm/btt.c 	debugfs_create_u16("version_minor", S_IRUGO, d, &a->version_minor);
d                 244 drivers/nvdimm/btt.c 	debugfs_create_x64("nextoff", S_IRUGO, d, &a->nextoff);
d                 245 drivers/nvdimm/btt.c 	debugfs_create_x64("infooff", S_IRUGO, d, &a->infooff);
d                 246 drivers/nvdimm/btt.c 	debugfs_create_x64("dataoff", S_IRUGO, d, &a->dataoff);
d                 247 drivers/nvdimm/btt.c 	debugfs_create_x64("mapoff", S_IRUGO, d, &a->mapoff);
d                 248 drivers/nvdimm/btt.c 	debugfs_create_x64("logoff", S_IRUGO, d, &a->logoff);
d                 249 drivers/nvdimm/btt.c 	debugfs_create_x64("info2off", S_IRUGO, d, &a->info2off);
d                 250 drivers/nvdimm/btt.c 	debugfs_create_x32("flags", S_IRUGO, d, &a->flags);
d                 251 drivers/nvdimm/btt.c 	debugfs_create_u32("log_index_0", S_IRUGO, d, &a->log_index[0]);
d                 252 drivers/nvdimm/btt.c 	debugfs_create_u32("log_index_1", S_IRUGO, d, &a->log_index[1]);
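The btt.c entries above (and the opp/debugfs.c ones further down) build one debugfs directory per object and expose each field through a typed helper such as debugfs_create_x64() or debugfs_create_u32(), so no custom show() routine is needed. A minimal sketch; demo_arena and the file names are illustrative assumptions:

#include <linux/debugfs.h>
#include <linux/err.h>

/*
 * Hedged sketch of the debugfs layout built above: one directory per
 * object, one typed file per field.  demo_arena and the file names are
 * illustrative assumptions.
 */
struct demo_arena {
	struct dentry *debugfs_dir;
	u64 size;
	u32 nfree;
};

static void demo_arena_debugfs_init(struct demo_arena *a,
				    struct dentry *parent, const char *name)
{
	struct dentry *d = debugfs_create_dir(name, parent);

	if (IS_ERR_OR_NULL(d))
		return;
	a->debugfs_dir = d;

	debugfs_create_x64("size", 0400, d, &a->size);
	debugfs_create_u32("nfree", 0400, d, &a->nfree);
}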
d                 497 drivers/nvdimm/bus.c static void nd_async_device_register(void *d, async_cookie_t cookie)
d                 499 drivers/nvdimm/bus.c 	struct device *dev = d;
d                 510 drivers/nvdimm/bus.c static void nd_async_device_unregister(void *d, async_cookie_t cookie)
d                 512 drivers/nvdimm/bus.c 	struct device *dev = d;
d                  90 drivers/nvdimm/nd.h #define nd_dbg_dpa(r, d, res, fmt, arg...) \
d                  91 drivers/nvdimm/nd.h 	dev_dbg((r) ? &(r)->dev : (d)->dev, "%s: %.13s: %#llx @ %#llx " fmt, \
d                  92 drivers/nvdimm/nd.h 		(r) ? dev_name((d)->dev) : "", res ? res->name : "null", \
d                 151 drivers/nvme/target/fabrics-cmd.c 	struct nvmf_connect_data *d;
d                 155 drivers/nvme/target/fabrics-cmd.c 	d = kmalloc(sizeof(*d), GFP_KERNEL);
d                 156 drivers/nvme/target/fabrics-cmd.c 	if (!d) {
d                 161 drivers/nvme/target/fabrics-cmd.c 	status = nvmet_copy_from_sgl(req, 0, d, sizeof(*d));
d                 176 drivers/nvme/target/fabrics-cmd.c 	if (unlikely(d->cntlid != cpu_to_le16(0xffff))) {
d                 178 drivers/nvme/target/fabrics-cmd.c 			d->cntlid);
d                 184 drivers/nvme/target/fabrics-cmd.c 	status = nvmet_alloc_ctrl(d->subsysnqn, d->hostnqn, req,
d                 193 drivers/nvme/target/fabrics-cmd.c 	uuid_copy(&ctrl->hostid, &d->hostid);
d                 206 drivers/nvme/target/fabrics-cmd.c 	kfree(d);
d                 214 drivers/nvme/target/fabrics-cmd.c 	struct nvmf_connect_data *d;
d                 219 drivers/nvme/target/fabrics-cmd.c 	d = kmalloc(sizeof(*d), GFP_KERNEL);
d                 220 drivers/nvme/target/fabrics-cmd.c 	if (!d) {
d                 225 drivers/nvme/target/fabrics-cmd.c 	status = nvmet_copy_from_sgl(req, 0, d, sizeof(*d));
d                 239 drivers/nvme/target/fabrics-cmd.c 	status = nvmet_ctrl_find_get(d->subsysnqn, d->hostnqn,
d                 240 drivers/nvme/target/fabrics-cmd.c 				     le16_to_cpu(d->cntlid),
d                 262 drivers/nvme/target/fabrics-cmd.c 	kfree(d);
d                  81 drivers/nvmem/core.c 	struct device *d;
d                  86 drivers/nvmem/core.c 	d = bus_find_device_by_of_node(&nvmem_bus_type, nvmem_np);
d                  88 drivers/nvmem/core.c 	if (!d)
d                  91 drivers/nvmem/core.c 	return to_nvmem_device(d);
d                  96 drivers/nvmem/core.c 	struct device *d;
d                  98 drivers/nvmem/core.c 	d = bus_find_device_by_name(&nvmem_bus_type, NULL, name);
d                 100 drivers/nvmem/core.c 	if (!d)
d                 103 drivers/nvmem/core.c 	return to_nvmem_device(d);
d                  32 drivers/nvmem/nvmem.h #define to_nvmem_device(d) container_of(d, struct nvmem_device, dev)
d                 646 drivers/of/irq.c 	struct irq_domain *d;
d                 651 drivers/of/irq.c 		d = irq_find_matching_host(msi_np, token);
d                 652 drivers/of/irq.c 		if (!d)
d                 654 drivers/of/irq.c 		return d;
d                 665 drivers/of/irq.c 			d = irq_find_matching_host(args.np, token);
d                 666 drivers/of/irq.c 			if (d)
d                 667 drivers/of/irq.c 				return d;
d                 293 drivers/of/of_mdio.c 	struct device *d;
d                 299 drivers/of/of_mdio.c 	d = bus_find_device_by_of_node(&mdio_bus_type, phy_np);
d                 300 drivers/of/of_mdio.c 	if (d) {
d                 301 drivers/of/of_mdio.c 		mdiodev = to_mdio_device(d);
d                 303 drivers/of/of_mdio.c 			return to_phy_device(d);
d                 304 drivers/of/of_mdio.c 		put_device(d);
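The nvmem and of_mdio entries above share a lookup idiom: find the struct device on a bus by its device-tree node, then recover the enclosing driver object with a container_of() wrapper such as to_nvmem_device(). A hedged sketch under the assumption of a demo_bus_type registered elsewhere; all demo_* names are illustrative:

#include <linux/device.h>
#include <linux/of.h>

/*
 * Hedged sketch of the lookup idiom above: find the struct device on a
 * bus by its device-tree node, then recover the wrapping driver object
 * with container_of().  demo_device and demo_bus_type are illustrative;
 * the reference taken by bus_find_device_by_of_node() stays with the
 * caller, as in the nvmem and of_mdio code.
 */
struct demo_device {
	struct device dev;
	/* driver-specific fields would follow */
};

#define to_demo_device(d) container_of(d, struct demo_device, dev)

extern struct bus_type demo_bus_type;	/* assumed to be registered elsewhere */

static struct demo_device *demo_find_by_node(struct device_node *np)
{
	struct device *d = bus_find_device_by_of_node(&demo_bus_type, np);

	return d ? to_demo_device(d) : NULL;
}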
d                  39 drivers/opp/debugfs.c 	struct dentry *d;
d                  48 drivers/opp/debugfs.c 		d = debugfs_create_dir(name, pdentry);
d                  50 drivers/opp/debugfs.c 		debugfs_create_ulong("u_volt_target", S_IRUGO, d,
d                  53 drivers/opp/debugfs.c 		debugfs_create_ulong("u_volt_min", S_IRUGO, d,
d                  56 drivers/opp/debugfs.c 		debugfs_create_ulong("u_volt_max", S_IRUGO, d,
d                  59 drivers/opp/debugfs.c 		debugfs_create_ulong("u_amp", S_IRUGO, d,
d                  67 drivers/opp/debugfs.c 	struct dentry *d;
d                  85 drivers/opp/debugfs.c 	d = debugfs_create_dir(name, pdentry);
d                  87 drivers/opp/debugfs.c 	debugfs_create_bool("available", S_IRUGO, d, &opp->available);
d                  88 drivers/opp/debugfs.c 	debugfs_create_bool("dynamic", S_IRUGO, d, &opp->dynamic);
d                  89 drivers/opp/debugfs.c 	debugfs_create_bool("turbo", S_IRUGO, d, &opp->turbo);
d                  90 drivers/opp/debugfs.c 	debugfs_create_bool("suspend", S_IRUGO, d, &opp->suspend);
d                  91 drivers/opp/debugfs.c 	debugfs_create_u32("performance_state", S_IRUGO, d, &opp->pstate);
d                  92 drivers/opp/debugfs.c 	debugfs_create_ulong("rate_hz", S_IRUGO, d, &opp->rate);
d                  93 drivers/opp/debugfs.c 	debugfs_create_ulong("clock_latency_ns", S_IRUGO, d,
d                  96 drivers/opp/debugfs.c 	opp_debug_create_supplies(opp, opp_table, d);
d                  98 drivers/opp/debugfs.c 	opp->dentry = d;
d                 105 drivers/opp/debugfs.c 	struct dentry *d;
d                 110 drivers/opp/debugfs.c 	d = debugfs_create_dir(opp_table->dentry_name, rootdir);
d                 112 drivers/opp/debugfs.c 	opp_dev->dentry = d;
d                 113 drivers/opp/debugfs.c 	opp_table->dentry = d;
d                 184 drivers/parisc/dino.c 	struct dino_device *d = DINO_DEV(parisc_walk_tree(bus->bridge));
d                 187 drivers/parisc/dino.c 	void __iomem *base_addr = d->hba.base_addr;
d                 192 drivers/parisc/dino.c 	spin_lock_irqsave(&d->dinosaur_pen, flags);
d                 206 drivers/parisc/dino.c 	spin_unlock_irqrestore(&d->dinosaur_pen, flags);
d                 219 drivers/parisc/dino.c 	struct dino_device *d = DINO_DEV(parisc_walk_tree(bus->bridge));
d                 222 drivers/parisc/dino.c 	void __iomem *base_addr = d->hba.base_addr;
d                 227 drivers/parisc/dino.c 	spin_lock_irqsave(&d->dinosaur_pen, flags);
d                 244 drivers/parisc/dino.c 	spin_unlock_irqrestore(&d->dinosaur_pen, flags);
d                 264 drivers/parisc/dino.c static u##size dino_in##size (struct pci_hba_data *d, u16 addr) \
d                 268 drivers/parisc/dino.c 	spin_lock_irqsave(&(DINO_DEV(d)->dinosaur_pen), flags); \
d                 270 drivers/parisc/dino.c 	__raw_writel((u32) addr, d->base_addr + DINO_PCI_ADDR); \
d                 272 drivers/parisc/dino.c 	v = read##type(d->base_addr+DINO_IO_DATA+(addr&mask)); \
d                 273 drivers/parisc/dino.c 	spin_unlock_irqrestore(&(DINO_DEV(d)->dinosaur_pen), flags); \
d                 282 drivers/parisc/dino.c static void dino_out##size (struct pci_hba_data *d, u16 addr, u##size val) \
d                 285 drivers/parisc/dino.c 	spin_lock_irqsave(&(DINO_DEV(d)->dinosaur_pen), flags); \
d                 287 drivers/parisc/dino.c 	__raw_writel((u32) addr, d->base_addr + DINO_PCI_ADDR); \
d                 289 drivers/parisc/dino.c 	write##type(val, d->base_addr+DINO_IO_DATA+(addr&mask)); \
d                 290 drivers/parisc/dino.c 	spin_unlock_irqrestore(&(DINO_DEV(d)->dinosaur_pen), flags); \
d                 306 drivers/parisc/dino.c static void dino_mask_irq(struct irq_data *d)
d                 308 drivers/parisc/dino.c 	struct dino_device *dino_dev = irq_data_get_irq_chip_data(d);
d                 309 drivers/parisc/dino.c 	int local_irq = gsc_find_local_irq(d->irq, dino_dev->global_irq, DINO_LOCAL_IRQS);
d                 311 drivers/parisc/dino.c 	DBG(KERN_WARNING "%s(0x%px, %d)\n", __func__, dino_dev, d->irq);
d                 318 drivers/parisc/dino.c static void dino_unmask_irq(struct irq_data *d)
d                 320 drivers/parisc/dino.c 	struct dino_device *dino_dev = irq_data_get_irq_chip_data(d);
d                 321 drivers/parisc/dino.c 	int local_irq = gsc_find_local_irq(d->irq, dino_dev->global_irq, DINO_LOCAL_IRQS);
d                 324 drivers/parisc/dino.c 	DBG(KERN_WARNING "%s(0x%px, %d)\n", __func__, dino_dev, d->irq);
d                 144 drivers/parisc/eisa.c static void eisa_mask_irq(struct irq_data *d)
d                 146 drivers/parisc/eisa.c 	unsigned int irq = d->irq;
d                 165 drivers/parisc/eisa.c static void eisa_unmask_irq(struct irq_data *d)
d                 167 drivers/parisc/eisa.c 	unsigned int irq = d->irq;
d                 104 drivers/parisc/gsc.c static void gsc_asic_mask_irq(struct irq_data *d)
d                 106 drivers/parisc/gsc.c 	struct gsc_asic *irq_dev = irq_data_get_irq_chip_data(d);
d                 107 drivers/parisc/gsc.c 	int local_irq = gsc_find_local_irq(d->irq, irq_dev->global_irq, 32);
d                 110 drivers/parisc/gsc.c 	DEBPRINTK(KERN_DEBUG "%s(%d) %s: IMR 0x%x\n", __func__, d->irq,
d                 119 drivers/parisc/gsc.c static void gsc_asic_unmask_irq(struct irq_data *d)
d                 121 drivers/parisc/gsc.c 	struct gsc_asic *irq_dev = irq_data_get_irq_chip_data(d);
d                 122 drivers/parisc/gsc.c 	int local_irq = gsc_find_local_irq(d->irq, irq_dev->global_irq, 32);
d                 125 drivers/parisc/gsc.c 	DEBPRINTK(KERN_DEBUG "%s(%d) %s: IMR 0x%x\n", __func__, d->irq,
d                 603 drivers/parisc/iosapic.c static void iosapic_mask_irq(struct irq_data *d)
d                 606 drivers/parisc/iosapic.c 	struct vector_info *vi = irq_data_get_irq_chip_data(d);
d                 616 drivers/parisc/iosapic.c static void iosapic_unmask_irq(struct irq_data *d)
d                 618 drivers/parisc/iosapic.c 	struct vector_info *vi = irq_data_get_irq_chip_data(d);
d                 654 drivers/parisc/iosapic.c 	DBG(KERN_DEBUG "enable_irq(%d): eoi(%p, 0x%x)\n", d->irq,
d                 659 drivers/parisc/iosapic.c static void iosapic_eoi_irq(struct irq_data *d)
d                 661 drivers/parisc/iosapic.c 	struct vector_info *vi = irq_data_get_irq_chip_data(d);
d                 664 drivers/parisc/iosapic.c 	cpu_eoi_irq(d);
d                 668 drivers/parisc/iosapic.c static int iosapic_set_affinity_irq(struct irq_data *d,
d                 671 drivers/parisc/iosapic.c 	struct vector_info *vi = irq_data_get_irq_chip_data(d);
d                 676 drivers/parisc/iosapic.c 	dest_cpu = cpu_check_affinity(d, dest);
d                 680 drivers/parisc/iosapic.c 	cpumask_copy(irq_data_get_affinity_mask(d), cpumask_of(dest_cpu));
d                 681 drivers/parisc/iosapic.c 	vi->txn_addr = txn_affinity_addr(d->irq, dest_cpu);
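The parisc entries above (dino, eisa, gsc, iosapic) are irq_chip callbacks: each receives a struct irq_data *d carrying the Linux irq number and the per-controller cookie returned by irq_data_get_irq_chip_data(). A minimal sketch of how such mask/unmask handlers are wired into a struct irq_chip; the demo_* controller and its single IMR register are illustrative, not any of those drivers:

#include <linux/bits.h>
#include <linux/io.h>
#include <linux/irq.h>

/*
 * Hedged sketch of how mask/unmask handlers like the ones above are wired
 * up: struct irq_data *d carries the irq number and the per-controller
 * cookie from irq_data_get_irq_chip_data().  The demo_* controller and
 * its single 32-bit IMR are illustrative (assumes hwirq < 32).
 */
struct demo_irq_ctrl {
	void __iomem *imr;	/* interrupt mask register */
};

static void demo_mask_irq(struct irq_data *d)
{
	struct demo_irq_ctrl *ctrl = irq_data_get_irq_chip_data(d);

	/* clear this source's enable bit */
	writel(readl(ctrl->imr) & ~BIT(irqd_to_hwirq(d)), ctrl->imr);
}

static void demo_unmask_irq(struct irq_data *d)
{
	struct demo_irq_ctrl *ctrl = irq_data_get_irq_chip_data(d);

	writel(readl(ctrl->imr) | BIT(irqd_to_hwirq(d)), ctrl->imr);
}

static struct irq_chip demo_irq_chip = {
	.name		= "demo",
	.irq_mask	= demo_mask_irq,
	.irq_unmask	= demo_unmask_irq,
};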
d                 109 drivers/parisc/lba_pci.c #define LBA_SKIP_PROBE(d) ((d)->flags & LBA_FLAG_SKIP_PROBE)
d                 159 drivers/parisc/lba_pci.c lba_dump_res(struct resource *r, int d)
d                 167 drivers/parisc/lba_pci.c 	for (i = d; i ; --i) printk(" ");
d                 170 drivers/parisc/lba_pci.c 	lba_dump_res(r->child, d+2);
d                 171 drivers/parisc/lba_pci.c 	lba_dump_res(r->sibling, d);
d                 189 drivers/parisc/lba_pci.c static int lba_device_present(u8 bus, u8 dfn, struct lba_device *d)
d                 191 drivers/parisc/lba_pci.c 	u8 first_bus = d->hba.hba_bus->busn_res.start;
d                 192 drivers/parisc/lba_pci.c 	u8 last_sub_bus = d->hba.hba_bus->busn_res.end;
d                 205 drivers/parisc/lba_pci.c #define LBA_CFG_SETUP(d, tok) {				\
d                 207 drivers/parisc/lba_pci.c     error_config = READ_REG32(d->hba.base_addr + LBA_ERROR_CONFIG);		\
d                 210 drivers/parisc/lba_pci.c     status_control = READ_REG32(d->hba.base_addr + LBA_STAT_CTL);		\
d                 216 drivers/parisc/lba_pci.c 	arb_mask = READ_REG32(d->hba.base_addr + LBA_ARB_MASK);		\
d                 222 drivers/parisc/lba_pci.c 	WRITE_REG32(0x1, d->hba.base_addr + LBA_ARB_MASK);		\
d                 228 drivers/parisc/lba_pci.c     WRITE_REG32(error_config | LBA_SMART_MODE, d->hba.base_addr + LBA_ERROR_CONFIG);	\
d                 232 drivers/parisc/lba_pci.c #define LBA_CFG_PROBE(d, tok) {				\
d                 237 drivers/parisc/lba_pci.c     WRITE_REG32(tok | PCI_VENDOR_ID, (d)->hba.base_addr + LBA_PCI_CFG_ADDR);\
d                 242 drivers/parisc/lba_pci.c     lba_t32 = READ_REG32((d)->hba.base_addr + LBA_PCI_CFG_ADDR);	\
d                 247 drivers/parisc/lba_pci.c     WRITE_REG32(~0, (d)->hba.base_addr + LBA_PCI_CFG_DATA);		\
d                 252 drivers/parisc/lba_pci.c     lba_t32 = READ_REG32((d)->hba.base_addr + LBA_PCI_CFG_ADDR);	\
d                 283 drivers/parisc/lba_pci.c #define LBA_CFG_MASTER_ABORT_CHECK(d, base, tok, error) {		\
d                 306 drivers/parisc/lba_pci.c #define LBA_CFG_TR4_ADDR_SETUP(d, addr)					\
d                 307 drivers/parisc/lba_pci.c 	WRITE_REG32(((addr) & ~3), (d)->hba.base_addr + LBA_PCI_CFG_ADDR);
d                 309 drivers/parisc/lba_pci.c #define LBA_CFG_ADDR_SETUP(d, addr) {					\
d                 310 drivers/parisc/lba_pci.c     WRITE_REG32(((addr) & ~3), (d)->hba.base_addr + LBA_PCI_CFG_ADDR);	\
d                 315 drivers/parisc/lba_pci.c     lba_t32 = READ_REG32((d)->hba.base_addr + LBA_PCI_CFG_ADDR);	\
d                 319 drivers/parisc/lba_pci.c #define LBA_CFG_RESTORE(d, base) {					\
d                 337 drivers/parisc/lba_pci.c lba_rd_cfg(struct lba_device *d, u32 tok, u8 reg, u32 size)
d                 345 drivers/parisc/lba_pci.c 	LBA_CFG_SETUP(d, tok);
d                 346 drivers/parisc/lba_pci.c 	LBA_CFG_PROBE(d, tok);
d                 347 drivers/parisc/lba_pci.c 	LBA_CFG_MASTER_ABORT_CHECK(d, d->hba.base_addr, tok, error);
d                 349 drivers/parisc/lba_pci.c 		void __iomem *data_reg = d->hba.base_addr + LBA_PCI_CFG_DATA;
d                 351 drivers/parisc/lba_pci.c 		LBA_CFG_ADDR_SETUP(d, tok | reg);
d                 358 drivers/parisc/lba_pci.c 	LBA_CFG_RESTORE(d, d->hba.base_addr);
d                 365 drivers/parisc/lba_pci.c 	struct lba_device *d = LBA_DEV(parisc_walk_tree(bus->bridge));
d                 368 drivers/parisc/lba_pci.c 	void __iomem *data_reg = d->hba.base_addr + LBA_PCI_CFG_DATA;
d                 377 drivers/parisc/lba_pci.c 		*data = lba_rd_cfg(d, tok, pos, size);
d                 382 drivers/parisc/lba_pci.c 	if (LBA_SKIP_PROBE(d) && !lba_device_present(bus->busn_res.start, devfn, d)) {
d                 393 drivers/parisc/lba_pci.c 	LBA_CFG_ADDR_SETUP(d, tok | pos);
d                 405 drivers/parisc/lba_pci.c lba_wr_cfg(struct lba_device *d, u32 tok, u8 reg, u32 data, u32 size)
d                 411 drivers/parisc/lba_pci.c 	void __iomem *data_reg = d->hba.base_addr + LBA_PCI_CFG_DATA;
d                 413 drivers/parisc/lba_pci.c 	LBA_CFG_SETUP(d, tok);
d                 414 drivers/parisc/lba_pci.c 	LBA_CFG_ADDR_SETUP(d, tok | reg);
d                 420 drivers/parisc/lba_pci.c 	LBA_CFG_MASTER_ABORT_CHECK(d, d->hba.base_addr, tok, error);
d                 421 drivers/parisc/lba_pci.c 	LBA_CFG_RESTORE(d, d->hba.base_addr);
d                 432 drivers/parisc/lba_pci.c 	struct lba_device *d = LBA_DEV(parisc_walk_tree(bus->bridge));
d                 439 drivers/parisc/lba_pci.c 	if (!LBA_SKIP_PROBE(d)) {
d                 441 drivers/parisc/lba_pci.c 		lba_wr_cfg(d, tok, pos, (u32) data, size);
d                 446 drivers/parisc/lba_pci.c 	if (LBA_SKIP_PROBE(d) && (!lba_device_present(bus->busn_res.start, devfn, d))) {
d                 454 drivers/parisc/lba_pci.c 	LBA_CFG_ADDR_SETUP(d, tok | pos);
d                 456 drivers/parisc/lba_pci.c 	case 1: WRITE_REG8 (data, d->hba.base_addr + LBA_PCI_CFG_DATA + (pos & 3));
d                 458 drivers/parisc/lba_pci.c 	case 2: WRITE_REG16(data, d->hba.base_addr + LBA_PCI_CFG_DATA + (pos & 2));
d                 460 drivers/parisc/lba_pci.c 	case 4: WRITE_REG32(data, d->hba.base_addr + LBA_PCI_CFG_DATA);
d                 464 drivers/parisc/lba_pci.c 	lba_t32 = READ_REG32(d->hba.base_addr + LBA_PCI_CFG_ADDR);
d                 482 drivers/parisc/lba_pci.c 	struct lba_device *d = LBA_DEV(parisc_walk_tree(bus->bridge));
d                 485 drivers/parisc/lba_pci.c 	void __iomem *data_reg = d->hba.base_addr + LBA_PCI_CFG_DATA;
d                 490 drivers/parisc/lba_pci.c 	LBA_CFG_TR4_ADDR_SETUP(d, tok | pos);
d                 514 drivers/parisc/lba_pci.c 	struct lba_device *d = LBA_DEV(parisc_walk_tree(bus->bridge));
d                 515 drivers/parisc/lba_pci.c 	void __iomem *data_reg = d->hba.base_addr + LBA_PCI_CFG_DATA;
d                 524 drivers/parisc/lba_pci.c 	LBA_CFG_TR4_ADDR_SETUP(d, tok | pos);
d                 538 drivers/parisc/lba_pci.c 	lba_t32 = READ_U32(d->hba.base_addr + LBA_PCI_CFG_ADDR);
d                 888 drivers/parisc/lba_pci.c static u##size lba_astro_in##size (struct pci_hba_data *d, u16 addr) \
d                 929 drivers/parisc/lba_pci.c static void lba_astro_out##size (struct pci_hba_data *d, u16 addr, u##size val) \
d                 931 drivers/parisc/lba_pci.c 	DBG_PORT("%s(0x%p, 0x%x, 0x%x)\n", __func__, d, addr, val); \
d                 933 drivers/parisc/lba_pci.c 	if (LBA_DEV(d)->hw_rev < 3) \
d                 934 drivers/parisc/lba_pci.c 		lba_t32 = READ_U32(d->base_addr + LBA_FUNC_ID); \
d                1355 drivers/parisc/lba_pci.c lba_hw_init(struct lba_device *d)
d                1362 drivers/parisc/lba_pci.c 		d->hba.base_addr,
d                1363 drivers/parisc/lba_pci.c 		READ_REG64(d->hba.base_addr + LBA_STAT_CTL),
d                1364 drivers/parisc/lba_pci.c 		READ_REG64(d->hba.base_addr + LBA_ERROR_CONFIG),
d                1365 drivers/parisc/lba_pci.c 		READ_REG64(d->hba.base_addr + LBA_ERROR_STATUS),
d                1366 drivers/parisc/lba_pci.c 		READ_REG64(d->hba.base_addr + LBA_DMA_CTL) );
d                1368 drivers/parisc/lba_pci.c 		READ_REG64(d->hba.base_addr + LBA_ARB_MASK),
d                1369 drivers/parisc/lba_pci.c 		READ_REG64(d->hba.base_addr + LBA_ARB_PRI),
d                1370 drivers/parisc/lba_pci.c 		READ_REG64(d->hba.base_addr + LBA_ARB_MODE),
d                1371 drivers/parisc/lba_pci.c 		READ_REG64(d->hba.base_addr + LBA_ARB_MTLT) );
d                1373 drivers/parisc/lba_pci.c 		READ_REG64(d->hba.base_addr + LBA_HINT_CFG));
d                1377 drivers/parisc/lba_pci.c 		printk(" %Lx", READ_REG64(d->hba.base_addr + i));
d                1391 drivers/parisc/lba_pci.c 	bus_reset = READ_REG32(d->hba.base_addr + LBA_STAT_CTL + 4) & 1;
d                1396 drivers/parisc/lba_pci.c 	stat = READ_REG32(d->hba.base_addr + LBA_ERROR_CONFIG);
d                1400 drivers/parisc/lba_pci.c 		WRITE_REG32(stat, d->hba.base_addr + LBA_ERROR_CONFIG);
d                1418 drivers/parisc/lba_pci.c         stat = READ_REG32(d->hba.base_addr + LBA_STAT_CTL);
d                1420 drivers/parisc/lba_pci.c 	WRITE_REG32(stat | HF_ENABLE, d->hba.base_addr + LBA_STAT_CTL);
d                1422 drivers/parisc/lba_pci.c 	WRITE_REG32(stat & ~HF_ENABLE, d->hba.base_addr + LBA_STAT_CTL);
d                1433 drivers/parisc/lba_pci.c 	if (0 == READ_REG32(d->hba.base_addr + LBA_ARB_MASK)) {
d                1444 drivers/parisc/lba_pci.c 		WRITE_REG32(0x3, d->hba.base_addr + LBA_ARB_MASK);
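
The lba_pci.c entries above all revolve around one indirect-access pattern: LBA_CFG_ADDR_SETUP() writes a dword-aligned token to LBA_PCI_CFG_ADDR, and the payload then moves through LBA_PCI_CFG_DATA at a byte offset of (pos & 3) for 8-bit and (pos & 2) for 16-bit transfers. Below is a minimal userspace sketch of that address/data indirection only; the struct, the function names and the in-memory "registers" are invented for illustration, and the real driver's MMIO ordering, probing and error handling are left out.

#include <stdint.h>
#include <string.h>
#include <stdio.h>

/* Hypothetical indirect-access window: one address register plus a
 * 4-byte data register, modelled as plain memory instead of MMIO. */
struct cfg_window {
	uint32_t addr_reg;
	uint8_t  data_reg[4];
	uint8_t  backing[256];          /* pretend config space */
};

static void cfg_set_addr(struct cfg_window *w, uint32_t addr)
{
	w->addr_reg = addr & ~3u;       /* dword-align, like "(addr) & ~3" above */
	memcpy(w->data_reg, &w->backing[w->addr_reg], 4);
}

static uint32_t cfg_read(struct cfg_window *w, uint32_t addr, int size)
{
	unsigned int off = addr & (4u - (unsigned int)size);  /* pos & 3, pos & 2, or 0 */
	uint32_t v = 0;

	cfg_set_addr(w, addr);
	for (int i = 0; i < size; i++)
		v |= (uint32_t)w->data_reg[off + i] << (8 * i);
	return v;
}

static void cfg_write(struct cfg_window *w, uint32_t addr, uint32_t val, int size)
{
	unsigned int off = addr & (4u - (unsigned int)size);

	cfg_set_addr(w, addr);
	for (int i = 0; i < size; i++)
		w->data_reg[off + i] = (uint8_t)(val >> (8 * i));
	memcpy(&w->backing[w->addr_reg], w->data_reg, 4);     /* flush the dword back */
}

int main(void)
{
	struct cfg_window w = { 0 };

	cfg_write(&w, 0x42, 0xab, 1);
	printf("0x%02x\n", cfg_read(&w, 0x42, 1));            /* prints 0xab */
	return 0;
}
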
d                 180 drivers/parisc/led.c 	int d;
d                 197 drivers/parisc/led.c 		d = *cur++ - '0';
d                 198 drivers/parisc/led.c 		if (d != 0 && d != 1) goto parse_error;
d                 199 drivers/parisc/led.c 		led_heartbeat = d;
d                 203 drivers/parisc/led.c 		d = *cur++ - '0';
d                 204 drivers/parisc/led.c 		if (d != 0 && d != 1) goto parse_error;
d                 205 drivers/parisc/led.c 		led_diskio = d;
d                 209 drivers/parisc/led.c 		d = *cur++ - '0';
d                 210 drivers/parisc/led.c 		if (d != 0 && d != 1) goto parse_error;
d                 211 drivers/parisc/led.c 		led_lanrxtx = d;
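
The led.c lines parse consecutive '0'/'1' characters out of a user-supplied string into individual feature flags, rejecting anything else. A self-contained sketch of that digit-flag parsing follows; the function name and the fixed three-flag layout are assumptions for the example, not the driver's interface.

#include <stdio.h>

/* Parse a string such as "110" into three boolean flags.
 * Returns 0 on success, -1 if any character is not '0' or '1'. */
static int parse_led_flags(const char *cur, int *heartbeat, int *diskio, int *lanrxtx)
{
	int *flags[3] = { heartbeat, diskio, lanrxtx };

	for (int i = 0; i < 3; i++) {
		int d = *cur++ - '0';   /* same "subtract '0'" digit trick as above */

		if (d != 0 && d != 1)
			return -1;      /* corresponds to the goto parse_error path */
		*flags[i] = d;
	}
	return 0;
}

int main(void)
{
	int hb, disk, lan;

	if (parse_led_flags("101", &hb, &disk, &lan) == 0)
		printf("heartbeat=%d diskio=%d lanrxtx=%d\n", hb, disk, lan);
	return 0;
}
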
d                 107 drivers/parisc/sba_iommu.c #define SBA_DEV(d) ((struct sba_device *) (d))
d                 799 drivers/parisc/sba_iommu.c 	struct sba_dma_pair *d;
d                 829 drivers/parisc/sba_iommu.c 	d = &(ioc->saved[ioc->saved_cnt]);
d                 830 drivers/parisc/sba_iommu.c 	d->iova = iova;
d                 831 drivers/parisc/sba_iommu.c 	d->size = size;
d                 835 drivers/parisc/sba_iommu.c 			sba_free_range(ioc, d->iova, d->size);
d                 836 drivers/parisc/sba_iommu.c 			d--;
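
The sba_iommu.c entries show a deferred-free batch: each unmap records an (iova, size) pair in ioc->saved[], and a later pass walks the saved entries backwards handing them to sba_free_range(). The sketch below reproduces only that batching idea with invented types, an arbitrary batch size and a logging stand-in for the real range free; it is not the driver's locking or TLB handling.

#include <stddef.h>
#include <stdio.h>

#define SAVED_MAX 8                     /* illustrative batch size */

struct dma_pair { unsigned long iova; size_t size; };

struct ioc_like {
	struct dma_pair saved[SAVED_MAX];
	int saved_cnt;
};

/* Stand-in for sba_free_range(): here it only logs the range. */
static void free_range(struct ioc_like *ioc, unsigned long iova, size_t size)
{
	(void)ioc;
	printf("free iova=0x%lx size=%zu\n", iova, size);
}

/* Drain the batch, newest entry first, walking the array backwards. */
static void flush_saved(struct ioc_like *ioc)
{
	struct dma_pair *d = &ioc->saved[ioc->saved_cnt];

	while (ioc->saved_cnt-- > 0) {
		d--;
		free_range(ioc, d->iova, d->size);
	}
	ioc->saved_cnt = 0;
}

/* Record one (iova, size) pair; flush once the batch is full. */
static void defer_free(struct ioc_like *ioc, unsigned long iova, size_t size)
{
	struct dma_pair *d = &ioc->saved[ioc->saved_cnt++];

	d->iova = iova;
	d->size = size;
	if (ioc->saved_cnt == SAVED_MAX)
		flush_saved(ioc);
}

int main(void)
{
	struct ioc_like ioc = { .saved_cnt = 0 };

	for (unsigned long i = 0; i < 10; i++)
		defer_free(&ioc, 0x10000 + i * 0x1000, 0x1000);
	flush_saved(&ioc);                  /* drain whatever is still pending */
	return 0;
}
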
d                 285 drivers/parisc/superio.c static void superio_mask_irq(struct irq_data *d)
d                 287 drivers/parisc/superio.c 	unsigned int irq = d->irq;
d                 303 drivers/parisc/superio.c static void superio_unmask_irq(struct irq_data *d)
d                 305 drivers/parisc/superio.c 	unsigned int irq = d->irq;
d                  93 drivers/parport/parport_atari.c parport_atari_init_state(struct pardevice *d, struct parport_state *s)
d                 204 drivers/parport/parport_ax88796.c parport_ax88796_init_state(struct pardevice *d, struct parport_state *s)
d                 206 drivers/parport/parport_ax88796.c 	struct ax_drvdata *dd = pp_to_drv(d->port);
d                 210 drivers/parport/parport_ax88796.c 	dev_dbg(dd->dev, "init_state: %p: state=%p\n", d, s);
d                  71 drivers/parport/parport_gsc.h static inline void parport_gsc_write_data(struct parport *p, unsigned char d)
d                  74 drivers/parport/parport_gsc.h 	printk (KERN_DEBUG "parport_gsc_write_data(%p,0x%02x)\n", p, d);
d                  76 drivers/parport/parport_gsc.h 	parport_writeb(d, DATA(p));
d                 120 drivers/parport/parport_gsc.h 						 unsigned char d)
d                 128 drivers/parport/parport_gsc.h 	if (d & 0x20) {
d                 134 drivers/parport/parport_gsc.h 	__parport_gsc_frob_control (p, wm, d & wm);
d                 867 drivers/parport/parport_ip32.c static inline void parport_ip32_write_data(struct parport *p, unsigned char d)
d                 870 drivers/parport/parport_ip32.c 	writeb(d, priv->regs.data);
d                1018 drivers/parport/parport_pc.c 			int d;
d                1032 drivers/parport/parport_pc.c 			d = (cr26 & 0x0f);
d                1033 drivers/parport/parport_pc.c 			if (d == 1 || d == 3)
d                1034 drivers/parport/parport_pc.c 				s->dma = d;
d                  71 drivers/parport/parport_sunbpp.c static void parport_sunbpp_write_data(struct parport *p, unsigned char d)
d                  75 drivers/parport/parport_sunbpp.c 	sbus_writeb(d, &regs->p_dr);
d                  76 drivers/parport/parport_sunbpp.c 	dprintk((KERN_DEBUG "wrote 0x%x\n", d));
d                 181 drivers/parport/parport_sunbpp.c static void parport_sunbpp_write_control(struct parport *p, unsigned char d)
d                 188 drivers/parport/parport_sunbpp.c 	parport_sunbpp_frob_control (p, wm, d & wm);
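
Both the parport_gsc.h and parport_sunbpp.c entries route control-register updates through a frob_control(port, mask, value) helper, a masked read-modify-write in which only the bits selected by the mask change. A standalone version of that idiom, with a plain variable standing in for the hardware register:

#include <stdint.h>
#include <stdio.h>

static uint8_t ctrl_reg;                /* stand-in for the control register */

/* Change only the bits selected by wm; everything else is preserved. */
static uint8_t frob_control(uint8_t wm, uint8_t d)
{
	uint8_t old = ctrl_reg;

	ctrl_reg = (uint8_t)((old & ~wm) | (d & wm));
	return old;
}

int main(void)
{
	ctrl_reg = 0xf0;
	frob_control(0x0f, 0x05);           /* touch only the low nibble */
	printf("0x%02x\n", ctrl_reg);       /* prints 0xf5 */
	return 0;
}
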
d                  62 drivers/parport/share.c static void dead_initstate(struct pardevice *d, struct parport_state *s) { }
d                 373 drivers/parport/share.c 	int d;
d                 379 drivers/parport/share.c 	for (d = 0; d < 5; d++) {
d                 380 drivers/parport/share.c 		kfree(port->probe_info[d].class_name);
d                 381 drivers/parport/share.c 		kfree(port->probe_info[d].mfr);
d                 382 drivers/parport/share.c 		kfree(port->probe_info[d].model);
d                 383 drivers/parport/share.c 		kfree(port->probe_info[d].cmdset);
d                 384 drivers/parport/share.c 		kfree(port->probe_info[d].description);
d                 323 drivers/pci/controller/dwc/pci-keystone.c static void ks_pcie_ack_legacy_irq(struct irq_data *d)
d                 327 drivers/pci/controller/dwc/pci-keystone.c static void ks_pcie_mask_legacy_irq(struct irq_data *d)
d                 331 drivers/pci/controller/dwc/pci-keystone.c static void ks_pcie_unmask_legacy_irq(struct irq_data *d)
d                 342 drivers/pci/controller/dwc/pci-keystone.c static int ks_pcie_init_legacy_irq_map(struct irq_domain *d,
d                 348 drivers/pci/controller/dwc/pci-keystone.c 	irq_set_chip_data(irq, d->host_data);
d                  47 drivers/pci/controller/dwc/pcie-designware-host.c static void dw_msi_ack_irq(struct irq_data *d)
d                  49 drivers/pci/controller/dwc/pcie-designware-host.c 	irq_chip_ack_parent(d);
d                  52 drivers/pci/controller/dwc/pcie-designware-host.c static void dw_msi_mask_irq(struct irq_data *d)
d                  54 drivers/pci/controller/dwc/pcie-designware-host.c 	pci_msi_mask_irq(d);
d                  55 drivers/pci/controller/dwc/pcie-designware-host.c 	irq_chip_mask_parent(d);
d                  58 drivers/pci/controller/dwc/pcie-designware-host.c static void dw_msi_unmask_irq(struct irq_data *d)
d                  60 drivers/pci/controller/dwc/pcie-designware-host.c 	pci_msi_unmask_irq(d);
d                  61 drivers/pci/controller/dwc/pcie-designware-host.c 	irq_chip_unmask_parent(d);
d                 124 drivers/pci/controller/dwc/pcie-designware-host.c static void dw_pci_setup_msi_msg(struct irq_data *d, struct msi_msg *msg)
d                 126 drivers/pci/controller/dwc/pcie-designware-host.c 	struct pcie_port *pp = irq_data_get_irq_chip_data(d);
d                 135 drivers/pci/controller/dwc/pcie-designware-host.c 	msg->data = d->hwirq;
d                 138 drivers/pci/controller/dwc/pcie-designware-host.c 		(int)d->hwirq, msg->address_hi, msg->address_lo);
d                 141 drivers/pci/controller/dwc/pcie-designware-host.c static int dw_pci_msi_set_affinity(struct irq_data *d,
d                 147 drivers/pci/controller/dwc/pcie-designware-host.c static void dw_pci_bottom_mask(struct irq_data *d)
d                 149 drivers/pci/controller/dwc/pcie-designware-host.c 	struct pcie_port *pp = irq_data_get_irq_chip_data(d);
d                 155 drivers/pci/controller/dwc/pcie-designware-host.c 	ctrl = d->hwirq / MAX_MSI_IRQS_PER_CTRL;
d                 157 drivers/pci/controller/dwc/pcie-designware-host.c 	bit = d->hwirq % MAX_MSI_IRQS_PER_CTRL;
d                 166 drivers/pci/controller/dwc/pcie-designware-host.c static void dw_pci_bottom_unmask(struct irq_data *d)
d                 168 drivers/pci/controller/dwc/pcie-designware-host.c 	struct pcie_port *pp = irq_data_get_irq_chip_data(d);
d                 174 drivers/pci/controller/dwc/pcie-designware-host.c 	ctrl = d->hwirq / MAX_MSI_IRQS_PER_CTRL;
d                 176 drivers/pci/controller/dwc/pcie-designware-host.c 	bit = d->hwirq % MAX_MSI_IRQS_PER_CTRL;
d                 185 drivers/pci/controller/dwc/pcie-designware-host.c static void dw_pci_bottom_ack(struct irq_data *d)
d                 187 drivers/pci/controller/dwc/pcie-designware-host.c 	struct pcie_port *pp  = irq_data_get_irq_chip_data(d);
d                 190 drivers/pci/controller/dwc/pcie-designware-host.c 	ctrl = d->hwirq / MAX_MSI_IRQS_PER_CTRL;
d                 192 drivers/pci/controller/dwc/pcie-designware-host.c 	bit = d->hwirq % MAX_MSI_IRQS_PER_CTRL;
d                 237 drivers/pci/controller/dwc/pcie-designware-host.c 	struct irq_data *d = irq_domain_get_irq_data(domain, virq);
d                 238 drivers/pci/controller/dwc/pcie-designware-host.c 	struct pcie_port *pp = irq_data_get_irq_chip_data(d);
d                 243 drivers/pci/controller/dwc/pcie-designware-host.c 	bitmap_release_region(pp->msi_irq_in_use, d->hwirq,
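
The pcie-designware-host.c mask/unmask/ack entries split an MSI hwirq into a controller index and a bit position with ctrl = hwirq / MAX_MSI_IRQS_PER_CTRL and bit = hwirq % MAX_MSI_IRQS_PER_CTRL. The sketch below exercises just that decomposition against an array of shadow mask words; the 32-per-controller figure is read off the divide/modulo pattern above, while the register layout, names and "set bit means masked" polarity are assumptions for the example.

#include <stdint.h>
#include <stdio.h>

#define MSI_IRQS_PER_CTRL 32            /* assumed controller width */
#define NUM_CTRL           8            /* arbitrary for the example */

static uint32_t irq_mask[NUM_CTRL];     /* shadow of per-controller mask registers */

static void msi_mask(unsigned int hwirq)
{
	unsigned int ctrl = hwirq / MSI_IRQS_PER_CTRL;
	unsigned int bit  = hwirq % MSI_IRQS_PER_CTRL;

	irq_mask[ctrl] |= 1u << bit;        /* assume "bit set" means masked */
}

static void msi_unmask(unsigned int hwirq)
{
	unsigned int ctrl = hwirq / MSI_IRQS_PER_CTRL;
	unsigned int bit  = hwirq % MSI_IRQS_PER_CTRL;

	irq_mask[ctrl] &= ~(1u << bit);
}

int main(void)
{
	msi_mask(37);                       /* controller 1, bit 5 */
	printf("ctrl1 mask = 0x%08x\n", (unsigned)irq_mask[1]);
	msi_unmask(37);
	printf("ctrl1 mask = 0x%08x\n", (unsigned)irq_mask[1]);
	return 0;
}
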
d                 556 drivers/pci/controller/dwc/pcie-tegra194.c 	struct dentry *d;
d                 558 drivers/pci/controller/dwc/pcie-tegra194.c 	d = debugfs_create_devm_seqfile(pcie->dev, "aspm_state_cnt",
d                 560 drivers/pci/controller/dwc/pcie-tegra194.c 	if (IS_ERR_OR_NULL(d))
d                 170 drivers/pci/controller/dwc/pcie-uniphier.c static void uniphier_pcie_irq_ack(struct irq_data *d)
d                 172 drivers/pci/controller/dwc/pcie-uniphier.c 	struct pcie_port *pp = irq_data_get_irq_chip_data(d);
d                 179 drivers/pci/controller/dwc/pcie-uniphier.c 	val |= BIT(irqd_to_hwirq(d) + PCL_RCV_INTX_STATUS_SHIFT);
d                 183 drivers/pci/controller/dwc/pcie-uniphier.c static void uniphier_pcie_irq_mask(struct irq_data *d)
d                 185 drivers/pci/controller/dwc/pcie-uniphier.c 	struct pcie_port *pp = irq_data_get_irq_chip_data(d);
d                 192 drivers/pci/controller/dwc/pcie-uniphier.c 	val |= BIT(irqd_to_hwirq(d) + PCL_RCV_INTX_MASK_SHIFT);
d                 196 drivers/pci/controller/dwc/pcie-uniphier.c static void uniphier_pcie_irq_unmask(struct irq_data *d)
d                 198 drivers/pci/controller/dwc/pcie-uniphier.c 	struct pcie_port *pp = irq_data_get_irq_chip_data(d);
d                 205 drivers/pci/controller/dwc/pcie-uniphier.c 	val &= ~BIT(irqd_to_hwirq(d) + PCL_RCV_INTX_MASK_SHIFT);
d                 715 drivers/pci/controller/pci-aardvark.c 	struct irq_data *d = irq_domain_get_irq_data(domain, virq);
d                 719 drivers/pci/controller/pci-aardvark.c 	bitmap_clear(pcie->msi_used, d->hwirq, nr_irqs);
d                 728 drivers/pci/controller/pci-aardvark.c static void advk_pcie_irq_mask(struct irq_data *d)
d                 730 drivers/pci/controller/pci-aardvark.c 	struct advk_pcie *pcie = d->domain->host_data;
d                 731 drivers/pci/controller/pci-aardvark.c 	irq_hw_number_t hwirq = irqd_to_hwirq(d);
d                 739 drivers/pci/controller/pci-aardvark.c static void advk_pcie_irq_unmask(struct irq_data *d)
d                 741 drivers/pci/controller/pci-aardvark.c 	struct advk_pcie *pcie = d->domain->host_data;
d                 742 drivers/pci/controller/pci-aardvark.c 	irq_hw_number_t hwirq = irqd_to_hwirq(d);
d                 110 drivers/pci/controller/pci-ftpci100.c #define PCI_CONF_DEVICE(d)	(((d) & 0x1F) << 11)
d                 270 drivers/pci/controller/pci-ftpci100.c static void faraday_pci_ack_irq(struct irq_data *d)
d                 272 drivers/pci/controller/pci-ftpci100.c 	struct faraday_pci *p = irq_data_get_irq_chip_data(d);
d                 277 drivers/pci/controller/pci-ftpci100.c 	reg |= BIT(irqd_to_hwirq(d) + PCI_CTRL2_INTSTS_SHIFT);
d                 281 drivers/pci/controller/pci-ftpci100.c static void faraday_pci_mask_irq(struct irq_data *d)
d                 283 drivers/pci/controller/pci-ftpci100.c 	struct faraday_pci *p = irq_data_get_irq_chip_data(d);
d                 288 drivers/pci/controller/pci-ftpci100.c 		 | BIT(irqd_to_hwirq(d) + PCI_CTRL2_INTMASK_SHIFT));
d                 292 drivers/pci/controller/pci-ftpci100.c static void faraday_pci_unmask_irq(struct irq_data *d)
d                 294 drivers/pci/controller/pci-ftpci100.c 	struct faraday_pci *p = irq_data_get_irq_chip_data(d);
d                 299 drivers/pci/controller/pci-ftpci100.c 	reg |= BIT(irqd_to_hwirq(d) + PCI_CTRL2_INTMASK_SHIFT);
d                  48 drivers/pci/controller/pci-mvebu.c #define  PCIE_CONF_DEV(d)		(((d) & 0x1f) << 11)
d                1718 drivers/pci/controller/pci-tegra.c 	struct irq_data *d = irq_get_irq_data(irq);
d                1719 drivers/pci/controller/pci-tegra.c 	irq_hw_number_t hwirq = irqd_to_hwirq(d);
d                 224 drivers/pci/controller/pci-xgene-msi.c 	struct irq_data *d = irq_domain_get_irq_data(domain, virq);
d                 225 drivers/pci/controller/pci-xgene-msi.c 	struct xgene_msi *msi = irq_data_get_irq_chip_data(d);
d                 230 drivers/pci/controller/pci-xgene-msi.c 	hwirq = hwirq_to_canonical_hwirq(d->hwirq);
d                 122 drivers/pci/controller/pci-xgene.c 	unsigned int b, d, f;
d                 126 drivers/pci/controller/pci-xgene.c 	d = PCI_SLOT(devfn);
d                 130 drivers/pci/controller/pci-xgene.c 		rtdid_val = (b << 8) | (d << 3) | f;
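
pci-ftpci100.c, pci-mvebu.c and pci-xgene.c all pack a PCI bus/device/function triple into a hardware address or routing value, e.g. (b << 8) | (d << 3) | f and the ((d) & 0x1f) << 11 style config-address fields. Here is a self-contained round-trip of the conventional 8-bit bus / 5-bit device / 3-bit function split; the macro names below are invented, not the drivers':

#include <stdint.h>
#include <stdio.h>

/* Conventional PCI addressing: 8-bit bus, 5-bit device, 3-bit function. */
#define MAKE_BDF(b, d, f)  ((uint16_t)(((b) << 8) | (((d) & 0x1f) << 3) | ((f) & 0x7)))
#define BDF_BUS(x)         (((x) >> 8) & 0xff)
#define BDF_DEV(x)         (((x) >> 3) & 0x1f)
#define BDF_FN(x)          ((x) & 0x7)

int main(void)
{
	uint16_t bdf = MAKE_BDF(3, 0x1c, 2);

	printf("bdf=0x%04x bus=%u dev=%u fn=%u\n",
	       (unsigned)bdf, (unsigned)BDF_BUS(bdf),
	       (unsigned)BDF_DEV(bdf), (unsigned)BDF_FN(bdf));
	return 0;
}
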
d                 150 drivers/pci/controller/pcie-altera-msi.c 	struct irq_data *d = irq_domain_get_irq_data(domain, virq);
d                 151 drivers/pci/controller/pcie-altera-msi.c 	struct altera_msi *msi = irq_data_get_irq_chip_data(d);
d                 156 drivers/pci/controller/pcie-altera-msi.c 	if (!test_bit(d->hwirq, msi->used)) {
d                 158 drivers/pci/controller/pcie-altera-msi.c 			d->hwirq);
d                 160 drivers/pci/controller/pcie-altera-msi.c 		__clear_bit(d->hwirq, msi->used);
d                 162 drivers/pci/controller/pcie-altera-msi.c 		mask &= ~(1 << d->hwirq);
d                 464 drivers/pci/controller/pcie-mediatek.c 	struct irq_data *d = irq_domain_get_irq_data(domain, virq);
d                 465 drivers/pci/controller/pcie-mediatek.c 	struct mtk_pcie_port *port = irq_data_get_irq_chip_data(d);
d                 469 drivers/pci/controller/pcie-mediatek.c 	if (!test_bit(d->hwirq, port->msi_irq_in_use))
d                 471 drivers/pci/controller/pcie-mediatek.c 			d->hwirq);
d                 473 drivers/pci/controller/pcie-mediatek.c 		__clear_bit(d->hwirq, port->msi_irq_in_use);
d                 788 drivers/pci/controller/pcie-mobiveil.c 	struct irq_data *d = irq_domain_get_irq_data(domain, virq);
d                 789 drivers/pci/controller/pcie-mobiveil.c 	struct mobiveil_pcie *pcie = irq_data_get_irq_chip_data(d);
d                 794 drivers/pci/controller/pcie-mobiveil.c 	if (!test_bit(d->hwirq, msi->msi_irq_in_use))
d                 796 drivers/pci/controller/pcie-mobiveil.c 			d->hwirq);
d                 798 drivers/pci/controller/pcie-mobiveil.c 		__clear_bit(d->hwirq, msi->msi_irq_in_use);
d                 128 drivers/pci/controller/pcie-rcar.c #define PCIE_CONF_DEV(d)	(((d) & 0x1f) << 19)
d                 856 drivers/pci/controller/pcie-rcar.c 	struct irq_data *d = irq_get_irq_data(irq);
d                 858 drivers/pci/controller/pcie-rcar.c 	rcar_msi_free(msi, d->hwirq);
d                  48 drivers/pci/controller/pcie-tango.c static void tango_ack(struct irq_data *d)
d                  50 drivers/pci/controller/pcie-tango.c 	struct tango_pcie *pcie = d->chip_data;
d                  51 drivers/pci/controller/pcie-tango.c 	u32 offset = (d->hwirq / 32) * 4;
d                  52 drivers/pci/controller/pcie-tango.c 	u32 bit = BIT(d->hwirq % 32);
d                  57 drivers/pci/controller/pcie-tango.c static void update_msi_enable(struct irq_data *d, bool unmask)
d                  60 drivers/pci/controller/pcie-tango.c 	struct tango_pcie *pcie = d->chip_data;
d                  61 drivers/pci/controller/pcie-tango.c 	u32 offset = (d->hwirq / 32) * 4;
d                  62 drivers/pci/controller/pcie-tango.c 	u32 bit = BIT(d->hwirq % 32);
d                  72 drivers/pci/controller/pcie-tango.c static void tango_mask(struct irq_data *d)
d                  74 drivers/pci/controller/pcie-tango.c 	update_msi_enable(d, false);
d                  77 drivers/pci/controller/pcie-tango.c static void tango_unmask(struct irq_data *d)
d                  79 drivers/pci/controller/pcie-tango.c 	update_msi_enable(d, true);
d                  82 drivers/pci/controller/pcie-tango.c static int tango_set_affinity(struct irq_data *d, const struct cpumask *mask,
d                  88 drivers/pci/controller/pcie-tango.c static void tango_compose_msi_msg(struct irq_data *d, struct msi_msg *msg)
d                  90 drivers/pci/controller/pcie-tango.c 	struct tango_pcie *pcie = d->chip_data;
d                  93 drivers/pci/controller/pcie-tango.c 	msg->data = d->hwirq;
d                 104 drivers/pci/controller/pcie-tango.c static void msi_ack(struct irq_data *d)
d                 106 drivers/pci/controller/pcie-tango.c 	irq_chip_ack_parent(d);
d                 109 drivers/pci/controller/pcie-tango.c static void msi_mask(struct irq_data *d)
d                 111 drivers/pci/controller/pcie-tango.c 	pci_msi_mask_irq(d);
d                 112 drivers/pci/controller/pcie-tango.c 	irq_chip_mask_parent(d);
d                 115 drivers/pci/controller/pcie-tango.c static void msi_unmask(struct irq_data *d)
d                 117 drivers/pci/controller/pcie-tango.c 	pci_msi_unmask_irq(d);
d                 118 drivers/pci/controller/pcie-tango.c 	irq_chip_unmask_parent(d);
d                 160 drivers/pci/controller/pcie-tango.c 	struct irq_data *d = irq_domain_get_irq_data(dom, virq);
d                 161 drivers/pci/controller/pcie-tango.c 	struct tango_pcie *pcie = d->chip_data;
d                 164 drivers/pci/controller/pcie-tango.c 	__clear_bit(d->hwirq, pcie->used_msi);
d                 217 drivers/pci/controller/pcie-xilinx.c 	struct irq_data *d = irq_get_irq_data(irq);
d                 218 drivers/pci/controller/pcie-xilinx.c 	irq_hw_number_t hwirq = irqd_to_hwirq(d);
d                  82 drivers/pci/of.c 	struct irq_domain *d;
d                  88 drivers/pci/of.c 	d = of_msi_get_domain(&bus->dev, bus->dev.of_node, DOMAIN_BUS_PCI_MSI);
d                  89 drivers/pci/of.c 	if (d)
d                  90 drivers/pci/of.c 		return d;
d                  96 drivers/pci/of.c 	d = irq_find_matching_host(bus->dev.of_node, DOMAIN_BUS_PCI_MSI);
d                  97 drivers/pci/of.c 	if (d)
d                  98 drivers/pci/of.c 		return d;
d                 459 drivers/pci/pci.h static inline void pci_ats_init(struct pci_dev *d) { }
d                 659 drivers/pci/pci.h static inline void pci_aer_init(struct pci_dev *d) { }
d                 660 drivers/pci/pci.h static inline void pci_aer_exit(struct pci_dev *d) { }
d                 100 drivers/pci/pcie/aer.c #define ERR_COR_ID(d)			(d & 0xffff)
d                 101 drivers/pci/pcie/aer.c #define ERR_UNCOR_ID(d)			(d >> 16)
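
The aer.c macros recover two 16-bit requester IDs from one 32-bit error-source register: the corrected-error ID from the low half and the uncorrected-error ID from the high half. A quick compile-and-run check of that split; only the halving itself is taken from the macros, the sample value is arbitrary:

#include <stdint.h>
#include <stdio.h>

#define ERR_COR_ID(d)    ((d) & 0xffff) /* low 16 bits  */
#define ERR_UNCOR_ID(d)  ((d) >> 16)    /* high 16 bits */

int main(void)
{
	uint32_t source_id = 0xabcd1234;    /* arbitrary sample value */

	printf("cor=0x%04x uncor=0x%04x\n",
	       (unsigned)ERR_COR_ID(source_id), (unsigned)ERR_UNCOR_ID(source_id));
	return 0;
}
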
d                  68 drivers/pci/pcie/portdrv.h #define to_pcie_device(d) container_of(d, struct pcie_device, device)
d                 101 drivers/pci/pcie/portdrv.h #define to_service_driver(d) \
d                 102 drivers/pci/pcie/portdrv.h 	container_of(d, struct pcie_port_service_driver, driver)
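
to_pcie_device() and to_service_driver() above are container_of() wrappers: given a pointer to an embedded member, they recover the enclosing structure by subtracting the member's offset. A freestanding version of the generic idiom follows; the structures are invented stand-ins, not the kernel's struct pcie_device:

#include <stddef.h>
#include <stdio.h>

/* Generic container_of(): member pointer -> pointer to the enclosing struct. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct device_like { const char *name; };

struct pcie_device_like {
	int                port_type;
	struct device_like device;      /* embedded member */
};

#define to_pcie_device_like(d) container_of(d, struct pcie_device_like, device)

int main(void)
{
	struct pcie_device_like pdev = {
		.port_type = 4,
		.device    = { .name = "pcie0" },
	};
	struct device_like *d = &pdev.device;

	/* Recover the outer structure from the embedded member pointer. */
	printf("port_type=%d name=%s\n",
	       to_pcie_device_like(d)->port_type, to_pcie_device_like(d)->device.name);
	return 0;
}
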
d                 213 drivers/pci/pcie/portdrv_pci.c static int __init dmi_pcie_pme_disable_msi(const struct dmi_system_id *d)
d                 216 drivers/pci/pcie/portdrv_pci.c 		  d->ident);
d                 778 drivers/pci/probe.c 	struct irq_domain *d;
d                 784 drivers/pci/probe.c 	d = pci_host_bridge_of_msi_domain(bus);
d                 785 drivers/pci/probe.c 	if (!d)
d                 786 drivers/pci/probe.c 		d = pci_host_bridge_acpi_msi_domain(bus);
d                 793 drivers/pci/probe.c 	if (!d) {
d                 797 drivers/pci/probe.c 			d = irq_find_matching_fwnode(fwnode,
d                 802 drivers/pci/probe.c 	return d;
d                 807 drivers/pci/probe.c 	struct irq_domain *d;
d                 815 drivers/pci/probe.c 	for (b = bus, d = NULL; !d && !pci_is_root_bus(b); b = b->parent) {
d                 817 drivers/pci/probe.c 			d = dev_get_msi_domain(&b->self->dev);
d                 820 drivers/pci/probe.c 	if (!d)
d                 821 drivers/pci/probe.c 		d = pci_host_bridge_msi_domain(b);
d                 823 drivers/pci/probe.c 	dev_set_msi_domain(&bus->dev, d);
d                2356 drivers/pci/probe.c 	struct irq_domain *d;
d                2362 drivers/pci/probe.c 	d = dev_get_msi_domain(&dev->dev);
d                2363 drivers/pci/probe.c 	if (d)
d                2364 drivers/pci/probe.c 		return d;
d                2370 drivers/pci/probe.c 	d = pci_msi_get_device_domain(dev);
d                2371 drivers/pci/probe.c 	if (d)
d                2372 drivers/pci/probe.c 		return d;
d                2379 drivers/pci/probe.c 	struct irq_domain *d;
d                2386 drivers/pci/probe.c 	d = pci_dev_msi_domain(dev);
d                2387 drivers/pci/probe.c 	if (!d)
d                2388 drivers/pci/probe.c 		d = dev_get_msi_domain(&dev->bus->dev);
d                2390 drivers/pci/probe.c 	dev_set_msi_domain(&dev->dev, d);
d                 226 drivers/pci/quirks.c 	struct pci_dev *d = NULL;
d                 233 drivers/pci/quirks.c 	while ((d = pci_get_device(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_82371SB_0, d))) {
d                 234 drivers/pci/quirks.c 		pci_read_config_byte(d, 0x82, &dlc);
d                 236 drivers/pci/quirks.c 			pci_info(d, "PIIX3: Enabling Passive Release\n");
d                 238 drivers/pci/quirks.c 			pci_write_config_byte(d, 0x82, dlc);
d                1067 drivers/pci/quirks.c static void quirk_via_acpi(struct pci_dev *d)
d                1072 drivers/pci/quirks.c 	pci_read_config_byte(d, 0x42, &irq);
d                1075 drivers/pci/quirks.c 		d->irq = irq;
d                1909 drivers/pci/quirks.c static int dmi_disable_ioapicreroute(const struct dmi_system_id *d)
d                1912 drivers/pci/quirks.c 	pr_info("%s detected: disable boot interrupt reroute\n", d->ident);
d                 420 drivers/pci/xen-pcifront.c 	struct pci_dev *d;
d                 428 drivers/pci/xen-pcifront.c 		d = pci_get_slot(b, devfn);
d                 429 drivers/pci/xen-pcifront.c 		if (d) {
d                 431 drivers/pci/xen-pcifront.c 			pci_dev_put(d);
d                 435 drivers/pci/xen-pcifront.c 		d = pci_scan_single_device(b, devfn);
d                 436 drivers/pci/xen-pcifront.c 		if (d)
d                 245 drivers/pcmcia/i82365.c     u_char d = i365_get(sock, reg);
d                 246 drivers/pcmcia/i82365.c     d |= mask;
d                 247 drivers/pcmcia/i82365.c     i365_set(sock, reg, d);
d                 252 drivers/pcmcia/i82365.c     u_char d = i365_get(sock, reg);
d                 253 drivers/pcmcia/i82365.c     d &= ~mask;
d                 254 drivers/pcmcia/i82365.c     i365_set(sock, reg, d);
d                 259 drivers/pcmcia/i82365.c     u_char d = i365_get(sock, reg);
d                 261 drivers/pcmcia/i82365.c 	d |= mask;
d                 263 drivers/pcmcia/i82365.c 	d &= ~mask;
d                 264 drivers/pcmcia/i82365.c     i365_set(sock, reg, d);
d                 298 drivers/pcmcia/rsrc_nonstatic.c 	int i, a = 0, b = -1, d;
d                 312 drivers/pcmcia/rsrc_nonstatic.c 			d = readl(virt+i);
d                 313 drivers/pcmcia/rsrc_nonstatic.c 			a += d;
d                 314 drivers/pcmcia/rsrc_nonstatic.c 			b &= d;
d                  73 drivers/perf/arm-ccn.c #define CCN_XP_DT_CONFIG__DT_CFG__DEVICE_PMU_EVENT(d, n) (0x8 + (d) * 4 + (n))
d                 659 drivers/perf/qcom_l3_pmu.c 	L3CACHE_EVENT_ATTR(read-hit-d-side, L3_EVENT_READ_HIT_D),
d                 660 drivers/perf/qcom_l3_pmu.c 	L3CACHE_EVENT_ATTR(read-miss-d-side, L3_EVENT_READ_MISS_D),
d                 133 drivers/phy/freescale/phy-fsl-imx8-mipi-dphy.c 	u32 d[] = {1, 0};
d                 141 drivers/phy/freescale/phy-fsl-imx8-mipi-dphy.c 		d[i] += (d[i ^ 1] * whole);
d                 142 drivers/phy/freescale/phy-fsl-imx8-mipi-dphy.c 		if ((n[i] > max_n) || (d[i] > max_d)) {
d                 151 drivers/phy/freescale/phy-fsl-imx8-mipi-dphy.c 	*pdenom = d[i];
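
The phy-fsl-imx8-mipi-dphy.c lines keep two running numerator/denominator pairs, n[] and d[], and fold in the next whole part each iteration while checking max_n/max_d limits, which is the continued-fraction way of finding a best rational approximation under size constraints. The standalone sketch below implements that general technique with the usual convergent recurrence; it is not the driver's exact loop, limits or fixed-point scaling.

#include <stdint.h>
#include <stdio.h>

/*
 * Best rational approximation of num/den with numerator <= max_n and
 * denominator <= max_d, built from continued-fraction convergents:
 *   h[k] = a[k]*h[k-1] + h[k-2],   k[k] = a[k]*k[k-1] + k[k-2]
 */
static void best_ratio(uint64_t num, uint64_t den,
		       uint64_t max_n, uint64_t max_d,
		       uint64_t *out_n, uint64_t *out_d)
{
	uint64_t h_prev2 = 0, h_prev1 = 1;   /* h[-2], h[-1] */
	uint64_t k_prev2 = 1, k_prev1 = 0;   /* k[-2], k[-1] */
	uint64_t best_h = 0, best_k = 1;

	while (den != 0) {
		uint64_t a = num / den;      /* next whole part of the fraction */
		uint64_t h = a * h_prev1 + h_prev2;
		uint64_t k = a * k_prev1 + k_prev2;
		uint64_t r = num % den;

		if (h > max_n || k > max_d)
			break;               /* previous convergent is the best that fits */

		best_h = h;
		best_k = k;
		h_prev2 = h_prev1; h_prev1 = h;
		k_prev2 = k_prev1; k_prev1 = k;

		num = den;                   /* continue with the reciprocal remainder */
		den = r;
	}

	*out_n = best_h;
	*out_d = best_k;
}

int main(void)
{
	uint64_t n, d;

	best_ratio(314159265ULL, 100000000ULL, 1000, 1000, &n, &d);
	printf("%llu/%llu\n", (unsigned long long)n, (unsigned long long)d);
	return 0;
}

With the pi-like input above it settles on 355/113, the largest convergent whose numerator and denominator both stay within 1000.
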
d                 196 drivers/pinctrl/bcm/pinctrl-iproc-gpio.c static void iproc_gpio_irq_ack(struct irq_data *d)
d                 198 drivers/pinctrl/bcm/pinctrl-iproc-gpio.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 200 drivers/pinctrl/bcm/pinctrl-iproc-gpio.c 	unsigned gpio = d->hwirq;
d                 215 drivers/pinctrl/bcm/pinctrl-iproc-gpio.c static void iproc_gpio_irq_set_mask(struct irq_data *d, bool unmask)
d                 217 drivers/pinctrl/bcm/pinctrl-iproc-gpio.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 219 drivers/pinctrl/bcm/pinctrl-iproc-gpio.c 	unsigned gpio = d->hwirq;
d                 224 drivers/pinctrl/bcm/pinctrl-iproc-gpio.c static void iproc_gpio_irq_mask(struct irq_data *d)
d                 226 drivers/pinctrl/bcm/pinctrl-iproc-gpio.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 231 drivers/pinctrl/bcm/pinctrl-iproc-gpio.c 	iproc_gpio_irq_set_mask(d, false);
d                 235 drivers/pinctrl/bcm/pinctrl-iproc-gpio.c static void iproc_gpio_irq_unmask(struct irq_data *d)
d                 237 drivers/pinctrl/bcm/pinctrl-iproc-gpio.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 242 drivers/pinctrl/bcm/pinctrl-iproc-gpio.c 	iproc_gpio_irq_set_mask(d, true);
d                 246 drivers/pinctrl/bcm/pinctrl-iproc-gpio.c static int iproc_gpio_irq_set_type(struct irq_data *d, unsigned int type)
d                 248 drivers/pinctrl/bcm/pinctrl-iproc-gpio.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 250 drivers/pinctrl/bcm/pinctrl-iproc-gpio.c 	unsigned gpio = d->hwirq;
d                 170 drivers/pinctrl/bcm/pinctrl-ns2-mux.c #define NS2_PIN_DESC(p, n, b, o, s, i, pu, d)	\
d                 180 drivers/pinctrl/bcm/pinctrl-ns2-mux.c 		.drive_shift = d,		\
d                 172 drivers/pinctrl/bcm/pinctrl-nsp-gpio.c static void nsp_gpio_irq_ack(struct irq_data *d)
d                 174 drivers/pinctrl/bcm/pinctrl-nsp-gpio.c 	struct nsp_gpio *chip = irq_data_get_irq_chip_data(d);
d                 175 drivers/pinctrl/bcm/pinctrl-nsp-gpio.c 	unsigned gpio = d->hwirq;
d                 179 drivers/pinctrl/bcm/pinctrl-nsp-gpio.c 	trigger_type = irq_get_trigger_type(d->irq);
d                 190 drivers/pinctrl/bcm/pinctrl-nsp-gpio.c static void nsp_gpio_irq_set_mask(struct irq_data *d, bool unmask)
d                 192 drivers/pinctrl/bcm/pinctrl-nsp-gpio.c 	struct nsp_gpio *chip = irq_data_get_irq_chip_data(d);
d                 193 drivers/pinctrl/bcm/pinctrl-nsp-gpio.c 	unsigned gpio = d->hwirq;
d                 196 drivers/pinctrl/bcm/pinctrl-nsp-gpio.c 	trigger_type = irq_get_trigger_type(d->irq);
d                 203 drivers/pinctrl/bcm/pinctrl-nsp-gpio.c static void nsp_gpio_irq_mask(struct irq_data *d)
d                 205 drivers/pinctrl/bcm/pinctrl-nsp-gpio.c 	struct nsp_gpio *chip = irq_data_get_irq_chip_data(d);
d                 209 drivers/pinctrl/bcm/pinctrl-nsp-gpio.c 	nsp_gpio_irq_set_mask(d, false);
d                 213 drivers/pinctrl/bcm/pinctrl-nsp-gpio.c static void nsp_gpio_irq_unmask(struct irq_data *d)
d                 215 drivers/pinctrl/bcm/pinctrl-nsp-gpio.c 	struct nsp_gpio *chip = irq_data_get_irq_chip_data(d);
d                 219 drivers/pinctrl/bcm/pinctrl-nsp-gpio.c 	nsp_gpio_irq_set_mask(d, true);
d                 223 drivers/pinctrl/bcm/pinctrl-nsp-gpio.c static int nsp_gpio_irq_set_type(struct irq_data *d, unsigned int type)
d                 225 drivers/pinctrl/bcm/pinctrl-nsp-gpio.c 	struct nsp_gpio *chip = irq_data_get_irq_chip_data(d);
d                 226 drivers/pinctrl/bcm/pinctrl-nsp-gpio.c 	unsigned gpio = d->hwirq;
d                  30 drivers/pinctrl/freescale/pinctrl-mxs.c 	struct mxs_pinctrl_data *d = pinctrl_dev_get_drvdata(pctldev);
d                  32 drivers/pinctrl/freescale/pinctrl-mxs.c 	return d->soc->ngroups;
d                  38 drivers/pinctrl/freescale/pinctrl-mxs.c 	struct mxs_pinctrl_data *d = pinctrl_dev_get_drvdata(pctldev);
d                  40 drivers/pinctrl/freescale/pinctrl-mxs.c 	return d->soc->groups[group].name;
d                  46 drivers/pinctrl/freescale/pinctrl-mxs.c 	struct mxs_pinctrl_data *d = pinctrl_dev_get_drvdata(pctldev);
d                  48 drivers/pinctrl/freescale/pinctrl-mxs.c 	*pins = d->soc->groups[group].pins;
d                  49 drivers/pinctrl/freescale/pinctrl-mxs.c 	*num_pins = d->soc->groups[group].npins;
d                 164 drivers/pinctrl/freescale/pinctrl-mxs.c 	struct mxs_pinctrl_data *d = pinctrl_dev_get_drvdata(pctldev);
d                 166 drivers/pinctrl/freescale/pinctrl-mxs.c 	return d->soc->nfunctions;
d                 172 drivers/pinctrl/freescale/pinctrl-mxs.c 	struct mxs_pinctrl_data *d = pinctrl_dev_get_drvdata(pctldev);
d                 174 drivers/pinctrl/freescale/pinctrl-mxs.c 	return d->soc->functions[function].name;
d                 182 drivers/pinctrl/freescale/pinctrl-mxs.c 	struct mxs_pinctrl_data *d = pinctrl_dev_get_drvdata(pctldev);
d                 184 drivers/pinctrl/freescale/pinctrl-mxs.c 	*groups = d->soc->functions[group].groups;
d                 185 drivers/pinctrl/freescale/pinctrl-mxs.c 	*num_groups = d->soc->functions[group].ngroups;
d                 203 drivers/pinctrl/freescale/pinctrl-mxs.c 	struct mxs_pinctrl_data *d = pinctrl_dev_get_drvdata(pctldev);
d                 204 drivers/pinctrl/freescale/pinctrl-mxs.c 	struct mxs_group *g = &d->soc->groups[group];
d                 213 drivers/pinctrl/freescale/pinctrl-mxs.c 		reg = d->base + d->soc->regs->muxsel;
d                 246 drivers/pinctrl/freescale/pinctrl-mxs.c 	struct mxs_pinctrl_data *d = pinctrl_dev_get_drvdata(pctldev);
d                 248 drivers/pinctrl/freescale/pinctrl-mxs.c 	*config = d->soc->groups[group].config;
d                 257 drivers/pinctrl/freescale/pinctrl-mxs.c 	struct mxs_pinctrl_data *d = pinctrl_dev_get_drvdata(pctldev);
d                 258 drivers/pinctrl/freescale/pinctrl-mxs.c 	struct mxs_group *g = &d->soc->groups[group];
d                 278 drivers/pinctrl/freescale/pinctrl-mxs.c 			reg = d->base + d->soc->regs->drive;
d                 298 drivers/pinctrl/freescale/pinctrl-mxs.c 				reg = d->base + d->soc->regs->pull;
d                 351 drivers/pinctrl/freescale/pinctrl-mxs.c 	struct mxs_pinctrl_data *d = platform_get_drvdata(pdev);
d                 352 drivers/pinctrl/freescale/pinctrl-mxs.c 	struct mxs_group *g = &d->soc->groups[idx];
d                 396 drivers/pinctrl/freescale/pinctrl-mxs.c 				struct mxs_pinctrl_data *d)
d                 398 drivers/pinctrl/freescale/pinctrl-mxs.c 	struct mxs_pinctrl_soc_data *soc = d->soc;
d                 526 drivers/pinctrl/freescale/pinctrl-mxs.c 	struct mxs_pinctrl_data *d;
d                 529 drivers/pinctrl/freescale/pinctrl-mxs.c 	d = devm_kzalloc(&pdev->dev, sizeof(*d), GFP_KERNEL);
d                 530 drivers/pinctrl/freescale/pinctrl-mxs.c 	if (!d)
d                 533 drivers/pinctrl/freescale/pinctrl-mxs.c 	d->dev = &pdev->dev;
d                 534 drivers/pinctrl/freescale/pinctrl-mxs.c 	d->soc = soc;
d                 536 drivers/pinctrl/freescale/pinctrl-mxs.c 	d->base = of_iomap(np, 0);
d                 537 drivers/pinctrl/freescale/pinctrl-mxs.c 	if (!d->base)
d                 540 drivers/pinctrl/freescale/pinctrl-mxs.c 	mxs_pinctrl_desc.pins = d->soc->pins;
d                 541 drivers/pinctrl/freescale/pinctrl-mxs.c 	mxs_pinctrl_desc.npins = d->soc->npins;
d                 544 drivers/pinctrl/freescale/pinctrl-mxs.c 	platform_set_drvdata(pdev, d);
d                 546 drivers/pinctrl/freescale/pinctrl-mxs.c 	ret = mxs_pinctrl_probe_dt(pdev, d);
d                 552 drivers/pinctrl/freescale/pinctrl-mxs.c 	d->pctl = pinctrl_register(&mxs_pinctrl_desc, &pdev->dev, d);
d                 553 drivers/pinctrl/freescale/pinctrl-mxs.c 	if (IS_ERR(d->pctl)) {
d                 555 drivers/pinctrl/freescale/pinctrl-mxs.c 		ret = PTR_ERR(d->pctl);
d                 562 drivers/pinctrl/freescale/pinctrl-mxs.c 	iounmap(d->base);
d                1304 drivers/pinctrl/intel/pinctrl-baytrail.c static void byt_irq_ack(struct irq_data *d)
d                1306 drivers/pinctrl/intel/pinctrl-baytrail.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1308 drivers/pinctrl/intel/pinctrl-baytrail.c 	unsigned int offset = irqd_to_hwirq(d);
d                1320 drivers/pinctrl/intel/pinctrl-baytrail.c static void byt_irq_mask(struct irq_data *d)
d                1322 drivers/pinctrl/intel/pinctrl-baytrail.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1325 drivers/pinctrl/intel/pinctrl-baytrail.c 	byt_gpio_clear_triggering(vg, irqd_to_hwirq(d));
d                1328 drivers/pinctrl/intel/pinctrl-baytrail.c static void byt_irq_unmask(struct irq_data *d)
d                1330 drivers/pinctrl/intel/pinctrl-baytrail.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1332 drivers/pinctrl/intel/pinctrl-baytrail.c 	unsigned int offset = irqd_to_hwirq(d);
d                1344 drivers/pinctrl/intel/pinctrl-baytrail.c 	switch (irqd_get_trigger_type(d)) {
d                1367 drivers/pinctrl/intel/pinctrl-baytrail.c static int byt_irq_type(struct irq_data *d, unsigned int type)
d                1369 drivers/pinctrl/intel/pinctrl-baytrail.c 	struct byt_gpio *vg = gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                1370 drivers/pinctrl/intel/pinctrl-baytrail.c 	u32 offset = irqd_to_hwirq(d);
d                1396 drivers/pinctrl/intel/pinctrl-baytrail.c 		irq_set_handler_locked(d, handle_edge_irq);
d                1398 drivers/pinctrl/intel/pinctrl-baytrail.c 		irq_set_handler_locked(d, handle_level_irq);
d                1316 drivers/pinctrl/intel/pinctrl-cherryview.c static void chv_gpio_irq_ack(struct irq_data *d)
d                1318 drivers/pinctrl/intel/pinctrl-cherryview.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1320 drivers/pinctrl/intel/pinctrl-cherryview.c 	int pin = irqd_to_hwirq(d);
d                1333 drivers/pinctrl/intel/pinctrl-cherryview.c static void chv_gpio_irq_mask_unmask(struct irq_data *d, bool mask)
d                1335 drivers/pinctrl/intel/pinctrl-cherryview.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1337 drivers/pinctrl/intel/pinctrl-cherryview.c 	int pin = irqd_to_hwirq(d);
d                1357 drivers/pinctrl/intel/pinctrl-cherryview.c static void chv_gpio_irq_mask(struct irq_data *d)
d                1359 drivers/pinctrl/intel/pinctrl-cherryview.c 	chv_gpio_irq_mask_unmask(d, true);
d                1362 drivers/pinctrl/intel/pinctrl-cherryview.c static void chv_gpio_irq_unmask(struct irq_data *d)
d                1364 drivers/pinctrl/intel/pinctrl-cherryview.c 	chv_gpio_irq_mask_unmask(d, false);
d                1367 drivers/pinctrl/intel/pinctrl-cherryview.c static unsigned chv_gpio_irq_startup(struct irq_data *d)
d                1379 drivers/pinctrl/intel/pinctrl-cherryview.c 	if (irqd_get_trigger_type(d) == IRQ_TYPE_NONE) {
d                1380 drivers/pinctrl/intel/pinctrl-cherryview.c 		struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1382 drivers/pinctrl/intel/pinctrl-cherryview.c 		unsigned int pin = irqd_to_hwirq(d);
d                1399 drivers/pinctrl/intel/pinctrl-cherryview.c 			irq_set_handler_locked(d, handler);
d                1405 drivers/pinctrl/intel/pinctrl-cherryview.c 	chv_gpio_irq_unmask(d);
d                1409 drivers/pinctrl/intel/pinctrl-cherryview.c static int chv_gpio_irq_type(struct irq_data *d, unsigned int type)
d                1411 drivers/pinctrl/intel/pinctrl-cherryview.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1413 drivers/pinctrl/intel/pinctrl-cherryview.c 	unsigned int pin = irqd_to_hwirq(d);
d                1462 drivers/pinctrl/intel/pinctrl-cherryview.c 		irq_set_handler_locked(d, handle_edge_irq);
d                1464 drivers/pinctrl/intel/pinctrl-cherryview.c 		irq_set_handler_locked(d, handle_level_irq);
d                 974 drivers/pinctrl/intel/pinctrl-intel.c static void intel_gpio_irq_ack(struct irq_data *d)
d                 976 drivers/pinctrl/intel/pinctrl-intel.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 982 drivers/pinctrl/intel/pinctrl-intel.c 	pin = intel_gpio_to_pin(pctrl, irqd_to_hwirq(d), &community, &padgrp);
d                 996 drivers/pinctrl/intel/pinctrl-intel.c static void intel_gpio_irq_mask_unmask(struct irq_data *d, bool mask)
d                 998 drivers/pinctrl/intel/pinctrl-intel.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1004 drivers/pinctrl/intel/pinctrl-intel.c 	pin = intel_gpio_to_pin(pctrl, irqd_to_hwirq(d), &community, &padgrp);
d                1032 drivers/pinctrl/intel/pinctrl-intel.c static void intel_gpio_irq_mask(struct irq_data *d)
d                1034 drivers/pinctrl/intel/pinctrl-intel.c 	intel_gpio_irq_mask_unmask(d, true);
d                1037 drivers/pinctrl/intel/pinctrl-intel.c static void intel_gpio_irq_unmask(struct irq_data *d)
d                1039 drivers/pinctrl/intel/pinctrl-intel.c 	intel_gpio_irq_mask_unmask(d, false);
d                1042 drivers/pinctrl/intel/pinctrl-intel.c static int intel_gpio_irq_type(struct irq_data *d, unsigned int type)
d                1044 drivers/pinctrl/intel/pinctrl-intel.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1046 drivers/pinctrl/intel/pinctrl-intel.c 	unsigned int pin = intel_gpio_to_pin(pctrl, irqd_to_hwirq(d), NULL, NULL);
d                1090 drivers/pinctrl/intel/pinctrl-intel.c 		irq_set_handler_locked(d, handle_edge_irq);
d                1092 drivers/pinctrl/intel/pinctrl-intel.c 		irq_set_handler_locked(d, handle_level_irq);
d                1099 drivers/pinctrl/intel/pinctrl-intel.c static int intel_gpio_irq_wake(struct irq_data *d, unsigned int on)
d                1101 drivers/pinctrl/intel/pinctrl-intel.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1103 drivers/pinctrl/intel/pinctrl-intel.c 	unsigned int pin = intel_gpio_to_pin(pctrl, irqd_to_hwirq(d), NULL, NULL);
d                 109 drivers/pinctrl/mediatek/mtk-eint.c static void mtk_eint_mask(struct irq_data *d)
d                 111 drivers/pinctrl/mediatek/mtk-eint.c 	struct mtk_eint *eint = irq_data_get_irq_chip_data(d);
d                 112 drivers/pinctrl/mediatek/mtk-eint.c 	u32 mask = BIT(d->hwirq & 0x1f);
d                 113 drivers/pinctrl/mediatek/mtk-eint.c 	void __iomem *reg = mtk_eint_get_offset(eint, d->hwirq,
d                 116 drivers/pinctrl/mediatek/mtk-eint.c 	eint->cur_mask[d->hwirq >> 5] &= ~mask;
d                 121 drivers/pinctrl/mediatek/mtk-eint.c static void mtk_eint_unmask(struct irq_data *d)
d                 123 drivers/pinctrl/mediatek/mtk-eint.c 	struct mtk_eint *eint = irq_data_get_irq_chip_data(d);
d                 124 drivers/pinctrl/mediatek/mtk-eint.c 	u32 mask = BIT(d->hwirq & 0x1f);
d                 125 drivers/pinctrl/mediatek/mtk-eint.c 	void __iomem *reg = mtk_eint_get_offset(eint, d->hwirq,
d                 128 drivers/pinctrl/mediatek/mtk-eint.c 	eint->cur_mask[d->hwirq >> 5] |= mask;
d                 132 drivers/pinctrl/mediatek/mtk-eint.c 	if (eint->dual_edge[d->hwirq])
d                 133 drivers/pinctrl/mediatek/mtk-eint.c 		mtk_eint_flip_edge(eint, d->hwirq);
d                 146 drivers/pinctrl/mediatek/mtk-eint.c static void mtk_eint_ack(struct irq_data *d)
d                 148 drivers/pinctrl/mediatek/mtk-eint.c 	struct mtk_eint *eint = irq_data_get_irq_chip_data(d);
d                 149 drivers/pinctrl/mediatek/mtk-eint.c 	u32 mask = BIT(d->hwirq & 0x1f);
d                 150 drivers/pinctrl/mediatek/mtk-eint.c 	void __iomem *reg = mtk_eint_get_offset(eint, d->hwirq,
d                 156 drivers/pinctrl/mediatek/mtk-eint.c static int mtk_eint_set_type(struct irq_data *d, unsigned int type)
d                 158 drivers/pinctrl/mediatek/mtk-eint.c 	struct mtk_eint *eint = irq_data_get_irq_chip_data(d);
d                 159 drivers/pinctrl/mediatek/mtk-eint.c 	u32 mask = BIT(d->hwirq & 0x1f);
d                 166 drivers/pinctrl/mediatek/mtk-eint.c 			d->irq, d->hwirq, type);
d                 171 drivers/pinctrl/mediatek/mtk-eint.c 		eint->dual_edge[d->hwirq] = 1;
d                 173 drivers/pinctrl/mediatek/mtk-eint.c 		eint->dual_edge[d->hwirq] = 0;
d                 176 drivers/pinctrl/mediatek/mtk-eint.c 		reg = mtk_eint_get_offset(eint, d->hwirq, eint->regs->pol_clr);
d                 179 drivers/pinctrl/mediatek/mtk-eint.c 		reg = mtk_eint_get_offset(eint, d->hwirq, eint->regs->pol_set);
d                 184 drivers/pinctrl/mediatek/mtk-eint.c 		reg = mtk_eint_get_offset(eint, d->hwirq, eint->regs->sens_clr);
d                 187 drivers/pinctrl/mediatek/mtk-eint.c 		reg = mtk_eint_get_offset(eint, d->hwirq, eint->regs->sens_set);
d                 191 drivers/pinctrl/mediatek/mtk-eint.c 	if (eint->dual_edge[d->hwirq])
d                 192 drivers/pinctrl/mediatek/mtk-eint.c 		mtk_eint_flip_edge(eint, d->hwirq);
d                 197 drivers/pinctrl/mediatek/mtk-eint.c static int mtk_eint_irq_set_wake(struct irq_data *d, unsigned int on)
d                 199 drivers/pinctrl/mediatek/mtk-eint.c 	struct mtk_eint *eint = irq_data_get_irq_chip_data(d);
d                 200 drivers/pinctrl/mediatek/mtk-eint.c 	int shift = d->hwirq & 0x1f;
d                 201 drivers/pinctrl/mediatek/mtk-eint.c 	int reg = d->hwirq >> 5;
d                 224 drivers/pinctrl/mediatek/mtk-eint.c static int mtk_eint_irq_request_resources(struct irq_data *d)
d                 226 drivers/pinctrl/mediatek/mtk-eint.c 	struct mtk_eint *eint = irq_data_get_irq_chip_data(d);
d                 231 drivers/pinctrl/mediatek/mtk-eint.c 	err = eint->gpio_xlate->get_gpio_n(eint->pctl, d->hwirq,
d                 241 drivers/pinctrl/mediatek/mtk-eint.c 			irqd_to_hwirq(d));
d                 245 drivers/pinctrl/mediatek/mtk-eint.c 	err = eint->gpio_xlate->set_gpio_as_eint(eint->pctl, d->hwirq);
d                 254 drivers/pinctrl/mediatek/mtk-eint.c static void mtk_eint_irq_release_resources(struct irq_data *d)
d                 256 drivers/pinctrl/mediatek/mtk-eint.c 	struct mtk_eint *eint = irq_data_get_irq_chip_data(d);
d                 260 drivers/pinctrl/mediatek/mtk-eint.c 	eint->gpio_xlate->get_gpio_n(eint->pctl, d->hwirq, &gpio_n,
d                 398 drivers/pinctrl/mediatek/mtk-eint.c 	struct irq_data *d;
d                 402 drivers/pinctrl/mediatek/mtk-eint.c 	d = irq_get_irq_data(virq);
d                 419 drivers/pinctrl/mediatek/mtk-eint.c 		mtk_eint_mask(d);
d                 439 drivers/pinctrl/mediatek/mtk-eint.c 		mtk_eint_unmask(d);
d                 375 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 					  struct irq_data *d)
d                 377 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	int offset = irqd_to_hwirq(d);
d                 513 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c static void armada_37xx_irq_ack(struct irq_data *d)
d                 515 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                 520 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	armada_37xx_irq_update_reg(&reg, d);
d                 522 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	writel(d->mask, info->base + reg);
d                 526 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c static void armada_37xx_irq_mask(struct irq_data *d)
d                 528 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                 533 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	armada_37xx_irq_update_reg(&reg, d);
d                 536 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	writel(val & ~d->mask, info->base + reg);
d                 540 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c static void armada_37xx_irq_unmask(struct irq_data *d)
d                 542 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                 547 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	armada_37xx_irq_update_reg(&reg, d);
d                 550 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	writel(val | d->mask, info->base + reg);
d                 554 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c static int armada_37xx_irq_set_wake(struct irq_data *d, unsigned int on)
d                 556 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                 561 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	armada_37xx_irq_update_reg(&reg, d);
d                 565 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 		val |= (BIT(d->hwirq % GPIO_PER_REG));
d                 567 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 		val &= ~(BIT(d->hwirq % GPIO_PER_REG));
d                 574 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c static int armada_37xx_irq_set_type(struct irq_data *d, unsigned int type)
d                 576 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                 582 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	armada_37xx_irq_update_reg(&reg, d);
d                 586 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 		val &= ~(BIT(d->hwirq % GPIO_PER_REG));
d                 589 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 		val |= (BIT(d->hwirq % GPIO_PER_REG));
d                 594 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 		armada_37xx_irq_update_reg(&in_reg, d);
d                 598 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 		if (in_val & BIT(d->hwirq % GPIO_PER_REG))
d                 599 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 			val |= BIT(d->hwirq % GPIO_PER_REG);	/* falling */
d                 601 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 			val &= ~(BIT(d->hwirq % GPIO_PER_REG));	/* rising */
d                 654 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	struct irq_domain *d = gc->irq.domain;
d                 658 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	for (i = 0; i <= d->revmap_size / GPIO_PER_REG; i++) {
d                 669 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 			u32 virq = irq_find_mapping(d, hwirq +
d                 704 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c static unsigned int armada_37xx_irq_startup(struct irq_data *d)
d                 712 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	d->mask = BIT(d->hwirq % GPIO_PER_REG);
d                 714 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	armada_37xx_irq_unmask(d);
d                1042 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	struct irq_domain *d;
d                1060 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 	d = gc->irq.domain;
d                1078 drivers/pinctrl/mvebu/pinctrl-armada-37xx.c 		virq = irq_find_mapping(d, i);
d                 414 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	struct irq_data *d = irq_get_irq_data(irq);
d                 419 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	if (!d || !irqd_irq_disabled(d))
d                 612 drivers/pinctrl/nomadik/pinctrl-nomadik.c static void nmk_gpio_irq_ack(struct irq_data *d)
d                 614 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                 618 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	writel(BIT(d->hwirq), nmk_chip->addr + NMK_GPIO_IC);
d                 681 drivers/pinctrl/nomadik/pinctrl-nomadik.c static int nmk_gpio_irq_maskunmask(struct irq_data *d, bool enable)
d                 686 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	nmk_chip = irq_data_get_irq_chip_data(d);
d                 694 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	__nmk_gpio_irq_modify(nmk_chip, d->hwirq, NORMAL, enable);
d                 696 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	if (!(nmk_chip->real_wake & BIT(d->hwirq)))
d                 697 drivers/pinctrl/nomadik/pinctrl-nomadik.c 		__nmk_gpio_set_wake(nmk_chip, d->hwirq, enable);
d                 706 drivers/pinctrl/nomadik/pinctrl-nomadik.c static void nmk_gpio_irq_mask(struct irq_data *d)
d                 708 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	nmk_gpio_irq_maskunmask(d, false);
d                 711 drivers/pinctrl/nomadik/pinctrl-nomadik.c static void nmk_gpio_irq_unmask(struct irq_data *d)
d                 713 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	nmk_gpio_irq_maskunmask(d, true);
d                 716 drivers/pinctrl/nomadik/pinctrl-nomadik.c static int nmk_gpio_irq_set_wake(struct irq_data *d, unsigned int on)
d                 721 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	nmk_chip = irq_data_get_irq_chip_data(d);
d                 729 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	if (irqd_irq_disabled(d))
d                 730 drivers/pinctrl/nomadik/pinctrl-nomadik.c 		__nmk_gpio_set_wake(nmk_chip, d->hwirq, on);
d                 733 drivers/pinctrl/nomadik/pinctrl-nomadik.c 		nmk_chip->real_wake |= BIT(d->hwirq);
d                 735 drivers/pinctrl/nomadik/pinctrl-nomadik.c 		nmk_chip->real_wake &= ~BIT(d->hwirq);
d                 744 drivers/pinctrl/nomadik/pinctrl-nomadik.c static int nmk_gpio_irq_set_type(struct irq_data *d, unsigned int type)
d                 746 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	bool enabled = !irqd_irq_disabled(d);
d                 747 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	bool wake = irqd_is_wakeup_set(d);
d                 751 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	nmk_chip = irq_data_get_irq_chip_data(d);
d                 763 drivers/pinctrl/nomadik/pinctrl-nomadik.c 		__nmk_gpio_irq_modify(nmk_chip, d->hwirq, NORMAL, false);
d                 766 drivers/pinctrl/nomadik/pinctrl-nomadik.c 		__nmk_gpio_irq_modify(nmk_chip, d->hwirq, WAKE, false);
d                 768 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	nmk_chip->edge_rising &= ~BIT(d->hwirq);
d                 770 drivers/pinctrl/nomadik/pinctrl-nomadik.c 		nmk_chip->edge_rising |= BIT(d->hwirq);
d                 772 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	nmk_chip->edge_falling &= ~BIT(d->hwirq);
d                 774 drivers/pinctrl/nomadik/pinctrl-nomadik.c 		nmk_chip->edge_falling |= BIT(d->hwirq);
d                 777 drivers/pinctrl/nomadik/pinctrl-nomadik.c 		__nmk_gpio_irq_modify(nmk_chip, d->hwirq, NORMAL, true);
d                 780 drivers/pinctrl/nomadik/pinctrl-nomadik.c 		__nmk_gpio_irq_modify(nmk_chip, d->hwirq, WAKE, true);
d                 788 drivers/pinctrl/nomadik/pinctrl-nomadik.c static unsigned int nmk_gpio_irq_startup(struct irq_data *d)
d                 790 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	struct nmk_gpio_chip *nmk_chip = irq_data_get_irq_chip_data(d);
d                 793 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	nmk_gpio_irq_unmask(d);
d                 797 drivers/pinctrl/nomadik/pinctrl-nomadik.c static void nmk_gpio_irq_shutdown(struct irq_data *d)
d                 799 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	struct nmk_gpio_chip *nmk_chip = irq_data_get_irq_chip_data(d);
d                 801 drivers/pinctrl/nomadik/pinctrl-nomadik.c 	nmk_gpio_irq_mask(d);
d                 238 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c static int npcmgpio_set_irq_type(struct irq_data *d, unsigned int type)
d                 241 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 		gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                 242 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 	unsigned int gpio = BIT(d->hwirq);
d                 244 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 	dev_dbg(d->chip->parent_device, "setirqtype: %u.%u = %u\n", gpio,
d                 245 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 		d->irq, type);
d                 248 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 		dev_dbg(d->chip->parent_device, "edge.rising\n");
d                 253 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 		dev_dbg(d->chip->parent_device, "edge.falling\n");
d                 258 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 		dev_dbg(d->chip->parent_device, "edge.both\n");
d                 262 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 		dev_dbg(d->chip->parent_device, "level.low\n");
d                 266 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 		dev_dbg(d->chip->parent_device, "level.high\n");
d                 270 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 		dev_dbg(d->chip->parent_device, "invalid irq type\n");
d                 276 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 		irq_set_handler_locked(d, handle_level_irq);
d                 280 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 286 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c static void npcmgpio_irq_ack(struct irq_data *d)
d                 289 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 		gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                 290 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 	unsigned int gpio = d->hwirq;
d                 292 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 	dev_dbg(d->chip->parent_device, "irq_ack: %u.%u\n", gpio, d->irq);
d                 297 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c static void npcmgpio_irq_mask(struct irq_data *d)
d                 300 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 		gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                 301 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 	unsigned int gpio = d->hwirq;
d                 304 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 	dev_dbg(d->chip->parent_device, "irq_mask: %u.%u\n", gpio, d->irq);
d                 309 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c static void npcmgpio_irq_unmask(struct irq_data *d)
d                 312 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 		gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                 313 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 	unsigned int gpio = d->hwirq;
d                 316 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 	dev_dbg(d->chip->parent_device, "irq_unmask: %u.%u\n", gpio, d->irq);
d                 320 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c static unsigned int npcmgpio_irq_startup(struct irq_data *d)
d                 322 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 323 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 	unsigned int gpio = d->hwirq;
d                 326 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 	dev_dbg(d->chip->parent_device, "startup: %u.%u\n", gpio, d->irq);
d                 328 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 	npcmgpio_irq_ack(d);
d                 329 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 	npcmgpio_irq_unmask(d);
d                 897 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c #define NPCM7XX_PINCFG(a, b, c, d, e, f, g, h, i, j, k) \
d                 898 drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c 	[a] { .fn0 = fn_ ## b, .reg0 = NPCM7XX_GCR_ ## c, .bit0 = d, \
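
The pinctrl-npcm7xx hits above follow the usual gpiolib irqchip shape: each callback receives a struct irq_data *d, recovers the driver state with gpiochip_get_data(irq_data_get_irq_chip_data(d)), keys register access off d->hwirq, and set_type() switches between handle_edge_irq and handle_level_irq. A minimal, hedged sketch of that shape follows; the demo_gpio type and DEMO_* register offsets are placeholders for illustration, not the driver's real layout.

#include <linux/bits.h>
#include <linux/errno.h>
#include <linux/gpio/driver.h>
#include <linux/io.h>
#include <linux/irq.h>

/* Hypothetical per-controller state; the real driver keeps more fields. */
struct demo_gpio {
        struct gpio_chip gc;
        void __iomem *base;
};

#define DEMO_EVENT_PENDING      0x00    /* assumed write-1-to-clear register */
#define DEMO_EVENT_ENABLE       0x04    /* assumed enable-mask register */

static void demo_gpio_irq_ack(struct irq_data *d)
{
        struct demo_gpio *bank =
                gpiochip_get_data(irq_data_get_irq_chip_data(d));

        /* Clear the pending bit that belongs to this hwirq. */
        writel(BIT(d->hwirq), bank->base + DEMO_EVENT_PENDING);
}

static int demo_gpio_irq_set_type(struct irq_data *d, unsigned int type)
{
        /* Edge triggers get the edge flow handler, level triggers the level one. */
        if (type & IRQ_TYPE_EDGE_BOTH)
                irq_set_handler_locked(d, handle_edge_irq);
        else if (type & IRQ_TYPE_LEVEL_MASK)
                irq_set_handler_locked(d, handle_level_irq);
        else
                return -EINVAL;

        /* Programming the actual trigger-polarity registers is omitted here. */
        return 0;
}
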
d                 341 drivers/pinctrl/pinctrl-amd.c static void amd_gpio_irq_enable(struct irq_data *d)
d                 345 drivers/pinctrl/pinctrl-amd.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 349 drivers/pinctrl/pinctrl-amd.c 	pin_reg = readl(gpio_dev->base + (d->hwirq)*4);
d                 352 drivers/pinctrl/pinctrl-amd.c 	writel(pin_reg, gpio_dev->base + (d->hwirq)*4);
d                 356 drivers/pinctrl/pinctrl-amd.c static void amd_gpio_irq_disable(struct irq_data *d)
d                 360 drivers/pinctrl/pinctrl-amd.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 364 drivers/pinctrl/pinctrl-amd.c 	pin_reg = readl(gpio_dev->base + (d->hwirq)*4);
d                 367 drivers/pinctrl/pinctrl-amd.c 	writel(pin_reg, gpio_dev->base + (d->hwirq)*4);
d                 371 drivers/pinctrl/pinctrl-amd.c static void amd_gpio_irq_mask(struct irq_data *d)
d                 375 drivers/pinctrl/pinctrl-amd.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 379 drivers/pinctrl/pinctrl-amd.c 	pin_reg = readl(gpio_dev->base + (d->hwirq)*4);
d                 381 drivers/pinctrl/pinctrl-amd.c 	writel(pin_reg, gpio_dev->base + (d->hwirq)*4);
d                 385 drivers/pinctrl/pinctrl-amd.c static void amd_gpio_irq_unmask(struct irq_data *d)
d                 389 drivers/pinctrl/pinctrl-amd.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 393 drivers/pinctrl/pinctrl-amd.c 	pin_reg = readl(gpio_dev->base + (d->hwirq)*4);
d                 395 drivers/pinctrl/pinctrl-amd.c 	writel(pin_reg, gpio_dev->base + (d->hwirq)*4);
d                 399 drivers/pinctrl/pinctrl-amd.c static void amd_gpio_irq_eoi(struct irq_data *d)
d                 403 drivers/pinctrl/pinctrl-amd.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 413 drivers/pinctrl/pinctrl-amd.c static int amd_gpio_irq_set_type(struct irq_data *d, unsigned int type)
d                 418 drivers/pinctrl/pinctrl-amd.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 422 drivers/pinctrl/pinctrl-amd.c 	pin_reg = readl(gpio_dev->base + (d->hwirq)*4);
d                 429 drivers/pinctrl/pinctrl-amd.c 	irq_flags = irq_get_trigger_type(d->irq);
d                 439 drivers/pinctrl/pinctrl-amd.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 447 drivers/pinctrl/pinctrl-amd.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 455 drivers/pinctrl/pinctrl-amd.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 464 drivers/pinctrl/pinctrl-amd.c 		irq_set_handler_locked(d, handle_level_irq);
d                 473 drivers/pinctrl/pinctrl-amd.c 		irq_set_handler_locked(d, handle_level_irq);
d                 504 drivers/pinctrl/pinctrl-amd.c 	writel(pin_reg_irq_en, gpio_dev->base + (d->hwirq)*4);
d                 505 drivers/pinctrl/pinctrl-amd.c 	while ((readl(gpio_dev->base + (d->hwirq)*4) & mask) != mask)
d                 507 drivers/pinctrl/pinctrl-amd.c 	writel(pin_reg, gpio_dev->base + (d->hwirq)*4);
d                 513 drivers/pinctrl/pinctrl-amd.c static void amd_irq_ack(struct irq_data *d)
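
pinctrl-amd, as the lines above show, keeps one 32-bit configuration word per pin at base + hwirq * 4 and flips interrupt bits with a locked read-modify-write. Below is a hedged sketch of that access pattern; the bit position and structure layout are assumptions for illustration, not the driver's real definitions.

#include <linux/bits.h>
#include <linux/gpio/driver.h>
#include <linux/io.h>
#include <linux/irq.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct demo_amd_like {
        raw_spinlock_t lock;
        void __iomem *base;
};

#define DEMO_IRQ_ENABLE_BIT     11      /* assumed bit position */

static void demo_irq_unmask(struct irq_data *d)
{
        struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
        struct demo_amd_like *gpio_dev = gpiochip_get_data(gc);
        unsigned long flags;
        u32 pin_reg;

        raw_spin_lock_irqsave(&gpio_dev->lock, flags);
        /* One register per pin: read, set the enable bit, write back. */
        pin_reg = readl(gpio_dev->base + d->hwirq * 4);
        pin_reg |= BIT(DEMO_IRQ_ENABLE_BIT);
        writel(pin_reg, gpio_dev->base + d->hwirq * 4);
        raw_spin_unlock_irqrestore(&gpio_dev->lock, flags);
}
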
d                 158 drivers/pinctrl/pinctrl-at91-pio4.c static void atmel_gpio_irq_ack(struct irq_data *d)
d                 166 drivers/pinctrl/pinctrl-at91-pio4.c static int atmel_gpio_irq_set_type(struct irq_data *d, unsigned type)
d                 168 drivers/pinctrl/pinctrl-at91-pio4.c 	struct atmel_pioctrl *atmel_pioctrl = irq_data_get_irq_chip_data(d);
d                 169 drivers/pinctrl/pinctrl-at91-pio4.c 	struct atmel_pin *pin = atmel_pioctrl->pins[d->hwirq];
d                 179 drivers/pinctrl/pinctrl-at91-pio4.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 183 drivers/pinctrl/pinctrl-at91-pio4.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 187 drivers/pinctrl/pinctrl-at91-pio4.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 191 drivers/pinctrl/pinctrl-at91-pio4.c 		irq_set_handler_locked(d, handle_level_irq);
d                 195 drivers/pinctrl/pinctrl-at91-pio4.c 		irq_set_handler_locked(d, handle_level_irq);
d                 208 drivers/pinctrl/pinctrl-at91-pio4.c static void atmel_gpio_irq_mask(struct irq_data *d)
d                 210 drivers/pinctrl/pinctrl-at91-pio4.c 	struct atmel_pioctrl *atmel_pioctrl = irq_data_get_irq_chip_data(d);
d                 211 drivers/pinctrl/pinctrl-at91-pio4.c 	struct atmel_pin *pin = atmel_pioctrl->pins[d->hwirq];
d                 217 drivers/pinctrl/pinctrl-at91-pio4.c static void atmel_gpio_irq_unmask(struct irq_data *d)
d                 219 drivers/pinctrl/pinctrl-at91-pio4.c 	struct atmel_pioctrl *atmel_pioctrl = irq_data_get_irq_chip_data(d);
d                 220 drivers/pinctrl/pinctrl-at91-pio4.c 	struct atmel_pin *pin = atmel_pioctrl->pins[d->hwirq];
d                 228 drivers/pinctrl/pinctrl-at91-pio4.c static int atmel_gpio_irq_set_wake(struct irq_data *d, unsigned int on)
d                 230 drivers/pinctrl/pinctrl-at91-pio4.c 	struct atmel_pioctrl *atmel_pioctrl = irq_data_get_irq_chip_data(d);
d                 231 drivers/pinctrl/pinctrl-at91-pio4.c 	int bank = ATMEL_PIO_BANK(d->hwirq);
d                 232 drivers/pinctrl/pinctrl-at91-pio4.c 	int line = ATMEL_PIO_LINE(d->hwirq);
d                 186 drivers/pinctrl/pinctrl-at91.c 	int (*irq_type)(struct irq_data *d, unsigned type);
d                 189 drivers/pinctrl/pinctrl-at91.c static int gpio_irq_type(struct irq_data *d, unsigned type);
d                 190 drivers/pinctrl/pinctrl-at91.c static int alt_gpio_irq_type(struct irq_data *d, unsigned type);
d                1529 drivers/pinctrl/pinctrl-at91.c static void gpio_irq_mask(struct irq_data *d)
d                1531 drivers/pinctrl/pinctrl-at91.c 	struct at91_gpio_chip *at91_gpio = irq_data_get_irq_chip_data(d);
d                1533 drivers/pinctrl/pinctrl-at91.c 	unsigned	mask = 1 << d->hwirq;
d                1539 drivers/pinctrl/pinctrl-at91.c static void gpio_irq_unmask(struct irq_data *d)
d                1541 drivers/pinctrl/pinctrl-at91.c 	struct at91_gpio_chip *at91_gpio = irq_data_get_irq_chip_data(d);
d                1543 drivers/pinctrl/pinctrl-at91.c 	unsigned	mask = 1 << d->hwirq;
d                1549 drivers/pinctrl/pinctrl-at91.c static int gpio_irq_type(struct irq_data *d, unsigned type)
d                1561 drivers/pinctrl/pinctrl-at91.c static int alt_gpio_irq_type(struct irq_data *d, unsigned type)
d                1563 drivers/pinctrl/pinctrl-at91.c 	struct at91_gpio_chip *at91_gpio = irq_data_get_irq_chip_data(d);
d                1565 drivers/pinctrl/pinctrl-at91.c 	unsigned	mask = 1 << d->hwirq;
d                1569 drivers/pinctrl/pinctrl-at91.c 		irq_set_handler_locked(d, handle_simple_irq);
d                1574 drivers/pinctrl/pinctrl-at91.c 		irq_set_handler_locked(d, handle_simple_irq);
d                1579 drivers/pinctrl/pinctrl-at91.c 		irq_set_handler_locked(d, handle_level_irq);
d                1584 drivers/pinctrl/pinctrl-at91.c 		irq_set_handler_locked(d, handle_level_irq);
d                1593 drivers/pinctrl/pinctrl-at91.c 		irq_set_handler_locked(d, handle_simple_irq);
d                1598 drivers/pinctrl/pinctrl-at91.c 		pr_warn("AT91: No type for GPIO irq offset %d\n", d->irq);
d                1608 drivers/pinctrl/pinctrl-at91.c static void gpio_irq_ack(struct irq_data *d)
d                1618 drivers/pinctrl/pinctrl-at91.c static int gpio_irq_set_wake(struct irq_data *d, unsigned state)
d                1620 drivers/pinctrl/pinctrl-at91.c 	struct at91_gpio_chip *at91_gpio = irq_data_get_irq_chip_data(d);
d                1622 drivers/pinctrl/pinctrl-at91.c 	unsigned mask = 1 << d->hwirq;
d                1724 drivers/pinctrl/pinctrl-at91.c 	struct irq_data		*d = irq_get_irq_data(at91_gpio->pioc_virq);
d                1732 drivers/pinctrl/pinctrl-at91.c 	at91_gpio->pioc_hwirq = irqd_to_hwirq(d);
d                 420 drivers/pinctrl/pinctrl-coh901.c static int u300_gpio_irq_type(struct irq_data *d, unsigned trigger)
d                 422 drivers/pinctrl/pinctrl-coh901.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                 424 drivers/pinctrl/pinctrl-coh901.c 	struct u300_gpio_port *port = &gpio->ports[d->hwirq >> 3];
d                 425 drivers/pinctrl/pinctrl-coh901.c 	int offset = d->hwirq;
d                 457 drivers/pinctrl/pinctrl-coh901.c static void u300_gpio_irq_enable(struct irq_data *d)
d                 459 drivers/pinctrl/pinctrl-coh901.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                 461 drivers/pinctrl/pinctrl-coh901.c 	struct u300_gpio_port *port = &gpio->ports[d->hwirq >> 3];
d                 462 drivers/pinctrl/pinctrl-coh901.c 	int offset = d->hwirq;
d                 467 drivers/pinctrl/pinctrl-coh901.c 		 d->hwirq, port->name, offset);
d                 474 drivers/pinctrl/pinctrl-coh901.c static void u300_gpio_irq_disable(struct irq_data *d)
d                 476 drivers/pinctrl/pinctrl-coh901.c 	struct gpio_chip *chip = irq_data_get_irq_chip_data(d);
d                 478 drivers/pinctrl/pinctrl-coh901.c 	int offset = d->hwirq;
d                  93 drivers/pinctrl/pinctrl-falcon.c static void lantiq_load_pin_desc(struct pinctrl_pin_desc *d, int bank, int len)
d                  99 drivers/pinctrl/pinctrl-falcon.c 		d[i].number = base + i;
d                 100 drivers/pinctrl/pinctrl-falcon.c 		d[i].name = kasprintf(GFP_KERNEL, "io%d", base + i);
d                 367 drivers/pinctrl/pinctrl-lpc18xx.c LPC_P(d,0,      R, CTOUT,   EMC,      R,   GPIO,      R,      R,  SGPIO,      0, ND);
d                 368 drivers/pinctrl/pinctrl-lpc18xx.c LPC_P(d,1,      R,     R,   EMC,      R,   GPIO,  SDMMC,      R,  SGPIO,      0, ND);
d                 369 drivers/pinctrl/pinctrl-lpc18xx.c LPC_P(d,2,      R, CTOUT,   EMC,      R,   GPIO,      R,      R,  SGPIO,      0, ND);
d                 370 drivers/pinctrl/pinctrl-lpc18xx.c LPC_P(d,3,      R, CTOUT,   EMC,      R,   GPIO,      R,      R,  SGPIO,      0, ND);
d                 371 drivers/pinctrl/pinctrl-lpc18xx.c LPC_P(d,4,      R, CTOUT,   EMC,      R,   GPIO,      R,      R,  SGPIO,      0, ND);
d                 372 drivers/pinctrl/pinctrl-lpc18xx.c LPC_P(d,5,      R, CTOUT,   EMC,      R,   GPIO,      R,      R,  SGPIO,      0, ND);
d                 373 drivers/pinctrl/pinctrl-lpc18xx.c LPC_P(d,6,      R, CTOUT,   EMC,      R,   GPIO,      R,      R,  SGPIO,      0, ND);
d                 374 drivers/pinctrl/pinctrl-lpc18xx.c LPC_P(d,7,      R,  CTIN,   EMC,      R,   GPIO,      R,      R,  SGPIO,      0, ND);
d                 375 drivers/pinctrl/pinctrl-lpc18xx.c LPC_P(d,8,      R,  CTIN,   EMC,      R,   GPIO,      R,      R,  SGPIO,      0, ND);
d                 376 drivers/pinctrl/pinctrl-lpc18xx.c LPC_P(d,9,      R, CTOUT,   EMC,      R,   GPIO,      R,      R,  SGPIO,      0, ND);
d                 377 drivers/pinctrl/pinctrl-lpc18xx.c LPC_P(d,10,     R,  CTIN,   EMC,      R,   GPIO,      R,      R,      R,      0, ND);
d                 378 drivers/pinctrl/pinctrl-lpc18xx.c LPC_P(d,11,     R,     R,   EMC,      R,   GPIO,   USB1,  CTOUT,      R,      0, ND);
d                 379 drivers/pinctrl/pinctrl-lpc18xx.c LPC_P(d,12,     R,     R,   EMC,      R,   GPIO,      R,  CTOUT,      R,      0, ND);
d                 380 drivers/pinctrl/pinctrl-lpc18xx.c LPC_P(d,13,     R,  CTIN,   EMC,      R,   GPIO,      R,  CTOUT,      R,      0, ND);
d                 381 drivers/pinctrl/pinctrl-lpc18xx.c LPC_P(d,14,     R,     R,   EMC,      R,   GPIO,      R,  CTOUT,      R,      0, ND);
d                 382 drivers/pinctrl/pinctrl-lpc18xx.c LPC_P(d,15,     R,     R,   EMC,      R,   GPIO,  SDMMC,  CTOUT,      R,      0, ND);
d                 383 drivers/pinctrl/pinctrl-lpc18xx.c LPC_P(d,16,     R,     R,   EMC,      R,   GPIO,  SDMMC,  CTOUT,      R,      0, ND);
d                 577 drivers/pinctrl/pinctrl-lpc18xx.c 	LPC18XX_PIN_P(d,0),
d                 578 drivers/pinctrl/pinctrl-lpc18xx.c 	LPC18XX_PIN_P(d,1),
d                 579 drivers/pinctrl/pinctrl-lpc18xx.c 	LPC18XX_PIN_P(d,2),
d                 580 drivers/pinctrl/pinctrl-lpc18xx.c 	LPC18XX_PIN_P(d,3),
d                 581 drivers/pinctrl/pinctrl-lpc18xx.c 	LPC18XX_PIN_P(d,4),
d                 582 drivers/pinctrl/pinctrl-lpc18xx.c 	LPC18XX_PIN_P(d,5),
d                 583 drivers/pinctrl/pinctrl-lpc18xx.c 	LPC18XX_PIN_P(d,6),
d                 584 drivers/pinctrl/pinctrl-lpc18xx.c 	LPC18XX_PIN_P(d,7),
d                 585 drivers/pinctrl/pinctrl-lpc18xx.c 	LPC18XX_PIN_P(d,8),
d                 586 drivers/pinctrl/pinctrl-lpc18xx.c 	LPC18XX_PIN_P(d,9),
d                 587 drivers/pinctrl/pinctrl-lpc18xx.c 	LPC18XX_PIN_P(d,10),
d                 588 drivers/pinctrl/pinctrl-lpc18xx.c 	LPC18XX_PIN_P(d,11),
d                 589 drivers/pinctrl/pinctrl-lpc18xx.c 	LPC18XX_PIN_P(d,12),
d                 590 drivers/pinctrl/pinctrl-lpc18xx.c 	LPC18XX_PIN_P(d,13),
d                 591 drivers/pinctrl/pinctrl-lpc18xx.c 	LPC18XX_PIN_P(d,14),
d                 592 drivers/pinctrl/pinctrl-lpc18xx.c 	LPC18XX_PIN_P(d,15),
d                 593 drivers/pinctrl/pinctrl-lpc18xx.c 	LPC18XX_PIN_P(d,16),
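
The LPC_P()/LPC18XX_PIN_P() and NPCM7XX_PINCFG() hits above are macro-generated pin tables. Whatever the per-driver macros expand to, the pinctrl core ultimately wants an array of struct pinctrl_pin_desc; a hedged sketch of how such a table is commonly built with the stock PINCTRL_PIN() helper (the pin numbers and names below are made up):

#include <linux/pinctrl/pinctrl.h>

/* Hypothetical pin numbering; real drivers derive this from the datasheet. */
#define DEMO_PIN_PD0    96
#define DEMO_PIN_PD1    97

static const struct pinctrl_pin_desc demo_pins[] = {
        PINCTRL_PIN(DEMO_PIN_PD0, "pd_0"),
        PINCTRL_PIN(DEMO_PIN_PD1, "pd_1"),
};

/* Registered later through pinctrl_desc.pins / .npins. */
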
d                1694 drivers/pinctrl/pinctrl-pic32.c static inline struct pic32_gpio_bank *irqd_to_bank(struct irq_data *d)
d                1696 drivers/pinctrl/pinctrl-pic32.c 	return gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                 841 drivers/pinctrl/pinctrl-pistachio.c static inline struct pistachio_gpio_bank *irqd_to_bank(struct irq_data *d)
d                 843 drivers/pinctrl/pinctrl-pistachio.c 	return gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                2896 drivers/pinctrl/pinctrl-rockchip.c static int rockchip_irq_set_type(struct irq_data *d, unsigned int type)
d                2898 drivers/pinctrl/pinctrl-rockchip.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                2900 drivers/pinctrl/pinctrl-rockchip.c 	u32 mask = BIT(d->hwirq);
d                2908 drivers/pinctrl/pinctrl-rockchip.c 	ret = rockchip_set_mux(bank, d->hwirq, RK_FUNC_GPIO);
d                2922 drivers/pinctrl/pinctrl-rockchip.c 		irq_set_handler_locked(d, handle_edge_irq);
d                2924 drivers/pinctrl/pinctrl-rockchip.c 		irq_set_handler_locked(d, handle_level_irq);
d                2984 drivers/pinctrl/pinctrl-rockchip.c static void rockchip_irq_suspend(struct irq_data *d)
d                2986 drivers/pinctrl/pinctrl-rockchip.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                2995 drivers/pinctrl/pinctrl-rockchip.c static void rockchip_irq_resume(struct irq_data *d)
d                2997 drivers/pinctrl/pinctrl-rockchip.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                3005 drivers/pinctrl/pinctrl-rockchip.c static void rockchip_irq_enable(struct irq_data *d)
d                3007 drivers/pinctrl/pinctrl-rockchip.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                3011 drivers/pinctrl/pinctrl-rockchip.c 	irq_gc_mask_clr_bit(d);
d                3014 drivers/pinctrl/pinctrl-rockchip.c static void rockchip_irq_disable(struct irq_data *d)
d                3016 drivers/pinctrl/pinctrl-rockchip.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                3019 drivers/pinctrl/pinctrl-rockchip.c 	irq_gc_mask_set_bit(d);
d                3224 drivers/pinctrl/pinctrl-rockchip.c 						struct rockchip_pinctrl *d,
d                3246 drivers/pinctrl/pinctrl-rockchip.c 				if (!rockchip_get_bank_data(bank, d))
d                3263 drivers/pinctrl/pinctrl-rockchip.c 		bank->drvdata = d;
d                3298 drivers/pinctrl/pinctrl-rockchip.c 			dev_dbg(d->dev, "bank %d, iomux %d has iom_offset 0x%x drv_offset 0x%x\n",
d                1403 drivers/pinctrl/pinctrl-single.c static void pcs_irq_mask(struct irq_data *d)
d                1405 drivers/pinctrl/pinctrl-single.c 	struct pcs_soc_data *pcs_soc = irq_data_get_irq_chip_data(d);
d                1407 drivers/pinctrl/pinctrl-single.c 	pcs_irq_set(pcs_soc, d->irq, false);
d                1414 drivers/pinctrl/pinctrl-single.c static void pcs_irq_unmask(struct irq_data *d)
d                1416 drivers/pinctrl/pinctrl-single.c 	struct pcs_soc_data *pcs_soc = irq_data_get_irq_chip_data(d);
d                1418 drivers/pinctrl/pinctrl-single.c 	pcs_irq_set(pcs_soc, d->irq, true);
d                1429 drivers/pinctrl/pinctrl-single.c static int pcs_irq_set_wake(struct irq_data *d, unsigned int state)
d                1432 drivers/pinctrl/pinctrl-single.c 		pcs_irq_unmask(d);
d                1434 drivers/pinctrl/pinctrl-single.c 		pcs_irq_mask(d);
d                1480 drivers/pinctrl/pinctrl-single.c static irqreturn_t pcs_irq_handler(int irq, void *d)
d                1482 drivers/pinctrl/pinctrl-single.c 	struct pcs_soc_data *pcs_soc = d;
d                1507 drivers/pinctrl/pinctrl-single.c static int pcs_irqdomain_map(struct irq_domain *d, unsigned int irq,
d                1510 drivers/pinctrl/pinctrl-single.c 	struct pcs_soc_data *pcs_soc = d->host_data;
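
pinctrl-single wires its interrupts through an irq_domain: the .map() callback receives the domain, pulls the driver data out of d->host_data, and attaches chip data plus a flow handler to the new virq. A hedged sketch of that callback shape (demo_irq_chip and demo_soc_data are placeholders, and the real driver's handler choice may differ):

#include <linux/irq.h>
#include <linux/irqdomain.h>

struct demo_soc_data {
        void __iomem *base;
};

static struct irq_chip demo_irq_chip;   /* .irq_mask/.irq_unmask filled in elsewhere */

static int demo_irqdomain_map(struct irq_domain *d, unsigned int virq,
                              irq_hw_number_t hwirq)
{
        struct demo_soc_data *soc = d->host_data;

        /* Make the driver data reachable from irq_data in the chip callbacks. */
        irq_set_chip_data(virq, soc);
        irq_set_chip_and_handler(virq, &demo_irq_chip, handle_level_irq);

        return 0;
}

static const struct irq_domain_ops demo_irqdomain_ops = {
        .map = demo_irqdomain_map,
        .xlate = irq_domain_xlate_onecell,
};
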
d                1277 drivers/pinctrl/pinctrl-st.c static void st_gpio_irq_mask(struct irq_data *d)
d                1279 drivers/pinctrl/pinctrl-st.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1282 drivers/pinctrl/pinctrl-st.c 	writel(BIT(d->hwirq), bank->base + REG_PIO_CLR_PMASK);
d                1285 drivers/pinctrl/pinctrl-st.c static void st_gpio_irq_unmask(struct irq_data *d)
d                1287 drivers/pinctrl/pinctrl-st.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1290 drivers/pinctrl/pinctrl-st.c 	writel(BIT(d->hwirq), bank->base + REG_PIO_SET_PMASK);
d                1293 drivers/pinctrl/pinctrl-st.c static int st_gpio_irq_request_resources(struct irq_data *d)
d                1295 drivers/pinctrl/pinctrl-st.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1297 drivers/pinctrl/pinctrl-st.c 	st_gpio_direction_input(gc, d->hwirq);
d                1299 drivers/pinctrl/pinctrl-st.c 	return gpiochip_lock_as_irq(gc, d->hwirq);
d                1302 drivers/pinctrl/pinctrl-st.c static void st_gpio_irq_release_resources(struct irq_data *d)
d                1304 drivers/pinctrl/pinctrl-st.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1306 drivers/pinctrl/pinctrl-st.c 	gpiochip_unlock_as_irq(gc, d->hwirq);
d                1309 drivers/pinctrl/pinctrl-st.c static int st_gpio_irq_set_type(struct irq_data *d, unsigned type)
d                1311 drivers/pinctrl/pinctrl-st.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                1314 drivers/pinctrl/pinctrl-st.c 	int comp, pin = d->hwirq;
d                 486 drivers/pinctrl/pinctrl-sx150x.c static void sx150x_irq_mask(struct irq_data *d)
d                 489 drivers/pinctrl/pinctrl-sx150x.c 			gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                 490 drivers/pinctrl/pinctrl-sx150x.c 	unsigned int n = d->hwirq;
d                 495 drivers/pinctrl/pinctrl-sx150x.c static void sx150x_irq_unmask(struct irq_data *d)
d                 498 drivers/pinctrl/pinctrl-sx150x.c 			gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                 499 drivers/pinctrl/pinctrl-sx150x.c 	unsigned int n = d->hwirq;
d                 519 drivers/pinctrl/pinctrl-sx150x.c static int sx150x_irq_set_type(struct irq_data *d, unsigned int flow_type)
d                 522 drivers/pinctrl/pinctrl-sx150x.c 			gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                 528 drivers/pinctrl/pinctrl-sx150x.c 	n = d->hwirq;
d                 561 drivers/pinctrl/pinctrl-sx150x.c static void sx150x_irq_bus_lock(struct irq_data *d)
d                 564 drivers/pinctrl/pinctrl-sx150x.c 			gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                 569 drivers/pinctrl/pinctrl-sx150x.c static void sx150x_irq_bus_sync_unlock(struct irq_data *d)
d                 572 drivers/pinctrl/pinctrl-sx150x.c 			gpiochip_get_data(irq_data_get_irq_chip_data(d));
d                 680 drivers/pinctrl/qcom/pinctrl-msm.c 					  struct irq_data *d)
d                 702 drivers/pinctrl/qcom/pinctrl-msm.c static void msm_gpio_irq_mask(struct irq_data *d)
d                 704 drivers/pinctrl/qcom/pinctrl-msm.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 710 drivers/pinctrl/qcom/pinctrl-msm.c 	g = &pctrl->soc->groups[d->hwirq];
d                 735 drivers/pinctrl/qcom/pinctrl-msm.c 	if (irqd_get_trigger_type(d) & IRQ_TYPE_LEVEL_MASK)
d                 741 drivers/pinctrl/qcom/pinctrl-msm.c 	clear_bit(d->hwirq, pctrl->enabled_irqs);
d                 746 drivers/pinctrl/qcom/pinctrl-msm.c static void msm_gpio_irq_clear_unmask(struct irq_data *d, bool status_clear)
d                 748 drivers/pinctrl/qcom/pinctrl-msm.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 754 drivers/pinctrl/qcom/pinctrl-msm.c 	g = &pctrl->soc->groups[d->hwirq];
d                 774 drivers/pinctrl/qcom/pinctrl-msm.c 	set_bit(d->hwirq, pctrl->enabled_irqs);
d                 779 drivers/pinctrl/qcom/pinctrl-msm.c static void msm_gpio_irq_enable(struct irq_data *d)
d                 782 drivers/pinctrl/qcom/pinctrl-msm.c 	msm_gpio_irq_clear_unmask(d, true);
d                 785 drivers/pinctrl/qcom/pinctrl-msm.c static void msm_gpio_irq_unmask(struct irq_data *d)
d                 787 drivers/pinctrl/qcom/pinctrl-msm.c 	msm_gpio_irq_clear_unmask(d, false);
d                 790 drivers/pinctrl/qcom/pinctrl-msm.c static void msm_gpio_irq_ack(struct irq_data *d)
d                 792 drivers/pinctrl/qcom/pinctrl-msm.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 798 drivers/pinctrl/qcom/pinctrl-msm.c 	g = &pctrl->soc->groups[d->hwirq];
d                 809 drivers/pinctrl/qcom/pinctrl-msm.c 	if (test_bit(d->hwirq, pctrl->dual_edge_irqs))
d                 810 drivers/pinctrl/qcom/pinctrl-msm.c 		msm_gpio_update_dual_edge_pos(pctrl, g, d);
d                 815 drivers/pinctrl/qcom/pinctrl-msm.c static int msm_gpio_irq_set_type(struct irq_data *d, unsigned int type)
d                 817 drivers/pinctrl/qcom/pinctrl-msm.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 823 drivers/pinctrl/qcom/pinctrl-msm.c 	g = &pctrl->soc->groups[d->hwirq];
d                 831 drivers/pinctrl/qcom/pinctrl-msm.c 		set_bit(d->hwirq, pctrl->dual_edge_irqs);
d                 833 drivers/pinctrl/qcom/pinctrl-msm.c 		clear_bit(d->hwirq, pctrl->dual_edge_irqs);
d                 896 drivers/pinctrl/qcom/pinctrl-msm.c 	if (test_bit(d->hwirq, pctrl->dual_edge_irqs))
d                 897 drivers/pinctrl/qcom/pinctrl-msm.c 		msm_gpio_update_dual_edge_pos(pctrl, g, d);
d                 902 drivers/pinctrl/qcom/pinctrl-msm.c 		irq_set_handler_locked(d, handle_level_irq);
d                 904 drivers/pinctrl/qcom/pinctrl-msm.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 909 drivers/pinctrl/qcom/pinctrl-msm.c static int msm_gpio_irq_set_wake(struct irq_data *d, unsigned int on)
d                 911 drivers/pinctrl/qcom/pinctrl-msm.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 924 drivers/pinctrl/qcom/pinctrl-msm.c static int msm_gpio_irq_reqres(struct irq_data *d)
d                 926 drivers/pinctrl/qcom/pinctrl-msm.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 933 drivers/pinctrl/qcom/pinctrl-msm.c 	ret = msm_pinmux_request_gpio(pctrl->pctrl, NULL, d->hwirq);
d                 936 drivers/pinctrl/qcom/pinctrl-msm.c 	msm_gpio_direction_input(gc, d->hwirq);
d                 938 drivers/pinctrl/qcom/pinctrl-msm.c 	if (gpiochip_lock_as_irq(gc, d->hwirq)) {
d                 941 drivers/pinctrl/qcom/pinctrl-msm.c 			d->hwirq);
d                 951 drivers/pinctrl/qcom/pinctrl-msm.c static void msm_gpio_irq_relres(struct irq_data *d)
d                 953 drivers/pinctrl/qcom/pinctrl-msm.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 955 drivers/pinctrl/qcom/pinctrl-msm.c 	gpiochip_unlock_as_irq(gc, d->hwirq);
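
pinctrl-msm implements the irq_chip .irq_request_resources/.irq_release_resources pair: before a GPIO line may be used as an interrupt it is muxed to GPIO, switched to input, and locked with gpiochip_lock_as_irq(); release undoes the lock. A hedged sketch of that pairing with error handling trimmed (the demo names are placeholders):

#include <linux/device.h>
#include <linux/gpio/driver.h>
#include <linux/irq.h>

static int demo_gpio_irq_reqres(struct irq_data *d)
{
        struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
        int ret;

        /* Flag the line as irqchip-owned so gpiolib rejects output requests. */
        ret = gpiochip_lock_as_irq(gc, d->hwirq);
        if (ret)
                dev_err(gc->parent, "unable to lock HW IRQ %lu for IRQ\n",
                        d->hwirq);
        return ret;
}

static void demo_gpio_irq_relres(struct irq_data *d)
{
        struct gpio_chip *gc = irq_data_get_irq_chip_data(d);

        gpiochip_unlock_as_irq(gc, d->hwirq);
}
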
d                 245 drivers/pinctrl/samsung/pinctrl-exynos.c 	struct samsung_pinctrl_drv_data *d = data;
d                 246 drivers/pinctrl/samsung/pinctrl-exynos.c 	struct samsung_pin_bank *bank = d->pin_banks;
d                 274 drivers/pinctrl/samsung/pinctrl-exynos.c int exynos_eint_gpio_init(struct samsung_pinctrl_drv_data *d)
d                 277 drivers/pinctrl/samsung/pinctrl-exynos.c 	struct device *dev = d->dev;
d                 281 drivers/pinctrl/samsung/pinctrl-exynos.c 	if (!d->irq) {
d                 286 drivers/pinctrl/samsung/pinctrl-exynos.c 	ret = devm_request_irq(dev, d->irq, exynos_eint_gpio_irq,
d                 287 drivers/pinctrl/samsung/pinctrl-exynos.c 					0, dev_name(dev), d);
d                 293 drivers/pinctrl/samsung/pinctrl-exynos.c 	bank = d->pin_banks;
d                 294 drivers/pinctrl/samsung/pinctrl-exynos.c 	for (i = 0; i < d->nr_banks; ++i, ++bank) {
d                 305 drivers/pinctrl/samsung/pinctrl-exynos.c 		bank->soc_priv = devm_kzalloc(d->dev,
d                 469 drivers/pinctrl/samsung/pinctrl-exynos.c int exynos_eint_wkup_init(struct samsung_pinctrl_drv_data *d)
d                 471 drivers/pinctrl/samsung/pinctrl-exynos.c 	struct device *dev = d->dev;
d                 500 drivers/pinctrl/samsung/pinctrl-exynos.c 	bank = d->pin_banks;
d                 501 drivers/pinctrl/samsung/pinctrl-exynos.c 	for (i = 0; i < d->nr_banks; ++i, ++bank) {
d                 564 drivers/pinctrl/samsung/pinctrl-exynos.c 	bank = d->pin_banks;
d                 566 drivers/pinctrl/samsung/pinctrl-exynos.c 	for (i = 0; i < d->nr_banks; ++i, ++bank) {
d                 133 drivers/pinctrl/samsung/pinctrl-exynos.h int exynos_eint_gpio_init(struct samsung_pinctrl_drv_data *d);
d                 134 drivers/pinctrl/samsung/pinctrl-exynos.h int exynos_eint_wkup_init(struct samsung_pinctrl_drv_data *d);
d                 129 drivers/pinctrl/samsung/pinctrl-s3c24xx.c static void s3c24xx_eint_set_handler(struct irq_data *d, unsigned int type)
d                 133 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 135 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 		irq_set_handler_locked(d, handle_level_irq);
d                 138 drivers/pinctrl/samsung/pinctrl-s3c24xx.c static void s3c24xx_eint_set_function(struct samsung_pinctrl_drv_data *d,
d                 149 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	reg = d->virt_base + bank->pctl_offset;
d                 166 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	struct samsung_pinctrl_drv_data *d = bank->drvdata;
d                 175 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 		dev_err(d->dev, "unsupported external interrupt type\n");
d                 182 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	reg = d->virt_base + EINT_REG(index);
d                 190 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	s3c24xx_eint_set_function(d, bank, data->hwirq);
d                 257 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	struct samsung_pinctrl_drv_data *d = bank->drvdata;
d                 260 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	writel(bitval, d->virt_base + EINTPEND_REG);
d                 266 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	struct samsung_pinctrl_drv_data *d = bank->drvdata;
d                 269 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	mask = readl(d->virt_base + EINTMASK_REG);
d                 271 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	writel(mask, d->virt_base + EINTMASK_REG);
d                 277 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	struct samsung_pinctrl_drv_data *d = bank->drvdata;
d                 280 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	mask = readl(d->virt_base + EINTMASK_REG);
d                 282 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	writel(mask, d->virt_base + EINTMASK_REG);
d                 317 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	struct samsung_pinctrl_drv_data *d = bank->drvdata;
d                 320 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	writel(1UL << index, d->virt_base + EINTPEND_REG);
d                 326 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	struct samsung_pinctrl_drv_data *d = bank->drvdata;
d                 330 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	mask = readl(d->virt_base + EINTMASK_REG);
d                 332 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	writel(mask, d->virt_base + EINTMASK_REG);
d                 338 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	struct samsung_pinctrl_drv_data *d = bank->drvdata;
d                 342 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	mask = readl(d->virt_base + EINTMASK_REG);
d                 344 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	writel(mask, d->virt_base + EINTMASK_REG);
d                 360 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	struct samsung_pinctrl_drv_data *d = data->drvdata;
d                 365 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	pend = readl(d->virt_base + EINTPEND_REG);
d                 366 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	mask = readl(d->virt_base + EINTMASK_REG);
d                 468 drivers/pinctrl/samsung/pinctrl-s3c24xx.c static int s3c24xx_eint_init(struct samsung_pinctrl_drv_data *d)
d                 470 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	struct device *dev = d->dev;
d                 498 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	eint_data->drvdata = d;
d                 517 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	bank = d->pin_banks;
d                 518 drivers/pinctrl/samsung/pinctrl-s3c24xx.c 	for (i = 0; i < d->nr_banks; ++i, ++bank) {
d                 258 drivers/pinctrl/samsung/pinctrl-s3c64xx.c static void s3c64xx_irq_set_handler(struct irq_data *d, unsigned int type)
d                 262 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 		irq_set_handler_locked(d, handle_edge_irq);
d                 264 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 		irq_set_handler_locked(d, handle_level_irq);
d                 267 drivers/pinctrl/samsung/pinctrl-s3c64xx.c static void s3c64xx_irq_set_function(struct samsung_pinctrl_drv_data *d,
d                 278 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	reg = d->virt_base + bank->pctl_offset;
d                 306 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	struct samsung_pinctrl_drv_data *d = bank->drvdata;
d                 308 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	void __iomem *reg = d->virt_base + EINTMASK_REG(bank->eint_offset);
d                 332 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	struct samsung_pinctrl_drv_data *d = bank->drvdata;
d                 334 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	void __iomem *reg = d->virt_base + EINTPEND_REG(bank->eint_offset);
d                 342 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	struct samsung_pinctrl_drv_data *d = bank->drvdata;
d                 357 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	reg = d->virt_base + EINTCON_REG(bank->eint_offset);
d                 366 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	s3c64xx_irq_set_function(d, bank, irqd->hwirq);
d                 451 drivers/pinctrl/samsung/pinctrl-s3c64xx.c static int s3c64xx_eint_gpio_init(struct samsung_pinctrl_drv_data *d)
d                 455 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	struct device *dev = d->dev;
d                 459 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	if (!d->irq) {
d                 465 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	bank = d->pin_banks;
d                 466 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	for (i = 0; i < d->nr_banks; ++i, ++bank) {
d                 490 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	data->drvdata = d;
d                 492 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	bank = d->pin_banks;
d                 494 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	for (i = 0; i < d->nr_banks; ++i, ++bank) {
d                 501 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	irq_set_chained_handler_and_data(d->irq, s3c64xx_eint_gpio_irq, data);
d                 514 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	struct samsung_pinctrl_drv_data *d = ddata->bank->drvdata;
d                 517 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	val = readl(d->virt_base + EINT0MASK_REG);
d                 522 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	writel(val, d->virt_base + EINT0MASK_REG);
d                 539 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	struct samsung_pinctrl_drv_data *d = ddata->bank->drvdata;
d                 542 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 					d->virt_base + EINT0PEND_REG);
d                 550 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	struct samsung_pinctrl_drv_data *d = bank->drvdata;
d                 565 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	reg = d->virt_base + EINT0CON0_REG;
d                 578 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	s3c64xx_irq_set_function(d, bank, irqd->hwirq);
d                 688 drivers/pinctrl/samsung/pinctrl-s3c64xx.c static int s3c64xx_eint_eint0_init(struct samsung_pinctrl_drv_data *d)
d                 690 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	struct device *dev = d->dev;
d                 711 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	data->drvdata = d;
d                 729 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	bank = d->pin_banks;
d                 730 drivers/pinctrl/samsung/pinctrl-s3c64xx.c 	for (i = 0; i < d->nr_banks; ++i, ++bank) {
d                1007 drivers/pinctrl/samsung/pinctrl-samsung.c samsung_pinctrl_get_soc_data(struct samsung_pinctrl_drv_data *d,
d                1023 drivers/pinctrl/samsung/pinctrl-samsung.c 	d->suspend = ctrl->suspend;
d                1024 drivers/pinctrl/samsung/pinctrl-samsung.c 	d->resume = ctrl->resume;
d                1025 drivers/pinctrl/samsung/pinctrl-samsung.c 	d->nr_banks = ctrl->nr_banks;
d                1026 drivers/pinctrl/samsung/pinctrl-samsung.c 	d->pin_banks = devm_kcalloc(&pdev->dev, d->nr_banks,
d                1027 drivers/pinctrl/samsung/pinctrl-samsung.c 					sizeof(*d->pin_banks), GFP_KERNEL);
d                1028 drivers/pinctrl/samsung/pinctrl-samsung.c 	if (!d->pin_banks)
d                1048 drivers/pinctrl/samsung/pinctrl-samsung.c 	bank = d->pin_banks;
d                1061 drivers/pinctrl/samsung/pinctrl-samsung.c 		bank->drvdata = d;
d                1062 drivers/pinctrl/samsung/pinctrl-samsung.c 		bank->pin_base = d->nr_pins;
d                1063 drivers/pinctrl/samsung/pinctrl-samsung.c 		d->nr_pins += bank->nr_pins;
d                1073 drivers/pinctrl/samsung/pinctrl-samsung.c 	d->virt_base = virt_base[0];
d                1078 drivers/pinctrl/samsung/pinctrl-samsung.c 		bank = d->pin_banks;
d                1079 drivers/pinctrl/samsung/pinctrl-samsung.c 		for (i = 0; i < d->nr_banks; ++i, ++bank) {
d                1087 drivers/pinctrl/samsung/pinctrl-samsung.c 	d->pin_base = pin_base;
d                1088 drivers/pinctrl/samsung/pinctrl-samsung.c 	pin_base += d->nr_pins;
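
The samsung pinctrl hits show a recurring driver-data idiom: allocate d->nr_banks bank descriptors with devm_kcalloc(), then walk them with a combined index-and-pointer loop (bank = d->pin_banks; for (i = 0; i < d->nr_banks; ++i, ++bank)). A hedged, self-contained sketch of that idiom with made-up types:

#include <linux/device.h>
#include <linux/errno.h>
#include <linux/slab.h>

struct demo_bank {
        const char *name;
        unsigned int nr_pins;
        void *drvdata;
};

struct demo_drvdata {
        struct device *dev;
        unsigned int nr_banks;
        struct demo_bank *pin_banks;
        unsigned int nr_pins;
};

static int demo_init_banks(struct demo_drvdata *d)
{
        struct demo_bank *bank;
        unsigned int i;

        d->pin_banks = devm_kcalloc(d->dev, d->nr_banks,
                                    sizeof(*d->pin_banks), GFP_KERNEL);
        if (!d->pin_banks)
                return -ENOMEM;

        /* Same walk as in pinctrl-samsung: advance index and pointer together. */
        bank = d->pin_banks;
        for (i = 0; i < d->nr_banks; ++i, ++bank) {
                bank->drvdata = d;
                d->nr_pins += bank->nr_pins;
        }
        return 0;
}
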
d                1612 drivers/pinctrl/sh-pfc/pfc-r8a7778.c #define SSI_PFC_DATA(name, d)			SH_PFC_MUX1(name, d)
d                5634 drivers/pinctrl/sirf/pinctrl-atlas7.c static void atlas7_gpio_irq_ack(struct irq_data *d)
d                5636 drivers/pinctrl/sirf/pinctrl-atlas7.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                5643 drivers/pinctrl/sirf/pinctrl-atlas7.c 	bank = atlas7_gpio_to_bank(a7gc, d->hwirq);
d                5644 drivers/pinctrl/sirf/pinctrl-atlas7.c 	pin_in_bank = d->hwirq - bank->gpio_offset;
d                5672 drivers/pinctrl/sirf/pinctrl-atlas7.c static void atlas7_gpio_irq_mask(struct irq_data *d)
d                5674 drivers/pinctrl/sirf/pinctrl-atlas7.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                5680 drivers/pinctrl/sirf/pinctrl-atlas7.c 	__atlas7_gpio_irq_mask(a7gc, d->hwirq);
d                5685 drivers/pinctrl/sirf/pinctrl-atlas7.c static void atlas7_gpio_irq_unmask(struct irq_data *d)
d                5687 drivers/pinctrl/sirf/pinctrl-atlas7.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                5694 drivers/pinctrl/sirf/pinctrl-atlas7.c 	bank = atlas7_gpio_to_bank(a7gc, d->hwirq);
d                5695 drivers/pinctrl/sirf/pinctrl-atlas7.c 	pin_in_bank = d->hwirq - bank->gpio_offset;
d                5708 drivers/pinctrl/sirf/pinctrl-atlas7.c static int atlas7_gpio_irq_type(struct irq_data *d,
d                5711 drivers/pinctrl/sirf/pinctrl-atlas7.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                5718 drivers/pinctrl/sirf/pinctrl-atlas7.c 	bank = atlas7_gpio_to_bank(a7gc, d->hwirq);
d                5719 drivers/pinctrl/sirf/pinctrl-atlas7.c 	pin_in_bank = d->hwirq - bank->gpio_offset;
d                 420 drivers/pinctrl/sirf/pinctrl-sirf.c static void sirfsoc_gpio_irq_ack(struct irq_data *d)
d                 422 drivers/pinctrl/sirf/pinctrl-sirf.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 424 drivers/pinctrl/sirf/pinctrl-sirf.c 	struct sirfsoc_gpio_bank *bank = sirfsoc_gpio_to_bank(sgpio, d->hwirq);
d                 425 drivers/pinctrl/sirf/pinctrl-sirf.c 	int idx = sirfsoc_gpio_to_bankoff(d->hwirq);
d                 459 drivers/pinctrl/sirf/pinctrl-sirf.c static void sirfsoc_gpio_irq_mask(struct irq_data *d)
d                 461 drivers/pinctrl/sirf/pinctrl-sirf.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 463 drivers/pinctrl/sirf/pinctrl-sirf.c 	struct sirfsoc_gpio_bank *bank = sirfsoc_gpio_to_bank(sgpio, d->hwirq);
d                 465 drivers/pinctrl/sirf/pinctrl-sirf.c 	__sirfsoc_gpio_irq_mask(sgpio, bank, d->hwirq % SIRFSOC_GPIO_BANK_SIZE);
d                 468 drivers/pinctrl/sirf/pinctrl-sirf.c static void sirfsoc_gpio_irq_unmask(struct irq_data *d)
d                 470 drivers/pinctrl/sirf/pinctrl-sirf.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 472 drivers/pinctrl/sirf/pinctrl-sirf.c 	struct sirfsoc_gpio_bank *bank = sirfsoc_gpio_to_bank(sgpio, d->hwirq);
d                 473 drivers/pinctrl/sirf/pinctrl-sirf.c 	int idx = sirfsoc_gpio_to_bankoff(d->hwirq);
d                 489 drivers/pinctrl/sirf/pinctrl-sirf.c static int sirfsoc_gpio_irq_type(struct irq_data *d, unsigned type)
d                 491 drivers/pinctrl/sirf/pinctrl-sirf.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 493 drivers/pinctrl/sirf/pinctrl-sirf.c 	struct sirfsoc_gpio_bank *bank = sirfsoc_gpio_to_bank(sgpio, d->hwirq);
d                 494 drivers/pinctrl/sirf/pinctrl-sirf.c 	int idx = sirfsoc_gpio_to_bankoff(d->hwirq);
d                 280 drivers/pinctrl/spear/pinctrl-plgpio.c static void plgpio_irq_disable(struct irq_data *d)
d                 282 drivers/pinctrl/spear/pinctrl-plgpio.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 284 drivers/pinctrl/spear/pinctrl-plgpio.c 	int offset = d->hwirq;
d                 299 drivers/pinctrl/spear/pinctrl-plgpio.c static void plgpio_irq_enable(struct irq_data *d)
d                 301 drivers/pinctrl/spear/pinctrl-plgpio.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 303 drivers/pinctrl/spear/pinctrl-plgpio.c 	int offset = d->hwirq;
d                 318 drivers/pinctrl/spear/pinctrl-plgpio.c static int plgpio_irq_set_type(struct irq_data *d, unsigned trigger)
d                 320 drivers/pinctrl/spear/pinctrl-plgpio.c 	struct gpio_chip *gc = irq_data_get_irq_chip_data(d);
d                 322 drivers/pinctrl/spear/pinctrl-plgpio.c 	int offset = d->hwirq;
d                 345 drivers/pinctrl/stm32/pinctrl-stm32.c static int stm32_gpio_domain_translate(struct irq_domain *d,
d                 359 drivers/pinctrl/stm32/pinctrl-stm32.c static int stm32_gpio_domain_activate(struct irq_domain *d,
d                 362 drivers/pinctrl/stm32/pinctrl-stm32.c 	struct stm32_gpio_bank *bank = d->host_data;
d                 401 drivers/pinctrl/stm32/pinctrl-stm32.c static void stm32_gpio_domain_deactivate(struct irq_domain *d,
d                 404 drivers/pinctrl/stm32/pinctrl-stm32.c 	struct stm32_gpio_bank *bank = d->host_data;
d                 413 drivers/pinctrl/stm32/pinctrl-stm32.c static int stm32_gpio_domain_alloc(struct irq_domain *d,
d                 417 drivers/pinctrl/stm32/pinctrl-stm32.c 	struct stm32_gpio_bank *bank = d->host_data;
d                 423 drivers/pinctrl/stm32/pinctrl-stm32.c 	parent_fwspec.fwnode = d->parent->fwnode;
d                 428 drivers/pinctrl/stm32/pinctrl-stm32.c 	irq_domain_set_hwirq_and_chip(d, virq, hwirq, &stm32_gpio_irq_chip,
d                 431 drivers/pinctrl/stm32/pinctrl-stm32.c 	return irq_domain_alloc_irqs_parent(d, virq, nr_irqs, &parent_fwspec);
d                 931 drivers/pinctrl/sunxi/pinctrl-sunxi.c static int sunxi_pinctrl_irq_request_resources(struct irq_data *d)
d                 933 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	struct sunxi_pinctrl *pctl = irq_data_get_irq_chip_data(d);
d                 938 drivers/pinctrl/sunxi/pinctrl-sunxi.c 					pctl->irq_array[d->hwirq], "irq");
d                 943 drivers/pinctrl/sunxi/pinctrl-sunxi.c 			pctl->irq_array[d->hwirq] - pctl->desc->pin_base);
d                 946 drivers/pinctrl/sunxi/pinctrl-sunxi.c 			irqd_to_hwirq(d));
d                 951 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	sunxi_pmx_set(pctl->pctl_dev, pctl->irq_array[d->hwirq], func->muxval);
d                 956 drivers/pinctrl/sunxi/pinctrl-sunxi.c static void sunxi_pinctrl_irq_release_resources(struct irq_data *d)
d                 958 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	struct sunxi_pinctrl *pctl = irq_data_get_irq_chip_data(d);
d                 961 drivers/pinctrl/sunxi/pinctrl-sunxi.c 			      pctl->irq_array[d->hwirq] - pctl->desc->pin_base);
d                 964 drivers/pinctrl/sunxi/pinctrl-sunxi.c static int sunxi_pinctrl_irq_set_type(struct irq_data *d, unsigned int type)
d                 966 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	struct sunxi_pinctrl *pctl = irq_data_get_irq_chip_data(d);
d                 967 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	u32 reg = sunxi_irq_cfg_reg(pctl->desc, d->hwirq);
d                 968 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	u8 index = sunxi_irq_cfg_offset(d->hwirq);
d                 996 drivers/pinctrl/sunxi/pinctrl-sunxi.c 		irq_set_chip_handler_name_locked(d, &sunxi_pinctrl_level_irq_chip,
d                 999 drivers/pinctrl/sunxi/pinctrl-sunxi.c 		irq_set_chip_handler_name_locked(d, &sunxi_pinctrl_edge_irq_chip,
d                1011 drivers/pinctrl/sunxi/pinctrl-sunxi.c static void sunxi_pinctrl_irq_ack(struct irq_data *d)
d                1013 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	struct sunxi_pinctrl *pctl = irq_data_get_irq_chip_data(d);
d                1014 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	u32 status_reg = sunxi_irq_status_reg(pctl->desc, d->hwirq);
d                1015 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	u8 status_idx = sunxi_irq_status_offset(d->hwirq);
d                1021 drivers/pinctrl/sunxi/pinctrl-sunxi.c static void sunxi_pinctrl_irq_mask(struct irq_data *d)
d                1023 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	struct sunxi_pinctrl *pctl = irq_data_get_irq_chip_data(d);
d                1024 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	u32 reg = sunxi_irq_ctrl_reg(pctl->desc, d->hwirq);
d                1025 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	u8 idx = sunxi_irq_ctrl_offset(d->hwirq);
d                1038 drivers/pinctrl/sunxi/pinctrl-sunxi.c static void sunxi_pinctrl_irq_unmask(struct irq_data *d)
d                1040 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	struct sunxi_pinctrl *pctl = irq_data_get_irq_chip_data(d);
d                1041 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	u32 reg = sunxi_irq_ctrl_reg(pctl->desc, d->hwirq);
d                1042 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	u8 idx = sunxi_irq_ctrl_offset(d->hwirq);
d                1055 drivers/pinctrl/sunxi/pinctrl-sunxi.c static void sunxi_pinctrl_irq_ack_unmask(struct irq_data *d)
d                1057 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	sunxi_pinctrl_irq_ack(d);
d                1058 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	sunxi_pinctrl_irq_unmask(d);
d                1088 drivers/pinctrl/sunxi/pinctrl-sunxi.c static int sunxi_pinctrl_irq_of_xlate(struct irq_domain *d,
d                1095 drivers/pinctrl/sunxi/pinctrl-sunxi.c 	struct sunxi_pinctrl *pctl = d->host_data;
d                 182 drivers/pinctrl/ti/pinctrl-ti-iodelay.c 	u64 m, d;
d                 186 drivers/pinctrl/ti/pinctrl-ti-iodelay.c 	d = 2 * (u64)delay * (u64)delay_m;
d                 189 drivers/pinctrl/ti/pinctrl-ti-iodelay.c 	return div64_u64(m, d);
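
ti-iodelay computes its delay ratio in 64-bit arithmetic and divides with div64_u64(), the portable helper for a 64-by-64 division (a plain '/' on u64 operands is not available on 32-bit architectures). A hedged sketch of the same shape with invented scale factors:

#include <linux/math64.h>
#include <linux/types.h>

/* Illustrative only: scale a delay by delay_m/delay_d without a 64-bit '/'. */
static u64 demo_scale_delay(u32 delay, u32 delay_m, u32 delay_d)
{
        u64 m = (u64)delay * delay_m;
        u64 d = delay_d;

        if (!d)
                return 0;

        return div64_u64(m, d);         /* 64-bit dividend / 64-bit divisor */
}
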
d                 152 drivers/pinctrl/uniphier/pinctrl-uniphier.h #define UNIPHIER_PINCTRL_PIN(a, b, c, d, e, f, g)			\
d                 156 drivers/pinctrl/uniphier/pinctrl-uniphier.h 	.drv_data = (void *)UNIPHIER_PIN_ATTR_PACKED(c, d, e, f, g),	\
d                 419 drivers/platform/chrome/wilco_ec/event.c static void free_device_data(struct device *d)
d                 423 drivers/platform/chrome/wilco_ec/event.c 	dev_data = container_of(d, struct event_device_data, dev);
d                 344 drivers/platform/chrome/wilco_ec/telemetry.c static void telem_device_free(struct device *d)
d                 348 drivers/platform/chrome/wilco_ec/telemetry.c 	dev_data = container_of(d, struct telem_device_data, dev);
d                 205 drivers/platform/mellanox/mlxbf-tmfifo.c #define mlxbf_vdev_to_tmfifo(d) container_of(d, struct mlxbf_tmfifo_vdev, vdev)
d                 577 drivers/platform/x86/acer-wmi.c video_set_backlight_video_vendor(const struct dmi_system_id *d)
d                1234 drivers/platform/x86/acer-wmi.c static void __init type_aa_dmi_decode(const struct dmi_header *header, void *d)
d                 141 drivers/platform/x86/dell-smbios-base.c int dell_smbios_register_device(struct device *d, void *call_fn)
d                 145 drivers/platform/x86/dell-smbios-base.c 	priv = devm_kzalloc(d, sizeof(struct smbios_device), GFP_KERNEL);
d                 148 drivers/platform/x86/dell-smbios-base.c 	get_device(d);
d                 149 drivers/platform/x86/dell-smbios-base.c 	priv->device = d;
d                 154 drivers/platform/x86/dell-smbios-base.c 	dev_dbg(d, "Added device: %s\n", d->driver->name);
d                 159 drivers/platform/x86/dell-smbios-base.c void dell_smbios_unregister_device(struct device *d)
d                 165 drivers/platform/x86/dell-smbios-base.c 		if (priv->device == d) {
d                 167 drivers/platform/x86/dell-smbios-base.c 			put_device(d);
d                 172 drivers/platform/x86/dell-smbios-base.c 	dev_dbg(d, "Remove device: %s\n", d->driver->name);
d                 176 drivers/platform/x86/dell-smbios-base.c int dell_smbios_call_filter(struct device *d,
d                 184 drivers/platform/x86/dell-smbios-base.c 		dev_dbg(d, "class too big: %u\n", buffer->cmd_class);
d                 190 drivers/platform/x86/dell-smbios-base.c 		dev_dbg(d, "invalid command, supported commands: 0x%8x\n",
d                 202 drivers/platform/x86/dell-smbios-base.c 		dev_dbg(d, "blacklisted command: %u/%u\n",
d                 214 drivers/platform/x86/dell-smbios-base.c 			dev_dbg(d, "no token support on this system\n");
d                 228 drivers/platform/x86/dell-smbios-base.c 			dev_dbg(d, "token at location %04x doesn't exist\n",
d                 251 drivers/platform/x86/dell-smbios-base.c 				dev_dbg(d, "whitelisted token: %x\n", t);
d                 265 drivers/platform/x86/dell-smbios-base.c 			dev_dbg(d, "whitelisted capable command: %u/%u\n",
d                 269 drivers/platform/x86/dell-smbios-base.c 		dev_dbg(d, "missing capability %d for %u/%u\n",
d                 277 drivers/platform/x86/dell-smbios-base.c 		dev_dbg(d, "Allowing %u/%u due to CAP_SYS_RAWIO\n",
d                  57 drivers/platform/x86/dell-smbios.h int dell_smbios_register_device(struct device *d, void *call_fn);
d                  58 drivers/platform/x86/dell-smbios.h void dell_smbios_unregister_device(struct device *d);
d                  61 drivers/platform/x86/dell-smbios.h int dell_smbios_call_filter(struct device *d,
d                  57 drivers/platform/x86/intel_pmc_ipc.c #define S0IX_RESIDENCY_IN_USECS(d, s)		\
d                  59 drivers/platform/x86/intel_pmc_ipc.c 	u64 result = 10ull * ((d) + (s));	\
d                 456 drivers/platform/x86/pmc_atom.c 	const struct dmi_system_id *d = dmi_first_match(critclk_systems);
d                 464 drivers/platform/x86/pmc_atom.c 	if (d) {
d                 466 drivers/platform/x86/pmc_atom.c 		pr_info("%s critclks quirk enabled\n", d->ident);
d                1508 drivers/platform/x86/samsung-laptop.c static int __init samsung_dmi_matched(const struct dmi_system_id *d)
d                1510 drivers/platform/x86/samsung-laptop.c 	quirks = d->driver_data;
d                2425 drivers/platform/x86/thinkpad_acpi.c 	u8 d;
d                2428 drivers/platform/x86/thinkpad_acpi.c 		d = nvram_read_byte(TP_NVRAM_ADDR_HK2);
d                2429 drivers/platform/x86/thinkpad_acpi.c 		n->thinkpad_toggle = !!(d & TP_NVRAM_MASK_HKT_THINKPAD);
d                2430 drivers/platform/x86/thinkpad_acpi.c 		n->zoom_toggle = !!(d & TP_NVRAM_MASK_HKT_ZOOM);
d                2431 drivers/platform/x86/thinkpad_acpi.c 		n->display_toggle = !!(d & TP_NVRAM_MASK_HKT_DISPLAY);
d                2432 drivers/platform/x86/thinkpad_acpi.c 		n->hibernate_toggle = !!(d & TP_NVRAM_MASK_HKT_HIBERNATE);
d                2435 drivers/platform/x86/thinkpad_acpi.c 		d = nvram_read_byte(TP_NVRAM_ADDR_THINKLIGHT);
d                2436 drivers/platform/x86/thinkpad_acpi.c 		n->thinklight_toggle = !!(d & TP_NVRAM_MASK_THINKLIGHT);
d                2439 drivers/platform/x86/thinkpad_acpi.c 		d = nvram_read_byte(TP_NVRAM_ADDR_VIDEO);
d                2441 drivers/platform/x86/thinkpad_acpi.c 				!!(d & TP_NVRAM_MASK_HKT_DISPEXPND);
d                2444 drivers/platform/x86/thinkpad_acpi.c 		d = nvram_read_byte(TP_NVRAM_ADDR_BRIGHTNESS);
d                2445 drivers/platform/x86/thinkpad_acpi.c 		n->brightness_level = (d & TP_NVRAM_MASK_LEVEL_BRIGHTNESS)
d                2448 drivers/platform/x86/thinkpad_acpi.c 				!!(d & TP_NVRAM_MASK_HKT_BRIGHTNESS);
d                2451 drivers/platform/x86/thinkpad_acpi.c 		d = nvram_read_byte(TP_NVRAM_ADDR_MIXER);
d                2452 drivers/platform/x86/thinkpad_acpi.c 		n->volume_level = (d & TP_NVRAM_MASK_LEVEL_VOLUME)
d                2454 drivers/platform/x86/thinkpad_acpi.c 		n->mute = !!(d & TP_NVRAM_MASK_MUTE);
d                2455 drivers/platform/x86/thinkpad_acpi.c 		n->volume_toggle = !!(d & TP_NVRAM_MASK_HKT_VOLUME);
d                7485 drivers/platform/x86/thinkpad_acpi.c 	struct tpacpi_alsa_data *d;
d                7488 drivers/platform/x86/thinkpad_acpi.c 		d = alsa_card->private_data;
d                7489 drivers/platform/x86/thinkpad_acpi.c 		if (d->ctl_mute_id)
d                7492 drivers/platform/x86/thinkpad_acpi.c 					d->ctl_mute_id);
d                7493 drivers/platform/x86/thinkpad_acpi.c 		if (d->ctl_vol_id)
d                7496 drivers/platform/x86/thinkpad_acpi.c 					d->ctl_vol_id);
d                 193 drivers/pnp/isapnp/core.c 	unsigned char d = 0;
d                 197 drivers/pnp/isapnp/core.c 			d = isapnp_read_byte(0x05);
d                 198 drivers/pnp/isapnp/core.c 			if (d & 1)
d                 202 drivers/pnp/isapnp/core.c 		if (!(d & 1)) {
d                 207 drivers/pnp/isapnp/core.c 		d = isapnp_read_byte(0x04);	/* PRESDI */
d                 208 drivers/pnp/isapnp/core.c 		isapnp_checksum_value += d;
d                 210 drivers/pnp/isapnp/core.c 			*data++ = d;
d                 142 drivers/pnp/pnpbios/core.c 	int docked = -1, d = 0;
d                 165 drivers/pnp/pnpbios/core.c 			d = 0;
d                 168 drivers/pnp/pnpbios/core.c 			d = 1;
d                 175 drivers/pnp/pnpbios/core.c 		if (d != docked) {
d                 176 drivers/pnp/pnpbios/core.c 			if (pnp_dock_event(d, &now) == 0) {
d                 177 drivers/pnp/pnpbios/core.c 				docked = d;
d                 479 drivers/pnp/pnpbios/core.c static int __init exploding_pnp_bios(const struct dmi_system_id *d)
d                 481 drivers/pnp/pnpbios/core.c 	printk(KERN_WARNING "%s detected. Disabling PnPBIOS\n", d->ident);
d                 547 drivers/power/supply/cpcap-battery.c 	struct cpcap_interrupt_desc *d;
d                 552 drivers/power/supply/cpcap-battery.c 	list_for_each_entry(d, &ddata->irq_list, node) {
d                 553 drivers/power/supply/cpcap-battery.c 		if (irq == d->irq)
d                 557 drivers/power/supply/cpcap-battery.c 	if (!d)
d                 562 drivers/power/supply/cpcap-battery.c 	switch (d->action) {
d                 589 drivers/power/supply/cpcap-battery.c 	struct cpcap_interrupt_desc *d;
d                 607 drivers/power/supply/cpcap-battery.c 	d = devm_kzalloc(ddata->dev, sizeof(*d), GFP_KERNEL);
d                 608 drivers/power/supply/cpcap-battery.c 	if (!d)
d                 611 drivers/power/supply/cpcap-battery.c 	d->name = name;
d                 612 drivers/power/supply/cpcap-battery.c 	d->irq = irq;
d                 615 drivers/power/supply/cpcap-battery.c 		d->action = CPCAP_BATTERY_IRQ_ACTION_BATTERY_LOW;
d                 617 drivers/power/supply/cpcap-battery.c 		d->action = CPCAP_BATTERY_IRQ_ACTION_POWEROFF;
d                 619 drivers/power/supply/cpcap-battery.c 	list_add(&d->node, &ddata->irq_list);
d                 493 drivers/power/supply/cpcap-charger.c 	struct cpcap_interrupt_desc *d;
d                 511 drivers/power/supply/cpcap-charger.c 	d = devm_kzalloc(ddata->dev, sizeof(*d), GFP_KERNEL);
d                 512 drivers/power/supply/cpcap-charger.c 	if (!d)
d                 515 drivers/power/supply/cpcap-charger.c 	d->name = name;
d                 516 drivers/power/supply/cpcap-charger.c 	d->irq = irq;
d                 517 drivers/power/supply/cpcap-charger.c 	list_add(&d->node, &ddata->irq_list);
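
Both cpcap drivers keep their requested interrupts on a linked list of small descriptors: each devm_kzalloc()'d entry records a name and irq number and is chained with list_add(), and the threaded handler later finds its entry with list_for_each_entry(). A hedged sketch of that bookkeeping (the types and the lookup helper are illustrative):

#include <linux/list.h>
#include <linux/slab.h>

struct demo_irq_desc {
        struct list_head node;
        const char *name;
        int irq;
};

struct demo_ddata {
        struct list_head irq_list;      /* INIT_LIST_HEAD() at probe time */
};

static struct demo_irq_desc *demo_find_irq(struct demo_ddata *ddata, int irq)
{
        struct demo_irq_desc *d;

        list_for_each_entry(d, &ddata->irq_list, node)
                if (d->irq == irq)
                        return d;

        return NULL;
}

static void demo_add_irq(struct demo_ddata *ddata, struct demo_irq_desc *d)
{
        list_add(&d->node, &ddata->irq_list);
}
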
d                 214 drivers/power/supply/sbs-manager.c static int sbsm_do_alert(struct device *dev, void *d)
d                 238 drivers/power/supply/sbs-manager.c 		       unsigned int d)
d                 504 drivers/ps3/ps3av_cmd.c 	int d;
d                 506 drivers/ps3/ps3av_cmd.c 	d = ns_val = 0;
d                 511 drivers/ps3/ps3av_cmd.c 		d = 0;
d                 515 drivers/ps3/ps3av_cmd.c 		d = 1;
d                 519 drivers/ps3/ps3av_cmd.c 		d = 2;
d                 523 drivers/ps3/ps3av_cmd.c 		d = 3;
d                 530 drivers/ps3/ps3av_cmd.c 		d = 4;
d                 540 drivers/ps3/ps3av_cmd.c 		ns_val = ps3av_ns_table[PS3AV_CMD_AUDIO_FS_44K-BASE][d];
d                 485 drivers/ras/cec.c 	struct dentry *d, *pfn, *decay, *count, *array;
d                 487 drivers/ras/cec.c 	d = debugfs_create_dir("cec", ras_debugfs_dir);
d                 488 drivers/ras/cec.c 	if (!d) {
d                 493 drivers/ras/cec.c 	decay = debugfs_create_file("decay_interval", S_IRUSR | S_IWUSR, d,
d                 500 drivers/ras/cec.c 	count = debugfs_create_file("action_threshold", S_IRUSR | S_IWUSR, d,
d                 510 drivers/ras/cec.c 	pfn = debugfs_create_file("pfn", S_IRUSR | S_IWUSR, d, &dfs_pfn, &pfn_ops);
d                 516 drivers/ras/cec.c 	array = debugfs_create_file("array", S_IRUSR, d, NULL, &array_ops);
d                 525 drivers/ras/cec.c 	debugfs_remove_recursive(d);
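
The ras/cec hits create a debugfs directory, populate it with files, and tear the whole tree down with debugfs_remove_recursive() if anything fails. A hedged minimal sketch of that setup using a stock u64 attribute (directory and file names here are placeholders):

#include <linux/debugfs.h>
#include <linux/errno.h>
#include <linux/types.h>

static struct dentry *demo_dir;
static u64 demo_threshold;

static int demo_debugfs_init(void)
{
        demo_dir = debugfs_create_dir("demo", NULL);
        if (!demo_dir)
                return -ENOMEM;

        /* One tunable exposed as a simple u64 file under the new directory. */
        debugfs_create_u64("threshold", 0600, demo_dir, &demo_threshold);
        return 0;
}

static void demo_debugfs_exit(void)
{
        /* Removes the directory and every file created beneath it. */
        debugfs_remove_recursive(demo_dir);
}
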
d                  22 drivers/remoteproc/qcom_common.c #define to_glink_subdev(d) container_of(d, struct qcom_rproc_glink, subdev)
d                  23 drivers/remoteproc/qcom_common.c #define to_smd_subdev(d) container_of(d, struct qcom_rproc_subdev, subdev)
d                  24 drivers/remoteproc/qcom_common.c #define to_ssr_subdev(d) container_of(d, struct qcom_rproc_ssr, subdev)
d                 194 drivers/remoteproc/remoteproc_debugfs.c 	struct fw_rsc_devmem *d;
d                 221 drivers/remoteproc/remoteproc_debugfs.c 			d = rsc;
d                 223 drivers/remoteproc/remoteproc_debugfs.c 			seq_printf(seq, "  Device Address 0x%x\n", d->da);
d                 224 drivers/remoteproc/remoteproc_debugfs.c 			seq_printf(seq, "  Physical Address 0x%x\n", d->pa);
d                 225 drivers/remoteproc/remoteproc_debugfs.c 			seq_printf(seq, "  Length 0x%x Bytes\n", d->len);
d                 226 drivers/remoteproc/remoteproc_debugfs.c 			seq_printf(seq, "  Flags 0x%x\n", d->flags);
d                 227 drivers/remoteproc/remoteproc_debugfs.c 			seq_printf(seq, "  Reserved (should be zero) [%d]\n", d->reserved);
d                 228 drivers/remoteproc/remoteproc_debugfs.c 			seq_printf(seq, "  Name %s\n\n", d->name);
d                  10 drivers/remoteproc/remoteproc_sysfs.c #define to_rproc(d) container_of(d, struct rproc, dev)
d                 173 drivers/rpmsg/qcom_smd.c #define to_smd_edge(d)		container_of(d, struct qcom_smd_edge, dev)
d                  18 drivers/rpmsg/rpmsg_internal.h #define to_rpmsg_device(d) container_of(d, struct rpmsg_device, dev)
d                  19 drivers/rpmsg/rpmsg_internal.h #define to_rpmsg_driver(d) container_of(d, struct rpmsg_driver, drv)
d                 164 drivers/rtc/rtc-bd70528.c static inline void tmday2rtc(struct rtc_time *t, struct bd70528_rtc_day *d)
d                 166 drivers/rtc/rtc-bd70528.c 	d->sec &= ~BD70528_MASK_RTC_SEC;
d                 167 drivers/rtc/rtc-bd70528.c 	d->min &= ~BD70528_MASK_RTC_MINUTE;
d                 168 drivers/rtc/rtc-bd70528.c 	d->hour &= ~BD70528_MASK_RTC_HOUR;
d                 169 drivers/rtc/rtc-bd70528.c 	d->sec |= bin2bcd(t->tm_sec);
d                 170 drivers/rtc/rtc-bd70528.c 	d->min |= bin2bcd(t->tm_min);
d                 171 drivers/rtc/rtc-bd70528.c 	d->hour |= bin2bcd(t->tm_hour);
d                 391 drivers/s390/block/dasd_eckd.c 	int dn, d;
d                 409 drivers/s390/block/dasd_eckd.c 			d = 9 + ceil_quot(reclen + 6 * (dn + 1), 34);
d                 410 drivers/s390/block/dasd_eckd.c 			sector = (49 + (rec_on_trk - 1) * (10 + d)) / 8;
d                 413 drivers/s390/block/dasd_eckd.c 			d = 7 + ceil_quot(reclen + 12, 32);
d                 414 drivers/s390/block/dasd_eckd.c 			sector = (39 + (rec_on_trk - 1) * (8 + d)) / 7;
d                 608 drivers/s390/block/dasd_eckd.c 	int dn, d;
d                 625 drivers/s390/block/dasd_eckd.c 			d = 9 + ceil_quot(reclen + 6 * (dn + 1), 34);
d                 626 drivers/s390/block/dasd_eckd.c 			sector = (49 + (rec_on_trk - 1) * (10 + d)) / 8;
d                 629 drivers/s390/block/dasd_eckd.c 			d = 7 + ceil_quot(reclen + 12, 32);
d                 630 drivers/s390/block/dasd_eckd.c 			sector = (39 + (rec_on_trk - 1) * (8 + d)) / 7;
d                4216 drivers/s390/block/dasd_eckd.c 	int dn, d;
d                4317 drivers/s390/block/dasd_eckd.c 			d = 9 + ceil_quot(blksize + 6 * (dn + 1), 34);
d                4318 drivers/s390/block/dasd_eckd.c 			sector = (49 + (rec_on_trk - 1) * (10 + d)) / 8;
d                4321 drivers/s390/block/dasd_eckd.c 			d = 7 + ceil_quot(blksize + 12, 32);
d                4322 drivers/s390/block/dasd_eckd.c 			sector = (39 + (rec_on_trk - 1) * (8 + d)) / 7;
d                 883 drivers/s390/block/dasd_int.h #define dasd_eer_enable(d)	(0)
d                 884 drivers/s390/block/dasd_int.h #define dasd_eer_disable(d)	do { } while (0)
d                 885 drivers/s390/block/dasd_int.h #define dasd_eer_write(d,c,i)	do { } while (0)
d                 886 drivers/s390/block/dasd_int.h #define dasd_eer_snss(d)	do { } while (0)
d                 887 drivers/s390/block/dasd_int.h #define dasd_eer_enabled(d)	(0)
d                 214 drivers/s390/char/keyboard.c 	int i, d;
d                 216 drivers/s390/char/keyboard.c 	d = kbd->diacr;
d                 220 drivers/s390/char/keyboard.c 		if (kbd->accent_table[i].diacr == d &&
d                 225 drivers/s390/char/keyboard.c 	if (ch == ' ' || ch == d)
d                 226 drivers/s390/char/keyboard.c 		return d;
d                 228 drivers/s390/char/keyboard.c 	kbd_put_queue(kbd->port, d);
d                  21 drivers/s390/cio/scm.c #define	to_scm_drv(d) container_of(d, struct scm_driver, drv)
d                 726 drivers/s390/scsi/zfcp_dbf.c 	struct debug_info *d;
d                 728 drivers/s390/scsi/zfcp_dbf.c 	d = debug_register(name, size, 1, rec_size);
d                 729 drivers/s390/scsi/zfcp_dbf.c 	if (!d)
d                 732 drivers/s390/scsi/zfcp_dbf.c 	debug_register_view(d, &debug_hex_ascii_view);
d                 733 drivers/s390/scsi/zfcp_dbf.c 	debug_set_level(d, dbflevel);
d                 735 drivers/s390/scsi/zfcp_dbf.c 	return d;
d                1248 drivers/scsi/NCR5380.c 	unsigned char *d = *data;
d                1278 drivers/scsi/NCR5380.c 			NCR5380_write(OUTPUT_DATA_REG, *d);
d                1280 drivers/scsi/NCR5380.c 			*d = NCR5380_read(CURRENT_SCSI_DATA_REG);
d                1282 drivers/scsi/NCR5380.c 		++d;
d                1337 drivers/scsi/NCR5380.c 	*data = d;
d                1470 drivers/scsi/NCR5380.c 	unsigned char *d = *data;
d                1489 drivers/scsi/NCR5380.c 	         (p & SR_IO) ? "receive" : "send", c, d);
d                1505 drivers/scsi/NCR5380.c 			result = NCR5380_dma_recv_setup(hostdata, d, c);
d                1507 drivers/scsi/NCR5380.c 			result = NCR5380_dma_send_setup(hostdata, d, c);
d                1539 drivers/scsi/NCR5380.c 			result = NCR5380_dma_recv_setup(hostdata, d, c);
d                1541 drivers/scsi/NCR5380.c 			result = NCR5380_dma_send_setup(hostdata, d, c);
d                1618 drivers/scsi/NCR5380.c 			d[*count - 1] = NCR5380_read(INPUT_DATA_REG);
d                 545 drivers/scsi/aacraid/aachba.c 			char d[sizeof(((struct inquiry_data *)NULL)->inqd_pid)];
d                 546 drivers/scsi/aacraid/aachba.c 			int count = sizeof(d);
d                 547 drivers/scsi/aacraid/aachba.c 			char *dp = d;
d                 553 drivers/scsi/aacraid/aachba.c 			memcpy(inq.inqd_pid, d, sizeof(d));
d                 492 drivers/scsi/aic94xx/aic94xx_hwi.c 	u32 d;
d                 496 drivers/scsi/aic94xx/aic94xx_hwi.c 	d = asd_read_reg_dword(asd_ha, CTXDOMAIN);
d                 497 drivers/scsi/aic94xx/aic94xx_hwi.c 	d |= 4;
d                 498 drivers/scsi/aic94xx/aic94xx_hwi.c 	asd_write_reg_dword(asd_ha, CTXDOMAIN, d);
d                 506 drivers/scsi/aic94xx/aic94xx_hwi.c 	u32 d;
d                 531 drivers/scsi/aic94xx/aic94xx_hwi.c 	d = asd_read_reg_dword(asd_ha, CTXDOMAIN);
d                 532 drivers/scsi/aic94xx/aic94xx_hwi.c 	d &= ~4;
d                 533 drivers/scsi/aic94xx/aic94xx_hwi.c 	asd_write_reg_dword(asd_ha, CTXDOMAIN, d);
d                 544 drivers/scsi/aic94xx/aic94xx_hwi.c 	u32 d;
d                 567 drivers/scsi/aic94xx/aic94xx_hwi.c 	d = asd_read_reg_dword(asd_ha, CTXDOMAIN);
d                 568 drivers/scsi/aic94xx/aic94xx_hwi.c 	d &= ~1;
d                 569 drivers/scsi/aic94xx/aic94xx_hwi.c 	asd_write_reg_dword(asd_ha, CTXDOMAIN, d);
d                 551 drivers/scsi/aic94xx/aic94xx_sds.c 	u8 d;
d                 554 drivers/scsi/aic94xx/aic94xx_sds.c 		d  = asd_read_reg_byte(asd_ha, asd_ha->hw_prof.flash.bar);
d                 555 drivers/scsi/aic94xx/aic94xx_sds.c 		d ^= asd_read_reg_byte(asd_ha, asd_ha->hw_prof.flash.bar);
d                 556 drivers/scsi/aic94xx/aic94xx_sds.c 		if (!d)
d                3235 drivers/scsi/bfa/bfa_svc.c bfa_fcport_qos_stats_swap(struct bfa_qos_stats_s *d,
d                3238 drivers/scsi/bfa/bfa_svc.c 	u32	*dip = (u32 *) d;
d                3248 drivers/scsi/bfa/bfa_svc.c bfa_fcport_fcoe_stats_swap(struct bfa_fcoe_stats_s *d,
d                3251 drivers/scsi/bfa/bfa_svc.c 	u32	*dip = (u32 *) d;
d                5524 drivers/scsi/bfa/bfa_svc.c 	buf = &uf_buf->d[0];
d                 347 drivers/scsi/bfa/bfa_svc.h 	u8	d[BFA_UF_BUFSZ];
d                2370 drivers/scsi/csiostor/csio_hw.c 		uint32_t d, c, k;
d                2372 drivers/scsi/csiostor/csio_hw.c 		d = be32_to_cpu(drv_fw->fw_ver);
d                2381 drivers/scsi/csiostor/csio_hw.c 			FW_HDR_FW_VER_MAJOR_G(d), FW_HDR_FW_VER_MINOR_G(d),
d                2382 drivers/scsi/csiostor/csio_hw.c 			FW_HDR_FW_VER_MICRO_G(d), FW_HDR_FW_VER_BUILD_G(d),
d                 119 drivers/scsi/csiostor/csio_wr.c 	__be64 *d = (__be64 *)(flq->vstart);
d                 136 drivers/scsi/csiostor/csio_wr.c 		*d++ = cpu_to_be64(paddr);
d                 543 drivers/scsi/dpt_i2o.c 	struct adpt_device* d;
d                 572 drivers/scsi/dpt_i2o.c 			d = pHba->channel[chan].device[id];
d                 573 drivers/scsi/dpt_i2o.c 			while(d) {
d                 574 drivers/scsi/dpt_i2o.c 				seq_printf(m,"\t%-24.24s", d->pScsi_dev->vendor);
d                 575 drivers/scsi/dpt_i2o.c 				seq_printf(m," Rev: %-8.8s\n", d->pScsi_dev->rev);
d                 577 drivers/scsi/dpt_i2o.c 				unit = d->pI2o_dev->lct_data.tid;
d                 579 drivers/scsi/dpt_i2o.c 					       unit, (int)d->scsi_channel, (int)d->scsi_id, d->scsi_lun,
d                 580 drivers/scsi/dpt_i2o.c 					       scsi_device_online(d->pScsi_dev)? "online":"offline"); 
d                 581 drivers/scsi/dpt_i2o.c 				d = d->next_lun;
d                 687 drivers/scsi/dpt_i2o.c 	struct adpt_device* d = cmd->device->hostdata;
d                 691 drivers/scsi/dpt_i2o.c 	if (!d) {
d                 697 drivers/scsi/dpt_i2o.c 	msg[1] = (I2O_DEVICE_RESET<<24|HOST_TID<<12|d->tid);
d                 703 drivers/scsi/dpt_i2o.c 	old_state = d->state;
d                 704 drivers/scsi/dpt_i2o.c 	d->state |= DPTI_DEV_RESET;
d                 706 drivers/scsi/dpt_i2o.c 	d->state = old_state;
d                1037 drivers/scsi/dpt_i2o.c 	struct i2o_device* d;
d                1090 drivers/scsi/dpt_i2o.c 	for(d = pHba->devices; d ; d = next){
d                1091 drivers/scsi/dpt_i2o.c 		next = d->next;
d                1092 drivers/scsi/dpt_i2o.c 		kfree(d);
d                1121 drivers/scsi/dpt_i2o.c 	struct adpt_device* d;
d                1126 drivers/scsi/dpt_i2o.c 	d = pHba->channel[chan].device[id];
d                1127 drivers/scsi/dpt_i2o.c 	if(!d || d->tid == 0) {
d                1132 drivers/scsi/dpt_i2o.c 	if(d->scsi_lun == lun){
d                1133 drivers/scsi/dpt_i2o.c 		return d;
d                1137 drivers/scsi/dpt_i2o.c 	for(d=d->next_lun ; d ; d = d->next_lun){
d                1138 drivers/scsi/dpt_i2o.c 		if(d->scsi_lun == lun){
d                1139 drivers/scsi/dpt_i2o.c 			return d;
d                1410 drivers/scsi/dpt_i2o.c 	struct i2o_device *d;
d                1468 drivers/scsi/dpt_i2o.c 		d = kmalloc(sizeof(struct i2o_device), GFP_KERNEL);
d                1469 drivers/scsi/dpt_i2o.c 		if(d==NULL)
d                1475 drivers/scsi/dpt_i2o.c 		d->controller = pHba;
d                1476 drivers/scsi/dpt_i2o.c 		d->next = NULL;
d                1478 drivers/scsi/dpt_i2o.c 		memcpy(&d->lct_data, &lct->lct_entry[i], sizeof(i2o_lct_entry));
d                1480 drivers/scsi/dpt_i2o.c 		d->flags = 0;
d                1481 drivers/scsi/dpt_i2o.c 		tid = d->lct_data.tid;
d                1482 drivers/scsi/dpt_i2o.c 		adpt_i2o_report_hba_unit(pHba, d);
d                1483 drivers/scsi/dpt_i2o.c 		adpt_i2o_install_device(pHba, d);
d                1486 drivers/scsi/dpt_i2o.c 	for(d = pHba->devices; d ; d = d->next) {
d                1487 drivers/scsi/dpt_i2o.c 		if(d->lct_data.class_id  == I2O_CLASS_BUS_ADAPTER_PORT ||
d                1488 drivers/scsi/dpt_i2o.c 		   d->lct_data.class_id  == I2O_CLASS_FIBRE_CHANNEL_PORT){
d                1489 drivers/scsi/dpt_i2o.c 			tid = d->lct_data.tid;
d                1495 drivers/scsi/dpt_i2o.c 			pHba->channel[bus_no].type = d->lct_data.class_id;
d                1512 drivers/scsi/dpt_i2o.c 	for(d = pHba->devices; d ; d = d->next) {
d                1513 drivers/scsi/dpt_i2o.c 		if(d->lct_data.class_id  == I2O_CLASS_RANDOM_BLOCK_STORAGE ||
d                1514 drivers/scsi/dpt_i2o.c 		   d->lct_data.class_id  == I2O_CLASS_SCSI_PERIPHERAL ||
d                1515 drivers/scsi/dpt_i2o.c 		   d->lct_data.class_id  == I2O_CLASS_FIBRE_CHANNEL_PERIPHERAL ){
d                1517 drivers/scsi/dpt_i2o.c 			tid = d->lct_data.tid;
d                1550 drivers/scsi/dpt_i2o.c 				pDev->pI2o_dev = d;
d                1551 drivers/scsi/dpt_i2o.c 				d->owner = pDev;
d                1563 drivers/scsi/dpt_i2o.c 						d->lct_data.identity_tag);
d                1576 drivers/scsi/dpt_i2o.c static int adpt_i2o_install_device(adpt_hba* pHba, struct i2o_device *d)
d                1579 drivers/scsi/dpt_i2o.c 	d->controller=pHba;
d                1580 drivers/scsi/dpt_i2o.c 	d->owner=NULL;
d                1581 drivers/scsi/dpt_i2o.c 	d->next=pHba->devices;
d                1582 drivers/scsi/dpt_i2o.c 	d->prev=NULL;
d                1584 drivers/scsi/dpt_i2o.c 		pHba->devices->prev=d;
d                1586 drivers/scsi/dpt_i2o.c 	pHba->devices=d;
d                1587 drivers/scsi/dpt_i2o.c 	*d->dev_name = 0;
d                2189 drivers/scsi/dpt_i2o.c static s32 adpt_scsi_to_i2o(adpt_hba* pHba, struct scsi_cmnd* cmd, struct adpt_device* d)
d                2240 drivers/scsi/dpt_i2o.c 	msg[1] = ((0xff<<24)|(HOST_TID<<12)|d->tid);
d                2247 drivers/scsi/dpt_i2o.c 	msg[5] = d->tid;
d                2489 drivers/scsi/dpt_i2o.c 	struct i2o_device *d;
d                2508 drivers/scsi/dpt_i2o.c 	for (d = pHba->devices; d; d = d->next) {
d                2509 drivers/scsi/dpt_i2o.c 		pDev =(struct adpt_device*) d->owner;
d                2550 drivers/scsi/dpt_i2o.c 				d = kmalloc(sizeof(struct i2o_device),
d                2552 drivers/scsi/dpt_i2o.c 				if(d==NULL)
d                2558 drivers/scsi/dpt_i2o.c 				d->controller = pHba;
d                2559 drivers/scsi/dpt_i2o.c 				d->next = NULL;
d                2561 drivers/scsi/dpt_i2o.c 				memcpy(&d->lct_data, &lct->lct_entry[i], sizeof(i2o_lct_entry));
d                2563 drivers/scsi/dpt_i2o.c 				d->flags = 0;
d                2564 drivers/scsi/dpt_i2o.c 				adpt_i2o_report_hba_unit(pHba, d);
d                2565 drivers/scsi/dpt_i2o.c 				adpt_i2o_install_device(pHba, d);
d                2587 drivers/scsi/dpt_i2o.c 				pDev->tid = d->lct_data.tid;
d                2591 drivers/scsi/dpt_i2o.c 				pDev->pI2o_dev = d;
d                2592 drivers/scsi/dpt_i2o.c 				d->owner = pDev;
d                2615 drivers/scsi/dpt_i2o.c 					d = pDev->pI2o_dev;
d                2616 drivers/scsi/dpt_i2o.c 					if(d->lct_data.tid != tid) { // something changed
d                2618 drivers/scsi/dpt_i2o.c 						memcpy(&d->lct_data, &lct->lct_entry[i], sizeof(i2o_lct_entry));
d                2653 drivers/scsi/dpt_i2o.c 	struct scsi_device* 	d = NULL;
d                2655 drivers/scsi/dpt_i2o.c 	shost_for_each_device(d, pHba->host) {
d                2657 drivers/scsi/dpt_i2o.c 		spin_lock_irqsave(&d->list_lock, flags);
d                2658 drivers/scsi/dpt_i2o.c 		list_for_each_entry(cmd, &d->cmd_list, list) {
d                2662 drivers/scsi/dpt_i2o.c 		spin_unlock_irqrestore(&d->list_lock, flags);
d                3144 drivers/scsi/dpt_i2o.c static void adpt_i2o_report_hba_unit(adpt_hba* pHba, struct i2o_device *d)
d                3147 drivers/scsi/dpt_i2o.c 	int unit = d->lct_data.tid;
d                3167 drivers/scsi/dpt_i2o.c 	 printk(KERN_INFO "\tClass: %.21s\n", adpt_i2o_get_class_name(d->lct_data.class_id));
d                3168 drivers/scsi/dpt_i2o.c 	 printk(KERN_INFO "\tSubclass: 0x%04X\n", d->lct_data.sub_class);
d                3171 drivers/scsi/dpt_i2o.c 	 if(d->lct_data.device_flags&(1<<0))
d                3173 drivers/scsi/dpt_i2o.c 	 if(d->lct_data.device_flags&(1<<1))
d                3175 drivers/scsi/dpt_i2o.c 	 if(!(d->lct_data.device_flags&(1<<4)))
d                3177 drivers/scsi/dpt_i2o.c 	 if(!(d->lct_data.device_flags&(1<<5)))
d                 268 drivers/scsi/dpti.h static void adpt_i2o_report_hba_unit(adpt_hba* pHba, struct i2o_device *d);
d                 282 drivers/scsi/dpti.h static int adpt_i2o_install_device(adpt_hba* pHba, struct i2o_device *d);
d                 130 drivers/scsi/esas2r/esas2r.h #define LOWORD(d) ((u16)(u32)(d))
d                 131 drivers/scsi/esas2r/esas2r.h #define HIWORD(d) ((u16)(((u32)(d)) >> 16))
d                1312 drivers/scsi/esas2r/esas2r_main.c 	struct atto_vda_devinfo *d;
d                1345 drivers/scsi/esas2r/esas2r_main.c 		d = (struct atto_vda_devinfo *)data;
d                1347 drivers/scsi/esas2r/esas2r_main.c 		d->capacity = le64_to_cpu(d->capacity);
d                1348 drivers/scsi/esas2r/esas2r_main.c 		d->block_size = le32_to_cpu(d->block_size);
d                1349 drivers/scsi/esas2r/esas2r_main.c 		d->ses_dev_index = le16_to_cpu(d->ses_dev_index);
d                1350 drivers/scsi/esas2r/esas2r_main.c 		d->target_id = le16_to_cpu(d->target_id);
d                1351 drivers/scsi/esas2r/esas2r_main.c 		d->lun = le16_to_cpu(d->lun);
d                1352 drivers/scsi/esas2r/esas2r_main.c 		d->features = le16_to_cpu(d->features);
d                   8 drivers/scsi/fdomain_pci.c 			     const struct pci_device_id *d)
d                3200 drivers/scsi/hisi_sas/hisi_sas_main.c 	int d;
d                3229 drivers/scsi/hisi_sas/hisi_sas_main.c 	for (d = 0; d < hisi_hba->queue_count; d++) {
d                3230 drivers/scsi/hisi_sas/hisi_sas_main.c 		snprintf(name, 256, "%d", d);
d                3232 drivers/scsi/hisi_sas/hisi_sas_main.c 		debugfs_create_file(name, 0400, dentry, &hisi_hba->dq[d],
d                3720 drivers/scsi/hisi_sas/hisi_sas_main.c 	int p, c, d;
d                3763 drivers/scsi/hisi_sas/hisi_sas_main.c 	for (d = 0; d < hisi_hba->queue_count; d++) {
d                3764 drivers/scsi/hisi_sas/hisi_sas_main.c 		hisi_hba->debugfs_cmd_hdr[d] =
d                3767 drivers/scsi/hisi_sas/hisi_sas_main.c 		if (!hisi_hba->debugfs_cmd_hdr[d])
d                2914 drivers/scsi/hpsa.c 	struct device *d = &cp->h->pdev->dev;
d                2928 drivers/scsi/hpsa.c 			dev_warn(d, "SCSI Status = 02, Sense key = 0x%02x, ASC = 0x%02x, ASCQ = 0x%02x\n",
d                2931 drivers/scsi/hpsa.c 			dev_warn(d, "SCSI Status = 0x%02x\n", ei->ScsiStatus);
d                2933 drivers/scsi/hpsa.c 			dev_warn(d, "SCSI status is abnormally zero.  "
d                2948 drivers/scsi/hpsa.c 		dev_warn(d, "probably means device no longer present\n");
d                2980 drivers/scsi/hpsa.c 		dev_warn(d, "Unknown command status %x\n",
d                8417 drivers/scsi/hpsa.c 	struct offline_device_entry *d;
d                8422 drivers/scsi/hpsa.c 		d = list_entry(this, struct offline_device_entry,
d                8425 drivers/scsi/hpsa.c 		if (!hpsa_volume_offline(h, d->scsi3addr)) {
d                8427 drivers/scsi/hpsa.c 			list_del(&d->offline_list);
d                 359 drivers/scsi/hpsa_cmd.h #define TYPE_ATTR_DIR(t, a, d) ((((d) & 0x03) << 6) |\
d                2758 drivers/scsi/isci/host.c 	int d;
d                2764 drivers/scsi/isci/host.c 	for (d = 0; d < isci_gpio_count(ihost); d++) {
d                2771 drivers/scsi/isci/host.c 			bit = try_test_sas_gpio_gp_bit(to_sas_gpio_od(d, i),
d                2783 drivers/scsi/isci/host.c 		writel(val, &ihost->scu_registers->peg0.sgpio.output_data_select[d]);
d                2789 drivers/scsi/isci/host.c 	return d > 0;
d                 101 drivers/scsi/libsas/sas_event.c 		struct sas_discovery *d = &port->disc;
d                 104 drivers/scsi/libsas/sas_event.c 		if (!test_and_clear_bit(ev, &d->pending))
d                 452 drivers/scsi/libsas/sas_init.c 	struct sas_phy_data *d = container_of(work, typeof(*d), reset_work.work);
d                 454 drivers/scsi/libsas/sas_init.c 	d->reset_result = transport_sas_phy_reset(d->phy, d->hard_reset);
d                 459 drivers/scsi/libsas/sas_init.c 	struct sas_phy_data *d = container_of(work, typeof(*d), enable_work.work);
d                 461 drivers/scsi/libsas/sas_init.c 	d->enable_result = sas_phy_enable(d->phy, d->enable);
d                 466 drivers/scsi/libsas/sas_init.c 	struct sas_phy_data *d = kzalloc(sizeof(*d), GFP_KERNEL);
d                 468 drivers/scsi/libsas/sas_init.c 	if (!d)
d                 471 drivers/scsi/libsas/sas_init.c 	mutex_init(&d->event_lock);
d                 472 drivers/scsi/libsas/sas_init.c 	INIT_SAS_WORK(&d->reset_work, phy_reset_work);
d                 473 drivers/scsi/libsas/sas_init.c 	INIT_SAS_WORK(&d->enable_work, phy_enable_work);
d                 474 drivers/scsi/libsas/sas_init.c 	d->phy = phy;
d                 475 drivers/scsi/libsas/sas_init.c 	phy->hostdata = d;
d                 484 drivers/scsi/libsas/sas_init.c 	struct sas_phy_data *d = phy->hostdata;
d                 487 drivers/scsi/libsas/sas_init.c 	if (!d)
d                 491 drivers/scsi/libsas/sas_init.c 	mutex_lock(&d->event_lock);
d                 492 drivers/scsi/libsas/sas_init.c 	d->reset_result = 0;
d                 493 drivers/scsi/libsas/sas_init.c 	d->hard_reset = hard_reset;
d                 496 drivers/scsi/libsas/sas_init.c 	sas_queue_work(ha, &d->reset_work);
d                 501 drivers/scsi/libsas/sas_init.c 		rc = d->reset_result;
d                 502 drivers/scsi/libsas/sas_init.c 	mutex_unlock(&d->event_lock);
d                 511 drivers/scsi/libsas/sas_init.c 	struct sas_phy_data *d = phy->hostdata;
d                 514 drivers/scsi/libsas/sas_init.c 	if (!d)
d                 518 drivers/scsi/libsas/sas_init.c 	mutex_lock(&d->event_lock);
d                 519 drivers/scsi/libsas/sas_init.c 	d->enable_result = 0;
d                 520 drivers/scsi/libsas/sas_init.c 	d->enable = enable;
d                 523 drivers/scsi/libsas/sas_init.c 	sas_queue_work(ha, &d->enable_work);
d                 528 drivers/scsi/libsas/sas_init.c 		rc = d->enable_result;
d                 529 drivers/scsi/libsas/sas_init.c 	mutex_unlock(&d->event_lock);
d                 281 drivers/scsi/mac_scsi.c 	unsigned char *d = dst;
d                 295 drivers/scsi/mac_scsi.c 		bytes = mac_pdma_recv(s, d, min(hostdata->pdma_residual, 512));
d                 298 drivers/scsi/mac_scsi.c 			d += bytes;
d                 320 drivers/scsi/mac_scsi.c 		         "%s: bus error (%d/%d)\n", __func__, d - dst, len);
d                 340 drivers/scsi/mac_scsi.c 	u8 __iomem *d = hostdata->pdma_io + (OUTPUT_DATA_REG << 4);
d                 354 drivers/scsi/mac_scsi.c 		bytes = mac_pdma_send(s, d, min(hostdata->pdma_residual, 512));
d                2193 drivers/scsi/megaraid/megaraid_sas.h 		__le64 d[12];
d                  72 drivers/scsi/megaraid/megaraid_sas_fp.c 	u64 d;
d                  77 drivers/scsi/megaraid/megaraid_sas_fp.c 	d = dividend;
d                  78 drivers/scsi/megaraid/megaraid_sas_fp.c 	remainder = do_div(d, divisor);
d                  91 drivers/scsi/megaraid/megaraid_sas_fp.c 	u64 d;
d                  96 drivers/scsi/megaraid/megaraid_sas_fp.c 	d = dividend;
d                  97 drivers/scsi/megaraid/megaraid_sas_fp.c 	remainder = do_div(d, divisor);
d                  99 drivers/scsi/megaraid/megaraid_sas_fp.c 	return d;
d                 100 drivers/scsi/mesh.c 	int	d;
d                 226 drivers/scsi/mesh.c 	tlp->d = a;
d                 257 drivers/scsi/mesh.c 		printk(lp->fmt, lp->d);
d                 282 drivers/scsi/mesh.c 		printk(lp->fmt, lp->d);
d                 300 drivers/scsi/mesh.c #define MKWORD(a, b, c, d)	(((a) << 24) + ((b) << 16) + ((c) << 8) + (d))
d                  61 drivers/scsi/mvsas/mv_sas.h #define MV_INIT_DELAYED_WORK(w, f, d)	INIT_DELAYED_WORK(w, f)
d                 948 drivers/scsi/pm8001/pm8001_sas.c 			uintptr_t d = (uintptr_t)pm8001_dev
d                 950 drivers/scsi/pm8001/pm8001_sas.c 			if (((d % sizeof(*pm8001_dev)) != 0)
d                 951 drivers/scsi/pm8001/pm8001_sas.c 			 || ((d / sizeof(*pm8001_dev)) >= PM8001_MAX_DEVICES))
d                3770 drivers/scsi/qla2xxx/qla_gs.c 	struct ct_sns_gpn_ft_data *d;
d                3781 drivers/scsi/qla2xxx/qla_gs.c 		d  = &ct_rsp->entries[i];
d                3784 drivers/scsi/qla2xxx/qla_gs.c 		id.b.domain = d->port_id[0];
d                3785 drivers/scsi/qla2xxx/qla_gs.c 		id.b.area   = d->port_id[1];
d                3786 drivers/scsi/qla2xxx/qla_gs.c 		id.b.al_pa  = d->port_id[2];
d                3787 drivers/scsi/qla2xxx/qla_gs.c 		wwn = wwn_to_u64(d->port_name);
d                3796 drivers/scsi/qla2xxx/qla_gs.c 				memcpy(rp->port_name, d->port_name, 8);
d                3804 drivers/scsi/qla2xxx/qla_gs.c 						    d->port_name, 8);
d                3817 drivers/scsi/qla2xxx/qla_gs.c 					    d->port_name, 8)) {
d                3836 drivers/scsi/qla2xxx/qla_gs.c 							    d->port_name, 8);
d                3848 drivers/scsi/qla2xxx/qla_gs.c 						    d->port_name, 8);
d                 194 drivers/scsi/raid_class.c raid_attr_show_internal(attr, %d, rd->attr, code)			\
d                 266 drivers/scsi/scsi_debug.c #define to_sdebug_host(d)	\
d                 267 drivers/scsi/scsi_debug.c 	container_of(d, struct sdebug_host_info, dev)
d                2939 drivers/scsi/scsi_lib.c 	const unsigned char *d, *cur_id_str;
d                2971 drivers/scsi/scsi_lib.c 	d = vpd_pg83->data + 4;
d                2972 drivers/scsi/scsi_lib.c 	while (d < vpd_pg83->data + vpd_pg83->len) {
d                2974 drivers/scsi/scsi_lib.c 		if ((d[1] & 0x30) != 0x00)
d                2977 drivers/scsi/scsi_lib.c 		switch (d[1] & 0xf) {
d                2980 drivers/scsi/scsi_lib.c 			if (cur_id_size > d[3])
d                2985 drivers/scsi/scsi_lib.c 			cur_id_size = d[3];
d                2988 drivers/scsi/scsi_lib.c 			cur_id_str = d + 4;
d                2989 drivers/scsi/scsi_lib.c 			cur_id_type = d[1] & 0xf;
d                2995 drivers/scsi/scsi_lib.c 			if (cur_id_size > d[3])
d                2999 drivers/scsi/scsi_lib.c 			    cur_id_size == d[3])
d                3001 drivers/scsi/scsi_lib.c 			cur_id_size = d[3];
d                3002 drivers/scsi/scsi_lib.c 			cur_id_str = d + 4;
d                3003 drivers/scsi/scsi_lib.c 			cur_id_type = d[1] & 0xf;
d                3027 drivers/scsi/scsi_lib.c 			if (cur_id_size > d[3])
d                3029 drivers/scsi/scsi_lib.c 			cur_id_size = d[3];
d                3030 drivers/scsi/scsi_lib.c 			cur_id_str = d + 4;
d                3031 drivers/scsi/scsi_lib.c 			cur_id_type = d[1] & 0xf;
d                3050 drivers/scsi/scsi_lib.c 			if (cur_id_size + 4 > d[3])
d                3053 drivers/scsi/scsi_lib.c 			if (cur_id_size && d[3] > id_len)
d                3055 drivers/scsi/scsi_lib.c 			cur_id_size = id_size = d[3];
d                3056 drivers/scsi/scsi_lib.c 			cur_id_str = d + 4;
d                3057 drivers/scsi/scsi_lib.c 			cur_id_type = d[1] & 0xf;
d                3069 drivers/scsi/scsi_lib.c 		d += d[3] + 4;
d                3088 drivers/scsi/scsi_lib.c 	const unsigned char *d;
d                3099 drivers/scsi/scsi_lib.c 	d = vpd_pg83->data + 4;
d                3100 drivers/scsi/scsi_lib.c 	while (d < vpd_pg83->data + vpd_pg83->len) {
d                3101 drivers/scsi/scsi_lib.c 		switch (d[1] & 0xf) {
d                3104 drivers/scsi/scsi_lib.c 			rel_port = get_unaligned_be16(&d[6]);
d                3108 drivers/scsi/scsi_lib.c 			group_id = get_unaligned_be16(&d[6]);
d                3113 drivers/scsi/scsi_lib.c 		d += d[3] + 4;
d                  44 drivers/scsi/scsi_transport_srp.c #define	dev_to_rport(d)	container_of(d, struct srp_rport, dev)
d                 431 drivers/scsi/ses.c 	unsigned char *d;
d                 439 drivers/scsi/ses.c 			d = desc + 4;
d                 440 drivers/scsi/ses.c 			slot = d[3];
d                 445 drivers/scsi/ses.c 			d = desc + 4;
d                 446 drivers/scsi/ses.c 			slot = d[3];
d                 447 drivers/scsi/ses.c 			d = desc + 8;
d                 449 drivers/scsi/ses.c 			d = desc + 4;
d                 451 drivers/scsi/ses.c 		addr = (u64)d[12] << 56 |
d                 452 drivers/scsi/ses.c 			(u64)d[13] << 48 |
d                 453 drivers/scsi/ses.c 			(u64)d[14] << 40 |
d                 454 drivers/scsi/ses.c 			(u64)d[15] << 32 |
d                 455 drivers/scsi/ses.c 			(u64)d[16] << 24 |
d                 456 drivers/scsi/ses.c 			(u64)d[17] << 16 |
d                 457 drivers/scsi/ses.c 			(u64)d[18] << 8 |
d                 458 drivers/scsi/ses.c 			(u64)d[19];
d                  51 drivers/scsi/sgiwd93.c #define DMA_DIR(d)   ((d == DATA_OUT_DIR) ? DMA_TO_DEVICE : DMA_FROM_DEVICE)
d                  97 drivers/scsi/snic/snic_disc.h #define dev_to_tgt(d) \
d                  98 drivers/scsi/snic/snic_disc.h 	container_of(d, struct snic_tgt, dev)
d                 177 drivers/scsi/st.c #define TAPE_MINOR(d, m, n) (((d & ~(255 >> (ST_NBR_MODE_BITS + 1))) << (ST_NBR_MODE_BITS + 1)) | \
d                 178 drivers/scsi/st.c   (d & (255 >> (ST_NBR_MODE_BITS + 1))) | (m << ST_MODE_SHIFT) | ((n != 0) << 7) )
d                1866 drivers/scsi/wd33c93.c 	unsigned int d, i;
d                1868 drivers/scsi/wd33c93.c 		d = 2;	/* divisor for  8-10 MHz input-clock */
d                1870 drivers/scsi/wd33c93.c 		d = 3;	/* divisor for 12-15 MHz input-clock */
d                1872 drivers/scsi/wd33c93.c 		d = 4;	/* divisor for 16-20 MHz input-clock */
d                1874 drivers/scsi/wd33c93.c 	d = (100000 * d) / 2 / mhz; /* 100 x DTCC / nanosec */
d                1879 drivers/scsi/wd33c93.c 		sx_table[i].period_ns = round_4((i+1)*d / 100);
d                 895 drivers/scsi/wd719x.c static int wd719x_pci_probe(struct pci_dev *pdev, const struct pci_device_id *d)
d                  14 drivers/sh/intc/access.c unsigned long intc_phys_to_virt(struct intc_desc_int *d, unsigned long address)
d                  20 drivers/sh/intc/access.c 	for (k = 0; k < d->nr_windows; k++) {
d                  21 drivers/sh/intc/access.c 		window = d->window + k;
d                  39 drivers/sh/intc/access.c unsigned int intc_get_reg(struct intc_desc_int *d, unsigned long address)
d                  43 drivers/sh/intc/access.c 	address = intc_phys_to_virt(d, address);
d                  45 drivers/sh/intc/access.c 	for (k = 0; k < d->nr_reg; k++) {
d                  46 drivers/sh/intc/access.c 		if (d->reg[k] == address)
d                  16 drivers/sh/intc/balancing.c 	struct intc_desc_int *d = get_intc_desc(irq);
d                  23 drivers/sh/intc/balancing.c 	addr = INTC_REG(d, _INTC_ADDR_D(handle), 0);
d                  29 drivers/sh/intc/balancing.c 	struct intc_desc_int *d = get_intc_desc(irq);
d                  36 drivers/sh/intc/balancing.c 	addr = INTC_REG(d, _INTC_ADDR_D(handle), 0);
d                  41 drivers/sh/intc/balancing.c 				   struct intc_desc_int *d,
d                  69 drivers/sh/intc/balancing.c 					intc_get_reg(d, reg_e),
d                  70 drivers/sh/intc/balancing.c 					intc_get_reg(d, reg_d),
d                  84 drivers/sh/intc/balancing.c 			  struct intc_desc_int *d, intc_enum id)
d                  95 drivers/sh/intc/balancing.c 	dist_handle[irq] = intc_dist_data(desc, d, id);
d                  19 drivers/sh/intc/chip.c 	struct intc_desc_int *d = get_intc_desc(irq);
d                  23 drivers/sh/intc/chip.c 	for (cpu = 0; cpu < SMP_NR(d, _INTC_ADDR_E(handle)); cpu++) {
d                  28 drivers/sh/intc/chip.c 		addr = INTC_REG(d, _INTC_ADDR_E(handle), cpu);
d                  44 drivers/sh/intc/chip.c 	struct intc_desc_int *d = get_intc_desc(irq);
d                  51 drivers/sh/intc/chip.c 	for (cpu = 0; cpu < SMP_NR(d, _INTC_ADDR_D(handle)); cpu++) {
d                  56 drivers/sh/intc/chip.c 		addr = INTC_REG(d, _INTC_ADDR_D(handle), cpu);
d                  84 drivers/sh/intc/chip.c 	struct intc_desc_int *d = get_intc_desc(irq);
d                  94 drivers/sh/intc/chip.c 		addr = (void __iomem *)INTC_REG(d, _INTC_ADDR_D(handle), 0);
d                 131 drivers/sh/intc/chip.c 	struct intc_desc_int *d = get_intc_desc(irq);
d                 138 drivers/sh/intc/chip.c 	ihp = intc_find_irq(d->prio, d->nr_prio, irq);
d                 177 drivers/sh/intc/chip.c 	struct intc_desc_int *d = get_intc_desc(irq);
d                 187 drivers/sh/intc/chip.c 	ihp = intc_find_irq(d->sense, d->nr_sense, irq);
d                 193 drivers/sh/intc/chip.c 		addr = INTC_REG(d, _INTC_ADDR_E(ihp->handle), 0);
d                  74 drivers/sh/intc/core.c 				     struct intc_desc_int *d,
d                  84 drivers/sh/intc/core.c 	radix_tree_insert(&d->tree, enum_id, intc_irq_xlate_get(irq));
d                  95 drivers/sh/intc/core.c 	data[0] = intc_get_mask_handle(desc, d, enum_id, 0);
d                  96 drivers/sh/intc/core.c 	data[1] = intc_get_prio_handle(desc, d, enum_id, 0);
d                 106 drivers/sh/intc/core.c 	data[0] = data[0] ? data[0] : intc_get_mask_handle(desc, d, enum_id, 1);
d                 107 drivers/sh/intc/core.c 	data[1] = data[1] ? data[1] : intc_get_prio_handle(desc, d, enum_id, 1);
d                 117 drivers/sh/intc/core.c 	irq_set_chip_and_handler_name(irq, &d->chip, handle_level_irq,
d                 132 drivers/sh/intc/core.c 		hp = d->prio + d->nr_prio;
d                 144 drivers/sh/intc/core.c 		d->nr_prio++;
d                 148 drivers/sh/intc/core.c 	data[0] = intc_get_sense_handle(desc, d, enum_id);
d                 150 drivers/sh/intc/core.c 		(d->sense + d->nr_sense)->irq = irq;
d                 151 drivers/sh/intc/core.c 		(d->sense + d->nr_sense)->handle = data[0];
d                 152 drivers/sh/intc/core.c 		d->nr_sense++;
d                 156 drivers/sh/intc/core.c 	d->chip.irq_mask(irq_data);
d                 158 drivers/sh/intc/core.c 	intc_set_ack_handle(irq, desc, d, enum_id);
d                 159 drivers/sh/intc/core.c 	intc_set_dist_handle(irq, desc, d, enum_id);
d                 164 drivers/sh/intc/core.c static unsigned int __init save_reg(struct intc_desc_int *d,
d                 170 drivers/sh/intc/core.c 		value = intc_phys_to_virt(d, value);
d                 172 drivers/sh/intc/core.c 		d->reg[cnt] = value;
d                 174 drivers/sh/intc/core.c 		d->smp[cnt] = smp;
d                 186 drivers/sh/intc/core.c 	struct intc_desc_int *d;
d                 192 drivers/sh/intc/core.c 	d = kzalloc(sizeof(*d), GFP_NOWAIT);
d                 193 drivers/sh/intc/core.c 	if (!d)
d                 196 drivers/sh/intc/core.c 	INIT_LIST_HEAD(&d->list);
d                 197 drivers/sh/intc/core.c 	list_add_tail(&d->list, &intc_list);
d                 199 drivers/sh/intc/core.c 	raw_spin_lock_init(&d->lock);
d                 200 drivers/sh/intc/core.c 	INIT_RADIX_TREE(&d->tree, GFP_ATOMIC);
d                 202 drivers/sh/intc/core.c 	d->index = nr_intc_controllers;
d                 205 drivers/sh/intc/core.c 		d->nr_windows = desc->num_resources;
d                 206 drivers/sh/intc/core.c 		d->window = kcalloc(d->nr_windows, sizeof(*d->window),
d                 208 drivers/sh/intc/core.c 		if (!d->window)
d                 211 drivers/sh/intc/core.c 		for (k = 0; k < d->nr_windows; k++) {
d                 214 drivers/sh/intc/core.c 			d->window[k].phys = res->start;
d                 215 drivers/sh/intc/core.c 			d->window[k].size = resource_size(res);
d                 216 drivers/sh/intc/core.c 			d->window[k].virt = ioremap_nocache(res->start,
d                 218 drivers/sh/intc/core.c 			if (!d->window[k].virt)
d                 223 drivers/sh/intc/core.c 	d->nr_reg = hw->mask_regs ? hw->nr_mask_regs * 2 : 0;
d                 225 drivers/sh/intc/core.c 	if (d->nr_reg)
d                 226 drivers/sh/intc/core.c 		d->nr_reg += hw->nr_mask_regs;
d                 228 drivers/sh/intc/core.c 	d->nr_reg += hw->prio_regs ? hw->nr_prio_regs * 2 : 0;
d                 229 drivers/sh/intc/core.c 	d->nr_reg += hw->sense_regs ? hw->nr_sense_regs : 0;
d                 230 drivers/sh/intc/core.c 	d->nr_reg += hw->ack_regs ? hw->nr_ack_regs : 0;
d                 231 drivers/sh/intc/core.c 	d->nr_reg += hw->subgroups ? hw->nr_subgroups : 0;
d                 233 drivers/sh/intc/core.c 	d->reg = kcalloc(d->nr_reg, sizeof(*d->reg), GFP_NOWAIT);
d                 234 drivers/sh/intc/core.c 	if (!d->reg)
d                 238 drivers/sh/intc/core.c 	d->smp = kcalloc(d->nr_reg, sizeof(*d->smp), GFP_NOWAIT);
d                 239 drivers/sh/intc/core.c 	if (!d->smp)
d                 247 drivers/sh/intc/core.c 			k += save_reg(d, k, hw->mask_regs[i].set_reg, smp);
d                 248 drivers/sh/intc/core.c 			k += save_reg(d, k, hw->mask_regs[i].clr_reg, smp);
d                 250 drivers/sh/intc/core.c 			k += save_reg(d, k, hw->mask_regs[i].dist_reg, 0);
d                 256 drivers/sh/intc/core.c 		d->prio = kcalloc(hw->nr_vectors, sizeof(*d->prio),
d                 258 drivers/sh/intc/core.c 		if (!d->prio)
d                 263 drivers/sh/intc/core.c 			k += save_reg(d, k, hw->prio_regs[i].set_reg, smp);
d                 264 drivers/sh/intc/core.c 			k += save_reg(d, k, hw->prio_regs[i].clr_reg, smp);
d                 267 drivers/sh/intc/core.c 		sort(d->prio, hw->nr_prio_regs, sizeof(*d->prio),
d                 272 drivers/sh/intc/core.c 		d->sense = kcalloc(hw->nr_vectors, sizeof(*d->sense),
d                 274 drivers/sh/intc/core.c 		if (!d->sense)
d                 278 drivers/sh/intc/core.c 			k += save_reg(d, k, hw->sense_regs[i].reg, 0);
d                 280 drivers/sh/intc/core.c 		sort(d->sense, hw->nr_sense_regs, sizeof(*d->sense),
d                 287 drivers/sh/intc/core.c 				k+= save_reg(d, k, hw->subgroups[i].reg, 0);
d                 289 drivers/sh/intc/core.c 	memcpy(&d->chip, &intc_irq_chip, sizeof(struct irq_chip));
d                 290 drivers/sh/intc/core.c 	d->chip.name = desc->name;
d                 294 drivers/sh/intc/core.c 			k += save_reg(d, k, hw->ack_regs[i].set_reg, 0);
d                 296 drivers/sh/intc/core.c 		d->chip.irq_mask_ack = d->chip.irq_disable;
d                 300 drivers/sh/intc/core.c 		intc_enable_disable_enum(desc, d, desc->force_disable, 0);
d                 304 drivers/sh/intc/core.c 		intc_enable_disable_enum(desc, d, desc->force_enable, 0);
d                 308 drivers/sh/intc/core.c 	intc_irq_domain_init(d, hw);
d                 319 drivers/sh/intc/core.c 		res = irq_create_identity_mapping(d->domain, irq);
d                 322 drivers/sh/intc/core.c 				res = irq_domain_associate(d->domain, irq, irq);
d                 333 drivers/sh/intc/core.c 		intc_irq_xlate_set(irq, vect->enum_id, d);
d                 334 drivers/sh/intc/core.c 		intc_register_irq(desc, d, vect->enum_id, irq);
d                 348 drivers/sh/intc/core.c 			res = irq_create_identity_mapping(d->domain, irq2);
d                 351 drivers/sh/intc/core.c 					res = irq_domain_associate(d->domain,
d                 375 drivers/sh/intc/core.c 	intc_subgroup_init(desc, d);
d                 379 drivers/sh/intc/core.c 		intc_enable_disable_enum(desc, d, desc->force_enable, 1);
d                 381 drivers/sh/intc/core.c 	d->skip_suspend = desc->skip_syscore_suspend;
d                 387 drivers/sh/intc/core.c 	kfree(d->prio);
d                 390 drivers/sh/intc/core.c 	kfree(d->smp);
d                 393 drivers/sh/intc/core.c 	kfree(d->reg);
d                 395 drivers/sh/intc/core.c 	for (k = 0; k < d->nr_windows; k++)
d                 396 drivers/sh/intc/core.c 		if (d->window[k].virt)
d                 397 drivers/sh/intc/core.c 			iounmap(d->window[k].virt);
d                 399 drivers/sh/intc/core.c 	kfree(d->window);
d                 401 drivers/sh/intc/core.c 	kfree(d);
d                 410 drivers/sh/intc/core.c 	struct intc_desc_int *d;
d                 412 drivers/sh/intc/core.c 	list_for_each_entry(d, &intc_list, list) {
d                 415 drivers/sh/intc/core.c 		if (d->skip_suspend)
d                 425 drivers/sh/intc/core.c 			if (chip != &d->chip)
d                 436 drivers/sh/intc/core.c 	struct intc_desc_int *d;
d                 438 drivers/sh/intc/core.c 	list_for_each_entry(d, &intc_list, list) {
d                 441 drivers/sh/intc/core.c 		if (d->skip_suspend)
d                 454 drivers/sh/intc/core.c 			if (chip != &d->chip)
d                 477 drivers/sh/intc/core.c 	struct intc_desc_int *d;
d                 479 drivers/sh/intc/core.c 	d = container_of(dev, struct intc_desc_int, dev);
d                 481 drivers/sh/intc/core.c 	return sprintf(buf, "%s\n", d->chip.name);
d                 488 drivers/sh/intc/core.c 	struct intc_desc_int *d;
d                 495 drivers/sh/intc/core.c 		list_for_each_entry(d, &intc_list, list) {
d                 496 drivers/sh/intc/core.c 			d->dev.id = d->index;
d                 497 drivers/sh/intc/core.c 			d->dev.bus = &intc_subsys;
d                 498 drivers/sh/intc/core.c 			error = device_register(&d->dev);
d                 500 drivers/sh/intc/core.c 				error = device_create_file(&d->dev,
d                  39 drivers/sh/intc/handle.c 					   struct intc_desc_int *d,
d                  75 drivers/sh/intc/handle.c 					intc_get_reg(d, reg_e),
d                  76 drivers/sh/intc/handle.c 					intc_get_reg(d, reg_d),
d                  89 drivers/sh/intc/handle.c intc_get_mask_handle(struct intc_desc *desc, struct intc_desc_int *d,
d                  96 drivers/sh/intc/handle.c 	ret = _intc_mask_data(desc, d, enum_id, &i, &j);
d                 101 drivers/sh/intc/handle.c 		return intc_get_mask_handle(desc, d, intc_grp_id(desc, enum_id), 0);
d                 107 drivers/sh/intc/handle.c 					   struct intc_desc_int *d,
d                 145 drivers/sh/intc/handle.c 					intc_get_reg(d, reg_e),
d                 146 drivers/sh/intc/handle.c 					intc_get_reg(d, reg_d),
d                 158 drivers/sh/intc/handle.c intc_get_prio_handle(struct intc_desc *desc, struct intc_desc_int *d,
d                 165 drivers/sh/intc/handle.c 	ret = _intc_prio_data(desc, d, enum_id, &i, &j);
d                 170 drivers/sh/intc/handle.c 		return intc_get_prio_handle(desc, d, intc_grp_id(desc, enum_id), 0);
d                 176 drivers/sh/intc/handle.c 				  struct intc_desc_int *d, intc_enum enum_id)
d                 196 drivers/sh/intc/handle.c 					intc_get_reg(d, reg_e),
d                 197 drivers/sh/intc/handle.c 					intc_get_reg(d, reg_d),
d                 206 drivers/sh/intc/handle.c static void intc_enable_disable(struct intc_desc_int *d,
d                 217 drivers/sh/intc/handle.c 		for (cpu = 0; cpu < SMP_NR(d, _INTC_ADDR_E(handle)); cpu++) {
d                 218 drivers/sh/intc/handle.c 			addr = INTC_REG(d, _INTC_ADDR_E(handle), cpu);
d                 223 drivers/sh/intc/handle.c 		for (cpu = 0; cpu < SMP_NR(d, _INTC_ADDR_D(handle)); cpu++) {
d                 224 drivers/sh/intc/handle.c 			addr = INTC_REG(d, _INTC_ADDR_D(handle), cpu);
d                 232 drivers/sh/intc/handle.c 				     struct intc_desc_int *d,
d                 240 drivers/sh/intc/handle.c 		data = _intc_mask_data(desc, d, enum_id, &i, &j);
d                 242 drivers/sh/intc/handle.c 			intc_enable_disable(d, data, enable);
d                 249 drivers/sh/intc/handle.c 		data = _intc_prio_data(desc, d, enum_id, &i, &j);
d                 251 drivers/sh/intc/handle.c 			intc_enable_disable(d, data, enable);
d                 258 drivers/sh/intc/handle.c intc_get_sense_handle(struct intc_desc *desc, struct intc_desc_int *d,
d                 278 drivers/sh/intc/handle.c 			return _INTC_MK(fn, 0, intc_get_reg(d, sr->reg),
d                 288 drivers/sh/intc/handle.c 			 struct intc_desc_int *d, intc_enum id)
d                 299 drivers/sh/intc/handle.c 	ack_handle[irq] = intc_ack_data(desc, d, id);
d                  24 drivers/sh/intc/internals.h #define INTC_REG(d, x, c)	(d->reg[(x)] + ((d->smp[(x)] & 0xff) * c))
d                  25 drivers/sh/intc/internals.h #define SMP_NR(d, x)		((d->smp[(x)] >> 8) ? (d->smp[(x)] >> 8) : 1)
d                  28 drivers/sh/intc/internals.h #define INTC_REG(d, x, c)	(d->reg[(x)])
d                  29 drivers/sh/intc/internals.h #define SMP_NR(d, x)		1
d                 134 drivers/sh/intc/internals.h unsigned long intc_phys_to_virt(struct intc_desc_int *d, unsigned long address);
d                 135 drivers/sh/intc/internals.h unsigned int intc_get_reg(struct intc_desc_int *d, unsigned long address);
d                 147 drivers/sh/intc/internals.h 			  struct intc_desc_int *d, intc_enum id);
d                 153 drivers/sh/intc/internals.h 		     struct intc_desc_int *d, intc_enum id) { }
d                 171 drivers/sh/intc/internals.h 				  struct intc_desc_int *d,
d                 174 drivers/sh/intc/internals.h 				  struct intc_desc_int *d,
d                 177 drivers/sh/intc/internals.h 				   struct intc_desc_int *d,
d                 180 drivers/sh/intc/internals.h 			 struct intc_desc_int *d, intc_enum id);
d                 182 drivers/sh/intc/internals.h void intc_enable_disable_enum(struct intc_desc *desc, struct intc_desc_int *d,
d                 186 drivers/sh/intc/internals.h void intc_irq_domain_init(struct intc_desc_int *d, struct intc_hw_desc *hw);
d                 189 drivers/sh/intc/internals.h void intc_subgroup_init(struct intc_desc *desc, struct intc_desc_int *d);
d                 190 drivers/sh/intc/internals.h void intc_irq_xlate_set(unsigned int irq, intc_enum id, struct intc_desc_int *d);
d                  27 drivers/sh/intc/irqdomain.c static int intc_evt_xlate(struct irq_domain *d, struct device_node *ctrlr,
d                  44 drivers/sh/intc/irqdomain.c void __init intc_irq_domain_init(struct intc_desc_int *d,
d                  62 drivers/sh/intc/irqdomain.c 		d->domain = irq_domain_add_linear(NULL, hw->nr_vectors,
d                  65 drivers/sh/intc/irqdomain.c 		d->domain = irq_domain_add_tree(NULL, &intc_evt_ops, NULL);
d                  67 drivers/sh/intc/irqdomain.c 	BUG_ON(!d->domain);
d                  35 drivers/sh/intc/virq.c void intc_irq_xlate_set(unsigned int irq, intc_enum id, struct intc_desc_int *d)
d                  41 drivers/sh/intc/virq.c 	intc_irq_xlate[irq].desc = d;
d                  53 drivers/sh/intc/virq.c 	struct intc_desc_int *d;
d                  56 drivers/sh/intc/virq.c 	list_for_each_entry(d, &intc_list, list) {
d                  59 drivers/sh/intc/virq.c 		if (strcmp(d->chip.name, chipname) != 0)
d                  68 drivers/sh/intc/virq.c 		tagged = radix_tree_tag_get(&d->tree, enum_id,
d                  73 drivers/sh/intc/virq.c 		ptr = radix_tree_lookup(&d->tree, enum_id);
d                 116 drivers/sh/intc/virq.c 	struct intc_desc_int *d = get_intc_desc(irq);
d                 126 drivers/sh/intc/virq.c 			addr = INTC_REG(d, _INTC_ADDR_E(handle), 0);
d                 136 drivers/sh/intc/virq.c 					       struct intc_desc_int *d,
d                 141 drivers/sh/intc/virq.c 	return _INTC_MK(fn, MODE_ENABLE_REG, intc_get_reg(d, subgroup->reg),
d                 146 drivers/sh/intc/virq.c 					  struct intc_desc_int *d,
d                 154 drivers/sh/intc/virq.c 	mapped = radix_tree_lookup(&d->tree, subgroup->parent_id);
d                 162 drivers/sh/intc/virq.c 	raw_spin_lock_irqsave(&d->lock, flags);
d                 177 drivers/sh/intc/virq.c 		entry->handle = intc_subgroup_data(subgroup, d, i);
d                 179 drivers/sh/intc/virq.c 		err = radix_tree_insert(&d->tree, entry->enum_id, entry);
d                 183 drivers/sh/intc/virq.c 		radix_tree_tag_set(&d->tree, entry->enum_id,
d                 187 drivers/sh/intc/virq.c 	raw_spin_unlock_irqrestore(&d->lock, flags);
d                 190 drivers/sh/intc/virq.c void __init intc_subgroup_init(struct intc_desc *desc, struct intc_desc_int *d)
d                 198 drivers/sh/intc/virq.c 		intc_subgroup_init_one(desc, d, desc->hw.subgroups + i);
d                 201 drivers/sh/intc/virq.c static void __init intc_subgroup_map(struct intc_desc_int *d)
d                 208 drivers/sh/intc/virq.c 	raw_spin_lock_irqsave(&d->lock, flags);
d                 211 drivers/sh/intc/virq.c 	nr_found = radix_tree_gang_lookup_tag_slot(&d->tree,
d                 236 drivers/sh/intc/virq.c 		intc_irq_xlate_set(irq, entry->enum_id, d);
d                 253 drivers/sh/intc/virq.c 		radix_tree_tag_clear(&d->tree, entry->enum_id,
d                 255 drivers/sh/intc/virq.c 		radix_tree_replace_slot(&d->tree, (void **)entries[i],
d                 259 drivers/sh/intc/virq.c 	raw_spin_unlock_irqrestore(&d->lock, flags);
d                 264 drivers/sh/intc/virq.c 	struct intc_desc_int *d;
d                 266 drivers/sh/intc/virq.c 	list_for_each_entry(d, &intc_list, list)
d                 267 drivers/sh/intc/virq.c 		if (radix_tree_tagged(&d->tree, INTC_TAG_VIRQ_NEEDS_ALLOC))
d                 268 drivers/sh/intc/virq.c 			intc_subgroup_map(d);
d                 263 drivers/slimbus/qcom-ctrl.c static irqreturn_t qcom_slim_interrupt(int irq, void *d)
d                 265 drivers/slimbus/qcom-ctrl.c 	struct qcom_slim_ctrl *ctrl = d;
d                 100 drivers/slimbus/qcom-ngd-ctrl.c #define to_ngd(d)	container_of(d, struct qcom_slim_ngd, dev)
d                 749 drivers/slimbus/qcom-ngd-ctrl.c static irqreturn_t qcom_slim_ngd_interrupt(int irq, void *d)
d                 751 drivers/slimbus/qcom-ngd-ctrl.c 	struct qcom_slim_ngd_ctrl *ctrl = d;
d                 108 drivers/slimbus/slimbus.h #define to_slim_framer(d) container_of(d, struct slim_framer, dev)
d                  42 drivers/soc/amlogic/meson-gx-pwrc-vpu.c struct meson_gx_pwrc_vpu *genpd_to_pd(struct generic_pm_domain *d)
d                  44 drivers/soc/amlogic/meson-gx-pwrc-vpu.c 	return container_of(d, struct meson_gx_pwrc_vpu, genpd);
d                  48 drivers/soc/fsl/dpio/dpio-service.c static inline struct dpaa2_io *service_select_by_cpu(struct dpaa2_io *d,
d                  51 drivers/soc/fsl/dpio/dpio-service.c 	if (d)
d                  52 drivers/soc/fsl/dpio/dpio-service.c 		return d;
d                  68 drivers/soc/fsl/dpio/dpio-service.c static inline struct dpaa2_io *service_select(struct dpaa2_io *d)
d                  70 drivers/soc/fsl/dpio/dpio-service.c 	if (d)
d                  71 drivers/soc/fsl/dpio/dpio-service.c 		return d;
d                  74 drivers/soc/fsl/dpio/dpio-service.c 	d = list_entry(dpio_list.next, struct dpaa2_io, node);
d                  75 drivers/soc/fsl/dpio/dpio-service.c 	list_del(&d->node);
d                  76 drivers/soc/fsl/dpio/dpio-service.c 	list_add_tail(&d->node, &dpio_list);
d                  79 drivers/soc/fsl/dpio/dpio-service.c 	return d;
d                 166 drivers/soc/fsl/dpio/dpio-service.c void dpaa2_io_down(struct dpaa2_io *d)
d                 169 drivers/soc/fsl/dpio/dpio-service.c 	dpio_by_cpu[d->dpio_desc.cpu] = NULL;
d                 170 drivers/soc/fsl/dpio/dpio-service.c 	list_del(&d->node);
d                 173 drivers/soc/fsl/dpio/dpio-service.c 	kfree(d);
d                 229 drivers/soc/fsl/dpio/dpio-service.c int dpaa2_io_get_cpu(struct dpaa2_io *d)
d                 231 drivers/soc/fsl/dpio/dpio-service.c 	return d->dpio_desc.cpu;
d                 254 drivers/soc/fsl/dpio/dpio-service.c int dpaa2_io_service_register(struct dpaa2_io *d,
d                 261 drivers/soc/fsl/dpio/dpio-service.c 	d = service_select_by_cpu(d, ctx->desired_cpu);
d                 262 drivers/soc/fsl/dpio/dpio-service.c 	if (!d)
d                 265 drivers/soc/fsl/dpio/dpio-service.c 	link = device_link_add(dev, d->dev, DL_FLAG_AUTOREMOVE_CONSUMER);
d                 269 drivers/soc/fsl/dpio/dpio-service.c 	ctx->dpio_id = d->dpio_desc.dpio_id;
d                 271 drivers/soc/fsl/dpio/dpio-service.c 	ctx->dpio_private = d;
d                 272 drivers/soc/fsl/dpio/dpio-service.c 	spin_lock_irqsave(&d->lock_notifications, irqflags);
d                 273 drivers/soc/fsl/dpio/dpio-service.c 	list_add(&ctx->node, &d->notifications);
d                 274 drivers/soc/fsl/dpio/dpio-service.c 	spin_unlock_irqrestore(&d->lock_notifications, irqflags);
d                 278 drivers/soc/fsl/dpio/dpio-service.c 		return qbman_swp_CDAN_set_context_enable(d->swp,
d                 298 drivers/soc/fsl/dpio/dpio-service.c 	struct dpaa2_io *d = ctx->dpio_private;
d                 302 drivers/soc/fsl/dpio/dpio-service.c 		qbman_swp_CDAN_disable(d->swp, (u16)ctx->id);
d                 304 drivers/soc/fsl/dpio/dpio-service.c 	spin_lock_irqsave(&d->lock_notifications, irqflags);
d                 306 drivers/soc/fsl/dpio/dpio-service.c 	spin_unlock_irqrestore(&d->lock_notifications, irqflags);
d                 324 drivers/soc/fsl/dpio/dpio-service.c int dpaa2_io_service_rearm(struct dpaa2_io *d,
d                 330 drivers/soc/fsl/dpio/dpio-service.c 	d = service_select_by_cpu(d, ctx->desired_cpu);
d                 331 drivers/soc/fsl/dpio/dpio-service.c 	if (!unlikely(d))
d                 334 drivers/soc/fsl/dpio/dpio-service.c 	spin_lock_irqsave(&d->lock_mgmt_cmd, irqflags);
d                 336 drivers/soc/fsl/dpio/dpio-service.c 		err = qbman_swp_CDAN_enable(d->swp, (u16)ctx->id);
d                 338 drivers/soc/fsl/dpio/dpio-service.c 		err = qbman_swp_fq_schedule(d->swp, ctx->id);
d                 339 drivers/soc/fsl/dpio/dpio-service.c 	spin_unlock_irqrestore(&d->lock_mgmt_cmd, irqflags);
d                 353 drivers/soc/fsl/dpio/dpio-service.c int dpaa2_io_service_pull_fq(struct dpaa2_io *d, u32 fqid,
d                 364 drivers/soc/fsl/dpio/dpio-service.c 	d = service_select(d);
d                 365 drivers/soc/fsl/dpio/dpio-service.c 	if (!d)
d                 367 drivers/soc/fsl/dpio/dpio-service.c 	s->swp = d->swp;
d                 368 drivers/soc/fsl/dpio/dpio-service.c 	err = qbman_swp_pull(d->swp, &pd);
d                 384 drivers/soc/fsl/dpio/dpio-service.c int dpaa2_io_service_pull_channel(struct dpaa2_io *d, u32 channelid,
d                 395 drivers/soc/fsl/dpio/dpio-service.c 	d = service_select(d);
d                 396 drivers/soc/fsl/dpio/dpio-service.c 	if (!d)
d                 399 drivers/soc/fsl/dpio/dpio-service.c 	s->swp = d->swp;
d                 400 drivers/soc/fsl/dpio/dpio-service.c 	err = qbman_swp_pull(d->swp, &pd);
d                 417 drivers/soc/fsl/dpio/dpio-service.c int dpaa2_io_service_enqueue_fq(struct dpaa2_io *d,
d                 423 drivers/soc/fsl/dpio/dpio-service.c 	d = service_select(d);
d                 424 drivers/soc/fsl/dpio/dpio-service.c 	if (!d)
d                 431 drivers/soc/fsl/dpio/dpio-service.c 	return qbman_swp_enqueue(d->swp, &ed, fd);
d                 446 drivers/soc/fsl/dpio/dpio-service.c int dpaa2_io_service_enqueue_qd(struct dpaa2_io *d,
d                 452 drivers/soc/fsl/dpio/dpio-service.c 	d = service_select(d);
d                 453 drivers/soc/fsl/dpio/dpio-service.c 	if (!d)
d                 460 drivers/soc/fsl/dpio/dpio-service.c 	return qbman_swp_enqueue(d->swp, &ed, fd);
d                 473 drivers/soc/fsl/dpio/dpio-service.c int dpaa2_io_service_release(struct dpaa2_io *d,
d                 480 drivers/soc/fsl/dpio/dpio-service.c 	d = service_select(d);
d                 481 drivers/soc/fsl/dpio/dpio-service.c 	if (!d)
d                 487 drivers/soc/fsl/dpio/dpio-service.c 	return qbman_swp_release(d->swp, &rd, buffers, num_buffers);
d                 502 drivers/soc/fsl/dpio/dpio-service.c int dpaa2_io_service_acquire(struct dpaa2_io *d,
d                 510 drivers/soc/fsl/dpio/dpio-service.c 	d = service_select(d);
d                 511 drivers/soc/fsl/dpio/dpio-service.c 	if (!d)
d                 514 drivers/soc/fsl/dpio/dpio-service.c 	spin_lock_irqsave(&d->lock_mgmt_cmd, irqflags);
d                 515 drivers/soc/fsl/dpio/dpio-service.c 	err = qbman_swp_acquire(d->swp, bpid, buffers, num_buffers);
d                 516 drivers/soc/fsl/dpio/dpio-service.c 	spin_unlock_irqrestore(&d->lock_mgmt_cmd, irqflags);
d                 651 drivers/soc/fsl/dpio/dpio-service.c int dpaa2_io_query_fq_count(struct dpaa2_io *d, u32 fqid,
d                 659 drivers/soc/fsl/dpio/dpio-service.c 	d = service_select(d);
d                 660 drivers/soc/fsl/dpio/dpio-service.c 	if (!d)
d                 663 drivers/soc/fsl/dpio/dpio-service.c 	swp = d->swp;
d                 664 drivers/soc/fsl/dpio/dpio-service.c 	spin_lock_irqsave(&d->lock_mgmt_cmd, irqflags);
d                 666 drivers/soc/fsl/dpio/dpio-service.c 	spin_unlock_irqrestore(&d->lock_mgmt_cmd, irqflags);
d                 685 drivers/soc/fsl/dpio/dpio-service.c int dpaa2_io_query_bp_count(struct dpaa2_io *d, u16 bpid, u32 *num)
d                 692 drivers/soc/fsl/dpio/dpio-service.c 	d = service_select(d);
d                 693 drivers/soc/fsl/dpio/dpio-service.c 	if (!d)
d                 696 drivers/soc/fsl/dpio/dpio-service.c 	swp = d->swp;
d                 697 drivers/soc/fsl/dpio/dpio-service.c 	spin_lock_irqsave(&d->lock_mgmt_cmd, irqflags);
d                 699 drivers/soc/fsl/dpio/dpio-service.c 	spin_unlock_irqrestore(&d->lock_mgmt_cmd, irqflags);
d                 157 drivers/soc/fsl/dpio/qbman-portal.c struct qbman_swp *qbman_swp_init(const struct qbman_swp_desc *d)
d                 164 drivers/soc/fsl/dpio/qbman-portal.c 	p->desc = d;
d                 186 drivers/soc/fsl/dpio/qbman-portal.c 	p->addr_cena = d->cena_bar;
d                 187 drivers/soc/fsl/dpio/qbman-portal.c 	p->addr_cinh = d->cinh_bar;
d                 386 drivers/soc/fsl/dpio/qbman-portal.c void qbman_eq_desc_clear(struct qbman_eq_desc *d)
d                 388 drivers/soc/fsl/dpio/qbman-portal.c 	memset(d, 0, sizeof(*d));
d                 397 drivers/soc/fsl/dpio/qbman-portal.c void qbman_eq_desc_set_no_orp(struct qbman_eq_desc *d, int respond_success)
d                 399 drivers/soc/fsl/dpio/qbman-portal.c 	d->verb &= ~(1 << QB_ENQUEUE_CMD_ORP_ENABLE_SHIFT);
d                 401 drivers/soc/fsl/dpio/qbman-portal.c 		d->verb |= enqueue_response_always;
d                 403 drivers/soc/fsl/dpio/qbman-portal.c 		d->verb |= enqueue_rejects_to_fq;
d                 418 drivers/soc/fsl/dpio/qbman-portal.c void qbman_eq_desc_set_fq(struct qbman_eq_desc *d, u32 fqid)
d                 420 drivers/soc/fsl/dpio/qbman-portal.c 	d->verb &= ~(1 << QB_ENQUEUE_CMD_TARGET_TYPE_SHIFT);
d                 421 drivers/soc/fsl/dpio/qbman-portal.c 	d->tgtid = cpu_to_le32(fqid);
d                 431 drivers/soc/fsl/dpio/qbman-portal.c void qbman_eq_desc_set_qd(struct qbman_eq_desc *d, u32 qdid,
d                 434 drivers/soc/fsl/dpio/qbman-portal.c 	d->verb |= 1 << QB_ENQUEUE_CMD_TARGET_TYPE_SHIFT;
d                 435 drivers/soc/fsl/dpio/qbman-portal.c 	d->tgtid = cpu_to_le32(qdid);
d                 436 drivers/soc/fsl/dpio/qbman-portal.c 	d->qdbin = cpu_to_le16(qd_bin);
d                 437 drivers/soc/fsl/dpio/qbman-portal.c 	d->qpri = qd_prio;
d                 467 drivers/soc/fsl/dpio/qbman-portal.c int qbman_swp_enqueue(struct qbman_swp *s, const struct qbman_eq_desc *d,
d                 477 drivers/soc/fsl/dpio/qbman-portal.c 	memcpy(&p->dca, &d->dca, 31);
d                 483 drivers/soc/fsl/dpio/qbman-portal.c 		p->verb = d->verb | EQAR_VB(eqar);
d                 485 drivers/soc/fsl/dpio/qbman-portal.c 		p->verb = d->verb | EQAR_VB(eqar);
d                 552 drivers/soc/fsl/dpio/qbman-portal.c void qbman_pull_desc_clear(struct qbman_pull_desc *d)
d                 554 drivers/soc/fsl/dpio/qbman-portal.c 	memset(d, 0, sizeof(*d));
d                 570 drivers/soc/fsl/dpio/qbman-portal.c void qbman_pull_desc_set_storage(struct qbman_pull_desc *d,
d                 576 drivers/soc/fsl/dpio/qbman-portal.c 	d->rsp_addr_virt = (u64)(uintptr_t)storage;
d                 579 drivers/soc/fsl/dpio/qbman-portal.c 		d->verb &= ~(1 << QB_VDQCR_VERB_RLS_SHIFT);
d                 582 drivers/soc/fsl/dpio/qbman-portal.c 	d->verb |= 1 << QB_VDQCR_VERB_RLS_SHIFT;
d                 584 drivers/soc/fsl/dpio/qbman-portal.c 		d->verb |= 1 << QB_VDQCR_VERB_WAE_SHIFT;
d                 586 drivers/soc/fsl/dpio/qbman-portal.c 		d->verb &= ~(1 << QB_VDQCR_VERB_WAE_SHIFT);
d                 588 drivers/soc/fsl/dpio/qbman-portal.c 	d->rsp_addr = cpu_to_le64(storage_phys);
d                 596 drivers/soc/fsl/dpio/qbman-portal.c void qbman_pull_desc_set_numframes(struct qbman_pull_desc *d, u8 numframes)
d                 598 drivers/soc/fsl/dpio/qbman-portal.c 	d->numf = numframes - 1;
d                 613 drivers/soc/fsl/dpio/qbman-portal.c void qbman_pull_desc_set_fq(struct qbman_pull_desc *d, u32 fqid)
d                 615 drivers/soc/fsl/dpio/qbman-portal.c 	d->verb |= 1 << QB_VDQCR_VERB_DCT_SHIFT;
d                 616 drivers/soc/fsl/dpio/qbman-portal.c 	d->verb |= qb_pull_dt_framequeue << QB_VDQCR_VERB_DT_SHIFT;
d                 617 drivers/soc/fsl/dpio/qbman-portal.c 	d->dq_src = cpu_to_le32(fqid);
d                 625 drivers/soc/fsl/dpio/qbman-portal.c void qbman_pull_desc_set_wq(struct qbman_pull_desc *d, u32 wqid,
d                 628 drivers/soc/fsl/dpio/qbman-portal.c 	d->verb |= dct << QB_VDQCR_VERB_DCT_SHIFT;
d                 629 drivers/soc/fsl/dpio/qbman-portal.c 	d->verb |= qb_pull_dt_workqueue << QB_VDQCR_VERB_DT_SHIFT;
d                 630 drivers/soc/fsl/dpio/qbman-portal.c 	d->dq_src = cpu_to_le32(wqid);
d                 639 drivers/soc/fsl/dpio/qbman-portal.c void qbman_pull_desc_set_channel(struct qbman_pull_desc *d, u32 chid,
d                 642 drivers/soc/fsl/dpio/qbman-portal.c 	d->verb |= dct << QB_VDQCR_VERB_DCT_SHIFT;
d                 643 drivers/soc/fsl/dpio/qbman-portal.c 	d->verb |= qb_pull_dt_channel << QB_VDQCR_VERB_DT_SHIFT;
d                 644 drivers/soc/fsl/dpio/qbman-portal.c 	d->dq_src = cpu_to_le32(chid);
d                 656 drivers/soc/fsl/dpio/qbman-portal.c int qbman_swp_pull(struct qbman_swp *s, struct qbman_pull_desc *d)
d                 664 drivers/soc/fsl/dpio/qbman-portal.c 	s->vdq.storage = (void *)(uintptr_t)d->rsp_addr_virt;
d                 669 drivers/soc/fsl/dpio/qbman-portal.c 	p->numf = d->numf;
d                 671 drivers/soc/fsl/dpio/qbman-portal.c 	p->dq_src = d->dq_src;
d                 672 drivers/soc/fsl/dpio/qbman-portal.c 	p->rsp_addr = d->rsp_addr;
d                 673 drivers/soc/fsl/dpio/qbman-portal.c 	p->rsp_addr_virt = d->rsp_addr_virt;
d                 678 drivers/soc/fsl/dpio/qbman-portal.c 		p->verb = d->verb | s->vdq.valid_bit;
d                 681 drivers/soc/fsl/dpio/qbman-portal.c 		p->verb = d->verb | s->vdq.valid_bit;
d                 844 drivers/soc/fsl/dpio/qbman-portal.c void qbman_release_desc_clear(struct qbman_release_desc *d)
d                 846 drivers/soc/fsl/dpio/qbman-portal.c 	memset(d, 0, sizeof(*d));
d                 847 drivers/soc/fsl/dpio/qbman-portal.c 	d->verb = 1 << 5; /* Release Command Valid */
d                 853 drivers/soc/fsl/dpio/qbman-portal.c void qbman_release_desc_set_bpid(struct qbman_release_desc *d, u16 bpid)
d                 855 drivers/soc/fsl/dpio/qbman-portal.c 	d->bpid = cpu_to_le16(bpid);
d                 862 drivers/soc/fsl/dpio/qbman-portal.c void qbman_release_desc_set_rcdi(struct qbman_release_desc *d, int enable)
d                 865 drivers/soc/fsl/dpio/qbman-portal.c 		d->verb |= 1 << 6;
d                 867 drivers/soc/fsl/dpio/qbman-portal.c 		d->verb &= ~(1 << 6);
d                 883 drivers/soc/fsl/dpio/qbman-portal.c int qbman_swp_release(struct qbman_swp *s, const struct qbman_release_desc *d,
d                 905 drivers/soc/fsl/dpio/qbman-portal.c 	p->bpid = d->bpid;
d                 913 drivers/soc/fsl/dpio/qbman-portal.c 		p->verb = d->verb | RAR_VB(rar) | num_buffers;
d                 915 drivers/soc/fsl/dpio/qbman-portal.c 		p->verb = d->verb | RAR_VB(rar) | num_buffers;
d                 137 drivers/soc/fsl/dpio/qbman-portal.h struct qbman_swp *qbman_swp_init(const struct qbman_swp_desc *d);
d                 149 drivers/soc/fsl/dpio/qbman-portal.h void qbman_pull_desc_clear(struct qbman_pull_desc *d);
d                 150 drivers/soc/fsl/dpio/qbman-portal.h void qbman_pull_desc_set_storage(struct qbman_pull_desc *d,
d                 154 drivers/soc/fsl/dpio/qbman-portal.h void qbman_pull_desc_set_numframes(struct qbman_pull_desc *d, u8 numframes);
d                 155 drivers/soc/fsl/dpio/qbman-portal.h void qbman_pull_desc_set_fq(struct qbman_pull_desc *d, u32 fqid);
d                 156 drivers/soc/fsl/dpio/qbman-portal.h void qbman_pull_desc_set_wq(struct qbman_pull_desc *d, u32 wqid,
d                 158 drivers/soc/fsl/dpio/qbman-portal.h void qbman_pull_desc_set_channel(struct qbman_pull_desc *d, u32 chid,
d                 161 drivers/soc/fsl/dpio/qbman-portal.h int qbman_swp_pull(struct qbman_swp *p, struct qbman_pull_desc *d);
d                 168 drivers/soc/fsl/dpio/qbman-portal.h void qbman_eq_desc_clear(struct qbman_eq_desc *d);
d                 169 drivers/soc/fsl/dpio/qbman-portal.h void qbman_eq_desc_set_no_orp(struct qbman_eq_desc *d, int respond_success);
d                 170 drivers/soc/fsl/dpio/qbman-portal.h void qbman_eq_desc_set_token(struct qbman_eq_desc *d, u8 token);
d                 171 drivers/soc/fsl/dpio/qbman-portal.h void qbman_eq_desc_set_fq(struct qbman_eq_desc *d, u32 fqid);
d                 172 drivers/soc/fsl/dpio/qbman-portal.h void qbman_eq_desc_set_qd(struct qbman_eq_desc *d, u32 qdid,
d                 175 drivers/soc/fsl/dpio/qbman-portal.h int qbman_swp_enqueue(struct qbman_swp *p, const struct qbman_eq_desc *d,
d                 178 drivers/soc/fsl/dpio/qbman-portal.h void qbman_release_desc_clear(struct qbman_release_desc *d);
d                 179 drivers/soc/fsl/dpio/qbman-portal.h void qbman_release_desc_set_bpid(struct qbman_release_desc *d, u16 bpid);
d                 180 drivers/soc/fsl/dpio/qbman-portal.h void qbman_release_desc_set_rcdi(struct qbman_release_desc *d, int enable);
d                 182 drivers/soc/fsl/dpio/qbman-portal.h int qbman_swp_release(struct qbman_swp *s, const struct qbman_release_desc *d,
d                 190 drivers/soc/fsl/qe/qe_ic.c static inline struct qe_ic *qe_ic_from_irq_data(struct irq_data *d)
d                 192 drivers/soc/fsl/qe/qe_ic.c 	return irq_data_get_irq_chip_data(d);
d                 195 drivers/soc/fsl/qe/qe_ic.c static void qe_ic_unmask_irq(struct irq_data *d)
d                 197 drivers/soc/fsl/qe/qe_ic.c 	struct qe_ic *qe_ic = qe_ic_from_irq_data(d);
d                 198 drivers/soc/fsl/qe/qe_ic.c 	unsigned int src = irqd_to_hwirq(d);
d                 211 drivers/soc/fsl/qe/qe_ic.c static void qe_ic_mask_irq(struct irq_data *d)
d                 213 drivers/soc/fsl/qe/qe_ic.c 	struct qe_ic *qe_ic = qe_ic_from_irq_data(d);
d                 214 drivers/soc/fsl/qe/qe_ic.c 	unsigned int src = irqd_to_hwirq(d);
d                 286 drivers/soc/qcom/smp2p.c static int smp2p_irq_map(struct irq_domain *d,
d                 290 drivers/soc/qcom/smp2p.c 	struct smp2p_entry *entry = d->host_data;
d                 317 drivers/soc/qcom/smsm.c static int smsm_irq_map(struct irq_domain *d,
d                 321 drivers/soc/qcom/smsm.c 	struct smsm_entry *entry = d->host_data;
d                  17 drivers/soc/qcom/trace-rpmh.h 	TP_PROTO(struct rsc_drv *d, int m, const struct tcs_request *r, int e),
d                  19 drivers/soc/qcom/trace-rpmh.h 	TP_ARGS(d, m, r, e),
d                  22 drivers/soc/qcom/trace-rpmh.h 			 __string(name, d->name)
d                  30 drivers/soc/qcom/trace-rpmh.h 		       __assign_str(name, d->name);
d                  44 drivers/soc/qcom/trace-rpmh.h 	TP_PROTO(struct rsc_drv *d, int m, int n, u32 h,
d                  47 drivers/soc/qcom/trace-rpmh.h 	TP_ARGS(d, m, n, h, c),
d                  50 drivers/soc/qcom/trace-rpmh.h 			 __string(name, d->name)
d                  60 drivers/soc/qcom/trace-rpmh.h 		       __assign_str(name, d->name);
d                 176 drivers/soc/renesas/rcar-sysc.c static inline struct rcar_sysc_pd *to_rcar_pd(struct generic_pm_domain *d)
d                 178 drivers/soc/renesas/rcar-sysc.c 	return container_of(d, struct rcar_sysc_pd, genpd);
d                  43 drivers/soc/renesas/rmobile-sysc.c struct rmobile_pm_domain *to_rmobile_pd(struct generic_pm_domain *d)
d                  45 drivers/soc/renesas/rmobile-sysc.c 	return container_of(d, struct rmobile_pm_domain, genpd);
d                 324 drivers/soc/sunxi/sunxi_sram.c 	struct dentry *d;
d                 341 drivers/soc/sunxi/sunxi_sram.c 	d = debugfs_create_file("sram", S_IRUGO, NULL, NULL,
d                 343 drivers/soc/sunxi/sunxi_sram.c 	if (!d)
d                 124 drivers/soundwire/debugfs.c 	struct dentry *d;
d                 131 drivers/soundwire/debugfs.c 	d = debugfs_create_dir(name, master);
d                 133 drivers/soundwire/debugfs.c 	debugfs_create_file("registers", 0400, d, slave, &sdw_slave_reg_fops);
d                 135 drivers/soundwire/debugfs.c 	slave->debugfs = d;
d                  73 drivers/spi/spi-cavium.c 			u8 d;
d                  75 drivers/spi/spi-cavium.c 				d = *tx_buf++;
d                  77 drivers/spi/spi-cavium.c 				d = 0;
d                  78 drivers/spi/spi-cavium.c 			writeq(d, p->register_base + OCTEON_SPI_DAT0(p) + (8 * i));
d                  97 drivers/spi/spi-cavium.c 		u8 d;
d                  99 drivers/spi/spi-cavium.c 			d = *tx_buf++;
d                 101 drivers/spi/spi-cavium.c 			d = 0;
d                 102 drivers/spi/spi-cavium.c 		writeq(d, p->register_base + OCTEON_SPI_DAT0(p) + (8 * i));
d                 363 drivers/spi/spi-dln2.c 		__le16 *d = (__le16 *)dln2_buf;
d                 368 drivers/spi/spi-dln2.c 			*d++ = cpu_to_le16p(s++);
d                 370 drivers/spi/spi-dln2.c 		__le32 *d = (__le32 *)dln2_buf;
d                 375 drivers/spi/spi-dln2.c 			*d++ = cpu_to_le32p(s++);
d                 396 drivers/spi/spi-dln2.c 		u16 *d = (u16 *)dest;
d                 401 drivers/spi/spi-dln2.c 			*d++ = le16_to_cpup(s++);
d                 403 drivers/spi/spi-dln2.c 		u32 *d = (u32 *)dest;
d                 408 drivers/spi/spi-dln2.c 			*d++ = get_unaligned_le32(s++);
d                 119 drivers/spi/spi-imx.c static inline int is_imx27_cspi(struct spi_imx_data *d)
d                 121 drivers/spi/spi-imx.c 	return d->devtype_data->devtype == IMX27_CSPI;
d                 124 drivers/spi/spi-imx.c static inline int is_imx35_cspi(struct spi_imx_data *d)
d                 126 drivers/spi/spi-imx.c 	return d->devtype_data->devtype == IMX35_CSPI;
d                 129 drivers/spi/spi-imx.c static inline int is_imx51_ecspi(struct spi_imx_data *d)
d                 131 drivers/spi/spi-imx.c 	return d->devtype_data->devtype == IMX51_ECSPI;
d                 134 drivers/spi/spi-imx.c static inline int is_imx53_ecspi(struct spi_imx_data *d)
d                 136 drivers/spi/spi-imx.c 	return d->devtype_data->devtype == IMX53_ECSPI;
d                 128 drivers/spi/spi-lm70llp.c static inline void spidelay(unsigned d)
d                 130 drivers/spi/spi-lm70llp.c 	udelay(d);
d                 460 drivers/spmi/spmi-pmic-arb.c static void qpnpint_spmi_write(struct irq_data *d, u8 reg, void *buf,
d                 463 drivers/spmi/spmi-pmic-arb.c 	struct spmi_pmic_arb *pmic_arb = irq_data_get_irq_chip_data(d);
d                 464 drivers/spmi/spmi-pmic-arb.c 	u8 sid = hwirq_to_sid(d->hwirq);
d                 465 drivers/spmi/spmi-pmic-arb.c 	u8 per = hwirq_to_per(d->hwirq);
d                 470 drivers/spmi/spmi-pmic-arb.c 				    d->irq);
d                 473 drivers/spmi/spmi-pmic-arb.c static void qpnpint_spmi_read(struct irq_data *d, u8 reg, void *buf, size_t len)
d                 475 drivers/spmi/spmi-pmic-arb.c 	struct spmi_pmic_arb *pmic_arb = irq_data_get_irq_chip_data(d);
d                 476 drivers/spmi/spmi-pmic-arb.c 	u8 sid = hwirq_to_sid(d->hwirq);
d                 477 drivers/spmi/spmi-pmic-arb.c 	u8 per = hwirq_to_per(d->hwirq);
d                 482 drivers/spmi/spmi-pmic-arb.c 				    d->irq);
d                 557 drivers/spmi/spmi-pmic-arb.c static void qpnpint_irq_ack(struct irq_data *d)
d                 559 drivers/spmi/spmi-pmic-arb.c 	struct spmi_pmic_arb *pmic_arb = irq_data_get_irq_chip_data(d);
d                 560 drivers/spmi/spmi-pmic-arb.c 	u8 irq = hwirq_to_irq(d->hwirq);
d                 561 drivers/spmi/spmi-pmic-arb.c 	u16 apid = hwirq_to_apid(d->hwirq);
d                 567 drivers/spmi/spmi-pmic-arb.c 	qpnpint_spmi_write(d, QPNPINT_REG_LATCHED_CLR, &data, 1);
d                 570 drivers/spmi/spmi-pmic-arb.c static void qpnpint_irq_mask(struct irq_data *d)
d                 572 drivers/spmi/spmi-pmic-arb.c 	u8 irq = hwirq_to_irq(d->hwirq);
d                 575 drivers/spmi/spmi-pmic-arb.c 	qpnpint_spmi_write(d, QPNPINT_REG_EN_CLR, &data, 1);
d                 578 drivers/spmi/spmi-pmic-arb.c static void qpnpint_irq_unmask(struct irq_data *d)
d                 580 drivers/spmi/spmi-pmic-arb.c 	struct spmi_pmic_arb *pmic_arb = irq_data_get_irq_chip_data(d);
d                 582 drivers/spmi/spmi-pmic-arb.c 	u8 irq = hwirq_to_irq(d->hwirq);
d                 583 drivers/spmi/spmi-pmic-arb.c 	u16 apid = hwirq_to_apid(d->hwirq);
d                 589 drivers/spmi/spmi-pmic-arb.c 	qpnpint_spmi_read(d, QPNPINT_REG_EN_SET, &buf[0], 1);
d                 598 drivers/spmi/spmi-pmic-arb.c 		qpnpint_spmi_write(d, QPNPINT_REG_LATCHED_CLR, &buf, 2);
d                 602 drivers/spmi/spmi-pmic-arb.c static int qpnpint_irq_set_type(struct irq_data *d, unsigned int flow_type)
d                 606 drivers/spmi/spmi-pmic-arb.c 	u8 irq = hwirq_to_irq(d->hwirq);
d                 608 drivers/spmi/spmi-pmic-arb.c 	qpnpint_spmi_read(d, QPNPINT_REG_SET_TYPE, &type, sizeof(type));
d                 632 drivers/spmi/spmi-pmic-arb.c 	qpnpint_spmi_write(d, QPNPINT_REG_SET_TYPE, &type, sizeof(type));
d                 633 drivers/spmi/spmi-pmic-arb.c 	irq_set_handler_locked(d, flow_handler);
d                 638 drivers/spmi/spmi-pmic-arb.c static int qpnpint_irq_set_wake(struct irq_data *d, unsigned int on)
d                 640 drivers/spmi/spmi-pmic-arb.c 	struct spmi_pmic_arb *pmic_arb = irq_data_get_irq_chip_data(d);
d                 645 drivers/spmi/spmi-pmic-arb.c static int qpnpint_get_irqchip_state(struct irq_data *d,
d                 649 drivers/spmi/spmi-pmic-arb.c 	u8 irq = hwirq_to_irq(d->hwirq);
d                 655 drivers/spmi/spmi-pmic-arb.c 	qpnpint_spmi_read(d, QPNPINT_REG_RT_STS, &status, 1);
d                 662 drivers/spmi/spmi-pmic-arb.c 				       struct irq_data *d, bool reserve)
d                 664 drivers/spmi/spmi-pmic-arb.c 	struct spmi_pmic_arb *pmic_arb = irq_data_get_irq_chip_data(d);
d                 665 drivers/spmi/spmi-pmic-arb.c 	u16 periph = hwirq_to_per(d->hwirq);
d                 666 drivers/spmi/spmi-pmic-arb.c 	u16 apid = hwirq_to_apid(d->hwirq);
d                 667 drivers/spmi/spmi-pmic-arb.c 	u16 sid = hwirq_to_sid(d->hwirq);
d                 668 drivers/spmi/spmi-pmic-arb.c 	u16 irq = hwirq_to_irq(d->hwirq);
d                 691 drivers/spmi/spmi-pmic-arb.c static int qpnpint_irq_domain_translate(struct irq_domain *d,
d                 696 drivers/spmi/spmi-pmic-arb.c 	struct spmi_pmic_arb *pmic_arb = d->host_data;
d                 704 drivers/spmi/spmi-pmic-arb.c 	if (irq_domain_get_of_node(d) != pmic_arb->spmic->dev.of_node)
d                  97 drivers/ssb/driver_gpio.c static void ssb_gpio_irq_chipco_mask(struct irq_data *d)
d                  99 drivers/ssb/driver_gpio.c 	struct ssb_bus *bus = irq_data_get_irq_chip_data(d);
d                 100 drivers/ssb/driver_gpio.c 	int gpio = irqd_to_hwirq(d);
d                 105 drivers/ssb/driver_gpio.c static void ssb_gpio_irq_chipco_unmask(struct irq_data *d)
d                 107 drivers/ssb/driver_gpio.c 	struct ssb_bus *bus = irq_data_get_irq_chip_data(d);
d                 108 drivers/ssb/driver_gpio.c 	int gpio = irqd_to_hwirq(d);
d                 294 drivers/ssb/driver_gpio.c static void ssb_gpio_irq_extif_mask(struct irq_data *d)
d                 296 drivers/ssb/driver_gpio.c 	struct ssb_bus *bus = irq_data_get_irq_chip_data(d);
d                 297 drivers/ssb/driver_gpio.c 	int gpio = irqd_to_hwirq(d);
d                 302 drivers/ssb/driver_gpio.c static void ssb_gpio_irq_extif_unmask(struct irq_data *d)
d                 304 drivers/ssb/driver_gpio.c 	struct ssb_bus *bus = irq_data_get_irq_chip_data(d);
d                 305 drivers/ssb/driver_gpio.c 	int gpio = irqd_to_hwirq(d);
d                 259 drivers/ssb/driver_pcicore.c int ssb_pcicore_plat_dev_init(struct pci_dev *d)
d                 261 drivers/ssb/driver_pcicore.c 	if (d->bus->ops != &ssb_pcicore_pciops) {
d                 266 drivers/ssb/driver_pcicore.c 	dev_info(&d->dev, "PCI: Fixing up device %s\n", pci_name(d));
d                 269 drivers/ssb/driver_pcicore.c 	d->irq = ssb_mips_irq(extpci_core->dev) + 2;
d                 270 drivers/ssb/driver_pcicore.c 	pci_write_config_byte(d, PCI_INTERRUPT_LINE, d->irq);
d                  22 drivers/ssb/pcihost_wrapper.c static int ssb_pcihost_suspend(struct device *d)
d                  24 drivers/ssb/pcihost_wrapper.c 	struct pci_dev *dev = to_pci_dev(d);
d                  36 drivers/ssb/pcihost_wrapper.c 	device_set_wakeup_enable(d, d->power.wakeup_path);
d                  43 drivers/ssb/pcihost_wrapper.c static int ssb_pcihost_resume(struct device *d)
d                  45 drivers/ssb/pcihost_wrapper.c 	struct pci_dev *dev = to_pci_dev(d);
d                 249 drivers/staging/comedi/drivers/addi_apci_1032.c static irqreturn_t apci1032_interrupt(int irq, void *d)
d                 251 drivers/staging/comedi/drivers/addi_apci_1032.c 	struct comedi_device *dev = d;
d                 206 drivers/staging/comedi/drivers/addi_apci_1500.c static irqreturn_t apci1500_interrupt(int irq, void *d)
d                 208 drivers/staging/comedi/drivers/addi_apci_1500.c 	struct comedi_device *dev = d;
d                 203 drivers/staging/comedi/drivers/addi_apci_1564.c static irqreturn_t apci1564_interrupt(int irq, void *d)
d                 205 drivers/staging/comedi/drivers/addi_apci_1564.c 	struct comedi_device *dev = d;
d                 162 drivers/staging/comedi/drivers/addi_apci_2032.c static irqreturn_t apci2032_interrupt(int irq, void *d)
d                 164 drivers/staging/comedi/drivers/addi_apci_2032.c 	struct comedi_device *dev = d;
d                 469 drivers/staging/comedi/drivers/addi_apci_3120.c static irqreturn_t apci3120_interrupt(int irq, void *d)
d                 471 drivers/staging/comedi/drivers/addi_apci_3120.c 	struct comedi_device *dev = d;
d                 348 drivers/staging/comedi/drivers/addi_apci_3xxx.c static irqreturn_t apci3xxx_irq_handler(int irq, void *d)
d                 350 drivers/staging/comedi/drivers/addi_apci_3xxx.c 	struct comedi_device *dev = d;
d                 677 drivers/staging/comedi/drivers/adl_pci9118.c static irqreturn_t pci9118_interrupt(int irq, void *d)
d                 679 drivers/staging/comedi/drivers/adl_pci9118.c 	struct comedi_device *dev = d;
d                 480 drivers/staging/comedi/drivers/adv_pci1710.c static irqreturn_t pci1710_irq_handler(int irq, void *d)
d                 482 drivers/staging/comedi/drivers/adv_pci1710.c 	struct comedi_device *dev = d;
d                 349 drivers/staging/comedi/drivers/adv_pci_dio.c 	const struct diosubd_data *d;
d                 376 drivers/staging/comedi/drivers/adv_pci_dio.c 		d = &board->sdi[i];
d                 377 drivers/staging/comedi/drivers/adv_pci_dio.c 		if (d->chans) {
d                 381 drivers/staging/comedi/drivers/adv_pci_dio.c 			s->n_chan	= d->chans;
d                 387 drivers/staging/comedi/drivers/adv_pci_dio.c 			s->private	= (void *)d->addr;
d                 392 drivers/staging/comedi/drivers/adv_pci_dio.c 		d = &board->sdo[i];
d                 393 drivers/staging/comedi/drivers/adv_pci_dio.c 		if (d->chans) {
d                 397 drivers/staging/comedi/drivers/adv_pci_dio.c 			s->n_chan	= d->chans;
d                 403 drivers/staging/comedi/drivers/adv_pci_dio.c 			s->private	= (void *)d->addr;
d                 407 drivers/staging/comedi/drivers/adv_pci_dio.c 				outw(0, dev->iobase + d->addr);
d                 409 drivers/staging/comedi/drivers/adv_pci_dio.c 					outw(0, dev->iobase + d->addr + 2);
d                 411 drivers/staging/comedi/drivers/adv_pci_dio.c 				outb(0, dev->iobase + d->addr);
d                 413 drivers/staging/comedi/drivers/adv_pci_dio.c 					outb(0, dev->iobase + d->addr + 1);
d                 415 drivers/staging/comedi/drivers/adv_pci_dio.c 					outb(0, dev->iobase + d->addr + 2);
d                 417 drivers/staging/comedi/drivers/adv_pci_dio.c 					outb(0, dev->iobase + d->addr + 3);
d                 423 drivers/staging/comedi/drivers/adv_pci_dio.c 		d = &board->sdio[i];
d                 424 drivers/staging/comedi/drivers/adv_pci_dio.c 		for (j = 0; j < d->chans; j++) {
d                 427 drivers/staging/comedi/drivers/adv_pci_dio.c 					       d->addr + j * I8255_SIZE);
d                  56 drivers/staging/comedi/drivers/aio_iiro_16.c static irqreturn_t aio_iiro_16_cos(int irq, void *d)
d                  58 drivers/staging/comedi/drivers/aio_iiro_16.c 	struct comedi_device *dev = d;
d                 458 drivers/staging/comedi/drivers/amplc_dio200_common.c static irqreturn_t dio200_interrupt(int irq, void *d)
d                 460 drivers/staging/comedi/drivers/amplc_dio200_common.c 	struct comedi_device *dev = d;
d                 121 drivers/staging/comedi/drivers/amplc_pc236_common.c static irqreturn_t pc236_interrupt(int irq, void *d)
d                 123 drivers/staging/comedi/drivers/amplc_pc236_common.c 	struct comedi_device *dev = d;
d                 945 drivers/staging/comedi/drivers/amplc_pci224.c static irqreturn_t pci224_interrupt(int irq, void *d)
d                 947 drivers/staging/comedi/drivers/amplc_pci224.c 	struct comedi_device *dev = d;
d                2281 drivers/staging/comedi/drivers/amplc_pci230.c static irqreturn_t pci230_interrupt(int irq, void *d)
d                2284 drivers/staging/comedi/drivers/amplc_pci230.c 	struct comedi_device *dev = d;
d                1203 drivers/staging/comedi/drivers/cb_pcidas.c static irqreturn_t cb_pcidas_interrupt(int irq, void *d)
d                1205 drivers/staging/comedi/drivers/cb_pcidas.c 	struct comedi_device *dev = d;
d                3043 drivers/staging/comedi/drivers/cb_pcidas64.c static irqreturn_t handle_interrupt(int irq, void *d)
d                3045 drivers/staging/comedi/drivers/cb_pcidas64.c 	struct comedi_device *dev = d;
d                 180 drivers/staging/comedi/drivers/cb_pcimdas.c 	unsigned int d;
d                 186 drivers/staging/comedi/drivers/cb_pcimdas.c 	d = inb(devpriv->BADR3 + PCIMDAS_PACER_REG);
d                 187 drivers/staging/comedi/drivers/cb_pcimdas.c 	if ((d & PCIMDAS_PACER_SRC_MASK) != PCIMDAS_PACER_SRC_POLLED) {
d                 188 drivers/staging/comedi/drivers/cb_pcimdas.c 		d &= ~PCIMDAS_PACER_SRC_MASK;
d                 189 drivers/staging/comedi/drivers/cb_pcimdas.c 		d |= PCIMDAS_PACER_SRC_POLLED;
d                 190 drivers/staging/comedi/drivers/cb_pcimdas.c 		outb(d, devpriv->BADR3 + PCIMDAS_PACER_REG);
d                 184 drivers/staging/comedi/drivers/comedi_bond.c 		struct comedi_device *d;
d                 208 drivers/staging/comedi/drivers/comedi_bond.c 		d = comedi_open(file);
d                 210 drivers/staging/comedi/drivers/comedi_bond.c 		if (!d) {
d                 217 drivers/staging/comedi/drivers/comedi_bond.c 		while ((sdev = comedi_find_subdevice_by_type(d, COMEDI_SUBD_DIO,
d                 219 drivers/staging/comedi/drivers/comedi_bond.c 			nchans = comedi_get_n_channels(d, sdev);
d                 230 drivers/staging/comedi/drivers/comedi_bond.c 			bdev->dev = d;
d                 208 drivers/staging/comedi/drivers/comedi_parport.c static irqreturn_t parport_interrupt(int irq, void *d)
d                 210 drivers/staging/comedi/drivers/comedi_parport.c 	struct comedi_device *dev = d;
d                 451 drivers/staging/comedi/drivers/das16m1.c static irqreturn_t das16m1_interrupt(int irq, void *d)
d                 454 drivers/staging/comedi/drivers/das16m1.c 	struct comedi_device *dev = d;
d                 512 drivers/staging/comedi/drivers/das1800.c static irqreturn_t das1800_interrupt(int irq, void *d)
d                 514 drivers/staging/comedi/drivers/das1800.c 	struct comedi_device *dev = d;
d                 174 drivers/staging/comedi/drivers/das6402.c static irqreturn_t das6402_interrupt(int irq, void *d)
d                 176 drivers/staging/comedi/drivers/das6402.c 	struct comedi_device *dev = d;
d                 421 drivers/staging/comedi/drivers/das800.c static irqreturn_t das800_interrupt(int irq, void *d)
d                 423 drivers/staging/comedi/drivers/das800.c 	struct comedi_device *dev = d;
d                 403 drivers/staging/comedi/drivers/dmm32at.c static irqreturn_t dmm32at_isr(int irq, void *d)
d                 405 drivers/staging/comedi/drivers/dmm32at.c 	struct comedi_device *dev = d;
d                 450 drivers/staging/comedi/drivers/dt2801.c 	int d;
d                 458 drivers/staging/comedi/drivers/dt2801.c 		stat = dt2801_readdata2(dev, &d);
d                 463 drivers/staging/comedi/drivers/dt2801.c 		data[i] = d;
d                 193 drivers/staging/comedi/drivers/dt2811.c static irqreturn_t dt2811_interrupt(int irq, void *d)
d                 195 drivers/staging/comedi/drivers/dt2811.c 	struct comedi_device *dev = d;
d                 187 drivers/staging/comedi/drivers/dt2814.c static irqreturn_t dt2814_interrupt(int irq, void *d)
d                 190 drivers/staging/comedi/drivers/dt2814.c 	struct comedi_device *dev = d;
d                 501 drivers/staging/comedi/drivers/dt282x.c static irqreturn_t dt282x_interrupt(int irq, void *d)
d                 503 drivers/staging/comedi/drivers/dt282x.c 	struct comedi_device *dev = d;
d                 317 drivers/staging/comedi/drivers/dt3000.c static irqreturn_t dt3k_interrupt(int irq, void *d)
d                 319 drivers/staging/comedi/drivers/dt3000.c 	struct comedi_device *dev = d;
d                  69 drivers/staging/comedi/drivers/dyna_pci10xx.c 	u16 d = 0;
d                  90 drivers/staging/comedi/drivers/dyna_pci10xx.c 		d = inw_p(dev->iobase);
d                  92 drivers/staging/comedi/drivers/dyna_pci10xx.c 		d &= 0x0FFF;
d                  93 drivers/staging/comedi/drivers/dyna_pci10xx.c 		data[n] = d;
d                 128 drivers/staging/comedi/drivers/dyna_pci10xx.c 	u16 d = 0;
d                 132 drivers/staging/comedi/drivers/dyna_pci10xx.c 	d = inw_p(devpriv->BADR3);
d                 136 drivers/staging/comedi/drivers/dyna_pci10xx.c 	data[1] = d;
d                 177 drivers/staging/comedi/drivers/gsc_hpdi.c static irqreturn_t gsc_hpdi_interrupt(int irq, void *d)
d                 179 drivers/staging/comedi/drivers/gsc_hpdi.c 	struct comedi_device *dev = d;
d                 127 drivers/staging/comedi/drivers/mf6x4.c 	unsigned int d;
d                 143 drivers/staging/comedi/drivers/mf6x4.c 		d = ioread16(dev->mmio + MF6X4_ADDATA_REG);
d                 144 drivers/staging/comedi/drivers/mf6x4.c 		d &= s->maxdata;
d                 146 drivers/staging/comedi/drivers/mf6x4.c 		data[i] = comedi_offset_munge(s, d);
d                 187 drivers/staging/comedi/drivers/ni_6527.c static irqreturn_t ni6527_interrupt(int irq, void *d)
d                 189 drivers/staging/comedi/drivers/ni_6527.c 	struct comedi_device *dev = d;
d                 470 drivers/staging/comedi/drivers/ni_65xx.c static irqreturn_t ni_65xx_interrupt(int irq, void *d)
d                 472 drivers/staging/comedi/drivers/ni_65xx.c 	struct comedi_device *dev = d;
d                 430 drivers/staging/comedi/drivers/ni_660x.c static irqreturn_t ni_660x_interrupt(int irq, void *d)
d                 432 drivers/staging/comedi/drivers/ni_660x.c 	struct comedi_device *dev = d;
d                 132 drivers/staging/comedi/drivers/ni_at_a2150.c static irqreturn_t a2150_interrupt(int irq, void *d)
d                 134 drivers/staging/comedi/drivers/ni_at_a2150.c 	struct comedi_device *dev = d;
d                 221 drivers/staging/comedi/drivers/ni_atmio16d.c static irqreturn_t atmio16d_interrupt(int irq, void *d)
d                 223 drivers/staging/comedi/drivers/ni_atmio16d.c 	struct comedi_device *dev = d;
d                 136 drivers/staging/comedi/drivers/ni_daq_700.c 	int d;
d                 174 drivers/staging/comedi/drivers/ni_daq_700.c 		d = inw(dev->iobase + ADFIFO_R);
d                 177 drivers/staging/comedi/drivers/ni_daq_700.c 		d &= 0x0fff;
d                 178 drivers/staging/comedi/drivers/ni_daq_700.c 		d ^= 0x0800;
d                 179 drivers/staging/comedi/drivers/ni_daq_700.c 		data[n] = d;
d                 815 drivers/staging/comedi/drivers/ni_labpc_common.c static irqreturn_t labpc_interrupt(int irq, void *d)
d                 817 drivers/staging/comedi/drivers/ni_labpc_common.c 	struct comedi_device *dev = d;
d                 953 drivers/staging/comedi/drivers/ni_mio_common.c 	unsigned short d;
d                 957 drivers/staging/comedi/drivers/ni_mio_common.c 		comedi_buf_read_samples(s, &d, 1);
d                 960 drivers/staging/comedi/drivers/ni_mio_common.c 			packed_data = d & 0xffff;
d                 963 drivers/staging/comedi/drivers/ni_mio_common.c 				comedi_buf_read_samples(s, &d, 1);
d                 965 drivers/staging/comedi/drivers/ni_mio_common.c 				packed_data |= (d << 16) & 0xffff0000;
d                 969 drivers/staging/comedi/drivers/ni_mio_common.c 			ni_writew(dev, d, NI_E_AO_FIFO_DATA_REG);
d                1831 drivers/staging/comedi/drivers/ni_mio_common.c 	unsigned int d;
d                1848 drivers/staging/comedi/drivers/ni_mio_common.c 			d = 0;
d                1851 drivers/staging/comedi/drivers/ni_mio_common.c 					d = ni_readl(dev,
d                1853 drivers/staging/comedi/drivers/ni_mio_common.c 					d >>= 16;
d                1854 drivers/staging/comedi/drivers/ni_mio_common.c 					d &= 0xffff;
d                1859 drivers/staging/comedi/drivers/ni_mio_common.c 					d = ni_readl(dev,
d                1861 drivers/staging/comedi/drivers/ni_mio_common.c 					d &= 0xffff;
d                1869 drivers/staging/comedi/drivers/ni_mio_common.c 			d += signbits;
d                1870 drivers/staging/comedi/drivers/ni_mio_common.c 			data[n] = d & 0xffff;
d                1882 drivers/staging/comedi/drivers/ni_mio_common.c 			d = 0;
d                1889 drivers/staging/comedi/drivers/ni_mio_common.c 					d = ni_readl(dev,
d                1898 drivers/staging/comedi/drivers/ni_mio_common.c 			data[n] = (((d >> 16) & 0xFFFF) + signbits) & 0xFFFF;
d                1914 drivers/staging/comedi/drivers/ni_mio_common.c 				d = ni_readl(dev, NI_M_AI_FIFO_DATA_REG);
d                1915 drivers/staging/comedi/drivers/ni_mio_common.c 				d &= mask;
d                1916 drivers/staging/comedi/drivers/ni_mio_common.c 				data[n] = d;
d                1918 drivers/staging/comedi/drivers/ni_mio_common.c 				d = ni_readw(dev, NI_E_AI_FIFO_DATA_REG);
d                1919 drivers/staging/comedi/drivers/ni_mio_common.c 				d += signbits;
d                1920 drivers/staging/comedi/drivers/ni_mio_common.c 				data[n] = d & 0xffff;
d                5883 drivers/staging/comedi/drivers/ni_mio_common.c static irqreturn_t ni_E_interrupt(int irq, void *d)
d                5885 drivers/staging/comedi/drivers/ni_mio_common.c 	struct comedi_device *dev = d;
d                 374 drivers/staging/comedi/drivers/ni_pcidio.c static irqreturn_t nidio_interrupt(int irq, void *d)
d                 376 drivers/staging/comedi/drivers/ni_pcidio.c 	struct comedi_device *dev = d;
d                 182 drivers/staging/comedi/drivers/pcl711.c static irqreturn_t pcl711_interrupt(int irq, void *d)
d                 184 drivers/staging/comedi/drivers/pcl711.c 	struct comedi_device *dev = d;
d                 216 drivers/staging/comedi/drivers/pcl726.c static irqreturn_t pcl726_interrupt(int irq, void *d)
d                 218 drivers/staging/comedi/drivers/pcl726.c 	struct comedi_device *dev = d;
d                 836 drivers/staging/comedi/drivers/pcl812.c static irqreturn_t pcl812_interrupt(int irq, void *d)
d                 838 drivers/staging/comedi/drivers/pcl812.c 	struct comedi_device *dev = d;
d                 241 drivers/staging/comedi/drivers/pcl816.c static irqreturn_t pcl816_interrupt(int irq, void *d)
d                 243 drivers/staging/comedi/drivers/pcl816.c 	struct comedi_device *dev = d;
d                 534 drivers/staging/comedi/drivers/pcl818.c static irqreturn_t pcl818_interrupt(int irq, void *d)
d                 536 drivers/staging/comedi/drivers/pcl818.c 	struct comedi_device *dev = d;
d                 359 drivers/staging/comedi/drivers/pcmmio.c static irqreturn_t interrupt_pcmmio(int irq, void *d)
d                 361 drivers/staging/comedi/drivers/pcmmio.c 	struct comedi_device *dev = d;
d                 360 drivers/staging/comedi/drivers/pcmuio.c static irqreturn_t pcmuio_interrupt(int irq, void *d)
d                 362 drivers/staging/comedi/drivers/pcmuio.c 	struct comedi_device *dev = d;
d                 552 drivers/staging/comedi/drivers/rtd520.c 		unsigned short d;
d                 561 drivers/staging/comedi/drivers/rtd520.c 		d = readw(devpriv->las1 + LAS1_ADC_FIFO);
d                 562 drivers/staging/comedi/drivers/rtd520.c 		d >>= 3;	/* low 3 bits are marker lines */
d                 566 drivers/staging/comedi/drivers/rtd520.c 			d = comedi_offset_munge(s, d);
d                 568 drivers/staging/comedi/drivers/rtd520.c 		data[n] = d & s->maxdata;
d                 585 drivers/staging/comedi/drivers/rtd520.c 		unsigned short d;
d                 588 drivers/staging/comedi/drivers/rtd520.c 			d = readw(devpriv->las1 + LAS1_ADC_FIFO);
d                 592 drivers/staging/comedi/drivers/rtd520.c 		d = readw(devpriv->las1 + LAS1_ADC_FIFO);
d                 593 drivers/staging/comedi/drivers/rtd520.c 		d >>= 3;	/* low 3 bits are marker lines */
d                 597 drivers/staging/comedi/drivers/rtd520.c 			d = comedi_offset_munge(s, d);
d                 598 drivers/staging/comedi/drivers/rtd520.c 		d &= s->maxdata;
d                 600 drivers/staging/comedi/drivers/rtd520.c 		if (!comedi_buf_write_samples(s, &d, 1))
d                 609 drivers/staging/comedi/drivers/rtd520.c static irqreturn_t rtd_interrupt(int irq, void *d)
d                 611 drivers/staging/comedi/drivers/rtd520.c 	struct comedi_device *dev = d;
d                1230 drivers/staging/comedi/drivers/s626.c static irqreturn_t s626_irq_handler(int irq, void *d)
d                1232 drivers/staging/comedi/drivers/s626.c 	struct comedi_device *dev = d;
d                1142 drivers/staging/exfat/exfat_core.c 	u16 t = 0x00, d = 0x21;
d                1148 drivers/staging/exfat/exfat_core.c 		d = GET16_A(ep->create_date);
d                1152 drivers/staging/exfat/exfat_core.c 		d = GET16_A(ep->modify_date);
d                1159 drivers/staging/exfat/exfat_core.c 	tp->day  = (d & 0x001F);
d                1160 drivers/staging/exfat/exfat_core.c 	tp->mon  = (d >> 5) & 0x000F;
d                1161 drivers/staging/exfat/exfat_core.c 	tp->year = (d >> 9);
d                1167 drivers/staging/exfat/exfat_core.c 	u16 t = 0x00, d = 0x21;
d                1173 drivers/staging/exfat/exfat_core.c 		d = GET16_A(ep->create_date);
d                1177 drivers/staging/exfat/exfat_core.c 		d = GET16_A(ep->modify_date);
d                1181 drivers/staging/exfat/exfat_core.c 		d = GET16_A(ep->access_date);
d                1188 drivers/staging/exfat/exfat_core.c 	tp->day  = (d & 0x001F);
d                1189 drivers/staging/exfat/exfat_core.c 	tp->mon  = (d >> 5) & 0x000F;
d                1190 drivers/staging/exfat/exfat_core.c 	tp->year = (d >> 9);
d                1196 drivers/staging/exfat/exfat_core.c 	u16 t, d;
d                1200 drivers/staging/exfat/exfat_core.c 	d = (tp->year <<  9) | (tp->mon << 5) |  tp->day;
d                1205 drivers/staging/exfat/exfat_core.c 		SET16_A(ep->create_date, d);
d                1209 drivers/staging/exfat/exfat_core.c 		SET16_A(ep->modify_date, d);
d                1217 drivers/staging/exfat/exfat_core.c 	u16 t, d;
d                1221 drivers/staging/exfat/exfat_core.c 	d = (tp->year <<  9) | (tp->mon << 5) |  tp->day;
d                1226 drivers/staging/exfat/exfat_core.c 		SET16_A(ep->create_date, d);
d                1230 drivers/staging/exfat/exfat_core.c 		SET16_A(ep->modify_date, d);
d                1234 drivers/staging/exfat/exfat_core.c 		SET16_A(ep->access_date, d);
d                  18 drivers/staging/greybus/gbphy.h #define to_gbphy_dev(d) container_of(d, struct gbphy_device, dev)
d                  46 drivers/staging/greybus/gbphy.h #define to_gbphy_driver(d) container_of(d, struct gbphy_driver, driver)
d                  46 drivers/staging/greybus/gpio.c #define irq_data_to_gpio_chip(d) (d->domain->host_data)
d                 271 drivers/staging/greybus/gpio.c static void gb_gpio_irq_mask(struct irq_data *d)
d                 273 drivers/staging/greybus/gpio.c 	struct gpio_chip *chip = irq_data_to_gpio_chip(d);
d                 275 drivers/staging/greybus/gpio.c 	struct gb_gpio_line *line = &ggc->lines[d->hwirq];
d                 281 drivers/staging/greybus/gpio.c static void gb_gpio_irq_unmask(struct irq_data *d)
d                 283 drivers/staging/greybus/gpio.c 	struct gpio_chip *chip = irq_data_to_gpio_chip(d);
d                 285 drivers/staging/greybus/gpio.c 	struct gb_gpio_line *line = &ggc->lines[d->hwirq];
d                 291 drivers/staging/greybus/gpio.c static int gb_gpio_irq_set_type(struct irq_data *d, unsigned int type)
d                 293 drivers/staging/greybus/gpio.c 	struct gpio_chip *chip = irq_data_to_gpio_chip(d);
d                 295 drivers/staging/greybus/gpio.c 	struct gb_gpio_line *line = &ggc->lines[d->hwirq];
d                 329 drivers/staging/greybus/gpio.c static void gb_gpio_irq_bus_lock(struct irq_data *d)
d                 331 drivers/staging/greybus/gpio.c 	struct gpio_chip *chip = irq_data_to_gpio_chip(d);
d                 337 drivers/staging/greybus/gpio.c static void gb_gpio_irq_bus_sync_unlock(struct irq_data *d)
d                 339 drivers/staging/greybus/gpio.c 	struct gpio_chip *chip = irq_data_to_gpio_chip(d);
d                 341 drivers/staging/greybus/gpio.c 	struct gb_gpio_line *line = &ggc->lines[d->hwirq];
d                 344 drivers/staging/greybus/gpio.c 		_gb_gpio_irq_set_type(ggc, d->hwirq, line->irq_type);
d                 350 drivers/staging/greybus/gpio.c 			_gb_gpio_irq_mask(ggc, d->hwirq);
d                 352 drivers/staging/greybus/gpio.c 			_gb_gpio_irq_unmask(ggc, d->hwirq);
d                 300 drivers/staging/greybus/loopback.c gb_dev_loopback_rw_attr(type, d);
d                 304 drivers/staging/greybus/loopback.c gb_dev_loopback_rw_attr(us_wait, d);
d                 325 drivers/staging/greybus/tools/loopback_test.c 	struct loopback_device *d;
d                 333 drivers/staging/greybus/tools/loopback_test.c 		d = &t->devices[i];
d                 334 drivers/staging/greybus/tools/loopback_test.c 		r = &d->results;
d                 336 drivers/staging/greybus/tools/loopback_test.c 		r->error = read_sysfs_int(d->sysfs_entry, "error");
d                 337 drivers/staging/greybus/tools/loopback_test.c 		r->request_min = read_sysfs_int(d->sysfs_entry, "requests_per_second_min");
d                 338 drivers/staging/greybus/tools/loopback_test.c 		r->request_max = read_sysfs_int(d->sysfs_entry, "requests_per_second_max");
d                 339 drivers/staging/greybus/tools/loopback_test.c 		r->request_avg = read_sysfs_float(d->sysfs_entry, "requests_per_second_avg");
d                 341 drivers/staging/greybus/tools/loopback_test.c 		r->latency_min = read_sysfs_int(d->sysfs_entry, "latency_min");
d                 342 drivers/staging/greybus/tools/loopback_test.c 		r->latency_max = read_sysfs_int(d->sysfs_entry, "latency_max");
d                 343 drivers/staging/greybus/tools/loopback_test.c 		r->latency_avg = read_sysfs_float(d->sysfs_entry, "latency_avg");
d                 345 drivers/staging/greybus/tools/loopback_test.c 		r->throughput_min = read_sysfs_int(d->sysfs_entry, "throughput_min");
d                 346 drivers/staging/greybus/tools/loopback_test.c 		r->throughput_max = read_sysfs_int(d->sysfs_entry, "throughput_max");
d                 347 drivers/staging/greybus/tools/loopback_test.c 		r->throughput_avg = read_sysfs_float(d->sysfs_entry, "throughput_avg");
d                 350 drivers/staging/greybus/tools/loopback_test.c 			read_sysfs_int(d->sysfs_entry, "apbridge_unipro_latency_min");
d                 352 drivers/staging/greybus/tools/loopback_test.c 			read_sysfs_int(d->sysfs_entry, "apbridge_unipro_latency_max");
d                 354 drivers/staging/greybus/tools/loopback_test.c 			read_sysfs_float(d->sysfs_entry, "apbridge_unipro_latency_avg");
d                 357 drivers/staging/greybus/tools/loopback_test.c 			read_sysfs_int(d->sysfs_entry, "gbphy_firmware_latency_min");
d                 359 drivers/staging/greybus/tools/loopback_test.c 			read_sysfs_int(d->sysfs_entry, "gbphy_firmware_latency_max");
d                 361 drivers/staging/greybus/tools/loopback_test.c 			read_sysfs_float(d->sysfs_entry, "gbphy_firmware_latency_avg");
d                 588 drivers/staging/greybus/tools/loopback_test.c 	struct loopback_device *d;
d                 616 drivers/staging/greybus/tools/loopback_test.c 		d = &t->devices[t->device_count++];
d                 617 drivers/staging/greybus/tools/loopback_test.c 		snprintf(d->name, MAX_STR_LEN, "gb_loopback%u", dev_id);
d                 619 drivers/staging/greybus/tools/loopback_test.c 		snprintf(d->sysfs_entry, MAX_SYSFS_PATH, "%s%s/",
d                 620 drivers/staging/greybus/tools/loopback_test.c 			 t->sysfs_prefix, d->name);
d                 622 drivers/staging/greybus/tools/loopback_test.c 		snprintf(d->debugfs_entry, MAX_SYSFS_PATH, "%sraw_latency_%s",
d                 623 drivers/staging/greybus/tools/loopback_test.c 			 t->debugfs_prefix, d->name);
d                 626 drivers/staging/greybus/tools/loopback_test.c 			printf("add %s %s\n", d->sysfs_entry, d->debugfs_entry);
d                  99 drivers/staging/iio/adc/ad7280a.c #define AD7280A_CALC_VOLTAGE_CHAN_NUM(d, c) (((d) * AD7280A_CELLS_PER_DEV) + \
d                 101 drivers/staging/iio/adc/ad7280a.c #define AD7280A_CALC_TEMP_CHAN_NUM(d, c)    (((d) * AD7280A_CELLS_PER_DEV) + \
d                 128 drivers/staging/kpc2000/kpc2000_i2c.c #define outb_p(d, a) writeq(d, (void __iomem *)a)
d                 917 drivers/staging/media/imx/imx7-mipi-csis.c 	struct dentry *d;
d                 926 drivers/staging/media/imx/imx7-mipi-csis.c 	d = debugfs_create_bool("debug_enable", 0600, state->debugfs_root,
d                 928 drivers/staging/media/imx/imx7-mipi-csis.c 	if (!d)
d                 931 drivers/staging/media/imx/imx7-mipi-csis.c 	d = debugfs_create_file("dump_regs", 0600, state->debugfs_root,
d                 933 drivers/staging/media/imx/imx7-mipi-csis.c 	if (!d)
d                2200 drivers/staging/media/ipu3/include/intel-ipu3.h 	__s32 d:12;
d                2267 drivers/staging/media/ipu3/ipu3-css-params.c 			acc->tcc.macc_table.entries[i].d = 1024;
d                  47 drivers/staging/most/cdev/cdev.c #define to_channel(d) container_of(d, struct comp_channel, cdev)
d                  39 drivers/staging/most/core.c #define to_driver(d) container_of(d, struct mostcore, drv)
d                  74 drivers/staging/most/core.c #define to_channel(d) container_of(d, struct most_channel, dev)
d                 475 drivers/staging/most/core.c 	struct show_links_data *d = data;
d                 476 drivers/staging/most/core.c 	int offs = d->offs;
d                 477 drivers/staging/most/core.c 	char *buf = d->buf;
d                 499 drivers/staging/most/core.c 	d->offs = offs;
d                 505 drivers/staging/most/core.c 	struct show_links_data d = { .buf = buf };
d                 507 drivers/staging/most/core.c 	bus_for_each_dev(&mc.bus, NULL, &d, print_links);
d                 508 drivers/staging/most/core.c 	return d.offs;
d                 545 drivers/staging/most/core.c static int split_string(char *buf, char **a, char **b, char **c, char **d)
d                 559 drivers/staging/most/core.c 	if (d)
d                 560 drivers/staging/most/core.c 		*d = strsep(&buf, ":\n");
d                 254 drivers/staging/most/core.h #define to_most_interface(d) container_of(d, struct most_interface, dev)
d                  48 drivers/staging/most/dim2/hal.c #define ROUND_UP_TO(x, d)  (DIV_ROUND_UP(x, (d)) * (d))
d                 124 drivers/staging/most/usb/usb.c #define to_mdev(d) container_of(d, struct most_dev, iface)
d                1531 drivers/staging/rtl8188eu/core/rtw_security.c #define ROUND(i, d, s) \
d                1533 drivers/staging/rtl8188eu/core/rtw_security.c 	d##0 = TE0(s##0) ^ TE1(s##1) ^ TE2(s##2) ^ TE3(s##3) ^ rk[4 * i]; \
d                1534 drivers/staging/rtl8188eu/core/rtw_security.c 	d##1 = TE0(s##1) ^ TE1(s##2) ^ TE2(s##3) ^ TE3(s##0) ^ rk[4 * i + 1]; \
d                1535 drivers/staging/rtl8188eu/core/rtw_security.c 	d##2 = TE0(s##2) ^ TE1(s##3) ^ TE2(s##0) ^ TE3(s##1) ^ rk[4 * i + 2]; \
d                1536 drivers/staging/rtl8188eu/core/rtw_security.c 	d##3 = TE0(s##3) ^ TE1(s##0) ^ TE2(s##1) ^ TE3(s##2) ^ rk[4 * i + 3]; \
d                  19 drivers/staging/rtl8192u/r8192U_core.c unsigned int __fixunsdfsi(double d)
d                  21 drivers/staging/rtl8192u/r8192U_core.c 	return d;
d                  66 drivers/staging/rtl8712/rtl871x_eeprom.c 	u16 x, d = 0, i;
d                  72 drivers/staging/rtl8712/rtl871x_eeprom.c 	d = 0;
d                  74 drivers/staging/rtl8712/rtl871x_eeprom.c 		d <<= 1;
d                  81 drivers/staging/rtl8712/rtl871x_eeprom.c 			d |= 1;
d                  85 drivers/staging/rtl8712/rtl871x_eeprom.c 	return d;
d                  73 drivers/staging/rtl8723bs/core/rtw_eeprom.c 	u16 x, d = 0, i;
d                  82 drivers/staging/rtl8723bs/core/rtw_eeprom.c 	d = 0;
d                  85 drivers/staging/rtl8723bs/core/rtw_eeprom.c 		d = d << 1;
d                  95 drivers/staging/rtl8723bs/core/rtw_eeprom.c 		d |= 1;
d                 102 drivers/staging/rtl8723bs/core/rtw_eeprom.c 	return d;
d                2228 drivers/staging/rtl8723bs/core/rtw_security.c #define ROUND(i, d, s) \
d                2229 drivers/staging/rtl8723bs/core/rtw_security.c d##0 = TE0(s##0) ^ TE1(s##1) ^ TE2(s##2) ^ TE3(s##3) ^ rk[4 * i]; \
d                2230 drivers/staging/rtl8723bs/core/rtw_security.c d##1 = TE0(s##1) ^ TE1(s##2) ^ TE2(s##3) ^ TE3(s##0) ^ rk[4 * i + 1]; \
d                2231 drivers/staging/rtl8723bs/core/rtw_security.c d##2 = TE0(s##2) ^ TE1(s##3) ^ TE2(s##0) ^ TE3(s##1) ^ rk[4 * i + 2]; \
d                2232 drivers/staging/rtl8723bs/core/rtw_security.c d##3 = TE0(s##3) ^ TE1(s##0) ^ TE2(s##1) ^ TE3(s##2) ^ rk[4 * i + 3]
d                  15 drivers/staging/rtl8723bs/os_dep/sdio_intf.c #define dev_to_sdio_func(d)     container_of(d, struct sdio_func, dev)
d                 320 drivers/staging/sm750fb/ddk750_chip.c 	int N, M, X, d;
d                 359 drivers/staging/sm750fb/ddk750_chip.c 		for (d = max_d; d >= 0; d--) {
d                 360 drivers/staging/sm750fb/ddk750_chip.c 			X = BIT(d);
d                 374 drivers/staging/sm750fb/ddk750_chip.c 					if (d > max_OD)
d                 375 drivers/staging/sm750fb/ddk750_chip.c 						pll->POD = d - max_OD;
d                 376 drivers/staging/sm750fb/ddk750_chip.c 					pll->OD = d - pll->POD;
d                  31 drivers/staging/speakup/buffers.c 		if (vc_cons[i].d && vc_cons[i].d->port.tty)
d                  32 drivers/staging/speakup/buffers.c 			start_tty(vc_cons[i].d->port.tty);
d                  42 drivers/staging/speakup/buffers.c 		if (vc_cons[i].d && vc_cons[i].d->port.tty)
d                  43 drivers/staging/speakup/buffers.c 			stop_tty(vc_cons[i].d->port.tty);
d                 336 drivers/staging/speakup/kobjects.c 	struct vc_data *vc = vc_cons[fg_console].d;
d                1660 drivers/staging/speakup/main.c 	int hc, d;
d                1667 drivers/staging/speakup/main.c 		d = vc->vc_y - speakup_console[vc_num]->ht.cy;
d                1668 drivers/staging/speakup/main.c 		if ((d == 1) || (d == -1))
d                1685 drivers/staging/speakup/main.c 	struct vc_data *vc = vc_cons[cursor_con].d;
d                2302 drivers/staging/speakup/main.c 			u16 d = param->c;
d                2304 drivers/staging/speakup/main.c 			speakup_con_write(vc, &d, 1);
d                2355 drivers/staging/speakup/main.c 	struct vc_data *vc = vc_cons[fg_console].d;
d                2380 drivers/staging/speakup/main.c 		if (vc_cons[i].d) {
d                2381 drivers/staging/speakup/main.c 			err = speakup_allocate(vc_cons[i].d, GFP_KERNEL);
d                  48 drivers/staging/speakup/selection.c 	if (spk_sel_cons != vc_cons[fg_console].d) {
d                  49 drivers/staging/speakup/selection.c 		spk_sel_cons = vc_cons[fg_console].d;
d                 476 drivers/staging/unisys/visorhba/visorhba_main.c static u32 dma_data_dir_linux_to_spar(enum dma_data_direction d)
d                 478 drivers/staging/unisys/visorhba/visorhba_main.c 	switch (d) {
d                  64 drivers/staging/uwb/ie.c 	u8 *d;
d                  75 drivers/staging/uwb/ie.c 		d = (uint8_t *)ie + sizeof(struct uwb_ie_hdr);
d                  76 drivers/staging/uwb/ie.c 		while (d != ptr && r < size)
d                  77 drivers/staging/uwb/ie.c 			r += scnprintf(buf + r, size - r, " %02x", (unsigned)*d++);
d                  72 drivers/staging/uwb/include/umc.h #define to_umc_dev(d) container_of(d, struct umc_dev, dev)
d                  95 drivers/staging/uwb/include/umc.h #define to_umc_driver(d) container_of(d, struct umc_driver, driver)
d                  59 drivers/staging/uwb/uwb.h #define to_uwb_dev(d) container_of(d, struct uwb_dev, dev)
d                  19 drivers/staging/vc04_services/bcm2835-camera/mmal-common.h #define MMAL_FOURCC(a, b, c, d) ((a) | (b << 8) | (c << 16) | (d << 24))
d                 156 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.h #define DEBUG_TRACE(d) \
d                 157 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.h 	do { debug_ptr[DEBUG_ ## d] = __LINE__; dsb(sy); } while (0)
d                 158 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.h #define DEBUG_VALUE(d, v) \
d                 159 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.h 	do { debug_ptr[DEBUG_ ## d] = (v); dsb(sy); } while (0)
d                 160 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.h #define DEBUG_COUNT(d) \
d                 161 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.h 	do { debug_ptr[DEBUG_ ## d]++; dsb(sy); } while (0)
d                 166 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.h #define DEBUG_TRACE(d)
d                 167 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.h #define DEBUG_VALUE(d, v)
d                 168 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.h #define DEBUG_COUNT(d)
d                  29 drivers/staging/vt6655/tmacro.h #define LOWORD(d)           ((unsigned short)(d))
d                  32 drivers/staging/vt6655/tmacro.h #define HIWORD(d)           ((unsigned short)((((unsigned long)(d)) >> 16) & 0xFFFF))
d                  78 drivers/staging/wusbcore/host/whci/debug.c 	int d;
d                  80 drivers/staging/wusbcore/host/whci/debug.c 	for (d = 0; d < whc->n_devices; d++) {
d                  81 drivers/staging/wusbcore/host/whci/debug.c 		struct di_buf_entry *di = &whc->di_buf[d];
d                  83 drivers/staging/wusbcore/host/whci/debug.c 		seq_printf(s, "DI[%d]\n", d);
d                  28 drivers/staging/wusbcore/host/whci/init.c 	int d;
d                  31 drivers/staging/wusbcore/host/whci/init.c 	for (d = 0; d < whc->n_devices; d++)
d                  32 drivers/staging/wusbcore/host/whci/init.c 		whc->di_buf[d].addr_sec_info = WHC_DI_DISABLE;
d                 159 drivers/staging/wusbcore/host/whci/whci-hc.h #define QH_INFO3_MAX_DELAY(d)    ((d) << 0)  /* maximum stream delay in 125 us units (isoc only) */
d                2828 drivers/target/target_core_transport.c static const char *data_dir_name(enum dma_data_direction d)
d                2830 drivers/target/target_core_transport.c 	switch (d) {
d                  91 drivers/thermal/broadcom/bcm2835_thermal.c static int bcm2835_thermal_get_temp(void *d, int *temp)
d                  93 drivers/thermal/broadcom/bcm2835_thermal.c 	struct bcm2835_thermal_data *data = d;
d                  45 drivers/thermal/intel/int340x_thermal/int3402_thermal.c 	struct int3402_thermal_data *d;
d                  51 drivers/thermal/intel/int340x_thermal/int3402_thermal.c 	d = devm_kzalloc(&pdev->dev, sizeof(*d), GFP_KERNEL);
d                  52 drivers/thermal/intel/int340x_thermal/int3402_thermal.c 	if (!d)
d                  55 drivers/thermal/intel/int340x_thermal/int3402_thermal.c 	d->int340x_zone = int340x_thermal_zone_add(adev, NULL);
d                  56 drivers/thermal/intel/int340x_thermal/int3402_thermal.c 	if (IS_ERR(d->int340x_zone))
d                  57 drivers/thermal/intel/int340x_thermal/int3402_thermal.c 		return PTR_ERR(d->int340x_zone);
d                  62 drivers/thermal/intel/int340x_thermal/int3402_thermal.c 					  d);
d                  64 drivers/thermal/intel/int340x_thermal/int3402_thermal.c 		int340x_thermal_zone_remove(d->int340x_zone);
d                  68 drivers/thermal/intel/int340x_thermal/int3402_thermal.c 	d->handle = adev->handle;
d                  69 drivers/thermal/intel/int340x_thermal/int3402_thermal.c 	platform_set_drvdata(pdev, d);
d                  76 drivers/thermal/intel/int340x_thermal/int3402_thermal.c 	struct int3402_thermal_data *d = platform_get_drvdata(pdev);
d                  78 drivers/thermal/intel/int340x_thermal/int3402_thermal.c 	acpi_remove_notify_handler(d->handle,
d                  80 drivers/thermal/intel/int340x_thermal/int3402_thermal.c 	int340x_thermal_zone_remove(d->int340x_zone);
d                  37 drivers/thermal/intel/int340x_thermal/int3406_thermal.c #define ACPI_TO_RAW(v, d) (d->raw_bd->props.max_brightness * v / 100)
d                  38 drivers/thermal/intel/int340x_thermal/int3406_thermal.c #define RAW_TO_ACPI(v, d) (v * 100 / d->raw_bd->props.max_brightness)
d                  44 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	struct int3406_thermal_data *d = cooling_dev->devdata;
d                  46 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	*state = d->upper_limit - d->lower_limit;
d                  54 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	struct int3406_thermal_data *d = cooling_dev->devdata;
d                  57 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	if (state > d->upper_limit - d->lower_limit)
d                  60 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	acpi_level = d->br->levels[d->upper_limit - state];
d                  62 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	raw_level = ACPI_TO_RAW(acpi_level, d);
d                  64 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	return backlight_device_set_brightness(d->raw_bd, raw_level);
d                  71 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	struct int3406_thermal_data *d = cooling_dev->devdata;
d                  75 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	acpi_level = RAW_TO_ACPI(d->raw_bd->props.brightness, d);
d                  82 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	for (index = d->lower_limit; index < d->upper_limit; index++) {
d                  83 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 		if (acpi_level <= d->br->levels[index])
d                  87 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	*state = d->upper_limit - index;
d                 108 drivers/thermal/intel/int340x_thermal/int3406_thermal.c static void int3406_thermal_get_limit(struct int3406_thermal_data *d)
d                 113 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	status = acpi_evaluate_integer(d->handle, "DDDL", NULL, &lower_limit);
d                 115 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 		d->lower_limit = int3406_thermal_get_index(d->br->levels,
d                 116 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 					d->br->count, lower_limit);
d                 118 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	status = acpi_evaluate_integer(d->handle, "DDPC", NULL, &upper_limit);
d                 120 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 		d->upper_limit = int3406_thermal_get_index(d->br->levels,
d                 121 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 					d->br->count, upper_limit);
d                 124 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	d->lower_limit = d->lower_limit > 0 ? d->lower_limit : 2;
d                 125 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	d->upper_limit = d->upper_limit > 0 ? d->upper_limit : d->br->count - 1;
d                 137 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	struct int3406_thermal_data *d;
d                 144 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	d = devm_kzalloc(&pdev->dev, sizeof(*d), GFP_KERNEL);
d                 145 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	if (!d)
d                 147 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	d->handle = ACPI_HANDLE(&pdev->dev);
d                 152 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	d->raw_bd = bd;
d                 154 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	ret = acpi_video_get_levels(ACPI_COMPANION(&pdev->dev), &d->br, NULL);
d                 158 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	int3406_thermal_get_limit(d);
d                 160 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	d->cooling_dev = thermal_cooling_device_register(acpi_device_bid(adev),
d                 161 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 							 d, &video_cooling_ops);
d                 162 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	if (IS_ERR(d->cooling_dev))
d                 166 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 					  int3406_notify, d);
d                 170 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	platform_set_drvdata(pdev, d);
d                 175 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	thermal_cooling_device_unregister(d->cooling_dev);
d                 177 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	kfree(d->br);
d                 183 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	struct int3406_thermal_data *d = platform_get_drvdata(pdev);
d                 185 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	thermal_cooling_device_unregister(d->cooling_dev);
d                 186 drivers/thermal/intel/int340x_thermal/int3406_thermal.c 	kfree(d->br);
d                  16 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	struct int34x_thermal_zone *d = zone->devdata;
d                  20 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	if (d->override_ops && d->override_ops->get_temp)
d                  21 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 		return d->override_ops->get_temp(zone, temp);
d                  23 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	status = acpi_evaluate_integer(d->adev->handle, "_TMP", NULL, &tmp);
d                  27 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	if (d->lpat_table) {
d                  30 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 		conv_temp = acpi_lpat_raw_to_temp(d->lpat_table, (int)tmp);
d                  45 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	struct int34x_thermal_zone *d = zone->devdata;
d                  48 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	if (d->override_ops && d->override_ops->get_trip_temp)
d                  49 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 		return d->override_ops->get_trip_temp(zone, trip, temp);
d                  51 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	if (trip < d->aux_trip_nr)
d                  52 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 		*temp = d->aux_trips[trip];
d                  53 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	else if (trip == d->crt_trip_id)
d                  54 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 		*temp = d->crt_temp;
d                  55 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	else if (trip == d->psv_trip_id)
d                  56 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 		*temp = d->psv_temp;
d                  57 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	else if (trip == d->hot_trip_id)
d                  58 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 		*temp = d->hot_temp;
d                  61 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 			if (d->act_trips[i].valid &&
d                  62 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 			    d->act_trips[i].id == trip) {
d                  63 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 				*temp = d->act_trips[i].temp;
d                  78 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	struct int34x_thermal_zone *d = zone->devdata;
d                  81 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	if (d->override_ops && d->override_ops->get_trip_type)
d                  82 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 		return d->override_ops->get_trip_type(zone, trip, type);
d                  84 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	if (trip < d->aux_trip_nr)
d                  86 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	else if (trip == d->crt_trip_id)
d                  88 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	else if (trip == d->hot_trip_id)
d                  90 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	else if (trip == d->psv_trip_id)
d                  94 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 			if (d->act_trips[i].valid &&
d                  95 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 			    d->act_trips[i].id == trip) {
d                 110 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	struct int34x_thermal_zone *d = zone->devdata;
d                 114 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	if (d->override_ops && d->override_ops->set_trip_temp)
d                 115 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 		return d->override_ops->set_trip_temp(zone, trip, temp);
d                 118 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	status = acpi_execute_simple_method(d->adev->handle, name,
d                 123 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	d->aux_trips[trip] = temp;
d                 132 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	struct int34x_thermal_zone *d = zone->devdata;
d                 136 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	if (d->override_ops && d->override_ops->get_trip_hyst)
d                 137 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 		return d->override_ops->get_trip_hyst(zone, trip, temp);
d                 139 drivers/thermal/intel/int340x_thermal/int340x_thermal_zone.c 	status = acpi_evaluate_integer(d->adev->handle, "GTSH", NULL, &hyst);
d                 304 drivers/thermal/intel/intel_powerclamp.c 	struct powerclamp_calibration_data *d = &cal_data[target_ratio];
d                 311 drivers/thermal/intel/intel_powerclamp.c 	if (d->confidence >= CONFIDENCE_OK ||
d                 319 drivers/thermal/intel/intel_powerclamp.c 		if (d->steady_comp)
d                 320 drivers/thermal/intel/intel_powerclamp.c 			d->steady_comp =
d                 321 drivers/thermal/intel/intel_powerclamp.c 				roundup(delta+d->steady_comp, 2)/2;
d                 323 drivers/thermal/intel/intel_powerclamp.c 			d->steady_comp = delta;
d                 324 drivers/thermal/intel/intel_powerclamp.c 		d->confidence++;
d                 196 drivers/thermal/power_allocator.c 	s64 p, i, d, power_range;
d                 242 drivers/thermal/power_allocator.c 	d = mul_frac(tz->tzp->k_d, err - params->prev_err);
d                 243 drivers/thermal/power_allocator.c 	d = div_frac(d, tz->passive_delay);
d                 246 drivers/thermal/power_allocator.c 	power_range = p + i + d;
d                 256 drivers/thermal/power_allocator.c 					  frac_to_int(d), power_range);
d                 150 drivers/thermal/rcar_thermal.c #define rcar_thermal_common_write(c, r, d) \
d                 151 drivers/thermal/rcar_thermal.c 	_rcar_thermal_common_write(c, COMMON_ ##r, d)
d                 158 drivers/thermal/rcar_thermal.c #define rcar_thermal_common_bset(c, r, m, d) \
d                 159 drivers/thermal/rcar_thermal.c 	_rcar_thermal_common_bset(c, COMMON_ ##r, m, d)
d                 177 drivers/thermal/rcar_thermal.c #define rcar_thermal_write(p, r, d) _rcar_thermal_write(p, REG_ ##r, d)
d                 184 drivers/thermal/rcar_thermal.c #define rcar_thermal_bset(p, r, m, d) _rcar_thermal_bset(p, REG_ ##r, m, d)
d                1123 drivers/thermal/tegra/soctherm.c 	struct soctherm_oc_irq_chip_data *d = irq_data_get_irq_chip_data(data);
d                1125 drivers/thermal/tegra/soctherm.c 	mutex_lock(&d->irq_lock);
d                1137 drivers/thermal/tegra/soctherm.c 	struct soctherm_oc_irq_chip_data *d = irq_data_get_irq_chip_data(data);
d                1139 drivers/thermal/tegra/soctherm.c 	mutex_unlock(&d->irq_lock);
d                1152 drivers/thermal/tegra/soctherm.c 	struct soctherm_oc_irq_chip_data *d = irq_data_get_irq_chip_data(data);
d                1154 drivers/thermal/tegra/soctherm.c 	d->irq_enable |= BIT(data->hwirq);
d                1168 drivers/thermal/tegra/soctherm.c 	struct soctherm_oc_irq_chip_data *d = irq_data_get_irq_chip_data(data);
d                1170 drivers/thermal/tegra/soctherm.c 	d->irq_enable &= ~BIT(data->hwirq);
d                1221 drivers/thermal/tegra/soctherm.c static int soctherm_irq_domain_xlate_twocell(struct irq_domain *d,
d                 425 drivers/tty/ipwireless/hardware.c 			unsigned short d = data[i];
d                 429 drivers/tty/ipwireless/hardware.c 				d |= data[i + 1] << 8;
d                 430 drivers/tty/ipwireless/hardware.c 			raw_data = cpu_to_le16(d);
d                 439 drivers/tty/ipwireless/hardware.c 			unsigned short d = data[i];
d                 443 drivers/tty/ipwireless/hardware.c 				d |= data[i + 1] << 8;
d                 444 drivers/tty/ipwireless/hardware.c 			raw_data = cpu_to_le16(d);
d                  59 drivers/tty/serial/8250/8250_dw.c 	struct dw8250_data *d = to_dw8250_data(p->private_data);
d                  63 drivers/tty/serial/8250/8250_dw.c 		value |= d->msr_mask_on;
d                  64 drivers/tty/serial/8250/8250_dw.c 		value &= ~d->msr_mask_off;
d                 133 drivers/tty/serial/8250/8250_dw.c 	struct dw8250_data *d = to_dw8250_data(p->private_data);
d                 141 drivers/tty/serial/8250/8250_dw.c 	if (offset == UART_LCR && !d->uart_16550_compatible)
d                 148 drivers/tty/serial/8250/8250_dw.c 	struct dw8250_data *d = to_dw8250_data(p->private_data);
d                 152 drivers/tty/serial/8250/8250_dw.c 	if (offset == UART_LCR && !d->uart_16550_compatible)
d                 175 drivers/tty/serial/8250/8250_dw.c 	struct dw8250_data *d = to_dw8250_data(p->private_data);
d                 182 drivers/tty/serial/8250/8250_dw.c 	if (offset == UART_LCR && !d->uart_16550_compatible)
d                 189 drivers/tty/serial/8250/8250_dw.c 	struct dw8250_data *d = to_dw8250_data(p->private_data);
d                 193 drivers/tty/serial/8250/8250_dw.c 	if (offset == UART_LCR && !d->uart_16550_compatible)
d                 206 drivers/tty/serial/8250/8250_dw.c 	struct dw8250_data *d = to_dw8250_data(p->private_data);
d                 210 drivers/tty/serial/8250/8250_dw.c 	if (offset == UART_LCR && !d->uart_16550_compatible)
d                 225 drivers/tty/serial/8250/8250_dw.c 	struct dw8250_data *d = to_dw8250_data(p->private_data);
d                 255 drivers/tty/serial/8250/8250_dw.c 		(void)p->serial_in(p, d->usr_reg);
d                 279 drivers/tty/serial/8250/8250_dw.c 	struct dw8250_data *d = to_dw8250_data(p->private_data);
d                 283 drivers/tty/serial/8250/8250_dw.c 	if (IS_ERR(d->clk))
d                 286 drivers/tty/serial/8250/8250_dw.c 	clk_disable_unprepare(d->clk);
d                 287 drivers/tty/serial/8250/8250_dw.c 	rate = clk_round_rate(d->clk, baud * 16);
d                 293 drivers/tty/serial/8250/8250_dw.c 		ret = clk_set_rate(d->clk, rate);
d                 294 drivers/tty/serial/8250/8250_dw.c 	clk_prepare_enable(d->clk);
d                  64 drivers/tty/serial/8250/8250_dwlib.c 	struct dw8250_port_data *d = p->private_data;
d                  68 drivers/tty/serial/8250/8250_dwlib.c 	*frac = DIV_ROUND_CLOSEST(rem << d->dlf_size, base_baud);
d                 101 drivers/tty/serial/8250/8250_dwlib.c 		struct dw8250_port_data *d = p->private_data;
d                 103 drivers/tty/serial/8250/8250_dwlib.c 		d->dlf_size = fls(reg);
d                  38 drivers/tty/serial/8250/8250_hp300.c static int hpdca_init_one(struct dio_dev *d,
d                  40 drivers/tty/serial/8250/8250_hp300.c static void hpdca_remove_one(struct dio_dev *d);
d                 157 drivers/tty/serial/8250/8250_hp300.c static int hpdca_init_one(struct dio_dev *d,
d                 164 drivers/tty/serial/8250/8250_hp300.c 	if (hp300_uart_scode == d->scode) {
d                 174 drivers/tty/serial/8250/8250_hp300.c 	uart.port.irq = d->ipl;
d                 176 drivers/tty/serial/8250/8250_hp300.c 	uart.port.mapbase = (d->resource.start + UART_OFFSET);
d                 179 drivers/tty/serial/8250/8250_hp300.c 	uart.port.dev = &d->dev;
d                 183 drivers/tty/serial/8250/8250_hp300.c 		dev_notice(&d->dev,
d                 185 drivers/tty/serial/8250/8250_hp300.c 			  d->scode, uart.port.irq);
d                 190 drivers/tty/serial/8250/8250_hp300.c 	out_8(d->resource.start + DIO_VIRADDRBASE + DCA_IC, DCA_IC_IE);
d                 191 drivers/tty/serial/8250/8250_hp300.c 	dio_set_drvdata(d, (void *)line);
d                 194 drivers/tty/serial/8250/8250_hp300.c 	out_8(d->resource.start + DIO_VIRADDRBASE + DCA_ID, 0xff);
d                 288 drivers/tty/serial/8250/8250_hp300.c static void hpdca_remove_one(struct dio_dev *d)
d                 292 drivers/tty/serial/8250/8250_hp300.c 	line = (int) dio_get_drvdata(d);
d                 293 drivers/tty/serial/8250/8250_hp300.c 	if (d->resource.start) {
d                 295 drivers/tty/serial/8250/8250_hp300.c 		out_8(d->resource.start + DIO_VIRADDRBASE + DCA_IC, 0);
d                  52 drivers/tty/serial/icom.c #define to_icom_adapter(d) container_of(d, struct icom_adapter, kref)
d                 275 drivers/tty/serial/kgdboc.c 		con_debug_enter(vc_cons[fg_console].d);
d                 410 drivers/tty/serial/pch_uart.c 	const struct dmi_system_id *d;
d                 415 drivers/tty/serial/pch_uart.c 	d = dmi_first_match(pch_uart_dmi_table);
d                 416 drivers/tty/serial/pch_uart.c 	if (d)
d                 417 drivers/tty/serial/pch_uart.c 		return (unsigned long)d->driver_data;
d                4864 drivers/tty/synclink_gt.c 	struct slgt_desc *d;
d                4886 drivers/tty/synclink_gt.c 		d = &info->tbufs[i];
d                4889 drivers/tty/synclink_gt.c 		memcpy(d->buf, buf, count);
d                4900 drivers/tty/synclink_gt.c 			set_desc_eof(*d, 1);
d                4902 drivers/tty/synclink_gt.c 			set_desc_eof(*d, 0);
d                4906 drivers/tty/synclink_gt.c 			set_desc_count(*d, count);
d                4907 drivers/tty/synclink_gt.c 		d->buf_count = count;
d                4916 drivers/tty/synclink_gt.c 	d = &info->tbufs[info->tbuf_start];
d                4917 drivers/tty/synclink_gt.c 	set_desc_count(*d, d->buf_count);
d                3303 drivers/tty/tty_io.c void put_tty_driver(struct tty_driver *d)
d                3305 drivers/tty/tty_io.c 	tty_driver_kref_put(d);
d                3317 drivers/tty/tty_io.c 	struct device *d;
d                3345 drivers/tty/tty_io.c 			d = tty_register_device(driver, i, NULL);
d                3346 drivers/tty/tty_io.c 			if (IS_ERR(d)) {
d                3347 drivers/tty/tty_io.c 				error = PTR_ERR(d);
d                 305 drivers/tty/vt/consolemap.c 		p = *vc_cons[i].d->vc_uni_pagedir_loc;
d                 307 drivers/tty/vt/consolemap.c 			set_inverse_transl(vc_cons[i].d, p, USER_MAP);
d                 308 drivers/tty/vt/consolemap.c 			set_inverse_trans_unicode(vc_cons[i].d, p);
d                 350 drivers/tty/vt/consolemap.c 		ch = conv_uni_to_pc(vc_cons[fg_console].d, p[i]);
d                 442 drivers/tty/vt/consolemap.c 		q = *vc_cons[i].d->vc_uni_pagedir_loc;
d                 854 drivers/tty/vt/consolemap.c 		if (vc_cons_allocated(i) && !*vc_cons[i].d->vc_uni_pagedir_loc)
d                 855 drivers/tty/vt/consolemap.c 			con_set_default_unimap(vc_cons[i].d);
d                 177 drivers/tty/vt/keyboard.c 	struct getset_keycode_data *d = data;
d                 179 drivers/tty/vt/keyboard.c 	d->error = input_get_keycode(handle->dev, &d->ke);
d                 181 drivers/tty/vt/keyboard.c 	return d->error == 0; /* stop as soon as we successfully get one */
d                 186 drivers/tty/vt/keyboard.c 	struct getset_keycode_data d = {
d                 195 drivers/tty/vt/keyboard.c 	memcpy(d.ke.scancode, &scancode, sizeof(scancode));
d                 197 drivers/tty/vt/keyboard.c 	input_handler_for_each_handle(&kbd_handler, &d, getkeycode_helper);
d                 199 drivers/tty/vt/keyboard.c 	return d.error ?: d.ke.keycode;
d                 204 drivers/tty/vt/keyboard.c 	struct getset_keycode_data *d = data;
d                 206 drivers/tty/vt/keyboard.c 	d->error = input_set_keycode(handle->dev, &d->ke);
d                 208 drivers/tty/vt/keyboard.c 	return d->error == 0; /* stop as soon as we successfully set one */
d                 213 drivers/tty/vt/keyboard.c 	struct getset_keycode_data d = {
d                 222 drivers/tty/vt/keyboard.c 	memcpy(d.ke.scancode, &scancode, sizeof(scancode));
d                 224 drivers/tty/vt/keyboard.c 	input_handler_for_each_handle(&kbd_handler, &d, setkeycode_helper);
d                 226 drivers/tty/vt/keyboard.c 	return d.error;
d                 413 drivers/tty/vt/keyboard.c 	unsigned int d = diacr;
d                 418 drivers/tty/vt/keyboard.c 	if ((d & ~0xff) == BRL_UC_ROW) {
d                 420 drivers/tty/vt/keyboard.c 			return d | ch;
d                 423 drivers/tty/vt/keyboard.c 			if (accent_table[i].diacr == d && accent_table[i].base == ch)
d                 427 drivers/tty/vt/keyboard.c 	if (ch == ' ' || ch == (BRL_UC_ROW|0) || ch == d)
d                 428 drivers/tty/vt/keyboard.c 		return d;
d                 431 drivers/tty/vt/keyboard.c 		to_utf8(vc, d);
d                 433 drivers/tty/vt/keyboard.c 		int c = conv_uni_to_8bit(d);
d                1346 drivers/tty/vt/keyboard.c 	struct vc_data *vc = vc_cons[fg_console].d;
d                1355 drivers/tty/vt/keyboard.c 	struct vc_data *vc = vc_cons[fg_console].d;
d                 191 drivers/tty/vt/selection.c 	struct vc_data *vc = vc_cons[fg_console].d;
d                 222 drivers/tty/vt/selection.c 	if (sel_cons != vc_cons[fg_console].d) {
d                 224 drivers/tty/vt/selection.c 		sel_cons = vc_cons[fg_console].d;
d                 200 drivers/tty/vt/vc_screen.c 	return vc_cons[currcons].d;
d                 444 drivers/tty/vt/vt.c 		unsigned int i, j, k, sz, d, clear;
d                 448 drivers/tty/vt/vt.c 		d = nr;
d                 451 drivers/tty/vt/vt.c 			d = sz - nr;
d                 453 drivers/tty/vt/vt.c 		for (i = 0; i < gcd(d, sz); i++) {
d                 457 drivers/tty/vt/vt.c 				k = j + d;
d                 630 drivers/tty/vt/vt.c 	u16 *clear, *d, *s;
d                 641 drivers/tty/vt/vt.c 	d = (u16 *)(vc->vc_origin + vc->vc_size_row * (t + nr));
d                 645 drivers/tty/vt/vt.c 		swap(s, d);
d                 647 drivers/tty/vt/vt.c 	scr_memmovew(d, s, (b - t - nr) * vc->vc_size_row);
d                 991 drivers/tty/vt/vt.c 		struct vc_data *old_vc = vc_cons[fg_console].d;
d                1045 drivers/tty/vt/vt.c 	return (i < MAX_NR_CONSOLES && vc_cons[i].d);
d                1105 drivers/tty/vt/vt.c 	if (vc_cons[currcons].d)
d                1118 drivers/tty/vt/vt.c 	vc_cons[currcons].d = vc;
d                1145 drivers/tty/vt/vt.c 	vc_cons[currcons].d = NULL;
d                1369 drivers/tty/vt/vt.c 		param.vc = vc = vc_cons[currcons].d;
d                1376 drivers/tty/vt/vt.c 		vc_cons[currcons].d = NULL;
d                1848 drivers/tty/vt/vt.c 	return vc_cons[fg_console].d->vc_report_mouse;
d                2853 drivers/tty/vt/vt.c 			hide_cursor(vc_cons[fg_console].d);
d                2854 drivers/tty/vt/vt.c 			change_console(vc_cons[want_console].d);
d                2866 drivers/tty/vt/vt.c 		struct vc_data *vc = vc_cons[fg_console].d;
d                2876 drivers/tty/vt/vt.c 	notify_update(vc_cons[fg_console].d);
d                2883 drivers/tty/vt/vt.c 	struct vc_data *vc = vc_cons[fg_console].d;
d                2948 drivers/tty/vt/vt.c 	struct vc_data *vc = vc_cons[fg_console].d;
d                2963 drivers/tty/vt/vt.c 		vc = vc_cons[kmsg_console - 1].d;
d                3134 drivers/tty/vt/vt.c 				scrollfront(vc_cons[fg_console].d, lines);
d                3261 drivers/tty/vt/vt.c 	vc = vc_cons[currcons].d;
d                3278 drivers/tty/vt/vt.c 		tty->winsize.ws_row = vc_cons[currcons].d->vc_rows;
d                3279 drivers/tty/vt/vt.c 		tty->winsize.ws_col = vc_cons[currcons].d->vc_cols;
d                3395 drivers/tty/vt/vt.c 		vc_cons[currcons].d = vc = kzalloc(sizeof(struct vc_data), GFP_NOWAIT);
d                3404 drivers/tty/vt/vt.c 	master_display_fg = vc = vc_cons[currcons].d;
d                3549 drivers/tty/vt/vt.c 		struct vc_data *vc = vc_cons[i].d;
d                3585 drivers/tty/vt/vt.c 		struct vc_data *vc = vc_cons[j].d;
d                3592 drivers/tty/vt/vt.c 			vc = vc_cons[k].d;
d                3971 drivers/tty/vt/vt.c 	vc_cons[fg_console].d->vc_mode = saved_vc_mode;
d                3973 drivers/tty/vt/vt.c 	vc = vc_cons[fg_console].d;
d                4223 drivers/tty/vt/vt.c 	struct vc_data *vc = vc_cons[fg_console].d;
d                4304 drivers/tty/vt/vt.c 	vc = vc_cons[fg_console].d;
d                4365 drivers/tty/vt/vt.c 	if (ignore_poke || !vc_cons[fg_console].d || vc_cons[fg_console].d->vc_mode == KD_GRAPHICS)
d                4410 drivers/tty/vt/vt.c 			vc_cons[i].d->vc_palette[k++] = default_red[j];
d                4411 drivers/tty/vt/vt.c 			vc_cons[i].d->vc_palette[k++] = default_grn[j];
d                4412 drivers/tty/vt/vt.c 			vc_cons[i].d->vc_palette[k++] = default_blu[j];
d                4414 drivers/tty/vt/vt.c 		set_palette(vc_cons[i].d);
d                  46 drivers/tty/vt/vt_ioctl.c 	const struct vc_data *vc = vc_cons[i].d;
d                  63 drivers/tty/vt/vt_ioctl.c 	if (vc_is_sel(vc_cons[i].d))
d                 265 drivers/tty/vt/vt_ioctl.c 		return con_font_op(vc_cons[fg_console].d, op);
d                 273 drivers/tty/vt/vt_ioctl.c 		i = con_font_op(vc_cons[fg_console].d, op);
d                 725 drivers/tty/vt/vt_ioctl.c 				nvc = vc_cons[vsa.console].d;
d                 802 drivers/tty/vt/vt_ioctl.c 				complete_change_console(vc_cons[newvt].d);
d                 845 drivers/tty/vt/vt_ioctl.c 				vc = vc_cons[i].d;
d                 850 drivers/tty/vt/vt_ioctl.c 					vc_resize(vc_cons[i].d, cc, ll);
d                 891 drivers/tty/vt/vt_ioctl.c 			if (!vc_cons[i].d)
d                 894 drivers/tty/vt/vt_ioctl.c 			vcp = vc_cons[i].d;
d                 917 drivers/tty/vt/vt_ioctl.c 		ret = con_font_op(vc_cons[fg_console].d, &op);
d                 928 drivers/tty/vt/vt_ioctl.c 		ret = con_font_op(vc_cons[fg_console].d, &op);
d                 962 drivers/tty/vt/vt_ioctl.c 		ret = con_font_op(vc_cons[fg_console].d, &op);
d                 966 drivers/tty/vt/vt_ioctl.c 		con_set_default_unimap(vc_cons[fg_console].d);
d                1069 drivers/tty/vt/vt_ioctl.c 	vc = vc_con->d;
d                1112 drivers/tty/vt/vt_ioctl.c 		return con_font_op(vc_cons[fg_console].d, op);
d                1120 drivers/tty/vt/vt_ioctl.c 		i = con_font_op(vc_cons[fg_console].d, op);
d                1273 drivers/tty/vt/vt_ioctl.c 	old_vc_mode = vc_cons[fg_console].d->vc_mode;
d                1357 drivers/tty/vt/vt_ioctl.c 	vc = vc_cons[fg_console].d;
d                 418 drivers/usb/atm/ueagle-atm.c #define E1_MAKESA(a, b, c, d)						\
d                 420 drivers/usb/atm/ueagle-atm.c 	 ((d) & 0xff) << 16 |						\
d                  76 drivers/usb/chipidea/bits.h #define PORTSC_PHCD(d)	      ((d) ? BIT(22) : BIT(23))
d                  79 drivers/usb/chipidea/bits.h #define PORTSC_PTS(d)						\
d                  80 drivers/usb/chipidea/bits.h 	(u32)((((d) & 0x3) << 30) | (((d) & 0x4) ? BIT(25) : 0))
d                  93 drivers/usb/chipidea/bits.h #define DEVLC_PTS(d)          (u32)(((d) & 0x7) << 29)
d                 114 drivers/usb/class/usbtmc.c #define to_usbtmc_data(d) container_of(d, struct usbtmc_device_data, kref)
d                 225 drivers/usb/core/config.c 		int inum, int asnum, struct usb_endpoint_descriptor *d)
d                 245 drivers/usb/core/config.c 				if (endpoint_is_duplicate(epd, d))
d                 261 drivers/usb/core/config.c 	struct usb_endpoint_descriptor *d;
d                 267 drivers/usb/core/config.c 	d = (struct usb_endpoint_descriptor *) buffer;
d                 268 drivers/usb/core/config.c 	buffer += d->bLength;
d                 269 drivers/usb/core/config.c 	size -= d->bLength;
d                 271 drivers/usb/core/config.c 	if (d->bLength >= USB_DT_ENDPOINT_AUDIO_SIZE)
d                 273 drivers/usb/core/config.c 	else if (d->bLength >= USB_DT_ENDPOINT_SIZE)
d                 278 drivers/usb/core/config.c 		    cfgno, inum, asnum, d->bLength);
d                 282 drivers/usb/core/config.c 	i = d->bEndpointAddress & ~USB_ENDPOINT_DIR_MASK;
d                 286 drivers/usb/core/config.c 		    cfgno, inum, asnum, d->bEndpointAddress);
d                 295 drivers/usb/core/config.c 	if (config_endpoint_is_duplicate(config, inum, asnum, d)) {
d                 297 drivers/usb/core/config.c 				cfgno, inum, asnum, d->bEndpointAddress);
d                 303 drivers/usb/core/config.c 		if (usb_endpoint_is_blacklisted(udev, ifp, d)) {
d                 306 drivers/usb/core/config.c 					d->bEndpointAddress);
d                 314 drivers/usb/core/config.c 	memcpy(&endpoint->desc, d, n);
d                 323 drivers/usb/core/config.c 	if (usb_endpoint_xfer_int(d)) {
d                 335 drivers/usb/core/config.c 			n = fls(d->bInterval*8);
d                 348 drivers/usb/core/config.c 				n = clamp(fls(d->bInterval) + 3, i, j);
d                 357 drivers/usb/core/config.c 				n = clamp(fls(d->bInterval), i, j);
d                 370 drivers/usb/core/config.c 	} else if (usb_endpoint_xfer_isoc(d)) {
d                 382 drivers/usb/core/config.c 	if (d->bInterval < i || d->bInterval > j) {
d                 387 drivers/usb/core/config.c 		    d->bEndpointAddress, d->bInterval, n);
d                 396 drivers/usb/core/config.c 			usb_endpoint_xfer_bulk(d)) {
d                 399 drivers/usb/core/config.c 		    cfgno, inum, asnum, d->bEndpointAddress);
d                 413 drivers/usb/core/config.c 	if (maxp == 0 && !(usb_endpoint_xfer_isoc(d) && asnum == 0)) {
d                 415 drivers/usb/core/config.c 		    cfgno, inum, asnum, d->bEndpointAddress);
d                 429 drivers/usb/core/config.c 		if (usb_endpoint_xfer_int(d) || usb_endpoint_xfer_isoc(d)) {
d                 446 drivers/usb/core/config.c 		    cfgno, inum, asnum, d->bEndpointAddress, maxp, j);
d                 457 drivers/usb/core/config.c 			&& usb_endpoint_xfer_bulk(d)) {
d                 461 drivers/usb/core/config.c 				cfgno, inum, asnum, d->bEndpointAddress,
d                 507 drivers/usb/core/config.c 	struct usb_interface_descriptor	*d;
d                 515 drivers/usb/core/config.c 	d = (struct usb_interface_descriptor *) buffer;
d                 516 drivers/usb/core/config.c 	buffer += d->bLength;
d                 517 drivers/usb/core/config.c 	size -= d->bLength;
d                 519 drivers/usb/core/config.c 	if (d->bLength < USB_DT_INTERFACE_SIZE)
d                 524 drivers/usb/core/config.c 	inum = d->bInterfaceNumber;
d                 535 drivers/usb/core/config.c 	asnum = d->bAlternateSetting;
d                 548 drivers/usb/core/config.c 	memcpy(&alt->desc, d, USB_DT_INTERFACE_SIZE);
d                 672 drivers/usb/core/config.c 			struct usb_interface_descriptor *d;
d                 675 drivers/usb/core/config.c 			d = (struct usb_interface_descriptor *) header;
d                 676 drivers/usb/core/config.c 			if (d->bLength < USB_DT_INTERFACE_SIZE) {
d                 679 drivers/usb/core/config.c 				    "skipping\n", cfgno, d->bLength);
d                 683 drivers/usb/core/config.c 			inum = d->bInterfaceNumber;
d                 716 drivers/usb/core/config.c 			struct usb_interface_assoc_descriptor *d;
d                 718 drivers/usb/core/config.c 			d = (struct usb_interface_assoc_descriptor *)header;
d                 719 drivers/usb/core/config.c 			if (d->bLength < USB_DT_INTERFACE_ASSOCIATION_SIZE) {
d                 722 drivers/usb/core/config.c 					 cfgno, d->bLength);
d                 732 drivers/usb/core/config.c 				config->intf_assoc[iad_num] = d;
d                 474 drivers/usb/core/devio.c 	const char *t, *d;
d                 481 drivers/usb/core/devio.c 	d = dirs[!!usb_pipein(pipe)];
d                 487 drivers/usb/core/devio.c 					userurb, ep, t, d, length);
d                 491 drivers/usb/core/devio.c 					userurb, ep, t, d, length,
d                 497 drivers/usb/core/devio.c 					ep, t, d, length, timeout_or_status);
d                 501 drivers/usb/core/devio.c 					ep, t, d, length, timeout_or_status);
d                  16 drivers/usb/core/urb.c #define to_urb(d) container_of(d, struct urb, kref)
d                 415 drivers/usb/dwc3/debug.h static inline void dwc3_debugfs_init(struct dwc3 *d)
d                 417 drivers/usb/dwc3/debug.h static inline void dwc3_debugfs_exit(struct dwc3 *d)
d                  30 drivers/usb/dwc3/gadget.c #define DWC3_ALIGN_FRAME(d, n)	(((d)->frame_number + ((d)->interval * (n))) \
d                  31 drivers/usb/dwc3/gadget.c 					& ~((d)->interval - 1))
d                 107 drivers/usb/early/xhci-dbc.c static u32 __init xdbc_find_dbgp(int xdbc_num, u32 *b, u32 *d, u32 *f)
d                 123 drivers/usb/early/xhci-dbc.c 				*d = dev;
d                1437 drivers/usb/gadget/composite.c 			struct usb_os_desc *d;
d                1441 drivers/usb/gadget/composite.c 			d = f->os_desc_table[j].os_desc;
d                1442 drivers/usb/gadget/composite.c 			if (d && d->ext_compat_id)
d                1462 drivers/usb/gadget/composite.c 			struct usb_os_desc *d;
d                1466 drivers/usb/gadget/composite.c 			d = f->os_desc_table[j].os_desc;
d                1467 drivers/usb/gadget/composite.c 			if (d && d->ext_compat_id) {
d                1470 drivers/usb/gadget/composite.c 				memcpy(buf, d->ext_compat_id, 16);
d                1493 drivers/usb/gadget/composite.c 		struct usb_os_desc *d;
d                1497 drivers/usb/gadget/composite.c 		d = f->os_desc_table[j].os_desc;
d                1498 drivers/usb/gadget/composite.c 		if (d && d->ext_compat_id)
d                1499 drivers/usb/gadget/composite.c 			return d->ext_prop_count;
d                1507 drivers/usb/gadget/composite.c 	struct usb_os_desc *d;
d                1515 drivers/usb/gadget/composite.c 		d = f->os_desc_table[j].os_desc;
d                1516 drivers/usb/gadget/composite.c 		if (d)
d                1517 drivers/usb/gadget/composite.c 			return min(res + d->ext_prop_len, 4096);
d                1525 drivers/usb/gadget/composite.c 	struct usb_os_desc *d;
d                1535 drivers/usb/gadget/composite.c 		d = f->os_desc_table[j].os_desc;
d                1536 drivers/usb/gadget/composite.c 		if (d)
d                1537 drivers/usb/gadget/composite.c 			list_for_each_entry(ext_prop, &d->ext_prop, entry) {
d                1184 drivers/usb/gadget/configfs.c 		struct usb_os_desc *d;
d                1186 drivers/usb/gadget/configfs.c 		d = desc[n_interf];
d                1187 drivers/usb/gadget/configfs.c 		d->owner = owner;
d                1188 drivers/usb/gadget/configfs.c 		config_group_init_type_name(&d->group, "", interface_type);
d                1189 drivers/usb/gadget/configfs.c 		config_item_set_name(&d->group.cg_item, "interface.%s",
d                1191 drivers/usb/gadget/configfs.c 		configfs_add_default_group(&d->group, os_desc_group);
d                2212 drivers/usb/gadget/function/f_fs.c 	struct usb_endpoint_descriptor *d;
d                2240 drivers/usb/gadget/function/f_fs.c 		d = (void *)desc;
d                2247 drivers/usb/gadget/function/f_fs.c 				d->bEndpointAddress;
d                2249 drivers/usb/gadget/function/f_fs.c 				d->bEndpointAddress)
d                2388 drivers/usb/gadget/function/f_fs.c 		struct usb_ext_compat_desc *d = data;
d                2391 drivers/usb/gadget/function/f_fs.c 		if (len < sizeof(*d) ||
d                2392 drivers/usb/gadget/function/f_fs.c 		    d->bFirstInterfaceNumber >= ffs->interfaces_count)
d                2394 drivers/usb/gadget/function/f_fs.c 		if (d->Reserved1 != 1) {
d                2402 drivers/usb/gadget/function/f_fs.c 			d->Reserved1 = 1;
d                2404 drivers/usb/gadget/function/f_fs.c 		for (i = 0; i < ARRAY_SIZE(d->Reserved2); ++i)
d                2405 drivers/usb/gadget/function/f_fs.c 			if (d->Reserved2[i])
d                2412 drivers/usb/gadget/function/f_fs.c 		struct usb_ext_prop_desc *d = data;
d                2416 drivers/usb/gadget/function/f_fs.c 		if (len < sizeof(*d) || h->interface >= ffs->interfaces_count)
d                2418 drivers/usb/gadget/function/f_fs.c 		length = le32_to_cpu(d->dwSize);
d                2421 drivers/usb/gadget/function/f_fs.c 		type = le32_to_cpu(d->dwPropertyDataType);
d                2428 drivers/usb/gadget/function/f_fs.c 		pnl = le16_to_cpu(d->wPropertyNameLength);
d                2623 drivers/usb/gadget/function/f_fs.c 		vla_group(d);
d                2624 drivers/usb/gadget/function/f_fs.c 		vla_item(d, struct usb_gadget_strings *, stringtabs,
d                2626 drivers/usb/gadget/function/f_fs.c 		vla_item(d, struct usb_gadget_strings, stringtab, lang_count);
d                2627 drivers/usb/gadget/function/f_fs.c 		vla_item(d, struct usb_string, strings,
d                2630 drivers/usb/gadget/function/f_fs.c 		char *vlabuf = kmalloc(vla_group_size(d), GFP_KERNEL);
d                2638 drivers/usb/gadget/function/f_fs.c 		stringtabs = vla_ptr(vlabuf, d, stringtabs);
d                2639 drivers/usb/gadget/function/f_fs.c 		t = vla_ptr(vlabuf, d, stringtab);
d                2647 drivers/usb/gadget/function/f_fs.c 		stringtabs = vla_ptr(vlabuf, d, stringtabs);
d                2648 drivers/usb/gadget/function/f_fs.c 		t = vla_ptr(vlabuf, d, stringtab);
d                2649 drivers/usb/gadget/function/f_fs.c 		s = vla_ptr(vlabuf, d, strings);
d                3104 drivers/usb/gadget/function/f_fs.c 	vla_group(d);
d                3105 drivers/usb/gadget/function/f_fs.c 	vla_item_with_sz(d, struct ffs_ep, eps, ffs->eps_count);
d                3106 drivers/usb/gadget/function/f_fs.c 	vla_item_with_sz(d, struct usb_descriptor_header *, fs_descs,
d                3108 drivers/usb/gadget/function/f_fs.c 	vla_item_with_sz(d, struct usb_descriptor_header *, hs_descs,
d                3110 drivers/usb/gadget/function/f_fs.c 	vla_item_with_sz(d, struct usb_descriptor_header *, ss_descs,
d                3112 drivers/usb/gadget/function/f_fs.c 	vla_item_with_sz(d, short, inums, ffs->interfaces_count);
d                3113 drivers/usb/gadget/function/f_fs.c 	vla_item_with_sz(d, struct usb_os_desc_table, os_desc_table,
d                3115 drivers/usb/gadget/function/f_fs.c 	vla_item_with_sz(d, char[16], ext_compat,
d                3117 drivers/usb/gadget/function/f_fs.c 	vla_item_with_sz(d, struct usb_os_desc, os_desc,
d                3119 drivers/usb/gadget/function/f_fs.c 	vla_item_with_sz(d, struct usb_os_desc_ext_prop, ext_prop,
d                3121 drivers/usb/gadget/function/f_fs.c 	vla_item_with_sz(d, char, ext_prop_name,
d                3123 drivers/usb/gadget/function/f_fs.c 	vla_item_with_sz(d, char, ext_prop_data,
d                3125 drivers/usb/gadget/function/f_fs.c 	vla_item_with_sz(d, char, raw_descs, ffs->raw_descs_length);
d                3135 drivers/usb/gadget/function/f_fs.c 	vlabuf = kzalloc(vla_group_size(d), GFP_KERNEL);
d                3139 drivers/usb/gadget/function/f_fs.c 	ffs->ms_os_descs_ext_prop_avail = vla_ptr(vlabuf, d, ext_prop);
d                3141 drivers/usb/gadget/function/f_fs.c 		vla_ptr(vlabuf, d, ext_prop_name);
d                3143 drivers/usb/gadget/function/f_fs.c 		vla_ptr(vlabuf, d, ext_prop_data);
d                3146 drivers/usb/gadget/function/f_fs.c 	memcpy(vla_ptr(vlabuf, d, raw_descs), ffs->raw_descs,
d                3149 drivers/usb/gadget/function/f_fs.c 	memset(vla_ptr(vlabuf, d, inums), 0xff, d_inums__sz);
d                3150 drivers/usb/gadget/function/f_fs.c 	eps_ptr = vla_ptr(vlabuf, d, eps);
d                3157 drivers/usb/gadget/function/f_fs.c 	func->eps             = vla_ptr(vlabuf, d, eps);
d                3158 drivers/usb/gadget/function/f_fs.c 	func->interfaces_nums = vla_ptr(vlabuf, d, inums);
d                3166 drivers/usb/gadget/function/f_fs.c 		func->function.fs_descriptors = vla_ptr(vlabuf, d, fs_descs);
d                3168 drivers/usb/gadget/function/f_fs.c 				      vla_ptr(vlabuf, d, raw_descs),
d                3180 drivers/usb/gadget/function/f_fs.c 		func->function.hs_descriptors = vla_ptr(vlabuf, d, hs_descs);
d                3182 drivers/usb/gadget/function/f_fs.c 				      vla_ptr(vlabuf, d, raw_descs) + fs_len,
d                3194 drivers/usb/gadget/function/f_fs.c 		func->function.ss_descriptors = vla_ptr(vlabuf, d, ss_descs);
d                3196 drivers/usb/gadget/function/f_fs.c 				vla_ptr(vlabuf, d, raw_descs) + fs_len + hs_len,
d                3215 drivers/usb/gadget/function/f_fs.c 			   vla_ptr(vlabuf, d, raw_descs), d_raw_descs__sz,
d                3220 drivers/usb/gadget/function/f_fs.c 	func->function.os_desc_table = vla_ptr(vlabuf, d, os_desc_table);
d                3226 drivers/usb/gadget/function/f_fs.c 				vla_ptr(vlabuf, d, os_desc) +
d                3229 drivers/usb/gadget/function/f_fs.c 				vla_ptr(vlabuf, d, ext_compat) + i * 16;
d                3233 drivers/usb/gadget/function/f_fs.c 				      vla_ptr(vlabuf, d, raw_descs) +
d                 946 drivers/usb/gadget/function/f_hid.c 	char *d;
d                 956 drivers/usb/gadget/function/f_hid.c 	d = kmemdup(page, len, GFP_KERNEL);
d                 957 drivers/usb/gadget/function/f_hid.c 	if (!d) {
d                 962 drivers/usb/gadget/function/f_hid.c 	opts->report_desc = d;
d                 113 drivers/usb/gadget/function/u_ether.c #define xprintk(d, level, fmt, args...) \
d                 114 drivers/usb/gadget/function/u_ether.c 	printk(level "%s: " fmt , (d)->net->name , ## args)
d                 238 drivers/usb/gadget/legacy/inode.c #define xprintk(d,level,fmt,args...) \
d                  37 drivers/usb/gadget/udc/aspeed-vhub/dev.c void ast_vhub_dev_irq(struct ast_vhub_dev *d)
d                  39 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	u32 istat = readl(d->regs + AST_VHUB_DEV_ISR);
d                  41 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	writel(istat, d->regs + AST_VHUB_DEV_ISR);
d                  44 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		ast_vhub_ep0_handle_ack(&d->ep0, true);
d                  46 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		ast_vhub_ep0_handle_ack(&d->ep0, false);
d                  48 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		ast_vhub_ep0_handle_setup(&d->ep0);
d                  51 drivers/usb/gadget/udc/aspeed-vhub/dev.c static void ast_vhub_dev_enable(struct ast_vhub_dev *d)
d                  55 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	if (d->enabled)
d                  59 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	ast_vhub_reset_ep0(d);
d                  66 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	if (d->gadget.speed == USB_SPEED_HIGH)
d                  68 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	writel(reg, d->regs + AST_VHUB_DEV_EN_CTRL);
d                  71 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	hmsk = VHUB_IRQ_DEVICE1 << d->index;
d                  72 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	reg = readl(d->vhub->regs + AST_VHUB_IER);
d                  74 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	writel(reg, d->vhub->regs + AST_VHUB_IER);
d                  77 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	writel(d->ep0.buf_dma, d->regs + AST_VHUB_DEV_EP0_DATA);
d                  81 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		struct ast_vhub_ep *ep = d->epns[i];
d                  91 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->wakeup_en = false;
d                  92 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->enabled = true;
d                  95 drivers/usb/gadget/udc/aspeed-vhub/dev.c static void ast_vhub_dev_disable(struct ast_vhub_dev *d)
d                  99 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	if (!d->enabled)
d                 103 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	hmsk = VHUB_IRQ_DEVICE1 << d->index;
d                 104 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	reg = readl(d->vhub->regs + AST_VHUB_IER);
d                 106 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	writel(reg, d->vhub->regs + AST_VHUB_IER);
d                 109 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	writel(0, d->regs + AST_VHUB_DEV_EN_CTRL);
d                 110 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->gadget.speed = USB_SPEED_UNKNOWN;
d                 111 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->enabled = false;
d                 114 drivers/usb/gadget/udc/aspeed-vhub/dev.c static int ast_vhub_dev_feature(struct ast_vhub_dev *d,
d                 118 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	DDBG(d, "%s_FEATURE(dev val=%02x)\n",
d                 124 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->wakeup_en = is_set;
d                 129 drivers/usb/gadget/udc/aspeed-vhub/dev.c static int ast_vhub_ep_feature(struct ast_vhub_dev *d,
d                 136 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	DDBG(d, "%s_FEATURE(ep%d val=%02x)\n",
d                 140 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	if (ep_num >= AST_VHUB_NUM_GEN_EPs || !d->epns[ep_num - 1])
d                 145 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	ep = d->epns[ep_num - 1];
d                 153 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	DDBG(d, "%s stall on EP %d\n",
d                 161 drivers/usb/gadget/udc/aspeed-vhub/dev.c static int ast_vhub_dev_status(struct ast_vhub_dev *d,
d                 166 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	DDBG(d, "GET_STATUS(dev)\n");
d                 168 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	st0 = d->gadget.is_selfpowered << USB_DEVICE_SELF_POWERED;
d                 169 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	if (d->wakeup_en)
d                 172 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	return ast_vhub_simple_reply(&d->ep0, st0, 0);
d                 175 drivers/usb/gadget/udc/aspeed-vhub/dev.c static int ast_vhub_ep_status(struct ast_vhub_dev *d,
d                 182 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	DDBG(d, "GET_STATUS(ep%d)\n", ep_num);
d                 187 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		ep = d->epns[ep_num - 1];
d                 197 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	return ast_vhub_simple_reply(&d->ep0, st0, 0);
d                 200 drivers/usb/gadget/udc/aspeed-vhub/dev.c static void ast_vhub_dev_set_address(struct ast_vhub_dev *d, u8 addr)
d                 204 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	DDBG(d, "SET_ADDRESS: Got address %x\n", addr);
d                 206 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	reg = readl(d->regs + AST_VHUB_DEV_EN_CTRL);
d                 209 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	writel(reg, d->regs + AST_VHUB_DEV_EN_CTRL);
d                 215 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	struct ast_vhub_dev *d = ep->dev;
d                 219 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	if (!d->driver || !d->enabled) {
d                 222 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		      d->driver, d->enabled);
d                 233 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	if (d->gadget.speed == USB_SPEED_UNKNOWN) {
d                 234 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		d->gadget.speed = ep->vhub->speed;
d                 235 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		if (d->gadget.speed > d->driver->max_speed)
d                 236 drivers/usb/gadget/udc/aspeed-vhub/dev.c 			d->gadget.speed = d->driver->max_speed;
d                 237 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		DDBG(d, "first packet, captured speed %d\n",
d                 238 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		     d->gadget.speed);
d                 247 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		ast_vhub_dev_set_address(d, wValue);
d                 252 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		return ast_vhub_dev_status(d, wIndex, wValue);
d                 256 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		return ast_vhub_ep_status(d, wIndex, wValue);
d                 260 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		return ast_vhub_dev_feature(d, wIndex, wValue, true);
d                 262 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		return ast_vhub_dev_feature(d, wIndex, wValue, false);
d                 264 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		return ast_vhub_ep_feature(d, wIndex, wValue, true);
d                 266 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		return ast_vhub_ep_feature(d, wIndex, wValue, false);
d                 273 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	struct ast_vhub_dev *d = to_ast_dev(gadget);
d                 277 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	spin_lock_irqsave(&d->vhub->lock, flags);
d                 278 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	if (!d->wakeup_en)
d                 281 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	DDBG(d, "Device initiated wakeup\n");
d                 284 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	ast_vhub_hub_wake_all(d->vhub);
d                 287 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	spin_unlock_irqrestore(&d->vhub->lock, flags);
d                 293 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	struct ast_vhub_dev *d = to_ast_dev(gadget);
d                 295 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	return (readl(d->vhub->regs + AST_VHUB_USBSTS) >> 16) & 0x7ff;
d                 298 drivers/usb/gadget/udc/aspeed-vhub/dev.c static void ast_vhub_dev_nuke(struct ast_vhub_dev *d)
d                 303 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		if (!d->epns[i])
d                 305 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		ast_vhub_nuke(d->epns[i], -ESHUTDOWN);
d                 311 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	struct ast_vhub_dev *d = to_ast_dev(gadget);
d                 314 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	spin_lock_irqsave(&d->vhub->lock, flags);
d                 316 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	DDBG(d, "pullup(%d)\n", on);
d                 319 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	ast_vhub_device_connect(d->vhub, d->index, on);
d                 325 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	if (d->enabled) {
d                 326 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		ast_vhub_dev_nuke(d);
d                 327 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		ast_vhub_dev_disable(d);
d                 330 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	spin_unlock_irqrestore(&d->vhub->lock, flags);
d                 338 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	struct ast_vhub_dev *d = to_ast_dev(gadget);
d                 341 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	spin_lock_irqsave(&d->vhub->lock, flags);
d                 343 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	DDBG(d, "start\n");
d                 346 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->driver = driver;
d                 347 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->gadget.is_selfpowered = 1;
d                 349 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	spin_unlock_irqrestore(&d->vhub->lock, flags);
d                 358 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	struct ast_vhub_dev *d = to_ast_dev(gadget);
d                 363 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	DDBG(d, "Match EP type %d\n", usb_endpoint_type(desc));
d                 373 drivers/usb/gadget/udc/aspeed-vhub/dev.c 			DDBG(d, " -> using existing EP%d\n",
d                 420 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		if (d->epns[i] == NULL)
d                 430 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	ep = ast_vhub_alloc_epn(d, addr);
d                 433 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	DDBG(d, "Allocated epn#%d for port EP%d\n",
d                 441 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	struct ast_vhub_dev *d = to_ast_dev(gadget);
d                 444 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	spin_lock_irqsave(&d->vhub->lock, flags);
d                 446 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	DDBG(d, "stop\n");
d                 448 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->driver = NULL;
d                 449 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->gadget.speed = USB_SPEED_UNKNOWN;
d                 451 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	ast_vhub_dev_nuke(d);
d                 453 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	if (d->enabled)
d                 454 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		ast_vhub_dev_disable(d);
d                 456 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	spin_unlock_irqrestore(&d->vhub->lock, flags);
d                 470 drivers/usb/gadget/udc/aspeed-vhub/dev.c void ast_vhub_dev_suspend(struct ast_vhub_dev *d)
d                 472 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	if (d->driver && d->driver->suspend) {
d                 473 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		spin_unlock(&d->vhub->lock);
d                 474 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		d->driver->suspend(&d->gadget);
d                 475 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		spin_lock(&d->vhub->lock);
d                 479 drivers/usb/gadget/udc/aspeed-vhub/dev.c void ast_vhub_dev_resume(struct ast_vhub_dev *d)
d                 481 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	if (d->driver && d->driver->resume) {
d                 482 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		spin_unlock(&d->vhub->lock);
d                 483 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		d->driver->resume(&d->gadget);
d                 484 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		spin_lock(&d->vhub->lock);
d                 488 drivers/usb/gadget/udc/aspeed-vhub/dev.c void ast_vhub_dev_reset(struct ast_vhub_dev *d)
d                 491 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	if (!d->driver) {
d                 492 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		ast_vhub_dev_disable(d);
d                 497 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	if (!d->enabled) {
d                 498 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		DDBG(d, "Reset of disabled device, enabling...\n");
d                 499 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		ast_vhub_dev_enable(d);
d                 501 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		DDBG(d, "Reset of enabled device, resetting...\n");
d                 502 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		spin_unlock(&d->vhub->lock);
d                 503 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		usb_gadget_udc_reset(&d->gadget, d->driver);
d                 504 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		spin_lock(&d->vhub->lock);
d                 510 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		ast_vhub_dev_disable(d);
d                 511 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		ast_vhub_dev_enable(d);
d                 515 drivers/usb/gadget/udc/aspeed-vhub/dev.c void ast_vhub_del_dev(struct ast_vhub_dev *d)
d                 519 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	spin_lock_irqsave(&d->vhub->lock, flags);
d                 520 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	if (!d->registered) {
d                 521 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		spin_unlock_irqrestore(&d->vhub->lock, flags);
d                 524 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->registered = false;
d                 525 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	spin_unlock_irqrestore(&d->vhub->lock, flags);
d                 527 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	usb_del_gadget_udc(&d->gadget);
d                 528 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	device_unregister(d->port_dev);
d                 538 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	struct ast_vhub_dev *d = &vhub->ports[idx].dev;
d                 542 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->vhub = vhub;
d                 543 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->index = idx;
d                 544 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->name = devm_kasprintf(parent, GFP_KERNEL, "port%d", idx+1);
d                 545 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->regs = vhub->regs + 0x100 + 0x10 * idx;
d                 547 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	ast_vhub_init_ep0(vhub, &d->ep0, d);
d                 554 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->port_dev = kzalloc(sizeof(struct device), GFP_KERNEL);
d                 555 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	if (!d->port_dev)
d                 557 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	device_initialize(d->port_dev);
d                 558 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->port_dev->release = ast_vhub_dev_release;
d                 559 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->port_dev->parent = parent;
d                 560 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	dev_set_name(d->port_dev, "%s:p%d", dev_name(parent), idx + 1);
d                 561 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	rc = device_add(d->port_dev);
d                 566 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	INIT_LIST_HEAD(&d->gadget.ep_list);
d                 567 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->gadget.ops = &ast_vhub_udc_ops;
d                 568 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->gadget.ep0 = &d->ep0.ep;
d                 569 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->gadget.name = KBUILD_MODNAME;
d                 571 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		d->gadget.max_speed = USB_SPEED_FULL;
d                 573 drivers/usb/gadget/udc/aspeed-vhub/dev.c 		d->gadget.max_speed = USB_SPEED_HIGH;
d                 574 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->gadget.speed = USB_SPEED_UNKNOWN;
d                 575 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->gadget.dev.of_node = vhub->pdev->dev.of_node;
d                 577 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	rc = usb_add_gadget_udc(d->port_dev, &d->gadget);
d                 580 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	d->registered = true;
d                 584 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	device_del(d->port_dev);
d                 586 drivers/usb/gadget/udc/aspeed-vhub/dev.c 	put_device(d->port_dev);
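
The aspeed-vhub suspend/resume/reset entries above all drop d->vhub->lock around the gadget driver callback and retake it afterwards. A minimal userspace sketch of that unlock-call-relock shape, using a pthread mutex as a stand-in for the kernel spinlock; the struct layout and names are illustrative, not the driver's:

#include <pthread.h>
#include <stdio.h>

struct dev {
	pthread_mutex_t lock;
	void (*suspend_cb)(struct dev *);
};

/* Call the driver callback with the device lock dropped, mirroring the
 * unlock -> callback -> relock sequence in ast_vhub_dev_suspend(). */
static void dev_suspend(struct dev *d)
{
	pthread_mutex_lock(&d->lock);
	if (d->suspend_cb) {
		pthread_mutex_unlock(&d->lock);	/* callback may sleep or re-enter */
		d->suspend_cb(d);
		pthread_mutex_lock(&d->lock);
	}
	pthread_mutex_unlock(&d->lock);
}

static void my_suspend(struct dev *d)
{
	printf("suspend callback for %p\n", (void *)d);
}

int main(void)
{
	struct dev d = { PTHREAD_MUTEX_INITIALIZER, my_suspend };

	dev_suspend(&d);
	return 0;
}
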
d                 794 drivers/usb/gadget/udc/aspeed-vhub/epn.c struct ast_vhub_ep *ast_vhub_alloc_epn(struct ast_vhub_dev *d, u8 addr)
d                 796 drivers/usb/gadget/udc/aspeed-vhub/epn.c 	struct ast_vhub *vhub = d->vhub;
d                 813 drivers/usb/gadget/udc/aspeed-vhub/epn.c 	ep->dev = d;
d                 816 drivers/usb/gadget/udc/aspeed-vhub/epn.c 	DDBG(d, "Allocating gen EP %d for addr %d\n", i, addr);
d                 822 drivers/usb/gadget/udc/aspeed-vhub/epn.c 	d->epns[addr-1] = ep;
d                 839 drivers/usb/gadget/udc/aspeed-vhub/epn.c 	list_add_tail(&ep->ep.ep_list, &d->gadget.ep_list);
d                 432 drivers/usb/gadget/udc/aspeed-vhub/vhub.h #define DVDBG(d, fmt, ...)	do {			\
d                 433 drivers/usb/gadget/udc/aspeed-vhub/vhub.h 	dev_dbg(&(d)->vhub->pdev->dev,			\
d                 434 drivers/usb/gadget/udc/aspeed-vhub/vhub.h 		"%s " fmt, (d)->name,			\
d                 441 drivers/usb/gadget/udc/aspeed-vhub/vhub.h #define DVDBG(d, fmt, ...)	do { } while(0)
d                 454 drivers/usb/gadget/udc/aspeed-vhub/vhub.h #define DDBG(d, fmt, ...)	do {			\
d                 455 drivers/usb/gadget/udc/aspeed-vhub/vhub.h 	dev_dbg(&(d)->vhub->pdev->dev,			\
d                 456 drivers/usb/gadget/udc/aspeed-vhub/vhub.h 		"%s " fmt, (d)->name,			\
d                 462 drivers/usb/gadget/udc/aspeed-vhub/vhub.h #define DDBG(d, fmt, ...)	do { } while(0)
d                 535 drivers/usb/gadget/udc/aspeed-vhub/vhub.h void ast_vhub_del_dev(struct ast_vhub_dev *d);
d                 536 drivers/usb/gadget/udc/aspeed-vhub/vhub.h void ast_vhub_dev_irq(struct ast_vhub_dev *d);
d                 543 drivers/usb/gadget/udc/aspeed-vhub/vhub.h struct ast_vhub_ep *ast_vhub_alloc_epn(struct ast_vhub_dev *d, u8 addr);
d                 544 drivers/usb/gadget/udc/aspeed-vhub/vhub.h void ast_vhub_dev_suspend(struct ast_vhub_dev *d);
d                 545 drivers/usb/gadget/udc/aspeed-vhub/vhub.h void ast_vhub_dev_resume(struct ast_vhub_dev *d);
d                 546 drivers/usb/gadget/udc/aspeed-vhub/vhub.h void ast_vhub_dev_reset(struct ast_vhub_dev *d);
d                 610 drivers/usb/gadget/udc/bcm63xx_udc.c 		struct bcm_enet_desc *d = iudma->write_bd;
d                 614 drivers/usb/gadget/udc/bcm63xx_udc.c 		if (d == iudma->end_bd) {
d                 648 drivers/usb/gadget/udc/bcm63xx_udc.c 		d->address = breq->req.dma + breq->offset;
d                 650 drivers/usb/gadget/udc/bcm63xx_udc.c 		d->len_stat = dmaflags;
d                 673 drivers/usb/gadget/udc/bcm63xx_udc.c 	struct bcm_enet_desc *d = iudma->read_bd;
d                 681 drivers/usb/gadget/udc/bcm63xx_udc.c 		dmaflags = d->len_stat;
d                 688 drivers/usb/gadget/udc/bcm63xx_udc.c 		if (d == iudma->end_bd)
d                 689 drivers/usb/gadget/udc/bcm63xx_udc.c 			d = iudma->bd_ring;
d                 691 drivers/usb/gadget/udc/bcm63xx_udc.c 			d++;
d                 694 drivers/usb/gadget/udc/bcm63xx_udc.c 	iudma->read_bd = d;
d                 707 drivers/usb/gadget/udc/bcm63xx_udc.c 	struct bcm_enet_desc *d;
d                 739 drivers/usb/gadget/udc/bcm63xx_udc.c 	for (d = iudma->bd_ring; d <= iudma->end_bd; d++)
d                 740 drivers/usb/gadget/udc/bcm63xx_udc.c 		d->len_stat = 0;
d                2220 drivers/usb/gadget/udc/bcm63xx_udc.c 			struct bcm_enet_desc *d = &iudma->bd_ring[i];
d                2223 drivers/usb/gadget/udc/bcm63xx_udc.c 				   i * sizeof(*d), i,
d                2224 drivers/usb/gadget/udc/bcm63xx_udc.c 				   d->len_stat >> 16, d->len_stat & 0xffff,
d                2225 drivers/usb/gadget/udc/bcm63xx_udc.c 				   d->address);
d                2226 drivers/usb/gadget/udc/bcm63xx_udc.c 			if (d == iudma->read_bd)
d                2228 drivers/usb/gadget/udc/bcm63xx_udc.c 			if (d == iudma->write_bd)
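
The bcm63xx_udc lines above walk a fixed descriptor ring, wrapping from end_bd back to bd_ring. A small self-contained sketch of that wrap-around pointer advance, with a made-up descriptor type and ring size:

#include <stdio.h>

struct desc { unsigned int len_stat; unsigned int address; };

#define NUM_DESC 4

/* Advance a descriptor pointer through a fixed ring, wrapping from the
 * last entry back to the first, like the read_bd/end_bd walk above. */
static struct desc *ring_next(struct desc *ring, struct desc *end, struct desc *d)
{
	return (d == end) ? ring : d + 1;
}

int main(void)
{
	struct desc ring[NUM_DESC] = { { 0, 0 } };
	struct desc *end = &ring[NUM_DESC - 1];
	struct desc *d = ring;
	int i;

	for (i = 0; i < 6; i++) {	/* walk past the end to show the wrap */
		printf("slot %ld\n", (long)(d - ring));
		d = ring_next(ring, end, d);
	}
	return 0;
}
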
d                 254 drivers/usb/gadget/udc/fotg210-udc.c 			      dma_addr_t d, u32 len)
d                 274 drivers/usb/gadget/udc/fotg210-udc.c 	iowrite32(d, fotg210->reg + FOTG210_DMACPSR2);
d                 330 drivers/usb/gadget/udc/fotg210-udc.c 	dma_addr_t d;
d                 352 drivers/usb/gadget/udc/fotg210-udc.c 	d = dma_map_single(dev, buffer, length,
d                 355 drivers/usb/gadget/udc/fotg210-udc.c 	if (dma_mapping_error(dev, d)) {
d                 360 drivers/usb/gadget/udc/fotg210-udc.c 	fotg210_enable_dma(ep, d, length);
d                 370 drivers/usb/gadget/udc/fotg210-udc.c 	dma_unmap_single(dev, d, length, DMA_TO_DEVICE);
d                 887 drivers/usb/gadget/udc/fusb300_udc.c static void fusb300_fill_idma_prdtbl(struct fusb300_ep *ep, dma_addr_t d,
d                 900 drivers/usb/gadget/udc/fusb300_udc.c 	iowrite32(d, ep->fusb300->reg + FUSB300_OFFSET_EPPRD_W1(ep->epnum));
d                1825 drivers/usb/gadget/udc/net2280.c 			const struct usb_endpoint_descriptor	*d;
d                1827 drivers/usb/gadget/udc/net2280.c 			d = ep->desc;
d                1828 drivers/usb/gadget/udc/net2280.c 			if (!d)
d                1830 drivers/usb/gadget/udc/net2280.c 			t = d->bEndpointAddress;
d                1835 drivers/usb/gadget/udc/net2280.c 				type_string(d->bmAttributes),
d                1836 drivers/usb/gadget/udc/net2280.c 				usb_endpoint_maxp(d),
d                3046 drivers/usb/gadget/udc/pch_udc.c static int pch_udc_suspend(struct device *d)
d                3048 drivers/usb/gadget/udc/pch_udc.c 	struct pch_udc_dev *dev = dev_get_drvdata(d);
d                3056 drivers/usb/gadget/udc/pch_udc.c static int pch_udc_resume(struct device *d)
d                 260 drivers/usb/host/fsl-mph-dr-of.c static int __unregister_subdev(struct device *dev, void *d)
d                 347 drivers/usb/host/isp116x.h #define	isp116x_delay(h,d)	(h)->board->delay(	\
d                 348 drivers/usb/host/isp116x.h 				isp116x_to_hcd(h)->self.controller,d)
d                 351 drivers/usb/host/isp116x.h #define	isp116x_delay(h,d)	ndelay(d)
d                 354 drivers/usb/host/isp116x.h #define	isp116x_delay(h,d)	do{}while(0)
d                 442 drivers/usb/host/isp116x.h #define isp116x_show_reg_log(d,r,s) {				\
d                 445 drivers/usb/host/isp116x.h 			r, isp116x_read_reg32(d, r));		\
d                 448 drivers/usb/host/isp116x.h 			r, isp116x_read_reg16(d, r));	    	\
d                 451 drivers/usb/host/isp116x.h #define isp116x_show_reg_seq(d,r,s) {				\
d                 454 drivers/usb/host/isp116x.h 			r, isp116x_read_reg32(d, r));		\
d                 457 drivers/usb/host/isp116x.h 			r, isp116x_read_reg16(d, r));		\
d                 461 drivers/usb/host/isp116x.h #define isp116x_show_regs(d,type,s) {			\
d                 462 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCREVISION, s);	\
d                 463 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCCONTROL, s);	\
d                 464 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCCMDSTAT, s);	\
d                 465 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCINTSTAT, s);	\
d                 466 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCINTENB, s);	\
d                 467 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCFMINTVL, s);	\
d                 468 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCFMREM, s);		\
d                 469 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCFMNUM, s);		\
d                 470 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCLSTHRESH, s);	\
d                 471 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCRHDESCA, s);	\
d                 472 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCRHDESCB, s);	\
d                 473 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCRHSTATUS, s);	\
d                 474 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCRHPORT1, s);	\
d                 475 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCRHPORT2, s);	\
d                 476 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCHWCFG, s);		\
d                 477 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCDMACFG, s);	\
d                 478 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCXFERCTR, s);	\
d                 479 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCuPINT, s);		\
d                 480 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCuPINTENB, s);	\
d                 481 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCCHIPID, s);	\
d                 482 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCSCRATCH, s);	\
d                 483 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCITLBUFLEN, s);	\
d                 484 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCATLBUFLEN, s);	\
d                 485 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCBUFSTAT, s);	\
d                 486 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCRDITL0LEN, s);	\
d                 487 drivers/usb/host/isp116x.h 	isp116x_show_reg_##type(d, HCRDITL1LEN, s);	\
d                 569 drivers/usb/host/isp1362.h #define	isp1362_delay(h, d)	(h)->board->delay(isp1362_hcd_to_hcd(h)->self.controller, d)
d                 571 drivers/usb/host/isp1362.h #define	isp1362_delay(h, d)	ndelay(d)
d                 573 drivers/usb/host/isp1362.h #define	isp1362_delay(h, d)	do {} while (0)
d                 723 drivers/usb/host/isp1362.h #define isp1362_read_reg16(d, r)		({			\
d                 726 drivers/usb/host/isp1362.h 	isp1362_write_addr(d, ISP1362_REG_##r);				\
d                 727 drivers/usb/host/isp1362.h 	__v = isp1362_read_data16(d);					\
d                 733 drivers/usb/host/isp1362.h #define isp1362_read_reg32(d, r)		({			\
d                 736 drivers/usb/host/isp1362.h 	isp1362_write_addr(d, ISP1362_REG_##r);				\
d                 737 drivers/usb/host/isp1362.h 	__v = isp1362_read_data32(d);					\
d                 743 drivers/usb/host/isp1362.h #define isp1362_write_reg16(d, r, v)	{					\
d                 745 drivers/usb/host/isp1362.h 	isp1362_write_addr(d, (ISP1362_REG_##r) | ISP1362_REG_WRITE_OFFSET);	\
d                 746 drivers/usb/host/isp1362.h 	isp1362_write_data16(d, (u16)(v));					\
d                 751 drivers/usb/host/isp1362.h #define isp1362_write_reg32(d, r, v)	{					\
d                 753 drivers/usb/host/isp1362.h 	isp1362_write_addr(d, (ISP1362_REG_##r) | ISP1362_REG_WRITE_OFFSET);	\
d                 754 drivers/usb/host/isp1362.h 	isp1362_write_data32(d, (u32)(v));					\
d                 759 drivers/usb/host/isp1362.h #define isp1362_set_mask16(d, r, m) {			\
d                 761 drivers/usb/host/isp1362.h 	__v = isp1362_read_reg16(d, r);			\
d                 763 drivers/usb/host/isp1362.h 		isp1362_write_reg16(d, r, __v | m);	\
d                 766 drivers/usb/host/isp1362.h #define isp1362_clr_mask16(d, r, m) {			\
d                 768 drivers/usb/host/isp1362.h 	__v = isp1362_read_reg16(d, r);			\
d                 770 drivers/usb/host/isp1362.h 		isp1362_write_reg16(d, r, __v & ~m);	\
d                 773 drivers/usb/host/isp1362.h #define isp1362_set_mask32(d, r, m) {			\
d                 775 drivers/usb/host/isp1362.h 	__v = isp1362_read_reg32(d, r);			\
d                 777 drivers/usb/host/isp1362.h 		isp1362_write_reg32(d, r, __v | m);	\
d                 780 drivers/usb/host/isp1362.h #define isp1362_clr_mask32(d, r, m) {			\
d                 782 drivers/usb/host/isp1362.h 	__v = isp1362_read_reg32(d, r);			\
d                 784 drivers/usb/host/isp1362.h 		isp1362_write_reg32(d, r, __v & ~m);	\
d                 787 drivers/usb/host/isp1362.h #define isp1362_show_reg(d, r) {								\
d                 790 drivers/usb/host/isp1362.h 			ISP1362_REG_NO(ISP1362_REG_##r), isp1362_read_reg32(d, r));	\
d                 793 drivers/usb/host/isp1362.h 			ISP1362_REG_NO(ISP1362_REG_##r), isp1362_read_reg16(d, r));	\
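
The isp1362_read_reg16/32 macros above wrap a two-step indexed register access (latch the register number, then read the data port) in a GNU C statement expression. A toy userspace sketch of the same macro style; the register file and helper names are invented and the ({ ... }) form is a GCC/Clang extension, as in the kernel:

#include <stdio.h>

/* Toy indirectly-addressed register file: write the index to an "address"
 * latch, then read the data port. */
static unsigned int regfile[8] = { 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88 };
static unsigned int addr_latch;

static void write_addr(unsigned int a) { addr_latch = a; }
static unsigned int read_data(void)    { return regfile[addr_latch & 7]; }

/* Statement expression returning the value read, mirroring the
 * ({ ... __v; }) shape of the isp1362 macros. */
#define read_reg(r) ({ unsigned int __v; write_addr(r); __v = read_data(); __v; })

int main(void)
{
	printf("reg3 = 0x%x\n", read_reg(3));
	return 0;
}
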
d                 242 drivers/usb/host/u132-hcd.c #define kref_to_u132(d) container_of(d, struct u132, kref)
d                 243 drivers/usb/host/u132-hcd.c #define kref_to_u132_endp(d) container_of(d, struct u132_endp, kref)
d                 244 drivers/usb/host/u132-hcd.c #define kref_to_u132_udev(d) container_of(d, struct u132_udev, kref)
d                2339 drivers/usb/host/u132-hcd.c 			char *d = data;
d                2346 drivers/usb/host/u132-hcd.c 					int w = sprintf(d, " %02X", *b++);
d                2347 drivers/usb/host/u132-hcd.c 					d += w;
d                2350 drivers/usb/host/u132-hcd.c 					d += sprintf(d, " ..");
d                 156 drivers/usb/host/xhci-dbgcap.h #define dbc_bulkout_ctx(d)		\
d                 157 drivers/usb/host/xhci-dbgcap.h 	((struct xhci_ep_ctx *)((d)->ctx->bytes + DBC_CONTEXT_SIZE))
d                 158 drivers/usb/host/xhci-dbgcap.h #define dbc_bulkin_ctx(d)		\
d                 159 drivers/usb/host/xhci-dbgcap.h 	((struct xhci_ep_ctx *)((d)->ctx->bytes + DBC_CONTEXT_SIZE * 2))
d                 160 drivers/usb/host/xhci-dbgcap.h #define dbc_bulkout_enq(d)		\
d                 161 drivers/usb/host/xhci-dbgcap.h 	xhci_trb_virt_to_dma((d)->ring_out->enq_seg, (d)->ring_out->enqueue)
d                 162 drivers/usb/host/xhci-dbgcap.h #define dbc_bulkin_enq(d)		\
d                 163 drivers/usb/host/xhci-dbgcap.h 	xhci_trb_virt_to_dma((d)->ring_in->enq_seg, (d)->ring_in->enqueue)
d                 166 drivers/usb/host/xhci-dbgcap.h #define dbc_ep_dma_direction(d)		\
d                 167 drivers/usb/host/xhci-dbgcap.h 	((d)->direction ? DMA_FROM_DEVICE : DMA_TO_DEVICE)
d                 189 drivers/usb/misc/ftdi-elan.c #define kref_to_usb_ftdi(d) container_of(d, struct usb_ftdi, kref)
d                 190 drivers/usb/misc/ftdi-elan.c #define platform_device_to_usb_ftdi(d) container_of(d, struct usb_ftdi, \
d                 660 drivers/usb/misc/ftdi-elan.c 	char *d = data;
d                 675 drivers/usb/misc/ftdi-elan.c 				d += sprintf(d, " %02X", 0x000000FF & *p);
d                 678 drivers/usb/misc/ftdi-elan.c 				d += sprintf(d, " ..");
d                 799 drivers/usb/misc/ftdi-elan.c 		char *d = diag;
d                 806 drivers/usb/misc/ftdi-elan.c 				d += sprintf(d, " %02X", *c++);
d                 808 drivers/usb/misc/ftdi-elan.c 				d += sprintf(d, " ..");
d                 925 drivers/usb/misc/ftdi-elan.c 		char *d = diag;
d                 932 drivers/usb/misc/ftdi-elan.c 				d += sprintf(d, " %02X", *c++);
d                 934 drivers/usb/misc/ftdi-elan.c 				d += sprintf(d, " ..");
d                 989 drivers/usb/misc/ftdi-elan.c 			char *d = diag;
d                 996 drivers/usb/misc/ftdi-elan.c 					d += sprintf(d, " %02X", *c++);
d                 998 drivers/usb/misc/ftdi-elan.c 					d += sprintf(d, " ..");
d                1625 drivers/usb/misc/ftdi-elan.c 			char *d = data;
d                1649 drivers/usb/misc/ftdi-elan.c 					int w = sprintf(d, " %02X", *b++);
d                1650 drivers/usb/misc/ftdi-elan.c 					d += w;
d                1653 drivers/usb/misc/ftdi-elan.c 					d += sprintf(d, " ..");
d                1817 drivers/usb/misc/ftdi-elan.c 			char *d = diag;
d                1825 drivers/usb/misc/ftdi-elan.c 					d += sprintf(d, " %02X",
d                1829 drivers/usb/misc/ftdi-elan.c 					d += sprintf(d, " ..");
d                1990 drivers/usb/misc/ftdi-elan.c 				char *d = diag;
d                1999 drivers/usb/misc/ftdi-elan.c 						d += sprintf(d, " %02X", c);
d                2002 drivers/usb/misc/ftdi-elan.c 						d += sprintf(d, " ..");
d                2085 drivers/usb/misc/ftdi-elan.c 			char *d = diag;
d                2093 drivers/usb/misc/ftdi-elan.c 					d += sprintf(d, " %02X",
d                2097 drivers/usb/misc/ftdi-elan.c 					d += sprintf(d, " ..");
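
The ftdi-elan (and u132-hcd) diagnostic lines above build hex dumps by advancing a char pointer with the return value of sprintf(), and truncate long dumps with " ..". A standalone sketch of that accumulation pattern with made-up data:

#include <stdio.h>

int main(void)
{
	unsigned char bytes[] = { 0xDE, 0xAD, 0xBE, 0xEF, 0x42 };
	char diag[64];
	char *d = diag;
	size_t i;

	for (i = 0; i < sizeof(bytes); i++) {
		if (i < 4)
			d += sprintf(d, " %02X", bytes[i]);	/* append and advance */
		else
			d += sprintf(d, " ..");			/* truncate like the driver */
	}
	printf("dump:%s\n", diag);
	return 0;
}
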
d                1398 drivers/usb/misc/sisusbvga/sisusb.c #define GETREG(r, d) sisusb_read_memio_byte(sisusb, SISUSB_TYPE_IO, r, d)
d                1399 drivers/usb/misc/sisusbvga/sisusb.c #define SETREG(r, d) sisusb_write_memio_byte(sisusb, SISUSB_TYPE_IO, r, d)
d                1400 drivers/usb/misc/sisusbvga/sisusb.c #define SETIREG(r, i, d) sisusb_setidxreg(sisusb, r, i, d)
d                1401 drivers/usb/misc/sisusbvga/sisusb.c #define GETIREG(r, i, d) sisusb_getidxreg(sisusb, r, i, d)
d                1405 drivers/usb/misc/sisusbvga/sisusb.c #define READL(a, d) sisusb_read_memio_long(sisusb, SISUSB_TYPE_MEM, a, d)
d                1406 drivers/usb/misc/sisusbvga/sisusb.c #define WRITEL(a, d) sisusb_write_memio_long(sisusb, SISUSB_TYPE_MEM, a, d)
d                1407 drivers/usb/misc/sisusbvga/sisusb.c #define READB(a, d) sisusb_read_memio_byte(sisusb, SISUSB_TYPE_MEM, a, d)
d                1408 drivers/usb/misc/sisusbvga/sisusb.c #define WRITEB(a, d) sisusb_write_memio_byte(sisusb, SISUSB_TYPE_MEM, a, d)
d                 151 drivers/usb/misc/sisusbvga/sisusb.h #define to_sisusb_dev(d) container_of(d, struct sisusb_usb_data, kref)
d                1109 drivers/usb/misc/sisusbvga/sisusb_con.c 			struct vc_data *d = vc_cons[i].d;
d                1110 drivers/usb/misc/sisusbvga/sisusb_con.c 			if (d && d->vc_sw == &sisusb_con)
d                1111 drivers/usb/misc/sisusbvga/sisusb_con.c 				d->vc_hi_font_mask = ch512 ? 0x0800 : 0;
d                1176 drivers/usb/misc/sisusbvga/sisusb_con.c 			struct vc_data *vc = vc_cons[i].d;
d                  60 drivers/usb/misc/usblcd.c #define to_lcd_dev(d) container_of(d, struct usb_lcd, kref)
d                 996 drivers/usb/misc/usbtest.c 		struct usb_qualifier_descriptor *d = NULL;
d                1014 drivers/usb/misc/usbtest.c 			d = (struct usb_qualifier_descriptor *) dev->buf;
d                1017 drivers/usb/misc/usbtest.c 		if (d) {
d                1018 drivers/usb/misc/usbtest.c 			unsigned max = d->bNumConfigurations;
d                 334 drivers/usb/misc/uss720.c static void parport_uss720_write_data(struct parport *pp, unsigned char d)
d                 336 drivers/usb/misc/uss720.c 	set_1284_register(pp, 0, d, GFP_KERNEL);
d                 348 drivers/usb/misc/uss720.c static void parport_uss720_write_control(struct parport *pp, unsigned char d)
d                 352 drivers/usb/misc/uss720.c 	d = (d & 0xf) | (priv->reg[1] & 0xf0);
d                 353 drivers/usb/misc/uss720.c 	if (set_1284_register(pp, 2, d, GFP_KERNEL))
d                 355 drivers/usb/misc/uss720.c 	priv->reg[1] = d;
d                 367 drivers/usb/misc/uss720.c 	unsigned char d;
d                 371 drivers/usb/misc/uss720.c 	d = (priv->reg[1] & (~mask)) ^ val;
d                 372 drivers/usb/misc/uss720.c 	if (set_1284_register(pp, 2, d, GFP_ATOMIC))
d                 374 drivers/usb/misc/uss720.c 	priv->reg[1] = d;
d                 375 drivers/usb/misc/uss720.c 	return d & 0xf;
d                 390 drivers/usb/misc/uss720.c 	unsigned char d;
d                 392 drivers/usb/misc/uss720.c 	d = priv->reg[1] & ~0x10;
d                 393 drivers/usb/misc/uss720.c 	if (set_1284_register(pp, 2, d, GFP_KERNEL))
d                 395 drivers/usb/misc/uss720.c 	priv->reg[1] = d;
d                 401 drivers/usb/misc/uss720.c 	unsigned char d;
d                 403 drivers/usb/misc/uss720.c 	d = priv->reg[1] | 0x10;
d                 404 drivers/usb/misc/uss720.c 	if (set_1284_register(pp, 2, d, GFP_KERNEL))
d                 406 drivers/usb/misc/uss720.c 	priv->reg[1] = d;
d                 412 drivers/usb/misc/uss720.c 	unsigned char d;
d                 414 drivers/usb/misc/uss720.c 	d = priv->reg[1] & ~0x20;
d                 415 drivers/usb/misc/uss720.c 	if (set_1284_register(pp, 2, d, GFP_KERNEL))
d                 417 drivers/usb/misc/uss720.c 	priv->reg[1] = d;
d                 423 drivers/usb/misc/uss720.c 	unsigned char d;
d                 425 drivers/usb/misc/uss720.c 	d = priv->reg[1] | 0x20;
d                 426 drivers/usb/misc/uss720.c 	if (set_1284_register(pp, 2, d, GFP_KERNEL))
d                 428 drivers/usb/misc/uss720.c 	priv->reg[1] = d;
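
The uss720 control helpers above keep a cached ("shadow") copy of the parallel-port control register, compute the new byte from it, write it to the hardware, and only then update the cache. A minimal sketch of that read-modify-write pattern; write_hw() is a stub standing in for the real set_1284_register() transfer:

#include <stdio.h>

static unsigned char shadow;	/* cached copy of the control register */

static int write_hw(unsigned char v) { (void)v; return 0; /* stand-in for the USB write */ }

static unsigned char frob_control(unsigned char mask, unsigned char val)
{
	unsigned char d = (shadow & ~mask) ^ val;

	if (write_hw(d))
		return 0;
	shadow = d;		/* update the cache only after the write succeeded */
	return d & 0xf;
}

int main(void)
{
	shadow = 0xc4;
	printf("low nibble after frob: 0x%x\n", frob_control(0x10, 0x10));
	printf("shadow is now: 0x%02x\n", shadow);
	return 0;
}
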
d                  70 drivers/usb/misc/yurex.c #define to_yurex_dev(d) container_of(d, struct usb_yurex, kref)
d                 773 drivers/usb/musb/cppi_dma.c 	struct cppi_descriptor	*d;
d                 885 drivers/usb/musb/cppi_dma.c 	for (d = rx->head; d; d = d->next)
d                 886 drivers/usb/musb/cppi_dma.c 		cppi_dump_rxbd("S", d);
d                 180 drivers/usb/musb/musb_dma.h static inline void musb_dma_controller_destroy(struct dma_controller *d) { }
d                 448 drivers/usb/musb/musb_host.c 		struct usb_iso_packet_descriptor	*d;
d                 455 drivers/usb/musb/musb_host.c 		d = urb->iso_frame_desc + qh->iso_idx;
d                 456 drivers/usb/musb/musb_host.c 		buf = buffer + d->offset;
d                 457 drivers/usb/musb/musb_host.c 		length = d->length;
d                 468 drivers/usb/musb/musb_host.c 		d->actual_length = length;
d                 470 drivers/usb/musb/musb_host.c 		d->status = status;
d                1392 drivers/usb/musb/musb_host.c 			struct usb_iso_packet_descriptor	*d;
d                1394 drivers/usb/musb/musb_host.c 			d = urb->iso_frame_desc + qh->iso_idx;
d                1395 drivers/usb/musb/musb_host.c 			d->actual_length = length;
d                1396 drivers/usb/musb/musb_host.c 			d->status = status;
d                1400 drivers/usb/musb/musb_host.c 				d++;
d                1401 drivers/usb/musb/musb_host.c 				offset = d->offset;
d                1402 drivers/usb/musb/musb_host.c 				length = d->length;
d                1576 drivers/usb/musb/musb_host.c 		struct usb_iso_packet_descriptor *d;
d                1578 drivers/usb/musb/musb_host.c 		d = urb->iso_frame_desc + qh->iso_idx;
d                1579 drivers/usb/musb/musb_host.c 		d->actual_length = len;
d                1584 drivers/usb/musb/musb_host.c 		if (d->status != -EILSEQ && d->status != -EOVERFLOW)
d                1585 drivers/usb/musb/musb_host.c 			d->status = 0;
d                1650 drivers/usb/musb/musb_host.c 		struct usb_iso_packet_descriptor *d;
d                1652 drivers/usb/musb/musb_host.c 		d = urb->iso_frame_desc + qh->iso_idx;
d                1658 drivers/usb/musb/musb_host.c 		if (rx_count > d->length) {
d                1664 drivers/usb/musb/musb_host.c 				rx_count, d->length);
d                1666 drivers/usb/musb/musb_host.c 			length = d->length;
d                1669 drivers/usb/musb/musb_host.c 		d->status = d_status;
d                1670 drivers/usb/musb/musb_host.c 		buf = urb->transfer_dma + d->offset;
d                1435 drivers/usb/renesas_usbhs/fifo.c 			usbhs_get_dparam(priv, d##channel##_tx_id);	\
d                1437 drivers/usb/renesas_usbhs/fifo.c 			usbhs_get_dparam(priv, d##channel##_rx_id);	\
d                 131 drivers/usb/renesas_usbhs/mod_host.c #define usbhsh_usbv_to_udev(d)	dev_get_drvdata(&(d)->dev)
d                 138 drivers/usb/renesas_usbhs/mod_host.c #define usbhsh_device_parent(d)		(usbhsh_usbv_to_udev((d)->usbv->parent))
d                 139 drivers/usb/renesas_usbhs/mod_host.c #define usbhsh_device_hubport(d)	((d)->usbv->portnum)
d                 140 drivers/usb/renesas_usbhs/mod_host.c #define usbhsh_device_number(h, d)	((int)((d) - (h)->udev))
d                 141 drivers/usb/renesas_usbhs/mod_host.c #define usbhsh_device_nth(h, d)		((h)->udev + d)
d                  33 drivers/usb/roles/class.c #define to_role_switch(d)	container_of(d, struct usb_role_switch, dev)
d                 970 drivers/usb/serial/iuu_phoenix.c #define SOUP(a, b, c, d)  do { \
d                 973 drivers/usb/serial/iuu_phoenix.c 				b, a, c, d, NULL, 0, 1000); \
d                 974 drivers/usb/serial/iuu_phoenix.c 	dev_dbg(dev, "0x%x:0x%x:0x%x:0x%x  %d\n", a, b, c, d, result); } while (0)
d                 247 drivers/usb/serial/keyspan_usa26msg.h 		d;
d                 241 drivers/usb/serial/keyspan_usa67msg.h 		d;
d                 484 drivers/usb/serial/mos7720.c static void parport_mos7715_write_data(struct parport *pp, unsigned char d)
d                 491 drivers/usb/serial/mos7720.c 	write_mos_reg(mos_parport->serial, dummy, MOS7720_DPR, (__u8)d);
d                 498 drivers/usb/serial/mos7720.c 	unsigned char d;
d                 502 drivers/usb/serial/mos7720.c 	read_mos_reg(mos_parport->serial, dummy, MOS7720_DPR, &d);
d                 504 drivers/usb/serial/mos7720.c 	return d;
d                 507 drivers/usb/serial/mos7720.c static void parport_mos7715_write_control(struct parport *pp, unsigned char d)
d                 514 drivers/usb/serial/mos7720.c 	data = ((__u8)d & 0x0f) | (mos_parport->shadowDCR & 0xf0);
d                 248 drivers/usb/serial/quatech2.c 	struct qt2_device_detail d;
d                 251 drivers/usb/serial/quatech2.c 	for (i = 0; d = qt2_device_details[i], d.product_id != 0; i++) {
d                 252 drivers/usb/serial/quatech2.c 		if (d.product_id == le16_to_cpu(serial->dev->descriptor.idProduct))
d                 253 drivers/usb/serial/quatech2.c 			return d.num_ports;
d                 763 drivers/usb/serial/quatech2.c 	u8 *d;
d                 766 drivers/usb/serial/quatech2.c 	d = kzalloc(2, GFP_KERNEL);
d                 767 drivers/usb/serial/quatech2.c 	if (!d)
d                 770 drivers/usb/serial/quatech2.c 	r = qt2_getregister(dev, port_priv->device_port, UART_MCR, d);
d                 774 drivers/usb/serial/quatech2.c 	r = qt2_getregister(dev, port_priv->device_port, UART_MSR, d + 1);
d                 778 drivers/usb/serial/quatech2.c 	r = (d[0] & UART_MCR_DTR ? TIOCM_DTR : 0) |
d                 779 drivers/usb/serial/quatech2.c 	    (d[0] & UART_MCR_RTS ? TIOCM_RTS : 0) |
d                 780 drivers/usb/serial/quatech2.c 	    (d[1] & UART_MSR_CTS ? TIOCM_CTS : 0) |
d                 781 drivers/usb/serial/quatech2.c 	    (d[1] & UART_MSR_DCD ? TIOCM_CAR : 0) |
d                 782 drivers/usb/serial/quatech2.c 	    (d[1] & UART_MSR_RI ? TIOCM_RI : 0) |
d                 783 drivers/usb/serial/quatech2.c 	    (d[1] & UART_MSR_DSR ? TIOCM_DSR : 0);
d                 786 drivers/usb/serial/quatech2.c 	kfree(d);
d                 383 drivers/usb/serial/ssu100.c 	u8 *d;
d                 386 drivers/usb/serial/ssu100.c 	d = kzalloc(2, GFP_KERNEL);
d                 387 drivers/usb/serial/ssu100.c 	if (!d)
d                 390 drivers/usb/serial/ssu100.c 	r = ssu100_getregister(dev, 0, UART_MCR, d);
d                 394 drivers/usb/serial/ssu100.c 	r = ssu100_getregister(dev, 0, UART_MSR, d+1);
d                 398 drivers/usb/serial/ssu100.c 	r = (d[0] & UART_MCR_DTR ? TIOCM_DTR : 0) |
d                 399 drivers/usb/serial/ssu100.c 		(d[0] & UART_MCR_RTS ? TIOCM_RTS : 0) |
d                 400 drivers/usb/serial/ssu100.c 		(d[1] & UART_MSR_CTS ? TIOCM_CTS : 0) |
d                 401 drivers/usb/serial/ssu100.c 		(d[1] & UART_MSR_DCD ? TIOCM_CAR : 0) |
d                 402 drivers/usb/serial/ssu100.c 		(d[1] & UART_MSR_RI ? TIOCM_RI : 0) |
d                 403 drivers/usb/serial/ssu100.c 		(d[1] & UART_MSR_DSR ? TIOCM_DSR : 0);
d                 406 drivers/usb/serial/ssu100.c 	kfree(d);
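
Both quatech2 and ssu100 above read the MCR and MSR bytes back from the device and fold them into a TIOCM_* bitmask for tiocmget(). A sketch of just that bit translation, assuming the Linux UAPI headers (linux/serial_reg.h for the UART bits, sys/ioctl.h for the TIOCM flags) are available to a userspace build:

#include <stdio.h>
#include <sys/ioctl.h>		/* TIOCM_* flags */
#include <linux/serial_reg.h>	/* UART_MCR_ / UART_MSR_ bits */

static int regs_to_tiocm(unsigned char mcr, unsigned char msr)
{
	return (mcr & UART_MCR_DTR ? TIOCM_DTR : 0) |
	       (mcr & UART_MCR_RTS ? TIOCM_RTS : 0) |
	       (msr & UART_MSR_CTS ? TIOCM_CTS : 0) |
	       (msr & UART_MSR_DCD ? TIOCM_CAR : 0) |
	       (msr & UART_MSR_RI  ? TIOCM_RI  : 0) |
	       (msr & UART_MSR_DSR ? TIOCM_DSR : 0);
}

int main(void)
{
	printf("tiocm = 0x%x\n",
	       regs_to_tiocm(UART_MCR_DTR, UART_MSR_CTS | UART_MSR_DCD));
	return 0;
}
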
d                  29 drivers/usb/typec/bus.h #define to_altmode(d) container_of(d, struct altmode, adev)
d                 144 drivers/usb/typec/tcpm/wcove.c #define USBC_TXINFO_RETRIES(d)		(d << 3)
d                  67 drivers/usb/usb-skeleton.c #define to_skel_dev(d) container_of(d, struct usb_skel, kref)
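
to_skel_dev(), to_lcd_dev(), to_yurex_dev(), the kref_to_*() helpers and similar macros above are all thin wrappers around container_of(): recover the enclosing structure from a pointer to one of its members. A userspace sketch using offsetof(); struct usb_skel_like is an illustrative stand-in, not the real struct usb_skel:

#include <stdio.h>
#include <stddef.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct kref { int refcount; };

struct usb_skel_like {		/* illustrative stand-in */
	int minor;
	struct kref kref;
};

#define to_skel_dev(d) container_of(d, struct usb_skel_like, kref)

int main(void)
{
	struct usb_skel_like skel = { .minor = 7, .kref = { 1 } };
	struct kref *k = &skel.kref;

	/* Given only the embedded kref, get back to the containing device. */
	printf("minor via container_of: %d\n", to_skel_dev(k)->minor);
	return 0;
}
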
d                  39 drivers/vfio/mdev/mdev_private.h #define dev_is_mdev(d)		((d)->bus == &mdev_bus_type)
d                 756 drivers/vfio/vfio_iommu_type1.c 	struct vfio_domain *domain, *d;
d                 775 drivers/vfio/vfio_iommu_type1.c 	domain = d = list_first_entry(&iommu->domain_list,
d                 778 drivers/vfio/vfio_iommu_type1.c 	list_for_each_entry_continue(d, &iommu->domain_list, next) {
d                 779 drivers/vfio/vfio_iommu_type1.c 		iommu_unmap(d->domain, dma->iova, dma->size);
d                 988 drivers/vfio/vfio_iommu_type1.c 	struct vfio_domain *d;
d                 991 drivers/vfio/vfio_iommu_type1.c 	list_for_each_entry(d, &iommu->domain_list, next) {
d                 992 drivers/vfio/vfio_iommu_type1.c 		ret = iommu_map(d->domain, iova, (phys_addr_t)pfn << PAGE_SHIFT,
d                 993 drivers/vfio/vfio_iommu_type1.c 				npage << PAGE_SHIFT, prot | d->prot);
d                1003 drivers/vfio/vfio_iommu_type1.c 	list_for_each_entry_continue_reverse(d, &iommu->domain_list, next)
d                1004 drivers/vfio/vfio_iommu_type1.c 		iommu_unmap(d->domain, iova, npage << PAGE_SHIFT);
d                1190 drivers/vfio/vfio_iommu_type1.c 	struct vfio_domain *d;
d                1196 drivers/vfio/vfio_iommu_type1.c 	d = list_first_entry(&iommu->domain_list, struct vfio_domain, next);
d                1214 drivers/vfio/vfio_iommu_type1.c 				phys = iommu_iova_to_phys(d->domain, iova);
d                1225 drivers/vfio/vfio_iommu_type1.c 				       p == iommu_iova_to_phys(d->domain, i)) {
d                1657 drivers/vfio/vfio_iommu_type1.c 	struct vfio_domain *domain, *d;
d                1668 drivers/vfio/vfio_iommu_type1.c 	list_for_each_entry(d, &iommu->domain_list, next) {
d                1669 drivers/vfio/vfio_iommu_type1.c 		if (find_iommu_group(d, iommu_group)) {
d                1802 drivers/vfio/vfio_iommu_type1.c 	list_for_each_entry(d, &iommu->domain_list, next) {
d                1803 drivers/vfio/vfio_iommu_type1.c 		if (d->domain->ops == domain->domain->ops &&
d                1804 drivers/vfio/vfio_iommu_type1.c 		    d->prot == domain->prot) {
d                1806 drivers/vfio/vfio_iommu_type1.c 			if (!vfio_iommu_attach_group(d, group)) {
d                1807 drivers/vfio/vfio_iommu_type1.c 				list_add(&group->next, &d->group_list);
d                1944 drivers/vfio/vfio_iommu_type1.c 	struct vfio_domain *d;
d                1954 drivers/vfio/vfio_iommu_type1.c 	list_for_each_entry(d, &iommu->domain_list, next) {
d                1955 drivers/vfio/vfio_iommu_type1.c 		list_for_each_entry(g, &d->group_list, next) {
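
The vfio_iommu_type1 pinning path above maps an IOVA range into every domain on the list and, when one iommu_map() fails, unwinds the domains already mapped via list_for_each_entry_continue_reverse(). A simplified sketch of that map-all-or-roll-back shape over an array, with trivial stand-ins for map/unmap:

#include <stdio.h>

#define NDOMAINS 3

static int mapped[NDOMAINS];

static int map_one(int i)    { if (i == 2) return -1; mapped[i] = 1; return 0; }
static void unmap_one(int i) { mapped[i] = 0; }

static int map_all(void)
{
	int i;

	for (i = 0; i < NDOMAINS; i++) {
		if (map_one(i) < 0)
			goto unwind;
	}
	return 0;

unwind:
	while (--i >= 0)	/* undo in reverse order */
		unmap_one(i);
	return -1;
}

int main(void)
{
	int ret = map_all();

	printf("map_all -> %d, mapped = {%d,%d,%d}\n",
	       ret, mapped[0], mapped[1], mapped[2]);
	return 0;
}
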
d                1037 drivers/vhost/net.c 	unsigned d;
d                1055 drivers/vhost/net.c 		d = r;
d                1056 drivers/vhost/net.c 		if (d == vq->num) {
d                1070 drivers/vhost/net.c 		heads[headcount].id = cpu_to_vhost32(vq, d);
d                 292 drivers/vhost/vhost.c static void vhost_vq_meta_reset(struct vhost_dev *d)
d                 296 drivers/vhost/vhost.c 	for (i = 0; i < d->nvqs; ++i)
d                 297 drivers/vhost/vhost.c 		__vhost_vq_meta_reset(d->vqs[i]);
d                 752 drivers/vhost/vhost.c static bool memory_access_ok(struct vhost_dev *d, struct vhost_umem *umem,
d                 757 drivers/vhost/vhost.c 	for (i = 0; i < d->nvqs; ++i) {
d                 761 drivers/vhost/vhost.c 		mutex_lock(&d->vqs[i]->mutex);
d                 762 drivers/vhost/vhost.c 		log = log_all || vhost_has_feature(d->vqs[i], VHOST_F_LOG_ALL);
d                 764 drivers/vhost/vhost.c 		if (d->vqs[i]->private_data)
d                 765 drivers/vhost/vhost.c 			ok = vq_memory_access_ok(d->vqs[i]->log_base,
d                 769 drivers/vhost/vhost.c 		mutex_unlock(&d->vqs[i]->mutex);
d                 966 drivers/vhost/vhost.c static void vhost_dev_lock_vqs(struct vhost_dev *d)
d                 969 drivers/vhost/vhost.c 	for (i = 0; i < d->nvqs; ++i)
d                 970 drivers/vhost/vhost.c 		mutex_lock_nested(&d->vqs[i]->mutex, i);
d                 973 drivers/vhost/vhost.c static void vhost_dev_unlock_vqs(struct vhost_dev *d)
d                 976 drivers/vhost/vhost.c 	for (i = 0; i < d->nvqs; ++i)
d                 977 drivers/vhost/vhost.c 		mutex_unlock(&d->vqs[i]->mutex);
d                1058 drivers/vhost/vhost.c static void vhost_iotlb_notify_vq(struct vhost_dev *d,
d                1063 drivers/vhost/vhost.c 	spin_lock(&d->iotlb_lock);
d                1065 drivers/vhost/vhost.c 	list_for_each_entry_safe(node, n, &d->pending_list, node) {
d                1076 drivers/vhost/vhost.c 	spin_unlock(&d->iotlb_lock);
d                1419 drivers/vhost/vhost.c static long vhost_set_memory(struct vhost_dev *d, struct vhost_memory __user *m)
d                1464 drivers/vhost/vhost.c 	if (!memory_access_ok(d, newumem, 0))
d                1467 drivers/vhost/vhost.c 	oldumem = d->umem;
d                1468 drivers/vhost/vhost.c 	d->umem = newumem;
d                1471 drivers/vhost/vhost.c 	for (i = 0; i < d->nvqs; ++i) {
d                1472 drivers/vhost/vhost.c 		mutex_lock(&d->vqs[i]->mutex);
d                1473 drivers/vhost/vhost.c 		d->vqs[i]->umem = newumem;
d                1474 drivers/vhost/vhost.c 		mutex_unlock(&d->vqs[i]->mutex);
d                1487 drivers/vhost/vhost.c static long vhost_vring_set_num(struct vhost_dev *d,
d                1508 drivers/vhost/vhost.c static long vhost_vring_set_addr(struct vhost_dev *d,
d                1561 drivers/vhost/vhost.c static long vhost_vring_set_num_addr(struct vhost_dev *d,
d                1572 drivers/vhost/vhost.c 		r = vhost_vring_set_num(d, vq, argp);
d                1575 drivers/vhost/vhost.c 		r = vhost_vring_set_addr(d, vq, argp);
d                1585 drivers/vhost/vhost.c long vhost_vring_ioctl(struct vhost_dev *d, unsigned int ioctl, void __user *argp)
d                1600 drivers/vhost/vhost.c 	if (idx >= d->nvqs)
d                1603 drivers/vhost/vhost.c 	idx = array_index_nospec(idx, d->nvqs);
d                1604 drivers/vhost/vhost.c 	vq = d->vqs[idx];
d                1608 drivers/vhost/vhost.c 		return vhost_vring_set_num_addr(d, vq, ioctl, argp);
d                1721 drivers/vhost/vhost.c int vhost_init_device_iotlb(struct vhost_dev *d, bool enabled)
d                1730 drivers/vhost/vhost.c 	oiotlb = d->iotlb;
d                1731 drivers/vhost/vhost.c 	d->iotlb = niotlb;
d                1733 drivers/vhost/vhost.c 	for (i = 0; i < d->nvqs; ++i) {
d                1734 drivers/vhost/vhost.c 		struct vhost_virtqueue *vq = d->vqs[i];
d                1749 drivers/vhost/vhost.c long vhost_dev_ioctl(struct vhost_dev *d, unsigned int ioctl, void __user *argp)
d                1758 drivers/vhost/vhost.c 		r = vhost_dev_set_owner(d);
d                1763 drivers/vhost/vhost.c 	r = vhost_dev_check_owner(d);
d                1769 drivers/vhost/vhost.c 		r = vhost_set_memory(d, argp);
d                1780 drivers/vhost/vhost.c 		for (i = 0; i < d->nvqs; ++i) {
d                1783 drivers/vhost/vhost.c 			vq = d->vqs[i];
d                1802 drivers/vhost/vhost.c 		swap(ctx, d->log_ctx);
d                1803 drivers/vhost/vhost.c 		for (i = 0; i < d->nvqs; ++i) {
d                1804 drivers/vhost/vhost.c 			mutex_lock(&d->vqs[i]->mutex);
d                1805 drivers/vhost/vhost.c 			d->vqs[i]->log_ctx = d->log_ctx;
d                1806 drivers/vhost/vhost.c 			mutex_unlock(&d->vqs[i]->mutex);
d                  48 drivers/vhost/vhost.h long vhost_vring_ioctl(struct vhost_dev *d, unsigned int ioctl, void __user *argp);
d                 189 drivers/vhost/vhost.h long vhost_vring_ioctl(struct vhost_dev *d, unsigned int ioctl, void __user *argp);
d                 229 drivers/vhost/vhost.h int vhost_init_device_iotlb(struct vhost_dev *d, bool enabled);
d                 582 drivers/video/console/newport_con.c 	unsigned short *s, *d;
d                 608 drivers/video/console/newport_con.c 		d = (unsigned short *) (vc->vc_origin +
d                 612 drivers/video/console/newport_con.c 			if (chattr != scr_readw(d)) {
d                 614 drivers/video/console/newport_con.c 				scr_writew(chattr, d);
d                 616 drivers/video/console/newport_con.c 			d++;
d                 622 drivers/video/console/newport_con.c 		d = (unsigned short *) (vc->vc_origin +
d                 627 drivers/video/console/newport_con.c 			if (scr_readw(d) != vc->vc_video_erase_char) {
d                 630 drivers/video/console/newport_con.c 				scr_writew(vc->vc_video_erase_char, d);
d                 632 drivers/video/console/newport_con.c 			d++;
d                 643 drivers/video/console/newport_con.c 		d = (unsigned short *) (vc->vc_origin +
d                 647 drivers/video/console/newport_con.c 			if (chattr != scr_readw(d)) {
d                 649 drivers/video/console/newport_con.c 				scr_writew(chattr, d);
d                 651 drivers/video/console/newport_con.c 			d--;
d                 657 drivers/video/console/newport_con.c 		d = (unsigned short *) (vc->vc_origin +
d                 662 drivers/video/console/newport_con.c 			if (scr_readw(d) != vc->vc_video_erase_char) {
d                 665 drivers/video/console/newport_con.c 				scr_writew(vc->vc_video_erase_char, d);
d                 667 drivers/video/console/newport_con.c 			d++;
d                 339 drivers/video/console/vgacon.c 		void *d = (void *) c->vc_visible_origin;
d                 345 drivers/video/console/vgacon.c 		scr_memcpyw(d, vgacon_scrollback_cur->data + soff, copysize);
d                 346 drivers/video/console/vgacon.c 		d += copysize;
d                 350 drivers/video/console/vgacon.c 			scr_memcpyw(d, vgacon_scrollback_cur->data, count);
d                 351 drivers/video/console/vgacon.c 			d += count;
d                 355 drivers/video/console/vgacon.c 			scr_memcpyw(d, s, diff * c->vc_size_row);
d                1208 drivers/video/console/vgacon.c 			struct vc_data *c = vc_cons[i].d;
d                1265 drivers/video/console/vgacon.c 		struct vc_data *c = vc_cons[i].d;
d                  40 drivers/video/fbdev/atafb_iplan2p2.c 	u32 *s, *d;
d                  61 drivers/video/fbdev/atafb_iplan2p2.c 				d = (u32 *)dst;
d                  66 drivers/video/fbdev/atafb_iplan2p2.c 						*d++ = *s++;
d                  68 drivers/video/fbdev/atafb_iplan2p2.c 					d = (u32 *)((u8 *)d + l);
d                  87 drivers/video/fbdev/atafb_iplan2p2.c 				d = (u32 *)dst;
d                  92 drivers/video/fbdev/atafb_iplan2p2.c 						*--d = *--s;
d                  94 drivers/video/fbdev/atafb_iplan2p2.c 					d = (u32 *)((u8 *)d - l);
d                 216 drivers/video/fbdev/atafb_iplan2p2.c 		u32 *d = dest;
d                 219 drivers/video/fbdev/atafb_iplan2p2.c 			d = fill16_col(d, rows, cval);
d                 220 drivers/video/fbdev/atafb_iplan2p2.c 			d = (u32 *)((long)d + off);
d                 259 drivers/video/fbdev/atafb_iplan2p2.c 			u16 d = *data16++;
d                 260 drivers/video/fbdev/atafb_iplan2p2.c 			m = d | ((u32)d << 16);
d                  40 drivers/video/fbdev/atafb_iplan2p4.c 	u32 *s, *d;
d                  61 drivers/video/fbdev/atafb_iplan2p4.c 				d = (u32 *)dst;
d                  66 drivers/video/fbdev/atafb_iplan2p4.c 						*d++ = *s++;
d                  68 drivers/video/fbdev/atafb_iplan2p4.c 					d = (u32 *)((u8 *)d + l);
d                  87 drivers/video/fbdev/atafb_iplan2p4.c 				d = (u32 *)dst;
d                  92 drivers/video/fbdev/atafb_iplan2p4.c 						*--d = *--s;
d                  94 drivers/video/fbdev/atafb_iplan2p4.c 					d = (u32 *)((u8 *)d - l);
d                 230 drivers/video/fbdev/atafb_iplan2p4.c 		u32 *d = dest;
d                 233 drivers/video/fbdev/atafb_iplan2p4.c 			d = fill16_col(d, rows, cval);
d                 234 drivers/video/fbdev/atafb_iplan2p4.c 			d = (u32 *)((long)d + off);
d                 273 drivers/video/fbdev/atafb_iplan2p4.c 			u16 d = *data16++;
d                 274 drivers/video/fbdev/atafb_iplan2p4.c 			m = d | ((u32)d << 16);
d                  47 drivers/video/fbdev/atafb_iplan2p8.c 	u32 *s, *d;
d                  68 drivers/video/fbdev/atafb_iplan2p8.c 				d = (u32 *)dst;
d                  73 drivers/video/fbdev/atafb_iplan2p8.c 						*d++ = *s++;
d                  75 drivers/video/fbdev/atafb_iplan2p8.c 					d = (u32 *)((u8 *)d + l);
d                  94 drivers/video/fbdev/atafb_iplan2p8.c 				d = (u32 *)dst;
d                  99 drivers/video/fbdev/atafb_iplan2p8.c 						*--d = *--s;
d                 101 drivers/video/fbdev/atafb_iplan2p8.c 					d = (u32 *)((u8 *)d - l);
d                 265 drivers/video/fbdev/atafb_iplan2p8.c 		u32 *d = dest;
d                 268 drivers/video/fbdev/atafb_iplan2p8.c 			d = fill16_col(d, rows, cval);
d                 269 drivers/video/fbdev/atafb_iplan2p8.c 			d = (u32 *)((long)d + off);
d                 308 drivers/video/fbdev/atafb_iplan2p8.c 			u16 d = *data16++;
d                 309 drivers/video/fbdev/atafb_iplan2p8.c 			m = d | ((u32)d << 16);
d                 145 drivers/video/fbdev/atafb_utils.h static inline void *fb_memmove(void *d, const void *s, size_t count)
d                 147 drivers/video/fbdev/atafb_utils.h 	if (d < s) {
d                 155 drivers/video/fbdev/atafb_utils.h 				: "=a" (d), "=a" (s), "=d" (count)
d                 156 drivers/video/fbdev/atafb_utils.h 				: "0" (d), "1" (s), "2" (count));
d                 176 drivers/video/fbdev/atafb_utils.h 				: "=a" (d), "=a" (s), "=d" (count), "=d" (tmp)
d                 177 drivers/video/fbdev/atafb_utils.h 				: "0" (d), "1" (s), "2" (count));
d                 187 drivers/video/fbdev/atafb_utils.h 				: "=a" (d), "=a" (s), "=d" (count)
d                 188 drivers/video/fbdev/atafb_utils.h 				: "0" ((char *) d + count), "1" ((char *) s + count), "2" (count));
d                 209 drivers/video/fbdev/atafb_utils.h 				: "=a" (d), "=a" (s), "=d" (count), "=d" (tmp)
d                 210 drivers/video/fbdev/atafb_utils.h 				: "0" ((char *) d + count), "1" ((char *) s + count), "2" (count));
d                 377 drivers/video/fbdev/atafb_utils.h 	u32 *s, *d, v;
d                 380 drivers/video/fbdev/atafb_utils.h         d = dst;
d                 382 drivers/video/fbdev/atafb_utils.h                 v = (*s++ & mask) | (*d  & ~mask);
d                 383 drivers/video/fbdev/atafb_utils.h                 *d++ = v;
d                 385 drivers/video/fbdev/atafb_utils.h                 v = (*s++ & mask) | (*d  & ~mask);
d                 386 drivers/video/fbdev/atafb_utils.h                 *d++ = v;
d                 389 drivers/video/fbdev/atafb_utils.h                 v = (*s++ & mask) | (*d  & ~mask);
d                 390 drivers/video/fbdev/atafb_utils.h                 *d++ = v;
d                 391 drivers/video/fbdev/atafb_utils.h                 v = (*s++ & mask) | (*d  & ~mask);
d                 392 drivers/video/fbdev/atafb_utils.h                 *d++ = v;
d                 394 drivers/video/fbdev/atafb_utils.h                 d = (u32 *)((u8 *)d + bytes);
d                 469 drivers/video/fbdev/aty/aty128fb.c #define round_div(n, d) ((n+(d/2))/d)
d                1374 drivers/video/fbdev/aty/aty128fb.c 	u32 n, d;
d                1398 drivers/video/fbdev/aty/aty128fb.c 	d = c.ref_clk;
d                1400 drivers/video/fbdev/aty/aty128fb.c 	pll->feedback_divider = round_div(n, d);
d                1439 drivers/video/fbdev/aty/aty128fb.c 	u32 n, d, bpp;
d                1445 drivers/video/fbdev/aty/aty128fb.c 	d = pll->vclk * bpp;
d                1446 drivers/video/fbdev/aty/aty128fb.c 	x = round_div(n, d);
d                1468 drivers/video/fbdev/aty/aty128fb.c 	x = round_div(n, d);
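
aty128fb's round_div() above rounds an integer division to the nearest value by adding half the divisor before dividing. A tiny sketch (with the macro arguments parenthesised for safety):

#include <stdio.h>

#define round_div(n, d) (((n) + (d) / 2) / (d))

int main(void)
{
	printf("round_div(7, 2)  = %d\n", round_div(7, 2));	/* 4, not 3 */
	printf("round_div(10, 4) = %d\n", round_div(10, 4));	/* 3 (10/4 = 2.5) */
	printf("plain 7/2        = %d\n", 7 / 2);
	return 0;
}
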
d                1018 drivers/video/fbdev/aty/radeon_monitor.c 			int d;
d                1024 drivers/video/fbdev/aty/radeon_monitor.c 			d = radeon_compare_modes(src, &db[i]);
d                1028 drivers/video/fbdev/aty/radeon_monitor.c 			if (d < distance) {
d                1030 drivers/video/fbdev/aty/radeon_monitor.c 				distance = d;
d                  20 drivers/video/fbdev/c2p_core.h static inline void _transp(u32 d[], unsigned int i1, unsigned int i2,
d                  23 drivers/video/fbdev/c2p_core.h 	u32 t = (d[i1] ^ (d[i2] >> shift)) & mask;
d                  25 drivers/video/fbdev/c2p_core.h 	d[i1] ^= t;
d                  26 drivers/video/fbdev/c2p_core.h 	d[i2] ^= t << shift;
d                  60 drivers/video/fbdev/c2p_core.h static __always_inline void transp8(u32 d[], unsigned int n, unsigned int m)
d                  67 drivers/video/fbdev/c2p_core.h 		_transp(d, 0, 1, n, mask);
d                  69 drivers/video/fbdev/c2p_core.h 		_transp(d, 2, 3, n, mask);
d                  71 drivers/video/fbdev/c2p_core.h 		_transp(d, 4, 5, n, mask);
d                  73 drivers/video/fbdev/c2p_core.h 		_transp(d, 6, 7, n, mask);
d                  78 drivers/video/fbdev/c2p_core.h 		_transp(d, 0, 2, n, mask);
d                  79 drivers/video/fbdev/c2p_core.h 		_transp(d, 1, 3, n, mask);
d                  81 drivers/video/fbdev/c2p_core.h 		_transp(d, 4, 6, n, mask);
d                  82 drivers/video/fbdev/c2p_core.h 		_transp(d, 5, 7, n, mask);
d                  87 drivers/video/fbdev/c2p_core.h 		_transp(d, 0, 4, n, mask);
d                  88 drivers/video/fbdev/c2p_core.h 		_transp(d, 1, 5, n, mask);
d                  89 drivers/video/fbdev/c2p_core.h 		_transp(d, 2, 6, n, mask);
d                  90 drivers/video/fbdev/c2p_core.h 		_transp(d, 3, 7, n, mask);
d                 102 drivers/video/fbdev/c2p_core.h static __always_inline void transp4(u32 d[], unsigned int n, unsigned int m)
d                 109 drivers/video/fbdev/c2p_core.h 		_transp(d, 0, 1, n, mask);
d                 111 drivers/video/fbdev/c2p_core.h 		_transp(d, 2, 3, n, mask);
d                 116 drivers/video/fbdev/c2p_core.h 		_transp(d, 0, 2, n, mask);
d                 117 drivers/video/fbdev/c2p_core.h 		_transp(d, 1, 3, n, mask);
d                 129 drivers/video/fbdev/c2p_core.h static __always_inline void transp4x(u32 d[], unsigned int n, unsigned int m)
d                 136 drivers/video/fbdev/c2p_core.h 		_transp(d, 2, 0, n, mask);
d                 137 drivers/video/fbdev/c2p_core.h 		_transp(d, 3, 1, n, mask);
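
The c2p_core.h lines above are built on _transp(), a delta-swap that exchanges the bits of d[i1] selected by mask with the bits of d[i2] sitting shift positions higher; transp4/transp8 compose it into the chunky-to-planar bit transposition. A standalone demonstration of the primitive:

#include <stdio.h>

typedef unsigned int u32;

static void _transp(u32 d[], unsigned int i1, unsigned int i2,
		    unsigned int shift, u32 mask)
{
	u32 t = (d[i1] ^ (d[i2] >> shift)) & mask;

	d[i1] ^= t;
	d[i2] ^= t << shift;
}

int main(void)
{
	/* Swap the low 16 bits of d[0] with the high 16 bits of d[1]. */
	u32 d[2] = { 0x11112222, 0x33334444 };

	_transp(d, 0, 1, 16, 0x0000ffff);
	printf("d[0] = 0x%08x, d[1] = 0x%08x\n", d[0], d[1]);	/* 0x11113333, 0x22224444 */
	return 0;
}
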
d                  27 drivers/video/fbdev/c2p_iplan2.c static void c2p_16x8(u32 d[4])
d                  29 drivers/video/fbdev/c2p_iplan2.c 	transp4(d, 8, 2);
d                  30 drivers/video/fbdev/c2p_iplan2.c 	transp4(d, 1, 2);
d                  31 drivers/video/fbdev/c2p_iplan2.c 	transp4x(d, 16, 2);
d                  32 drivers/video/fbdev/c2p_iplan2.c 	transp4x(d, 2, 2);
d                  33 drivers/video/fbdev/c2p_iplan2.c 	transp4(d, 4, 1);
d                  48 drivers/video/fbdev/c2p_iplan2.c static inline void store_iplan2(void *dst, u32 bpp, u32 d[4])
d                  53 drivers/video/fbdev/c2p_iplan2.c 		put_unaligned_be32(d[perm_c2p_16x8[i]], dst);
d                  61 drivers/video/fbdev/c2p_iplan2.c static inline void store_iplan2_masked(void *dst, u32 bpp, u32 d[4], u32 mask)
d                  66 drivers/video/fbdev/c2p_iplan2.c 		put_unaligned_be32(comp(d[perm_c2p_16x8[i]],
d                  91 drivers/video/fbdev/c2p_iplan2.c 	} d;
d                 109 drivers/video/fbdev/c2p_iplan2.c 			memset(d.pixels, 0, sizeof(d));
d                 110 drivers/video/fbdev/c2p_iplan2.c 			memcpy(d.pixels+dst_idx, c, width);
d                 112 drivers/video/fbdev/c2p_iplan2.c 			c2p_16x8(d.words);
d                 113 drivers/video/fbdev/c2p_iplan2.c 			store_iplan2_masked(p, bpp, d.words, first);
d                 121 drivers/video/fbdev/c2p_iplan2.c 				memset(d.pixels, 0, dst_idx);
d                 122 drivers/video/fbdev/c2p_iplan2.c 				memcpy(d.pixels+dst_idx, c, w);
d                 124 drivers/video/fbdev/c2p_iplan2.c 				c2p_16x8(d.words);
d                 125 drivers/video/fbdev/c2p_iplan2.c 				store_iplan2_masked(p, bpp, d.words, first);
d                 131 drivers/video/fbdev/c2p_iplan2.c 				memcpy(d.pixels, c, 16);
d                 133 drivers/video/fbdev/c2p_iplan2.c 				c2p_16x8(d.words);
d                 134 drivers/video/fbdev/c2p_iplan2.c 				store_iplan2(p, bpp, d.words);
d                 141 drivers/video/fbdev/c2p_iplan2.c 				memcpy(d.pixels, c, w);
d                 142 drivers/video/fbdev/c2p_iplan2.c 				memset(d.pixels+w, 0, 16-w);
d                 143 drivers/video/fbdev/c2p_iplan2.c 				c2p_16x8(d.words);
d                 144 drivers/video/fbdev/c2p_iplan2.c 				store_iplan2_masked(p, bpp, d.words, last);
d                  27 drivers/video/fbdev/c2p_planar.c static void c2p_32x8(u32 d[8])
d                  29 drivers/video/fbdev/c2p_planar.c 	transp8(d, 16, 4);
d                  30 drivers/video/fbdev/c2p_planar.c 	transp8(d, 8, 2);
d                  31 drivers/video/fbdev/c2p_planar.c 	transp8(d, 4, 1);
d                  32 drivers/video/fbdev/c2p_planar.c 	transp8(d, 2, 4);
d                  33 drivers/video/fbdev/c2p_planar.c 	transp8(d, 1, 2);
d                  48 drivers/video/fbdev/c2p_planar.c static inline void store_planar(void *dst, u32 dst_inc, u32 bpp, u32 d[8])
d                  53 drivers/video/fbdev/c2p_planar.c 		put_unaligned_be32(d[perm_c2p_32x8[i]], dst);
d                  62 drivers/video/fbdev/c2p_planar.c 				       u32 d[8], u32 mask)
d                  67 drivers/video/fbdev/c2p_planar.c 		put_unaligned_be32(comp(d[perm_c2p_32x8[i]],
d                  93 drivers/video/fbdev/c2p_planar.c 	} d;
d                 109 drivers/video/fbdev/c2p_planar.c 			memset(d.pixels, 0, sizeof(d));
d                 110 drivers/video/fbdev/c2p_planar.c 			memcpy(d.pixels+dst_idx, c, width);
d                 112 drivers/video/fbdev/c2p_planar.c 			c2p_32x8(d.words);
d                 113 drivers/video/fbdev/c2p_planar.c 			store_planar_masked(p, dst_nextplane, bpp, d.words,
d                 122 drivers/video/fbdev/c2p_planar.c 				memset(d.pixels, 0, dst_idx);
d                 123 drivers/video/fbdev/c2p_planar.c 				memcpy(d.pixels+dst_idx, c, w);
d                 125 drivers/video/fbdev/c2p_planar.c 				c2p_32x8(d.words);
d                 127 drivers/video/fbdev/c2p_planar.c 						    d.words, first);
d                 133 drivers/video/fbdev/c2p_planar.c 				memcpy(d.pixels, c, 32);
d                 135 drivers/video/fbdev/c2p_planar.c 				c2p_32x8(d.words);
d                 136 drivers/video/fbdev/c2p_planar.c 				store_planar(p, dst_nextplane, bpp, d.words);
d                 143 drivers/video/fbdev/c2p_planar.c 				memcpy(d.pixels, c, w);
d                 144 drivers/video/fbdev/c2p_planar.c 				memset(d.pixels+w, 0, 32-w);
d                 145 drivers/video/fbdev/c2p_planar.c 				c2p_32x8(d.words);
d                 147 drivers/video/fbdev/c2p_planar.c 						    d.words, last);
d                2740 drivers/video/fbdev/cirrusfb.c 	int n, d;
d                2759 drivers/video/fbdev/cirrusfb.c 		d = (14318 * n) / freq;
d                2760 drivers/video/fbdev/cirrusfb.c 		if ((d >= 7) && (d <= 63)) {
d                2761 drivers/video/fbdev/cirrusfb.c 			int temp = d;
d                2776 drivers/video/fbdev/cirrusfb.c 		d++;
d                2777 drivers/video/fbdev/cirrusfb.c 		if ((d >= 7) && (d <= 63)) {
d                2778 drivers/video/fbdev/cirrusfb.c 			if (d > 31) {
d                2780 drivers/video/fbdev/cirrusfb.c 				d >>= 1;
d                2782 drivers/video/fbdev/cirrusfb.c 			h = ((14318 * n) / d) >> s;
d                2787 drivers/video/fbdev/cirrusfb.c 				*den = d;
d                 470 drivers/video/fbdev/controlfb.c #define RADACAL_WRITE(a,d) \
d                 472 drivers/video/fbdev/controlfb.c 	out_8(&p->cmap_regs->dat,   (d))
d                 264 drivers/video/fbdev/core/fbcon.c 		vc = vc_cons[i].d;
d                 405 drivers/video/fbdev/core/fbcon.c 		vc = vc_cons[ops->currcon].d;
d                 840 drivers/video/fbdev/core/fbcon.c 		struct vc_data *fg_vc = vc_cons[fg_console].d;
d                 849 drivers/video/fbdev/core/fbcon.c 	update_screen(vc_cons[fg_console].d);
d                 865 drivers/video/fbdev/core/fbcon.c 	struct vc_data *vc = vc_cons[unit].d;
d                 969 drivers/video/fbdev/core/fbcon.c 	struct vc_data *vc = vc_cons[fg_console].d;
d                1112 drivers/video/fbdev/core/fbcon.c 			struct vc_data *fvc = vc_cons[fg_console].d;
d                1427 drivers/video/fbdev/core/fbcon.c 	vc = vc_cons[unit].d;
d                1619 drivers/video/fbdev/core/fbcon.c 	unsigned short *d, *s;
d                1623 drivers/video/fbdev/core/fbcon.c 	d = (u16 *) softback_curr;
d                1624 drivers/video/fbdev/core/fbcon.c 	if (d == (u16 *) softback_in)
d                1625 drivers/video/fbdev/core/fbcon.c 		d = (u16 *) vc->vc_origin;
d                1680 drivers/video/fbdev/core/fbcon.c 			if (c == scr_readw(d)) {
d                1692 drivers/video/fbdev/core/fbcon.c 			d++;
d                1697 drivers/video/fbdev/core/fbcon.c 		if (d == (u16 *) softback_end)
d                1698 drivers/video/fbdev/core/fbcon.c 			d = (u16 *) softback_buf;
d                1699 drivers/video/fbdev/core/fbcon.c 		if (d == (u16 *) softback_in)
d                1700 drivers/video/fbdev/core/fbcon.c 			d = (u16 *) vc->vc_origin;
d                1746 drivers/video/fbdev/core/fbcon.c 	unsigned short *d = (unsigned short *)
d                1748 drivers/video/fbdev/core/fbcon.c 	unsigned short *s = d + offset;
d                1760 drivers/video/fbdev/core/fbcon.c 			if (c == scr_readw(d)) {
d                1772 drivers/video/fbdev/core/fbcon.c 			scr_writew(c, d);
d                1775 drivers/video/fbdev/core/fbcon.c 			d++;
d                1787 drivers/video/fbdev/core/fbcon.c 			d -= vc->vc_size_row;
d                1795 drivers/video/fbdev/core/fbcon.c 	unsigned short *d = (unsigned short *)
d                1797 drivers/video/fbdev/core/fbcon.c 	unsigned short *s = d + offset;
d                1817 drivers/video/fbdev/core/fbcon.c 			if (c == scr_readw(d)) {
d                1828 drivers/video/fbdev/core/fbcon.c 			scr_writew(c, d);
d                1831 drivers/video/fbdev/core/fbcon.c 			d++;
d                1842 drivers/video/fbdev/core/fbcon.c 			d -= vc->vc_size_row;
d                2254 drivers/video/fbdev/core/fbcon.c 		struct vc_data *conp2 = vc_cons[logo_shown].d;
d                2686 drivers/video/fbdev/core/fbcon.c 		struct vc_data *tmp = vc_cons[i].d;
d                2852 drivers/video/fbdev/core/fbcon.c 			struct vc_data *conp2 = vc_cons[logo_shown].d;
d                2942 drivers/video/fbdev/core/fbcon.c 	vc = vc_cons[ops->currcon].d;
d                2955 drivers/video/fbdev/core/fbcon.c 	vc = vc_cons[ops->currcon].d;
d                2969 drivers/video/fbdev/core/fbcon.c 	vc = vc_cons[ops->currcon].d;
d                3011 drivers/video/fbdev/core/fbcon.c 		vc = vc_cons[i].d;
d                3123 drivers/video/fbdev/core/fbcon.c 					ret = con2fb_release_oldinfo(vc_cons[i].d,
d                3274 drivers/video/fbdev/core/fbcon.c 	vc = vc_cons[ops->currcon].d;
d                3300 drivers/video/fbdev/core/fbcon.c 		vc = vc_cons[i].d;
d                3319 drivers/video/fbdev/core/fbcon.c 			vc = vc_cons[i].d;
d                3332 drivers/video/fbdev/core/fbcon.c 		vc = vc_cons[fg_console].d;
d                 188 drivers/video/fbdev/core/fbmem.c static inline unsigned safe_shift(unsigned d, int n)
d                 190 drivers/video/fbdev/core/fbmem.c 	return n < 0 ? d >> -n : d << n;
d                 279 drivers/video/fbdev/core/fbmem.c 	u8 fg = 1, d;
d                 312 drivers/video/fbdev/core/fbmem.c 				d = *src ^ xor;
d                 314 drivers/video/fbdev/core/fbmem.c 					*dst++ = ((d >> k) & 1) ? fg : 0;
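
fbmem's safe_shift() above handles a signed shift count by turning a negative amount into a right shift, so callers never evaluate an undefined shift by a negative value. A one-liner check:

#include <stdio.h>

static unsigned safe_shift(unsigned d, int n)
{
	return n < 0 ? d >> -n : d << n;
}

int main(void)
{
	printf("%u %u\n", safe_shift(0xf0, 4), safe_shift(0xf0, -4));	/* 3840 15 */
	return 0;
}
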
d                 963 drivers/video/fbdev/core/modedb.c 		u32 d;
d                 969 drivers/video/fbdev/core/modedb.c 			d = (mode->xres - var->xres) +
d                 971 drivers/video/fbdev/core/modedb.c 			if (diff > d) {
d                 972 drivers/video/fbdev/core/modedb.c 				diff = d;
d                 974 drivers/video/fbdev/core/modedb.c 			} else if (diff == d && best &&
d                1001 drivers/video/fbdev/core/modedb.c 		u32 d;
d                1006 drivers/video/fbdev/core/modedb.c 		d = abs(cmode->xres - mode->xres) +
d                1008 drivers/video/fbdev/core/modedb.c 		if (diff > d) {
d                1009 drivers/video/fbdev/core/modedb.c 			diff = d;
d                1012 drivers/video/fbdev/core/modedb.c 		} else if (diff == d) {
d                1013 drivers/video/fbdev/core/modedb.c 			d = abs(cmode->refresh - mode->refresh);
d                1014 drivers/video/fbdev/core/modedb.c 			if (diff_refresh > d) {
d                1015 drivers/video/fbdev/core/modedb.c 				diff_refresh = d;
d                  53 drivers/video/fbdev/geode/display_gx1.c 	int d;
d                  64 drivers/video/fbdev/geode/display_gx1.c 	for (d = 0; d < 2; d++) {
d                 515 drivers/video/fbdev/hgafb.c 	u8 d;
d                 519 drivers/video/fbdev/hgafb.c 			d = *cdat++;
d                 521 drivers/video/fbdev/hgafb.c 			fb_writeb(d, dest);
d                 329 drivers/video/fbdev/hpfb.c static int hpfb_dio_probe(struct dio_dev *d, const struct dio_device_id *ent)
d                 333 drivers/video/fbdev/hpfb.c 	paddr = d->resource.start;
d                 334 drivers/video/fbdev/hpfb.c 	if (!request_mem_region(d->resource.start, resource_size(&d->resource), d->name))
d                 337 drivers/video/fbdev/hpfb.c 	if (d->scode >= DIOII_SCBASE) {
d                 338 drivers/video/fbdev/hpfb.c 		vaddr = (unsigned long)ioremap(paddr, resource_size(&d->resource));
d                 343 drivers/video/fbdev/hpfb.c 	       "(secondary id %02x)\n", d->scode, (d->id >> 8) & 0xff);
d                 345 drivers/video/fbdev/hpfb.c 		if (d->scode >= DIOII_SCBASE)
d                 352 drivers/video/fbdev/hpfb.c static void hpfb_remove_one(struct dio_dev *d)
d                 355 drivers/video/fbdev/hpfb.c 	if (d->scode >= DIOII_SCBASE)
d                 357 drivers/video/fbdev/hpfb.c 	release_mem_region(d->resource.start, resource_size(&d->resource));
d                1159 drivers/video/fbdev/imsttfb.c imstt_set_cursor(struct imstt_par *par, struct fb_image *d, int on)
d                1168 drivers/video/fbdev/imsttfb.c 			par->cmap_regs[PIDXDATA] = d->dx >> 8;	eieio();
d                1170 drivers/video/fbdev/imsttfb.c 			par->cmap_regs[PIDXDATA] = d->dx & 0xff;eieio();
d                1172 drivers/video/fbdev/imsttfb.c 			par->cmap_regs[PIDXDATA] = d->dy >> 8;	eieio();
d                1174 drivers/video/fbdev/imsttfb.c 			par->cmap_regs[PIDXDATA] = d->dy & 0xff;eieio();
d                1183 drivers/video/fbdev/imsttfb.c 			__u16 x = d->dx + 0x40, y = d->dy + 0x40;
d                 108 drivers/video/fbdev/intelfb/intelfb.h #define ACCEL(d, i)                                                     \
d                 109 drivers/video/fbdev/intelfb/intelfb.h 	((d)->accel && !(d)->ring_lockup &&                             \
d                 114 drivers/video/fbdev/intelfb/intelfb.h #define NOACCEL_CHIPSET(d)						\
d                 117 drivers/video/fbdev/intelfb/intelfb.h #define FIXED_MODE(d) ((d)->fixed_mode)
d                 989 drivers/video/fbdev/matrox/matroxfb_DAC1064.c 		u_int32_t d;
d                 205 drivers/video/fbdev/neofb.c 	int n, d, f;
d                 210 drivers/video/fbdev/neofb.c 		for (d = 0; d <= MAX_D; d++)
d                 215 drivers/video/fbdev/neofb.c 				f_out = ((14318 * (n + 1)) / (d + 1)) >> f;
d                 220 drivers/video/fbdev/neofb.c 					d_best = d;
d                  62 drivers/video/fbdev/nvidia/nv_local.h #define NV_WR08(p,i,d)  (__raw_writeb((d), (void __iomem *)(p) + (i)))
d                  64 drivers/video/fbdev/nvidia/nv_local.h #define NV_WR16(p,i,d)  (__raw_writew((d), (void __iomem *)(p) + (i)))
d                  66 drivers/video/fbdev/nvidia/nv_local.h #define NV_WR32(p,i,d)  (__raw_writel((d), (void __iomem *)(p) + (i)))
d                  70 drivers/video/fbdev/nvidia/nv_local.h #define VGA_WR08(p,i,d) (writeb((d), (void __iomem *)(p) + (i)))
d                 132 drivers/video/fbdev/omap2/omapfb/dss/core.c static int omap_dss_pm_notif(struct notifier_block *b, unsigned long v, void *d)
d                5370 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 		const struct dsi_module_id_data *d;
d                5378 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 		d = match->data;
d                5380 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 		while (d->address != 0 && d->address != dsi_mem->start)
d                5381 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 			d++;
d                5383 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 		if (d->address == 0) {
d                5388 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 		dsi->module_id = d->id;
d                 304 drivers/video/fbdev/omap2/omapfb/omapfb-ioctl.c 	struct omapfb_display_data *d;
d                 315 drivers/video/fbdev/omap2/omapfb/omapfb-ioctl.c 	d = get_display_data(fbdev, display);
d                 317 drivers/video/fbdev/omap2/omapfb/omapfb-ioctl.c 	if (d->update_mode == mode) {
d                 330 drivers/video/fbdev/omap2/omapfb/omapfb-ioctl.c 		d->update_mode = mode;
d                 347 drivers/video/fbdev/omap2/omapfb/omapfb-ioctl.c 	struct omapfb_display_data *d;
d                 354 drivers/video/fbdev/omap2/omapfb/omapfb-ioctl.c 	d = get_display_data(fbdev, display);
d                 356 drivers/video/fbdev/omap2/omapfb/omapfb-ioctl.c 	*mode = d->update_mode;
d                1222 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	struct omapfb_display_data *d;
d                1230 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	d = get_display_data(fbdev, display);
d                1240 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 				d->update_mode == OMAPFB_AUTO_UPDATE &&
d                1241 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 				!d->auto_update_work_enabled)
d                1255 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 		if (d->auto_update_work_enabled)
d                1662 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	struct omapfb_display_data *d;
d                1667 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	d = container_of(work, struct omapfb_display_data,
d                1670 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	dssdev = d->dssdev;
d                1672 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	fbdev = d->fbdev;
d                1687 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 			&d->auto_update_work, HZ / freq);
d                1693 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	struct omapfb_display_data *d;
d                1709 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	d = get_display_data(fbdev, display);
d                1711 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	INIT_DELAYED_WORK(&d->auto_update_work, omapfb_auto_update_work);
d                1713 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	d->auto_update_work_enabled = true;
d                1715 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	omapfb_auto_update_work(&d->auto_update_work.work);
d                1721 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	struct omapfb_display_data *d;
d                1723 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	d = get_display_data(fbdev, display);
d                1725 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	cancel_delayed_work_sync(&d->auto_update_work);
d                1727 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	d->auto_update_work_enabled = false;
d                2109 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	struct omapfb_display_data *d;
d                2115 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	d = get_display_data(fbdev, display);
d                2116 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	d->bpp_override = bpp;
d                2142 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	struct omapfb_display_data *d;
d                2146 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	d = get_display_data(fbdev, dssdev);
d                2148 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	if (d->bpp_override != 0)
d                2149 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 		return d->bpp_override;
d                2311 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	struct omapfb_display_data *d;
d                2321 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	d = get_display_data(fbdev, dssdev);
d                2323 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 	d->fbdev = fbdev;
d                2330 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 			d->update_mode = OMAPFB_AUTO_UPDATE;
d                2332 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 			d->update_mode = OMAPFB_MANUAL_UPDATE;
d                2351 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 		d->update_mode = OMAPFB_AUTO_UPDATE;
d                2498 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 		struct omapfb_display_data *d;
d                2509 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 		d = &fbdev->displays[fbdev->num_displays++];
d                2510 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 		d->dssdev = dssdev;
d                2512 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 			d->update_mode = OMAPFB_MANUAL_UPDATE;
d                2514 drivers/video/fbdev/omap2/omapfb/omapfb-main.c 			d->update_mode = OMAPFB_AUTO_UPDATE;
d                 233 drivers/video/fbdev/platinumfb.c #define STORE_D2(a, d) { \
d                 235 drivers/video/fbdev/platinumfb.c 	out_8(&cmap_regs->d2, (d)); \
d                1497 drivers/video/fbdev/riva/fbdev.c 	volatile u32 __iomem *d;
d                1539 drivers/video/fbdev/riva/fbdev.c 	d = &par->riva.Bitmap->MonochromeData01E;
d                1549 drivers/video/fbdev/riva/fbdev.c 			NV_WR32(d, i*4, tmp);
d                1559 drivers/video/fbdev/riva/fbdev.c 			NV_WR32(d, i*4, tmp);
d                  78 drivers/video/fbdev/riva/riva_hw.h #define NV_WR08(p,i,d)  (__raw_writeb((d), (void __iomem *)(p) + (i)))
d                  80 drivers/video/fbdev/riva/riva_hw.h #define NV_WR16(p,i,d)  (__raw_writew((d), (void __iomem *)(p) + (i)))
d                  82 drivers/video/fbdev/riva/riva_hw.h #define NV_WR32(p,i,d)  (__raw_writel((d), (void __iomem *)(p) + (i)))
d                  85 drivers/video/fbdev/riva/riva_hw.h #define VGA_WR08(p,i,d) (writeb((d), (void __iomem *)(p) + (i)))
d                 501 drivers/video/fbdev/sis/sis_main.c 		 if(monitor->dclockmax < sisfb_ddcsmodes[i].d) monitor->dclockmax = sisfb_ddcsmodes[i].d;
d                 523 drivers/video/fbdev/sis/sis_main.c 		      if(monitor->dclockmax < sisfb_ddcsmodes[j].d) monitor->dclockmax = sisfb_ddcsmodes[j].d;
d                 455 drivers/video/fbdev/sis/sis_main.h 	u32 d;
d                 477 drivers/video/fbdev/sis/sis_main.h 	u32 d;
d                 103 drivers/video/fbdev/sm712.h #define big_pixel_depth(p, d)	{if (p == 24) {p = 32; d = 32; } }
d                 110 drivers/video/fbdev/sm712.h #define big_pixel_depth(p, d)	do { } while (0)
d                 839 drivers/video/fbdev/tridentfb.c 	unsigned long fi, d, di;
d                 844 drivers/video/fbdev/tridentfb.c 	d = 20000;
d                 851 drivers/video/fbdev/tridentfb.c 				if (di < d || (di == d && k == best_k)) {
d                 852 drivers/video/fbdev/tridentfb.c 					d = di;
d                1013 drivers/video/fbdev/udlfb.c 		struct dlfb_deferred_free *d = list_entry(dlfb->deferred_free.next, struct dlfb_deferred_free, list);
d                1014 drivers/video/fbdev/udlfb.c 		list_del(&d->list);
d                1015 drivers/video/fbdev/udlfb.c 		vfree(d->mem);
d                1016 drivers/video/fbdev/udlfb.c 		kfree(d);
d                1207 drivers/video/fbdev/udlfb.c 	struct dlfb_deferred_free *d = kmalloc(sizeof(struct dlfb_deferred_free), GFP_KERNEL);
d                1208 drivers/video/fbdev/udlfb.c 	if (!d)
d                1210 drivers/video/fbdev/udlfb.c 	d->mem = mem;
d                1211 drivers/video/fbdev/udlfb.c 	list_add(&d->list, &dlfb->deferred_free);
d                 318 drivers/video/fbdev/uvesafb.c 	int i, match = -1, h = 0, d = 0x7fffffff;
d                 332 drivers/video/fbdev/uvesafb.c 		if (h < d || (h == d && par->vbe_modes[i].depth > depth)) {
d                 333 drivers/video/fbdev/uvesafb.c 			d = h;
d                 343 drivers/video/fbdev/uvesafb.c 	if (flags & UVESAFB_EXACT_RES && d > 24)
d                  18 drivers/video/fbdev/via/hw.h #define viafb_write_reg(i, p, d)		via_write_reg(p, i, d)
d                  19 drivers/video/fbdev/via/hw.h #define viafb_write_reg_mask(i, p, d, m)	via_write_reg_mask(p, i, d, m)
d                 125 drivers/vlynq/vlynq.c static void vlynq_irq_unmask(struct irq_data *d)
d                 127 drivers/vlynq/vlynq.c 	struct vlynq_device *dev = irq_data_get_irq_chip_data(d);
d                 132 drivers/vlynq/vlynq.c 	virq = d->irq - dev->irq_start;
d                 138 drivers/vlynq/vlynq.c static void vlynq_irq_mask(struct irq_data *d)
d                 140 drivers/vlynq/vlynq.c 	struct vlynq_device *dev = irq_data_get_irq_chip_data(d);
d                 145 drivers/vlynq/vlynq.c 	virq = d->irq - dev->irq_start;
d                 151 drivers/vlynq/vlynq.c static int vlynq_irq_type(struct irq_data *d, unsigned int flow_type)
d                 153 drivers/vlynq/vlynq.c 	struct vlynq_device *dev = irq_data_get_irq_chip_data(d);
d                 158 drivers/vlynq/vlynq.c 	virq = d->irq - dev->irq_start;
d                 182 drivers/vlynq/vlynq.c static void vlynq_local_ack(struct irq_data *d)
d                 184 drivers/vlynq/vlynq.c 	struct vlynq_device *dev = irq_data_get_irq_chip_data(d);
d                 192 drivers/vlynq/vlynq.c static void vlynq_remote_ack(struct irq_data *d)
d                 194 drivers/vlynq/vlynq.c 	struct vlynq_device *dev = irq_data_get_irq_chip_data(d);
d                 378 drivers/w1/masters/ds1wm.c 	u64 r_prime, d;
d                 409 drivers/w1/masters/ds1wm.c 		d = 0;
d                 442 drivers/w1/masters/ds1wm.c 			d |= (unsigned long long) _d << (i * 4);
d                 453 drivers/w1/masters/ds1wm.c 			pass, r_prime, d);
d                 461 drivers/w1/masters/ds1wm.c 		if ((r_prime & ((u64)1 << 63)) && (d & ((u64)1 << 63))) {
d                 478 drivers/w1/masters/ds1wm.c 		d &= ~r;
d                 480 drivers/w1/masters/ds1wm.c 		ms_discrep_bit = fls64(d) - 1;
d                 483 drivers/w1/masters/ds1wm.c 			pass, d, ms_discrep_bit);
d                 175 drivers/watchdog/octeon-wdt-main.c 	int d;
d                 178 drivers/watchdog/octeon-wdt-main.c 	for (d = 0; d < digits; d++) {
d                 179 drivers/watchdog/octeon-wdt-main.c 		v = (value >> ((digits - d - 1) * 4)) & 0xf;
d                  35 drivers/watchdog/renesas_wdt.c #define MUL_BY_CLKS_PER_SEC(p, d) \
d                  36 drivers/watchdog/renesas_wdt.c 	DIV_ROUND_UP((d) * (p)->clk_rate, clk_divs[(p)->cks])
d                  39 drivers/watchdog/renesas_wdt.c #define DIV_BY_CLKS_PER_SEC(p, d) ((d) * clk_divs[(p)->cks] / (p)->clk_rate)
d                 437 drivers/watchdog/w83627hf_wdt.c static int __init wdt_use_alt_key(const struct dmi_system_id *d)
d                1347 drivers/xen/events/events_base.c 	struct irq_data *d = irq_desc_get_irq_data(desc);
d                1349 drivers/xen/events/events_base.c 	return set_affinity_irq(d, cpumask_of(tcpu), false);
d                1141 drivers/xen/grant-table.c 	struct unmap_refs_callback_data *d = data->data;
d                1143 drivers/xen/grant-table.c 	d->result = result;
d                1144 drivers/xen/grant-table.c 	complete(&d->completion);
d                 304 drivers/xen/xen-pciback/xenbus.c 	unsigned int d, b;
d                 326 drivers/xen/xen-pciback/xenbus.c 				   str, "%x:%x", &d, &b);
d                 334 drivers/xen/xen-pciback/xenbus.c 		if (d == domain && b == bus) {
d                 445 drivers/xen/xenbus/xenbus_xs.c 	char **d;
d                 448 drivers/xen/xenbus/xenbus_xs.c 	d = xenbus_directory(t, dir, node, &dir_n);
d                 449 drivers/xen/xenbus/xenbus_xs.c 	if (IS_ERR(d))
d                 451 drivers/xen/xenbus/xenbus_xs.c 	kfree(d);
d                 136 fs/9p/v9fs.h   extern int v9fs_vfs_unlink(struct inode *i, struct dentry *d);
d                 137 fs/9p/v9fs.h   extern int v9fs_vfs_rmdir(struct inode *i, struct dentry *d);
d                 924 fs/9p/vfs_inode.c int v9fs_vfs_unlink(struct inode *i, struct dentry *d)
d                 926 fs/9p/vfs_inode.c 	return v9fs_remove(i, d, 0);
d                 936 fs/9p/vfs_inode.c int v9fs_vfs_rmdir(struct inode *i, struct dentry *d)
d                 938 fs/9p/vfs_inode.c 	return v9fs_remove(i, d, AT_REMOVEDIR);
d                 913 fs/afs/dir.c   	struct dentry *d;
d                 961 fs/afs/dir.c   	d = d_splice_alias(inode, dentry);
d                 962 fs/afs/dir.c   	if (!IS_ERR_OR_NULL(d)) {
d                 963 fs/afs/dir.c   		d->d_fsdata = dentry->d_fsdata;
d                 964 fs/afs/dir.c   		trace_afs_lookup(dvnode, &d->d_name, &fid);
d                 968 fs/afs/dir.c   	return d;
d                 115 fs/afs/dir_edit.c 	int d, len, n;
d                 128 fs/afs/dir_edit.c 	for (d = (blocknum == 0 ? AFS_DIR_RESV_BLOCKS0 : AFS_DIR_RESV_BLOCKS);
d                 129 fs/afs/dir_edit.c 	     d < AFS_DIR_SLOTS_PER_BLOCK;
d                 130 fs/afs/dir_edit.c 	     d++) {
d                 131 fs/afs/dir_edit.c 		if (!((bitmap >> d) & 1))
d                 133 fs/afs/dir_edit.c 		de = &block->dirents[d];
d                 141 fs/afs/dir_edit.c 			return d;
d                 145 fs/afs/dir_edit.c 		d += n - 1;
d                 119 fs/autofs/expire.c 	struct dentry *p = prev, *ret = NULL, *d = NULL;
d                 129 fs/autofs/expire.c 		ret = positive_after(p, d);
d                 135 fs/autofs/expire.c 		d = p;
d                1448 fs/btrfs/ctree.h static inline unsigned long btrfs_device_uuid(struct btrfs_dev_item *d)
d                1450 fs/btrfs/ctree.h 	return (unsigned long)d + offsetof(struct btrfs_dev_item, uuid);
d                1453 fs/btrfs/ctree.h static inline unsigned long btrfs_device_fsid(struct btrfs_dev_item *d)
d                1455 fs/btrfs/ctree.h 	return (unsigned long)d + offsetof(struct btrfs_dev_item, fsid);
d                 256 fs/btrfs/raid56.c 	char *d;
d                 268 fs/btrfs/raid56.c 		d = kmap(rbio->stripe_pages[i]);
d                 270 fs/btrfs/raid56.c 		copy_page(d, s);
d                 305 fs/btrfs/raid56.c 	struct page *d;
d                 316 fs/btrfs/raid56.c 		d = dest->stripe_pages[i];
d                 317 fs/btrfs/raid56.c 		if (d)
d                 318 fs/btrfs/raid56.c 			__free_page(d);
d                 399 fs/cifs/connect.c 	struct super_cb_data *d = arg;
d                 403 fs/cifs/connect.c 	if (d->sb)
d                 408 fs/cifs/connect.c 	if (tcon->ses->server == d->server)
d                 409 fs/cifs/connect.c 		d->sb = sb;
d                 414 fs/cifs/connect.c 	struct super_cb_data d = {
d                 419 fs/cifs/connect.c 	iterate_supers_type(&cifs_fs_type, super_cb, &d);
d                 421 fs/cifs/connect.c 	if (unlikely(!d.sb))
d                 428 fs/cifs/connect.c 	cifs_sb_active(d.sb);
d                 429 fs/cifs/connect.c 	return d.sb;
d                 819 fs/cifs/smb2misc.c 	struct sdesc *d;
d                 837 fs/cifs/smb2misc.c 	d = ses->server->secmech.sdescsha512;
d                 838 fs/cifs/smb2misc.c 	rc = crypto_shash_init(&d->shash);
d                 844 fs/cifs/smb2misc.c 	rc = crypto_shash_update(&d->shash, ses->preauth_sha_hash,
d                 852 fs/cifs/smb2misc.c 		rc = crypto_shash_update(&d->shash,
d                 861 fs/cifs/smb2misc.c 	rc = crypto_shash_final(&d->shash, ses->preauth_sha_hash);
d                 375 fs/cifs/smb2transport.c 	struct derivation *d;
d                 377 fs/cifs/smb2transport.c 	d = &triplet.signing;
d                 378 fs/cifs/smb2transport.c 	d->label.iov_base = "SMB2AESCMAC";
d                 379 fs/cifs/smb2transport.c 	d->label.iov_len = 12;
d                 380 fs/cifs/smb2transport.c 	d->context.iov_base = "SmbSign";
d                 381 fs/cifs/smb2transport.c 	d->context.iov_len = 8;
d                 383 fs/cifs/smb2transport.c 	d = &triplet.encryption;
d                 384 fs/cifs/smb2transport.c 	d->label.iov_base = "SMB2AESCCM";
d                 385 fs/cifs/smb2transport.c 	d->label.iov_len = 11;
d                 386 fs/cifs/smb2transport.c 	d->context.iov_base = "ServerIn ";
d                 387 fs/cifs/smb2transport.c 	d->context.iov_len = 10;
d                 389 fs/cifs/smb2transport.c 	d = &triplet.decryption;
d                 390 fs/cifs/smb2transport.c 	d->label.iov_base = "SMB2AESCCM";
d                 391 fs/cifs/smb2transport.c 	d->label.iov_len = 11;
d                 392 fs/cifs/smb2transport.c 	d->context.iov_base = "ServerOut";
d                 393 fs/cifs/smb2transport.c 	d->context.iov_len = 10;
d                 403 fs/cifs/smb2transport.c 	struct derivation *d;
d                 405 fs/cifs/smb2transport.c 	d = &triplet.signing;
d                 406 fs/cifs/smb2transport.c 	d->label.iov_base = "SMBSigningKey";
d                 407 fs/cifs/smb2transport.c 	d->label.iov_len = 14;
d                 408 fs/cifs/smb2transport.c 	d->context.iov_base = ses->preauth_sha_hash;
d                 409 fs/cifs/smb2transport.c 	d->context.iov_len = 64;
d                 411 fs/cifs/smb2transport.c 	d = &triplet.encryption;
d                 412 fs/cifs/smb2transport.c 	d->label.iov_base = "SMBC2SCipherKey";
d                 413 fs/cifs/smb2transport.c 	d->label.iov_len = 16;
d                 414 fs/cifs/smb2transport.c 	d->context.iov_base = ses->preauth_sha_hash;
d                 415 fs/cifs/smb2transport.c 	d->context.iov_len = 64;
d                 417 fs/cifs/smb2transport.c 	d = &triplet.decryption;
d                 418 fs/cifs/smb2transport.c 	d->label.iov_base = "SMBS2CCipherKey";
d                 419 fs/cifs/smb2transport.c 	d->label.iov_len = 16;
d                 420 fs/cifs/smb2transport.c 	d->context.iov_base = ses->preauth_sha_hash;
d                 421 fs/cifs/smb2transport.c 	d->context.iov_len = 64;
d                 261 fs/compat_ioctl.c 		int d;
d                 265 fs/compat_ioctl.c 		    get_user(d, &r[i].duration) ||
d                 267 fs/compat_ioctl.c 		    put_user(d, &o[i].duration))
d                 382 fs/configfs/dir.c static void remove_dir(struct dentry * d)
d                 384 fs/configfs/dir.c 	struct dentry * parent = dget(d->d_parent);
d                 386 fs/configfs/dir.c 	configfs_remove_dirent(d);
d                 388 fs/configfs/dir.c 	if (d_really_is_positive(d))
d                 389 fs/configfs/dir.c 		simple_rmdir(d_inode(parent),d);
d                 391 fs/configfs/dir.c 	pr_debug(" o %pd removing done (%d)\n", d, d_count(d));
d                 325 fs/d_path.c    static char *__dentry_path(struct dentry *d, char *buf, int buflen)
d                 337 fs/d_path.c    	dentry = d;
d                  98 fs/dcookies.c  	struct dentry *d;
d                 102 fs/dcookies.c  	d = path->dentry;
d                 103 fs/dcookies.c  	spin_lock(&d->d_lock);
d                 104 fs/dcookies.c  	d->d_flags |= DCACHE_COOKIE;
d                 105 fs/dcookies.c  	spin_unlock(&d->d_lock);
d                 225 fs/dcookies.c  	struct list_head * d;
d                 261 fs/dcookies.c  	d = dcookie_hashtable;
d                 264 fs/dcookies.c  		INIT_LIST_HEAD(d);
d                 265 fs/dcookies.c  		d++;
d                 279 fs/dcookies.c  	struct dentry *d = dcs->path.dentry;
d                 281 fs/dcookies.c  	spin_lock(&d->d_lock);
d                 282 fs/dcookies.c  	d->d_flags &= ~DCACHE_COOKIE;
d                 283 fs/dcookies.c  	spin_unlock(&d->d_lock);
d                  88 fs/efivarfs/super.c 	struct dentry *d;
d                  99 fs/efivarfs/super.c 	d = d_alloc(parent, &q);
d                 100 fs/efivarfs/super.c 	if (d)
d                 101 fs/efivarfs/super.c 		return d;
d                  22 fs/efs/super.c static int efs_fill_super(struct super_block *s, void *d, int silent);
d                 250 fs/efs/super.c static int efs_fill_super(struct super_block *s, void *d, int silent)
d                  20 fs/ext4/hash.c 	__u32	a = in[0], b = in[1], c = in[2], d = in[3];
d                  26 fs/ext4/hash.c 		b1 += ((b0 << 4)+c) ^ (b0+sum) ^ ((b0 >> 5)+d);
d                  44 fs/ext4/hash.c #define ROUND(f, a, b, c, d, x, s)	\
d                  45 fs/ext4/hash.c 	(a += f(b, c, d) + x, a = rol32(a, s))
d                  55 fs/ext4/hash.c 	__u32 a = buf[0], b = buf[1], c = buf[2], d = buf[3];
d                  58 fs/ext4/hash.c 	ROUND(F, a, b, c, d, in[0] + K1,  3);
d                  59 fs/ext4/hash.c 	ROUND(F, d, a, b, c, in[1] + K1,  7);
d                  60 fs/ext4/hash.c 	ROUND(F, c, d, a, b, in[2] + K1, 11);
d                  61 fs/ext4/hash.c 	ROUND(F, b, c, d, a, in[3] + K1, 19);
d                  62 fs/ext4/hash.c 	ROUND(F, a, b, c, d, in[4] + K1,  3);
d                  63 fs/ext4/hash.c 	ROUND(F, d, a, b, c, in[5] + K1,  7);
d                  64 fs/ext4/hash.c 	ROUND(F, c, d, a, b, in[6] + K1, 11);
d                  65 fs/ext4/hash.c 	ROUND(F, b, c, d, a, in[7] + K1, 19);
d                  68 fs/ext4/hash.c 	ROUND(G, a, b, c, d, in[1] + K2,  3);
d                  69 fs/ext4/hash.c 	ROUND(G, d, a, b, c, in[3] + K2,  5);
d                  70 fs/ext4/hash.c 	ROUND(G, c, d, a, b, in[5] + K2,  9);
d                  71 fs/ext4/hash.c 	ROUND(G, b, c, d, a, in[7] + K2, 13);
d                  72 fs/ext4/hash.c 	ROUND(G, a, b, c, d, in[0] + K2,  3);
d                  73 fs/ext4/hash.c 	ROUND(G, d, a, b, c, in[2] + K2,  5);
d                  74 fs/ext4/hash.c 	ROUND(G, c, d, a, b, in[4] + K2,  9);
d                  75 fs/ext4/hash.c 	ROUND(G, b, c, d, a, in[6] + K2, 13);
d                  78 fs/ext4/hash.c 	ROUND(H, a, b, c, d, in[3] + K3,  3);
d                  79 fs/ext4/hash.c 	ROUND(H, d, a, b, c, in[7] + K3,  9);
d                  80 fs/ext4/hash.c 	ROUND(H, c, d, a, b, in[2] + K3, 11);
d                  81 fs/ext4/hash.c 	ROUND(H, b, c, d, a, in[6] + K3, 15);
d                  82 fs/ext4/hash.c 	ROUND(H, a, b, c, d, in[1] + K3,  3);
d                  83 fs/ext4/hash.c 	ROUND(H, d, a, b, c, in[5] + K3,  9);
d                  84 fs/ext4/hash.c 	ROUND(H, c, d, a, b, in[0] + K3, 11);
d                  85 fs/ext4/hash.c 	ROUND(H, b, c, d, a, in[4] + K3, 15);
d                  90 fs/ext4/hash.c 	buf[3] += d;
d                 314 fs/ext4/namei.c 	struct ext4_dir_entry *d, *top;
d                 316 fs/ext4/namei.c 	d = (struct ext4_dir_entry *)bh->b_data;
d                 320 fs/ext4/namei.c 	while (d < top && d->rec_len)
d                 321 fs/ext4/namei.c 		d = (struct ext4_dir_entry *)(((void *)d) +
d                 322 fs/ext4/namei.c 		    le16_to_cpu(d->rec_len));
d                 324 fs/ext4/namei.c 	if (d != top)
d                 327 fs/ext4/namei.c 	t = (struct ext4_dir_entry_tail *)d;
d                  94 fs/f2fs/dir.c  	struct f2fs_dentry_ptr d;
d                  98 fs/f2fs/dir.c  	make_dentry_ptr_block(dir, &d, dentry_blk);
d                  99 fs/f2fs/dir.c  	de = f2fs_find_target_dentry(fname, namehash, max_slots, &d);
d                 167 fs/f2fs/dir.c  static inline bool f2fs_match_name(struct f2fs_dentry_ptr *d,
d                 175 fs/f2fs/dir.c  	struct inode *parent = d->inode;
d                 184 fs/f2fs/dir.c  	entry.name = d->filename[bit_pos];
d                 197 fs/f2fs/dir.c  	if (fscrypt_match_name(fname, d->filename[bit_pos],
d                 205 fs/f2fs/dir.c  			struct f2fs_dentry_ptr *d)
d                 213 fs/f2fs/dir.c  	f2fs_fname_setup_ci_filename(d->inode, fname->usr_fname, &cf_str);
d                 218 fs/f2fs/dir.c  	while (bit_pos < d->max) {
d                 219 fs/f2fs/dir.c  		if (!test_bit_le(bit_pos, d->bitmap)) {
d                 225 fs/f2fs/dir.c  		de = &d->dentry[bit_pos];
d                 232 fs/f2fs/dir.c  		if (f2fs_match_name(d, de, fname, &cf_str, bit_pos, namehash))
d                 433 fs/f2fs/dir.c  					struct f2fs_dentry_ptr *d)
d                 439 fs/f2fs/dir.c  	f2fs_update_dentry(inode->i_ino, inode->i_mode, d, &dot, 0, 0);
d                 442 fs/f2fs/dir.c  	f2fs_update_dentry(parent->i_ino, parent->i_mode, d, &dotdot, 0, 1);
d                 450 fs/f2fs/dir.c  	struct f2fs_dentry_ptr d;
d                 461 fs/f2fs/dir.c  	make_dentry_ptr_block(NULL, &d, dentry_blk);
d                 462 fs/f2fs/dir.c  	f2fs_do_make_empty_dir(inode, parent, &d);
d                 581 fs/f2fs/dir.c  void f2fs_update_dentry(nid_t ino, umode_t mode, struct f2fs_dentry_ptr *d,
d                 589 fs/f2fs/dir.c  	de = &d->dentry[bit_pos];
d                 592 fs/f2fs/dir.c  	memcpy(d->filename[bit_pos], name->name, name->len);
d                 596 fs/f2fs/dir.c  		__set_bit_le(bit_pos + i, (void *)d->bitmap);
d                 615 fs/f2fs/dir.c  	struct f2fs_dentry_ptr d;
d                 678 fs/f2fs/dir.c  	make_dentry_ptr_block(NULL, &d, dentry_blk);
d                 679 fs/f2fs/dir.c  	f2fs_update_dentry(ino, mode, &d, new_name, dentry_hash, bit_pos);
d                 895 fs/f2fs/dir.c  int f2fs_fill_dentries(struct dir_context *ctx, struct f2fs_dentry_ptr *d,
d                 902 fs/f2fs/dir.c  	struct f2fs_sb_info *sbi = F2FS_I_SB(d->inode);
d                 907 fs/f2fs/dir.c  	bit_pos = ((unsigned long)ctx->pos % d->max);
d                 912 fs/f2fs/dir.c  	while (bit_pos < d->max) {
d                 913 fs/f2fs/dir.c  		bit_pos = find_next_bit_le(d->bitmap, d->max, bit_pos);
d                 914 fs/f2fs/dir.c  		if (bit_pos >= d->max)
d                 917 fs/f2fs/dir.c  		de = &d->dentry[bit_pos];
d                 930 fs/f2fs/dir.c  		de_name.name = d->filename[bit_pos];
d                 935 fs/f2fs/dir.c  		if (unlikely(bit_pos > d->max ||
d                 944 fs/f2fs/dir.c  		if (IS_ENCRYPTED(d->inode)) {
d                 947 fs/f2fs/dir.c  			err = fscrypt_fname_disk_to_usr(d->inode,
d                 983 fs/f2fs/dir.c  	struct f2fs_dentry_ptr d;
d                1029 fs/f2fs/dir.c  		make_dentry_ptr_block(inode, &d, dentry_blk);
d                1031 fs/f2fs/dir.c  		err = f2fs_fill_dentries(ctx, &d,
d                 510 fs/f2fs/f2fs.h 		struct f2fs_dentry_ptr *d, struct f2fs_dentry_block *t)
d                 512 fs/f2fs/f2fs.h 	d->inode = inode;
d                 513 fs/f2fs/f2fs.h 	d->max = NR_DENTRY_IN_BLOCK;
d                 514 fs/f2fs/f2fs.h 	d->nr_bitmap = SIZE_OF_DENTRY_BITMAP;
d                 515 fs/f2fs/f2fs.h 	d->bitmap = t->dentry_bitmap;
d                 516 fs/f2fs/f2fs.h 	d->dentry = t->dentry;
d                 517 fs/f2fs/f2fs.h 	d->filename = t->filename;
d                 521 fs/f2fs/f2fs.h 					struct f2fs_dentry_ptr *d, void *t)
d                 527 fs/f2fs/f2fs.h 	d->inode = inode;
d                 528 fs/f2fs/f2fs.h 	d->max = entry_cnt;
d                 529 fs/f2fs/f2fs.h 	d->nr_bitmap = bitmap_size;
d                 530 fs/f2fs/f2fs.h 	d->bitmap = t;
d                 531 fs/f2fs/f2fs.h 	d->dentry = t + bitmap_size + reserved_size;
d                 532 fs/f2fs/f2fs.h 	d->filename = t + bitmap_size + reserved_size +
d                2973 fs/f2fs/f2fs.h 			struct f2fs_dentry_ptr *d);
d                2974 fs/f2fs/f2fs.h int f2fs_fill_dentries(struct dir_context *ctx, struct f2fs_dentry_ptr *d,
d                2977 fs/f2fs/f2fs.h 			struct f2fs_dentry_ptr *d);
d                2994 fs/f2fs/f2fs.h void f2fs_update_dentry(nid_t ino, umode_t mode, struct f2fs_dentry_ptr *d,
d                  30 fs/f2fs/hash.c 	__u32 a = in[0], b = in[1], c = in[2], d = in[3];
d                  36 fs/f2fs/hash.c 		b1 += ((b0 << 4)+c) ^ (b0+sum) ^ ((b0 >> 5)+d);
d                 313 fs/f2fs/inline.c 	struct f2fs_dentry_ptr d;
d                 328 fs/f2fs/inline.c 	make_dentry_ptr_inline(dir, &d, inline_dentry);
d                 329 fs/f2fs/inline.c 	de = f2fs_find_target_dentry(fname, namehash, NULL, &d);
d                 342 fs/f2fs/inline.c 	struct f2fs_dentry_ptr d;
d                 347 fs/f2fs/inline.c 	make_dentry_ptr_inline(inode, &d, inline_dentry);
d                 348 fs/f2fs/inline.c 	f2fs_do_make_empty_dir(inode, parent, &d);
d                 438 fs/f2fs/inline.c 	struct f2fs_dentry_ptr d;
d                 442 fs/f2fs/inline.c 	make_dentry_ptr_inline(dir, &d, inline_dentry);
d                 444 fs/f2fs/inline.c 	while (bit_pos < d.max) {
d                 450 fs/f2fs/inline.c 		if (!test_bit_le(bit_pos, d.bitmap)) {
d                 455 fs/f2fs/inline.c 		de = &d.dentry[bit_pos];
d                 462 fs/f2fs/inline.c 		new_name.name = d.filename[bit_pos];
d                 551 fs/f2fs/inline.c 	struct f2fs_dentry_ptr d;
d                 561 fs/f2fs/inline.c 	make_dentry_ptr_inline(dir, &d, inline_dentry);
d                 563 fs/f2fs/inline.c 	bit_pos = f2fs_room_for_filename(d.bitmap, slots, d.max);
d                 564 fs/f2fs/inline.c 	if (bit_pos >= d.max) {
d                 585 fs/f2fs/inline.c 	f2fs_update_dentry(ino, mode, &d, new_name, name_hash, bit_pos);
d                 612 fs/f2fs/inline.c 	struct f2fs_dentry_ptr d;
d                 622 fs/f2fs/inline.c 	make_dentry_ptr_inline(dir, &d, inline_dentry);
d                 624 fs/f2fs/inline.c 	bit_pos = dentry - d.dentry;
d                 626 fs/f2fs/inline.c 		__clear_bit_le(bit_pos + i, d.bitmap);
d                 644 fs/f2fs/inline.c 	struct f2fs_dentry_ptr d;
d                 651 fs/f2fs/inline.c 	make_dentry_ptr_inline(dir, &d, inline_dentry);
d                 653 fs/f2fs/inline.c 	bit_pos = find_next_bit_le(d.bitmap, d.max, bit_pos);
d                 657 fs/f2fs/inline.c 	if (bit_pos < d.max)
d                 668 fs/f2fs/inline.c 	struct f2fs_dentry_ptr d;
d                 672 fs/f2fs/inline.c 	make_dentry_ptr_inline(inode, &d, inline_dentry);
d                 674 fs/f2fs/inline.c 	if (ctx->pos == d.max)
d                 689 fs/f2fs/inline.c 	make_dentry_ptr_inline(inode, &d, inline_dentry);
d                 691 fs/f2fs/inline.c 	err = f2fs_fill_dentries(ctx, &d, 0, fstr);
d                 693 fs/f2fs/inline.c 		ctx->pos = d.max;
d                  80 fs/freevxfs/vxfs_bmap.c 		struct direct *d = vip->vii_ext4.ve4_direct + i;
d                  81 fs/freevxfs/vxfs_bmap.c 		if (bn >= 0 && bn < fs32_to_cpu(sbi, d->size))
d                  82 fs/freevxfs/vxfs_bmap.c 			return (bn + fs32_to_cpu(sbi, d->extent));
d                  83 fs/freevxfs/vxfs_bmap.c 		bn -= fs32_to_cpu(sbi, d->size);
d                 576 fs/fuse/dir.c  	struct dentry *d;
d                 609 fs/fuse/dir.c  	d = d_splice_alias(inode, entry);
d                 610 fs/fuse/dir.c  	if (IS_ERR(d))
d                 611 fs/fuse/dir.c  		return PTR_ERR(d);
d                 613 fs/fuse/dir.c  	if (d) {
d                 614 fs/fuse/dir.c  		fuse_change_entry_timeout(d, &outarg);
d                 615 fs/fuse/dir.c  		dput(d);
d                 432 fs/gfs2/bmap.c 	u64 d = be64_to_cpu(*ptr);
d                 439 fs/gfs2/bmap.c 		d++;
d                 440 fs/gfs2/bmap.c 	} while(be64_to_cpu(*ptr) == d);
d                 611 fs/gfs2/dir.c  			       const struct gfs2_dirent *d, const void *end_p)
d                 613 fs/gfs2/dir.c  	const void *ptr = d;
d                 614 fs/gfs2/dir.c  	u16 rec_len = be16_to_cpu(d->de_rec_len);
d                 839 fs/gfs2/inode.c 	struct dentry *d;
d                 859 fs/gfs2/inode.c 	d = d_splice_alias(inode, dentry);
d                 860 fs/gfs2/inode.c 	if (IS_ERR(d)) {
d                 862 fs/gfs2/inode.c 		return d;
d                 869 fs/gfs2/inode.c 		dput(d);
d                 872 fs/gfs2/inode.c 	return d;
d                1236 fs/gfs2/inode.c 	struct dentry *d;
d                1242 fs/gfs2/inode.c 	d = __gfs2_lookup(dir, dentry, file);
d                1243 fs/gfs2/inode.c 	if (IS_ERR(d))
d                1244 fs/gfs2/inode.c 		return PTR_ERR(d);
d                1245 fs/gfs2/inode.c 	if (d != NULL)
d                1246 fs/gfs2/inode.c 		dentry = d;
d                1249 fs/gfs2/inode.c 			return finish_no_open(file, d);
d                1250 fs/gfs2/inode.c 		dput(d);
d                1254 fs/gfs2/inode.c 	BUG_ON(d != NULL);
d                 331 fs/gfs2/ops_fstype.c 		u64 space, d;
d                 335 fs/gfs2/ops_fstype.c 		d = space;
d                 336 fs/gfs2/ops_fstype.c 		m = do_div(d, sdp->sd_inptrs);
d                 338 fs/gfs2/ops_fstype.c 		if (d != sdp->sd_heightsize[x - 1] || m)
d                 908 fs/hostfs/hostfs_kern.c static int hostfs_fill_sb_common(struct super_block *sb, void *d, int silent)
d                 911 fs/hostfs/hostfs_kern.c 	char *host_root_path, *req_root = d;
d                 431 fs/hpfs/alloc.c 	struct dnode *d;
d                 439 fs/hpfs/alloc.c 	if (!(d = hpfs_get_4sectors(s, *dno, qbh))) {
d                 443 fs/hpfs/alloc.c 	memset(d, 0, 2048);
d                 444 fs/hpfs/alloc.c 	d->magic = cpu_to_le32(DNODE_MAGIC);
d                 445 fs/hpfs/alloc.c 	d->first_free = cpu_to_le32(52);
d                 446 fs/hpfs/alloc.c 	d->dirent[0] = 32;
d                 447 fs/hpfs/alloc.c 	d->dirent[2] = 8;
d                 448 fs/hpfs/alloc.c 	d->dirent[30] = 1;
d                 449 fs/hpfs/alloc.c 	d->dirent[31] = 255;
d                 450 fs/hpfs/alloc.c 	d->self = cpu_to_le32(*dno);
d                 451 fs/hpfs/alloc.c 	return d;
d                  12 fs/hpfs/dnode.c static loff_t get_pos(struct dnode *d, struct hpfs_dirent *fde)
d                  15 fs/hpfs/dnode.c 	struct hpfs_dirent *de_end = dnode_end_de(d);
d                  17 fs/hpfs/dnode.c 	for (de = dnode_first_de(d); de < de_end; de = de_next_de(de)) {
d                  18 fs/hpfs/dnode.c 		if (de == fde) return ((loff_t) le32_to_cpu(d->self) << 4) | (loff_t)i;
d                  22 fs/hpfs/dnode.c 	return ((loff_t)le32_to_cpu(d->self) << 4) | (loff_t)1;
d                  96 fs/hpfs/dnode.c static void hpfs_pos_ins(loff_t *p, loff_t d, loff_t c)
d                  98 fs/hpfs/dnode.c 	if ((*p & ~0x3f) == (d & ~0x3f) && (*p & 0x3f) >= (d & 0x3f)) {
d                 108 fs/hpfs/dnode.c static void hpfs_pos_del(loff_t *p, loff_t d, loff_t c)
d                 110 fs/hpfs/dnode.c 	if ((*p & ~0x3f) == (d & ~0x3f) && (*p & 0x3f) >= (d & 0x3f)) {
d                 120 fs/hpfs/dnode.c static struct hpfs_dirent *dnode_pre_last_de(struct dnode *d)
d                 123 fs/hpfs/dnode.c 	de_end = dnode_end_de(d);
d                 124 fs/hpfs/dnode.c 	for (de = dnode_first_de(d); de < de_end; de = de_next_de(de)) {
d                 130 fs/hpfs/dnode.c static struct hpfs_dirent *dnode_last_de(struct dnode *d)
d                 133 fs/hpfs/dnode.c 	de_end = dnode_end_de(d);
d                 134 fs/hpfs/dnode.c 	for (de = dnode_first_de(d); de < de_end; de = de_next_de(de)) {
d                 140 fs/hpfs/dnode.c static void set_last_pointer(struct super_block *s, struct dnode *d, dnode_secno ptr)
d                 143 fs/hpfs/dnode.c 	if (!(de = dnode_last_de(d))) {
d                 144 fs/hpfs/dnode.c 		hpfs_error(s, "set_last_pointer: empty dnode %08x", le32_to_cpu(d->self));
d                 150 fs/hpfs/dnode.c 				le32_to_cpu(d->self), de_down_pointer(de));
d                 154 fs/hpfs/dnode.c 			hpfs_error(s, "set_last_pointer: bad last dirent in dnode %08x", le32_to_cpu(d->self));
d                 159 fs/hpfs/dnode.c 		le32_add_cpu(&d->first_free, 4);
d                 160 fs/hpfs/dnode.c 		if (le32_to_cpu(d->first_free) > 2048) {
d                 161 fs/hpfs/dnode.c 			hpfs_error(s, "set_last_pointer: too long dnode %08x", le32_to_cpu(d->self));
d                 162 fs/hpfs/dnode.c 			le32_add_cpu(&d->first_free, -4);
d                 173 fs/hpfs/dnode.c struct hpfs_dirent *hpfs_add_de(struct super_block *s, struct dnode *d,
d                 178 fs/hpfs/dnode.c 	struct hpfs_dirent *de_end = dnode_end_de(d);
d                 180 fs/hpfs/dnode.c 	for (de = dnode_first_de(d); de < de_end; de = de_next_de(de)) {
d                 183 fs/hpfs/dnode.c 			hpfs_error(s, "name (%c,%d) already exists in dnode %08x", *name, namelen, le32_to_cpu(d->self));
d                 198 fs/hpfs/dnode.c 	le32_add_cpu(&d->first_free, d_size);
d                 204 fs/hpfs/dnode.c static void hpfs_delete_de(struct super_block *s, struct dnode *d,
d                 208 fs/hpfs/dnode.c 		hpfs_error(s, "attempt to delete last dirent in dnode %08x", le32_to_cpu(d->self));
d                 211 fs/hpfs/dnode.c 	d->first_free = cpu_to_le32(le32_to_cpu(d->first_free) - le16_to_cpu(de->length));
d                 212 fs/hpfs/dnode.c 	memmove(de, de_next_de(de), le32_to_cpu(d->first_free) + (char *)d - (char *)de);
d                 215 fs/hpfs/dnode.c static void fix_up_ptrs(struct super_block *s, struct dnode *d)
d                 218 fs/hpfs/dnode.c 	struct hpfs_dirent *de_end = dnode_end_de(d);
d                 219 fs/hpfs/dnode.c 	dnode_secno dno = le32_to_cpu(d->self);
d                 220 fs/hpfs/dnode.c 	for (de = dnode_first_de(d); de < de_end; de = de_next_de(de))
d                 242 fs/hpfs/dnode.c 	struct dnode *d, *ad, *rd, *nd = NULL;
d                 263 fs/hpfs/dnode.c 	if (!(d = hpfs_map_dnode(i->i_sb, dno, &qbh))) {
d                 276 fs/hpfs/dnode.c 	if (le32_to_cpu(d->first_free) + de_size(namelen, down_ptr) <= 2048) {
d                 278 fs/hpfs/dnode.c 		copy_de(de=hpfs_add_de(i->i_sb, d, name, namelen, down_ptr), new_de);
d                 279 fs/hpfs/dnode.c 		t = get_pos(d, de);
d                 300 fs/hpfs/dnode.c 	memcpy(nd, d, le32_to_cpu(d->first_free));
d                 304 fs/hpfs/dnode.c 	if (!(ad = hpfs_alloc_dnode(i->i_sb, le32_to_cpu(d->up), &adno, &qbh1))) {
d                 329 fs/hpfs/dnode.c 	memcpy(d, nd, le32_to_cpu(nd->first_free));
d                 332 fs/hpfs/dnode.c 	if (!d->root_dnode) {
d                 333 fs/hpfs/dnode.c 		ad->up = d->up;
d                 341 fs/hpfs/dnode.c 	if (!(rd = hpfs_alloc_dnode(i->i_sb, le32_to_cpu(d->up), &rdno, &qbh2))) {
d                 352 fs/hpfs/dnode.c 	rd->up = d->up;
d                 353 fs/hpfs/dnode.c 	if (!(fnode = hpfs_map_fnode(i->i_sb, le32_to_cpu(d->up), &bh))) {
d                 366 fs/hpfs/dnode.c 	d->up = ad->up = cpu_to_le32(rdno);
d                 367 fs/hpfs/dnode.c 	d->root_dnode = ad->root_dnode = 0;
d                 375 fs/hpfs/dnode.c 	d = rd;
d                 392 fs/hpfs/dnode.c 	struct dnode *d;
d                 402 fs/hpfs/dnode.c 	if (!(d = hpfs_map_dnode(i->i_sb, dno, &qbh))) return 1;
d                 403 fs/hpfs/dnode.c 	de_end = dnode_end_de(d);
d                 404 fs/hpfs/dnode.c 	for (de = dnode_first_de(d); de < de_end; de = de_next_de(de)) {
d                 821 fs/hpfs/dnode.c 	dnode_secno d = dno;
d                 828 fs/hpfs/dnode.c 		if (hpfs_stop_cycles(s, d, &c1, &c2, "hpfs_de_as_down_as_possible"))
d                 829 fs/hpfs/dnode.c 			return d;
d                 830 fs/hpfs/dnode.c 	if (!(de = map_nth_dirent(s, d, 1, &qbh, NULL))) return dno;
d                 833 fs/hpfs/dnode.c 			hpfs_error(s, "hpfs_de_as_down_as_possible: bad up pointer; dnode %08x, down %08x points to %08x", up, d, le32_to_cpu(((struct dnode *)qbh.data)->up));
d                 836 fs/hpfs/dnode.c 		return d;
d                 838 fs/hpfs/dnode.c 	up = d;
d                 839 fs/hpfs/dnode.c 	d = de_down_pointer(de);
d                 850 fs/hpfs/dnode.c 	struct hpfs_dirent *de, *d;
d                 864 fs/hpfs/dnode.c 	if ((d = de_next_de(de)) < dnode_end_de(dnode)) {
d                 872 fs/hpfs/dnode.c 		if (d->down) {
d                 873 fs/hpfs/dnode.c 			*posp = ((loff_t) hpfs_de_as_down_as_possible(inode->i_sb, de_down_pointer(d)) << 4) + 1;
d                1004 fs/hpfs/dnode.c 	struct dnode *d;
d                1040 fs/hpfs/dnode.c 	if (!(d = hpfs_map_dnode(s, dno, qbh))) {
d                1044 fs/hpfs/dnode.c 	de_end = dnode_end_de(d);
d                1045 fs/hpfs/dnode.c 	de = dnode_first_de(d);
d                1080 fs/hpfs/dnode.c 	if (d->root_dnode) goto not_found;
d                1082 fs/hpfs/dnode.c 	dno = le32_to_cpu(d->up);
d                 123 fs/hpfs/map.c  		__le32 *d = hpfs_map_sector(s, bmp+i, &bh, n - i - 1);
d                 124 fs/hpfs/map.c  		if (!d) {
d                 128 fs/hpfs/map.c  		memcpy((char *)b + 512 * i, d, 512);
d                 270 fs/hpfs/map.c  			unsigned char *d = (unsigned char *)dnode;
d                 284 fs/hpfs/map.c  			for (p = 20; p < le32_to_cpu(dnode->first_free); p += d[p] + (d[p+1] << 8)) {
d                 308 fs/hpfs/map.c  			if (d[pp + 30] != 1 || d[pp + 31] != 255) {
d                1512 fs/isofs/inode.c 	struct isofs_iget5_callback_data *d =
d                1514 fs/isofs/inode.c 	return (i->i_iget5_block == d->block)
d                1515 fs/isofs/inode.c 		&& (i->i_iget5_offset == d->offset);
d                1521 fs/isofs/inode.c 	struct isofs_iget5_callback_data *d =
d                1523 fs/isofs/inode.c 	i->i_iget5_block = d->block;
d                1524 fs/isofs/inode.c 	i->i_iget5_offset = d->offset;
d                 842 fs/jffs2/debug.c 		printk(JFFS2_DBG "pino:\t%#08x\n", je32_to_cpu(node.d.pino));
d                 843 fs/jffs2/debug.c 		printk(JFFS2_DBG "version:\t%#08x\n", je32_to_cpu(node.d.version));
d                 844 fs/jffs2/debug.c 		printk(JFFS2_DBG "ino:\t%#08x\n", je32_to_cpu(node.d.ino));
d                 845 fs/jffs2/debug.c 		printk(JFFS2_DBG "mctime:\t%#08x\n", je32_to_cpu(node.d.mctime));
d                 846 fs/jffs2/debug.c 		printk(JFFS2_DBG "nsize:\t%#02x\n", node.d.nsize);
d                 847 fs/jffs2/debug.c 		printk(JFFS2_DBG "type:\t%#02x\n", node.d.type);
d                 848 fs/jffs2/debug.c 		printk(JFFS2_DBG "node_crc:\t%#08x\n", je32_to_cpu(node.d.node_crc));
d                 849 fs/jffs2/debug.c 		printk(JFFS2_DBG "name_crc:\t%#08x\n", je32_to_cpu(node.d.name_crc));
d                 851 fs/jffs2/debug.c 		node.d.name[node.d.nsize] = '\0';
d                 852 fs/jffs2/debug.c 		printk(JFFS2_DBG "name:\t\"%s\"\n", node.d.name);
d                 854 fs/jffs2/debug.c 		crc = crc32(0, &node.d, sizeof(node.d) - 8);
d                 855 fs/jffs2/debug.c 		if (crc != je32_to_cpu(node.d.node_crc)) {
d                 663 fs/jffs2/gc.c  		crc = crc32(0, node, sizeof(node->d)-8);
d                 664 fs/jffs2/gc.c  		if (je32_to_cpu(node->d.node_crc) != crc) {
d                 667 fs/jffs2/gc.c  				je32_to_cpu(node->d.node_crc), crc);
d                 671 fs/jffs2/gc.c  		if (strnlen(node->d.name, node->d.nsize) != node->d.nsize) {
d                 677 fs/jffs2/gc.c  		if (node->d.nsize) {
d                 678 fs/jffs2/gc.c  			crc = crc32(0, node->d.name, node->d.nsize);
d                 679 fs/jffs2/gc.c  			if (je32_to_cpu(node->d.name_crc) != crc) {
d                 682 fs/jffs2/gc.c  					je32_to_cpu(node->d.name_crc), crc);
d                  87 fs/jffs2/os-linux.h #define jffs2_flash_writev(a,b,c,d,e,f) jffs2_flash_direct_writev(a,b,c,d,e)
d                1073 fs/jffs2/readinode.c 			err = read_direntry(c, ref, &node->d, retlen, rii);
d                  79 fs/jffs2/summary.c 			s->sum_size += JFFS2_SUMMARY_DIRENT_SIZE(item->d.nsize);
d                  82 fs/jffs2/summary.c 						je32_to_cpu(item->d.ino));
d                 283 fs/jffs2/summary.c 				kmalloc(sizeof(struct jffs2_sum_dirent_mem) + node->d.nsize, GFP_KERNEL);
d                 288 fs/jffs2/summary.c 			temp->nodetype = node->d.nodetype;
d                 289 fs/jffs2/summary.c 			temp->totlen = node->d.totlen;
d                 291 fs/jffs2/summary.c 			temp->pino = node->d.pino;
d                 292 fs/jffs2/summary.c 			temp->version = node->d.version;
d                 293 fs/jffs2/summary.c 			temp->ino = node->d.ino;
d                 294 fs/jffs2/summary.c 			temp->nsize = node->d.nsize;
d                 295 fs/jffs2/summary.c 			temp->type = node->d.type;
d                 300 fs/jffs2/summary.c 					memcpy(temp->name,node->d.name,node->d.nsize);
d                 304 fs/jffs2/summary.c 					memcpy(temp->name,invecs[1].iov_base,node->d.nsize);
d                 739 fs/jffs2/summary.c 				sdrnt_ptr->nodetype = temp->d.nodetype;
d                 740 fs/jffs2/summary.c 				sdrnt_ptr->totlen = temp->d.totlen;
d                 741 fs/jffs2/summary.c 				sdrnt_ptr->offset = temp->d.offset;
d                 742 fs/jffs2/summary.c 				sdrnt_ptr->pino = temp->d.pino;
d                 743 fs/jffs2/summary.c 				sdrnt_ptr->version = temp->d.version;
d                 744 fs/jffs2/summary.c 				sdrnt_ptr->ino = temp->d.ino;
d                 745 fs/jffs2/summary.c 				sdrnt_ptr->nsize = temp->d.nsize;
d                 746 fs/jffs2/summary.c 				sdrnt_ptr->type = temp->d.type;
d                 748 fs/jffs2/summary.c 				memcpy(sdrnt_ptr->name, temp->d.name,
d                 749 fs/jffs2/summary.c 							temp->d.nsize);
d                 751 fs/jffs2/summary.c 				wpage += JFFS2_SUMMARY_DIRENT_SIZE(temp->d.nsize);
d                  86 fs/jffs2/summary.h 	struct jffs2_sum_dirent_flash d;
d                 144 fs/jffs2/summary.h 	struct jffs2_sum_dirent_mem d;
d                 201 fs/jffs2/summary.h #define jffs2_sum_add_kvec(a,b,c,d) (0)
d                 209 fs/jffs2/summary.h #define jffs2_sum_scan_sumnode(a,b,c,d,e) (0)
d                 262 fs/jfs/jfs_dmap.h #define	BLKSTOL2(d)		(blkstol2(d))
d                2908 fs/jfs/jfs_dtree.c 	struct ldtentry *d;
d                2940 fs/jfs/jfs_dtree.c 		d = (struct ldtentry *) &p->slot[stbl[i]];
d                2941 fs/jfs/jfs_dtree.c 		index = le32_to_cpu(d->index);
d                2943 fs/jfs/jfs_dtree.c 			d->index = cpu_to_le32(add_index(tid, inode, bn, i));
d                3011 fs/jfs/jfs_dtree.c 	struct ldtentry *d;
d                3185 fs/jfs/jfs_dtree.c 			d = (struct ldtentry *) & p->slot[stbl[i]];
d                3187 fs/jfs/jfs_dtree.c 			if (((long) jfs_dirent + d->namlen + 1) >
d                3195 fs/jfs/jfs_dtree.c 			d_namleft = d->namlen;
d                3197 fs/jfs/jfs_dtree.c 			jfs_dirent->ino = le32_to_cpu(d->inumber);
d                3201 fs/jfs/jfs_dtree.c 				jfs_dirent->position = le32_to_cpu(d->index);
d                3238 fs/jfs/jfs_dtree.c 			outlen = jfs_strfromUCS_le(name_ptr, d->name, len,
d                3243 fs/jfs/jfs_dtree.c 			next = d->next;
d                4062 fs/jfs/jfs_dtree.c 	struct dtslot *h, *s, *d;
d                4113 fs/jfs/jfs_dtree.c 		h = d = &dp->slot[dsi];
d                4187 fs/jfs/jfs_dtree.c 			d++;
d                4190 fs/jfs/jfs_dtree.c 			UniStrncpy_le(d->name, s->name, len);
d                4197 fs/jfs/jfs_dtree.c 			d->next = dsi;
d                4209 fs/jfs/jfs_dtree.c 		if (h == d) {
d                4217 fs/jfs/jfs_dtree.c 			d->next = -1;
d                 107 fs/libfs.c     		struct dentry *d = list_entry(p, struct dentry, d_child);
d                 109 fs/libfs.c     		if (d->d_flags & DCACHE_DENTRY_CURSOR)
d                 111 fs/libfs.c     		if (simple_positive(d) && !--count) {
d                 112 fs/libfs.c     			spin_lock_nested(&d->d_lock, DENTRY_D_LOCK_NESTED);
d                 113 fs/libfs.c     			if (simple_positive(d))
d                 114 fs/libfs.c     				found = dget_dlock(d);
d                 115 fs/libfs.c     			spin_unlock(&d->d_lock);
d                 841 fs/namei.c     		struct dentry *d;
d                 843 fs/namei.c     		d = nd->path.dentry;
d                 844 fs/namei.c     		nd->inode = d->d_inode;
d                 846 fs/namei.c     		if (unlikely(read_seqcount_retry(&d->d_seq, nd->seq)))
d                2400 fs/namei.c     	struct dentry *d;
d                2414 fs/namei.c     	d = __lookup_hash(&last, path->dentry, 0);
d                2415 fs/namei.c     	if (IS_ERR(d)) {
d                2420 fs/namei.c     	return d;
d                 177 fs/nfs/blocklayout/blocklayout.h void bl_free_deviceid_node(struct nfs4_deviceid_node *d);
d                  43 fs/nfs/blocklayout/dev.c bl_free_deviceid_node(struct nfs4_deviceid_node *d)
d                  46 fs/nfs/blocklayout/dev.c 		container_of(d, struct pnfs_block_dev, node);
d                 230 fs/nfs/blocklayout/dev.c bl_parse_deviceid(struct nfs_server *server, struct pnfs_block_dev *d,
d                 235 fs/nfs/blocklayout/dev.c bl_parse_simple(struct nfs_server *server, struct pnfs_block_dev *d,
d                 252 fs/nfs/blocklayout/dev.c 	d->bdev = bdev;
d                 255 fs/nfs/blocklayout/dev.c 	d->len = i_size_read(d->bdev->bd_inode);
d                 256 fs/nfs/blocklayout/dev.c 	d->map = bl_map_simple;
d                 259 fs/nfs/blocklayout/dev.c 		d->bdev->bd_disk->disk_name);
d                 352 fs/nfs/blocklayout/dev.c bl_parse_scsi(struct nfs_server *server, struct pnfs_block_dev *d,
d                 368 fs/nfs/blocklayout/dev.c 	d->bdev = bdev;
d                 370 fs/nfs/blocklayout/dev.c 	d->len = i_size_read(d->bdev->bd_inode);
d                 371 fs/nfs/blocklayout/dev.c 	d->map = bl_map_simple;
d                 372 fs/nfs/blocklayout/dev.c 	d->pr_key = v->scsi.pr_key;
d                 375 fs/nfs/blocklayout/dev.c 		d->bdev->bd_disk->disk_name, d->pr_key);
d                 377 fs/nfs/blocklayout/dev.c 	ops = d->bdev->bd_disk->fops->pr_ops;
d                 380 fs/nfs/blocklayout/dev.c 				d->bdev->bd_disk->disk_name);
d                 385 fs/nfs/blocklayout/dev.c 	error = ops->pr_register(d->bdev, 0, d->pr_key, true);
d                 388 fs/nfs/blocklayout/dev.c 				d->bdev->bd_disk->disk_name);
d                 392 fs/nfs/blocklayout/dev.c 	d->pr_registered = true;
d                 396 fs/nfs/blocklayout/dev.c 	blkdev_put(d->bdev, FMODE_READ | FMODE_WRITE);
d                 401 fs/nfs/blocklayout/dev.c bl_parse_slice(struct nfs_server *server, struct pnfs_block_dev *d,
d                 407 fs/nfs/blocklayout/dev.c 	ret = bl_parse_deviceid(server, d, volumes, v->slice.volume, gfp_mask);
d                 411 fs/nfs/blocklayout/dev.c 	d->disk_offset = v->slice.start;
d                 412 fs/nfs/blocklayout/dev.c 	d->len = v->slice.len;
d                 417 fs/nfs/blocklayout/dev.c bl_parse_concat(struct nfs_server *server, struct pnfs_block_dev *d,
d                 424 fs/nfs/blocklayout/dev.c 	d->children = kcalloc(v->concat.volumes_count,
d                 426 fs/nfs/blocklayout/dev.c 	if (!d->children)
d                 430 fs/nfs/blocklayout/dev.c 		ret = bl_parse_deviceid(server, &d->children[i],
d                 435 fs/nfs/blocklayout/dev.c 		d->nr_children++;
d                 436 fs/nfs/blocklayout/dev.c 		d->children[i].start += len;
d                 437 fs/nfs/blocklayout/dev.c 		len += d->children[i].len;
d                 440 fs/nfs/blocklayout/dev.c 	d->len = len;
d                 441 fs/nfs/blocklayout/dev.c 	d->map = bl_map_concat;
d                 446 fs/nfs/blocklayout/dev.c bl_parse_stripe(struct nfs_server *server, struct pnfs_block_dev *d,
d                 453 fs/nfs/blocklayout/dev.c 	d->children = kcalloc(v->stripe.volumes_count,
d                 455 fs/nfs/blocklayout/dev.c 	if (!d->children)
d                 459 fs/nfs/blocklayout/dev.c 		ret = bl_parse_deviceid(server, &d->children[i],
d                 464 fs/nfs/blocklayout/dev.c 		d->nr_children++;
d                 465 fs/nfs/blocklayout/dev.c 		len += d->children[i].len;
d                 468 fs/nfs/blocklayout/dev.c 	d->len = len;
d                 469 fs/nfs/blocklayout/dev.c 	d->chunk_size = v->stripe.chunk_size;
d                 470 fs/nfs/blocklayout/dev.c 	d->map = bl_map_stripe;
d                 475 fs/nfs/blocklayout/dev.c bl_parse_deviceid(struct nfs_server *server, struct pnfs_block_dev *d,
d                 480 fs/nfs/blocklayout/dev.c 		return bl_parse_simple(server, d, volumes, idx, gfp_mask);
d                 482 fs/nfs/blocklayout/dev.c 		return bl_parse_slice(server, d, volumes, idx, gfp_mask);
d                 484 fs/nfs/blocklayout/dev.c 		return bl_parse_concat(server, d, volumes, idx, gfp_mask);
d                 486 fs/nfs/blocklayout/dev.c 		return bl_parse_stripe(server, d, volumes, idx, gfp_mask);
d                 488 fs/nfs/blocklayout/dev.c 		return bl_parse_scsi(server, d, volumes, idx, gfp_mask);
d                  74 fs/nfs/cache_lib.c static void nfs_dns_cache_revisit(struct cache_deferred_req *d, int toomany)
d                  78 fs/nfs/cache_lib.c 	dreq = container_of(d, struct nfs_cache_defer_req, deferred_req);
d                1701 fs/nfs/dir.c   	struct dentry *d;
d                1720 fs/nfs/dir.c   	d = d_splice_alias(inode, dentry);
d                1723 fs/nfs/dir.c   	return d;
d                1726 fs/nfs/dir.c   	d = ERR_PTR(error);
d                1738 fs/nfs/dir.c   	struct dentry *d;
d                1740 fs/nfs/dir.c   	d = nfs_add_or_obtain(dentry, fhandle, fattr, label);
d                1741 fs/nfs/dir.c   	if (IS_ERR(d))
d                1742 fs/nfs/dir.c   		return PTR_ERR(d);
d                1745 fs/nfs/dir.c   	dput(d);
d                 541 fs/nfs/filelayout/filelayout.c 	struct nfs4_deviceid_node *d;
d                 550 fs/nfs/filelayout/filelayout.c 	d = nfs4_find_get_deviceid(NFS_SERVER(lo->plh_inode), &fl->deviceid,
d                 552 fs/nfs/filelayout/filelayout.c 	if (d == NULL)
d                 555 fs/nfs/filelayout/filelayout.c 	dsaddr = container_of(d, struct nfs4_file_layout_dsaddr, id_node);
d                1132 fs/nfs/filelayout/filelayout.c filelayout_free_deviceid_node(struct nfs4_deviceid_node *d)
d                1134 fs/nfs/filelayout/filelayout.c 	nfs4_fl_free_deviceid(container_of(d, struct nfs4_file_layout_dsaddr, id_node));
d                2005 fs/nfs/flexfilelayout/flexfilelayout.c ff_layout_free_deviceid_node(struct nfs4_deviceid_node *d)
d                2007 fs/nfs/flexfilelayout/flexfilelayout.c 	nfs4_ff_layout_free_deviceid(container_of(d, struct nfs4_ff_layout_ds,
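The two free routines above (filelayout and flexfilelayout) recover their layout-driver-specific structure from the embedded nfs4_deviceid_node with container_of. A minimal userspace sketch of that embedding pattern, with hypothetical type and field names and a local definition of the kernel's container_of macro:

#include <stddef.h>
#include <stdio.h>

/* Local stand-in for the kernel macro. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Generic node embedded inside a driver-specific structure. */
struct deviceid_node {
	int id;
};

struct file_layout_ds {               /* hypothetical outer structure */
	char name[16];
	struct deviceid_node id_node;  /* embedded member */
};

static void free_deviceid_node(struct deviceid_node *d)
{
	/* Recover the enclosing structure from the embedded member. */
	struct file_layout_ds *ds =
		container_of(d, struct file_layout_ds, id_node);

	printf("freeing %s\n", ds->name);
}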
d                 598 fs/nfs/nfs4_fs.h #define nfs4_state_protect(a, b, c, d) do { } while (0)
d                 599 fs/nfs/nfs4_fs.h #define nfs4_state_protect_write(a, b, c, d) do { } while (0)
d                1553 fs/nfs/pnfs.c  	s64 d;
d                1556 fs/nfs/pnfs.c  	d = l1->offset - l2->offset;
d                1557 fs/nfs/pnfs.c  	if (d)
d                1558 fs/nfs/pnfs.c  		return d;
d                1561 fs/nfs/pnfs.c  	d = l2->length - l1->length;
d                1562 fs/nfs/pnfs.c  	if (d)
d                1563 fs/nfs/pnfs.c  		return d;
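The pnfs.c excerpt above orders two ranges by returning a signed 64-bit difference: ascending by offset first, then descending by length. A small standalone sketch of that comparison, using a hypothetical range type in place of the pNFS layout range:

#include <stdint.h>

/* Hypothetical range type standing in for the layout range. */
struct range {
	uint64_t offset;
	uint64_t length;
};

/* Order by ascending offset, then by descending length, mirroring the
 * two-step signed-difference compare in the excerpt. */
static int64_t range_cmp(const struct range *l1, const struct range *l2)
{
	int64_t d = (int64_t)(l1->offset - l2->offset);

	if (d)
		return d;

	return (int64_t)(l2->length - l1->length);
}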
d                 409 fs/nfs/pnfs.h  nfs4_get_deviceid(struct nfs4_deviceid_node *d)
d                 411 fs/nfs/pnfs.h  	atomic_inc(&d->ref);
d                 412 fs/nfs/pnfs.h  	return d;
d                  81 fs/nfs/pnfs_dev.c 	struct nfs4_deviceid_node *d;
d                  83 fs/nfs/pnfs_dev.c 	hlist_for_each_entry_rcu(d, &nfs4_deviceid_cache[hash], node)
d                  84 fs/nfs/pnfs_dev.c 		if (d->ld == ld && d->nfs_client == clp &&
d                  85 fs/nfs/pnfs_dev.c 		    !memcmp(&d->deviceid, id, sizeof(*id))) {
d                  86 fs/nfs/pnfs_dev.c 			if (atomic_read(&d->ref))
d                  87 fs/nfs/pnfs_dev.c 				return d;
d                  99 fs/nfs/pnfs_dev.c 	struct nfs4_deviceid_node *d = NULL;
d                 149 fs/nfs/pnfs_dev.c 	d = server->pnfs_curr_ld->alloc_deviceid_node(server, pdev,
d                 151 fs/nfs/pnfs_dev.c 	if (d && pdev->nocache)
d                 152 fs/nfs/pnfs_dev.c 		set_bit(NFS_DEVICEID_NOCACHE, &d->flags);
d                 160 fs/nfs/pnfs_dev.c 	dprintk("<-- %s d %p\n", __func__, d);
d                 161 fs/nfs/pnfs_dev.c 	return d;
d                 174 fs/nfs/pnfs_dev.c 	struct nfs4_deviceid_node *d;
d                 177 fs/nfs/pnfs_dev.c 	d = _lookup_deviceid(server->pnfs_curr_ld, server->nfs_client, id,
d                 179 fs/nfs/pnfs_dev.c 	if (d != NULL && !atomic_inc_not_zero(&d->ref))
d                 180 fs/nfs/pnfs_dev.c 		d = NULL;
d                 182 fs/nfs/pnfs_dev.c 	return d;
d                 191 fs/nfs/pnfs_dev.c 	struct nfs4_deviceid_node *d, *new;
d                 193 fs/nfs/pnfs_dev.c 	d = __nfs4_find_get_deviceid(server, id, hash);
d                 194 fs/nfs/pnfs_dev.c 	if (d)
d                 195 fs/nfs/pnfs_dev.c 		return d;
d                 202 fs/nfs/pnfs_dev.c 	d = __nfs4_find_get_deviceid(server, id, hash);
d                 203 fs/nfs/pnfs_dev.c 	if (d) {
d                 206 fs/nfs/pnfs_dev.c 		return d;
d                 228 fs/nfs/pnfs_dev.c 	struct nfs4_deviceid_node *d;
d                 232 fs/nfs/pnfs_dev.c 	d = _lookup_deviceid(ld, clp, id, nfs4_deviceid_hash(id));
d                 234 fs/nfs/pnfs_dev.c 	if (!d) {
d                 238 fs/nfs/pnfs_dev.c 	hlist_del_init_rcu(&d->node);
d                 239 fs/nfs/pnfs_dev.c 	clear_bit(NFS_DEVICEID_NOCACHE, &d->flags);
d                 243 fs/nfs/pnfs_dev.c 	nfs4_put_deviceid_node(d);
d                 248 fs/nfs/pnfs_dev.c nfs4_init_deviceid_node(struct nfs4_deviceid_node *d, struct nfs_server *server,
d                 251 fs/nfs/pnfs_dev.c 	INIT_HLIST_NODE(&d->node);
d                 252 fs/nfs/pnfs_dev.c 	INIT_HLIST_NODE(&d->tmpnode);
d                 253 fs/nfs/pnfs_dev.c 	d->ld = server->pnfs_curr_ld;
d                 254 fs/nfs/pnfs_dev.c 	d->nfs_client = server->nfs_client;
d                 255 fs/nfs/pnfs_dev.c 	d->flags = 0;
d                 256 fs/nfs/pnfs_dev.c 	d->deviceid = *id;
d                 257 fs/nfs/pnfs_dev.c 	atomic_set(&d->ref, 1);
d                 272 fs/nfs/pnfs_dev.c nfs4_put_deviceid_node(struct nfs4_deviceid_node *d)
d                 274 fs/nfs/pnfs_dev.c 	if (test_bit(NFS_DEVICEID_NOCACHE, &d->flags)) {
d                 275 fs/nfs/pnfs_dev.c 		if (atomic_add_unless(&d->ref, -1, 2))
d                 277 fs/nfs/pnfs_dev.c 		nfs4_delete_deviceid(d->ld, d->nfs_client, &d->deviceid);
d                 279 fs/nfs/pnfs_dev.c 	if (!atomic_dec_and_test(&d->ref))
d                 281 fs/nfs/pnfs_dev.c 	d->ld->free_deviceid_node(d);
d                 326 fs/nfs/pnfs_dev.c 	struct nfs4_deviceid_node *d;
d                 331 fs/nfs/pnfs_dev.c 	hlist_for_each_entry_rcu(d, &nfs4_deviceid_cache[hash], node)
d                 332 fs/nfs/pnfs_dev.c 		if (d->nfs_client == clp && atomic_read(&d->ref)) {
d                 333 fs/nfs/pnfs_dev.c 			hlist_del_init_rcu(&d->node);
d                 334 fs/nfs/pnfs_dev.c 			hlist_add_head(&d->tmpnode, &tmp);
d                 335 fs/nfs/pnfs_dev.c 			clear_bit(NFS_DEVICEID_NOCACHE, &d->flags);
d                 344 fs/nfs/pnfs_dev.c 		d = hlist_entry(tmp.first, struct nfs4_deviceid_node, tmpnode);
d                 345 fs/nfs/pnfs_dev.c 		hlist_del(&d->tmpnode);
d                 346 fs/nfs/pnfs_dev.c 		nfs4_put_deviceid_node(d);
d                 367 fs/nfs/pnfs_dev.c 	struct nfs4_deviceid_node *d;
d                 372 fs/nfs/pnfs_dev.c 		hlist_for_each_entry_rcu(d, &nfs4_deviceid_cache[i], node)
d                 373 fs/nfs/pnfs_dev.c 			if (d->nfs_client == clp)
d                 374 fs/nfs/pnfs_dev.c 				set_bit(NFS_DEVICEID_INVALID, &d->flags);
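The pnfs_dev.c excerpts above show the device-ID cache pattern: nodes sit on RCU-protected hash lists, a lookup only succeeds if the entry's reference count has not already dropped to zero, and the final put unhashes and frees the node. A kernel-context sketch of the lookup half of that pattern, with illustrative names rather than the NFS code itself:

#include <linux/rculist.h>
#include <linux/atomic.h>

struct node {
	struct hlist_node link;
	atomic_t ref;
	int id;
};

/* Walk one RCU-protected bucket and take a reference only if the
 * entry is still live (refcount not yet zero). */
static struct node *find_get_node(struct hlist_head *bucket, int id)
{
	struct node *n;

	rcu_read_lock();
	hlist_for_each_entry_rcu(n, bucket, link) {
		if (n->id == id && atomic_inc_not_zero(&n->ref)) {
			rcu_read_unlock();
			return n;
		}
	}
	rcu_read_unlock();
	return NULL;
}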
d                 228 fs/nfsd/blocklayout.c 	u8 *buf, *d, type, assoc;
d                 278 fs/nfsd/blocklayout.c 	d = buf + 4;
d                 279 fs/nfsd/blocklayout.c 	for (d = buf + 4; d < buf + len; d += id_len + 4) {
d                 280 fs/nfsd/blocklayout.c 		id_len = d[3];
d                 281 fs/nfsd/blocklayout.c 		type = d[1] & 0xf;
d                 282 fs/nfsd/blocklayout.c 		assoc = (d[1] >> 4) & 0x3;
d                 299 fs/nfsd/blocklayout.c 		memcpy(b->scsi.designator, d + 4, id_len);
d                1198 fs/nfsd/vfs.c  			struct dentry *d;
d                1199 fs/nfsd/vfs.c  			d = lookup_one_len(dchild->d_name.name,
d                1202 fs/nfsd/vfs.c  			if (IS_ERR(d)) {
d                1203 fs/nfsd/vfs.c  				host_err = PTR_ERR(d);
d                1206 fs/nfsd/vfs.c  			if (unlikely(d_is_negative(d))) {
d                1207 fs/nfsd/vfs.c  				dput(d);
d                1212 fs/nfsd/vfs.c  			resfhp->fh_dentry = dget(d);
d                1215 fs/nfsd/vfs.c  			dchild = d;
d                  60 fs/nsfs.c      	unsigned long d;
d                  63 fs/nsfs.c      	d = atomic_long_read(&ns->stashed);
d                  64 fs/nsfs.c      	if (!d)
d                  66 fs/nsfs.c      	dentry = (struct dentry *)d;
d                  96 fs/nsfs.c      	d = atomic_long_cmpxchg(&ns->stashed, 0, (unsigned long)dentry);
d                  97 fs/nsfs.c      	if (d) {
d                  99 fs/ocfs2/blockcheck.c u32 ocfs2_hamming_encode(u32 parity, void *data, unsigned int d, unsigned int nr)
d                 103 fs/ocfs2/blockcheck.c 	BUG_ON(!d);
d                 114 fs/ocfs2/blockcheck.c 	for (i = 0; (i = ocfs2_find_next_bit(data, d, i)) < d; i++)
d                 158 fs/ocfs2/blockcheck.c void ocfs2_hamming_fix(void *data, unsigned int d, unsigned int nr,
d                 163 fs/ocfs2/blockcheck.c 	BUG_ON(!d);
d                 176 fs/ocfs2/blockcheck.c 	if (fix >= calc_code_bit(nr + d, NULL))
d                 189 fs/ocfs2/blockcheck.c 	for (i = 0; i < d; i++, b++)
d                  77 fs/ocfs2/blockcheck.h u32 ocfs2_hamming_encode(u32 parity, void *data, unsigned int d,
d                  89 fs/ocfs2/blockcheck.h void ocfs2_hamming_fix(void *data, unsigned int d, unsigned int nr,
d                 202 fs/ocfs2/cluster/tcp.c # define o2net_init_nst(a, b, c, d, e)
d                 213 fs/ocfs2/dir.c 	__u32	a = in[0], b = in[1], c = in[2], d = in[3];
d                 219 fs/ocfs2/dir.c 		b1 += ((b0 << 4)+c) ^ (b0+sum) ^ ((b0 >> 5)+d);
d                  73 fs/ocfs2/quota_global.c 	struct ocfs2_global_disk_dqblk *d = dp;
d                  78 fs/ocfs2/quota_global.c 		m->dqb_ihardlimit = le64_to_cpu(d->dqb_ihardlimit);
d                  79 fs/ocfs2/quota_global.c 		m->dqb_isoftlimit = le64_to_cpu(d->dqb_isoftlimit);
d                  82 fs/ocfs2/quota_global.c 		m->dqb_curinodes = le64_to_cpu(d->dqb_curinodes);
d                  84 fs/ocfs2/quota_global.c 		m->dqb_bhardlimit = le64_to_cpu(d->dqb_bhardlimit);
d                  85 fs/ocfs2/quota_global.c 		m->dqb_bsoftlimit = le64_to_cpu(d->dqb_bsoftlimit);
d                  88 fs/ocfs2/quota_global.c 		m->dqb_curspace = le64_to_cpu(d->dqb_curspace);
d                  90 fs/ocfs2/quota_global.c 		m->dqb_btime = le64_to_cpu(d->dqb_btime);
d                  92 fs/ocfs2/quota_global.c 		m->dqb_itime = le64_to_cpu(d->dqb_itime);
d                  93 fs/ocfs2/quota_global.c 	OCFS2_DQUOT(dquot)->dq_use_count = le32_to_cpu(d->dqb_use_count);
d                  98 fs/ocfs2/quota_global.c 	struct ocfs2_global_disk_dqblk *d = dp;
d                 101 fs/ocfs2/quota_global.c 	d->dqb_id = cpu_to_le32(from_kqid(&init_user_ns, dquot->dq_id));
d                 102 fs/ocfs2/quota_global.c 	d->dqb_use_count = cpu_to_le32(OCFS2_DQUOT(dquot)->dq_use_count);
d                 103 fs/ocfs2/quota_global.c 	d->dqb_ihardlimit = cpu_to_le64(m->dqb_ihardlimit);
d                 104 fs/ocfs2/quota_global.c 	d->dqb_isoftlimit = cpu_to_le64(m->dqb_isoftlimit);
d                 105 fs/ocfs2/quota_global.c 	d->dqb_curinodes = cpu_to_le64(m->dqb_curinodes);
d                 106 fs/ocfs2/quota_global.c 	d->dqb_bhardlimit = cpu_to_le64(m->dqb_bhardlimit);
d                 107 fs/ocfs2/quota_global.c 	d->dqb_bsoftlimit = cpu_to_le64(m->dqb_bsoftlimit);
d                 108 fs/ocfs2/quota_global.c 	d->dqb_curspace = cpu_to_le64(m->dqb_curspace);
d                 109 fs/ocfs2/quota_global.c 	d->dqb_btime = cpu_to_le64(m->dqb_btime);
d                 110 fs/ocfs2/quota_global.c 	d->dqb_itime = cpu_to_le64(m->dqb_itime);
d                 111 fs/ocfs2/quota_global.c 	d->dqb_pad1 = d->dqb_pad2 = 0;
d                 116 fs/ocfs2/quota_global.c 	struct ocfs2_global_disk_dqblk *d = dp;
d                 124 fs/ocfs2/quota_global.c 				le32_to_cpu(d->dqb_id)),
d                 481 fs/orangefs/super.c 	struct dentry *d = ERR_PTR(-EINVAL);
d                 519 fs/orangefs/super.c 		d = ERR_CAST(sb);
d                 528 fs/orangefs/super.c 		d = ERR_PTR(-ENOMEM);
d                 537 fs/orangefs/super.c 		d = ERR_PTR(ret);
d                 595 fs/orangefs/super.c 	return d;
d                 120 fs/overlayfs/dir.c 	struct dentry *d, *dentry = *newdentry;
d                 134 fs/overlayfs/dir.c 	d = lookup_one_len(dentry->d_name.name, dentry->d_parent,
d                 136 fs/overlayfs/dir.c 	if (IS_ERR(d)) {
d                 139 fs/overlayfs/dir.c 		return PTR_ERR(d);
d                 142 fs/overlayfs/dir.c 	*newdentry = d;
d                 906 fs/overlayfs/dir.c 	struct dentry *d, *tmp;
d                 921 fs/overlayfs/dir.c 	for (d = dget(dentry); !IS_ROOT(d);) {
d                 925 fs/overlayfs/dir.c 		spin_lock(&d->d_lock);
d                 926 fs/overlayfs/dir.c 		name = ovl_dentry_get_redirect(d);
d                 930 fs/overlayfs/dir.c 			name = d->d_name.name;
d                 931 fs/overlayfs/dir.c 			thislen = d->d_name.len;
d                 937 fs/overlayfs/dir.c 			spin_unlock(&d->d_lock);
d                 943 fs/overlayfs/dir.c 		tmp = dget_dlock(d->d_parent);
d                 944 fs/overlayfs/dir.c 		spin_unlock(&d->d_lock);
d                 946 fs/overlayfs/dir.c 		dput(d);
d                 947 fs/overlayfs/dir.c 		d = tmp;
d                 957 fs/overlayfs/dir.c 	dput(d);
d                  28 fs/overlayfs/namei.c static int ovl_check_redirect(struct dentry *dentry, struct ovl_lookup_data *d,
d                  47 fs/overlayfs/namei.c 		d->stop = false;
d                  51 fs/overlayfs/namei.c 		memcpy(buf, d->name.name, prelen);
d                  55 fs/overlayfs/namei.c 	kfree(d->redirect);
d                  56 fs/overlayfs/namei.c 	d->redirect = buf;
d                  57 fs/overlayfs/namei.c 	d->name.name = d->redirect;
d                  58 fs/overlayfs/namei.c 	d->name.len = strlen(d->redirect);
d                 194 fs/overlayfs/namei.c static int ovl_lookup_single(struct dentry *base, struct ovl_lookup_data *d,
d                 220 fs/overlayfs/namei.c 		d->stop = d->opaque = true;
d                 227 fs/overlayfs/namei.c 	if (last_element && d->metacopy && !d_is_reg(this)) {
d                 228 fs/overlayfs/namei.c 		d->stop = true;
d                 232 fs/overlayfs/namei.c 		if (d->is_dir || !last_element) {
d                 233 fs/overlayfs/namei.c 			d->stop = true;
d                 240 fs/overlayfs/namei.c 		d->metacopy = err;
d                 241 fs/overlayfs/namei.c 		d->stop = !d->metacopy;
d                 242 fs/overlayfs/namei.c 		if (!d->metacopy || d->last)
d                 245 fs/overlayfs/namei.c 		if (ovl_lookup_trap_inode(d->sb, this)) {
d                 252 fs/overlayfs/namei.c 			d->is_dir = true;
d                 253 fs/overlayfs/namei.c 		if (d->last)
d                 257 fs/overlayfs/namei.c 			d->stop = true;
d                 259 fs/overlayfs/namei.c 				d->opaque = true;
d                 263 fs/overlayfs/namei.c 	err = ovl_check_redirect(this, d, prelen, post);
d                 280 fs/overlayfs/namei.c static int ovl_lookup_layer(struct dentry *base, struct ovl_lookup_data *d,
d                 284 fs/overlayfs/namei.c 	size_t rem = d->name.len - 1;
d                 288 fs/overlayfs/namei.c 	if (d->name.name[0] != '/')
d                 289 fs/overlayfs/namei.c 		return ovl_lookup_single(base, d, d->name.name, d->name.len,
d                 293 fs/overlayfs/namei.c 		const char *s = d->name.name + d->name.len - rem;
d                 302 fs/overlayfs/namei.c 		err = ovl_lookup_single(base, d, s, thislen,
d                 303 fs/overlayfs/namei.c 					d->name.len - rem, next, &base);
d                 313 fs/overlayfs/namei.c 		if (WARN_ON(rem >= d->name.len))
d                 833 fs/overlayfs/namei.c 	struct ovl_lookup_data d = {
d                 850 fs/overlayfs/namei.c 		err = ovl_lookup_layer(upperdir, &d, &upperdentry);
d                 859 fs/overlayfs/namei.c 		if (upperdentry && !d.is_dir) {
d                 877 fs/overlayfs/namei.c 			if (d.metacopy)
d                 881 fs/overlayfs/namei.c 		if (d.redirect) {
d                 883 fs/overlayfs/namei.c 			upperredirect = kstrdup(d.redirect, GFP_KERNEL);
d                 886 fs/overlayfs/namei.c 			if (d.redirect[0] == '/')
d                 889 fs/overlayfs/namei.c 		upperopaque = d.opaque;
d                 892 fs/overlayfs/namei.c 	if (!d.stop && poe->numlower) {
d                 900 fs/overlayfs/namei.c 	for (i = 0; !d.stop && i < poe->numlower; i++) {
d                 904 fs/overlayfs/namei.c 			d.last = i == poe->numlower - 1;
d                 906 fs/overlayfs/namei.c 			d.last = lower.layer->idx == roe->numlower;
d                 908 fs/overlayfs/namei.c 		err = ovl_lookup_layer(lower.dentry, &d, &this);
d                 919 fs/overlayfs/namei.c 		if (upperdentry && !ctr && !ofs->noxattr && d.is_dir) {
d                 937 fs/overlayfs/namei.c 		    ((d.is_dir && ovl_verify_lower(dentry->d_sb)) ||
d                 938 fs/overlayfs/namei.c 		     (!d.is_dir && ofs->config.index && origin_path))) {
d                 942 fs/overlayfs/namei.c 				if (d.is_dir)
d                 949 fs/overlayfs/namei.c 		if (d.metacopy)
d                 955 fs/overlayfs/namei.c 		if (d.metacopy && ctr) {
d                 975 fs/overlayfs/namei.c 		if (d.redirect && !ofs->config.redirect_follow) {
d                 981 fs/overlayfs/namei.c 		if (d.stop)
d                 984 fs/overlayfs/namei.c 		if (d.redirect && d.redirect[0] == '/' && poe != roe) {
d                 996 fs/overlayfs/namei.c 		if (d.metacopy) {
d                1007 fs/overlayfs/namei.c 	} else if (!d.is_dir && upperdentry && !ctr && origin_path) {
d                1032 fs/overlayfs/namei.c 	if (ctr && (!upperdentry || (!d.is_dir && !metacopy)))
d                1036 fs/overlayfs/namei.c 	    (!d.is_dir || ovl_index_all(dentry->d_sb))) {
d                1075 fs/overlayfs/namei.c 			.lowerdata = (ctr > 1 && !d.is_dir) ?
d                1092 fs/overlayfs/namei.c 	kfree(d.redirect);
d                1111 fs/overlayfs/namei.c 	kfree(d.redirect);
d                 123 fs/overlayfs/super.c 		struct dentry *d = oe->lowerstack[i].dentry;
d                 125 fs/overlayfs/super.c 		if (d->d_flags & DCACHE_OP_REVALIDATE) {
d                 126 fs/overlayfs/super.c 			ret = d->d_op->d_revalidate(d, flags);
d                 131 fs/overlayfs/super.c 					d_invalidate(d);
d                 146 fs/overlayfs/super.c 		struct dentry *d = oe->lowerstack[i].dentry;
d                 148 fs/overlayfs/super.c 		if (d->d_flags & DCACHE_OP_WEAK_REVALIDATE) {
d                 149 fs/overlayfs/super.c 			ret = d->d_op->d_weak_revalidate(d, flags);
d                 703 fs/overlayfs/super.c 	char *d = s;
d                 705 fs/overlayfs/super.c 	for (;; s++, d++) {
d                 708 fs/overlayfs/super.c 		*d = *s;
d                 838 fs/overlayfs/super.c 	char *s, *d;
d                 840 fs/overlayfs/super.c 	for (s = d = str;; s++, d++) {
d                 844 fs/overlayfs/super.c 			*d = '\0';
d                 848 fs/overlayfs/super.c 		*d = *s;
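The overlayfs/super.c excerpts above walk a mount-option string with a read pointer s and a write pointer d to strip escape characters in place. A standalone sketch of that copy-down loop, with simplified handling and assuming backslash escapes:

#include <stdio.h>

/* Unescape a string in place: d trails s and the backslash protecting
 * the next character is dropped. */
static void unescape(char *str)
{
	char *s, *d;

	for (s = d = str; *s; s++, d++) {
		if (*s == '\\' && s[1])
			s++;            /* skip the escape character */
		*d = *s;
	}
	*d = '\0';
}

int main(void)
{
	char buf[] = "lower\\:dir";
	unescape(buf);
	printf("%s\n", buf);            /* prints "lower:dir" */
	return 0;
}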
d                  65 fs/pnode.c     		struct mount *d = get_peer_under_root(m, mnt->mnt_ns, root);
d                  66 fs/pnode.c     		if (d)
d                  67 fs/pnode.c     			return d->mnt_group_id;
d                  32 fs/quota/quota_v1.c static void v1_disk2mem_dqblk(struct mem_dqblk *m, struct v1_disk_dqblk *d)
d                  34 fs/quota/quota_v1.c 	m->dqb_ihardlimit = d->dqb_ihardlimit;
d                  35 fs/quota/quota_v1.c 	m->dqb_isoftlimit = d->dqb_isoftlimit;
d                  36 fs/quota/quota_v1.c 	m->dqb_curinodes = d->dqb_curinodes;
d                  37 fs/quota/quota_v1.c 	m->dqb_bhardlimit = v1_qbtos(d->dqb_bhardlimit);
d                  38 fs/quota/quota_v1.c 	m->dqb_bsoftlimit = v1_qbtos(d->dqb_bsoftlimit);
d                  39 fs/quota/quota_v1.c 	m->dqb_curspace = v1_qbtos(d->dqb_curblocks);
d                  40 fs/quota/quota_v1.c 	m->dqb_itime = d->dqb_itime;
d                  41 fs/quota/quota_v1.c 	m->dqb_btime = d->dqb_btime;
d                  44 fs/quota/quota_v1.c static void v1_mem2disk_dqblk(struct v1_disk_dqblk *d, struct mem_dqblk *m)
d                  46 fs/quota/quota_v1.c 	d->dqb_ihardlimit = m->dqb_ihardlimit;
d                  47 fs/quota/quota_v1.c 	d->dqb_isoftlimit = m->dqb_isoftlimit;
d                  48 fs/quota/quota_v1.c 	d->dqb_curinodes = m->dqb_curinodes;
d                  49 fs/quota/quota_v1.c 	d->dqb_bhardlimit = v1_stoqb(m->dqb_bhardlimit);
d                  50 fs/quota/quota_v1.c 	d->dqb_bsoftlimit = v1_stoqb(m->dqb_bsoftlimit);
d                  51 fs/quota/quota_v1.c 	d->dqb_curblocks = v1_stoqb(m->dqb_curspace);
d                  52 fs/quota/quota_v1.c 	d->dqb_itime = m->dqb_itime;
d                  53 fs/quota/quota_v1.c 	d->dqb_btime = m->dqb_btime;
d                 200 fs/quota/quota_v2.c 	struct v2r0_disk_dqblk *d = dp, empty;
d                 203 fs/quota/quota_v2.c 	m->dqb_ihardlimit = le32_to_cpu(d->dqb_ihardlimit);
d                 204 fs/quota/quota_v2.c 	m->dqb_isoftlimit = le32_to_cpu(d->dqb_isoftlimit);
d                 205 fs/quota/quota_v2.c 	m->dqb_curinodes = le32_to_cpu(d->dqb_curinodes);
d                 206 fs/quota/quota_v2.c 	m->dqb_itime = le64_to_cpu(d->dqb_itime);
d                 207 fs/quota/quota_v2.c 	m->dqb_bhardlimit = v2_qbtos(le32_to_cpu(d->dqb_bhardlimit));
d                 208 fs/quota/quota_v2.c 	m->dqb_bsoftlimit = v2_qbtos(le32_to_cpu(d->dqb_bsoftlimit));
d                 209 fs/quota/quota_v2.c 	m->dqb_curspace = le64_to_cpu(d->dqb_curspace);
d                 210 fs/quota/quota_v2.c 	m->dqb_btime = le64_to_cpu(d->dqb_btime);
d                 220 fs/quota/quota_v2.c 	struct v2r0_disk_dqblk *d = dp;
d                 225 fs/quota/quota_v2.c 	d->dqb_ihardlimit = cpu_to_le32(m->dqb_ihardlimit);
d                 226 fs/quota/quota_v2.c 	d->dqb_isoftlimit = cpu_to_le32(m->dqb_isoftlimit);
d                 227 fs/quota/quota_v2.c 	d->dqb_curinodes = cpu_to_le32(m->dqb_curinodes);
d                 228 fs/quota/quota_v2.c 	d->dqb_itime = cpu_to_le64(m->dqb_itime);
d                 229 fs/quota/quota_v2.c 	d->dqb_bhardlimit = cpu_to_le32(v2_stoqb(m->dqb_bhardlimit));
d                 230 fs/quota/quota_v2.c 	d->dqb_bsoftlimit = cpu_to_le32(v2_stoqb(m->dqb_bsoftlimit));
d                 231 fs/quota/quota_v2.c 	d->dqb_curspace = cpu_to_le64(m->dqb_curspace);
d                 232 fs/quota/quota_v2.c 	d->dqb_btime = cpu_to_le64(m->dqb_btime);
d                 233 fs/quota/quota_v2.c 	d->dqb_id = cpu_to_le32(from_kqid(&init_user_ns, dquot->dq_id));
d                 235 fs/quota/quota_v2.c 		d->dqb_itime = cpu_to_le64(1);
d                 240 fs/quota/quota_v2.c 	struct v2r0_disk_dqblk *d = dp;
d                 247 fs/quota/quota_v2.c 				le32_to_cpu(d->dqb_id)),
d                 253 fs/quota/quota_v2.c 	struct v2r1_disk_dqblk *d = dp, empty;
d                 256 fs/quota/quota_v2.c 	m->dqb_ihardlimit = le64_to_cpu(d->dqb_ihardlimit);
d                 257 fs/quota/quota_v2.c 	m->dqb_isoftlimit = le64_to_cpu(d->dqb_isoftlimit);
d                 258 fs/quota/quota_v2.c 	m->dqb_curinodes = le64_to_cpu(d->dqb_curinodes);
d                 259 fs/quota/quota_v2.c 	m->dqb_itime = le64_to_cpu(d->dqb_itime);
d                 260 fs/quota/quota_v2.c 	m->dqb_bhardlimit = v2_qbtos(le64_to_cpu(d->dqb_bhardlimit));
d                 261 fs/quota/quota_v2.c 	m->dqb_bsoftlimit = v2_qbtos(le64_to_cpu(d->dqb_bsoftlimit));
d                 262 fs/quota/quota_v2.c 	m->dqb_curspace = le64_to_cpu(d->dqb_curspace);
d                 263 fs/quota/quota_v2.c 	m->dqb_btime = le64_to_cpu(d->dqb_btime);
d                 273 fs/quota/quota_v2.c 	struct v2r1_disk_dqblk *d = dp;
d                 278 fs/quota/quota_v2.c 	d->dqb_ihardlimit = cpu_to_le64(m->dqb_ihardlimit);
d                 279 fs/quota/quota_v2.c 	d->dqb_isoftlimit = cpu_to_le64(m->dqb_isoftlimit);
d                 280 fs/quota/quota_v2.c 	d->dqb_curinodes = cpu_to_le64(m->dqb_curinodes);
d                 281 fs/quota/quota_v2.c 	d->dqb_itime = cpu_to_le64(m->dqb_itime);
d                 282 fs/quota/quota_v2.c 	d->dqb_bhardlimit = cpu_to_le64(v2_stoqb(m->dqb_bhardlimit));
d                 283 fs/quota/quota_v2.c 	d->dqb_bsoftlimit = cpu_to_le64(v2_stoqb(m->dqb_bsoftlimit));
d                 284 fs/quota/quota_v2.c 	d->dqb_curspace = cpu_to_le64(m->dqb_curspace);
d                 285 fs/quota/quota_v2.c 	d->dqb_btime = cpu_to_le64(m->dqb_btime);
d                 286 fs/quota/quota_v2.c 	d->dqb_id = cpu_to_le32(from_kqid(&init_user_ns, dquot->dq_id));
d                 288 fs/quota/quota_v2.c 		d->dqb_itime = cpu_to_le64(1);
d                 293 fs/quota/quota_v2.c 	struct v2r1_disk_dqblk *d = dp;
d                 300 fs/quota/quota_v2.c 				le32_to_cpu(d->dqb_id)),
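Both quota_v2.c converters above are field-by-field copies between the little-endian on-disk dqblk and the native-endian in-memory dqblk, using le64_to_cpu and cpu_to_le64. A portable userspace sketch of the same idea, with a hypothetical two-field record and an explicit little-endian decoder standing in for le64_to_cpu:

#include <stdint.h>

struct disk_rec {                 /* fixed little-endian layout on disk */
	uint64_t hardlimit_le;
	uint64_t curcount_le;
};

struct mem_rec {                  /* native-endian, used by the code */
	uint64_t hardlimit;
	uint64_t curcount;
};

/* Decode a raw little-endian 64-bit field regardless of host order
 * (the in-kernel equivalent is le64_to_cpu). */
static uint64_t le64_to_host(uint64_t v)
{
	const uint8_t *b = (const uint8_t *)&v;
	uint64_t r = 0;
	int i;

	for (i = 7; i >= 0; i--)
		r = (r << 8) | b[i];   /* b[0] ends up least significant */
	return r;
}

static void disk2mem(struct mem_rec *m, const struct disk_rec *d)
{
	m->hardlimit = le64_to_host(d->hardlimit_le);
	m->curcount  = le64_to_host(d->curcount_le);
}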
d                  37 fs/reiserfs/hashes.c 			b1 += ((b0 << 4)+c) ^ (b0+sum) ^ ((b0 >> 5)+d);	\
d                  49 fs/reiserfs/hashes.c 	u32 a, b, c, d;
d                  66 fs/reiserfs/hashes.c 		d = (u32) msg[12] |
d                  85 fs/reiserfs/hashes.c 		d = pad;
d                  87 fs/reiserfs/hashes.c 			d <<= 8;
d                  88 fs/reiserfs/hashes.c 			d |= msg[i];
d                  96 fs/reiserfs/hashes.c 		c = d = pad;
d                 105 fs/reiserfs/hashes.c 		b = c = d = pad;
d                 111 fs/reiserfs/hashes.c 		a = b = c = d = pad;
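The reiserfs hash excerpts above (and the matching ocfs2/dir.c line earlier) load up to four 32-bit words a, b, c, d from the name being hashed, padding short tails, and stir them into two running halves with TEA-style rounds. A self-contained sketch of such a mixing round; the delta and round count here are the usual TEA values and are meant as illustration, not as the exact reiserfs parameters:

#include <stdint.h>

#define TEA_DELTA  0x9E3779B9u
#define TEA_ROUNDS 16

/* Stir the four key words a, b, c, d into the two running halves. */
static void tea_mix(uint32_t *h0, uint32_t *h1,
		    uint32_t a, uint32_t b, uint32_t c, uint32_t d)
{
	uint32_t b0 = *h0, b1 = *h1, sum = 0;
	int n;

	for (n = 0; n < TEA_ROUNDS; n++) {
		sum += TEA_DELTA;
		b0 += ((b1 << 4) + a) ^ (b1 + sum) ^ ((b1 >> 5) + b);
		b1 += ((b0 << 4) + c) ^ (b0 + sum) ^ ((b0 >> 5) + d);
	}
	*h0 = b0;
	*h1 = b1;
}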
d                2720 fs/reiserfs/reiserfs.h #define get_desc_trans_id(d)   le32_to_cpu((d)->j_trans_id)
d                2721 fs/reiserfs/reiserfs.h #define get_desc_trans_len(d)  le32_to_cpu((d)->j_len)
d                2722 fs/reiserfs/reiserfs.h #define get_desc_mount_id(d)   le32_to_cpu((d)->j_mount_id)
d                2724 fs/reiserfs/reiserfs.h #define set_desc_trans_id(d,val)       do { (d)->j_trans_id = cpu_to_le32 (val); } while (0)
d                2725 fs/reiserfs/reiserfs.h #define set_desc_trans_len(d,val)      do { (d)->j_len = cpu_to_le32 (val); } while (0)
d                2726 fs/reiserfs/reiserfs.h #define set_desc_mount_id(d,val)       do { (d)->j_mount_id = cpu_to_le32 (val); } while (0)
d                  26 fs/squashfs/xattr.c ssize_t squashfs_listxattr(struct dentry *d, char *buffer,
d                  29 fs/squashfs/xattr.c 	struct inode *inode = d_inode(d);
d                  57 fs/squashfs/xattr.c 		if (handler && (!handler->list || handler->list(d))) {
d                 225 fs/squashfs/xattr.c static bool squashfs_trusted_xattr_handler_list(struct dentry *d)
d                 208 fs/sysv/sysv.h static inline __fs32 fs32_add(struct sysv_sb_info *sbi, __fs32 *n, int d)
d                 211 fs/sysv/sysv.h 		*(__u32*)n = PDP_swab(PDP_swab(*(__u32*)n)+d);
d                 213 fs/sysv/sysv.h 		le32_add_cpu((__le32 *)n, d);
d                 215 fs/sysv/sysv.h 		be32_add_cpu((__be32 *)n, d);
d                 235 fs/sysv/sysv.h static inline __fs16 fs16_add(struct sysv_sb_info *sbi, __fs16 *n, int d)
d                 238 fs/sysv/sysv.h 		le16_add_cpu((__le16 *)n, d);
d                 240 fs/sysv/sysv.h 		be16_add_cpu((__be16 *)n, d);
d                 518 fs/ubifs/commit.c 	struct ubifs_debug_info *d = c->dbg;
d                 520 fs/ubifs/commit.c 	d->old_zroot = *zroot;
d                 521 fs/ubifs/commit.c 	lnum = d->old_zroot.lnum;
d                 522 fs/ubifs/commit.c 	offs = d->old_zroot.offs;
d                 523 fs/ubifs/commit.c 	len = d->old_zroot.len;
d                 533 fs/ubifs/commit.c 	d->old_zroot_level = le16_to_cpu(idx->level);
d                 534 fs/ubifs/commit.c 	d->old_zroot_sqnum = le64_to_cpu(idx->ch.sqnum);
d                 557 fs/ubifs/commit.c 	struct ubifs_debug_info *d = c->dbg;
d                 574 fs/ubifs/commit.c 	lnum = d->old_zroot.lnum;
d                 575 fs/ubifs/commit.c 	offs = d->old_zroot.offs;
d                 576 fs/ubifs/commit.c 	len = d->old_zroot.len;
d                 609 fs/ubifs/commit.c 			if (le16_to_cpu(idx->level) != d->old_zroot_level) {
d                 613 fs/ubifs/commit.c 			if (le64_to_cpu(idx->ch.sqnum) != d->old_zroot_sqnum) {
d                 970 fs/ubifs/debug.c 	struct ubifs_debug_info *d = c->dbg;
d                 974 fs/ubifs/debug.c 	memcpy(&d->saved_lst, &c->lst, sizeof(struct ubifs_lp_stats));
d                 975 fs/ubifs/debug.c 	memcpy(&d->saved_bi, &c->bi, sizeof(struct ubifs_budg_info));
d                 976 fs/ubifs/debug.c 	d->saved_idx_gc_cnt = c->idx_gc_cnt;
d                1004 fs/ubifs/debug.c 	d->saved_free = ubifs_get_free_space_nolock(c);
d                1020 fs/ubifs/debug.c 	struct ubifs_debug_info *d = c->dbg;
d                1032 fs/ubifs/debug.c 	if (free != d->saved_free) {
d                1034 fs/ubifs/debug.c 			  d->saved_free, free);
d                1042 fs/ubifs/debug.c 	ubifs_dump_lstats(&d->saved_lst);
d                1044 fs/ubifs/debug.c 	ubifs_dump_budg(c, &d->saved_bi);
d                1045 fs/ubifs/debug.c 	ubifs_msg(c, "saved idx_gc_cnt %d", d->saved_idx_gc_cnt);
d                2455 fs/ubifs/debug.c 	struct ubifs_debug_info *d = c->dbg;
d                2459 fs/ubifs/debug.c 	if (!d->pc_cnt) {
d                2465 fs/ubifs/debug.c 				d->pc_delay = 1;
d                2468 fs/ubifs/debug.c 				d->pc_timeout = jiffies;
d                2469 fs/ubifs/debug.c 				d->pc_timeout += msecs_to_jiffies(delay);
d                2472 fs/ubifs/debug.c 				d->pc_delay = 2;
d                2475 fs/ubifs/debug.c 				d->pc_cnt_max = delay;
d                2480 fs/ubifs/debug.c 		d->pc_cnt += 1;
d                2484 fs/ubifs/debug.c 	if (d->pc_delay == 1 && time_before(jiffies, d->pc_timeout))
d                2486 fs/ubifs/debug.c 	if (d->pc_delay == 2 && d->pc_cnt++ < d->pc_cnt_max)
d                2541 fs/ubifs/debug.c 	d->pc_happened = 1;
d                2683 fs/ubifs/debug.c 	struct ubifs_debug_info *d = c->dbg;
d                2686 fs/ubifs/debug.c 	if (dent == d->dfs_chk_gen)
d                2687 fs/ubifs/debug.c 		val = d->chk_gen;
d                2688 fs/ubifs/debug.c 	else if (dent == d->dfs_chk_index)
d                2689 fs/ubifs/debug.c 		val = d->chk_index;
d                2690 fs/ubifs/debug.c 	else if (dent == d->dfs_chk_orph)
d                2691 fs/ubifs/debug.c 		val = d->chk_orph;
d                2692 fs/ubifs/debug.c 	else if (dent == d->dfs_chk_lprops)
d                2693 fs/ubifs/debug.c 		val = d->chk_lprops;
d                2694 fs/ubifs/debug.c 	else if (dent == d->dfs_chk_fs)
d                2695 fs/ubifs/debug.c 		val = d->chk_fs;
d                2696 fs/ubifs/debug.c 	else if (dent == d->dfs_tst_rcvry)
d                2697 fs/ubifs/debug.c 		val = d->tst_rcvry;
d                2698 fs/ubifs/debug.c 	else if (dent == d->dfs_ro_error)
d                2736 fs/ubifs/debug.c 	struct ubifs_debug_info *d = c->dbg;
d                2752 fs/ubifs/debug.c 	if (file->f_path.dentry == d->dfs_dump_lprops) {
d                2756 fs/ubifs/debug.c 	if (file->f_path.dentry == d->dfs_dump_budg) {
d                2760 fs/ubifs/debug.c 	if (file->f_path.dentry == d->dfs_dump_tnc) {
d                2771 fs/ubifs/debug.c 	if (dent == d->dfs_chk_gen)
d                2772 fs/ubifs/debug.c 		d->chk_gen = val;
d                2773 fs/ubifs/debug.c 	else if (dent == d->dfs_chk_index)
d                2774 fs/ubifs/debug.c 		d->chk_index = val;
d                2775 fs/ubifs/debug.c 	else if (dent == d->dfs_chk_orph)
d                2776 fs/ubifs/debug.c 		d->chk_orph = val;
d                2777 fs/ubifs/debug.c 	else if (dent == d->dfs_chk_lprops)
d                2778 fs/ubifs/debug.c 		d->chk_lprops = val;
d                2779 fs/ubifs/debug.c 	else if (dent == d->dfs_chk_fs)
d                2780 fs/ubifs/debug.c 		d->chk_fs = val;
d                2781 fs/ubifs/debug.c 	else if (dent == d->dfs_tst_rcvry)
d                2782 fs/ubifs/debug.c 		d->tst_rcvry = val;
d                2783 fs/ubifs/debug.c 	else if (dent == d->dfs_ro_error)
d                2814 fs/ubifs/debug.c 	struct ubifs_debug_info *d = c->dbg;
d                2816 fs/ubifs/debug.c 	n = snprintf(d->dfs_dir_name, UBIFS_DFS_DIR_LEN + 1, UBIFS_DFS_DIR_NAME,
d                2823 fs/ubifs/debug.c 	fname = d->dfs_dir_name;
d                2824 fs/ubifs/debug.c 	d->dfs_dir = debugfs_create_dir(fname, dfs_rootdir);
d                2827 fs/ubifs/debug.c 	d->dfs_dump_lprops = debugfs_create_file(fname, S_IWUSR, d->dfs_dir, c,
d                2831 fs/ubifs/debug.c 	d->dfs_dump_budg = debugfs_create_file(fname, S_IWUSR, d->dfs_dir, c,
d                2835 fs/ubifs/debug.c 	d->dfs_dump_tnc = debugfs_create_file(fname, S_IWUSR, d->dfs_dir, c,
d                2839 fs/ubifs/debug.c 	d->dfs_chk_gen = debugfs_create_file(fname, S_IRUSR | S_IWUSR,
d                2840 fs/ubifs/debug.c 					     d->dfs_dir, c, &dfs_fops);
d                2843 fs/ubifs/debug.c 	d->dfs_chk_index = debugfs_create_file(fname, S_IRUSR | S_IWUSR,
d                2844 fs/ubifs/debug.c 					       d->dfs_dir, c, &dfs_fops);
d                2847 fs/ubifs/debug.c 	d->dfs_chk_orph = debugfs_create_file(fname, S_IRUSR | S_IWUSR,
d                2848 fs/ubifs/debug.c 					      d->dfs_dir, c, &dfs_fops);
d                2851 fs/ubifs/debug.c 	d->dfs_chk_lprops = debugfs_create_file(fname, S_IRUSR | S_IWUSR,
d                2852 fs/ubifs/debug.c 						d->dfs_dir, c, &dfs_fops);
d                2855 fs/ubifs/debug.c 	d->dfs_chk_fs = debugfs_create_file(fname, S_IRUSR | S_IWUSR,
d                2856 fs/ubifs/debug.c 					    d->dfs_dir, c, &dfs_fops);
d                2859 fs/ubifs/debug.c 	d->dfs_tst_rcvry = debugfs_create_file(fname, S_IRUSR | S_IWUSR,
d                2860 fs/ubifs/debug.c 					       d->dfs_dir, c, &dfs_fops);
d                2863 fs/ubifs/debug.c 	d->dfs_ro_error = debugfs_create_file(fname, S_IRUSR | S_IWUSR,
d                2864 fs/ubifs/debug.c 					      d->dfs_dir, c, &dfs_fops);
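The ubifs/debug.c excerpts above create one debugfs directory per filesystem instance plus a set of control files, and the read/write handlers dispatch by comparing the file's dentry against the stored d->dfs_* pointers. A minimal kernel-context sketch of creating such a directory and one file; "myfs", the structure, and the function names are hypothetical, not the UBIFS code:

#include <linux/debugfs.h>
#include <linux/fs.h>

/* Keep the dentry pointers so a shared handler can tell the knobs apart. */
struct my_debug_info {
	struct dentry *dfs_dir;
	struct dentry *dfs_chk_gen;
	unsigned int chk_gen;
};

static void my_debugfs_init(struct my_debug_info *d,
			    const struct file_operations *fops)
{
	d->dfs_dir = debugfs_create_dir("myfs", NULL);
	d->dfs_chk_gen = debugfs_create_file("chk_general", 0600,
					     d->dfs_dir, d, fops);
}

static void my_debugfs_exit(struct my_debug_info *d)
{
	debugfs_remove_recursive(d->dfs_dir);
}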
d                1746 fs/ubifs/lpt_commit.c 	struct ubifs_debug_info *d = c->dbg;
d                1755 fs/ubifs/lpt_commit.c 		d->chk_lpt_sz = 0;
d                1756 fs/ubifs/lpt_commit.c 		d->chk_lpt_sz2 = 0;
d                1757 fs/ubifs/lpt_commit.c 		d->chk_lpt_lebs = 0;
d                1758 fs/ubifs/lpt_commit.c 		d->chk_lpt_wastage = 0;
d                1771 fs/ubifs/lpt_commit.c 		d->chk_lpt_sz += len;
d                1774 fs/ubifs/lpt_commit.c 		d->chk_lpt_sz += len;
d                1775 fs/ubifs/lpt_commit.c 		d->chk_lpt_wastage += len;
d                1776 fs/ubifs/lpt_commit.c 		d->chk_lpt_lebs += 1;
d                1780 fs/ubifs/lpt_commit.c 		chk_lpt_sz *= d->chk_lpt_lebs;
d                1782 fs/ubifs/lpt_commit.c 		if (d->chk_lpt_sz != chk_lpt_sz) {
d                1784 fs/ubifs/lpt_commit.c 				  d->chk_lpt_sz, chk_lpt_sz);
d                1787 fs/ubifs/lpt_commit.c 		if (d->chk_lpt_sz > c->lpt_sz) {
d                1789 fs/ubifs/lpt_commit.c 				  d->chk_lpt_sz, c->lpt_sz);
d                1792 fs/ubifs/lpt_commit.c 		if (d->chk_lpt_sz2 && d->chk_lpt_sz != d->chk_lpt_sz2) {
d                1794 fs/ubifs/lpt_commit.c 				  d->chk_lpt_sz, d->chk_lpt_sz2);
d                1797 fs/ubifs/lpt_commit.c 		if (d->chk_lpt_sz2 && d->new_nhead_offs != len) {
d                1799 fs/ubifs/lpt_commit.c 				  d->new_nhead_offs, len);
d                1807 fs/ubifs/lpt_commit.c 		if (d->chk_lpt_sz - d->chk_lpt_wastage > lpt_sz) {
d                1809 fs/ubifs/lpt_commit.c 				  d->chk_lpt_sz, d->chk_lpt_wastage, lpt_sz);
d                1817 fs/ubifs/lpt_commit.c 		d->chk_lpt_sz2 = d->chk_lpt_sz;
d                1818 fs/ubifs/lpt_commit.c 		d->chk_lpt_sz = 0;
d                1819 fs/ubifs/lpt_commit.c 		d->chk_lpt_wastage = 0;
d                1820 fs/ubifs/lpt_commit.c 		d->chk_lpt_lebs = 0;
d                1821 fs/ubifs/lpt_commit.c 		d->new_nhead_offs = len;
d                1824 fs/ubifs/lpt_commit.c 		d->chk_lpt_sz += len;
d                1825 fs/ubifs/lpt_commit.c 		d->chk_lpt_wastage += len;
d                 336 fs/ubifs/tnc_commit.c 	int d;
d                 342 fs/ubifs/tnc_commit.c 	d = c->leb_size / c->max_idx_node_sz;
d                 343 fs/ubifs/tnc_commit.c 	return DIV_ROUND_UP(cnt, d);
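The tnc_commit.c excerpt above estimates how many eraseblocks the index needs: it divides the LEB size by the maximum index-node size to get nodes per LEB, then rounds the node count up by that divisor. A worked standalone example with illustrative numbers:

#include <stdio.h>

#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
	long leb_size = 126976, max_node_sz = 4096, cnt = 1000;
	long per_leb = leb_size / max_node_sz;   /* 31 nodes fit per LEB */
	long lebs = DIV_ROUND_UP(cnt, per_leb);  /* (1000 + 30) / 31 = 33 */

	printf("%ld LEBs\n", lebs);
	return 0;
}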
d                  63 fs/ufs/swab.h  fs32_add(struct super_block *sbp, __fs32 *n, int d)
d                  66 fs/ufs/swab.h  		le32_add_cpu((__le32 *)n, d);
d                  68 fs/ufs/swab.h  		be32_add_cpu((__be32 *)n, d);
d                  72 fs/ufs/swab.h  fs32_sub(struct super_block *sbp, __fs32 *n, int d)
d                  75 fs/ufs/swab.h  		le32_add_cpu((__le32 *)n, -d);
d                  77 fs/ufs/swab.h  		be32_add_cpu((__be32 *)n, -d);
d                  99 fs/ufs/swab.h  fs16_add(struct super_block *sbp, __fs16 *n, int d)
d                 102 fs/ufs/swab.h  		le16_add_cpu((__le16 *)n, d);
d                 104 fs/ufs/swab.h  		be16_add_cpu((__be16 *)n, d);
d                 108 fs/ufs/swab.h  fs16_sub(struct super_block *sbp, __fs16 *n, int d)
d                 111 fs/ufs/swab.h  		le16_add_cpu((__le16 *)n, -d);
d                 113 fs/ufs/swab.h  		be16_add_cpu((__be16 *)n, -d);
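The sysv and ufs helpers above apply an increment d to an on-disk 16- or 32-bit counter in whatever byte order the superblock records, via le*_add_cpu/be*_add_cpu. A portable sketch of that read-modify-write in an explicit byte order; this is illustrative, not the kernel helpers:

#include <stdint.h>

enum byte_order { FS_LITTLE, FS_BIG };

static uint32_t load32(const uint8_t *p, enum byte_order bo)
{
	if (bo == FS_LITTLE)
		return (uint32_t)p[0] | p[1] << 8 | p[2] << 16 |
		       (uint32_t)p[3] << 24;
	return (uint32_t)p[3] | p[2] << 8 | p[1] << 16 |
	       (uint32_t)p[0] << 24;
}

static void store32(uint8_t *p, uint32_t v, enum byte_order bo)
{
	int i;

	for (i = 0; i < 4; i++)
		p[bo == FS_LITTLE ? i : 3 - i] = (v >> (8 * i)) & 0xff;
}

/* Add d to the raw on-disk counter n, preserving its byte order. */
static void fs32_add(uint8_t *n, int d, enum byte_order bo)
{
	store32(n, load32(n, bo) + d, bo);
}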
d                  44 fs/verity/signature.c 	struct fsverity_signed_digest *d;
d                  61 fs/verity/signature.c 	d = kzalloc(sizeof(*d) + hash_alg->digest_size, GFP_KERNEL);
d                  62 fs/verity/signature.c 	if (!d)
d                  64 fs/verity/signature.c 	memcpy(d->magic, "FSVerity", 8);
d                  65 fs/verity/signature.c 	d->digest_algorithm = cpu_to_le16(hash_alg - fsverity_hash_algs);
d                  66 fs/verity/signature.c 	d->digest_size = cpu_to_le16(hash_alg->digest_size);
d                  67 fs/verity/signature.c 	memcpy(d->digest, vi->measurement, hash_alg->digest_size);
d                  69 fs/verity/signature.c 	err = verify_pkcs7_signature(d, sizeof(*d) + hash_alg->digest_size,
d                  74 fs/verity/signature.c 	kfree(d);
d                 414 fs/xattr.c     setxattr(struct dentry *d, const char __user *name, const void __user *value,
d                 444 fs/xattr.c     			error = cap_convert_nscap(d, &kvalue, size);
d                 451 fs/xattr.c     	error = vfs_setxattr(d, kname, kvalue, size, flags);
d                 517 fs/xattr.c     getxattr(struct dentry *d, const char __user *name, void __user *value,
d                 538 fs/xattr.c     	error = vfs_getxattr(d, kname, kvalue, size);
d                 605 fs/xattr.c     listxattr(struct dentry *d, char __user *list, size_t size)
d                 618 fs/xattr.c     	error = vfs_listxattr(d, klist, size);
d                 680 fs/xattr.c     removexattr(struct dentry *d, const char __user *name)
d                 691 fs/xattr.c     	return vfs_removexattr(d, kname);
d                 693 fs/xfs/libxfs/xfs_btree.c 	xfs_daddr_t		d;		/* real disk block address */
d                 696 fs/xfs/libxfs/xfs_btree.c 	d = XFS_FSB_TO_DADDR(mp, fsbno);
d                 697 fs/xfs/libxfs/xfs_btree.c 	return xfs_trans_get_buf(tp, mp->m_ddev_targp, d, mp->m_bsize, 0);
d                 711 fs/xfs/libxfs/xfs_btree.c 	xfs_daddr_t		d;		/* real disk block address */
d                 715 fs/xfs/libxfs/xfs_btree.c 	d = XFS_AGB_TO_DADDR(mp, agno, agbno);
d                 716 fs/xfs/libxfs/xfs_btree.c 	return xfs_trans_get_buf(tp, mp->m_ddev_targp, d, mp->m_bsize, 0);
d                 848 fs/xfs/libxfs/xfs_btree.c 	xfs_daddr_t		d;		/* real disk block address */
d                 853 fs/xfs/libxfs/xfs_btree.c 	d = XFS_FSB_TO_DADDR(mp, fsbno);
d                 854 fs/xfs/libxfs/xfs_btree.c 	error = xfs_trans_read_buf(mp, tp, mp->m_ddev_targp, d,
d                 876 fs/xfs/libxfs/xfs_btree.c 	xfs_daddr_t		d;
d                 879 fs/xfs/libxfs/xfs_btree.c 	d = XFS_FSB_TO_DADDR(mp, fsbno);
d                 880 fs/xfs/libxfs/xfs_btree.c 	xfs_buf_readahead(mp->m_ddev_targp, d, mp->m_bsize * count, ops);
d                 896 fs/xfs/libxfs/xfs_btree.c 	xfs_daddr_t		d;
d                 900 fs/xfs/libxfs/xfs_btree.c 	d = XFS_AGB_TO_DADDR(mp, agno, agbno);
d                 901 fs/xfs/libxfs/xfs_btree.c 	xfs_buf_readahead(mp->m_ddev_targp, d, mp->m_bsize * count, ops);
d                1288 fs/xfs/libxfs/xfs_btree.c 	xfs_daddr_t		d;
d                1291 fs/xfs/libxfs/xfs_btree.c 	error = xfs_btree_ptr_to_daddr(cur, ptr, &d);
d                1294 fs/xfs/libxfs/xfs_btree.c 	*bpp = xfs_trans_get_buf(cur->bc_tp, mp->m_ddev_targp, d,
d                1318 fs/xfs/libxfs/xfs_btree.c 	xfs_daddr_t		d;
d                1324 fs/xfs/libxfs/xfs_btree.c 	error = xfs_btree_ptr_to_daddr(cur, ptr, &d);
d                1327 fs/xfs/libxfs/xfs_btree.c 	error = xfs_trans_read_buf(mp, cur->bc_tp, mp->m_ddev_targp, d,
d                 142 fs/xfs/libxfs/xfs_dquot_buf.c 	struct xfs_dqblk	*d = (struct xfs_dqblk *)bp->b_addr;
d                 159 fs/xfs/libxfs/xfs_dquot_buf.c 	for (i = 0; i < ndquots; i++, d++) {
d                 160 fs/xfs/libxfs/xfs_dquot_buf.c 		if (!xfs_verify_cksum((char *)d, sizeof(struct xfs_dqblk),
d                 164 fs/xfs/libxfs/xfs_dquot_buf.c 					d, sizeof(*d), __this_address);
d                 565 fs/xfs/libxfs/xfs_format.h #define	XFS_HDR_BLOCK(mp,d)	((xfs_agblock_t)XFS_BB_TO_FSBT(mp,d))
d                 566 fs/xfs/libxfs/xfs_format.h #define	XFS_DADDR_TO_FSB(mp,d)	XFS_AGB_TO_FSB(mp, \
d                 567 fs/xfs/libxfs/xfs_format.h 			xfs_daddr_to_agno(mp,d), xfs_daddr_to_agbno(mp,d))
d                 813 fs/xfs/libxfs/xfs_format.h #define	XFS_AG_DADDR(mp,agno,d)		(XFS_AGB_TO_DADDR(mp, agno, 0) + (d))
d                 819 fs/xfs/libxfs/xfs_format.h #define	XFS_AG_CHECK_DADDR(mp,d,len)	\
d                 821 fs/xfs/libxfs/xfs_format.h 	    ASSERT((d) == XFS_SB_DADDR || \
d                 822 fs/xfs/libxfs/xfs_format.h 		   xfs_daddr_to_agbno(mp, d) != XFS_SB_DADDR) : \
d                 823 fs/xfs/libxfs/xfs_format.h 	    ASSERT(xfs_daddr_to_agno(mp, d) == \
d                 824 fs/xfs/libxfs/xfs_format.h 		   xfs_daddr_to_agno(mp, (d) + (len) - 1)))
d                 277 fs/xfs/libxfs/xfs_ialloc.c 	xfs_daddr_t		d;
d                 328 fs/xfs/libxfs/xfs_ialloc.c 		d = XFS_AGB_TO_DADDR(mp, agno, agbno +
d                 330 fs/xfs/libxfs/xfs_ialloc.c 		fbuf = xfs_trans_get_buf(tp, mp->m_ddev_targp, d,
d                 233 fs/xfs/libxfs/xfs_rmap_btree.c 	int64_t			d;
d                 235 fs/xfs/libxfs/xfs_rmap_btree.c 	d = (int64_t)be32_to_cpu(kp->rm_startblock) - rec->rm_startblock;
d                 236 fs/xfs/libxfs/xfs_rmap_btree.c 	if (d)
d                 237 fs/xfs/libxfs/xfs_rmap_btree.c 		return d;
d                 263 fs/xfs/libxfs/xfs_rmap_btree.c 	int64_t			d;
d                 266 fs/xfs/libxfs/xfs_rmap_btree.c 	d = (int64_t)be32_to_cpu(kp1->rm_startblock) -
d                 268 fs/xfs/libxfs/xfs_rmap_btree.c 	if (d)
d                 269 fs/xfs/libxfs/xfs_rmap_btree.c 		return d;
d                  82 fs/xfs/scrub/quota.c 	struct xfs_disk_dquot	*d = &dq->q_core;
d                  95 fs/xfs/scrub/quota.c 	xfs_dqid_t		id = be32_to_cpu(d->d_id);
d                 108 fs/xfs/scrub/quota.c 	if (dqtype != (d->d_flags & XFS_DQ_ALLTYPES))
d                 111 fs/xfs/scrub/quota.c 	if (d->d_pad0 != cpu_to_be32(0) || d->d_pad != cpu_to_be16(0))
d                 115 fs/xfs/scrub/quota.c 	bhard = be64_to_cpu(d->d_blk_hardlimit);
d                 116 fs/xfs/scrub/quota.c 	ihard = be64_to_cpu(d->d_ino_hardlimit);
d                 117 fs/xfs/scrub/quota.c 	rhard = be64_to_cpu(d->d_rtb_hardlimit);
d                 119 fs/xfs/scrub/quota.c 	bsoft = be64_to_cpu(d->d_blk_softlimit);
d                 120 fs/xfs/scrub/quota.c 	isoft = be64_to_cpu(d->d_ino_softlimit);
d                 121 fs/xfs/scrub/quota.c 	rsoft = be64_to_cpu(d->d_rtb_softlimit);
d                 147 fs/xfs/scrub/quota.c 	bcount = be64_to_cpu(d->d_bcount);
d                 148 fs/xfs/scrub/quota.c 	icount = be64_to_cpu(d->d_icount);
d                 149 fs/xfs/scrub/quota.c 	rcount = be64_to_cpu(d->d_rtbcount);
d                  73 fs/xfs/xfs_dquot.c 	struct xfs_disk_dquot	*d = &dq->q_core;
d                  77 fs/xfs/xfs_dquot.c 	ASSERT(d->d_id);
d                  80 fs/xfs/xfs_dquot.c 	if (defq->bsoftlimit && !d->d_blk_softlimit) {
d                  81 fs/xfs/xfs_dquot.c 		d->d_blk_softlimit = cpu_to_be64(defq->bsoftlimit);
d                  84 fs/xfs/xfs_dquot.c 	if (defq->bhardlimit && !d->d_blk_hardlimit) {
d                  85 fs/xfs/xfs_dquot.c 		d->d_blk_hardlimit = cpu_to_be64(defq->bhardlimit);
d                  88 fs/xfs/xfs_dquot.c 	if (defq->isoftlimit && !d->d_ino_softlimit)
d                  89 fs/xfs/xfs_dquot.c 		d->d_ino_softlimit = cpu_to_be64(defq->isoftlimit);
d                  90 fs/xfs/xfs_dquot.c 	if (defq->ihardlimit && !d->d_ino_hardlimit)
d                  91 fs/xfs/xfs_dquot.c 		d->d_ino_hardlimit = cpu_to_be64(defq->ihardlimit);
d                  92 fs/xfs/xfs_dquot.c 	if (defq->rtbsoftlimit && !d->d_rtb_softlimit)
d                  93 fs/xfs/xfs_dquot.c 		d->d_rtb_softlimit = cpu_to_be64(defq->rtbsoftlimit);
d                  94 fs/xfs/xfs_dquot.c 	if (defq->rtbhardlimit && !d->d_rtb_hardlimit)
d                  95 fs/xfs/xfs_dquot.c 		d->d_rtb_hardlimit = cpu_to_be64(defq->rtbhardlimit);
d                 117 fs/xfs/xfs_dquot.c 	xfs_disk_dquot_t	*d)
d                 119 fs/xfs/xfs_dquot.c 	ASSERT(d->d_id);
d                 122 fs/xfs/xfs_dquot.c 	if (d->d_blk_hardlimit)
d                 123 fs/xfs/xfs_dquot.c 		ASSERT(be64_to_cpu(d->d_blk_softlimit) <=
d                 124 fs/xfs/xfs_dquot.c 		       be64_to_cpu(d->d_blk_hardlimit));
d                 125 fs/xfs/xfs_dquot.c 	if (d->d_ino_hardlimit)
d                 126 fs/xfs/xfs_dquot.c 		ASSERT(be64_to_cpu(d->d_ino_softlimit) <=
d                 127 fs/xfs/xfs_dquot.c 		       be64_to_cpu(d->d_ino_hardlimit));
d                 128 fs/xfs/xfs_dquot.c 	if (d->d_rtb_hardlimit)
d                 129 fs/xfs/xfs_dquot.c 		ASSERT(be64_to_cpu(d->d_rtb_softlimit) <=
d                 130 fs/xfs/xfs_dquot.c 		       be64_to_cpu(d->d_rtb_hardlimit));
d                 133 fs/xfs/xfs_dquot.c 	if (!d->d_btimer) {
d                 134 fs/xfs/xfs_dquot.c 		if ((d->d_blk_softlimit &&
d                 135 fs/xfs/xfs_dquot.c 		     (be64_to_cpu(d->d_bcount) >
d                 136 fs/xfs/xfs_dquot.c 		      be64_to_cpu(d->d_blk_softlimit))) ||
d                 137 fs/xfs/xfs_dquot.c 		    (d->d_blk_hardlimit &&
d                 138 fs/xfs/xfs_dquot.c 		     (be64_to_cpu(d->d_bcount) >
d                 139 fs/xfs/xfs_dquot.c 		      be64_to_cpu(d->d_blk_hardlimit)))) {
d                 140 fs/xfs/xfs_dquot.c 			d->d_btimer = cpu_to_be32(get_seconds() +
d                 143 fs/xfs/xfs_dquot.c 			d->d_bwarns = 0;
d                 146 fs/xfs/xfs_dquot.c 		if ((!d->d_blk_softlimit ||
d                 147 fs/xfs/xfs_dquot.c 		     (be64_to_cpu(d->d_bcount) <=
d                 148 fs/xfs/xfs_dquot.c 		      be64_to_cpu(d->d_blk_softlimit))) &&
d                 149 fs/xfs/xfs_dquot.c 		    (!d->d_blk_hardlimit ||
d                 150 fs/xfs/xfs_dquot.c 		    (be64_to_cpu(d->d_bcount) <=
d                 151 fs/xfs/xfs_dquot.c 		     be64_to_cpu(d->d_blk_hardlimit)))) {
d                 152 fs/xfs/xfs_dquot.c 			d->d_btimer = 0;
d                 156 fs/xfs/xfs_dquot.c 	if (!d->d_itimer) {
d                 157 fs/xfs/xfs_dquot.c 		if ((d->d_ino_softlimit &&
d                 158 fs/xfs/xfs_dquot.c 		     (be64_to_cpu(d->d_icount) >
d                 159 fs/xfs/xfs_dquot.c 		      be64_to_cpu(d->d_ino_softlimit))) ||
d                 160 fs/xfs/xfs_dquot.c 		    (d->d_ino_hardlimit &&
d                 161 fs/xfs/xfs_dquot.c 		     (be64_to_cpu(d->d_icount) >
d                 162 fs/xfs/xfs_dquot.c 		      be64_to_cpu(d->d_ino_hardlimit)))) {
d                 163 fs/xfs/xfs_dquot.c 			d->d_itimer = cpu_to_be32(get_seconds() +
d                 166 fs/xfs/xfs_dquot.c 			d->d_iwarns = 0;
d                 169 fs/xfs/xfs_dquot.c 		if ((!d->d_ino_softlimit ||
d                 170 fs/xfs/xfs_dquot.c 		     (be64_to_cpu(d->d_icount) <=
d                 171 fs/xfs/xfs_dquot.c 		      be64_to_cpu(d->d_ino_softlimit)))  &&
d                 172 fs/xfs/xfs_dquot.c 		    (!d->d_ino_hardlimit ||
d                 173 fs/xfs/xfs_dquot.c 		     (be64_to_cpu(d->d_icount) <=
d                 174 fs/xfs/xfs_dquot.c 		      be64_to_cpu(d->d_ino_hardlimit)))) {
d                 175 fs/xfs/xfs_dquot.c 			d->d_itimer = 0;
d                 179 fs/xfs/xfs_dquot.c 	if (!d->d_rtbtimer) {
d                 180 fs/xfs/xfs_dquot.c 		if ((d->d_rtb_softlimit &&
d                 181 fs/xfs/xfs_dquot.c 		     (be64_to_cpu(d->d_rtbcount) >
d                 182 fs/xfs/xfs_dquot.c 		      be64_to_cpu(d->d_rtb_softlimit))) ||
d                 183 fs/xfs/xfs_dquot.c 		    (d->d_rtb_hardlimit &&
d                 184 fs/xfs/xfs_dquot.c 		     (be64_to_cpu(d->d_rtbcount) >
d                 185 fs/xfs/xfs_dquot.c 		      be64_to_cpu(d->d_rtb_hardlimit)))) {
d                 186 fs/xfs/xfs_dquot.c 			d->d_rtbtimer = cpu_to_be32(get_seconds() +
d                 189 fs/xfs/xfs_dquot.c 			d->d_rtbwarns = 0;
d                 192 fs/xfs/xfs_dquot.c 		if ((!d->d_rtb_softlimit ||
d                 193 fs/xfs/xfs_dquot.c 		     (be64_to_cpu(d->d_rtbcount) <=
d                 194 fs/xfs/xfs_dquot.c 		      be64_to_cpu(d->d_rtb_softlimit))) &&
d                 195 fs/xfs/xfs_dquot.c 		    (!d->d_rtb_hardlimit ||
d                 196 fs/xfs/xfs_dquot.c 		     (be64_to_cpu(d->d_rtbcount) <=
d                 197 fs/xfs/xfs_dquot.c 		      be64_to_cpu(d->d_rtb_hardlimit)))) {
d                 198 fs/xfs/xfs_dquot.c 			d->d_rtbtimer = 0;
d                 215 fs/xfs/xfs_dquot.c 	xfs_dqblk_t	*d;
d                 222 fs/xfs/xfs_dquot.c 	d = bp->b_addr;
d                 228 fs/xfs/xfs_dquot.c 	memset(d, 0, BBTOB(q->qi_dqchunklen));
d                 229 fs/xfs/xfs_dquot.c 	for (i = 0; i < q->qi_dqperchunk; i++, d++, curid++) {
d                 230 fs/xfs/xfs_dquot.c 		d->dd_diskdq.d_magic = cpu_to_be16(XFS_DQUOT_MAGIC);
d                 231 fs/xfs/xfs_dquot.c 		d->dd_diskdq.d_version = XFS_DQUOT_VERSION;
d                 232 fs/xfs/xfs_dquot.c 		d->dd_diskdq.d_id = cpu_to_be32(curid);
d                 233 fs/xfs/xfs_dquot.c 		d->dd_diskdq.d_flags = type;
d                 235 fs/xfs/xfs_dquot.c 			uuid_copy(&d->dd_uuid, &mp->m_sb.sb_meta_uuid);
d                 236 fs/xfs/xfs_dquot.c 			xfs_update_cksum((char *)d, sizeof(struct xfs_dqblk),
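The timer-adjustment excerpts earlier in the xfs_dquot.c block apply one rule per resource: if no grace timer is running and usage is over a non-zero soft or hard limit, start one; if a timer is running and usage is back under both limits, clear it. A condensed standalone sketch of that rule for a single resource, with simplified field names:

#include <stdint.h>
#include <time.h>

struct quota_res {
	uint64_t count;
	uint64_t softlimit;   /* 0 means "no limit" */
	uint64_t hardlimit;   /* 0 means "no limit" */
	time_t   timer;       /* 0 means "not running" */
};

static void adjust_timer(struct quota_res *q, time_t grace)
{
	int over = (q->softlimit && q->count > q->softlimit) ||
		   (q->hardlimit && q->count > q->hardlimit);

	if (!q->timer && over)
		q->timer = time(NULL) + grace;   /* start the grace period */
	else if (q->timer && !over)
		q->timer = 0;                    /* back under both limits */
}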
d                 492 fs/xfs/xfs_mount.c 	xfs_daddr_t	d;
d                 495 fs/xfs/xfs_mount.c 	d = (xfs_daddr_t)XFS_FSB_TO_BB(mp, mp->m_sb.sb_dblocks);
d                 496 fs/xfs/xfs_mount.c 	if (XFS_BB_TO_FSB(mp, d) != mp->m_sb.sb_dblocks) {
d                 501 fs/xfs/xfs_mount.c 					d - XFS_FSS_TO_BB(mp, 1),
d                 512 fs/xfs/xfs_mount.c 	d = (xfs_daddr_t)XFS_FSB_TO_BB(mp, mp->m_sb.sb_logblocks);
d                 513 fs/xfs/xfs_mount.c 	if (XFS_BB_TO_FSB(mp, d) != mp->m_sb.sb_logblocks) {
d                 518 fs/xfs/xfs_mount.c 					d - XFS_FSB_TO_BB(mp, 1),
d                 315 fs/xfs/xfs_mount.h xfs_daddr_to_agno(struct xfs_mount *mp, xfs_daddr_t d)
d                 317 fs/xfs/xfs_mount.h 	xfs_rfsblock_t ld = XFS_BB_TO_FSBT(mp, d);
d                 323 fs/xfs/xfs_mount.h xfs_daddr_to_agbno(struct xfs_mount *mp, xfs_daddr_t d)
d                 325 fs/xfs/xfs_mount.h 	xfs_rfsblock_t ld = XFS_BB_TO_FSBT(mp, d);
d                 145 fs/xfs/xfs_quota.h #define xfs_qm_dqrele(d)
d                 764 fs/xfs/xfs_rtalloc.c 	xfs_daddr_t		d;		/* disk block address */
d                 829 fs/xfs/xfs_rtalloc.c 			d = XFS_FSB_TO_DADDR(mp, fsbno);
d                 830 fs/xfs/xfs_rtalloc.c 			bp = xfs_trans_get_buf(tp, mp->m_ddev_targp, d,
d                1188 fs/xfs/xfs_rtalloc.c 	xfs_daddr_t		d;	/* address of last block of subvolume */
d                1208 fs/xfs/xfs_rtalloc.c 	d = (xfs_daddr_t)XFS_FSB_TO_BB(mp, mp->m_sb.sb_rblocks);
d                1209 fs/xfs/xfs_rtalloc.c 	if (XFS_BB_TO_FSB(mp, d) != mp->m_sb.sb_rblocks) {
d                1211 fs/xfs/xfs_rtalloc.c 			(unsigned long long) XFS_BB_TO_FSB(mp, d),
d                1216 fs/xfs/xfs_rtalloc.c 					d - XFS_FSB_TO_BB(mp, 1),
d                  33 fs/xfs/xfs_symlink.c 	xfs_daddr_t		d;
d                  52 fs/xfs/xfs_symlink.c 		d = XFS_FSB_TO_DADDR(mp, mval[n].br_startblock);
d                  55 fs/xfs/xfs_symlink.c 		bp = xfs_buf_read(mp->m_ddev_targp, d, BTOBB(byte_cnt), 0,
d                 162 fs/xfs/xfs_symlink.c 	xfs_daddr_t		d;
d                 290 fs/xfs/xfs_symlink.c 			d = XFS_FSB_TO_DADDR(mp, mval[n].br_startblock);
d                 292 fs/xfs/xfs_symlink.c 			bp = xfs_trans_get_buf(tp, mp->m_ddev_targp, d,
d                 153 fs/xfs/xfs_trans.h #define	xfs_trans_agblocks_delta(tp, d)	((tp)->t_ag_freeblks_delta += (int64_t)d)
d                 154 fs/xfs/xfs_trans.h #define	xfs_trans_agflist_delta(tp, d)	((tp)->t_ag_flist_delta += (int64_t)d)
d                 155 fs/xfs/xfs_trans.h #define	xfs_trans_agbtree_delta(tp, d)	((tp)->t_ag_btree_delta += (int64_t)d)
d                 157 fs/xfs/xfs_trans.h #define	xfs_trans_agblocks_delta(tp, d)
d                 158 fs/xfs/xfs_trans.h #define	xfs_trans_agflist_delta(tp, d)
d                 159 fs/xfs/xfs_trans.h #define	xfs_trans_agbtree_delta(tp, d)
d                 312 fs/xfs/xfs_trans_dquot.c 	struct xfs_disk_dquot	*d;
d                 344 fs/xfs/xfs_trans_dquot.c 			d = &dqp->q_core;
d                 365 fs/xfs/xfs_trans_dquot.c 				ASSERT(be64_to_cpu(d->d_bcount) >=
d                 369 fs/xfs/xfs_trans_dquot.c 				ASSERT(be64_to_cpu(d->d_rtbcount) >=
d                 373 fs/xfs/xfs_trans_dquot.c 				ASSERT(be64_to_cpu(d->d_icount) >=
d                 377 fs/xfs/xfs_trans_dquot.c 				be64_add_cpu(&d->d_bcount, (xfs_qcnt_t)totalbdelta);
d                 380 fs/xfs/xfs_trans_dquot.c 				be64_add_cpu(&d->d_icount, (xfs_qcnt_t)qtrx->qt_icount_delta);
d                 383 fs/xfs/xfs_trans_dquot.c 				be64_add_cpu(&d->d_rtbcount, (xfs_qcnt_t)totalrtbdelta);
d                 389 fs/xfs/xfs_trans_dquot.c 			if (d->d_id) {
d                 391 fs/xfs/xfs_trans_dquot.c 				xfs_qm_adjust_dqtimers(tp->t_mountp, d);
d                 426 include/acpi/acoutput.h #define ACPI_DUMP_PATHNAME(a, b, c, d)  acpi_ns_dump_pathname(a, b, c, d)
d                 429 include/acpi/acoutput.h #define ACPI_TRACE_POINT(a, b, c, d)    acpi_trace_point (a, b, c, d)
d                 449 include/acpi/acoutput.h #define ACPI_DUMP_PATHNAME(a, b, c, d)
d                 452 include/acpi/acoutput.h #define ACPI_TRACE_POINT(a, b, c, d)
d                 212 include/acpi/acpi_bus.h #define acpi_device_dir(d)	((d)->dir.entry)
d                 244 include/acpi/acpi_bus.h #define acpi_device_bid(d)	((d)->pnp.bus_id)
d                 245 include/acpi/acpi_bus.h #define acpi_device_adr(d)	((d)->pnp.bus_address)
d                 247 include/acpi/acpi_bus.h #define acpi_device_uid(d)	((d)->pnp.unique_id)
d                 248 include/acpi/acpi_bus.h #define acpi_device_name(d)	((d)->pnp.device_name)
d                 249 include/acpi/acpi_bus.h #define acpi_device_class(d)	((d)->pnp.device_class)
d                 442 include/acpi/acpi_bus.h static inline void *acpi_driver_data(struct acpi_device *d)
d                 444 include/acpi/acpi_bus.h 	return d->driver_data;
d                 447 include/acpi/acpi_bus.h #define to_acpi_device(d)	container_of(d, struct acpi_device, dev)
d                 448 include/acpi/acpi_bus.h #define to_acpi_driver(d)	container_of(d, struct acpi_driver, drv)
d                 636 include/acpi/acpi_bus.h static inline int acpi_pm_device_sleep_state(struct device *d, int *p, int m)
d                 511 include/acpi/actypes.h #define ACPI_OFFSET(d, f)               ACPI_PTR_DIFF (&(((d *) 0)->f), (void *) 0)
d                  23 include/acpi/platform/acgcc.h #define va_copy(d, s)           __builtin_va_copy(d, s)
d                 205 include/crypto/algapi.h 		unsigned long *d = (unsigned long *)dst;
d                 209 include/crypto/algapi.h 			*d++ ^= *s++;
d                 223 include/crypto/algapi.h 		unsigned long *d = (unsigned long *)dst;
d                 228 include/crypto/algapi.h 			*d++ = *s1++ ^ *s2++;
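The crypto/algapi.h excerpts above XOR buffers a machine word at a time. A simplified standalone sketch of that loop, assuming aligned buffers and a length that is a multiple of the word size:

#include <stddef.h>

static void xor_words(void *dst, const void *src, size_t len)
{
	unsigned long *d = dst;
	const unsigned long *s = src;

	/* One full word per iteration; any tail bytes are ignored here. */
	for (; len >= sizeof(unsigned long); len -= sizeof(unsigned long))
		*d++ ^= *s++;
}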
d                  34 include/crypto/internal/rsa.h 	const u8 *d;
d                 159 include/drm/drm_legacy.h int drm_legacy_addmap(struct drm_device *d, resource_size_t offset,
d                 163 include/drm/drm_legacy.h void drm_legacy_rmmap(struct drm_device *d, struct drm_local_map *map);
d                 164 include/drm/drm_legacy.h int drm_legacy_rmmap_locked(struct drm_device *d, struct drm_local_map *map);
d                 168 include/drm/drm_legacy.h int drm_legacy_addbufs_agp(struct drm_device *d, struct drm_buf_desc *req);
d                 169 include/drm/drm_legacy.h int drm_legacy_addbufs_pci(struct drm_device *d, struct drm_buf_desc *req);
d                 178 include/drm/drm_mipi_dbi.h 	u8 d[] = { seq }; \
d                 179 include/drm/drm_mipi_dbi.h 	mipi_dbi_command_stackbuf(dbi, cmd, d, ARRAY_SIZE(d)); \
d                  14 include/drm/drm_os_linux.h #define DRM_UDELAY(d)			udelay(d)
d                 102 include/linux/amba/bus.h #define to_amba_device(d)	container_of(d, struct amba_device, dev)
d                 104 include/linux/amba/bus.h #define amba_get_drvdata(d)	dev_get_drvdata(&d->dev)
d                 105 include/linux/amba/bus.h #define amba_set_drvdata(d,p)	dev_set_drvdata(&d->dev, p)
d                 162 include/linux/amba/bus.h #define amba_config(d)	AMBA_CONFIG_BITS((d)->periphid)
d                 163 include/linux/amba/bus.h #define amba_rev(d)	AMBA_REV_BITS((d)->periphid)
d                 164 include/linux/amba/bus.h #define amba_manf(d)	AMBA_MANF_BITS((d)->periphid)
d                 165 include/linux/amba/bus.h #define amba_part(d)	AMBA_PART_BITS((d)->periphid)
d                1649 include/linux/blkdev.h static inline void blk_integrity_register(struct gendisk *d,
d                1653 include/linux/blkdev.h static inline void blk_integrity_unregister(struct gendisk *d)
d                 192 include/linux/can/dev/peak_canfd.h 	u8	d[0];
d                 258 include/linux/can/dev/peak_canfd.h #define PUCAN_MSG_CHANNEL_DLC(c, d)	(((c) & 0xf) | ((d) << 4))
d                 269 include/linux/can/dev/peak_canfd.h 	u8	d[0];
d                 109 include/linux/cb710.h #define cb710_dump_regs(c, d) do {} while (0)
d                  30 include/linux/ceph/pagelist.h extern int ceph_pagelist_append(struct ceph_pagelist *pl, const void *d, size_t l);
d                  26 include/linux/clkdev.h #define CLKDEV_INIT(d, n, c)	\
d                  28 include/linux/clkdev.h 		.dev_id = d,	\
d                 148 include/linux/console_struct.h 	struct vc_data *d;
d                 193 include/linux/coresight.h #define to_coresight_device(d) container_of(d, struct coresight_device, dev)
d                  20 include/linux/crush/hash.h extern __u32 crush_hash32_4(int type, __u32 a, __u32 b, __u32 c, __u32 d);
d                  21 include/linux/crush/hash.h extern __u32 crush_hash32_5(int type, __u32 a, __u32 b, __u32 c, __u32 d,
d                 129 include/linux/delayacct.h static inline int delayacct_add_tsk(struct taskstats *d,
d                 134 include/linux/delayacct.h 	return __delayacct_add_tsk(d, tsk);
d                 183 include/linux/delayacct.h static inline int delayacct_add_tsk(struct taskstats *d,
d                 162 include/linux/device-mapper.h void dm_put_device(struct dm_target *ti, struct dm_dev *d);
d                1379 include/linux/device.h static inline void dev_set_msi_domain(struct device *dev, struct irq_domain *d)
d                1382 include/linux/device.h 	dev->msi_domain = d;
d                 251 include/linux/dio.h static inline struct dio_driver *dio_dev_driver(const struct dio_dev *d)
d                 253 include/linux/dio.h     return d->driver;
d                 256 include/linux/dio.h #define dio_resource_start(d) ((d)->resource.start)
d                 257 include/linux/dio.h #define dio_resource_end(d)   ((d)->resource.end)
d                 258 include/linux/dio.h #define dio_resource_len(d)   (resource_size(&(d)->resource))
d                 259 include/linux/dio.h #define dio_resource_flags(d) ((d)->resource.flags)
d                 261 include/linux/dio.h #define dio_request_device(d, name) \
d                 262 include/linux/dio.h     request_mem_region(dio_resource_start(d), dio_resource_len(d), name)
d                 263 include/linux/dio.h #define dio_release_device(d) \
d                 264 include/linux/dio.h     release_mem_region(dio_resource_start(d), dio_resource_len(d))
d                 270 include/linux/dio.h static inline void *dio_get_drvdata (struct dio_dev *d)
d                 272 include/linux/dio.h 	return dev_get_drvdata(&d->dev);
d                 275 include/linux/dio.h static inline void dio_set_drvdata (struct dio_dev *d, void *data)
d                 277 include/linux/dio.h 	dev_set_drvdata(&d->dev, data);
d                 615 include/linux/dma-mapping.h #define dma_map_single(d, a, s, r) dma_map_single_attrs(d, a, s, r, 0)
d                 616 include/linux/dma-mapping.h #define dma_unmap_single(d, a, s, r) dma_unmap_single_attrs(d, a, s, r, 0)
d                 617 include/linux/dma-mapping.h #define dma_map_sg(d, s, n, r) dma_map_sg_attrs(d, s, n, r, 0)
d                 618 include/linux/dma-mapping.h #define dma_unmap_sg(d, s, n, r) dma_unmap_sg_attrs(d, s, n, r, 0)
d                 619 include/linux/dma-mapping.h #define dma_map_page(d, p, o, s, r) dma_map_page_attrs(d, p, o, s, r, 0)
d                 620 include/linux/dma-mapping.h #define dma_unmap_page(d, a, s, r) dma_unmap_page_attrs(d, a, s, r, 0)
d                 621 include/linux/dma-mapping.h #define dma_get_sgtable(d, t, v, h, s) dma_get_sgtable_attrs(d, t, v, h, s, 0)
d                 622 include/linux/dma-mapping.h #define dma_mmap_coherent(d, v, c, h, s) dma_mmap_attrs(d, v, c, h, s, 0)
d                  45 include/linux/fpga/fpga-bridge.h #define to_fpga_bridge(d) container_of(d, struct fpga_bridge, dev)
d                 177 include/linux/fpga/fpga-mgr.h #define to_fpga_manager(d) container_of(d, struct fpga_manager, dev)
d                  32 include/linux/fpga/fpga-region.h #define to_fpga_region(d) container_of(d, struct fpga_region, dev)
d                  49 include/linux/gameport.h #define to_gameport_port(d)	container_of(d, struct gameport, dev)
d                  62 include/linux/gameport.h #define to_gameport_driver(d)	container_of(d, struct gameport_driver, driver)
d                 571 include/linux/gpio/driver.h int gpiochip_irq_map(struct irq_domain *d, unsigned int irq,
d                 573 include/linux/gpio/driver.h void gpiochip_irq_unmap(struct irq_domain *d, unsigned int irq);
d                  67 include/linux/greybus.h #define to_greybus_driver(d) container_of(d, struct greybus_driver, driver)
d                  37 include/linux/greybus/bundle.h #define to_gb_bundle(d) container_of(d, struct gb_bundle, dev)
d                  30 include/linux/greybus/control.h #define to_gb_control(d) container_of(d, struct gb_control, dev)
d                  63 include/linux/greybus/hd.h #define to_gb_host_device(d) container_of(d, struct gb_host_device, dev)
d                  69 include/linux/greybus/interface.h #define to_gb_interface(d) container_of(d, struct gb_interface, dev)
d                  28 include/linux/greybus/module.h #define to_gb_module(d) container_of(d, struct gb_module, dev)
d                  60 include/linux/greybus/svc.h #define to_gb_svc(d) container_of(d, struct gb_svc, dev)
d                  41 include/linux/hid-debug.h #define hid_dump_report(a,b,c,d)	do { } while (0)
d                 286 include/linux/hyperv.h 	struct vmpacket_descriptor d;
d                 295 include/linux/hyperv.h 	struct vmpacket_descriptor d;
d                 301 include/linux/hyperv.h 	struct vmpacket_descriptor d;
d                 325 include/linux/hyperv.h 	struct vmpacket_descriptor d;
d                 336 include/linux/hyperv.h 	struct vmpacket_descriptor d;
d                 346 include/linux/hyperv.h 	struct vmpacket_descriptor d;
d                 354 include/linux/hyperv.h 	struct vmpacket_descriptor d;
d                1186 include/linux/hyperv.h static inline struct hv_device *device_to_hv_device(struct device *d)
d                1188 include/linux/hyperv.h 	return container_of(d, struct hv_device, device);
d                1191 include/linux/hyperv.h static inline struct hv_driver *drv_to_hv_drv(struct device_driver *d)
d                1193 include/linux/hyperv.h 	return container_of(d, struct hv_driver, driver);
d                 293 include/linux/i2c.h #define to_i2c_driver(d) container_of(d, struct i2c_driver, driver)
d                 337 include/linux/i2c.h #define to_i2c_client(d) container_of(d, struct i2c_client, dev)
d                 715 include/linux/i2c.h #define to_i2c_adapter(d) container_of(d, struct i2c_adapter, dev)
d                1230 include/linux/ide.h #define ide_pci_register_driver(d) __ide_pci_register_driver(d, THIS_MODULE, KBUILD_MODNAME)
d                1232 include/linux/ide.h #define ide_pci_register_driver(d) pci_register_driver(d)
d                1253 include/linux/ide.h 				     const struct ide_port_info *d)
d                  58 include/linux/iio/sw_device.h void iio_swd_group_init_type_name(struct iio_sw_device *d,
d                  63 include/linux/iio/sw_device.h 	config_group_init_type_name(&d->group, name, type);
d                  80 include/linux/iio/trigger.h static inline struct iio_trigger *to_iio_trigger(struct device *d)
d                  82 include/linux/iio/trigger.h 	return container_of(d, struct iio_trigger, dev);
d                 205 include/linux/input.h #define to_input_dev(d) container_of(d, struct input_dev, dev)
d                 258 include/linux/intel-iommu.h #define DMA_CCMD_DID(d) ((u64)((d) & 0xffff))
d                 274 include/linux/intel-iommu.h #define dma_frcd_type(d) ((d >> 30) & 1)
d                 280 include/linux/intel-iommu.h #define dma_frcd_page_addr(d) (d & (((u64)-1) << PAGE_SHIFT))
d                 322 include/linux/intel-iommu.h #define QI_IWD_STATUS_DATA(d)	(((u64)d) << 32)
d                 239 include/linux/irq.h #define __irqd_to_state(d) ACCESS_PRIVATE((d)->common, state_use_accessors)
d                 241 include/linux/irq.h static inline bool irqd_is_setaffinity_pending(struct irq_data *d)
d                 243 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_SETAFFINITY_PENDING;
d                 246 include/linux/irq.h static inline bool irqd_is_per_cpu(struct irq_data *d)
d                 248 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_PER_CPU;
d                 251 include/linux/irq.h static inline bool irqd_can_balance(struct irq_data *d)
d                 253 include/linux/irq.h 	return !(__irqd_to_state(d) & (IRQD_PER_CPU | IRQD_NO_BALANCING));
d                 256 include/linux/irq.h static inline bool irqd_affinity_was_set(struct irq_data *d)
d                 258 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_AFFINITY_SET;
d                 261 include/linux/irq.h static inline void irqd_mark_affinity_was_set(struct irq_data *d)
d                 263 include/linux/irq.h 	__irqd_to_state(d) |= IRQD_AFFINITY_SET;
d                 266 include/linux/irq.h static inline bool irqd_trigger_type_was_set(struct irq_data *d)
d                 268 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_DEFAULT_TRIGGER_SET;
d                 271 include/linux/irq.h static inline u32 irqd_get_trigger_type(struct irq_data *d)
d                 273 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_TRIGGER_MASK;
d                 280 include/linux/irq.h static inline void irqd_set_trigger_type(struct irq_data *d, u32 type)
d                 282 include/linux/irq.h 	__irqd_to_state(d) &= ~IRQD_TRIGGER_MASK;
d                 283 include/linux/irq.h 	__irqd_to_state(d) |= type & IRQD_TRIGGER_MASK;
d                 284 include/linux/irq.h 	__irqd_to_state(d) |= IRQD_DEFAULT_TRIGGER_SET;
d                 287 include/linux/irq.h static inline bool irqd_is_level_type(struct irq_data *d)
d                 289 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_LEVEL;
d                 296 include/linux/irq.h static inline void irqd_set_single_target(struct irq_data *d)
d                 298 include/linux/irq.h 	__irqd_to_state(d) |= IRQD_SINGLE_TARGET;
d                 301 include/linux/irq.h static inline bool irqd_is_single_target(struct irq_data *d)
d                 303 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_SINGLE_TARGET;
d                 306 include/linux/irq.h static inline bool irqd_is_wakeup_set(struct irq_data *d)
d                 308 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_WAKEUP_STATE;
d                 311 include/linux/irq.h static inline bool irqd_can_move_in_process_context(struct irq_data *d)
d                 313 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_MOVE_PCNTXT;
d                 316 include/linux/irq.h static inline bool irqd_irq_disabled(struct irq_data *d)
d                 318 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_IRQ_DISABLED;
d                 321 include/linux/irq.h static inline bool irqd_irq_masked(struct irq_data *d)
d                 323 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_IRQ_MASKED;
d                 326 include/linux/irq.h static inline bool irqd_irq_inprogress(struct irq_data *d)
d                 328 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_IRQ_INPROGRESS;
d                 331 include/linux/irq.h static inline bool irqd_is_wakeup_armed(struct irq_data *d)
d                 333 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_WAKEUP_ARMED;
d                 336 include/linux/irq.h static inline bool irqd_is_forwarded_to_vcpu(struct irq_data *d)
d                 338 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_FORWARDED_TO_VCPU;
d                 341 include/linux/irq.h static inline void irqd_set_forwarded_to_vcpu(struct irq_data *d)
d                 343 include/linux/irq.h 	__irqd_to_state(d) |= IRQD_FORWARDED_TO_VCPU;
d                 346 include/linux/irq.h static inline void irqd_clr_forwarded_to_vcpu(struct irq_data *d)
d                 348 include/linux/irq.h 	__irqd_to_state(d) &= ~IRQD_FORWARDED_TO_VCPU;
d                 351 include/linux/irq.h static inline bool irqd_affinity_is_managed(struct irq_data *d)
d                 353 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_AFFINITY_MANAGED;
d                 356 include/linux/irq.h static inline bool irqd_is_activated(struct irq_data *d)
d                 358 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_ACTIVATED;
d                 361 include/linux/irq.h static inline void irqd_set_activated(struct irq_data *d)
d                 363 include/linux/irq.h 	__irqd_to_state(d) |= IRQD_ACTIVATED;
d                 366 include/linux/irq.h static inline void irqd_clr_activated(struct irq_data *d)
d                 368 include/linux/irq.h 	__irqd_to_state(d) &= ~IRQD_ACTIVATED;
d                 371 include/linux/irq.h static inline bool irqd_is_started(struct irq_data *d)
d                 373 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_IRQ_STARTED;
d                 376 include/linux/irq.h static inline bool irqd_is_managed_and_shutdown(struct irq_data *d)
d                 378 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_MANAGED_SHUTDOWN;
d                 381 include/linux/irq.h static inline void irqd_set_can_reserve(struct irq_data *d)
d                 383 include/linux/irq.h 	__irqd_to_state(d) |= IRQD_CAN_RESERVE;
d                 386 include/linux/irq.h static inline void irqd_clr_can_reserve(struct irq_data *d)
d                 388 include/linux/irq.h 	__irqd_to_state(d) &= ~IRQD_CAN_RESERVE;
d                 391 include/linux/irq.h static inline bool irqd_can_reserve(struct irq_data *d)
d                 393 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_CAN_RESERVE;
d                 396 include/linux/irq.h static inline void irqd_set_msi_nomask_quirk(struct irq_data *d)
d                 398 include/linux/irq.h 	__irqd_to_state(d) |= IRQD_MSI_NOMASK_QUIRK;
d                 401 include/linux/irq.h static inline void irqd_clr_msi_nomask_quirk(struct irq_data *d)
d                 403 include/linux/irq.h 	__irqd_to_state(d) &= ~IRQD_MSI_NOMASK_QUIRK;
d                 406 include/linux/irq.h static inline bool irqd_msi_nomask_quirk(struct irq_data *d)
d                 408 include/linux/irq.h 	return __irqd_to_state(d) & IRQD_MSI_NOMASK_QUIRK;
d                 413 include/linux/irq.h static inline irq_hw_number_t irqd_to_hwirq(struct irq_data *d)
d                 415 include/linux/irq.h 	return d->hwirq;
d                 769 include/linux/irq.h 	struct irq_data *d = irq_get_irq_data(irq);
d                 770 include/linux/irq.h 	return d ? d->chip : NULL;
d                 773 include/linux/irq.h static inline struct irq_chip *irq_data_get_irq_chip(struct irq_data *d)
d                 775 include/linux/irq.h 	return d->chip;
d                 780 include/linux/irq.h 	struct irq_data *d = irq_get_irq_data(irq);
d                 781 include/linux/irq.h 	return d ? d->chip_data : NULL;
d                 784 include/linux/irq.h static inline void *irq_data_get_irq_chip_data(struct irq_data *d)
d                 786 include/linux/irq.h 	return d->chip_data;
d                 791 include/linux/irq.h 	struct irq_data *d = irq_get_irq_data(irq);
d                 792 include/linux/irq.h 	return d ? d->common->handler_data : NULL;
d                 795 include/linux/irq.h static inline void *irq_data_get_irq_handler_data(struct irq_data *d)
d                 797 include/linux/irq.h 	return d->common->handler_data;
d                 802 include/linux/irq.h 	struct irq_data *d = irq_get_irq_data(irq);
d                 803 include/linux/irq.h 	return d ? d->common->msi_desc : NULL;
d                 806 include/linux/irq.h static inline struct msi_desc *irq_data_get_msi_desc(struct irq_data *d)
d                 808 include/linux/irq.h 	return d->common->msi_desc;
d                 813 include/linux/irq.h 	struct irq_data *d = irq_get_irq_data(irq);
d                 814 include/linux/irq.h 	return d ? irqd_get_trigger_type(d) : 0;
d                 817 include/linux/irq.h static inline int irq_common_data_get_node(struct irq_common_data *d)
d                 820 include/linux/irq.h 	return d->node;
d                 826 include/linux/irq.h static inline int irq_data_get_node(struct irq_data *d)
d                 828 include/linux/irq.h 	return irq_common_data_get_node(d->common);
d                 833 include/linux/irq.h 	struct irq_data *d = irq_get_irq_data(irq);
d                 835 include/linux/irq.h 	return d ? d->common->affinity : NULL;
d                 838 include/linux/irq.h static inline struct cpumask *irq_data_get_affinity_mask(struct irq_data *d)
d                 840 include/linux/irq.h 	return d->common->affinity;
d                 845 include/linux/irq.h struct cpumask *irq_data_get_effective_affinity_mask(struct irq_data *d)
d                 847 include/linux/irq.h 	return d->common->effective_affinity;
d                 849 include/linux/irq.h static inline void irq_data_update_effective_affinity(struct irq_data *d,
d                 852 include/linux/irq.h 	cpumask_copy(d->common->effective_affinity, m);
d                 855 include/linux/irq.h static inline void irq_data_update_effective_affinity(struct irq_data *d,
d                 860 include/linux/irq.h struct cpumask *irq_data_get_effective_affinity_mask(struct irq_data *d)
d                 862 include/linux/irq.h 	return d->common->affinity;
d                1067 include/linux/irq.h void irq_gc_noop(struct irq_data *d);
d                1068 include/linux/irq.h void irq_gc_mask_disable_reg(struct irq_data *d);
d                1069 include/linux/irq.h void irq_gc_mask_set_bit(struct irq_data *d);
d                1070 include/linux/irq.h void irq_gc_mask_clr_bit(struct irq_data *d);
d                1071 include/linux/irq.h void irq_gc_unmask_enable_reg(struct irq_data *d);
d                1072 include/linux/irq.h void irq_gc_ack_set_bit(struct irq_data *d);
d                1073 include/linux/irq.h void irq_gc_ack_clr_bit(struct irq_data *d);
d                1074 include/linux/irq.h void irq_gc_mask_disable_and_ack_set(struct irq_data *d);
d                1075 include/linux/irq.h void irq_gc_eoi(struct irq_data *d);
d                1076 include/linux/irq.h int irq_gc_set_wake(struct irq_data *d, unsigned int on);
d                1079 include/linux/irq.h int irq_map_generic_chip(struct irq_domain *d, unsigned int virq,
d                1087 include/linux/irq.h int irq_setup_alt_chip(struct irq_data *d, unsigned int type);
d                1099 include/linux/irq.h struct irq_chip_generic *irq_get_domain_generic_chip(struct irq_domain *d, unsigned int hw_irq);
d                1101 include/linux/irq.h int __irq_alloc_domain_generic_chips(struct irq_domain *d, int irqs_per_chip,
d                1107 include/linux/irq.h #define irq_alloc_domain_generic_chips(d, irqs_per_chip, num_ct, name,	\
d                1111 include/linux/irq.h 	__irq_alloc_domain_generic_chips(d, irqs_per_chip, num_ct, name,\
d                1128 include/linux/irq.h static inline struct irq_chip_type *irq_data_get_chip_type(struct irq_data *d)
d                1130 include/linux/irq.h 	return container_of(d->chip, struct irq_chip_type, chip);
d                 104 include/linux/irqdomain.h 	int (*match)(struct irq_domain *d, struct device_node *node,
d                 106 include/linux/irqdomain.h 	int (*select)(struct irq_domain *d, struct irq_fwspec *fwspec,
d                 108 include/linux/irqdomain.h 	int (*map)(struct irq_domain *d, unsigned int virq, irq_hw_number_t hw);
d                 109 include/linux/irqdomain.h 	void (*unmap)(struct irq_domain *d, unsigned int virq);
d                 110 include/linux/irqdomain.h 	int (*xlate)(struct irq_domain *d, struct device_node *node,
d                 115 include/linux/irqdomain.h 	int (*alloc)(struct irq_domain *d, unsigned int virq,
d                 117 include/linux/irqdomain.h 	void (*free)(struct irq_domain *d, unsigned int virq,
d                 119 include/linux/irqdomain.h 	int (*activate)(struct irq_domain *d, struct irq_data *irqd, bool reserve);
d                 120 include/linux/irqdomain.h 	void (*deactivate)(struct irq_domain *d, struct irq_data *irq_data);
d                 121 include/linux/irqdomain.h 	int (*translate)(struct irq_domain *d, struct irq_fwspec *fwspec,
d                 125 include/linux/irqdomain.h 	void (*debug_show)(struct seq_file *m, struct irq_domain *d,
d                 223 include/linux/irqdomain.h static inline struct device_node *irq_domain_get_of_node(struct irq_domain *d)
d                 225 include/linux/irqdomain.h 	return to_of_node(d->fwnode);
d                 315 include/linux/irqdomain.h 	struct irq_domain *d;
d                 317 include/linux/irqdomain.h 	d = irq_find_matching_host(node, DOMAIN_BUS_WIRED);
d                 318 include/linux/irqdomain.h 	if (!d)
d                 319 include/linux/irqdomain.h 		d = irq_find_matching_host(node, DOMAIN_BUS_ANY);
d                 321 include/linux/irqdomain.h 	return d;
d                 421 include/linux/irqdomain.h int irq_domain_xlate_onecell(struct irq_domain *d, struct device_node *ctrlr,
d                 424 include/linux/irqdomain.h int irq_domain_xlate_twocell(struct irq_domain *d, struct device_node *ctrlr,
d                 427 include/linux/irqdomain.h int irq_domain_xlate_onetwocell(struct irq_domain *d, struct device_node *ctrlr,
d                 431 include/linux/irqdomain.h int irq_domain_translate_twocell(struct irq_domain *d,
d                  31 include/linux/isa.h static inline int isa_register_driver(struct isa_driver *d, unsigned int i)
d                  36 include/linux/isa.h static inline void isa_unregister_driver(struct isa_driver *d)
d                  95 include/linux/kernel.h #define DIV_ROUND_DOWN_ULL(ll, d) \
d                  96 include/linux/kernel.h 	({ unsigned long long _tmp = (ll); do_div(_tmp, d); _tmp; })
d                  98 include/linux/kernel.h #define DIV_ROUND_UP_ULL(ll, d) \
d                  99 include/linux/kernel.h 	DIV_ROUND_DOWN_ULL((unsigned long long)(ll) + (d) - 1, (d))
d                 102 include/linux/kernel.h # define DIV_ROUND_UP_SECTOR_T(ll,d) DIV_ROUND_UP_ULL(ll, d)
d                 104 include/linux/kernel.h # define DIV_ROUND_UP_SECTOR_T(ll,d) DIV_ROUND_UP(ll,d)
d                 496 include/linux/key.h #define key_remove_domain(d)		do { } while(0)
d                 103 include/linux/maple.h #define maple_get_drvdata(d)		dev_get_drvdata(&(d)->dev)
d                 104 include/linux/maple.h #define maple_set_drvdata(d,p)		dev_set_drvdata(&(d)->dev, (p))
d                 284 include/linux/math64.h #define DIV64_U64_ROUND_UP(ll, d)	\
d                 285 include/linux/math64.h 	({ u64 _tmp = (d); div64_u64((ll) + _tmp - 1, _tmp); })
d                  44 include/linux/mdio.h #define to_mdio_device(d) container_of(d, struct mdio_device, dev)
d                  52 include/linux/mdio.h #define to_mdio_common_driver(d) \
d                  53 include/linux/mdio.h 	container_of(d, struct mdio_driver_common, driver)
d                  68 include/linux/mdio.h #define to_mdio_driver(d)						\
d                  69 include/linux/mdio.h 	container_of(to_mdio_common_driver(d), struct mdio_driver, mdiodrv)
d                  61 include/linux/mei_cl_bus.h #define to_mei_cl_device(d) container_of(d, struct mei_cl_device, dev)
d                  56 include/linux/mfd/mcp.h #define mcp_set_drvdata(mcp,d)	dev_set_drvdata(&(mcp)->attached_device, d)
d                  67 include/linux/mmc/sdio_func.h #define sdio_set_drvdata(f,d)	dev_set_drvdata(&(f)->dev, d)
d                  68 include/linux/mmc/sdio_func.h #define dev_to_sdio_func(d)	container_of(d, struct sdio_func, dev)
d                  37 include/linux/mpi.h 	mpi_limb_t *d;		/* array with the limbs */
d                 104 include/linux/mtd/doc2000.h #define WriteDOC_(d, adr, reg)  writeb(d, (void __iomem *)(adr) + (reg))
d                 115 include/linux/mtd/doc2000.h #define WriteDOC(d, adr, reg)  WriteDOC_(d,adr,DoC_##reg)
d                2070 include/linux/netdevice.h #define to_net_dev(d) container_of(d, struct net_device, dev)
d                2557 include/linux/netdevice.h #define for_each_netdev(net, d)		\
d                2558 include/linux/netdevice.h 		list_for_each_entry(d, &(net)->dev_base_head, dev_list)
d                2559 include/linux/netdevice.h #define for_each_netdev_reverse(net, d)	\
d                2560 include/linux/netdevice.h 		list_for_each_entry_reverse(d, &(net)->dev_base_head, dev_list)
d                2561 include/linux/netdevice.h #define for_each_netdev_rcu(net, d)		\
d                2562 include/linux/netdevice.h 		list_for_each_entry_rcu(d, &(net)->dev_base_head, dev_list)
d                2563 include/linux/netdevice.h #define for_each_netdev_safe(net, d, n)	\
d                2564 include/linux/netdevice.h 		list_for_each_entry_safe(d, n, &(net)->dev_base_head, dev_list)
d                2565 include/linux/netdevice.h #define for_each_netdev_continue(net, d)		\
d                2566 include/linux/netdevice.h 		list_for_each_entry_continue(d, &(net)->dev_base_head, dev_list)
d                2567 include/linux/netdevice.h #define for_each_netdev_continue_rcu(net, d)		\
d                2568 include/linux/netdevice.h 	list_for_each_entry_continue_rcu(d, &(net)->dev_base_head, dev_list)
d                 294 include/linux/omap-dma.h #define __dma_omap15xx(d) (dma_omap1() && (d)->dev_caps & ENABLE_1510_MODE)
d                 295 include/linux/omap-dma.h #define __dma_omap16xx(d) (dma_omap1() && (d)->dev_caps & ENABLE_16XX_MODE)
d                 296 include/linux/omap-dma.h #define dma_omap15xx()	__dma_omap15xx(d)
d                 297 include/linux/omap-dma.h #define dma_omap16xx()	__dma_omap16xx(d)
d                  92 include/linux/omap-gpmc.h extern int gpmc_onenand_init(struct omap_onenand_platform_data *d);
d                  95 include/linux/omap-gpmc.h static inline int gpmc_onenand_init(struct omap_onenand_platform_data *d)
d                  55 include/linux/overflow.h #define check_add_overflow(a, b, d) ({		\
d                  58 include/linux/overflow.h 	typeof(d) __d = (d);			\
d                  64 include/linux/overflow.h #define check_sub_overflow(a, b, d) ({		\
d                  67 include/linux/overflow.h 	typeof(d) __d = (d);			\
d                  73 include/linux/overflow.h #define check_mul_overflow(a, b, d) ({		\
d                  76 include/linux/overflow.h 	typeof(d) __d = (d);			\
d                  86 include/linux/overflow.h #define __unsigned_add_overflow(a, b, d) ({	\
d                  89 include/linux/overflow.h 	typeof(d) __d = (d);			\
d                  95 include/linux/overflow.h #define __unsigned_sub_overflow(a, b, d) ({	\
d                  98 include/linux/overflow.h 	typeof(d) __d = (d);			\
d                 107 include/linux/overflow.h #define __unsigned_mul_overflow(a, b, d) ({		\
d                 110 include/linux/overflow.h 	typeof(d) __d = (d);				\
d                 135 include/linux/overflow.h #define __signed_add_overflow(a, b, d) ({	\
d                 138 include/linux/overflow.h 	typeof(d) __d = (d);			\
d                 151 include/linux/overflow.h #define __signed_sub_overflow(a, b, d) ({	\
d                 154 include/linux/overflow.h 	typeof(d) __d = (d);			\
d                 178 include/linux/overflow.h #define __signed_mul_overflow(a, b, d) ({				\
d                 181 include/linux/overflow.h 	typeof(d) __d = (d);						\
d                 193 include/linux/overflow.h #define check_add_overflow(a, b, d)					\
d                 195 include/linux/overflow.h 			__signed_add_overflow(a, b, d),			\
d                 196 include/linux/overflow.h 			__unsigned_add_overflow(a, b, d))
d                 198 include/linux/overflow.h #define check_sub_overflow(a, b, d)					\
d                 200 include/linux/overflow.h 			__signed_sub_overflow(a, b, d),			\
d                 201 include/linux/overflow.h 			__unsigned_sub_overflow(a, b, d))
d                 203 include/linux/overflow.h #define check_mul_overflow(a, b, d)					\
d                 205 include/linux/overflow.h 			__signed_mul_overflow(a, b, d),			\
d                 206 include/linux/overflow.h 			__unsigned_mul_overflow(a, b, d))
d                 230 include/linux/overflow.h #define check_shl_overflow(a, s, d) ({					\
d                 233 include/linux/overflow.h 	typeof(d) _d = d;						\
d                 236 include/linux/overflow.h 		is_non_negative(_s) && _s < 8 * sizeof(*d) ? _s : 0;	\
d                  63 include/linux/parport_pc.h static __inline__ void parport_pc_write_data(struct parport *p, unsigned char d)
d                  66 include/linux/parport_pc.h 	printk (KERN_DEBUG "parport_pc_write_data(%p,0x%02x)\n", p, d);
d                  68 include/linux/parport_pc.h 	outb(d, DATA(p));
d                 157 include/linux/parport_pc.h 						 unsigned char d)
d                 165 include/linux/parport_pc.h 	if (d & 0x20) {
d                 171 include/linux/parport_pc.h 	__parport_pc_frob_control (p, wm, d & wm);
d                 485 include/linux/pci.h #define for_each_pci_dev(d) while ((d = pci_get_device(PCI_ANY_ID, PCI_ANY_ID, d)) != NULL)
d                1031 include/linux/pci.h #define dev_is_pci(d) ((d)->bus == &pci_bus_type)
d                1032 include/linux/pci.h #define dev_is_pf(d) ((dev_is_pci(d) ? to_pci_dev(d)->is_physfn : false))
d                1523 include/linux/pci.h static inline int pci_irqd_intx_xlate(struct irq_domain *d,
d                1740 include/linux/pci.h #define dev_is_pci(d) (false)
d                1741 include/linux/pci.h #define dev_is_pf(d) (false)
d                1744 include/linux/pci.h static inline int pci_irqd_intx_xlate(struct irq_domain *d,
d                1786 include/linux/pci.h static inline int pci_enable_ats(struct pci_dev *d, int ps) { return -ENODEV; }
d                1787 include/linux/pci.h static inline void pci_disable_ats(struct pci_dev *d) { }
d                1788 include/linux/pci.h static inline int pci_ats_queue_depth(struct pci_dev *d) { return -ENODEV; }
d                 256 include/linux/phy.h #define to_mii_bus(d) container_of(d, struct mii_bus, dev)
d                 446 include/linux/phy.h #define to_phy_device(d) container_of(to_mdio_device(d), \
d                 635 include/linux/phy.h #define to_phy_driver(d) container_of(to_mdio_common_driver(d),		\
d                 167 include/linux/pinctrl/pinconf-generic.h #define PCONFDUMP(a, b, c, d) {					\
d                 168 include/linux/pinctrl/pinconf-generic.h 	.param = a, .display = b, .format = c, .has_arg = d	\
d                  31 include/linux/reciprocal_div.h struct reciprocal_value reciprocal_value(u32 d);
d                 102 include/linux/reciprocal_div.h struct reciprocal_value_adv reciprocal_value_adv(u32 d, u8 prec);
d                 267 include/linux/rio_regs.h #define RIO_DEV_PORT_N_MNT_REQ_CSR(d, n)	\
d                 268 include/linux/rio_regs.h 		(d->phys_efptr + RIO_PORT_N_MNT_REQ_CSR(n, d->phys_rmap))
d                 270 include/linux/rio_regs.h #define RIO_DEV_PORT_N_MNT_RSP_CSR(d, n)	\
d                 271 include/linux/rio_regs.h 		(d->phys_efptr + RIO_PORT_N_MNT_RSP_CSR(n, d->phys_rmap))
d                 273 include/linux/rio_regs.h #define RIO_DEV_PORT_N_ACK_STS_CSR(d, n)	\
d                 274 include/linux/rio_regs.h 		(d->phys_efptr + RIO_PORT_N_ACK_STS_CSR(n))
d                 276 include/linux/rio_regs.h #define RIO_DEV_PORT_N_CTL2_CSR(d, n)		\
d                 277 include/linux/rio_regs.h 		(d->phys_efptr + RIO_PORT_N_CTL2_CSR(n, d->phys_rmap))
d                 279 include/linux/rio_regs.h #define RIO_DEV_PORT_N_ERR_STS_CSR(d, n)	\
d                 280 include/linux/rio_regs.h 		(d->phys_efptr + RIO_PORT_N_ERR_STS_CSR(n, d->phys_rmap))
d                 282 include/linux/rio_regs.h #define RIO_DEV_PORT_N_CTL_CSR(d, n)		\
d                 283 include/linux/rio_regs.h 		(d->phys_efptr + RIO_PORT_N_CTL_CSR(n, d->phys_rmap))
d                 285 include/linux/rio_regs.h #define RIO_DEV_PORT_N_OB_ACK_CSR(d, n)		\
d                 286 include/linux/rio_regs.h 		(d->phys_efptr + RIO_PORT_N_OB_ACK_CSR(n))
d                 288 include/linux/rio_regs.h #define RIO_DEV_PORT_N_IB_ACK_CSR(d, n)		\
d                 289 include/linux/rio_regs.h 		(d->phys_efptr + RIO_PORT_N_IB_ACK_CSR(n))
d                 160 include/linux/rtc.h #define to_rtc_device(d) container_of(d, struct rtc_device, dev)
d                 264 include/linux/scmi_protocol.h #define to_scmi_dev(d) container_of(d, struct scmi_device, dev)
d                 283 include/linux/scmi_protocol.h #define to_scmi_driver(d) container_of(d, struct scmi_driver, driver)
d                  50 include/linux/serdev.h static inline struct serdev_device *to_serdev_device(struct device *d)
d                  52 include/linux/serdev.h 	return container_of(d, struct serdev_device, dev);
d                  68 include/linux/serdev.h static inline struct serdev_device_driver *to_serdev_device_driver(struct device_driver *d)
d                  70 include/linux/serdev.h 	return container_of(d, struct serdev_device_driver, driver);
d                 110 include/linux/serdev.h static inline struct serdev_controller *to_serdev_controller(struct device *d)
d                 112 include/linux/serdev.h 	return container_of(d, struct serdev_controller, dev);
d                  65 include/linux/serio.h #define to_serio_port(d)	container_of(d, struct serio, dev)
d                  83 include/linux/serio.h #define to_serio_driver(d)	container_of(d, struct serio_driver, driver)
d                 113 include/linux/shdma-base.h #define shdma_for_each_chan(c, d, i) for (i = 0, c = (d)->schan[0]; \
d                 114 include/linux/shdma-base.h 				i < (d)->dma_dev.chancnt; c = (d)->schan[++i])
d                  38 include/linux/siphash.h u64 siphash_4u64(const u64 a, const u64 b, const u64 c, const u64 d,
d                  50 include/linux/siphash.h 			       const u32 d, const siphash_key_t *key)
d                  52 include/linux/siphash.h 	return siphash_2u64((u64)b << 32 | a, (u64)d << 32 | c, key);
d                 108 include/linux/siphash.h u32 hsiphash_4u32(const u32 a, const u32 b, const u32 c, const u32 d,
d                4186 include/linux/skbuff.h static inline void __skb_ext_copy(struct sk_buff *d, const struct sk_buff *s) {}
d                  69 include/linux/slimbus.h #define to_slim_device(d) container_of(d, struct slim_device, dev)
d                  94 include/linux/slimbus.h #define to_slim_driver(d) container_of(d, struct slim_driver, driver)
d                  92 include/linux/soc/qcom/apr.h #define to_apr_device(d) container_of(d, struct apr_device, dev)
d                  98 include/linux/soc/qcom/apr.h 			    struct apr_resp_pkt *d);
d                 103 include/linux/soc/qcom/apr.h #define to_apr_driver(d) container_of(d, struct apr_driver, driver)
d                  45 include/linux/spmi.h static inline struct spmi_device *to_spmi_device(struct device *d)
d                  47 include/linux/spmi.h 	return container_of(d, struct spmi_device, dev);
d                  90 include/linux/spmi.h static inline struct spmi_controller *to_spmi_controller(struct device *d)
d                  92 include/linux/spmi.h 	return container_of(d, struct spmi_controller, dev);
d                 143 include/linux/spmi.h static inline struct spmi_driver *to_spmi_driver(struct device_driver *d)
d                 145 include/linux/spmi.h 	return container_of(d, struct spmi_driver, driver);
d                  97 include/linux/ssb/ssb_driver_pci.h int ssb_pcicore_plat_dev_init(struct pci_dev *d);
d                 120 include/linux/ssb/ssb_driver_pci.h int ssb_pcicore_plat_dev_init(struct pci_dev *d)
d                  36 include/linux/sunxi-rsb.h static inline struct sunxi_rsb_device *to_sunxi_rsb_device(struct device *d)
d                  38 include/linux/sunxi-rsb.h 	return container_of(d, struct sunxi_rsb_device, dev);
d                  65 include/linux/sunxi-rsb.h static inline struct sunxi_rsb_driver *to_sunxi_rsb_driver(struct device_driver *d)
d                  67 include/linux/sunxi-rsb.h 	return container_of(d, struct sunxi_rsb_driver, driver);
d                  60 include/linux/superhyway.h #define to_superhyway_driver(d)	container_of((d), struct superhyway_driver, drv)
d                  76 include/linux/superhyway.h #define to_superhyway_device(d)	container_of((d), struct superhyway_device, dev)
d                  78 include/linux/superhyway.h #define superhyway_get_drvdata(d)	dev_get_drvdata(&(d)->dev)
d                  79 include/linux/superhyway.h #define superhyway_set_drvdata(d,p)	dev_set_drvdata(&(d)->dev, (p))
d                 565 include/linux/tee_drv.h #define to_tee_client_device(d) container_of(d, struct tee_client_device, dev)
d                 577 include/linux/tee_drv.h #define to_tee_client_driver(d) \
d                 578 include/linux/tee_drv.h 		container_of(d, struct tee_client_driver, driver)
d                 781 include/linux/tty.h static inline void proc_tty_register_driver(struct tty_driver *d) {}
d                 782 include/linux/tty.h static inline void proc_tty_unregister_driver(struct tty_driver *d) {}
d                 363 include/linux/tty_driver.h static inline struct tty_driver *tty_driver_kref_get(struct tty_driver *d)
d                 365 include/linux/tty_driver.h 	kref_get(&d->kref);
d                 366 include/linux/tty_driver.h 	return d;
d                 373 include/linux/uaccess.h #define unsafe_copy_to_user(d,s,l,e) unsafe_op_wrap(__copy_to_user(d,s,l),e)
d                  23 include/linux/ulpi/driver.h #define to_ulpi_dev(d) container_of(d, struct ulpi, dev)
d                  49 include/linux/ulpi/driver.h #define to_ulpi_driver(d) container_of(d, struct ulpi_driver, driver)
d                 261 include/linux/usb.h #define	to_usb_interface(d) container_of(d, struct usb_interface, dev)
d                 712 include/linux/usb.h #define	to_usb_device(d) container_of(d, struct usb_device, dev)
d                1211 include/linux/usb.h #define	to_usb_driver(d) container_of(d, struct usb_driver, drvwrap.driver)
d                1246 include/linux/usb.h #define	to_usb_device_driver(d) container_of(d, struct usb_device_driver, \
d                 631 include/linux/usb/composite.h #define DBG(d, fmt, args...) \
d                 632 include/linux/usb/composite.h 	dev_dbg(&(d)->gadget->dev , fmt , ## args)
d                 633 include/linux/usb/composite.h #define VDBG(d, fmt, args...) \
d                 634 include/linux/usb/composite.h 	dev_vdbg(&(d)->gadget->dev , fmt , ## args)
d                 635 include/linux/usb/composite.h #define ERROR(d, fmt, args...) \
d                 636 include/linux/usb/composite.h 	dev_err(&(d)->gadget->dev , fmt , ## args)
d                 637 include/linux/usb/composite.h #define WARNING(d, fmt, args...) \
d                 638 include/linux/usb/composite.h 	dev_warn(&(d)->gadget->dev , fmt , ## args)
d                 639 include/linux/usb/composite.h #define INFO(d, fmt, args...) \
d                 640 include/linux/usb/composite.h 	dev_info(&(d)->gadget->dev , fmt , ## args)
d                 117 include/linux/usb/serial.h #define to_usb_serial_port(d) container_of(d, struct usb_serial_port, dev)
d                 166 include/linux/usb/serial.h #define to_usb_serial(d) container_of(d, struct usb_serial, kref)
d                 316 include/linux/usb/serial.h #define to_usb_serial_driver(d) \
d                 317 include/linux/usb/serial.h 	container_of(d, struct usb_serial_driver, driver)
d                  35 include/linux/usb/typec_altmode.h #define to_typec_altmode(d) container_of(d, struct typec_altmode, dev)
d                 147 include/linux/usb/typec_altmode.h #define to_altmode_driver(d) container_of(d, struct typec_altmode_driver, \
d                  42 include/linux/vt_buffer.h static inline void scr_memcpyw(u16 *d, const u16 *s, unsigned int count)
d                  47 include/linux/vt_buffer.h 		scr_writew(scr_readw(s++), d++);
d                  49 include/linux/vt_buffer.h 	memcpy(d, s, count);
d                  55 include/linux/vt_buffer.h static inline void scr_memmovew(u16 *d, const u16 *s, unsigned int count)
d                  58 include/linux/vt_buffer.h 	if (d < s)
d                  59 include/linux/vt_buffer.h 		scr_memcpyw(d, s, count);
d                  62 include/linux/vt_buffer.h 		d += count;
d                  65 include/linux/vt_buffer.h 			scr_writew(scr_readw(--s), --d);
d                  68 include/linux/vt_buffer.h 	memmove(d, s, count);
d                  51 include/linux/xattr.h ssize_t vfs_listxattr(struct dentry *d, char *list, size_t size);
d                 348 include/math-emu/op-2.h     union { double d; UDItype i; } _l240, _m240, _n240, _o240,			\
d                 375 include/math-emu/op-2.h     _s240.d =							      _e240*_j240;\
d                 376 include/math-emu/op-2.h     _r240.d =						_d240*_j240 + _e240*_i240;\
d                 377 include/math-emu/op-2.h     _q240.d =				  _c240*_j240 + _d240*_i240 + _e240*_h240;\
d                 378 include/math-emu/op-2.h     _p240.d =		    _b240*_j240 + _c240*_i240 + _d240*_h240 + _e240*_g240;\
d                 379 include/math-emu/op-2.h     _o240.d = _a240*_j240 + _b240*_i240 + _c240*_h240 + _d240*_g240 + _e240*_f240;\
d                 380 include/math-emu/op-2.h     _n240.d = _a240*_i240 + _b240*_h240 + _c240*_g240 + _d240*_f240;		\
d                 381 include/math-emu/op-2.h     _m240.d = _a240*_h240 + _b240*_g240 + _c240*_f240;				\
d                 382 include/math-emu/op-2.h     _l240.d = _a240*_g240 + _b240*_f240;					\
d                 384 include/math-emu/op-2.h     _r240.d += _s240.d;								\
d                 385 include/math-emu/op-2.h     _q240.d += _r240.d;								\
d                 386 include/math-emu/op-2.h     _p240.d += _q240.d;								\
d                 387 include/math-emu/op-2.h     _o240.d += _p240.d;								\
d                 388 include/math-emu/op-2.h     _n240.d += _o240.d;								\
d                 389 include/math-emu/op-2.h     _m240.d += _n240.d;								\
d                 390 include/math-emu/op-2.h     _l240.d += _m240.d;								\
d                 391 include/math-emu/op-2.h     _k240 += _l240.d;								\
d                 392 include/math-emu/op-2.h     _s240.d -= ((_const[10]+_s240.d)-_const[10]);				\
d                 393 include/math-emu/op-2.h     _r240.d -= ((_const[9]+_r240.d)-_const[9]);					\
d                 394 include/math-emu/op-2.h     _q240.d -= ((_const[8]+_q240.d)-_const[8]);					\
d                 395 include/math-emu/op-2.h     _p240.d -= ((_const[7]+_p240.d)-_const[7]);					\
d                 396 include/math-emu/op-2.h     _o240.d += _const[7];							\
d                 397 include/math-emu/op-2.h     _n240.d += _const[6];							\
d                 398 include/math-emu/op-2.h     _m240.d += _const[5];							\
d                 399 include/math-emu/op-2.h     _l240.d += _const[4];							\
d                 400 include/math-emu/op-2.h     if (_s240.d != 0.0) _y240 = 1;						\
d                 401 include/math-emu/op-2.h     if (_r240.d != 0.0) _y240 = 1;						\
d                 402 include/math-emu/op-2.h     if (_q240.d != 0.0) _y240 = 1;						\
d                 403 include/math-emu/op-2.h     if (_p240.d != 0.0) _y240 = 1;						\
d                 877 include/math-emu/op-common.h #define _FP_DIV_HELP_imm(q, r, n, d)		\
d                 879 include/math-emu/op-common.h     q = n / d, r = n % d;			\
d                 500 include/media/cec.h 	u8 d;
d                 524 include/media/cec.h 		d = edid[offset + 2] & 0x7f;
d                 526 include/media/cec.h 		if (d <= 4)
d                 528 include/media/cec.h 		if (d > 4) {
d                 530 include/media/cec.h 			unsigned int end = offset + d;
d                 231 include/media/rc-core.h #define to_rc_dev(d) container_of(d, struct rc_dev, dev)
d                1018 include/net/bluetooth/hci_core.h static inline void hci_dev_put(struct hci_dev *d)
d                1020 include/net/bluetooth/hci_core.h 	BT_DBG("%s orig refcnt %d", d->name,
d                1021 include/net/bluetooth/hci_core.h 	       kref_read(&d->dev.kobj.kref));
d                1023 include/net/bluetooth/hci_core.h 	put_device(&d->dev);
d                1026 include/net/bluetooth/hci_core.h static inline struct hci_dev *hci_dev_hold(struct hci_dev *d)
d                1028 include/net/bluetooth/hci_core.h 	BT_DBG("%s orig refcnt %d", d->name,
d                1029 include/net/bluetooth/hci_core.h 	       kref_read(&d->dev.kobj.kref));
d                1031 include/net/bluetooth/hci_core.h 	get_device(&d->dev);
d                1032 include/net/bluetooth/hci_core.h 	return d;
d                1035 include/net/bluetooth/hci_core.h #define hci_dev_lock(d)		mutex_lock(&d->lock)
d                1036 include/net/bluetooth/hci_core.h #define hci_dev_unlock(d)	mutex_unlock(&d->lock)
d                1038 include/net/bluetooth/hci_core.h #define to_hci_dev(d) container_of(d, struct hci_dev, dev)
d                 198 include/net/bluetooth/rfcomm.h 	void (*data_ready)(struct rfcomm_dlc *d, struct sk_buff *skb);
d                 199 include/net/bluetooth/rfcomm.h 	void (*state_change)(struct rfcomm_dlc *d, int err);
d                 200 include/net/bluetooth/rfcomm.h 	void (*modem_status)(struct rfcomm_dlc *d, u8 v24_sig);
d                 236 include/net/bluetooth/rfcomm.h void rfcomm_dlc_free(struct rfcomm_dlc *d);
d                 237 include/net/bluetooth/rfcomm.h int  rfcomm_dlc_open(struct rfcomm_dlc *d, bdaddr_t *src, bdaddr_t *dst,
d                 239 include/net/bluetooth/rfcomm.h int  rfcomm_dlc_close(struct rfcomm_dlc *d, int reason);
d                 240 include/net/bluetooth/rfcomm.h int  rfcomm_dlc_send(struct rfcomm_dlc *d, struct sk_buff *skb);
d                 241 include/net/bluetooth/rfcomm.h void rfcomm_dlc_send_noerror(struct rfcomm_dlc *d, struct sk_buff *skb);
d                 242 include/net/bluetooth/rfcomm.h int  rfcomm_dlc_set_modem_status(struct rfcomm_dlc *d, u8 v24_sig);
d                 243 include/net/bluetooth/rfcomm.h int  rfcomm_dlc_get_modem_status(struct rfcomm_dlc *d, u8 *v24_sig);
d                 244 include/net/bluetooth/rfcomm.h void rfcomm_dlc_accept(struct rfcomm_dlc *d);
d                 247 include/net/bluetooth/rfcomm.h #define rfcomm_dlc_lock(d)     mutex_lock(&d->lock)
d                 248 include/net/bluetooth/rfcomm.h #define rfcomm_dlc_unlock(d)   mutex_unlock(&d->lock)
d                 250 include/net/bluetooth/rfcomm.h static inline void rfcomm_dlc_hold(struct rfcomm_dlc *d)
d                 252 include/net/bluetooth/rfcomm.h 	refcount_inc(&d->refcnt);
d                 255 include/net/bluetooth/rfcomm.h static inline void rfcomm_dlc_put(struct rfcomm_dlc *d)
d                 257 include/net/bluetooth/rfcomm.h 	if (refcount_dec_and_test(&d->refcnt))
d                 258 include/net/bluetooth/rfcomm.h 		rfcomm_dlc_free(d);
d                 261 include/net/bluetooth/rfcomm.h void __rfcomm_dlc_throttle(struct rfcomm_dlc *d);
d                 262 include/net/bluetooth/rfcomm.h void __rfcomm_dlc_unthrottle(struct rfcomm_dlc *d);
d                 264 include/net/bluetooth/rfcomm.h static inline void rfcomm_dlc_throttle(struct rfcomm_dlc *d)
d                 266 include/net/bluetooth/rfcomm.h 	if (!test_and_set_bit(RFCOMM_RX_THROTTLED, &d->flags))
d                 267 include/net/bluetooth/rfcomm.h 		__rfcomm_dlc_throttle(d);
d                 270 include/net/bluetooth/rfcomm.h static inline void rfcomm_dlc_unthrottle(struct rfcomm_dlc *d)
d                 272 include/net/bluetooth/rfcomm.h 	if (test_and_clear_bit(RFCOMM_RX_THROTTLED, &d->flags))
d                 273 include/net/bluetooth/rfcomm.h 		__rfcomm_dlc_unthrottle(d);
d                 318 include/net/bluetooth/rfcomm.h 							struct rfcomm_dlc **d);
d                  32 include/net/gen_stats.h 			  struct gnet_dump *d, int padattr);
d                  36 include/net/gen_stats.h 				 spinlock_t *lock, struct gnet_dump *d,
d                  40 include/net/gen_stats.h 			  struct gnet_dump *d,
d                  48 include/net/gen_stats.h 			     struct gnet_dump *d,
d                  51 include/net/gen_stats.h int gnet_stats_copy_rate_est(struct gnet_dump *d,
d                  53 include/net/gen_stats.h int gnet_stats_copy_queue(struct gnet_dump *d,
d                  59 include/net/gen_stats.h int gnet_stats_copy_app(struct gnet_dump *d, void *st, int len);
d                  61 include/net/gen_stats.h int gnet_stats_finish_copy(struct gnet_dump *d);
d                 911 include/net/sch_generic.h static inline int qdisc_qstats_copy(struct gnet_dump *d, struct Qdisc *sch)
d                 915 include/net/sch_generic.h 	return gnet_stats_copy_queue(d, sch->cpu_qstats, &sch->qstats, qlen);
d                1509 include/net/xfrm.h static inline void xfrm_tmpl_sort(struct xfrm_tmpl **d, struct xfrm_tmpl **s,
d                1514 include/net/xfrm.h static inline void xfrm_state_sort(struct xfrm_state **d, struct xfrm_state **s,
d                  31 include/scsi/fcoe_sysfs.h #define dev_to_ctlr(d)					\
d                  32 include/scsi/fcoe_sysfs.h 	container_of((d), struct fcoe_ctlr_device, dev)
d                 102 include/scsi/fcoe_sysfs.h #define dev_to_fcf(d)					\
d                 103 include/scsi/fcoe_sysfs.h 	container_of((d), struct fcoe_fcf_device, dev)
d                 233 include/scsi/scsi_device.h #define	to_scsi_device(d)	\
d                 234 include/scsi/scsi_device.h 	container_of(d, struct scsi_device, sdev_gendev)
d                 235 include/scsi/scsi_device.h #define	class_to_sdev(d)	\
d                 236 include/scsi/scsi_device.h 	container_of(d, struct scsi_device, sdev_dev)
d                 318 include/scsi/scsi_device.h #define to_scsi_target(d)	container_of(d, struct scsi_target, dev)
d                 463 include/scsi/scsi_device.h static inline int scsi_autopm_get_device(struct scsi_device *d) { return 0; }
d                 464 include/scsi/scsi_device.h static inline void scsi_autopm_put_device(struct scsi_device *d) {}
d                 703 include/scsi/scsi_host.h #define		class_to_shost(d)	\
d                 704 include/scsi/scsi_host.h 	container_of(d, struct Scsi_Host, shost_dev)
d                 251 include/scsi/scsi_transport_fc.h #define	dev_to_vport(d)				\
d                 252 include/scsi/scsi_transport_fc.h 	container_of(d, struct fc_vport, dev)
d                 361 include/scsi/scsi_transport_fc.h #define	dev_to_rport(d)				\
d                 362 include/scsi/scsi_transport_fc.h 	container_of(d, struct fc_rport, dev)
d                  87 include/scsi/scsi_transport_sas.h #define dev_to_phy(d) \
d                  88 include/scsi/scsi_transport_sas.h 	container_of((d), struct sas_phy, dev)
d                 103 include/scsi/scsi_transport_sas.h #define dev_to_rphy(d) \
d                 104 include/scsi/scsi_transport_sas.h 	container_of((d), struct sas_rphy, dev)
d                 162 include/scsi/scsi_transport_sas.h #define dev_to_sas_port(d) \
d                 163 include/scsi/scsi_transport_sas.h 	container_of((d), struct sas_port, dev)
d                  63 include/soc/fsl/dpaa2-io.h void dpaa2_io_down(struct dpaa2_io *d);
d                  94 include/soc/fsl/dpaa2-io.h int dpaa2_io_get_cpu(struct dpaa2_io *d);
d                 105 include/soc/fsl/dpaa2-io.h int dpaa2_io_service_pull_fq(struct dpaa2_io *d, u32 fqid,
d                 107 include/soc/fsl/dpaa2-io.h int dpaa2_io_service_pull_channel(struct dpaa2_io *d, u32 channelid,
d                 110 include/soc/fsl/dpaa2-io.h int dpaa2_io_service_enqueue_fq(struct dpaa2_io *d, u32 fqid,
d                 112 include/soc/fsl/dpaa2-io.h int dpaa2_io_service_enqueue_qd(struct dpaa2_io *d, u32 qdid, u8 prio,
d                 114 include/soc/fsl/dpaa2-io.h int dpaa2_io_service_release(struct dpaa2_io *d, u16 bpid,
d                 116 include/soc/fsl/dpaa2-io.h int dpaa2_io_service_acquire(struct dpaa2_io *d, u16 bpid,
d                 124 include/soc/fsl/dpaa2-io.h int dpaa2_io_query_fq_count(struct dpaa2_io *d, u32 fqid,
d                 126 include/soc/fsl/dpaa2-io.h int dpaa2_io_query_bp_count(struct dpaa2_io *d, u16 bpid,
d                  71 include/sound/ac97/codec.h static inline struct ac97_codec_device *to_ac97_device(struct device *d)
d                  73 include/sound/ac97/codec.h 	return container_of(d, struct ac97_codec_device, dev);
d                  76 include/sound/ac97/codec.h static inline struct ac97_codec_driver *to_ac97_driver(struct device_driver *d)
d                  78 include/sound/ac97/codec.h 	return container_of(d, struct ac97_codec_driver, driver);
d                 287 include/sound/ac97_codec.h #define to_ac97_t(d) container_of(d, struct snd_ac97, dev)
d                 272 include/sound/pcm_params.h static inline void snd_interval_copy(struct snd_interval *d, const struct snd_interval *s)
d                 274 include/sound/pcm_params.h 	*d = *s;
d                 505 include/sound/wavefront.h     wavefront_drum d;
d                  11 include/trace/events/bcache.h 	TP_PROTO(struct bcache_device *d, struct bio *bio),
d                  12 include/trace/events/bcache.h 	TP_ARGS(d, bio),
d                  26 include/trace/events/bcache.h 		__entry->orig_major	= d->disk->major;
d                  27 include/trace/events/bcache.h 		__entry->orig_minor	= d->disk->first_minor;
d                  81 include/trace/events/bcache.h 	TP_PROTO(struct bcache_device *d, struct bio *bio),
d                  82 include/trace/events/bcache.h 	TP_ARGS(d, bio)
d                  86 include/trace/events/bcache.h 	TP_PROTO(struct bcache_device *d, struct bio *bio),
d                  87 include/trace/events/bcache.h 	TP_ARGS(d, bio)
d                  60 include/trace/events/thermal_power_allocator.h 		 s64 p, s64 i, s64 d, s32 output),
d                  61 include/trace/events/thermal_power_allocator.h 	TP_ARGS(tz, err, err_integral, p, i, d, output),
d                  68 include/trace/events/thermal_power_allocator.h 		__field(s64, d           )
d                  77 include/trace/events/thermal_power_allocator.h 		__entry->d = d;
d                  83 include/trace/events/thermal_power_allocator.h 		  __entry->p, __entry->i, __entry->d, __entry->output)
d                  69 include/uapi/drm/drm_fourcc.h #define fourcc_code(a, b, c, d) ((__u32)(a) | ((__u32)(b) << 8) | \
d                  70 include/uapi/drm/drm_fourcc.h 				 ((__u32)(c) << 16) | ((__u32)(d) << 24))
d                 112 include/uapi/drm/virtgpu_drm.h 	__u32 d;
d                 119 include/uapi/linux/bcache.h 	__u64 *d = (void *) k;
d                 121 include/uapi/linux/bcache.h 	return (struct bkey *) (d + bkey_u64s(k));
d                 126 include/uapi/linux/bcache.h 	__u64 *d = (void *) k;
d                 128 include/uapi/linux/bcache.h 	return (struct bkey *) (d + nr_keys);
d                 207 include/uapi/linux/bcache.h 	__u64			d[SB_JOURNAL_BUCKETS];	/* journal buckets */
d                 295 include/uapi/linux/bcache.h 		__u64		d[0];
d                 360 include/uapi/linux/bcache.h 		__u64		d[0];
d                 814 include/uapi/linux/fuse.h #define FUSE_DIRENT_SIZE(d) \
d                 815 include/uapi/linux/fuse.h 	FUSE_DIRENT_ALIGN(FUSE_NAME_OFFSET + (d)->namelen)
d                 824 include/uapi/linux/fuse.h #define FUSE_DIRENTPLUS_SIZE(d) \
d                 825 include/uapi/linux/fuse.h 	FUSE_DIRENT_ALIGN(FUSE_NAME_OFFSET_DIRENTPLUS + (d)->dirent.namelen)
d                 205 include/uapi/linux/jffs2.h 	struct jffs2_raw_dirent d;
d                  13 include/uapi/linux/kernel.h #define __KERNEL_DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))
d                  98 include/uapi/linux/map_to_7segment.h #define _SEG7(l,a,b,c,d,e,f,g)	\
d                  99 include/uapi/linux/map_to_7segment.h       (	a<<BIT_SEG7_A |	b<<BIT_SEG7_B |	c<<BIT_SEG7_C |	d<<BIT_SEG7_D |	\
d                 174 include/uapi/linux/mic_common.h #define mic_aligned_desc_size(d) __mic_align(mic_desc_size(d), 8)
d                 439 include/uapi/linux/pkt_sched.h 	__u32	d;		/* x-projection of the first segment in us */
d                 139 include/uapi/linux/ppp_defs.h     struct compstat	d;	/* packet decompression statistics */
d                  19 include/uapi/linux/romfs_fs.h #define __mk4(a,b,c,d) cpu_to_be32(__mkl(__mkw(a,b),__mkw(c,d)))
d                  81 include/uapi/linux/videodev2.h #define v4l2_fourcc(a, b, c, d)\
d                  82 include/uapi/linux/videodev2.h 	((__u32)(a) | ((__u32)(b) << 8) | ((__u32)(c) << 16) | ((__u32)(d) << 24))
d                  83 include/uapi/linux/videodev2.h #define v4l2_fourcc_be(a, b, c, d)	(v4l2_fourcc(a, b, c, d) | (1U << 31))
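
Both fourcc_code() in drm_fourcc.h and v4l2_fourcc() above pack four ASCII bytes into one 32-bit tag, first character in the low byte, with v4l2_fourcc_be() setting bit 31 to flag a big-endian variant. A small stand-alone sketch of that packing; the helper name and main() are illustrative, only the shift layout comes from the lines above:

	#include <stdio.h>
	#include <stdint.h>

	static uint32_t pack_fourcc(char a, char b, char c, char d)
	{
		return (uint32_t)a | ((uint32_t)b << 8) |
		       ((uint32_t)c << 16) | ((uint32_t)d << 24);
	}

	int main(void)
	{
		/* 'Y','U','Y','V' -> 0x56595559: 'Y' (0x59) lands in the low byte */
		printf("0x%08x\n", pack_fourcc('Y', 'U', 'Y', 'V'));
		return 0;
	}
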
d                 214 include/uapi/linux/virtio_gpu.h 	__le32 w, h, d;
d                 213 include/uapi/sound/asequencer.h 	unsigned char d[12];	/* 8 bit value */
d                 218 include/uapi/sound/asequencer.h 	unsigned int d[3];	/* 32 bit value */
d                 930 include/uapi/sound/asound.h 		unsigned short d[4];		/* dimensions */
d                 741 include/video/omapfb_dss.h #define for_each_dss_dev(d) while ((d = omap_dss_get_next_device(d)) != NULL)
d                 234 include/video/pm3fb.h 	#define PM3VideoOverlayShrinkXDelta_DELTA(s,d)	\
d                 235 include/video/pm3fb.h 		((((s) << 16)/(d)) & 0x0ffffff0)
d                 238 include/video/pm3fb.h 	#define PM3VideoOverlayZoomXDelta_DELTA(s,d)	\
d                 239 include/video/pm3fb.h 		((((s) << 16)/(d)) & 0x0001fff0)
d                 242 include/video/pm3fb.h 	#define PM3VideoOverlayYDelta_DELTA(s,d)	\
d                 243 include/video/pm3fb.h 		((((s) << 16)/(d)) & 0x0ffffff0)
d                 301 kernel/audit.h #define audit_watch_compare(w, i, d) 0
d                 307 kernel/audit.h #define audit_mark_compare(m, i, d) 0
d                 351 kernel/audit_watch.c 	struct dentry *d = kern_path_locked(watch->path, parent);
d                 352 kernel/audit_watch.c 	if (IS_ERR(d))
d                 353 kernel/audit_watch.c 		return PTR_ERR(d);
d                 354 kernel/audit_watch.c 	if (d_is_positive(d)) {
d                 356 kernel/audit_watch.c 		watch->dev = d->d_sb->s_dev;
d                 357 kernel/audit_watch.c 		watch->ino = d_backing_inode(d)->i_ino;
d                 360 kernel/audit_watch.c 	dput(d);
d                 110 kernel/auditsc.c 	struct audit_aux_data	d;
d                 121 kernel/auditsc.c 	struct audit_aux_data	d;
d                1768 kernel/auditsc.c 	const struct dentry *d, *parent;
d                1778 kernel/auditsc.c 	d = dentry;
d                1782 kernel/auditsc.c 		struct inode *inode = d_backing_inode(d);
d                1793 kernel/auditsc.c 		parent = d->d_parent;
d                1794 kernel/auditsc.c 		if (parent == d)
d                1796 kernel/auditsc.c 		d = parent;
d                2413 kernel/auditsc.c 		axp->d.type = AUDIT_OBJ_PID;
d                2414 kernel/auditsc.c 		axp->d.next = ctx->aux_pids;
d                2452 kernel/auditsc.c 	ax->d.type = AUDIT_BPRM_FCAPS;
d                2453 kernel/auditsc.c 	ax->d.next = context->aux;
d                  85 kernel/delayacct.c int __delayacct_add_tsk(struct taskstats *d, struct task_struct *tsk)
d                  93 kernel/delayacct.c 	tmp = (s64)d->cpu_run_real_total;
d                  95 kernel/delayacct.c 	d->cpu_run_real_total = (tmp < (s64)d->cpu_run_real_total) ? 0 : tmp;
d                  98 kernel/delayacct.c 	tmp = (s64)d->cpu_scaled_run_real_total;
d                 100 kernel/delayacct.c 	d->cpu_scaled_run_real_total =
d                 101 kernel/delayacct.c 		(tmp < (s64)d->cpu_scaled_run_real_total) ? 0 : tmp;
d                 111 kernel/delayacct.c 	d->cpu_count += t1;
d                 113 kernel/delayacct.c 	tmp = (s64)d->cpu_delay_total + t2;
d                 114 kernel/delayacct.c 	d->cpu_delay_total = (tmp < (s64)d->cpu_delay_total) ? 0 : tmp;
d                 116 kernel/delayacct.c 	tmp = (s64)d->cpu_run_virtual_total + t3;
d                 117 kernel/delayacct.c 	d->cpu_run_virtual_total =
d                 118 kernel/delayacct.c 		(tmp < (s64)d->cpu_run_virtual_total) ?	0 : tmp;
d                 123 kernel/delayacct.c 	tmp = d->blkio_delay_total + tsk->delays->blkio_delay;
d                 124 kernel/delayacct.c 	d->blkio_delay_total = (tmp < d->blkio_delay_total) ? 0 : tmp;
d                 125 kernel/delayacct.c 	tmp = d->swapin_delay_total + tsk->delays->swapin_delay;
d                 126 kernel/delayacct.c 	d->swapin_delay_total = (tmp < d->swapin_delay_total) ? 0 : tmp;
d                 127 kernel/delayacct.c 	tmp = d->freepages_delay_total + tsk->delays->freepages_delay;
d                 128 kernel/delayacct.c 	d->freepages_delay_total = (tmp < d->freepages_delay_total) ? 0 : tmp;
d                 129 kernel/delayacct.c 	tmp = d->thrashing_delay_total + tsk->delays->thrashing_delay;
d                 130 kernel/delayacct.c 	d->thrashing_delay_total = (tmp < d->thrashing_delay_total) ? 0 : tmp;
d                 131 kernel/delayacct.c 	d->blkio_count += tsk->delays->blkio_count;
d                 132 kernel/delayacct.c 	d->swapin_count += tsk->delays->swapin_count;
d                 133 kernel/delayacct.c 	d->freepages_count += tsk->delays->freepages_count;
d                 134 kernel/delayacct.c 	d->thrashing_count += tsk->delays->thrashing_count;
d                 374 kernel/events/uprobes.c __update_ref_ctr(struct mm_struct *mm, unsigned long vaddr, short d)
d                 382 kernel/events/uprobes.c 	if (!vaddr || !d)
d                 398 kernel/events/uprobes.c 	if (unlikely(*ptr + d < 0)) {
d                 400 kernel/events/uprobes.c 			"curr val: %d, delta: %d\n", vaddr, *ptr, d);
d                 405 kernel/events/uprobes.c 	*ptr += d;
d                 414 kernel/events/uprobes.c 				struct mm_struct *mm, short d)
d                 418 kernel/events/uprobes.c 		d > 0 ? "increment" : "decrement", uprobe->inode->i_ino,
d                 424 kernel/events/uprobes.c 			  short d)
d                 434 kernel/events/uprobes.c 		ret = __update_ref_ctr(mm, rc_vaddr, d);
d                 436 kernel/events/uprobes.c 			update_ref_ctr_warn(uprobe, mm, d);
d                 438 kernel/events/uprobes.c 		if (d > 0)
d                 443 kernel/events/uprobes.c 	if (d > 0)
d                 196 kernel/irq/chip.c 	struct irq_data *d = irq_desc_get_irq_data(desc);
d                 198 kernel/irq/chip.c 	if (!irqd_affinity_is_managed(d))
d                 201 kernel/irq/chip.c 	irqd_clr_managed_shutdown(d);
d                 224 kernel/irq/chip.c 	if (WARN_ON(irq_domain_activate_irq(d, false)))
d                 238 kernel/irq/chip.c 	struct irq_data *d = irq_desc_get_irq_data(desc);
d                 242 kernel/irq/chip.c 	WARN_ON_ONCE(!irqd_is_activated(d));
d                 244 kernel/irq/chip.c 	if (d->chip->irq_startup) {
d                 245 kernel/irq/chip.c 		ret = d->chip->irq_startup(d);
d                 257 kernel/irq/chip.c 	struct irq_data *d = irq_desc_get_irq_data(desc);
d                 258 kernel/irq/chip.c 	struct cpumask *aff = irq_data_get_affinity_mask(d);
d                 263 kernel/irq/chip.c 	if (irqd_is_started(d)) {
d                 272 kernel/irq/chip.c 			irq_do_set_affinity(d, aff, false);
d                 276 kernel/irq/chip.c 			irqd_set_managed_shutdown(d);
d                 288 kernel/irq/chip.c 	struct irq_data *d = irq_desc_get_irq_data(desc);
d                 290 kernel/irq/chip.c 	if (!irqd_affinity_is_managed(d))
d                 291 kernel/irq/chip.c 		return irq_domain_activate_irq(d, false);
d                  19 kernel/irq/cpuhotplug.c static inline bool irq_needs_fixup(struct irq_data *d)
d                  21 kernel/irq/cpuhotplug.c 	const struct cpumask *m = irq_data_get_effective_affinity_mask(d);
d                  31 kernel/irq/cpuhotplug.c 		m = irq_data_get_affinity_mask(d);
d                  45 kernel/irq/cpuhotplug.c 			cpumask_pr_args(m), d->irq, cpu);
d                  54 kernel/irq/cpuhotplug.c 	struct irq_data *d = irq_desc_get_irq_data(desc);
d                  55 kernel/irq/cpuhotplug.c 	struct irq_chip *chip = irq_data_get_irq_chip(d);
d                  56 kernel/irq/cpuhotplug.c 	bool maskchip = !irq_can_move_pcntxt(d) && !irqd_irq_masked(d);
d                  67 kernel/irq/cpuhotplug.c 		pr_debug("IRQ %u: Unable to migrate away\n", d->irq);
d                  80 kernel/irq/cpuhotplug.c 	if (irqd_is_per_cpu(d) || !irqd_is_started(d) || !irq_needs_fixup(d)) {
d                 106 kernel/irq/cpuhotplug.c 		affinity = irq_data_get_affinity_mask(d);
d                 110 kernel/irq/cpuhotplug.c 		chip->irq_mask(d);
d                 117 kernel/irq/cpuhotplug.c 		if (irqd_affinity_is_managed(d)) {
d                 118 kernel/irq/cpuhotplug.c 			irqd_set_managed_shutdown(d);
d                 131 kernel/irq/cpuhotplug.c 	err = irq_do_set_affinity(d, affinity, false);
d                 134 kernel/irq/cpuhotplug.c 				    d->irq, err);
d                 139 kernel/irq/cpuhotplug.c 		chip->irq_unmask(d);
d                  25 kernel/irq/generic-chip.c void irq_gc_noop(struct irq_data *d)
d                  36 kernel/irq/generic-chip.c void irq_gc_mask_disable_reg(struct irq_data *d)
d                  38 kernel/irq/generic-chip.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  39 kernel/irq/generic-chip.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                  40 kernel/irq/generic-chip.c 	u32 mask = d->mask;
d                  55 kernel/irq/generic-chip.c void irq_gc_mask_set_bit(struct irq_data *d)
d                  57 kernel/irq/generic-chip.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  58 kernel/irq/generic-chip.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                  59 kernel/irq/generic-chip.c 	u32 mask = d->mask;
d                  75 kernel/irq/generic-chip.c void irq_gc_mask_clr_bit(struct irq_data *d)
d                  77 kernel/irq/generic-chip.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  78 kernel/irq/generic-chip.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                  79 kernel/irq/generic-chip.c 	u32 mask = d->mask;
d                  95 kernel/irq/generic-chip.c void irq_gc_unmask_enable_reg(struct irq_data *d)
d                  97 kernel/irq/generic-chip.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                  98 kernel/irq/generic-chip.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                  99 kernel/irq/generic-chip.c 	u32 mask = d->mask;
d                 111 kernel/irq/generic-chip.c void irq_gc_ack_set_bit(struct irq_data *d)
d                 113 kernel/irq/generic-chip.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 114 kernel/irq/generic-chip.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                 115 kernel/irq/generic-chip.c 	u32 mask = d->mask;
d                 127 kernel/irq/generic-chip.c void irq_gc_ack_clr_bit(struct irq_data *d)
d                 129 kernel/irq/generic-chip.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 130 kernel/irq/generic-chip.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                 131 kernel/irq/generic-chip.c 	u32 mask = ~d->mask;
d                 150 kernel/irq/generic-chip.c void irq_gc_mask_disable_and_ack_set(struct irq_data *d)
d                 152 kernel/irq/generic-chip.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 153 kernel/irq/generic-chip.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                 154 kernel/irq/generic-chip.c 	u32 mask = d->mask;
d                 167 kernel/irq/generic-chip.c void irq_gc_eoi(struct irq_data *d)
d                 169 kernel/irq/generic-chip.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 170 kernel/irq/generic-chip.c 	struct irq_chip_type *ct = irq_data_get_chip_type(d);
d                 171 kernel/irq/generic-chip.c 	u32 mask = d->mask;
d                 187 kernel/irq/generic-chip.c int irq_gc_set_wake(struct irq_data *d, unsigned int on)
d                 189 kernel/irq/generic-chip.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 190 kernel/irq/generic-chip.c 	u32 mask = d->mask;
d                 282 kernel/irq/generic-chip.c int __irq_alloc_domain_generic_chips(struct irq_domain *d, int irqs_per_chip,
d                 294 kernel/irq/generic-chip.c 	if (d->gc)
d                 297 kernel/irq/generic-chip.c 	numchips = DIV_ROUND_UP(d->revmap_size, irqs_per_chip);
d                 313 kernel/irq/generic-chip.c 	d->gc = dgc;
d                 323 kernel/irq/generic-chip.c 		gc->domain = d;
d                 340 kernel/irq/generic-chip.c __irq_get_domain_generic_chip(struct irq_domain *d, unsigned int hw_irq)
d                 342 kernel/irq/generic-chip.c 	struct irq_domain_chip_generic *dgc = d->gc;
d                 359 kernel/irq/generic-chip.c irq_get_domain_generic_chip(struct irq_domain *d, unsigned int hw_irq)
d                 361 kernel/irq/generic-chip.c 	struct irq_chip_generic *gc = __irq_get_domain_generic_chip(d, hw_irq);
d                 377 kernel/irq/generic-chip.c int irq_map_generic_chip(struct irq_domain *d, unsigned int virq,
d                 380 kernel/irq/generic-chip.c 	struct irq_data *data = irq_domain_get_irq_data(d, virq);
d                 381 kernel/irq/generic-chip.c 	struct irq_domain_chip_generic *dgc = d->gc;
d                 388 kernel/irq/generic-chip.c 	gc = __irq_get_domain_generic_chip(d, hw_irq);
d                 422 kernel/irq/generic-chip.c 	irq_domain_set_info(d, virq, hw_irq, chip, gc, ct->handler, NULL, NULL);
d                 427 kernel/irq/generic-chip.c static void irq_unmap_generic_chip(struct irq_domain *d, unsigned int virq)
d                 429 kernel/irq/generic-chip.c 	struct irq_data *data = irq_domain_get_irq_data(d, virq);
d                 430 kernel/irq/generic-chip.c 	struct irq_domain_chip_generic *dgc = d->gc;
d                 435 kernel/irq/generic-chip.c 	gc = irq_get_domain_generic_chip(d, hw_irq);
d                 442 kernel/irq/generic-chip.c 	irq_domain_set_info(d, virq, hw_irq, &no_irq_chip, NULL, NULL, NULL,
d                 489 kernel/irq/generic-chip.c 			struct irq_data *d = irq_get_irq_data(i);
d                 492 kernel/irq/generic-chip.c 				chip->irq_calc_mask(d);
d                 494 kernel/irq/generic-chip.c 				d->mask = 1 << (i - gc->irq_base);
d                 511 kernel/irq/generic-chip.c int irq_setup_alt_chip(struct irq_data *d, unsigned int type)
d                 513 kernel/irq/generic-chip.c 	struct irq_chip_generic *gc = irq_data_get_irq_chip_data(d);
d                 519 kernel/irq/generic-chip.c 			d->chip = &ct->chip;
d                 520 kernel/irq/generic-chip.c 			irq_data_to_desc(d)->handle_irq = ct->handler;
d                 193 kernel/irq/internals.h #define __irqd_to_state(d) ACCESS_PRIVATE((d)->common, state_use_accessors)
d                 195 kernel/irq/internals.h static inline unsigned int irqd_get(struct irq_data *d)
d                 197 kernel/irq/internals.h 	return __irqd_to_state(d);
d                 203 kernel/irq/internals.h static inline void irqd_set_move_pending(struct irq_data *d)
d                 205 kernel/irq/internals.h 	__irqd_to_state(d) |= IRQD_SETAFFINITY_PENDING;
d                 208 kernel/irq/internals.h static inline void irqd_clr_move_pending(struct irq_data *d)
d                 210 kernel/irq/internals.h 	__irqd_to_state(d) &= ~IRQD_SETAFFINITY_PENDING;
d                 213 kernel/irq/internals.h static inline void irqd_set_managed_shutdown(struct irq_data *d)
d                 215 kernel/irq/internals.h 	__irqd_to_state(d) |= IRQD_MANAGED_SHUTDOWN;
d                 218 kernel/irq/internals.h static inline void irqd_clr_managed_shutdown(struct irq_data *d)
d                 220 kernel/irq/internals.h 	__irqd_to_state(d) &= ~IRQD_MANAGED_SHUTDOWN;
d                 223 kernel/irq/internals.h static inline void irqd_clear(struct irq_data *d, unsigned int mask)
d                 225 kernel/irq/internals.h 	__irqd_to_state(d) &= ~mask;
d                 228 kernel/irq/internals.h static inline void irqd_set(struct irq_data *d, unsigned int mask)
d                 230 kernel/irq/internals.h 	__irqd_to_state(d) |= mask;
d                 233 kernel/irq/internals.h static inline bool irqd_has_set(struct irq_data *d, unsigned int mask)
d                 235 kernel/irq/internals.h 	return __irqd_to_state(d) & mask;
d                 486 kernel/irq/internals.h static inline void irq_add_debugfs_entry(unsigned int irq, struct irq_desc *d)
d                 489 kernel/irq/internals.h static inline void irq_remove_debugfs_entry(struct irq_desc *d)
d                  38 kernel/irq/irqdomain.c static void debugfs_add_domain_dir(struct irq_domain *d);
d                  39 kernel/irq/irqdomain.c static void debugfs_remove_domain_dir(struct irq_domain *d);
d                  41 kernel/irq/irqdomain.c static inline void debugfs_add_domain_dir(struct irq_domain *d) { }
d                  42 kernel/irq/irqdomain.c static inline void debugfs_remove_domain_dir(struct irq_domain *d) { }
d                 730 kernel/irq/irqdomain.c static int irq_domain_translate(struct irq_domain *d,
d                 735 kernel/irq/irqdomain.c 	if (d->ops->translate)
d                 736 kernel/irq/irqdomain.c 		return d->ops->translate(d, fwspec, hwirq, type);
d                 738 kernel/irq/irqdomain.c 	if (d->ops->xlate)
d                 739 kernel/irq/irqdomain.c 		return d->ops->xlate(d, to_of_node(fwspec->fwnode),
d                 927 kernel/irq/irqdomain.c int irq_domain_xlate_onecell(struct irq_domain *d, struct device_node *ctrlr,
d                 946 kernel/irq/irqdomain.c int irq_domain_xlate_twocell(struct irq_domain *d, struct device_node *ctrlr,
d                 953 kernel/irq/irqdomain.c 	return irq_domain_translate_twocell(d, &fwspec, out_hwirq, out_type);
d                 968 kernel/irq/irqdomain.c int irq_domain_xlate_onetwocell(struct irq_domain *d,
d                 997 kernel/irq/irqdomain.c int irq_domain_translate_twocell(struct irq_domain *d,
d                1376 kernel/irq/irqdomain.c static void irq_domain_fix_revmap(struct irq_data *d)
d                1380 kernel/irq/irqdomain.c 	if (d->hwirq < d->domain->revmap_size)
d                1384 kernel/irq/irqdomain.c 	mutex_lock(&d->domain->revmap_tree_mutex);
d                1385 kernel/irq/irqdomain.c 	slot = radix_tree_lookup_slot(&d->domain->revmap_tree, d->hwirq);
d                1387 kernel/irq/irqdomain.c 		radix_tree_replace_slot(&d->domain->revmap_tree, slot, d);
d                1388 kernel/irq/irqdomain.c 	mutex_unlock(&d->domain->revmap_tree_mutex);
d                1741 kernel/irq/irqdomain.c irq_domain_debug_show_one(struct seq_file *m, struct irq_domain *d, int ind)
d                1743 kernel/irq/irqdomain.c 	seq_printf(m, "%*sname:   %s\n", ind, "", d->name);
d                1745 kernel/irq/irqdomain.c 		   d->revmap_size + d->revmap_direct_max_irq);
d                1746 kernel/irq/irqdomain.c 	seq_printf(m, "%*smapped: %u\n", ind + 1, "", d->mapcount);
d                1747 kernel/irq/irqdomain.c 	seq_printf(m, "%*sflags:  0x%08x\n", ind +1 , "", d->flags);
d                1748 kernel/irq/irqdomain.c 	if (d->ops && d->ops->debug_show)
d                1749 kernel/irq/irqdomain.c 		d->ops->debug_show(m, d, NULL, ind + 1);
d                1751 kernel/irq/irqdomain.c 	if (!d->parent)
d                1753 kernel/irq/irqdomain.c 	seq_printf(m, "%*sparent: %s\n", ind + 1, "", d->parent->name);
d                1754 kernel/irq/irqdomain.c 	irq_domain_debug_show_one(m, d->parent, ind + 4);
d                1760 kernel/irq/irqdomain.c 	struct irq_domain *d = m->private;
d                1763 kernel/irq/irqdomain.c 	if (!d) {
d                1766 kernel/irq/irqdomain.c 		d = irq_default_domain;
d                1768 kernel/irq/irqdomain.c 	irq_domain_debug_show_one(m, d, 0);
d                1773 kernel/irq/irqdomain.c static void debugfs_add_domain_dir(struct irq_domain *d)
d                1775 kernel/irq/irqdomain.c 	if (!d->name || !domain_dir || d->debugfs_file)
d                1777 kernel/irq/irqdomain.c 	d->debugfs_file = debugfs_create_file(d->name, 0444, domain_dir, d,
d                1781 kernel/irq/irqdomain.c static void debugfs_remove_domain_dir(struct irq_domain *d)
d                1783 kernel/irq/irqdomain.c 	debugfs_remove(d->debugfs_file);
d                1784 kernel/irq/irqdomain.c 	d->debugfs_file = NULL;
d                1789 kernel/irq/irqdomain.c 	struct irq_domain *d;
d                1796 kernel/irq/irqdomain.c 	list_for_each_entry(d, &irq_domain_list, link)
d                1797 kernel/irq/irqdomain.c 		debugfs_add_domain_dir(d);
d                1176 kernel/irq/manage.c 	struct irq_data *d = &desc->irq_data;
d                1177 kernel/irq/manage.c 	struct irq_chip *c = d->chip;
d                1179 kernel/irq/manage.c 	return c->irq_request_resources ? c->irq_request_resources(d) : 0;
d                1184 kernel/irq/manage.c 	struct irq_data *d = &desc->irq_data;
d                1185 kernel/irq/manage.c 	struct irq_chip *c = d->chip;
d                1188 kernel/irq/manage.c 		c->irq_release_resources(d);
d                1193 kernel/irq/manage.c 	struct irq_data *d = irq_desc_get_irq_data(desc);
d                1197 kernel/irq/manage.c 	if (d->parent_data)
d                1201 kernel/irq/manage.c 	if (d->chip->irq_bus_lock || d->chip->irq_bus_sync_unlock)
d                1204 kernel/irq/manage.c 	return d->chip->flags & IRQCHIP_SUPPORTS_NMI;
d                1209 kernel/irq/manage.c 	struct irq_data *d = irq_desc_get_irq_data(desc);
d                1210 kernel/irq/manage.c 	struct irq_chip *c = d->chip;
d                1212 kernel/irq/manage.c 	return c->irq_nmi_setup ? c->irq_nmi_setup(d) : -EINVAL;
d                1217 kernel/irq/manage.c 	struct irq_data *d = irq_desc_get_irq_data(desc);
d                1218 kernel/irq/manage.c 	struct irq_chip *c = d->chip;
d                1221 kernel/irq/manage.c 		c->irq_nmi_teardown(d);
d                  22 kernel/locking/rtmutex.h #define debug_rt_mutex_deadlock(d, a ,l)		do { } while (0)
d                  32 kernel/power/energy_model.c 	struct dentry *d;
d                  38 kernel/power/energy_model.c 	d = debugfs_create_dir(name, pd);
d                  39 kernel/power/energy_model.c 	debugfs_create_ulong("frequency", 0444, d, &cs->frequency);
d                  40 kernel/power/energy_model.c 	debugfs_create_ulong("power", 0444, d, &cs->power);
d                  41 kernel/power/energy_model.c 	debugfs_create_ulong("cost", 0444, d, &cs->cost);
d                  54 kernel/power/energy_model.c 	struct dentry *d;
d                  61 kernel/power/energy_model.c 	d = debugfs_create_dir(name, rootdir);
d                  63 kernel/power/energy_model.c 	debugfs_create_file("cpus", 0444, d, pd->cpus, &em_debug_cpus_fops);
d                  67 kernel/power/energy_model.c 		em_debug_create_cs(&pd->table[i], d);
d                 532 kernel/power/qos.c static int register_pm_qos_misc(struct pm_qos_object *qos, struct dentry *d)
d                 538 kernel/power/qos.c 	debugfs_create_file(qos->name, S_IRUGO, d, (void *)qos,
d                 634 kernel/power/qos.c 	struct dentry *d;
d                 638 kernel/power/qos.c 	d = debugfs_create_dir("pm_qos", NULL);
d                 641 kernel/power/qos.c 		ret = register_pm_qos_misc(pm_qos_array[i], d);
d                 593 kernel/power/swap.c 	struct crc_data *d = data;
d                 597 kernel/power/swap.c 		wait_event(d->go, atomic_read(&d->ready) ||
d                 600 kernel/power/swap.c 			d->thr = NULL;
d                 601 kernel/power/swap.c 			atomic_set(&d->stop, 1);
d                 602 kernel/power/swap.c 			wake_up(&d->done);
d                 605 kernel/power/swap.c 		atomic_set(&d->ready, 0);
d                 607 kernel/power/swap.c 		for (i = 0; i < d->run_threads; i++)
d                 608 kernel/power/swap.c 			*d->crc32 = crc32_le(*d->crc32,
d                 609 kernel/power/swap.c 			                     d->unc[i], *d->unc_len[i]);
d                 610 kernel/power/swap.c 		atomic_set(&d->stop, 1);
d                 611 kernel/power/swap.c 		wake_up(&d->done);
d                 637 kernel/power/swap.c 	struct cmp_data *d = data;
d                 640 kernel/power/swap.c 		wait_event(d->go, atomic_read(&d->ready) ||
d                 643 kernel/power/swap.c 			d->thr = NULL;
d                 644 kernel/power/swap.c 			d->ret = -1;
d                 645 kernel/power/swap.c 			atomic_set(&d->stop, 1);
d                 646 kernel/power/swap.c 			wake_up(&d->done);
d                 649 kernel/power/swap.c 		atomic_set(&d->ready, 0);
d                 651 kernel/power/swap.c 		d->ret = lzo1x_1_compress(d->unc, d->unc_len,
d                 652 kernel/power/swap.c 		                          d->cmp + LZO_HEADER, &d->cmp_len,
d                 653 kernel/power/swap.c 		                          d->wrk);
d                 654 kernel/power/swap.c 		atomic_set(&d->stop, 1);
d                 655 kernel/power/swap.c 		wake_up(&d->done);
d                1121 kernel/power/swap.c 	struct dec_data *d = data;
d                1124 kernel/power/swap.c 		wait_event(d->go, atomic_read(&d->ready) ||
d                1127 kernel/power/swap.c 			d->thr = NULL;
d                1128 kernel/power/swap.c 			d->ret = -1;
d                1129 kernel/power/swap.c 			atomic_set(&d->stop, 1);
d                1130 kernel/power/swap.c 			wake_up(&d->done);
d                1133 kernel/power/swap.c 		atomic_set(&d->ready, 0);
d                1135 kernel/power/swap.c 		d->unc_len = LZO_UNC_SIZE;
d                1136 kernel/power/swap.c 		d->ret = lzo1x_decompress_safe(d->cmp + LZO_HEADER, d->cmp_len,
d                1137 kernel/power/swap.c 		                               d->unc, &d->unc_len);
d                1139 kernel/power/swap.c 			flush_icache_range((unsigned long)d->unc,
d                1140 kernel/power/swap.c 					   (unsigned long)d->unc + d->unc_len);
d                1142 kernel/power/swap.c 		atomic_set(&d->stop, 1);
d                1143 kernel/power/swap.c 		wake_up(&d->done);
d                3424 kernel/rcu/tree.c 	ulong d;
d                3435 kernel/rcu/tree.c 	d = RCU_JIFFIES_TILL_FORCE_QS + nr_cpu_ids / RCU_JIFFIES_FQS_DIV;
d                3437 kernel/rcu/tree.c 		jiffies_till_first_fqs = d;
d                3439 kernel/rcu/tree.c 		jiffies_till_next_fqs = d;
d                7540 kernel/sched/core.c 			       struct cfs_schedulable_data *d)
d                7544 kernel/sched/core.c 	if (tg == d->tg) {
d                7545 kernel/sched/core.c 		period = d->period;
d                7546 kernel/sched/core.c 		quota = d->quota;
d                7561 kernel/sched/core.c 	struct cfs_schedulable_data *d = data;
d                7570 kernel/sched/core.c 		quota = normalize_cfs_quota(tg, d);
d                3866 kernel/sched/fair.c 	s64 d = se->vruntime - cfs_rq->min_vruntime;
d                3868 kernel/sched/fair.c 	if (d < 0)
d                3869 kernel/sched/fair.c 		d = -d;
d                3871 kernel/sched/fair.c 	if (d > 3*sysctl_sched_latency)
d                2429 kernel/sched/rt.c 	struct rt_schedulable_data *d = data;
d                2437 kernel/sched/rt.c 	if (tg == d->tg) {
d                2438 kernel/sched/rt.c 		period = d->rt_period;
d                2439 kernel/sched/rt.c 		runtime = d->rt_runtime;
d                2469 kernel/sched/rt.c 		if (child == d->tg) {
d                2470 kernel/sched/rt.c 			period = d->rt_period;
d                2471 kernel/sched/rt.c 			runtime = d->rt_runtime;
d                1220 kernel/sched/topology.c static void __free_domain_allocs(struct s_data *d, enum s_alloc what,
d                1225 kernel/sched/topology.c 		if (!atomic_read(&d->rd->refcount))
d                1226 kernel/sched/topology.c 			free_rootdomain(&d->rd->rcu);
d                1229 kernel/sched/topology.c 		free_percpu(d->sd);
d                1240 kernel/sched/topology.c __visit_domain_allocation_hell(struct s_data *d, const struct cpumask *cpu_map)
d                1242 kernel/sched/topology.c 	memset(d, 0, sizeof(*d));
d                1246 kernel/sched/topology.c 	d->sd = alloc_percpu(struct sched_domain *);
d                1247 kernel/sched/topology.c 	if (!d->sd)
d                1249 kernel/sched/topology.c 	d->rd = alloc_rootdomain();
d                1250 kernel/sched/topology.c 	if (!d->rd)
d                1989 kernel/sched/topology.c 	struct s_data d;
d                1998 kernel/sched/topology.c 	alloc_state = __visit_domain_allocation_hell(&d, cpu_map);
d                2023 kernel/sched/topology.c 				*per_cpu_ptr(d.sd, i) = sd;
d                2033 kernel/sched/topology.c 		for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) {
d                2050 kernel/sched/topology.c 		for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) {
d                2060 kernel/sched/topology.c 		sd = *per_cpu_ptr(d.sd, i);
d                2063 kernel/sched/topology.c 		if (rq->cpu_capacity_orig > READ_ONCE(d.rd->max_cpu_capacity))
d                2064 kernel/sched/topology.c 			WRITE_ONCE(d.rd->max_cpu_capacity, rq->cpu_capacity_orig);
d                2066 kernel/sched/topology.c 		cpu_attach_domain(sd, d.rd, i);
d                2080 kernel/sched/topology.c 	__free_domain_allocs(&d, alloc_state, cpu_map);
d                  79 kernel/time/tick-internal.h static inline void tick_do_periodic_broadcast(struct clock_event_device *d) { }
d                1092 kernel/trace/trace.h static inline void ftrace_init_tracefs(struct trace_array *tr, struct dentry *d) { }
d                1093 kernel/trace/trace.h static inline void ftrace_init_tracefs_toplevel(struct trace_array *tr, struct dentry *d) { }
d                2107 kernel/trace/trace_events_filter.c 	.rec    = { .a = va, .b = vb, .c = vc, .d = vd, \
d                2221 kernel/trace/trace_events_filter.c 		struct test_filter_data_t *d = &test_filter_data[i];
d                2225 kernel/trace/trace_events_filter.c 				    d->filter, false, &filter);
d                2229 kernel/trace/trace_events_filter.c 			       d->filter, err);
d                2241 kernel/trace/trace_events_filter.c 		if (*d->not_visited)
d                2242 kernel/trace/trace_events_filter.c 			update_pred_fn(filter, d->not_visited);
d                2245 kernel/trace/trace_events_filter.c 		err = filter_match_preds(filter, &d->rec);
d                2255 kernel/trace/trace_events_filter.c 			       d->filter);
d                2259 kernel/trace/trace_events_filter.c 		if (err != d->match) {
d                2262 kernel/trace/trace_events_filter.c 			       d->filter, d->match);
d                  12 kernel/trace/trace_events_filter_test.h 	TP_PROTO(int a, int b, int c, int d, int e, int f, int g, int h),
d                  14 kernel/trace/trace_events_filter_test.h 	TP_ARGS(a, b, c, d, e, f, g, h),
d                  20 kernel/trace/trace_events_filter_test.h 		__field(int, d)
d                  31 kernel/trace/trace_events_filter_test.h 		__entry->d = d;
d                  39 kernel/trace/trace_events_filter_test.h 		  __entry->a, __entry->b, __entry->c, __entry->d,
d                 507 kernel/trace/trace_output.c char trace_find_mark(unsigned long long d)
d                 513 kernel/trace/trace_output.c 		if (d > mark[i].val)
d                 139 lib/842/842_compress.c #define replace_hash(p, b, i, d)	do {				\
d                 140 lib/842/842_compress.c 	struct sw842_hlist_node##b *_n = &(p)->node##b[(i)+(d)];	\
d                 142 lib/842/842_compress.c 	_n->data = (p)->data##b[d];					\
d                 152 lib/842/842_compress.c static int add_bits(struct sw842_param *p, u64 d, u8 n);
d                 154 lib/842/842_compress.c static int __split_add_bits(struct sw842_param *p, u64 d, u8 n, u8 s)
d                 161 lib/842/842_compress.c 	ret = add_bits(p, d >> s, n - s);
d                 164 lib/842/842_compress.c 	return add_bits(p, d & GENMASK_ULL(s - 1, 0), s);
d                 167 lib/842/842_compress.c static int add_bits(struct sw842_param *p, u64 d, u8 n)
d                 173 lib/842/842_compress.c 	pr_debug("add %u bits %lx\n", (unsigned char)n, (unsigned long)d);
d                 182 lib/842/842_compress.c 		return __split_add_bits(p, d, n, 32);
d                 184 lib/842/842_compress.c 		return __split_add_bits(p, d, n, 16);
d                 186 lib/842/842_compress.c 		return __split_add_bits(p, d, n, 8);
d                 192 lib/842/842_compress.c 	d <<= s;
d                 195 lib/842/842_compress.c 		*out = o | d;
d                 197 lib/842/842_compress.c 		put_unaligned(cpu_to_be16(o << 8 | d), (__be16 *)out);
d                 199 lib/842/842_compress.c 		put_unaligned(cpu_to_be32(o << 24 | d << 8), (__be32 *)out);
d                 201 lib/842/842_compress.c 		put_unaligned(cpu_to_be32(o << 24 | d), (__be32 *)out);
d                 203 lib/842/842_compress.c 		put_unaligned(cpu_to_be64(o << 56 | d << 24), (__be64 *)out);
d                 205 lib/842/842_compress.c 		put_unaligned(cpu_to_be64(o << 56 | d << 16), (__be64 *)out);
d                 207 lib/842/842_compress.c 		put_unaligned(cpu_to_be64(o << 56 | d << 8), (__be64 *)out);
d                 209 lib/842/842_compress.c 		put_unaligned(cpu_to_be64(o << 56 | d), (__be64 *)out);
d                  59 lib/842/842_decompress.c #define beN_to_cpu(d, s)					\
d                  60 lib/842/842_decompress.c 	((s) == 2 ? be16_to_cpu(get_unaligned((__be16 *)d)) :	\
d                  61 lib/842/842_decompress.c 	 (s) == 4 ? be32_to_cpu(get_unaligned((__be32 *)d)) :	\
d                  62 lib/842/842_decompress.c 	 (s) == 8 ? be64_to_cpu(get_unaligned((__be64 *)d)) :	\
d                  65 lib/842/842_decompress.c static int next_bits(struct sw842_param *p, u64 *d, u8 n);
d                  67 lib/842/842_decompress.c static int __split_next_bits(struct sw842_param *p, u64 *d, u8 n, u8 s)
d                  80 lib/842/842_decompress.c 	ret = next_bits(p, d, s);
d                  83 lib/842/842_decompress.c 	*d |= tmp << s;
d                  87 lib/842/842_decompress.c static int next_bits(struct sw842_param *p, u64 *d, u8 n)
d                 100 lib/842/842_decompress.c 		return __split_next_bits(p, d, n, 32);
d                 102 lib/842/842_decompress.c 		return __split_next_bits(p, d, n, 16);
d                 104 lib/842/842_decompress.c 		return __split_next_bits(p, d, n, 8);
d                 110 lib/842/842_decompress.c 		*d = *in >> (8 - bits);
d                 112 lib/842/842_decompress.c 		*d = be16_to_cpu(get_unaligned((__be16 *)in)) >> (16 - bits);
d                 114 lib/842/842_decompress.c 		*d = be32_to_cpu(get_unaligned((__be32 *)in)) >> (32 - bits);
d                 116 lib/842/842_decompress.c 		*d = be64_to_cpu(get_unaligned((__be64 *)in)) >> (64 - bits);
d                 118 lib/842/842_decompress.c 	*d &= GENMASK_ULL(n - 1, 0);
d                 391 lib/bch.c      	unsigned int i, j, tmp, l, pd = 1, d = syn[0];
d                 407 lib/bch.c      		if (d) {
d                 411 lib/bch.c      			tmp = a_log(bch, d)+n-a_log(bch, pd);
d                 423 lib/bch.c      				pd = d;
d                 429 lib/bch.c      			d = syn[2*i+2];
d                 431 lib/bch.c      				d ^= gf_mul(bch, elp->c[j], syn[2*i+2-j]);
d                 649 lib/bch.c      	unsigned int a, b, c, d, e = 0, f, a2, b2, c2, e4;
d                 656 lib/bch.c      	d = gf_div(bch, poly->c[0], e4);
d                 677 lib/bch.c      			d = a_pow(bch, 2*l)^gf_mul(bch, b, f)^d;
d                 681 lib/bch.c      		if (d == 0)
d                 685 lib/bch.c      		c2 = gf_inv(bch, d);
d                 686 lib/bch.c      		b2 = gf_div(bch, a, d);
d                 687 lib/bch.c      		a2 = gf_div(bch, b, d);
d                 690 lib/bch.c      		c2 = d;
d                 712 lib/bch.c      	int i, d = a->deg, l = GF_N(bch)-a_log(bch, a->c[a->deg]);
d                 715 lib/bch.c      	for (i = 0; i < d; i++)
d                 727 lib/bch.c      	const unsigned int d = b->deg;
d                 729 lib/bch.c      	if (a->deg < d)
d                 738 lib/bch.c      	for (j = a->deg; j >= d; j--) {
d                 741 lib/bch.c      			p = j-d;
d                 742 lib/bch.c      			for (i = 0; i < d; i++, p++) {
d                 750 lib/bch.c      	a->deg = d-1;
d                1091 lib/bch.c      	int i, j, b, d;
d                1106 lib/bch.c      				d = deg(data);
d                1108 lib/bch.c      				data ^= g[0] >> (31-d);
d                1110 lib/bch.c      					hi = (d < 31) ? g[j] << (d+1) : 0;
d                1112 lib/bch.c      						g[j+1] >> (31-d) : 0;
d                 582 lib/crypto/des.c #define ROUND(L, R, A, B, K, d)					\
d                 583 lib/crypto/des.c 	B = K[0];			A = K[1];	K += d;	\
d                 609 lib/crypto/des.c #define DES_PC2(a, b, c, d) (T4(d) | T3(c) | T2(b) | T1(a))
d                 626 lib/crypto/des.c 	unsigned long a, b, c, d, w;
d                 629 lib/crypto/des.c 	d = k[4]; d &= 0x0e; d <<= 4; d |= k[0] & 0x1e; d = pc1[d];
d                 634 lib/crypto/des.c 	pe[15 * 2 + 0] = DES_PC2(a, b, c, d); d = rs[d];
d                 635 lib/crypto/des.c 	pe[14 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
d                 636 lib/crypto/des.c 	pe[13 * 2 + 0] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
d                 637 lib/crypto/des.c 	pe[12 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
d                 638 lib/crypto/des.c 	pe[11 * 2 + 0] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
d                 639 lib/crypto/des.c 	pe[10 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
d                 640 lib/crypto/des.c 	pe[ 9 * 2 + 0] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
d                 641 lib/crypto/des.c 	pe[ 8 * 2 + 0] = DES_PC2(d, a, b, c); c = rs[c];
d                 642 lib/crypto/des.c 	pe[ 7 * 2 + 0] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
d                 643 lib/crypto/des.c 	pe[ 6 * 2 + 0] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
d                 644 lib/crypto/des.c 	pe[ 5 * 2 + 0] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
d                 645 lib/crypto/des.c 	pe[ 4 * 2 + 0] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
d                 646 lib/crypto/des.c 	pe[ 3 * 2 + 0] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
d                 647 lib/crypto/des.c 	pe[ 2 * 2 + 0] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
d                 648 lib/crypto/des.c 	pe[ 1 * 2 + 0] = DES_PC2(c, d, a, b); b = rs[b];
d                 649 lib/crypto/des.c 	pe[ 0 * 2 + 0] = DES_PC2(b, c, d, a);
d                 652 lib/crypto/des.c 	w  = (a ^ c) | (b ^ d) | (rs[a] ^ c) | (b ^ rs[d]);
d                 657 lib/crypto/des.c 	d = k[0]; d &= 0xe0; d >>= 4; d |= k[4] & 0xf0; d = pc1[d + 1];
d                 663 lib/crypto/des.c 	w |= (a ^ c) | (b ^ d) | (rs[a] ^ c) | (b ^ rs[d]);
d                 665 lib/crypto/des.c 	pe[15 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d];
d                 666 lib/crypto/des.c 	pe[14 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
d                 667 lib/crypto/des.c 	pe[13 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
d                 668 lib/crypto/des.c 	pe[12 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
d                 669 lib/crypto/des.c 	pe[11 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
d                 670 lib/crypto/des.c 	pe[10 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
d                 671 lib/crypto/des.c 	pe[ 9 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
d                 672 lib/crypto/des.c 	pe[ 8 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c];
d                 673 lib/crypto/des.c 	pe[ 7 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
d                 674 lib/crypto/des.c 	pe[ 6 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
d                 675 lib/crypto/des.c 	pe[ 5 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
d                 676 lib/crypto/des.c 	pe[ 4 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
d                 677 lib/crypto/des.c 	pe[ 3 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
d                 678 lib/crypto/des.c 	pe[ 2 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
d                 679 lib/crypto/des.c 	pe[ 1 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b];
d                 680 lib/crypto/des.c 	pe[ 0 * 2 + 1] = DES_PC2(b, c, d, a);
d                 683 lib/crypto/des.c 	for (d = 0; d < 16; ++d) {
d                 684 lib/crypto/des.c 		a = pe[2 * d];
d                 685 lib/crypto/des.c 		b = pe[2 * d + 1];
d                 691 lib/crypto/des.c 		pe[2 * d] = a;
d                 692 lib/crypto/des.c 		pe[2 * d + 1] = b;
d                 717 lib/crypto/des.c 	unsigned long a, b, c, d;
d                 720 lib/crypto/des.c 	d = k[4]; d &= 0x0e; d <<= 4; d |= k[0] & 0x1e; d = pc1[d];
d                 725 lib/crypto/des.c 	pe[ 0 * 2] = DES_PC2(a, b, c, d); d = rs[d];
d                 726 lib/crypto/des.c 	pe[ 1 * 2] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
d                 727 lib/crypto/des.c 	pe[ 2 * 2] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
d                 728 lib/crypto/des.c 	pe[ 3 * 2] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
d                 729 lib/crypto/des.c 	pe[ 4 * 2] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
d                 730 lib/crypto/des.c 	pe[ 5 * 2] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
d                 731 lib/crypto/des.c 	pe[ 6 * 2] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
d                 732 lib/crypto/des.c 	pe[ 7 * 2] = DES_PC2(d, a, b, c); c = rs[c];
d                 733 lib/crypto/des.c 	pe[ 8 * 2] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
d                 734 lib/crypto/des.c 	pe[ 9 * 2] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
d                 735 lib/crypto/des.c 	pe[10 * 2] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
d                 736 lib/crypto/des.c 	pe[11 * 2] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
d                 737 lib/crypto/des.c 	pe[12 * 2] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
d                 738 lib/crypto/des.c 	pe[13 * 2] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
d                 739 lib/crypto/des.c 	pe[14 * 2] = DES_PC2(c, d, a, b); b = rs[b];
d                 740 lib/crypto/des.c 	pe[15 * 2] = DES_PC2(b, c, d, a);
d                 745 lib/crypto/des.c 	d = k[0]; d &= 0xe0; d >>= 4; d |= k[4] & 0xf0; d = pc1[d + 1];
d                 750 lib/crypto/des.c 	pe[ 0 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d];
d                 751 lib/crypto/des.c 	pe[ 1 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
d                 752 lib/crypto/des.c 	pe[ 2 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
d                 753 lib/crypto/des.c 	pe[ 3 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
d                 754 lib/crypto/des.c 	pe[ 4 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
d                 755 lib/crypto/des.c 	pe[ 5 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c]; b = rs[b];
d                 756 lib/crypto/des.c 	pe[ 6 * 2 + 1] = DES_PC2(b, c, d, a); a = rs[a]; d = rs[d];
d                 757 lib/crypto/des.c 	pe[ 7 * 2 + 1] = DES_PC2(d, a, b, c); c = rs[c];
d                 758 lib/crypto/des.c 	pe[ 8 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
d                 759 lib/crypto/des.c 	pe[ 9 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
d                 760 lib/crypto/des.c 	pe[10 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
d                 761 lib/crypto/des.c 	pe[11 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
d                 762 lib/crypto/des.c 	pe[12 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b]; a = rs[a];
d                 763 lib/crypto/des.c 	pe[13 * 2 + 1] = DES_PC2(a, b, c, d); d = rs[d]; c = rs[c];
d                 764 lib/crypto/des.c 	pe[14 * 2 + 1] = DES_PC2(c, d, a, b); b = rs[b];
d                 765 lib/crypto/des.c 	pe[15 * 2 + 1] = DES_PC2(b, c, d, a);
d                 768 lib/crypto/des.c 	for (d = 0; d < 16; ++d) {
d                 769 lib/crypto/des.c 		a = pe[2 * d];
d                 770 lib/crypto/des.c 		b = pe[2 * d + 1];
d                 776 lib/crypto/des.c 		pe[2 * d] = a;
d                 777 lib/crypto/des.c 		pe[2 * d + 1] = b;
d                  48 lib/crypto/sha256.c 	u32 a, b, c, d, e, f, g, h, t1, t2;
d                  61 lib/crypto/sha256.c 	a = state[0];  b = state[1];  c = state[2];  d = state[3];
d                  66 lib/crypto/sha256.c 	t2 = e0(a) + Maj(a, b, c);    d += t1;    h = t1 + t2;
d                  67 lib/crypto/sha256.c 	t1 = g + e1(d) + Ch(d, e, f) + 0x71374491 + W[1];
d                  69 lib/crypto/sha256.c 	t1 = f + e1(c) + Ch(c, d, e) + 0xb5c0fbcf + W[2];
d                  71 lib/crypto/sha256.c 	t1 = e + e1(b) + Ch(b, c, d) + 0xe9b5dba5 + W[3];
d                  73 lib/crypto/sha256.c 	t1 = d + e1(a) + Ch(a, b, c) + 0x3956c25b + W[4];
d                  74 lib/crypto/sha256.c 	t2 = e0(e) + Maj(e, f, g);    h += t1;    d = t1 + t2;
d                  76 lib/crypto/sha256.c 	t2 = e0(d) + Maj(d, e, f);    g += t1;    c = t1 + t2;
d                  78 lib/crypto/sha256.c 	t2 = e0(c) + Maj(c, d, e);    f += t1;    b = t1 + t2;
d                  80 lib/crypto/sha256.c 	t2 = e0(b) + Maj(b, c, d);    e += t1;    a = t1 + t2;
d                  83 lib/crypto/sha256.c 	t2 = e0(a) + Maj(a, b, c);    d += t1;    h = t1 + t2;
d                  84 lib/crypto/sha256.c 	t1 = g + e1(d) + Ch(d, e, f) + 0x12835b01 + W[9];
d                  86 lib/crypto/sha256.c 	t1 = f + e1(c) + Ch(c, d, e) + 0x243185be + W[10];
d                  88 lib/crypto/sha256.c 	t1 = e + e1(b) + Ch(b, c, d) + 0x550c7dc3 + W[11];
d                  90 lib/crypto/sha256.c 	t1 = d + e1(a) + Ch(a, b, c) + 0x72be5d74 + W[12];
d                  91 lib/crypto/sha256.c 	t2 = e0(e) + Maj(e, f, g);    h += t1;    d = t1 + t2;
d                  93 lib/crypto/sha256.c 	t2 = e0(d) + Maj(d, e, f);    g += t1;    c = t1 + t2;
d                  95 lib/crypto/sha256.c 	t2 = e0(c) + Maj(c, d, e);    f += t1;    b = t1 + t2;
d                  97 lib/crypto/sha256.c 	t2 = e0(b) + Maj(b, c, d);    e += t1;    a = t1 + t2;
d                 100 lib/crypto/sha256.c 	t2 = e0(a) + Maj(a, b, c);    d += t1;    h = t1 + t2;
d                 101 lib/crypto/sha256.c 	t1 = g + e1(d) + Ch(d, e, f) + 0xefbe4786 + W[17];
d                 103 lib/crypto/sha256.c 	t1 = f + e1(c) + Ch(c, d, e) + 0x0fc19dc6 + W[18];
d                 105 lib/crypto/sha256.c 	t1 = e + e1(b) + Ch(b, c, d) + 0x240ca1cc + W[19];
d                 107 lib/crypto/sha256.c 	t1 = d + e1(a) + Ch(a, b, c) + 0x2de92c6f + W[20];
d                 108 lib/crypto/sha256.c 	t2 = e0(e) + Maj(e, f, g);    h += t1;    d = t1 + t2;
d                 110 lib/crypto/sha256.c 	t2 = e0(d) + Maj(d, e, f);    g += t1;    c = t1 + t2;
d                 112 lib/crypto/sha256.c 	t2 = e0(c) + Maj(c, d, e);    f += t1;    b = t1 + t2;
d                 114 lib/crypto/sha256.c 	t2 = e0(b) + Maj(b, c, d);    e += t1;    a = t1 + t2;
d                 117 lib/crypto/sha256.c 	t2 = e0(a) + Maj(a, b, c);    d += t1;    h = t1 + t2;
d                 118 lib/crypto/sha256.c 	t1 = g + e1(d) + Ch(d, e, f) + 0xa831c66d + W[25];
d                 120 lib/crypto/sha256.c 	t1 = f + e1(c) + Ch(c, d, e) + 0xb00327c8 + W[26];
d                 122 lib/crypto/sha256.c 	t1 = e + e1(b) + Ch(b, c, d) + 0xbf597fc7 + W[27];
d                 124 lib/crypto/sha256.c 	t1 = d + e1(a) + Ch(a, b, c) + 0xc6e00bf3 + W[28];
d                 125 lib/crypto/sha256.c 	t2 = e0(e) + Maj(e, f, g);    h += t1;    d = t1 + t2;
d                 127 lib/crypto/sha256.c 	t2 = e0(d) + Maj(d, e, f);    g += t1;    c = t1 + t2;
d                 129 lib/crypto/sha256.c 	t2 = e0(c) + Maj(c, d, e);    f += t1;    b = t1 + t2;
d                 131 lib/crypto/sha256.c 	t2 = e0(b) + Maj(b, c, d);    e += t1;    a = t1 + t2;
d                 134 lib/crypto/sha256.c 	t2 = e0(a) + Maj(a, b, c);    d += t1;    h = t1 + t2;
d                 135 lib/crypto/sha256.c 	t1 = g + e1(d) + Ch(d, e, f) + 0x2e1b2138 + W[33];
d                 137 lib/crypto/sha256.c 	t1 = f + e1(c) + Ch(c, d, e) + 0x4d2c6dfc + W[34];
d                 139 lib/crypto/sha256.c 	t1 = e + e1(b) + Ch(b, c, d) + 0x53380d13 + W[35];
d                 141 lib/crypto/sha256.c 	t1 = d + e1(a) + Ch(a, b, c) + 0x650a7354 + W[36];
d                 142 lib/crypto/sha256.c 	t2 = e0(e) + Maj(e, f, g);    h += t1;    d = t1 + t2;
d                 144 lib/crypto/sha256.c 	t2 = e0(d) + Maj(d, e, f);    g += t1;    c = t1 + t2;
d                 146 lib/crypto/sha256.c 	t2 = e0(c) + Maj(c, d, e);    f += t1;    b = t1 + t2;
d                 148 lib/crypto/sha256.c 	t2 = e0(b) + Maj(b, c, d);    e += t1;    a = t1 + t2;
d                 151 lib/crypto/sha256.c 	t2 = e0(a) + Maj(a, b, c);    d += t1;    h = t1 + t2;
d                 152 lib/crypto/sha256.c 	t1 = g + e1(d) + Ch(d, e, f) + 0xa81a664b + W[41];
d                 154 lib/crypto/sha256.c 	t1 = f + e1(c) + Ch(c, d, e) + 0xc24b8b70 + W[42];
d                 156 lib/crypto/sha256.c 	t1 = e + e1(b) + Ch(b, c, d) + 0xc76c51a3 + W[43];
d                 158 lib/crypto/sha256.c 	t1 = d + e1(a) + Ch(a, b, c) + 0xd192e819 + W[44];
d                 159 lib/crypto/sha256.c 	t2 = e0(e) + Maj(e, f, g);    h += t1;    d = t1 + t2;
d                 161 lib/crypto/sha256.c 	t2 = e0(d) + Maj(d, e, f);    g += t1;    c = t1 + t2;
d                 163 lib/crypto/sha256.c 	t2 = e0(c) + Maj(c, d, e);    f += t1;    b = t1 + t2;
d                 165 lib/crypto/sha256.c 	t2 = e0(b) + Maj(b, c, d);    e += t1;    a = t1 + t2;
d                 168 lib/crypto/sha256.c 	t2 = e0(a) + Maj(a, b, c);    d += t1;    h = t1 + t2;
d                 169 lib/crypto/sha256.c 	t1 = g + e1(d) + Ch(d, e, f) + 0x1e376c08 + W[49];
d                 171 lib/crypto/sha256.c 	t1 = f + e1(c) + Ch(c, d, e) + 0x2748774c + W[50];
d                 173 lib/crypto/sha256.c 	t1 = e + e1(b) + Ch(b, c, d) + 0x34b0bcb5 + W[51];
d                 175 lib/crypto/sha256.c 	t1 = d + e1(a) + Ch(a, b, c) + 0x391c0cb3 + W[52];
d                 176 lib/crypto/sha256.c 	t2 = e0(e) + Maj(e, f, g);    h += t1;    d = t1 + t2;
d                 178 lib/crypto/sha256.c 	t2 = e0(d) + Maj(d, e, f);    g += t1;    c = t1 + t2;
d                 180 lib/crypto/sha256.c 	t2 = e0(c) + Maj(c, d, e);    f += t1;    b = t1 + t2;
d                 182 lib/crypto/sha256.c 	t2 = e0(b) + Maj(b, c, d);    e += t1;    a = t1 + t2;
d                 185 lib/crypto/sha256.c 	t2 = e0(a) + Maj(a, b, c);    d += t1;    h = t1 + t2;
d                 186 lib/crypto/sha256.c 	t1 = g + e1(d) + Ch(d, e, f) + 0x78a5636f + W[57];
d                 188 lib/crypto/sha256.c 	t1 = f + e1(c) + Ch(c, d, e) + 0x84c87814 + W[58];
d                 190 lib/crypto/sha256.c 	t1 = e + e1(b) + Ch(b, c, d) + 0x8cc70208 + W[59];
d                 192 lib/crypto/sha256.c 	t1 = d + e1(a) + Ch(a, b, c) + 0x90befffa + W[60];
d                 193 lib/crypto/sha256.c 	t2 = e0(e) + Maj(e, f, g);    h += t1;    d = t1 + t2;
d                 195 lib/crypto/sha256.c 	t2 = e0(d) + Maj(d, e, f);    g += t1;    c = t1 + t2;
d                 197 lib/crypto/sha256.c 	t2 = e0(c) + Maj(c, d, e);    f += t1;    b = t1 + t2;
d                 199 lib/crypto/sha256.c 	t2 = e0(b) + Maj(b, c, d);    e += t1;    a = t1 + t2;
d                 201 lib/crypto/sha256.c 	state[0] += a; state[1] += b; state[2] += c; state[3] += d;
d                 205 lib/crypto/sha256.c 	a = b = c = d = e = f = g = h = t1 = t2 = 0;
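
The long unrolled block from lib/crypto/sha256.c above repeats one round pattern in which d only ever absorbs t1 while the other working variables rotate. A hedged, loop-based restatement of that single round using the standard FIPS 180-4 helper definitions (Ch, Maj and the two big-sigma functions, written e0/e1 as in the listing); the message schedule and round constants are omitted, so this is a sketch of the round structure, not the kernel's implementation:

	#include <stdint.h>

	#define ROR32(x, n)  (((x) >> (n)) | ((x) << (32 - (n))))
	#define Ch(x, y, z)  (((x) & (y)) ^ (~(x) & (z)))
	#define Maj(x, y, z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z)))
	#define e0(x)        (ROR32(x, 2) ^ ROR32(x, 13) ^ ROR32(x, 22))
	#define e1(x)        (ROR32(x, 6) ^ ROR32(x, 11) ^ ROR32(x, 25))

	/* One SHA-256 round: s[0..7] holds a..h, k is the round constant,
	 * w the scheduled message word. */
	static void sha256_round(uint32_t s[8], uint32_t k, uint32_t w)
	{
		uint32_t t1 = s[7] + e1(s[4]) + Ch(s[4], s[5], s[6]) + k + w;
		uint32_t t2 = e0(s[0]) + Maj(s[0], s[1], s[2]);

		s[7] = s[6]; s[6] = s[5]; s[5] = s[4];
		s[4] = s[3] + t1;                 /* d += t1, then becomes e */
		s[3] = s[2]; s[2] = s[1]; s[1] = s[0];
		s[0] = t1 + t2;                   /* new a */
	}
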
d                 206 lib/decompress_unxz.c 	uint8_t *d = dest;
d                 210 lib/decompress_unxz.c 	if (d < s) {
d                 212 lib/decompress_unxz.c 			d[i] = s[i];
d                 213 lib/decompress_unxz.c 	} else if (d > s) {
d                 216 lib/decompress_unxz.c 			d[i] = s[i];
d                  57 lib/glob.c     		unsigned char d = *pat++;
d                  59 lib/glob.c     		switch (d) {
d                 104 lib/glob.c     			d = *pat++;
d                 108 lib/glob.c     			if (c == d) {
d                 109 lib/glob.c     				if (d == '\0')
d                 326 lib/inflate.c  	const ush *d,           /* list of base values for non-simple codes */
d                 530 lib/inflate.c          r.v.n = d[*p++ - s];
d                 599 lib/inflate.c    unsigned n, d;        /* length and index for copy */
d                 660 lib/inflate.c        d = w - t->v.n - ((unsigned)b & mask_bits[e]);
d                 662 lib/inflate.c        Tracevv((stderr,"\\[%d,%d]", w-d, n));
d                 666 lib/inflate.c          n -= (e = (e = WSIZE - ((d &= WSIZE-1) > w ? d : w)) > n ? n : e);
d                 668 lib/inflate.c          if (w - d >= e)         /* (this test assumes unsigned comparison) */
d                 670 lib/inflate.c            memcpy(slide + w, slide + d, e);
d                 672 lib/inflate.c            d += e;
d                 677 lib/inflate.c              slide[w++] = slide[d++];
d                 162 lib/lz4/lz4defs.h 	BYTE *d = (BYTE *)dstPtr;
d                 167 lib/lz4/lz4defs.h 		LZ4_copy8(d, s);
d                 168 lib/lz4/lz4defs.h 		d += 8;
d                 170 lib/lz4/lz4defs.h 	} while (d < e);
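
The lz4defs.h hits above are LZ4's "wild copy": data moves in fixed 8-byte chunks until the destination cursor passes the end pointer, so the loop may deliberately write a few bytes past the requested length. A stand-alone sketch of the same idea; names are illustrative and memcpy() stands in for the kernel's LZ4_copy8():

	#include <stdint.h>
	#include <string.h>

	/* Copy in 8-byte chunks until dst_end is reached or passed.  May write
	 * up to 7 bytes beyond dst_end, so the caller must leave that slack. */
	static void wild_copy8(void *dst, const void *src, void *dst_end)
	{
		uint8_t *d = dst;
		const uint8_t *s = src;
		uint8_t *e = dst_end;

		do {
			memcpy(d, s, 8);
			d += 8;
			s += 8;
		} while (d < e);
	}
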
d                  33 lib/math/div64.c 	uint64_t res, d = 1;
d                  46 lib/math/div64.c 		d = d+d;
d                  52 lib/math/div64.c 			res += d;
d                  55 lib/math/div64.c 		d >>= 1;
d                  56 lib/math/div64.c 	} while (d);
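
The lib/math/div64.c lines above are the generic shift-and-subtract fallback for dividing a 64-bit value by a 32-bit base: the divisor is scaled up by doubling while d tracks the matching quotient bit, then both are shifted back down as the scaled divisor is repeatedly subtracted. A self-contained sketch of that technique with illustrative names:

	#include <stdint.h>

	static uint64_t div64_sketch(uint64_t n, uint32_t base, uint32_t *rem)
	{
		uint64_t res = 0, b = base, d = 1;

		/* scale the divisor (and its quotient weight) up towards n */
		while (b < n && !(b & (1ULL << 63))) {
			b <<= 1;
			d <<= 1;
		}

		/* subtract scaled divisors, accumulating quotient bits */
		while (d) {
			if (n >= b) {
				n -= b;
				res += d;
			}
			b >>= 1;
			d >>= 1;
		}

		*rem = (uint32_t)n;
		return res;
	}
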
d                  36 lib/math/rational.c 	unsigned long n, d, n0, d0, n1, d1;
d                  38 lib/math/rational.c 	d = given_denominator;
d                  48 lib/math/rational.c 		if (d == 0)
d                  50 lib/math/rational.c 		t = d;
d                  51 lib/math/rational.c 		a = n / d;
d                  52 lib/math/rational.c 		d = n % d;
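
lib/math/rational.c above walks the continued-fraction expansion of the given ratio (a = n / d, then n, d = d, n % d) while accumulating convergents n1/d1. A hedged sketch of that core loop for a bounded denominator; the kernel routine additionally picks the best semi-convergent when it has to stop early, which is left out here for brevity:

	#include <stdio.h>

	static void best_ratio(unsigned long n, unsigned long d, unsigned long max_den,
			       unsigned long *out_n, unsigned long *out_d)
	{
		unsigned long n0 = 0, d0 = 1;   /* previous convergent */
		unsigned long n1 = 1, d1 = 0;   /* current convergent  */

		while (d) {
			unsigned long a = n / d;
			unsigned long r = n % d;
			unsigned long n2 = n0 + a * n1;
			unsigned long d2 = d0 + a * d1;

			if (d2 > max_den)
				break;          /* next convergent would exceed the bound */
			n0 = n1; d0 = d1;
			n1 = n2; d1 = d2;
			n = d;  d = r;
		}
		*out_n = n1;
		*out_d = d1;
	}

	int main(void)
	{
		unsigned long bn, bd;

		best_ratio(314159, 100000, 100, &bn, &bd);
		printf("%lu/%lu\n", bn, bd);    /* 22/7, the last full convergent with denominator <= 100 */
		return 0;
	}
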
d                  13 lib/math/reciprocal_div.c struct reciprocal_value reciprocal_value(u32 d)
d                  19 lib/math/reciprocal_div.c 	l = fls(d - 1);
d                  20 lib/math/reciprocal_div.c 	m = ((1ULL << 32) * ((1ULL << l) - d));
d                  21 lib/math/reciprocal_div.c 	do_div(m, d);
d                  31 lib/math/reciprocal_div.c struct reciprocal_value_adv reciprocal_value_adv(u32 d, u8 prec)
d                  38 lib/math/reciprocal_div.c 	l = fls(d - 1);
d                  45 lib/math/reciprocal_div.c 	     d, __func__);
d                  48 lib/math/reciprocal_div.c 	do_div(mlow, d);
d                  50 lib/math/reciprocal_div.c 	do_div(mhigh, d);
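
reciprocal_value() above precomputes a fixed-point multiplier so that later divisions by the constant d reduce to a multiply and a shift. A self-contained sketch of the underlying round-up-reciprocal technique, using a 128-bit intermediate (GCC/Clang __uint128_t) instead of the kernel's extra correction shifts; names are illustrative and this is not the kernel API:

	#include <stdint.h>
	#include <stdio.h>

	struct recip { uint64_t m; unsigned int shift; };

	static struct recip recip_value(uint32_t d)
	{
		struct recip r;
		unsigned int l = 0;

		while ((1ULL << l) < d)         /* l = ceil(log2(d)) */
			l++;
		r.shift = 32 + l;
		/* m = ceil(2^(32+l) / d); exact for every 32-bit numerator */
		r.m = (uint64_t)((((__uint128_t)1 << r.shift) + d - 1) / d);
		return r;
	}

	static uint32_t recip_divide(uint32_t a, struct recip r)
	{
		return (uint32_t)(((__uint128_t)a * r.m) >> r.shift);
	}

	int main(void)
	{
		struct recip r = recip_value(7);

		printf("%u %u\n", recip_divide(100, r), recip_divide(6, r)); /* 14 0 */
		return 0;
	}
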
d                 144 lib/mpi/longlong.h #define udiv_qrnnd(q, r, n1, n0, d) \
d                 150 lib/mpi/longlong.h 		"r" ((USItype)(d)))
d                 162 lib/mpi/longlong.h #define udiv_qrnnd(q, r, n1, n0, d) \
d                 164 lib/mpi/longlong.h 	(q) = __udiv_qrnnd(&__r, (n1), (n0), (d)); \
d                 287 lib/mpi/longlong.h #define udiv_qrnnd(q, r, nh, nl, d) \
d                 293 lib/mpi/longlong.h 		"g" ((USItype)(d)))
d                 339 lib/mpi/longlong.h #define udiv_qrnnd(q, r, n1, n0, d) \
d                 341 lib/mpi/longlong.h 	(q) = __udiv_qrnnd(&__r, (n1), (n0), (d)); \
d                 379 lib/mpi/longlong.h #define sdiv_qrnnd(q, r, n1, n0, d) \
d                 387 lib/mpi/longlong.h 	: "0" (__xx.__ll), "r" (d)); \
d                 421 lib/mpi/longlong.h #define udiv_qrnnd(q, r, n1, n0, d) \
d                 427 lib/mpi/longlong.h 	     "rm" ((USItype)(d)))
d                 486 lib/mpi/longlong.h #define udiv_qrnnd(q, r, nh, nl, d) \
d                 495 lib/mpi/longlong.h 	     "dI" ((USItype)(d))); \
d                 541 lib/mpi/longlong.h #define udiv_qrnnd(q, r, n1, n0, d) \
d                 547 lib/mpi/longlong.h 	     "dmi" ((USItype)(d)))
d                 549 lib/mpi/longlong.h #define sdiv_qrnnd(q, r, n1, n0, d) \
d                 555 lib/mpi/longlong.h 	     "dmi" ((USItype)(d)))
d                 622 lib/mpi/longlong.h #define udiv_qrnnd(q, r, n1, n0, d) \
d                 628 lib/mpi/longlong.h 	: "=r" (__q.__ll) : "r" (__x.__ll), "r" (d)); \
d                 629 lib/mpi/longlong.h 	(r) = (n0) - __q.__l * (d); (q) = __q.__l; })
d                 705 lib/mpi/longlong.h #define udiv_qrnnd(q, r, n1, n0, d) \
d                 713 lib/mpi/longlong.h 	     "g" ((USItype)(d))); \
d                 827 lib/mpi/longlong.h #define sdiv_qrnnd(q, r, nh, nl, d) \
d                 830 lib/mpi/longlong.h 	: "r" ((SItype)(nh)), "1" ((SItype)(nl)), "r" ((SItype)(d)))
d                 983 lib/mpi/longlong.h #define udiv_qrnnd(q, r, n1, n0, d) \
d                 990 lib/mpi/longlong.h 	"r" ((USItype)(d))); \
d                 991 lib/mpi/longlong.h 	(r) = (n0) - __q * (d); \
d                1007 lib/mpi/longlong.h #define udiv_qrnnd(q, r, n1, n0, d) \
d                1051 lib/mpi/longlong.h 	"rI" ((USItype)(d)) \
d                1107 lib/mpi/longlong.h #define udiv_qrnnd(q, r, n1, n0, d) \
d                1134 lib/mpi/longlong.h 	   : "r" ((USItype)(d)),					\
d                1177 lib/mpi/longlong.h #define sdiv_qrnnd(q, r, n1, n0, d) \
d                1185 lib/mpi/longlong.h 	: "g" (__xx.__ll), "g" (d)); \
d                1308 lib/mpi/longlong.h #define __udiv_qrnnd_c(q, r, n1, n0, d) \
d                1311 lib/mpi/longlong.h 	__d1 = __ll_highpart(d); \
d                1312 lib/mpi/longlong.h 	__d0 = __ll_lowpart(d); \
d                1319 lib/mpi/longlong.h 		__q1--, __r1 += (d); \
d                1320 lib/mpi/longlong.h 		if (__r1 >= (d)) /* i.e. we didn't get carry when adding to __r1 */ \
d                1322 lib/mpi/longlong.h 			__q1--, __r1 += (d); \
d                1331 lib/mpi/longlong.h 		__q0--, __r0 += (d); \
d                1332 lib/mpi/longlong.h 		if (__r0 >= (d)) \
d                1334 lib/mpi/longlong.h 				__q0--, __r0 += (d); \
d                1345 lib/mpi/longlong.h #define udiv_qrnnd(q, r, nh, nl, d) \
d                1348 lib/mpi/longlong.h 	(q) = __MPN(udiv_w_sdiv) (&__r, nh, nl, d); \
d                  32 lib/mpi/mpi-bit.c 	for (; a->nlimbs && !a->d[a->nlimbs - 1]; a->nlimbs--)
d                  46 lib/mpi/mpi-bit.c 		mpi_limb_t alimb = a->d[a->nlimbs - 1];
d                  35 lib/mpi/mpi-cmp.c 	if (u->d[0] == limb)
d                  37 lib/mpi/mpi-cmp.c 	else if (u->d[0] > limb)
d                  63 lib/mpi/mpi-cmp.c 	cmp = mpihelp_cmp(u->d, v->d, usize);
d                  56 lib/mpi/mpi-internal.h #define MPN_COPY(d, s, n) \
d                  60 lib/mpi/mpi-internal.h 			(d)[_i] = (s)[_i];	\
d                  63 lib/mpi/mpi-internal.h #define MPN_COPY_DECR(d, s, n) \
d                  67 lib/mpi/mpi-internal.h 			(d)[_i] = (s)[_i];	\
d                  71 lib/mpi/mpi-internal.h #define MPN_ZERO(d, n) \
d                  75 lib/mpi/mpi-internal.h 			(d)[_i] = 0;		\
d                  78 lib/mpi/mpi-internal.h #define MPN_NORMALIZE(d, n)  \
d                  81 lib/mpi/mpi-internal.h 			if ((d)[(n)-1])		\
d                  46 lib/mpi/mpi-pow.c 	rp = res->d;
d                  47 lib/mpi/mpi-pow.c 	ep = exp->d;
d                  55 lib/mpi/mpi-pow.c 		res->nlimbs = (msize == 1 && mod->d[0] == 1) ? 0 : 1;
d                  59 lib/mpi/mpi-pow.c 			rp = res->d;
d                  73 lib/mpi/mpi-pow.c 	mod_shift_cnt = count_leading_zeros(mod->d[msize - 1]);
d                  75 lib/mpi/mpi-pow.c 		mpihelp_lshift(mp, mod->d, msize, mod_shift_cnt);
d                  77 lib/mpi/mpi-pow.c 		MPN_COPY(mp, mod->d, msize);
d                  87 lib/mpi/mpi-pow.c 		MPN_COPY(bp, base->d, bsize);
d                  96 lib/mpi/mpi-pow.c 		bp = base->d;
d                 116 lib/mpi/mpi-pow.c 			rp = res->d;
d                 263 lib/mpi/mpi-pow.c 			    mpihelp_lshift(res->d, rp, rsize, mod_shift_cnt);
d                 264 lib/mpi/mpi-pow.c 			rp = res->d;
d                 270 lib/mpi/mpi-pow.c 			MPN_COPY(res->d, rp, rsize);
d                 271 lib/mpi/mpi-pow.c 			rp = res->d;
d                  74 lib/mpi/mpicoder.c 			val->d[j - 1] = a;
d                 118 lib/mpi/mpicoder.c 		alimb = a->d[i];
d                 177 lib/mpi/mpicoder.c 		alimb = cpu_to_be32(a->d[i]);
d                 179 lib/mpi/mpicoder.c 		alimb = cpu_to_be64(a->d[i]);
d                 294 lib/mpi/mpicoder.c 		alimb = a->d[i] ? cpu_to_be32(a->d[i]) : 0;
d                 296 lib/mpi/mpicoder.c 		alimb = a->d[i] ? cpu_to_be64(a->d[i]) : 0;
d                 406 lib/mpi/mpicoder.c 				val->d[j--] = a;
d                  64 lib/mpi/mpih-div.c 			mpi_limb_t d;
d                  66 lib/mpi/mpih-div.c 			d = dp[0];
d                  69 lib/mpi/mpih-div.c 			if (n1 >= d) {
d                  70 lib/mpi/mpih-div.c 				n1 -= d;
d                  76 lib/mpi/mpih-div.c 				udiv_qrnnd(qp[i], n1, n1, np[i], d);
d                  80 lib/mpi/mpih-div.c 				udiv_qrnnd(qp[i], n1, n1, 0, d);
d                  39 lib/mpi/mpiutil.c 		a->d = mpi_alloc_limb_space(nlimbs);
d                  40 lib/mpi/mpiutil.c 		if (!a->d) {
d                  45 lib/mpi/mpiutil.c 		a->d = NULL;
d                  77 lib/mpi/mpiutil.c 	mpi_free_limb_space(a->d);
d                  78 lib/mpi/mpiutil.c 	a->d = ap;
d                  93 lib/mpi/mpiutil.c 	if (a->d) {
d                  97 lib/mpi/mpiutil.c 		memcpy(p, a->d, a->alloced * sizeof(mpi_limb_t));
d                  98 lib/mpi/mpiutil.c 		kzfree(a->d);
d                  99 lib/mpi/mpiutil.c 		a->d = p;
d                 101 lib/mpi/mpiutil.c 		a->d = kcalloc(nlimbs, sizeof(mpi_limb_t), GFP_KERNEL);
d                 102 lib/mpi/mpiutil.c 		if (!a->d)
d                 115 lib/mpi/mpiutil.c 		kzfree(a->d);
d                 117 lib/mpi/mpiutil.c 		mpi_free_limb_space(a->d);
d                 591 lib/nlattr.c   	int d = nla_len(nla) - size;
d                 593 lib/nlattr.c   	if (d == 0)
d                 594 lib/nlattr.c   		d = memcmp(nla_data(nla), data, size);
d                 596 lib/nlattr.c   	return d;
d                 610 lib/nlattr.c   	int d;
d                 615 lib/nlattr.c   	d = attrlen - len;
d                 616 lib/nlattr.c   	if (d == 0)
d                 617 lib/nlattr.c   		d = memcmp(nla_data(nla), str, len);
d                 619 lib/nlattr.c   	return d;
d                  40 lib/raid6/avx2.c 	int d, z, z0;
d                  51 lib/raid6/avx2.c 	for (d = 0; d < bytes; d += 32) {
d                  52 lib/raid6/avx2.c 		asm volatile("prefetchnta %0" : : "m" (dptr[z0][d]));
d                  53 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm2" : : "m" (dptr[z0][d]));/* P[0] */
d                  54 lib/raid6/avx2.c 		asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d]));
d                  56 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm6" : : "m" (dptr[z0-1][d]));
d                  58 lib/raid6/avx2.c 			asm volatile("prefetchnta %0" : : "m" (dptr[z][d]));
d                  65 lib/raid6/avx2.c 			asm volatile("vmovdqa %0,%%ymm6" : : "m" (dptr[z][d]));
d                  74 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm2,%0" : "=m" (p[d]));
d                  76 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm4,%0" : "=m" (q[d]));
d                  89 lib/raid6/avx2.c 	int d, z, z0;
d                  99 lib/raid6/avx2.c 	for (d = 0 ; d < bytes ; d += 32) {
d                 100 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm4" :: "m" (dptr[z0][d]));
d                 101 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm2" : : "m" (p[d]));
d                 110 lib/raid6/avx2.c 			asm volatile("vmovdqa %0,%%ymm5" :: "m" (dptr[z][d]));
d                 122 lib/raid6/avx2.c 		asm volatile("vpxor %0,%%ymm4,%%ymm4" : : "m" (q[d]));
d                 124 lib/raid6/avx2.c 		asm volatile("vmovdqa %%ymm4,%0" : "=m" (q[d]));
d                 125 lib/raid6/avx2.c 		asm volatile("vmovdqa %%ymm2,%0" : "=m" (p[d]));
d                 147 lib/raid6/avx2.c 	int d, z, z0;
d                 159 lib/raid6/avx2.c 	for (d = 0; d < bytes; d += 64) {
d                 160 lib/raid6/avx2.c 		asm volatile("prefetchnta %0" : : "m" (dptr[z0][d]));
d                 161 lib/raid6/avx2.c 		asm volatile("prefetchnta %0" : : "m" (dptr[z0][d+32]));
d                 162 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm2" : : "m" (dptr[z0][d]));/* P[0] */
d                 163 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm3" : : "m" (dptr[z0][d+32]));/* P[1] */
d                 167 lib/raid6/avx2.c 			asm volatile("prefetchnta %0" : : "m" (dptr[z][d]));
d                 168 lib/raid6/avx2.c 			asm volatile("prefetchnta %0" : : "m" (dptr[z][d+32]));
d                 177 lib/raid6/avx2.c 			asm volatile("vmovdqa %0,%%ymm5" : : "m" (dptr[z][d]));
d                 178 lib/raid6/avx2.c 			asm volatile("vmovdqa %0,%%ymm7" : : "m" (dptr[z][d+32]));
d                 184 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm2,%0" : "=m" (p[d]));
d                 185 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm3,%0" : "=m" (p[d+32]));
d                 186 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm4,%0" : "=m" (q[d]));
d                 187 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm6,%0" : "=m" (q[d+32]));
d                 199 lib/raid6/avx2.c 	int d, z, z0;
d                 209 lib/raid6/avx2.c 	for (d = 0 ; d < bytes ; d += 64) {
d                 210 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm4" :: "m" (dptr[z0][d]));
d                 211 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm6" :: "m" (dptr[z0][d+32]));
d                 212 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm2" : : "m" (p[d]));
d                 213 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm3" : : "m" (p[d+32]));
d                 228 lib/raid6/avx2.c 			asm volatile("vmovdqa %0,%%ymm5" :: "m" (dptr[z][d]));
d                 230 lib/raid6/avx2.c 				     :: "m" (dptr[z][d+32]));
d                 249 lib/raid6/avx2.c 		asm volatile("vpxor %0,%%ymm4,%%ymm4" : : "m" (q[d]));
d                 250 lib/raid6/avx2.c 		asm volatile("vpxor %0,%%ymm6,%%ymm6" : : "m" (q[d+32]));
d                 252 lib/raid6/avx2.c 		asm volatile("vmovdqa %%ymm4,%0" : "=m" (q[d]));
d                 253 lib/raid6/avx2.c 		asm volatile("vmovdqa %%ymm6,%0" : "=m" (q[d+32]));
d                 254 lib/raid6/avx2.c 		asm volatile("vmovdqa %%ymm2,%0" : "=m" (p[d]));
d                 255 lib/raid6/avx2.c 		asm volatile("vmovdqa %%ymm3,%0" : "=m" (p[d+32]));
d                 279 lib/raid6/avx2.c 	int d, z, z0;
d                 298 lib/raid6/avx2.c 	for (d = 0; d < bytes; d += 128) {
d                 300 lib/raid6/avx2.c 			asm volatile("prefetchnta %0" : : "m" (dptr[z][d]));
d                 301 lib/raid6/avx2.c 			asm volatile("prefetchnta %0" : : "m" (dptr[z][d+32]));
d                 302 lib/raid6/avx2.c 			asm volatile("prefetchnta %0" : : "m" (dptr[z][d+64]));
d                 303 lib/raid6/avx2.c 			asm volatile("prefetchnta %0" : : "m" (dptr[z][d+96]));
d                 320 lib/raid6/avx2.c 			asm volatile("vmovdqa %0,%%ymm5" : : "m" (dptr[z][d]));
d                 321 lib/raid6/avx2.c 			asm volatile("vmovdqa %0,%%ymm7" : : "m" (dptr[z][d+32]));
d                 322 lib/raid6/avx2.c 			asm volatile("vmovdqa %0,%%ymm13" : : "m" (dptr[z][d+64]));
d                 323 lib/raid6/avx2.c 			asm volatile("vmovdqa %0,%%ymm15" : : "m" (dptr[z][d+96]));
d                 333 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm2,%0" : "=m" (p[d]));
d                 335 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm3,%0" : "=m" (p[d+32]));
d                 337 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm10,%0" : "=m" (p[d+64]));
d                 339 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm11,%0" : "=m" (p[d+96]));
d                 341 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm4,%0" : "=m" (q[d]));
d                 343 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm6,%0" : "=m" (q[d+32]));
d                 345 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm12,%0" : "=m" (q[d+64]));
d                 347 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm14,%0" : "=m" (q[d+96]));
d                 360 lib/raid6/avx2.c 	int d, z, z0;
d                 370 lib/raid6/avx2.c 	for (d = 0 ; d < bytes ; d += 128) {
d                 371 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm4" :: "m" (dptr[z0][d]));
d                 372 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm6" :: "m" (dptr[z0][d+32]));
d                 373 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm12" :: "m" (dptr[z0][d+64]));
d                 374 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm14" :: "m" (dptr[z0][d+96]));
d                 375 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm2" : : "m" (p[d]));
d                 376 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm3" : : "m" (p[d+32]));
d                 377 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm10" : : "m" (p[d+64]));
d                 378 lib/raid6/avx2.c 		asm volatile("vmovdqa %0,%%ymm11" : : "m" (p[d+96]));
d                 385 lib/raid6/avx2.c 			asm volatile("prefetchnta %0" :: "m" (dptr[z][d]));
d                 386 lib/raid6/avx2.c 			asm volatile("prefetchnta %0" :: "m" (dptr[z][d+64]));
d                 407 lib/raid6/avx2.c 			asm volatile("vmovdqa %0,%%ymm5" :: "m" (dptr[z][d]));
d                 409 lib/raid6/avx2.c 				     :: "m" (dptr[z][d+32]));
d                 411 lib/raid6/avx2.c 				     :: "m" (dptr[z][d+64]));
d                 413 lib/raid6/avx2.c 				     :: "m" (dptr[z][d+96]));
d                 423 lib/raid6/avx2.c 		asm volatile("prefetchnta %0" :: "m" (q[d]));
d                 424 lib/raid6/avx2.c 		asm volatile("prefetchnta %0" :: "m" (q[d+64]));
d                 448 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm2,%0" : "=m" (p[d]));
d                 449 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm3,%0" : "=m" (p[d+32]));
d                 450 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm10,%0" : "=m" (p[d+64]));
d                 451 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm11,%0" : "=m" (p[d+96]));
d                 452 lib/raid6/avx2.c 		asm volatile("vpxor %0,%%ymm4,%%ymm4" : : "m" (q[d]));
d                 453 lib/raid6/avx2.c 		asm volatile("vpxor %0,%%ymm6,%%ymm6" : : "m" (q[d+32]));
d                 454 lib/raid6/avx2.c 		asm volatile("vpxor %0,%%ymm12,%%ymm12" : : "m" (q[d+64]));
d                 455 lib/raid6/avx2.c 		asm volatile("vpxor %0,%%ymm14,%%ymm14" : : "m" (q[d+96]));
d                 456 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm4,%0" : "=m" (q[d]));
d                 457 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm6,%0" : "=m" (q[d+32]));
d                 458 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm12,%0" : "=m" (q[d+64]));
d                 459 lib/raid6/avx2.c 		asm volatile("vmovntdq %%ymm14,%0" : "=m" (q[d+96]));
d                  48 lib/raid6/avx512.c 	int d, z, z0;
d                  61 lib/raid6/avx512.c 	for (d = 0; d < bytes; d += 64) {
d                  68 lib/raid6/avx512.c 			     : "m" (dptr[z0][d]), "m" (dptr[z0-1][d]));
d                  80 lib/raid6/avx512.c 				     : "m" (dptr[z][d]));
d                  94 lib/raid6/avx512.c 			     : "m" (p[d]), "m" (q[d]));
d                 106 lib/raid6/avx512.c 	int d, z, z0;
d                 117 lib/raid6/avx512.c 	for (d = 0 ; d < bytes ; d += 64) {
d                 122 lib/raid6/avx512.c 			     : "m" (dptr[z0][d]),  "m" (p[d]));
d                 135 lib/raid6/avx512.c 				     : "m" (dptr[z][d]));
d                 153 lib/raid6/avx512.c 			     : "m" (q[d]), "m" (p[d]));
d                 175 lib/raid6/avx512.c 	int d, z, z0;
d                 189 lib/raid6/avx512.c 	for (d = 0; d < bytes; d += 128) {
d                 197 lib/raid6/avx512.c 			     : "m" (dptr[z0][d]), "m" (dptr[z0][d+64]));
d                 218 lib/raid6/avx512.c 				     : "m" (dptr[z][d]), "m" (dptr[z][d+64]));
d                 225 lib/raid6/avx512.c 			     : "m" (p[d]), "m" (p[d+64]), "m" (q[d]),
d                 226 lib/raid6/avx512.c 			       "m" (q[d+64]));
d                 238 lib/raid6/avx512.c 	int d, z, z0;
d                 249 lib/raid6/avx512.c 	for (d = 0 ; d < bytes ; d += 128) {
d                 257 lib/raid6/avx512.c 			     : "m" (dptr[z0][d]), "m" (dptr[z0][d+64]),
d                 258 lib/raid6/avx512.c 			       "m" (p[d]), "m" (p[d+64]));
d                 280 lib/raid6/avx512.c 				     : "m" (dptr[z][d]),  "m" (dptr[z][d+64]));
d                 309 lib/raid6/avx512.c 			     : "m" (q[d]), "m" (q[d+64]), "m" (p[d]),
d                 310 lib/raid6/avx512.c 			       "m" (p[d+64]));
d                 334 lib/raid6/avx512.c 	int d, z, z0;
d                 355 lib/raid6/avx512.c 	for (d = 0; d < bytes; d += 256) {
d                 394 lib/raid6/avx512.c 			     : "m" (dptr[z][d]), "m" (dptr[z][d+64]),
d                 395 lib/raid6/avx512.c 			       "m" (dptr[z][d+128]), "m" (dptr[z][d+192]));
d                 414 lib/raid6/avx512.c 			     : "m" (p[d]), "m" (p[d+64]), "m" (p[d+128]),
d                 415 lib/raid6/avx512.c 			       "m" (p[d+192]), "m" (q[d]), "m" (q[d+64]),
d                 416 lib/raid6/avx512.c 			       "m" (q[d+128]), "m" (q[d+192]));
d                 428 lib/raid6/avx512.c 	int d, z, z0;
d                 439 lib/raid6/avx512.c 	for (d = 0 ; d < bytes ; d += 256) {
d                 453 lib/raid6/avx512.c 			     : "m" (dptr[z0][d]), "m" (dptr[z0][d+64]),
d                 454 lib/raid6/avx512.c 			       "m" (dptr[z0][d+128]), "m" (dptr[z0][d+192]),
d                 455 lib/raid6/avx512.c 			       "m" (p[d]), "m" (p[d+64]), "m" (p[d+128]),
d                 456 lib/raid6/avx512.c 			       "m" (p[d+192]));
d                 498 lib/raid6/avx512.c 				     : "m" (dptr[z][d]), "m" (dptr[z][d+64]),
d                 499 lib/raid6/avx512.c 				       "m" (dptr[z][d+128]),
d                 500 lib/raid6/avx512.c 				       "m" (dptr[z][d+192]));
d                 505 lib/raid6/avx512.c 			     : "m" (q[d]), "m" (q[d+128]));
d                 548 lib/raid6/avx512.c 			     : "m" (p[d]),  "m" (p[d+64]), "m" (p[d+128]),
d                 549 lib/raid6/avx512.c 			       "m" (p[d+192]), "m" (q[d]),  "m" (q[d+64]),
d                 550 lib/raid6/avx512.c 			       "m" (q[d+128]), "m" (q[d+192]));
d                  39 lib/raid6/mmx.c 	int d, z, z0;
d                  50 lib/raid6/mmx.c 	for ( d = 0 ; d < bytes ; d += 8 ) {
d                  51 lib/raid6/mmx.c 		asm volatile("movq %0,%%mm2" : : "m" (dptr[z0][d])); /* P[0] */
d                  54 lib/raid6/mmx.c 			asm volatile("movq %0,%%mm6" : : "m" (dptr[z][d]));
d                  63 lib/raid6/mmx.c 		asm volatile("movq %%mm2,%0" : "=m" (p[d]));
d                  65 lib/raid6/mmx.c 		asm volatile("movq %%mm4,%0" : "=m" (q[d]));
d                  87 lib/raid6/mmx.c 	int d, z, z0;
d                  99 lib/raid6/mmx.c 	for ( d = 0 ; d < bytes ; d += 16 ) {
d                 100 lib/raid6/mmx.c 		asm volatile("movq %0,%%mm2" : : "m" (dptr[z0][d])); /* P[0] */
d                 101 lib/raid6/mmx.c 		asm volatile("movq %0,%%mm3" : : "m" (dptr[z0][d+8]));
d                 113 lib/raid6/mmx.c 			asm volatile("movq %0,%%mm5" : : "m" (dptr[z][d]));
d                 114 lib/raid6/mmx.c 			asm volatile("movq %0,%%mm7" : : "m" (dptr[z][d+8]));
d                 122 lib/raid6/mmx.c 		asm volatile("movq %%mm2,%0" : "=m" (p[d]));
d                 123 lib/raid6/mmx.c 		asm volatile("movq %%mm3,%0" : "=m" (p[d+8]));
d                 124 lib/raid6/mmx.c 		asm volatile("movq %%mm4,%0" : "=m" (q[d]));
d                 125 lib/raid6/mmx.c 		asm volatile("movq %%mm6,%0" : "=m" (q[d+8]));
d                  44 lib/raid6/sse1.c 	int d, z, z0;
d                  55 lib/raid6/sse1.c 	for ( d = 0 ; d < bytes ; d += 8 ) {
d                  56 lib/raid6/sse1.c 		asm volatile("prefetchnta %0" : : "m" (dptr[z0][d]));
d                  57 lib/raid6/sse1.c 		asm volatile("movq %0,%%mm2" : : "m" (dptr[z0][d])); /* P[0] */
d                  58 lib/raid6/sse1.c 		asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d]));
d                  60 lib/raid6/sse1.c 		asm volatile("movq %0,%%mm6" : : "m" (dptr[z0-1][d]));
d                  62 lib/raid6/sse1.c 			asm volatile("prefetchnta %0" : : "m" (dptr[z][d]));
d                  70 lib/raid6/sse1.c 			asm volatile("movq %0,%%mm6" : : "m" (dptr[z][d]));
d                  80 lib/raid6/sse1.c 		asm volatile("movntq %%mm2,%0" : "=m" (p[d]));
d                  81 lib/raid6/sse1.c 		asm volatile("movntq %%mm4,%0" : "=m" (q[d]));
d                 103 lib/raid6/sse1.c 	int d, z, z0;
d                 116 lib/raid6/sse1.c 	for ( d = 0 ; d < bytes ; d += 16 ) {
d                 117 lib/raid6/sse1.c 		asm volatile("prefetchnta %0" : : "m" (dptr[z0][d]));
d                 118 lib/raid6/sse1.c 		asm volatile("movq %0,%%mm2" : : "m" (dptr[z0][d])); /* P[0] */
d                 119 lib/raid6/sse1.c 		asm volatile("movq %0,%%mm3" : : "m" (dptr[z0][d+8])); /* P[1] */
d                 123 lib/raid6/sse1.c 			asm volatile("prefetchnta %0" : : "m" (dptr[z][d]));
d                 132 lib/raid6/sse1.c 			asm volatile("movq %0,%%mm5" : : "m" (dptr[z][d]));
d                 133 lib/raid6/sse1.c 			asm volatile("movq %0,%%mm7" : : "m" (dptr[z][d+8]));
d                 141 lib/raid6/sse1.c 		asm volatile("movntq %%mm2,%0" : "=m" (p[d]));
d                 142 lib/raid6/sse1.c 		asm volatile("movntq %%mm3,%0" : "=m" (p[d+8]));
d                 143 lib/raid6/sse1.c 		asm volatile("movntq %%mm4,%0" : "=m" (q[d]));
d                 144 lib/raid6/sse1.c 		asm volatile("movntq %%mm6,%0" : "=m" (q[d+8]));
d                  40 lib/raid6/sse2.c 	int d, z, z0;
d                  51 lib/raid6/sse2.c 	for ( d = 0 ; d < bytes ; d += 16 ) {
d                  52 lib/raid6/sse2.c 		asm volatile("prefetchnta %0" : : "m" (dptr[z0][d]));
d                  53 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm2" : : "m" (dptr[z0][d])); /* P[0] */
d                  54 lib/raid6/sse2.c 		asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d]));
d                  56 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm6" : : "m" (dptr[z0-1][d]));
d                  58 lib/raid6/sse2.c 			asm volatile("prefetchnta %0" : : "m" (dptr[z][d]));
d                  66 lib/raid6/sse2.c 			asm volatile("movdqa %0,%%xmm6" : : "m" (dptr[z][d]));
d                  76 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm2,%0" : "=m" (p[d]));
d                  78 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm4,%0" : "=m" (q[d]));
d                  92 lib/raid6/sse2.c 	int d, z, z0;
d                 102 lib/raid6/sse2.c 	for ( d = 0 ; d < bytes ; d += 16 ) {
d                 103 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm4" :: "m" (dptr[z0][d]));
d                 104 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm2" : : "m" (p[d]));
d                 113 lib/raid6/sse2.c 			asm volatile("movdqa %0,%%xmm5" :: "m" (dptr[z][d]));
d                 125 lib/raid6/sse2.c 		asm volatile("pxor %0,%%xmm4" : : "m" (q[d]));
d                 127 lib/raid6/sse2.c 		asm volatile("movdqa %%xmm4,%0" : "=m" (q[d]));
d                 128 lib/raid6/sse2.c 		asm volatile("movdqa %%xmm2,%0" : "=m" (p[d]));
d                 150 lib/raid6/sse2.c 	int d, z, z0;
d                 163 lib/raid6/sse2.c 	for ( d = 0 ; d < bytes ; d += 32 ) {
d                 164 lib/raid6/sse2.c 		asm volatile("prefetchnta %0" : : "m" (dptr[z0][d]));
d                 165 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm2" : : "m" (dptr[z0][d]));    /* P[0] */
d                 166 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm3" : : "m" (dptr[z0][d+16])); /* P[1] */
d                 170 lib/raid6/sse2.c 			asm volatile("prefetchnta %0" : : "m" (dptr[z][d]));
d                 179 lib/raid6/sse2.c 			asm volatile("movdqa %0,%%xmm5" : : "m" (dptr[z][d]));
d                 180 lib/raid6/sse2.c 			asm volatile("movdqa %0,%%xmm7" : : "m" (dptr[z][d+16]));
d                 188 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm2,%0" : "=m" (p[d]));
d                 189 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm3,%0" : "=m" (p[d+16]));
d                 190 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm4,%0" : "=m" (q[d]));
d                 191 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm6,%0" : "=m" (q[d+16]));
d                 203 lib/raid6/sse2.c 	int d, z, z0;
d                 213 lib/raid6/sse2.c 	for ( d = 0 ; d < bytes ; d += 32 ) {
d                 214 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm4" :: "m" (dptr[z0][d]));
d                 215 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm6" :: "m" (dptr[z0][d+16]));
d                 216 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm2" : : "m" (p[d]));
d                 217 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm3" : : "m" (p[d+16]));
d                 232 lib/raid6/sse2.c 			asm volatile("movdqa %0,%%xmm5" :: "m" (dptr[z][d]));
d                 233 lib/raid6/sse2.c 			asm volatile("movdqa %0,%%xmm7" :: "m" (dptr[z][d+16]));
d                 252 lib/raid6/sse2.c 		asm volatile("pxor %0,%%xmm4" : : "m" (q[d]));
d                 253 lib/raid6/sse2.c 		asm volatile("pxor %0,%%xmm6" : : "m" (q[d+16]));
d                 255 lib/raid6/sse2.c 		asm volatile("movdqa %%xmm4,%0" : "=m" (q[d]));
d                 256 lib/raid6/sse2.c 		asm volatile("movdqa %%xmm6,%0" : "=m" (q[d+16]));
d                 257 lib/raid6/sse2.c 		asm volatile("movdqa %%xmm2,%0" : "=m" (p[d]));
d                 258 lib/raid6/sse2.c 		asm volatile("movdqa %%xmm3,%0" : "=m" (p[d+16]));
d                 282 lib/raid6/sse2.c 	int d, z, z0;
d                 304 lib/raid6/sse2.c 	for ( d = 0 ; d < bytes ; d += 64 ) {
d                 307 lib/raid6/sse2.c 			asm volatile("prefetchnta %0" :: "m" (dptr[z][d]));
d                 308 lib/raid6/sse2.c 			asm volatile("prefetchnta %0" :: "m" (dptr[z][d+32]));
d                 325 lib/raid6/sse2.c 			asm volatile("movdqa %0,%%xmm5" :: "m" (dptr[z][d]));
d                 326 lib/raid6/sse2.c 			asm volatile("movdqa %0,%%xmm7" :: "m" (dptr[z][d+16]));
d                 327 lib/raid6/sse2.c 			asm volatile("movdqa %0,%%xmm13" :: "m" (dptr[z][d+32]));
d                 328 lib/raid6/sse2.c 			asm volatile("movdqa %0,%%xmm15" :: "m" (dptr[z][d+48]));
d                 342 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm2,%0" : "=m" (p[d]));
d                 344 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm3,%0" : "=m" (p[d+16]));
d                 346 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm10,%0" : "=m" (p[d+32]));
d                 348 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm11,%0" : "=m" (p[d+48]));
d                 350 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm4,%0" : "=m" (q[d]));
d                 352 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm6,%0" : "=m" (q[d+16]));
d                 354 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm12,%0" : "=m" (q[d+32]));
d                 356 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm14,%0" : "=m" (q[d+48]));
d                 369 lib/raid6/sse2.c 	int d, z, z0;
d                 379 lib/raid6/sse2.c 	for ( d = 0 ; d < bytes ; d += 64 ) {
d                 380 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm4" :: "m" (dptr[z0][d]));
d                 381 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm6" :: "m" (dptr[z0][d+16]));
d                 382 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm12" :: "m" (dptr[z0][d+32]));
d                 383 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm14" :: "m" (dptr[z0][d+48]));
d                 384 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm2" : : "m" (p[d]));
d                 385 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm3" : : "m" (p[d+16]));
d                 386 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm10" : : "m" (p[d+32]));
d                 387 lib/raid6/sse2.c 		asm volatile("movdqa %0,%%xmm11" : : "m" (p[d+48]));
d                 394 lib/raid6/sse2.c 			asm volatile("prefetchnta %0" :: "m" (dptr[z][d]));
d                 395 lib/raid6/sse2.c 			asm volatile("prefetchnta %0" :: "m" (dptr[z][d+32]));
d                 416 lib/raid6/sse2.c 			asm volatile("movdqa %0,%%xmm5" :: "m" (dptr[z][d]));
d                 417 lib/raid6/sse2.c 			asm volatile("movdqa %0,%%xmm7" :: "m" (dptr[z][d+16]));
d                 418 lib/raid6/sse2.c 			asm volatile("movdqa %0,%%xmm13" :: "m" (dptr[z][d+32]));
d                 419 lib/raid6/sse2.c 			asm volatile("movdqa %0,%%xmm15" :: "m" (dptr[z][d+48]));
d                 429 lib/raid6/sse2.c 		asm volatile("prefetchnta %0" :: "m" (q[d]));
d                 430 lib/raid6/sse2.c 		asm volatile("prefetchnta %0" :: "m" (q[d+32]));
d                 454 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm2,%0" : "=m" (p[d]));
d                 455 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm3,%0" : "=m" (p[d+16]));
d                 456 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm10,%0" : "=m" (p[d+32]));
d                 457 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm11,%0" : "=m" (p[d+48]));
d                 458 lib/raid6/sse2.c 		asm volatile("pxor %0,%%xmm4" : : "m" (q[d]));
d                 459 lib/raid6/sse2.c 		asm volatile("pxor %0,%%xmm6" : : "m" (q[d+16]));
d                 460 lib/raid6/sse2.c 		asm volatile("pxor %0,%%xmm12" : : "m" (q[d+32]));
d                 461 lib/raid6/sse2.c 		asm volatile("pxor %0,%%xmm14" : : "m" (q[d+48]));
d                 462 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm4,%0" : "=m" (q[d]));
d                 463 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm6,%0" : "=m" (q[d+16]));
d                 464 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm12,%0" : "=m" (q[d+32]));
d                 465 lib/raid6/sse2.c 		asm volatile("movntdq %%xmm14,%0" : "=m" (q[d+48]));
d                  41 lib/raid6/test/test.c static char disk_type(int d)
d                  43 lib/raid6/test/test.c 	switch (d) {
d                  62 lib/random32.c #define TAUSWORTHE(s, a, b, c, d) ((s & c) << d) ^ (((s << a) ^ s) >> b)
d                 233 lib/sort.c     		size_t b, c, d;
d                 254 lib/sort.c     		for (b = a; c = 2*b + size, (d = c + size) < n;)
d                 255 lib/sort.c     			b = do_cmp(base + c, base + d, cmp_func, priv) >= 0 ? c : d;
d                 256 lib/sort.c     		if (d == n)	/* Special case last leaf with no sibling */
d                 151 lib/test_hexdump.c 	size_t d = min_t(size_t, sizeof(data_b), rowsize);
d                 152 lib/test_hexdump.c 	size_t len = get_random_int() % d + 1;
d                 126 lib/ts_fsm.c   static inline int match_token(struct ts_fsm_token *t, u8 d)
d                 129 lib/ts_fsm.c   		return (token_lookup_tbl[d] & t->type) != 0;
d                 131 lib/ts_fsm.c   		return t->value == d;
d                 831 lib/vsprintf.c char *dentry_name(char *buf, char *end, const struct dentry *d, struct printf_spec spec,
d                 848 lib/vsprintf.c 	for (i = 0; i < depth; i++, d = p) {
d                 849 lib/vsprintf.c 		if (check_pointer(&buf, end, d, spec)) {
d                 854 lib/vsprintf.c 		p = READ_ONCE(d->d_parent);
d                 855 lib/vsprintf.c 		array[i] = READ_ONCE(d->d_name.name);
d                 856 lib/vsprintf.c 		if (p == d) {
d                1791 lib/zstd/compress.c #define NEXT_IN_CHAIN(d, mask) chainTable[(d)&mask]
d                1105 mm/mempolicy.c 		int s,d;
d                1130 mm/mempolicy.c 			d = node_remap(s, *from, *to);
d                1131 mm/mempolicy.c 			if (s == d)
d                1135 mm/mempolicy.c 			dest = d;
d                3034 mm/slab.c      #define cache_alloc_debugcheck_after(a,b,objp,d) (objp)
d                 697 mm/slob.c      int __kmem_cache_shrink(struct kmem_cache *d)
d                2324 mm/slub.c      static void flush_cpu_slab(void *d)
d                2326 mm/slub.c      	struct kmem_cache *s = d;
d                 152 net/802/garp.c 	int d;
d                 156 net/802/garp.c 		d = garp_attr_cmp(attr, data, len, type);
d                 157 net/802/garp.c 		if (d > 0)
d                 159 net/802/garp.c 		else if (d < 0)
d                 172 net/802/garp.c 	int d;
d                 177 net/802/garp.c 		d = garp_attr_cmp(attr, data, len, type);
d                 178 net/802/garp.c 		if (d > 0)
d                 180 net/802/garp.c 		else if (d < 0)
d                 241 net/802/mrp.c  	int d;
d                 245 net/802/mrp.c  		d = mrp_attr_cmp(attr, value, len, type);
d                 246 net/802/mrp.c  		if (d > 0)
d                 248 net/802/mrp.c  		else if (d < 0)
d                 261 net/802/mrp.c  	int d;
d                 266 net/802/mrp.c  		d = mrp_attr_cmp(attr, value, len, type);
d                 267 net/802/mrp.c  		if (d > 0)
d                 269 net/802/mrp.c  		else if (d < 0)
d                 160 net/ax25/ax25_addr.c 	int d = 0;
d                 189 net/ax25/ax25_addr.c 		if (d >= AX25_MAX_DIGIS)
d                 194 net/ax25/ax25_addr.c 		memcpy(&digi->calls[d], buf, AX25_ADDR_LEN);
d                 195 net/ax25/ax25_addr.c 		digi->ndigi = d + 1;
d                 198 net/ax25/ax25_addr.c 			digi->repeated[d] = 1;
d                 199 net/ax25/ax25_addr.c 			digi->lastrepeat  = d;
d                 201 net/ax25/ax25_addr.c 			digi->repeated[d] = 0;
d                 206 net/ax25/ax25_addr.c 		d++;
d                 216 net/ax25/ax25_addr.c 	const ax25_address *dest, const ax25_digi *d, int flag, int modulus)
d                 244 net/ax25/ax25_addr.c 	if (d == NULL || d->ndigi == 0) {
d                 252 net/ax25/ax25_addr.c 	while (ct < d->ndigi) {
d                 253 net/ax25/ax25_addr.c 		memcpy(buf, &d->calls[ct], AX25_ADDR_LEN);
d                 255 net/ax25/ax25_addr.c 		if (d->repeated[ct])
d                 622 net/bluetooth/hci_conn.c 	struct hci_dev *hdev = NULL, *d;
d                 628 net/bluetooth/hci_conn.c 	list_for_each_entry(d, &hci_dev_list, list) {
d                 629 net/bluetooth/hci_conn.c 		if (!test_bit(HCI_UP, &d->flags) ||
d                 630 net/bluetooth/hci_conn.c 		    hci_dev_test_flag(d, HCI_USER_CHANNEL) ||
d                 631 net/bluetooth/hci_conn.c 		    d->dev_type != HCI_PRIMARY)
d                 644 net/bluetooth/hci_conn.c 				if (!lmp_bredr_capable(d))
d                 646 net/bluetooth/hci_conn.c 				bacpy(&id_addr, &d->bdaddr);
d                 649 net/bluetooth/hci_conn.c 				if (!lmp_le_capable(d))
d                 652 net/bluetooth/hci_conn.c 				hci_copy_identity_address(d, &id_addr,
d                 663 net/bluetooth/hci_conn.c 				hdev = d; break;
d                 666 net/bluetooth/hci_conn.c 			if (bacmp(&d->bdaddr, dst)) {
d                 667 net/bluetooth/hci_conn.c 				hdev = d; break;
d                1006 net/bluetooth/hci_core.c 	struct hci_dev *hdev = NULL, *d;
d                1014 net/bluetooth/hci_core.c 	list_for_each_entry(d, &hci_dev_list, list) {
d                1015 net/bluetooth/hci_core.c 		if (d->id == index) {
d                1016 net/bluetooth/hci_core.c 			hdev = hci_dev_hold(d);
d                1258 net/bluetooth/hci_event.c 	struct discovery_state *d = &hdev->discovery;
d                1260 net/bluetooth/hci_event.c 	return bacmp(&d->last_adv_addr, BDADDR_ANY);
d                1265 net/bluetooth/hci_event.c 	struct discovery_state *d = &hdev->discovery;
d                1267 net/bluetooth/hci_event.c 	bacpy(&d->last_adv_addr, BDADDR_ANY);
d                1268 net/bluetooth/hci_event.c 	d->last_adv_data_len = 0;
d                1275 net/bluetooth/hci_event.c 	struct discovery_state *d = &hdev->discovery;
d                1277 net/bluetooth/hci_event.c 	bacpy(&d->last_adv_addr, bdaddr);
d                1278 net/bluetooth/hci_event.c 	d->last_adv_addr_type = bdaddr_type;
d                1279 net/bluetooth/hci_event.c 	d->last_adv_rssi = rssi;
d                1280 net/bluetooth/hci_event.c 	d->last_adv_flags = flags;
d                1281 net/bluetooth/hci_event.c 	memcpy(d->last_adv_data, data, len);
d                1282 net/bluetooth/hci_event.c 	d->last_adv_data_len = len;
d                1302 net/bluetooth/hci_event.c 			struct discovery_state *d = &hdev->discovery;
d                1304 net/bluetooth/hci_event.c 			mgmt_device_found(hdev, &d->last_adv_addr, LE_LINK,
d                1305 net/bluetooth/hci_event.c 					  d->last_adv_addr_type, NULL,
d                1306 net/bluetooth/hci_event.c 					  d->last_adv_rssi, d->last_adv_flags,
d                1307 net/bluetooth/hci_event.c 					  d->last_adv_data,
d                1308 net/bluetooth/hci_event.c 					  d->last_adv_data_len, NULL, 0);
d                5235 net/bluetooth/hci_event.c 	struct discovery_state *d = &hdev->discovery;
d                5384 net/bluetooth/hci_event.c 	match = (!bacmp(bdaddr, &d->last_adv_addr) &&
d                5385 net/bluetooth/hci_event.c 		 bdaddr_type == d->last_adv_addr_type);
d                5394 net/bluetooth/hci_event.c 			mgmt_device_found(hdev, &d->last_adv_addr, LE_LINK,
d                5395 net/bluetooth/hci_event.c 					  d->last_adv_addr_type, NULL,
d                5396 net/bluetooth/hci_event.c 					  d->last_adv_rssi, d->last_adv_flags,
d                5397 net/bluetooth/hci_event.c 					  d->last_adv_data,
d                5398 net/bluetooth/hci_event.c 					  d->last_adv_data_len, NULL, 0);
d                5422 net/bluetooth/hci_event.c 	mgmt_device_found(hdev, &d->last_adv_addr, LE_LINK,
d                5423 net/bluetooth/hci_event.c 			  d->last_adv_addr_type, NULL, rssi, d->last_adv_flags,
d                5424 net/bluetooth/hci_event.c 			  d->last_adv_data, d->last_adv_data_len, data, len);
d                2630 net/bluetooth/hci_request.c 	struct discovery_state *d = &hdev->discovery;
d                2637 net/bluetooth/hci_request.c 	if (d->state == DISCOVERY_FINDING || d->state == DISCOVERY_STOPPING) {
d                2656 net/bluetooth/hci_request.c 	if (d->type == DISCOV_TYPE_LE)
d                2659 net/bluetooth/hci_request.c 	if (d->state == DISCOVERY_RESOLVING || d->state == DISCOVERY_STOPPING) {
d                  28 net/bluetooth/leds.c 		struct hci_dev *d;
d                  32 net/bluetooth/leds.c 		list_for_each_entry(d, &hci_dev_list, list) {
d                  33 net/bluetooth/leds.c 			if (test_bit(HCI_UP, &d->flags))
d                  36 net/bluetooth/lib.c 	unsigned char *d = (unsigned char *)dst;
d                  40 net/bluetooth/lib.c 		d[i] = s[5 - i];
d                 356 net/bluetooth/mgmt.c 	struct hci_dev *d;
d                 366 net/bluetooth/mgmt.c 	list_for_each_entry(d, &hci_dev_list, list) {
d                 367 net/bluetooth/mgmt.c 		if (d->dev_type == HCI_PRIMARY &&
d                 368 net/bluetooth/mgmt.c 		    !hci_dev_test_flag(d, HCI_UNCONFIGURED))
d                 380 net/bluetooth/mgmt.c 	list_for_each_entry(d, &hci_dev_list, list) {
d                 381 net/bluetooth/mgmt.c 		if (hci_dev_test_flag(d, HCI_SETUP) ||
d                 382 net/bluetooth/mgmt.c 		    hci_dev_test_flag(d, HCI_CONFIG) ||
d                 383 net/bluetooth/mgmt.c 		    hci_dev_test_flag(d, HCI_USER_CHANNEL))
d                 389 net/bluetooth/mgmt.c 		if (test_bit(HCI_QUIRK_RAW_DEVICE, &d->quirks))
d                 392 net/bluetooth/mgmt.c 		if (d->dev_type == HCI_PRIMARY &&
d                 393 net/bluetooth/mgmt.c 		    !hci_dev_test_flag(d, HCI_UNCONFIGURED)) {
d                 394 net/bluetooth/mgmt.c 			rp->index[count++] = cpu_to_le16(d->id);
d                 395 net/bluetooth/mgmt.c 			BT_DBG("Added hci%u", d->id);
d                 416 net/bluetooth/mgmt.c 	struct hci_dev *d;
d                 426 net/bluetooth/mgmt.c 	list_for_each_entry(d, &hci_dev_list, list) {
d                 427 net/bluetooth/mgmt.c 		if (d->dev_type == HCI_PRIMARY &&
d                 428 net/bluetooth/mgmt.c 		    hci_dev_test_flag(d, HCI_UNCONFIGURED))
d                 440 net/bluetooth/mgmt.c 	list_for_each_entry(d, &hci_dev_list, list) {
d                 441 net/bluetooth/mgmt.c 		if (hci_dev_test_flag(d, HCI_SETUP) ||
d                 442 net/bluetooth/mgmt.c 		    hci_dev_test_flag(d, HCI_CONFIG) ||
d                 443 net/bluetooth/mgmt.c 		    hci_dev_test_flag(d, HCI_USER_CHANNEL))
d                 449 net/bluetooth/mgmt.c 		if (test_bit(HCI_QUIRK_RAW_DEVICE, &d->quirks))
d                 452 net/bluetooth/mgmt.c 		if (d->dev_type == HCI_PRIMARY &&
d                 453 net/bluetooth/mgmt.c 		    hci_dev_test_flag(d, HCI_UNCONFIGURED)) {
d                 454 net/bluetooth/mgmt.c 			rp->index[count++] = cpu_to_le16(d->id);
d                 455 net/bluetooth/mgmt.c 			BT_DBG("Added hci%u", d->id);
d                 476 net/bluetooth/mgmt.c 	struct hci_dev *d;
d                 485 net/bluetooth/mgmt.c 	list_for_each_entry(d, &hci_dev_list, list) {
d                 486 net/bluetooth/mgmt.c 		if (d->dev_type == HCI_PRIMARY || d->dev_type == HCI_AMP)
d                 497 net/bluetooth/mgmt.c 	list_for_each_entry(d, &hci_dev_list, list) {
d                 498 net/bluetooth/mgmt.c 		if (hci_dev_test_flag(d, HCI_SETUP) ||
d                 499 net/bluetooth/mgmt.c 		    hci_dev_test_flag(d, HCI_CONFIG) ||
d                 500 net/bluetooth/mgmt.c 		    hci_dev_test_flag(d, HCI_USER_CHANNEL))
d                 506 net/bluetooth/mgmt.c 		if (test_bit(HCI_QUIRK_RAW_DEVICE, &d->quirks))
d                 509 net/bluetooth/mgmt.c 		if (d->dev_type == HCI_PRIMARY) {
d                 510 net/bluetooth/mgmt.c 			if (hci_dev_test_flag(d, HCI_UNCONFIGURED))
d                 514 net/bluetooth/mgmt.c 		} else if (d->dev_type == HCI_AMP) {
d                 520 net/bluetooth/mgmt.c 		rp->entry[count].bus = d->bus;
d                 521 net/bluetooth/mgmt.c 		rp->entry[count++].index = cpu_to_le16(d->id);
d                 522 net/bluetooth/mgmt.c 		BT_DBG("Added hci%u", d->id);
d                  57 net/bluetooth/rfcomm/core.c static int rfcomm_queue_disc(struct rfcomm_dlc *d);
d                  59 net/bluetooth/rfcomm/core.c static int rfcomm_send_pn(struct rfcomm_session *s, int cr, struct rfcomm_dlc *d);
d                 212 net/bluetooth/rfcomm/core.c static int rfcomm_check_security(struct rfcomm_dlc *d)
d                 214 net/bluetooth/rfcomm/core.c 	struct sock *sk = d->session->sock->sk;
d                 219 net/bluetooth/rfcomm/core.c 	switch (d->sec_level) {
d                 232 net/bluetooth/rfcomm/core.c 	return hci_conn_security(conn->hcon, d->sec_level, auth_type,
d                 233 net/bluetooth/rfcomm/core.c 				 d->out);
d                 263 net/bluetooth/rfcomm/core.c 	struct rfcomm_dlc *d = from_timer(d, t, timer);
d                 265 net/bluetooth/rfcomm/core.c 	BT_DBG("dlc %p state %ld", d, d->state);
d                 267 net/bluetooth/rfcomm/core.c 	set_bit(RFCOMM_TIMED_OUT, &d->flags);
d                 268 net/bluetooth/rfcomm/core.c 	rfcomm_dlc_put(d);
d                 272 net/bluetooth/rfcomm/core.c static void rfcomm_dlc_set_timer(struct rfcomm_dlc *d, long timeout)
d                 274 net/bluetooth/rfcomm/core.c 	BT_DBG("dlc %p state %ld timeout %ld", d, d->state, timeout);
d                 276 net/bluetooth/rfcomm/core.c 	if (!mod_timer(&d->timer, jiffies + timeout))
d                 277 net/bluetooth/rfcomm/core.c 		rfcomm_dlc_hold(d);
d                 280 net/bluetooth/rfcomm/core.c static void rfcomm_dlc_clear_timer(struct rfcomm_dlc *d)
d                 282 net/bluetooth/rfcomm/core.c 	BT_DBG("dlc %p state %ld", d, d->state);
d                 284 net/bluetooth/rfcomm/core.c 	if (del_timer(&d->timer))
d                 285 net/bluetooth/rfcomm/core.c 		rfcomm_dlc_put(d);
d                 288 net/bluetooth/rfcomm/core.c static void rfcomm_dlc_clear_state(struct rfcomm_dlc *d)
d                 290 net/bluetooth/rfcomm/core.c 	BT_DBG("%p", d);
d                 292 net/bluetooth/rfcomm/core.c 	d->state      = BT_OPEN;
d                 293 net/bluetooth/rfcomm/core.c 	d->flags      = 0;
d                 294 net/bluetooth/rfcomm/core.c 	d->mscex      = 0;
d                 295 net/bluetooth/rfcomm/core.c 	d->sec_level  = BT_SECURITY_LOW;
d                 296 net/bluetooth/rfcomm/core.c 	d->mtu        = RFCOMM_DEFAULT_MTU;
d                 297 net/bluetooth/rfcomm/core.c 	d->v24_sig    = RFCOMM_V24_RTC | RFCOMM_V24_RTR | RFCOMM_V24_DV;
d                 299 net/bluetooth/rfcomm/core.c 	d->cfc        = RFCOMM_CFC_DISABLED;
d                 300 net/bluetooth/rfcomm/core.c 	d->rx_credits = RFCOMM_DEFAULT_CREDITS;
d                 305 net/bluetooth/rfcomm/core.c 	struct rfcomm_dlc *d = kzalloc(sizeof(*d), prio);
d                 307 net/bluetooth/rfcomm/core.c 	if (!d)
d                 310 net/bluetooth/rfcomm/core.c 	timer_setup(&d->timer, rfcomm_dlc_timeout, 0);
d                 312 net/bluetooth/rfcomm/core.c 	skb_queue_head_init(&d->tx_queue);
d                 313 net/bluetooth/rfcomm/core.c 	mutex_init(&d->lock);
d                 314 net/bluetooth/rfcomm/core.c 	refcount_set(&d->refcnt, 1);
d                 316 net/bluetooth/rfcomm/core.c 	rfcomm_dlc_clear_state(d);
d                 318 net/bluetooth/rfcomm/core.c 	BT_DBG("%p", d);
d                 320 net/bluetooth/rfcomm/core.c 	return d;
d                 323 net/bluetooth/rfcomm/core.c void rfcomm_dlc_free(struct rfcomm_dlc *d)
d                 325 net/bluetooth/rfcomm/core.c 	BT_DBG("%p", d);
d                 327 net/bluetooth/rfcomm/core.c 	skb_queue_purge(&d->tx_queue);
d                 328 net/bluetooth/rfcomm/core.c 	kfree(d);
d                 331 net/bluetooth/rfcomm/core.c static void rfcomm_dlc_link(struct rfcomm_session *s, struct rfcomm_dlc *d)
d                 333 net/bluetooth/rfcomm/core.c 	BT_DBG("dlc %p session %p", d, s);
d                 336 net/bluetooth/rfcomm/core.c 	rfcomm_dlc_hold(d);
d                 337 net/bluetooth/rfcomm/core.c 	list_add(&d->list, &s->dlcs);
d                 338 net/bluetooth/rfcomm/core.c 	d->session = s;
d                 341 net/bluetooth/rfcomm/core.c static void rfcomm_dlc_unlink(struct rfcomm_dlc *d)
d                 343 net/bluetooth/rfcomm/core.c 	struct rfcomm_session *s = d->session;
d                 345 net/bluetooth/rfcomm/core.c 	BT_DBG("dlc %p refcnt %d session %p", d, refcount_read(&d->refcnt), s);
d                 347 net/bluetooth/rfcomm/core.c 	list_del(&d->list);
d                 348 net/bluetooth/rfcomm/core.c 	d->session = NULL;
d                 349 net/bluetooth/rfcomm/core.c 	rfcomm_dlc_put(d);
d                 357 net/bluetooth/rfcomm/core.c 	struct rfcomm_dlc *d;
d                 359 net/bluetooth/rfcomm/core.c 	list_for_each_entry(d, &s->dlcs, list)
d                 360 net/bluetooth/rfcomm/core.c 		if (d->dlci == dlci)
d                 361 net/bluetooth/rfcomm/core.c 			return d;
d                 371 net/bluetooth/rfcomm/core.c static int __rfcomm_dlc_open(struct rfcomm_dlc *d, bdaddr_t *src, bdaddr_t *dst, u8 channel)
d                 378 net/bluetooth/rfcomm/core.c 	       d, d->state, src, dst, channel);
d                 383 net/bluetooth/rfcomm/core.c 	if (d->state != BT_OPEN && d->state != BT_CLOSED)
d                 388 net/bluetooth/rfcomm/core.c 		s = rfcomm_session_create(src, dst, d->sec_level, &err);
d                 399 net/bluetooth/rfcomm/core.c 	rfcomm_dlc_clear_state(d);
d                 401 net/bluetooth/rfcomm/core.c 	d->dlci     = dlci;
d                 402 net/bluetooth/rfcomm/core.c 	d->addr     = __addr(s->initiator, dlci);
d                 403 net/bluetooth/rfcomm/core.c 	d->priority = 7;
d                 405 net/bluetooth/rfcomm/core.c 	d->state = BT_CONFIG;
d                 406 net/bluetooth/rfcomm/core.c 	rfcomm_dlc_link(s, d);
d                 408 net/bluetooth/rfcomm/core.c 	d->out = 1;
d                 410 net/bluetooth/rfcomm/core.c 	d->mtu = s->mtu;
d                 411 net/bluetooth/rfcomm/core.c 	d->cfc = (s->cfc == RFCOMM_CFC_UNKNOWN) ? 0 : s->cfc;
d                 414 net/bluetooth/rfcomm/core.c 		if (rfcomm_check_security(d))
d                 415 net/bluetooth/rfcomm/core.c 			rfcomm_send_pn(s, 1, d);
d                 417 net/bluetooth/rfcomm/core.c 			set_bit(RFCOMM_AUTH_PENDING, &d->flags);
d                 420 net/bluetooth/rfcomm/core.c 	rfcomm_dlc_set_timer(d, RFCOMM_CONN_TIMEOUT);
d                 425 net/bluetooth/rfcomm/core.c int rfcomm_dlc_open(struct rfcomm_dlc *d, bdaddr_t *src, bdaddr_t *dst, u8 channel)
d                 431 net/bluetooth/rfcomm/core.c 	r = __rfcomm_dlc_open(d, src, dst, channel);
d                 437 net/bluetooth/rfcomm/core.c static void __rfcomm_dlc_disconn(struct rfcomm_dlc *d)
d                 439 net/bluetooth/rfcomm/core.c 	struct rfcomm_session *s = d->session;
d                 441 net/bluetooth/rfcomm/core.c 	d->state = BT_DISCONN;
d                 442 net/bluetooth/rfcomm/core.c 	if (skb_queue_empty(&d->tx_queue)) {
d                 443 net/bluetooth/rfcomm/core.c 		rfcomm_send_disc(s, d->dlci);
d                 444 net/bluetooth/rfcomm/core.c 		rfcomm_dlc_set_timer(d, RFCOMM_DISC_TIMEOUT);
d                 446 net/bluetooth/rfcomm/core.c 		rfcomm_queue_disc(d);
d                 447 net/bluetooth/rfcomm/core.c 		rfcomm_dlc_set_timer(d, RFCOMM_DISC_TIMEOUT * 2);
d                 451 net/bluetooth/rfcomm/core.c static int __rfcomm_dlc_close(struct rfcomm_dlc *d, int err)
d                 453 net/bluetooth/rfcomm/core.c 	struct rfcomm_session *s = d->session;
d                 458 net/bluetooth/rfcomm/core.c 			d, d->state, d->dlci, err, s);
d                 460 net/bluetooth/rfcomm/core.c 	switch (d->state) {
d                 465 net/bluetooth/rfcomm/core.c 		if (test_and_clear_bit(RFCOMM_DEFER_SETUP, &d->flags)) {
d                 466 net/bluetooth/rfcomm/core.c 			set_bit(RFCOMM_AUTH_REJECT, &d->flags);
d                 472 net/bluetooth/rfcomm/core.c 	switch (d->state) {
d                 475 net/bluetooth/rfcomm/core.c 		__rfcomm_dlc_disconn(d);
d                 480 net/bluetooth/rfcomm/core.c 			__rfcomm_dlc_disconn(d);
d                 489 net/bluetooth/rfcomm/core.c 		rfcomm_dlc_clear_timer(d);
d                 491 net/bluetooth/rfcomm/core.c 		rfcomm_dlc_lock(d);
d                 492 net/bluetooth/rfcomm/core.c 		d->state = BT_CLOSED;
d                 493 net/bluetooth/rfcomm/core.c 		d->state_change(d, err);
d                 494 net/bluetooth/rfcomm/core.c 		rfcomm_dlc_unlock(d);
d                 496 net/bluetooth/rfcomm/core.c 		skb_queue_purge(&d->tx_queue);
d                 497 net/bluetooth/rfcomm/core.c 		rfcomm_dlc_unlink(d);
d                 503 net/bluetooth/rfcomm/core.c int rfcomm_dlc_close(struct rfcomm_dlc *d, int err)
d                 509 net/bluetooth/rfcomm/core.c 	BT_DBG("dlc %p state %ld dlci %d err %d", d, d->state, d->dlci, err);
d                 513 net/bluetooth/rfcomm/core.c 	s = d->session;
d                 523 net/bluetooth/rfcomm/core.c 				if (d_list == d) {
d                 524 net/bluetooth/rfcomm/core.c 					r = __rfcomm_dlc_close(d, err);
d                 556 net/bluetooth/rfcomm/core.c int rfcomm_dlc_send(struct rfcomm_dlc *d, struct sk_buff *skb)
d                 560 net/bluetooth/rfcomm/core.c 	if (d->state != BT_CONNECTED)
d                 563 net/bluetooth/rfcomm/core.c 	BT_DBG("dlc %p mtu %d len %d", d, d->mtu, len);
d                 565 net/bluetooth/rfcomm/core.c 	if (len > d->mtu)
d                 568 net/bluetooth/rfcomm/core.c 	rfcomm_make_uih(skb, d->addr);
d                 569 net/bluetooth/rfcomm/core.c 	skb_queue_tail(&d->tx_queue, skb);
d                 571 net/bluetooth/rfcomm/core.c 	if (!test_bit(RFCOMM_TX_THROTTLED, &d->flags))
d                 576 net/bluetooth/rfcomm/core.c void rfcomm_dlc_send_noerror(struct rfcomm_dlc *d, struct sk_buff *skb)
d                 580 net/bluetooth/rfcomm/core.c 	BT_DBG("dlc %p mtu %d len %d", d, d->mtu, len);
d                 582 net/bluetooth/rfcomm/core.c 	rfcomm_make_uih(skb, d->addr);
d                 583 net/bluetooth/rfcomm/core.c 	skb_queue_tail(&d->tx_queue, skb);
d                 585 net/bluetooth/rfcomm/core.c 	if (d->state == BT_CONNECTED &&
d                 586 net/bluetooth/rfcomm/core.c 	    !test_bit(RFCOMM_TX_THROTTLED, &d->flags))
d                 590 net/bluetooth/rfcomm/core.c void __rfcomm_dlc_throttle(struct rfcomm_dlc *d)
d                 592 net/bluetooth/rfcomm/core.c 	BT_DBG("dlc %p state %ld", d, d->state);
d                 594 net/bluetooth/rfcomm/core.c 	if (!d->cfc) {
d                 595 net/bluetooth/rfcomm/core.c 		d->v24_sig |= RFCOMM_V24_FC;
d                 596 net/bluetooth/rfcomm/core.c 		set_bit(RFCOMM_MSC_PENDING, &d->flags);
d                 601 net/bluetooth/rfcomm/core.c void __rfcomm_dlc_unthrottle(struct rfcomm_dlc *d)
d                 603 net/bluetooth/rfcomm/core.c 	BT_DBG("dlc %p state %ld", d, d->state);
d                 605 net/bluetooth/rfcomm/core.c 	if (!d->cfc) {
d                 606 net/bluetooth/rfcomm/core.c 		d->v24_sig &= ~RFCOMM_V24_FC;
d                 607 net/bluetooth/rfcomm/core.c 		set_bit(RFCOMM_MSC_PENDING, &d->flags);
d                 617 net/bluetooth/rfcomm/core.c int rfcomm_dlc_set_modem_status(struct rfcomm_dlc *d, u8 v24_sig)
d                 620 net/bluetooth/rfcomm/core.c 			d, d->state, v24_sig);
d                 622 net/bluetooth/rfcomm/core.c 	if (test_bit(RFCOMM_RX_THROTTLED, &d->flags))
d                 627 net/bluetooth/rfcomm/core.c 	d->v24_sig = v24_sig;
d                 629 net/bluetooth/rfcomm/core.c 	if (!test_and_set_bit(RFCOMM_MSC_PENDING, &d->flags))
d                 635 net/bluetooth/rfcomm/core.c int rfcomm_dlc_get_modem_status(struct rfcomm_dlc *d, u8 *v24_sig)
d                 638 net/bluetooth/rfcomm/core.c 			d, d->state, d->v24_sig);
d                 640 net/bluetooth/rfcomm/core.c 	*v24_sig = d->v24_sig;
d                 711 net/bluetooth/rfcomm/core.c 	struct rfcomm_dlc *d, *n;
d                 718 net/bluetooth/rfcomm/core.c 	list_for_each_entry_safe(d, n, &s->dlcs, list) {
d                 719 net/bluetooth/rfcomm/core.c 		d->state = BT_CLOSED;
d                 720 net/bluetooth/rfcomm/core.c 		__rfcomm_dlc_close(d, err);
d                 856 net/bluetooth/rfcomm/core.c static int rfcomm_queue_disc(struct rfcomm_dlc *d)
d                 861 net/bluetooth/rfcomm/core.c 	BT_DBG("dlc %p dlci %d", d, d->dlci);
d                 868 net/bluetooth/rfcomm/core.c 	cmd->addr = d->addr;
d                 873 net/bluetooth/rfcomm/core.c 	skb_queue_tail(&d->tx_queue, skb);
d                 917 net/bluetooth/rfcomm/core.c static int rfcomm_send_pn(struct rfcomm_session *s, int cr, struct rfcomm_dlc *d)
d                 924 net/bluetooth/rfcomm/core.c 	BT_DBG("%p cr %d dlci %d mtu %d", s, cr, d->dlci, d->mtu);
d                 936 net/bluetooth/rfcomm/core.c 	pn->dlci        = d->dlci;
d                 937 net/bluetooth/rfcomm/core.c 	pn->priority    = d->priority;
d                 952 net/bluetooth/rfcomm/core.c 		pn->mtu = cpu_to_le16(d->mtu);
d                1173 net/bluetooth/rfcomm/core.c 		struct rfcomm_dlc *d = rfcomm_dlc_get(s, dlci);
d                1174 net/bluetooth/rfcomm/core.c 		if (!d) {
d                1179 net/bluetooth/rfcomm/core.c 		switch (d->state) {
d                1181 net/bluetooth/rfcomm/core.c 			rfcomm_dlc_clear_timer(d);
d                1183 net/bluetooth/rfcomm/core.c 			rfcomm_dlc_lock(d);
d                1184 net/bluetooth/rfcomm/core.c 			d->state = BT_CONNECTED;
d                1185 net/bluetooth/rfcomm/core.c 			d->state_change(d, 0);
d                1186 net/bluetooth/rfcomm/core.c 			rfcomm_dlc_unlock(d);
d                1188 net/bluetooth/rfcomm/core.c 			rfcomm_send_msc(s, 1, dlci, d->v24_sig);
d                1192 net/bluetooth/rfcomm/core.c 			d->state = BT_CLOSED;
d                1193 net/bluetooth/rfcomm/core.c 			__rfcomm_dlc_close(d, 0);
d                1227 net/bluetooth/rfcomm/core.c 		struct rfcomm_dlc *d = rfcomm_dlc_get(s, dlci);
d                1228 net/bluetooth/rfcomm/core.c 		if (d) {
d                1229 net/bluetooth/rfcomm/core.c 			if (d->state == BT_CONNECT || d->state == BT_CONFIG)
d                1234 net/bluetooth/rfcomm/core.c 			d->state = BT_CLOSED;
d                1235 net/bluetooth/rfcomm/core.c 			__rfcomm_dlc_close(d, err);
d                1256 net/bluetooth/rfcomm/core.c 		struct rfcomm_dlc *d = rfcomm_dlc_get(s, dlci);
d                1257 net/bluetooth/rfcomm/core.c 		if (d) {
d                1260 net/bluetooth/rfcomm/core.c 			if (d->state == BT_CONNECT || d->state == BT_CONFIG)
d                1265 net/bluetooth/rfcomm/core.c 			d->state = BT_CLOSED;
d                1266 net/bluetooth/rfcomm/core.c 			__rfcomm_dlc_close(d, err);
d                1283 net/bluetooth/rfcomm/core.c void rfcomm_dlc_accept(struct rfcomm_dlc *d)
d                1285 net/bluetooth/rfcomm/core.c 	struct sock *sk = d->session->sock->sk;
d                1288 net/bluetooth/rfcomm/core.c 	BT_DBG("dlc %p", d);
d                1290 net/bluetooth/rfcomm/core.c 	rfcomm_send_ua(d->session, d->dlci);
d                1292 net/bluetooth/rfcomm/core.c 	rfcomm_dlc_clear_timer(d);
d                1294 net/bluetooth/rfcomm/core.c 	rfcomm_dlc_lock(d);
d                1295 net/bluetooth/rfcomm/core.c 	d->state = BT_CONNECTED;
d                1296 net/bluetooth/rfcomm/core.c 	d->state_change(d, 0);
d                1297 net/bluetooth/rfcomm/core.c 	rfcomm_dlc_unlock(d);
d                1299 net/bluetooth/rfcomm/core.c 	if (d->role_switch)
d                1302 net/bluetooth/rfcomm/core.c 	rfcomm_send_msc(d->session, 1, d->dlci, d->v24_sig);
d                1305 net/bluetooth/rfcomm/core.c static void rfcomm_check_accept(struct rfcomm_dlc *d)
d                1307 net/bluetooth/rfcomm/core.c 	if (rfcomm_check_security(d)) {
d                1308 net/bluetooth/rfcomm/core.c 		if (d->defer_setup) {
d                1309 net/bluetooth/rfcomm/core.c 			set_bit(RFCOMM_DEFER_SETUP, &d->flags);
d                1310 net/bluetooth/rfcomm/core.c 			rfcomm_dlc_set_timer(d, RFCOMM_AUTH_TIMEOUT);
d                1312 net/bluetooth/rfcomm/core.c 			rfcomm_dlc_lock(d);
d                1313 net/bluetooth/rfcomm/core.c 			d->state = BT_CONNECT2;
d                1314 net/bluetooth/rfcomm/core.c 			d->state_change(d, 0);
d                1315 net/bluetooth/rfcomm/core.c 			rfcomm_dlc_unlock(d);
d                1317 net/bluetooth/rfcomm/core.c 			rfcomm_dlc_accept(d);
d                1319 net/bluetooth/rfcomm/core.c 		set_bit(RFCOMM_AUTH_PENDING, &d->flags);
d                1320 net/bluetooth/rfcomm/core.c 		rfcomm_dlc_set_timer(d, RFCOMM_AUTH_TIMEOUT);
d                1326 net/bluetooth/rfcomm/core.c 	struct rfcomm_dlc *d;
d                1342 net/bluetooth/rfcomm/core.c 	d = rfcomm_dlc_get(s, dlci);
d                1343 net/bluetooth/rfcomm/core.c 	if (d) {
d                1344 net/bluetooth/rfcomm/core.c 		if (d->state == BT_OPEN) {
d                1346 net/bluetooth/rfcomm/core.c 			rfcomm_check_accept(d);
d                1353 net/bluetooth/rfcomm/core.c 	if (rfcomm_connect_ind(s, channel, &d)) {
d                1354 net/bluetooth/rfcomm/core.c 		d->dlci = dlci;
d                1355 net/bluetooth/rfcomm/core.c 		d->addr = __addr(s->initiator, dlci);
d                1356 net/bluetooth/rfcomm/core.c 		rfcomm_dlc_link(s, d);
d                1358 net/bluetooth/rfcomm/core.c 		rfcomm_check_accept(d);
d                1366 net/bluetooth/rfcomm/core.c static int rfcomm_apply_pn(struct rfcomm_dlc *d, int cr, struct rfcomm_pn *pn)
d                1368 net/bluetooth/rfcomm/core.c 	struct rfcomm_session *s = d->session;
d                1371 net/bluetooth/rfcomm/core.c 			d, d->state, d->dlci, pn->mtu, pn->flow_ctrl, pn->credits);
d                1375 net/bluetooth/rfcomm/core.c 		d->cfc = RFCOMM_CFC_ENABLED;
d                1376 net/bluetooth/rfcomm/core.c 		d->tx_credits = pn->credits;
d                1378 net/bluetooth/rfcomm/core.c 		d->cfc = RFCOMM_CFC_DISABLED;
d                1379 net/bluetooth/rfcomm/core.c 		set_bit(RFCOMM_TX_THROTTLED, &d->flags);
d                1383 net/bluetooth/rfcomm/core.c 		s->cfc = d->cfc;
d                1385 net/bluetooth/rfcomm/core.c 	d->priority = pn->priority;
d                1387 net/bluetooth/rfcomm/core.c 	d->mtu = __le16_to_cpu(pn->mtu);
d                1389 net/bluetooth/rfcomm/core.c 	if (cr && d->mtu > s->mtu)
d                1390 net/bluetooth/rfcomm/core.c 		d->mtu = s->mtu;
d                1398 net/bluetooth/rfcomm/core.c 	struct rfcomm_dlc *d;
d                1406 net/bluetooth/rfcomm/core.c 	d = rfcomm_dlc_get(s, dlci);
d                1407 net/bluetooth/rfcomm/core.c 	if (d) {
d                1410 net/bluetooth/rfcomm/core.c 			rfcomm_apply_pn(d, cr, pn);
d                1411 net/bluetooth/rfcomm/core.c 			rfcomm_send_pn(s, 0, d);
d                1414 net/bluetooth/rfcomm/core.c 			switch (d->state) {
d                1416 net/bluetooth/rfcomm/core.c 				rfcomm_apply_pn(d, cr, pn);
d                1418 net/bluetooth/rfcomm/core.c 				d->state = BT_CONNECT;
d                1419 net/bluetooth/rfcomm/core.c 				rfcomm_send_sabm(s, d->dlci);
d                1431 net/bluetooth/rfcomm/core.c 		if (rfcomm_connect_ind(s, channel, &d)) {
d                1432 net/bluetooth/rfcomm/core.c 			d->dlci = dlci;
d                1433 net/bluetooth/rfcomm/core.c 			d->addr = __addr(s->initiator, dlci);
d                1434 net/bluetooth/rfcomm/core.c 			rfcomm_dlc_link(s, d);
d                1436 net/bluetooth/rfcomm/core.c 			rfcomm_apply_pn(d, cr, pn);
d                1438 net/bluetooth/rfcomm/core.c 			d->state = BT_OPEN;
d                1439 net/bluetooth/rfcomm/core.c 			rfcomm_send_pn(s, 0, d);
d                1575 net/bluetooth/rfcomm/core.c 	struct rfcomm_dlc *d;
d                1580 net/bluetooth/rfcomm/core.c 	d = rfcomm_dlc_get(s, dlci);
d                1581 net/bluetooth/rfcomm/core.c 	if (!d)
d                1585 net/bluetooth/rfcomm/core.c 		if (msc->v24_sig & RFCOMM_V24_FC && !d->cfc)
d                1586 net/bluetooth/rfcomm/core.c 			set_bit(RFCOMM_TX_THROTTLED, &d->flags);
d                1588 net/bluetooth/rfcomm/core.c 			clear_bit(RFCOMM_TX_THROTTLED, &d->flags);
d                1590 net/bluetooth/rfcomm/core.c 		rfcomm_dlc_lock(d);
d                1592 net/bluetooth/rfcomm/core.c 		d->remote_v24_sig = msc->v24_sig;
d                1594 net/bluetooth/rfcomm/core.c 		if (d->modem_status)
d                1595 net/bluetooth/rfcomm/core.c 			d->modem_status(d, msc->v24_sig);
d                1597 net/bluetooth/rfcomm/core.c 		rfcomm_dlc_unlock(d);
d                1601 net/bluetooth/rfcomm/core.c 		d->mscex |= RFCOMM_MSCEX_RX;
d                1603 net/bluetooth/rfcomm/core.c 		d->mscex |= RFCOMM_MSCEX_TX;
d                1670 net/bluetooth/rfcomm/core.c 	struct rfcomm_dlc *d;
d                1674 net/bluetooth/rfcomm/core.c 	d = rfcomm_dlc_get(s, dlci);
d                1675 net/bluetooth/rfcomm/core.c 	if (!d) {
d                1680 net/bluetooth/rfcomm/core.c 	if (pf && d->cfc) {
d                1683 net/bluetooth/rfcomm/core.c 		d->tx_credits += credits;
d                1684 net/bluetooth/rfcomm/core.c 		if (d->tx_credits)
d                1685 net/bluetooth/rfcomm/core.c 			clear_bit(RFCOMM_TX_THROTTLED, &d->flags);
d                1688 net/bluetooth/rfcomm/core.c 	if (skb->len && d->state == BT_CONNECTED) {
d                1689 net/bluetooth/rfcomm/core.c 		rfcomm_dlc_lock(d);
d                1690 net/bluetooth/rfcomm/core.c 		d->rx_credits--;
d                1691 net/bluetooth/rfcomm/core.c 		d->data_ready(d, skb);
d                1692 net/bluetooth/rfcomm/core.c 		rfcomm_dlc_unlock(d);
d                1771 net/bluetooth/rfcomm/core.c 	struct rfcomm_dlc *d, *n;
d                1775 net/bluetooth/rfcomm/core.c 	list_for_each_entry_safe(d, n, &s->dlcs, list) {
d                1776 net/bluetooth/rfcomm/core.c 		if (d->state == BT_CONFIG) {
d                1777 net/bluetooth/rfcomm/core.c 			d->mtu = s->mtu;
d                1778 net/bluetooth/rfcomm/core.c 			if (rfcomm_check_security(d)) {
d                1779 net/bluetooth/rfcomm/core.c 				rfcomm_send_pn(s, 1, d);
d                1781 net/bluetooth/rfcomm/core.c 				set_bit(RFCOMM_AUTH_PENDING, &d->flags);
d                1782 net/bluetooth/rfcomm/core.c 				rfcomm_dlc_set_timer(d, RFCOMM_AUTH_TIMEOUT);
d                1791 net/bluetooth/rfcomm/core.c static int rfcomm_process_tx(struct rfcomm_dlc *d)
d                1797 net/bluetooth/rfcomm/core.c 			d, d->state, d->cfc, d->rx_credits, d->tx_credits);
d                1800 net/bluetooth/rfcomm/core.c 	if (test_and_clear_bit(RFCOMM_MSC_PENDING, &d->flags))
d                1801 net/bluetooth/rfcomm/core.c 		rfcomm_send_msc(d->session, 1, d->dlci, d->v24_sig);
d                1803 net/bluetooth/rfcomm/core.c 	if (d->cfc) {
d                1806 net/bluetooth/rfcomm/core.c 		if (!test_bit(RFCOMM_RX_THROTTLED, &d->flags) &&
d                1807 net/bluetooth/rfcomm/core.c 				d->rx_credits <= (d->cfc >> 2)) {
d                1808 net/bluetooth/rfcomm/core.c 			rfcomm_send_credits(d->session, d->addr, d->cfc - d->rx_credits);
d                1809 net/bluetooth/rfcomm/core.c 			d->rx_credits = d->cfc;
d                1814 net/bluetooth/rfcomm/core.c 		d->tx_credits = 5;
d                1817 net/bluetooth/rfcomm/core.c 	if (test_bit(RFCOMM_TX_THROTTLED, &d->flags))
d                1818 net/bluetooth/rfcomm/core.c 		return skb_queue_len(&d->tx_queue);
d                1820 net/bluetooth/rfcomm/core.c 	while (d->tx_credits && (skb = skb_dequeue(&d->tx_queue))) {
d                1821 net/bluetooth/rfcomm/core.c 		err = rfcomm_send_frame(d->session, skb->data, skb->len);
d                1823 net/bluetooth/rfcomm/core.c 			skb_queue_head(&d->tx_queue, skb);
d                1827 net/bluetooth/rfcomm/core.c 		d->tx_credits--;
d                1830 net/bluetooth/rfcomm/core.c 	if (d->cfc && !d->tx_credits) {
d                1833 net/bluetooth/rfcomm/core.c 		set_bit(RFCOMM_TX_THROTTLED, &d->flags);
d                1836 net/bluetooth/rfcomm/core.c 	return skb_queue_len(&d->tx_queue);
d                1841 net/bluetooth/rfcomm/core.c 	struct rfcomm_dlc *d, *n;
d                1845 net/bluetooth/rfcomm/core.c 	list_for_each_entry_safe(d, n, &s->dlcs, list) {
d                1846 net/bluetooth/rfcomm/core.c 		if (test_bit(RFCOMM_TIMED_OUT, &d->flags)) {
d                1847 net/bluetooth/rfcomm/core.c 			__rfcomm_dlc_close(d, ETIMEDOUT);
d                1851 net/bluetooth/rfcomm/core.c 		if (test_bit(RFCOMM_ENC_DROP, &d->flags)) {
d                1852 net/bluetooth/rfcomm/core.c 			__rfcomm_dlc_close(d, ECONNREFUSED);
d                1856 net/bluetooth/rfcomm/core.c 		if (test_and_clear_bit(RFCOMM_AUTH_ACCEPT, &d->flags)) {
d                1857 net/bluetooth/rfcomm/core.c 			rfcomm_dlc_clear_timer(d);
d                1858 net/bluetooth/rfcomm/core.c 			if (d->out) {
d                1859 net/bluetooth/rfcomm/core.c 				rfcomm_send_pn(s, 1, d);
d                1860 net/bluetooth/rfcomm/core.c 				rfcomm_dlc_set_timer(d, RFCOMM_CONN_TIMEOUT);
d                1862 net/bluetooth/rfcomm/core.c 				if (d->defer_setup) {
d                1863 net/bluetooth/rfcomm/core.c 					set_bit(RFCOMM_DEFER_SETUP, &d->flags);
d                1864 net/bluetooth/rfcomm/core.c 					rfcomm_dlc_set_timer(d, RFCOMM_AUTH_TIMEOUT);
d                1866 net/bluetooth/rfcomm/core.c 					rfcomm_dlc_lock(d);
d                1867 net/bluetooth/rfcomm/core.c 					d->state = BT_CONNECT2;
d                1868 net/bluetooth/rfcomm/core.c 					d->state_change(d, 0);
d                1869 net/bluetooth/rfcomm/core.c 					rfcomm_dlc_unlock(d);
d                1871 net/bluetooth/rfcomm/core.c 					rfcomm_dlc_accept(d);
d                1874 net/bluetooth/rfcomm/core.c 		} else if (test_and_clear_bit(RFCOMM_AUTH_REJECT, &d->flags)) {
d                1875 net/bluetooth/rfcomm/core.c 			rfcomm_dlc_clear_timer(d);
d                1876 net/bluetooth/rfcomm/core.c 			if (!d->out)
d                1877 net/bluetooth/rfcomm/core.c 				rfcomm_send_dm(s, d->dlci);
d                1879 net/bluetooth/rfcomm/core.c 				d->state = BT_CLOSED;
d                1880 net/bluetooth/rfcomm/core.c 			__rfcomm_dlc_close(d, ECONNREFUSED);
d                1884 net/bluetooth/rfcomm/core.c 		if (test_bit(RFCOMM_SEC_PENDING, &d->flags))
d                1890 net/bluetooth/rfcomm/core.c 		if ((d->state == BT_CONNECTED || d->state == BT_DISCONN) &&
d                1891 net/bluetooth/rfcomm/core.c 						d->mscex == RFCOMM_MSCEX_OK)
d                1892 net/bluetooth/rfcomm/core.c 			rfcomm_process_tx(d);
d                2102 net/bluetooth/rfcomm/core.c 	struct rfcomm_dlc *d, *n;
d                2110 net/bluetooth/rfcomm/core.c 	list_for_each_entry_safe(d, n, &s->dlcs, list) {
d                2111 net/bluetooth/rfcomm/core.c 		if (test_and_clear_bit(RFCOMM_SEC_PENDING, &d->flags)) {
d                2112 net/bluetooth/rfcomm/core.c 			rfcomm_dlc_clear_timer(d);
d                2114 net/bluetooth/rfcomm/core.c 				set_bit(RFCOMM_ENC_DROP, &d->flags);
d                2119 net/bluetooth/rfcomm/core.c 		if (d->state == BT_CONNECTED && !status && encrypt == 0x00) {
d                2120 net/bluetooth/rfcomm/core.c 			if (d->sec_level == BT_SECURITY_MEDIUM) {
d                2121 net/bluetooth/rfcomm/core.c 				set_bit(RFCOMM_SEC_PENDING, &d->flags);
d                2122 net/bluetooth/rfcomm/core.c 				rfcomm_dlc_set_timer(d, RFCOMM_AUTH_TIMEOUT);
d                2124 net/bluetooth/rfcomm/core.c 			} else if (d->sec_level == BT_SECURITY_HIGH ||
d                2125 net/bluetooth/rfcomm/core.c 				   d->sec_level == BT_SECURITY_FIPS) {
d                2126 net/bluetooth/rfcomm/core.c 				set_bit(RFCOMM_ENC_DROP, &d->flags);
d                2131 net/bluetooth/rfcomm/core.c 		if (!test_and_clear_bit(RFCOMM_AUTH_PENDING, &d->flags))
d                2134 net/bluetooth/rfcomm/core.c 		if (!status && hci_conn_check_secure(conn, d->sec_level))
d                2135 net/bluetooth/rfcomm/core.c 			set_bit(RFCOMM_AUTH_ACCEPT, &d->flags);
d                2137 net/bluetooth/rfcomm/core.c 			set_bit(RFCOMM_AUTH_REJECT, &d->flags);
d                2156 net/bluetooth/rfcomm/core.c 		struct rfcomm_dlc *d;
d                2157 net/bluetooth/rfcomm/core.c 		list_for_each_entry(d, &s->dlcs, list) {
d                2160 net/bluetooth/rfcomm/core.c 				   d->state, d->dlci, d->mtu,
d                2161 net/bluetooth/rfcomm/core.c 				   d->rx_credits, d->tx_credits);
d                  50 net/bluetooth/rfcomm/sock.c static void rfcomm_sk_data_ready(struct rfcomm_dlc *d, struct sk_buff *skb)
d                  52 net/bluetooth/rfcomm/sock.c 	struct sock *sk = d->owner;
d                  61 net/bluetooth/rfcomm/sock.c 		rfcomm_dlc_throttle(d);
d                  64 net/bluetooth/rfcomm/sock.c static void rfcomm_sk_state_change(struct rfcomm_dlc *d, int err)
d                  66 net/bluetooth/rfcomm/sock.c 	struct sock *sk = d->owner, *parent;
d                  72 net/bluetooth/rfcomm/sock.c 	BT_DBG("dlc %p state %ld err %d", d, d->state, err);
d                  80 net/bluetooth/rfcomm/sock.c 	sk->sk_state = d->state;
d                  84 net/bluetooth/rfcomm/sock.c 		if (d->state == BT_CLOSED) {
d                  90 net/bluetooth/rfcomm/sock.c 		if (d->state == BT_CONNECTED)
d                  91 net/bluetooth/rfcomm/sock.c 			rfcomm_session_getaddr(d->session,
d                 102 net/bluetooth/rfcomm/sock.c 		rfcomm_dlc_unlock(d);
d                 104 net/bluetooth/rfcomm/sock.c 		rfcomm_dlc_lock(d);
d                 158 net/bluetooth/rfcomm/sock.c 	struct rfcomm_dlc *d = rfcomm_pi(sk)->dlc;
d                 160 net/bluetooth/rfcomm/sock.c 	BT_DBG("sk %p dlc %p", sk, d);
d                 165 net/bluetooth/rfcomm/sock.c 	rfcomm_dlc_lock(d);
d                 169 net/bluetooth/rfcomm/sock.c 	if (d->owner == sk)
d                 170 net/bluetooth/rfcomm/sock.c 		d->owner = NULL;
d                 171 net/bluetooth/rfcomm/sock.c 	rfcomm_dlc_unlock(d);
d                 173 net/bluetooth/rfcomm/sock.c 	rfcomm_dlc_put(d);
d                 210 net/bluetooth/rfcomm/sock.c 	struct rfcomm_dlc *d = rfcomm_pi(sk)->dlc;
d                 223 net/bluetooth/rfcomm/sock.c 		rfcomm_dlc_close(d, 0);
d                 276 net/bluetooth/rfcomm/sock.c 	struct rfcomm_dlc *d;
d                 286 net/bluetooth/rfcomm/sock.c 	d = rfcomm_dlc_alloc(prio);
d                 287 net/bluetooth/rfcomm/sock.c 	if (!d) {
d                 292 net/bluetooth/rfcomm/sock.c 	d->data_ready   = rfcomm_sk_data_ready;
d                 293 net/bluetooth/rfcomm/sock.c 	d->state_change = rfcomm_sk_state_change;
d                 295 net/bluetooth/rfcomm/sock.c 	rfcomm_pi(sk)->dlc = d;
d                 296 net/bluetooth/rfcomm/sock.c 	d->owner = sk;
d                 388 net/bluetooth/rfcomm/sock.c 	struct rfcomm_dlc *d = rfcomm_pi(sk)->dlc;
d                 413 net/bluetooth/rfcomm/sock.c 	d->sec_level = rfcomm_pi(sk)->sec_level;
d                 414 net/bluetooth/rfcomm/sock.c 	d->role_switch = rfcomm_pi(sk)->role_switch;
d                 416 net/bluetooth/rfcomm/sock.c 	err = rfcomm_dlc_open(d, &rfcomm_pi(sk)->src, &sa->rc_bdaddr,
d                 563 net/bluetooth/rfcomm/sock.c 	struct rfcomm_dlc *d = rfcomm_pi(sk)->dlc;
d                 567 net/bluetooth/rfcomm/sock.c 	if (test_bit(RFCOMM_DEFER_SETUP, &d->flags))
d                 585 net/bluetooth/rfcomm/sock.c 		size_t size = min_t(size_t, len, d->mtu);
d                 607 net/bluetooth/rfcomm/sock.c 		err = rfcomm_dlc_send(d, skb);
d                 629 net/bluetooth/rfcomm/sock.c 	struct rfcomm_dlc *d = rfcomm_pi(sk)->dlc;
d                 632 net/bluetooth/rfcomm/sock.c 	if (test_and_clear_bit(RFCOMM_DEFER_SETUP, &d->flags)) {
d                 633 net/bluetooth/rfcomm/sock.c 		rfcomm_dlc_accept(d);
d                 956 net/bluetooth/rfcomm/sock.c int rfcomm_connect_ind(struct rfcomm_session *s, u8 channel, struct rfcomm_dlc **d)
d                 994 net/bluetooth/rfcomm/sock.c 	*d = rfcomm_pi(sk)->dlc;
d                 647 net/bridge/br_netfilter_hooks.c 	struct net_device **d = (struct net_device **)(skb->cb);
d                 673 net/bridge/br_netfilter_hooks.c 	*d = state->in;
d                  27 net/bridge/br_sysfs_br.c static ssize_t store_bridge_parm(struct device *d,
d                  31 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                  55 net/bridge/br_sysfs_br.c static ssize_t forward_delay_show(struct device *d,
d                  58 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                  62 net/bridge/br_sysfs_br.c static ssize_t forward_delay_store(struct device *d,
d                  66 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, br_set_forward_delay);
d                  70 net/bridge/br_sysfs_br.c static ssize_t hello_time_show(struct device *d, struct device_attribute *attr,
d                  74 net/bridge/br_sysfs_br.c 		       jiffies_to_clock_t(to_bridge(d)->hello_time));
d                  77 net/bridge/br_sysfs_br.c static ssize_t hello_time_store(struct device *d,
d                  81 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, br_set_hello_time);
d                  85 net/bridge/br_sysfs_br.c static ssize_t max_age_show(struct device *d, struct device_attribute *attr,
d                  89 net/bridge/br_sysfs_br.c 		       jiffies_to_clock_t(to_bridge(d)->max_age));
d                  92 net/bridge/br_sysfs_br.c static ssize_t max_age_store(struct device *d, struct device_attribute *attr,
d                  95 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, br_set_max_age);
d                  99 net/bridge/br_sysfs_br.c static ssize_t ageing_time_show(struct device *d,
d                 102 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 111 net/bridge/br_sysfs_br.c static ssize_t ageing_time_store(struct device *d,
d                 115 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_ageing_time);
d                 119 net/bridge/br_sysfs_br.c static ssize_t stp_state_show(struct device *d,
d                 122 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 134 net/bridge/br_sysfs_br.c static ssize_t stp_state_store(struct device *d,
d                 138 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_stp_state);
d                 142 net/bridge/br_sysfs_br.c static ssize_t group_fwd_mask_show(struct device *d,
d                 146 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 160 net/bridge/br_sysfs_br.c static ssize_t group_fwd_mask_store(struct device *d,
d                 165 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_group_fwd_mask);
d                 169 net/bridge/br_sysfs_br.c static ssize_t priority_show(struct device *d, struct device_attribute *attr,
d                 172 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 183 net/bridge/br_sysfs_br.c static ssize_t priority_store(struct device *d, struct device_attribute *attr,
d                 186 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_priority);
d                 190 net/bridge/br_sysfs_br.c static ssize_t root_id_show(struct device *d, struct device_attribute *attr,
d                 193 net/bridge/br_sysfs_br.c 	return br_show_bridge_id(buf, &to_bridge(d)->designated_root);
d                 197 net/bridge/br_sysfs_br.c static ssize_t bridge_id_show(struct device *d, struct device_attribute *attr,
d                 200 net/bridge/br_sysfs_br.c 	return br_show_bridge_id(buf, &to_bridge(d)->bridge_id);
d                 204 net/bridge/br_sysfs_br.c static ssize_t root_port_show(struct device *d, struct device_attribute *attr,
d                 207 net/bridge/br_sysfs_br.c 	return sprintf(buf, "%d\n", to_bridge(d)->root_port);
d                 211 net/bridge/br_sysfs_br.c static ssize_t root_path_cost_show(struct device *d,
d                 214 net/bridge/br_sysfs_br.c 	return sprintf(buf, "%d\n", to_bridge(d)->root_path_cost);
d                 218 net/bridge/br_sysfs_br.c static ssize_t topology_change_show(struct device *d,
d                 221 net/bridge/br_sysfs_br.c 	return sprintf(buf, "%d\n", to_bridge(d)->topology_change);
d                 225 net/bridge/br_sysfs_br.c static ssize_t topology_change_detected_show(struct device *d,
d                 229 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 234 net/bridge/br_sysfs_br.c static ssize_t hello_timer_show(struct device *d,
d                 237 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 242 net/bridge/br_sysfs_br.c static ssize_t tcn_timer_show(struct device *d, struct device_attribute *attr,
d                 245 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 250 net/bridge/br_sysfs_br.c static ssize_t topology_change_timer_show(struct device *d,
d                 254 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 259 net/bridge/br_sysfs_br.c static ssize_t gc_timer_show(struct device *d, struct device_attribute *attr,
d                 262 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 267 net/bridge/br_sysfs_br.c static ssize_t group_addr_show(struct device *d,
d                 270 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 274 net/bridge/br_sysfs_br.c static ssize_t group_addr_store(struct device *d,
d                 278 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 319 net/bridge/br_sysfs_br.c static ssize_t flush_store(struct device *d,
d                 323 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_flush);
d                 327 net/bridge/br_sysfs_br.c static ssize_t no_linklocal_learn_show(struct device *d,
d                 331 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 340 net/bridge/br_sysfs_br.c static ssize_t no_linklocal_learn_store(struct device *d,
d                 344 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_no_linklocal_learn);
d                 349 net/bridge/br_sysfs_br.c static ssize_t multicast_router_show(struct device *d,
d                 352 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 356 net/bridge/br_sysfs_br.c static ssize_t multicast_router_store(struct device *d,
d                 360 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, br_multicast_set_router);
d                 364 net/bridge/br_sysfs_br.c static ssize_t multicast_snooping_show(struct device *d,
d                 368 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 372 net/bridge/br_sysfs_br.c static ssize_t multicast_snooping_store(struct device *d,
d                 376 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, br_multicast_toggle);
d                 380 net/bridge/br_sysfs_br.c static ssize_t multicast_query_use_ifaddr_show(struct device *d,
d                 384 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 396 net/bridge/br_sysfs_br.c multicast_query_use_ifaddr_store(struct device *d,
d                 400 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_query_use_ifaddr);
d                 404 net/bridge/br_sysfs_br.c static ssize_t multicast_querier_show(struct device *d,
d                 408 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 412 net/bridge/br_sysfs_br.c static ssize_t multicast_querier_store(struct device *d,
d                 416 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, br_multicast_set_querier);
d                 420 net/bridge/br_sysfs_br.c static ssize_t hash_elasticity_show(struct device *d,
d                 433 net/bridge/br_sysfs_br.c static ssize_t hash_elasticity_store(struct device *d,
d                 437 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_elasticity);
d                 441 net/bridge/br_sysfs_br.c static ssize_t hash_max_show(struct device *d, struct device_attribute *attr,
d                 444 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 454 net/bridge/br_sysfs_br.c static ssize_t hash_max_store(struct device *d, struct device_attribute *attr,
d                 457 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_hash_max);
d                 461 net/bridge/br_sysfs_br.c static ssize_t multicast_igmp_version_show(struct device *d,
d                 465 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 470 net/bridge/br_sysfs_br.c static ssize_t multicast_igmp_version_store(struct device *d,
d                 474 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, br_multicast_set_igmp_version);
d                 478 net/bridge/br_sysfs_br.c static ssize_t multicast_last_member_count_show(struct device *d,
d                 482 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 492 net/bridge/br_sysfs_br.c static ssize_t multicast_last_member_count_store(struct device *d,
d                 496 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_last_member_count);
d                 501 net/bridge/br_sysfs_br.c 	struct device *d, struct device_attribute *attr, char *buf)
d                 503 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 514 net/bridge/br_sysfs_br.c 	struct device *d, struct device_attribute *attr, const char *buf,
d                 517 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_startup_query_count);
d                 522 net/bridge/br_sysfs_br.c 	struct device *d, struct device_attribute *attr, char *buf)
d                 524 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 536 net/bridge/br_sysfs_br.c 	struct device *d, struct device_attribute *attr, const char *buf,
d                 539 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_last_member_interval);
d                 544 net/bridge/br_sysfs_br.c 	struct device *d, struct device_attribute *attr, char *buf)
d                 546 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 558 net/bridge/br_sysfs_br.c 	struct device *d, struct device_attribute *attr, const char *buf,
d                 561 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_membership_interval);
d                 565 net/bridge/br_sysfs_br.c static ssize_t multicast_querier_interval_show(struct device *d,
d                 569 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 580 net/bridge/br_sysfs_br.c static ssize_t multicast_querier_interval_store(struct device *d,
d                 584 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_querier_interval);
d                 588 net/bridge/br_sysfs_br.c static ssize_t multicast_query_interval_show(struct device *d,
d                 592 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 603 net/bridge/br_sysfs_br.c static ssize_t multicast_query_interval_store(struct device *d,
d                 607 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_query_interval);
d                 612 net/bridge/br_sysfs_br.c 	struct device *d, struct device_attribute *attr, char *buf)
d                 614 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 627 net/bridge/br_sysfs_br.c 	struct device *d, struct device_attribute *attr, const char *buf,
d                 630 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_query_response_interval);
d                 635 net/bridge/br_sysfs_br.c 	struct device *d, struct device_attribute *attr, char *buf)
d                 637 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 650 net/bridge/br_sysfs_br.c 	struct device *d, struct device_attribute *attr, const char *buf,
d                 653 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_startup_query_interval);
d                 657 net/bridge/br_sysfs_br.c static ssize_t multicast_stats_enabled_show(struct device *d,
d                 661 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 673 net/bridge/br_sysfs_br.c static ssize_t multicast_stats_enabled_store(struct device *d,
d                 678 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_stats_enabled);
d                 683 net/bridge/br_sysfs_br.c static ssize_t multicast_mld_version_show(struct device *d,
d                 687 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 692 net/bridge/br_sysfs_br.c static ssize_t multicast_mld_version_store(struct device *d,
d                 696 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, br_multicast_set_mld_version);
d                 703 net/bridge/br_sysfs_br.c 	struct device *d, struct device_attribute *attr, char *buf)
d                 705 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 716 net/bridge/br_sysfs_br.c 	struct device *d, struct device_attribute *attr, const char *buf,
d                 719 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_nf_call_iptables);
d                 724 net/bridge/br_sysfs_br.c 	struct device *d, struct device_attribute *attr, char *buf)
d                 726 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 737 net/bridge/br_sysfs_br.c 	struct device *d, struct device_attribute *attr, const char *buf,
d                 740 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_nf_call_ip6tables);
d                 745 net/bridge/br_sysfs_br.c 	struct device *d, struct device_attribute *attr, char *buf)
d                 747 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 758 net/bridge/br_sysfs_br.c 	struct device *d, struct device_attribute *attr, const char *buf,
d                 761 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, set_nf_call_arptables);
d                 766 net/bridge/br_sysfs_br.c static ssize_t vlan_filtering_show(struct device *d,
d                 770 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 774 net/bridge/br_sysfs_br.c static ssize_t vlan_filtering_store(struct device *d,
d                 778 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, br_vlan_filter_toggle);
d                 782 net/bridge/br_sysfs_br.c static ssize_t vlan_protocol_show(struct device *d,
d                 786 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 790 net/bridge/br_sysfs_br.c static ssize_t vlan_protocol_store(struct device *d,
d                 794 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, br_vlan_set_proto);
d                 798 net/bridge/br_sysfs_br.c static ssize_t default_pvid_show(struct device *d,
d                 802 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 806 net/bridge/br_sysfs_br.c static ssize_t default_pvid_store(struct device *d,
d                 810 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, br_vlan_set_default_pvid);
d                 814 net/bridge/br_sysfs_br.c static ssize_t vlan_stats_enabled_show(struct device *d,
d                 818 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 822 net/bridge/br_sysfs_br.c static ssize_t vlan_stats_enabled_store(struct device *d,
d                 826 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, br_vlan_set_stats);
d                 830 net/bridge/br_sysfs_br.c static ssize_t vlan_stats_per_port_show(struct device *d,
d                 834 net/bridge/br_sysfs_br.c 	struct net_bridge *br = to_bridge(d);
d                 838 net/bridge/br_sysfs_br.c static ssize_t vlan_stats_per_port_store(struct device *d,
d                 842 net/bridge/br_sysfs_br.c 	return store_bridge_parm(d, buf, len, br_vlan_set_stats_per_port);
d                  80 net/ceph/armor.c 		int a, b, c, d;
d                  91 net/ceph/armor.c 		d = decode_bits(src[3]);
d                  92 net/ceph/armor.c 		if (a < 0 || b < 0 || c < 0 || d < 0)
d                 101 net/ceph/armor.c 		*dst++ = ((c & 3) << 6) | d;
d                 844 net/ceph/auth_x.c 			__le64 a, b, c, d;
d                 864 net/ceph/auth_x.c 		*psig = penc->a ^ penc->b ^ penc->c ^ penc->d;
d                 222 net/ceph/ceph_common.c 	int d;
d                 235 net/ceph/ceph_common.c 		if (sscanf(tmp, "%x", &d) < 1)
d                 237 net/ceph/ceph_common.c 		fsid->fsid[i] = d & 0xff;
d                  62 net/ceph/crush/hash.c static __u32 crush_hash32_rjenkins1_4(__u32 a, __u32 b, __u32 c, __u32 d)
d                  64 net/ceph/crush/hash.c 	__u32 hash = crush_hash_seed ^ a ^ b ^ c ^ d;
d                  68 net/ceph/crush/hash.c 	crush_hashmix(c, d, hash);
d                  72 net/ceph/crush/hash.c 	crush_hashmix(y, d, hash);
d                  76 net/ceph/crush/hash.c static __u32 crush_hash32_rjenkins1_5(__u32 a, __u32 b, __u32 c, __u32 d,
d                  79 net/ceph/crush/hash.c 	__u32 hash = crush_hash_seed ^ a ^ b ^ c ^ d ^ e;
d                  83 net/ceph/crush/hash.c 	crush_hashmix(c, d, hash);
d                  88 net/ceph/crush/hash.c 	crush_hashmix(d, x, hash);
d                 124 net/ceph/crush/hash.c __u32 crush_hash32_4(int type, __u32 a, __u32 b, __u32 c, __u32 d)
d                 128 net/ceph/crush/hash.c 		return crush_hash32_rjenkins1_4(a, b, c, d);
d                 134 net/ceph/crush/hash.c __u32 crush_hash32_5(int type, __u32 a, __u32 b, __u32 c, __u32 d, __u32 e)
d                 138 net/ceph/crush/hash.c 		return crush_hash32_rjenkins1_5(a, b, c, d, e);
d                 981 net/core/dev.c 	struct net_device *d;
d                1001 net/core/dev.c 		for_each_netdev(net, d) {
d                1002 net/core/dev.c 			if (!sscanf(d->name, name, &i))
d                1009 net/core/dev.c 			if (!strncmp(buf, d->name, IFNAMSIZ))
d                  24 net/core/gen_stats.c gnet_stats_copy(struct gnet_dump *d, int type, void *buf, int size, int padattr)
d                  26 net/core/gen_stats.c 	if (nla_put_64bit(d->skb, type, size, buf, padattr))
d                  31 net/core/gen_stats.c 	if (d->lock)
d                  32 net/core/gen_stats.c 		spin_unlock_bh(d->lock);
d                  33 net/core/gen_stats.c 	kfree(d->xstats);
d                  34 net/core/gen_stats.c 	d->xstats = NULL;
d                  35 net/core/gen_stats.c 	d->xstats_len = 0;
d                  61 net/core/gen_stats.c 			     struct gnet_dump *d, int padattr)
d                  64 net/core/gen_stats.c 	memset(d, 0, sizeof(*d));
d                  67 net/core/gen_stats.c 		d->tail = (struct nlattr *)skb_tail_pointer(skb);
d                  68 net/core/gen_stats.c 	d->skb = skb;
d                  69 net/core/gen_stats.c 	d->compat_tc_stats = tc_stats_type;
d                  70 net/core/gen_stats.c 	d->compat_xstats = xstats_type;
d                  71 net/core/gen_stats.c 	d->padattr = padattr;
d                  73 net/core/gen_stats.c 		d->lock = lock;
d                  76 net/core/gen_stats.c 	if (d->tail) {
d                  77 net/core/gen_stats.c 		int ret = gnet_stats_copy(d, type, NULL, 0, padattr);
d                  85 net/core/gen_stats.c 		if (ret == 0 && d->tail->nla_type == padattr)
d                  86 net/core/gen_stats.c 			d->tail = (struct nlattr *)((char *)d->tail +
d                  87 net/core/gen_stats.c 						    NLA_ALIGN(d->tail->nla_len));
d                 111 net/core/gen_stats.c 		      struct gnet_dump *d, int padattr)
d                 113 net/core/gen_stats.c 	return gnet_stats_start_copy_compat(skb, type, 0, 0, lock, d, padattr);
d                 163 net/core/gen_stats.c 			 struct gnet_dump *d,
d                 172 net/core/gen_stats.c 	if (d->compat_tc_stats && type == TCA_STATS_BASIC) {
d                 173 net/core/gen_stats.c 		d->tc_stats.bytes = bstats.bytes;
d                 174 net/core/gen_stats.c 		d->tc_stats.packets = bstats.packets;
d                 177 net/core/gen_stats.c 	if (d->tail) {
d                 183 net/core/gen_stats.c 		return gnet_stats_copy(d, type, &sb, sizeof(sb),
d                 204 net/core/gen_stats.c 		      struct gnet_dump *d,
d                 208 net/core/gen_stats.c 	return ___gnet_stats_copy_basic(running, d, cpu, b,
d                 228 net/core/gen_stats.c 			 struct gnet_dump *d,
d                 232 net/core/gen_stats.c 	return ___gnet_stats_copy_basic(running, d, cpu, b,
d                 249 net/core/gen_stats.c gnet_stats_copy_rate_est(struct gnet_dump *d,
d                 262 net/core/gen_stats.c 	if (d->compat_tc_stats) {
d                 263 net/core/gen_stats.c 		d->tc_stats.bps = est.bps;
d                 264 net/core/gen_stats.c 		d->tc_stats.pps = est.pps;
d                 267 net/core/gen_stats.c 	if (d->tail) {
d                 268 net/core/gen_stats.c 		res = gnet_stats_copy(d, TCA_STATS_RATE_EST, &est, sizeof(est),
d                 273 net/core/gen_stats.c 		return gnet_stats_copy(d, TCA_STATS_RATE_EST64, &sample,
d                 332 net/core/gen_stats.c gnet_stats_copy_queue(struct gnet_dump *d,
d                 340 net/core/gen_stats.c 	if (d->compat_tc_stats) {
d                 341 net/core/gen_stats.c 		d->tc_stats.drops = qstats.drops;
d                 342 net/core/gen_stats.c 		d->tc_stats.qlen = qstats.qlen;
d                 343 net/core/gen_stats.c 		d->tc_stats.backlog = qstats.backlog;
d                 344 net/core/gen_stats.c 		d->tc_stats.overlimits = qstats.overlimits;
d                 347 net/core/gen_stats.c 	if (d->tail)
d                 348 net/core/gen_stats.c 		return gnet_stats_copy(d, TCA_STATS_QUEUE,
d                 370 net/core/gen_stats.c gnet_stats_copy_app(struct gnet_dump *d, void *st, int len)
d                 372 net/core/gen_stats.c 	if (d->compat_xstats) {
d                 373 net/core/gen_stats.c 		d->xstats = kmemdup(st, len, GFP_ATOMIC);
d                 374 net/core/gen_stats.c 		if (!d->xstats)
d                 376 net/core/gen_stats.c 		d->xstats_len = len;
d                 379 net/core/gen_stats.c 	if (d->tail)
d                 380 net/core/gen_stats.c 		return gnet_stats_copy(d, TCA_STATS_APP, st, len,
d                 386 net/core/gen_stats.c 	if (d->lock)
d                 387 net/core/gen_stats.c 		spin_unlock_bh(d->lock);
d                 388 net/core/gen_stats.c 	d->xstats_len = 0;
d                 406 net/core/gen_stats.c gnet_stats_finish_copy(struct gnet_dump *d)
d                 408 net/core/gen_stats.c 	if (d->tail)
d                 409 net/core/gen_stats.c 		d->tail->nla_len = skb_tail_pointer(d->skb) - (u8 *)d->tail;
d                 411 net/core/gen_stats.c 	if (d->compat_tc_stats)
d                 412 net/core/gen_stats.c 		if (gnet_stats_copy(d, d->compat_tc_stats, &d->tc_stats,
d                 413 net/core/gen_stats.c 				    sizeof(d->tc_stats), d->padattr) < 0)
d                 416 net/core/gen_stats.c 	if (d->compat_xstats && d->xstats) {
d                 417 net/core/gen_stats.c 		if (gnet_stats_copy(d, d->compat_xstats, d->xstats,
d                 418 net/core/gen_stats.c 				    d->xstats_len, d->padattr) < 0)
d                 422 net/core/gen_stats.c 	if (d->lock)
d                 423 net/core/gen_stats.c 		spin_unlock_bh(d->lock);
d                 424 net/core/gen_stats.c 	kfree(d->xstats);
d                 425 net/core/gen_stats.c 	d->xstats = NULL;
d                 426 net/core/gen_stats.c 	d->xstats_len = 0;
d                 546 net/core/net-sysfs.c static ssize_t netstat_show(const struct device *d,
d                 550 net/core/net-sysfs.c 	struct net_device *dev = to_net_dev(d);
d                 569 net/core/net-sysfs.c static ssize_t name##_show(struct device *d,				\
d                 572 net/core/net-sysfs.c 	return netstat_show(d, attr, buf,				\
d                1610 net/core/net-sysfs.c static int netdev_uevent(struct device *d, struct kobj_uevent_env *env)
d                1612 net/core/net-sysfs.c 	struct net_device *dev = to_net_dev(d);
d                1634 net/core/net-sysfs.c static void netdev_release(struct device *d)
d                1636 net/core/net-sysfs.c 	struct net_device *dev = to_net_dev(d);
d                1647 net/core/net-sysfs.c static const void *net_namespace(struct device *d)
d                1649 net/core/net-sysfs.c 	struct net_device *dev = to_net_dev(d);
d                1654 net/core/net-sysfs.c static void net_get_ownership(struct device *d, kuid_t *uid, kgid_t *gid)
d                1656 net/core/net-sysfs.c 	struct net_device *dev = to_net_dev(d);
d                 123 net/core/utils.c 	u8 *d;
d                 132 net/core/utils.c 	d = dbuf;
d                 143 net/core/utils.c 			*d++ = w & 0xff;
d                 189 net/core/utils.c 	u8 *d, *dc = NULL;
d                 199 net/core/utils.c 	d = dbuf;
d                 212 net/core/utils.c 				*d++ = (w >> 8) & 0xff;
d                 213 net/core/utils.c 				*d++ = w & 0xff;
d                 227 net/core/utils.c 				dc = d;
d                 249 net/core/utils.c 			ret = in4_pton(tok ? tok : s, srclen + (int)(s - tok), d, delim, &s);
d                 251 net/core/utils.c 				d += 4;
d                 262 net/core/utils.c 		if (!dc && d + 2 < dbuf + sizeof(dbuf)) {
d                 266 net/core/utils.c 		if (d + 2 >= dbuf + sizeof(dbuf)) {
d                 270 net/core/utils.c 		if ((dc && d + 4 < dbuf + sizeof(dbuf)) ||
d                 271 net/core/utils.c 		    d + 4 == dbuf + sizeof(dbuf)) {
d                 274 net/core/utils.c 		if (d >= dbuf + sizeof(dbuf)) {
d                 281 net/core/utils.c 	i = 15; d--;
d                 284 net/core/utils.c 		while (d >= dc)
d                 285 net/core/utils.c 			dst[i--] = *d--;
d                 289 net/core/utils.c 			dst[i--] = *d--;
d                 154 net/dsa/dsa.c  	struct device *d;
d                 156 net/dsa/dsa.c  	d = dev_find_class(dev, "net");
d                 157 net/dsa/dsa.c  	if (d != NULL) {
d                 160 net/dsa/dsa.c  		nd = to_net_dev(d);
d                 162 net/dsa/dsa.c  		put_device(d);
d                 267 net/dsa/master.c static ssize_t tagging_show(struct device *d, struct device_attribute *attr,
d                 270 net/dsa/master.c 	struct net_device *dev = to_net_dev(d);
d                  71 net/hsr/hsr_debugfs.c 	struct dentry *d;
d                  73 net/hsr/hsr_debugfs.c 	d = debugfs_rename(hsr_debugfs_root_dir, priv->node_tbl_root,
d                  75 net/hsr/hsr_debugfs.c 	if (IS_ERR(d))
d                  78 net/hsr/hsr_debugfs.c 		priv->node_tbl_root = d;
d                 213 net/ipv4/ipconfig.c 	struct ic_device *d, **last;
d                 248 net/ipv4/ipconfig.c 			if (!(d = kmalloc(sizeof(struct ic_device), GFP_KERNEL))) {
d                 252 net/ipv4/ipconfig.c 			d->dev = dev;
d                 253 net/ipv4/ipconfig.c 			*last = d;
d                 254 net/ipv4/ipconfig.c 			last = &d->next;
d                 255 net/ipv4/ipconfig.c 			d->flags = oflags;
d                 256 net/ipv4/ipconfig.c 			d->able = able;
d                 258 net/ipv4/ipconfig.c 				get_random_bytes(&d->xid, sizeof(__be32));
d                 260 net/ipv4/ipconfig.c 				d->xid = 0;
d                 263 net/ipv4/ipconfig.c 				 dev->name, able, d->xid);
d                 310 net/ipv4/ipconfig.c 	struct ic_device *d, *next;
d                 315 net/ipv4/ipconfig.c 	while ((d = next)) {
d                 316 net/ipv4/ipconfig.c 		next = d->next;
d                 317 net/ipv4/ipconfig.c 		dev = d->dev;
d                 318 net/ipv4/ipconfig.c 		if (d != ic_dev && !netdev_uses_dsa(dev)) {
d                 320 net/ipv4/ipconfig.c 			dev_change_flags(dev, d->flags, NULL);
d                 322 net/ipv4/ipconfig.c 		kfree(d);
d                 481 net/ipv4/ipconfig.c 	struct ic_device *d;
d                 525 net/ipv4/ipconfig.c 	d = ic_first_dev;
d                 526 net/ipv4/ipconfig.c 	while (d && d->dev != dev)
d                 527 net/ipv4/ipconfig.c 		d = d->next;
d                 528 net/ipv4/ipconfig.c 	if (!d)
d                 548 net/ipv4/ipconfig.c 	ic_dev = d;
d                 569 net/ipv4/ipconfig.c static void __init ic_rarp_send_if(struct ic_device *d)
d                 571 net/ipv4/ipconfig.c 	struct net_device *dev = d->dev;
d                 653 net/ipv4/ipconfig.c ic_dhcp_init_options(u8 *options, struct ic_device *d)
d                 660 net/ipv4/ipconfig.c 	pr_debug("DHCP: Sending message type %d (%s)\n", mt, d->dev->name);
d                 796 net/ipv4/ipconfig.c static void __init ic_bootp_send_if(struct ic_device *d, unsigned long jiffies_diff)
d                 798 net/ipv4/ipconfig.c 	struct net_device *dev = d->dev;
d                 847 net/ipv4/ipconfig.c 	b->xid = d->xid;
d                 852 net/ipv4/ipconfig.c 		ic_dhcp_init_options(b->exten, d);
d                 962 net/ipv4/ipconfig.c 	struct ic_device *d;
d                1028 net/ipv4/ipconfig.c 	d = ic_first_dev;
d                1029 net/ipv4/ipconfig.c 	while (d && d->dev != dev)
d                1030 net/ipv4/ipconfig.c 		d = d->next;
d                1031 net/ipv4/ipconfig.c 	if (!d)
d                1036 net/ipv4/ipconfig.c 	    b->xid != d->xid) {
d                1038 net/ipv4/ipconfig.c 				    d->dev->name, b->op, b->xid);
d                1073 net/ipv4/ipconfig.c 			pr_debug("DHCP: Got message type %d (%s)\n", mt, d->dev->name);
d                1128 net/ipv4/ipconfig.c 	ic_dev = d;
d                1162 net/ipv4/ipconfig.c 	struct ic_device *d;
d                1217 net/ipv4/ipconfig.c 	d = ic_first_dev;
d                1223 net/ipv4/ipconfig.c 		if (do_bootp && (d->able & IC_BOOTP))
d                1224 net/ipv4/ipconfig.c 			ic_bootp_send_if(d, jiffies - start_jiffies);
d                1227 net/ipv4/ipconfig.c 		if (do_rarp && (d->able & IC_RARP))
d                1228 net/ipv4/ipconfig.c 			ic_rarp_send_if(d);
d                1231 net/ipv4/ipconfig.c 		if (!d->next) {
d                1243 net/ipv4/ipconfig.c 			d = ic_dev;
d                1254 net/ipv4/ipconfig.c 		if ((d = d->next))
d                1262 net/ipv4/ipconfig.c 		d = ic_first_dev;
d                  29 net/ipv6/proc.c #define MAX4(a, b, c, d) \
d                  30 net/ipv6/proc.c 	max_t(u32, max_t(u32, a, b), max_t(u32, c, d))
d                 776 net/ipv6/sit.c 		u32 d;
d                 781 net/ipv6/sit.c 		d = tunnel->ip6rd.relay_prefixlen < 32 ?
d                 787 net/ipv6/sit.c 			d |= ntohl(v6dst->s6_addr32[pbw0 + 1]) >>
d                 790 net/ipv6/sit.c 		*v4dst = tunnel->ip6rd.relay_prefix | htonl(d);
d                2867 net/key/af_key.c 				const struct xfrm_algo_desc *d)
d                2869 net/key/af_key.c 	unsigned int id = d->desc.sadb_alg_id;
d                2878 net/key/af_key.c 				const struct xfrm_algo_desc *d)
d                2880 net/key/af_key.c 	unsigned int id = d->desc.sadb_alg_id;
d                  28 net/mac80211/fils_aead.c 	u8 d[AES_BLOCK_SIZE], tmp[AES_BLOCK_SIZE] = {};
d                  35 net/mac80211/fils_aead.c 	crypto_shash_digest(desc, tmp, AES_BLOCK_SIZE, d);
d                  39 net/mac80211/fils_aead.c 		gf_mulx(d); /* dbl */
d                  41 net/mac80211/fils_aead.c 		crypto_xor(d, tmp, AES_BLOCK_SIZE);
d                  50 net/mac80211/fils_aead.c 		crypto_xor(d, addr[i] + len[i] - AES_BLOCK_SIZE,
d                  55 net/mac80211/fils_aead.c 		gf_mulx(d); /* dbl */
d                  56 net/mac80211/fils_aead.c 		crypto_xor(d, addr[i], len[i]);
d                  57 net/mac80211/fils_aead.c 		d[len[i]] ^= 0x80;
d                  60 net/mac80211/fils_aead.c 	crypto_shash_finup(desc, d, AES_BLOCK_SIZE, v);
d                 437 net/mac80211/rc80211_minstrel.c 		    struct minstrel_rate *d,
d                 444 net/mac80211/rc80211_minstrel.c 	d->perfect_tx_time = ieee80211_frame_duration(band, 1200,
d                 447 net/mac80211/rc80211_minstrel.c 	d->ack_time = ieee80211_frame_duration(band, 10,
d                 911 net/mac80211/wpa.c static inline void bip_ipn_set64(u8 *d, u64 pn)
d                 913 net/mac80211/wpa.c 	*d++ = pn;
d                 914 net/mac80211/wpa.c 	*d++ = pn >> 8;
d                 915 net/mac80211/wpa.c 	*d++ = pn >> 16;
d                 916 net/mac80211/wpa.c 	*d++ = pn >> 24;
d                 917 net/mac80211/wpa.c 	*d++ = pn >> 32;
d                 918 net/mac80211/wpa.c 	*d = pn >> 40;
d                 921 net/mac80211/wpa.c static inline void bip_ipn_swap(u8 *d, const u8 *s)
d                 923 net/mac80211/wpa.c 	*d++ = s[5];
d                 924 net/mac80211/wpa.c 	*d++ = s[4];
d                 925 net/mac80211/wpa.c 	*d++ = s[3];
d                 926 net/mac80211/wpa.c 	*d++ = s[2];
d                 927 net/mac80211/wpa.c 	*d++ = s[1];
d                 928 net/mac80211/wpa.c 	*d = s[0];
d                 197 net/netfilter/ipset/ip_set_hash_gen.h #define SET_ELEM_EXPIRED(set, d)	\
d                 199 net/netfilter/ipset/ip_set_hash_gen.h 	 ip_set_timeout_expired(ext_timeout(d, set)))
d                 262 net/netfilter/ipset/ip_set_hash_gen.h #define mtype_do_data_match(d)	1
d                 341 net/netfilter/ipset/ip_set_hash_gen.h 	struct mtype_elem d;	/* Element value */
d                 517 net/netfilter/ipset/ip_set_hash_gen.h 	u32 i, j, d;
d                 530 net/netfilter/ipset/ip_set_hash_gen.h 		for (j = 0, d = 0; j < n->pos; j++) {
d                 532 net/netfilter/ipset/ip_set_hash_gen.h 				d++;
d                 549 net/netfilter/ipset/ip_set_hash_gen.h 			d++;
d                 551 net/netfilter/ipset/ip_set_hash_gen.h 		if (d >= AHASH_INIT_SIZE) {
d                 552 net/netfilter/ipset/ip_set_hash_gen.h 			if (d >= n->size) {
d                 566 net/netfilter/ipset/ip_set_hash_gen.h 			for (j = 0, d = 0; j < n->pos; j++) {
d                 570 net/netfilter/ipset/ip_set_hash_gen.h 				memcpy(tmp->value + d * dsize,
d                 572 net/netfilter/ipset/ip_set_hash_gen.h 				set_bit(d, tmp->used);
d                 573 net/netfilter/ipset/ip_set_hash_gen.h 				d++;
d                 575 net/netfilter/ipset/ip_set_hash_gen.h 			tmp->pos = d;
d                 653 net/netfilter/ipset/ip_set_hash_gen.h 	struct mtype_elem *d;
d                 766 net/netfilter/ipset/ip_set_hash_gen.h 				d = ahash_data(m, m->pos, dsize);
d                 767 net/netfilter/ipset/ip_set_hash_gen.h 				memcpy(d, data, dsize);
d                 771 net/netfilter/ipset/ip_set_hash_gen.h 				mtype_data_reset_flags(d, &flags);
d                 793 net/netfilter/ipset/ip_set_hash_gen.h 			mtype_add(set, &x->d, &x->ext, &x->mext, x->flags);
d                 795 net/netfilter/ipset/ip_set_hash_gen.h 			mtype_del(set, &x->d, NULL, NULL, 0);
d                 860 net/netfilter/ipset/ip_set_hash_gen.h 	const struct mtype_elem *d = value;
d                 918 net/netfilter/ipset/ip_set_hash_gen.h 		if (mtype_data_equal(data, d, &multi)) {
d                 956 net/netfilter/ipset/ip_set_hash_gen.h 			mtype_data_next(&h->next, d);
d                 982 net/netfilter/ipset/ip_set_hash_gen.h 		mtype_add_cidr(set, h, NCIDR_PUT(DCIDR_GET(d->cidr, i)), i);
d                 984 net/netfilter/ipset/ip_set_hash_gen.h 	memcpy(data, d, sizeof(struct mtype_elem));
d                1017 net/netfilter/ipset/ip_set_hash_gen.h 		memcpy(&x->d, value, sizeof(struct mtype_elem));
d                1050 net/netfilter/ipset/ip_set_hash_gen.h 	const struct mtype_elem *d = value;
d                1078 net/netfilter/ipset/ip_set_hash_gen.h 		if (!mtype_data_equal(data, d, &multi))
d                1092 net/netfilter/ipset/ip_set_hash_gen.h 				       NCIDR_PUT(DCIDR_GET(d->cidr, j)), j);
d                1104 net/netfilter/ipset/ip_set_hash_gen.h 				memcpy(&x->d, value,
d                1170 net/netfilter/ipset/ip_set_hash_gen.h mtype_test_cidrs(struct ip_set *set, struct mtype_elem *d,
d                1179 net/netfilter/ipset/ip_set_hash_gen.h 	struct mtype_elem orig = *d;
d                1189 net/netfilter/ipset/ip_set_hash_gen.h 		mtype_data_reset_elem(d, &orig);
d                1190 net/netfilter/ipset/ip_set_hash_gen.h 		mtype_data_netmask(d, NCIDR_GET(h->nets[j].cidr[0]), false);
d                1193 net/netfilter/ipset/ip_set_hash_gen.h 			mtype_data_netmask(d, NCIDR_GET(h->nets[k].cidr[1]),
d                1196 net/netfilter/ipset/ip_set_hash_gen.h 		mtype_data_netmask(d, NCIDR_GET(h->nets[j].cidr[0]));
d                1198 net/netfilter/ipset/ip_set_hash_gen.h 		key = HKEY(d, h->initval, t->htable_bits);
d                1206 net/netfilter/ipset/ip_set_hash_gen.h 			if (!mtype_data_equal(data, d, &multi))
d                1231 net/netfilter/ipset/ip_set_hash_gen.h 	struct mtype_elem *d = value;
d                1244 net/netfilter/ipset/ip_set_hash_gen.h 		if (DCIDR_GET(d->cidr, i) != HOST_MASK)
d                1247 net/netfilter/ipset/ip_set_hash_gen.h 		ret = mtype_test_cidrs(set, d, ext, mext, flags);
d                1252 net/netfilter/ipset/ip_set_hash_gen.h 	key = HKEY(d, h->initval, t->htable_bits);
d                1262 net/netfilter/ipset/ip_set_hash_gen.h 		if (!mtype_data_equal(data, d, &multi))
d                  69 net/netfilter/ipset/ip_set_hash_ipmark.c 		       const struct hash_ipmark4_elem *d)
d                  71 net/netfilter/ipset/ip_set_hash_ipmark.c 	next->ip = d->ip;
d                 192 net/netfilter/ipset/ip_set_hash_ipmark.c 		       const struct hash_ipmark6_elem *d)
d                  76 net/netfilter/ipset/ip_set_hash_ipport.c 		       const struct hash_ipport4_elem *d)
d                  78 net/netfilter/ipset/ip_set_hash_ipport.c 	next->ip = d->ip;
d                  79 net/netfilter/ipset/ip_set_hash_ipport.c 	next->port = d->port;
d                 231 net/netfilter/ipset/ip_set_hash_ipport.c 		       const struct hash_ipport6_elem *d)
d                 233 net/netfilter/ipset/ip_set_hash_ipport.c 	next->port = d->port;
d                  77 net/netfilter/ipset/ip_set_hash_ipportip.c 			 const struct hash_ipportip4_elem *d)
d                  79 net/netfilter/ipset/ip_set_hash_ipportip.c 	next->ip = d->ip;
d                  80 net/netfilter/ipset/ip_set_hash_ipportip.c 	next->port = d->port;
d                 241 net/netfilter/ipset/ip_set_hash_ipportip.c 			 const struct hash_ipportip6_elem *d)
d                 243 net/netfilter/ipset/ip_set_hash_ipportip.c 	next->port = d->port;
d                 121 net/netfilter/ipset/ip_set_hash_ipportnet.c 			  const struct hash_ipportnet4_elem *d)
d                 123 net/netfilter/ipset/ip_set_hash_ipportnet.c 	next->ip = d->ip;
d                 124 net/netfilter/ipset/ip_set_hash_ipportnet.c 	next->port = d->port;
d                 125 net/netfilter/ipset/ip_set_hash_ipportnet.c 	next->ip2 = d->ip2;
d                 370 net/netfilter/ipset/ip_set_hash_ipportnet.c 			  const struct hash_ipportnet6_elem *d)
d                 372 net/netfilter/ipset/ip_set_hash_ipportnet.c 	next->port = d->port;
d                 102 net/netfilter/ipset/ip_set_hash_net.c 		    const struct hash_net4_elem *d)
d                 104 net/netfilter/ipset/ip_set_hash_net.c 	next->ip = d->ip;
d                 267 net/netfilter/ipset/ip_set_hash_net.c 		    const struct hash_net6_elem *d)
d                 124 net/netfilter/ipset/ip_set_hash_netiface.c 			 const struct hash_netiface4_elem *d)
d                 126 net/netfilter/ipset/ip_set_hash_netiface.c 	next->ip = d->ip;
d                 347 net/netfilter/ipset/ip_set_hash_netiface.c 			 const struct hash_netiface6_elem *d)
d                 122 net/netfilter/ipset/ip_set_hash_netnet.c 		       const struct hash_netnet4_elem *d)
d                 124 net/netfilter/ipset/ip_set_hash_netnet.c 	next->ipcmp = d->ipcmp;
d                 353 net/netfilter/ipset/ip_set_hash_netnet.c 		       const struct hash_netnet6_elem *d)
d                 117 net/netfilter/ipset/ip_set_hash_netport.c 			const struct hash_netport4_elem *d)
d                 119 net/netfilter/ipset/ip_set_hash_netport.c 	next->ip = d->ip;
d                 120 net/netfilter/ipset/ip_set_hash_netport.c 	next->port = d->port;
d                 330 net/netfilter/ipset/ip_set_hash_netport.c 			const struct hash_netport6_elem *d)
d                 332 net/netfilter/ipset/ip_set_hash_netport.c 	next->port = d->port;
d                 131 net/netfilter/ipset/ip_set_hash_netportnet.c 			   const struct hash_netportnet4_elem *d)
d                 133 net/netfilter/ipset/ip_set_hash_netportnet.c 	next->ipcmp = d->ipcmp;
d                 134 net/netfilter/ipset/ip_set_hash_netportnet.c 	next->port = d->port;
d                 407 net/netfilter/ipset/ip_set_hash_netportnet.c 			   const struct hash_netportnet6_elem *d)
d                 409 net/netfilter/ipset/ip_set_hash_netportnet.c 	next->port = d->port;
d                 189 net/netfilter/ipset/ip_set_list_set.c 	struct set_adt_elem *d = value;
d                 197 net/netfilter/ipset/ip_set_list_set.c 		else if (e->id != d->id) {
d                 202 net/netfilter/ipset/ip_set_list_set.c 		if (d->before == 0) {
d                 204 net/netfilter/ipset/ip_set_list_set.c 		} else if (d->before > 0) {
d                 207 net/netfilter/ipset/ip_set_list_set.c 			      next->id == d->refid;
d                 209 net/netfilter/ipset/ip_set_list_set.c 			ret = prev && prev->id == d->refid;
d                 236 net/netfilter/ipset/ip_set_list_set.c 	struct set_adt_elem *d = value;
d                 246 net/netfilter/ipset/ip_set_list_set.c 		else if (d->id == e->id)
d                 248 net/netfilter/ipset/ip_set_list_set.c 		else if (d->before == 0 || e->id != d->refid)
d                 250 net/netfilter/ipset/ip_set_list_set.c 		else if (d->before > 0)
d                 257 net/netfilter/ipset/ip_set_list_set.c 	if ((d->before > 0 && !next) ||
d                 258 net/netfilter/ipset/ip_set_list_set.c 	    (d->before < 0 && !prev))
d                 270 net/netfilter/ipset/ip_set_list_set.c 		ip_set_put_byindex(map->net, d->id);
d                 274 net/netfilter/ipset/ip_set_list_set.c 	if (d->before == 0) {
d                 278 net/netfilter/ipset/ip_set_list_set.c 	} else if (d->before > 0) {
d                 296 net/netfilter/ipset/ip_set_list_set.c 	e->id = d->id;
d                 318 net/netfilter/ipset/ip_set_list_set.c 	struct set_adt_elem *d = value;
d                 325 net/netfilter/ipset/ip_set_list_set.c 		else if (e->id != d->id) {
d                 330 net/netfilter/ipset/ip_set_list_set.c 		if (d->before > 0) {
d                 333 net/netfilter/ipset/ip_set_list_set.c 			    next->id != d->refid)
d                 335 net/netfilter/ipset/ip_set_list_set.c 		} else if (d->before < 0) {
d                 336 net/netfilter/ipset/ip_set_list_set.c 			if (!prev || prev->id != d->refid)
d                 342 net/netfilter/ipset/ip_set_list_set.c 	return d->before != 0 ? -IPSET_ERR_REF_EXIST : -IPSET_ERR_EXIST;
d                 142 net/netfilter/ipset/pfxlen.c #define E(a, b, c, d) \
d                 145 net/netfilter/ipset/pfxlen.c 		htonl(c), htonl(d), \
d                 157 net/netfilter/ipset/pfxlen.c #define E(a, b, c, d)					\
d                 159 net/netfilter/ipset/pfxlen.c 		  (__force __be32)c, (__force __be32)d,	\
d                2345 net/netfilter/ipvs/ip_vs_ctl.c 	struct ip_vs_dest_user		d;
d                2756 net/netfilter/ipvs/ip_vs_ctl.c 		struct ip_vs_daemon_user d[2];
d                2758 net/netfilter/ipvs/ip_vs_ctl.c 		memset(&d, 0, sizeof(d));
d                2761 net/netfilter/ipvs/ip_vs_ctl.c 			d[0].state = IP_VS_STATE_MASTER;
d                2762 net/netfilter/ipvs/ip_vs_ctl.c 			strlcpy(d[0].mcast_ifn, ipvs->mcfg.mcast_ifn,
d                2763 net/netfilter/ipvs/ip_vs_ctl.c 				sizeof(d[0].mcast_ifn));
d                2764 net/netfilter/ipvs/ip_vs_ctl.c 			d[0].syncid = ipvs->mcfg.syncid;
d                2767 net/netfilter/ipvs/ip_vs_ctl.c 			d[1].state = IP_VS_STATE_BACKUP;
d                2768 net/netfilter/ipvs/ip_vs_ctl.c 			strlcpy(d[1].mcast_ifn, ipvs->bcfg.mcast_ifn,
d                2769 net/netfilter/ipvs/ip_vs_ctl.c 				sizeof(d[1].mcast_ifn));
d                2770 net/netfilter/ipvs/ip_vs_ctl.c 			d[1].syncid = ipvs->bcfg.syncid;
d                2772 net/netfilter/ipvs/ip_vs_ctl.c 		if (copy_to_user(user, &d, sizeof(d)) != 0)
d                 464 net/netfilter/ipvs/ip_vs_lblc.c 		struct ip_vs_dest *d;
d                 466 net/netfilter/ipvs/ip_vs_lblc.c 		list_for_each_entry_rcu(d, &svc->destinations, n_list) {
d                 467 net/netfilter/ipvs/ip_vs_lblc.c 			if (atomic_read(&d->activeconns)*2
d                 468 net/netfilter/ipvs/ip_vs_lblc.c 			    < atomic_read(&d->weight)) {
d                 628 net/netfilter/ipvs/ip_vs_lblcr.c 		struct ip_vs_dest *d;
d                 630 net/netfilter/ipvs/ip_vs_lblcr.c 		list_for_each_entry_rcu(d, &svc->destinations, n_list) {
d                 631 net/netfilter/ipvs/ip_vs_lblcr.c 			if (atomic_read(&d->activeconns)*2
d                 632 net/netfilter/ipvs/ip_vs_lblcr.c 			    < atomic_read(&d->weight)) {
d                 465 net/netfilter/nf_conntrack_core.c 	unsigned long a, b, c, d;
d                 472 net/netfilter/nf_conntrack_core.c 	d = (unsigned long)siphash(&ct->tuplehash[IP_CT_DIR_ORIGINAL].tuple,
d                 476 net/netfilter/nf_conntrack_core.c 	return siphash_4u64((u64)a, (u64)b, (u64)c, (u64)d, &ct_id_seed);
d                 478 net/netfilter/nf_conntrack_core.c 	return siphash_4u32((u32)a, (u32)b, (u32)c, (u32)d, &ct_id_seed);
d                2124 net/netfilter/nf_conntrack_core.c 	struct iter_data *d = data;
d                2126 net/netfilter/nf_conntrack_core.c 	if (!net_eq(d->net, nf_ct_net(i)))
d                2129 net/netfilter/nf_conntrack_core.c 	return d->iter(i, d->data);
d                2176 net/netfilter/nf_conntrack_core.c 	struct iter_data d;
d                2183 net/netfilter/nf_conntrack_core.c 	d.iter = iter;
d                2184 net/netfilter/nf_conntrack_core.c 	d.data = data;
d                2185 net/netfilter/nf_conntrack_core.c 	d.net = net;
d                2187 net/netfilter/nf_conntrack_core.c 	nf_ct_iterate_cleanup(iter_net_only, &d, portid, report);
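The nf_conntrack_core.c fragments above show a common adapter pattern: the caller's callback, its argument, and a per-namespace filter are bundled into a small struct (iter_data), and a wrapper callback applies the filter before delegating. The sketch below is a minimal user-space rendition of that idea; the names (iter_ctx, iter_ns_only, walk_all) are illustrative, not the kernel's.

    #include <stdbool.h>
    #include <stdio.h>

    struct item { int ns; int value; };

    typedef int (*iter_fn)(struct item *it, void *data);

    /* Bundle the real callback, its argument and the filter key,
     * mirroring how the wrapper callback carries struct iter_data. */
    struct iter_ctx {
    	int ns;        /* filter: only visit items in this namespace */
    	iter_fn iter;  /* caller's callback */
    	void *data;    /* caller's argument */
    };

    static int iter_ns_only(struct item *it, void *data)
    {
    	struct iter_ctx *ctx = data;

    	if (it->ns != ctx->ns)
    		return 0;                      /* skip, keep walking */
    	return ctx->iter(it, ctx->data);       /* delegate to the real callback */
    }

    static void walk_all(struct item *items, int n, iter_fn fn, void *data)
    {
    	for (int i = 0; i < n; i++)
    		if (fn(&items[i], data))       /* non-zero stops the walk */
    			break;
    }

    static int print_item(struct item *it, void *data)
    {
    	(void)data;
    	printf("value=%d\n", it->value);
    	return 0;
    }

    int main(void)
    {
    	struct item items[] = { {1, 10}, {2, 20}, {1, 30} };
    	struct iter_ctx ctx = { .ns = 1, .iter = print_item, .data = NULL };

    	walk_all(items, 3, iter_ns_only, &ctx);   /* visits only ns==1 items */
    	return 0;
    }

The benefit of the adapter is that the generic walker never needs to know about namespaces; the filtering policy lives entirely in the small wrapper.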
d                1576 net/netfilter/nf_conntrack_netlink.c 	unsigned long d;
d                1578 net/netfilter/nf_conntrack_netlink.c 	d = ct->status ^ status;
d                1580 net/netfilter/nf_conntrack_netlink.c 	if (d & (IPS_EXPECTED|IPS_CONFIRMED|IPS_DYING))
d                1584 net/netfilter/nf_conntrack_netlink.c 	if (d & IPS_SEEN_REPLY && !(status & IPS_SEEN_REPLY))
d                1588 net/netfilter/nf_conntrack_netlink.c 	if (d & IPS_ASSURED && !(status & IPS_ASSURED))
d                2505 net/netfilter/nf_conntrack_netlink.c 	unsigned long d = ct->status ^ status;
d                2507 net/netfilter/nf_conntrack_netlink.c 	if (d & IPS_SEEN_REPLY && !(status & IPS_SEEN_REPLY))
d                2511 net/netfilter/nf_conntrack_netlink.c 	if (d & IPS_ASSURED && !(status & IPS_ASSURED))
d                2711 net/netfilter/nf_conntrack_netlink.c 	unsigned long a, b, c, d;
d                2718 net/netfilter/nf_conntrack_netlink.c 	d = (unsigned long)siphash(&exp->tuple, sizeof(exp->tuple), &exp_id_seed);
d                2721 net/netfilter/nf_conntrack_netlink.c 	return (__force __be32)siphash_4u64((u64)a, (u64)b, (u64)c, (u64)d, &exp_id_seed);
d                2723 net/netfilter/nf_conntrack_netlink.c 	return (__force __be32)siphash_4u32((u32)a, (u32)b, (u32)c, (u32)d, &exp_id_seed);
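The ctnetlink fragments compute d = ct->status ^ status to find which status bits a requested update would flip, then reject updates that would clear bits that may only ever be set. A small standalone sketch of that bitmask-delta check follows; the flag names are made up for illustration.

    #include <errno.h>

    #define F_SEEN_REPLY (1UL << 0)   /* "sticky": may never be cleared */
    #define F_ASSURED    (1UL << 1)   /* "sticky": may never be cleared */

    /* Return 0 if the transition old -> req is allowed, -EBUSY otherwise. */
    static int check_status_change(unsigned long old, unsigned long req)
    {
    	unsigned long d = old ^ req;   /* bits that would change */

    	/* A sticky bit changing (set in d) while absent from the
    	 * requested value means the caller is trying to clear it. */
    	if ((d & F_SEEN_REPLY) && !(req & F_SEEN_REPLY))
    		return -EBUSY;
    	if ((d & F_ASSURED) && !(req & F_ASSURED))
    		return -EBUSY;
    	return 0;
    }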
d                  33 net/netfilter/nft_byteorder.c 	union { u32 u32; u16 u16; } *s, *d;
d                  37 net/netfilter/nft_byteorder.c 	d = (void *)dst;
d                  64 net/netfilter/nft_byteorder.c 				d[i].u32 = ntohl((__force __be32)s[i].u32);
d                  68 net/netfilter/nft_byteorder.c 				d[i].u32 = (__force __u32)htonl(s[i].u32);
d                  76 net/netfilter/nft_byteorder.c 				d[i].u16 = ntohs((__force __be16)s[i].u16);
d                  80 net/netfilter/nft_byteorder.c 				d[i].u16 = (__force __u16)htons(s[i].u16);
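nft_byteorder.c treats the register area as an array of packed 32-bit or 16-bit lanes and converts each lane between host and network byte order in a loop. A self-contained sketch of the 32-bit direction (the 16-bit case is the same loop with ntohs/htons):

    #include <arpa/inet.h>   /* ntohl/htonl */
    #include <stdint.h>
    #include <stddef.h>

    /* Convert 'len' bytes worth of packed 32-bit lanes from network to
     * host order; dst and src may point to the same buffer. */
    static void ntoh32_lanes(uint32_t *dst, const uint32_t *src, size_t len)
    {
    	for (size_t i = 0; i < len / sizeof(uint32_t); i++)
    		dst[i] = ntohl(src[i]);
    }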
d                  30 net/netfilter/nft_cmp.c 	int d;
d                  32 net/netfilter/nft_cmp.c 	d = memcmp(&regs->data[priv->sreg], &priv->data, priv->len);
d                  35 net/netfilter/nft_cmp.c 		if (d != 0)
d                  39 net/netfilter/nft_cmp.c 		if (d == 0)
d                  43 net/netfilter/nft_cmp.c 		if (d == 0)
d                  47 net/netfilter/nft_cmp.c 		if (d > 0)
d                  51 net/netfilter/nft_cmp.c 		if (d == 0)
d                  55 net/netfilter/nft_cmp.c 		if (d < 0)
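nft_cmp.c performs a single memcmp() and then branches on the configured operator, consulting only the sign/zero-ness of the result; memcmp ordering lines up with numeric ordering here because the compared data is kept in big-endian form. A compact user-space sketch of the same dispatch (the operator names are illustrative, not the uapi enum):

    #include <string.h>
    #include <stdbool.h>
    #include <stddef.h>

    enum cmp_op { OP_EQ, OP_NEQ, OP_LT, OP_LTE, OP_GT, OP_GTE };

    static bool cmp_match(const void *a, const void *b, size_t len, enum cmp_op op)
    {
    	int d = memcmp(a, b, len);

    	switch (op) {
    	case OP_EQ:  return d == 0;
    	case OP_NEQ: return d != 0;
    	case OP_LT:  return d < 0;
    	case OP_LTE: return d <= 0;
    	case OP_GT:  return d > 0;
    	case OP_GTE: return d >= 0;
    	}
    	return false;
    }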
d                  48 net/netfilter/nft_ct.c 				   enum ip_conntrack_dir d)
d                  50 net/netfilter/nft_ct.c 	if (d < IP_CT_DIR_MAX)
d                  51 net/netfilter/nft_ct.c 		return k == NFT_CT_BYTES ? atomic64_read(&c[d].bytes) :
d                  52 net/netfilter/nft_ct.c 					   atomic64_read(&c[d].packets);
d                 100 net/netfilter/nft_immediate.c 				  const struct nft_data **d)
d                 196 net/netfilter/nft_lookup.c 			       const struct nft_data **d)
d                  28 net/netfilter/nft_payload.c nft_payload_copy_vlan(u32 *d, const struct sk_buff *skb, u8 offset, u8 len)
d                  31 net/netfilter/nft_payload.c 	u8 vlan_len, *vlanh, *dst_u8 = (u8 *) d;
d                  56 net/netfilter/nft_set_rbtree.c 	int d;
d                  66 net/netfilter/nft_set_rbtree.c 		d = memcmp(this, key, set->klen);
d                  67 net/netfilter/nft_set_rbtree.c 		if (d < 0) {
d                  75 net/netfilter/nft_set_rbtree.c 		} else if (d > 0)
d                 137 net/netfilter/nft_set_rbtree.c 	int d;
d                 147 net/netfilter/nft_set_rbtree.c 		d = memcmp(this, key, set->klen);
d                 148 net/netfilter/nft_set_rbtree.c 		if (d < 0) {
d                 152 net/netfilter/nft_set_rbtree.c 		} else if (d > 0) {
d                 225 net/netfilter/nft_set_rbtree.c 	int d;
d                 232 net/netfilter/nft_set_rbtree.c 		d = memcmp(nft_set_ext_key(&rbe->ext),
d                 235 net/netfilter/nft_set_rbtree.c 		if (d < 0)
d                 237 net/netfilter/nft_set_rbtree.c 		else if (d > 0)
d                 321 net/netfilter/nft_set_rbtree.c 	int d;
d                 326 net/netfilter/nft_set_rbtree.c 		d = memcmp(nft_set_ext_key(&rbe->ext), &elem->key.val,
d                 328 net/netfilter/nft_set_rbtree.c 		if (d < 0)
d                 330 net/netfilter/nft_set_rbtree.c 		else if (d > 0)
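nft_set_rbtree.c descends the tree by memcmp() over fixed-length keys: one sign of the result goes left, the other goes right, and zero marks a candidate match. The sketch below is a plain binary-search-tree lookup using the same keying, ignoring the rbtree balancing and the interval end-flag handling of the real code.

    #include <string.h>
    #include <stddef.h>

    #define KLEN 16   /* fixed key length, playing the role of set->klen */

    struct tnode {
    	unsigned char key[KLEN];
    	struct tnode *left, *right;   /* left subtree holds smaller keys */
    };

    static struct tnode *tree_lookup(struct tnode *root, const unsigned char *key)
    {
    	while (root) {
    		int d = memcmp(root->key, key, KLEN);

    		if (d == 0)
    			return root;                  /* exact match */
    		root = d > 0 ? root->left : root->right;
    	}
    	return NULL;
    }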
d                  38 net/netfilter/xt_set.c #define ADT_OPT(n, f, d, fs, cfs, t, p, b, po, bo)	\
d                  41 net/netfilter/xt_set.c 	.dim = d,					\
d                 957 net/nfc/core.c static void nfc_release(struct device *d)
d                 959 net/nfc/core.c 	struct nfc_dev *dev = to_nfc_dev(d);
d                1019 net/nfc/core.c static int match_idx(struct device *d, const void *data)
d                1021 net/nfc/core.c 	struct nfc_dev *dev = to_nfc_dev(d);
d                1029 net/nfc/core.c 	struct device *d;
d                1031 net/nfc/core.c 	d = class_find_device(&nfc_class, NULL, &idx, match_idx);
d                1032 net/nfc/core.c 	if (!d)
d                1035 net/nfc/core.c 	return to_nfc_dev(d);
d                 112 net/nfc/nfc.h  	struct device *d = class_dev_iter_next(iter);
d                 113 net/nfc/nfc.h  	if (!d)
d                 116 net/nfc/nfc.h  	return to_nfc_dev(d);
d                  54 net/openvswitch/flow_table.c 	long *d = (long *)((u8 *)dst + start);
d                  63 net/openvswitch/flow_table.c 		*d++ = *s++ & *m++;
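The flow_table.c lines copy a flow key one long word at a time while applying the mask in the same pass: *d++ = *s++ & *m++. A tiny standalone version of that masked copy:

    #include <stddef.h>

    /* Copy 'len' bytes from src to dst, ANDing with mask as we go.
     * len is assumed to be a multiple of sizeof(long), as the real
     * range-based copy guarantees. */
    static void masked_copy(void *dst, const void *src, const void *mask, size_t len)
    {
    	long *d = dst;
    	const long *s = src, *m = mask;

    	for (size_t i = 0; i < len / sizeof(long); i++)
    		*d++ = *s++ & *m++;
    }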
d                  67 net/rfkill/core.c #define to_rfkill(d)	container_of(d, struct rfkill, dev)
d                 101 net/rose/rose_in.c static int rose_state3_machine(struct sock *sk, struct sk_buff *skb, int frametype, int ns, int nr, int q, int d, int m)
d                 265 net/rose/rose_in.c 	int queued = 0, frametype, ns, nr, q, d, m;
d                 270 net/rose/rose_in.c 	frametype = rose_decode(skb, &ns, &nr, &q, &d, &m);
d                 280 net/rose/rose_in.c 		queued = rose_state3_machine(sk, skb, frametype, ns, nr, q, d, m);
d                 201 net/rose/rose_subr.c int rose_decode(struct sk_buff *skb, int *ns, int *nr, int *q, int *d, int *m)
d                 207 net/rose/rose_subr.c 	*ns = *nr = *q = *d = *m = 0;
d                 229 net/rose/rose_subr.c 		*d  = (frame[0] & ROSE_D_BIT) == ROSE_D_BIT;
d                 996 net/sched/act_api.c 	struct gnet_dump d;
d                1009 net/sched/act_api.c 							   &p->tcfa_lock, &d,
d                1015 net/sched/act_api.c 					    &p->tcfa_lock, &d, TCA_ACT_PAD);
d                1020 net/sched/act_api.c 	if (gnet_stats_copy_basic(NULL, &d, p->cpu_bstats, &p->tcfa_bstats) < 0 ||
d                1021 net/sched/act_api.c 	    gnet_stats_copy_basic_hw(NULL, &d, p->cpu_bstats_hw,
d                1023 net/sched/act_api.c 	    gnet_stats_copy_rate_est(&d, &p->tcfa_rate_est) < 0 ||
d                1024 net/sched/act_api.c 	    gnet_stats_copy_queue(&d, p->cpu_qstats,
d                1029 net/sched/act_api.c 	if (gnet_stats_finish_copy(&d) < 0)
d                 350 net/sched/act_pedit.c 				u8 *d, _d;
d                 357 net/sched/act_pedit.c 				d = skb_header_pointer(skb, hoffset + tkey->at,
d                 359 net/sched/act_pedit.c 				if (!d)
d                 361 net/sched/act_pedit.c 				offset += (*d & tkey->offmask) >> tkey->shift;
d                  28 net/sched/act_simple.c 	struct tcf_defact *d = to_defact(a);
d                  30 net/sched/act_simple.c 	spin_lock(&d->tcf_lock);
d                  31 net/sched/act_simple.c 	tcf_lastuse_update(&d->tcf_tm);
d                  32 net/sched/act_simple.c 	bstats_update(&d->tcf_bstats, skb);
d                  39 net/sched/act_simple.c 	       (char *)d->tcfd_defdata, d->tcf_bstats.packets);
d                  40 net/sched/act_simple.c 	spin_unlock(&d->tcf_lock);
d                  41 net/sched/act_simple.c 	return d->tcf_action;
d                  46 net/sched/act_simple.c 	struct tcf_defact *d = to_defact(a);
d                  47 net/sched/act_simple.c 	kfree(d->tcfd_defdata);
d                  50 net/sched/act_simple.c static int alloc_defdata(struct tcf_defact *d, const struct nlattr *defdata)
d                  52 net/sched/act_simple.c 	d->tcfd_defdata = kzalloc(SIMP_MAX_DATA, GFP_KERNEL);
d                  53 net/sched/act_simple.c 	if (unlikely(!d->tcfd_defdata))
d                  55 net/sched/act_simple.c 	nla_strlcpy(d->tcfd_defdata, defdata, SIMP_MAX_DATA);
d                  64 net/sched/act_simple.c 	struct tcf_defact *d;
d                  70 net/sched/act_simple.c 	d = to_defact(a);
d                  71 net/sched/act_simple.c 	spin_lock_bh(&d->tcf_lock);
d                  73 net/sched/act_simple.c 	memset(d->tcfd_defdata, 0, SIMP_MAX_DATA);
d                  74 net/sched/act_simple.c 	nla_strlcpy(d->tcfd_defdata, defdata, SIMP_MAX_DATA);
d                  75 net/sched/act_simple.c 	spin_unlock_bh(&d->tcf_lock);
d                  95 net/sched/act_simple.c 	struct tcf_defact *d;
d                 136 net/sched/act_simple.c 		d = to_defact(*a);
d                 142 net/sched/act_simple.c 		err = alloc_defdata(d, tb[TCA_DEF_DATA]);
d                 174 net/sched/act_simple.c 	struct tcf_defact *d = to_defact(a);
d                 176 net/sched/act_simple.c 		.index   = d->tcf_index,
d                 177 net/sched/act_simple.c 		.refcnt  = refcount_read(&d->tcf_refcnt) - ref,
d                 178 net/sched/act_simple.c 		.bindcnt = atomic_read(&d->tcf_bindcnt) - bind,
d                 182 net/sched/act_simple.c 	spin_lock_bh(&d->tcf_lock);
d                 183 net/sched/act_simple.c 	opt.action = d->tcf_action;
d                 185 net/sched/act_simple.c 	    nla_put_string(skb, TCA_DEF_DATA, d->tcfd_defdata))
d                 188 net/sched/act_simple.c 	tcf_tm_dump(&t, &d->tcf_tm);
d                 191 net/sched/act_simple.c 	spin_unlock_bh(&d->tcf_lock);
d                 196 net/sched/act_simple.c 	spin_unlock_bh(&d->tcf_lock);
d                  29 net/sched/act_skbedit.c 	struct tcf_skbedit *d = to_skbedit(a);
d                  33 net/sched/act_skbedit.c 	tcf_lastuse_update(&d->tcf_tm);
d                  34 net/sched/act_skbedit.c 	bstats_cpu_update(this_cpu_ptr(d->common.cpu_bstats), skb);
d                  36 net/sched/act_skbedit.c 	params = rcu_dereference_bh(d->params);
d                  37 net/sched/act_skbedit.c 	action = READ_ONCE(d->tcf_action);
d                  72 net/sched/act_skbedit.c 	qstats_drop_inc(this_cpu_ptr(d->common.cpu_qstats));
d                  97 net/sched/act_skbedit.c 	struct tcf_skbedit *d;
d                 174 net/sched/act_skbedit.c 		d = to_skbedit(*a);
d                 177 net/sched/act_skbedit.c 		d = to_skbedit(*a);
d                 207 net/sched/act_skbedit.c 	spin_lock_bh(&d->tcf_lock);
d                 209 net/sched/act_skbedit.c 	rcu_swap_protected(d->params, params_new,
d                 210 net/sched/act_skbedit.c 			   lockdep_is_held(&d->tcf_lock));
d                 211 net/sched/act_skbedit.c 	spin_unlock_bh(&d->tcf_lock);
d                 232 net/sched/act_skbedit.c 	struct tcf_skbedit *d = to_skbedit(a);
d                 235 net/sched/act_skbedit.c 		.index   = d->tcf_index,
d                 236 net/sched/act_skbedit.c 		.refcnt  = refcount_read(&d->tcf_refcnt) - ref,
d                 237 net/sched/act_skbedit.c 		.bindcnt = atomic_read(&d->tcf_bindcnt) - bind,
d                 242 net/sched/act_skbedit.c 	spin_lock_bh(&d->tcf_lock);
d                 243 net/sched/act_skbedit.c 	params = rcu_dereference_protected(d->params,
d                 244 net/sched/act_skbedit.c 					   lockdep_is_held(&d->tcf_lock));
d                 245 net/sched/act_skbedit.c 	opt.action = d->tcf_action;
d                 270 net/sched/act_skbedit.c 	tcf_tm_dump(&t, &d->tcf_tm);
d                 273 net/sched/act_skbedit.c 	spin_unlock_bh(&d->tcf_lock);
d                 278 net/sched/act_skbedit.c 	spin_unlock_bh(&d->tcf_lock);
d                 285 net/sched/act_skbedit.c 	struct tcf_skbedit *d = to_skbedit(a);
d                 288 net/sched/act_skbedit.c 	params = rcu_dereference_protected(d->params, 1);
d                  27 net/sched/act_skbmod.c 	struct tcf_skbmod *d = to_skbmod(a);
d                  33 net/sched/act_skbmod.c 	tcf_lastuse_update(&d->tcf_tm);
d                  34 net/sched/act_skbmod.c 	bstats_cpu_update(this_cpu_ptr(d->common.cpu_bstats), skb);
d                  44 net/sched/act_skbmod.c 	action = READ_ONCE(d->tcf_action);
d                  48 net/sched/act_skbmod.c 	p = rcu_dereference_bh(d->skbmod_p);
d                  68 net/sched/act_skbmod.c 	qstats_overlimit_inc(this_cpu_ptr(d->common.cpu_qstats));
d                  91 net/sched/act_skbmod.c 	struct tcf_skbmod *d;
d                 161 net/sched/act_skbmod.c 	d = to_skbmod(*a);
d                 172 net/sched/act_skbmod.c 		spin_lock_bh(&d->tcf_lock);
d                 175 net/sched/act_skbmod.c 	p_old = rcu_dereference_protected(d->skbmod_p, 1);
d                 184 net/sched/act_skbmod.c 	rcu_assign_pointer(d->skbmod_p, p);
d                 186 net/sched/act_skbmod.c 		spin_unlock_bh(&d->tcf_lock);
d                 206 net/sched/act_skbmod.c 	struct tcf_skbmod *d = to_skbmod(a);
d                 209 net/sched/act_skbmod.c 	p = rcu_dereference_protected(d->skbmod_p, 1);
d                 217 net/sched/act_skbmod.c 	struct tcf_skbmod *d = to_skbmod(a);
d                 221 net/sched/act_skbmod.c 		.index   = d->tcf_index,
d                 222 net/sched/act_skbmod.c 		.refcnt  = refcount_read(&d->tcf_refcnt) - ref,
d                 223 net/sched/act_skbmod.c 		.bindcnt = atomic_read(&d->tcf_bindcnt) - bind,
d                 227 net/sched/act_skbmod.c 	spin_lock_bh(&d->tcf_lock);
d                 228 net/sched/act_skbmod.c 	opt.action = d->tcf_action;
d                 229 net/sched/act_skbmod.c 	p = rcu_dereference_protected(d->skbmod_p,
d                 230 net/sched/act_skbmod.c 				      lockdep_is_held(&d->tcf_lock));
d                 244 net/sched/act_skbmod.c 	tcf_tm_dump(&t, &d->tcf_tm);
d                 248 net/sched/act_skbmod.c 	spin_unlock_bh(&d->tcf_lock);
d                 251 net/sched/act_skbmod.c 	spin_unlock_bh(&d->tcf_lock);
d                 875 net/sched/sch_api.c 	struct gnet_dump d;
d                 917 net/sched/sch_api.c 					 NULL, &d, TCA_PAD) < 0)
d                 920 net/sched/sch_api.c 	if (q->ops->dump_stats && q->ops->dump_stats(q, &d) < 0)
d                 929 net/sched/sch_api.c 				  &d, cpu_bstats, &q->bstats) < 0 ||
d                 930 net/sched/sch_api.c 	    gnet_stats_copy_rate_est(&d, &q->rate_est) < 0 ||
d                 931 net/sched/sch_api.c 	    gnet_stats_copy_queue(&d, cpu_qstats, &q->qstats, qlen) < 0)
d                 934 net/sched/sch_api.c 	if (gnet_stats_finish_copy(&d) < 0)
d                1792 net/sched/sch_api.c 	struct gnet_dump d;
d                1813 net/sched/sch_api.c 					 NULL, &d, TCA_PAD) < 0)
d                1816 net/sched/sch_api.c 	if (cl_ops->dump_stats && cl_ops->dump_stats(q, cl, &d) < 0)
d                1819 net/sched/sch_api.c 	if (gnet_stats_finish_copy(&d) < 0)
d                 650 net/sched/sch_atm.c 			struct gnet_dump *d)
d                 655 net/sched/sch_atm.c 				  d, NULL, &flow->bstats) < 0 ||
d                 656 net/sched/sch_atm.c 	    gnet_stats_copy_queue(d, NULL, &flow->qstats, flow->q->q.qlen) < 0)
d                2809 net/sched/sch_cake.c static int cake_dump_stats(struct Qdisc *sch, struct gnet_dump *d)
d                2811 net/sched/sch_cake.c 	struct nlattr *stats = nla_nest_start_noflag(d->skb, TCA_STATS_APP);
d                2820 net/sched/sch_cake.c 		if (nla_put_u32(d->skb, TCA_CAKE_STATS_ ## attr, data)) \
d                2824 net/sched/sch_cake.c 		if (nla_put_u64_64bit(d->skb, TCA_CAKE_STATS_ ## attr, \
d                2841 net/sched/sch_cake.c 	tstats = nla_nest_start_noflag(d->skb, TCA_CAKE_STATS_TIN_STATS);
d                2846 net/sched/sch_cake.c 		if (nla_put_u32(d->skb, TCA_CAKE_TIN_STATS_ ## attr, data)) \
d                2850 net/sched/sch_cake.c 		if (nla_put_u64_64bit(d->skb, TCA_CAKE_TIN_STATS_ ## attr, \
d                2858 net/sched/sch_cake.c 		ts = nla_nest_start_noflag(d->skb, i + 1);
d                2894 net/sched/sch_cake.c 		nla_nest_end(d->skb, ts);
d                2900 net/sched/sch_cake.c 	nla_nest_end(d->skb, tstats);
d                2901 net/sched/sch_cake.c 	return nla_nest_end(d->skb, stats);
d                2904 net/sched/sch_cake.c 	nla_nest_cancel(d->skb, stats);
d                2946 net/sched/sch_cake.c 				 struct gnet_dump *d)
d                2973 net/sched/sch_cake.c 	if (gnet_stats_copy_queue(d, NULL, &qs, qs.qlen) < 0)
d                2978 net/sched/sch_cake.c 		stats = nla_nest_start_noflag(d->skb, TCA_STATS_APP);
d                2983 net/sched/sch_cake.c 		if (nla_put_u32(d->skb, TCA_CAKE_STATS_ ## attr, data)) \
d                2987 net/sched/sch_cake.c 		if (nla_put_s32(d->skb, TCA_CAKE_STATS_ ## attr, data)) \
d                3008 net/sched/sch_cake.c 		if (nla_nest_end(d->skb, stats) < 0)
d                3015 net/sched/sch_cake.c 	nla_nest_cancel(d->skb, stats);
d                1338 net/sched/sch_cbq.c cbq_dump_stats(struct Qdisc *sch, struct gnet_dump *d)
d                1343 net/sched/sch_cbq.c 	return gnet_stats_copy_app(d, &q->link.xstats, sizeof(q->link.xstats));
d                1374 net/sched/sch_cbq.c 	struct gnet_dump *d)
d                1388 net/sched/sch_cbq.c 				  d, NULL, &cl->bstats) < 0 ||
d                1389 net/sched/sch_cbq.c 	    gnet_stats_copy_rate_est(d, &cl->rate_est) < 0 ||
d                1390 net/sched/sch_cbq.c 	    gnet_stats_copy_queue(d, NULL, &cl->qstats, qlen) < 0)
d                1393 net/sched/sch_cbq.c 	return gnet_stats_copy_app(d, &cl->xstats, sizeof(cl->xstats));
d                 467 net/sched/sch_choke.c static int choke_dump_stats(struct Qdisc *sch, struct gnet_dump *d)
d                 478 net/sched/sch_choke.c 	return gnet_stats_copy_app(d, &st, sizeof(st));
d                 245 net/sched/sch_codel.c static int codel_dump_stats(struct Qdisc *sch, struct gnet_dump *d)
d                 268 net/sched/sch_codel.c 	return gnet_stats_copy_app(d, &st, sizeof(st));
d                 258 net/sched/sch_drr.c 				struct gnet_dump *d)
d                 270 net/sched/sch_drr.c 				  d, NULL, &cl->bstats) < 0 ||
d                 271 net/sched/sch_drr.c 	    gnet_stats_copy_rate_est(d, &cl->rate_est) < 0 ||
d                 272 net/sched/sch_drr.c 	    gnet_stats_copy_queue(d, cl_q->cpu_qstats, &cl_q->qstats, qlen) < 0)
d                 275 net/sched/sch_drr.c 	return gnet_stats_copy_app(d, &xstats, sizeof(xstats));
d                 937 net/sched/sch_fq.c static int fq_dump_stats(struct Qdisc *sch, struct gnet_dump *d)
d                 960 net/sched/sch_fq.c 	return gnet_stats_copy_app(d, &st, sizeof(st));
d                 551 net/sched/sch_fq_codel.c static int fq_codel_dump_stats(struct Qdisc *sch, struct gnet_dump *d)
d                 575 net/sched/sch_fq_codel.c 	return gnet_stats_copy_app(d, &st, sizeof(st));
d                 616 net/sched/sch_fq_codel.c 				     struct gnet_dump *d)
d                 655 net/sched/sch_fq_codel.c 	if (gnet_stats_copy_queue(d, NULL, &qs, qs.qlen) < 0)
d                 658 net/sched/sch_fq_codel.c 		return gnet_stats_copy_app(d, &xstats, sizeof(xstats));
d                 442 net/sched/sch_hfsc.c d2dx(u32 d)
d                 446 net/sched/sch_hfsc.c 	dx = ((u64)d * PSCHED_TICKS_PER_SEC);
d                 466 net/sched/sch_hfsc.c 	u64 d;
d                 468 net/sched/sch_hfsc.c 	d = dx * USEC_PER_SEC;
d                 469 net/sched/sch_hfsc.c 	do_div(d, PSCHED_TICKS_PER_SEC);
d                 470 net/sched/sch_hfsc.c 	return (u32)d;
d                 478 net/sched/sch_hfsc.c 	isc->dx   = d2dx(sc->d);
d                1259 net/sched/sch_hfsc.c 	tsc.d  = dx2d(sc->dx);
d                1318 net/sched/sch_hfsc.c 	struct gnet_dump *d)
d                1330 net/sched/sch_hfsc.c 	if (gnet_stats_copy_basic(qdisc_root_sleeping_running(sch), d, NULL, &cl->bstats) < 0 ||
d                1331 net/sched/sch_hfsc.c 	    gnet_stats_copy_rate_est(d, &cl->rate_est) < 0 ||
d                1332 net/sched/sch_hfsc.c 	    gnet_stats_copy_queue(d, NULL, &cl->qstats, qlen) < 0)
d                1335 net/sched/sch_hfsc.c 	return gnet_stats_copy_app(d, &xstats, sizeof(xstats));
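sch_hfsc.c's d2dx()/dx2d() pair convert a service-curve x-projection between microseconds and internal scheduler ticks, rounding up on the way in so a nonzero delay never collapses to zero and truncating on the way back. The sketch below only illustrates that rounded 64-bit scaling; the tick rate is a placeholder, not the kernel's PSCHED constant.

    #include <stdint.h>

    #define TICKS_PER_SEC 1000000000ULL   /* illustrative tick rate */
    #define USEC_PER_SEC  1000000ULL

    /* usec -> ticks, rounded up (the d2dx() direction) */
    static uint64_t usec_to_ticks(uint32_t us)
    {
    	uint64_t dx = (uint64_t)us * TICKS_PER_SEC;

    	return (dx + USEC_PER_SEC - 1) / USEC_PER_SEC;
    }

    /* ticks -> usec, truncating (the dx2d() direction);
     * assumes dx is small enough that dx * USEC_PER_SEC fits in 64 bits */
    static uint32_t ticks_to_usec(uint64_t dx)
    {
    	return (uint32_t)(dx * USEC_PER_SEC / TICKS_PER_SEC);
    }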
d                 680 net/sched/sch_hhf.c static int hhf_dump_stats(struct Qdisc *sch, struct gnet_dump *d)
d                 690 net/sched/sch_hhf.c 	return gnet_stats_copy_app(d, &st, sizeof(st));
d                1121 net/sched/sch_htb.c htb_dump_class_stats(struct Qdisc *sch, unsigned long arg, struct gnet_dump *d)
d                1139 net/sched/sch_htb.c 				  d, NULL, &cl->bstats) < 0 ||
d                1140 net/sched/sch_htb.c 	    gnet_stats_copy_rate_est(d, &cl->rate_est) < 0 ||
d                1141 net/sched/sch_htb.c 	    gnet_stats_copy_queue(d, NULL, &qs, qlen) < 0)
d                1144 net/sched/sch_htb.c 	return gnet_stats_copy_app(d, &cl->xstats, sizeof(cl->xstats));
d                 244 net/sched/sch_mq.c 			       struct gnet_dump *d)
d                 249 net/sched/sch_mq.c 	if (gnet_stats_copy_basic(&sch->running, d, sch->cpu_bstats,
d                 251 net/sched/sch_mq.c 	    qdisc_qstats_copy(d, sch) < 0)
d                 509 net/sched/sch_mqprio.c 				   struct gnet_dump *d)
d                 510 net/sched/sch_mqprio.c 	__releases(d->lock)
d                 511 net/sched/sch_mqprio.c 	__acquires(d->lock)
d                 526 net/sched/sch_mqprio.c 		if (d->lock)
d                 527 net/sched/sch_mqprio.c 			spin_unlock_bh(d->lock);
d                 552 net/sched/sch_mqprio.c 		if (d->lock)
d                 553 net/sched/sch_mqprio.c 			spin_lock_bh(d->lock);
d                 554 net/sched/sch_mqprio.c 		if (gnet_stats_copy_basic(NULL, d, NULL, &bstats) < 0 ||
d                 555 net/sched/sch_mqprio.c 		    gnet_stats_copy_queue(d, NULL, &qstats, qlen) < 0)
d                 561 net/sched/sch_mqprio.c 		if (gnet_stats_copy_basic(qdisc_root_sleeping_running(sch), d,
d                 563 net/sched/sch_mqprio.c 		    qdisc_qstats_copy(d, sch) < 0)
d                 335 net/sched/sch_multiq.c 				 struct gnet_dump *d)
d                 342 net/sched/sch_multiq.c 				  d, cl_q->cpu_bstats, &cl_q->bstats) < 0 ||
d                 343 net/sched/sch_multiq.c 	    qdisc_qstats_copy(d, cl_q) < 0)
d                 766 net/sched/sch_netem.c static void dist_free(struct disttable *d)
d                 768 net/sched/sch_netem.c 	kvfree(d);
d                 782 net/sched/sch_netem.c 	struct disttable *d;
d                 788 net/sched/sch_netem.c 	d = kvmalloc(sizeof(struct disttable) + n * sizeof(s16), GFP_KERNEL);
d                 789 net/sched/sch_netem.c 	if (!d)
d                 792 net/sched/sch_netem.c 	d->size = n;
d                 794 net/sched/sch_netem.c 		d->table[i] = data[i];
d                 799 net/sched/sch_netem.c 	swap(*tbl, d);
d                 802 net/sched/sch_netem.c 	dist_free(d);
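sch_netem.c builds a new delay-distribution table (a struct with a flexible array of 16-bit samples) off to the side, swaps it with the active pointer, and frees the old one, so readers never observe a half-filled table. A user-space sketch of that build-and-swap, without the locking and RCU-style publication the real code relies on:

    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    struct disttable {
    	size_t size;
    	int16_t table[];    /* flexible array member */
    };

    /* Build a new table from 'data' and install it in *slot, freeing the
     * previous table.  Returns 0 on success, -1 on allocation failure. */
    static int dist_replace(struct disttable **slot, const int16_t *data, size_t n)
    {
    	struct disttable *d, *old;

    	d = malloc(sizeof(*d) + n * sizeof(d->table[0]));
    	if (!d)
    		return -1;

    	d->size = n;
    	memcpy(d->table, data, n * sizeof(d->table[0]));

    	old = *slot;        /* swap in the fully built table */
    	*slot = d;
    	free(old);          /* free(NULL) is a no-op, like dist_free() */
    	return 0;
    }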
d                 510 net/sched/sch_pie.c static int pie_dump_stats(struct Qdisc *sch, struct gnet_dump *d)
d                 527 net/sched/sch_pie.c 	return gnet_stats_copy_app(d, &st, sizeof(st));
d                 358 net/sched/sch_prio.c 				 struct gnet_dump *d)
d                 365 net/sched/sch_prio.c 				  d, cl_q->cpu_bstats, &cl_q->bstats) < 0 ||
d                 366 net/sched/sch_prio.c 	    qdisc_qstats_copy(d, cl_q) < 0)
d                 633 net/sched/sch_qfq.c 				struct gnet_dump *d)
d                 644 net/sched/sch_qfq.c 				  d, NULL, &cl->bstats) < 0 ||
d                 645 net/sched/sch_qfq.c 	    gnet_stats_copy_rate_est(d, &cl->rate_est) < 0 ||
d                 646 net/sched/sch_qfq.c 	    qdisc_qstats_copy(d, cl->qdisc) < 0)
d                 649 net/sched/sch_qfq.c 	return gnet_stats_copy_app(d, &xstats, sizeof(xstats));
d                 331 net/sched/sch_red.c static int red_dump_stats(struct Qdisc *sch, struct gnet_dump *d)
d                 354 net/sched/sch_red.c 	return gnet_stats_copy_app(d, &st, sizeof(st));
d                 594 net/sched/sch_sfb.c static int sfb_dump_stats(struct Qdisc *sch, struct gnet_dump *d)
d                 608 net/sched/sch_sfb.c 	return gnet_stats_copy_app(d, &st, sizeof(st));
d                 231 net/sched/sch_sfq.c 	int d;
d                 235 net/sched/sch_sfq.c 	d = q->slots[x].qlen--;
d                 236 net/sched/sch_sfq.c 	if (n == p && q->cur_depth == d)
d                 244 net/sched/sch_sfq.c 	int d;
d                 248 net/sched/sch_sfq.c 	d = ++q->slots[x].qlen;
d                 249 net/sched/sch_sfq.c 	if (q->cur_depth < d)
d                 250 net/sched/sch_sfq.c 		q->cur_depth = d;
d                 296 net/sched/sch_sfq.c 	sfq_index x, d = q->cur_depth;
d                 302 net/sched/sch_sfq.c 	if (d > 1) {
d                 303 net/sched/sch_sfq.c 		x = q->dep[d].next;
d                 316 net/sched/sch_sfq.c 	if (d == 1) {
d                 863 net/sched/sch_sfq.c 				struct gnet_dump *d)
d                 877 net/sched/sch_sfq.c 	if (gnet_stats_copy_queue(d, NULL, &qs, qs.qlen) < 0)
d                 879 net/sched/sch_sfq.c 	return gnet_stats_copy_app(d, &xstats, sizeof(xstats));
d                 254 net/sched/sch_skbprio.c 				   struct gnet_dump *d)
d                 257 net/sched/sch_skbprio.c 	if (gnet_stats_copy_queue(d, NULL, &q->qstats[cl - 1],
d                1879 net/sched/sch_taprio.c 				   struct gnet_dump *d)
d                1880 net/sched/sch_taprio.c 	__releases(d->lock)
d                1881 net/sched/sch_taprio.c 	__acquires(d->lock)
d                1886 net/sched/sch_taprio.c 	if (gnet_stats_copy_basic(&sch->running, d, NULL, &sch->bstats) < 0 ||
d                1887 net/sched/sch_taprio.c 	    qdisc_qstats_copy(d, sch) < 0)
d                 442 net/sunrpc/cache.c 		struct cache_detail *d;
d                 464 net/sunrpc/cache.c 		d = current_detail;
d                 470 net/sunrpc/cache.c 			cache_fresh_unlocked(ch, d);
d                 471 net/sunrpc/cache.c 			cache_put(ch, d);
d                 139 net/tipc/discover.c static bool tipc_disc_addr_trial_msg(struct tipc_discoverer *d,
d                 147 net/tipc/discover.c 	struct net *net = d->net;
d                 162 net/tipc/discover.c 		msg_set_prevnode(buf_msg(d->skb), sugg_addr);
d                 170 net/tipc/discover.c 		msg_set_prevnode(buf_msg(d->skb), tn->trial_addr);
d                 171 net/tipc/discover.c 		msg_set_type(buf_msg(d->skb), DSC_REQ_MSG);
d                 257 net/tipc/discover.c void tipc_disc_add_dest(struct tipc_discoverer *d)
d                 259 net/tipc/discover.c 	spin_lock_bh(&d->lock);
d                 260 net/tipc/discover.c 	d->num_nodes++;
d                 261 net/tipc/discover.c 	spin_unlock_bh(&d->lock);
d                 266 net/tipc/discover.c void tipc_disc_remove_dest(struct tipc_discoverer *d)
d                 270 net/tipc/discover.c 	spin_lock_bh(&d->lock);
d                 271 net/tipc/discover.c 	d->num_nodes--;
d                 272 net/tipc/discover.c 	num = d->num_nodes;
d                 273 net/tipc/discover.c 	intv = d->timer_intv;
d                 275 net/tipc/discover.c 		d->timer_intv = TIPC_DISC_INIT;
d                 276 net/tipc/discover.c 		mod_timer(&d->timer, jiffies + d->timer_intv);
d                 278 net/tipc/discover.c 	spin_unlock_bh(&d->lock);
d                 289 net/tipc/discover.c 	struct tipc_discoverer *d = from_timer(d, t, timer);
d                 290 net/tipc/discover.c 	struct tipc_net *tn = tipc_net(d->net);
d                 293 net/tipc/discover.c 	struct net *net = d->net;
d                 296 net/tipc/discover.c 	spin_lock_bh(&d->lock);
d                 299 net/tipc/discover.c 	if (tipc_node(d->domain) && d->num_nodes) {
d                 300 net/tipc/discover.c 		d->timer_intv = TIPC_DISC_INACTIVE;
d                 306 net/tipc/discover.c 		mod_timer(&d->timer, jiffies + TIPC_DISC_INIT);
d                 307 net/tipc/discover.c 		spin_unlock_bh(&d->lock);
d                 314 net/tipc/discover.c 		d->timer_intv = TIPC_DISC_INIT;
d                 316 net/tipc/discover.c 		d->timer_intv *= 2;
d                 317 net/tipc/discover.c 		if (d->num_nodes && d->timer_intv > TIPC_DISC_SLOW)
d                 318 net/tipc/discover.c 			d->timer_intv = TIPC_DISC_SLOW;
d                 319 net/tipc/discover.c 		else if (!d->num_nodes && d->timer_intv > TIPC_DISC_FAST)
d                 320 net/tipc/discover.c 			d->timer_intv = TIPC_DISC_FAST;
d                 321 net/tipc/discover.c 		msg_set_type(buf_msg(d->skb), DSC_REQ_MSG);
d                 322 net/tipc/discover.c 		msg_set_prevnode(buf_msg(d->skb), tn->trial_addr);
d                 325 net/tipc/discover.c 	mod_timer(&d->timer, jiffies + d->timer_intv);
d                 326 net/tipc/discover.c 	memcpy(&maddr, &d->dest, sizeof(maddr));
d                 327 net/tipc/discover.c 	skb = skb_clone(d->skb, GFP_ATOMIC);
d                 328 net/tipc/discover.c 	bearer_id = d->bearer_id;
d                 330 net/tipc/discover.c 	spin_unlock_bh(&d->lock);
d                 348 net/tipc/discover.c 	struct tipc_discoverer *d;
d                 350 net/tipc/discover.c 	d = kmalloc(sizeof(*d), GFP_ATOMIC);
d                 351 net/tipc/discover.c 	if (!d)
d                 353 net/tipc/discover.c 	d->skb = tipc_buf_acquire(MAX_H_SIZE + NODE_ID_LEN, GFP_ATOMIC);
d                 354 net/tipc/discover.c 	if (!d->skb) {
d                 355 net/tipc/discover.c 		kfree(d);
d                 358 net/tipc/discover.c 	tipc_disc_init_msg(net, d->skb, DSC_REQ_MSG, b);
d                 363 net/tipc/discover.c 		msg_set_type(buf_msg(d->skb), DSC_TRIAL_MSG);
d                 365 net/tipc/discover.c 	memcpy(&d->dest, dest, sizeof(*dest));
d                 366 net/tipc/discover.c 	d->net = net;
d                 367 net/tipc/discover.c 	d->bearer_id = b->identity;
d                 368 net/tipc/discover.c 	d->domain = b->domain;
d                 369 net/tipc/discover.c 	d->num_nodes = 0;
d                 370 net/tipc/discover.c 	d->timer_intv = TIPC_DISC_INIT;
d                 371 net/tipc/discover.c 	spin_lock_init(&d->lock);
d                 372 net/tipc/discover.c 	timer_setup(&d->timer, tipc_disc_timeout, 0);
d                 373 net/tipc/discover.c 	mod_timer(&d->timer, jiffies + d->timer_intv);
d                 374 net/tipc/discover.c 	b->disc = d;
d                 375 net/tipc/discover.c 	*skb = skb_clone(d->skb, GFP_ATOMIC);
d                 383 net/tipc/discover.c void tipc_disc_delete(struct tipc_discoverer *d)
d                 385 net/tipc/discover.c 	del_timer_sync(&d->timer);
d                 386 net/tipc/discover.c 	kfree_skb(d->skb);
d                 387 net/tipc/discover.c 	kfree(d);
d                 398 net/tipc/discover.c 	struct tipc_discoverer *d = b->disc;
d                 402 net/tipc/discover.c 	spin_lock_bh(&d->lock);
d                 403 net/tipc/discover.c 	tipc_disc_init_msg(net, d->skb, DSC_REQ_MSG, b);
d                 404 net/tipc/discover.c 	d->net = net;
d                 405 net/tipc/discover.c 	d->bearer_id = b->identity;
d                 406 net/tipc/discover.c 	d->domain = b->domain;
d                 407 net/tipc/discover.c 	d->num_nodes = 0;
d                 408 net/tipc/discover.c 	d->timer_intv = TIPC_DISC_INIT;
d                 409 net/tipc/discover.c 	memcpy(&maddr, &d->dest, sizeof(maddr));
d                 410 net/tipc/discover.c 	mod_timer(&d->timer, jiffies + d->timer_intv);
d                 411 net/tipc/discover.c 	skb = skb_clone(d->skb, GFP_ATOMIC);
d                 412 net/tipc/discover.c 	spin_unlock_bh(&d->lock);
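The tipc/discover.c fragments re-arm a timer whose interval doubles each round and is clamped differently depending on whether any peer nodes are known: slow polling once neighbours exist, faster probing while still alone. A sketch of just that interval policy, with the constants treated as placeholders rather than the kernel's jiffies-based values:

    #define DISC_FAST 1000    /* illustrative bounds in ms */
    #define DISC_SLOW 60000

    /* Compute the next discovery interval from the current one. */
    static unsigned long next_disc_interval(unsigned long intv, int num_nodes)
    {
    	intv *= 2;
    	if (num_nodes && intv > DISC_SLOW)
    		intv = DISC_SLOW;      /* peers known: settle on slow polling */
    	else if (!num_nodes && intv > DISC_FAST)
    		intv = DISC_FAST;      /* still alone: keep probing quickly */
    	return intv;
    }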
d                 258 net/tipc/msg.h static inline void msg_set_syn(struct tipc_msg *m, u32 d)
d                 260 net/tipc/msg.h 	msg_set_bits(m, 0, 17, 1, d);
d                 268 net/tipc/msg.h static inline void msg_set_dest_droppable(struct tipc_msg *m, u32 d)
d                 270 net/tipc/msg.h 	msg_set_bits(m, 0, 19, 1, d);
d                 278 net/tipc/msg.h static inline void msg_set_is_keepalive(struct tipc_msg *m, u32 d)
d                 280 net/tipc/msg.h 	msg_set_bits(m, 0, 19, 1, d);
d                 288 net/tipc/msg.h static inline void msg_set_src_droppable(struct tipc_msg *m, u32 d)
d                 290 net/tipc/msg.h 	msg_set_bits(m, 0, 18, 1, d);
d                 298 net/tipc/msg.h static inline void msg_set_is_rcast(struct tipc_msg *m, bool d)
d                 300 net/tipc/msg.h 	msg_set_bits(m, 0, 18, 0x1, d);
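The msg.h setters above all funnel into msg_set_bits(m, w, pos, mask, val), which clears a masked field inside one 32-bit header word and ORs in the new value. A generic sketch of that helper on a plain host-order word (the real one also converts to and from network byte order):

    #include <stdint.h>

    /* Replace the field described by (pos, mask) inside *word with val.
     * mask is the field's width mask before shifting, as in msg_set_bits(). */
    static void set_bits(uint32_t *word, int pos, uint32_t mask, uint32_t val)
    {
    	*word &= ~(mask << pos);          /* clear the old field */
    	*word |= (val & mask) << pos;     /* install the new value */
    }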
d                 741 net/vmw_vsock/hyperv_transport.c 			 struct vsock_transport_recv_notify_data *d)
d                 748 net/vmw_vsock/hyperv_transport.c 			      struct vsock_transport_recv_notify_data *d)
d                 755 net/vmw_vsock/hyperv_transport.c 				struct vsock_transport_recv_notify_data *d)
d                 763 net/vmw_vsock/hyperv_transport.c 				 struct vsock_transport_recv_notify_data *d)
d                 770 net/vmw_vsock/hyperv_transport.c 			 struct vsock_transport_send_notify_data *d)
d                 777 net/vmw_vsock/hyperv_transport.c 			      struct vsock_transport_send_notify_data *d)
d                 784 net/vmw_vsock/hyperv_transport.c 				struct vsock_transport_send_notify_data *d)
d                 791 net/vmw_vsock/hyperv_transport.c 				 struct vsock_transport_send_notify_data *d)
d                 148 net/wireless/sysfs.c static const void *wiphy_namespace(struct device *d)
d                 150 net/wireless/sysfs.c 	struct wiphy *wiphy = container_of(d, struct wiphy, dev);
d                 208 net/x25/x25_in.c static int x25_state3_machine(struct sock *sk, struct sk_buff *skb, int frametype, int ns, int nr, int q, int d, int m)
d                 389 net/x25/x25_in.c 	int queued = 0, frametype, ns, nr, q, d, m;
d                 394 net/x25/x25_in.c 	frametype = x25_decode(sk, skb, &ns, &nr, &q, &d, &m);
d                 404 net/x25/x25_in.c 		queued = x25_state3_machine(sk, skb, frametype, ns, nr, q, d, m);
d                 261 net/x25/x25_subr.c 	       int *d, int *m)
d                 270 net/x25/x25_subr.c 	*ns = *nr = *q = *d = *m = 0;
d                 316 net/x25/x25_subr.c 			*d  = (frame[0] & X25_D_BIT) == X25_D_BIT;
d                 325 net/x25/x25_subr.c 			*d  = (frame[0] & X25_D_BIT) == X25_D_BIT;
d                 272 net/xdp/xsk_queue.h static inline bool xskq_is_valid_desc(struct xsk_queue *q, struct xdp_desc *d,
d                 276 net/xdp/xsk_queue.h 		if (!xskq_is_valid_addr_unaligned(q, d->addr, d->len, umem))
d                 279 net/xdp/xsk_queue.h 		if (d->len > umem->chunk_size_nohr || d->options) {
d                 287 net/xdp/xsk_queue.h 	if (!xskq_is_valid_addr(q, d->addr))
d                 290 net/xdp/xsk_queue.h 	if (((d->addr + d->len) & q->chunk_mask) != (d->addr & q->chunk_mask) ||
d                 291 net/xdp/xsk_queue.h 	    d->options) {
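xsk_queue.h validates an aligned descriptor by checking that addr and addr + len map to the same fixed-size chunk (via the chunk mask) and that no option bits are set. A standalone version of that test:

    #include <stdint.h>
    #include <stdbool.h>

    /* chunk_mask selects the chunk number, e.g. ~(chunk_size - 1)
     * for a power-of-two chunk size. */
    static bool desc_is_valid(uint64_t addr, uint32_t len, uint32_t options,
    			  uint64_t chunk_mask)
    {
    	if (options)
    		return false;    /* no option bits allowed */
    	/* The descriptor must not straddle a chunk boundary. */
    	return ((addr + len) & chunk_mask) == (addr & chunk_mask);
    }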
d                   7 samples/bpf/hash_func01.h #define get16bits(d) (*((const __u16 *) (d)))
d                  64 samples/livepatch/livepatch-shadow-fix1.c 	struct dummy *d;
d                  67 samples/livepatch/livepatch-shadow-fix1.c 	d = kzalloc(sizeof(*d), GFP_KERNEL);
d                  68 samples/livepatch/livepatch-shadow-fix1.c 	if (!d)
d                  71 samples/livepatch/livepatch-shadow-fix1.c 	d->jiffies_expire = jiffies +
d                  81 samples/livepatch/livepatch-shadow-fix1.c 		kfree(d);
d                  85 samples/livepatch/livepatch-shadow-fix1.c 	klp_shadow_alloc(d, SV_LEAK, sizeof(leak), GFP_KERNEL,
d                  89 samples/livepatch/livepatch-shadow-fix1.c 		__func__, d, d->jiffies_expire);
d                  91 samples/livepatch/livepatch-shadow-fix1.c 	return d;
d                  96 samples/livepatch/livepatch-shadow-fix1.c 	void *d = obj;
d                 101 samples/livepatch/livepatch-shadow-fix1.c 			 __func__, d, *shadow_leak);
d                 104 samples/livepatch/livepatch-shadow-fix1.c static void livepatch_fix1_dummy_free(struct dummy *d)
d                 114 samples/livepatch/livepatch-shadow-fix1.c 	shadow_leak = klp_shadow_get(d, SV_LEAK);
d                 116 samples/livepatch/livepatch-shadow-fix1.c 		klp_shadow_free(d, SV_LEAK, livepatch_fix1_dummy_leak_dtor);
d                 118 samples/livepatch/livepatch-shadow-fix1.c 		pr_info("%s: dummy @ %p leaked!\n", __func__, d);
d                 120 samples/livepatch/livepatch-shadow-fix1.c 	kfree(d);
d                  41 samples/livepatch/livepatch-shadow-fix2.c static bool livepatch_fix2_dummy_check(struct dummy *d, unsigned long jiffies)
d                  50 samples/livepatch/livepatch-shadow-fix2.c 	shadow_count = klp_shadow_get_or_alloc(d, SV_COUNTER,
d                  56 samples/livepatch/livepatch-shadow-fix2.c 	return time_after(jiffies, d->jiffies_expire);
d                  61 samples/livepatch/livepatch-shadow-fix2.c 	void *d = obj;
d                  66 samples/livepatch/livepatch-shadow-fix2.c 			 __func__, d, *shadow_leak);
d                  69 samples/livepatch/livepatch-shadow-fix2.c static void livepatch_fix2_dummy_free(struct dummy *d)
d                  75 samples/livepatch/livepatch-shadow-fix2.c 	shadow_leak = klp_shadow_get(d, SV_LEAK);
d                  77 samples/livepatch/livepatch-shadow-fix2.c 		klp_shadow_free(d, SV_LEAK, livepatch_fix2_dummy_leak_dtor);
d                  79 samples/livepatch/livepatch-shadow-fix2.c 		pr_info("%s: dummy @ %p leaked!\n", __func__, d);
d                  85 samples/livepatch/livepatch-shadow-fix2.c 	shadow_count = klp_shadow_get(d, SV_COUNTER);
d                  88 samples/livepatch/livepatch-shadow-fix2.c 			__func__, d, *shadow_count);
d                  89 samples/livepatch/livepatch-shadow-fix2.c 		klp_shadow_free(d, SV_COUNTER, NULL);
d                  92 samples/livepatch/livepatch-shadow-fix2.c 	kfree(d);
d                  97 samples/livepatch/livepatch-shadow-mod.c 	struct dummy *d;
d                 100 samples/livepatch/livepatch-shadow-mod.c 	d = kzalloc(sizeof(*d), GFP_KERNEL);
d                 101 samples/livepatch/livepatch-shadow-mod.c 	if (!d)
d                 104 samples/livepatch/livepatch-shadow-mod.c 	d->jiffies_expire = jiffies +
d                 110 samples/livepatch/livepatch-shadow-mod.c 		kfree(d);
d                 115 samples/livepatch/livepatch-shadow-mod.c 		__func__, d, d->jiffies_expire);
d                 117 samples/livepatch/livepatch-shadow-mod.c 	return d;
d                 120 samples/livepatch/livepatch-shadow-mod.c static __used noinline void dummy_free(struct dummy *d)
d                 123 samples/livepatch/livepatch-shadow-mod.c 		__func__, d, d->jiffies_expire);
d                 125 samples/livepatch/livepatch-shadow-mod.c 	kfree(d);
d                 128 samples/livepatch/livepatch-shadow-mod.c static __used noinline bool dummy_check(struct dummy *d,
d                 131 samples/livepatch/livepatch-shadow-mod.c 	return time_after(jiffies, d->jiffies_expire);
d                 145 samples/livepatch/livepatch-shadow-mod.c 	struct dummy *d;
d                 147 samples/livepatch/livepatch-shadow-mod.c 	d = dummy_alloc();
d                 148 samples/livepatch/livepatch-shadow-mod.c 	if (!d)
d                 152 samples/livepatch/livepatch-shadow-mod.c 	list_add(&d->list, &dummy_list);
d                 170 samples/livepatch/livepatch-shadow-mod.c 	struct dummy *d, *tmp;
d                 177 samples/livepatch/livepatch-shadow-mod.c 	list_for_each_entry_safe(d, tmp, &dummy_list, list) {
d                 180 samples/livepatch/livepatch-shadow-mod.c 		if (dummy_check(d, j)) {
d                 181 samples/livepatch/livepatch-shadow-mod.c 			list_del(&d->list);
d                 182 samples/livepatch/livepatch-shadow-mod.c 			dummy_free(d);
d                 203 samples/livepatch/livepatch-shadow-mod.c 	struct dummy *d, *tmp;
d                 210 samples/livepatch/livepatch-shadow-mod.c 	list_for_each_entry_safe(d, tmp, &dummy_list, list) {
d                 211 samples/livepatch/livepatch-shadow-mod.c 		list_del(&d->list);
d                 212 samples/livepatch/livepatch-shadow-mod.c 		dummy_free(d);
d                 300 samples/mic/mpssd/mpssd.c 	struct mic_device_desc *d;
d                 305 samples/mic/mpssd/mpssd.c 		i += mic_total_desc_size(d)) {
d                 306 samples/mic/mpssd/mpssd.c 		d = dp + i;
d                 309 samples/mic/mpssd/mpssd.c 		if (d->type == 0)
d                 312 samples/mic/mpssd/mpssd.c 		if (d->type == -1)
d                 316 samples/mic/mpssd/mpssd.c 			mic->name, __func__, d->type, d);
d                 318 samples/mic/mpssd/mpssd.c 		if (d->type == (__u8)type)
d                 319 samples/mic/mpssd/mpssd.c 			return d;
d                   8 scripts/dtc/data.c void data_free(struct data d)
d                  12 scripts/dtc/data.c 	m = d.markers;
d                  20 scripts/dtc/data.c 	if (d.val)
d                  21 scripts/dtc/data.c 		free(d.val);
d                  24 scripts/dtc/data.c struct data data_grow_for(struct data d, int xlen)
d                  30 scripts/dtc/data.c 		return d;
d                  32 scripts/dtc/data.c 	nd = d;
d                  36 scripts/dtc/data.c 	while ((d.len + xlen) > newsize)
d                  39 scripts/dtc/data.c 	nd.val = xrealloc(d.val, newsize);
d                  46 scripts/dtc/data.c 	struct data d;
d                  48 scripts/dtc/data.c 	d = data_grow_for(empty_data, len);
d                  50 scripts/dtc/data.c 	d.len = len;
d                  51 scripts/dtc/data.c 	memcpy(d.val, mem, len);
d                  53 scripts/dtc/data.c 	return d;
d                  59 scripts/dtc/data.c 	struct data d;
d                  62 scripts/dtc/data.c 	d = data_add_marker(empty_data, TYPE_STRING, NULL);
d                  63 scripts/dtc/data.c 	d = data_grow_for(d, len + 1);
d                  65 scripts/dtc/data.c 	q = d.val;
d                  72 scripts/dtc/data.c 		q[d.len++] = c;
d                  75 scripts/dtc/data.c 	q[d.len++] = '\0';
d                  76 scripts/dtc/data.c 	return d;
d                  81 scripts/dtc/data.c 	struct data d = empty_data;
d                  83 scripts/dtc/data.c 	d = data_add_marker(d, TYPE_NONE, NULL);
d                  84 scripts/dtc/data.c 	while (!feof(f) && (d.len < maxlen)) {
d                  90 scripts/dtc/data.c 			chunksize = maxlen - d.len;
d                  92 scripts/dtc/data.c 		d = data_grow_for(d, chunksize);
d                  93 scripts/dtc/data.c 		ret = fread(d.val + d.len, 1, chunksize, f);
d                  98 scripts/dtc/data.c 		if (d.len + ret < d.len)
d                 101 scripts/dtc/data.c 		d.len += ret;
d                 104 scripts/dtc/data.c 	return d;
d                 107 scripts/dtc/data.c struct data data_append_data(struct data d, const void *p, int len)
d                 109 scripts/dtc/data.c 	d = data_grow_for(d, len);
d                 110 scripts/dtc/data.c 	memcpy(d.val + d.len, p, len);
d                 111 scripts/dtc/data.c 	d.len += len;
d                 112 scripts/dtc/data.c 	return d;
d                 115 scripts/dtc/data.c struct data data_insert_at_marker(struct data d, struct marker *m,
d                 118 scripts/dtc/data.c 	d = data_grow_for(d, len);
d                 119 scripts/dtc/data.c 	memmove(d.val + m->offset + len, d.val + m->offset, d.len - m->offset);
d                 120 scripts/dtc/data.c 	memcpy(d.val + m->offset, p, len);
d                 121 scripts/dtc/data.c 	d.len += len;
d                 127 scripts/dtc/data.c 	return d;
d                 130 scripts/dtc/data.c static struct data data_append_markers(struct data d, struct marker *m)
d                 132 scripts/dtc/data.c 	struct marker **mp = &d.markers;
d                 138 scripts/dtc/data.c 	return d;
d                 143 scripts/dtc/data.c 	struct data d;
d                 146 scripts/dtc/data.c 	d = data_append_markers(data_append_data(d1, d2.val, d2.len), m2);
d                 155 scripts/dtc/data.c 	return d;
d                 158 scripts/dtc/data.c struct data data_append_integer(struct data d, uint64_t value, int bits)
d                 168 scripts/dtc/data.c 		return data_append_data(d, &value_8, 1);
d                 172 scripts/dtc/data.c 		return data_append_data(d, &value_16, 2);
d                 176 scripts/dtc/data.c 		return data_append_data(d, &value_32, 4);
d                 180 scripts/dtc/data.c 		return data_append_data(d, &value_64, 8);
d                 187 scripts/dtc/data.c struct data data_append_re(struct data d, uint64_t address, uint64_t size)
d                 194 scripts/dtc/data.c 	return data_append_data(d, &re, sizeof(re));
d                 197 scripts/dtc/data.c struct data data_append_cell(struct data d, cell_t word)
d                 199 scripts/dtc/data.c 	return data_append_integer(d, word, sizeof(word) * 8);
d                 202 scripts/dtc/data.c struct data data_append_addr(struct data d, uint64_t addr)
d                 204 scripts/dtc/data.c 	return data_append_integer(d, addr, sizeof(addr) * 8);
d                 207 scripts/dtc/data.c struct data data_append_byte(struct data d, uint8_t byte)
d                 209 scripts/dtc/data.c 	return data_append_data(d, &byte, 1);
d                 212 scripts/dtc/data.c struct data data_append_zeroes(struct data d, int len)
d                 214 scripts/dtc/data.c 	d = data_grow_for(d, len);
d                 216 scripts/dtc/data.c 	memset(d.val + d.len, 0, len);
d                 217 scripts/dtc/data.c 	d.len += len;
d                 218 scripts/dtc/data.c 	return d;
d                 221 scripts/dtc/data.c struct data data_append_align(struct data d, int align)
d                 223 scripts/dtc/data.c 	int newlen = ALIGN(d.len, align);
d                 224 scripts/dtc/data.c 	return data_append_zeroes(d, newlen - d.len);
d                 227 scripts/dtc/data.c struct data data_add_marker(struct data d, enum markertype type, char *ref)
d                 232 scripts/dtc/data.c 	m->offset = d.len;
d                 237 scripts/dtc/data.c 	return data_append_markers(d, m);
d                 240 scripts/dtc/data.c bool data_is_one_string(struct data d)
d                 243 scripts/dtc/data.c 	int len = d.len;
d                 249 scripts/dtc/data.c 		if (d.val[i] == '\0')
d                 252 scripts/dtc/data.c 	if (d.val[len-1] != '\0')
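dtc's struct data is a growable byte buffer: data_grow_for() reserves space by doubling the allocation and the data_append_*() helpers copy bytes at the current length. A stripped-down sketch of that grow/append core in portable C, leaving out dtc's marker lists:

    #include <stdlib.h>
    #include <string.h>

    struct buf {
    	size_t len;     /* bytes in use */
    	size_t cap;     /* bytes allocated */
    	char *val;
    };

    /* Ensure room for xlen more bytes, doubling capacity as needed. */
    static void buf_grow_for(struct buf *b, size_t xlen)
    {
    	size_t newcap = b->cap ? b->cap : 16;

    	while (b->len + xlen > newcap)
    		newcap *= 2;
    	if (newcap != b->cap) {
    		b->val = realloc(b->val, newcap);   /* error handling omitted */
    		b->cap = newcap;
    	}
    }

    static void buf_append(struct buf *b, const void *p, size_t len)
    {
    	buf_grow_for(b, len);
    	memcpy(b->val + b->len, p, len);
    	b->len += len;
    }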
d                 307 scripts/dtc/dtc-parser.y 			struct data d;
d                 315 scripts/dtc/dtc-parser.y 			d = data_copy_file(f, $8);
d                 317 scripts/dtc/dtc-parser.y 			$$ = data_merge($1, d);
d                 323 scripts/dtc/dtc-parser.y 			struct data d = empty_data;
d                 325 scripts/dtc/dtc-parser.y 			d = data_copy_file(f, -1);
d                 327 scripts/dtc/dtc-parser.y 			$$ = data_merge($1, d);
d                  99 scripts/dtc/dtc.h void data_free(struct data d);
d                 101 scripts/dtc/dtc.h struct data data_grow_for(struct data d, int xlen);
d                 107 scripts/dtc/dtc.h struct data data_append_data(struct data d, const void *p, int len);
d                 108 scripts/dtc/dtc.h struct data data_insert_at_marker(struct data d, struct marker *m,
d                 111 scripts/dtc/dtc.h struct data data_append_cell(struct data d, cell_t word);
d                 112 scripts/dtc/dtc.h struct data data_append_integer(struct data d, uint64_t word, int bits);
d                 113 scripts/dtc/dtc.h struct data data_append_re(struct data d, uint64_t address, uint64_t size);
d                 114 scripts/dtc/dtc.h struct data data_append_addr(struct data d, uint64_t addr);
d                 115 scripts/dtc/dtc.h struct data data_append_byte(struct data d, uint8_t byte);
d                 116 scripts/dtc/dtc.h struct data data_append_zeroes(struct data d, int len);
d                 117 scripts/dtc/dtc.h struct data data_append_align(struct data d, int align);
d                 119 scripts/dtc/dtc.h struct data data_add_marker(struct data d, enum markertype type, char *ref);
d                 121 scripts/dtc/dtc.h bool data_is_one_string(struct data d);
d                  70 scripts/dtc/flattree.c static void bin_emit_data(void *e, struct data d)
d                  74 scripts/dtc/flattree.c 	*dtbuf = data_append_data(*dtbuf, d.val, d.len);
d                 149 scripts/dtc/flattree.c static void asm_emit_data(void *e, struct data d)
d                 153 scripts/dtc/flattree.c 	struct marker *m = d.markers;
d                 158 scripts/dtc/flattree.c 	while ((d.len - off) >= sizeof(uint32_t)) {
d                 159 scripts/dtc/flattree.c 		asm_emit_cell(e, fdt32_to_cpu(*((fdt32_t *)(d.val+off))));
d                 163 scripts/dtc/flattree.c 	while ((d.len - off) >= 1) {
d                 164 scripts/dtc/flattree.c 		fprintf(f, "\t.byte\t0x%hhx\n", d.val[off]);
d                 168 scripts/dtc/flattree.c 	assert(off == d.len);
d                 220 scripts/dtc/flattree.c static int stringtable_insert(struct data *d, const char *str)
d                 226 scripts/dtc/flattree.c 	for (i = 0; i < d->len; i++) {
d                 227 scripts/dtc/flattree.c 		if (streq(str, d->val + i))
d                 231 scripts/dtc/flattree.c 	*d = data_append_data(*d, str, strlen(str)+1);
d                 297 scripts/dtc/flattree.c 	struct data d = empty_data;
d                 301 scripts/dtc/flattree.c 		d = data_append_re(d, re->address, re->size);
d                 307 scripts/dtc/flattree.c 		d = data_append_re(d, 0, 0);
d                 310 scripts/dtc/flattree.c 	return d;
d                 629 scripts/dtc/flattree.c 	struct data d = empty_data;
d                 634 scripts/dtc/flattree.c 	d = data_grow_for(d, len);
d                 635 scripts/dtc/flattree.c 	d.len = len;
d                 637 scripts/dtc/flattree.c 	flat_read_chunk(inb, d.val, len);
d                 641 scripts/dtc/flattree.c 	return d;
d                  13 scripts/dtc/fstree.c 	DIR *d;
d                  18 scripts/dtc/fstree.c 	d = opendir(dirname);
d                  19 scripts/dtc/fstree.c 	if (!d)
d                  24 scripts/dtc/fstree.c 	while ((de = readdir(d)) != NULL) {
d                  64 scripts/dtc/fstree.c 	closedir(d);
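fstree.c walks a directory with the classic POSIX opendir()/readdir()/closedir() loop, creating a node or property per entry. A minimal standalone version of that loop, printing names instead of building a tree:

    #include <dirent.h>
    #include <stdio.h>

    /* Print every entry in 'dirname'; returns -1 if it cannot be opened. */
    static int list_dir(const char *dirname)
    {
    	DIR *d = opendir(dirname);
    	struct dirent *de;

    	if (!d)
    		return -1;
    	while ((de = readdir(d)) != NULL)
    		printf("%s\n", de->d_name);
    	closedir(d);
    	return 0;
    }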
d                 233 scripts/dtc/livetree.c 	struct data d = empty_data;
d                 237 scripts/dtc/livetree.c 		d = data_add_marker(d, TYPE_STRING, ref);
d                 238 scripts/dtc/livetree.c 		d = data_append_data(d, ref, strlen(ref) + 1);
d                 240 scripts/dtc/livetree.c 		p = build_property("target-path", d, NULL);
d                 242 scripts/dtc/livetree.c 		d = data_add_marker(d, REF_PHANDLE, ref);
d                 243 scripts/dtc/livetree.c 		d = data_append_integer(d, 0xffffffff, 32);
d                 245 scripts/dtc/livetree.c 		p = build_property("target", d, NULL);
d                 342 scripts/dtc/livetree.c 	struct data d;
d                 347 scripts/dtc/livetree.c 		d = data_add_marker(p->val, type, name);
d                 348 scripts/dtc/livetree.c 		d = data_append_data(d, data, len);
d                 349 scripts/dtc/livetree.c 		p->val = d;
d                 351 scripts/dtc/livetree.c 		d = data_add_marker(empty_data, type, name);
d                 352 scripts/dtc/livetree.c 		d = data_append_data(d, data, len);
d                 353 scripts/dtc/livetree.c 		p = build_property(name, d, NULL);
d                 596 scripts/dtc/livetree.c 	struct data d = empty_data;
d                 606 scripts/dtc/livetree.c 	d = data_add_marker(d, TYPE_UINT32, NULL);
d                 607 scripts/dtc/livetree.c 	d = data_append_cell(d, phandle);
d                 611 scripts/dtc/livetree.c 		add_property(node, build_property("linux,phandle", d, NULL));
d                 615 scripts/dtc/livetree.c 		add_property(node, build_property("phandle", d, NULL));
d                  28 scripts/dtc/util.c 	char *d = xmalloc(len);
d                  30 scripts/dtc/util.c 	memcpy(d, s, len);
d                  32 scripts/dtc/util.c 	return d;
d                 254 scripts/gcc-plugins/gcc-common.h static inline void add_local_decl(struct function *fun, tree d)
d                 256 scripts/gcc-plugins/gcc-common.h 	gcc_assert(TREE_CODE(d) == VAR_DECL);
d                 257 scripts/gcc-plugins/gcc-common.h 	fun->local_decls = tree_cons(NULL_TREE, d, fun->local_decls);
d                 142 scripts/gcc-plugins/randomize_layout_plugin.c typedef struct ranctx { u64 a; u64 b; u64 c; u64 d; } ranctx;
d                 148 scripts/gcc-plugins/randomize_layout_plugin.c 	x->b = x->c + rot(x->d, 37);
d                 149 scripts/gcc-plugins/randomize_layout_plugin.c 	x->c = x->d + e;
d                 150 scripts/gcc-plugins/randomize_layout_plugin.c 	x->d = e + x->a;
d                 151 scripts/gcc-plugins/randomize_layout_plugin.c 	return x->d;
d                 160 scripts/gcc-plugins/randomize_layout_plugin.c 	x->d = seed[3];
d                 133 scripts/kconfig/confdata.c 	char *d, c;
d                 139 scripts/kconfig/confdata.c 	d = depfile_path + depfile_prefix_len;
d                 143 scripts/kconfig/confdata.c 		*d++ = (c == '_') ? '/' : tolower(c);
d                 144 scripts/kconfig/confdata.c 	strcpy(d, ".h");
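The confdata.c fragment above rewrites a config symbol name into a header path: underscores become slashes, upper case becomes lower case, and a ".h" suffix is appended. A self-contained sketch of that transformation as read from the excerpt; the function and buffer names here are hypothetical:

#include <ctype.h>
#include <stdio.h>
#include <string.h>

/* "FOO_BAR" -> "foo/bar.h", mirroring the loop in the excerpt above. */
static void sym_to_header_path(const char *sym, char *out)
{
	char *d = out;
	char c;

	while ((c = *sym++) != '\0')
		*d++ = (c == '_') ? '/' : tolower((unsigned char)c);
	strcpy(d, ".h");
}

int main(void)
{
	char buf[64];

	sym_to_header_path("FOO_BAR", buf);
	printf("%s\n", buf);	/* prints "foo/bar.h" */
	return 0;
}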
d                1343 scripts/kconfig/qconf.cc 	QDesktopWidget *d = configApp->desktop();
d                1350 scripts/kconfig/qconf.cc 	width = configSettings->value("/window width", d->width() - 64).toInt();
d                1351 scripts/kconfig/qconf.cc 	height = configSettings->value("/window height", d->height() - 64).toInt();
d                 951 scripts/mod/file2alias.c static void dmi_ascii_filter(char *d, const char *s)
d                 956 scripts/mod/file2alias.c 			*(d++) = *s;
d                 958 scripts/mod/file2alias.c 	*d = 0;
d                1321 scripts/mod/modpost.c 	Elf64_Sword d;
d                1338 scripts/mod/modpost.c 		d = sym->st_value - addr;
d                1339 scripts/mod/modpost.c 		if (d < 0)
d                1340 scripts/mod/modpost.c 			d = addr - sym->st_value;
d                1341 scripts/mod/modpost.c 		if (d < distance) {
d                1342 scripts/mod/modpost.c 			distance = d;
d                2450 scripts/mod/modpost.c 		char *symname, *namespace, *modname, *d, *export;
d                2468 scripts/mod/modpost.c 		crc = strtoul(line, &d, 16);
d                2469 scripts/mod/modpost.c 		if (*symname == '\0' || *modname == '\0' || *d != '\0')
d                  67 scripts/mod/sumversion.c #define ROUND1(a,b,c,d,k,s) (a = lshift(a + F(b,c,d) + k, s))
d                  68 scripts/mod/sumversion.c #define ROUND2(a,b,c,d,k,s) (a = lshift(a + G(b,c,d) + k + (uint32_t)0x5A827999,s))
d                  69 scripts/mod/sumversion.c #define ROUND3(a,b,c,d,k,s) (a = lshift(a + H(b,c,d) + k + (uint32_t)0x6ED9EBA1,s))
d                  90 scripts/mod/sumversion.c 	uint32_t a, b, c, d;
d                  95 scripts/mod/sumversion.c 	d = hash[3];
d                  97 scripts/mod/sumversion.c 	ROUND1(a, b, c, d, in[0], 3);
d                  98 scripts/mod/sumversion.c 	ROUND1(d, a, b, c, in[1], 7);
d                  99 scripts/mod/sumversion.c 	ROUND1(c, d, a, b, in[2], 11);
d                 100 scripts/mod/sumversion.c 	ROUND1(b, c, d, a, in[3], 19);
d                 101 scripts/mod/sumversion.c 	ROUND1(a, b, c, d, in[4], 3);
d                 102 scripts/mod/sumversion.c 	ROUND1(d, a, b, c, in[5], 7);
d                 103 scripts/mod/sumversion.c 	ROUND1(c, d, a, b, in[6], 11);
d                 104 scripts/mod/sumversion.c 	ROUND1(b, c, d, a, in[7], 19);
d                 105 scripts/mod/sumversion.c 	ROUND1(a, b, c, d, in[8], 3);
d                 106 scripts/mod/sumversion.c 	ROUND1(d, a, b, c, in[9], 7);
d                 107 scripts/mod/sumversion.c 	ROUND1(c, d, a, b, in[10], 11);
d                 108 scripts/mod/sumversion.c 	ROUND1(b, c, d, a, in[11], 19);
d                 109 scripts/mod/sumversion.c 	ROUND1(a, b, c, d, in[12], 3);
d                 110 scripts/mod/sumversion.c 	ROUND1(d, a, b, c, in[13], 7);
d                 111 scripts/mod/sumversion.c 	ROUND1(c, d, a, b, in[14], 11);
d                 112 scripts/mod/sumversion.c 	ROUND1(b, c, d, a, in[15], 19);
d                 114 scripts/mod/sumversion.c 	ROUND2(a, b, c, d,in[ 0], 3);
d                 115 scripts/mod/sumversion.c 	ROUND2(d, a, b, c, in[4], 5);
d                 116 scripts/mod/sumversion.c 	ROUND2(c, d, a, b, in[8], 9);
d                 117 scripts/mod/sumversion.c 	ROUND2(b, c, d, a, in[12], 13);
d                 118 scripts/mod/sumversion.c 	ROUND2(a, b, c, d, in[1], 3);
d                 119 scripts/mod/sumversion.c 	ROUND2(d, a, b, c, in[5], 5);
d                 120 scripts/mod/sumversion.c 	ROUND2(c, d, a, b, in[9], 9);
d                 121 scripts/mod/sumversion.c 	ROUND2(b, c, d, a, in[13], 13);
d                 122 scripts/mod/sumversion.c 	ROUND2(a, b, c, d, in[2], 3);
d                 123 scripts/mod/sumversion.c 	ROUND2(d, a, b, c, in[6], 5);
d                 124 scripts/mod/sumversion.c 	ROUND2(c, d, a, b, in[10], 9);
d                 125 scripts/mod/sumversion.c 	ROUND2(b, c, d, a, in[14], 13);
d                 126 scripts/mod/sumversion.c 	ROUND2(a, b, c, d, in[3], 3);
d                 127 scripts/mod/sumversion.c 	ROUND2(d, a, b, c, in[7], 5);
d                 128 scripts/mod/sumversion.c 	ROUND2(c, d, a, b, in[11], 9);
d                 129 scripts/mod/sumversion.c 	ROUND2(b, c, d, a, in[15], 13);
d                 131 scripts/mod/sumversion.c 	ROUND3(a, b, c, d,in[ 0], 3);
d                 132 scripts/mod/sumversion.c 	ROUND3(d, a, b, c, in[8], 9);
d                 133 scripts/mod/sumversion.c 	ROUND3(c, d, a, b, in[4], 11);
d                 134 scripts/mod/sumversion.c 	ROUND3(b, c, d, a, in[12], 15);
d                 135 scripts/mod/sumversion.c 	ROUND3(a, b, c, d, in[2], 3);
d                 136 scripts/mod/sumversion.c 	ROUND3(d, a, b, c, in[10], 9);
d                 137 scripts/mod/sumversion.c 	ROUND3(c, d, a, b, in[6], 11);
d                 138 scripts/mod/sumversion.c 	ROUND3(b, c, d, a, in[14], 15);
d                 139 scripts/mod/sumversion.c 	ROUND3(a, b, c, d, in[1], 3);
d                 140 scripts/mod/sumversion.c 	ROUND3(d, a, b, c, in[9], 9);
d                 141 scripts/mod/sumversion.c 	ROUND3(c, d, a, b, in[5], 11);
d                 142 scripts/mod/sumversion.c 	ROUND3(b, c, d, a, in[13], 15);
d                 143 scripts/mod/sumversion.c 	ROUND3(a, b, c, d, in[3], 3);
d                 144 scripts/mod/sumversion.c 	ROUND3(d, a, b, c, in[11], 9);
d                 145 scripts/mod/sumversion.c 	ROUND3(c, d, a, b, in[7], 11);
d                 146 scripts/mod/sumversion.c 	ROUND3(b, c, d, a, in[15], 15);
d                 151 scripts/mod/sumversion.c 	hash[3] += d;
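The ROUND1/2/3 macros above are the three passes of MD4 over a 16-word block; the surrounding lines load a, b, c, d from hash[0..3] and add them back afterwards, the usual MD4 block structure. For reference, the standard MD4 auxiliary functions and left-rotate these rounds rely on, in their canonical forms; the exact (equivalent) expressions used in sumversion.c are not shown in this listing:

/* Canonical MD4 definitions; sumversion.c may spell them differently. */
#define F(x, y, z)	(((x) & (y)) | (~(x) & (z)))
#define G(x, y, z)	(((x) & (y)) | ((x) & (z)) | ((y) & (z)))
#define H(x, y, z)	((x) ^ (y) ^ (z))
#define lshift(x, s)	(((x) << (s)) | ((x) >> (32 - (s))))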
d                 314 security/apparmor/domain.c 	struct dentry *d;
d                 325 security/apparmor/domain.c 	d = bprm->file->f_path.dentry;
d                 328 security/apparmor/domain.c 		size = vfs_getxattr_alloc(d, profile->xattrs[i], &value,
d                 153 security/apparmor/policy_unpack.c 	struct aa_loaddata *d = container_of(work, struct aa_loaddata, work);
d                 154 security/apparmor/policy_unpack.c 	struct aa_ns *ns = aa_get_ns(d->ns);
d                 158 security/apparmor/policy_unpack.c 		__aa_fs_remove_rawdata(d);
d                 163 security/apparmor/policy_unpack.c 	kzfree(d->hash);
d                 164 security/apparmor/policy_unpack.c 	kzfree(d->name);
d                 165 security/apparmor/policy_unpack.c 	kvfree(d->data);
d                 166 security/apparmor/policy_unpack.c 	kzfree(d);
d                 171 security/apparmor/policy_unpack.c 	struct aa_loaddata *d = container_of(kref, struct aa_loaddata, count);
d                 173 security/apparmor/policy_unpack.c 	if (d) {
d                 174 security/apparmor/policy_unpack.c 		INIT_WORK(&d->work, do_loaddata_free);
d                 175 security/apparmor/policy_unpack.c 		schedule_work(&d->work);
d                 181 security/apparmor/policy_unpack.c 	struct aa_loaddata *d;
d                 183 security/apparmor/policy_unpack.c 	d = kzalloc(sizeof(*d), GFP_KERNEL);
d                 184 security/apparmor/policy_unpack.c 	if (d == NULL)
d                 186 security/apparmor/policy_unpack.c 	d->data = kvzalloc(size, GFP_KERNEL);
d                 187 security/apparmor/policy_unpack.c 	if (!d->data) {
d                 188 security/apparmor/policy_unpack.c 		kfree(d);
d                 191 security/apparmor/policy_unpack.c 	kref_init(&d->count);
d                 192 security/apparmor/policy_unpack.c 	INIT_LIST_HEAD(&d->list);
d                 194 security/apparmor/policy_unpack.c 	return d;
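The policy_unpack.c lines above show a deferred-free pattern: the kref release handler only queues a work item, and the work handler does the actual freeing (presumably so the free runs in process context). A generic sketch of that pattern with hypothetical names, not AppArmor's types:

#include <linux/kernel.h>
#include <linux/kref.h>
#include <linux/slab.h>
#include <linux/workqueue.h>

struct blob {
	struct kref count;
	struct work_struct work;
	void *payload;
};

/* Runs later from a workqueue: the actual freeing happens here. */
static void blob_free_work(struct work_struct *work)
{
	struct blob *b = container_of(work, struct blob, work);

	kfree(b->payload);
	kfree(b);
}

/* kref release handler: hand the free off to the workqueue. */
static void blob_release(struct kref *kref)
{
	struct blob *b = container_of(kref, struct blob, count);

	INIT_WORK(&b->work, blob_free_work);
	schedule_work(&b->work);
}

A caller would drop its reference with kref_put(&b->count, blob_release), which invokes the release handler once the count reaches zero.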
d                 648 security/integrity/ima/ima_crypto.c static void __init ima_pcrread(u32 idx, struct tpm_digest *d)
d                 653 security/integrity/ima/ima_crypto.c 	if (tpm_pcr_read(ima_tpm_chip, idx, d) != 0)
d                 663 security/integrity/ima/ima_crypto.c 	struct tpm_digest d = { .alg_id = TPM_ALG_SHA1, .digest = {0} };
d                 676 security/integrity/ima/ima_crypto.c 		ima_pcrread(i, &d);
d                 678 security/integrity/ima/ima_crypto.c 		rc = crypto_shash_update(shash, d.digest, TPM_DIGEST_SIZE);
d                 272 security/keys/keyring.c 	const u8 *d;
d                 290 security/keys/keyring.c 		d = index_key->description + sizeof(index_key->desc);
d                 291 security/keys/keyring.c 		d += level * sizeof(long);
d                 297 security/keys/keyring.c 			chunk |= *d++;
d                 389 security/selinux/ss/avtab.c 				   struct avtab_datum *d, void *p),
d                 550 security/selinux/ss/avtab.c 			 struct avtab_datum *d, void *p)
d                 552 security/selinux/ss/avtab.c 	return avtab_insert(a, k, d);
d                  99 security/selinux/ss/avtab.h 				  struct avtab_datum *d, void *p),
d                 268 security/selinux/ss/conditional.c static int cond_insertf(struct avtab *a, struct avtab_key *k, struct avtab_datum *d, void *ptr)
d                 322 security/selinux/ss/conditional.c 	node_ptr = avtab_insert_nonunique(&p->te_cond_avtab, k, d);
d                 124 security/selinux/ss/hashtab.c 		int (*apply)(void *k, void *d, void *args),
d                  55 security/selinux/ss/hashtab.h int hashtab_insert(struct hashtab *h, void *k, void *d);
d                  82 security/selinux/ss/hashtab.h 		int (*apply)(void *k, void *d, void *args),
d                 441 security/selinux/ss/services.c static int dump_masked_av_helper(void *k, void *d, void *args)
d                 443 security/selinux/ss/services.c 	struct perm_datum *pdatum = d;
d                3125 security/selinux/ss/services.c static int get_classes_callback(void *k, void *d, void *args)
d                3127 security/selinux/ss/services.c 	struct class_datum *datum = d;
d                3172 security/selinux/ss/services.c static int get_permissions_callback(void *k, void *d, void *args)
d                3174 security/selinux/ss/services.c 	struct perm_datum *datum = d;
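The hashtab and avtab walkers above take a callback of the shape int (*apply)(void *k, void *d, void *args); dump_masked_av_helper(), get_classes_callback() and get_permissions_callback() are instances of it. A minimal sketch of such a callback with a hypothetical counting purpose; returning non-zero is assumed to abort the walk, as is usual for these iterators:

/* Illustrative callback matching the apply() signature above. */
static int count_entries(void *k, void *d, void *args)
{
	unsigned int *count = args;

	(void)k;	/* key not needed in this example */
	(void)d;	/* datum not needed in this example */
	(*count)++;
	return 0;	/* non-zero is assumed to stop the iteration */
}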
d                 468 security/smack/smack.h 						    struct dentry *d)
d                 470 security/smack/smack.h 	a->a.u.dentry = d;
d                 499 security/smack/smack.h 						    struct dentry *d)
d                 438 security/tomoyo/util.c 	unsigned char d;
d                 481 security/tomoyo/util.c 				d = *string++;
d                 483 security/tomoyo/util.c 				if (d < '0' || d > '7' || e < '0' || e > '7')
d                 485 security/tomoyo/util.c 				c = tomoyo_make_byte(c, d, e);
d                  92 sound/aoa/soundbus/i2sbus/interface.h 	int d;
d                 101 sound/aoa/soundbus/i2sbus/interface.h 		d = div/2-1;
d                 102 sound/aoa/soundbus/i2sbus/interface.h 		if (d == 0x14 || d == 0x13 || d == 0x12 || d == 0x0E)
d                 119 sound/aoa/soundbus/i2sbus/interface.h 	int d;
d                 126 sound/aoa/soundbus/i2sbus/interface.h 		d = div/2-1;
d                 127 sound/aoa/soundbus/i2sbus/interface.h 		if (d == 8 || d == 9) return -1;
d                 172 sound/aoa/soundbus/soundbus.h #define to_soundbus_device(d) container_of(d, struct soundbus_dev, ofdev.dev)
d                 173 sound/aoa/soundbus/soundbus.h #define of_to_soundbus_device(d) container_of(d, struct soundbus_dev, ofdev)
d                 766 sound/core/control.c 	if (info->dimen.d[0] == 0)
d                 770 sound/core/control.c 	for (i = 0; i < ARRAY_SIZE(info->dimen.d); ++i) {
d                 771 sound/core/control.c 		if (info->dimen.d[i] == 0)
d                 773 sound/core/control.c 		members *= info->dimen.d[i];
d                 783 sound/core/control.c 	for (++i; i < ARRAY_SIZE(info->dimen.d); ++i) {
d                 784 sound/core/control.c 		if (info->dimen.d[i] > 0)
d                 324 sound/core/pcm_native.c 	unsigned int d;
d                 379 sound/core/pcm_native.c 		for (d = 0; r->deps[d] >= 0; d++) {
d                 380 sound/core/pcm_native.c 			if (vstamps[r->deps[d]] > rstamps[k])
d                 383 sound/core/pcm_native.c 		if (r->deps[d] < 0)
d                 595 sound/core/seq/oss/seq_oss_synth.c 	memcpy(ev->data.raw8.d, data, 8);
d                1086 sound/core/seq/seq_clientmgr.c 				void *ptr = (void __force *)compat_ptr(event.data.raw32.d[1]);
d                 109 sound/core/seq/seq_ports.h 			 struct snd_seq_client *d, struct snd_seq_client_port *dp,
d                 115 sound/core/seq/seq_ports.h 			    struct snd_seq_client *d, struct snd_seq_client_port *dp,
d                  23 sound/drivers/pcsp/pcsp.h #define CALC_DIV(d) (MAX_DIV >> (d))
d                  65 sound/drivers/vx/vx_mixer.c #define SET_CDC_DATA_VAL(di,d)          ((di).b.ll = (u8) (d))
d                1142 sound/firewire/amdtp-stream.c int amdtp_domain_init(struct amdtp_domain *d)
d                1144 sound/firewire/amdtp-stream.c 	INIT_LIST_HEAD(&d->streams);
d                1154 sound/firewire/amdtp-stream.c void amdtp_domain_destroy(struct amdtp_domain *d)
d                1168 sound/firewire/amdtp-stream.c int amdtp_domain_add_stream(struct amdtp_domain *d, struct amdtp_stream *s,
d                1173 sound/firewire/amdtp-stream.c 	list_for_each_entry(tmp, &d->streams, list) {
d                1178 sound/firewire/amdtp-stream.c 	list_add(&s->list, &d->streams);
d                1191 sound/firewire/amdtp-stream.c int amdtp_domain_start(struct amdtp_domain *d)
d                1196 sound/firewire/amdtp-stream.c 	list_for_each_entry(s, &d->streams, list) {
d                1203 sound/firewire/amdtp-stream.c 		list_for_each_entry(s, &d->streams, list)
d                1215 sound/firewire/amdtp-stream.c void amdtp_domain_stop(struct amdtp_domain *d)
d                1219 sound/firewire/amdtp-stream.c 	list_for_each_entry_safe(s, next, &d->streams, list) {
d                 277 sound/firewire/amdtp-stream.h int amdtp_domain_init(struct amdtp_domain *d);
d                 278 sound/firewire/amdtp-stream.h void amdtp_domain_destroy(struct amdtp_domain *d);
d                 280 sound/firewire/amdtp-stream.h int amdtp_domain_add_stream(struct amdtp_domain *d, struct amdtp_stream *s,
d                 283 sound/firewire/amdtp-stream.h int amdtp_domain_start(struct amdtp_domain *d);
d                 284 sound/firewire/amdtp-stream.h void amdtp_domain_stop(struct amdtp_domain *d);
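The amdtp-stream.h prototypes above describe a domain lifecycle: init, add streams, start, stop, destroy. A hedged sketch of that ordering, assuming the header above; the trailing parameters of amdtp_domain_add_stream() are not visible in this listing, so that call is left as a comment, and the surrounding driver context is hypothetical:

#include "amdtp-stream.h"

/* Sketch only: the call order implied by the amdtp_domain_* prototypes. */
static int example_domain_cycle(struct amdtp_domain *d)
{
	int err;

	err = amdtp_domain_init(d);
	if (err < 0)
		return err;

	/* amdtp_domain_add_stream(d, s, ...) for each stream; the remaining
	 * arguments are not shown in this listing. */

	err = amdtp_domain_start(d);
	if (err >= 0) {
		/* ... streaming runs here ... */
		amdtp_domain_stop(d);
	}

	amdtp_domain_destroy(d);
	return err;
}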
d                  69 sound/firewire/oxfw/oxfw.c 	const char *d, *v, *m;
d                  95 sound/firewire/oxfw/oxfw.c 		d = info->driver_name;
d                  99 sound/firewire/oxfw/oxfw.c 		d = "OXFW";
d                 104 sound/firewire/oxfw/oxfw.c 	strcpy(oxfw->card->driver, d);
d                 232 sound/isa/gus/gus_main.c 	unsigned char d;
d                 240 sound/isa/gus/gus_main.c 	for (idx = 1, d = 0xab; idx < 4; idx++, d++) {
d                 242 sound/isa/gus/gus_main.c 		snd_gf1_poke(gus, local, d);
d                 243 sound/isa/gus/gus_main.c 		snd_gf1_poke(gus, local + 1, d + 1);
d                 244 sound/isa/gus/gus_main.c 		if (snd_gf1_peek(gus, local) != d ||
d                 245 sound/isa/gus/gus_main.c 		    snd_gf1_peek(gus, local + 1) != d + 1 ||
d                 114 sound/isa/gus/gusclassic.c 	unsigned char d;
d                 117 sound/isa/gus/gusclassic.c 	if (((d = snd_gf1_i_look8(gus, SNDRV_GF1_GB_RESET)) & 0x07) != 0) {
d                 118 sound/isa/gus/gusclassic.c 		snd_printdd("[0x%lx] check 1 failed - 0x%x\n", gus->gf1.port, d);
d                 124 sound/isa/gus/gusclassic.c 	if (((d = snd_gf1_i_look8(gus, SNDRV_GF1_GB_RESET)) & 0x07) != 1) {
d                 125 sound/isa/gus/gusclassic.c 		snd_printdd("[0x%lx] check 2 failed - 0x%x\n", gus->gf1.port, d);
d                 150 sound/isa/gus/gusextreme.c 	unsigned char d;
d                 181 sound/isa/gus/gusextreme.c 	if (((d = snd_gf1_i_look8(gus, SNDRV_GF1_GB_RESET)) & 0x07) != 0) {
d                 182 sound/isa/gus/gusextreme.c 		snd_printdd("[0x%lx] check 1 failed - 0x%x\n", gus->gf1.port, d);
d                 188 sound/isa/gus/gusextreme.c 	if (((d = snd_gf1_i_look8(gus, SNDRV_GF1_GB_RESET)) & 0x07) != 1) {
d                 189 sound/isa/gus/gusextreme.c 		snd_printdd("[0x%lx] check 2 failed - 0x%x\n", gus->gf1.port, d);
d                  72 sound/isa/gus/gusmax.c 	unsigned char d;
d                  75 sound/isa/gus/gusmax.c 	if (((d = snd_gf1_i_look8(gus, SNDRV_GF1_GB_RESET)) & 0x07) != 0) {
d                  76 sound/isa/gus/gusmax.c 		snd_printdd("[0x%lx] check 1 failed - 0x%x\n", gus->gf1.port, d);
d                  82 sound/isa/gus/gusmax.c 	if (((d = snd_gf1_i_look8(gus, SNDRV_GF1_GB_RESET)) & 0x07) != 1) {
d                  83 sound/isa/gus/gusmax.c 		snd_printdd("[0x%lx] check 2 failed - 0x%x\n", gus->gf1.port, d);
d                 247 sound/isa/gus/interwave.c 	int d;
d                 250 sound/isa/gus/interwave.c 	if (((d = snd_gf1_i_look8(gus, SNDRV_GF1_GB_RESET)) & 0x07) != 0) {
d                 251 sound/isa/gus/interwave.c 		snd_printdd("[0x%lx] check 1 failed - 0x%x\n", gus->gf1.port, d);
d                 257 sound/isa/gus/interwave.c 	if (((d = snd_gf1_i_look8(gus, SNDRV_GF1_GB_RESET)) & 0x07) != 1) {
d                 258 sound/isa/gus/interwave.c 		snd_printdd("[0x%lx] check 2 failed - 0x%x\n", gus->gf1.port, d);
d                 318 sound/isa/gus/interwave.c 	unsigned char d;
d                 322 sound/isa/gus/interwave.c 		d = 0x55;
d                 325 sound/isa/gus/interwave.c 		     local += 0x40000, d++) {
d                 326 sound/isa/gus/interwave.c 			snd_gf1_poke(gus, local, d);
d                 327 sound/isa/gus/interwave.c 			snd_gf1_poke(gus, local + 1, d + 1);
d                 331 sound/isa/gus/interwave.c 			       d,
d                 336 sound/isa/gus/interwave.c 			if (snd_gf1_peek(gus, local) != d ||
d                 337 sound/isa/gus/interwave.c 			    snd_gf1_peek(gus, local + 1) != d + 1 ||
d                 160 sound/isa/msnd/msnd_pinnacle_mixer.c #define update_potm(d, s, ar)						\
d                 162 sound/isa/msnd/msnd_pinnacle_mixer.c 		writeb((dev->left_levels[d] >> 8) *			\
d                 165 sound/isa/msnd/msnd_pinnacle_mixer.c 		writeb((dev->right_levels[d] >> 8) *			\
d                 172 sound/isa/msnd/msnd_pinnacle_mixer.c #define update_pot(d, s, ar)						\
d                 174 sound/isa/msnd/msnd_pinnacle_mixer.c 		writeb(dev->left_levels[d] >> 8,			\
d                 176 sound/isa/msnd/msnd_pinnacle_mixer.c 		writeb(dev->right_levels[d] >> 8,			\
d                 183 sound/isa/msnd/msnd_pinnacle_mixer.c static int snd_msndmix_set(struct snd_msnd *dev, int d, int left, int right)
d                 189 sound/isa/msnd/msnd_pinnacle_mixer.c 	if (d >= LEVEL_ENTRIES)
d                 198 sound/isa/msnd/msnd_pinnacle_mixer.c 	dev->left_levels[d] = wLeft;
d                 199 sound/isa/msnd/msnd_pinnacle_mixer.c 	dev->right_levels[d] = wRight;
d                 201 sound/isa/msnd/msnd_pinnacle_mixer.c 	switch (d) {
d                  32 sound/isa/sb/sb16_csp.c #define CSP_HDR_VALUE(a,b,c,d)	((a) | ((b)<<8) | ((c)<<16) | ((d)<<24))
d                  34 sound/isa/sb/sb16_csp.c #define CSP_HDR_VALUE(a,b,c,d)	((d) | ((c)<<8) | ((b)<<16) | ((a)<<24))
d                 700 sound/isa/sscape.c 	unsigned d;
d                 713 sound/isa/sscape.c 	d = inb(ODIE_ADDR_IO(s->io_base)) & 0xf0;
d                 714 sound/isa/sscape.c 	if ((d & 0x80) != 0)
d                 717 sound/isa/sscape.c 	if (d == 0)
d                 719 sound/isa/sscape.c 	else if ((d & 0x60) != 0)
d                 733 sound/isa/sscape.c 	d = inb(ODIE_DATA_IO(s->io_base));
d                 734 sound/isa/sscape.c 	if (s->type != SSCAPE_VIVO && (d & 0x9f) != 0x0e)
d                 743 sound/isa/sscape.c 	d  = sscape_read_unsafe(s->io_base, GA_HMCTL_REG);
d                 744 sound/isa/sscape.c 	sscape_write_unsafe(s->io_base, GA_HMCTL_REG, d | 0xc0);
d                 747 sound/isa/sscape.c 	for (d = 0; d < 500; d++) {
d                 761 sound/isa/sscape.c 	d  = sscape_read_unsafe(s->io_base, GA_HMCTL_REG) & 0x3f;
d                 762 sound/isa/sscape.c 	sscape_write_unsafe(s->io_base, GA_HMCTL_REG, d);
d                 767 sound/isa/sscape.c 	d = sscape_read_unsafe(s->io_base, GA_HMCTL_REG);
d                 768 sound/isa/sscape.c 	sscape_write_unsafe(s->io_base, GA_HMCTL_REG, d | 0xc0);
d                 770 sound/isa/sscape.c 	for (d = 0; d < 500; d++) {
d                1276 sound/isa/wavefront/wavefront_synth.c 		char d[2];
d                1284 sound/isa/wavefront/wavefront_synth.c 		d[0] = val;
d                1291 sound/isa/wavefront/wavefront_synth.c 		d[1] = val;
d                1294 sound/isa/wavefront/wavefront_synth.c 			demunge_int32 ((unsigned char *) d, 2);
d                1309 sound/isa/wavefront/wavefront_synth.c 	wavefront_drum *drum = &header->hdr.d;
d                1422 sound/isa/wavefront/wavefront_synth.c 		if (copy_from_user (&header->hdr.d, header->hdrptr,
d                 178 sound/pci/asihpi/asihpi.c 	hm.u.d.u.buffer.buffer_size = size_in_bytes;
d                 179 sound/pci/asihpi/asihpi.c 	hm.u.d.u.buffer.pci_address = pci_address;
d                 180 sound/pci/asihpi/asihpi.c 	hm.u.d.u.buffer.command = HPI_BUFFER_CMD_INTERNAL_GRANTADAPTER;
d                 201 sound/pci/asihpi/asihpi.c 	hm.u.d.u.buffer.command = HPI_BUFFER_CMD_INTERNAL_REVOKEADAPTER;
d                1412 sound/pci/asihpi/hpi6000.c 	u32 *p_data = (u32 *)phm->u.d.u.data.pb_data;
d                1418 sound/pci/asihpi/hpi6000.c 	while ((data_sent < (phm->u.d.u.data.data_size & ~3L))
d                1485 sound/pci/asihpi/hpi6000.c 	u32 *p_data = (u32 *)phm->u.d.u.data.pb_data;
d                1490 sound/pci/asihpi/hpi6000.c 	while (data_got < (phm->u.d.u.data.data_size & ~3L)) {
d                1746 sound/pci/asihpi/hpi6000.c 			hm.obj_index = phm->u.d.u.stream.stream_index;
d                1747 sound/pci/asihpi/hpi6000.c 			hm.object = phm->u.d.u.stream.object_type;
d                 733 sound/pci/asihpi/hpi6205.c 	u32 command = phm->u.d.u.buffer.command;
d                 744 sound/pci/asihpi/hpi6205.c 		phm->u.d.u.buffer.buffer_size =
d                 745 sound/pci/asihpi/hpi6205.c 			roundup_pow_of_two(phm->u.d.u.buffer.buffer_size);
d                 748 sound/pci/asihpi/hpi6205.c 		phr->u.d.u.stream_info.data_available =
d                 750 sound/pci/asihpi/hpi6205.c 		phr->u.d.u.stream_info.buffer_size =
d                 751 sound/pci/asihpi/hpi6205.c 			phm->u.d.u.buffer.buffer_size;
d                 754 sound/pci/asihpi/hpi6205.c 			phm->u.d.u.buffer.buffer_size) {
d                 765 sound/pci/asihpi/hpi6205.c 			[phm->obj_index], phm->u.d.u.buffer.buffer_size,
d                 776 sound/pci/asihpi/hpi6205.c 			&phm->u.d.u.buffer.pci_address);
d                 781 sound/pci/asihpi/hpi6205.c 		phr->u.d.u.stream_info.auxiliary_data_available =
d                 782 sound/pci/asihpi/hpi6205.c 			phm->u.d.u.buffer.pci_address;
d                 800 sound/pci/asihpi/hpi6205.c 		if (phm->u.d.u.buffer.buffer_size & (phm->u.d.u.buffer.
d                 804 sound/pci/asihpi/hpi6205.c 				phm->u.d.u.buffer.buffer_size);
d                 809 sound/pci/asihpi/hpi6205.c 			phm->u.d.u.buffer.buffer_size;
d                 816 sound/pci/asihpi/hpi6205.c 		status->size_in_bytes = phm->u.d.u.buffer.buffer_size;
d                 851 sound/pci/asihpi/hpi6205.c 		phr->u.d.u.hostbuffer_info.p_buffer = p_bbm_data;
d                 852 sound/pci/asihpi/hpi6205.c 		phr->u.d.u.hostbuffer_info.p_status = status;
d                 864 sound/pci/asihpi/hpi6205.c 	u32 command = phm->u.d.u.buffer.command;
d                 910 sound/pci/asihpi/hpi6205.c 	if (space_available < phm->u.d.u.data.data_size) {
d                 917 sound/pci/asihpi/hpi6205.c 	if (phm->u.d.u.data.pb_data
d                 922 sound/pci/asihpi/hpi6205.c 		u8 *p_app_data = (u8 *)phm->u.d.u.data.pb_data;
d                 934 sound/pci/asihpi/hpi6205.c 			min(phm->u.d.u.data.data_size,
d                 943 sound/pci/asihpi/hpi6205.c 			phm->u.d.u.data.data_size - l_first_write);
d                 963 sound/pci/asihpi/hpi6205.c 	status->host_index += phm->u.d.u.data.data_size;
d                 982 sound/pci/asihpi/hpi6205.c 	phr->u.d.u.stream_info.state = (u16)status->stream_state;
d                 983 sound/pci/asihpi/hpi6205.c 	phr->u.d.u.stream_info.samples_transferred =
d                 985 sound/pci/asihpi/hpi6205.c 	phr->u.d.u.stream_info.buffer_size = status->size_in_bytes;
d                 986 sound/pci/asihpi/hpi6205.c 	phr->u.d.u.stream_info.data_available =
d                 988 sound/pci/asihpi/hpi6205.c 	phr->u.d.u.stream_info.auxiliary_data_available =
d                1019 sound/pci/asihpi/hpi6205.c 	u32 command = phm->u.d.u.buffer.command;
d                1028 sound/pci/asihpi/hpi6205.c 		phm->u.d.u.buffer.buffer_size =
d                1029 sound/pci/asihpi/hpi6205.c 			roundup_pow_of_two(phm->u.d.u.buffer.buffer_size);
d                1030 sound/pci/asihpi/hpi6205.c 		phr->u.d.u.stream_info.data_available =
d                1032 sound/pci/asihpi/hpi6205.c 		phr->u.d.u.stream_info.buffer_size =
d                1033 sound/pci/asihpi/hpi6205.c 			phm->u.d.u.buffer.buffer_size;
d                1036 sound/pci/asihpi/hpi6205.c 			phm->u.d.u.buffer.buffer_size) {
d                1047 sound/pci/asihpi/hpi6205.c 				obj_index], phm->u.d.u.buffer.buffer_size,
d                1058 sound/pci/asihpi/hpi6205.c 			&phm->u.d.u.buffer.pci_address);
d                1061 sound/pci/asihpi/hpi6205.c 		phr->u.d.u.stream_info.auxiliary_data_available =
d                1062 sound/pci/asihpi/hpi6205.c 			phm->u.d.u.buffer.pci_address;
d                1076 sound/pci/asihpi/hpi6205.c 		if (phm->u.d.u.buffer.buffer_size & (phm->u.d.u.buffer.
d                1080 sound/pci/asihpi/hpi6205.c 				phm->u.d.u.buffer.buffer_size);
d                1086 sound/pci/asihpi/hpi6205.c 			phm->u.d.u.buffer.buffer_size;
d                1093 sound/pci/asihpi/hpi6205.c 		status->size_in_bytes = phm->u.d.u.buffer.buffer_size;
d                1128 sound/pci/asihpi/hpi6205.c 		phr->u.d.u.hostbuffer_info.p_buffer = p_bbm_data;
d                1129 sound/pci/asihpi/hpi6205.c 		phr->u.d.u.hostbuffer_info.p_status = status;
d                1141 sound/pci/asihpi/hpi6205.c 	u32 command = phm->u.d.u.buffer.command;
d                1185 sound/pci/asihpi/hpi6205.c 	u8 *p_app_data = (u8 *)phm->u.d.u.data.pb_data;
d                1195 sound/pci/asihpi/hpi6205.c 	if (data_available < phm->u.d.u.data.data_size) {
d                1212 sound/pci/asihpi/hpi6205.c 			min(phm->u.d.u.data.data_size,
d                1222 sound/pci/asihpi/hpi6205.c 			phm->u.d.u.data.data_size - l_first_read);
d                1224 sound/pci/asihpi/hpi6205.c 	status->host_index += phm->u.d.u.data.data_size;
d                1242 sound/pci/asihpi/hpi6205.c 	phr->u.d.u.stream_info.state = (u16)status->stream_state;
d                1243 sound/pci/asihpi/hpi6205.c 	phr->u.d.u.stream_info.samples_transferred =
d                1245 sound/pci/asihpi/hpi6205.c 	phr->u.d.u.stream_info.buffer_size = status->size_in_bytes;
d                1246 sound/pci/asihpi/hpi6205.c 	phr->u.d.u.stream_info.data_available =
d                1248 sound/pci/asihpi/hpi6205.c 	phr->u.d.u.stream_info.auxiliary_data_available =
d                2202 sound/pci/asihpi/hpi6205.c 		err = hpi6205_transfer_data(pao, phm->u.d.u.data.pb_data,
d                2203 sound/pci/asihpi/hpi6205.c 			phm->u.d.u.data.data_size, H620_HIF_SEND_DATA);
d                2208 sound/pci/asihpi/hpi6205.c 		err = hpi6205_transfer_data(pao, phm->u.d.u.data.pb_data,
d                2209 sound/pci/asihpi/hpi6205.c 			phm->u.d.u.data.data_size, H620_HIF_GET_DATA);
d                1009 sound/pci/asihpi/hpi_internal.h 		struct hpi_stream_msg d;
d                1071 sound/pci/asihpi/hpi_internal.h 		struct hpi_stream_res d;
d                1180 sound/pci/asihpi/hpi_internal.h 		struct hpi_stream_msg d;
d                1199 sound/pci/asihpi/hpi_internal.h 		struct hpi_stream_res d;
d                 485 sound/pci/asihpi/hpifunc.c 		*pw_state = hr.u.d.u.stream_info.state;
d                 487 sound/pci/asihpi/hpifunc.c 		*pbuffer_size = hr.u.d.u.stream_info.buffer_size;
d                 489 sound/pci/asihpi/hpifunc.c 		*pdata_to_play = hr.u.d.u.stream_info.data_available;
d                 491 sound/pci/asihpi/hpifunc.c 		*psamples_played = hr.u.d.u.stream_info.samples_transferred;
d                 494 sound/pci/asihpi/hpifunc.c 			hr.u.d.u.stream_info.auxiliary_data_available;
d                 507 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.data.pb_data = (u8 *)pb_data;
d                 508 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.data.data_size = bytes_to_write;
d                 510 sound/pci/asihpi/hpifunc.c 	hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
d                 597 sound/pci/asihpi/hpifunc.c 	hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
d                 614 sound/pci/asihpi/hpifunc.c 	hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
d                 630 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.velocity = velocity;
d                 648 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.pio.punch_in_sample = punch_in_sample;
d                 649 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.pio.punch_out_sample = punch_out_sample;
d                 665 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.data.format.channels = mode;
d                 683 sound/pci/asihpi/hpifunc.c 				hr.u.d.u.stream_info.data_available /
d                 701 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.data.pb_data = (u8 *)p_anc_frame_buffer;
d                 702 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.data.data_size =
d                 705 sound/pci/asihpi/hpifunc.c 	if (hm.u.d.u.data.data_size <= anc_frame_buffer_size_in_bytes)
d                 722 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.time_scale = time_scale;
d                 738 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.data.data_size = size_in_bytes;
d                 757 sound/pci/asihpi/hpifunc.c 			*pp_buffer = hr.u.d.u.hostbuffer_info.p_buffer;
d                 759 sound/pci/asihpi/hpifunc.c 			*pp_status = hr.u.d.u.hostbuffer_info.p_status;
d                 791 sound/pci/asihpi/hpifunc.c 			&hm.u.d.u.stream.stream_index))
d                 798 sound/pci/asihpi/hpifunc.c 		hm.u.d.u.stream.object_type = c_obj_type;
d                 823 sound/pci/asihpi/hpifunc.c 		*poutstream_map = hr.u.d.u.group_info.outstream_group_map;
d                 825 sound/pci/asihpi/hpifunc.c 		*pinstream_map = hr.u.d.u.group_info.instream_group_map;
d                 899 sound/pci/asihpi/hpifunc.c 	hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
d                 915 sound/pci/asihpi/hpifunc.c 	hpi_format_to_msg(&hm.u.d.u.data.format, p_format);
d                 931 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.data.data_size = bytes_to_read;
d                 932 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.data.pb_data = pb_data;
d                1013 sound/pci/asihpi/hpifunc.c 		*pw_state = hr.u.d.u.stream_info.state;
d                1015 sound/pci/asihpi/hpifunc.c 		*pbuffer_size = hr.u.d.u.stream_info.buffer_size;
d                1017 sound/pci/asihpi/hpifunc.c 		*pdata_recorded = hr.u.d.u.stream_info.data_available;
d                1019 sound/pci/asihpi/hpifunc.c 		*psamples_recorded = hr.u.d.u.stream_info.samples_transferred;
d                1022 sound/pci/asihpi/hpifunc.c 			hr.u.d.u.stream_info.auxiliary_data_available;
d                1035 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.data.format.attributes = bytes_per_frame;
d                1036 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.data.format.format = (mode << 8) | (alignment & 0xff);
d                1037 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.data.format.channels = idle_bit;
d                1053 sound/pci/asihpi/hpifunc.c 			(hr.u.d.u.stream_info.buffer_size -
d                1054 sound/pci/asihpi/hpifunc.c 			hr.u.d.u.stream_info.data_available) /
d                1071 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.data.pb_data = (u8 *)p_anc_frame_buffer;
d                1072 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.data.data_size =
d                1075 sound/pci/asihpi/hpifunc.c 	if (hm.u.d.u.data.data_size <= anc_frame_buffer_size_in_bytes)
d                1092 sound/pci/asihpi/hpifunc.c 	hm.u.d.u.data.data_size = size_in_bytes;
d                1111 sound/pci/asihpi/hpifunc.c 			*pp_buffer = hr.u.d.u.hostbuffer_info.p_buffer;
d                1113 sound/pci/asihpi/hpifunc.c 			*pp_status = hr.u.d.u.hostbuffer_info.p_status;
d                1147 sound/pci/asihpi/hpifunc.c 			&hm.u.d.u.stream.stream_index))
d                1155 sound/pci/asihpi/hpifunc.c 		hm.u.d.u.stream.object_type = c_obj_type;
d                1181 sound/pci/asihpi/hpifunc.c 		*poutstream_map = hr.u.d.u.group_info.outstream_group_map;
d                1183 sound/pci/asihpi/hpifunc.c 		*pinstream_map = hr.u.d.u.group_info.instream_group_map;
d                 113 sound/pci/asihpi/hpimsgx.c 	struct hpi_stream_res d;
d                 209 sound/pci/asihpi/hpioctl.c 				ptr = (u16 __user *)hm->m0.u.d.u.data.pb_data;
d                 210 sound/pci/asihpi/hpioctl.c 				size = hm->m0.u.d.u.data.data_size;
d                 241 sound/pci/asihpi/hpioctl.c 				hm->m0.u.d.u.data.pb_data = pa->p_buffer;
d                 195 sound/pci/au88x0/au88x0.h 				  int dir, int fmt, int d,
d                 201 sound/pci/au88x0/au88x0.h static void vortex_wtdma_setmode(vortex_t * vortex, int wtdma, int ie, int fmt, int d,	/*int e, */
d                  47 sound/pci/au88x0/au88x0_a3d.c a3dsrc_SetAtmosTarget(a3dsrc_t * a, short aa, short b, short c, short d,
d                  53 sound/pci/au88x0/au88x0_a3d.c 		(e << 0x10) | d);
d                  62 sound/pci/au88x0/au88x0_a3d.c a3dsrc_SetAtmosCurrent(a3dsrc_t * a, short aa, short b, short c, short d,
d                  68 sound/pci/au88x0/au88x0_a3d.c 		(e << 0x10) | d);
d                  89 sound/pci/au88x0/au88x0_a3d.c 		      short *d, short *e)
d                 560 sound/pci/au88x0/au88x0_core.c 			unsigned int cr, unsigned int b, int sweep, int d,
d                 617 sound/pci/au88x0/au88x0_core.c 		(sl << 0x9) | (sweep << 0x8) | ((esi & 0xf) << 4) | d);
d                1413 sound/pci/au88x0/au88x0_core.c vortex_wtdma_setmode(vortex_t * vortex, int wtdma, int ie, int fmt, int d,
d                1419 sound/pci/au88x0/au88x0_core.c 	dma->dma_unknown = d;
d                2856 sound/pci/au88x0/au88x0_core.c 	short int d, this_148;
d                2858 sound/pci/au88x0/au88x0_core.c 	d = ((bits >> 3) * nch);
d                2859 sound/pci/au88x0/au88x0_core.c 	this_148 = 0xbb80 / d;
d                1784 sound/pci/ctxfi/cthw20k1.c #define CTLBITS(a, b, c, d)	(((a) << 24) | ((b) << 16) | ((c) << 8) | (d))
d                1265 sound/pci/echoaudio/echoaudio.c 	uinfo->dimen.d[0] = num_busses_out(chip);
d                1266 sound/pci/echoaudio/echoaudio.c 	uinfo->dimen.d[1] = num_busses_in(chip);
d                1337 sound/pci/echoaudio/echoaudio.c 	uinfo->dimen.d[0] = num_busses_out(chip);
d                1338 sound/pci/echoaudio/echoaudio.c 	uinfo->dimen.d[1] = num_pipes_out(chip);
d                1722 sound/pci/echoaudio/echoaudio.c 	uinfo->dimen.d[0] = 3;	/* Out, In, Virt */
d                1724 sound/pci/echoaudio/echoaudio.c 	uinfo->dimen.d[0] = 2;	/* Out, In */
d                1726 sound/pci/echoaudio/echoaudio.c 	uinfo->dimen.d[1] = 16;	/* 16 channels */
d                1727 sound/pci/echoaudio/echoaudio.c 	uinfo->dimen.d[2] = 2;	/* 0=level, 1=peak */
d                1498 sound/pci/emu10k1/emufx.c 		int j, k, l, d;
d                1502 sound/pci/emu10k1/emufx.c 			d = playback + SND_EMU10K1_PLAYBACK_CHANNELS + z * 2 + j;
d                1504 sound/pci/emu10k1/emufx.c 			A_OP(icode, &ptr, iMAC0, A_C_00000000, A_C_00000000, A_GPR(d), A_GPR(BASS_GPR + 0 + j));
d                1506 sound/pci/emu10k1/emufx.c 			A_OP(icode, &ptr, iMACMV, A_GPR(k), A_GPR(d), A_GPR(k), A_GPR(BASS_GPR + 2 + j));
d                1518 sound/pci/emu10k1/emufx.c 			A_OP(icode, &ptr, iACC3, A_GPR(d), A_GPR(l+2), A_C_00000000, A_C_00000000);
d                2220 sound/pci/emu10k1/emufx.c 		int j, k, l, d;
d                2224 sound/pci/emu10k1/emufx.c 			d = playback + SND_EMU10K1_PLAYBACK_CHANNELS + z * 2 + j;
d                2226 sound/pci/emu10k1/emufx.c 			OP(icode, &ptr, iMAC0, C_00000000, C_00000000, GPR(d), GPR(BASS_GPR + 0 + j));
d                2228 sound/pci/emu10k1/emufx.c 			OP(icode, &ptr, iMACMV, GPR(k), GPR(d), GPR(k), GPR(BASS_GPR + 2 + j));
d                2240 sound/pci/emu10k1/emufx.c 			OP(icode, &ptr, iACC3, GPR(d), GPR(l+2), C_00000000, C_00000000);
d                 216 sound/pci/ens1370.c #define   ES_1370_CODEC_WRITE(a,d) ((((a)&0xff)<<8)|(((d)&0xff)<<0))
d                 222 sound/pci/ens1370.c #define   ES_1371_CODEC_WRITE(a,d) ((((a)&0x7f)<<16)|(((d)&0xffff)<<0))
d                1460 sound/pci/es1938.c 	unsigned char *s, *d;
d                1465 sound/pci/es1938.c 	for (s = saved_regs, d = chip->saved_regs; *s; s++, d++)
d                1466 sound/pci/es1938.c 		*d = snd_es1938_reg_read(chip, *s);
d                1481 sound/pci/es1938.c 	unsigned char *s, *d;
d                1494 sound/pci/es1938.c 	for (s = saved_regs, d = chip->saved_regs; *s; s++, d++) {
d                1496 sound/pci/es1938.c 			snd_es1938_mixer_write(chip, *s, *d);
d                1498 sound/pci/es1938.c 			snd_es1938_write(chip, *s, *d);
d                2280 sound/pci/hda/hda_codec.c 	const hda_nid_t *d;
d                2284 sound/pci/hda/hda_codec.c 	d = codec->slave_dig_outs;
d                2285 sound/pci/hda/hda_codec.c 	if (!d)
d                2287 sound/pci/hda/hda_codec.c 	for (; *d; d++)
d                2288 sound/pci/hda/hda_codec.c 		snd_hdac_regmap_update(&codec->core, *d,
d                3577 sound/pci/hda/hda_codec.c 		const hda_nid_t *d;
d                3578 sound/pci/hda/hda_codec.c 		for (d = codec->slave_dig_outs; *d; d++)
d                3579 sound/pci/hda/hda_codec.c 			snd_hda_codec_setup_stream(codec, *d, stream_tag, 0,
d                3592 sound/pci/hda/hda_codec.c 		const hda_nid_t *d;
d                3593 sound/pci/hda/hda_codec.c 		for (d = codec->slave_dig_outs; *d; d++)
d                3594 sound/pci/hda/hda_codec.c 			snd_hda_codec_cleanup_stream(codec, *d);
d                 990 sound/pci/oxygen/xonar_wm87x6.c #define WM8776_FIELD_CTL_VOLUME(a, b, c, d, e, f, g, h, tlv_p) { \
d                 991 sound/pci/oxygen/xonar_wm87x6.c 	_WM8776_FIELD_CTL(a " Capture Volume", b, c, d, e, f, g, h), \
d                 233 sound/pci/riptide/riptide.c #define SEND_RMEM(p,b,c,d)         sendcmd(p,PARM|RESP,RMEM|BYTE1(b),LONG0(c),RET(d))	/* memory access for firmware write */
d                 243 sound/pci/riptide/riptide.c #define SEND_GPOS(p,b,c,d)         sendcmd(p,PARM|RESP,GPOS,BYTE3(c)|BYTE2(b),RET(d))	/* get position in dma */
d                 244 sound/pci/riptide/riptide.c #define SEND_SETF(p,b,c,d,e,f,g)   sendcmd(p,PARM,SETF|WORD1(b)|BYTE3(c),d|BYTE1(e)|BYTE2(f)|BYTE3(g),RET(0))	/* set sample format at mixer */
d                 245 sound/pci/riptide/riptide.c #define SEND_GSTS(p,b,c,d)         sendcmd(p,PARM|RESP,GSTS,BYTE3(c)|BYTE2(b),RET(d))
d                 246 sound/pci/riptide/riptide.c #define SEND_NGPOS(p,b,c,d)        sendcmd(p,PARM|RESP,NGPOS,BYTE3(c)|BYTE2(b),RET(d))
d                 250 sound/pci/riptide/riptide.c #define SEND_RSSV(p,b,c,d)         sendcmd(p,PARM|RESP,RSSV,BYTE2(b)|BYTE3(c),RET(d))
d                 251 sound/pci/riptide/riptide.c #define SEND_LSEL(p,b,c,d,e,f,g,h) sendcmd(p,PARM,LSEL|BYTE1(b)|BYTE2(c)|BYTE3(d),BYTE0(e)|BYTE1(f)|BYTE2(g)|BYTE3(h),RET(0))	/* select paths for internal connections */
d                 252 sound/pci/riptide/riptide.c #define SEND_SSRC(p,b,c,d,e)       sendcmd(p,PARM,SSRC|BYTE1(b)|WORD2(c),WORD0(d)|WORD2(e),RET(0))	/* configure source */
d                 256 sound/pci/riptide/riptide.c #define SEND_SDGV(p,b,c,d,e)       sendcmd(p,PARM,SDGV|BYTE2(b)|BYTE3(c),WORD0(d)|WORD2(e),RET(0))	/* set digital mixer */
d                 257 sound/pci/riptide/riptide.c #define SEND_RDGV(p,b,c,d)         sendcmd(p,PARM|RESP,RDGV|BYTE2(b)|BYTE3(c),0,RET(d))	/* read digital mixer */
d                 262 sound/pci/riptide/riptide.c #define SEND_TXAC(p,b,c,d,e,f)     sendcmd(p,PARM,TXAC|BYTE1(b)|WORD2(c),WORD0(d)|BYTE2(e)|BYTE3(f),RET(0))
d                 263 sound/pci/riptide/riptide.c #define SEND_RXAC(p,b,c,d)         sendcmd(p,PARM|RESP,RXAC,BYTE2(b)|BYTE3(c),RET(d))
d                 629 sound/soc/codecs/ak4642.c #define ak4642_of_parse_mcko(d) 0
d                 110 sound/soc/codecs/rl6231.c 	unsigned int d;
d                 115 sound/soc/codecs/rl6231.c 	d = in / max;
d                 117 sound/soc/codecs/rl6231.c 		d++;
d                 119 sound/soc/codecs/rl6231.c 	while (div % d != 0)
d                 120 sound/soc/codecs/rl6231.c 		d++;
d                 123 sound/soc/codecs/rl6231.c 	return d;
d                 187 sound/soc/codecs/tas2552.c 		unsigned int d, q, t;
d                 197 sound/soc/codecs/tas2552.c 		d = t % pll_clkin;
d                 199 sound/soc/codecs/tas2552.c 		q = d / (t + 1);
d                 200 sound/soc/codecs/tas2552.c 		d = q + ((9999 - pll_clkin % 10000) * (d / t - q)) / 10000;
d                 202 sound/soc/codecs/tas2552.c 		if (d && (pll_clkin < 512000 || pll_clkin > 9200000)) {
d                 223 sound/soc/codecs/tas2552.c 			      TAS2552_PLL_D_UPPER(d));
d                 225 sound/soc/codecs/tas2552.c 			      TAS2552_PLL_D_LOWER(d));
d                  36 sound/soc/codecs/tlv320aic32x4-clk.c 	u16 d;
d                  98 sound/soc/codecs/tlv320aic32x4-clk.c 	settings->d = val << 8;
d                 103 sound/soc/codecs/tlv320aic32x4-clk.c 	settings->d |= val;
d                 129 sound/soc/codecs/tlv320aic32x4-clk.c 	ret = regmap_write(pll->regmap, AIC32X4_PLLDMSB, (settings->d >> 8));
d                 132 sound/soc/codecs/tlv320aic32x4-clk.c 	ret = regmap_write(pll->regmap, AIC32X4_PLLDLSB, (settings->d & 0xff));
d                 149 sound/soc/codecs/tlv320aic32x4-clk.c 				((settings->j * 10000) + settings->d);
d                 188 sound/soc/codecs/tlv320aic32x4-clk.c 	settings->d = (u32) multiplier % 10000;
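The tlv320aic32x4-clk.c lines above store the PLL fraction as a J.D pair with four decimal digits of fraction: reading the excerpt, d is multiplier % 10000 (and j is presumably multiplier / 10000), so that (j * 10000) + d reconstructs the scaled multiplier used in the rate calculation. Worked example with illustrative numbers: a scaled multiplier of 81920 splits into j = 8 and d = 1920, i.e. an effective PLL multiplier of 8.1920.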
d                1051 sound/soc/codecs/tlv320aic3x.c 	u16 d, pll_d = 1;
d                1161 sound/soc/codecs/tlv320aic3x.c 		d = ((2048 * p * fsref) - j * aic3x->sysclk)
d                1164 sound/soc/codecs/tlv320aic3x.c 		clk = (10000 * j + d) / (10 * p);
d                1169 sound/soc/codecs/tlv320aic3x.c 			pll_j = j; pll_d = d; pll_r = 1; pll_p = p;
d                  30 sound/soc/intel/haswell/sst-haswell-pcm.c #define SST_OLD_POSITION(d, r, o) ((d) +		\
d                 165 sound/soc/intel/skylake/skl-debug.c void skl_debug_init_module(struct skl_debug *d,
d                 169 sound/soc/intel/skylake/skl-debug.c 	debugfs_create_file(w->name, 0444, d->modules, mconfig,
d                 176 sound/soc/intel/skylake/skl-debug.c 	struct skl_debug *d = file->private_data;
d                 177 sound/soc/intel/skylake/skl-debug.c 	struct sst_dsp *sst = d->skl->dsp;
d                 190 sound/soc/intel/skylake/skl-debug.c 	memset(d->fw_read_buff, 0, FW_REG_BUF);
d                 193 sound/soc/intel/skylake/skl-debug.c 		__ioread32_copy(d->fw_read_buff, fw_reg_addr, w0_stat_sz >> 2);
d                 197 sound/soc/intel/skylake/skl-debug.c 		hex_dump_to_buffer(d->fw_read_buff + offset, 16, 16, 4,
d                 220 sound/soc/intel/skylake/skl-debug.c 	struct skl_debug *d;
d                 222 sound/soc/intel/skylake/skl-debug.c 	d = devm_kzalloc(&skl->pci->dev, sizeof(*d), GFP_KERNEL);
d                 223 sound/soc/intel/skylake/skl-debug.c 	if (!d)
d                 227 sound/soc/intel/skylake/skl-debug.c 	d->fs = debugfs_create_dir("dsp", skl->component->debugfs_root);
d                 229 sound/soc/intel/skylake/skl-debug.c 	d->skl = skl;
d                 230 sound/soc/intel/skylake/skl-debug.c 	d->dev = &skl->pci->dev;
d                 233 sound/soc/intel/skylake/skl-debug.c 	d->modules = debugfs_create_dir("modules", d->fs);
d                 235 sound/soc/intel/skylake/skl-debug.c 	debugfs_create_file("fw_soft_regs_rd", 0444, d->fs, d,
d                 238 sound/soc/intel/skylake/skl-debug.c 	return d;
d                 243 sound/soc/intel/skylake/skl-debug.c 	struct skl_debug *d = skl->debugfs;
d                 245 sound/soc/intel/skylake/skl-debug.c 	debugfs_remove_recursive(d->fs);
d                 247 sound/soc/intel/skylake/skl-debug.c 	d = NULL;
d                 193 sound/soc/intel/skylake/skl.h void skl_debug_init_module(struct skl_debug *d,
d                 205 sound/soc/intel/skylake/skl.h static inline void skl_debug_init_module(struct skl_debug *d,
d                 334 sound/soc/sh/fsi.c #define fsi_reg_write(p, r, d)\
d                 335 sound/soc/sh/fsi.c 	__fsi_reg_write((p->base + REG_##r), d)
d                 340 sound/soc/sh/fsi.c #define fsi_reg_mask_set(p, r, m, d)\
d                 341 sound/soc/sh/fsi.c 	__fsi_reg_mask_set((p->base + REG_##r), m, d)
d                 357 sound/soc/sh/fsi.c #define fsi_master_mask_set(p, r, m, d) _fsi_master_mask_set(p, MST_##r, m, d)
d                 358 sound/soc/sh/fsi.c #define fsi_core_mask_set(p, r, m, d)  _fsi_master_mask_set(p, p->core->r, m, d)
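The fsi.c macros above paste the register name onto the REG_ and MST_ prefixes at preprocessing time, so callers name registers symbolically. An illustration of the expansion, using a hypothetical register name rather than one confirmed by this listing:

/* Illustration only, with a hypothetical register name:
 *	fsi_reg_write(fsi, DIR, val)
 * expands to
 *	__fsi_reg_write((fsi->base + REG_DIR), val)
 * where REG_DIR stands in for whichever register offsets fsi.c defines.
 */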
d                 240 sound/soc/sh/hac.c 	int d = substream->stream == SNDRV_PCM_STREAM_PLAYBACK ? 0 : 1;
d                 244 sound/soc/sh/hac.c 		HACREG(HACACR) |= d ?  ACR_DMARX16 :  ACR_DMATX16;
d                 245 sound/soc/sh/hac.c 		HACREG(HACACR) &= d ? ~ACR_DMARX20 : ~ACR_DMATX20;
d                 248 sound/soc/sh/hac.c 		HACREG(HACACR) &= d ? ~ACR_DMARX16 : ~ACR_DMATX16;
d                 249 sound/soc/sh/hac.c 		HACREG(HACACR) |= d ?  ACR_DMARX20 :  ACR_DMATX20;
d                1635 sound/soc/soc-dapm.c 	struct snd_soc_dapm_context *d;
d                1729 sound/soc/soc-dapm.c 	list_for_each_entry(d, &card->dapm_list, list) {
d                1730 sound/soc/soc-dapm.c 		soc_dapm_async_complete(d);
d                1793 sound/soc/soc-dapm.c 	struct snd_soc_dapm_context *d = data;
d                1797 sound/soc/soc-dapm.c 	if (d->bias_level == SND_SOC_BIAS_OFF &&
d                1798 sound/soc/soc-dapm.c 	    d->target_bias_level != SND_SOC_BIAS_OFF) {
d                1799 sound/soc/soc-dapm.c 		if (d->dev)
d                1800 sound/soc/soc-dapm.c 			pm_runtime_get_sync(d->dev);
d                1802 sound/soc/soc-dapm.c 		ret = snd_soc_dapm_set_bias_level(d, SND_SOC_BIAS_STANDBY);
d                1804 sound/soc/soc-dapm.c 			dev_err(d->dev,
d                1809 sound/soc/soc-dapm.c 	if ((d->target_bias_level == SND_SOC_BIAS_ON &&
d                1810 sound/soc/soc-dapm.c 	     d->bias_level != SND_SOC_BIAS_ON) ||
d                1811 sound/soc/soc-dapm.c 	    (d->target_bias_level != SND_SOC_BIAS_ON &&
d                1812 sound/soc/soc-dapm.c 	     d->bias_level == SND_SOC_BIAS_ON)) {
d                1813 sound/soc/soc-dapm.c 		ret = snd_soc_dapm_set_bias_level(d, SND_SOC_BIAS_PREPARE);
d                1815 sound/soc/soc-dapm.c 			dev_err(d->dev,
d                1825 sound/soc/soc-dapm.c 	struct snd_soc_dapm_context *d = data;
d                1829 sound/soc/soc-dapm.c 	if (d->bias_level == SND_SOC_BIAS_PREPARE &&
d                1830 sound/soc/soc-dapm.c 	    (d->target_bias_level == SND_SOC_BIAS_STANDBY ||
d                1831 sound/soc/soc-dapm.c 	     d->target_bias_level == SND_SOC_BIAS_OFF)) {
d                1832 sound/soc/soc-dapm.c 		ret = snd_soc_dapm_set_bias_level(d, SND_SOC_BIAS_STANDBY);
d                1834 sound/soc/soc-dapm.c 			dev_err(d->dev, "ASoC: Failed to apply standby bias: %d\n",
d                1839 sound/soc/soc-dapm.c 	if (d->bias_level == SND_SOC_BIAS_STANDBY &&
d                1840 sound/soc/soc-dapm.c 	    d->target_bias_level == SND_SOC_BIAS_OFF) {
d                1841 sound/soc/soc-dapm.c 		ret = snd_soc_dapm_set_bias_level(d, SND_SOC_BIAS_OFF);
d                1843 sound/soc/soc-dapm.c 			dev_err(d->dev, "ASoC: Failed to turn off bias: %d\n",
d                1846 sound/soc/soc-dapm.c 		if (d->dev)
d                1847 sound/soc/soc-dapm.c 			pm_runtime_put(d->dev);
d                1851 sound/soc/soc-dapm.c 	if (d->bias_level == SND_SOC_BIAS_PREPARE &&
d                1852 sound/soc/soc-dapm.c 	    d->target_bias_level == SND_SOC_BIAS_ON) {
d                1853 sound/soc/soc-dapm.c 		ret = snd_soc_dapm_set_bias_level(d, SND_SOC_BIAS_ON);
d                1855 sound/soc/soc-dapm.c 			dev_err(d->dev, "ASoC: Failed to apply active bias: %d\n",
d                1955 sound/soc/soc-dapm.c 	struct snd_soc_dapm_context *d;
d                1966 sound/soc/soc-dapm.c 	list_for_each_entry(d, &card->dapm_list, list) {
d                1967 sound/soc/soc-dapm.c 		if (dapm_idle_bias_off(d))
d                1968 sound/soc/soc-dapm.c 			d->target_bias_level = SND_SOC_BIAS_OFF;
d                1970 sound/soc/soc-dapm.c 			d->target_bias_level = SND_SOC_BIAS_STANDBY;
d                1997 sound/soc/soc-dapm.c 			d = w->dapm;
d                2015 sound/soc/soc-dapm.c 				if (d->target_bias_level < SND_SOC_BIAS_STANDBY)
d                2016 sound/soc/soc-dapm.c 					d->target_bias_level = SND_SOC_BIAS_STANDBY;
d                2019 sound/soc/soc-dapm.c 				d->target_bias_level = SND_SOC_BIAS_ON;
d                2030 sound/soc/soc-dapm.c 	list_for_each_entry(d, &card->dapm_list, list)
d                2031 sound/soc/soc-dapm.c 		if (d->target_bias_level > bias)
d                2032 sound/soc/soc-dapm.c 			bias = d->target_bias_level;
d                2033 sound/soc/soc-dapm.c 	list_for_each_entry(d, &card->dapm_list, list)
d                2034 sound/soc/soc-dapm.c 		if (!dapm_idle_bias_off(d))
d                2035 sound/soc/soc-dapm.c 			d->target_bias_level = bias;
d                2042 sound/soc/soc-dapm.c 	list_for_each_entry(d, &card->dapm_list, list) {
d                2043 sound/soc/soc-dapm.c 		if (d != &card->dapm && d->bias_level != d->target_bias_level)
d                2044 sound/soc/soc-dapm.c 			async_schedule_domain(dapm_pre_sequence_async, d,
d                2066 sound/soc/soc-dapm.c 	list_for_each_entry(d, &card->dapm_list, list) {
d                2067 sound/soc/soc-dapm.c 		if (d != &card->dapm && d->bias_level != d->target_bias_level)
d                2068 sound/soc/soc-dapm.c 			async_schedule_domain(dapm_post_sequence_async, d,
d                2076 sound/soc/soc-dapm.c 	list_for_each_entry(d, &card->dapm_list, list) {
d                2077 sound/soc/soc-dapm.c 		if (!d->component)
d                2080 sound/soc/soc-dapm.c 		ret = snd_soc_component_stream_event(d->component, event);
d                1223 sound/soc/soc-pcm.c 	struct snd_soc_dpcm *dpcm, *d;
d                1226 sound/soc/soc-pcm.c 	for_each_dpcm_be_safe(fe, stream, dpcm, d) {
d                2372 sound/soc/soc-topology.c 			       struct snd_soc_tplg_dai *d)
d                2383 sound/soc/soc-topology.c 	dai_component.dai_name = d->dai_name;
d                2387 sound/soc/soc-topology.c 			d->dai_name);
d                2391 sound/soc/soc-topology.c 	if (le32_to_cpu(d->dai_id) != dai->id) {
d                2393 sound/soc/soc-topology.c 			d->dai_name);
d                2401 sound/soc/soc-topology.c 	if (d->playback) {
d                2403 sound/soc/soc-topology.c 		caps = &d->caps[SND_SOC_TPLG_STREAM_PLAYBACK];
d                2407 sound/soc/soc-topology.c 	if (d->capture) {
d                2409 sound/soc/soc-topology.c 		caps = &d->caps[SND_SOC_TPLG_STREAM_CAPTURE];
d                2413 sound/soc/soc-topology.c 	if (d->flag_mask)
d                2415 sound/soc/soc-topology.c 			      le32_to_cpu(d->flag_mask),
d                2416 sound/soc/soc-topology.c 			      le32_to_cpu(d->flags));
d                 385 sound/soc/sof/ops.h *sof_get_ops(const struct sof_dev_desc *d,
d                 391 sound/soc/sof/ops.h 		if (d == mach_ops[i].desc)
d                 159 sound/soc/stm/stm32_adfsdm.c 	u16 *d = (u16 *)dest, *s = (u16 *)src;
d                 163 sound/soc/stm/stm32_adfsdm.c 		*d++ = *s++;
d                1704 sound/sparc/cs4231.c static void sbus_dma_prepare(struct cs4231_dma_control *dma_cont, int d)
d                 303 sound/synth/emux/emux_oss.c 	data = ev->data.raw8.d;
d                 124 sound/usb/caiaq/device.h #define caiaqdev_to_dev(d)	(d->chip.card->dev)
d                 770 sound/usb/mixer.c 	struct uac_input_terminal_descriptor *d = p1;
d                 772 sound/usb/mixer.c 	term->type = le16_to_cpu(d->wTerminalType);
d                 773 sound/usb/mixer.c 	term->channels = d->bNrChannels;
d                 774 sound/usb/mixer.c 	term->chconfig = le16_to_cpu(d->wChannelConfig);
d                 775 sound/usb/mixer.c 	term->name = d->iTerminal;
d                 783 sound/usb/mixer.c 	struct uac2_input_terminal_descriptor *d = p1;
d                 787 sound/usb/mixer.c 	err = __check_input_term(state, d->bCSourceID, term);
d                 795 sound/usb/mixer.c 	term->type = le16_to_cpu(d->wTerminalType);
d                 796 sound/usb/mixer.c 	term->channels = d->bNrChannels;
d                 797 sound/usb/mixer.c 	term->chconfig = le32_to_cpu(d->bmChannelConfig);
d                 798 sound/usb/mixer.c 	term->name = d->iTerminal;
d                 806 sound/usb/mixer.c 	struct uac3_input_terminal_descriptor *d = p1;
d                 810 sound/usb/mixer.c 	err = __check_input_term(state, d->bCSourceID, term);
d                 818 sound/usb/mixer.c 	term->type = le16_to_cpu(d->wTerminalType);
d                 820 sound/usb/mixer.c 	err = get_cluster_channels_v3(state, le16_to_cpu(d->wClusterDescrID));
d                 828 sound/usb/mixer.c 	term->name = le16_to_cpu(d->wTerminalDescrStr);
d                 836 sound/usb/mixer.c 	struct uac_mixer_unit_descriptor *d = p1;
d                 840 sound/usb/mixer.c 	err = uac_mixer_unit_get_channels(state, d);
d                 847 sound/usb/mixer.c 		term->chconfig = uac_mixer_unit_wChannelConfig(d, protocol);
d                 848 sound/usb/mixer.c 		term->name = uac_mixer_unit_iMixer(d);
d                 857 sound/usb/mixer.c 	struct uac_selector_unit_descriptor *d = p1;
d                 861 sound/usb/mixer.c 	err = __check_input_term(state, d->baSourceID[0], term);
d                 867 sound/usb/mixer.c 		term->name = uac_selector_unit_iSelector(d);
d                 875 sound/usb/mixer.c 	struct uac_processing_unit_descriptor *d = p1;
d                 879 sound/usb/mixer.c 	if (d->bNrInPins) {
d                 881 sound/usb/mixer.c 		err = __check_input_term(state, d->baSourceID[0], term);
d                 893 sound/usb/mixer.c 		term->channels = uac_processing_unit_bNrChannels(d);
d                 894 sound/usb/mixer.c 		term->chconfig = uac_processing_unit_wChannelConfig(d, protocol);
d                 896 sound/usb/mixer.c 	term->name = uac_processing_unit_iProcessing(d, protocol);
d                 913 sound/usb/mixer.c 	struct uac_clock_source_descriptor *d = p1;
d                 917 sound/usb/mixer.c 	term->name = d->iClockSource;
d                 925 sound/usb/mixer.c 	struct uac3_clock_source_descriptor *d = p1;
d                 929 sound/usb/mixer.c 	term->name = le16_to_cpu(d->wClockSourceStr);
d                 965 sound/usb/mixer.c 			struct uac_feature_unit_descriptor *d = p1;
d                 967 sound/usb/mixer.c 			id = d->bSourceID;
d                  28 sound/usb/validate.c 	const struct uac1_ac_header_descriptor *d = p;
d                  30 sound/usb/validate.c 	return d->bLength >= sizeof(*d) &&
d                  31 sound/usb/validate.c 		d->bLength >= sizeof(*d) + d->bInCollection;
d                  38 sound/usb/validate.c 	const struct uac_mixer_unit_descriptor *d = p;
d                  41 sound/usb/validate.c 	if (d->bLength < sizeof(*d) || !d->bNrInPins)
d                  43 sound/usb/validate.c 	len = sizeof(*d) + d->bNrInPins;
d                  65 sound/usb/validate.c 	return d->bLength >= len;
d                  72 sound/usb/validate.c 	const struct uac_processing_unit_descriptor *d = p;
d                  76 sound/usb/validate.c 	if (d->bLength < sizeof(*d))
d                  78 sound/usb/validate.c 	len = sizeof(*d) + d->bNrInPins;
d                  79 sound/usb/validate.c 	if (d->bLength < len)
d                  86 sound/usb/validate.c 		if (d->bLength < len + 1) /* bControlSize */
d                 105 sound/usb/validate.c 	if (d->bLength < len)
d                 113 sound/usb/validate.c 		switch (le16_to_cpu(d->wProcessType)) {
d                 116 sound/usb/validate.c 			if (d->bLength < len + 1) /* bNrModes */
d                 128 sound/usb/validate.c 		switch (le16_to_cpu(d->wProcessType)) {
d                 131 sound/usb/validate.c 			if (d->bLength < len + 1) /* bNrModes */
d                 145 sound/usb/validate.c 		switch (le16_to_cpu(d->wProcessType)) {
d                 147 sound/usb/validate.c 			if (d->bLength < len + 1) /* bNrModes */
d                 160 sound/usb/validate.c 	if (d->bLength < len)
d                 170 sound/usb/validate.c 	const struct uac_selector_unit_descriptor *d = p;
d                 173 sound/usb/validate.c 	if (d->bLength < sizeof(*d))
d                 175 sound/usb/validate.c 	len = sizeof(*d) + d->bNrInPins;
d                 188 sound/usb/validate.c 	return d->bLength >= len;
d                 194 sound/usb/validate.c 	const struct uac_feature_unit_descriptor *d = p;
d                 196 sound/usb/validate.c 	if (d->bLength < sizeof(*d) || !d->bControlSize)
d                 199 sound/usb/validate.c 	return d->bLength >= sizeof(*d) + d->bControlSize + 1;
d                 205 sound/usb/validate.c 	const struct uac2_feature_unit_descriptor *d = p;
d                 207 sound/usb/validate.c 	if (d->bLength < sizeof(*d))
d                 210 sound/usb/validate.c 	return d->bLength >= sizeof(*d) + 4 + 1;
d                 216 sound/usb/validate.c 	const struct uac3_feature_unit_descriptor *d = p;
d                 218 sound/usb/validate.c 	if (d->bLength < sizeof(*d))
d                 221 sound/usb/validate.c 	return d->bLength >= sizeof(*d) + 4 + 2;
d                 227 sound/usb/validate.c 	const struct usb_midi_out_jack_descriptor *d = p;
d                 229 sound/usb/validate.c 	return d->bLength >= sizeof(*d) &&
d                 230 sound/usb/validate.c 		d->bLength >= sizeof(*d) + d->bNrInputPins * 2;
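
The sound/usb/validate.c entries above all enforce the same invariant: a descriptor is accepted only if its bLength covers the fixed part of the structure plus its variable-length tail (source pins, control bitmaps, string indices). A standalone sketch of that idiom, using a hypothetical descriptor layout and helper name rather than the real UAC structures:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/* Hypothetical descriptor layout: a fixed header followed by
 * bNrInPins one-byte source IDs (not one of the real UAC structs). */
struct hypothetical_unit_descriptor {
	uint8_t bLength;	/* total size of the descriptor in bytes */
	uint8_t bDescriptorType;
	uint8_t bUnitID;
	uint8_t bNrInPins;	/* number of baSourceID bytes that follow */
	uint8_t baSourceID[];
};

/* Accept the descriptor only if bLength covers the fixed header plus the
 * variable-length tail, mirroring the validators indexed above. The caller
 * is assumed to have verified that at least bLength bytes are readable. */
static bool validate_unit_desc(const void *p)
{
	const struct hypothetical_unit_descriptor *d = p;

	if (d->bLength < sizeof(*d))
		return false;
	return d->bLength >= sizeof(*d) + d->bNrInPins;
}
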
d                 298 sound/xen/xen_snd_front_evtchnl.c 	int d, ret = 0;
d                 308 sound/xen/xen_snd_front_evtchnl.c 	for (d = 0; d < cfg->num_pcm_instances; d++) {
d                 312 sound/xen/xen_snd_front_evtchnl.c 		pcm_instance = &cfg->pcm_instances[d];
d                 392 sound/xen/xen_snd_front_evtchnl.c 	int ret, d;
d                 402 sound/xen/xen_snd_front_evtchnl.c 	for (d = 0; d < cfg->num_pcm_instances; d++) {
d                 406 sound/xen/xen_snd_front_evtchnl.c 		pcm_instance = &cfg->pcm_instances[d];
d                 190 tools/arch/s390/include/uapi/asm/ptrace.h 	double	d;
d                 637 tools/bpf/bpf_dbg.c 	int d;
d                 665 tools/bpf/bpf_dbg.c 		d = pkt_caplen - K;
d                 666 tools/bpf/bpf_dbg.c 		if (d >= sizeof(uint32_t))
d                 672 tools/bpf/bpf_dbg.c 		d = pkt_caplen - K;
d                 673 tools/bpf/bpf_dbg.c 		if (d >= sizeof(uint16_t))
d                 679 tools/bpf/bpf_dbg.c 		d = pkt_caplen - K;
d                 680 tools/bpf/bpf_dbg.c 		if (d >= sizeof(uint8_t))
d                 686 tools/bpf/bpf_dbg.c 		d = pkt_caplen - (r->X + K);
d                 687 tools/bpf/bpf_dbg.c 		if (d >= sizeof(uint32_t))
d                 691 tools/bpf/bpf_dbg.c 		d = pkt_caplen - (r->X + K);
d                 692 tools/bpf/bpf_dbg.c 		if (d >= sizeof(uint16_t))
d                 698 tools/bpf/bpf_dbg.c 		d = pkt_caplen - (r->X + K);
d                 699 tools/bpf/bpf_dbg.c 		if (d >= sizeof(uint8_t))
d                 705 tools/bpf/bpf_dbg.c 		d = pkt_caplen - K;
d                 706 tools/bpf/bpf_dbg.c 		if (d >= sizeof(uint8_t)) {
d                 363 tools/bpf/bpftool/btf.c 	struct btf_dump *d;
d                 366 tools/bpf/bpftool/btf.c 	d = btf_dump__new(btf, NULL, NULL, btf_dump_printf);
d                 367 tools/bpf/bpftool/btf.c 	if (IS_ERR(d))
d                 368 tools/bpf/bpftool/btf.c 		return PTR_ERR(d);
d                 372 tools/bpf/bpftool/btf.c 			err = btf_dump__dump_type(d, root_type_ids[i]);
d                 380 tools/bpf/bpftool/btf.c 			err = btf_dump__dump_type(d, i);
d                 387 tools/bpf/bpftool/btf.c 	btf_dump__free(d);
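
The tools/bpf/bpftool/btf.c entries above show libbpf's btf_dump API end to end: create a dumper with a printf callback, emit C definitions one type ID at a time, then free it. A minimal caller-side sketch assuming the four-argument btf_dump__new() form used above and libbpf_get_error() for error extraction; dump_printf and dump_all_types are illustrative names, not part of the library:

#include <stdarg.h>
#include <stdio.h>
#include <bpf/btf.h>
#include <bpf/libbpf.h>

/* Print callback handed to btf_dump__new(); forwards to stdout. */
static void dump_printf(void *ctx, const char *fmt, va_list args)
{
	vfprintf(stdout, fmt, args);
}

/* Dump every type in a BTF object as C definitions (sketch, minimal
 * error handling). */
static int dump_all_types(struct btf *btf)
{
	struct btf_dump *d;
	unsigned int i, n = btf__get_nr_types(btf);
	int err = 0;

	d = btf_dump__new(btf, NULL, NULL, dump_printf);
	if (libbpf_get_error(d))
		return -1;

	/* Type ID 0 is void; real types start at 1. */
	for (i = 1; i <= n; i++) {
		err = btf_dump__dump_type(d, i);
		if (err)
			break;
	}
	btf_dump__free(d);
	return err;
}
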
d                  22 tools/bpf/bpftool/btf_dumper.c static int btf_dumper_do_type(const struct btf_dumper *d, __u32 type_id,
d                  34 tools/bpf/bpftool/btf_dumper.c static int btf_dumper_modifier(const struct btf_dumper *d, __u32 type_id,
d                  39 tools/bpf/bpftool/btf_dumper.c 	actual_type_id = btf__resolve_type(d->btf, type_id);
d                  43 tools/bpf/bpftool/btf_dumper.c 	return btf_dumper_do_type(d, actual_type_id, bit_offset, data);
d                  51 tools/bpf/bpftool/btf_dumper.c static int btf_dumper_array(const struct btf_dumper *d, __u32 type_id,
d                  54 tools/bpf/bpftool/btf_dumper.c 	const struct btf_type *t = btf__type_by_id(d->btf, type_id);
d                  60 tools/bpf/bpftool/btf_dumper.c 	elem_size = btf__resolve_size(d->btf, arr->type);
d                  64 tools/bpf/bpftool/btf_dumper.c 	jsonw_start_array(d->jw);
d                  66 tools/bpf/bpftool/btf_dumper.c 		ret = btf_dumper_do_type(d, arr->type, 0,
d                  72 tools/bpf/bpftool/btf_dumper.c 	jsonw_end_array(d->jw);
d                 264 tools/bpf/bpftool/btf_dumper.c static int btf_dumper_struct(const struct btf_dumper *d, __u32 type_id,
d                 274 tools/bpf/bpftool/btf_dumper.c 	t = btf__type_by_id(d->btf, type_id);
d                 280 tools/bpf/bpftool/btf_dumper.c 	jsonw_start_object(d->jw);
d                 292 tools/bpf/bpftool/btf_dumper.c 		jsonw_name(d->jw, btf__name_by_offset(d->btf, m[i].name_off));
d                 297 tools/bpf/bpftool/btf_dumper.c 					    data_off, d->jw, d->is_plain_text);
d                 299 tools/bpf/bpftool/btf_dumper.c 			ret = btf_dumper_do_type(d, m[i].type,
d                 307 tools/bpf/bpftool/btf_dumper.c 	jsonw_end_object(d->jw);
d                 312 tools/bpf/bpftool/btf_dumper.c static int btf_dumper_var(const struct btf_dumper *d, __u32 type_id,
d                 315 tools/bpf/bpftool/btf_dumper.c 	const struct btf_type *t = btf__type_by_id(d->btf, type_id);
d                 318 tools/bpf/bpftool/btf_dumper.c 	jsonw_start_object(d->jw);
d                 319 tools/bpf/bpftool/btf_dumper.c 	jsonw_name(d->jw, btf__name_by_offset(d->btf, t->name_off));
d                 320 tools/bpf/bpftool/btf_dumper.c 	ret = btf_dumper_do_type(d, t->type, bit_offset, data);
d                 321 tools/bpf/bpftool/btf_dumper.c 	jsonw_end_object(d->jw);
d                 326 tools/bpf/bpftool/btf_dumper.c static int btf_dumper_datasec(const struct btf_dumper *d, __u32 type_id,
d                 333 tools/bpf/bpftool/btf_dumper.c 	t = btf__type_by_id(d->btf, type_id);
d                 340 tools/bpf/bpftool/btf_dumper.c 	jsonw_start_object(d->jw);
d                 341 tools/bpf/bpftool/btf_dumper.c 	jsonw_name(d->jw, btf__name_by_offset(d->btf, t->name_off));
d                 342 tools/bpf/bpftool/btf_dumper.c 	jsonw_start_array(d->jw);
d                 344 tools/bpf/bpftool/btf_dumper.c 		ret = btf_dumper_do_type(d, vsi[i].type, 0, data + vsi[i].offset);
d                 348 tools/bpf/bpftool/btf_dumper.c 	jsonw_end_array(d->jw);
d                 349 tools/bpf/bpftool/btf_dumper.c 	jsonw_end_object(d->jw);
d                 354 tools/bpf/bpftool/btf_dumper.c static int btf_dumper_do_type(const struct btf_dumper *d, __u32 type_id,
d                 357 tools/bpf/bpftool/btf_dumper.c 	const struct btf_type *t = btf__type_by_id(d->btf, type_id);
d                 361 tools/bpf/bpftool/btf_dumper.c 		return btf_dumper_int(t, bit_offset, data, d->jw,
d                 362 tools/bpf/bpftool/btf_dumper.c 				     d->is_plain_text);
d                 365 tools/bpf/bpftool/btf_dumper.c 		return btf_dumper_struct(d, type_id, data);
d                 367 tools/bpf/bpftool/btf_dumper.c 		return btf_dumper_array(d, type_id, data);
d                 369 tools/bpf/bpftool/btf_dumper.c 		btf_dumper_enum(data, d->jw);
d                 372 tools/bpf/bpftool/btf_dumper.c 		btf_dumper_ptr(data, d->jw, d->is_plain_text);
d                 375 tools/bpf/bpftool/btf_dumper.c 		jsonw_printf(d->jw, "(unknown)");
d                 379 tools/bpf/bpftool/btf_dumper.c 		jsonw_printf(d->jw, "(fwd-kind-invalid)");
d                 385 tools/bpf/bpftool/btf_dumper.c 		return btf_dumper_modifier(d, type_id, bit_offset, data);
d                 387 tools/bpf/bpftool/btf_dumper.c 		return btf_dumper_var(d, type_id, bit_offset, data);
d                 389 tools/bpf/bpftool/btf_dumper.c 		return btf_dumper_datasec(d, type_id, data);
d                 391 tools/bpf/bpftool/btf_dumper.c 		jsonw_printf(d->jw, "(unsupported-kind");
d                 396 tools/bpf/bpftool/btf_dumper.c int btf_dumper_type(const struct btf_dumper *d, __u32 type_id,
d                 399 tools/bpf/bpftool/btf_dumper.c 	return btf_dumper_do_type(d, type_id, 0, data);
d                 211 tools/bpf/bpftool/main.h int btf_dumper_type(const struct btf_dumper *d, __u32 type_id,
d                 149 tools/bpf/bpftool/map.c static int do_dump_btf(const struct btf_dumper *d,
d                 156 tools/bpf/bpftool/map.c 	jsonw_start_object(d->jw);
d                 159 tools/bpf/bpftool/map.c 		jsonw_name(d->jw, "key");
d                 161 tools/bpf/bpftool/map.c 		ret = btf_dumper_type(d, map_info->btf_key_type_id, key);
d                 167 tools/bpf/bpftool/map.c 		jsonw_name(d->jw, "value");
d                 168 tools/bpf/bpftool/map.c 		ret = btf_dumper_type(d, map_info->btf_value_type_id, value);
d                 172 tools/bpf/bpftool/map.c 		jsonw_name(d->jw, "values");
d                 173 tools/bpf/bpftool/map.c 		jsonw_start_array(d->jw);
d                 177 tools/bpf/bpftool/map.c 			jsonw_start_object(d->jw);
d                 178 tools/bpf/bpftool/map.c 			jsonw_int_field(d->jw, "cpu", i);
d                 179 tools/bpf/bpftool/map.c 			jsonw_name(d->jw, "value");
d                 180 tools/bpf/bpftool/map.c 			ret = btf_dumper_type(d, map_info->btf_value_type_id,
d                 182 tools/bpf/bpftool/map.c 			jsonw_end_object(d->jw);
d                 186 tools/bpf/bpftool/map.c 		jsonw_end_array(d->jw);
d                 191 tools/bpf/bpftool/map.c 	jsonw_end_object(d->jw);
d                 218 tools/bpf/bpftool/map.c 			struct btf_dumper d = {
d                 225 tools/bpf/bpftool/map.c 			do_dump_btf(&d, info, key, value);
d                 251 tools/bpf/bpftool/map.c 			struct btf_dumper d = {
d                 258 tools/bpf/bpftool/map.c 			do_dump_btf(&d, info, key, value);
d                 721 tools/bpf/bpftool/map.c 				struct btf_dumper d = {
d                 727 tools/bpf/bpftool/map.c 				do_dump_btf(&d, map_info, key, value);
d                 945 tools/bpf/bpftool/map.c 			struct btf_dumper d = {
d                 951 tools/bpf/bpftool/map.c 			do_dump_btf(&d, info, key, value);
d                   6 tools/build/tests/ex/ex.c int d(void);
d                  16 tools/build/tests/ex/ex.c 	d();
d                  37 tools/include/linux/irqflags.h #define trace_lock_acquire(a, b, c, d, e, f, g)
d                  16 tools/include/linux/kernel.h #define DIV_ROUND_UP(n,d) (((n) + (d) - 1) / (d))
d                  49 tools/include/linux/overflow.h #define check_add_overflow(a, b, d) ({		\
d                  52 tools/include/linux/overflow.h 	typeof(d) __d = (d);			\
d                  58 tools/include/linux/overflow.h #define check_sub_overflow(a, b, d) ({		\
d                  61 tools/include/linux/overflow.h 	typeof(d) __d = (d);			\
d                  67 tools/include/linux/overflow.h #define check_mul_overflow(a, b, d) ({		\
d                  70 tools/include/linux/overflow.h 	typeof(d) __d = (d);			\
d                  80 tools/include/linux/overflow.h #define __unsigned_add_overflow(a, b, d) ({	\
d                  83 tools/include/linux/overflow.h 	typeof(d) __d = (d);			\
d                  89 tools/include/linux/overflow.h #define __unsigned_sub_overflow(a, b, d) ({	\
d                  92 tools/include/linux/overflow.h 	typeof(d) __d = (d);			\
d                 101 tools/include/linux/overflow.h #define __unsigned_mul_overflow(a, b, d) ({		\
d                 104 tools/include/linux/overflow.h 	typeof(d) __d = (d);				\
d                 129 tools/include/linux/overflow.h #define __signed_add_overflow(a, b, d) ({	\
d                 132 tools/include/linux/overflow.h 	typeof(d) __d = (d);			\
d                 145 tools/include/linux/overflow.h #define __signed_sub_overflow(a, b, d) ({	\
d                 148 tools/include/linux/overflow.h 	typeof(d) __d = (d);			\
d                 172 tools/include/linux/overflow.h #define __signed_mul_overflow(a, b, d) ({				\
d                 175 tools/include/linux/overflow.h 	typeof(d) __d = (d);						\
d                 187 tools/include/linux/overflow.h #define check_add_overflow(a, b, d)					\
d                 189 tools/include/linux/overflow.h 			__signed_add_overflow(a, b, d),			\
d                 190 tools/include/linux/overflow.h 			__unsigned_add_overflow(a, b, d))
d                 192 tools/include/linux/overflow.h #define check_sub_overflow(a, b, d)					\
d                 194 tools/include/linux/overflow.h 			__signed_sub_overflow(a, b, d),			\
d                 195 tools/include/linux/overflow.h 			__unsigned_sub_overflow(a, b, d))
d                 197 tools/include/linux/overflow.h #define check_mul_overflow(a, b, d)					\
d                 199 tools/include/linux/overflow.h 			__signed_mul_overflow(a, b, d),			\
d                 200 tools/include/linux/overflow.h 			__unsigned_mul_overflow(a, b, d))
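
The tools/include/linux/overflow.h entries above define check_add_overflow(a, b, d) and its sub/mul variants, where d is a pointer to the result: the macro stores the (possibly wrapped) value in *d and evaluates to true when the operation overflowed. A minimal usage sketch, assuming tools/include is on the compiler include path:

#include <limits.h>
#include <stdio.h>
#include <linux/overflow.h>

int main(void)
{
	unsigned int sum;

	/* UINT_MAX + 1 wraps, so the macro returns true and sum holds 0. */
	if (check_add_overflow(UINT_MAX, 1u, &sum))
		printf("overflow, wrapped result = %u\n", sum);
	else
		printf("ok, sum = %u\n", sum);
	return 0;
}
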
d                2376 tools/include/nolibc/nolibc.h 	unsigned long d;
d                2385 tools/include/nolibc/nolibc.h 		d = (*s++) - '0';
d                2386 tools/include/nolibc/nolibc.h 		if (d > 9)
d                2389 tools/include/nolibc/nolibc.h 		ret += d;
d                 438 tools/include/uapi/linux/pkt_sched.h 	__u32	d;		/* x-projection of the first segment in us */
d                 930 tools/include/uapi/sound/asound.h 		unsigned short d[4];		/* dimensions */
d                1078 tools/lib/bpf/btf.c static void btf_dedup_free(struct btf_dedup *d);
d                1079 tools/lib/bpf/btf.c static int btf_dedup_strings(struct btf_dedup *d);
d                1080 tools/lib/bpf/btf.c static int btf_dedup_prim_types(struct btf_dedup *d);
d                1081 tools/lib/bpf/btf.c static int btf_dedup_struct_types(struct btf_dedup *d);
d                1082 tools/lib/bpf/btf.c static int btf_dedup_ref_types(struct btf_dedup *d);
d                1083 tools/lib/bpf/btf.c static int btf_dedup_compact_types(struct btf_dedup *d);
d                1084 tools/lib/bpf/btf.c static int btf_dedup_remap_types(struct btf_dedup *d);
d                1226 tools/lib/bpf/btf.c 	struct btf_dedup *d = btf_dedup_new(btf, btf_ext, opts);
d                1229 tools/lib/bpf/btf.c 	if (IS_ERR(d)) {
d                1230 tools/lib/bpf/btf.c 		pr_debug("btf_dedup_new failed: %ld", PTR_ERR(d));
d                1234 tools/lib/bpf/btf.c 	err = btf_dedup_strings(d);
d                1239 tools/lib/bpf/btf.c 	err = btf_dedup_prim_types(d);
d                1244 tools/lib/bpf/btf.c 	err = btf_dedup_struct_types(d);
d                1249 tools/lib/bpf/btf.c 	err = btf_dedup_ref_types(d);
d                1254 tools/lib/bpf/btf.c 	err = btf_dedup_compact_types(d);
d                1259 tools/lib/bpf/btf.c 	err = btf_dedup_remap_types(d);
d                1266 tools/lib/bpf/btf.c 	btf_dedup_free(d);
d                1318 tools/lib/bpf/btf.c #define for_each_dedup_cand(d, node, hash) \
d                1319 tools/lib/bpf/btf.c 	hashmap__for_each_key_entry(d->dedup_table, node, (void *)hash)
d                1321 tools/lib/bpf/btf.c static int btf_dedup_table_add(struct btf_dedup *d, long hash, __u32 type_id)
d                1323 tools/lib/bpf/btf.c 	return hashmap__append(d->dedup_table,
d                1327 tools/lib/bpf/btf.c static int btf_dedup_hypot_map_add(struct btf_dedup *d,
d                1330 tools/lib/bpf/btf.c 	if (d->hypot_cnt == d->hypot_cap) {
d                1333 tools/lib/bpf/btf.c 		d->hypot_cap += max(16, d->hypot_cap / 2);
d                1334 tools/lib/bpf/btf.c 		new_list = realloc(d->hypot_list, sizeof(__u32) * d->hypot_cap);
d                1337 tools/lib/bpf/btf.c 		d->hypot_list = new_list;
d                1339 tools/lib/bpf/btf.c 	d->hypot_list[d->hypot_cnt++] = from_id;
d                1340 tools/lib/bpf/btf.c 	d->hypot_map[from_id] = to_id;
d                1344 tools/lib/bpf/btf.c static void btf_dedup_clear_hypot_map(struct btf_dedup *d)
d                1348 tools/lib/bpf/btf.c 	for (i = 0; i < d->hypot_cnt; i++)
d                1349 tools/lib/bpf/btf.c 		d->hypot_map[d->hypot_list[i]] = BTF_UNPROCESSED_ID;
d                1350 tools/lib/bpf/btf.c 	d->hypot_cnt = 0;
d                1353 tools/lib/bpf/btf.c static void btf_dedup_free(struct btf_dedup *d)
d                1355 tools/lib/bpf/btf.c 	hashmap__free(d->dedup_table);
d                1356 tools/lib/bpf/btf.c 	d->dedup_table = NULL;
d                1358 tools/lib/bpf/btf.c 	free(d->map);
d                1359 tools/lib/bpf/btf.c 	d->map = NULL;
d                1361 tools/lib/bpf/btf.c 	free(d->hypot_map);
d                1362 tools/lib/bpf/btf.c 	d->hypot_map = NULL;
d                1364 tools/lib/bpf/btf.c 	free(d->hypot_list);
d                1365 tools/lib/bpf/btf.c 	d->hypot_list = NULL;
d                1367 tools/lib/bpf/btf.c 	free(d);
d                1388 tools/lib/bpf/btf.c 	struct btf_dedup *d = calloc(1, sizeof(struct btf_dedup));
d                1392 tools/lib/bpf/btf.c 	if (!d)
d                1395 tools/lib/bpf/btf.c 	d->opts.dont_resolve_fwds = opts && opts->dont_resolve_fwds;
d                1400 tools/lib/bpf/btf.c 	d->btf = btf;
d                1401 tools/lib/bpf/btf.c 	d->btf_ext = btf_ext;
d                1403 tools/lib/bpf/btf.c 	d->dedup_table = hashmap__new(hash_fn, btf_dedup_equal_fn, NULL);
d                1404 tools/lib/bpf/btf.c 	if (IS_ERR(d->dedup_table)) {
d                1405 tools/lib/bpf/btf.c 		err = PTR_ERR(d->dedup_table);
d                1406 tools/lib/bpf/btf.c 		d->dedup_table = NULL;
d                1410 tools/lib/bpf/btf.c 	d->map = malloc(sizeof(__u32) * (1 + btf->nr_types));
d                1411 tools/lib/bpf/btf.c 	if (!d->map) {
d                1416 tools/lib/bpf/btf.c 	d->map[0] = 0;
d                1418 tools/lib/bpf/btf.c 		struct btf_type *t = d->btf->types[i];
d                1422 tools/lib/bpf/btf.c 			d->map[i] = i;
d                1424 tools/lib/bpf/btf.c 			d->map[i] = BTF_UNPROCESSED_ID;
d                1427 tools/lib/bpf/btf.c 	d->hypot_map = malloc(sizeof(__u32) * (1 + btf->nr_types));
d                1428 tools/lib/bpf/btf.c 	if (!d->hypot_map) {
d                1433 tools/lib/bpf/btf.c 		d->hypot_map[i] = BTF_UNPROCESSED_ID;
d                1437 tools/lib/bpf/btf.c 		btf_dedup_free(d);
d                1441 tools/lib/bpf/btf.c 	return d;
d                1450 tools/lib/bpf/btf.c static int btf_for_each_str_off(struct btf_dedup *d, str_off_fn_t fn, void *ctx)
d                1456 tools/lib/bpf/btf.c 	for (i = 1; i <= d->btf->nr_types; i++) {
d                1457 tools/lib/bpf/btf.c 		t = d->btf->types[i];
d                1505 tools/lib/bpf/btf.c 	if (!d->btf_ext)
d                1508 tools/lib/bpf/btf.c 	line_data_cur = d->btf_ext->line_info.info;
d                1509 tools/lib/bpf/btf.c 	line_data_end = d->btf_ext->line_info.info + d->btf_ext->line_info.len;
d                1510 tools/lib/bpf/btf.c 	rec_size = d->btf_ext->line_info.rec_size;
d                1609 tools/lib/bpf/btf.c static int btf_dedup_strings(struct btf_dedup *d)
d                1611 tools/lib/bpf/btf.c 	const struct btf_header *hdr = d->btf->hdr;
d                1612 tools/lib/bpf/btf.c 	char *start = (char *)d->btf->nohdr_data + hdr->str_off;
d                1613 tools/lib/bpf/btf.c 	char *end = start + d->btf->hdr->str_len;
d                1647 tools/lib/bpf/btf.c 	tmp_strs = malloc(d->btf->hdr->str_len);
d                1655 tools/lib/bpf/btf.c 	err = btf_for_each_str_off(d, btf_str_mark_as_used, &strs);
d                1704 tools/lib/bpf/btf.c 	d->btf->hdr->str_len = p - tmp_strs;
d                1705 tools/lib/bpf/btf.c 	memmove(start, tmp_strs, d->btf->hdr->str_len);
d                1706 tools/lib/bpf/btf.c 	end = start + d->btf->hdr->str_len;
d                1712 tools/lib/bpf/btf.c 	err = btf_for_each_str_off(d, btf_str_remap_offset, &strs);
d                1716 tools/lib/bpf/btf.c 	d->btf->hdr->str_len = end - start;
d                1991 tools/lib/bpf/btf.c static int btf_dedup_prim_type(struct btf_dedup *d, __u32 type_id)
d                1993 tools/lib/bpf/btf.c 	struct btf_type *t = d->btf->types[type_id];
d                2018 tools/lib/bpf/btf.c 		for_each_dedup_cand(d, hash_entry, h) {
d                2020 tools/lib/bpf/btf.c 			cand = d->btf->types[cand_id];
d                2030 tools/lib/bpf/btf.c 		for_each_dedup_cand(d, hash_entry, h) {
d                2032 tools/lib/bpf/btf.c 			cand = d->btf->types[cand_id];
d                2037 tools/lib/bpf/btf.c 			if (d->opts.dont_resolve_fwds)
d                2046 tools/lib/bpf/btf.c 				d->map[cand_id] = type_id;
d                2053 tools/lib/bpf/btf.c 		for_each_dedup_cand(d, hash_entry, h) {
d                2055 tools/lib/bpf/btf.c 			cand = d->btf->types[cand_id];
d                2067 tools/lib/bpf/btf.c 	d->map[type_id] = new_id;
d                2068 tools/lib/bpf/btf.c 	if (type_id == new_id && btf_dedup_table_add(d, h, type_id))
d                2074 tools/lib/bpf/btf.c static int btf_dedup_prim_types(struct btf_dedup *d)
d                2078 tools/lib/bpf/btf.c 	for (i = 1; i <= d->btf->nr_types; i++) {
d                2079 tools/lib/bpf/btf.c 		err = btf_dedup_prim_type(d, i);
d                2089 tools/lib/bpf/btf.c static inline bool is_type_mapped(struct btf_dedup *d, uint32_t type_id)
d                2091 tools/lib/bpf/btf.c 	return d->map[type_id] <= BTF_MAX_NR_TYPES;
d                2099 tools/lib/bpf/btf.c static inline __u32 resolve_type_id(struct btf_dedup *d, __u32 type_id)
d                2101 tools/lib/bpf/btf.c 	while (is_type_mapped(d, type_id) && d->map[type_id] != type_id)
d                2102 tools/lib/bpf/btf.c 		type_id = d->map[type_id];
d                2110 tools/lib/bpf/btf.c static uint32_t resolve_fwd_id(struct btf_dedup *d, uint32_t type_id)
d                2114 tools/lib/bpf/btf.c 	if (!btf_is_fwd(d->btf->types[type_id]))
d                2117 tools/lib/bpf/btf.c 	while (is_type_mapped(d, type_id) && d->map[type_id] != type_id)
d                2118 tools/lib/bpf/btf.c 		type_id = d->map[type_id];
d                2120 tools/lib/bpf/btf.c 	if (!btf_is_fwd(d->btf->types[type_id]))
d                2225 tools/lib/bpf/btf.c static int btf_dedup_is_equiv(struct btf_dedup *d, __u32 cand_id,
d                2236 tools/lib/bpf/btf.c 	if (resolve_type_id(d, cand_id) == resolve_type_id(d, canon_id))
d                2239 tools/lib/bpf/btf.c 	canon_id = resolve_fwd_id(d, canon_id);
d                2241 tools/lib/bpf/btf.c 	hypot_type_id = d->hypot_map[canon_id];
d                2245 tools/lib/bpf/btf.c 	if (btf_dedup_hypot_map_add(d, canon_id, cand_id))
d                2248 tools/lib/bpf/btf.c 	cand_type = d->btf->types[cand_id];
d                2249 tools/lib/bpf/btf.c 	canon_type = d->btf->types[canon_id];
d                2257 tools/lib/bpf/btf.c 	if (!d->opts.dont_resolve_fwds
d                2281 tools/lib/bpf/btf.c 		if (d->opts.dont_resolve_fwds)
d                2297 tools/lib/bpf/btf.c 		return btf_dedup_is_equiv(d, cand_type->type, canon_type->type);
d                2306 tools/lib/bpf/btf.c 		eq = btf_dedup_is_equiv(d,
d                2310 tools/lib/bpf/btf.c 		return btf_dedup_is_equiv(d, cand_arr->type, canon_arr->type);
d                2324 tools/lib/bpf/btf.c 			eq = btf_dedup_is_equiv(d, cand_m->type, canon_m->type);
d                2340 tools/lib/bpf/btf.c 		eq = btf_dedup_is_equiv(d, cand_type->type, canon_type->type);
d                2347 tools/lib/bpf/btf.c 			eq = btf_dedup_is_equiv(d, cand_p->type, canon_p->type);
d                2388 tools/lib/bpf/btf.c static void btf_dedup_merge_hypot_map(struct btf_dedup *d)
d                2395 tools/lib/bpf/btf.c 	for (i = 0; i < d->hypot_cnt; i++) {
d                2396 tools/lib/bpf/btf.c 		cand_type_id = d->hypot_list[i];
d                2397 tools/lib/bpf/btf.c 		targ_type_id = d->hypot_map[cand_type_id];
d                2398 tools/lib/bpf/btf.c 		t_id = resolve_type_id(d, targ_type_id);
d                2399 tools/lib/bpf/btf.c 		c_id = resolve_type_id(d, cand_type_id);
d                2400 tools/lib/bpf/btf.c 		t_kind = btf_kind(d->btf->types[t_id]);
d                2401 tools/lib/bpf/btf.c 		c_kind = btf_kind(d->btf->types[c_id]);
d                2415 tools/lib/bpf/btf.c 			d->map[c_id] = t_id;
d                2417 tools/lib/bpf/btf.c 			d->map[t_id] = c_id;
d                2421 tools/lib/bpf/btf.c 		    is_type_mapped(d, c_id) &&
d                2422 tools/lib/bpf/btf.c 		    !is_type_mapped(d, t_id)) {
d                2429 tools/lib/bpf/btf.c 			d->map[t_id] = c_id;
d                2456 tools/lib/bpf/btf.c static int btf_dedup_struct_type(struct btf_dedup *d, __u32 type_id)
d                2466 tools/lib/bpf/btf.c 	if (d->map[type_id] <= BTF_MAX_NR_TYPES)
d                2469 tools/lib/bpf/btf.c 	t = d->btf->types[type_id];
d                2476 tools/lib/bpf/btf.c 	for_each_dedup_cand(d, hash_entry, h) {
d                2490 tools/lib/bpf/btf.c 		cand_type = d->btf->types[cand_id];
d                2494 tools/lib/bpf/btf.c 		btf_dedup_clear_hypot_map(d);
d                2495 tools/lib/bpf/btf.c 		eq = btf_dedup_is_equiv(d, type_id, cand_id);
d                2501 tools/lib/bpf/btf.c 		btf_dedup_merge_hypot_map(d);
d                2505 tools/lib/bpf/btf.c 	d->map[type_id] = new_id;
d                2506 tools/lib/bpf/btf.c 	if (type_id == new_id && btf_dedup_table_add(d, h, type_id))
d                2512 tools/lib/bpf/btf.c static int btf_dedup_struct_types(struct btf_dedup *d)
d                2516 tools/lib/bpf/btf.c 	for (i = 1; i <= d->btf->nr_types; i++) {
d                2517 tools/lib/bpf/btf.c 		err = btf_dedup_struct_type(d, i);
d                2548 tools/lib/bpf/btf.c static int btf_dedup_ref_type(struct btf_dedup *d, __u32 type_id)
d                2557 tools/lib/bpf/btf.c 	if (d->map[type_id] == BTF_IN_PROGRESS_ID)
d                2559 tools/lib/bpf/btf.c 	if (d->map[type_id] <= BTF_MAX_NR_TYPES)
d                2560 tools/lib/bpf/btf.c 		return resolve_type_id(d, type_id);
d                2562 tools/lib/bpf/btf.c 	t = d->btf->types[type_id];
d                2563 tools/lib/bpf/btf.c 	d->map[type_id] = BTF_IN_PROGRESS_ID;
d                2572 tools/lib/bpf/btf.c 		ref_type_id = btf_dedup_ref_type(d, t->type);
d                2578 tools/lib/bpf/btf.c 		for_each_dedup_cand(d, hash_entry, h) {
d                2580 tools/lib/bpf/btf.c 			cand = d->btf->types[cand_id];
d                2591 tools/lib/bpf/btf.c 		ref_type_id = btf_dedup_ref_type(d, info->type);
d                2596 tools/lib/bpf/btf.c 		ref_type_id = btf_dedup_ref_type(d, info->index_type);
d                2602 tools/lib/bpf/btf.c 		for_each_dedup_cand(d, hash_entry, h) {
d                2604 tools/lib/bpf/btf.c 			cand = d->btf->types[cand_id];
d                2618 tools/lib/bpf/btf.c 		ref_type_id = btf_dedup_ref_type(d, t->type);
d                2626 tools/lib/bpf/btf.c 			ref_type_id = btf_dedup_ref_type(d, param->type);
d                2634 tools/lib/bpf/btf.c 		for_each_dedup_cand(d, hash_entry, h) {
d                2636 tools/lib/bpf/btf.c 			cand = d->btf->types[cand_id];
d                2649 tools/lib/bpf/btf.c 	d->map[type_id] = new_id;
d                2650 tools/lib/bpf/btf.c 	if (type_id == new_id && btf_dedup_table_add(d, h, type_id))
d                2656 tools/lib/bpf/btf.c static int btf_dedup_ref_types(struct btf_dedup *d)
d                2660 tools/lib/bpf/btf.c 	for (i = 1; i <= d->btf->nr_types; i++) {
d                2661 tools/lib/bpf/btf.c 		err = btf_dedup_ref_type(d, i);
d                2666 tools/lib/bpf/btf.c 	hashmap__free(d->dedup_table);
d                2667 tools/lib/bpf/btf.c 	d->dedup_table = NULL;
d                2682 tools/lib/bpf/btf.c static int btf_dedup_compact_types(struct btf_dedup *d)
d                2690 tools/lib/bpf/btf.c 	d->hypot_map[0] = 0;
d                2691 tools/lib/bpf/btf.c 	for (i = 1; i <= d->btf->nr_types; i++)
d                2692 tools/lib/bpf/btf.c 		d->hypot_map[i] = BTF_UNPROCESSED_ID;
d                2694 tools/lib/bpf/btf.c 	types_start = d->btf->nohdr_data + d->btf->hdr->type_off;
d                2697 tools/lib/bpf/btf.c 	for (i = 1; i <= d->btf->nr_types; i++) {
d                2698 tools/lib/bpf/btf.c 		if (d->map[i] != i)
d                2701 tools/lib/bpf/btf.c 		len = btf_type_size(d->btf->types[i]);
d                2705 tools/lib/bpf/btf.c 		memmove(p, d->btf->types[i], len);
d                2706 tools/lib/bpf/btf.c 		d->hypot_map[i] = next_type_id;
d                2707 tools/lib/bpf/btf.c 		d->btf->types[next_type_id] = (struct btf_type *)p;
d                2713 tools/lib/bpf/btf.c 	d->btf->nr_types = next_type_id - 1;
d                2714 tools/lib/bpf/btf.c 	d->btf->types_size = d->btf->nr_types;
d                2715 tools/lib/bpf/btf.c 	d->btf->hdr->type_len = p - types_start;
d                2716 tools/lib/bpf/btf.c 	new_types = realloc(d->btf->types,
d                2717 tools/lib/bpf/btf.c 			    (1 + d->btf->nr_types) * sizeof(struct btf_type *));
d                2720 tools/lib/bpf/btf.c 	d->btf->types = new_types;
d                2723 tools/lib/bpf/btf.c 	d->btf->hdr->str_off = p - (char *)d->btf->nohdr_data;
d                2724 tools/lib/bpf/btf.c 	memmove(p, d->btf->strings, d->btf->hdr->str_len);
d                2725 tools/lib/bpf/btf.c 	d->btf->strings = p;
d                2726 tools/lib/bpf/btf.c 	p += d->btf->hdr->str_len;
d                2728 tools/lib/bpf/btf.c 	d->btf->data_size = p - (char *)d->btf->data;
d                2738 tools/lib/bpf/btf.c static int btf_dedup_remap_type_id(struct btf_dedup *d, __u32 type_id)
d                2742 tools/lib/bpf/btf.c 	resolved_type_id = resolve_type_id(d, type_id);
d                2743 tools/lib/bpf/btf.c 	new_type_id = d->hypot_map[resolved_type_id];
d                2759 tools/lib/bpf/btf.c static int btf_dedup_remap_type(struct btf_dedup *d, __u32 type_id)
d                2761 tools/lib/bpf/btf.c 	struct btf_type *t = d->btf->types[type_id];
d                2777 tools/lib/bpf/btf.c 		r = btf_dedup_remap_type_id(d, t->type);
d                2786 tools/lib/bpf/btf.c 		r = btf_dedup_remap_type_id(d, arr_info->type);
d                2790 tools/lib/bpf/btf.c 		r = btf_dedup_remap_type_id(d, arr_info->index_type);
d                2803 tools/lib/bpf/btf.c 			r = btf_dedup_remap_type_id(d, member->type);
d                2816 tools/lib/bpf/btf.c 		r = btf_dedup_remap_type_id(d, t->type);
d                2822 tools/lib/bpf/btf.c 			r = btf_dedup_remap_type_id(d, param->type);
d                2836 tools/lib/bpf/btf.c 			r = btf_dedup_remap_type_id(d, var->type);
d                2852 tools/lib/bpf/btf.c static int btf_dedup_remap_types(struct btf_dedup *d)
d                2856 tools/lib/bpf/btf.c 	for (i = 1; i <= d->btf->nr_types; i++) {
d                2857 tools/lib/bpf/btf.c 		r = btf_dedup_remap_type(d, i);
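
The tools/lib/bpf/btf.c entries above are the internals of libbpf's BTF deduplication, driven in the order visible in the driver function: dedup strings, primitive types, struct/union graphs, reference types, then compact the type section and remap type IDs. A hedged caller-side sketch, assuming the three-argument btf__dedup(btf, btf_ext, opts) entry point of this libbpf version; dedup_btf is an illustrative wrapper name:

#include <bpf/btf.h>

/* Deduplicate a BTF object's type section in place. NULL opts selects the
 * defaults; the dedup code above treats both btf_ext and opts as optional. */
static int dedup_btf(struct btf *btf)
{
	return btf__dedup(btf, NULL, NULL);
}
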
d                 124 tools/lib/bpf/btf.h LIBBPF_API void btf_dump__free(struct btf_dump *d);
d                 126 tools/lib/bpf/btf.h LIBBPF_API int btf_dump__dump_type(struct btf_dump *d, __u32 id);
d                 105 tools/lib/bpf/btf_dump.c static const char *btf_name_of(const struct btf_dump *d, __u32 name_off)
d                 107 tools/lib/bpf/btf_dump.c 	return btf__name_by_offset(d->btf, name_off);
d                 110 tools/lib/bpf/btf_dump.c static void btf_dump_printf(const struct btf_dump *d, const char *fmt, ...)
d                 115 tools/lib/bpf/btf_dump.c 	d->printf_fn(d->opts.ctx, fmt, args);
d                 124 tools/lib/bpf/btf_dump.c 	struct btf_dump *d;
d                 127 tools/lib/bpf/btf_dump.c 	d = calloc(1, sizeof(struct btf_dump));
d                 128 tools/lib/bpf/btf_dump.c 	if (!d)
d                 131 tools/lib/bpf/btf_dump.c 	d->btf = btf;
d                 132 tools/lib/bpf/btf_dump.c 	d->btf_ext = btf_ext;
d                 133 tools/lib/bpf/btf_dump.c 	d->printf_fn = printf_fn;
d                 134 tools/lib/bpf/btf_dump.c 	d->opts.ctx = opts ? opts->ctx : NULL;
d                 136 tools/lib/bpf/btf_dump.c 	d->type_names = hashmap__new(str_hash_fn, str_equal_fn, NULL);
d                 137 tools/lib/bpf/btf_dump.c 	if (IS_ERR(d->type_names)) {
d                 138 tools/lib/bpf/btf_dump.c 		err = PTR_ERR(d->type_names);
d                 139 tools/lib/bpf/btf_dump.c 		d->type_names = NULL;
d                 140 tools/lib/bpf/btf_dump.c 		btf_dump__free(d);
d                 143 tools/lib/bpf/btf_dump.c 	d->ident_names = hashmap__new(str_hash_fn, str_equal_fn, NULL);
d                 144 tools/lib/bpf/btf_dump.c 	if (IS_ERR(d->ident_names)) {
d                 145 tools/lib/bpf/btf_dump.c 		err = PTR_ERR(d->ident_names);
d                 146 tools/lib/bpf/btf_dump.c 		d->ident_names = NULL;
d                 147 tools/lib/bpf/btf_dump.c 		btf_dump__free(d);
d                 151 tools/lib/bpf/btf_dump.c 	return d;
d                 154 tools/lib/bpf/btf_dump.c void btf_dump__free(struct btf_dump *d)
d                 158 tools/lib/bpf/btf_dump.c 	if (!d)
d                 161 tools/lib/bpf/btf_dump.c 	free(d->type_states);
d                 162 tools/lib/bpf/btf_dump.c 	if (d->cached_names) {
d                 164 tools/lib/bpf/btf_dump.c 		for (i = 0, cnt = btf__get_nr_types(d->btf); i <= cnt; i++) {
d                 165 tools/lib/bpf/btf_dump.c 			if (d->cached_names[i])
d                 166 tools/lib/bpf/btf_dump.c 				free((void *)d->cached_names[i]);
d                 169 tools/lib/bpf/btf_dump.c 	free(d->cached_names);
d                 170 tools/lib/bpf/btf_dump.c 	free(d->emit_queue);
d                 171 tools/lib/bpf/btf_dump.c 	free(d->decl_stack);
d                 172 tools/lib/bpf/btf_dump.c 	hashmap__free(d->type_names);
d                 173 tools/lib/bpf/btf_dump.c 	hashmap__free(d->ident_names);
d                 175 tools/lib/bpf/btf_dump.c 	free(d);
d                 178 tools/lib/bpf/btf_dump.c static int btf_dump_mark_referenced(struct btf_dump *d);
d                 179 tools/lib/bpf/btf_dump.c static int btf_dump_order_type(struct btf_dump *d, __u32 id, bool through_ptr);
d                 180 tools/lib/bpf/btf_dump.c static void btf_dump_emit_type(struct btf_dump *d, __u32 id, __u32 cont_id);
d                 198 tools/lib/bpf/btf_dump.c int btf_dump__dump_type(struct btf_dump *d, __u32 id)
d                 202 tools/lib/bpf/btf_dump.c 	if (id > btf__get_nr_types(d->btf))
d                 206 tools/lib/bpf/btf_dump.c 	if (!d->type_states) {
d                 207 tools/lib/bpf/btf_dump.c 		d->type_states = calloc(1 + btf__get_nr_types(d->btf),
d                 208 tools/lib/bpf/btf_dump.c 					sizeof(d->type_states[0]));
d                 209 tools/lib/bpf/btf_dump.c 		if (!d->type_states)
d                 211 tools/lib/bpf/btf_dump.c 		d->cached_names = calloc(1 + btf__get_nr_types(d->btf),
d                 212 tools/lib/bpf/btf_dump.c 					 sizeof(d->cached_names[0]));
d                 213 tools/lib/bpf/btf_dump.c 		if (!d->cached_names)
d                 217 tools/lib/bpf/btf_dump.c 		d->type_states[0].order_state = ORDERED;
d                 218 tools/lib/bpf/btf_dump.c 		d->type_states[0].emit_state = EMITTED;
d                 221 tools/lib/bpf/btf_dump.c 		err = btf_dump_mark_referenced(d);
d                 226 tools/lib/bpf/btf_dump.c 	d->emit_queue_cnt = 0;
d                 227 tools/lib/bpf/btf_dump.c 	err = btf_dump_order_type(d, id, false);
d                 231 tools/lib/bpf/btf_dump.c 	for (i = 0; i < d->emit_queue_cnt; i++)
d                 232 tools/lib/bpf/btf_dump.c 		btf_dump_emit_type(d, d->emit_queue[i], 0 /*top-level*/);
d                 249 tools/lib/bpf/btf_dump.c static int btf_dump_mark_referenced(struct btf_dump *d)
d                 251 tools/lib/bpf/btf_dump.c 	int i, j, n = btf__get_nr_types(d->btf);
d                 256 tools/lib/bpf/btf_dump.c 		t = btf__type_by_id(d->btf, i);
d                 272 tools/lib/bpf/btf_dump.c 			d->type_states[t->type].referenced = 1;
d                 278 tools/lib/bpf/btf_dump.c 			d->type_states[a->index_type].referenced = 1;
d                 279 tools/lib/bpf/btf_dump.c 			d->type_states[a->type].referenced = 1;
d                 287 tools/lib/bpf/btf_dump.c 				d->type_states[m->type].referenced = 1;
d                 294 tools/lib/bpf/btf_dump.c 				d->type_states[p->type].referenced = 1;
d                 301 tools/lib/bpf/btf_dump.c 				d->type_states[v->type].referenced = 1;
d                 310 tools/lib/bpf/btf_dump.c static int btf_dump_add_emit_queue_id(struct btf_dump *d, __u32 id)
d                 315 tools/lib/bpf/btf_dump.c 	if (d->emit_queue_cnt >= d->emit_queue_cap) {
d                 316 tools/lib/bpf/btf_dump.c 		new_cap = max(16, d->emit_queue_cap * 3 / 2);
d                 317 tools/lib/bpf/btf_dump.c 		new_queue = realloc(d->emit_queue,
d                 321 tools/lib/bpf/btf_dump.c 		d->emit_queue = new_queue;
d                 322 tools/lib/bpf/btf_dump.c 		d->emit_queue_cap = new_cap;
d                 325 tools/lib/bpf/btf_dump.c 	d->emit_queue[d->emit_queue_cnt++] = id;
d                 403 tools/lib/bpf/btf_dump.c static int btf_dump_order_type(struct btf_dump *d, __u32 id, bool through_ptr)
d                 416 tools/lib/bpf/btf_dump.c 	struct btf_dump_type_aux_state *tstate = &d->type_states[id];
d                 425 tools/lib/bpf/btf_dump.c 	t = btf__type_by_id(d->btf, id);
d                 441 tools/lib/bpf/btf_dump.c 		err = btf_dump_order_type(d, t->type, true);
d                 446 tools/lib/bpf/btf_dump.c 		return btf_dump_order_type(d, btf_array(t)->type, through_ptr);
d                 463 tools/lib/bpf/btf_dump.c 			err = btf_dump_order_type(d, m->type, false);
d                 469 tools/lib/bpf/btf_dump.c 			err = btf_dump_add_emit_queue_id(d, id);
d                 485 tools/lib/bpf/btf_dump.c 			err = btf_dump_add_emit_queue_id(d, id);
d                 495 tools/lib/bpf/btf_dump.c 		is_strong = btf_dump_order_type(d, t->type, through_ptr);
d                 504 tools/lib/bpf/btf_dump.c 		err = btf_dump_add_emit_queue_id(d, id);
d                 508 tools/lib/bpf/btf_dump.c 		d->type_states[id].order_state = ORDERED;
d                 514 tools/lib/bpf/btf_dump.c 		return btf_dump_order_type(d, t->type, through_ptr);
d                 520 tools/lib/bpf/btf_dump.c 		err = btf_dump_order_type(d, t->type, through_ptr);
d                 527 tools/lib/bpf/btf_dump.c 			err = btf_dump_order_type(d, p->type, through_ptr);
d                 538 tools/lib/bpf/btf_dump.c 		d->type_states[id].order_state = ORDERED;
d                 546 tools/lib/bpf/btf_dump.c static void btf_dump_emit_struct_fwd(struct btf_dump *d, __u32 id,
d                 548 tools/lib/bpf/btf_dump.c static void btf_dump_emit_struct_def(struct btf_dump *d, __u32 id,
d                 551 tools/lib/bpf/btf_dump.c static void btf_dump_emit_enum_fwd(struct btf_dump *d, __u32 id,
d                 553 tools/lib/bpf/btf_dump.c static void btf_dump_emit_enum_def(struct btf_dump *d, __u32 id,
d                 556 tools/lib/bpf/btf_dump.c static void btf_dump_emit_fwd_def(struct btf_dump *d, __u32 id,
d                 559 tools/lib/bpf/btf_dump.c static void btf_dump_emit_typedef_def(struct btf_dump *d, __u32 id,
d                 568 tools/lib/bpf/btf_dump.c static void btf_dump_emit_type_decl(struct btf_dump *d, __u32 id,
d                 570 tools/lib/bpf/btf_dump.c static void btf_dump_emit_type_chain(struct btf_dump *d,
d                 574 tools/lib/bpf/btf_dump.c static const char *btf_dump_type_name(struct btf_dump *d, __u32 id);
d                 575 tools/lib/bpf/btf_dump.c static const char *btf_dump_ident_name(struct btf_dump *d, __u32 id);
d                 576 tools/lib/bpf/btf_dump.c static size_t btf_dump_name_dups(struct btf_dump *d, struct hashmap *name_map,
d                 579 tools/lib/bpf/btf_dump.c static bool btf_dump_is_blacklisted(struct btf_dump *d, __u32 id)
d                 581 tools/lib/bpf/btf_dump.c 	const struct btf_type *t = btf__type_by_id(d->btf, id);
d                 591 tools/lib/bpf/btf_dump.c 	return strcmp(btf_name_of(d, t->name_off), "__builtin_va_list") == 0;
d                 612 tools/lib/bpf/btf_dump.c static void btf_dump_emit_type(struct btf_dump *d, __u32 id, __u32 cont_id)
d                 614 tools/lib/bpf/btf_dump.c 	struct btf_dump_type_aux_state *tstate = &d->type_states[id];
d                 622 tools/lib/bpf/btf_dump.c 	t = btf__type_by_id(d->btf, id);
d                 643 tools/lib/bpf/btf_dump.c 			btf_dump_emit_struct_fwd(d, id, t);
d                 644 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, ";\n\n");
d                 653 tools/lib/bpf/btf_dump.c 			if (!btf_dump_is_blacklisted(d, id)) {
d                 654 tools/lib/bpf/btf_dump.c 				btf_dump_emit_typedef_def(d, id, t, 0);
d                 655 tools/lib/bpf/btf_dump.c 				btf_dump_printf(d, ";\n\n");
d                 672 tools/lib/bpf/btf_dump.c 			btf_dump_emit_enum_def(d, id, t, 0);
d                 673 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, ";\n\n");
d                 681 tools/lib/bpf/btf_dump.c 		btf_dump_emit_type(d, t->type, cont_id);
d                 684 tools/lib/bpf/btf_dump.c 		btf_dump_emit_type(d, btf_array(t)->type, cont_id);
d                 687 tools/lib/bpf/btf_dump.c 		btf_dump_emit_fwd_def(d, id, t);
d                 688 tools/lib/bpf/btf_dump.c 		btf_dump_printf(d, ";\n\n");
d                 693 tools/lib/bpf/btf_dump.c 		btf_dump_emit_type(d, t->type, id);
d                 701 tools/lib/bpf/btf_dump.c 		if (!tstate->fwd_emitted && !btf_dump_is_blacklisted(d, id)) {
d                 702 tools/lib/bpf/btf_dump.c 			btf_dump_emit_typedef_def(d, id, t, 0);
d                 703 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, ";\n\n");
d                 724 tools/lib/bpf/btf_dump.c 				btf_dump_emit_type(d, m->type, new_cont_id);
d                 726 tools/lib/bpf/btf_dump.c 			btf_dump_emit_struct_fwd(d, id, t);
d                 727 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, ";\n\n");
d                 732 tools/lib/bpf/btf_dump.c 			btf_dump_emit_struct_def(d, id, t, 0);
d                 733 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, ";\n\n");
d                 744 tools/lib/bpf/btf_dump.c 		btf_dump_emit_type(d, t->type, cont_id);
d                 746 tools/lib/bpf/btf_dump.c 			btf_dump_emit_type(d, p->type, cont_id);
d                 824 tools/lib/bpf/btf_dump.c static void btf_dump_emit_bit_padding(const struct btf_dump *d,
d                 855 tools/lib/bpf/btf_dump.c 		btf_dump_printf(d, "\n%s%s: %d;", pfx(lvl), pad_type, pad_bits);
d                 860 tools/lib/bpf/btf_dump.c static void btf_dump_emit_struct_fwd(struct btf_dump *d, __u32 id,
d                 863 tools/lib/bpf/btf_dump.c 	btf_dump_printf(d, "%s %s",
d                 865 tools/lib/bpf/btf_dump.c 			btf_dump_type_name(d, id));
d                 868 tools/lib/bpf/btf_dump.c static void btf_dump_emit_struct_def(struct btf_dump *d,
d                 878 tools/lib/bpf/btf_dump.c 	packed = is_struct ? btf_is_struct_packed(d->btf, id, t) : 0;
d                 880 tools/lib/bpf/btf_dump.c 	btf_dump_printf(d, "%s%s%s {",
d                 883 tools/lib/bpf/btf_dump.c 			btf_dump_type_name(d, id));
d                 889 tools/lib/bpf/btf_dump.c 		fname = btf_name_of(d, m->name_off);
d                 892 tools/lib/bpf/btf_dump.c 		align = packed ? 1 : btf_align_of(d->btf, m->type);
d                 894 tools/lib/bpf/btf_dump.c 		btf_dump_emit_bit_padding(d, off, m_off, m_sz, align, lvl + 1);
d                 895 tools/lib/bpf/btf_dump.c 		btf_dump_printf(d, "\n%s", pfx(lvl + 1));
d                 896 tools/lib/bpf/btf_dump.c 		btf_dump_emit_type_decl(d, m->type, fname, lvl + 1);
d                 899 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, ": %d", m_sz);
d                 902 tools/lib/bpf/btf_dump.c 			m_sz = max(0, btf__resolve_size(d->btf, m->type));
d                 905 tools/lib/bpf/btf_dump.c 		btf_dump_printf(d, ";");
d                 910 tools/lib/bpf/btf_dump.c 		align = packed ? 1 : btf_align_of(d->btf, id);
d                 911 tools/lib/bpf/btf_dump.c 		btf_dump_emit_bit_padding(d, off, t->size * 8, 0, align,
d                 916 tools/lib/bpf/btf_dump.c 		btf_dump_printf(d, "\n");
d                 917 tools/lib/bpf/btf_dump.c 	btf_dump_printf(d, "%s}", pfx(lvl));
d                 919 tools/lib/bpf/btf_dump.c 		btf_dump_printf(d, " __attribute__((packed))");
d                 922 tools/lib/bpf/btf_dump.c static void btf_dump_emit_enum_fwd(struct btf_dump *d, __u32 id,
d                 925 tools/lib/bpf/btf_dump.c 	btf_dump_printf(d, "enum %s", btf_dump_type_name(d, id));
d                 928 tools/lib/bpf/btf_dump.c static void btf_dump_emit_enum_def(struct btf_dump *d, __u32 id,
d                 938 tools/lib/bpf/btf_dump.c 	btf_dump_printf(d, "enum%s%s",
d                 940 tools/lib/bpf/btf_dump.c 			btf_dump_type_name(d, id));
d                 943 tools/lib/bpf/btf_dump.c 		btf_dump_printf(d, " {");
d                 945 tools/lib/bpf/btf_dump.c 			name = btf_name_of(d, v->name_off);
d                 947 tools/lib/bpf/btf_dump.c 			dup_cnt = btf_dump_name_dups(d, d->ident_names, name);
d                 949 tools/lib/bpf/btf_dump.c 				btf_dump_printf(d, "\n%s%s___%zu = %d,",
d                 953 tools/lib/bpf/btf_dump.c 				btf_dump_printf(d, "\n%s%s = %d,",
d                 958 tools/lib/bpf/btf_dump.c 		btf_dump_printf(d, "\n%s}", pfx(lvl));
d                 962 tools/lib/bpf/btf_dump.c static void btf_dump_emit_fwd_def(struct btf_dump *d, __u32 id,
d                 965 tools/lib/bpf/btf_dump.c 	const char *name = btf_dump_type_name(d, id);
d                 968 tools/lib/bpf/btf_dump.c 		btf_dump_printf(d, "union %s", name);
d                 970 tools/lib/bpf/btf_dump.c 		btf_dump_printf(d, "struct %s", name);
d                 973 tools/lib/bpf/btf_dump.c static void btf_dump_emit_typedef_def(struct btf_dump *d, __u32 id,
d                 976 tools/lib/bpf/btf_dump.c 	const char *name = btf_dump_ident_name(d, id);
d                 978 tools/lib/bpf/btf_dump.c 	btf_dump_printf(d, "typedef ");
d                 979 tools/lib/bpf/btf_dump.c 	btf_dump_emit_type_decl(d, t->type, name, lvl);
d                 982 tools/lib/bpf/btf_dump.c static int btf_dump_push_decl_stack_id(struct btf_dump *d, __u32 id)
d                 987 tools/lib/bpf/btf_dump.c 	if (d->decl_stack_cnt >= d->decl_stack_cap) {
d                 988 tools/lib/bpf/btf_dump.c 		new_cap = max(16, d->decl_stack_cap * 3 / 2);
d                 989 tools/lib/bpf/btf_dump.c 		new_stack = realloc(d->decl_stack,
d                 993 tools/lib/bpf/btf_dump.c 		d->decl_stack = new_stack;
d                 994 tools/lib/bpf/btf_dump.c 		d->decl_stack_cap = new_cap;
d                 997 tools/lib/bpf/btf_dump.c 	d->decl_stack[d->decl_stack_cnt++] = id;
d                1043 tools/lib/bpf/btf_dump.c static void btf_dump_emit_type_decl(struct btf_dump *d, __u32 id,
d                1050 tools/lib/bpf/btf_dump.c 	stack_start = d->decl_stack_cnt;
d                1052 tools/lib/bpf/btf_dump.c 		err = btf_dump_push_decl_stack_id(d, id);
d                1060 tools/lib/bpf/btf_dump.c 			d->decl_stack_cnt = stack_start;
d                1068 tools/lib/bpf/btf_dump.c 		t = btf__type_by_id(d->btf, id);
d                1104 tools/lib/bpf/btf_dump.c 	decl_stack.ids = d->decl_stack + stack_start;
d                1105 tools/lib/bpf/btf_dump.c 	decl_stack.cnt = d->decl_stack_cnt - stack_start;
d                1106 tools/lib/bpf/btf_dump.c 	btf_dump_emit_type_chain(d, &decl_stack, fname, lvl);
d                1115 tools/lib/bpf/btf_dump.c 	d->decl_stack_cnt = stack_start;
d                1118 tools/lib/bpf/btf_dump.c static void btf_dump_emit_mods(struct btf_dump *d, struct id_stack *decl_stack)
d                1125 tools/lib/bpf/btf_dump.c 		t = btf__type_by_id(d->btf, id);
d                1129 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, "volatile ");
d                1132 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, "const ");
d                1135 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, "restrict ");
d                1144 tools/lib/bpf/btf_dump.c static void btf_dump_emit_name(const struct btf_dump *d,
d                1149 tools/lib/bpf/btf_dump.c 	btf_dump_printf(d, "%s%s", separate ? " " : "", name);
d                1152 tools/lib/bpf/btf_dump.c static void btf_dump_emit_type_chain(struct btf_dump *d,
d                1175 tools/lib/bpf/btf_dump.c 			btf_dump_emit_mods(d, decls);
d                1176 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, "void");
d                1181 tools/lib/bpf/btf_dump.c 		t = btf__type_by_id(d->btf, id);
d                1186 tools/lib/bpf/btf_dump.c 			btf_dump_emit_mods(d, decls);
d                1187 tools/lib/bpf/btf_dump.c 			name = btf_name_of(d, t->name_off);
d                1188 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, "%s", name);
d                1192 tools/lib/bpf/btf_dump.c 			btf_dump_emit_mods(d, decls);
d                1195 tools/lib/bpf/btf_dump.c 				btf_dump_emit_struct_def(d, id, t, lvl);
d                1197 tools/lib/bpf/btf_dump.c 				btf_dump_emit_struct_fwd(d, id, t);
d                1200 tools/lib/bpf/btf_dump.c 			btf_dump_emit_mods(d, decls);
d                1203 tools/lib/bpf/btf_dump.c 				btf_dump_emit_enum_def(d, id, t, lvl);
d                1205 tools/lib/bpf/btf_dump.c 				btf_dump_emit_enum_fwd(d, id, t);
d                1208 tools/lib/bpf/btf_dump.c 			btf_dump_emit_mods(d, decls);
d                1209 tools/lib/bpf/btf_dump.c 			btf_dump_emit_fwd_def(d, id, t);
d                1212 tools/lib/bpf/btf_dump.c 			btf_dump_emit_mods(d, decls);
d                1213 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, "%s", btf_dump_ident_name(d, id));
d                1216 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, "%s", last_was_ptr ? "*" : " *");
d                1219 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, " volatile");
d                1222 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, " const");
d                1225 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, " restrict");
d                1244 tools/lib/bpf/btf_dump.c 				next_t = btf__type_by_id(d->btf, next_id);
d                1252 tools/lib/bpf/btf_dump.c 				btf_dump_emit_name(d, fname, last_was_ptr);
d                1253 tools/lib/bpf/btf_dump.c 				btf_dump_printf(d, "[%u]", a->nelems);
d                1258 tools/lib/bpf/btf_dump.c 			next_t = btf__type_by_id(d->btf, next_id);
d                1262 tools/lib/bpf/btf_dump.c 				btf_dump_printf(d, " ");
d                1265 tools/lib/bpf/btf_dump.c 				btf_dump_printf(d, "(");
d                1266 tools/lib/bpf/btf_dump.c 			btf_dump_emit_type_chain(d, decls, fname, lvl);
d                1268 tools/lib/bpf/btf_dump.c 				btf_dump_printf(d, ")");
d                1269 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, "[%u]", a->nelems);
d                1277 tools/lib/bpf/btf_dump.c 			btf_dump_emit_mods(d, decls);
d                1279 tools/lib/bpf/btf_dump.c 				btf_dump_printf(d, " (");
d                1280 tools/lib/bpf/btf_dump.c 				btf_dump_emit_type_chain(d, decls, fname, lvl);
d                1281 tools/lib/bpf/btf_dump.c 				btf_dump_printf(d, ")");
d                1283 tools/lib/bpf/btf_dump.c 				btf_dump_emit_name(d, fname, last_was_ptr);
d                1285 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, "(");
d                1293 tools/lib/bpf/btf_dump.c 				btf_dump_printf(d, ")");
d                1299 tools/lib/bpf/btf_dump.c 					btf_dump_printf(d, ", ");
d                1303 tools/lib/bpf/btf_dump.c 					btf_dump_printf(d, "...");
d                1307 tools/lib/bpf/btf_dump.c 				name = btf_name_of(d, p->name_off);
d                1308 tools/lib/bpf/btf_dump.c 				btf_dump_emit_type_decl(d, p->type, name, lvl);
d                1311 tools/lib/bpf/btf_dump.c 			btf_dump_printf(d, ")");
d                1323 tools/lib/bpf/btf_dump.c 	btf_dump_emit_name(d, fname, last_was_ptr);
d                1327 tools/lib/bpf/btf_dump.c static size_t btf_dump_name_dups(struct btf_dump *d, struct hashmap *name_map,
d                1339 tools/lib/bpf/btf_dump.c static const char *btf_dump_resolve_name(struct btf_dump *d, __u32 id,
d                1342 tools/lib/bpf/btf_dump.c 	struct btf_dump_type_aux_state *s = &d->type_states[id];
d                1343 tools/lib/bpf/btf_dump.c 	const struct btf_type *t = btf__type_by_id(d->btf, id);
d                1344 tools/lib/bpf/btf_dump.c 	const char *orig_name = btf_name_of(d, t->name_off);
d                1345 tools/lib/bpf/btf_dump.c 	const char **cached_name = &d->cached_names[id];
d                1354 tools/lib/bpf/btf_dump.c 	dup_cnt = btf_dump_name_dups(d, name_map, orig_name);
d                1367 tools/lib/bpf/btf_dump.c static const char *btf_dump_type_name(struct btf_dump *d, __u32 id)
d                1369 tools/lib/bpf/btf_dump.c 	return btf_dump_resolve_name(d, id, d->type_names);
d                1372 tools/lib/bpf/btf_dump.c static const char *btf_dump_ident_name(struct btf_dump *d, __u32 id)
d                1374 tools/lib/bpf/btf_dump.c 	return btf_dump_resolve_name(d, id, d->ident_names);
d                   7 tools/lib/lockdep/tests/ABBCCDDA.c 	pthread_mutex_t a, b, c, d;
d                  12 tools/lib/lockdep/tests/ABBCCDDA.c 	pthread_mutex_init(&d, NULL);
d                  16 tools/lib/lockdep/tests/ABBCCDDA.c 	LOCK_UNLOCK_2(c, d);
d                  17 tools/lib/lockdep/tests/ABBCCDDA.c 	LOCK_UNLOCK_2(d, a);
d                  19 tools/lib/lockdep/tests/ABBCCDDA.c 	pthread_mutex_destroy(&d);
d                   7 tools/lib/lockdep/tests/ABCDBCDA.c 	pthread_mutex_t a, b, c, d;
d                  12 tools/lib/lockdep/tests/ABCDBCDA.c 	pthread_mutex_init(&d, NULL);
d                  15 tools/lib/lockdep/tests/ABCDBCDA.c 	LOCK_UNLOCK_2(c, d);
d                  17 tools/lib/lockdep/tests/ABCDBCDA.c 	LOCK_UNLOCK_2(d, a);
d                  19 tools/lib/lockdep/tests/ABCDBCDA.c 	pthread_mutex_destroy(&d);
d                   7 tools/lib/lockdep/tests/ABCDBDDA.c 	pthread_mutex_t a, b, c, d;
d                  12 tools/lib/lockdep/tests/ABCDBDDA.c 	pthread_mutex_init(&d, NULL);
d                  15 tools/lib/lockdep/tests/ABCDBDDA.c 	LOCK_UNLOCK_2(c, d);
d                  16 tools/lib/lockdep/tests/ABCDBDDA.c 	LOCK_UNLOCK_2(b, d);
d                  17 tools/lib/lockdep/tests/ABCDBDDA.c 	LOCK_UNLOCK_2(d, a);
d                  19 tools/lib/lockdep/tests/ABCDBDDA.c 	pthread_mutex_destroy(&d);
d                 140 tools/lib/subcmd/parse-options.h #define OPT_STRING_OPTARG(s, l, v, a, h, d) \
d                 143 tools/lib/subcmd/parse-options.h 	  .flags = PARSE_OPT_OPTARG, .defval = (intptr_t)(d) }
d                 144 tools/lib/subcmd/parse-options.h #define OPT_STRING_OPTARG_SET(s, l, v, os, a, h, d) \
d                 147 tools/lib/subcmd/parse-options.h 	  .flags = PARSE_OPT_OPTARG, .defval = (intptr_t)(d), \
d                 156 tools/lib/subcmd/parse-options.h #define OPT_CALLBACK_DEFAULT(s, l, v, a, h, f, d) \
d                 157 tools/lib/subcmd/parse-options.h 	{ .type = OPTION_CALLBACK, .short_name = (s), .long_name = (l), .value = (v), .argh = (a), .help = (h), .callback = (f), .defval = (intptr_t)d, .flags = PARSE_OPT_LASTARG_DEFAULT }
d                 158 tools/lib/subcmd/parse-options.h #define OPT_CALLBACK_DEFAULT_NOOPT(s, l, v, a, h, f, d) \
d                 160 tools/lib/subcmd/parse-options.h 	.value = (v), .arg = (a), .help = (h), .callback = (f), .defval = (intptr_t)d,\
d                 162 tools/lib/subcmd/parse-options.h #define OPT_CALLBACK_OPTARG(s, l, v, d, a, h, f) \
d                 165 tools/lib/subcmd/parse-options.h 	  .flags = PARSE_OPT_OPTARG, .data = (d) }
d                 193 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c static void decoding_failed(struct test_data *d)
d                 197 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	dump_packet(&d->packet, d->bytes, d->len);
d                 200 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c static int fail(struct test_data *d, struct intel_pt_pkt *packet, int len,
d                 203 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	decoding_failed(d);
d                 205 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	if (len != d->len)
d                 207 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 			 d->len, len);
d                 209 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	if (packet->type != d->packet.type)
d                 211 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 			 d->packet.type, packet->type);
d                 213 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	if (packet->count != d->packet.count)
d                 215 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 			 d->packet.count, packet->count);
d                 217 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	if (packet->payload != d->packet.payload)
d                 219 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 			 (unsigned long long)d->packet.payload,
d                 222 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	if (new_ctx != d->new_ctx)
d                 224 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 			 d->new_ctx, new_ctx);
d                 229 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c static int test_ctx_unchanged(struct test_data *d, struct intel_pt_pkt *packet,
d                 237 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 		decoding_failed(d);
d                 245 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c static int test_one(struct test_data *d)
d                 248 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	enum intel_pt_pkt_ctx ctx = d->ctx;
d                 254 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	ret = intel_pt_get_packet(d->bytes, d->len, &packet, &ctx);
d                 256 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 		decoding_failed(d);
d                 262 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	if (d->ctx_unchanged) {
d                 265 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 		err = test_ctx_unchanged(d, &packet, INTEL_PT_NO_CTX);
d                 268 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 		err = test_ctx_unchanged(d, &packet, INTEL_PT_BLK_4_CTX);
d                 271 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 		err = test_ctx_unchanged(d, &packet, INTEL_PT_BLK_8_CTX);
d                 277 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	if (ret != d->len || packet.type != d->packet.type ||
d                 278 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	    packet.count != d->packet.count ||
d                 279 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	    packet.payload != d->packet.payload || ctx != d->new_ctx)
d                 280 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 		return fail(d, &packet, ret, ctx);
d                 283 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	ret = dump_packet(&d->packet, d->bytes, d->len);
d                 294 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	struct test_data *d = data;
d                 297 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	for (d = data; d->len; d++) {
d                 298 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 		ret = test_one(d);
d                  15 tools/perf/arch/x86/util/header.c       unsigned int *d)
d                  22 tools/perf/arch/x86/util/header.c 			"=d" (*d)
d                  29 tools/perf/arch/x86/util/header.c 	unsigned int a, b, c, d, lvl;
d                  34 tools/perf/arch/x86/util/header.c 	cpuid(0, &lvl, &b, &c, &d);
d                  36 tools/perf/arch/x86/util/header.c 	strncpy(&vendor[4], (char *)(&d), 4);
d                  41 tools/perf/arch/x86/util/header.c 		cpuid(1, &a, &b, &c, &d);
d                 307 tools/perf/arch/x86/util/intel-pt.c static void intel_pt_tsc_ctc_ratio(u32 *n, u32 *d)
d                 313 tools/perf/arch/x86/util/intel-pt.c 	*d = eax;
d                 772 tools/perf/bench/numa.c 	u64 *d0, *d, *d1;
d                 818 tools/perf/bench/numa.c 		d  = data + off + 1;
d                 823 tools/perf/bench/numa.c 			if (unlikely(d >= d1))
d                 824 tools/perf/bench/numa.c 				d = data;
d                 825 tools/perf/bench/numa.c 			if (unlikely(d == d0))
d                 828 tools/perf/bench/numa.c 			val = access_data(d, val);
d                 830 tools/perf/bench/numa.c 			d++;
d                 836 tools/perf/bench/numa.c 		d  = data + off - 1;
d                 841 tools/perf/bench/numa.c 			if (unlikely(d < data))
d                 842 tools/perf/bench/numa.c 				d = data + words-1;
d                 843 tools/perf/bench/numa.c 			if (unlikely(d == d0))
d                 846 tools/perf/bench/numa.c 			val = access_data(d, val);
d                 848 tools/perf/bench/numa.c 			d--;
d                  86 tools/perf/builtin-buildid-cache.c 	DIR *d;
d                  88 tools/perf/builtin-buildid-cache.c 	d = opendir(to_dir);
d                  89 tools/perf/builtin-buildid-cache.c 	if (!d)
d                  95 tools/perf/builtin-buildid-cache.c 		dent = readdir(d);
d                 112 tools/perf/builtin-buildid-cache.c 	closedir(d);
d                  78 tools/perf/builtin-diff.c #define data__for_each_file_start(i, d, s)	\
d                  79 tools/perf/builtin-diff.c 	for (i = s, d = &data__files[s];	\
d                  81 tools/perf/builtin-diff.c 	     i++, d = &data__files[i])
d                  83 tools/perf/builtin-diff.c #define data__for_each_file(i, d) data__for_each_file_start(i, d, 0)
d                  84 tools/perf/builtin-diff.c #define data__for_each_file_new(i, d) data__for_each_file_start(i, d, 1)
d                 484 tools/perf/builtin-diff.c 	struct data__file *d = container_of(ptr, struct data__file, fmt);
d                 486 tools/perf/builtin-diff.c 	return d;
d                 490 tools/perf/builtin-diff.c get_pair_data(struct hist_entry *he, struct data__file *d)
d                 496 tools/perf/builtin-diff.c 			if (pair->hists == d->hists)
d                 506 tools/perf/builtin-diff.c 	struct data__file *d = fmt_to_data_file(&dfmt->fmt);
d                 508 tools/perf/builtin-diff.c 	return get_pair_data(he, d);
d                 744 tools/perf/builtin-diff.c 		struct data__file *d;
d                 753 tools/perf/builtin-diff.c 		data__for_each_file_new(i, d) {
d                 754 tools/perf/builtin-diff.c 			pair = get_pair_data(he, d);
d                 921 tools/perf/builtin-diff.c 	struct data__file *d = fmt_to_data_file(fmt);
d                 923 tools/perf/builtin-diff.c 	return hist_entry__cmp_compute(right, left, COMPUTE_DELTA, d->idx);
d                 930 tools/perf/builtin-diff.c 	struct data__file *d = fmt_to_data_file(fmt);
d                 932 tools/perf/builtin-diff.c 	return hist_entry__cmp_compute(right, left, COMPUTE_DELTA_ABS, d->idx);
d                 939 tools/perf/builtin-diff.c 	struct data__file *d = fmt_to_data_file(fmt);
d                 941 tools/perf/builtin-diff.c 	return hist_entry__cmp_compute(right, left, COMPUTE_RATIO, d->idx);
d                 948 tools/perf/builtin-diff.c 	struct data__file *d = fmt_to_data_file(fmt);
d                 950 tools/perf/builtin-diff.c 	return hist_entry__cmp_compute(right, left, COMPUTE_WEIGHTED_DIFF, d->idx);
d                1002 tools/perf/builtin-diff.c 	struct data__file *d;
d                1007 tools/perf/builtin-diff.c 	data__for_each_file(i, d)
d                1009 tools/perf/builtin-diff.c 			d->idx, d->data.path,
d                1010 tools/perf/builtin-diff.c 			!d->idx ? "(Baseline)" : "");
d                1023 tools/perf/builtin-diff.c 		struct data__file *d;
d                1026 tools/perf/builtin-diff.c 		data__for_each_file_new(i, d) {
d                1027 tools/perf/builtin-diff.c 			struct evlist *evlist = d->session->evlist;
d                1036 tools/perf/builtin-diff.c 			d->hists = hists;
d                1061 tools/perf/builtin-diff.c static void data__free(struct data__file *d)
d                1066 tools/perf/builtin-diff.c 		struct diff_hpp_fmt *fmt = &d->fmt[col];
d                1086 tools/perf/builtin-diff.c static int parse_absolute_time(struct data__file *d, char **pstr)
d                1110 tools/perf/builtin-diff.c 	ret = perf_time__parse_for_ranges(*pstr, d->session,
d                1125 tools/perf/builtin-diff.c static int parse_percent_time(struct data__file *d)
d                1129 tools/perf/builtin-diff.c 	ret = perf_time__parse_for_ranges(pdiff.time_str, d->session,
d                1136 tools/perf/builtin-diff.c static int parse_time_str(struct data__file *d, char *abstime_ostr,
d                1142 tools/perf/builtin-diff.c 		ret = parse_absolute_time(d, pabstime_tmp);
d                1144 tools/perf/builtin-diff.c 		ret = parse_percent_time(d);
d                1151 tools/perf/builtin-diff.c 	struct data__file *d;
d                1155 tools/perf/builtin-diff.c 	data__for_each_file(i, d) {
d                1156 tools/perf/builtin-diff.c 		d->session = perf_session__new(&d->data, false, &pdiff.tool);
d                1157 tools/perf/builtin-diff.c 		if (IS_ERR(d->session)) {
d                1158 tools/perf/builtin-diff.c 			pr_err("Failed to open %s\n", d->data.path);
d                1159 tools/perf/builtin-diff.c 			return PTR_ERR(d->session);
d                1162 tools/perf/builtin-diff.c 		has_br_stack = perf_header__has_feat(&d->session->header,
d                1164 tools/perf/builtin-diff.c 		perf_session__delete(d->session);
d                1176 tools/perf/builtin-diff.c 	struct data__file *d;
d                1187 tools/perf/builtin-diff.c 	data__for_each_file(i, d) {
d                1188 tools/perf/builtin-diff.c 		d->session = perf_session__new(&d->data, false, &pdiff.tool);
d                1189 tools/perf/builtin-diff.c 		if (IS_ERR(d->session)) {
d                1190 tools/perf/builtin-diff.c 			ret = PTR_ERR(d->session);
d                1191 tools/perf/builtin-diff.c 			pr_err("Failed to open %s\n", d->data.path);
d                1196 tools/perf/builtin-diff.c 			ret = parse_time_str(d, abstime_ostr, &abstime_tmp);
d                1202 tools/perf/builtin-diff.c 			ret = perf_session__cpu_bitmap(d->session, cpu_list,
d                1208 tools/perf/builtin-diff.c 		ret = perf_session__process_events(d->session);
d                1210 tools/perf/builtin-diff.c 			pr_err("Failed to process %s\n", d->data.path);
d                1214 tools/perf/builtin-diff.c 		perf_evlist__collapse_resort(d->session->evlist);
d                1223 tools/perf/builtin-diff.c 	data__for_each_file(i, d) {
d                1224 tools/perf/builtin-diff.c 		perf_session__delete(d->session);
d                1225 tools/perf/builtin-diff.c 		data__free(d);
d                1599 tools/perf/builtin-diff.c static void init_header(struct data__file *d, struct diff_hpp_fmt *dfmt)
d                1615 tools/perf/builtin-diff.c 		scnprintf(buf, MAX_HEADER_NAME, "%s/%d", header, d->idx);
d                1631 tools/perf/builtin-diff.c static void data__hpp_register(struct data__file *d, int idx)
d                1633 tools/perf/builtin-diff.c 	struct diff_hpp_fmt *dfmt = &d->fmt[idx];
d                1675 tools/perf/builtin-diff.c 	init_header(d, dfmt);
d                1682 tools/perf/builtin-diff.c 	struct data__file *d;
d                1686 tools/perf/builtin-diff.c 	data__for_each_file(i, d) {
d                1696 tools/perf/builtin-diff.c 		data__hpp_register(d, i ? compute_2_hpp[compute] :
d                1707 tools/perf/builtin-diff.c 			data__hpp_register(d, PERF_HPP_DIFF__FORMULA);
d                1710 tools/perf/builtin-diff.c 			data__hpp_register(d, i ? PERF_HPP_DIFF__PERIOD :
d                1767 tools/perf/builtin-diff.c 	struct data__file *d;
d                1798 tools/perf/builtin-diff.c 	data__for_each_file(i, d) {
d                1799 tools/perf/builtin-diff.c 		struct perf_data *data = &d->data;
d                1805 tools/perf/builtin-diff.c 		d->idx  = i;
d                 879 tools/perf/pmu-events/jevents.c 	DIR *d;
d                 883 tools/perf/pmu-events/jevents.c 	d = opendir(fpath);
d                 884 tools/perf/pmu-events/jevents.c 	if (!d)
d                 887 tools/perf/pmu-events/jevents.c 	while ((dir = readdir(d)) != NULL) {
d                 909 tools/perf/pmu-events/jevents.c 	closedir(d);
d                 165 tools/perf/tests/attr.c static int run_dir(const char *d, const char *perf)
d                 175 tools/perf/tests/attr.c 		  d, d, perf, vcnt, v);
d                 294 tools/perf/tests/code-reading.c 		size_t d;
d                 296 tools/perf/tests/code-reading.c 		for (d = 0; d < state->done_cnt; d++) {
d                 297 tools/perf/tests/code-reading.c 			if (state->done[d] == al.map->start) {
d                  70 tools/perf/tests/time-utils-test.c static bool test__perf_time__parse_for_ranges(struct test_data *d)
d                  73 tools/perf/tests/time-utils-test.c 		.first_sample_time = d->first,
d                  74 tools/perf/tests/time-utils-test.c 		.last_sample_time = d->last,
d                  82 tools/perf/tests/time-utils-test.c 	pr_debug("\nperf_time__parse_for_ranges(\"%s\")\n", d->str);
d                  84 tools/perf/tests/time-utils-test.c 	if (strchr(d->str, '%'))
d                  86 tools/perf/tests/time-utils-test.c 			 d->first, d->last);
d                  88 tools/perf/tests/time-utils-test.c 	err = perf_time__parse_for_ranges(d->str, &session, &ptime, &range_size,
d                  95 tools/perf/tests/time-utils-test.c 	if (range_size < d->num || range_num != d->num) {
d                  97 tools/perf/tests/time-utils-test.c 			 range_size, range_num, d->num);
d                 101 tools/perf/tests/time-utils-test.c 	for (i = 0; i < d->num; i++) {
d                 102 tools/perf/tests/time-utils-test.c 		if (ptime[i].start != d->ptime[i].start ||
d                 103 tools/perf/tests/time-utils-test.c 		    ptime[i].end != d->ptime[i].end) {
d                 105 tools/perf/tests/time-utils-test.c 				 i, d->ptime[i].start, d->ptime[i].end);
d                 110 tools/perf/tests/time-utils-test.c 	if (perf_time__ranges_skip_sample(ptime, d->num, 0)) {
d                 116 tools/perf/tests/time-utils-test.c 		if (d->skip[i] &&
d                 117 tools/perf/tests/time-utils-test.c 		    !perf_time__ranges_skip_sample(ptime, d->num, d->skip[i])) {
d                 118 tools/perf/tests/time-utils-test.c 			pr_debug("failed to skip %" PRIu64 "\n", d->skip[i]);
d                 121 tools/perf/tests/time-utils-test.c 		if (d->noskip[i] &&
d                 122 tools/perf/tests/time-utils-test.c 		    perf_time__ranges_skip_sample(ptime, d->num, d->noskip[i])) {
d                 123 tools/perf/tests/time-utils-test.c 			pr_debug("failed to keep %" PRIu64 "\n", d->noskip[i]);
d                 160 tools/perf/tests/time-utils-test.c 		struct test_data d = {
d                 168 tools/perf/tests/time-utils-test.c 		pass &= test__perf_time__parse_for_ranges(&d);
d                 175 tools/perf/tests/time-utils-test.c 		struct test_data d = {
d                 185 tools/perf/tests/time-utils-test.c 		pass &= test__perf_time__parse_for_ranges(&d);
d                 190 tools/perf/tests/time-utils-test.c 		struct test_data d = {
d                 200 tools/perf/tests/time-utils-test.c 		pass &= test__perf_time__parse_for_ranges(&d);
d                 205 tools/perf/tests/time-utils-test.c 		struct test_data d = {
d                 215 tools/perf/tests/time-utils-test.c 		pass &= test__perf_time__parse_for_ranges(&d);
d                 220 tools/perf/tests/time-utils-test.c 		struct test_data d = {
d                 230 tools/perf/tests/time-utils-test.c 		pass &= test__perf_time__parse_for_ranges(&d);
d                 235 tools/perf/tests/time-utils-test.c 		struct test_data d = {
d                 245 tools/perf/tests/time-utils-test.c 		pass &= test__perf_time__parse_for_ranges(&d);
d                  45 tools/perf/util/arm-spe-pkt-decoder.c #define memcpy_le64(d, s, n) do { \
d                  46 tools/perf/util/arm-spe-pkt-decoder.c 	memcpy((d), (s), (n));    \
d                  47 tools/perf/util/arm-spe-pkt-decoder.c 	*(d) = le64_to_cpu(*(d)); \
d                 424 tools/perf/util/build-id.c 				  struct dirent *d)
d                 426 tools/perf/util/build-id.c 	return (strlen(d->d_name) == 2) &&
d                 427 tools/perf/util/build-id.c 		isxdigit(d->d_name[0]) && isxdigit(d->d_name[1]);
d                 431 tools/perf/util/build-id.c 				  struct dirent *d)
d                 434 tools/perf/util/build-id.c 	while (isxdigit(d->d_name[i]) && i < SBUILD_ID_SIZE - 3)
d                 436 tools/perf/util/build-id.c 	return (i == SBUILD_ID_SIZE - 3) && (d->d_name[i] == '\0');
d                 271 tools/perf/util/event.h #define perf_synth__raw_size(d) (sizeof(d) - 4)
d                 273 tools/perf/util/event.h #define perf_sample__bad_synth_size(s, d) ((s)->raw_size < sizeof(d) - 4)
d                2387 tools/perf/util/evsel.c 	struct dirent *d;
d                2395 tools/perf/util/evsel.c 	while (ret && (d = readdir(dir)) != NULL) {
d                2400 tools/perf/util/evsel.c 		if ((d->d_type != DT_DIR) ||
d                2401 tools/perf/util/evsel.c 		     !strcmp(".", d->d_name) ||
d                2402 tools/perf/util/evsel.c 		     !strcmp("..", d->d_name))
d                2406 tools/perf/util/evsel.c 			  procfs__mountpoint(), d->d_name);
d                 162 tools/perf/util/genelf.c 	Elf_Data *d;
d                 176 tools/perf/util/genelf.c 	d = elf_newdata(scn);
d                 177 tools/perf/util/genelf.c 	if (!d) {
d                 182 tools/perf/util/genelf.c 	d->d_align = 8;
d                 183 tools/perf/util/genelf.c 	d->d_off = 0LL;
d                 184 tools/perf/util/genelf.c 	d->d_buf = unwinding;
d                 185 tools/perf/util/genelf.c 	d->d_type = ELF_T_BYTE;
d                 186 tools/perf/util/genelf.c 	d->d_size = unwinding_table_size;
d                 187 tools/perf/util/genelf.c 	d->d_version = EV_CURRENT;
d                 210 tools/perf/util/genelf.c 	d = elf_newdata(scn);
d                 211 tools/perf/util/genelf.c 	if (!d) {
d                 216 tools/perf/util/genelf.c 	d->d_align = 4;
d                 217 tools/perf/util/genelf.c 	d->d_off = 0LL;
d                 218 tools/perf/util/genelf.c 	d->d_buf = unwinding + unwinding_table_size;
d                 219 tools/perf/util/genelf.c 	d->d_type = ELF_T_BYTE;
d                 220 tools/perf/util/genelf.c 	d->d_size = unwinding_header_size;
d                 221 tools/perf/util/genelf.c 	d->d_version = EV_CURRENT;
d                 252 tools/perf/util/genelf.c 	Elf_Data *d;
d                 298 tools/perf/util/genelf.c 	d = elf_newdata(scn);
d                 299 tools/perf/util/genelf.c 	if (!d) {
d                 304 tools/perf/util/genelf.c 	d->d_align = 16;
d                 305 tools/perf/util/genelf.c 	d->d_off = 0LL;
d                 306 tools/perf/util/genelf.c 	d->d_buf = (void *)code;
d                 307 tools/perf/util/genelf.c 	d->d_type = ELF_T_BYTE;
d                 308 tools/perf/util/genelf.c 	d->d_size = csize;
d                 309 tools/perf/util/genelf.c 	d->d_version = EV_CURRENT;
d                 344 tools/perf/util/genelf.c 	d = elf_newdata(scn);
d                 345 tools/perf/util/genelf.c 	if (!d) {
d                 350 tools/perf/util/genelf.c 	d->d_align = 1;
d                 351 tools/perf/util/genelf.c 	d->d_off = 0LL;
d                 352 tools/perf/util/genelf.c 	d->d_buf = shd_string_table;
d                 353 tools/perf/util/genelf.c 	d->d_type = ELF_T_BYTE;
d                 354 tools/perf/util/genelf.c 	d->d_size = sizeof(shd_string_table);
d                 355 tools/perf/util/genelf.c 	d->d_version = EV_CURRENT;
d                 380 tools/perf/util/genelf.c 	d = elf_newdata(scn);
d                 381 tools/perf/util/genelf.c 	if (!d) {
d                 386 tools/perf/util/genelf.c 	d->d_align = 8;
d                 387 tools/perf/util/genelf.c 	d->d_off = 0LL;
d                 388 tools/perf/util/genelf.c 	d->d_buf = symtab;
d                 389 tools/perf/util/genelf.c 	d->d_type = ELF_T_SYM;
d                 390 tools/perf/util/genelf.c 	d->d_size = sizeof(symtab);
d                 391 tools/perf/util/genelf.c 	d->d_version = EV_CURRENT;
d                 423 tools/perf/util/genelf.c 	d = elf_newdata(scn);
d                 424 tools/perf/util/genelf.c 	if (!d) {
d                 429 tools/perf/util/genelf.c 	d->d_align = 1;
d                 430 tools/perf/util/genelf.c 	d->d_off = 0LL;
d                 431 tools/perf/util/genelf.c 	d->d_buf = strsym;
d                 432 tools/perf/util/genelf.c 	d->d_type = ELF_T_BYTE;
d                 433 tools/perf/util/genelf.c 	d->d_size = symlen;
d                 434 tools/perf/util/genelf.c 	d->d_version = EV_CURRENT;
d                 456 tools/perf/util/genelf.c 	d = elf_newdata(scn);
d                 457 tools/perf/util/genelf.c 	if (!d) {
d                 471 tools/perf/util/genelf.c 	d->d_align = 4;
d                 472 tools/perf/util/genelf.c 	d->d_off = 0LL;
d                 473 tools/perf/util/genelf.c 	d->d_buf = &bnote;
d                 474 tools/perf/util/genelf.c 	d->d_type = ELF_T_BYTE;
d                 475 tools/perf/util/genelf.c 	d->d_size = sizeof(bnote);
d                 476 tools/perf/util/genelf.c 	d->d_version = EV_CURRENT;
d                 486 tools/perf/util/genelf_debug.c 	Elf_Data *d;
d                 508 tools/perf/util/genelf_debug.c 	d = elf_newdata(scn);
d                 509 tools/perf/util/genelf_debug.c 	if (!d) {
d                 514 tools/perf/util/genelf_debug.c 	d->d_align = 1;
d                 515 tools/perf/util/genelf_debug.c 	d->d_off = 0LL;
d                 516 tools/perf/util/genelf_debug.c 	d->d_buf = buffer_ext_addr(&dl);
d                 517 tools/perf/util/genelf_debug.c 	d->d_type = ELF_T_BYTE;
d                 518 tools/perf/util/genelf_debug.c 	d->d_size = buffer_ext_size(&dl);
d                 519 tools/perf/util/genelf_debug.c 	d->d_version = EV_CURRENT;
d                 542 tools/perf/util/genelf_debug.c 	d = elf_newdata(scn);
d                 543 tools/perf/util/genelf_debug.c 	if (!d) {
d                 548 tools/perf/util/genelf_debug.c 	d->d_align = 1;
d                 549 tools/perf/util/genelf_debug.c 	d->d_off = 0LL;
d                 550 tools/perf/util/genelf_debug.c 	d->d_buf = buffer_ext_addr(&di);
d                 551 tools/perf/util/genelf_debug.c 	d->d_type = ELF_T_BYTE;
d                 552 tools/perf/util/genelf_debug.c 	d->d_size = buffer_ext_size(&di);
d                 553 tools/perf/util/genelf_debug.c 	d->d_version = EV_CURRENT;
d                 576 tools/perf/util/genelf_debug.c 	d = elf_newdata(scn);
d                 577 tools/perf/util/genelf_debug.c 	if (!d) {
d                 582 tools/perf/util/genelf_debug.c 	d->d_align = 1;
d                 583 tools/perf/util/genelf_debug.c 	d->d_off = 0LL;
d                 584 tools/perf/util/genelf_debug.c 	d->d_buf = buffer_ext_addr(&da);
d                 585 tools/perf/util/genelf_debug.c 	d->d_type = ELF_T_BYTE;
d                 586 tools/perf/util/genelf_debug.c 	d->d_size = buffer_ext_size(&da);
d                 587 tools/perf/util/genelf_debug.c 	d->d_version = EV_CURRENT;
d                1839 tools/perf/util/header.c 	double d;
d                1851 tools/perf/util/header.c 	d = (double)(session->evlist->last_sample_time -
d                1854 tools/perf/util/header.c 	fprintf(fp, "# sample duration : %10.3f ms\n", d);
d                 213 tools/perf/util/intel-pt-decoder/intel-pt-decoder.c static uint64_t multdiv(uint64_t t, uint32_t n, uint32_t d)
d                 215 tools/perf/util/intel-pt-decoder/intel-pt-decoder.c 	if (!d)
d                 217 tools/perf/util/intel-pt-decoder/intel-pt-decoder.c 	return (t / d) * n + ((t % d) * n) / d;
d                2984 tools/perf/util/intel-pt-decoder/intel-pt-decoder.c 	struct fast_forward_data *d = data;
d                3006 tools/perf/util/intel-pt-decoder/intel-pt-decoder.c 	if (tsc < d->timestamp)
d                3007 tools/perf/util/intel-pt-decoder/intel-pt-decoder.c 		d->buf_timestamp = buffer->ref_timestamp;
d                3025 tools/perf/util/intel-pt-decoder/intel-pt-decoder.c 	struct fast_forward_data d = { .timestamp = timestamp };
d                3033 tools/perf/util/intel-pt-decoder/intel-pt-decoder.c 	err = decoder->lookahead(decoder->data, intel_pt_ff_cb, &d);
d                3038 tools/perf/util/intel-pt-decoder/intel-pt-decoder.c 	if (d.buf_timestamp) {
d                3046 tools/perf/util/intel-pt-decoder/intel-pt-decoder.c 		} while (decoder->buf_timestamp != d.buf_timestamp);
d                  25 tools/perf/util/intel-pt-decoder/intel-pt-pkt-decoder.c #define memcpy_le64(d, s, n) do { \
d                  26 tools/perf/util/intel-pt-decoder/intel-pt-pkt-decoder.c 	memcpy((d), (s), (n));    \
d                  27 tools/perf/util/intel-pt-decoder/intel-pt-pkt-decoder.c 	*(d) = le64_to_cpu(*(d)); \
d                 406 tools/perf/util/intel-pt.c 	int *d = data;
d                 412 tools/perf/util/intel-pt.c 			*d = val;
d                 420 tools/perf/util/intel-pt.c 	static int d;
d                 422 tools/perf/util/intel-pt.c 	if (d)
d                 423 tools/perf/util/intel-pt.c 		return d;
d                 425 tools/perf/util/intel-pt.c 	perf_config(intel_pt_config_div, &d);
d                 427 tools/perf/util/intel-pt.c 	if (!d)
d                 428 tools/perf/util/intel-pt.c 		d = 64;
d                 430 tools/perf/util/intel-pt.c 	return d;
d                  45 tools/perf/util/levenshtein.c 		int w, int s, int a, int d)
d                  58 tools/perf/util/levenshtein.c 		row2[0] = (i + 1) * d;
d                  68 tools/perf/util/levenshtein.c 			if (row2[j + 1] > row1[j + 1] + d)
d                  69 tools/perf/util/levenshtein.c 				row2[j + 1] = row1[j + 1] + d;
d                1109 tools/perf/util/session.c static inline const char *regs_dump_abi(struct regs_dump *d)
d                1111 tools/perf/util/session.c 	if (d->abi > PERF_SAMPLE_REGS_ABI_64)
d                1114 tools/perf/util/session.c 	return regs_abi[d->abi];
d                1659 tools/perf/util/sort.c #define DIM(d, n, func) [d] = { .name = n, .entry = &(func) }
d                1684 tools/perf/util/sort.c #define DIM(d, n, func) [d - __SORT_BRANCH_STACK] = { .name = n, .entry = &(func) }
d                1702 tools/perf/util/sort.c #define DIM(d, n, func) [d - __SORT_MEMORY_MODE] = { .name = n, .entry = &(func) }
d                1724 tools/perf/util/sort.c #define DIM(d, n) { .name = n, .fmt = &perf_hpp__format[d], }
d                 270 tools/perf/util/string.c 	char *s, *d, *p, *ret = strdup(str);
d                 275 tools/perf/util/string.c 	d = strchr(ret, '\\');
d                 276 tools/perf/util/string.c 	if (!d)
d                 279 tools/perf/util/string.c 	s = d + 1;
d                 282 tools/perf/util/string.c 			*d = '\0';
d                 287 tools/perf/util/string.c 			memmove(d, s, p - s);
d                 288 tools/perf/util/string.c 			d += p - s;
d                 291 tools/perf/util/string.c 			memmove(d, s, strlen(s) + 1);
d                1880 tools/perf/util/symbol.c static bool visible_dir_filter(const char *name, struct dirent *d)
d                1882 tools/perf/util/symbol.c 	if (d->d_type != DT_DIR)
d                1884 tools/perf/util/symbol.c 	return lsdir_no_dot_filter(name, d);
d                  70 tools/perf/util/time-utils.c 	char *d, *str;
d                  81 tools/perf/util/time-utils.c 	d = strchr(start_str, ch);
d                  82 tools/perf/util/time-utils.c 	if (d) {
d                  83 tools/perf/util/time-utils.c 		*d = '\0';
d                  84 tools/perf/util/time-utils.c 		++d;
d                  86 tools/perf/util/time-utils.c 	end_str = d;
d                 183 tools/perf/util/time-utils.c 	double d;
d                 191 tools/perf/util/time-utils.c 	d = strtod(str, &endptr);
d                 195 tools/perf/util/time-utils.c 	*pcnt = d / 100.0;
d                  77 tools/perf/util/util.c 	char *d = path;
d                  79 tools/perf/util/util.c 	if (*d != '/')
d                  85 tools/perf/util/util.c 	while (*++d == '/');
d                  87 tools/perf/util/util.c 	while ((d = strchr(d, '/'))) {
d                  88 tools/perf/util/util.c 		*d = '\0';
d                  90 tools/perf/util/util.c 		*d++ = '/';
d                  93 tools/perf/util/util.c 		while (*d == '/')
d                  94 tools/perf/util/util.c 			++d;
d                 134 tools/perf/util/util.c 	struct dirent *d;
d                 152 tools/perf/util/util.c 	while ((d = readdir(dir)) != NULL && !ret) {
d                 154 tools/perf/util/util.c 		if (!strcmp(d->d_name, ".") || !strcmp(d->d_name, ".."))
d                 157 tools/perf/util/util.c 		if (!match_pat(d->d_name, pat)) {
d                 163 tools/perf/util/util.c 			  path, d->d_name);
d                 202 tools/perf/util/util.c bool lsdir_no_dot_filter(const char *name __maybe_unused, struct dirent *d)
d                 204 tools/perf/util/util.c 	return d->d_name[0] != '.';
d                 213 tools/perf/util/util.c 	struct dirent *d;
d                 225 tools/perf/util/util.c 	while ((d = readdir(dir)) != NULL) {
d                 226 tools/perf/util/util.c 		if (!filter || filter(name, d))
d                 227 tools/perf/util/util.c 			strlist__add(list, d->d_name);
d                  26 tools/perf/util/util.h bool lsdir_no_dot_filter(const char *name, struct dirent *d);
d                 135 tools/power/x86/x86_energy_perf_policy/x86_energy_perf_policy.c 	double d;
d                 140 tools/power/x86/x86_energy_perf_policy/x86_energy_perf_policy.c 	d = (double)msr_perf * (double) bdx_highest_ratio / 255.0;
d                 141 tools/power/x86/x86_energy_perf_policy/x86_energy_perf_policy.c 	d = d + 0.5;	/* round */
d                 142 tools/power/x86/x86_energy_perf_policy/x86_energy_perf_policy.c 	ratio = (int)d;
d                 145 tools/power/x86/x86_energy_perf_policy/x86_energy_perf_policy.c 		fprintf(stderr, "%d = msr_perf_ratio(%d) {%f}\n", ratio, msr_perf, d);
d                 327 tools/testing/nvdimm/test/iomap.c 				struct resource **d;
d                 329 tools/testing/nvdimm/test/iomap.c 				d = devres_alloc(nfit_devres_release,
d                 332 tools/testing/nvdimm/test/iomap.c 				if (!d)
d                 334 tools/testing/nvdimm/test/iomap.c 				*d = res;
d                 335 tools/testing/nvdimm/test/iomap.c 				devres_add(dev, d);
d                   1 tools/testing/radix-tree/linux/cpu.h #define cpuhp_setup_state_nocalls(a, b, c, d)	(0)
d                 119 tools/testing/scatterlist/linux/mm.h #define kmemleak_alloc(a, b, c, d)
d                  58 tools/testing/selftests/bpf/prog_tests/core_reloc.c 	.d = { [0] = { [0] = { .d = 4 } } },				\
d                  88 tools/testing/selftests/bpf/prog_tests/core_reloc.c 	.d = (void *)4,							\
d                 118 tools/testing/selftests/bpf/prog_tests/core_reloc.c 		.d = (void *)4,						\
d                 126 tools/testing/selftests/bpf/prog_tests/core_reloc.c 		.a = 1, .b = 2, .c = 3, .d = 4,				\
d                  35 tools/testing/selftests/bpf/progs/btf_dump_test_case_bitfields.c 	} d: 1;
d                  62 tools/testing/selftests/bpf/progs/btf_dump_test_case_bitfields.c 	long d: 8;
d                  35 tools/testing/selftests/bpf/progs/btf_dump_test_case_packing.c 		int d;
d                  83 tools/testing/selftests/bpf/progs/btf_dump_test_case_syntax.c 		char d;
d                 103 tools/testing/selftests/bpf/progs/btf_dump_test_case_syntax.c 	how_about_this_ptr_t d;
d                 168 tools/testing/selftests/bpf/progs/btf_dump_test_case_syntax.c 		int d;
d                 183 tools/testing/selftests/bpf/progs/btf_dump_test_case_syntax.c 			const char *d;
d                 325 tools/testing/selftests/bpf/progs/core_reloc_types.h 	int d;
d                 332 tools/testing/selftests/bpf/progs/core_reloc_types.h 	struct core_reloc_arrays_substruct d[1][2];
d                 340 tools/testing/selftests/bpf/progs/core_reloc_types.h 	struct core_reloc_arrays_substruct d[2][3];
d                 354 tools/testing/selftests/bpf/progs/core_reloc_types.h 		int d;
d                 356 tools/testing/selftests/bpf/progs/core_reloc_types.h 	} d[1][2];
d                 363 tools/testing/selftests/bpf/progs/core_reloc_types.h 	struct core_reloc_arrays_substruct d[1][2];
d                 370 tools/testing/selftests/bpf/progs/core_reloc_types.h 	struct core_reloc_arrays_substruct d[1][2];
d                 377 tools/testing/selftests/bpf/progs/core_reloc_types.h 	struct core_reloc_arrays_substruct d[1][2];
d                 384 tools/testing/selftests/bpf/progs/core_reloc_types.h 	struct core_reloc_arrays_substruct d[1][2];
d                 391 tools/testing/selftests/bpf/progs/core_reloc_types.h 	struct core_reloc_arrays_substruct d[1][2];
d                 406 tools/testing/selftests/bpf/progs/core_reloc_types.h 	void *d;
d                 413 tools/testing/selftests/bpf/progs/core_reloc_types.h 	void *d;
d                 423 tools/testing/selftests/bpf/progs/core_reloc_types.h 	void *d;
d                 430 tools/testing/selftests/bpf/progs/core_reloc_types.h 	const char * const d; /* different pointee type + modifiers */
d                 441 tools/testing/selftests/bpf/progs/core_reloc_types.h 	void *d;
d                 449 tools/testing/selftests/bpf/progs/core_reloc_types.h 	void *d;
d                 457 tools/testing/selftests/bpf/progs/core_reloc_types.h 	int d; /* int instead of ptr */
d                 465 tools/testing/selftests/bpf/progs/core_reloc_types.h 	int a, b, c, d, e, f, g, h;
d                 486 tools/testing/selftests/bpf/progs/core_reloc_types.h 	char_ptr_t d;
d                 497 tools/testing/selftests/bpf/progs/core_reloc_types.h 	char *d;
d                 527 tools/testing/selftests/bpf/progs/core_reloc_types.h 	fancy_char_ptr_t d;
d                 666 tools/testing/selftests/bpf/progs/core_reloc_types.h 	int d;
d                  14 tools/testing/selftests/bpf/progs/sockmap_parse_prog.c 	__u8 *d = data;
d                  33 tools/testing/selftests/bpf/progs/sockmap_parse_prog.c 	d = data;
d                  34 tools/testing/selftests/bpf/progs/sockmap_parse_prog.c 	d[7] = 1;
d                  14 tools/testing/selftests/bpf/progs/sockmap_tcp_msg_prog.c 	char *d;
d                  20 tools/testing/selftests/bpf/progs/sockmap_tcp_msg_prog.c 	d = (char *)data;
d                  21 tools/testing/selftests/bpf/progs/sockmap_tcp_msg_prog.c 	bpf_printk("hello sendmsg hook %i %i\n", d[0], d[1]);
d                  42 tools/testing/selftests/bpf/progs/sockmap_verdict_prog.c 	__u8 *d = data;
d                  48 tools/testing/selftests/bpf/progs/sockmap_verdict_prog.c 	map = d[0];
d                  49 tools/testing/selftests/bpf/progs/sockmap_verdict_prog.c 	sk = d[1];
d                  51 tools/testing/selftests/bpf/progs/sockmap_verdict_prog.c 	d[0] = 0xd;
d                  52 tools/testing/selftests/bpf/progs/sockmap_verdict_prog.c 	d[1] = 0xe;
d                  53 tools/testing/selftests/bpf/progs/sockmap_verdict_prog.c 	d[2] = 0xa;
d                  54 tools/testing/selftests/bpf/progs/sockmap_verdict_prog.c 	d[3] = 0xd;
d                  55 tools/testing/selftests/bpf/progs/sockmap_verdict_prog.c 	d[4] = 0xb;
d                  56 tools/testing/selftests/bpf/progs/sockmap_verdict_prog.c 	d[5] = 0xe;
d                  57 tools/testing/selftests/bpf/progs/sockmap_verdict_prog.c 	d[6] = 0xe;
d                  58 tools/testing/selftests/bpf/progs/sockmap_verdict_prog.c 	d[7] = 0xf;
d                  24 tools/testing/selftests/bpf/progs/test_core_reloc_arrays.c 	int d;
d                  31 tools/testing/selftests/bpf/progs/test_core_reloc_arrays.c 	struct core_reloc_arrays_substruct d[1][2];
d                  50 tools/testing/selftests/bpf/progs/test_core_reloc_arrays.c 	if (BPF_CORE_READ(&out->d00d, &in->d[0][0].d))
d                  16 tools/testing/selftests/bpf/progs/test_core_reloc_mods.c 	int a, b, c, d, e, f, g, h;
d                  37 tools/testing/selftests/bpf/progs/test_core_reloc_mods.c 	char_ptr_t d;
d                  53 tools/testing/selftests/bpf/progs/test_core_reloc_mods.c 	    BPF_CORE_READ(&out->d, &in->d) ||
d                  24 tools/testing/selftests/bpf/progs/test_core_reloc_primitives.c 	void *d;
d                  37 tools/testing/selftests/bpf/progs/test_core_reloc_primitives.c 	    BPF_CORE_READ(&out->d, &in->d) ||
d                  40 tools/testing/selftests/bpf/test_btf_dump.c 	struct btf_dump *d;
d                  43 tools/testing/selftests/bpf/test_btf_dump.c 	d = btf_dump__new(btf, NULL, opts, btf_dump_printf);
d                  44 tools/testing/selftests/bpf/test_btf_dump.c 	if (IS_ERR(d))
d                  45 tools/testing/selftests/bpf/test_btf_dump.c 		return PTR_ERR(d);
d                  48 tools/testing/selftests/bpf/test_btf_dump.c 		err = btf_dump__dump_type(d, id);
d                  54 tools/testing/selftests/bpf/test_btf_dump.c 	btf_dump__free(d);
d                 392 tools/testing/selftests/bpf/test_sockmap.c 		unsigned char *d = calloc(iov_length, sizeof(char));
d                 394 tools/testing/selftests/bpf/test_sockmap.c 		if (!d) {
d                 398 tools/testing/selftests/bpf/test_sockmap.c 		iov[i].iov_base = d;
d                 405 tools/testing/selftests/bpf/test_sockmap.c 				d[j] = k++;
d                 425 tools/testing/selftests/bpf/test_sockmap.c 		unsigned char *d = msg->msg_iov[i].iov_base;
d                 429 tools/testing/selftests/bpf/test_sockmap.c 			if (d[j] != k++) {
d                 432 tools/testing/selftests/bpf/test_sockmap.c 					i, j, d[j], k - 1, d[j+1], k);
d                  66 tools/testing/selftests/ir/ir_loopback.c 	DIR *d;
d                  71 tools/testing/selftests/ir/ir_loopback.c 	d = opendir(buf);
d                  72 tools/testing/selftests/ir/ir_loopback.c 	if (!d)
d                  75 tools/testing/selftests/ir/ir_loopback.c 	while ((dent = readdir(d)) != NULL) {
d                  85 tools/testing/selftests/ir/ir_loopback.c 	closedir(d);
d                  99 tools/testing/selftests/kvm/include/x86_64/processor.h 	uint32_t a, d;
d                 101 tools/testing/selftests/kvm/include/x86_64/processor.h 	__asm__ __volatile__("rdmsr" : "=a"(a), "=d"(d) : "c"(msr) : "memory");
d                 103 tools/testing/selftests/kvm/include/x86_64/processor.h 	return a | ((uint64_t) d << 32);
d                 109 tools/testing/selftests/kvm/include/x86_64/processor.h 	uint32_t d = value >> 32;
d                 111 tools/testing/selftests/kvm/include/x86_64/processor.h 	__asm__ __volatile__("wrmsr" :: "a"(a), "d"(d), "c"(msr) : "memory");
d                  23 tools/testing/selftests/kvm/lib/kvm_util_internal.h #define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))
d                 973 tools/testing/selftests/kvm/lib/sparsebit.c void sparsebit_copy(struct sparsebit *d, struct sparsebit *s)
d                 976 tools/testing/selftests/kvm/lib/sparsebit.c 	sparsebit_clear_all(d);
d                 979 tools/testing/selftests/kvm/lib/sparsebit.c 		d->root = node_copy_subtree(s->root);
d                 980 tools/testing/selftests/kvm/lib/sparsebit.c 		d->num_set = s->num_set;
d                  77 tools/testing/selftests/powerpc/alignment/alignment_handler.c 	void test_##name(char *s, char *d)			\
d                  82 tools/testing/selftests/powerpc/alignment/alignment_handler.c 			:: "r"(s), "r"(d), "r"(0)		\
d                 130 tools/testing/selftests/powerpc/alignment/alignment_handler.c 	char *s, *d;
d                 134 tools/testing/selftests/powerpc/alignment/alignment_handler.c 	d = dst;
d                 135 tools/testing/selftests/powerpc/alignment/alignment_handler.c 	d += offset;
d                 141 tools/testing/selftests/powerpc/alignment/alignment_handler.c 	test_func(s, d); /* run the actual test */
d                  70 tools/testing/selftests/powerpc/benchmarks/null_syscall.c 	double d;
d                  96 tools/testing/selftests/powerpc/benchmarks/null_syscall.c 				d = strtod(p + 1, &end);
d                  99 tools/testing/selftests/powerpc/benchmarks/null_syscall.c 					if ((d * 1000000ULL) > clock_frequency)
d                 100 tools/testing/selftests/powerpc/benchmarks/null_syscall.c 						clock_frequency = d * 1000000ULL;
d                  24 tools/testing/selftests/powerpc/dscr/dscr_default_test.c 		unsigned long d, cur_dscr, cur_dscr_usr;
d                  32 tools/testing/selftests/powerpc/dscr/dscr_default_test.c 		d = dscr;
d                  42 tools/testing/selftests/powerpc/dscr/dscr_default_test.c 		if (cur_dscr != d) {
d                  44 tools/testing/selftests/powerpc/dscr/dscr_default_test.c 				"but is %ld\n", thread, d, cur_dscr);
d                  49 tools/testing/selftests/powerpc/dscr/dscr_default_test.c 		if (cur_dscr_usr != d) {
d                  51 tools/testing/selftests/powerpc/dscr/dscr_default_test.c 				"but is %ld\n", thread, d, cur_dscr_usr);
d                  18 tools/testing/selftests/powerpc/ptrace/ptrace-tm-spd-gpr.c float d = FPR_4;
d                  67 tools/testing/selftests/powerpc/ptrace/ptrace-tm-spd-gpr.c 		[flt_4] "b" (&d)
d                 109 tools/testing/selftests/proc/fd-001-lookup.c 		DIR *d;
d                 111 tools/testing/selftests/proc/fd-001-lookup.c 		d = opendir("/proc/self/fd");
d                 112 tools/testing/selftests/proc/fd-001-lookup.c 		if (!d)
d                 115 tools/testing/selftests/proc/fd-001-lookup.c 		de = xreaddir(d);
d                 119 tools/testing/selftests/proc/fd-001-lookup.c 		de = xreaddir(d);
d                 123 tools/testing/selftests/proc/fd-001-lookup.c 		de = xreaddir(d);
d                 136 tools/testing/selftests/proc/fd-001-lookup.c 			if (fd == dirfd(d))
d                 141 tools/testing/selftests/proc/fd-001-lookup.c 		closedir(d);
d                  98 tools/testing/selftests/proc/fd-003-kthread.c 	DIR *d;
d                 101 tools/testing/selftests/proc/fd-003-kthread.c 	d = fdopendir(fd);
d                 102 tools/testing/selftests/proc/fd-003-kthread.c 	assert(d);
d                 104 tools/testing/selftests/proc/fd-003-kthread.c 	de = xreaddir(d);
d                 108 tools/testing/selftests/proc/fd-003-kthread.c 	de = xreaddir(d);
d                 112 tools/testing/selftests/proc/fd-003-kthread.c 	de = xreaddir(d);
d                  43 tools/testing/selftests/proc/proc.h static struct dirent *xreaddir(DIR *d)
d                  48 tools/testing/selftests/proc/proc.h 	de = readdir(d);
d                  38 tools/testing/selftests/proc/read.c static void f_reg(DIR *d, const char *filename)
d                  45 tools/testing/selftests/proc/read.c 	fd = openat(dirfd(d), filename, O_RDONLY|O_NONBLOCK);
d                  53 tools/testing/selftests/proc/read.c static void f_reg_write(DIR *d, const char *filename, const char *buf, size_t len)
d                  58 tools/testing/selftests/proc/read.c 	fd = openat(dirfd(d), filename, O_WRONLY);
d                  66 tools/testing/selftests/proc/read.c static void f_lnk(DIR *d, const char *filename)
d                  71 tools/testing/selftests/proc/read.c 	rv = readlinkat(dirfd(d), filename, buf, sizeof(buf));
d                  75 tools/testing/selftests/proc/read.c static void f(DIR *d, unsigned int level)
d                  79 tools/testing/selftests/proc/read.c 	de = xreaddir(d);
d                  83 tools/testing/selftests/proc/read.c 	de = xreaddir(d);
d                  87 tools/testing/selftests/proc/read.c 	while ((de = xreaddir(d))) {
d                  97 tools/testing/selftests/proc/read.c 				f_reg_write(d, de->d_name, "h", 1);
d                  99 tools/testing/selftests/proc/read.c 				f_reg_write(d, de->d_name, "1", 1);
d                 101 tools/testing/selftests/proc/read.c 				f_reg_write(d, de->d_name, "1", 1);
d                 103 tools/testing/selftests/proc/read.c 				f_reg(d, de->d_name);
d                 107 tools/testing/selftests/proc/read.c 			fd = openat(dirfd(d), de->d_name, O_DIRECTORY|O_RDONLY);
d                 117 tools/testing/selftests/proc/read.c 			f_lnk(d, de->d_name);
d                 127 tools/testing/selftests/proc/read.c 	DIR *d;
d                 130 tools/testing/selftests/proc/read.c 	d = opendir("/proc");
d                 131 tools/testing/selftests/proc/read.c 	if (!d)
d                 135 tools/testing/selftests/proc/read.c 	if (fstatfs(dirfd(d), &sfs) == -1) {
d                 143 tools/testing/selftests/proc/read.c 	f(d, 0);
d                  78 tools/testing/selftests/sync/sync_fence.c 	int a, b, c, d, valid;
d                  91 tools/testing/selftests/sync/sync_fence.c 	d = sync_merge("mergeFence", b, a);
d                  92 tools/testing/selftests/sync/sync_fence.c 	d = sync_merge("mergeFence", c, d);
d                  93 tools/testing/selftests/sync/sync_fence.c 	valid = sw_sync_fence_is_valid(d);
d                 110 tools/testing/selftests/sync/sync_fence.c 	ASSERT(sync_fence_count_with_status(d, FENCE_STATUS_ACTIVE) == 1,
d                 116 tools/testing/selftests/sync/sync_fence.c 	ASSERT(sync_fence_count_with_status(d, FENCE_STATUS_ACTIVE) == 1,
d                 122 tools/testing/selftests/sync/sync_fence.c 	ASSERT(sync_fence_count_with_status(d, FENCE_STATUS_ACTIVE) == 0 &&
d                 123 tools/testing/selftests/sync/sync_fence.c 	       sync_fence_count_with_status(d, FENCE_STATUS_SIGNALED) == 1,
d                 126 tools/testing/selftests/sync/sync_fence.c 	sw_sync_fence_destroy(d);
d                  63 tools/testing/selftests/sync/sync_stress_consumer.c static int mpsc_producer_thread(void *d)
d                  65 tools/testing/selftests/sync/sync_stress_consumer.c 	int id = (long)d;
d                  40 tools/testing/selftests/sync/sync_stress_parallelism.c static int test_stress_two_threads_shared_timeline_thread(void *d)
d                  42 tools/testing/selftests/sync/sync_stress_parallelism.c 	int thread_id = (long)d;
d                 139 tools/testing/selftests/x86/ldt_gdt.c static bool install_valid_mode(const struct user_desc *d, uint32_t ar,
d                 142 tools/testing/selftests/x86/ldt_gdt.c 	struct user_desc desc = *d;
d                  16 tools/virtio/linux/dma-mapping.h #define dma_alloc_coherent(d, s, hp, f) ({ \
d                  22 tools/virtio/linux/dma-mapping.h #define dma_free_coherent(d, s, p, h) kfree(p)
d                  24 tools/virtio/linux/dma-mapping.h #define dma_map_page(d, p, o, s, dir) (page_to_phys(p) + (o))
d                  26 tools/virtio/linux/dma-mapping.h #define dma_map_single(d, p, s, dir) (virt_to_phys(p))
d                 662 tools/virtio/vringh_test.c 		struct vring_desc *d = __user_addr_max - USER_MEM/2;
d                 674 tools/virtio/vringh_test.c 		sg_set_buf(&guest_sg[0], d, sizeof(*d)*2);
d                 675 tools/virtio/vringh_test.c 		sg_set_buf(&guest_sg[1], d + 2, sizeof(*d)*1);
d                 677 tools/virtio/vringh_test.c 		sg_set_buf(&guest_sg[3], d + 3, sizeof(*d)*3);
d                 686 tools/virtio/vringh_test.c 		assert(vring.desc[0].addr == (unsigned long)d);
d                 687 tools/virtio/vringh_test.c 		assert(vring.desc[1].addr == (unsigned long)(d+2));
d                 689 tools/virtio/vringh_test.c 		assert(vring.desc[3].addr == (unsigned long)(d+3));
d                 695 tools/virtio/vringh_test.c 		d[0].addr = (unsigned long)data;
d                 696 tools/virtio/vringh_test.c 		d[0].len = 1;
d                 697 tools/virtio/vringh_test.c 		d[0].flags = VRING_DESC_F_NEXT;
d                 698 tools/virtio/vringh_test.c 		d[0].next = 1;
d                 699 tools/virtio/vringh_test.c 		d[1].addr = (unsigned long)data + 1;
d                 700 tools/virtio/vringh_test.c 		d[1].len = 2;
d                 701 tools/virtio/vringh_test.c 		d[1].flags = 0;
d                 704 tools/virtio/vringh_test.c 		d[2].addr = (unsigned long)data + 3;
d                 705 tools/virtio/vringh_test.c 		d[2].len = 3;
d                 706 tools/virtio/vringh_test.c 		d[2].flags = 0;
d                 709 tools/virtio/vringh_test.c 		d[3].addr = (unsigned long)data + 10;
d                 710 tools/virtio/vringh_test.c 		d[3].len = 5;
d                 711 tools/virtio/vringh_test.c 		d[3].flags = VRING_DESC_F_NEXT;
d                 712 tools/virtio/vringh_test.c 		d[3].next = 1;
d                 713 tools/virtio/vringh_test.c 		d[4].addr = (unsigned long)data + 15;
d                 714 tools/virtio/vringh_test.c 		d[4].len = 6;
d                 715 tools/virtio/vringh_test.c 		d[4].flags = VRING_DESC_F_NEXT;
d                 716 tools/virtio/vringh_test.c 		d[4].next = 2;
d                 717 tools/virtio/vringh_test.c 		d[5].addr = (unsigned long)data + 21;
d                 718 tools/virtio/vringh_test.c 		d[5].len = 7;
d                 719 tools/virtio/vringh_test.c 		d[5].flags = 0;
d                 703 virt/kvm/arm/vgic/vgic-mmio-v3.c 	struct vgic_dist *d = &kvm->arch.vgic;
d                 705 virt/kvm/arm/vgic/vgic-mmio-v3.c 	struct list_head *rd_regions = &d->rd_regions;
d                 736 virt/kvm/arm/vgic/vgic-mmio-v3.c 	if (!count && !IS_VGIC_ADDR_UNDEF(d->vgic_dist_base) &&
d                 422 virt/kvm/arm/vgic/vgic-v3.c 	struct vgic_dist *d = &kvm->arch.vgic;
d                 425 virt/kvm/arm/vgic/vgic-v3.c 	list_for_each_entry(rdreg, &d->rd_regions, list) {
d                 439 virt/kvm/arm/vgic/vgic-v3.c 	struct vgic_dist *d = &kvm->arch.vgic;
d                 442 virt/kvm/arm/vgic/vgic-v3.c 	if (!IS_VGIC_ADDR_UNDEF(d->vgic_dist_base) &&
d                 443 virt/kvm/arm/vgic/vgic-v3.c 	    d->vgic_dist_base + KVM_VGIC_V3_DIST_SIZE < d->vgic_dist_base)
d                 446 virt/kvm/arm/vgic/vgic-v3.c 	list_for_each_entry(rdreg, &d->rd_regions, list) {
d                 452 virt/kvm/arm/vgic/vgic-v3.c 	if (IS_VGIC_ADDR_UNDEF(d->vgic_dist_base))
d                 455 virt/kvm/arm/vgic/vgic-v3.c 	return !vgic_v3_rdist_overlap(kvm, d->vgic_dist_base,
d                 301 virt/kvm/arm/vgic/vgic.h 	struct vgic_dist *d = &kvm->arch.vgic;
d                 303 virt/kvm/arm/vgic/vgic.h 	return (base + size > d->vgic_dist_base) &&
d                 304 virt/kvm/arm/vgic/vgic.h 		(base < d->vgic_dist_base + KVM_VGIC_V3_DIST_SIZE);