rx 13 arch/csky/abiv1/alignment.c static inline uint32_t get_ptreg(struct pt_regs *regs, uint32_t rx)
rx 15 arch/csky/abiv1/alignment.c return rx == 15 ? regs->lr : *((uint32_t *)&(regs->a0) - 2 + rx);
rx 18 arch/csky/abiv1/alignment.c static inline void put_ptreg(struct pt_regs *regs, uint32_t rx, uint32_t val)
rx 20 arch/csky/abiv1/alignment.c if (rx == 15)
rx 23 arch/csky/abiv1/alignment.c *((uint32_t *)&(regs->a0) - 2 + rx) = val;
rx 217 arch/csky/abiv1/alignment.c uint32_t rx = 0;
rx 253 arch/csky/abiv1/alignment.c rx = opcode & 0xf;
rx 258 arch/csky/abiv1/alignment.c if (rx == 0 || rx == 1 || rz == 0 || rz == 1)
rx 263 arch/csky/abiv1/alignment.c addr = get_ptreg(regs, rx) + (imm << 1);
rx 267 arch/csky/abiv1/alignment.c addr = get_ptreg(regs, rx) + (imm << 2);
rx 271 arch/csky/abiv1/alignment.c addr = get_ptreg(regs, rx) + (imm << 1);
rx 275 arch/csky/abiv1/alignment.c addr = get_ptreg(regs, rx) + (imm << 2);
rx 294 arch/csky/abiv1/alignment.c __func__, opcode, rz, rx, imm, addr);
rx 131 arch/csky/abiv1/inc/abi/entry.h .macro RD_MIR rx
rx 135 arch/csky/abiv1/inc/abi/entry.h .macro RD_MEH rx
rx 139 arch/csky/abiv1/inc/abi/entry.h .macro RD_MCIR rx
rx 143 arch/csky/abiv1/inc/abi/entry.h .macro RD_PGDR rx
rx 147 arch/csky/abiv1/inc/abi/entry.h .macro WR_MEH rx
rx 151 arch/csky/abiv1/inc/abi/entry.h .macro WR_MCIR rx
rx 179 arch/csky/abiv1/inc/abi/entry.h .macro ANDI_R3 rx, imm
rx 150 arch/csky/abiv2/inc/abi/entry.h .macro RD_MIR rx
rx 154 arch/csky/abiv2/inc/abi/entry.h .macro RD_MEH rx
rx 158 arch/csky/abiv2/inc/abi/entry.h .macro RD_MCIR rx
rx 162 arch/csky/abiv2/inc/abi/entry.h .macro RD_PGDR rx
rx 166 arch/csky/abiv2/inc/abi/entry.h .macro RD_PGDR_K rx
rx 170 arch/csky/abiv2/inc/abi/entry.h .macro WR_MEH rx
rx 174 arch/csky/abiv2/inc/abi/entry.h .macro WR_MCIR rx
rx 247 arch/csky/abiv2/inc/abi/entry.h .macro ANDI_R3 rx, imm
rx 239 arch/mips/include/asm/ip32/mace.h volatile unsigned long rx;
rx 1066 arch/mips/include/asm/octeon/cvmx-agl-defs.h uint64_t rx:2;
rx 1068 arch/mips/include/asm/octeon/cvmx-agl-defs.h uint64_t rx:2;
rx 1079 arch/mips/include/asm/octeon/cvmx-agl-defs.h uint64_t rx:1;
rx 1081 arch/mips/include/asm/octeon/cvmx-agl-defs.h uint64_t rx:1;
rx 215 arch/mips/include/asm/processor.h u64 rx[4];
rx 996 arch/mips/include/uapi/asm/inst.h __BITFIELD_FIELD(unsigned int rx : 3,
rx 1029 arch/mips/include/uapi/asm/inst.h __BITFIELD_FIELD(unsigned int rx : 3,
rx 1036 arch/mips/include/uapi/asm/inst.h __BITFIELD_FIELD(unsigned int rx : 3,
rx 378 arch/mips/kernel/branch.c regs->regs[reg16to32[inst.rr.rx]];
rx 2067 arch/mips/kernel/unaligned.c reg = reg16to32[mips16inst.ri.rx];
rx 2082 arch/mips/kernel/unaligned.c reg = reg16to32[mips16inst.ri.rx];
rx 2086 arch/mips/kernel/unaligned.c reg = reg16to32[mips16inst.ri.rx];
rx 55 arch/mips/netlogic/xlp/cop2-ex.c : "r"(r->tx), "r"(r->rx));
rx 85 arch/mips/netlogic/xlp/cop2-ex.c : : "m"(*r), "r"(r->tx), "r"(r->rx));
rx 88 arch/s390/boot/mem_detect.c : [rx] "d" (_rx1), "d" (_rx2)
rx 39 arch/s390/boot/startup.c int _diag14_dma(unsigned long rx, unsigned long ry1, unsigned long subcode);
rx 65 arch/s390/include/asm/diag.h extern int diag14(unsigned long rx, unsigned long ry1, unsigned long subcode);
rx 317 arch/s390/include/asm/diag.h int (*diag14)(unsigned long rx, unsigned long ry1, unsigned long subcode);
rx 146 arch/s390/kernel/diag.c int diag14(unsigned long rx, unsigned long ry1, unsigned long subcode)
rx 149 arch/s390/kernel/diag.c return diag_dma_ops.diag14(rx, ry1, subcode);
rx 271 arch/s390/kernel/uprobes.c union split_register *rx;
rx 278 arch/s390/kernel/uprobes.c rx = (union split_register *) &regs->gprs[insn->reg];
rx 286 arch/s390/kernel/uprobes.c rx->u64 = (unsigned long)uptr;
rx 293 arch/s390/kernel/uprobes.c rc = emu_load_ril((u16 __user *)uptr, &rx->u32[1]);
rx 296 arch/s390/kernel/uprobes.c rc = emu_load_ril((s16 __user *)uptr, &rx->u64);
rx 299 arch/s390/kernel/uprobes.c rc = emu_load_ril((s16 __user *)uptr, &rx->u32[1]);
rx 302 arch/s390/kernel/uprobes.c rc = emu_load_ril((u16 __user *)uptr, &rx->u64);
rx 305 arch/s390/kernel/uprobes.c rc = emu_load_ril((u64 __user *)uptr, &rx->u64);
rx 308 arch/s390/kernel/uprobes.c rc = emu_load_ril((s32 __user *)uptr, &rx->u64);
rx 311 arch/s390/kernel/uprobes.c rc = emu_load_ril((u32 __user *)uptr, &rx->u32[1]);
rx 314 arch/s390/kernel/uprobes.c rc = emu_load_ril((u32 __user *)uptr, &rx->u64);
rx 317 arch/s390/kernel/uprobes.c rc = emu_store_ril(regs, (u16 __user *)uptr, &rx->u16[3]);
rx 320 arch/s390/kernel/uprobes.c rc = emu_store_ril(regs, (u64 __user *)uptr, &rx->u64);
rx 323 arch/s390/kernel/uprobes.c rc = emu_store_ril(regs, (u32 __user *)uptr, &rx->u32[1]);
rx 334 arch/s390/kernel/uprobes.c rc = emu_cmp_ril(regs, (s16 __user *)uptr, &rx->s64);
rx 337 arch/s390/kernel/uprobes.c rc = emu_cmp_ril(regs, (s16 __user *)uptr, &rx->s32[1]);
rx 340 arch/s390/kernel/uprobes.c rc = emu_cmp_ril(regs, (u16 __user *)uptr, &rx->u64);
rx 343 arch/s390/kernel/uprobes.c rc = emu_cmp_ril(regs, (u16 __user *)uptr, &rx->u32[1]);
rx 346 arch/s390/kernel/uprobes.c rc = emu_cmp_ril(regs, (s64 __user *)uptr, &rx->s64);
rx 349 arch/s390/kernel/uprobes.c rc = emu_cmp_ril(regs, (u64 __user *)uptr, &rx->u64);
rx 352 arch/s390/kernel/uprobes.c rc = emu_cmp_ril(regs, (s32 __user *)uptr, &rx->s64);
rx 355 arch/s390/kernel/uprobes.c rc = emu_cmp_ril(regs, (s32 __user *)uptr, &rx->s32[1]);
rx 358 arch/s390/kernel/uprobes.c rc = emu_cmp_ril(regs, (u32 __user *)uptr, &rx->u64);
rx 361 arch/s390/kernel/uprobes.c rc = emu_cmp_ril(regs, (u32 __user *)uptr, &rx->u32[1]);
rx 73 arch/s390/kvm/diag.c u16 rx = (vcpu->arch.sie_block->ipa & 0xf0) >> 4;
rx 77 arch/s390/kvm/diag.c vcpu->run->s.regs.gprs[rx]);
rx 79 arch/s390/kvm/diag.c if (vcpu->run->s.regs.gprs[rx] & 7)
rx 81 arch/s390/kvm/diag.c rc = read_guest(vcpu, vcpu->run->s.regs.gprs[rx], rx, &parm, sizeof(parm));
rx 136 arch/s390/mm/extmem.c unsigned long rx, ry;
rx 139 arch/s390/mm/extmem.c rx = (unsigned long) parameter;
rx 147 arch/s390/mm/extmem.c : "+d" (rx), "+d" (ry), "=d" (rc) : : "cc");
rx 148 arch/s390/mm/extmem.c *ret1 = rx;
rx 1148 crypto/ecc.c u64 rx[2][ECC_MAX_DIGITS];
rx 1162 crypto/ecc.c vli_set(rx[1], point->x, ndigits);
rx 1165 crypto/ecc.c xycz_initial_double(rx[1], ry[1], rx[0], ry[0], initial_z, curve_prime,
rx 1170 crypto/ecc.c xycz_add_c(rx[1 - nb], ry[1 - nb], rx[nb], ry[nb], curve_prime,
rx 1172 crypto/ecc.c xycz_add(rx[nb], ry[nb], rx[1 - nb], ry[1 - nb], curve_prime,
rx 1177 crypto/ecc.c xycz_add_c(rx[1 - nb], ry[1 - nb], rx[nb], ry[nb], curve_prime,
rx 1182 crypto/ecc.c vli_mod_sub(z, rx[1], rx[0], curve_prime, ndigits);
rx 1194 crypto/ecc.c vli_mod_mult_fast(z, z, rx[1 - nb], curve_prime, ndigits);
rx 1197 crypto/ecc.c xycz_add(rx[nb], ry[nb], rx[1 - nb], ry[1 - nb], curve_prime, ndigits);
rx 1199 crypto/ecc.c apply_z(rx[0], ry[0], z, curve_prime, ndigits);
rx 1201 crypto/ecc.c vli_set(result->x, rx[0], ndigits);
rx 1235 crypto/ecc.c u64 *rx = result->x;
rx 1257 crypto/ecc.c vli_set(rx, point->x, ndigits);
rx 1263 crypto/ecc.c ecc_point_double_jacobian(rx, ry, z, curve->p, ndigits);
rx 1274 crypto/ecc.c vli_mod_sub(tz,
rx, tx, curve->p, ndigits); rx 1275 crypto/ecc.c xycz_add(tx, ty, rx, ry, curve->p, ndigits); rx 1280 crypto/ecc.c apply_z(rx, ry, z, curve->p, ndigits); rx 456 drivers/atm/ambassador.c static void rx_complete (amb_dev * dev, rx_out * rx) { rx 457 drivers/atm/ambassador.c struct sk_buff * skb = bus_to_virt (rx->handle); rx 458 drivers/atm/ambassador.c u16 vc = be16_to_cpu (rx->vc); rx 460 drivers/atm/ambassador.c u16 status = be16_to_cpu (rx->status); rx 461 drivers/atm/ambassador.c u16 rx_len = be16_to_cpu (rx->length); rx 463 drivers/atm/ambassador.c PRINTD (DBG_FLOW|DBG_RX, "rx_complete %p %p (len=%hu)", dev, rx, rx_len); rx 468 drivers/atm/ambassador.c dev->stats.rx.ok++; rx 483 drivers/atm/ambassador.c atomic_inc(&atm_vcc->stats->rx); rx 507 drivers/atm/ambassador.c dev->stats.rx.error++; rx 509 drivers/atm/ambassador.c dev->stats.rx.badcrc++; rx 511 drivers/atm/ambassador.c dev->stats.rx.toolong++; rx 513 drivers/atm/ambassador.c dev->stats.rx.aborted++; rx 515 drivers/atm/ambassador.c dev->stats.rx.unused++; rx 672 drivers/atm/ambassador.c static int rx_give (amb_dev * dev, rx_in * rx, unsigned char pool) { rx 683 drivers/atm/ambassador.c *rxq->in.ptr = *rx; rx 768 drivers/atm/ambassador.c rx_in rx; rx 791 drivers/atm/ambassador.c rx.handle = virt_to_bus (skb); rx 792 drivers/atm/ambassador.c rx.host_address = cpu_to_be32 (virt_to_bus (skb->data)); rx 793 drivers/atm/ambassador.c if (rx_give (dev, &rx, pool)) rx 1377 drivers/atm/ambassador.c rx_in rx; rx 1387 drivers/atm/ambassador.c rx.handle = virt_to_bus (skb); rx 1388 drivers/atm/ambassador.c rx.host_address = cpu_to_be32 (virt_to_bus (skb->data)); rx 1394 drivers/atm/ambassador.c if (!rx_give (dev, &rx, pool)) { rx 1423 drivers/atm/ambassador.c s->tx_ok, s->rx.ok, s->rx.error, rx 1424 drivers/atm/ambassador.c s->rx.badcrc, s->rx.toolong, rx 1425 drivers/atm/ambassador.c s->rx.aborted, s->rx.unused); rx 591 drivers/atm/ambassador.h } rx; rx 230 drivers/atm/atmtcp.c atomic_inc(&out_vcc->stats->rx); rx 317 drivers/atm/atmtcp.c atomic_inc(&out_vcc->stats->rx); rx 194 drivers/atm/eni.c if (eni_dev->rx_map[i] && ENI_VCC(eni_dev->rx_map[i])->rx) rx 634 drivers/atm/eni.c if (ENI_VCC(vcc)->rx(vcc)) return 1; rx 651 drivers/atm/eni.c if (ENI_VCC(vcc)->rx(vcc)) return 1; rx 775 drivers/atm/eni.c atomic_inc(&vcc->stats->rx); rx 790 drivers/atm/eni.c eni_vcc->rx = NULL; rx 800 drivers/atm/eni.c eni_vcc->rx = vcc->qos.aal == ATM_AAL5 ? 
rx_aal5 : rx_aal0; rx 821 drivers/atm/eni.c if (!eni_vcc->rx) return 0; rx 849 drivers/atm/eni.c if (!eni_vcc->rx) return; rx 901 drivers/atm/eni.c eni_vcc->rx = NULL; rx 2015 drivers/atm/eni.c if ((mult.tx && mult.tx <= 100) || (mult.rx &&mult.rx <= 100) || rx 2016 drivers/atm/eni.c mult.tx > 65536 || mult.rx > 65536) rx 2019 drivers/atm/eni.c if (mult.rx) eni_dev->rx_mult = mult.rx; rx 2189 drivers/atm/eni.c if (eni_vcc->rx) { rx 54 drivers/atm/eni.h int (*rx)(struct atm_vcc *vcc); /* RX function, NULL if none */ rx 808 drivers/atm/firestream.c atomic_inc(&atm_vcc->stats->rx); rx 1047 drivers/atm/fore200e.c atomic_inc(&vcc->stats->rx); rx 1772 drivers/atm/he.c atomic_inc(&vcc->stats->rx); rx 1007 drivers/atm/horizon.c atomic_inc(&vcc->stats->rx); rx 1098 drivers/atm/idt77252.c atomic_inc(&vcc->stats->rx); rx 1170 drivers/atm/idt77252.c atomic_inc(&vcc->stats->rx); rx 1192 drivers/atm/idt77252.c atomic_inc(&vcc->stats->rx); rx 1328 drivers/atm/idt77252.c atomic_inc(&vcc->stats->rx); rx 1351 drivers/atm/iphase.c atomic_inc(&vcc->stats->rx); rx 234 drivers/atm/lanai.c } rx; rx 696 drivers/atm/lanai.c if (lvcc->rx.atmvcc->qos.aal == ATM_AAL5) { rx 697 drivers/atm/lanai.c dma_addr_t dmaaddr = lvcc->rx.buf.dmaaddr; rx 705 drivers/atm/lanai.c RXADDR1_SET_SIZE(lanai_buf_size_cardorder(&lvcc->rx.buf))| rx 1367 drivers/atm/lanai.c int m = ((const unsigned char *) lvcc->rx.buf.ptr) + n - rx 1368 drivers/atm/lanai.c ((const unsigned char *) (lvcc->rx.buf.end)); rx 1371 drivers/atm/lanai.c memcpy(dest, lvcc->rx.buf.ptr, n - m); rx 1372 drivers/atm/lanai.c memcpy(dest + n - m, lvcc->rx.buf.start, m); rx 1383 drivers/atm/lanai.c u32 *end = &lvcc->rx.buf.start[endptr * 4]; rx 1384 drivers/atm/lanai.c int n = ((unsigned long) end) - ((unsigned long) lvcc->rx.buf.ptr); rx 1386 drivers/atm/lanai.c n += lanai_buf_size(&lvcc->rx.buf); rx 1387 drivers/atm/lanai.c APRINTK(n >= 0 && n < lanai_buf_size(&lvcc->rx.buf) && !(n & 15), rx 1389 drivers/atm/lanai.c n, lanai_buf_size(&lvcc->rx.buf)); rx 1391 drivers/atm/lanai.c if ((x = &end[-2]) < lvcc->rx.buf.start) rx 1392 drivers/atm/lanai.c x = &lvcc->rx.buf.end[-2]; rx 1403 drivers/atm/lanai.c lvcc->rx.atmvcc->dev->number, lvcc->vci, size, n); rx 1407 drivers/atm/lanai.c skb = atm_alloc_charge(lvcc->rx.atmvcc, size, GFP_ATOMIC); rx 1414 drivers/atm/lanai.c ATM_SKB(skb)->vcc = lvcc->rx.atmvcc; rx 1416 drivers/atm/lanai.c lvcc->rx.atmvcc->push(lvcc->rx.atmvcc, skb); rx 1417 drivers/atm/lanai.c atomic_inc(&lvcc->rx.atmvcc->stats->rx); rx 1419 drivers/atm/lanai.c lvcc->rx.buf.ptr = end; rx 1502 drivers/atm/lanai.c return lanai_get_sized_buffer(lanai, &lvcc->rx.buf, rx 1646 drivers/atm/lanai.c if (unlikely(lvcc->rx.atmvcc == NULL)) { rx 1653 drivers/atm/lanai.c if (unlikely(lvcc->rx.atmvcc->qos.aal != ATM_AAL5)) { rx 1658 drivers/atm/lanai.c atomic_inc(&lvcc->rx.atmvcc->stats->rx_err); rx 1670 drivers/atm/lanai.c atomic_inc(&lvcc->rx.atmvcc->stats->rx_err); rx 1673 drivers/atm/lanai.c (((unsigned long) lvcc->rx.buf.ptr) - rx 1674 drivers/atm/lanai.c ((unsigned long) lvcc->rx.buf.start)) + 47; rx 1676 drivers/atm/lanai.c bytes += lanai_buf_size(&lvcc->rx.buf); rx 1682 drivers/atm/lanai.c atomic_inc(&lvcc->rx.atmvcc->stats->rx_err); rx 1690 drivers/atm/lanai.c atomic_inc(&lvcc->rx.atmvcc->stats->rx_err); rx 1692 drivers/atm/lanai.c lvcc->rx.buf.ptr = &lvcc->rx.buf.start[SERVICE_GET_END(s) * 4]; rx 1996 drivers/atm/lanai.c lvcc->rx.atmvcc != NULL && lvcc->rx.atmvcc != atmvcc) rx 2008 drivers/atm/lanai.c if (vci0 != NULL && vci0->rx.atmvcc != NULL) rx 2279 
drivers/atm/lanai.c if (lvcc->rx.atmvcc == atmvcc) { rx 2285 drivers/atm/lanai.c lanai_buf_deallocate(&lvcc->rx.buf, lanai->pci); rx 2286 drivers/atm/lanai.c lvcc->rx.atmvcc = NULL; rx 2336 drivers/atm/lanai.c APRINTK(lvcc->rx.atmvcc == NULL, "rx.atmvcc!=NULL, vci=%d\n", rx 2346 drivers/atm/lanai.c lvcc->rx.atmvcc = atmvcc; rx 2374 drivers/atm/lanai.c if (atmvcc == lvcc->rx.atmvcc) rx 2506 drivers/atm/lanai.c if (lvcc->rx.atmvcc != NULL) { rx 2508 drivers/atm/lanai.c lvcc->rx.atmvcc->qos.aal == ATM_AAL5 ? 5 : 0); rx 2509 drivers/atm/lanai.c if (lvcc->rx.atmvcc->qos.aal == ATM_AAL5) rx 2513 drivers/atm/lanai.c lanai_buf_size(&lvcc->rx.buf), rx 1259 drivers/atm/nicstar.c if (vcc->qos.rxtp.traffic_class != ATM_NONE && vc->rx) rx 1386 drivers/atm/nicstar.c vc->rx = 1; rx 1437 drivers/atm/nicstar.c vc->rx = 0; rx 2008 drivers/atm/nicstar.c if (!vc->rx) { rx 2051 drivers/atm/nicstar.c atomic_inc(&vcc->stats->rx); rx 2169 drivers/atm/nicstar.c atomic_inc(&vcc->stats->rx); rx 2187 drivers/atm/nicstar.c atomic_inc(&vcc->stats->rx); rx 2206 drivers/atm/nicstar.c atomic_inc(&vcc->stats->rx); rx 2312 drivers/atm/nicstar.c atomic_inc(&vcc->stats->rx); rx 699 drivers/atm/nicstar.h volatile unsigned int rx:1; /* RX vc? */ rx 840 drivers/atm/solos-pci.c atomic_inc(&vcc->stats->rx); rx 474 drivers/atm/zatm.c atomic_inc(&vcc->stats->rx); rx 1268 drivers/atm/zatm.c int pools,vccs,rx; rx 1288 drivers/atm/zatm.c for (rx = 1; rx < vccs; rx <<= 1) ld++; rx 1293 drivers/atm/zatm.c curr = rx*RX_SIZE/4; rx 1304 drivers/atm/zatm.c "%ld VCs\n",dev->number,NR_SHAPERS,pools,rx, rx 920 drivers/char/pcmcia/synclink_cs.c icount->rx++; rx 952 drivers/char/pcmcia/synclink_cs.c __FILE__,__LINE__,icount->rx,icount->brk, rx 2206 drivers/char/pcmcia/synclink_cs.c icount->rx = cnow.rx; rx 2583 drivers/char/pcmcia/synclink_cs.c info->icount.tx, info->icount.rx); rx 56 drivers/clk/tegra/clk-bpmp.c } rx; rx 83 drivers/clk/tegra/clk-bpmp.c msg.rx.data = clk->rx.data; rx 84 drivers/clk/tegra/clk-bpmp.c msg.rx.size = clk->rx.size; rx 89 drivers/clk/tegra/clk-bpmp.c else if (msg.rx.ret < 0) rx 133 drivers/clk/tegra/clk-bpmp.c msg.rx.data = &response; rx 134 drivers/clk/tegra/clk-bpmp.c msg.rx.size = sizeof(response); rx 157 drivers/clk/tegra/clk-bpmp.c msg.rx.data = &response; rx 158 drivers/clk/tegra/clk-bpmp.c msg.rx.size = sizeof(response); rx 184 drivers/clk/tegra/clk-bpmp.c msg.rx.data = &response; rx 185 drivers/clk/tegra/clk-bpmp.c msg.rx.size = sizeof(response); rx 210 drivers/clk/tegra/clk-bpmp.c msg.rx.data = &response; rx 211 drivers/clk/tegra/clk-bpmp.c msg.rx.size = sizeof(response); rx 233 drivers/clk/tegra/clk-bpmp.c msg.rx.data = &response; rx 234 drivers/clk/tegra/clk-bpmp.c msg.rx.size = sizeof(response); rx 266 drivers/clk/tegra/clk-bpmp.c msg.rx.data = &response; rx 267 drivers/clk/tegra/clk-bpmp.c msg.rx.size = sizeof(response); rx 316 drivers/clk/tegra/clk-bpmp.c msg.rx.data = &response; rx 317 drivers/clk/tegra/clk-bpmp.c msg.rx.size = sizeof(response); rx 340 drivers/clk/tegra/clk-bpmp.c msg.rx.data = &response; rx 341 drivers/clk/tegra/clk-bpmp.c msg.rx.size = sizeof(response); rx 689 drivers/dma/timb_dma.c if ((i % 2) == pchan->rx) { rx 705 drivers/dma/timb_dma.c td_chan->direction = pchan->rx ? DMA_DEV_TO_MEM : rx 710 drivers/dma/timb_dma.c (pchan->rx ? 
0 : TIMBDMA_INSTANCE_TX_OFFSET); rx 47 drivers/firmware/arm_scmi/base.c attr_info = t->rx.buf; rx 90 drivers/firmware/arm_scmi/base.c memcpy(vendor_id, t->rx.buf, size); rx 121 drivers/firmware/arm_scmi/base.c impl_ver = t->rx.buf; rx 155 drivers/firmware/arm_scmi/base.c num_ret = t->rx.buf; rx 156 drivers/firmware/arm_scmi/base.c list = t->rx.buf + sizeof(*num_ret); rx 211 drivers/firmware/arm_scmi/base.c strlcpy(name, t->rx.buf, SCMI_MAX_STR_SIZE); rx 86 drivers/firmware/arm_scmi/clock.c attr = t->rx.buf; rx 111 drivers/firmware/arm_scmi/clock.c attr = t->rx.buf; rx 142 drivers/firmware/arm_scmi/clock.c rlist = t->rx.buf; rx 211 drivers/firmware/arm_scmi/clock.c *value = get_unaligned_le64(t->rx.buf); rx 95 drivers/firmware/arm_scmi/common.h struct scmi_msg rx; rx 202 drivers/firmware/arm_scmi/driver.c xfer->rx.len = min_t(size_t, xfer->rx.len, ioread32(&mem->length) - 8); rx 205 drivers/firmware/arm_scmi/driver.c memcpy_fromio(xfer->rx.buf, mem->msg_payload + 4, xfer->rx.len); rx 550 drivers/firmware/arm_scmi/driver.c xfer->rx.len = rx_size ? : info->desc->max_msg_size; rx 585 drivers/firmware/arm_scmi/driver.c rev_info = t->rx.buf; rx 702 drivers/firmware/arm_scmi/driver.c xfer->rx.buf = devm_kcalloc(dev, sizeof(u8), desc->max_msg_size, rx 704 drivers/firmware/arm_scmi/driver.c if (!xfer->rx.buf) rx 707 drivers/firmware/arm_scmi/driver.c xfer->tx.buf = xfer->rx.buf; rx 167 drivers/firmware/arm_scmi/perf.c attr = t->rx.buf; rx 199 drivers/firmware/arm_scmi/perf.c attr = t->rx.buf; rx 254 drivers/firmware/arm_scmi/perf.c level_info = t->rx.buf; rx 386 drivers/firmware/arm_scmi/perf.c limits = t->rx.buf; rx 465 drivers/firmware/arm_scmi/perf.c *level = get_unaligned_le32(t->rx.buf); rx 526 drivers/firmware/arm_scmi/perf.c resp = t->rx.buf; rx 71 drivers/firmware/arm_scmi/power.c attr = t->rx.buf; rx 100 drivers/firmware/arm_scmi/power.c attr = t->rx.buf; rx 154 drivers/firmware/arm_scmi/power.c *state = get_unaligned_le32(t->rx.buf); rx 69 drivers/firmware/arm_scmi/reset.c attr = get_unaligned_le32(t->rx.buf); rx 92 drivers/firmware/arm_scmi/reset.c attr = t->rx.buf; rx 90 drivers/firmware/arm_scmi/sensors.c attr = t->rx.buf; rx 119 drivers/firmware/arm_scmi/sensors.c buf = t->rx.buf; rx 239 drivers/firmware/arm_scmi/sensors.c ((__le32 *)t->rx.buf + 1)); rx 244 drivers/firmware/arm_scmi/sensors.c *value = get_unaligned_le64(t->rx.buf); rx 120 drivers/firmware/tegra/bpmp-debugfs.c .rx = { rx 177 drivers/firmware/tegra/bpmp-debugfs.c .rx = { rx 23 drivers/firmware/tegra/bpmp-tegra186.c } tx, rx; rx 125 drivers/firmware/tegra/bpmp-tegra186.c priv->rx.virt + offset, priv->rx.phys + offset, rx 188 drivers/firmware/tegra/bpmp-tegra186.c priv->rx.pool = of_gen_pool_get(bpmp->dev->of_node, "shmem", 1); rx 189 drivers/firmware/tegra/bpmp-tegra186.c if (!priv->rx.pool) { rx 195 drivers/firmware/tegra/bpmp-tegra186.c priv->rx.virt = gen_pool_dma_alloc(priv->rx.pool, 4096, &priv->rx.phys); rx 196 drivers/firmware/tegra/bpmp-tegra186.c if (!priv->rx.virt) { rx 254 drivers/firmware/tegra/bpmp-tegra186.c gen_pool_free(priv->rx.pool, (unsigned long)priv->rx.virt, 4096); rx 274 drivers/firmware/tegra/bpmp-tegra186.c gen_pool_free(priv->rx.pool, (unsigned long)priv->rx.virt, 4096); rx 96 drivers/firmware/tegra/bpmp.c (msg->rx.size <= MSG_DATA_MIN_SZ) && rx 98 drivers/firmware/tegra/bpmp.c (msg->rx.size == 0 || msg->rx.data); rx 349 drivers/firmware/tegra/bpmp.c return __tegra_bpmp_channel_read(channel, msg->rx.data, msg->rx.size, rx 350 drivers/firmware/tegra/bpmp.c &msg->rx.ret); rx 382 
drivers/firmware/tegra/bpmp.c return tegra_bpmp_channel_read(channel, msg->rx.data, msg->rx.size, rx 383 drivers/firmware/tegra/bpmp.c &msg->rx.ret); rx 512 drivers/firmware/tegra/bpmp.c .rx = { rx 520 drivers/firmware/tegra/bpmp.c if (ret || msg.rx.ret) rx 560 drivers/firmware/tegra/bpmp.c msg.rx.data = &response; rx 561 drivers/firmware/tegra/bpmp.c msg.rx.size = sizeof(response); rx 624 drivers/firmware/tegra/bpmp.c .rx = { rx 638 drivers/firmware/tegra/bpmp.c if (msg.rx.ret < 0) rx 68 drivers/firmware/tegra/ivc.c } rx; rx 98 drivers/firmware/tegra/ivc.c u32 rx = READ_ONCE(header->rx.count); rx 110 drivers/firmware/tegra/ivc.c if (tx - rx > ivc->num_frames) rx 113 drivers/firmware/tegra/ivc.c return tx == rx; rx 120 drivers/firmware/tegra/ivc.c u32 rx = READ_ONCE(header->rx.count); rx 126 drivers/firmware/tegra/ivc.c return tx - rx >= ivc->num_frames; rx 133 drivers/firmware/tegra/ivc.c u32 rx = READ_ONCE(header->rx.count); rx 141 drivers/firmware/tegra/ivc.c return tx - rx; rx 157 drivers/firmware/tegra/ivc.c WRITE_ONCE(ivc->rx.channel->rx.count, rx 158 drivers/firmware/tegra/ivc.c READ_ONCE(ivc->rx.channel->rx.count) + 1); rx 160 drivers/firmware/tegra/ivc.c if (ivc->rx.position == ivc->num_frames - 1) rx 161 drivers/firmware/tegra/ivc.c ivc->rx.position = 0; rx 163 drivers/firmware/tegra/ivc.c ivc->rx.position++; rx 188 drivers/firmware/tegra/ivc.c if (!tegra_ivc_empty(ivc, ivc->rx.channel)) rx 191 drivers/firmware/tegra/ivc.c tegra_ivc_invalidate(ivc, ivc->rx.phys + offset); rx 193 drivers/firmware/tegra/ivc.c if (tegra_ivc_empty(ivc, ivc->rx.channel)) rx 201 drivers/firmware/tegra/ivc.c unsigned int offset = offsetof(struct tegra_ivc_header, rx.count); rx 284 drivers/firmware/tegra/ivc.c tegra_ivc_invalidate_frame(ivc, ivc->rx.phys, ivc->rx.position, 0, rx 287 drivers/firmware/tegra/ivc.c return tegra_ivc_frame_virt(ivc, ivc->rx.channel, ivc->rx.position); rx 293 drivers/firmware/tegra/ivc.c unsigned int rx = offsetof(struct tegra_ivc_header, rx.count); rx 308 drivers/firmware/tegra/ivc.c tegra_ivc_flush(ivc, ivc->rx.phys + rx); rx 321 drivers/firmware/tegra/ivc.c tegra_ivc_invalidate(ivc, ivc->rx.phys + tx); rx 323 drivers/firmware/tegra/ivc.c if (tegra_ivc_available(ivc, ivc->rx.channel) == ivc->num_frames - 1) rx 347 drivers/firmware/tegra/ivc.c unsigned int rx = offsetof(struct tegra_ivc_header, rx.count); rx 377 drivers/firmware/tegra/ivc.c tegra_ivc_invalidate(ivc, ivc->tx.phys + rx); rx 422 drivers/firmware/tegra/ivc.c tegra_ivc_invalidate(ivc, ivc->rx.phys + offset); rx 423 drivers/firmware/tegra/ivc.c state = READ_ONCE(ivc->rx.channel->tx.state); rx 440 drivers/firmware/tegra/ivc.c ivc->rx.channel->rx.count = 0; rx 443 drivers/firmware/tegra/ivc.c ivc->rx.position = 0; rx 479 drivers/firmware/tegra/ivc.c ivc->rx.channel->rx.count = 0; rx 482 drivers/firmware/tegra/ivc.c ivc->rx.position = 0; rx 561 drivers/firmware/tegra/ivc.c static int tegra_ivc_check_params(unsigned long rx, unsigned long tx, rx 566 drivers/firmware/tegra/ivc.c BUILD_BUG_ON(!IS_ALIGNED(offsetof(struct tegra_ivc_header, rx.count), rx 585 drivers/firmware/tegra/ivc.c if (!IS_ALIGNED(rx, TEGRA_IVC_ALIGN)) { rx 586 drivers/firmware/tegra/ivc.c pr_err("IVC channel start not aligned: %#lx\n", rx); rx 595 drivers/firmware/tegra/ivc.c if (rx < tx) { rx 596 drivers/firmware/tegra/ivc.c if (rx + frame_size * num_frames > tx) { rx 598 drivers/firmware/tegra/ivc.c rx, frame_size * num_frames, tx); rx 602 drivers/firmware/tegra/ivc.c if (tx + frame_size * num_frames > rx) { rx 604 drivers/firmware/tegra/ivc.c tx, 
frame_size * num_frames, rx); rx 612 drivers/firmware/tegra/ivc.c int tegra_ivc_init(struct tegra_ivc *ivc, struct device *peer, void *rx, rx 631 drivers/firmware/tegra/ivc.c err = tegra_ivc_check_params((unsigned long)rx, (unsigned long)tx, rx 639 drivers/firmware/tegra/ivc.c ivc->rx.phys = dma_map_single(peer, rx, queue_size, rx 641 drivers/firmware/tegra/ivc.c if (dma_mapping_error(peer, ivc->rx.phys)) rx 647 drivers/firmware/tegra/ivc.c dma_unmap_single(peer, ivc->rx.phys, queue_size, rx 652 drivers/firmware/tegra/ivc.c ivc->rx.phys = rx_phys; rx 656 drivers/firmware/tegra/ivc.c ivc->rx.channel = rx; rx 669 drivers/firmware/tegra/ivc.c ivc->rx.position = 0; rx 681 drivers/firmware/tegra/ivc.c dma_unmap_single(ivc->peer, ivc->rx.phys, size, rx 69 drivers/fpga/machxo2-spi.c struct spi_transfer rx, tx; rx 73 drivers/fpga/machxo2-spi.c memset(&rx, 0, sizeof(rx)); rx 77 drivers/fpga/machxo2-spi.c rx.rx_buf = status; rx 78 drivers/fpga/machxo2-spi.c rx.len = 4; rx 81 drivers/fpga/machxo2-spi.c spi_message_add_tail(&rx, &msg); rx 78 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.h #define DDI_POWERGATING_ARG(phyID, lanemask, rx, tx, core) \ rx 81 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.h ((rx) ? DISPLAYPHY_RX_SELECT : 0) | \ rx 298 drivers/gpu/drm/mcde/mcde_dsi.c u8 *rx = msg->rx_buf; rx 310 drivers/gpu/drm/mcde/mcde_dsi.c rx[i] = (rddat >> (i * 8)) & 0xff; rx 1122 drivers/gpu/drm/tegra/dsi.c u8 *rx = msg->rx_buf; rx 1143 drivers/gpu/drm/tegra/dsi.c rx[0] = (value >> 8) & 0xff; rx 1148 drivers/gpu/drm/tegra/dsi.c rx[0] = (value >> 8) & 0xff; rx 1149 drivers/gpu/drm/tegra/dsi.c rx[1] = (value >> 16) & 0xff; rx 1171 drivers/gpu/drm/tegra/dsi.c u8 *rx = msg->rx_buf + j; rx 1176 drivers/gpu/drm/tegra/dsi.c rx[j + k] = (value >> (k << 3)) & 0xff; rx 101 drivers/gpu/drm/tiny/repaper.c const void *tx, void *rx, size_t len) rx 125 drivers/gpu/drm/tiny/repaper.c if (rx) { rx 139 drivers/gpu/drm/tiny/repaper.c if (rx && !ret) rx 140 drivers/gpu/drm/tiny/repaper.c memcpy(rx, rxbuf, len); rx 674 drivers/greybus/connection.c static int _gb_connection_enable(struct gb_connection *connection, bool rx) rx 680 drivers/greybus/connection.c if (!(connection->handler && rx)) rx 703 drivers/greybus/connection.c if (connection->handler && rx) rx 55 drivers/hid/hid-led.c } rx; rx 328 drivers/hid/hid-led.c union delcom_packet dp = { .rx.cmd = 104 }; rx 1046 drivers/hid/hid-wiimote-modules.c __s8 rx, ry, lx, ly, lt, rt; rx 1099 drivers/hid/hid-wiimote-modules.c rx = (ext[0] >> 3) & 0x18; rx 1100 drivers/hid/hid-wiimote-modules.c rx |= (ext[1] >> 5) & 0x06; rx 1101 drivers/hid/hid-wiimote-modules.c rx |= (ext[2] >> 7) & 0x01; rx 1108 drivers/hid/hid-wiimote-modules.c rx <<= 1; rx 1115 drivers/hid/hid-wiimote-modules.c input_report_abs(wdata->extension.input, ABS_HAT2X, rx - 0x20); rx 1606 drivers/hid/hid-wiimote-modules.c __s16 rx, ry, lx, ly; rx 1650 drivers/hid/hid-wiimote-modules.c rx = (ext[2] & 0xff) | ((ext[3] & 0x0f) << 8); rx 1657 drivers/hid/hid-wiimote-modules.c rx -= 0x800; rx 1671 drivers/hid/hid-wiimote-modules.c if (abs(rx) < 500) rx 1672 drivers/hid/hid-wiimote-modules.c wdata->state.calib_pro_sticks[2] = -rx; rx 1680 drivers/hid/hid-wiimote-modules.c rx += wdata->state.calib_pro_sticks[2]; rx 1685 drivers/hid/hid-wiimote-modules.c input_report_abs(wdata->extension.input, ABS_RX, rx); rx 39 drivers/hwmon/ad7314.c u16 rx ____cacheline_aligned; rx 46 drivers/hwmon/ad7314.c ret = spi_read(chip->spi_dev, (u8 *)&chip->rx, sizeof(chip->rx)); rx 52 drivers/hwmon/ad7314.c return be16_to_cpu(chip->rx); rx 
163 drivers/hwmon/sht21.c u8 rx[8]; rx 176 drivers/hwmon/sht21.c .buf = rx, rx 186 drivers/hwmon/sht21.c eic[2] = rx[0]; rx 187 drivers/hwmon/sht21.c eic[3] = rx[2]; rx 188 drivers/hwmon/sht21.c eic[4] = rx[4]; rx 189 drivers/hwmon/sht21.c eic[5] = rx[6]; rx 197 drivers/hwmon/sht21.c eic[0] = rx[3]; rx 198 drivers/hwmon/sht21.c eic[1] = rx[4]; rx 199 drivers/hwmon/sht21.c eic[6] = rx[0]; rx 200 drivers/hwmon/sht21.c eic[7] = rx[1]; rx 179 drivers/i2c/busses/i2c-cpm.c struct i2c_msg *pmsg, int num, int tx, int rx) rx 189 drivers/i2c/busses/i2c-cpm.c rbdf = cpm->rbase + rx; rx 194 drivers/i2c/busses/i2c-cpm.c rb = cpm->rxbuf[rx]; rx 222 drivers/i2c/busses/i2c-cpm.c if (rx + 1 == CPM_MAXBD) rx 238 drivers/i2c/busses/i2c-cpm.c struct i2c_msg *pmsg, int tx, int rx) rx 247 drivers/i2c/busses/i2c-cpm.c rbdf = cpm->rbase + rx; rx 250 drivers/i2c/busses/i2c-cpm.c rb = cpm->rxbuf[rx]; rx 115 drivers/i2c/busses/i2c-dln2.c } __packed *rx = dln2->buf; rx 116 drivers/i2c/busses/i2c-dln2.c unsigned rx_len = sizeof(*rx); rx 118 drivers/i2c/busses/i2c-dln2.c BUILD_BUG_ON(sizeof(*rx) > DLN2_I2C_BUF_SIZE); rx 127 drivers/i2c/busses/i2c-dln2.c rx, &rx_len); rx 130 drivers/i2c/busses/i2c-dln2.c if (rx_len < sizeof(rx->buf_len) + data_len) rx 132 drivers/i2c/busses/i2c-dln2.c if (le16_to_cpu(rx->buf_len) != data_len) rx 135 drivers/i2c/busses/i2c-dln2.c memcpy(data, rx->buf, data_len); rx 212 drivers/i2c/busses/i2c-tegra-bpmp.c msg.rx.data = response; rx 213 drivers/i2c/busses/i2c-tegra-bpmp.c msg.rx.size = sizeof(*response); rx 170 drivers/iio/accel/sca3000.c u8 rx[384] ____cacheline_aligned; rx 294 drivers/iio/accel/sca3000.c .rx_buf = st->rx, rx 316 drivers/iio/accel/sca3000.c return !(st->rx[0] & SCA3000_LOCKED); rx 418 drivers/iio/accel/sca3000.c return st->rx[0]; rx 440 drivers/iio/accel/sca3000.c st->rx[0] & SCA3000_REG_REVID_MAJOR_MASK, rx 441 drivers/iio/accel/sca3000.c st->rx[0] & SCA3000_REG_REVID_MINOR_MASK); rx 574 drivers/iio/accel/sca3000.c switch (SCA3000_REG_MODE_MODE_MASK & st->rx[0]) { rx 666 drivers/iio/accel/sca3000.c st->rx[0] &= SCA3000_REG_MODE_MODE_MASK; rx 667 drivers/iio/accel/sca3000.c switch (st->rx[0]) { rx 703 drivers/iio/accel/sca3000.c st->rx[0] &= ~SCA3000_REG_MODE_MODE_MASK; rx 704 drivers/iio/accel/sca3000.c st->rx[0] |= (mode & SCA3000_REG_MODE_MODE_MASK); rx 706 drivers/iio/accel/sca3000.c return sca3000_write_reg(st, SCA3000_REG_MODE_ADDR, st->rx[0]); rx 733 drivers/iio/accel/sca3000.c *val = (be16_to_cpup((__be16 *)st->rx) >> 3) & 0x1FFF; rx 745 drivers/iio/accel/sca3000.c *val = ((st->rx[0] & 0x3F) << 3) | rx 746 drivers/iio/accel/sca3000.c ((st->rx[1] & 0xE0) >> 5); rx 826 drivers/iio/accel/sca3000.c val = st->rx[0]; rx 966 drivers/iio/accel/sca3000.c u8 *rx, rx 976 drivers/iio/accel/sca3000.c .rx_buf = rx, rx 1007 drivers/iio/accel/sca3000.c num_available = st->rx[0]; rx 1012 drivers/iio/accel/sca3000.c ret = sca3000_read_data(st, SCA3000_REG_RING_OUT_ADDR, st->rx, rx 1024 drivers/iio/accel/sca3000.c iio_push_to_buffers(indio_dev, st->rx + i * 3 * 2); rx 1056 drivers/iio/accel/sca3000.c val = st->rx[0]; rx 1122 drivers/iio/accel/sca3000.c ret = !!(st->rx[0] & SCA3000_REG_MODE_FREE_FALL_DETECT); rx 1131 drivers/iio/accel/sca3000.c if ((st->rx[0] & SCA3000_REG_MODE_MODE_MASK) rx 1164 drivers/iio/accel/sca3000.c if (state && !(st->rx[0] & SCA3000_REG_MODE_FREE_FALL_DETECT)) rx 1166 drivers/iio/accel/sca3000.c st->rx[0] | SCA3000_REG_MODE_FREE_FALL_DETECT); rx 1168 drivers/iio/accel/sca3000.c else if (!state && (st->rx[0] & SCA3000_REG_MODE_FREE_FALL_DETECT)) rx 1170 
drivers/iio/accel/sca3000.c st->rx[0] & ~SCA3000_REG_MODE_FREE_FALL_DETECT); rx 1214 drivers/iio/accel/sca3000.c ((st->rx[0] & SCA3000_REG_MODE_MODE_MASK) rx 1217 drivers/iio/accel/sca3000.c (st->rx[0] & ~SCA3000_REG_MODE_MODE_MASK) rx 1221 drivers/iio/accel/sca3000.c ((st->rx[0] & SCA3000_REG_MODE_MODE_MASK) rx 1224 drivers/iio/accel/sca3000.c st->rx[0] & SCA3000_REG_MODE_MODE_MASK); rx 1303 drivers/iio/accel/sca3000.c (st->rx[0] | SCA3000_REG_MODE_RING_BUF_ENABLE)); rx 1307 drivers/iio/accel/sca3000.c (st->rx[0] & ~SCA3000_REG_MODE_RING_BUF_ENABLE)); rx 1336 drivers/iio/accel/sca3000.c st->rx[0] | SCA3000_REG_INT_MASK_RING_HALF); rx 1367 drivers/iio/accel/sca3000.c st->rx[0] & ~SCA3000_REG_INT_MASK_RING_HALF); rx 1436 drivers/iio/accel/sca3000.c (st->rx[0] & SCA3000_MODE_PROT_MASK)); rx 1524 drivers/iio/accel/sca3000.c (st->rx[0] & rx 792 drivers/iio/adc/max1363.c u8 rx; rx 796 drivers/iio/adc/max1363.c st->recv(st->client, &rx, 1); rx 797 drivers/iio/adc/max1363.c mask = rx; rx 77 drivers/iio/gyro/adxrs450.c __be32 rx; rx 102 drivers/iio/gyro/adxrs450.c .rx_buf = &st->rx, rx 104 drivers/iio/gyro/adxrs450.c .len = sizeof(st->rx), rx 122 drivers/iio/gyro/adxrs450.c *val = (be32_to_cpu(st->rx) >> 5) & 0xFFFF; rx 176 drivers/iio/gyro/adxrs450.c .rx_buf = &st->rx, rx 178 drivers/iio/gyro/adxrs450.c .len = sizeof(st->rx), rx 191 drivers/iio/gyro/adxrs450.c *val = (be32_to_cpu(st->rx) >> 10) & 0xFFFF; rx 211 drivers/iio/gyro/adxrs450.c .rx_buf = &st->rx, rx 227 drivers/iio/gyro/adxrs450.c *val = be32_to_cpu(st->rx); rx 215 drivers/iio/health/afe4403.c u8 rx[3]; rx 223 drivers/iio/health/afe4403.c ret = spi_write_then_read(afe->spi, ®, 1, rx, 3); rx 227 drivers/iio/health/afe4403.c *val = (rx[0] << 16) | rx 228 drivers/iio/health/afe4403.c (rx[1] << 8) | rx 229 drivers/iio/health/afe4403.c (rx[2]); rx 314 drivers/iio/health/afe4403.c u8 rx[3]; rx 325 drivers/iio/health/afe4403.c rx, 3); rx 329 drivers/iio/health/afe4403.c buffer[i++] = (rx[0] << 16) | rx 330 drivers/iio/health/afe4403.c (rx[1] << 8) | rx 331 drivers/iio/health/afe4403.c (rx[2]); rx 154 drivers/iio/imu/adis.c .rx_buf = adis->rx, rx 162 drivers/iio/imu/adis.c .rx_buf = adis->rx + 2, rx 206 drivers/iio/imu/adis.c *val = get_unaligned_be32(adis->rx); rx 209 drivers/iio/imu/adis.c *val = get_unaligned_be16(adis->rx + 2); rx 69 drivers/iio/imu/adis_buffer.c __be16 *tx, *rx; rx 90 drivers/iio/imu/adis_buffer.c rx = adis->buffer; rx 91 drivers/iio/imu/adis_buffer.c tx = rx + scan_count; rx 104 drivers/iio/imu/adis_buffer.c adis->xfer[j].rx_buf = &rx[j - 1]; rx 20 drivers/iio/pressure/mpl115_spi.c u8 rx[4]; rx 43 drivers/iio/pressure/mpl115_spi.c .rx_buf = buf->rx, rx 55 drivers/iio/pressure/mpl115_spi.c return (buf->rx[1] << 8) | buf->rx[3]; rx 44 drivers/iio/resolver/ad2s1200.c __be16 rx ____cacheline_aligned; rx 82 drivers/iio/resolver/ad2s1200.c ret = spi_read(st->sdev, &st->rx, 2); rx 90 drivers/iio/resolver/ad2s1200.c *val = be16_to_cpup(&st->rx) >> 4; rx 93 drivers/iio/resolver/ad2s1200.c *val = sign_extend32(be16_to_cpup(&st->rx) >> 4, 11); rx 27 drivers/iio/resolver/ad2s90.c u8 rx[2] ____cacheline_aligned; rx 50 drivers/iio/resolver/ad2s90.c ret = spi_read(st->sdev, st->rx, 2); rx 55 drivers/iio/resolver/ad2s90.c *val = (((u16)(st->rx[0])) << 4) | ((st->rx[1] & 0xF0) >> 4); rx 7333 drivers/infiniband/hw/hfi1/chip.c u16 tx, rx; rx 7347 drivers/infiniband/hw/hfi1/chip.c rx = nibble_to_count(enable_lane_rx); rx 7374 drivers/infiniband/hw/hfi1/chip.c enable_lane_tx, tx, enable_lane_rx, rx); rx 7376 drivers/infiniband/hw/hfi1/chip.c 
*rx_width = link_width_to_bits(dd, rx); rx 7395 drivers/infiniband/hw/hfi1/chip.c u16 widths, tx, rx; rx 7401 drivers/infiniband/hw/hfi1/chip.c rx = (widths >> 8) & 0xf; rx 7404 drivers/infiniband/hw/hfi1/chip.c *rx_width = link_width_to_bits(dd, rx); rx 7625 drivers/infiniband/hw/hfi1/chip.c u16 tx, rx; rx 7651 drivers/infiniband/hw/hfi1/chip.c get_link_widths(ppd->dd, &tx, &rx); rx 7653 drivers/infiniband/hw/hfi1/chip.c ppd->link_width_downgrade_rx_active = rx; rx 82 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c has_data = pvrdma_idx_ring_has_data(&cq->ring_state->rx, rx 283 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c has_data = pvrdma_idx_ring_has_data(&cq->ring_state->rx, rx 288 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c int tail = pvrdma_idx(&cq->ring_state->rx.prod_tail, rx 310 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c &cq->ring_state->rx.cons_head, rx 328 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c has_data = pvrdma_idx_ring_has_data(&cq->ring_state->rx, rx 369 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c pvrdma_idx_ring_inc(&cq->ring_state->rx.cons_head, cq->ibcq.cqe); rx 425 drivers/infiniband/hw/vmw_pvrdma/pvrdma_main.c struct pvrdma_ring *ring = &dev->async_ring_state->rx; rx 501 drivers/infiniband/hw/vmw_pvrdma/pvrdma_main.c struct pvrdma_ring *ring = &dev->cq_ring_state->rx; rx 60 drivers/infiniband/hw/vmw_pvrdma/pvrdma_ring.h struct pvrdma_ring rx; /* Rx ring. */ rx 491 drivers/infiniband/sw/siw/siw.h #define rx_qp(rx) container_of(rx, struct siw_qp, rx_stream) rx 117 drivers/infiniband/ulp/ipoib/ipoib_cm.c struct ipoib_cm_rx *rx, rx 127 drivers/infiniband/ulp/ipoib/ipoib_cm.c sge[i].addr = rx->rx_ring[id].mapping[i]; rx 129 drivers/infiniband/ulp/ipoib/ipoib_cm.c ret = ib_post_recv(rx->qp, wr, NULL); rx 133 drivers/infiniband/ulp/ipoib/ipoib_cm.c rx->rx_ring[id].mapping); rx 134 drivers/infiniband/ulp/ipoib/ipoib_cm.c dev_kfree_skb_any(rx->rx_ring[id].skb); rx 135 drivers/infiniband/ulp/ipoib/ipoib_cm.c rx->rx_ring[id].skb = NULL; rx 348 drivers/infiniband/ulp/ipoib/ipoib_cm.c struct ipoib_cm_rx *rx) rx 358 drivers/infiniband/ulp/ipoib/ipoib_cm.c rx->rx_ring = vzalloc(array_size(ipoib_recvq_size, rx 359 drivers/infiniband/ulp/ipoib/ipoib_cm.c sizeof(*rx->rx_ring))); rx 360 drivers/infiniband/ulp/ipoib/ipoib_cm.c if (!rx->rx_ring) rx 384 drivers/infiniband/ulp/ipoib/ipoib_cm.c if (!ipoib_cm_alloc_rx_skb(dev, rx->rx_ring, i, IPOIB_CM_RX_SG - 1, rx 385 drivers/infiniband/ulp/ipoib/ipoib_cm.c rx->rx_ring[i].mapping, rx 391 drivers/infiniband/ulp/ipoib/ipoib_cm.c ret = ipoib_cm_post_receive_nonsrq(dev, rx, &t->wr, t->sge, i); rx 400 drivers/infiniband/ulp/ipoib/ipoib_cm.c rx->recv_count = ipoib_recvq_size; rx 415 drivers/infiniband/ulp/ipoib/ipoib_cm.c ipoib_cm_free_rx_ring(dev, rx->rx_ring); rx 907 drivers/infiniband/ulp/ipoib/ipoib_cm.c struct ipoib_cm_rx *rx, *n; rx 914 drivers/infiniband/ulp/ipoib/ipoib_cm.c list_for_each_entry_safe(rx, n, &list, list) { rx 915 drivers/infiniband/ulp/ipoib/ipoib_cm.c ib_destroy_cm_id(rx->id); rx 916 drivers/infiniband/ulp/ipoib/ipoib_cm.c ib_destroy_qp(rx->qp); rx 918 drivers/infiniband/ulp/ipoib/ipoib_cm.c ipoib_cm_free_rx_ring(priv->dev, rx->rx_ring); rx 923 drivers/infiniband/ulp/ipoib/ipoib_cm.c kfree(rx); rx 215 drivers/input/rmi4/rmi_f54.c u8 rx = drv_data->num_rx_electrodes ? : f54->num_rx_electrodes; rx 221 drivers/input/rmi4/rmi_f54.c size = rx * tx; rx 228 drivers/input/rmi4/rmi_f54.c size = sizeof(u16) * rx * tx; rx 402 drivers/input/rmi4/rmi_f54.c u8 rx = drv_data->num_rx_electrodes ? 
: f54->num_rx_electrodes; rx 418 drivers/input/rmi4/rmi_f54.c f->width = rx; rx 651 drivers/input/rmi4/rmi_f54.c u8 rx, tx; rx 667 drivers/input/rmi4/rmi_f54.c rx = f54->num_rx_electrodes; rx 670 drivers/input/rmi4/rmi_f54.c array3_size(tx, rx, sizeof(u16)), rx 78 drivers/input/serio/maceps2.c byte = port->rx; rx 2254 drivers/input/touchscreen/atmel_mxt_ts.c unsigned int i, rx, ry; rx 2258 drivers/input/touchscreen/atmel_mxt_ts.c rx = data->xy_switch ? y : x; rx 2260 drivers/input/touchscreen/atmel_mxt_ts.c rx = data->invertx ? (data->xsize - 1 - rx) : rx; rx 2263 drivers/input/touchscreen/atmel_mxt_ts.c outbuf[i] = mxt_get_debug_value(data, rx, ry); rx 107 drivers/ipack/devices/ipoctal.c stats->rx = 0; rx 139 drivers/ipack/devices/ipoctal.c icount->rx = channel->stats.rx; rx 31 drivers/ipack/devices/ipoctal.h unsigned long rx; rx 250 drivers/leds/trigger/ledtrig-netdev.c static DEVICE_ATTR_RW(rx); rx 991 drivers/mailbox/bcm-pdc-mailbox.c struct pdc_ring_alloc rx; rx 1001 drivers/mailbox/bcm-pdc-mailbox.c rx.vbase = dma_pool_zalloc(pdcs->ring_pool, GFP_KERNEL, &rx.dmabase); rx 1002 drivers/mailbox/bcm-pdc-mailbox.c if (unlikely(!rx.vbase)) { rx 1009 drivers/mailbox/bcm-pdc-mailbox.c dev_dbg(dev, " - base DMA addr of rx ring %pad", &rx.dmabase); rx 1010 drivers/mailbox/bcm-pdc-mailbox.c dev_dbg(dev, " - base virtual addr of rx ring %p", rx.vbase); rx 1013 drivers/mailbox/bcm-pdc-mailbox.c memcpy(&pdcs->rx_ring_alloc, &rx, sizeof(rx)); rx 2014 drivers/media/dvb-frontends/dib7000p.c u8 *tx, *rx; rx 2024 drivers/media/dvb-frontends/dib7000p.c rx = kzalloc(2, GFP_KERNEL); rx 2025 drivers/media/dvb-frontends/dib7000p.c if (!rx) { rx 2031 drivers/media/dvb-frontends/dib7000p.c msg[1].buf = rx; rx 2037 drivers/media/dvb-frontends/dib7000p.c if (rx[0] == 0x01 && rx[1] == 0xb3) { rx 2046 drivers/media/dvb-frontends/dib7000p.c if (rx[0] == 0x01 && rx[1] == 0xb3) { rx 2055 drivers/media/dvb-frontends/dib7000p.c kfree(rx); rx 91 drivers/media/i2c/st-mipid02.c struct v4l2_fwnode_endpoint rx; rx 369 drivers/media/i2c/st-mipid02.c struct v4l2_fwnode_endpoint *ep = &bridge->rx; rx 412 drivers/media/i2c/st-mipid02.c struct v4l2_fwnode_endpoint *ep = &bridge->rx; rx 461 drivers/media/i2c/st-mipid02.c struct v4l2_fwnode_endpoint *ep = &bridge->rx; rx 875 drivers/media/i2c/st-mipid02.c bridge->rx = ep; rx 18 drivers/media/pci/cobalt/cobalt-irq.c int rx = s->video_channel; rx 20 drivers/media/pci/cobalt/cobalt-irq.c COBALT_CVI_FREEWHEEL(s->cobalt, rx); rx 22 drivers/media/pci/cobalt/cobalt-irq.c COBALT_CVI_VMR(s->cobalt, rx); rx 24 drivers/media/pci/cobalt/cobalt-irq.c COBALT_CVI(s->cobalt, rx); rx 26 drivers/media/pci/cobalt/cobalt-irq.c COBALT_CVI_CLK_LOSS(s->cobalt, rx); rx 252 drivers/media/pci/cobalt/cobalt-v4l2.c int rx = s->video_channel; rx 254 drivers/media/pci/cobalt/cobalt-v4l2.c COBALT_CVI_EVCNT(cobalt, rx); rx 286 drivers/media/pci/cobalt/cobalt-v4l2.c int rx = s->video_channel; rx 287 drivers/media/pci/cobalt/cobalt-v4l2.c struct m00389_cvi_regmap __iomem *cvi = COBALT_CVI(cobalt, rx); rx 288 drivers/media/pci/cobalt/cobalt-v4l2.c struct m00460_evcnt_regmap __iomem *evcnt = COBALT_CVI_EVCNT(cobalt, rx); rx 303 drivers/media/pci/cobalt/cobalt-v4l2.c fw = COBALT_CVI_FREEWHEEL(cobalt, rx); rx 304 drivers/media/pci/cobalt/cobalt-v4l2.c vmr = COBALT_CVI_VMR(cobalt, rx); rx 305 drivers/media/pci/cobalt/cobalt-v4l2.c clkloss = COBALT_CVI_CLK_LOSS(cobalt, rx); rx 354 drivers/media/pci/cobalt/cobalt-v4l2.c int rx = s->video_channel; rx 356 drivers/media/pci/cobalt/cobalt-v4l2.c COBALT_CVI_EVCNT(cobalt, 
rx); rx 392 drivers/media/pci/cobalt/cobalt-v4l2.c int rx = s->video_channel; rx 414 drivers/media/pci/cobalt/cobalt-v4l2.c fw = COBALT_CVI_FREEWHEEL(cobalt, rx); rx 415 drivers/media/pci/cobalt/cobalt-v4l2.c vmr = COBALT_CVI_VMR(cobalt, rx); rx 416 drivers/media/pci/cobalt/cobalt-v4l2.c clkloss = COBALT_CVI_CLK_LOSS(cobalt, rx); rx 500 drivers/media/pci/cobalt/cobalt-v4l2.c int rx = s->video_channel; rx 505 drivers/media/pci/cobalt/cobalt-v4l2.c cvi = COBALT_CVI(cobalt, rx); rx 506 drivers/media/pci/cobalt/cobalt-v4l2.c vmr = COBALT_CVI_VMR(cobalt, rx); rx 507 drivers/media/pci/cobalt/cobalt-v4l2.c fw = COBALT_CVI_FREEWHEEL(cobalt, rx); rx 508 drivers/media/pci/cobalt/cobalt-v4l2.c clkloss = COBALT_CVI_CLK_LOSS(cobalt, rx); rx 509 drivers/media/pci/cobalt/cobalt-v4l2.c packer = COBALT_CVI_PACKER(cobalt, rx); rx 514 drivers/media/pci/cobalt/cobalt-v4l2.c cobalt_info("rx%d: cvi resolution: %dx%d\n", rx, rx 516 drivers/media/pci/cobalt/cobalt-v4l2.c cobalt_info("rx%d: cvi control: %s%s%s\n", rx, rx 523 drivers/media/pci/cobalt/cobalt-v4l2.c cobalt_info("rx%d: cvi status: %s%s\n", rx, rx 529 drivers/media/pci/cobalt/cobalt-v4l2.c cobalt_info("rx%d: Measurements: %s%s%s%s%s%s%s\n", rx, rx 544 drivers/media/pci/cobalt/cobalt-v4l2.c cobalt_info("rx%d: irq_status: 0x%02x irq_triggers: 0x%02x\n", rx, rx 547 drivers/media/pci/cobalt/cobalt-v4l2.c cobalt_info("rx%d: vsync: %d\n", rx, ioread32(&vmr->vsync_time)); rx 548 drivers/media/pci/cobalt/cobalt-v4l2.c cobalt_info("rx%d: vbp: %d\n", rx, ioread32(&vmr->vback_porch)); rx 549 drivers/media/pci/cobalt/cobalt-v4l2.c cobalt_info("rx%d: vact: %d\n", rx, ioread32(&vmr->vactive_area)); rx 550 drivers/media/pci/cobalt/cobalt-v4l2.c cobalt_info("rx%d: vfb: %d\n", rx, ioread32(&vmr->vfront_porch)); rx 551 drivers/media/pci/cobalt/cobalt-v4l2.c cobalt_info("rx%d: hsync: %d\n", rx, ioread32(&vmr->hsync_time)); rx 552 drivers/media/pci/cobalt/cobalt-v4l2.c cobalt_info("rx%d: hbp: %d\n", rx, ioread32(&vmr->hback_porch)); rx 553 drivers/media/pci/cobalt/cobalt-v4l2.c cobalt_info("rx%d: hact: %d\n", rx, ioread32(&vmr->hactive_area)); rx 554 drivers/media/pci/cobalt/cobalt-v4l2.c cobalt_info("rx%d: hfb: %d\n", rx, ioread32(&vmr->hfront_porch)); rx 555 drivers/media/pci/cobalt/cobalt-v4l2.c cobalt_info("rx%d: Freewheeling: %s%s%s\n", rx, rx 563 drivers/media/pci/cobalt/cobalt-v4l2.c cobalt_info("rx%d: Clock Loss Detection: %s%s\n", rx, rx 568 drivers/media/pci/cobalt/cobalt-v4l2.c cobalt_info("rx%d: Packer: %x\n", rx, ioread32(&packer->control)); rx 465 drivers/media/pci/saa7134/saa7134-tvaudio.c int max1,max2,carrier,rx,mode,lastmode,default_carrier; rx 593 drivers/media/pci/saa7134/saa7134-tvaudio.c rx = tvaudio_getstereo(dev, &tvaudio[audio]); rx 594 drivers/media/pci/saa7134/saa7134-tvaudio.c mode = saa7134_tvaudio_rx2mode(rx); rx 893 drivers/media/pci/saa7134/saa7134-tvaudio.c int saa7134_tvaudio_rx2mode(u32 rx) rx 898 drivers/media/pci/saa7134/saa7134-tvaudio.c if (rx & V4L2_TUNER_SUB_STEREO) rx 900 drivers/media/pci/saa7134/saa7134-tvaudio.c else if (rx & V4L2_TUNER_SUB_LANG1) rx 902 drivers/media/pci/saa7134/saa7134-tvaudio.c else if (rx & V4L2_TUNER_SUB_LANG2) rx 1717 drivers/media/pci/saa7134/saa7134-video.c int rx, mode; rx 1724 drivers/media/pci/saa7134/saa7134-video.c rx = saa7134_tvaudio_getstereo(dev); rx 1725 drivers/media/pci/saa7134/saa7134-video.c mode = saa7134_tvaudio_rx2mode(rx); rx 878 drivers/media/pci/saa7134/saa7134.h int saa7134_tvaudio_rx2mode(u32 rx); rx 111 drivers/media/platform/s5p-cec/s5p_cec.c if (cec->rx != STATE_IDLE) rx 113 
drivers/media/platform/s5p-cec/s5p_cec.c cec->rx = STATE_BUSY; rx 118 drivers/media/platform/s5p-cec/s5p_cec.c cec->rx = STATE_DONE; rx 156 drivers/media/platform/s5p-cec/s5p_cec.c switch (cec->rx) { rx 159 drivers/media/platform/s5p-cec/s5p_cec.c cec->rx = STATE_IDLE; rx 71 drivers/media/platform/s5p-cec/s5p_cec.h enum cec_state rx; rx 219 drivers/media/radio/wl128x/fmdrv.h struct fm_rx rx; /* FM receiver info */ rx 240 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.region = region_configs[region_to_set]; rx 625 drivers/media/radio/wl128x/fmdrv_common.c struct tuned_station_info *stat_info = &fmdev->rx.stat_info; rx 626 drivers/media/radio/wl128x/fmdrv_common.c u8 reg_idx = fmdev->rx.region.fm_band; rx 632 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.stat_info.af_list_max = (af - FM_RDS_1_AF_FOLLOWS + 1); rx 633 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.stat_info.afcache_size = 0; rx 634 drivers/media/radio/wl128x/fmdrv_common.c fmdbg("No of expected AF : %d\n", fmdev->rx.stat_info.af_list_max); rx 645 drivers/media/radio/wl128x/fmdrv_common.c freq = fmdev->rx.region.bot_freq + (af * 100); rx 646 drivers/media/radio/wl128x/fmdrv_common.c if (freq == fmdev->rx.freq) { rx 648 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.freq, freq); rx 700 drivers/media/radio/wl128x/fmdrv_common.c struct fm_rds *rds = &fmdev->rx.rds; rx 752 drivers/media/radio/wl128x/fmdrv_common.c if (fmdev->rx.stat_info.picode != cur_picode) rx 753 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.stat_info.picode = cur_picode; rx 846 drivers/media/radio/wl128x/fmdrv_common.c if ((fmdev->rx.af_mode == FM_RX_RDS_AF_SWITCH_MODE_ON) && rx 848 drivers/media/radio/wl128x/fmdrv_common.c (fmdev->rx.freq != FM_UNDEFINED_FREQ) && rx 849 drivers/media/radio/wl128x/fmdrv_common.c (fmdev->rx.stat_info.afcache_size != 0)) { rx 855 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.afjump_idx = 0; rx 856 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.freq_before_jump = fmdev->rx.freq; rx 871 drivers/media/radio/wl128x/fmdrv_common.c payload = fmdev->rx.stat_info.picode; rx 905 drivers/media/radio/wl128x/fmdrv_common.c fmdbg("Switch to %d KHz\n", fmdev->rx.stat_info.af_cache[fmdev->rx.afjump_idx]); rx 906 drivers/media/radio/wl128x/fmdrv_common.c frq_index = (fmdev->rx.stat_info.af_cache[fmdev->rx.afjump_idx] - rx 907 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.region.bot_freq) / FM_FREQ_MUL; rx 977 drivers/media/radio/wl128x/fmdrv_common.c curr_freq = fmdev->rx.region.bot_freq + ((u32)read_freq * FM_FREQ_MUL); rx 979 drivers/media/radio/wl128x/fmdrv_common.c jumped_freq = fmdev->rx.stat_info.af_cache[fmdev->rx.afjump_idx]; rx 982 drivers/media/radio/wl128x/fmdrv_common.c if ((curr_freq != fmdev->rx.freq_before_jump) && (curr_freq == jumped_freq)) { rx 984 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.freq = curr_freq; rx 988 drivers/media/radio/wl128x/fmdrv_common.c if (fmdev->rx.af_mode == FM_RX_RDS_AF_SWITCH_MODE_ON) rx 993 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.afjump_idx++; rx 996 drivers/media/radio/wl128x/fmdrv_common.c if (fmdev->rx.afjump_idx >= fmdev->rx.stat_info.afcache_size) { rx 1048 drivers/media/radio/wl128x/fmdrv_common.c poll_wait(file, &fmdev->rx.rds.read_queue, pts); rx 1049 drivers/media/radio/wl128x/fmdrv_common.c if (fmdev->rx.rds.rd_idx != fmdev->rx.rds.wr_idx) rx 1064 drivers/media/radio/wl128x/fmdrv_common.c if (fmdev->rx.rds.wr_idx == fmdev->rx.rds.rd_idx) { rx 1068 drivers/media/radio/wl128x/fmdrv_common.c ret = 
wait_event_interruptible(fmdev->rx.rds.read_queue, rx 1069 drivers/media/radio/wl128x/fmdrv_common.c (fmdev->rx.rds.wr_idx != fmdev->rx.rds.rd_idx)); rx 1082 drivers/media/radio/wl128x/fmdrv_common.c if (fmdev->rx.rds.wr_idx == fmdev->rx.rds.rd_idx) { rx 1086 drivers/media/radio/wl128x/fmdrv_common.c memcpy(tmpbuf, &fmdev->rx.rds.buff[fmdev->rx.rds.rd_idx], rx 1088 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.rds.rd_idx += FM_RDS_BLK_SIZE; rx 1089 drivers/media/radio/wl128x/fmdrv_common.c if (fmdev->rx.rds.rd_idx >= fmdev->rx.rds.buf_size) rx 1090 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.rds.rd_idx = 0; rx 1120 drivers/media/radio/wl128x/fmdrv_common.c if (fmdev->rx.freq == FM_UNDEFINED_FREQ) { rx 1131 drivers/media/radio/wl128x/fmdrv_common.c *cur_tuned_frq = fmdev->rx.freq; rx 1553 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.region = region_configs[default_radio_region]; rx 1555 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.mute_mode = FM_MUTE_OFF; rx 1556 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.rf_depend_mute = FM_RX_RF_DEPENDENT_MUTE_OFF; rx 1557 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.rds.flag = FM_RDS_DISABLE; rx 1558 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.freq = FM_UNDEFINED_FREQ; rx 1559 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.rds_mode = FM_RDS_SYSTEM_RDS; rx 1560 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.af_mode = FM_RX_RDS_AF_SWITCH_MODE_OFF; rx 1564 drivers/media/radio/wl128x/fmdrv_common.c init_waitqueue_head(&fmdev->rx.rds.read_queue); rx 1586 drivers/media/radio/wl128x/fmdrv_common.c wake_up_interruptible(&fmdev->rx.rds.read_queue); rx 1595 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.freq = 0; rx 1627 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.rds.buf_size = default_rds_buf * FM_RDS_BLK_SIZE; rx 1628 drivers/media/radio/wl128x/fmdrv_common.c fmdev->rx.rds.buff = kzalloc(fmdev->rx.rds.buf_size, GFP_KERNEL); rx 1629 drivers/media/radio/wl128x/fmdrv_common.c if (NULL == fmdev->rx.rds.buff) { rx 1645 drivers/media/radio/wl128x/fmdrv_common.c kfree(fmdev->rx.rds.buff); rx 1659 drivers/media/radio/wl128x/fmdrv_common.c kfree(fmdev->rx.rds.buff); rx 17 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.rds.flag = FM_RDS_DISABLE; rx 18 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.rds.last_blk_idx = 0; rx 19 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.rds.wr_idx = 0; rx 20 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.rds.rd_idx = 0; rx 22 drivers/media/radio/wl128x/fmdrv_rx.c if (fmdev->rx.af_mode == FM_RX_RDS_AF_SWITCH_MODE_ON) rx 28 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.stat_info.picode = FM_NO_PI_CODE; rx 29 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.stat_info.afcache_size = 0; rx 30 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.stat_info.af_list_max = 0; rx 41 drivers/media/radio/wl128x/fmdrv_rx.c if (freq < fmdev->rx.region.bot_freq || freq > fmdev->rx.region.top_freq) { rx 62 drivers/media/radio/wl128x/fmdrv_rx.c payload = (freq - fmdev->rx.region.bot_freq) / FM_FREQ_MUL; rx 107 drivers/media/radio/wl128x/fmdrv_rx.c curr_frq_in_khz = (fmdev->rx.region.bot_freq + ((u32)curr_frq * FM_FREQ_MUL)); rx 115 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.freq = curr_frq_in_khz; rx 151 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.region.chanl_space = spacing * FM_FREQ_MUL; rx 180 drivers/media/radio/wl128x/fmdrv_rx.c last_frq = (fmdev->rx.region.top_freq - fmdev->rx.region.bot_freq) / FM_FREQ_MUL; rx 183 drivers/media/radio/wl128x/fmdrv_rx.c space_idx = 
fmdev->rx.region.chanl_space / FM_FREQ_MUL; rx 256 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.freq = seek_upward ? rx 257 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.region.top_freq : rx 258 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.region.bot_freq; rx 260 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.freq = seek_upward ? rx 261 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.region.bot_freq : rx 262 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.region.top_freq; rx 264 drivers/media/radio/wl128x/fmdrv_rx.c next_frq = (fmdev->rx.freq - rx 265 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.region.bot_freq) / FM_FREQ_MUL; rx 276 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.freq = (fmdev->rx.region.bot_freq + rx 308 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.volume = vol_to_set; rx 323 drivers/media/radio/wl128x/fmdrv_rx.c *curr_vol = fmdev->rx.volume / FM_RX_VOLUME_GAIN_STEP; rx 332 drivers/media/radio/wl128x/fmdrv_rx.c *bot_freq = fmdev->rx.region.bot_freq; rx 335 drivers/media/radio/wl128x/fmdrv_rx.c *top_freq = fmdev->rx.region.top_freq; rx 343 drivers/media/radio/wl128x/fmdrv_rx.c *region = fmdev->rx.region.fm_band; rx 359 drivers/media/radio/wl128x/fmdrv_rx.c if (fmdev->rx.region.fm_band == region_to_set) { rx 374 drivers/media/radio/wl128x/fmdrv_rx.c if (fmdev->rx.freq < fmdev->rx.region.bot_freq) rx 375 drivers/media/radio/wl128x/fmdrv_rx.c new_frq = fmdev->rx.region.bot_freq; rx 376 drivers/media/radio/wl128x/fmdrv_rx.c else if (fmdev->rx.freq > fmdev->rx.region.top_freq) rx 377 drivers/media/radio/wl128x/fmdrv_rx.c new_frq = fmdev->rx.region.top_freq; rx 400 drivers/media/radio/wl128x/fmdrv_rx.c *curr_mute_mode = fmdev->rx.mute_mode; rx 411 drivers/media/radio/wl128x/fmdrv_rx.c switch (fmdev->rx.mute_mode) { rx 424 drivers/media/radio/wl128x/fmdrv_rx.c if (fmdev->rx.rf_depend_mute == FM_RX_RF_DEPENDENT_MUTE_ON) rx 444 drivers/media/radio/wl128x/fmdrv_rx.c if (fmdev->rx.mute_mode == mute_mode_toset) rx 447 drivers/media/radio/wl128x/fmdrv_rx.c org_state = fmdev->rx.mute_mode; rx 448 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.mute_mode = mute_mode_toset; rx 452 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.mute_mode = org_state; rx 470 drivers/media/radio/wl128x/fmdrv_rx.c *curr_mute_mode = fmdev->rx.rf_depend_mute; rx 489 drivers/media/radio/wl128x/fmdrv_rx.c if (fmdev->rx.rf_depend_mute == rfdepend_mute) rx 492 drivers/media/radio/wl128x/fmdrv_rx.c org_state = fmdev->rx.rf_depend_mute; rx 493 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.rf_depend_mute = rfdepend_mute; rx 497 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.rf_depend_mute = org_state; rx 546 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.rssi_threshold = rssi_lvl_toset; rx 562 drivers/media/radio/wl128x/fmdrv_rx.c *curr_rssi_lvl = fmdev->rx.rssi_threshold; rx 638 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.deemphasis_mode = mode; rx 654 drivers/media/radio/wl128x/fmdrv_rx.c *curr_deemphasis_mode = fmdev->rx.deemphasis_mode; rx 671 drivers/media/radio/wl128x/fmdrv_rx.c && fmdev->rx.rds.flag == FM_RDS_DISABLE) { rx 710 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.rds.flag = FM_RDS_ENABLE; rx 712 drivers/media/radio/wl128x/fmdrv_rx.c && fmdev->rx.rds.flag == FM_RDS_ENABLE) { rx 721 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.rds.last_blk_idx = 0; rx 722 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.rds.wr_idx = 0; rx 723 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.rds.rd_idx = 0; rx 728 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.rds.flag = FM_RDS_DISABLE; rx 745 
drivers/media/radio/wl128x/fmdrv_rx.c *curr_rds_en_dis = fmdev->rx.rds.flag; rx 770 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.rds_mode = rds_mode; rx 801 drivers/media/radio/wl128x/fmdrv_rx.c fmdev->rx.af_mode = af_mode; rx 817 drivers/media/radio/wl128x/fmdrv_rx.c *af_mode = fmdev->rx.af_mode; rx 286 drivers/media/radio/wl128x/fmdrv_v4l2.c ((fmdev->rx.rds.flag == FM_RDS_ENABLE) ? V4L2_TUNER_SUB_RDS : 0); rx 59 drivers/media/usb/dvb-usb/dib0700.h extern int dib0700_ctrl_rd(struct dvb_usb_device *d, u8 *tx, u8 txlen, u8 *rx, u8 rxlen); rx 71 drivers/media/usb/dvb-usb/dib0700_core.c int dib0700_ctrl_rd(struct dvb_usb_device *d, u8 *tx, u8 txlen, u8 *rx, u8 rxlen) rx 96 drivers/media/usb/dvb-usb/dib0700_core.c USB_TYPE_VENDOR | USB_DIR_IN, value, index, rx, rxlen, rx 103 drivers/media/usb/dvb-usb/dib0700_core.c debug_dump(rx, rxlen, deb_data); rx 510 drivers/media/usb/dvb-usb/pctv452e.c u8 *b0, *rx; rx 525 drivers/media/usb/dvb-usb/pctv452e.c rx = b0 + 5; rx 540 drivers/media/usb/dvb-usb/pctv452e.c ret = dvb_usb_generic_rw(d, b0, 5, rx, PCTV_ANSWER_LEN, 0); rx 547 drivers/media/usb/dvb-usb/pctv452e.c ret = dvb_usb_generic_rw(d, b0, 5, rx, PCTV_ANSWER_LEN, 0); rx 561 drivers/media/usb/dvb-usb/pctv452e.c u8 *b, *rx; rx 569 drivers/media/usb/dvb-usb/pctv452e.c rx = b + CMD_BUFFER_SIZE; rx 580 drivers/media/usb/dvb-usb/pctv452e.c ret = dvb_usb_generic_rw(d, b, 4, rx, PCTV_ANSWER_LEN, 0); rx 585 drivers/media/usb/dvb-usb/pctv452e.c info("%s: read: %2d: %*ph: ", __func__, ret, 3, rx); rx 586 drivers/media/usb/dvb-usb/pctv452e.c for (i = 0; (i < rx[3]) && ((i+3) < PCTV_ANSWER_LEN); i++) rx 587 drivers/media/usb/dvb-usb/pctv452e.c info(" %02x", rx[i+3]); rx 592 drivers/media/usb/dvb-usb/pctv452e.c if ((rx[3] == 9) && (rx[12] & 0x01)) { rx 594 drivers/media/usb/dvb-usb/pctv452e.c state->last_rc_key = RC_SCANCODE_RC5(rx[7], rx[6]); rx 597 drivers/media/usb/dvb-usb/pctv452e.c __func__, rx[6], rx[7]); rx 85 drivers/media/usb/dvb-usb/technisat-usb2.c u8 device_addr, u8 *tx, u8 txlen, u8 *rx, u8 rxlen) rx 112 drivers/media/usb/dvb-usb/technisat-usb2.c if (rx != NULL) { rx 147 drivers/media/usb/dvb-usb/technisat-usb2.c if (rx != NULL) { rx 148 drivers/media/usb/dvb-usb/technisat-usb2.c memcpy(rx, &b[2], rxlen); rx 151 drivers/media/usb/dvb-usb/technisat-usb2.c debug_dump(rx, rxlen, deb_i2c); rx 123 drivers/media/usb/dvb-usb/ttusb2.c u8 rx[60];/* (64 -4) */ rx 124 drivers/media/usb/dvb-usb/ttusb2.c ret = ttusb2_msg(d, cmd, data, write_len, rx, read_len); rx 126 drivers/media/usb/dvb-usb/ttusb2.c memcpy(data, rx, read_len); rx 449 drivers/media/usb/dvb-usb/ttusb2.c u8 rx[9]; /* A CMD_GET_IR_CODE reply is 9 bytes long */ rx 451 drivers/media/usb/dvb-usb/ttusb2.c ret = ttusb2_msg(d, CMD_GET_IR_CODE, NULL, 0, rx, sizeof(rx)); rx 455 drivers/media/usb/dvb-usb/ttusb2.c if (rx[8] & 0x01) { rx 457 drivers/media/usb/dvb-usb/ttusb2.c st->last_rc_key = RC_SCANCODE_RC5(rx[3], rx[2]); rx 458 drivers/media/usb/dvb-usb/ttusb2.c deb_info("%s: cmd=0x%02x sys=0x%02x\n", __func__, rx[2], rx[3]); rx 459 drivers/media/usb/dvb-usb/ttusb2.c rc_keydown(d->rc_dev, RC_PROTO_RC5, st->last_rc_key, rx[1]); rx 241 drivers/media/usb/hackrf/hackrf.c const bool rx = test_bit(RX_ON, &dev->flags); rx 264 drivers/media/usb/hackrf/hackrf.c if (!rx && !tx) { rx 270 drivers/media/usb/hackrf/hackrf.c if (rx && test_and_clear_bit(RX_ADC_FREQUENCY, &dev->flags)) { rx 298 drivers/media/usb/hackrf/hackrf.c if (rx && test_and_clear_bit(RX_BANDWIDTH, &dev->flags)) { rx 346 drivers/media/usb/hackrf/hackrf.c if (rx && 
test_and_clear_bit(RX_RF_FREQUENCY, &dev->flags)) { rx 374 drivers/media/usb/hackrf/hackrf.c if (rx && test_and_clear_bit(RX_RF_GAIN, &dev->flags)) { rx 398 drivers/media/usb/hackrf/hackrf.c if (rx && test_and_clear_bit(RX_LNA_GAIN, &dev->flags)) { rx 409 drivers/media/usb/hackrf/hackrf.c if (rx && test_and_clear_bit(RX_IF_GAIN, &dev->flags)) { rx 144 drivers/mfd/ipaq-micro.c struct ipaq_micro_rxdev *rx = &micro->rx; rx 146 drivers/mfd/ipaq-micro.c switch (rx->state) { rx 149 drivers/mfd/ipaq-micro.c rx->state = STATE_ID; /* Next byte is the id and len */ rx 152 drivers/mfd/ipaq-micro.c rx->id = (ch & 0xf0) >> 4; rx 153 drivers/mfd/ipaq-micro.c rx->len = (ch & 0x0f); rx 154 drivers/mfd/ipaq-micro.c rx->index = 0; rx 155 drivers/mfd/ipaq-micro.c rx->chksum = ch; rx 156 drivers/mfd/ipaq-micro.c rx->state = (rx->len > 0) ? STATE_DATA : STATE_CHKSUM; rx 159 drivers/mfd/ipaq-micro.c rx->chksum += ch; rx 160 drivers/mfd/ipaq-micro.c rx->buf[rx->index] = ch; rx 161 drivers/mfd/ipaq-micro.c if (++rx->index == rx->len) rx 162 drivers/mfd/ipaq-micro.c rx->state = STATE_CHKSUM; rx 165 drivers/mfd/ipaq-micro.c if (ch == rx->chksum) rx 166 drivers/mfd/ipaq-micro.c micro_rx_msg(micro, rx->id, rx->len, rx->buf); rx 167 drivers/mfd/ipaq-micro.c rx->state = STATE_SOF; rx 300 drivers/mfd/ipaq-micro.c struct ipaq_micro_rxdev *rx = &micro->rx; rx 307 drivers/mfd/ipaq-micro.c rx->state = STATE_SOF; /* Reset the state machine */ rx 298 drivers/mfd/timberdale.c .rx = true, rx 304 drivers/mfd/timberdale.c .rx = false, rx 310 drivers/mfd/timberdale.c .rx = true, rx 316 drivers/mfd/timberdale.c .rx = false, rx 322 drivers/mfd/timberdale.c .rx = true, rx 332 drivers/mfd/timberdale.c .rx = true, rx 339 drivers/mfd/timberdale.c .rx = true, rx 345 drivers/mfd/timberdale.c .rx = false, rx 262 drivers/misc/echo/echo.c int16_t oslec_update(struct oslec_state *ec, int16_t tx, int16_t rx) rx 276 drivers/misc/echo/echo.c ec->rx = rx; rx 278 drivers/misc/echo/echo.c rx >>= 1; rx 296 drivers/misc/echo/echo.c tmp = rx << 15; rx 320 drivers/misc/echo/echo.c rx = tmp1; rx 346 drivers/misc/echo/echo.c ec->lrxacc += abs(rx) - ec->lrx; rx 353 drivers/misc/echo/echo.c ec->clean = rx - echo_value; rx 360 drivers/misc/echo/echo.c clean_bg = rx - echo_value; rx 524 drivers/misc/echo/echo.c ec->clean_nlp = rx; rx 119 drivers/misc/echo/echo.h int16_t rx; rx 70 drivers/misc/echo/oslec.h int16_t oslec_update(struct oslec_state *ec, int16_t tx, int16_t rx); rx 55 drivers/mmc/core/sdio_uart.c __u32 rx; rx 379 drivers/mmc/core/sdio_uart.c port->icount.rx++; rx 974 drivers/mmc/core/sdio_uart.c port->icount.tx, port->icount.rx); rx 606 drivers/mmc/host/rtsx_pci_sdmmc.c u8 sample_point, bool rx) rx 611 drivers/mmc/host/rtsx_pci_sdmmc.c __func__, rx ? "RX" : "TX", sample_point);
rx 614 drivers/mmc/host/rtsx_pci_sdmmc.c if (rx) { rx 154 drivers/mtd/spi-nor/mtk-quadspi.c u8 *tx, int txlen, u8 *rx, int rxlen) rx 190 drivers/mtd/spi-nor/mtk-quadspi.c rx[i] = readb(mtk_nor->base + MTK_NOR_SHREG(idx)); rx 156 drivers/net/arcnet/arc-rawmode.c .rx = rx, rx 196 drivers/net/arcnet/arcdevice.h void (*rx)(struct net_device *dev, int bufnum, rx 92 drivers/net/arcnet/arcnet.c .rx = null_rx, rx 1120 drivers/net/arcnet/arcnet.c arc_proto_map[soft->proto]->rx(dev, bufnum, &rxdata.pkt, length); rx 235 drivers/net/arcnet/capmode.c .rx = rx, rx 40 drivers/net/arcnet/rfc1051.c static void rx(struct net_device *dev, int bufnum, rx 51 drivers/net/arcnet/rfc1051.c .rx = rx, rx 41 drivers/net/arcnet/rfc1201.c static void rx(struct net_device *dev, int bufnum, rx 53 drivers/net/arcnet/rfc1201.c .rx = rx, rx 224 drivers/net/can/grcan.c struct grcan_dma_buffer rx; rx 937 drivers/net/can/grcan.c struct grcan_dma_buffer *large = rsize > tsize ? &dma->rx : &dma->tx; rx 938 drivers/net/can/grcan.c struct grcan_dma_buffer *small = rsize > tsize ? &dma->tx : &dma->rx; rx 961 drivers/net/can/grcan.c dma->rx.size = rsize; rx 986 drivers/net/can/grcan.c grcan_write_reg(&regs->rxaddr, priv->dma.rx.handle); rx 987 drivers/net/can/grcan.c grcan_write_reg(&regs->rxsize, priv->dma.rx.size); rx 1193 drivers/net/can/grcan.c slot = dma->rx.buf + rd; rx 1221 drivers/net/can/grcan.c rd = grcan_ring_add(rd, GRCAN_MSG_SIZE, dma->rx.size); rx 336 drivers/net/can/kvaser_pciefd.c u32 tx_len, u8 *rx, u32 rx_len) rx 367 drivers/net/can/kvaser_pciefd.c *rx++ = ioread32(pcie->reg_base + KVASER_PCIEFD_SPI_RX_REG); rx 299 drivers/net/can/mscan/mscan.c can_id = in_be16(&regs->rx.idr1_0); rx 302 drivers/net/can/mscan/mscan.c can_id = ((can_id << 16) | in_be16(&regs->rx.idr3_2)); rx 314 drivers/net/can/mscan/mscan.c frame->can_dlc = get_can_dlc(in_8(&regs->rx.dlr) & 0xf); rx 317 drivers/net/can/mscan/mscan.c void __iomem *data = &regs->rx.dsr1_0; rx 198 drivers/net/can/mscan/mscan.h } rx; rx 185 drivers/net/can/usb/ems_usb.c u8 rx; rx 166 drivers/net/can/usb/esd_usb2.c struct rx_msg rx; rx 220 drivers/net/can/usb/esd_usb2.c u32 id = le32_to_cpu(msg->msg.rx.id) & ESD_IDMASK; rx 223 drivers/net/can/usb/esd_usb2.c u8 state = msg->msg.rx.data[0]; rx 224 drivers/net/can/usb/esd_usb2.c u8 ecc = msg->msg.rx.data[1]; rx 225 drivers/net/can/usb/esd_usb2.c u8 txerr = msg->msg.rx.data[2]; rx 226 drivers/net/can/usb/esd_usb2.c u8 rxerr = msg->msg.rx.data[3]; rx 312 drivers/net/can/usb/esd_usb2.c id = le32_to_cpu(msg->msg.rx.id); rx 324 drivers/net/can/usb/esd_usb2.c cf->can_dlc = get_can_dlc(msg->msg.rx.dlc & ~ESD_RTR); rx 329 drivers/net/can/usb/esd_usb2.c if (msg->msg.rx.dlc & ESD_RTR) { rx 333 drivers/net/can/usb/esd_usb2.c cf->data[i] = msg->msg.rx.data[i]; rx 402 drivers/net/can/usb/esd_usb2.c if (msg->msg.rx.net >= dev->net_count) { rx 407 drivers/net/can/usb/esd_usb2.c esd_usb2_rx_can_msg(dev->nets[msg->msg.rx.net], msg); rx 521 drivers/net/can/usb/peak_usb/pcan_usb_pro.c struct pcan_usb_pro_rxmsg *rx) rx 523 drivers/net/can/usb/peak_usb/pcan_usb_pro.c const unsigned int ctrl_idx = (rx->len >> 4) & 0x0f; rx 534 drivers/net/can/usb/peak_usb/pcan_usb_pro.c can_frame->can_id = le32_to_cpu(rx->id); rx 535 drivers/net/can/usb/peak_usb/pcan_usb_pro.c can_frame->can_dlc = rx->len & 0x0f; rx 537 drivers/net/can/usb/peak_usb/pcan_usb_pro.c if (rx->flags & PCAN_USBPRO_EXT) rx 540 drivers/net/can/usb/peak_usb/pcan_usb_pro.c if (rx->flags & PCAN_USBPRO_RTR) rx 543 drivers/net/can/usb/peak_usb/pcan_usb_pro.c memcpy(can_frame->data,
rx->data, can_frame->can_dlc); rx 546 drivers/net/can/usb/peak_usb/pcan_usb_pro.c peak_usb_get_ts_time(&usb_if->time_ref, le32_to_cpu(rx->ts32), rx 18 drivers/net/dsa/sja1105/sja1105_spi.c const void *tx, void *rx, int size) rx 23 drivers/net/dsa/sja1105/sja1105_spi.c .rx_buf = rx, rx 954 drivers/net/ethernet/3com/3c574_cs.c u8 rx, tx, up; rx 975 drivers/net/ethernet/3com/3c574_cs.c rx = inw(ioaddr + 10); rx 1636 drivers/net/ethernet/3com/typhoon.c struct rx_desc *rx; rx 1651 drivers/net/ethernet/3com/typhoon.c rx = (struct rx_desc *) (rxRing->ringBase + rxaddr); rx 1652 drivers/net/ethernet/3com/typhoon.c idx = rx->addr; rx 1659 drivers/net/ethernet/3com/typhoon.c if(rx->flags & TYPHOON_RX_ERROR) { rx 1664 drivers/net/ethernet/3com/typhoon.c pkt_len = le16_to_cpu(rx->frameLen); rx 1686 drivers/net/ethernet/3com/typhoon.c csum_bits = rx->rxStatus & (TYPHOON_RX_IP_CHK_GOOD | rx 1696 drivers/net/ethernet/3com/typhoon.c if (rx->rxStatus & TYPHOON_RX_VLAN) rx 1698 drivers/net/ethernet/3com/typhoon.c ntohl(rx->vlanTag) & 0xffff); rx 53 drivers/net/ethernet/amazon/ena/ena_ethtool.c ENA_STAT_ENTRY(stat, rx) rx 261 drivers/net/ethernet/amd/sunlance.c void (*rx)(struct net_device *); rx 836 drivers/net/ethernet/amd/sunlance.c lp->rx(dev); rx 1369 drivers/net/ethernet/amd/sunlance.c lp->rx = lance_rx_pio; rx 1381 drivers/net/ethernet/amd/sunlance.c lp->rx = lance_rx_dvma; rx 376 drivers/net/ethernet/amd/xgbe/xgbe-desc.c xgbe_set_buffer_data(&rdata->rx.hdr, &ring->rx_hdr_pa, rx 380 drivers/net/ethernet/amd/xgbe/xgbe-desc.c xgbe_set_buffer_data(&rdata->rx.buf, &ring->rx_buf_pa, rx 490 drivers/net/ethernet/amd/xgbe/xgbe-desc.c if (rdata->rx.hdr.pa.pages) rx 491 drivers/net/ethernet/amd/xgbe/xgbe-desc.c put_page(rdata->rx.hdr.pa.pages); rx 493 drivers/net/ethernet/amd/xgbe/xgbe-desc.c if (rdata->rx.hdr.pa_unmap.pages) { rx 494 drivers/net/ethernet/amd/xgbe/xgbe-desc.c dma_unmap_page(pdata->dev, rdata->rx.hdr.pa_unmap.pages_dma, rx 495 drivers/net/ethernet/amd/xgbe/xgbe-desc.c rdata->rx.hdr.pa_unmap.pages_len, rx 497 drivers/net/ethernet/amd/xgbe/xgbe-desc.c put_page(rdata->rx.hdr.pa_unmap.pages); rx 500 drivers/net/ethernet/amd/xgbe/xgbe-desc.c if (rdata->rx.buf.pa.pages) rx 501 drivers/net/ethernet/amd/xgbe/xgbe-desc.c put_page(rdata->rx.buf.pa.pages); rx 503 drivers/net/ethernet/amd/xgbe/xgbe-desc.c if (rdata->rx.buf.pa_unmap.pages) { rx 504 drivers/net/ethernet/amd/xgbe/xgbe-desc.c dma_unmap_page(pdata->dev, rdata->rx.buf.pa_unmap.pages_dma, rx 505 drivers/net/ethernet/amd/xgbe/xgbe-desc.c rdata->rx.buf.pa_unmap.pages_len, rx 507 drivers/net/ethernet/amd/xgbe/xgbe-desc.c put_page(rdata->rx.buf.pa_unmap.pages); rx 511 drivers/net/ethernet/amd/xgbe/xgbe-desc.c memset(&rdata->rx, 0, sizeof(rdata->rx)); rx 1465 drivers/net/ethernet/amd/xgbe/xgbe-dev.c hdr_dma = rdata->rx.hdr.dma_base + rdata->rx.hdr.dma_off; rx 1466 drivers/net/ethernet/amd/xgbe/xgbe-dev.c buf_dma = rdata->rx.buf.dma_base + rdata->rx.buf.dma_off; rx 1952 drivers/net/ethernet/amd/xgbe/xgbe-dev.c rdata->rx.hdr_len = XGMAC_GET_BITS_LE(rdesc->desc2, rx 1954 drivers/net/ethernet/amd/xgbe/xgbe-dev.c if (rdata->rx.hdr_len) rx 1990 drivers/net/ethernet/amd/xgbe/xgbe-dev.c rdata->rx.len = XGMAC_GET_BITS_LE(rdesc->desc3, RX_NORMAL_DESC3, PL); rx 2056 drivers/net/ethernet/amd/xgbe/xgbe-dev.c pdata->ext_stats.rxq_bytes[channel->queue_index] += rdata->rx.len; rx 2556 drivers/net/ethernet/amd/xgbe/xgbe-drv.c skb = napi_alloc_skb(napi, rdata->rx.hdr.dma_len); rx 2563 drivers/net/ethernet/amd/xgbe/xgbe-drv.c dma_sync_single_range_for_cpu(pdata->dev, 
rdata->rx.hdr.dma_base, rx 2564 drivers/net/ethernet/amd/xgbe/xgbe-drv.c rdata->rx.hdr.dma_off, rx 2565 drivers/net/ethernet/amd/xgbe/xgbe-drv.c rdata->rx.hdr.dma_len, DMA_FROM_DEVICE); rx 2567 drivers/net/ethernet/amd/xgbe/xgbe-drv.c packet = page_address(rdata->rx.hdr.pa.pages) + rx 2568 drivers/net/ethernet/amd/xgbe/xgbe-drv.c rdata->rx.hdr.pa.pages_offset; rx 2583 drivers/net/ethernet/amd/xgbe/xgbe-drv.c if (rdata->rx.hdr_len) rx 2584 drivers/net/ethernet/amd/xgbe/xgbe-drv.c return rdata->rx.hdr_len; rx 2590 drivers/net/ethernet/amd/xgbe/xgbe-drv.c return rdata->rx.hdr.dma_len; rx 2595 drivers/net/ethernet/amd/xgbe/xgbe-drv.c return min_t(unsigned int, rdata->rx.hdr.dma_len, rdata->rx.len); rx 2604 drivers/net/ethernet/amd/xgbe/xgbe-drv.c return rdata->rx.buf.dma_len; rx 2609 drivers/net/ethernet/amd/xgbe/xgbe-drv.c return rdata->rx.len - len; rx 2778 drivers/net/ethernet/amd/xgbe/xgbe-drv.c rdata->rx.buf.dma_base, rx 2779 drivers/net/ethernet/amd/xgbe/xgbe-drv.c rdata->rx.buf.dma_off, rx 2780 drivers/net/ethernet/amd/xgbe/xgbe-drv.c rdata->rx.buf.dma_len, rx 2784 drivers/net/ethernet/amd/xgbe/xgbe-drv.c rdata->rx.buf.pa.pages, rx 2785 drivers/net/ethernet/amd/xgbe/xgbe-drv.c rdata->rx.buf.pa.pages_offset, rx 2787 drivers/net/ethernet/amd/xgbe/xgbe-drv.c rdata->rx.buf.dma_len); rx 2788 drivers/net/ethernet/amd/xgbe/xgbe-drv.c rdata->rx.buf.pa.pages = NULL; rx 660 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c unsigned int rx, tx; rx 683 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c rx = __rounddown_pow_of_two(ringparam->rx_pending); rx 684 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c if (rx != ringparam->rx_pending) rx 687 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c rx); rx 695 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c if ((rx == pdata->rx_desc_count) && rx 699 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c pdata->rx_desc_count = rx; rx 712 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c unsigned int rx, tx, combined; rx 720 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c rx = min(pdata->hw_feat.rx_ch_cnt, pdata->rx_max_channel_count); rx 721 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c rx = min(rx, pdata->channel_irq_count); rx 726 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c combined = min(rx, tx); rx 729 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c channels->max_rx = rx ? rx - 1 : 0; rx 733 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c rx = pdata->new_rx_ring_count ? 
: pdata->rx_ring_count; rx 736 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c combined = min(rx, tx); rx 737 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c rx -= combined; rx 741 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c channels->rx_count = rx; rx 757 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c unsigned int rx, rx_curr, tx, tx_curr, combined; rx 765 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c rx = min(pdata->hw_feat.rx_ch_cnt, pdata->rx_max_channel_count); rx 766 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c rx = min(rx, pdata->channel_irq_count); rx 771 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c combined = min(rx, tx); rx 806 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c if ((channels->combined_count + channels->rx_count) > rx) { rx 809 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c channels->combined_count + channels->rx_count, rx); rx 822 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c rx = channels->combined_count + channels->rx_count; rx 828 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c if ((rx == rx_curr) && (tx == tx_curr)) rx 831 drivers/net/ethernet/amd/xgbe/xgbe-ethtool.c pdata->new_rx_ring_count = rx; rx 431 drivers/net/ethernet/amd/xgbe/xgbe.h struct xgbe_rx_ring_data rx; /* Rx-related data */ rx 607 drivers/net/ethernet/apm/xgene/xgene_enet_hw.c u32 *rx, u32 *tx) rx 612 drivers/net/ethernet/apm/xgene/xgene_enet_hw.c *rx = ICM_DROP_COUNT(count); rx 149 drivers/net/ethernet/apm/xgene/xgene_enet_main.h void (*get_drop_cnt)(struct xgene_enet_pdata *pdata, u32 *rx, u32 *tx); rx 87 drivers/net/ethernet/apm/xgene/xgene_enet_sgmac.c u32 *rx, u32 *tx) rx 95 drivers/net/ethernet/apm/xgene/xgene_enet_sgmac.c *rx = ICM_DROP_COUNT(count); rx 172 drivers/net/ethernet/apm/xgene/xgene_enet_xgmac.c u32 *rx, u32 *tx) rx 177 drivers/net/ethernet/apm/xgene/xgene_enet_xgmac.c *rx = ICM_DROP_COUNT(count); rx 71 drivers/net/ethernet/aquantia/atlantic/aq_ring.c self->stats.rx.pg_flips++; rx 78 drivers/net/ethernet/aquantia/atlantic/aq_ring.c self->stats.rx.pg_losts++; rx 82 drivers/net/ethernet/aquantia/atlantic/aq_ring.c self->stats.rx.pg_reuses++; rx 247 drivers/net/ethernet/aquantia/atlantic/aq_ring.c ++self->stats.rx.packets; rx 268 drivers/net/ethernet/aquantia/atlantic/aq_ring.c ++self->stats.rx.errors; rx 336 drivers/net/ethernet/aquantia/atlantic/aq_ring.c ++self->stats.rx.errors; rx 342 drivers/net/ethernet/aquantia/atlantic/aq_ring.c ++self->stats.rx.errors; rx 430 drivers/net/ethernet/aquantia/atlantic/aq_ring.c ++self->stats.rx.packets; rx 431 drivers/net/ethernet/aquantia/atlantic/aq_ring.c self->stats.rx.bytes += skb->len; rx 107 drivers/net/ethernet/aquantia/atlantic/aq_ring.h struct aq_ring_stats_rx_s rx; rx 344 drivers/net/ethernet/aquantia/atlantic/aq_vec.c struct aq_ring_stats_rx_s *rx = &ring[AQ_VEC_RX_ID].stats.rx; rx 346 drivers/net/ethernet/aquantia/atlantic/aq_vec.c stats_rx->packets += rx->packets; rx 347 drivers/net/ethernet/aquantia/atlantic/aq_vec.c stats_rx->bytes += rx->bytes; rx 348 drivers/net/ethernet/aquantia/atlantic/aq_vec.c stats_rx->errors += rx->errors; rx 349 drivers/net/ethernet/aquantia/atlantic/aq_vec.c stats_rx->jumbo_packets += rx->jumbo_packets; rx 350 drivers/net/ethernet/aquantia/atlantic/aq_vec.c stats_rx->lro_packets += rx->lro_packets; rx 351 drivers/net/ethernet/aquantia/atlantic/aq_vec.c stats_rx->pg_losts += rx->pg_losts; rx 352 drivers/net/ethernet/aquantia/atlantic/aq_vec.c stats_rx->pg_flips += rx->pg_flips; rx 353 drivers/net/ethernet/aquantia/atlantic/aq_vec.c stats_rx->pg_reuses += rx->pg_reuses; rx 711 
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_a0.c ++ring->stats.rx.jumbo_packets; rx 770 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c ++ring->stats.rx.lro_packets; rx 776 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_b0.c ++ring->stats.rx.jumbo_packets; rx 88 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.c u32 rx) rx 122 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.c aq_hw_write_reg_bit(aq_hw, itr_imr_rxren_adr[rx], rx 123 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.c itr_imr_rxren_msk[rx], rx 124 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.c itr_imr_rxren_shift[rx], rx 169 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.c void hw_atl_itr_irq_map_rx_set(struct aq_hw_s *aq_hw, u32 irq_map_rx, u32 rx) rx 203 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.c aq_hw_write_reg_bit(aq_hw, itr_imr_rxr_adr[rx], rx 204 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.c itr_imr_rxr_msk[rx], rx 205 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.c itr_imr_rxr_shift[rx], rx 102 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.h u32 rx); rx 109 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.h void hw_atl_itr_irq_map_rx_set(struct aq_hw_s *aq_hw, u32 irq_map_rx, u32 rx); rx 259 drivers/net/ethernet/atheros/ag71xx.c } rx; rx 706 drivers/net/ethernet/atheros/ag71xx.c u32 rx, tx; rx 710 drivers/net/ethernet/atheros/ag71xx.c rx = ag71xx_rr(ag, AG71XX_REG_RX_CTRL) & RX_CTRL_RXE; rx 712 drivers/net/ethernet/atheros/ag71xx.c if (!rx && !tx) rx 1022 drivers/net/ethernet/atheros/ag71xx.c if (ring->buf[i].rx.rx_buf) { rx 1024 drivers/net/ethernet/atheros/ag71xx.c ring->buf[i].rx.dma_addr, rx 1026 drivers/net/ethernet/atheros/ag71xx.c skb_free_frag(ring->buf[i].rx.rx_buf); rx 1050 drivers/net/ethernet/atheros/ag71xx.c buf->rx.rx_buf = data; rx 1051 drivers/net/ethernet/atheros/ag71xx.c buf->rx.dma_addr = dma_map_single(&ag->pdev->dev, data, ag->rx_buf_size, rx 1053 drivers/net/ethernet/atheros/ag71xx.c desc->data = (u32)buf->rx.dma_addr + offset; rx 1113 drivers/net/ethernet/atheros/ag71xx.c if (!ring->buf[i].rx.rx_buf && rx 1134 drivers/net/ethernet/atheros/ag71xx.c struct ag71xx_ring *rx = &ag->rx_ring; rx 1137 drivers/net/ethernet/atheros/ag71xx.c ring_size = BIT(tx->order) + BIT(rx->order); rx 1153 drivers/net/ethernet/atheros/ag71xx.c rx->buf = &tx->buf[tx_size]; rx 1154 drivers/net/ethernet/atheros/ag71xx.c rx->descs_cpu = ((void *)tx->descs_cpu) + tx_size * AG71XX_DESC_SIZE; rx 1155 drivers/net/ethernet/atheros/ag71xx.c rx->descs_dma = tx->descs_dma + tx_size * AG71XX_DESC_SIZE; rx 1164 drivers/net/ethernet/atheros/ag71xx.c struct ag71xx_ring *rx = &ag->rx_ring; rx 1167 drivers/net/ethernet/atheros/ag71xx.c ring_size = BIT(tx->order) + BIT(rx->order); rx 1176 drivers/net/ethernet/atheros/ag71xx.c rx->descs_cpu = NULL; rx 1178 drivers/net/ethernet/atheros/ag71xx.c rx->buf = NULL; rx 1474 drivers/net/ethernet/atheros/ag71xx.c dma_unmap_single(&ag->pdev->dev, ring->buf[i].rx.dma_addr, rx 1480 drivers/net/ethernet/atheros/ag71xx.c skb = build_skb(ring->buf[i].rx.rx_buf, ag71xx_buffer_size(ag)); rx 1482 drivers/net/ethernet/atheros/ag71xx.c skb_free_frag(ring->buf[i].rx.rx_buf); rx 1499 drivers/net/ethernet/atheros/ag71xx.c ring->buf[i].rx.rx_buf = NULL; rx 1531 drivers/net/ethernet/atheros/ag71xx.c if (!rx_ring->buf[rx_ring->dirty % rx_ring_size].rx.rx_buf) rx 1130 drivers/net/ethernet/aurora/nb8800.c u32 rx, tx; rx 1133 drivers/net/ethernet/aurora/nb8800.c rx = nb8800_read_stat(dev, 
i); rx 1135 drivers/net/ethernet/aurora/nb8800.c st[i] = rx; rx 205 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_64_octets", mib.rx.pkt_cnt.cnt_64), rx 206 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_65_127_oct", mib.rx.pkt_cnt.cnt_127), rx 207 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_128_255_oct", mib.rx.pkt_cnt.cnt_255), rx 208 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_256_511_oct", mib.rx.pkt_cnt.cnt_511), rx 209 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_512_1023_oct", mib.rx.pkt_cnt.cnt_1023), rx 210 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_1024_1518_oct", mib.rx.pkt_cnt.cnt_1518), rx 211 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_vlan_1519_1522_oct", mib.rx.pkt_cnt.cnt_mgv), rx 212 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_1522_2047_oct", mib.rx.pkt_cnt.cnt_2047), rx 213 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_2048_4095_oct", mib.rx.pkt_cnt.cnt_4095), rx 214 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_4096_9216_oct", mib.rx.pkt_cnt.cnt_9216), rx 215 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_pkts", mib.rx.pkt), rx 216 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_bytes", mib.rx.bytes), rx 217 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_multicast", mib.rx.mca), rx 218 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_broadcast", mib.rx.bca), rx 219 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_fcs", mib.rx.fcs), rx 220 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_control", mib.rx.cf), rx 221 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_pause", mib.rx.pf), rx 222 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_unknown", mib.rx.uo), rx 223 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_align", mib.rx.aln), rx 224 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_outrange", mib.rx.flr), rx 225 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_code", mib.rx.cde), rx 226 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_carrier", mib.rx.fcr), rx 227 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_oversize", mib.rx.ovr), rx 228 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_jabber", mib.rx.jbr), rx 229 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_mtu_err", mib.rx.mtue), rx 230 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_good_pkts", mib.rx.pok), rx 231 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_unicast", mib.rx.uc), rx 232 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_ppp", mib.rx.ppp), rx 233 drivers/net/ethernet/broadcom/bcmsysport.c STAT_MIB_RX("rx_crc", mib.rx.rcrc), rx 589 drivers/net/ethernet/broadcom/bcmsysport.h struct bcm_sysport_rx_counters rx; rx 338 drivers/net/ethernet/broadcom/bgmac.c struct bgmac_rx_header *rx; rx 347 drivers/net/ethernet/broadcom/bgmac.c rx = buf + BGMAC_RX_BUF_OFFSET; rx 348 drivers/net/ethernet/broadcom/bgmac.c rx->len = cpu_to_le16(0xdead); rx 349 drivers/net/ethernet/broadcom/bgmac.c rx->flags = cpu_to_le16(0xbeef); rx 402 drivers/net/ethernet/broadcom/bgmac.c struct bgmac_rx_header *rx = slot->buf + BGMAC_RX_BUF_OFFSET; rx 406 drivers/net/ethernet/broadcom/bgmac.c rx->len = cpu_to_le16(0xdead); rx 407 drivers/net/ethernet/broadcom/bgmac.c rx->flags = cpu_to_le16(0xbeef); rx 427 drivers/net/ethernet/broadcom/bgmac.c struct bgmac_rx_header *rx = slot->buf + 
BGMAC_RX_BUF_OFFSET; rx 445 drivers/net/ethernet/broadcom/bgmac.c len = le16_to_cpu(rx->len); rx 446 drivers/net/ethernet/broadcom/bgmac.c flags = le16_to_cpu(rx->flags); rx 7338 drivers/net/ethernet/broadcom/bnx2.c bnx2_change_ring_size(struct bnx2 *bp, u32 rx, u32 tx, bool reset_irq) rx 7356 drivers/net/ethernet/broadcom/bnx2.c bnx2_set_rx_ring_size(bp, rx); rx 1980 drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c int rc, tx, rx; rx 1983 drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c rx = BNX2X_NUM_ETH_QUEUES(bp); rx 1987 drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c rx++; rx 1996 drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c rc = netif_set_real_num_rx_queues(bp->dev, rx); rx 2003 drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c tx, rx); rx 3740 drivers/net/ethernet/broadcom/bnx2x/bnx2x_hsi.h struct client_init_rx_data rx; rx 12159 drivers/net/ethernet/broadcom/bnx2x/bnx2x_link.c u32 rx = 0, tx = 0, i; rx 12166 drivers/net/ethernet/broadcom/bnx2x/bnx2x_link.c rx = REG_RD(bp, shmem_base + rx 12174 drivers/net/ethernet/broadcom/bnx2x/bnx2x_link.c rx = REG_RD(bp, shmem_base + rx 12183 drivers/net/ethernet/broadcom/bnx2x/bnx2x_link.c phy->rx_preemphasis[i << 1] = ((rx>>16) & 0xffff); rx 12184 drivers/net/ethernet/broadcom/bnx2x/bnx2x_link.c phy->rx_preemphasis[(i << 1) + 1] = (rx & 0xffff); rx 8726 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c __set_bit(BNX2X_Q_FLG_HC, &init_params->rx.flags); rx 8732 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c __set_bit(BNX2X_Q_FLG_HC_EN, &init_params->rx.flags); rx 8736 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c init_params->rx.hc_rate = bp->rx_ticks ? rx 8742 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c init_params->rx.fw_sb_id = init_params->tx.fw_sb_id = rx 8749 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c init_params->rx.sb_cq_index = HC_INDEX_ETH_RX_CQ_CONS; rx 4785 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c data->rx.tpa_en |= test_bit(BNX2X_Q_FLG_TPA_IPV6, &params->flags) * rx 4961 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c &data->rx, rx 4966 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c &data->rx); rx 5022 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c test_bit(BNX2X_Q_FLG_HC, &init->rx.flags)) { rx 5023 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c hc_usec = init->rx.hc_rate ?
1000000 / init->rx.hc_rate : 0; rx 5025 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c bnx2x_update_coalesce_sb_index(bp, init->rx.fw_sb_id, rx 5026 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c init->rx.sb_cq_index, rx 5027 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c !test_bit(BNX2X_Q_FLG_HC_EN, &init->rx.flags), rx 893 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.h } rx; rx 154 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c init_params->rx.sb_cq_index, rx 155 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c init_params->rx.hc_rate, rx 178 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c if (test_bit(BNX2X_Q_FLG_HC, &init_p->rx.flags)) rx 179 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c __set_bit(BNX2X_Q_FLG_HC_EN, &init_p->rx.flags); rx 185 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c init_p->rx.fw_sb_id = vf_igu_sb(vf, q->sb_idx); rx 1598 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c init_p->rx.hc_rate = setup_q->rxq.hc_rate; rx 1599 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c init_p->rx.sb_cq_index = setup_q->rxq.sb_index; rx 1601 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c &init_p->rx.flags); rx 5650 drivers/net/ethernet/broadcom/bnxt/bnxt.c static int bnxt_trim_rings(struct bnxt *bp, int *rx, int *tx, int max, rx 5684 drivers/net/ethernet/broadcom/bnxt/bnxt.c int rx = hw_resc->resv_rx_rings; rx 5688 drivers/net/ethernet/broadcom/bnxt/bnxt.c rx >>= 1; rx 5689 drivers/net/ethernet/broadcom/bnxt/bnxt.c if (cp < (rx + tx)) { rx 5690 drivers/net/ethernet/broadcom/bnxt/bnxt.c bnxt_trim_rings(bp, &rx, &tx, cp, false); rx 5692 drivers/net/ethernet/broadcom/bnxt/bnxt.c rx <<= 1; rx 5693 drivers/net/ethernet/broadcom/bnxt/bnxt.c hw_resc->resv_rx_rings = rx; rx 5697 drivers/net/ethernet/broadcom/bnxt/bnxt.c hw_resc->resv_hw_ring_grps = rx; rx 5864 drivers/net/ethernet/broadcom/bnxt/bnxt.c static int bnxt_hwrm_reserve_rings(struct bnxt *bp, int tx, int rx, int grp, rx 5868 drivers/net/ethernet/broadcom/bnxt/bnxt.c return bnxt_hwrm_reserve_pf_rings(bp, tx, rx, grp, cp, stat, rx 5871 drivers/net/ethernet/broadcom/bnxt/bnxt.c return bnxt_hwrm_reserve_vf_rings(bp, tx, rx, grp, cp, stat, rx 5920 drivers/net/ethernet/broadcom/bnxt/bnxt.c int rx = bp->rx_nr_rings, stat; rx 5921 drivers/net/ethernet/broadcom/bnxt/bnxt.c int vnic = 1, grp = rx; rx 5930 drivers/net/ethernet/broadcom/bnxt/bnxt.c vnic = rx + 1; rx 5932 drivers/net/ethernet/broadcom/bnxt/bnxt.c rx <<= 1; rx 5935 drivers/net/ethernet/broadcom/bnxt/bnxt.c (hw_resc->resv_rx_rings != rx || hw_resc->resv_cp_rings != cp || rx 5951 drivers/net/ethernet/broadcom/bnxt/bnxt.c int rx = bp->rx_nr_rings; rx 5962 drivers/net/ethernet/broadcom/bnxt/bnxt.c vnic = rx + 1; rx 5964 drivers/net/ethernet/broadcom/bnxt/bnxt.c rx <<= 1; rx 5968 drivers/net/ethernet/broadcom/bnxt/bnxt.c rc = bnxt_hwrm_reserve_rings(bp, tx, rx, grp, cp, stat, vnic); rx 5974 drivers/net/ethernet/broadcom/bnxt/bnxt.c rx = hw_resc->resv_rx_rings; rx 5981 drivers/net/ethernet/broadcom/bnxt/bnxt.c rx_rings = rx; rx 5983 drivers/net/ethernet/broadcom/bnxt/bnxt.c if (rx >= 2) { rx 5984 drivers/net/ethernet/broadcom/bnxt/bnxt.c rx_rings = rx >> 1; rx 6003 drivers/net/ethernet/broadcom/bnxt/bnxt.c rx = rx_rings << 1; rx 6009 drivers/net/ethernet/broadcom/bnxt/bnxt.c if (!tx || !rx || !cp || !grp || !vnic || !stat) rx 7854 drivers/net/ethernet/broadcom/bnxt/bnxt.c static int bnxt_trim_rings(struct bnxt *bp, int *rx, int *tx, int max, rx 7857 drivers/net/ethernet/broadcom/bnxt/bnxt.c int _rx = *rx, _tx = *tx; rx 7860 drivers/net/ethernet/broadcom/bnxt/bnxt.c *rx 
= min_t(int, _rx, max); rx 7872 drivers/net/ethernet/broadcom/bnxt/bnxt.c *rx = _rx; rx 9497 drivers/net/ethernet/broadcom/bnxt/bnxt.c struct rx_port_stats *rx = bp->hw_rx_port_stats; rx 9500 drivers/net/ethernet/broadcom/bnxt/bnxt.c stats->rx_crc_errors = le64_to_cpu(rx->rx_fcs_err_frames); rx 9501 drivers/net/ethernet/broadcom/bnxt/bnxt.c stats->rx_frame_errors = le64_to_cpu(rx->rx_align_err_frames); rx 9502 drivers/net/ethernet/broadcom/bnxt/bnxt.c stats->rx_length_errors = le64_to_cpu(rx->rx_undrsz_frames) + rx 9503 drivers/net/ethernet/broadcom/bnxt/bnxt.c le64_to_cpu(rx->rx_ovrsz_frames) + rx 9504 drivers/net/ethernet/broadcom/bnxt/bnxt.c le64_to_cpu(rx->rx_runt_frames); rx 9505 drivers/net/ethernet/broadcom/bnxt/bnxt.c stats->rx_errors = le64_to_cpu(rx->rx_false_carrier_frames) + rx 9506 drivers/net/ethernet/broadcom/bnxt/bnxt.c le64_to_cpu(rx->rx_jbr_frames); rx 10342 drivers/net/ethernet/broadcom/bnxt/bnxt.c int bnxt_check_rings(struct bnxt *bp, int tx, int rx, bool sh, int tcs, rx 10347 drivers/net/ethernet/broadcom/bnxt/bnxt.c int rx_rings = rx; rx 10357 drivers/net/ethernet/broadcom/bnxt/bnxt.c if (max_rx < rx) rx 10370 drivers/net/ethernet/broadcom/bnxt/bnxt.c cp = sh ? max_t(int, tx_rings_needed, rx) : tx_rings_needed + rx; rx 10376 drivers/net/ethernet/broadcom/bnxt/bnxt.c return bnxt_hwrm_check_rings(bp, tx_rings_needed, rx_rings, rx, cp, rx 11494 drivers/net/ethernet/broadcom/bnxt/bnxt.c int rx, tx, cp; rx 11496 drivers/net/ethernet/broadcom/bnxt/bnxt.c _bnxt_get_max_rings(bp, &rx, &tx, &cp); rx 11497 drivers/net/ethernet/broadcom/bnxt/bnxt.c *max_rx = rx; rx 11499 drivers/net/ethernet/broadcom/bnxt/bnxt.c if (!rx || !tx || !cp) rx 1997 drivers/net/ethernet/broadcom/bnxt/bnxt.h int bnxt_check_rings(struct bnxt *bp, int tx, int rx, bool sh, int tcs, rx 302 drivers/net/ethernet/broadcom/bnxt/bnxt_xdp.c int rx, tx; rx 305 drivers/net/ethernet/broadcom/bnxt/bnxt_xdp.c bnxt_get_max_rings(bp, &rx, &tx, true); rx 306 drivers/net/ethernet/broadcom/bnxt/bnxt_xdp.c if (rx > 1) { rx 5018 drivers/net/ethernet/broadcom/cnic.c data->rx.bd_page_base.hi = cpu_to_le32(val); rx 5022 drivers/net/ethernet/broadcom/cnic.c data->rx.bd_page_base.lo = cpu_to_le32(val); rx 5027 drivers/net/ethernet/broadcom/cnic.c data->rx.cqe_page_base.hi = cpu_to_le32(val); rx 5031 drivers/net/ethernet/broadcom/cnic.c data->rx.cqe_page_base.lo = cpu_to_le32(val); rx 5034 drivers/net/ethernet/broadcom/cnic.c data->rx.client_qzone_id = cl_qzone_id; rx 5035 drivers/net/ethernet/broadcom/cnic.c data->rx.rx_sb_index_number = HC_SP_INDEX_ETH_ISCSI_RX_CQ_CONS; rx 5036 drivers/net/ethernet/broadcom/cnic.c data->rx.status_block_id = BNX2X_DEF_SB_ID; rx 5038 drivers/net/ethernet/broadcom/cnic.c data->rx.cache_line_alignment_log_size = L1_CACHE_SHIFT; rx 5040 drivers/net/ethernet/broadcom/cnic.c data->rx.max_bytes_on_bd = cpu_to_le16(cp->l2_single_buf_size); rx 5041 drivers/net/ethernet/broadcom/cnic.c data->rx.outer_vlan_removal_enable_flg = 1; rx 5042 drivers/net/ethernet/broadcom/cnic.c data->rx.silent_vlan_removal_flg = 1; rx 5043 drivers/net/ethernet/broadcom/cnic.c data->rx.silent_vlan_value = 0; rx 5044 drivers/net/ethernet/broadcom/cnic.c data->rx.silent_vlan_mask = 0xffff; rx 787 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_64_octets", mib.rx.pkt_cnt.cnt_64), rx 788 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_65_127_oct", mib.rx.pkt_cnt.cnt_127), rx 789 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_128_255_oct", mib.rx.pkt_cnt.cnt_255), rx 790 
drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_256_511_oct", mib.rx.pkt_cnt.cnt_511), rx 791 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_512_1023_oct", mib.rx.pkt_cnt.cnt_1023), rx 792 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_1024_1518_oct", mib.rx.pkt_cnt.cnt_1518), rx 793 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_vlan_1519_1522_oct", mib.rx.pkt_cnt.cnt_mgv), rx 794 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_1522_2047_oct", mib.rx.pkt_cnt.cnt_2047), rx 795 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_2048_4095_oct", mib.rx.pkt_cnt.cnt_4095), rx 796 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_4096_9216_oct", mib.rx.pkt_cnt.cnt_9216), rx 797 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_pkts", mib.rx.pkt), rx 798 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_bytes", mib.rx.bytes), rx 799 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_multicast", mib.rx.mca), rx 800 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_broadcast", mib.rx.bca), rx 801 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_fcs", mib.rx.fcs), rx 802 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_control", mib.rx.cf), rx 803 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_pause", mib.rx.pf), rx 804 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_unknown", mib.rx.uo), rx 805 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_align", mib.rx.aln), rx 806 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_outrange", mib.rx.flr), rx 807 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_code", mib.rx.cde), rx 808 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_carrier", mib.rx.fcr), rx 809 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_oversize", mib.rx.ovr), rx 810 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_jabber", mib.rx.jbr), rx 811 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_mtu_err", mib.rx.mtue), rx 812 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_good_pkts", mib.rx.pok), rx 813 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_unicast", mib.rx.uc), rx 814 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_ppp", mib.rx.ppp), rx 815 drivers/net/ethernet/broadcom/genet/bcmgenet.c STAT_GENET_MIB_RX("rx_crc", mib.rx.rcrc), rx 135 drivers/net/ethernet/broadcom/genet/bcmgenet.h struct bcmgenet_rx_counters rx; rx 110 drivers/net/ethernet/brocade/bna/bna.h cbfn(cbarg, rxf->rx); \ rx 322 drivers/net/ethernet/brocade/bna/bna.h void bna_bfi_rx_enet_start_rsp(struct bna_rx *rx, rx 324 drivers/net/ethernet/brocade/bna/bna.h void bna_bfi_rx_enet_stop_rsp(struct bna_rx *rx, rx 349 drivers/net/ethernet/brocade/bna/bna.h void bna_rx_destroy(struct bna_rx *rx); rx 350 drivers/net/ethernet/brocade/bna/bna.h void bna_rx_enable(struct bna_rx *rx); rx 351 drivers/net/ethernet/brocade/bna/bna.h void bna_rx_disable(struct bna_rx *rx, enum bna_cleanup_type type, rx 353 drivers/net/ethernet/brocade/bna/bna.h void bna_rx_cleanup_complete(struct bna_rx *rx); rx 354 drivers/net/ethernet/brocade/bna/bna.h void bna_rx_coalescing_timeo_set(struct bna_rx *rx, int coalescing_timeo); rx 357 
drivers/net/ethernet/brocade/bna/bna.h enum bna_cb_status bna_rx_ucast_set(struct bna_rx *rx, const u8 *ucmac); rx 358 drivers/net/ethernet/brocade/bna/bna.h enum bna_cb_status bna_rx_ucast_listset(struct bna_rx *rx, int count, rx 360 drivers/net/ethernet/brocade/bna/bna.h enum bna_cb_status bna_rx_mcast_add(struct bna_rx *rx, const u8 *mcmac, rx 363 drivers/net/ethernet/brocade/bna/bna.h enum bna_cb_status bna_rx_mcast_listset(struct bna_rx *rx, int count, rx 366 drivers/net/ethernet/brocade/bna/bna.h bna_rx_mcast_delall(struct bna_rx *rx); rx 368 drivers/net/ethernet/brocade/bna/bna.h bna_rx_mode_set(struct bna_rx *rx, enum bna_rxmode rxmode, rx 370 drivers/net/ethernet/brocade/bna/bna.h void bna_rx_vlan_add(struct bna_rx *rx, int vlan_id); rx 371 drivers/net/ethernet/brocade/bna/bna.h void bna_rx_vlan_del(struct bna_rx *rx, int vlan_id); rx 372 drivers/net/ethernet/brocade/bna/bna.h void bna_rx_vlanfilter_enable(struct bna_rx *rx); rx 373 drivers/net/ethernet/brocade/bna/bna.h void bna_rx_vlan_strip_enable(struct bna_rx *rx); rx 374 drivers/net/ethernet/brocade/bna/bna.h void bna_rx_vlan_strip_disable(struct bna_rx *rx); rx 277 drivers/net/ethernet/brocade/bna/bna_enet.c struct bna_rx *rx; rx 281 drivers/net/ethernet/brocade/bna/bna_enet.c bna_rx_from_rid(bna, msghdr->enet_id, rx); rx 282 drivers/net/ethernet/brocade/bna/bna_enet.c if (rx) rx 283 drivers/net/ethernet/brocade/bna/bna_enet.c bna_bfi_rx_enet_start_rsp(rx, msghdr); rx 287 drivers/net/ethernet/brocade/bna/bna_enet.c bna_rx_from_rid(bna, msghdr->enet_id, rx); rx 288 drivers/net/ethernet/brocade/bna/bna_enet.c if (rx) rx 289 drivers/net/ethernet/brocade/bna/bna_enet.c bna_bfi_rx_enet_stop_rsp(rx, msghdr); rx 304 drivers/net/ethernet/brocade/bna/bna_enet.c bna_rx_from_rid(bna, msghdr->enet_id, rx); rx 305 drivers/net/ethernet/brocade/bna/bna_enet.c if (rx) rx 306 drivers/net/ethernet/brocade/bna/bna_enet.c bna_bfi_rxf_cfg_rsp(&rx->rxf, msghdr); rx 310 drivers/net/ethernet/brocade/bna/bna_enet.c bna_rx_from_rid(bna, msghdr->enet_id, rx); rx 311 drivers/net/ethernet/brocade/bna/bna_enet.c if (rx) rx 312 drivers/net/ethernet/brocade/bna/bna_enet.c bna_bfi_rxf_ucast_set_rsp(&rx->rxf, msghdr); rx 316 drivers/net/ethernet/brocade/bna/bna_enet.c bna_rx_from_rid(bna, msghdr->enet_id, rx); rx 317 drivers/net/ethernet/brocade/bna/bna_enet.c if (rx) rx 318 drivers/net/ethernet/brocade/bna/bna_enet.c bna_bfi_rxf_mcast_add_rsp(&rx->rxf, msghdr); rx 184 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfi_msgq_mhdr_set(req->mh, BFI_MC_ENET, req_type, 0, rxf->rx->rid); rx 190 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); rx 200 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 0, rxf->rx->rid); rx 206 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); rx 216 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 0, rxf->rx->rid); rx 222 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); rx 231 drivers/net/ethernet/brocade/bna/bna_tx_rx.c BFI_ENET_H2I_MAC_MCAST_FILTER_REQ, 0, rxf->rx->rid); rx 237 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); rx 246 drivers/net/ethernet/brocade/bna/bna_tx_rx.c BFI_ENET_H2I_RX_PROMISCUOUS_REQ, 0, rxf->rx->rid); rx 252 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); rx 263 drivers/net/ethernet/brocade/bna/bna_tx_rx.c BFI_ENET_H2I_RX_VLAN_SET_REQ, 0, rxf->rx->rid); rx 277 
drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); rx 286 drivers/net/ethernet/brocade/bna/bna_tx_rx.c BFI_ENET_H2I_RX_VLAN_STRIP_ENABLE_REQ, 0, rxf->rx->rid); rx 292 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); rx 301 drivers/net/ethernet/brocade/bna/bna_tx_rx.c BFI_ENET_H2I_RIT_CFG_REQ, 0, rxf->rx->rid); rx 308 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); rx 318 drivers/net/ethernet/brocade/bna/bna_tx_rx.c BFI_ENET_H2I_RSS_CFG_REQ, 0, rxf->rx->rid); rx 328 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); rx 337 drivers/net/ethernet/brocade/bna/bna_tx_rx.c BFI_ENET_H2I_RSS_ENABLE_REQ, 0, rxf->rx->rid); rx 343 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_msgq_cmd_post(&rxf->rx->bna->msgq, &rxf->msgq_cmd); rx 384 drivers/net/ethernet/brocade/bna/bna_tx_rx.c mchandle = bna_mcam_mod_handle_get(&rxf->rx->bna->mcam_mod); rx 411 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_mcam_mod_handle_put(&rxf->rx->bna->mcam_mod, mchandle); rx 429 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_move_tail(&mac->qe, bna_mcam_mod_del_q(rxf->rx->bna)); rx 477 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_move_tail(&mac->qe, bna_mcam_mod_del_q(rxf->rx->bna)); rx 561 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rx *rx = rxf->rx; rx 566 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_for_each_entry(rxp, &rx->rxp_q, qe) { rx 609 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rx *rx, rx 613 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rxf->rx = rx; rx 661 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_move_tail(&mac->qe, bna_ucam_mod_free_q(rxf->rx->bna)); rx 666 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_ucam_mod_free_q(rxf->rx->bna)); rx 673 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_move_tail(&mac->qe, bna_mcam_mod_free_q(rxf->rx->bna)); rx 678 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rxf->rx->bna->promisc_rid == rxf->rx->rid) rx 679 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rxf->rx->bna->promisc_rid = BFI_INVALID_RID; rx 680 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rxf->rx->bna->default_mode_rid == rxf->rx->rid) rx 681 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rxf->rx->bna->default_mode_rid = BFI_INVALID_RID; rx 686 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rxf->rx = NULL; rx 690 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_cb_rxf_started(struct bna_rx *rx) rx 692 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_send_event(rx, RX_E_RXF_STARTED); rx 699 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rxf->start_cbarg = rxf->rx; rx 704 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_cb_rxf_stopped(struct bna_rx *rx) rx 706 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_send_event(rx, RX_E_RXF_STOPPED); rx 713 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rxf->stop_cbarg = rxf->rx; rx 724 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_ucast_set(struct bna_rx *rx, const u8 *ucmac) rx 726 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rxf *rxf = &rx->rxf; rx 730 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_cam_mod_mac_get(bna_ucam_mod_free_q(rxf->rx->bna)); rx 738 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rxf->cam_fltr_cbarg = rx->bna->bnad; rx 746 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_mcast_add(struct bna_rx *rx, const u8 *addr, rx 749 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 
struct bna_rxf *rxf = &rx->rxf; rx 756 drivers/net/ethernet/brocade/bna/bna_tx_rx.c cbfn(rx->bna->bnad, rx); rx 760 drivers/net/ethernet/brocade/bna/bna_tx_rx.c mac = bna_cam_mod_mac_get(bna_mcam_mod_free_q(rxf->rx->bna)); rx 767 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rxf->cam_fltr_cbarg = rx->bna->bnad; rx 775 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_ucast_listset(struct bna_rx *rx, int count, const u8 *uclist) rx 777 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_ucam_mod *ucam_mod = &rx->bna->ucam_mod; rx 778 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rxf *rxf = &rx->rxf; rx 833 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_mcast_listset(struct bna_rx *rx, int count, const u8 *mclist) rx 835 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_mcam_mod *mcam_mod = &rx->bna->mcam_mod; rx 836 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rxf *rxf = &rx->rxf; rx 893 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_mcast_delall(struct bna_rx *rx) rx 895 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rxf *rxf = &rx->rxf; rx 903 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_move_tail(&mac->qe, bna_mcam_mod_free_q(rxf->rx->bna)); rx 911 drivers/net/ethernet/brocade/bna/bna_tx_rx.c del_mac = bna_cam_mod_mac_get(bna_mcam_mod_del_q(rxf->rx->bna)); rx 915 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_add_tail(&mac->qe, bna_mcam_mod_free_q(rxf->rx->bna)); rx 924 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_vlan_add(struct bna_rx *rx, int vlan_id) rx 926 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rxf *rxf = &rx->rxf; rx 939 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_vlan_del(struct bna_rx *rx, int vlan_id) rx 941 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rxf *rxf = &rx->rxf; rx 963 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_move_tail(&mac->qe, bna_ucam_mod_del_q(rxf->rx->bna)); rx 1001 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_ucam_mod_del_q(rxf->rx->bna)); rx 1006 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_ucam_mod_del_q(rxf->rx->bna)); rx 1039 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna *bna = rxf->rx->bna; rx 1067 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna *bna = rxf->rx->bna; rx 1153 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna *bna = rxf->rx->bna; rx 1169 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna->promisc_rid = rxf->rx->rid; rx 1179 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna *bna = rxf->rx->bna; rx 1270 drivers/net/ethernet/brocade/bna/bna_tx_rx.c #define call_rx_stop_cbfn(rx) \ rx 1272 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if ((rx)->stop_cbfn) { \ rx 1275 drivers/net/ethernet/brocade/bna/bna_tx_rx.c cbfn = (rx)->stop_cbfn; \ rx 1276 drivers/net/ethernet/brocade/bna/bna_tx_rx.c cbarg = (rx)->stop_cbarg; \ rx 1277 drivers/net/ethernet/brocade/bna/bna_tx_rx.c (rx)->stop_cbfn = NULL; \ rx 1278 drivers/net/ethernet/brocade/bna/bna_tx_rx.c (rx)->stop_cbarg = NULL; \ rx 1279 drivers/net/ethernet/brocade/bna/bna_tx_rx.c cbfn(cbarg, rx); \ rx 1283 drivers/net/ethernet/brocade/bna/bna_tx_rx.c #define call_rx_stall_cbfn(rx) \ rx 1285 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if ((rx)->rx_stall_cbfn) \ rx 1286 drivers/net/ethernet/brocade/bna/bna_tx_rx.c (rx)->rx_stall_cbfn((rx)->bna->bnad, (rx)); \ rx 1301 drivers/net/ethernet/brocade/bna/bna_tx_rx.c static void bna_bfi_rx_enet_start(struct bna_rx *rx); rx 1302 drivers/net/ethernet/brocade/bna/bna_tx_rx.c static void bna_rx_enet_stop(struct bna_rx 
*rx); rx 1303 drivers/net/ethernet/brocade/bna/bna_tx_rx.c static void bna_rx_mod_cb_rx_stopped(void *arg, struct bna_rx *rx); rx 1326 drivers/net/ethernet/brocade/bna/bna_tx_rx.c static void bna_rx_sm_stopped_entry(struct bna_rx *rx) rx 1328 drivers/net/ethernet/brocade/bna/bna_tx_rx.c call_rx_stop_cbfn(rx); rx 1331 drivers/net/ethernet/brocade/bna/bna_tx_rx.c static void bna_rx_sm_stopped(struct bna_rx *rx, rx 1336 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_start_wait); rx 1340 drivers/net/ethernet/brocade/bna/bna_tx_rx.c call_rx_stop_cbfn(rx); rx 1353 drivers/net/ethernet/brocade/bna/bna_tx_rx.c static void bna_rx_sm_start_wait_entry(struct bna_rx *rx) rx 1355 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_bfi_rx_enet_start(rx); rx 1359 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_sm_stop_wait_entry(struct bna_rx *rx) rx 1364 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_sm_stop_wait(struct bna_rx *rx, enum bna_rx_event event) rx 1369 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_cleanup_wait); rx 1370 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rx_cleanup_cbfn(rx->bna->bnad, rx); rx 1374 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_enet_stop(rx); rx 1383 drivers/net/ethernet/brocade/bna/bna_tx_rx.c static void bna_rx_sm_start_wait(struct bna_rx *rx, rx 1388 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_start_stop_wait); rx 1392 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_stopped); rx 1396 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_rxf_start_wait); rx 1405 drivers/net/ethernet/brocade/bna/bna_tx_rx.c static void bna_rx_sm_rxf_start_wait_entry(struct bna_rx *rx) rx 1407 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rx_post_cbfn(rx->bna->bnad, rx); rx 1408 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rxf_start(&rx->rxf); rx 1412 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_sm_rxf_stop_wait_entry(struct bna_rx *rx) rx 1417 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_sm_rxf_stop_wait(struct bna_rx *rx, enum bna_rx_event event) rx 1421 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_cleanup_wait); rx 1422 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rxf_fail(&rx->rxf); rx 1423 drivers/net/ethernet/brocade/bna/bna_tx_rx.c call_rx_stall_cbfn(rx); rx 1424 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rx_cleanup_cbfn(rx->bna->bnad, rx); rx 1428 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rxf_stop(&rx->rxf); rx 1432 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_stop_wait); rx 1433 drivers/net/ethernet/brocade/bna/bna_tx_rx.c call_rx_stall_cbfn(rx); rx 1434 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_enet_stop(rx); rx 1445 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_sm_start_stop_wait_entry(struct bna_rx *rx) rx 1450 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_sm_start_stop_wait(struct bna_rx *rx, enum bna_rx_event event) rx 1455 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_stopped); rx 1459 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_enet_stop(rx); rx 1468 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_sm_started_entry(struct bna_rx *rx) rx 1471 drivers/net/ethernet/brocade/bna/bna_tx_rx.c int is_regular = (rx->type == BNA_RX_T_REGULAR); rx 1474 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_for_each_entry(rxp, &rx->rxp_q, qe) rx 1475 
drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_ib_start(rx->bna, &rxp->cq.ib, is_regular); rx 1477 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_ethport_cb_rx_started(&rx->bna->ethport); rx 1481 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_sm_started(struct bna_rx *rx, enum bna_rx_event event) rx 1485 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_rxf_stop_wait); rx 1486 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_ethport_cb_rx_stopped(&rx->bna->ethport); rx 1487 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rxf_stop(&rx->rxf); rx 1491 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_failed); rx 1492 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_ethport_cb_rx_stopped(&rx->bna->ethport); rx 1493 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rxf_fail(&rx->rxf); rx 1494 drivers/net/ethernet/brocade/bna/bna_tx_rx.c call_rx_stall_cbfn(rx); rx 1495 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rx_cleanup_cbfn(rx->bna->bnad, rx); rx 1504 drivers/net/ethernet/brocade/bna/bna_tx_rx.c static void bna_rx_sm_rxf_start_wait(struct bna_rx *rx, rx 1509 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_rxf_stop_wait); rx 1513 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_failed); rx 1514 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rxf_fail(&rx->rxf); rx 1515 drivers/net/ethernet/brocade/bna/bna_tx_rx.c call_rx_stall_cbfn(rx); rx 1516 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rx_cleanup_cbfn(rx->bna->bnad, rx); rx 1520 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_started); rx 1530 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_sm_cleanup_wait_entry(struct bna_rx *rx) rx 1535 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_sm_cleanup_wait(struct bna_rx *rx, enum bna_rx_event event) rx 1544 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_stopped); rx 1554 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_sm_failed_entry(struct bna_rx *rx) rx 1559 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_sm_failed(struct bna_rx *rx, enum bna_rx_event event) rx 1563 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_quiesce_wait); rx 1567 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_cleanup_wait); rx 1577 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_stopped); rx 1586 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_sm_quiesce_wait_entry(struct bna_rx *rx) rx 1591 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_sm_quiesce_wait(struct bna_rx *rx, enum bna_rx_event event) rx 1595 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_cleanup_wait); rx 1599 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_failed); rx 1603 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_start_wait); rx 1613 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_bfi_rx_enet_start(struct bna_rx *rx) rx 1615 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bfi_enet_rx_cfg_req *cfg_req = &rx->bfi_enet_cmd.cfg_req; rx 1621 drivers/net/ethernet/brocade/bna/bna_tx_rx.c BFI_ENET_H2I_RX_CFG_SET_REQ, 0, rx->rid); rx 1625 drivers/net/ethernet/brocade/bna/bna_tx_rx.c cfg_req->rx_cfg.frame_size = bna_enet_mtu_get(&rx->bna->enet); rx 1626 drivers/net/ethernet/brocade/bna/bna_tx_rx.c cfg_req->num_queue_sets = rx->num_paths; rx 1627 
drivers/net/ethernet/brocade/bna/bna_tx_rx.c for (i = 0; i < rx->num_paths; i++) { rx 1629 drivers/net/ethernet/brocade/bna/bna_tx_rx.c : list_first_entry(&rx->rxp_q, struct bna_rxp, qe); rx 1655 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_enet_mtu_get(&rx->bna->enet); rx 1695 drivers/net/ethernet/brocade/bna/bna_tx_rx.c cfg_req->rx_cfg.hds.type = rx->hds_cfg.hdr_type; rx 1696 drivers/net/ethernet/brocade/bna/bna_tx_rx.c cfg_req->rx_cfg.hds.force_offset = rx->hds_cfg.forced_offset; rx 1697 drivers/net/ethernet/brocade/bna/bna_tx_rx.c cfg_req->rx_cfg.hds.max_header_size = rx->hds_cfg.forced_offset; rx 1707 drivers/net/ethernet/brocade/bna/bna_tx_rx.c cfg_req->rx_cfg.strip_vlan = rx->rxf.vlan_strip_status; rx 1709 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_msgq_cmd_set(&rx->msgq_cmd, NULL, NULL, rx 1711 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_msgq_cmd_post(&rx->bna->msgq, &rx->msgq_cmd); rx 1715 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_bfi_rx_enet_stop(struct bna_rx *rx) rx 1717 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bfi_enet_req *req = &rx->bfi_enet_cmd.req; rx 1720 drivers/net/ethernet/brocade/bna/bna_tx_rx.c BFI_ENET_H2I_RX_CFG_CLR_REQ, 0, rx->rid); rx 1723 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_msgq_cmd_set(&rx->msgq_cmd, NULL, NULL, sizeof(struct bfi_enet_req), rx 1725 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_msgq_cmd_post(&rx->bna->msgq, &rx->msgq_cmd); rx 1729 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_enet_stop(struct bna_rx *rx) rx 1734 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_for_each_entry(rxp, &rx->rxp_q, qe) rx 1735 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_ib_stop(rx->bna, &rxp->cq.ib); rx 1737 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_bfi_rx_enet_stop(rx); rx 1802 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rx *rx = NULL; rx 1806 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx = list_first_entry(&rx_mod->rx_free_q, struct bna_rx, qe); rx 1808 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx = list_last_entry(&rx_mod->rx_free_q, struct bna_rx, qe); rx 1811 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_move_tail(&rx->qe, &rx_mod->rx_active_q); rx 1812 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->type = type; rx 1814 drivers/net/ethernet/brocade/bna/bna_tx_rx.c return rx; rx 1818 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_put(struct bna_rx_mod *rx_mod, struct bna_rx *rx) rx 1823 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (((struct bna_rx *)qe)->rid < rx->rid) rx 1826 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_add(&rx->qe, qe); rx 1930 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_mod_cb_rx_stopped(void *arg, struct bna_rx *rx) rx 1948 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_start(struct bna_rx *rx) rx 1950 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rx_flags |= BNA_RX_F_ENET_STARTED; rx 1951 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rx->rx_flags & BNA_RX_F_ENABLED) rx 1952 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_send_event(rx, RX_E_START); rx 1956 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_stop(struct bna_rx *rx) rx 1958 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rx_flags &= ~BNA_RX_F_ENET_STARTED; rx 1959 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rx->fsm == (bfa_fsm_t) bna_rx_sm_stopped) rx 1960 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_mod_cb_rx_stopped(&rx->bna->rx_mod, rx); rx 1962 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->stop_cbfn = bna_rx_mod_cb_rx_stopped; rx 
1963 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->stop_cbarg = &rx->bna->rx_mod; rx 1964 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_send_event(rx, RX_E_STOP); rx 1969 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_fail(struct bna_rx *rx) rx 1972 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rx_flags &= ~BNA_RX_F_ENET_STARTED; rx 1973 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_send_event(rx, RX_E_FAIL); rx 1979 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rx *rx; rx 1985 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_for_each_entry(rx, &rx_mod->rx_active_q, qe) rx 1986 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rx->type == type) rx 1987 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_start(rx); rx 1993 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rx *rx; rx 2002 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_for_each_entry(rx, &rx_mod->rx_active_q, qe) rx 2003 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rx->type == type) { rx 2005 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_stop(rx); rx 2014 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rx *rx; rx 2019 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_for_each_entry(rx, &rx_mod->rx_active_q, qe) rx 2020 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_fail(rx); rx 2034 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx_mod->rx = (struct bna_rx *) rx 2052 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx_ptr = &rx_mod->rx[index]; rx 2086 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_bfi_rx_enet_start_rsp(struct bna_rx *rx, struct bfi_msgq_mhdr *msghdr) rx 2088 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bfi_enet_rx_cfg_rsp *cfg_rsp = &rx->bfi_enet_cmd.cfg_rsp; rx 2093 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_msgq_rsp_copy(&rx->bna->msgq, (u8 *)cfg_rsp, rx 2096 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->hw_id = cfg_rsp->hw_id; rx 2098 drivers/net/ethernet/brocade/bna/bna_tx_rx.c for (i = 0, rxp = list_first_entry(&rx->rxp_q, struct bna_rxp, qe); rx 2099 drivers/net/ethernet/brocade/bna/bna_tx_rx.c i < rx->num_paths; i++, rxp = list_next_entry(rxp, qe)) { rx 2104 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->bna->pcidev.pci_bar_kva rx 2108 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->bna->pcidev.pci_bar_kva rx 2113 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->bna->pcidev.pci_bar_kva rx 2126 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_send_event(rx, RX_E_STARTED); rx 2130 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_bfi_rx_enet_stop_rsp(struct bna_rx *rx, struct bfi_msgq_mhdr *msghdr) rx 2132 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_send_event(rx, RX_E_STOPPED); rx 2257 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rx *rx; rx 2307 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx = bna_rx_get(rx_mod, rx_cfg->rx_type); rx 2308 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->bna = bna; rx 2309 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rx_flags = 0; rx 2310 drivers/net/ethernet/brocade/bna/bna_tx_rx.c INIT_LIST_HEAD(&rx->rxp_q); rx 2311 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->stop_cbfn = NULL; rx 2312 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->stop_cbarg = NULL; rx 2313 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->priv = priv; rx 2315 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rcb_setup_cbfn = rx_cbfn->rcb_setup_cbfn; rx 2316 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rcb_destroy_cbfn = rx_cbfn->rcb_destroy_cbfn; rx 2317 
drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->ccb_setup_cbfn = rx_cbfn->ccb_setup_cbfn; rx 2318 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->ccb_destroy_cbfn = rx_cbfn->ccb_destroy_cbfn; rx 2319 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rx_stall_cbfn = rx_cbfn->rx_stall_cbfn; rx 2321 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rx_cleanup_cbfn = rx_cbfn->rx_cleanup_cbfn; rx 2322 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rx_post_cbfn = rx_cbfn->rx_post_cbfn; rx 2324 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rx->bna->rx_mod.flags & BNA_RX_MOD_F_ENET_STARTED) { rx 2325 drivers/net/ethernet/brocade/bna/bna_tx_rx.c switch (rx->type) { rx 2327 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (!(rx->bna->rx_mod.flags & rx 2329 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rx_flags |= BNA_RX_F_ENET_STARTED; rx 2332 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rx->bna->rx_mod.flags & BNA_RX_MOD_F_ENET_LOOPBACK) rx 2333 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rx_flags |= BNA_RX_F_ENET_STARTED; rx 2338 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->num_paths = rx_cfg->num_paths; rx 2340 drivers/net/ethernet/brocade/bna/bna_tx_rx.c i < rx->num_paths; i++) { rx 2342 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_add_tail(&rxp->qe, &rx->rxp_q); rx 2344 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rxp->rx = rx; rx 2345 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rxp->cq.rx = rx; rx 2379 drivers/net/ethernet/brocade/bna/bna_tx_rx.c q0->rx = rx; rx 2400 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rx->rcb_setup_cbfn) rx 2401 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rcb_setup_cbfn(bnad, q0->rcb); rx 2406 drivers/net/ethernet/brocade/bna/bna_tx_rx.c q1->rx = rx; rx 2430 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rx->rcb_setup_cbfn) rx 2431 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rcb_setup_cbfn(bnad, q1->rcb); rx 2467 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rx->ccb_setup_cbfn) rx 2468 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->ccb_setup_cbfn(bnad, rxp->cq.ccb); rx 2471 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->hds_cfg = rx_cfg->hds_config; rx 2473 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rxf_init(&rx->rxf, rx, rx_cfg, res_info); rx 2475 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_set_state(rx, bna_rx_sm_stopped); rx 2477 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx_mod->rid_mask |= BIT(rx->rid); rx 2479 drivers/net/ethernet/brocade/bna/bna_tx_rx.c return rx; rx 2483 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_destroy(struct bna_rx *rx) rx 2485 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rx_mod *rx_mod = &rx->bna->rx_mod; rx 2491 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rxf_uninit(&rx->rxf); rx 2493 drivers/net/ethernet/brocade/bna/bna_tx_rx.c while (!list_empty(&rx->rxp_q)) { rx 2494 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rxp = list_first_entry(&rx->rxp_q, struct bna_rxp, qe); rx 2497 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rx->rcb_destroy_cbfn) rx 2498 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rcb_destroy_cbfn(rx->bna->bnad, q0->rcb); rx 2501 drivers/net/ethernet/brocade/bna/bna_tx_rx.c q0->rx = NULL; rx 2505 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rx->rcb_destroy_cbfn) rx 2506 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rcb_destroy_cbfn(rx->bna->bnad, q1->rcb); rx 2509 drivers/net/ethernet/brocade/bna/bna_tx_rx.c q1->rx = NULL; rx 2515 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rx->ccb_destroy_cbfn) rx 2516 
drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->ccb_destroy_cbfn(rx->bna->bnad, rxp->cq.ccb); rx 2518 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rxp->rx = NULL; rx 2523 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (qe == &rx->qe) { rx 2524 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_del(&rx->qe); rx 2528 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx_mod->rid_mask &= ~BIT(rx->rid); rx 2530 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->bna = NULL; rx 2531 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->priv = NULL; rx 2532 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_put(rx_mod, rx); rx 2536 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_enable(struct bna_rx *rx) rx 2538 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rx->fsm != (bfa_sm_t)bna_rx_sm_stopped) rx 2541 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rx_flags |= BNA_RX_F_ENABLED; rx 2542 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rx->rx_flags & BNA_RX_F_ENET_STARTED) rx 2543 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_send_event(rx, RX_E_START); rx 2547 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_disable(struct bna_rx *rx, enum bna_cleanup_type type, rx 2552 drivers/net/ethernet/brocade/bna/bna_tx_rx.c (*cbfn)(rx->bna->bnad, rx); rx 2554 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->stop_cbfn = cbfn; rx 2555 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->stop_cbarg = rx->bna->bnad; rx 2557 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rx->rx_flags &= ~BNA_RX_F_ENABLED; rx 2559 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_send_event(rx, RX_E_STOP); rx 2564 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_cleanup_complete(struct bna_rx *rx) rx 2566 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bfa_fsm_send_event(rx, RX_E_CLEANUP_DONE); rx 2570 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_vlan_strip_enable(struct bna_rx *rx) rx 2572 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rxf *rxf = &rx->rxf; rx 2582 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_vlan_strip_disable(struct bna_rx *rx) rx 2584 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rxf *rxf = &rx->rxf; rx 2594 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_mode_set(struct bna_rx *rx, enum bna_rxmode new_mode, rx 2597 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rxf *rxf = &rx->rxf; rx 2604 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if ((rx->bna->promisc_rid != BFI_INVALID_RID) && rx 2605 drivers/net/ethernet/brocade/bna/bna_tx_rx.c (rx->bna->promisc_rid != rxf->rx->rid)) rx 2609 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rx->bna->default_mode_rid != BFI_INVALID_RID) rx 2619 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if ((rx->bna->default_mode_rid != BFI_INVALID_RID) && rx 2620 drivers/net/ethernet/brocade/bna/bna_tx_rx.c (rx->bna->default_mode_rid != rxf->rx->rid)) { rx 2625 drivers/net/ethernet/brocade/bna/bna_tx_rx.c if (rx->bna->promisc_rid != BFI_INVALID_RID) rx 2651 drivers/net/ethernet/brocade/bna/bna_tx_rx.c rxf->cam_fltr_cbarg = rx->bna->bnad; rx 2662 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_vlanfilter_enable(struct bna_rx *rx) rx 2664 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna_rxf *rxf = &rx->rxf; rx 2674 drivers/net/ethernet/brocade/bna/bna_tx_rx.c bna_rx_coalescing_timeo_set(struct bna_rx *rx, int coalescing_timeo) rx 2678 drivers/net/ethernet/brocade/bna/bna_tx_rx.c list_for_each_entry(rxp, &rx->rxp_q, qe) { rx 2697 drivers/net/ethernet/brocade/bna/bna_tx_rx.c struct bna *bna = ccb->cq->rx->bna; rx 574 
drivers/net/ethernet/brocade/bna/bna_types.h struct bna_rx *rx; rx 640 drivers/net/ethernet/brocade/bna/bna_types.h struct bna_rx *rx; rx 693 drivers/net/ethernet/brocade/bna/bna_types.h struct bna_rx *rx; rx 716 drivers/net/ethernet/brocade/bna/bna_types.h void (*start_cbfn) (struct bna_rx *rx); rx 720 drivers/net/ethernet/brocade/bna/bna_types.h void (*stop_cbfn) (struct bna_rx *rx); rx 730 drivers/net/ethernet/brocade/bna/bna_types.h void (*cam_fltr_cbfn)(struct bnad *bnad, struct bna_rx *rx); rx 768 drivers/net/ethernet/brocade/bna/bna_types.h struct bna_rx *rx; rx 808 drivers/net/ethernet/brocade/bna/bna_types.h void (*stop_cbfn)(void *arg, struct bna_rx *rx); rx 830 drivers/net/ethernet/brocade/bna/bna_types.h struct bna_rx *rx; /* BFI_MAX_RXQ entries */ rx 835 drivers/net/ethernet/brocade/bna/bnad.c if (!rx_info->rx) rx 1022 drivers/net/ethernet/brocade/bna/bnad.c (struct bnad_rx_info *)ccb->cq->rx->priv; rx 1032 drivers/net/ethernet/brocade/bna/bnad.c (struct bnad_rx_info *)ccb->cq->rx->priv; rx 1150 drivers/net/ethernet/brocade/bna/bnad.c bnad_cb_rx_stall(struct bnad *bnad, struct bna_rx *rx) rx 1152 drivers/net/ethernet/brocade/bna/bnad.c struct bnad_rx_info *rx_info = (struct bnad_rx_info *)rx->priv; rx 1204 drivers/net/ethernet/brocade/bna/bnad.c bna_rx_cleanup_complete(rx_info->rx); rx 1209 drivers/net/ethernet/brocade/bna/bnad.c bnad_cb_rx_cleanup(struct bnad *bnad, struct bna_rx *rx) rx 1211 drivers/net/ethernet/brocade/bna/bnad.c struct bnad_rx_info *rx_info = (struct bnad_rx_info *)rx->priv; rx 1232 drivers/net/ethernet/brocade/bna/bnad.c bnad_cb_rx_post(struct bnad *bnad, struct bna_rx *rx) rx 1234 drivers/net/ethernet/brocade/bna/bnad.c struct bnad_rx_info *rx_info = (struct bnad_rx_info *)rx->priv; rx 1262 drivers/net/ethernet/brocade/bna/bnad.c bnad_cb_rx_disabled(void *arg, struct bna_rx *rx) rx 1270 drivers/net/ethernet/brocade/bna/bnad.c bnad_cb_rx_mcast_add(struct bnad *bnad, struct bna_rx *rx) rx 1757 drivers/net/ethernet/brocade/bna/bnad.c if (!rx_info->rx) rx 2110 drivers/net/ethernet/brocade/bna/bnad.c if (!bnad->rx_info[rx_id].rx) rx 2130 drivers/net/ethernet/brocade/bna/bnad.c if (bnad->rx_info[0].rx && !err) { rx 2152 drivers/net/ethernet/brocade/bna/bnad.c if (!rx_info->rx) rx 2169 drivers/net/ethernet/brocade/bna/bnad.c bna_rx_disable(rx_info->rx, BNA_HARD_CLEANUP, bnad_cb_rx_disabled); rx 2179 drivers/net/ethernet/brocade/bna/bnad.c bna_rx_destroy(rx_info->rx); rx 2181 drivers/net/ethernet/brocade/bna/bnad.c rx_info->rx = NULL; rx 2207 drivers/net/ethernet/brocade/bna/bnad.c struct bna_rx *rx; rx 2243 drivers/net/ethernet/brocade/bna/bnad.c rx = bna_rx_create(&bnad->bna, bnad, rx_config, &rx_cbfn, res_info, rx 2245 drivers/net/ethernet/brocade/bna/bnad.c if (!rx) { rx 2250 drivers/net/ethernet/brocade/bna/bnad.c rx_info->rx = rx; rx 2277 drivers/net/ethernet/brocade/bna/bnad.c bna_rx_vlanfilter_enable(rx); rx 2283 drivers/net/ethernet/brocade/bna/bnad.c bna_rx_enable(rx); rx 2315 drivers/net/ethernet/brocade/bna/bnad.c if (!rx_info->rx) rx 2317 drivers/net/ethernet/brocade/bna/bnad.c bna_rx_coalescing_timeo_set(rx_info->rx, rx 2334 drivers/net/ethernet/brocade/bna/bnad.c if (!bnad->rx_info[0].rx) rx 2337 drivers/net/ethernet/brocade/bna/bnad.c ret = bna_rx_ucast_set(bnad->rx_info[0].rx, mac_addr); rx 2355 drivers/net/ethernet/brocade/bna/bnad.c ret = bna_rx_mcast_add(rx_info->rx, bnad_bcast_addr, rx 2379 drivers/net/ethernet/brocade/bna/bnad.c bna_rx_vlan_add(bnad->rx_info[rx_id].rx, vid); rx 3129 drivers/net/ethernet/brocade/bna/bnad.c 
bna_rx_ucast_listset(bnad->rx_info[0].rx, 0, NULL); rx 3146 drivers/net/ethernet/brocade/bna/bnad.c ret = bna_rx_ucast_listset(bnad->rx_info[0].rx, entry, mac_list); rx 3157 drivers/net/ethernet/brocade/bna/bnad.c bna_rx_ucast_listset(bnad->rx_info[0].rx, 0, NULL); rx 3186 drivers/net/ethernet/brocade/bna/bnad.c ret = bna_rx_mcast_listset(bnad->rx_info[0].rx, mc_count + 1, mac_list); rx 3196 drivers/net/ethernet/brocade/bna/bnad.c bna_rx_mcast_delall(bnad->rx_info[0].rx); rx 3208 drivers/net/ethernet/brocade/bna/bnad.c if (bnad->rx_info[0].rx == NULL) { rx 3235 drivers/net/ethernet/brocade/bna/bnad.c bna_rx_mode_set(bnad->rx_info[0].rx, new_mode, mode_mask); rx 3321 drivers/net/ethernet/brocade/bna/bnad.c if (!bnad->rx_info[0].rx) rx 3327 drivers/net/ethernet/brocade/bna/bnad.c bna_rx_vlan_add(bnad->rx_info[0].rx, vid); rx 3342 drivers/net/ethernet/brocade/bna/bnad.c if (!bnad->rx_info[0].rx) rx 3349 drivers/net/ethernet/brocade/bna/bnad.c bna_rx_vlan_del(bnad->rx_info[0].rx, vid); rx 3368 drivers/net/ethernet/brocade/bna/bnad.c bna_rx_vlan_strip_enable(bnad->rx_info[0].rx); rx 3370 drivers/net/ethernet/brocade/bna/bnad.c bna_rx_vlan_strip_disable(bnad->rx_info[0].rx); rx 3401 drivers/net/ethernet/brocade/bna/bnad.c if (!rx_info->rx) rx 211 drivers/net/ethernet/brocade/bna/bnad.h struct bna_rx *rx; /* 1:1 between rx_info & rx */ rx 452 drivers/net/ethernet/brocade/bna/bnad_ethtool.c if (!bnad->rx_info[i].rx) rx 460 drivers/net/ethernet/brocade/bna/bnad_ethtool.c if (!err && bnad->rx_info[0].rx) { rx 606 drivers/net/ethernet/brocade/bna/bnad_ethtool.c if (!bnad->rx_info[i].rx) rx 632 drivers/net/ethernet/brocade/bna/bnad_ethtool.c if (!bnad->rx_info[i].rx) rx 733 drivers/net/ethernet/brocade/bna/bnad_ethtool.c if (!bnad->rx_info[i].rx) rx 760 drivers/net/ethernet/brocade/bna/bnad_ethtool.c if (!bnad->rx_info[i].rx) rx 785 drivers/net/ethernet/brocade/bna/bnad_ethtool.c if (!bnad->rx_info[i].rx) rx 644 drivers/net/ethernet/calxeda/xgmac.c static int xgmac_set_flow_ctrl(struct xgmac_priv *priv, int rx, int tx) rx 649 drivers/net/ethernet/calxeda/xgmac.c priv->rx_pause = rx; rx 652 drivers/net/ethernet/calxeda/xgmac.c if (rx || tx) { rx 653 drivers/net/ethernet/calxeda/xgmac.c if (rx) rx 503 drivers/net/ethernet/cavium/thunder/nic.h u8 rx; rx 241 drivers/net/ethernet/cavium/thunder/nic_main.c mbx.bgx_stats.rx = bgx->rx; rx 243 drivers/net/ethernet/cavium/thunder/nic_main.c if (bgx->rx) rx 195 drivers/net/ethernet/cavium/thunder/nicvf_main.c if (bgx->rx) rx 1642 drivers/net/ethernet/cavium/thunder/nicvf_main.c mbx.bgx_stats.rx = 1; rx 1653 drivers/net/ethernet/cavium/thunder/nicvf_main.c mbx.bgx_stats.rx = 0; rx 2699 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c #define R3(fmt_spec, s, v) S3(fmt_spec, s, rx[i].v) rx 2700 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c #define R(s, v) S3("u", s, rx[i].v) rx 2705 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c const struct sge_eth_rxq *rx = &s->ethrxq[base_qset]; rx 2712 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c rx[i].rspq.netdev ? 
rx[i].rspq.netdev->name : "N/A"); rx 2732 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c S3("u", "Intr delay:", qtimer_val(adap, &rx[i].rspq)); rx 2733 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c S3("u", "Intr pktcnt:", s->counter_val[rx[i].rspq.pktcnt_idx]); rx 2782 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c const struct sge_ofld_rxq *rx; rx 2785 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c rx = &urxq_info->uldrxq[r * 4]; rx 2790 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c rx[i].rspq.netdev ? rx[i].rspq.netdev->name : "N/A"); rx 2796 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c S3("u", "Intr delay:", qtimer_val(adap, &rx[i].rspq)); rx 2797 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c S3("u", "Intr pktcnt:", s->counter_val[rx[i].rspq.pktcnt_idx]); rx 2810 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c const struct sge_ofld_rxq *rx; rx 2815 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c rx = &urxq_info->uldrxq[ciq_idx]; rx 2820 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c rx[i].rspq.netdev ? rx[i].rspq.netdev->name : "N/A"); rx 2826 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c S3("u", "Intr delay:", qtimer_val(adap, &rx[i].rspq)); rx 2827 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c S3("u", "Intr pktcnt:", s->counter_val[rx[i].rspq.pktcnt_idx]); rx 2834 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c const struct sge_ofld_rxq *rx; rx 2837 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c rx = &urxq_info->uldrxq[r * 4]; rx 2846 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c S3("u", "Intr delay:", qtimer_val(adap, &rx[i].rspq)); rx 2847 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c S3("u", "Intr pktcnt:", s->counter_val[rx[i].rspq.pktcnt_idx]); rx 2860 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c const struct sge_ofld_rxq *rx; rx 2863 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c rx = &urxq_info->uldrxq[r * 4]; rx 2872 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c S3("u", "Intr delay:", qtimer_val(adap, &rx[i].rspq)); rx 2873 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c S3("u", "Intr pktcnt:", s->counter_val[rx[i].rspq.pktcnt_idx]); rx 2886 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c const struct sge_ofld_rxq *rx; rx 2889 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c rx = &urxq_info->uldrxq[r * 4]; rx 2898 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c S3("u", "Intr delay:", qtimer_val(adap, &rx[i].rspq)); rx 2899 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c S3("u", "Intr pktcnt:", s->counter_val[rx[i].rspq.pktcnt_idx]); rx 2912 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c const struct sge_ofld_rxq *rx; rx 2918 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c rx = &urxq_info->uldrxq[r * 4]; rx 2932 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c S3("u", "Intr delay:", qtimer_val(adap, &rx[i].rspq)); rx 2933 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c S3("u", "Intr pktcnt:", s->counter_val[rx[i].rspq.pktcnt_idx]); rx 245 drivers/net/ethernet/chelsio/cxgb4/cxgb4_ethtool.c const struct sge_eth_rxq *rx = &adap->sge.ethrxq[p->first_qset]; rx 248 drivers/net/ethernet/chelsio/cxgb4/cxgb4_ethtool.c for (i = 0; i < p->nqsets; i++, rx++, tx++) { rx 251 drivers/net/ethernet/chelsio/cxgb4/cxgb4_ethtool.c s->rx_csum += rx->stats.rx_cso; rx 252 drivers/net/ethernet/chelsio/cxgb4/cxgb4_ethtool.c s->vlan_ex += rx->stats.vlan_ex; rx 254 drivers/net/ethernet/chelsio/cxgb4/cxgb4_ethtool.c s->gro_pkts += rx->stats.lro_pkts; rx 255 
drivers/net/ethernet/chelsio/cxgb4/cxgb4_ethtool.c s->gro_merged += rx->stats.lro_merged; rx 3046 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c struct sge_eth_rxq *rx = &adap->sge.ethrxq[pi->first_qset]; rx 3048 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c for (i = pi->nqsets; i; i--, rx++) rx 3049 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c t4_sge_intr_msix(0, &rx->rspq); rx 314 drivers/net/ethernet/cirrus/ep93xx_eth.c int rx; rx 316 drivers/net/ethernet/cirrus/ep93xx_eth.c rx = ep93xx_rx(dev, budget); rx 317 drivers/net/ethernet/cirrus/ep93xx_eth.c if (rx < budget && napi_complete_done(napi, rx)) { rx 323 drivers/net/ethernet/cirrus/ep93xx_eth.c if (rx) { rx 324 drivers/net/ethernet/cirrus/ep93xx_eth.c wrw(ep, REG_RXDENQ, rx); rx 325 drivers/net/ethernet/cirrus/ep93xx_eth.c wrw(ep, REG_RXSTSENQ, rx); rx 328 drivers/net/ethernet/cirrus/ep93xx_eth.c return rx; rx 284 drivers/net/ethernet/cisco/enic/enic_ethtool.c *(data++) = ((u64 *)&vstats->rx)[enic_rx_stats[i].index]; rx 929 drivers/net/ethernet/cisco/enic/enic_main.c net_stats->rx_packets = stats->rx.rx_frames_ok; rx 930 drivers/net/ethernet/cisco/enic/enic_main.c net_stats->rx_bytes = stats->rx.rx_bytes_ok; rx 931 drivers/net/ethernet/cisco/enic/enic_main.c net_stats->rx_errors = stats->rx.rx_errors; rx 932 drivers/net/ethernet/cisco/enic/enic_main.c net_stats->multicast = stats->rx.rx_multicast_frames_ok; rx 935 drivers/net/ethernet/cisco/enic/enic_main.c net_stats->rx_dropped = stats->rx.rx_no_bufs + stats->rx.rx_drop; rx 72 drivers/net/ethernet/cisco/enic/vnic_stats.h struct vnic_rx_stats rx; rx 272 drivers/net/ethernet/cortina/gemini.c static void gmac_set_flow_control(struct net_device *netdev, bool tx, bool rx) rx 284 drivers/net/ethernet/cortina/gemini.c if (rx) rx 1402 drivers/net/ethernet/cortina/gemini.c struct gmac_rxdesc *rx = NULL; rx 1423 drivers/net/ethernet/cortina/gemini.c rx = port->rxq_ring + r; rx 1424 drivers/net/ethernet/cortina/gemini.c word0 = rx->word0; rx 1425 drivers/net/ethernet/cortina/gemini.c word1 = rx->word1; rx 1426 drivers/net/ethernet/cortina/gemini.c mapping = rx->word2.buf_adr; rx 1427 drivers/net/ethernet/cortina/gemini.c word3 = rx->word3; rx 522 drivers/net/ethernet/dec/tulip/interrupt.c int rx = 0; rx 572 drivers/net/ethernet/dec/tulip/interrupt.c rx += tulip_rx(dev); rx 731 drivers/net/ethernet/dec/tulip/interrupt.c if (tx > maxtx || rx > maxrx || oi > maxoi) { rx 734 drivers/net/ethernet/dec/tulip/interrupt.c csr5, tp->nir, tx, rx, oi); rx 785 drivers/net/ethernet/dec/tulip/interrupt.c tp->nir, tp->cur_rx, tp->ttimer, rx); rx 878 drivers/net/ethernet/faraday/ftmac100.c int rx = 0; rx 893 drivers/net/ethernet/faraday/ftmac100.c retry = ftmac100_rx_packet(priv, &rx); rx 894 drivers/net/ethernet/faraday/ftmac100.c } while (retry && rx < budget); rx 896 drivers/net/ethernet/faraday/ftmac100.c if (retry && rx == budget) rx 943 drivers/net/ethernet/faraday/ftmac100.c return rx; rx 303 drivers/net/ethernet/freescale/fman/fman_port.c struct fman_port_rx_bmi_regs rx; rx 486 drivers/net/ethernet/freescale/fman/fman_port.c struct fman_port_rx_bmi_regs __iomem *regs = &port->bmi_regs->rx; rx 783 drivers/net/ethernet/freescale/fman/fman_port.c bp_reg = port->bmi_regs->rx.fmbm_ebmpi; rx 784 drivers/net/ethernet/freescale/fman/fman_port.c bp_depl_reg = &port->bmi_regs->rx.fmbm_mpd; rx 1426 drivers/net/ethernet/freescale/fman/fman_port.c iowrite32be(NIA_ENG_HWK, &port->bmi_regs->rx.fmbm_rfpne); rx 1430 drivers/net/ethernet/freescale/fman/fman_port.c &port->bmi_regs->rx.fmbm_rfpne); rx 1600 
drivers/net/ethernet/freescale/fman/fman_port.c bmi_cfg_reg = &port->bmi_regs->rx.fmbm_rcfg; rx 1601 drivers/net/ethernet/freescale/fman/fman_port.c bmi_status_reg = &port->bmi_regs->rx.fmbm_rst; rx 1676 drivers/net/ethernet/freescale/fman/fman_port.c bmi_cfg_reg = &port->bmi_regs->rx.fmbm_rcfg; rx 334 drivers/net/ethernet/freescale/fman/mac.c int fman_set_mac_active_pause(struct mac_device *mac_dev, bool rx, bool tx) rx 339 drivers/net/ethernet/freescale/fman/mac.c if (rx != mac_dev->rx_pause_active) { rx 340 drivers/net/ethernet/freescale/fman/mac.c err = mac_dev->set_rx_pause(fman_mac, rx); rx 342 drivers/net/ethernet/freescale/fman/mac.c mac_dev->rx_pause_active = rx; rx 96 drivers/net/ethernet/freescale/fman/mac.h int fman_set_mac_active_pause(struct mac_device *mac_dev, bool rx, bool tx); rx 149 drivers/net/ethernet/google/gve/gve.h struct gve_rx_ring *rx; /* rx rings on this block */ rx 167 drivers/net/ethernet/google/gve/gve.h struct gve_rx_ring *rx; /* array of rx_cfg.num_queues */ rx 441 drivers/net/ethernet/google/gve/gve.h void gve_rx_write_doorbell(struct gve_priv *priv, struct gve_rx_ring *rx); rx 445 drivers/net/ethernet/google/gve/gve.h bool gve_clean_rx_done(struct gve_rx_ring *rx, int budget, rx 214 drivers/net/ethernet/google/gve/gve_adminq.c struct gve_rx_ring *rx = &priv->rx[queue_index]; rx 223 drivers/net/ethernet/google/gve/gve_adminq.c .ntfy_id = cpu_to_be32(rx->ntfy_id), rx 224 drivers/net/ethernet/google/gve/gve_adminq.c .queue_resources_addr = cpu_to_be64(rx->q_resources_bus), rx 225 drivers/net/ethernet/google/gve/gve_adminq.c .rx_desc_ring_addr = cpu_to_be64(rx->desc.bus), rx 226 drivers/net/ethernet/google/gve/gve_adminq.c .rx_data_ring_addr = cpu_to_be64(rx->data.data_bus), rx 227 drivers/net/ethernet/google/gve/gve_adminq.c .queue_page_list_id = cpu_to_be32(rx->data.qpl->id), rx 292 drivers/net/ethernet/google/gve/gve_adminq.c if (priv->rx_desc_cnt * sizeof(priv->rx->desc.desc_ring[0]) rx 294 drivers/net/ethernet/google/gve/gve_adminq.c priv->rx_desc_cnt * sizeof(priv->rx->data.data_ring[0]) rx 103 drivers/net/ethernet/google/gve/gve_ethtool.c if (priv->rx) { rx 106 drivers/net/ethernet/google/gve/gve_ethtool.c u64_stats_fetch_begin(&priv->rx[ring].statss); rx 107 drivers/net/ethernet/google/gve/gve_ethtool.c rx_pkts += priv->rx[ring].rpackets; rx 108 drivers/net/ethernet/google/gve/gve_ethtool.c rx_bytes += priv->rx[ring].rbytes; rx 109 drivers/net/ethernet/google/gve/gve_ethtool.c } while (u64_stats_fetch_retry(&priv->rx[ring].statss, rx 137 drivers/net/ethernet/google/gve/gve_ethtool.c if (priv->rx) { rx 139 drivers/net/ethernet/google/gve/gve_ethtool.c struct gve_rx_ring *rx = &priv->rx[ring]; rx 141 drivers/net/ethernet/google/gve/gve_ethtool.c data[i++] = rx->cnt; rx 142 drivers/net/ethernet/google/gve/gve_ethtool.c data[i++] = rx->fill_cnt; rx 35 drivers/net/ethernet/google/gve/gve_main.c if (priv->rx) { rx 39 drivers/net/ethernet/google/gve/gve_main.c u64_stats_fetch_begin(&priv->rx[ring].statss); rx 40 drivers/net/ethernet/google/gve/gve_main.c s->rx_packets += priv->rx[ring].rpackets; rx 41 drivers/net/ethernet/google/gve/gve_main.c s->rx_bytes += priv->rx[ring].rbytes; rx 42 drivers/net/ethernet/google/gve/gve_main.c } while (u64_stats_fetch_retry(&priv->rx[ring].statss, rx 111 drivers/net/ethernet/google/gve/gve_main.c if (block->rx) rx 127 drivers/net/ethernet/google/gve/gve_main.c if (block->rx) rx 401 drivers/net/ethernet/google/gve/gve_main.c gve_rx_write_doorbell(priv, &priv->rx[i]); rx 423 drivers/net/ethernet/google/gve/gve_main.c 
priv->rx = kvzalloc(priv->rx_cfg.num_queues * sizeof(*priv->rx), rx 425 drivers/net/ethernet/google/gve/gve_main.c if (!priv->rx) { rx 440 drivers/net/ethernet/google/gve/gve_main.c u64_stats_init(&priv->rx[i].statss); rx 448 drivers/net/ethernet/google/gve/gve_main.c kvfree(priv->rx); rx 449 drivers/net/ethernet/google/gve/gve_main.c priv->rx = NULL; rx 506 drivers/net/ethernet/google/gve/gve_main.c if (priv->rx) { rx 512 drivers/net/ethernet/google/gve/gve_main.c kvfree(priv->rx); rx 513 drivers/net/ethernet/google/gve/gve_main.c priv->rx = NULL; rx 16 drivers/net/ethernet/google/gve/gve_rx.c block->rx = NULL; rx 21 drivers/net/ethernet/google/gve/gve_rx.c struct gve_rx_ring *rx = &priv->rx[idx]; rx 29 drivers/net/ethernet/google/gve/gve_rx.c dma_free_coherent(dev, bytes, rx->desc.desc_ring, rx->desc.bus); rx 30 drivers/net/ethernet/google/gve/gve_rx.c rx->desc.desc_ring = NULL; rx 32 drivers/net/ethernet/google/gve/gve_rx.c dma_free_coherent(dev, sizeof(*rx->q_resources), rx 33 drivers/net/ethernet/google/gve/gve_rx.c rx->q_resources, rx->q_resources_bus); rx 34 drivers/net/ethernet/google/gve/gve_rx.c rx->q_resources = NULL; rx 36 drivers/net/ethernet/google/gve/gve_rx.c gve_unassign_qpl(priv, rx->data.qpl->id); rx 37 drivers/net/ethernet/google/gve/gve_rx.c rx->data.qpl = NULL; rx 38 drivers/net/ethernet/google/gve/gve_rx.c kvfree(rx->data.page_info); rx 40 drivers/net/ethernet/google/gve/gve_rx.c slots = rx->mask + 1; rx 41 drivers/net/ethernet/google/gve/gve_rx.c bytes = sizeof(*rx->data.data_ring) * slots; rx 42 drivers/net/ethernet/google/gve/gve_rx.c dma_free_coherent(dev, bytes, rx->data.data_ring, rx 43 drivers/net/ethernet/google/gve/gve_rx.c rx->data.data_bus); rx 44 drivers/net/ethernet/google/gve/gve_rx.c rx->data.data_ring = NULL; rx 58 drivers/net/ethernet/google/gve/gve_rx.c static int gve_prefill_rx_pages(struct gve_rx_ring *rx) rx 60 drivers/net/ethernet/google/gve/gve_rx.c struct gve_priv *priv = rx->gve; rx 67 drivers/net/ethernet/google/gve/gve_rx.c slots = rx->mask + 1; rx 69 drivers/net/ethernet/google/gve/gve_rx.c rx->data.page_info = kvzalloc(slots * rx 70 drivers/net/ethernet/google/gve/gve_rx.c sizeof(*rx->data.page_info), GFP_KERNEL); rx 71 drivers/net/ethernet/google/gve/gve_rx.c if (!rx->data.page_info) rx 74 drivers/net/ethernet/google/gve/gve_rx.c rx->data.qpl = gve_assign_rx_qpl(priv); rx 77 drivers/net/ethernet/google/gve/gve_rx.c struct page *page = rx->data.qpl->pages[i]; rx 80 drivers/net/ethernet/google/gve/gve_rx.c gve_setup_rx_buffer(&rx->data.page_info[i], rx 81 drivers/net/ethernet/google/gve/gve_rx.c &rx->data.data_ring[i], addr, page); rx 91 drivers/net/ethernet/google/gve/gve_rx.c struct gve_rx_ring *rx = &priv->rx[queue_idx]; rx 93 drivers/net/ethernet/google/gve/gve_rx.c block->rx = rx; rx 94 drivers/net/ethernet/google/gve/gve_rx.c rx->ntfy_id = ntfy_idx; rx 99 drivers/net/ethernet/google/gve/gve_rx.c struct gve_rx_ring *rx = &priv->rx[idx]; rx 108 drivers/net/ethernet/google/gve/gve_rx.c memset(rx, 0, sizeof(*rx)); rx 110 drivers/net/ethernet/google/gve/gve_rx.c rx->gve = priv; rx 111 drivers/net/ethernet/google/gve/gve_rx.c rx->q_num = idx; rx 114 drivers/net/ethernet/google/gve/gve_rx.c rx->mask = slots - 1; rx 117 drivers/net/ethernet/google/gve/gve_rx.c bytes = sizeof(*rx->data.data_ring) * slots; rx 118 drivers/net/ethernet/google/gve/gve_rx.c rx->data.data_ring = dma_alloc_coherent(hdev, bytes, rx 119 drivers/net/ethernet/google/gve/gve_rx.c &rx->data.data_bus, rx 121 drivers/net/ethernet/google/gve/gve_rx.c if (!rx->data.data_ring) 
rx 123 drivers/net/ethernet/google/gve/gve_rx.c filled_pages = gve_prefill_rx_pages(rx); rx 128 drivers/net/ethernet/google/gve/gve_rx.c rx->fill_cnt = filled_pages; rx 133 drivers/net/ethernet/google/gve/gve_rx.c rx->q_resources = rx 135 drivers/net/ethernet/google/gve/gve_rx.c sizeof(*rx->q_resources), rx 136 drivers/net/ethernet/google/gve/gve_rx.c &rx->q_resources_bus, rx 138 drivers/net/ethernet/google/gve/gve_rx.c if (!rx->q_resources) { rx 143 drivers/net/ethernet/google/gve/gve_rx.c (unsigned long)rx->data.data_bus); rx 153 drivers/net/ethernet/google/gve/gve_rx.c rx->desc.desc_ring = dma_alloc_coherent(hdev, bytes, &rx->desc.bus, rx 155 drivers/net/ethernet/google/gve/gve_rx.c if (!rx->desc.desc_ring) { rx 159 drivers/net/ethernet/google/gve/gve_rx.c rx->mask = slots - 1; rx 160 drivers/net/ethernet/google/gve/gve_rx.c rx->cnt = 0; rx 161 drivers/net/ethernet/google/gve/gve_rx.c rx->desc.seqno = 1; rx 167 drivers/net/ethernet/google/gve/gve_rx.c dma_free_coherent(hdev, sizeof(*rx->q_resources), rx 168 drivers/net/ethernet/google/gve/gve_rx.c rx->q_resources, rx->q_resources_bus); rx 169 drivers/net/ethernet/google/gve/gve_rx.c rx->q_resources = NULL; rx 171 drivers/net/ethernet/google/gve/gve_rx.c kvfree(rx->data.page_info); rx 173 drivers/net/ethernet/google/gve/gve_rx.c bytes = sizeof(*rx->data.data_ring) * slots; rx 174 drivers/net/ethernet/google/gve/gve_rx.c dma_free_coherent(hdev, bytes, rx->data.data_ring, rx->data.data_bus); rx 175 drivers/net/ethernet/google/gve/gve_rx.c rx->data.data_ring = NULL; rx 212 drivers/net/ethernet/google/gve/gve_rx.c void gve_rx_write_doorbell(struct gve_priv *priv, struct gve_rx_ring *rx) rx 214 drivers/net/ethernet/google/gve/gve_rx.c u32 db_idx = be32_to_cpu(rx->q_resources->db_index); rx 216 drivers/net/ethernet/google/gve/gve_rx.c iowrite32be(rx->fill_cnt, &priv->db_bar2[db_idx]); rx 275 drivers/net/ethernet/google/gve/gve_rx.c static bool gve_rx(struct gve_rx_ring *rx, struct gve_rx_desc *rx_desc, rx 279 drivers/net/ethernet/google/gve/gve_rx.c struct gve_priv *priv = rx->gve; rx 280 drivers/net/ethernet/google/gve/gve_rx.c struct napi_struct *napi = &priv->ntfy_blocks[rx->ntfy_id].napi; rx 291 drivers/net/ethernet/google/gve/gve_rx.c page_info = &rx->data.page_info[idx]; rx 292 drivers/net/ethernet/google/gve/gve_rx.c dma_sync_single_for_cpu(&priv->pdev->dev, rx->data.qpl->page_buses[idx], rx 322 drivers/net/ethernet/google/gve/gve_rx.c gve_rx_flip_buff(page_info, &rx->data.data_ring[idx]); rx 365 drivers/net/ethernet/google/gve/gve_rx.c static bool gve_rx_work_pending(struct gve_rx_ring *rx) rx 371 drivers/net/ethernet/google/gve/gve_rx.c next_idx = rx->cnt & rx->mask; rx 372 drivers/net/ethernet/google/gve/gve_rx.c desc = rx->desc.desc_ring + next_idx; rx 378 drivers/net/ethernet/google/gve/gve_rx.c return (GVE_SEQNO(flags_seq) == rx->desc.seqno); rx 381 drivers/net/ethernet/google/gve/gve_rx.c bool gve_clean_rx_done(struct gve_rx_ring *rx, int budget, rx 384 drivers/net/ethernet/google/gve/gve_rx.c struct gve_priv *priv = rx->gve; rx 386 drivers/net/ethernet/google/gve/gve_rx.c u32 cnt = rx->cnt; rx 387 drivers/net/ethernet/google/gve/gve_rx.c u32 idx = cnt & rx->mask; rx 391 drivers/net/ethernet/google/gve/gve_rx.c desc = rx->desc.desc_ring + idx; rx 392 drivers/net/ethernet/google/gve/gve_rx.c while ((GVE_SEQNO(desc->flags_seq) == rx->desc.seqno) && rx 396 drivers/net/ethernet/google/gve/gve_rx.c rx->q_num, idx, desc, desc->flags_seq); rx 399 drivers/net/ethernet/google/gve/gve_rx.c rx->q_num, GVE_SEQNO(desc->flags_seq), rx 400 
drivers/net/ethernet/google/gve/gve_rx.c rx->desc.seqno); rx 402 drivers/net/ethernet/google/gve/gve_rx.c if (!gve_rx(rx, desc, feat, idx)) rx 405 drivers/net/ethernet/google/gve/gve_rx.c idx = cnt & rx->mask; rx 406 drivers/net/ethernet/google/gve/gve_rx.c desc = rx->desc.desc_ring + idx; rx 407 drivers/net/ethernet/google/gve/gve_rx.c rx->desc.seqno = gve_next_seqno(rx->desc.seqno); rx 414 drivers/net/ethernet/google/gve/gve_rx.c u64_stats_update_begin(&rx->statss); rx 415 drivers/net/ethernet/google/gve/gve_rx.c rx->rpackets += work_done; rx 416 drivers/net/ethernet/google/gve/gve_rx.c rx->rbytes += bytes; rx 417 drivers/net/ethernet/google/gve/gve_rx.c u64_stats_update_end(&rx->statss); rx 418 drivers/net/ethernet/google/gve/gve_rx.c rx->cnt = cnt; rx 419 drivers/net/ethernet/google/gve/gve_rx.c rx->fill_cnt += work_done; rx 421 drivers/net/ethernet/google/gve/gve_rx.c gve_rx_write_doorbell(priv, rx); rx 422 drivers/net/ethernet/google/gve/gve_rx.c return gve_rx_work_pending(rx); rx 427 drivers/net/ethernet/google/gve/gve_rx.c struct gve_rx_ring *rx = block->rx; rx 438 drivers/net/ethernet/google/gve/gve_rx.c repoll |= gve_clean_rx_done(rx, budget, feat); rx 440 drivers/net/ethernet/google/gve/gve_rx.c repoll |= gve_rx_work_pending(rx); rx 584 drivers/net/ethernet/hisilicon/hip04_eth.c int rx = 0; rx 622 drivers/net/ethernet/hisilicon/hip04_eth.c rx++; rx 638 drivers/net/ethernet/hisilicon/hip04_eth.c if (rx >= budget) { rx 652 drivers/net/ethernet/hisilicon/hip04_eth.c napi_complete_done(napi, rx); rx 655 drivers/net/ethernet/hisilicon/hip04_eth.c if (rx < budget && tx_remaining) rx 658 drivers/net/ethernet/hisilicon/hip04_eth.c return rx; rx 330 drivers/net/ethernet/hisilicon/hix5hd2_gmac.c static void hix5hd2_set_desc_depth(struct hix5hd2_priv *priv, int rx, int tx) rx 333 drivers/net/ethernet/hisilicon/hix5hd2_gmac.c writel_relaxed(rx << 3, priv->base + RX_FQ_DEPTH); rx 337 drivers/net/ethernet/hisilicon/hix5hd2_gmac.c writel_relaxed(rx << 3, priv->base + RX_BQ_DEPTH); rx 209 drivers/net/ethernet/hisilicon/hns/hnae.h } rx; rx 653 drivers/net/ethernet/hisilicon/hns/hnae.h ring->desc[i].rx.ipoff_bnum_pid_flag = 0; rx 661 drivers/net/ethernet/hisilicon/hns/hnae.h ring->desc[i].rx.ipoff_bnum_pid_flag = 0; rx 96 drivers/net/ethernet/hisilicon/hns/hns_dsaf_gmac.c static void hns_gmac_get_en(void *mac_drv, u32 *rx, u32 *tx) rx 103 drivers/net/ethernet/hisilicon/hns/hns_dsaf_gmac.c *rx = dsaf_get_bit(porten, GMAC_PORT_RX_EN_B); rx 510 drivers/net/ethernet/hisilicon/hns/hns_dsaf_gmac.c u32 rx; rx 522 drivers/net/ethernet/hisilicon/hns/hns_dsaf_gmac.c hns_gmac_get_en(mac_drv, &rx, &tx); rx 523 drivers/net/ethernet/hisilicon/hns/hns_dsaf_gmac.c mac_info->port_en = rx && tx; rx 422 drivers/net/ethernet/hisilicon/hns/hns_enet.c size = le16_to_cpu(desc->rx.size); rx 573 drivers/net/ethernet/hisilicon/hns/hns_enet.c length = le16_to_cpu(desc->rx.pkt_len); rx 574 drivers/net/ethernet/hisilicon/hns/hns_enet.c bnum_flag = le32_to_cpu(desc->rx.ipoff_bnum_pid_flag); rx 629 drivers/net/ethernet/hisilicon/hns/hns_enet.c bnum_flag = le32_to_cpu(desc->rx.ipoff_bnum_pid_flag); rx 639 drivers/net/ethernet/hisilicon/hns/hns_enet.c if (unlikely((!desc->rx.pkt_len) || rx 225 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c dev_info(dev, "(RX)l234_info: %u\n", rx_desc->rx.l234_info); rx 226 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c dev_info(dev, "(RX)pkt_len: %u\n", rx_desc->rx.pkt_len); rx 227 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c dev_info(dev, "(RX)size: %u\n", rx_desc->rx.size); rx 
228 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c dev_info(dev, "(RX)rss_hash: %u\n", rx_desc->rx.rss_hash); rx 229 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c dev_info(dev, "(RX)fd_id: %u\n", rx_desc->rx.fd_id); rx 230 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c dev_info(dev, "(RX)vlan_tag: %u\n", rx_desc->rx.vlan_tag); rx 231 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c dev_info(dev, "(RX)o_dm_vlan_id_fb: %u\n", rx_desc->rx.o_dm_vlan_id_fb); rx 232 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c dev_info(dev, "(RX)ot_vlan_tag: %u\n", rx_desc->rx.ot_vlan_tag); rx 233 drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c dev_info(dev, "(RX)bd_base_info: %u\n", rx_desc->rx.bd_base_info); rx 2282 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c ring->desc[i].rx.bd_base_info = 0; rx 2290 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c ring->desc[i].rx.bd_base_info = 0; rx 2437 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c int size = le16_to_cpu(desc->rx.size); rx 2589 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c *vlan_tag = le16_to_cpu(desc->rx.ot_vlan_tag); rx 2591 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c *vlan_tag = le16_to_cpu(desc->rx.vlan_tag); rx 2611 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c *vlan_tag = le16_to_cpu(desc->rx.ot_vlan_tag); rx 2618 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c *vlan_tag = le16_to_cpu(desc->rx.vlan_tag); rx 2623 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c *vlan_tag = le16_to_cpu(desc->rx.ot_vlan_tag); rx 2625 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c *vlan_tag = le16_to_cpu(desc->rx.vlan_tag); rx 2701 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c bd_base_info = le32_to_cpu(pre_desc->rx.bd_base_info); rx 2703 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c bd_base_info = le32_to_cpu(desc->rx.bd_base_info); rx 2709 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c bd_base_info = le32_to_cpu(desc->rx.bd_base_info); rx 2736 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c head_skb->data_len += le16_to_cpu(desc->rx.size); rx 2737 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c head_skb->len += le16_to_cpu(desc->rx.size); rx 2809 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c bd_base_info = le32_to_cpu(desc->rx.bd_base_info); rx 2810 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c l234info = le32_to_cpu(desc->rx.l234_info); rx 2811 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c ol_info = le32_to_cpu(desc->rx.ol_info); rx 2825 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c if (unlikely(!desc->rx.pkt_len || (l234info & (BIT(HNS3_RXD_TRUNCAT_B) | rx 2866 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c hns3_set_rx_skb_rss_type(ring, skb, le32_to_cpu(desc->rx.rss_hash)); rx 2885 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c length = le16_to_cpu(desc->rx.size); rx 2886 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c bd_base_info = le32_to_cpu(desc->rx.bd_base_info); rx 297 drivers/net/ethernet/hisilicon/hns3/hns3_enet.h } rx; rx 161 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_tm.c int hclge_mac_pause_en_cfg(struct hclge_dev *hdev, bool tx, bool rx) rx 168 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_tm.c (rx ? 
HCLGE_RX_MAC_PAUSE_EN_MSK : 0)); rx 153 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_tm.h int hclge_mac_pause_en_cfg(struct hclge_dev *hdev, bool tx, bool rx); rx 881 drivers/net/ethernet/ibm/ehea/ehea_main.c int rx = 0; rx 884 drivers/net/ethernet/ibm/ehea/ehea_main.c rx += ehea_proc_rwqes(dev, pr, budget - rx); rx 886 drivers/net/ethernet/ibm/ehea/ehea_main.c while (rx != budget) { rx 897 drivers/net/ethernet/ibm/ehea/ehea_main.c return rx; rx 900 drivers/net/ethernet/ibm/ehea/ehea_main.c return rx; rx 903 drivers/net/ethernet/ibm/ehea/ehea_main.c rx += ehea_proc_rwqes(dev, pr, budget - rx); rx 906 drivers/net/ethernet/ibm/ehea/ehea_main.c return rx; rx 420 drivers/net/ethernet/intel/e100.c struct rx *next, *prev; rx 547 drivers/net/ethernet/intel/e100.c struct rx *rxs ____cacheline_aligned; rx 548 drivers/net/ethernet/intel/e100.c struct rx *rx_to_use; rx 549 drivers/net/ethernet/intel/e100.c struct rx *rx_to_clean; rx 1908 drivers/net/ethernet/intel/e100.c static inline void e100_start_receiver(struct nic *nic, struct rx *rx) rx 1914 drivers/net/ethernet/intel/e100.c if (!rx) rx = nic->rxs; rx 1917 drivers/net/ethernet/intel/e100.c if (rx->skb) { rx 1918 drivers/net/ethernet/intel/e100.c e100_exec_cmd(nic, ruc_start, rx->dma_addr); rx 1924 drivers/net/ethernet/intel/e100.c static int e100_rx_alloc_skb(struct nic *nic, struct rx *rx) rx 1926 drivers/net/ethernet/intel/e100.c if (!(rx->skb = netdev_alloc_skb_ip_align(nic->netdev, RFD_BUF_LEN))) rx 1930 drivers/net/ethernet/intel/e100.c skb_copy_to_linear_data(rx->skb, &nic->blank_rfd, sizeof(struct rfd)); rx 1931 drivers/net/ethernet/intel/e100.c rx->dma_addr = pci_map_single(nic->pdev, rx->skb->data, rx 1934 drivers/net/ethernet/intel/e100.c if (pci_dma_mapping_error(nic->pdev, rx->dma_addr)) { rx 1935 drivers/net/ethernet/intel/e100.c dev_kfree_skb_any(rx->skb); rx 1936 drivers/net/ethernet/intel/e100.c rx->skb = NULL; rx 1937 drivers/net/ethernet/intel/e100.c rx->dma_addr = 0; rx 1944 drivers/net/ethernet/intel/e100.c if (rx->prev->skb) { rx 1945 drivers/net/ethernet/intel/e100.c struct rfd *prev_rfd = (struct rfd *)rx->prev->skb->data; rx 1946 drivers/net/ethernet/intel/e100.c put_unaligned_le32(rx->dma_addr, &prev_rfd->link); rx 1947 drivers/net/ethernet/intel/e100.c pci_dma_sync_single_for_device(nic->pdev, rx->prev->dma_addr, rx 1954 drivers/net/ethernet/intel/e100.c static int e100_rx_indicate(struct nic *nic, struct rx *rx, rx 1958 drivers/net/ethernet/intel/e100.c struct sk_buff *skb = rx->skb; rx 1967 drivers/net/ethernet/intel/e100.c pci_dma_sync_single_for_cpu(nic->pdev, rx->dma_addr, rx 1987 drivers/net/ethernet/intel/e100.c pci_dma_sync_single_for_device(nic->pdev, rx->dma_addr, rx 2001 drivers/net/ethernet/intel/e100.c pci_unmap_single(nic->pdev, rx->dma_addr, rx 2048 drivers/net/ethernet/intel/e100.c rx->skb = NULL; rx 2056 drivers/net/ethernet/intel/e100.c struct rx *rx; rx 2058 drivers/net/ethernet/intel/e100.c struct rx *old_before_last_rx, *new_before_last_rx; rx 2062 drivers/net/ethernet/intel/e100.c for (rx = nic->rx_to_clean; rx->skb; rx = nic->rx_to_clean = rx->next) { rx 2063 drivers/net/ethernet/intel/e100.c err = e100_rx_indicate(nic, rx, work_done, work_to_do); rx 2083 drivers/net/ethernet/intel/e100.c for (rx = nic->rx_to_use; !rx->skb; rx = nic->rx_to_use = rx->next) { rx 2084 drivers/net/ethernet/intel/e100.c if (unlikely(e100_rx_alloc_skb(nic, rx))) rx 2132 drivers/net/ethernet/intel/e100.c struct rx *rx; rx 2138 drivers/net/ethernet/intel/e100.c for (rx = nic->rxs, i = 0; i < count; rx++, i++) { rx 
2139 drivers/net/ethernet/intel/e100.c if (rx->skb) { rx 2140 drivers/net/ethernet/intel/e100.c pci_unmap_single(nic->pdev, rx->dma_addr, rx 2142 drivers/net/ethernet/intel/e100.c dev_kfree_skb(rx->skb); rx 2154 drivers/net/ethernet/intel/e100.c struct rx *rx; rx 2161 drivers/net/ethernet/intel/e100.c if (!(nic->rxs = kcalloc(count, sizeof(struct rx), GFP_ATOMIC))) rx 2164 drivers/net/ethernet/intel/e100.c for (rx = nic->rxs, i = 0; i < count; rx++, i++) { rx 2165 drivers/net/ethernet/intel/e100.c rx->next = (i + 1 < count) ? rx + 1 : nic->rxs; rx 2166 drivers/net/ethernet/intel/e100.c rx->prev = (i == 0) ? nic->rxs + count - 1 : rx - 1; rx 2167 drivers/net/ethernet/intel/e100.c if (e100_rx_alloc_skb(nic, rx)) { rx 2179 drivers/net/ethernet/intel/e100.c rx = nic->rxs->prev->prev; rx 2180 drivers/net/ethernet/intel/e100.c before_last = (struct rfd *)rx->skb->data; rx 2183 drivers/net/ethernet/intel/e100.c pci_dma_sync_single_for_device(nic->pdev, rx->dma_addr, rx 189 drivers/net/ethernet/intel/fm10k/fm10k.h struct fm10k_ring_container rx, tx; rx 120 drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c if (ring < q_vector->rx.ring) rx 176 drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c for (i = 0; i < q_vector->rx.count; i++) { rx 177 drivers/net/ethernet/intel/fm10k/fm10k_debugfs.c struct fm10k_ring *ring = &q_vector->rx.ring[i]; rx 681 drivers/net/ethernet/intel/fm10k/fm10k_ethtool.c qv->rx.itr = rx_itr; rx 631 drivers/net/ethernet/intel/fm10k/fm10k_main.c q_vector->rx.total_packets += total_packets; rx 632 drivers/net/ethernet/intel/fm10k/fm10k_main.c q_vector->rx.total_bytes += total_bytes; rx 1419 drivers/net/ethernet/intel/fm10k/fm10k_main.c fm10k_update_itr(&q_vector->rx); rx 1425 drivers/net/ethernet/intel/fm10k/fm10k_main.c itr |= (q_vector->rx.itr & FM10K_ITR_MAX) << FM10K_ITR_INTERVAL1_SHIFT; rx 1451 drivers/net/ethernet/intel/fm10k/fm10k_main.c if (q_vector->rx.count > 1) rx 1452 drivers/net/ethernet/intel/fm10k/fm10k_main.c per_ring_budget = max(budget / q_vector->rx.count, 1); rx 1456 drivers/net/ethernet/intel/fm10k/fm10k_main.c fm10k_for_each_ring(ring, q_vector->rx) { rx 1658 drivers/net/ethernet/intel/fm10k/fm10k_main.c q_vector->rx.ring = ring; rx 1659 drivers/net/ethernet/intel/fm10k/fm10k_main.c q_vector->rx.itr = interface->rx_itr; rx 1660 drivers/net/ethernet/intel/fm10k/fm10k_main.c q_vector->rx.itr_scale = interface->hw.mac.itr_scale; rx 1661 drivers/net/ethernet/intel/fm10k/fm10k_main.c q_vector->rx.count = rxr_count; rx 1711 drivers/net/ethernet/intel/fm10k/fm10k_main.c fm10k_for_each_ring(ring, q_vector->rx) rx 216 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u32 *tail = mbx->rx.buffer + fm10k_fifo_tail_offset(&mbx->rx, 0); rx 298 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c struct fm10k_mbx_fifo *fifo = &mbx->rx; rx 426 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c struct fm10k_mbx_fifo *fifo = &mbx->rx; rx 475 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c struct fm10k_mbx_fifo *fifo = &mbx->rx; rx 640 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c struct fm10k_mbx_fifo *fifo = &mbx->rx; rx 664 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u16 msg_size = fm10k_fifo_head_len(&mbx->rx); rx 666 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c return msg_size && (fm10k_fifo_used(&mbx->rx) >= msg_size); rx 705 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c struct fm10k_mbx_fifo *fifo = &mbx->rx; rx 842 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c FM10K_MSG_HDR_FIELD_SET(mbx->rx.size - 1, CONNECT_SIZE); rx 1062 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c mbx->max_size = 
mbx->rx.size - 1; rx 1084 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c mbx->rx.tail = 0; rx 1085 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c mbx->rx.head = 0; rx 1165 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (size > mbx->rx.size) { rx 1166 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c mbx->max_size = mbx->rx.size - 1; rx 1438 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (!mbx->rx.buffer) rx 1608 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c fm10k_fifo_init(&mbx->rx, &mbx->buffer[FM10K_MBX_TX_BUFFER_SIZE], rx 1696 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (!mbx->rx.buffer) rx 2166 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c fm10k_fifo_init(&mbx->rx, &mbx->buffer[FM10K_MBX_TX_BUFFER_SIZE], rx 256 drivers/net/ethernet/intel/fm10k/fm10k_mbx.h struct fm10k_mbx_fifo rx; rx 722 drivers/net/ethernet/intel/fm10k/fm10k_pci.c if (!qv->tx.count && !qv->rx.count) rx 1187 drivers/net/ethernet/intel/fm10k/fm10k_pci.c if (q_vector->rx.count || q_vector->tx.count) rx 1734 drivers/net/ethernet/intel/fm10k/fm10k_pci.c if (!q_vector->tx.count && !q_vector->rx.count) rx 1768 drivers/net/ethernet/intel/fm10k/fm10k_pci.c if (q_vector->tx.count && q_vector->rx.count) { rx 1772 drivers/net/ethernet/intel/fm10k/fm10k_pci.c } else if (q_vector->rx.count) { rx 1818 drivers/net/ethernet/intel/fm10k/fm10k_pci.c if (!q_vector->tx.count && !q_vector->rx.count) rx 863 drivers/net/ethernet/intel/i40e/i40e.h struct i40e_ring_container rx; rx 2836 drivers/net/ethernet/intel/i40e/i40e_ethtool.c q_vector->rx.target_itr = ITR_TO_REG(rx_ring->itr_setting); rx 3533 drivers/net/ethernet/intel/i40e/i40e_main.c q_vector->rx.next_update = jiffies + 1; rx 3534 drivers/net/ethernet/intel/i40e/i40e_main.c q_vector->rx.target_itr = rx 3537 drivers/net/ethernet/intel/i40e/i40e_main.c q_vector->rx.target_itr >> 1); rx 3538 drivers/net/ethernet/intel/i40e/i40e_main.c q_vector->rx.current_itr = q_vector->rx.target_itr; rx 3647 drivers/net/ethernet/intel/i40e/i40e_main.c q_vector->rx.next_update = jiffies + 1; rx 3648 drivers/net/ethernet/intel/i40e/i40e_main.c q_vector->rx.target_itr = ITR_TO_REG(vsi->rx_rings[0]->itr_setting); rx 3649 drivers/net/ethernet/intel/i40e/i40e_main.c wr32(hw, I40E_PFINT_ITR0(I40E_RX_ITR), q_vector->rx.target_itr >> 1); rx 3650 drivers/net/ethernet/intel/i40e/i40e_main.c q_vector->rx.current_itr = q_vector->rx.target_itr; rx 3725 drivers/net/ethernet/intel/i40e/i40e_main.c if (!q_vector->tx.ring && !q_vector->rx.ring) rx 3783 drivers/net/ethernet/intel/i40e/i40e_main.c if (q_vector->tx.ring && q_vector->rx.ring) { rx 3787 drivers/net/ethernet/intel/i40e/i40e_main.c } else if (q_vector->rx.ring) { rx 4183 drivers/net/ethernet/intel/i40e/i40e_main.c rx_ring->next = q_vector->rx.ring; rx 4184 drivers/net/ethernet/intel/i40e/i40e_main.c q_vector->rx.ring = rx_ring; rx 4185 drivers/net/ethernet/intel/i40e/i40e_main.c q_vector->rx.count++; rx 4220 drivers/net/ethernet/intel/i40e/i40e_main.c q_vector->rx.count = 0; rx 4222 drivers/net/ethernet/intel/i40e/i40e_main.c q_vector->rx.ring = NULL; rx 4736 drivers/net/ethernet/intel/i40e/i40e_main.c i40e_for_each_ring(ring, q_vector->rx) rx 4819 drivers/net/ethernet/intel/i40e/i40e_main.c if (q_vector->rx.ring || q_vector->tx.ring) rx 4838 drivers/net/ethernet/intel/i40e/i40e_main.c if (q_vector->rx.ring || q_vector->tx.ring) rx 12638 drivers/net/ethernet/intel/i40e/i40e_main.c if (q_vector->rx.ring || q_vector->tx.ring) { rx 964 drivers/net/ethernet/intel/i40e/i40e_txrx.c return &q_vector->rx == rc; rx 1064 drivers/net/ethernet/intel/i40e/i40e_txrx.c 
(q_vector->rx.target_itr & I40E_ITR_MASK) == rx 1092 drivers/net/ethernet/intel/i40e/i40e_txrx.c itr = min(q_vector->tx.current_itr, q_vector->rx.current_itr); rx 2291 drivers/net/ethernet/intel/i40e/i40e_txrx.c rx_ring->q_vector->rx.total_packets += total_rx_packets; rx 2292 drivers/net/ethernet/intel/i40e/i40e_txrx.c rx_ring->q_vector->rx.total_bytes += total_rx_bytes; rx 2516 drivers/net/ethernet/intel/i40e/i40e_txrx.c i40e_update_itr(q_vector, &q_vector->rx); rx 2526 drivers/net/ethernet/intel/i40e/i40e_txrx.c if (q_vector->rx.target_itr < q_vector->rx.current_itr) { rx 2529 drivers/net/ethernet/intel/i40e/i40e_txrx.c q_vector->rx.target_itr); rx 2530 drivers/net/ethernet/intel/i40e/i40e_txrx.c q_vector->rx.current_itr = q_vector->rx.target_itr; rx 2533 drivers/net/ethernet/intel/i40e/i40e_txrx.c ((q_vector->rx.target_itr - q_vector->rx.current_itr) < rx 2542 drivers/net/ethernet/intel/i40e/i40e_txrx.c } else if (q_vector->rx.current_itr != q_vector->rx.target_itr) { rx 2545 drivers/net/ethernet/intel/i40e/i40e_txrx.c q_vector->rx.target_itr); rx 2546 drivers/net/ethernet/intel/i40e/i40e_txrx.c q_vector->rx.current_itr = q_vector->rx.target_itr; rx 2609 drivers/net/ethernet/intel/i40e/i40e_txrx.c i40e_for_each_ring(ring, q_vector->rx) { rx 97 drivers/net/ethernet/intel/iavf/iavf.h struct iavf_ring_container rx; rx 731 drivers/net/ethernet/intel/iavf/iavf_ethtool.c q_vector->rx.target_itr = ITR_TO_REG(rx_ring->itr_setting); rx 287 drivers/net/ethernet/intel/iavf/iavf_main.c if (!q_vector->tx.ring && !q_vector->rx.ring) rx 309 drivers/net/ethernet/intel/iavf/iavf_main.c rx_ring->next = q_vector->rx.ring; rx 311 drivers/net/ethernet/intel/iavf/iavf_main.c q_vector->rx.ring = rx_ring; rx 312 drivers/net/ethernet/intel/iavf/iavf_main.c q_vector->rx.count++; rx 313 drivers/net/ethernet/intel/iavf/iavf_main.c q_vector->rx.next_update = jiffies + 1; rx 314 drivers/net/ethernet/intel/iavf/iavf_main.c q_vector->rx.target_itr = ITR_TO_REG(rx_ring->itr_setting); rx 317 drivers/net/ethernet/intel/iavf/iavf_main.c q_vector->rx.current_itr >> 1); rx 318 drivers/net/ethernet/intel/iavf/iavf_main.c q_vector->rx.current_itr = q_vector->rx.target_itr; rx 431 drivers/net/ethernet/intel/iavf/iavf_main.c if (q_vector->tx.ring && q_vector->rx.ring) { rx 435 drivers/net/ethernet/intel/iavf/iavf_main.c } else if (q_vector->rx.ring) { rx 374 drivers/net/ethernet/intel/iavf/iavf_txrx.c return &q_vector->rx == rc; rx 474 drivers/net/ethernet/intel/iavf/iavf_txrx.c (q_vector->rx.target_itr & IAVF_ITR_MASK) == rx 502 drivers/net/ethernet/intel/iavf/iavf_txrx.c itr = min(q_vector->tx.current_itr, q_vector->rx.current_itr); rx 1590 drivers/net/ethernet/intel/iavf/iavf_txrx.c rx_ring->q_vector->rx.total_packets += total_rx_packets; rx 1591 drivers/net/ethernet/intel/iavf/iavf_txrx.c rx_ring->q_vector->rx.total_bytes += total_rx_bytes; rx 1651 drivers/net/ethernet/intel/iavf/iavf_txrx.c iavf_update_itr(q_vector, &q_vector->rx); rx 1661 drivers/net/ethernet/intel/iavf/iavf_txrx.c if (q_vector->rx.target_itr < q_vector->rx.current_itr) { rx 1664 drivers/net/ethernet/intel/iavf/iavf_txrx.c q_vector->rx.target_itr); rx 1665 drivers/net/ethernet/intel/iavf/iavf_txrx.c q_vector->rx.current_itr = q_vector->rx.target_itr; rx 1668 drivers/net/ethernet/intel/iavf/iavf_txrx.c ((q_vector->rx.target_itr - q_vector->rx.current_itr) < rx 1677 drivers/net/ethernet/intel/iavf/iavf_txrx.c } else if (q_vector->rx.current_itr != q_vector->rx.target_itr) { rx 1680 drivers/net/ethernet/intel/iavf/iavf_txrx.c q_vector->rx.target_itr); rx 
1681 drivers/net/ethernet/intel/iavf/iavf_txrx.c q_vector->rx.current_itr = q_vector->rx.target_itr; rx 1740 drivers/net/ethernet/intel/iavf/iavf_txrx.c iavf_for_each_ring(ring, q_vector->rx) { rx 295 drivers/net/ethernet/intel/ice/ice.h struct ice_ring_container rx; rx 3149 drivers/net/ethernet/intel/ice/ice_ethtool.c &vsi->rx_rings[q_num]->q_vector->rx)) rx 3156 drivers/net/ethernet/intel/ice/ice_ethtool.c &vsi->rx_rings[q_num]->q_vector->rx)) rx 3322 drivers/net/ethernet/intel/ice/ice_ethtool.c &vsi->rx_rings[q_num]->q_vector->rx, rx 3332 drivers/net/ethernet/intel/ice/ice_ethtool.c &vsi->rx_rings[q_num]->q_vector->rx, rx 523 drivers/net/ethernet/intel/ice/ice_lib.c if (!q_vector->tx.ring && !q_vector->rx.ring) rx 1120 drivers/net/ethernet/intel/ice/ice_lib.c ice_for_each_ring(ring, q_vector->rx) rx 1388 drivers/net/ethernet/intel/ice/ice_lib.c q_vector->rx.ring = NULL; rx 1389 drivers/net/ethernet/intel/ice/ice_lib.c q_vector->rx.itr_idx = ICE_RX_ITR; rx 1396 drivers/net/ethernet/intel/ice/ice_lib.c rx_ring->next = q_vector->rx.ring; rx 1397 drivers/net/ethernet/intel/ice/ice_lib.c q_vector->rx.ring = rx_ring; rx 1887 drivers/net/ethernet/intel/ice/ice_lib.c struct ice_ring_container *rc = &q_vector->rx; rx 2019 drivers/net/ethernet/intel/ice/ice_lib.c q_vector->rx.itr_idx); rx 1617 drivers/net/ethernet/intel/ice/ice_main.c if (q_vector->tx.ring && q_vector->rx.ring) { rx 1621 drivers/net/ethernet/intel/ice/ice_main.c } else if (q_vector->rx.ring) { rx 3528 drivers/net/ethernet/intel/ice/ice_main.c if (q_vector->rx.ring || q_vector->tx.ring) rx 3895 drivers/net/ethernet/intel/ice/ice_main.c if (q_vector->rx.ring || q_vector->tx.ring) rx 1107 drivers/net/ethernet/intel/ice/ice_txrx.c rx_ring->q_vector->rx.total_pkts += total_rx_pkts; rx 1108 drivers/net/ethernet/intel/ice/ice_txrx.c rx_ring->q_vector->rx.total_bytes += total_rx_bytes; rx 1213 drivers/net/ethernet/intel/ice/ice_txrx.c container_is_rx = (&q_vector->rx == rc); rx 1252 drivers/net/ethernet/intel/ice/ice_txrx.c (q_vector->rx.target_itr & ICE_ITR_MASK) == rx 1280 drivers/net/ethernet/intel/ice/ice_txrx.c itr = min(q_vector->tx.current_itr, q_vector->rx.current_itr); rx 1365 drivers/net/ethernet/intel/ice/ice_txrx.c struct ice_ring_container *rx = &q_vector->rx; rx 1374 drivers/net/ethernet/intel/ice/ice_txrx.c itr_val = ice_buildreg_itr(rx->itr_idx, ICE_WB_ON_ITR_USECS); rx 1377 drivers/net/ethernet/intel/ice/ice_txrx.c rx->target_itr = rx->itr_setting; rx 1379 drivers/net/ethernet/intel/ice/ice_txrx.c rx->current_itr = ICE_WB_ON_ITR_USECS | rx 1380 drivers/net/ethernet/intel/ice/ice_txrx.c (rx->itr_setting & ICE_ITR_DYNAMIC); rx 1388 drivers/net/ethernet/intel/ice/ice_txrx.c ice_update_itr(q_vector, rx); rx 1398 drivers/net/ethernet/intel/ice/ice_txrx.c if (rx->target_itr < rx->current_itr) { rx 1400 drivers/net/ethernet/intel/ice/ice_txrx.c itr_val = ice_buildreg_itr(rx->itr_idx, rx->target_itr); rx 1401 drivers/net/ethernet/intel/ice/ice_txrx.c rx->current_itr = rx->target_itr; rx 1404 drivers/net/ethernet/intel/ice/ice_txrx.c ((rx->target_itr - rx->current_itr) < rx 1412 drivers/net/ethernet/intel/ice/ice_txrx.c } else if (rx->current_itr != rx->target_itr) { rx 1414 drivers/net/ethernet/intel/ice/ice_txrx.c itr_val = ice_buildreg_itr(rx->itr_idx, rx->target_itr); rx 1415 drivers/net/ethernet/intel/ice/ice_txrx.c rx->current_itr = rx->target_itr; rx 1505 drivers/net/ethernet/intel/ice/ice_txrx.c ice_for_each_ring(ring, q_vector->rx) { rx 2195 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c q_vector->rx.itr_idx = 
map->rxitr_idx; rx 2198 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c q_vector->rx.itr_idx); rx 302 drivers/net/ethernet/intel/igb/igb.h struct igb_ring_container rx, tx; rx 2237 drivers/net/ethernet/intel/igb/igb_ethtool.c if (q_vector->rx.ring) rx 798 drivers/net/ethernet/intel/igb/igb_main.c if (q_vector->rx.ring) rx 799 drivers/net/ethernet/intel/igb/igb_main.c rx_queue = q_vector->rx.ring->reg_idx; rx 958 drivers/net/ethernet/intel/igb/igb_main.c if (q_vector->rx.ring && q_vector->tx.ring) rx 960 drivers/net/ethernet/intel/igb/igb_main.c q_vector->rx.ring->queue_index); rx 964 drivers/net/ethernet/intel/igb/igb_main.c else if (q_vector->rx.ring) rx 966 drivers/net/ethernet/intel/igb/igb_main.c q_vector->rx.ring->queue_index); rx 1034 drivers/net/ethernet/intel/igb/igb_main.c if (q_vector->rx.ring) rx 1035 drivers/net/ethernet/intel/igb/igb_main.c adapter->rx_ring[q_vector->rx.ring->queue_index] = NULL; rx 1290 drivers/net/ethernet/intel/igb/igb_main.c igb_add_ring(ring, &q_vector->rx); rx 5478 drivers/net/ethernet/intel/igb/igb_main.c packets = q_vector->rx.total_packets; rx 5480 drivers/net/ethernet/intel/igb/igb_main.c avg_wire_size = q_vector->rx.total_bytes / packets; rx 5505 drivers/net/ethernet/intel/igb/igb_main.c ((q_vector->rx.ring && adapter->rx_itr_setting == 3) || rx 5506 drivers/net/ethernet/intel/igb/igb_main.c (!q_vector->rx.ring && adapter->tx_itr_setting == 3))) rx 5515 drivers/net/ethernet/intel/igb/igb_main.c q_vector->rx.total_bytes = 0; rx 5516 drivers/net/ethernet/intel/igb/igb_main.c q_vector->rx.total_packets = 0; rx 5603 drivers/net/ethernet/intel/igb/igb_main.c igb_update_itr(q_vector, &q_vector->rx); rx 5605 drivers/net/ethernet/intel/igb/igb_main.c current_itr = max(q_vector->rx.itr, q_vector->tx.itr); rx 5609 drivers/net/ethernet/intel/igb/igb_main.c ((q_vector->rx.ring && adapter->rx_itr_setting == 3) || rx 5610 drivers/net/ethernet/intel/igb/igb_main.c (!q_vector->rx.ring && adapter->tx_itr_setting == 3))) rx 6645 drivers/net/ethernet/intel/igb/igb_main.c if (q_vector->rx.ring) rx 6646 drivers/net/ethernet/intel/igb/igb_main.c igb_update_rx_dca(adapter, q_vector->rx.ring, cpu); rx 7681 drivers/net/ethernet/intel/igb/igb_main.c if ((q_vector->rx.ring && (adapter->rx_itr_setting & 3)) || rx 7682 drivers/net/ethernet/intel/igb/igb_main.c (!q_vector->rx.ring && (adapter->tx_itr_setting & 3))) { rx 7717 drivers/net/ethernet/intel/igb/igb_main.c if (q_vector->rx.ring) { rx 8305 drivers/net/ethernet/intel/igb/igb_main.c struct igb_ring *rx_ring = q_vector->rx.ring; rx 8385 drivers/net/ethernet/intel/igb/igb_main.c q_vector->rx.total_packets += total_packets; rx 8386 drivers/net/ethernet/intel/igb/igb_main.c q_vector->rx.total_bytes += total_bytes; rx 292 drivers/net/ethernet/intel/igc/igc.h struct igc_ring_container rx, tx; rx 865 drivers/net/ethernet/intel/igc/igc_ethtool.c if (q_vector->rx.ring) rx 1560 drivers/net/ethernet/intel/igc/igc_main.c struct igc_ring *rx_ring = q_vector->rx.ring; rx 1639 drivers/net/ethernet/intel/igc/igc_main.c q_vector->rx.total_packets += total_packets; rx 1640 drivers/net/ethernet/intel/igc/igc_main.c q_vector->rx.total_bytes += total_bytes; rx 2598 drivers/net/ethernet/intel/igc/igc_main.c if (q_vector->rx.ring) rx 2599 drivers/net/ethernet/intel/igc/igc_main.c rx_queue = q_vector->rx.ring->reg_idx; rx 2707 drivers/net/ethernet/intel/igc/igc_main.c if (q_vector->rx.ring && q_vector->tx.ring) rx 2709 drivers/net/ethernet/intel/igc/igc_main.c q_vector->rx.ring->queue_index); rx 2713 drivers/net/ethernet/intel/igc/igc_main.c 
else if (q_vector->rx.ring) rx 2715 drivers/net/ethernet/intel/igc/igc_main.c q_vector->rx.ring->queue_index); rx 2763 drivers/net/ethernet/intel/igc/igc_main.c if (q_vector->rx.ring) rx 2764 drivers/net/ethernet/intel/igc/igc_main.c adapter->rx_ring[q_vector->rx.ring->queue_index] = NULL; rx 3108 drivers/net/ethernet/intel/igc/igc_main.c packets = q_vector->rx.total_packets; rx 3110 drivers/net/ethernet/intel/igc/igc_main.c avg_wire_size = q_vector->rx.total_bytes / packets; rx 3135 drivers/net/ethernet/intel/igc/igc_main.c ((q_vector->rx.ring && adapter->rx_itr_setting == 3) || rx 3136 drivers/net/ethernet/intel/igc/igc_main.c (!q_vector->rx.ring && adapter->tx_itr_setting == 3))) rx 3145 drivers/net/ethernet/intel/igc/igc_main.c q_vector->rx.total_bytes = 0; rx 3146 drivers/net/ethernet/intel/igc/igc_main.c q_vector->rx.total_packets = 0; rx 3313 drivers/net/ethernet/intel/igc/igc_main.c igc_update_itr(q_vector, &q_vector->rx); rx 3315 drivers/net/ethernet/intel/igc/igc_main.c current_itr = max(q_vector->rx.itr, q_vector->tx.itr); rx 3319 drivers/net/ethernet/intel/igc/igc_main.c ((q_vector->rx.ring && adapter->rx_itr_setting == 3) || rx 3320 drivers/net/ethernet/intel/igc/igc_main.c (!q_vector->rx.ring && adapter->tx_itr_setting == 3))) rx 3364 drivers/net/ethernet/intel/igc/igc_main.c if ((q_vector->rx.ring && (adapter->rx_itr_setting & 3)) || rx 3365 drivers/net/ethernet/intel/igc/igc_main.c (!q_vector->rx.ring && (adapter->tx_itr_setting & 3))) { rx 3396 drivers/net/ethernet/intel/igc/igc_main.c if (q_vector->rx.ring) { rx 3590 drivers/net/ethernet/intel/igc/igc_main.c igc_add_ring(ring, &q_vector->rx); rx 456 drivers/net/ethernet/intel/ixgbe/ixgbe.h struct ixgbe_ring_container rx, tx; rx 31 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c int rx = DCB_RX_CONFIG; rx 70 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c if (dst->path[rx].prio_type != src->path[rx].prio_type) { rx 71 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c dst->path[rx].prio_type = src->path[rx].prio_type; rx 75 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c if (dst->path[rx].bwg_id != src->path[rx].bwg_id) { rx 76 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c dst->path[rx].bwg_id = src->path[rx].bwg_id; rx 80 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c if (dst->path[rx].bwg_percent != src->path[rx].bwg_percent) { rx 81 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c dst->path[rx].bwg_percent = src->path[rx].bwg_percent; rx 85 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c if (dst->path[rx].up_to_tc_bitmap != rx 86 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c src->path[rx].up_to_tc_bitmap) { rx 87 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c dst->path[rx].up_to_tc_bitmap = rx 88 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c src->path[rx].up_to_tc_bitmap; rx 99 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c if (dcfg->bw_percentage[rx][j] != scfg->bw_percentage[rx][j]) { rx 100 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c dcfg->bw_percentage[rx][j] = scfg->bw_percentage[rx][j]; rx 2285 drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c if (adapter->q_vector[0]->tx.count && adapter->q_vector[0]->rx.count) rx 2336 drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c if (adapter->q_vector[0]->tx.count && adapter->q_vector[0]->rx.count) { rx 2370 drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c if (adapter->q_vector[0]->tx.count && adapter->q_vector[0]->rx.count) rx 2390 drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c if (q_vector->tx.count && !q_vector->rx.count) rx 91 
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c unsigned int *tx, unsigned int *rx) rx 97 drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c *rx = 0; rx 103 drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c *rx = tc << 3; /* 0, 8, 16, 24, 32, 40, 48, 56 */ rx 116 drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c *rx = tc << 4; rx 129 drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c *rx = tc << 5; rx 889 drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c q_vector->rx.itr = IXGBE_ITR_ADAPTIVE_MAX_USECS | rx 972 drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c ixgbe_add_ring(ring, &q_vector->rx); rx 1030 drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c ixgbe_for_each_ring(ring, q_vector->rx) rx 1355 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c ixgbe_for_each_ring(ring, q_vector->rx) rx 2423 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c q_vector->rx.total_packets += total_rx_packets; rx 2424 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c q_vector->rx.total_bytes += total_rx_bytes; rx 2456 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c ixgbe_for_each_ring(ring, q_vector->rx) rx 2721 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c ixgbe_update_itr(q_vector, &q_vector->rx); rx 2724 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c new_itr = min(q_vector->rx.itr, q_vector->tx.itr); rx 3143 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c if (q_vector->rx.ring || q_vector->tx.ring) rx 3185 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c if (q_vector->rx.count > 1) rx 3186 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c per_ring_budget = max(budget/q_vector->rx.count, 1); rx 3190 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c ixgbe_for_each_ring(ring, q_vector->rx) { rx 3235 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c if (q_vector->tx.ring && q_vector->rx.ring) { rx 3239 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c } else if (q_vector->rx.ring) { rx 3406 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c if (!q_vector->rx.ring && !q_vector->tx.ring) rx 7347 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c if (qv->rx.ring || qv->tx.ring) rx 546 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c q_vector->rx.total_packets += total_rx_packets; rx 547 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c q_vector->rx.total_bytes += total_rx_bytes; rx 803 drivers/net/ethernet/intel/ixgbevf/ethtool.c if (adapter->q_vector[0]->tx.count && adapter->q_vector[0]->rx.count) rx 825 drivers/net/ethernet/intel/ixgbevf/ethtool.c adapter->q_vector[0]->rx.count && ec->tx_coalesce_usecs) rx 856 drivers/net/ethernet/intel/ixgbevf/ethtool.c if (q_vector->tx.count && !q_vector->rx.count) rx 239 drivers/net/ethernet/intel/ixgbevf/ixgbevf.h struct ixgbevf_ring_container rx, tx; rx 1248 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c q_vector->rx.total_packets += total_rx_packets; rx 1249 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c q_vector->rx.total_bytes += total_rx_bytes; rx 1282 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c if (q_vector->rx.count > 1) rx 1283 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c per_ring_budget = max(budget/q_vector->rx.count, 1); rx 1287 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c ixgbevf_for_each_ring(ring, q_vector->rx) { rx 1356 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c ixgbevf_for_each_ring(ring, q_vector->rx) rx 1362 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c if (q_vector->tx.ring && !q_vector->rx.ring) { rx 1463 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c ixgbevf_update_itr(q_vector, &q_vector->rx); rx 1465 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c current_itr = max(q_vector->rx.itr, q_vector->tx.itr); rx 
1518 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c if (q_vector->rx.ring || q_vector->tx.ring) rx 1542 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c if (q_vector->tx.ring && q_vector->rx.ring) { rx 1546 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c } else if (q_vector->rx.ring) { rx 1628 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c if (!adapter->q_vector[i]->rx.ring && rx 2789 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c ixgbevf_add_ring(ring, &q_vector->rx); rx 2831 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c ixgbevf_for_each_ring(ring, q_vector->rx) rx 3207 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c if (qv->rx.ring || qv->tx.ring) rx 216 drivers/net/ethernet/lantiq_xrx200.c int rx = 0; rx 219 drivers/net/ethernet/lantiq_xrx200.c while (rx < budget) { rx 226 drivers/net/ethernet/lantiq_xrx200.c rx++; rx 232 drivers/net/ethernet/lantiq_xrx200.c if (rx < budget) { rx 237 drivers/net/ethernet/lantiq_xrx200.c return rx; rx 509 drivers/net/ethernet/marvell/mv643xx_eth.c int rx; rx 511 drivers/net/ethernet/marvell/mv643xx_eth.c rx = 0; rx 512 drivers/net/ethernet/marvell/mv643xx_eth.c while (rx < budget && rxq->rx_desc_count) { rx 535 drivers/net/ethernet/marvell/mv643xx_eth.c rx++; rx 591 drivers/net/ethernet/marvell/mv643xx_eth.c if (rx < budget) rx 594 drivers/net/ethernet/marvell/mv643xx_eth.c return rx; rx 605 drivers/net/ethernet/marvell/mv643xx_eth.c int rx; rx 622 drivers/net/ethernet/marvell/mv643xx_eth.c rx = rxq->rx_used_desc++; rx 626 drivers/net/ethernet/marvell/mv643xx_eth.c rx_desc = rxq->rx_desc_area + rx; rx 633 drivers/net/ethernet/marvell/mv643xx_eth.c rxq->rx_skb[rx] = skb; rx 568 drivers/net/ethernet/marvell/octeontx2/af/mbox.h } rx; rx 1581 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c if (req->rx.vtag_type > 7 || req->vtag_size > VTAGSIZE_T8) rx 1584 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c if (req->rx.capture_vtag) rx 1586 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c if (req->rx.strip_vtag) rx 1590 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c NIX_AF_LFX_RX_VTAG_TYPEX(nixlf, req->rx.vtag_type), regval); rx 2226 drivers/net/ethernet/marvell/sky2.c static enum flow_control sky2_flow(int rx, int tx) rx 2228 drivers/net/ethernet/marvell/sky2.c if (rx) rx 70 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ipsec_rxtx.c struct mlx5e_ipsec_rx_metadata rx; rx 307 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ipsec_rxtx.c sa_handle = be32_to_cpu(mdata->content.rx.sa_handle); rx 325 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ipsec_rxtx.c xo->proto = mdata->content.rx.nexthdr; rx 423 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c if (MLX5_GET(tls_extended_cap, buf, rx)) rx 685 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_action.c action->dest_tbl.tbl->rx.s_anchor->chunk->icm_addr : rx 236 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_domain.c dmn->info.rx.ste_type = MLX5DR_STE_TYPE_RX; rx 237 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_domain.c dmn->info.rx.default_icm_addr = dmn->info.caps.nic_rx_drop_address; rx 238 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_domain.c dmn->info.rx.drop_icm_addr = dmn->info.caps.nic_rx_drop_address; rx 256 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_domain.c dmn->info.rx.ste_type = MLX5DR_STE_TYPE_RX; rx 266 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_domain.c dmn->info.rx.default_icm_addr = vport_cap->icm_address_rx; rx 267 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_domain.c dmn->info.rx.drop_icm_addr = 
dmn->info.caps.esw_rx_drop_address; rx 178 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c bool inner, rx; rx 190 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c rx = nic_dmn->ste_type == MLX5DR_STE_TYPE_RX; rx 221 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c mlx5dr_ste_build_general_purpose(&sb[idx++], &mask, inner, rx); rx 224 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c mlx5dr_ste_build_register_0(&sb[idx++], &mask, inner, rx); rx 227 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c mlx5dr_ste_build_register_1(&sb[idx++], &mask, inner, rx); rx 233 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c dmn, inner, rx); rx 241 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c inner, rx); rx 247 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c mlx5dr_ste_build_eth_l2_src(&sb[idx++], &mask, inner, rx); rx 250 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c mlx5dr_ste_build_eth_l2_dst(&sb[idx++], &mask, inner, rx); rx 255 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c inner, rx); rx 259 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c inner, rx); rx 263 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c inner, rx); rx 267 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c inner, rx); rx 271 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c inner, rx); rx 277 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c inner, rx); rx 280 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c mlx5dr_ste_build_eth_l4_misc(&sb[idx++], &mask, inner, rx); rx 283 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c mlx5dr_ste_build_mpls(&sb[idx++], &mask, inner, rx); rx 287 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c inner, rx); rx 296 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c inner, rx); rx 301 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c mlx5dr_ste_build_gre(&sb[idx++], &mask, inner, rx); rx 312 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c mlx5dr_ste_build_eth_l2_tnl(&sb[idx++], &mask, inner, rx); rx 317 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c &mask, inner, rx); rx 323 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c mlx5dr_ste_build_eth_l2_src(&sb[idx++], &mask, inner, rx); rx 326 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c mlx5dr_ste_build_eth_l2_dst(&sb[idx++], &mask, inner, rx); rx 331 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c inner, rx); rx 335 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c inner, rx); rx 339 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c inner, rx); rx 343 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c inner, rx); rx 347 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c inner, rx); rx 351 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c mlx5dr_ste_build_eth_l4_misc(&sb[idx++], &mask, inner, rx); rx 354 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c mlx5dr_ste_build_mpls(&sb[idx++], &mask, inner, rx); rx 357 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c mlx5dr_ste_build_flex_parser_0(&sb[idx++], &mask, inner, rx); rx 361 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c mlx5dr_ste_build_empty_always_hit(&sb[idx++], rx); rx 468 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c ret = dr_matcher_connect(dmn, 
&matcher->rx, rx 469 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c next_matcher ? &next_matcher->rx : NULL, rx 470 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c prev_matcher ? &prev_matcher->rx : NULL); rx 503 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c dr_matcher_uninit_nic(&matcher->rx); rx 513 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c dr_matcher_uninit_nic(&matcher->rx); rx 577 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c ret = dr_matcher_init_nic(matcher, &matcher->rx); rx 588 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c dr_matcher_uninit_nic(&matcher->rx); rx 615 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c matcher->rx.nic_tbl = &tbl->rx; rx 616 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c ret = dr_matcher_init_nic(matcher, &matcher->rx); rx 623 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c matcher->rx.nic_tbl = &tbl->rx; rx 729 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c ret = dr_matcher_disconnect(dmn, &tbl->rx, rx 730 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c next_matcher ? &next_matcher->rx : NULL, rx 731 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_matcher.c prev_matcher ? &prev_matcher->rx : NULL); rx 947 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_rule.c dr_rule_destroy_rule_nic(rule, &rule->rx); rx 958 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_rule.c dr_rule_destroy_rule_nic(rule, &rule->rx); rx 1150 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_rule.c ret = dr_rule_create_rule_nic(rule, &rule->rx, param, rx 1163 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_rule.c dr_rule_destroy_rule_nic(rule, &rule->rx); rx 1194 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_rule.c rule->rx.nic_matcher = &matcher->rx; rx 1195 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_rule.c ret = dr_rule_create_rule_nic(rule, &rule->rx, &param, rx 1204 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_rule.c rule->rx.nic_matcher = &matcher->rx; rx 85 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c #define DR_STE_CALC_LU_TYPE(lookup_type, rx, inner) \ rx 87 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c (rx) ? 
MLX5DR_STE_LU_TYPE_##lookup_type##_D : \ rx 1102 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool inner, bool rx) rx 1110 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 1112 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->lu_type = DR_STE_CALC_LU_TYPE(ETHL2_SRC_DST, rx, inner); rx 1148 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool inner, bool rx) rx 1152 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 1154 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->lu_type = DR_STE_CALC_LU_TYPE(ETHL3_IPV6_DST, rx, inner); rx 1188 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool inner, bool rx) rx 1192 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 1194 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->lu_type = DR_STE_CALC_LU_TYPE(ETHL3_IPV6_SRC, rx, inner); rx 1261 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool inner, bool rx) rx 1265 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 1267 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->lu_type = DR_STE_CALC_LU_TYPE(ETHL3_IPV4_5_TUPLE, rx, inner); rx 1413 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool inner, bool rx) rx 1416 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 1418 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->lu_type = DR_STE_CALC_LU_TYPE(ETHL2_SRC, rx, inner); rx 1450 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool inner, bool rx) rx 1454 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 1456 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->lu_type = DR_STE_CALC_LU_TYPE(ETHL2_DST, rx, inner); rx 1536 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c struct mlx5dr_match_param *mask, bool inner, bool rx) rx 1540 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 1570 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool inner, bool rx) rx 1574 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 1576 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->lu_type = DR_STE_CALC_LU_TYPE(ETHL3_IPV4_MISC, rx, inner); rx 1630 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool inner, bool rx) rx 1634 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 1636 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->lu_type = DR_STE_CALC_LU_TYPE(ETHL4, rx, inner); rx 1648 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c void mlx5dr_ste_build_empty_always_hit(struct mlx5dr_ste_build *sb, bool rx) rx 1650 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 1685 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool inner, bool rx) rx 1689 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 1691 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->lu_type = DR_STE_CALC_LU_TYPE(MPLS_FIRST, rx, inner); rx 1732 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c struct mlx5dr_match_param *mask, bool inner, bool rx) rx 1736 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 1813 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool inner, bool rx) rx 1817 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 1973 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool inner, bool rx) rx 1981 
drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 2017 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool inner, bool rx) rx 2021 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 2067 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool inner, bool rx) rx 2071 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 2073 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->lu_type = DR_STE_CALC_LU_TYPE(ETHL4_MISC, rx, inner); rx 2131 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool inner, bool rx) rx 2135 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 2175 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool inner, bool rx) rx 2179 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 2219 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool inner, bool rx) rx 2223 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 2296 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool inner, bool rx) rx 2307 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c sb->rx = rx; rx 27 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_table.c last_htbl = last_matcher->rx.e_anchor; rx 29 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_table.c last_htbl = tbl->rx.s_anchor; rx 31 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_table.c tbl->rx.default_icm_addr = action ? rx 32 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_table.c action->dest_tbl.tbl->rx.s_anchor->chunk->icm_addr : rx 33 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_table.c tbl->rx.nic_dmn->default_icm_addr; rx 36 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_table.c info.miss_icm_addr = tbl->rx.default_icm_addr; rx 39 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_table.c tbl->rx.nic_dmn, rx 92 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_table.c dr_table_uninit_nic(&tbl->rx); rx 102 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_table.c dr_table_uninit_nic(&tbl->rx); rx 155 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_table.c ret = dr_table_init_nic(tbl->dmn, &tbl->rx); rx 166 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_table.c dr_table_uninit_nic(&tbl->rx); rx 181 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_table.c tbl->rx.nic_dmn = &tbl->dmn->info.rx; rx 182 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_table.c ret = dr_table_init_nic(tbl->dmn, &tbl->rx); rx 191 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_table.c tbl->rx.nic_dmn = &tbl->dmn->info.rx; rx 218 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_table.c if (tbl->rx.s_anchor) rx 219 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_table.c icm_addr_rx = tbl->rx.s_anchor->chunk->icm_addr; rx 182 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h u8 rx:1; rx 283 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 286 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 289 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 292 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 295 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 298 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 301 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 304 
drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 307 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 310 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 313 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 316 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 319 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 323 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 326 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 329 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 332 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 335 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 339 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool inner, bool rx); rx 340 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h void mlx5dr_ste_build_empty_always_hit(struct mlx5dr_ste_build *sb, bool rx); rx 638 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h struct mlx5dr_domain_rx_tx rx; rx 670 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h struct mlx5dr_table_rx_tx rx; rx 695 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h struct mlx5dr_matcher_rx_tx rx; rx 780 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h struct mlx5dr_rule_rx_tx rx; rx 293 drivers/net/ethernet/micrel/ks8851.c __le16 rx = 0; rx 295 drivers/net/ethernet/micrel/ks8851.c ks8851_rdreg(ks, MK_OP(reg & 2 ? 0xC : 0x3, reg), (u8 *)&rx, 2); rx 296 drivers/net/ethernet/micrel/ks8851.c return le16_to_cpu(rx); rx 310 drivers/net/ethernet/micrel/ks8851.c __le32 rx = 0; rx 314 drivers/net/ethernet/micrel/ks8851.c ks8851_rdreg(ks, MK_OP(0xf, reg), (u8 *)&rx, 4); rx 315 drivers/net/ethernet/micrel/ks8851.c return le32_to_cpu(rx); rx 921 drivers/net/ethernet/micrel/ksz884x.c struct ksz_desc_rx_stat rx; rx 927 drivers/net/ethernet/micrel/ksz884x.c struct ksz_desc_rx_buf rx; rx 1592 drivers/net/ethernet/micrel/ksz884x.c status.rx.hw_owned = 0; rx 1622 drivers/net/ethernet/micrel/ksz884x.c desc->sw.buf.rx.buf_size = len; rx 3204 drivers/net/ethernet/micrel/ksz884x.c static void set_flow_ctrl(struct ksz_hw *hw, int rx, int tx) rx 3211 drivers/net/ethernet/micrel/ksz884x.c if (rx) rx 3230 drivers/net/ethernet/micrel/ksz884x.c int rx; rx 3236 drivers/net/ethernet/micrel/ksz884x.c rx = tx = 0; rx 3238 drivers/net/ethernet/micrel/ksz884x.c rx = tx = 1; rx 3241 drivers/net/ethernet/micrel/ksz884x.c rx = tx = 1; rx 3249 drivers/net/ethernet/micrel/ksz884x.c rx = 1; rx 3252 drivers/net/ethernet/micrel/ksz884x.c set_flow_ctrl(hw, rx, tx); rx 3841 drivers/net/ethernet/micrel/ksz884x.c previous->sw.buf.rx.end_of_ring = 1; rx 4991 drivers/net/ethernet/micrel/ksz884x.c packet_len = status.rx.frame_len - 4; rx 5046 drivers/net/ethernet/micrel/ksz884x.c if (status.rx.hw_owned) rx 5050 drivers/net/ethernet/micrel/ksz884x.c if (status.rx.last_desc && status.rx.first_desc) { rx 5082 drivers/net/ethernet/micrel/ksz884x.c if (status.rx.hw_owned) rx 5087 drivers/net/ethernet/micrel/ksz884x.c int p = HW_TO_DEV_PORT(status.rx.src_port); rx 5095 drivers/net/ethernet/micrel/ksz884x.c if (status.rx.last_desc && status.rx.first_desc) { rx 5127 drivers/net/ethernet/micrel/ksz884x.c if (status.rx.hw_owned) rx 5132 
drivers/net/ethernet/micrel/ksz884x.c int p = HW_TO_DEV_PORT(status.rx.src_port); rx 5140 drivers/net/ethernet/micrel/ksz884x.c if (status.rx.last_desc && status.rx.first_desc) { rx 5146 drivers/net/ethernet/micrel/ksz884x.c if (!status.rx.error || (status.data & rx 94 drivers/net/ethernet/microchip/enc28j60.c struct spi_transfer rx = { rx 105 drivers/net/ethernet/microchip/enc28j60.c spi_message_add_tail(&rx, &msg); rx 497 drivers/net/ethernet/microchip/lan743x_ethtool.c for (i = 0; i < ARRAY_SIZE(adapter->rx); i++) rx 498 drivers/net/ethernet/microchip/lan743x_ethtool.c data[data_index++] = (u64)(adapter->rx[i].frame_count); rx 197 drivers/net/ethernet/microchip/lan743x_main.c struct lan743x_rx *rx = context; rx 198 drivers/net/ethernet/microchip/lan743x_main.c struct lan743x_adapter *adapter = rx->adapter; rx 203 drivers/net/ethernet/microchip/lan743x_main.c INT_BIT_DMA_RX_(rx->channel_number)); rx 206 drivers/net/ethernet/microchip/lan743x_main.c if (int_sts & INT_BIT_DMA_RX_(rx->channel_number)) { rx 207 drivers/net/ethernet/microchip/lan743x_main.c u32 rx_frame_bit = DMAC_INT_BIT_RXFRM_(rx->channel_number); rx 224 drivers/net/ethernet/microchip/lan743x_main.c napi_schedule(&rx->napi); rx 232 drivers/net/ethernet/microchip/lan743x_main.c INT_BIT_DMA_RX_(rx->channel_number)); rx 247 drivers/net/ethernet/microchip/lan743x_main.c lan743x_rx_isr(&adapter->rx[channel], rx 656 drivers/net/ethernet/microchip/lan743x_main.c &adapter->rx[index]); rx 1892 drivers/net/ethernet/microchip/lan743x_main.c static int lan743x_rx_next_index(struct lan743x_rx *rx, int index) rx 1894 drivers/net/ethernet/microchip/lan743x_main.c return ((++index) % rx->ring_size); rx 1897 drivers/net/ethernet/microchip/lan743x_main.c static struct sk_buff *lan743x_rx_allocate_skb(struct lan743x_rx *rx) rx 1902 drivers/net/ethernet/microchip/lan743x_main.c return __netdev_alloc_skb(rx->adapter->netdev, rx 1906 drivers/net/ethernet/microchip/lan743x_main.c static int lan743x_rx_init_ring_element(struct lan743x_rx *rx, int index, rx 1914 drivers/net/ethernet/microchip/lan743x_main.c descriptor = &rx->ring_cpu_ptr[index]; rx 1915 drivers/net/ethernet/microchip/lan743x_main.c buffer_info = &rx->buffer_info[index]; rx 1919 drivers/net/ethernet/microchip/lan743x_main.c buffer_info->dma_ptr = dma_map_single(&rx->adapter->pdev->dev, rx 1923 drivers/net/ethernet/microchip/lan743x_main.c if (dma_mapping_error(&rx->adapter->pdev->dev, rx 1940 drivers/net/ethernet/microchip/lan743x_main.c static void lan743x_rx_reuse_ring_element(struct lan743x_rx *rx, int index) rx 1945 drivers/net/ethernet/microchip/lan743x_main.c descriptor = &rx->ring_cpu_ptr[index]; rx 1946 drivers/net/ethernet/microchip/lan743x_main.c buffer_info = &rx->buffer_info[index]; rx 1956 drivers/net/ethernet/microchip/lan743x_main.c static void lan743x_rx_release_ring_element(struct lan743x_rx *rx, int index) rx 1961 drivers/net/ethernet/microchip/lan743x_main.c descriptor = &rx->ring_cpu_ptr[index]; rx 1962 drivers/net/ethernet/microchip/lan743x_main.c buffer_info = &rx->buffer_info[index]; rx 1967 drivers/net/ethernet/microchip/lan743x_main.c dma_unmap_single(&rx->adapter->pdev->dev, rx 1982 drivers/net/ethernet/microchip/lan743x_main.c static int lan743x_rx_process_packet(struct lan743x_rx *rx) rx 1993 drivers/net/ethernet/microchip/lan743x_main.c current_head_index = *rx->head_cpu_ptr; rx 1994 drivers/net/ethernet/microchip/lan743x_main.c if (current_head_index < 0 || current_head_index >= rx->ring_size) rx 1997 drivers/net/ethernet/microchip/lan743x_main.c if 
(rx->last_head < 0 || rx->last_head >= rx->ring_size) rx 2000 drivers/net/ethernet/microchip/lan743x_main.c if (rx->last_head != current_head_index) { rx 2001 drivers/net/ethernet/microchip/lan743x_main.c descriptor = &rx->ring_cpu_ptr[rx->last_head]; rx 2008 drivers/net/ethernet/microchip/lan743x_main.c first_index = rx->last_head; rx 2010 drivers/net/ethernet/microchip/lan743x_main.c last_index = rx->last_head; rx 2014 drivers/net/ethernet/microchip/lan743x_main.c index = lan743x_rx_next_index(rx, first_index); rx 2016 drivers/net/ethernet/microchip/lan743x_main.c descriptor = &rx->ring_cpu_ptr[index]; rx 2024 drivers/net/ethernet/microchip/lan743x_main.c index = lan743x_rx_next_index(rx, index); rx 2028 drivers/net/ethernet/microchip/lan743x_main.c descriptor = &rx->ring_cpu_ptr[last_index]; rx 2031 drivers/net/ethernet/microchip/lan743x_main.c int index = lan743x_rx_next_index(rx, rx 2034 drivers/net/ethernet/microchip/lan743x_main.c descriptor = &rx->ring_cpu_ptr[index]; rx 2066 drivers/net/ethernet/microchip/lan743x_main.c new_skb = lan743x_rx_allocate_skb(rx); rx 2072 drivers/net/ethernet/microchip/lan743x_main.c lan743x_rx_reuse_ring_element(rx, first_index); rx 2076 drivers/net/ethernet/microchip/lan743x_main.c buffer_info = &rx->buffer_info[first_index]; rx 2078 drivers/net/ethernet/microchip/lan743x_main.c descriptor = &rx->ring_cpu_ptr[first_index]; rx 2082 drivers/net/ethernet/microchip/lan743x_main.c dma_unmap_single(&rx->adapter->pdev->dev, rx 2094 drivers/net/ethernet/microchip/lan743x_main.c rx->adapter->netdev); rx 2095 drivers/net/ethernet/microchip/lan743x_main.c lan743x_rx_init_ring_element(rx, first_index, new_skb); rx 2108 drivers/net/ethernet/microchip/lan743x_main.c lan743x_rx_reuse_ring_element(rx, rx 2110 drivers/net/ethernet/microchip/lan743x_main.c index = lan743x_rx_next_index(rx, rx 2116 drivers/net/ethernet/microchip/lan743x_main.c lan743x_rx_reuse_ring_element(rx, rx 2118 drivers/net/ethernet/microchip/lan743x_main.c index = lan743x_rx_next_index(rx, rx 2126 drivers/net/ethernet/microchip/lan743x_main.c descriptor = &rx->ring_cpu_ptr[extension_index]; rx 2127 drivers/net/ethernet/microchip/lan743x_main.c buffer_info = &rx->buffer_info[extension_index]; rx 2132 drivers/net/ethernet/microchip/lan743x_main.c lan743x_rx_reuse_ring_element(rx, extension_index); rx 2149 drivers/net/ethernet/microchip/lan743x_main.c napi_gro_receive(&rx->napi, skb); rx 2154 drivers/net/ethernet/microchip/lan743x_main.c rx->last_tail = real_last_index; rx 2155 drivers/net/ethernet/microchip/lan743x_main.c rx->last_head = lan743x_rx_next_index(rx, real_last_index); rx 2163 drivers/net/ethernet/microchip/lan743x_main.c struct lan743x_rx *rx = container_of(napi, struct lan743x_rx, napi); rx 2164 drivers/net/ethernet/microchip/lan743x_main.c struct lan743x_adapter *adapter = rx->adapter; rx 2168 drivers/net/ethernet/microchip/lan743x_main.c if (rx->vector_flags & LAN743X_VECTOR_FLAG_SOURCE_STATUS_W2C) { rx 2171 drivers/net/ethernet/microchip/lan743x_main.c DMAC_INT_BIT_RXFRM_(rx->channel_number)); rx 2175 drivers/net/ethernet/microchip/lan743x_main.c int rx_process_result = lan743x_rx_process_packet(rx); rx 2187 drivers/net/ethernet/microchip/lan743x_main.c rx->frame_count += count; rx 2194 drivers/net/ethernet/microchip/lan743x_main.c if (rx->vector_flags & LAN743X_VECTOR_FLAG_VECTOR_ENABLE_AUTO_SET) rx 2196 drivers/net/ethernet/microchip/lan743x_main.c if (rx->vector_flags & LAN743X_VECTOR_FLAG_SOURCE_ENABLE_AUTO_SET) { rx 2200 drivers/net/ethernet/microchip/lan743x_main.c 
INT_BIT_DMA_RX_(rx->channel_number)); rx 2204 drivers/net/ethernet/microchip/lan743x_main.c lan743x_csr_write(adapter, RX_TAIL(rx->channel_number), rx 2205 drivers/net/ethernet/microchip/lan743x_main.c rx_tail_flags | rx->last_tail); rx 2210 drivers/net/ethernet/microchip/lan743x_main.c static void lan743x_rx_ring_cleanup(struct lan743x_rx *rx) rx 2212 drivers/net/ethernet/microchip/lan743x_main.c if (rx->buffer_info && rx->ring_cpu_ptr) { rx 2215 drivers/net/ethernet/microchip/lan743x_main.c for (index = 0; index < rx->ring_size; index++) rx 2216 drivers/net/ethernet/microchip/lan743x_main.c lan743x_rx_release_ring_element(rx, index); rx 2219 drivers/net/ethernet/microchip/lan743x_main.c if (rx->head_cpu_ptr) { rx 2220 drivers/net/ethernet/microchip/lan743x_main.c pci_free_consistent(rx->adapter->pdev, rx 2221 drivers/net/ethernet/microchip/lan743x_main.c sizeof(*rx->head_cpu_ptr), rx 2222 drivers/net/ethernet/microchip/lan743x_main.c rx->head_cpu_ptr, rx 2223 drivers/net/ethernet/microchip/lan743x_main.c rx->head_dma_ptr); rx 2224 drivers/net/ethernet/microchip/lan743x_main.c rx->head_cpu_ptr = NULL; rx 2225 drivers/net/ethernet/microchip/lan743x_main.c rx->head_dma_ptr = 0; rx 2228 drivers/net/ethernet/microchip/lan743x_main.c kfree(rx->buffer_info); rx 2229 drivers/net/ethernet/microchip/lan743x_main.c rx->buffer_info = NULL; rx 2231 drivers/net/ethernet/microchip/lan743x_main.c if (rx->ring_cpu_ptr) { rx 2232 drivers/net/ethernet/microchip/lan743x_main.c pci_free_consistent(rx->adapter->pdev, rx 2233 drivers/net/ethernet/microchip/lan743x_main.c rx->ring_allocation_size, rx 2234 drivers/net/ethernet/microchip/lan743x_main.c rx->ring_cpu_ptr, rx 2235 drivers/net/ethernet/microchip/lan743x_main.c rx->ring_dma_ptr); rx 2236 drivers/net/ethernet/microchip/lan743x_main.c rx->ring_allocation_size = 0; rx 2237 drivers/net/ethernet/microchip/lan743x_main.c rx->ring_cpu_ptr = NULL; rx 2238 drivers/net/ethernet/microchip/lan743x_main.c rx->ring_dma_ptr = 0; rx 2241 drivers/net/ethernet/microchip/lan743x_main.c rx->ring_size = 0; rx 2242 drivers/net/ethernet/microchip/lan743x_main.c rx->last_head = 0; rx 2245 drivers/net/ethernet/microchip/lan743x_main.c static int lan743x_rx_ring_init(struct lan743x_rx *rx) rx 2253 drivers/net/ethernet/microchip/lan743x_main.c rx->ring_size = LAN743X_RX_RING_SIZE; rx 2254 drivers/net/ethernet/microchip/lan743x_main.c if (rx->ring_size <= 1) { rx 2258 drivers/net/ethernet/microchip/lan743x_main.c if (rx->ring_size & ~RX_CFG_B_RX_RING_LEN_MASK_) { rx 2262 drivers/net/ethernet/microchip/lan743x_main.c ring_allocation_size = ALIGN(rx->ring_size * rx 2266 drivers/net/ethernet/microchip/lan743x_main.c cpu_ptr = pci_zalloc_consistent(rx->adapter->pdev, rx 2272 drivers/net/ethernet/microchip/lan743x_main.c rx->ring_allocation_size = ring_allocation_size; rx 2273 drivers/net/ethernet/microchip/lan743x_main.c rx->ring_cpu_ptr = (struct lan743x_rx_descriptor *)cpu_ptr; rx 2274 drivers/net/ethernet/microchip/lan743x_main.c rx->ring_dma_ptr = dma_ptr; rx 2276 drivers/net/ethernet/microchip/lan743x_main.c cpu_ptr = kcalloc(rx->ring_size, sizeof(*rx->buffer_info), rx 2282 drivers/net/ethernet/microchip/lan743x_main.c rx->buffer_info = (struct lan743x_rx_buffer_info *)cpu_ptr; rx 2284 drivers/net/ethernet/microchip/lan743x_main.c cpu_ptr = pci_zalloc_consistent(rx->adapter->pdev, rx 2285 drivers/net/ethernet/microchip/lan743x_main.c sizeof(*rx->head_cpu_ptr), &dma_ptr); rx 2291 drivers/net/ethernet/microchip/lan743x_main.c rx->head_cpu_ptr = cpu_ptr; rx 2292 
drivers/net/ethernet/microchip/lan743x_main.c rx->head_dma_ptr = dma_ptr; rx 2293 drivers/net/ethernet/microchip/lan743x_main.c if (rx->head_dma_ptr & 0x3) { rx 2298 drivers/net/ethernet/microchip/lan743x_main.c rx->last_head = 0; rx 2299 drivers/net/ethernet/microchip/lan743x_main.c for (index = 0; index < rx->ring_size; index++) { rx 2300 drivers/net/ethernet/microchip/lan743x_main.c struct sk_buff *new_skb = lan743x_rx_allocate_skb(rx); rx 2302 drivers/net/ethernet/microchip/lan743x_main.c ret = lan743x_rx_init_ring_element(rx, index, new_skb); rx 2309 drivers/net/ethernet/microchip/lan743x_main.c lan743x_rx_ring_cleanup(rx); rx 2313 drivers/net/ethernet/microchip/lan743x_main.c static void lan743x_rx_close(struct lan743x_rx *rx) rx 2315 drivers/net/ethernet/microchip/lan743x_main.c struct lan743x_adapter *adapter = rx->adapter; rx 2318 drivers/net/ethernet/microchip/lan743x_main.c FCT_RX_CTL_DIS_(rx->channel_number)); rx 2320 drivers/net/ethernet/microchip/lan743x_main.c FCT_RX_CTL_EN_(rx->channel_number), rx 2324 drivers/net/ethernet/microchip/lan743x_main.c DMAC_CMD_STOP_R_(rx->channel_number)); rx 2325 drivers/net/ethernet/microchip/lan743x_main.c lan743x_dmac_rx_wait_till_stopped(adapter, rx->channel_number); rx 2328 drivers/net/ethernet/microchip/lan743x_main.c DMAC_INT_BIT_RXFRM_(rx->channel_number)); rx 2330 drivers/net/ethernet/microchip/lan743x_main.c INT_BIT_DMA_RX_(rx->channel_number)); rx 2331 drivers/net/ethernet/microchip/lan743x_main.c napi_disable(&rx->napi); rx 2333 drivers/net/ethernet/microchip/lan743x_main.c netif_napi_del(&rx->napi); rx 2335 drivers/net/ethernet/microchip/lan743x_main.c lan743x_rx_ring_cleanup(rx); rx 2338 drivers/net/ethernet/microchip/lan743x_main.c static int lan743x_rx_open(struct lan743x_rx *rx) rx 2340 drivers/net/ethernet/microchip/lan743x_main.c struct lan743x_adapter *adapter = rx->adapter; rx 2344 drivers/net/ethernet/microchip/lan743x_main.c rx->frame_count = 0; rx 2345 drivers/net/ethernet/microchip/lan743x_main.c ret = lan743x_rx_ring_init(rx); rx 2350 drivers/net/ethernet/microchip/lan743x_main.c &rx->napi, lan743x_rx_napi_poll, rx 2351 drivers/net/ethernet/microchip/lan743x_main.c rx->ring_size - 1); rx 2354 drivers/net/ethernet/microchip/lan743x_main.c DMAC_CMD_RX_SWR_(rx->channel_number)); rx 2356 drivers/net/ethernet/microchip/lan743x_main.c DMAC_CMD_RX_SWR_(rx->channel_number), rx 2361 drivers/net/ethernet/microchip/lan743x_main.c RX_BASE_ADDRH(rx->channel_number), rx 2362 drivers/net/ethernet/microchip/lan743x_main.c DMA_ADDR_HIGH32(rx->ring_dma_ptr)); rx 2364 drivers/net/ethernet/microchip/lan743x_main.c RX_BASE_ADDRL(rx->channel_number), rx 2365 drivers/net/ethernet/microchip/lan743x_main.c DMA_ADDR_LOW32(rx->ring_dma_ptr)); rx 2369 drivers/net/ethernet/microchip/lan743x_main.c RX_HEAD_WRITEBACK_ADDRH(rx->channel_number), rx 2370 drivers/net/ethernet/microchip/lan743x_main.c DMA_ADDR_HIGH32(rx->head_dma_ptr)); rx 2372 drivers/net/ethernet/microchip/lan743x_main.c RX_HEAD_WRITEBACK_ADDRL(rx->channel_number), rx 2373 drivers/net/ethernet/microchip/lan743x_main.c DMA_ADDR_LOW32(rx->head_dma_ptr)); rx 2384 drivers/net/ethernet/microchip/lan743x_main.c RX_CFG_A(rx->channel_number), data); rx 2387 drivers/net/ethernet/microchip/lan743x_main.c data = lan743x_csr_read(adapter, RX_CFG_B(rx->channel_number)); rx 2394 drivers/net/ethernet/microchip/lan743x_main.c data |= ((rx->ring_size) & RX_CFG_B_RX_RING_LEN_MASK_); rx 2399 drivers/net/ethernet/microchip/lan743x_main.c lan743x_csr_write(adapter, RX_CFG_B(rx->channel_number), data); rx 
2400 drivers/net/ethernet/microchip/lan743x_main.c rx->vector_flags = lan743x_intr_get_vector_flags(adapter, rx 2402 drivers/net/ethernet/microchip/lan743x_main.c (rx->channel_number)); rx 2406 drivers/net/ethernet/microchip/lan743x_main.c if (rx->vector_flags & LAN743X_VECTOR_FLAG_SOURCE_ENABLE_AUTO_CLEAR) rx 2408 drivers/net/ethernet/microchip/lan743x_main.c if (rx->vector_flags & LAN743X_VECTOR_FLAG_SOURCE_STATUS_AUTO_CLEAR) rx 2410 drivers/net/ethernet/microchip/lan743x_main.c if (rx->vector_flags & LAN743X_VECTOR_FLAG_SOURCE_STATUS_R2C) rx 2412 drivers/net/ethernet/microchip/lan743x_main.c if (rx->vector_flags & LAN743X_VECTOR_FLAG_SOURCE_ENABLE_R2C) rx 2414 drivers/net/ethernet/microchip/lan743x_main.c lan743x_csr_write(adapter, RX_CFG_C(rx->channel_number), data); rx 2416 drivers/net/ethernet/microchip/lan743x_main.c rx->last_tail = ((u32)(rx->ring_size - 1)); rx 2417 drivers/net/ethernet/microchip/lan743x_main.c lan743x_csr_write(adapter, RX_TAIL(rx->channel_number), rx 2418 drivers/net/ethernet/microchip/lan743x_main.c rx->last_tail); rx 2419 drivers/net/ethernet/microchip/lan743x_main.c rx->last_head = lan743x_csr_read(adapter, RX_HEAD(rx->channel_number)); rx 2420 drivers/net/ethernet/microchip/lan743x_main.c if (rx->last_head) { rx 2425 drivers/net/ethernet/microchip/lan743x_main.c napi_enable(&rx->napi); rx 2428 drivers/net/ethernet/microchip/lan743x_main.c INT_BIT_DMA_RX_(rx->channel_number)); rx 2430 drivers/net/ethernet/microchip/lan743x_main.c DMAC_INT_BIT_RXFRM_(rx->channel_number)); rx 2432 drivers/net/ethernet/microchip/lan743x_main.c DMAC_INT_BIT_RXFRM_(rx->channel_number)); rx 2434 drivers/net/ethernet/microchip/lan743x_main.c DMAC_CMD_START_R_(rx->channel_number)); rx 2438 drivers/net/ethernet/microchip/lan743x_main.c FCT_RX_CTL_RESET_(rx->channel_number)); rx 2440 drivers/net/ethernet/microchip/lan743x_main.c FCT_RX_CTL_RESET_(rx->channel_number), rx 2442 drivers/net/ethernet/microchip/lan743x_main.c lan743x_csr_write(adapter, FCT_FLOW(rx->channel_number), rx 2449 drivers/net/ethernet/microchip/lan743x_main.c FCT_RX_CTL_EN_(rx->channel_number)); rx 2453 drivers/net/ethernet/microchip/lan743x_main.c netif_napi_del(&rx->napi); rx 2454 drivers/net/ethernet/microchip/lan743x_main.c lan743x_rx_ring_cleanup(rx); rx 2468 drivers/net/ethernet/microchip/lan743x_main.c lan743x_rx_close(&adapter->rx[index]); rx 2506 drivers/net/ethernet/microchip/lan743x_main.c ret = lan743x_rx_open(&adapter->rx[index]); rx 2519 drivers/net/ethernet/microchip/lan743x_main.c if (adapter->rx[index].ring_cpu_ptr) rx 2520 drivers/net/ethernet/microchip/lan743x_main.c lan743x_rx_close(&adapter->rx[index]); rx 2702 drivers/net/ethernet/microchip/lan743x_main.c adapter->rx[index].adapter = adapter; rx 2703 drivers/net/ethernet/microchip/lan743x_main.c adapter->rx[index].channel_number = index; rx 719 drivers/net/ethernet/microchip/lan743x_main.h struct lan743x_rx rx[LAN743X_MAX_RX_CHANNELS]; rx 220 drivers/net/ethernet/moxa/moxart_ether.c int rx = 0; rx 222 drivers/net/ethernet/moxa/moxart_ether.c while (rx < budget) { rx 259 drivers/net/ethernet/moxa/moxart_ether.c rx++; rx 274 drivers/net/ethernet/moxa/moxart_ether.c if (rx < budget) rx 275 drivers/net/ethernet/moxa/moxart_ether.c napi_complete_done(napi, rx); rx 280 drivers/net/ethernet/moxa/moxart_ether.c return rx; rx 1190 drivers/net/ethernet/myricom/myri10ge/myri10ge.c myri10ge_alloc_rx_pages(struct myri10ge_priv *mgp, struct myri10ge_rx_buf *rx, rx 1200 drivers/net/ethernet/myricom/myri10ge/myri10ge.c if (unlikely(rx->watchdog_needed && 
!watchdog)) rx 1204 drivers/net/ethernet/myricom/myri10ge/myri10ge.c while (rx->fill_cnt != (rx->cnt + rx->mask + 1)) { rx 1205 drivers/net/ethernet/myricom/myri10ge/myri10ge.c idx = rx->fill_cnt & rx->mask; rx 1206 drivers/net/ethernet/myricom/myri10ge/myri10ge.c if (rx->page_offset + bytes <= MYRI10GE_ALLOC_SIZE) { rx 1208 drivers/net/ethernet/myricom/myri10ge/myri10ge.c get_page(rx->page); rx 1215 drivers/net/ethernet/myricom/myri10ge/myri10ge.c if (rx->fill_cnt - rx->cnt < 16) rx 1216 drivers/net/ethernet/myricom/myri10ge/myri10ge.c rx->watchdog_needed = 1; rx 1225 drivers/net/ethernet/myricom/myri10ge/myri10ge.c if (rx->fill_cnt - rx->cnt < 16) rx 1226 drivers/net/ethernet/myricom/myri10ge/myri10ge.c rx->watchdog_needed = 1; rx 1230 drivers/net/ethernet/myricom/myri10ge/myri10ge.c rx->page = page; rx 1231 drivers/net/ethernet/myricom/myri10ge/myri10ge.c rx->page_offset = 0; rx 1232 drivers/net/ethernet/myricom/myri10ge/myri10ge.c rx->bus = bus; rx 1235 drivers/net/ethernet/myricom/myri10ge/myri10ge.c rx->info[idx].page = rx->page; rx 1236 drivers/net/ethernet/myricom/myri10ge/myri10ge.c rx->info[idx].page_offset = rx->page_offset; rx 1239 drivers/net/ethernet/myricom/myri10ge/myri10ge.c dma_unmap_addr_set(&rx->info[idx], bus, rx->bus); rx 1240 drivers/net/ethernet/myricom/myri10ge/myri10ge.c rx->shadow[idx].addr_low = rx 1241 drivers/net/ethernet/myricom/myri10ge/myri10ge.c htonl(MYRI10GE_LOWPART_TO_U32(rx->bus) + rx->page_offset); rx 1242 drivers/net/ethernet/myricom/myri10ge/myri10ge.c rx->shadow[idx].addr_high = rx 1243 drivers/net/ethernet/myricom/myri10ge/myri10ge.c htonl(MYRI10GE_HIGHPART_TO_U32(rx->bus)); rx 1246 drivers/net/ethernet/myricom/myri10ge/myri10ge.c rx->page_offset += SKB_DATA_ALIGN(bytes); rx 1250 drivers/net/ethernet/myricom/myri10ge/myri10ge.c end_offset = rx->page_offset + bytes - 1; rx 1251 drivers/net/ethernet/myricom/myri10ge/myri10ge.c if ((unsigned)(rx->page_offset ^ end_offset) > 4095) rx 1252 drivers/net/ethernet/myricom/myri10ge/myri10ge.c rx->page_offset = end_offset & ~4095; rx 1254 drivers/net/ethernet/myricom/myri10ge/myri10ge.c rx->fill_cnt++; rx 1258 drivers/net/ethernet/myricom/myri10ge/myri10ge.c myri10ge_submit_8rx(&rx->lanai[idx - 7], rx 1259 drivers/net/ethernet/myricom/myri10ge/myri10ge.c &rx->shadow[idx - 7]); rx 1322 drivers/net/ethernet/myricom/myri10ge/myri10ge.c struct myri10ge_rx_buf *rx; rx 1329 drivers/net/ethernet/myricom/myri10ge/myri10ge.c rx = &ss->rx_small; rx 1332 drivers/net/ethernet/myricom/myri10ge/myri10ge.c rx = &ss->rx_big; rx 1337 drivers/net/ethernet/myricom/myri10ge/myri10ge.c idx = rx->cnt & rx->mask; rx 1338 drivers/net/ethernet/myricom/myri10ge/myri10ge.c va = page_address(rx->info[idx].page) + rx->info[idx].page_offset; rx 1345 drivers/net/ethernet/myricom/myri10ge/myri10ge.c myri10ge_unmap_rx_page(pdev, &rx->info[idx], bytes); rx 1346 drivers/net/ethernet/myricom/myri10ge/myri10ge.c put_page(rx->info[idx].page); rx 1347 drivers/net/ethernet/myricom/myri10ge/myri10ge.c rx->cnt++; rx 1348 drivers/net/ethernet/myricom/myri10ge/myri10ge.c idx = rx->cnt & rx->mask; rx 1356 drivers/net/ethernet/myricom/myri10ge/myri10ge.c myri10ge_unmap_rx_page(pdev, &rx->info[idx], bytes); rx 1357 drivers/net/ethernet/myricom/myri10ge/myri10ge.c skb_fill_page_desc(skb, i, rx->info[idx].page, rx 1358 drivers/net/ethernet/myricom/myri10ge/myri10ge.c rx->info[idx].page_offset, rx 1361 drivers/net/ethernet/myricom/myri10ge/myri10ge.c rx->cnt++; rx 1362 drivers/net/ethernet/myricom/myri10ge/myri10ge.c idx = rx->cnt & rx->mask; rx 1903 
drivers/net/ethernet/neterion/vxge/vxge-config.c u32 port, u32 *tx, u32 *rx) rx 1927 drivers/net/ethernet/neterion/vxge/vxge-config.c *rx = 1; rx 1938 drivers/net/ethernet/neterion/vxge/vxge-config.c u32 port, u32 tx, u32 rx) rx 1963 drivers/net/ethernet/neterion/vxge/vxge-config.c if (rx) rx 1889 drivers/net/ethernet/neterion/vxge/vxge-config.h u32 *rx); rx 1895 drivers/net/ethernet/neterion/vxge/vxge-config.h u32 rx); rx 150 drivers/net/ethernet/netronome/nfp/nfp_net_debugfs.c struct dentry *queues, *tx, *rx, *xdp; rx 166 drivers/net/ethernet/netronome/nfp/nfp_net_debugfs.c rx = debugfs_create_dir("rx", queues); rx 172 drivers/net/ethernet/netronome/nfp/nfp_net_debugfs.c debugfs_create_file(name, 0400, rx, rx 106 drivers/net/ethernet/pasemi/pasemi_mac.c return mac->rx; rx 432 drivers/net/ethernet/pasemi/pasemi_mac.c mac->rx = ring; rx 544 drivers/net/ethernet/pasemi/pasemi_mac.c struct pasemi_mac_rxring *rx = rx_ring(mac); rx 549 drivers/net/ethernet/pasemi/pasemi_mac.c info = &RX_DESC_INFO(rx, i); rx 562 drivers/net/ethernet/pasemi/pasemi_mac.c RX_BUFF(rx, i) = 0; rx 574 drivers/net/ethernet/pasemi/pasemi_mac.c mac->rx = NULL; rx 581 drivers/net/ethernet/pasemi/pasemi_mac.c struct pasemi_mac_rxring *rx = rx_ring(mac); rx 589 drivers/net/ethernet/pasemi/pasemi_mac.c struct pasemi_mac_buffer *info = &RX_DESC_INFO(rx, fill); rx 590 drivers/net/ethernet/pasemi/pasemi_mac.c u64 *buff = &RX_BUFF(rx, fill); rx 628 drivers/net/ethernet/pasemi/pasemi_mac.c struct pasemi_mac_rxring *rx = rx_ring(mac); rx 634 drivers/net/ethernet/pasemi/pasemi_mac.c pcnt = *rx->chan.status & PAS_STATUS_PCNT_M; rx 638 drivers/net/ethernet/pasemi/pasemi_mac.c if (*rx->chan.status & PAS_STATUS_TIMER) rx 641 drivers/net/ethernet/pasemi/pasemi_mac.c write_iob_reg(PAS_IOB_DMA_RXCH_RESET(mac->rx->chan.chno), reg); rx 693 drivers/net/ethernet/pasemi/pasemi_mac.c static int pasemi_mac_clean_rx(struct pasemi_mac_rxring *rx, rx 696 drivers/net/ethernet/pasemi/pasemi_mac.c const struct pasemi_dmachan *chan = &rx->chan; rx 697 drivers/net/ethernet/pasemi/pasemi_mac.c struct pasemi_mac *mac = rx->mac; rx 710 drivers/net/ethernet/pasemi/pasemi_mac.c spin_lock(&rx->lock); rx 712 drivers/net/ethernet/pasemi/pasemi_mac.c n = rx->next_to_clean; rx 714 drivers/net/ethernet/pasemi/pasemi_mac.c prefetch(&RX_DESC(rx, n)); rx 717 drivers/net/ethernet/pasemi/pasemi_mac.c macrx = RX_DESC(rx, n); rx 718 drivers/net/ethernet/pasemi/pasemi_mac.c prefetch(&RX_DESC(rx, n+4)); rx 731 drivers/net/ethernet/pasemi/pasemi_mac.c eval = (RX_DESC(rx, n+1) & XCT_RXRES_8B_EVAL_M) >> rx 735 drivers/net/ethernet/pasemi/pasemi_mac.c dma = (RX_DESC(rx, n+2) & XCT_PTR_ADDR_M); rx 736 drivers/net/ethernet/pasemi/pasemi_mac.c info = &RX_DESC_INFO(rx, buf_index); rx 776 drivers/net/ethernet/pasemi/pasemi_mac.c RX_DESC(rx, n) = 0; rx 777 drivers/net/ethernet/pasemi/pasemi_mac.c RX_DESC(rx, n+1) = 0; rx 782 drivers/net/ethernet/pasemi/pasemi_mac.c RX_BUFF(rx, buf_index) = 0; rx 799 drivers/net/ethernet/pasemi/pasemi_mac.c write_dma_reg(PAS_DMA_RXCHAN_INCR(mac->rx->chan.chno), count << 1); rx 1101 drivers/net/ethernet/pasemi/pasemi_mac.c write_iob_reg(PAS_IOB_DMA_RXCH_CFG(mac->rx->chan.chno), rx 1108 drivers/net/ethernet/pasemi/pasemi_mac.c PAS_MAC_IPC_CHNL_DCHNO(mac->rx->chan.chno) | rx 1109 drivers/net/ethernet/pasemi/pasemi_mac.c PAS_MAC_IPC_CHNL_BCH(mac->rx->chan.chno)); rx 1180 drivers/net/ethernet/pasemi/pasemi_mac.c ret = request_irq(mac->rx->chan.irq, pasemi_mac_rx_intr, 0, rx 1181 drivers/net/ethernet/pasemi/pasemi_mac.c mac->rx_irq_name, mac->rx); rx 1184 
drivers/net/ethernet/pasemi/pasemi_mac.c mac->rx->chan.irq, ret); rx 1320 drivers/net/ethernet/pasemi/pasemi_mac.c free_irq(mac->rx->chan.irq, mac->rx); rx 1587 drivers/net/ethernet/pasemi/pasemi_mac.c disable_irq(mac->rx->chan.irq); rx 1588 drivers/net/ethernet/pasemi/pasemi_mac.c pasemi_mac_rx_intr(mac->rx->chan.irq, mac->rx); rx 1589 drivers/net/ethernet/pasemi/pasemi_mac.c enable_irq(mac->rx->chan.irq); rx 1805 drivers/net/ethernet/pasemi/pasemi_mac.c pasemi_dma_free_chan(&mac->rx->chan); rx 77 drivers/net/ethernet/pasemi/pasemi_mac.h struct pasemi_mac_rxring *rx; rx 96 drivers/net/ethernet/pasemi/pasemi_mac.h #define RX_DESC(rx, num) ((rx)->chan.ring_virt[(num) & (RX_RING_SIZE-1)]) rx 97 drivers/net/ethernet/pasemi/pasemi_mac.h #define RX_DESC_INFO(rx, num) ((rx)->ring_info[(num) & (RX_RING_SIZE-1)]) rx 98 drivers/net/ethernet/pasemi/pasemi_mac.h #define RX_BUFF(rx, num) ((rx)->buffers[(num) & (RX_RING_SIZE-1)]) rx 79 drivers/net/ethernet/pasemi/pasemi_mac_ethtool.c ering->rx_pending = RING_USED(mac->rx)/4; rx 57 drivers/net/ethernet/pensando/ionic/ionic_lif.h struct ionic_rx_stats rx; rx 82 drivers/net/ethernet/pensando/ionic/ionic_lif.h #define q_to_rx_stats(q) (&q_to_qcq(q)->stats->rx) rx 186 drivers/net/ethernet/pensando/ionic/ionic_lif.h #define lif_to_rxstats(lif, i) ((lif)->rxqcqs[i].stats->rx) rx 272 drivers/net/ethernet/pensando/ionic/ionic_lif.h #define DEBUG_STATS_RX_BUFF_CNT(qcq) ((qcq)->stats->rx.buffers_posted++) rx 97 drivers/net/ethernet/pensando/ionic/ionic_stats.c rstats = &rxqcq->stats->rx; rx 5733 drivers/net/ethernet/qlogic/qed/qed_hsi.h u8 rx; rx 93 drivers/net/ethernet/qlogic/qed/qed_l2.c u8 rx = 0, tx = 0; rx 95 drivers/net/ethernet/qlogic/qed/qed_l2.c qed_vf_get_num_rxqs(p_hwfn, &rx); rx 98 drivers/net/ethernet/qlogic/qed/qed_l2.c p_l2_info->queues = max_t(u8, rx, tx); rx 1280 drivers/net/ethernet/qlogic/qed/qed_l2.c p_ramrod->filter_cmd_hdr.rx = p_filter_cmd->is_rx_filter ? 1 : 0; rx 1509 drivers/net/ethernet/qlogic/qlcnic/qlcnic.h struct __qlcnic_esw_statistics rx; rx 1387 drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c QLCNIC_QUERY_RX_COUNTER, &port_stats.rx); rx 1391 drivers/net/ethernet/qlogic/qlcnic/qlcnic_ethtool.c data = qlcnic_fill_stats(data, &port_stats.rx, QLCNIC_ESW_STATS); rx 794 drivers/net/ethernet/qlogic/qlcnic/qlcnic_sysfs.c &port_stats.rx); rx 829 drivers/net/ethernet/qlogic/qlcnic/qlcnic_sysfs.c &esw_stats.rx); rx 468 drivers/net/ethernet/realtek/8139cp.c int rx = 0; rx 472 drivers/net/ethernet/realtek/8139cp.c while (rx < budget) { rx 538 drivers/net/ethernet/realtek/8139cp.c rx++; rx 557 drivers/net/ethernet/realtek/8139cp.c if (rx < budget && napi_complete_done(napi, rx)) { rx 565 drivers/net/ethernet/realtek/8139cp.c return rx; rx 277 drivers/net/ethernet/socionext/sni_ave.c struct ave_desc_info rx; rx 302 drivers/net/ethernet/socionext/sni_ave.c addr = ((id == AVE_DESCID_TX) ? priv->tx.daddr : priv->rx.daddr) rx 320 drivers/net/ethernet/socionext/sni_ave.c addr = ((id == AVE_DESCID_TX) ? 
priv->tx.daddr : priv->rx.daddr) rx 585 drivers/net/ethernet/socionext/sni_ave.c skb = priv->rx.desc[entry].skbs; rx 609 drivers/net/ethernet/socionext/sni_ave.c ret = ave_dma_map(ndev, &priv->rx.desc[entry], rx 618 drivers/net/ethernet/socionext/sni_ave.c priv->rx.desc[entry].skbs = skb; rx 751 drivers/net/ethernet/socionext/sni_ave.c proc_idx = priv->rx.proc_idx; rx 752 drivers/net/ethernet/socionext/sni_ave.c done_idx = priv->rx.done_idx; rx 753 drivers/net/ethernet/socionext/sni_ave.c ndesc = priv->rx.ndesc; rx 776 drivers/net/ethernet/socionext/sni_ave.c skb = priv->rx.desc[proc_idx].skbs; rx 777 drivers/net/ethernet/socionext/sni_ave.c priv->rx.desc[proc_idx].skbs = NULL; rx 779 drivers/net/ethernet/socionext/sni_ave.c ave_dma_unmap(ndev, &priv->rx.desc[proc_idx], DMA_FROM_DEVICE); rx 796 drivers/net/ethernet/socionext/sni_ave.c priv->rx.proc_idx = proc_idx; rx 811 drivers/net/ethernet/socionext/sni_ave.c priv->rx.done_idx = done_idx; rx 903 drivers/net/ethernet/socionext/sni_ave.c ave_rx_receive(ndev, priv->rx.ndesc); rx 1282 drivers/net/ethernet/socionext/sni_ave.c priv->rx.desc = kcalloc(priv->rx.ndesc, sizeof(*priv->rx.desc), rx 1284 drivers/net/ethernet/socionext/sni_ave.c if (!priv->rx.desc) { rx 1302 drivers/net/ethernet/socionext/sni_ave.c priv->rx.proc_idx = 0; rx 1303 drivers/net/ethernet/socionext/sni_ave.c priv->rx.done_idx = 0; rx 1304 drivers/net/ethernet/socionext/sni_ave.c for (entry = 0; entry < priv->rx.ndesc; entry++) { rx 1309 drivers/net/ethernet/socionext/sni_ave.c (((priv->rx.ndesc * priv->desc_size) << 16) & AVE_RXDC0_SIZE), rx 1380 drivers/net/ethernet/socionext/sni_ave.c for (entry = 0; entry < priv->rx.ndesc; entry++) { rx 1381 drivers/net/ethernet/socionext/sni_ave.c if (!priv->rx.desc[entry].skbs) rx 1384 drivers/net/ethernet/socionext/sni_ave.c ave_dma_unmap(ndev, &priv->rx.desc[entry], DMA_FROM_DEVICE); rx 1385 drivers/net/ethernet/socionext/sni_ave.c dev_kfree_skb_any(priv->rx.desc[entry].skbs); rx 1386 drivers/net/ethernet/socionext/sni_ave.c priv->rx.desc[entry].skbs = NULL; rx 1388 drivers/net/ethernet/socionext/sni_ave.c priv->rx.proc_idx = 0; rx 1389 drivers/net/ethernet/socionext/sni_ave.c priv->rx.done_idx = 0; rx 1392 drivers/net/ethernet/socionext/sni_ave.c kfree(priv->rx.desc); rx 1625 drivers/net/ethernet/socionext/sni_ave.c priv->rx.daddr = AVE_RXDM_64; rx 1630 drivers/net/ethernet/socionext/sni_ave.c priv->rx.daddr = AVE_RXDM_32; rx 1638 drivers/net/ethernet/socionext/sni_ave.c priv->rx.ndesc = AVE_NR_RXDESC; rx 74 drivers/net/ethernet/stmicro/stmmac/dwmac-rk.c #define DELAY_ENABLE(soc, tx, rx) \ rx 76 drivers/net/ethernet/stmicro/stmmac/dwmac-rk.c ((rx) ? soc##_GMAC_RXCLK_DLY_ENABLE : soc##_GMAC_RXCLK_DLY_DISABLE)) rx 403 drivers/net/ethernet/stmicro/stmmac/dwmac4_descs.c static void dwmac4_display_ring(void *head, unsigned int size, bool rx) rx 408 drivers/net/ethernet/stmicro/stmmac/dwmac4_descs.c pr_info("%s descriptor ring:\n", rx ? 
"RX" : "TX"); rx 18 drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c u32 tx, rx; rx 21 drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c rx = readl(ioaddr + XGMAC_RX_CONFIG); rx 24 drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c rx |= XGMAC_CORE_INIT_RX; rx 45 drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c writel(rx, ioaddr + XGMAC_RX_CONFIG); rx 52 drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c u32 rx = readl(ioaddr + XGMAC_RX_CONFIG); rx 56 drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c rx |= XGMAC_CONFIG_RE; rx 59 drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c rx &= ~XGMAC_CONFIG_RE; rx 63 drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c writel(rx, ioaddr + XGMAC_RX_CONFIG); rx 420 drivers/net/ethernet/stmicro/stmmac/enh_desc.c static void enh_desc_display_ring(void *head, unsigned int size, bool rx) rx 425 drivers/net/ethernet/stmicro/stmmac/enh_desc.c pr_info("Extended %s descriptor ring:\n", rx ? "RX" : "TX"); rx 80 drivers/net/ethernet/stmicro/stmmac/hwif.h void (*display_ring)(void *head, unsigned int size, bool rx); rx 272 drivers/net/ethernet/stmicro/stmmac/norm_desc.c static void ndesc_display_ring(void *head, unsigned int size, bool rx) rx 277 drivers/net/ethernet/stmicro/stmmac/norm_desc.c pr_info("%s descriptor ring:\n", rx ? "RX" : "TX"); rx 9386 drivers/net/ethernet/sun/niu.c int rx) rx 9394 drivers/net/ethernet/sun/niu.c arr = (rx ? p->rxchan_per_port : p->txchan_per_port); rx 41 drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c if (desc_data->rx.hdr.pa.pages) rx 42 drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c put_page(desc_data->rx.hdr.pa.pages); rx 44 drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c if (desc_data->rx.hdr.pa_unmap.pages) { rx 45 drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c dma_unmap_page(pdata->dev, desc_data->rx.hdr.pa_unmap.pages_dma, rx 46 drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c desc_data->rx.hdr.pa_unmap.pages_len, rx 48 drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c put_page(desc_data->rx.hdr.pa_unmap.pages); rx 51 drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c if (desc_data->rx.buf.pa.pages) rx 52 drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c put_page(desc_data->rx.buf.pa.pages); rx 54 drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c if (desc_data->rx.buf.pa_unmap.pages) { rx 55 drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c dma_unmap_page(pdata->dev, desc_data->rx.buf.pa_unmap.pages_dma, rx 56 drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c desc_data->rx.buf.pa_unmap.pages_len, rx 58 drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c put_page(desc_data->rx.buf.pa_unmap.pages); rx 62 drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c memset(&desc_data->rx, 0, sizeof(desc_data->rx)); rx 411 drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c xlgmac_set_buffer_data(&desc_data->rx.hdr, &ring->rx_hdr_pa, rx 415 drivers/net/ethernet/synopsys/dwc-xlgmac-desc.c xlgmac_set_buffer_data(&desc_data->rx.buf, &ring->rx_buf_pa, rx 1110 drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c hdr_dma = desc_data->rx.hdr.dma_base + desc_data->rx.hdr.dma_off; rx 1111 drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c buf_dma = desc_data->rx.buf.dma_base + desc_data->rx.buf.dma_off; rx 2704 drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c desc_data->rx.hdr_len = XLGMAC_GET_REG_BITS_LE(dma_desc->desc2, rx 2707 drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c if (desc_data->rx.hdr_len) rx 2739 drivers/net/ethernet/synopsys/dwc-xlgmac-hw.c desc_data->rx.len = XLGMAC_GET_REG_BITS_LE(dma_desc->desc3, rx 996 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c skb 
= napi_alloc_skb(napi, desc_data->rx.hdr.dma_len); rx 1003 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c dma_sync_single_range_for_cpu(pdata->dev, desc_data->rx.hdr.dma_base, rx 1004 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c desc_data->rx.hdr.dma_off, rx 1005 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c desc_data->rx.hdr.dma_len, rx 1008 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c packet = page_address(desc_data->rx.hdr.pa.pages) + rx 1009 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c desc_data->rx.hdr.pa.pages_offset; rx 1010 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c copy_len = (desc_data->rx.hdr_len) ? desc_data->rx.hdr_len : len; rx 1011 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c copy_len = min(desc_data->rx.hdr.dma_len, copy_len); rx 1019 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c desc_data->rx.buf.dma_base, rx 1020 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c desc_data->rx.buf.dma_off, rx 1021 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c desc_data->rx.buf.dma_len, rx 1025 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c desc_data->rx.buf.pa.pages, rx 1026 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c desc_data->rx.buf.pa.pages_offset, rx 1027 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c len, desc_data->rx.buf.dma_len); rx 1028 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c desc_data->rx.buf.pa.pages = NULL; rx 1188 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c dma_desc_len = desc_data->rx.len - len; rx 1199 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c desc_data->rx.buf.dma_base, rx 1200 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c desc_data->rx.buf.dma_off, rx 1201 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c desc_data->rx.buf.dma_len, rx 1206 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c desc_data->rx.buf.pa.pages, rx 1207 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c desc_data->rx.buf.pa.pages_offset, rx 1209 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c desc_data->rx.buf.dma_len); rx 1210 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c desc_data->rx.buf.pa.pages = NULL; rx 298 drivers/net/ethernet/synopsys/dwc-xlgmac.h struct xlgmac_rx_desc_data rx; rx 531 drivers/net/ethernet/ti/cpsw_ethtool.c static int cpsw_update_channels_res(struct cpsw_priv *priv, int ch_num, int rx, rx 540 drivers/net/ethernet/ti/cpsw_ethtool.c if (rx) { rx 551 drivers/net/ethernet/ti/cpsw_ethtool.c vch = rx ? *ch : 7 - *ch; rx 552 drivers/net/ethernet/ti/cpsw_ethtool.c vec[*ch].ch = cpdma_chan_create(cpsw->dma, vch, handler, rx); rx 563 drivers/net/ethernet/ti/cpsw_ethtool.c (rx ? "rx" : "tx")); rx 575 drivers/net/ethernet/ti/cpsw_ethtool.c (rx ? 
"rx" : "tx")); rx 680 drivers/net/ethernet/ti/davinci_cpdma.c int rx, int desc_num, rx 691 drivers/net/ethernet/ti/davinci_cpdma.c if (rx) { rx 1665 drivers/net/ethernet/ti/netcp_core.c config.u.rx.einfo_present = true; rx 1666 drivers/net/ethernet/ti/netcp_core.c config.u.rx.psinfo_present = true; rx 1667 drivers/net/ethernet/ti/netcp_core.c config.u.rx.err_mode = DMA_DROP; rx 1668 drivers/net/ethernet/ti/netcp_core.c config.u.rx.desc_type = DMA_DESC_HOST; rx 1669 drivers/net/ethernet/ti/netcp_core.c config.u.rx.psinfo_at_sop = false; rx 1670 drivers/net/ethernet/ti/netcp_core.c config.u.rx.sop_offset = NETCP_SOP_OFFSET; rx 1671 drivers/net/ethernet/ti/netcp_core.c config.u.rx.dst_q = netcp->rx_queue_id; rx 1672 drivers/net/ethernet/ti/netcp_core.c config.u.rx.thresh = DMA_THRESH_NONE; rx 1677 drivers/net/ethernet/ti/netcp_core.c config.u.rx.fdq[i] = last_fdq; rx 991 drivers/net/ethernet/toshiba/ps3_gelic_net.c if (card->vlan[i].rx == vid) { rx 1573 drivers/net/ethernet/toshiba/ps3_gelic_net.c int rx; rx 1577 drivers/net/ethernet/toshiba/ps3_gelic_net.c .rx = GELIC_LV1_VLAN_RX_ETHERNET_0 rx 1581 drivers/net/ethernet/toshiba/ps3_gelic_net.c .rx = GELIC_LV1_VLAN_RX_WIRELESS rx 1597 drivers/net/ethernet/toshiba/ps3_gelic_net.c card->vlan[i].rx = 0; rx 1605 drivers/net/ethernet/toshiba/ps3_gelic_net.c vlan_id_ix[i].rx, rx 1611 drivers/net/ethernet/toshiba/ps3_gelic_net.c vlan_id_ix[i].rx, status); rx 1613 drivers/net/ethernet/toshiba/ps3_gelic_net.c card->vlan[i].rx = 0; rx 1616 drivers/net/ethernet/toshiba/ps3_gelic_net.c card->vlan[i].rx = (u16)v1; rx 1619 drivers/net/ethernet/toshiba/ps3_gelic_net.c i, card->vlan[i].tx, card->vlan[i].rx); rx 1631 drivers/net/ethernet/toshiba/ps3_gelic_net.c card->vlan[GELIC_PORT_WIRELESS].rx = 0; rx 258 drivers/net/ethernet/toshiba/ps3_gelic_net.h u16 rx; rx 748 drivers/net/ethernet/tundra/tsi108_eth.c int rx = data->rxtail; rx 751 drivers/net/ethernet/tundra/tsi108_eth.c if (data->rxring[rx].misc & TSI108_RX_OWN) rx 754 drivers/net/ethernet/tundra/tsi108_eth.c skb = data->rxskbs[rx]; rx 759 drivers/net/ethernet/tundra/tsi108_eth.c if (data->rxring[rx].misc & TSI108_RX_BAD) { rx 762 drivers/net/ethernet/tundra/tsi108_eth.c if (data->rxring[rx].misc & TSI108_RX_CRC) rx 764 drivers/net/ethernet/tundra/tsi108_eth.c if (data->rxring[rx].misc & TSI108_RX_OVER) rx 775 drivers/net/ethernet/tundra/tsi108_eth.c dev->name, data->rxring[rx].len); rx 776 drivers/net/ethernet/tundra/tsi108_eth.c for (i = 0; i < data->rxring[rx].len; i++) rx 781 drivers/net/ethernet/tundra/tsi108_eth.c skb_put(skb, data->rxring[rx].len); rx 795 drivers/net/ethernet/tundra/tsi108_eth.c int rx = data->rxhead; rx 799 drivers/net/ethernet/tundra/tsi108_eth.c data->rxskbs[rx] = skb; rx 803 drivers/net/ethernet/tundra/tsi108_eth.c data->rxring[rx].buf0 = dma_map_single(&data->pdev->dev, rx 812 drivers/net/ethernet/tundra/tsi108_eth.c data->rxring[rx].blen = TSI108_RX_SKB_SIZE; rx 813 drivers/net/ethernet/tundra/tsi108_eth.c data->rxring[rx].misc = TSI108_RX_OWN | TSI108_RX_INT; rx 1414 drivers/net/ethernet/tundra/tsi108_eth.c int rx = data->rxtail; rx 1417 drivers/net/ethernet/tundra/tsi108_eth.c skb = data->rxskbs[rx]; rx 543 drivers/net/ethernet/via/via-velocity.c vptr->rx.dirty = vptr->rx.filled = vptr->rx.curr = 0; rx 565 drivers/net/ethernet/via/via-velocity.c vptr->rx.ring[i].rdesc0.len |= OWNED_BY_NIC; rx 568 drivers/net/ethernet/via/via-velocity.c writel(vptr->rx.pool_dma, ®s->RDBaseLo); rx 1392 drivers/net/ethernet/via/via-velocity.c writel(vptr->rx.pool_dma, ®s->RDBaseLo); rx 1437 
drivers/net/ethernet/via/via-velocity.c if (vptr->rx.filled < 4) rx 1442 drivers/net/ethernet/via/via-velocity.c unusable = vptr->rx.filled & 0x0003; rx 1443 drivers/net/ethernet/via/via-velocity.c dirty = vptr->rx.dirty - unusable; rx 1444 drivers/net/ethernet/via/via-velocity.c for (avail = vptr->rx.filled & 0xfffc; avail; avail--) { rx 1446 drivers/net/ethernet/via/via-velocity.c vptr->rx.ring[dirty].rdesc0.len |= OWNED_BY_NIC; rx 1449 drivers/net/ethernet/via/via-velocity.c writew(vptr->rx.filled & 0xfffc, &regs->RBRDU); rx 1450 drivers/net/ethernet/via/via-velocity.c vptr->rx.filled = unusable; rx 1483 drivers/net/ethernet/via/via-velocity.c vptr->rx.ring = pool; rx 1484 drivers/net/ethernet/via/via-velocity.c vptr->rx.pool_dma = pool_dma; rx 1501 drivers/net/ethernet/via/via-velocity.c vptr->rx.buf_sz = (mtu <= ETH_DATA_LEN) ? PKT_BUF_SZ : mtu + 32; rx 1516 drivers/net/ethernet/via/via-velocity.c struct rx_desc *rd = &(vptr->rx.ring[idx]); rx 1517 drivers/net/ethernet/via/via-velocity.c struct velocity_rd_info *rd_info = &(vptr->rx.info[idx]); rx 1519 drivers/net/ethernet/via/via-velocity.c rd_info->skb = netdev_alloc_skb(vptr->netdev, vptr->rx.buf_sz + 64); rx 1530 drivers/net/ethernet/via/via-velocity.c vptr->rx.buf_sz, DMA_FROM_DEVICE); rx 1537 drivers/net/ethernet/via/via-velocity.c rd->size = cpu_to_le16(vptr->rx.buf_sz) | RX_INTEN; rx 1546 drivers/net/ethernet/via/via-velocity.c int dirty = vptr->rx.dirty, done = 0; rx 1549 drivers/net/ethernet/via/via-velocity.c struct rx_desc *rd = vptr->rx.ring + dirty; rx 1555 drivers/net/ethernet/via/via-velocity.c if (!vptr->rx.info[dirty].skb) { rx 1561 drivers/net/ethernet/via/via-velocity.c } while (dirty != vptr->rx.curr); rx 1564 drivers/net/ethernet/via/via-velocity.c vptr->rx.dirty = dirty; rx 1565 drivers/net/ethernet/via/via-velocity.c vptr->rx.filled += done; rx 1582 drivers/net/ethernet/via/via-velocity.c if (vptr->rx.info == NULL) rx 1586 drivers/net/ethernet/via/via-velocity.c struct velocity_rd_info *rd_info = &(vptr->rx.info[i]); rx 1587 drivers/net/ethernet/via/via-velocity.c struct rx_desc *rd = vptr->rx.ring + i; rx 1593 drivers/net/ethernet/via/via-velocity.c dma_unmap_single(vptr->dev, rd_info->skb_dma, vptr->rx.buf_sz, rx 1601 drivers/net/ethernet/via/via-velocity.c kfree(vptr->rx.info); rx 1602 drivers/net/ethernet/via/via-velocity.c vptr->rx.info = NULL; rx 1616 drivers/net/ethernet/via/via-velocity.c vptr->rx.info = kcalloc(vptr->options.numrx, rx 1618 drivers/net/ethernet/via/via-velocity.c if (!vptr->rx.info) rx 1675 drivers/net/ethernet/via/via-velocity.c dma_free_coherent(vptr->dev, size, vptr->rx.ring, vptr->rx.pool_dma); rx 2032 drivers/net/ethernet/via/via-velocity.c struct velocity_rd_info *rd_info = &(vptr->rx.info[idx]); rx 2033 drivers/net/ethernet/via/via-velocity.c struct rx_desc *rd = &(vptr->rx.ring[idx]); rx 2050 drivers/net/ethernet/via/via-velocity.c vptr->rx.buf_sz, DMA_FROM_DEVICE); rx 2057 drivers/net/ethernet/via/via-velocity.c dma_unmap_single(vptr->dev, rd_info->skb_dma, vptr->rx.buf_sz, rx 2061 drivers/net/ethernet/via/via-velocity.c vptr->rx.buf_sz, DMA_FROM_DEVICE); rx 2091 drivers/net/ethernet/via/via-velocity.c int rd_curr = vptr->rx.curr; rx 2095 drivers/net/ethernet/via/via-velocity.c struct rx_desc *rd = vptr->rx.ring + rd_curr; rx 2097 drivers/net/ethernet/via/via-velocity.c if (!vptr->rx.info[rd_curr].skb) rx 2128 drivers/net/ethernet/via/via-velocity.c vptr->rx.curr = rd_curr; rx 2285 drivers/net/ethernet/via/via-velocity.c struct rx_info rx; rx 2311 
drivers/net/ethernet/via/via-velocity.c rx = vptr->rx; rx 2314 drivers/net/ethernet/via/via-velocity.c vptr->rx = tmp_vptr->rx; rx 2317 drivers/net/ethernet/via/via-velocity.c tmp_vptr->rx = rx; rx 1462 drivers/net/ethernet/via/via-velocity.h } rx; rx 279 drivers/net/fddi/skfp/fplustm.c smc->hw.fp.rx[QUEUE_R1] = queue = &smc->hw.fp.rx_q[QUEUE_R1] ; rx 286 drivers/net/fddi/skfp/fplustm.c smc->hw.fp.rx[QUEUE_R2] = queue = &smc->hw.fp.rx_q[QUEUE_R2] ; rx 186 drivers/net/fddi/skfp/h/fplustm.h struct s_smt_rx_queue *rx[USED_QUEUES] ; rx 398 drivers/net/fddi/skfp/hwmtm.c queue = smc->hw.fp.rx[QUEUE_R1] ; rx 577 drivers/net/fddi/skfp/hwmtm.c phys = repair_rxd_ring(smc,smc->hw.fp.rx[QUEUE_R1]) ; rx 1056 drivers/net/fddi/skfp/hwmtm.c queue = smc->hw.fp.rx[QUEUE_R1] ; rx 1473 drivers/net/fddi/skfp/hwmtm.c queue = smc->hw.fp.rx[QUEUE_R1] ; rx 250 drivers/net/fjes/fjes_hw.c result = fjes_hw_alloc_epbuf(&buf_pair->rx); rx 257 drivers/net/fjes/fjes_hw.c fjes_hw_setup_epbuf(&buf_pair->rx, mac, rx 296 drivers/net/fjes/fjes_hw.c fjes_hw_free_epbuf(&hw->ep_shm_info[epidx].rx); rx 494 drivers/net/fjes/fjes_hw.c buf_pair->rx.size); rx 508 drivers/net/fjes/fjes_hw.c req_buf->share_buffer.buffer[idx++] = buf_pair->rx.size; rx 509 drivers/net/fjes/fjes_hw.c page_count = buf_pair->rx.size / EP_BUFFER_INFO_SIZE; rx 511 drivers/net/fjes/fjes_hw.c addr = ((u8 *)(buf_pair->rx.buffer)) + rx 804 drivers/net/fjes/fjes_hw.c info = hw->ep_shm_info[epidx].rx.info; rx 252 drivers/net/fjes/fjes_hw.h } tx, rx; rx 594 drivers/net/fjes/fjes_main.c !(hw->ep_shm_info[epid].rx.info->v1i.rx_status & rx 700 drivers/net/fjes/fjes_main.c &adapter->hw.ep_shm_info[dest_epid].rx, 0)) { rx 710 drivers/net/fjes/fjes_main.c &adapter->hw.ep_shm_info[dest_epid].rx, rx 722 drivers/net/fjes/fjes_main.c &adapter->hw.ep_shm_info[dest_epid].rx, rx 948 drivers/net/fjes/fjes_main.c if (hw->ep_shm_info[src_epid].rx.info->v1i.rx_status & rx 1070 drivers/net/fjes/fjes_main.c &hw->ep_shm_info[cur_epid].rx)) rx 1088 drivers/net/fjes/fjes_main.c &adapter->hw.ep_shm_info[*cur_epid].rx, psize); rx 1095 drivers/net/fjes/fjes_main.c fjes_hw_epbuf_rx_curpkt_drop(&adapter->hw.ep_shm_info[cur_epid].rx); rx 1408 drivers/net/fjes/fjes_main.c stop_req_done = hw->ep_shm_info[epidx].rx.info->v1i.rx_status & rx 102 drivers/net/fjes/fjes_trace.h __field(u64, rx) rx 106 drivers/net/fjes/fjes_trace.h void *tx, *rx; rx 109 drivers/net/fjes/fjes_trace.h rx = (void *)buf_pair->rx.buffer; rx 113 drivers/net/fjes/fjes_trace.h __entry->rx_size = buf_pair->rx.size; rx 116 drivers/net/fjes/fjes_trace.h __entry->rx = page_to_phys(vmalloc_to_page(rx)) + rx 117 drivers/net/fjes/fjes_trace.h offset_in_page(rx); rx 121 drivers/net/fjes/fjes_trace.h __entry->rx, __entry->rx_size) rx 44 drivers/net/netdevsim/ipsec.c i, (sap->rx ? 
'r' : 't'), sap->ipaddr[0], rx 175 drivers/net/netdevsim/ipsec.c sa.rx = true; rx 40 drivers/net/netdevsim/netdevsim.h bool rx; rx 2094 drivers/net/phy/phy_device.c void phy_set_sym_pause(struct phy_device *phydev, bool rx, bool tx, rx 2099 drivers/net/phy/phy_device.c if (rx && tx && autoneg) rx 2118 drivers/net/phy/phy_device.c void phy_set_asym_pause(struct phy_device *phydev, bool rx, bool tx) rx 2129 drivers/net/phy/phy_device.c if (rx) { rx 198 drivers/net/usb/asix.h struct asix_rx_fixup_info *rx); rx 66 drivers/net/usb/asix_common.c static void reset_asix_rx_fixup_info(struct asix_rx_fixup_info *rx) rx 73 drivers/net/usb/asix_common.c if (rx->ax_skb) { rx 75 drivers/net/usb/asix_common.c kfree_skb(rx->ax_skb); rx 76 drivers/net/usb/asix_common.c rx->ax_skb = NULL; rx 82 drivers/net/usb/asix_common.c rx->remaining = 0; rx 83 drivers/net/usb/asix_common.c rx->split_head = false; rx 84 drivers/net/usb/asix_common.c rx->header = 0; rx 88 drivers/net/usb/asix_common.c struct asix_rx_fixup_info *rx) rx 102 drivers/net/usb/asix_common.c if (rx->remaining && (rx->remaining + sizeof(u32) <= skb->len)) { rx 103 drivers/net/usb/asix_common.c offset = ((rx->remaining + 1) & 0xfffe); rx 104 drivers/net/usb/asix_common.c rx->header = get_unaligned_le32(skb->data + offset); rx 107 drivers/net/usb/asix_common.c size = (u16)(rx->header & 0x7ff); rx 108 drivers/net/usb/asix_common.c if (size != ((~rx->header >> 16) & 0x7ff)) { rx 110 drivers/net/usb/asix_common.c rx->remaining); rx 111 drivers/net/usb/asix_common.c reset_asix_rx_fixup_info(rx); rx 118 drivers/net/usb/asix_common.c if (!rx->remaining) { rx 120 drivers/net/usb/asix_common.c rx->header = get_unaligned_le16( rx 122 drivers/net/usb/asix_common.c rx->split_head = true; rx 127 drivers/net/usb/asix_common.c if (rx->split_head == true) { rx 128 drivers/net/usb/asix_common.c rx->header |= (get_unaligned_le16( rx 130 drivers/net/usb/asix_common.c rx->split_head = false; rx 133 drivers/net/usb/asix_common.c rx->header = get_unaligned_le32(skb->data + rx 139 drivers/net/usb/asix_common.c size = (u16)(rx->header & 0x7ff); rx 140 drivers/net/usb/asix_common.c if (size != ((~rx->header >> 16) & 0x7ff)) { rx 142 drivers/net/usb/asix_common.c rx->header, offset); rx 143 drivers/net/usb/asix_common.c reset_asix_rx_fixup_info(rx); rx 149 drivers/net/usb/asix_common.c reset_asix_rx_fixup_info(rx); rx 158 drivers/net/usb/asix_common.c rx->ax_skb = netdev_alloc_skb_ip_align(dev->net, size); rx 160 drivers/net/usb/asix_common.c rx->remaining = size; rx 163 drivers/net/usb/asix_common.c if (rx->remaining > skb->len - offset) { rx 165 drivers/net/usb/asix_common.c rx->remaining -= copy_length; rx 167 drivers/net/usb/asix_common.c copy_length = rx->remaining; rx 168 drivers/net/usb/asix_common.c rx->remaining = 0; rx 171 drivers/net/usb/asix_common.c if (rx->ax_skb) { rx 172 drivers/net/usb/asix_common.c skb_put_data(rx->ax_skb, skb->data + offset, rx 174 drivers/net/usb/asix_common.c if (!rx->remaining) { rx 175 drivers/net/usb/asix_common.c usbnet_skb_return(dev, rx->ax_skb); rx 176 drivers/net/usb/asix_common.c rx->ax_skb = NULL; rx 186 drivers/net/usb/asix_common.c reset_asix_rx_fixup_info(rx); rx 196 drivers/net/usb/asix_common.c struct asix_rx_fixup_info *rx = &dp->rx_fixup_info; rx 198 drivers/net/usb/asix_common.c return asix_rx_fixup_internal(dev, skb, rx); rx 203 drivers/net/usb/asix_common.c struct asix_rx_fixup_info *rx; rx 208 drivers/net/usb/asix_common.c rx = &dp->rx_fixup_info; rx 210 drivers/net/usb/asix_common.c if (rx->ax_skb) { rx 211 
drivers/net/usb/asix_common.c kfree_skb(rx->ax_skb); rx 212 drivers/net/usb/asix_common.c rx->ax_skb = NULL; rx 359 drivers/net/usb/ax88172a.c struct asix_rx_fixup_info *rx = &dp->rx_fixup_info; rx 361 drivers/net/usb/ax88172a.c return asix_rx_fixup_internal(dev, skb, rx); rx 631 drivers/net/usb/catc.c u8 rx = RxEnable | RxPolarity | RxMultiCast; rx 641 drivers/net/usb/catc.c rx |= (!catc->is_f5u011) ? RxPromisc : AltRxPromisc; rx 657 drivers/net/usb/catc.c catc_set_reg_async(catc, RxUnit, rx); rx 661 drivers/net/usb/catc.c if (catc->rxmode[0] != rx) { rx 662 drivers/net/usb/catc.c catc->rxmode[0] = rx; rx 1609 drivers/net/usb/hso.c icount->rx = cnow.rx; rx 337 drivers/net/veth.c struct veth_rq_stats rx; rx 344 drivers/net/veth.c veth_stats_rx(&rx, dev); rx 345 drivers/net/veth.c tot->rx_dropped = rx.xdp_drops; rx 346 drivers/net/veth.c tot->rx_bytes = rx.xdp_bytes; rx 347 drivers/net/veth.c tot->rx_packets = rx.xdp_packets; rx 356 drivers/net/veth.c veth_stats_rx(&rx, peer); rx 357 drivers/net/veth.c tot->tx_bytes += rx.xdp_bytes; rx 358 drivers/net/veth.c tot->tx_packets += rx.xdp_packets; rx 328 drivers/net/wan/ixp4xx_hss.c int tx, txdone, rx, rxfree; rx 647 drivers/net/wan/ixp4xx_hss.c qmgr_disable_irq(queue_ids[port->id].rx); rx 655 drivers/net/wan/ixp4xx_hss.c unsigned int rxq = queue_ids[port->id].rx; rx 922 drivers/net/wan/ixp4xx_hss.c err = qmgr_request_queue(queue_ids[port->id].rx, RX_DESCS, 0, 0, rx 948 drivers/net/wan/ixp4xx_hss.c qmgr_release_queue(queue_ids[port->id].rx); rx 959 drivers/net/wan/ixp4xx_hss.c qmgr_release_queue(queue_ids[port->id].rx); rx 1080 drivers/net/wan/ixp4xx_hss.c qmgr_set_irq(queue_ids[port->id].rx, QUEUE_IRQ_SRC_NOT_EMPTY, rx 1116 drivers/net/wan/ixp4xx_hss.c qmgr_disable_irq(queue_ids[port->id].rx); rx 1124 drivers/net/wan/ixp4xx_hss.c while (queue_get_desc(queue_ids[port->id].rx, port, 0) >= 0) rx 484 drivers/net/wan/z85230.c .rx = z8530_rx, rx 604 drivers/net/wan/z85230.c .rx = z8530_dma_rx, rx 610 drivers/net/wan/z85230.c .rx = z8530_rx, rx 678 drivers/net/wan/z85230.c .rx = z8530_rx_clear, rx 737 drivers/net/wan/z85230.c irqs->rx(&dev->chanA); rx 749 drivers/net/wan/z85230.c irqs->rx(&dev->chanB); rx 255 drivers/net/wan/z85230.h void (*rx)(struct z8530_channel *); rx 203 drivers/net/wimax/i2400m/debugfs.c d_level_register_debugfs("dl_", rx, dentry); rx 980 drivers/net/wimax/i2400m/driver.c D_SUBMODULE_DEFINE(rx), rx 157 drivers/net/wimax/i2400m/rx.c #define D_SUBMODULE rx rx 91 drivers/net/wimax/i2400m/usb-rx.c #define D_SUBMODULE rx rx 364 drivers/net/wimax/i2400m/usb.c D_SUBMODULE_DEFINE(rx), rx 380 drivers/net/wimax/i2400m/usb.c d_level_register_debugfs("dl_", rx, dentry); rx 2110 drivers/net/wireless/ath/ath10k/htt.h struct htt_rx_fragment_indication *rx, rx 2152 drivers/net/wireless/ath/ath10k/htt.h struct htt_rx_fragment_indication *rx, rx 2158 drivers/net/wireless/ath/ath10k/htt.h return htt->rx_ops->htt_rx_proc_rx_frag_ind(htt, rx, skb); rx 2088 drivers/net/wireless/ath/ath10k/htt_rx.c struct htt_rx_indication_hl *rx) rx 2104 drivers/net/wireless/ath/ath10k/htt_rx.c if (!(rx->fw_desc.flags & FW_RX_DESC_FLAGS_FIRST_MSDU)) rx 2107 drivers/net/wireless/ath/ath10k/htt_rx.c num_mpdu_ranges = MS(__le32_to_cpu(rx->hdr.info1), rx 2110 drivers/net/wireless/ath/ath10k/htt_rx.c rx_desc = (struct htt_hl_rx_desc *)&rx->mpdu_ranges[num_mpdu_ranges]; rx 2116 drivers/net/wireless/ath/ath10k/htt_rx.c tid = MS(rx->hdr.info0, HTT_RX_INDICATION_INFO0_EXT_TID); rx 2145 drivers/net/wireless/ath/ath10k/htt_rx.c struct htt_rx_indication_hl *rx, rx 2168 
drivers/net/wireless/ath/ath10k/htt_rx.c peer_id = __le16_to_cpu(rx->hdr.peer_id); rx 2169 drivers/net/wireless/ath/ath10k/htt_rx.c tid = MS(rx->hdr.info0, HTT_RX_INDICATION_INFO0_EXT_TID); rx 2180 drivers/net/wireless/ath/ath10k/htt_rx.c num_mpdu_ranges = MS(__le32_to_cpu(rx->hdr.info1), rx 2182 drivers/net/wireless/ath/ath10k/htt_rx.c mpdu_ranges = htt_rx_ind_get_mpdu_ranges_hl(rx); rx 2183 drivers/net/wireless/ath/ath10k/htt_rx.c fw_desc = &rx->fw_desc; rx 2204 drivers/net/wireless/ath/ath10k/htt_rx.c rx_desc = (struct htt_hl_rx_desc *)&rx->mpdu_ranges[num_mpdu_ranges]; rx 2213 drivers/net/wireless/ath/ath10k/htt_rx.c first_msdu = rx->fw_desc.flags & FW_RX_DESC_FLAGS_FIRST_MSDU; rx 2219 drivers/net/wireless/ath/ath10k/htt_rx.c pn_invalid = ath10k_htt_rx_pn_check_replay_hl(ar, peer, rx); rx 2229 drivers/net/wireless/ath/ath10k/htt_rx.c tot_hdr_len = sizeof(struct htt_resp_hdr) + sizeof(rx->hdr) + rx 2230 drivers/net/wireless/ath/ath10k/htt_rx.c sizeof(rx->ppdu) + sizeof(rx->prefix) + rx 2231 drivers/net/wireless/ath/ath10k/htt_rx.c sizeof(rx->fw_desc) + rx 2240 drivers/net/wireless/ath/ath10k/htt_rx.c if (rx->ppdu.combined_rssi == 0) { rx 2246 drivers/net/wireless/ath/ath10k/htt_rx.c rx->ppdu.combined_rssi; rx 2264 drivers/net/wireless/ath/ath10k/htt_rx.c if (rx->fw_desc.flags & FW_RX_DESC_FLAGS_LAST_MSDU) rx 2439 drivers/net/wireless/ath/ath10k/htt_rx.c struct htt_rx_fragment_indication *rx, rx 2462 drivers/net/wireless/ath/ath10k/htt_rx.c peer_id = __le16_to_cpu(rx->peer_id); rx 2592 drivers/net/wireless/ath/ath10k/htt_rx.c struct htt_rx_indication *rx) rx 2601 drivers/net/wireless/ath/ath10k/htt_rx.c num_mpdu_ranges = MS(__le32_to_cpu(rx->hdr.info1), rx 2603 drivers/net/wireless/ath/ath10k/htt_rx.c peer_id = __le16_to_cpu(rx->hdr.peer_id); rx 2604 drivers/net/wireless/ath/ath10k/htt_rx.c tid = MS(rx->hdr.info0, HTT_RX_INDICATION_INFO0_EXT_TID); rx 2606 drivers/net/wireless/ath/ath10k/htt_rx.c mpdu_ranges = htt_rx_ind_get_mpdu_ranges(rx); rx 2609 drivers/net/wireless/ath/ath10k/htt_rx.c rx, struct_size(rx, mpdu_ranges, num_mpdu_ranges)); rx 2881 drivers/net/wireless/ath/ath10k/htt_rx.c struct htt_rx_offload_msdu *rx; rx 2890 drivers/net/wireless/ath/ath10k/htt_rx.c rx = (void *)msdu->data; rx 2892 drivers/net/wireless/ath/ath10k/htt_rx.c skb_put(msdu, sizeof(*rx)); rx 2893 drivers/net/wireless/ath/ath10k/htt_rx.c skb_pull(msdu, sizeof(*rx)); rx 2895 drivers/net/wireless/ath/ath10k/htt_rx.c if (skb_tailroom(msdu) < __le16_to_cpu(rx->msdu_len)) { rx 2901 drivers/net/wireless/ath/ath10k/htt_rx.c skb_put(msdu, __le16_to_cpu(rx->msdu_len)); rx 2921 drivers/net/wireless/ath/ath10k/htt_rx.c ath10k_htt_rx_h_channel(ar, status, NULL, rx->vdev_id); rx 15 drivers/net/wireless/ath/ath10k/wmi-ops.h void (*rx)(struct ath10k *ar, struct sk_buff *skb); rx 227 drivers/net/wireless/ath/ath10k/wmi-ops.h if (WARN_ON_ONCE(!ar->wmi.ops->rx)) rx 230 drivers/net/wireless/ath/ath10k/wmi-ops.h ar->wmi.ops->rx(ar, skb); rx 1391 drivers/net/wireless/ath/ath10k/wmi-tlv.c ath10k_wmi_pull_pdev_stats_rx(&src->rx, dst); rx 4287 drivers/net/wireless/ath/ath10k/wmi-tlv.c .rx = ath10k_wmi_tlv_op_rx, rx 2953 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_rx(&src->rx, dst); rx 3007 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_rx(&src->rx, dst); rx 3068 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_rx(&src->rx, dst); rx 3145 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_rx(&src->rx, dst); rx 3237 drivers/net/wireless/ath/ath10k/wmi.c 
ath10k_wmi_pull_pdev_stats_rx(&src->rx, dst); rx 5670 drivers/net/wireless/ath/ath10k/wmi.c u64 busy, total, tx, rx, rx_bss; rx 5684 drivers/net/wireless/ath/ath10k/wmi.c rx = __le64_to_cpu(ev->cycle_rx); rx 5689 drivers/net/wireless/ath/ath10k/wmi.c freq, noise_floor, busy, total, tx, rx, rx_bss); rx 8971 drivers/net/wireless/ath/ath10k/wmi.c .rx = ath10k_wmi_op_rx, rx 9040 drivers/net/wireless/ath/ath10k/wmi.c .rx = ath10k_wmi_10_1_op_rx, rx 9110 drivers/net/wireless/ath/ath10k/wmi.c .rx = ath10k_wmi_10_2_op_rx, rx 9178 drivers/net/wireless/ath/ath10k/wmi.c .rx = ath10k_wmi_10_2_op_rx, rx 9252 drivers/net/wireless/ath/ath10k/wmi.c .rx = ath10k_wmi_10_4_op_rx, rx 4640 drivers/net/wireless/ath/ath10k/wmi.h struct wmi_pdev_stats_rx rx; rx 4656 drivers/net/wireless/ath/ath10k/wmi.h struct wmi_pdev_stats_rx rx; rx 4670 drivers/net/wireless/ath/ath10k/wmi.h struct wmi_pdev_stats_rx rx; rx 4680 drivers/net/wireless/ath/ath10k/wmi.h struct wmi_pdev_stats_rx rx; rx 740 drivers/net/wireless/ath/ath5k/mac80211-ops.c u32 *tx, u32 *tx_max, u32 *rx, u32 *rx_max) rx 747 drivers/net/wireless/ath/ath5k/mac80211-ops.c *rx = *rx_max = ATH_RXBUF; rx 751 drivers/net/wireless/ath/ath5k/mac80211-ops.c static int ath5k_set_ringparam(struct ieee80211_hw *hw, u32 tx, u32 rx) rx 757 drivers/net/wireless/ath/ath5k/mac80211-ops.c if (rx != ATH_RXBUF) rx 3410 drivers/net/wireless/ath/ath6kl/cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 3416 drivers/net/wireless/ath/ath6kl/cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 3422 drivers/net/wireless/ath/ath6kl/cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 3428 drivers/net/wireless/ath/ath6kl/cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 318 drivers/net/wireless/ath/ath6kl/htc.h struct htc_rx_packet_info rx; rx 340 drivers/net/wireless/ath/ath6kl/htc.h void (*rx) (struct htc_target *, struct htc_packet *); rx 1282 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (packet->info.rx.rx_flags & HTC_RX_PKT_NO_RECYCLE) { rx 1285 drivers/net/wireless/ath/ath6kl/htc_mbox.c ep->ep_cb.rx(ep->target, packet); rx 1318 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet, packet->info.rx.exp_hdr, rx 1344 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->info.rx.indicat_flags |= rx 1435 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->info.rx.rx_flags = 0; rx 1436 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->info.rx.indicat_flags = 0; rx 1445 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->info.rx.rx_flags |= HTC_RX_PKT_NO_RECYCLE; rx 1456 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->info.rx.rx_flags |= HTC_RX_PKT_REFRESH_HDR; rx 1457 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->info.rx.exp_hdr = 0xFFFFFFFF; rx 1460 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->info.rx.exp_hdr = *lk_ahds; rx 1823 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (packet->info.rx.rx_flags & HTC_RX_PKT_REFRESH_HDR) { rx 1829 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->info.rx.exp_hdr = lk_ahd; rx 1853 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (lk_ahd != packet->info.rx.exp_hdr) { rx 1855 drivers/net/wireless/ath/ath6kl/htc_mbox.c __func__, packet, packet->info.rx.rx_flags); rx 1857 drivers/net/wireless/ath/ath6kl/htc_mbox.c "", &packet->info.rx.exp_hdr, 4); rx 1873 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (packet->info.rx.rx_flags & HTC_RX_PKT_IGNORE_LOOKAHEAD) { rx 1907 drivers/net/wireless/ath/ath6kl/htc_mbox.c endpoint->ep_cb.rx(endpoint->target, packet); rx 1972 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->info.rx.rx_flags |= rx 
1979 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->info.rx.rx_flags |= HTC_RX_PKT_PART_OF_BUNDLE; rx 2039 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->info.rx.indicat_flags |= rx 2044 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (packet->info.rx.rx_flags & HTC_RX_PKT_PART_OF_BUNDLE) rx 2101 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->info.rx.rx_flags |= rx 2283 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->info.rx.rx_flags = 0; rx 2284 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->info.rx.exp_hdr = look_ahead; rx 2713 drivers/net/wireless/ath/ath6kl/htc_mbox.c connect.ep_cb.rx = htc_ctrl_rx; rx 924 drivers/net/wireless/ath/ath6kl/htc_pipe.c ep->ep_cb.rx(ep->target, packet); rx 1598 drivers/net/wireless/ath/ath6kl/htc_pipe.c connect.ep_cb.rx = htc_rxctrl_complete; rx 332 drivers/net/wireless/ath/ath6kl/init.c connect.ep_cb.rx = ath6kl_rx; rx 741 drivers/net/wireless/ath/ath6kl/main.c stats->rx_pkt += le32_to_cpu(tgt_stats->stats.rx.pkt); rx 742 drivers/net/wireless/ath/ath6kl/main.c stats->rx_byte += le32_to_cpu(tgt_stats->stats.rx.byte); rx 743 drivers/net/wireless/ath/ath6kl/main.c stats->rx_ucast_pkt += le32_to_cpu(tgt_stats->stats.rx.ucast_pkt); rx 744 drivers/net/wireless/ath/ath6kl/main.c stats->rx_ucast_byte += le32_to_cpu(tgt_stats->stats.rx.ucast_byte); rx 745 drivers/net/wireless/ath/ath6kl/main.c stats->rx_mcast_pkt += le32_to_cpu(tgt_stats->stats.rx.mcast_pkt); rx 746 drivers/net/wireless/ath/ath6kl/main.c stats->rx_mcast_byte += le32_to_cpu(tgt_stats->stats.rx.mcast_byte); rx 747 drivers/net/wireless/ath/ath6kl/main.c stats->rx_bcast_pkt += le32_to_cpu(tgt_stats->stats.rx.bcast_pkt); rx 748 drivers/net/wireless/ath/ath6kl/main.c stats->rx_bcast_byte += le32_to_cpu(tgt_stats->stats.rx.bcast_byte); rx 749 drivers/net/wireless/ath/ath6kl/main.c stats->rx_frgment_pkt += le32_to_cpu(tgt_stats->stats.rx.frgment_pkt); rx 750 drivers/net/wireless/ath/ath6kl/main.c stats->rx_err += le32_to_cpu(tgt_stats->stats.rx.err); rx 751 drivers/net/wireless/ath/ath6kl/main.c stats->rx_crc_err += le32_to_cpu(tgt_stats->stats.rx.crc_err); rx 753 drivers/net/wireless/ath/ath6kl/main.c le32_to_cpu(tgt_stats->stats.rx.key_cache_miss); rx 754 drivers/net/wireless/ath/ath6kl/main.c stats->rx_decrypt_err += le32_to_cpu(tgt_stats->stats.rx.decrypt_err); rx 755 drivers/net/wireless/ath/ath6kl/main.c stats->rx_dupl_frame += le32_to_cpu(tgt_stats->stats.rx.dupl_frame); rx 757 drivers/net/wireless/ath/ath6kl/main.c rate = a_sle32_to_cpu(tgt_stats->stats.rx.ucast_rate); rx 1833 drivers/net/wireless/ath/ath6kl/wmi.h struct rx_stats rx; rx 414 drivers/net/wireless/ath/ath9k/ar9002_mac.c memset(&ads->u.rx, 0, sizeof(ads->u.rx)); rx 676 drivers/net/wireless/ath/ath9k/ar9003_phy.c void ar9003_hw_set_chain_masks(struct ath_hw *ah, u8 rx, u8 tx) rx 682 drivers/net/wireless/ath/ath9k/ar9003_phy.c REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx); rx 683 drivers/net/wireless/ath/ath9k/ar9003_phy.c REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx); rx 1025 drivers/net/wireless/ath/ath9k/ath9k.h struct ath_rx rx; rx 181 drivers/net/wireless/ath/ath9k/gpio.c (sc->rx.num_pkts < ATH_BTCOEX_STOMP_FTP_THRESH); rx 185 drivers/net/wireless/ath/ath9k/gpio.c sc->rx.num_pkts = 0; rx 174 drivers/net/wireless/ath/ath9k/htc.h __be32 rx; rx 498 drivers/net/wireless/ath/ath9k/htc.h struct ath9k_htc_rx rx; rx 42 drivers/net/wireless/ath/ath9k/htc_drv_debug.c be32_to_cpu(cmd_rsp.rx)); rx 134 drivers/net/wireless/ath/ath9k/htc_drv_init.c req.ep_callbacks.rx = ath9k_htc_rxep; rx 1074 drivers/net/wireless/ath/ath9k/htc_drv_txrx.c 
spin_lock_irqsave(&priv->rx.rxbuflock, flags); rx 1075 drivers/net/wireless/ath/ath9k/htc_drv_txrx.c list_for_each_entry(tmp_buf, &priv->rx.rxbuf, list) { rx 1083 drivers/net/wireless/ath/ath9k/htc_drv_txrx.c spin_unlock_irqrestore(&priv->rx.rxbuflock, flags); rx 1103 drivers/net/wireless/ath/ath9k/htc_drv_txrx.c spin_unlock_irqrestore(&priv->rx.rxbuflock, flags); rx 1107 drivers/net/wireless/ath/ath9k/htc_drv_txrx.c spin_lock_irqsave(&priv->rx.rxbuflock, flags); rx 1111 drivers/net/wireless/ath/ath9k/htc_drv_txrx.c list_move_tail(&rxbuf->list, &priv->rx.rxbuf); rx 1113 drivers/net/wireless/ath/ath9k/htc_drv_txrx.c spin_unlock_irqrestore(&priv->rx.rxbuflock, flags); rx 1127 drivers/net/wireless/ath/ath9k/htc_drv_txrx.c spin_lock_irqsave(&priv->rx.rxbuflock, flags); rx 1128 drivers/net/wireless/ath/ath9k/htc_drv_txrx.c list_for_each_entry(tmp_buf, &priv->rx.rxbuf, list) { rx 1134 drivers/net/wireless/ath/ath9k/htc_drv_txrx.c spin_unlock_irqrestore(&priv->rx.rxbuflock, flags); rx 1141 drivers/net/wireless/ath/ath9k/htc_drv_txrx.c spin_lock_irqsave(&priv->rx.rxbuflock, flags); rx 1144 drivers/net/wireless/ath/ath9k/htc_drv_txrx.c spin_unlock_irqrestore(&priv->rx.rxbuflock, flags); rx 1158 drivers/net/wireless/ath/ath9k/htc_drv_txrx.c list_for_each_entry_safe(rxbuf, tbuf, &priv->rx.rxbuf, list) { rx 1170 drivers/net/wireless/ath/ath9k/htc_drv_txrx.c INIT_LIST_HEAD(&priv->rx.rxbuf); rx 1171 drivers/net/wireless/ath/ath9k/htc_drv_txrx.c spin_lock_init(&priv->rx.rxbuflock); rx 1179 drivers/net/wireless/ath/ath9k/htc_drv_txrx.c list_add_tail(&rxbuf->list, &priv->rx.rxbuf); rx 458 drivers/net/wireless/ath/ath9k/htc_hst.c if (endpoint->ep_callbacks.rx) rx 459 drivers/net/wireless/ath/ath9k/htc_hst.c endpoint->ep_callbacks.rx(endpoint->ep_callbacks.priv, rx 95 drivers/net/wireless/ath/ath9k/htc_hst.h void (*rx) (void *, struct sk_buff *, enum htc_endpoint_id); rx 1127 drivers/net/wireless/ath/ath9k/hw.h void ar9003_hw_set_chain_masks(struct ath_hw *ah, u8 rx, u8 tx); rx 534 drivers/net/wireless/ath/ath9k/mac.c ads.u.rx = adsp->u.rx; rx 310 drivers/net/wireless/ath/ath9k/mac.h } rx; rx 339 drivers/net/wireless/ath/ath9k/mac.h #define ds_rxstatus0 u.rx.status0 rx 340 drivers/net/wireless/ath/ath9k/mac.h #define ds_rxstatus1 u.rx.status1 rx 341 drivers/net/wireless/ath/ath9k/mac.h #define ds_rxstatus2 u.rx.status2 rx 342 drivers/net/wireless/ath/ath9k/mac.h #define ds_rxstatus3 u.rx.status3 rx 343 drivers/net/wireless/ath/ath9k/mac.h #define ds_rxstatus4 u.rx.status4 rx 344 drivers/net/wireless/ath/ath9k/mac.h #define ds_rxstatus5 u.rx.status5 rx 345 drivers/net/wireless/ath/ath9k/mac.h #define ds_rxstatus6 u.rx.status6 rx 346 drivers/net/wireless/ath/ath9k/mac.h #define ds_rxstatus7 u.rx.status7 rx 347 drivers/net/wireless/ath/ath9k/mac.h #define ds_rxstatus8 u.rx.status8 rx 875 drivers/net/wireless/ath/ath9k/main.c if (sc->rx.frag) { rx 876 drivers/net/wireless/ath/ath9k/main.c dev_kfree_skb_any(sc->rx.frag); rx 877 drivers/net/wireless/ath/ath9k/main.c sc->rx.frag = NULL; rx 63 drivers/net/wireless/ath/ath9k/recv.c if (sc->rx.rxlink) rx 64 drivers/net/wireless/ath/ath9k/recv.c *sc->rx.rxlink = bf->bf_daddr; rx 68 drivers/net/wireless/ath/ath9k/recv.c sc->rx.rxlink = &ds->ds_link; rx 74 drivers/net/wireless/ath/ath9k/recv.c if (sc->rx.buf_hold) rx 75 drivers/net/wireless/ath/ath9k/recv.c ath_rx_buf_link(sc, sc->rx.buf_hold, flush); rx 77 drivers/net/wireless/ath/ath9k/recv.c sc->rx.buf_hold = bf; rx 84 drivers/net/wireless/ath/ath9k/recv.c sc->rx.defant = antenna; rx 85 
drivers/net/wireless/ath/ath9k/recv.c sc->rx.rxotherant = 0; rx 118 drivers/net/wireless/ath/ath9k/recv.c rx_edma = &sc->rx.rx_edma[qtype]; rx 122 drivers/net/wireless/ath/ath9k/recv.c bf = list_first_entry(&sc->rx.rxbuf, struct ath_rxbuf, list); rx 144 drivers/net/wireless/ath/ath9k/recv.c if (list_empty(&sc->rx.rxbuf)) { rx 149 drivers/net/wireless/ath/ath9k/recv.c list_for_each_entry_safe(bf, tbf, &sc->rx.rxbuf, list) rx 162 drivers/net/wireless/ath/ath9k/recv.c rx_edma = &sc->rx.rx_edma[qtype]; rx 167 drivers/net/wireless/ath/ath9k/recv.c list_add_tail(&bf->list, &sc->rx.rxbuf); rx 180 drivers/net/wireless/ath/ath9k/recv.c list_for_each_entry(bf, &sc->rx.rxbuf, list) { rx 210 drivers/net/wireless/ath/ath9k/recv.c ath_rx_edma_init_queue(&sc->rx.rx_edma[ATH9K_RX_QUEUE_LP], rx 212 drivers/net/wireless/ath/ath9k/recv.c ath_rx_edma_init_queue(&sc->rx.rx_edma[ATH9K_RX_QUEUE_HP], rx 220 drivers/net/wireless/ath/ath9k/recv.c INIT_LIST_HEAD(&sc->rx.rxbuf); rx 246 drivers/net/wireless/ath/ath9k/recv.c list_add_tail(&bf->list, &sc->rx.rxbuf); rx 291 drivers/net/wireless/ath/ath9k/recv.c error = ath_descdma_setup(sc, &sc->rx.rxdma, &sc->rx.rxbuf, rx 300 drivers/net/wireless/ath/ath9k/recv.c list_for_each_entry(bf, &sc->rx.rxbuf, list) { rx 323 drivers/net/wireless/ath/ath9k/recv.c sc->rx.rxlink = NULL; rx 343 drivers/net/wireless/ath/ath9k/recv.c list_for_each_entry(bf, &sc->rx.rxbuf, list) { rx 450 drivers/net/wireless/ath/ath9k/recv.c if (list_empty(&sc->rx.rxbuf)) rx 453 drivers/net/wireless/ath/ath9k/recv.c sc->rx.buf_hold = NULL; rx 454 drivers/net/wireless/ath/ath9k/recv.c sc->rx.rxlink = NULL; rx 455 drivers/net/wireless/ath/ath9k/recv.c list_for_each_entry_safe(bf, tbf, &sc->rx.rxbuf, list) { rx 460 drivers/net/wireless/ath/ath9k/recv.c if (list_empty(&sc->rx.rxbuf)) rx 463 drivers/net/wireless/ath/ath9k/recv.c bf = list_first_entry(&sc->rx.rxbuf, struct ath_rxbuf, list); rx 493 drivers/net/wireless/ath/ath9k/recv.c sc->rx.rxlink = NULL; rx 631 drivers/net/wireless/ath/ath9k/recv.c struct ath_rx_edma *rx_edma = &sc->rx.rx_edma[qtype]; rx 659 drivers/net/wireless/ath/ath9k/recv.c list_add_tail(&bf->list, &sc->rx.rxbuf); rx 668 drivers/net/wireless/ath/ath9k/recv.c list_add_tail(&bf->list, &sc->rx.rxbuf); rx 703 drivers/net/wireless/ath/ath9k/recv.c if (list_empty(&sc->rx.rxbuf)) { rx 704 drivers/net/wireless/ath/ath9k/recv.c sc->rx.rxlink = NULL; rx 708 drivers/net/wireless/ath/ath9k/recv.c bf = list_first_entry(&sc->rx.rxbuf, struct ath_rxbuf, list); rx 709 drivers/net/wireless/ath/ath9k/recv.c if (bf == sc->rx.buf_hold) rx 732 drivers/net/wireless/ath/ath9k/recv.c if (list_is_last(&bf->list, &sc->rx.rxbuf)) { rx 733 drivers/net/wireless/ath/ath9k/recv.c sc->rx.rxlink = NULL; rx 817 drivers/net/wireless/ath/ath9k/recv.c bool discard_current = sc->rx.discard_next; rx 827 drivers/net/wireless/ath/ath9k/recv.c sc->rx.discard_next = false; rx 942 drivers/net/wireless/ath/ath9k/recv.c sc->rx.num_pkts++; rx 948 drivers/net/wireless/ath/ath9k/recv.c sc->rx.discard_next = rx_stats->rs_more; rx 976 drivers/net/wireless/ath/ath9k/recv.c if (sc->rx.defant != rs->rs_antenna) { rx 977 drivers/net/wireless/ath/ath9k/recv.c if (++sc->rx.rxotherant >= 3) rx 980 drivers/net/wireless/ath/ath9k/recv.c sc->rx.rxotherant = 0; rx 997 drivers/net/wireless/ath/ath9k/recv.c rxs->ampdu_reference = sc->rx.ampdu_ref; rx 1001 drivers/net/wireless/ath/ath9k/recv.c sc->rx.ampdu_ref++; rx 1107 drivers/net/wireless/ath/ath9k/recv.c if (sc->rx.frag) rx 1108 drivers/net/wireless/ath/ath9k/recv.c hdr_skb = sc->rx.frag; rx 
1163 drivers/net/wireless/ath/ath9k/recv.c if (sc->rx.frag) { rx 1165 drivers/net/wireless/ath/ath9k/recv.c dev_kfree_skb_any(sc->rx.frag); rx 1170 drivers/net/wireless/ath/ath9k/recv.c sc->rx.frag = skb; rx 1174 drivers/net/wireless/ath/ath9k/recv.c if (sc->rx.frag) { rx 1183 drivers/net/wireless/ath/ath9k/recv.c sc->rx.frag = NULL; rx 1214 drivers/net/wireless/ath/ath9k/recv.c if (sc->rx.frag) { rx 1215 drivers/net/wireless/ath/ath9k/recv.c dev_kfree_skb_any(sc->rx.frag); rx 1216 drivers/net/wireless/ath/ath9k/recv.c sc->rx.frag = NULL; rx 1219 drivers/net/wireless/ath/ath9k/recv.c list_add_tail(&bf->list, &sc->rx.rxbuf); rx 265 drivers/net/wireless/ath/ath9k/wmi.c connect.ep_callbacks.rx = ath9k_wmi_ctrl_rx; rx 144 drivers/net/wireless/ath/hw.c u32 cycles, busy, rx, tx; rx 153 drivers/net/wireless/ath/hw.c rx = REG_READ(ah, AR_RFCNT); rx 168 drivers/net/wireless/ath/hw.c common->cc_ani.rx_frame += rx; rx 173 drivers/net/wireless/ath/hw.c common->cc_survey.rx_frame += rx; rx 288 drivers/net/wireless/ath/wil6210/cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 298 drivers/net/wireless/ath/wil6210/cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 309 drivers/net/wireless/ath/wil6210/cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 315 drivers/net/wireless/ath/wil6210/cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 321 drivers/net/wireless/ath/wil6210/cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 63 drivers/net/wireless/ath/wil6210/debugfs.c &ring->va[idx].rx.enhanced; rx 377 drivers/net/wireless/ath/wil6210/debugfs.c offsetof(struct wil6210_mbox_ctl, rx)); rx 1139 drivers/net/wireless/ath/wil6210/debugfs.c &ring->va[txdesc_idx].rx.enhanced; rx 1477 drivers/net/wireless/ath/wil6210/debugfs.c int rx = atomic_xchg(&wil->isr_count_rx, 0); rx 1486 drivers/net/wireless/ath/wil6210/debugfs.c seq_printf(s, "Rx irqs:packets : %8d : %8ld\n", rx, rxf - rxf_old); rx 521 drivers/net/wireless/ath/wil6210/interrupt.c wil_mbox_ring_le2cpus(&wil->mbox_ctl.rx); rx 530 drivers/net/wireless/ath/wil6210/interrupt.c if (wil->mbox_ctl.rx.entry_size < min_size) { rx 532 drivers/net/wireless/ath/wil6210/interrupt.c wil->mbox_ctl.rx.entry_size); rx 244 drivers/net/wireless/ath/wil6210/txrx.c &vring->va[vring->swhead].rx.legacy; rx 273 drivers/net/wireless/ath/wil6210/txrx.c volatile struct vring_rx_desc *_d = &vring->va[i].rx.legacy; rx 368 drivers/net/wireless/ath/wil6210/txrx.c _d = (struct vring_rx_desc *)&ring->va[ring->swhead].rx.legacy; rx 477 drivers/net/wireless/ath/wil6210/txrx.c _d = &vring->va[i].rx.legacy; rx 498 drivers/net/wireless/ath/wil6210/txrx.h union wil_rx_desc rx; rx 178 drivers/net/wireless/ath/wil6210/txrx_edma.c &ring->va[i].rx.enhanced; rx 481 drivers/net/wireless/ath/wil6210/wil6210.h struct wil6210_mbox_ring rx; rx 1895 drivers/net/wireless/ath/wil6210/wmi.c struct wil6210_mbox_ring *r = &wil->mbox_ctl.rx; rx 1919 drivers/net/wireless/ath/wil6210/wmi.c offsetof(struct wil6210_mbox_ctl, rx.head)); rx 2003 drivers/net/wireless/ath/wil6210/wmi.c offsetof(struct wil6210_mbox_ctl, rx.tail), r->tail); rx 3410 drivers/net/wireless/ath/wil6210/wmi.c struct wil6210_mbox_ring *r = &wil->mbox_ctl.rx; rx 3429 drivers/net/wireless/ath/wil6210/wmi.c offsetof(struct wil6210_mbox_ctl, rx.head)); rx 746 drivers/net/wireless/broadcom/b43/phy_lp.c static void lpphy_set_trsw_over(struct b43_wldev *dev, bool tx, bool rx) rx 748 drivers/net/wireless/broadcom/b43/phy_lp.c u16 trsw = (tx << 1) | rx; rx 1840 drivers/net/wireless/broadcom/b43/phy_lp.c bool rx, bool pa, struct 
lpphy_tx_gains *gains) rx 1876 drivers/net/wireless/broadcom/b43/phy_lp.c rx = false; rx 1880 drivers/net/wireless/broadcom/b43/phy_lp.c lpphy_set_trsw_over(dev, tx, rx); rx 34 drivers/net/wireless/broadcom/b43/radio_2056.c const struct b2056_inittab_entry *rx; rx 3013 drivers/net/wireless/broadcom/b43/radio_2056.c .rx = prefix##_rx, \ rx 10233 drivers/net/wireless/broadcom/b43/radio_2056.c B2056_RX0, pts->rx, pts->rx_length); rx 10235 drivers/net/wireless/broadcom/b43/radio_2056.c B2056_RX1, pts->rx, pts->rx_length); rx 6404 drivers/net/wireless/broadcom/brcm80211/brcmfmac/cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 6409 drivers/net/wireless/broadcom/brcm80211/brcmfmac/cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 6414 drivers/net/wireless/broadcom/brcm80211/brcmfmac/cfg80211.c .rx = BIT(IEEE80211_STYPE_ASSOC_REQ >> 4) | rx 6424 drivers/net/wireless/broadcom/brcm80211/brcmfmac/cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 6429 drivers/net/wireless/broadcom/brcm80211/brcmfmac/cfg80211.c .rx = BIT(IEEE80211_STYPE_ASSOC_REQ >> 4) | rx 71 drivers/net/wireless/broadcom/brcm80211/brcmsmac/d11.h struct pio2regs rx; rx 83 drivers/net/wireless/broadcom/brcm80211/brcmsmac/d11.h struct pio4regs rx; rx 1152 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c static void wlc_lcnphy_set_trsw_override(struct brcms_phy *pi, bool tx, bool rx) rx 1157 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c (0x1 << 0), (tx ? (0x1 << 1) : 0) | (rx ? (0x1 << 0) : 0)); rx 968 drivers/net/wireless/cisco/airo.c u32 rx; // Received sequence number rx 1318 drivers/net/wireless/cisco/airo.c cur->rx = 0; /* Rx Sequence numbers */ rx 1569 drivers/net/wireless/cisco/airo.c context->rx = 0; // Reset rx rx 1574 drivers/net/wireless/cisco/airo.c context->rx = 0; // Reset rx rx 1594 drivers/net/wireless/cisco/airo.c if (!(context->rx & index)) { rx 1597 drivers/net/wireless/cisco/airo.c context->rx |= index; rx 1616 drivers/net/wireless/cisco/airo.c context->rx >>= shift; rx 1618 drivers/net/wireless/cisco/airo.c context->rx = 0; rx 63 drivers/net/wireless/intel/iwlegacy/3945-debug.c ofdm = &il->_3945.stats.rx.ofdm; rx 64 drivers/net/wireless/intel/iwlegacy/3945-debug.c cck = &il->_3945.stats.rx.cck; rx 65 drivers/net/wireless/intel/iwlegacy/3945-debug.c general = &il->_3945.stats.rx.general; rx 66 drivers/net/wireless/intel/iwlegacy/3945-debug.c accum_ofdm = &il->_3945.accum_stats.rx.ofdm; rx 67 drivers/net/wireless/intel/iwlegacy/3945-debug.c accum_cck = &il->_3945.accum_stats.rx.cck; rx 68 drivers/net/wireless/intel/iwlegacy/3945-debug.c accum_general = &il->_3945.accum_stats.rx.general; rx 69 drivers/net/wireless/intel/iwlegacy/3945-debug.c delta_ofdm = &il->_3945.delta_stats.rx.ofdm; rx 70 drivers/net/wireless/intel/iwlegacy/3945-debug.c delta_cck = &il->_3945.delta_stats.rx.cck; rx 71 drivers/net/wireless/intel/iwlegacy/3945-debug.c delta_general = &il->_3945.delta_stats.rx.general; rx 72 drivers/net/wireless/intel/iwlegacy/3945-debug.c max_ofdm = &il->_3945.max_delta.rx.ofdm; rx 73 drivers/net/wireless/intel/iwlegacy/3945-debug.c max_cck = &il->_3945.max_delta.rx.cck; rx 74 drivers/net/wireless/intel/iwlegacy/3945-debug.c max_general = &il->_3945.max_delta.rx.general; rx 1487 drivers/net/wireless/intel/iwlegacy/3945-mac.c il->isr_stats.rx++; rx 511 drivers/net/wireless/intel/iwlegacy/4965-calib.c rx_info = &(((struct il_notif_stats *)resp)->rx.general); rx 512 drivers/net/wireless/intel/iwlegacy/4965-calib.c ofdm = &(((struct il_notif_stats *)resp)->rx.ofdm); rx 513 
drivers/net/wireless/intel/iwlegacy/4965-calib.c cck = &(((struct il_notif_stats *)resp)->rx.cck); rx 814 drivers/net/wireless/intel/iwlegacy/4965-calib.c rx_info = &(((struct il_notif_stats *)stat_resp)->rx.general); rx 72 drivers/net/wireless/intel/iwlegacy/4965-debug.c ofdm = &il->_4965.stats.rx.ofdm; rx 73 drivers/net/wireless/intel/iwlegacy/4965-debug.c cck = &il->_4965.stats.rx.cck; rx 74 drivers/net/wireless/intel/iwlegacy/4965-debug.c general = &il->_4965.stats.rx.general; rx 75 drivers/net/wireless/intel/iwlegacy/4965-debug.c ht = &il->_4965.stats.rx.ofdm_ht; rx 76 drivers/net/wireless/intel/iwlegacy/4965-debug.c accum_ofdm = &il->_4965.accum_stats.rx.ofdm; rx 77 drivers/net/wireless/intel/iwlegacy/4965-debug.c accum_cck = &il->_4965.accum_stats.rx.cck; rx 78 drivers/net/wireless/intel/iwlegacy/4965-debug.c accum_general = &il->_4965.accum_stats.rx.general; rx 79 drivers/net/wireless/intel/iwlegacy/4965-debug.c accum_ht = &il->_4965.accum_stats.rx.ofdm_ht; rx 80 drivers/net/wireless/intel/iwlegacy/4965-debug.c delta_ofdm = &il->_4965.delta_stats.rx.ofdm; rx 81 drivers/net/wireless/intel/iwlegacy/4965-debug.c delta_cck = &il->_4965.delta_stats.rx.cck; rx 82 drivers/net/wireless/intel/iwlegacy/4965-debug.c delta_general = &il->_4965.delta_stats.rx.general; rx 83 drivers/net/wireless/intel/iwlegacy/4965-debug.c delta_ht = &il->_4965.delta_stats.rx.ofdm_ht; rx 84 drivers/net/wireless/intel/iwlegacy/4965-debug.c max_ofdm = &il->_4965.max_delta.rx.ofdm; rx 85 drivers/net/wireless/intel/iwlegacy/4965-debug.c max_cck = &il->_4965.max_delta.rx.cck; rx 86 drivers/net/wireless/intel/iwlegacy/4965-debug.c max_general = &il->_4965.max_delta.rx.general; rx 87 drivers/net/wireless/intel/iwlegacy/4965-debug.c max_ht = &il->_4965.max_delta.rx.ofdm_ht; rx 1280 drivers/net/wireless/intel/iwlegacy/4965-mac.c rx_info = &(il->_4965.stats.rx.general); rx 4488 drivers/net/wireless/intel/iwlegacy/4965-mac.c il->isr_stats.rx++; rx 2979 drivers/net/wireless/intel/iwlegacy/commands.h struct iwl39_stats_rx rx; rx 2986 drivers/net/wireless/intel/iwlegacy/commands.h struct stats_rx rx; rx 1015 drivers/net/wireless/intel/iwlegacy/common.h u32 rx; rx 692 drivers/net/wireless/intel/iwlegacy/debug.c il->isr_stats.rx); rx 2759 drivers/net/wireless/intel/iwlwifi/dvm/commands.h struct statistics_rx rx; rx 2766 drivers/net/wireless/intel/iwlwifi/dvm/commands.h struct statistics_rx_bt rx; rx 2136 drivers/net/wireless/intel/iwlwifi/dvm/main.c .rx = iwl_rx_dispatch, rx 374 drivers/net/wireless/intel/iwlwifi/dvm/rx.c rx_non_phy = &stats->rx.general.common; rx 375 drivers/net/wireless/intel/iwlwifi/dvm/rx.c rx_ofdm = &stats->rx.ofdm; rx 376 drivers/net/wireless/intel/iwlwifi/dvm/rx.c rx_ofdm_ht = &stats->rx.ofdm_ht; rx 377 drivers/net/wireless/intel/iwlwifi/dvm/rx.c rx_cck = &stats->rx.cck; rx 383 drivers/net/wireless/intel/iwlwifi/dvm/rx.c priv->statistics.num_bt_kills = stats->rx.general.num_bt_kills; rx 385 drivers/net/wireless/intel/iwlwifi/dvm/rx.c le32_to_cpu(stats->rx.general.num_bt_kills)); rx 392 drivers/net/wireless/intel/iwlwifi/dvm/rx.c rx_non_phy = &stats->rx.general; rx 393 drivers/net/wireless/intel/iwlwifi/dvm/rx.c rx_ofdm = &stats->rx.ofdm; rx 394 drivers/net/wireless/intel/iwlwifi/dvm/rx.c rx_ofdm_ht = &stats->rx.ofdm_ht; rx 395 drivers/net/wireless/intel/iwlwifi/dvm/rx.c rx_cck = &stats->rx.cck; rx 420 drivers/net/wireless/intel/iwlwifi/fw/api/stats.h struct mvm_statistics_rx_v3 rx; rx 427 drivers/net/wireless/intel/iwlwifi/fw/api/stats.h struct mvm_statistics_rx_v3 rx; rx 435 
drivers/net/wireless/intel/iwlwifi/fw/api/stats.h struct mvm_statistics_rx rx; rx 150 drivers/net/wireless/intel/iwlwifi/iwl-op-mode.h void (*rx)(struct iwl_op_mode *op_mode, struct napi_struct *napi, rx 191 drivers/net/wireless/intel/iwlwifi/iwl-op-mode.h return op_mode->ops->rx(op_mode, napi, rxb); rx 620 drivers/net/wireless/intel/iwlwifi/mvm/mvm.h } rx; rx 1351 drivers/net/wireless/intel/iwlwifi/mvm/ops.c .rx = iwl_mvm_rx, rx 1374 drivers/net/wireless/intel/iwlwifi/mvm/ops.c .rx = iwl_mvm_rx_mq, rx 282 drivers/net/wireless/intel/iwlwifi/mvm/rx.c mdata->rx.pkts[ac]++; rx 285 drivers/net/wireless/intel/iwlwifi/mvm/rx.c if (mdata->rx.last_ampdu_ref != mvm->ampdu_ref) { rx 286 drivers/net/wireless/intel/iwlwifi/mvm/rx.c mdata->rx.last_ampdu_ref = mvm->ampdu_ref; rx 287 drivers/net/wireless/intel/iwlwifi/mvm/rx.c mdata->rx.airtime += le16_to_cpu(phy_info->frame_time); rx 734 drivers/net/wireless/intel/iwlwifi/mvm/rx.c data.mac_id = stats->rx.general.mac_id; rx 738 drivers/net/wireless/intel/iwlwifi/mvm/rx.c mvm->rx_stats_v3 = stats->rx; rx 755 drivers/net/wireless/intel/iwlwifi/mvm/rx.c data.mac_id = stats->rx.general.mac_id; rx 759 drivers/net/wireless/intel/iwlwifi/mvm/rx.c mvm->rx_stats = stats->rx; rx 836 drivers/net/wireless/intel/iwlwifi/mvm/rx.c mdata->rx.airtime += airtime; rx 1248 drivers/net/wireless/intel/iwlwifi/mvm/utils.c u32 airtime = mdata->rx.airtime + mdata->tx.airtime; rx 1259 drivers/net/wireless/intel/iwlwifi/mvm/utils.c vo_vi_pkts += mdata->rx.pkts[ac] + rx 1270 drivers/net/wireless/intel/iwlwifi/mvm/utils.c memset(&mdata->rx.pkts, 0, sizeof(mdata->rx.pkts)); rx 1281 drivers/net/wireless/intel/iwlwifi/mvm/utils.c memset(&mdata->rx.airtime, 0, sizeof(mdata->rx.airtime)); rx 1391 drivers/net/wireless/intel/iwlwifi/mvm/utils.c memset(&mdata->rx.pkts, 0, sizeof(mdata->rx.pkts)); rx 1393 drivers/net/wireless/intel/iwlwifi/mvm/utils.c memset(&mdata->rx.airtime, 0, sizeof(mdata->rx.airtime)); rx 131 drivers/net/wireless/intel/iwlwifi/pcie/internal.h u32 rx; rx 1939 drivers/net/wireless/intel/iwlwifi/pcie/rx.c isr_stats->rx++; rx 2681 drivers/net/wireless/intel/iwlwifi/pcie/trans.c "Rx command responses:\t\t %u\n", isr_stats->rx); rx 31 drivers/net/wireless/intersil/hostap/hostap.h const struct hfa384x_rx_frame *rx); rx 518 drivers/net/wireless/intersil/hostap/hostap_main.c void hostap_dump_rx_header(const char *name, const struct hfa384x_rx_frame *rx) rx 522 drivers/net/wireless/intersil/hostap/hostap_main.c status = __le16_to_cpu(rx->status); rx 528 drivers/net/wireless/intersil/hostap/hostap_main.c rx->silence, rx->signal, rx->rate, rx->rxflow, jiffies); rx 530 drivers/net/wireless/intersil/hostap/hostap_main.c fc = __le16_to_cpu(rx->frame_control); rx 535 drivers/net/wireless/intersil/hostap/hostap_main.c __le16_to_cpu(rx->duration_id), __le16_to_cpu(rx->seq_ctrl), rx 536 drivers/net/wireless/intersil/hostap/hostap_main.c __le16_to_cpu(rx->data_len), rx 541 drivers/net/wireless/intersil/hostap/hostap_main.c rx->addr1, rx->addr2, rx->addr3, rx->addr4); rx 544 drivers/net/wireless/intersil/hostap/hostap_main.c rx->dst_addr, rx->src_addr, rx 545 drivers/net/wireless/intersil/hostap/hostap_main.c __be16_to_cpu(rx->len)); rx 1670 drivers/net/wireless/marvell/mwifiex/cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 1676 drivers/net/wireless/marvell/mwifiex/cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 1682 drivers/net/wireless/marvell/mwifiex/cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 1688 drivers/net/wireless/marvell/mwifiex/cfg80211.c .rx = 
BIT(IEEE80211_STYPE_ACTION >> 4) | rx 1214 drivers/net/wireless/marvell/mwl8k.c int rx; rx 1225 drivers/net/wireless/marvell/mwl8k.c rx = rxq->tail++; rx 1228 drivers/net/wireless/marvell/mwl8k.c rxq->buf[rx].skb = skb; rx 1229 drivers/net/wireless/marvell/mwl8k.c dma_unmap_addr_set(&rxq->buf[rx], dma, addr); rx 1231 drivers/net/wireless/marvell/mwl8k.c rxd = rxq->rxd + (rx * priv->rxd_ops->rxd_size); rx 3537 drivers/net/wireless/marvell/mwl8k.c static int mwl8k_cmd_mimo_config(struct ieee80211_hw *hw, __u8 rx, __u8 tx) rx 3549 drivers/net/wireless/marvell/mwl8k.c cmd->rx_antenna_map = rx; rx 88 drivers/net/wireless/mediatek/mt76/mt76x0/eeprom.c dev->cal.rx.temp_offset = mt76x02_sign_extend(val, 8); rx 90 drivers/net/wireless/mediatek/mt76/mt76x0/eeprom.c dev->cal.rx.temp_offset = -10; rx 95 drivers/net/wireless/mediatek/mt76/mt76x0/eeprom.c struct mt76x02_rx_freq_cal *caldata = &dev->cal.rx; rx 113 drivers/net/wireless/mediatek/mt76/mt76x0/eeprom.c struct mt76x02_rx_freq_cal *caldata = &dev->cal.rx; rx 416 drivers/net/wireless/mediatek/mt76/mt76x0/phy.c gain -= dev->cal.rx.lna_gain * 2; rx 1037 drivers/net/wireless/mediatek/mt76/mt76x0/phy.c val = (35 * (val - dev->cal.rx.temp_offset)) / 10 + 25; rx 1191 drivers/net/wireless/mediatek/mt76/mt76x0/phy.c min_t(u8, dev->cal.rx.freq_offset, 0xbf)); rx 35 drivers/net/wireless/mediatek/mt76/mt76x02.h struct mt76x02_rx_freq_cal rx; rx 699 drivers/net/wireless/mediatek/mt76/mt76x02_mac.c struct mt76x02_rx_freq_cal *cal = &dev->cal.rx; rx 180 drivers/net/wireless/mediatek/mt76/mt76x2/eeprom.c s8 *dest = dev->cal.rx.high_gain; rx 195 drivers/net/wireless/mediatek/mt76/mt76x2/eeprom.c s8 *dest = dev->cal.rx.rssi_offset; rx 268 drivers/net/wireless/mediatek/mt76/mt76x2/eeprom.c dev->cal.rx.mcu_gain = (lna_2g & 0xff); rx 269 drivers/net/wireless/mediatek/mt76/mt76x2/eeprom.c dev->cal.rx.mcu_gain |= (lna_5g[0] & 0xff) << 8; rx 270 drivers/net/wireless/mediatek/mt76/mt76x2/eeprom.c dev->cal.rx.mcu_gain |= (lna_5g[1] & 0xff) << 16; rx 271 drivers/net/wireless/mediatek/mt76/mt76x2/eeprom.c dev->cal.rx.mcu_gain |= (lna_5g[2] & 0xff) << 24; rx 274 drivers/net/wireless/mediatek/mt76/mt76x2/eeprom.c dev->cal.rx.lna_gain = mt76x02_sign_extend(lna, 8); rx 204 drivers/net/wireless/mediatek/mt76/mt76x2/pci_phy.c mt76x2_mcu_init_gain(dev, channel, dev->cal.rx.mcu_gain, true); rx 35 drivers/net/wireless/mediatek/mt76/mt76x2/phy.c s8 *gain_adj = dev->cal.rx.high_gain; rx 141 drivers/net/wireless/mediatek/mt76/mt76x2/usb_phy.c mt76x2_mcu_init_gain(dev, channel, dev->cal.rx.mcu_gain, true); rx 58 drivers/net/wireless/quantenna/qtnfmac/cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 64 drivers/net/wireless/quantenna/qtnfmac/cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 400 drivers/net/wireless/ralink/rt2x00/rt2400pci.c BUG_ON(ant->rx == ANTENNA_SW_DIVERSITY || rx 425 drivers/net/wireless/ralink/rt2x00/rt2400pci.c switch (ant->rx) { rx 797 drivers/net/wireless/ralink/rt2x00/rt2400pci.c rt2x00_set_field32(®, RXCSR1_RXD_SIZE, rt2x00dev->rx->desc_size); rx 798 drivers/net/wireless/ralink/rt2x00/rt2400pci.c rt2x00_set_field32(®, RXCSR1_NUM_RXD, rt2x00dev->rx->limit); rx 801 drivers/net/wireless/ralink/rt2x00/rt2400pci.c entry_priv = rt2x00dev->rx->entries[0].priv_data; rx 827 drivers/net/wireless/ralink/rt2x00/rt2400pci.c (rt2x00dev->rx->data_size / 128)); rx 1494 drivers/net/wireless/ralink/rt2x00/rt2400pci.c rt2x00dev->default_ant.rx = rx 1505 drivers/net/wireless/ralink/rt2x00/rt2400pci.c if (rt2x00dev->default_ant.rx == ANTENNA_SW_DIVERSITY) rx 1506 
drivers/net/wireless/ralink/rt2x00/rt2400pci.c rt2x00dev->default_ant.rx = ANTENNA_HW_DIVERSITY; rx 407 drivers/net/wireless/ralink/rt2x00/rt2500pci.c BUG_ON(ant->rx == ANTENNA_SW_DIVERSITY || rx 434 drivers/net/wireless/ralink/rt2x00/rt2500pci.c switch (ant->rx) { rx 882 drivers/net/wireless/ralink/rt2x00/rt2500pci.c rt2x00_set_field32(®, RXCSR1_RXD_SIZE, rt2x00dev->rx->desc_size); rx 883 drivers/net/wireless/ralink/rt2x00/rt2500pci.c rt2x00_set_field32(®, RXCSR1_NUM_RXD, rt2x00dev->rx->limit); rx 886 drivers/net/wireless/ralink/rt2x00/rt2500pci.c entry_priv = rt2x00dev->rx->entries[0].priv_data; rx 912 drivers/net/wireless/ralink/rt2x00/rt2500pci.c rt2x00dev->rx->data_size / 128); rx 1653 drivers/net/wireless/ralink/rt2x00/rt2500pci.c rt2x00dev->default_ant.rx = rx 512 drivers/net/wireless/ralink/rt2x00/rt2500usb.c BUG_ON(ant->rx == ANTENNA_SW_DIVERSITY || rx 545 drivers/net/wireless/ralink/rt2x00/rt2500usb.c switch (ant->rx) { rx 858 drivers/net/wireless/ralink/rt2x00/rt2500usb.c rt2x00dev->rx->data_size); rx 1463 drivers/net/wireless/ralink/rt2x00/rt2500usb.c rt2x00dev->default_ant.rx = rx 1474 drivers/net/wireless/ralink/rt2x00/rt2500usb.c if (rt2x00dev->default_ant.rx == ANTENNA_SW_DIVERSITY) rx 1475 drivers/net/wireless/ralink/rt2x00/rt2500usb.c rt2x00dev->default_ant.rx = ANTENNA_HW_DIVERSITY; rx 2237 drivers/net/wireless/ralink/rt2x00/rt2800lib.c rt2x00dev->default_ant.rx); rx 9480 drivers/net/wireless/ralink/rt2x00/rt2800lib.c rt2x00dev->default_ant.rx = ANTENNA_A; rx 9484 drivers/net/wireless/ralink/rt2x00/rt2800lib.c rt2x00dev->default_ant.rx = ANTENNA_B; rx 9489 drivers/net/wireless/ralink/rt2x00/rt2800lib.c rt2x00dev->default_ant.rx = ANTENNA_A; rx 9496 drivers/net/wireless/ralink/rt2x00/rt2800lib.c rt2x00dev->default_ant.rx = ANTENNA_HW_DIVERSITY; /* Unused */ rx 707 drivers/net/wireless/ralink/rt2x00/rt2800mmio.c entry_priv = rt2x00dev->rx->entries[0].priv_data; rx 711 drivers/net/wireless/ralink/rt2x00/rt2800mmio.c rt2x00dev->rx[0].limit); rx 713 drivers/net/wireless/ralink/rt2x00/rt2800mmio.c rt2x00dev->rx[0].limit - 1); rx 311 drivers/net/wireless/ralink/rt2x00/rt2800usb.c ((rt2x00dev->rx->limit * DATA_FRAME_SIZE) rx 201 drivers/net/wireless/ralink/rt2x00/rt2x00.h enum antenna rx; rx 961 drivers/net/wireless/ralink/rt2x00/rt2x00.h struct data_queue *rx; rx 1480 drivers/net/wireless/ralink/rt2x00/rt2x00.h u32 *tx, u32 *tx_max, u32 *rx, u32 *rx_max); rx 119 drivers/net/wireless/ralink/rt2x00/rt2x00config.c if (config.rx == ANTENNA_SW_DIVERSITY) { rx 122 drivers/net/wireless/ralink/rt2x00/rt2x00config.c if (def->rx == ANTENNA_SW_DIVERSITY) rx 123 drivers/net/wireless/ralink/rt2x00/rt2x00config.c config.rx = ANTENNA_B; rx 125 drivers/net/wireless/ralink/rt2x00/rt2x00config.c config.rx = def->rx; rx 127 drivers/net/wireless/ralink/rt2x00/rt2x00config.c } else if (config.rx == ANTENNA_SW_DIVERSITY) rx 128 drivers/net/wireless/ralink/rt2x00/rt2x00config.c config.rx = active->rx; rx 147 drivers/net/wireless/ralink/rt2x00/rt2x00config.c rt2x00queue_stop_queue(rt2x00dev->rx); rx 161 drivers/net/wireless/ralink/rt2x00/rt2x00config.c rt2x00queue_start_queue(rt2x00dev->rx); rx 857 drivers/net/wireless/ralink/rt2x00/rt2x00dev.c rx_status->antenna = rt2x00dev->link.ant.active.rx; rx 97 drivers/net/wireless/ralink/rt2x00/rt2x00link.c other_antenna = (ant->active.rx == ANTENNA_A) ? ANTENNA_B : ANTENNA_A; rx 100 drivers/net/wireless/ralink/rt2x00/rt2x00link.c new_ant.rx = other_antenna; rx 140 drivers/net/wireless/ralink/rt2x00/rt2x00link.c new_ant.rx = (new_ant.rx == ANTENNA_A) ? 
ANTENNA_B : ANTENNA_A; rx 306 drivers/net/wireless/ralink/rt2x00/rt2x00mac.c rt2x00queue_stop_queue(rt2x00dev->rx); rx 329 drivers/net/wireless/ralink/rt2x00/rt2x00mac.c rt2x00queue_start_queue(rt2x00dev->rx); rx 740 drivers/net/wireless/ralink/rt2x00/rt2x00mac.c if (rx_ant == 3 && def->rx == ANTENNA_SW_DIVERSITY) { rx 746 drivers/net/wireless/ralink/rt2x00/rt2x00mac.c setup.rx = rx_ant; rx 772 drivers/net/wireless/ralink/rt2x00/rt2x00mac.c *rx_ant = active->rx; rx 779 drivers/net/wireless/ralink/rt2x00/rt2x00mac.c u32 *tx, u32 *tx_max, u32 *rx, u32 *rx_max) rx 789 drivers/net/wireless/ralink/rt2x00/rt2x00mac.c *rx = rt2x00dev->rx->length; rx 790 drivers/net/wireless/ralink/rt2x00/rt2x00mac.c *rx_max = rt2x00dev->rx->limit; rx 51 drivers/net/wireless/ralink/rt2x00/rt2x00mmio.c struct data_queue *queue = rt2x00dev->rx; rx 1035 drivers/net/wireless/ralink/rt2x00/rt2x00queue.c rt2x00queue_start_queue(rt2x00dev->rx); rx 1054 drivers/net/wireless/ralink/rt2x00/rt2x00queue.c rt2x00queue_stop_queue(rt2x00dev->rx); rx 1065 drivers/net/wireless/ralink/rt2x00/rt2x00queue.c rt2x00queue_flush_queue(rt2x00dev->rx, drop); rx 1167 drivers/net/wireless/ralink/rt2x00/rt2x00queue.c status = rt2x00queue_alloc_entries(rt2x00dev->rx); rx 1187 drivers/net/wireless/ralink/rt2x00/rt2x00queue.c status = rt2x00queue_alloc_rxskbs(rt2x00dev->rx); rx 1205 drivers/net/wireless/ralink/rt2x00/rt2x00queue.c rt2x00queue_free_skbs(rt2x00dev->rx); rx 1255 drivers/net/wireless/ralink/rt2x00/rt2x00queue.c rt2x00dev->rx = queue; rx 1269 drivers/net/wireless/ralink/rt2x00/rt2x00queue.c rt2x00queue_init(rt2x00dev, rt2x00dev->rx, QID_RX); rx 1284 drivers/net/wireless/ralink/rt2x00/rt2x00queue.c kfree(rt2x00dev->rx); rx 1285 drivers/net/wireless/ralink/rt2x00/rt2x00queue.c rt2x00dev->rx = NULL; rx 494 drivers/net/wireless/ralink/rt2x00/rt2x00queue.h &(__dev)->rx[(__dev)->data_queues] rx 541 drivers/net/wireless/ralink/rt2x00/rt2x00queue.h queue_loop(__entry, (__dev)->rx, queue_end(__dev)) rx 349 drivers/net/wireless/ralink/rt2x00/rt2x00usb.c while (!rt2x00queue_empty(rt2x00dev->rx)) { rx 350 drivers/net/wireless/ralink/rt2x00/rt2x00usb.c entry = rt2x00queue_get_entry(rt2x00dev->rx, Q_INDEX_DONE); rx 615 drivers/net/wireless/ralink/rt2x00/rt2x00usb.c rt2x00usb_assign_endpoint(rt2x00dev->rx, ep_desc); rx 628 drivers/net/wireless/ralink/rt2x00/rt2x00usb.c if (!rt2x00dev->rx->usb_endpoint || !rt2x00dev->tx->usb_endpoint) { rx 549 drivers/net/wireless/ralink/rt2x00/rt61pci.c switch (ant->rx) { rx 597 drivers/net/wireless/ralink/rt2x00/rt61pci.c switch (ant->rx) { rx 647 drivers/net/wireless/ralink/rt2x00/rt61pci.c switch (ant->rx) { rx 715 drivers/net/wireless/ralink/rt2x00/rt61pci.c BUG_ON(ant->rx == ANTENNA_SW_DIVERSITY || rx 1376 drivers/net/wireless/ralink/rt2x00/rt61pci.c rt2x00_set_field32(®, RX_RING_CSR_RING_SIZE, rt2x00dev->rx->limit); rx 1378 drivers/net/wireless/ralink/rt2x00/rt61pci.c rt2x00dev->rx->desc_size / 4); rx 1382 drivers/net/wireless/ralink/rt2x00/rt61pci.c entry_priv = rt2x00dev->rx->entries[0].priv_data; rx 2441 drivers/net/wireless/ralink/rt2x00/rt61pci.c rt2x00dev->default_ant.rx = rx 2482 drivers/net/wireless/ralink/rt2x00/rt61pci.c rt2x00dev->default_ant.rx = rx 2490 drivers/net/wireless/ralink/rt2x00/rt61pci.c rt2x00dev->default_ant.rx = ANTENNA_SW_DIVERSITY; rx 584 drivers/net/wireless/ralink/rt2x00/rt73usb.c switch (ant->rx) { rx 633 drivers/net/wireless/ralink/rt2x00/rt73usb.c switch (ant->rx) { rx 696 drivers/net/wireless/ralink/rt2x00/rt73usb.c BUG_ON(ant->rx == ANTENNA_SW_DIVERSITY || rx 1881 
drivers/net/wireless/ralink/rt2x00/rt73usb.c rt2x00dev->default_ant.rx = rx 423 drivers/net/wireless/st/cw1200/bh.c int rx, tx, term, suspend; rx 457 drivers/net/wireless/st/cw1200/bh.c rx = atomic_xchg(&priv->bh_rx, 0); rx 462 drivers/net/wireless/st/cw1200/bh.c (rx || tx || term || suspend || priv->bh_error); rx 466 drivers/net/wireless/st/cw1200/bh.c rx, tx, term, suspend, priv->bh_error, status); rx 480 drivers/net/wireless/st/cw1200/bh.c if (priv->hw_bufs_used && (!rx || !tx)) { rx 484 drivers/net/wireless/st/cw1200/bh.c rx = 1; rx 544 drivers/net/wireless/st/cw1200/bh.c rx: rx 594 drivers/net/wireless/st/cw1200/bh.c goto rx; rx 272 drivers/net/wireless/st/cw1200/debug.c d->rx); rx 16 drivers/net/wireless/st/cw1200/debug.h int rx; rx 52 drivers/net/wireless/st/cw1200/debug.h ++priv->debug->rx; rx 865 drivers/net/wireless/st/cw1200/wsm.c struct wsm_rx rx; rx 870 drivers/net/wireless/st/cw1200/wsm.c rx.status = WSM_GET32(buf); rx 871 drivers/net/wireless/st/cw1200/wsm.c rx.channel_number = WSM_GET16(buf); rx 872 drivers/net/wireless/st/cw1200/wsm.c rx.rx_rate = WSM_GET8(buf); rx 873 drivers/net/wireless/st/cw1200/wsm.c rx.rcpi_rssi = WSM_GET8(buf); rx 874 drivers/net/wireless/st/cw1200/wsm.c rx.flags = WSM_GET32(buf); rx 881 drivers/net/wireless/st/cw1200/wsm.c if (!rx.rcpi_rssi && rx 890 drivers/net/wireless/st/cw1200/wsm.c rx.rcpi_rssi = rx.rcpi_rssi / 2 - 110; rx 895 drivers/net/wireless/st/cw1200/wsm.c if (!rx.status && ieee80211_is_deauth(fctl)) { rx 905 drivers/net/wireless/st/cw1200/wsm.c cw1200_rx_cb(priv, &rx, link_id, skb_p); rx 1064 drivers/net/wireless/ti/wl1251/acx.h struct acx_rx_statistics rx; rx 103 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(rx, out_of_mem, 20, "%u"); rx 104 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(rx, hdr_overflow, 20, "%u"); rx 105 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(rx, hw_stuck, 20, "%u"); rx 106 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(rx, dropped, 20, "%u"); rx 107 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(rx, fcs_err, 20, "%u"); rx 108 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(rx, xfr_hint_trig, 20, "%u"); rx 109 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(rx, path_reset, 20, "%u"); rx 110 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(rx, reset_counter, 20, "%u"); rx 243 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_DEL(rx, out_of_mem); rx 244 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_DEL(rx, hdr_overflow); rx 245 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_DEL(rx, hw_stuck); rx 246 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_DEL(rx, dropped); rx 247 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_DEL(rx, fcs_err); rx 248 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_DEL(rx, xfr_hint_trig); rx 249 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_DEL(rx, path_reset); rx 250 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_DEL(rx, reset_counter); rx 342 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_ADD(rx, out_of_mem); rx 343 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_ADD(rx, hdr_overflow); rx 344 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_ADD(rx, hw_stuck); rx 345 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_ADD(rx, dropped); rx 346 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_ADD(rx, fcs_err); rx 347 drivers/net/wireless/ti/wl1251/debugfs.c 
DEBUGFS_FWSTATS_ADD(rx, xfr_hint_trig); rx 348 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_ADD(rx, path_reset); rx 349 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_ADD(rx, reset_counter); rx 245 drivers/net/wireless/ti/wl12xx/acx.h struct wl12xx_acx_rx_statistics rx; rx 21 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(rx, out_of_mem, "%u"); rx 22 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(rx, hdr_overflow, "%u"); rx 23 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(rx, hw_stuck, "%u"); rx 24 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(rx, dropped, "%u"); rx 25 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(rx, fcs_err, "%u"); rx 26 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(rx, xfr_hint_trig, "%u"); rx 27 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(rx, path_reset, "%u"); rx 28 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(rx, reset_counter, "%u"); rx 121 drivers/net/wireless/ti/wl12xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, out_of_mem); rx 122 drivers/net/wireless/ti/wl12xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, hdr_overflow); rx 123 drivers/net/wireless/ti/wl12xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, hw_stuck); rx 124 drivers/net/wireless/ti/wl12xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, dropped); rx 125 drivers/net/wireless/ti/wl12xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, fcs_err); rx 126 drivers/net/wireless/ti/wl12xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, xfr_hint_trig); rx 127 drivers/net/wireless/ti/wl12xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, path_reset); rx 128 drivers/net/wireless/ti/wl12xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, reset_counter); rx 103 drivers/net/wireless/ti/wl12xx/main.c .rx = { rx 271 drivers/net/wireless/ti/wl18xx/acx.h struct wl18xx_acx_rx_stats rx; rx 83 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, rx_beacon_early_term, "%u"); rx 84 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, rx_out_of_mpdu_nodes, "%u"); rx 85 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, rx_hdr_overflow, "%u"); rx 86 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, rx_dropped_frame, "%u"); rx 87 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, rx_done, "%u"); rx 88 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, rx_defrag, "%u"); rx 89 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, rx_defrag_end, "%u"); rx 90 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, rx_cmplt, "%u"); rx 91 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, rx_pre_complt, "%u"); rx 92 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, rx_cmplt_task, "%u"); rx 93 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, rx_phy_hdr, "%u"); rx 94 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, rx_timeout, "%u"); rx 95 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, rx_rts_timeout, "%u"); rx 96 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, rx_timeout_wa, "%u"); rx 97 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, defrag_called, "%u"); rx 98 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, defrag_init_called, "%u"); rx 99 drivers/net/wireless/ti/wl18xx/debugfs.c 
WL18XX_DEBUGFS_FWSTATS_FILE(rx, defrag_in_process_called, "%u"); rx 100 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, defrag_tkip_called, "%u"); rx 101 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, defrag_need_defrag, "%u"); rx 102 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, defrag_decrypt_failed, "%u"); rx 103 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, decrypt_key_not_found, "%u"); rx 104 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, defrag_need_decrypt, "%u"); rx 105 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, rx_tkip_replays, "%u"); rx 106 drivers/net/wireless/ti/wl18xx/debugfs.c WL18XX_DEBUGFS_FWSTATS_FILE(rx, rx_xfr, "%u"); rx 473 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, rx_beacon_early_term); rx 474 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, rx_out_of_mpdu_nodes); rx 475 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, rx_hdr_overflow); rx 476 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, rx_dropped_frame); rx 477 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, rx_done); rx 478 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, rx_defrag); rx 479 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, rx_defrag_end); rx 480 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, rx_cmplt); rx 481 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, rx_pre_complt); rx 482 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, rx_cmplt_task); rx 483 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, rx_phy_hdr); rx 484 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, rx_timeout); rx 485 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, rx_rts_timeout); rx 486 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, rx_timeout_wa); rx 487 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, defrag_called); rx 488 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, defrag_init_called); rx 489 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, defrag_in_process_called); rx 490 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, defrag_tkip_called); rx 491 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, defrag_need_defrag); rx 492 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, defrag_decrypt_failed); rx 493 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, decrypt_key_not_found); rx 494 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, defrag_need_decrypt); rx 495 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, rx_tkip_replays); rx 496 drivers/net/wireless/ti/wl18xx/debugfs.c DEBUGFS_FWSTATS_ADD(rx, rx_xfr); rx 244 drivers/net/wireless/ti/wl18xx/main.c .rx = { rx 171 drivers/net/wireless/ti/wlcore/acx.c acx->lifetime = cpu_to_le32(wl->conf.rx.rx_msdu_life_time); rx 260 drivers/net/wireless/ti/wlcore/acx.c rx_timeout->ps_poll_timeout = cpu_to_le16(wl->conf.rx.ps_poll_timeout); rx 261 drivers/net/wireless/ti/wlcore/acx.c rx_timeout->upsd_timeout = cpu_to_le16(wl->conf.rx.upsd_timeout); rx 287 drivers/net/wireless/ti/wlcore/acx.c rts_threshold = wl->conf.rx.rts_threshold; rx 550 drivers/net/wireless/ti/wlcore/acx.c detection->rx_cca_threshold = cpu_to_le16(wl->conf.rx.rx_cca_threshold); rx 1041 
drivers/net/wireless/ti/wlcore/acx.c rx_conf->threshold = cpu_to_le16(wl->conf.rx.irq_pkt_threshold); rx 1042 drivers/net/wireless/ti/wlcore/acx.c rx_conf->timeout = cpu_to_le16(wl->conf.rx.irq_timeout); rx 1043 drivers/net/wireless/ti/wlcore/acx.c rx_conf->mblk_threshold = cpu_to_le16(wl->conf.rx.irq_blk_threshold); rx 1044 drivers/net/wireless/ti/wlcore/acx.c rx_conf->queue_type = wl->conf.rx.queue_type; rx 1132 drivers/net/wireless/ti/wlcore/conf.h struct conf_rx_settings rx; rx 180 drivers/net/wireless/ti/wlcore/debugfs.c WL12XX_CONF_DEBUGFS(irq_pkt_threshold, rx, 0, 65535, rx 182 drivers/net/wireless/ti/wlcore/debugfs.c WL12XX_CONF_DEBUGFS(irq_blk_threshold, rx, 0, 65535, rx 184 drivers/net/wireless/ti/wlcore/debugfs.c WL12XX_CONF_DEBUGFS(irq_timeout, rx, 0, 100, rx 655 drivers/net/wireless/zydas/zd1211rw/zd_usb.c struct zd_usb_rx *rx; rx 679 drivers/net/wireless/zydas/zd1211rw/zd_usb.c rx = &usb->rx; rx 681 drivers/net/wireless/zydas/zd1211rw/zd_usb.c tasklet_schedule(&rx->reset_timer_tasklet); rx 683 drivers/net/wireless/zydas/zd1211rw/zd_usb.c if (length%rx->usb_packet_size > rx->usb_packet_size-4) { rx 686 drivers/net/wireless/zydas/zd1211rw/zd_usb.c ZD_ASSERT(length <= ARRAY_SIZE(rx->fragment)); rx 687 drivers/net/wireless/zydas/zd1211rw/zd_usb.c spin_lock_irqsave(&rx->lock, flags); rx 688 drivers/net/wireless/zydas/zd1211rw/zd_usb.c memcpy(rx->fragment, buffer, length); rx 689 drivers/net/wireless/zydas/zd1211rw/zd_usb.c rx->fragment_length = length; rx 690 drivers/net/wireless/zydas/zd1211rw/zd_usb.c spin_unlock_irqrestore(&rx->lock, flags); rx 694 drivers/net/wireless/zydas/zd1211rw/zd_usb.c spin_lock_irqsave(&rx->lock, flags); rx 695 drivers/net/wireless/zydas/zd1211rw/zd_usb.c if (rx->fragment_length > 0) { rx 697 drivers/net/wireless/zydas/zd1211rw/zd_usb.c ZD_ASSERT(length + rx->fragment_length <= rx 698 drivers/net/wireless/zydas/zd1211rw/zd_usb.c ARRAY_SIZE(rx->fragment)); rx 700 drivers/net/wireless/zydas/zd1211rw/zd_usb.c memcpy(rx->fragment+rx->fragment_length, buffer, length); rx 701 drivers/net/wireless/zydas/zd1211rw/zd_usb.c handle_rx_packet(usb, rx->fragment, rx 702 drivers/net/wireless/zydas/zd1211rw/zd_usb.c rx->fragment_length + length); rx 703 drivers/net/wireless/zydas/zd1211rw/zd_usb.c rx->fragment_length = 0; rx 704 drivers/net/wireless/zydas/zd1211rw/zd_usb.c spin_unlock_irqrestore(&rx->lock, flags); rx 706 drivers/net/wireless/zydas/zd1211rw/zd_usb.c spin_unlock_irqrestore(&rx->lock, flags); rx 752 drivers/net/wireless/zydas/zd1211rw/zd_usb.c struct zd_usb_rx *rx = &usb->rx; rx 768 drivers/net/wireless/zydas/zd1211rw/zd_usb.c spin_lock_irq(&rx->lock); rx 769 drivers/net/wireless/zydas/zd1211rw/zd_usb.c if (rx->urbs) { rx 770 drivers/net/wireless/zydas/zd1211rw/zd_usb.c spin_unlock_irq(&rx->lock); rx 774 drivers/net/wireless/zydas/zd1211rw/zd_usb.c rx->urbs = urbs; rx 775 drivers/net/wireless/zydas/zd1211rw/zd_usb.c rx->urbs_count = RX_URBS_COUNT; rx 776 drivers/net/wireless/zydas/zd1211rw/zd_usb.c spin_unlock_irq(&rx->lock); rx 789 drivers/net/wireless/zydas/zd1211rw/zd_usb.c spin_lock_irq(&rx->lock); rx 790 drivers/net/wireless/zydas/zd1211rw/zd_usb.c rx->urbs = NULL; rx 791 drivers/net/wireless/zydas/zd1211rw/zd_usb.c rx->urbs_count = 0; rx 792 drivers/net/wireless/zydas/zd1211rw/zd_usb.c spin_unlock_irq(&rx->lock); rx 804 drivers/net/wireless/zydas/zd1211rw/zd_usb.c struct zd_usb_rx *rx = &usb->rx; rx 806 drivers/net/wireless/zydas/zd1211rw/zd_usb.c mutex_lock(&rx->setup_mutex); rx 808 drivers/net/wireless/zydas/zd1211rw/zd_usb.c 
mutex_unlock(&rx->setup_mutex); rx 821 drivers/net/wireless/zydas/zd1211rw/zd_usb.c struct zd_usb_rx *rx = &usb->rx; rx 823 drivers/net/wireless/zydas/zd1211rw/zd_usb.c spin_lock_irqsave(&rx->lock, flags); rx 824 drivers/net/wireless/zydas/zd1211rw/zd_usb.c urbs = rx->urbs; rx 825 drivers/net/wireless/zydas/zd1211rw/zd_usb.c count = rx->urbs_count; rx 826 drivers/net/wireless/zydas/zd1211rw/zd_usb.c spin_unlock_irqrestore(&rx->lock, flags); rx 836 drivers/net/wireless/zydas/zd1211rw/zd_usb.c spin_lock_irqsave(&rx->lock, flags); rx 837 drivers/net/wireless/zydas/zd1211rw/zd_usb.c rx->urbs = NULL; rx 838 drivers/net/wireless/zydas/zd1211rw/zd_usb.c rx->urbs_count = 0; rx 839 drivers/net/wireless/zydas/zd1211rw/zd_usb.c spin_unlock_irqrestore(&rx->lock, flags); rx 844 drivers/net/wireless/zydas/zd1211rw/zd_usb.c struct zd_usb_rx *rx = &usb->rx; rx 846 drivers/net/wireless/zydas/zd1211rw/zd_usb.c mutex_lock(&rx->setup_mutex); rx 848 drivers/net/wireless/zydas/zd1211rw/zd_usb.c mutex_unlock(&rx->setup_mutex); rx 850 drivers/net/wireless/zydas/zd1211rw/zd_usb.c tasklet_kill(&rx->reset_timer_tasklet); rx 851 drivers/net/wireless/zydas/zd1211rw/zd_usb.c cancel_delayed_work_sync(&rx->idle_work); rx 857 drivers/net/wireless/zydas/zd1211rw/zd_usb.c struct zd_usb_rx *rx = &usb->rx; rx 860 drivers/net/wireless/zydas/zd1211rw/zd_usb.c mutex_lock(&rx->setup_mutex); rx 862 drivers/net/wireless/zydas/zd1211rw/zd_usb.c spin_lock_irqsave(&rx->lock, flags); rx 863 drivers/net/wireless/zydas/zd1211rw/zd_usb.c do_reset = rx->urbs != NULL; rx 864 drivers/net/wireless/zydas/zd1211rw/zd_usb.c spin_unlock_irqrestore(&rx->lock, flags); rx 871 drivers/net/wireless/zydas/zd1211rw/zd_usb.c mutex_unlock(&rx->setup_mutex); rx 1133 drivers/net/wireless/zydas/zd1211rw/zd_usb.c container_of(work, struct zd_usb, rx.idle_work.work); rx 1154 drivers/net/wireless/zydas/zd1211rw/zd_usb.c struct zd_usb_rx *rx = &usb->rx; rx 1156 drivers/net/wireless/zydas/zd1211rw/zd_usb.c mod_delayed_work(zd_workqueue, &rx->idle_work, ZD_RX_IDLE_INTERVAL); rx 1172 drivers/net/wireless/zydas/zd1211rw/zd_usb.c struct zd_usb_rx *rx = &usb->rx; rx 1174 drivers/net/wireless/zydas/zd1211rw/zd_usb.c spin_lock_init(&rx->lock); rx 1175 drivers/net/wireless/zydas/zd1211rw/zd_usb.c mutex_init(&rx->setup_mutex); rx 1177 drivers/net/wireless/zydas/zd1211rw/zd_usb.c rx->usb_packet_size = 512; rx 1179 drivers/net/wireless/zydas/zd1211rw/zd_usb.c rx->usb_packet_size = 64; rx 1181 drivers/net/wireless/zydas/zd1211rw/zd_usb.c ZD_ASSERT(rx->fragment_length == 0); rx 1182 drivers/net/wireless/zydas/zd1211rw/zd_usb.c INIT_DELAYED_WORK(&rx->idle_work, zd_rx_idle_timer_handler); rx 1183 drivers/net/wireless/zydas/zd1211rw/zd_usb.c rx->reset_timer_tasklet.func = zd_usb_reset_rx_idle_timer_tasklet; rx 1184 drivers/net/wireless/zydas/zd1211rw/zd_usb.c rx->reset_timer_tasklet.data = (unsigned long)usb; rx 208 drivers/net/wireless/zydas/zd1211rw/zd_usb.h struct zd_usb_rx rx; rx 186 drivers/net/xen-netback/common.h struct xen_netif_rx_back_ring rx; rx 1444 drivers/net/xen-netback/netback.c if (queue->rx.sring) rx 1446 drivers/net/xen-netback/netback.c queue->rx.sring); rx 1473 drivers/net/xen-netback/netback.c BACK_RING_INIT(&queue->rx, rxs, XEN_PAGE_SIZE); rx 53 drivers/net/xen-netback/rx.c prod = queue->rx.sring->req_prod; rx 54 drivers/net/xen-netback/rx.c cons = queue->rx.req_cons; rx 59 drivers/net/xen-netback/rx.c queue->rx.sring->req_event = prod + 1; rx 65 drivers/net/xen-netback/rx.c } while (queue->rx.sring->req_prod != prod); rx 151 drivers/net/xen-netback/rx.c 
rsp = RING_GET_RESPONSE(&queue->rx, rx 160 drivers/net/xen-netback/rx.c RING_PUSH_RESPONSES_AND_CHECK_NOTIFY(&queue->rx, notify); rx 200 drivers/net/xen-netback/rx.c queue->rx_copy.idx[queue->rx_copy.num] = queue->rx.req_cons; rx 293 drivers/net/xen-netback/rx.c queue->rx.rsp_prod_pvt = queue->rx.req_cons; rx 434 drivers/net/xen-netback/rx.c req = RING_GET_REQUEST(&queue->rx, queue->rx.req_cons); rx 435 drivers/net/xen-netback/rx.c rsp = RING_GET_RESPONSE(&queue->rx, queue->rx.req_cons); rx 443 drivers/net/xen-netback/rx.c queue->rx.req_cons++; rx 474 drivers/net/xen-netback/rx.c prod = queue->rx.sring->req_prod; rx 475 drivers/net/xen-netback/rx.c cons = queue->rx.req_cons; rx 487 drivers/net/xen-netback/rx.c prod = queue->rx.sring->req_prod; rx 488 drivers/net/xen-netback/rx.c cons = queue->rx.req_cons; rx 30 drivers/net/xen-netback/xenbus.c struct xen_netif_rx_back_ring *rx_ring = &queue->rx; rx 139 drivers/net/xen-netfront.c struct xen_netif_rx_front_ring rx; rx 166 drivers/net/xen-netfront.c struct xen_netif_rx_response rx; rx 285 drivers/net/xen-netfront.c RING_IDX req_prod = queue->rx.req_prod_pvt; rx 292 drivers/net/xen-netfront.c for (req_prod = queue->rx.req_prod_pvt; rx 293 drivers/net/xen-netfront.c req_prod - queue->rx.rsp_cons < NET_RX_RING_SIZE; rx 318 drivers/net/xen-netfront.c req = RING_GET_REQUEST(&queue->rx, req_prod); rx 327 drivers/net/xen-netfront.c queue->rx.req_prod_pvt = req_prod; rx 334 drivers/net/xen-netfront.c if (req_prod - queue->rx.rsp_cons < NET_RX_SLOTS_MIN || rx 340 drivers/net/xen-netfront.c RING_PUSH_REQUESTS_AND_CHECK_NOTIFY(&queue->rx, notify); rx 362 drivers/net/xen-netfront.c queue->rx.sring->rsp_event = queue->rx.rsp_cons + 1; rx 363 drivers/net/xen-netfront.c if (RING_HAS_UNCONSUMED_RESPONSES(&queue->rx)) rx 727 drivers/net/xen-netfront.c int new = xennet_rxidx(queue->rx.req_prod_pvt); rx 732 drivers/net/xen-netfront.c RING_GET_REQUEST(&queue->rx, queue->rx.req_prod_pvt)->id = new; rx 733 drivers/net/xen-netfront.c RING_GET_REQUEST(&queue->rx, queue->rx.req_prod_pvt)->gref = ref; rx 734 drivers/net/xen-netfront.c queue->rx.req_prod_pvt++; rx 744 drivers/net/xen-netfront.c RING_IDX cons = queue->rx.rsp_cons; rx 759 drivers/net/xen-netfront.c RING_GET_RESPONSE(&queue->rx, ++cons); rx 777 drivers/net/xen-netfront.c queue->rx.rsp_cons = cons; rx 785 drivers/net/xen-netfront.c struct xen_netif_rx_response *rx = &rinfo->rx; rx 788 drivers/net/xen-netfront.c RING_IDX cons = queue->rx.rsp_cons; rx 791 drivers/net/xen-netfront.c int max = XEN_NETIF_NR_SLOTS_MIN + (rx->status <= RX_COPY_THRESHOLD); rx 796 drivers/net/xen-netfront.c if (rx->flags & XEN_NETRXF_extra_info) { rx 798 drivers/net/xen-netfront.c cons = queue->rx.rsp_cons; rx 802 drivers/net/xen-netfront.c if (unlikely(rx->status < 0 || rx 803 drivers/net/xen-netfront.c rx->offset + rx->status > XEN_PAGE_SIZE)) { rx 806 drivers/net/xen-netfront.c rx->offset, rx->status); rx 820 drivers/net/xen-netfront.c rx->id); rx 833 drivers/net/xen-netfront.c if (!(rx->flags & XEN_NETRXF_more_data)) rx 843 drivers/net/xen-netfront.c rx = RING_GET_RESPONSE(&queue->rx, cons + slots); rx 856 drivers/net/xen-netfront.c queue->rx.rsp_cons = cons + slots; rx 894 drivers/net/xen-netfront.c RING_IDX cons = queue->rx.rsp_cons; rx 898 drivers/net/xen-netfront.c struct xen_netif_rx_response *rx = rx 899 drivers/net/xen-netfront.c RING_GET_RESPONSE(&queue->rx, ++cons); rx 909 drivers/net/xen-netfront.c queue->rx.rsp_cons = ++cons + skb_queue_len(list); rx 916 drivers/net/xen-netfront.c rx->offset, rx->status, PAGE_SIZE); 
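The xen-netfront and xen-netback entries just above all revolve around one structure: a shared request/response ring reached through queue->rx, where the frontend posts buffers via req_prod_pvt, the backend fills in xen_netif_rx_response entries, and the frontend's poll path walks RING_GET_RESPONSE() results and advances rsp_cons. The stand-alone C model below sketches only that consumer-side bookkeeping; struct rx_ring, rx_response and rx_poll are hypothetical stand-ins for the real Xen shared-ring macros and types, and the backend side is simulated in main().

/* Illustrative model of a netfront-style RX response ring consumer.
 * The names here are hypothetical stand-ins for the Xen shared-ring
 * machinery (RING_GET_RESPONSE, rsp_prod/rsp_cons) seen in the
 * listing above; this is not the real xen/interface API.
 */
#include <stdio.h>
#include <string.h>

#define RING_SIZE 8                      /* power of two, like NET_RX_RING_SIZE */
#define RING_IDX_MASK (RING_SIZE - 1)

struct rx_response {
	int id;        /* which posted buffer this response refers to */
	int offset;    /* data offset within the granted page         */
	int status;    /* length on success, negative on error        */
};

struct rx_ring {
	unsigned int rsp_prod;               /* written by the backend    */
	unsigned int rsp_cons;               /* advanced by the frontend  */
	struct rx_response ring[RING_SIZE];
};

/* Frontend poll loop: consume every response the backend has produced. */
static int rx_poll(struct rx_ring *r)
{
	unsigned int cons = r->rsp_cons;
	unsigned int prod = r->rsp_prod;     /* snapshot, as netfront snapshots rsp_prod */
	int handled = 0;

	while (cons != prod) {
		struct rx_response *rsp = &r->ring[cons & RING_IDX_MASK];

		if (rsp->status < 0)
			printf("bad response id=%d status=%d\n", rsp->id, rsp->status);
		else
			printf("frame id=%d offset=%d len=%d\n",
			       rsp->id, rsp->offset, rsp->status);
		cons++;
		handled++;
	}
	r->rsp_cons = cons;                  /* like queue->rx.rsp_cons = cons */
	return handled;
}

int main(void)
{
	struct rx_ring r;

	memset(&r, 0, sizeof(r));
	/* Pretend the backend produced two responses. */
	r.ring[0] = (struct rx_response){ .id = 0, .offset = 64, .status = 1500 };
	r.ring[1] = (struct rx_response){ .id = 1, .offset = 64, .status = -1 };
	r.rsp_prod = 2;

	printf("handled %d responses\n", rx_poll(&r));
	return 0;
}

The real drivers add grant handling, event-channel notification and the RING_FINAL_CHECK_FOR_RESPONSES() re-check; the index arithmetic above is the part the rsp_cons lines in the listing correspond to.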
rx 922 drivers/net/xen-netfront.c queue->rx.rsp_cons = cons; rx 993 drivers/net/xen-netfront.c struct xen_netif_rx_response *rx = &rinfo.rx; rx 1008 drivers/net/xen-netfront.c rp = queue->rx.sring->rsp_prod; rx 1011 drivers/net/xen-netfront.c i = queue->rx.rsp_cons; rx 1014 drivers/net/xen-netfront.c memcpy(rx, RING_GET_RESPONSE(&queue->rx, i), sizeof(*rx)); rx 1024 drivers/net/xen-netfront.c i = queue->rx.rsp_cons; rx 1036 drivers/net/xen-netfront.c queue->rx.rsp_cons += skb_queue_len(&tmpq); rx 1041 drivers/net/xen-netfront.c NETFRONT_SKB_CB(skb)->pull_to = rx->status; rx 1045 drivers/net/xen-netfront.c skb_frag_off_set(&skb_shinfo(skb)->frags[0], rx->offset); rx 1046 drivers/net/xen-netfront.c skb_frag_size_set(&skb_shinfo(skb)->frags[0], rx->status); rx 1047 drivers/net/xen-netfront.c skb->data_len = rx->status; rx 1048 drivers/net/xen-netfront.c skb->len += rx->status; rx 1053 drivers/net/xen-netfront.c if (rx->flags & XEN_NETRXF_csum_blank) rx 1055 drivers/net/xen-netfront.c else if (rx->flags & XEN_NETRXF_data_validated) rx 1060 drivers/net/xen-netfront.c i = ++queue->rx.rsp_cons; rx 1075 drivers/net/xen-netfront.c RING_FINAL_CHECK_FOR_RESPONSES(&queue->rx, more_to_do); rx 1239 drivers/net/xen-netfront.c RING_HAS_UNCONSUMED_RESPONSES(&queue->rx))) rx 1416 drivers/net/xen-netfront.c xennet_end_access(queue->rx_ring_ref, queue->rx.sring); rx 1421 drivers/net/xen-netfront.c queue->rx.sring = NULL; rx 1542 drivers/net/xen-netfront.c queue->rx.sring = NULL; rx 1566 drivers/net/xen-netfront.c FRONT_RING_INIT(&queue->rx, rxs, XEN_PAGE_SIZE); rx 1016 drivers/pci/controller/dwc/pcie-tegra194.c msg.rx.data = &resp; rx 1017 drivers/pci/controller/dwc/pcie-tegra194.c msg.rx.size = sizeof(resp); rx 954 drivers/phy/qualcomm/phy-qcom-qmp.c void __iomem *rx; rx 1448 drivers/phy/qualcomm/phy-qcom-qmp.c void __iomem *rx = qphy->rx; rx 1510 drivers/phy/qualcomm/phy-qcom-qmp.c qcom_qmp_phy_configure(rx, cfg->regs, cfg->rx_tbl, cfg->rx_tbl_num); rx 1885 drivers/phy/qualcomm/phy-qcom-qmp.c qphy->rx = of_iomap(np, 1); rx 1886 drivers/phy/qualcomm/phy-qcom-qmp.c if (!qphy->rx) rx 1909 drivers/phy/qualcomm/phy-qcom-qmp.c qphy->rx2 = qphy->rx + QMP_PHY_LEGACY_LANE_STRIDE; rx 1295 drivers/pinctrl/sh-pfc/pfc-r8a7778.c #define CAN_PFC_DATA(name, tx, rx) SH_PFC_MUX2(name, tx, rx) rx 1338 drivers/pinctrl/sh-pfc/pfc-r8a7778.c #define SCIF_PFC_DAT(name, tx, rx) SH_PFC_MUX2(name, tx, rx) rx 1370 drivers/pinctrl/sh-pfc/pfc-r8a7778.c #define HSPI_PFC_DAT(name, clk, cs, rx, tx) SH_PFC_MUX4(name, clk, cs, rx, tx) rx 399 drivers/platform/mellanox/mlxbf-tmfifo.c int rx, tx; rx 401 drivers/platform/mellanox/mlxbf-tmfifo.c rx = !test_and_set_bit(MLXBF_TM_RX_HWM_IRQ, &fifo->pend_events); rx 404 drivers/platform/mellanox/mlxbf-tmfifo.c if (rx || tx) rx 178 drivers/ps3/ps3-vuart.c trig->rx = size - val; rx 181 drivers/ps3/ps3-vuart.c trig->tx, trig->rx); rx 187 drivers/ps3/ps3-vuart.c unsigned int rx) rx 211 drivers/ps3/ps3-vuart.c PARAM_RX_TRIGGER, size - rx); rx 220 drivers/ps3/ps3-vuart.c tx, rx); rx 74 drivers/ps3/vuart.h unsigned long rx; rx 81 drivers/ps3/vuart.h unsigned int rx); rx 1579 drivers/rpmsg/qcom_glink_native.c struct qcom_glink_pipe *rx, rx 1593 drivers/rpmsg/qcom_glink_native.c glink->rx_pipe = rx; rx 31 drivers/rpmsg/qcom_glink_native.h struct qcom_glink_pipe *rx, rx 185 drivers/rpmsg/qcom_glink_rpm.c struct glink_rpm_pipe *rx, rx 228 drivers/rpmsg/qcom_glink_rpm.c rx->native.length = size; rx 230 drivers/rpmsg/qcom_glink_rpm.c rx->tail = msg_ram + offset; rx 231 drivers/rpmsg/qcom_glink_rpm.c rx->head = 
msg_ram + offset + sizeof(u32); rx 232 drivers/rpmsg/qcom_glink_rpm.c rx->fifo = msg_ram + offset + 2 * sizeof(u32); rx 244 drivers/rpmsg/qcom_glink_rpm.c if (!rx->fifo || !tx->fifo) { rx 250 drivers/rpmsg/qcom_smd.c struct smd_channel_info rx; rx 272 drivers/rpmsg/qcom_smd.c struct smd_channel_info_word rx; rx 277 drivers/rpmsg/qcom_smd.c BUILD_BUG_ON(sizeof(channel->info->rx.param) != sizeof(u8)); \ rx 279 drivers/rpmsg/qcom_smd.c le32_to_cpu(channel->info_word->rx.param) : \ rx 280 drivers/rpmsg/qcom_smd.c channel->info->rx.param; \ rx 285 drivers/rpmsg/qcom_smd.c BUILD_BUG_ON(sizeof(channel->info->rx.param) != sizeof(u32)); \ rx 287 drivers/rpmsg/qcom_smd.c channel->info_word->rx.param : \ rx 288 drivers/rpmsg/qcom_smd.c channel->info->rx.param); \ rx 293 drivers/rpmsg/qcom_smd.c BUILD_BUG_ON(sizeof(channel->info->rx.param) != sizeof(u8)); \ rx 295 drivers/rpmsg/qcom_smd.c channel->info_word->rx.param = cpu_to_le32(value); \ rx 297 drivers/rpmsg/qcom_smd.c channel->info->rx.param = value; \ rx 302 drivers/rpmsg/qcom_smd.c BUILD_BUG_ON(sizeof(channel->info->rx.param) != sizeof(u32)); \ rx 304 drivers/rpmsg/qcom_smd.c channel->info_word->rx.param = cpu_to_le32(value); \ rx 306 drivers/rpmsg/qcom_smd.c channel->info->rx.param = cpu_to_le32(value); \ rx 495 drivers/rtc/rtc-ds1305.c u8 *addr, size_t count, char *tx, char *rx) rx 507 drivers/rtc/rtc-ds1305.c x->rx_buf = rx; rx 291 drivers/s390/net/qeth_core.h } rx; rx 847 drivers/s390/net/qeth_core.h struct qeth_rx rx; rx 2672 drivers/s390/net/qeth_core_main.c memset(&card->rx, 0, sizeof(struct qeth_rx)); rx 5157 drivers/s390/net/qeth_core_main.c if (!card->rx.b_count) { rx 5158 drivers/s390/net/qeth_core_main.c card->rx.qdio_err = 0; rx 5159 drivers/s390/net/qeth_core_main.c card->rx.b_count = qdio_get_next_buffers( rx 5160 drivers/s390/net/qeth_core_main.c card->data.ccwdev, 0, &card->rx.b_index, rx 5161 drivers/s390/net/qeth_core_main.c &card->rx.qdio_err); rx 5162 drivers/s390/net/qeth_core_main.c if (card->rx.b_count <= 0) { rx 5163 drivers/s390/net/qeth_core_main.c card->rx.b_count = 0; rx 5166 drivers/s390/net/qeth_core_main.c card->rx.b_element = rx 5167 drivers/s390/net/qeth_core_main.c &card->qdio.in_q->bufs[card->rx.b_index] rx 5169 drivers/s390/net/qeth_core_main.c card->rx.e_offset = 0; rx 5172 drivers/s390/net/qeth_core_main.c while (card->rx.b_count) { rx 5173 drivers/s390/net/qeth_core_main.c buffer = &card->qdio.in_q->bufs[card->rx.b_index]; rx 5174 drivers/s390/net/qeth_core_main.c if (!(card->rx.qdio_err && rx 5176 drivers/s390/net/qeth_core_main.c card->rx.qdio_err, "qinerr"))) rx 5187 drivers/s390/net/qeth_core_main.c qeth_queue_input_buffer(card, card->rx.b_index); rx 5188 drivers/s390/net/qeth_core_main.c card->rx.b_count--; rx 5189 drivers/s390/net/qeth_core_main.c if (card->rx.b_count) { rx 5190 drivers/s390/net/qeth_core_main.c card->rx.b_index = rx 5191 drivers/s390/net/qeth_core_main.c (card->rx.b_index + 1) % rx 5193 drivers/s390/net/qeth_core_main.c card->rx.b_element = rx 5195 drivers/s390/net/qeth_core_main.c ->bufs[card->rx.b_index] rx 5197 drivers/s390/net/qeth_core_main.c card->rx.e_offset = 0; rx 313 drivers/s390/net/qeth_l2_main.c &card->qdio.in_q->bufs[card->rx.b_index], rx 314 drivers/s390/net/qeth_l2_main.c &card->rx.b_element, &card->rx.e_offset, &hdr); rx 1349 drivers/s390/net/qeth_l3_main.c tg_addr, &hdr->hdr.l3.next_hop.rx.src_mac, rx 1362 drivers/s390/net/qeth_l3_main.c hdr->hdr.l3.next_hop.rx.vlan_id; rx 1381 drivers/s390/net/qeth_l3_main.c &card->qdio.in_q->bufs[card->rx.b_index], rx 1382 
drivers/s390/net/qeth_l3_main.c &card->rx.b_element, &card->rx.e_offset, &hdr); rx 1142 drivers/scsi/aacraid/aacraid.h #define rx_readb(AEP, CSR) readb(&((AEP)->regs.rx->CSR)) rx 1143 drivers/scsi/aacraid/aacraid.h #define rx_readl(AEP, CSR) readl(&((AEP)->regs.rx->CSR)) rx 1144 drivers/scsi/aacraid/aacraid.h #define rx_writeb(AEP, CSR, value) writeb(value, &((AEP)->regs.rx->CSR)) rx 1145 drivers/scsi/aacraid/aacraid.h #define rx_writel(AEP, CSR, value) writel(value, &((AEP)->regs.rx->CSR)) rx 1624 drivers/scsi/aacraid/aacraid.h struct rx_registers __iomem *rx; rx 33 drivers/scsi/aacraid/nark.c iounmap(dev->regs.rx); rx 34 drivers/scsi/aacraid/nark.c dev->regs.rx = NULL; rx 40 drivers/scsi/aacraid/nark.c dev->regs.rx = ioremap((u64)pci_resource_start(dev->pdev, 0) | rx 44 drivers/scsi/aacraid/nark.c if (dev->regs.rx == NULL) rx 48 drivers/scsi/aacraid/nark.c iounmap(dev->regs.rx); rx 49 drivers/scsi/aacraid/nark.c dev->regs.rx = NULL; rx 452 drivers/scsi/aacraid/rx.c iounmap(dev->regs.rx); rx 455 drivers/scsi/aacraid/rx.c dev->base = dev->regs.rx = ioremap(dev->base_start, size); rx 458 drivers/scsi/aacraid/rx.c dev->IndexRegs = &dev->regs.rx->IndexRegs; rx 225 drivers/scsi/cxgbi/libcxgbi.h struct cxgbi_skb_rx_cb rx; rx 237 drivers/scsi/cxgbi/libcxgbi.h #define cxgbi_skcb_rx_ddigest(skb) (CXGBI_SKB_CB(skb)->rx.ddigest) rx 238 drivers/scsi/cxgbi/libcxgbi.h #define cxgbi_skcb_rx_pdulen(skb) (CXGBI_SKB_CB(skb)->rx.pdulen) rx 231 drivers/scsi/fnic/fnic_main.c stats->rx_frames = vs->rx.rx_unicast_frames_ok; rx 232 drivers/scsi/fnic/fnic_main.c stats->rx_words = vs->rx.rx_unicast_bytes_ok / 4; rx 233 drivers/scsi/fnic/fnic_main.c stats->error_frames = vs->tx.tx_errors + vs->rx.rx_errors; rx 234 drivers/scsi/fnic/fnic_main.c stats->dumped_frames = vs->tx.tx_drops + vs->rx.rx_drop; rx 235 drivers/scsi/fnic/fnic_main.c stats->invalid_crc_count = vs->rx.rx_crc_errors; rx 65 drivers/scsi/fnic/vnic_stats.h struct vnic_rx_stats rx; rx 120 drivers/scsi/mvsas/mv_init.c if (mvi->rx) rx 122 drivers/scsi/mvsas/mv_init.c sizeof(*mvi->rx) * (MVS_RX_RING_SZ + 1), rx 123 drivers/scsi/mvsas/mv_init.c mvi->rx, mvi->rx_dma); rx 251 drivers/scsi/mvsas/mv_init.c mvi->rx = dma_alloc_coherent(mvi->dev, rx 252 drivers/scsi/mvsas/mv_init.c sizeof(*mvi->rx) * (MVS_RX_RING_SZ + 1), rx 254 drivers/scsi/mvsas/mv_init.c if (!mvi->rx) rx 256 drivers/scsi/mvsas/mv_init.c memset(mvi->rx, 0, sizeof(*mvi->rx) * (MVS_RX_RING_SZ + 1)); rx 257 drivers/scsi/mvsas/mv_init.c mvi->rx[0] = cpu_to_le32(0xfff); rx 2058 drivers/scsi/mvsas/mv_sas.c mvi->rx_cons = le32_to_cpu(mvi->rx[0]); rx 2075 drivers/scsi/mvsas/mv_sas.c rx_desc = le32_to_cpu(mvi->rx[rx_prod_idx + 1]); rx 356 drivers/scsi/mvsas/mv_sas.h __le32 *rx; rx 65 drivers/scsi/snic/vnic_stats.h struct vnic_rx_stats rx; rx 109 drivers/slimbus/qcom-ctrl.c struct slim_ctrl_buf rx; rx 136 drivers/slimbus/qcom-ctrl.c spin_lock_irqsave(&ctrl->rx.lock, flags); rx 137 drivers/slimbus/qcom-ctrl.c if ((ctrl->rx.tail + 1) % ctrl->rx.n == ctrl->rx.head) { rx 138 drivers/slimbus/qcom-ctrl.c spin_unlock_irqrestore(&ctrl->rx.lock, flags); rx 142 drivers/slimbus/qcom-ctrl.c idx = ctrl->rx.tail; rx 143 drivers/slimbus/qcom-ctrl.c ctrl->rx.tail = (ctrl->rx.tail + 1) % ctrl->rx.n; rx 144 drivers/slimbus/qcom-ctrl.c spin_unlock_irqrestore(&ctrl->rx.lock, flags); rx 146 drivers/slimbus/qcom-ctrl.c return ctrl->rx.base + (idx * ctrl->rx.sl_sz); rx 425 drivers/slimbus/qcom-ctrl.c spin_lock_irqsave(&ctrl->rx.lock, flags); rx 426 drivers/slimbus/qcom-ctrl.c if (ctrl->rx.tail == ctrl->rx.head) { rx 427 
drivers/slimbus/qcom-ctrl.c spin_unlock_irqrestore(&ctrl->rx.lock, flags); rx 430 drivers/slimbus/qcom-ctrl.c memcpy(buf, ctrl->rx.base + (ctrl->rx.head * ctrl->rx.sl_sz), rx 431 drivers/slimbus/qcom-ctrl.c ctrl->rx.sl_sz); rx 433 drivers/slimbus/qcom-ctrl.c ctrl->rx.head = (ctrl->rx.head + 1) % ctrl->rx.n; rx 434 drivers/slimbus/qcom-ctrl.c spin_unlock_irqrestore(&ctrl->rx.lock, flags); rx 539 drivers/slimbus/qcom-ctrl.c ctrl->rx.n = QCOM_RX_MSGS; rx 540 drivers/slimbus/qcom-ctrl.c ctrl->rx.sl_sz = SLIM_MSGQ_BUF_LEN; rx 546 drivers/slimbus/qcom-ctrl.c spin_lock_init(&ctrl->rx.lock); rx 585 drivers/slimbus/qcom-ctrl.c ctrl->rx.base = devm_kcalloc(&pdev->dev,ctrl->rx.n, ctrl->rx.sl_sz, rx 587 drivers/slimbus/qcom-ctrl.c if (!ctrl->rx.base) { rx 139 drivers/soc/fsl/qbman/qman_test_stash.c struct qman_fq rx; rx 354 drivers/soc/fsl/qbman/qman_test_stash.c if (qman_retire_fq(&handler->rx, &flags) || rx 360 drivers/soc/fsl/qbman/qman_test_stash.c if (qman_oos_fq(&handler->rx)) { rx 365 drivers/soc/fsl/qbman/qman_test_stash.c qman_destroy_fq(&handler->rx); rx 398 drivers/soc/fsl/qbman/qman_test_stash.c memset(&handler->rx, 0, sizeof(handler->rx)); rx 400 drivers/soc/fsl/qbman/qman_test_stash.c handler->rx.cb.dqrr = special_dqrr; rx 402 drivers/soc/fsl/qbman/qman_test_stash.c handler->rx.cb.dqrr = normal_dqrr; rx 403 drivers/soc/fsl/qbman/qman_test_stash.c err = qman_create_fq(handler->fqid_rx, 0, &handler->rx); rx 413 drivers/soc/fsl/qbman/qman_test_stash.c err = qman_init_fq(&handler->rx, QMAN_INITFQ_FLAG_SCHED | rx 52 drivers/soc/tegra/powergate-bpmp.c else if (msg.rx.ret < 0) rx 76 drivers/soc/tegra/powergate-bpmp.c msg.rx.data = &response; rx 77 drivers/soc/tegra/powergate-bpmp.c msg.rx.size = sizeof(response); rx 82 drivers/soc/tegra/powergate-bpmp.c else if (msg.rx.ret < 0) rx 104 drivers/soc/tegra/powergate-bpmp.c msg.rx.data = &response; rx 105 drivers/soc/tegra/powergate-bpmp.c msg.rx.size = sizeof(response); rx 110 drivers/soc/tegra/powergate-bpmp.c else if (msg.rx.ret < 0) rx 134 drivers/soc/tegra/powergate-bpmp.c msg.rx.data = &response; rx 135 drivers/soc/tegra/powergate-bpmp.c msg.rx.size = sizeof(response); rx 138 drivers/soc/tegra/powergate-bpmp.c if (err < 0 || msg.rx.ret < 0) rx 173 drivers/soc/ti/knav_dma.c if (cfg->u.rx.einfo_present) rx 175 drivers/soc/ti/knav_dma.c if (cfg->u.rx.psinfo_present) rx 177 drivers/soc/ti/knav_dma.c if (cfg->u.rx.err_mode == DMA_RETRY) rx 179 drivers/soc/ti/knav_dma.c v |= (cfg->u.rx.desc_type & DESC_TYPE_MASK) << DESC_TYPE_SHIFT; rx 180 drivers/soc/ti/knav_dma.c if (cfg->u.rx.psinfo_at_sop) rx 182 drivers/soc/ti/knav_dma.c v |= (cfg->u.rx.sop_offset & CHAN_SOP_OFF_MASK) rx 184 drivers/soc/ti/knav_dma.c v |= cfg->u.rx.dst_q & CHAN_QNUM_MASK; rx 190 drivers/soc/ti/knav_dma.c v = cfg->u.rx.fdq[0] << 16; rx 191 drivers/soc/ti/knav_dma.c v |= cfg->u.rx.fdq[1] & CHAN_QNUM_MASK; rx 194 drivers/soc/ti/knav_dma.c v = cfg->u.rx.fdq[2] << 16; rx 195 drivers/soc/ti/knav_dma.c v |= cfg->u.rx.fdq[3] & CHAN_QNUM_MASK; rx 335 drivers/soc/ti/knav_dma.c chan->cfg.u.rx.einfo_present, rx 336 drivers/soc/ti/knav_dma.c chan->cfg.u.rx.psinfo_present, rx 337 drivers/soc/ti/knav_dma.c chan->cfg.u.rx.desc_type); rx 339 drivers/soc/ti/knav_dma.c chan->cfg.u.rx.dst_q, rx 340 drivers/soc/ti/knav_dma.c chan->cfg.u.rx.thresh); rx 342 drivers/soc/ti/knav_dma.c seq_printf(s, "[%d]", chan->cfg.u.rx.fdq[i]); rx 53 drivers/spi/spi-altera.c unsigned char *rx; rx 100 drivers/spi/spi-altera.c if (hw->rx) { rx 103 drivers/spi/spi-altera.c hw->rx[hw->count] = rxd; rx 106 
drivers/spi/spi-altera.c hw->rx[hw->count * 2] = rxd; rx 107 drivers/spi/spi-altera.c hw->rx[hw->count * 2 + 1] = rxd >> 8; rx 121 drivers/spi/spi-altera.c hw->rx = t->rx_buf; rx 50 drivers/spi/spi-au1550.c u8 *rx; rx 317 drivers/spi/spi-au1550.c hw->rx = t->rx_buf; rx 357 drivers/spi/spi-au1550.c hw->rx = hw->dma_rx_tmpbuf; rx 366 drivers/spi/spi-au1550.c hw->tx = hw->rx; rx 370 drivers/spi/spi-au1550.c res = au1xxx_dbdma_put_dest(hw->dma_rx_ch, virt_to_phys(hw->rx), rx 474 drivers/spi/spi-au1550.c if (hw->rx) { \ rx 475 drivers/spi/spi-au1550.c *(u##size *)hw->rx = (u##size)fifoword; \ rx 476 drivers/spi/spi-au1550.c hw->rx += (size) / 8; \ rx 509 drivers/spi/spi-au1550.c hw->rx = t->rx_buf; rx 163 drivers/spi/spi-bcm63xx-hsspi.c u8 *rx = t->rx_buf; rx 168 drivers/spi/spi-bcm63xx-hsspi.c if (tx && rx) rx 172 drivers/spi/spi-bcm63xx-hsspi.c else if (rx) rx 212 drivers/spi/spi-bcm63xx-hsspi.c if (rx) { rx 213 drivers/spi/spi-bcm63xx-hsspi.c memcpy_fromio(rx, bs->fifo, curr_step); rx 214 drivers/spi/spi-bcm63xx-hsspi.c rx += curr_step; rx 67 drivers/spi/spi-bitbang.c u8 *rx = t->rx_buf; rx 75 drivers/spi/spi-bitbang.c if (rx) rx 76 drivers/spi/spi-bitbang.c *rx++ = word; rx 95 drivers/spi/spi-bitbang.c u16 *rx = t->rx_buf; rx 103 drivers/spi/spi-bitbang.c if (rx) rx 104 drivers/spi/spi-bitbang.c *rx++ = word; rx 123 drivers/spi/spi-bitbang.c u32 *rx = t->rx_buf; rx 131 drivers/spi/spi-bitbang.c if (rx) rx 132 drivers/spi/spi-bitbang.c *rx++ = word; rx 116 drivers/spi/spi-davinci.c void *rx; rx 137 drivers/spi/spi-davinci.c if (dspi->rx) { rx 138 drivers/spi/spi-davinci.c u8 *rx = dspi->rx; rx 139 drivers/spi/spi-davinci.c *rx++ = (u8)data; rx 140 drivers/spi/spi-davinci.c dspi->rx = rx; rx 146 drivers/spi/spi-davinci.c if (dspi->rx) { rx 147 drivers/spi/spi-davinci.c u16 *rx = dspi->rx; rx 148 drivers/spi/spi-davinci.c *rx++ = (u16)data; rx 149 drivers/spi/spi-davinci.c dspi->rx = rx; rx 591 drivers/spi/spi-davinci.c dspi->rx = t->rx_buf; rx 191 drivers/spi/spi-dln2.c } rx; rx 192 drivers/spi/spi-dln2.c unsigned rx_len = sizeof(rx); rx 196 drivers/spi/spi-dln2.c &rx, &rx_len); rx 199 drivers/spi/spi-dln2.c if (rx_len < sizeof(rx)) rx 202 drivers/spi/spi-dln2.c *cs_num = le16_to_cpu(rx.cs_count); rx 217 drivers/spi/spi-dln2.c } rx; rx 218 drivers/spi/spi-dln2.c unsigned rx_len = sizeof(rx); rx 222 drivers/spi/spi-dln2.c ret = dln2_transfer(dln2->pdev, cmd, &tx, sizeof(tx), &rx, &rx_len); rx 225 drivers/spi/spi-dln2.c if (rx_len < sizeof(rx)) rx 228 drivers/spi/spi-dln2.c *freq = le32_to_cpu(rx.speed); rx 267 drivers/spi/spi-dln2.c } rx; rx 268 drivers/spi/spi-dln2.c int rx_len = sizeof(rx); rx 274 drivers/spi/spi-dln2.c &rx, &rx_len); rx 277 drivers/spi/spi-dln2.c if (rx_len < sizeof(rx)) rx 326 drivers/spi/spi-dln2.c } *rx = dln2->buf; rx 327 drivers/spi/spi-dln2.c unsigned rx_len = sizeof(*rx); rx 333 drivers/spi/spi-dln2.c &tx, sizeof(tx), rx, &rx_len); rx 336 drivers/spi/spi-dln2.c if (rx_len < sizeof(*rx)) rx 338 drivers/spi/spi-dln2.c if (rx->count > ARRAY_SIZE(rx->frame_sizes)) rx 342 drivers/spi/spi-dln2.c for (i = 0; i < rx->count; i++) rx 343 drivers/spi/spi-dln2.c *bpw_mask |= BIT(rx->frame_sizes[i] - 1); rx 459 drivers/spi/spi-dln2.c } __packed *rx = dln2->buf; rx 460 drivers/spi/spi-dln2.c unsigned rx_len = sizeof(*rx); rx 462 drivers/spi/spi-dln2.c BUILD_BUG_ON(sizeof(*rx) > DLN2_SPI_BUF_SIZE); rx 472 drivers/spi/spi-dln2.c rx, &rx_len); rx 475 drivers/spi/spi-dln2.c if (rx_len < sizeof(rx->size) + data_len) rx 477 drivers/spi/spi-dln2.c if (le16_to_cpu(rx->size) != data_len) 
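The spi-altera, spi-au1550 and spi-bitbang entries above share the simplest receive pattern in this group: a per-word transfer loop that only dereferences the rx pointer when the caller actually supplied a receive buffer (if (rx) *rx++ = word;). A minimal stand-alone sketch of that pattern follows; txrx_word() is a hypothetical stand-in for the controller-specific word exchange and simply loops the data back so the example runs without hardware.

/* Sketch of the spi-bitbang style per-word loop: tx and rx buffers are
 * both optional, and rx is only written when the caller provided one.
 * txrx_word() is a hypothetical stand-in for the real word exchange.
 */
#include <stdio.h>
#include <stddef.h>
#include <stdint.h>

static uint8_t txrx_word(uint8_t out)
{
	return out;                          /* loopback instead of real SPI I/O */
}

static size_t spi_txrx_bufs(const uint8_t *tx, uint8_t *rx, size_t len)
{
	size_t i;

	for (i = 0; i < len; i++) {
		uint8_t word = tx ? tx[i] : 0;   /* many drivers clock out zeros without tx */

		word = txrx_word(word);
		if (rx)
			rx[i] = word;                /* only store when an rx buffer exists */
	}
	return len;
}

int main(void)
{
	const uint8_t tx[4] = { 0xde, 0xad, 0xbe, 0xef };
	uint8_t rx[4] = { 0 };

	spi_txrx_bufs(tx, rx, sizeof(tx));
	printf("rx: %02x %02x %02x %02x\n", rx[0], rx[1], rx[2], rx[3]);
	return 0;
}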
rx 480 drivers/spi/spi-dln2.c dln2_spi_copy_from_buf(data, rx->buf, data_len, dln2->bpw); rx 501 drivers/spi/spi-dln2.c } __packed *rx; rx 505 drivers/spi/spi-dln2.c sizeof(*rx) > DLN2_SPI_BUF_SIZE); rx 516 drivers/spi/spi-dln2.c rx = dln2->buf; rx 525 drivers/spi/spi-dln2.c rx_len = sizeof(*rx); rx 528 drivers/spi/spi-dln2.c rx, &rx_len); rx 531 drivers/spi/spi-dln2.c if (rx_len < sizeof(rx->size) + data_len) rx 533 drivers/spi/spi-dln2.c if (le16_to_cpu(rx->size) != data_len) rx 536 drivers/spi/spi-dln2.c dln2_spi_copy_from_buf(rx_data, rx->buf, data_len, dln2->bpw); rx 42 drivers/spi/spi-dw-mid.c struct dw_dma_slave *rx = dws->dma_rx; rx 57 drivers/spi/spi-dw-mid.c rx->dma_dev = &dma_dev->dev; rx 58 drivers/spi/spi-dw-mid.c dws->rxchan = dma_request_channel(mask, mid_spi_dma_chan_filter, rx); rx 167 drivers/spi/spi-dw.c rxtx_gap = ((dws->rx_end - dws->rx) - (dws->tx_end - dws->tx)) rx 176 drivers/spi/spi-dw.c u32 rx_left = (dws->rx_end - dws->rx) / dws->n_bytes; rx 214 drivers/spi/spi-dw.c *(u8 *)(dws->rx) = rxw; rx 216 drivers/spi/spi-dw.c *(u16 *)(dws->rx) = rxw; rx 218 drivers/spi/spi-dw.c dws->rx += dws->n_bytes; rx 244 drivers/spi/spi-dw.c if (dws->rx_end == dws->rx) { rx 283 drivers/spi/spi-dw.c } while (dws->rx_end > dws->rx); rx 303 drivers/spi/spi-dw.c dws->rx = transfer->rx_buf; rx 304 drivers/spi/spi-dw.c dws->rx_end = dws->rx + transfer->len; rx 340 drivers/spi/spi-dw.c if (dws->rx && dws->tx) rx 342 drivers/spi/spi-dw.c else if (dws->rx) rx 124 drivers/spi/spi-dw.h void *rx; rx 90 drivers/spi/spi-ep93xx.c size_t rx; rx 202 drivers/spi/spi-ep93xx.c ((u16 *)xfer->rx_buf)[espi->rx] = val; rx 203 drivers/spi/spi-ep93xx.c espi->rx += 2; rx 206 drivers/spi/spi-ep93xx.c ((u8 *)xfer->rx_buf)[espi->rx] = val; rx 207 drivers/spi/spi-ep93xx.c espi->rx += 1; rx 239 drivers/spi/spi-ep93xx.c if (espi->rx == xfer->len) rx 493 drivers/spi/spi-ep93xx.c espi->rx = 0; rx 138 drivers/spi/spi-fsl-cpm.c mspi->rx_dma = dma_map_single(dev, mspi->rx, t->len, rx 187 drivers/spi/spi-fsl-dspi.c void *rx; rx 231 drivers/spi/spi-fsl-dspi.c if (!dspi->rx) rx 238 drivers/spi/spi-fsl-dspi.c *(u8 *)dspi->rx = rxdata; rx 240 drivers/spi/spi-fsl-dspi.c *(u16 *)dspi->rx = rxdata; rx 242 drivers/spi/spi-fsl-dspi.c *(u32 *)dspi->rx = rxdata; rx 243 drivers/spi/spi-fsl-dspi.c dspi->rx += dspi->bytes_per_word; rx 260 drivers/spi/spi-fsl-dspi.c if (dspi->rx) { rx 638 drivers/spi/spi-fsl-dspi.c while ((dspi->rx < dspi->rx_end) && fifo_size--) rx 749 drivers/spi/spi-fsl-dspi.c dspi->rx = transfer->rx_buf; rx 750 drivers/spi/spi-fsl-dspi.c dspi->rx_end = dspi->rx + transfer->len; rx 32 drivers/spi/spi-fsl-lib.c type *rx = mpc8xxx_spi->rx; \ rx 33 drivers/spi/spi-fsl-lib.c *rx++ = (type)(data >> mpc8xxx_spi->rx_shift); \ rx 34 drivers/spi/spi-fsl-lib.c mpc8xxx_spi->rx = rx; \ rx 26 drivers/spi/spi-fsl-lib.h void *rx; rx 111 drivers/spi/spi-fsl-lpspi.c void (*rx)(struct fsl_lpspi_data *); rx 273 drivers/spi/spi-fsl-lpspi.c fsl_lpspi->rx(fsl_lpspi); rx 358 drivers/spi/spi-fsl-lpspi.c struct dma_slave_config rx = {}, tx = {}; rx 387 drivers/spi/spi-fsl-lpspi.c rx.direction = DMA_DEV_TO_MEM; rx 388 drivers/spi/spi-fsl-lpspi.c rx.src_addr = fsl_lpspi->base_phys + IMX7ULP_RDR; rx 389 drivers/spi/spi-fsl-lpspi.c rx.src_addr_width = buswidth; rx 390 drivers/spi/spi-fsl-lpspi.c rx.src_maxburst = 1; rx 391 drivers/spi/spi-fsl-lpspi.c ret = dmaengine_slave_config(controller->dma_rx, &rx); rx 456 drivers/spi/spi-fsl-lpspi.c fsl_lpspi->rx = fsl_lpspi_buf_rx_u8; rx 459 drivers/spi/spi-fsl-lpspi.c fsl_lpspi->rx = 
fsl_lpspi_buf_rx_u16; rx 462 drivers/spi/spi-fsl-lpspi.c fsl_lpspi->rx = fsl_lpspi_buf_rx_u32; rx 572 drivers/spi/spi-fsl-lpspi.c struct sg_table *tx = &transfer->tx_sg, *rx = &transfer->rx_sg; rx 580 drivers/spi/spi-fsl-lpspi.c rx->sgl, rx->nents, DMA_DEV_TO_MEM, rx 336 drivers/spi/spi-fsl-spi.c mpc8xxx_spi->rx = t->rx_buf; rx 505 drivers/spi/spi-fsl-spi.c if (mspi->rx) rx 99 drivers/spi/spi-imx.c void (*rx)(struct spi_imx_data *); rx 1126 drivers/spi/spi-imx.c spi_imx->rx(spi_imx); rx 1154 drivers/spi/spi-imx.c struct dma_slave_config rx = {}, tx = {}; rx 1181 drivers/spi/spi-imx.c rx.direction = DMA_DEV_TO_MEM; rx 1182 drivers/spi/spi-imx.c rx.src_addr = spi_imx->base_phys + MXC_CSPIRXDATA; rx 1183 drivers/spi/spi-imx.c rx.src_addr_width = buswidth; rx 1184 drivers/spi/spi-imx.c rx.src_maxburst = spi_imx->wml; rx 1185 drivers/spi/spi-imx.c ret = dmaengine_slave_config(master->dma_rx, &rx); rx 1214 drivers/spi/spi-imx.c spi_imx->rx = spi_imx_buf_rx_swap; rx 1220 drivers/spi/spi-imx.c spi_imx->rx = spi_imx_buf_rx_u8; rx 1223 drivers/spi/spi-imx.c spi_imx->rx = spi_imx_buf_rx_u16; rx 1226 drivers/spi/spi-imx.c spi_imx->rx = spi_imx_buf_rx_u32; rx 1238 drivers/spi/spi-imx.c spi_imx->rx = mx53_ecspi_rx_slave; rx 1340 drivers/spi/spi-imx.c struct sg_table *tx = &transfer->tx_sg, *rx = &transfer->rx_sg; rx 1341 drivers/spi/spi-imx.c struct scatterlist *last_sg = sg_last(rx->sgl, rx->nents); rx 1372 drivers/spi/spi-imx.c rx->sgl, rx->nents, DMA_DEV_TO_MEM, rx 107 drivers/spi/spi-jcore.c unsigned char *rx; rx 115 drivers/spi/spi-jcore.c rx = t->rx_buf; rx 128 drivers/spi/spi-jcore.c if (rx) rx 129 drivers/spi/spi-jcore.c *rx++ = readl(data_reg); rx 179 drivers/spi/spi-lantiq-ssc.c u8 *rx; rx 528 drivers/spi/spi-lantiq-ssc.c rx8 = spi->rx; rx 531 drivers/spi/spi-lantiq-ssc.c spi->rx++; rx 534 drivers/spi/spi-lantiq-ssc.c rx16 = (u16 *) spi->rx; rx 537 drivers/spi/spi-lantiq-ssc.c spi->rx += 2; rx 540 drivers/spi/spi-lantiq-ssc.c rx32 = (u32 *) spi->rx; rx 543 drivers/spi/spi-lantiq-ssc.c spi->rx += 4; rx 575 drivers/spi/spi-lantiq-ssc.c rx8 = spi->rx; rx 582 drivers/spi/spi-lantiq-ssc.c spi->rx++; rx 586 drivers/spi/spi-lantiq-ssc.c rx32 = (u32 *) spi->rx; rx 590 drivers/spi/spi-lantiq-ssc.c spi->rx += 4; rx 618 drivers/spi/spi-lantiq-ssc.c if (spi->rx && spi->rx_todo) rx 625 drivers/spi/spi-lantiq-ssc.c } else if (spi->rx) { rx 686 drivers/spi/spi-lantiq-ssc.c spi->rx = t->rx_buf; rx 695 drivers/spi/spi-lantiq-ssc.c if (spi->rx) { rx 465 drivers/spi/spi-loopback-test.c void *rx) rx 481 drivers/spi/spi-loopback-test.c rx, SPI_TEST_MAX_SIZE_PLUS)) { rx 497 drivers/spi/spi-loopback-test.c for (addr = rx; addr < (u8 *)rx + SPI_TEST_MAX_SIZE_PLUS; addr++) { rx 560 drivers/spi/spi-loopback-test.c void *tx, void *rx) rx 569 drivers/spi/spi-loopback-test.c ret = spi_check_rx_ranges(spi, msg, rx); rx 622 drivers/spi/spi-loopback-test.c void *tx, void *rx) rx 641 drivers/spi/spi-loopback-test.c *ptr = rx + off; rx 749 drivers/spi/spi-loopback-test.c void *tx, void *rx) rx 759 drivers/spi/spi-loopback-test.c memset(rx, SPI_TEST_PATTERN_DO_NOT_WRITE, SPI_TEST_MAX_SIZE_PLUS); rx 767 drivers/spi/spi-loopback-test.c (void *)tx, rx); rx 773 drivers/spi/spi-loopback-test.c (void *)tx, rx); rx 788 drivers/spi/spi-loopback-test.c ret = test->execute_msg(spi, test, tx, rx); rx 790 drivers/spi/spi-loopback-test.c ret = spi_test_execute_msg(spi, test, tx, rx); rx 811 drivers/spi/spi-loopback-test.c void *tx, void *rx, rx 876 drivers/spi/spi-loopback-test.c return _spi_test_run_iter(spi, &test, tx, rx); rx 890 
drivers/spi/spi-loopback-test.c void *tx, void *rx) rx 932 drivers/spi/spi-loopback-test.c ret = spi_test_check_loopback_result(spi, msg, tx, rx); rx 963 drivers/spi/spi-loopback-test.c void *tx, void *rx) rx 1001 drivers/spi/spi-loopback-test.c tx, rx, rx 1026 drivers/spi/spi-loopback-test.c char *rx = NULL, *tx = NULL; rx 1034 drivers/spi/spi-loopback-test.c rx = vmalloc(SPI_TEST_MAX_SIZE_PLUS); rx 1036 drivers/spi/spi-loopback-test.c rx = kzalloc(SPI_TEST_MAX_SIZE_PLUS, GFP_KERNEL); rx 1037 drivers/spi/spi-loopback-test.c if (!rx) rx 1058 drivers/spi/spi-loopback-test.c ret = test->run_test(spi, test, tx, rx); rx 1060 drivers/spi/spi-loopback-test.c ret = spi_test_run_test(spi, test, tx, rx); rx 1074 drivers/spi/spi-loopback-test.c kvfree(rx); rx 106 drivers/spi/spi-lp8841-rtc.c u8 *rx = t->rx_buf; rx 119 drivers/spi/spi-lp8841-rtc.c } else if (rx) { rx 125 drivers/spi/spi-lp8841-rtc.c *rx++ = word; rx 172 drivers/spi/spi-mt7621.c int rx = min(rx_len, 32); rx 176 drivers/spi/spi-mt7621.c val |= (rx * 8) << 12; rx 187 drivers/spi/spi-mt7621.c for (i = 0; i < rx; i++) { rx 190 drivers/spi/spi-omap-100k.c u8 *rx; rx 193 drivers/spi/spi-omap-100k.c rx = xfer->rx_buf; rx 200 drivers/spi/spi-omap-100k.c *rx++ = spi100k_read_data(spi->master, word_len); rx 203 drivers/spi/spi-omap-100k.c u16 *rx; rx 206 drivers/spi/spi-omap-100k.c rx = xfer->rx_buf; rx 213 drivers/spi/spi-omap-100k.c *rx++ = spi100k_read_data(spi->master, word_len); rx 216 drivers/spi/spi-omap-100k.c u32 *rx; rx 219 drivers/spi/spi-omap-100k.c rx = xfer->rx_buf; rx 226 drivers/spi/spi-omap-100k.c *rx = spi100k_read_data(spi->master, word_len); rx 587 drivers/spi/spi-omap2-mcspi.c u8 *rx; rx 620 drivers/spi/spi-omap2-mcspi.c rx = xfer->rx_buf; rx 636 drivers/spi/spi-omap2-mcspi.c if (rx != NULL) rx 667 drivers/spi/spi-omap2-mcspi.c if (rx == NULL) { rx 717 drivers/spi/spi-omap2-mcspi.c u8 *rx; rx 720 drivers/spi/spi-omap2-mcspi.c rx = xfer->rx_buf; rx 735 drivers/spi/spi-omap2-mcspi.c if (rx != NULL) { rx 745 drivers/spi/spi-omap2-mcspi.c *rx++ = readl_relaxed(rx_reg); rx 747 drivers/spi/spi-omap2-mcspi.c word_len, *(rx - 1)); rx 759 drivers/spi/spi-omap2-mcspi.c *rx++ = readl_relaxed(rx_reg); rx 761 drivers/spi/spi-omap2-mcspi.c word_len, *(rx - 1)); rx 765 drivers/spi/spi-omap2-mcspi.c u16 *rx; rx 768 drivers/spi/spi-omap2-mcspi.c rx = xfer->rx_buf; rx 782 drivers/spi/spi-omap2-mcspi.c if (rx != NULL) { rx 792 drivers/spi/spi-omap2-mcspi.c *rx++ = readl_relaxed(rx_reg); rx 794 drivers/spi/spi-omap2-mcspi.c word_len, *(rx - 1)); rx 806 drivers/spi/spi-omap2-mcspi.c *rx++ = readl_relaxed(rx_reg); rx 808 drivers/spi/spi-omap2-mcspi.c word_len, *(rx - 1)); rx 812 drivers/spi/spi-omap2-mcspi.c u32 *rx; rx 815 drivers/spi/spi-omap2-mcspi.c rx = xfer->rx_buf; rx 829 drivers/spi/spi-omap2-mcspi.c if (rx != NULL) { rx 839 drivers/spi/spi-omap2-mcspi.c *rx++ = readl_relaxed(rx_reg); rx 841 drivers/spi/spi-omap2-mcspi.c word_len, *(rx - 1)); rx 853 drivers/spi/spi-omap2-mcspi.c *rx++ = readl_relaxed(rx_reg); rx 855 drivers/spi/spi-omap2-mcspi.c word_len, *(rx - 1)); rx 464 drivers/spi/spi-orion.c u8 *rx = xfer->rx_buf; rx 467 drivers/spi/spi-orion.c if (orion_spi_write_read_8bit(spi, &tx, &rx) < 0) rx 475 drivers/spi/spi-orion.c u16 *rx = xfer->rx_buf; rx 478 drivers/spi/spi-orion.c if (orion_spi_write_read_16bit(spi, &tx, &rx) < 0) rx 116 drivers/spi/spi-pic32.c const void *rx; rx 176 drivers/spi/spi-pic32.c rxtx_gap = ((pic32s->rx_end - pic32s->rx) - rx 184 drivers/spi/spi-pic32.c u32 rx_left = (pic32s->rx_end - pic32s->rx) / n_bytes; 
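The spi-dw, spi-pic32 and (further down) spi-pxa2xx entries track receive progress with a moving rx pointer and a fixed rx_end: every word drained from the RX FIFO is stored at rx, the pointer advances by the word size, and the transfer is complete when rx reaches rx_end (the rx_left and rxtx_gap expressions above are both derived from that same difference). Below is a self-contained sketch of that bookkeeping under stated assumptions: read_fifo_word() and drain_fifo() are hypothetical names, the FIFO is simulated, and memcpy() is used for the store where the real drivers cast and write directly.

/* Sketch of the rx / rx_end progress tracking used by spi-dw and
 * spi-pic32 style drivers: rx advances by n_bytes per FIFO word and
 * completion is simply rx == rx_end.  read_fifo_word() is a
 * hypothetical stand-in for the data-register read; it returns a
 * counter so the example runs without hardware.
 */
#include <stdio.h>
#include <string.h>
#include <stdint.h>

struct xfer_state {
	uint8_t *rx;          /* next byte to fill, like dws->rx          */
	uint8_t *rx_end;      /* one past the last byte, like dws->rx_end */
	unsigned int n_bytes; /* bytes per FIFO word: 1, 2 or 4           */
};

static uint32_t read_fifo_word(void)
{
	static uint32_t v;

	return v++;                      /* simulated RX FIFO contents */
}

/* Drain up to 'avail' words from the FIFO into the rx buffer. */
static void drain_fifo(struct xfer_state *st, unsigned int avail)
{
	while (avail-- && st->rx < st->rx_end) {
		uint32_t w = read_fifo_word();

		/* The drivers cast and store (*(u16 *)dws->rx = rxw);
		 * memcpy keeps this sketch portable and alias-safe. */
		memcpy(st->rx, &w, st->n_bytes);
		st->rx += st->n_bytes;       /* the pointer itself is the progress counter */
	}
}

int main(void)
{
	uint8_t buf[8];
	struct xfer_state st = { buf, buf + sizeof(buf), 2 };

	while (st.rx < st.rx_end)
		drain_fifo(&st, 2);          /* pretend two words arrive per interrupt */

	printf("received %d bytes\n", (int)(st.rx - buf));
	return 0;
}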
rx 197 drivers/spi/spi-pic32.c *(__type *)(pic32s->rx) = v; \ rx 198 drivers/spi/spi-pic32.c pic32s->rx += sizeof(__type); \ rx 268 drivers/spi/spi-pic32.c if (pic32s->rx_end == pic32s->rx) { rx 537 drivers/spi/spi-pic32.c pic32s->rx = (const void *)transfer->rx_buf; rx 539 drivers/spi/spi-pic32.c pic32s->rx_end = pic32s->rx + transfer->len; rx 380 drivers/spi/spi-pl022.c void *rx; rx 685 drivers/spi/spi-pl022.c __func__, pl022->rx, pl022->rx_end, pl022->tx, pl022->tx_end); rx 689 drivers/spi/spi-pl022.c && (pl022->rx < pl022->rx_end)) { rx 695 drivers/spi/spi-pl022.c *(u8 *) (pl022->rx) = rx 699 drivers/spi/spi-pl022.c *(u16 *) (pl022->rx) = rx 703 drivers/spi/spi-pl022.c *(u32 *) (pl022->rx) = rx 707 drivers/spi/spi-pl022.c pl022->rx += (pl022->cur_chip->n_bytes); rx 738 drivers/spi/spi-pl022.c && (pl022->rx < pl022->rx_end)) { rx 744 drivers/spi/spi-pl022.c *(u8 *) (pl022->rx) = rx 748 drivers/spi/spi-pl022.c *(u16 *) (pl022->rx) = rx 752 drivers/spi/spi-pl022.c *(u32 *) (pl022->rx) = rx 756 drivers/spi/spi-pl022.c pl022->rx += (pl022->cur_chip->n_bytes); rx 1044 drivers/spi/spi-pl022.c setup_dma_scatter(pl022, pl022->rx, rx 1315 drivers/spi/spi-pl022.c if (pl022->rx >= pl022->rx_end) { rx 1319 drivers/spi/spi-pl022.c if (unlikely(pl022->rx > pl022->rx_end)) { rx 1323 drivers/spi/spi-pl022.c (u32) (pl022->rx - pl022->rx_end)); rx 1360 drivers/spi/spi-pl022.c pl022->rx = (void *)transfer->rx_buf; rx 1361 drivers/spi/spi-pl022.c pl022->rx_end = pl022->rx + pl022->cur_transfer->len; rx 1364 drivers/spi/spi-pl022.c pl022->read = pl022->rx ? pl022->cur_chip->read : READING_NULL; rx 1548 drivers/spi/spi-pl022.c while (pl022->tx < pl022->tx_end || pl022->rx < pl022->rx_end) { rx 128 drivers/spi/spi-ppc4xx.c unsigned char *rx; rx 153 drivers/spi/spi-ppc4xx.c hw->rx = t->rx_buf; rx 329 drivers/spi/spi-ppc4xx.c if (hw->rx) rx 330 drivers/spi/spi-ppc4xx.c hw->rx[count] = data; rx 106 drivers/spi/spi-pxa2xx-pci.c struct dw_dma_slave *tx, *rx; rx 134 drivers/spi/spi-pxa2xx-pci.c rx = c->rx_param; rx 135 drivers/spi/spi-pxa2xx-pci.c rx->dma_dev = &dma_dev->dev; rx 504 drivers/spi/spi-pxa2xx.c && (drv_data->rx < drv_data->rx_end)) { rx 506 drivers/spi/spi-pxa2xx.c drv_data->rx += n_bytes; rx 509 drivers/spi/spi-pxa2xx.c return drv_data->rx == drv_data->rx_end; rx 527 drivers/spi/spi-pxa2xx.c && (drv_data->rx < drv_data->rx_end)) { rx 528 drivers/spi/spi-pxa2xx.c *(u8 *)(drv_data->rx) = pxa2xx_spi_read(drv_data, SSDR); rx 529 drivers/spi/spi-pxa2xx.c ++drv_data->rx; rx 532 drivers/spi/spi-pxa2xx.c return drv_data->rx == drv_data->rx_end; rx 550 drivers/spi/spi-pxa2xx.c && (drv_data->rx < drv_data->rx_end)) { rx 551 drivers/spi/spi-pxa2xx.c *(u16 *)(drv_data->rx) = pxa2xx_spi_read(drv_data, SSDR); rx 552 drivers/spi/spi-pxa2xx.c drv_data->rx += 2; rx 555 drivers/spi/spi-pxa2xx.c return drv_data->rx == drv_data->rx_end; rx 573 drivers/spi/spi-pxa2xx.c && (drv_data->rx < drv_data->rx_end)) { rx 574 drivers/spi/spi-pxa2xx.c *(u32 *)(drv_data->rx) = pxa2xx_spi_read(drv_data, SSDR); rx 575 drivers/spi/spi-pxa2xx.c drv_data->rx += 4; rx 578 drivers/spi/spi-pxa2xx.c return drv_data->rx == drv_data->rx_end; rx 685 drivers/spi/spi-pxa2xx.c bytes_left = drv_data->rx_end - drv_data->rx; rx 984 drivers/spi/spi-pxa2xx.c drv_data->rx = transfer->rx_buf; rx 985 drivers/spi/spi-pxa2xx.c drv_data->rx_end = drv_data->rx + transfer->len; rx 987 drivers/spi/spi-pxa2xx.c drv_data->read = drv_data->rx ? 
chip->read : null_reader; rx 52 drivers/spi/spi-pxa2xx.h void *rx; rx 171 drivers/spi/spi-rockchip.c void *rx; rx 309 drivers/spi/spi-rockchip.c if (!rs->rx) rx 313 drivers/spi/spi-rockchip.c *(u8 *)rs->rx = (u8)rxw; rx 315 drivers/spi/spi-rockchip.c *(u16 *)rs->rx = (u16)rxw; rx 316 drivers/spi/spi-rockchip.c rs->rx += rs->n_bytes; rx 342 drivers/spi/spi-rockchip.c rs->rx = xfer->rx_buf; rx 493 drivers/spi/spi-rspi.c static int rspi_pio_transfer(struct rspi_data *rspi, const u8 *tx, u8 *rx, rx 502 drivers/spi/spi-rspi.c if (rx) { rx 506 drivers/spi/spi-rspi.c *rx++ = ret; rx 522 drivers/spi/spi-rspi.c struct sg_table *rx) rx 531 drivers/spi/spi-rspi.c if (rx) { rx 532 drivers/spi/spi-rspi.c desc_rx = dmaengine_prep_slave_sg(rspi->ctlr->dma_rx, rx->sgl, rx 533 drivers/spi/spi-rspi.c rx->nents, DMA_DEV_TO_MEM, rx 560 drivers/spi/spi-rspi.c if (rx) { rx 582 drivers/spi/spi-rspi.c if (rx && rspi->rx_irq != other_irq) rx 589 drivers/spi/spi-rspi.c if (rx) rx 605 drivers/spi/spi-rspi.c if (rx) rx 613 drivers/spi/spi-rspi.c if (rx && rspi->rx_irq != other_irq) rx 619 drivers/spi/spi-rspi.c if (rx) rx 734 drivers/spi/spi-rspi.c u8 *rx, unsigned int len) rx 756 drivers/spi/spi-rspi.c *rx++ = rspi_read_data(rspi); rx 813 drivers/spi/spi-rspi.c u8 *rx = xfer->rx_buf; rx 832 drivers/spi/spi-rspi.c *rx++ = rspi_read_data(rspi); rx 71 drivers/spi/spi-s3c24xx.c unsigned char *rx; rx 275 drivers/spi/spi-s3c24xx.c if (hw->tx && !hw->rx) rx 277 drivers/spi/spi-s3c24xx.c else if (hw->rx && !hw->tx) rx 283 drivers/spi/spi-s3c24xx.c regs.uregs[fiq_rrx] = (long)hw->rx; rx 403 drivers/spi/spi-s3c24xx.c hw->rx = t->rx_buf; rx 441 drivers/spi/spi-s3c24xx.c if (hw->rx) rx 442 drivers/spi/spi-s3c24xx.c hw->rx[count] = readb(hw->regs + S3C2410_SPRDAT); rx 454 drivers/spi/spi-s3c24xx.c if (hw->rx) rx 455 drivers/spi/spi-s3c24xx.c hw->rx[hw->len-1] = readb(hw->regs + S3C2410_SPRDAT); rx 149 drivers/spi/spi-sh-hspi.c u32 rx; rx 185 drivers/spi/spi-sh-hspi.c rx = hspi_read(hspi, SPRBR); rx 187 drivers/spi/spi-sh-hspi.c ((u8 *)t->rx_buf)[i] = (u8)rx; rx 744 drivers/spi/spi-sh-msiof.c void *rx, unsigned int len) rx 752 drivers/spi/spi-sh-msiof.c if (rx) { rx 792 drivers/spi/spi-sh-msiof.c sh_msiof_spi_set_mode_regs(p, tx, rx, 32, len / 4); rx 802 drivers/spi/spi-sh-msiof.c if (rx) rx 807 drivers/spi/spi-sh-msiof.c ret = sh_msiof_spi_start(p, rx); rx 820 drivers/spi/spi-sh-msiof.c if (rx) { rx 838 drivers/spi/spi-sh-msiof.c ret = sh_msiof_spi_stop(p, rx); rx 844 drivers/spi/spi-sh-msiof.c if (rx) rx 852 drivers/spi/spi-sh-msiof.c sh_msiof_spi_stop(p, rx); rx 857 drivers/spi/spi-sh-msiof.c if (rx) rx 261 drivers/spi/spi-sirf.c void *rx; rx 315 drivers/spi/spi-sirf.c u8 *rx = sspi->rx; rx 319 drivers/spi/spi-sirf.c if (rx) { rx 320 drivers/spi/spi-sirf.c *rx++ = (u8) data; rx 321 drivers/spi/spi-sirf.c sspi->rx = rx; rx 343 drivers/spi/spi-sirf.c u16 *rx = sspi->rx; rx 347 drivers/spi/spi-sirf.c if (rx) { rx 348 drivers/spi/spi-sirf.c *rx++ = (u16) data; rx 349 drivers/spi/spi-sirf.c sspi->rx = rx; rx 372 drivers/spi/spi-sirf.c u32 *rx = sspi->rx; rx 376 drivers/spi/spi-sirf.c if (rx) { rx 377 drivers/spi/spi-sirf.c *rx++ = (u32) data; rx 378 drivers/spi/spi-sirf.c sspi->rx = rx; rx 547 drivers/spi/spi-sirf.c sspi->dst_start = dma_map_single(&spi->dev, sspi->rx, t->len, rx 710 drivers/spi/spi-sirf.c sspi->rx = t->rx_buf; rx 210 drivers/spi/spi-stm32.c const struct stm32_spi_reg rx; rx 330 drivers/spi/spi-stm32.c .rx = { STM32F4_SPI_DR }, rx 349 drivers/spi/spi-stm32.c .rx = { STM32H7_SPI_RXDR }, rx 1134 
drivers/spi/spi-stm32.c dma_conf->src_addr = spi->phys_addr + spi->cfg->regs->rx.reg; rx 78 drivers/spi/spi-test.h void *tx, void *rx); rx 80 drivers/spi/spi-test.h void *tx, void *rx); rx 109 drivers/spi/spi-test.h void *tx, void *rx); rx 114 drivers/spi/spi-test.h void *tx, void *rx); rx 587 drivers/staging/fwserial/fwserial.c port->icount.rx += len; rx 1389 drivers/staging/fwserial/fwserial.c icount->rx = port->icount.rx; rx 1408 drivers/staging/fwserial/fwserial.c port->icount.tx + stats.xchars, port->icount.rx); rx 111 drivers/staging/gdm724x/gdm_mux.c static struct mux_rx *get_rx_struct(struct rx_cxt *rx) rx 116 drivers/staging/gdm724x/gdm_mux.c spin_lock_irqsave(&rx->free_list_lock, flags); rx 118 drivers/staging/gdm724x/gdm_mux.c if (list_empty(&rx->rx_free_list)) { rx 119 drivers/staging/gdm724x/gdm_mux.c spin_unlock_irqrestore(&rx->free_list_lock, flags); rx 123 drivers/staging/gdm724x/gdm_mux.c r = list_entry(rx->rx_free_list.prev, struct mux_rx, free_list); rx 126 drivers/staging/gdm724x/gdm_mux.c spin_unlock_irqrestore(&rx->free_list_lock, flags); rx 131 drivers/staging/gdm724x/gdm_mux.c static void put_rx_struct(struct rx_cxt *rx, struct mux_rx *r) rx 135 drivers/staging/gdm724x/gdm_mux.c spin_lock_irqsave(&rx->free_list_lock, flags); rx 136 drivers/staging/gdm724x/gdm_mux.c list_add_tail(&r->free_list, &rx->rx_free_list); rx 137 drivers/staging/gdm724x/gdm_mux.c spin_unlock_irqrestore(&rx->free_list_lock, flags); rx 210 drivers/staging/gdm724x/gdm_mux.c struct rx_cxt *rx = &mux_dev->rx; rx 215 drivers/staging/gdm724x/gdm_mux.c spin_lock_irqsave(&rx->to_host_lock, flags); rx 216 drivers/staging/gdm724x/gdm_mux.c if (list_empty(&rx->to_host_list)) { rx 217 drivers/staging/gdm724x/gdm_mux.c spin_unlock_irqrestore(&rx->to_host_lock, flags); rx 220 drivers/staging/gdm724x/gdm_mux.c r = list_entry(rx->to_host_list.next, struct mux_rx, rx 223 drivers/staging/gdm724x/gdm_mux.c spin_unlock_irqrestore(&rx->to_host_lock, flags); rx 229 drivers/staging/gdm724x/gdm_mux.c put_rx_struct(rx, r); rx 233 drivers/staging/gdm724x/gdm_mux.c static void remove_rx_submit_list(struct mux_rx *r, struct rx_cxt *rx) rx 238 drivers/staging/gdm724x/gdm_mux.c spin_lock_irqsave(&rx->submit_list_lock, flags); rx 239 drivers/staging/gdm724x/gdm_mux.c list_for_each_entry_safe(r_remove, r_remove_next, &rx->rx_submit_list, rx 244 drivers/staging/gdm724x/gdm_mux.c spin_unlock_irqrestore(&rx->submit_list_lock, flags); rx 251 drivers/staging/gdm724x/gdm_mux.c struct rx_cxt *rx = &mux_dev->rx; rx 254 drivers/staging/gdm724x/gdm_mux.c remove_rx_submit_list(r, rx); rx 260 drivers/staging/gdm724x/gdm_mux.c put_rx_struct(rx, r); rx 263 drivers/staging/gdm724x/gdm_mux.c spin_lock_irqsave(&rx->to_host_lock, flags); rx 264 drivers/staging/gdm724x/gdm_mux.c list_add_tail(&r->to_host_list, &rx->to_host_list); rx 266 drivers/staging/gdm724x/gdm_mux.c spin_unlock_irqrestore(&rx->to_host_lock, flags); rx 277 drivers/staging/gdm724x/gdm_mux.c struct rx_cxt *rx = &mux_dev->rx; rx 286 drivers/staging/gdm724x/gdm_mux.c r = get_rx_struct(rx); rx 305 drivers/staging/gdm724x/gdm_mux.c spin_lock_irqsave(&rx->submit_list_lock, flags); rx 306 drivers/staging/gdm724x/gdm_mux.c list_add_tail(&r->rx_submit_list, &rx->rx_submit_list); rx 307 drivers/staging/gdm724x/gdm_mux.c spin_unlock_irqrestore(&rx->submit_list_lock, flags); rx 312 drivers/staging/gdm724x/gdm_mux.c spin_lock_irqsave(&rx->submit_list_lock, flags); rx 314 drivers/staging/gdm724x/gdm_mux.c spin_unlock_irqrestore(&rx->submit_list_lock, flags); rx 316 
drivers/staging/gdm724x/gdm_mux.c put_rx_struct(rx, r); rx 431 drivers/staging/gdm724x/gdm_mux.c struct rx_cxt *rx = &mux_dev->rx; rx 437 drivers/staging/gdm724x/gdm_mux.c spin_lock_irqsave(&rx->submit_list_lock, flags); rx 438 drivers/staging/gdm724x/gdm_mux.c list_for_each_entry_safe(r, r_next, &rx->rx_submit_list, rx 440 drivers/staging/gdm724x/gdm_mux.c spin_unlock_irqrestore(&rx->submit_list_lock, flags); rx 442 drivers/staging/gdm724x/gdm_mux.c spin_lock_irqsave(&rx->submit_list_lock, flags); rx 444 drivers/staging/gdm724x/gdm_mux.c spin_unlock_irqrestore(&rx->submit_list_lock, flags); rx 446 drivers/staging/gdm724x/gdm_mux.c spin_lock_irqsave(&rx->free_list_lock, flags); rx 447 drivers/staging/gdm724x/gdm_mux.c list_for_each_entry_safe(r, r_next, &rx->rx_free_list, free_list) { rx 451 drivers/staging/gdm724x/gdm_mux.c spin_unlock_irqrestore(&rx->free_list_lock, flags); rx 453 drivers/staging/gdm724x/gdm_mux.c spin_lock_irqsave(&rx->to_host_lock, flags); rx 454 drivers/staging/gdm724x/gdm_mux.c list_for_each_entry_safe(r, r_next, &rx->to_host_list, to_host_list) { rx 460 drivers/staging/gdm724x/gdm_mux.c spin_unlock_irqrestore(&rx->to_host_lock, flags); rx 466 drivers/staging/gdm724x/gdm_mux.c struct rx_cxt *rx = &mux_dev->rx; rx 471 drivers/staging/gdm724x/gdm_mux.c INIT_LIST_HEAD(&rx->to_host_list); rx 472 drivers/staging/gdm724x/gdm_mux.c INIT_LIST_HEAD(&rx->rx_submit_list); rx 473 drivers/staging/gdm724x/gdm_mux.c INIT_LIST_HEAD(&rx->rx_free_list); rx 474 drivers/staging/gdm724x/gdm_mux.c spin_lock_init(&rx->to_host_lock); rx 475 drivers/staging/gdm724x/gdm_mux.c spin_lock_init(&rx->submit_list_lock); rx 476 drivers/staging/gdm724x/gdm_mux.c spin_lock_init(&rx->free_list_lock); rx 485 drivers/staging/gdm724x/gdm_mux.c list_add(&r->free_list, &rx->rx_free_list); rx 585 drivers/staging/gdm724x/gdm_mux.c struct rx_cxt *rx; rx 591 drivers/staging/gdm724x/gdm_mux.c rx = &mux_dev->rx; rx 602 drivers/staging/gdm724x/gdm_mux.c spin_lock_irqsave(&rx->submit_list_lock, flags); rx 603 drivers/staging/gdm724x/gdm_mux.c list_for_each_entry_safe(r, r_next, &rx->rx_submit_list, rx 605 drivers/staging/gdm724x/gdm_mux.c spin_unlock_irqrestore(&rx->submit_list_lock, flags); rx 607 drivers/staging/gdm724x/gdm_mux.c spin_lock_irqsave(&rx->submit_list_lock, flags); rx 609 drivers/staging/gdm724x/gdm_mux.c spin_unlock_irqrestore(&rx->submit_list_lock, flags); rx 72 drivers/staging/gdm724x/gdm_mux.h struct rx_cxt rx; rx 208 drivers/staging/gdm724x/gdm_usb.c static struct usb_rx *get_rx_struct(struct rx_cxt *rx, int *no_spc) rx 213 drivers/staging/gdm724x/gdm_usb.c spin_lock_irqsave(&rx->rx_lock, flags); rx 215 drivers/staging/gdm724x/gdm_usb.c if (list_empty(&rx->free_list)) { rx 216 drivers/staging/gdm724x/gdm_usb.c spin_unlock_irqrestore(&rx->rx_lock, flags); rx 220 drivers/staging/gdm724x/gdm_usb.c r = list_entry(rx->free_list.next, struct usb_rx, free_list); rx 223 drivers/staging/gdm724x/gdm_usb.c rx->avail_count--; rx 225 drivers/staging/gdm724x/gdm_usb.c *no_spc = list_empty(&rx->free_list) ? 
1 : 0; rx 227 drivers/staging/gdm724x/gdm_usb.c spin_unlock_irqrestore(&rx->rx_lock, flags); rx 232 drivers/staging/gdm724x/gdm_usb.c static void put_rx_struct(struct rx_cxt *rx, struct usb_rx *r) rx 236 drivers/staging/gdm724x/gdm_usb.c spin_lock_irqsave(&rx->rx_lock, flags); rx 238 drivers/staging/gdm724x/gdm_usb.c list_add_tail(&r->free_list, &rx->free_list); rx 239 drivers/staging/gdm724x/gdm_usb.c rx->avail_count++; rx 241 drivers/staging/gdm724x/gdm_usb.c spin_unlock_irqrestore(&rx->rx_lock, flags); rx 246 drivers/staging/gdm724x/gdm_usb.c struct rx_cxt *rx = &udev->rx; rx 270 drivers/staging/gdm724x/gdm_usb.c spin_lock_irqsave(&rx->submit_lock, flags); rx 271 drivers/staging/gdm724x/gdm_usb.c list_for_each_entry_safe(r, r_next, &rx->rx_submit_list, rx 273 drivers/staging/gdm724x/gdm_usb.c spin_unlock_irqrestore(&rx->submit_lock, flags); rx 275 drivers/staging/gdm724x/gdm_usb.c spin_lock_irqsave(&rx->submit_lock, flags); rx 277 drivers/staging/gdm724x/gdm_usb.c spin_unlock_irqrestore(&rx->submit_lock, flags); rx 279 drivers/staging/gdm724x/gdm_usb.c spin_lock_irqsave(&rx->rx_lock, flags); rx 280 drivers/staging/gdm724x/gdm_usb.c list_for_each_entry_safe(r, r_next, &rx->free_list, free_list) { rx 284 drivers/staging/gdm724x/gdm_usb.c spin_unlock_irqrestore(&rx->rx_lock, flags); rx 286 drivers/staging/gdm724x/gdm_usb.c spin_lock_irqsave(&rx->to_host_lock, flags); rx 287 drivers/staging/gdm724x/gdm_usb.c list_for_each_entry_safe(r, r_next, &rx->to_host_list, to_host_list) { rx 293 drivers/staging/gdm724x/gdm_usb.c spin_unlock_irqrestore(&rx->to_host_lock, flags); rx 301 drivers/staging/gdm724x/gdm_usb.c struct rx_cxt *rx = &udev->rx; rx 313 drivers/staging/gdm724x/gdm_usb.c INIT_LIST_HEAD(&rx->rx_submit_list); rx 314 drivers/staging/gdm724x/gdm_usb.c INIT_LIST_HEAD(&rx->free_list); rx 315 drivers/staging/gdm724x/gdm_usb.c INIT_LIST_HEAD(&rx->to_host_list); rx 317 drivers/staging/gdm724x/gdm_usb.c spin_lock_init(&rx->rx_lock); rx 318 drivers/staging/gdm724x/gdm_usb.c spin_lock_init(&rx->submit_lock); rx 319 drivers/staging/gdm724x/gdm_usb.c spin_lock_init(&rx->to_host_lock); rx 322 drivers/staging/gdm724x/gdm_usb.c rx->avail_count = 0; rx 344 drivers/staging/gdm724x/gdm_usb.c list_add(&r->free_list, &rx->free_list); rx 345 drivers/staging/gdm724x/gdm_usb.c rx->avail_count++; rx 381 drivers/staging/gdm724x/gdm_usb.c struct rx_cxt *rx = &udev->rx; rx 390 drivers/staging/gdm724x/gdm_usb.c spin_lock_irqsave(&rx->to_host_lock, flags); rx 391 drivers/staging/gdm724x/gdm_usb.c if (list_empty(&rx->to_host_list)) { rx 392 drivers/staging/gdm724x/gdm_usb.c spin_unlock_irqrestore(&rx->to_host_lock, flags); rx 395 drivers/staging/gdm724x/gdm_usb.c r = list_entry(rx->to_host_list.next, rx 398 drivers/staging/gdm724x/gdm_usb.c spin_unlock_irqrestore(&rx->to_host_lock, flags); rx 428 drivers/staging/gdm724x/gdm_usb.c put_rx_struct(rx, r); rx 437 drivers/staging/gdm724x/gdm_usb.c static void remove_rx_submit_list(struct usb_rx *r, struct rx_cxt *rx) rx 442 drivers/staging/gdm724x/gdm_usb.c spin_lock_irqsave(&rx->submit_lock, flags); rx 444 drivers/staging/gdm724x/gdm_usb.c &rx->rx_submit_list, rx_submit_list) { rx 450 drivers/staging/gdm724x/gdm_usb.c spin_unlock_irqrestore(&rx->submit_lock, flags); rx 456 drivers/staging/gdm724x/gdm_usb.c struct rx_cxt *rx = r->rx; rx 458 drivers/staging/gdm724x/gdm_usb.c struct lte_udev *udev = container_of(r->rx, struct lte_udev, rx); rx 461 drivers/staging/gdm724x/gdm_usb.c remove_rx_submit_list(r, rx); rx 464 drivers/staging/gdm724x/gdm_usb.c 
spin_lock_irqsave(&rx->to_host_lock, flags); rx 465 drivers/staging/gdm724x/gdm_usb.c list_add_tail(&r->to_host_list, &rx->to_host_list); rx 467 drivers/staging/gdm724x/gdm_usb.c spin_unlock_irqrestore(&rx->to_host_lock, flags); rx 473 drivers/staging/gdm724x/gdm_usb.c put_rx_struct(rx, r); rx 487 drivers/staging/gdm724x/gdm_usb.c struct rx_cxt *rx = &udev->rx; rx 498 drivers/staging/gdm724x/gdm_usb.c r = get_rx_struct(rx, &no_spc); rx 508 drivers/staging/gdm724x/gdm_usb.c r->rx = rx; rx 518 drivers/staging/gdm724x/gdm_usb.c spin_lock_irqsave(&rx->submit_lock, flags); rx 519 drivers/staging/gdm724x/gdm_usb.c list_add_tail(&r->rx_submit_list, &rx->rx_submit_list); rx 520 drivers/staging/gdm724x/gdm_usb.c spin_unlock_irqrestore(&rx->submit_lock, flags); rx 528 drivers/staging/gdm724x/gdm_usb.c spin_lock_irqsave(&rx->submit_lock, flags); rx 530 drivers/staging/gdm724x/gdm_usb.c spin_unlock_irqrestore(&rx->submit_lock, flags); rx 533 drivers/staging/gdm724x/gdm_usb.c put_rx_struct(rx, r); rx 905 drivers/staging/gdm724x/gdm_usb.c struct rx_cxt *rx; rx 912 drivers/staging/gdm724x/gdm_usb.c rx = &udev->rx; rx 920 drivers/staging/gdm724x/gdm_usb.c spin_lock_irqsave(&rx->submit_lock, flags); rx 921 drivers/staging/gdm724x/gdm_usb.c list_for_each_entry_safe(r, r_next, &rx->rx_submit_list, rx 923 drivers/staging/gdm724x/gdm_usb.c spin_unlock_irqrestore(&rx->submit_lock, flags); rx 925 drivers/staging/gdm724x/gdm_usb.c spin_lock_irqsave(&rx->submit_lock, flags); rx 927 drivers/staging/gdm724x/gdm_usb.c spin_unlock_irqrestore(&rx->submit_lock, flags); rx 940 drivers/staging/gdm724x/gdm_usb.c struct rx_cxt *rx; rx 947 drivers/staging/gdm724x/gdm_usb.c rx = &udev->rx; rx 955 drivers/staging/gdm724x/gdm_usb.c spin_lock_irqsave(&rx->rx_lock, flags); rx 956 drivers/staging/gdm724x/gdm_usb.c issue_count = rx->avail_count - MAX_RX_SUBMIT_COUNT; rx 957 drivers/staging/gdm724x/gdm_usb.c spin_unlock_irqrestore(&rx->rx_lock, flags); rx 58 drivers/staging/gdm724x/gdm_usb.h struct rx_cxt *rx; rx 87 drivers/staging/gdm724x/gdm_usb.h struct rx_cxt rx; rx 84 drivers/staging/iio/meter/ade7854-i2c.c ret = i2c_master_recv(st->i2c, st->rx, bits); rx 90 drivers/staging/iio/meter/ade7854-i2c.c *val = st->rx[0]; rx 93 drivers/staging/iio/meter/ade7854-i2c.c *val = (st->rx[0] << 8) | st->rx[1]; rx 96 drivers/staging/iio/meter/ade7854-i2c.c *val = (st->rx[0] << 16) | (st->rx[1] << 8) | st->rx[2]; rx 99 drivers/staging/iio/meter/ade7854-i2c.c *val = (st->rx[0] << 24) | (st->rx[1] << 16) | rx 100 drivers/staging/iio/meter/ade7854-i2c.c (st->rx[2] << 8) | st->rx[3]; rx 83 drivers/staging/iio/meter/ade7854-spi.c .rx_buf = st->rx, rx 104 drivers/staging/iio/meter/ade7854-spi.c *val = st->rx[0]; rx 107 drivers/staging/iio/meter/ade7854-spi.c *val = be16_to_cpup((const __be16 *)st->rx); rx 110 drivers/staging/iio/meter/ade7854-spi.c *val = (st->rx[0] << 16) | (st->rx[1] << 8) | st->rx[2]; rx 113 drivers/staging/iio/meter/ade7854-spi.c *val = be32_to_cpup((const __be32 *)st->rx); rx 166 drivers/staging/iio/meter/ade7854.h u8 rx[ADE7854_MAX_RX]; rx 97 drivers/staging/iio/resolver/ad2s1210.c u8 rx[2] ____cacheline_aligned; rx 136 drivers/staging/iio/resolver/ad2s1210.c .rx_buf = &st->rx[0], rx 141 drivers/staging/iio/resolver/ad2s1210.c .rx_buf = &st->rx[1], rx 154 drivers/staging/iio/resolver/ad2s1210.c return st->rx[1]; rx 489 drivers/staging/iio/resolver/ad2s1210.c ret = spi_read(st->sdev, st->rx, 2); rx 495 drivers/staging/iio/resolver/ad2s1210.c pos = be16_to_cpup((__be16 *)st->rx); rx 502 drivers/staging/iio/resolver/ad2s1210.c 
negative = st->rx[0] & 0x80; rx 503 drivers/staging/iio/resolver/ad2s1210.c vel = be16_to_cpup((__be16 *)st->rx); rx 137 drivers/staging/kpc2000/kpc2000_spi.c unsigned int rx : 1; /* Rx Status */ rx 214 drivers/staging/kpc2000/kpc2000_spi.c u8 *rx = transfer->rx_buf; rx 231 drivers/staging/kpc2000/kpc2000_spi.c else if (rx) { rx 242 drivers/staging/kpc2000/kpc2000_spi.c *rx++ = test; rx 44 drivers/staging/most/i2c/i2c.c } rx; rx 90 drivers/staging/most/i2c/i2c.c dev->rx.int_disabled = false; rx 100 drivers/staging/most/i2c/i2c.c dev->rx.delay = delay ? delay : 1; rx 101 drivers/staging/most/i2c/i2c.c pr = MSEC_PER_SEC / jiffies_to_msecs(dev->rx.delay); rx 132 drivers/staging/most/i2c/i2c.c cancel_delayed_work_sync(&dev->rx.dwork); rx 133 drivers/staging/most/i2c/i2c.c list_add_tail(&mbo->list, &dev->rx.list); rx 134 drivers/staging/most/i2c/i2c.c if (dev->rx.int_disabled || polling_rate) rx 135 drivers/staging/most/i2c/i2c.c pending_rx_work(&dev->rx.dwork.work); rx 176 drivers/staging/most/i2c/i2c.c cancel_delayed_work_sync(&dev->rx.dwork); rx 178 drivers/staging/most/i2c/i2c.c while (!list_empty(&dev->rx.list)) { rx 179 drivers/staging/most/i2c/i2c.c mbo = list_first_mbo(&dev->rx.list); rx 218 drivers/staging/most/i2c/i2c.c mbo = list_first_mbo(&dev->rx.list); rx 235 drivers/staging/most/i2c/i2c.c struct hdm_i2c *dev = container_of(work, struct hdm_i2c, rx.dwork.work); rx 237 drivers/staging/most/i2c/i2c.c if (list_empty(&dev->rx.list)) rx 243 drivers/staging/most/i2c/i2c.c schedule_delayed_work(&dev->rx.dwork, dev->rx.delay); rx 245 drivers/staging/most/i2c/i2c.c dev->rx.int_disabled = false; rx 273 drivers/staging/most/i2c/i2c.c dev->rx.int_disabled = true; rx 274 drivers/staging/most/i2c/i2c.c schedule_delayed_work(&dev->rx.dwork, 0); rx 319 drivers/staging/most/i2c/i2c.c INIT_LIST_HEAD(&dev->rx.list); rx 321 drivers/staging/most/i2c/i2c.c INIT_DELAYED_WORK(&dev->rx.dwork, pending_rx_work); rx 65 drivers/staging/most/net/net.c struct net_dev_channel rx; rx 193 drivers/staging/most/net/net.c if (most_start_channel(nd->iface, nd->rx.ch_id, &comp)) { rx 201 drivers/staging/most/net/net.c most_stop_channel(nd->iface, nd->rx.ch_id, &comp); rx 227 drivers/staging/most/net/net.c most_stop_channel(nd->iface, nd->rx.ch_id, &comp); rx 297 drivers/staging/most/net/net.c if (nd && nd->rx.linked && nd->tx.linked) rx 339 drivers/staging/most/net/net.c ch = ccfg->direction == MOST_CH_TX ? &nd->tx : &nd->rx; rx 341 drivers/staging/most/net/net.c ch = ccfg->direction == MOST_CH_TX ? 
&nd->tx : &nd->rx; rx 377 drivers/staging/most/net/net.c if (nd->rx.linked && channel_idx == nd->rx.ch_id) { rx 378 drivers/staging/most/net/net.c ch = &nd->rx; rx 386 drivers/staging/most/net/net.c if (nd->rx.linked && nd->tx.linked) { rx 443 drivers/staging/most/net/net.c if (nd->rx.ch_id != mbo->hdm_channel_id) { rx 490 drivers/staging/nvec/nvec.c if (nvec->rx->pos != nvec_msg_size(nvec->rx)) { rx 492 drivers/staging/nvec/nvec.c (uint)nvec_msg_size(nvec->rx), rx 493 drivers/staging/nvec/nvec.c (uint)nvec->rx->pos); rx 495 drivers/staging/nvec/nvec.c nvec_msg_free(nvec, nvec->rx); rx 499 drivers/staging/nvec/nvec.c if (nvec->rx->data[0] == NVEC_BAT) rx 511 drivers/staging/nvec/nvec.c list_add_tail(&nvec->rx->node, &nvec->rx_data); rx 517 drivers/staging/nvec/nvec.c if (!nvec_msg_is_event(nvec->rx)) rx 616 drivers/staging/nvec/nvec.c nvec->rx = nvec_msg_alloc(nvec, NVEC_MSG_RX); rx 618 drivers/staging/nvec/nvec.c if (unlikely(!nvec->rx)) { rx 622 drivers/staging/nvec/nvec.c nvec->rx->data[0] = received; rx 623 drivers/staging/nvec/nvec.c nvec->rx->pos = 1; rx 630 drivers/staging/nvec/nvec.c if (nvec->rx->data[0] != 0x01) { rx 636 drivers/staging/nvec/nvec.c nvec_msg_free(nvec, nvec->rx); rx 642 drivers/staging/nvec/nvec.c nvec->rx->data[1] = received; rx 643 drivers/staging/nvec/nvec.c nvec->rx->pos = 2; rx 670 drivers/staging/nvec/nvec.c else if (nvec->rx && nvec->rx->pos < NVEC_MSG_SIZE) rx 671 drivers/staging/nvec/nvec.c nvec->rx->data[nvec->rx->pos++] = received; rx 675 drivers/staging/nvec/nvec.c nvec->rx, nvec->rx ? nvec->rx->pos : 0, rx 814 drivers/staging/nvec/nvec.c nvec->rx = &nvec->msg_pool[0]; rx 147 drivers/staging/nvec/nvec.h struct nvec_msg *rx; rx 337 drivers/staging/rtl8723bs/include/drv_types.h u32 rx; rx 175 drivers/staging/rtl8723bs/os_dep/ioctl_cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) rx 179 drivers/staging/rtl8723bs/os_dep/ioctl_cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 184 drivers/staging/rtl8723bs/os_dep/ioctl_cfg80211.c .rx = BIT(IEEE80211_STYPE_ASSOC_REQ >> 4) | rx 195 drivers/staging/rtl8723bs/os_dep/ioctl_cfg80211.c .rx = BIT(IEEE80211_STYPE_ASSOC_REQ >> 4) | rx 205 drivers/staging/rtl8723bs/os_dep/ioctl_cfg80211.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 210 drivers/staging/rtl8723bs/os_dep/ioctl_cfg80211.c .rx = BIT(IEEE80211_STYPE_ASSOC_REQ >> 4) | rx 1577 drivers/staging/speakup/main.c speakup_console[vc_num]->ht.rx[bi] = vc->vc_x; rx 1676 drivers/staging/speakup/main.c spk_x = spk_cx = speakup_console[vc_num]->ht.rx[hc]; rx 63 drivers/staging/speakup/spk_types.h u_long rpos[8], rx[8], ry[8]; rx 36 drivers/staging/wilc1000/wilc_wfi_cfgoperations.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 41 drivers/staging/wilc1000/wilc_wfi_cfgoperations.c .rx = BIT(IEEE80211_STYPE_ASSOC_REQ >> 4) | rx 51 drivers/staging/wilc1000/wilc_wfi_cfgoperations.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 338 drivers/staging/wlan-ng/p80211conv.c wlandev->rx.decrypt_err++; rx 349 drivers/staging/wlan-ng/p80211conv.c wlandev->rx.decrypt++; rx 571 drivers/staging/wlan-ng/p80211conv.c rxmeta = frmmeta->rx; rx 631 drivers/staging/wlan-ng/p80211conv.c frmmeta->rx = rxmeta; rx 658 drivers/staging/wlan-ng/p80211conv.c if (meta && meta->rx) rx 84 drivers/staging/wlan-ng/p80211conv.h struct p80211_rxmeta *rx; rx 102 drivers/staging/wlan-ng/p80211conv.h return frmmeta ? 
frmmeta->rx : NULL; rx 938 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.mgmt++; rx 942 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.assocreq++; rx 946 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.assocresp++; rx 950 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.reassocreq++; rx 954 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.reassocresp++; rx 958 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.probereq++; rx 962 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.proberesp++; rx 966 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.beacon++; rx 970 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.atim++; rx 974 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.disassoc++; rx 978 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.authen++; rx 982 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.deauthen++; rx 986 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.mgmt_unknown++; rx 1000 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.ctl++; rx 1004 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.pspoll++; rx 1008 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.rts++; rx 1012 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.cts++; rx 1016 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.ack++; rx 1020 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.cfend++; rx 1024 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.cfendcfack++; rx 1028 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.ctl_unknown++; rx 1036 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.data++; rx 1039 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.dataonly++; rx 1042 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.data_cfack++; rx 1045 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.data_cfpoll++; rx 1048 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.data__cfack_cfpoll++; rx 1052 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.null++; rx 1056 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.cfack++; rx 1060 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.cfpoll++; rx 1064 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.cfack_cfpoll++; rx 1068 drivers/staging/wlan-ng/p80211netdev.c wlandev->rx.data_unknown++; rx 215 drivers/staging/wlan-ng/p80211netdev.h struct p80211_frmrx rx; rx 86 drivers/target/iscsi/cxgbit/cxgbit.h struct cxgbit_skb_rx_cb rx; rx 102 drivers/target/iscsi/cxgbit/cxgbit.h #define cxgbit_skcb_rx_opcode(skb) (CXGBIT_SKB_CB(skb)->rx.opcode) rx 103 drivers/target/iscsi/cxgbit/cxgbit.h #define cxgbit_skcb_rx_backlog_fn(skb) (CXGBIT_SKB_CB(skb)->rx.backlog_fn) rx 104 drivers/target/iscsi/cxgbit/cxgbit.h #define cxgbit_rx_pdu_cb(skb) (CXGBIT_SKB_CB(skb)->rx.pdu_cb) rx 49 drivers/thermal/tegra/tegra-bpmp-thermal.c msg.rx.data = &reply; rx 50 drivers/thermal/tegra/tegra-bpmp-thermal.c msg.rx.size = sizeof(reply); rx 137 drivers/thermal/tegra/tegra-bpmp-thermal.c msg.rx.data = &reply; rx 138 drivers/thermal/tegra/tegra-bpmp-thermal.c msg.rx.size = sizeof(reply); rx 28 drivers/thunderbolt/ctl.c struct tb_ring *rx; rx 381 drivers/thunderbolt/ctl.c tb_ring_rx(pkg->ctl->rx, &pkg->frame); /* rx 423 drivers/thunderbolt/ctl.c goto rx; rx 439 drivers/thunderbolt/ctl.c goto rx; rx 444 drivers/thunderbolt/ctl.c goto rx; rx 454 drivers/thunderbolt/ctl.c goto rx; rx 459 drivers/thunderbolt/ctl.c goto rx; rx 479 drivers/thunderbolt/ctl.c rx: rx 623 drivers/thunderbolt/ctl.c ctl->rx = tb_ring_alloc_rx(nhi, 0, 10, RING_FLAG_NO_SUSPEND, 0xffff, rx 625 drivers/thunderbolt/ctl.c if (!ctl->rx) rx 656 drivers/thunderbolt/ctl.c if (ctl->rx) rx 657 
drivers/thunderbolt/ctl.c tb_ring_free(ctl->rx); rx 678 drivers/thunderbolt/ctl.c tb_ring_start(ctl->rx); rx 699 drivers/thunderbolt/ctl.c tb_ring_stop(ctl->rx); rx 272 drivers/tty/amiserial.c icount->rx++; rx 1194 drivers/tty/amiserial.c icount->rx = cnow.rx; rx 1516 drivers/tty/amiserial.c seq_printf(m, " tx:%d rx:%d", state->icount.tx, state->icount.rx); rx 1649 drivers/tty/amiserial.c state->icount.rx = state->icount.tx = 0; rx 473 drivers/tty/cyclades.c info->icount.rx++; rx 482 drivers/tty/cyclades.c info->icount.rx++; rx 495 drivers/tty/cyclades.c info->icount.rx++; rx 502 drivers/tty/cyclades.c info->icount.rx++; rx 507 drivers/tty/cyclades.c info->icount.rx++; rx 515 drivers/tty/cyclades.c info->icount.rx++; rx 524 drivers/tty/cyclades.c info->icount.rx++; rx 528 drivers/tty/cyclades.c info->icount.rx++; rx 552 drivers/tty/cyclades.c info->icount.rx++; rx 971 drivers/tty/cyclades.c info->icount.rx += len; rx 983 drivers/tty/cyclades.c info->icount.rx++; rx 1102 drivers/tty/cyclades.c info->icount.rx++; rx 1107 drivers/tty/cyclades.c info->icount.rx++; rx 1112 drivers/tty/cyclades.c info->icount.rx++; rx 2751 drivers/tty/cyclades.c sic->rx = cnow.rx; rx 1106 drivers/tty/ipwireless/hardware.c int rx = 0; rx 1157 drivers/tty/ipwireless/hardware.c rx = 1; rx 1175 drivers/tty/ipwireless/hardware.c rx = 1; rx 1191 drivers/tty/ipwireless/hardware.c if (tx || rx) rx 1839 drivers/tty/mxser.c icount->rx = cnow.rx; rx 1737 drivers/tty/nozomi.c icount->rx = cnow.rx; rx 91 drivers/tty/serial/21285.c port->icount.rx++; rx 57 drivers/tty/serial/8250/8250_dma.c p->port.icount.rx += count; rx 117 drivers/tty/serial/8250/8250_mtk.c up->port.icount.rx += copied; rx 796 drivers/tty/serial/8250/8250_omap.c p->port.icount.rx += ret; rx 1657 drivers/tty/serial/8250/8250_port.c port->icount.rx++; rx 127 drivers/tty/serial/altera_jtaguart.c port->icount.rx++; rx 212 drivers/tty/serial/altera_uart.c port->icount.rx++; rx 126 drivers/tty/serial/amba-pl010.c uap->port.icount.rx++; rx 327 drivers/tty/serial/amba-pl011.c uap->port.icount.rx++; rx 909 drivers/tty/serial/amba-pl011.c uap->port.icount.rx += dma_count; rx 87 drivers/tty/serial/apbuart.c port->icount.rx++; rx 321 drivers/tty/serial/ar933x_uart.c up->port.icount.rx++; rx 238 drivers/tty/serial/arc_uart.c port->icount.rx++; rx 1158 drivers/tty/serial/atmel_serial.c port->icount.rx += count; rx 1170 drivers/tty/serial/atmel_serial.c port->icount.rx += count; rx 1535 drivers/tty/serial/atmel_serial.c port->icount.rx++; rx 1650 drivers/tty/serial/atmel_serial.c port->icount.rx += count; rx 265 drivers/tty/serial/bcm63xx_uart.c port->icount.rx++; rx 113 drivers/tty/serial/clps711x.c port->icount.rx++; rx 278 drivers/tty/serial/cpm_uart/cpm_uart_core.c port->icount.rx++; rx 148 drivers/tty/serial/digicolor-usart.c port->icount.rx++; rx 200 drivers/tty/serial/dz.c icount->rx++; rx 212 drivers/tty/serial/efm32-uart.c port->icount.rx++; rx 248 drivers/tty/serial/fsl_linflexuart.c unsigned char rx; rx 255 drivers/tty/serial/fsl_linflexuart.c rx = readb(sport->membase + BDRM); rx 258 drivers/tty/serial/fsl_linflexuart.c sport->icount.rx++; rx 267 drivers/tty/serial/fsl_linflexuart.c if (!rx) rx 283 drivers/tty/serial/fsl_linflexuart.c if (uart_handle_sysrq_char(sport, (unsigned char)rx)) rx 286 drivers/tty/serial/fsl_linflexuart.c tty_insert_flip_char(port, rx, flg); rx 826 drivers/tty/serial/fsl_lpuart.c unsigned char rx, sr; rx 832 drivers/tty/serial/fsl_lpuart.c sport->port.icount.rx++; rx 838 drivers/tty/serial/fsl_lpuart.c rx = readb(sport->port.membase + 
UARTDR); rx 840 drivers/tty/serial/fsl_lpuart.c if (uart_handle_sysrq_char(&sport->port, (unsigned char)rx)) rx 873 drivers/tty/serial/fsl_lpuart.c tty_insert_flip_char(port, rx, flg); rx 907 drivers/tty/serial/fsl_lpuart.c unsigned long rx, sr; rx 913 drivers/tty/serial/fsl_lpuart.c sport->port.icount.rx++; rx 919 drivers/tty/serial/fsl_lpuart.c rx = lpuart32_read(&sport->port, UARTDATA); rx 920 drivers/tty/serial/fsl_lpuart.c rx &= 0x3ff; rx 922 drivers/tty/serial/fsl_lpuart.c if (uart_handle_sysrq_char(&sport->port, (unsigned char)rx)) rx 955 drivers/tty/serial/fsl_lpuart.c tty_insert_flip_char(port, rx, flg); rx 1103 drivers/tty/serial/fsl_lpuart.c sport->port.icount.rx += count; rx 1114 drivers/tty/serial/fsl_lpuart.c sport->port.icount.rx += count; rx 758 drivers/tty/serial/icom.c icount->rx += count; rx 743 drivers/tty/serial/imx.c unsigned int rx, flg, ignored = 0; rx 750 drivers/tty/serial/imx.c sport->port.icount.rx++; rx 752 drivers/tty/serial/imx.c rx = imx_uart_readl(sport, URXD0); rx 761 drivers/tty/serial/imx.c if (uart_handle_sysrq_char(&sport->port, (unsigned char)rx)) rx 764 drivers/tty/serial/imx.c if (unlikely(rx & URXD_ERR)) { rx 765 drivers/tty/serial/imx.c if (rx & URXD_BRK) rx 767 drivers/tty/serial/imx.c else if (rx & URXD_PRERR) rx 769 drivers/tty/serial/imx.c else if (rx & URXD_FRMERR) rx 771 drivers/tty/serial/imx.c if (rx & URXD_OVRRUN) rx 774 drivers/tty/serial/imx.c if (rx & sport->port.ignore_status_mask) { rx 780 drivers/tty/serial/imx.c rx &= (sport->port.read_status_mask | 0xFF); rx 782 drivers/tty/serial/imx.c if (rx & URXD_BRK) rx 784 drivers/tty/serial/imx.c else if (rx & URXD_PRERR) rx 786 drivers/tty/serial/imx.c else if (rx & URXD_FRMERR) rx 788 drivers/tty/serial/imx.c if (rx & URXD_OVRRUN) rx 799 drivers/tty/serial/imx.c if (tty_insert_flip_char(port, rx, flg) == 0) rx 1132 drivers/tty/serial/imx.c sport->port.icount.rx += w_bytes; rx 283 drivers/tty/serial/ip22zilog.c up->port.icount.rx++; rx 176 drivers/tty/serial/lantiq.c port->icount.rx++; rx 262 drivers/tty/serial/lpc32xx_hs.c port->icount.rx++; rx 191 drivers/tty/serial/max3100.c static int max3100_sr(struct max3100_port *s, u16 tx, u16 *rx) rx 210 drivers/tty/serial/max3100.c *rx = be16_to_cpu(erx); rx 211 drivers/tty/serial/max3100.c s->tx_empty = (*rx & MAX3100_T) > 0; rx 212 drivers/tty/serial/max3100.c dev_dbg(&s->spi->dev, "%04x - %04x\n", tx, *rx); rx 216 drivers/tty/serial/max3100.c static int max3100_handlerx(struct max3100_port *s, u16 rx) rx 221 drivers/tty/serial/max3100.c if (rx & MAX3100_R && s->rx_enabled) { rx 223 drivers/tty/serial/max3100.c ch = rx & (s->parity & MAX3100_7BIT ? 0x7f : 0xff); rx 224 drivers/tty/serial/max3100.c if (rx & MAX3100_RAFE) { rx 230 drivers/tty/serial/max3100.c if (max3100_check_parity(s, rx)) { rx 231 drivers/tty/serial/max3100.c s->port.icount.rx++; rx 239 drivers/tty/serial/max3100.c s->port.icount.rx++; rx 247 drivers/tty/serial/max3100.c cts = (rx & MAX3100_CTS) > 0; rx 260 drivers/tty/serial/max3100.c u16 tx, rx; rx 276 drivers/tty/serial/max3100.c max3100_sr(s, MAX3100_WC | conf, &rx); rx 279 drivers/tty/serial/max3100.c (s->rts ? 
MAX3100_RTS : 0), &rx); rx 280 drivers/tty/serial/max3100.c rxchars += max3100_handlerx(s, rx); rx 283 drivers/tty/serial/max3100.c max3100_sr(s, MAX3100_RD, &rx); rx 284 drivers/tty/serial/max3100.c rxchars += max3100_handlerx(s, rx); rx 286 drivers/tty/serial/max3100.c if (rx & MAX3100_T) { rx 302 drivers/tty/serial/max3100.c max3100_sr(s, tx, &rx); rx 303 drivers/tty/serial/max3100.c rxchars += max3100_handlerx(s, rx); rx 316 drivers/tty/serial/max3100.c ((rx & MAX3100_R) || rx 571 drivers/tty/serial/max3100.c u16 tx, rx; rx 574 drivers/tty/serial/max3100.c max3100_sr(s, tx, &rx); rx 616 drivers/tty/serial/max3100.c u16 tx, rx; rx 618 drivers/tty/serial/max3100.c max3100_sr(s, tx, &rx); rx 741 drivers/tty/serial/max3100.c u16 tx, rx; rx 805 drivers/tty/serial/max3100.c max3100_sr(max3100s[i], tx, &rx); rx 860 drivers/tty/serial/max3100.c u16 tx, rx; rx 863 drivers/tty/serial/max3100.c max3100_sr(s, tx, &rx); rx 674 drivers/tty/serial/max310x.c port->icount.rx += rxlen; rx 709 drivers/tty/serial/max310x.c port->icount.rx++; rx 289 drivers/tty/serial/mcf.c port->icount.rx++; rx 282 drivers/tty/serial/men_z135_uart.c port->icount.rx += copied; rx 183 drivers/tty/serial/meson_uart.c port->icount.rx++; rx 171 drivers/tty/serial/milbeaut_usio.c port->icount.rx++; rx 1395 drivers/tty/serial/mpc52xx_uart.c port->icount.rx++; rx 195 drivers/tty/serial/mps2-uart.c port->icount.rx++; rx 313 drivers/tty/serial/mps2-uart.c MAKE_NAME(-rx), mps_port); rx 564 drivers/tty/serial/msm_serial.c port->icount.rx += count; rx 710 drivers/tty/serial/msm_serial.c port->icount.rx += count; rx 791 drivers/tty/serial/msm_serial.c port->icount.rx++; rx 233 drivers/tty/serial/mux.c __u32 start_count = port->icount.rx; rx 244 drivers/tty/serial/mux.c port->icount.rx++; rx 258 drivers/tty/serial/mux.c if (start_count != port->icount.rx) rx 247 drivers/tty/serial/mvebu-uart.c port->icount.rx++; rx 657 drivers/tty/serial/mxs-auart.c s->port.icount.rx++; rx 501 drivers/tty/serial/omap-serial.c up->port.icount.rx++; rx 551 drivers/tty/serial/omap-serial.c up->port.icount.rx++; rx 235 drivers/tty/serial/owl-uart.c port->icount.rx++; rx 670 drivers/tty/serial/pch_uart.c port->icount.rx += room; rx 239 drivers/tty/serial/pic32_uart.c port->icount.rx++; rx 272 drivers/tty/serial/pmac_zilog.c uap->port.icount.rx++; rx 191 drivers/tty/serial/pnx8xxx_uart.c sport->port.icount.rx++; rx 112 drivers/tty/serial/pxa.c up->port.icount.rx++; rx 497 drivers/tty/serial/qcom_geni_serial.c uport->icount.rx++; rx 543 drivers/tty/serial/qcom_geni_serial.c uport->icount.rx += ret; rx 395 drivers/tty/serial/rda-uart.c port->icount.rx++; rx 424 drivers/tty/serial/rp2.c up->port.icount.rx++; rx 194 drivers/tty/serial/sa1100.c sport->port.icount.rx++; rx 370 drivers/tty/serial/samsung.c ourport->port.icount.rx += count; rx 641 drivers/tty/serial/samsung.c port->icount.rx++; rx 352 drivers/tty/serial/sb1250-duart.c icount->rx++; rx 590 drivers/tty/serial/sc16is7xx.c port->icount.rx++; rx 401 drivers/tty/serial/sccnxp.c port->icount.rx++; rx 652 drivers/tty/serial/serial-tegra.c tup->uport.icount.rx++; rx 672 drivers/tty/serial/serial-tegra.c tup->uport.icount.rx += count; rx 1272 drivers/tty/serial/serial_core.c icount->rx = cnow.rx; rx 1857 drivers/tty/serial/serial_core.c uport->icount.tx, uport->icount.rx); rx 279 drivers/tty/serial/serial_txx9.c up->port.icount.rx++; rx 911 drivers/tty/serial/sh-sci.c port->icount.rx += count; rx 1225 drivers/tty/serial/sh-sci.c port->icount.rx += copied; rx 446 drivers/tty/serial/sifive.c 
ssp->port.icount.rx++; rx 415 drivers/tty/serial/sirfsoc_uart.c port->icount.rx += rx_count; rx 1203 drivers/tty/serial/sirfsoc_uart.c port->icount.rx += inserted; rx 397 drivers/tty/serial/sprd_serial.c port->icount.rx += sp->rx_dma.trans_len; rx 616 drivers/tty/serial/sprd_serial.c port->icount.rx++; rx 314 drivers/tty/serial/st-asc.c port->icount.rx++; rx 247 drivers/tty/serial/stm32-usart.c port->icount.rx++; rx 107 drivers/tty/serial/sunhv.c port->icount.rx++; rx 164 drivers/tty/serial/sunhv.c port->icount.rx += bytes_read; rx 174 drivers/tty/serial/sunsab.c up->port.icount.rx++; rx 334 drivers/tty/serial/sunsu.c up->port.icount.rx++; rx 370 drivers/tty/serial/sunzilog.c up->port.icount.rx++; rx 31 drivers/tty/serial/tegra-tcu.c struct mbox_chan *tx, *rx; rx 198 drivers/tty/serial/tegra-tcu.c tcu->rx = mbox_request_channel_byname(&tcu->rx_client, "rx"); rx 199 drivers/tty/serial/tegra-tcu.c if (IS_ERR(tcu->rx)) { rx 200 drivers/tty/serial/tegra-tcu.c err = PTR_ERR(tcu->rx); rx 259 drivers/tty/serial/tegra-tcu.c mbox_free_channel(tcu->rx); rx 275 drivers/tty/serial/tegra-tcu.c mbox_free_channel(tcu->rx); rx 86 drivers/tty/serial/timbuart.c port->icount.rx++; rx 95 drivers/tty/serial/timbuart.c __func__, port->icount.rx); rx 138 drivers/tty/serial/uartlite.c port->icount.rx++; rx 495 drivers/tty/serial/ucc_uart.c port->icount.rx++; rx 311 drivers/tty/serial/vr41xx_siu.c port->icount.rx++; rx 185 drivers/tty/serial/vt8500_serial.c port->icount.rx++; rx 230 drivers/tty/serial/xilinx_uartps.c port->icount.rx++; rx 564 drivers/tty/serial/zs.c icount->rx++; rx 1468 drivers/tty/synclink.c icount->rx++; rx 1516 drivers/tty/synclink.c __FILE__,__LINE__,icount->rx,icount->brk, rx 2930 drivers/tty/synclink.c icount->rx = cnow.rx; rx 3483 drivers/tty/synclink.c info->icount.tx, info->icount.rx); rx 1112 drivers/tty/synclink_gt.c icount->rx = cnow.rx; rx 1275 drivers/tty/synclink_gt.c info->icount.tx, info->icount.rx); rx 1843 drivers/tty/synclink_gt.c icount->rx++; rx 1321 drivers/tty/synclinkmp.c icount->rx = cnow.rx; rx 1388 drivers/tty/synclinkmp.c info->icount.tx, info->icount.rx); rx 2154 drivers/tty/synclinkmp.c icount->rx++; rx 2195 drivers/tty/synclinkmp.c icount->rx,icount->brk,icount->parity, rx 403 drivers/usb/atm/usbatm.c atomic_inc(&vcc->stats->rx); rx 747 drivers/usb/atm/usbatm.c atomic_read(&atm_dev->stats.aal5.rx), rx 46 drivers/usb/gadget/function/f_phonet.c } rx; rx 322 drivers/usb/gadget/function/f_phonet.c spin_lock_irqsave(&fp->rx.lock, flags); rx 323 drivers/usb/gadget/function/f_phonet.c skb = fp->rx.skb; rx 325 drivers/usb/gadget/function/f_phonet.c skb = fp->rx.skb = netdev_alloc_skb(dev, 12); rx 327 drivers/usb/gadget/function/f_phonet.c fp->rx.skb = NULL; rx 328 drivers/usb/gadget/function/f_phonet.c spin_unlock_irqrestore(&fp->rx.lock, flags); rx 388 drivers/usb/gadget/function/f_phonet.c if (fp->rx.skb) { rx 389 drivers/usb/gadget/function/f_phonet.c dev_kfree_skb_irq(fp->rx.skb); rx 390 drivers/usb/gadget/function/f_phonet.c fp->rx.skb = NULL; rx 685 drivers/usb/gadget/function/f_phonet.c spin_lock_init(&fp->rx.lock); rx 149 drivers/usb/host/max3421-hcd.c struct max3421_dma_buf *rx; rx 369 drivers/usb/host/max3421-hcd.c transfer.rx_buf = max3421_hcd->rx->data; rx 375 drivers/usb/host/max3421-hcd.c return max3421_hcd->rx->data[1]; rx 1907 drivers/usb/host/max3421-hcd.c max3421_hcd->rx = kmalloc(sizeof(*max3421_hcd->rx), GFP_KERNEL); rx 1908 drivers/usb/host/max3421-hcd.c if (!max3421_hcd->rx) rx 1941 drivers/usb/host/max3421-hcd.c kfree(max3421_hcd->rx); rx 86 
drivers/usb/musb/cppi_dma.c static void cppi_reset_rx(struct cppi_rx_stateram __iomem *rx) rx 88 drivers/usb/musb/cppi_dma.c musb_writel(&rx->rx_skipbytes, 0, 0); rx 89 drivers/usb/musb/cppi_dma.c musb_writel(&rx->rx_head, 0, 0); rx 90 drivers/usb/musb/cppi_dma.c musb_writel(&rx->rx_sop, 0, 0); rx 91 drivers/usb/musb/cppi_dma.c musb_writel(&rx->rx_current, 0, 0); rx 92 drivers/usb/musb/cppi_dma.c musb_writel(&rx->rx_buf_current, 0, 0); rx 93 drivers/usb/musb/cppi_dma.c musb_writel(&rx->rx_len_len, 0, 0); rx 94 drivers/usb/musb/cppi_dma.c musb_writel(&rx->rx_cnt_cnt, 0, 0); rx 165 drivers/usb/musb/cppi_dma.c for (i = 0; i < ARRAY_SIZE(controller->rx); i++) { rx 166 drivers/usb/musb/cppi_dma.c controller->rx[i].transmit = false; rx 167 drivers/usb/musb/cppi_dma.c controller->rx[i].index = i; rx 173 drivers/usb/musb/cppi_dma.c for (i = 0; i < ARRAY_SIZE(controller->rx); i++) rx 174 drivers/usb/musb/cppi_dma.c cppi_pool_init(controller, controller->rx + i); rx 190 drivers/usb/musb/cppi_dma.c for (i = 0; i < ARRAY_SIZE(controller->rx); i++) { rx 191 drivers/usb/musb/cppi_dma.c struct cppi_channel *rx_ch = controller->rx + i; rx 192 drivers/usb/musb/cppi_dma.c struct cppi_rx_stateram __iomem *rx; rx 196 drivers/usb/musb/cppi_dma.c rx = tibase + DAVINCI_RXCPPI_STATERAM_OFFSET(i); rx 197 drivers/usb/musb/cppi_dma.c rx_ch->state_ram = rx; rx 198 drivers/usb/musb/cppi_dma.c cppi_reset_rx(rx); rx 243 drivers/usb/musb/cppi_dma.c for (i = 0; i < ARRAY_SIZE(controller->rx); i++) rx 244 drivers/usb/musb/cppi_dma.c cppi_pool_free(controller->rx + i); rx 307 drivers/usb/musb/cppi_dma.c if (index >= ARRAY_SIZE(controller->rx)) { rx 311 drivers/usb/musb/cppi_dma.c cppi_ch = controller->rx + index; rx 355 drivers/usb/musb/cppi_dma.c struct cppi_rx_stateram __iomem *rx = c->state_ram; rx 368 drivers/usb/musb/cppi_dma.c musb_readl(&rx->rx_skipbytes, 0), rx 369 drivers/usb/musb/cppi_dma.c musb_readl(&rx->rx_head, 0), rx 370 drivers/usb/musb/cppi_dma.c musb_readl(&rx->rx_sop, 0), rx 371 drivers/usb/musb/cppi_dma.c musb_readl(&rx->rx_current, 0), rx 373 drivers/usb/musb/cppi_dma.c musb_readl(&rx->rx_buf_current, 0), rx 374 drivers/usb/musb/cppi_dma.c musb_readl(&rx->rx_len_len, 0), rx 375 drivers/usb/musb/cppi_dma.c musb_readl(&rx->rx_cnt_cnt, 0), rx 376 drivers/usb/musb/cppi_dma.c musb_readl(&rx->rx_complete, 0) rx 438 drivers/usb/musb/cppi_dma.c static void cppi_dump_rxq(int level, const char *tag, struct cppi_channel *rx) rx 442 drivers/usb/musb/cppi_dma.c cppi_dump_rx(level, rx, tag); rx 443 drivers/usb/musb/cppi_dma.c if (rx->last_processed) rx 444 drivers/usb/musb/cppi_dma.c cppi_dump_rxbd("last", rx->last_processed); rx 445 drivers/usb/musb/cppi_dma.c for (bd = rx->head; bd; bd = bd->next) rx 453 drivers/usb/musb/cppi_dma.c static inline int cppi_autoreq_update(struct cppi_channel *rx, rx 464 drivers/usb/musb/cppi_dma.c val = tmp & ~((0x3) << (rx->index * 2)); rx 472 drivers/usb/musb/cppi_dma.c val |= ((0x3) << (rx->index * 2)); rx 476 drivers/usb/musb/cppi_dma.c val |= ((0x1) << (rx->index * 2)); rx 495 drivers/usb/musb/cppi_dma.c if (n_bds && rx->channel.actual_len) { rx 496 drivers/usb/musb/cppi_dma.c void __iomem *regs = rx->hw_ep->regs; rx 762 drivers/usb/musb/cppi_dma.c cppi_next_rx_segment(struct musb *musb, struct cppi_channel *rx, int onepacket) rx 764 drivers/usb/musb/cppi_dma.c unsigned maxpacket = rx->maxpacket; rx 765 drivers/usb/musb/cppi_dma.c dma_addr_t addr = rx->buf_dma + rx->offset; rx 766 drivers/usb/musb/cppi_dma.c size_t length = rx->buf_len - rx->offset; rx 772 
drivers/usb/musb/cppi_dma.c struct cppi_rx_stateram __iomem *rx_ram = rx->state_ram; rx 809 drivers/usb/musb/cppi_dma.c n_bds = cppi_autoreq_update(rx, tibase, onepacket, n_bds); rx 811 drivers/usb/musb/cppi_dma.c cppi_rndis_update(rx, 1, musb->ctrl_base, is_rndis); rx 817 drivers/usb/musb/cppi_dma.c rx->index, maxpacket, rx 823 drivers/usb/musb/cppi_dma.c DAVINCI_RXCPPI_BUFCNT0_REG + (rx->index * 4)) rx 826 drivers/usb/musb/cppi_dma.c rx->channel.actual_len, rx->buf_len); rx 831 drivers/usb/musb/cppi_dma.c bd = cppi_bd_alloc(rx); rx 832 drivers/usb/musb/cppi_dma.c rx->head = bd; rx 839 drivers/usb/musb/cppi_dma.c bd = cppi_bd_alloc(rx); rx 855 drivers/usb/musb/cppi_dma.c rx->offset += bd_len; rx 866 drivers/usb/musb/cppi_dma.c WARNING("rx dma%d -- no BDs? need %d\n", rx->index, n_bds); rx 869 drivers/usb/musb/cppi_dma.c WARNING("rx dma%d -- only %d of %d BDs\n", rx->index, i, n_bds); rx 874 drivers/usb/musb/cppi_dma.c bd = rx->head; rx 875 drivers/usb/musb/cppi_dma.c rx->tail = tail; rx 885 drivers/usb/musb/cppi_dma.c for (d = rx->head; d; d = d->next) rx 889 drivers/usb/musb/cppi_dma.c tail = rx->last_processed; rx 895 drivers/usb/musb/cppi_dma.c core_rxirq_enable(tibase, rx->index + 1); rx 910 drivers/usb/musb/cppi_dma.c DAVINCI_RXCPPI_BUFCNT0_REG + (rx->index * 4)) rx 915 drivers/usb/musb/cppi_dma.c DAVINCI_RXCPPI_BUFCNT0_REG + (rx->index * 4), rx 919 drivers/usb/musb/cppi_dma.c DAVINCI_RXCPPI_BUFCNT0_REG + (rx->index * 4), rx 923 drivers/usb/musb/cppi_dma.c DAVINCI_RXCPPI_BUFCNT0_REG + (rx->index * 4)) rx 927 drivers/usb/musb/cppi_dma.c rx->index, i, n_bds); rx 929 drivers/usb/musb/cppi_dma.c DAVINCI_RXCPPI_BUFCNT0_REG + (rx->index * 4), rx 933 drivers/usb/musb/cppi_dma.c cppi_dump_rx(4, rx, "/S"); rx 1003 drivers/usb/musb/cppi_dma.c struct cppi_channel *rx = &cppi->rx[ch]; rx 1004 drivers/usb/musb/cppi_dma.c struct cppi_rx_stateram __iomem *state = rx->state_ram; rx 1006 drivers/usb/musb/cppi_dma.c struct cppi_descriptor *last = rx->last_processed; rx 1011 drivers/usb/musb/cppi_dma.c void __iomem *regs = rx->hw_ep->regs; rx 1014 drivers/usb/musb/cppi_dma.c cppi_dump_rx(6, rx, "/K"); rx 1016 drivers/usb/musb/cppi_dma.c bd = last ? last->next : rx->head; rx 1035 drivers/usb/musb/cppi_dma.c rx->channel.actual_len); rx 1055 drivers/usb/musb/cppi_dma.c rx->channel.actual_len); rx 1074 drivers/usb/musb/cppi_dma.c rx->channel.actual_len += len; rx 1076 drivers/usb/musb/cppi_dma.c cppi_bd_free(rx, last); rx 1083 drivers/usb/musb/cppi_dma.c rx->last_processed = last; rx 1089 drivers/usb/musb/cppi_dma.c if (safe2ack == 0 || safe2ack == rx->last_processed->dma) rx 1092 drivers/usb/musb/cppi_dma.c cppi_bd_free(rx, last); rx 1093 drivers/usb/musb/cppi_dma.c rx->last_processed = NULL; rx 1098 drivers/usb/musb/cppi_dma.c WARN_ON(rx->head); rx 1100 drivers/usb/musb/cppi_dma.c musb_ep_select(cppi->mregs, rx->index + 1); rx 1104 drivers/usb/musb/cppi_dma.c rx->index, rx 1105 drivers/usb/musb/cppi_dma.c rx->head, rx->tail, rx 1106 drivers/usb/musb/cppi_dma.c rx->last_processed rx 1108 drivers/usb/musb/cppi_dma.c rx->last_processed->dma rx 1112 drivers/usb/musb/cppi_dma.c cppi_dump_rxq(4, "/what?", rx); rx 1118 drivers/usb/musb/cppi_dma.c rx->head = bd; rx 1123 drivers/usb/musb/cppi_dma.c csr = musb_readw(rx->hw_ep->regs, MUSB_RXCSR); rx 1130 drivers/usb/musb/cppi_dma.c csr = musb_readw(rx->hw_ep->regs, MUSB_RXCSR); rx 1133 drivers/usb/musb/cppi_dma.c rx->head = NULL; rx 1134 drivers/usb/musb/cppi_dma.c rx->tail = NULL; rx 1137 drivers/usb/musb/cppi_dma.c cppi_dump_rx(6, rx, completed ? 
"/completed" : "/cleaned"); rx 1147 drivers/usb/musb/cppi_dma.c u32 rx, tx; rx 1158 drivers/usb/musb/cppi_dma.c rx = musb_readl(tibase, DAVINCI_RXCPPI_MASKED_REG); rx 1160 drivers/usb/musb/cppi_dma.c if (!tx && !rx) { rx 1166 drivers/usb/musb/cppi_dma.c musb_dbg(musb, "CPPI IRQ Tx%x Rx%x", tx, rx); rx 1259 drivers/usb/musb/cppi_dma.c for (index = 0; rx; rx = rx >> 1, index++) { rx 1261 drivers/usb/musb/cppi_dma.c if (rx & 1) { rx 1264 drivers/usb/musb/cppi_dma.c rx_ch = cppi->rx + index; rx 117 drivers/usb/musb/cppi_dma.h struct cppi_channel rx[4]; rx 2559 drivers/usb/serial/ftdi_sio.c port->icount.rx += len; rx 563 drivers/usb/serial/generic.c icount->rx = cnow.rx; rx 1834 drivers/usb/serial/io_edgeport.c edge_port->port->icount.rx += rxLen; rx 1779 drivers/usb/serial/io_ti.c edge_port->port->icount.rx += urb->actual_length; rx 771 drivers/usb/serial/mos7840.c port->icount.rx += urb->actual_length; rx 772 drivers/usb/serial/mos7840.c dev_dbg(&port->dev, "icount.rx is %d:\n", port->icount.rx); rx 1230 drivers/usb/serial/ti_usb_3410_5052.c port->icount.rx += urb->actual_length; rx 190 drivers/video/backlight/ili922x.c static int ili922x_read(struct spi_device *spi, u8 reg, u16 *rx) rx 234 drivers/video/backlight/ili922x.c *rx = (rbuf[1 + send_bytes] << 8) + rbuf[2 + send_bytes]; rx 300 drivers/video/backlight/ili922x.c u16 rx; rx 305 drivers/video/backlight/ili922x.c ili922x_read(spi, reg, &rx); rx 306 drivers/video/backlight/ili922x.c dev_dbg(&spi->dev, "reg @ 0x%02X: 0x%04X\n", reg, rx); rx 386 drivers/video/fbdev/mmp/hw/mmp_ctrl.h #define CFG_RXBITS(rx) (((rx) - 1)<<16) /* 0x1F~0x1 */ rx 394 drivers/video/fbdev/mmp/hw/mmp_ctrl.h #define CFG_RXBITSTO0(rx) ((rx)<<5) rx 177 drivers/video/fbdev/pxa168fb.h #define CFG_RXBITS(rx) ((rx) << 16) /* 0x1F~0x1 */ rx 185 drivers/video/fbdev/pxa168fb.h #define CFG_RXBITSTO0(rx) ((rx) << 5) rx 589 fs/dlm/lowcomms.c bool tx, bool rx) rx 597 fs/dlm/lowcomms.c if (rx && !closing && cancel_work_sync(&con->rwork)) { rx 328 fs/jffs2/scan.c struct jffs2_raw_xattr *rx, uint32_t ofs, rx 335 fs/jffs2/scan.c crc = crc32(0, rx, sizeof(struct jffs2_raw_xattr) - 4); rx 336 fs/jffs2/scan.c if (crc != je32_to_cpu(rx->node_crc)) { rx 338 fs/jffs2/scan.c ofs, je32_to_cpu(rx->node_crc), crc); rx 339 fs/jffs2/scan.c if ((err = jffs2_scan_dirty_space(c, jeb, je32_to_cpu(rx->totlen)))) rx 344 fs/jffs2/scan.c xid = je32_to_cpu(rx->xid); rx 345 fs/jffs2/scan.c version = je32_to_cpu(rx->version); rx 348 fs/jffs2/scan.c + rx->name_len + 1 + je16_to_cpu(rx->value_len)); rx 349 fs/jffs2/scan.c if (totlen != je32_to_cpu(rx->totlen)) { rx 351 fs/jffs2/scan.c ofs, je32_to_cpu(rx->totlen), totlen); rx 352 fs/jffs2/scan.c if ((err = jffs2_scan_dirty_space(c, jeb, je32_to_cpu(rx->totlen)))) rx 368 fs/jffs2/scan.c xd->xprefix = rx->xprefix; rx 369 fs/jffs2/scan.c xd->name_len = rx->name_len; rx 370 fs/jffs2/scan.c xd->value_len = je16_to_cpu(rx->value_len); rx 371 fs/jffs2/scan.c xd->data_crc = je32_to_cpu(rx->data_crc); rx 377 fs/jffs2/scan.c jffs2_sum_add_xattr_mem(s, rx, ofs - jeb->offset); rx 158 fs/jffs2/summary.c int jffs2_sum_add_xattr_mem(struct jffs2_summary *s, struct jffs2_raw_xattr *rx, uint32_t ofs) rx 166 fs/jffs2/summary.c temp->nodetype = rx->nodetype; rx 167 fs/jffs2/summary.c temp->xid = rx->xid; rx 168 fs/jffs2/summary.c temp->version = rx->version; rx 170 fs/jffs2/summary.c temp->totlen = rx->totlen; rx 187 fs/jffs2/summary.h int jffs2_sum_add_xattr_mem(struct jffs2_summary *s, struct jffs2_raw_xattr *rx, uint32_t ofs); rx 137 fs/jffs2/xattr.c struct jffs2_raw_xattr 
rx; rx 148 fs/jffs2/xattr.c rc = jffs2_flash_read(c, offset, sizeof(rx), &readlen, (char *)&rx); rx 149 fs/jffs2/xattr.c if (rc || readlen != sizeof(rx)) { rx 151 fs/jffs2/xattr.c rc, sizeof(rx), readlen, offset); rx 154 fs/jffs2/xattr.c crc = crc32(0, &rx, sizeof(rx) - 4); rx 155 fs/jffs2/xattr.c if (crc != je32_to_cpu(rx.node_crc)) { rx 157 fs/jffs2/xattr.c offset, je32_to_cpu(rx.hdr_crc), crc); rx 161 fs/jffs2/xattr.c totlen = PAD(sizeof(rx) + rx.name_len + 1 + je16_to_cpu(rx.value_len)); rx 162 fs/jffs2/xattr.c if (je16_to_cpu(rx.magic) != JFFS2_MAGIC_BITMASK rx 163 fs/jffs2/xattr.c || je16_to_cpu(rx.nodetype) != JFFS2_NODETYPE_XATTR rx 164 fs/jffs2/xattr.c || je32_to_cpu(rx.totlen) != totlen rx 165 fs/jffs2/xattr.c || je32_to_cpu(rx.xid) != xd->xid rx 166 fs/jffs2/xattr.c || je32_to_cpu(rx.version) != xd->version) { rx 169 fs/jffs2/xattr.c offset, je16_to_cpu(rx.magic), JFFS2_MAGIC_BITMASK, rx 170 fs/jffs2/xattr.c je16_to_cpu(rx.nodetype), JFFS2_NODETYPE_XATTR, rx 171 fs/jffs2/xattr.c je32_to_cpu(rx.totlen), totlen, rx 172 fs/jffs2/xattr.c je32_to_cpu(rx.xid), xd->xid, rx 173 fs/jffs2/xattr.c je32_to_cpu(rx.version), xd->version); rx 177 fs/jffs2/xattr.c xd->xprefix = rx.xprefix; rx 178 fs/jffs2/xattr.c xd->name_len = rx.name_len; rx 179 fs/jffs2/xattr.c xd->value_len = je16_to_cpu(rx.value_len); rx 180 fs/jffs2/xattr.c xd->data_crc = je32_to_cpu(rx.data_crc); rx 287 fs/jffs2/xattr.c struct jffs2_raw_xattr rx; rx 296 fs/jffs2/xattr.c vecs[0].iov_base = &rx; rx 297 fs/jffs2/xattr.c vecs[0].iov_len = sizeof(rx); rx 303 fs/jffs2/xattr.c memset(&rx, 0, sizeof(rx)); rx 304 fs/jffs2/xattr.c rx.magic = cpu_to_je16(JFFS2_MAGIC_BITMASK); rx 305 fs/jffs2/xattr.c rx.nodetype = cpu_to_je16(JFFS2_NODETYPE_XATTR); rx 306 fs/jffs2/xattr.c rx.totlen = cpu_to_je32(PAD(totlen)); rx 307 fs/jffs2/xattr.c rx.hdr_crc = cpu_to_je32(crc32(0, &rx, sizeof(struct jffs2_unknown_node) - 4)); rx 309 fs/jffs2/xattr.c rx.xid = cpu_to_je32(xd->xid); rx 310 fs/jffs2/xattr.c rx.version = cpu_to_je32(++xd->version); rx 311 fs/jffs2/xattr.c rx.xprefix = xd->xprefix; rx 312 fs/jffs2/xattr.c rx.name_len = xd->name_len; rx 313 fs/jffs2/xattr.c rx.value_len = cpu_to_je16(xd->value_len); rx 314 fs/jffs2/xattr.c rx.data_crc = cpu_to_je32(crc32(0, vecs[1].iov_base, vecs[1].iov_len)); rx 315 fs/jffs2/xattr.c rx.node_crc = cpu_to_je32(crc32(0, &rx, sizeof(struct jffs2_raw_xattr) - 4)); rx 55 include/linux/can/can-ml.h struct hlist_head rx[RX_MAX]; rx 105 include/linux/cyclades.h __u32 cts, dsr, rng, dcd, tx, rx; rx 71 include/linux/iio/imu/adis.h uint8_t rx[4]; rx 115 include/linux/mfd/ipaq-micro.h struct ipaq_micro_rxdev rx; /* receive ISR state */ rx 402 include/linux/mlx5/mlx5_ifc_fpga.h u8 rx[0x1]; rx 1165 include/linux/phy.h void phy_set_sym_pause(struct phy_device *phydev, bool rx, bool tx, rx 1167 include/linux/phy.h void phy_set_asym_pause(struct phy_device *phydev, bool rx, bool tx); rx 21 include/linux/serial.h __u32 cts, dsr, rng, dcd, tx, rx; rx 93 include/linux/serial_core.h __u32 rx; rx 134 include/linux/soc/ti/knav_dma.h struct knav_dma_rx_cfg rx; rx 26 include/linux/timb_dma.h bool rx; rx 4155 include/net/cfg80211.h u16 tx, rx; rx 3931 include/net/mac80211.h int (*set_ringparam)(struct ieee80211_hw *hw, u32 tx, u32 rx); rx 3933 include/net/mac80211.h u32 *tx, u32 *tx_max, u32 *rx, u32 *rx_max); rx 269 include/net/tls.h struct cipher_context rx; rx 82 include/net/xdp_sock.h struct xsk_queue *rx; rx 203 include/soc/fsl/qe/immap_qe.h u8 rx[0x400]; rx 106 include/soc/tegra/bpmp.h } rx; rx 21 include/soc/tegra/ivc.h } rx,
tx; rx 94 include/soc/tegra/ivc.h int tegra_ivc_init(struct tegra_ivc *ivc, struct device *peer, void *rx, rx 15 include/uapi/linux/atm_eni.h int tx,rx; /* values are in percent and must be > 100 */ rx 37 include/uapi/linux/atmdev.h __HANDLE_ITEM(rx); /* RX okay */ \ rx 51 include/uapi/linux/if_xdp.h struct xdp_ring_offset rx; rx 104 include/uapi/linux/serial.h int rx, tx; rx 214 include/uapi/linux/synclink.h __u32 cts, dsr, rng, dcd, tx, rx; rx 49 net/atm/proc.c atomic_read(&stats->rx), atomic_read(&stats->rx_err), rx 373 net/can/af_can.c return &dev_rcv_lists->rx[RX_ERR]; rx 389 net/can/af_can.c return &dev_rcv_lists->rx[RX_INV]; rx 393 net/can/af_can.c return &dev_rcv_lists->rx[RX_ALL]; rx 408 net/can/af_can.c return &dev_rcv_lists->rx[RX_FIL]; rx 585 net/can/af_can.c hlist_for_each_entry_rcu(rcv, &dev_rcv_lists->rx[RX_ERR], list) { rx 595 net/can/af_can.c hlist_for_each_entry_rcu(rcv, &dev_rcv_lists->rx[RX_ALL], list) { rx 601 net/can/af_can.c hlist_for_each_entry_rcu(rcv, &dev_rcv_lists->rx[RX_FIL], list) { rx 609 net/can/af_can.c hlist_for_each_entry_rcu(rcv, &dev_rcv_lists->rx[RX_INV], list) { rx 278 net/can/j1939/j1939-priv.h unsigned int rx; rx 868 net/can/j1939/transport.c len = session->pkt.total - session->pkt.rx; rx 873 net/can/j1939/transport.c pkt = session->pkt.rx + 1; rx 882 net/can/j1939/transport.c dat[2] = session->pkt.rx + 1; rx 970 net/can/j1939/transport.c if (session->pkt.rx >= session->pkt.total) { rx 972 net/can/j1939/transport.c } else if (session->pkt.rx >= session->pkt.last) { rx 1571 net/can/j1939/transport.c session->pkt.rx = 0; rx 1748 net/can/j1939/transport.c (session->pkt.rx + 1) > session->pkt.total) { rx 1773 net/can/j1939/transport.c if (packet == session->pkt.rx) rx 1774 net/can/j1939/transport.c session->pkt.rx++; rx 1778 net/can/j1939/transport.c if (session->pkt.rx >= session->pkt.total) rx 1782 net/can/j1939/transport.c if (session->pkt.rx >= session->pkt.last) rx 306 net/can/proc.c if (!hlist_empty(&dev_rcv_lists->rx[idx])) { rx 308 net/can/proc.c can_print_rcvlist(m, &dev_rcv_lists->rx[idx], dev); rx 8849 net/core/dev.c struct netdev_rx_queue *rx; rx 8850 net/core/dev.c size_t sz = count * sizeof(*rx); rx 8855 net/core/dev.c rx = kvzalloc(sz, GFP_KERNEL | __GFP_RETRY_MAYFAIL); rx 8856 net/core/dev.c if (!rx) rx 8859 net/core/dev.c dev->_rx = rx; rx 8862 net/core/dev.c rx[i].dev = dev; rx 8865 net/core/dev.c err = xdp_rxq_info_reg(&rx[i].xdp_rxq, dev, i); rx 8874 net/core/dev.c xdp_rxq_info_unreg(&rx[i].xdp_rxq); rx 140 net/dccp/ccid.c struct ccid *ccid_new(const u8 id, struct sock *sk, bool rx) rx 148 net/dccp/ccid.c ccid = kmem_cache_alloc(rx ? ccid_ops->ccid_hc_rx_slab : rx 153 net/dccp/ccid.c if (rx) { rx 167 net/dccp/ccid.c kmem_cache_free(rx ? 
ccid_ops->ccid_hc_rx_slab : rx 111 net/dccp/ccid.h struct ccid *ccid_new(const u8 id, struct sock *sk, bool rx); rx 36 net/dccp/feat.c static int dccp_hdlr_ccid(struct sock *sk, u64 ccid, bool rx) rx 39 net/dccp/feat.c struct ccid *new_ccid = ccid_new(ccid, sk, rx); rx 44 net/dccp/feat.c if (rx) { rx 54 net/dccp/feat.c static int dccp_hdlr_seq_win(struct sock *sk, u64 seq_win, bool rx) rx 58 net/dccp/feat.c if (rx) { rx 70 net/dccp/feat.c static int dccp_hdlr_ack_ratio(struct sock *sk, u64 ratio, bool rx) rx 72 net/dccp/feat.c if (rx) rx 79 net/dccp/feat.c static int dccp_hdlr_ackvec(struct sock *sk, u64 enable, bool rx) rx 83 net/dccp/feat.c if (rx) { rx 96 net/dccp/feat.c static int dccp_hdlr_ndp(struct sock *sk, u64 enable, bool rx) rx 98 net/dccp/feat.c if (!rx) rx 112 net/dccp/feat.c static int dccp_hdlr_min_cscov(struct sock *sk, u64 cscov, bool rx) rx 116 net/dccp/feat.c if (rx) rx 133 net/dccp/feat.c int (*activation_hdlr)(struct sock *sk, u64 val, bool rx); rx 306 net/dccp/feat.c bool rx; rx 333 net/dccp/feat.c rx = (is_local == (dccp_feat_table[idx].rxtx == FEAT_AT_RX)); rx 335 net/dccp/feat.c dccp_debug(" -> activating %s %s, %sval=%llu\n", rx ? "RX" : "TX", rx 339 net/dccp/feat.c return dccp_feat_table[idx].activation_hdlr(sk, val, rx); rx 1448 net/dccp/feat.c } tx, rx; rx 1476 net/dccp/feat.c if (ccid_get_builtin_ccids(&rx.val, &rx.len)) { rx 1482 net/dccp/feat.c !dccp_feat_prefer(sysctl_dccp_rx_ccid, rx.val, rx.len)) rx 1489 net/dccp/feat.c rc = __feat_register_sp(fn, DCCPF_CCID, false, false, rx.val, rx.len); rx 1493 net/dccp/feat.c kfree(rx.val); rx 439 net/dccp/proto.c static int dccp_setsockopt_cscov(struct sock *sk, int cscov, bool rx) rx 463 net/dccp/proto.c rc = dccp_feat_register_sp(sk, DCCPF_MIN_CSUM_COVER, rx, list, len); rx 466 net/dccp/proto.c if (rx) rx 174 net/mac80211/debugfs_key.c key->u.tkip.rx[i].iv32, rx 175 net/mac80211/debugfs_key.c key->u.tkip.rx[i].iv16); rx 711 net/mac80211/driver-ops.h u32 tx, u32 rx) rx 717 net/mac80211/driver-ops.h trace_drv_set_ringparam(local, tx, rx); rx 719 net/mac80211/driver-ops.h ret = local->ops->set_ringparam(&local->hw, tx, rx); rx 726 net/mac80211/driver-ops.h u32 *tx, u32 *tx_max, u32 *rx, u32 *rx_max) rx 730 net/mac80211/driver-ops.h trace_drv_get_ringparam(local, tx, tx_max, rx, rx_max); rx 732 net/mac80211/driver-ops.h local->ops->get_ringparam(&local->hw, tx, tx_max, rx, rx_max); rx 535 net/mac80211/key.c key->u.tkip.rx[i].iv32 = rx 537 net/mac80211/key.c key->u.tkip.rx[i].iv16 = rx 1114 net/mac80211/key.c seq->tkip.iv32 = key->u.tkip.rx[tid].iv32; rx 1115 net/mac80211/key.c seq->tkip.iv16 = key->u.tkip.rx[tid].iv16; rx 1167 net/mac80211/key.c key->u.tkip.rx[tid].iv32 = seq->tkip.iv32; rx 1168 net/mac80211/key.c key->u.tkip.rx[tid].iv16 = seq->tkip.iv16; rx 78 net/mac80211/key.h struct tkip_ctx_rx rx[IEEE80211_NUM_TIDS]; rx 412 net/mac80211/main.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 419 net/mac80211/main.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 424 net/mac80211/main.c .rx = BIT(IEEE80211_STYPE_ASSOC_REQ >> 4) | rx 435 net/mac80211/main.c .rx = BIT(IEEE80211_STYPE_ASSOC_REQ >> 4) | rx 445 net/mac80211/main.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 450 net/mac80211/main.c .rx = BIT(IEEE80211_STYPE_ASSOC_REQ >> 4) | rx 460 net/mac80211/main.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 466 net/mac80211/main.c .rx = BIT(IEEE80211_STYPE_ACTION >> 4) | rx 877 net/mac80211/rx.c static void ieee80211_parse_qos(struct ieee80211_rx_data *rx) rx 879 net/mac80211/rx.c struct ieee80211_hdr *hdr = (struct 
ieee80211_hdr *)rx->skb->data; rx 880 net/mac80211/rx.c struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(rx->skb); rx 912 net/mac80211/rx.c rx->seqno_idx = seqno_idx; rx 913 net/mac80211/rx.c rx->security_idx = security_idx; rx 916 net/mac80211/rx.c rx->skb->priority = (tid > 7) ? 0 : tid; rx 944 net/mac80211/rx.c static void ieee80211_verify_alignment(struct ieee80211_rx_data *rx) rx 947 net/mac80211/rx.c WARN_ON_ONCE((unsigned long)rx->skb->data & 1); rx 1046 net/mac80211/rx.c static ieee80211_rx_result ieee80211_rx_mesh_check(struct ieee80211_rx_data *rx) rx 1048 net/mac80211/rx.c struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)rx->skb->data; rx 1049 net/mac80211/rx.c char *dev_addr = rx->sdata->vif.addr; rx 1070 net/mac80211/rx.c if (!rx->sta || sta_plink_state(rx->sta) != NL80211_PLINK_ESTAB) { rx 1080 net/mac80211/rx.c if (rx->skb->len < IEEE80211_MIN_ACTION_SIZE) rx 1359 net/mac80211/rx.c static void ieee80211_rx_reorder_ampdu(struct ieee80211_rx_data *rx, rx 1362 net/mac80211/rx.c struct sk_buff *skb = rx->skb; rx 1363 net/mac80211/rx.c struct ieee80211_local *local = rx->local; rx 1365 net/mac80211/rx.c struct sta_info *sta = rx->sta; rx 1389 net/mac80211/rx.c !test_bit(tid, rx->sta->ampdu_mlme.agg_session_valid) && rx 1390 net/mac80211/rx.c !test_and_set_bit(tid, rx->sta->ampdu_mlme.unexpected_agg)) rx 1391 net/mac80211/rx.c ieee80211_send_delba(rx->sdata, rx->sta->sta.addr, tid, rx 1415 net/mac80211/rx.c skb_queue_tail(&rx->sdata->skb_queue, skb); rx 1416 net/mac80211/rx.c ieee80211_queue_work(&local->hw, &rx->sdata->work); rx 1427 net/mac80211/rx.c if (ieee80211_sta_manage_reorder_buf(rx->sdata, tid_agg_rx, skb, rx 1436 net/mac80211/rx.c ieee80211_rx_h_check_dup(struct ieee80211_rx_data *rx) rx 1438 net/mac80211/rx.c struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)rx->skb->data; rx 1439 net/mac80211/rx.c struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(rx->skb); rx 1449 net/mac80211/rx.c if (rx->skb->len < 24) rx 1457 net/mac80211/rx.c if (!rx->sta) rx 1461 net/mac80211/rx.c rx->sta->last_seq_ctrl[rx->seqno_idx] == hdr->seq_ctrl)) { rx 1462 net/mac80211/rx.c I802_DEBUG_INC(rx->local->dot11FrameDuplicateCount); rx 1463 net/mac80211/rx.c rx->sta->rx_stats.num_duplicates++; rx 1466 net/mac80211/rx.c rx->sta->last_seq_ctrl[rx->seqno_idx] = hdr->seq_ctrl; rx 1473 net/mac80211/rx.c ieee80211_rx_h_check(struct ieee80211_rx_data *rx) rx 1475 net/mac80211/rx.c struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)rx->skb->data; rx 1486 net/mac80211/rx.c if (ieee80211_vif_is_mesh(&rx->sdata->vif)) rx 1487 net/mac80211/rx.c return ieee80211_rx_mesh_check(rx); rx 1491 net/mac80211/rx.c rx->sdata->vif.type != NL80211_IFTYPE_ADHOC && rx 1492 net/mac80211/rx.c rx->sdata->vif.type != NL80211_IFTYPE_WDS && rx 1493 net/mac80211/rx.c rx->sdata->vif.type != NL80211_IFTYPE_OCB && rx 1494 net/mac80211/rx.c (!rx->sta || !test_sta_flag(rx->sta, WLAN_STA_ASSOC)))) { rx 1500 net/mac80211/rx.c if (rx->sta && rx->sdata->vif.type == NL80211_IFTYPE_STATION && rx 1507 net/mac80211/rx.c if (rx->skb->len < hdrlen + 8) rx 1510 net/mac80211/rx.c skb_copy_bits(rx->skb, hdrlen + 6, &ethertype, 2); rx 1511 net/mac80211/rx.c if (ethertype == rx->sdata->control_port_protocol) rx 1515 net/mac80211/rx.c if (rx->sdata->vif.type == NL80211_IFTYPE_AP && rx 1516 net/mac80211/rx.c cfg80211_rx_spurious_frame(rx->sdata->dev, rx 1529 net/mac80211/rx.c ieee80211_rx_h_check_more_data(struct ieee80211_rx_data *rx) rx 1535 net/mac80211/rx.c local = rx->local; rx 1536 net/mac80211/rx.c skb = rx->skb; rx 1556 
net/mac80211/rx.c ieee80211_send_pspoll(local, rx->sdata); rx 1689 net/mac80211/rx.c ieee80211_rx_h_uapsd_and_pspoll(struct ieee80211_rx_data *rx) rx 1691 net/mac80211/rx.c struct ieee80211_sub_if_data *sdata = rx->sdata; rx 1692 net/mac80211/rx.c struct ieee80211_hdr *hdr = (void *)rx->skb->data; rx 1693 net/mac80211/rx.c struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(rx->skb); rx 1695 net/mac80211/rx.c if (!rx->sta) rx 1715 net/mac80211/rx.c if (!test_sta_flag(rx->sta, WLAN_STA_PS_STA)) rx 1719 net/mac80211/rx.c ieee80211_sta_pspoll(&rx->sta->sta); rx 1723 net/mac80211/rx.c dev_kfree_skb(rx->skb); rx 1733 net/mac80211/rx.c ieee80211_sta_uapsd_trigger(&rx->sta->sta, tid); rx 1740 net/mac80211/rx.c ieee80211_rx_h_sta_process(struct ieee80211_rx_data *rx) rx 1742 net/mac80211/rx.c struct sta_info *sta = rx->sta; rx 1743 net/mac80211/rx.c struct sk_buff *skb = rx->skb; rx 1759 net/mac80211/rx.c if (rx->sdata->vif.type == NL80211_IFTYPE_ADHOC) { rx 1760 net/mac80211/rx.c u8 *bssid = ieee80211_get_bssid(hdr, rx->skb->len, rx 1762 net/mac80211/rx.c if (ether_addr_equal(bssid, rx->sdata->u.ibss.bssid) && rx 1770 net/mac80211/rx.c } else if (rx->sdata->vif.type == NL80211_IFTYPE_OCB) { rx 1782 net/mac80211/rx.c if (rx->sdata->vif.type == NL80211_IFTYPE_STATION) rx 1783 net/mac80211/rx.c ieee80211_sta_rx_notify(rx->sdata, hdr); rx 1787 net/mac80211/rx.c u64_stats_update_begin(&rx->sta->rx_stats.syncp); rx 1788 net/mac80211/rx.c sta->rx_stats.bytes += rx->skb->len; rx 1789 net/mac80211/rx.c u64_stats_update_end(&rx->sta->rx_stats.syncp); rx 1821 net/mac80211/rx.c (rx->sdata->vif.type == NL80211_IFTYPE_AP || rx 1822 net/mac80211/rx.c rx->sdata->vif.type == NL80211_IFTYPE_AP_VLAN)) { rx 1833 net/mac80211/rx.c if (ieee80211_vif_is_mesh(&rx->sdata->vif)) rx 1841 net/mac80211/rx.c I802_DEBUG_INC(rx->local->rx_handlers_drop_nullfunc); rx 1850 net/mac80211/rx.c (rx->sdata->vif.type == NL80211_IFTYPE_AP || rx 1851 net/mac80211/rx.c (rx->sdata->vif.type == NL80211_IFTYPE_AP_VLAN && rx 1852 net/mac80211/rx.c !rx->sdata->u.vlan.sta))) { rx 1855 net/mac80211/rx.c rx->sdata->dev, sta->sta.addr, rx 1864 net/mac80211/rx.c dev_kfree_skb(rx->skb); rx 1872 net/mac80211/rx.c ieee80211_rx_h_decrypt(struct ieee80211_rx_data *rx) rx 1874 net/mac80211/rx.c struct sk_buff *skb = rx->skb; rx 1913 net/mac80211/rx.c rx->key = NULL; rx 1916 net/mac80211/rx.c if (rx->sta) { rx 1917 net/mac80211/rx.c int keyid = rx->sta->ptk_idx; rx 1918 net/mac80211/rx.c sta_ptk = rcu_dereference(rx->sta->ptk[keyid]); rx 1921 net/mac80211/rx.c cs = rx->sta->cipher_scheme; rx 1922 net/mac80211/rx.c keyid = ieee80211_get_keyid(rx->skb, cs); rx 1927 net/mac80211/rx.c ptk_idx = rcu_dereference(rx->sta->ptk[keyid]); rx 1932 net/mac80211/rx.c mmie_keyidx = ieee80211_get_mmie_keyidx(rx->skb); rx 1935 net/mac80211/rx.c rx->key = ptk_idx ? 
ptk_idx : sta_ptk; rx 1951 net/mac80211/rx.c if (rx->sta) { rx 1953 net/mac80211/rx.c test_sta_flag(rx->sta, WLAN_STA_MFP)) rx 1956 net/mac80211/rx.c rx->key = rcu_dereference(rx->sta->gtk[mmie_keyidx]); rx 1958 net/mac80211/rx.c if (!rx->key) rx 1959 net/mac80211/rx.c rx->key = rcu_dereference(rx->sdata->keys[mmie_keyidx]); rx 1968 net/mac80211/rx.c struct ieee80211_sub_if_data *sdata = rx->sdata; rx 1973 net/mac80211/rx.c (key = rcu_dereference(rx->sdata->default_mgmt_key))) rx 1974 net/mac80211/rx.c rx->key = key; rx 1976 net/mac80211/rx.c if (rx->sta) { rx 1978 net/mac80211/rx.c key = rcu_dereference(rx->sta->gtk[i]); rx 1991 net/mac80211/rx.c rx->key = key; rx 2008 net/mac80211/rx.c keyidx = ieee80211_get_keyid(rx->skb, cs); rx 2014 net/mac80211/rx.c if (is_multicast_ether_addr(hdr->addr1) && rx->sta) rx 2015 net/mac80211/rx.c rx->key = rcu_dereference(rx->sta->gtk[keyidx]); rx 2018 net/mac80211/rx.c if (!rx->key) { rx 2019 net/mac80211/rx.c rx->key = rcu_dereference(rx->sdata->keys[keyidx]); rx 2026 net/mac80211/rx.c if (rx->key && rx 2027 net/mac80211/rx.c rx->key->conf.cipher != WLAN_CIPHER_SUITE_WEP40 && rx 2028 net/mac80211/rx.c rx->key->conf.cipher != WLAN_CIPHER_SUITE_WEP104 && rx 2030 net/mac80211/rx.c rx->key = NULL; rx 2034 net/mac80211/rx.c if (rx->key) { rx 2035 net/mac80211/rx.c if (unlikely(rx->key->flags & KEY_FLAG_TAINTED)) rx 2043 net/mac80211/rx.c switch (rx->key->conf.cipher) { rx 2046 net/mac80211/rx.c result = ieee80211_crypto_wep_decrypt(rx); rx 2049 net/mac80211/rx.c result = ieee80211_crypto_tkip_decrypt(rx); rx 2053 net/mac80211/rx.c rx, IEEE80211_CCMP_MIC_LEN); rx 2057 net/mac80211/rx.c rx, IEEE80211_CCMP_256_MIC_LEN); rx 2060 net/mac80211/rx.c result = ieee80211_crypto_aes_cmac_decrypt(rx); rx 2063 net/mac80211/rx.c result = ieee80211_crypto_aes_cmac_256_decrypt(rx); rx 2067 net/mac80211/rx.c result = ieee80211_crypto_aes_gmac_decrypt(rx); rx 2071 net/mac80211/rx.c result = ieee80211_crypto_gcmp_decrypt(rx); rx 2074 net/mac80211/rx.c result = ieee80211_crypto_hw_decrypt(rx); rx 2157 net/mac80211/rx.c ieee80211_rx_h_defragment(struct ieee80211_rx_data *rx) rx 2166 net/mac80211/rx.c hdr = (struct ieee80211_hdr *)rx->skb->data; rx 2176 net/mac80211/rx.c I802_DEBUG_INC(rx->local->dot11MulticastReceivedFrameCount); rx 2183 net/mac80211/rx.c I802_DEBUG_INC(rx->local->rx_handlers_fragments); rx 2185 net/mac80211/rx.c if (skb_linearize(rx->skb)) rx 2193 net/mac80211/rx.c hdr = (struct ieee80211_hdr *)rx->skb->data; rx 2198 net/mac80211/rx.c entry = ieee80211_reassemble_add(rx->sdata, frag, seq, rx 2199 net/mac80211/rx.c rx->seqno_idx, &(rx->skb)); rx 2200 net/mac80211/rx.c if (rx->key && rx 2201 net/mac80211/rx.c (rx->key->conf.cipher == WLAN_CIPHER_SUITE_CCMP || rx 2202 net/mac80211/rx.c rx->key->conf.cipher == WLAN_CIPHER_SUITE_CCMP_256 || rx 2203 net/mac80211/rx.c rx->key->conf.cipher == WLAN_CIPHER_SUITE_GCMP || rx 2204 net/mac80211/rx.c rx->key->conf.cipher == WLAN_CIPHER_SUITE_GCMP_256) && rx 2206 net/mac80211/rx.c int queue = rx->security_idx; rx 2213 net/mac80211/rx.c rx->key->u.ccmp.rx_pn[queue], rx 2219 net/mac80211/rx.c BUILD_BUG_ON(sizeof(rx->key->u.ccmp.rx_pn[queue]) != rx 2220 net/mac80211/rx.c sizeof(rx->key->u.gcmp.rx_pn[queue])); rx 2230 net/mac80211/rx.c entry = ieee80211_reassemble_find(rx->sdata, frag, seq, rx 2231 net/mac80211/rx.c rx->seqno_idx, hdr); rx 2233 net/mac80211/rx.c I802_DEBUG_INC(rx->local->rx_handlers_drop_defrag); rx 2247 net/mac80211/rx.c if (!rx->key || rx 2248 net/mac80211/rx.c (rx->key->conf.cipher != WLAN_CIPHER_SUITE_CCMP && 
rx 2249 net/mac80211/rx.c rx->key->conf.cipher != WLAN_CIPHER_SUITE_CCMP_256 && rx 2250 net/mac80211/rx.c rx->key->conf.cipher != WLAN_CIPHER_SUITE_GCMP && rx 2251 net/mac80211/rx.c rx->key->conf.cipher != WLAN_CIPHER_SUITE_GCMP_256)) rx 2259 net/mac80211/rx.c queue = rx->security_idx; rx 2260 net/mac80211/rx.c rpn = rx->key->u.ccmp.rx_pn[queue]; rx 2266 net/mac80211/rx.c skb_pull(rx->skb, ieee80211_hdrlen(fc)); rx 2267 net/mac80211/rx.c __skb_queue_tail(&entry->skb_list, rx->skb); rx 2269 net/mac80211/rx.c entry->extra_len += rx->skb->len; rx 2271 net/mac80211/rx.c rx->skb = NULL; rx 2275 net/mac80211/rx.c rx->skb = __skb_dequeue(&entry->skb_list); rx 2276 net/mac80211/rx.c if (skb_tailroom(rx->skb) < entry->extra_len) { rx 2277 net/mac80211/rx.c I802_DEBUG_INC(rx->local->rx_expand_skb_head_defrag); rx 2278 net/mac80211/rx.c if (unlikely(pskb_expand_head(rx->skb, 0, entry->extra_len, rx 2280 net/mac80211/rx.c I802_DEBUG_INC(rx->local->rx_handlers_drop_defrag); rx 2286 net/mac80211/rx.c skb_put_data(rx->skb, skb->data, skb->len); rx 2291 net/mac80211/rx.c ieee80211_led_rx(rx->local); rx 2293 net/mac80211/rx.c if (rx->sta) rx 2294 net/mac80211/rx.c rx->sta->rx_stats.packets++; rx 2298 net/mac80211/rx.c static int ieee80211_802_1x_port_control(struct ieee80211_rx_data *rx) rx 2300 net/mac80211/rx.c if (unlikely(!rx->sta || !test_sta_flag(rx->sta, WLAN_STA_AUTHORIZED))) rx 2306 net/mac80211/rx.c static int ieee80211_drop_unencrypted(struct ieee80211_rx_data *rx, __le16 fc) rx 2308 net/mac80211/rx.c struct sk_buff *skb = rx->skb; rx 2321 net/mac80211/rx.c ieee80211_is_data(fc) && rx->key)) rx 2327 net/mac80211/rx.c static int ieee80211_drop_unencrypted_mgmt(struct ieee80211_rx_data *rx) rx 2329 net/mac80211/rx.c struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)rx->skb->data; rx 2330 net/mac80211/rx.c struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(rx->skb); rx 2340 net/mac80211/rx.c if (rx->sta && test_sta_flag(rx->sta, WLAN_STA_MFP)) { rx 2342 net/mac80211/rx.c ieee80211_is_unicast_robust_mgmt_frame(rx->skb) && rx 2343 net/mac80211/rx.c rx->key)) { rx 2346 net/mac80211/rx.c cfg80211_rx_unprot_mlme_mgmt(rx->sdata->dev, rx 2347 net/mac80211/rx.c rx->skb->data, rx 2348 net/mac80211/rx.c rx->skb->len); rx 2352 net/mac80211/rx.c if (unlikely(ieee80211_is_multicast_robust_mgmt_frame(rx->skb) && rx 2353 net/mac80211/rx.c ieee80211_get_mmie_keyidx(rx->skb) < 0)) { rx 2356 net/mac80211/rx.c cfg80211_rx_unprot_mlme_mgmt(rx->sdata->dev, rx 2357 net/mac80211/rx.c rx->skb->data, rx 2358 net/mac80211/rx.c rx->skb->len); rx 2365 net/mac80211/rx.c if (unlikely(ieee80211_is_action(fc) && !rx->key && rx 2366 net/mac80211/rx.c ieee80211_is_robust_mgmt_frame(rx->skb))) rx 2374 net/mac80211/rx.c __ieee80211_data_to_8023(struct ieee80211_rx_data *rx, bool *port_control) rx 2376 net/mac80211/rx.c struct ieee80211_sub_if_data *sdata = rx->sdata; rx 2377 net/mac80211/rx.c struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)rx->skb->data; rx 2400 net/mac80211/rx.c ret = ieee80211_data_to_8023(rx->skb, sdata->vif.addr, sdata->vif.type); rx 2404 net/mac80211/rx.c ehdr = (struct ethhdr *) rx->skb->data; rx 2405 net/mac80211/rx.c if (ehdr->h_proto == rx->sdata->control_port_protocol) rx 2416 net/mac80211/rx.c static bool ieee80211_frame_allowed(struct ieee80211_rx_data *rx, __le16 fc) rx 2420 net/mac80211/rx.c struct ethhdr *ehdr = (struct ethhdr *) rx->skb->data; rx 2426 net/mac80211/rx.c if (ehdr->h_proto == rx->sdata->control_port_protocol && rx 2427 net/mac80211/rx.c (ether_addr_equal(ehdr->h_dest, 
rx->sdata->vif.addr) || rx 2431 net/mac80211/rx.c if (ieee80211_802_1x_port_control(rx) || rx 2432 net/mac80211/rx.c ieee80211_drop_unencrypted(rx, fc)) rx 2439 net/mac80211/rx.c struct ieee80211_rx_data *rx) rx 2441 net/mac80211/rx.c struct ieee80211_sub_if_data *sdata = rx->sdata; rx 2456 net/mac80211/rx.c if (rx->napi) rx 2457 net/mac80211/rx.c napi_gro_receive(rx->napi, skb); rx 2467 net/mac80211/rx.c ieee80211_deliver_skb(struct ieee80211_rx_data *rx) rx 2469 net/mac80211/rx.c struct ieee80211_sub_if_data *sdata = rx->sdata; rx 2472 net/mac80211/rx.c struct ethhdr *ehdr = (struct ethhdr *) rx->skb->data; rx 2475 net/mac80211/rx.c skb = rx->skb; rx 2480 net/mac80211/rx.c if (rx->sta) { rx 2486 net/mac80211/rx.c u64_stats_update_begin(&rx->sta->rx_stats.syncp); rx 2487 net/mac80211/rx.c rx->sta->rx_stats.msdu[rx->seqno_idx]++; rx 2488 net/mac80211/rx.c u64_stats_update_end(&rx->sta->rx_stats.syncp); rx 2549 net/mac80211/rx.c ieee80211_deliver_skb_to_local_stack(skb, rx); rx 2567 net/mac80211/rx.c __ieee80211_rx_h_amsdu(struct ieee80211_rx_data *rx, u8 data_offset) rx 2569 net/mac80211/rx.c struct net_device *dev = rx->sdata->dev; rx 2570 net/mac80211/rx.c struct sk_buff *skb = rx->skb; rx 2580 net/mac80211/rx.c } else switch (rx->sdata->vif.type) { rx 2586 net/mac80211/rx.c if (!rx->sta || rx 2587 net/mac80211/rx.c !test_sta_flag(rx->sta, WLAN_STA_TDLS_PEER)) rx 2601 net/mac80211/rx.c rx->sdata->vif.addr, rx 2602 net/mac80211/rx.c rx->sdata->vif.type, rx 2607 net/mac80211/rx.c rx->sdata->vif.type, rx 2608 net/mac80211/rx.c rx->local->hw.extra_tx_headroom, rx 2612 net/mac80211/rx.c rx->skb = __skb_dequeue(&frame_list); rx 2614 net/mac80211/rx.c if (!ieee80211_frame_allowed(rx, fc)) { rx 2615 net/mac80211/rx.c dev_kfree_skb(rx->skb); rx 2619 net/mac80211/rx.c ieee80211_deliver_skb(rx); rx 2626 net/mac80211/rx.c ieee80211_rx_h_amsdu(struct ieee80211_rx_data *rx) rx 2628 net/mac80211/rx.c struct sk_buff *skb = rx->skb; rx 2643 net/mac80211/rx.c switch (rx->sdata->vif.type) { rx 2645 net/mac80211/rx.c if (!rx->sdata->u.vlan.sta) rx 2649 net/mac80211/rx.c if (!rx->sdata->u.mgd.use_4addr) rx 2660 net/mac80211/rx.c return __ieee80211_rx_h_amsdu(rx, 0); rx 2665 net/mac80211/rx.c ieee80211_rx_h_mesh_fwding(struct ieee80211_rx_data *rx) rx 2670 net/mac80211/rx.c struct sk_buff *skb = rx->skb, *fwd_skb; rx 2671 net/mac80211/rx.c struct ieee80211_local *local = rx->local; rx 2672 net/mac80211/rx.c struct ieee80211_sub_if_data *sdata = rx->sdata; rx 2681 net/mac80211/rx.c if (!pskb_may_pull(rx->skb, hdrlen + 6)) rx 2687 net/mac80211/rx.c if (!pskb_may_pull(rx->skb, rx 2695 net/mac80211/rx.c if (ieee80211_drop_unencrypted(rx, hdr->frame_control)) rx 2701 net/mac80211/rx.c mesh_rmc_check(rx->sdata, hdr->addr3, mesh_hdr)) rx 2778 net/mac80211/rx.c info->control.vif = &rx->sdata->vif; rx 2809 net/mac80211/rx.c ieee80211_rx_h_data(struct ieee80211_rx_data *rx) rx 2811 net/mac80211/rx.c struct ieee80211_sub_if_data *sdata = rx->sdata; rx 2812 net/mac80211/rx.c struct ieee80211_local *local = rx->local; rx 2814 net/mac80211/rx.c struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)rx->skb->data; rx 2831 net/mac80211/rx.c if (rx->sta && rx 2832 net/mac80211/rx.c !test_and_set_sta_flag(rx->sta, WLAN_STA_4ADDR_EVENT)) rx 2834 net/mac80211/rx.c rx->sdata->dev, rx->sta->sta.addr, GFP_ATOMIC); rx 2838 net/mac80211/rx.c err = __ieee80211_data_to_8023(rx, &port_control); rx 2842 net/mac80211/rx.c if (!ieee80211_frame_allowed(rx, fc)) rx 2846 net/mac80211/rx.c if (unlikely(((struct ethhdr *)rx->skb->data)->h_proto == 
rx 2848 net/mac80211/rx.c struct ieee80211_tdls_data *tf = (void *)rx->skb->data; rx 2850 net/mac80211/rx.c if (pskb_may_pull(rx->skb, rx 2856 net/mac80211/rx.c skb_queue_tail(&local->skb_queue_tdls_chsw, rx->skb); rx 2858 net/mac80211/rx.c if (rx->sta) rx 2859 net/mac80211/rx.c rx->sta->rx_stats.packets++; rx 2865 net/mac80211/rx.c if (rx->sdata->vif.type == NL80211_IFTYPE_AP_VLAN && rx 2870 net/mac80211/rx.c rx->sdata = sdata; rx 2873 net/mac80211/rx.c rx->skb->dev = dev; rx 2878 net/mac80211/rx.c ((struct ethhdr *)rx->skb->data)->h_dest) && rx 2884 net/mac80211/rx.c ieee80211_deliver_skb(rx); rx 2890 net/mac80211/rx.c ieee80211_rx_h_ctrl(struct ieee80211_rx_data *rx, struct sk_buff_head *frames) rx 2892 net/mac80211/rx.c struct sk_buff *skb = rx->skb; rx 2909 net/mac80211/rx.c if (!rx->sta) rx 2918 net/mac80211/rx.c if (!test_bit(tid, rx->sta->ampdu_mlme.agg_session_valid) && rx 2919 net/mac80211/rx.c !test_and_set_bit(tid, rx->sta->ampdu_mlme.unexpected_agg)) rx 2920 net/mac80211/rx.c ieee80211_send_delba(rx->sdata, rx->sta->sta.addr, tid, rx 2924 net/mac80211/rx.c tid_agg_rx = rcu_dereference(rx->sta->ampdu_mlme.tid_rx[tid]); rx 2931 net/mac80211/rx.c event.u.ba.sta = &rx->sta->sta; rx 2940 net/mac80211/rx.c ieee80211_release_reorder_frames(rx->sdata, tid_agg_rx, rx 2944 net/mac80211/rx.c drv_event_callback(rx->local, rx->sdata, &event); rx 3004 net/mac80211/rx.c ieee80211_rx_h_mgmt_check(struct ieee80211_rx_data *rx) rx 3006 net/mac80211/rx.c struct ieee80211_mgmt *mgmt = (struct ieee80211_mgmt *) rx->skb->data; rx 3007 net/mac80211/rx.c struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(rx->skb); rx 3014 net/mac80211/rx.c if (rx->skb->len < 24) rx 3020 net/mac80211/rx.c if (rx->sdata->vif.type == NL80211_IFTYPE_AP && rx 3022 net/mac80211/rx.c !(rx->flags & IEEE80211_RX_BEACON_REPORTED)) { rx 3025 net/mac80211/rx.c if (ieee80211_hw_check(&rx->local->hw, SIGNAL_DBM) && rx 3029 net/mac80211/rx.c cfg80211_report_obss_beacon(rx->local->hw.wiphy, rx 3030 net/mac80211/rx.c rx->skb->data, rx->skb->len, rx 3032 net/mac80211/rx.c rx->flags |= IEEE80211_RX_BEACON_REPORTED; rx 3035 net/mac80211/rx.c if (ieee80211_drop_unencrypted_mgmt(rx)) rx 3042 net/mac80211/rx.c ieee80211_rx_h_action(struct ieee80211_rx_data *rx) rx 3044 net/mac80211/rx.c struct ieee80211_local *local = rx->local; rx 3045 net/mac80211/rx.c struct ieee80211_sub_if_data *sdata = rx->sdata; rx 3046 net/mac80211/rx.c struct ieee80211_mgmt *mgmt = (struct ieee80211_mgmt *) rx->skb->data; rx 3047 net/mac80211/rx.c struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(rx->skb); rx 3048 net/mac80211/rx.c int len = rx->skb->len; rx 3057 net/mac80211/rx.c if (!rx->sta && mgmt->u.action.category != WLAN_CATEGORY_PUBLIC && rx 3065 net/mac80211/rx.c if (!rx->sta->sta.ht_cap.ht_supported) rx 3101 net/mac80211/rx.c if (rx->sta->sta.smps_mode == smps_mode) rx 3103 net/mac80211/rx.c rx->sta->sta.smps_mode = smps_mode; rx 3108 net/mac80211/rx.c sband = rx->local->hw.wiphy->bands[status->band]; rx 3110 net/mac80211/rx.c rate_control_rate_update(local, sband, rx->sta, rx 3113 net/mac80211/rx.c rx->sta->addr, rx 3125 net/mac80211/rx.c if (!(rx->sta->sta.ht_cap.cap & rx 3132 net/mac80211/rx.c max_bw = ieee80211_sta_cap_rx_bw(rx->sta); rx 3135 net/mac80211/rx.c rx->sta->cur_max_bandwidth = max_bw; rx 3136 net/mac80211/rx.c new_bw = ieee80211_sta_cur_vht_bw(rx->sta); rx 3138 net/mac80211/rx.c if (rx->sta->sta.bandwidth == new_bw) rx 3141 net/mac80211/rx.c rx->sta->sta.bandwidth = new_bw; rx 3142 net/mac80211/rx.c sband = 
rx->local->hw.wiphy->bands[status->band]; rx 3144 net/mac80211/rx.c ieee80211_sta_rx_bw_to_chan_width(rx->sta); rx 3147 net/mac80211/rx.c rate_control_rate_update(local, sband, rx->sta, rx 3150 net/mac80211/rx.c rx->sta->addr, rx 3165 net/mac80211/rx.c if (!rx->sta) rx 3339 net/mac80211/rx.c if (rx->sta) rx 3340 net/mac80211/rx.c rx->sta->rx_stats.packets++; rx 3341 net/mac80211/rx.c dev_kfree_skb(rx->skb); rx 3345 net/mac80211/rx.c skb_queue_tail(&sdata->skb_queue, rx->skb); rx 3347 net/mac80211/rx.c if (rx->sta) rx 3348 net/mac80211/rx.c rx->sta->rx_stats.packets++; rx 3353 net/mac80211/rx.c ieee80211_rx_h_userspace_mgmt(struct ieee80211_rx_data *rx) rx 3355 net/mac80211/rx.c struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(rx->skb); rx 3369 net/mac80211/rx.c if (ieee80211_hw_check(&rx->local->hw, SIGNAL_DBM) && rx 3373 net/mac80211/rx.c if (cfg80211_rx_mgmt(&rx->sdata->wdev, status->freq, sig, rx 3374 net/mac80211/rx.c rx->skb->data, rx->skb->len, 0)) { rx 3375 net/mac80211/rx.c if (rx->sta) rx 3376 net/mac80211/rx.c rx->sta->rx_stats.packets++; rx 3377 net/mac80211/rx.c dev_kfree_skb(rx->skb); rx 3385 net/mac80211/rx.c ieee80211_rx_h_action_return(struct ieee80211_rx_data *rx) rx 3387 net/mac80211/rx.c struct ieee80211_local *local = rx->local; rx 3388 net/mac80211/rx.c struct ieee80211_mgmt *mgmt = (struct ieee80211_mgmt *) rx->skb->data; rx 3390 net/mac80211/rx.c struct ieee80211_sub_if_data *sdata = rx->sdata; rx 3391 net/mac80211/rx.c struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(rx->skb); rx 3418 net/mac80211/rx.c nskb = skb_copy_expand(rx->skb, local->hw.extra_tx_headroom, 0, rx 3425 net/mac80211/rx.c memcpy(nmgmt->sa, rx->sdata->vif.addr, ETH_ALEN); rx 3429 net/mac80211/rx.c if (rx->sdata->vif.type == NL80211_IFTYPE_P2P_DEVICE) { rx 3440 net/mac80211/rx.c __ieee80211_tx_skb_tid_band(rx->sdata, nskb, 7, rx 3443 net/mac80211/rx.c dev_kfree_skb(rx->skb); rx 3448 net/mac80211/rx.c ieee80211_rx_h_mgmt(struct ieee80211_rx_data *rx) rx 3450 net/mac80211/rx.c struct ieee80211_sub_if_data *sdata = rx->sdata; rx 3451 net/mac80211/rx.c struct ieee80211_mgmt *mgmt = (void *)rx->skb->data; rx 3500 net/mac80211/rx.c skb_queue_tail(&sdata->skb_queue, rx->skb); rx 3501 net/mac80211/rx.c ieee80211_queue_work(&rx->local->hw, &sdata->work); rx 3502 net/mac80211/rx.c if (rx->sta) rx 3503 net/mac80211/rx.c rx->sta->rx_stats.packets++; rx 3508 net/mac80211/rx.c static void ieee80211_rx_cooked_monitor(struct ieee80211_rx_data *rx, rx 3512 net/mac80211/rx.c struct ieee80211_local *local = rx->local; rx 3513 net/mac80211/rx.c struct sk_buff *skb = rx->skb, *skb2; rx 3522 net/mac80211/rx.c if (rx->flags & IEEE80211_RX_CMNTR) rx 3524 net/mac80211/rx.c rx->flags |= IEEE80211_RX_CMNTR; rx 3578 net/mac80211/rx.c static void ieee80211_rx_handlers_result(struct ieee80211_rx_data *rx, rx 3583 net/mac80211/rx.c I802_DEBUG_INC(rx->sdata->local->rx_handlers_drop); rx 3584 net/mac80211/rx.c if (rx->sta) rx 3585 net/mac80211/rx.c rx->sta->rx_stats.dropped++; rx 3592 net/mac80211/rx.c status = IEEE80211_SKB_RXCB((rx->skb)); rx 3594 net/mac80211/rx.c sband = rx->local->hw.wiphy->bands[status->band]; rx 3598 net/mac80211/rx.c ieee80211_rx_cooked_monitor(rx, rate); rx 3602 net/mac80211/rx.c I802_DEBUG_INC(rx->sdata->local->rx_handlers_drop); rx 3603 net/mac80211/rx.c if (rx->sta) rx 3604 net/mac80211/rx.c rx->sta->rx_stats.dropped++; rx 3605 net/mac80211/rx.c dev_kfree_skb(rx->skb); rx 3608 net/mac80211/rx.c I802_DEBUG_INC(rx->sdata->local->rx_handlers_queued); rx 3613 net/mac80211/rx.c static void 
ieee80211_rx_handlers(struct ieee80211_rx_data *rx, rx 3621 net/mac80211/rx.c res = rxh(rx); \ rx 3632 net/mac80211/rx.c spin_lock_bh(&rx->local->rx_path_lock); rx 3640 net/mac80211/rx.c rx->skb = skb; rx 3650 net/mac80211/rx.c if (ieee80211_vif_is_mesh(&rx->sdata->vif)) rx 3657 net/mac80211/rx.c res = ieee80211_rx_h_ctrl(rx, frames); rx 3668 net/mac80211/rx.c ieee80211_rx_handlers_result(rx, res); rx 3673 net/mac80211/rx.c spin_unlock_bh(&rx->local->rx_path_lock); rx 3676 net/mac80211/rx.c static void ieee80211_invoke_rx_handlers(struct ieee80211_rx_data *rx) rx 3685 net/mac80211/rx.c res = rxh(rx); \ rx 3693 net/mac80211/rx.c ieee80211_rx_reorder_ampdu(rx, &reorder_release); rx 3695 net/mac80211/rx.c ieee80211_rx_handlers(rx, &reorder_release); rx 3699 net/mac80211/rx.c ieee80211_rx_handlers_result(rx, res); rx 3711 net/mac80211/rx.c struct ieee80211_rx_data rx = { rx 3738 net/mac80211/rx.c drv_event_callback(rx.local, rx.sdata, &event); rx 3741 net/mac80211/rx.c ieee80211_rx_handlers(&rx, &frames); rx 3751 net/mac80211/rx.c struct ieee80211_rx_data rx = { rx 3765 net/mac80211/rx.c rx.sta = sta; rx 3766 net/mac80211/rx.c rx.sdata = sta->sdata; rx 3767 net/mac80211/rx.c rx.local = sta->local; rx 3816 net/mac80211/rx.c ieee80211_rx_handlers(&rx, &frames); rx 3825 net/mac80211/rx.c static bool ieee80211_accept_frame(struct ieee80211_rx_data *rx) rx 3827 net/mac80211/rx.c struct ieee80211_sub_if_data *sdata = rx->sdata; rx 3828 net/mac80211/rx.c struct sk_buff *skb = rx->skb; rx 3838 net/mac80211/rx.c if (ieee80211_is_robust_mgmt_frame(skb) && !rx->sta) rx 3856 net/mac80211/rx.c if (!rx->sta) { rx 3876 net/mac80211/rx.c if (!rx->sta) { rx 4132 net/mac80211/rx.c static bool ieee80211_invoke_fast_rx(struct ieee80211_rx_data *rx, rx 4135 net/mac80211/rx.c struct sk_buff *skb = rx->skb; rx 4138 net/mac80211/rx.c struct sta_info *sta = rx->sta; rx 4224 net/mac80211/rx.c if (rx->key && !(status->flag & RX_FLAG_MIC_STRIPPED) && rx 4229 net/mac80211/rx.c ieee80211_sta_rx_notify(rx->sdata, hdr); rx 4259 net/mac80211/rx.c if (rx->key && !ieee80211_has_protected(hdr->frame_control)) rx 4263 net/mac80211/rx.c if (__ieee80211_rx_h_amsdu(rx, snap_offs - hdrlen) != rx 4294 net/mac80211/rx.c stats->msdu[rx->seqno_idx]++; rx 4303 net/mac80211/rx.c sta_info_get(rx->sdata, addrs.da)) { rx 4328 net/mac80211/rx.c if (rx->napi) rx 4329 net/mac80211/rx.c napi_gro_receive(rx->napi, skb); rx 4346 net/mac80211/rx.c static bool ieee80211_prepare_and_rx_handle(struct ieee80211_rx_data *rx, rx 4349 net/mac80211/rx.c struct ieee80211_local *local = rx->local; rx 4350 net/mac80211/rx.c struct ieee80211_sub_if_data *sdata = rx->sdata; rx 4352 net/mac80211/rx.c rx->skb = skb; rx 4361 net/mac80211/rx.c if (consume && rx->sta) { rx 4364 net/mac80211/rx.c fast_rx = rcu_dereference(rx->sta->fast_rx); rx 4365 net/mac80211/rx.c if (fast_rx && ieee80211_invoke_fast_rx(rx, fast_rx)) rx 4369 net/mac80211/rx.c if (!ieee80211_accept_frame(rx)) rx 4382 net/mac80211/rx.c rx->skb = skb; rx 4385 net/mac80211/rx.c ieee80211_invoke_rx_handlers(rx); rx 4402 net/mac80211/rx.c struct ieee80211_rx_data rx; rx 4408 net/mac80211/rx.c memset(&rx, 0, sizeof(rx)); rx 4409 net/mac80211/rx.c rx.skb = skb; rx 4410 net/mac80211/rx.c rx.local = local; rx 4411 net/mac80211/rx.c rx.napi = napi; rx 4432 net/mac80211/rx.c ieee80211_parse_qos(&rx); rx 4433 net/mac80211/rx.c ieee80211_verify_alignment(&rx); rx 4443 net/mac80211/rx.c rx.sta = container_of(pubsta, struct sta_info, sta); rx 4444 net/mac80211/rx.c rx.sdata = rx.sta->sdata; rx 4445 
net/mac80211/rx.c if (ieee80211_prepare_and_rx_handle(&rx, skb, true)) rx 4458 net/mac80211/rx.c rx.sta = prev_sta; rx 4459 net/mac80211/rx.c rx.sdata = prev_sta->sdata; rx 4460 net/mac80211/rx.c ieee80211_prepare_and_rx_handle(&rx, skb, false); rx 4466 net/mac80211/rx.c rx.sta = prev_sta; rx 4467 net/mac80211/rx.c rx.sdata = prev_sta->sdata; rx 4469 net/mac80211/rx.c if (ieee80211_prepare_and_rx_handle(&rx, skb, true)) rx 4496 net/mac80211/rx.c rx.sta = sta_info_get_bss(prev, hdr->addr2); rx 4497 net/mac80211/rx.c rx.sdata = prev; rx 4498 net/mac80211/rx.c ieee80211_prepare_and_rx_handle(&rx, skb, false); rx 4504 net/mac80211/rx.c rx.sta = sta_info_get_bss(prev, hdr->addr2); rx 4505 net/mac80211/rx.c rx.sdata = prev; rx 4507 net/mac80211/rx.c if (ieee80211_prepare_and_rx_handle(&rx, skb, true)) rx 250 net/mac80211/tkip.c struct tkip_ctx_rx *rx_ctx = &key->u.tkip.rx[queue]; rx 1252 net/mac80211/trace.h TP_PROTO(struct ieee80211_local *local, u32 tx, u32 rx), rx 1254 net/mac80211/trace.h TP_ARGS(local, tx, rx), rx 1259 net/mac80211/trace.h __field(u32, rx) rx 1265 net/mac80211/trace.h __entry->rx = rx; rx 1270 net/mac80211/trace.h LOCAL_PR_ARG, __entry->tx, __entry->rx rx 1276 net/mac80211/trace.h u32 *rx, u32 *rx_max), rx 1278 net/mac80211/trace.h TP_ARGS(local, tx, tx_max, rx, rx_max), rx 1284 net/mac80211/trace.h __field(u32, rx) rx 1292 net/mac80211/trace.h __entry->rx = *rx; rx 1299 net/mac80211/trace.h __entry->tx, __entry->tx_max, __entry->rx, __entry->rx_max rx 243 net/mac80211/wep.c ieee80211_crypto_wep_decrypt(struct ieee80211_rx_data *rx) rx 245 net/mac80211/wep.c struct sk_buff *skb = rx->skb; rx 254 net/mac80211/wep.c if (skb_linearize(rx->skb)) rx 256 net/mac80211/wep.c if (ieee80211_wep_decrypt(rx->local, rx->skb, rx->key)) rx 259 net/mac80211/wep.c if (!pskb_may_pull(rx->skb, ieee80211_hdrlen(fc) + rx 262 net/mac80211/wep.c ieee80211_wep_remove_iv(rx->local, rx->skb, rx->key); rx 265 net/mac80211/wep.c pskb_trim(rx->skb, rx->skb->len - IEEE80211_WEP_ICV_LEN)) rx 26 net/mac80211/wep.h ieee80211_crypto_wep_decrypt(struct ieee80211_rx_data *rx); rx 94 net/mac80211/wpa.c ieee80211_rx_h_michael_mic_verify(struct ieee80211_rx_data *rx) rx 100 net/mac80211/wpa.c struct sk_buff *skb = rx->skb; rx 121 net/mac80211/wpa.c if (!(status->flag & RX_FLAG_IV_STRIPPED) && rx->key && rx 122 net/mac80211/wpa.c rx->key->conf.cipher == WLAN_CIPHER_SUITE_TKIP) rx 133 net/mac80211/wpa.c if (!rx->key || rx->key->conf.cipher != WLAN_CIPHER_SUITE_TKIP || rx 137 net/mac80211/wpa.c if (rx->sdata->vif.type == NL80211_IFTYPE_AP && rx->key->conf.keyidx) { rx 154 net/mac80211/wpa.c if (skb_linearize(rx->skb)) rx 160 net/mac80211/wpa.c key = &rx->key->conf.key[NL80211_TKIP_DATA_OFFSET_RX_MIC_KEY]; rx 170 net/mac80211/wpa.c rx->key->u.tkip.rx[rx->security_idx].iv32 = rx->tkip_iv32; rx 171 net/mac80211/wpa.c rx->key->u.tkip.rx[rx->security_idx].iv16 = rx->tkip_iv16; rx 176 net/mac80211/wpa.c rx->key->u.tkip.mic_failures++; rx 184 net/mac80211/wpa.c cfg80211_michael_mic_failure(rx->sdata->dev, hdr->addr2, rx 188 net/mac80211/wpa.c rx->key ? 
rx->key->conf.keyidx : -1, rx 264 net/mac80211/wpa.c ieee80211_crypto_tkip_decrypt(struct ieee80211_rx_data *rx) rx 266 net/mac80211/wpa.c struct ieee80211_hdr *hdr = (struct ieee80211_hdr *) rx->skb->data; rx 268 net/mac80211/wpa.c struct ieee80211_key *key = rx->key; rx 269 net/mac80211/wpa.c struct sk_buff *skb = rx->skb; rx 277 net/mac80211/wpa.c if (!rx->sta || skb->len - hdrlen < 12) rx 281 net/mac80211/wpa.c if (skb_linearize(rx->skb)) rx 293 net/mac80211/wpa.c res = ieee80211_tkip_decrypt_data(&rx->local->wep_rx_ctx, rx 295 net/mac80211/wpa.c skb->len - hdrlen, rx->sta->sta.addr, rx 296 net/mac80211/wpa.c hdr->addr1, hwaccel, rx->security_idx, rx 297 net/mac80211/wpa.c &rx->tkip_iv32, rx 298 net/mac80211/wpa.c &rx->tkip_iv16); rx 494 net/mac80211/wpa.c ieee80211_crypto_ccmp_decrypt(struct ieee80211_rx_data *rx, rx 497 net/mac80211/wpa.c struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)rx->skb->data; rx 499 net/mac80211/wpa.c struct ieee80211_key *key = rx->key; rx 500 net/mac80211/wpa.c struct sk_buff *skb = rx->skb; rx 513 net/mac80211/wpa.c if (!pskb_may_pull(rx->skb, hdrlen + IEEE80211_CCMP_HDR_LEN)) rx 518 net/mac80211/wpa.c if (skb_linearize(rx->skb)) rx 523 net/mac80211/wpa.c if (!rx->sta || data_len < 0) rx 531 net/mac80211/wpa.c queue = rx->security_idx; rx 723 net/mac80211/wpa.c ieee80211_crypto_gcmp_decrypt(struct ieee80211_rx_data *rx) rx 725 net/mac80211/wpa.c struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)rx->skb->data; rx 727 net/mac80211/wpa.c struct ieee80211_key *key = rx->key; rx 728 net/mac80211/wpa.c struct sk_buff *skb = rx->skb; rx 740 net/mac80211/wpa.c if (!pskb_may_pull(rx->skb, hdrlen + IEEE80211_GCMP_HDR_LEN)) rx 745 net/mac80211/wpa.c if (skb_linearize(rx->skb)) rx 750 net/mac80211/wpa.c if (!rx->sta || data_len < 0) rx 758 net/mac80211/wpa.c queue = rx->security_idx; rx 839 net/mac80211/wpa.c ieee80211_crypto_cs_decrypt(struct ieee80211_rx_data *rx) rx 841 net/mac80211/wpa.c struct ieee80211_key *key = rx->key; rx 842 net/mac80211/wpa.c struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)rx->skb->data; rx 845 net/mac80211/wpa.c struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(rx->skb); rx 851 net/mac80211/wpa.c if (!rx->sta || !rx->sta->cipher_scheme || rx 858 net/mac80211/wpa.c cs = rx->sta->cipher_scheme; rx 860 net/mac80211/wpa.c data_len = rx->skb->len - hdrlen - cs->hdr_len; rx 870 net/mac80211/wpa.c if (skb_linearize(rx->skb)) rx 873 net/mac80211/wpa.c hdr = (struct ieee80211_hdr *)rx->skb->data; rx 876 net/mac80211/wpa.c skb_pn = rx->skb->data + hdrlen + cs->pn_off; rx 884 net/mac80211/wpa.c if (pskb_trim(rx->skb, rx->skb->len - cs->mic_len)) rx 887 net/mac80211/wpa.c memmove(rx->skb->data + cs->hdr_len, rx->skb->data, hdrlen); rx 888 net/mac80211/wpa.c skb_pull(rx->skb, cs->hdr_len); rx 1024 net/mac80211/wpa.c ieee80211_crypto_aes_cmac_decrypt(struct ieee80211_rx_data *rx) rx 1026 net/mac80211/wpa.c struct sk_buff *skb = rx->skb; rx 1028 net/mac80211/wpa.c struct ieee80211_key *key = rx->key; rx 1074 net/mac80211/wpa.c ieee80211_crypto_aes_cmac_256_decrypt(struct ieee80211_rx_data *rx) rx 1076 net/mac80211/wpa.c struct sk_buff *skb = rx->skb; rx 1078 net/mac80211/wpa.c struct ieee80211_key *key = rx->key; rx 1173 net/mac80211/wpa.c ieee80211_crypto_aes_gmac_decrypt(struct ieee80211_rx_data *rx) rx 1175 net/mac80211/wpa.c struct sk_buff *skb = rx->skb; rx 1177 net/mac80211/wpa.c struct ieee80211_key *key = rx->key; rx 1259 net/mac80211/wpa.c ieee80211_crypto_hw_decrypt(struct ieee80211_rx_data *rx) rx 1261 net/mac80211/wpa.c if 
(rx->sta && rx->sta->cipher_scheme) rx 1262 net/mac80211/wpa.c return ieee80211_crypto_cs_decrypt(rx); rx 16 net/mac80211/wpa.h ieee80211_rx_h_michael_mic_verify(struct ieee80211_rx_data *rx); rx 21 net/mac80211/wpa.h ieee80211_crypto_tkip_decrypt(struct ieee80211_rx_data *rx); rx 27 net/mac80211/wpa.h ieee80211_crypto_ccmp_decrypt(struct ieee80211_rx_data *rx, rx 35 net/mac80211/wpa.h ieee80211_crypto_aes_cmac_decrypt(struct ieee80211_rx_data *rx); rx 37 net/mac80211/wpa.h ieee80211_crypto_aes_cmac_256_decrypt(struct ieee80211_rx_data *rx); rx 41 net/mac80211/wpa.h ieee80211_crypto_aes_gmac_decrypt(struct ieee80211_rx_data *rx); rx 45 net/mac80211/wpa.h ieee80211_crypto_hw_decrypt(struct ieee80211_rx_data *rx); rx 50 net/mac80211/wpa.h ieee80211_crypto_gcmp_decrypt(struct ieee80211_rx_data *rx); rx 177 net/nfc/nci/spi.c struct spi_transfer tx, rx; rx 192 net/nfc/nci/spi.c memset(&rx, 0, sizeof(struct spi_transfer)); rx 193 net/nfc/nci/spi.c rx.rx_buf = resp_hdr; rx 194 net/nfc/nci/spi.c rx.len = 2; rx 195 net/nfc/nci/spi.c rx.cs_change = 1; rx 196 net/nfc/nci/spi.c rx.speed_hz = nspi->xfer_speed_hz; rx 197 net/nfc/nci/spi.c spi_message_add_tail(&rx, &m); rx 215 net/nfc/nci/spi.c memset(&rx, 0, sizeof(struct spi_transfer)); rx 216 net/nfc/nci/spi.c rx.rx_buf = skb_put(skb, rx_len); rx 217 net/nfc/nci/spi.c rx.len = rx_len; rx 218 net/nfc/nci/spi.c rx.cs_change = 0; rx 219 net/nfc/nci/spi.c rx.delay_usecs = nspi->xfer_udelay; rx 220 net/nfc/nci/spi.c rx.speed_hz = nspi->xfer_speed_hz; rx 221 net/nfc/nci/spi.c spi_message_add_tail(&rx, &m); rx 76 net/rxrpc/af_rxrpc.c static int rxrpc_validate_address(struct rxrpc_sock *rx, rx 96 net/rxrpc/af_rxrpc.c if (srx->transport.family != rx->family && rx 97 net/rxrpc/af_rxrpc.c srx->transport.family == AF_INET && rx->family != AF_INET6) rx 133 net/rxrpc/af_rxrpc.c struct rxrpc_sock *rx = rxrpc_sk(sock->sk); rx 137 net/rxrpc/af_rxrpc.c _enter("%p,%p,%d", rx, saddr, len); rx 139 net/rxrpc/af_rxrpc.c ret = rxrpc_validate_address(rx, srx, len); rx 144 net/rxrpc/af_rxrpc.c lock_sock(&rx->sk); rx 146 net/rxrpc/af_rxrpc.c switch (rx->sk.sk_state) { rx 148 net/rxrpc/af_rxrpc.c rx->srx = *srx; rx 149 net/rxrpc/af_rxrpc.c local = rxrpc_lookup_local(sock_net(&rx->sk), &rx->srx); rx 159 net/rxrpc/af_rxrpc.c rx->local = local; rx 160 net/rxrpc/af_rxrpc.c rcu_assign_pointer(local->service, rx); rx 163 net/rxrpc/af_rxrpc.c rx->sk.sk_state = RXRPC_SERVER_BOUND; rx 165 net/rxrpc/af_rxrpc.c rx->local = local; rx 166 net/rxrpc/af_rxrpc.c rx->sk.sk_state = RXRPC_CLIENT_BOUND; rx 175 net/rxrpc/af_rxrpc.c if (service_id == rx->srx.srx_service) rx 178 net/rxrpc/af_rxrpc.c srx->srx_service = rx->srx.srx_service; rx 179 net/rxrpc/af_rxrpc.c if (memcmp(srx, &rx->srx, sizeof(*srx)) != 0) rx 181 net/rxrpc/af_rxrpc.c rx->second_service = service_id; rx 182 net/rxrpc/af_rxrpc.c rx->sk.sk_state = RXRPC_SERVER_BOUND2; rx 190 net/rxrpc/af_rxrpc.c release_sock(&rx->sk); rx 200 net/rxrpc/af_rxrpc.c release_sock(&rx->sk); rx 212 net/rxrpc/af_rxrpc.c struct rxrpc_sock *rx = rxrpc_sk(sk); rx 216 net/rxrpc/af_rxrpc.c _enter("%p,%d", rx, backlog); rx 218 net/rxrpc/af_rxrpc.c lock_sock(&rx->sk); rx 220 net/rxrpc/af_rxrpc.c switch (rx->sk.sk_state) { rx 226 net/rxrpc/af_rxrpc.c ASSERT(rx->local != NULL); rx 235 net/rxrpc/af_rxrpc.c ret = rxrpc_service_prealloc(rx, GFP_KERNEL); rx 237 net/rxrpc/af_rxrpc.c rx->sk.sk_state = RXRPC_SERVER_LISTENING; rx 243 net/rxrpc/af_rxrpc.c rx->sk.sk_state = RXRPC_SERVER_LISTEN_DISABLED; rx 245 net/rxrpc/af_rxrpc.c rxrpc_discard_prealloc(rx); rx 255 
net/rxrpc/af_rxrpc.c release_sock(&rx->sk); rx 294 net/rxrpc/af_rxrpc.c struct rxrpc_sock *rx = rxrpc_sk(sock->sk); rx 299 net/rxrpc/af_rxrpc.c ret = rxrpc_validate_address(rx, srx, sizeof(*srx)); rx 303 net/rxrpc/af_rxrpc.c lock_sock(&rx->sk); rx 306 net/rxrpc/af_rxrpc.c key = rx->key; rx 316 net/rxrpc/af_rxrpc.c cp.local = rx->local; rx 318 net/rxrpc/af_rxrpc.c cp.security_level = rx->min_sec_level; rx 322 net/rxrpc/af_rxrpc.c call = rxrpc_new_client_call(rx, &cp, srx, &p, gfp, debug_id); rx 412 net/rxrpc/af_rxrpc.c struct rxrpc_sock *rx = rxrpc_sk(sock->sk); rx 414 net/rxrpc/af_rxrpc.c rx->notify_new_call = notify_new_call; rx 415 net/rxrpc/af_rxrpc.c rx->discard_new_call = discard_new_call; rx 453 net/rxrpc/af_rxrpc.c struct rxrpc_sock *rx = rxrpc_sk(sock->sk); rx 456 net/rxrpc/af_rxrpc.c _enter("%p,%p,%d,%d", rx, addr, addr_len, flags); rx 458 net/rxrpc/af_rxrpc.c ret = rxrpc_validate_address(rx, srx, addr_len); rx 464 net/rxrpc/af_rxrpc.c lock_sock(&rx->sk); rx 467 net/rxrpc/af_rxrpc.c if (test_bit(RXRPC_SOCK_CONNECTED, &rx->flags)) rx 470 net/rxrpc/af_rxrpc.c switch (rx->sk.sk_state) { rx 472 net/rxrpc/af_rxrpc.c rx->sk.sk_state = RXRPC_CLIENT_UNBOUND; rx 481 net/rxrpc/af_rxrpc.c rx->connect_srx = *srx; rx 482 net/rxrpc/af_rxrpc.c set_bit(RXRPC_SOCK_CONNECTED, &rx->flags); rx 486 net/rxrpc/af_rxrpc.c release_sock(&rx->sk); rx 502 net/rxrpc/af_rxrpc.c struct rxrpc_sock *rx = rxrpc_sk(sock->sk); rx 505 net/rxrpc/af_rxrpc.c _enter(",{%d},,%zu", rx->sk.sk_state, len); rx 511 net/rxrpc/af_rxrpc.c ret = rxrpc_validate_address(rx, m->msg_name, m->msg_namelen); rx 518 net/rxrpc/af_rxrpc.c lock_sock(&rx->sk); rx 520 net/rxrpc/af_rxrpc.c switch (rx->sk.sk_state) { rx 523 net/rxrpc/af_rxrpc.c rx->srx.srx_family = AF_RXRPC; rx 524 net/rxrpc/af_rxrpc.c rx->srx.srx_service = 0; rx 525 net/rxrpc/af_rxrpc.c rx->srx.transport_type = SOCK_DGRAM; rx 526 net/rxrpc/af_rxrpc.c rx->srx.transport.family = rx->family; rx 527 net/rxrpc/af_rxrpc.c switch (rx->family) { rx 529 net/rxrpc/af_rxrpc.c rx->srx.transport_len = sizeof(struct sockaddr_in); rx 533 net/rxrpc/af_rxrpc.c rx->srx.transport_len = sizeof(struct sockaddr_in6); rx 540 net/rxrpc/af_rxrpc.c local = rxrpc_lookup_local(sock_net(sock->sk), &rx->srx); rx 546 net/rxrpc/af_rxrpc.c rx->local = local; rx 547 net/rxrpc/af_rxrpc.c rx->sk.sk_state = RXRPC_CLIENT_BOUND; rx 552 net/rxrpc/af_rxrpc.c test_bit(RXRPC_SOCK_CONNECTED, &rx->flags)) { rx 553 net/rxrpc/af_rxrpc.c m->msg_name = &rx->connect_srx; rx 554 net/rxrpc/af_rxrpc.c m->msg_namelen = sizeof(rx->connect_srx); rx 559 net/rxrpc/af_rxrpc.c ret = rxrpc_do_sendmsg(rx, m, len); rx 568 net/rxrpc/af_rxrpc.c release_sock(&rx->sk); rx 580 net/rxrpc/af_rxrpc.c struct rxrpc_sock *rx = rxrpc_sk(sock->sk); rx 587 net/rxrpc/af_rxrpc.c lock_sock(&rx->sk); rx 597 net/rxrpc/af_rxrpc.c if (rx->sk.sk_state != RXRPC_UNBOUND) rx 599 net/rxrpc/af_rxrpc.c rx->exclusive = true; rx 604 net/rxrpc/af_rxrpc.c if (rx->key) rx 607 net/rxrpc/af_rxrpc.c if (rx->sk.sk_state != RXRPC_UNBOUND) rx 609 net/rxrpc/af_rxrpc.c ret = rxrpc_request_key(rx, optval, optlen); rx 614 net/rxrpc/af_rxrpc.c if (rx->key) rx 617 net/rxrpc/af_rxrpc.c if (rx->sk.sk_state != RXRPC_UNBOUND) rx 619 net/rxrpc/af_rxrpc.c ret = rxrpc_server_keyring(rx, optval, optlen); rx 627 net/rxrpc/af_rxrpc.c if (rx->sk.sk_state != RXRPC_UNBOUND) rx 636 net/rxrpc/af_rxrpc.c rx->min_sec_level = min_sec_level; rx 642 net/rxrpc/af_rxrpc.c rx->service_upgrade.from != 0) rx 645 net/rxrpc/af_rxrpc.c if (rx->sk.sk_state != RXRPC_SERVER_BOUND2) rx 652 net/rxrpc/af_rxrpc.c if 
((service_upgrade[0] != rx->srx.srx_service || rx 653 net/rxrpc/af_rxrpc.c service_upgrade[1] != rx->second_service) && rx 654 net/rxrpc/af_rxrpc.c (service_upgrade[0] != rx->second_service || rx 655 net/rxrpc/af_rxrpc.c service_upgrade[1] != rx->srx.srx_service)) rx 657 net/rxrpc/af_rxrpc.c rx->service_upgrade.from = service_upgrade[0]; rx 658 net/rxrpc/af_rxrpc.c rx->service_upgrade.to = service_upgrade[1]; rx 669 net/rxrpc/af_rxrpc.c release_sock(&rx->sk); rx 708 net/rxrpc/af_rxrpc.c struct rxrpc_sock *rx = rxrpc_sk(sk); rx 716 net/rxrpc/af_rxrpc.c if (!list_empty(&rx->recvmsg_q)) rx 735 net/rxrpc/af_rxrpc.c struct rxrpc_sock *rx; rx 762 net/rxrpc/af_rxrpc.c rx = rxrpc_sk(sk); rx 763 net/rxrpc/af_rxrpc.c rx->family = protocol; rx 764 net/rxrpc/af_rxrpc.c rx->calls = RB_ROOT; rx 766 net/rxrpc/af_rxrpc.c spin_lock_init(&rx->incoming_lock); rx 767 net/rxrpc/af_rxrpc.c INIT_LIST_HEAD(&rx->sock_calls); rx 768 net/rxrpc/af_rxrpc.c INIT_LIST_HEAD(&rx->to_be_accepted); rx 769 net/rxrpc/af_rxrpc.c INIT_LIST_HEAD(&rx->recvmsg_q); rx 770 net/rxrpc/af_rxrpc.c rwlock_init(&rx->recvmsg_lock); rx 771 net/rxrpc/af_rxrpc.c rwlock_init(&rx->call_lock); rx 772 net/rxrpc/af_rxrpc.c memset(&rx->srx, 0, sizeof(rx->srx)); rx 774 net/rxrpc/af_rxrpc.c rxnet = rxrpc_net(sock_net(&rx->sk)); rx 777 net/rxrpc/af_rxrpc.c _leave(" = 0 [%p]", rx); rx 787 net/rxrpc/af_rxrpc.c struct rxrpc_sock *rx = rxrpc_sk(sk); rx 808 net/rxrpc/af_rxrpc.c rxrpc_discard_prealloc(rx); rx 838 net/rxrpc/af_rxrpc.c struct rxrpc_sock *rx = rxrpc_sk(sk); rx 855 net/rxrpc/af_rxrpc.c rx->local->service_closed = true; rx 863 net/rxrpc/af_rxrpc.c if (rx->local && rcu_access_pointer(rx->local->service) == rx) { rx 864 net/rxrpc/af_rxrpc.c write_lock(&rx->local->services_lock); rx 865 net/rxrpc/af_rxrpc.c rcu_assign_pointer(rx->local->service, NULL); rx 866 net/rxrpc/af_rxrpc.c write_unlock(&rx->local->services_lock); rx 870 net/rxrpc/af_rxrpc.c rxrpc_discard_prealloc(rx); rx 871 net/rxrpc/af_rxrpc.c rxrpc_release_calls_on_socket(rx); rx 875 net/rxrpc/af_rxrpc.c rxrpc_unuse_local(rx->local); rx 876 net/rxrpc/af_rxrpc.c rxrpc_put_local(rx->local); rx 877 net/rxrpc/af_rxrpc.c rx->local = NULL; rx 878 net/rxrpc/af_rxrpc.c key_put(rx->key); rx 879 net/rxrpc/af_rxrpc.c rx->key = NULL; rx 880 net/rxrpc/af_rxrpc.c key_put(rx->securities); rx 881 net/rxrpc/af_rxrpc.c rx->securities = NULL; rx 29 net/rxrpc/call_accept.c static int rxrpc_service_prealloc_one(struct rxrpc_sock *rx, rx 38 net/rxrpc/call_accept.c struct rxrpc_net *rxnet = rxrpc_net(sock_net(&rx->sk)); rx 43 net/rxrpc/call_accept.c max = rx->sk.sk_max_ack_backlog; rx 44 net/rxrpc/call_accept.c tmp = rx->sk.sk_ack_backlog; rx 67 net/rxrpc/call_accept.c struct rxrpc_peer *peer = rxrpc_alloc_peer(rx->local, gfp); rx 94 net/rxrpc/call_accept.c call = rxrpc_alloc_call(rx, gfp, debug_id); rx 104 net/rxrpc/call_accept.c write_lock(&rx->call_lock); rx 110 net/rxrpc/call_accept.c pp = &rx->calls.rb_node; rx 129 net/rxrpc/call_accept.c rb_insert_color(&call->sock_node, &rx->calls); rx 133 net/rxrpc/call_accept.c list_add(&call->sock_link, &rx->sock_calls); rx 135 net/rxrpc/call_accept.c write_unlock(&rx->call_lock); rx 148 net/rxrpc/call_accept.c write_unlock(&rx->call_lock); rx 161 net/rxrpc/call_accept.c int rxrpc_service_prealloc(struct rxrpc_sock *rx, gfp_t gfp) rx 163 net/rxrpc/call_accept.c struct rxrpc_backlog *b = rx->backlog; rx 169 net/rxrpc/call_accept.c rx->backlog = b; rx 172 net/rxrpc/call_accept.c if (rx->discard_new_call) rx 175 net/rxrpc/call_accept.c while 
(rxrpc_service_prealloc_one(rx, b, NULL, NULL, 0, gfp, rx 185 net/rxrpc/call_accept.c void rxrpc_discard_prealloc(struct rxrpc_sock *rx) rx 187 net/rxrpc/call_accept.c struct rxrpc_backlog *b = rx->backlog; rx 188 net/rxrpc/call_accept.c struct rxrpc_net *rxnet = rxrpc_net(sock_net(&rx->sk)); rx 193 net/rxrpc/call_accept.c rx->backlog = NULL; rx 198 net/rxrpc/call_accept.c spin_lock_bh(&rx->incoming_lock); rx 199 net/rxrpc/call_accept.c spin_unlock_bh(&rx->incoming_lock); rx 227 net/rxrpc/call_accept.c rcu_assign_pointer(call->socket, rx); rx 228 net/rxrpc/call_accept.c if (rx->discard_new_call) { rx 230 net/rxrpc/call_accept.c rx->discard_new_call(call, call->user_call_ID); rx 234 net/rxrpc/call_accept.c rxrpc_release_call(rx, call); rx 262 net/rxrpc/call_accept.c static struct rxrpc_call *rxrpc_alloc_incoming_call(struct rxrpc_sock *rx, rx 270 net/rxrpc/call_accept.c struct rxrpc_backlog *b = rx->backlog; rx 304 net/rxrpc/call_accept.c rxrpc_new_incoming_peer(rx, local, peer); rx 315 net/rxrpc/call_accept.c rxrpc_new_incoming_connection(rx, conn, sec, key, skb); rx 350 net/rxrpc/call_accept.c struct rxrpc_sock *rx, rx 362 net/rxrpc/call_accept.c spin_lock(&rx->incoming_lock); rx 363 net/rxrpc/call_accept.c if (rx->sk.sk_state == RXRPC_SERVER_LISTEN_DISABLED || rx 364 net/rxrpc/call_accept.c rx->sk.sk_state == RXRPC_CLOSE) { rx 379 net/rxrpc/call_accept.c if (!conn && !rxrpc_look_up_server_security(local, rx, &sec, &key, skb)) rx 382 net/rxrpc/call_accept.c call = rxrpc_alloc_incoming_call(rx, local, peer, conn, sec, key, skb); rx 393 net/rxrpc/call_accept.c rxrpc_incoming_call(rx, call, skb); rx 396 net/rxrpc/call_accept.c if (rx->notify_new_call) rx 397 net/rxrpc/call_accept.c rx->notify_new_call(&rx->sk, call, call->user_call_ID); rx 399 net/rxrpc/call_accept.c sk_acceptq_added(&rx->sk); rx 412 net/rxrpc/call_accept.c if (rx->discard_new_call) rx 432 net/rxrpc/call_accept.c spin_unlock(&rx->incoming_lock); rx 450 net/rxrpc/call_accept.c spin_unlock(&rx->incoming_lock); rx 460 net/rxrpc/call_accept.c struct rxrpc_call *rxrpc_accept_call(struct rxrpc_sock *rx, rx 463 net/rxrpc/call_accept.c __releases(&rx->sk.sk_lock.slock) rx 474 net/rxrpc/call_accept.c write_lock(&rx->call_lock); rx 476 net/rxrpc/call_accept.c if (list_empty(&rx->to_be_accepted)) { rx 477 net/rxrpc/call_accept.c write_unlock(&rx->call_lock); rx 478 net/rxrpc/call_accept.c release_sock(&rx->sk); rx 484 net/rxrpc/call_accept.c pp = &rx->calls.rb_node; rx 501 net/rxrpc/call_accept.c call = list_entry(rx->to_be_accepted.next, rx 503 net/rxrpc/call_accept.c write_unlock(&rx->call_lock); rx 511 net/rxrpc/call_accept.c release_sock(&rx->sk); rx 516 net/rxrpc/call_accept.c write_lock(&rx->call_lock); rx 518 net/rxrpc/call_accept.c sk_acceptq_removed(&rx->sk); rx 522 net/rxrpc/call_accept.c pp = &rx->calls.rb_node; rx 553 net/rxrpc/call_accept.c rb_insert_color(&call->sock_node, &rx->calls); rx 558 net/rxrpc/call_accept.c write_unlock(&rx->call_lock); rx 560 net/rxrpc/call_accept.c rxrpc_service_prealloc(rx, GFP_KERNEL); rx 561 net/rxrpc/call_accept.c release_sock(&rx->sk); rx 568 net/rxrpc/call_accept.c write_unlock(&rx->call_lock); rx 569 net/rxrpc/call_accept.c rxrpc_release_call(rx, call); rx 575 net/rxrpc/call_accept.c write_unlock(&rx->call_lock); rx 577 net/rxrpc/call_accept.c rxrpc_service_prealloc(rx, GFP_KERNEL); rx 578 net/rxrpc/call_accept.c release_sock(&rx->sk); rx 587 net/rxrpc/call_accept.c int rxrpc_reject_call(struct rxrpc_sock *rx) rx 597 net/rxrpc/call_accept.c write_lock(&rx->call_lock); rx 599 
net/rxrpc/call_accept.c if (list_empty(&rx->to_be_accepted)) { rx 600 net/rxrpc/call_accept.c write_unlock(&rx->call_lock); rx 607 net/rxrpc/call_accept.c call = list_entry(rx->to_be_accepted.next, rx 610 net/rxrpc/call_accept.c sk_acceptq_removed(&rx->sk); rx 628 net/rxrpc/call_accept.c write_unlock(&rx->call_lock); rx 631 net/rxrpc/call_accept.c rxrpc_release_call(rx, call); rx 634 net/rxrpc/call_accept.c rxrpc_service_prealloc(rx, GFP_KERNEL); rx 660 net/rxrpc/call_accept.c struct rxrpc_sock *rx = rxrpc_sk(sock->sk); rx 661 net/rxrpc/call_accept.c struct rxrpc_backlog *b = rx->backlog; rx 666 net/rxrpc/call_accept.c return rxrpc_service_prealloc_one(rx, b, notify_rx, rx 62 net/rxrpc/call_object.c struct rxrpc_call *rxrpc_find_call_by_user_ID(struct rxrpc_sock *rx, rx 68 net/rxrpc/call_object.c _enter("%p,%lx", rx, user_call_ID); rx 70 net/rxrpc/call_object.c read_lock(&rx->call_lock); rx 72 net/rxrpc/call_object.c p = rx->calls.rb_node; rx 84 net/rxrpc/call_object.c read_unlock(&rx->call_lock); rx 90 net/rxrpc/call_object.c read_unlock(&rx->call_lock); rx 98 net/rxrpc/call_object.c struct rxrpc_call *rxrpc_alloc_call(struct rxrpc_sock *rx, gfp_t gfp, rx 102 net/rxrpc/call_object.c struct rxrpc_net *rxnet = rxrpc_net(sock_net(&rx->sk)); rx 123 net/rxrpc/call_object.c if (rx->sk.sk_kern_sock) rx 169 net/rxrpc/call_object.c static struct rxrpc_call *rxrpc_alloc_client_call(struct rxrpc_sock *rx, rx 179 net/rxrpc/call_object.c call = rxrpc_alloc_call(rx, gfp, debug_id); rx 216 net/rxrpc/call_object.c struct rxrpc_call *rxrpc_new_client_call(struct rxrpc_sock *rx, rx 222 net/rxrpc/call_object.c __releases(&rx->sk.sk_lock.slock) rx 231 net/rxrpc/call_object.c _enter("%p,%lx", rx, p->user_call_ID); rx 233 net/rxrpc/call_object.c call = rxrpc_alloc_client_call(rx, srx, gfp, debug_id); rx 235 net/rxrpc/call_object.c release_sock(&rx->sk); rx 252 net/rxrpc/call_object.c write_lock(&rx->call_lock); rx 254 net/rxrpc/call_object.c pp = &rx->calls.rb_node; rx 268 net/rxrpc/call_object.c rcu_assign_pointer(call->socket, rx); rx 273 net/rxrpc/call_object.c rb_insert_color(&call->sock_node, &rx->calls); rx 274 net/rxrpc/call_object.c list_add(&call->sock_link, &rx->sock_calls); rx 276 net/rxrpc/call_object.c write_unlock(&rx->call_lock); rx 284 net/rxrpc/call_object.c release_sock(&rx->sk); rx 289 net/rxrpc/call_object.c ret = rxrpc_connect_call(rx, call, cp, srx, gfp); rx 309 net/rxrpc/call_object.c write_unlock(&rx->call_lock); rx 310 net/rxrpc/call_object.c release_sock(&rx->sk); rx 318 net/rxrpc/call_object.c rxrpc_release_call(rx, call); rx 329 net/rxrpc/call_object.c void rxrpc_incoming_call(struct rxrpc_sock *rx, rx 339 net/rxrpc/call_object.c rcu_assign_pointer(call->socket, rx); rx 443 net/rxrpc/call_object.c void rxrpc_release_call(struct rxrpc_sock *rx, struct rxrpc_call *call) rx 465 net/rxrpc/call_object.c write_lock_bh(&rx->recvmsg_lock); rx 478 net/rxrpc/call_object.c write_unlock_bh(&rx->recvmsg_lock); rx 482 net/rxrpc/call_object.c write_lock(&rx->call_lock); rx 485 net/rxrpc/call_object.c rb_erase(&call->sock_node, &rx->calls); rx 491 net/rxrpc/call_object.c write_unlock(&rx->call_lock); rx 507 net/rxrpc/call_object.c void rxrpc_release_calls_on_socket(struct rxrpc_sock *rx) rx 511 net/rxrpc/call_object.c _enter("%p", rx); rx 513 net/rxrpc/call_object.c while (!list_empty(&rx->to_be_accepted)) { rx 514 net/rxrpc/call_object.c call = list_entry(rx->to_be_accepted.next, rx 521 net/rxrpc/call_object.c while (!list_empty(&rx->sock_calls)) { rx 522 net/rxrpc/call_object.c call = 
list_entry(rx->sock_calls.next, rx 527 net/rxrpc/call_object.c rxrpc_release_call(rx, call); rx 275 net/rxrpc/conn_client.c static int rxrpc_get_client_conn(struct rxrpc_sock *rx, rx 289 net/rxrpc/conn_client.c cp->peer = rxrpc_lookup_peer(rx, cp->local, srx, gfp); rx 694 net/rxrpc/conn_client.c int rxrpc_connect_call(struct rxrpc_sock *rx, rx 708 net/rxrpc/conn_client.c ret = rxrpc_get_client_conn(rx, call, cp, srx, gfp); rx 149 net/rxrpc/conn_service.c void rxrpc_new_incoming_connection(struct rxrpc_sock *rx, rx 177 net/rxrpc/conn_service.c conn->service_id == rx->service_upgrade.from) rx 178 net/rxrpc/conn_service.c conn->service_id = rx->service_upgrade.to; rx 1083 net/rxrpc/input.c static void rxrpc_input_implicit_end_call(struct rxrpc_sock *rx, rx 1102 net/rxrpc/input.c spin_lock(&rx->incoming_lock); rx 1104 net/rxrpc/input.c spin_unlock(&rx->incoming_lock); rx 1202 net/rxrpc/input.c struct rxrpc_sock *rx = NULL; rx 1313 net/rxrpc/input.c rx = rcu_dereference(local->service); rx 1314 net/rxrpc/input.c if (!rx || (sp->hdr.serviceId != rx->srx.srx_service && rx 1315 net/rxrpc/input.c sp->hdr.serviceId != rx->second_service)) { rx 1389 net/rxrpc/input.c rxrpc_input_implicit_end_call(rx, conn, call); rx 1409 net/rxrpc/input.c call = rxrpc_new_incoming_call(local, rx, skb); rx 899 net/rxrpc/key.c int rxrpc_request_key(struct rxrpc_sock *rx, char __user *optval, int optlen) rx 913 net/rxrpc/key.c key = request_key_net(&key_type_rxrpc, description, sock_net(&rx->sk), NULL); rx 920 net/rxrpc/key.c rx->key = key; rx 929 net/rxrpc/key.c int rxrpc_server_keyring(struct rxrpc_sock *rx, char __user *optval, rx 944 net/rxrpc/key.c key = request_key_net(&key_type_keyring, description, sock_net(&rx->sk), NULL); rx 951 net/rxrpc/key.c rx->securities = key; rx 152 net/rxrpc/peer_object.c static void rxrpc_assess_MTU_size(struct rxrpc_sock *rx, rx 155 net/rxrpc/peer_object.c struct net *net = sock_net(&rx->sk); rx 244 net/rxrpc/peer_object.c static void rxrpc_init_peer(struct rxrpc_sock *rx, struct rxrpc_peer *peer, rx 248 net/rxrpc/peer_object.c rxrpc_assess_MTU_size(rx, peer); rx 280 net/rxrpc/peer_object.c static struct rxrpc_peer *rxrpc_create_peer(struct rxrpc_sock *rx, rx 293 net/rxrpc/peer_object.c rxrpc_init_peer(rx, peer, hash_key); rx 305 net/rxrpc/peer_object.c void rxrpc_new_incoming_peer(struct rxrpc_sock *rx, struct rxrpc_local *local, rx 312 net/rxrpc/peer_object.c rxrpc_init_peer(rx, peer, hash_key); rx 323 net/rxrpc/peer_object.c struct rxrpc_peer *rxrpc_lookup_peer(struct rxrpc_sock *rx, rx 344 net/rxrpc/peer_object.c candidate = rxrpc_create_peer(rx, local, srx, hash_key, gfp); rx 58 net/rxrpc/proc.c struct rxrpc_sock *rx; rx 77 net/rxrpc/proc.c rx = rcu_dereference(call->socket); rx 78 net/rxrpc/proc.c if (rx) { rx 79 net/rxrpc/proc.c local = READ_ONCE(rx->local); rx 25 net/rxrpc/recvmsg.c struct rxrpc_sock *rx; rx 35 net/rxrpc/recvmsg.c rx = rcu_dereference(call->socket); rx 36 net/rxrpc/recvmsg.c sk = &rx->sk; rx 37 net/rxrpc/recvmsg.c if (rx && sk->sk_state < RXRPC_CLOSE) { rx 43 net/rxrpc/recvmsg.c write_lock_bh(&rx->recvmsg_lock); rx 46 net/rxrpc/recvmsg.c list_add_tail(&call->recvmsg_link, &rx->recvmsg_q); rx 48 net/rxrpc/recvmsg.c write_unlock_bh(&rx->recvmsg_lock); rx 109 net/rxrpc/recvmsg.c static int rxrpc_recvmsg_new_call(struct rxrpc_sock *rx, rx 119 net/rxrpc/recvmsg.c write_lock_bh(&rx->recvmsg_lock); rx 121 net/rxrpc/recvmsg.c write_unlock_bh(&rx->recvmsg_lock); rx 124 net/rxrpc/recvmsg.c write_lock(&rx->call_lock); rx 125 net/rxrpc/recvmsg.c 
list_add_tail(&call->accept_link, &rx->to_be_accepted); rx 126 net/rxrpc/recvmsg.c write_unlock(&rx->call_lock); rx 444 net/rxrpc/recvmsg.c struct rxrpc_sock *rx = rxrpc_sk(sock->sk); rx 457 net/rxrpc/recvmsg.c timeo = sock_rcvtimeo(&rx->sk, flags & MSG_DONTWAIT); rx 460 net/rxrpc/recvmsg.c lock_sock(&rx->sk); rx 463 net/rxrpc/recvmsg.c if (RB_EMPTY_ROOT(&rx->calls) && rx 464 net/rxrpc/recvmsg.c list_empty(&rx->recvmsg_q) && rx 465 net/rxrpc/recvmsg.c rx->sk.sk_state != RXRPC_SERVER_LISTENING) { rx 466 net/rxrpc/recvmsg.c release_sock(&rx->sk); rx 470 net/rxrpc/recvmsg.c if (list_empty(&rx->recvmsg_q)) { rx 477 net/rxrpc/recvmsg.c release_sock(&rx->sk); rx 480 net/rxrpc/recvmsg.c prepare_to_wait_exclusive(sk_sleep(&rx->sk), &wait, rx 482 net/rxrpc/recvmsg.c ret = sock_error(&rx->sk); rx 486 net/rxrpc/recvmsg.c if (list_empty(&rx->recvmsg_q)) { rx 493 net/rxrpc/recvmsg.c finish_wait(sk_sleep(&rx->sk), &wait); rx 500 net/rxrpc/recvmsg.c write_lock_bh(&rx->recvmsg_lock); rx 501 net/rxrpc/recvmsg.c l = rx->recvmsg_q.next; rx 507 net/rxrpc/recvmsg.c write_unlock_bh(&rx->recvmsg_lock); rx 523 net/rxrpc/recvmsg.c release_sock(&rx->sk); rx 555 net/rxrpc/recvmsg.c ret = rxrpc_recvmsg_new_call(rx, call, msg, flags); rx 582 net/rxrpc/recvmsg.c rxrpc_release_call(rx, call); rx 601 net/rxrpc/recvmsg.c write_lock_bh(&rx->recvmsg_lock); rx 602 net/rxrpc/recvmsg.c list_add(&call->recvmsg_link, &rx->recvmsg_q); rx 603 net/rxrpc/recvmsg.c write_unlock_bh(&rx->recvmsg_lock); rx 609 net/rxrpc/recvmsg.c release_sock(&rx->sk); rx 617 net/rxrpc/recvmsg.c finish_wait(sk_sleep(&rx->sk), &wait); rx 106 net/rxrpc/security.c bool rxrpc_look_up_server_security(struct rxrpc_local *local, struct rxrpc_sock *rx, rx 133 net/rxrpc/security.c if (!rx->securities) { rx 143 net/rxrpc/security.c kref = keyring_search(make_key_ref(rx->securities, 1UL), rx 38 net/rxrpc/sendmsg.c static int rxrpc_wait_for_tx_window_intr(struct rxrpc_sock *rx, rx 65 net/rxrpc/sendmsg.c static int rxrpc_wait_for_tx_window_waitall(struct rxrpc_sock *rx, rx 106 net/rxrpc/sendmsg.c static int rxrpc_wait_for_tx_window_nonintr(struct rxrpc_sock *rx, rx 127 net/rxrpc/sendmsg.c static int rxrpc_wait_for_tx_window(struct rxrpc_sock *rx, rx 143 net/rxrpc/sendmsg.c ret = rxrpc_wait_for_tx_window_waitall(rx, call); rx 145 net/rxrpc/sendmsg.c ret = rxrpc_wait_for_tx_window_intr(rx, call, timeo); rx 150 net/rxrpc/sendmsg.c ret = rxrpc_wait_for_tx_window_nonintr(rx, call, timeo); rx 182 net/rxrpc/sendmsg.c static void rxrpc_notify_end_tx(struct rxrpc_sock *rx, struct rxrpc_call *call, rx 186 net/rxrpc/sendmsg.c notify_end_tx(&rx->sk, call, call->user_call_ID); rx 194 net/rxrpc/sendmsg.c static int rxrpc_queue_packet(struct rxrpc_sock *rx, struct rxrpc_call *call, rx 233 net/rxrpc/sendmsg.c rxrpc_notify_end_tx(rx, call, notify_end_tx); rx 247 net/rxrpc/sendmsg.c rxrpc_notify_end_tx(rx, call, notify_end_tx); rx 292 net/rxrpc/sendmsg.c static int rxrpc_send_data(struct rxrpc_sock *rx, rx 299 net/rxrpc/sendmsg.c struct sock *sk = &rx->sk; rx 340 net/rxrpc/sendmsg.c ret = rxrpc_wait_for_tx_window(rx, call, rx 456 net/rxrpc/sendmsg.c ret = rxrpc_queue_packet(rx, call, skb, rx 595 net/rxrpc/sendmsg.c rxrpc_new_client_call_for_sendmsg(struct rxrpc_sock *rx, struct msghdr *msg, rx 597 net/rxrpc/sendmsg.c __releases(&rx->sk.sk_lock.slock) rx 609 net/rxrpc/sendmsg.c release_sock(&rx->sk); rx 613 net/rxrpc/sendmsg.c key = rx->key; rx 614 net/rxrpc/sendmsg.c if (key && !rx->key->payload.data[0]) rx 618 net/rxrpc/sendmsg.c cp.local = rx->local; rx 619 net/rxrpc/sendmsg.c 
cp.key = rx->key; rx 620 net/rxrpc/sendmsg.c cp.security_level = rx->min_sec_level; rx 621 net/rxrpc/sendmsg.c cp.exclusive = rx->exclusive | p->exclusive; rx 624 net/rxrpc/sendmsg.c call = rxrpc_new_client_call(rx, &cp, srx, &p->call, GFP_KERNEL, rx 638 net/rxrpc/sendmsg.c int rxrpc_do_sendmsg(struct rxrpc_sock *rx, struct msghdr *msg, size_t len) rx 639 net/rxrpc/sendmsg.c __releases(&rx->sk.sk_lock.slock) rx 666 net/rxrpc/sendmsg.c if (rx->sk.sk_state != RXRPC_SERVER_LISTENING) rx 668 net/rxrpc/sendmsg.c call = rxrpc_accept_call(rx, p.call.user_call_ID, NULL); rx 676 net/rxrpc/sendmsg.c call = rxrpc_find_call_by_user_ID(rx, p.call.user_call_ID); rx 681 net/rxrpc/sendmsg.c call = rxrpc_new_client_call_for_sendmsg(rx, msg, &p); rx 701 net/rxrpc/sendmsg.c release_sock(&rx->sk); rx 765 net/rxrpc/sendmsg.c ret = rxrpc_send_data(rx, call, msg, len, NULL); rx 776 net/rxrpc/sendmsg.c release_sock(&rx->sk); rx 692 net/tls/tls_device.c memcpy(rcd_sn, tls_ctx->rx.rec_seq, prot->rec_seq_size); rx 762 net/tls/tls_device.c memcpy(rcd_sn, tls_ctx->rx.rec_seq, prot->rec_seq_size); rx 1465 net/tls/tls_sw.c memcpy(iv + iv_offset, tls_ctx->rx.iv, rx 1468 net/tls/tls_sw.c memcpy(iv + iv_offset, tls_ctx->rx.iv, prot->salt_size); rx 1470 net/tls/tls_sw.c xor_iv_with_seq(prot->version, iv, tls_ctx->rx.rec_seq); rx 1475 net/tls/tls_sw.c tls_ctx->rx.rec_seq, prot->rec_seq_size, rx 1551 net/tls/tls_sw.c &tls_ctx->rx); rx 1566 net/tls/tls_sw.c tls_advance_record_sn(sk, prot, &tls_ctx->rx); rx 2167 net/tls/tls_sw.c kfree(tls_ctx->rx.rec_seq); rx 2168 net/tls/tls_sw.c kfree(tls_ctx->rx.iv); rx 2321 net/tls/tls_sw.c cctx = &ctx->rx; rx 491 net/wireless/mlme.c if (!(wdev->wiphy->mgmt_stypes[wdev->iftype].rx & BIT(mgmt_type))) rx 718 net/wireless/mlme.c if (!(stypes->rx & BIT(stype))) { rx 1701 net/wireless/nl80211.c stypes = mgmt_stypes[ift].rx; rx 1590 net/wireless/trace.h TP_PROTO(struct wiphy *wiphy, int ret, u32 tx, u32 rx), rx 1591 net/wireless/trace.h TP_ARGS(wiphy, ret, tx, rx), rx 1596 net/wireless/trace.h __field(u32, rx) rx 1602 net/wireless/trace.h __entry->rx = rx; rx 1605 net/wireless/trace.h WIPHY_PR_ARG, __entry->ret, __entry->tx, __entry->rx) rx 1610 net/wireless/trace.h u32 rx, u32 rx_max), rx 1611 net/wireless/trace.h TP_ARGS(wiphy, tx, tx_max, rx, rx_max), rx 1616 net/wireless/trace.h __field(u32, rx) rx 1623 net/wireless/trace.h __entry->rx = rx; rx 1627 net/wireless/trace.h WIPHY_PR_ARG, __entry->tx, __entry->tx_max, __entry->rx, rx 1632 net/wireless/trace.h TP_PROTO(struct wiphy *wiphy, u32 tx, u32 rx), rx 1633 net/wireless/trace.h TP_ARGS(wiphy, rx, tx), rx 1637 net/wireless/trace.h __field(u32, rx) rx 1642 net/wireless/trace.h __entry->rx = rx; rx 1645 net/wireless/trace.h WIPHY_PR_ARG, __entry->tx, __entry->rx) rx 1649 net/wireless/trace.h TP_PROTO(struct wiphy *wiphy, u32 tx, u32 rx), rx 1650 net/wireless/trace.h TP_ARGS(wiphy, rx, tx) rx 36 net/xdp/xsk.c return READ_ONCE(xs->rx) && READ_ONCE(xs->umem) && rx 169 net/xdp/xsk.c err = xskq_produce_batch_desc(xs->rx, addr, len); rx 182 net/xdp/xsk.c int err = xskq_produce_batch_desc(xs->rx, (u64)xdp->handle, len); rx 218 net/xdp/xsk.c xskq_produce_flush_desc(xs->rx); rx 249 net/xdp/xsk.c err = xskq_produce_batch_desc(xs->rx, addr, len); rx 254 net/xdp/xsk.c xskq_produce_flush_desc(xs->rx); rx 452 net/xdp/xsk.c if (xs->rx && !xskq_empty_desc(xs->rx)) rx 563 net/xdp/xsk.c xskq_destroy(xs->rx); rx 642 net/xdp/xsk.c if (!xs->rx && !xs->tx) { rx 707 net/xdp/xsk.c xskq_set_umem(xs->rx, xs->umem->size, xs->umem->chunk_mask); rx 761 net/xdp/xsk.c q = 
(optname == XDP_TX_RING) ? &xs->tx : &xs->rx; rx 872 net/xdp/xsk.c stats.rx_invalid_descs = xskq_nb_invalid_descs(xs->rx); rx 900 net/xdp/xsk.c &off.rx); rx 907 net/xdp/xsk.c off.rx.flags = offsetof(struct xdp_rxtx_ring, rx 919 net/xdp/xsk.c xsk_enter_rxtx_offsets(&off_v1.rx); rx 977 net/xdp/xsk.c q = READ_ONCE(xs->rx); rx 14 net/xdp/xsk.h struct xdp_ring_offset_v1 rx; rx 40 net/xdp/xsk_diag.c if (xs->rx) rx 41 net/xdp/xsk_diag.c err = xsk_diag_put_ring(xs->rx, XDP_DIAG_RX_RING, nlskb); rx 1188 net/xfrm/xfrm_state.c struct xfrm_state *rx = NULL, *x = NULL; rx 1202 net/xfrm/xfrm_state.c rx = x; rx 1207 net/xfrm/xfrm_state.c if (rx) rx 1208 net/xfrm/xfrm_state.c xfrm_state_hold(rx); rx 1212 net/xfrm/xfrm_state.c return rx; rx 88 samples/bpf/xdpsock_user.c struct xsk_ring_cons rx; rx 329 samples/bpf/xdpsock_user.c &xsk->rx, &xsk->tx, &cfg); rx 554 samples/bpf/xdpsock_user.c rcvd = xsk_ring_cons__peek(&xsk->rx, BATCH_SIZE, &idx_rx); rx 571 samples/bpf/xdpsock_user.c u64 addr = xsk_ring_cons__rx_desc(&xsk->rx, idx_rx)->addr; rx 572 samples/bpf/xdpsock_user.c u32 len = xsk_ring_cons__rx_desc(&xsk->rx, idx_rx++)->len; rx 583 samples/bpf/xdpsock_user.c xsk_ring_cons__release(&xsk->rx, rcvd); rx 669 samples/bpf/xdpsock_user.c rcvd = xsk_ring_cons__peek(&xsk->rx, BATCH_SIZE, &idx_rx); rx 686 samples/bpf/xdpsock_user.c u64 addr = xsk_ring_cons__rx_desc(&xsk->rx, idx_rx)->addr; rx 687 samples/bpf/xdpsock_user.c u32 len = xsk_ring_cons__rx_desc(&xsk->rx, idx_rx++)->len; rx 701 samples/bpf/xdpsock_user.c xsk_ring_cons__release(&xsk->rx, rcvd); rx 85 sound/firewire/amdtp-am824.c s->ctx_data.rx.fdf = AMDTP_FDF_AM824 | s->sfc; rx 108 sound/firewire/amdtp-stream.c s->ctx_data.rx.syt_override = -1; rx 273 sound/firewire/amdtp-stream.c s->ctx_data.rx.transfer_delay = rx 279 sound/firewire/amdtp-stream.c s->ctx_data.rx.transfer_delay += rx 340 sound/firewire/amdtp-stream.c data_blocks = s->ctx_data.rx.data_block_state; rx 342 sound/firewire/amdtp-stream.c phase = s->ctx_data.rx.data_block_state; rx 361 sound/firewire/amdtp-stream.c s->ctx_data.rx.data_block_state = phase; rx 373 sound/firewire/amdtp-stream.c if (s->ctx_data.rx.last_syt_offset < TICKS_PER_CYCLE) { rx 375 sound/firewire/amdtp-stream.c syt_offset = s->ctx_data.rx.last_syt_offset + rx 376 sound/firewire/amdtp-stream.c s->ctx_data.rx.syt_offset_state; rx 388 sound/firewire/amdtp-stream.c phase = s->ctx_data.rx.syt_offset_state; rx 390 sound/firewire/amdtp-stream.c syt_offset = s->ctx_data.rx.last_syt_offset; rx 395 sound/firewire/amdtp-stream.c s->ctx_data.rx.syt_offset_state = phase; rx 398 sound/firewire/amdtp-stream.c syt_offset = s->ctx_data.rx.last_syt_offset - TICKS_PER_CYCLE; rx 399 sound/firewire/amdtp-stream.c s->ctx_data.rx.last_syt_offset = syt_offset; rx 402 sound/firewire/amdtp-stream.c syt_offset += s->ctx_data.rx.transfer_delay; rx 487 sound/firewire/amdtp-stream.c ((s->ctx_data.rx.fdf << CIP_FDF_SHIFT) & CIP_FDF_MASK) | rx 800 sound/firewire/amdtp-stream.c if (s->ctx_data.rx.syt_override < 0) rx 803 sound/firewire/amdtp-stream.c syt = s->ctx_data.rx.syt_override; rx 930 sound/firewire/amdtp-stream.c s->ctx_data.rx.data_block_state = entry->data_block; rx 931 sound/firewire/amdtp-stream.c s->ctx_data.rx.syt_offset_state = entry->syt_offset; rx 932 sound/firewire/amdtp-stream.c s->ctx_data.rx.last_syt_offset = TICKS_PER_CYCLE; rx 145 sound/firewire/amdtp-stream.h } rx; rx 107 sound/firewire/dice/dice-proc.c } rx; rx 198 sound/firewire/dice/dice-proc.c quadlets = min_t(u32, tx_rx_header.size, sizeof(buf.rx) / 4); rx 200 
sound/firewire/dice/dice-proc.c if (dice_proc_read_mem(dice, &buf.rx, sections[4] + 2 + rx 205 sound/firewire/dice/dice-proc.c snd_iprintf(buffer, " iso channel: %d\n", (int)buf.rx.iso); rx 206 sound/firewire/dice/dice-proc.c snd_iprintf(buffer, " sequence start: %u\n", buf.rx.seq_start); rx 208 sound/firewire/dice/dice-proc.c buf.rx.number_audio); rx 209 sound/firewire/dice/dice-proc.c snd_iprintf(buffer, " midi ports: %u\n", buf.rx.number_midi); rx 211 sound/firewire/dice/dice-proc.c dice_proc_fixup_string(buf.rx.names, RX_NAMES_SIZE); rx 212 sound/firewire/dice/dice-proc.c snd_iprintf(buffer, " names: %s\n", buf.rx.names); rx 216 sound/firewire/dice/dice-proc.c buf.rx.ac3_caps); rx 218 sound/firewire/dice/dice-proc.c buf.rx.ac3_enable); rx 130 sound/firewire/digi00x/amdtp-dot.c s->ctx_data.rx.fdf = AMDTP_FDF_AM824 | s->sfc; rx 483 sound/firewire/motu/amdtp-motu.c s->ctx_data.rx.fdf = MOTU_FDF_AM824; rx 485 sound/firewire/motu/amdtp-motu.c s->ctx_data.rx.syt_override = 0xffff; rx 251 sound/firewire/tascam/amdtp-tascam.c s->ctx_data.rx.fdf = 0x00; rx 253 sound/firewire/tascam/amdtp-tascam.c s->ctx_data.rx.syt_override = 0x0000; rx 154 sound/soc/bcm/bcm2835-i2s.c bool tx, bool rx) rx 165 sound/soc/bcm/bcm2835-i2s.c off |= rx ? BCM2835_I2S_RXON : 0; rx 168 sound/soc/bcm/bcm2835-i2s.c clr |= rx ? BCM2835_I2S_RXCLR : 0; rx 443 sound/soc/codecs/wcd9335.h #define WCD9335_CDC_RX_PATH_CTL(rx) WCD9335_REG(0x0b, (0x041 + rx * 0x14)) rx 450 sound/soc/codecs/wcd9335.h #define WCD9335_CDC_RX_PATH_MIX_CTL(rx) WCD9335_REG(0x0b, (0x46 + rx * 0x14)) rx 680 sound/soc/fsl/fsl_esai.c bool tx = true, rx = false, enabled[2]; rx 689 sound/soc/fsl/fsl_esai.c enabled[rx] = rfcr & ESAI_xFCR_xFEN; rx 693 sound/soc/fsl/fsl_esai.c fsl_esai_trigger_stop(esai_priv, rx); rx 720 sound/soc/fsl/fsl_esai.c if (enabled[rx]) rx 721 sound/soc/fsl/fsl_esai.c fsl_esai_trigger_start(esai_priv, rx); rx 243 sound/soc/intel/baytrail/sst-baytrail-ipc.c msg->rx.header = header; rx 245 sound/soc/intel/baytrail/sst-baytrail-ipc.c msg->rx.size = sst_byt_header_data(header); rx 246 sound/soc/intel/baytrail/sst-baytrail-ipc.c sst_dsp_inbox_read(byt->dsp, msg->rx.data, msg->rx.size); rx 66 sound/soc/intel/common/sst-ipc.c reply->header = msg->rx.header; rx 68 sound/soc/intel/common/sst-ipc.c memcpy(reply->data, msg->rx.data, msg->rx.size); rx 95 sound/soc/intel/common/sst-ipc.c msg->rx.header = 0; rx 96 sound/soc/intel/common/sst-ipc.c msg->rx.size = reply ? 
reply->size : 0; rx 129 sound/soc/intel/common/sst-ipc.c ipc->msg[i].rx.data = kzalloc(ipc->rx_data_max_size, GFP_KERNEL); rx 130 sound/soc/intel/common/sst-ipc.c if (ipc->msg[i].rx.data == NULL) { rx 144 sound/soc/intel/common/sst-ipc.c kfree(ipc->msg[i-1].rx.data); rx 311 sound/soc/intel/common/sst-ipc.c kfree(ipc->msg[i].rx.data); rx 29 sound/soc/intel/common/sst-ipc.h struct sst_ipc_message rx; rx 555 sound/soc/intel/haswell/sst-haswell-ipc.c msg->rx.header = header; rx 566 sound/soc/intel/haswell/sst-haswell-ipc.c sst_dsp_inbox_read(hsw->dsp, msg->rx.data, rx 567 sound/soc/intel/haswell/sst-haswell-ipc.c msg->rx.size); rx 571 sound/soc/intel/haswell/sst-haswell-ipc.c sst_dsp_outbox_read(hsw->dsp, msg->rx.data, rx 572 sound/soc/intel/haswell/sst-haswell-ipc.c msg->rx.size); rx 450 sound/soc/intel/skylake/skl-sst-ipc.c msg->rx.header = *ipc_header; rx 455 sound/soc/intel/skylake/skl-sst-ipc.c sst_dsp_inbox_read(ipc->dsp, msg->rx.data, msg->rx.size); rx 136 sound/soc/mediatek/common/mtk-btcvsd.c struct mtk_btcvsd_snd_stream *rx; rx 212 sound/soc/mediatek/common/mtk-btcvsd.c bt->tx->state, bt->rx->state, bt->irq_disabled); rx 217 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->state == BT_SCO_STATE_IDLE) { rx 247 sound/soc/mediatek/common/mtk-btcvsd.c memset(bt->rx, 0, sizeof(*bt->rx)); rx 250 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->packet_size = BTCVSD_RX_PACKET_SIZE; rx 251 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->buf_size = BTCVSD_RX_BUF_SIZE; rx 252 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->timeout = 0; rx 253 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->rw_cnt = 0; rx 254 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->stream = SNDRV_PCM_STREAM_CAPTURE; rx 268 sound/soc/mediatek/common/mtk-btcvsd.c ts->time_stamp_us = bt->rx->time_stamp; rx 269 sound/soc/mediatek/common/mtk-btcvsd.c ts->data_count_equi_time = bt->rx->buf_data_equivalent_time; rx 385 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->temp_packet_buf, packet_length, rx 390 sound/soc/mediatek/common/mtk-btcvsd.c packet_buf_ofs = (bt->rx->packet_w & SCO_RX_PACKET_MASK) * rx 391 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->packet_size; rx 393 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->temp_packet_buf + (SCO_RX_PLC_SIZE * i), rx 404 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->packet_w++; rx 487 sound/soc/mediatek/common/mtk-btcvsd.c if (bt->rx->state != BT_SCO_STATE_RUNNING && rx 488 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->state != BT_SCO_STATE_ENDING && rx 493 sound/soc/mediatek/common/mtk-btcvsd.c __func__, bt->rx->state, bt->tx->state); rx 549 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->rw_cnt++; rx 553 sound/soc/mediatek/common/mtk-btcvsd.c if (bt->rx->state == BT_SCO_STATE_RUNNING || rx 554 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->state == BT_SCO_STATE_ENDING) { rx 555 sound/soc/mediatek/common/mtk-btcvsd.c if (bt->rx->xrun) { rx 556 sound/soc/mediatek/common/mtk-btcvsd.c if (bt->rx->packet_w - bt->rx->packet_r <= rx 562 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->xrun = 0; rx 568 sound/soc/mediatek/common/mtk-btcvsd.c if (!bt->rx->xrun && rx 569 sound/soc/mediatek/common/mtk-btcvsd.c (bt->rx->packet_w - bt->rx->packet_r <= rx 577 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->rw_cnt++; rx 579 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->xrun = 1; rx 618 sound/soc/mediatek/common/mtk-btcvsd.c if (bt->rx->state == BT_SCO_STATE_RUNNING || rx 619 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->state == BT_SCO_STATE_ENDING) { rx 620 sound/soc/mediatek/common/mtk-btcvsd.c 
bt->rx->wait_flag = 1; rx 622 sound/soc/mediatek/common/mtk-btcvsd.c snd_pcm_period_elapsed(bt->rx->substream); rx 706 sound/soc/mediatek/common/mtk-btcvsd.c unsigned int packet_size = bt->rx->packet_size; rx 711 sound/soc/mediatek/common/mtk-btcvsd.c avail = (bt->rx->packet_w - bt->rx->packet_r) * packet_size; rx 713 sound/soc/mediatek/common/mtk-btcvsd.c cur_read_idx = (bt->rx->packet_r & SCO_RX_PACKET_MASK) * rx 718 sound/soc/mediatek/common/mtk-btcvsd.c int ret = wait_for_bt_irq(bt, bt->rx); rx 742 sound/soc/mediatek/common/mtk-btcvsd.c cont = bt->rx->buf_size - cur_read_idx; rx 755 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->packet_r += read_size / packet_size; rx 767 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->time_stamp = sched_clock(); rx 768 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->buf_data_equivalent_time = rx 769 sound/soc/mediatek/common/mtk-btcvsd.c (unsigned long long)(bt->rx->packet_w - bt->rx->packet_r) * rx 771 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->buf_data_equivalent_time += read_count * SCO_RX_PLC_SIZE * rx 774 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->buf_data_equivalent_time *= 1000; rx 811 sound/soc/mediatek/common/mtk-btcvsd.c int ret = wait_for_bt_irq(bt, bt->rx); rx 863 sound/soc/mediatek/common/mtk-btcvsd.c return bt->rx; rx 896 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->substream = substream; rx 1016 sound/soc/mediatek/common/mtk-btcvsd.c bt_stream = bt->rx; rx 1021 sound/soc/mediatek/common/mtk-btcvsd.c bt->tx->packet_r : bt->rx->packet_w; rx 1127 sound/soc/mediatek/common/mtk-btcvsd.c mtk_btcvsd_snd_set_state(bt, bt->rx, BT_SCO_STATE_LOOPBACK); rx 1130 sound/soc/mediatek/common/mtk-btcvsd.c mtk_btcvsd_snd_set_state(bt, bt->rx, BT_SCO_STATE_RUNNING); rx 1169 sound/soc/mediatek/common/mtk-btcvsd.c if (!bt->rx) rx 1172 sound/soc/mediatek/common/mtk-btcvsd.c ucontrol->value.integer.value[0] = bt->rx->rw_cnt ? 
1 : 0; rx 1182 sound/soc/mediatek/common/mtk-btcvsd.c if (!bt->rx) rx 1185 sound/soc/mediatek/common/mtk-btcvsd.c ucontrol->value.integer.value[0] = bt->rx->timeout; rx 1186 sound/soc/mediatek/common/mtk-btcvsd.c bt->rx->timeout = 0; rx 1319 sound/soc/mediatek/common/mtk-btcvsd.c btcvsd->rx = devm_kzalloc(btcvsd->dev, sizeof(*btcvsd->rx), GFP_KERNEL); rx 1320 sound/soc/mediatek/common/mtk-btcvsd.c if (!btcvsd->rx) rx 1395 sound/soc/mediatek/common/mtk-btcvsd.c mtk_btcvsd_snd_set_state(btcvsd, btcvsd->rx, BT_SCO_STATE_IDLE); rx 20 sound/soc/meson/axg-card.c u32 rx; rx 216 sound/soc/meson/axg-card.c be->codec_masks[i].rx, rx 339 sound/soc/meson/axg-card.c u32 tx, rx; rx 359 sound/soc/meson/axg-card.c for (i = 0, rx = 0; i < AXG_TDM_NUM_LANES; i++) { rx 362 sound/soc/meson/axg-card.c rx = max(rx, be->rx_mask[i]); rx 366 sound/soc/meson/axg-card.c if (!rx) rx 370 sound/soc/meson/axg-card.c if (!tx && !rx) { rx 381 sound/soc/meson/axg-card.c be->slots = fls(max(tx, rx)); rx 382 sound/soc/meson/axg-card.c } else if (be->slots < fls(max(tx, rx)) || be->slots > 32) { rx 413 sound/soc/meson/axg-card.c &codec_mask->rx); rx 42 sound/soc/meson/axg-tdm-interface.c struct axg_tdm_stream *rx = (struct axg_tdm_stream *) rx 89 sound/soc/meson/axg-tdm-interface.c if (rx) { rx 90 sound/soc/meson/axg-tdm-interface.c rx->mask = rx_mask; rx 16 sound/soc/samsung/dma.h const char *tx, const char *rx, rx 18 sound/soc/samsung/dmaengine.c const char *tx, const char *rx, rx 32 sound/soc/samsung/dmaengine.c pcm_conf->chan_names[SNDRV_PCM_STREAM_CAPTURE] = rx; rx 420 sound/soc/ti/omap-mcbsp.c int rx = !tx; rx 444 sound/soc/ti/omap-mcbsp.c rx &= 1; rx 446 sound/soc/ti/omap-mcbsp.c MCBSP_WRITE(mcbsp, SPCR1, w | rx); rx 468 sound/soc/ti/omap-mcbsp.c w &= ~(rx ? RDISABLE : 0); rx 479 sound/soc/ti/omap-mcbsp.c int rx = !tx; rx 494 sound/soc/ti/omap-mcbsp.c rx &= 1; rx 497 sound/soc/ti/omap-mcbsp.c w |= (rx ? 
RDISABLE : 0); rx 501 sound/soc/ti/omap-mcbsp.c MCBSP_WRITE(mcbsp, SPCR1, w & ~rx); rx 51 tools/include/uapi/linux/if_xdp.h struct xdp_ring_offset rx; rx 57 tools/lib/bpf/xsk.c struct xsk_ring_cons *rx; rx 85 tools/lib/bpf/xsk.c struct xdp_ring_offset_v1 rx; rx 161 tools/lib/bpf/xsk.c off->rx.producer = off_v1.rx.producer; rx 162 tools/lib/bpf/xsk.c off->rx.consumer = off_v1.rx.consumer; rx 163 tools/lib/bpf/xsk.c off->rx.desc = off_v1.rx.desc; rx 164 tools/lib/bpf/xsk.c off->rx.flags = off_v1.rx.consumer + sizeof(__u32); rx 560 tools/lib/bpf/xsk.c struct xsk_ring_cons *rx, struct xsk_ring_prod *tx, rx 569 tools/lib/bpf/xsk.c if (!umem || !xsk_ptr || !rx || !tx) rx 606 tools/lib/bpf/xsk.c if (rx) { rx 631 tools/lib/bpf/xsk.c if (rx) { rx 632 tools/lib/bpf/xsk.c rx_map = mmap(NULL, off.rx.desc + rx 641 tools/lib/bpf/xsk.c rx->mask = xsk->config.rx_size - 1; rx 642 tools/lib/bpf/xsk.c rx->size = xsk->config.rx_size; rx 643 tools/lib/bpf/xsk.c rx->producer = rx_map + off.rx.producer; rx 644 tools/lib/bpf/xsk.c rx->consumer = rx_map + off.rx.consumer; rx 645 tools/lib/bpf/xsk.c rx->flags = rx_map + off.rx.flags; rx 646 tools/lib/bpf/xsk.c rx->ring = rx_map + off.rx.desc; rx 648 tools/lib/bpf/xsk.c xsk->rx = rx; rx 697 tools/lib/bpf/xsk.c if (rx) rx 698 tools/lib/bpf/xsk.c munmap(rx_map, off.rx.desc + rx 749 tools/lib/bpf/xsk.c if (xsk->rx) { rx 750 tools/lib/bpf/xsk.c munmap(xsk->rx->ring - off.rx.desc, rx 751 tools/lib/bpf/xsk.c off.rx.desc + xsk->config.rx_size * desc_sz); rx 73 tools/lib/bpf/xsk.h xsk_ring_cons__rx_desc(const struct xsk_ring_cons *rx, __u32 idx) rx 75 tools/lib/bpf/xsk.h const struct xdp_desc *descs = (const struct xdp_desc *)rx->ring; rx 77 tools/lib/bpf/xsk.h return &descs[idx & rx->mask]; rx 234 tools/lib/bpf/xsk.h struct xsk_ring_cons *rx, rx 113 tools/spi/spidev_test.c static void transfer(int fd, uint8_t const *tx, uint8_t const *rx, size_t len) rx 119 tools/spi/spidev_test.c .rx_buf = (unsigned long)rx, rx 153 tools/spi/spidev_test.c ret = write(out_fd, rx, len); rx 161 tools/spi/spidev_test.c hex_dump(rx, len, 32, "RX"); rx 301 tools/spi/spidev_test.c uint8_t *rx; rx 307 tools/spi/spidev_test.c rx = malloc(size); rx 308 tools/spi/spidev_test.c if (!rx) rx 312 tools/spi/spidev_test.c transfer(fd, tx, rx, size); rx 313 tools/spi/spidev_test.c free(rx); rx 323 tools/spi/spidev_test.c uint8_t *rx; rx 336 tools/spi/spidev_test.c rx = malloc(sb.st_size); rx 337 tools/spi/spidev_test.c if (!rx) rx 344 tools/spi/spidev_test.c transfer(fd, tx, rx, sb.st_size); rx 345 tools/spi/spidev_test.c free(rx); rx 370 tools/spi/spidev_test.c uint8_t *rx; rx 379 tools/spi/spidev_test.c rx = malloc(len); rx 380 tools/spi/spidev_test.c if (!rx) rx 383 tools/spi/spidev_test.c transfer(fd, tx, rx, len); rx 389 tools/spi/spidev_test.c if (memcmp(tx, rx, len)) { rx 392 tools/spi/spidev_test.c hex_dump(rx, len, 32, "RX"); rx 397 tools/spi/spidev_test.c free(rx); rx 514 tools/testing/selftests/bpf/test_flow_dissector.c int fdt = -1, fdr = -1, len, tx = 0, rx = 0; rx 532 tools/testing/selftests/bpf/test_flow_dissector.c rx += do_rx(fdr); rx 539 tools/testing/selftests/bpf/test_flow_dissector.c fprintf(stderr, "pkts: tx=%u rx=%u\n", tx, rx); rx 541 tools/testing/selftests/bpf/test_flow_dissector.c rx = 0; rx 551 tools/testing/selftests/bpf/test_flow_dissector.c if (rx < tx) { rx 553 tools/testing/selftests/bpf/test_flow_dissector.c while (rx < tx) { rx 559 tools/testing/selftests/bpf/test_flow_dissector.c rx += do_rx(fdr); rx 563 tools/testing/selftests/bpf/test_flow_dissector.c fprintf(stderr, 
"pkts: tx=%u rx=%u\n", tx, rx); rx 575 tools/testing/selftests/bpf/test_flow_dissector.c return rx != 0; rx 577 tools/testing/selftests/bpf/test_flow_dissector.c return rx != tx;