tail 411 arch/arm/crypto/aes-ce-glue.c u8 __aligned(8) tail[AES_BLOCK_SIZE]; tail 422 arch/arm/crypto/aes-ce-glue.c ce_aes_ctr_encrypt(tail, NULL, ctx->key_enc, num_rounds(ctx), tail 425 arch/arm/crypto/aes-ce-glue.c crypto_xor_cpy(tdst, tsrc, tail, nbytes); tail 459 arch/arm/crypto/aes-ce-glue.c int tail = req->cryptlen % AES_BLOCK_SIZE; tail 470 arch/arm/crypto/aes-ce-glue.c if (unlikely(tail > 0 && walk.nbytes < walk.total)) { tail 486 arch/arm/crypto/aes-ce-glue.c tail = 0; tail 503 arch/arm/crypto/aes-ce-glue.c if (err || likely(!tail)) tail 510 arch/arm/crypto/aes-ce-glue.c skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, tail 531 arch/arm/crypto/aes-ce-glue.c int tail = req->cryptlen % AES_BLOCK_SIZE; tail 542 arch/arm/crypto/aes-ce-glue.c if (unlikely(tail > 0 && walk.nbytes < walk.total)) { tail 558 arch/arm/crypto/aes-ce-glue.c tail = 0; tail 575 arch/arm/crypto/aes-ce-glue.c if (err || likely(!tail)) tail 582 arch/arm/crypto/aes-ce-glue.c skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, tail 335 arch/arm/crypto/aes-neonbs-glue.c int tail = req->cryptlen % AES_BLOCK_SIZE; tail 344 arch/arm/crypto/aes-neonbs-glue.c if (unlikely(tail)) { tail 350 arch/arm/crypto/aes-neonbs-glue.c req->cryptlen - tail, req->iv); tail 362 arch/arm/crypto/aes-neonbs-glue.c int reorder_last_tweak = !encrypt && tail > 0; tail 378 arch/arm/crypto/aes-neonbs-glue.c if (err || likely(!tail)) tail 384 arch/arm/crypto/aes-neonbs-glue.c memcpy(buf + AES_BLOCK_SIZE, buf, tail); tail 385 arch/arm/crypto/aes-neonbs-glue.c scatterwalk_map_and_copy(buf, req->src, req->cryptlen, tail, 0); tail 397 arch/arm/crypto/aes-neonbs-glue.c AES_BLOCK_SIZE + tail, 1); tail 34 arch/arm/kernel/perf_callchain.c user_backtrace(struct frame_tail __user *tail, tail 40 arch/arm/kernel/perf_callchain.c if (!access_ok(tail, sizeof(buftail))) tail 44 arch/arm/kernel/perf_callchain.c err = __copy_from_user_inatomic(&buftail, tail, sizeof(buftail)); tail 56 arch/arm/kernel/perf_callchain.c if (tail + 1 >= buftail.fp) tail 65 arch/arm/kernel/perf_callchain.c struct frame_tail __user *tail; tail 77 arch/arm/kernel/perf_callchain.c tail = (struct frame_tail __user *)regs->ARM_fp - 1; tail 80 arch/arm/kernel/perf_callchain.c tail && !((unsigned long)tail & 0x3)) tail 81 arch/arm/kernel/perf_callchain.c tail = user_backtrace(tail, entry); tail 86 arch/arm/oprofile/common.c static struct frame_tail* user_backtrace(struct frame_tail *tail) tail 91 arch/arm/oprofile/common.c if (!access_ok(tail, sizeof(buftail))) tail 93 arch/arm/oprofile/common.c if (__copy_from_user_inatomic(buftail, tail, sizeof(buftail))) tail 100 arch/arm/oprofile/common.c if (tail + 1 >= buftail[0].fp) tail 108 arch/arm/oprofile/common.c struct frame_tail *tail = ((struct frame_tail *) regs->ARM_fp) - 1; tail 117 arch/arm/oprofile/common.c while (depth-- && tail && !((unsigned long) tail & 3)) tail 118 arch/arm/oprofile/common.c tail = user_backtrace(tail); tail 188 arch/arm64/crypto/aes-ce-ccm-glue.c u32 tail = walk->nbytes % AES_BLOCK_SIZE; tail 193 arch/arm64/crypto/aes-ce-ccm-glue.c if (nbytes == walk->total && tail > 0) { tail 195 arch/arm64/crypto/aes-ce-ccm-glue.c tail = 0; tail 217 arch/arm64/crypto/aes-ce-ccm-glue.c err = skcipher_walk_done(walk, tail); tail 252 arch/arm64/crypto/aes-ce-ccm-glue.c u32 tail = walk.nbytes % AES_BLOCK_SIZE; tail 255 arch/arm64/crypto/aes-ce-ccm-glue.c tail = 0; tail 260 arch/arm64/crypto/aes-ce-ccm-glue.c walk.nbytes - tail, ctx->key_enc, tail 264 arch/arm64/crypto/aes-ce-ccm-glue.c err = 
skcipher_walk_done(&walk, tail); tail 310 arch/arm64/crypto/aes-ce-ccm-glue.c u32 tail = walk.nbytes % AES_BLOCK_SIZE; tail 313 arch/arm64/crypto/aes-ce-ccm-glue.c tail = 0; tail 318 arch/arm64/crypto/aes-ce-ccm-glue.c walk.nbytes - tail, ctx->key_enc, tail 322 arch/arm64/crypto/aes-ce-ccm-glue.c err = skcipher_walk_done(&walk, tail); tail 481 arch/arm64/crypto/aes-glue.c u8 __aligned(8) tail[AES_BLOCK_SIZE]; tail 492 arch/arm64/crypto/aes-glue.c aes_ctr_encrypt(tail, NULL, ctx->key_enc, rounds, tail 495 arch/arm64/crypto/aes-glue.c crypto_xor_cpy(tdst, tsrc, tail, nbytes); tail 530 arch/arm64/crypto/aes-glue.c int tail = req->cryptlen % AES_BLOCK_SIZE; tail 541 arch/arm64/crypto/aes-glue.c if (unlikely(tail > 0 && walk.nbytes < walk.total)) { tail 557 arch/arm64/crypto/aes-glue.c tail = 0; tail 574 arch/arm64/crypto/aes-glue.c if (err || likely(!tail)) tail 581 arch/arm64/crypto/aes-glue.c skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, tail 602 arch/arm64/crypto/aes-glue.c int tail = req->cryptlen % AES_BLOCK_SIZE; tail 613 arch/arm64/crypto/aes-glue.c if (unlikely(tail > 0 && walk.nbytes < walk.total)) { tail 629 arch/arm64/crypto/aes-glue.c tail = 0; tail 646 arch/arm64/crypto/aes-glue.c if (err || likely(!tail)) tail 653 arch/arm64/crypto/aes-glue.c skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, tail 323 arch/arm64/crypto/aes-neonbs-glue.c int tail = req->cryptlen % (8 * AES_BLOCK_SIZE); tail 336 arch/arm64/crypto/aes-neonbs-glue.c if (unlikely(tail > 0 && tail < AES_BLOCK_SIZE)) { tail 349 arch/arm64/crypto/aes-neonbs-glue.c tail = 0; tail 390 arch/arm64/crypto/aes-neonbs-glue.c if (err || likely(!tail)) tail 398 arch/arm64/crypto/aes-neonbs-glue.c skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, tail 23 arch/arm64/kernel/perf_callchain.c user_backtrace(struct frame_tail __user *tail, tail 31 arch/arm64/kernel/perf_callchain.c if (!access_ok(tail, sizeof(buftail))) tail 35 arch/arm64/kernel/perf_callchain.c err = __copy_from_user_inatomic(&buftail, tail, sizeof(buftail)); tail 49 arch/arm64/kernel/perf_callchain.c if (tail >= buftail.fp) tail 71 arch/arm64/kernel/perf_callchain.c compat_user_backtrace(struct compat_frame_tail __user *tail, tail 78 arch/arm64/kernel/perf_callchain.c if (!access_ok(tail, sizeof(buftail))) tail 82 arch/arm64/kernel/perf_callchain.c err = __copy_from_user_inatomic(&buftail, tail, sizeof(buftail)); tail 94 arch/arm64/kernel/perf_callchain.c if (tail + 1 >= (struct compat_frame_tail __user *) tail 114 arch/arm64/kernel/perf_callchain.c struct frame_tail __user *tail; tail 116 arch/arm64/kernel/perf_callchain.c tail = (struct frame_tail __user *)regs->regs[29]; tail 119 arch/arm64/kernel/perf_callchain.c tail && !((unsigned long)tail & 0xf)) tail 120 arch/arm64/kernel/perf_callchain.c tail = user_backtrace(tail, entry); tail 124 arch/arm64/kernel/perf_callchain.c struct compat_frame_tail __user *tail; tail 126 arch/arm64/kernel/perf_callchain.c tail = (struct compat_frame_tail __user *)regs->compat_fp - 1; tail 129 arch/arm64/kernel/perf_callchain.c tail && !((unsigned long)tail & 0x3)) tail 130 arch/arm64/kernel/perf_callchain.c tail = compat_user_backtrace(tail, entry); tail 85 arch/mips/cavium-octeon/executive/cvmx-pko.c config.s.tail = (queue == (num_queues - 1)); tail 304 arch/mips/cavium-octeon/executive/cvmx-pko.c config.s.tail = 1; tail 444 arch/mips/cavium-octeon/executive/cvmx-pko.c config.s.tail = queue == (num_queues - 1); tail 729 arch/mips/include/asm/octeon/cvmx-pko-defs.h uint64_t tail:1; 
tail 739 arch/mips/include/asm/octeon/cvmx-pko-defs.h uint64_t tail:1; tail 746 arch/mips/include/asm/octeon/cvmx-pko-defs.h uint64_t tail:1; tail 756 arch/mips/include/asm/octeon/cvmx-pko-defs.h uint64_t tail:1; tail 767 arch/mips/include/asm/octeon/cvmx-pko-defs.h uint64_t tail:1; tail 773 arch/mips/include/asm/octeon/cvmx-pko-defs.h uint64_t tail:1; tail 780 arch/mips/include/asm/octeon/cvmx-pko-defs.h uint64_t tail:1; tail 790 arch/mips/include/asm/octeon/cvmx-pko-defs.h uint64_t tail:1; tail 1019 arch/mips/include/asm/octeon/cvmx-pko-defs.h uint64_t tail:1; tail 1029 arch/mips/include/asm/octeon/cvmx-pko-defs.h uint64_t tail:1; tail 1173 arch/mips/include/asm/octeon/cvmx-pko-defs.h uint64_t tail:1; tail 1181 arch/mips/include/asm/octeon/cvmx-pko-defs.h uint64_t tail:1; tail 675 arch/mips/include/asm/octeon/cvmx-pow.h uint64_t tail:1; tail 691 arch/mips/include/asm/octeon/cvmx-pow.h uint64_t tail:1; tail 730 arch/mips/include/asm/octeon/cvmx-pow.h uint64_t tail:1; tail 746 arch/mips/include/asm/octeon/cvmx-pow.h uint64_t tail:1; tail 843 arch/mips/include/asm/octeon/cvmx-pow.h uint64_t tail:1; tail 851 arch/mips/include/asm/octeon/cvmx-pow.h uint64_t tail:1; tail 26 arch/powerpc/crypto/crc32c-vpmsum_glue.c unsigned int tail; tail 48 arch/powerpc/crypto/crc32c-vpmsum_glue.c tail = len & VMX_ALIGN_MASK; tail 49 arch/powerpc/crypto/crc32c-vpmsum_glue.c if (tail) { tail 51 arch/powerpc/crypto/crc32c-vpmsum_glue.c crc = __crc32c_le(crc, p, tail); tail 30 arch/powerpc/crypto/crct10dif-vpmsum_glue.c unsigned int tail; tail 55 arch/powerpc/crypto/crct10dif-vpmsum_glue.c tail = len & VMX_ALIGN_MASK; tail 56 arch/powerpc/crypto/crct10dif-vpmsum_glue.c if (tail) { tail 58 arch/powerpc/crypto/crct10dif-vpmsum_glue.c crc = crc_t10dif_generic(crc, p, tail); tail 84 arch/powerpc/kvm/book3s_hv_rm_mmu.c struct revmap_entry *head, *tail; tail 92 arch/powerpc/kvm/book3s_hv_rm_mmu.c tail = &kvm->arch.hpt.rev[head->back]; tail 94 arch/powerpc/kvm/book3s_hv_rm_mmu.c tail = real_vmalloc_addr(tail); tail 97 arch/powerpc/kvm/book3s_hv_rm_mmu.c tail->forw = pte_index; tail 74 arch/powerpc/oprofile/cell/pr_util.h unsigned int head, tail; tail 55 arch/powerpc/oprofile/cell/spu_task_sync.c if (spu_buff[spu].head >= spu_buff[spu].tail) { tail 56 arch/powerpc/oprofile/cell/spu_task_sync.c if ((spu_buff[spu].head - spu_buff[spu].tail) tail 60 arch/powerpc/oprofile/cell/spu_task_sync.c } else if (spu_buff[spu].tail > spu_buff[spu].head) { tail 61 arch/powerpc/oprofile/cell/spu_task_sync.c if ((spu_buff[spu].tail - spu_buff[spu].head) tail 113 arch/powerpc/oprofile/cell/spu_task_sync.c spu_buff[spu].tail, tail 117 arch/powerpc/oprofile/cell/spu_task_sync.c spu_buff[spu].tail = curr_head; tail 467 arch/powerpc/oprofile/cell/spu_task_sync.c spu_buff[spu].tail = 0; tail 2305 arch/powerpc/platforms/cell/spufs/file.c return (ctx->switch_log->head - ctx->switch_log->tail) % tail 2336 arch/powerpc/platforms/cell/spufs/file.c ctx->switch_log->head = ctx->switch_log->tail = 0; tail 2365 arch/powerpc/platforms/cell/spufs/file.c p = ctx->switch_log->log + ctx->switch_log->tail % SWITCH_LOG_BUFSIZE; tail 2428 arch/powerpc/platforms/cell/spufs/file.c ctx->switch_log->tail = tail 2429 arch/powerpc/platforms/cell/spufs/file.c (ctx->switch_log->tail + 1) % tail 57 arch/powerpc/platforms/cell/spufs/spufs.h unsigned long tail; tail 22 arch/powerpc/platforms/pseries/of_helpers.c const char *tail; tail 25 arch/powerpc/platforms/pseries/of_helpers.c tail = kbasename(path) - 1; tail 31 arch/powerpc/platforms/pseries/of_helpers.c if (tail > 
path) { tail 32 arch/powerpc/platforms/pseries/of_helpers.c parent_path = kstrndup(path, tail - path, GFP_KERNEL); tail 75 arch/s390/kernel/perf_cpum_sf.c unsigned long *tail; /* last sample-data-block-table */ tail 196 arch/s390/kernel/perf_cpum_sf.c unsigned long *new, *tail, *tail_prev = NULL; tail 198 arch/s390/kernel/perf_cpum_sf.c if (!sfb->sdbt || !sfb->tail) tail 201 arch/s390/kernel/perf_cpum_sf.c if (!is_link_entry(sfb->tail)) tail 209 arch/s390/kernel/perf_cpum_sf.c tail = sfb->tail; tail 214 arch/s390/kernel/perf_cpum_sf.c if (sfb->sdbt != get_next_sdbt(tail)) { tail 218 arch/s390/kernel/perf_cpum_sf.c (void *) sfb->sdbt, (void *) tail); tail 226 arch/s390/kernel/perf_cpum_sf.c if (require_table_link(tail)) { tail 234 arch/s390/kernel/perf_cpum_sf.c *tail = (unsigned long)(void *) new + 1; tail 235 arch/s390/kernel/perf_cpum_sf.c tail_prev = tail; tail 236 arch/s390/kernel/perf_cpum_sf.c tail = new; tail 244 arch/s390/kernel/perf_cpum_sf.c rc = alloc_sample_data_block(tail, gfp_flags); tail 254 arch/s390/kernel/perf_cpum_sf.c tail = tail_prev; tail 259 arch/s390/kernel/perf_cpum_sf.c tail++; tail 264 arch/s390/kernel/perf_cpum_sf.c *tail = (unsigned long) sfb->sdbt + 1; tail 265 arch/s390/kernel/perf_cpum_sf.c sfb->tail = tail; tail 301 arch/s390/kernel/perf_cpum_sf.c sfb->tail = sfb->sdbt; tail 302 arch/s390/kernel/perf_cpum_sf.c *sfb->tail = (unsigned long)(void *) sfb->sdbt + 1; tail 1696 arch/s390/kernel/perf_cpum_sf.c unsigned long *new, *tail; tail 1737 arch/s390/kernel/perf_cpum_sf.c tail = sfb->tail = sfb->sdbt; tail 1743 arch/s390/kernel/perf_cpum_sf.c for (i = 0; i < nr_pages; i++, tail++) { tail 1744 arch/s390/kernel/perf_cpum_sf.c if (require_table_link(tail)) { tail 1750 arch/s390/kernel/perf_cpum_sf.c *tail = (unsigned long)(void *) new + 1; tail 1751 arch/s390/kernel/perf_cpum_sf.c tail = new; tail 1754 arch/s390/kernel/perf_cpum_sf.c *tail = (unsigned long)pages[i]; tail 1761 arch/s390/kernel/perf_cpum_sf.c *tail = (unsigned long) sfb->sdbt + 1; tail 1762 arch/s390/kernel/perf_cpum_sf.c sfb->tail = tail; tail 658 arch/sh/mm/pmb.c struct pmb_entry *tail; tail 663 arch/sh/mm/pmb.c tail = head->link; tail 664 arch/sh/mm/pmb.c while (tail) { tail 665 arch/sh/mm/pmb.c span += tail->size; tail 673 arch/sh/mm/pmb.c if (!tail->link) tail 676 arch/sh/mm/pmb.c tail = tail->link; tail 180 arch/sparc/boot/piggyback.c int image, tail; tail 251 arch/sparc/boot/piggyback.c if ((tail = open(argv[4], O_RDONLY)) < 0) tail 253 arch/sparc/boot/piggyback.c while ((i = read(tail, buffer, 1024)) > 0) tail 258 arch/sparc/boot/piggyback.c if (close(tail) < 0) tail 3385 arch/sparc/include/asm/hypervisor.h unsigned long tail; /* New tail index to use */ tail 252 arch/sparc/kernel/ldc.c unsigned long limit, tail, new_tail, diff; tail 256 arch/sparc/kernel/ldc.c tail = lp->tx_tail; tail 257 arch/sparc/kernel/ldc.c new_tail = tx_advance(lp, tail); tail 292 arch/sparc/kernel/ldc.c static int set_tx_tail(struct ldc_channel *lp, unsigned long tail) tail 297 arch/sparc/kernel/ldc.c lp->tx_tail = tail; tail 301 arch/sparc/kernel/ldc.c err = sun4v_ldc_tx_set_qtail(lp->id, tail); tail 1576 arch/sparc/kernel/ldc.c unsigned long hv_err, tail; tail 1594 arch/sparc/kernel/ldc.c tail = lp->tx_tail; tail 1596 arch/sparc/kernel/ldc.c struct ldc_packet *p = lp->tx_base + (tail / LDC_PACKET_SIZE); tail 1629 arch/sparc/kernel/ldc.c tail = tx_advance(lp, tail); tail 1632 arch/sparc/kernel/ldc.c err = set_tx_tail(lp, tail); tail 353 arch/sparc/kernel/signal32.c void __user *tail; tail 382 
arch/sparc/kernel/signal32.c tail = (sf + 1); tail 407 arch/sparc/kernel/signal32.c __siginfo_fpu_t __user *fp = tail; tail 408 arch/sparc/kernel/signal32.c tail += sizeof(*fp); tail 415 arch/sparc/kernel/signal32.c __siginfo_rwin_t __user *rwp = tail; tail 416 arch/sparc/kernel/signal32.c tail += sizeof(*rwp); tail 488 arch/sparc/kernel/signal32.c void __user *tail; tail 516 arch/sparc/kernel/signal32.c tail = (sf + 1); tail 541 arch/sparc/kernel/signal32.c __siginfo_fpu_t __user *fp = tail; tail 542 arch/sparc/kernel/signal32.c tail += sizeof(*fp); tail 549 arch/sparc/kernel/signal32.c __siginfo_rwin_t __user *rwp = tail; tail 550 arch/sparc/kernel/signal32.c tail += sizeof(*rwp); tail 232 arch/sparc/kernel/signal_32.c void __user *tail; tail 253 arch/sparc/kernel/signal_32.c tail = sf + 1; tail 261 arch/sparc/kernel/signal_32.c __siginfo_fpu_t __user *fp = tail; tail 262 arch/sparc/kernel/signal_32.c tail += sizeof(*fp); tail 269 arch/sparc/kernel/signal_32.c __siginfo_rwin_t __user *rwp = tail; tail 270 arch/sparc/kernel/signal_32.c tail += sizeof(*rwp); tail 327 arch/sparc/kernel/signal_32.c void __user *tail; tail 345 arch/sparc/kernel/signal_32.c tail = sf + 1; tail 357 arch/sparc/kernel/signal_32.c __siginfo_fpu_t __user *fp = tail; tail 358 arch/sparc/kernel/signal_32.c tail += sizeof(*fp); tail 365 arch/sparc/kernel/signal_32.c __siginfo_rwin_t __user *rwp = tail; tail 366 arch/sparc/kernel/signal_32.c tail += sizeof(*rwp); tail 356 arch/sparc/kernel/signal_64.c void __user *tail; tail 381 arch/sparc/kernel/signal_64.c tail = (sf + 1); tail 387 arch/sparc/kernel/signal_64.c __siginfo_fpu_t __user *fpu_save = tail; tail 388 arch/sparc/kernel/signal_64.c tail += sizeof(__siginfo_fpu_t); tail 395 arch/sparc/kernel/signal_64.c __siginfo_rwin_t __user *rwin_save = tail; tail 396 arch/sparc/kernel/signal_64.c tail += sizeof(__siginfo_rwin_t); tail 43 arch/um/drivers/line.c n = line->head - line->tail; tail 98 arch/um/drivers/line.c line->tail = line->buffer; tail 104 arch/um/drivers/line.c end = line->buffer + LINE_BUFSIZE - line->tail; tail 107 arch/um/drivers/line.c memcpy(line->tail, buf, len); tail 108 arch/um/drivers/line.c line->tail += len; tail 112 arch/um/drivers/line.c memcpy(line->tail, buf, end); tail 115 arch/um/drivers/line.c line->tail = line->buffer + len - end; tail 134 arch/um/drivers/line.c if ((line->buffer == NULL) || (line->head == line->tail)) tail 137 arch/um/drivers/line.c if (line->tail < line->head) { tail 157 arch/um/drivers/line.c count = line->tail - line->head; tail 165 arch/um/drivers/line.c return line->head == line->tail; tail 199 arch/um/drivers/line.c if (line->head != line->tail) tail 258 arch/um/drivers/line.c line->tail = line->buffer; tail 52 arch/um/drivers/line.h char *tail; tail 242 arch/um/drivers/vector_kern.c qi->tail = 0; tail 258 arch/um/drivers/vector_kern.c qi->tail = tail 259 arch/um/drivers/vector_kern.c (qi->tail + advance) tail 327 arch/um/drivers/vector_kern.c *(qi->skbuff_vector + qi->tail) = skb; tail 328 arch/um/drivers/vector_kern.c mmsg_vector += qi->tail; tail 573 arch/um/drivers/vector_kern.c result->tail = 0; tail 43 arch/um/drivers/vector_kern.h int queue_depth, head, tail, max_depth, max_iov_frags; tail 270 arch/x86/crypto/glue_helper.c unsigned int nbytes, tail; tail 279 arch/x86/crypto/glue_helper.c tail = req->cryptlen % XTS_BLOCK_SIZE + XTS_BLOCK_SIZE; tail 286 arch/x86/crypto/glue_helper.c req->cryptlen - tail, req->iv); tail 337 arch/x86/crypto/glue_helper.c memcpy(b + 1, b, tail - XTS_BLOCK_SIZE); tail 339 
arch/x86/crypto/glue_helper.c tail - XTS_BLOCK_SIZE, 0); tail 340 arch/x86/crypto/glue_helper.c scatterwalk_map_and_copy(b, dst, 0, tail, 1); tail 132 arch/x86/platform/geode/alix.c const char *tail; tail 150 arch/x86/platform/geode/alix.c tail = p + alix_sig_len; tail 151 arch/x86/platform/geode/alix.c if ((tail[0] == '2' || tail[0] == '3' || tail[0] == '6')) { tail 1801 arch/x86/platform/uv/tlb_uv.c unsigned long gnode, first, last, tail; tail 1832 arch/x86/platform/uv/tlb_uv.c tail = first; tail 1834 arch/x86/platform/uv/tlb_uv.c first = (gnode << UV_PAYLOADQ_GNODE_SHIFT) | tail; tail 1835 arch/x86/platform/uv/tlb_uv.c write_mmr_payload_tail(pnode, tail); tail 1390 block/blk-core.c if (list->tail) tail 1391 block/blk-core.c list->tail->bi_next = rq->bio; tail 1394 block/blk-core.c list->tail = rq->biotail; tail 37 crypto/authenc.c char tail[]; tail 129 crypto/authenc.c struct ahash_request *ahreq = (void *)(areq_ctx->tail + ictx->reqoff); tail 150 crypto/authenc.c struct ahash_request *ahreq = (void *)(areq_ctx->tail + ictx->reqoff); tail 151 crypto/authenc.c u8 *hash = areq_ctx->tail; tail 211 crypto/authenc.c struct skcipher_request *skreq = (void *)(areq_ctx->tail + tail 247 crypto/authenc.c struct ahash_request *ahreq = (void *)(areq_ctx->tail + ictx->reqoff); tail 248 crypto/authenc.c struct skcipher_request *skreq = (void *)(areq_ctx->tail + tail 297 crypto/authenc.c struct ahash_request *ahreq = (void *)(areq_ctx->tail + ictx->reqoff); tail 298 crypto/authenc.c u8 *hash = areq_ctx->tail; tail 40 crypto/authencesn.c char tail[]; tail 103 crypto/authencesn.c u8 *hash = PTR_ALIGN((u8 *)areq_ctx->tail, tail 136 crypto/authencesn.c u8 *hash = PTR_ALIGN((u8 *)areq_ctx->tail, tail 138 crypto/authencesn.c struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff); tail 196 crypto/authencesn.c struct skcipher_request *skreq = (void *)(areq_ctx->tail + tail 236 crypto/authencesn.c struct skcipher_request *skreq = (void *)(areq_ctx->tail + tail 239 crypto/authencesn.c u8 *ohash = PTR_ALIGN((u8 *)areq_ctx->tail, tail 285 crypto/authencesn.c struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff); tail 288 crypto/authencesn.c u8 *ohash = PTR_ALIGN((u8 *)areq_ctx->tail, tail 309 crypto/authencesn.c goto tail; tail 328 crypto/authencesn.c tail: tail 42 crypto/chacha20poly1305.c } tail; tail 179 crypto/chacha20poly1305.c preq->tail.assoclen = cpu_to_le64(rctx->assoclen); tail 180 crypto/chacha20poly1305.c preq->tail.cryptlen = cpu_to_le64(rctx->cryptlen); tail 181 crypto/chacha20poly1305.c sg_init_one(preq->src, &preq->tail, sizeof(preq->tail)); tail 187 crypto/chacha20poly1305.c rctx->tag, sizeof(preq->tail)); tail 225 crypto/cryptd.c unsigned int tail) tail 231 crypto/cryptd.c p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL); tail 35 crypto/xts.c struct scatterlist *tail; tail 155 crypto/xts.c scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 0); tail 157 crypto/xts.c scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 1); tail 170 crypto/xts.c int tail = req->cryptlen % XTS_BLOCK_SIZE; tail 174 crypto/xts.c rctx->tail = scatterwalk_ffwd(rctx->sg, req->dst, tail 177 crypto/xts.c scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0); tail 178 crypto/xts.c memcpy(b + 1, b, tail); tail 179 crypto/xts.c scatterwalk_map_and_copy(b, req->src, offset, tail, 0); tail 183 crypto/xts.c scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE + tail, 1); tail 187 crypto/xts.c skcipher_request_set_crypt(subreq, rctx->tail, rctx->tail, tail 194 
crypto/xts.c scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0); tail 196 crypto/xts.c scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 1); tail 28 drivers/acpi/acpi_dbg.c (CIRC_CNT((circ)->head, (circ)->tail, ACPI_AML_BUF_SIZE)) tail 30 drivers/acpi/acpi_dbg.c (CIRC_CNT_TO_END((circ)->head, (circ)->tail, ACPI_AML_BUF_SIZE)) tail 32 drivers/acpi/acpi_dbg.c (CIRC_SPACE((circ)->head, (circ)->tail, ACPI_AML_BUF_SIZE)) tail 34 drivers/acpi/acpi_dbg.c (CIRC_SPACE_TO_END((circ)->head, (circ)->tail, ACPI_AML_BUF_SIZE)) tail 290 drivers/acpi/acpi_dbg.c p = &crc->buf[crc->tail]; tail 294 drivers/acpi/acpi_dbg.c crc->tail = (crc->tail + 1) & (ACPI_AML_BUF_SIZE - 1); tail 520 drivers/acpi/acpi_dbg.c acpi_aml_io.out_crc.head = acpi_aml_io.out_crc.tail = 0; tail 521 drivers/acpi/acpi_dbg.c acpi_aml_io.in_crc.head = acpi_aml_io.in_crc.tail = 0; tail 591 drivers/acpi/acpi_dbg.c p = &crc->buf[crc->tail]; tail 599 drivers/acpi/acpi_dbg.c crc->tail = (crc->tail + n) & (ACPI_AML_BUF_SIZE - 1); tail 51 drivers/acpi/utils.c u8 *tail = NULL; tail 181 drivers/acpi/utils.c tail = buffer->pointer + tail_offset; tail 202 drivers/acpi/utils.c *pointer = tail; tail 203 drivers/acpi/utils.c *((u64 *) tail) = tail 206 drivers/acpi/utils.c tail += sizeof(u64); tail 208 drivers/acpi/utils.c *tail = (char)0; tail 209 drivers/acpi/utils.c tail += sizeof(char); tail 222 drivers/acpi/utils.c *pointer = tail; tail 223 drivers/acpi/utils.c memcpy(tail, element->string.pointer, tail 226 drivers/acpi/utils.c tail += element->string.length * sizeof(char); tail 228 drivers/acpi/utils.c *tail = (char)0; tail 229 drivers/acpi/utils.c tail += sizeof(char); tail 233 drivers/acpi/utils.c *pointer = tail; tail 234 drivers/acpi/utils.c memcpy(tail, element->buffer.pointer, tail 237 drivers/acpi/utils.c tail += element->buffer.length; tail 246 drivers/ata/sata_nv.c unsigned int tail; tail 1694 drivers/ata/sata_nv.c WARN_ON(dq->tail - dq->head == ATA_MAX_QUEUE); tail 1696 drivers/ata/sata_nv.c dq->tag[dq->tail++ & (ATA_MAX_QUEUE - 1)] = qc->hw_tag; tail 1705 drivers/ata/sata_nv.c if (dq->head == dq->tail) /* null queue */ tail 1732 drivers/ata/sata_nv.c dq->tail = 0; tail 782 drivers/atm/fore200e.c entry = &txq->host_entry[ txq->tail ]; tail 789 drivers/atm/fore200e.c entry, txq->tail, entry->vc_map, entry->skb); tail 856 drivers/atm/fore200e.c FORE200E_NEXT_ENTRY(txq->tail, QUEUE_SIZE_TX); tail 2250 drivers/atm/fore200e.c txq->tail = 0; tail 613 drivers/atm/fore200e.h int tail; /* tail of tx queue */ tail 94 drivers/atm/he.h #define NEXT_ENTRY(base, tail, mask) \ tail 95 drivers/atm/he.h (((unsigned long)base)|(((unsigned long)(tail+1))&mask)) tail 1239 drivers/atm/idt77252.c u32 head, tail; tail 1254 drivers/atm/idt77252.c tail = readl(SAR_REG_RAWCT); tail 1260 drivers/atm/idt77252.c while (head != tail) { tail 99 drivers/atm/iphase.c que->tail = NULL; tail 106 drivers/atm/iphase.c que->next = que->tail = data; tail 121 drivers/atm/iphase.c que->next = que->tail = entry; tail 123 drivers/atm/iphase.c que->tail->next = entry; tail 124 drivers/atm/iphase.c que->tail = que->tail->next; tail 134 drivers/atm/iphase.c if ( que->next == que->tail) tail 135 drivers/atm/iphase.c que->next = que->tail = NULL; tail 888 drivers/atm/iphase.h struct ia_rtn_q *next, *tail; tail 887 drivers/atm/nicstar.c scq->tail = scq->last; tail 1478 drivers/atm/nicstar.c if (scqep == scq->tail) { tail 1484 drivers/atm/nicstar.c if (!ns_scqe_is_tsr(scqep) && scq->tail != scq->next) { tail 1728 drivers/atm/nicstar.c while (scq->tail == scq->next) 
{ tail 1737 drivers/atm/nicstar.c scq->tail != scq->next, tail 1773 drivers/atm/nicstar.c while (scq->tail == scq->next) { tail 1787 drivers/atm/nicstar.c scq->tail != scq->next, tail 1919 drivers/atm/nicstar.c i = (int)(scq->tail - scq->base); tail 1942 drivers/atm/nicstar.c scq->tail = scq->base + pos; tail 668 drivers/atm/nicstar.h volatile ns_scqe *tail; /* Not related to the nicstar register */ tail 2111 drivers/block/xen-blkfront.c merge_bio.tail = shadow[j].request->biotail; tail 246 drivers/bluetooth/dtl1_cs.c info->rx_skb->tail--; tail 1032 drivers/char/xillybus/xillybus_core.c unsigned char *tail = channel->rd_buffers[bufidx]->addr + tail 1037 drivers/char/xillybus/xillybus_core.c channel->rd_leftovers[i] = *tail++; tail 1236 drivers/char/xillybus/xillybus_core.c unsigned char *tail; tail 1248 drivers/char/xillybus/xillybus_core.c tail = channel-> tail 1257 drivers/char/xillybus/xillybus_core.c *tail++; tail 255 drivers/crypto/amcc/crypto4xx_core.c u32 tail; tail 265 drivers/crypto/amcc/crypto4xx_core.c tail = dev->pdr_tail; tail 268 drivers/crypto/amcc/crypto4xx_core.c return tail; tail 1080 drivers/crypto/amcc/crypto4xx_core.c u32 tail = core_dev->dev->pdr_tail; tail 1084 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo = &core_dev->dev->pdr_uinfo[tail]; tail 1085 drivers/crypto/amcc/crypto4xx_core.c pd = &core_dev->dev->pdr[tail]; tail 1090 drivers/crypto/amcc/crypto4xx_core.c crypto4xx_pd_done(core_dev->dev, tail); tail 1091 drivers/crypto/amcc/crypto4xx_core.c tail = crypto4xx_put_pd_to_pdr(core_dev->dev, tail); tail 1096 drivers/crypto/amcc/crypto4xx_core.c } while (head != tail); tail 762 drivers/crypto/atmel-sha.c unsigned int length, final, tail; tail 789 drivers/crypto/atmel-sha.c tail = length & (ctx->block_size - 1); tail 790 drivers/crypto/atmel-sha.c length -= tail; tail 801 drivers/crypto/atmel-sha.c tail = length & (ctx->block_size - 1); tail 802 drivers/crypto/atmel-sha.c length -= tail; tail 803 drivers/crypto/atmel-sha.c ctx->total += tail; tail 61 drivers/crypto/caam/intern.h int tail; /* entinfo (s/w ring) tail index */ tail 194 drivers/crypto/caam/jr.c int hw_idx, sw_idx, i, head, tail; tail 207 drivers/crypto/caam/jr.c sw_idx = tail = jrp->tail; tail 210 drivers/crypto/caam/jr.c for (i = 0; CIRC_CNT(head, tail + i, JOBR_DEPTH) >= 1; i++) { tail 211 drivers/crypto/caam/jr.c sw_idx = (tail + i) & (JOBR_DEPTH - 1); tail 218 drivers/crypto/caam/jr.c BUG_ON(CIRC_CNT(head, tail + i, JOBR_DEPTH) <= 0); tail 255 drivers/crypto/caam/jr.c if (sw_idx == tail) { tail 257 drivers/crypto/caam/jr.c tail = (tail + 1) & (JOBR_DEPTH - 1); tail 258 drivers/crypto/caam/jr.c } while (CIRC_CNT(head, tail, JOBR_DEPTH) >= 1 && tail 259 drivers/crypto/caam/jr.c jrp->entinfo[tail].desc_addr_dma == 0); tail 261 drivers/crypto/caam/jr.c jrp->tail = tail; tail 361 drivers/crypto/caam/jr.c int head, tail, desc_size; tail 374 drivers/crypto/caam/jr.c tail = READ_ONCE(jrp->tail); tail 377 drivers/crypto/caam/jr.c CIRC_SPACE(head, tail, JOBR_DEPTH) <= 0) { tail 459 drivers/crypto/caam/jr.c jrp->tail = 0; tail 226 drivers/crypto/ccp/ccp-dev-v5.c u32 tail; tail 249 drivers/crypto/ccp/ccp-dev-v5.c tail = low_address(cmd_q->qdma_tail + cmd_q->qidx * Q_DESC_SIZE); tail 250 drivers/crypto/ccp/ccp-dev-v5.c iowrite32(tail, cmd_q->reg_tail_lo); tail 267 drivers/crypto/ccp/ccp-dev-v5.c iowrite32(tail, cmd_q->reg_head_lo); tail 578 drivers/crypto/ccree/cc_request_mgr.c unsigned int *tail = &request_mgr_handle->req_queue_tail; tail 587 drivers/crypto/ccree/cc_request_mgr.c if (*head == *tail) { tail 597 
drivers/crypto/ccree/cc_request_mgr.c cc_req = &request_mgr_handle->req_queue[*tail]; tail 615 drivers/crypto/ccree/cc_request_mgr.c *tail = (*tail + 1) & (MAX_REQUEST_QUEUE_SIZE - 1); tail 616 drivers/crypto/ccree/cc_request_mgr.c dev_dbg(dev, "Dequeue request tail=%u\n", *tail); tail 1440 drivers/crypto/chelsio/chtls/chtls_io.c if (copied >= target && !sk->sk_backlog.tail) tail 1473 drivers/crypto/chelsio/chtls/chtls_io.c if (sk->sk_backlog.tail) { tail 1618 drivers/crypto/chelsio/chtls/chtls_io.c if (sk->sk_backlog.tail) { tail 1746 drivers/crypto/chelsio/chtls/chtls_io.c if (copied >= target && !sk->sk_backlog.tail) tail 1777 drivers/crypto/chelsio/chtls/chtls_io.c if (sk->sk_backlog.tail) { tail 174 drivers/crypto/hisilicon/qm.c (qc)->tail = 0; \ tail 210 drivers/crypto/hisilicon/qm.c __le16 tail; tail 225 drivers/crypto/hisilicon/qm.c __le16 tail; tail 239 drivers/crypto/hisilicon/qm.c __le16 tail; tail 249 drivers/crypto/hisilicon/qm.c __le16 tail; tail 516 drivers/crypto/mediatek/mtk-sha.c u32 len, final, tail; tail 539 drivers/crypto/mediatek/mtk-sha.c tail = len & (ctx->bs - 1); tail 540 drivers/crypto/mediatek/mtk-sha.c len -= tail; tail 553 drivers/crypto/mediatek/mtk-sha.c tail = len & (ctx->bs - 1); tail 554 drivers/crypto/mediatek/mtk-sha.c len -= tail; tail 555 drivers/crypto/mediatek/mtk-sha.c ctx->total += tail; tail 58 drivers/crypto/n2_core.c unsigned long tail; tail 177 drivers/crypto/n2_core.c unsigned long tail = q->tail; tail 181 drivers/crypto/n2_core.c if (head > tail) tail 182 drivers/crypto/n2_core.c diff = head - tail; tail 184 drivers/crypto/n2_core.c diff = (end - tail) + head; tail 194 drivers/crypto/n2_core.c return q->q + q->tail; tail 207 drivers/crypto/n2_core.c q->tail = new_tail; tail 242 drivers/crypto/n2_core.c qp->head != qp->tail) tail 501 drivers/crypto/n2_core.c if (head == qp->tail) { tail 562 drivers/crypto/n2_core.c ent = qp->q + qp->tail; tail 223 drivers/crypto/n2_core.h unsigned long *tail); tail 225 drivers/crypto/n2_core.h unsigned long tail); tail 125 drivers/crypto/qat/qat_common/adf_transport.c memcpy((void *)((uintptr_t)ring->base_addr + ring->tail), msg, tail 128 drivers/crypto/qat/qat_common/adf_transport.c ring->tail = adf_modulo(ring->tail + tail 132 drivers/crypto/qat/qat_common/adf_transport.c ring->ring_number, ring->tail); tail 284 drivers/crypto/qat/qat_common/adf_transport.c ring->tail = 0; tail 92 drivers/crypto/qat/qat_common/adf_transport_debug.c int head, tail, empty; tail 96 drivers/crypto/qat/qat_common/adf_transport_debug.c tail = READ_CSR_RING_TAIL(csr, bank->bank_number, tail 106 drivers/crypto/qat/qat_common/adf_transport_debug.c head, tail, (empty & 1 << ring->ring_number) tail 210 drivers/crypto/qat/qat_common/adf_transport_debug.c int head, tail, empty; tail 217 drivers/crypto/qat/qat_common/adf_transport_debug.c tail = READ_CSR_RING_TAIL(csr, bank->bank_number, tail 223 drivers/crypto/qat/qat_common/adf_transport_debug.c ring->ring_number, head, tail, tail 67 drivers/crypto/qat/qat_common/adf_transport_internal.h uint16_t tail; tail 297 drivers/crypto/qat/qat_common/qat_uclo.c struct icp_qat_uof_batch_init *init_header, *tail; tail 318 drivers/crypto/qat/qat_common/qat_uclo.c tail = tail_old; tail 328 drivers/crypto/qat/qat_common/qat_uclo.c tail->next = mem_init; tail 329 drivers/crypto/qat/qat_common/qat_uclo.c tail = mem_init; tail 346 drivers/crypto/talitos.c int tail, status; tail 351 drivers/crypto/talitos.c tail = priv->chan[ch].tail; tail 352 drivers/crypto/talitos.c while 
(priv->chan[ch].fifo[tail].desc) { tail 355 drivers/crypto/talitos.c request = &priv->chan[ch].fifo[tail]; tail 383 drivers/crypto/talitos.c priv->chan[ch].tail = (tail + 1) & (priv->fifo_len - 1); tail 395 drivers/crypto/talitos.c tail = priv->chan[ch].tail; tail 466 drivers/crypto/talitos.c int tail, iter; tail 477 drivers/crypto/talitos.c tail = priv->chan[ch].tail; tail 479 drivers/crypto/talitos.c iter = tail; tail 483 drivers/crypto/talitos.c if (iter == tail) { tail 106 drivers/crypto/talitos.h int tail; tail 384 drivers/dma/fsldma.c struct fsl_desc_sw *tail = to_fsl_desc(chan->ld_pending.prev); tail 396 drivers/dma/fsldma.c set_desc_next(chan, &tail->hw, desc->async_tx.phys); tail 167 drivers/dma/ioat/dma.c __func__, ioat_chan->head, ioat_chan->tail, tail 208 drivers/dma/ioat/dma.c __func__, ioat_chan->head, ioat_chan->tail, ioat_chan->issued); tail 240 drivers/dma/ioat/dma.c ioat_chan->issued = ioat_chan->tail; tail 246 drivers/dma/ioat/dma.c __func__, ioat_chan->head, ioat_chan->tail, tail 252 drivers/dma/ioat/dma.c desc = ioat_get_ring_ent(ioat_chan, ioat_chan->tail); tail 460 drivers/dma/ioat/dma.c ioat_chan->tail, ioat_chan->issued); tail 469 drivers/dma/ioat/dma.c ioat_chan->tail, ioat_chan->issued); tail 581 drivers/dma/ioat/dma.c int idx = ioat_chan->tail, i; tail 585 drivers/dma/ioat/dma.c __func__, ioat_chan->head, ioat_chan->tail, ioat_chan->issued); tail 636 drivers/dma/ioat/dma.c ioat_chan->tail = idx + i; tail 711 drivers/dma/ioat/dma.c int idx = ioat_chan->tail, i; tail 753 drivers/dma/ioat/dma.c ioat_chan->tail = idx + active; tail 755 drivers/dma/ioat/dma.c desc = ioat_get_ring_ent(ioat_chan, ioat_chan->tail); tail 782 drivers/dma/ioat/dma.c desc = ioat_get_ring_ent(ioat_chan, ioat_chan->tail); tail 125 drivers/dma/ioat/dma.h u16 tail; tail 316 drivers/dma/ioat/dma.h return CIRC_CNT(ioat_chan->head, ioat_chan->tail, tail 648 drivers/dma/ioat/init.c desc = ioat_get_ring_ent(ioat_chan, ioat_chan->tail + i); tail 717 drivers/dma/ioat/init.c ioat_chan->tail = 0; tail 81 drivers/dma/mic_x100_dma.c u32 tail; tail 85 drivers/dma/mic_x100_dma.c tail = mic_dma_read_cmp_cnt(ch); tail 92 drivers/dma/mic_x100_dma.c for (last_tail = ch->last_tail; tail != last_tail;) { tail 107 drivers/dma/mic_x100_dma.c static u32 mic_dma_ring_count(u32 head, u32 tail) tail 111 drivers/dma/mic_x100_dma.c if (head >= tail) tail 112 drivers/dma/mic_x100_dma.c count = (tail - 0) + (MIC_DMA_DESC_RX_SIZE - head); tail 114 drivers/dma/mic_x100_dma.c count = tail - head; tail 728 drivers/dma/ppc4xx/adma.c struct ppc440spe_adma_desc_slot *tail = next_desc; tail 753 drivers/dma/ppc4xx/adma.c while (tail->hw_next) tail 754 drivers/dma/ppc4xx/adma.c tail = tail->hw_next; tail 755 drivers/dma/ppc4xx/adma.c xor_last_linked = tail; tail 344 drivers/dma/qcom/bam_dma.c #define IS_BUSY(chan) (CIRC_SPACE(bchan->tail, bchan->head,\ tail 364 drivers/dma/qcom/bam_dma.c unsigned short tail; /* end of active descriptor entries */ tail 490 drivers/dma/qcom/bam_dma.c bchan->tail = 0; tail 1013 drivers/dma/qcom/bam_dma.c avail = CIRC_SPACE(bchan->tail, bchan->head, tail 1044 drivers/dma/qcom/bam_dma.c if (bchan->tail + async_desc->xfer_len > MAX_DESCRIPTORS) { tail 1045 drivers/dma/qcom/bam_dma.c u32 partial = MAX_DESCRIPTORS - bchan->tail; tail 1047 drivers/dma/qcom/bam_dma.c memcpy(&fifo[bchan->tail], desc, tail 1053 drivers/dma/qcom/bam_dma.c memcpy(&fifo[bchan->tail], desc, tail 1058 drivers/dma/qcom/bam_dma.c bchan->tail += async_desc->xfer_len; tail 1059 drivers/dma/qcom/bam_dma.c bchan->tail %= MAX_DESCRIPTORS; 
tail 1065 drivers/dma/qcom/bam_dma.c writel_relaxed(bchan->tail * sizeof(struct bam_desc_hw), tail 579 drivers/edac/thunderx_edac.c unsigned long tail; tail 595 drivers/edac/thunderx_edac.c tail = ring_pos(lmc->ring_tail, ARRAY_SIZE(lmc->err_ctx)); tail 597 drivers/edac/thunderx_edac.c ctx = &lmc->err_ctx[tail]; tail 1112 drivers/edac/thunderx_edac.c unsigned long tail; tail 1126 drivers/edac/thunderx_edac.c tail = ring_pos(ocx->com_ring_tail, tail 1128 drivers/edac/thunderx_edac.c ctx = &ocx->com_err_ctx[tail]; tail 1193 drivers/edac/thunderx_edac.c unsigned long tail; tail 1207 drivers/edac/thunderx_edac.c tail = ring_pos(ocx->link_ring_head, tail 1210 drivers/edac/thunderx_edac.c ctx = &ocx->link_err_ctx[tail]; tail 1848 drivers/edac/thunderx_edac.c unsigned long tail = ring_pos(l2c->ring_tail, ARRAY_SIZE(l2c->err_ctx)); tail 1849 drivers/edac/thunderx_edac.c struct l2c_err_ctx *ctx = &l2c->err_ctx[tail]; tail 63 drivers/firewire/nosy.c struct packet *head, *tail; tail 121 drivers/firewire/nosy.c buffer->tail = (struct packet *) buffer->data; tail 194 drivers/firewire/nosy.c buffer->tail->length = length; tail 196 drivers/firewire/nosy.c if (&buffer->tail->data[length] < end) { tail 197 drivers/firewire/nosy.c memcpy(buffer->tail->data, data, length); tail 198 drivers/firewire/nosy.c buffer->tail = (struct packet *) &buffer->tail->data[length]; tail 200 drivers/firewire/nosy.c size_t split = end - buffer->tail->data; tail 202 drivers/firewire/nosy.c memcpy(buffer->tail->data, data, split); tail 204 drivers/firewire/nosy.c buffer->tail = (struct packet *) &buffer->data[length - split]; tail 179 drivers/gpu/drm/drm_debugfs_crc.c return CIRC_CNT(crc->head, crc->tail, DRM_CRC_ENTRIES_NR); tail 188 drivers/gpu/drm/drm_debugfs_crc.c crc->tail = 0; tail 310 drivers/gpu/drm/drm_debugfs_crc.c entry = &crc->entries[crc->tail]; tail 318 drivers/gpu/drm/drm_debugfs_crc.c crc->tail = (crc->tail + 1) & (DRM_CRC_ENTRIES_NR - 1); tail 393 drivers/gpu/drm/drm_debugfs_crc.c int head, tail; tail 405 drivers/gpu/drm/drm_debugfs_crc.c tail = crc->tail; tail 407 drivers/gpu/drm/drm_debugfs_crc.c if (CIRC_SPACE(head, tail, DRM_CRC_ENTRIES_NR) < 1) { tail 252 drivers/gpu/drm/i810/i810_dma.c ring->space = ring->head - (ring->tail + 8); tail 280 drivers/gpu/drm/i810/i810_dma.c ring->tail = I810_READ(LP_RING + RING_TAIL); tail 281 drivers/gpu/drm/i810/i810_dma.c ring->space = ring->head - (ring->tail + 8); tail 78 drivers/gpu/drm/i810/i810_drv.h int tail; tail 154 drivers/gpu/drm/i810/i810_drv.h outring = dev_priv->ring.tail; \ tail 162 drivers/gpu/drm/i810/i810_drv.h dev_priv->ring.tail = outring; \ tail 72 drivers/gpu/drm/i915/gt/intel_context.c ce->ring->head, ce->ring->tail); tail 212 drivers/gpu/drm/i915/gt/intel_engine.h void intel_ring_reset(struct intel_ring *ring, u32 tail); tail 283 drivers/gpu/drm/i915/gt/intel_engine.h assert_ring_tail_valid(const struct intel_ring *ring, unsigned int tail) tail 285 drivers/gpu/drm/i915/gt/intel_engine.h GEM_BUG_ON(!intel_ring_offset_valid(ring, tail)); tail 302 drivers/gpu/drm/i915/gt/intel_engine.h GEM_BUG_ON(cacheline(tail) == cacheline(ring->head) && tail 303 drivers/gpu/drm/i915/gt/intel_engine.h tail < ring->head); tail 308 drivers/gpu/drm/i915/gt/intel_engine.h intel_ring_set_tail(struct intel_ring *ring, unsigned int tail) tail 316 drivers/gpu/drm/i915/gt/intel_engine.h assert_ring_tail_valid(ring, tail); tail 317 drivers/gpu/drm/i915/gt/intel_engine.h ring->tail = tail; tail 318 drivers/gpu/drm/i915/gt/intel_engine.h return tail; tail 322 
drivers/gpu/drm/i915/gt/intel_engine.h __intel_ring_space(unsigned int head, unsigned int tail, unsigned int size) tail 330 drivers/gpu/drm/i915/gt/intel_engine.h return (head - tail - CACHELINE_BYTES) & (size - 1); tail 1328 drivers/gpu/drm/i915/gt/intel_engine_cs.c rq->head, rq->postfix, rq->tail, tail 1332 drivers/gpu/drm/i915/gt/intel_engine_cs.c size = rq->tail - rq->head; tail 1333 drivers/gpu/drm/i915/gt/intel_engine_cs.c if (rq->tail < rq->head) tail 1342 drivers/gpu/drm/i915/gt/intel_engine_cs.c if (rq->tail < head) { tail 1393 drivers/gpu/drm/i915/gt/intel_engine_cs.c rq->ring->tail); tail 105 drivers/gpu/drm/i915/gt/intel_engine_types.h u32 tail; tail 650 drivers/gpu/drm/i915/gt/intel_lrc.c u32 tail, prev; tail 669 drivers/gpu/drm/i915/gt/intel_lrc.c tail = intel_ring_set_tail(rq->ring, rq->tail); tail 671 drivers/gpu/drm/i915/gt/intel_lrc.c if (unlikely(intel_ring_direction(rq->ring, tail, prev) <= 0)) tail 673 drivers/gpu/drm/i915/gt/intel_lrc.c ce->lrc_reg_state[CTX_RING_TAIL + 1] = tail; tail 674 drivers/gpu/drm/i915/gt/intel_lrc.c rq->tail = rq->wa_tail; tail 1491 drivers/gpu/drm/i915/gt/intel_lrc.c u8 head, tail; tail 1506 drivers/gpu/drm/i915/gt/intel_lrc.c tail = READ_ONCE(*execlists->csb_write); tail 1507 drivers/gpu/drm/i915/gt/intel_lrc.c GEM_TRACE("%s cs-irq head=%d, tail=%d\n", engine->name, head, tail); tail 1508 drivers/gpu/drm/i915/gt/intel_lrc.c if (unlikely(head == tail)) tail 1600 drivers/gpu/drm/i915/gt/intel_lrc.c } while (head != tail); tail 1759 drivers/gpu/drm/i915/gt/intel_lrc.c intel_ring_reset(ce->ring, ce->ring->tail); tail 1770 drivers/gpu/drm/i915/gt/intel_lrc.c GEM_BUG_ON(!intel_ring_offset_valid(ring, ring->tail)); tail 1774 drivers/gpu/drm/i915/gt/intel_lrc.c regs[CTX_RING_TAIL + 1] = ring->tail; tail 2485 drivers/gpu/drm/i915/gt/intel_lrc.c ce->ring->head = ce->ring->tail; tail 2539 drivers/gpu/drm/i915/gt/intel_lrc.c engine->name, ce->ring->head, ce->ring->tail); tail 2936 drivers/gpu/drm/i915/gt/intel_lrc.c request->tail = intel_ring_offset(request, cs); tail 2937 drivers/gpu/drm/i915/gt/intel_lrc.c assert_ring_tail_valid(request->ring, request->tail); tail 331 drivers/gpu/drm/i915/gt/intel_ringbuffer.c rq->tail = intel_ring_offset(rq, cs); tail 332 drivers/gpu/drm/i915/gt/intel_ringbuffer.c assert_ring_tail_valid(rq->ring, rq->tail); tail 434 drivers/gpu/drm/i915/gt/intel_ringbuffer.c rq->tail = intel_ring_offset(rq, cs); tail 435 drivers/gpu/drm/i915/gt/intel_ringbuffer.c assert_ring_tail_valid(rq->ring, rq->tail); tail 451 drivers/gpu/drm/i915/gt/intel_ringbuffer.c rq->tail = intel_ring_offset(rq, cs); tail 452 drivers/gpu/drm/i915/gt/intel_ringbuffer.c assert_ring_tail_valid(rq->ring, rq->tail); tail 482 drivers/gpu/drm/i915/gt/intel_ringbuffer.c rq->tail = intel_ring_offset(rq, cs); tail 483 drivers/gpu/drm/i915/gt/intel_ringbuffer.c assert_ring_tail_valid(rq->ring, rq->tail); tail 645 drivers/gpu/drm/i915/gt/intel_ringbuffer.c engine->name, ring->head, ring->tail); tail 693 drivers/gpu/drm/i915/gt/intel_ringbuffer.c GEM_BUG_ON(!intel_ring_offset_valid(ring, ring->tail)); tail 714 drivers/gpu/drm/i915/gt/intel_ringbuffer.c ENGINE_READ(engine, RING_TAIL), ring->tail, tail 726 drivers/gpu/drm/i915/gt/intel_ringbuffer.c if (ring->tail != ring->head) { tail 727 drivers/gpu/drm/i915/gt/intel_ringbuffer.c ENGINE_WRITE(engine, RING_TAIL, ring->tail); tail 840 drivers/gpu/drm/i915/gt/intel_ringbuffer.c head = engine->legacy.ring->tail; tail 935 drivers/gpu/drm/i915/gt/intel_ringbuffer.c intel_ring_set_tail(request->ring, request->tail)); tail 952 
drivers/gpu/drm/i915/gt/intel_ringbuffer.c rq->tail = intel_ring_offset(rq, cs); tail 953 drivers/gpu/drm/i915/gt/intel_ringbuffer.c assert_ring_tail_valid(rq->ring, rq->tail); tail 977 drivers/gpu/drm/i915/gt/intel_ringbuffer.c rq->tail = intel_ring_offset(rq, cs); tail 978 drivers/gpu/drm/i915/gt/intel_ringbuffer.c assert_ring_tail_valid(rq->ring, rq->tail); tail 1235 drivers/gpu/drm/i915/gt/intel_ringbuffer.c void intel_ring_reset(struct intel_ring *ring, u32 tail) tail 1237 drivers/gpu/drm/i915/gt/intel_ringbuffer.c tail = intel_ring_wrap(ring, tail); tail 1238 drivers/gpu/drm/i915/gt/intel_ringbuffer.c ring->tail = tail; tail 1239 drivers/gpu/drm/i915/gt/intel_ringbuffer.c ring->head = tail; tail 1240 drivers/gpu/drm/i915/gt/intel_ringbuffer.c ring->emit = tail; tail 80 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c desc, desc->head, desc->tail); tail 82 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c desc->tail = 0; tail 299 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c u32 tail = desc->tail / 4; /* in dwords */ tail 308 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c GEM_BUG_ON(desc->tail % 4); tail 309 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c GEM_BUG_ON(tail >= size); tail 315 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c if (tail < head) tail 316 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c used = (size - head) + tail; tail 318 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c used = tail - head; tail 339 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c cmds[tail] = header; tail 340 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c tail = (tail + 1) % size; tail 342 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c cmds[tail] = fence; tail 343 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c tail = (tail + 1) % size; tail 346 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c cmds[tail] = action[i]; tail 347 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c tail = (tail + 1) % size; tail 351 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c desc->tail = tail * 4; tail 352 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c GEM_BUG_ON(desc->tail > desc->size); tail 557 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c u32 tail = desc->tail / 4; /* in dwords */ tail 566 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c GEM_BUG_ON(desc->tail % 4); tail 567 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c GEM_BUG_ON(tail >= size); tail 571 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c available = tail - head; tail 578 drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c CT_DEBUG_DRIVER("CT: available %d (%u:%u)\n", available, head, tail); tail 119 drivers/gpu/drm/i915/gt/uc/intel_guc_fwif.h u32 tail; tail 252 drivers/gpu/drm/i915/gt/uc/intel_guc_fwif.h u32 tail; /* offset updated by owner */ tail 416 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c wq_off = READ_ONCE(desc->tail); tail 439 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c WRITE_ONCE(desc->tail, (wq_off + wqi_size) & (GUC_WQ_SIZE - 1)); tail 468 drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c u32 ring_tail = intel_ring_set_tail(rq->ring, rq->tail) / sizeof(u64); tail 69 drivers/gpu/drm/i915/gt/uc/selftest_guc.c err = wait_for(READ_ONCE(desc->head) == READ_ONCE(desc->tail), 10); tail 811 drivers/gpu/drm/i915/gvt/scheduler.c u32 head, tail; tail 818 drivers/gpu/drm/i915/gvt/scheduler.c tail = workload->rb_tail; tail 821 drivers/gpu/drm/i915/gvt/scheduler.c if (tail < head) { tail 828 drivers/gpu/drm/i915/gvt/scheduler.c head = (wrap_count << RB_HEAD_WRAP_CNT_OFF) | tail; tail 831 drivers/gpu/drm/i915/gvt/scheduler.c vgpu_vreg_t(vgpu, RING_TAIL(ring_base)) = tail; tail 1485 drivers/gpu/drm/i915/gvt/scheduler.c u32 head, tail, start, ctl, ctx_ctl, per_ctx, 
indirect_ctx; tail 1500 drivers/gpu/drm/i915/gvt/scheduler.c RING_CTX_OFF(ring_tail.val), &tail, 4); tail 1505 drivers/gpu/drm/i915/gvt/scheduler.c tail &= RB_TAIL_OFF_MASK; tail 1548 drivers/gpu/drm/i915/gvt/scheduler.c workload->rb_tail = tail; tail 1591 drivers/gpu/drm/i915/gvt/scheduler.c workload, ring_id, head, tail, start, ctl); tail 264 drivers/gpu/drm/i915/i915_active.c struct llist_node *head = NULL, *tail = NULL; tail 292 drivers/gpu/drm/i915/i915_active.c if (!tail) tail 293 drivers/gpu/drm/i915/i915_active.c tail = pos; tail 296 drivers/gpu/drm/i915/i915_active.c llist_add_batch(head, tail, &engine->barrier_tasks); tail 1562 drivers/gpu/drm/i915/i915_debugfs.c ring->space, ring->head, ring->tail, ring->emit); tail 466 drivers/gpu/drm/i915/i915_gpu_error.c erq->start, erq->head, erq->tail); tail 489 drivers/gpu/drm/i915/i915_gpu_error.c ee->tail, ee->rq_post, ee->rq_tail); tail 1102 drivers/gpu/drm/i915/i915_gpu_error.c ee->tail = ENGINE_READ(engine, RING_TAIL); tail 1181 drivers/gpu/drm/i915/i915_gpu_error.c erq->tail = request->tail; tail 1415 drivers/gpu/drm/i915/i915_gpu_error.c ee->cpu_ring_tail = request->ring->tail; tail 1419 drivers/gpu/drm/i915/i915_gpu_error.c ee->rq_tail = request->tail; tail 101 drivers/gpu/drm/i915/i915_gpu_error.h u32 tail; tail 150 drivers/gpu/drm/i915/i915_gpu_error.h u32 tail; tail 225 drivers/gpu/drm/i915/i915_perf.c #define OA_TAKEN(tail, head) ((tail - head) & (OA_BUFFER_SIZE - 1)) tail 666 drivers/gpu/drm/i915/i915_perf.c u32 head, tail; tail 677 drivers/gpu/drm/i915/i915_perf.c tail = stream->oa_buffer.tails[aged_tail_idx].offset; tail 685 drivers/gpu/drm/i915/i915_perf.c if (tail == INVALID_TAIL_PTR) tail 693 drivers/gpu/drm/i915/i915_perf.c tail -= gtt_offset; tail 703 drivers/gpu/drm/i915/i915_perf.c tail > OA_BUFFER_SIZE || tail % report_size, tail 705 drivers/gpu/drm/i915/i915_perf.c head, tail)) tail 710 drivers/gpu/drm/i915/i915_perf.c (taken = OA_TAKEN(tail, head)); tail 954 drivers/gpu/drm/i915/i915_perf.c u32 head, tail; tail 965 drivers/gpu/drm/i915/i915_perf.c tail = stream->oa_buffer.tails[aged_tail_idx].offset; tail 972 drivers/gpu/drm/i915/i915_perf.c if (tail == INVALID_TAIL_PTR) tail 979 drivers/gpu/drm/i915/i915_perf.c tail -= gtt_offset; tail 988 drivers/gpu/drm/i915/i915_perf.c tail > OA_BUFFER_SIZE || tail % report_size, tail 990 drivers/gpu/drm/i915/i915_perf.c head, tail)) tail 995 drivers/gpu/drm/i915/i915_perf.c (taken = OA_TAKEN(tail, head)); tail 197 drivers/gpu/drm/i915/i915_request.h u32 tail; tail 84 drivers/gpu/drm/mga/mga_dma.c primary->tail = 0; tail 106 drivers/gpu/drm/mga/mga_dma.c u32 head, tail; tail 120 drivers/gpu/drm/mga/mga_dma.c if (primary->tail == primary->last_flush) { tail 125 drivers/gpu/drm/mga/mga_dma.c tail = primary->tail + dev_priv->primary->offset; tail 139 drivers/gpu/drm/mga/mga_dma.c primary->last_flush = primary->tail; tail 143 drivers/gpu/drm/mga/mga_dma.c if (head <= tail) tail 144 drivers/gpu/drm/mga/mga_dma.c primary->space = primary->size - primary->tail; tail 146 drivers/gpu/drm/mga/mga_dma.c primary->space = head - tail; tail 149 drivers/gpu/drm/mga/mga_dma.c DRM_DEBUG(" tail = 0x%06lx\n", (unsigned long)(tail - dev_priv->primary->offset)); tail 153 drivers/gpu/drm/mga/mga_dma.c MGA_WRITE(MGA_PRIMEND, tail | dev_priv->dma_access); tail 161 drivers/gpu/drm/mga/mga_dma.c u32 head, tail; tail 173 drivers/gpu/drm/mga/mga_dma.c tail = primary->tail + dev_priv->primary->offset; tail 175 drivers/gpu/drm/mga/mga_dma.c primary->tail = 0; tail 187 drivers/gpu/drm/mga/mga_dma.c 
DRM_DEBUG(" tail = 0x%06x\n", primary->tail); tail 192 drivers/gpu/drm/mga/mga_dma.c MGA_WRITE(MGA_PRIMEND, tail | dev_priv->dma_access); tail 275 drivers/gpu/drm/mga/mga_dma.c dev_priv->tail = entry; tail 301 drivers/gpu/drm/mga/mga_dma.c dev_priv->head = dev_priv->tail = NULL; tail 327 drivers/gpu/drm/mga/mga_dma.c drm_mga_freelist_t *tail = dev_priv->tail; tail 335 drivers/gpu/drm/mga/mga_dma.c tail->age.head ? tail 336 drivers/gpu/drm/mga/mga_dma.c (unsigned long)(tail->age.head - dev_priv->primary->offset) : 0, tail 337 drivers/gpu/drm/mga/mga_dma.c tail->age.wrap); tail 341 drivers/gpu/drm/mga/mga_dma.c if (TEST_AGE(&tail->age, head, wrap)) { tail 342 drivers/gpu/drm/mga/mga_dma.c prev = dev_priv->tail->prev; tail 343 drivers/gpu/drm/mga/mga_dma.c next = dev_priv->tail; tail 346 drivers/gpu/drm/mga/mga_dma.c dev_priv->tail = prev; tail 371 drivers/gpu/drm/mga/mga_dma.c prev = dev_priv->tail; tail 917 drivers/gpu/drm/mga/mga_dma.c dev_priv->prim.tail = 0; tail 67 drivers/gpu/drm/mga/mga_drv.h u32 tail; tail 97 drivers/gpu/drm/mga/mga_drv.h drm_mga_freelist_t *tail; tail 292 drivers/gpu/drm/mga/mga_drv.h write = dev_priv->prim.tail; \ tail 302 drivers/gpu/drm/mga/mga_drv.h write = dev_priv->prim.tail; \ tail 307 drivers/gpu/drm/mga/mga_drv.h dev_priv->prim.tail = write; \ tail 318 drivers/gpu/drm/mga/mga_drv.h dev_priv->prim.tail, \ tail 370 drivers/gpu/drm/mga/mga_drv.h entry->age.head = (dev_priv->prim.tail + \ tail 579 drivers/gpu/drm/mga/mga_state.c sarea_priv->last_frame.head = dev_priv->prim.tail; tail 71 drivers/gpu/drm/msm/msm_rd.c (CIRC_CNT((circ)->head, (circ)->tail, BUF_SZ)) tail 73 drivers/gpu/drm/msm/msm_rd.c (CIRC_CNT_TO_END((circ)->head, (circ)->tail, BUF_SZ)) tail 76 drivers/gpu/drm/msm/msm_rd.c (CIRC_SPACE((circ)->head, (circ)->tail, BUF_SZ)) tail 78 drivers/gpu/drm/msm/msm_rd.c (CIRC_SPACE_TO_END((circ)->head, (circ)->tail, BUF_SZ)) tail 142 drivers/gpu/drm/msm/msm_rd.c const char *fptr = &fifo->buf[fifo->tail]; tail 162 drivers/gpu/drm/msm/msm_rd.c smp_store_release(&fifo->tail, (fifo->tail + n) & (BUF_SZ - 1)); tail 1665 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c u32 tail = head + init->count * init->pitch; tail 1666 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c while (head < tail) { tail 63 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c u32 head, tail; tail 66 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c tail = nvkm_falcon_rd32(falcon, queue->tail_reg); tail 68 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c return head == tail; tail 77 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c u32 head, tail, available; tail 84 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c tail = queue->position; tail 86 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c available = head - tail; tail 98 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c nvkm_falcon_read_dmem(priv->falcon, tail, size, 0, data); tail 159 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c u32 head, tail, free; tail 164 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c tail = nvkm_falcon_rd32(falcon, queue->tail_reg); tail 166 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c if (head >= tail) { tail 176 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c if (head < tail) tail 177 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c free = tail - head - 1; tail 387 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c u32 tail; tail 412 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c tail = nvkm_falcon_rd32(falcon, tail_reg); tail 413 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c nvkm_falcon_read_dmem(falcon, tail, HDR_SIZE, 0, hdr); tail 
420 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c nvkm_falcon_read_dmem(falcon, tail + HDR_SIZE, hdr->size - HDR_SIZE, 0, tail 423 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c tail += ALIGN(hdr->size, QUEUE_ALIGNMENT); tail 424 drivers/gpu/drm/nouveau/nvkm/falcon/msgqueue.c nvkm_falcon_wr32(falcon, tail_reg, tail); tail 761 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vma_tail(struct nvkm_vma *vma, u64 tail) tail 765 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c BUG_ON(vma->size == tail); tail 767 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (!(new = nvkm_vma_new(vma->addr + (vma->size - tail), tail))) tail 769 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vma->size -= tail; tail 1642 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c u64 addr, tail; tail 1715 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c tail = this->addr + this->size; tail 1717 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c tail = ALIGN_DOWN(tail, vmm->func->page_block); tail 1719 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (addr <= tail && tail - addr >= size) { tail 191 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h struct nvkm_vma *nvkm_vma_tail(struct nvkm_vma *, u64 tail); tail 213 drivers/gpu/drm/r128/r128_cce.c if (GET_RING_HEAD(dev_priv) == dev_priv->ring.tail) { tail 255 drivers/gpu/drm/r128/r128_cce.c dev_priv->ring.tail = 0; tail 801 drivers/gpu/drm/r128/r128_cce.c dev_priv->tail = entry; tail 80 drivers/gpu/drm/r128/r128_drv.h u32 tail; tail 96 drivers/gpu/drm/r128/r128_drv.h drm_r128_freelist_t *tail; tail 429 drivers/gpu/drm/r128/r128_drv.h ring->space = (GET_RING_HEAD(dev_priv) - ring->tail) * sizeof(u32); tail 498 drivers/gpu/drm/r128/r128_drv.h write = dev_priv->ring.tail; \ tail 512 drivers/gpu/drm/r128/r128_drv.h write, dev_priv->ring.tail); \ tail 517 drivers/gpu/drm/r128/r128_drv.h if (((dev_priv->ring.tail + _nr) & tail_mask) != write) \ tail 520 drivers/gpu/drm/r128/r128_drv.h ((dev_priv->ring.tail + _nr) & tail_mask), \ tail 523 drivers/gpu/drm/r128/r128_drv.h dev_priv->ring.tail = write; \ tail 529 drivers/gpu/drm/r128/r128_drv.h dev_priv->ring.tail); \ tail 531 drivers/gpu/drm/r128/r128_drv.h R128_WRITE(R128_PM4_BUFFER_DL_WPTR, dev_priv->ring.tail); \ tail 224 drivers/gpu/drm/savage/savage_bci.c dev_priv->head.next = &dev_priv->tail; tail 228 drivers/gpu/drm/savage/savage_bci.c dev_priv->tail.next = NULL; tail 229 drivers/gpu/drm/savage/savage_bci.c dev_priv->tail.prev = &dev_priv->head; tail 230 drivers/gpu/drm/savage/savage_bci.c dev_priv->tail.buf = NULL; tail 251 drivers/gpu/drm/savage/savage_bci.c drm_savage_buf_priv_t *tail = dev_priv->tail.prev; tail 265 drivers/gpu/drm/savage/savage_bci.c DRM_DEBUG(" tail=0x%04x %d\n", tail->age.event, tail->age.wrap); tail 268 drivers/gpu/drm/savage/savage_bci.c if (tail->buf && (TEST_AGE(&tail->age, event, wrap) || event == 0)) { tail 269 drivers/gpu/drm/savage/savage_bci.c drm_savage_buf_priv_t *next = tail->next; tail 270 drivers/gpu/drm/savage/savage_bci.c drm_savage_buf_priv_t *prev = tail->prev; tail 273 drivers/gpu/drm/savage/savage_bci.c tail->next = tail->prev = NULL; tail 274 drivers/gpu/drm/savage/savage_bci.c return tail->buf; tail 277 drivers/gpu/drm/savage/savage_bci.c DRM_DEBUG("returning NULL, tail->buf=%p!\n", tail->buf); tail 136 drivers/gpu/drm/savage/savage_drv.h drm_savage_buf_priv_t head, tail; tail 35 drivers/gpu/drm/vboxvideo/vbox_hgsmi.c const struct hgsmi_buffer_tail *tail) tail 42 drivers/gpu/drm/vboxvideo/vbox_hgsmi.c checksum = hgsmi_hash_process(checksum, (u8 *)tail, 4); tail 52 drivers/hid/hid-wiimote-core.c 
while (wdata->queue.head != wdata->queue.tail) { tail 55 drivers/hid/hid-wiimote-core.c wdata->queue.outq[wdata->queue.tail].data, tail 56 drivers/hid/hid-wiimote-core.c wdata->queue.outq[wdata->queue.tail].size); tail 64 drivers/hid/hid-wiimote-core.c wdata->queue.tail = (wdata->queue.tail + 1) % WIIMOTE_BUFSIZE; tail 99 drivers/hid/hid-wiimote-core.c if (wdata->queue.head == wdata->queue.tail) { tail 102 drivers/hid/hid-wiimote-core.c } else if (newhead != wdata->queue.tail) { tail 111 drivers/hid/hid-wiimote.h __u8 tail; tail 48 drivers/hid/hidraw.c if (list->head == list->tail) { tail 52 drivers/hid/hidraw.c while (list->head == list->tail) { tail 80 drivers/hid/hidraw.c len = list->buffer[list->tail].len > count ? tail 81 drivers/hid/hidraw.c count : list->buffer[list->tail].len; tail 83 drivers/hid/hidraw.c if (list->buffer[list->tail].value) { tail 84 drivers/hid/hidraw.c if (copy_to_user(buffer, list->buffer[list->tail].value, len)) { tail 91 drivers/hid/hidraw.c kfree(list->buffer[list->tail].value); tail 92 drivers/hid/hidraw.c list->buffer[list->tail].value = NULL; tail 93 drivers/hid/hidraw.c list->tail = (list->tail + 1) & (HIDRAW_BUFFER_SIZE - 1); tail 255 drivers/hid/hidraw.c if (list->head != list->tail) tail 489 drivers/hid/hidraw.c if (new_head == list->tail) tail 42 drivers/hid/uhid.c __u8 tail; tail 78 drivers/hid/uhid.c if (newhead != uhid->tail) { tail 666 drivers/hid/uhid.c if (uhid->head == uhid->tail) tail 670 drivers/hid/uhid.c uhid->head != uhid->tail); tail 679 drivers/hid/uhid.c if (uhid->head == uhid->tail) { tail 684 drivers/hid/uhid.c if (copy_to_user(buffer, uhid->outq[uhid->tail], len)) { tail 687 drivers/hid/uhid.c kfree(uhid->outq[uhid->tail]); tail 688 drivers/hid/uhid.c uhid->outq[uhid->tail] = NULL; tail 691 drivers/hid/uhid.c uhid->tail = (uhid->tail + 1) % UHID_BUFSIZE; tail 773 drivers/hid/uhid.c if (uhid->head != uhid->tail) tail 41 drivers/hid/usbhid/hiddev.c int tail; tail 338 drivers/hid/usbhid/hiddev.c if (list->head == list->tail) { tail 341 drivers/hid/usbhid/hiddev.c while (list->head == list->tail) { tail 374 drivers/hid/usbhid/hiddev.c while (list->head != list->tail && tail 377 drivers/hid/usbhid/hiddev.c if (list->buffer[list->tail].field_index != HID_FIELD_INDEX_NONE) { tail 380 drivers/hid/usbhid/hiddev.c event.hid = list->buffer[list->tail].usage_code; tail 381 drivers/hid/usbhid/hiddev.c event.value = list->buffer[list->tail].value; tail 389 drivers/hid/usbhid/hiddev.c if (list->buffer[list->tail].field_index != HID_FIELD_INDEX_NONE || tail 392 drivers/hid/usbhid/hiddev.c if (copy_to_user(buffer + retval, list->buffer + list->tail, sizeof(struct hiddev_usage_ref))) { tail 399 drivers/hid/usbhid/hiddev.c list->tail = (list->tail + 1) & (HIDDEV_BUFFER_SIZE - 1); tail 417 drivers/hid/usbhid/hiddev.c if (list->head != list->tail) tail 2615 drivers/infiniband/hw/bnxt_re/ib_verbs.c resp.tail = cq->qplib_cq.hwq.cons; tail 8407 drivers/infiniband/hw/hfi1/chip.c u32 tail; tail 8420 drivers/infiniband/hw/hfi1/chip.c tail = (u32)read_uctxt_csr(rcd->dd, rcd->ctxt, RCV_HDR_TAIL); tail 8421 drivers/infiniband/hw/hfi1/chip.c return rcd->head != tail; tail 11830 drivers/infiniband/hw/hfi1/chip.c u32 head, tail; tail 11836 drivers/infiniband/hw/hfi1/chip.c tail = get_rcvhdrtail(rcd); tail 11838 drivers/infiniband/hw/hfi1/chip.c tail = read_uctxt_csr(rcd->dd, rcd->ctxt, RCV_HDR_TAIL); tail 11840 drivers/infiniband/hw/hfi1/chip.c return head == tail; tail 1700 drivers/infiniband/hw/hfi1/pio.c u32 head, tail; tail 1723 drivers/infiniband/hw/hfi1/pio.c 
tail = sc->sr_tail; tail 1724 drivers/infiniband/hw/hfi1/pio.c while (head != tail) { tail 1725 drivers/infiniband/hw/hfi1/pio.c pbuf = &sc->sr[tail].pbuf; tail 1737 drivers/infiniband/hw/hfi1/pio.c tail++; tail 1738 drivers/infiniband/hw/hfi1/pio.c if (tail >= sc->sr_size) tail 1739 drivers/infiniband/hw/hfi1/pio.c tail = 0; tail 1741 drivers/infiniband/hw/hfi1/pio.c sc->sr_tail = tail; tail 1741 drivers/infiniband/hw/hfi1/rc.c u32 opcode, head, tail; tail 1774 drivers/infiniband/hw/hfi1/rc.c tail = priv->s_tid_cur; tail 1783 drivers/infiniband/hw/hfi1/rc.c wqe = rvt_get_swqe_ptr(qp, tail); tail 1785 drivers/infiniband/hw/hfi1/rc.c if (head == tail && req->comp_seg < req->total_segs) { tail 1786 drivers/infiniband/hw/hfi1/rc.c if (tail == 0) tail 1787 drivers/infiniband/hw/hfi1/rc.c tail = qp->s_size - 1; tail 1789 drivers/infiniband/hw/hfi1/rc.c tail -= 1; tail 1793 drivers/infiniband/hw/hfi1/rc.c tail = qp->s_acked; tail 1800 drivers/infiniband/hw/hfi1/rc.c if ((psn & IB_BTH_REQ_ACK) && tail != head && tail 579 drivers/infiniband/hw/hfi1/sdma.c u16 head, tail; tail 589 drivers/infiniband/hw/hfi1/sdma.c tail = sde->descq_tail & sde->sdma_mask; tail 590 drivers/infiniband/hw/hfi1/sdma.c while (head != tail) { tail 598 drivers/infiniband/hw/hfi1/sdma.c trace_hfi1_sdma_progress(sde, head, tail, txp); tail 2009 drivers/infiniband/hw/hfi1/sdma.c static inline void sdma_update_tail(struct sdma_engine *sde, u16 tail) tail 2013 drivers/infiniband/hw/hfi1/sdma.c writeq(tail, sde->tail_csr); tail 2150 drivers/infiniband/hw/hfi1/sdma.c u16 head, tail, cnt; tail 2153 drivers/infiniband/hw/hfi1/sdma.c tail = sde->descq_tail & sde->sdma_mask; tail 2158 drivers/infiniband/hw/hfi1/sdma.c sde->this_idx, head, tail, cnt, tail 2162 drivers/infiniband/hw/hfi1/sdma.c while (head != tail) { tail 2213 drivers/infiniband/hw/hfi1/sdma.c u16 head, tail; tail 2221 drivers/infiniband/hw/hfi1/sdma.c tail = READ_ONCE(sde->descq_tail) & sde->sdma_mask; tail 2228 drivers/infiniband/hw/hfi1/sdma.c (unsigned long long)read_sde_csr(sde, SD(TAIL)), tail, tail 2245 drivers/infiniband/hw/hfi1/sdma.c while (head != tail) { tail 2310 drivers/infiniband/hw/hfi1/sdma.c u16 tail; tail 2314 drivers/infiniband/hw/hfi1/sdma.c tail = sde->descq_tail & sde->sdma_mask; tail 2315 drivers/infiniband/hw/hfi1/sdma.c sde->descq[tail].qw[0] = cpu_to_le64(descp->qw[0]); tail 2316 drivers/infiniband/hw/hfi1/sdma.c sde->descq[tail].qw[1] = cpu_to_le64(add_gen(sde, descp->qw[1])); tail 2318 drivers/infiniband/hw/hfi1/sdma.c tail, &sde->descq[tail]); tail 2319 drivers/infiniband/hw/hfi1/sdma.c tail = ++sde->descq_tail & sde->sdma_mask; tail 2326 drivers/infiniband/hw/hfi1/sdma.c sde->descq[tail].qw[0] = cpu_to_le64(descp->qw[0]); tail 2335 drivers/infiniband/hw/hfi1/sdma.c sde->descq[tail].qw[1] = cpu_to_le64(qw1); tail 2337 drivers/infiniband/hw/hfi1/sdma.c tail, &sde->descq[tail]); tail 2338 drivers/infiniband/hw/hfi1/sdma.c tail = ++sde->descq_tail & sde->sdma_mask; tail 2340 drivers/infiniband/hw/hfi1/sdma.c tx->next_descq_idx = tail; tail 2348 drivers/infiniband/hw/hfi1/sdma.c return tail; tail 2401 drivers/infiniband/hw/hfi1/sdma.c u16 tail; tail 2414 drivers/infiniband/hw/hfi1/sdma.c tail = submit_tx(sde, tx); tail 2417 drivers/infiniband/hw/hfi1/sdma.c sdma_update_tail(sde, tail); tail 2480 drivers/infiniband/hw/hfi1/sdma.c u16 tail = INVALID_TAIL; tail 2496 drivers/infiniband/hw/hfi1/sdma.c tail = submit_tx(sde, tx); tail 2498 drivers/infiniband/hw/hfi1/sdma.c if (tail != INVALID_TAIL && tail 2500 drivers/infiniband/hw/hfi1/sdma.c 
sdma_update_tail(sde, tail); tail 2501 drivers/infiniband/hw/hfi1/sdma.c tail = INVALID_TAIL; tail 2511 drivers/infiniband/hw/hfi1/sdma.c if (tail != INVALID_TAIL) tail 2512 drivers/infiniband/hw/hfi1/sdma.c sdma_update_tail(sde, tail); tail 1677 drivers/infiniband/hw/hfi1/tid_rdma.c u16 head, tail; tail 1681 drivers/infiniband/hw/hfi1/tid_rdma.c tail = req->clear_tail; tail 1682 drivers/infiniband/hw/hfi1/tid_rdma.c for ( ; CIRC_CNT(head, tail, MAX_FLOWS); tail 1683 drivers/infiniband/hw/hfi1/tid_rdma.c tail = CIRC_NEXT(tail, MAX_FLOWS)) { tail 1684 drivers/infiniband/hw/hfi1/tid_rdma.c flow = &req->flows[tail]; tail 1688 drivers/infiniband/hw/hfi1/tid_rdma.c *fidx = tail; tail 434 drivers/infiniband/hw/hns/hns_roce_device.h u32 tail; tail 541 drivers/infiniband/hw/hns/hns_roce_device.h int tail; tail 2331 drivers/infiniband/hw/hns/hns_roce_hw_v1.c wq->tail += (wqe_ctr - (u16)wq->tail) & tail 2334 drivers/infiniband/hw/hns/hns_roce_hw_v1.c wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)]; tail 2335 drivers/infiniband/hw/hns/hns_roce_hw_v1.c ++wq->tail; tail 2369 drivers/infiniband/hw/hns/hns_roce_hw_v1.c wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)]; tail 2370 drivers/infiniband/hw/hns/hns_roce_hw_v1.c ++wq->tail; tail 2699 drivers/infiniband/hw/hns/hns_roce_hw_v1.c hr_qp->rq.tail = 0; tail 2701 drivers/infiniband/hw/hns/hns_roce_hw_v1.c hr_qp->sq.tail = 0; tail 3312 drivers/infiniband/hw/hns/hns_roce_hw_v1.c hr_qp->rq.tail = 0; tail 3314 drivers/infiniband/hw/hns/hns_roce_hw_v1.c hr_qp->sq.tail = 0; tail 2482 drivers/infiniband/hw/hns/hns_roce_hw_v2.c srq->tail++; tail 2751 drivers/infiniband/hw/hns/hns_roce_hw_v2.c wq->tail += (wqe_ctr - (u16)wq->tail) & tail 2755 drivers/infiniband/hw/hns/hns_roce_hw_v2.c wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)]; tail 2756 drivers/infiniband/hw/hns/hns_roce_hw_v2.c ++wq->tail; tail 2767 drivers/infiniband/hw/hns/hns_roce_hw_v2.c wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)]; tail 2768 drivers/infiniband/hw/hns/hns_roce_hw_v2.c ++wq->tail; tail 4468 drivers/infiniband/hw/hns/hns_roce_hw_v2.c hr_qp->rq.tail = 0; tail 4470 drivers/infiniband/hw/hns/hns_roce_hw_v2.c hr_qp->sq.tail = 0; tail 6256 drivers/infiniband/hw/hns/hns_roce_hw_v2.c if (unlikely(srq->head == srq->tail)) { tail 1589 drivers/infiniband/hw/hns/hns_roce_hw_v2.h u32 tail; tail 1293 drivers/infiniband/hw/hns/hns_roce_qp.c cur = hr_wq->head - hr_wq->tail; tail 1299 drivers/infiniband/hw/hns/hns_roce_qp.c cur = hr_wq->head - hr_wq->tail; tail 284 drivers/infiniband/hw/hns/hns_roce_srq.c srq->tail = srq->max - 1; tail 77 drivers/infiniband/hw/i40iw/i40iw_ctrl.c u32 *tail, tail 82 drivers/infiniband/hw/i40iw/i40iw_ctrl.c *tail = RS_32(*val, I40E_PFPE_CQPTAIL_WQTAIL); tail 86 drivers/infiniband/hw/i40iw/i40iw_ctrl.c *tail = RS_32(*val, I40E_VFPE_CQPTAIL_WQTAIL); tail 99 drivers/infiniband/hw/i40iw/i40iw_ctrl.c u32 tail, tail 114 drivers/infiniband/hw/i40iw/i40iw_ctrl.c if (newtail != tail) { tail 594 drivers/infiniband/hw/i40iw/i40iw_ctrl.c cqp->sq_ring.tail, tail 616 drivers/infiniband/hw/i40iw/i40iw_ctrl.c cqp->sq_ring.tail, tail 917 drivers/infiniband/hw/i40iw/i40iw_ctrl.c u32 val, tail, error; tail 936 drivers/infiniband/hw/i40iw/i40iw_ctrl.c i40iw_get_cqp_reg_info(cqp, &val, &tail, &error); tail 943 drivers/infiniband/hw/i40iw/i40iw_ctrl.c ret_code = i40iw_cqp_poll_registers(cqp, tail, 1000000); tail 990 drivers/infiniband/hw/i40iw/i40iw_ctrl.c u32 tail, val, error; tail 1008 drivers/infiniband/hw/i40iw/i40iw_ctrl.c i40iw_get_cqp_reg_info(cqp, &val, &tail, &error); 
tail 1016 drivers/infiniband/hw/i40iw/i40iw_ctrl.c ret_code = i40iw_cqp_poll_registers(cqp, tail, I40IW_DONE_COUNT); tail 1052 drivers/infiniband/hw/i40iw/i40iw_ctrl.c u32 tail, val, error; tail 1071 drivers/infiniband/hw/i40iw/i40iw_ctrl.c i40iw_get_cqp_reg_info(cqp, &val, &tail, &error); tail 1079 drivers/infiniband/hw/i40iw/i40iw_ctrl.c ret_code = i40iw_cqp_poll_registers(cqp, tail, I40IW_DONE_COUNT); tail 2066 drivers/infiniband/hw/i40iw/i40iw_ctrl.c u32 tail, val, error; tail 2090 drivers/infiniband/hw/i40iw/i40iw_ctrl.c i40iw_get_cqp_reg_info(cqp, &val, &tail, &error); tail 2096 drivers/infiniband/hw/i40iw/i40iw_ctrl.c ret_code = i40iw_cqp_poll_registers(cqp, tail, 1000); tail 3729 drivers/infiniband/hw/i40iw/i40iw_ctrl.c u32 error, val, tail; tail 3736 drivers/infiniband/hw/i40iw/i40iw_ctrl.c i40iw_get_cqp_reg_info(cqp, &val, &tail, &error); tail 3741 drivers/infiniband/hw/i40iw/i40iw_ctrl.c ret_code = i40iw_cqp_poll_registers(cqp, tail, I40IW_DONE_COUNT); tail 3825 drivers/infiniband/hw/i40iw/i40iw_ctrl.c u32 tail, val, error; tail 3842 drivers/infiniband/hw/i40iw/i40iw_ctrl.c i40iw_get_cqp_reg_info(cqp, &val, &tail, &error); tail 3851 drivers/infiniband/hw/i40iw/i40iw_ctrl.c ret_code = i40iw_cqp_poll_registers(cqp, tail, 1000); tail 1548 drivers/infiniband/hw/i40iw/i40iw_d.h (_ring).tail = 0; \ tail 1553 drivers/infiniband/hw/i40iw/i40iw_d.h #define I40IW_RING_GETCURRENT_TAIL(_ring) ((_ring).tail) tail 1580 drivers/infiniband/hw/i40iw/i40iw_d.h (_ring).tail = ((_ring).tail + 1) % (_ring).size tail 1586 drivers/infiniband/hw/i40iw/i40iw_d.h (_ring).tail = ((_ring).tail + (_count)) % (_ring).size tail 1589 drivers/infiniband/hw/i40iw/i40iw_d.h (_ring).tail = (_pos) % (_ring).size tail 1613 drivers/infiniband/hw/i40iw/i40iw_d.h (((_ring).head + (_ring).size - (_ring).tail) % (_ring).size) \ tail 807 drivers/infiniband/hw/i40iw/i40iw_uk.c info->wr_id = qp->rq_wrid_array[qp->rq_ring.tail]; tail 808 drivers/infiniband/hw/i40iw/i40iw_uk.c array_idx = qp->rq_ring.tail; tail 826 drivers/infiniband/hw/i40iw/i40iw_uk.c if (!wqe_idx && (qp->sq_ring.head == qp->sq_ring.tail)) { tail 849 drivers/infiniband/hw/i40iw/i40iw_uk.c u32 tail; tail 851 drivers/infiniband/hw/i40iw/i40iw_uk.c tail = qp->sq_ring.tail; tail 852 drivers/infiniband/hw/i40iw/i40iw_uk.c sw_wqe = qp->sq_base[tail].elem; tail 856 drivers/infiniband/hw/i40iw/i40iw_uk.c addl_wqes = qp->sq_wrtrk_array[tail].wqe_size / I40IW_QP_WQE_MIN_SIZE; tail 857 drivers/infiniband/hw/i40iw/i40iw_uk.c I40IW_RING_SET_TAIL(qp->sq_ring, (tail + addl_wqes)); tail 859 drivers/infiniband/hw/i40iw/i40iw_uk.c info->wr_id = qp->sq_wrtrk_array[tail].wrid; tail 860 drivers/infiniband/hw/i40iw/i40iw_uk.c info->bytes_xfered = qp->sq_wrtrk_array[tail].wr_len; tail 889 drivers/infiniband/hw/i40iw/i40iw_uk.c qword3 |= LS_64(pring->tail, I40IW_CQ_WQEIDX); tail 119 drivers/infiniband/hw/i40iw/i40iw_user.h u32 tail; tail 586 drivers/infiniband/hw/mlx4/cq.c unsigned tail, struct mlx4_cqe *cqe, int is_eth) tail 591 drivers/infiniband/hw/mlx4/cq.c qp->sqp_proxy_rcv[tail].map, tail 594 drivers/infiniband/hw/mlx4/cq.c hdr = (struct mlx4_ib_proxy_sqp_hdr *) (qp->sqp_proxy_rcv[tail].addr); tail 620 drivers/infiniband/hw/mlx4/cq.c cur = wq->head - wq->tail; tail 626 drivers/infiniband/hw/mlx4/cq.c wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)]; tail 629 drivers/infiniband/hw/mlx4/cq.c wq->tail++; tail 675 drivers/infiniband/hw/mlx4/cq.c unsigned tail = 0; tail 740 drivers/infiniband/hw/mlx4/cq.c wq->tail += (u16) (wqe_ctr - (u16) wq->tail); tail 742 
drivers/infiniband/hw/mlx4/cq.c wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)]; tail 743 drivers/infiniband/hw/mlx4/cq.c ++wq->tail; tail 756 drivers/infiniband/hw/mlx4/cq.c tail = wq->tail & (wq->wqe_cnt - 1); tail 757 drivers/infiniband/hw/mlx4/cq.c wc->wr_id = wq->wrid[tail]; tail 758 drivers/infiniband/hw/mlx4/cq.c ++wq->tail; tail 846 drivers/infiniband/hw/mlx4/cq.c use_tunnel_data(*cur_qp, cq, wc, tail, cqe, tail 3121 drivers/infiniband/hw/mlx4/main.c if (mqp->sq.tail != mqp->sq.head) { tail 3139 drivers/infiniband/hw/mlx4/main.c if (mqp->rq.tail != mqp->rq.head) { tail 176 drivers/infiniband/hw/mlx4/mlx4_ib.h unsigned tail; tail 362 drivers/infiniband/hw/mlx4/mlx4_ib.h int tail; tail 2607 drivers/infiniband/hw/mlx4/qp.c qp->rq.tail = 0; tail 2609 drivers/infiniband/hw/mlx4/qp.c qp->sq.tail = 0; tail 3294 drivers/infiniband/hw/mlx4/qp.c cur = wq->head - wq->tail; tail 3300 drivers/infiniband/hw/mlx4/qp.c cur = wq->head - wq->tail; tail 143 drivers/infiniband/hw/mlx4/srq.c srq->tail = srq->msrq.max - 1; tail 293 drivers/infiniband/hw/mlx4/srq.c next = get_wqe(srq, srq->tail); tail 295 drivers/infiniband/hw/mlx4/srq.c srq->tail = wqe_index; tail 327 drivers/infiniband/hw/mlx4/srq.c if (unlikely(srq->head == srq->tail)) { tail 195 drivers/infiniband/hw/mlx5/cq.c wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)]; tail 196 drivers/infiniband/hw/mlx5/cq.c ++wq->tail; tail 334 drivers/infiniband/hw/mlx5/cq.c u16 tail, u16 head) tail 339 drivers/infiniband/hw/mlx5/cq.c idx = tail & (qp->sq.wqe_cnt - 1); tail 343 drivers/infiniband/hw/mlx5/cq.c tail = qp->sq.w_list[idx].next; tail 345 drivers/infiniband/hw/mlx5/cq.c tail = qp->sq.w_list[idx].next; tail 346 drivers/infiniband/hw/mlx5/cq.c qp->sq.last_poll = tail; tail 395 drivers/infiniband/hw/mlx5/cq.c cur = wq->head - wq->tail; tail 404 drivers/infiniband/hw/mlx5/cq.c idx = (is_send) ? 
wq->last_poll : wq->tail; tail 409 drivers/infiniband/hw/mlx5/cq.c wq->tail++; tail 503 drivers/infiniband/hw/mlx5/cq.c wq->tail = wq->wqe_head[idx] + 1; tail 529 drivers/infiniband/hw/mlx5/cq.c wq->tail = wq->wqe_head[idx] + 1; tail 540 drivers/infiniband/hw/mlx5/cq.c wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)]; tail 541 drivers/infiniband/hw/mlx5/cq.c ++wq->tail; tail 4483 drivers/infiniband/hw/mlx5/main.c if (mqp->sq.tail != mqp->sq.head) { tail 4500 drivers/infiniband/hw/mlx5/main.c if (mqp->rq.tail != mqp->rq.head) { tail 284 drivers/infiniband/hw/mlx5/mlx5_ib.h unsigned tail; tail 544 drivers/infiniband/hw/mlx5/mlx5_ib.h int tail; tail 3722 drivers/infiniband/hw/mlx5/qp.c qp->rq.tail = 0; tail 3724 drivers/infiniband/hw/mlx5/qp.c qp->sq.tail = 0; tail 4075 drivers/infiniband/hw/mlx5/qp.c cur = wq->head - wq->tail; tail 4081 drivers/infiniband/hw/mlx5/qp.c cur = wq->head - wq->tail; tail 155 drivers/infiniband/hw/mlx5/srq.c srq->tail = srq->msrq.max - 1; tail 413 drivers/infiniband/hw/mlx5/srq.c next = get_wqe(srq, srq->tail); tail 415 drivers/infiniband/hw/mlx5/srq.c srq->tail = wqe_index; tail 448 drivers/infiniband/hw/mlx5/srq.c if (unlikely(srq->head == srq->tail)) { tail 563 drivers/infiniband/hw/mthca/mthca_cq.c wq->tail += wqe_index - wq->last_comp; tail 565 drivers/infiniband/hw/mthca/mthca_cq.c wq->tail += wqe_index + wq->max - wq->last_comp; tail 252 drivers/infiniband/hw/mthca/mthca_provider.h unsigned tail; tail 235 drivers/infiniband/hw/mthca/mthca_qp.c wq->tail = 0; tail 1572 drivers/infiniband/hw/mthca/mthca_qp.c cur = wq->head - wq->tail; tail 1578 drivers/infiniband/hw/mthca/mthca_qp.c cur = wq->head - wq->tail; tail 1657 drivers/infiniband/hw/mthca/mthca_qp.c qp->sq.head, qp->sq.tail, tail 1853 drivers/infiniband/hw/mthca/mthca_qp.c qp->rq.head, qp->rq.tail, tail 1987 drivers/infiniband/hw/mthca/mthca_qp.c qp->sq.head, qp->sq.tail, tail 2182 drivers/infiniband/hw/mthca/mthca_qp.c qp->rq.head, qp->rq.tail, tail 138 drivers/infiniband/hw/ocrdma/ocrdma.h u16 head, tail; tail 366 drivers/infiniband/hw/ocrdma/ocrdma.h u32 head, tail; tail 113 drivers/infiniband/hw/ocrdma/ocrdma_hw.c return eq->q.va + (eq->q.tail * sizeof(struct ocrdma_eqe)); tail 118 drivers/infiniband/hw/ocrdma/ocrdma_hw.c eq->q.tail = (eq->q.tail + 1) & (OCRDMA_EQ_LEN - 1); tail 124 drivers/infiniband/hw/ocrdma/ocrdma_hw.c (dev->mq.cq.va + (dev->mq.cq.tail * sizeof(struct ocrdma_mcqe))); tail 133 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dev->mq.cq.tail = (dev->mq.cq.tail + 1) & (OCRDMA_MQ_CQ_LEN - 1); tail 2129 drivers/infiniband/hw/ocrdma/ocrdma_hw.c qp->sq.tail = 0; tail 2131 drivers/infiniband/hw/ocrdma/ocrdma_hw.c qp->rq.tail = 0; tail 1566 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c return ((q->max_wqe_idx - q->head) + q->tail) % q->max_cnt; tail 1571 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c return (qp->sq.tail == qp->sq.head); tail 1576 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c return (qp->rq.tail == qp->rq.head); tail 1597 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c q->tail = (q->tail + 1) & q->max_wqe_idx; tail 2511 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c ibwc->wr_id = qp->rqe_wr_id_tbl[qp->rq.tail]; tail 2520 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c ocrdma_update_wc(qp, ibwc, qp->sq.tail); tail 2576 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c int tail = qp->sq.tail; tail 2579 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c if (!qp->wqe_wr_id_tbl[tail].signaled) { tail 2585 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c ocrdma_update_wc(qp, ibwc, tail); tail 2590 
drivers/infiniband/hw/ocrdma/ocrdma_verbs.c if (tail != wqe_idx) tail 2724 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c ibwc->wr_id = qp->rqe_wr_id_tbl[qp->rq.tail]; tail 2836 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c ocrdma_update_wc(qp, ibwc, qp->sq.tail); tail 2839 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c ibwc->wr_id = qp->rqe_wr_id_tbl[qp->rq.tail]; tail 2059 drivers/infiniband/hw/qib/qib_iba6120.c u32 head, tail; tail 2063 drivers/infiniband/hw/qib/qib_iba6120.c tail = qib_get_rcvhdrtail(rcd); tail 2065 drivers/infiniband/hw/qib/qib_iba6120.c tail = qib_read_ureg32(rcd->dd, ur_rcvhdrtail, rcd->ctxt); tail 2066 drivers/infiniband/hw/qib/qib_iba6120.c return head == tail; tail 3361 drivers/infiniband/hw/qib/qib_iba6120.c static void qib_sdma_update_6120_tail(struct qib_pportdata *ppd, u16 tail) tail 2711 drivers/infiniband/hw/qib/qib_iba7220.c u32 head, tail; tail 2715 drivers/infiniband/hw/qib/qib_iba7220.c tail = qib_get_rcvhdrtail(rcd); tail 2717 drivers/infiniband/hw/qib/qib_iba7220.c tail = qib_read_ureg32(rcd->dd, ur_rcvhdrtail, rcd->ctxt); tail 2718 drivers/infiniband/hw/qib/qib_iba7220.c return head == tail; tail 4170 drivers/infiniband/hw/qib/qib_iba7220.c static void qib_sdma_update_7220_tail(struct qib_pportdata *ppd, u16 tail) tail 4174 drivers/infiniband/hw/qib/qib_iba7220.c ppd->sdma_descq_tail = tail; tail 4175 drivers/infiniband/hw/qib/qib_iba7220.c qib_write_kreg(ppd->dd, kr_senddmatail, tail); tail 1546 drivers/infiniband/hw/qib/qib_iba7322.c static void qib_sdma_update_7322_tail(struct qib_pportdata *ppd, u16 tail) tail 1550 drivers/infiniband/hw/qib/qib_iba7322.c ppd->sdma_descq_tail = tail; tail 1551 drivers/infiniband/hw/qib/qib_iba7322.c qib_write_kreg_port(ppd, krp_senddmatail, tail); tail 4448 drivers/infiniband/hw/qib/qib_iba7322.c u32 head, tail; tail 4452 drivers/infiniband/hw/qib/qib_iba7322.c tail = qib_get_rcvhdrtail(rcd); tail 4454 drivers/infiniband/hw/qib/qib_iba7322.c tail = qib_read_ureg32(rcd->dd, ur_rcvhdrtail, rcd->ctxt); tail 4455 drivers/infiniband/hw/qib/qib_iba7322.c return head == tail; tail 519 drivers/infiniband/hw/qib/qib_sdma.c u16 tail; tail 551 drivers/infiniband/hw/qib/qib_sdma.c tail = ppd->sdma_descq_tail; tail 552 drivers/infiniband/hw/qib/qib_sdma.c descqp = &ppd->sdma_descq[tail].qw[0]; tail 557 drivers/infiniband/hw/qib/qib_sdma.c if (++tail == ppd->sdma_descq_cnt) { tail 558 drivers/infiniband/hw/qib/qib_sdma.c tail = 0; tail 563 drivers/infiniband/hw/qib/qib_sdma.c tx->txreq.start_idx = tail; tail 587 drivers/infiniband/hw/qib/qib_sdma.c if (++tail == ppd->sdma_descq_cnt) { tail 588 drivers/infiniband/hw/qib/qib_sdma.c tail = 0; tail 597 drivers/infiniband/hw/qib/qib_sdma.c if (!tail) tail 607 drivers/infiniband/hw/qib/qib_sdma.c tx->txreq.next_descq_idx = tail; tail 608 drivers/infiniband/hw/qib/qib_sdma.c ppd->dd->f_sdma_update_tail(ppd, tail); tail 615 drivers/infiniband/hw/qib/qib_sdma.c if (!tail) tail 616 drivers/infiniband/hw/qib/qib_sdma.c tail = ppd->sdma_descq_cnt - 1; tail 618 drivers/infiniband/hw/qib/qib_sdma.c tail--; tail 619 drivers/infiniband/hw/qib/qib_sdma.c if (tail == ppd->sdma_descq_tail) tail 621 drivers/infiniband/hw/qib/qib_sdma.c unmap_desc(ppd, tail); tail 688 drivers/infiniband/hw/qib/qib_sdma.c u16 head, tail, cnt; tail 691 drivers/infiniband/hw/qib/qib_sdma.c tail = ppd->sdma_descq_tail; tail 698 drivers/infiniband/hw/qib/qib_sdma.c "SDMA ppd->sdma_descq_tail: %u\n", tail); tail 703 drivers/infiniband/hw/qib/qib_sdma.c while (head != tail) { tail 1210 drivers/infiniband/hw/qib/qib_user_sdma.c 
unsigned ofs, u16 tail, u8 gen) tail 1218 drivers/infiniband/hw/qib/qib_user_sdma.c descqp = &ppd->sdma_descq[tail].qw[0]; tail 1240 drivers/infiniband/hw/qib/qib_user_sdma.c u16 tail, tail_c; tail 1249 drivers/infiniband/hw/qib/qib_user_sdma.c tail_c = tail = ppd->sdma_descq_tail; tail 1257 drivers/infiniband/hw/qib/qib_user_sdma.c u16 dtail = tail; tail 1260 drivers/infiniband/hw/qib/qib_user_sdma.c qib_user_sdma_send_frag(ppd, pkt, i, ofs, tail, gen); tail 1263 drivers/infiniband/hw/qib/qib_user_sdma.c if (++tail == ppd->sdma_descq_cnt) { tail 1264 drivers/infiniband/hw/qib/qib_user_sdma.c tail = 0; tail 1267 drivers/infiniband/hw/qib/qib_user_sdma.c } else if (tail == (ppd->sdma_descq_cnt>>1)) { tail 1290 drivers/infiniband/hw/qib/qib_user_sdma.c tail_c = dtail = tail; tail 288 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c int tail = pvrdma_idx(&cq->ring_state->rx.prod_tail, tail 293 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c items = (tail > head) ? (tail - head) : tail 294 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c (cq->ibcq.cqe - head + tail); tail 295 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c curr = --tail; tail 299 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c if (tail < 0) tail 300 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c tail = cq->ibcq.cqe - 1; tail 303 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c if (curr != tail) { tail 304 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c cqe = get_cqe(cq, tail); tail 307 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c tail--; tail 640 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c unsigned int tail = 0; tail 643 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c qp->sq.ring, qp->sq.wqe_cnt, &tail))) { tail 705 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c wqe_hdr = (struct pvrdma_sq_wqe_hdr *)get_sq_wqe(qp, tail); tail 857 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c unsigned int tail = 0; tail 869 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c qp->rq.ring, qp->rq.wqe_cnt, &tail))) { tail 877 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c wqe_hdr = (struct pvrdma_rq_wqe_hdr *)get_rq_wqe(qp, tail); tail 89 drivers/infiniband/hw/vmw_pvrdma/pvrdma_ring.h const __u32 tail = atomic_read(&r->prod_tail); tail 92 drivers/infiniband/hw/vmw_pvrdma/pvrdma_ring.h if (pvrdma_idx_valid(tail, max_elems) && tail 94 drivers/infiniband/hw/vmw_pvrdma/pvrdma_ring.h *out_tail = tail & (max_elems - 1); tail 95 drivers/infiniband/hw/vmw_pvrdma/pvrdma_ring.h return tail != (head ^ max_elems); tail 103 drivers/infiniband/hw/vmw_pvrdma/pvrdma_ring.h const __u32 tail = atomic_read(&r->prod_tail); tail 106 drivers/infiniband/hw/vmw_pvrdma/pvrdma_ring.h if (pvrdma_idx_valid(tail, max_elems) && tail 109 drivers/infiniband/hw/vmw_pvrdma/pvrdma_ring.h return tail != head; tail 76 drivers/infiniband/sw/rdmavt/cq.c u32 tail; tail 84 drivers/infiniband/sw/rdmavt/cq.c tail = RDMA_READ_UAPI_ATOMIC(u_wc->tail); tail 89 drivers/infiniband/sw/rdmavt/cq.c tail = k_wc->tail; tail 103 drivers/infiniband/sw/rdmavt/cq.c if (unlikely(next == tail || cq->cq_full)) { tail 360 drivers/infiniband/sw/rdmavt/cq.c RDMA_READ_UAPI_ATOMIC(cq->queue->tail)) tail 363 drivers/infiniband/sw/rdmavt/cq.c if (cq->kqueue->head != cq->kqueue->tail) tail 382 drivers/infiniband/sw/rdmavt/cq.c u32 head, tail, n; tail 427 drivers/infiniband/sw/rdmavt/cq.c tail = RDMA_READ_UAPI_ATOMIC(old_u_wc->tail); tail 431 drivers/infiniband/sw/rdmavt/cq.c tail = old_k_wc->tail; tail 436 drivers/infiniband/sw/rdmavt/cq.c if (tail > (u32)cq->ibcq.cqe) tail 437 drivers/infiniband/sw/rdmavt/cq.c tail = (u32)cq->ibcq.cqe; tail 438 
drivers/infiniband/sw/rdmavt/cq.c if (head < tail) tail 439 drivers/infiniband/sw/rdmavt/cq.c n = cq->ibcq.cqe + 1 + head - tail; tail 441 drivers/infiniband/sw/rdmavt/cq.c n = head - tail; tail 446 drivers/infiniband/sw/rdmavt/cq.c for (n = 0; tail != head; n++) { tail 448 drivers/infiniband/sw/rdmavt/cq.c u_wc->uqueue[n] = old_u_wc->uqueue[tail]; tail 450 drivers/infiniband/sw/rdmavt/cq.c k_wc->kqueue[n] = old_k_wc->kqueue[tail]; tail 451 drivers/infiniband/sw/rdmavt/cq.c if (tail == (u32)cq->ibcq.cqe) tail 452 drivers/infiniband/sw/rdmavt/cq.c tail = 0; tail 454 drivers/infiniband/sw/rdmavt/cq.c tail++; tail 459 drivers/infiniband/sw/rdmavt/cq.c RDMA_WRITE_UAPI_ATOMIC(u_wc->tail, 0); tail 463 drivers/infiniband/sw/rdmavt/cq.c k_wc->tail = 0; tail 523 drivers/infiniband/sw/rdmavt/cq.c u32 tail; tail 532 drivers/infiniband/sw/rdmavt/cq.c tail = wc->tail; tail 533 drivers/infiniband/sw/rdmavt/cq.c if (tail > (u32)cq->ibcq.cqe) tail 534 drivers/infiniband/sw/rdmavt/cq.c tail = (u32)cq->ibcq.cqe; tail 536 drivers/infiniband/sw/rdmavt/cq.c if (tail == wc->head) tail 539 drivers/infiniband/sw/rdmavt/cq.c trace_rvt_cq_poll(cq, &wc->kqueue[tail], npolled); tail 540 drivers/infiniband/sw/rdmavt/cq.c *entry = wc->kqueue[tail]; tail 541 drivers/infiniband/sw/rdmavt/cq.c if (tail >= cq->ibcq.cqe) tail 542 drivers/infiniband/sw/rdmavt/cq.c tail = 0; tail 544 drivers/infiniband/sw/rdmavt/cq.c tail++; tail 546 drivers/infiniband/sw/rdmavt/cq.c wc->tail = tail; tail 1372 drivers/infiniband/sw/rdmavt/qp.c u32 tail; tail 1381 drivers/infiniband/sw/rdmavt/qp.c tail = RDMA_READ_UAPI_ATOMIC(wq->tail); tail 1385 drivers/infiniband/sw/rdmavt/qp.c tail = kwq->tail; tail 1390 drivers/infiniband/sw/rdmavt/qp.c if (tail >= qp->r_rq.size) tail 1391 drivers/infiniband/sw/rdmavt/qp.c tail = 0; tail 1392 drivers/infiniband/sw/rdmavt/qp.c while (tail != head) { tail 1393 drivers/infiniband/sw/rdmavt/qp.c wc.wr_id = rvt_get_rwqe_ptr(&qp->r_rq, tail)->wr_id; tail 1394 drivers/infiniband/sw/rdmavt/qp.c if (++tail >= qp->r_rq.size) tail 1395 drivers/infiniband/sw/rdmavt/qp.c tail = 0; tail 1399 drivers/infiniband/sw/rdmavt/qp.c RDMA_WRITE_UAPI_ATOMIC(wq->tail, tail); tail 1401 drivers/infiniband/sw/rdmavt/qp.c kwq->tail = tail; tail 1850 drivers/infiniband/sw/rdmavt/qp.c if (next == READ_ONCE(wq->tail)) { tail 2265 drivers/infiniband/sw/rdmavt/qp.c if (next == READ_ONCE(wq->tail)) { tail 2362 drivers/infiniband/sw/rdmavt/qp.c static u32 get_count(struct rvt_rq *rq, u32 tail, u32 head) tail 2370 drivers/infiniband/sw/rdmavt/qp.c if (count < tail) tail 2371 drivers/infiniband/sw/rdmavt/qp.c count += rq->size - tail; tail 2373 drivers/infiniband/sw/rdmavt/qp.c count -= tail; tail 2416 drivers/infiniband/sw/rdmavt/qp.c u32 tail; tail 2441 drivers/infiniband/sw/rdmavt/qp.c tail = RDMA_READ_UAPI_ATOMIC(wq->tail); tail 2443 drivers/infiniband/sw/rdmavt/qp.c tail = kwq->tail; tail 2447 drivers/infiniband/sw/rdmavt/qp.c if (tail >= rq->size) tail 2448 drivers/infiniband/sw/rdmavt/qp.c tail = 0; tail 2452 drivers/infiniband/sw/rdmavt/qp.c kwq->count = get_count(rq, tail, head); tail 2460 drivers/infiniband/sw/rdmavt/qp.c wqe = rvt_get_rwqe_ptr(rq, tail); tail 2466 drivers/infiniband/sw/rdmavt/qp.c if (++tail >= rq->size) tail 2467 drivers/infiniband/sw/rdmavt/qp.c tail = 0; tail 2469 drivers/infiniband/sw/rdmavt/qp.c RDMA_WRITE_UAPI_ATOMIC(wq->tail, tail); tail 2471 drivers/infiniband/sw/rdmavt/qp.c kwq->tail = tail; tail 2487 drivers/infiniband/sw/rdmavt/qp.c kwq->count = get_count(rq, tail, get_rvt_head(rq, ip)); tail 108 
drivers/infiniband/sw/rdmavt/rc.c u32 tail; tail 115 drivers/infiniband/sw/rdmavt/rc.c tail = RDMA_READ_UAPI_ATOMIC(qp->r_rq.wq->tail); tail 118 drivers/infiniband/sw/rdmavt/rc.c tail = READ_ONCE(qp->r_rq.kwq->tail); tail 122 drivers/infiniband/sw/rdmavt/rc.c if (tail >= qp->r_rq.size) tail 123 drivers/infiniband/sw/rdmavt/rc.c tail = 0; tail 130 drivers/infiniband/sw/rdmavt/rc.c credits = head - tail; tail 179 drivers/infiniband/sw/rdmavt/srq.c u32 sz, size, n, head, tail; tail 217 drivers/infiniband/sw/rdmavt/srq.c tail = RDMA_READ_UAPI_ATOMIC(owq->tail); tail 221 drivers/infiniband/sw/rdmavt/srq.c tail = okwq->tail; tail 223 drivers/infiniband/sw/rdmavt/srq.c if (head >= srq->rq.size || tail >= srq->rq.size) { tail 228 drivers/infiniband/sw/rdmavt/srq.c if (n < tail) tail 229 drivers/infiniband/sw/rdmavt/srq.c n += srq->rq.size - tail; tail 231 drivers/infiniband/sw/rdmavt/srq.c n -= tail; tail 238 drivers/infiniband/sw/rdmavt/srq.c while (tail != head) { tail 242 drivers/infiniband/sw/rdmavt/srq.c wqe = rvt_get_rwqe_ptr(&srq->rq, tail); tail 249 drivers/infiniband/sw/rdmavt/srq.c if (++tail >= srq->rq.size) tail 250 drivers/infiniband/sw/rdmavt/srq.c tail = 0; tail 256 drivers/infiniband/sw/rdmavt/srq.c RDMA_WRITE_UAPI_ATOMIC(tmp_rq.wq->tail, 0); tail 259 drivers/infiniband/sw/rdmavt/srq.c tmp_rq.kwq->tail = 0; tail 535 drivers/infiniband/ulp/ipoib/ipoib_cm.c skb->tail += size; tail 43 drivers/input/evdev.c unsigned int tail; tail 110 drivers/input/evdev.c head = client->tail; tail 111 drivers/input/evdev.c client->packet_head = client->tail; tail 116 drivers/input/evdev.c for (i = client->tail; i != client->head; i = (i + 1) & mask) { tail 158 drivers/input/evdev.c if (unlikely(client->head == client->tail)) { tail 160 drivers/input/evdev.c client->tail = (client->head - 1) & (client->bufsize - 1); tail 161 drivers/input/evdev.c client->packet_head = client->tail; tail 203 drivers/input/evdev.c if (client->head != client->tail) { tail 204 drivers/input/evdev.c client->packet_head = client->head = client->tail; tail 220 drivers/input/evdev.c if (unlikely(client->head == client->tail)) { tail 225 drivers/input/evdev.c client->tail = (client->head - 2) & (client->bufsize - 1); tail 227 drivers/input/evdev.c client->buffer[client->tail] = (struct input_event) { tail 235 drivers/input/evdev.c client->packet_head = client->tail; tail 546 drivers/input/evdev.c have_event = client->packet_head != client->tail; tail 548 drivers/input/evdev.c *event = client->buffer[client->tail++]; tail 549 drivers/input/evdev.c client->tail &= client->bufsize - 1; tail 573 drivers/input/evdev.c if (client->packet_head == client->tail && tail 598 drivers/input/evdev.c client->packet_head != client->tail || tail 622 drivers/input/evdev.c if (client->packet_head != client->tail) tail 61 drivers/input/joydev.c int tail; tail 104 drivers/input/joydev.c if (client->tail == client->head) tail 327 drivers/input/joydev.c have_event = client->head != client->tail; tail 329 drivers/input/joydev.c *event = client->buffer[client->tail++]; tail 330 drivers/input/joydev.c client->tail &= JOYDEV_BUFFER_SIZE - 1; tail 365 drivers/input/joydev.c client->tail = client->head; tail 381 drivers/input/joydev.c client->head != client->tail; tail 33 drivers/input/joystick/iforce/iforce-packets.c int head, tail; tail 42 drivers/input/joystick/iforce/iforce-packets.c tail = iforce->xmit.tail; tail 45 drivers/input/joystick/iforce/iforce-packets.c if (CIRC_SPACE(head, tail, XMIT_SIZE) < n+2) { tail 52 
drivers/input/joystick/iforce/iforce-packets.c empty = head == tail; tail 63 drivers/input/joystick/iforce/iforce-packets.c c = CIRC_SPACE_TO_END(head, tail, XMIT_SIZE); tail 41 drivers/input/joystick/iforce/iforce-serio.c if (iforce->xmit.head == iforce->xmit.tail) { tail 51 drivers/input/joystick/iforce/iforce-serio.c serio_write(iforce_serio->serio, iforce->xmit.buf[iforce->xmit.tail]); tail 52 drivers/input/joystick/iforce/iforce-serio.c cs ^= iforce->xmit.buf[iforce->xmit.tail]; tail 53 drivers/input/joystick/iforce/iforce-serio.c XMIT_INC(iforce->xmit.tail, 1); tail 55 drivers/input/joystick/iforce/iforce-serio.c for (i=iforce->xmit.buf[iforce->xmit.tail]; i >= 0; --i) { tail 57 drivers/input/joystick/iforce/iforce-serio.c iforce->xmit.buf[iforce->xmit.tail]); tail 58 drivers/input/joystick/iforce/iforce-serio.c cs ^= iforce->xmit.buf[iforce->xmit.tail]; tail 59 drivers/input/joystick/iforce/iforce-serio.c XMIT_INC(iforce->xmit.tail, 1); tail 32 drivers/input/joystick/iforce/iforce-usb.c if (iforce->xmit.head == iforce->xmit.tail) { tail 38 drivers/input/joystick/iforce/iforce-usb.c ((char *)iforce_usb->out->transfer_buffer)[0] = iforce->xmit.buf[iforce->xmit.tail]; tail 39 drivers/input/joystick/iforce/iforce-usb.c XMIT_INC(iforce->xmit.tail, 1); tail 40 drivers/input/joystick/iforce/iforce-usb.c n = iforce->xmit.buf[iforce->xmit.tail]; tail 41 drivers/input/joystick/iforce/iforce-usb.c XMIT_INC(iforce->xmit.tail, 1); tail 47 drivers/input/joystick/iforce/iforce-usb.c c = CIRC_CNT_TO_END(iforce->xmit.head, iforce->xmit.tail, XMIT_SIZE); tail 51 drivers/input/joystick/iforce/iforce-usb.c &iforce->xmit.buf[iforce->xmit.tail], tail 58 drivers/input/joystick/iforce/iforce-usb.c XMIT_INC(iforce->xmit.tail, n); tail 62 drivers/input/misc/uinput.c unsigned char tail; tail 628 drivers/input/misc/uinput.c have_event = udev->head != udev->tail; tail 630 drivers/input/misc/uinput.c *event = udev->buff[udev->tail]; tail 631 drivers/input/misc/uinput.c udev->tail = (udev->tail + 1) % UINPUT_BUFFER_SIZE; tail 673 drivers/input/misc/uinput.c else if (udev->head == udev->tail && tail 686 drivers/input/misc/uinput.c udev->head != udev->tail || tail 699 drivers/input/misc/uinput.c if (udev->head != udev->tail) tail 100 drivers/input/mousedev.c unsigned int head, tail; tail 278 drivers/input/mousedev.c if (new_head != client->tail) { tail 573 drivers/input/mousedev.c struct mousedev_motion *p = &client->packets[client->tail]; tail 620 drivers/input/mousedev.c if (client->tail == client->head) { tail 624 drivers/input/mousedev.c client->tail = (client->tail + 1) % PACKET_QUEUE_LEN; tail 52 drivers/input/serio/sa1111ps2.c unsigned int tail; tail 97 drivers/input/serio/sa1111ps2.c if (ps2if->head == ps2if->tail) { tail 101 drivers/input/serio/sa1111ps2.c writel_relaxed(ps2if->buf[ps2if->tail], ps2if->base + PS2DATA); tail 102 drivers/input/serio/sa1111ps2.c ps2if->tail = (ps2if->tail + 1) & (sizeof(ps2if->buf) - 1); tail 127 drivers/input/serio/sa1111ps2.c if (ps2if->head == ps2if->tail) tail 130 drivers/input/serio/sa1111ps2.c if (head != ps2if->tail) { tail 30 drivers/input/serio/serio_raw.c unsigned int tail, head; tail 146 drivers/input/serio/serio_raw.c empty = serio_raw->head == serio_raw->tail; tail 148 drivers/input/serio/serio_raw.c *c = serio_raw->queue[serio_raw->tail]; tail 149 drivers/input/serio/serio_raw.c serio_raw->tail = (serio_raw->tail + 1) % SERIO_RAW_QUEUE_LEN; tail 170 drivers/input/serio/serio_raw.c if (serio_raw->head == serio_raw->tail && tail 188 drivers/input/serio/serio_raw.c 
serio_raw->head != serio_raw->tail || tail 248 drivers/input/serio/serio_raw.c if (serio_raw->head != serio_raw->tail) tail 280 drivers/input/serio/serio_raw.c if (likely(head != serio_raw->tail)) { tail 42 drivers/input/serio/userio.c u8 tail; tail 65 drivers/input/serio/userio.c if (userio->head == userio->tail) tail 141 drivers/input/serio/userio.c userio->tail, tail 145 drivers/input/serio/userio.c memcpy(buf, &userio->buf[userio->tail], copylen); tail 146 drivers/input/serio/userio.c userio->tail = (userio->tail + copylen) % tail 167 drivers/input/serio/userio.c userio->head != userio->tail); tail 257 drivers/input/serio/userio.c if (userio->head != userio->tail) tail 643 drivers/iommu/amd_iommu.c u32 head, tail; tail 646 drivers/iommu/amd_iommu.c tail = readl(iommu->mmio_base + MMIO_EVT_TAIL_OFFSET); tail 648 drivers/iommu/amd_iommu.c while (head != tail) { tail 676 drivers/iommu/amd_iommu.c u32 head, tail; tail 682 drivers/iommu/amd_iommu.c tail = readl(iommu->mmio_base + MMIO_PPR_TAIL_OFFSET); tail 684 drivers/iommu/amd_iommu.c while (head != tail) { tail 721 drivers/iommu/amd_iommu.c tail = readl(iommu->mmio_base + MMIO_PPR_TAIL_OFFSET); tail 738 drivers/iommu/amd_iommu.c u32 head, tail, cnt = 0; tail 744 drivers/iommu/amd_iommu.c tail = readl(iommu->mmio_base + MMIO_GA_TAIL_OFFSET); tail 746 drivers/iommu/amd_iommu.c while (head != tail) { tail 1159 drivers/iommu/dmar.c int head, tail; tail 1202 drivers/iommu/dmar.c tail = readl(iommu->reg + DMAR_IQT_REG); tail 1203 drivers/iommu/dmar.c tail = ((tail >> shift) - 1 + QI_LENGTH) % QI_LENGTH; tail 1211 drivers/iommu/dmar.c } while (head != tail); tail 535 drivers/iommu/intel-svm.c int head, tail, handled = 0; tail 541 drivers/iommu/intel-svm.c tail = dmar_readq(iommu->reg + DMAR_PQT_REG) & PRQ_RING_MASK; tail 543 drivers/iommu/intel-svm.c while (head != tail) { tail 665 drivers/iommu/intel-svm.c dmar_writeq(iommu->reg + DMAR_PQH_REG, tail); tail 100 drivers/iommu/iova.c fq->tail = 0; tail 460 drivers/iommu/iova.c for ((i) = (fq)->head; (i) != (fq)->tail; (i) = ((i) + 1) % IOVA_FQ_SIZE) tail 465 drivers/iommu/iova.c return (((fq->tail + 1) % IOVA_FQ_SIZE) == fq->head); tail 470 drivers/iommu/iova.c unsigned idx = fq->tail; tail 474 drivers/iommu/iova.c fq->tail = (idx + 1) % IOVA_FQ_SIZE; tail 105 drivers/iommu/virtio-iommu.c struct virtio_iommu_req_tail *tail = buf + len - sizeof(*tail); tail 107 drivers/iommu/virtio-iommu.c switch (tail->status) { tail 131 drivers/iommu/virtio-iommu.c struct virtio_iommu_req_tail *tail = buf + len - sizeof(*tail); tail 133 drivers/iommu/virtio-iommu.c tail->status = status; tail 184 drivers/lightnvm/pblk-rb.c #define pblk_rb_ring_count(head, tail, size) CIRC_CNT(head, tail, size) tail 185 drivers/lightnvm/pblk-rb.c #define pblk_rb_ring_space(rb, head, tail, size) \ tail 186 drivers/lightnvm/pblk-rb.c (CIRC_SPACE(head, tail, size)) tail 113 drivers/md/dm-cache-policy-smq.c unsigned head, tail; tail 119 drivers/md/dm-cache-policy-smq.c l->head = l->tail = INDEXER_NULL; tail 129 drivers/md/dm-cache-policy-smq.c return to_entry(es, l->tail); tail 157 drivers/md/dm-cache-policy-smq.c l->head = l->tail = to_index(es, e); tail 165 drivers/md/dm-cache-policy-smq.c struct entry *tail = l_tail(es, l); tail 168 drivers/md/dm-cache-policy-smq.c e->prev = l->tail; tail 170 drivers/md/dm-cache-policy-smq.c if (tail) tail 171 drivers/md/dm-cache-policy-smq.c tail->next = l->tail = to_index(es, e); tail 173 drivers/md/dm-cache-policy-smq.c l->head = l->tail = to_index(es, e); tail 210 
drivers/md/dm-cache-policy-smq.c l->tail = e->prev; tail 686 drivers/media/dvb-core/dvb_net.c h->priv->ule_skb->tail -= 4; tail 821 drivers/media/dvb-core/dvb_net.c const u8 *tail; tail 836 drivers/media/dvb-core/dvb_net.c tail = skb_tail_pointer(h.priv->ule_skb); tail 837 drivers/media/dvb-core/dvb_net.c expected_crc = *(tail - 4) << 24 | tail 838 drivers/media/dvb-core/dvb_net.c *(tail - 3) << 16 | tail 839 drivers/media/dvb-core/dvb_net.c *(tail - 2) << 8 | tail 840 drivers/media/dvb-core/dvb_net.c *(tail - 1); tail 306 drivers/media/pci/ivtv/ivtv-driver.h unsigned int tail; tail 18 drivers/media/pci/ivtv/ivtv-udma.c dma_page->tail = 1 + ((first+size-1) & ~PAGE_MASK); tail 22 drivers/media/pci/ivtv/ivtv-udma.c if (dma_page->page_count == 1) dma_page->tail -= dma_page->offset; tail 38 drivers/media/pci/ivtv/ivtv-udma.c dma_page->tail : PAGE_SIZE - offset; tail 379 drivers/media/pci/ivtv/ivtvfb.c u16 lead = 0, tail = 0; tail 422 drivers/media/pci/ivtv/ivtvfb.c tail = (count - lead) & 3; tail 424 drivers/media/pci/ivtv/ivtvfb.c dma_size = count - lead - tail; tail 432 drivers/media/pci/ivtv/ivtvfb.c if (tail && copy_from_user(dst, buf, tail)) tail 158 drivers/media/pci/tw5864/tw5864-h264.c int *tail_nb_bits, u8 *tail) tail 187 drivers/media/pci/tw5864/tw5864-h264.c *tail = ((s->ptr[0]) << s->bits_left); tail 190 drivers/media/pci/tw5864/tw5864-h264.c *tail = 0; tail 232 drivers/media/pci/tw5864/tw5864-h264.c int *tail_nb_bits, u8 *tail) tail 247 drivers/media/pci/tw5864/tw5864-h264.c tail); tail 1258 drivers/media/pci/tw5864/tw5864-video.c &input->tail_nb_bits, &input->tail); tail 1411 drivers/media/pci/tw5864/tw5864-video.c dst[0] = (input->tail & tail_mask) | (vlc_first_byte & vlc_mask); tail 102 drivers/media/pci/tw5864/tw5864.h u8 tail; tail 195 drivers/media/pci/tw5864/tw5864.h int *tail_nb_bits, u8 *tail); tail 745 drivers/media/usb/ttusb-dec/ttusb_dec.c int tail = 4; tail 751 drivers/media/usb/ttusb-dec/ttusb_dec.c tail++; tail 754 drivers/media/usb/ttusb-dec/ttusb_dec.c dec->packet_payload_length + tail) { tail 84 drivers/mfd/pcf50633-adc.c int head, tail; tail 89 drivers/mfd/pcf50633-adc.c tail = adc->queue_tail; tail 91 drivers/mfd/pcf50633-adc.c if (adc->queue[tail]) { tail 97 drivers/mfd/pcf50633-adc.c adc->queue[tail] = req; tail 98 drivers/mfd/pcf50633-adc.c if (head == tail) tail 100 drivers/mfd/pcf50633-adc.c adc->queue_tail = (tail + 1) & (PCF50633_MAX_ADC_FIFO_DEPTH - 1); tail 78 drivers/misc/hpilo.c if (!(fifo_q->fifobar[(fifo_q->tail + 1) & fifo_q->imask] tail 80 drivers/misc/hpilo.c fifo_q->fifobar[fifo_q->tail & fifo_q->imask] |= tail 82 drivers/misc/hpilo.c fifo_q->tail += 1; tail 220 drivers/misc/hpilo.c fifo_q->tail = 0; tail 172 drivers/misc/hpilo.h u64 tail; tail 16 drivers/misc/mic/scif/scif_rb.c #define scif_rb_ring_cnt(head, tail, size) CIRC_CNT(head, tail, size) tail 17 drivers/misc/mic/scif/scif_rb.c #define scif_rb_ring_space(head, tail, size) CIRC_SPACE(head, tail, size) tail 2532 drivers/misc/vmw_vmci/vmci_queue_pair.c u64 tail; tail 2551 drivers/misc/vmw_vmci/vmci_queue_pair.c tail = vmci_q_header_producer_tail(produce_q->q_header); tail 2552 drivers/misc/vmw_vmci/vmci_queue_pair.c if (likely(tail + written < produce_q_size)) { tail 2553 drivers/misc/vmw_vmci/vmci_queue_pair.c result = qp_memcpy_to_queue_iter(produce_q, tail, from, written); tail 2557 drivers/misc/vmw_vmci/vmci_queue_pair.c const size_t tmp = (size_t) (produce_q_size - tail); tail 2559 drivers/misc/vmw_vmci/vmci_queue_pair.c result = qp_memcpy_to_queue_iter(produce_q, tail, from, tmp); 
tail 2367 drivers/net/ethernet/alteon/acenic.c struct sk_buff *tail, u32 idx) tail 2377 drivers/net/ethernet/alteon/acenic.c info->skb = tail; tail 123 drivers/net/ethernet/amazon/ena/ena_com.c sq->tail = 0; tail 240 drivers/net/ethernet/amazon/ena/ena_com.c tail_masked = admin_queue->sq.tail & queue_size_mask; tail 274 drivers/net/ethernet/amazon/ena/ena_com.c admin_queue->sq.tail++; tail 277 drivers/net/ethernet/amazon/ena/ena_com.c if (unlikely((admin_queue->sq.tail & queue_size_mask) == 0)) tail 280 drivers/net/ethernet/amazon/ena/ena_com.c writel(admin_queue->sq.tail, admin_queue->sq.db_addr); tail 410 drivers/net/ethernet/amazon/ena/ena_com.c io_sq->tail = 0; tail 200 drivers/net/ethernet/amazon/ena/ena_com.h u16 tail; tail 225 drivers/net/ethernet/amazon/ena/ena_com.h u16 tail; tail 67 drivers/net/ethernet/amazon/ena/ena_eth_com.c tail_masked = io_sq->tail & (io_sq->q_depth - 1); tail 82 drivers/net/ethernet/amazon/ena/ena_eth_com.c dst_tail_mask = io_sq->tail & (io_sq->q_depth - 1); tail 105 drivers/net/ethernet/amazon/ena/ena_eth_com.c io_sq->tail++; tail 108 drivers/net/ethernet/amazon/ena/ena_eth_com.c if (unlikely((io_sq->tail & (io_sq->q_depth - 1)) == 0)) tail 233 drivers/net/ethernet/amazon/ena/ena_eth_com.c io_sq->tail++; tail 236 drivers/net/ethernet/amazon/ena/ena_eth_com.c if (unlikely((io_sq->tail & (io_sq->q_depth - 1)) == 0)) tail 378 drivers/net/ethernet/amazon/ena/ena_eth_com.c u16 start_tail = io_sq->tail; tail 416 drivers/net/ethernet/amazon/ena/ena_eth_com.c *nb_hw_desc = io_sq->tail - start_tail; tail 510 drivers/net/ethernet/amazon/ena/ena_eth_com.c *nb_hw_desc = io_sq->tail - start_tail; tail 100 drivers/net/ethernet/amazon/ena/ena_eth_com.h u16 tail, next_to_comp, cnt; tail 103 drivers/net/ethernet/amazon/ena/ena_eth_com.h tail = io_sq->tail; tail 104 drivers/net/ethernet/amazon/ena/ena_eth_com.h cnt = tail - next_to_comp; tail 177 drivers/net/ethernet/amazon/ena/ena_eth_com.h u16 tail = io_sq->tail; tail 180 drivers/net/ethernet/amazon/ena/ena_eth_com.h io_sq->qid, tail); tail 182 drivers/net/ethernet/amazon/ena/ena_eth_com.h writel(tail, io_sq->db_addr); tail 72 drivers/net/ethernet/apm/xgene-v2/main.c u8 tail = ring->tail; tail 79 drivers/net/ethernet/apm/xgene-v2/main.c raw_desc = &ring->raw_desc[tail]; tail 93 drivers/net/ethernet/apm/xgene-v2/main.c ring->pkt_info[tail].skb = skb; tail 94 drivers/net/ethernet/apm/xgene-v2/main.c ring->pkt_info[tail].dma_addr = dma_addr; tail 106 drivers/net/ethernet/apm/xgene-v2/main.c tail = (tail + 1) & slots; tail 109 drivers/net/ethernet/apm/xgene-v2/main.c ring->tail = tail; tail 181 drivers/net/ethernet/apm/xgene-v2/main.c u8 tail; tail 185 drivers/net/ethernet/apm/xgene-v2/main.c tail = tx_ring->tail; tail 187 drivers/net/ethernet/apm/xgene-v2/main.c raw_desc = &tx_ring->raw_desc[tail]; tail 210 drivers/net/ethernet/apm/xgene-v2/main.c tx_ring->pkt_info[tail].skb = skb; tail 211 drivers/net/ethernet/apm/xgene-v2/main.c tx_ring->pkt_info[tail].dma_addr = dma_addr; tail 212 drivers/net/ethernet/apm/xgene-v2/main.c tx_ring->pkt_info[tail].pkt_buf = pkt_buf; tail 222 drivers/net/ethernet/apm/xgene-v2/main.c tx_ring->tail = (tail + 1) & (XGENE_ENET_NUM_DESC - 1); tail 43 drivers/net/ethernet/apm/xgene-v2/ring.c ring->tail = 0; tail 55 drivers/net/ethernet/apm/xgene-v2/ring.c ring->tail = 0; tail 77 drivers/net/ethernet/apm/xgene-v2/ring.h u8 tail; tail 76 drivers/net/ethernet/apm/xgene/xgene_enet_main.c u32 slots, tail; tail 87 drivers/net/ethernet/apm/xgene/xgene_enet_main.c tail = buf_pool->tail; tail 90 
drivers/net/ethernet/apm/xgene/xgene_enet_main.c raw_desc = &buf_pool->raw_desc16[tail]; tail 108 drivers/net/ethernet/apm/xgene/xgene_enet_main.c buf_pool->frag_page[tail] = page; tail 109 drivers/net/ethernet/apm/xgene/xgene_enet_main.c tail = (tail + 1) & slots; tail 113 drivers/net/ethernet/apm/xgene/xgene_enet_main.c buf_pool->tail = tail; tail 127 drivers/net/ethernet/apm/xgene/xgene_enet_main.c u32 tail = buf_pool->tail; tail 140 drivers/net/ethernet/apm/xgene/xgene_enet_main.c raw_desc = &buf_pool->raw_desc16[tail]; tail 153 drivers/net/ethernet/apm/xgene/xgene_enet_main.c buf_pool->rx_skb[tail] = skb; tail 158 drivers/net/ethernet/apm/xgene/xgene_enet_main.c tail = (tail + 1) & slots; tail 162 drivers/net/ethernet/apm/xgene/xgene_enet_main.c buf_pool->tail = tail; tail 403 drivers/net/ethernet/apm/xgene/xgene_enet_main.c return &ring->cp_ring->frag_dma_addr[ring->tail * MAX_SKB_FRAGS]; tail 415 drivers/net/ethernet/apm/xgene/xgene_enet_main.c u16 tail = tx_ring->tail; tail 424 drivers/net/ethernet/apm/xgene/xgene_enet_main.c raw_desc = &tx_ring->raw_desc[tail]; tail 425 drivers/net/ethernet/apm/xgene/xgene_enet_main.c tail = (tail + 1) & (tx_ring->slots - 1); tail 454 drivers/net/ethernet/apm/xgene/xgene_enet_main.c exp_desc = (void *)&tx_ring->raw_desc[tail]; tail 455 drivers/net/ethernet/apm/xgene/xgene_enet_main.c tail = (tail + 1) & (tx_ring->slots - 1); tail 541 drivers/net/ethernet/apm/xgene/xgene_enet_main.c SET_VAL(USERINFO, tx_ring->tail)); tail 542 drivers/net/ethernet/apm/xgene/xgene_enet_main.c tx_ring->cp_ring->cp_skb[tx_ring->tail] = skb; tail 544 drivers/net/ethernet/apm/xgene/xgene_enet_main.c tx_ring->tail = tail; tail 96 drivers/net/ethernet/apm/xgene/xgene_enet_main.h u16 tail; tail 3078 drivers/net/ethernet/broadcom/bnx2.c unsigned int tail = 4 - frag_len; tail 3084 drivers/net/ethernet/broadcom/bnx2.c skb->len -= tail; tail 3086 drivers/net/ethernet/broadcom/bnx2.c skb->tail -= tail; tail 3090 drivers/net/ethernet/broadcom/bnx2.c skb_frag_size_sub(frag, tail); tail 3091 drivers/net/ethernet/broadcom/bnx2.c skb->data_len -= tail; tail 965 drivers/net/ethernet/broadcom/bnxt/bnxt.c skb->tail += payload; tail 733 drivers/net/ethernet/cadence/macb_main.c unsigned int tail; tail 762 drivers/net/ethernet/cadence/macb_main.c for (tail = queue->tx_tail; tail != queue->tx_head; tail++) { tail 765 drivers/net/ethernet/cadence/macb_main.c desc = macb_tx_desc(queue, tail); tail 767 drivers/net/ethernet/cadence/macb_main.c tx_skb = macb_tx_skb(queue, tail); tail 774 drivers/net/ethernet/cadence/macb_main.c tail++; tail 775 drivers/net/ethernet/cadence/macb_main.c tx_skb = macb_tx_skb(queue, tail); tail 784 drivers/net/ethernet/cadence/macb_main.c macb_tx_ring_wrap(bp, tail), tail 837 drivers/net/ethernet/cadence/macb_main.c unsigned int tail; tail 853 drivers/net/ethernet/cadence/macb_main.c for (tail = queue->tx_tail; tail != head; tail++) { tail 859 drivers/net/ethernet/cadence/macb_main.c desc = macb_tx_desc(queue, tail); tail 873 drivers/net/ethernet/cadence/macb_main.c for (;; tail++) { tail 874 drivers/net/ethernet/cadence/macb_main.c tx_skb = macb_tx_skb(queue, tail); tail 888 drivers/net/ethernet/cadence/macb_main.c macb_tx_ring_wrap(bp, tail), tail 908 drivers/net/ethernet/cadence/macb_main.c queue->tx_tail = tail; tail 1196 drivers/net/ethernet/cadence/macb_main.c unsigned int tail; tail 1199 drivers/net/ethernet/cadence/macb_main.c for (tail = queue->rx_tail; budget > 0; tail++) { tail 1200 drivers/net/ethernet/cadence/macb_main.c struct macb_dma_desc *desc = 
macb_rx_desc(queue, tail); tail 1216 drivers/net/ethernet/cadence/macb_main.c discard_partial_frame(queue, first_frag, tail); tail 1217 drivers/net/ethernet/cadence/macb_main.c first_frag = tail; tail 1228 drivers/net/ethernet/cadence/macb_main.c dropped = macb_rx_frame(queue, napi, first_frag, tail); tail 1264 drivers/net/ethernet/cadence/macb_main.c queue->rx_tail = tail; tail 1356 drivers/net/ethernet/cadence/macb_main.c unsigned int tail = queue->tx_tail; tail 1362 drivers/net/ethernet/cadence/macb_main.c if (head == tail) tail 2675 drivers/net/ethernet/cadence/macb_main.c unsigned int tail, head; tail 2681 drivers/net/ethernet/cadence/macb_main.c tail = macb_tx_ring_wrap(bp, bp->queues[0].tx_tail); tail 2693 drivers/net/ethernet/cadence/macb_main.c regs_buff[8] = tail; tail 2695 drivers/net/ethernet/cadence/macb_main.c regs_buff[10] = macb_tx_dma(&bp->queues[0], tail); tail 299 drivers/net/ethernet/cadence/macb_ptp.c unsigned long tail = READ_ONCE(queue->tx_ts_tail); tail 307 drivers/net/ethernet/cadence/macb_ptp.c if (CIRC_SPACE(head, tail, PTP_TS_BUFFER_SIZE) == 0) tail 330 drivers/net/ethernet/cadence/macb_ptp.c unsigned long head, tail; tail 335 drivers/net/ethernet/cadence/macb_ptp.c tail = queue->tx_ts_tail; tail 337 drivers/net/ethernet/cadence/macb_ptp.c while (CIRC_CNT(head, tail, PTP_TS_BUFFER_SIZE)) { tail 338 drivers/net/ethernet/cadence/macb_ptp.c tx_ts = &queue->tx_timestamps[tail]; tail 344 drivers/net/ethernet/cadence/macb_ptp.c (tail + 1) & (PTP_TS_BUFFER_SIZE - 1)); tail 345 drivers/net/ethernet/cadence/macb_ptp.c tail = queue->tx_ts_tail; tail 272 drivers/net/ethernet/cavium/thunder/nicvf_queues.c rbdr->tail = 0; tail 304 drivers/net/ethernet/cavium/thunder/nicvf_queues.c rbdr->tail = idx - 1; tail 320 drivers/net/ethernet/cavium/thunder/nicvf_queues.c int head, tail; tail 333 drivers/net/ethernet/cavium/thunder/nicvf_queues.c tail = rbdr->tail; tail 336 drivers/net/ethernet/cavium/thunder/nicvf_queues.c while (head != tail) { tail 348 drivers/net/ethernet/cavium/thunder/nicvf_queues.c desc = GET_RBDR_DESC(rbdr, tail); tail 383 drivers/net/ethernet/cavium/thunder/nicvf_queues.c int tail, qcount; tail 412 drivers/net/ethernet/cavium/thunder/nicvf_queues.c tail = nicvf_queue_reg_read(nic, NIC_QSET_RBDR_0_1_TAIL, rbdr_idx) >> 3; tail 414 drivers/net/ethernet/cavium/thunder/nicvf_queues.c tail++; tail 415 drivers/net/ethernet/cavium/thunder/nicvf_queues.c tail &= (rbdr->dmem.q_len - 1); tail 420 drivers/net/ethernet/cavium/thunder/nicvf_queues.c desc = GET_RBDR_DESC(rbdr, tail); tail 519 drivers/net/ethernet/cavium/thunder/nicvf_queues.c sq->tail = 0; tail 592 drivers/net/ethernet/cavium/thunder/nicvf_queues.c while (sq->head != sq->tail) { tail 669 drivers/net/ethernet/cavium/thunder/nicvf_queues.c rbdr->tail = nicvf_queue_reg_read(nic, tail 1122 drivers/net/ethernet/cavium/thunder/nicvf_queues.c qentry = sq->tail; tail 1127 drivers/net/ethernet/cavium/thunder/nicvf_queues.c sq->tail += desc_cnt; tail 1128 drivers/net/ethernet/cavium/thunder/nicvf_queues.c sq->tail &= (sq->dmem.q_len - 1); tail 1137 drivers/net/ethernet/cavium/thunder/nicvf_queues.c sq->tail = qentry; tail 1182 drivers/net/ethernet/cavium/thunder/nicvf_queues.c u64 head, tail; tail 1188 drivers/net/ethernet/cavium/thunder/nicvf_queues.c tail = nicvf_queue_reg_read(nic, NIC_QSET_SQ_0_7_TAIL, qidx) >> 4; tail 228 drivers/net/ethernet/cavium/thunder/nicvf_queues.h u32 tail; tail 271 drivers/net/ethernet/cavium/thunder/nicvf_queues.h u32 tail; tail 129 drivers/net/ethernet/emulex/benet/be.h u32 tail, head; 
tail 159 drivers/net/ethernet/emulex/benet/be.h return q->dma_mem.va + q->tail * q->entry_size; tail 179 drivers/net/ethernet/emulex/benet/be.h index_inc(&q->tail, q->len); tail 1434 drivers/net/ethernet/emulex/benet/be_main.c i, txo->q.head, txo->q.tail, tail 1449 drivers/net/ethernet/emulex/benet/be_main.c i, txo->cq.head, txo->cq.tail, tail 2283 drivers/net/ethernet/emulex/benet/be_main.c u32 frag_idx = rxq->tail; tail 2343 drivers/net/ethernet/emulex/benet/be_main.c skb->tail += curr_frag_len; tail 2355 drivers/net/ethernet/emulex/benet/be_main.c skb->tail += hdr_len; tail 2760 drivers/net/ethernet/emulex/benet/be_main.c if (sent_skbs[txq->tail]) { tail 2764 drivers/net/ethernet/emulex/benet/be_main.c skb = sent_skbs[txq->tail]; tail 2765 drivers/net/ethernet/emulex/benet/be_main.c sent_skbs[txq->tail] = NULL; tail 2771 drivers/net/ethernet/emulex/benet/be_main.c frag_index = txq->tail; tail 2823 drivers/net/ethernet/emulex/benet/be_main.c rxq->tail = 0; tail 2914 drivers/net/ethernet/emulex/benet/be_main.c notified_idx = txq->tail; tail 2915 drivers/net/ethernet/emulex/benet/be_main.c end_idx = txq->tail; tail 2929 drivers/net/ethernet/emulex/benet/be_main.c txq->tail = notified_idx; tail 251 drivers/net/ethernet/hisilicon/hip04_eth.c static inline unsigned int tx_count(unsigned int head, unsigned int tail) tail 253 drivers/net/ethernet/hisilicon/hip04_eth.c return (head - tail) % TX_DESC_NUM; tail 105 drivers/net/ethernet/hisilicon/hisi_femac.c unsigned int tail; tail 163 drivers/net/ethernet/hisilicon/hisi_femac.c skb = txq->skb[txq->tail]; tail 169 drivers/net/ethernet/hisilicon/hisi_femac.c hisi_femac_tx_dma_unmap(priv, skb, txq->tail); tail 177 drivers/net/ethernet/hisilicon/hisi_femac.c txq->skb[txq->tail] = NULL; tail 178 drivers/net/ethernet/hisilicon/hisi_femac.c txq->tail = (txq->tail + 1) % txq->num; tail 220 drivers/net/ethernet/hisilicon/hisi_femac.c if (!CIRC_SPACE(pos, rxq->tail, rxq->num)) tail 253 drivers/net/ethernet/hisilicon/hisi_femac.c pos = rxq->tail; tail 292 drivers/net/ethernet/hisilicon/hisi_femac.c rxq->tail = pos; tail 363 drivers/net/ethernet/hisilicon/hisi_femac.c queue->tail = 0; tail 393 drivers/net/ethernet/hisilicon/hisi_femac.c pos = rxq->tail; tail 410 drivers/net/ethernet/hisilicon/hisi_femac.c rxq->tail = pos; tail 412 drivers/net/ethernet/hisilicon/hisi_femac.c pos = txq->tail; tail 425 drivers/net/ethernet/hisilicon/hisi_femac.c txq->tail = pos; tail 517 drivers/net/ethernet/hisilicon/hisi_femac.c if (unlikely(!CIRC_SPACE(txq->head, txq->tail, tail 67 drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c u32 head, tail; tail 70 drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c tail = dsaf_read_dev(&qs->tx_ring, RCB_REG_TAIL); tail 74 drivers/net/ethernet/hisilicon/hns/hns_dsaf_rcb.c if (tail == head) tail 1625 drivers/net/ethernet/hisilicon/hns/hns_enet.c int head, tail; tail 1657 drivers/net/ethernet/hisilicon/hns/hns_enet.c tail = readl_relaxed(ring->io_base + RCB_REG_TAIL); tail 1659 drivers/net/ethernet/hisilicon/hns/hns_enet.c fetch_num = ring_dist(ring, head, tail); tail 1661 drivers/net/ethernet/hisilicon/hns/hns_enet.c while (head != tail) { tail 118 drivers/net/ethernet/hisilicon/hns3/hclge_mbx.h u32 tail; tail 126 drivers/net/ethernet/hisilicon/hns3/hclge_mbx.h (arq.tail = (arq.tail + 1) % HCLGE_MBX_MAX_ARQ_MSG_SIZE) tail 29 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_cmd.h u32 tail; tail 580 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_mbx.c u32 tail = hclge_read_dev(hw, HCLGE_NIC_CRQ_TAIL_REG); tail 582 
drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_mbx.c return tail == hw->cmq.crq.next_to_use; tail 379 drivers/net/ethernet/hisilicon/hns3/hns3vf/hclgevf_cmd.c hdev->arq.tail = 0; tail 37 drivers/net/ethernet/hisilicon/hns3/hns3vf/hclgevf_cmd.h u32 tail; tail 140 drivers/net/ethernet/hisilicon/hns3/hns3vf/hclgevf_mbx.c u32 tail = hclgevf_read_dev(hw, HCLGEVF_NIC_CRQ_TAIL_REG); tail 142 drivers/net/ethernet/hisilicon/hns3/hns3vf/hclgevf_mbx.c return tail == hw->cmq.crq.next_to_use; tail 227 drivers/net/ethernet/hisilicon/hns3/hns3vf/hclgevf_mbx.c msg_q = hdev->arq.msg_q[hdev->arq.tail]; tail 258 drivers/net/ethernet/hisilicon/hns3/hns3vf/hclgevf_mbx.c u32 tail; tail 266 drivers/net/ethernet/hisilicon/hns3/hns3vf/hclgevf_mbx.c tail = hdev->arq.tail; tail 269 drivers/net/ethernet/hisilicon/hns3/hns3vf/hclgevf_mbx.c while (tail != hdev->arq.head) { tail 614 drivers/net/ethernet/i825xx/ether1.c int start, tail; tail 617 drivers/net/ethernet/i825xx/ether1.c tail = priv(dev)->tx_tail; tail 620 drivers/net/ethernet/i825xx/ether1.c if (tail > priv(dev)->tx_head) tail 623 drivers/net/ethernet/i825xx/ether1.c if (start + size > tail) tail 627 drivers/net/ethernet/i825xx/ether1.c if (priv(dev)->tx_head < tail && (priv(dev)->tx_head + size) > tail) tail 160 drivers/net/ethernet/intel/e1000e/e1000.h void __iomem *tail; tail 612 drivers/net/ethernet/intel/e1000e/netdev.c writel(i, rx_ring->tail); tail 614 drivers/net/ethernet/intel/e1000e/netdev.c if (unlikely(!ret_val && (i != readl(rx_ring->tail)))) { tail 629 drivers/net/ethernet/intel/e1000e/netdev.c writel(i, tx_ring->tail); tail 631 drivers/net/ethernet/intel/e1000e/netdev.c if (unlikely(!ret_val && (i != readl(tx_ring->tail)))) { tail 697 drivers/net/ethernet/intel/e1000e/netdev.c writel(i, rx_ring->tail); tail 797 drivers/net/ethernet/intel/e1000e/netdev.c writel(i << 1, rx_ring->tail); tail 889 drivers/net/ethernet/intel/e1000e/netdev.c writel(i, rx_ring->tail); tail 1145 drivers/net/ethernet/intel/e1000e/netdev.c readl(tx_ring->head), readl(tx_ring->tail), tx_ring->next_to_use, tail 2927 drivers/net/ethernet/intel/e1000e/netdev.c tx_ring->tail = adapter->hw.hw_addr + E1000_TDT(0); tail 2933 drivers/net/ethernet/intel/e1000e/netdev.c writel(0, tx_ring->tail); tail 3251 drivers/net/ethernet/intel/e1000e/netdev.c rx_ring->tail = adapter->hw.hw_addr + E1000_RDT(0); tail 3257 drivers/net/ethernet/intel/e1000e/netdev.c writel(0, rx_ring->tail); tail 5925 drivers/net/ethernet/intel/e1000e/netdev.c writel(tx_ring->next_to_use, tx_ring->tail); tail 113 drivers/net/ethernet/intel/fm10k/fm10k.h u32 __iomem *tail; tail 167 drivers/net/ethernet/intel/fm10k/fm10k_main.c writel(i, rx_ring->tail); tail 1041 drivers/net/ethernet/intel/fm10k/fm10k_main.c writel(i, tx_ring->tail); tail 1127 drivers/net/ethernet/intel/fm10k/fm10k_main.c u32 head, tail; tail 1131 drivers/net/ethernet/intel/fm10k/fm10k_main.c tail = ring->next_to_use; tail 1134 drivers/net/ethernet/intel/fm10k/fm10k_main.c tail = fm10k_read_reg(hw, FM10K_TDT(ring->reg_idx)); tail 1137 drivers/net/ethernet/intel/fm10k/fm10k_main.c return ((head <= tail) ? 
tail : tail + ring->count) - head; tail 17 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c fifo->tail = 0; tail 28 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c return fifo->tail - fifo->head; tail 39 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c return fifo->size + fifo->head - fifo->tail; tail 50 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c return fifo->head == fifo->tail; tail 74 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c return (fifo->tail + offset) & (fifo->size - 1); tail 120 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c fifo->head = fifo->tail; tail 132 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c static u16 fm10k_mbx_index_len(struct fm10k_mbx_info *mbx, u16 head, u16 tail) tail 134 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u16 len = tail - head; tail 137 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (len > tail) tail 153 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u16 tail = (mbx->tail + offset + 1) & ((mbx->mbmem_len << 1) - 1); tail 156 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c return (tail > mbx->tail) ? --tail : ++tail; tail 169 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u16 tail = (mbx->tail - offset - 1) & ((mbx->mbmem_len << 1) - 1); tail 172 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c return (tail < mbx->tail) ? ++tail : --tail; tail 216 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u32 *tail = mbx->rx.buffer + fm10k_fifo_tail_offset(&mbx->rx, 0); tail 222 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c return FM10K_TLV_DWORD_LEN(*tail); tail 240 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u32 *tail = fifo->buffer + end; tail 252 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c memcpy(tail, msg, end << 2); tail 283 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c fifo->tail += len; tail 336 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u16 end, len, tail, mask; tail 344 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c tail = fm10k_mbx_tail_sub(mbx, len); tail 345 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (tail > mask) tail 346 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c tail++; tail 359 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c tail &= mask; tail 360 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (!tail) tail 361 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c tail++; tail 366 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c fm10k_write_reg(hw, mbmem + tail++, *(head++)); tail 385 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u16 mbmem_len, len, ack = fm10k_mbx_index_len(mbx, head, mbx->tail); tail 398 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c mbx->tail = fm10k_mbx_tail_add(mbx, len - ack); tail 428 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u32 *tail = fifo->buffer; tail 439 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c tail += end; tail 442 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c for (end = fifo->size - end; len; tail = fifo->buffer) { tail 452 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c *(tail++) = fm10k_read_reg(hw, mbmem + head++); tail 473 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u16 tail) tail 476 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u16 len, seq = fm10k_mbx_index_len(mbx, mbx->head, tail); tail 505 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c fifo->tail += len; tail 619 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u16 len = mbx->tail_len - fm10k_mbx_index_len(mbx, head, mbx->tail); tail 720 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c memmove(fifo->buffer, fifo->buffer + fifo->tail, mbx->pushed << 2); tail 723 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c fifo->tail -= fifo->head; tail 854 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c 
FM10K_MSG_HDR_FIELD_SET(mbx->tail, TAIL) | tail 880 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c FM10K_MSG_HDR_FIELD_SET(mbx->tail, TAIL) | tail 902 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c FM10K_MSG_HDR_FIELD_SET(mbx->tail, HEAD); tail 952 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u16 type, rsvd0, head, tail, size; tail 957 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c tail = FM10K_MSG_HDR_FIELD_GET(*hdr, TAIL); tail 967 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (tail != mbx->head) tail 975 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (fm10k_mbx_index_len(mbx, head, mbx->tail) > mbx->tail_len) tail 979 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (!tail || (tail == FM10K_MSG_HDR_MASK(TAIL))) tail 981 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (fm10k_mbx_index_len(mbx, mbx->head, tail) < mbx->mbmem_len) tail 995 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (tail) tail 1066 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c ack = fm10k_mbx_index_len(mbx, head, mbx->tail); tail 1084 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c mbx->rx.tail = 0; tail 1179 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c mbx->tail = head; tail 1197 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u16 head, tail; tail 1202 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c tail = FM10K_MSG_HDR_FIELD_GET(*hdr, TAIL); tail 1206 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c mbx->tail = head; tail 1211 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c err = fm10k_mbx_push_tail(hw, mbx, tail); tail 1266 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (head != mbx->tail) tail 1308 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c mbx->tail = head; tail 1323 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c return fm10k_mbx_create_reply(hw, mbx, mbx->tail); tail 1595 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c mbx->tail = 1; tail 1635 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c mbx->mbx_hdr = FM10K_MSG_HDR_FIELD_SET(mbx->tail, SM_TAIL) | tail 1652 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c mbx->mbx_hdr = FM10K_MSG_HDR_FIELD_SET(mbx->tail, SM_TAIL) | tail 1674 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c mbx->tail = 1; tail 1774 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u16 tail, head, ver; tail 1776 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c tail = FM10K_MSG_HDR_FIELD_GET(*hdr, SM_TAIL); tail 1786 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (!tail || tail > FM10K_SM_MBX_FIFO_LEN) tail 1788 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (mbx->tail < head) tail 1790 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (tail < mbx->head) tail 1791 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c tail += mbx->mbmem_len - 1; tail 1792 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (fm10k_mbx_index_len(mbx, head, mbx->tail) > mbx->tail_len) tail 1794 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (fm10k_mbx_index_len(mbx, mbx->head, tail) < mbx->mbmem_len) tail 1879 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u16 tail) tail 1886 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (tail < mbx->head) tail 1887 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c tail += mbmem_len; tail 1890 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c err = fm10k_mbx_push_tail(hw, mbx, tail); tail 1926 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (mbx->tail < head) tail 1942 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c mbx->tail = fm10k_mbx_tail_sub(mbx, mbx->tail_len - tail_len); tail 1947 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c if (mbx->tail > mbmem_len) tail 1948 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c mbx->tail -= mbmem_len; tail 2024 
drivers/net/ethernet/intel/fm10k/fm10k_mbx.c fm10k_sm_mbx_create_reply(hw, mbx, mbx->tail); tail 2041 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c u16 head, tail; tail 2045 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c tail = FM10K_MSG_HDR_FIELD_GET(*hdr, SM_TAIL); tail 2060 drivers/net/ethernet/intel/fm10k/fm10k_mbx.c len = fm10k_sm_mbx_receive(hw, mbx, tail); tail 232 drivers/net/ethernet/intel/fm10k/fm10k_mbx.h u16 tail; tail 266 drivers/net/ethernet/intel/fm10k/fm10k_mbx.h u16 tail, tail_len, pulled; tail 893 drivers/net/ethernet/intel/fm10k/fm10k_pci.c ring->tail = &interface->uc_addr[FM10K_TDT(reg_idx)]; tail 1007 drivers/net/ethernet/intel/fm10k/fm10k_pci.c ring->tail = &interface->uc_addr[FM10K_RDT(reg_idx)]; tail 22 drivers/net/ethernet/intel/i40e/i40e_adminq.c hw->aq.asq.tail = I40E_VF_ATQT1; tail 27 drivers/net/ethernet/intel/i40e/i40e_adminq.c hw->aq.arq.tail = I40E_VF_ARQT1; tail 33 drivers/net/ethernet/intel/i40e/i40e_adminq.c hw->aq.asq.tail = I40E_PF_ATQT; tail 38 drivers/net/ethernet/intel/i40e/i40e_adminq.c hw->aq.arq.tail = I40E_PF_ARQT; tail 276 drivers/net/ethernet/intel/i40e/i40e_adminq.c wr32(hw, hw->aq.asq.tail, 0); tail 305 drivers/net/ethernet/intel/i40e/i40e_adminq.c wr32(hw, hw->aq.arq.tail, 0); tail 314 drivers/net/ethernet/intel/i40e/i40e_adminq.c wr32(hw, hw->aq.arq.tail, hw->aq.num_arq_entries - 1); tail 461 drivers/net/ethernet/intel/i40e/i40e_adminq.c wr32(hw, hw->aq.asq.tail, 0); tail 495 drivers/net/ethernet/intel/i40e/i40e_adminq.c wr32(hw, hw->aq.arq.tail, 0); tail 847 drivers/net/ethernet/intel/i40e/i40e_adminq.c wr32(hw, hw->aq.asq.tail, hw->aq.asq.next_to_use); tail 1019 drivers/net/ethernet/intel/i40e/i40e_adminq.c wr32(hw, hw->aq.arq.tail, ntc); tail 35 drivers/net/ethernet/intel/i40e/i40e_adminq.h u32 tail; tail 2071 drivers/net/ethernet/intel/i40e/i40e_ethtool.c rx_rings[i].tail = hw->hw_addr + I40E_PRTGEN_STATUS; tail 2114 drivers/net/ethernet/intel/i40e/i40e_ethtool.c rx_rings[i].tail = vsi->rx_rings[i]->tail; tail 368 drivers/net/ethernet/intel/i40e/i40e_main.c readl(tx_ring->tail), val); tail 3233 drivers/net/ethernet/intel/i40e/i40e_main.c ring->tail = hw->hw_addr + I40E_QTX_TAIL(pf_q); tail 3344 drivers/net/ethernet/intel/i40e/i40e_main.c ring->tail = hw->hw_addr + I40E_QRX_TAIL(pf_q); tail 3345 drivers/net/ethernet/intel/i40e/i40e_main.c writel(0, ring->tail); tail 156 drivers/net/ethernet/intel/i40e/i40e_txrx.c writel(tx_ring->next_to_use, tx_ring->tail); tail 696 drivers/net/ethernet/intel/i40e/i40e_txrx.c u32 head, tail; tail 700 drivers/net/ethernet/intel/i40e/i40e_txrx.c tail = readl(ring->tail); tail 703 drivers/net/ethernet/intel/i40e/i40e_txrx.c tail = ring->next_to_use; tail 706 drivers/net/ethernet/intel/i40e/i40e_txrx.c if (head != tail) tail 707 drivers/net/ethernet/intel/i40e/i40e_txrx.c return (head < tail) ? 
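The fm10k_mbx.c entries a little above (the fifo used/unused/empty/tail-offset helpers) all rely on the same trick: head and tail are free-running counters over a power-of-two FIFO, so occupancy is plain subtraction and the actual buffer slot is recovered by masking. A minimal standalone sketch of that bookkeeping follows; the struct and function names (demo_fifo, demo_fifo_*) and the 8-entry size are illustrative assumptions, not the driver's API.

/*
 * Standalone sketch of free-running head/tail FIFO accounting as seen
 * in the fm10k_mbx.c entries above. Illustrative names only.
 */
#include <stdint.h>
#include <stdio.h>

#define DEMO_FIFO_SIZE 8            /* must be a power of two */

struct demo_fifo {
	uint32_t buffer[DEMO_FIFO_SIZE];
	uint16_t head;              /* consumer index, free-running */
	uint16_t tail;              /* producer index, free-running */
};

/* entries currently queued */
static unsigned int demo_fifo_used(const struct demo_fifo *fifo)
{
	return (uint16_t)(fifo->tail - fifo->head);
}

/* entries that can still be queued */
static unsigned int demo_fifo_unused(const struct demo_fifo *fifo)
{
	return (uint16_t)(DEMO_FIFO_SIZE + fifo->head - fifo->tail);
}

/* translate an offset from the tail into a buffer index */
static unsigned int demo_fifo_tail_offset(const struct demo_fifo *fifo,
					  uint16_t offset)
{
	return (fifo->tail + offset) & (DEMO_FIFO_SIZE - 1);
}

int main(void)
{
	struct demo_fifo fifo = { .head = 0, .tail = 0 };

	for (uint32_t v = 0; v < 5; v++) {      /* produce while space remains */
		if (!demo_fifo_unused(&fifo))
			break;
		fifo.buffer[demo_fifo_tail_offset(&fifo, 0)] = v;
		fifo.tail++;
	}
	printf("used=%u unused=%u\n", demo_fifo_used(&fifo), demo_fifo_unused(&fifo));

	fifo.head += 2;                         /* consume two entries */
	printf("used=%u unused=%u\n", demo_fifo_used(&fifo), demo_fifo_unused(&fifo));
	return 0;
}

Because the indices are never reduced modulo the size, the subtraction stays correct even after the 16-bit counters wrap, which is why only the buffer access needs the mask.
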
tail 708 drivers/net/ethernet/intel/i40e/i40e_txrx.c tail - head : (tail + ring->count - head); tail 1496 drivers/net/ethernet/intel/i40e/i40e_txrx.c writel(val, rx_ring->tail); tail 2272 drivers/net/ethernet/intel/i40e/i40e_txrx.c writel_relaxed(xdp_ring->next_to_use, xdp_ring->tail); tail 3474 drivers/net/ethernet/intel/i40e/i40e_txrx.c writel(i, tx_ring->tail); tail 365 drivers/net/ethernet/intel/i40e/i40e_txrx.h u8 __iomem *tail; tail 19 drivers/net/ethernet/intel/iavf/iavf_adminq.c hw->aq.asq.tail = IAVF_VF_ATQT1; tail 24 drivers/net/ethernet/intel/iavf/iavf_adminq.c hw->aq.arq.tail = IAVF_VF_ARQT1; tail 263 drivers/net/ethernet/intel/iavf/iavf_adminq.c wr32(hw, hw->aq.asq.tail, 0); tail 292 drivers/net/ethernet/intel/iavf/iavf_adminq.c wr32(hw, hw->aq.arq.tail, 0); tail 301 drivers/net/ethernet/intel/iavf/iavf_adminq.c wr32(hw, hw->aq.arq.tail, hw->aq.num_arq_entries - 1); tail 448 drivers/net/ethernet/intel/iavf/iavf_adminq.c wr32(hw, hw->aq.asq.tail, 0); tail 482 drivers/net/ethernet/intel/iavf/iavf_adminq.c wr32(hw, hw->aq.arq.tail, 0); tail 749 drivers/net/ethernet/intel/iavf/iavf_adminq.c wr32(hw, hw->aq.asq.tail, hw->aq.asq.next_to_use); tail 920 drivers/net/ethernet/intel/iavf/iavf_adminq.c wr32(hw, hw->aq.arq.tail, ntc); tail 35 drivers/net/ethernet/intel/iavf/iavf_adminq.h u32 tail; tail 560 drivers/net/ethernet/intel/iavf/iavf_main.c adapter->tx_rings[i].tail = hw->hw_addr + IAVF_QTX_TAIL1(i); tail 597 drivers/net/ethernet/intel/iavf/iavf_main.c adapter->rx_rings[i].tail = hw->hw_addr + IAVF_QRX_TAIL1(i); tail 115 drivers/net/ethernet/intel/iavf/iavf_txrx.c u32 head, tail; tail 118 drivers/net/ethernet/intel/iavf/iavf_txrx.c tail = readl(ring->tail); tail 120 drivers/net/ethernet/intel/iavf/iavf_txrx.c if (head != tail) tail 121 drivers/net/ethernet/intel/iavf/iavf_txrx.c return (head < tail) ? 
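The i40e_txrx.c, iavf_txrx.c, ice_main.c, fm10k_main.c and ixgbe entries around this point all derive the number of descriptors still pending from the same head/tail arithmetic: head is how far the hardware has consumed, tail is where software last produced, and the difference is taken with wraparound over the ring size. A standalone sketch of that computation is below; the struct and function names are hypothetical.

/*
 * Sketch of the "descriptors still pending" computation repeated in the
 * Intel driver entries above. Illustrative names only.
 */
#include <stdio.h>

struct demo_ring {
	unsigned int count;     /* number of descriptors in the ring     */
	unsigned int head;      /* consumer position (e.g. read from HW) */
	unsigned int tail;      /* producer position (next_to_use)       */
};

static unsigned int demo_ring_pending(const struct demo_ring *ring)
{
	unsigned int head = ring->head, tail = ring->tail;

	if (head == tail)
		return 0;                        /* ring is empty */

	return (head < tail) ? tail - head
			     : tail + ring->count - head;
}

int main(void)
{
	struct demo_ring a = { .count = 512, .head = 10,  .tail = 42 };
	struct demo_ring b = { .count = 512, .head = 500, .tail = 3  }; /* wrapped */

	printf("%u %u\n", demo_ring_pending(&a), demo_ring_pending(&b)); /* 32 15 */
	return 0;
}
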
tail 122 drivers/net/ethernet/intel/iavf/iavf_txrx.c tail - head : (tail + ring->count - head); tail 788 drivers/net/ethernet/intel/iavf/iavf_txrx.c writel(val, rx_ring->tail); tail 2379 drivers/net/ethernet/intel/iavf/iavf_txrx.c writel(i, tx_ring->tail); tail 338 drivers/net/ethernet/intel/iavf/iavf_txrx.h u8 __iomem *tail; tail 9 drivers/net/ethernet/intel/ice/ice_controlq.c (qinfo)->sq.tail = prefix##_ATQT; \ tail 17 drivers/net/ethernet/intel/ice/ice_controlq.c (qinfo)->rq.tail = prefix##_ARQT; \ tail 258 drivers/net/ethernet/intel/ice/ice_controlq.c wr32(hw, ring->tail, 0); tail 302 drivers/net/ethernet/intel/ice/ice_controlq.c wr32(hw, cq->rq.tail, (u32)(cq->num_rq_entries - 1)); tail 467 drivers/net/ethernet/intel/ice/ice_controlq.c wr32(hw, cq->sq.tail, 0); tail 534 drivers/net/ethernet/intel/ice/ice_controlq.c wr32(hw, cq->rq.tail, 0); tail 945 drivers/net/ethernet/intel/ice/ice_controlq.c wr32(hw, cq->sq.tail, cq->sq.next_to_use); tail 1106 drivers/net/ethernet/intel/ice/ice_controlq.c wr32(hw, cq->rq.tail, ntc); tail 56 drivers/net/ethernet/intel/ice/ice_controlq.h u32 tail; tail 591 drivers/net/ethernet/intel/ice/ice_ethtool.c writel_relaxed(tx_ring->next_to_use, tx_ring->tail); tail 2686 drivers/net/ethernet/intel/ice/ice_ethtool.c rx_rings[i].tail = vsi->back->hw.hw_addr + PRTGEN_STATUS; tail 2726 drivers/net/ethernet/intel/ice/ice_ethtool.c rx_rings[i].tail = vsi->rx_rings[i]->tail; tail 100 drivers/net/ethernet/intel/ice/ice_lib.c ring->tail = hw->hw_addr + QRX_TAIL(pf_q); tail 101 drivers/net/ethernet/intel/ice/ice_lib.c writel(0, ring->tail); tail 1743 drivers/net/ethernet/intel/ice/ice_lib.c ring->tail = pf->hw.hw_addr + QTX_COMM_DBELL(pf_q); tail 56 drivers/net/ethernet/intel/ice/ice_main.c u16 head, tail; tail 59 drivers/net/ethernet/intel/ice/ice_main.c tail = ring->next_to_use; tail 61 drivers/net/ethernet/intel/ice/ice_main.c if (head != tail) tail 62 drivers/net/ethernet/intel/ice/ice_main.c return (head < tail) ? 
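Closely related is the tail-doorbell pattern that recurs in the adminq and ring entries above (wr32(hw, hw->aq.asq.tail, ...), writel(next_to_use, ring->tail)): software fills descriptors, advances its producer index with wraparound, then posts that index to the device's tail register so the hardware knows how far it may consume. The sketch below only models the idea; the volatile variable stands in for a real memory-mapped doorbell, and every name is illustrative.

/*
 * Sketch of the tail-doorbell idiom from the entries above. The
 * "register" is a volatile variable standing in for MMIO space.
 */
#include <stdint.h>
#include <stdio.h>

#define DEMO_RING_SIZE 16

struct demo_tx_ring {
	uint32_t desc[DEMO_RING_SIZE];      /* stand-in for DMA descriptors */
	unsigned int next_to_use;           /* software producer index      */
	volatile uint32_t *tail;            /* stand-in for the MMIO tail   */
};

static void demo_tx_post(struct demo_tx_ring *ring, uint32_t value)
{
	ring->desc[ring->next_to_use] = value;

	/* advance with wraparound, as the drivers above do */
	ring->next_to_use++;
	if (ring->next_to_use == DEMO_RING_SIZE)
		ring->next_to_use = 0;

	/*
	 * In the real drivers this is a writel()/wr32() to device memory,
	 * typically ordered after the descriptor writes by a barrier;
	 * the sketch elides that.
	 */
	*ring->tail = ring->next_to_use;
}

int main(void)
{
	static volatile uint32_t fake_tail_reg;
	struct demo_tx_ring ring = { .next_to_use = 0, .tail = &fake_tail_reg };

	demo_tx_post(&ring, 0xabcd);
	demo_tx_post(&ring, 0x1234);
	printf("tail register now %u\n", (unsigned int)fake_tail_reg); /* 2 */
	return 0;
}
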
tail 63 drivers/net/ethernet/intel/ice/ice_main.c tail - head : (tail + ring->count - head); tail 401 drivers/net/ethernet/intel/ice/ice_txrx.c writel(val, rx_ring->tail); tail 1713 drivers/net/ethernet/intel/ice/ice_txrx.c writel(i, tx_ring->tail); tail 173 drivers/net/ethernet/intel/ice/ice_txrx.h u8 __iomem *tail; tail 258 drivers/net/ethernet/intel/igb/igb.h void __iomem *tail; /* pointer to ring tail register */ tail 4143 drivers/net/ethernet/intel/igb/igb_main.c ring->tail = adapter->io_addr + E1000_TDT(reg_idx); tail 4145 drivers/net/ethernet/intel/igb/igb_main.c writel(0, ring->tail); tail 4495 drivers/net/ethernet/intel/igb/igb_main.c ring->tail = adapter->io_addr + E1000_RDT(reg_idx); tail 4497 drivers/net/ethernet/intel/igb/igb_main.c writel(0, ring->tail); tail 6023 drivers/net/ethernet/intel/igb/igb_main.c writel(i, tx_ring->tail); tail 7871 drivers/net/ethernet/intel/igb/igb_main.c readl(tx_ring->tail), tail 8505 drivers/net/ethernet/intel/igb/igb_main.c writel(i, rx_ring->tail); tail 136 drivers/net/ethernet/intel/igbvf/igbvf.h u16 tail; tail 231 drivers/net/ethernet/intel/igbvf/netdev.c writel(i, adapter->hw.hw_addr + rx_ring->tail); tail 519 drivers/net/ethernet/intel/igbvf/netdev.c writel(0, adapter->hw.hw_addr + tx_ring->tail); tail 602 drivers/net/ethernet/intel/igbvf/netdev.c writel(0, adapter->hw.hw_addr + rx_ring->tail); tail 1302 drivers/net/ethernet/intel/igbvf/netdev.c tx_ring->tail = E1000_TDT(0); tail 1383 drivers/net/ethernet/intel/igbvf/netdev.c rx_ring->tail = E1000_RDT(0); tail 2281 drivers/net/ethernet/intel/igbvf/netdev.c writel(i, adapter->hw.hw_addr + tx_ring->tail); tail 253 drivers/net/ethernet/intel/igc/igc.h void __iomem *tail; /* pointer to ring tail register */ tail 540 drivers/net/ethernet/intel/igc/igc_main.c ring->tail = adapter->io_addr + IGC_RDT(reg_idx); tail 542 drivers/net/ethernet/intel/igc/igc_main.c writel(0, ring->tail); tail 619 drivers/net/ethernet/intel/igc/igc_main.c ring->tail = adapter->io_addr + IGC_TDT(reg_idx); tail 621 drivers/net/ethernet/intel/igc/igc_main.c writel(0, ring->tail); tail 1059 drivers/net/ethernet/intel/igc/igc_main.c writel(i, tx_ring->tail); tail 1553 drivers/net/ethernet/intel/igc/igc_main.c writel(i, rx_ring->tail); tail 1827 drivers/net/ethernet/intel/igc/igc_main.c readl(tx_ring->tail), tail 320 drivers/net/ethernet/intel/ixgbe/ixgbe.h u8 __iomem *tail; tail 818 drivers/net/ethernet/intel/ixgbe/ixgbe.h struct sk_buff *tail; tail 1018 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c unsigned int head, tail; tail 1021 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c tail = ring->next_to_use; tail 1023 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c return ((head <= tail) ? 
tail : tail + ring->count) - head; tail 1640 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c writel(i, rx_ring->tail); tail 1813 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c skb->tail += pull_len; tail 2416 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c writel(ring->next_to_use, ring->tail); tail 3501 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c ring->tail = adapter->io_addr + IXGBE_TDT(reg_idx); tail 4104 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c ring->tail = adapter->io_addr + IXGBE_RDT(reg_idx); tail 8323 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c writel(i, tx_ring->tail); tail 10326 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c writel(ring->next_to_use, ring->tail); tail 375 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c writel(i, rx_ring->tail); tail 539 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c writel(ring->next_to_use, ring->tail); tail 117 drivers/net/ethernet/intel/ixgbevf/ixgbevf.h u8 __iomem *tail; tail 296 drivers/net/ethernet/intel/ixgbevf/ixgbevf.h writel(value, ring->tail); tail 204 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c u32 tail = IXGBE_READ_REG(hw, IXGBE_VFTDT(ring->reg_idx)); tail 206 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c if (head != tail) tail 207 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c return (head < tail) ? tail 208 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c tail - head : (tail + ring->count - head); tail 1706 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c ring->tail = adapter->io_addr + IXGBE_VFTDT(reg_idx); tail 1928 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c ring->tail = adapter->io_addr + IXGBE_VFRDT(reg_idx); tail 1655 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c struct mce *mce, *tail = NULL; tail 1665 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c tail = mce; tail 1684 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c if (!tail) tail 1687 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c hlist_add_behind(&mce->node, &tail->node); tail 459 drivers/net/ethernet/marvell/octeontx2/af/rvu_struct.h u64 tail : 20; tail 461 drivers/net/ethernet/marvell/octeontx2/af/rvu_struct.h u64 tail : 20; tail 2494 drivers/net/ethernet/marvell/sky2.c skb->tail += size; tail 597 drivers/net/ethernet/mellanox/mlx5/core/en.h u32 tail; tail 616 drivers/net/ethernet/mellanox/mlx5/core/en_main.c rq->page_cache.tail = 0; tail 658 drivers/net/ethernet/mellanox/mlx5/core/en_main.c for (i = rq->page_cache.head; i != rq->page_cache.tail; tail 196 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c u32 tail_next = (cache->tail + 1) & (MLX5E_CACHE_SIZE - 1); tail 209 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c cache->page_cache[cache->tail] = *dma_info; tail 210 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c cache->tail = tail_next; tail 220 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c if (unlikely(cache->head == cache->tail)) { tail 851 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c void *tail; tail 858 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c tail = skb_header_pointer(skb, offset, len, tail_padding); tail 859 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c if (unlikely(!tail)) { tail 865 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c skb->csum = csum_block_add(skb->csum, csum_partial(tail, len, 0), offset); tail 1131 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c skb->tail += headlen; tail 1276 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c skb->tail += headlen; tail 46 drivers/net/ethernet/netronome/nfp/flower/metadata.c if (!CIRC_SPACE(ring->head, ring->tail, tail 83 
drivers/net/ethernet/netronome/nfp/flower/metadata.c if (ring->head == ring->tail) { tail 88 drivers/net/ethernet/netronome/nfp/flower/metadata.c memcpy(&temp_stats_id, &ring->buf[ring->tail], NFP_FL_STATS_ELEM_RS); tail 90 drivers/net/ethernet/netronome/nfp/flower/metadata.c memcpy(&ring->buf[ring->tail], &freed_stats_id, NFP_FL_STATS_ELEM_RS); tail 91 drivers/net/ethernet/netronome/nfp/flower/metadata.c ring->tail = (ring->tail + NFP_FL_STATS_ELEM_RS) % tail 141 drivers/net/ethernet/netronome/nfp/flower/metadata.c if (CIRC_SPACE(ring->head, ring->tail, NFP_FLOWER_MASK_ENTRY_RS) == 0) tail 170 drivers/net/ethernet/netronome/nfp/flower/metadata.c if (ring->head == ring->tail) tail 173 drivers/net/ethernet/netronome/nfp/flower/metadata.c memcpy(&temp_id, &ring->buf[ring->tail], NFP_FLOWER_MASK_ELEMENT_RS); tail 182 drivers/net/ethernet/netronome/nfp/flower/metadata.c memcpy(&ring->buf[ring->tail], &freed_id, NFP_FLOWER_MASK_ELEMENT_RS); tail 183 drivers/net/ethernet/netronome/nfp/flower/metadata.c ring->tail = (ring->tail + NFP_FLOWER_MASK_ELEMENT_RS) % tail 79 drivers/net/ethernet/pensando/ionic/ionic_debugfs.c seq_printf(seq, "%d\n", q->tail->index); tail 99 drivers/net/ethernet/pensando/ionic/ionic_debugfs.c seq_printf(seq, "%d\n", cq->tail->index); tail 312 drivers/net/ethernet/pensando/ionic/ionic_dev.c cq->tail = cq->info; tail 357 drivers/net/ethernet/pensando/ionic/ionic_dev.c while (cb(cq, cq->tail)) { tail 358 drivers/net/ethernet/pensando/ionic/ionic_dev.c if (cq->tail->last) tail 360 drivers/net/ethernet/pensando/ionic/ionic_dev.c cq->tail = cq->tail->next; tail 395 drivers/net/ethernet/pensando/ionic/ionic_dev.c q->tail = q->info; tail 396 drivers/net/ethernet/pensando/ionic/ionic_dev.c q->head = q->tail; tail 461 drivers/net/ethernet/pensando/ionic/ionic_dev.c unsigned int mask, tail, head; tail 464 drivers/net/ethernet/pensando/ionic/ionic_dev.c tail = q->tail->index; tail 467 drivers/net/ethernet/pensando/ionic/ionic_dev.c return ((pos - tail) & mask) < ((head - tail) & mask); tail 478 drivers/net/ethernet/pensando/ionic/ionic_dev.c if (q->tail->index == q->head->index) tail 485 drivers/net/ethernet/pensando/ionic/ionic_dev.c q->name, stop_index, q->tail->index, q->head->index); tail 488 drivers/net/ethernet/pensando/ionic/ionic_dev.c desc_info = q->tail; tail 489 drivers/net/ethernet/pensando/ionic/ionic_dev.c q->tail = desc_info->next; tail 173 drivers/net/ethernet/pensando/ionic/ionic_dev.h struct ionic_desc_info *tail; tail 209 drivers/net/ethernet/pensando/ionic/ionic_dev.h struct ionic_cq_info *tail; tail 230 drivers/net/ethernet/pensando/ionic/ionic_dev.h unsigned int avail = q->tail->index; tail 179 drivers/net/ethernet/pensando/ionic/ionic_main.c while (adminq->tail != adminq->head) { tail 180 drivers/net/ethernet/pensando/ionic/ionic_main.c memset(adminq->tail->desc, 0, sizeof(union ionic_adminq_cmd)); tail 181 drivers/net/ethernet/pensando/ionic/ionic_main.c adminq->tail->cb = NULL; tail 182 drivers/net/ethernet/pensando/ionic/ionic_main.c adminq->tail->cb_arg = NULL; tail 183 drivers/net/ethernet/pensando/ionic/ionic_main.c adminq->tail = adminq->tail->next; tail 169 drivers/net/ethernet/pensando/ionic/ionic_txrx.c if (q->tail->index == q->head->index) tail 172 drivers/net/ethernet/pensando/ionic/ionic_txrx.c desc_info = q->tail; tail 176 drivers/net/ethernet/pensando/ionic/ionic_txrx.c q->tail = desc_info->next; tail 191 drivers/net/ethernet/pensando/ionic/ionic_txrx.c while (ionic_rx_service(rxcq, rxcq->tail)) { tail 192 
drivers/net/ethernet/pensando/ionic/ionic_txrx.c if (rxcq->tail->last) tail 194 drivers/net/ethernet/pensando/ionic/ionic_txrx.c rxcq->tail = rxcq->tail->next; tail 290 drivers/net/ethernet/pensando/ionic/ionic_txrx.c for (cur = q->tail; cur != q->head; cur = cur->next) { tail 418 drivers/net/ethernet/pensando/ionic/ionic_txrx.c struct ionic_txq_comp *comp = cq->tail->cq_desc; tail 432 drivers/net/ethernet/pensando/ionic/ionic_txrx.c desc_info = q->tail; tail 433 drivers/net/ethernet/pensando/ionic/ionic_txrx.c q->tail = desc_info->next; tail 434 drivers/net/ethernet/pensando/ionic/ionic_txrx.c ionic_tx_clean(q, desc_info, cq->tail, tail 440 drivers/net/ethernet/pensando/ionic/ionic_txrx.c if (cq->tail->last) tail 443 drivers/net/ethernet/pensando/ionic/ionic_txrx.c cq->tail = cq->tail->next; tail 444 drivers/net/ethernet/pensando/ionic/ionic_txrx.c comp = cq->tail->cq_desc; tail 81 drivers/net/ethernet/qualcomm/qca_debug.c else if (qca->txr.skb[qca->txr.tail]) tail 461 drivers/net/ethernet/qualcomm/qca_spi.c if (txr->skb[txr->tail]) tail 487 drivers/net/ethernet/qualcomm/qca_spi.c qca->txr.tail = 0; tail 730 drivers/net/ethernet/qualcomm/qca_spi.c if (qca->txr.skb[qca->txr.tail]) { tail 766 drivers/net/ethernet/qualcomm/qca_spi.c new_tail = qca->txr.tail + 1; tail 770 drivers/net/ethernet/qualcomm/qca_spi.c qca->txr.skb[qca->txr.tail] = skb; tail 771 drivers/net/ethernet/qualcomm/qca_spi.c qca->txr.tail = new_tail; tail 59 drivers/net/ethernet/qualcomm/qca_spi.h u16 tail; tail 6206 drivers/net/ethernet/realtek/r8169_main.c skb->tail += pkt_size; tail 31 drivers/net/ethernet/rocker/rocker.h u32 tail; tail 374 drivers/net/ethernet/rocker/rocker_main.c if (head == info->tail) tail 392 drivers/net/ethernet/rocker/rocker_main.c BUG_ON(head == info->tail); tail 403 drivers/net/ethernet/rocker/rocker_main.c if (info->tail == info->head) tail 405 drivers/net/ethernet/rocker/rocker_main.c desc_info = &info->desc_info[info->tail]; tail 408 drivers/net/ethernet/rocker/rocker_main.c info->tail = __pos_inc(info->tail, info->size); tail 438 drivers/net/ethernet/rocker/rocker_main.c info->tail = 0; tail 479 drivers/net/ethernet/rocker/rocker_main.c BUG_ON(info->head || info->tail); tail 283 drivers/net/ethernet/socionext/netsec.c u16 head, tail; tail 632 drivers/net/ethernet/socionext/netsec.c int tail = dring->tail; tail 639 drivers/net/ethernet/socionext/netsec.c entry = dring->vaddr + DESC_SZ * tail; tail 646 drivers/net/ethernet/socionext/netsec.c desc = &dring->desc[tail]; tail 677 drivers/net/ethernet/socionext/netsec.c dring->tail = (tail + 1) % DESC_NUM; tail 679 drivers/net/ethernet/socionext/netsec.c tail = dring->tail; tail 680 drivers/net/ethernet/socionext/netsec.c entry = dring->vaddr + DESC_SZ * tail; tail 824 drivers/net/ethernet/socionext/netsec.c if (tx_ring->head >= tx_ring->tail) tail 825 drivers/net/ethernet/socionext/netsec.c filled = tx_ring->head - tx_ring->tail; tail 827 drivers/net/ethernet/socionext/netsec.c filled = tx_ring->head + DESC_NUM - tx_ring->tail; tail 940 drivers/net/ethernet/socionext/netsec.c u16 idx = dring->tail; tail 973 drivers/net/ethernet/socionext/netsec.c dring->tail = (dring->tail + 1) % DESC_NUM; tail 1044 drivers/net/ethernet/socionext/netsec.c dring->tail = (dring->tail + 1) % DESC_NUM; tail 1080 drivers/net/ethernet/socionext/netsec.c if (dring->head >= dring->tail) tail 1081 drivers/net/ethernet/socionext/netsec.c used = dring->head - dring->tail; tail 1083 drivers/net/ethernet/socionext/netsec.c used = dring->head + DESC_NUM - dring->tail; tail 
1213 drivers/net/ethernet/socionext/netsec.c dring->tail = 0; tail 593 drivers/net/ethernet/socionext/sni_ave.c skb->tail += AVE_FRAME_HEADROOM; tail 792 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c u32 tail; tail 794 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tail = priv->rx_queue[i].dma_rx_phy + tail 797 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c stmmac_set_rx_tail_ptr(priv, priv->ioaddr, tail, i); tail 109 drivers/net/ethernet/ti/davinci_cpdma.c struct cpdma_desc __iomem *head, *tail; tail 984 drivers/net/ethernet/ti/davinci_cpdma.c struct cpdma_desc __iomem *prev = chan->tail; tail 995 drivers/net/ethernet/ti/davinci_cpdma.c chan->tail = desc; tail 1003 drivers/net/ethernet/ti/davinci_cpdma.c chan->tail = desc; tail 231 drivers/net/ethernet/toshiba/ps3_gelic_net.c chain->tail = (descr - 1); tail 343 drivers/net/ethernet/toshiba/ps3_gelic_net.c chain->tail = start_descr; tail 482 drivers/net/ethernet/toshiba/ps3_gelic_net.c chain->tail = card->rx_top->prev; /* point to the last */ tail 546 drivers/net/ethernet/toshiba/ps3_gelic_net.c tx_chain->head != tx_chain->tail && tx_chain->tail; tail 547 drivers/net/ethernet/toshiba/ps3_gelic_net.c tx_chain->tail = tx_chain->tail->next) { tail 548 drivers/net/ethernet/toshiba/ps3_gelic_net.c status = gelic_descr_get_status(tx_chain->tail); tail 549 drivers/net/ethernet/toshiba/ps3_gelic_net.c netdev = tx_chain->tail->skb->dev; tail 563 drivers/net/ethernet/toshiba/ps3_gelic_net.c if (tx_chain->tail->skb) { tail 566 drivers/net/ethernet/toshiba/ps3_gelic_net.c tx_chain->tail->skb->len; tail 577 drivers/net/ethernet/toshiba/ps3_gelic_net.c gelic_descr_release_tx(card, tx_chain->tail); tail 681 drivers/net/ethernet/toshiba/ps3_gelic_net.c if (card->tx_chain.tail != card->tx_chain.head->next && tail 1059 drivers/net/ethernet/toshiba/ps3_gelic_net.c chain->tail = descr; tail 1131 drivers/net/ethernet/toshiba/ps3_gelic_net.c gelic_card_kick_txdma(card, card->tx_chain.tail); tail 253 drivers/net/ethernet/toshiba/ps3_gelic_net.h struct gelic_descr *tail; tail 340 drivers/net/ethernet/toshiba/spider_net.c chain->tail = chain->ring; tail 446 drivers/net/ethernet/toshiba/spider_net.c card->rx_chain.tail->bus_addr); tail 517 drivers/net/ethernet/toshiba/spider_net.c struct spider_net_descr *start = chain->tail; tail 663 drivers/net/ethernet/toshiba/spider_net.c if (descr->next == chain->tail->prev) { tail 702 drivers/net/ethernet/toshiba/spider_net.c struct spider_net_descr *descr = card->tx_chain.tail; tail 724 drivers/net/ethernet/toshiba/spider_net.c descr = card->tx_chain.tail; tail 768 drivers/net/ethernet/toshiba/spider_net.c if (chain->tail == chain->head) { tail 772 drivers/net/ethernet/toshiba/spider_net.c descr = chain->tail; tail 810 drivers/net/ethernet/toshiba/spider_net.c chain->tail = descr->next; tail 847 drivers/net/ethernet/toshiba/spider_net.c descr = card->tx_chain.tail; tail 981 drivers/net/ethernet/toshiba/spider_net.c struct spider_net_descr *start= chain->tail; tail 1110 drivers/net/ethernet/toshiba/spider_net.c descr = chain->tail; tail 1119 drivers/net/ethernet/toshiba/spider_net.c chain->tail = descr; tail 1142 drivers/net/ethernet/toshiba/spider_net.c struct spider_net_descr *descr = chain->tail; tail 1155 drivers/net/ethernet/toshiba/spider_net.c chain->tail = descr->next; tail 395 drivers/net/ethernet/toshiba/spider_net.h struct spider_net_descr *tail; tail 1659 drivers/net/ethernet/via/via-velocity.c vptr->tx.tail[j] = vptr->tx.curr[j] = vptr->tx.used[j] = 0; tail 1899 
drivers/net/ethernet/via/via-velocity.c for (idx = vptr->tx.tail[qnum]; vptr->tx.used[qnum] > 0; tail 1932 drivers/net/ethernet/via/via-velocity.c vptr->tx.tail[qnum] = idx; tail 1447 drivers/net/ethernet/via/via-velocity.h int tail[TX_QUEUE_NO]; tail 804 drivers/net/ethernet/xilinx/ll_temac_main.c int tail; tail 806 drivers/net/ethernet/xilinx/ll_temac_main.c tail = lp->tx_bd_tail; tail 807 drivers/net/ethernet/xilinx/ll_temac_main.c cur_p = &lp->tx_bd_v[tail]; tail 813 drivers/net/ethernet/xilinx/ll_temac_main.c tail++; tail 814 drivers/net/ethernet/xilinx/ll_temac_main.c if (tail >= TX_BD_NUM) tail 815 drivers/net/ethernet/xilinx/ll_temac_main.c tail = 0; tail 817 drivers/net/ethernet/xilinx/ll_temac_main.c cur_p = &lp->tx_bd_v[tail]; tail 157 drivers/net/fjes/fjes_hw.c info->v1i.tail = 1; tail 902 drivers/net/fjes/fjes_hw.c return EP_RING_EMPTY(info->v1i.head, info->v1i.tail, tail 941 drivers/net/fjes/fjes_hw.c if (EP_RING_FULL(info->v1i.head, info->v1i.tail, info->v1i.count_max)) tail 945 drivers/net/fjes/fjes_hw.c (info->v1i.tail - 1, tail 952 drivers/net/fjes/fjes_hw.c EP_RING_INDEX_INC(epbh->info->v1i.tail, info->v1i.count_max); tail 219 drivers/net/fjes/fjes_hw.h u32 tail; tail 526 drivers/net/fjes/fjes_main.c if (EP_RING_FULL(info->v1i.head, info->v1i.tail, tail 2441 drivers/net/ppp/ppp_generic.c struct sk_buff *head, *tail; tail 2448 drivers/net/ppp/ppp_generic.c tail = NULL; tail 2506 drivers/net/ppp/ppp_generic.c tail = p; tail 2536 drivers/net/ppp/ppp_generic.c if (tail != NULL) { tail 2561 drivers/net/ppp/ppp_generic.c if (head != tail) { tail 2575 drivers/net/ppp/ppp_generic.c if (p == tail) tail 2582 drivers/net/ppp/ppp_generic.c ppp->nextseq = PPP_MP_CB(tail)->sequence + 1; tail 45 drivers/net/wireless/ath/ath10k/swap.c if (memcmp(swap_item->tail.magic_signature, swap_magic, tail 51 drivers/net/wireless/ath/ath10k/swap.c __le32_to_cpu(swap_item->tail.bmi_write_addr); tail 30 drivers/net/wireless/ath/ath10k/swap.h struct ath10k_swap_code_seg_tail tail; tail 2716 drivers/net/wireless/ath/ath6kl/cfg80211.c if (!beacon->tail) tail 2719 drivers/net/wireless/ath/ath6kl/cfg80211.c rsn_ie = cfg80211_find_ie(WLAN_EID_RSN, beacon->tail, beacon->tail_len); tail 397 drivers/net/wireless/ath/carl9170/wlan.h struct ar9170_rx_frame_tail tail; tail 1839 drivers/net/wireless/ath/wil6210/cfg80211.c b->tail, b->tail_len, true); tail 1912 drivers/net/wireless/ath/wil6210/cfg80211.c bcon->tail_len, bcon->tail); tail 2084 drivers/net/wireless/ath/wil6210/cfg80211.c if (bcon->tail && tail 2085 drivers/net/wireless/ath/wil6210/cfg80211.c cfg80211_find_ie(WLAN_EID_RSN, bcon->tail, tail 307 drivers/net/wireless/ath/wil6210/debugfs.c seq_printf(s, " tail = 0x%08x\n", r.tail); tail 318 drivers/net/wireless/ath/wil6210/debugfs.c !wmi_addr(wil, r.tail) || tail 334 drivers/net/wireless/ath/wil6210/debugfs.c (r.tail - r.base == delta) ? 
"t" : " ", tail 1296 drivers/net/wireless/ath/wil6210/main.c le32_to_cpus(&r->tail); tail 469 drivers/net/wireless/ath/wil6210/wil6210.h u32 tail; tail 720 drivers/net/wireless/ath/wil6210/wmi.c r->tail = wil_r(wil, RGF_MBOX + tail 721 drivers/net/wireless/ath/wil6210/wmi.c offsetof(struct wil6210_mbox_ctl, tx.tail)); tail 722 drivers/net/wireless/ath/wil6210/wmi.c if (next_head != r->tail) tail 726 drivers/net/wireless/ath/wil6210/wmi.c if (next_head == r->tail) { tail 1920 drivers/net/wireless/ath/wil6210/wmi.c if (r->tail == r->head) tail 1924 drivers/net/wireless/ath/wil6210/wmi.c r->head, r->tail); tail 1926 drivers/net/wireless/ath/wil6210/wmi.c wil_memcpy_fromio_32(&d_tail, wil->csr + HOSTADDR(r->tail), tail 1957 drivers/net/wireless/ath/wil6210/wmi.c wil_w(wil, r->tail + tail 2000 drivers/net/wireless/ath/wil6210/wmi.c r->tail = r->base + ((r->tail - r->base + tail 2003 drivers/net/wireless/ath/wil6210/wmi.c offsetof(struct wil6210_mbox_ctl, rx.tail), r->tail); tail 3430 drivers/net/wireless/ath/wil6210/wmi.c if (r->tail != r->head) tail 333 drivers/net/wireless/broadcom/b43/pio.c u8 *tail = wl->pio_tailspace; tail 339 drivers/net/wireless/broadcom/b43/pio.c tail[0] = data[data_len - 1]; tail 340 drivers/net/wireless/broadcom/b43/pio.c tail[1] = 0; tail 341 drivers/net/wireless/broadcom/b43/pio.c b43_block_write(dev, tail, 2, tail 387 drivers/net/wireless/broadcom/b43/pio.c u8 *tail = wl->pio_tailspace; tail 390 drivers/net/wireless/broadcom/b43/pio.c memset(tail, 0, 4); tail 397 drivers/net/wireless/broadcom/b43/pio.c tail[0] = data[data_len - 3]; tail 398 drivers/net/wireless/broadcom/b43/pio.c tail[1] = data[data_len - 2]; tail 399 drivers/net/wireless/broadcom/b43/pio.c tail[2] = data[data_len - 1]; tail 403 drivers/net/wireless/broadcom/b43/pio.c tail[0] = data[data_len - 2]; tail 404 drivers/net/wireless/broadcom/b43/pio.c tail[1] = data[data_len - 1]; tail 407 drivers/net/wireless/broadcom/b43/pio.c tail[0] = data[data_len - 1]; tail 411 drivers/net/wireless/broadcom/b43/pio.c b43_block_write(dev, tail, 4, tail 709 drivers/net/wireless/broadcom/b43/pio.c u8 *tail = wl->pio_tailspace; tail 713 drivers/net/wireless/broadcom/b43/pio.c b43_block_read(dev, tail, 4, tail 718 drivers/net/wireless/broadcom/b43/pio.c skb->data[len + padding - 3] = tail[0]; tail 719 drivers/net/wireless/broadcom/b43/pio.c skb->data[len + padding - 2] = tail[1]; tail 720 drivers/net/wireless/broadcom/b43/pio.c skb->data[len + padding - 1] = tail[2]; tail 723 drivers/net/wireless/broadcom/b43/pio.c skb->data[len + padding - 2] = tail[0]; tail 724 drivers/net/wireless/broadcom/b43/pio.c skb->data[len + padding - 1] = tail[1]; tail 727 drivers/net/wireless/broadcom/b43/pio.c skb->data[len + padding - 1] = tail[0]; tail 736 drivers/net/wireless/broadcom/b43/pio.c u8 *tail = wl->pio_tailspace; tail 740 drivers/net/wireless/broadcom/b43/pio.c b43_block_read(dev, tail, 2, tail 743 drivers/net/wireless/broadcom/b43/pio.c skb->data[len + padding - 1] = tail[0]; tail 4441 drivers/net/wireless/broadcom/brcm80211/brcmfmac/cfg80211.c beacon->tail, beacon->tail_len); tail 4497 drivers/net/wireless/broadcom/brcm80211/brcmfmac/cfg80211.c country_ie = brcmf_parse_tlvs((u8 *)settings->beacon.tail, tail 4528 drivers/net/wireless/broadcom/brcm80211/brcmfmac/cfg80211.c rsn_ie = brcmf_parse_tlvs((u8 *)settings->beacon.tail, tail 4532 drivers/net/wireless/broadcom/brcm80211/brcmfmac/cfg80211.c wpa_ie = brcmf_find_wpaie((u8 *)settings->beacon.tail, tail 2126 drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c (pkt->end - 
pkt->tail); tail 4376 drivers/net/wireless/cisco/airo.c static u8 tail[(30-10) + 2 + 6] = {[30-10] = 6}; tail 4398 drivers/net/wireless/cisco/airo.c bap_write(ai, (__le16 *)(tail + (hdrlen - 10)), 38 - hdrlen, BAP1); tail 596 drivers/net/wireless/intel/ipw2x00/libipw_rx.c if (frag_skb->tail + flen > frag_skb->end) { tail 192 drivers/net/wireless/intel/iwlwifi/fw/api/phy-ctxt.h struct iwl_phy_context_cmd_tail tail; tail 141 drivers/net/wireless/intel/iwlwifi/fw/api/tdls.h struct iwl_tdls_channel_switch_cmd_tail tail; tail 368 drivers/net/wireless/intel/iwlwifi/fw/api/time-event.h struct iwl_hs20_roc_req_tail tail; tail 3613 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c struct iwl_hs20_roc_req_tail *tail = iwl_mvm_chan_info_cmd_tail(mvm, tail 3625 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c tail->apply_time = cpu_to_le32(iwl_mvm_get_systime(mvm)); tail 3650 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c tail->duration = cpu_to_le32(req_dur); tail 3651 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c tail->apply_time_max_delay = cpu_to_le32(delay); tail 3658 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c memcpy(tail->node_addr, vif->addr, ETH_ALEN); tail 147 drivers/net/wireless/intel/iwlwifi/mvm/phy-ctxt.c struct iwl_phy_context_cmd_tail *tail = tail 169 drivers/net/wireless/intel/iwlwifi/mvm/phy-ctxt.c tail->rxchain_info = cpu_to_le32(iwl_mvm_get_valid_rx_ant(mvm) << tail 171 drivers/net/wireless/intel/iwlwifi/mvm/phy-ctxt.c tail->rxchain_info |= cpu_to_le32(idle_cnt << PHY_RX_CHAIN_CNT_POS); tail 172 drivers/net/wireless/intel/iwlwifi/mvm/phy-ctxt.c tail->rxchain_info |= cpu_to_le32(active_cnt << tail 176 drivers/net/wireless/intel/iwlwifi/mvm/phy-ctxt.c tail->rxchain_info = cpu_to_le32(mvm->dbgfs_rx_phyinfo); tail 179 drivers/net/wireless/intel/iwlwifi/mvm/phy-ctxt.c tail->txchain_info = cpu_to_le32(iwl_mvm_get_valid_tx_ant(mvm)); tail 860 drivers/net/wireless/intel/iwlwifi/mvm/rxmq.c struct sk_buff *tail; tail 1020 drivers/net/wireless/intel/iwlwifi/mvm/rxmq.c tail = skb_peek_tail(&entries[index].e.frames); tail 1021 drivers/net/wireless/intel/iwlwifi/mvm/rxmq.c if (tail && !amsdu) tail 1023 drivers/net/wireless/intel/iwlwifi/mvm/rxmq.c else if (tail && (sn != buffer->last_amsdu || tail 392 drivers/net/wireless/intel/iwlwifi/mvm/tdls.c struct iwl_tdls_channel_switch_cmd_tail *tail = tail 410 drivers/net/wireless/intel/iwlwifi/mvm/tdls.c tail->timing.frame_timestamp = cpu_to_le32(timestamp); tail 411 drivers/net/wireless/intel/iwlwifi/mvm/tdls.c tail->timing.switch_time = cpu_to_le32(switch_time); tail 412 drivers/net/wireless/intel/iwlwifi/mvm/tdls.c tail->timing.switch_timeout = cpu_to_le32(switch_timeout); tail 448 drivers/net/wireless/intel/iwlwifi/mvm/tdls.c tail->timing.max_offchan_duration = tail 453 drivers/net/wireless/intel/iwlwifi/mvm/tdls.c tail->frame.switch_time_offset = cpu_to_le32(ch_sw_tm_ie + 2); tail 463 drivers/net/wireless/intel/iwlwifi/mvm/tdls.c iwl_mvm_set_tx_cmd_ccmp(info, &tail->frame.tx_cmd); tail 466 drivers/net/wireless/intel/iwlwifi/mvm/tdls.c iwl_mvm_set_tx_cmd(mvm, skb, &tail->frame.tx_cmd, info, tail 469 drivers/net/wireless/intel/iwlwifi/mvm/tdls.c iwl_mvm_set_tx_cmd_rate(mvm, &tail->frame.tx_cmd, info, sta, tail 473 drivers/net/wireless/intel/iwlwifi/mvm/tdls.c memcpy(tail->frame.data, skb->data, skb->len); tail 935 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c if (frag_skb->tail + flen > frag_skb->end) { tail 87 drivers/net/wireless/marvell/mwifiex/11n_aggr.c *pad = (4 - ((unsigned long)skb_aggr->tail & 0x3)) % 4; tail 3955 
drivers/net/wireless/marvell/mwifiex/cfg80211.c params->beacon_csa.tail, tail 338 drivers/net/wireless/marvell/mwifiex/ie.c if (!info->tail || !info->tail_len) tail 351 drivers/net/wireless/marvell/mwifiex/ie.c hdr = (void *)(info->tail + parsed_len); tail 396 drivers/net/wireless/marvell/mwifiex/ie.c info->tail, info->tail_len); tail 167 drivers/net/wireless/marvell/mwifiex/uap_cmd.c ht_ie = cfg80211_find_ie(WLAN_EID_HT_CAPABILITY, params->beacon.tail, tail 191 drivers/net/wireless/marvell/mwifiex/uap_cmd.c vht_ie = cfg80211_find_ie(WLAN_EID_VHT_CAPABILITY, params->beacon.tail, tail 213 drivers/net/wireless/marvell/mwifiex/uap_cmd.c tpc_ie = cfg80211_find_ie(WLAN_EID_TPC_REQUEST, params->beacon.tail, tail 275 drivers/net/wireless/marvell/mwifiex/uap_cmd.c params->beacon.tail, tail 395 drivers/net/wireless/marvell/mwifiex/uap_cmd.c params->beacon.tail, tail 462 drivers/net/wireless/marvell/mwifiex/uap_cmd.c country_ie = cfg80211_find_ie(WLAN_EID_COUNTRY, beacon_data->tail, tail 144 drivers/net/wireless/marvell/mwl8k.c int tail; tail 159 drivers/net/wireless/marvell/mwl8k.c int tail; tail 1168 drivers/net/wireless/marvell/mwl8k.c rxq->tail = 0; tail 1225 drivers/net/wireless/marvell/mwl8k.c rx = rxq->tail++; tail 1226 drivers/net/wireless/marvell/mwl8k.c if (rxq->tail == MWL8K_RX_DESCS) tail 1227 drivers/net/wireless/marvell/mwl8k.c rxq->tail = 0; tail 1459 drivers/net/wireless/marvell/mwl8k.c txq->tail = 0; tail 1529 drivers/net/wireless/marvell/mwl8k.c txq->len, txq->head, txq->tail, tail 2087 drivers/net/wireless/marvell/mwl8k.c BUG_ON(txq->skb[txq->tail] != NULL); tail 2088 drivers/net/wireless/marvell/mwl8k.c txq->skb[txq->tail] = skb; tail 2090 drivers/net/wireless/marvell/mwl8k.c tx = txq->txd + txq->tail; tail 2114 drivers/net/wireless/marvell/mwl8k.c txq->tail++; tail 2115 drivers/net/wireless/marvell/mwl8k.c if (txq->tail == MWL8K_TX_DESCS) tail 2116 drivers/net/wireless/marvell/mwl8k.c txq->tail = 0; tail 42 drivers/net/wireless/mediatek/mt76/debugfs.c i, q->q->queued, q->q->head, q->q->tail, tail 134 drivers/net/wireless/mediatek/mt76/dma.c q->tail = q->head; tail 157 drivers/net/wireless/mediatek/mt76/dma.c while ((q->queued > n_queued) && q->tail != last) { tail 158 drivers/net/wireless/mediatek/mt76/dma.c mt76_dma_tx_cleanup_idx(dev, q, q->tail, &entry); tail 162 drivers/net/wireless/mediatek/mt76/dma.c q->tail = (q->tail + 1) % q->ndesc; tail 174 drivers/net/wireless/mediatek/mt76/dma.c if (!flush && q->tail == last) tail 235 drivers/net/wireless/mediatek/mt76/dma.c int idx = q->tail; tail 244 drivers/net/wireless/mediatek/mt76/dma.c q->tail = (q->tail + 1) % q->ndesc; tail 117 drivers/net/wireless/mediatek/mt76/mt76.h u16 tail; tail 8 drivers/net/wireless/mediatek/mt76/mt7603/beacon.c struct sk_buff *tail[MT7603_MAX_INTERFACES]; tail 63 drivers/net/wireless/mediatek/mt76/mt7603/beacon.c data->tail[mvif->idx] = skb; tail 110 drivers/net/wireless/mediatek/mt76/mt7603/beacon.c for (i = 0; i < ARRAY_SIZE(data.tail); i++) { tail 111 drivers/net/wireless/mediatek/mt76/mt7603/beacon.c if (!data.tail[i]) tail 114 drivers/net/wireless/mediatek/mt76/mt7603/beacon.c mt76_skb_set_moredata(data.tail[i], false); tail 190 drivers/net/wireless/mediatek/mt76/mt76x02.h struct sk_buff *tail[8]; tail 213 drivers/net/wireless/mediatek/mt76/mt76x02_beacon.c data->tail[mvif->idx] = skb; tail 237 drivers/net/wireless/mediatek/mt76/mt76x02_beacon.c for (i = 0; i < ARRAY_SIZE(data->tail); i++) { tail 238 drivers/net/wireless/mediatek/mt76/mt76x02_beacon.c if (!data->tail[i]) tail 240 
drivers/net/wireless/mediatek/mt76/mt76x02_beacon.c mt76_skb_set_moredata(data->tail[i], false); tail 41 drivers/net/wireless/mediatek/mt76/mt76x02_mmio.c for (i = 0; i < ARRAY_SIZE(data.tail); i++) { tail 42 drivers/net/wireless/mediatek/mt76/mt76x02_mmio.c if (!data.tail[i]) tail 45 drivers/net/wireless/mediatek/mt76/mt76x02_mmio.c mt76_skb_set_moredata(data.tail[i], false); tail 517 drivers/net/wireless/mediatek/mt76/usb.c if (WARN_ONCE(q->entry[q->tail].urb != urb, "rx urb mismatch")) tail 520 drivers/net/wireless/mediatek/mt76/usb.c q->tail = (q->tail + 1) % q->ndesc; tail 575 drivers/net/wireless/mediatek/mt76/usb.c q->head = q->tail = 0; tail 776 drivers/net/wireless/mediatek/mt76/usb.c u16 idx = q->tail; tail 795 drivers/net/wireless/mediatek/mt76/usb.c q->tail = (q->tail + 1) % q->ndesc; tail 807 drivers/net/wireless/mediatek/mt76/usb.c while (q->first != q->tail) { tail 282 drivers/net/wireless/quantenna/qtnfmac/commands.c s->beacon.tail, s->beacon.tail_len); tail 795 drivers/net/wireless/realtek/rtlwifi/pci.c if (skb->end - skb->tail > len) { tail 806 drivers/net/wireless/realtek/rtlwifi/pci.c skb->end - skb->tail, len); tail 2421 drivers/ntb/ntb_transport.c unsigned int tail = qp->remote_rx_info->entry; tail 2423 drivers/ntb/ntb_transport.c return tail > head ? tail - head : qp->tx_max_entry + tail - head; tail 114 drivers/of/pdt.c struct property *head, *tail; tail 116 drivers/of/pdt.c head = tail = of_pdt_build_one_prop(node, NULL, tail 119 drivers/of/pdt.c tail->next = of_pdt_build_one_prop(node, NULL, NULL, NULL, 0); tail 120 drivers/of/pdt.c tail = tail->next; tail 121 drivers/of/pdt.c while(tail) { tail 122 drivers/of/pdt.c tail->next = of_pdt_build_one_prop(node, tail->name, tail 124 drivers/of/pdt.c tail = tail->next; tail 317 drivers/pci/controller/pcie-iproc-msi.c u32 eq, head, tail, nr_events; tail 340 drivers/pci/controller/pcie-iproc-msi.c tail = iproc_msi_read_reg(msi, IPROC_MSI_EQ_TAIL, tail 347 drivers/pci/controller/pcie-iproc-msi.c nr_events = (tail < head) ? 
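The pcie-iproc-msi.c entry that ends just above (its ternary continues on the next listing line) computes the number of pending events as the head-to-tail distance modulo the event-queue length, and the nearby event-queue entries drain such a queue by stepping head with a modulo increment. A small self-contained sketch of both steps follows; DEMO_EQ_LEN and the helper name are assumptions for illustration.

/*
 * Sketch of event-queue occupancy and draining as in the entries
 * around this point. Illustrative names and sizes.
 */
#include <stdio.h>

#define DEMO_EQ_LEN 64

static unsigned int demo_eq_pending(unsigned int head, unsigned int tail)
{
	return (tail < head) ? DEMO_EQ_LEN - (head - tail)
			     : tail - head;
}

int main(void)
{
	unsigned int head = 60, tail = 5;   /* producer has wrapped */
	unsigned int nr_events = demo_eq_pending(head, tail);

	printf("pending events: %u\n", nr_events);    /* 9 */

	/* consume them one at a time, wrapping head like the driver does */
	while (head != tail) {
		/* ...handle the event at index 'head' here... */
		head = (head + 1) % DEMO_EQ_LEN;
	}
	printf("pending after drain: %u\n", demo_eq_pending(head, tail)); /* 0 */
	return 0;
}
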
tail 348 drivers/pci/controller/pcie-iproc-msi.c (EQ_LEN - (head - tail)) : (tail - head); tail 392 drivers/perf/arm_spe_pmu.c u64 head, tail, wakeup; tail 422 drivers/perf/arm_spe_pmu.c tail = PERF_IDX2OFF(handle->head + handle->size, buf); tail 431 drivers/perf/arm_spe_pmu.c if (head < tail) tail 432 drivers/perf/arm_spe_pmu.c limit = round_down(tail, PAGE_SIZE); tail 83 drivers/platform/chrome/cros_ec_debugfs.c buf_space = CIRC_SPACE(cb->head, cb->tail, LOG_SIZE); tail 137 drivers/platform/chrome/cros_ec_debugfs.c while (!CIRC_CNT(cb->head, cb->tail, LOG_SIZE)) { tail 146 drivers/platform/chrome/cros_ec_debugfs.c CIRC_CNT(cb->head, cb->tail, LOG_SIZE)); tail 156 drivers/platform/chrome/cros_ec_debugfs.c ret = min_t(size_t, CIRC_CNT_TO_END(cb->head, cb->tail, LOG_SIZE), tail 159 drivers/platform/chrome/cros_ec_debugfs.c if (copy_to_user(buf, cb->buf + cb->tail, ret)) { tail 164 drivers/platform/chrome/cros_ec_debugfs.c cb->tail = CIRC_ADD(cb->tail, LOG_SIZE, ret); tail 181 drivers/platform/chrome/cros_ec_debugfs.c debug_info->log_buffer.tail, tail 359 drivers/platform/chrome/cros_ec_debugfs.c debug_info->log_buffer.tail = 0; tail 98 drivers/platform/chrome/wilco_ec/event.c int tail; tail 122 drivers/platform/chrome/wilco_ec/event.c return q->head == q->tail && !q->entries[q->head]; tail 128 drivers/platform/chrome/wilco_ec/event.c return q->head == q->tail && q->entries[q->head]; tail 138 drivers/platform/chrome/wilco_ec/event.c ev = q->entries[q->tail]; tail 139 drivers/platform/chrome/wilco_ec/event.c q->entries[q->tail] = NULL; tail 140 drivers/platform/chrome/wilco_ec/event.c q->tail = (q->tail + 1) % q->capacity; tail 424 drivers/platform/mellanox/mlxbf-tmfifo.c seg = CIRC_SPACE_TO_END(cons->tx_buf.head, cons->tx_buf.tail, tail 454 drivers/platform/mellanox/mlxbf-tmfifo.c avail = CIRC_SPACE(cons->tx_buf.head, cons->tx_buf.tail, tail 513 drivers/platform/mellanox/mlxbf-tmfifo.c size = CIRC_CNT(cons->tx_buf.head, cons->tx_buf.tail, tail 531 drivers/platform/mellanox/mlxbf-tmfifo.c addr = cons->tx_buf.buf + cons->tx_buf.tail; tail 533 drivers/platform/mellanox/mlxbf-tmfifo.c seg = CIRC_CNT_TO_END(cons->tx_buf.head, cons->tx_buf.tail, tail 545 drivers/platform/mellanox/mlxbf-tmfifo.c cons->tx_buf.tail = (cons->tx_buf.tail + sizeof(u64)) % tail 549 drivers/platform/mellanox/mlxbf-tmfifo.c cons->tx_buf.tail = (cons->tx_buf.tail + size) % tail 141 drivers/pnp/quirks.c struct pnp_option *tail = NULL, *first_new_option = NULL; tail 147 drivers/pnp/quirks.c tail = option; tail 149 drivers/pnp/quirks.c if (!tail) { tail 171 drivers/pnp/quirks.c list_add(&new_option->list, &tail->list); tail 172 drivers/pnp/quirks.c tail = new_option; tail 459 drivers/ps3/ps3-vuart.c const unsigned char *tail; tail 520 drivers/ps3/ps3-vuart.c lb->tail = lb->data + bytes; tail 575 drivers/ps3/ps3-vuart.c lb->tail = lb->data + bytes; tail 628 drivers/ps3/ps3-vuart.c bytes_read = min((unsigned int)(lb->tail - lb->head), bytes); tail 635 drivers/ps3/ps3-vuart.c if (bytes_read < lb->tail - lb->head) { tail 734 drivers/ps3/ps3-vuart.c result = ps3_vuart_raw_write(dev, lb->head, lb->tail - lb->head, tail 746 drivers/ps3/ps3-vuart.c if (bytes_written < lb->tail - lb->head) { tail 53 drivers/ptp/ptp_clock.c dst = &queue->buf[queue->tail]; tail 61 drivers/ptp/ptp_clock.c queue->tail = (queue->tail + 1) % PTP_MAX_TIMESTAMPS; tail 25 drivers/ptp/ptp_private.h int tail; tail 60 drivers/ptp/ptp_private.h int cnt = q->tail - q->head; tail 168 drivers/rapidio/rio_cm.c int tail; tail 901 drivers/rapidio/rio_cm.c rxmsg = 
ch->rx_ring.buf[ch->rx_ring.tail]; tail 902 drivers/rapidio/rio_cm.c ch->rx_ring.buf[ch->rx_ring.tail] = NULL; tail 904 drivers/rapidio/rio_cm.c ch->rx_ring.tail++; tail 905 drivers/rapidio/rio_cm.c ch->rx_ring.tail %= RIOCM_RX_RING_SIZE; tail 1321 drivers/rapidio/rio_cm.c ch->rx_ring.tail = 0; tail 50 drivers/rpmsg/qcom_glink_rpm.c void __iomem *tail; tail 60 drivers/rpmsg/qcom_glink_rpm.c unsigned int tail; tail 63 drivers/rpmsg/qcom_glink_rpm.c tail = readl(pipe->tail); tail 65 drivers/rpmsg/qcom_glink_rpm.c if (head < tail) tail 66 drivers/rpmsg/qcom_glink_rpm.c return pipe->native.length - tail + head; tail 68 drivers/rpmsg/qcom_glink_rpm.c return head - tail; tail 75 drivers/rpmsg/qcom_glink_rpm.c unsigned int tail; tail 78 drivers/rpmsg/qcom_glink_rpm.c tail = readl(pipe->tail); tail 79 drivers/rpmsg/qcom_glink_rpm.c tail += offset; tail 80 drivers/rpmsg/qcom_glink_rpm.c if (tail >= pipe->native.length) tail 81 drivers/rpmsg/qcom_glink_rpm.c tail -= pipe->native.length; tail 83 drivers/rpmsg/qcom_glink_rpm.c len = min_t(size_t, count, pipe->native.length - tail); tail 85 drivers/rpmsg/qcom_glink_rpm.c __ioread32_copy(data, pipe->fifo + tail, tail 99 drivers/rpmsg/qcom_glink_rpm.c unsigned int tail; tail 101 drivers/rpmsg/qcom_glink_rpm.c tail = readl(pipe->tail); tail 103 drivers/rpmsg/qcom_glink_rpm.c tail += count; tail 104 drivers/rpmsg/qcom_glink_rpm.c if (tail >= pipe->native.length) tail 105 drivers/rpmsg/qcom_glink_rpm.c tail -= pipe->native.length; tail 107 drivers/rpmsg/qcom_glink_rpm.c writel(tail, pipe->tail); tail 114 drivers/rpmsg/qcom_glink_rpm.c unsigned int tail; tail 117 drivers/rpmsg/qcom_glink_rpm.c tail = readl(pipe->tail); tail 119 drivers/rpmsg/qcom_glink_rpm.c if (tail <= head) tail 120 drivers/rpmsg/qcom_glink_rpm.c return pipe->native.length - head + tail; tail 122 drivers/rpmsg/qcom_glink_rpm.c return tail - head; tail 230 drivers/rpmsg/qcom_glink_rpm.c rx->tail = msg_ram + offset; tail 237 drivers/rpmsg/qcom_glink_rpm.c tx->tail = msg_ram + offset; tail 298 drivers/rpmsg/qcom_glink_rpm.c writel(0, rx_pipe->tail); tail 39 drivers/rpmsg/qcom_glink_smem.c __le32 *tail; tail 55 drivers/rpmsg/qcom_glink_smem.c u32 tail; tail 71 drivers/rpmsg/qcom_glink_smem.c tail = le32_to_cpu(*pipe->tail); tail 73 drivers/rpmsg/qcom_glink_smem.c if (head < tail) tail 74 drivers/rpmsg/qcom_glink_smem.c return pipe->native.length - tail + head; tail 76 drivers/rpmsg/qcom_glink_smem.c return head - tail; tail 84 drivers/rpmsg/qcom_glink_smem.c u32 tail; tail 86 drivers/rpmsg/qcom_glink_smem.c tail = le32_to_cpu(*pipe->tail); tail 87 drivers/rpmsg/qcom_glink_smem.c tail += offset; tail 88 drivers/rpmsg/qcom_glink_smem.c if (tail >= pipe->native.length) tail 89 drivers/rpmsg/qcom_glink_smem.c tail -= pipe->native.length; tail 91 drivers/rpmsg/qcom_glink_smem.c len = min_t(size_t, count, pipe->native.length - tail); tail 93 drivers/rpmsg/qcom_glink_smem.c memcpy_fromio(data, pipe->fifo + tail, len); tail 103 drivers/rpmsg/qcom_glink_smem.c u32 tail; tail 105 drivers/rpmsg/qcom_glink_smem.c tail = le32_to_cpu(*pipe->tail); tail 107 drivers/rpmsg/qcom_glink_smem.c tail += count; tail 108 drivers/rpmsg/qcom_glink_smem.c if (tail >= pipe->native.length) tail 109 drivers/rpmsg/qcom_glink_smem.c tail -= pipe->native.length; tail 111 drivers/rpmsg/qcom_glink_smem.c *pipe->tail = cpu_to_le32(tail); tail 118 drivers/rpmsg/qcom_glink_smem.c u32 tail; tail 122 drivers/rpmsg/qcom_glink_smem.c tail = le32_to_cpu(*pipe->tail); tail 124 drivers/rpmsg/qcom_glink_smem.c if (tail <= head) tail 125 
drivers/rpmsg/qcom_glink_smem.c avail = pipe->native.length - head + tail; tail 127 drivers/rpmsg/qcom_glink_smem.c avail = tail - head; tail 247 drivers/rpmsg/qcom_glink_smem.c tx_pipe->tail = &descs[0]; tail 249 drivers/rpmsg/qcom_glink_smem.c rx_pipe->tail = &descs[2]; tail 276 drivers/rpmsg/qcom_glink_smem.c *rx_pipe->tail = 0; tail 244 drivers/rpmsg/qcom_smd.c __le32 tail; tail 266 drivers/rpmsg/qcom_smd.c __le32 tail; tail 402 drivers/rpmsg/qcom_smd.c SET_RX_CHANNEL_INFO(channel, tail, 0); tail 430 drivers/rpmsg/qcom_smd.c unsigned tail; tail 433 drivers/rpmsg/qcom_smd.c tail = GET_RX_CHANNEL_INFO(channel, tail); tail 435 drivers/rpmsg/qcom_smd.c return (head - tail) & (channel->fifo_size - 1); tail 501 drivers/rpmsg/qcom_smd.c unsigned tail; tail 505 drivers/rpmsg/qcom_smd.c tail = GET_RX_CHANNEL_INFO(channel, tail); tail 507 drivers/rpmsg/qcom_smd.c len = min_t(size_t, count, channel->fifo_size - tail); tail 510 drivers/rpmsg/qcom_smd.c channel->rx_fifo + tail, tail 531 drivers/rpmsg/qcom_smd.c unsigned tail; tail 533 drivers/rpmsg/qcom_smd.c tail = GET_RX_CHANNEL_INFO(channel, tail); tail 534 drivers/rpmsg/qcom_smd.c tail += count; tail 535 drivers/rpmsg/qcom_smd.c tail &= (channel->fifo_size - 1); tail 536 drivers/rpmsg/qcom_smd.c SET_RX_CHANNEL_INFO(channel, tail, tail); tail 545 drivers/rpmsg/qcom_smd.c unsigned tail; tail 550 drivers/rpmsg/qcom_smd.c tail = GET_RX_CHANNEL_INFO(channel, tail); tail 553 drivers/rpmsg/qcom_smd.c if (tail + channel->pkt_size >= channel->fifo_size) { tail 557 drivers/rpmsg/qcom_smd.c ptr = channel->rx_fifo + tail; tail 686 drivers/rpmsg/qcom_smd.c unsigned tail; tail 690 drivers/rpmsg/qcom_smd.c tail = GET_TX_CHANNEL_INFO(channel, tail); tail 692 drivers/rpmsg/qcom_smd.c return mask - ((head - tail) & mask); tail 92 drivers/s390/block/dasd_eer.c int tail; tail 106 drivers/s390/block/dasd_eer.c if (eerb->head < eerb->tail) tail 107 drivers/s390/block/dasd_eer.c return eerb->tail - eerb->head - 1; tail 108 drivers/s390/block/dasd_eer.c return eerb->buffersize - eerb->head + eerb->tail -1; tail 118 drivers/s390/block/dasd_eer.c if (eerb->head >= eerb->tail) tail 119 drivers/s390/block/dasd_eer.c return eerb->head - eerb->tail; tail 120 drivers/s390/block/dasd_eer.c return eerb->buffersize - eerb->tail + eerb->head; tail 167 drivers/s390/block/dasd_eer.c tailindex = eerb->tail / PAGE_SIZE; tail 168 drivers/s390/block/dasd_eer.c localtail = eerb->tail % PAGE_SIZE; tail 173 drivers/s390/block/dasd_eer.c eerb->tail += len; tail 174 drivers/s390/block/dasd_eer.c if (eerb->tail == eerb->buffersize) tail 175 drivers/s390/block/dasd_eer.c eerb->tail = 0; /* wrap around */ tail 176 drivers/s390/block/dasd_eer.c BUG_ON(eerb->tail > eerb->buffersize); tail 196 drivers/s390/block/dasd_eer.c eerb->tail += eerb->residual; tail 197 drivers/s390/block/dasd_eer.c if (eerb->tail >= eerb->buffersize) tail 198 drivers/s390/block/dasd_eer.c eerb->tail -= eerb->buffersize; tail 203 drivers/s390/block/dasd_eer.c eerb->tail += tailcount; tail 204 drivers/s390/block/dasd_eer.c if (eerb->tail >= eerb->buffersize) tail 205 drivers/s390/block/dasd_eer.c eerb->tail -= eerb->buffersize; tail 638 drivers/s390/block/dasd_eer.c eerb->head != eerb->tail); tail 674 drivers/s390/block/dasd_eer.c if (eerb->head != eerb->tail) tail 219 drivers/s390/char/vmlogrdr.c char *tail; tail 225 drivers/s390/char/vmlogrdr.c tail=strnchr(cp_response,len,'='); tail 226 drivers/s390/char/vmlogrdr.c if (!tail) tail 228 drivers/s390/char/vmlogrdr.c tail++; tail 229 drivers/s390/char/vmlogrdr.c if 
(!strncmp("ANY",tail,3)) tail 231 drivers/s390/char/vmlogrdr.c if (!strncmp("NONE",tail,4)) tail 237 drivers/s390/char/vmlogrdr.c for (i=tail-cp_response; i<len; i++) tail 300 drivers/s390/cio/vfio_ccw_cp.c u32 tail = head + (len - 1) * sizeof(struct ccw1); tail 302 drivers/s390/cio/vfio_ccw_cp.c return (head <= cpa && cpa <= tail); tail 725 drivers/s390/net/ctcm_main.c hi = ((unsigned long)skb->tail + TH_HEADER_LENGTH) >> 31; tail 1273 drivers/scsi/aic7xxx/aic79xx_core.c u_int tail; tail 1275 drivers/scsi/aic7xxx/aic79xx_core.c tail = ahd_inw(ahd, COMPLETE_DMA_SCB_TAIL); tail 1276 drivers/scsi/aic7xxx/aic79xx_core.c ahd_set_scbptr(ahd, tail); tail 83 drivers/scsi/arcmsr/arcmsr_attr.c unsigned int tail = acb->rqbuf_getIndex; tail 85 drivers/scsi/arcmsr/arcmsr_attr.c unsigned int cnt_to_end = CIRC_CNT_TO_END(head, tail, ARCMSR_MAX_QBUFFER); tail 87 drivers/scsi/arcmsr/arcmsr_attr.c allxfer_len = CIRC_CNT(head, tail, ARCMSR_MAX_QBUFFER); tail 92 drivers/scsi/arcmsr/arcmsr_attr.c memcpy(ptmpQbuffer, acb->rqbuffer + tail, allxfer_len); tail 94 drivers/scsi/arcmsr/arcmsr_attr.c memcpy(ptmpQbuffer, acb->rqbuffer + tail, cnt_to_end); tail 2809 drivers/scsi/arcmsr/arcmsr_hba.c unsigned int tail = acb->rqbuf_getIndex; tail 2811 drivers/scsi/arcmsr/arcmsr_hba.c unsigned int cnt_to_end = CIRC_CNT_TO_END(head, tail, ARCMSR_MAX_QBUFFER); tail 2813 drivers/scsi/arcmsr/arcmsr_hba.c allxfer_len = CIRC_CNT(head, tail, ARCMSR_MAX_QBUFFER); tail 2818 drivers/scsi/arcmsr/arcmsr_hba.c memcpy(ptmpQbuffer, acb->rqbuffer + tail, allxfer_len); tail 2820 drivers/scsi/arcmsr/arcmsr_hba.c memcpy(ptmpQbuffer, acb->rqbuffer + tail, cnt_to_end); tail 35 drivers/scsi/be2iscsi/be.h u16 tail, head; tail 63 drivers/scsi/be2iscsi/be.h return q->dma_mem.va + q->tail * q->entry_size; tail 73 drivers/scsi/be2iscsi/be.h index_inc(&q->tail, q->len); tail 73 drivers/scsi/bfa/bfa_core.c int tail = trcm->tail; tail 74 drivers/scsi/bfa/bfa_core.c struct bfa_trc_s *trc = &trcm->trc[tail]; tail 84 drivers/scsi/bfa/bfa_core.c trcm->tail = (trcm->tail + 1) & (BFA_TRC_MAX - 1); tail 85 drivers/scsi/bfa/bfa_core.c if (trcm->tail == trcm->head) tail 60 drivers/scsi/bfa/bfa_cs.h u32 tail; tail 93 drivers/scsi/bfa/bfa_cs.h trcm->head = trcm->tail = trcm->stopped = 0; tail 132 drivers/scsi/bfa/bfa_plog.h u16 tail; tail 286 drivers/scsi/bfa/bfa_svc.c u16 tail; tail 297 drivers/scsi/bfa/bfa_svc.c tail = plog->tail; tail 299 drivers/scsi/bfa/bfa_svc.c pl_recp = &(plog->plog_recs[tail]); tail 304 drivers/scsi/bfa/bfa_svc.c BFA_PL_LOG_REC_INCR(plog->tail); tail 306 drivers/scsi/bfa/bfa_svc.c if (plog->head == plog->tail) tail 316 drivers/scsi/bfa/bfa_svc.c plog->head = plog->tail = 0; tail 2060 drivers/scsi/bfa/bfad_bsg.c bfad->plog_buf.head = bfad->plog_buf.tail = 0; tail 2763 drivers/scsi/ips.c if (!queue->tail) tail 2764 drivers/scsi/ips.c queue->tail = item; tail 2796 drivers/scsi/ips.c if (queue->tail == item) tail 2797 drivers/scsi/ips.c queue->tail = NULL; tail 2839 drivers/scsi/ips.c queue->tail = p; tail 2870 drivers/scsi/ips.c if (queue->tail) tail 2871 drivers/scsi/ips.c queue->tail->host_scribble = (char *) item; tail 2873 drivers/scsi/ips.c queue->tail = item; tail 2907 drivers/scsi/ips.c if (queue->tail == item) tail 2908 drivers/scsi/ips.c queue->tail = NULL; tail 2950 drivers/scsi/ips.c queue->tail = p; tail 2982 drivers/scsi/ips.c if (queue->tail) tail 2983 drivers/scsi/ips.c queue->tail->next = item; tail 2985 drivers/scsi/ips.c queue->tail = item; tail 3020 drivers/scsi/ips.c if (queue->tail == item) tail 3021 drivers/scsi/ips.c 
queue->tail = NULL; tail 3063 drivers/scsi/ips.c queue->tail = p; tail 963 drivers/scsi/ips.h struct ips_scb *tail; tail 972 drivers/scsi/ips.h struct scsi_cmnd *tail; tail 983 drivers/scsi/ips.h struct ips_copp_wait_item *tail; tail 2553 drivers/scsi/isci/host.c u16 tail = ihost->tci_tail & (SCI_MAX_IO_REQUESTS-1); tail 2555 drivers/scsi/isci/host.c ihost->tci_pool[tail] = tci; tail 2556 drivers/scsi/isci/host.c ihost->tci_tail = tail + 1; tail 97 drivers/slimbus/qcom-ctrl.c int tail; tail 137 drivers/slimbus/qcom-ctrl.c if ((ctrl->rx.tail + 1) % ctrl->rx.n == ctrl->rx.head) { tail 142 drivers/slimbus/qcom-ctrl.c idx = ctrl->rx.tail; tail 143 drivers/slimbus/qcom-ctrl.c ctrl->rx.tail = (ctrl->rx.tail + 1) % ctrl->rx.n; tail 310 drivers/slimbus/qcom-ctrl.c if (((ctrl->tx.head + 1) % ctrl->tx.n) == ctrl->tx.tail) { tail 315 drivers/slimbus/qcom-ctrl.c idx = ctrl->tx.tail; tail 317 drivers/slimbus/qcom-ctrl.c ctrl->tx.tail = (ctrl->tx.tail + 1) % ctrl->tx.n; tail 426 drivers/slimbus/qcom-ctrl.c if (ctrl->rx.tail == ctrl->rx.head) { tail 348 drivers/staging/isdn/gigaset/asyncdata.c if (inbuf->head == inbuf->tail || tail 409 drivers/staging/isdn/gigaset/asyncdata.c gig_dbg(DEBUG_INTR, "buffer state: %u -> %u", inbuf->head, inbuf->tail); tail 411 drivers/staging/isdn/gigaset/asyncdata.c while (inbuf->head != inbuf->tail) { tail 416 drivers/staging/isdn/gigaset/asyncdata.c numbytes = (inbuf->head > inbuf->tail ? tail 417 drivers/staging/isdn/gigaset/asyncdata.c RBUFSIZE : inbuf->tail) - inbuf->head; tail 296 drivers/staging/isdn/gigaset/common.c unsigned head, tail; tail 302 drivers/staging/isdn/gigaset/common.c tail = cs->ev_tail; tail 304 drivers/staging/isdn/gigaset/common.c while (tail != head) { tail 310 drivers/staging/isdn/gigaset/common.c cs->ev_head = tail; tail 334 drivers/staging/isdn/gigaset/common.c unsigned next, tail; tail 341 drivers/staging/isdn/gigaset/common.c tail = cs->ev_tail; tail 342 drivers/staging/isdn/gigaset/common.c next = (tail + 1) % MAX_EVENTS; tail 346 drivers/staging/isdn/gigaset/common.c event = cs->events + tail; tail 550 drivers/staging/isdn/gigaset/common.c inbuf->tail = 0; tail 566 drivers/staging/isdn/gigaset/common.c unsigned n, head, tail, bytesleft; tail 574 drivers/staging/isdn/gigaset/common.c tail = inbuf->tail; tail 576 drivers/staging/isdn/gigaset/common.c gig_dbg(DEBUG_INTR, "buffer state: %u -> %u", head, tail); tail 579 drivers/staging/isdn/gigaset/common.c if (head > tail) tail 580 drivers/staging/isdn/gigaset/common.c n = head - 1 - tail; tail 582 drivers/staging/isdn/gigaset/common.c n = (RBUFSIZE - 1) - tail; tail 584 drivers/staging/isdn/gigaset/common.c n = RBUFSIZE - tail; tail 593 drivers/staging/isdn/gigaset/common.c memcpy(inbuf->data + tail, src, n); tail 595 drivers/staging/isdn/gigaset/common.c tail = (tail + n) % RBUFSIZE; tail 598 drivers/staging/isdn/gigaset/common.c gig_dbg(DEBUG_INTR, "setting tail to %u", tail); tail 599 drivers/staging/isdn/gigaset/common.c inbuf->tail = tail; tail 832 drivers/staging/isdn/gigaset/common.c cs->inbuf->tail = 0; tail 408 drivers/staging/isdn/gigaset/ev-layer.c unsigned next, tail; tail 415 drivers/staging/isdn/gigaset/ev-layer.c tail = cs->ev_tail; tail 416 drivers/staging/isdn/gigaset/ev-layer.c next = (tail + 1) % MAX_EVENTS; tail 421 drivers/staging/isdn/gigaset/ev-layer.c event = cs->events + tail; tail 1847 drivers/staging/isdn/gigaset/ev-layer.c unsigned head, tail; tail 1857 drivers/staging/isdn/gigaset/ev-layer.c tail = cs->ev_tail; tail 1858 drivers/staging/isdn/gigaset/ev-layer.c if 
(tail == head) { tail 1865 drivers/staging/isdn/gigaset/ev-layer.c tail = cs->ev_tail; tail 1866 drivers/staging/isdn/gigaset/ev-layer.c if (tail == head) { tail 1904 drivers/staging/isdn/gigaset/ev-layer.c if (cs->inbuf->head != cs->inbuf->tail) { tail 280 drivers/staging/isdn/gigaset/gigaset.h int head, tail; tail 941 drivers/staging/isdn/gigaset/isocdata.c unsigned tail, head, numbytes; tail 945 drivers/staging/isdn/gigaset/isocdata.c while (head != (tail = inbuf->tail)) { tail 946 drivers/staging/isdn/gigaset/isocdata.c gig_dbg(DEBUG_INTR, "buffer state: %u -> %u", head, tail); tail 947 drivers/staging/isdn/gigaset/isocdata.c if (head > tail) tail 948 drivers/staging/isdn/gigaset/isocdata.c tail = RBUFSIZE; tail 950 drivers/staging/isdn/gigaset/isocdata.c numbytes = tail - head; tail 661 drivers/staging/isdn/gigaset/ser-gigaset.c unsigned tail, head, n; tail 673 drivers/staging/isdn/gigaset/ser-gigaset.c tail = inbuf->tail; tail 676 drivers/staging/isdn/gigaset/ser-gigaset.c head, tail, count); tail 678 drivers/staging/isdn/gigaset/ser-gigaset.c if (head <= tail) { tail 680 drivers/staging/isdn/gigaset/ser-gigaset.c n = min_t(unsigned, count, RBUFSIZE - tail); tail 681 drivers/staging/isdn/gigaset/ser-gigaset.c memcpy(inbuf->data + tail, buf, n); tail 682 drivers/staging/isdn/gigaset/ser-gigaset.c tail = (tail + n) % RBUFSIZE; tail 689 drivers/staging/isdn/gigaset/ser-gigaset.c n = head - tail - 1; tail 696 drivers/staging/isdn/gigaset/ser-gigaset.c memcpy(inbuf->data + tail, buf, count); tail 697 drivers/staging/isdn/gigaset/ser-gigaset.c tail += count; tail 700 drivers/staging/isdn/gigaset/ser-gigaset.c gig_dbg(DEBUG_INTR, "setting tail to %u", tail); tail 701 drivers/staging/isdn/gigaset/ser-gigaset.c inbuf->tail = tail; tail 107 drivers/staging/media/allegro-dvt/allegro-core.c unsigned int tail; tail 763 drivers/staging/media/allegro-dvt/allegro-core.c mbox->tail = base + 0x4; tail 769 drivers/staging/media/allegro-dvt/allegro-core.c regmap_write(dev->sram, mbox->tail, 0); tail 778 drivers/staging/media/allegro-dvt/allegro-core.c unsigned int tail; tail 804 drivers/staging/media/allegro-dvt/allegro-core.c regmap_read(dev->sram, mbox->tail, &tail); tail 805 drivers/staging/media/allegro-dvt/allegro-core.c if (tail > mbox->size) { tail 808 drivers/staging/media/allegro-dvt/allegro-core.c tail, mbox->size); tail 812 drivers/staging/media/allegro-dvt/allegro-core.c size_no_wrap = min(size, mbox->size - (size_t)tail); tail 813 drivers/staging/media/allegro-dvt/allegro-core.c regmap_bulk_write(dev->sram, mbox->data + tail, src, size_no_wrap / 4); tail 816 drivers/staging/media/allegro-dvt/allegro-core.c regmap_write(dev->sram, mbox->tail, (tail + size) % mbox->size); tail 343 drivers/staging/octeon-usb/octeon-hcd.c int tail; tail 1208 drivers/staging/octeon-usb/octeon-hcd.c while (available && (fifo->head != fifo->tail)) { tail 1209 drivers/staging/octeon-usb/octeon-hcd.c int i = fifo->tail; tail 1218 drivers/staging/octeon-usb/octeon-hcd.c fifo->tail++; tail 1219 drivers/staging/octeon-usb/octeon-hcd.c if (fifo->tail > MAX_CHANNELS) tail 1220 drivers/staging/octeon-usb/octeon-hcd.c fifo->tail = 0; tail 1248 drivers/staging/octeon-usb/octeon-hcd.c return fifo->head != fifo->tail; tail 1258 drivers/staging/octeon-usb/octeon-hcd.c if (usb->periodic.head != usb->periodic.tail) { tail 1272 drivers/staging/octeon-usb/octeon-hcd.c if (usb->nonperiodic.head != usb->nonperiodic.tail) { tail 1749 drivers/staging/qlge/qlge_main.c skb->tail -= QLGE_SB_PAD - NET_IP_ALIGN; tail 77 
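The gigaset inbuf code above is the producer side of the plain head/tail convention: one slot is always left unused so that head == tail can only mean "empty", and a write that would cross the end of the array is split in two. A sketch of such a producer under those assumptions; the size and names are invented.

    #include <stddef.h>
    #include <string.h>

    #define RBUF_SIZE 8192u    /* illustrative; need not be a power of two here */

    struct inbuf {
        unsigned char data[RBUF_SIZE];
        unsigned int head;     /* consumer index */
        unsigned int tail;     /* producer index */
    };

    /*
     * Append up to count bytes, never filling the final slot so that
     * head == tail always means "empty".  Returns bytes actually stored.
     */
    static size_t inbuf_put(struct inbuf *ib, const unsigned char *src, size_t count)
    {
        size_t stored = 0;

        while (count) {
            unsigned int head = ib->head, tail = ib->tail;
            unsigned int space;

            if (head <= tail)                  /* free run up to the end of the array */
                space = RBUF_SIZE - tail - (head == 0 ? 1 : 0);
            else                               /* free run up to head - 1 */
                space = head - tail - 1;

            if (space == 0)
                break;                         /* buffer full */
            if (space > count)
                space = (unsigned int)count;

            memcpy(ib->data + tail, src, space);
            ib->tail = (tail + space) % RBUF_SIZE;
            src += space;
            count -= space;
            stored += space;
        }
        return stored;
    }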
drivers/staging/rtl8188eu/include/rtw_event.h int tail; tail 244 drivers/staging/rtl8188eu/include/rtw_xmit.h volatile int tail; tail 1121 drivers/staging/rtl8192e/rtllib_rx.c if (frag_skb->tail + flen > frag_skb->end) { tail 827 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c sub_skb->tail = sub_skb->data + nSubframe_Length; tail 1143 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c if (frag_skb->tail + flen > frag_skb->end) { tail 87 drivers/staging/rtl8712/rtl871x_event.h /*volatile*/ int tail; tail 96 drivers/staging/rtl8712/rtl871x_event.h /*volatile*/ int tail; tail 186 drivers/staging/rtl8712/rtl871x_xmit.h /*volatile*/ sint tail; tail 222 drivers/staging/rtl8723bs/hal/rtl8723bs_recv.c u8 *end, u8 *tail, tail 225 drivers/staging/rtl8723bs/hal/rtl8723bs_recv.c if (end > tail) { tail 95 drivers/staging/rtl8723bs/include/rtw_event.h volatile int tail; tail 104 drivers/staging/rtl8723bs/include/rtw_event.h volatile int tail; tail 344 drivers/staging/rtl8723bs/include/rtw_xmit.h volatile sint tail; tail 2734 drivers/staging/rtl8723bs/os_dep/ioctl_cfg80211.c static int rtw_add_beacon(struct adapter *adapter, const u8 *head, size_t head_len, const u8 *tail, size_t tail_len) tail 2754 drivers/staging/rtl8723bs/os_dep/ioctl_cfg80211.c memcpy(pbuf+head_len-24, (void *)tail, tail_len); tail 2791 drivers/staging/rtl8723bs/os_dep/ioctl_cfg80211.c settings->beacon.tail, settings->beacon.tail_len); tail 2816 drivers/staging/rtl8723bs/os_dep/ioctl_cfg80211.c ret = rtw_add_beacon(adapter, info->head, info->head_len, info->tail, info->tail_len); tail 1255 drivers/staging/unisys/visornic/visornic_main.c skb->tail += RCVPOST_BUF_SIZE; tail 1269 drivers/staging/unisys/visornic/visornic_main.c skb->tail += skb->len; tail 1310 drivers/staging/unisys/visornic/visornic_main.c curr->tail += currsize; tail 1836 drivers/staging/wilc1000/wilc_hif.c memcpy(cur_byte, params->tail, params->tail_len); tail 614 drivers/target/target_core_user.c static inline size_t spc_used(size_t head, size_t tail, size_t size) tail 616 drivers/target/target_core_user.c int diff = head - tail; tail 624 drivers/target/target_core_user.c static inline size_t spc_free(size_t head, size_t tail, size_t size) tail 627 drivers/target/target_core_user.c return (size - spc_used(head, tail, size) - 1); tail 181 drivers/thunderbolt/nhi.c return ((ring->head + 1) % ring->size) == ring->tail; tail 186 drivers/thunderbolt/nhi.c return ring->head == ring->tail; tail 247 drivers/thunderbolt/nhi.c if (!(ring->descriptors[ring->tail].flags tail 254 drivers/thunderbolt/nhi.c frame->size = ring->descriptors[ring->tail].length; tail 255 drivers/thunderbolt/nhi.c frame->eof = ring->descriptors[ring->tail].eof; tail 256 drivers/thunderbolt/nhi.c frame->sof = ring->descriptors[ring->tail].sof; tail 257 drivers/thunderbolt/nhi.c frame->flags = ring->descriptors[ring->tail].flags; tail 259 drivers/thunderbolt/nhi.c ring->tail = (ring->tail + 1) % ring->size; tail 315 drivers/thunderbolt/nhi.c if (ring->descriptors[ring->tail].flags & RING_DESC_COMPLETED) { tail 321 drivers/thunderbolt/nhi.c frame->size = ring->descriptors[ring->tail].length; tail 322 drivers/thunderbolt/nhi.c frame->eof = ring->descriptors[ring->tail].eof; tail 323 drivers/thunderbolt/nhi.c frame->sof = ring->descriptors[ring->tail].sof; tail 324 drivers/thunderbolt/nhi.c frame->flags = ring->descriptors[ring->tail].flags; tail 327 drivers/thunderbolt/nhi.c ring->tail = (ring->tail + 1) % ring->size; tail 520 drivers/thunderbolt/nhi.c ring->tail = 0; tail 683 drivers/thunderbolt/nhi.c 
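The target_core_user helpers listed above compute ring occupancy from head and tail offsets and define free space as size minus used minus one, again reserving a slot to tell full from empty. A standalone restatement under the same assumptions (both offsets already reduced modulo size):

    #include <stddef.h>

    /* Bytes currently used in a ring of the given size (head, tail < size). */
    static inline size_t ring_used(size_t head, size_t tail, size_t size)
    {
        long diff = (long)head - (long)tail;

        if (diff >= 0)
            return (size_t)diff;
        return size + (size_t)diff;            /* head has wrapped past the end */
    }

    /* Free bytes, keeping one slot unused so head == tail means "empty". */
    static inline size_t ring_free(size_t head, size_t tail, size_t size)
    {
        return size - ring_used(head, tail, size) - 1;
    }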
ring->tail = 0; tail 216 drivers/tty/amiserial.c if (info->xmit.head != info->xmit.tail tail 349 drivers/tty/amiserial.c if (info->xmit.head == info->xmit.tail tail 358 drivers/tty/amiserial.c custom.serdat = info->xmit.buf[info->xmit.tail++] | 0x100; tail 360 drivers/tty/amiserial.c info->xmit.tail = info->xmit.tail & (SERIAL_XMIT_SIZE-1); tail 364 drivers/tty/amiserial.c info->xmit.tail, tail 371 drivers/tty/amiserial.c if (info->xmit.head == info->xmit.tail) { tail 571 drivers/tty/amiserial.c info->xmit.head = info->xmit.tail = 0; tail 794 drivers/tty/amiserial.c info->xmit.tail, tail 814 drivers/tty/amiserial.c if (info->xmit.head == info->xmit.tail tail 845 drivers/tty/amiserial.c info->xmit.tail, tail 861 drivers/tty/amiserial.c if (info->xmit.head != info->xmit.tail tail 883 drivers/tty/amiserial.c return CIRC_SPACE(info->xmit.head, info->xmit.tail, SERIAL_XMIT_SIZE); tail 892 drivers/tty/amiserial.c return CIRC_CNT(info->xmit.head, info->xmit.tail, SERIAL_XMIT_SIZE); tail 903 drivers/tty/amiserial.c info->xmit.head = info->xmit.tail = 0; tail 54 drivers/tty/ehv_bytechan.c unsigned int tail; /* circular buffer tail */ tail 416 drivers/tty/ehv_bytechan.c CIRC_CNT_TO_END(bc->head, bc->tail, BUF_SIZE), tail 419 drivers/tty/ehv_bytechan.c ret = local_ev_byte_channel_send(bc->handle, &len, bc->buf + bc->tail); tail 423 drivers/tty/ehv_bytechan.c bc->tail = (bc->tail + len) & (BUF_SIZE - 1); tail 425 drivers/tty/ehv_bytechan.c count = CIRC_CNT(bc->head, bc->tail, BUF_SIZE); tail 430 drivers/tty/ehv_bytechan.c if (CIRC_CNT(bc->head, bc->tail, BUF_SIZE)) tail 479 drivers/tty/ehv_bytechan.c len = CIRC_SPACE_TO_END(bc->head, bc->tail, BUF_SIZE); tail 546 drivers/tty/ehv_bytechan.c count = CIRC_SPACE(bc->head, bc->tail, BUF_SIZE); tail 694 drivers/tty/ehv_bytechan.c bc->tail = 0; tail 1872 drivers/tty/moxa.c u16 head, tail, tx_mask, spage, epage; tail 1880 drivers/tty/moxa.c tail = readw(ofsAddr + TXwptr); tail 1882 drivers/tty/moxa.c c = (head > tail) ? (head - tail - 1) : (head - tail + tx_mask); tail 1891 drivers/tty/moxa.c if (head > tail) tail 1892 drivers/tty/moxa.c len = head - tail - 1; tail 1894 drivers/tty/moxa.c len = tx_mask + 1 - tail; tail 1896 drivers/tty/moxa.c ofs = baseAddr + DynPage_addr + bufhead + tail; tail 1899 drivers/tty/moxa.c tail = (tail + len) & tx_mask; tail 1903 drivers/tty/moxa.c pageno = spage + (tail >> 13); tail 1904 drivers/tty/moxa.c pageofs = tail & Page_mask; tail 1918 drivers/tty/moxa.c tail = (tail + total) & tx_mask; tail 1920 drivers/tty/moxa.c writew(tail, ofsAddr + TXwptr); tail 1931 drivers/tty/moxa.c u16 tail, rx_mask, spage, epage; tail 1937 drivers/tty/moxa.c tail = readw(ofsAddr + RXwptr); tail 1941 drivers/tty/moxa.c count = (tail >= head) ? (tail - head) : (tail - head + rx_mask + 1); tail 1952 drivers/tty/moxa.c len = (tail >= head) ? 
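The thunderbolt NHI entries above consume hardware descriptors by testing a completion flag in the slot at tail and advancing tail modulo the ring size; head == tail is empty and (head + 1) % size == tail is full. A hedged sketch of that consumer with invented descriptor fields and flag values:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    #define DESC_COMPLETED 0x1u                /* illustrative flag bit */

    struct desc {                              /* stand-in for a hardware descriptor */
        uint32_t flags;
        uint32_t length;
    };

    struct hw_ring {
        struct desc *descriptors;
        size_t size;
        size_t head;                           /* producer: driver posts buffers here */
        size_t tail;                           /* consumer: completions read from here */
    };

    static bool ring_full(const struct hw_ring *r)
    {
        return (r->head + 1) % r->size == r->tail;
    }

    static bool ring_empty(const struct hw_ring *r)
    {
        return r->head == r->tail;
    }

    /* Pop one completed descriptor; returns false if none are ready. */
    static bool ring_pop_completed(struct hw_ring *r, struct desc *out)
    {
        if (ring_empty(r))
            return false;
        if (!(r->descriptors[r->tail].flags & DESC_COMPLETED))
            return false;                      /* hardware has not finished this slot */

        *out = r->descriptors[r->tail];
        r->tail = (r->tail + 1) % r->size;
        return true;
    }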
(tail - head) : tail 168 drivers/tty/n_tty.c size_t tail, size_t n) tail 171 drivers/tty/n_tty.c size_t size = N_TTY_BUF_SIZE - tail; tail 172 drivers/tty/n_tty.c void *from = read_buf_addr(ldata, tail); tail 630 drivers/tty/n_tty.c size_t tail; tail 635 drivers/tty/n_tty.c tail = ldata->echo_tail; tail 636 drivers/tty/n_tty.c while (MASK(ldata->echo_commit) != MASK(tail)) { tail 637 drivers/tty/n_tty.c c = echo_buf(ldata, tail); tail 647 drivers/tty/n_tty.c if (MASK(ldata->echo_commit) == MASK(tail + 1)) tail 654 drivers/tty/n_tty.c op = echo_buf(ldata, tail + 1); tail 660 drivers/tty/n_tty.c if (MASK(ldata->echo_commit) == MASK(tail + 2)) tail 662 drivers/tty/n_tty.c num_chars = echo_buf(ldata, tail + 2); tail 688 drivers/tty/n_tty.c tail += 3; tail 693 drivers/tty/n_tty.c tail += 2; tail 699 drivers/tty/n_tty.c tail += 2; tail 711 drivers/tty/n_tty.c tail += 2; tail 732 drivers/tty/n_tty.c tail += 2; tail 749 drivers/tty/n_tty.c tail += 1; tail 756 drivers/tty/n_tty.c while (ldata->echo_commit > tail && tail 757 drivers/tty/n_tty.c ldata->echo_commit - tail >= ECHO_DISCARD_WATERMARK) { tail 758 drivers/tty/n_tty.c if (echo_buf(ldata, tail) == ECHO_OP_START) { tail 759 drivers/tty/n_tty.c if (echo_buf(ldata, tail + 1) == ECHO_OP_ERASE_TAB) tail 760 drivers/tty/n_tty.c tail += 3; tail 762 drivers/tty/n_tty.c tail += 2; tail 764 drivers/tty/n_tty.c tail++; tail 768 drivers/tty/n_tty.c ldata->echo_tail = tail; tail 1054 drivers/tty/n_tty.c size_t tail = ldata->read_head; tail 1063 drivers/tty/n_tty.c while (MASK(tail) != MASK(ldata->canon_head)) { tail 1064 drivers/tty/n_tty.c tail--; tail 1065 drivers/tty/n_tty.c c = read_buf(ldata, tail); tail 1333 drivers/tty/n_tty.c size_t tail = ldata->canon_head; tail 1338 drivers/tty/n_tty.c while (MASK(tail) != MASK(ldata->read_head)) { tail 1339 drivers/tty/n_tty.c echo_char(read_buf(ldata, tail), tty); tail 1340 drivers/tty/n_tty.c tail++; tail 1721 drivers/tty/n_tty.c size_t tail = smp_load_acquire(&ldata->read_tail); tail 1723 drivers/tty/n_tty.c room = N_TTY_BUF_SIZE - (ldata->read_head - tail); tail 1728 drivers/tty/n_tty.c overflow = ldata->icanon && ldata->canon_head == tail; tail 1972 drivers/tty/n_tty.c size_t tail = ldata->read_tail & (N_TTY_BUF_SIZE - 1); tail 1975 drivers/tty/n_tty.c n = min(head - ldata->read_tail, N_TTY_BUF_SIZE - tail); tail 1978 drivers/tty/n_tty.c unsigned char *from = read_buf_addr(ldata, tail); tail 2025 drivers/tty/n_tty.c size_t tail; tail 2034 drivers/tty/n_tty.c tail = ldata->read_tail & (N_TTY_BUF_SIZE - 1); tail 2035 drivers/tty/n_tty.c size = min_t(size_t, tail + n, N_TTY_BUF_SIZE); tail 2038 drivers/tty/n_tty.c __func__, *nr, tail, n, size); tail 2040 drivers/tty/n_tty.c eol = find_next_bit(ldata->read_flags, size, tail); tail 2041 drivers/tty/n_tty.c more = n - (size - tail); tail 2049 drivers/tty/n_tty.c n = eol - tail; tail 2060 drivers/tty/n_tty.c __func__, eol, found, n, c, tail, more); tail 2062 drivers/tty/n_tty.c ret = tty_copy_to_user(tty, *b, tail, n); tail 2143 drivers/tty/n_tty.c size_t tail; tail 2175 drivers/tty/n_tty.c tail = ldata->read_tail; tail 2266 drivers/tty/n_tty.c if (tail != ldata->read_tail) tail 2431 drivers/tty/n_tty.c size_t nr, head, tail; tail 2436 drivers/tty/n_tty.c tail = ldata->read_tail; tail 2437 drivers/tty/n_tty.c nr = head - tail; tail 2439 drivers/tty/n_tty.c while (MASK(head) != MASK(tail)) { tail 2440 drivers/tty/n_tty.c if (test_bit(tail & (N_TTY_BUF_SIZE - 1), ldata->read_flags) && tail 2441 drivers/tty/n_tty.c read_buf(ldata, tail) == __DISABLED_CHAR) tail 
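The n_tty entries use a different convention from most of the drivers here: head and tail are free-running counters that are masked only when used as an index, so occupancy is a plain subtraction and every slot of the buffer can be used. A standalone sketch of that scheme (the kernel additionally pairs the counters with smp_load_acquire/smp_store_release where reader and writer run concurrently); the size and names are assumptions.

    #include <stddef.h>

    #define BUF_SIZE 4096u                     /* must be a power of two */
    #define MASK(x)  ((x) & (BUF_SIZE - 1))

    struct line_buf {
        unsigned char buf[BUF_SIZE];
        size_t head;    /* free-running producer counter, never masked in place */
        size_t tail;    /* free-running consumer counter */
    };

    /* Bytes queued: the counters only wrap at SIZE_MAX, so subtraction still works. */
    static inline size_t line_buf_cnt(const struct line_buf *lb)
    {
        return lb->head - lb->tail;
    }

    /* Room left before the producer would overwrite unread data. */
    static inline size_t line_buf_room(const struct line_buf *lb)
    {
        return BUF_SIZE - line_buf_cnt(lb);
    }

    static inline unsigned char line_buf_read(struct line_buf *lb)
    {
        return lb->buf[MASK(lb->tail++)];      /* caller must check cnt first */
    }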
2443 drivers/tty/n_tty.c tail++; tail 137 drivers/tty/serial/21285.c *CSR_UARTDR = xmit->buf[xmit->tail]; tail 138 drivers/tty/serial/21285.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 29 drivers/tty/serial/8250/8250_dma.c xmit->tail += dma->tx_size; tail 30 drivers/tty/serial/8250/8250_dma.c xmit->tail &= UART_XMIT_SIZE - 1; tail 78 drivers/tty/serial/8250/8250_dma.c dma->tx_size = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE); tail 81 drivers/tty/serial/8250/8250_dma.c dma->tx_addr + xmit->tail, tail 912 drivers/tty/serial/8250/8250_omap.c xmit->tail += dma->tx_size; tail 913 drivers/tty/serial/8250/8250_omap.c xmit->tail &= UART_XMIT_SIZE - 1; tail 968 drivers/tty/serial/8250/8250_omap.c dma->tx_size = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE); tail 1000 drivers/tty/serial/8250/8250_omap.c dma->tx_addr + xmit->tail + skip_byte, tail 1024 drivers/tty/serial/8250/8250_omap.c serial_out(p, UART_TX, xmit->buf[xmit->tail]); tail 1746 drivers/tty/serial/8250/8250_port.c serial_out(up, UART_TX, xmit->buf[xmit->tail]); tail 1747 drivers/tty/serial/8250/8250_port.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 163 drivers/tty/serial/altera_jtaguart.c writel(xmit->buf[xmit->tail], tail 165 drivers/tty/serial/altera_jtaguart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 266 drivers/tty/serial/altera_uart.c if (xmit->head == xmit->tail) tail 268 drivers/tty/serial/altera_uart.c altera_uart_writel(port, xmit->buf[xmit->tail], tail 270 drivers/tty/serial/altera_uart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 277 drivers/tty/serial/altera_uart.c if (xmit->head == xmit->tail) { tail 189 drivers/tty/serial/amba-pl010.c writel(xmit->buf[xmit->tail], uap->port.membase + UART01x_DR); tail 190 drivers/tty/serial/amba-pl010.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 629 drivers/tty/serial/amba-pl011.c if (xmit->tail < xmit->head) tail 630 drivers/tty/serial/amba-pl011.c memcpy(&dmatx->buf[0], &xmit->buf[xmit->tail], count); tail 632 drivers/tty/serial/amba-pl011.c size_t first = UART_XMIT_SIZE - xmit->tail; tail 639 drivers/tty/serial/amba-pl011.c memcpy(&dmatx->buf[0], &xmit->buf[xmit->tail], first); tail 683 drivers/tty/serial/amba-pl011.c xmit->tail = (xmit->tail + count) & (UART_XMIT_SIZE - 1); tail 1416 drivers/tty/serial/amba-pl011.c if (!pl011_tx_char(uap, xmit->buf[xmit->tail], from_irq)) tail 1419 drivers/tty/serial/amba-pl011.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 149 drivers/tty/serial/apbuart.c UART_PUT_CHAR(port, xmit->buf[xmit->tail]); tail 150 drivers/tty/serial/apbuart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 362 drivers/tty/serial/ar933x_uart.c ar933x_uart_putc(up, xmit->buf[xmit->tail]); tail 364 drivers/tty/serial/ar933x_uart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 172 drivers/tty/serial/arc_uart.c ch = xmit->buf[xmit->tail]; tail 173 drivers/tty/serial/arc_uart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 740 drivers/tty/serial/atmel_serial.c if (!CIRC_SPACE(ring->head, ring->tail, ATMEL_SERIAL_RINGSIZE)) tail 845 drivers/tty/serial/atmel_serial.c atmel_uart_write_char(port, xmit->buf[xmit->tail]); tail 846 drivers/tty/serial/atmel_serial.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 873 drivers/tty/serial/atmel_serial.c xmit->tail += atmel_port->tx_len; tail 874 drivers/tty/serial/atmel_serial.c xmit->tail &= UART_XMIT_SIZE - 1; tail 952 drivers/tty/serial/atmel_serial.c xmit->tail, tail 967 
drivers/tty/serial/atmel_serial.c phys_addr = sg_dma_address(sg_tx) + xmit->tail; tail 1153 drivers/tty/serial/atmel_serial.c if (ring->head < ring->tail) { tail 1154 drivers/tty/serial/atmel_serial.c count = sg_dma_len(&atmel_port->sg_rx) - ring->tail; tail 1156 drivers/tty/serial/atmel_serial.c tty_insert_flip_string(tport, ring->buf + ring->tail, count); tail 1157 drivers/tty/serial/atmel_serial.c ring->tail = 0; tail 1162 drivers/tty/serial/atmel_serial.c if (ring->tail < ring->head) { tail 1163 drivers/tty/serial/atmel_serial.c count = ring->head - ring->tail; tail 1165 drivers/tty/serial/atmel_serial.c tty_insert_flip_string(tport, ring->buf + ring->tail, count); tail 1169 drivers/tty/serial/atmel_serial.c ring->tail = ring->head; tail 1462 drivers/tty/serial/atmel_serial.c xmit->tail += pdc->ofs; tail 1463 drivers/tty/serial/atmel_serial.c xmit->tail &= UART_XMIT_SIZE - 1; tail 1479 drivers/tty/serial/atmel_serial.c count = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE); tail 1483 drivers/tty/serial/atmel_serial.c pdc->dma_addr + xmit->tail); tail 1525 drivers/tty/serial/atmel_serial.c while (ring->head != ring->tail) { tail 1531 drivers/tty/serial/atmel_serial.c c = ((struct atmel_uart_char *)ring->buf)[ring->tail]; tail 1533 drivers/tty/serial/atmel_serial.c ring->tail = (ring->tail + 1) & (ATMEL_SERIAL_RINGSIZE - 1); tail 1609 drivers/tty/serial/atmel_serial.c unsigned int tail; tail 1618 drivers/tty/serial/atmel_serial.c tail = pdc->ofs; tail 1632 drivers/tty/serial/atmel_serial.c if (likely(head != tail)) { tail 1642 drivers/tty/serial/atmel_serial.c count = head - tail; tail 2084 drivers/tty/serial/atmel_serial.c atmel_port->rx_ring.tail = 0; tail 338 drivers/tty/serial/bcm63xx_uart.c c = xmit->buf[xmit->tail]; tail 340 drivers/tty/serial/bcm63xx_uart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 172 drivers/tty/serial/clps711x.c writew(xmit->buf[xmit->tail], port->membase + UARTDR_OFFSET); tail 173 drivers/tty/serial/clps711x.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 718 drivers/tty/serial/cpm_uart/cpm_uart_core.c xmit->tail != xmit->head) { tail 722 drivers/tty/serial/cpm_uart/cpm_uart_core.c *p++ = xmit->buf[xmit->tail]; tail 723 drivers/tty/serial/cpm_uart/cpm_uart_core.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 726 drivers/tty/serial/cpm_uart/cpm_uart_core.c if (xmit->head == xmit->tail) tail 204 drivers/tty/serial/digicolor-usart.c writeb(xmit->buf[xmit->tail], port->membase + UA_EMI_REC); tail 205 drivers/tty/serial/digicolor-usart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 285 drivers/tty/serial/dz.c tmp = xmit->buf[xmit->tail]; tail 286 drivers/tty/serial/dz.c xmit->tail = (xmit->tail + 1) & (DZ_XMIT_SIZE - 1); tail 152 drivers/tty/serial/efm32-uart.c efm32_uart_write32(efm_port, xmit->buf[xmit->tail], tail 154 drivers/tty/serial/efm32-uart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 172 drivers/tty/serial/fsl_linflexuart.c c = xmit->buf[xmit->tail]; tail 181 drivers/tty/serial/fsl_linflexuart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 423 drivers/tty/serial/fsl_lpuart.c if (xmit->tail < xmit->head || xmit->head == 0) { tail 425 drivers/tty/serial/fsl_lpuart.c sg_init_one(sgl, xmit->buf + xmit->tail, sport->dma_tx_bytes); tail 429 drivers/tty/serial/fsl_lpuart.c sg_set_buf(sgl, xmit->buf + xmit->tail, tail 430 drivers/tty/serial/fsl_lpuart.c UART_XMIT_SIZE - xmit->tail); tail 472 drivers/tty/serial/fsl_lpuart.c xmit->tail = (xmit->tail + 
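The fsl_lpuart DMA transmit path above (and the imx one just after it) maps the pending bytes either as one scatterlist entry, when the region between tail and head is contiguous, or as two entries when it wraps. A standalone helper expressing the same split, with the kernel's sg_* calls replaced by a plain segment array:

    #include <stddef.h>

    struct dma_seg {
        const unsigned char *ptr;
        size_t len;
    };

    /*
     * Describe the bytes between tail and head of a circular transmit buffer
     * as one or two contiguous segments, e.g. to build a scatterlist for DMA.
     * head and tail are already reduced modulo size.  Returns 0, 1 or 2.
     */
    static int circ_to_segments(const unsigned char *buf, size_t size,
                                size_t head, size_t tail, struct dma_seg seg[2])
    {
        if (head == tail)
            return 0;                          /* nothing to send */

        if (tail < head) {                     /* contiguous run */
            seg[0].ptr = buf + tail;
            seg[0].len = head - tail;
            return 1;
        }

        seg[0].ptr = buf + tail;               /* tail .. end of buffer */
        seg[0].len = size - tail;
        seg[1].ptr = buf;                      /* start of buffer .. head */
        seg[1].len = head;
        return head ? 2 : 1;                   /* second segment only if head != 0 */
    }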
sport->dma_tx_bytes) & (UART_XMIT_SIZE - 1); tail 696 drivers/tty/serial/fsl_lpuart.c writeb(xmit->buf[xmit->tail], sport->port.membase + UARTDR); tail 697 drivers/tty/serial/fsl_lpuart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 729 drivers/tty/serial/fsl_lpuart.c lpuart32_write(&sport->port, xmit->buf[xmit->tail], UARTDATA); tail 730 drivers/tty/serial/fsl_lpuart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 1098 drivers/tty/serial/fsl_lpuart.c if (ring->head < ring->tail) { tail 1099 drivers/tty/serial/fsl_lpuart.c count = sport->rx_sgl.length - ring->tail; tail 1101 drivers/tty/serial/fsl_lpuart.c tty_insert_flip_string(port, ring->buf + ring->tail, count); tail 1102 drivers/tty/serial/fsl_lpuart.c ring->tail = 0; tail 1107 drivers/tty/serial/fsl_lpuart.c if (ring->tail < ring->head) { tail 1108 drivers/tty/serial/fsl_lpuart.c count = ring->head - ring->tail; tail 1109 drivers/tty/serial/fsl_lpuart.c tty_insert_flip_string(port, ring->buf + ring->tail, count); tail 1113 drivers/tty/serial/fsl_lpuart.c ring->tail = ring->head; tail 1227 drivers/tty/serial/fsl_lpuart.c sport->rx_ring.tail = 0; tail 619 drivers/tty/serial/icom.c int temp_tail = port->state->xmit.tail; tail 712 drivers/tty/serial/icom.c icom_port->uart_port.state->xmit.tail++; tail 713 drivers/tty/serial/icom.c icom_port->uart_port.state->xmit.tail &= tail 536 drivers/tty/serial/imx.c imx_uart_writel(sport, xmit->buf[xmit->tail], URTX0); tail 537 drivers/tty/serial/imx.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 565 drivers/tty/serial/imx.c xmit->tail = (xmit->tail + sport->tx_bytes) & (UART_XMIT_SIZE - 1); tail 606 drivers/tty/serial/imx.c if (xmit->tail < xmit->head || xmit->head == 0) { tail 608 drivers/tty/serial/imx.c sg_init_one(sgl, xmit->buf + xmit->tail, sport->tx_bytes); tail 612 drivers/tty/serial/imx.c sg_set_buf(sgl, xmit->buf + xmit->tail, tail 613 drivers/tty/serial/imx.c UART_XMIT_SIZE - xmit->tail); tail 1110 drivers/tty/serial/imx.c rx_ring->tail = ((rx_ring->head-1) / bd_size) * bd_size; tail 1113 drivers/tty/serial/imx.c rx_ring->head > rx_ring->tail) { tail 1116 drivers/tty/serial/imx.c r_bytes = rx_ring->head - rx_ring->tail; tail 1123 drivers/tty/serial/imx.c sport->rx_buf + rx_ring->tail, r_bytes); tail 1135 drivers/tty/serial/imx.c WARN_ON(rx_ring->head <= rx_ring->tail); tail 1157 drivers/tty/serial/imx.c sport->rx_ring.tail = 0; tail 412 drivers/tty/serial/ip22zilog.c writeb(xmit->buf[xmit->tail], &channel->data); tail 416 drivers/tty/serial/ip22zilog.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 612 drivers/tty/serial/ip22zilog.c writeb(xmit->buf[xmit->tail], &channel->data); tail 616 drivers/tty/serial/ip22zilog.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 356 drivers/tty/serial/jsm/jsm_cls.c u16 tail; tail 366 drivers/tty/serial/jsm/jsm_cls.c tail = ch->ch_r_tail & RQUEUEMASK; tail 373 drivers/tty/serial/jsm/jsm_cls.c qleft = tail - head - 1; tail 416 drivers/tty/serial/jsm/jsm_cls.c tail = (tail + 1) & RQUEUEMASK; tail 417 drivers/tty/serial/jsm/jsm_cls.c ch->ch_r_tail = tail; tail 451 drivers/tty/serial/jsm/jsm_cls.c u16 tail; tail 477 drivers/tty/serial/jsm/jsm_cls.c tail = circ->tail & (UART_XMIT_SIZE - 1); tail 484 drivers/tty/serial/jsm/jsm_cls.c writeb(circ->buf[tail], &ch->ch_cls_uart->txrx); tail 485 drivers/tty/serial/jsm/jsm_cls.c tail = (tail + 1) & (UART_XMIT_SIZE - 1); tail 492 drivers/tty/serial/jsm/jsm_cls.c circ->tail = tail & (UART_XMIT_SIZE - 1); tail 283 drivers/tty/serial/jsm/jsm_neo.c u16 
tail; tail 287 drivers/tty/serial/jsm/jsm_neo.c tail = ch->ch_r_tail & RQUEUEMASK; tail 294 drivers/tty/serial/jsm/jsm_neo.c if ((qleft = tail - head - 1) < 0) tail 443 drivers/tty/serial/jsm/jsm_neo.c ch->ch_rqueue[tail], ch->ch_equeue[tail]); tail 445 drivers/tty/serial/jsm/jsm_neo.c ch->ch_r_tail = tail = (tail + 1) & RQUEUEMASK; tail 477 drivers/tty/serial/jsm/jsm_neo.c u16 tail; tail 506 drivers/tty/serial/jsm/jsm_neo.c writeb(circ->buf[circ->tail], &ch->ch_neo_uart->txrx); tail 508 drivers/tty/serial/jsm/jsm_neo.c "Tx data: %x\n", circ->buf[circ->tail]); tail 509 drivers/tty/serial/jsm/jsm_neo.c circ->tail = (circ->tail + 1) & (UART_XMIT_SIZE - 1); tail 525 drivers/tty/serial/jsm/jsm_neo.c tail = circ->tail & (UART_XMIT_SIZE - 1); tail 533 drivers/tty/serial/jsm/jsm_neo.c s = ((head >= tail) ? head : UART_XMIT_SIZE) - tail; tail 539 drivers/tty/serial/jsm/jsm_neo.c memcpy_toio(&ch->ch_neo_uart->txrxburst, circ->buf + tail, s); tail 541 drivers/tty/serial/jsm/jsm_neo.c tail = (tail + s) & (UART_XMIT_SIZE - 1); tail 548 drivers/tty/serial/jsm/jsm_neo.c circ->tail = tail & (UART_XMIT_SIZE - 1); tail 516 drivers/tty/serial/jsm/jsm_tty.c u16 tail; tail 542 drivers/tty/serial/jsm/jsm_tty.c tail = ch->ch_r_tail & rmask; tail 544 drivers/tty/serial/jsm/jsm_tty.c data_len = (head - tail) & rmask; tail 561 drivers/tty/serial/jsm/jsm_tty.c ch->ch_r_head = tail; tail 577 drivers/tty/serial/jsm/jsm_tty.c ch->ch_portnum, head, tail); tail 591 drivers/tty/serial/jsm/jsm_tty.c s = ((head >= tail) ? head : RQUEUESIZE) - tail; tail 610 drivers/tty/serial/jsm/jsm_tty.c if (*(ch->ch_equeue +tail +i) & UART_LSR_BI) tail 611 drivers/tty/serial/jsm/jsm_tty.c tty_insert_flip_char(port, *(ch->ch_rqueue +tail +i), TTY_BREAK); tail 612 drivers/tty/serial/jsm/jsm_tty.c else if (*(ch->ch_equeue +tail +i) & UART_LSR_PE) tail 613 drivers/tty/serial/jsm/jsm_tty.c tty_insert_flip_char(port, *(ch->ch_rqueue +tail +i), TTY_PARITY); tail 614 drivers/tty/serial/jsm/jsm_tty.c else if (*(ch->ch_equeue +tail +i) & UART_LSR_FE) tail 615 drivers/tty/serial/jsm/jsm_tty.c tty_insert_flip_char(port, *(ch->ch_rqueue +tail +i), TTY_FRAME); tail 617 drivers/tty/serial/jsm/jsm_tty.c tty_insert_flip_char(port, *(ch->ch_rqueue +tail +i), TTY_NORMAL); tail 620 drivers/tty/serial/jsm/jsm_tty.c tty_insert_flip_string(port, ch->ch_rqueue + tail, s); tail 622 drivers/tty/serial/jsm/jsm_tty.c tail += s; tail 625 drivers/tty/serial/jsm/jsm_tty.c tail &= rmask; tail 628 drivers/tty/serial/jsm/jsm_tty.c ch->ch_r_tail = tail & rmask; tail 629 drivers/tty/serial/jsm/jsm_tty.c ch->ch_e_tail = tail & rmask; tail 245 drivers/tty/serial/lantiq.c writeb(port->state->xmit.buf[port->state->xmit.tail], tail 247 drivers/tty/serial/lantiq.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 301 drivers/tty/serial/lpc32xx_hs.c writel((u32) xmit->buf[xmit->tail], tail 303 drivers/tty/serial/lpc32xx_hs.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 294 drivers/tty/serial/max3100.c tx = xmit->buf[xmit->tail]; tail 295 drivers/tty/serial/max3100.c xmit->tail = (xmit->tail + 1) & tail 765 drivers/tty/serial/max310x.c until_end = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE); tail 775 drivers/tty/serial/max310x.c max310x_batch_write(port, xmit->buf + xmit->tail, until_end); tail 778 drivers/tty/serial/max310x.c max310x_batch_write(port, xmit->buf + xmit->tail, to_send); tail 783 drivers/tty/serial/max310x.c xmit->tail = (xmit->tail + to_send) & (UART_XMIT_SIZE - 1); tail 343 drivers/tty/serial/mcf.c if (xmit->head == xmit->tail) 
tail 345 drivers/tty/serial/mcf.c writeb(xmit->buf[xmit->tail], port->membase + MCFUART_UTB); tail 346 drivers/tty/serial/mcf.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE -1); tail 353 drivers/tty/serial/mcf.c if (xmit->head == xmit->tail) { tail 303 drivers/tty/serial/men_z135_uart.c int tail; tail 349 drivers/tty/serial/men_z135_uart.c tail = xmit->tail & (UART_XMIT_SIZE - 1); tail 351 drivers/tty/serial/men_z135_uart.c s = ((head >= tail) ? head : UART_XMIT_SIZE) - tail; tail 354 drivers/tty/serial/men_z135_uart.c memcpy_toio(port->membase + MEN_Z135_TX_RAM, &xmit->buf[xmit->tail], n); tail 355 drivers/tty/serial/men_z135_uart.c xmit->tail = (xmit->tail + n) & (UART_XMIT_SIZE - 1); tail 159 drivers/tty/serial/meson_uart.c ch = xmit->buf[xmit->tail]; tail 161 drivers/tty/serial/meson_uart.c xmit->tail = (xmit->tail+1) & (SERIAL_XMIT_SIZE - 1); tail 103 drivers/tty/serial/milbeaut_usio.c writew(xmit->buf[xmit->tail], port->membase + MLB_USIO_REG_DR); tail 105 drivers/tty/serial/milbeaut_usio.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 1460 drivers/tty/serial/mpc52xx_uart.c psc_ops->write_char(port, xmit->buf[xmit->tail]); tail 1461 drivers/tty/serial/mpc52xx_uart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 145 drivers/tty/serial/mps2-uart.c mps2_uart_write8(port, xmit->buf[xmit->tail], UARTn_DATA); tail 146 drivers/tty/serial/mps2-uart.c xmit->tail = (xmit->tail + 1) % UART_XMIT_SIZE; tail 461 drivers/tty/serial/msm_serial.c xmit->tail += count; tail 462 drivers/tty/serial/msm_serial.c xmit->tail &= UART_XMIT_SIZE - 1; tail 485 drivers/tty/serial/msm_serial.c cpu_addr = &xmit->buf[xmit->tail]; tail 844 drivers/tty/serial/msm_serial.c buf[i] = xmit->buf[xmit->tail + i]; tail 849 drivers/tty/serial/msm_serial.c xmit->tail = (xmit->tail + num_chars) & (UART_XMIT_SIZE - 1); tail 893 drivers/tty/serial/msm_serial.c pio_count = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE); tail 894 drivers/tty/serial/msm_serial.c dma_count = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE); tail 204 drivers/tty/serial/mux.c UART_PUT_CHAR(port, xmit->buf[xmit->tail]); tail 205 drivers/tty/serial/mux.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 198 drivers/tty/serial/mvebu-uart.c writel(xmit->buf[xmit->tail], port->membase + UART_TSH(port)); tail 199 drivers/tty/serial/mvebu-uart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 317 drivers/tty/serial/mvebu-uart.c writel(xmit->buf[xmit->tail], port->membase + UART_TSH(port)); tail 318 drivers/tty/serial/mvebu-uart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 598 drivers/tty/serial/mxs-auart.c xmit->tail, tail 600 drivers/tty/serial/mxs-auart.c memcpy(buffer + i, xmit->buf + xmit->tail, size); tail 601 drivers/tty/serial/mxs-auart.c xmit->tail = (xmit->tail + size) & (UART_XMIT_SIZE - 1); tail 630 drivers/tty/serial/mxs-auart.c mxs_write(xmit->buf[xmit->tail], s, REG_DATA); tail 631 drivers/tty/serial/mxs-auart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 384 drivers/tty/serial/omap-serial.c serial_out(up, UART_TX, xmit->buf[xmit->tail]); tail 385 drivers/tty/serial/omap-serial.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 198 drivers/tty/serial/owl-uart.c ch = xmit->buf[xmit->tail]; tail 200 drivers/tty/serial/owl-uart.c xmit->tail = (xmit->tail + 1) & (SERIAL_XMIT_SIZE - 1); tail 786 drivers/tty/serial/pch_uart.c xmit->tail += sg_dma_len(sg); tail 789 drivers/tty/serial/pch_uart.c xmit->tail &= UART_XMIT_SIZE - 1; tail 810 
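Most of the serial drivers in this stretch of the listing drain the uart_circ transmit buffer one byte at a time with the same idiom: emit buf[tail], then advance tail under a power-of-two mask, stopping when head == tail or the FIFO is full. A minimal sketch with the register write replaced by a callback; the ring size and names are illustrative.

    #define XMIT_SIZE 4096u                    /* power of two, like UART_XMIT_SIZE */

    struct xmit_ring {
        unsigned char buf[XMIT_SIZE];
        unsigned int head;
        unsigned int tail;
    };

    /* Push up to fifo_room bytes out of the ring; returns how many were written. */
    static unsigned int xmit_drain(struct xmit_ring *xmit, unsigned int fifo_room,
                                   void (*tx_byte)(unsigned char c, void *cookie),
                                   void *cookie)
    {
        unsigned int sent = 0;

        while (fifo_room-- && xmit->head != xmit->tail) {
            tx_byte(xmit->buf[xmit->tail], cookie);
            xmit->tail = (xmit->tail + 1) & (XMIT_SIZE - 1);
            sent++;
        }
        return sent;
    }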
drivers/tty/serial/pch_uart.c CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE); tail 812 drivers/tty/serial/pch_uart.c pch_uart_hal_write(priv, &xmit->buf[xmit->tail], sz); tail 813 drivers/tty/serial/pch_uart.c xmit->tail = (xmit->tail + sz) & (UART_XMIT_SIZE - 1); tail 909 drivers/tty/serial/pch_uart.c size = min(xmit->head - xmit->tail, fifo_size); tail 966 drivers/tty/serial/pch_uart.c bytes = min((int)CIRC_CNT(xmit->head, xmit->tail, tail 968 drivers/tty/serial/pch_uart.c xmit->tail, UART_XMIT_SIZE)); tail 1019 drivers/tty/serial/pch_uart.c sg->offset = (xmit->tail & (UART_XMIT_SIZE - 1)) + tail 309 drivers/tty/serial/pic32_uart.c unsigned int c = xmit->buf[xmit->tail]; tail 313 drivers/tty/serial/pic32_uart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 420 drivers/tty/serial/pmac_zilog.c write_zsdata(uap, xmit->buf[xmit->tail]); tail 423 drivers/tty/serial/pmac_zilog.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 643 drivers/tty/serial/pmac_zilog.c write_zsdata(uap, xmit->buf[xmit->tail]); tail 645 drivers/tty/serial/pmac_zilog.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 273 drivers/tty/serial/pnx8xxx_uart.c serial_out(sport, PNX8XXX_FIFO, xmit->buf[xmit->tail]); tail 274 drivers/tty/serial/pnx8xxx_uart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 196 drivers/tty/serial/pxa.c serial_out(up, UART_TX, xmit->buf[xmit->tail]); tail 197 drivers/tty/serial/pxa.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 689 drivers/tty/serial/qcom_geni_serial.c int tail; tail 708 drivers/tty/serial/qcom_geni_serial.c tail = xmit->tail; tail 733 drivers/tty/serial/qcom_geni_serial.c buf[c] = xmit->buf[tail++]; tail 734 drivers/tty/serial/qcom_geni_serial.c tail &= UART_XMIT_SIZE - 1; tail 745 drivers/tty/serial/qcom_geni_serial.c xmit->tail = tail; tail 352 drivers/tty/serial/rda-uart.c ch = xmit->buf[xmit->tail]; tail 354 drivers/tty/serial/rda-uart.c xmit->tail = (xmit->tail + 1) & (SERIAL_XMIT_SIZE - 1); tail 453 drivers/tty/serial/rp2.c writeb(xmit->buf[xmit->tail], up->base + RP2_DATA_BYTE); tail 454 drivers/tty/serial/rp2.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 264 drivers/tty/serial/sa1100.c UART_PUT_CHAR(sport, xmit->buf[xmit->tail]); tail 265 drivers/tty/serial/sa1100.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 181 drivers/tty/serial/samsung.c xmit->tail = (xmit->tail + count) & (UART_XMIT_SIZE - 1); tail 216 drivers/tty/serial/samsung.c xmit->tail = (xmit->tail + count) & (UART_XMIT_SIZE - 1); tail 293 drivers/tty/serial/samsung.c dma->tx_transfer_addr = dma->tx_addr + xmit->tail; tail 323 drivers/tty/serial/samsung.c count = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE); tail 332 drivers/tty/serial/samsung.c xmit->tail & (dma_get_cache_alignment() - 1)) tail 714 drivers/tty/serial/samsung.c count = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE); tail 719 drivers/tty/serial/samsung.c (xmit->tail & (dma_get_cache_alignment() - 1)); tail 753 drivers/tty/serial/samsung.c wr_regb(port, S3C2410_UTXH, xmit->buf[xmit->tail]); tail 754 drivers/tty/serial/samsung.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 406 drivers/tty/serial/sb1250-duart.c write_sbdchn(sport, R_DUART_TX_HOLD, xmit->buf[xmit->tail]); tail 407 drivers/tty/serial/sb1250-duart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 667 drivers/tty/serial/sc16is7xx.c s->buf[i] = xmit->buf[xmit->tail]; tail 668 drivers/tty/serial/sc16is7xx.c xmit->tail = (xmit->tail + 1) & 
(UART_XMIT_SIZE - 1); tail 474 drivers/tty/serial/sccnxp.c sccnxp_port_write(port, SCCNXP_THR_REG, xmit->buf[xmit->tail]); tail 475 drivers/tty/serial/sccnxp.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 492 drivers/tty/serial/serial-tegra.c tegra_uart_write(tup, xmit->buf[xmit->tail], UART_TX); tail 493 drivers/tty/serial/serial-tegra.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 522 drivers/tty/serial/serial-tegra.c xmit->tail = (xmit->tail + count) & (UART_XMIT_SIZE - 1); tail 540 drivers/tty/serial/serial-tegra.c tx_phys_addr = tup->tx_dma_buf_phys + xmit->tail; tail 560 drivers/tty/serial/serial-tegra.c unsigned long tail; tail 567 drivers/tty/serial/serial-tegra.c tail = (unsigned long)&xmit->buf[xmit->tail]; tail 568 drivers/tty/serial/serial-tegra.c count = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE); tail 574 drivers/tty/serial/serial-tegra.c else if (BYTES_TO_ALIGN(tail) > 0) tail 575 drivers/tty/serial/serial-tegra.c tegra_uart_start_pio_tx(tup, BYTES_TO_ALIGN(tail)); tail 620 drivers/tty/serial/serial-tegra.c xmit->tail = (xmit->tail + count) & (UART_XMIT_SIZE - 1); tail 600 drivers/tty/serial/serial_core.c c = CIRC_SPACE_TO_END(circ->head, circ->tail, UART_XMIT_SIZE); tail 361 drivers/tty/serial/serial_txx9.c sio_out(up, TXX9_SITFIFO, xmit->buf[xmit->tail]); tail 362 drivers/tty/serial/serial_txx9.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 826 drivers/tty/serial/sh-sci.c c = xmit->buf[xmit->tail]; tail 827 drivers/tty/serial/sh-sci.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 1192 drivers/tty/serial/sh-sci.c xmit->tail += s->tx_dma_len; tail 1193 drivers/tty/serial/sh-sci.c xmit->tail &= UART_XMIT_SIZE - 1; tail 1406 drivers/tty/serial/sh-sci.c int head, tail; tail 1417 drivers/tty/serial/sh-sci.c tail = xmit->tail; tail 1418 drivers/tty/serial/sh-sci.c buf = s->tx_dma_addr + (tail & (UART_XMIT_SIZE - 1)); tail 1420 drivers/tty/serial/sh-sci.c CIRC_CNT(head, tail, UART_XMIT_SIZE), tail 1421 drivers/tty/serial/sh-sci.c CIRC_CNT_TO_END(head, tail, UART_XMIT_SIZE)); tail 1451 drivers/tty/serial/sh-sci.c __func__, xmit->buf, tail, head, s->cookie_tx); tail 317 drivers/tty/serial/sifive.c __ssp_transmit_char(ssp, xmit->buf[xmit->tail]); tail 318 drivers/tty/serial/sifive.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 185 drivers/tty/serial/sirfsoc_uart.c tran_size = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE); tail 186 drivers/tty/serial/sirfsoc_uart.c tran_start = (unsigned long)(xmit->buf + xmit->tail); tail 241 drivers/tty/serial/sirfsoc_uart.c xmit->buf + xmit->tail, tail 433 drivers/tty/serial/sirfsoc_uart.c xmit->buf[xmit->tail]); tail 434 drivers/tty/serial/sirfsoc_uart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 451 drivers/tty/serial/sirfsoc_uart.c xmit->tail = (xmit->tail + sirfport->transfer_size) & tail 604 drivers/tty/serial/sirfsoc_uart.c sirfport->rx_dma_items.xmit.tail = tail 973 drivers/tty/serial/sirfsoc_uart.c sirfport->rx_dma_items.xmit.tail = tail 1027 drivers/tty/serial/sirfsoc_uart.c !CIRC_CNT(xmit->head, xmit->tail, tail 1196 drivers/tty/serial/sirfsoc_uart.c count = CIRC_CNT_TO_END(xmit->head, xmit->tail, tail 1200 drivers/tty/serial/sirfsoc_uart.c (const unsigned char *)&xmit->buf[xmit->tail], count); tail 1204 drivers/tty/serial/sirfsoc_uart.c xmit->tail = (xmit->tail + inserted) & tail 1206 drivers/tty/serial/sirfsoc_uart.c count = CIRC_CNT_TO_END(xmit->head, xmit->tail, tail 1404 drivers/tty/serial/sirfsoc_uart.c 
sirfport->rx_dma_items.xmit.tail = 0; tail 222 drivers/tty/serial/sprd_serial.c xmit->tail = (xmit->tail + trans_len) & (UART_XMIT_SIZE - 1); tail 239 drivers/tty/serial/sprd_serial.c CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE); tail 242 drivers/tty/serial/sprd_serial.c (void *)&(xmit->buf[xmit->tail]), tail 260 drivers/tty/serial/sprd_serial.c xmit->tail = (xmit->tail + sp->tx_dma.trans_len) & (UART_XMIT_SIZE - 1); tail 650 drivers/tty/serial/sprd_serial.c serial_out(port, SPRD_TXD, xmit->buf[xmit->tail]); tail 651 drivers/tty/serial/sprd_serial.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 277 drivers/tty/serial/st-asc.c c = xmit->buf[xmit->tail]; tail 278 drivers/tty/serial/st-asc.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 340 drivers/tty/serial/stm32-usart.c writel_relaxed(xmit->buf[xmit->tail], port->membase + ofs->tdr); tail 341 drivers/tty/serial/stm32-usart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 371 drivers/tty/serial/stm32-usart.c if (xmit->tail < xmit->head) { tail 372 drivers/tty/serial/stm32-usart.c memcpy(&stm32port->tx_buf[0], &xmit->buf[xmit->tail], count); tail 374 drivers/tty/serial/stm32-usart.c size_t one = UART_XMIT_SIZE - xmit->tail; tail 381 drivers/tty/serial/stm32-usart.c memcpy(&stm32port->tx_buf[0], &xmit->buf[xmit->tail], one); tail 409 drivers/tty/serial/stm32-usart.c xmit->tail = (xmit->tail + count) & (UART_XMIT_SIZE - 1); tail 49 drivers/tty/serial/sunhv.c long status = sun4v_con_putchar(xmit->buf[xmit->tail]); tail 54 drivers/tty/serial/sunhv.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 62 drivers/tty/serial/sunhv.c unsigned long ra = __pa(xmit->buf + xmit->tail); tail 65 drivers/tty/serial/sunhv.c len = CIRC_CNT_TO_END(xmit->head, xmit->tail, tail 70 drivers/tty/serial/sunhv.c xmit->tail = (xmit->tail + sent) & (UART_XMIT_SIZE - 1); tail 271 drivers/tty/serial/sunsab.c writeb(xmit->buf[xmit->tail], tail 273 drivers/tty/serial/sunsab.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 458 drivers/tty/serial/sunsab.c writeb(xmit->buf[xmit->tail], tail 460 drivers/tty/serial/sunsab.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 422 drivers/tty/serial/sunsu.c serial_out(up, UART_TX, xmit->buf[xmit->tail]); tail 423 drivers/tty/serial/sunsu.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 511 drivers/tty/serial/sunzilog.c writeb(xmit->buf[xmit->tail], &channel->data); tail 515 drivers/tty/serial/sunzilog.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 712 drivers/tty/serial/sunzilog.c writeb(xmit->buf[xmit->tail], &channel->data); tail 716 drivers/tty/serial/sunzilog.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 99 drivers/tty/serial/tegra-tcu.c count = CIRC_CNT_TO_END(xmit->head, xmit->tail, UART_XMIT_SIZE); tail 103 drivers/tty/serial/tegra-tcu.c tegra_tcu_write(tcu, &xmit->buf[xmit->tail], count); tail 104 drivers/tty/serial/tegra-tcu.c xmit->tail = (xmit->tail + count) & (UART_XMIT_SIZE - 1); tail 104 drivers/tty/serial/timbuart.c iowrite8(xmit->buf[xmit->tail], tail 106 drivers/tty/serial/timbuart.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 193 drivers/tty/serial/uartlite.c uart_out32(xmit->buf[xmit->tail], ULITE_TX, port); tail 194 drivers/tty/serial/uartlite.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE-1); tail 368 drivers/tty/serial/ucc_uart.c (xmit->tail != xmit->head)) { tail 372 drivers/tty/serial/ucc_uart.c *p++ = xmit->buf[xmit->tail]; tail 373 drivers/tty/serial/ucc_uart.c xmit->tail 
= (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 376 drivers/tty/serial/ucc_uart.c if (xmit->head == xmit->tail) tail 397 drivers/tty/serial/vr41xx_siu.c siu_write(port, UART_TX, xmit->buf[xmit->tail]); tail 398 drivers/tty/serial/vr41xx_siu.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 214 drivers/tty/serial/vt8500_serial.c writeb(xmit->buf[xmit->tail], port->membase + VT8500_TXFIFO); tail 216 drivers/tty/serial/vt8500_serial.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 325 drivers/tty/serial/xilinx_uartps.c port->state->xmit.buf[port->state->xmit.tail], tail 334 drivers/tty/serial/xilinx_uartps.c port->state->xmit.tail = tail 335 drivers/tty/serial/xilinx_uartps.c (port->state->xmit.tail + 1) & tail 629 drivers/tty/serial/zs.c write_zsdata(zport, xmit->buf[xmit->tail]); tail 630 drivers/tty/serial/zs.c xmit->tail = (xmit->tail + 1) & (UART_XMIT_SIZE - 1); tail 136 drivers/tty/tty_buffer.c buf->tail = &buf->sentinel; tail 264 drivers/tty/tty_buffer.c b = buf->tail; tail 276 drivers/tty/tty_buffer.c buf->tail = n; tail 319 drivers/tty/tty_buffer.c struct tty_buffer *tb = port->buf.tail; tail 354 drivers/tty/tty_buffer.c struct tty_buffer *tb = port->buf.tail; tail 387 drivers/tty/tty_buffer.c tb = port->buf.tail; tail 412 drivers/tty/tty_buffer.c smp_store_release(&buf->tail->commit, buf->tail->used); tail 435 drivers/tty/tty_buffer.c struct tty_buffer *tb = port->buf.tail; tail 575 drivers/tty/tty_buffer.c buf->tail = &buf->sentinel; tail 325 drivers/usb/atm/usbatm.c if (sarb->tail + ATM_CELL_PAYLOAD > sarb->end) { tail 739 drivers/usb/gadget/udc/fsl_udc_core.c lastreq->tail->next_td_ptr = tail 861 drivers/usb/gadget/udc/fsl_udc_core.c req->tail = dtd; tail 976 drivers/usb/gadget/udc/fsl_udc_core.c prev_req->tail->next_td_ptr = req->tail->next_td_ptr; tail 451 drivers/usb/gadget/udc/fsl_usb2_udc.h struct ep_td_struct *head, *tail; /* For dTD List tail 241 drivers/usb/gadget/udc/mv_udc.h struct mv_dtd *dtd, *head, *tail; tail 270 drivers/usb/gadget/udc/mv_udc_core.c lastreq->tail->dtd_next = tail 430 drivers/usb/gadget/udc/mv_udc_core.c req->tail = dtd; tail 833 drivers/usb/gadget/udc/mv_udc_core.c writel(readl(&req->tail->dtd_next), tail 834 drivers/usb/gadget/udc/mv_udc_core.c &prev_req->tail->dtd_next); tail 146 drivers/usb/host/uhci-debug.c goto tail; tail 161 drivers/usb/host/uhci-debug.c tail: tail 217 drivers/usb/host/uhci-debug.c goto tail; tail 234 drivers/usb/host/uhci-debug.c goto tail; tail 252 drivers/usb/host/uhci-debug.c goto tail; tail 258 drivers/usb/host/uhci-debug.c tail: tail 396 drivers/usb/host/uhci-debug.c goto tail; tail 409 drivers/usb/host/uhci-debug.c goto tail; tail 449 drivers/usb/host/uhci-debug.c goto tail; tail 490 drivers/usb/host/uhci-debug.c goto tail; tail 516 drivers/usb/host/uhci-debug.c goto tail; tail 545 drivers/usb/host/uhci-debug.c tail: tail 116 drivers/usb/musb/cppi_dma.c c->tail = NULL; tail 656 drivers/usb/musb/cppi_dma.c tx->tail = bd; tail 767 drivers/usb/musb/cppi_dma.c struct cppi_descriptor *bd, *tail; tail 835 drivers/usb/musb/cppi_dma.c for (i = 0, tail = NULL; bd && i < n_bds; i++, tail = bd) { tail 842 drivers/usb/musb/cppi_dma.c tail->next = bd; tail 843 drivers/usb/musb/cppi_dma.c tail->hw_next = bd->dma; tail 865 drivers/usb/musb/cppi_dma.c if (!tail) { tail 871 drivers/usb/musb/cppi_dma.c tail->next = NULL; tail 872 drivers/usb/musb/cppi_dma.c tail->hw_next = 0; tail 875 drivers/usb/musb/cppi_dma.c rx->tail = tail; tail 883 drivers/usb/musb/cppi_dma.c tail->hw_options |= CPPI_EOP_SET; tail 889 
drivers/usb/musb/cppi_dma.c tail = rx->last_processed; tail 890 drivers/usb/musb/cppi_dma.c if (tail) { tail 891 drivers/usb/musb/cppi_dma.c tail->next = bd; tail 892 drivers/usb/musb/cppi_dma.c tail->hw_next = bd->dma; tail 1105 drivers/usb/musb/cppi_dma.c rx->head, rx->tail, tail 1134 drivers/usb/musb/cppi_dma.c rx->tail = NULL; tail 1241 drivers/usb/musb/cppi_dma.c tx_ch->tail = NULL; tail 1409 drivers/usb/musb/cppi_dma.c cppi_ch->tail = NULL; tail 99 drivers/usb/musb/cppi_dma.h struct cppi_descriptor *tail; tail 154 drivers/usb/serial/io_edgeport.c unsigned int tail; /* index to tail pointer (read) */ tail 987 drivers/usb/serial/io_edgeport.c edge_port->txfifo.tail = 0; tail 1344 drivers/usb/serial/io_edgeport.c bytesleft = fifo->size - fifo->tail; tail 1346 drivers/usb/serial/io_edgeport.c memcpy(&buffer[2], &fifo->fifo[fifo->tail], firsthalf); tail 1347 drivers/usb/serial/io_edgeport.c fifo->tail += firsthalf; tail 1349 drivers/usb/serial/io_edgeport.c if (fifo->tail == fifo->size) tail 1350 drivers/usb/serial/io_edgeport.c fifo->tail = 0; tail 1354 drivers/usb/serial/io_edgeport.c memcpy(&buffer[2+firsthalf], &fifo->fifo[fifo->tail], tail 1356 drivers/usb/serial/io_edgeport.c fifo->tail += secondhalf; tail 194 drivers/usb/typec/tcpm/fusb302.c int tail; tail 197 drivers/usb/typec/tcpm/fusb302.c tail = chip->logbuffer_tail; tail 198 drivers/usb/typec/tcpm/fusb302.c while (tail != chip->logbuffer_head) { tail 199 drivers/usb/typec/tcpm/fusb302.c seq_printf(s, "%s\n", chip->logbuffer[tail]); tail 200 drivers/usb/typec/tcpm/fusb302.c tail = (tail + 1) % LOG_BUFFER_ENTRIES; tail 203 drivers/usb/typec/tcpm/fusb302.c chip->logbuffer_tail = tail; tail 559 drivers/usb/typec/tcpm/tcpm.c int tail; tail 562 drivers/usb/typec/tcpm/tcpm.c tail = port->logbuffer_tail; tail 563 drivers/usb/typec/tcpm/tcpm.c while (tail != port->logbuffer_head) { tail 564 drivers/usb/typec/tcpm/tcpm.c seq_printf(s, "%s\n", port->logbuffer[tail]); tail 565 drivers/usb/typec/tcpm/tcpm.c tail = (tail + 1) % LOG_BUFFER_ENTRIES; tail 568 drivers/usb/typec/tcpm/tcpm.c port->logbuffer_tail = tail; tail 302 drivers/vfio/vfio_iommu_type1.c struct page *tail = pfn_to_page(pfn); tail 303 drivers/vfio/vfio_iommu_type1.c struct page *head = compound_head(tail); tail 305 drivers/vfio/vfio_iommu_type1.c if (head != tail) { tail 317 drivers/vfio/vfio_iommu_type1.c if (PageTail(tail)) tail 320 drivers/vfio/vfio_iommu_type1.c return PageReserved(tail); tail 103 drivers/vhost/net.c int tail; tail 153 drivers/vhost/net.c if (rxq->tail != rxq->head) tail 161 drivers/vhost/net.c return rxq->tail - rxq->head; tail 166 drivers/vhost/net.c return rxq->tail == rxq->head; tail 181 drivers/vhost/net.c rxq->tail = ptr_ring_consume_batched(nvq->rx_ring, rxq->queue, tail 183 drivers/vhost/net.c return rxq->tail; tail 194 drivers/vhost/net.c rxq->head = rxq->tail = 0; tail 225 drivers/vhost/net.c rxq->head = rxq->tail = 0; tail 172 drivers/video/console/vgacon.c int tail; tail 196 drivers/video/console/vgacon.c scrollback->tail = 0; tail 255 drivers/video/console/vgacon.c vgacon_scrollback_cur->tail, tail 260 drivers/video/console/vgacon.c vgacon_scrollback_cur->tail += c->vc_size_row; tail 262 drivers/video/console/vgacon.c if (vgacon_scrollback_cur->tail >= vgacon_scrollback_cur->size) tail 263 drivers/video/console/vgacon.c vgacon_scrollback_cur->tail = 0; tail 323 drivers/video/console/vgacon.c soff = vgacon_scrollback_cur->tail - tail 62 drivers/video/fbdev/i810/i810_accel.c u32 head, count = WAIT_COUNT, tail; tail 65 
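The fusb302 and tcpm entries above keep a small array of preformatted log strings and dump it by walking tail toward head modulo the entry count, then saving the new tail so the next read resumes where this one stopped. A sketch with the seq_file output replaced by a caller-supplied callback; the entry count and line length are assumptions.

    #define LOG_ENTRIES   64                   /* illustrative entry count */
    #define LOG_ENTRY_LEN 128

    struct log_ring {
        char entries[LOG_ENTRIES][LOG_ENTRY_LEN];
        int head;                              /* next slot the logger writes */
        int tail;                              /* next slot the reader prints */
    };

    static void log_dump(struct log_ring *log,
                         void (*emit)(const char *line, void *cookie), void *cookie)
    {
        int tail = log->tail;

        while (tail != log->head) {
            emit(log->entries[tail], cookie);
            tail = (tail + 1) % LOG_ENTRIES;
        }
        log->tail = tail;                      /* resume here on the next dump */
    }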
drivers/video/fbdev/i810/i810_accel.c tail = par->cur_tail; tail 68 drivers/video/fbdev/i810/i810_accel.c if ((tail == head) || tail 69 drivers/video/fbdev/i810/i810_accel.c (tail > head && tail 70 drivers/video/fbdev/i810/i810_accel.c (par->iring.size - tail + head) >= space) || tail 71 drivers/video/fbdev/i810/i810_accel.c (tail < head && (head - tail) >= space)) { tail 551 drivers/video/fbdev/intelfb/intelfbhw.h u32 head, tail; \ tail 554 drivers/video/fbdev/intelfb/intelfbhw.h tail = INREG(PRI_RING_TAIL) & RING_TAIL_MASK; \ tail 556 drivers/video/fbdev/intelfb/intelfbhw.h } while (head != tail); \ tail 472 drivers/virt/fsl_hypervisor.c unsigned int tail; tail 499 drivers/virt/fsl_hypervisor.c if (dbq->head != nextp(dbq->tail)) { tail 500 drivers/virt/fsl_hypervisor.c dbq->q[dbq->tail] = doorbell; tail 506 drivers/virt/fsl_hypervisor.c dbq->tail = nextp(dbq->tail); tail 580 drivers/virt/fsl_hypervisor.c mask = (dbq->head == dbq->tail) ? 0 : (EPOLLIN | EPOLLRDNORM); tail 613 drivers/virt/fsl_hypervisor.c if (dbq->head == dbq->tail) { tail 620 drivers/virt/fsl_hypervisor.c dbq->head != dbq->tail)) tail 220 drivers/visorbus/visorchannel.c if (sig_hdr.head == sig_hdr.tail) tail 222 drivers/visorbus/visorchannel.c sig_hdr.tail = (sig_hdr.tail + 1) % sig_hdr.max_slots; tail 223 drivers/visorbus/visorchannel.c error = sig_read_data(channel, queue, &sig_hdr, sig_hdr.tail, msg); tail 232 drivers/visorbus/visorchannel.c error = SIG_WRITE_FIELD(channel, queue, &sig_hdr, tail); tail 274 drivers/visorbus/visorchannel.c return (sig_hdr.head == sig_hdr.tail); tail 310 drivers/visorbus/visorchannel.c if (sig_hdr.head == sig_hdr.tail) { tail 41 fs/9p/vfs_dir.c int tail; tail 108 fs/9p/vfs_dir.c if (rdir->tail == rdir->head) { tail 120 fs/9p/vfs_dir.c rdir->tail = n; tail 122 fs/9p/vfs_dir.c while (rdir->head < rdir->tail) { tail 124 fs/9p/vfs_dir.c rdir->tail - rdir->head, &st); tail 166 fs/9p/vfs_dir.c if (rdir->tail == rdir->head) { tail 173 fs/9p/vfs_dir.c rdir->tail = err; tail 176 fs/9p/vfs_dir.c while (rdir->head < rdir->tail) { tail 179 fs/9p/vfs_dir.c rdir->tail - rdir->head, tail 22 fs/affs/inode.c struct affs_tail *tail; tail 50 fs/affs/inode.c tail = AFFS_TAIL(sb, bh); tail 51 fs/affs/inode.c prot = be32_to_cpu(tail->protect); tail 76 fs/affs/inode.c id = be16_to_cpu(tail->uid); tail 84 fs/affs/inode.c id = be16_to_cpu(tail->gid); tail 92 fs/affs/inode.c switch (be32_to_cpu(tail->stype)) { tail 98 fs/affs/inode.c if (be32_to_cpu(tail->stype) == ST_USERDIR || tail 127 fs/affs/inode.c size = be32_to_cpu(tail->size); tail 136 fs/affs/inode.c if (tail->link_chain) tail 153 fs/affs/inode.c = (be32_to_cpu(tail->change.days) * 86400LL + tail 154 fs/affs/inode.c be32_to_cpu(tail->change.mins) * 60 + tail 155 fs/affs/inode.c be32_to_cpu(tail->change.ticks) / 50 + tail 174 fs/affs/inode.c struct affs_tail *tail; tail 188 fs/affs/inode.c tail = AFFS_TAIL(sb, bh); tail 189 fs/affs/inode.c if (tail->stype == cpu_to_be32(ST_ROOT)) { tail 193 fs/affs/inode.c tail->protect = cpu_to_be32(AFFS_I(inode)->i_protect); tail 194 fs/affs/inode.c tail->size = cpu_to_be32(inode->i_size); tail 195 fs/affs/inode.c affs_secs_to_datestamp(inode->i_mtime.tv_sec, &tail->change); tail 206 fs/affs/inode.c tail->uid = cpu_to_be16(uid); tail 208 fs/affs/inode.c tail->gid = cpu_to_be16(gid); tail 37 fs/affs/super.c struct affs_root_tail *tail = AFFS_ROOT_TAIL(sb, bh); tail 40 fs/affs/super.c affs_secs_to_datestamp(ktime_get_real_seconds(), &tail->disk_change); tail 63 fs/aio.c unsigned tail; tail 160 fs/aio.c unsigned tail; tail 
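The fsl_hypervisor doorbell queue above enqueues a 32-bit doorbell only if advancing tail would not collide with head, and reports poll readiness whenever head != tail. A compact sketch of that pair with nextp() written out; the queue length is an assumption.

    #include <stdbool.h>
    #include <stdint.h>

    #define DBQ_LEN 16u                        /* illustrative queue length */

    struct doorbell_queue {
        uint32_t q[DBQ_LEN];
        unsigned int head;                     /* reader index */
        unsigned int tail;                     /* writer index */
    };

    static inline unsigned int nextp(unsigned int i)
    {
        return (i + 1) % DBQ_LEN;
    }

    /* Returns false (and drops the doorbell) if the queue is full. */
    static bool dbq_push(struct doorbell_queue *dbq, uint32_t doorbell)
    {
        if (dbq->head == nextp(dbq->tail))
            return false;                      /* full: one slot kept free */
        dbq->q[dbq->tail] = doorbell;
        dbq->tail = nextp(dbq->tail);
        return true;
    }

    /* Mirrors the driver's poll test: readable whenever the queue is non-empty. */
    static bool dbq_readable(const struct doorbell_queue *dbq)
    {
        return dbq->head != dbq->tail;
    }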
547 fs/aio.c ring->head = ring->tail = 0; tail 958 fs/aio.c unsigned tail) tail 964 fs/aio.c if (head <= tail) tail 965 fs/aio.c events_in_ring = tail - head; tail 967 fs/aio.c events_in_ring = ctx->nr_events - (head - tail); tail 1006 fs/aio.c refill_reqs_available(ctx, head, ctx->tail); tail 1094 fs/aio.c unsigned tail, pos, head; tail 1104 fs/aio.c tail = ctx->tail; tail 1105 fs/aio.c pos = tail + AIO_EVENTS_OFFSET; tail 1107 fs/aio.c if (++tail >= ctx->nr_events) tail 1108 fs/aio.c tail = 0; tail 1118 fs/aio.c pr_debug("%p[%u]: %p: %p %Lx %Lx %Lx\n", ctx, tail, iocb, tail 1127 fs/aio.c ctx->tail = tail; tail 1131 fs/aio.c ring->tail = tail; tail 1137 fs/aio.c refill_reqs_available(ctx, head, tail); tail 1140 fs/aio.c pr_debug("added to ring %p at [%u]\n", iocb, tail); tail 1178 fs/aio.c unsigned head, tail, pos; tail 1194 fs/aio.c tail = ring->tail; tail 1203 fs/aio.c pr_debug("h%u t%u m%u\n", head, tail, ctx->nr_events); tail 1205 fs/aio.c if (head == tail) tail 1209 fs/aio.c tail %= ctx->nr_events; tail 1216 fs/aio.c avail = (head <= tail ? tail : ctx->nr_events) - head; tail 1217 fs/aio.c if (head == tail) tail 1247 fs/aio.c pr_debug("%li h%u t%u\n", ret, head, tail); tail 1089 fs/btrfs/raid56.c struct bio *last = bio_list->tail; tail 505 fs/btrfs/volumes.c struct bio *head, struct bio *tail) tail 512 fs/btrfs/volumes.c if (pending_bios->tail) tail 513 fs/btrfs/volumes.c tail->bi_next = old_head; tail 515 fs/btrfs/volumes.c pending_bios->tail = tail; tail 535 fs/btrfs/volumes.c struct bio *tail; tail 575 fs/btrfs/volumes.c tail = pending_bios->tail; tail 576 fs/btrfs/volumes.c WARN_ON(pending && !tail); tail 596 fs/btrfs/volumes.c pending_bios->tail = NULL; tail 612 fs/btrfs/volumes.c requeue_list(pending_bios, pending, tail); tail 679 fs/btrfs/volumes.c requeue_list(pending_bios, pending, tail); tail 6497 fs/btrfs/volumes.c if (pending_bios->tail) tail 6498 fs/btrfs/volumes.c pending_bios->tail->bi_next = bio; tail 6500 fs/btrfs/volumes.c pending_bios->tail = bio; tail 23 fs/btrfs/volumes.h struct bio *tail; tail 867 fs/buffer.c struct buffer_head *bh, *tail; tail 871 fs/buffer.c tail = bh; tail 874 fs/buffer.c tail->b_this_page = head; tail 1527 fs/buffer.c struct buffer_head *bh, *head, *tail; tail 1533 fs/buffer.c tail = bh; tail 1536 fs/buffer.c tail->b_this_page = head; tail 111 fs/direct-io.c unsigned tail; /* last valid page + 1 */ tail 161 fs/direct-io.c return sdio->tail - sdio->head; tail 186 fs/direct-io.c sdio->tail = 1; tail 196 fs/direct-io.c sdio->tail = (ret + PAGE_SIZE - 1) / PAGE_SIZE; tail 493 fs/direct-io.c while (sdio->head < sdio->tail) tail 979 fs/direct-io.c to = (sdio->head == sdio->tail - 1) ? 
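The fs/buffer.c entries above turn a NULL-terminated chain of per-page buffer heads into a ring by walking to the last node and pointing it back at the first. The same construction with a stand-in node type:

    struct node {
        struct node *next;                     /* stands in for b_this_page */
        int index;
    };

    /* Turn a NULL-terminated chain into a ring, remembering the tail as we walk. */
    static void close_ring(struct node *head)
    {
        struct node *tail = head;

        if (!head)
            return;
        while (tail->next)
            tail = tail->next;                 /* find the last node */
        tail->next = head;                     /* tail now points back at head */
    }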
sdio->to : PAGE_SIZE; tail 1178 fs/fs-writeback.c struct inode *tail; tail 1180 fs/fs-writeback.c tail = wb_inode(wb->b_dirty.next); tail 1181 fs/fs-writeback.c if (time_before(inode->dirtied_when, tail->dirtied_when)) tail 449 fs/fs_context.c sizeof(log->tail) != sizeof(u8)); tail 450 fs/fs_context.c if ((u8)(log->head - log->tail) == logsize) { tail 454 fs/fs_context.c log->tail++; tail 39 fs/fsopen.c if (log->head == log->tail) { tail 44 fs/fsopen.c index = log->tail & (logsize - 1); tail 49 fs/fsopen.c log->tail++; tail 484 fs/gfs2/log.c unsigned int tail; tail 489 fs/gfs2/log.c tail = sdp->sd_log_head; tail 493 fs/gfs2/log.c tail = tr->tr_first; tail 498 fs/gfs2/log.c return tail; tail 688 fs/gfs2/log.c u64 seq, u32 tail, u32 lblock, u32 flags, tail 709 fs/gfs2/log.c lh->lh_tail = cpu_to_be32(tail); tail 764 fs/gfs2/log.c unsigned int tail; tail 769 fs/gfs2/log.c tail = current_tail(sdp); tail 776 fs/gfs2/log.c sdp->sd_log_idle = (tail == sdp->sd_log_flush_head); tail 777 fs/gfs2/log.c gfs2_write_log_header(sdp, sdp->sd_jdesc, sdp->sd_log_sequence++, tail, tail 780 fs/gfs2/log.c if (sdp->sd_log_tail != tail) tail 781 fs/gfs2/log.c log_pull_tail(sdp, tail); tail 70 fs/gfs2/log.h u64 seq, u32 tail, u32 lblock, u32 flags, tail 85 fs/io_uring.c u32 tail ____cacheline_aligned_in_smp; tail 481 fs/io_uring.c if (ctx->cached_cq_tail != READ_ONCE(rings->cq.tail)) { tail 483 fs/io_uring.c smp_store_release(&rings->cq.tail, ctx->cached_cq_tail); tail 555 fs/io_uring.c unsigned tail; tail 557 fs/io_uring.c tail = ctx->cached_cq_tail; tail 563 fs/io_uring.c if (tail - READ_ONCE(rings->cq.head) == rings->cq_ring_entries) tail 567 fs/io_uring.c return &rings->cqes[tail & ctx->cq_mask]; tail 751 fs/io_uring.c return READ_ONCE(rings->cq.tail) - READ_ONCE(rings->cq.head); tail 759 fs/io_uring.c return smp_load_acquire(&rings->sq.tail) - ctx->cached_sq_head; tail 2661 fs/io_uring.c if (head == smp_load_acquire(&rings->sq.tail)) tail 3020 fs/io_uring.c return READ_ONCE(rings->cq.head) == READ_ONCE(rings->cq.tail) ? 
ret : 0; tail 3662 fs/io_uring.c if (READ_ONCE(ctx->rings->sq.tail) - ctx->cached_sq_head != tail 3945 fs/io_uring.c p->sq_off.tail = offsetof(struct io_rings, sq.tail); tail 3954 fs/io_uring.c p->cq_off.tail = offsetof(struct io_rings, cq.tail); tail 857 fs/jbd2/journal.c struct jbd2_journal_block_tail *tail; tail 863 fs/jbd2/journal.c tail = (struct jbd2_journal_block_tail *)(bh->b_data + j->j_blocksize - tail 865 fs/jbd2/journal.c tail->t_checksum = 0; tail 867 fs/jbd2/journal.c tail->t_checksum = cpu_to_be32(csum); tail 176 fs/jbd2/recovery.c struct jbd2_journal_block_tail *tail; tail 183 fs/jbd2/recovery.c tail = (struct jbd2_journal_block_tail *)(buf + j->j_blocksize - tail 185 fs/jbd2/recovery.c provided = tail->t_checksum; tail 186 fs/jbd2/recovery.c tail->t_checksum = 0; tail 188 fs/jbd2/recovery.c tail->t_checksum = provided; tail 2025 fs/jfs/jfs_logmgr.c struct lbuf *tail; tail 2047 fs/jfs/jfs_logmgr.c tail = log->wqueue; tail 2052 fs/jfs/jfs_logmgr.c if (tail == NULL) { tail 2057 fs/jfs/jfs_logmgr.c bp->l_wqnext = tail->l_wqnext; tail 2058 fs/jfs/jfs_logmgr.c tail->l_wqnext = bp; tail 2061 fs/jfs/jfs_logmgr.c tail = bp; tail 2065 fs/jfs/jfs_logmgr.c if ((bp != tail->l_wqnext) || !(flag & lbmWRITE)) { tail 2182 fs/jfs/jfs_logmgr.c struct lbuf *nextbp, *tail; tail 2242 fs/jfs/jfs_logmgr.c tail = log->wqueue; tail 2245 fs/jfs/jfs_logmgr.c if (bp == tail) { tail 2260 fs/jfs/jfs_logmgr.c nextbp = tail->l_wqnext = bp->l_wqnext; tail 403 fs/nfsd/nfs3xdr.c struct kvec *tail = rqstp->rq_arg.tail; tail 426 fs/nfsd/nfs3xdr.c dlen = head->iov_len + rqstp->rq_arg.page_len + tail->iov_len - hdr; tail 505 fs/nfsd/nfs3xdr.c rqstp->rq_arg.tail[0].iov_len; tail 715 fs/nfsd/nfs3xdr.c rqstp->rq_res.tail[0].iov_base = p; tail 717 fs/nfsd/nfs3xdr.c rqstp->rq_res.tail[0].iov_len = 4 - (resp->len&3); tail 740 fs/nfsd/nfs3xdr.c rqstp->rq_res.tail[0].iov_base = p; tail 742 fs/nfsd/nfs3xdr.c rqstp->rq_res.tail[0].iov_len = 4 - (resp->count & 3); tail 823 fs/nfsd/nfs3xdr.c rqstp->rq_res.tail[0].iov_base = p; tail 826 fs/nfsd/nfs3xdr.c rqstp->rq_res.tail[0].iov_len = 2<<2; tail 164 fs/nfsd/nfs4xdr.c struct kvec *vec = &argp->rqstp->rq_arg.tail[0]; tail 166 fs/nfsd/nfs4xdr.c if (!argp->tail) { tail 167 fs/nfsd/nfs4xdr.c argp->tail = true; tail 3496 fs/nfsd/nfs4xdr.c buf->tail[0].iov_base = xdr->p; tail 3497 fs/nfsd/nfs4xdr.c buf->tail[0].iov_len = 0; tail 3498 fs/nfsd/nfs4xdr.c xdr->iov = buf->tail; tail 3504 fs/nfsd/nfs4xdr.c buf->tail[0].iov_base += maxcount&3; tail 3505 fs/nfsd/nfs4xdr.c buf->tail[0].iov_len = pad; tail 4565 fs/nfsd/nfs4xdr.c args->tail = false; tail 4584 fs/nfsd/nfs4xdr.c buf->tail[0].iov_len); tail 395 fs/nfsd/nfsxdr.c p = rqstp->rq_arg.tail[0].iov_base; tail 461 fs/nfsd/nfsxdr.c rqstp->rq_res.tail[0].iov_base = p; tail 463 fs/nfsd/nfsxdr.c rqstp->rq_res.tail[0].iov_len = 4 - (resp->len&3); tail 481 fs/nfsd/nfsxdr.c rqstp->rq_res.tail[0].iov_base = p; tail 483 fs/nfsd/nfsxdr.c rqstp->rq_res.tail[0].iov_len = 4 - (resp->count&3); tail 651 fs/nfsd/xdr4.h bool tail; tail 1730 fs/ntfs/aops.c struct buffer_head *tail; tail 1734 fs/ntfs/aops.c tail = bh; tail 1737 fs/ntfs/aops.c tail->b_this_page = head; tail 498 fs/ntfs/mft.c struct buffer_head *tail; tail 503 fs/ntfs/mft.c tail = bh; tail 506 fs/ntfs/mft.c tail->b_this_page = head; tail 373 fs/ntfs/runlist.c int tail; /* Start of tail of @dst. 
*/ tail 412 fs/ntfs/runlist.c tail = loc + right + 1; tail 423 fs/ntfs/runlist.c ntfs_rl_mm(dst, marker, tail, dsize - tail); tail 427 fs/ntfs/runlist.c if (dsize - tail > 0 && dst[marker].lcn == LCN_ENOENT) tail 1111 fs/pipe.c unsigned int tail; tail 1114 fs/pipe.c tail = pipe->curbuf + pipe->nrbufs; tail 1115 fs/pipe.c if (tail < pipe->buffers) tail 1116 fs/pipe.c tail = 0; tail 1118 fs/pipe.c tail &= (pipe->buffers - 1); tail 1120 fs/pipe.c head = pipe->nrbufs - tail; tail 1123 fs/pipe.c if (tail) tail 1124 fs/pipe.c memcpy(bufs + head, pipe->bufs, tail * sizeof(struct pipe_buffer)); tail 749 fs/reiserfs/journal.c int tail) tail 775 fs/reiserfs/journal.c if (tail) tail 214 fs/reiserfs/tail_conversion.c char *tail; tail 245 fs/reiserfs/tail_conversion.c tail = (char *)kmap(page); /* this can schedule */ tail 275 fs/reiserfs/tail_conversion.c tail = tail + (pos & (PAGE_SIZE - 1)); tail 284 fs/reiserfs/tail_conversion.c tail ? tail : NULL) < 0) { tail 38 include/asm-generic/qspinlock_types.h u16 tail; tail 42 include/asm-generic/qspinlock_types.h u16 tail; tail 40 include/crypto/ctr.h int tail = 0; tail 43 include/crypto/ctr.h tail = walk.nbytes & (blocksize - 1); tail 44 include/crypto/ctr.h nbytes -= tail; tail 60 include/crypto/ctr.h err = skcipher_walk_done(&walk, tail); tail 58 include/drm/drm_debugfs_crc.h int head, tail; tail 43 include/drm/spsc_queue.h atomic_long_t tail; tail 51 include/drm/spsc_queue.h atomic_long_set(&queue->tail, (long)&queue->head); tail 67 include/drm/spsc_queue.h struct spsc_node **tail; tail 73 include/drm/spsc_queue.h tail = (struct spsc_node **)atomic_long_xchg(&queue->tail, (long)&node->next); tail 74 include/drm/spsc_queue.h WRITE_ONCE(*tail, node); tail 85 include/drm/spsc_queue.h return tail == &queue->head; tail 107 include/drm/spsc_queue.h if (atomic_long_cmpxchg(&queue->tail, tail 1057 include/linux/ata.h unsigned long lba_sects, chs_sects, head, tail; tail 1086 include/linux/ata.h tail = lba_sects & 0xffff; tail 1087 include/linux/ata.h lba_sects = head | (tail << 16); tail 577 include/linux/bio.h struct bio *tail; tail 587 include/linux/bio.h bl->head = bl->tail = NULL; tail 610 include/linux/bio.h if (bl->tail) tail 611 include/linux/bio.h bl->tail->bi_next = bio; tail 615 include/linux/bio.h bl->tail = bio; tail 624 include/linux/bio.h if (!bl->tail) tail 625 include/linux/bio.h bl->tail = bio; tail 633 include/linux/bio.h if (bl->tail) tail 634 include/linux/bio.h bl->tail->bi_next = bl2->head; tail 638 include/linux/bio.h bl->tail = bl2->tail; tail 648 include/linux/bio.h bl2->tail->bi_next = bl->head; tail 650 include/linux/bio.h bl->tail = bl2->tail; tail 667 include/linux/bio.h bl->tail = NULL; tail 679 include/linux/bio.h bl->head = bl->tail = NULL; tail 12 include/linux/circ_buf.h int tail; tail 16 include/linux/circ_buf.h #define CIRC_CNT(head,tail,size) (((head) - (tail)) & ((size)-1)) tail 21 include/linux/circ_buf.h #define CIRC_SPACE(head,tail,size) CIRC_CNT((tail),((head)+1),(size)) tail 26 include/linux/circ_buf.h #define CIRC_CNT_TO_END(head,tail,size) \ tail 27 include/linux/circ_buf.h ({int end = (size) - (tail); \ tail 32 include/linux/circ_buf.h #define CIRC_SPACE_TO_END(head,tail,size) \ tail 34 include/linux/circ_buf.h int n = (end + (tail)) & ((size)-1); \ tail 179 include/linux/fs_context.h u8 tail; /* Removal index in buffer[] */ tail 30 include/linux/hidraw.h int tail; tail 63 include/linux/iova.h unsigned head, tail; tail 20 include/linux/osq_lock.h atomic_t tail; tail 30 include/linux/osq_lock.h 
atomic_set(&lock->tail, OSQ_UNLOCKED_VAL); tail 38 include/linux/osq_lock.h return atomic_read(&lock->tail) != OSQ_UNLOCKED_VAL; tail 23 include/linux/rcu_segcblist.h struct rcu_head **tail; tail 28 include/linux/rcu_segcblist.h #define RCU_CBLIST_INITIALIZER(n) { .head = NULL, .tail = &n.head } tail 107 include/linux/rtnetlink.h void rtnl_kfree_skbs(struct sk_buff *head, struct sk_buff *tail); tail 422 include/linux/serial_core.h #define uart_circ_empty(circ) ((circ)->head == (circ)->tail) tail 423 include/linux/serial_core.h #define uart_circ_clear(circ) ((circ)->head = (circ)->tail = 0) tail 426 include/linux/serial_core.h (CIRC_CNT((circ)->head, (circ)->tail, UART_XMIT_SIZE)) tail 429 include/linux/serial_core.h (CIRC_SPACE((circ)->head, (circ)->tail, UART_XMIT_SIZE)) tail 878 include/linux/skbuff.h sk_buff_data_t tail; tail 2155 include/linux/skbuff.h return skb->head + skb->tail; tail 2160 include/linux/skbuff.h skb->tail = skb->data - skb->head; tail 2166 include/linux/skbuff.h skb->tail += offset; tail 2172 include/linux/skbuff.h return skb->tail; tail 2177 include/linux/skbuff.h skb->tail = skb->data; tail 2182 include/linux/skbuff.h skb->tail = skb->data + offset; tail 2190 include/linux/skbuff.h void *pskb_put(struct sk_buff *skb, struct sk_buff *tail, int len); tail 2196 include/linux/skbuff.h skb->tail += len; tail 2314 include/linux/skbuff.h return skb_is_nonlinear(skb) ? 0 : skb->end - skb->tail; tail 2329 include/linux/skbuff.h return skb->end - skb->tail - skb->reserved_tailroom; tail 2343 include/linux/skbuff.h skb->tail += len; tail 258 include/linux/string.h const char *tail = strrchr(path, '/'); tail 259 include/linux/string.h return tail ? tail + 1 : path; tail 54 include/linux/sunrpc/xdr.h tail[1]; /* Appended after page data */ tail 74 include/linux/sunrpc/xdr.h buf->tail[0].iov_len = 0; tail 229 include/linux/swap.h struct swap_cluster_info tail; tail 486 include/linux/thunderbolt.h int tail; tail 94 include/linux/tty.h struct tty_buffer *tail; /* Active buffer */ tail 21 include/linux/tty_flip.h struct tty_buffer *tb = port->buf.tail; tail 171 include/linux/visorbus.h u32 tail; tail 855 include/linux/vmw_vmci_defs.h u64 tail; tail 859 include/linux/vmw_vmci_defs.h tail = vmci_q_header_producer_tail(produce_q_header); tail 862 include/linux/vmw_vmci_defs.h if (tail >= produce_q_size || head >= produce_q_size) tail 870 include/linux/vmw_vmci_defs.h if (tail >= head) tail 871 include/linux/vmw_vmci_defs.h free_space = produce_q_size - (tail - head) - 1; tail 873 include/linux/vmw_vmci_defs.h free_space = head - tail - 1; tail 489 include/net/bluetooth/l2cap.h __u16 tail; tail 897 include/net/cfg80211.h const u8 *head, *tail; tail 20 include/net/gen_stats.h struct nlattr * tail; tail 52 include/net/sch_generic.h struct sk_buff *tail; tail 949 include/net/sch_generic.h qh->tail = NULL; tail 956 include/net/sch_generic.h struct sk_buff *last = qh->tail; tail 961 include/net/sch_generic.h qh->tail = skb; tail 963 include/net/sch_generic.h qh->tail = skb; tail 982 include/net/sch_generic.h qh->tail = skb; tail 995 include/net/sch_generic.h qh->tail = NULL; tail 1138 include/net/sch_generic.h rtnl_kfree_skbs(qh->head, qh->tail); tail 1141 include/net/sch_generic.h qh->tail = NULL; tail 382 include/net/sock.h struct sk_buff *tail; tail 907 include/net/sock.h if (!sk->sk_backlog.tail) tail 910 include/net/sock.h sk->sk_backlog.tail->next = skb; tail 912 include/net/sock.h sk->sk_backlog.tail = skb; tail 1041 include/net/sock.h if (unlikely(READ_ONCE(sk->sk_backlog.tail))) { 
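The include/linux/circ_buf.h macros and the uart_circ_*() helpers listed above all rest on the same power-of-two head/tail accounting: the producer advances head, the consumer advances tail, and one slot is always left free so that head == tail can only mean "empty". A minimal user-space sketch of that pattern follows (illustration only, not kernel code; the ring type, RING_SIZE and the ring_put/ring_get helpers are invented for the example, while the two macros are copied from the circ_buf.h entries above):

#include <stdio.h>

#define RING_SIZE 8	/* must be a power of two */

#define CIRC_CNT(head, tail, size)   (((head) - (tail)) & ((size) - 1))
#define CIRC_SPACE(head, tail, size) CIRC_CNT((tail), ((head) + 1), (size))

struct ring {
	int buf[RING_SIZE];
	unsigned int head;	/* producer index */
	unsigned int tail;	/* consumer index */
};

static int ring_put(struct ring *r, int v)
{
	if (!CIRC_SPACE(r->head, r->tail, RING_SIZE))
		return -1;	/* full: one slot is always kept free */
	r->buf[r->head] = v;
	r->head = (r->head + 1) & (RING_SIZE - 1);
	return 0;
}

static int ring_get(struct ring *r, int *v)
{
	if (!CIRC_CNT(r->head, r->tail, RING_SIZE))
		return -1;	/* empty: tail has caught up with head */
	*v = r->buf[r->tail];
	r->tail = (r->tail + 1) & (RING_SIZE - 1);
	return 0;
}

int main(void)
{
	struct ring r = { .head = 0, .tail = 0 };
	int v;

	ring_put(&r, 42);
	if (ring_get(&r, &v) == 0)
		printf("%d\n", v);	/* prints 42 */
	return 0;
}

The kernel callers in this listing (serial_core above, acpidbg and mtty further below) follow the same shape, with the locking or memory barriers around the index updates that this sketch omits.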
tail 84 include/rdma/rdmavt_cq.h u32 tail; /* index of next ib_poll_cq() entry */ tail 216 include/rdma/rdmavt_qp.h u32 tail; /* receives pull requests from here. */ tail 941 include/rdma/rdmavt_qp.h RDMA_READ_UAPI_ATOMIC(cq->queue->tail) : tail 942 include/rdma/rdmavt_qp.h ibcq_to_rvtcq(send_cq)->kqueue->tail; tail 1001 include/trace/events/ext4.h __field( __u16, tail ) tail 1024 include/trace/events/ext4.h __entry->tail = ac->ac_tail; tail 1040 include/trace/events/ext4.h show_mballoc_flags(__entry->flags), __entry->tail, tail 578 include/trace/events/rpcrdma.h __entry->taillen = rqst->rq_snd_buf.tail[0].iov_len; tail 388 include/trace/events/sunrpc.h __entry->tail_base = xdr->buf->tail[0].iov_base, tail 389 include/trace/events/sunrpc.h __entry->tail_len = xdr->buf->tail[0].iov_len, tail 447 include/trace/events/sunrpc.h __entry->tail_base = xdr->buf->tail[0].iov_base, tail 448 include/trace/events/sunrpc.h __entry->tail_len = xdr->buf->tail[0].iov_len, tail 488 include/trace/events/sunrpc.h __entry->tail_base = req->rq_rcv_buf.tail[0].iov_base; tail 489 include/trace/events/sunrpc.h __entry->tail_len = req->rq_rcv_buf.tail[0].iov_len; tail 93 include/uapi/linux/io_uring.h __u32 tail; tail 110 include/uapi/linux/io_uring.h __u32 tail; tail 74 include/uapi/linux/virtio_iommu.h struct virtio_iommu_req_tail tail; tail 82 include/uapi/linux/virtio_iommu.h struct virtio_iommu_req_tail tail; tail 100 include/uapi/linux/virtio_iommu.h struct virtio_iommu_req_tail tail; tail 109 include/uapi/linux/virtio_iommu.h struct virtio_iommu_req_tail tail; tail 85 include/uapi/rdma/bnxt_re-abi.h __u32 tail; tail 32 include/uapi/rdma/rvt-abi.h RDMA_ATOMIC_UAPI(__u32, tail); tail 63 include/uapi/rdma/rvt-abi.h RDMA_ATOMIC_UAPI(__u32, tail); tail 19 kernel/bpf/queue_stack_maps.c u32 head, tail; tail 32 kernel/bpf/queue_stack_maps.c return qs->head == qs->tail; tail 42 kernel/bpf/queue_stack_maps.c return head == qs->tail; tail 129 kernel/bpf/queue_stack_maps.c ptr = &qs->elements[qs->tail * qs->map.value_size]; tail 133 kernel/bpf/queue_stack_maps.c if (unlikely(++qs->tail >= qs->size)) tail 134 kernel/bpf/queue_stack_maps.c qs->tail = 0; tail 224 kernel/bpf/queue_stack_maps.c if (unlikely(++qs->tail >= qs->size)) tail 225 kernel/bpf/queue_stack_maps.c qs->tail = 0; tail 138 kernel/events/ring_buffer.c ring_buffer_has_space(unsigned long head, unsigned long tail, tail 143 kernel/events/ring_buffer.c return CIRC_SPACE(head, tail, data_size) >= size; tail 145 kernel/events/ring_buffer.c return CIRC_SPACE(tail, head, data_size) >= size; tail 154 kernel/events/ring_buffer.c unsigned long tail, offset, head; tail 192 kernel/events/ring_buffer.c tail = READ_ONCE(rb->user_page->data_tail); tail 195 kernel/events/ring_buffer.c if (unlikely(!ring_buffer_has_space(head, tail, tail 576 kernel/futex.c struct page *page, *tail; tail 645 kernel/futex.c tail = page; tail 740 kernel/futex.c key->shared.pgoff = basepage_index(tail); tail 86 kernel/kallsyms.c goto tail; tail 96 kernel/kallsyms.c tail: tail 58 kernel/locking/osq_lock.c if (atomic_read(&lock->tail) == curr && tail 59 kernel/locking/osq_lock.c atomic_cmpxchg_acquire(&lock->tail, curr, old) == curr) { tail 107 kernel/locking/osq_lock.c old = atomic_xchg(&lock->tail, curr); tail 214 kernel/locking/osq_lock.c if (likely(atomic_cmpxchg_release(&lock->tail, curr, tail 116 kernel/locking/qspinlock.c u32 tail; tail 118 kernel/locking/qspinlock.c tail = (cpu + 1) << _Q_TAIL_CPU_OFFSET; tail 119 kernel/locking/qspinlock.c tail |= idx << _Q_TAIL_IDX_OFFSET; /* assume < 4 
*/ tail 121 kernel/locking/qspinlock.c return tail; tail 124 kernel/locking/qspinlock.c static inline __pure struct mcs_spinlock *decode_tail(u32 tail) tail 126 kernel/locking/qspinlock.c int cpu = (tail >> _Q_TAIL_CPU_OFFSET) - 1; tail 127 kernel/locking/qspinlock.c int idx = (tail & _Q_TAIL_IDX_MASK) >> _Q_TAIL_IDX_OFFSET; tail 175 kernel/locking/qspinlock.c static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail) tail 181 kernel/locking/qspinlock.c return (u32)xchg_relaxed(&lock->tail, tail 182 kernel/locking/qspinlock.c tail >> _Q_TAIL_OFFSET) << _Q_TAIL_OFFSET; tail 219 kernel/locking/qspinlock.c static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail) tail 224 kernel/locking/qspinlock.c new = (val & _Q_LOCKED_PENDING_MASK) | tail; tail 317 kernel/locking/qspinlock.c u32 old, tail; tail 401 kernel/locking/qspinlock.c tail = encode_tail(smp_processor_id(), idx); tail 459 kernel/locking/qspinlock.c old = xchg_tail(lock, tail); tail 534 kernel/locking/qspinlock.c if ((val & _Q_TAIL_MASK) == tail) { tail 21 kernel/rcu/rcu_segcblist.c rclp->tail = &rclp->head; tail 34 kernel/rcu/rcu_segcblist.c *rclp->tail = rhp; tail 35 kernel/rcu/rcu_segcblist.c rclp->tail = &rhp->next; tail 53 kernel/rcu/rcu_segcblist.c drclp->tail = srclp->tail; tail 55 kernel/rcu/rcu_segcblist.c drclp->tail = &drclp->head; tail 63 kernel/rcu/rcu_segcblist.c srclp->tail = &rhp->next; tail 86 kernel/rcu/rcu_segcblist.c rclp->tail = &rclp->head; tail 327 kernel/rcu/rcu_segcblist.c *rclp->tail = rsclp->head; tail 330 kernel/rcu/rcu_segcblist.c rclp->tail = rsclp->tails[RCU_DONE_TAIL]; tail 350 kernel/rcu/rcu_segcblist.c *rclp->tail = *rsclp->tails[RCU_DONE_TAIL]; tail 351 kernel/rcu/rcu_segcblist.c rclp->tail = rsclp->tails[RCU_NEXT_TAIL]; tail 381 kernel/rcu/rcu_segcblist.c *rclp->tail = rsclp->head; tail 385 kernel/rcu/rcu_segcblist.c WRITE_ONCE(rsclp->tails[i], rclp->tail); tail 389 kernel/rcu/rcu_segcblist.c rclp->tail = &rclp->head; tail 402 kernel/rcu/rcu_segcblist.c WRITE_ONCE(rsclp->tails[RCU_NEXT_TAIL], rclp->tail); tail 404 kernel/rcu/rcu_segcblist.c rclp->tail = &rclp->head; tail 465 kernel/softirq.c struct tasklet_struct **tail; tail 481 kernel/softirq.c *head->tail = t; tail 482 kernel/softirq.c head->tail = &(t->next); tail 510 kernel/softirq.c tl_head->tail = &tl_head->head; tail 532 kernel/softirq.c *tl_head->tail = t; tail 533 kernel/softirq.c tl_head->tail = &t->next; tail 580 kernel/softirq.c per_cpu(tasklet_vec, cpu).tail = tail 582 kernel/softirq.c per_cpu(tasklet_hi_vec, cpu).tail = tail 637 kernel/softirq.c per_cpu(tasklet_vec, cpu).tail = i; tail 650 kernel/softirq.c if (&per_cpu(tasklet_vec, cpu).head != per_cpu(tasklet_vec, cpu).tail) { tail 651 kernel/softirq.c *__this_cpu_read(tasklet_vec.tail) = per_cpu(tasklet_vec, cpu).head; tail 652 kernel/softirq.c __this_cpu_write(tasklet_vec.tail, per_cpu(tasklet_vec, cpu).tail); tail 654 kernel/softirq.c per_cpu(tasklet_vec, cpu).tail = &per_cpu(tasklet_vec, cpu).head; tail 658 kernel/softirq.c if (&per_cpu(tasklet_hi_vec, cpu).head != per_cpu(tasklet_hi_vec, cpu).tail) { tail 659 kernel/softirq.c *__this_cpu_read(tasklet_hi_vec.tail) = per_cpu(tasklet_hi_vec, cpu).head; tail 660 kernel/softirq.c __this_cpu_write(tasklet_hi_vec.tail, per_cpu(tasklet_hi_vec, cpu).tail); tail 662 kernel/softirq.c per_cpu(tasklet_hi_vec, cpu).tail = &per_cpu(tasklet_hi_vec, cpu).head; tail 2130 kernel/trace/ring_buffer.c unsigned long tail, struct rb_event_info *info) tail 2140 kernel/trace/ring_buffer.c if (tail >= BUF_PAGE_SIZE) { tail 2146 
kernel/trace/ring_buffer.c if (tail == BUF_PAGE_SIZE) tail 2153 kernel/trace/ring_buffer.c event = __rb_page_index(tail_page, tail); tail 2156 kernel/trace/ring_buffer.c local_add(BUF_PAGE_SIZE - tail, &cpu_buffer->entries_bytes); tail 2163 kernel/trace/ring_buffer.c tail_page->real_end = tail; tail 2176 kernel/trace/ring_buffer.c if (tail > (BUF_PAGE_SIZE - RB_EVNT_MIN_SIZE)) { tail 2188 kernel/trace/ring_buffer.c event->array[0] = (BUF_PAGE_SIZE - tail) - RB_EVNT_HDR_SIZE; tail 2194 kernel/trace/ring_buffer.c length = (tail + length) - BUF_PAGE_SIZE; tail 2205 kernel/trace/ring_buffer.c unsigned long tail, struct rb_event_info *info) tail 2289 kernel/trace/ring_buffer.c rb_reset_tail(cpu_buffer, tail, info); tail 2301 kernel/trace/ring_buffer.c rb_reset_tail(cpu_buffer, tail, info); tail 2821 kernel/trace/ring_buffer.c unsigned long tail, write; tail 2837 kernel/trace/ring_buffer.c tail = write - info->length; tail 2843 kernel/trace/ring_buffer.c if (!tail && !ring_buffer_time_stamp_abs(cpu_buffer->buffer)) tail 2848 kernel/trace/ring_buffer.c return rb_move_tail(cpu_buffer, tail, info); tail 2852 kernel/trace/ring_buffer.c event = __rb_page_index(tail_page, tail); tail 2861 kernel/trace/ring_buffer.c if (!tail) tail 156 kernel/trace/trace.c struct trace_eval_map_tail tail; tail 5259 kernel/trace/trace.c if (ptr->tail.next) { tail 5260 kernel/trace/trace.c ptr = ptr->tail.next; tail 5386 kernel/trace/trace.c if (!ptr->tail.next) tail 5388 kernel/trace/trace.c ptr = ptr->tail.next; tail 5391 kernel/trace/trace.c ptr->tail.next = map_array; tail 8741 kernel/trace/trace.c last = &map->tail.next; tail 8742 kernel/trace/trace.c map = map->tail.next; tail 8747 kernel/trace/trace.c *last = trace_eval_jmp_to_tail(map)->tail.next; tail 206 kernel/trace/trace_events.c struct ftrace_event_field *tail; tail 214 kernel/trace/trace_events.c tail = list_first_entry(head, struct ftrace_event_field, link); tail 215 kernel/trace/trace_events.c return tail->offset + tail->size; tail 60 kernel/trace/trace_functions_graph.c { TRACER_OPT(funcgraph-tail, TRACE_GRAPH_PRINT_TAIL) }, tail 514 kernel/trace/trace_probe.c char *tail; tail 519 kernel/trace/trace_probe.c bw = simple_strtoul(bf + 1, &tail, 0); /* Use simple one */ tail 521 kernel/trace/trace_probe.c if (bw == 0 || *tail != '@') tail 524 kernel/trace/trace_probe.c bf = tail + 1; tail 525 kernel/trace/trace_probe.c bo = simple_strtoul(bf, &tail, 0); tail 527 kernel/trace/trace_probe.c if (tail == bf || *tail != '/') tail 635 kernel/trace/trace_uprobe.c char *tail; tail 638 kernel/trace/trace_uprobe.c tail = kstrdup(kbasename(filename), GFP_KERNEL); tail 639 kernel/trace/trace_uprobe.c if (!tail) { tail 644 kernel/trace/trace_uprobe.c ptr = strpbrk(tail, ".-_"); tail 648 kernel/trace/trace_uprobe.c snprintf(buf, MAX_EVENT_NAME_LEN, "%c_%s_0x%lx", 'p', tail, offset); tail 650 kernel/trace/trace_uprobe.c kfree(tail); tail 22 lib/list_sort.c struct list_head *head, **tail = &head; tail 27 lib/list_sort.c *tail = a; tail 28 lib/list_sort.c tail = &a->next; tail 31 lib/list_sort.c *tail = b; tail 35 lib/list_sort.c *tail = b; tail 36 lib/list_sort.c tail = &b->next; tail 39 lib/list_sort.c *tail = a; tail 58 lib/list_sort.c struct list_head *tail = head; tail 64 lib/list_sort.c tail->next = a; tail 65 lib/list_sort.c a->prev = tail; tail 66 lib/list_sort.c tail = a; tail 71 lib/list_sort.c tail->next = b; tail 72 lib/list_sort.c b->prev = tail; tail 73 lib/list_sort.c tail = b; tail 83 lib/list_sort.c tail->next = b; tail 94 lib/list_sort.c b->prev = tail; 
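The lib/list_sort.c entries around this point use the indirect-tail idiom: tail holds the address of the link that will receive the next node, so splicing the first element needs no special case. A short sketch of that merge pattern on a simplified singly linked node (struct node and its int payload are assumptions for illustration; the kernel code works on struct list_head with a caller-supplied compare function):

#include <stddef.h>

struct node {
	int val;
	struct node *next;
};

/* Merge two sorted lists; stable because ties are taken from 'a'. */
static struct node *merge(struct node *a, struct node *b)
{
	struct node *head = NULL, **tail = &head;

	while (a && b) {
		if (a->val <= b->val) {
			*tail = a;		/* splice the smaller node */
			tail = &a->next;	/* its next link is the new tail slot */
			a = a->next;
		} else {
			*tail = b;
			tail = &b->next;
			b = b->next;
		}
	}
	*tail = a ? a : b;			/* append whichever run is left */
	return head;
}

The bio_list helpers in include/linux/bio.h listed earlier get the same O(1) append with an explicit struct bio *tail pointer rather than a pointer-to-pointer.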
tail 95 lib/list_sort.c tail = b; tail 100 lib/list_sort.c tail->next = head; tail 101 lib/list_sort.c head->prev = tail; tail 221 lib/list_sort.c struct list_head **tail = &pending; tail 225 lib/list_sort.c tail = &(*tail)->prev; tail 228 lib/list_sort.c struct list_head *a = *tail, *b = a->prev; tail 233 lib/list_sort.c *tail = a; tail 2436 mm/huge_memory.c static void __split_huge_page_tail(struct page *head, int tail, tail 2439 mm/huge_memory.c struct page *page_tail = head + tail; tail 2463 mm/huge_memory.c VM_BUG_ON_PAGE(tail > 2 && page_tail->mapping != TAIL_MAPPING, tail 2466 mm/huge_memory.c page_tail->index = head->index + tail; tail 44 mm/kasan/quarantine.c struct qlist_node *tail; tail 57 mm/kasan/quarantine.c q->head = q->tail = NULL; tail 67 mm/kasan/quarantine.c q->tail->next = qlink; tail 68 mm/kasan/quarantine.c q->tail = qlink; tail 84 mm/kasan/quarantine.c to->tail->next = from->head; tail 85 mm/kasan/quarantine.c to->tail = from->tail; tail 1170 mm/slub.c void *head, void *tail, int bulk_cnt, tail 1202 mm/slub.c if (object != tail) { tail 1352 mm/slub.c void *head, void *tail, int bulk_cnt, tail 1428 mm/slub.c void **head, void **tail) tail 1433 mm/slub.c void *old_tail = *tail ? *tail : *head; tail 1438 mm/slub.c *tail = NULL; tail 1461 mm/slub.c if (!*tail) tail 1462 mm/slub.c *tail = object; tail 1466 mm/slub.c if (*head == *tail) tail 1467 mm/slub.c *tail = NULL; tail 1759 mm/slub.c __add_partial(struct kmem_cache_node *n, struct page *page, int tail) tail 1762 mm/slub.c if (tail == DEACTIVATE_TO_TAIL) tail 1769 mm/slub.c struct page *page, int tail) tail 1772 mm/slub.c __add_partial(n, page, tail); tail 2044 mm/slub.c int tail = DEACTIVATE_TO_HEAD; tail 2050 mm/slub.c tail = DEACTIVATE_TO_TAIL; tail 2145 mm/slub.c add_partial(n, page, tail); tail 2161 mm/slub.c stat(s, tail); tail 2837 mm/slub.c void *head, void *tail, int cnt, tail 2851 mm/slub.c !free_debug_processing(s, page, head, tail, cnt, addr)) tail 2861 mm/slub.c set_freepointer(s, tail, prior); tail 2965 mm/slub.c struct page *page, void *head, void *tail, tail 2968 mm/slub.c void *tail_obj = tail ? : head; tail 3007 mm/slub.c void *head, void *tail, int cnt, tail 3014 mm/slub.c if (slab_free_freelist_hook(s, &head, &tail)) tail 3015 mm/slub.c do_slab_free(s, page, head, tail, cnt, addr); tail 3037 mm/slub.c void *tail; tail 3094 mm/slub.c df->tail = object; tail 3139 mm/slub.c slab_free(df.s, df.page, df.freelist, df.tail, df.cnt,_RET_IP_); tail 397 mm/swapfile.c cluster_set_null(&list->tail); tail 406 mm/swapfile.c cluster_set_next_flag(&list->tail, idx, 0); tail 409 mm/swapfile.c unsigned int tail = cluster_next(&list->tail); tail 415 mm/swapfile.c ci_tail = ci + tail; tail 419 mm/swapfile.c cluster_set_next_flag(&list->tail, idx, 0); tail 429 mm/swapfile.c if (cluster_next(&list->tail) == idx) { tail 431 mm/swapfile.c cluster_set_null(&list->tail); tail 213 net/bluetooth/cmtp/core.c unsigned int size, tail; tail 226 net/bluetooth/cmtp/core.c tail = session->mtu - nskb->len; tail 227 net/bluetooth/cmtp/core.c if (tail < 5) { tail 230 net/bluetooth/cmtp/core.c tail = session->mtu; tail 233 net/bluetooth/cmtp/core.c size = min_t(uint, ((tail < 258) ? 
(tail - 2) : (tail - 3)), skb->len); tail 345 net/bluetooth/l2cap_core.c seq_list->tail = L2CAP_SEQ_LIST_CLEAR; tail 374 net/bluetooth/l2cap_core.c seq_list->tail = L2CAP_SEQ_LIST_CLEAR; tail 391 net/bluetooth/l2cap_core.c seq_list->tail = L2CAP_SEQ_LIST_CLEAR; tail 403 net/bluetooth/l2cap_core.c if (seq_list->tail == L2CAP_SEQ_LIST_CLEAR) tail 406 net/bluetooth/l2cap_core.c seq_list->list[seq_list->tail & mask] = seq; tail 408 net/bluetooth/l2cap_core.c seq_list->tail = seq; tail 2632 net/bluetooth/l2cap_core.c if (chan->srej_list.tail == L2CAP_SEQ_LIST_CLEAR) tail 2638 net/bluetooth/l2cap_core.c control.reqseq = chan->srej_list.tail; tail 1717 net/bluetooth/rfcomm/core.c skb->len--; skb->tail--; tail 313 net/caif/cfpkt_skbuff.c if (dst->tail + neededtailspace > dst->end) { tail 2142 net/ceph/osd_client.c void *oloc, *oid, *tail; tail 2168 net/ceph/osd_client.c tail = p; tail 2193 net/ceph/osd_client.c BUG_ON(p >= tail); tail 2194 net/ceph/osd_client.c memmove(p, tail, tail_len); tail 3311 net/core/dev.c struct sk_buff *next, *head = NULL, *tail; tail 3327 net/core/dev.c tail->next = skb; tail 3331 net/core/dev.c tail = skb->prev; tail 4253 net/core/dev.c int troom = skb->tail + skb->data_len - skb->end; tail 5414 net/core/dev.c skb->end - skb->tail); tail 5422 net/core/dev.c BUG_ON(skb->end - skb->tail < grow); tail 5427 net/core/dev.c skb->tail += grow; tail 67 net/core/gen_stats.c d->tail = (struct nlattr *)skb_tail_pointer(skb); tail 76 net/core/gen_stats.c if (d->tail) { tail 85 net/core/gen_stats.c if (ret == 0 && d->tail->nla_type == padattr) tail 86 net/core/gen_stats.c d->tail = (struct nlattr *)((char *)d->tail + tail 87 net/core/gen_stats.c NLA_ALIGN(d->tail->nla_len)); tail 177 net/core/gen_stats.c if (d->tail) { tail 267 net/core/gen_stats.c if (d->tail) { tail 347 net/core/gen_stats.c if (d->tail) tail 379 net/core/gen_stats.c if (d->tail) tail 408 net/core/gen_stats.c if (d->tail) tail 409 net/core/gen_stats.c d->tail->nla_len = skb_tail_pointer(d->skb) - (u8 *)d->tail; tail 83 net/core/rtnetlink.c void rtnl_kfree_skbs(struct sk_buff *head, struct sk_buff *tail) tail 85 net/core/rtnetlink.c if (head && tail) { tail 86 net/core/rtnetlink.c tail->next = defer_kfree_skb_list; tail 106 net/core/skbuff.c (unsigned long)skb->tail, (unsigned long)skb->end, tail 224 net/core/skbuff.c memset(skb, 0, offsetof(struct sk_buff, tail)); tail 232 net/core/skbuff.c skb->end = skb->tail + size; tail 275 net/core/skbuff.c skb->end = skb->tail + size; tail 314 net/core/skbuff.c memset(skb, 0, offsetof(struct sk_buff, tail)); tail 998 net/core/skbuff.c C(tail); tail 1198 net/core/skbuff.c struct sk_buff *tail, *skb = skb_from_uarg(uarg); tail 1229 net/core/skbuff.c tail = skb_peek_tail(q); tail 1230 net/core/skbuff.c if (!tail || SKB_EXT_ERR(tail)->ee.ee_origin != SO_EE_ORIGIN_ZEROCOPY || tail 1231 net/core/skbuff.c !skb_zerocopy_notify_extend(tail, lo, len)) { tail 1674 net/core/skbuff.c skb->tail += off; tail 1803 net/core/skbuff.c ntail = skb->data_len + pad - (skb->end - skb->tail); tail 1840 net/core/skbuff.c void *pskb_put(struct sk_buff *skb, struct sk_buff *tail, int len) tail 1842 net/core/skbuff.c if (tail != skb) { tail 1846 net/core/skbuff.c return skb_put(tail, len); tail 1863 net/core/skbuff.c skb->tail += len; tail 1865 net/core/skbuff.c if (unlikely(skb->tail > skb->end)) tail 2056 net/core/skbuff.c int i, k, eat = (skb->tail + delta) - skb->end; tail 2163 net/core/skbuff.c skb->tail += delta; tail 3654 net/core/skbuff.c struct sk_buff *tail = NULL; tail 3821 net/core/skbuff.c 
tail->next = nskb; tail 3824 net/core/skbuff.c tail = nskb; tail 3942 net/core/skbuff.c segs->prev = tail; tail 3963 net/core/skbuff.c if (tail->len - doffset <= gso_size) tail 3964 net/core/skbuff.c skb_shinfo(tail)->gso_size = 0; tail 3965 net/core/skbuff.c else if (tail != segs) tail 3966 net/core/skbuff.c skb_shinfo(tail)->gso_segs = DIV_ROUND_UP(tail->len - doffset, gso_size); tail 3974 net/core/skbuff.c swap(tail->truesize, head_skb->truesize); tail 3975 net/core/skbuff.c swap(tail->destructor, head_skb->destructor); tail 3976 net/core/skbuff.c swap(tail->sk, head_skb->sk); tail 5956 net/core/skbuff.c if (skb->data_len > skb->end - skb->tail || tail 1814 net/core/sock.c newsk->sk_backlog.head = newsk->sk_backlog.tail = NULL; tail 2433 net/core/sock.c sk->sk_backlog.head = sk->sk_backlog.tail = NULL; tail 2957 net/core/sock.c if (sk->sk_backlog.tail) tail 405 net/dccp/options.c const unsigned char *tail, *from; tail 431 net/dccp/options.c tail = av->av_buf + DCCPAV_MAX_ACKVEC_LEN; tail 449 net/dccp/options.c if (from + copylen > tail) { tail 450 net/dccp/options.c const u16 tailsize = tail - from; tail 47 net/decnet/netfilter/dn_rtmsg.c old_tail = skb->tail; tail 58 net/decnet/netfilter/dn_rtmsg.c nlh->nlmsg_len = skb->tail - old_tail; tail 212 net/ipv4/esp4.c static void esp_output_fill_trailer(u8 *tail, int tfclen, int plen, __u8 proto) tail 216 net/ipv4/esp4.c memset(tail, 0, tfclen); tail 217 net/ipv4/esp4.c tail += tfclen; tail 222 net/ipv4/esp4.c tail[i] = i + 1; tail 224 net/ipv4/esp4.c tail[plen - 2] = plen - 2; tail 225 net/ipv4/esp4.c tail[plen - 1] = proto; tail 274 net/ipv4/esp4.c u8 *tail; tail 294 net/ipv4/esp4.c tail = skb_tail_pointer(trailer); tail 319 net/ipv4/esp4.c tail = vaddr + pfrag->offset; tail 321 net/ipv4/esp4.c esp_output_fill_trailer(tail, esp->tfclen, esp->plen, esp->proto); tail 353 net/ipv4/esp4.c tail = skb_tail_pointer(trailer); tail 357 net/ipv4/esp4.c esp_output_fill_trailer(tail, esp->tfclen, esp->plen, esp->proto); tail 901 net/ipv4/tcp.c skb->reserved_tailroom = skb->end - skb->tail - size; tail 2056 net/ipv4/tcp.c if (copied >= target && !sk->sk_backlog.tail) tail 85 net/ipv4/tcp_cdg.c u8 tail; tail 197 net/ipv4/tcp_cdg.c ca->gsum.min += gmin - ca->gradients[ca->tail].min; tail 198 net/ipv4/tcp_cdg.c ca->gsum.max += gmax - ca->gradients[ca->tail].max; tail 199 net/ipv4/tcp_cdg.c ca->gradients[ca->tail].min = gmin; tail 200 net/ipv4/tcp_cdg.c ca->gradients[ca->tail].max = gmax; tail 201 net/ipv4/tcp_cdg.c ca->tail = (ca->tail + 1) & (window - 1); tail 218 net/ipv4/tcp_cdg.c else if (ca->tail == 0) tail 221 net/ipv4/tcp_cdg.c grad = (grad * window) / (int)ca->tail; tail 4483 net/ipv4/tcp_input.c struct sk_buff *skb, *tail; tail 4506 net/ipv4/tcp_input.c tail = skb_peek_tail(&sk->sk_receive_queue); tail 4507 net/ipv4/tcp_input.c eaten = tail && tcp_try_coalesce(sk, tail, skb, &fragstolen); tail 4684 net/ipv4/tcp_input.c struct sk_buff *tail = skb_peek_tail(&sk->sk_receive_queue); tail 4686 net/ipv4/tcp_input.c eaten = (tail && tail 4687 net/ipv4/tcp_input.c tcp_try_coalesce(sk, tail, tail 4914 net/ipv4/tcp_input.c struct sk_buff *head, struct sk_buff *tail, u32 start, u32 end) tail 4924 net/ipv4/tcp_input.c for (end_of_skbs = true; skb != NULL && skb != tail; skb = n) { tail 4947 net/ipv4/tcp_input.c if (n && n != tail && tail 4998 net/ipv4/tcp_input.c skb == tail || tail 1654 net/ipv4/tcp_ipv4.c struct sk_buff *tail; tail 1691 net/ipv4/tcp_ipv4.c tail = sk->sk_backlog.tail; tail 1692 net/ipv4/tcp_ipv4.c if (!tail) tail 1694 net/ipv4/tcp_ipv4.c 
thtail = (struct tcphdr *)tail->data; tail 1696 net/ipv4/tcp_ipv4.c if (TCP_SKB_CB(tail)->end_seq != TCP_SKB_CB(skb)->seq || tail 1697 net/ipv4/tcp_ipv4.c TCP_SKB_CB(tail)->ip_dsfield != TCP_SKB_CB(skb)->ip_dsfield || tail 1698 net/ipv4/tcp_ipv4.c ((TCP_SKB_CB(tail)->tcp_flags | tail 1700 net/ipv4/tcp_ipv4.c !((TCP_SKB_CB(tail)->tcp_flags & tail 1702 net/ipv4/tcp_ipv4.c ((TCP_SKB_CB(tail)->tcp_flags ^ tail 1705 net/ipv4/tcp_ipv4.c tail->decrypted != skb->decrypted || tail 1712 net/ipv4/tcp_ipv4.c if (skb_try_coalesce(tail, skb, &fragstolen, &delta)) { tail 1715 net/ipv4/tcp_ipv4.c TCP_SKB_CB(tail)->end_seq = TCP_SKB_CB(skb)->end_seq; tail 1717 net/ipv4/tcp_ipv4.c if (after(TCP_SKB_CB(skb)->ack_seq, TCP_SKB_CB(tail)->ack_seq)) tail 1718 net/ipv4/tcp_ipv4.c TCP_SKB_CB(tail)->ack_seq = TCP_SKB_CB(skb)->ack_seq; tail 1729 net/ipv4/tcp_ipv4.c TCP_SKB_CB(tail)->tcp_flags |= TCP_SKB_CB(skb)->tcp_flags; tail 1732 net/ipv4/tcp_ipv4.c TCP_SKB_CB(tail)->has_rxtstamp = true; tail 1733 net/ipv4/tcp_ipv4.c tail->tstamp = skb->tstamp; tail 1734 net/ipv4/tcp_ipv4.c skb_hwtstamps(tail)->hwtstamp = skb_hwtstamps(skb)->hwtstamp; tail 1738 net/ipv4/tcp_ipv4.c skb_shinfo(tail)->gso_size = max(shinfo->gso_size, tail 1739 net/ipv4/tcp_ipv4.c skb_shinfo(tail)->gso_size); tail 1741 net/ipv4/tcp_ipv4.c gso_segs = skb_shinfo(tail)->gso_segs + shinfo->gso_segs; tail 1742 net/ipv4/tcp_ipv4.c skb_shinfo(tail)->gso_segs = min_t(u32, gso_segs, 0xFFFF); tail 210 net/ipv6/esp6.c static void esp_output_fill_trailer(u8 *tail, int tfclen, int plen, __u8 proto) tail 214 net/ipv6/esp6.c memset(tail, 0, tfclen); tail 215 net/ipv6/esp6.c tail += tfclen; tail 220 net/ipv6/esp6.c tail[i] = i + 1; tail 222 net/ipv6/esp6.c tail[plen - 2] = plen - 2; tail 223 net/ipv6/esp6.c tail[plen - 1] = proto; tail 228 net/ipv6/esp6.c u8 *tail; tail 239 net/ipv6/esp6.c tail = skb_tail_pointer(trailer); tail 264 net/ipv6/esp6.c tail = vaddr + pfrag->offset; tail 266 net/ipv6/esp6.c esp_output_fill_trailer(tail, esp->tfclen, esp->plen, esp->proto); tail 296 net/ipv6/esp6.c tail = skb_tail_pointer(trailer); tail 299 net/ipv6/esp6.c esp_output_fill_trailer(tail, esp->tfclen, esp->plen, esp->proto); tail 783 net/llc/af_llc.c if (copied >= target && !sk->sk_backlog.tail) tail 198 net/llc/llc_proc.c !!sk->sk_backlog.tail, !!sk->sk_lock.owned); tail 894 net/mac80211/cfg.c if (params->tail || !old) tail 913 net/mac80211/cfg.c new->tail = new->head + new_head_len; tail 931 net/mac80211/cfg.c if (params->tail) tail 932 net/mac80211/cfg.c memcpy(new->tail, params->tail, new_tail_len); tail 935 net/mac80211/cfg.c memcpy(new->tail, old->tail, new_tail_len); tail 3003 net/mac80211/cfg.c new_beacon->tail = pos; tail 3004 net/mac80211/cfg.c memcpy(pos, beacon->tail, beacon->tail_len); tail 258 net/mac80211/ieee80211_i.h u8 *head, *tail; tail 875 net/mac80211/mesh.c bcn->tail = bcn->head + bcn->head_len; tail 892 net/mac80211/mesh.c memcpy(bcn->tail, skb->data, bcn->tail_len); tail 894 net/mac80211/mesh.c (bcn->tail + ifmsh->meshconf_offset); tail 1199 net/mac80211/mesh.c skb_put_data(presp, bcn->tail, bcn->tail_len); tail 1107 net/mac80211/rx.c struct sk_buff *tail = skb_peek_tail(frames); tail 1113 net/mac80211/rx.c if (!tail) tail 1116 net/mac80211/rx.c status = IEEE80211_SKB_RXCB(tail); tail 4309 net/mac80211/tx.c beacon_data = beacon->tail; tail 4421 net/mac80211/tx.c if (WARN_ON(!beacon || !beacon->tail)) tail 4423 net/mac80211/tx.c beacon_data = beacon->tail; tail 4527 net/mac80211/tx.c if (beacon->tail) tail 4528 net/mac80211/tx.c skb_put_data(skb, 
beacon->tail, tail 4595 net/mac80211/tx.c skb_put_data(skb, beacon->tail, beacon->tail_len); tail 37 net/mac80211/wpa.c int tail; tail 65 net/mac80211/wpa.c tail = MICHAEL_MIC_LEN; tail 67 net/mac80211/wpa.c tail += IEEE80211_TKIP_ICV_LEN; tail 69 net/mac80211/wpa.c if (WARN(skb_tailroom(skb) < tail || tail 73 net/mac80211/wpa.c skb_tailroom(skb), tail)) tail 199 net/mac80211/wpa.c int len, tail; tail 214 net/mac80211/wpa.c tail = 0; tail 216 net/mac80211/wpa.c tail = IEEE80211_TKIP_ICV_LEN; tail 218 net/mac80211/wpa.c if (WARN_ON(skb_tailroom(skb) < tail || tail 411 net/mac80211/wpa.c int hdrlen, len, tail; tail 435 net/mac80211/wpa.c tail = 0; tail 437 net/mac80211/wpa.c tail = mic_len; tail 439 net/mac80211/wpa.c if (WARN_ON(skb_tailroom(skb) < tail || tail 642 net/mac80211/wpa.c int hdrlen, len, tail; tail 665 net/mac80211/wpa.c tail = 0; tail 667 net/mac80211/wpa.c tail = IEEE80211_GCMP_MIC_LEN; tail 669 net/mac80211/wpa.c if (WARN_ON(skb_tailroom(skb) < tail || tail 457 net/netfilter/nfnetlink_log.c sk_buff_data_t old_tail = inst->skb->tail; tail 653 net/netfilter/nfnetlink_log.c nlh->nlmsg_len = inst->skb->tail - old_tail; tail 1273 net/netlink/af_netlink.c delta = skb->end - skb->tail; tail 65 net/openvswitch/actions.c int tail; tail 98 net/openvswitch/actions.c fifo->tail = 0; tail 103 net/openvswitch/actions.c return (fifo->head == fifo->tail); tail 111 net/openvswitch/actions.c return &fifo->fifo[fifo->tail++]; tail 1951 net/packet/af_packet.c skb->tail -= hhlen; tail 80 net/rxrpc/af_rxrpc.c unsigned int tail; tail 104 net/rxrpc/af_rxrpc.c tail = offsetof(struct sockaddr_rxrpc, transport.sin.__pad); tail 111 net/rxrpc/af_rxrpc.c tail = offsetof(struct sockaddr_rxrpc, transport) + tail 120 net/rxrpc/af_rxrpc.c if (tail < len) tail 121 net/rxrpc/af_rxrpc.c memset((void *)srx + tail, 0, len - tail); tail 41 net/rxrpc/call_accept.c unsigned int head, tail, call_head, call_tail; tail 65 net/rxrpc/call_accept.c tail = READ_ONCE(b->peer_backlog_tail); tail 66 net/rxrpc/call_accept.c if (CIRC_CNT(head, tail, size) < max) { tail 76 net/rxrpc/call_accept.c tail = READ_ONCE(b->conn_backlog_tail); tail 77 net/rxrpc/call_accept.c if (CIRC_CNT(head, tail, size) < max) { tail 189 net/rxrpc/call_accept.c unsigned int size = RXRPC_BACKLOG_MAX, head, tail; tail 202 net/rxrpc/call_accept.c tail = b->peer_backlog_tail; tail 203 net/rxrpc/call_accept.c while (CIRC_CNT(head, tail, size) > 0) { tail 204 net/rxrpc/call_accept.c struct rxrpc_peer *peer = b->peer_backlog[tail]; tail 206 net/rxrpc/call_accept.c tail = (tail + 1) & (size - 1); tail 210 net/rxrpc/call_accept.c tail = b->conn_backlog_tail; tail 211 net/rxrpc/call_accept.c while (CIRC_CNT(head, tail, size) > 0) { tail 212 net/rxrpc/call_accept.c struct rxrpc_connection *conn = b->conn_backlog[tail]; tail 220 net/rxrpc/call_accept.c tail = (tail + 1) & (size - 1); tail 224 net/rxrpc/call_accept.c tail = b->call_backlog_tail; tail 225 net/rxrpc/call_accept.c while (CIRC_CNT(head, tail, size) > 0) { tail 226 net/rxrpc/call_accept.c struct rxrpc_call *call = b->call_backlog[tail]; tail 236 net/rxrpc/call_accept.c tail = (tail + 1) & (size - 1); tail 439 net/sched/ematch.c u8 *tail; tail 454 net/sched/ematch.c tail = skb_tail_pointer(skb); tail 456 net/sched/ematch.c struct nlattr *match_start = (struct nlattr *)tail; tail 476 net/sched/ematch.c tail = skb_tail_pointer(skb); tail 477 net/sched/ematch.c match_start->nla_len = tail - (u8 *)match_start; tail 128 net/sched/sch_cake.c struct sk_buff *tail; tail 835 net/sched/sch_cake.c flow->tail->next 
= skb; tail 836 net/sched/sch_cake.c flow->tail = skb; tail 1129 net/sched/sch_cake.c if (flow->head == flow->tail) tail 1132 net/sched/sch_cake.c skb = flow->tail; tail 68 net/sched/sch_choke.c unsigned int tail; tail 78 net/sched/sch_choke.c return (q->tail - q->head) & q->tab_mask; tail 98 net/sched/sch_choke.c if (q->head == q->tail) tail 107 net/sched/sch_choke.c q->tail = (q->tail - 1) & q->tab_mask; tail 108 net/sched/sch_choke.c if (q->head == q->tail) tail 110 net/sched/sch_choke.c } while (q->tab[q->tail] == NULL); tail 124 net/sched/sch_choke.c if (idx == q->tail) tail 212 net/sched/sch_choke.c if (q->head == q->tail) tail 275 net/sched/sch_choke.c q->tab[q->tail] = skb; tail 276 net/sched/sch_choke.c q->tail = (q->tail + 1) & q->tab_mask; tail 295 net/sched/sch_choke.c if (q->head == q->tail) { tail 315 net/sched/sch_choke.c while (q->head != q->tail) { tail 328 net/sched/sch_choke.c q->head = q->tail = 0; tail 388 net/sched/sch_choke.c unsigned int oqlen = sch->q.qlen, tail = 0; tail 391 net/sched/sch_choke.c while (q->head != q->tail) { tail 397 net/sched/sch_choke.c if (tail < mask) { tail 398 net/sched/sch_choke.c ntab[tail++] = skb; tail 408 net/sched/sch_choke.c q->tail = tail; tail 425 net/sched/sch_choke.c if (q->head == q->tail) tail 492 net/sched/sch_choke.c return (q->head != q->tail) ? q->tab[q->head] : NULL; tail 72 net/sched/sch_fq.c struct sk_buff *tail; /* last skb in the list */ tail 395 net/sched/sch_fq.c fq_skb_cb(skb)->time_to_send >= fq_skb_cb(flow->tail)->time_to_send) { tail 399 net/sched/sch_fq.c flow->tail->next = skb; tail 400 net/sched/sch_fq.c flow->tail = skb; tail 613 net/sched/sch_fq.c rtnl_kfree_skbs(flow->head, flow->tail); tail 45 net/sched/sch_fq_codel.c struct sk_buff *tail; tail 133 net/sched/sch_fq_codel.c flow->tail->next = skb; tail 134 net/sched/sch_fq_codel.c flow->tail = skb; tail 332 net/sched/sch_fq_codel.c rtnl_kfree_skbs(flow->head, flow->tail); tail 122 net/sched/sch_hhf.c struct sk_buff *tail; tail 344 net/sched/sch_hhf.c bucket->tail->next = skb; tail 345 net/sched/sch_hhf.c bucket->tail = skb; tail 544 net/sched/sch_netem.c if (sch->q.tail) tail 545 net/sched/sch_netem.c last = netem_skb_cb(sch->q.tail); tail 131 net/sched/sch_sfq.c struct sfq_slot *tail; /* current slot in round */ tail 318 net/sched/sch_sfq.c x = q->tail->next; tail 320 net/sched/sch_sfq.c q->tail->next = slot->next; tail 447 net/sched/sch_sfq.c if (q->tail == NULL) { /* It is the first flow */ tail 450 net/sched/sch_sfq.c slot->next = q->tail->next; tail 451 net/sched/sch_sfq.c q->tail->next = x; tail 457 net/sched/sch_sfq.c q->tail = slot; tail 488 net/sched/sch_sfq.c if (q->tail == NULL) tail 492 net/sched/sch_sfq.c a = q->tail->next; tail 495 net/sched/sch_sfq.c q->tail = slot; tail 510 net/sched/sch_sfq.c q->tail = NULL; /* no more active slots */ tail 513 net/sched/sch_sfq.c q->tail->next = next_a; tail 560 net/sched/sch_sfq.c q->tail = NULL; tail 591 net/sched/sch_sfq.c if (q->tail == NULL) { /* It is the first flow */ tail 594 net/sched/sch_sfq.c slot->next = q->tail->next; tail 595 net/sched/sch_sfq.c q->tail->next = x; tail 597 net/sched/sch_sfq.c q->tail = slot; tail 615 net/sched/sch_sfq.c if (!q->filter_list && q->tail) tail 631 net/sched/sch_sfq.c struct sk_buff *tail = NULL; tail 692 net/sched/sch_sfq.c if (!tail) tail 693 net/sched/sch_sfq.c tail = to_free; tail 696 net/sched/sch_sfq.c rtnl_kfree_skbs(to_free, tail); tail 753 net/sched/sch_sfq.c q->tail = NULL; tail 1855 net/sunrpc/auth_gss/auth_gss.c if (snd_buf->page_len || 
snd_buf->tail[0].iov_len) { tail 1859 net/sunrpc/auth_gss/auth_gss.c memcpy(tmp, snd_buf->tail[0].iov_base, snd_buf->tail[0].iov_len); tail 1860 net/sunrpc/auth_gss/auth_gss.c snd_buf->tail[0].iov_base = tmp; tail 1876 net/sunrpc/auth_gss/auth_gss.c if (snd_buf->page_len || snd_buf->tail[0].iov_len) tail 1877 net/sunrpc/auth_gss/auth_gss.c iov = snd_buf->tail; tail 768 net/sunrpc/auth_gss/gss_krb5_crypto.c if (buf->tail[0].iov_base != NULL) { tail 769 net/sunrpc/auth_gss/gss_krb5_crypto.c ecptr = buf->tail[0].iov_base + buf->tail[0].iov_len; tail 771 net/sunrpc/auth_gss/gss_krb5_crypto.c buf->tail[0].iov_base = buf->head[0].iov_base tail 773 net/sunrpc/auth_gss/gss_krb5_crypto.c buf->tail[0].iov_len = 0; tail 774 net/sunrpc/auth_gss/gss_krb5_crypto.c ecptr = buf->tail[0].iov_base; tail 779 net/sunrpc/auth_gss/gss_krb5_crypto.c buf->tail[0].iov_len += GSS_KRB5_TOK_HDR_LEN; tail 784 net/sunrpc/auth_gss/gss_krb5_crypto.c hmac.data = buf->tail[0].iov_base + buf->tail[0].iov_len; tail 844 net/sunrpc/auth_gss/gss_krb5_crypto.c buf->tail[0].iov_len += kctx->gk5e->cksumlength; tail 55 net/sunrpc/auth_gss/gss_krb5_wrap.c if (buf->page_len || buf->tail[0].iov_len) tail 56 net/sunrpc/auth_gss/gss_krb5_wrap.c iov = &buf->tail[0]; tail 91 net/sunrpc/auth_gss/gss_krb5_wrap.c BUG_ON(len > buf->tail[0].iov_len); tail 92 net/sunrpc/auth_gss/gss_krb5_wrap.c pad = *(u8 *)(buf->tail[0].iov_base + len - 1); tail 910 net/sunrpc/auth_gss/svcauth_gss.c return buf->head[0].iov_len + buf->page_len + buf->tail[0].iov_len; tail 1684 net/sunrpc/auth_gss/svcauth_gss.c if (resbuf->tail[0].iov_base == NULL) { tail 1687 net/sunrpc/auth_gss/svcauth_gss.c resbuf->tail[0].iov_base = resbuf->head[0].iov_base tail 1689 net/sunrpc/auth_gss/svcauth_gss.c resbuf->tail[0].iov_len = 0; tail 1691 net/sunrpc/auth_gss/svcauth_gss.c resv = &resbuf->tail[0]; tail 1736 net/sunrpc/auth_gss/svcauth_gss.c if (resbuf->tail[0].iov_base) { tail 1737 net/sunrpc/auth_gss/svcauth_gss.c BUG_ON(resbuf->tail[0].iov_base >= resbuf->head[0].iov_base tail 1739 net/sunrpc/auth_gss/svcauth_gss.c BUG_ON(resbuf->tail[0].iov_base < resbuf->head[0].iov_base); tail 1740 net/sunrpc/auth_gss/svcauth_gss.c if (resbuf->tail[0].iov_len + resbuf->head[0].iov_len tail 1743 net/sunrpc/auth_gss/svcauth_gss.c memmove(resbuf->tail[0].iov_base + RPC_MAX_AUTH_SIZE, tail 1744 net/sunrpc/auth_gss/svcauth_gss.c resbuf->tail[0].iov_base, tail 1745 net/sunrpc/auth_gss/svcauth_gss.c resbuf->tail[0].iov_len); tail 1746 net/sunrpc/auth_gss/svcauth_gss.c resbuf->tail[0].iov_base += RPC_MAX_AUTH_SIZE; tail 1755 net/sunrpc/auth_gss/svcauth_gss.c if (resbuf->tail[0].iov_base == NULL) { tail 1758 net/sunrpc/auth_gss/svcauth_gss.c resbuf->tail[0].iov_base = resbuf->head[0].iov_base tail 1760 net/sunrpc/auth_gss/svcauth_gss.c resbuf->tail[0].iov_len = 0; tail 1766 net/sunrpc/auth_gss/svcauth_gss.c p = (__be32 *)(resbuf->tail[0].iov_base + resbuf->tail[0].iov_len); tail 1768 net/sunrpc/auth_gss/svcauth_gss.c resbuf->tail[0].iov_len += pad; tail 139 net/sunrpc/socklib.c len = xdr->tail[0].iov_len; tail 141 net/sunrpc/socklib.c copied += copy_actor(desc, (char *)xdr->tail[0].iov_base + base, len - base); tail 1519 net/sunrpc/svc.c rqstp->rq_res.tail[0].iov_base = NULL; tail 1520 net/sunrpc/svc.c rqstp->rq_res.tail[0].iov_len = 0; tail 671 net/sunrpc/svc_xprt.c arg->tail[0].iov_len = 0; tail 904 net/sunrpc/svc_xprt.c xb->tail[0].iov_len; tail 224 net/sunrpc/svcsock.c if (xdr->tail[0].iov_len) { tail 226 net/sunrpc/svcsock.c xdr->tail[0].iov_len, 0); tail 269 net/sunrpc/svcsock.c tailoff = 
((unsigned long)xdr->tail[0].iov_base) & (PAGE_SIZE-1); tail 181 net/sunrpc/xdr.c struct kvec *tail = xdr->tail; tail 191 net/sunrpc/xdr.c tail->iov_base = buf + offset; tail 192 net/sunrpc/xdr.c tail->iov_len = buflen - offset; tail 194 net/sunrpc/xdr.c tail->iov_len -= sizeof(__be32); tail 365 net/sunrpc/xdr.c struct kvec *head, *tail; tail 371 net/sunrpc/xdr.c tail = buf->tail; tail 379 net/sunrpc/xdr.c if (tail->iov_len != 0) { tail 380 net/sunrpc/xdr.c if (tail->iov_len > len) { tail 381 net/sunrpc/xdr.c copy = tail->iov_len - len; tail 382 net/sunrpc/xdr.c memmove((char *)tail->iov_base + len, tail 383 net/sunrpc/xdr.c tail->iov_base, copy); tail 391 net/sunrpc/xdr.c if (offs >= tail->iov_len) tail 393 net/sunrpc/xdr.c else if (copy > tail->iov_len - offs) tail 394 net/sunrpc/xdr.c copy = tail->iov_len - offs; tail 396 net/sunrpc/xdr.c _copy_from_pages((char *)tail->iov_base + offs, tail 405 net/sunrpc/xdr.c if (copy > tail->iov_len) tail 406 net/sunrpc/xdr.c copy = tail->iov_len; tail 407 net/sunrpc/xdr.c memcpy(tail->iov_base, tail 449 net/sunrpc/xdr.c struct kvec *tail; tail 456 net/sunrpc/xdr.c tail = buf->tail; tail 463 net/sunrpc/xdr.c unsigned int free_space = tailbuf_len - tail->iov_len; tail 467 net/sunrpc/xdr.c tail->iov_len += free_space; tail 470 net/sunrpc/xdr.c if (tail->iov_len > len) { tail 471 net/sunrpc/xdr.c char *p = (char *)tail->iov_base + len; tail 472 net/sunrpc/xdr.c memmove(p, tail->iov_base, tail->iov_len - len); tail 473 net/sunrpc/xdr.c result += tail->iov_len - len; tail 475 net/sunrpc/xdr.c copy = tail->iov_len; tail 477 net/sunrpc/xdr.c _copy_from_pages((char *)tail->iov_base, tail 526 net/sunrpc/xdr.c int scratch_len = buf->buflen - buf->page_len - buf->tail[0].iov_len; tail 674 net/sunrpc/xdr.c struct kvec *tail = buf->tail; tail 684 net/sunrpc/xdr.c fraglen = min_t(int, buf->len - len, tail->iov_len); tail 685 net/sunrpc/xdr.c tail->iov_len -= fraglen; tail 687 net/sunrpc/xdr.c if (tail->iov_len) { tail 688 net/sunrpc/xdr.c xdr->p = tail->iov_base + tail->iov_len; tail 761 net/sunrpc/xdr.c struct kvec *iov = buf->tail; tail 836 net/sunrpc/xdr.c xdr_set_iov(xdr, xdr->buf->tail, xdr->nwords << 2); tail 845 net/sunrpc/xdr.c xdr_set_iov(xdr, xdr->buf->tail, xdr->nwords << 2); tail 1042 net/sunrpc/xdr.c xdr->iov = iov = buf->tail; tail 1088 net/sunrpc/xdr.c buf->tail[0] = empty_iov; tail 1136 net/sunrpc/xdr.c if (base < buf->tail[0].iov_len) { tail 1137 net/sunrpc/xdr.c subbuf->tail[0].iov_base = buf->tail[0].iov_base + base; tail 1138 net/sunrpc/xdr.c subbuf->tail[0].iov_len = min_t(unsigned int, len, tail 1139 net/sunrpc/xdr.c buf->tail[0].iov_len - base); tail 1140 net/sunrpc/xdr.c len -= subbuf->tail[0].iov_len; tail 1143 net/sunrpc/xdr.c base -= buf->tail[0].iov_len; tail 1144 net/sunrpc/xdr.c subbuf->tail[0].iov_len = 0; tail 1168 net/sunrpc/xdr.c if (buf->tail[0].iov_len) { tail 1169 net/sunrpc/xdr.c cur = min_t(size_t, buf->tail[0].iov_len, trim); tail 1170 net/sunrpc/xdr.c buf->tail[0].iov_len -= cur; tail 1207 net/sunrpc/xdr.c this_len = min_t(unsigned int, len, subbuf->tail[0].iov_len); tail 1208 net/sunrpc/xdr.c memcpy(obj, subbuf->tail[0].iov_base, this_len); tail 1238 net/sunrpc/xdr.c this_len = min_t(unsigned int, len, subbuf->tail[0].iov_len); tail 1239 net/sunrpc/xdr.c memcpy(subbuf->tail[0].iov_base, obj, this_len); tail 1318 net/sunrpc/xdr.c mic->data = subbuf.tail[0].iov_base; tail 1319 net/sunrpc/xdr.c if (subbuf.tail[0].iov_len == mic->len) tail 1325 net/sunrpc/xdr.c if (buf->tail[0].iov_len != 0) tail 1326 net/sunrpc/xdr.c 
mic->data = buf->tail[0].iov_base + buf->tail[0].iov_len; tail 1497 net/sunrpc/xdr.c c = buf->tail->iov_base + base; tail 1545 net/sunrpc/xdr.c buf->head->iov_len + buf->page_len + buf->tail->iov_len) tail 1604 net/sunrpc/xdr.c if (offset < buf->tail[0].iov_len) { tail 1605 net/sunrpc/xdr.c thislen = buf->tail[0].iov_len - offset; tail 1608 net/sunrpc/xdr.c sg_set_buf(sg, buf->tail[0].iov_base + offset, thislen); tail 1845 net/sunrpc/xprt.c xbufp->tail[0].iov_len; tail 182 net/sunrpc/xprtrdma/rpc_rdma.c return (buf->head[0].iov_len + buf->tail[0].iov_len) < tail 273 net/sunrpc/xprtrdma/rpc_rdma.c if (xdrbuf->tail[0].iov_len) tail 274 net/sunrpc/xprtrdma/rpc_rdma.c seg = rpcrdma_convert_kvec(&xdrbuf->tail[0], seg, &n); tail 644 net/sunrpc/xprtrdma/rpc_rdma.c len = xdr->tail[0].iov_len; tail 650 net/sunrpc/xprtrdma/rpc_rdma.c page = virt_to_page(xdr->tail[0].iov_base); tail 651 net/sunrpc/xprtrdma/rpc_rdma.c page_base = offset_in_page(xdr->tail[0].iov_base); tail 697 net/sunrpc/xprtrdma/rpc_rdma.c if (xdr->tail[0].iov_len) { tail 698 net/sunrpc/xprtrdma/rpc_rdma.c page = virt_to_page(xdr->tail[0].iov_base); tail 699 net/sunrpc/xprtrdma/rpc_rdma.c page_base = offset_in_page(xdr->tail[0].iov_base); tail 700 net/sunrpc/xprtrdma/rpc_rdma.c len = xdr->tail[0].iov_len; tail 1007 net/sunrpc/xprtrdma/rpc_rdma.c rqst->rq_rcv_buf.tail[0].iov_base = srcp; tail 1008 net/sunrpc/xprtrdma/rpc_rdma.c rqst->rq_private_buf.tail[0].iov_base = srcp; tail 372 net/sunrpc/xprtrdma/svc_rdma_recvfrom.c arg->tail[0].iov_base = NULL; tail 373 net/sunrpc/xprtrdma/svc_rdma_recvfrom.c arg->tail[0].iov_len = 0; tail 664 net/sunrpc/xprtrdma/svc_rdma_recvfrom.c rqstp->rq_arg.tail[0] = head->rc_arg.tail[0]; tail 579 net/sunrpc/xprtrdma/svc_rdma_rw.c if (xdr->tail[0].iov_len) { tail 580 net/sunrpc/xprtrdma/svc_rdma_rw.c ret = svc_rdma_send_xdr_kvec(info, &xdr->tail[0]); tail 583 net/sunrpc/xprtrdma/svc_rdma_rw.c consumed += xdr->tail[0].iov_len; tail 733 net/sunrpc/xprtrdma/svc_rdma_rw.c head->rc_arg.tail[0].iov_base = tail 735 net/sunrpc/xprtrdma/svc_rdma_rw.c head->rc_arg.tail[0].iov_len = tail 825 net/sunrpc/xprtrdma/svc_rdma_rw.c head->rc_arg.tail[0] = rqstp->rq_arg.tail[0]; tail 567 net/sunrpc/xprtrdma/svc_rdma_sendto.c if (xdr->tail[0].iov_len) tail 591 net/sunrpc/xprtrdma/svc_rdma_sendto.c tailbase = xdr->tail[0].iov_base; tail 592 net/sunrpc/xprtrdma/svc_rdma_sendto.c taillen = xdr->tail[0].iov_len; tail 669 net/sunrpc/xprtrdma/svc_rdma_sendto.c base = xdr->tail[0].iov_base; tail 670 net/sunrpc/xprtrdma/svc_rdma_sendto.c len = xdr->tail[0].iov_len; tail 678 net/sunrpc/xprtrdma/svc_rdma_sendto.c goto tail; tail 697 net/sunrpc/xprtrdma/svc_rdma_sendto.c base = xdr->tail[0].iov_base; tail 698 net/sunrpc/xprtrdma/svc_rdma_sendto.c len = xdr->tail[0].iov_len; tail 699 net/sunrpc/xprtrdma/svc_rdma_sendto.c tail: tail 446 net/sunrpc/xprtsock.c want = min_t(size_t, count - offset, buf->tail[0].iov_len); tail 448 net/sunrpc/xprtsock.c ret = xs_read_kvec(sock, msg, flags, &buf->tail[0], want, seek); tail 859 net/sunrpc/xprtsock.c if (base >= xdr->tail[0].iov_len) tail 862 net/sunrpc/xprtsock.c err = xs_send_kvec(sock, &msg, &xdr->tail[0], base); tail 2669 net/sunrpc/xprtsock.c if (xbufp->tail[0].iov_len) tail 2670 net/sunrpc/xprtsock.c tailpage = virt_to_page(xbufp->tail[0].iov_base); tail 2671 net/sunrpc/xprtsock.c tailoff = (unsigned long)xbufp->tail[0].iov_base & ~PAGE_MASK; tail 2322 net/tipc/link.c u16 tail = l->snd_nxt - 1; tail 2326 net/tipc/link.c skb_queue_len(&l->transmq), head, tail, tail 126 net/tipc/msg.c struct 
sk_buff *tail = NULL; tail 147 net/tipc/msg.c TIPC_SKB_CB(head)->tail = NULL; tail 149 net/tipc/msg.c skb_walk_frags(head, tail) { tail 150 net/tipc/msg.c TIPC_SKB_CB(head)->tail = tail; tail 164 net/tipc/msg.c tail = TIPC_SKB_CB(head)->tail; tail 168 net/tipc/msg.c tail->next = frag; tail 172 net/tipc/msg.c TIPC_SKB_CB(head)->tail = frag; tail 180 net/tipc/msg.c TIPC_SKB_CB(head)->tail = NULL; tail 105 net/tipc/msg.h struct sk_buff *tail; tail 242 net/tipc/netlink_compat.c char *tail = skb_tail_pointer(msg->rep); tail 244 net/tipc/netlink_compat.c if (*tail != '\0') tail 245 net/tipc/netlink_compat.c sprintf(tail - sizeof(REPLY_TRUNCATED) - 1, tail 3815 net/tipc/socket.c if (sk->sk_backlog.tail != sk->sk_backlog.head) { tail 3817 net/tipc/socket.c i += tipc_skb_dump(sk->sk_backlog.tail, false, tail 1894 net/unix/af_unix.c struct sk_buff *skb, *newskb = NULL, *tail = NULL; tail 1945 net/unix/af_unix.c if (tail && tail == skb) { tail 1951 net/unix/af_unix.c tail = skb; tail 1964 net/unix/af_unix.c tail = skb; tail 2152 net/unix/af_unix.c struct sk_buff *tail; tail 2160 net/unix/af_unix.c tail = skb_peek_tail(&sk->sk_receive_queue); tail 2161 net/unix/af_unix.c if (tail != last || tail 2162 net/unix/af_unix.c (tail && tail->len != last_len) || tail 185 net/vmw_vsock/vmci_transport_notify.c u64 tail; tail 201 net/vmw_vsock/vmci_transport_notify.c vmci_qpair_get_consume_indexes(vmci_trans(vsk)->qpair, &tail, &head); tail 227 net/vmw_vsock/vmci_transport_notify.c u64 tail; tail 237 net/vmw_vsock/vmci_transport_notify.c vmci_qpair_get_produce_indexes(vmci_trans(vsk)->qpair, &tail, &head); tail 238 net/vmw_vsock/vmci_transport_notify.c room_left = vmci_trans(vsk)->produce_size - tail; tail 244 net/vmw_vsock/vmci_transport_notify.c waiting_info.offset = tail + room_needed + 1; tail 4420 net/wireless/nl80211.c bcn->tail = nla_data(attrs[NL80211_ATTR_BEACON_TAIL]); tail 4538 net/wireless/nl80211.c const u8 *ies = bcn->tail; tail 8542 net/wireless/nl80211.c if (params.beacon_csa.tail[offset] != params.count) tail 563 net/wireless/trace.h __dynamic_array(u8, tail, info ? 
info->tail_len : 0) tail 578 net/wireless/trace.h if (info->tail) tail 579 net/wireless/trace.h memcpy(__get_dynamic_array(tail), info->tail, tail 113 samples/vfio-mdev/mtty.c u8 head, tail; tail 357 samples/vfio-mdev/mtty.c mdev_state->s[index].rxtx.tail)) { tail 378 samples/vfio-mdev/mtty.c mdev_state->s[index].rxtx.tail = 0; tail 474 samples/vfio-mdev/mtty.c mdev_state->s[index].rxtx.tail) { tail 476 samples/vfio-mdev/mtty.c mdev_state->s[index].rxtx.tail]; tail 478 samples/vfio-mdev/mtty.c CIRCULAR_BUF_INC_IDX(mdev_state->s[index].rxtx.tail); tail 482 samples/vfio-mdev/mtty.c mdev_state->s[index].rxtx.tail) { tail 525 samples/vfio-mdev/mtty.c mdev_state->s[index].rxtx.tail)) tail 556 samples/vfio-mdev/mtty.c mdev_state->s[index].rxtx.tail) tail 565 samples/vfio-mdev/mtty.c mdev_state->s[index].rxtx.tail) tail 665 scripts/kallsyms.c const char *tail = str; tail 667 scripts/kallsyms.c while (*tail == '_') tail 668 scripts/kallsyms.c tail++; tail 670 scripts/kallsyms.c return tail - str; tail 1003 scripts/kconfig/expr.c char *tail; tail 1015 scripts/kconfig/expr.c val->s = strtoll(str, &tail, 10); tail 1019 scripts/kconfig/expr.c val->u = strtoull(str, &tail, 16); tail 1023 scripts/kconfig/expr.c val->s = strtoll(str, &tail, 0); tail 1027 scripts/kconfig/expr.c return !errno && !*tail && tail > str && isxdigit(tail[-1]) tail 104 security/selinux/ibpkey.c struct sel_ib_pkey *tail; tail 106 security/selinux/ibpkey.c tail = list_entry( tail 111 security/selinux/ibpkey.c list_del_rcu(&tail->list); tail 112 security/selinux/ibpkey.c kfree_rcu(tail, rcu); tail 80 security/selinux/netlink.c tmp = skb->tail; tail 85 security/selinux/netlink.c nlh->nlmsg_len = skb->tail - tmp; tail 166 security/selinux/netnode.c struct sel_netnode *tail; tail 167 security/selinux/netnode.c tail = list_entry( tail 171 security/selinux/netnode.c list_del_rcu(&tail->list); tail 172 security/selinux/netnode.c kfree_rcu(tail, rcu); tail 114 security/selinux/netport.c struct sel_netport *tail; tail 115 security/selinux/netport.c tail = list_entry( tail 120 security/selinux/netport.c list_del_rcu(&tail->list); tail 121 security/selinux/netport.c kfree_rcu(tail, rcu); tail 265 security/selinux/ss/conditional.c struct cond_av_list *tail; tail 339 security/selinux/ss/conditional.c data->tail->next = list; tail 340 security/selinux/ss/conditional.c data->tail = list; tail 369 security/selinux/ss/conditional.c data.tail = NULL; tail 49 sound/core/seq/oss/seq_oss_readq.c q->head = q->tail = 0; tail 78 sound/core/seq/oss/seq_oss_readq.c q->head = q->tail = 0; tail 151 sound/core/seq/oss/seq_oss_readq.c memcpy(&q->q[q->tail], ev, sizeof(*ev)); tail 152 sound/core/seq/oss/seq_oss_readq.c q->tail = (q->tail + 1) % q->maxlen; tail 184 sound/core/seq/oss/seq_oss_readq.c (q->qlen > 0 || q->head == q->tail), tail 22 sound/core/seq/oss/seq_oss_readq.h int head, tail; tail 43 sound/core/seq/seq_fifo.c f->tail = NULL; tail 122 sound/core/seq/seq_fifo.c if (f->tail != NULL) tail 123 sound/core/seq/seq_fifo.c f->tail->next = cell; tail 124 sound/core/seq/seq_fifo.c f->tail = cell; tail 150 sound/core/seq/seq_fifo.c if (f->tail == cell) tail 151 sound/core/seq/seq_fifo.c f->tail = NULL; tail 207 sound/core/seq/seq_fifo.c if (!f->tail) tail 208 sound/core/seq/seq_fifo.c f->tail = cell; tail 248 sound/core/seq/seq_fifo.c f->tail = NULL; tail 18 sound/core/seq/seq_fifo.h struct snd_seq_event_cell *tail; /* pointer to tail of fifo */ tail 310 sound/core/seq/seq_memory.c struct snd_seq_event_cell *src, *tmp, *tail; tail 318 
tail 330 sound/core/seq/seq_memory.c if (tail)
tail 331 sound/core/seq/seq_memory.c tail->next = tmp;
tail 332 sound/core/seq/seq_memory.c tail = tmp;
tail 52 sound/core/seq/seq_prioq.c f->tail = NULL;
tail 150 sound/core/seq/seq_prioq.c if (f->tail && !prior) {
tail 151 sound/core/seq/seq_prioq.c if (compare_timestamp(&cell->event, &f->tail->event)) {
tail 153 sound/core/seq/seq_prioq.c f->tail->next = cell;
tail 154 sound/core/seq/seq_prioq.c f->tail = cell;
tail 196 sound/core/seq/seq_prioq.c f->tail = cell;
tail 231 sound/core/seq/seq_prioq.c if (f->tail == cell)
tail 232 sound/core/seq/seq_prioq.c f->tail = NULL;
tail 294 sound/core/seq/seq_prioq.c if (cell == f->tail)
tail 295 sound/core/seq/seq_prioq.c f->tail = cell->next;
tail 408 sound/core/seq/seq_prioq.c if (cell == f->tail)
tail 409 sound/core/seq/seq_prioq.c f->tail = cell->next;
tail 16 sound/core/seq/seq_prioq.h struct snd_seq_event_cell *tail; /* pointer to tail of prioq */
tail 72 sound/isa/msnd/msnd_midi.c u16 tail;
tail 74 sound/isa/msnd/msnd_midi.c tail = readw(mpu->dev->MIDQ + JQS_wTail);
tail 75 sound/isa/msnd/msnd_midi.c writew(tail, mpu->dev->MIDQ + JQS_wHead);
tail 108 sound/isa/msnd/msnd_midi.c u16 head, tail, size;
tail 112 sound/isa/msnd/msnd_midi.c tail = readw(mpu->dev->MIDQ + JQS_wTail);
tail 114 sound/isa/msnd/msnd_midi.c if (head > size || tail > size)
tail 116 sound/isa/msnd/msnd_midi.c while (head != tail) {
tail 159 sound/isa/msnd/msnd_pinnacle.c u16 head, tail, size;
tail 166 sound/isa/msnd/msnd_pinnacle.c tail = readw(chip->DSPQ + JQS_wTail);
tail 168 sound/isa/msnd/msnd_pinnacle.c if (head > size || tail > size)
tail 170 sound/isa/msnd/msnd_pinnacle.c while (head != tail) {
tail 748 sound/pci/sis7019.c u16 tail = vperiod % period_size;
tail 751 sound/pci/sis7019.c if (tail && tail < quarter_period) {
tail 754 sound/pci/sis7019.c tail = quarter_period - tail;
tail 755 sound/pci/sis7019.c tail += loops - 1;
tail 756 sound/pci/sis7019.c tail /= loops;
tail 757 sound/pci/sis7019.c period_size -= tail;
tail 31 tools/bpf/bpftool/cfg.c struct bpf_insn *tail;
tail 205 tools/bpf/bpftool/cfg.c last->tail = func->end;
tail 208 tools/bpf/bpftool/cfg.c bb->tail = bb_next(bb)->head - 1;
tail 303 tools/bpf/bpftool/cfg.c insn = bb->tail;
tail 406 tools/bpf/bpftool/cfg.c dump_xlated_for_graph(&dd, bb->head, bb->tail, start_idx);
tail 69 tools/include/linux/ring_buffer.h u64 tail)
tail 71 tools/include/linux/ring_buffer.h smp_store_release(&base->data_tail, tail);
tail 39 tools/io_uring/io_uring-bench.c unsigned *tail;
tail 48 tools/io_uring/io_uring-bench.c unsigned *tail;
tail 197 tools/io_uring/io_uring-bench.c unsigned index, tail, next_tail, prepped = 0;
tail 199 tools/io_uring/io_uring-bench.c next_tail = tail = *ring->tail;
tail 206 tools/io_uring/io_uring-bench.c index = tail & sq_ring_mask;
tail 210 tools/io_uring/io_uring-bench.c tail = next_tail;
tail 213 tools/io_uring/io_uring-bench.c if (*ring->tail != tail) {
tail 216 tools/io_uring/io_uring-bench.c *ring->tail = tail;
tail 255 tools/io_uring/io_uring-bench.c if (head == *ring->tail)
tail 437 tools/io_uring/io_uring-bench.c sring->tail = ptr + p.sq_off.tail;
tail 454 tools/io_uring/io_uring-bench.c cring->tail = ptr + p.cq_off.tail;
tail 23 tools/io_uring/setup.c sq->ktail = ptr + p->sq_off.tail;
tail 50 tools/io_uring/setup.c cq->ktail = ptr + p->cq_off.tail;
tail 1086 tools/perf/builtin-record.c static int record__synthesize_workload(struct record *rec, bool tail)
tail 1091 tools/perf/builtin-record.c if (rec->opts.tail_synthesize != tail)
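The tools/include/linux/ring_buffer.h and io_uring hits above share one idea: entries in a shared ring become visible to the other side only when the new tail index is published with release ordering, and a slot is selected by masking the free-running tail with a power-of-two mask (index = tail & sq_ring_mask). A sketch of that publication step using C11 atomics as a userspace stand-in for smp_store_release(); struct ring, ring_slot and ring_advance_tail are illustrative names, not the tool's API:

	#include <stdatomic.h>

	struct ring {
		_Atomic unsigned int *tail;	/* index word shared with the peer */
		unsigned int mask;		/* number of entries - 1, power of two */
	};

	static unsigned int ring_slot(const struct ring *r, unsigned int tail)
	{
		return tail & r->mask;	/* cheap modulo for a power-of-two ring */
	}

	/* Fill the entries first, then bump the tail so the peer can see them. */
	static void ring_advance_tail(struct ring *r, unsigned int new_tail)
	{
		/*
		 * The release store orders all earlier writes to the ring
		 * entries before the tail update, so the other side never
		 * observes a tail that points past data it cannot read yet.
		 */
		atomic_store_explicit(r->tail, new_tail, memory_order_release);
	}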
tail 1106 tools/perf/builtin-record.c static int record__synthesize(struct record *rec, bool tail);
tail 1219 tools/perf/builtin-record.c static int record__synthesize(struct record *rec, bool tail)
tail 1229 tools/perf/builtin-record.c if (rec->opts.tail_synthesize != tail)
tail 417 tools/perf/util/auxtrace.h static inline void auxtrace_mmap__write_tail(struct auxtrace_mmap *mm, u64 tail)
tail 427 tools/perf/util/auxtrace.h pc->aux_tail = tail;
tail 431 tools/perf/util/auxtrace.h } while (!__sync_bool_compare_and_swap(&pc->aux_tail, old_tail, tail));
tail 205 tools/perf/util/block-range.c struct block_range *tail = malloc(sizeof(struct block_range));
tail 206 tools/perf/util/block-range.c if (!tail)
tail 209 tools/perf/util/block-range.c *tail = (struct block_range){
tail 225 tools/perf/util/block-range.c rb_link_right_of_node(&tail->node, &entry->node);
tail 226 tools/perf/util/block-range.c rb_insert_color(&tail->node, &block_ranges.root);
tail 250 tools/perf/util/block-range.c struct block_range *tail;
tail 252 tools/perf/util/block-range.c tail = malloc(sizeof(struct block_range));
tail 253 tools/perf/util/block-range.c if (!tail)
tail 256 tools/perf/util/block-range.c *tail = (struct block_range){
tail 263 tools/perf/util/block-range.c rb_link_right_of_node(&tail->node, &entry->node);
tail 264 tools/perf/util/block-range.c rb_insert_color(&tail->node, &block_ranges.root);
tail 267 tools/perf/util/block-range.c iter.end = tail;
tail 355 tools/perf/util/cs-etm-decoder/cs-etm-decoder.c et = packet_queue->tail;
tail 357 tools/perf/util/cs-etm-decoder/cs-etm-decoder.c packet_queue->tail = et;
tail 395 tools/perf/util/cs-etm-decoder/cs-etm-decoder.c packet = &packet_queue->packet_buffer[packet_queue->tail];
tail 483 tools/perf/util/cs-etm-decoder/cs-etm-decoder.c packet = &queue->packet_buffer[queue->tail];
tail 197 tools/perf/util/cs-etm.c queue->tail = 0;
tail 154 tools/perf/util/cs-etm.h u32 tail;
tail 57 tools/perf/util/mmap.h static inline void perf_mmap__write_tail(struct mmap *md, u64 tail)
tail 59 tools/perf/util/mmap.h ring_buffer_write_tail(md->core.base, tail);
tail 95 tools/perf/util/symbol.c const char *tail = str;
tail 97 tools/perf/util/symbol.c while (*tail == '_')
tail 98 tools/perf/util/symbol.c tail++;
tail 100 tools/perf/util/symbol.c return tail - str;
tail 42 tools/power/acpi/tools/acpidbg/acpidbg.c (CIRC_CNT((circ)->head, (circ)->tail, ACPI_AML_BUF_SIZE))
tail 44 tools/power/acpi/tools/acpidbg/acpidbg.c (CIRC_CNT_TO_END((circ)->head, (circ)->tail, ACPI_AML_BUF_SIZE))
tail 46 tools/power/acpi/tools/acpidbg/acpidbg.c (CIRC_SPACE((circ)->head, (circ)->tail, ACPI_AML_BUF_SIZE))
tail 48 tools/power/acpi/tools/acpidbg/acpidbg.c (CIRC_SPACE_TO_END((circ)->head, (circ)->tail, ACPI_AML_BUF_SIZE))
tail 78 tools/power/acpi/tools/acpidbg/acpidbg.c .tail = 0,
tail 83 tools/power/acpi/tools/acpidbg/acpidbg.c .tail = 0,
tail 228 tools/power/acpi/tools/acpidbg/acpidbg.c p = &crc->buf[crc->tail];
tail 234 tools/power/acpi/tools/acpidbg/acpidbg.c crc->tail = (crc->tail + len) & (ACPI_AML_BUF_SIZE - 1);
tail 243 tools/power/acpi/tools/acpidbg/acpidbg.c p = &crc->buf[crc->tail];
tail 251 tools/power/acpi/tools/acpidbg/acpidbg.c crc->tail = (crc->tail + len) & (ACPI_AML_BUF_SIZE - 1);
tail 32 tools/testing/selftests/powerpc/pmu/ebb/trace.c tb->tail = tb->data;
tail 66 tools/testing/selftests/powerpc/pmu/ebb/trace.c p = tb->tail;
tail 67 tools/testing/selftests/powerpc/pmu/ebb/trace.c newtail = tb->tail + bytes;
tail 71 tools/testing/selftests/powerpc/pmu/ebb/trace.c tb->tail = newtail;
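The acpidbg hits above lean on the CIRC_CNT()/CIRC_SPACE() family of helpers over a power-of-two buffer, where head is the producer index and tail the consumer index. Roughly, the underlying arithmetic looks like the following sketch (BUF_SIZE, circ_cnt and circ_space are illustrative names, not the kernel macros themselves):

	#define BUF_SIZE 4096	/* must be a power of two */

	/* Bytes queued between the consumer's tail and the producer's head. */
	static unsigned int circ_cnt(unsigned int head, unsigned int tail)
	{
		return (head - tail) & (BUF_SIZE - 1);
	}

	/*
	 * Free space left for the producer; one byte is always kept unused
	 * so that "full" and "empty" remain distinguishable.
	 */
	static unsigned int circ_space(unsigned int head, unsigned int tail)
	{
		return (tail - (head + 1)) & (BUF_SIZE - 1);
	}

The masking with (BUF_SIZE - 1) is the same wrap seen directly in the acpidbg hits that advance crc->tail by (crc->tail + len) & (ACPI_AML_BUF_SIZE - 1).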
tail 277 tools/testing/selftests/powerpc/pmu/ebb/trace.c printf(" tail %p\n", tb->tail);
tail 287 tools/testing/selftests/powerpc/pmu/ebb/trace.c while (trace_check_bounds(tb, p) && p < tb->tail) {
tail 28 tools/testing/selftests/powerpc/pmu/ebb/trace.h void *tail;
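The ebb trace-buffer hits use tail as a simple bump pointer into a flat buffer: it starts at the beginning of the storage, each reservation returns the old tail and moves it forward, and readers stop at the current tail. A minimal sketch of that pattern under the same assumptions; the struct layout and function name here are illustrative, not the selftest's own:

	#include <stddef.h>

	struct trace_buf {
		void *data;	/* start of the backing storage    */
		void *end;	/* one past the last usable byte   */
		void *tail;	/* next free byte, starts at data  */
	};

	/* Reserve 'bytes' of space, or return NULL when the buffer is full. */
	static void *trace_reserve(struct trace_buf *tb, size_t bytes)
	{
		char *p = tb->tail;
		char *newtail = p + bytes;

		if (newtail > (char *)tb->end)	/* would run past the buffer */
			return NULL;
		tb->tail = newtail;		/* commit the reservation */
		return p;
	}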