segments 301 arch/powerpc/platforms/cell/iommu.c unsigned long segments, stab_size;
segments 303 arch/powerpc/platforms/cell/iommu.c segments = max(dbase + dsize, fbase + fsize) >> IO_SEGMENT_SHIFT;
segments 306 arch/powerpc/platforms/cell/iommu.c __func__, iommu->nid, segments);
segments 309 arch/powerpc/platforms/cell/iommu.c stab_size = segments * sizeof(unsigned long);
segments 322 arch/powerpc/platforms/cell/iommu.c unsigned long reg, segments, pages_per_segment, ptab_size,
segments 326 arch/powerpc/platforms/cell/iommu.c segments = size >> IO_SEGMENT_SHIFT;
segments 332 arch/powerpc/platforms/cell/iommu.c ptab_size = segments * pages_per_segment * sizeof(unsigned long);
segments 363 arch/powerpc/platforms/cell/iommu.c for (i = start_seg; i < (start_seg + segments); i++) {
segments 648 block/blk-core.c unsigned short segments = blk_rq_nr_discard_segments(req);
segments 650 block/blk-core.c if (segments >= queue_max_discard_segments(q))
segments 661 block/blk-core.c req->nr_phys_segments = segments + 1;
segments 30 block/blk-integrity.c unsigned int segments = 0;
segments 46 block/blk-integrity.c segments++;
segments 54 block/blk-integrity.c return segments;
segments 73 block/blk-integrity.c unsigned int segments = 0;
segments 96 block/blk-integrity.c segments++;
segments 106 block/blk-integrity.c return segments;
segments 607 block/blk-merge.c unsigned short segments = blk_rq_nr_discard_segments(req);
segments 609 block/blk-merge.c if (segments >= queue_max_discard_segments(q))
segments 615 block/blk-merge.c req->nr_phys_segments = segments + blk_rq_nr_discard_segments(next);
segments 195 drivers/block/virtio_blk.c unsigned short segments = blk_rq_nr_discard_segments(req);
segments 204 drivers/block/virtio_blk.c range = kmalloc_array(segments, sizeof(*range), GFP_ATOMIC);
segments 220 drivers/block/virtio_blk.c req->special_vec.bv_len = sizeof(*range) * segments;
segments 756 drivers/block/xen-blkback/blkback.c struct grant_page **pages = req->segments;
segments 948 drivers/block/xen-blkback/blkback.c rc = xen_blkbk_map(pending_req->ring, pending_req->segments,
segments 963 drivers/block/xen-blkback/blkback.c struct blkif_request_segment *segments = NULL;
segments 981 drivers/block/xen-blkback/blkback.c if (segments)
segments 982 drivers/block/xen-blkback/blkback.c kunmap_atomic(segments);
segments 983 drivers/block/xen-blkback/blkback.c segments = kmap_atomic(pages[n/SEGS_PER_INDIRECT_FRAME]->page);
segments 987 drivers/block/xen-blkback/blkback.c pending_req->segments[n]->gref = segments[i].gref;
segments 989 drivers/block/xen-blkback/blkback.c first_sect = READ_ONCE(segments[i].first_sect);
segments 990 drivers/block/xen-blkback/blkback.c last_sect = READ_ONCE(segments[i].last_sect);
segments 1002 drivers/block/xen-blkback/blkback.c if (segments)
segments 1003 drivers/block/xen-blkback/blkback.c kunmap_atomic(segments);
segments 1238 drivers/block/xen-blkback/blkback.c struct grant_page **pages = pending_req->segments;
segments 1417 drivers/block/xen-blkback/blkback.c xen_blkbk_unmap(ring, pending_req->segments,
segments 350 drivers/block/xen-blkback/common.h struct grant_page *segments[MAX_INDIRECT_SEGMENTS];
segments 290 drivers/block/xen-blkback/xenbus.c kfree(req->segments[j]);
segments 985 drivers/block/xen-blkback/xenbus.c req->segments[j] = kzalloc(sizeof(*req->segments[0]), GFP_KERNEL);
segments 986 drivers/block/xen-blkback/xenbus.c if (!req->segments[j])
segments 1010 drivers/block/xen-blkback/xenbus.c if (!req->segments[j])
segments 1012 drivers/block/xen-blkback/xenbus.c kfree(req->segments[j]);
segments 570 drivers/block/xen-blkfront.c struct blkif_request_segment *segments;
segments 615 drivers/block/xen-blkfront.c if (setup->segments)
segments 616 drivers/block/xen-blkfront.c kunmap_atomic(setup->segments);
segments 621 drivers/block/xen-blkfront.c setup->segments = kmap_atomic(gnt_list_entry->page);
segments 663 drivers/block/xen-blkfront.c setup->segments[grant_idx % GRANTS_PER_INDIRECT_FRAME] =
segments 701 drivers/block/xen-blkfront.c .segments = NULL,
segments 830 drivers/block/xen-blkfront.c if (setup.segments)
segments 831 drivers/block/xen-blkfront.c kunmap_atomic(setup.segments);
segments 931 drivers/block/xen-blkfront.c unsigned int segments = info->max_indirect_segments ? :
segments 948 drivers/block/xen-blkfront.c blk_queue_max_hw_sectors(rq, (segments * XEN_PAGE_SIZE) / 512);
segments 955 drivers/block/xen-blkfront.c blk_queue_max_segments(rq, segments / GRANTS_PER_PSEG);
segments 111 drivers/char/agp/frontend.c seg = *(client->segments);
segments 129 drivers/char/agp/frontend.c if (client->segments != NULL) {
segments 130 drivers/char/agp/frontend.c if (*(client->segments) != NULL) {
segments 131 drivers/char/agp/frontend.c DBG("Freeing %p from client %p", *(client->segments), client);
segments 132 drivers/char/agp/frontend.c kfree(*(client->segments));
segments 134 drivers/char/agp/frontend.c DBG("Freeing %p from client %p", client->segments, client);
segments 135 drivers/char/agp/frontend.c kfree(client->segments);
segments 136 drivers/char/agp/frontend.c client->segments = NULL;
segments 145 drivers/char/agp/frontend.c prev_seg = client->segments;
segments 152 drivers/char/agp/frontend.c client->segments = seg;
segments 309 drivers/dma/xilinx/xilinx_dma.c struct list_head segments;
segments 680 drivers/dma/xilinx/xilinx_dma.c INIT_LIST_HEAD(&desc->segments);
segments 702 drivers/dma/xilinx/xilinx_dma.c list_for_each_entry_safe(segment, next, &desc->segments, node) {
segments 708 drivers/dma/xilinx/xilinx_dma.c &desc->segments, node) {
segments 714 drivers/dma/xilinx/xilinx_dma.c &desc->segments, node) {
segments 1022 drivers/dma/xilinx/xilinx_dma.c list_for_each_entry(segment, &desc->segments, node) {
segments 1164 drivers/dma/xilinx/xilinx_dma.c list_for_each_entry(segment, &desc->segments, node) {
segments 1220 drivers/dma/xilinx/xilinx_dma.c tail_segment = list_last_entry(&tail_desc->segments,
segments 1248 drivers/dma/xilinx/xilinx_dma.c segment = list_first_entry(&head_desc->segments,
segments 1292 drivers/dma/xilinx/xilinx_dma.c tail_segment = list_last_entry(&tail_desc->segments,
segments 1355 drivers/dma/xilinx/xilinx_dma.c segment = list_first_entry(&head_desc->segments,
segments 1551 drivers/dma/xilinx/xilinx_dma.c tail_segment = list_last_entry(&tail_desc->segments,
segments 1556 drivers/dma/xilinx/xilinx_dma.c cdma_tail_segment = list_last_entry(&tail_desc->segments,
segments 1561 drivers/dma/xilinx/xilinx_dma.c axidma_tail_segment = list_last_entry(&tail_desc->segments,
segments 1694 drivers/dma/xilinx/xilinx_dma.c list_add_tail(&segment->node, &desc->segments);
segments 1697 drivers/dma/xilinx/xilinx_dma.c segment = list_first_entry(&desc->segments,
segments 1752 drivers/dma/xilinx/xilinx_dma.c list_add_tail(&segment->node, &desc->segments);
segments 1839 drivers/dma/xilinx/xilinx_dma.c list_add_tail(&segment->node, &desc->segments);
segments 1843 drivers/dma/xilinx/xilinx_dma.c segment = list_first_entry(&desc->segments,
segments 1850 drivers/dma/xilinx/xilinx_dma.c segment = list_last_entry(&desc->segments,
segments 1939 drivers/dma/xilinx/xilinx_dma.c list_add_tail(&segment->node, &desc->segments);
segments 1943 drivers/dma/xilinx/xilinx_dma.c head_segment = list_first_entry(&desc->segments,
segments 1952 drivers/dma/xilinx/xilinx_dma.c segment = list_last_entry(&desc->segments,
segments 2031 drivers/dma/xilinx/xilinx_dma.c list_add_tail(&segment->node, &desc->segments);
segments 2034 drivers/dma/xilinx/xilinx_dma.c segment = list_first_entry(&desc->segments,
segments 2041 drivers/dma/xilinx/xilinx_dma.c segment = list_last_entry(&desc->segments,
segments 96 drivers/gpu/drm/arm/malidp_crtc.c } segments[MALIDP_COEFFTAB_NUM_COEFFS] = {
segments 133 drivers/gpu/drm/arm/malidp_crtc.c delta_in = segments[i].end - segments[i].start;
segments 135 drivers/gpu/drm/arm/malidp_crtc.c out_start = drm_color_lut_extract(lut[segments[i].start].green,
segments 137 drivers/gpu/drm/arm/malidp_crtc.c out_end = drm_color_lut_extract(lut[segments[i].end].green, 12);
segments 1762 drivers/media/dvb-frontends/dib7000p.c static u32 interpolate_value(u32 value, struct linear_segments *segments,
segments 1770 drivers/media/dvb-frontends/dib7000p.c if (value >= segments[0].x)
segments 1771 drivers/media/dvb-frontends/dib7000p.c return segments[0].y;
segments 1772 drivers/media/dvb-frontends/dib7000p.c if (value < segments[len-1].x)
segments 1773 drivers/media/dvb-frontends/dib7000p.c return segments[len-1].y;
segments 1777 drivers/media/dvb-frontends/dib7000p.c if (value == segments[i].x)
segments 1778 drivers/media/dvb-frontends/dib7000p.c return segments[i].y;
segments 1779 drivers/media/dvb-frontends/dib7000p.c if (value > segments[i].x)
segments 1784 drivers/media/dvb-frontends/dib7000p.c dy = segments[i - 1].y - segments[i].y;
segments 1785 drivers/media/dvb-frontends/dib7000p.c dx = segments[i - 1].x - segments[i].x;
segments 1787 drivers/media/dvb-frontends/dib7000p.c tmp64 = value - segments[i].x;
segments 1790 drivers/media/dvb-frontends/dib7000p.c ret = segments[i].y + tmp64;
segments 3950 drivers/media/dvb-frontends/dib8000.c static u32 interpolate_value(u32 value, struct linear_segments *segments,
segments 3958 drivers/media/dvb-frontends/dib8000.c if (value >= segments[0].x)
segments 3959 drivers/media/dvb-frontends/dib8000.c return segments[0].y;
segments 3960 drivers/media/dvb-frontends/dib8000.c if (value < segments[len-1].x)
segments 3961 drivers/media/dvb-frontends/dib8000.c return segments[len-1].y;
segments 3965 drivers/media/dvb-frontends/dib8000.c if (value == segments[i].x)
segments 3966 drivers/media/dvb-frontends/dib8000.c return segments[i].y;
segments 3967 drivers/media/dvb-frontends/dib8000.c if (value > segments[i].x)
segments 3972 drivers/media/dvb-frontends/dib8000.c dy = segments[i - 1].y - segments[i].y;
segments 3973 drivers/media/dvb-frontends/dib8000.c dx = segments[i - 1].x - segments[i].x;
segments 3975 drivers/media/dvb-frontends/dib8000.c tmp64 = value - segments[i].x;
segments 3978 drivers/media/dvb-frontends/dib8000.c ret = segments[i].y + tmp64;
segments 1352 drivers/media/dvb-frontends/mb86a20s.c static u32 interpolate_value(u32 value, const struct linear_segments *segments,
segments 1359 drivers/media/dvb-frontends/mb86a20s.c if (value >= segments[0].x)
segments 1360 drivers/media/dvb-frontends/mb86a20s.c return segments[0].y;
segments 1361 drivers/media/dvb-frontends/mb86a20s.c if (value < segments[len-1].x)
segments 1362 drivers/media/dvb-frontends/mb86a20s.c return segments[len-1].y;
segments 1366 drivers/media/dvb-frontends/mb86a20s.c if (value == segments[i].x)
segments 1367 drivers/media/dvb-frontends/mb86a20s.c return segments[i].y;
segments 1368 drivers/media/dvb-frontends/mb86a20s.c if (value > segments[i].x)
segments 1373 drivers/media/dvb-frontends/mb86a20s.c dy = segments[i].y - segments[i - 1].y;
segments 1374 drivers/media/dvb-frontends/mb86a20s.c dx = segments[i - 1].x - segments[i].x;
segments 1375 drivers/media/dvb-frontends/mb86a20s.c tmp64 = value - segments[i].x;
segments 1378 drivers/media/dvb-frontends/mb86a20s.c ret = segments[i].y - tmp64;
segments 61 drivers/media/i2c/ad9389b.c u32 segments;
segments 376 drivers/media/i2c/ad9389b.c edid->segments ? "found" : "no", edid->blocks);
segments 672 drivers/media/i2c/ad9389b.c if (!state->edid.segments) {
segments 676 drivers/media/i2c/ad9389b.c if (edid->start_block >= state->edid.segments * 2)
segments 678 drivers/media/i2c/ad9389b.c if (edid->blocks + edid->start_block >= state->edid.segments * 2)
segments 679 drivers/media/i2c/ad9389b.c edid->blocks = state->edid.segments * 2 - edid->start_block;
segments 934 drivers/media/i2c/ad9389b.c v4l2_ctrl_s_ctrl(state->have_edid0_ctrl, state->edid.segments ? 0x1 : 0x0);
segments 1045 drivers/media/i2c/ad9389b.c state->edid.segments = segment + 1;
segments 1046 drivers/media/i2c/ad9389b.c if (((state->edid.data[0x7e] >> 1) + 1) > state->edid.segments) {
segments 1049 drivers/media/i2c/ad9389b.c __func__, state->edid.segments);
segments 1051 drivers/media/i2c/ad9389b.c ad9389b_wr(sd, 0xc4, state->edid.segments);
segments 1062 drivers/media/i2c/ad9389b.c v4l2_ctrl_s_ctrl(state->have_edid0_ctrl, state->edid.segments ? 0x1 : 0x0);
segments 75 drivers/media/i2c/adv7511-v4l2.c u32 segments;
segments 611 drivers/media/i2c/adv7511-v4l2.c edid->segments ? "found" : "no",
segments 1210 drivers/media/i2c/adv7511-v4l2.c edid->blocks = state->edid.segments * 2;
segments 1214 drivers/media/i2c/adv7511-v4l2.c if (state->edid.segments == 0)
segments 1217 drivers/media/i2c/adv7511-v4l2.c if (edid->start_block >= state->edid.segments * 2)
segments 1220 drivers/media/i2c/adv7511-v4l2.c if (edid->start_block + edid->blocks > state->edid.segments * 2)
segments 1221 drivers/media/i2c/adv7511-v4l2.c edid->blocks = state->edid.segments * 2 - edid->start_block;
segments 1589 drivers/media/i2c/adv7511-v4l2.c if ((status & MASK_ADV7511_HPD_DETECT) && ((status & MASK_ADV7511_MSEN_DETECT) || state->edid.segments)) {
segments 1693 drivers/media/i2c/adv7511-v4l2.c state->edid.segments = segment + 1;
segments 1695 drivers/media/i2c/adv7511-v4l2.c if (((state->edid.data[0x7e] >> 1) + 1) > state->edid.segments) {
segments 1697 drivers/media/i2c/adv7511-v4l2.c v4l2_dbg(1, debug, sd, "%s: request segment %d\n", __func__, state->edid.segments);
segments 1699 drivers/media/i2c/adv7511-v4l2.c adv7511_wr(sd, 0xc4, state->edid.segments);
segments 1705 drivers/media/i2c/adv7511-v4l2.c v4l2_dbg(1, debug, sd, "%s: edid complete with %d segment(s)\n", __func__, state->edid.segments);
segments 1708 drivers/media/i2c/adv7511-v4l2.c state->edid.segments * 256,
segments 465 drivers/net/ethernet/sfc/tx.c struct sk_buff *segments, *next;
segments 467 drivers/net/ethernet/sfc/tx.c segments = skb_gso_segment(skb, 0);
segments 468 drivers/net/ethernet/sfc/tx.c if (IS_ERR(segments))
segments 469 drivers/net/ethernet/sfc/tx.c return PTR_ERR(segments);
segments 472 drivers/net/ethernet/sfc/tx.c skb = segments;
segments 506 drivers/net/ethernet/sfc/tx.c unsigned int segments;
segments 511 drivers/net/ethernet/sfc/tx.c segments = skb_is_gso(skb) ? skb_shinfo(skb)->gso_segs : 0;
segments 512 drivers/net/ethernet/sfc/tx.c if (segments == 1)
segments 513 drivers/net/ethernet/sfc/tx.c segments = 0; /* Don't use TSO for a single segment. */
segments 519 drivers/net/ethernet/sfc/tx.c if (segments) {
segments 548 drivers/net/ethernet/sfc/tx.c if (!data_mapped && (efx_tx_map_data(tx_queue, skb, segments)))
segments 569 drivers/net/ethernet/sfc/tx.c if (segments) {
segments 571 drivers/net/ethernet/sfc/tx.c tx_queue->tso_packets += segments;
segments 572 drivers/net/ethernet/sfc/tx.c tx_queue->tx_packets += segments;
segments 611 drivers/nvme/host/core.c unsigned short segments = blk_rq_nr_discard_segments(req), n = 0;
segments 639 drivers/nvme/host/core.c if (n < segments) {
segments 647 drivers/nvme/host/core.c if (WARN_ON_ONCE(n != segments)) {
segments 657 drivers/nvme/host/core.c cmnd->dsm.nr = cpu_to_le32(segments - 1);
segments 1127 drivers/s390/block/dcssblk.c module_param_string(segments, dcssblk_segments, DCSSBLK_PARM_LEN, 0444);
segments 1128 drivers/s390/block/dcssblk.c MODULE_PARM_DESC(segments, "Name of DCSS segment(s) to be loaded, "
segments 4117 drivers/scsi/ncr53c8xx.c int segments;
segments 4244 drivers/scsi/ncr53c8xx.c segments = ncr_scatter(np, cp, cp->cmd);
segments 4245 drivers/scsi/ncr53c8xx.c if (segments < 0) {
segments 4252 drivers/scsi/ncr53c8xx.c segments = 0;
segments 4289 drivers/scsi/ncr53c8xx.c if (segments <= MAX_SCATTERL)
segments 4290 drivers/scsi/ncr53c8xx.c lastp = goalp - 8 - (segments * 16);
segments 4293 drivers/scsi/ncr53c8xx.c lastp -= (segments - MAX_SCATTERL) * 16;
segments 4302 drivers/scsi/ncr53c8xx.c if (segments <= MAX_SCATTERL)
segments 4303 drivers/scsi/ncr53c8xx.c lastp = goalp - 8 - (segments * 16);
segments 4306 drivers/scsi/ncr53c8xx.c lastp -= (segments - MAX_SCATTERL) * 16;
segments 7629 drivers/scsi/qla2xxx/qla_init.c uint templates, segments, fragment;
segments 7657 drivers/scsi/qla2xxx/qla_init.c segments = FA_RISC_CODE_SEGMENTS;
segments 7658 drivers/scsi/qla2xxx/qla_init.c for (j = 0; j < segments; j++) {
segments 7875 drivers/scsi/qla2xxx/qla_init.c uint templates, segments, fragment;
segments 7912 drivers/scsi/qla2xxx/qla_init.c segments = FA_RISC_CODE_SEGMENTS;
segments 7913 drivers/scsi/qla2xxx/qla_init.c for (j = 0; j < segments; j++) {
segments 348 drivers/scsi/sym53c8xx_2/sym_glue.c cp->segments = sym_scatter(np, cp, cmd);
segments 349 drivers/scsi/sym53c8xx_2/sym_glue.c if (cp->segments < 0) {
segments 357 drivers/scsi/sym53c8xx_2/sym_glue.c if (!cp->segments)
segments 361 drivers/scsi/sym53c8xx_2/sym_glue.c cp->segments = 0;
segments 374 drivers/scsi/sym53c8xx_2/sym_glue.c lastp = goalp - 8 - (cp->segments * (2*4));
segments 379 drivers/scsi/sym53c8xx_2/sym_glue.c lastp = goalp - 8 - (cp->segments * (2*4));
segments 3670 drivers/scsi/sym53c8xx_2/sym_hipd.c dp_sgmin = SYM_CONF_MAX_SG - cp->segments;
segments 737 drivers/scsi/sym53c8xx_2/sym_hipd.h int segments; /* Number of SG segments */
segments 140 drivers/staging/octeon/ethernet-rx.c int segments = work->word2.s.bufs;
segments 145 drivers/staging/octeon/ethernet-rx.c while (segments--) {
segments 177 drivers/staging/octeon/ethernet.c int segments = work->word2.s.bufs;
segments 180 drivers/staging/octeon/ethernet.c while (segments--) {
segments 1006 drivers/xen/gntdev.c if (copy_from_user(&seg, &copy.segments[i], sizeof(seg))) {
segments 1011 drivers/xen/gntdev.c ret = gntdev_grant_copy_seg(&batch, &seg, &copy.segments[i].status);
segments 473 fs/f2fs/f2fs.h u32 segments; /* # of segments to flush */
segments 2763 fs/f2fs/file.c end_segno = min(start_segno + range.segments, dev_end_segno);
segments 814 fs/nilfs2/recovery.c LIST_HEAD(segments);
segments 893 fs/nilfs2/recovery.c nilfs_dispose_segment_list(&segments);
segments 931 fs/nilfs2/recovery.c ret = nilfs_segment_list_add(&segments, segnum);
segments 944 fs/nilfs2/recovery.c list_splice_tail(&segments, &ri->ri_used_segments);
segments 952 fs/nilfs2/recovery.c nilfs_dispose_segment_list(&segments);
segments 447 fs/nilfs2/sysfs.c NILFS_DEV_INT_GROUP_OPS(segments, dev);
segments 448 fs/nilfs2/sysfs.c NILFS_DEV_INT_GROUP_TYPE(segments, dev);
segments 449 fs/nilfs2/sysfs.c NILFS_DEV_INT_GROUP_FNS(segments, dev);
segments 74 fs/nilfs2/sysfs.h NILFS_DEV_ATTR_STRUCT(segments);
segments 119 fs/nilfs2/sysfs.h NILFS_RO_ATTR(segments, name)
segments 96 include/linux/agpgart.h struct agp_segment_priv **segments;
segments 601 include/linux/syscalls.h struct kexec_segment __user *segments,
segments 33 include/uapi/linux/seg6.h struct in6_addr segments[0];
segments 196 include/uapi/xen/gntdev.h struct gntdev_grant_copy_segment __user *segments;
segments 24 kernel/kexec.c struct kexec_segment __user *segments)
segments 31 kernel/kexec.c segment_bytes = nr_segments * sizeof(*segments);
segments 32 kernel/kexec.c ret = copy_from_user(image->segment, segments, segment_bytes);
segments 41 kernel/kexec.c struct kexec_segment __user *segments,
segments 62 kernel/kexec.c ret = copy_user_segment_list(image, nr_segments, segments);
segments 107 kernel/kexec.c struct kexec_segment __user *segments, unsigned long flags)
segments 135 kernel/kexec.c ret = kimage_alloc_init(&image, entry, nr_segments, segments, flags);
segments 233 kernel/kexec.c struct kexec_segment __user *, segments, unsigned long, flags)
segments 257 kernel/kexec.c result = do_kexec_load(entry, nr_segments, segments, flags);
segments 267 kernel/kexec.c struct compat_kexec_segment __user *, segments,
segments 286 kernel/kexec.c result = copy_from_user(&in, &segments[i], sizeof(in));
segments 179 lib/assoc_array.c unsigned long segments;
segments 199 lib/assoc_array.c segments = ops->get_key_chunk(index_key, level);
segments 200 lib/assoc_array.c pr_devel("segments[%d]: %lx\n", level, segments);
segments 207 lib/assoc_array.c slot = segments >> (level & ASSOC_ARRAY_KEY_CHUNK_MASK);
segments 253 lib/assoc_array.c segments = ops->get_key_chunk(index_key, sc_level);
segments 256 lib/assoc_array.c dissimilarity = segments ^ sc_segments;
segments 5035 net/core/filter.c ptr + len > (void *)&srh->segments)
segments 216 net/ipv6/ah6.c int segments, segments_left;
segments 232 net/ipv6/ah6.c segments = rthdr->hdrlen >> 1;
segments 235 net/ipv6/ah6.c final_addr = addrs[segments - 1];
segments 237 net/ipv6/ah6.c addrs += segments - segments_left;
segments 334 net/ipv6/exthdrs.c addr = hdr->segments + hdr->segments_left;
segments 434 net/ipv6/exthdrs.c addr = hdr->segments + hdr->segments_left;
segments 905 net/ipv6/exthdrs.c memcpy(sr_phdr->segments + 1, sr_ihdr->segments + 1,
segments 908 net/ipv6/exthdrs.c sr_phdr->segments[0] = **addr_p;
segments 909 net/ipv6/exthdrs.c *addr_p = &sr_ihdr->segments[sr_ihdr->segments_left];
segments 1159 net/ipv6/exthdrs.c fl6->daddr = srh->segments[srh->segments_left];
segments 211 net/ipv6/seg6_hmac.c memcpy(off, hdr->segments + i, 16);
segments 155 net/ipv6/seg6_iptunnel.c hdr->daddr = isrh->segments[isrh->first_segment];
segments 205 net/ipv6/seg6_iptunnel.c isrh->segments[0] = hdr->daddr;
segments 206 net/ipv6/seg6_iptunnel.c hdr->daddr = isrh->segments[isrh->first_segment];
segments 150 net/ipv6/seg6_local.c addr = srh->segments + srh->segments_left;
segments 83 sound/pci/cs46xx/cs46xx_dsp_spos.h struct dsp_segment_desc * segments;
segments 357 sound/pci/cs46xx/cs46xx_lib.c if (module->segments) {
segments 360 sound/pci/cs46xx/cs46xx_lib.c kfree(module->segments[i].data);
segments 361 sound/pci/cs46xx/cs46xx_lib.c kfree(module->segments);
segments 436 sound/pci/cs46xx/cs46xx_lib.c module->segments =
segments 438 sound/pci/cs46xx/cs46xx_lib.c if (!module->segments)
segments 441 sound/pci/cs46xx/cs46xx_lib.c struct dsp_segment_desc *entry = &module->segments[i];
segments 117 sound/pci/cs46xx/dsp_spos.c if (module->segments[i].segment_type == seg_type) {
segments 118 sound/pci/cs46xx/dsp_spos.c return (module->segments + i);
segments 536 sound/pci/cs46xx/dsp_spos.c struct dsp_segment_desc * desc = (ins->modules[i].segments + j);
segments 62 sound/soc/intel/skylake/skl-sst-utils.c struct segment_desc segments[3];
segments 33 tools/include/uapi/linux/seg6.h struct in6_addr segments[0];
segments 44 tools/testing/selftests/bpf/progs/test_lwt_seg6local.c struct ip6_addr_t segments[0];
segments 44 tools/testing/selftests/bpf/progs/test_seg6_loop.c struct ip6_addr_t segments[0];