seg_size 156 arch/powerpc/include/asm/kvm_book3s.h extern void kvmppc_mmu_flush_segment(struct kvm_vcpu *vcpu, ulong eaddr, ulong seg_size);
seg_size 467 arch/powerpc/kvm/book3s_64_mmu.c u64 seg_size;
seg_size 482 arch/powerpc/kvm/book3s_64_mmu.c seg_size = 1ull << kvmppc_slb_sid_shift(slbe);
seg_size 483 arch/powerpc/kvm/book3s_64_mmu.c kvmppc_mmu_flush_segment(vcpu, ea & ~(seg_size - 1), seg_size);
seg_size 356 arch/powerpc/kvm/book3s_64_mmu_host.c void kvmppc_mmu_flush_segment(struct kvm_vcpu *vcpu, ulong ea, ulong seg_size)
seg_size 359 arch/powerpc/kvm/book3s_64_mmu_host.c ulong seg_mask = -seg_size;
seg_size 62 arch/powerpc/mm/dma-noncoherent.c size_t seg_size = min((size_t)(PAGE_SIZE - offset), size);
seg_size 63 arch/powerpc/mm/dma-noncoherent.c size_t cur_size = seg_size;
seg_size 65 arch/powerpc/mm/dma-noncoherent.c int nr_segs = 1 + ((size - seg_size) + PAGE_SIZE - 1)/PAGE_SIZE;
seg_size 74 arch/powerpc/mm/dma-noncoherent.c __dma_sync((void *)start, seg_size, direction);
seg_size 79 arch/powerpc/mm/dma-noncoherent.c seg_size = min((size_t)PAGE_SIZE, size - cur_size);
seg_size 82 arch/powerpc/mm/dma-noncoherent.c cur_size += seg_size;
seg_size 386 arch/x86/kernel/amd_gart_64.c unsigned int seg_size;
seg_size 396 arch/x86/kernel/amd_gart_64.c seg_size = 0;
seg_size 416 arch/x86/kernel/amd_gart_64.c (s->length + seg_size > max_seg_size) ||
seg_size 423 arch/x86/kernel/amd_gart_64.c seg_size = 0;
seg_size 431 arch/x86/kernel/amd_gart_64.c seg_size += s->length;
seg_size 31 block/blk-integrity.c unsigned int seg_size = 0;
seg_size 40 block/blk-integrity.c if (seg_size + iv.bv_len > queue_max_segment_size(q))
seg_size 43 block/blk-integrity.c seg_size += iv.bv_len;
seg_size 47 block/blk-integrity.c seg_size = iv.bv_len;
seg_size 201 block/blk-merge.c unsigned seg_size = 0;
seg_size 204 block/blk-merge.c seg_size = get_max_segment_size(q, bv->bv_offset + total_len);
seg_size 205 block/blk-merge.c seg_size = min(seg_size, len);
seg_size 208 block/blk-merge.c total_len += seg_size;
seg_size 209 block/blk-merge.c len -= seg_size;
seg_size 225 drivers/bluetooth/btqca.c static int qca_tlv_send_segment(struct hci_dev *hdev, int seg_size,
seg_size 235 drivers/bluetooth/btqca.c cmd[1] = seg_size;
seg_size 236 drivers/bluetooth/btqca.c memcpy(cmd + 2, data, seg_size);
seg_size 239 drivers/bluetooth/btqca.c return __hci_cmd_send(hdev, EDL_PATCH_CMD_OPCODE, seg_size + 2,
seg_size 242 drivers/bluetooth/btqca.c skb = __hci_cmd_sync_ev(hdev, EDL_PATCH_CMD_OPCODE, seg_size + 2, cmd,
seg_size 984 drivers/infiniband/core/mad.c int seg_size, pad;
seg_size 986 drivers/infiniband/core/mad.c seg_size = mad_size - hdr_len;
seg_size 987 drivers/infiniband/core/mad.c if (data_len && seg_size) {
seg_size 988 drivers/infiniband/core/mad.c pad = seg_size - data_len % seg_size;
seg_size 989 drivers/infiniband/core/mad.c return pad == seg_size ? 0 : pad;
seg_size 991 drivers/infiniband/core/mad.c return seg_size;
seg_size 1010 drivers/infiniband/core/mad.c int left, seg_size, pad;
seg_size 1012 drivers/infiniband/core/mad.c send_buf->seg_size = mad_size - send_buf->hdr_len;
seg_size 1014 drivers/infiniband/core/mad.c seg_size = send_buf->seg_size;
seg_size 1018 drivers/infiniband/core/mad.c for (left = send_buf->data_len + pad; left > 0; left -= seg_size) {
seg_size 1019 drivers/infiniband/core/mad.c seg = kmalloc(sizeof (*seg) + seg_size, gfp_mask);
seg_size 1030 drivers/infiniband/core/mad.c memset(seg->data + seg_size - pad, 0, pad);
seg_size 297 drivers/infiniband/core/user_mad.c size_t seg_size;
seg_size 300 drivers/infiniband/core/user_mad.c seg_size = packet->recv_wc->mad_seg_size;
seg_size 303 drivers/infiniband/core/user_mad.c if ((packet->length <= seg_size &&
seg_size 305 drivers/infiniband/core/user_mad.c (packet->length > seg_size &&
seg_size 306 drivers/infiniband/core/user_mad.c count < hdr_size(file) + seg_size))
seg_size 313 drivers/infiniband/core/user_mad.c seg_payload = min_t(int, packet->length, seg_size);
seg_size 330 drivers/infiniband/core/user_mad.c max_seg_payload = seg_size - offset;
seg_size 430 drivers/infiniband/core/user_mad.c seg++, left -= msg->seg_size, buf += msg->seg_size) {
seg_size 432 drivers/infiniband/core/user_mad.c min(left, msg->seg_size)))
seg_size 828 drivers/infiniband/hw/hns/hns_roce_hem.c u32 seg_size = 64; /* 8 bytes per BA and 8 BA per segment */
seg_size 842 drivers/infiniband/hw/hns/hns_roce_hem.c dma_offset = offset = (obj & (table->num_obj - 1)) * seg_size %
seg_size 73 drivers/lightnvm/pblk-rb.c unsigned int seg_size)
seg_size 89 drivers/lightnvm/pblk-rb.c power_seg_sz = get_count_order(seg_size);
seg_size 93 drivers/lightnvm/pblk-rb.c rb->seg_size = (1 << power_seg_sz);
seg_size 147 drivers/lightnvm/pblk-rb.c entry->data = kaddr + (i * rb->seg_size);
seg_size 331 drivers/lightnvm/pblk-rb.c memcpy(entry->data, data, rb->seg_size);
seg_size 599 drivers/lightnvm/pblk-rb.c if (bio_add_pc_page(q, bio, page, rb->seg_size, 0) !=
seg_size 600 drivers/lightnvm/pblk-rb.c rb->seg_size) {
seg_size 677 drivers/lightnvm/pblk-rb.c memcpy(data, entry->data, rb->seg_size);
seg_size 188 drivers/lightnvm/pblk.h unsigned int seg_size; /* Size of the data segments being
seg_size 123 drivers/mmc/core/sdio_ops.c unsigned int seg_size = card->host->max_seg_size;
seg_size 151 drivers/mmc/core/sdio_ops.c nents = DIV_ROUND_UP(left_size, seg_size);
seg_size 160 drivers/mmc/core/sdio_ops.c sg_set_buf(sg_ptr, buf + i * seg_size,
seg_size 161 drivers/mmc/core/sdio_ops.c min(seg_size, left_size));
seg_size 162 drivers/mmc/core/sdio_ops.c left_size -= seg_size;
seg_size 411 drivers/net/ethernet/intel/ice/ice_flex_pipe.c le32_to_cpu(ice_seg->hdr.seg_size), ice_seg->hdr.seg_name);
seg_size 573 drivers/net/ethernet/intel/ice/ice_flex_pipe.c if (len < off + le32_to_cpu(seg->seg_size))
seg_size 31 drivers/net/ethernet/intel/ice/ice_flex_type.h __le32 seg_size;
seg_size 76 drivers/net/wireless/ath/ath10k/swap.c u32 seg_size;
seg_size 84 drivers/net/wireless/ath/ath10k/swap.c seg_size = __le32_to_cpu(seg_info->seg_hw_info.size);
seg_size 85 drivers/net/wireless/ath/ath10k/swap.c dma_free_coherent(ar->dev, seg_size, seg_info->virt_address[0],
seg_size 703 drivers/staging/qlge/qlge_dbg.c u32 seg_number, u32 seg_size, u8 *desc)
seg_size 708 drivers/staging/qlge/qlge_dbg.c seg_hdr->segSize = seg_size;
seg_size 368 drivers/staging/rtl8192u/r8192U.h u16 seg_size;
seg_size 146 drivers/staging/wusbcore/wa-xfer.c size_t seg_size;
seg_size 348 drivers/staging/wusbcore/wa-xfer.c && seg->result < xfer->seg_size
seg_size 546 drivers/staging/wusbcore/wa-xfer.c <= xfer->seg_size)) {
seg_size 610 drivers/staging/wusbcore/wa-xfer.c xfer->seg_size = le16_to_cpu(rpipe->descr.wBlocks)
seg_size 615 drivers/staging/wusbcore/wa-xfer.c if (xfer->seg_size < maxpktsize) {
seg_size 618 drivers/staging/wusbcore/wa-xfer.c xfer->seg_size, maxpktsize);
seg_size 622 drivers/staging/wusbcore/wa-xfer.c xfer->seg_size = (xfer->seg_size / maxpktsize) * maxpktsize;
seg_size 632 drivers/staging/wusbcore/wa-xfer.c int seg_size; /* don't care. */
seg_size 634 drivers/staging/wusbcore/wa-xfer.c index, &seg_size);
seg_size 639 drivers/staging/wusbcore/wa-xfer.c xfer->seg_size);
seg_size 646 drivers/staging/wusbcore/wa-xfer.c (urb->transfer_buffer_length/xfer->seg_size),
seg_size 1205 drivers/staging/wusbcore/wa-xfer.c buf_itr_size = min(buf_size, xfer->seg_size);
seg_size 1348 drivers/staging/wusbcore/wa-xfer.c xfer_hdr0->dwTransferLength = transfer_size > xfer->seg_size ?
seg_size 1349 drivers/staging/wusbcore/wa-xfer.c cpu_to_le32(xfer->seg_size) :
seg_size 1351 drivers/staging/wusbcore/wa-xfer.c transfer_size -= xfer->seg_size;
seg_size 1357 drivers/staging/wusbcore/wa-xfer.c transfer_size > xfer->seg_size ?
seg_size 1358 drivers/staging/wusbcore/wa-xfer.c cpu_to_le32(xfer->seg_size)
seg_size 1361 drivers/staging/wusbcore/wa-xfer.c transfer_size -= xfer->seg_size;
seg_size 2230 drivers/staging/wusbcore/wa-xfer.c + (seg_idx * xfer->seg_size);
seg_size 2242 drivers/staging/wusbcore/wa-xfer.c + (seg_idx * xfer->seg_size);
seg_size 2252 drivers/staging/wusbcore/wa-xfer.c seg_idx * xfer->seg_size,
seg_size 252 drivers/usb/early/xhci-dbc.c entry->seg_size = cpu_to_le32(XDBC_TRBS_PER_SEGMENT);
seg_size 61 drivers/usb/early/xhci-dbc.h __le32 seg_size;
seg_size 1819 drivers/usb/host/xhci-mem.c entry->seg_size = cpu_to_le32(TRBS_PER_SEGMENT);
seg_size 1622 drivers/usb/host/xhci.h __le32 seg_size;
seg_size 23 drivers/usb/mtu3/mtu3_core.c static int ep_fifo_alloc(struct mtu3_ep *mep, u32 seg_size)
seg_size 26 drivers/usb/mtu3/mtu3_core.c u32 num_bits = DIV_ROUND_UP(seg_size, MTU3_EP_FIFO_UNIT);
seg_size 149 drivers/video/fbdev/via/via-core.c u32 seg_size; /* Size, 16-byte units */
seg_size 259 drivers/video/fbdev/via/via-core.c descr->seg_size = sg_dma_len(sgentry) >> 4;
seg_size 25 fs/afs/xdr_fs.h __be32 seg_size;
seg_size 498 include/rdma/ib_mad.h int seg_size;
seg_size 3782 net/ipv4/tcp_output.c unsigned int seg_size = tcp_wnd_end(tp) - TCP_SKB_CB(skb)->seq;
seg_size 3791 net/ipv4/tcp_output.c if (seg_size < TCP_SKB_CB(skb)->end_seq - TCP_SKB_CB(skb)->seq ||
seg_size 3793 net/ipv4/tcp_output.c seg_size = min(seg_size, mss);
seg_size 3796 net/ipv4/tcp_output.c skb, seg_size, mss, GFP_ATOMIC))
seg_size 165 tools/testing/selftests/kvm/lib/elf.c size_t seg_size = seg_vend - seg_vstart + 1;
seg_size 167 tools/testing/selftests/kvm/lib/elf.c vm_vaddr_t vaddr = vm_vaddr_alloc(vm, seg_size, seg_vstart,
seg_size 175 tools/testing/selftests/kvm/lib/elf.c memset(addr_gva2hva(vm, vaddr), 0, seg_size);