dist 737 arch/arm64/kernel/hw_breakpoint.c u64 min_dist = -1, dist;
dist 771 arch/arm64/kernel/hw_breakpoint.c dist = get_distance_from_watchpoint(addr, val, &ctrl);
dist 772 arch/arm64/kernel/hw_breakpoint.c if (dist < min_dist) {
dist 773 arch/arm64/kernel/hw_breakpoint.c min_dist = dist;
dist 777 arch/arm64/kernel/hw_breakpoint.c if (dist != 0)
dist 102 arch/hexagon/kernel/module.c int dist = (int)(value - (uint32_t)location);
dist 103 arch/hexagon/kernel/module.c if ((dist < -0x00800000) ||
dist 104 arch/hexagon/kernel/module.c (dist >= 0x00800000)) {
dist 109 arch/hexagon/kernel/module.c dist, value, (uint32_t)location,
dist 116 arch/hexagon/kernel/module.c *location |= 0x00003fff & dist;
dist 117 arch/hexagon/kernel/module.c *location |= 0x01ff0000 & (dist<<2);
dist 84 arch/ia64/kernel/ptrace.c unsigned long dist; \
dist 86 arch/ia64/kernel/ptrace.c dist = 64 + bit - first; \
dist 88 arch/ia64/kernel/ptrace.c dist = bit - first; \
dist 89 arch/ia64/kernel/ptrace.c ia64_rotr(unat, dist) & mask; \
dist 123 arch/ia64/kernel/ptrace.c long dist; \
dist 125 arch/ia64/kernel/ptrace.c dist = 64 + bit - first; \
dist 127 arch/ia64/kernel/ptrace.c dist = bit - first; \
dist 128 arch/ia64/kernel/ptrace.c ia64_rotl(nat & mask, dist); \
dist 54 arch/m68k/kernel/time.c static unsigned cnt = 0, period = 0, dist = 0;
dist 56 arch/m68k/kernel/time.c if (cnt == 0 || cnt == dist)
dist 58 arch/m68k/kernel/time.c else if (cnt == 7 || cnt == dist+7)
dist 68 arch/m68k/kernel/time.c dist = period / 4;
dist 168 arch/powerpc/mm/numa.c int dist = 0;
dist 176 arch/powerpc/mm/numa.c dist++;
dist 179 arch/powerpc/mm/numa.c return dist;
dist 308 arch/powerpc/platforms/pseries/papr_scm.c int min_dist = INT_MAX, dist;
dist 316 arch/powerpc/platforms/pseries/papr_scm.c dist = node_distance(node, nid);
dist 317 arch/powerpc/platforms/pseries/papr_scm.c if (dist < min_dist) {
dist 318 arch/powerpc/platforms/pseries/papr_scm.c min_dist = dist;
dist 1043 arch/s390/mm/gmap.c unsigned long vmaddr, dist;
dist 1063 arch/s390/mm/gmap.c dist = HPAGE_SIZE - (gaddr & ~HPAGE_MASK);
dist 1064 arch/s390/mm/gmap.c len = len < dist ? 0 : len - dist;
dist 505 arch/x86/mm/numa_emulation.c int dist;
dist 507 arch/x86/mm/numa_emulation.c if (get_option(&emu_cmdline, &dist) == 2)
dist 510 arch/x86/mm/numa_emulation.c dist = physi == physj ?
dist 513 arch/x86/mm/numa_emulation.c dist = phys_dist[physi * numa_dist_cnt + physj];
dist 515 arch/x86/mm/numa_emulation.c numa_set_distance(i, j, dist);
dist 101 drivers/acpi/numa.c int min_dist = INT_MAX, dist, n;
dist 104 drivers/acpi/numa.c dist = node_distance(node, n);
dist 105 drivers/acpi/numa.c if (dist < min_dist) {
dist 106 drivers/acpi/numa.c min_dist = dist;
dist 987 drivers/ata/sata_sx4.c long dist;
dist 1005 drivers/ata/sata_sx4.c dist = ((long) (window_size - (offset + size))) >= 0 ? size :
dist 1007 drivers/ata/sata_sx4.c memcpy_fromio(psource, dimm_mmio + offset / 4, dist);
dist 1009 drivers/ata/sata_sx4.c psource += dist;
dist 1010 drivers/ata/sata_sx4.c size -= dist;
dist 1039 drivers/ata/sata_sx4.c long dist;
dist 1054 drivers/ata/sata_sx4.c dist = ((long)(s32)(window_size - (offset + size))) >= 0 ? size :
dist 1056 drivers/ata/sata_sx4.c memcpy_toio(dimm_mmio + offset / 4, psource, dist);
dist 1060 drivers/ata/sata_sx4.c psource += dist;
dist 1061 drivers/ata/sata_sx4.c size -= dist;
dist 392 drivers/base/regmap/regcache-rbtree.c unsigned int dist, best_dist = UINT_MAX;
dist 413 drivers/base/regmap/regcache-rbtree.c dist = base_reg - reg;
dist 415 drivers/base/regmap/regcache-rbtree.c dist = reg - top_reg;
dist 417 drivers/base/regmap/regcache-rbtree.c dist = 0;
dist 418 drivers/base/regmap/regcache-rbtree.c if (dist < best_dist) {
dist 420 drivers/base/regmap/regcache-rbtree.c best_dist = dist;
dist 62 drivers/gpu/drm/amd/amdgpu/mmhub_v9_4.c int dist = mmVML2VC0_VM_CONTEXT1_PAGE_TABLE_BASE_ADDR_LO32
dist 67 drivers/gpu/drm/amd/amdgpu/mmhub_v9_4.c dist * vmid + hubid * MMHUB_INSTANCE_REGISTER_OFFSET,
dist 72 drivers/gpu/drm/amd/amdgpu/mmhub_v9_4.c dist * vmid + hubid * MMHUB_INSTANCE_REGISTER_OFFSET,
dist 537 drivers/gpu/drm/amd/amdgpu/mmhub_v9_4.c int dist = mmDAGB1_CNTL_MISC2 - mmDAGB0_CNTL_MISC2;
dist 557 drivers/gpu/drm/amd/amdgpu/mmhub_v9_4.c j * dist);
dist 581 drivers/gpu/drm/amd/amdgpu/mmhub_v9_4.c j * dist, data1);
dist 50 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgm200.c u32 dist[TPC_MAX / 4] = {};
dist 57 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgm200.c dist[sm / 4] |= ((gpc << 4) | tpc) << ((sm % 4) * 8);
dist 62 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgm200.c nvkm_wr32(device, 0x405b60 + (i * 4), dist[i]);
dist 100 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgp100.c u32 dist[TPC_MAX / 4] = {}, gpcs[16] = {};
dist 106 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgp100.c dist[sm / 4] |= ((gpc << 4) | tpc) << ((sm % 4) * 8);
dist 111 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgp100.c nvkm_wr32(device, 0x405b60 + (i * 4), dist[i]);
dist 1943 drivers/irqchip/irq-gic-v3.c struct acpi_madt_generic_distributor *dist;
dist 1946 drivers/irqchip/irq-gic-v3.c dist = (struct acpi_madt_generic_distributor *)header;
dist 1947 drivers/irqchip/irq-gic-v3.c if (dist->version != ape->driver_data)
dist 2043 drivers/irqchip/irq-gic-v3.c struct acpi_madt_generic_distributor *dist;
dist 2049 drivers/irqchip/irq-gic-v3.c dist = (struct acpi_madt_generic_distributor *)header;
dist 2050 drivers/irqchip/irq-gic-v3.c acpi_data.dist_base = ioremap(dist->base_address,
dist 2075 drivers/irqchip/irq-gic-v3.c domain_handle = irq_domain_alloc_fwnode(&dist->base_address);
dist 1550 drivers/irqchip/irq-gic.c struct acpi_madt_generic_distributor *dist;
dist 1551 drivers/irqchip/irq-gic.c dist = (struct acpi_madt_generic_distributor *)header;
dist 1553 drivers/irqchip/irq-gic.c return (dist->version == ape->driver_data &&
dist 1554 drivers/irqchip/irq-gic.c (dist->version != ACPI_MADT_GIC_VERSION_NONE ||
dist 1599 drivers/irqchip/irq-gic.c struct acpi_madt_generic_distributor *dist;
dist 1618 drivers/irqchip/irq-gic.c dist = (struct acpi_madt_generic_distributor *)header;
dist 1619 drivers/irqchip/irq-gic.c gic->raw_dist_base = ioremap(dist->base_address,
dist 1638 drivers/irqchip/irq-gic.c domain_handle = irq_domain_alloc_fwnode(&dist->base_address);
dist 618 drivers/md/raid1.c sector_t dist;
dist 691 drivers/md/raid1.c dist = abs(this_sector - conf->mirrors[disk].head_position);
dist 698 drivers/md/raid1.c || dist == 0) {
dist 735 drivers/md/raid1.c if (dist < best_dist) {
dist 736 drivers/md/raid1.c best_dist = dist;
dist 1778 drivers/media/dvb-frontends/mb86a16.c u8 dist;
dist 1781 drivers/media/dvb-frontends/mb86a16.c if (mb86a16_read(state, MB86A16_DISTMON, &dist) != 2) {
dist 1785 drivers/media/dvb-frontends/mb86a16.c *ucblocks = dist;
dist 424 drivers/media/i2c/et8ek8/et8ek8_driver.c unsigned int dist;
dist 431 drivers/media/i2c/et8ek8/et8ek8_driver.c dist = min(fmt->width, format.width)
dist 433 drivers/media/i2c/et8ek8/et8ek8_driver.c dist = format.width * format.height
dist 434 drivers/media/i2c/et8ek8/et8ek8_driver.c + fmt->width * fmt->height - 2 * dist;
dist 438 drivers/media/i2c/et8ek8/et8ek8_driver.c if (dist < max_dist_match || !best_match) {
dist 440 drivers/media/i2c/et8ek8/et8ek8_driver.c max_dist_match = dist;
dist 443 drivers/media/i2c/et8ek8/et8ek8_driver.c if (dist < max_dist_other || !best_other) {
dist 445 drivers/media/i2c/et8ek8/et8ek8_driver.c max_dist_other = dist;
dist 792 drivers/media/i2c/ov5695.c int dist;
dist 798 drivers/media/i2c/ov5695.c dist = ov5695_get_reso_dist(&supported_modes[i], framefmt);
dist 799 drivers/media/i2c/ov5695.c if (cur_best_fit_dist == -1 || dist < cur_best_fit_dist) {
dist 800 drivers/media/i2c/ov5695.c cur_best_fit_dist = dist;
dist 1009 drivers/media/i2c/ov7251.c unsigned int dist;
dist 1018 drivers/media/i2c/ov7251.c dist = abs(fps_req - fps_tmp);
dist 1020 drivers/media/i2c/ov7251.c if (dist < max_dist_match) {
dist 1022 drivers/media/i2c/ov7251.c max_dist_match = dist;
dist 807 drivers/media/i2c/s5c73m3/s5c73m3-core.c int dist = abs(fs->width - fmt->width) +
dist 809 drivers/media/i2c/s5c73m3/s5c73m3-core.c if (dist < best_dist) {
dist 810 drivers/media/i2c/s5c73m3/s5c73m3-core.c best_dist = dist;
dist 108 drivers/media/usb/uvc/uvc_v4l2.c u32 best = -1, dist;
dist 111 drivers/media/usb/uvc/uvc_v4l2.c dist = interval > frame->dwFrameInterval[i]
dist 115 drivers/media/usb/uvc/uvc_v4l2.c if (dist > best)
dist 118 drivers/media/usb/uvc/uvc_v4l2.c best = dist;
dist 775 drivers/mtd/mtdswap.c unsigned int h, x, y, dist, base;
dist 783 drivers/mtd/mtdswap.c dist = maxdiff - MAX_ERASE_DIFF;
dist 784 drivers/mtd/mtdswap.c if (dist > COLLECT_NONDIRTY_BASE)
dist 785 drivers/mtd/mtdswap.c dist = COLLECT_NONDIRTY_BASE;
dist 795 drivers/mtd/mtdswap.c x = dist - base;
dist 482 drivers/net/ethernet/mellanox/mlx4/alloc.c u32 dist = (u32)-1;
dist 497 drivers/net/ethernet/mellanox/mlx4/alloc.c if (curr_dist < dist) {
dist 498 drivers/net/ethernet/mellanox/mlx4/alloc.c dist = curr_dist;
dist 1210 drivers/net/wireless/ath/ath6kl/htc_mbox.c bool dist = false;
dist 1222 drivers/net/wireless/ath/ath6kl/htc_mbox.c dist = true;
dist 1227 drivers/net/wireless/ath/ath6kl/htc_mbox.c dist = true;
dist 1231 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (dist) {
dist 1246 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (dist && !active)
dist 1599 drivers/net/wireless/ath/ath6kl/htc_mbox.c bool dist = false;
dist 1640 drivers/net/wireless/ath/ath6kl/htc_mbox.c dist = true;
dist 1654 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (dist) {
dist 342 drivers/pci/p2pdma.c int *dist, bool *acs_redirects, struct seq_buf *acs_list)
dist 380 drivers/pci/p2pdma.c if (dist)
dist 381 drivers/pci/p2pdma.c *dist = dist_a + dist_b;
dist 400 drivers/pci/p2pdma.c if (dist)
dist 401 drivers/pci/p2pdma.c *dist = dist_a + dist_b;
dist 460 drivers/pci/p2pdma.c int *dist, bool *acs_redirects, struct seq_buf *acs_list)
dist 464 drivers/pci/p2pdma.c map_type = __upstream_bridge_distance(provider, client, dist,
dist 481 drivers/pci/p2pdma.c int *dist)
dist 491 drivers/pci/p2pdma.c ret = upstream_bridge_distance(provider, client, dist, &acs_redirects,
dist 3071 drivers/scsi/lpfc/lpfc_hw.h uint32_t dist:2; /* Dist Type */
dist 3075 drivers/scsi/lpfc/lpfc_hw.h uint32_t dist:2; /* Dist Type */
dist 309 drivers/scsi/lpfc/lpfc_init.c char dist = ' ';
dist 324 drivers/scsi/lpfc/lpfc_init.c if (prg->dist < 4)
dist 325 drivers/scsi/lpfc/lpfc_init.c dist = dist_char[prg->dist];
dist 327 drivers/scsi/lpfc/lpfc_init.c if ((prg->dist == 3) && (prg->num == 0))
dist 333 drivers/scsi/lpfc/lpfc_init.c dist, prg->num);
dist 1840 drivers/video/fbdev/sh_mobile_lcdcfb.c unsigned int dist;
dist 1846 drivers/video/fbdev/sh_mobile_lcdcfb.c dist = var->xres * var->yres + mode->xres * mode->yres
dist 1850 drivers/video/fbdev/sh_mobile_lcdcfb.c if (dist < best_dist) {
dist 1853 drivers/video/fbdev/sh_mobile_lcdcfb.c best_dist = dist;
dist 293 fs/ceph/inode.c frag->dist[i] = le32_to_cpu(dirinfo->dist[i]);
dist 970 fs/ceph/mds_client.c mds = frag.dist[r];
dist 247 fs/ceph/super.h int dist[CEPH_MAX_DIRFRAG_REP];
dist 155 fs/f2fs/debug.c unsigned long long bimodal, dist;
dist 165 fs/f2fs/debug.c dist = abs(vblocks - hblks_per_sec);
dist 166 fs/f2fs/debug.c bimodal += dist * dist;
dist 173 fs/f2fs/debug.c dist = div_u64(MAIN_SECS(sbi) * hblks_per_sec * hblks_per_sec, 100);
dist 174 fs/f2fs/debug.c si->bimodal = div64_u64(bimodal, dist);
dist 1729 fs/gfs2/dir.c u32 dist = 1;
dist 1739 fs/gfs2/dir.c dist++;
dist 1757 fs/gfs2/dir.c leaf->lf_dist = cpu_to_be32(dist);
dist 422 fs/gfs2/log.c int dist;
dist 424 fs/gfs2/log.c dist = newer - older;
dist 425 fs/gfs2/log.c if (dist < 0)
dist 426 fs/gfs2/log.c dist += sdp->sd_jdesc->jd_blocks;
dist 428 fs/gfs2/log.c return dist;
dist 503 fs/gfs2/log.c unsigned int dist = log_distance(sdp, new_tail, sdp->sd_log_tail);
dist 507 fs/gfs2/log.c atomic_add(dist, &sdp->sd_log_blks_free);
dist 508 fs/gfs2/log.c trace_gfs2_log_blocks(sdp, dist);
dist 537 include/linux/ceph/ceph_fs.h __le32 dist[];
dist 30 include/linux/cpu_rmap.h u16 dist;
dist 158 include/linux/irqchip/arm-gic.h void gic_init(void __iomem *dist , void __iomem *cpu);
dist 1318 kernel/sched/fair.c int dist = node_distance(nid, node);
dist 1324 kernel/sched/fair.c if (dist == sched_max_numa_distance || node == nid)
dist 1335 kernel/sched/fair.c dist >= maxdist)
dist 1353 kernel/sched/fair.c faults *= (sched_max_numa_distance - dist);
dist 1370 kernel/sched/fair.c int dist)
dist 1383 kernel/sched/fair.c faults += score_nearby_nodes(p, nid, dist, true);
dist 1389 kernel/sched/fair.c int dist)
dist 1403 kernel/sched/fair.c faults += score_nearby_nodes(p, nid, dist, false);
dist 1513 kernel/sched/fair.c int dist;
dist 1597 kernel/sched/fair.c int dist = env->dist;
dist 1640 kernel/sched/fair.c imp = taskimp + task_weight(cur, env->src_nid, dist) -
dist 1641 kernel/sched/fair.c task_weight(cur, env->dst_nid, dist);
dist 1654 kernel/sched/fair.c imp += group_weight(cur, env->src_nid, dist) -
dist 1655 kernel/sched/fair.c group_weight(cur, env->dst_nid, dist);
dist 1657 kernel/sched/fair.c imp += task_weight(cur, env->src_nid, dist) -
dist 1658 kernel/sched/fair.c task_weight(cur, env->dst_nid, dist);
dist 1756 kernel/sched/fair.c int nid, ret, dist;
dist 1784 kernel/sched/fair.c dist = env.dist = node_distance(env.src_nid, env.dst_nid);
dist 1785 kernel/sched/fair.c taskweight = task_weight(p, env.src_nid, dist);
dist 1786 kernel/sched/fair.c groupweight = group_weight(p, env.src_nid, dist);
dist 1788 kernel/sched/fair.c taskimp = task_weight(p, env.dst_nid, dist) - taskweight;
dist 1789 kernel/sched/fair.c groupimp = group_weight(p, env.dst_nid, dist) - groupweight;
dist 1808 kernel/sched/fair.c dist = node_distance(env.src_nid, env.dst_nid);
dist 1810 kernel/sched/fair.c dist != env.dist) {
dist 1811 kernel/sched/fair.c taskweight = task_weight(p, env.src_nid, dist);
dist 1812 kernel/sched/fair.c groupweight = group_weight(p, env.src_nid, dist);
dist 1816 kernel/sched/fair.c taskimp = task_weight(p, nid, dist) - taskweight;
dist 1817 kernel/sched/fair.c groupimp = group_weight(p, nid, dist) - groupweight;
dist 1821 kernel/sched/fair.c env.dist = dist;
dist 2043 kernel/sched/fair.c int dist;
dist 2058 kernel/sched/fair.c dist = sched_max_numa_distance;
dist 2061 kernel/sched/fair.c score = group_weight(p, node, dist);
dist 2080 kernel/sched/fair.c for (dist = sched_max_numa_distance; dist > LOCAL_DISTANCE; dist--) {
dist 2086 kernel/sched/fair.c if (!find_numa_distance(dist))
dist 2096 kernel/sched/fair.c if (node_distance(a, b) < dist) {
dist 7192 kernel/sched/fair.c int src_nid, dst_nid, dist;
dist 7222 kernel/sched/fair.c dist = node_distance(src_nid, dst_nid);
dist 7224 kernel/sched/fair.c src_weight = group_weight(p, src_nid, dist);
dist 7225 kernel/sched/fair.c dst_weight = group_weight(p, dst_nid, dist);
dist 7227 kernel/sched/fair.c src_weight = task_weight(p, src_nid, dist);
dist 7228 kernel/sched/fair.c dst_weight = task_weight(p, dst_nid, dist);
dist 54 lib/cpu_rmap.c rmap->near[cpu].dist = CPU_RMAP_DIST_INF;
dist 95 lib/cpu_rmap.c const struct cpumask *mask, u16 dist)
dist 100 lib/cpu_rmap.c if (rmap->near[cpu].dist > dist &&
dist 101 lib/cpu_rmap.c rmap->near[neigh].dist <= dist) {
dist 103 lib/cpu_rmap.c rmap->near[cpu].dist = dist;
dist 121 lib/cpu_rmap.c cpu, index, rmap->near[cpu].dist);
dist 169 lib/cpu_rmap.c rmap->near[cpu].dist = CPU_RMAP_DIST_INF;
dist 181 lib/cpu_rmap.c rmap->near[cpu].dist = 0;
dist 320 lib/xz/xz_dec_lzma2.c static inline uint32_t dict_get(const struct dictionary *dict, uint32_t dist)
dist 322 lib/xz/xz_dec_lzma2.c size_t offset = dict->pos - dist - 1;
dist 324 lib/xz/xz_dec_lzma2.c if (dist >= dict->pos)
dist 346 lib/xz/xz_dec_lzma2.c static bool dict_repeat(struct dictionary *dict, uint32_t *len, uint32_t dist)
dist 351 lib/xz/xz_dec_lzma2.c if (dist >= dict->full || dist >= dict->size)
dist 357 lib/xz/xz_dec_lzma2.c back = dict->pos - dist - 1;
dist 358 lib/xz/xz_dec_lzma2.c if (dist >= dict->pos)
dist 165 lib/zlib_deflate/deftree.c #define d_code(dist) \
dist 166 lib/zlib_deflate/deftree.c ((dist) < 256 ? dist_code[dist] : dist_code[256+((dist)>>7)])
dist 232 lib/zlib_deflate/deftree.c int dist; /* distance index */
dist 254 lib/zlib_deflate/deftree.c dist = 0;
dist 256 lib/zlib_deflate/deftree.c base_dist[code] = dist;
dist 258 lib/zlib_deflate/deftree.c dist_code[dist++] = (uch)code;
dist 261 lib/zlib_deflate/deftree.c Assert (dist == 256, "tr_static_init: dist != 256");
dist 262 lib/zlib_deflate/deftree.c dist >>= 7; /* from now on, all distances are divided by 128 */
dist 264 lib/zlib_deflate/deftree.c base_dist[code] = dist << 7;
dist 266 lib/zlib_deflate/deftree.c dist_code[256 + dist++] = (uch)code;
dist 269 lib/zlib_deflate/deftree.c Assert (dist == 256, "tr_static_init: 256+dist != 512");
dist 971 lib/zlib_deflate/deftree.c unsigned dist, /* distance of matched string */
dist 975 lib/zlib_deflate/deftree.c s->d_buf[s->last_lit] = (ush)dist;
dist 977 lib/zlib_deflate/deftree.c if (dist == 0) {
dist 983 lib/zlib_deflate/deftree.c dist--; /* dist = match distance - 1 */
dist 984 lib/zlib_deflate/deftree.c Assert((ush)dist < (ush)MAX_DIST(s) &&
dist 986 lib/zlib_deflate/deftree.c (ush)d_code(dist) < (ush)D_CODES, "zlib_tr_tally: bad match");
dist 989 lib/zlib_deflate/deftree.c s->dyn_dtree[d_code(dist)].Freq++;
dist 1024 lib/zlib_deflate/deftree.c unsigned dist; /* distance of matched string */
dist 1031 lib/zlib_deflate/deftree.c dist = s->d_buf[lx];
dist 1033 lib/zlib_deflate/deftree.c if (dist == 0) {
dist 1045 lib/zlib_deflate/deftree.c dist--; /* dist is now the match distance - 1 */
dist 1046 lib/zlib_deflate/deftree.c code = d_code(dist);
dist 1052 lib/zlib_deflate/deftree.c dist -= base_dist[code];
dist 1053 lib/zlib_deflate/deftree.c send_bits(s, dist, extra); /* send the extra distance bits */
dist 277 lib/zlib_deflate/defutil.h int zlib_tr_tally (deflate_state *s, unsigned dist, unsigned lc);
dist 113 lib/zlib_inflate/inffast.c unsigned dist; /* match distance */
dist 180 lib/zlib_inflate/inffast.c dist = (unsigned)(this.val);
dist 190 lib/zlib_inflate/inffast.c dist += (unsigned)hold & ((1U << op) - 1);
dist 192 lib/zlib_inflate/inffast.c if (dist > dmax) {
dist 201 lib/zlib_inflate/inffast.c if (dist > op) { /* see if copy from window */
dist 202 lib/zlib_inflate/inffast.c op = dist - op; /* distance back in window */
dist 216 lib/zlib_inflate/inffast.c from = out - dist; /* rest from output */
dist 234 lib/zlib_inflate/inffast.c from = out - dist; /* rest from output */
dist 245 lib/zlib_inflate/inffast.c from = out - dist; /* rest from output */
dist 264 lib/zlib_inflate/inffast.c from = out - dist; /* copy direct from output */
dist 272 lib/zlib_inflate/inffast.c if (dist > 2) {
dist 290 lib/zlib_inflate/inffast.c if (dist == 1) {
dist 104 lib/zlib_inflate/inflate.c unsigned copy, dist;
dist 116 lib/zlib_inflate/inflate.c dist = state->wsize - state->write;
dist 117 lib/zlib_inflate/inflate.c if (dist > copy) dist = copy;
dist 118 lib/zlib_inflate/inflate.c memcpy(state->window + state->write, strm->next_out - copy, dist);
dist 119 lib/zlib_inflate/inflate.c copy -= dist;
dist 126 lib/zlib_inflate/inflate.c state->write += dist;
dist 128 lib/zlib_inflate/inflate.c if (state->whave < state->wsize) state->whave += dist;
dist 103 net/ipv4/tcp_bic.c __u32 dist = (ca->last_max_cwnd - cwnd)
dist 106 net/ipv4/tcp_bic.c if (dist > max_increment)
dist 109 net/ipv4/tcp_bic.c else if (dist <= 1U)
dist 114 net/ipv4/tcp_bic.c ca->cnt = cwnd / dist;
dist 337 net/netfilter/nf_nat_core.c u32 minip, maxip, j, dist;
dist 379 net/netfilter/nf_nat_core.c dist = maxip - minip + 1;
dist 382 net/netfilter/nf_nat_core.c dist = ~0;
dist 386 net/netfilter/nf_nat_core.c htonl(minip + reciprocal_scale(j, dist));
dist 320 net/sched/sch_netem.c const struct disttable *dist)
dist 332 net/sched/sch_netem.c if (dist == NULL)
dist 335 net/sched/sch_netem.c t = dist->table[rnd % dist->size];
dist 72 sound/pci/ctxfi/cttimer.c unsigned int position, dist, interval;
dist 75 sound/pci/ctxfi/cttimer.c dist = (position + buffer_size - ti->position) % buffer_size;
dist 76 sound/pci/ctxfi/cttimer.c if (dist >= period_size ||
dist 143 virt/kvm/arm/vgic/vgic-debug.c static void print_dist_state(struct seq_file *s, struct vgic_dist *dist)
dist 145 virt/kvm/arm/vgic/vgic-debug.c bool v3 = dist->vgic_model == KVM_DEV_TYPE_ARM_VGIC_V3;
dist 150 virt/kvm/arm/vgic/vgic-debug.c seq_printf(s, "nr_spis:\t%d\n", dist->nr_spis);
dist 152 virt/kvm/arm/vgic/vgic-debug.c seq_printf(s, "nr_lpis:\t%d\n", dist->lpi_list_count);
dist 153 virt/kvm/arm/vgic/vgic-debug.c seq_printf(s, "enabled:\t%d\n", dist->enabled);
dist 54 virt/kvm/arm/vgic/vgic-init.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 56 virt/kvm/arm/vgic/vgic-init.c INIT_LIST_HEAD(&dist->lpi_list_head);
dist 57 virt/kvm/arm/vgic/vgic-init.c INIT_LIST_HEAD(&dist->lpi_translation_cache);
dist 58 virt/kvm/arm/vgic/vgic-init.c raw_spin_lock_init(&dist->lpi_list_lock);
dist 144 virt/kvm/arm/vgic/vgic-init.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 148 virt/kvm/arm/vgic/vgic-init.c dist->spis = kcalloc(nr_spis, sizeof(struct vgic_irq), GFP_KERNEL);
dist 149 virt/kvm/arm/vgic/vgic-init.c if (!dist->spis)
dist 161 virt/kvm/arm/vgic/vgic-init.c struct vgic_irq *irq = &dist->spis[i];
dist 169 virt/kvm/arm/vgic/vgic-init.c switch (dist->vgic_model) {
dist 179 virt/kvm/arm/vgic/vgic-init.c kfree(dist->spis);
dist 198 virt/kvm/arm/vgic/vgic-init.c struct vgic_dist *dist = &vcpu->kvm->arch.vgic;
dist 237 virt/kvm/arm/vgic/vgic-init.c if (dist->vgic_model == KVM_DEV_TYPE_ARM_VGIC_V3) {
dist 265 virt/kvm/arm/vgic/vgic-init.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 277 virt/kvm/arm/vgic/vgic-init.c if (!dist->nr_spis)
dist 278 virt/kvm/arm/vgic/vgic-init.c dist->nr_spis = VGIC_NR_IRQS_LEGACY - VGIC_NR_PRIVATE_IRQS;
dist 280 virt/kvm/arm/vgic/vgic-init.c ret = kvm_vgic_dist_init(kvm, dist->nr_spis);
dist 290 virt/kvm/arm/vgic/vgic-init.c switch (dist->vgic_model) {
dist 322 virt/kvm/arm/vgic/vgic-init.c dist->implementation_rev = 2;
dist 323 virt/kvm/arm/vgic/vgic-init.c dist->initialized = true;
dist 331 virt/kvm/arm/vgic/vgic-init.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 334 virt/kvm/arm/vgic/vgic-init.c dist->ready = false;
dist 335 virt/kvm/arm/vgic/vgic-init.c dist->initialized = false;
dist 337 virt/kvm/arm/vgic/vgic-init.c kfree(dist->spis);
dist 338 virt/kvm/arm/vgic/vgic-init.c dist->spis = NULL;
dist 339 virt/kvm/arm/vgic/vgic-init.c dist->nr_spis = 0;
dist 342 virt/kvm/arm/vgic/vgic-init.c list_for_each_entry_safe(rdreg, next, &dist->rd_regions, list) {
dist 346 virt/kvm/arm/vgic/vgic-init.c INIT_LIST_HEAD(&dist->rd_regions);
dist 424 virt/kvm/arm/vgic/vgic-init.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 431 virt/kvm/arm/vgic/vgic-init.c if (dist->vgic_model == KVM_DEV_TYPE_ARM_VGIC_V2)
dist 124 virt/kvm/arm/vgic/vgic-irqfd.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 125 virt/kvm/arm/vgic/vgic-irqfd.c u32 nr = dist->nr_spis;
dist 42 virt/kvm/arm/vgic/vgic-its.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 65 virt/kvm/arm/vgic/vgic-its.c raw_spin_lock_irqsave(&dist->lpi_list_lock, flags);
dist 71 virt/kvm/arm/vgic/vgic-its.c list_for_each_entry(oldirq, &dist->lpi_list_head, lpi_list) {
dist 89 virt/kvm/arm/vgic/vgic-its.c list_add_tail(&irq->lpi_list, &dist->lpi_list_head);
dist 90 virt/kvm/arm/vgic/vgic-its.c dist->lpi_list_count++;
dist 93 virt/kvm/arm/vgic/vgic-its.c raw_spin_unlock_irqrestore(&dist->lpi_list_lock, flags);
dist 314 virt/kvm/arm/vgic/vgic-its.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 327 virt/kvm/arm/vgic/vgic-its.c irq_count = READ_ONCE(dist->lpi_list_count);
dist 332 virt/kvm/arm/vgic/vgic-its.c raw_spin_lock_irqsave(&dist->lpi_list_lock, flags);
dist 333 virt/kvm/arm/vgic/vgic-its.c list_for_each_entry(irq, &dist->lpi_list_head, lpi_list) {
dist 341 virt/kvm/arm/vgic/vgic-its.c raw_spin_unlock_irqrestore(&dist->lpi_list_lock, flags);
dist 538 virt/kvm/arm/vgic/vgic-its.c static struct vgic_irq *__vgic_its_check_cache(struct vgic_dist *dist,
dist 544 virt/kvm/arm/vgic/vgic-its.c list_for_each_entry(cte, &dist->lpi_translation_cache, entry) {
dist 560 virt/kvm/arm/vgic/vgic-its.c if (!list_is_first(&cte->entry, &dist->lpi_translation_cache))
dist 561 virt/kvm/arm/vgic/vgic-its.c list_move(&cte->entry, &dist->lpi_translation_cache);
dist 572 virt/kvm/arm/vgic/vgic-its.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 576 virt/kvm/arm/vgic/vgic-its.c raw_spin_lock_irqsave(&dist->lpi_list_lock, flags);
dist 577 virt/kvm/arm/vgic/vgic-its.c irq = __vgic_its_check_cache(dist, db, devid, eventid);
dist 578 virt/kvm/arm/vgic/vgic-its.c raw_spin_unlock_irqrestore(&dist->lpi_list_lock, flags);
dist 587 virt/kvm/arm/vgic/vgic-its.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 596 virt/kvm/arm/vgic/vgic-its.c raw_spin_lock_irqsave(&dist->lpi_list_lock, flags);
dist 598 virt/kvm/arm/vgic/vgic-its.c if (unlikely(list_empty(&dist->lpi_translation_cache)))
dist 607 virt/kvm/arm/vgic/vgic-its.c if (__vgic_its_check_cache(dist, db, devid, eventid))
dist 611 virt/kvm/arm/vgic/vgic-its.c cte = list_last_entry(&dist->lpi_translation_cache,
dist 630 virt/kvm/arm/vgic/vgic-its.c list_move(&cte->entry, &dist->lpi_translation_cache);
dist 633 virt/kvm/arm/vgic/vgic-its.c raw_spin_unlock_irqrestore(&dist->lpi_list_lock, flags);
dist 638 virt/kvm/arm/vgic/vgic-its.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 642 virt/kvm/arm/vgic/vgic-its.c raw_spin_lock_irqsave(&dist->lpi_list_lock, flags);
dist 644 virt/kvm/arm/vgic/vgic-its.c list_for_each_entry(cte, &dist->lpi_translation_cache, entry) {
dist 656 virt/kvm/arm/vgic/vgic-its.c raw_spin_unlock_irqrestore(&dist->lpi_list_lock, flags);
dist 1829 virt/kvm/arm/vgic/vgic-its.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 1833 virt/kvm/arm/vgic/vgic-its.c if (!list_empty(&dist->lpi_translation_cache))
dist 1847 virt/kvm/arm/vgic/vgic-its.c list_add(&cte->entry, &dist->lpi_translation_cache);
dist 1853 virt/kvm/arm/vgic/vgic-its.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 1859 virt/kvm/arm/vgic/vgic-its.c &dist->lpi_translation_cache, entry) {
dist 56 virt/kvm/arm/vgic/vgic-mmio-v2.c struct vgic_dist *dist = &vcpu->kvm->arch.vgic;
dist 57 virt/kvm/arm/vgic/vgic-mmio-v2.c bool was_enabled = dist->enabled;
dist 61 virt/kvm/arm/vgic/vgic-mmio-v2.c dist->enabled = val & GICD_ENABLE;
dist 62 virt/kvm/arm/vgic/vgic-mmio-v2.c if (!was_enabled && dist->enabled)
dist 41 virt/kvm/arm/vgic/vgic-mmio-v3.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 43 virt/kvm/arm/vgic/vgic-mmio-v3.c if (dist->vgic_model != KVM_DEV_TYPE_ARM_VGIC_V3)
dist 46 virt/kvm/arm/vgic/vgic-mmio-v3.c return dist->has_its;
dist 99 virt/kvm/arm/vgic/vgic-mmio-v3.c struct vgic_dist *dist = &vcpu->kvm->arch.vgic;
dist 100 virt/kvm/arm/vgic/vgic-mmio-v3.c bool was_enabled = dist->enabled;
dist 104 virt/kvm/arm/vgic/vgic-mmio-v3.c dist->enabled = val & GICD_CTLR_ENABLE_SS_G1;
dist 106 virt/kvm/arm/vgic/vgic-mmio-v3.c if (!was_enabled && dist->enabled)
dist 387 virt/kvm/arm/vgic/vgic-mmio-v3.c struct vgic_dist *dist = &vcpu->kvm->arch.vgic;
dist 389 virt/kvm/arm/vgic/vgic-mmio-v3.c return extract_bytes(dist->propbaser, addr & 7, len);
dist 396 virt/kvm/arm/vgic/vgic-mmio-v3.c struct vgic_dist *dist = &vcpu->kvm->arch.vgic;
dist 405 virt/kvm/arm/vgic/vgic-mmio-v3.c old_propbaser = READ_ONCE(dist->propbaser);
dist 409 virt/kvm/arm/vgic/vgic-mmio-v3.c } while (cmpxchg64(&dist->propbaser, old_propbaser,
dist 306 virt/kvm/arm/vgic/vgic-v2.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 312 virt/kvm/arm/vgic/vgic-v2.c if (IS_VGIC_ADDR_UNDEF(dist->vgic_dist_base) ||
dist 313 virt/kvm/arm/vgic/vgic-v2.c IS_VGIC_ADDR_UNDEF(dist->vgic_cpu_base)) {
dist 319 virt/kvm/arm/vgic/vgic-v2.c if (!vgic_v2_check_base(dist->vgic_dist_base, dist->vgic_cpu_base)) {
dist 335 virt/kvm/arm/vgic/vgic-v2.c ret = vgic_register_dist_iodev(kvm, dist->vgic_dist_base, VGIC_V2);
dist 342 virt/kvm/arm/vgic/vgic-v2.c ret = kvm_phys_addr_ioremap(kvm, dist->vgic_cpu_base,
dist 351 virt/kvm/arm/vgic/vgic-v2.c dist->ready = true;
dist 365 virt/kvm/arm/vgic/vgic-v3.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 371 virt/kvm/arm/vgic/vgic-v3.c list_for_each_entry(irq, &dist->lpi_list_head, lpi_list) {
dist 498 virt/kvm/arm/vgic/vgic-v3.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 516 virt/kvm/arm/vgic/vgic-v3.c if (IS_VGIC_ADDR_UNDEF(dist->vgic_dist_base)) {
dist 537 virt/kvm/arm/vgic/vgic-v3.c ret = vgic_register_dist_iodev(kvm, dist->vgic_dist_base, VGIC_V3);
dist 543 virt/kvm/arm/vgic/vgic-v3.c dist->ready = true;
dist 106 virt/kvm/arm/vgic/vgic-v4.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 113 virt/kvm/arm/vgic/vgic-v4.c if (dist->its_vm.vpes)
dist 118 virt/kvm/arm/vgic/vgic-v4.c dist->its_vm.vpes = kcalloc(nr_vcpus, sizeof(*dist->its_vm.vpes),
dist 120 virt/kvm/arm/vgic/vgic-v4.c if (!dist->its_vm.vpes)
dist 123 virt/kvm/arm/vgic/vgic-v4.c dist->its_vm.nr_vpes = nr_vcpus;
dist 126 virt/kvm/arm/vgic/vgic-v4.c dist->its_vm.vpes[i] = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
dist 128 virt/kvm/arm/vgic/vgic-v4.c ret = its_alloc_vcpu_irqs(&dist->its_vm);
dist 131 virt/kvm/arm/vgic/vgic-v4.c kfree(dist->its_vm.vpes);
dist 132 virt/kvm/arm/vgic/vgic-v4.c dist->its_vm.nr_vpes = 0;
dist 133 virt/kvm/arm/vgic/vgic-v4.c dist->its_vm.vpes = NULL;
dist 138 virt/kvm/arm/vgic/vgic-v4.c int irq = dist->its_vm.vpes[i]->irq;
dist 156 virt/kvm/arm/vgic/vgic-v4.c dist->its_vm.nr_vpes = i;
dist 60 virt/kvm/arm/vgic/vgic.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 64 virt/kvm/arm/vgic/vgic.c raw_spin_lock_irqsave(&dist->lpi_list_lock, flags);
dist 66 virt/kvm/arm/vgic/vgic.c list_for_each_entry(irq, &dist->lpi_list_head, lpi_list) {
dist 80 virt/kvm/arm/vgic/vgic.c raw_spin_unlock_irqrestore(&dist->lpi_list_lock, flags);
dist 127 virt/kvm/arm/vgic/vgic.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 133 virt/kvm/arm/vgic/vgic.c dist->lpi_list_count--;
dist 140 virt/kvm/arm/vgic/vgic.c struct vgic_dist *dist = &kvm->arch.vgic;
dist 146 virt/kvm/arm/vgic/vgic.c raw_spin_lock_irqsave(&dist->lpi_list_lock, flags);
dist 148 virt/kvm/arm/vgic/vgic.c raw_spin_unlock_irqrestore(&dist->lpi_list_lock, flags);
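
Many of the sites listed above (arch/arm64/kernel/hw_breakpoint.c, arch/powerpc/platforms/pseries/papr_scm.c, drivers/acpi/numa.c, drivers/base/regmap/regcache-rbtree.c, drivers/md/raid1.c, drivers/media/i2c/ov5695.c, and others) use the same idiom: scan a set of candidates, compute a distance for each, and keep the candidate with the smallest dist seen so far. The following is a minimal, self-contained user-space sketch of that pattern only; closest_index() and the sample table are illustrative and are not taken from any of the files above.

/*
 * Sketch of the "track the minimum dist while scanning candidates" idiom.
 * Build with any C compiler; no kernel headers required.
 */
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>

/* Return the index of the entry in tbl[] closest to target, or -1 if n == 0. */
static int closest_index(const int *tbl, int n, int target)
{
	int best = -1, min_dist = INT_MAX;

	for (int i = 0; i < n; i++) {
		int dist = abs(tbl[i] - target);

		if (dist < min_dist) {
			min_dist = dist;
			best = i;
		}
	}
	return best;
}

int main(void)
{
	int tbl[] = { 10, 20, 40, 80 };

	/* Prints 2: 40 is the nearest entry to 33. */
	printf("%d\n", closest_index(tbl, 4, 33));
	return 0;
}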