span 146 arch/sh/mm/pmb.c unsigned long span;
span 169 arch/sh/mm/pmb.c span = pmbe->size;
span 176 arch/sh/mm/pmb.c span += iter->size;
span 181 arch/sh/mm/pmb.c if (size <= span) {
span 657 arch/sh/mm/pmb.c unsigned long span, newsize;
span 661 arch/sh/mm/pmb.c span = newsize = head->size;
span 665 arch/sh/mm/pmb.c span += tail->size;
span 667 arch/sh/mm/pmb.c if (pmb_size_valid(span)) {
span 668 arch/sh/mm/pmb.c newsize = span;
span 1304 drivers/acpi/arm64/iort.c res[num_res].end = smmu->base_address + smmu->span - 1;
span 68 drivers/iio/dac/ad5791.c int (*get_lin_comp) (unsigned int span);
span 212 drivers/iio/dac/ad5791.c static int ad5791_get_lin_comp(unsigned int span)
span 214 drivers/iio/dac/ad5791.c if (span <= 10000)
span 216 drivers/iio/dac/ad5791.c else if (span <= 12000)
span 218 drivers/iio/dac/ad5791.c else if (span <= 16000)
span 220 drivers/iio/dac/ad5791.c else if (span <= 19000)
span 226 drivers/iio/dac/ad5791.c static int ad5780_get_lin_comp(unsigned int span)
span 228 drivers/iio/dac/ad5791.c if (span <= 10000)
span 447 drivers/iommu/arm-smmu-v3.c u8 span;
span 544 drivers/iommu/arm-smmu-v3.c u8 span;
span 1499 drivers/iommu/arm-smmu-v3.c val |= FIELD_PREP(STRTAB_L1_DESC_SPAN, desc->span);
span 1678 drivers/iommu/arm-smmu-v3.c desc->span = STRTAB_SPLIT + 1;
span 2106 drivers/iommu/arm-smmu-v3.c static int arm_smmu_bitmap_alloc(unsigned long *map, int span)
span 2108 drivers/iommu/arm-smmu-v3.c int idx, size = 1 << span;
span 1477 drivers/irqchip/irq-gic-v3-its.c u32 span;
span 1480 drivers/irqchip/irq-gic-v3-its.c static struct lpi_range *mk_lpi_range(u32 base, u32 span)
span 1487 drivers/irqchip/irq-gic-v3-its.c range->span = span;
span 1501 drivers/irqchip/irq-gic-v3-its.c if (range->span >= nr_lpis) {
span 1504 drivers/irqchip/irq-gic-v3-its.c range->span -= nr_lpis;
span 1506 drivers/irqchip/irq-gic-v3-its.c if (range->span == 0) {
span 1526 drivers/irqchip/irq-gic-v3-its.c if (a->base_id + a->span != b->base_id)
span 1529 drivers/irqchip/irq-gic-v3-its.c b->span += a->span;
span 167 drivers/net/ethernet/mellanox/mlxsw/spectrum.h } span;
span 24 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c mlxsw_sp->span.entries_count = MLXSW_CORE_RES_GET(mlxsw_sp->core,
span 26 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c mlxsw_sp->span.entries = kcalloc(mlxsw_sp->span.entries_count,
span 29 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c if (!mlxsw_sp->span.entries)
span 32 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c for (i = 0; i < mlxsw_sp->span.entries_count; i++) {
span 33 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c struct mlxsw_sp_span_entry *curr = &mlxsw_sp->span.entries[i];
span 46 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c for (i = 0; i < mlxsw_sp->span.entries_count; i++) {
span 47 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c struct mlxsw_sp_span_entry *curr = &mlxsw_sp->span.entries[i];
span 51 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c kfree(mlxsw_sp->span.entries);
span 627 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c for (i = 0; i < mlxsw_sp->span.entries_count; i++) {
span 628 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c if (!mlxsw_sp->span.entries[i].ref_count) {
span 629 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c span_entry = &mlxsw_sp->span.entries[i];
span 655 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c for (i = 0; i < mlxsw_sp->span.entries_count; i++) {
span 656 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c struct mlxsw_sp_span_entry *curr = &mlxsw_sp->span.entries[i];
span 676 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c for (i = 0; i < mlxsw_sp->span.entries_count; i++) {
span 677 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c struct mlxsw_sp_span_entry *curr = &mlxsw_sp->span.entries[i];
span 718 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c for (i = 0; i < mlxsw_sp->span.entries_count; i++) {
span 719 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c struct mlxsw_sp_span_entry *curr = &mlxsw_sp->span.entries[i];
span 807 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c for (i = 0; i < mlxsw_sp->span.entries_count; i++) {
span 809 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c &mlxsw_sp->span.entries[i];
span 965 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c for (i = 0; i < mlxsw_sp->span.entries_count; i++) {
span 966 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c struct mlxsw_sp_span_entry *curr = &mlxsw_sp->span.entries[i];
span 307 drivers/net/wireless/mediatek/mt7601u/mac.c u32 span;
span 330 drivers/net/wireless/mediatek/mt7601u/mac.c for (j = 0; j < spans[i].span; j++) {
span 1850 drivers/net/wireless/realtek/rtlwifi/btcoexist/halbtcoutsrc.c u32 offset, u32 span, u32 seconds)
span 774 drivers/net/wireless/realtek/rtlwifi/btcoexist/halbtcoutsrc.h u32 offset, u32 span, u32 seconds);
span 251 drivers/perf/arm_smmuv3_pmu.c int idx, u32 span, u32 sid)
span 256 drivers/perf/arm_smmuv3_pmu.c evtyper = get_event(event) | span << SMMU_PMCG_SID_SPAN_SHIFT;
span 277 drivers/perf/arm_smmuv3_pmu.c u32 span, sid;
span 281 drivers/perf/arm_smmuv3_pmu.c span = filter_en ? get_filter_span(event) :
span 288 drivers/perf/arm_smmuv3_pmu.c smmu_pmu_set_event_filter(event, idx, span, sid);
span 296 drivers/perf/arm_smmuv3_pmu.c smmu_pmu_set_event_filter(event, 0, span, sid);
span 52 drivers/s390/scsi/zfcp_qdio.c unsigned long long now, span;
span 56 drivers/s390/scsi/zfcp_qdio.c span = (now - qdio->req_q_time) >> 12;
span 58 drivers/s390/scsi/zfcp_qdio.c qdio->req_q_util += used * span;
span 426 drivers/scsi/megaraid.h adp_span_40ld span[MAX_SPAN_DEPTH];
span 431 drivers/scsi/megaraid.h adp_span_8ld span[MAX_SPAN_DEPTH];
span 649 drivers/scsi/megaraid/mbox_defs.h adap_span_40ld_t span[SPAN_DEPTH_8_SPANS];
span 662 drivers/scsi/megaraid/mbox_defs.h adap_span_8ld_t span[SPAN_DEPTH_8_SPANS];
span 675 drivers/scsi/megaraid/mbox_defs.h adap_span_8ld_t span[SPAN_DEPTH_4_SPANS];
span 2654 drivers/scsi/megaraid/megaraid_sas.h u16 MR_LdSpanArrayGet(u32 ld, u32 span, struct MR_DRV_RAID_MAP_ALL *map);
span 124 drivers/scsi/megaraid/megaraid_sas_fp.c u16 MR_LdSpanArrayGet(u32 ld, u32 span, struct MR_DRV_RAID_MAP_ALL *map)
span 126 drivers/scsi/megaraid/megaraid_sas_fp.c return le16_to_cpu(map->raidMap.ldSpanMap[ld].spanBlock[span].span.arrayRef);
span 149 drivers/scsi/megaraid/megaraid_sas_fp.c static struct MR_LD_SPAN *MR_LdSpanPtrGet(u32 ld, u32 span,
span 152 drivers/scsi/megaraid/megaraid_sas_fp.c return &map->raidMap.ldSpanMap[ld].spanBlock[span].span;
span 376 drivers/scsi/megaraid/megaraid_sas_fp.c u32 span, j;
span 378 drivers/scsi/megaraid/megaraid_sas_fp.c for (span = 0; span < raid->spanDepth; span++, pSpanBlock++) {
span 396 drivers/scsi/megaraid/megaraid_sas_fp.c return span;
span 428 drivers/scsi/megaraid/megaraid_sas_fp.c u32 span, info;
span 440 drivers/scsi/megaraid/megaraid_sas_fp.c for (span = 0; span < raid->spanDepth; span++)
span 441 drivers/scsi/megaraid/megaraid_sas_fp.c if (le32_to_cpu(map->raidMap.ldSpanMap[ld].spanBlock[span].
span 444 drivers/scsi/megaraid/megaraid_sas_fp.c spanBlock[span].
span 461 drivers/scsi/megaraid/megaraid_sas_fp.c return span;
span 491 drivers/scsi/megaraid/megaraid_sas_fp.c u32 info, strip_offset, span, span_offset;
span 507 drivers/scsi/megaraid/megaraid_sas_fp.c for (span = 0, span_offset = 0; span < raid->spanDepth; span++)
span 508 drivers/scsi/megaraid/megaraid_sas_fp.c if (le32_to_cpu(map->raidMap.ldSpanMap[ld].spanBlock[span].
span 511 drivers/scsi/megaraid/megaraid_sas_fp.c span_set->strip_offset[span])
span 549 drivers/scsi/megaraid/megaraid_sas_fp.c u32 span, info;
span 560 drivers/scsi/megaraid/megaraid_sas_fp.c for (span = 0; span < raid->spanDepth; span++)
span 561 drivers/scsi/megaraid/megaraid_sas_fp.c if (le32_to_cpu(map->raidMap.ldSpanMap[ld].spanBlock[span].
span 564 drivers/scsi/megaraid/megaraid_sas_fp.c spanBlock[span].block_span_info.quad[info];
span 575 drivers/scsi/megaraid/megaraid_sas_fp.c strip += span_set->strip_offset[span];
span 609 drivers/scsi/megaraid/megaraid_sas_fp.c u32 info, strip_offset, span, span_offset, retval;
span 623 drivers/scsi/megaraid/megaraid_sas_fp.c for (span = 0, span_offset = 0; span < raid->spanDepth; span++)
span 624 drivers/scsi/megaraid/megaraid_sas_fp.c if (le32_to_cpu(map->raidMap.ldSpanMap[ld].spanBlock[span].
span 627 drivers/scsi/megaraid/megaraid_sas_fp.c span_set->strip_offset[span])
span 629 drivers/scsi/megaraid/megaraid_sas_fp.c span_set->strip_offset[span];
span 646 drivers/scsi/megaraid/megaraid_sas_fp.c u8 get_arm(struct megasas_instance *instance, u32 ld, u8 span, u64 stripe,
span 657 drivers/scsi/megaraid/megaraid_sas_fp.c arm = mega_mod64(stripe, SPAN_ROW_SIZE(map, ld, span));
span 695 drivers/scsi/megaraid/megaraid_sas_fp.c u8 physArm, span;
span 709 drivers/scsi/megaraid/megaraid_sas_fp.c span = io_info->start_span;
span 716 drivers/scsi/megaraid/megaraid_sas_fp.c rowMod = mega_mod64(row, SPAN_ROW_SIZE(map, ld, span));
span 717 drivers/scsi/megaraid/megaraid_sas_fp.c armQ = SPAN_ROW_SIZE(map, ld, span) - 1 - rowMod;
span 719 drivers/scsi/megaraid/megaraid_sas_fp.c if (arm >= SPAN_ROW_SIZE(map, ld, span))
span 720 drivers/scsi/megaraid/megaraid_sas_fp.c arm -= SPAN_ROW_SIZE(map, ld, span);
span 724 drivers/scsi/megaraid/megaraid_sas_fp.c physArm = get_arm(instance, ld, span, stripRow, map);
span 728 drivers/scsi/megaraid/megaraid_sas_fp.c arRef = MR_LdSpanArrayGet(ld, span, map);
span 759 drivers/scsi/megaraid/megaraid_sas_fp.c *pdBlock += stripRef + le64_to_cpu(MR_LdSpanPtrGet(ld, span, map)->startBlk);
span 762 drivers/scsi/megaraid/megaraid_sas_fp.c (span << RAID_CTX_SPANARM_SPAN_SHIFT) | physArm;
span 764 drivers/scsi/megaraid/megaraid_sas_fp.c (span << RAID_CTX_SPANARM_SPAN_SHIFT) | physArm;
span 767 drivers/scsi/megaraid/megaraid_sas_fp.c (span << RAID_CTX_SPANARM_SPAN_SHIFT) | physArm;
span 798 drivers/scsi/megaraid/megaraid_sas_fp.c u8 physArm, span;
span 834 drivers/scsi/megaraid/megaraid_sas_fp.c span = 0;
span 837 drivers/scsi/megaraid/megaraid_sas_fp.c span = (u8)MR_GetSpanBlock(ld, row, pdBlock, map);
span 838 drivers/scsi/megaraid/megaraid_sas_fp.c if (span == SPAN_INVALID)
span 843 drivers/scsi/megaraid/megaraid_sas_fp.c arRef = MR_LdSpanArrayGet(ld, span, map);
span 877 drivers/scsi/megaraid/megaraid_sas_fp.c *pdBlock += stripRef + le64_to_cpu(MR_LdSpanPtrGet(ld, span, map)->startBlk);
span 880 drivers/scsi/megaraid/megaraid_sas_fp.c (span << RAID_CTX_SPANARM_SPAN_SHIFT) | physArm;
span 882 drivers/scsi/megaraid/megaraid_sas_fp.c (span << RAID_CTX_SPANARM_SPAN_SHIFT) | physArm;
span 885 drivers/scsi/megaraid/megaraid_sas_fp.c (span << RAID_CTX_SPANARM_SPAN_SHIFT) | physArm;
span 911 drivers/scsi/megaraid/megaraid_sas_fp.c u8 span, dataArms, arms, dataArm, logArm;
span 929 drivers/scsi/megaraid/megaraid_sas_fp.c span = 0;
span 931 drivers/scsi/megaraid/megaraid_sas_fp.c span = (u8)MR_GetSpanBlock(ld, rowNum, pdBlock, map);
span 932 drivers/scsi/megaraid/megaraid_sas_fp.c if (span == SPAN_INVALID)
span 956 drivers/scsi/megaraid/megaraid_sas_fp.c pRAID_Context->span_arm = (span << RAID_CTX_SPANARM_SPAN_SHIFT) | dataArm;
span 1225 drivers/scsi/megaraid/megaraid_sas_fp.c u8 span, count;
span 1241 drivers/scsi/megaraid/megaraid_sas_fp.c for (span = 0; span < raid->spanDepth; span++) {
span 1242 drivers/scsi/megaraid/megaraid_sas_fp.c if (le32_to_cpu(map->raidMap.ldSpanMap[ld].spanBlock[span].
span 1248 drivers/scsi/megaraid/megaraid_sas_fp.c spanBlock[span].block_span_info.
span 1317 drivers/scsi/megaraid/megaraid_sas_fp.c if (span == raid->spanDepth)
span 1359 drivers/scsi/megaraid/megaraid_sas_fp.c u8 bestArm, pd0, pd1, span, arm;
span 1365 drivers/scsi/megaraid/megaraid_sas_fp.c span = ((io_info->span_arm & RAID_CTX_SPANARM_SPAN_MASK)
span 1372 drivers/scsi/megaraid/megaraid_sas_fp.c SPAN_ROW_SIZE(drv_map, ld, span) : raid->rowSize;
span 1374 drivers/scsi/megaraid/megaraid_sas_fp.c arRef = MR_LdSpanArrayGet(ld, span, drv_map);
span 1404 drivers/scsi/megaraid/megaraid_sas_fp.c (span << RAID_CTX_SPANARM_SPAN_SHIFT) | bestArm;
span 2993 drivers/scsi/megaraid/megaraid_sas_fusion.c u8 span, physArm;
span 3045 drivers/scsi/megaraid/megaraid_sas_fusion.c span = physArm = 0;
span 3046 drivers/scsi/megaraid/megaraid_sas_fusion.c arRef = MR_LdSpanArrayGet(ld, span, local_map_ptr);
span 764 drivers/scsi/megaraid/megaraid_sas_fusion.h struct MR_LD_SPAN span;
span 1204 drivers/usb/host/ehci-sched.c iso_sched->span = urb->number_of_packets * stream->uperiod;
span 1269 drivers/usb/host/ehci-sched.c num_itds = 1 + (sched->span + 7) / 8;
span 1486 drivers/usb/host/ehci-sched.c u32 now, base, next, start, period, span, now2;
span 1495 drivers/usb/host/ehci-sched.c span = sched->span;
span 1497 drivers/usb/host/ehci-sched.c span <<= 3;
span 1622 drivers/usb/host/ehci-sched.c if (skip >= span) { /* Entirely in the past? */
span 1624 drivers/usb/host/ehci-sched.c urb, start + base, span - period, now2 + base,
span 1628 drivers/usb/host/ehci-sched.c skip = span - period;
span 1632 drivers/usb/host/ehci-sched.c skip = span; /* Skip the entire URB */
span 1649 drivers/usb/host/ehci-sched.c if (unlikely(start + span - period >= mod + wrap)) {
span 1651 drivers/usb/host/ehci-sched.c urb, start, span - period, mod + wrap);
span 2003 drivers/usb/host/ehci-sched.c iso_sched->span = urb->number_of_packets * stream->ps.period;
span 461 drivers/usb/host/ehci.h unsigned span;
span 4037 drivers/usb/host/fotg210-hcd.c iso_sched->span = urb->number_of_packets * stream->interval;
span 4094 drivers/usb/host/fotg210-hcd.c num_itds = 1 + (sched->span + 7) / 8;
span 4168 drivers/usb/host/fotg210-hcd.c u32 now, next, start, period, span;
span 4174 drivers/usb/host/fotg210-hcd.c span = sched->span;
span 4176 drivers/usb/host/fotg210-hcd.c if (span > mod - SCHEDULE_SLOP) {
span 4259 drivers/usb/host/fotg210-hcd.c if (unlikely(start - now + span - period >=
span 4262 drivers/usb/host/fotg210-hcd.c urb, start - now, span - period,
span 492 drivers/usb/host/fotg210.h unsigned span;
span 10845 fs/btrfs/inode.c sector_t *span)
span 11047 fs/btrfs/inode.c *span = bsi.highest_ppage - bsi.lowest_ppage + 1;
span 11059 fs/btrfs/inode.c sector_t *span)
span 3034 fs/f2fs/data.c struct file *swap_file, sector_t *span)
span 3112 fs/f2fs/data.c *span = 1 + highest_block - lowest_block;
span 3126 fs/f2fs/data.c sector_t *span)
span 3141 fs/f2fs/data.c ret = check_swap_activate(sis, file, span);
span 3159 fs/f2fs/data.c sector_t *span)
span 1983 fs/io_uring.c unsigned span = 0;
span 2048 fs/io_uring.c span++;
span 2051 fs/io_uring.c req->sequence -= span;
span 490 fs/nfs/file.c sector_t *span)
span 494 fs/nfs/file.c *span = sis->pages;
span 2277 fs/xfs/libxfs/xfs_attr_leaf.c int span;
span 2291 fs/xfs/libxfs/xfs_attr_leaf.c probe = span = ichdr.count / 2;
span 2292 fs/xfs/libxfs/xfs_attr_leaf.c for (entry = &entries[probe]; span > 4; entry = &entries[probe]) {
span 2293 fs/xfs/libxfs/xfs_attr_leaf.c span /= 2;
span 2295 fs/xfs/libxfs/xfs_attr_leaf.c probe += span;
span 2297 fs/xfs/libxfs/xfs_attr_leaf.c probe -= span;
span 2303 fs/xfs/libxfs/xfs_attr_leaf.c if (!(span <= 4 || be32_to_cpu(entry->hashval) == hashval))
span 1497 fs/xfs/libxfs/xfs_da_btree.c int span;
span 1574 fs/xfs/libxfs/xfs_da_btree.c probe = span = max / 2;
span 1576 fs/xfs/libxfs/xfs_da_btree.c while (span > 4) {
span 1577 fs/xfs/libxfs/xfs_da_btree.c span /= 2;
span 1580 fs/xfs/libxfs/xfs_da_btree.c probe += span;
span 1582 fs/xfs/libxfs/xfs_da_btree.c probe -= span;
span 1587 fs/xfs/libxfs/xfs_da_btree.c ASSERT((span <= 4) ||
span 1182 fs/xfs/xfs_aops.c sector_t *span)
span 1185 fs/xfs/xfs_aops.c return iomap_swapfile_activate(sis, swap_file, span, &xfs_iomap_ops);
span 176 include/acpi/actbl2.h u64 span; /* Length of memory range */
span 67 include/linux/energy_model.h int em_register_perf_domain(cpumask_t *span, unsigned int nr_states,
span 167 include/linux/energy_model.h static inline int em_register_perf_domain(cpumask_t *span,
span 408 include/linux/fs.h sector_t *span);
span 145 include/linux/sched/topology.h unsigned long span[0];
span 150 include/linux/sched/topology.h return to_cpumask(sd->span);
span 81 kernel/power/energy_model.c static struct em_perf_domain *em_create_pd(cpumask_t *span, int nr_states,
span 86 kernel/power/energy_model.c int i, ret, cpu = cpumask_first(span);
span 158 kernel/power/energy_model.c cpumask_copy(to_cpumask(pd->cpus), span);
span 199 kernel/power/energy_model.c int em_register_perf_domain(cpumask_t *span, unsigned int nr_states,
span 206 kernel/power/energy_model.c if (!span || !nr_states || !cb)
span 215 kernel/power/energy_model.c for_each_cpu(cpu, span) {
span 229 kernel/power/energy_model.c cpumask_pr_args(span));
span 237 kernel/power/energy_model.c pd = em_create_pd(span, nr_states, cb);
span 243 kernel/power/energy_model.c for_each_cpu(cpu, span) {
span 252 kernel/power/energy_model.c pr_debug("Created perf domain %*pbl\n", cpumask_pr_args(span));
span 4916 kernel/sched/core.c cpumask_t *span = rq->rd->span;
span 4923 kernel/sched/core.c if (!cpumask_subset(span, p->cpus_ptr) ||
span 5441 kernel/sched/core.c if (!cpumask_subset(task_rq(p)->rd->span, new_mask)) {
span 6112 kernel/sched/core.c if (dl_task(p) && !cpumask_intersects(task_rq(p)->rd->span,
span 6412 kernel/sched/core.c BUG_ON(!cpumask_test_cpu(cpu, rq->rd->span));
span 6481 kernel/sched/core.c BUG_ON(!cpumask_test_cpu(cpu, rq->rd->span));
span 61 kernel/sched/deadline.c for_each_cpu_and(i, rd->span, cpu_active_mask)
span 585 kernel/sched/deadline.c __dl_sub(dl_b, p->dl.dl_bw, cpumask_weight(rq->rd->span));
span 590 kernel/sched/deadline.c __dl_add(dl_b, p->dl.dl_bw, cpumask_weight(later_rq->rd->span));
span 2247 kernel/sched/deadline.c if (!cpumask_intersects(src_rd->span, new_mask)) {
span 2307 kernel/sched/deadline.c __dl_add(dl_b, p->dl.dl_bw, cpumask_weight(rq->rd->span));
span 10633 kernel/sched/fair.c return rd ? rd->span : NULL;
span 543 kernel/sched/rt.c return this_rq()->rd->span;
span 646 kernel/sched/rt.c weight = cpumask_weight(rd->span);
span 650 kernel/sched/rt.c for_each_cpu(i, rd->span) {
span 727 kernel/sched/rt.c for_each_cpu(i, rd->span) {
span 815 kernel/sched/rt.c const struct cpumask *span;
span 817 kernel/sched/rt.c span = sched_rt_period_mask();
span 829 kernel/sched/rt.c span = cpu_online_mask;
span 831 kernel/sched/rt.c for_each_cpu(i, span) {
span 742 kernel/sched/sched.h cpumask_var_t span;
span 2232 kernel/sched/sched.h for_each_cpu_and(i, rd->span, cpu_active_mask) {
span 435 kernel/sched/topology.c free_cpumask_var(rd->span);
span 453 kernel/sched/topology.c cpumask_clear_cpu(rq->cpu, old_rd->span);
span 467 kernel/sched/topology.c cpumask_set_cpu(rq->cpu, rd->span);
span 492 kernel/sched/topology.c if (!zalloc_cpumask_var(&rd->span, GFP_KERNEL))
span 524 kernel/sched/topology.c free_cpumask_var(rd->span);
span 930 kernel/sched/topology.c const struct cpumask *span = sched_domain_span(sd);
span 938 kernel/sched/topology.c for_each_cpu_wrap(i, span, cpu) {
span 1106 kernel/sched/topology.c const struct cpumask *span = sched_domain_span(sd);
span 1115 kernel/sched/topology.c for_each_cpu_wrap(i, span, cpu) {
span 914 mm/page-writeback.c unsigned long span;
span 1057 mm/page-writeback.c span = (dtc->thresh - wb_thresh + 8 * write_bw) * (u64)x >> 16;
span 1058 mm/page-writeback.c x_intercept = wb_setpoint + span;
span 1060 mm/page-writeback.c if (dtc->wb_dirty < x_intercept - span / 4) {
span 148 mm/page_io.c sector_t *span)
span 226 mm/page_io.c *span = 1 + highest_block - lowest_block;
span 2381 mm/swapfile.c static int setup_swap_extents(struct swap_info_struct *sis, sector_t *span)
span 2390 mm/swapfile.c *span = sis->pages;
span 2395 mm/swapfile.c ret = mapping->a_ops->swap_activate(sis, swap_file, span);
span 2401 mm/swapfile.c *span = sis->pages;
span 2406 mm/swapfile.c return generic_swapfile_activate(sis, swap_file, span);
span 3008 mm/swapfile.c sector_t *span)
span 3050 mm/swapfile.c nr_extents = setup_swap_extents(p, span);
span 3108 mm/swapfile.c sector_t span;
span 3234 mm/swapfile.c cluster_info, maxpages, &span);
span 3299 mm/swapfile.c nr_extents, (unsigned long long)span<<(PAGE_SHIFT-10),
span 37 net/mac802154/rx.c __le16 span, sshort;
span 42 net/mac802154/rx.c span = wpan_dev->pan_id;
span 55 net/mac802154/rx.c if (mac_cb(skb)->dest.pan_id != span &&
span 64 net/mac802154/rx.c if (mac_cb(skb)->dest.pan_id != span &&
span 390 tools/perf/builtin-c2c.c int span;
span 446 tools/perf/builtin-c2c.c struct hists *hists, int line, int *span)
span 465 tools/perf/builtin-c2c.c if (*span) {
span 466 tools/perf/builtin-c2c.c (*span)--;
span 469 tools/perf/builtin-c2c.c *span = dim->header.line[line].span;
span 1262 tools/perf/builtin-c2c.c .span = __s, \
span 1579 tools/perf/builtin-diff.c int *span __maybe_unused)
span 1603 tools/perf/ui/browsers/hists.c int span = 0;
span 1615 tools/perf/ui/browsers/hists.c ret = fmt->header(fmt, &dummy_hpp, hists, line, &span);
span 1619 tools/perf/ui/browsers/hists.c if (span)
span 242 tools/perf/ui/hist.c int *span __maybe_unused)
span 711 tools/perf/ui/stdio/hist.c int span = 0;
span 717 tools/perf/ui/stdio/hist.c if (!first && !span)
span 722 tools/perf/ui/stdio/hist.c fmt->header(fmt, hpp, hists, line, &span);
span 724 tools/perf/ui/stdio/hist.c if (!span)
span 259 tools/perf/util/hist.h struct hists *hists, int line, int *span);
span 1757 tools/perf/util/sort.c int *span __maybe_unused)
span 2064 tools/perf/util/sort.c int *span __maybe_unused)