avg 1892 arch/ia64/hp/common/sba_iommu.c unsigned long i = 0, avg = 0, min, max;
avg 1895 arch/ia64/hp/common/sba_iommu.c avg += ioc->avg_search[i];
avg 1899 arch/ia64/hp/common/sba_iommu.c avg /= SBA_SEARCH_SAMPLE;
avg 1901 arch/ia64/hp/common/sba_iommu.c min, avg, max);
avg 33 arch/mips/cavium-octeon/oct_ilm.c u64 cpuclk, avg, max, min;
avg 40 arch/mips/cavium-octeon/oct_ilm.c avg = (curr_li.latency_sum * 1000000000) / (cpuclk * curr_li.interrupt_cnt);
avg 43 arch/mips/cavium-octeon/oct_ilm.c curr_li.interrupt_cnt, avg, max, min);
avg 234 arch/parisc/kernel/irq.c unsigned int k, avg, min, max;
avg 238 arch/parisc/kernel/irq.c for (avg = k = 0; k < PARISC_CR16_HIST_SIZE; k++) {
avg 242 arch/parisc/kernel/irq.c avg += hist;
avg 250 arch/parisc/kernel/irq.c avg /= k;
avg 252 arch/parisc/kernel/irq.c min,avg,max);
avg 50 drivers/clk/bcm/clk-bcm53573-ilp.c int avg;
avg 87 drivers/clk/bcm/clk-bcm53573-ilp.c avg = sum / num;
avg 89 drivers/clk/bcm/clk-bcm53573-ilp.c return parent_rate * 4 / avg;
avg 183 drivers/cpuidle/governors/menu.c unsigned int min, max, thresh, avg;
avg 216 drivers/cpuidle/governors/menu.c avg = sum >> INTERVAL_SHIFT;
avg 218 drivers/cpuidle/governors/menu.c avg = div_u64(sum, divisor);
avg 225 drivers/cpuidle/governors/menu.c int64_t diff = (int64_t)value - avg;
avg 247 drivers/cpuidle/governors/menu.c if ((((u64)avg*avg > variance*36) && (divisor * 4 >= INTERVALS * 3))
avg 249 drivers/cpuidle/governors/menu.c return avg;
avg 210 drivers/devfreq/tegra30-devfreq.c u32 avg = dev->avg_count;
avg 214 drivers/devfreq/tegra30-devfreq.c device_writel(dev, avg + band, ACTMON_DEV_AVG_UPPER_WMARK);
avg 216 drivers/devfreq/tegra30-devfreq.c avg = max(dev->avg_count, band);
avg 217 drivers/devfreq/tegra30-devfreq.c device_writel(dev, avg - band, ACTMON_DEV_AVG_LOWER_WMARK);
avg 333 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c unsigned long avg = 2 * count / num_objects;
avg 336 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c max((i915->mm.shrinker.batch + avg) >> 1,
avg 146 drivers/hwmon/ina2xx.c int avg = ina226_avg_tab[INA226_READ_AVG(config)];
avg 152 drivers/hwmon/ina2xx.c return DIV_ROUND_CLOSEST(avg * INA226_TOTAL_CONV_TIME_DEFAULT, 1000);
avg 161 drivers/hwmon/ina2xx.c int avg, avg_bits;
avg 163 drivers/hwmon/ina2xx.c avg = DIV_ROUND_CLOSEST(interval * 1000,
avg 165 drivers/hwmon/ina2xx.c avg_bits = find_closest(avg, ina226_avg_tab,
avg 222 drivers/hwmon/lm93.c u8 avg;
avg 1059 drivers/hwmon/lm93.c data->block4[i].avg =
avg 2125 drivers/hwmon/lm93.c return sprintf(buf, "%d\n", data->block4[nr].avg);
avg 99 drivers/iio/adc/ina2xx-adc.c * c->avg)
avg 143 drivers/iio/adc/ina2xx-adc.c int avg;
avg 197 drivers/iio/adc/ina2xx-adc.c *val = chip->avg;
avg 291 drivers/iio/adc/ina2xx-adc.c chip->avg = ina226_avg_tab[bits];
avg 836 drivers/iio/adc/ina2xx-adc.c 1000000 / sampling_us, chip->avg);
avg 1004 drivers/iio/adc/ina2xx-adc.c chip->avg = 1;
avg 112 drivers/iio/adc/twl4030-madc.c u8 avg;
avg 284 drivers/iio/adc/twl4030-madc.c .avg = TWL4030_MADC_RTAVERAGE_LSB,
avg 289 drivers/iio/adc/twl4030-madc.c .avg = TWL4030_MADC_SW1AVERAGE_LSB,
avg 295 drivers/iio/adc/twl4030-madc.c .avg = TWL4030_MADC_SW2AVERAGE_LSB,
avg 639 drivers/iio/adc/twl4030-madc.c method->avg);
avg 643 drivers/iio/adc/twl4030-madc.c method->avg);
avg 180 drivers/iio/humidity/hts221_core.c const struct hts221_avg *avg = &hts221_avg_list[type];
avg 184 drivers/iio/humidity/hts221_core.c if (avg->avg_avl[i] == val)
avg 190 drivers/iio/humidity/hts221_core.c data = ((i << __ffs(avg->mask)) & avg->mask);
avg 191 drivers/iio/humidity/hts221_core.c err = regmap_update_bits(hw->regmap, avg->addr,
avg 192 drivers/iio/humidity/hts221_core.c avg->mask, data);
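The hts221_core.c entries directly above show a common driver pattern: locate the requested oversampling ratio in a table of allowed values, then shift the table index into a register bitfield located by its mask. A minimal sketch under assumed table contents; avg_table and field_mask are illustrative names, not the driver's own:

        #include <stdint.h>
        #include <stddef.h>

        static const uint16_t avg_table[] = { 4, 8, 16, 32, 64, 128, 256, 512 };

        /* Position of the lowest set bit, like the kernel's __ffs(); mask must be non-zero. */
        static unsigned int lowest_set_bit(uint8_t mask)
        {
                unsigned int i = 0;

                while (!(mask & (1u << i)))
                        i++;
                return i;
        }

        /* Map an oversampling ratio to register field bits, or -1 if not an allowed value. */
        static int avg_to_reg_bits(uint16_t val, uint8_t field_mask)
        {
                size_t i;

                for (i = 0; i < sizeof(avg_table) / sizeof(avg_table[0]); i++)
                        if (avg_table[i] == val)
                                return (int)((i << lowest_set_bit(field_mask)) & field_mask);
                return -1;      /* caller rejects unsupported ratios */
        }

The returned bits would then go into a masked register update, as the quoted regmap_update_bits() call does.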
avg 221 drivers/iio/humidity/hts221_core.c const struct hts221_avg *avg = &hts221_avg_list[HTS221_SENSOR_H];
avg 225 drivers/iio/humidity/hts221_core.c for (i = 0; i < ARRAY_SIZE(avg->avg_avl); i++)
avg 227 drivers/iio/humidity/hts221_core.c avg->avg_avl[i]);
avg 238 drivers/iio/humidity/hts221_core.c const struct hts221_avg *avg = &hts221_avg_list[HTS221_SENSOR_T];
avg 242 drivers/iio/humidity/hts221_core.c for (i = 0; i < ARRAY_SIZE(avg->avg_avl); i++)
avg 244 drivers/iio/humidity/hts221_core.c avg->avg_avl[i]);
avg 454 drivers/iio/humidity/hts221_core.c const struct hts221_avg *avg;
avg 458 drivers/iio/humidity/hts221_core.c avg = &hts221_avg_list[HTS221_SENSOR_H];
avg 460 drivers/iio/humidity/hts221_core.c *val = avg->avg_avl[idx];
avg 464 drivers/iio/humidity/hts221_core.c avg = &hts221_avg_list[HTS221_SENSOR_T];
avg 466 drivers/iio/humidity/hts221_core.c *val = avg->avg_avl[idx];
avg 1660 drivers/media/platform/atmel/atmel-isc-base.c u64 avg = 0;
avg 1670 drivers/media/platform/atmel/atmel-isc-base.c avg = (u64)hist_count[ISC_HIS_CFG_MODE_GR] +
avg 1672 drivers/media/platform/atmel/atmel-isc-base.c avg >>= 1;
avg 1675 drivers/media/platform/atmel/atmel-isc-base.c if (!avg)
avg 1719 drivers/media/platform/atmel/atmel-isc-base.c gw_gain[c] = div_u64(avg << 9, hist_count[c]);
avg 27 drivers/mfd/pcf50633-adc.c int avg;
avg 54 drivers/mfd/pcf50633-adc.c static void adc_setup(struct pcf50633 *pcf, int channel, int avg)
avg 63 drivers/mfd/pcf50633-adc.c pcf50633_reg_write(pcf, PCF50633_REG_ADCC1, channel | avg |
avg 77 drivers/mfd/pcf50633-adc.c adc_setup(pcf, adc->queue[head]->mux, adc->queue[head]->avg);
avg 116 drivers/mfd/pcf50633-adc.c int pcf50633_adc_sync_read(struct pcf50633 *pcf, int mux, int avg)
avg 123 drivers/mfd/pcf50633-adc.c ret = pcf50633_adc_async_read(pcf, mux, avg,
avg 134 drivers/mfd/pcf50633-adc.c int pcf50633_adc_async_read(struct pcf50633 *pcf, int mux, int avg,
avg 146 drivers/mfd/pcf50633-adc.c req->avg = avg;
avg 897 drivers/mmc/host/sdhci-esdhc-imx.c int min, max, avg, ret;
avg 920 drivers/mmc/host/sdhci-esdhc-imx.c avg = (min + max) / 2;
avg 921 drivers/mmc/host/sdhci-esdhc-imx.c esdhc_prepare_tuning(host, avg);
avg 926 drivers/mmc/host/sdhci-esdhc-imx.c ret ? "failed" : "passed", avg, ret);
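The sdhci-esdhc-imx entries above treat tuning as a window search: find the first and last delay tap that sample correctly, then program the midpoint, avg = (min + max) / 2. A hedged sketch of that idea only; try_tap() is a hypothetical stand-in for the real send-tuning-block hardware exchange, and the driver's actual scan order differs:

        #include <stdbool.h>

        #define TAP_MAX 127     /* illustrative tap range */

        static int find_tuning_midpoint(bool (*try_tap)(int tap))
        {
                int tap, min = -1, max = -1;

                for (tap = 0; tap <= TAP_MAX; tap++) {
                        if (try_tap(tap)) {
                                if (min < 0)
                                        min = tap;      /* window opens */
                                max = tap;              /* window still open */
                        } else if (min >= 0) {
                                break;                  /* window closed */
                        }
                }
                if (min < 0)
                        return -1;              /* no tap worked */
                return (min + max) / 2;         /* centre tap has most margin */
        }

Picking the centre of the passing window keeps the sample point as far as possible from both failing edges.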
"failed" : "passed", avg, ret); avg 437 drivers/mtd/nand/raw/nandsim.c unsigned long wmin = -1, wmax = 0, avg; avg 466 drivers/mtd/nand/raw/nandsim.c avg = tot / wear_eb_count; avg 471 drivers/mtd/nand/raw/nandsim.c seq_printf(m, "Average number of erases: %lu\n", avg); avg 2908 drivers/net/virtio_net.c struct ewma_pkt_len *avg; avg 2911 drivers/net/virtio_net.c avg = &vi->rq[queue_index].mrg_avg_pkt_len; avg 2913 drivers/net/virtio_net.c get_mergeable_buf_len(&vi->rq[queue_index], avg, avg 4010 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c s16 avg = 0; avg 4025 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c avg = (s16) (tempsenseval1 - 512); avg 4027 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c avg = (s16) tempsenseval1; avg 4030 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c avg += (s16) (tempsenseval2 - 512); avg 4032 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c avg += (s16) tempsenseval2; avg 4034 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c avg /= 2; avg 4046 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c return avg; avg 4052 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c s32 avg = 0; avg 4069 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c avg = (int)(tempsenseval1 - 512); avg 4071 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c avg = (int)tempsenseval1; avg 4075 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c avg = (int)(avg - tempsenseval2 + 512); avg 4077 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c avg = (int)(avg - tempsenseval2); avg 4080 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c avg = (int)(avg + tempsenseval2 - 512); avg 4082 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c avg = (int)(avg + tempsenseval2); avg 4083 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c avg = avg / 2; avg 4085 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c if (avg < 0) avg 4086 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c avg = avg + 512; avg 4089 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c avg = tempsenseval1; avg 4104 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c return (u16) avg; avg 4120 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c s32 avg = 0; avg 4135 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c avg = (s32) (vbatsenseval - 512); avg 4137 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c avg = (s32) vbatsenseval; avg 4139 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c avg = (avg * LCN_VBAT_SCALE_NOM + avg 4146 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c return (s8) avg; avg 4072 drivers/net/wireless/intel/ipw2x00/ipw2200.c static inline void average_init(struct average *avg) avg 4074 drivers/net/wireless/intel/ipw2x00/ipw2200.c memset(avg, 0, sizeof(*avg)); avg 4084 drivers/net/wireless/intel/ipw2x00/ipw2200.c static void average_add(struct average *avg, s16 val) avg 4086 drivers/net/wireless/intel/ipw2x00/ipw2200.c avg->sum -= avg->entries[avg->pos]; avg 4087 drivers/net/wireless/intel/ipw2x00/ipw2200.c avg->sum += val; avg 4088 drivers/net/wireless/intel/ipw2x00/ipw2200.c avg->entries[avg->pos++] = val; avg 4089 drivers/net/wireless/intel/ipw2x00/ipw2200.c if (unlikely(avg->pos == AVG_ENTRIES)) { avg 4090 drivers/net/wireless/intel/ipw2x00/ipw2200.c avg->init = 1; avg 4091 
avg 27 drivers/net/wireless/ralink/rt2x00/rt2x00link.c unsigned long avg;
avg 29 drivers/net/wireless/ralink/rt2x00/rt2x00link.c avg = ewma_rssi_read(ewma);
avg 30 drivers/net/wireless/ralink/rt2x00/rt2x00link.c if (avg)
avg 31 drivers/net/wireless/ralink/rt2x00/rt2x00link.c return -avg;
avg 1038 drivers/parisc/ccio-dma.c unsigned long avg = 0, min, max;
avg 1062 drivers/parisc/ccio-dma.c avg += ioc->avg_search[j];
avg 1068 drivers/parisc/ccio-dma.c avg /= CCIO_SEARCH_SAMPLE;
avg 1070 drivers/parisc/ccio-dma.c min, avg, max);
avg 1780 drivers/parisc/sba_iommu.c unsigned long avg = 0, min, max;
avg 1814 drivers/parisc/sba_iommu.c avg += ioc->avg_search[i];
avg 1818 drivers/parisc/sba_iommu.c avg /= SBA_SEARCH_SAMPLE;
avg 1820 drivers/parisc/sba_iommu.c min, avg, max);
avg 626 drivers/platform/x86/intel_ips.c int avg;
avg 630 drivers/platform/x86/intel_ips.c avg = cpu ? ips->ctv2_avg_temp : ips->ctv1_avg_temp;
avg 631 drivers/platform/x86/intel_ips.c if (avg > (ips->limits->core_temp_limit * 100))
avg 818 drivers/platform/x86/intel_ips.c u16 avg;
avg 825 drivers/platform/x86/intel_ips.c avg = (u16)total;
avg 827 drivers/platform/x86/intel_ips.c return avg;
avg 891 drivers/platform/x86/intel_ips.c static u16 update_average_temp(u16 avg, u16 val)
avg 897 drivers/platform/x86/intel_ips.c (((temp_decay_factor - 1) * avg) / temp_decay_factor);
avg 902 drivers/platform/x86/intel_ips.c static u16 update_average_power(u32 avg, u32 val)
avg 907 drivers/platform/x86/intel_ips.c (((power_decay_factor - 1) * avg) / power_decay_factor);
avg 915 drivers/platform/x86/intel_ips.c u32 avg;
avg 922 drivers/platform/x86/intel_ips.c avg = (u32)total;
avg 924 drivers/platform/x86/intel_ips.c return avg;
avg 107 drivers/power/supply/ab8500_fg.c int avg;
avg 383 drivers/power/supply/ab8500_fg.c struct ab8500_fg_avg_cap *avg = &di->avg_cap;
avg 386 drivers/power/supply/ab8500_fg.c avg->sum += sample - avg->samples[avg->pos];
avg 387 drivers/power/supply/ab8500_fg.c avg->samples[avg->pos] = sample;
avg 388 drivers/power/supply/ab8500_fg.c avg->time_stamps[avg->pos] = now;
avg 389 drivers/power/supply/ab8500_fg.c avg->pos++;
avg 391 drivers/power/supply/ab8500_fg.c if (avg->pos == NBR_AVG_SAMPLES)
avg 392 drivers/power/supply/ab8500_fg.c avg->pos = 0;
avg 394 drivers/power/supply/ab8500_fg.c if (avg->nbr_samples < NBR_AVG_SAMPLES)
avg 395 drivers/power/supply/ab8500_fg.c avg->nbr_samples++;
avg 401 drivers/power/supply/ab8500_fg.c } while (now - VALID_CAPACITY_SEC > avg->time_stamps[avg->pos]);
avg 403 drivers/power/supply/ab8500_fg.c avg->avg = avg->sum / avg->nbr_samples;
avg 405 drivers/power/supply/ab8500_fg.c return avg->avg;
avg 417 drivers/power/supply/ab8500_fg.c struct ab8500_fg_avg_cap *avg = &di->avg_cap;
avg 419 drivers/power/supply/ab8500_fg.c avg->pos = 0;
avg 420 drivers/power/supply/ab8500_fg.c avg->nbr_samples = 0;
avg 421 drivers/power/supply/ab8500_fg.c avg->sum = 0;
avg 422 drivers/power/supply/ab8500_fg.c avg->avg = 0;
avg 425 drivers/power/supply/ab8500_fg.c avg->samples[i] = 0;
avg 426 drivers/power/supply/ab8500_fg.c avg->time_stamps[i] = 0;
avg 441 drivers/power/supply/ab8500_fg.c struct ab8500_fg_avg_cap *avg = &di->avg_cap;
avg 446 drivers/power/supply/ab8500_fg.c avg->samples[i] = sample;
avg 447 drivers/power/supply/ab8500_fg.c avg->time_stamps[i] = now;
avg 450 drivers/power/supply/ab8500_fg.c avg->pos = 0;
avg 451 drivers/power/supply/ab8500_fg.c avg->nbr_samples = NBR_AVG_SAMPLES;
avg 452 drivers/power/supply/ab8500_fg.c avg->sum = sample * NBR_AVG_SAMPLES;
avg 453 drivers/power/supply/ab8500_fg.c avg->avg = sample;
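update_average_temp()/update_average_power() in the intel_ips.c entries above keep an exponentially decaying average in pure integer math: each new reading contributes 1/factor of its value, and the old average keeps (factor - 1)/factor. A standalone sketch; the factor value here is illustrative, the driver chooses its own decay factors:

        #include <stdint.h>

        #define TEMP_DECAY_FACTOR 30    /* illustrative; larger = smoother, slower */

        static uint16_t update_average_temp(uint16_t avg, uint16_t val)
        {
                /* avg_new = val / f + avg_old * (f - 1) / f */
                return (uint16_t)((val / TEMP_DECAY_FACTOR) +
                       (((TEMP_DECAY_FACTOR - 1) * avg) / TEMP_DECAY_FACTOR));
        }

Unlike the ring-buffer average above it, this needs no sample storage at all, at the cost of never fully forgetting old readings.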
avg 1200 drivers/scsi/bfa/bfa_defs_svc.h u32 avg[BFA_IOBUCKET_MAX];
avg 453 drivers/scsi/bfa/bfa_fcpim.c io_lat->avg[idx] += val;
avg 152 drivers/staging/greybus/loopback.c u64 avg, rem; \
avg 157 drivers/staging/greybus/loopback.c avg = stats->sum + count / 2000000; /* round closest */ \
avg 158 drivers/staging/greybus/loopback.c rem = do_div(avg, count); \
avg 161 drivers/staging/greybus/loopback.c return sprintf(buf, "%llu.%06u\n", avg, (u32)rem); \
avg 798 drivers/staging/uwb/uwb.h int min, max, avg;
avg 801 drivers/staging/uwb/uwb.h min = max = avg = 0;
avg 805 drivers/staging/uwb/uwb.h avg = stats->sigma / samples;
avg 807 drivers/staging/uwb/uwb.h return scnprintf(buf, PAGE_SIZE, "%d %d %d\n", min, max, avg);
avg 2642 drivers/usb/host/xhci.h u16 avg;
avg 2670 drivers/usb/host/xhci.h avg = EP_AVG_TRB_LENGTH(tx_info);
avg 2683 drivers/usb/host/xhci.h ret += sprintf(str + ret, "avg trb len %d", avg);
avg 2059 fs/btrfs/extent-tree.c u64 avg;
avg 2066 fs/btrfs/extent-tree.c avg = fs_info->avg_delayed_ref_runtime * 3 + runtime;
avg 2067 fs/btrfs/extent-tree.c fs_info->avg_delayed_ref_runtime = avg >> 2; /* div by 4 */
avg 63 include/linux/mfd/pcf50633/adc.h pcf50633_adc_async_read(struct pcf50633 *pcf, int mux, int avg,
avg 67 include/linux/mfd/pcf50633/adc.h pcf50633_adc_sync_read(struct pcf50633 *pcf, int mux, int avg);
avg 145 include/linux/psi_types.h unsigned long avg[NR_PSI_STATES - 1][3];
avg 477 include/linux/sched.h struct sched_avg avg;
avg 34 include/uapi/linux/netfilter/xt_hashlimit.h __u32 avg; /* Average secs between packets * scale */
avg 58 include/uapi/linux/netfilter/xt_hashlimit.h __u32 avg; /* Average secs between packets * scale */
avg 71 include/uapi/linux/netfilter/xt_hashlimit.h __u64 avg; /* Average secs between packets * scale */
avg 85 include/uapi/linux/netfilter/xt_hashlimit.h __u64 avg; /* Average secs between packets * scale */
avg 15 include/uapi/linux/netfilter/xt_limit.h __u32 avg; /* Average secs between packets * scale */
avg 16 include/uapi/linux/netfilter_bridge/ebt_limit.h __u32 avg; /* Average secs between packets * scale */
avg 2124 kernel/sched/core.c static void update_avg(u64 *avg, u64 sample)
avg 2126 kernel/sched/core.c s64 diff = sample - *avg;
avg 2127 kernel/sched/core.c *avg += diff >> 3;
avg 407 kernel/sched/debug.c P(se->avg.load_avg);
avg 408 kernel/sched/debug.c P(se->avg.util_avg);
avg 409 kernel/sched/debug.c P(se->avg.runnable_load_avg);
avg 529 kernel/sched/debug.c cfs_rq->avg.load_avg);
avg 531 kernel/sched/debug.c cfs_rq->avg.runnable_load_avg);
avg 533 kernel/sched/debug.c cfs_rq->avg.util_avg);
avg 535 kernel/sched/debug.c cfs_rq->avg.util_est.enqueued);
avg 945 kernel/sched/debug.c P(se.avg.load_sum);
avg 946 kernel/sched/debug.c P(se.avg.runnable_load_sum);
avg 947 kernel/sched/debug.c P(se.avg.util_sum);
avg 948 kernel/sched/debug.c P(se.avg.load_avg);
avg 949 kernel/sched/debug.c P(se.avg.runnable_load_avg);
avg 950 kernel/sched/debug.c P(se.avg.util_avg);
avg 951 kernel/sched/debug.c P(se.avg.last_update_time);
avg 952 kernel/sched/debug.c P(se.avg.util_est.ewma);
avg 953 kernel/sched/debug.c P(se.avg.util_est.enqueued);
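update_avg() in the kernel/sched/core.c entries above is the tersest moving average in this listing: shift the signed error right by 3, i.e. move the estimate one eighth of the way toward each new sample. The same function restated outside kernel types:

        #include <stdint.h>

        static void update_avg(uint64_t *avg, uint64_t sample)
        {
                int64_t diff = (int64_t)(sample - *avg);

                *avg += diff >> 3;      /* avg += (sample - avg) / 8 */
        }

The arithmetic right shift makes the update symmetric for samples above and below the current average, which the kernel relies on here.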
avg 734 kernel/sched/fair.c struct sched_avg *sa = &se->avg;
avg 784 kernel/sched/fair.c struct sched_avg *sa = &se->avg;
avg 786 kernel/sched/fair.c long cap = (long)(cpu_scale - cfs_rq->avg.util_avg) / 2;
avg 789 kernel/sched/fair.c if (cfs_rq->avg.util_avg != 0) {
avg 790 kernel/sched/fair.c sa->util_avg = cfs_rq->avg.util_avg * se->load.weight;
avg 791 kernel/sched/fair.c sa->util_avg /= (cfs_rq->avg.load_avg + 1);
avg 811 kernel/sched/fair.c se->avg.last_update_time = cfs_rq_clock_pelt(cfs_rq);
avg 2025 kernel/sched/fair.c delta = p->se.avg.load_sum;
avg 2838 kernel/sched/fair.c cfs_rq->avg.runnable_load_avg += se->avg.runnable_load_avg;
avg 2839 kernel/sched/fair.c cfs_rq->avg.runnable_load_sum += se_runnable(se) * se->avg.runnable_load_sum;
avg 2847 kernel/sched/fair.c sub_positive(&cfs_rq->avg.runnable_load_avg, se->avg.runnable_load_avg);
avg 2848 kernel/sched/fair.c sub_positive(&cfs_rq->avg.runnable_load_sum,
avg 2849 kernel/sched/fair.c se_runnable(se) * se->avg.runnable_load_sum);
avg 2855 kernel/sched/fair.c cfs_rq->avg.load_avg += se->avg.load_avg;
avg 2856 kernel/sched/fair.c cfs_rq->avg.load_sum += se_weight(se) * se->avg.load_sum;
avg 2862 kernel/sched/fair.c sub_positive(&cfs_rq->avg.load_avg, se->avg.load_avg);
avg 2863 kernel/sched/fair.c sub_positive(&cfs_rq->avg.load_sum, se_weight(se) * se->avg.load_sum);
avg 2893 kernel/sched/fair.c u32 divider = LOAD_AVG_MAX - 1024 + se->avg.period_contrib;
avg 2895 kernel/sched/fair.c se->avg.load_avg = div_u64(se_weight(se) * se->avg.load_sum, divider);
avg 2896 kernel/sched/fair.c se->avg.runnable_load_avg =
avg 2897 kernel/sched/fair.c div_u64(se_runnable(se) * se->avg.runnable_load_sum, divider);
avg 3001 kernel/sched/fair.c load = max(scale_load_down(cfs_rq->load.weight), cfs_rq->avg.load_avg);
avg 3059 kernel/sched/fair.c load_avg = max(cfs_rq->avg.load_avg,
avg 3062 kernel/sched/fair.c runnable = max(cfs_rq->avg.runnable_load_avg,
avg 3151 kernel/sched/fair.c long delta = cfs_rq->avg.load_avg - cfs_rq->tg_load_avg_contrib;
avg 3161 kernel/sched/fair.c cfs_rq->tg_load_avg_contrib = cfs_rq->avg.load_avg;
avg 3186 kernel/sched/fair.c if (!(se->avg.last_update_time && prev))
avg 3200 kernel/sched/fair.c p_last_update_time = prev->avg.last_update_time;
avg 3201 kernel/sched/fair.c n_last_update_time = next->avg.last_update_time;
avg 3207 kernel/sched/fair.c p_last_update_time = prev->avg.last_update_time;
avg 3208 kernel/sched/fair.c n_last_update_time = next->avg.last_update_time;
avg 3211 kernel/sched/fair.c se->avg.last_update_time = n_last_update_time;
avg 3286 kernel/sched/fair.c long delta = gcfs_rq->avg.util_avg - se->avg.util_avg;
avg 3301 kernel/sched/fair.c se->avg.util_avg = gcfs_rq->avg.util_avg;
avg 3302 kernel/sched/fair.c se->avg.util_sum = se->avg.util_avg * LOAD_AVG_MAX;
avg 3305 kernel/sched/fair.c add_positive(&cfs_rq->avg.util_avg, delta);
avg 3306 kernel/sched/fair.c cfs_rq->avg.util_sum = cfs_rq->avg.util_avg * LOAD_AVG_MAX;
avg 3327 kernel/sched/fair.c runnable_sum += se->avg.load_sum;
avg 3335 kernel/sched/fair.c load_sum = div_s64(gcfs_rq->avg.load_sum,
avg 3340 kernel/sched/fair.c runnable_sum = min(se->avg.load_sum, load_sum);
avg 3349 kernel/sched/fair.c running_sum = se->avg.util_sum >> SCHED_CAPACITY_SHIFT;
avg 3355 kernel/sched/fair.c delta_sum = load_sum - (s64)se_weight(se) * se->avg.load_sum;
avg 3356 kernel/sched/fair.c delta_avg = load_avg - se->avg.load_avg;
avg 3358 kernel/sched/fair.c se->avg.load_sum = runnable_sum;
avg 3359 kernel/sched/fair.c se->avg.load_avg = load_avg;
avg 3360 kernel/sched/fair.c add_positive(&cfs_rq->avg.load_avg, delta_avg);
avg 3361 kernel/sched/fair.c add_positive(&cfs_rq->avg.load_sum, delta_sum);
avg 3365 kernel/sched/fair.c delta_sum = runnable_load_sum - se_weight(se) * se->avg.runnable_load_sum;
avg 3366 kernel/sched/fair.c delta_avg = runnable_load_avg - se->avg.runnable_load_avg;
avg 3368 kernel/sched/fair.c se->avg.runnable_load_sum = runnable_sum;
avg 3369 kernel/sched/fair.c se->avg.runnable_load_avg = runnable_load_avg;
avg 3372 kernel/sched/fair.c add_positive(&cfs_rq->avg.runnable_load_avg, delta_avg);
avg 3373 kernel/sched/fair.c add_positive(&cfs_rq->avg.runnable_load_sum, delta_sum);
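The kernel/sched/fair.c entries above attach and detach per-entity PELT contributions to runqueue totals through sub_positive() and add_positive() rather than bare -= and +=, because the signals being combined can be slightly stale and a plain subtraction could underflow. A minimal standalone sketch of those clamp-at-zero helpers, assuming plain integers in place of the kernel's WRITE_ONCE-based macros:

        #include <stdint.h>

        /* Subtract a contribution, but never let the total go below zero. */
        static void sub_positive(uint64_t *total, uint64_t contrib)
        {
                *total = (*total > contrib) ? *total - contrib : 0;
        }

        /* Add a signed delta, clamping at zero on the way down. */
        static void add_positive(int64_t *total, int64_t delta)
        {
                *total += delta;
                if (*total < 0)
                        *total = 0;
        }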
avg 3422 kernel/sched/fair.c if (se->avg.load_avg || se->avg.util_avg)
avg 3473 kernel/sched/fair.c struct sched_avg *sa = &cfs_rq->avg;
avg 3521 kernel/sched/fair.c u32 divider = LOAD_AVG_MAX - 1024 + cfs_rq->avg.period_contrib;
avg 3530 kernel/sched/fair.c se->avg.last_update_time = cfs_rq->avg.last_update_time;
avg 3531 kernel/sched/fair.c se->avg.period_contrib = cfs_rq->avg.period_contrib;
avg 3539 kernel/sched/fair.c se->avg.util_sum = se->avg.util_avg * divider;
avg 3541 kernel/sched/fair.c se->avg.load_sum = divider;
avg 3543 kernel/sched/fair.c se->avg.load_sum =
avg 3544 kernel/sched/fair.c div_u64(se->avg.load_avg * se->avg.load_sum, se_weight(se));
avg 3547 kernel/sched/fair.c se->avg.runnable_load_sum = se->avg.load_sum;
avg 3550 kernel/sched/fair.c cfs_rq->avg.util_avg += se->avg.util_avg;
avg 3551 kernel/sched/fair.c cfs_rq->avg.util_sum += se->avg.util_sum;
avg 3553 kernel/sched/fair.c add_tg_cfs_propagate(cfs_rq, se->avg.load_sum);
avg 3571 kernel/sched/fair.c sub_positive(&cfs_rq->avg.util_avg, se->avg.util_avg);
avg 3572 kernel/sched/fair.c sub_positive(&cfs_rq->avg.util_sum, se->avg.util_sum);
avg 3574 kernel/sched/fair.c add_tg_cfs_propagate(cfs_rq, -se->avg.load_sum);
avg 3598 kernel/sched/fair.c if (se->avg.last_update_time && !(flags & SKIP_AGE_LOAD))
avg 3604 kernel/sched/fair.c if (!se->avg.last_update_time && (flags & DO_ATTACH)) {
avg 3633 kernel/sched/fair.c last_update_time = cfs_rq->avg.last_update_time;
avg 3641 kernel/sched/fair.c return cfs_rq->avg.last_update_time;
avg 3677 kernel/sched/fair.c cfs_rq->removed.util_avg += se->avg.util_avg;
avg 3678 kernel/sched/fair.c cfs_rq->removed.load_avg += se->avg.load_avg;
avg 3679 kernel/sched/fair.c cfs_rq->removed.runnable_sum += se->avg.load_sum; /* == runnable_sum */
avg 3685 kernel/sched/fair.c return cfs_rq->avg.runnable_load_avg;
avg 3690 kernel/sched/fair.c return cfs_rq->avg.load_avg;
avg 3695 kernel/sched/fair.c return READ_ONCE(p->se.avg.util_avg);
avg 3700 kernel/sched/fair.c struct util_est ue = READ_ONCE(p->se.avg.util_est);
avg 3719 kernel/sched/fair.c enqueued = cfs_rq->avg.util_est.enqueued;
avg 3721 kernel/sched/fair.c WRITE_ONCE(cfs_rq->avg.util_est.enqueued, enqueued);
avg 3748 kernel/sched/fair.c ue.enqueued = cfs_rq->avg.util_est.enqueued;
avg 3750 kernel/sched/fair.c WRITE_ONCE(cfs_rq->avg.util_est.enqueued, ue.enqueued);
avg 3763 kernel/sched/fair.c ue = p->se.avg.util_est;
avg 3804 kernel/sched/fair.c WRITE_ONCE(p->se.avg.util_est, ue);
avg 6107 kernel/sched/fair.c util = READ_ONCE(cfs_rq->avg.util_avg);
avg 6110 kernel/sched/fair.c util = max(util, READ_ONCE(cfs_rq->avg.util_est.enqueued));
avg 6134 kernel/sched/fair.c if (cpu != task_cpu(p) || !READ_ONCE(p->se.avg.last_update_time))
avg 6138 kernel/sched/fair.c util = READ_ONCE(cfs_rq->avg.util_avg);
avg 6171 kernel/sched/fair.c READ_ONCE(cfs_rq->avg.util_est.enqueued);
avg 6238 kernel/sched/fair.c unsigned long util_est, util = READ_ONCE(cfs_rq->avg.util_avg);
avg 6252 kernel/sched/fair.c util_est = READ_ONCE(cfs_rq->avg.util_est.enqueued);
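Several of the fair.c entries above read both cfs_rq->avg.util_avg and cfs_rq->avg.util_est.enqueued and keep the larger, so a recently idle task's remembered estimate wins over its decayed running average when sizing a CPU. A compressed sketch of that max-of-two-signals idea; the struct and names here are simplified stand-ins, not the kernel's types:

        #include <stdint.h>

        struct cpu_signal {
                uint32_t util_avg;              /* PELT average, decays while idle */
                uint32_t util_est_enqueued;     /* sum of estimates of queued tasks */
        };

        static uint32_t cpu_util(const struct cpu_signal *s, uint32_t capacity)
        {
                uint32_t util = s->util_avg;

                if (util < s->util_est_enqueued)        /* max(avg, est) */
                        util = s->util_est_enqueued;
                return util < capacity ? util : capacity;       /* clamp to capacity */
        }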
avg 6587 kernel/sched/fair.c p->se.avg.last_update_time = 0;
avg 7508 kernel/sched/fair.c if (cfs_rq->avg.load_avg)
avg 7511 kernel/sched/fair.c if (cfs_rq->avg.util_avg)
avg 7575 kernel/sched/fair.c if (cfs_rq->avg.load_sum)
avg 7578 kernel/sched/fair.c if (cfs_rq->avg.util_sum)
avg 7581 kernel/sched/fair.c if (cfs_rq->avg.runnable_load_sum)
avg 7657 kernel/sched/fair.c load = div64_ul(load * se->avg.load_avg,
avg 7670 kernel/sched/fair.c return div64_ul(p->se.avg.load_avg * cfs_rq->h_load,
avg 7688 kernel/sched/fair.c return p->se.avg.load_avg;
avg 10243 kernel/sched/fair.c p->se.avg.last_update_time = 0;
avg 10567 kernel/sched/fair.c return cfs_rq ? &cfs_rq->avg : NULL;
avg 268 kernel/sched/pelt.c if (___update_load_sum(now, &se->avg, 0, 0, 0)) {
avg 269 kernel/sched/pelt.c ___update_load_avg(&se->avg, se_weight(se), se_runnable(se));
avg 279 kernel/sched/pelt.c if (___update_load_sum(now, &se->avg, !!se->on_rq, !!se->on_rq,
avg 282 kernel/sched/pelt.c ___update_load_avg(&se->avg, se_weight(se), se_runnable(se));
avg 283 kernel/sched/pelt.c cfs_se_util_change(&se->avg);
avg 293 kernel/sched/pelt.c if (___update_load_sum(now, &cfs_rq->avg,
avg 298 kernel/sched/pelt.c ___update_load_avg(&cfs_rq->avg, 1, 1);
avg 29 kernel/sched/pelt.h static inline void cfs_se_util_change(struct sched_avg *avg)
avg 37 kernel/sched/pelt.h enqueued = avg->util_est.enqueued;
avg 43 kernel/sched/pelt.h WRITE_ONCE(avg->util_est.enqueued, enqueued);
avg 100 kernel/sched/pelt.h u32 util_sum = rq->cfs.avg.util_sum;
avg 282 kernel/sched/psi.c static void calc_avgs(unsigned long avg[3], int missed_periods,
avg 289 kernel/sched/psi.c avg[0] = calc_load_n(avg[0], EXP_10s, 0, missed_periods);
avg 290 kernel/sched/psi.c avg[1] = calc_load_n(avg[1], EXP_60s, 0, missed_periods);
avg 291 kernel/sched/psi.c avg[2] = calc_load_n(avg[2], EXP_300s, 0, missed_periods);
avg 297 kernel/sched/psi.c avg[0] = calc_load(avg[0], EXP_10s, pct);
avg 298 kernel/sched/psi.c avg[1] = calc_load(avg[1], EXP_60s, pct);
avg 299 kernel/sched/psi.c avg[2] = calc_load(avg[2], EXP_300s, pct);
avg 404 kernel/sched/psi.c calc_avgs(group->avg[s], missed_periods, sample, period);
avg 957 kernel/sched/psi.c unsigned long avg[3];
avg 962 kernel/sched/psi.c avg[w] = group->avg[res * 2 + full][w];
avg 968 kernel/sched/psi.c LOAD_INT(avg[0]), LOAD_FRAC(avg[0]),
avg 969 kernel/sched/psi.c LOAD_INT(avg[1]), LOAD_FRAC(avg[1]),
avg 970 kernel/sched/psi.c LOAD_INT(avg[2]), LOAD_FRAC(avg[2]),
avg 528 kernel/sched/sched.h struct sched_avg avg;
avg 2407 kernel/sched/sched.h unsigned long util = READ_ONCE(rq->cfs.avg.util_avg);
avg 2411 kernel/sched/sched.h READ_ONCE(rq->cfs.avg.util_est.enqueued));
avg 32 kernel/time/test_udelay.c uint64_t avg;
avg 56 kernel/time/test_udelay.c avg = sum;
avg 57 kernel/time/test_udelay.c do_div(avg, iters);
avg 60 kernel/time/test_udelay.c (usecs * 1000) - allowed_error_ns, min, avg, max);
avg 515 kernel/trace/ftrace.c unsigned long long avg;
avg 527 kernel/trace/ftrace.c avg = div64_ul(rec->time, rec->counter);
avg 528 kernel/trace/ftrace.c if (tracing_thresh && (avg < tracing_thresh))
avg 560 kernel/trace/ftrace.c trace_print_graph_duration(avg, &s);
avg 238 kernel/trace/ring_buffer_benchmark.c unsigned long avg;
avg 348 kernel/trace/ring_buffer_benchmark.c avg = NSEC_PER_MSEC / hit;
avg 349 kernel/trace/ring_buffer_benchmark.c trace_printk("%ld ns per entry\n", avg);
avg 366 kernel/trace/ring_buffer_benchmark.c avg = NSEC_PER_MSEC / (hit + missed);
avg 367 kernel/trace/ring_buffer_benchmark.c trace_printk("%ld ns per entry\n", avg);
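The kernel/sched/psi.c entries above feed three horizons (10s, 60s, 300s) through the classic fixed-point load-average recurrence, avg' = avg * e + sample * (1 - e), with e premultiplied by FIXED_1 = 1 << 11. A sketch of that recurrence; the 10s constant shown matches psi's precomputed FIXED_1 * exp(-2s/10s) for its 2-second sampling period, and the kernel's calc_load() adds a rounding step omitted here:

        #define FSHIFT  11
        #define FIXED_1 (1UL << FSHIFT)
        #define EXP_10s 1677UL  /* ~= FIXED_1 * exp(-2s / 10s), precomputed */

        static unsigned long calc_load(unsigned long load, unsigned long exp,
                                       unsigned long active)
        {
                unsigned long newload = load * exp + active * (FIXED_1 - exp);

                return newload >> FSHIFT;       /* back to fixed-point average */
        }

Running the same update with three different exp constants is what turns one sample stream into the familiar 10s/60s/300s (or 1/5/15 minute) triple.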
avg 45 kernel/trace/trace_benchmark.c unsigned int avg;
avg 109 kernel/trace/trace_benchmark.c avg = delta;
avg 124 kernel/trace/trace_benchmark.c seed = avg;
avg 140 kernel/trace/trace_benchmark.c bm_last, bm_first, bm_max, bm_min, avg, std, stddev);
avg 143 kernel/trace/trace_benchmark.c bm_avg = avg;
avg 1088 mm/page-writeback.c unsigned long avg = wb->avg_write_bandwidth;
avg 1106 mm/page-writeback.c avg = bw;
avg 1115 mm/page-writeback.c if (avg > old && old >= (unsigned long)bw)
avg 1116 mm/page-writeback.c avg -= (avg - old) >> 3;
avg 1118 mm/page-writeback.c if (avg < old && old <= (unsigned long)bw)
avg 1119 mm/page-writeback.c avg += (old - avg) >> 3;
avg 1123 mm/page-writeback.c avg = max(avg, 1LU);
avg 1125 mm/page-writeback.c long delta = avg - wb->avg_write_bandwidth;
avg 1130 mm/page-writeback.c wb->avg_write_bandwidth = avg;
avg 75 net/bridge/netfilter/ebt_limit.c user2credits(info->avg * info->burst) < user2credits(info->avg)) {
avg 77 net/bridge/netfilter/ebt_limit.c info->avg, info->burst);
avg 83 net/bridge/netfilter/ebt_limit.c info->credit = user2credits(info->avg * info->burst);
avg 84 net/bridge/netfilter/ebt_limit.c info->credit_cap = user2credits(info->avg * info->burst);
avg 85 net/bridge/netfilter/ebt_limit.c info->cost = user2credits(info->avg);
avg 96 net/bridge/netfilter/ebt_limit.c compat_uint_t avg, burst;
avg 49 net/dccp/ccids/lib/tfrc.h static inline u32 tfrc_ewma(const u32 avg, const u32 newval, const u8 weight)
avg 51 net/dccp/ccids/lib/tfrc.h return avg ? (weight * avg + (10 - weight) * newval) / 10 : newval;
avg 145 net/netfilter/xt_hashlimit.c to->avg = cfg->avg;
avg 157 net/netfilter/xt_hashlimit.c to->avg = cfg->avg;
avg 582 net/netfilter/xt_hashlimit.c user2rate_bytes((u32)hinfo->cfg.avg);
avg 589 net/netfilter/xt_hashlimit.c dh->rateinfo.rate = user2rate(hinfo->cfg.avg);
avg 596 net/netfilter/xt_hashlimit.c dh->rateinfo.cost = user2credits_byte(hinfo->cfg.avg);
avg 599 net/netfilter/xt_hashlimit.c dh->rateinfo.credit = user2credits(hinfo->cfg.avg *
avg 601 net/netfilter/xt_hashlimit.c dh->rateinfo.cost = user2credits(hinfo->cfg.avg, revision);
avg 872 net/netfilter/xt_hashlimit.c if (cfg->avg == 0 || cfg->avg > U32_MAX) {
avg 882 net/netfilter/xt_hashlimit.c if (user2credits_byte(cfg->avg) == 0) {
avg 884 net/netfilter/xt_hashlimit.c cfg->avg);
avg 888 net/netfilter/xt_hashlimit.c user2credits(cfg->avg * cfg->burst, revision) <
avg 889 net/netfilter/xt_hashlimit.c user2credits(cfg->avg, revision)) {
avg 891 net/netfilter/xt_hashlimit.c cfg->avg, cfg->burst);
avg 105 net/netfilter/xt_limit.c || user2credits(r->avg * r->burst) < user2credits(r->avg)) {
avg 107 net/netfilter/xt_limit.c r->avg, r->burst);
avg 120 net/netfilter/xt_limit.c priv->credit = user2credits(r->avg * r->burst); /* Credits full. */
avg 123 net/netfilter/xt_limit.c r->cost = user2credits(r->avg);
avg 139 net/netfilter/xt_limit.c u_int32_t avg;
avg 155 net/netfilter/xt_limit.c .avg = cm->avg,
avg 169 net/netfilter/xt_limit.c .avg = m->avg,
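The ebt_limit, xt_limit and xt_hashlimit entries above are all the same token bucket: a credit pool capped at user2credits(avg * burst), refilled by elapsed time, from which every matching packet must pay a fixed cost derived from the average rate. A standalone sketch with simplified time units; the kernel scales everything into jiffies-based credits:

        #include <stdint.h>
        #include <stdbool.h>

        struct rate_limit {
                uint64_t credit;        /* tokens currently available */
                uint64_t credit_cap;    /* avg * burst: bucket size */
                uint64_t cost;          /* tokens one packet consumes */
                uint64_t prev;          /* timestamp of the last refill */
        };

        static bool limit_allow(struct rate_limit *rl, uint64_t now)
        {
                rl->credit += now - rl->prev;           /* refill with elapsed time */
                rl->prev = now;
                if (rl->credit > rl->credit_cap)
                        rl->credit = rl->credit_cap;    /* burst ceiling */

                if (rl->credit >= rl->cost) {
                        rl->credit -= rl->cost;         /* pay for this packet */
                        return true;
                }
                return false;                           /* rate exceeded */
        }

The overflow check quoted above (credits for avg * burst comparing below credits for avg alone) is the sanity test that the user-supplied burst multiplication did not wrap.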
avg 1267 net/sched/sch_cake.c static u64 cake_ewma(u64 avg, u64 sample, u32 shift)
avg 1269 net/sched/sch_cake.c avg -= avg >> shift;
avg 1270 net/sched/sch_cake.c avg += sample >> shift;
avg 1271 net/sched/sch_cake.c return avg;
avg 282 tools/perf/bench/epoll-ctl.c unsigned long avg[EPOLL_NR_OPS];
avg 286 tools/perf/bench/epoll-ctl.c avg[i] = avg_stats(&all_stats[i]);
avg 291 tools/perf/bench/epoll-ctl.c avg[OP_EPOLL_ADD], rel_stddev_stats(stddev[OP_EPOLL_ADD],
avg 292 tools/perf/bench/epoll-ctl.c avg[OP_EPOLL_ADD]));
avg 294 tools/perf/bench/epoll-ctl.c avg[OP_EPOLL_MOD], rel_stddev_stats(stddev[OP_EPOLL_MOD],
avg 295 tools/perf/bench/epoll-ctl.c avg[OP_EPOLL_MOD]));
avg 297 tools/perf/bench/epoll-ctl.c avg[OP_EPOLL_DEL], rel_stddev_stats(stddev[OP_EPOLL_DEL],
avg 298 tools/perf/bench/epoll-ctl.c avg[OP_EPOLL_DEL]));
avg 285 tools/perf/bench/epoll-wait.c unsigned long avg = avg_stats(&throughput_stats);
avg 289 tools/perf/bench/epoll-wait.c avg, rel_stddev_stats(stddev, avg),
avg 112 tools/perf/bench/futex-hash.c unsigned long avg = avg_stats(&throughput_stats);
avg 116 tools/perf/bench/futex-hash.c !silent ? "\n" : "", avg, rel_stddev_stats(stddev, avg),
avg 62 tools/perf/bench/futex-lock-pi.c unsigned long avg = avg_stats(&throughput_stats);
avg 66 tools/perf/bench/futex-lock-pi.c !silent ? "\n" : "", avg, rel_stddev_stats(stddev, avg),
avg 1321 tools/perf/builtin-sched.c u64 avg;
avg 1343 tools/perf/builtin-sched.c avg = work_list->total_lat / work_list->nb_atoms;
avg 1348 tools/perf/builtin-sched.c work_list->nb_atoms, (double)avg / NSEC_PER_MSEC,
avg 3703 tools/perf/builtin-trace.c double avg = avg_stats(stats);
avg 3707 tools/perf/builtin-trace.c pct = avg ? 100.0 * stddev_stats(stats)/avg : 0.0;
avg 3708 tools/perf/builtin-trace.c avg /= NSEC_PER_MSEC;
avg 3713 tools/perf/builtin-trace.c n, syscall_stats_entry->msecs, min, avg);
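avg_stats() and stddev_stats() in the perf entries above sit on a running accumulator, so the benchmarks never store raw samples. A sketch assuming Welford's online mean/variance, which is the scheme perf's update_stats() uses; field names are simplified relative to perf's struct stats:

        #include <math.h>

        struct stats {
                double n, mean, M2;     /* count, running mean, sum of squared deviations */
        };

        static void update_stats(struct stats *s, double val)
        {
                double delta = val - s->mean;

                s->n += 1.0;
                s->mean += delta / s->n;                /* new running mean */
                s->M2 += delta * (val - s->mean);       /* uses old and new mean */
        }

        static double avg_stats(const struct stats *s)
        {
                return s->mean;
        }

        static double stddev_stats(const struct stats *s)
        {
                if (s->n < 2.0)
                        return 0.0;
                return sqrt(s->M2 / (s->n - 1.0));      /* sample standard deviation */
        }

Welford's update stays numerically stable where the naive sum-of-squares formula cancels catastrophically on large, tightly clustered samples.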
avg 38 tools/perf/util/stat-display.c double total, double avg)
avg 40 tools/perf/util/stat-display.c double pct = rel_stddev_stats(total, avg);
avg 49 tools/perf/util/stat-display.c struct evsel *evsel, double avg)
avg 57 tools/perf/util/stat-display.c print_noise_pct(config, stddev_stats(&ps->res_stats[0]), avg);
avg 338 tools/perf/util/stat-display.c int id, int nr, struct evsel *evsel, double avg)
avg 355 tools/perf/util/stat-display.c fprintf(output, fmt, avg, config->csv_sep);
avg 779 tools/perf/util/stat-display.c double avg, avg_enabled, avg_running;
avg 789 tools/perf/util/stat-display.c cd->avg += avg_stats(&ps->res_stats[0]);
avg 804 tools/perf/util/stat-display.c struct caggr_data cd = { .avg = 0.0 };
avg 812 tools/perf/util/stat-display.c uval = cd.avg * counter->scale;
avg 814 tools/perf/util/stat-display.c cd.avg, &rt_stat);
avg 1050 tools/perf/util/stat-display.c FILE *output, int precision, double avg)
avg 1055 tools/perf/util/stat-display.c scnprintf(tmp, 64, " %17.*f", precision, avg);
avg 1063 tools/perf/util/stat-display.c int h, n = 1 + abs((int) (100.0 * (run - avg)/run) / 5);
avg 1066 tools/perf/util/stat-display.c precision, run, precision, run - avg);
avg 1084 tools/perf/util/stat-display.c double avg = avg_stats(config->walltime_nsecs_stats) / NSEC_PER_SEC;
avg 1092 tools/perf/util/stat-display.c fprintf(output, " %17.9f seconds time elapsed", avg);
avg 1111 tools/perf/util/stat-display.c print_table(config, output, precision, avg);
avg 1114 tools/perf/util/stat-display.c precision, avg, precision, sd);
avg 1116 tools/perf/util/stat-display.c print_noise_pct(config, sd, avg);
avg 425 tools/perf/util/stat-shadow.c struct evsel *evsel, double avg,
avg 436 tools/perf/util/stat-shadow.c ratio = avg / total * 100.0;
avg 449 tools/perf/util/stat-shadow.c struct evsel *evsel, double avg,
avg 460 tools/perf/util/stat-shadow.c ratio = avg / total * 100.0;
avg 470 tools/perf/util/stat-shadow.c double avg,
avg 481 tools/perf/util/stat-shadow.c ratio = avg / total * 100.0;
avg 491 tools/perf/util/stat-shadow.c double avg,
avg 503 tools/perf/util/stat-shadow.c ratio = avg / total * 100.0;
avg 513 tools/perf/util/stat-shadow.c double avg,
avg 525 tools/perf/util/stat-shadow.c ratio = avg / total * 100.0;
avg 534 tools/perf/util/stat-shadow.c double avg,
avg 545 tools/perf/util/stat-shadow.c ratio = avg / total * 100.0;
avg 554 tools/perf/util/stat-shadow.c double avg,
avg 565 tools/perf/util/stat-shadow.c ratio = avg / total * 100.0;
avg 574 tools/perf/util/stat-shadow.c double avg,
avg 585 tools/perf/util/stat-shadow.c ratio = avg / total * 100.0;
avg 726 tools/perf/util/stat-shadow.c double avg,
avg 740 tools/perf/util/stat-shadow.c expr__add_id(&pctx, name, avg);
avg 817 tools/perf/util/stat-shadow.c double avg, int cpu,
avg 834 tools/perf/util/stat-shadow.c ratio = avg / total;
avg 848 tools/perf/util/stat-shadow.c if (total && avg) {
avg 850 tools/perf/util/stat-shadow.c ratio = total / avg;
avg 857 tools/perf/util/stat-shadow.c print_branch_misses(config, cpu, evsel, avg, out, st);
avg 867 tools/perf/util/stat-shadow.c print_l1_dcache_misses(config, cpu, evsel, avg, out, st);
avg 877 tools/perf/util/stat-shadow.c print_l1_icache_misses(config, cpu, evsel, avg, out, st);
avg 887 tools/perf/util/stat-shadow.c print_dtlb_cache_misses(config, cpu, evsel, avg, out, st);
avg 897 tools/perf/util/stat-shadow.c print_itlb_cache_misses(config, cpu, evsel, avg, out, st);
avg 907 tools/perf/util/stat-shadow.c print_ll_cache_misses(config, cpu, evsel, avg, out, st);
avg 914 tools/perf/util/stat-shadow.c ratio = avg * 100 / total;
avg 922 tools/perf/util/stat-shadow.c print_stalled_cycles_frontend(config, cpu, evsel, avg, out, st);
avg 924 tools/perf/util/stat-shadow.c print_stalled_cycles_backend(config, cpu, evsel, avg, out, st);
avg 929 tools/perf/util/stat-shadow.c ratio = avg / total;
avg 940 tools/perf/util/stat-shadow.c 100.0 * (avg / total));
avg 948 tools/perf/util/stat-shadow.c if (total2 < avg)
avg 949 tools/perf/util/stat-shadow.c total2 = avg;
avg 952 tools/perf/util/stat-shadow.c 100.0 * ((total2-avg) / total));
avg 959 tools/perf/util/stat-shadow.c if (avg)
avg 960 tools/perf/util/stat-shadow.c ratio = total / avg;
avg 972 tools/perf/util/stat-shadow.c if (avg)
avg 973 tools/perf/util/stat-shadow.c ratio = total / avg;
avg 979 tools/perf/util/stat-shadow.c avg / (ratio * evsel->scale));
avg 1024 tools/perf/util/stat-shadow.c evsel->metric_name, NULL, avg, cpu, out, st);
avg 1032 tools/perf/util/stat-shadow.c ratio = 1000.0 * avg / total;
avg 1053 tools/perf/util/stat-shadow.c mexp->metric_unit, avg, cpu, out, st);
avg 68 tools/perf/util/stat.c double rel_stddev_stats(double stddev, double avg)
avg 72 tools/perf/util/stat.c if (avg)
avg 73 tools/perf/util/stat.c pct = 100.0 * stddev/avg;
avg 134 tools/perf/util/stat.h double rel_stddev_stats(double stddev, double avg);
avg 187 tools/perf/util/stat.h double avg, int cpu,
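rel_stddev_stats() in the closing stat.c entries reduces to a guarded coefficient of variation: the standard deviation expressed as a percentage of the mean, with a zero-mean guard. Restated directly from the quoted lines:

        static double rel_stddev_stats(double stddev, double avg)
        {
                double pct = 0.0;

                if (avg)
                        pct = 100.0 * stddev / avg;     /* run-to-run noise vs. the mean */
                return pct;
        }

This is the "+- N.NN%" annotation perf stat prints next to repeated-run counter values.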