tkr_raw    94  include/linux/timekeeper_internal.h  struct tk_read_base tkr_raw;
tkr_raw   101  kernel/time/timekeeping.c            while (tk->tkr_raw.xtime_nsec >= ((u64)NSEC_PER_SEC << tk->tkr_raw.shift)) {
tkr_raw   102  kernel/time/timekeeping.c            tk->tkr_raw.xtime_nsec -= (u64)NSEC_PER_SEC << tk->tkr_raw.shift;
tkr_raw   299  kernel/time/timekeeping.c            tk->tkr_raw.clock = clock;
tkr_raw   300  kernel/time/timekeeping.c            tk->tkr_raw.mask = clock->mask;
tkr_raw   301  kernel/time/timekeeping.c            tk->tkr_raw.cycle_last = tk->tkr_mono.cycle_last;
tkr_raw   325  kernel/time/timekeeping.c            tk->tkr_raw.xtime_nsec >>= -shift_change;
tkr_raw   328  kernel/time/timekeeping.c            tk->tkr_raw.xtime_nsec <<= shift_change;
tkr_raw   333  kernel/time/timekeeping.c            tk->tkr_raw.shift = clock->shift;
tkr_raw   345  kernel/time/timekeeping.c            tk->tkr_raw.mult = clock->mult;
tkr_raw   568  kernel/time/timekeeping.c            tkr = &tk->tkr_raw;
tkr_raw   657  kernel/time/timekeeping.c            tk->tkr_raw.base = ns_to_ktime(tk->raw_sec * NSEC_PER_SEC);
tkr_raw   676  kernel/time/timekeeping.c            update_fast_timekeeper(&tk->tkr_raw, &tk_fast_raw);
tkr_raw   704  kernel/time/timekeeping.c            tk->tkr_raw.cycle_last = cycle_now;
tkr_raw   712  kernel/time/timekeeping.c            tk->tkr_raw.xtime_nsec += delta * tk->tkr_raw.mult;
tkr_raw   715  kernel/time/timekeeping.c            tk->tkr_raw.xtime_nsec += (u64)arch_gettimeoffset() << tk->tkr_raw.shift;
tkr_raw   863  kernel/time/timekeeping.c            base = tk->tkr_raw.base;
tkr_raw   864  kernel/time/timekeeping.c            nsecs = timekeeping_get_ns(&tk->tkr_raw);
tkr_raw   986  kernel/time/timekeeping.c            base_raw = tk->tkr_raw.base;
tkr_raw   988  kernel/time/timekeeping.c            nsec_raw = timekeeping_cycles_to_ns(&tk->tkr_raw, now);
tkr_raw  1072  kernel/time/timekeeping.c            (corr_raw, tk->tkr_mono.mult, tk->tkr_raw.mult);
tkr_raw  1172  kernel/time/timekeeping.c            base_raw = tk->tkr_raw.base;
tkr_raw  1176  kernel/time/timekeeping.c            nsec_raw = timekeeping_cycles_to_ns(&tk->tkr_raw,
tkr_raw  1428  kernel/time/timekeeping.c            nsecs = timekeeping_get_ns(&tk->tkr_raw);
tkr_raw  1722  kernel/time/timekeeping.c            tk->tkr_raw.cycle_last = cycle_now;
tkr_raw  2023  kernel/time/timekeeping.c            tk->tkr_raw.cycle_last += interval;
tkr_raw  2029  kernel/time/timekeeping.c            tk->tkr_raw.xtime_nsec += tk->raw_interval << shift;
tkr_raw  2030  kernel/time/timekeeping.c            snsec_per_sec = (u64)NSEC_PER_SEC << tk->tkr_raw.shift;
tkr_raw  2031  kernel/time/timekeeping.c            while (tk->tkr_raw.xtime_nsec >= snsec_per_sec) {
tkr_raw  2032  kernel/time/timekeeping.c            tk->tkr_raw.xtime_nsec -= snsec_per_sec;
tkr_raw    26  kernel/time/vsyscall.c               vdata[CS_RAW].cycle_last = tk->tkr_raw.cycle_last;
tkr_raw    27  kernel/time/vsyscall.c               vdata[CS_RAW].mask = tk->tkr_raw.mask;
tkr_raw    28  kernel/time/vsyscall.c               vdata[CS_RAW].mult = tk->tkr_raw.mult;
tkr_raw    29  kernel/time/vsyscall.c               vdata[CS_RAW].shift = tk->tkr_raw.shift;
tkr_raw    62  kernel/time/vsyscall.c               vdso_ts->nsec = tk->tkr_raw.xtime_nsec;
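
Read as a whole, these hits cover the life cycle of tk->tkr_raw: setup from the clocksource (lines 299-345), per-tick advancement of cycle_last and xtime_nsec (704-715, 2023-2032), readout via timekeeping_get_ns()/timekeeping_cycles_to_ns() (863-864, 986-988, 1172-1176, 1428), and export of mult/shift/mask/cycle_last to the vDSO (kernel/time/vsyscall.c). The stand-alone C sketch below illustrates the two fixed-point conversions visible in those hits; struct tk_read_base_sketch, raw_cycles_to_ns() and raw_accumulate() are hypothetical names, and this is a simplified model of the logic (it ignores, e.g., the arch_gettimeoffset() term at line 715), not the kernel's exact code.

    #include <stdint.h>

    typedef uint64_t u64;
    typedef uint32_t u32;

    #define NSEC_PER_SEC 1000000000ULL

    /* Hypothetical cut-down model of struct tk_read_base, keeping only
     * the fields the listing above touches. */
    struct tk_read_base_sketch {
            u64 cycle_last;  /* clocksource value at the last update (301, 704) */
            u64 mask;        /* valid clocksource bits (300) */
            u32 mult;        /* cycles -> shifted-ns multiplier (345) */
            u32 shift;       /* fixed-point shift for mult (333) */
            u64 xtime_nsec;  /* accumulated ns, left-shifted by 'shift' */
    };

    /* Readout path, modeled on the timekeeping_cycles_to_ns() calls at
     * lines 988 and 1176: take a wrap-safe cycle delta since cycle_last,
     * then scale it to nanoseconds in mult/shift fixed point. */
    static u64 raw_cycles_to_ns(const struct tk_read_base_sketch *tkr, u64 now)
    {
            u64 delta = (now - tkr->cycle_last) & tkr->mask;

            return (delta * tkr->mult + tkr->xtime_nsec) >> tkr->shift;
    }

    /* Accumulation path, modeled on lines 2029-2032 (the same loop appears
     * at 101-102): add one interval's worth of shifted nanoseconds, then
     * carve whole seconds out into raw_sec (cf. tk->raw_sec at line 657). */
    static void raw_accumulate(struct tk_read_base_sketch *tkr,
                               u64 raw_interval_shifted, u64 *raw_sec)
    {
            u64 snsec_per_sec = (u64)NSEC_PER_SEC << tkr->shift;

            tkr->xtime_nsec += raw_interval_shifted;
            while (tkr->xtime_nsec >= snsec_per_sec) {
                    tkr->xtime_nsec -= snsec_per_sec;
                    (*raw_sec)++;
            }
    }

Keeping xtime_nsec left-shifted by 'shift' is what lets the readout fold the sub-nanosecond remainder into a single multiply-add before the final right shift; the vsyscall.c hits export exactly these mult/shift/mask/cycle_last values so the vDSO can run the same conversion in user space.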