Lines matching refs:u64 (cross-reference hits in include/linux/math64.h)
18 static inline u64 div_u64_rem(u64 dividend, u32 divisor, u32 *remainder) in div_u64_rem()
36 static inline u64 div64_u64_rem(u64 dividend, u64 divisor, u64 *remainder) in div64_u64_rem()
45 static inline u64 div64_u64(u64 dividend, u64 divisor) in div64_u64()
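
The three hits above are the BITS_PER_LONG == 64 branch of include/linux/math64.h, where each helper is a plain inline wrapper around the compiler's native 64-bit divide. The contract is shared across the division helpers: the return value is the quotient, and the remainder comes back through the pointer argument. A minimal userspace sketch of that contract, with stdint typedefs standing in for the kernel's u32/u64 (in-kernel code would simply #include <linux/math64.h>):

#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;
typedef uint64_t u64;

/* 64-bit build: native division, matching the inline definition above. */
static inline u64 div_u64_rem(u64 dividend, u32 divisor, u32 *remainder)
{
	*remainder = dividend % divisor;
	return dividend / divisor;
}

int main(void)
{
	u32 rem;
	u64 q = div_u64_rem(1000000007ULL, 3, &rem);

	printf("q=%llu rem=%u\n", (unsigned long long)q, rem);
	/* prints: q=333333335 rem=2 */
	return 0;
}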
64 static inline u64 div_u64_rem(u64 dividend, u32 divisor, u32 *remainder) in div_u64_rem()
76 extern u64 div64_u64_rem(u64 dividend, u64 divisor, u64 *remainder);
80 extern u64 div64_u64(u64 dividend, u64 divisor);
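
This second cluster of hits is the BITS_PER_LONG == 32 branch: a full 64/64 divide is expensive enough on 32-bit machines that div64_u64_rem() and div64_u64() move out of line, with their implementations living in lib/div64.c. That is why the same names appear twice in this listing, once as inlines and once as extern declarations.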
97 static inline u64 div_u64(u64 dividend, u32 divisor) in div_u64()
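
div_u64() is the quotient-only convenience wrapper and, per the header's own comment, the most common 64-bit divide case. Its generic fallback simply calls div_u64_rem() and discards the remainder, as sketched below (reusing the typedefs from the first sketch; NSEC_PER_MSEC is a real kernel constant, defined locally here only so the fragment stands alone):

#define NSEC_PER_MSEC 1000000ULL	/* stand-in for the kernel definition */

static inline u64 div_u64(u64 dividend, u32 divisor)
{
	u32 remainder;

	return div_u64_rem(dividend, divisor, &remainder);
}

/* typical use: u64 ms = div_u64(timestamp_ns, NSEC_PER_MSEC); */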
115 u32 iter_div_u64_rem(u64 dividend, u32 divisor, u64 *remainder);
118 __iter_div_u64_rem(u64 dividend, u32 divisor, u64 *remainder) in __iter_div_u64_rem()
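
The iter_div pair trades a hardware divide for repeated subtraction, which only pays off when the caller knows the quotient is tiny; the classic user is timespec normalization, where the nanosecond overflow amounts to at most a few loop iterations. Note the asymmetric prototype: the quotient returns as a u32 while the remainder, still u64, goes back through the pointer. The real header also inserts an empty asm() into the loop so the compiler cannot optimize it back into a modulo; that barrier is omitted from this standalone sketch:

static inline u32
__iter_div_u64_rem(u64 dividend, u32 divisor, u64 *remainder)
{
	u32 ret = 0;

	/* Only cheap while dividend / divisor is known to be small. */
	while (dividend >= divisor) {
		dividend -= divisor;
		ret++;
	}

	*remainder = dividend;
	return ret;
}

/* e.g. 2.7 s worth of ns: secs == 2, rem == 700000000
 *	u64 rem;
 *	u32 secs = __iter_div_u64_rem(2700000000ULL, 1000000000U, &rem);
 */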
139 static inline u64 mul_u64_u32_shr(u64 a, u32 mul, unsigned int shift) in mul_u64_u32_shr()
141 return (u64)(((unsigned __int128)a * mul) >> shift); in mul_u64_u32_shr()
146 static inline u64 mul_u64_u64_shr(u64 a, u64 mul, unsigned int shift) in mul_u64_u64_shr()
148 return (u64)(((unsigned __int128)a * mul) >> shift); in mul_u64_u64_shr()
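
When the toolchain provides unsigned __int128 (the CONFIG_ARCH_SUPPORTS_INT128 branch shown above), both multiply-shift helpers collapse into a single widening multiply, and the right shift selects the wanted 64-bit window out of the 128-bit product. The common pattern is fixed-point scaling: with shift == 32, mul acts as a 0.32 fixed-point ratio. A sketch with illustrative values, reusing the u32/u64 typedefs from the first sketch:

/* Requires a compiler with unsigned __int128. */
static inline u64 mul_u64_u32_shr(u64 a, u32 mul, unsigned int shift)
{
	return (u64)(((unsigned __int128)a * mul) >> shift);
}

/* 0xC0000000 / 2^32 == 0.75, so this takes 75% of a value exactly:
 *	mul_u64_u32_shr(1000, 0xC0000000u, 32) == 750
 */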
155 static inline u64 mul_u64_u32_shr(u64 a, u32 mul, unsigned int shift) in mul_u64_u32_shr()
158 u64 ret; in mul_u64_u32_shr()
163 ret = ((u64)al * mul) >> shift; in mul_u64_u32_shr()
165 ret += ((u64)ah * mul) << (32 - shift); in mul_u64_u32_shr()
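
The fallback at lines 155-165 relies on the split a = ah * 2^32 + al, giving (a * mul) >> shift == ((al * mul) >> shift) + ((ah * mul) << (32 - shift)). The identity is exact for shift <= 32, because the ah * mul term contributes nothing to the bits the low-half shift truncates (and the real header skips the high half entirely when ah == 0). A standalone restatement, renamed _split here only to avoid clashing with the __int128 sketch above:

/* Worked case: ah == 1, al == 5, mul == 10, shift == 1
 *   full:  ((2^32 + 5) * 10) >> 1  == 21474836505
 *   split: (50 >> 1) + (10 << 31)  == 25 + 21474836480 == 21474836505
 */
static inline u64 mul_u64_u32_shr_split(u64 a, u32 mul, unsigned int shift)
{
	u32 al = (u32)a, ah = (u32)(a >> 32);
	u64 ret = ((u64)al * mul) >> shift;

	if (ah)
		ret += ((u64)ah * mul) << (32 - shift);
	return ret;
}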
172 static inline u64 mul_u64_u64_shr(u64 a, u64 b, unsigned int shift) in mul_u64_u64_shr()
175 u64 ll; in mul_u64_u64_shr()
184 u64 c; in mul_u64_u64_shr()
189 rl.ll = (u64)a0.l.low * b0.l.low; in mul_u64_u64_shr()
190 rm.ll = (u64)a0.l.low * b0.l.high; in mul_u64_u64_shr()
191 rn.ll = (u64)a0.l.high * b0.l.low; in mul_u64_u64_shr()
192 rh.ll = (u64)a0.l.high * b0.l.high; in mul_u64_u64_shr()
199 rl.l.high = c = (u64)rl.l.high + rm.l.low + rn.l.low; in mul_u64_u64_shr()
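
The full 64x64 fallback is schoolbook multiplication in 32-bit limbs: writing a = a1 * 2^32 + a0 and b = b1 * 2^32 + b0,

	a * b = a1*b1 * 2^64 + (a1*b0 + a0*b1) * 2^32 + a0*b0

which is exactly the four partial products rl, rm, rn, rh above; the line at 199 then folds the low halves of the two middle products into rl's high word, capturing the carry in c. The same limb technique in a compact standalone form (mul_hi64_limbs() is a name local to this sketch, and it returns only the high 64 bits rather than an arbitrary shifted window):

static u64 mul_hi64_limbs(u64 a, u64 b)
{
	u64 a0 = (u32)a, a1 = a >> 32;
	u64 b0 = (u32)b, b1 = b >> 32;
	u64 lo = a0 * b0;
	u64 mid1 = a1 * b0 + (lo >> 32);	/* cannot overflow 64 bits */
	u64 mid2 = a0 * b1 + (u32)mid1;		/* likewise */

	return a1 * b1 + (mid1 >> 32) + (mid2 >> 32);
}

/* sanity check against the wide type, e.g.:
 *	assert(mul_hi64_limbs(a, b) == (u64)(((unsigned __int128)a * b) >> 64));
 */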
218 static inline u64 mul_u64_u32_div(u64 a, u32 mul, u32 divisor) in mul_u64_u32_div()
221 u64 ll; in mul_u64_u32_div()
232 rl.ll = (u64)u.l.low * mul; in mul_u64_u32_div()
233 rh.ll = (u64)u.l.high * mul + rl.l.high; in mul_u64_u32_div()
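
mul_u64_u32_div() computes a * mul / divisor while carrying a 96-bit intermediate across the rl/rh pair, so the product is allowed to overflow 64 bits as long as the final quotient fits; the two lines above build that intermediate from 32x32 partial products before the divides run. On a host with unsigned __int128 the intended semantics reduce to a one-line reference model (the resample ratio below is purely illustrative):

/* Reference model only; the real helper avoids any 128-bit type. */
static inline u64 mul_u64_u32_div_ref(u64 a, u32 mul, u32 divisor)
{
	return (u64)(((unsigned __int128)a * mul) / divisor);
}

/* illustrative use, rescaling a rate by 48000/44100:
 *	u64 out = mul_u64_u32_div_ref(rate_hz, 48000, 44100);
 */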