dst 112 arch/alpha/boot/bootp.c load(unsigned long dst, unsigned long src, unsigned long count) dst 114 arch/alpha/boot/bootp.c memcpy((void *)dst, (void *)src, count); dst 71 arch/alpha/include/asm/cacheflush.h #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ dst 72 arch/alpha/include/asm/cacheflush.h do { memcpy(dst, src, len); \ dst 75 arch/alpha/include/asm/cacheflush.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 76 arch/alpha/include/asm/cacheflush.h memcpy(dst, src, len) dst 44 arch/alpha/include/asm/checksum.h __wsum csum_partial_copy_from_user(const void __user *src, void *dst, int len, __wsum sum, int *errp); dst 46 arch/alpha/include/asm/checksum.h __wsum csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum); dst 532 arch/alpha/include/asm/io.h extern void insb (unsigned long port, void *dst, unsigned long count); dst 533 arch/alpha/include/asm/io.h extern void insw (unsigned long port, void *dst, unsigned long count); dst 534 arch/alpha/include/asm/io.h extern void insl (unsigned long port, void *dst, unsigned long count); dst 214 arch/alpha/kernel/io.c void ioread8_rep(void __iomem *port, void *dst, unsigned long count) dst 216 arch/alpha/kernel/io.c while ((unsigned long)dst & 0x3) { dst 220 arch/alpha/kernel/io.c *(unsigned char *)dst = ioread8(port); dst 221 arch/alpha/kernel/io.c dst += 1; dst 231 arch/alpha/kernel/io.c *(unsigned int *)dst = w; dst 232 arch/alpha/kernel/io.c dst += 4; dst 237 arch/alpha/kernel/io.c *(unsigned char *)dst = ioread8(port); dst 238 arch/alpha/kernel/io.c dst += 1; dst 242 arch/alpha/kernel/io.c void insb(unsigned long port, void *dst, unsigned long count) dst 244 arch/alpha/kernel/io.c ioread8_rep(ioport_map(port, 1), dst, count); dst 257 arch/alpha/kernel/io.c void ioread16_rep(void __iomem *port, void *dst, unsigned long count) dst 259 arch/alpha/kernel/io.c if (unlikely((unsigned long)dst & 0x3)) { dst 262 arch/alpha/kernel/io.c BUG_ON((unsigned long)dst & 0x1); dst 264 arch/alpha/kernel/io.c *(unsigned short *)dst = ioread16(port); dst 265 arch/alpha/kernel/io.c dst += 2; dst 273 arch/alpha/kernel/io.c *(unsigned int *)dst = w; dst 274 arch/alpha/kernel/io.c dst += 4; dst 278 arch/alpha/kernel/io.c *(unsigned short*)dst = ioread16(port); dst 282 arch/alpha/kernel/io.c void insw(unsigned long port, void *dst, unsigned long count) dst 284 arch/alpha/kernel/io.c ioread16_rep(ioport_map(port, 2), dst, count); dst 297 arch/alpha/kernel/io.c void ioread32_rep(void __iomem *port, void *dst, unsigned long count) dst 299 arch/alpha/kernel/io.c if (unlikely((unsigned long)dst & 0x3)) { dst 302 arch/alpha/kernel/io.c ((struct S *)dst)->x = ioread32(port); dst 303 arch/alpha/kernel/io.c dst += 4; dst 308 arch/alpha/kernel/io.c *(unsigned int *)dst = ioread32(port); dst 309 arch/alpha/kernel/io.c dst += 4; dst 314 arch/alpha/kernel/io.c void insl(unsigned long port, void *dst, unsigned long count) dst 316 arch/alpha/kernel/io.c ioread32_rep(ioport_map(port, 4), dst, count); dst 97 arch/alpha/lib/csum_partial_copy.c csum_partial_cfu_aligned(const unsigned long __user *src, unsigned long *dst, dst 112 arch/alpha/lib/csum_partial_copy.c *dst = word; dst 113 arch/alpha/lib/csum_partial_copy.c dst++; dst 120 arch/alpha/lib/csum_partial_copy.c tmp = *dst; dst 125 arch/alpha/lib/csum_partial_copy.c *dst = word | tmp; dst 138 arch/alpha/lib/csum_partial_copy.c unsigned long *dst, dst 162 arch/alpha/lib/csum_partial_copy.c *dst = word; dst 163 arch/alpha/lib/csum_partial_copy.c dst++; dst 172 
arch/alpha/lib/csum_partial_copy.c tmp = *dst; dst 180 arch/alpha/lib/csum_partial_copy.c *dst = word | tmp; dst 192 arch/alpha/lib/csum_partial_copy.c unsigned long *dst, dst 209 arch/alpha/lib/csum_partial_copy.c stq_u(partial_dest | second_dest, dst); dst 214 arch/alpha/lib/csum_partial_copy.c dst++; dst 228 arch/alpha/lib/csum_partial_copy.c stq_u(partial_dest, dst); dst 230 arch/alpha/lib/csum_partial_copy.c dst++; dst 235 arch/alpha/lib/csum_partial_copy.c ldq_u(second_dest, dst); dst 237 arch/alpha/lib/csum_partial_copy.c stq_u(partial_dest | second_dest, dst); dst 250 arch/alpha/lib/csum_partial_copy.c unsigned long * dst, dst 278 arch/alpha/lib/csum_partial_copy.c stq_u(partial_dest | second_dest, dst); dst 281 arch/alpha/lib/csum_partial_copy.c dst++; dst 298 arch/alpha/lib/csum_partial_copy.c stq_u(partial_dest | second_dest, dst); dst 300 arch/alpha/lib/csum_partial_copy.c ldq_u(second_dest, dst+1); dst 303 arch/alpha/lib/csum_partial_copy.c stq_u(partial_dest | second_dest, dst+1); dst 314 arch/alpha/lib/csum_partial_copy.c ldq_u(second_dest, dst); dst 320 arch/alpha/lib/csum_partial_copy.c stq_u(partial_dest | word | second_dest, dst); dst 328 arch/alpha/lib/csum_partial_copy.c csum_partial_copy_from_user(const void __user *src, void *dst, int len, dst 333 arch/alpha/lib/csum_partial_copy.c unsigned long doff = 7 & (unsigned long) dst; dst 338 arch/alpha/lib/csum_partial_copy.c memset(dst, 0, len); dst 345 arch/alpha/lib/csum_partial_copy.c (unsigned long *) dst, dst 350 arch/alpha/lib/csum_partial_copy.c (unsigned long *) dst, dst 354 arch/alpha/lib/csum_partial_copy.c ldq_u(partial_dest, dst); dst 358 arch/alpha/lib/csum_partial_copy.c (unsigned long *) dst, dst 364 arch/alpha/lib/csum_partial_copy.c (unsigned long *) dst, dst 375 arch/alpha/lib/csum_partial_copy.c csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum) dst 381 arch/alpha/lib/csum_partial_copy.c dst, len, sum, NULL); dst 15 arch/alpha/lib/srm_printk.c char *src, *dst; dst 32 arch/alpha/lib/srm_printk.c for (dst = src + num_lf; src >= buf; ) { dst 34 arch/alpha/lib/srm_printk.c *dst-- = '\r'; dst 36 arch/alpha/lib/srm_printk.c *dst-- = *src--; dst 107 arch/arc/include/asm/cacheflush.h #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ dst 109 arch/arc/include/asm/cacheflush.h memcpy(dst, src, len); \ dst 111 arch/arc/include/asm/cacheflush.h __sync_icache_dcache((unsigned long)(dst), vaddr, len); \ dst 114 arch/arc/include/asm/cacheflush.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 115 arch/arc/include/asm/cacheflush.h memcpy(dst, src, len); \ dst 75 arch/arc/include/asm/uaccess.h #define __arc_get_user_one(dst, src, op, ret) \ dst 92 arch/arc/include/asm/uaccess.h : "+r" (ret), "=r" (dst) \ dst 95 arch/arc/include/asm/uaccess.h #define __arc_get_user_one_64(dst, src, ret) \ dst 115 arch/arc/include/asm/uaccess.h : "+r" (ret), "=r" (dst) \ dst 130 arch/arc/include/asm/uaccess.h #define __arc_put_user_one(src, dst, op, ret) \ dst 145 arch/arc/include/asm/uaccess.h : "r" (src), "r" (dst), "ir" (-EFAULT)) dst 147 arch/arc/include/asm/uaccess.h #define __arc_put_user_one_64(src, dst, ret) \ dst 164 arch/arc/include/asm/uaccess.h : "r" (src), "r" (dst), "ir" (-EFAULT)) dst 659 arch/arc/include/asm/uaccess.h __arc_strncpy_from_user(char *dst, const char __user *src, long count) dst 684 arch/arc/include/asm/uaccess.h : "+r"(res), "+r"(dst), "+r"(src), "=r"(val) dst 732 arch/arc/include/asm/uaccess.h extern long arc_strncpy_from_user_noinline (char *dst, const 
char __user *src, dst 35 arch/arc/mm/extable.c long arc_strncpy_from_user_noinline(char *dst, const char __user *src, dst 38 arch/arc/mm/extable.c return __arc_strncpy_from_user(dst, src, count); dst 27 arch/arm/crypto/aes-ce-glue.c asmlinkage void ce_aes_invert(void *dst, void *src); dst 189 arch/arm/crypto/aes-ce-glue.c ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 209 arch/arm/crypto/aes-ce-glue.c ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 227 arch/arm/crypto/aes-ce-glue.c ce_aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr, dst 257 arch/arm/crypto/aes-ce-glue.c ce_aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr, dst 282 arch/arm/crypto/aes-ce-glue.c struct scatterlist *src = req->src, *dst = req->dst; dst 299 arch/arm/crypto/aes-ce-glue.c skcipher_request_set_crypt(&subreq, req->src, req->dst, dst 311 arch/arm/crypto/aes-ce-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); dst 312 arch/arm/crypto/aes-ce-glue.c if (req->dst != req->src) dst 313 arch/arm/crypto/aes-ce-glue.c dst = scatterwalk_ffwd(sg_dst, req->dst, dst 318 arch/arm/crypto/aes-ce-glue.c skcipher_request_set_crypt(&subreq, src, dst, dst 327 arch/arm/crypto/aes-ce-glue.c ce_aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 340 arch/arm/crypto/aes-ce-glue.c struct scatterlist *src = req->src, *dst = req->dst; dst 357 arch/arm/crypto/aes-ce-glue.c skcipher_request_set_crypt(&subreq, req->src, req->dst, dst 369 arch/arm/crypto/aes-ce-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); dst 370 arch/arm/crypto/aes-ce-glue.c if (req->dst != req->src) dst 371 arch/arm/crypto/aes-ce-glue.c dst = scatterwalk_ffwd(sg_dst, req->dst, dst 376 arch/arm/crypto/aes-ce-glue.c skcipher_request_set_crypt(&subreq, src, dst, dst 385 arch/arm/crypto/aes-ce-glue.c ce_aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 404 arch/arm/crypto/aes-ce-glue.c ce_aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 413 arch/arm/crypto/aes-ce-glue.c u8 *tdst = walk.dst.virt.addr; dst 431 arch/arm/crypto/aes-ce-glue.c static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst) dst 442 arch/arm/crypto/aes-ce-glue.c aes_encrypt(ctx, dst, src); dst 462 arch/arm/crypto/aes-ce-glue.c struct scatterlist *src, *dst; dst 480 arch/arm/crypto/aes-ce-glue.c skcipher_request_set_crypt(&subreq, req->src, req->dst, dst 496 arch/arm/crypto/aes-ce-glue.c ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 506 arch/arm/crypto/aes-ce-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen); dst 507 arch/arm/crypto/aes-ce-glue.c if (req->dst != req->src) dst 508 arch/arm/crypto/aes-ce-glue.c dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen); dst 510 arch/arm/crypto/aes-ce-glue.c skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, dst 518 arch/arm/crypto/aes-ce-glue.c ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 534 arch/arm/crypto/aes-ce-glue.c struct scatterlist *src, *dst; dst 552 arch/arm/crypto/aes-ce-glue.c skcipher_request_set_crypt(&subreq, req->src, req->dst, dst 568 arch/arm/crypto/aes-ce-glue.c ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 578 arch/arm/crypto/aes-ce-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen); dst 579 arch/arm/crypto/aes-ce-glue.c if (req->dst != req->src) dst 580 arch/arm/crypto/aes-ce-glue.c dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen); dst 582 arch/arm/crypto/aes-ce-glue.c 
skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, dst 590 arch/arm/crypto/aes-ce-glue.c ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 105 arch/arm/crypto/aes-neonbs-glue.c fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk, dst 145 arch/arm/crypto/aes-neonbs-glue.c static void cbc_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst) dst 149 arch/arm/crypto/aes-neonbs-glue.c crypto_cipher_encrypt_one(ctx->enc_tfm, dst, src); dst 174 arch/arm/crypto/aes-neonbs-glue.c aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 241 arch/arm/crypto/aes-neonbs-glue.c aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 246 arch/arm/crypto/aes-neonbs-glue.c u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE; dst 249 arch/arm/crypto/aes-neonbs-glue.c crypto_xor_cpy(dst, src, final, dst 262 arch/arm/crypto/aes-neonbs-glue.c static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst) dst 273 arch/arm/crypto/aes-neonbs-glue.c aes_encrypt(&ctx->fallback, dst, src); dst 349 arch/arm/crypto/aes-neonbs-glue.c skcipher_request_set_crypt(&subreq, req->src, req->dst, dst 371 arch/arm/crypto/aes-neonbs-glue.c fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->key.rk, dst 382 arch/arm/crypto/aes-neonbs-glue.c scatterwalk_map_and_copy(buf, req->dst, req->cryptlen - AES_BLOCK_SIZE, dst 396 arch/arm/crypto/aes-neonbs-glue.c scatterwalk_map_and_copy(buf, req->dst, req->cryptlen - AES_BLOCK_SIZE, dst 33 arch/arm/crypto/chacha-neon-glue.c asmlinkage void chacha_block_xor_neon(const u32 *state, u8 *dst, const u8 *src, dst 35 arch/arm/crypto/chacha-neon-glue.c asmlinkage void chacha_4block_xor_neon(const u32 *state, u8 *dst, const u8 *src, dst 39 arch/arm/crypto/chacha-neon-glue.c static void chacha_doneon(u32 *state, u8 *dst, const u8 *src, dst 45 arch/arm/crypto/chacha-neon-glue.c chacha_4block_xor_neon(state, dst, src, nrounds); dst 48 arch/arm/crypto/chacha-neon-glue.c dst += CHACHA_BLOCK_SIZE * 4; dst 52 arch/arm/crypto/chacha-neon-glue.c chacha_block_xor_neon(state, dst, src, nrounds); dst 55 arch/arm/crypto/chacha-neon-glue.c dst += CHACHA_BLOCK_SIZE; dst 61 arch/arm/crypto/chacha-neon-glue.c memcpy(dst, buf, bytes); dst 83 arch/arm/crypto/chacha-neon-glue.c chacha_doneon(state, walk.dst.virt.addr, walk.src.virt.addr, dst 76 arch/arm/crypto/ghash-ce-glue.c be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) }; dst 89 arch/arm/crypto/ghash-ce-glue.c crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE); dst 90 arch/arm/crypto/ghash-ce-glue.c gf128mul_lle(&dst, &key->k); dst 93 arch/arm/crypto/ghash-ce-glue.c dg[0] = be64_to_cpu(dst.b); dst 94 arch/arm/crypto/ghash-ce-glue.c dg[1] = be64_to_cpu(dst.a); dst 131 arch/arm/crypto/ghash-ce-glue.c static int ghash_final(struct shash_desc *desc, u8 *dst) dst 142 arch/arm/crypto/ghash-ce-glue.c put_unaligned_be64(ctx->digest[1], dst); dst 143 arch/arm/crypto/ghash-ce-glue.c put_unaligned_be64(ctx->digest[0], dst + 8); dst 170 arch/arm/include/asm/cacheflush.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 172 arch/arm/include/asm/cacheflush.h memcpy(dst, src, len); \ dst 38 arch/arm/include/asm/checksum.h csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum); dst 41 arch/arm/include/asm/checksum.h csum_partial_copy_from_user(const void __user *src, void *dst, int len, __wsum sum, int *err_ptr); dst 318 arch/arm/include/asm/io.h static inline void memset_io(volatile void __iomem *dst, unsigned c, dst 322 arch/arm/include/asm/io.h mmioset((void __force *)dst, c, 
count); dst 324 arch/arm/include/asm/io.h #define memset_io(dst,c,count) memset_io(dst,c,count) dst 14 arch/arm/include/asm/xor.h #define GET_BLOCK_2(dst) \ dst 16 arch/arm/include/asm/xor.h : "=r" (dst), "=r" (a1), "=r" (a2) \ dst 17 arch/arm/include/asm/xor.h : "0" (dst)) dst 19 arch/arm/include/asm/xor.h #define GET_BLOCK_4(dst) \ dst 21 arch/arm/include/asm/xor.h : "=r" (dst), "=r" (a1), "=r" (a2), "=r" (a3), "=r" (a4) \ dst 22 arch/arm/include/asm/xor.h : "0" (dst)) dst 36 arch/arm/include/asm/xor.h #define PUT_BLOCK_2(dst) \ dst 38 arch/arm/include/asm/xor.h : "=r" (dst) \ dst 39 arch/arm/include/asm/xor.h : "0" (dst), "r" (a1), "r" (a2)) dst 41 arch/arm/include/asm/xor.h #define PUT_BLOCK_4(dst) \ dst 43 arch/arm/include/asm/xor.h : "=r" (dst) \ dst 44 arch/arm/include/asm/xor.h : "0" (dst), "r" (a1), "r" (a2), "r" (a3), "r" (a4)) dst 77 arch/arm/kernel/io.c void _memset_io(volatile void __iomem *dst, int c, size_t count) dst 81 arch/arm/kernel/io.c writeb(c, dst); dst 82 arch/arm/kernel/io.c dst++; dst 186 arch/arm/mm/flush.c unsigned long uaddr, void *dst, const void *src, dst 192 arch/arm/mm/flush.c memcpy(dst, src, len); dst 193 arch/arm/mm/flush.c flush_ptrace_access(vma, page, uaddr, dst, len); dst 176 arch/arm/mm/nommu.c unsigned long uaddr, void *dst, const void *src, dst 179 arch/arm/mm/nommu.c memcpy(dst, src, len); dst 167 arch/arm/net/bpf_jit_32.c #define dst_lo dst[1] dst 168 arch/arm/net/bpf_jit_32.c #define dst_hi dst[0] dst 586 arch/arm/net/bpf_jit_32.c static inline void emit_a32_mov_i(const s8 dst, const u32 val, dst 591 arch/arm/net/bpf_jit_32.c if (is_stacked(dst)) { dst 593 arch/arm/net/bpf_jit_32.c arm_bpf_put_reg32(dst, tmp[1], ctx); dst 595 arch/arm/net/bpf_jit_32.c emit_mov_i(dst, val, ctx); dst 599 arch/arm/net/bpf_jit_32.c static void emit_a32_mov_i64(const s8 dst[], u64 val, struct jit_ctx *ctx) dst 602 arch/arm/net/bpf_jit_32.c const s8 *rd = is_stacked(dst_lo) ? 
tmp : dst; dst 607 arch/arm/net/bpf_jit_32.c arm_bpf_put_reg64(dst, rd, ctx); dst 611 arch/arm/net/bpf_jit_32.c static inline void emit_a32_mov_se_i64(const bool is64, const s8 dst[], dst 617 arch/arm/net/bpf_jit_32.c emit_a32_mov_i64(dst, val64, ctx); dst 620 arch/arm/net/bpf_jit_32.c static inline void emit_a32_add_r(const u8 dst, const u8 src, dst 630 arch/arm/net/bpf_jit_32.c emit(ARM_ADDS_R(dst, dst, src), ctx); dst 632 arch/arm/net/bpf_jit_32.c emit(ARM_ADC_R(dst, dst, src), ctx); dst 634 arch/arm/net/bpf_jit_32.c emit(ARM_ADD_R(dst, dst, src), ctx); dst 637 arch/arm/net/bpf_jit_32.c static inline void emit_a32_sub_r(const u8 dst, const u8 src, dst 647 arch/arm/net/bpf_jit_32.c emit(ARM_SUBS_R(dst, dst, src), ctx); dst 649 arch/arm/net/bpf_jit_32.c emit(ARM_SBC_R(dst, dst, src), ctx); dst 651 arch/arm/net/bpf_jit_32.c emit(ARM_SUB_R(dst, dst, src), ctx); dst 654 arch/arm/net/bpf_jit_32.c static inline void emit_alu_r(const u8 dst, const u8 src, const bool is64, dst 659 arch/arm/net/bpf_jit_32.c emit_a32_add_r(dst, src, is64, hi, ctx); dst 663 arch/arm/net/bpf_jit_32.c emit_a32_sub_r(dst, src, is64, hi, ctx); dst 667 arch/arm/net/bpf_jit_32.c emit(ARM_ORR_R(dst, dst, src), ctx); dst 671 arch/arm/net/bpf_jit_32.c emit(ARM_AND_R(dst, dst, src), ctx); dst 675 arch/arm/net/bpf_jit_32.c emit(ARM_EOR_R(dst, dst, src), ctx); dst 679 arch/arm/net/bpf_jit_32.c emit(ARM_MUL(dst, dst, src), ctx); dst 683 arch/arm/net/bpf_jit_32.c emit(ARM_LSL_R(dst, dst, src), ctx); dst 687 arch/arm/net/bpf_jit_32.c emit(ARM_LSR_R(dst, dst, src), ctx); dst 691 arch/arm/net/bpf_jit_32.c emit(ARM_MOV_SR(dst, dst, SRTYPE_ASR, src), ctx); dst 699 arch/arm/net/bpf_jit_32.c static inline void emit_a32_alu_r(const s8 dst, const s8 src, dst 706 arch/arm/net/bpf_jit_32.c rd = arm_bpf_get_reg32(dst, tmp[0], ctx); dst 709 arch/arm/net/bpf_jit_32.c arm_bpf_put_reg32(dst, rd, ctx); dst 713 arch/arm/net/bpf_jit_32.c static inline void emit_a32_alu_r64(const bool is64, const s8 dst[], dst 720 arch/arm/net/bpf_jit_32.c rd = arm_bpf_get_reg64(dst, tmp, ctx); dst 740 arch/arm/net/bpf_jit_32.c arm_bpf_put_reg64(dst, rd, ctx); dst 744 arch/arm/net/bpf_jit_32.c static inline void emit_a32_mov_r(const s8 dst, const s8 src, dst 750 arch/arm/net/bpf_jit_32.c arm_bpf_put_reg32(dst, rt, ctx); dst 754 arch/arm/net/bpf_jit_32.c static inline void emit_a32_mov_r64(const bool is64, const s8 dst[], dst 773 arch/arm/net/bpf_jit_32.c emit(ARM_LDRD_I(dst[1], ARM_FP, EBPF_SCRATCH_TO_ARM_FP(src_lo)), ctx); dst 777 arch/arm/net/bpf_jit_32.c emit(ARM_MOV_R(dst[0], src[0]), ctx); dst 778 arch/arm/net/bpf_jit_32.c emit(ARM_MOV_R(dst[1], src[1]), ctx); dst 783 arch/arm/net/bpf_jit_32.c static inline void emit_a32_alu_i(const s8 dst, const u32 val, dst 788 arch/arm/net/bpf_jit_32.c rd = arm_bpf_get_reg32(dst, tmp[0], ctx); dst 803 arch/arm/net/bpf_jit_32.c arm_bpf_put_reg32(dst, rd, ctx); dst 807 arch/arm/net/bpf_jit_32.c static inline void emit_a32_neg64(const s8 dst[], dst 813 arch/arm/net/bpf_jit_32.c rd = arm_bpf_get_reg64(dst, tmp, ctx); dst 819 arch/arm/net/bpf_jit_32.c arm_bpf_put_reg64(dst, rd, ctx); dst 823 arch/arm/net/bpf_jit_32.c static inline void emit_a32_lsh_r64(const s8 dst[], const s8 src[], dst 832 arch/arm/net/bpf_jit_32.c rd = arm_bpf_get_reg64(dst, tmp, ctx); dst 847 arch/arm/net/bpf_jit_32.c static inline void emit_a32_arsh_r64(const s8 dst[], const s8 src[], dst 856 arch/arm/net/bpf_jit_32.c rd = arm_bpf_get_reg64(dst, tmp, ctx); dst 872 arch/arm/net/bpf_jit_32.c static inline void emit_a32_rsh_r64(const s8 dst[], const s8 src[], 
dst 881 arch/arm/net/bpf_jit_32.c rd = arm_bpf_get_reg64(dst, tmp, ctx); dst 896 arch/arm/net/bpf_jit_32.c static inline void emit_a32_lsh_i64(const s8 dst[], dst 903 arch/arm/net/bpf_jit_32.c rd = arm_bpf_get_reg64(dst, tmp, ctx); dst 918 arch/arm/net/bpf_jit_32.c arm_bpf_put_reg64(dst, rd, ctx); dst 922 arch/arm/net/bpf_jit_32.c static inline void emit_a32_rsh_i64(const s8 dst[], dst 929 arch/arm/net/bpf_jit_32.c rd = arm_bpf_get_reg64(dst, tmp, ctx); dst 948 arch/arm/net/bpf_jit_32.c arm_bpf_put_reg64(dst, rd, ctx); dst 952 arch/arm/net/bpf_jit_32.c static inline void emit_a32_arsh_i64(const s8 dst[], dst 959 arch/arm/net/bpf_jit_32.c rd = arm_bpf_get_reg64(dst, tmp, ctx); dst 978 arch/arm/net/bpf_jit_32.c arm_bpf_put_reg64(dst, rd, ctx); dst 981 arch/arm/net/bpf_jit_32.c static inline void emit_a32_mul_r64(const s8 dst[], const s8 src[], dst 988 arch/arm/net/bpf_jit_32.c rd = arm_bpf_get_reg64(dst, tmp, ctx); dst 1024 arch/arm/net/bpf_jit_32.c static inline void emit_str_r(const s8 dst, const s8 src[], dst 1029 arch/arm/net/bpf_jit_32.c rd = arm_bpf_get_reg32(dst, tmp[1], ctx); dst 1059 arch/arm/net/bpf_jit_32.c static inline void emit_ldx_r(const s8 dst[], const s8 src, dst 1062 arch/arm/net/bpf_jit_32.c const s8 *rd = is_stacked(dst_lo) ? tmp : dst; dst 1099 arch/arm/net/bpf_jit_32.c arm_bpf_put_reg64(dst, rd, ctx); dst 1348 arch/arm/net/bpf_jit_32.c const s8 *dst = bpf2a32[insn->dst_reg]; dst 1385 arch/arm/net/bpf_jit_32.c emit_a32_mov_r64(is64, dst, src, ctx); dst 1389 arch/arm/net/bpf_jit_32.c emit_a32_mov_se_i64(is64, dst, imm, ctx); dst 1429 arch/arm/net/bpf_jit_32.c emit_a32_alu_r64(is64, dst, src, ctx, BPF_OP(code)); dst 1439 arch/arm/net/bpf_jit_32.c emit_a32_alu_r64(is64, dst, tmp2, ctx, BPF_OP(code)); dst 1487 arch/arm/net/bpf_jit_32.c emit_a32_lsh_i64(dst, imm, ctx); dst 1493 arch/arm/net/bpf_jit_32.c emit_a32_rsh_i64(dst, imm, ctx); dst 1497 arch/arm/net/bpf_jit_32.c emit_a32_lsh_r64(dst, src, ctx); dst 1501 arch/arm/net/bpf_jit_32.c emit_a32_rsh_r64(dst, src, ctx); dst 1505 arch/arm/net/bpf_jit_32.c emit_a32_arsh_r64(dst, src, ctx); dst 1511 arch/arm/net/bpf_jit_32.c emit_a32_arsh_i64(dst, imm, ctx); dst 1521 arch/arm/net/bpf_jit_32.c emit_a32_neg64(dst, ctx); dst 1528 arch/arm/net/bpf_jit_32.c emit_a32_mul_r64(dst, src, ctx); dst 1538 arch/arm/net/bpf_jit_32.c emit_a32_mul_r64(dst, tmp2, ctx); dst 1546 arch/arm/net/bpf_jit_32.c rd = arm_bpf_get_reg64(dst, tmp, ctx); dst 1586 arch/arm/net/bpf_jit_32.c arm_bpf_put_reg64(dst, rd, ctx); dst 1593 arch/arm/net/bpf_jit_32.c emit_a32_mov_i64(dst, val, ctx); dst 1603 arch/arm/net/bpf_jit_32.c emit_ldx_r(dst, rn, off, ctx, BPF_SIZE(code)); dst 1714 arch/arm/net/bpf_jit_32.c rd = arm_bpf_get_reg64(dst, tmp, ctx); dst 66 arch/arm/plat-omap/sram.c void *dst = NULL; dst 77 arch/arm/plat-omap/sram.c dst = fncpy(sram, funcp, size); dst 82 arch/arm/plat-omap/sram.c return dst; dst 117 arch/arm/probes/uprobes/core.c void *dst = xol_page_kaddr + (vaddr & ~PAGE_MASK); dst 122 arch/arm/probes/uprobes/core.c memcpy(dst, src, len); dst 125 arch/arm/probes/uprobes/core.c flush_uprobe_xol_access(page, vaddr, dst, len); dst 111 arch/arm/vdso/vdsomunge.c static void write_elf_word(Elf32_Word val, Elf32_Word *dst, bool swap) dst 113 arch/arm/vdso/vdsomunge.c *dst = swap ? 
swab32(val) : val; dst 189 arch/arm64/crypto/aes-ce-ccm-glue.c u8 *dst = walk->dst.virt.addr; dst 209 arch/arm64/crypto/aes-ce-ccm-glue.c crypto_xor_cpy(dst, src, buf, bsize); dst 211 arch/arm64/crypto/aes-ce-ccm-glue.c crypto_xor(mac, dst, bsize); dst 212 arch/arm64/crypto/aes-ce-ccm-glue.c dst += bsize; dst 258 arch/arm64/crypto/aes-ce-ccm-glue.c ce_aes_ccm_encrypt(walk.dst.virt.addr, dst 279 arch/arm64/crypto/aes-ce-ccm-glue.c scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen, dst 316 arch/arm64/crypto/aes-ce-ccm-glue.c ce_aes_ccm_decrypt(walk.dst.virt.addr, dst 46 arch/arm64/crypto/aes-ce-glue.c static void aes_cipher_encrypt(struct crypto_tfm *tfm, u8 dst[], u8 const src[]) dst 51 arch/arm64/crypto/aes-ce-glue.c aes_encrypt(ctx, dst, src); dst 56 arch/arm64/crypto/aes-ce-glue.c __aes_ce_encrypt(ctx->key_enc, dst, src, num_rounds(ctx)); dst 60 arch/arm64/crypto/aes-ce-glue.c static void aes_cipher_decrypt(struct crypto_tfm *tfm, u8 dst[], u8 const src[]) dst 65 arch/arm64/crypto/aes-ce-glue.c aes_decrypt(ctx, dst, src); dst 70 arch/arm64/crypto/aes-ce-glue.c __aes_ce_decrypt(ctx->key_dec, dst, src, num_rounds(ctx)); dst 203 arch/arm64/crypto/aes-glue.c aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 223 arch/arm64/crypto/aes-glue.c aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 241 arch/arm64/crypto/aes-glue.c aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr, dst 270 arch/arm64/crypto/aes-glue.c aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr, dst 295 arch/arm64/crypto/aes-glue.c struct scatterlist *src = req->src, *dst = req->dst; dst 311 arch/arm64/crypto/aes-glue.c skcipher_request_set_crypt(&subreq, req->src, req->dst, dst 323 arch/arm64/crypto/aes-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); dst 324 arch/arm64/crypto/aes-glue.c if (req->dst != req->src) dst 325 arch/arm64/crypto/aes-glue.c dst = scatterwalk_ffwd(sg_dst, req->dst, dst 330 arch/arm64/crypto/aes-glue.c skcipher_request_set_crypt(&subreq, src, dst, dst 339 arch/arm64/crypto/aes-glue.c aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 352 arch/arm64/crypto/aes-glue.c struct scatterlist *src = req->src, *dst = req->dst; dst 368 arch/arm64/crypto/aes-glue.c skcipher_request_set_crypt(&subreq, req->src, req->dst, dst 380 arch/arm64/crypto/aes-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen); dst 381 arch/arm64/crypto/aes-glue.c if (req->dst != req->src) dst 382 arch/arm64/crypto/aes-glue.c dst = scatterwalk_ffwd(sg_dst, req->dst, dst 387 arch/arm64/crypto/aes-glue.c skcipher_request_set_crypt(&subreq, src, dst, dst 396 arch/arm64/crypto/aes-glue.c aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 432 arch/arm64/crypto/aes-glue.c aes_essiv_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 454 arch/arm64/crypto/aes-glue.c aes_essiv_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 475 arch/arm64/crypto/aes-glue.c aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 483 arch/arm64/crypto/aes-glue.c u8 *tdst = walk.dst.virt.addr; dst 502 arch/arm64/crypto/aes-glue.c static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst) dst 513 arch/arm64/crypto/aes-glue.c aes_encrypt(ctx, dst, src); dst 533 arch/arm64/crypto/aes-glue.c struct scatterlist *src, *dst; dst 551 arch/arm64/crypto/aes-glue.c skcipher_request_set_crypt(&subreq, req->src, req->dst, dst 567 arch/arm64/crypto/aes-glue.c aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 577 
arch/arm64/crypto/aes-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen); dst 578 arch/arm64/crypto/aes-glue.c if (req->dst != req->src) dst 579 arch/arm64/crypto/aes-glue.c dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen); dst 581 arch/arm64/crypto/aes-glue.c skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, dst 589 arch/arm64/crypto/aes-glue.c aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 605 arch/arm64/crypto/aes-glue.c struct scatterlist *src, *dst; dst 623 arch/arm64/crypto/aes-glue.c skcipher_request_set_crypt(&subreq, req->src, req->dst, dst 639 arch/arm64/crypto/aes-glue.c aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 649 arch/arm64/crypto/aes-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen); dst 650 arch/arm64/crypto/aes-glue.c if (req->dst != req->src) dst 651 arch/arm64/crypto/aes-glue.c dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen); dst 653 arch/arm64/crypto/aes-glue.c skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, dst 662 arch/arm64/crypto/aes-glue.c aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 116 arch/arm64/crypto/aes-neonbs-glue.c fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk, dst 172 arch/arm64/crypto/aes-neonbs-glue.c neon_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 198 arch/arm64/crypto/aes-neonbs-glue.c aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 249 arch/arm64/crypto/aes-neonbs-glue.c aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr, dst 254 arch/arm64/crypto/aes-neonbs-glue.c u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE; dst 257 arch/arm64/crypto/aes-neonbs-glue.c crypto_xor_cpy(dst, src, final, dst 294 arch/arm64/crypto/aes-neonbs-glue.c static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst) dst 305 arch/arm64/crypto/aes-neonbs-glue.c aes_encrypt(&ctx->fallback, dst, src); dst 326 arch/arm64/crypto/aes-neonbs-glue.c struct scatterlist *src, *dst; dst 344 arch/arm64/crypto/aes-neonbs-glue.c skcipher_request_set_crypt(&subreq, req->src, req->dst, dst 363 arch/arm64/crypto/aes-neonbs-glue.c out = walk.dst.virt.addr; dst 394 arch/arm64/crypto/aes-neonbs-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen); dst 395 arch/arm64/crypto/aes-neonbs-glue.c if (req->dst != req->src) dst 396 arch/arm64/crypto/aes-neonbs-glue.c dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen); dst 398 arch/arm64/crypto/aes-neonbs-glue.c skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, dst 405 arch/arm64/crypto/aes-neonbs-glue.c out = walk.dst.virt.addr; dst 33 arch/arm64/crypto/chacha-neon-glue.c asmlinkage void chacha_block_xor_neon(u32 *state, u8 *dst, const u8 *src, dst 35 arch/arm64/crypto/chacha-neon-glue.c asmlinkage void chacha_4block_xor_neon(u32 *state, u8 *dst, const u8 *src, dst 39 arch/arm64/crypto/chacha-neon-glue.c static void chacha_doneon(u32 *state, u8 *dst, const u8 *src, dst 50 arch/arm64/crypto/chacha-neon-glue.c memcpy(dst, buf, l); dst 54 arch/arm64/crypto/chacha-neon-glue.c chacha_4block_xor_neon(state, dst, src, nrounds, l); dst 57 arch/arm64/crypto/chacha-neon-glue.c dst += CHACHA_BLOCK_SIZE * 5; dst 80 arch/arm64/crypto/chacha-neon-glue.c chacha_doneon(state, walk.dst.virt.addr, walk.src.virt.addr, dst 61 arch/arm64/crypto/ghash-ce-glue.c asmlinkage void pmull_gcm_encrypt(int blocks, u64 dg[], u8 dst[], dst 66 arch/arm64/crypto/ghash-ce-glue.c asmlinkage void pmull_gcm_decrypt(int blocks, u64 dg[], u8 dst[], dst 70 
arch/arm64/crypto/ghash-ce-glue.c asmlinkage void pmull_gcm_encrypt_block(u8 dst[], u8 const src[], dst 93 arch/arm64/crypto/ghash-ce-glue.c be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) }; dst 106 arch/arm64/crypto/ghash-ce-glue.c crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE); dst 107 arch/arm64/crypto/ghash-ce-glue.c gf128mul_lle(&dst, &key->k); dst 110 arch/arm64/crypto/ghash-ce-glue.c dg[0] = be64_to_cpu(dst.b); dst 111 arch/arm64/crypto/ghash-ce-glue.c dg[1] = be64_to_cpu(dst.a); dst 174 arch/arm64/crypto/ghash-ce-glue.c static int ghash_final_p8(struct shash_desc *desc, u8 *dst) dst 187 arch/arm64/crypto/ghash-ce-glue.c put_unaligned_be64(ctx->digest[1], dst); dst 188 arch/arm64/crypto/ghash-ce-glue.c put_unaligned_be64(ctx->digest[0], dst + 8); dst 194 arch/arm64/crypto/ghash-ce-glue.c static int ghash_final_p64(struct shash_desc *desc, u8 *dst) dst 207 arch/arm64/crypto/ghash-ce-glue.c put_unaligned_be64(ctx->digest[1], dst); dst 208 arch/arm64/crypto/ghash-ce-glue.c put_unaligned_be64(ctx->digest[0], dst + 8); dst 456 arch/arm64/crypto/ghash-ce-glue.c pmull_gcm_encrypt(blocks, dg, walk.dst.virt.addr, dst 473 arch/arm64/crypto/ghash-ce-glue.c u8 *dst = walk.dst.virt.addr; dst 479 arch/arm64/crypto/ghash-ce-glue.c crypto_xor_cpy(dst, src, ks, AES_BLOCK_SIZE); dst 482 arch/arm64/crypto/ghash-ce-glue.c dst += AES_BLOCK_SIZE; dst 487 arch/arm64/crypto/ghash-ce-glue.c walk.dst.virt.addr, &ctx->ghash_key, dst 506 arch/arm64/crypto/ghash-ce-glue.c u8 *dst = walk.dst.virt.addr; dst 509 arch/arm64/crypto/ghash-ce-glue.c crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, ks, dst 513 arch/arm64/crypto/ghash-ce-glue.c head = dst; dst 514 arch/arm64/crypto/ghash-ce-glue.c dst += GHASH_BLOCK_SIZE; dst 518 arch/arm64/crypto/ghash-ce-glue.c memcpy(buf, dst, nbytes); dst 532 arch/arm64/crypto/ghash-ce-glue.c scatterwalk_map_and_copy(tag, req->dst, req->assoclen + req->cryptlen, dst 573 arch/arm64/crypto/ghash-ce-glue.c pmull_gcm_decrypt(blocks, dg, walk.dst.virt.addr, dst 606 arch/arm64/crypto/ghash-ce-glue.c u8 *dst = walk.dst.virt.addr; dst 615 arch/arm64/crypto/ghash-ce-glue.c crypto_xor_cpy(dst, src, buf, AES_BLOCK_SIZE); dst 618 arch/arm64/crypto/ghash-ce-glue.c dst += AES_BLOCK_SIZE; dst 655 arch/arm64/crypto/ghash-ce-glue.c crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, iv, dst 64 arch/arm64/include/asm/asm-uaccess.h .macro untagged_addr, dst, addr dst 201 arch/arm64/include/asm/assembler.h .macro adr_l, dst, sym dst 213 arch/arm64/include/asm/assembler.h .macro ldr_l, dst, sym, tmp= dst 239 arch/arm64/include/asm/assembler.h .macro adr_this_cpu, dst, sym, tmp dst 255 arch/arm64/include/asm/assembler.h .macro ldr_this_cpu dst, sym, tmp dst 126 arch/arm64/include/asm/cacheflush.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 128 arch/arm64/include/asm/cacheflush.h memcpy(dst, src, len); \ dst 400 arch/arm64/include/asm/insn.h u32 aarch64_insn_gen_add_sub_imm(enum aarch64_insn_register dst, dst 407 arch/arm64/include/asm/insn.h u32 aarch64_insn_gen_bitfield(enum aarch64_insn_register dst, dst 412 arch/arm64/include/asm/insn.h u32 aarch64_insn_gen_movewide(enum aarch64_insn_register dst, dst 416 arch/arm64/include/asm/insn.h u32 aarch64_insn_gen_add_sub_shifted_reg(enum aarch64_insn_register dst, dst 422 arch/arm64/include/asm/insn.h u32 aarch64_insn_gen_data1(enum aarch64_insn_register dst, dst 426 arch/arm64/include/asm/insn.h u32 aarch64_insn_gen_data2(enum aarch64_insn_register dst, dst 431 arch/arm64/include/asm/insn.h u32 aarch64_insn_gen_data3(enum 
aarch64_insn_register dst, dst 437 arch/arm64/include/asm/insn.h u32 aarch64_insn_gen_logical_shifted_reg(enum aarch64_insn_register dst, dst 62 arch/arm64/include/asm/module.h struct plt_entry get_plt_entry(u64 dst, void *pc); dst 48 arch/arm64/include/asm/string.h void memcpy_flushcache(void *dst, const void *src, size_t cnt); dst 58 arch/arm64/include/asm/string.h #define memcpy(dst, src, len) __memcpy(dst, src, len) dst 59 arch/arm64/include/asm/string.h #define memmove(dst, src, len) __memmove(dst, src, len) dst 440 arch/arm64/include/asm/uaccess.h static inline int __copy_from_user_flushcache(void *dst, const void __user *src, unsigned size) dst 442 arch/arm64/include/asm/uaccess.h kasan_check_write(dst, size); dst 443 arch/arm64/include/asm/uaccess.h return __copy_user_flushcache(dst, __uaccess_mask_ptr(src), size); dst 123 arch/arm64/kernel/cpu_errata.c void *dst = lm_alias(__bp_harden_hyp_vecs_start + slot * SZ_2K); dst 127 arch/arm64/kernel/cpu_errata.c memcpy(dst + i, hyp_vecs_start, hyp_vecs_end - hyp_vecs_start); dst 129 arch/arm64/kernel/cpu_errata.c __flush_icache_range((uintptr_t)dst, (uintptr_t)dst + SZ_2K); dst 76 arch/arm64/kernel/ftrace.c struct plt_entry trampoline, *dst; dst 109 arch/arm64/kernel/ftrace.c dst = mod->arch.ftrace_trampoline; dst 110 arch/arm64/kernel/ftrace.c trampoline = get_plt_entry(addr, dst); dst 111 arch/arm64/kernel/ftrace.c if (memcmp(dst, &trampoline, sizeof(trampoline))) { dst 112 arch/arm64/kernel/ftrace.c if (plt_entry_is_initialized(dst)) { dst 119 arch/arm64/kernel/ftrace.c *dst = trampoline; dst 132 arch/arm64/kernel/ftrace.c flush_icache_range((unsigned long)&dst[0], dst 133 arch/arm64/kernel/ftrace.c (unsigned long)&dst[1]); dst 135 arch/arm64/kernel/ftrace.c addr = (unsigned long)dst; dst 209 arch/arm64/kernel/hibernate.c unsigned long dst = (unsigned long)allocator(mask); dst 211 arch/arm64/kernel/hibernate.c if (!dst) { dst 216 arch/arm64/kernel/hibernate.c memcpy((void *)dst, src_start, length); dst 217 arch/arm64/kernel/hibernate.c __flush_icache_range(dst, dst + length); dst 256 arch/arm64/kernel/hibernate.c set_pte(ptep, pfn_pte(virt_to_pfn(dst), PAGE_KERNEL_EXEC)); dst 275 arch/arm64/kernel/hibernate.c *phys_dst_addr = virt_to_phys((void *)dst); dst 836 arch/arm64/kernel/insn.c u32 aarch64_insn_gen_add_sub_imm(enum aarch64_insn_register dst, dst 886 arch/arm64/kernel/insn.c insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst); dst 897 arch/arm64/kernel/insn.c u32 aarch64_insn_gen_bitfield(enum aarch64_insn_register dst, dst 943 arch/arm64/kernel/insn.c insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst); dst 952 arch/arm64/kernel/insn.c u32 aarch64_insn_gen_movewide(enum aarch64_insn_register dst, dst 1002 arch/arm64/kernel/insn.c insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst); dst 1007 arch/arm64/kernel/insn.c u32 aarch64_insn_gen_add_sub_shifted_reg(enum aarch64_insn_register dst, dst 1056 arch/arm64/kernel/insn.c insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst); dst 1065 arch/arm64/kernel/insn.c u32 aarch64_insn_gen_data1(enum aarch64_insn_register dst, dst 1103 arch/arm64/kernel/insn.c insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst); dst 1108 arch/arm64/kernel/insn.c u32 aarch64_insn_gen_data2(enum aarch64_insn_register dst, dst 1151 arch/arm64/kernel/insn.c insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst); dst 1158 arch/arm64/kernel/insn.c u32 aarch64_insn_gen_data3(enum aarch64_insn_register 
dst, dst 1190 arch/arm64/kernel/insn.c insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst); dst 1201 arch/arm64/kernel/insn.c u32 aarch64_insn_gen_logical_shifted_reg(enum aarch64_insn_register dst, dst 1262 arch/arm64/kernel/insn.c insn = aarch64_insn_encode_register(AARCH64_INSN_REGTYPE_RD, insn, dst); dst 71 arch/arm64/kernel/io.c void __memset_io(volatile void __iomem *dst, int c, size_t count) dst 79 arch/arm64/kernel/io.c while (count && !IS_ALIGNED((unsigned long)dst, 8)) { dst 80 arch/arm64/kernel/io.c __raw_writeb(c, dst); dst 81 arch/arm64/kernel/io.c dst++; dst 86 arch/arm64/kernel/io.c __raw_writeq(qc, dst); dst 87 arch/arm64/kernel/io.c dst += 8; dst 92 arch/arm64/kernel/io.c __raw_writeb(c, dst); dst 93 arch/arm64/kernel/io.c dst++; dst 11 arch/arm64/kernel/module-plts.c static struct plt_entry __get_adrp_add_pair(u64 dst, u64 pc, dst 16 arch/arm64/kernel/module-plts.c adrp = aarch64_insn_gen_adr(pc, dst, reg, AARCH64_INSN_ADR_TYPE_ADRP); dst 17 arch/arm64/kernel/module-plts.c add = aarch64_insn_gen_add_sub_imm(reg, reg, dst % SZ_4K, dst 24 arch/arm64/kernel/module-plts.c struct plt_entry get_plt_entry(u64 dst, void *pc) dst 33 arch/arm64/kernel/module-plts.c plt = __get_adrp_add_pair(dst, (u64)pc, AARCH64_INSN_REG_16); dst 18 arch/arm64/kernel/probes/uprobes.c void *dst = xol_page_kaddr + (vaddr & ~PAGE_MASK); dst 21 arch/arm64/kernel/probes/uprobes.c memcpy(dst, src, len); dst 24 arch/arm64/kernel/probes/uprobes.c sync_icache_aliases(dst, len); dst 337 arch/arm64/kernel/process.c int arch_dup_task_struct(struct task_struct *dst, struct task_struct *src) dst 341 arch/arm64/kernel/process.c *dst = *src; dst 355 arch/arm64/kernel/process.c dst->thread.sve_state = NULL; dst 356 arch/arm64/kernel/process.c clear_tsk_thread_flag(dst, TIF_SVE); dst 10 arch/arm64/lib/uaccess_flushcache.c void memcpy_flushcache(void *dst, const void *src, size_t cnt) dst 17 arch/arm64/lib/uaccess_flushcache.c memcpy(dst, src, cnt); dst 18 arch/arm64/lib/uaccess_flushcache.c __clean_dcache_area_pop(dst, cnt); dst 47 arch/arm64/mm/flush.c unsigned long uaddr, void *dst, const void *src, dst 50 arch/arm64/mm/flush.c memcpy(dst, src, len); dst 51 arch/arm64/mm/flush.c flush_ptrace_access(vma, page, uaddr, dst, len); dst 346 arch/arm64/net/bpf_jit_comp.c const u8 dst = bpf2a64[insn->dst_reg]; dst 375 arch/arm64/net/bpf_jit_comp.c emit(A64_MOV(is64, dst, src), ctx); dst 380 arch/arm64/net/bpf_jit_comp.c emit(A64_ADD(is64, dst, dst, src), ctx); dst 384 arch/arm64/net/bpf_jit_comp.c emit(A64_SUB(is64, dst, dst, src), ctx); dst 388 arch/arm64/net/bpf_jit_comp.c emit(A64_AND(is64, dst, dst, src), ctx); dst 392 arch/arm64/net/bpf_jit_comp.c emit(A64_ORR(is64, dst, dst, src), ctx); dst 396 arch/arm64/net/bpf_jit_comp.c emit(A64_EOR(is64, dst, dst, src), ctx); dst 400 arch/arm64/net/bpf_jit_comp.c emit(A64_MUL(is64, dst, dst, src), ctx); dst 408 arch/arm64/net/bpf_jit_comp.c emit(A64_UDIV(is64, dst, dst, src), ctx); dst 411 arch/arm64/net/bpf_jit_comp.c emit(A64_UDIV(is64, tmp, dst, src), ctx); dst 412 arch/arm64/net/bpf_jit_comp.c emit(A64_MSUB(is64, dst, dst, tmp, src), ctx); dst 418 arch/arm64/net/bpf_jit_comp.c emit(A64_LSLV(is64, dst, dst, src), ctx); dst 422 arch/arm64/net/bpf_jit_comp.c emit(A64_LSRV(is64, dst, dst, src), ctx); dst 426 arch/arm64/net/bpf_jit_comp.c emit(A64_ASRV(is64, dst, dst, src), ctx); dst 431 arch/arm64/net/bpf_jit_comp.c emit(A64_NEG(is64, dst, dst), ctx); dst 445 arch/arm64/net/bpf_jit_comp.c emit(A64_REV16(is64, dst, dst), ctx); dst 447 
arch/arm64/net/bpf_jit_comp.c emit(A64_UXTH(is64, dst, dst), ctx); dst 450 arch/arm64/net/bpf_jit_comp.c emit(A64_REV32(is64, dst, dst), ctx); dst 454 arch/arm64/net/bpf_jit_comp.c emit(A64_REV64(dst, dst), ctx); dst 462 arch/arm64/net/bpf_jit_comp.c emit(A64_UXTH(is64, dst, dst), ctx); dst 466 arch/arm64/net/bpf_jit_comp.c emit(A64_UXTW(is64, dst, dst), ctx); dst 476 arch/arm64/net/bpf_jit_comp.c emit_a64_mov_i(is64, dst, imm, ctx); dst 482 arch/arm64/net/bpf_jit_comp.c emit(A64_ADD(is64, dst, dst, tmp), ctx); dst 487 arch/arm64/net/bpf_jit_comp.c emit(A64_SUB(is64, dst, dst, tmp), ctx); dst 492 arch/arm64/net/bpf_jit_comp.c emit(A64_AND(is64, dst, dst, tmp), ctx); dst 497 arch/arm64/net/bpf_jit_comp.c emit(A64_ORR(is64, dst, dst, tmp), ctx); dst 502 arch/arm64/net/bpf_jit_comp.c emit(A64_EOR(is64, dst, dst, tmp), ctx); dst 507 arch/arm64/net/bpf_jit_comp.c emit(A64_MUL(is64, dst, dst, tmp), ctx); dst 512 arch/arm64/net/bpf_jit_comp.c emit(A64_UDIV(is64, dst, dst, tmp), ctx); dst 517 arch/arm64/net/bpf_jit_comp.c emit(A64_UDIV(is64, tmp, dst, tmp2), ctx); dst 518 arch/arm64/net/bpf_jit_comp.c emit(A64_MSUB(is64, dst, dst, tmp, tmp2), ctx); dst 522 arch/arm64/net/bpf_jit_comp.c emit(A64_LSL(is64, dst, dst, imm), ctx); dst 526 arch/arm64/net/bpf_jit_comp.c emit(A64_LSR(is64, dst, dst, imm), ctx); dst 530 arch/arm64/net/bpf_jit_comp.c emit(A64_ASR(is64, dst, dst, imm), ctx); dst 560 arch/arm64/net/bpf_jit_comp.c emit(A64_CMP(is64, dst, src), ctx); dst 603 arch/arm64/net/bpf_jit_comp.c emit(A64_TST(is64, dst, src), ctx); dst 627 arch/arm64/net/bpf_jit_comp.c emit(A64_CMP(is64, dst, tmp), ctx); dst 632 arch/arm64/net/bpf_jit_comp.c emit(A64_TST(is64, dst, tmp), ctx); dst 674 arch/arm64/net/bpf_jit_comp.c emit_a64_mov_i64(dst, imm64, ctx); dst 687 arch/arm64/net/bpf_jit_comp.c emit(A64_LDR32(dst, src, tmp), ctx); dst 690 arch/arm64/net/bpf_jit_comp.c emit(A64_LDRH(dst, src, tmp), ctx); dst 693 arch/arm64/net/bpf_jit_comp.c emit(A64_LDRB(dst, src, tmp), ctx); dst 696 arch/arm64/net/bpf_jit_comp.c emit(A64_LDR64(dst, src, tmp), ctx); dst 711 arch/arm64/net/bpf_jit_comp.c emit(A64_STR32(tmp, dst, tmp2), ctx); dst 714 arch/arm64/net/bpf_jit_comp.c emit(A64_STRH(tmp, dst, tmp2), ctx); dst 717 arch/arm64/net/bpf_jit_comp.c emit(A64_STRB(tmp, dst, tmp2), ctx); dst 720 arch/arm64/net/bpf_jit_comp.c emit(A64_STR64(tmp, dst, tmp2), ctx); dst 733 arch/arm64/net/bpf_jit_comp.c emit(A64_STR32(src, dst, tmp), ctx); dst 736 arch/arm64/net/bpf_jit_comp.c emit(A64_STRH(src, dst, tmp), ctx); dst 739 arch/arm64/net/bpf_jit_comp.c emit(A64_STRB(src, dst, tmp), ctx); dst 742 arch/arm64/net/bpf_jit_comp.c emit(A64_STR64(src, dst, tmp), ctx); dst 752 arch/arm64/net/bpf_jit_comp.c reg = dst; dst 755 arch/arm64/net/bpf_jit_comp.c emit(A64_ADD(1, tmp, tmp, dst), ctx); dst 53 arch/c6x/include/asm/cacheflush.h #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ dst 55 arch/c6x/include/asm/cacheflush.h memcpy(dst, src, len); \ dst 56 arch/c6x/include/asm/cacheflush.h flush_icache_range((unsigned) (dst), (unsigned) (dst) + (len)); \ dst 59 arch/c6x/include/asm/cacheflush.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 60 arch/c6x/include/asm/cacheflush.h memcpy(dst, src, len) dst 49 arch/c6x/kernel/c6x_ksyms.c extern void __c6xabi_strasgi(int *dst, const int *src, unsigned cnt); dst 58 arch/c6x/kernel/c6x_ksyms.c extern void __c6xabi_strasgi_64plus(int *dst, const int *src, unsigned cnt); dst 13 arch/c6x/lib/checksum.c csum_partial_copy_from_user(const void __user *src, void *dst, int len, dst 
18 arch/c6x/lib/checksum.c missing = __copy_from_user(dst, src, len); dst 20 arch/c6x/lib/checksum.c memset(dst + len - missing, 0, missing); dst 25 arch/c6x/lib/checksum.c return csum_partial(dst, len, sum); dst 55 arch/csky/abiv1/inc/abi/cacheflush.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 57 arch/csky/abiv1/inc/abi/cacheflush.h memcpy(dst, src, len); \ dst 60 arch/csky/abiv1/inc/abi/cacheflush.h #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ dst 62 arch/csky/abiv1/inc/abi/cacheflush.h memcpy(dst, src, len); \ dst 38 arch/csky/abiv2/inc/abi/cacheflush.h #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ dst 40 arch/csky/abiv2/inc/abi/cacheflush.h memcpy(dst, src, len); \ dst 41 arch/csky/abiv2/inc/abi/cacheflush.h cache_wbinv_range((unsigned long)dst, (unsigned long)dst + len); \ dst 43 arch/csky/abiv2/inc/abi/cacheflush.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 44 arch/csky/abiv2/inc/abi/cacheflush.h memcpy(dst, src, len) dst 392 arch/csky/include/asm/uaccess.h long strncpy_from_user(char *dst, const char *src, long count); dst 393 arch/csky/include/asm/uaccess.h long __strncpy_from_user(char *dst, const char *src, long count); dst 27 arch/csky/lib/usercopy.c #define __do_strncpy_from_user(dst, src, count, res) \ dst 53 arch/csky/lib/usercopy.c : "=r"(res), "=r"(count), "=r"(dst), \ dst 56 arch/csky/lib/usercopy.c "2"(dst), "3"(src) \ dst 81 arch/csky/lib/usercopy.c long __strncpy_from_user(char *dst, const char *src, long count) dst 85 arch/csky/lib/usercopy.c __do_strncpy_from_user(dst, src, count, res); dst 108 arch/csky/lib/usercopy.c long strncpy_from_user(char *dst, const char *src, long count) dst 113 arch/csky/lib/usercopy.c __do_strncpy_from_user(dst, src, count, res); dst 80 arch/hexagon/include/asm/cacheflush.h unsigned long vaddr, void *dst, void *src, int len); dst 82 arch/hexagon/include/asm/cacheflush.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 83 arch/hexagon/include/asm/cacheflush.h memcpy(dst, src, len) dst 20 arch/hexagon/include/asm/checksum.h __wsum csum_partial_copy_nocheck(const void *src, void *dst, dst 186 arch/hexagon/include/asm/io.h static inline void memcpy_fromio(void *dst, const volatile void __iomem *src, dst 189 arch/hexagon/include/asm/io.h memcpy(dst, (void *) src, count); dst 192 arch/hexagon/include/asm/io.h static inline void memcpy_toio(volatile void __iomem *dst, const void *src, dst 195 arch/hexagon/include/asm/io.h memcpy((void *) dst, src, count); dst 61 arch/hexagon/include/asm/uaccess.h #define __strncpy_from_user(dst, src, n) hexagon_strncpy_from_user(dst, src, n) dst 68 arch/hexagon/include/asm/uaccess.h static inline long hexagon_strncpy_from_user(char *dst, const char __user *src, dst 74 arch/hexagon/include/asm/uaccess.h static inline long hexagon_strncpy_from_user(char *dst, const char __user *src, dst 83 arch/hexagon/include/asm/uaccess.h long left = raw_copy_from_user(dst, src, n); dst 85 arch/hexagon/include/asm/uaccess.h memset(dst + (n - left), 0, left); dst 88 arch/hexagon/include/asm/uaccess.h long left = raw_copy_from_user(dst, src, res); dst 90 arch/hexagon/include/asm/uaccess.h memset(dst + (res - left), 0, left); dst 184 arch/hexagon/lib/checksum.c csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum) dst 186 arch/hexagon/lib/checksum.c memcpy(dst, src, len); dst 187 arch/hexagon/lib/checksum.c return csum_partial(dst, len, sum); dst 21 arch/hexagon/lib/io.c short int *dst = (short int *) data; dst 27 
arch/hexagon/lib/io.c *dst++ = *src; dst 40 arch/hexagon/lib/io.c volatile short int *dst = (short int *)addr; dst 46 arch/hexagon/lib/io.c *dst = *src++; dst 55 arch/hexagon/lib/io.c long *dst = (long *) data; dst 61 arch/hexagon/lib/io.c *dst++ = *src; dst 69 arch/hexagon/lib/io.c volatile long *dst = (long *)addr; dst 75 arch/hexagon/lib/io.c *dst = *src++; dst 119 arch/hexagon/mm/cache.c unsigned long vaddr, void *dst, void *src, int len) dst 121 arch/hexagon/mm/cache.c memcpy(dst, src, len); dst 123 arch/hexagon/mm/cache.c flush_icache_range((unsigned long) dst, dst 124 arch/hexagon/mm/cache.c (unsigned long) dst + len); dst 48 arch/ia64/include/asm/cacheflush.h #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ dst 49 arch/ia64/include/asm/cacheflush.h do { memcpy(dst, src, len); \ dst 52 arch/ia64/include/asm/cacheflush.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 53 arch/ia64/include/asm/cacheflush.h memcpy(dst, src, len) dst 46 arch/ia64/include/asm/checksum.h extern __wsum csum_partial_copy_from_user(const void __user *src, void *dst, dst 50 arch/ia64/include/asm/checksum.h extern __wsum csum_partial_copy_nocheck(const void *src, void *dst, dst 193 arch/ia64/include/asm/elf.h extern void ia64_elf_core_copy_regs (struct pt_regs *src, elf_gregset_t dst); dst 200 arch/ia64/include/asm/io.h static inline void insb(unsigned long port, void *dst, unsigned long count) dst 202 arch/ia64/include/asm/io.h unsigned char *dp = dst; dst 209 arch/ia64/include/asm/io.h static inline void insw(unsigned long port, void *dst, unsigned long count) dst 211 arch/ia64/include/asm/io.h unsigned short *dp = dst; dst 218 arch/ia64/include/asm/io.h static inline void insl(unsigned long port, void *dst, unsigned long count) dst 220 arch/ia64/include/asm/io.h unsigned int *dp = dst; dst 274 arch/ia64/include/asm/io.h extern void memcpy_fromio(void *dst, const volatile void __iomem *src, long n); dst 275 arch/ia64/include/asm/io.h extern void memcpy_toio(volatile void __iomem *dst, const void *src, long n); dst 44 arch/ia64/kernel/crash.c elf_greg_t *dst = (elf_greg_t *)&(prstatus->pr_reg); dst 48 arch/ia64/kernel/crash.c ia64_dump_cpu_regs(dst); dst 49 arch/ia64/kernel/crash.c cfm = dst[43]; dst 52 arch/ia64/kernel/crash.c dst[46] = (unsigned long)ia64_rse_skip_regs((unsigned long *)dst[46], dst 448 arch/ia64/kernel/process.c elf_greg_t *dst = arg; dst 453 arch/ia64/kernel/process.c memset(dst, 0, sizeof(elf_gregset_t)); /* don't leak any kernel bits to user-level */ dst 482 arch/ia64/kernel/process.c unw_get_gr(info, i, &dst[i], &nat); dst 487 arch/ia64/kernel/process.c dst[32] = nat_bits; dst 488 arch/ia64/kernel/process.c unw_get_pr(info, &dst[33]); dst 491 arch/ia64/kernel/process.c unw_get_br(info, i, &dst[34 + i]); dst 494 arch/ia64/kernel/process.c dst[42] = ip + ia64_psr(pt)->ri; dst 495 arch/ia64/kernel/process.c dst[43] = cfm; dst 496 arch/ia64/kernel/process.c dst[44] = pt->cr_ipsr & IA64_PSR_UM; dst 498 arch/ia64/kernel/process.c unw_get_ar(info, UNW_AR_RSC, &dst[45]); dst 503 arch/ia64/kernel/process.c dst[46] = urbs_end; /* note: by convention PT_AR_BSP points to the end of the urbs! 
*/ dst 504 arch/ia64/kernel/process.c dst[47] = pt->ar_bspstore; dst 505 arch/ia64/kernel/process.c dst[48] = ar_rnat; dst 506 arch/ia64/kernel/process.c unw_get_ar(info, UNW_AR_CCV, &dst[49]); dst 507 arch/ia64/kernel/process.c unw_get_ar(info, UNW_AR_UNAT, &dst[50]); dst 508 arch/ia64/kernel/process.c unw_get_ar(info, UNW_AR_FPSR, &dst[51]); dst 509 arch/ia64/kernel/process.c dst[52] = pt->ar_pfs; /* UNW_AR_PFS is == to pt->cr_ifs for interrupt frames */ dst 510 arch/ia64/kernel/process.c unw_get_ar(info, UNW_AR_LC, &dst[53]); dst 511 arch/ia64/kernel/process.c unw_get_ar(info, UNW_AR_EC, &dst[54]); dst 512 arch/ia64/kernel/process.c unw_get_ar(info, UNW_AR_CSD, &dst[55]); dst 513 arch/ia64/kernel/process.c unw_get_ar(info, UNW_AR_SSD, &dst[56]); dst 519 arch/ia64/kernel/process.c elf_fpreg_t *dst = arg; dst 522 arch/ia64/kernel/process.c memset(dst, 0, sizeof(elf_fpregset_t)); /* don't leak any "random" bits */ dst 530 arch/ia64/kernel/process.c unw_get_fr(info, i, dst + i); dst 534 arch/ia64/kernel/process.c memcpy(dst + 32, task->thread.fph, 96*16); dst 550 arch/ia64/kernel/process.c ia64_elf_core_copy_regs (struct pt_regs *pt, elf_gregset_t dst) dst 552 arch/ia64/kernel/process.c unw_init_running(do_copy_regs, dst); dst 556 arch/ia64/kernel/process.c dump_fpu (struct pt_regs *pt, elf_fpregset_t dst) dst 558 arch/ia64/kernel/process.c unw_init_running(do_dump_fpu, dst); dst 1505 arch/ia64/kernel/ptrace.c struct regset_getset *dst = arg; dst 1525 arch/ia64/kernel/ptrace.c if (dst->count > 0 && dst->pos < ELF_GR_OFFSET(1)) { dst 1526 arch/ia64/kernel/ptrace.c dst->ret = user_regset_copyout_zero(&dst->pos, &dst->count, dst 1527 arch/ia64/kernel/ptrace.c &dst->u.get.kbuf, dst 1528 arch/ia64/kernel/ptrace.c &dst->u.get.ubuf, dst 1530 arch/ia64/kernel/ptrace.c if (dst->ret || dst->count == 0) dst 1535 arch/ia64/kernel/ptrace.c if (dst->count > 0 && dst->pos < ELF_GR_OFFSET(16)) { dst 1536 arch/ia64/kernel/ptrace.c index = (dst->pos - ELF_GR_OFFSET(1)) / sizeof(elf_greg_t); dst 1537 arch/ia64/kernel/ptrace.c min_copy = ELF_GR_OFFSET(16) > (dst->pos + dst->count) ? dst 1538 arch/ia64/kernel/ptrace.c (dst->pos + dst->count) : ELF_GR_OFFSET(16); dst 1539 arch/ia64/kernel/ptrace.c for (i = dst->pos; i < min_copy; i += sizeof(elf_greg_t), dst 1541 arch/ia64/kernel/ptrace.c if (access_elf_reg(dst->target, info, i, dst 1543 arch/ia64/kernel/ptrace.c dst->ret = -EIO; dst 1546 arch/ia64/kernel/ptrace.c dst->ret = user_regset_copyout(&dst->pos, &dst->count, dst 1547 arch/ia64/kernel/ptrace.c &dst->u.get.kbuf, &dst->u.get.ubuf, tmp, dst 1549 arch/ia64/kernel/ptrace.c if (dst->ret || dst->count == 0) dst 1554 arch/ia64/kernel/ptrace.c if (dst->count > 0 && dst->pos < ELF_NAT_OFFSET) { dst 1555 arch/ia64/kernel/ptrace.c pt = task_pt_regs(dst->target); dst 1556 arch/ia64/kernel/ptrace.c dst->ret = user_regset_copyout(&dst->pos, &dst->count, dst 1557 arch/ia64/kernel/ptrace.c &dst->u.get.kbuf, &dst->u.get.ubuf, &pt->r16, dst 1559 arch/ia64/kernel/ptrace.c if (dst->ret || dst->count == 0) dst 1564 arch/ia64/kernel/ptrace.c if (dst->count > 0 && dst->pos < ELF_CR_IIP_OFFSET) { dst 1565 arch/ia64/kernel/ptrace.c index = (dst->pos - ELF_NAT_OFFSET) / sizeof(elf_greg_t); dst 1566 arch/ia64/kernel/ptrace.c min_copy = ELF_CR_IIP_OFFSET > (dst->pos + dst->count) ? 
dst 1567 arch/ia64/kernel/ptrace.c (dst->pos + dst->count) : ELF_CR_IIP_OFFSET; dst 1568 arch/ia64/kernel/ptrace.c for (i = dst->pos; i < min_copy; i += sizeof(elf_greg_t), dst 1570 arch/ia64/kernel/ptrace.c if (access_elf_reg(dst->target, info, i, dst 1572 arch/ia64/kernel/ptrace.c dst->ret = -EIO; dst 1575 arch/ia64/kernel/ptrace.c dst->ret = user_regset_copyout(&dst->pos, &dst->count, dst 1576 arch/ia64/kernel/ptrace.c &dst->u.get.kbuf, &dst->u.get.ubuf, tmp, dst 1578 arch/ia64/kernel/ptrace.c if (dst->ret || dst->count == 0) dst 1585 arch/ia64/kernel/ptrace.c if (dst->count > 0 && dst->pos < (ELF_AR_END_OFFSET)) { dst 1586 arch/ia64/kernel/ptrace.c index = (dst->pos - ELF_CR_IIP_OFFSET) / sizeof(elf_greg_t); dst 1587 arch/ia64/kernel/ptrace.c min_copy = ELF_AR_END_OFFSET > (dst->pos + dst->count) ? dst 1588 arch/ia64/kernel/ptrace.c (dst->pos + dst->count) : ELF_AR_END_OFFSET; dst 1589 arch/ia64/kernel/ptrace.c for (i = dst->pos; i < min_copy; i += sizeof(elf_greg_t), dst 1591 arch/ia64/kernel/ptrace.c if (access_elf_reg(dst->target, info, i, dst 1593 arch/ia64/kernel/ptrace.c dst->ret = -EIO; dst 1596 arch/ia64/kernel/ptrace.c dst->ret = user_regset_copyout(&dst->pos, &dst->count, dst 1597 arch/ia64/kernel/ptrace.c &dst->u.get.kbuf, &dst->u.get.ubuf, tmp, dst 1605 arch/ia64/kernel/ptrace.c struct regset_getset *dst = arg; dst 1613 arch/ia64/kernel/ptrace.c if (dst->count > 0 && dst->pos < ELF_GR_OFFSET(1)) { dst 1614 arch/ia64/kernel/ptrace.c dst->ret = user_regset_copyin_ignore(&dst->pos, &dst->count, dst 1615 arch/ia64/kernel/ptrace.c &dst->u.set.kbuf, dst 1616 arch/ia64/kernel/ptrace.c &dst->u.set.ubuf, dst 1618 arch/ia64/kernel/ptrace.c if (dst->ret || dst->count == 0) dst 1623 arch/ia64/kernel/ptrace.c if (dst->count > 0 && dst->pos < ELF_GR_OFFSET(16)) { dst 1624 arch/ia64/kernel/ptrace.c i = dst->pos; dst 1625 arch/ia64/kernel/ptrace.c index = (dst->pos - ELF_GR_OFFSET(1)) / sizeof(elf_greg_t); dst 1626 arch/ia64/kernel/ptrace.c dst->ret = user_regset_copyin(&dst->pos, &dst->count, dst 1627 arch/ia64/kernel/ptrace.c &dst->u.set.kbuf, &dst->u.set.ubuf, tmp, dst 1629 arch/ia64/kernel/ptrace.c if (dst->ret) dst 1631 arch/ia64/kernel/ptrace.c for ( ; i < dst->pos; i += sizeof(elf_greg_t), index++) dst 1632 arch/ia64/kernel/ptrace.c if (access_elf_reg(dst->target, info, i, dst 1634 arch/ia64/kernel/ptrace.c dst->ret = -EIO; dst 1637 arch/ia64/kernel/ptrace.c if (dst->count == 0) dst 1642 arch/ia64/kernel/ptrace.c if (dst->count > 0 && dst->pos < ELF_NAT_OFFSET) { dst 1643 arch/ia64/kernel/ptrace.c pt = task_pt_regs(dst->target); dst 1644 arch/ia64/kernel/ptrace.c dst->ret = user_regset_copyin(&dst->pos, &dst->count, dst 1645 arch/ia64/kernel/ptrace.c &dst->u.set.kbuf, &dst->u.set.ubuf, &pt->r16, dst 1647 arch/ia64/kernel/ptrace.c if (dst->ret || dst->count == 0) dst 1652 arch/ia64/kernel/ptrace.c if (dst->count > 0 && dst->pos < ELF_CR_IIP_OFFSET) { dst 1653 arch/ia64/kernel/ptrace.c i = dst->pos; dst 1654 arch/ia64/kernel/ptrace.c index = (dst->pos - ELF_NAT_OFFSET) / sizeof(elf_greg_t); dst 1655 arch/ia64/kernel/ptrace.c dst->ret = user_regset_copyin(&dst->pos, &dst->count, dst 1656 arch/ia64/kernel/ptrace.c &dst->u.set.kbuf, &dst->u.set.ubuf, tmp, dst 1658 arch/ia64/kernel/ptrace.c if (dst->ret) dst 1660 arch/ia64/kernel/ptrace.c for (; i < dst->pos; i += sizeof(elf_greg_t), index++) dst 1661 arch/ia64/kernel/ptrace.c if (access_elf_reg(dst->target, info, i, dst 1663 arch/ia64/kernel/ptrace.c dst->ret = -EIO; dst 1666 arch/ia64/kernel/ptrace.c if (dst->count == 0) dst 1673 
arch/ia64/kernel/ptrace.c if (dst->count > 0 && dst->pos < (ELF_AR_END_OFFSET)) { dst 1674 arch/ia64/kernel/ptrace.c i = dst->pos; dst 1675 arch/ia64/kernel/ptrace.c index = (dst->pos - ELF_CR_IIP_OFFSET) / sizeof(elf_greg_t); dst 1676 arch/ia64/kernel/ptrace.c dst->ret = user_regset_copyin(&dst->pos, &dst->count, dst 1677 arch/ia64/kernel/ptrace.c &dst->u.set.kbuf, &dst->u.set.ubuf, tmp, dst 1679 arch/ia64/kernel/ptrace.c if (dst->ret) dst 1681 arch/ia64/kernel/ptrace.c for ( ; i < dst->pos; i += sizeof(elf_greg_t), index++) dst 1682 arch/ia64/kernel/ptrace.c if (access_elf_reg(dst->target, info, i, dst 1684 arch/ia64/kernel/ptrace.c dst->ret = -EIO; dst 1694 arch/ia64/kernel/ptrace.c struct regset_getset *dst = arg; dst 1695 arch/ia64/kernel/ptrace.c struct task_struct *task = dst->target; dst 1703 arch/ia64/kernel/ptrace.c if (dst->count > 0 && dst->pos < ELF_FP_OFFSET(2)) { dst 1704 arch/ia64/kernel/ptrace.c dst->ret = user_regset_copyout_zero(&dst->pos, &dst->count, dst 1705 arch/ia64/kernel/ptrace.c &dst->u.get.kbuf, dst 1706 arch/ia64/kernel/ptrace.c &dst->u.get.ubuf, dst 1708 arch/ia64/kernel/ptrace.c if (dst->count == 0 || dst->ret) dst 1713 arch/ia64/kernel/ptrace.c if (dst->count > 0 && dst->pos < ELF_FP_OFFSET(32)) { dst 1714 arch/ia64/kernel/ptrace.c index = (dst->pos - ELF_FP_OFFSET(2)) / sizeof(elf_fpreg_t); dst 1717 arch/ia64/kernel/ptrace.c dst->pos + dst->count); dst 1718 arch/ia64/kernel/ptrace.c for (i = dst->pos; i < min_copy; i += sizeof(elf_fpreg_t), dst 1722 arch/ia64/kernel/ptrace.c dst->ret = -EIO; dst 1725 arch/ia64/kernel/ptrace.c dst->ret = user_regset_copyout(&dst->pos, &dst->count, dst 1726 arch/ia64/kernel/ptrace.c &dst->u.get.kbuf, &dst->u.get.ubuf, tmp, dst 1728 arch/ia64/kernel/ptrace.c if (dst->count == 0 || dst->ret) dst 1733 arch/ia64/kernel/ptrace.c if (dst->count > 0) { dst 1734 arch/ia64/kernel/ptrace.c ia64_flush_fph(dst->target); dst 1736 arch/ia64/kernel/ptrace.c dst->ret = user_regset_copyout( dst 1737 arch/ia64/kernel/ptrace.c &dst->pos, &dst->count, dst 1738 arch/ia64/kernel/ptrace.c &dst->u.get.kbuf, &dst->u.get.ubuf, dst 1739 arch/ia64/kernel/ptrace.c &dst->target->thread.fph, dst 1743 arch/ia64/kernel/ptrace.c dst->ret = user_regset_copyout_zero( dst 1744 arch/ia64/kernel/ptrace.c &dst->pos, &dst->count, dst 1745 arch/ia64/kernel/ptrace.c &dst->u.get.kbuf, &dst->u.get.ubuf, dst 1752 arch/ia64/kernel/ptrace.c struct regset_getset *dst = arg; dst 1760 arch/ia64/kernel/ptrace.c if (dst->count > 0 && dst->pos < ELF_FP_OFFSET(2)) { dst 1761 arch/ia64/kernel/ptrace.c dst->ret = user_regset_copyin_ignore(&dst->pos, &dst->count, dst 1762 arch/ia64/kernel/ptrace.c &dst->u.set.kbuf, dst 1763 arch/ia64/kernel/ptrace.c &dst->u.set.ubuf, dst 1765 arch/ia64/kernel/ptrace.c if (dst->count == 0 || dst->ret) dst 1770 arch/ia64/kernel/ptrace.c if (dst->count > 0 && dst->pos < ELF_FP_OFFSET(32)) { dst 1771 arch/ia64/kernel/ptrace.c start = dst->pos; dst 1773 arch/ia64/kernel/ptrace.c dst->pos + dst->count); dst 1774 arch/ia64/kernel/ptrace.c dst->ret = user_regset_copyin(&dst->pos, &dst->count, dst 1775 arch/ia64/kernel/ptrace.c &dst->u.set.kbuf, &dst->u.set.ubuf, tmp, dst 1777 arch/ia64/kernel/ptrace.c if (dst->ret) dst 1783 arch/ia64/kernel/ptrace.c dst->ret = -EIO; dst 1793 arch/ia64/kernel/ptrace.c dst->ret = -EIO; dst 1804 arch/ia64/kernel/ptrace.c dst->ret = -EIO; dst 1808 arch/ia64/kernel/ptrace.c if (dst->ret || dst->count == 0) dst 1813 arch/ia64/kernel/ptrace.c if (dst->count > 0 && dst->pos < ELF_FP_OFFSET(128)) { dst 1814 arch/ia64/kernel/ptrace.c 
ia64_sync_fph(dst->target); dst 1815 arch/ia64/kernel/ptrace.c dst->ret = user_regset_copyin(&dst->pos, &dst->count, dst 1816 arch/ia64/kernel/ptrace.c &dst->u.set.kbuf, dst 1817 arch/ia64/kernel/ptrace.c &dst->u.set.ubuf, dst 1818 arch/ia64/kernel/ptrace.c &dst->target->thread.fph, dst 974 arch/ia64/kernel/unwind.c desc_reg_gr (unsigned char reg, unsigned char dst, struct unw_state_record *sr) dst 976 arch/ia64/kernel/unwind.c set_reg(sr->curr.reg + reg, UNW_WHERE_GR, sr->region_start + sr->region_len - 1, dst); dst 994 arch/ia64/kernel/unwind.c desc_rp_br (unsigned char dst, struct unw_state_record *sr) dst 996 arch/ia64/kernel/unwind.c sr->return_link_reg = dst; dst 1396 arch/ia64/kernel/unwind.c insn.dst = unw.preg_index[i]; dst 1481 arch/ia64/kernel/unwind.c insn.dst = unw.preg_index[i]; dst 1495 arch/ia64/kernel/unwind.c insn.dst = insn.val = unw.preg_index[UNW_REG_PSP]; dst 1678 arch/ia64/kernel/unwind.c insn.dst = offsetof(struct unw_frame_info, psp)/8; dst 1723 arch/ia64/kernel/unwind.c unsigned long opc, dst, val, off; dst 1735 arch/ia64/kernel/unwind.c dst = next_insn.dst; dst 1742 arch/ia64/kernel/unwind.c s[dst] += val; dst 1748 arch/ia64/kernel/unwind.c s[dst+1] = s[val+1]; dst 1749 arch/ia64/kernel/unwind.c s[dst] = s[val]; dst 1755 arch/ia64/kernel/unwind.c s[dst] = s[val]; dst 1760 arch/ia64/kernel/unwind.c s[dst] = (unsigned long) get_scratch_regs(state) + val; dst 1762 arch/ia64/kernel/unwind.c s[dst] = 0; dst 1764 arch/ia64/kernel/unwind.c __func__, dst, val); dst 1770 arch/ia64/kernel/unwind.c s[dst] = (unsigned long) &unw.r0; dst 1772 arch/ia64/kernel/unwind.c s[dst] = 0; dst 1780 arch/ia64/kernel/unwind.c s[dst] = (unsigned long) ia64_rse_skip_regs((unsigned long *)state->bsp, dst 1785 arch/ia64/kernel/unwind.c s[dst] = state->psp + val; dst 1789 arch/ia64/kernel/unwind.c s[dst] = state->sp + val; dst 1796 arch/ia64/kernel/unwind.c s[dst+1] = ((unsigned long) state->pri_unat_loc - s[dst]) | UNW_NAT_MEMSTK; dst 1800 arch/ia64/kernel/unwind.c s[dst+1] = val; dst 1813 arch/ia64/kernel/unwind.c s[dst] = *(unsigned long *) s[val]; dst 218 arch/ia64/kernel/unwind_decoder.c unsigned char byte1 = *dp++, r, dst; dst 221 arch/ia64/kernel/unwind_decoder.c dst = (byte1 & 0x7f); dst 224 arch/ia64/kernel/unwind_decoder.c case 0: UNW_DEC_REG_GR(P3, UNW_REG_PSP, dst, arg); break; dst 225 arch/ia64/kernel/unwind_decoder.c case 1: UNW_DEC_REG_GR(P3, UNW_REG_RP, dst, arg); break; dst 226 arch/ia64/kernel/unwind_decoder.c case 2: UNW_DEC_REG_GR(P3, UNW_REG_PFS, dst, arg); break; dst 227 arch/ia64/kernel/unwind_decoder.c case 3: UNW_DEC_REG_GR(P3, UNW_REG_PR, dst, arg); break; dst 228 arch/ia64/kernel/unwind_decoder.c case 4: UNW_DEC_REG_GR(P3, UNW_REG_UNAT, dst, arg); break; dst 229 arch/ia64/kernel/unwind_decoder.c case 5: UNW_DEC_REG_GR(P3, UNW_REG_LC, dst, arg); break; dst 230 arch/ia64/kernel/unwind_decoder.c case 6: UNW_DEC_RP_BR(P3, dst, arg); break; dst 231 arch/ia64/kernel/unwind_decoder.c case 7: UNW_DEC_REG_GR(P3, UNW_REG_RNAT, dst, arg); break; dst 232 arch/ia64/kernel/unwind_decoder.c case 8: UNW_DEC_REG_GR(P3, UNW_REG_BSP, dst, arg); break; dst 233 arch/ia64/kernel/unwind_decoder.c case 9: UNW_DEC_REG_GR(P3, UNW_REG_BSPSTORE, dst, arg); break; dst 234 arch/ia64/kernel/unwind_decoder.c case 10: UNW_DEC_REG_GR(P3, UNW_REG_FPSR, dst, arg); break; dst 235 arch/ia64/kernel/unwind_decoder.c case 11: UNW_DEC_PRIUNAT_GR(P3, dst, arg); break; dst 142 arch/ia64/kernel/unwind_i.h unsigned int dst : 9; dst 109 arch/ia64/lib/csum_partial_copy.c csum_partial_copy_from_user(const void 
__user *src, void *dst, dst 120 arch/ia64/lib/csum_partial_copy.c if (__copy_from_user(dst, src, len) != 0 && errp) dst 123 arch/ia64/lib/csum_partial_copy.c result = do_csum(dst, len); dst 135 arch/ia64/lib/csum_partial_copy.c csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum) dst 138 arch/ia64/lib/csum_partial_copy.c dst, len, sum, NULL); dst 13 arch/ia64/lib/io.c char *dst = to; dst 17 arch/ia64/lib/io.c *dst++ = readb(from++); dst 41 arch/ia64/lib/io.c void memset_io(volatile void __iomem *dst, int c, long count) dst 47 arch/ia64/lib/io.c writeb(ch, dst); dst 48 arch/ia64/lib/io.c dst++; dst 372 arch/ia64/mm/discontig.c pg_data_t **dst; dst 385 arch/ia64/mm/discontig.c dst = LOCAL_DATA_ADDR(pgdat_list[node])->pg_data_ptrs; dst 386 arch/ia64/mm/discontig.c memcpy(dst, pgdat_list, sizeof(pgdat_list)); dst 263 arch/m68k/include/asm/cacheflush_mm.h void *dst, void *src, int len) dst 266 arch/m68k/include/asm/cacheflush_mm.h memcpy(dst, src, len); dst 271 arch/m68k/include/asm/cacheflush_mm.h void *dst, void *src, int len) dst 274 arch/m68k/include/asm/cacheflush_mm.h memcpy(dst, src, len); dst 27 arch/m68k/include/asm/cacheflush_no.h #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ dst 28 arch/m68k/include/asm/cacheflush_no.h memcpy(dst, src, len) dst 29 arch/m68k/include/asm/cacheflush_no.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 30 arch/m68k/include/asm/cacheflush_no.h memcpy(dst, src, len) dst 34 arch/m68k/include/asm/checksum.h void *dst, dst 39 arch/m68k/include/asm/checksum.h void *dst, int len, dst 48 arch/m68k/include/asm/kmap.h static inline void memcpy_fromio(void *dst, const volatile void __iomem *src, dst 51 arch/m68k/include/asm/kmap.h __builtin_memcpy(dst, (void __force *) src, count); dst 55 arch/m68k/include/asm/kmap.h static inline void memcpy_toio(volatile void __iomem *dst, const void *src, dst 58 arch/m68k/include/asm/kmap.h __builtin_memcpy((void __force *) dst, src, count); dst 383 arch/m68k/include/asm/uaccess_mm.h extern long strncpy_from_user(char *dst, const char __user *src, long count); dst 126 arch/m68k/include/asm/uaccess_no.h strncpy_from_user(char *dst, const char *src, long count) dst 129 arch/m68k/include/asm/uaccess_no.h strncpy(dst, src, count); dst 130 arch/m68k/include/asm/uaccess_no.h for (tmp = dst; *tmp && count > 0; tmp++, count--) dst 132 arch/m68k/include/asm/uaccess_no.h return(tmp - dst); /* DAVIDM should we count a NUL ? 
check getname */ dst 132 arch/m68k/lib/checksum.c csum_partial_copy_from_user(const void __user *src, void *dst, dst 309 arch/m68k/lib/checksum.c : "=d" (sum), "=d" (len), "=a" (src), "=a" (dst), dst 311 arch/m68k/lib/checksum.c : "0" (sum), "1" (len), "2" (src), "3" (dst) dst 327 arch/m68k/lib/checksum.c csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum) dst 415 arch/m68k/lib/checksum.c : "=d" (sum), "=d" (len), "=a" (src), "=a" (dst), dst 417 arch/m68k/lib/checksum.c : "0" (sum), "1" (len), "2" (src), "3" (dst) dst 107 arch/microblaze/include/asm/cacheflush.h void *dst, void *src, int len) dst 109 arch/microblaze/include/asm/cacheflush.h u32 addr = virt_to_phys(dst); dst 110 arch/microblaze/include/asm/cacheflush.h memcpy(dst, src, len); dst 119 arch/microblaze/include/asm/cacheflush.h void *dst, void *src, int len) dst 121 arch/microblaze/include/asm/cacheflush.h memcpy(dst, src, len); dst 333 arch/microblaze/include/asm/uaccess.h strncpy_from_user(char *dst, const char __user *src, long count) dst 337 arch/microblaze/include/asm/uaccess.h return __strncpy_user(dst, src, count); dst 98 arch/microblaze/kernel/setup.c unsigned long *src, *dst; dst 173 arch/microblaze/kernel/setup.c dst = (unsigned long *) (offset * sizeof(u32)); dst 174 arch/microblaze/kernel/setup.c for (src = __ivt_start + offset; src < __ivt_end; src++, dst++) dst 175 arch/microblaze/kernel/setup.c *dst = *src; dst 39 arch/microblaze/lib/memcpy.c char *dst = v_dst; dst 43 arch/microblaze/lib/memcpy.c *dst++ = *src++; dst 51 arch/microblaze/lib/memcpy.c char *dst = v_dst; dst 67 arch/microblaze/lib/memcpy.c switch ((unsigned long)dst & 3) { dst 69 arch/microblaze/lib/memcpy.c *dst++ = *src++; dst 72 arch/microblaze/lib/memcpy.c *dst++ = *src++; dst 75 arch/microblaze/lib/memcpy.c *dst++ = *src++; dst 79 arch/microblaze/lib/memcpy.c i_dst = (void *)dst; dst 171 arch/microblaze/lib/memcpy.c dst = (void *)i_dst; dst 178 arch/microblaze/lib/memcpy.c *dst++ = *src++; dst 180 arch/microblaze/lib/memcpy.c *dst++ = *src++; dst 182 arch/microblaze/lib/memcpy.c *dst++ = *src++; dst 38 arch/microblaze/lib/memmove.c char *dst = v_dst; dst 49 arch/microblaze/lib/memmove.c dst += c; dst 53 arch/microblaze/lib/memmove.c *--dst = *--src; dst 61 arch/microblaze/lib/memmove.c char *dst = v_dst; dst 80 arch/microblaze/lib/memmove.c dst += c; dst 89 arch/microblaze/lib/memmove.c switch ((unsigned long)dst & 3) { dst 91 arch/microblaze/lib/memmove.c *--dst = *--src; dst 94 arch/microblaze/lib/memmove.c *--dst = *--src; dst 97 arch/microblaze/lib/memmove.c *--dst = *--src; dst 101 arch/microblaze/lib/memmove.c i_dst = (void *)dst; dst 196 arch/microblaze/lib/memmove.c dst = (void *)i_dst; dst 203 arch/microblaze/lib/memmove.c *--dst = *--src; dst 205 arch/microblaze/lib/memmove.c *--dst = *--src; dst 207 arch/microblaze/lib/memmove.c *--dst = *--src; dst 209 arch/microblaze/lib/memmove.c *--dst = *--src; dst 150 arch/mips/cavium-octeon/crypto/octeon-sha1.c __be32 *dst = (__be32 *)out; dst 177 arch/mips/cavium-octeon/crypto/octeon-sha1.c dst[i] = cpu_to_be32(sctx->state[i]); dst 162 arch/mips/cavium-octeon/crypto/octeon-sha256.c __be32 *dst = (__be32 *)out; dst 189 arch/mips/cavium-octeon/crypto/octeon-sha256.c dst[i] = cpu_to_be32(sctx->state[i]); dst 176 arch/mips/cavium-octeon/crypto/octeon-sha512.c __be64 *dst = (__be64 *)hash; dst 204 arch/mips/cavium-octeon/crypto/octeon-sha512.c dst[i] = cpu_to_be64(sctx->state[i]); dst 105 arch/mips/include/asm/cacheflush.h struct page *page, unsigned long vaddr, void *dst, 
const void *src, dst 109 arch/mips/include/asm/cacheflush.h struct page *page, unsigned long vaddr, void *dst, const void *src, dst 37 arch/mips/include/asm/checksum.h __wsum __csum_partial_copy_kernel(const void *src, void *dst, dst 40 arch/mips/include/asm/checksum.h __wsum __csum_partial_copy_from_user(const void *src, void *dst, dst 42 arch/mips/include/asm/checksum.h __wsum __csum_partial_copy_to_user(const void *src, void *dst, dst 49 arch/mips/include/asm/checksum.h __wsum csum_partial_copy_from_user(const void __user *src, void *dst, int len, dst 54 arch/mips/include/asm/checksum.h return __csum_partial_copy_kernel((__force void *)src, dst, dst 57 arch/mips/include/asm/checksum.h return __csum_partial_copy_from_user((__force void *)src, dst, dst 63 arch/mips/include/asm/checksum.h __wsum csum_and_copy_from_user(const void __user *src, void *dst, dst 67 arch/mips/include/asm/checksum.h return csum_partial_copy_from_user(src, dst, len, sum, dst 80 arch/mips/include/asm/checksum.h __wsum csum_and_copy_to_user(const void *src, void __user *dst, int len, dst 84 arch/mips/include/asm/checksum.h if (access_ok(dst, len)) { dst 87 arch/mips/include/asm/checksum.h (__force void *)dst, dst 91 arch/mips/include/asm/checksum.h (__force void *)dst, dst 104 arch/mips/include/asm/checksum.h __wsum csum_partial_copy_nocheck(const void *src, void *dst, dst 22 arch/mips/include/asm/ftrace.h #define safe_load(load, src, dst, error) \ dst 38 arch/mips/include/asm/ftrace.h : [tmp_dst] "=&r" (dst), [tmp_err] "=r" (error)\ dst 44 arch/mips/include/asm/ftrace.h #define safe_store(store, src, dst, error) \ dst 61 arch/mips/include/asm/ftrace.h : [tmp_dst] "r" (dst), [tmp_src] "r" (src)\ dst 66 arch/mips/include/asm/ftrace.h #define safe_load_code(dst, src, error) \ dst 67 arch/mips/include/asm/ftrace.h safe_load(STR(lw), src, dst, error) dst 68 arch/mips/include/asm/ftrace.h #define safe_store_code(src, dst, error) \ dst 69 arch/mips/include/asm/ftrace.h safe_store(STR(sw), src, dst, error) dst 71 arch/mips/include/asm/ftrace.h #define safe_load_stack(dst, src, error) \ dst 72 arch/mips/include/asm/ftrace.h safe_load(STR(PTR_L), src, dst, error) dst 74 arch/mips/include/asm/ftrace.h #define safe_store_stack(src, dst, error) \ dst 75 arch/mips/include/asm/ftrace.h safe_store(STR(PTR_S), src, dst, error) dst 584 arch/mips/include/asm/io.h static inline void memcpy_fromio(void *dst, const volatile void __iomem *src, int count) dst 586 arch/mips/include/asm/io.h memcpy(dst, (void __force *) src, count); dst 588 arch/mips/include/asm/io.h static inline void memcpy_toio(volatile void __iomem *dst, const void *src, int count) dst 590 arch/mips/include/asm/io.h memcpy((void __force *) dst, src, count); dst 340 arch/mips/include/asm/mach-loongson64/loongson.h #define LOONGSON_ADDRWIN_CFG(s, d, w, src, dst, size) do {\ dst 342 arch/mips/include/asm/mach-loongson64/loongson.h s##_WIN##w##_MMAP = (dst) | ADDRWIN_MAP_DST_##d; \ dst 346 arch/mips/include/asm/mach-loongson64/loongson.h #define LOONGSON_ADDRWIN_CPUTOPCI(win, src, dst, size) \ dst 347 arch/mips/include/asm/mach-loongson64/loongson.h LOONGSON_ADDRWIN_CFG(CPU, PCI, win, src, dst, size) dst 348 arch/mips/include/asm/mach-loongson64/loongson.h #define LOONGSON_ADDRWIN_CPUTODDR(win, src, dst, size) \ dst 349 arch/mips/include/asm/mach-loongson64/loongson.h LOONGSON_ADDRWIN_CFG(CPU, DDR, win, src, dst, size) dst 350 arch/mips/include/asm/mach-loongson64/loongson.h #define LOONGSON_ADDRWIN_PCITODDR(win, src, dst, size) \ dst 351 
arch/mips/include/asm/mach-loongson64/loongson.h LOONGSON_ADDRWIN_CFG(PCIDMA, DDR, win, src, dst, size) dst 402 arch/mips/include/asm/octeon/cvmx-iob-defs.h uint64_t dst:9; dst 406 arch/mips/include/asm/octeon/cvmx-iob-defs.h uint64_t dst:9; dst 421 arch/mips/include/asm/octeon/cvmx-iob-defs.h uint64_t dst:9; dst 425 arch/mips/include/asm/octeon/cvmx-iob-defs.h uint64_t dst:9; dst 597 arch/mips/include/asm/octeon/cvmx-iob-defs.h uint64_t dst:8; dst 601 arch/mips/include/asm/octeon/cvmx-iob-defs.h uint64_t dst:8; dst 616 arch/mips/include/asm/octeon/cvmx-iob-defs.h uint64_t dst:8; dst 620 arch/mips/include/asm/octeon/cvmx-iob-defs.h uint64_t dst:8; dst 2536 arch/mips/include/asm/octeon/cvmx-pip-defs.h uint64_t dst:16; dst 2540 arch/mips/include/asm/octeon/cvmx-pip-defs.h uint64_t dst:16; dst 150 arch/mips/include/asm/txx9/tx4939.h __u32 dst; dst 94 arch/mips/kernel/process.c int arch_dup_task_struct(struct task_struct *dst, struct task_struct *src) dst 114 arch/mips/kernel/process.c *dst = *src; dst 457 arch/mips/kernel/smp-bmips.c static void bmips_wr_vec(unsigned long dst, char *start, char *end) dst 459 arch/mips/kernel/smp-bmips.c memcpy((void *)dst, start, end - start); dst 460 arch/mips/kernel/smp-bmips.c dma_cache_wback(dst, end - start); dst 461 arch/mips/kernel/smp-bmips.c local_flush_icache_range(dst, dst + (end - start)); dst 19 arch/mips/lib/iomap_copy.c u64 *dst = to; dst 24 arch/mips/lib/iomap_copy.c *dst++ = __raw_readq(src++); dst 196 arch/mips/mm/init.c struct page *page, unsigned long vaddr, void *dst, const void *src, dst 205 arch/mips/mm/init.c memcpy(dst, src, len); dst 214 arch/mips/mm/init.c struct page *page, unsigned long vaddr, void *dst, const void *src, dst 220 arch/mips/mm/init.c memcpy(dst, vfrom, len); dst 223 arch/mips/mm/init.c memcpy(dst, src, len); dst 426 arch/mips/net/ebpf_jit.c int dst = ebpf_to_mips_reg(ctx, insn, dst_reg); dst 428 arch/mips/net/ebpf_jit.c if (dst < 0) dst 429 arch/mips/net/ebpf_jit.c return dst; dst 465 arch/mips/net/ebpf_jit.c emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32); dst 471 arch/mips/net/ebpf_jit.c emit_instr(ctx, sll, dst, dst, 0); dst 477 arch/mips/net/ebpf_jit.c emit_instr(ctx, daddiu, dst, MIPS_R_ZERO, insn->imm); dst 481 arch/mips/net/ebpf_jit.c emit_instr(ctx, andi, dst, dst, insn->imm); dst 485 arch/mips/net/ebpf_jit.c emit_instr(ctx, ori, dst, dst, insn->imm); dst 489 arch/mips/net/ebpf_jit.c emit_instr(ctx, xori, dst, dst, insn->imm); dst 492 arch/mips/net/ebpf_jit.c emit_instr(ctx, daddiu, dst, dst, insn->imm); dst 495 arch/mips/net/ebpf_jit.c emit_instr(ctx, daddiu, dst, dst, -insn->imm); dst 498 arch/mips/net/ebpf_jit.c emit_instr(ctx, dsrl_safe, dst, dst, insn->imm & 0x3f); dst 501 arch/mips/net/ebpf_jit.c emit_instr(ctx, srl, dst, dst, insn->imm & 0x1f); dst 504 arch/mips/net/ebpf_jit.c emit_instr(ctx, dsll_safe, dst, dst, insn->imm & 0x3f); dst 507 arch/mips/net/ebpf_jit.c emit_instr(ctx, sll, dst, dst, insn->imm & 0x1f); dst 510 arch/mips/net/ebpf_jit.c emit_instr(ctx, dsra_safe, dst, dst, insn->imm & 0x3f); dst 513 arch/mips/net/ebpf_jit.c emit_instr(ctx, sra, dst, dst, insn->imm & 0x1f); dst 516 arch/mips/net/ebpf_jit.c emit_instr(ctx, addiu, dst, MIPS_R_ZERO, insn->imm); dst 519 arch/mips/net/ebpf_jit.c emit_instr(ctx, addiu, dst, dst, insn->imm); dst 522 arch/mips/net/ebpf_jit.c emit_instr(ctx, addiu, dst, dst, -insn->imm); dst 530 arch/mips/net/ebpf_jit.c gen_imm_to_reg(insn, dst, ctx); dst 536 arch/mips/net/ebpf_jit.c emit_instr(ctx, and, dst, dst, MIPS_R_AT); dst 540 arch/mips/net/ebpf_jit.c 
emit_instr(ctx, or, dst, dst, MIPS_R_AT); dst 544 arch/mips/net/ebpf_jit.c emit_instr(ctx, xor, dst, dst, MIPS_R_AT); dst 547 arch/mips/net/ebpf_jit.c emit_instr(ctx, daddu, dst, dst, MIPS_R_AT); dst 550 arch/mips/net/ebpf_jit.c emit_instr(ctx, dsubu, dst, dst, MIPS_R_AT); dst 553 arch/mips/net/ebpf_jit.c emit_instr(ctx, addu, dst, dst, MIPS_R_AT); dst 556 arch/mips/net/ebpf_jit.c emit_instr(ctx, subu, dst, dst, MIPS_R_AT); dst 567 arch/mips/net/ebpf_jit.c static void emit_const_to_reg(struct jit_ctx *ctx, int dst, u64 value) dst 570 arch/mips/net/ebpf_jit.c emit_instr(ctx, daddiu, dst, MIPS_R_ZERO, (int)value); dst 573 arch/mips/net/ebpf_jit.c emit_instr(ctx, lui, dst, (s32)(s16)(value >> 16)); dst 574 arch/mips/net/ebpf_jit.c emit_instr(ctx, ori, dst, dst, (unsigned int)(value & 0xffff)); dst 584 arch/mips/net/ebpf_jit.c emit_instr(ctx, dsll_safe, dst, dst, needed_shift); dst 589 arch/mips/net/ebpf_jit.c emit_instr(ctx, lui, dst, (s32)(s16)part); dst 592 arch/mips/net/ebpf_jit.c emit_instr(ctx, ori, dst, dst 593 arch/mips/net/ebpf_jit.c seen_part ? dst : MIPS_R_ZERO, dst 662 arch/mips/net/ebpf_jit.c int src, dst, r, td, ts, mem_off, b_off; dst 697 arch/mips/net/ebpf_jit.c dst = ebpf_to_mips_reg(ctx, insn, dst_reg); dst 698 arch/mips/net/ebpf_jit.c if (dst < 0) dst 699 arch/mips/net/ebpf_jit.c return dst; dst 701 arch/mips/net/ebpf_jit.c emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32); dst 706 arch/mips/net/ebpf_jit.c emit_instr(ctx, dmulu, dst, dst, MIPS_R_AT); dst 708 arch/mips/net/ebpf_jit.c emit_instr(ctx, dmultu, MIPS_R_AT, dst); dst 709 arch/mips/net/ebpf_jit.c emit_instr(ctx, mflo, dst); dst 713 arch/mips/net/ebpf_jit.c dst = ebpf_to_mips_reg(ctx, insn, dst_reg); dst 714 arch/mips/net/ebpf_jit.c if (dst < 0) dst 715 arch/mips/net/ebpf_jit.c return dst; dst 717 arch/mips/net/ebpf_jit.c emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32); dst 718 arch/mips/net/ebpf_jit.c emit_instr(ctx, dsubu, dst, MIPS_R_ZERO, dst); dst 721 arch/mips/net/ebpf_jit.c dst = ebpf_to_mips_reg(ctx, insn, dst_reg); dst 722 arch/mips/net/ebpf_jit.c if (dst < 0) dst 723 arch/mips/net/ebpf_jit.c return dst; dst 727 arch/mips/net/ebpf_jit.c emit_instr(ctx, sll, dst, dst, 0); dst 733 arch/mips/net/ebpf_jit.c emit_instr(ctx, mulu, dst, dst, MIPS_R_AT); dst 735 arch/mips/net/ebpf_jit.c emit_instr(ctx, multu, dst, MIPS_R_AT); dst 736 arch/mips/net/ebpf_jit.c emit_instr(ctx, mflo, dst); dst 740 arch/mips/net/ebpf_jit.c dst = ebpf_to_mips_reg(ctx, insn, dst_reg); dst 741 arch/mips/net/ebpf_jit.c if (dst < 0) dst 742 arch/mips/net/ebpf_jit.c return dst; dst 746 arch/mips/net/ebpf_jit.c emit_instr(ctx, sll, dst, dst, 0); dst 748 arch/mips/net/ebpf_jit.c emit_instr(ctx, subu, dst, MIPS_R_ZERO, dst); dst 754 arch/mips/net/ebpf_jit.c dst = ebpf_to_mips_reg(ctx, insn, dst_reg); dst 755 arch/mips/net/ebpf_jit.c if (dst < 0) dst 756 arch/mips/net/ebpf_jit.c return dst; dst 760 arch/mips/net/ebpf_jit.c emit_instr(ctx, sll, dst, dst, 0); dst 764 arch/mips/net/ebpf_jit.c emit_instr(ctx, addu, dst, MIPS_R_ZERO, MIPS_R_ZERO); dst 770 arch/mips/net/ebpf_jit.c emit_instr(ctx, divu_r6, dst, dst, MIPS_R_AT); dst 772 arch/mips/net/ebpf_jit.c emit_instr(ctx, modu, dst, dst, MIPS_R_AT); dst 775 arch/mips/net/ebpf_jit.c emit_instr(ctx, divu, dst, MIPS_R_AT); dst 777 arch/mips/net/ebpf_jit.c emit_instr(ctx, mflo, dst); dst 779 arch/mips/net/ebpf_jit.c emit_instr(ctx, mfhi, dst); dst 785 arch/mips/net/ebpf_jit.c dst = ebpf_to_mips_reg(ctx, insn, dst_reg); dst 786 arch/mips/net/ebpf_jit.c if (dst < 0) dst 787 arch/mips/net/ebpf_jit.c return 
dst; dst 789 arch/mips/net/ebpf_jit.c emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32); dst 793 arch/mips/net/ebpf_jit.c emit_instr(ctx, addu, dst, MIPS_R_ZERO, MIPS_R_ZERO); dst 799 arch/mips/net/ebpf_jit.c emit_instr(ctx, ddivu_r6, dst, dst, MIPS_R_AT); dst 801 arch/mips/net/ebpf_jit.c emit_instr(ctx, modu, dst, dst, MIPS_R_AT); dst 804 arch/mips/net/ebpf_jit.c emit_instr(ctx, ddivu, dst, MIPS_R_AT); dst 806 arch/mips/net/ebpf_jit.c emit_instr(ctx, mflo, dst); dst 808 arch/mips/net/ebpf_jit.c emit_instr(ctx, mfhi, dst); dst 823 arch/mips/net/ebpf_jit.c dst = ebpf_to_mips_reg(ctx, insn, dst_reg); dst 824 arch/mips/net/ebpf_jit.c if (src < 0 || dst < 0) dst 827 arch/mips/net/ebpf_jit.c emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32); dst 831 arch/mips/net/ebpf_jit.c emit_instr(ctx, daddiu, dst, MIPS_R_SP, MAX_BPF_STACK); dst 841 arch/mips/net/ebpf_jit.c tmp_reg = dst; dst 851 arch/mips/net/ebpf_jit.c emit_instr(ctx, daddu, dst, src, MIPS_R_ZERO); dst 854 arch/mips/net/ebpf_jit.c emit_instr(ctx, daddu, dst, dst, src); dst 857 arch/mips/net/ebpf_jit.c emit_instr(ctx, dsubu, dst, dst, src); dst 860 arch/mips/net/ebpf_jit.c emit_instr(ctx, xor, dst, dst, src); dst 863 arch/mips/net/ebpf_jit.c emit_instr(ctx, or, dst, dst, src); dst 866 arch/mips/net/ebpf_jit.c emit_instr(ctx, and, dst, dst, src); dst 870 arch/mips/net/ebpf_jit.c emit_instr(ctx, dmulu, dst, dst, src); dst 872 arch/mips/net/ebpf_jit.c emit_instr(ctx, dmultu, dst, src); dst 873 arch/mips/net/ebpf_jit.c emit_instr(ctx, mflo, dst); dst 881 arch/mips/net/ebpf_jit.c dst, dst, src); dst 883 arch/mips/net/ebpf_jit.c emit_instr(ctx, modu, dst, dst, src); dst 886 arch/mips/net/ebpf_jit.c emit_instr(ctx, ddivu, dst, src); dst 888 arch/mips/net/ebpf_jit.c emit_instr(ctx, mflo, dst); dst 890 arch/mips/net/ebpf_jit.c emit_instr(ctx, mfhi, dst); dst 893 arch/mips/net/ebpf_jit.c emit_instr(ctx, dsllv, dst, dst, src); dst 896 arch/mips/net/ebpf_jit.c emit_instr(ctx, dsrlv, dst, dst, src); dst 899 arch/mips/net/ebpf_jit.c emit_instr(ctx, dsrav, dst, dst, src); dst 919 arch/mips/net/ebpf_jit.c dst = ebpf_to_mips_reg(ctx, insn, dst_reg); dst 920 arch/mips/net/ebpf_jit.c if (src < 0 || dst < 0) dst 925 arch/mips/net/ebpf_jit.c emit_instr(ctx, sll, dst, dst, 0); dst 933 arch/mips/net/ebpf_jit.c tmp_reg = dst; dst 943 arch/mips/net/ebpf_jit.c emit_instr(ctx, addu, dst, src, MIPS_R_ZERO); dst 946 arch/mips/net/ebpf_jit.c emit_instr(ctx, addu, dst, dst, src); dst 949 arch/mips/net/ebpf_jit.c emit_instr(ctx, subu, dst, dst, src); dst 952 arch/mips/net/ebpf_jit.c emit_instr(ctx, xor, dst, dst, src); dst 955 arch/mips/net/ebpf_jit.c emit_instr(ctx, or, dst, dst, src); dst 958 arch/mips/net/ebpf_jit.c emit_instr(ctx, and, dst, dst, src); dst 961 arch/mips/net/ebpf_jit.c emit_instr(ctx, mul, dst, dst, src); dst 967 arch/mips/net/ebpf_jit.c emit_instr(ctx, divu_r6, dst, dst, src); dst 969 arch/mips/net/ebpf_jit.c emit_instr(ctx, modu, dst, dst, src); dst 972 arch/mips/net/ebpf_jit.c emit_instr(ctx, divu, dst, src); dst 974 arch/mips/net/ebpf_jit.c emit_instr(ctx, mflo, dst); dst 976 arch/mips/net/ebpf_jit.c emit_instr(ctx, mfhi, dst); dst 979 arch/mips/net/ebpf_jit.c emit_instr(ctx, sllv, dst, dst, src); dst 982 arch/mips/net/ebpf_jit.c emit_instr(ctx, srlv, dst, dst, src); dst 985 arch/mips/net/ebpf_jit.c emit_instr(ctx, srav, dst, dst, src); dst 1004 arch/mips/net/ebpf_jit.c dst = ebpf_to_mips_reg(ctx, insn, dst_reg_fp_ok); dst 1005 arch/mips/net/ebpf_jit.c if (dst < 0) dst 1006 arch/mips/net/ebpf_jit.c return dst; dst 1026 arch/mips/net/ebpf_jit.c dst = 
ebpf_to_mips_reg(ctx, insn, dst_reg); dst 1027 arch/mips/net/ebpf_jit.c if (src < 0 || dst < 0) dst 1035 arch/mips/net/ebpf_jit.c emit_instr(ctx, sll, MIPS_R_AT, dst, 0); dst 1036 arch/mips/net/ebpf_jit.c dst = MIPS_R_AT; dst 1039 arch/mips/net/ebpf_jit.c emit_instr(ctx, and, MIPS_R_AT, dst, src); dst 1041 arch/mips/net/ebpf_jit.c dst = MIPS_R_AT; dst 1044 arch/mips/net/ebpf_jit.c emit_instr(ctx, dsubu, MIPS_R_AT, dst, src); dst 1066 arch/mips/net/ebpf_jit.c emit_instr(ctx, slt, MIPS_R_AT, dst, src); dst 1068 arch/mips/net/ebpf_jit.c dst = MIPS_R_AT; dst 1072 arch/mips/net/ebpf_jit.c emit_instr(ctx, dsubu, MIPS_R_T8, dst, src); dst 1073 arch/mips/net/ebpf_jit.c emit_instr(ctx, sltu, MIPS_R_AT, dst, src); dst 1086 arch/mips/net/ebpf_jit.c dst = MIPS_R_AT; dst 1089 arch/mips/net/ebpf_jit.c emit_instr(ctx, sltu, MIPS_R_AT, dst, src); dst 1091 arch/mips/net/ebpf_jit.c dst = MIPS_R_AT; dst 1118 arch/mips/net/ebpf_jit.c emit_instr(ctx, bne, dst, src, b_off); dst 1120 arch/mips/net/ebpf_jit.c emit_instr(ctx, beq, dst, src, b_off); dst 1142 arch/mips/net/ebpf_jit.c emit_instr(ctx, beq, dst, src, b_off); dst 1144 arch/mips/net/ebpf_jit.c emit_instr(ctx, bne, dst, src, b_off); dst 1156 arch/mips/net/ebpf_jit.c dst = ebpf_to_mips_reg(ctx, insn, dst_reg_fp_ok); dst 1157 arch/mips/net/ebpf_jit.c if (dst < 0) dst 1158 arch/mips/net/ebpf_jit.c return dst; dst 1167 arch/mips/net/ebpf_jit.c emit_instr(ctx, blez, dst, b_off); dst 1170 arch/mips/net/ebpf_jit.c emit_instr(ctx, bltz, dst, b_off); dst 1173 arch/mips/net/ebpf_jit.c emit_instr(ctx, bgez, dst, b_off); dst 1176 arch/mips/net/ebpf_jit.c emit_instr(ctx, bgtz, dst, b_off); dst 1187 arch/mips/net/ebpf_jit.c emit_instr(ctx, bgtz, dst, b_off); dst 1190 arch/mips/net/ebpf_jit.c emit_instr(ctx, bgez, dst, b_off); dst 1193 arch/mips/net/ebpf_jit.c emit_instr(ctx, bltz, dst, b_off); dst 1196 arch/mips/net/ebpf_jit.c emit_instr(ctx, blez, dst, b_off); dst 1215 arch/mips/net/ebpf_jit.c emit_instr(ctx, slti, MIPS_R_AT, dst, (int)t64s); dst 1217 arch/mips/net/ebpf_jit.c dst = MIPS_R_ZERO; dst 1221 arch/mips/net/ebpf_jit.c emit_instr(ctx, slt, MIPS_R_AT, dst, MIPS_R_AT); dst 1223 arch/mips/net/ebpf_jit.c dst = MIPS_R_ZERO; dst 1231 arch/mips/net/ebpf_jit.c dst = ebpf_to_mips_reg(ctx, insn, dst_reg_fp_ok); dst 1232 arch/mips/net/ebpf_jit.c if (dst < 0) dst 1233 arch/mips/net/ebpf_jit.c return dst; dst 1248 arch/mips/net/ebpf_jit.c emit_instr(ctx, sltu, MIPS_R_AT, dst, MIPS_R_AT); dst 1250 arch/mips/net/ebpf_jit.c dst = MIPS_R_ZERO; dst 1254 arch/mips/net/ebpf_jit.c dst = ebpf_to_mips_reg(ctx, insn, dst_reg_fp_ok); dst 1255 arch/mips/net/ebpf_jit.c if (dst < 0) dst 1256 arch/mips/net/ebpf_jit.c return dst; dst 1263 arch/mips/net/ebpf_jit.c emit_instr(ctx, bbit0, dst, ffs((u32)insn->imm) - 1, b_off); dst 1270 arch/mips/net/ebpf_jit.c emit_instr(ctx, bbit1, dst, ffs((u32)insn->imm) - 1, b_off); dst 1276 arch/mips/net/ebpf_jit.c emit_instr(ctx, and, MIPS_R_AT, dst, MIPS_R_AT); dst 1278 arch/mips/net/ebpf_jit.c dst = MIPS_R_ZERO; dst 1301 arch/mips/net/ebpf_jit.c dst = ebpf_to_mips_reg(ctx, insn, dst_reg); dst 1302 arch/mips/net/ebpf_jit.c if (dst < 0) dst 1303 arch/mips/net/ebpf_jit.c return dst; dst 1305 arch/mips/net/ebpf_jit.c emit_const_to_reg(ctx, dst, t64); dst 1324 arch/mips/net/ebpf_jit.c dst = ebpf_to_mips_reg(ctx, insn, dst_reg); dst 1325 arch/mips/net/ebpf_jit.c if (dst < 0) dst 1326 arch/mips/net/ebpf_jit.c return dst; dst 1329 arch/mips/net/ebpf_jit.c emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32); dst 1333 arch/mips/net/ebpf_jit.c emit_instr(ctx, sll, 
dst, dst, 0); dst 1343 arch/mips/net/ebpf_jit.c emit_instr(ctx, wsbh, dst, dst); dst 1344 arch/mips/net/ebpf_jit.c emit_instr(ctx, andi, dst, dst, 0xffff); dst 1347 arch/mips/net/ebpf_jit.c emit_instr(ctx, wsbh, dst, dst); dst 1348 arch/mips/net/ebpf_jit.c emit_instr(ctx, rotr, dst, dst, 16); dst 1352 arch/mips/net/ebpf_jit.c emit_instr(ctx, dsbh, dst, dst); dst 1353 arch/mips/net/ebpf_jit.c emit_instr(ctx, dshd, dst, dst); dst 1364 arch/mips/net/ebpf_jit.c dst = MIPS_R_SP; dst 1367 arch/mips/net/ebpf_jit.c dst = ebpf_to_mips_reg(ctx, insn, dst_reg); dst 1368 arch/mips/net/ebpf_jit.c if (dst < 0) dst 1369 arch/mips/net/ebpf_jit.c return dst; dst 1375 arch/mips/net/ebpf_jit.c emit_instr(ctx, sb, MIPS_R_AT, mem_off, dst); dst 1378 arch/mips/net/ebpf_jit.c emit_instr(ctx, sh, MIPS_R_AT, mem_off, dst); dst 1381 arch/mips/net/ebpf_jit.c emit_instr(ctx, sw, MIPS_R_AT, mem_off, dst); dst 1384 arch/mips/net/ebpf_jit.c emit_instr(ctx, sd, MIPS_R_AT, mem_off, dst); dst 1403 arch/mips/net/ebpf_jit.c dst = ebpf_to_mips_reg(ctx, insn, dst_reg); dst 1404 arch/mips/net/ebpf_jit.c if (dst < 0) dst 1405 arch/mips/net/ebpf_jit.c return dst; dst 1408 arch/mips/net/ebpf_jit.c emit_instr(ctx, lbu, dst, mem_off, src); dst 1411 arch/mips/net/ebpf_jit.c emit_instr(ctx, lhu, dst, mem_off, src); dst 1414 arch/mips/net/ebpf_jit.c emit_instr(ctx, lw, dst, mem_off, src); dst 1417 arch/mips/net/ebpf_jit.c emit_instr(ctx, ld, dst, mem_off, src); dst 1430 arch/mips/net/ebpf_jit.c dst = MIPS_R_SP; dst 1433 arch/mips/net/ebpf_jit.c dst = ebpf_to_mips_reg(ctx, insn, dst_reg); dst 1434 arch/mips/net/ebpf_jit.c if (dst < 0) dst 1435 arch/mips/net/ebpf_jit.c return dst; dst 1449 arch/mips/net/ebpf_jit.c dst, mem_off); dst 1451 arch/mips/net/ebpf_jit.c dst = MIPS_R_T6; dst 1459 arch/mips/net/ebpf_jit.c emit_instr(ctx, ll, MIPS_R_T8, mem_off, dst); dst 1461 arch/mips/net/ebpf_jit.c emit_instr(ctx, sc, MIPS_R_T8, mem_off, dst); dst 1475 arch/mips/net/ebpf_jit.c emit_instr(ctx, lld, MIPS_R_T8, mem_off, dst); dst 1477 arch/mips/net/ebpf_jit.c emit_instr(ctx, scd, MIPS_R_T8, mem_off, dst); dst 1485 arch/mips/net/ebpf_jit.c emit_instr(ctx, sb, src, mem_off, dst); dst 1488 arch/mips/net/ebpf_jit.c emit_instr(ctx, sh, src, mem_off, dst); dst 1491 arch/mips/net/ebpf_jit.c emit_instr(ctx, sw, src, mem_off, dst); dst 1499 arch/mips/net/ebpf_jit.c emit_instr(ctx, sd, src, mem_off, dst); dst 72 arch/mips/pic32/pic32mzda/init.c char *dst = &(arcs_cmdline[0]); dst 78 arch/mips/pic32/pic32mzda/init.c *dst++ = *src++; dst 81 arch/mips/pic32/pic32mzda/init.c *dst++ = ' '; dst 84 arch/mips/pic32/pic32mzda/init.c --dst; dst 86 arch/mips/pic32/pic32mzda/init.c *dst = 0; dst 31 arch/nds32/include/asm/cacheflush.h unsigned long vaddr, void *dst, void *src, int len); dst 33 arch/nds32/include/asm/cacheflush.h unsigned long vaddr, void *dst, void *src, int len); dst 267 arch/nds32/mm/cacheflush.c unsigned long vaddr, void *dst, void *src, int len) dst 273 arch/nds32/mm/cacheflush.c dst = (void *)(vto | (vaddr & (PAGE_SIZE - 1))); dst 274 arch/nds32/mm/cacheflush.c memcpy(dst, src, len); dst 277 arch/nds32/mm/cacheflush.c start = (unsigned long)dst & ~(line_size - 1); dst 279 arch/nds32/mm/cacheflush.c ((unsigned long)dst + len + line_size - 1) & ~(line_size - dst 288 arch/nds32/mm/cacheflush.c unsigned long vaddr, void *dst, void *src, int len) dst 295 arch/nds32/mm/cacheflush.c memcpy(dst, src, len); dst 41 arch/nios2/include/asm/cacheflush.h void *dst, void *src, int len); dst 44 arch/nios2/include/asm/cacheflush.h void *dst, void *src, int len); dst 
15 arch/nios2/include/asm/checksum.h extern __wsum csum_partial_copy(const void *src, void *dst, int len, dst 17 arch/nios2/include/asm/checksum.h extern __wsum csum_partial_copy_from_user(const void __user *src, void *dst, dst 19 arch/nios2/include/asm/checksum.h #define csum_partial_copy_nocheck(src, dst, len, sum) \ dst 20 arch/nios2/include/asm/checksum.h csum_partial_copy((src), (dst), (len), (sum)) dst 15 arch/nios2/lib/memmove.c unsigned long dst, src; dst 21 arch/nios2/lib/memmove.c dst = (unsigned long) d; dst 24 arch/nios2/lib/memmove.c if ((count < 8) || ((dst ^ src) & 3)) dst 27 arch/nios2/lib/memmove.c if (dst & 1) { dst 28 arch/nios2/lib/memmove.c *(char *)dst++ = *(char *)src++; dst 31 arch/nios2/lib/memmove.c if (dst & 2) { dst 32 arch/nios2/lib/memmove.c *(short *)dst = *(short *)src; dst 34 arch/nios2/lib/memmove.c dst += 2; dst 38 arch/nios2/lib/memmove.c *(long *)dst = *(long *)src; dst 40 arch/nios2/lib/memmove.c dst += 4; dst 45 arch/nios2/lib/memmove.c *(char *)dst++ = *(char *)src++; dst 47 arch/nios2/lib/memmove.c dst = (unsigned long) d + count; dst 50 arch/nios2/lib/memmove.c if ((count < 8) || ((dst ^ src) & 3)) dst 53 arch/nios2/lib/memmove.c if (dst & 1) { dst 55 arch/nios2/lib/memmove.c dst--; dst 57 arch/nios2/lib/memmove.c *(char *)dst = *(char *)src; dst 59 arch/nios2/lib/memmove.c if (dst & 2) { dst 61 arch/nios2/lib/memmove.c dst -= 2; dst 63 arch/nios2/lib/memmove.c *(short *)dst = *(short *)src; dst 67 arch/nios2/lib/memmove.c dst -= 4; dst 69 arch/nios2/lib/memmove.c *(long *)dst = *(long *)src; dst 74 arch/nios2/lib/memmove.c dst--; dst 75 arch/nios2/lib/memmove.c *(char *)dst = *(char *)src; dst 254 arch/nios2/mm/cacheflush.c void *dst, void *src, int len) dst 257 arch/nios2/mm/cacheflush.c memcpy(dst, src, len); dst 265 arch/nios2/mm/cacheflush.c void *dst, void *src, int len) dst 268 arch/nios2/mm/cacheflush.c memcpy(dst, src, len); dst 269 arch/nios2/mm/cacheflush.c __flush_dcache((unsigned long)dst, (unsigned long)dst + len); dst 271 arch/nios2/mm/cacheflush.c __flush_icache((unsigned long)dst, (unsigned long)dst + len); dst 82 arch/openrisc/include/asm/cacheflush.h #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ dst 84 arch/openrisc/include/asm/cacheflush.h memcpy(dst, src, len); \ dst 89 arch/openrisc/include/asm/cacheflush.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 90 arch/openrisc/include/asm/cacheflush.h memcpy(dst, src, len) dst 71 arch/parisc/include/asm/cacheflush.h #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ dst 74 arch/parisc/include/asm/cacheflush.h memcpy(dst, src, len); \ dst 75 arch/parisc/include/asm/cacheflush.h flush_kernel_dcache_range_asm((unsigned long)dst, (unsigned long)dst + len); \ dst 78 arch/parisc/include/asm/cacheflush.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 81 arch/parisc/include/asm/cacheflush.h memcpy(dst, src, len); \ dst 34 arch/parisc/include/asm/checksum.h void *dst, int len, __wsum sum, int *errp); dst 193 arch/parisc/include/asm/checksum.h void __user *dst, dst 200 arch/parisc/include/asm/checksum.h if (copy_to_user(dst, src, len)) { dst 277 arch/parisc/include/asm/elf.h #define ELF_CORE_COPY_REGS(dst, pt) \ dst 278 arch/parisc/include/asm/elf.h memset(dst, 0, sizeof(dst)); /* don't leak any "random" bits */ \ dst 280 arch/parisc/include/asm/elf.h for (i = 0; i < 32; i++) dst[i] = pt->gr[i]; \ dst 281 arch/parisc/include/asm/elf.h for (i = 0; i < 8; i++) dst[32 + i] = pt->sr[i]; \ dst 283 arch/parisc/include/asm/elf.h dst[40] 
= pt->iaoq[0]; dst[41] = pt->iaoq[1]; \ dst 284 arch/parisc/include/asm/elf.h dst[42] = pt->iasq[0]; dst[43] = pt->iasq[1]; \ dst 285 arch/parisc/include/asm/elf.h dst[44] = pt->sar; dst[45] = pt->iir; \ dst 286 arch/parisc/include/asm/elf.h dst[46] = pt->isr; dst[47] = pt->ior; \ dst 287 arch/parisc/include/asm/elf.h dst[48] = mfctl(22); dst[49] = mfctl(0); \ dst 288 arch/parisc/include/asm/elf.h dst[50] = mfctl(24); dst[51] = mfctl(25); \ dst 289 arch/parisc/include/asm/elf.h dst[52] = mfctl(26); dst[53] = mfctl(27); \ dst 290 arch/parisc/include/asm/elf.h dst[54] = mfctl(28); dst[55] = mfctl(29); \ dst 291 arch/parisc/include/asm/elf.h dst[56] = mfctl(30); dst[57] = mfctl(31); \ dst 292 arch/parisc/include/asm/elf.h dst[58] = mfctl( 8); dst[59] = mfctl( 9); \ dst 293 arch/parisc/include/asm/elf.h dst[60] = mfctl(12); dst[61] = mfctl(13); \ dst 294 arch/parisc/include/asm/elf.h dst[62] = mfctl(10); dst[63] = mfctl(15); dst 106 arch/parisc/include/asm/hash.h #define _ASSIGN(dst, src, ...) asm("" : "=r" (dst) : "0" (src), ##__VA_ARGS__) dst 233 arch/parisc/include/asm/io.h void memcpy_fromio(void *dst, const volatile void __iomem *src, int count); dst 234 arch/parisc/include/asm/io.h void memcpy_toio(volatile void __iomem *dst, const void *src, int count); dst 294 arch/parisc/include/asm/io.h extern void insb (unsigned long port, void *dst, unsigned long count); dst 295 arch/parisc/include/asm/io.h extern void insw (unsigned long port, void *dst, unsigned long count); dst 296 arch/parisc/include/asm/io.h extern void insl (unsigned long port, void *dst, unsigned long count); dst 214 arch/parisc/include/asm/uaccess.h unsigned long __must_check raw_copy_to_user(void __user *dst, const void *src, dst 216 arch/parisc/include/asm/uaccess.h unsigned long __must_check raw_copy_from_user(void *dst, const void __user *src, dst 218 arch/parisc/include/asm/uaccess.h unsigned long __must_check raw_copy_in_user(void __user *dst, const void __user *src, dst 113 arch/parisc/lib/checksum.c __wsum csum_partial_copy_nocheck(const void *src, void *dst, dst 121 arch/parisc/lib/checksum.c memcpy(dst, src, len); dst 132 arch/parisc/lib/checksum.c void *dst, int len, dst 137 arch/parisc/lib/checksum.c missing = copy_from_user(dst, src, len); dst 139 arch/parisc/lib/checksum.c memset(dst + len - missing, 0, missing); dst 143 arch/parisc/lib/checksum.c return csum_partial(dst, len, sum); dst 19 arch/parisc/lib/io.c void memcpy_toio(volatile void __iomem *dst, const void *src, int count) dst 21 arch/parisc/lib/io.c if (((unsigned long)dst & 3) != ((unsigned long)src & 3)) dst 23 arch/parisc/lib/io.c while ((unsigned long)dst & 3) { dst 24 arch/parisc/lib/io.c writeb(*(char *)src, dst++); dst 29 arch/parisc/lib/io.c __raw_writel(*(u32 *)src, dst); dst 31 arch/parisc/lib/io.c dst += 4; dst 36 arch/parisc/lib/io.c writeb(*(char *)src, dst++); dst 54 arch/parisc/lib/io.c void memcpy_fromio(void *dst, const volatile void __iomem *src, int count) dst 57 arch/parisc/lib/io.c if ( (((unsigned long)dst ^ (unsigned long)src) & 1) || (count < 2) ) dst 60 arch/parisc/lib/io.c if ( (((unsigned long)dst ^ (unsigned long)src) & 2) || (count < 4) ) dst 65 arch/parisc/lib/io.c *(u8 *)dst = readb(src); dst 67 arch/parisc/lib/io.c dst++; dst 73 arch/parisc/lib/io.c *(u16 *)dst = __raw_readw(src); dst 75 arch/parisc/lib/io.c dst += 2; dst 80 arch/parisc/lib/io.c *(u32 *)dst = __raw_readl(src); dst 81 arch/parisc/lib/io.c dst += 4; dst 88 arch/parisc/lib/io.c *(u16 *)dst = __raw_readw(src); dst 90 arch/parisc/lib/io.c dst += 2; dst 96 
arch/parisc/lib/io.c *(char *)dst = readb(src); dst 98 arch/parisc/lib/io.c dst++; dst 127 arch/parisc/lib/io.c void insb (unsigned long port, void *dst, unsigned long count) dst 131 arch/parisc/lib/io.c p = (unsigned char *)dst; dst 167 arch/parisc/lib/io.c void insw (unsigned long port, void *dst, unsigned long count) dst 172 arch/parisc/lib/io.c p = (unsigned char *)dst; dst 239 arch/parisc/lib/io.c void insl (unsigned long port, void *dst, unsigned long count) dst 244 arch/parisc/lib/io.c p = (unsigned char *)dst; dst 249 arch/parisc/lib/io.c switch (((unsigned long) dst) & 0x3) dst 102 arch/parisc/lib/iomap.c static void ioport_read8r(void __iomem *addr, void *dst, unsigned long count) dst 104 arch/parisc/lib/iomap.c insb(ADDR2PORT(addr), dst, count); dst 107 arch/parisc/lib/iomap.c static void ioport_read16r(void __iomem *addr, void *dst, unsigned long count) dst 109 arch/parisc/lib/iomap.c insw(ADDR2PORT(addr), dst, count); dst 112 arch/parisc/lib/iomap.c static void ioport_read32r(void __iomem *addr, void *dst, unsigned long count) dst 114 arch/parisc/lib/iomap.c insl(ADDR2PORT(addr), dst, count); dst 223 arch/parisc/lib/iomap.c static void iomem_read8r(void __iomem *addr, void *dst, unsigned long count) dst 226 arch/parisc/lib/iomap.c *(u8 *)dst = __raw_readb(addr); dst 227 arch/parisc/lib/iomap.c dst++; dst 231 arch/parisc/lib/iomap.c static void iomem_read16r(void __iomem *addr, void *dst, unsigned long count) dst 234 arch/parisc/lib/iomap.c *(u16 *)dst = __raw_readw(addr); dst 235 arch/parisc/lib/iomap.c dst += 2; dst 239 arch/parisc/lib/iomap.c static void iomem_read32r(void __iomem *addr, void *dst, unsigned long count) dst 242 arch/parisc/lib/iomap.c *(u32 *)dst = __raw_readl(addr); dst 243 arch/parisc/lib/iomap.c dst += 4; dst 414 arch/parisc/lib/iomap.c void ioread8_rep(void __iomem *addr, void *dst, unsigned long count) dst 417 arch/parisc/lib/iomap.c iomap_ops[ADDR_TO_REGION(addr)]->read8r(addr, dst, count); dst 420 arch/parisc/lib/iomap.c *(u8 *)dst = *(u8 *)addr; dst 421 arch/parisc/lib/iomap.c dst++; dst 426 arch/parisc/lib/iomap.c void ioread16_rep(void __iomem *addr, void *dst, unsigned long count) dst 429 arch/parisc/lib/iomap.c iomap_ops[ADDR_TO_REGION(addr)]->read16r(addr, dst, count); dst 432 arch/parisc/lib/iomap.c *(u16 *)dst = *(u16 *)addr; dst 433 arch/parisc/lib/iomap.c dst += 2; dst 438 arch/parisc/lib/iomap.c void ioread32_rep(void __iomem *addr, void *dst, unsigned long count) dst 441 arch/parisc/lib/iomap.c iomap_ops[ADDR_TO_REGION(addr)]->read32r(addr, dst, count); dst 444 arch/parisc/lib/iomap.c *(u32 *)dst = *(u32 *)addr; dst 445 arch/parisc/lib/iomap.c dst += 4; dst 20 arch/parisc/lib/memcpy.c extern unsigned long pa_memcpy(void *dst, const void *src, dst 23 arch/parisc/lib/memcpy.c unsigned long raw_copy_to_user(void __user *dst, const void *src, dst 28 arch/parisc/lib/memcpy.c return pa_memcpy((void __force *)dst, src, len); dst 32 arch/parisc/lib/memcpy.c unsigned long raw_copy_from_user(void *dst, const void __user *src, dst 37 arch/parisc/lib/memcpy.c return pa_memcpy(dst, (void __force *)src, len); dst 41 arch/parisc/lib/memcpy.c unsigned long raw_copy_in_user(void __user *dst, const void __user *src, unsigned long len) dst 45 arch/parisc/lib/memcpy.c return pa_memcpy((void __force *)dst, (void __force *)src, len); dst 49 arch/parisc/lib/memcpy.c void * memcpy(void * dst,const void *src, size_t count) dst 53 arch/parisc/lib/memcpy.c pa_memcpy(dst, src, count); dst 54 arch/parisc/lib/memcpy.c return dst; dst 60 arch/parisc/lib/memcpy.c long 
probe_kernel_read(void *dst, const void *src, size_t size) dst 69 arch/parisc/lib/memcpy.c return __probe_kernel_read(dst, src, size); dst 90 arch/parisc/math-emu/dbl_float.h #define Dbl_rightshiftby1_withextent(leftb,right,dst) \ dst 91 arch/parisc/math-emu/dbl_float.h Extall(dst) = (Dallp2(leftb) << 31) | ((unsigned int)Extall(right) >> 1) | \ dst 53 arch/parisc/math-emu/sgl_float.h #define Sgl_rightshiftby1_withextent(left,right,dst) \ dst 181 arch/powerpc/crypto/aes-spe-glue.c static int ppc_ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, dst 190 arch/powerpc/crypto/aes-spe-glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 199 arch/powerpc/crypto/aes-spe-glue.c ppc_encrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr, dst 209 arch/powerpc/crypto/aes-spe-glue.c static int ppc_ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, dst 218 arch/powerpc/crypto/aes-spe-glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 227 arch/powerpc/crypto/aes-spe-glue.c ppc_decrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr, dst 237 arch/powerpc/crypto/aes-spe-glue.c static int ppc_cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, dst 246 arch/powerpc/crypto/aes-spe-glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 255 arch/powerpc/crypto/aes-spe-glue.c ppc_encrypt_cbc(walk.dst.virt.addr, walk.src.virt.addr, dst 265 arch/powerpc/crypto/aes-spe-glue.c static int ppc_cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, dst 274 arch/powerpc/crypto/aes-spe-glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 283 arch/powerpc/crypto/aes-spe-glue.c ppc_decrypt_cbc(walk.dst.virt.addr, walk.src.virt.addr, dst 293 arch/powerpc/crypto/aes-spe-glue.c static int ppc_ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst, dst 302 arch/powerpc/crypto/aes-spe-glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 312 arch/powerpc/crypto/aes-spe-glue.c ppc_crypt_ctr(walk.dst.virt.addr, walk.src.virt.addr, dst 323 arch/powerpc/crypto/aes-spe-glue.c static int ppc_xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst, dst 333 arch/powerpc/crypto/aes-spe-glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 343 arch/powerpc/crypto/aes-spe-glue.c ppc_encrypt_xts(walk.dst.virt.addr, walk.src.virt.addr, dst 354 arch/powerpc/crypto/aes-spe-glue.c static int ppc_xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst, dst 364 arch/powerpc/crypto/aes-spe-glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 374 arch/powerpc/crypto/aes-spe-glue.c ppc_decrypt_xts(walk.dst.virt.addr, walk.src.virt.addr, dst 84 arch/powerpc/crypto/md5-glue.c __le32 *dst = (__le32 *)out; dst 99 arch/powerpc/crypto/md5-glue.c dst[0] = cpu_to_le32(sctx->hash[0]); dst 100 arch/powerpc/crypto/md5-glue.c dst[1] = cpu_to_le32(sctx->hash[1]); dst 101 arch/powerpc/crypto/md5-glue.c dst[2] = cpu_to_le32(sctx->hash[2]); dst 102 arch/powerpc/crypto/md5-glue.c dst[3] = cpu_to_le32(sctx->hash[3]); dst 124 arch/powerpc/crypto/sha1-spe-glue.c __be32 *dst = (__be32 *)out; dst 144 arch/powerpc/crypto/sha1-spe-glue.c dst[0] = cpu_to_be32(sctx->state[0]); dst 145 arch/powerpc/crypto/sha1-spe-glue.c dst[1] = cpu_to_be32(sctx->state[1]); dst 146 arch/powerpc/crypto/sha1-spe-glue.c dst[2] = cpu_to_be32(sctx->state[2]); dst 147 arch/powerpc/crypto/sha1-spe-glue.c dst[3] = cpu_to_be32(sctx->state[3]); dst 148 arch/powerpc/crypto/sha1-spe-glue.c dst[4] = cpu_to_be32(sctx->state[4]); dst 77 arch/powerpc/crypto/sha1.c __be32 *dst = (__be32 *)out; dst 94 
arch/powerpc/crypto/sha1.c dst[i] = cpu_to_be32(sctx->state[i]); dst 146 arch/powerpc/crypto/sha256-spe-glue.c __be32 *dst = (__be32 *)out; dst 166 arch/powerpc/crypto/sha256-spe-glue.c dst[0] = cpu_to_be32(sctx->state[0]); dst 167 arch/powerpc/crypto/sha256-spe-glue.c dst[1] = cpu_to_be32(sctx->state[1]); dst 168 arch/powerpc/crypto/sha256-spe-glue.c dst[2] = cpu_to_be32(sctx->state[2]); dst 169 arch/powerpc/crypto/sha256-spe-glue.c dst[3] = cpu_to_be32(sctx->state[3]); dst 170 arch/powerpc/crypto/sha256-spe-glue.c dst[4] = cpu_to_be32(sctx->state[4]); dst 171 arch/powerpc/crypto/sha256-spe-glue.c dst[5] = cpu_to_be32(sctx->state[5]); dst 172 arch/powerpc/crypto/sha256-spe-glue.c dst[6] = cpu_to_be32(sctx->state[6]); dst 173 arch/powerpc/crypto/sha256-spe-glue.c dst[7] = cpu_to_be32(sctx->state[7]); dst 182 arch/powerpc/crypto/sha256-spe-glue.c __be32 *dst = (__be32 *)out; dst 187 arch/powerpc/crypto/sha256-spe-glue.c dst[0] = D[0]; dst 188 arch/powerpc/crypto/sha256-spe-glue.c dst[1] = D[1]; dst 189 arch/powerpc/crypto/sha256-spe-glue.c dst[2] = D[2]; dst 190 arch/powerpc/crypto/sha256-spe-glue.c dst[3] = D[3]; dst 191 arch/powerpc/crypto/sha256-spe-glue.c dst[4] = D[4]; dst 192 arch/powerpc/crypto/sha256-spe-glue.c dst[5] = D[5]; dst 193 arch/powerpc/crypto/sha256-spe-glue.c dst[6] = D[6]; dst 22 arch/powerpc/include/asm/async_tx.h #define async_tx_find_channel(dep, type, dst, dst_count, src, src_count, len) \ dst 118 arch/powerpc/include/asm/cacheflush.h #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ dst 120 arch/powerpc/include/asm/cacheflush.h memcpy(dst, src, len); \ dst 123 arch/powerpc/include/asm/cacheflush.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 124 arch/powerpc/include/asm/cacheflush.h memcpy(dst, src, len) dst 21 arch/powerpc/include/asm/checksum.h extern __wsum csum_partial_copy_generic(const void *src, void *dst, dst 26 arch/powerpc/include/asm/checksum.h extern __wsum csum_and_copy_from_user(const void __user *src, void *dst, dst 29 arch/powerpc/include/asm/checksum.h extern __wsum csum_and_copy_to_user(const void *src, void __user *dst, dst 32 arch/powerpc/include/asm/checksum.h #define csum_partial_copy_nocheck(src, dst, len, sum) \ dst 33 arch/powerpc/include/asm/checksum.h csum_partial_copy_generic((src), (dst), (len), (sum), NULL, NULL) dst 700 arch/powerpc/include/asm/cpm2.h uint dst; /* destination data buffer pointer */ dst 551 arch/powerpc/include/asm/io.h #define __do_memcpy_toio(dst, src, n) \ dst 552 arch/powerpc/include/asm/io.h _memcpy_toio(PCI_FIX_ADDR(dst), src, n) dst 555 arch/powerpc/include/asm/io.h #define __do_memcpy_fromio(dst, src, n) \ dst 556 arch/powerpc/include/asm/io.h eeh_memcpy_fromio(dst, PCI_FIX_ADDR(src), n) dst 558 arch/powerpc/include/asm/io.h #define __do_memcpy_fromio(dst, src, n) \ dst 559 arch/powerpc/include/asm/io.h _memcpy_fromio(dst,PCI_FIX_ADDR(src),n) dst 754 arch/powerpc/include/asm/io.h #define mmio_insb(addr, dst, count) readsb(addr, dst, count) dst 755 arch/powerpc/include/asm/io.h #define mmio_insw(addr, dst, count) readsw(addr, dst, count) dst 756 arch/powerpc/include/asm/io.h #define mmio_insl(addr, dst, count) readsl(addr, dst, count) dst 14 arch/powerpc/include/asm/kvm_fpu.h extern void fps_fres(u64 *fpscr, u32 *dst, u32 *src1); dst 15 arch/powerpc/include/asm/kvm_fpu.h extern void fps_frsqrte(u64 *fpscr, u32 *dst, u32 *src1); dst 16 arch/powerpc/include/asm/kvm_fpu.h extern void fps_fsqrts(u64 *fpscr, u32 *dst, u32 *src1); dst 18 arch/powerpc/include/asm/kvm_fpu.h extern void 
fps_fadds(u64 *fpscr, u32 *dst, u32 *src1, u32 *src2); dst 19 arch/powerpc/include/asm/kvm_fpu.h extern void fps_fdivs(u64 *fpscr, u32 *dst, u32 *src1, u32 *src2); dst 20 arch/powerpc/include/asm/kvm_fpu.h extern void fps_fmuls(u64 *fpscr, u32 *dst, u32 *src1, u32 *src2); dst 21 arch/powerpc/include/asm/kvm_fpu.h extern void fps_fsubs(u64 *fpscr, u32 *dst, u32 *src1, u32 *src2); dst 23 arch/powerpc/include/asm/kvm_fpu.h extern void fps_fmadds(u64 *fpscr, u32 *dst, u32 *src1, u32 *src2, dst 25 arch/powerpc/include/asm/kvm_fpu.h extern void fps_fmsubs(u64 *fpscr, u32 *dst, u32 *src1, u32 *src2, dst 27 arch/powerpc/include/asm/kvm_fpu.h extern void fps_fnmadds(u64 *fpscr, u32 *dst, u32 *src1, u32 *src2, dst 29 arch/powerpc/include/asm/kvm_fpu.h extern void fps_fnmsubs(u64 *fpscr, u32 *dst, u32 *src1, u32 *src2, dst 31 arch/powerpc/include/asm/kvm_fpu.h extern void fps_fsel(u64 *fpscr, u32 *dst, u32 *src1, u32 *src2, dst 35 arch/powerpc/include/asm/kvm_fpu.h u64 *dst, u64 *src1); dst 37 arch/powerpc/include/asm/kvm_fpu.h u64 *dst, u64 *src1, u64 *src2); dst 39 arch/powerpc/include/asm/kvm_fpu.h u64 *dst, u64 *src1, u64 *src2, u64 *src3); dst 42 arch/powerpc/include/asm/string.h #define memcpy(dst, src, len) __memcpy(dst, src, len) dst 43 arch/powerpc/include/asm/string.h #define memmove(dst, src, len) __memmove(dst, src, len) dst 58 arch/powerpc/include/asm/string.h extern int memcpy_mcsafe(void *dst, const void *src, __kernel_size_t sz); dst 60 arch/powerpc/include/asm/thread_info.h extern int arch_dup_task_struct(struct task_struct *dst, struct task_struct *src); dst 423 arch/powerpc/include/asm/uaccess.h extern long strncpy_from_user(char *dst, const char __user *src, long count); dst 426 arch/powerpc/include/asm/uaccess.h extern long __copy_from_user_flushcache(void *dst, const void __user *src, dst 363 arch/powerpc/kernel/btext.c unsigned int *dst = (unsigned int *)calc_base(0,0); dst 373 arch/powerpc/kernel/btext.c unsigned int *dst_ptr = dst; dst 377 arch/powerpc/kernel/btext.c dst += (dispDeviceRowBytes >> 2); dst 381 arch/powerpc/kernel/btext.c unsigned int *dst_ptr = dst; dst 384 arch/powerpc/kernel/btext.c dst += (dispDeviceRowBytes >> 2); dst 142 arch/powerpc/kernel/iomap.c void ioread8_rep(void __iomem *addr, void *dst, unsigned long count) dst 144 arch/powerpc/kernel/iomap.c readsb(addr, dst, count); dst 146 arch/powerpc/kernel/iomap.c void ioread16_rep(void __iomem *addr, void *dst, unsigned long count) dst 148 arch/powerpc/kernel/iomap.c readsw(addr, dst, count); dst 150 arch/powerpc/kernel/iomap.c void ioread32_rep(void __iomem *addr, void *dst, unsigned long count) dst 152 arch/powerpc/kernel/iomap.c readsl(addr, dst, count); dst 1541 arch/powerpc/kernel/process.c int arch_dup_task_struct(struct task_struct *dst, struct task_struct *src) dst 1556 arch/powerpc/kernel/process.c *dst = *src; dst 1558 arch/powerpc/kernel/process.c clear_task_ebb(dst); dst 15 arch/powerpc/kernel/vecemu.c extern void vaddfp(vector128 *dst, vector128 *a, vector128 *b); dst 16 arch/powerpc/kernel/vecemu.c extern void vsubfp(vector128 *dst, vector128 *a, vector128 *b); dst 17 arch/powerpc/kernel/vecemu.c extern void vmaddfp(vector128 *dst, vector128 *a, vector128 *b, vector128 *c); dst 18 arch/powerpc/kernel/vecemu.c extern void vnmsubfp(vector128 *dst, vector128 *a, vector128 *b, vector128 *c); dst 19 arch/powerpc/kernel/vecemu.c extern void vrefp(vector128 *dst, vector128 *src); dst 20 arch/powerpc/kernel/vecemu.c extern void vrsqrtefp(vector128 *dst, vector128 *src); dst 21 
arch/powerpc/kernel/vecemu.c extern void vexptep(vector128 *dst, vector128 *src); dst 494 arch/powerpc/kvm/book3s_paired_singles.c u32 *dst, u32 *src1, dst 542 arch/powerpc/kvm/book3s_paired_singles.c u32 *dst, u32 *src1, dst 593 arch/powerpc/kvm/book3s_paired_singles.c u32 *dst, u32 *src1)) dst 224 arch/powerpc/kvm/mpic.c struct irq_dest dst[MAX_CPU]; dst 242 arch/powerpc/kvm/mpic.c static void mpic_irq_raise(struct openpic *opp, struct irq_dest *dst, dst 249 arch/powerpc/kvm/mpic.c if (!dst->vcpu) { dst 251 arch/powerpc/kvm/mpic.c __func__, (int)(dst - &opp->dst[0])); dst 255 arch/powerpc/kvm/mpic.c pr_debug("%s: cpu %d output %d\n", __func__, dst->vcpu->arch.irq_cpu_id, dst 261 arch/powerpc/kvm/mpic.c kvm_vcpu_ioctl_interrupt(dst->vcpu, &irq); dst 264 arch/powerpc/kvm/mpic.c static void mpic_irq_lower(struct openpic *opp, struct irq_dest *dst, dst 267 arch/powerpc/kvm/mpic.c if (!dst->vcpu) { dst 269 arch/powerpc/kvm/mpic.c __func__, (int)(dst - &opp->dst[0])); dst 273 arch/powerpc/kvm/mpic.c pr_debug("%s: cpu %d output %d\n", __func__, dst->vcpu->arch.irq_cpu_id, dst 279 arch/powerpc/kvm/mpic.c kvmppc_core_dequeue_external(dst->vcpu); dst 327 arch/powerpc/kvm/mpic.c struct irq_dest *dst; dst 331 arch/powerpc/kvm/mpic.c dst = &opp->dst[n_CPU]; dst 340 arch/powerpc/kvm/mpic.c dst->outputs_active[src->output]); dst 348 arch/powerpc/kvm/mpic.c dst->outputs_active[src->output]++ == 0) { dst 351 arch/powerpc/kvm/mpic.c mpic_irq_raise(opp, dst, src->output); dst 355 arch/powerpc/kvm/mpic.c --dst->outputs_active[src->output] == 0) { dst 358 arch/powerpc/kvm/mpic.c mpic_irq_lower(opp, dst, src->output); dst 371 arch/powerpc/kvm/mpic.c IRQ_setbit(&dst->raised, n_IRQ); dst 373 arch/powerpc/kvm/mpic.c IRQ_resetbit(&dst->raised, n_IRQ); dst 375 arch/powerpc/kvm/mpic.c IRQ_check(opp, &dst->raised); dst 377 arch/powerpc/kvm/mpic.c if (active && priority <= dst->ctpr) { dst 379 arch/powerpc/kvm/mpic.c __func__, n_IRQ, priority, dst->ctpr, n_CPU); dst 384 arch/powerpc/kvm/mpic.c if (IRQ_get_next(opp, &dst->servicing) >= 0 && dst 385 arch/powerpc/kvm/mpic.c priority <= dst->servicing.priority) { dst 387 arch/powerpc/kvm/mpic.c __func__, n_IRQ, dst->servicing.next, n_CPU); dst 390 arch/powerpc/kvm/mpic.c __func__, n_CPU, n_IRQ, dst->raised.next); dst 391 arch/powerpc/kvm/mpic.c mpic_irq_raise(opp, dst, ILR_INTTGT_INT); dst 394 arch/powerpc/kvm/mpic.c IRQ_get_next(opp, &dst->servicing); dst 395 arch/powerpc/kvm/mpic.c if (dst->raised.priority > dst->ctpr && dst 396 arch/powerpc/kvm/mpic.c dst->raised.priority > dst->servicing.priority) { dst 398 arch/powerpc/kvm/mpic.c __func__, n_IRQ, dst->raised.next, dst 399 arch/powerpc/kvm/mpic.c dst->raised.priority, dst->ctpr, dst 400 arch/powerpc/kvm/mpic.c dst->servicing.priority, n_CPU); dst 404 arch/powerpc/kvm/mpic.c __func__, n_IRQ, dst->ctpr, dst 405 arch/powerpc/kvm/mpic.c dst->servicing.priority, n_CPU); dst 406 arch/powerpc/kvm/mpic.c mpic_irq_lower(opp, dst, ILR_INTTGT_INT); dst 547 arch/powerpc/kvm/mpic.c opp->dst[i].ctpr = 15; dst 548 arch/powerpc/kvm/mpic.c memset(&opp->dst[i].raised, 0, sizeof(struct irq_queue)); dst 549 arch/powerpc/kvm/mpic.c opp->dst[i].raised.next = -1; dst 550 arch/powerpc/kvm/mpic.c memset(&opp->dst[i].servicing, 0, sizeof(struct irq_queue)); dst 551 arch/powerpc/kvm/mpic.c opp->dst[i].servicing.next = -1; dst 1029 arch/powerpc/kvm/mpic.c struct irq_dest *dst; dst 1041 arch/powerpc/kvm/mpic.c dst = &opp->dst[idx]; dst 1055 arch/powerpc/kvm/mpic.c dst->ctpr = val & 0x0000000F; dst 1058 arch/powerpc/kvm/mpic.c __func__, idx, 
dst->ctpr, dst->raised.priority, dst 1059 arch/powerpc/kvm/mpic.c dst->servicing.priority); dst 1061 arch/powerpc/kvm/mpic.c if (dst->raised.priority <= dst->ctpr) { dst 1064 arch/powerpc/kvm/mpic.c mpic_irq_lower(opp, dst, ILR_INTTGT_INT); dst 1065 arch/powerpc/kvm/mpic.c } else if (dst->raised.priority > dst->servicing.priority) { dst 1067 arch/powerpc/kvm/mpic.c __func__, idx, dst->raised.next); dst 1068 arch/powerpc/kvm/mpic.c mpic_irq_raise(opp, dst, ILR_INTTGT_INT); dst 1082 arch/powerpc/kvm/mpic.c s_IRQ = IRQ_get_next(opp, &dst->servicing); dst 1090 arch/powerpc/kvm/mpic.c IRQ_resetbit(&dst->servicing, s_IRQ); dst 1094 arch/powerpc/kvm/mpic.c s_IRQ = IRQ_get_next(opp, &dst->servicing); dst 1096 arch/powerpc/kvm/mpic.c n_IRQ = IRQ_get_next(opp, &dst->raised); dst 1100 arch/powerpc/kvm/mpic.c IVPR_PRIORITY(src->ivpr) > dst->servicing.priority)) { dst 1103 arch/powerpc/kvm/mpic.c mpic_irq_raise(opp, dst, ILR_INTTGT_INT); dst 1127 arch/powerpc/kvm/mpic.c static uint32_t openpic_iack(struct openpic *opp, struct irq_dest *dst, dst 1134 arch/powerpc/kvm/mpic.c mpic_irq_lower(opp, dst, ILR_INTTGT_INT); dst 1136 arch/powerpc/kvm/mpic.c irq = IRQ_get_next(opp, &dst->raised); dst 1145 arch/powerpc/kvm/mpic.c !(IVPR_PRIORITY(src->ivpr) > dst->ctpr)) { dst 1147 arch/powerpc/kvm/mpic.c __func__, irq, dst->ctpr, src->ivpr); dst 1152 arch/powerpc/kvm/mpic.c IRQ_setbit(&dst->servicing, irq); dst 1160 arch/powerpc/kvm/mpic.c IRQ_resetbit(&dst->raised, irq); dst 1186 arch/powerpc/kvm/mpic.c kvmppc_set_epr(vcpu, openpic_iack(opp, &opp->dst[cpu], cpu)); dst 1195 arch/powerpc/kvm/mpic.c struct irq_dest *dst; dst 1207 arch/powerpc/kvm/mpic.c dst = &opp->dst[idx]; dst 1211 arch/powerpc/kvm/mpic.c retval = dst->ctpr; dst 1217 arch/powerpc/kvm/mpic.c retval = openpic_iack(opp, dst, idx); dst 1748 arch/powerpc/kvm/mpic.c if (opp->dst[cpu].vcpu) { dst 1757 arch/powerpc/kvm/mpic.c opp->dst[cpu].vcpu = vcpu; dst 1780 arch/powerpc/kvm/mpic.c BUG_ON(!opp->dst[vcpu->arch.irq_cpu_id].vcpu); dst 1782 arch/powerpc/kvm/mpic.c opp->dst[vcpu->arch.irq_cpu_id].vcpu = NULL; dst 14 arch/powerpc/lib/checksum_wrappers.c __wsum csum_and_copy_from_user(const void __user *src, void *dst, dst 35 arch/powerpc/lib/checksum_wrappers.c csum = csum_partial_copy_generic((void __force *)src, dst, dst 39 arch/powerpc/lib/checksum_wrappers.c int missing = __copy_from_user(dst, src, len); dst 42 arch/powerpc/lib/checksum_wrappers.c memset(dst + len - missing, 0, missing); dst 48 arch/powerpc/lib/checksum_wrappers.c csum = csum_partial(dst, len, sum); dst 57 arch/powerpc/lib/checksum_wrappers.c __wsum csum_and_copy_to_user(const void *src, void __user *dst, int len, dst 63 arch/powerpc/lib/checksum_wrappers.c allow_write_to_user(dst, len); dst 72 arch/powerpc/lib/checksum_wrappers.c if (unlikely((len < 0) || !access_ok(dst, len))) { dst 78 arch/powerpc/lib/checksum_wrappers.c csum = csum_partial_copy_generic(src, (void __force *)dst, dst 84 arch/powerpc/lib/checksum_wrappers.c if (copy_to_user(dst, src, len)) { dst 91 arch/powerpc/lib/checksum_wrappers.c prevent_write_to_user(dst, len); dst 394 arch/powerpc/mm/slice.c static inline void slice_copy_mask(struct slice_mask *dst, dst 397 arch/powerpc/mm/slice.c dst->low_slices = src->low_slices; dst 400 arch/powerpc/mm/slice.c bitmap_copy(dst->high_slices, src->high_slices, SLICE_NUM_HIGH); dst 403 arch/powerpc/mm/slice.c static inline void slice_or_mask(struct slice_mask *dst, dst 407 arch/powerpc/mm/slice.c dst->low_slices = src1->low_slices | src2->low_slices; dst 410 arch/powerpc/mm/slice.c 
bitmap_or(dst->high_slices, src1->high_slices, src2->high_slices, SLICE_NUM_HIGH); dst 413 arch/powerpc/mm/slice.c static inline void slice_andnot_mask(struct slice_mask *dst, dst 417 arch/powerpc/mm/slice.c dst->low_slices = src1->low_slices & ~src2->low_slices; dst 420 arch/powerpc/mm/slice.c bitmap_andnot(dst->high_slices, src1->high_slices, src2->high_slices, SLICE_NUM_HIGH); dst 224 arch/powerpc/platforms/512x/mpc512x_shared.c unsigned long dst; dst 253 arch/powerpc/platforms/512x/mpc512x_shared.c dst = (unsigned long)&diu_shared_fb.ad0; dst 254 arch/powerpc/platforms/512x/mpc512x_shared.c flush_dcache_range(dst, dst + sizeof(struct diu_ad) - 1); dst 273 arch/powerpc/platforms/512x/mpc512x_shared.c dst = (unsigned long)&diu_shared_fb.gamma; dst 274 arch/powerpc/platforms/512x/mpc512x_shared.c flush_dcache_range(dst, dst + sizeof(diu_shared_fb.gamma) - 1); dst 72 arch/powerpc/platforms/powermac/time.c int dst; dst 79 arch/powerpc/platforms/powermac/time.c dst = ((pmac_xpram_read(PMAC_XPRAM_MACHINE_LOC + 0x8) & 0x80) != 0); dst 81 arch/powerpc/platforms/powermac/time.c dst ? "on" : "off"); dst 62 arch/riscv/include/asm/cacheflush.h #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ dst 64 arch/riscv/include/asm/cacheflush.h memcpy(dst, src, len); \ dst 67 arch/riscv/include/asm/cacheflush.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 68 arch/riscv/include/asm/cacheflush.h memcpy(dst, src, len) dst 95 arch/riscv/kernel/process.c int arch_dup_task_struct(struct task_struct *dst, struct task_struct *src) dst 98 arch/riscv/kernel/process.c *dst = *src; dst 10 arch/s390/boot/pgm_check_info.c #define add_val_as_hex(dst, val) \ dst 11 arch/s390/boot/pgm_check_info.c __add_val_as_hex(dst, (const unsigned char *)&val, sizeof(val)) dst 13 arch/s390/boot/pgm_check_info.c static char *__add_val_as_hex(char *dst, const unsigned char *src, size_t count) dst 16 arch/s390/boot/pgm_check_info.c dst = hex_byte_pack(dst, *src++); dst 17 arch/s390/boot/pgm_check_info.c return dst; dst 20 arch/s390/boot/pgm_check_info.c static char *add_str(char *dst, char *src) dst 22 arch/s390/boot/pgm_check_info.c strcpy(dst, src); dst 23 arch/s390/boot/pgm_check_info.c return dst + strlen(dst); dst 202 arch/s390/crypto/aes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 212 arch/s390/crypto/aes_s390.c skcipher_request_set_crypt(req, src, dst, nbytes, desc->info); dst 221 arch/s390/crypto/aes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 231 arch/s390/crypto/aes_s390.c skcipher_request_set_crypt(req, src, dst, nbytes, desc->info); dst 270 arch/s390/crypto/aes_s390.c walk->dst.virt.addr, walk->src.virt.addr, n); dst 278 arch/s390/crypto/aes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 285 arch/s390/crypto/aes_s390.c return fallback_blk_enc(desc, dst, src, nbytes); dst 287 arch/s390/crypto/aes_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 292 arch/s390/crypto/aes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 299 arch/s390/crypto/aes_s390.c return fallback_blk_dec(desc, dst, src, nbytes); dst 301 arch/s390/crypto/aes_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 391 arch/s390/crypto/aes_s390.c walk->dst.virt.addr, walk->src.virt.addr, n); dst 399 arch/s390/crypto/aes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 406 arch/s390/crypto/aes_s390.c return fallback_blk_enc(desc, dst, src, nbytes); dst 408 arch/s390/crypto/aes_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 413 
arch/s390/crypto/aes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 420 arch/s390/crypto/aes_s390.c return fallback_blk_dec(desc, dst, src, nbytes); dst 422 arch/s390/crypto/aes_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 471 arch/s390/crypto/aes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 481 arch/s390/crypto/aes_s390.c skcipher_request_set_crypt(req, src, dst, nbytes, desc->info); dst 490 arch/s390/crypto/aes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 500 arch/s390/crypto/aes_s390.c skcipher_request_set_crypt(req, src, dst, nbytes, desc->info); dst 576 arch/s390/crypto/aes_s390.c walk->dst.virt.addr, walk->src.virt.addr, n); dst 583 arch/s390/crypto/aes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 593 arch/s390/crypto/aes_s390.c return xts_fallback_encrypt(desc, dst, src, nbytes); dst 595 arch/s390/crypto/aes_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 600 arch/s390/crypto/aes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 610 arch/s390/crypto/aes_s390.c return xts_fallback_decrypt(desc, dst, src, nbytes); dst 612 arch/s390/crypto/aes_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 716 arch/s390/crypto/aes_s390.c walk->dst.virt.addr, walk->src.virt.addr, dst 733 arch/s390/crypto/aes_s390.c memcpy(walk->dst.virt.addr, buf, nbytes); dst 742 arch/s390/crypto/aes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 749 arch/s390/crypto/aes_s390.c return fallback_blk_enc(desc, dst, src, nbytes); dst 751 arch/s390/crypto/aes_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 756 arch/s390/crypto/aes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 763 arch/s390/crypto/aes_s390.c return fallback_blk_dec(desc, dst, src, nbytes); dst 765 arch/s390/crypto/aes_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 1036 arch/s390/crypto/aes_s390.c gcm_walk_start(&gw_out, req->dst, len); dst 1083 arch/s390/crypto/aes_s390.c scatterwalk_map_and_copy(param.t, req->dst, len, taglen, 1); dst 93 arch/s390/crypto/des_s390.c cpacf_km(fc, ctx->key, walk->dst.virt.addr, dst 117 arch/s390/crypto/des_s390.c cpacf_kmc(fc, &param, walk->dst.virt.addr, dst 126 arch/s390/crypto/des_s390.c struct scatterlist *dst, struct scatterlist *src, dst 131 arch/s390/crypto/des_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 136 arch/s390/crypto/des_s390.c struct scatterlist *dst, struct scatterlist *src, dst 141 arch/s390/crypto/des_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 166 arch/s390/crypto/des_s390.c struct scatterlist *dst, struct scatterlist *src, dst 171 arch/s390/crypto/des_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 176 arch/s390/crypto/des_s390.c struct scatterlist *dst, struct scatterlist *src, dst 181 arch/s390/crypto/des_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 235 arch/s390/crypto/des_s390.c static void des3_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 239 arch/s390/crypto/des_s390.c cpacf_km(CPACF_KM_TDEA_192, ctx->key, dst, src, DES_BLOCK_SIZE); dst 242 arch/s390/crypto/des_s390.c static void des3_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 247 arch/s390/crypto/des_s390.c ctx->key, dst, src, DES_BLOCK_SIZE); dst 270 arch/s390/crypto/des_s390.c struct scatterlist *dst, struct scatterlist *src, dst 275 arch/s390/crypto/des_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 280 arch/s390/crypto/des_s390.c struct scatterlist *dst, struct scatterlist *src, dst 285
arch/s390/crypto/des_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 311 arch/s390/crypto/des_s390.c struct scatterlist *dst, struct scatterlist *src, dst 316 arch/s390/crypto/des_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 321 arch/s390/crypto/des_s390.c struct scatterlist *dst, struct scatterlist *src, dst 326 arch/s390/crypto/des_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 383 arch/s390/crypto/des_s390.c cpacf_kmctr(fc, ctx->key, walk->dst.virt.addr, dst 397 arch/s390/crypto/des_s390.c memcpy(walk->dst.virt.addr, buf, nbytes); dst 405 arch/s390/crypto/des_s390.c struct scatterlist *dst, struct scatterlist *src, dst 410 arch/s390/crypto/des_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 415 arch/s390/crypto/des_s390.c struct scatterlist *dst, struct scatterlist *src, dst 420 arch/s390/crypto/des_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 446 arch/s390/crypto/des_s390.c struct scatterlist *dst, struct scatterlist *src, dst 451 arch/s390/crypto/des_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 456 arch/s390/crypto/des_s390.c struct scatterlist *dst, struct scatterlist *src, dst 461 arch/s390/crypto/des_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 109 arch/s390/crypto/ghash_s390.c static int ghash_final(struct shash_desc *desc, u8 *dst) dst 116 arch/s390/crypto/ghash_s390.c memcpy(dst, dctx->icv, GHASH_BLOCK_SIZE); dst 173 arch/s390/crypto/paes_s390.c walk->dst.virt.addr, walk->src.virt.addr, n); dst 185 arch/s390/crypto/paes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 190 arch/s390/crypto/paes_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 195 arch/s390/crypto/paes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 200 arch/s390/crypto/paes_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 297 arch/s390/crypto/paes_s390.c walk->dst.virt.addr, walk->src.virt.addr, n); dst 311 arch/s390/crypto/paes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 316 arch/s390/crypto/paes_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 321 arch/s390/crypto/paes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 326 arch/s390/crypto/paes_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 467 arch/s390/crypto/paes_s390.c walk->dst.virt.addr, walk->src.virt.addr, n); dst 480 arch/s390/crypto/paes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 485 arch/s390/crypto/paes_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 490 arch/s390/crypto/paes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 495 arch/s390/crypto/paes_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 608 arch/s390/crypto/paes_s390.c walk->dst.virt.addr, walk->src.virt.addr, dst 640 arch/s390/crypto/paes_s390.c memcpy(walk->dst.virt.addr, buf, nbytes); dst 649 arch/s390/crypto/paes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 654 arch/s390/crypto/paes_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 659 arch/s390/crypto/paes_s390.c struct scatterlist *dst, struct scatterlist *src, dst 664 arch/s390/crypto/paes_s390.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 51 arch/s390/include/asm/checksum.h csum_partial_copy_from_user(const void __user *src, void *dst, dst 55 arch/s390/include/asm/checksum.h if (unlikely(copy_from_user(dst, src, len))) dst 57 arch/s390/include/asm/checksum.h return csum_partial(dst, len, sum); dst 62 arch/s390/include/asm/checksum.h csum_partial_copy_nocheck (const void *src, void *dst, int len, 
__wsum sum) dst 64 arch/s390/include/asm/checksum.h memcpy(dst,src,len); dst 65 arch/s390/include/asm/checksum.h return csum_partial(dst, len, sum); dst 261 arch/s390/include/asm/cpacf.h : [src] "+a" (r2), [len] "+d" (r3), [dst] "+a" (r4) dst 291 arch/s390/include/asm/cpacf.h : [src] "+a" (r2), [len] "+d" (r3), [dst] "+a" (r4) dst 399 arch/s390/include/asm/cpacf.h [dst] "+a" (r4), [ctr] "+a" (r6) dst 430 arch/s390/include/asm/cpacf.h : [dst] "+a" (r2), [dlen] "+d" (r3) dst 526 arch/s390/include/asm/cpacf.h : [dst] "+a" (r6), [src] "+a" (r2), [slen] "+d" (r3), dst 59 arch/s390/include/asm/io.h #define memcpy_fromio(dst, src, count) zpci_memcpy_fromio(dst, src, count) dst 60 arch/s390/include/asm/io.h #define memcpy_toio(dst, src, count) zpci_memcpy_toio(dst, src, count) dst 61 arch/s390/include/asm/io.h #define memset_io(dst, val, count) zpci_memset_io(dst, val, count) dst 22 arch/s390/include/asm/kasan.h extern void kasan_copy_shadow(pgd_t *dst); dst 26 arch/s390/include/asm/kasan.h static inline void kasan_copy_shadow(pgd_t *dst) { } dst 42 arch/s390/include/asm/os_info.h int copy_oldmem_kernel(void *dst, void *src, size_t count); dst 71 arch/s390/include/asm/pci_io.h static inline int zpci_write_single(volatile void __iomem *dst, const void *src, dst 93 arch/s390/include/asm/pci_io.h return zpci_store(dst, val, len); dst 96 arch/s390/include/asm/pci_io.h static inline int zpci_read_single(void *dst, const volatile void __iomem *src, dst 108 arch/s390/include/asm/pci_io.h *((u8 *) dst) = (u8) data; dst 111 arch/s390/include/asm/pci_io.h *((u16 *) dst) = (u16) data; dst 114 arch/s390/include/asm/pci_io.h *((u32 *) dst) = (u32) data; dst 117 arch/s390/include/asm/pci_io.h *((u64 *) dst) = (u64) data; dst 124 arch/s390/include/asm/pci_io.h int zpci_write_block(volatile void __iomem *dst, const void *src, dst 127 arch/s390/include/asm/pci_io.h static inline u8 zpci_get_max_write_size(u64 src, u64 dst, int len, int max) dst 131 arch/s390/include/asm/pci_io.h while (!(src & 0x1) && !(dst & 0x1) && ((size << 1) <= count)) { dst 132 arch/s390/include/asm/pci_io.h dst = dst >> 1; dst 139 arch/s390/include/asm/pci_io.h static inline int zpci_memcpy_fromio(void *dst, dst 147 arch/s390/include/asm/pci_io.h (u64) dst, n, dst 149 arch/s390/include/asm/pci_io.h rc = zpci_read_single(dst, src, size); dst 153 arch/s390/include/asm/pci_io.h dst += size; dst 159 arch/s390/include/asm/pci_io.h static inline int zpci_memcpy_toio(volatile void __iomem *dst, dst 168 arch/s390/include/asm/pci_io.h size = zpci_get_max_write_size((u64 __force) dst, dst 172 arch/s390/include/asm/pci_io.h rc = zpci_write_block(dst, src, size); dst 174 arch/s390/include/asm/pci_io.h rc = zpci_write_single(dst, src, size); dst 178 arch/s390/include/asm/pci_io.h dst += size; dst 184 arch/s390/include/asm/pci_io.h static inline int zpci_memset_io(volatile void __iomem *dst, dst 194 arch/s390/include/asm/pci_io.h rc = zpci_memcpy_toio(dst, src, count); dst 135 arch/s390/include/asm/sclp.h void sclp_ocf_cpc_name_copy(char *dst); dst 71 arch/s390/include/asm/string.h #define memcpy(dst, src, len) __memcpy(dst, src, len) dst 72 arch/s390/include/asm/string.h #define memmove(dst, src, len) __memmove(dst, src, len) dst 139 arch/s390/include/asm/string.h static inline char *strcat(char *dst, const char *src) dst 143 arch/s390/include/asm/string.h char *ret = dst; dst 150 arch/s390/include/asm/string.h : "=&a" (dummy), "+a" (dst), "+a" (src) dst 157 arch/s390/include/asm/string.h static inline char *strcpy(char *dst, const char *src) dst 160 
arch/s390/include/asm/string.h char *ret = dst; dst 165 arch/s390/include/asm/string.h : "+&a" (dst), "+&a" (src) : "d" (r0) dst 202 arch/s390/include/asm/string.h char *strcat(char *dst, const char *src); dst 203 arch/s390/include/asm/string.h char *strcpy(char *dst, const char *src); dst 200 arch/s390/include/asm/sysinfo.h int sthyi_fill(void *dst, u64 *rc); dst 51 arch/s390/include/asm/thread_info.h int arch_dup_task_struct(struct task_struct *dst, struct task_struct *src); dst 250 arch/s390/include/asm/uaccess.h long __strncpy_from_user(char *dst, const char __user *src, long count); dst 253 arch/s390/include/asm/uaccess.h strncpy_from_user(char *dst, const char __user *src, long count) dst 256 arch/s390/include/asm/uaccess.h return __strncpy_from_user(dst, src, count); dst 279 arch/s390/include/asm/uaccess.h void s390_kernel_write(void *dst, const void *src, size_t size); dst 135 arch/s390/kernel/crash_dump.c int copy_oldmem_kernel(void *dst, void *src, size_t count) dst 146 arch/s390/kernel/crash_dump.c rc = memcpy_hsa_kernel(dst, from, len); dst 160 arch/s390/kernel/crash_dump.c if (is_vmalloc_or_module_addr(dst)) { dst 161 arch/s390/kernel/crash_dump.c ra = load_real_addr(dst); dst 164 arch/s390/kernel/crash_dump.c ra = dst; dst 169 arch/s390/kernel/crash_dump.c dst += len; dst 179 arch/s390/kernel/crash_dump.c static int copy_oldmem_user(void __user *dst, void *src, size_t count) dst 189 arch/s390/kernel/crash_dump.c rc = memcpy_hsa_user(dst, from, len); dst 203 arch/s390/kernel/crash_dump.c rc = copy_to_user_real(dst, (void *) from, count); dst 207 arch/s390/kernel/crash_dump.c dst += len; dst 769 arch/s390/kernel/ipl.c static void reipl_get_ascii_nss_name(char *dst, dst 772 arch/s390/kernel/ipl.c memcpy(dst, ipb->ccw.nss_name, NSS_NAME_SIZE); dst 773 arch/s390/kernel/ipl.c EBCASC(dst, NSS_NAME_SIZE); dst 774 arch/s390/kernel/ipl.c dst[NSS_NAME_SIZE] = 0; dst 1607 arch/s390/kernel/ipl.c static void __init strncpy_skip_quote(char *dst, char *src, int n) dst 1615 arch/s390/kernel/ipl.c dst[dx++] = src[sx]; dst 58 arch/s390/kernel/lgr.c static void cpascii(char *dst, char *src, int size) dst 60 arch/s390/kernel/lgr.c memcpy(dst, src, size); dst 61 arch/s390/kernel/lgr.c EBCASC(dst, size); dst 68 arch/s390/kernel/process.c int arch_dup_task_struct(struct task_struct *dst, struct task_struct *src) dst 77 arch/s390/kernel/process.c memcpy(dst, src, arch_task_struct_size); dst 78 arch/s390/kernel/process.c dst->thread.fpu.regs = dst->thread.fpu.fprs; dst 414 arch/s390/kernel/sthyi.c static int fill_dst(void *dst, u64 *rc) dst 416 arch/s390/kernel/sthyi.c struct sthyi_sctns *sctns = (struct sthyi_sctns *)dst; dst 423 arch/s390/kernel/sthyi.c return sthyi((u64)dst, rc); dst 467 arch/s390/kernel/sthyi.c int sthyi_fill(void *dst, u64 *rc) dst 483 arch/s390/kernel/sthyi.c memcpy(dst, sthyi_cache.info, PAGE_SIZE); dst 429 arch/s390/lib/uaccess.c long __strncpy_from_user(char *dst, const char __user *src, long size) dst 439 arch/s390/lib/uaccess.c if (copy_from_user(dst, src, len)) dst 441 arch/s390/lib/uaccess.c len_str = strnlen(dst, len); dst 444 arch/s390/lib/uaccess.c dst += len_str; dst 21 arch/s390/mm/maccess.c static notrace long s390_kernel_write_odd(void *dst, const void *src, size_t size) dst 26 arch/s390/mm/maccess.c aligned = (unsigned long) dst & ~7UL; dst 27 arch/s390/mm/maccess.c offset = (unsigned long) dst & 7UL; dst 58 arch/s390/mm/maccess.c void notrace s390_kernel_write(void *dst, const void *src, size_t size) dst 65 arch/s390/mm/maccess.c copied = 
s390_kernel_write_odd(dst, src, size); dst 66 arch/s390/mm/maccess.c dst += copied; dst 329 arch/s390/pci/pci_insn.c static inline int zpci_write_block_fh(volatile void __iomem *dst, dst 332 arch/s390/pci/pci_insn.c struct zpci_iomap_entry *entry = &zpci_iomap_start[ZPCI_IDX(dst)]; dst 334 arch/s390/pci/pci_insn.c u64 offset = ZPCI_OFFSET(dst); dst 356 arch/s390/pci/pci_insn.c int zpci_write_block(volatile void __iomem *dst, dst 363 arch/s390/pci/pci_insn.c return zpci_write_block_fh(dst, src, len); dst 365 arch/s390/pci/pci_insn.c cc = __pcistb_mio(src, (__force u64) dst, len, &status); dst 367 arch/s390/pci/pci_insn.c zpci_err_insn(cc, status, 0, (__force u64) dst); dst 91 arch/s390/pci/pci_mmio.c static inline int __memcpy_toio_inuser(void __iomem *dst, dst 103 arch/s390/pci/pci_mmio.c size = zpci_get_max_write_size((u64 __force) dst, dst 107 arch/s390/pci/pci_mmio.c rc = __pcistb_mio_inuser(dst, src, size, &status); dst 109 arch/s390/pci/pci_mmio.c rc = __pcistg_mio_inuser(dst, src, size, &status); dst 113 arch/s390/pci/pci_mmio.c dst += size; dst 118 arch/s390/pci/pci_mmio.c zpci_err_mmio(rc, status, (__force u64) dst); dst 198 arch/s390/pci/pci_mmio.c void __user *dst, const void __iomem *ioaddr, dst 229 arch/s390/pci/pci_mmio.c [dst] "+a" (dst), [cnt] "+d" (cnt), [tmp] "=d" (tmp), dst 243 arch/s390/pci/pci_mmio.c static inline int __memcpy_fromio_inuser(void __user *dst, dst 254 arch/s390/pci/pci_mmio.c (u64 __force) dst, n, dst 256 arch/s390/pci/pci_mmio.c rc = __pcilg_mio_inuser(dst, src, size, &status); dst 260 arch/s390/pci/pci_mmio.c dst += size; dst 265 arch/s390/pci/pci_mmio.c zpci_err_mmio(rc, status, (__force u64) dst); dst 83 arch/sh/include/asm/cacheflush.h struct page *page, unsigned long vaddr, void *dst, const void *src, dst 87 arch/sh/include/asm/cacheflush.h struct page *page, unsigned long vaddr, void *dst, const void *src, dst 33 arch/sh/include/asm/checksum_32.h asmlinkage __wsum csum_partial_copy_generic(const void *src, void *dst, dst 45 arch/sh/include/asm/checksum_32.h __wsum csum_partial_copy_nocheck(const void *src, void *dst, dst 48 arch/sh/include/asm/checksum_32.h return csum_partial_copy_generic(src, dst, len, sum, NULL, NULL); dst 52 arch/sh/include/asm/checksum_32.h __wsum csum_partial_copy_from_user(const void __user *src, void *dst, dst 55 arch/sh/include/asm/checksum_32.h return csum_partial_copy_generic((__force const void *)src, dst, dst 196 arch/sh/include/asm/checksum_32.h void __user *dst, dst 200 arch/sh/include/asm/checksum_32.h if (access_ok(dst, len)) dst 202 arch/sh/include/asm/checksum_32.h dst, len, sum, NULL, err_ptr); dst 94 arch/sh/include/asm/thread_info.h extern int arch_dup_task_struct(struct task_struct *dst, struct task_struct *src); dst 152 arch/sh/include/asm/uaccess.h unsigned long (*from)(void *dst, const void __user *src, unsigned long cnt); dst 153 arch/sh/include/asm/uaccess.h unsigned long (*to)(void __user *dst, const void *src, unsigned long cnt); dst 128 arch/sh/kernel/dwarf.c static inline int dwarf_read_addr(unsigned long *src, unsigned long *dst) dst 131 arch/sh/kernel/dwarf.c put_unaligned(val, dst); dst 103 arch/sh/kernel/io.c void memset_io(volatile void __iomem *dst, int c, unsigned long count) dst 107 arch/sh/kernel/io.c writeb(c, dst); dst 108 arch/sh/kernel/io.c dst++; dst 218 arch/sh/kernel/io_trapped.c static unsigned long from_device(void *dst, const void *src, unsigned long cnt) dst 235 arch/sh/kernel/io_trapped.c (unsigned long)dst, cnt); dst 241 arch/sh/kernel/io_trapped.c static unsigned long 
to_device(void *dst, const void *src, unsigned long cnt) dst 244 arch/sh/kernel/io_trapped.c unsigned long dst_addr = (unsigned long)dst; dst 77 arch/sh/kernel/iomap.c static inline void mmio_insb(void __iomem *addr, u8 *dst, int count) dst 81 arch/sh/kernel/iomap.c *dst = data; dst 82 arch/sh/kernel/iomap.c dst++; dst 86 arch/sh/kernel/iomap.c static inline void mmio_insw(void __iomem *addr, u16 *dst, int count) dst 90 arch/sh/kernel/iomap.c *dst = data; dst 91 arch/sh/kernel/iomap.c dst++; dst 95 arch/sh/kernel/iomap.c static inline void mmio_insl(void __iomem *addr, u32 *dst, int count) dst 99 arch/sh/kernel/iomap.c *dst = data; dst 100 arch/sh/kernel/iomap.c dst++; dst 128 arch/sh/kernel/iomap.c void ioread8_rep(void __iomem *addr, void *dst, unsigned long count) dst 130 arch/sh/kernel/iomap.c mmio_insb(addr, dst, count); dst 134 arch/sh/kernel/iomap.c void ioread16_rep(void __iomem *addr, void *dst, unsigned long count) dst 136 arch/sh/kernel/iomap.c mmio_insw(addr, dst, count); dst 140 arch/sh/kernel/iomap.c void ioread32_rep(void __iomem *addr, void *dst, unsigned long count) dst 142 arch/sh/kernel/iomap.c mmio_insl(addr, dst, count); dst 24 arch/sh/kernel/process.c int arch_dup_task_struct(struct task_struct *dst, struct task_struct *src) dst 29 arch/sh/kernel/process.c *dst = *src; dst 32 arch/sh/kernel/process.c dst->thread.xstate = kmem_cache_alloc(task_xstate_cachep, dst 34 arch/sh/kernel/process.c if (!dst->thread.xstate) dst 36 arch/sh/kernel/process.c memcpy(dst->thread.xstate, src->thread.xstate, xstate_size); dst 48 arch/sh/kernel/traps_32.c static inline void sign_extend(unsigned int count, unsigned char *dst) dst 51 arch/sh/kernel/traps_32.c if ((count == 1) && dst[0] & 0x80) { dst 52 arch/sh/kernel/traps_32.c dst[1] = 0xff; dst 53 arch/sh/kernel/traps_32.c dst[2] = 0xff; dst 54 arch/sh/kernel/traps_32.c dst[3] = 0xff; dst 56 arch/sh/kernel/traps_32.c if ((count == 2) && dst[1] & 0x80) { dst 57 arch/sh/kernel/traps_32.c dst[2] = 0xff; dst 58 arch/sh/kernel/traps_32.c dst[3] = 0xff; dst 61 arch/sh/kernel/traps_32.c if ((count == 1) && dst[3] & 0x80) { dst 62 arch/sh/kernel/traps_32.c dst[2] = 0xff; dst 63 arch/sh/kernel/traps_32.c dst[1] = 0xff; dst 64 arch/sh/kernel/traps_32.c dst[0] = 0xff; dst 66 arch/sh/kernel/traps_32.c if ((count == 2) && dst[2] & 0x80) { dst 67 arch/sh/kernel/traps_32.c dst[1] = 0xff; dst 68 arch/sh/kernel/traps_32.c dst[0] = 0xff; dst 90 arch/sh/kernel/traps_32.c unsigned char *src, *dst; dst 115 arch/sh/kernel/traps_32.c dst = (unsigned char *)rn; dst 116 arch/sh/kernel/traps_32.c *(unsigned long *)dst = 0; dst 119 arch/sh/kernel/traps_32.c dst += 4-count; dst 121 arch/sh/kernel/traps_32.c if (ma->from(dst, srcu, count)) dst 124 arch/sh/kernel/traps_32.c sign_extend(count, dst); dst 166 arch/sh/kernel/traps_32.c dst = (unsigned char *)rn; dst 167 arch/sh/kernel/traps_32.c *(unsigned long *)dst = 0; dst 169 arch/sh/kernel/traps_32.c if (ma->from(dst, srcu, 4)) dst 178 arch/sh/kernel/traps_32.c dst = (unsigned char*) rn; dst 179 arch/sh/kernel/traps_32.c *(unsigned long*)dst = 0; dst 182 arch/sh/kernel/traps_32.c dst += 4-count; dst 184 arch/sh/kernel/traps_32.c if (ma->from(dst, srcu, count)) dst 186 arch/sh/kernel/traps_32.c sign_extend(count, dst); dst 208 arch/sh/kernel/traps_32.c dst = (unsigned char *) &regs->regs[0]; dst 209 arch/sh/kernel/traps_32.c *(unsigned long *)dst = 0; dst 212 arch/sh/kernel/traps_32.c dst += 2; dst 214 arch/sh/kernel/traps_32.c if (ma->from(dst, srcu, 2)) dst 216 arch/sh/kernel/traps_32.c sign_extend(2, dst); dst 226
arch/sh/kernel/traps_32.c dst = (unsigned char *)rn; dst 227 arch/sh/kernel/traps_32.c *(unsigned long *)dst = 0; dst 230 arch/sh/kernel/traps_32.c dst += 2; dst 233 arch/sh/kernel/traps_32.c if (ma->from(dst, srcu, 2)) dst 235 arch/sh/kernel/traps_32.c sign_extend(2, dst); dst 243 arch/sh/kernel/traps_32.c dst = (unsigned char *)rn; dst 244 arch/sh/kernel/traps_32.c *(unsigned long *)dst = 0; dst 246 arch/sh/kernel/traps_32.c if (ma->from(dst, srcu, 4)) dst 61 arch/sh/mm/cache.c unsigned long vaddr, void *dst, const void *src, dst 70 arch/sh/mm/cache.c memcpy(dst, src, len); dst 80 arch/sh/mm/cache.c unsigned long vaddr, void *dst, const void *src, dst 86 arch/sh/mm/cache.c memcpy(dst, vfrom, len); dst 89 arch/sh/mm/cache.c memcpy(dst, src, len); dst 200 arch/sparc/crypto/aes_glue.c static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 204 arch/sparc/crypto/aes_glue.c ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst); dst 207 arch/sparc/crypto/aes_glue.c static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 211 arch/sparc/crypto/aes_glue.c ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst); dst 217 arch/sparc/crypto/aes_glue.c struct scatterlist *dst, struct scatterlist *src, dst 224 arch/sparc/crypto/aes_glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 235 arch/sparc/crypto/aes_glue.c (u64 *) walk.dst.virt.addr, dst 246 arch/sparc/crypto/aes_glue.c struct scatterlist *dst, struct scatterlist *src, dst 254 arch/sparc/crypto/aes_glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 266 arch/sparc/crypto/aes_glue.c (u64 *) walk.dst.virt.addr, block_len); dst 277 arch/sparc/crypto/aes_glue.c struct scatterlist *dst, struct scatterlist *src, dst 284 arch/sparc/crypto/aes_glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 295 arch/sparc/crypto/aes_glue.c (u64 *) walk.dst.virt.addr, dst 306 arch/sparc/crypto/aes_glue.c struct scatterlist *dst, struct scatterlist *src, dst 314 arch/sparc/crypto/aes_glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 326 arch/sparc/crypto/aes_glue.c (u64 *) walk.dst.virt.addr, dst 343 arch/sparc/crypto/aes_glue.c u8 *dst = walk->dst.virt.addr; dst 348 arch/sparc/crypto/aes_glue.c crypto_xor_cpy(dst, (u8 *) keystream, src, nbytes); dst 353 arch/sparc/crypto/aes_glue.c struct scatterlist *dst, struct scatterlist *src, dst 360 arch/sparc/crypto/aes_glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 371 arch/sparc/crypto/aes_glue.c (u64 *) walk.dst.virt.addr, dst 58 arch/sparc/crypto/camellia_glue.c static void camellia_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 64 arch/sparc/crypto/camellia_glue.c (u32 *) dst, ctx->key_len); dst 67 arch/sparc/crypto/camellia_glue.c static void camellia_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 73 arch/sparc/crypto/camellia_glue.c (u32 *) dst, ctx->key_len); dst 87 arch/sparc/crypto/camellia_glue.c struct scatterlist *dst, struct scatterlist *src, dst 100 arch/sparc/crypto/camellia_glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 117 arch/sparc/crypto/camellia_glue.c dst64 = (u64 *) walk.dst.virt.addr; dst 128 arch/sparc/crypto/camellia_glue.c struct scatterlist *dst, struct scatterlist *src, dst 131 arch/sparc/crypto/camellia_glue.c return __ecb_crypt(desc, dst, src, nbytes, true); dst 135 arch/sparc/crypto/camellia_glue.c struct scatterlist *dst, struct scatterlist *src, dst 138 arch/sparc/crypto/camellia_glue.c return __ecb_crypt(desc, dst, src, nbytes, false); dst 150 
arch/sparc/crypto/camellia_glue.c struct scatterlist *dst, struct scatterlist *src, dst 163 arch/sparc/crypto/camellia_glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 177 arch/sparc/crypto/camellia_glue.c dst64 = (u64 *) walk.dst.virt.addr; dst 189 arch/sparc/crypto/camellia_glue.c struct scatterlist *dst, struct scatterlist *src, dst 202 arch/sparc/crypto/camellia_glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 216 arch/sparc/crypto/camellia_glue.c dst64 = (u64 *) walk.dst.virt.addr; dst 67 arch/sparc/crypto/des_glue.c static void sparc_des_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 72 arch/sparc/crypto/des_glue.c des_sparc64_crypt(K, (const u64 *) src, (u64 *) dst); dst 75 arch/sparc/crypto/des_glue.c static void sparc_des_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 80 arch/sparc/crypto/des_glue.c des_sparc64_crypt(K, (const u64 *) src, (u64 *) dst); dst 91 arch/sparc/crypto/des_glue.c struct scatterlist *dst, struct scatterlist *src, dst 98 arch/sparc/crypto/des_glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 111 arch/sparc/crypto/des_glue.c (u64 *) walk.dst.virt.addr, dst 122 arch/sparc/crypto/des_glue.c struct scatterlist *dst, struct scatterlist *src, dst 125 arch/sparc/crypto/des_glue.c return __ecb_crypt(desc, dst, src, nbytes, true); dst 129 arch/sparc/crypto/des_glue.c struct scatterlist *dst, struct scatterlist *src, dst 132 arch/sparc/crypto/des_glue.c return __ecb_crypt(desc, dst, src, nbytes, false); dst 139 arch/sparc/crypto/des_glue.c struct scatterlist *dst, struct scatterlist *src, dst 146 arch/sparc/crypto/des_glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 156 arch/sparc/crypto/des_glue.c (u64 *) walk.dst.virt.addr, dst 170 arch/sparc/crypto/des_glue.c struct scatterlist *dst, struct scatterlist *src, dst 177 arch/sparc/crypto/des_glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 187 arch/sparc/crypto/des_glue.c (u64 *) walk.dst.virt.addr, dst 233 arch/sparc/crypto/des_glue.c static void sparc_des3_ede_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 238 arch/sparc/crypto/des_glue.c des3_ede_sparc64_crypt(K, (const u64 *) src, (u64 *) dst); dst 241 arch/sparc/crypto/des_glue.c static void sparc_des3_ede_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 246 arch/sparc/crypto/des_glue.c des3_ede_sparc64_crypt(K, (const u64 *) src, (u64 *) dst); dst 255 arch/sparc/crypto/des_glue.c struct scatterlist *dst, struct scatterlist *src, dst 263 arch/sparc/crypto/des_glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 278 arch/sparc/crypto/des_glue.c (u64 *) walk.dst.virt.addr, dst 289 arch/sparc/crypto/des_glue.c struct scatterlist *dst, struct scatterlist *src, dst 292 arch/sparc/crypto/des_glue.c return __ecb3_crypt(desc, dst, src, nbytes, true); dst 296 arch/sparc/crypto/des_glue.c struct scatterlist *dst, struct scatterlist *src, dst 299 arch/sparc/crypto/des_glue.c return __ecb3_crypt(desc, dst, src, nbytes, false); dst 307 arch/sparc/crypto/des_glue.c struct scatterlist *dst, struct scatterlist *src, dst 315 arch/sparc/crypto/des_glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 327 arch/sparc/crypto/des_glue.c (u64 *) walk.dst.virt.addr, dst 343 arch/sparc/crypto/des_glue.c struct scatterlist *dst, struct scatterlist *src, dst 351 arch/sparc/crypto/des_glue.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 363 arch/sparc/crypto/des_glue.c (u64 *) walk.dst.virt.addr, dst 88 arch/sparc/crypto/md5_glue.c u32 *dst = (u32 *)out; dst 109 arch/sparc/crypto/md5_glue.c 
dst[i] = sctx->hash[i]; dst 83 arch/sparc/crypto/sha1_glue.c __be32 *dst = (__be32 *)out; dst 104 arch/sparc/crypto/sha1_glue.c dst[i] = cpu_to_be32(sctx->state[i]); dst 103 arch/sparc/crypto/sha256_glue.c __be32 *dst = (__be32 *)out; dst 124 arch/sparc/crypto/sha256_glue.c dst[i] = cpu_to_be32(sctx->state[i]); dst 104 arch/sparc/crypto/sha512_glue.c __be64 *dst = (__be64 *)out; dst 128 arch/sparc/crypto/sha512_glue.c dst[i] = cpu_to_be64(sctx->state[i]); dst 22 arch/sparc/include/asm/cacheflush_32.h #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ dst 25 arch/sparc/include/asm/cacheflush_32.h memcpy(dst, src, len); \ dst 27 arch/sparc/include/asm/cacheflush_32.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 30 arch/sparc/include/asm/cacheflush_32.h memcpy(dst, src, len); \ dst 58 arch/sparc/include/asm/cacheflush_64.h #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ dst 61 arch/sparc/include/asm/cacheflush_64.h memcpy(dst, src, len); \ dst 65 arch/sparc/include/asm/cacheflush_64.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 68 arch/sparc/include/asm/cacheflush_64.h memcpy(dst, src, len); \ dst 69 arch/sparc/include/asm/cacheflush_64.h flush_ptrace_access(vma, page, vaddr, dst, len, 1); \ dst 45 arch/sparc/include/asm/checksum_32.h csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum) dst 48 arch/sparc/include/asm/checksum_32.h register char *d asm("o1") = dst; dst 63 arch/sparc/include/asm/checksum_32.h csum_partial_copy_from_user(const void __user *src, void *dst, int len, dst 67 arch/sparc/include/asm/checksum_32.h register char *d asm("o1") = dst; dst 87 arch/sparc/include/asm/checksum_32.h csum_partial_copy_to_user(const void *src, void __user *dst, int len, dst 90 arch/sparc/include/asm/checksum_32.h if (!access_ok(dst, len)) { dst 95 arch/sparc/include/asm/checksum_32.h register char __user *d asm("o1") = dst; dst 41 arch/sparc/include/asm/checksum_64.h __wsum csum_partial_copy_nocheck(const void *src, void *dst, dst 45 arch/sparc/include/asm/checksum_64.h void *dst, int len, dst 50 arch/sparc/include/asm/checksum_64.h void *dst, int len, dst 53 arch/sparc/include/asm/checksum_64.h long ret = __csum_partial_copy_from_user(src, dst, len, sum); dst 64 arch/sparc/include/asm/checksum_64.h void __user *dst, int len, dst 69 arch/sparc/include/asm/checksum_64.h void __user *dst, int len, dst 72 arch/sparc/include/asm/checksum_64.h long ret = __csum_partial_copy_to_user(src, dst, len, sum); dst 36 arch/sparc/include/asm/ide.h static inline void __ide_insw(void __iomem *port, void *dst, u32 count) dst 39 arch/sparc/include/asm/ide.h unsigned long end = (unsigned long)dst + (count << 1); dst 41 arch/sparc/include/asm/ide.h u16 *ps = dst; dst 62 arch/sparc/include/asm/ide.h __flush_dcache_range((unsigned long)dst, end); dst 16 arch/sparc/include/asm/io_32.h static inline void _memset_io(volatile void __iomem *dst, dst 19 arch/sparc/include/asm/io_32.h volatile void __iomem *d = dst; dst 27 arch/sparc/include/asm/io_32.h static inline void _memcpy_fromio(void *dst, const volatile void __iomem *src, dst 30 arch/sparc/include/asm/io_32.h char *d = dst; dst 39 arch/sparc/include/asm/io_32.h static inline void _memcpy_toio(volatile void __iomem *dst, const void *src, dst 43 arch/sparc/include/asm/io_32.h volatile void __iomem *d = dst; dst 97 arch/sparc/include/asm/io_32.h static inline void sbus_memcpy_fromio(void *dst, dst 101 arch/sparc/include/asm/io_32.h char *d = dst; dst 110 
arch/sparc/include/asm/io_32.h static inline void sbus_memcpy_toio(volatile void __iomem *dst, dst 115 arch/sparc/include/asm/io_32.h volatile void __iomem *d = dst; dst 330 arch/sparc/include/asm/io_64.h static inline void sbus_memset_io(volatile void __iomem *dst, int c, __kernel_size_t n) dst 333 arch/sparc/include/asm/io_64.h sbus_writeb(c, dst); dst 334 arch/sparc/include/asm/io_64.h dst++; dst 338 arch/sparc/include/asm/io_64.h static inline void memset_io(volatile void __iomem *dst, int c, __kernel_size_t n) dst 340 arch/sparc/include/asm/io_64.h volatile void __iomem *d = dst; dst 348 arch/sparc/include/asm/io_64.h static inline void sbus_memcpy_fromio(void *dst, const volatile void __iomem *src, dst 351 arch/sparc/include/asm/io_64.h char *d = dst; dst 361 arch/sparc/include/asm/io_64.h static inline void memcpy_fromio(void *dst, const volatile void __iomem *src, dst 364 arch/sparc/include/asm/io_64.h char *d = dst; dst 373 arch/sparc/include/asm/io_64.h static inline void sbus_memcpy_toio(volatile void __iomem *dst, const void *src, dst 377 arch/sparc/include/asm/io_64.h volatile void __iomem *d = dst; dst 386 arch/sparc/include/asm/io_64.h static inline void memcpy_toio(volatile void __iomem *dst, const void *src, dst 390 arch/sparc/include/asm/io_64.h volatile void __iomem *d = dst; dst 117 arch/sparc/kernel/btext.c unsigned int *dst = (unsigned int *)calc_base(0,0); dst 125 arch/sparc/kernel/btext.c unsigned int *dst_ptr = dst; dst 129 arch/sparc/kernel/btext.c dst += (dispDeviceRowBytes >> 2); dst 133 arch/sparc/kernel/btext.c unsigned int *dst_ptr = dst; dst 136 arch/sparc/kernel/btext.c dst += (dispDeviceRowBytes >> 2); dst 234 arch/sparc/kernel/process_32.c clone_stackframe(struct sparc_stackf __user *dst, dst 246 arch/sparc/kernel/process_32.c fp = (unsigned long) dst; dst 683 arch/sparc/kernel/process_64.c int arch_dup_task_struct(struct task_struct *dst, struct task_struct *src) dst 700 arch/sparc/kernel/process_64.c *dst = *src; dst 88 arch/sparc/lib/PeeCeeI.c void insb(unsigned long __addr, void *dst, unsigned long count) dst 94 arch/sparc/lib/PeeCeeI.c u8 *pb = dst; dst 116 arch/sparc/lib/PeeCeeI.c void insw(unsigned long __addr, void *dst, unsigned long count) dst 121 arch/sparc/lib/PeeCeeI.c u16 *ps = dst; dst 144 arch/sparc/lib/PeeCeeI.c void insl(unsigned long __addr, void *dst, unsigned long count) dst 149 arch/sparc/lib/PeeCeeI.c if ((((unsigned long)dst) & 0x3) == 0) { dst 150 arch/sparc/lib/PeeCeeI.c u32 *pi = dst; dst 158 arch/sparc/lib/PeeCeeI.c switch (((unsigned long)dst) & 3) { dst 160 arch/sparc/lib/PeeCeeI.c ps = dst; dst 175 arch/sparc/lib/PeeCeeI.c pb = dst; dst 192 arch/sparc/lib/PeeCeeI.c pb = (u8 *)dst; dst 288 arch/sparc/net/bpf_jit_comp_64.c static void emit_alu(u32 opcode, u32 src, u32 dst, struct jit_ctx *ctx) dst 290 arch/sparc/net/bpf_jit_comp_64.c emit(opcode | RS1(dst) | RS2(src) | RD(dst), ctx); dst 298 arch/sparc/net/bpf_jit_comp_64.c static void emit_alu_K(unsigned int opcode, unsigned int dst, unsigned int imm, dst 304 arch/sparc/net/bpf_jit_comp_64.c insn |= RS1(dst) | RD(dst); dst 318 arch/sparc/net/bpf_jit_comp_64.c unsigned int dst, struct jit_ctx *ctx) dst 323 arch/sparc/net/bpf_jit_comp_64.c insn |= RS1(src) | RD(dst); dst 628 arch/sparc/net/bpf_jit_comp_64.c const u8 dst, const u8 src, struct jit_ctx *ctx) dst 632 arch/sparc/net/bpf_jit_comp_64.c emit(cb_opc | WDISP10(off << 2) | RS1(dst) | RS2(src), ctx); dst 636 arch/sparc/net/bpf_jit_comp_64.c const u8 dst, s32 imm, struct jit_ctx *ctx) dst 640 
arch/sparc/net/bpf_jit_comp_64.c emit(cb_opc | IMMED | WDISP10(off << 2) | RS1(dst) | S5(imm), ctx); dst 658 arch/sparc/net/bpf_jit_comp_64.c static int emit_compare_and_branch(const u8 code, const u8 dst, u8 src, dst 693 arch/sparc/net/bpf_jit_comp_64.c emit_btsti(dst, imm, ctx); dst 695 arch/sparc/net/bpf_jit_comp_64.c emit_btst(dst, src, ctx); dst 698 arch/sparc/net/bpf_jit_comp_64.c emit_cmpi(dst, imm, ctx); dst 700 arch/sparc/net/bpf_jit_comp_64.c emit_cmp(dst, src, ctx); dst 785 arch/sparc/net/bpf_jit_comp_64.c dst, imm, ctx); dst 788 arch/sparc/net/bpf_jit_comp_64.c dst, src, ctx); dst 898 arch/sparc/net/bpf_jit_comp_64.c const u8 dst = bpf2sparc[insn->dst_reg]; dst 910 arch/sparc/net/bpf_jit_comp_64.c emit_alu3_K(SRL, src, 0, dst, ctx); dst 915 arch/sparc/net/bpf_jit_comp_64.c emit_reg_move(src, dst, ctx); dst 920 arch/sparc/net/bpf_jit_comp_64.c emit_alu(ADD, src, dst, ctx); dst 924 arch/sparc/net/bpf_jit_comp_64.c emit_alu(SUB, src, dst, ctx); dst 928 arch/sparc/net/bpf_jit_comp_64.c emit_alu(AND, src, dst, ctx); dst 932 arch/sparc/net/bpf_jit_comp_64.c emit_alu(OR, src, dst, ctx); dst 936 arch/sparc/net/bpf_jit_comp_64.c emit_alu(XOR, src, dst, ctx); dst 939 arch/sparc/net/bpf_jit_comp_64.c emit_alu(MUL, src, dst, ctx); dst 942 arch/sparc/net/bpf_jit_comp_64.c emit_alu(MULX, src, dst, ctx); dst 946 arch/sparc/net/bpf_jit_comp_64.c emit_alu(DIV, src, dst, ctx); dst 951 arch/sparc/net/bpf_jit_comp_64.c emit_alu(UDIVX, src, dst, ctx); dst 959 arch/sparc/net/bpf_jit_comp_64.c emit_alu3(DIV, dst, src, tmp, ctx); dst 961 arch/sparc/net/bpf_jit_comp_64.c emit_alu3(SUB, dst, tmp, dst, ctx); dst 969 arch/sparc/net/bpf_jit_comp_64.c emit_alu3(UDIVX, dst, src, tmp, ctx); dst 971 arch/sparc/net/bpf_jit_comp_64.c emit_alu3(SUB, dst, tmp, dst, ctx); dst 975 arch/sparc/net/bpf_jit_comp_64.c emit_alu(SLL, src, dst, ctx); dst 978 arch/sparc/net/bpf_jit_comp_64.c emit_alu(SLLX, src, dst, ctx); dst 981 arch/sparc/net/bpf_jit_comp_64.c emit_alu(SRL, src, dst, ctx); dst 986 arch/sparc/net/bpf_jit_comp_64.c emit_alu(SRLX, src, dst, ctx); dst 989 arch/sparc/net/bpf_jit_comp_64.c emit_alu(SRA, src, dst, ctx); dst 992 arch/sparc/net/bpf_jit_comp_64.c emit_alu(SRAX, src, dst, ctx); dst 998 arch/sparc/net/bpf_jit_comp_64.c emit(SUB | RS1(0) | RS2(dst) | RD(dst), ctx); dst 1004 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SLL, dst, 16, ctx); dst 1005 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SRL, dst, 16, ctx); dst 1011 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SRL, dst, 0, ctx); dst 1028 arch/sparc/net/bpf_jit_comp_64.c emit_alu3_K(AND, dst, 0xff, tmp, ctx); dst 1029 arch/sparc/net/bpf_jit_comp_64.c emit_alu3_K(SRL, dst, 8, dst, ctx); dst 1030 arch/sparc/net/bpf_jit_comp_64.c emit_alu3_K(AND, dst, 0xff, dst, ctx); dst 1032 arch/sparc/net/bpf_jit_comp_64.c emit_alu(OR, tmp, dst, ctx); dst 1039 arch/sparc/net/bpf_jit_comp_64.c emit_alu3_K(SRL, dst, 24, tmp, ctx); /* tmp = dst >> 24 */ dst 1040 arch/sparc/net/bpf_jit_comp_64.c emit_alu3_K(SRL, dst, 16, tmp2, ctx); /* tmp2 = dst >> 16 */ dst 1044 arch/sparc/net/bpf_jit_comp_64.c emit_alu3_K(SRL, dst, 8, tmp2, ctx); /* tmp2 = dst >> 8 */ dst 1048 arch/sparc/net/bpf_jit_comp_64.c emit_alu3_K(AND, dst, 0xff, dst, ctx); /* dst = dst & 0xff */ dst 1049 arch/sparc/net/bpf_jit_comp_64.c emit_alu3_K(SLL, dst, 24, dst, ctx); /* dst = dst << 24 */ dst 1050 arch/sparc/net/bpf_jit_comp_64.c emit_alu(OR, tmp, dst, ctx); /* dst = dst | tmp */ dst 1057 arch/sparc/net/bpf_jit_comp_64.c emit(ST64 | RS1(tmp) | RS2(G0) | RD(dst), ctx); dst 1058 
arch/sparc/net/bpf_jit_comp_64.c emit(LD64A | ASI(ASI_PL) | RS1(tmp) | RS2(G0) | RD(dst), ctx); dst 1065 arch/sparc/net/bpf_jit_comp_64.c emit_loadimm32(imm, dst, ctx); dst 1070 arch/sparc/net/bpf_jit_comp_64.c emit_loadimm_sext(imm, dst, ctx); dst 1075 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(ADD, dst, imm, ctx); dst 1079 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SUB, dst, imm, ctx); dst 1083 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(AND, dst, imm, ctx); dst 1087 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(OR, dst, imm, ctx); dst 1091 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(XOR, dst, imm, ctx); dst 1094 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(MUL, dst, imm, ctx); dst 1097 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(MULX, dst, imm, ctx); dst 1104 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(DIV, dst, imm, ctx); dst 1110 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(UDIVX, dst, imm, ctx); dst 1127 arch/sparc/net/bpf_jit_comp_64.c emit(div | IMMED | RS1(dst) | S13(imm) | RD(tmp), ctx); dst 1129 arch/sparc/net/bpf_jit_comp_64.c emit(SUB | RS1(dst) | RS2(tmp) | RD(dst), ctx); dst 1136 arch/sparc/net/bpf_jit_comp_64.c emit(div | RS1(dst) | RS2(tmp1) | RD(tmp), ctx); dst 1138 arch/sparc/net/bpf_jit_comp_64.c emit(SUB | RS1(dst) | RS2(tmp) | RD(dst), ctx); dst 1143 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SLL, dst, imm, ctx); dst 1146 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SLLX, dst, imm, ctx); dst 1149 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SRL, dst, imm, ctx); dst 1154 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SRLX, dst, imm, ctx); dst 1157 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SRA, dst, imm, ctx); dst 1160 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SRAX, dst, imm, ctx); dst 1166 arch/sparc/net/bpf_jit_comp_64.c emit_alu_K(SRL, dst, 0, ctx); dst 1188 arch/sparc/net/bpf_jit_comp_64.c err = emit_compare_and_branch(code, dst, src, 0, false, i + off, ctx); dst 1207 arch/sparc/net/bpf_jit_comp_64.c err = emit_compare_and_branch(code, dst, 0, imm, true, i + off, ctx); dst 1249 arch/sparc/net/bpf_jit_comp_64.c emit_loadimm64(imm64, dst, ctx); dst 1285 arch/sparc/net/bpf_jit_comp_64.c emit(opcode | RS1(src) | rs2 | RD(dst), ctx); dst 1328 arch/sparc/net/bpf_jit_comp_64.c emit(opcode | RS1(dst) | rs2 | RD(tmp2), ctx); dst 1365 arch/sparc/net/bpf_jit_comp_64.c emit(opcode | RS1(dst) | rs2 | RD(src), ctx); dst 1382 arch/sparc/net/bpf_jit_comp_64.c emit_alu3(ADD, dst, tmp, tmp, ctx); dst 1405 arch/sparc/net/bpf_jit_comp_64.c emit_alu3(ADD, dst, tmp, tmp, ctx); dst 277 arch/um/drivers/vector_user.c char *src, *dst; dst 282 arch/um/drivers/vector_user.c dst = uml_vector_fetch_arg(ifspec, "dst"); dst 306 arch/um/drivers/vector_user.c if ((dst == NULL) || (strlen(dst) > MAX_UN_LEN)) dst 309 arch/um/drivers/vector_user.c memcpy(remote_addr->sun_path, dst, strlen(dst) + 1); dst 439 arch/um/drivers/vector_user.c char *src, *dst, *srcport, *dstport; dst 458 arch/um/drivers/vector_user.c dst = uml_vector_fetch_arg(ifspec, "dst"); dst 518 arch/um/drivers/vector_user.c gairet = getaddrinfo(dst, dstport, &dsthints, &gairesult); dst 26 arch/um/include/asm/uaccess.h extern long __strncpy_from_user(char *dst, const char __user *src, long count); dst 10 arch/um/kernel/maccess.c long probe_kernel_read(void *dst, const void *src, size_t size) dst 20 arch/um/kernel/maccess.c return __probe_kernel_read(dst, src, size); dst 187 arch/um/kernel/skas/uaccess.c long __strncpy_from_user(char *dst, const char __user *src, long count) dst 190 arch/um/kernel/skas/uaccess.c char *ptr = dst; dst 193 
arch/um/kernel/skas/uaccess.c strncpy(dst, (__force void *) src, count); dst 194 arch/um/kernel/skas/uaccess.c return strnlen(dst, count); dst 201 arch/um/kernel/skas/uaccess.c return strnlen(dst, count); dst 22 arch/um/os-Linux/file.c static void copy_stat(struct uml_stat *dst, const struct stat64 *src) dst 24 arch/um/os-Linux/file.c *dst = ((struct uml_stat) { dst 108 arch/unicore32/include/asm/cacheflush.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 110 arch/unicore32/include/asm/cacheflush.h memcpy(dst, src, len); \ dst 51 arch/unicore32/mm/flush.c unsigned long uaddr, void *dst, const void *src, dst 54 arch/unicore32/mm/flush.c memcpy(dst, src, len); dst 55 arch/unicore32/mm/flush.c flush_ptrace_access(vma, page, uaddr, dst, len); dst 215 arch/x86/boot/boot.h void copy_to_fs(addr_t dst, void *src, size_t len); dst 216 arch/x86/boot/boot.h void *copy_from_fs(void *dst, addr_t src, size_t len); dst 217 arch/x86/boot/boot.h void copy_to_gs(addr_t dst, void *src, size_t len); dst 218 arch/x86/boot/boot.h void *copy_from_gs(void *dst, addr_t src, size_t len); dst 10 arch/x86/boot/string.h void *memcpy(void *dst, const void *src, size_t len); dst 11 arch/x86/boot/string.h void *memset(void *dst, int c, size_t len); dst 262 arch/x86/boot/video.c addr_t dst = 0; dst 280 arch/x86/boot/video.c copy_to_fs(dst, src, copy*sizeof(u16)); dst 281 arch/x86/boot/video.c dst += copy*sizeof(u16); dst 297 arch/x86/boot/video.c : "+D" (dst), "+c" (npad) dst 32 arch/x86/crypto/aegis128-aesni-glue.c void *state, unsigned int length, const void *src, void *dst); dst 35 arch/x86/crypto/aegis128-aesni-glue.c void *state, unsigned int length, const void *src, void *dst); dst 38 arch/x86/crypto/aegis128-aesni-glue.c void *state, unsigned int length, const void *src, void *dst); dst 41 arch/x86/crypto/aegis128-aesni-glue.c void *state, unsigned int length, const void *src, void *dst); dst 64 arch/x86/crypto/aegis128-aesni-glue.c void *dst); dst 66 arch/x86/crypto/aegis128-aesni-glue.c void *dst); dst 124 arch/x86/crypto/aegis128-aesni-glue.c walk->src.virt.addr, walk->dst.virt.addr); dst 130 arch/x86/crypto/aegis128-aesni-glue.c walk->dst.virt.addr); dst 204 arch/x86/crypto/aegis128-aesni-glue.c scatterwalk_map_and_copy(tag.bytes, req->dst, dst 347 arch/x86/crypto/aesni-intel_glue.c static void aesni_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 352 arch/x86/crypto/aesni-intel_glue.c aes_encrypt(ctx, dst, src); dst 355 arch/x86/crypto/aesni-intel_glue.c aesni_enc(ctx, dst, src); dst 360 arch/x86/crypto/aesni-intel_glue.c static void aesni_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 365 arch/x86/crypto/aesni-intel_glue.c aes_decrypt(ctx, dst, src); dst 368 arch/x86/crypto/aesni-intel_glue.c aesni_dec(ctx, dst, src); dst 392 arch/x86/crypto/aesni-intel_glue.c aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr, dst 414 arch/x86/crypto/aesni-intel_glue.c aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr, dst 436 arch/x86/crypto/aesni-intel_glue.c aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr, dst 458 arch/x86/crypto/aesni-intel_glue.c aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr, dst 475 arch/x86/crypto/aesni-intel_glue.c u8 *dst = walk->dst.virt.addr; dst 479 arch/x86/crypto/aesni-intel_glue.c crypto_xor_cpy(dst, keystream, src, nbytes); dst 515 arch/x86/crypto/aesni-intel_glue.c aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr, dst 558 arch/x86/crypto/aesni-intel_glue.c static void aesni_xts_enc(void *ctx, u128 
*dst, const u128 *src, le128 *iv) dst 560 arch/x86/crypto/aesni-intel_glue.c glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc)); dst 563 arch/x86/crypto/aesni-intel_glue.c static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv) dst 565 arch/x86/crypto/aesni-intel_glue.c glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec)); dst 568 arch/x86/crypto/aesni-intel_glue.c static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv) dst 570 arch/x86/crypto/aesni-intel_glue.c aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv); dst 573 arch/x86/crypto/aesni-intel_glue.c static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv) dst 575 arch/x86/crypto/aesni-intel_glue.c aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv); dst 720 arch/x86/crypto/aesni-intel_glue.c u8 *src, *dst, *assoc; dst 755 arch/x86/crypto/aesni-intel_glue.c if (req->src != req->dst) { dst 756 arch/x86/crypto/aesni-intel_glue.c dst_sg = scatterwalk_ffwd(dst_start, req->dst, dst 765 arch/x86/crypto/aesni-intel_glue.c if (req->src != req->dst) { dst 768 arch/x86/crypto/aesni-intel_glue.c dst = scatterwalk_map(&dst_sg_walk); dst 775 arch/x86/crypto/aesni-intel_glue.c dst, src, len); dst 778 arch/x86/crypto/aesni-intel_glue.c dst, src, len); dst 783 arch/x86/crypto/aesni-intel_glue.c scatterwalk_unmap(dst); dst 791 arch/x86/crypto/aesni-intel_glue.c dst = src = scatterwalk_map(&src_sg_walk); dst 830 arch/x86/crypto/aesni-intel_glue.c scatterwalk_map_and_copy(authTag, req->dst, dst 22 arch/x86/crypto/blowfish_glue.c asmlinkage void __blowfish_enc_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src, dst 24 arch/x86/crypto/blowfish_glue.c asmlinkage void blowfish_dec_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src); dst 27 arch/x86/crypto/blowfish_glue.c asmlinkage void __blowfish_enc_blk_4way(struct bf_ctx *ctx, u8 *dst, dst 29 arch/x86/crypto/blowfish_glue.c asmlinkage void blowfish_dec_blk_4way(struct bf_ctx *ctx, u8 *dst, dst 32 arch/x86/crypto/blowfish_glue.c static inline void blowfish_enc_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src) dst 34 arch/x86/crypto/blowfish_glue.c __blowfish_enc_blk(ctx, dst, src, false); dst 37 arch/x86/crypto/blowfish_glue.c static inline void blowfish_enc_blk_xor(struct bf_ctx *ctx, u8 *dst, dst 40 arch/x86/crypto/blowfish_glue.c __blowfish_enc_blk(ctx, dst, src, true); dst 43 arch/x86/crypto/blowfish_glue.c static inline void blowfish_enc_blk_4way(struct bf_ctx *ctx, u8 *dst, dst 46 arch/x86/crypto/blowfish_glue.c __blowfish_enc_blk_4way(ctx, dst, src, false); dst 49 arch/x86/crypto/blowfish_glue.c static inline void blowfish_enc_blk_xor_4way(struct bf_ctx *ctx, u8 *dst, dst 52 arch/x86/crypto/blowfish_glue.c __blowfish_enc_blk_4way(ctx, dst, src, true); dst 55 arch/x86/crypto/blowfish_glue.c static void blowfish_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 57 arch/x86/crypto/blowfish_glue.c blowfish_enc_blk(crypto_tfm_ctx(tfm), dst, src); dst 60 arch/x86/crypto/blowfish_glue.c static void blowfish_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 62 arch/x86/crypto/blowfish_glue.c blowfish_dec_blk(crypto_tfm_ctx(tfm), dst, src); dst 86 arch/x86/crypto/blowfish_glue.c u8 *wdst = walk.dst.virt.addr; dst 134 arch/x86/crypto/blowfish_glue.c u64 *dst = (u64 *)walk->dst.virt.addr; dst 138 arch/x86/crypto/blowfish_glue.c *dst = *src ^ *iv; dst 139 arch/x86/crypto/blowfish_glue.c blowfish_enc_blk(ctx, (u8 *)dst, (u8 *)dst); dst 140 arch/x86/crypto/blowfish_glue.c 
iv = dst; dst 143 arch/x86/crypto/blowfish_glue.c dst += 1; dst 175 arch/x86/crypto/blowfish_glue.c u64 *dst = (u64 *)walk->dst.virt.addr; dst 181 arch/x86/crypto/blowfish_glue.c dst += nbytes / bsize - 1; dst 190 arch/x86/crypto/blowfish_glue.c dst -= 4 - 1; dst 196 arch/x86/crypto/blowfish_glue.c blowfish_dec_blk_4way(ctx, (u8 *)dst, (u8 *)src); dst 198 arch/x86/crypto/blowfish_glue.c dst[1] ^= ivs[0]; dst 199 arch/x86/crypto/blowfish_glue.c dst[2] ^= ivs[1]; dst 200 arch/x86/crypto/blowfish_glue.c dst[3] ^= ivs[2]; dst 206 arch/x86/crypto/blowfish_glue.c *dst ^= *(src - 1); dst 208 arch/x86/crypto/blowfish_glue.c dst -= 1; dst 214 arch/x86/crypto/blowfish_glue.c blowfish_dec_blk(ctx, (u8 *)dst, (u8 *)src); dst 220 arch/x86/crypto/blowfish_glue.c *dst ^= *(src - 1); dst 222 arch/x86/crypto/blowfish_glue.c dst -= 1; dst 226 arch/x86/crypto/blowfish_glue.c *dst ^= *(u64 *)walk->iv; dst 255 arch/x86/crypto/blowfish_glue.c u8 *dst = walk->dst.virt.addr; dst 259 arch/x86/crypto/blowfish_glue.c crypto_xor_cpy(dst, keystream, src, nbytes); dst 269 arch/x86/crypto/blowfish_glue.c u64 *dst = (u64 *)walk->dst.virt.addr; dst 276 arch/x86/crypto/blowfish_glue.c if (dst != src) { dst 277 arch/x86/crypto/blowfish_glue.c dst[0] = src[0]; dst 278 arch/x86/crypto/blowfish_glue.c dst[1] = src[1]; dst 279 arch/x86/crypto/blowfish_glue.c dst[2] = src[2]; dst 280 arch/x86/crypto/blowfish_glue.c dst[3] = src[3]; dst 289 arch/x86/crypto/blowfish_glue.c blowfish_enc_blk_xor_4way(ctx, (u8 *)dst, dst 293 arch/x86/crypto/blowfish_glue.c dst += 4; dst 302 arch/x86/crypto/blowfish_glue.c if (dst != src) dst 303 arch/x86/crypto/blowfish_glue.c *dst = *src; dst 307 arch/x86/crypto/blowfish_glue.c blowfish_enc_blk_xor(ctx, (u8 *)dst, (u8 *)ctrblocks); dst 310 arch/x86/crypto/blowfish_glue.c dst += 1; dst 22 arch/x86/crypto/camellia_aesni_avx2_glue.c asmlinkage void camellia_ecb_enc_32way(struct camellia_ctx *ctx, u8 *dst, dst 24 arch/x86/crypto/camellia_aesni_avx2_glue.c asmlinkage void camellia_ecb_dec_32way(struct camellia_ctx *ctx, u8 *dst, dst 27 arch/x86/crypto/camellia_aesni_avx2_glue.c asmlinkage void camellia_cbc_dec_32way(struct camellia_ctx *ctx, u8 *dst, dst 29 arch/x86/crypto/camellia_aesni_avx2_glue.c asmlinkage void camellia_ctr_32way(struct camellia_ctx *ctx, u8 *dst, dst 32 arch/x86/crypto/camellia_aesni_avx2_glue.c asmlinkage void camellia_xts_enc_32way(struct camellia_ctx *ctx, u8 *dst, dst 34 arch/x86/crypto/camellia_aesni_avx2_glue.c asmlinkage void camellia_xts_dec_32way(struct camellia_ctx *ctx, u8 *dst, dst 21 arch/x86/crypto/camellia_aesni_avx_glue.c asmlinkage void camellia_ecb_enc_16way(struct camellia_ctx *ctx, u8 *dst, dst 25 arch/x86/crypto/camellia_aesni_avx_glue.c asmlinkage void camellia_ecb_dec_16way(struct camellia_ctx *ctx, u8 *dst, dst 29 arch/x86/crypto/camellia_aesni_avx_glue.c asmlinkage void camellia_cbc_dec_16way(struct camellia_ctx *ctx, u8 *dst, dst 33 arch/x86/crypto/camellia_aesni_avx_glue.c asmlinkage void camellia_ctr_16way(struct camellia_ctx *ctx, u8 *dst, dst 37 arch/x86/crypto/camellia_aesni_avx_glue.c asmlinkage void camellia_xts_enc_16way(struct camellia_ctx *ctx, u8 *dst, dst 41 arch/x86/crypto/camellia_aesni_avx_glue.c asmlinkage void camellia_xts_dec_16way(struct camellia_ctx *ctx, u8 *dst, dst 45 arch/x86/crypto/camellia_aesni_avx_glue.c void camellia_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv) dst 47 arch/x86/crypto/camellia_aesni_avx_glue.c glue_xts_crypt_128bit_one(ctx, dst, src, iv, dst 52 arch/x86/crypto/camellia_aesni_avx_glue.c void 
camellia_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv) dst 54 arch/x86/crypto/camellia_aesni_avx_glue.c glue_xts_crypt_128bit_one(ctx, dst, src, iv, dst 21 arch/x86/crypto/camellia_glue.c asmlinkage void __camellia_enc_blk(struct camellia_ctx *ctx, u8 *dst, dst 24 arch/x86/crypto/camellia_glue.c asmlinkage void camellia_dec_blk(struct camellia_ctx *ctx, u8 *dst, dst 29 arch/x86/crypto/camellia_glue.c asmlinkage void __camellia_enc_blk_2way(struct camellia_ctx *ctx, u8 *dst, dst 32 arch/x86/crypto/camellia_glue.c asmlinkage void camellia_dec_blk_2way(struct camellia_ctx *ctx, u8 *dst, dst 36 arch/x86/crypto/camellia_glue.c static void camellia_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 38 arch/x86/crypto/camellia_glue.c camellia_enc_blk(crypto_tfm_ctx(tfm), dst, src); dst 41 arch/x86/crypto/camellia_glue.c static void camellia_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 43 arch/x86/crypto/camellia_glue.c camellia_dec_blk(crypto_tfm_ctx(tfm), dst, src); dst 1270 arch/x86/crypto/camellia_glue.c void camellia_decrypt_cbc_2way(void *ctx, u128 *dst, const u128 *src) dst 1274 arch/x86/crypto/camellia_glue.c camellia_dec_blk_2way(ctx, (u8 *)dst, (u8 *)src); dst 1276 arch/x86/crypto/camellia_glue.c u128_xor(&dst[1], &dst[1], &iv); dst 1280 arch/x86/crypto/camellia_glue.c void camellia_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv) dst 1284 arch/x86/crypto/camellia_glue.c if (dst != src) dst 1285 arch/x86/crypto/camellia_glue.c *dst = *src; dst 1290 arch/x86/crypto/camellia_glue.c camellia_enc_blk_xor(ctx, (u8 *)dst, (u8 *)&ctrblk); dst 1294 arch/x86/crypto/camellia_glue.c void camellia_crypt_ctr_2way(void *ctx, u128 *dst, const u128 *src, le128 *iv) dst 1298 arch/x86/crypto/camellia_glue.c if (dst != src) { dst 1299 arch/x86/crypto/camellia_glue.c dst[0] = src[0]; dst 1300 arch/x86/crypto/camellia_glue.c dst[1] = src[1]; dst 1308 arch/x86/crypto/camellia_glue.c camellia_enc_blk_xor_2way(ctx, (u8 *)dst, (u8 *)ctrblks); dst 20 arch/x86/crypto/cast5_avx_glue.c asmlinkage void cast5_ecb_enc_16way(struct cast5_ctx *ctx, u8 *dst, dst 22 arch/x86/crypto/cast5_avx_glue.c asmlinkage void cast5_ecb_dec_16way(struct cast5_ctx *ctx, u8 *dst, dst 24 arch/x86/crypto/cast5_avx_glue.c asmlinkage void cast5_cbc_dec_16way(struct cast5_ctx *ctx, u8 *dst, dst 26 arch/x86/crypto/cast5_avx_glue.c asmlinkage void cast5_ctr_16way(struct cast5_ctx *ctx, u8 *dst, const u8 *src, dst 55 arch/x86/crypto/cast5_avx_glue.c void (*fn)(struct cast5_ctx *ctx, u8 *dst, const u8 *src); dst 62 arch/x86/crypto/cast5_avx_glue.c u8 *wdst = walk.dst.virt.addr; dst 123 arch/x86/crypto/cast5_avx_glue.c u64 *dst = (u64 *)walk.dst.virt.addr; dst 127 arch/x86/crypto/cast5_avx_glue.c *dst = *src ^ *iv; dst 128 arch/x86/crypto/cast5_avx_glue.c __cast5_encrypt(ctx, (u8 *)dst, (u8 *)dst); dst 129 arch/x86/crypto/cast5_avx_glue.c iv = dst; dst 131 arch/x86/crypto/cast5_avx_glue.c dst++; dst 148 arch/x86/crypto/cast5_avx_glue.c u64 *dst = (u64 *)walk->dst.virt.addr; dst 153 arch/x86/crypto/cast5_avx_glue.c dst += nbytes / bsize - 1; dst 162 arch/x86/crypto/cast5_avx_glue.c dst -= CAST5_PARALLEL_BLOCKS - 1; dst 164 arch/x86/crypto/cast5_avx_glue.c cast5_cbc_dec_16way(ctx, (u8 *)dst, (u8 *)src); dst 170 arch/x86/crypto/cast5_avx_glue.c *dst ^= *(src - 1); dst 172 arch/x86/crypto/cast5_avx_glue.c dst -= 1; dst 178 arch/x86/crypto/cast5_avx_glue.c __cast5_decrypt(ctx, (u8 *)dst, (u8 *)src); dst 184 arch/x86/crypto/cast5_avx_glue.c *dst ^= *(src - 1); dst 186 arch/x86/crypto/cast5_avx_glue.c dst -= 
1; dst 190 arch/x86/crypto/cast5_avx_glue.c *dst ^= *(u64 *)walk->iv; dst 222 arch/x86/crypto/cast5_avx_glue.c u8 *dst = walk->dst.virt.addr; dst 226 arch/x86/crypto/cast5_avx_glue.c crypto_xor_cpy(dst, keystream, src, nbytes); dst 237 arch/x86/crypto/cast5_avx_glue.c u64 *dst = (u64 *)walk->dst.virt.addr; dst 242 arch/x86/crypto/cast5_avx_glue.c cast5_ctr_16way(ctx, (u8 *)dst, (u8 *)src, dst 246 arch/x86/crypto/cast5_avx_glue.c dst += CAST5_PARALLEL_BLOCKS; dst 258 arch/x86/crypto/cast5_avx_glue.c if (dst != src) dst 259 arch/x86/crypto/cast5_avx_glue.c *dst = *src; dst 265 arch/x86/crypto/cast5_avx_glue.c *dst ^= ctrblk; dst 268 arch/x86/crypto/cast5_avx_glue.c dst += 1; dst 23 arch/x86/crypto/cast6_avx_glue.c asmlinkage void cast6_ecb_enc_8way(struct cast6_ctx *ctx, u8 *dst, dst 25 arch/x86/crypto/cast6_avx_glue.c asmlinkage void cast6_ecb_dec_8way(struct cast6_ctx *ctx, u8 *dst, dst 28 arch/x86/crypto/cast6_avx_glue.c asmlinkage void cast6_cbc_dec_8way(struct cast6_ctx *ctx, u8 *dst, dst 30 arch/x86/crypto/cast6_avx_glue.c asmlinkage void cast6_ctr_8way(struct cast6_ctx *ctx, u8 *dst, const u8 *src, dst 33 arch/x86/crypto/cast6_avx_glue.c asmlinkage void cast6_xts_enc_8way(struct cast6_ctx *ctx, u8 *dst, dst 35 arch/x86/crypto/cast6_avx_glue.c asmlinkage void cast6_xts_dec_8way(struct cast6_ctx *ctx, u8 *dst, dst 44 arch/x86/crypto/cast6_avx_glue.c static void cast6_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv) dst 46 arch/x86/crypto/cast6_avx_glue.c glue_xts_crypt_128bit_one(ctx, dst, src, iv, dst 50 arch/x86/crypto/cast6_avx_glue.c static void cast6_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv) dst 52 arch/x86/crypto/cast6_avx_glue.c glue_xts_crypt_128bit_one(ctx, dst, src, iv, dst 56 arch/x86/crypto/cast6_avx_glue.c static void cast6_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv) dst 64 arch/x86/crypto/cast6_avx_glue.c u128_xor(dst, src, (u128 *)&ctrblk); dst 19 arch/x86/crypto/chacha_glue.c asmlinkage void chacha_block_xor_ssse3(u32 *state, u8 *dst, const u8 *src, dst 21 arch/x86/crypto/chacha_glue.c asmlinkage void chacha_4block_xor_ssse3(u32 *state, u8 *dst, const u8 *src, dst 25 arch/x86/crypto/chacha_glue.c asmlinkage void chacha_2block_xor_avx2(u32 *state, u8 *dst, const u8 *src, dst 27 arch/x86/crypto/chacha_glue.c asmlinkage void chacha_4block_xor_avx2(u32 *state, u8 *dst, const u8 *src, dst 29 arch/x86/crypto/chacha_glue.c asmlinkage void chacha_8block_xor_avx2(u32 *state, u8 *dst, const u8 *src, dst 33 arch/x86/crypto/chacha_glue.c asmlinkage void chacha_2block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src, dst 35 arch/x86/crypto/chacha_glue.c asmlinkage void chacha_4block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src, dst 37 arch/x86/crypto/chacha_glue.c asmlinkage void chacha_8block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src, dst 49 arch/x86/crypto/chacha_glue.c static void chacha_dosimd(u32 *state, u8 *dst, const u8 *src, dst 56 arch/x86/crypto/chacha_glue.c chacha_8block_xor_avx512vl(state, dst, src, bytes, dst 60 arch/x86/crypto/chacha_glue.c dst += CHACHA_BLOCK_SIZE * 8; dst 64 arch/x86/crypto/chacha_glue.c chacha_8block_xor_avx512vl(state, dst, src, bytes, dst 70 arch/x86/crypto/chacha_glue.c chacha_4block_xor_avx512vl(state, dst, src, bytes, dst 76 arch/x86/crypto/chacha_glue.c chacha_2block_xor_avx512vl(state, dst, src, bytes, dst 85 arch/x86/crypto/chacha_glue.c chacha_8block_xor_avx2(state, dst, src, bytes, nrounds); dst 88 arch/x86/crypto/chacha_glue.c dst += CHACHA_BLOCK_SIZE * 8; dst 92 
arch/x86/crypto/chacha_glue.c chacha_8block_xor_avx2(state, dst, src, bytes, nrounds); dst 97 arch/x86/crypto/chacha_glue.c chacha_4block_xor_avx2(state, dst, src, bytes, nrounds); dst 102 arch/x86/crypto/chacha_glue.c chacha_2block_xor_avx2(state, dst, src, bytes, nrounds); dst 109 arch/x86/crypto/chacha_glue.c chacha_4block_xor_ssse3(state, dst, src, bytes, nrounds); dst 112 arch/x86/crypto/chacha_glue.c dst += CHACHA_BLOCK_SIZE * 4; dst 116 arch/x86/crypto/chacha_glue.c chacha_4block_xor_ssse3(state, dst, src, bytes, nrounds); dst 121 arch/x86/crypto/chacha_glue.c chacha_block_xor_ssse3(state, dst, src, bytes, nrounds); dst 146 arch/x86/crypto/chacha_glue.c chacha_dosimd(state, walk->dst.virt.addr, walk->src.virt.addr, dst 27 arch/x86/crypto/des3_ede_glue.c asmlinkage void des3_ede_x86_64_crypt_blk(const u32 *expkey, u8 *dst, dst 31 arch/x86/crypto/des3_ede_glue.c asmlinkage void des3_ede_x86_64_crypt_blk_3way(const u32 *expkey, u8 *dst, dst 34 arch/x86/crypto/des3_ede_glue.c static inline void des3_ede_enc_blk(struct des3_ede_x86_ctx *ctx, u8 *dst, dst 39 arch/x86/crypto/des3_ede_glue.c des3_ede_x86_64_crypt_blk(enc_ctx, dst, src); dst 42 arch/x86/crypto/des3_ede_glue.c static inline void des3_ede_dec_blk(struct des3_ede_x86_ctx *ctx, u8 *dst, dst 47 arch/x86/crypto/des3_ede_glue.c des3_ede_x86_64_crypt_blk(dec_ctx, dst, src); dst 50 arch/x86/crypto/des3_ede_glue.c static inline void des3_ede_enc_blk_3way(struct des3_ede_x86_ctx *ctx, u8 *dst, dst 55 arch/x86/crypto/des3_ede_glue.c des3_ede_x86_64_crypt_blk_3way(enc_ctx, dst, src); dst 58 arch/x86/crypto/des3_ede_glue.c static inline void des3_ede_dec_blk_3way(struct des3_ede_x86_ctx *ctx, u8 *dst, dst 63 arch/x86/crypto/des3_ede_glue.c des3_ede_x86_64_crypt_blk_3way(dec_ctx, dst, src); dst 66 arch/x86/crypto/des3_ede_glue.c static void des3_ede_x86_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 68 arch/x86/crypto/des3_ede_glue.c des3_ede_enc_blk(crypto_tfm_ctx(tfm), dst, src); dst 71 arch/x86/crypto/des3_ede_glue.c static void des3_ede_x86_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 73 arch/x86/crypto/des3_ede_glue.c des3_ede_dec_blk(crypto_tfm_ctx(tfm), dst, src); dst 87 arch/x86/crypto/des3_ede_glue.c u8 *wdst = walk.dst.virt.addr; dst 142 arch/x86/crypto/des3_ede_glue.c u64 *dst = (u64 *)walk->dst.virt.addr; dst 146 arch/x86/crypto/des3_ede_glue.c *dst = *src ^ *iv; dst 147 arch/x86/crypto/des3_ede_glue.c des3_ede_enc_blk(ctx, (u8 *)dst, (u8 *)dst); dst 148 arch/x86/crypto/des3_ede_glue.c iv = dst; dst 151 arch/x86/crypto/des3_ede_glue.c dst += 1; dst 183 arch/x86/crypto/des3_ede_glue.c u64 *dst = (u64 *)walk->dst.virt.addr; dst 189 arch/x86/crypto/des3_ede_glue.c dst += nbytes / bsize - 1; dst 198 arch/x86/crypto/des3_ede_glue.c dst -= 3 - 1; dst 203 arch/x86/crypto/des3_ede_glue.c des3_ede_dec_blk_3way(ctx, (u8 *)dst, (u8 *)src); dst 205 arch/x86/crypto/des3_ede_glue.c dst[1] ^= ivs[0]; dst 206 arch/x86/crypto/des3_ede_glue.c dst[2] ^= ivs[1]; dst 212 arch/x86/crypto/des3_ede_glue.c *dst ^= *(src - 1); dst 214 arch/x86/crypto/des3_ede_glue.c dst -= 1; dst 220 arch/x86/crypto/des3_ede_glue.c des3_ede_dec_blk(ctx, (u8 *)dst, (u8 *)src); dst 226 arch/x86/crypto/des3_ede_glue.c *dst ^= *(src - 1); dst 228 arch/x86/crypto/des3_ede_glue.c dst -= 1; dst 232 arch/x86/crypto/des3_ede_glue.c *dst ^= *(u64 *)walk->iv; dst 262 arch/x86/crypto/des3_ede_glue.c u8 *dst = walk->dst.virt.addr; dst 266 arch/x86/crypto/des3_ede_glue.c crypto_xor_cpy(dst, keystream, src, nbytes); dst 277 arch/x86/crypto/des3_ede_glue.c 
__be64 *dst = (__be64 *)walk->dst.virt.addr; dst 292 arch/x86/crypto/des3_ede_glue.c dst[0] = src[0] ^ ctrblocks[0]; dst 293 arch/x86/crypto/des3_ede_glue.c dst[1] = src[1] ^ ctrblocks[1]; dst 294 arch/x86/crypto/des3_ede_glue.c dst[2] = src[2] ^ ctrblocks[2]; dst 297 arch/x86/crypto/des3_ede_glue.c dst += 3; dst 310 arch/x86/crypto/des3_ede_glue.c dst[0] = src[0] ^ ctrblocks[0]; dst 313 arch/x86/crypto/des3_ede_glue.c dst += 1; dst 26 arch/x86/crypto/ghash-clmulni-intel_glue.c void clmul_ghash_mul(char *dst, const u128 *shash); dst 28 arch/x86/crypto/ghash-clmulni-intel_glue.c void clmul_ghash_update(char *dst, const char *src, unsigned int srclen, dst 83 arch/x86/crypto/ghash-clmulni-intel_glue.c u8 *dst = dctx->buffer; dst 88 arch/x86/crypto/ghash-clmulni-intel_glue.c u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes); dst 97 arch/x86/crypto/ghash-clmulni-intel_glue.c clmul_ghash_mul(dst, &ctx->shash); dst 100 arch/x86/crypto/ghash-clmulni-intel_glue.c clmul_ghash_update(dst, src, srclen, &ctx->shash); dst 108 arch/x86/crypto/ghash-clmulni-intel_glue.c *dst++ ^= *src++; dst 116 arch/x86/crypto/ghash-clmulni-intel_glue.c u8 *dst = dctx->buffer; dst 119 arch/x86/crypto/ghash-clmulni-intel_glue.c u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes); dst 125 arch/x86/crypto/ghash-clmulni-intel_glue.c clmul_ghash_mul(dst, &ctx->shash); dst 132 arch/x86/crypto/ghash-clmulni-intel_glue.c static int ghash_final(struct shash_desc *desc, u8 *dst) dst 139 arch/x86/crypto/ghash-clmulni-intel_glue.c memcpy(dst, buf, GHASH_BLOCK_SIZE); dst 35 arch/x86/crypto/glue_helper.c u8 *dst = walk.dst.virt.addr; dst 49 arch/x86/crypto/glue_helper.c gctx->funcs[i].fn_u.ecb(ctx, dst, src); dst 51 arch/x86/crypto/glue_helper.c dst += func_bytes; dst 79 arch/x86/crypto/glue_helper.c u128 *dst = (u128 *)walk.dst.virt.addr; dst 83 arch/x86/crypto/glue_helper.c u128_xor(dst, src, iv); dst 84 arch/x86/crypto/glue_helper.c fn(ctx, (u8 *)dst, (u8 *)dst); dst 85 arch/x86/crypto/glue_helper.c iv = dst; dst 87 arch/x86/crypto/glue_helper.c dst++; dst 112 arch/x86/crypto/glue_helper.c u128 *dst = walk.dst.virt.addr; dst 121 arch/x86/crypto/glue_helper.c dst += nbytes / bsize - 1; dst 135 arch/x86/crypto/glue_helper.c dst -= num_blocks - 1; dst 137 arch/x86/crypto/glue_helper.c gctx->funcs[i].fn_u.cbc(ctx, dst, src); dst 143 arch/x86/crypto/glue_helper.c u128_xor(dst, dst, --src); dst 144 arch/x86/crypto/glue_helper.c dst--; dst 148 arch/x86/crypto/glue_helper.c u128_xor(dst, dst, (u128 *)walk.iv); dst 172 arch/x86/crypto/glue_helper.c u128 *dst = walk.dst.virt.addr; dst 191 arch/x86/crypto/glue_helper.c gctx->funcs[i].fn_u.ctr(ctx, dst, src, &ctrblk); dst 193 arch/x86/crypto/glue_helper.c dst += num_blocks; dst 215 arch/x86/crypto/glue_helper.c memcpy(walk.dst.virt.addr, &tmp, nbytes); dst 232 arch/x86/crypto/glue_helper.c u128 *dst = walk->dst.virt.addr; dst 243 arch/x86/crypto/glue_helper.c gctx->funcs[i].fn_u.xts(ctx, dst, src, dst 247 arch/x86/crypto/glue_helper.c dst += num_blocks; dst 285 arch/x86/crypto/glue_helper.c skcipher_request_set_crypt(&subreq, req->src, req->dst, dst 312 arch/x86/crypto/glue_helper.c struct scatterlist *src, *dst; dst 316 arch/x86/crypto/glue_helper.c dst = src = scatterwalk_ffwd(s, req->src, req->cryptlen); dst 317 arch/x86/crypto/glue_helper.c if (req->dst != req->src) dst 318 arch/x86/crypto/glue_helper.c dst = scatterwalk_ffwd(d, req->dst, req->cryptlen); dst 327 arch/x86/crypto/glue_helper.c skcipher_request_set_crypt(&subreq, src, dst, XTS_BLOCK_SIZE, dst 336 arch/x86/crypto/glue_helper.c 
scatterwalk_map_and_copy(b, dst, 0, XTS_BLOCK_SIZE, 0); dst 340 arch/x86/crypto/glue_helper.c scatterwalk_map_and_copy(b, dst, 0, tail, 1); dst 342 arch/x86/crypto/glue_helper.c skcipher_request_set_crypt(&subreq, dst, dst, XTS_BLOCK_SIZE, dst 357 arch/x86/crypto/glue_helper.c void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src, le128 *iv, dst 366 arch/x86/crypto/glue_helper.c u128_xor(dst, src, (u128 *)&ivblk); dst 369 arch/x86/crypto/glue_helper.c fn(ctx, (u8 *)dst, (u8 *)dst); dst 372 arch/x86/crypto/glue_helper.c u128_xor(dst, dst, (u128 *)&ivblk); dst 22 arch/x86/crypto/serpent_avx2_glue.c asmlinkage void serpent_ecb_enc_16way(struct serpent_ctx *ctx, u8 *dst, dst 24 arch/x86/crypto/serpent_avx2_glue.c asmlinkage void serpent_ecb_dec_16way(struct serpent_ctx *ctx, u8 *dst, dst 26 arch/x86/crypto/serpent_avx2_glue.c asmlinkage void serpent_cbc_dec_16way(void *ctx, u128 *dst, const u128 *src); dst 28 arch/x86/crypto/serpent_avx2_glue.c asmlinkage void serpent_ctr_16way(void *ctx, u128 *dst, const u128 *src, dst 30 arch/x86/crypto/serpent_avx2_glue.c asmlinkage void serpent_xts_enc_16way(struct serpent_ctx *ctx, u8 *dst, dst 32 arch/x86/crypto/serpent_avx2_glue.c asmlinkage void serpent_xts_dec_16way(struct serpent_ctx *ctx, u8 *dst, dst 23 arch/x86/crypto/serpent_avx_glue.c asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst, dst 27 arch/x86/crypto/serpent_avx_glue.c asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst, dst 31 arch/x86/crypto/serpent_avx_glue.c asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst, dst 35 arch/x86/crypto/serpent_avx_glue.c asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst, dst 39 arch/x86/crypto/serpent_avx_glue.c asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst, dst 43 arch/x86/crypto/serpent_avx_glue.c asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst, dst 47 arch/x86/crypto/serpent_avx_glue.c void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv) dst 55 arch/x86/crypto/serpent_avx_glue.c u128_xor(dst, src, (u128 *)&ctrblk); dst 59 arch/x86/crypto/serpent_avx_glue.c void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv) dst 61 arch/x86/crypto/serpent_avx_glue.c glue_xts_crypt_128bit_one(ctx, dst, src, iv, dst 66 arch/x86/crypto/serpent_avx_glue.c void serpent_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv) dst 68 arch/x86/crypto/serpent_avx_glue.c glue_xts_crypt_128bit_one(ctx, dst, src, iv, dst 34 arch/x86/crypto/serpent_sse2_glue.c static void serpent_decrypt_cbc_xway(void *ctx, u128 *dst, const u128 *src) dst 42 arch/x86/crypto/serpent_sse2_glue.c serpent_dec_blk_xway(ctx, (u8 *)dst, (u8 *)src); dst 45 arch/x86/crypto/serpent_sse2_glue.c u128_xor(dst + (j + 1), dst + (j + 1), ivs + j); dst 48 arch/x86/crypto/serpent_sse2_glue.c static void serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv) dst 56 arch/x86/crypto/serpent_sse2_glue.c u128_xor(dst, src, (u128 *)&ctrblk); dst 59 arch/x86/crypto/serpent_sse2_glue.c static void serpent_crypt_ctr_xway(void *ctx, u128 *dst, const u128 *src, dst 66 arch/x86/crypto/serpent_sse2_glue.c if (dst != src) dst 67 arch/x86/crypto/serpent_sse2_glue.c dst[i] = src[i]; dst 73 arch/x86/crypto/serpent_sse2_glue.c serpent_enc_blk_xway_xor(ctx, (u8 *)dst, (u8 *)ctrblks); dst 25 arch/x86/crypto/twofish_avx_glue.c asmlinkage void twofish_ecb_enc_8way(struct twofish_ctx *ctx, u8 *dst, dst 27 
arch/x86/crypto/twofish_avx_glue.c asmlinkage void twofish_ecb_dec_8way(struct twofish_ctx *ctx, u8 *dst, dst 30 arch/x86/crypto/twofish_avx_glue.c asmlinkage void twofish_cbc_dec_8way(struct twofish_ctx *ctx, u8 *dst, dst 32 arch/x86/crypto/twofish_avx_glue.c asmlinkage void twofish_ctr_8way(struct twofish_ctx *ctx, u8 *dst, dst 35 arch/x86/crypto/twofish_avx_glue.c asmlinkage void twofish_xts_enc_8way(struct twofish_ctx *ctx, u8 *dst, dst 37 arch/x86/crypto/twofish_avx_glue.c asmlinkage void twofish_xts_dec_8way(struct twofish_ctx *ctx, u8 *dst, dst 46 arch/x86/crypto/twofish_avx_glue.c static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst, dst 49 arch/x86/crypto/twofish_avx_glue.c __twofish_enc_blk_3way(ctx, dst, src, false); dst 52 arch/x86/crypto/twofish_avx_glue.c static void twofish_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv) dst 54 arch/x86/crypto/twofish_avx_glue.c glue_xts_crypt_128bit_one(ctx, dst, src, iv, dst 58 arch/x86/crypto/twofish_avx_glue.c static void twofish_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv) dst 60 arch/x86/crypto/twofish_avx_glue.c glue_xts_crypt_128bit_one(ctx, dst, src, iv, dst 47 arch/x86/crypto/twofish_glue.c asmlinkage void twofish_enc_blk(struct twofish_ctx *ctx, u8 *dst, dst 50 arch/x86/crypto/twofish_glue.c asmlinkage void twofish_dec_blk(struct twofish_ctx *ctx, u8 *dst, dst 54 arch/x86/crypto/twofish_glue.c static void twofish_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 56 arch/x86/crypto/twofish_glue.c twofish_enc_blk(crypto_tfm_ctx(tfm), dst, src); dst 59 arch/x86/crypto/twofish_glue.c static void twofish_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 61 arch/x86/crypto/twofish_glue.c twofish_dec_blk(crypto_tfm_ctx(tfm), dst, src); dst 28 arch/x86/crypto/twofish_glue_3way.c static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst, dst 31 arch/x86/crypto/twofish_glue_3way.c __twofish_enc_blk_3way(ctx, dst, src, false); dst 34 arch/x86/crypto/twofish_glue_3way.c static inline void twofish_enc_blk_xor_3way(struct twofish_ctx *ctx, u8 *dst, dst 37 arch/x86/crypto/twofish_glue_3way.c __twofish_enc_blk_3way(ctx, dst, src, true); dst 40 arch/x86/crypto/twofish_glue_3way.c void twofish_dec_blk_cbc_3way(void *ctx, u128 *dst, const u128 *src) dst 47 arch/x86/crypto/twofish_glue_3way.c twofish_dec_blk_3way(ctx, (u8 *)dst, (u8 *)src); dst 49 arch/x86/crypto/twofish_glue_3way.c u128_xor(&dst[1], &dst[1], &ivs[0]); dst 50 arch/x86/crypto/twofish_glue_3way.c u128_xor(&dst[2], &dst[2], &ivs[1]); dst 54 arch/x86/crypto/twofish_glue_3way.c void twofish_enc_blk_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv) dst 58 arch/x86/crypto/twofish_glue_3way.c if (dst != src) dst 59 arch/x86/crypto/twofish_glue_3way.c *dst = *src; dst 65 arch/x86/crypto/twofish_glue_3way.c u128_xor(dst, dst, (u128 *)&ctrblk); dst 69 arch/x86/crypto/twofish_glue_3way.c void twofish_enc_blk_ctr_3way(void *ctx, u128 *dst, const u128 *src, dst 74 arch/x86/crypto/twofish_glue_3way.c if (dst != src) { dst 75 arch/x86/crypto/twofish_glue_3way.c dst[0] = src[0]; dst 76 arch/x86/crypto/twofish_glue_3way.c dst[1] = src[1]; dst 77 arch/x86/crypto/twofish_glue_3way.c dst[2] = src[2]; dst 87 arch/x86/crypto/twofish_glue_3way.c twofish_enc_blk_xor_3way(ctx, (u8 *)dst, (u8 *)ctrblks); dst 30 arch/x86/include/asm/checksum_32.h asmlinkage __wsum csum_partial_copy_generic(const void *src, void *dst, dst 41 arch/x86/include/asm/checksum_32.h static inline __wsum csum_partial_copy_nocheck(const void *src, void 
*dst, dst 44 arch/x86/include/asm/checksum_32.h return csum_partial_copy_generic(src, dst, len, sum, NULL, NULL); dst 48 arch/x86/include/asm/checksum_32.h void *dst, dst 56 arch/x86/include/asm/checksum_32.h ret = csum_partial_copy_generic((__force void *)src, dst, dst 178 arch/x86/include/asm/checksum_32.h void __user *dst, dst 185 arch/x86/include/asm/checksum_32.h if (access_ok(dst, len)) { dst 187 arch/x86/include/asm/checksum_32.h ret = csum_partial_copy_generic(src, (__force void *)dst, dst 137 arch/x86/include/asm/checksum_64.h extern __visible __wsum csum_partial_copy_generic(const void *src, const void *dst, dst 142 arch/x86/include/asm/checksum_64.h extern __wsum csum_partial_copy_from_user(const void __user *src, void *dst, dst 144 arch/x86/include/asm/checksum_64.h extern __wsum csum_partial_copy_to_user(const void *src, void __user *dst, dst 146 arch/x86/include/asm/checksum_64.h extern __wsum csum_partial_copy_nocheck(const void *src, void *dst, dst 35 arch/x86/include/asm/crypto/camellia.h asmlinkage void __camellia_enc_blk(struct camellia_ctx *ctx, u8 *dst, dst 37 arch/x86/include/asm/crypto/camellia.h asmlinkage void camellia_dec_blk(struct camellia_ctx *ctx, u8 *dst, dst 41 arch/x86/include/asm/crypto/camellia.h asmlinkage void __camellia_enc_blk_2way(struct camellia_ctx *ctx, u8 *dst, dst 43 arch/x86/include/asm/crypto/camellia.h asmlinkage void camellia_dec_blk_2way(struct camellia_ctx *ctx, u8 *dst, dst 47 arch/x86/include/asm/crypto/camellia.h asmlinkage void camellia_ecb_enc_16way(struct camellia_ctx *ctx, u8 *dst, dst 49 arch/x86/include/asm/crypto/camellia.h asmlinkage void camellia_ecb_dec_16way(struct camellia_ctx *ctx, u8 *dst, dst 52 arch/x86/include/asm/crypto/camellia.h asmlinkage void camellia_cbc_dec_16way(struct camellia_ctx *ctx, u8 *dst, dst 54 arch/x86/include/asm/crypto/camellia.h asmlinkage void camellia_ctr_16way(struct camellia_ctx *ctx, u8 *dst, dst 57 arch/x86/include/asm/crypto/camellia.h asmlinkage void camellia_xts_enc_16way(struct camellia_ctx *ctx, u8 *dst, dst 59 arch/x86/include/asm/crypto/camellia.h asmlinkage void camellia_xts_dec_16way(struct camellia_ctx *ctx, u8 *dst, dst 62 arch/x86/include/asm/crypto/camellia.h static inline void camellia_enc_blk(struct camellia_ctx *ctx, u8 *dst, dst 65 arch/x86/include/asm/crypto/camellia.h __camellia_enc_blk(ctx, dst, src, false); dst 68 arch/x86/include/asm/crypto/camellia.h static inline void camellia_enc_blk_xor(struct camellia_ctx *ctx, u8 *dst, dst 71 arch/x86/include/asm/crypto/camellia.h __camellia_enc_blk(ctx, dst, src, true); dst 74 arch/x86/include/asm/crypto/camellia.h static inline void camellia_enc_blk_2way(struct camellia_ctx *ctx, u8 *dst, dst 77 arch/x86/include/asm/crypto/camellia.h __camellia_enc_blk_2way(ctx, dst, src, false); dst 80 arch/x86/include/asm/crypto/camellia.h static inline void camellia_enc_blk_xor_2way(struct camellia_ctx *ctx, u8 *dst, dst 83 arch/x86/include/asm/crypto/camellia.h __camellia_enc_blk_2way(ctx, dst, src, true); dst 87 arch/x86/include/asm/crypto/camellia.h extern void camellia_decrypt_cbc_2way(void *ctx, u128 *dst, const u128 *src); dst 88 arch/x86/include/asm/crypto/camellia.h extern void camellia_crypt_ctr(void *ctx, u128 *dst, const u128 *src, dst 90 arch/x86/include/asm/crypto/camellia.h extern void camellia_crypt_ctr_2way(void *ctx, u128 *dst, const u128 *src, dst 93 arch/x86/include/asm/crypto/camellia.h extern void camellia_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv); dst 94 arch/x86/include/asm/crypto/camellia.h extern void 
camellia_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv); dst 14 arch/x86/include/asm/crypto/glue_helper.h typedef void (*common_glue_func_t)(void *ctx, u8 *dst, const u8 *src); dst 15 arch/x86/include/asm/crypto/glue_helper.h typedef void (*common_glue_cbc_func_t)(void *ctx, u128 *dst, const u128 *src); dst 16 arch/x86/include/asm/crypto/glue_helper.h typedef void (*common_glue_ctr_func_t)(void *ctx, u128 *dst, const u128 *src, dst 18 arch/x86/include/asm/crypto/glue_helper.h typedef void (*common_glue_xts_func_t)(void *ctx, u128 *dst, const u128 *src, dst 77 arch/x86/include/asm/crypto/glue_helper.h static inline void le128_to_be128(be128 *dst, const le128 *src) dst 79 arch/x86/include/asm/crypto/glue_helper.h dst->a = cpu_to_be64(le64_to_cpu(src->a)); dst 80 arch/x86/include/asm/crypto/glue_helper.h dst->b = cpu_to_be64(le64_to_cpu(src->b)); dst 83 arch/x86/include/asm/crypto/glue_helper.h static inline void be128_to_le128(le128 *dst, const be128 *src) dst 85 arch/x86/include/asm/crypto/glue_helper.h dst->a = cpu_to_le64(be64_to_cpu(src->a)); dst 86 arch/x86/include/asm/crypto/glue_helper.h dst->b = cpu_to_le64(be64_to_cpu(src->b)); dst 119 arch/x86/include/asm/crypto/glue_helper.h extern void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src, dst 18 arch/x86/include/asm/crypto/serpent-avx.h asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst, dst 20 arch/x86/include/asm/crypto/serpent-avx.h asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst, dst 23 arch/x86/include/asm/crypto/serpent-avx.h asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst, dst 25 arch/x86/include/asm/crypto/serpent-avx.h asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst, dst 28 arch/x86/include/asm/crypto/serpent-avx.h asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst, dst 30 arch/x86/include/asm/crypto/serpent-avx.h asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst, dst 33 arch/x86/include/asm/crypto/serpent-avx.h extern void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, dst 36 arch/x86/include/asm/crypto/serpent-avx.h extern void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv); dst 37 arch/x86/include/asm/crypto/serpent-avx.h extern void serpent_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv); dst 12 arch/x86/include/asm/crypto/serpent-sse2.h asmlinkage void __serpent_enc_blk_4way(struct serpent_ctx *ctx, u8 *dst, dst 14 arch/x86/include/asm/crypto/serpent-sse2.h asmlinkage void serpent_dec_blk_4way(struct serpent_ctx *ctx, u8 *dst, dst 17 arch/x86/include/asm/crypto/serpent-sse2.h static inline void serpent_enc_blk_xway(struct serpent_ctx *ctx, u8 *dst, dst 20 arch/x86/include/asm/crypto/serpent-sse2.h __serpent_enc_blk_4way(ctx, dst, src, false); dst 23 arch/x86/include/asm/crypto/serpent-sse2.h static inline void serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, u8 *dst, dst 26 arch/x86/include/asm/crypto/serpent-sse2.h __serpent_enc_blk_4way(ctx, dst, src, true); dst 29 arch/x86/include/asm/crypto/serpent-sse2.h static inline void serpent_dec_blk_xway(struct serpent_ctx *ctx, u8 *dst, dst 32 arch/x86/include/asm/crypto/serpent-sse2.h serpent_dec_blk_4way(ctx, dst, src); dst 39 arch/x86/include/asm/crypto/serpent-sse2.h asmlinkage void __serpent_enc_blk_8way(struct serpent_ctx *ctx, u8 *dst, dst 41 arch/x86/include/asm/crypto/serpent-sse2.h asmlinkage void serpent_dec_blk_8way(struct serpent_ctx *ctx, u8 *dst, dst 
44 arch/x86/include/asm/crypto/serpent-sse2.h static inline void serpent_enc_blk_xway(struct serpent_ctx *ctx, u8 *dst, dst 47 arch/x86/include/asm/crypto/serpent-sse2.h __serpent_enc_blk_8way(ctx, dst, src, false); dst 50 arch/x86/include/asm/crypto/serpent-sse2.h static inline void serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, u8 *dst, dst 53 arch/x86/include/asm/crypto/serpent-sse2.h __serpent_enc_blk_8way(ctx, dst, src, true); dst 56 arch/x86/include/asm/crypto/serpent-sse2.h static inline void serpent_dec_blk_xway(struct serpent_ctx *ctx, u8 *dst, dst 59 arch/x86/include/asm/crypto/serpent-sse2.h serpent_dec_blk_8way(ctx, dst, src); dst 10 arch/x86/include/asm/crypto/twofish.h asmlinkage void twofish_enc_blk(struct twofish_ctx *ctx, u8 *dst, dst 12 arch/x86/include/asm/crypto/twofish.h asmlinkage void twofish_dec_blk(struct twofish_ctx *ctx, u8 *dst, dst 16 arch/x86/include/asm/crypto/twofish.h asmlinkage void __twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst, dst 18 arch/x86/include/asm/crypto/twofish.h asmlinkage void twofish_dec_blk_3way(struct twofish_ctx *ctx, u8 *dst, dst 22 arch/x86/include/asm/crypto/twofish.h extern void twofish_dec_blk_cbc_3way(void *ctx, u128 *dst, const u128 *src); dst 23 arch/x86/include/asm/crypto/twofish.h extern void twofish_enc_blk_ctr(void *ctx, u128 *dst, const u128 *src, dst 25 arch/x86/include/asm/crypto/twofish.h extern void twofish_enc_blk_ctr_3way(void *ctx, u128 *dst, const u128 *src, dst 33 arch/x86/include/asm/fpu/internal.h extern int fpu__copy(struct task_struct *dst, struct task_struct *src); dst 19 arch/x86/include/asm/irqdomain.h extern void copy_irq_alloc_info(struct irq_alloc_info *dst, dst 322 arch/x86/include/asm/kvm_emulate.h struct operand dst; dst 105 arch/x86/include/asm/mpspec.h #define physids_and(dst, src1, src2) \ dst 106 arch/x86/include/asm/mpspec.h bitmap_and((dst).mask, (src1).mask, (src2).mask, MAX_LOCAL_APIC) dst 108 arch/x86/include/asm/mpspec.h #define physids_or(dst, src1, src2) \ dst 109 arch/x86/include/asm/mpspec.h bitmap_or((dst).mask, (src1).mask, (src2).mask, MAX_LOCAL_APIC) dst 114 arch/x86/include/asm/mpspec.h #define physids_complement(dst, src) \ dst 115 arch/x86/include/asm/mpspec.h bitmap_complement((dst).mask, (src).mask, MAX_LOCAL_APIC) dst 1301 arch/x86/include/asm/pgtable.h static inline void clone_pgd_range(pgd_t *dst, pgd_t *src, int count) dst 1303 arch/x86/include/asm/pgtable.h memcpy(dst, src, count * sizeof(pgd_t)); dst 1308 arch/x86/include/asm/pgtable.h memcpy(kernel_to_user_pgdp(dst), kernel_to_user_pgdp(src), dst 268 arch/x86/include/asm/ptrace.h extern long probe_kernel_read(void *dst, const void *src, size_t size); dst 75 arch/x86/include/asm/string_64.h #define memcpy(dst, src, len) __memcpy(dst, src, len) dst 76 arch/x86/include/asm/string_64.h #define memmove(dst, src, len) __memmove(dst, src, len) dst 86 arch/x86/include/asm/string_64.h __must_check unsigned long __memcpy_mcsafe(void *dst, const void *src, dst 106 arch/x86/include/asm/string_64.h memcpy_mcsafe(void *dst, const void *src, size_t cnt) dst 110 arch/x86/include/asm/string_64.h return __memcpy_mcsafe(dst, src, cnt); dst 113 arch/x86/include/asm/string_64.h memcpy(dst, src, cnt); dst 119 arch/x86/include/asm/string_64.h void __memcpy_flushcache(void *dst, const void *src, size_t cnt); dst 120 arch/x86/include/asm/string_64.h static __always_inline void memcpy_flushcache(void *dst, const void *src, size_t cnt) dst 125 arch/x86/include/asm/string_64.h asm ("movntil %1, %0" : "=m"(*(u32 *)dst) : "r"(*(u32 *)src)); 
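The string_64.h entries just above show the dispatch shape behind memcpy_flushcache(): a handful of small, compile-time-constant sizes (4, 8 and 16 bytes) are copied inline with non-temporal movnti/movntiq stores, and everything else falls back to the out-of-line __memcpy_flushcache() helper. The sketch below is a hedged, userspace-only illustration of that idea, not the kernel code: flushcache_copy() and slow_flushcache_copy() are hypothetical names, the constant-size check is simplified to a runtime switch, and the kernel's movnti assembly is approximated with the SSE2 _mm_stream_si32/_mm_stream_si64 intrinsics.

/*
 * Illustrative sketch only (assumed names, userspace build): mirrors the
 * small-size fast path of memcpy_flushcache() from
 * arch/x86/include/asm/string_64.h, where 4-, 8- and 16-byte copies use
 * non-temporal stores and larger copies fall back to a generic helper.
 */
#include <emmintrin.h>		/* _mm_stream_si32/_mm_stream_si64, _mm_sfence */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical out-of-line fallback: a plain memcpy plus a store fence. */
static void slow_flushcache_copy(void *dst, const void *src, size_t cnt)
{
	memcpy(dst, src, cnt);
	_mm_sfence();			/* order the preceding stores */
}

static inline void flushcache_copy(void *dst, const void *src, size_t cnt)
{
	switch (cnt) {
	case 4:				/* one 32-bit non-temporal store */
		_mm_stream_si32((int *)dst, *(const int *)src);
		return;
	case 8:				/* one 64-bit non-temporal store */
		_mm_stream_si64((long long *)dst, *(const long long *)src);
		return;
	case 16:			/* two 64-bit non-temporal stores */
		_mm_stream_si64((long long *)dst, *(const long long *)src);
		_mm_stream_si64((long long *)dst + 1,
				*((const long long *)src + 1));
		return;
	default:			/* everything else: generic path */
		slow_flushcache_copy(dst, src, cnt);
		return;
	}
}

int main(void)
{
	long long src[4] = { 1, 2, 3, 4 }, dst[4] = { 0 };

	flushcache_copy(dst, src, sizeof(src));	/* 32 bytes: fallback path */
	flushcache_copy(dst, src, 8);		/* 8 bytes: streaming store */
	printf("%lld %lld\n", dst[0], dst[1]);
	return 0;
}

Compiled with a plain gcc -O2 on x86-64 this prints "1 2"; the design point it illustrates is that bypassing the cache with streaming stores only pays off for the few fixed sizes worth special-casing, so everything else is routed to one shared slow path.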
dst 128 arch/x86/include/asm/string_64.h asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src)); dst 131 arch/x86/include/asm/string_64.h asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src)); dst 132 arch/x86/include/asm/string_64.h asm ("movntiq %1, %0" : "=m"(*(u64 *)(dst + 8)) : "r"(*(u64 *)(src + 8))); dst 136 arch/x86/include/asm/string_64.h __memcpy_flushcache(dst, src, cnt); dst 246 arch/x86/include/asm/thread_info.h extern int arch_dup_task_struct(struct task_struct *dst, struct task_struct *src); dst 580 arch/x86/include/asm/uaccess.h strncpy_from_user(char *dst, const char __user *src, long count); dst 741 arch/x86/include/asm/uaccess.h #define unsafe_copy_loop(dst, src, len, type, label) \ dst 743 arch/x86/include/asm/uaccess.h unsafe_put_user(*(type *)src,(type __user *)dst,label); \ dst 744 arch/x86/include/asm/uaccess.h dst += sizeof(type); \ dst 66 arch/x86/include/asm/uaccess_64.h raw_copy_from_user(void *dst, const void __user *src, unsigned long size) dst 71 arch/x86/include/asm/uaccess_64.h return copy_user_generic(dst, (__force void *)src, size); dst 75 arch/x86/include/asm/uaccess_64.h __get_user_asm_nozero(*(u8 *)dst, (u8 __user *)src, dst 81 arch/x86/include/asm/uaccess_64.h __get_user_asm_nozero(*(u16 *)dst, (u16 __user *)src, dst 87 arch/x86/include/asm/uaccess_64.h __get_user_asm_nozero(*(u32 *)dst, (u32 __user *)src, dst 93 arch/x86/include/asm/uaccess_64.h __get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src, dst 99 arch/x86/include/asm/uaccess_64.h __get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src, dst 102 arch/x86/include/asm/uaccess_64.h __get_user_asm_nozero(*(u16 *)(8 + (char *)dst), dst 109 arch/x86/include/asm/uaccess_64.h __get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src, dst 112 arch/x86/include/asm/uaccess_64.h __get_user_asm_nozero(*(u64 *)(8 + (char *)dst), dst 118 arch/x86/include/asm/uaccess_64.h return copy_user_generic(dst, (__force void *)src, size); dst 123 arch/x86/include/asm/uaccess_64.h raw_copy_to_user(void __user *dst, const void *src, unsigned long size) dst 128 arch/x86/include/asm/uaccess_64.h return copy_user_generic((__force void *)dst, src, size); dst 132 arch/x86/include/asm/uaccess_64.h __put_user_asm(*(u8 *)src, (u8 __user *)dst, dst 138 arch/x86/include/asm/uaccess_64.h __put_user_asm(*(u16 *)src, (u16 __user *)dst, dst 144 arch/x86/include/asm/uaccess_64.h __put_user_asm(*(u32 *)src, (u32 __user *)dst, dst 150 arch/x86/include/asm/uaccess_64.h __put_user_asm(*(u64 *)src, (u64 __user *)dst, dst 156 arch/x86/include/asm/uaccess_64.h __put_user_asm(*(u64 *)src, (u64 __user *)dst, dst 160 arch/x86/include/asm/uaccess_64.h __put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst, dst 167 arch/x86/include/asm/uaccess_64.h __put_user_asm(*(u64 *)src, (u64 __user *)dst, dst 171 arch/x86/include/asm/uaccess_64.h __put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst, dst 177 arch/x86/include/asm/uaccess_64.h return copy_user_generic((__force void *)dst, src, size); dst 182 arch/x86/include/asm/uaccess_64.h unsigned long raw_copy_in_user(void __user *dst, const void __user *src, unsigned long size) dst 184 arch/x86/include/asm/uaccess_64.h return copy_user_generic((__force void *)dst, dst 188 arch/x86/include/asm/uaccess_64.h extern long __copy_user_nocache(void *dst, const void __user *src, dst 191 arch/x86/include/asm/uaccess_64.h extern long __copy_user_flushcache(void *dst, const void __user *src, unsigned size); dst 196 arch/x86/include/asm/uaccess_64.h __copy_from_user_inatomic_nocache(void *dst, const void __user 
*src, dst 199 arch/x86/include/asm/uaccess_64.h kasan_check_write(dst, size); dst 200 arch/x86/include/asm/uaccess_64.h return __copy_user_nocache(dst, src, size, 0); dst 204 arch/x86/include/asm/uaccess_64.h __copy_from_user_flushcache(void *dst, const void __user *src, unsigned size) dst 206 arch/x86/include/asm/uaccess_64.h kasan_check_write(dst, size); dst 207 arch/x86/include/asm/uaccess_64.h return __copy_user_flushcache(dst, src, size); dst 886 arch/x86/kernel/apic/io_apic.c static void ioapic_copy_alloc_attr(struct irq_alloc_info *dst, dst 892 arch/x86/kernel/apic/io_apic.c copy_irq_alloc_info(dst, src); dst 893 arch/x86/kernel/apic/io_apic.c dst->type = X86_IRQ_ALLOC_TYPE_IOAPIC; dst 894 arch/x86/kernel/apic/io_apic.c dst->ioapic_id = mpc_ioapic_id(ioapic_idx); dst 895 arch/x86/kernel/apic/io_apic.c dst->ioapic_pin = pin; dst 896 arch/x86/kernel/apic/io_apic.c dst->ioapic_valid = 1; dst 898 arch/x86/kernel/apic/io_apic.c dst->ioapic_node = src->ioapic_node; dst 899 arch/x86/kernel/apic/io_apic.c dst->ioapic_trigger = src->ioapic_trigger; dst 900 arch/x86/kernel/apic/io_apic.c dst->ioapic_polarity = src->ioapic_polarity; dst 902 arch/x86/kernel/apic/io_apic.c dst->ioapic_node = NUMA_NO_NODE; dst 904 arch/x86/kernel/apic/io_apic.c dst->ioapic_trigger = trigger; dst 905 arch/x86/kernel/apic/io_apic.c dst->ioapic_polarity = polarity; dst 911 arch/x86/kernel/apic/io_apic.c dst->ioapic_trigger = IOAPIC_LEVEL; dst 912 arch/x86/kernel/apic/io_apic.c dst->ioapic_polarity = IOAPIC_POL_LOW; dst 70 arch/x86/kernel/apic/vector.c void copy_irq_alloc_info(struct irq_alloc_info *dst, struct irq_alloc_info *src) dst 73 arch/x86/kernel/apic/vector.c *dst = *src; dst 75 arch/x86/kernel/apic/vector.c memset(dst, 0, sizeof(*dst)); dst 167 arch/x86/kernel/fpu/core.c int fpu__copy(struct task_struct *dst, struct task_struct *src) dst 169 arch/x86/kernel/fpu/core.c struct fpu *dst_fpu = &dst->thread.fpu; dst 202 arch/x86/kernel/fpu/core.c set_tsk_thread_flag(dst, TIF_NEED_FPU_LOAD); dst 1162 arch/x86/kernel/fpu/xstate.c void *dst = __raw_xsave_addr(xsave, i); dst 1167 arch/x86/kernel/fpu/xstate.c memcpy(dst, kbuf + offset, size); dst 1216 arch/x86/kernel/fpu/xstate.c void *dst = __raw_xsave_addr(xsave, i); dst 1221 arch/x86/kernel/fpu/xstate.c if (__copy_from_user(dst, ubuf + offset, size)) dst 97 arch/x86/kernel/process.c int arch_dup_task_struct(struct task_struct *dst, struct task_struct *src) dst 99 arch/x86/kernel/process.c memcpy(dst, src, arch_task_struct_size); dst 101 arch/x86/kernel/process.c dst->thread.vm86 = NULL; dst 104 arch/x86/kernel/process.c return fpu__copy(dst, src); dst 348 arch/x86/kvm/emulate.c #define FOP1E(op, dst) \ dst 349 arch/x86/kvm/emulate.c __FOP_FUNC(#op "_" #dst) \ dst 350 arch/x86/kvm/emulate.c "10: " #op " %" #dst " \n\t" \ dst 351 arch/x86/kvm/emulate.c __FOP_RET(#op "_" #dst) dst 353 arch/x86/kvm/emulate.c #define FOP1EEX(op, dst) \ dst 354 arch/x86/kvm/emulate.c FOP1E(op, dst) _ASM_EXTABLE(10b, kvm_fastop_exception) dst 382 arch/x86/kvm/emulate.c #define FOP2E(op, dst, src) \ dst 383 arch/x86/kvm/emulate.c __FOP_FUNC(#op "_" #dst "_" #src) \ dst 384 arch/x86/kvm/emulate.c #op " %" #src ", %" #dst " \n\t" \ dst 385 arch/x86/kvm/emulate.c __FOP_RET(#op "_" #dst "_" #src) dst 422 arch/x86/kvm/emulate.c #define FOP3E(op, dst, src, src2) \ dst 423 arch/x86/kvm/emulate.c __FOP_FUNC(#op "_" #dst "_" #src "_" #src2) \ dst 424 arch/x86/kvm/emulate.c #op " %" #src2 ", %" #src ", %" #dst " \n\t"\ dst 425 arch/x86/kvm/emulate.c __FOP_RET(#op "_" #dst "_" #src "_" #src2) dst 
505 arch/x86/kvm/emulate.c .dst_val = ctxt->dst.val64, dst 507 arch/x86/kvm/emulate.c .dst_bytes = ctxt->dst.bytes, dst 779 arch/x86/kvm/emulate.c static inline int assign_eip(struct x86_emulate_ctxt *ctxt, ulong dst, dst 786 arch/x86/kvm/emulate.c .ea = dst }; dst 789 arch/x86/kvm/emulate.c addr.ea = dst & ((1UL << (ctxt->op_bytes << 3)) - 1); dst 796 arch/x86/kvm/emulate.c static inline int assign_eip_near(struct x86_emulate_ctxt *ctxt, ulong dst) dst 798 arch/x86/kvm/emulate.c return assign_eip(ctxt, dst, ctxt->mode); dst 801 arch/x86/kvm/emulate.c static int assign_eip_far(struct x86_emulate_ctxt *ctxt, ulong dst, dst 821 arch/x86/kvm/emulate.c rc = assign_eip(ctxt, dst, mode); dst 1038 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; dst 1046 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; dst 1204 arch/x86/kvm/emulate.c ctxt->dst.val = fcw; dst 1220 arch/x86/kvm/emulate.c ctxt->dst.val = fsw; dst 1424 arch/x86/kvm/emulate.c if (ctxt->dst.type == OP_MEM && ctxt->src.type == OP_REG) { dst 1425 arch/x86/kvm/emulate.c mask = ~((long)ctxt->dst.bytes * 8 - 1); dst 1434 arch/x86/kvm/emulate.c ctxt->dst.addr.mem.ea = address_mask(ctxt, dst 1435 arch/x86/kvm/emulate.c ctxt->dst.addr.mem.ea + (sv >> 3)); dst 1439 arch/x86/kvm/emulate.c ctxt->src.val &= (ctxt->dst.bytes << 3) - 1; dst 1534 arch/x86/kvm/emulate.c ctxt->dst.data = rc->data + rc->pos; dst 1535 arch/x86/kvm/emulate.c ctxt->dst.type = OP_MEM_STR; dst 1536 arch/x86/kvm/emulate.c ctxt->dst.count = (rc->end - rc->pos) / size; dst 1893 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; dst 1915 arch/x86/kvm/emulate.c return emulate_pop(ctxt, &ctxt->dst.val, ctxt->op_bytes); dst 1962 arch/x86/kvm/emulate.c ctxt->dst.type = OP_REG; dst 1963 arch/x86/kvm/emulate.c ctxt->dst.addr.reg = &ctxt->eflags; dst 1964 arch/x86/kvm/emulate.c ctxt->dst.bytes = ctxt->op_bytes; dst 1965 arch/x86/kvm/emulate.c return emulate_popf(ctxt, &ctxt->dst.val, ctxt->op_bytes); dst 2266 arch/x86/kvm/emulate.c u64 old = ctxt->dst.orig_val64; dst 2268 arch/x86/kvm/emulate.c if (ctxt->dst.bytes == 16) dst 2277 arch/x86/kvm/emulate.c ctxt->dst.val64 = ((u64)reg_read(ctxt, VCPU_REGS_RCX) << 32) | dst 2340 arch/x86/kvm/emulate.c ctxt->dst.orig_val = ctxt->dst.val; dst 2341 arch/x86/kvm/emulate.c ctxt->dst.val = reg_read(ctxt, VCPU_REGS_RAX); dst 2343 arch/x86/kvm/emulate.c ctxt->src.val = ctxt->dst.orig_val; dst 2349 arch/x86/kvm/emulate.c ctxt->dst.val = ctxt->src.orig_val; dst 2354 arch/x86/kvm/emulate.c ctxt->src.val = ctxt->dst.orig_val; dst 2356 arch/x86/kvm/emulate.c ctxt->dst.val = ctxt->dst.orig_val; dst 2373 arch/x86/kvm/emulate.c ctxt->dst.val = ctxt->src.val; dst 3416 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; dst 3444 arch/x86/kvm/emulate.c al = ctxt->dst.val; dst 3462 arch/x86/kvm/emulate.c ctxt->dst.val = al; dst 3483 arch/x86/kvm/emulate.c al = ctxt->dst.val & 0xff; dst 3487 arch/x86/kvm/emulate.c ctxt->dst.val = (ctxt->dst.val & 0xffff0000) | al | (ah << 8); dst 3500 arch/x86/kvm/emulate.c u8 al = ctxt->dst.val & 0xff; dst 3501 arch/x86/kvm/emulate.c u8 ah = (ctxt->dst.val >> 8) & 0xff; dst 3505 arch/x86/kvm/emulate.c ctxt->dst.val = (ctxt->dst.val & 0xffff0000) | al; dst 3590 arch/x86/kvm/emulate.c ctxt->src.val = ctxt->dst.val; dst 3594 arch/x86/kvm/emulate.c ctxt->dst.val = ctxt->src.orig_val; dst 3601 arch/x86/kvm/emulate.c ctxt->dst.val = ctxt->src2.val; dst 3607 arch/x86/kvm/emulate.c ctxt->dst.type = OP_REG; dst 3608 arch/x86/kvm/emulate.c ctxt->dst.bytes = ctxt->src.bytes; dst 3609 arch/x86/kvm/emulate.c ctxt->dst.addr.reg = reg_rmw(ctxt, 
VCPU_REGS_RDX); dst 3610 arch/x86/kvm/emulate.c ctxt->dst.val = ~((ctxt->src.val >> (ctxt->src.bytes * 8 - 1)) - 1); dst 3621 arch/x86/kvm/emulate.c ctxt->dst.val = tsc_aux; dst 3648 arch/x86/kvm/emulate.c memcpy(ctxt->dst.valptr, ctxt->src.valptr, sizeof(ctxt->src.valptr)); dst 3677 arch/x86/kvm/emulate.c ctxt->dst.val &= ~0xffffUL; dst 3678 arch/x86/kvm/emulate.c ctxt->dst.val |= (unsigned long)swab16(tmp); dst 3681 arch/x86/kvm/emulate.c ctxt->dst.val = swab32((u32)ctxt->src.val); dst 3684 arch/x86/kvm/emulate.c ctxt->dst.val = swab64(ctxt->src.val); dst 3698 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; dst 3716 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; dst 3751 arch/x86/kvm/emulate.c ctxt->dst.val = get_segment_selector(ctxt, segment); dst 3752 arch/x86/kvm/emulate.c if (ctxt->dst.bytes == 4 && ctxt->dst.type == OP_MEM) dst 3753 arch/x86/kvm/emulate.c ctxt->dst.bytes = 2; dst 3776 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; dst 3790 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; dst 3804 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; dst 3817 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; dst 3841 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; dst 3863 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; dst 3864 arch/x86/kvm/emulate.c return segmented_write_std(ctxt, ctxt->dst.addr.mem, dst 3898 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; dst 3918 arch/x86/kvm/emulate.c if (ctxt->dst.type == OP_MEM) dst 3919 arch/x86/kvm/emulate.c ctxt->dst.bytes = 2; dst 3920 arch/x86/kvm/emulate.c ctxt->dst.val = ctxt->ops->get_cr(ctxt, 0); dst 3928 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; dst 3956 arch/x86/kvm/emulate.c if (!pio_in_emulated(ctxt, ctxt->dst.bytes, ctxt->src.val, dst 3957 arch/x86/kvm/emulate.c &ctxt->dst.val)) dst 3965 arch/x86/kvm/emulate.c ctxt->ops->pio_out_emulated(ctxt, ctxt->src.bytes, ctxt->dst.val, dst 3968 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; dst 4037 arch/x86/kvm/emulate.c asm("bswap %0" : "+r"(ctxt->dst.val)); dst 4041 arch/x86/kvm/emulate.c asm("bswap %0" : "+r"(*(u32 *)&ctxt->dst.val)); dst 4055 arch/x86/kvm/emulate.c ctxt->dst.val = (s32) ctxt->src.val; dst 4404 arch/x86/kvm/emulate.c ctxt->dst.bytes = min(ctxt->dst.bytes, 4u); dst 4405 arch/x86/kvm/emulate.c if (!emulator_io_permited(ctxt, ctxt->src.val, ctxt->dst.bytes)) dst 4414 arch/x86/kvm/emulate.c if (!emulator_io_permited(ctxt, ctxt->dst.val, ctxt->src.bytes)) dst 5467 arch/x86/kvm/emulate.c rc = decode_operand(ctxt, &ctxt->dst, (ctxt->d >> DstShift) & OpMask); dst 5530 arch/x86/kvm/emulate.c fop += __ffs(ctxt->dst.bytes) * FASTOP_SIZE; dst 5533 arch/x86/kvm/emulate.c : "+a"(ctxt->dst.val), "+d"(ctxt->src.val), [flags]"+D"(flags), dst 5557 arch/x86/kvm/emulate.c int saved_dst_type = ctxt->dst.type; dst 5563 arch/x86/kvm/emulate.c if (ctxt->lock_prefix && (!(ctxt->d & Lock) || ctxt->dst.type != OP_MEM)) { dst 5604 arch/x86/kvm/emulate.c fetch_possible_mmx_operand(ctxt, &ctxt->dst); dst 5673 arch/x86/kvm/emulate.c if ((ctxt->dst.type == OP_MEM) && !(ctxt->d & Mov)) { dst 5675 arch/x86/kvm/emulate.c rc = segmented_read(ctxt, ctxt->dst.addr.mem, dst 5676 arch/x86/kvm/emulate.c &ctxt->dst.val, ctxt->dst.bytes); dst 5686 arch/x86/kvm/emulate.c ctxt->dst.orig_val64 = ctxt->dst.val64; dst 5727 arch/x86/kvm/emulate.c ctxt->dst.val = ctxt->src.addr.mem.ea; dst 5730 arch/x86/kvm/emulate.c if (ctxt->dst.addr.reg == reg_rmw(ctxt, VCPU_REGS_RAX)) dst 5731 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; dst 5737 arch/x86/kvm/emulate.c case 2: ctxt->dst.val = (s8)ctxt->dst.val; 
break; dst 5738 arch/x86/kvm/emulate.c case 4: ctxt->dst.val = (s16)ctxt->dst.val; break; dst 5739 arch/x86/kvm/emulate.c case 8: ctxt->dst.val = (s32)ctxt->dst.val; break; dst 5755 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; /* Disable writeback. */ dst 5791 arch/x86/kvm/emulate.c rc = writeback(ctxt, &ctxt->dst); dst 5800 arch/x86/kvm/emulate.c ctxt->dst.type = saved_dst_type; dst 5806 arch/x86/kvm/emulate.c string_addr_inc(ctxt, VCPU_REGS_RDI, &ctxt->dst); dst 5814 arch/x86/kvm/emulate.c count = ctxt->dst.count; dst 5864 arch/x86/kvm/emulate.c ctxt->dst.val = ops->get_cr(ctxt, ctxt->modrm_reg); dst 5867 arch/x86/kvm/emulate.c ops->get_dr(ctxt, ctxt->modrm_reg, &ctxt->dst.val); dst 5871 arch/x86/kvm/emulate.c ctxt->dst.val = ctxt->src.val; dst 5873 arch/x86/kvm/emulate.c ctxt->dst.type = OP_NONE; /* no writeback */ dst 5880 arch/x86/kvm/emulate.c ctxt->dst.val = test_cc(ctxt->b, ctxt->eflags); dst 5883 arch/x86/kvm/emulate.c ctxt->dst.bytes = ctxt->op_bytes; dst 5884 arch/x86/kvm/emulate.c ctxt->dst.val = (ctxt->src.bytes == 1) ? (u8) ctxt->src.val dst 5888 arch/x86/kvm/emulate.c ctxt->dst.bytes = ctxt->op_bytes; dst 5889 arch/x86/kvm/emulate.c ctxt->dst.val = (ctxt->src.bytes == 1) ? (s8) ctxt->src.val : dst 881 arch/x86/kvm/lapic.c struct kvm_apic_map *map, struct kvm_lapic ***dst, dst 887 arch/x86/kvm/lapic.c *dst = src; dst 901 arch/x86/kvm/lapic.c *dst = &map->phys_map[dest_id]; dst 908 arch/x86/kvm/lapic.c if (!kvm_apic_map_get_logical_dest(map, irq->dest_id, dst, dst 918 arch/x86/kvm/lapic.c if (!(*dst)[i]) dst 922 arch/x86/kvm/lapic.c else if (kvm_apic_compare_prio((*dst)[i]->vcpu, dst 923 arch/x86/kvm/lapic.c (*dst)[lowest]->vcpu) < 0) dst 933 arch/x86/kvm/lapic.c if (!(*dst)[lowest]) { dst 950 arch/x86/kvm/lapic.c struct kvm_lapic **dst = NULL; dst 964 arch/x86/kvm/lapic.c ret = kvm_apic_map_get_dest_lapic(kvm, &src, irq, map, &dst, &bitmap); dst 968 arch/x86/kvm/lapic.c if (!dst[i]) dst 970 arch/x86/kvm/lapic.c *r += kvm_apic_set_irq(dst[i]->vcpu, irq, dest_map); dst 997 arch/x86/kvm/lapic.c struct kvm_lapic **dst = NULL; dst 1006 arch/x86/kvm/lapic.c if (kvm_apic_map_get_dest_lapic(kvm, NULL, irq, map, &dst, &bitmap) && dst 1010 arch/x86/kvm/lapic.c if (dst[i]) { dst 1011 arch/x86/kvm/lapic.c *dest_vcpu = dst[i]->vcpu; dst 3320 arch/x86/kvm/svm.c struct vmcb_control_area *dst = &dst_vmcb->control; dst 3323 arch/x86/kvm/svm.c dst->intercept_cr = from->intercept_cr; dst 3324 arch/x86/kvm/svm.c dst->intercept_dr = from->intercept_dr; dst 3325 arch/x86/kvm/svm.c dst->intercept_exceptions = from->intercept_exceptions; dst 3326 arch/x86/kvm/svm.c dst->intercept = from->intercept; dst 3327 arch/x86/kvm/svm.c dst->iopm_base_pa = from->iopm_base_pa; dst 3328 arch/x86/kvm/svm.c dst->msrpm_base_pa = from->msrpm_base_pa; dst 3329 arch/x86/kvm/svm.c dst->tsc_offset = from->tsc_offset; dst 3331 arch/x86/kvm/svm.c dst->tlb_ctl = from->tlb_ctl; dst 3332 arch/x86/kvm/svm.c dst->int_ctl = from->int_ctl; dst 3333 arch/x86/kvm/svm.c dst->int_vector = from->int_vector; dst 3334 arch/x86/kvm/svm.c dst->int_state = from->int_state; dst 3335 arch/x86/kvm/svm.c dst->exit_code = from->exit_code; dst 3336 arch/x86/kvm/svm.c dst->exit_code_hi = from->exit_code_hi; dst 3337 arch/x86/kvm/svm.c dst->exit_info_1 = from->exit_info_1; dst 3338 arch/x86/kvm/svm.c dst->exit_info_2 = from->exit_info_2; dst 3339 arch/x86/kvm/svm.c dst->exit_int_info = from->exit_int_info; dst 3340 arch/x86/kvm/svm.c dst->exit_int_info_err = from->exit_int_info_err; dst 3341 arch/x86/kvm/svm.c dst->nested_ctl = 
from->nested_ctl; dst 3342 arch/x86/kvm/svm.c dst->event_inj = from->event_inj; dst 3343 arch/x86/kvm/svm.c dst->event_inj_err = from->event_inj_err; dst 3344 arch/x86/kvm/svm.c dst->nested_cr3 = from->nested_cr3; dst 3345 arch/x86/kvm/svm.c dst->virt_ext = from->virt_ext; dst 3346 arch/x86/kvm/svm.c dst->pause_filter_count = from->pause_filter_count; dst 3347 arch/x86/kvm/svm.c dst->pause_filter_thresh = from->pause_filter_thresh; dst 6712 arch/x86/kvm/svm.c unsigned long dst, int size, dst 6724 arch/x86/kvm/svm.c data->dst_addr = dst; dst 7279 arch/x86/kvm/vmx/vmx.c gpa_t gpa, dst; dst 7299 arch/x86/kvm/vmx/vmx.c dst = vmcs12->pml_address + sizeof(u64) * vmcs12->guest_pml_index; dst 7301 arch/x86/kvm/vmx/vmx.c if (kvm_write_guest_page(vcpu->kvm, gpa_to_gfn(dst), &gpa, dst 7302 arch/x86/kvm/vmx/vmx.c offset_in_page(dst), sizeof(gpa))) dst 24 arch/x86/lib/csum-wrappers_64.c csum_partial_copy_from_user(const void __user *src, void *dst, dst 48 arch/x86/lib/csum-wrappers_64.c *(__u16 *)dst = val16; dst 52 arch/x86/lib/csum-wrappers_64.c dst += 2; dst 58 arch/x86/lib/csum-wrappers_64.c dst, len, isum, errp, NULL); dst 67 arch/x86/lib/csum-wrappers_64.c memset(dst, 0, len); dst 85 arch/x86/lib/csum-wrappers_64.c csum_partial_copy_to_user(const void *src, void __user *dst, dst 92 arch/x86/lib/csum-wrappers_64.c if (unlikely(!access_ok(dst, len))) { dst 97 arch/x86/lib/csum-wrappers_64.c if (unlikely((unsigned long)dst & 6)) { dst 98 arch/x86/lib/csum-wrappers_64.c while (((unsigned long)dst & 6) && len >= 2) { dst 103 arch/x86/lib/csum-wrappers_64.c *errp = __put_user(val16, (__u16 __user *)dst); dst 107 arch/x86/lib/csum-wrappers_64.c dst += 2; dst 114 arch/x86/lib/csum-wrappers_64.c ret = csum_partial_copy_generic(src, (void __force *)dst, dst 131 arch/x86/lib/csum-wrappers_64.c csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum) dst 133 arch/x86/lib/csum-wrappers_64.c return csum_partial_copy_generic(src, dst, len, sum, NULL, NULL); dst 43 arch/x86/lib/usercopy_64.c : [size8] "=&c"(size), [dst] "=&D" (__d0) dst 107 arch/x86/lib/usercopy_64.c long __copy_user_flushcache(void *dst, const void __user *src, unsigned size) dst 109 arch/x86/lib/usercopy_64.c unsigned long flushed, dest = (unsigned long) dst; dst 110 arch/x86/lib/usercopy_64.c long rc = __copy_user_nocache(dst, src, size, 0); dst 122 arch/x86/lib/usercopy_64.c clean_cache_range(dst, 1); dst 126 arch/x86/lib/usercopy_64.c clean_cache_range(dst, 1); dst 129 arch/x86/lib/usercopy_64.c flushed = dest - (unsigned long) dst; dst 131 arch/x86/lib/usercopy_64.c clean_cache_range(dst + size - 1, 1); dst 62 arch/x86/mm/mem_encrypt.c void *src, *dst; dst 84 arch/x86/mm/mem_encrypt.c dst = enc ? 
early_memremap_encrypted(paddr, len) : dst 92 arch/x86/mm/mem_encrypt.c BUG_ON(!src || !dst); dst 99 arch/x86/mm/mem_encrypt.c memcpy(dst, sme_early_buffer, len); dst 101 arch/x86/mm/mem_encrypt.c early_memunmap(dst, len); dst 166 arch/x86/net/bpf_jit_comp32.c #define dst_lo dst[0] dst 167 arch/x86/net/bpf_jit_comp32.c #define dst_hi dst[1] dst 205 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_mov_i(const u8 dst, const u32 val, bool dstk, dst 217 arch/x86/net/bpf_jit_comp32.c STACK_VAR(dst)); dst 220 arch/x86/net/bpf_jit_comp32.c STACK_VAR(dst), val); dst 224 arch/x86/net/bpf_jit_comp32.c EMIT2(0x33, add_2reg(0xC0, dst, dst)); dst 226 arch/x86/net/bpf_jit_comp32.c EMIT2_off32(0xC7, add_1reg(0xC0, dst), dst 233 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_mov_r(const u8 dst, const u8 src, bool dstk, dst 245 arch/x86/net/bpf_jit_comp32.c EMIT3(0x89, add_2reg(0x40, IA32_EBP, sreg), STACK_VAR(dst)); dst 248 arch/x86/net/bpf_jit_comp32.c EMIT2(0x89, add_2reg(0xC0, dst, sreg)); dst 254 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_mov_r64(const bool is64, const u8 dst[], dst 269 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_mov_i64(const bool is64, const u8 dst[], dst 284 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_mul_r(const u8 dst, const u8 src, bool dstk, dst 297 arch/x86/net/bpf_jit_comp32.c EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX), STACK_VAR(dst)); dst 300 arch/x86/net/bpf_jit_comp32.c EMIT2(0x8B, add_2reg(0xC0, dst, IA32_EAX)); dst 308 arch/x86/net/bpf_jit_comp32.c STACK_VAR(dst)); dst 311 arch/x86/net/bpf_jit_comp32.c EMIT2(0x89, add_2reg(0xC0, dst, IA32_EAX)); dst 316 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_to_le_r64(const u8 dst[], s32 val, dst 364 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_to_be_r64(const u8 dst[], s32 val, dst 434 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_div_mod_r(const u8 op, const u8 dst, const u8 src, dst 451 arch/x86/net/bpf_jit_comp32.c STACK_VAR(dst)); dst 454 arch/x86/net/bpf_jit_comp32.c EMIT2(0x8B, add_2reg(0xC0, dst, IA32_EAX)); dst 464 arch/x86/net/bpf_jit_comp32.c STACK_VAR(dst)); dst 466 arch/x86/net/bpf_jit_comp32.c EMIT2(0x89, add_2reg(0xC0, dst, IA32_EDX)); dst 470 arch/x86/net/bpf_jit_comp32.c STACK_VAR(dst)); dst 472 arch/x86/net/bpf_jit_comp32.c EMIT2(0x89, add_2reg(0xC0, dst, IA32_EAX)); dst 481 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_shift_r(const u8 op, const u8 dst, const u8 src, dst 486 arch/x86/net/bpf_jit_comp32.c u8 dreg = dstk ? IA32_EAX : dst; dst 491 arch/x86/net/bpf_jit_comp32.c EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX), STACK_VAR(dst)); dst 514 arch/x86/net/bpf_jit_comp32.c EMIT3(0x89, add_2reg(0x40, IA32_EBP, dreg), STACK_VAR(dst)); dst 523 arch/x86/net/bpf_jit_comp32.c const u8 dst, const u8 src, bool dstk, dst 529 arch/x86/net/bpf_jit_comp32.c u8 dreg = dstk ? IA32_EDX : dst; dst 537 arch/x86/net/bpf_jit_comp32.c EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EDX), STACK_VAR(dst)); dst 571 arch/x86/net/bpf_jit_comp32.c STACK_VAR(dst)); dst 577 arch/x86/net/bpf_jit_comp32.c const u8 dst[], const u8 src[], dst 597 arch/x86/net/bpf_jit_comp32.c const u8 dst, const s32 val, bool dstk, dst 602 arch/x86/net/bpf_jit_comp32.c u8 dreg = dstk ? 
IA32_EAX : dst; dst 607 arch/x86/net/bpf_jit_comp32.c EMIT3(0x8B, add_2reg(0x40, IA32_EBP, IA32_EAX), STACK_VAR(dst)); dst 671 arch/x86/net/bpf_jit_comp32.c STACK_VAR(dst)); dst 677 arch/x86/net/bpf_jit_comp32.c const u8 dst[], const u32 val, dst 697 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_neg64(const u8 dst[], bool dstk, u8 **pprog) dst 730 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_lsh_r64(const u8 dst[], const u8 src[], dst 783 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_arsh_r64(const u8 dst[], const u8 src[], dst 836 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_rsh_r64(const u8 dst[], const u8 src[], bool dstk, dst 889 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_lsh_i64(const u8 dst[], const u32 val, dst 937 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_rsh_i64(const u8 dst[], const u32 val, dst 986 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_arsh_i64(const u8 dst[], const u32 val, dst 1034 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_mul_r64(const u8 dst[], const u8 src[], bool dstk, dst 1111 arch/x86/net/bpf_jit_comp32.c static inline void emit_ia32_mul_i64(const u8 dst[], const u32 val, dst 1481 arch/x86/net/bpf_jit_comp32.c const u8 *dst = bpf2ia32[insn->dst_reg]; dst 1503 arch/x86/net/bpf_jit_comp32.c emit_ia32_mov_r64(is64, dst, src, dstk, sstk, dst 1508 arch/x86/net/bpf_jit_comp32.c emit_ia32_mov_i64(is64, dst, imm32, dst 1543 arch/x86/net/bpf_jit_comp32.c emit_ia32_alu_r64(is64, BPF_OP(code), dst, dst 1548 arch/x86/net/bpf_jit_comp32.c emit_ia32_alu_i64(is64, BPF_OP(code), dst, dst 1638 arch/x86/net/bpf_jit_comp32.c emit_ia32_lsh_i64(dst, imm32, dstk, &prog); dst 1644 arch/x86/net/bpf_jit_comp32.c emit_ia32_rsh_i64(dst, imm32, dstk, &prog); dst 1648 arch/x86/net/bpf_jit_comp32.c emit_ia32_lsh_r64(dst, src, dstk, sstk, &prog); dst 1652 arch/x86/net/bpf_jit_comp32.c emit_ia32_rsh_r64(dst, src, dstk, sstk, &prog); dst 1656 arch/x86/net/bpf_jit_comp32.c emit_ia32_arsh_r64(dst, src, dstk, sstk, &prog); dst 1662 arch/x86/net/bpf_jit_comp32.c emit_ia32_arsh_i64(dst, imm32, dstk, &prog); dst 1673 arch/x86/net/bpf_jit_comp32.c emit_ia32_neg64(dst, dstk, &prog); dst 1680 arch/x86/net/bpf_jit_comp32.c emit_ia32_mul_r64(dst, src, dstk, sstk, &prog); dst 1683 arch/x86/net/bpf_jit_comp32.c emit_ia32_mul_i64(dst, imm32, dstk, &prog); dst 1689 arch/x86/net/bpf_jit_comp32.c emit_ia32_to_le_r64(dst, imm32, dstk, &prog, dst 1694 arch/x86/net/bpf_jit_comp32.c emit_ia32_to_be_r64(dst, imm32, dstk, &prog, dst 32 arch/x86/um/asm/checksum.h __wsum csum_partial_copy_nocheck(const void *src, void *dst, dst 35 arch/x86/um/asm/checksum.h memcpy(dst, src, len); dst 36 arch/x86/um/asm/checksum.h return csum_partial(dst, len, sum); dst 48 arch/x86/um/asm/checksum.h __wsum csum_partial_copy_from_user(const void __user *src, void *dst, dst 51 arch/x86/um/asm/checksum.h if (copy_from_user(dst, src, len)) { dst 56 arch/x86/um/asm/checksum.h return csum_partial(dst, len, sum); dst 43 arch/x86/um/asm/checksum_32.h void __user *dst, dst 46 arch/x86/um/asm/checksum_32.h if (access_ok(dst, len)) { dst 47 arch/x86/um/asm/checksum_32.h if (copy_to_user(dst, src, len)) { dst 35 arch/xtensa/boot/lib/zmem.c void gunzip (void *dst, int dstlen, unsigned char *src, int *lenp) dst 70 arch/xtensa/boot/lib/zmem.c s.next_out = dst; dst 77 arch/xtensa/boot/lib/zmem.c *lenp = s.next_out - (unsigned char *) dst; dst 170 arch/xtensa/include/asm/cacheflush.h #define copy_to_user_page(vma, page, vaddr, dst, src, 
len) \ dst 172 arch/xtensa/include/asm/cacheflush.h memcpy(dst, src, len); \ dst 173 arch/xtensa/include/asm/cacheflush.h __flush_dcache_range((unsigned long) dst, len); \ dst 174 arch/xtensa/include/asm/cacheflush.h __invalidate_icache_range((unsigned long) dst, len); \ dst 177 arch/xtensa/include/asm/cacheflush.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 178 arch/xtensa/include/asm/cacheflush.h memcpy(dst, src, len) dst 40 arch/xtensa/include/asm/checksum.h asmlinkage __wsum csum_partial_copy_generic(const void *src, void *dst, dst 51 arch/xtensa/include/asm/checksum.h __wsum csum_partial_copy_nocheck(const void *src, void *dst, dst 54 arch/xtensa/include/asm/checksum.h return csum_partial_copy_generic(src, dst, len, sum, NULL, NULL); dst 58 arch/xtensa/include/asm/checksum.h __wsum csum_partial_copy_from_user(const void __user *src, void *dst, dst 61 arch/xtensa/include/asm/checksum.h return csum_partial_copy_generic((__force const void *)src, dst, dst 243 arch/xtensa/include/asm/checksum.h void __user *dst, int len, dst 246 arch/xtensa/include/asm/checksum.h if (access_ok(dst, len)) dst 247 arch/xtensa/include/asm/checksum.h return csum_partial_copy_generic(src,dst,len,sum,NULL,err_ptr); dst 131 arch/xtensa/include/asm/string.h #define memcpy(dst, src, len) __memcpy(dst, src, len) dst 132 arch/xtensa/include/asm/string.h #define memmove(dst, src, len) __memmove(dst, src, len) dst 296 arch/xtensa/include/asm/uaccess.h strncpy_from_user(char *dst, const char *src, long count) dst 299 arch/xtensa/include/asm/uaccess.h return __strncpy_user(dst, src, count); dst 303 arch/xtensa/include/asm/uaccess.h long strncpy_from_user(char *dst, const char *src, long count); dst 154 arch/xtensa/kernel/process.c int arch_dup_task_struct(struct task_struct *dst, struct task_struct *src) dst 159 arch/xtensa/kernel/process.c *dst = *src; dst 105 arch/xtensa/mm/cache.c void copy_user_highpage(struct page *dst, struct page *src, dst 109 arch/xtensa/mm/cache.c void *dst_vaddr = coherent_kvaddr(dst, TLBTEMP_BASE_1, vaddr, dst 115 arch/xtensa/mm/cache.c kmap_invalidate_coherent(dst, vaddr); dst 116 arch/xtensa/mm/cache.c set_bit(PG_arch_1, &dst->flags); dst 259 arch/xtensa/mm/cache.c unsigned long vaddr, void *dst, const void *src, dst 274 arch/xtensa/mm/cache.c memcpy(dst, src, len); dst 284 arch/xtensa/mm/cache.c __flush_invalidate_dcache_range((unsigned long) dst, len); dst 289 arch/xtensa/mm/cache.c __flush_dcache_range((unsigned long)dst,len); dst 290 arch/xtensa/mm/cache.c __invalidate_icache_range((unsigned long) dst, len); dst 295 arch/xtensa/mm/cache.c unsigned long vaddr, void *dst, const void *src, dst 311 arch/xtensa/mm/cache.c memcpy(dst, src, len); dst 1053 block/bio.c void bio_copy_data_iter(struct bio *dst, struct bvec_iter *dst_iter, dst 1062 block/bio.c dst_bv = bio_iter_iovec(dst, *dst_iter); dst 1079 block/bio.c bio_advance_iter(dst, dst_iter, bytes); dst 1092 block/bio.c void bio_copy_data(struct bio *dst, struct bio *src) dst 1095 block/bio.c struct bvec_iter dst_iter = dst->bi_iter; dst 1097 block/bio.c bio_copy_data_iter(dst, &dst_iter, src, &src_iter); dst 1111 block/bio.c void bio_list_copy_data(struct bio *dst, struct bio *src) dst 1114 block/bio.c struct bvec_iter dst_iter = dst->bi_iter; dst 1126 block/bio.c dst = dst->bi_next; dst 1127 block/bio.c if (!dst) dst 1130 block/bio.c dst_iter = dst->bi_iter; dst 1133 block/bio.c bio_copy_data_iter(dst, &dst_iter, src, &src_iter); dst 2174 block/bio.c void bio_clone_blkg_association(struct bio *dst, struct bio 
*src) dst 2179 block/bio.c __bio_associate_blkg(dst, src->bi_blkg); dst 1584 block/blk-core.c static void __blk_rq_prep_clone(struct request *dst, struct request *src) dst 1586 block/blk-core.c dst->__sector = blk_rq_pos(src); dst 1587 block/blk-core.c dst->__data_len = blk_rq_bytes(src); dst 1589 block/blk-core.c dst->rq_flags |= RQF_SPECIAL_PAYLOAD; dst 1590 block/blk-core.c dst->special_vec = src->special_vec; dst 1592 block/blk-core.c dst->nr_phys_segments = src->nr_phys_segments; dst 1593 block/blk-core.c dst->ioprio = src->ioprio; dst 1594 block/blk-core.c dst->extra_len = src->extra_len; dst 29 block/blk-stat.c void blk_rq_stat_sum(struct blk_rq_stat *dst, struct blk_rq_stat *src) dst 34 block/blk-stat.c dst->min = min(dst->min, src->min); dst 35 block/blk-stat.c dst->max = max(dst->max, src->max); dst 37 block/blk-stat.c dst->mean = div_u64(src->batch + dst->mean * dst->nr_samples, dst 38 block/blk-stat.c dst->nr_samples + src->nr_samples); dst 40 block/blk-stat.c dst->nr_samples += src->nr_samples; dst 1587 block/sed-opal.c u8 *dst; dst 1629 block/sed-opal.c dst = add_bytestring_header(&err, dev, len); dst 1630 block/sed-opal.c if (!dst) dst 1632 block/sed-opal.c if (copy_from_user(dst, src + off, len)) dst 67 crypto/842.c u8 *dst, unsigned int *dlen) dst 71 crypto/842.c return sw842_compress(src, slen, dst, dlen, ctx->wmem); dst 76 crypto/842.c u8 *dst, unsigned int *dlen, void *ctx) dst 78 crypto/842.c return sw842_compress(src, slen, dst, dlen, ctx); dst 83 crypto/842.c u8 *dst, unsigned int *dlen) dst 85 crypto/842.c return sw842_decompress(src, slen, dst, dlen); dst 90 crypto/842.c u8 *dst, unsigned int *dlen, void *ctx) dst 92 crypto/842.c return sw842_decompress(src, slen, dst, dlen); dst 26 crypto/ablkcipher.c struct scatter_walk dst; dst 37 crypto/ablkcipher.c scatterwalk_copychunks(p->data, &p->dst, p->len, 1); dst 55 crypto/ablkcipher.c p->dst = walk->out; dst 145 crypto/ablkcipher.c void *src, *dst, *base; dst 158 crypto/ablkcipher.c dst = (u8 *)ALIGN((unsigned long)base, alignmask + 1); dst 159 crypto/ablkcipher.c src = dst = ablkcipher_get_spot(dst, bsize); dst 162 crypto/ablkcipher.c p->data = dst; dst 172 crypto/ablkcipher.c *dst_p = dst; dst 207 crypto/ablkcipher.c walk->dst.page = scatterwalk_page(&walk->out); dst 208 crypto/ablkcipher.c walk->dst.offset = offset_in_page(walk->out.offset); dst 218 crypto/ablkcipher.c void *src, *dst; dst 229 crypto/ablkcipher.c src = dst = NULL; dst 239 crypto/ablkcipher.c &src, &dst); dst 250 crypto/ablkcipher.c walk->dst.page = virt_to_page(dst); dst 252 crypto/ablkcipher.c walk->dst.offset = ((unsigned long)dst & (PAGE_SIZE - 1)); dst 134 crypto/acompress.c acomp->dst_free(req->dst); dst 135 crypto/acompress.c req->dst = NULL; dst 309 crypto/adiantum.c err = adiantum_hash_message(req, req->dst, &digest); dst 314 crypto/adiantum.c scatterwalk_map_and_copy(&rctx->rbuf.bignum, req->dst, dst 387 crypto/adiantum.c req->dst, stream_len, &rctx->rbuf); dst 27 crypto/aegis.h static __always_inline void crypto_aegis_block_xor(union aegis_block *dst, dst 30 crypto/aegis.h dst->words64[0] ^= src->words64[0]; dst 31 crypto/aegis.h dst->words64[1] ^= src->words64[1]; dst 34 crypto/aegis.h static __always_inline void crypto_aegis_block_and(union aegis_block *dst, dst 37 crypto/aegis.h dst->words64[0] &= src->words64[0]; dst 38 crypto/aegis.h dst->words64[1] &= src->words64[1]; dst 41 crypto/aegis.h static __always_inline void crypto_aegis_aesenc(union aegis_block *dst, dst 54 crypto/aegis.h dst->words32[0] = cpu_to_le32(d0) ^ 
key->words32[0]; dst 55 crypto/aegis.h dst->words32[1] = cpu_to_le32(d1) ^ key->words32[1]; dst 56 crypto/aegis.h dst->words32[2] = cpu_to_le32(d2) ^ key->words32[2]; dst 57 crypto/aegis.h dst->words32[3] = cpu_to_le32(d3) ^ key->words32[3]; dst 42 crypto/aegis128-core.c void (*crypt_chunk)(struct aegis_state *state, u8 *dst, dst 70 crypto/aegis128-core.c void crypto_aegis128_encrypt_chunk_simd(struct aegis_state *state, u8 *dst, dst 72 crypto/aegis128-core.c void crypto_aegis128_decrypt_chunk_simd(struct aegis_state *state, u8 *dst, dst 158 crypto/aegis128-core.c static void crypto_aegis128_encrypt_chunk(struct aegis_state *state, u8 *dst, dst 163 crypto/aegis128-core.c if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) { dst 166 crypto/aegis128-core.c (union aegis_block *)dst; dst 182 crypto/aegis128-core.c dst += AEGIS_BLOCK_SIZE; dst 194 crypto/aegis128-core.c memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE); dst 198 crypto/aegis128-core.c dst += AEGIS_BLOCK_SIZE; dst 215 crypto/aegis128-core.c memcpy(dst, msg.bytes, size); dst 219 crypto/aegis128-core.c static void crypto_aegis128_decrypt_chunk(struct aegis_state *state, u8 *dst, dst 224 crypto/aegis128-core.c if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) { dst 227 crypto/aegis128-core.c (union aegis_block *)dst; dst 243 crypto/aegis128-core.c dst += AEGIS_BLOCK_SIZE; dst 255 crypto/aegis128-core.c memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE); dst 259 crypto/aegis128-core.c dst += AEGIS_BLOCK_SIZE; dst 277 crypto/aegis128-core.c memcpy(dst, msg.bytes, size); dst 340 crypto/aegis128-core.c ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr, dst 427 crypto/aegis128-core.c scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen, dst 146 crypto/aegis128-neon-inner.c void crypto_aegis128_encrypt_chunk_neon(void *state, void *dst, const void *src, dst 159 crypto/aegis128-neon-inner.c vst1q_u8(dst, msg ^ s); dst 163 crypto/aegis128-neon-inner.c dst += AEGIS_BLOCK_SIZE; dst 174 crypto/aegis128-neon-inner.c memcpy(dst, buf, size); dst 180 crypto/aegis128-neon-inner.c void crypto_aegis128_decrypt_chunk_neon(void *state, void *dst, const void *src, dst 191 crypto/aegis128-neon-inner.c vst1q_u8(dst, msg); dst 195 crypto/aegis128-neon-inner.c dst += AEGIS_BLOCK_SIZE; dst 206 crypto/aegis128-neon-inner.c memcpy(dst, buf, size); dst 12 crypto/aegis128-neon.c void crypto_aegis128_encrypt_chunk_neon(void *state, void *dst, const void *src, dst 14 crypto/aegis128-neon.c void crypto_aegis128_decrypt_chunk_neon(void *state, void *dst, const void *src, dst 35 crypto/aegis128-neon.c void crypto_aegis128_encrypt_chunk_simd(union aegis_block *state, u8 *dst, dst 39 crypto/aegis128-neon.c crypto_aegis128_encrypt_chunk_neon(state, dst, src, size); dst 43 crypto/aegis128-neon.c void crypto_aegis128_decrypt_chunk_simd(union aegis_block *state, u8 *dst, dst 47 crypto/aegis128-neon.c crypto_aegis128_decrypt_chunk_neon(state, dst, src, size); dst 583 crypto/af_alg.c void af_alg_pull_tsgl(struct sock *sk, size_t used, struct scatterlist *dst, dst 608 crypto/af_alg.c if (dst) { dst 615 crypto/af_alg.c sg_set_page(dst + j, page, dst 977 crypto/algapi.c void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len) dst 983 crypto/algapi.c int d = (((unsigned long)dst ^ (unsigned long)src1) | dst 984 crypto/algapi.c ((unsigned long)dst ^ (unsigned long)src2)) & dst 995 crypto/algapi.c while (((unsigned long)dst & (relalign - 1)) && len > 0) { dst 996 crypto/algapi.c *dst++ = *src1++ ^ *src2++; dst 1002 crypto/algapi.c *(u64 *)dst = *(u64 *)src1 ^ 
*(u64 *)src2; dst 1003 crypto/algapi.c dst += 8; dst 1010 crypto/algapi.c *(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2; dst 1011 crypto/algapi.c dst += 4; dst 1018 crypto/algapi.c *(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2; dst 1019 crypto/algapi.c dst += 2; dst 1026 crypto/algapi.c *dst++ = *src1++ ^ *src2++; dst 76 crypto/algif_aead.c struct scatterlist *dst, unsigned int len) dst 83 crypto/algif_aead.c skcipher_request_set_crypt(skreq, src, dst, len, NULL); dst 578 crypto/anubis.c __be32 *dst = (__be32 *)ciphertext; dst 659 crypto/anubis.c dst[i] = cpu_to_be32(inter[i]); dst 662 crypto/anubis.c static void anubis_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 665 crypto/anubis.c anubis_crypt(ctx->E, dst, src, ctx->R); dst 668 crypto/anubis.c static void anubis_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 671 crypto/anubis.c anubis_crypt(ctx->D, dst, src, ctx->R); dst 34 crypto/arc4.c arc4_crypt(ctx, walk.dst.virt.addr, walk.src.virt.addr, dst 94 crypto/asymmetric_keys/public_key.c static u8 *pkey_pack_u32(u8 *dst, u32 val) dst 96 crypto/asymmetric_keys/public_key.c memcpy(dst, &val, sizeof(val)); dst 97 crypto/asymmetric_keys/public_key.c return dst + sizeof(val); dst 36 crypto/authenc.c struct scatterlist dst[2]; dst 134 crypto/authenc.c scatterwalk_map_and_copy(ahreq->result, req->dst, dst 158 crypto/authenc.c ahash_request_set_crypt(ahreq, req->dst, hash, dst 167 crypto/authenc.c scatterwalk_map_and_copy(hash, req->dst, req->assoclen + req->cryptlen, dst 196 crypto/authenc.c skcipher_request_set_crypt(skreq, req->src, req->dst, req->assoclen, dst 213 crypto/authenc.c struct scatterlist *src, *dst; dst 217 crypto/authenc.c dst = src; dst 219 crypto/authenc.c if (req->src != req->dst) { dst 224 crypto/authenc.c dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, req->assoclen); dst 230 crypto/authenc.c skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv); dst 252 crypto/authenc.c struct scatterlist *src, *dst; dst 260 crypto/authenc.c dst = src; dst 262 crypto/authenc.c if (req->src != req->dst) dst 263 crypto/authenc.c dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, req->assoclen); dst 268 crypto/authenc.c skcipher_request_set_crypt(skreq, src, dst, dst 39 crypto/authencesn.c struct scatterlist dst[2]; dst 108 crypto/authencesn.c struct scatterlist *dst = req->dst; dst 112 crypto/authencesn.c scatterwalk_map_and_copy(tmp, dst, 4, 4, 0); dst 113 crypto/authencesn.c scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0); dst 114 crypto/authencesn.c scatterwalk_map_and_copy(tmp, dst, 0, 8, 1); dst 116 crypto/authencesn.c scatterwalk_map_and_copy(hash, dst, assoclen + cryptlen, authsize, 1); dst 142 crypto/authencesn.c struct scatterlist *dst = req->dst; dst 149 crypto/authencesn.c scatterwalk_map_and_copy(tmp, dst, 0, 8, 0); dst 150 crypto/authencesn.c scatterwalk_map_and_copy(tmp, dst, 4, 4, 1); dst 151 crypto/authencesn.c scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1); dst 153 crypto/authencesn.c sg_init_table(areq_ctx->dst, 2); dst 154 crypto/authencesn.c dst = scatterwalk_ffwd(areq_ctx->dst, dst, 4); dst 157 crypto/authencesn.c ahash_request_set_crypt(ahreq, dst, hash, assoclen + cryptlen); dst 186 crypto/authencesn.c skcipher_request_set_crypt(skreq, req->src, req->dst, len, NULL); dst 201 crypto/authencesn.c struct scatterlist *src, *dst; dst 206 crypto/authencesn.c dst = src; dst 208 crypto/authencesn.c if (req->src != req->dst) { dst 213 crypto/authencesn.c sg_init_table(areq_ctx->dst, 2); dst 214 crypto/authencesn.c dst 
= scatterwalk_ffwd(areq_ctx->dst, req->dst, assoclen); dst 220 crypto/authencesn.c skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv); dst 243 crypto/authencesn.c struct scatterlist *dst = req->dst; dst 251 crypto/authencesn.c scatterwalk_map_and_copy(tmp, dst, 4, 4, 0); dst 252 crypto/authencesn.c scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0); dst 253 crypto/authencesn.c scatterwalk_map_and_copy(tmp, dst, 0, 8, 1); dst 260 crypto/authencesn.c sg_init_table(areq_ctx->dst, 2); dst 261 crypto/authencesn.c dst = scatterwalk_ffwd(areq_ctx->dst, dst, assoclen); dst 266 crypto/authencesn.c skcipher_request_set_crypt(skreq, dst, dst, cryptlen, req->iv); dst 293 crypto/authencesn.c struct scatterlist *dst = req->dst; dst 299 crypto/authencesn.c if (req->src != dst) { dst 312 crypto/authencesn.c scatterwalk_map_and_copy(tmp, dst, 0, 8, 0); dst 313 crypto/authencesn.c scatterwalk_map_and_copy(tmp, dst, 4, 4, 1); dst 314 crypto/authencesn.c scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1); dst 316 crypto/authencesn.c sg_init_table(areq_ctx->dst, 2); dst 317 crypto/authencesn.c dst = scatterwalk_ffwd(areq_ctx->dst, dst, 4); dst 320 crypto/authencesn.c ahash_request_set_crypt(ahreq, dst, ohash, assoclen + cryptlen); dst 46 crypto/blkcipher.c walk->dst.virt.addr = scatterwalk_map(&walk->out); dst 56 crypto/blkcipher.c scatterwalk_unmap(walk->dst.virt.addr); dst 83 crypto/blkcipher.c memcpy(walk->dst.virt.addr, walk->page, n); dst 161 crypto/blkcipher.c walk->dst.virt.addr = (u8 *)ALIGN((unsigned long)walk->buffer, dst 163 crypto/blkcipher.c walk->dst.virt.addr = blkcipher_get_spot(walk->dst.virt.addr, bsize); dst 164 crypto/blkcipher.c walk->src.virt.addr = blkcipher_get_spot(walk->dst.virt.addr + dst 184 crypto/blkcipher.c walk->dst.virt.addr = tmp; dst 196 crypto/blkcipher.c walk->dst.phys.page = scatterwalk_page(&walk->out); dst 197 crypto/blkcipher.c walk->dst.phys.offset = offset_in_page(walk->out.offset); dst 202 crypto/blkcipher.c diff = walk->src.phys.offset - walk->dst.phys.offset; dst 203 crypto/blkcipher.c diff |= walk->src.virt.page - walk->dst.virt.page; dst 206 crypto/blkcipher.c walk->dst.virt.addr = walk->src.virt.addr; dst 262 crypto/blkcipher.c walk->dst.phys.page = virt_to_page(walk->dst.virt.addr); dst 264 crypto/blkcipher.c walk->dst.phys.offset &= PAGE_SIZE - 1; dst 423 crypto/blkcipher.c return alg->encrypt(&desc, req->dst, req->src, req->nbytes); dst 436 crypto/blkcipher.c return alg->decrypt(&desc, req->dst, req->src, req->nbytes); dst 310 crypto/blowfish_common.c static void encrypt_block(struct bf_ctx *bctx, u32 *dst, u32 *src) dst 337 crypto/blowfish_common.c dst[0] = yr; dst 338 crypto/blowfish_common.c dst[1] = yl; dst 36 crypto/blowfish_generic.c static void bf_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 40 crypto/blowfish_generic.c __be32 *const out_blk = (__be32 *)dst; dst 70 crypto/blowfish_generic.c static void bf_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 74 crypto/blowfish_generic.c __be32 *const out_blk = (__be32 *)dst; dst 1001 crypto/camellia_generic.c __be32 *dst = (__be32 *)out; dst 1019 crypto/camellia_generic.c dst[0] = cpu_to_be32(tmp[2]); dst 1020 crypto/camellia_generic.c dst[1] = cpu_to_be32(tmp[3]); dst 1021 crypto/camellia_generic.c dst[2] = cpu_to_be32(tmp[0]); dst 1022 crypto/camellia_generic.c dst[3] = cpu_to_be32(tmp[1]); dst 1029 crypto/camellia_generic.c __be32 *dst = (__be32 *)out; dst 1047 crypto/camellia_generic.c dst[0] = cpu_to_be32(tmp[2]); dst 1048 
crypto/camellia_generic.c dst[1] = cpu_to_be32(tmp[3]); dst 1049 crypto/camellia_generic.c dst[2] = cpu_to_be32(tmp[0]); dst 1050 crypto/camellia_generic.c dst[3] = cpu_to_be32(tmp[1]); dst 306 crypto/cast5_generic.c __be32 *dst = (__be32 *)outbuf; dst 350 crypto/cast5_generic.c dst[0] = cpu_to_be32(r); dst 351 crypto/cast5_generic.c dst[1] = cpu_to_be32(l); dst 363 crypto/cast5_generic.c __be32 *dst = (__be32 *)outbuf; dst 394 crypto/cast5_generic.c dst[0] = cpu_to_be32(r); dst 395 crypto/cast5_generic.c dst[1] = cpu_to_be32(l); dst 179 crypto/cast6_generic.c __be32 *dst = (__be32 *)outbuf; dst 202 crypto/cast6_generic.c dst[0] = cpu_to_be32(block[0]); dst 203 crypto/cast6_generic.c dst[1] = cpu_to_be32(block[1]); dst 204 crypto/cast6_generic.c dst[2] = cpu_to_be32(block[2]); dst 205 crypto/cast6_generic.c dst[3] = cpu_to_be32(block[3]); dst 217 crypto/cast6_generic.c __be32 *dst = (__be32 *)outbuf; dst 240 crypto/cast6_generic.c dst[0] = cpu_to_be32(block[0]); dst 241 crypto/cast6_generic.c dst[1] = cpu_to_be32(block[1]); dst 242 crypto/cast6_generic.c dst[2] = cpu_to_be32(block[2]); dst 243 crypto/cast6_generic.c dst[3] = cpu_to_be32(block[3]); dst 18 crypto/cbc.c const u8 *src, u8 *dst) dst 20 crypto/cbc.c crypto_cipher_encrypt_one(skcipher_cipher_simple(tfm), dst, src); dst 29 crypto/cbc.c const u8 *src, u8 *dst) dst 31 crypto/cbc.c crypto_cipher_decrypt_one(skcipher_cipher_simple(tfm), dst, src); dst 37 crypto/ccm.c struct scatterlist dst[3]; dst 47 crypto/ccm.c struct scatterlist dst[3]; dst 243 crypto/ccm.c scatterwalk_map_and_copy(odata, req->dst, dst 282 crypto/ccm.c if (req->src != req->dst) { dst 283 crypto/ccm.c sg_init_table(pctx->dst, 3); dst 284 crypto/ccm.c sg_set_buf(pctx->dst, tag, 16); dst 285 crypto/ccm.c sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen); dst 286 crypto/ccm.c if (sg != pctx->dst + 1) dst 287 crypto/ccm.c sg_chain(pctx->dst, 2, sg); dst 299 crypto/ccm.c struct scatterlist *dst; dst 313 crypto/ccm.c dst = pctx->src; dst 314 crypto/ccm.c if (req->src != req->dst) dst 315 crypto/ccm.c dst = pctx->dst; dst 320 crypto/ccm.c skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv); dst 326 crypto/ccm.c scatterwalk_map_and_copy(odata, sg_next(dst), cryptlen, dst 339 crypto/ccm.c struct scatterlist *dst; dst 343 crypto/ccm.c dst = sg_next(req->src == req->dst ? pctx->src : pctx->dst); dst 346 crypto/ccm.c err = crypto_ccm_auth(req, dst, cryptlen); dst 359 crypto/ccm.c struct scatterlist *dst; dst 376 crypto/ccm.c dst = pctx->src; dst 377 crypto/ccm.c if (req->src != req->dst) dst 378 crypto/ccm.c dst = pctx->dst; dst 385 crypto/ccm.c skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv); dst 390 crypto/ccm.c err = crypto_ccm_auth(req, sg_next(dst), cryptlen); dst 667 crypto/ccm.c if (req->src != req->dst) { dst 668 crypto/ccm.c sg_init_table(rctx->dst, 3); dst 669 crypto/ccm.c sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8); dst 670 crypto/ccm.c sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen); dst 671 crypto/ccm.c if (sg != rctx->dst + 1) dst 672 crypto/ccm.c sg_chain(rctx->dst, 2, sg); dst 679 crypto/ccm.c req->src == req->dst ? 
rctx->src : rctx->dst, dst 36 crypto/cfb.c const u8 *src, u8 *dst) dst 38 crypto/cfb.c crypto_cipher_encrypt_one(skcipher_cipher_simple(tfm), dst, src); dst 49 crypto/cfb.c u8 *dst = walk->dst.virt.addr; dst 54 crypto/cfb.c crypto_xor_cpy(dst, stream, src, nbytes); dst 63 crypto/cfb.c u8 *dst = walk->dst.virt.addr; dst 67 crypto/cfb.c crypto_cfb_encrypt_one(tfm, iv, dst); dst 68 crypto/cfb.c crypto_xor(dst, src, bsize); dst 69 crypto/cfb.c iv = dst; dst 72 crypto/cfb.c dst += bsize; dst 112 crypto/cfb.c if (walk.src.virt.addr == walk.dst.virt.addr) dst 133 crypto/cfb.c u8 *dst = walk->dst.virt.addr; dst 137 crypto/cfb.c crypto_cfb_encrypt_one(tfm, iv, dst); dst 138 crypto/cfb.c crypto_xor(dst, src, bsize); dst 142 crypto/cfb.c dst += bsize; dst 172 crypto/cfb.c if (walk->src.virt.addr == walk->dst.virt.addr) dst 55 crypto/chacha20poly1305.c struct scatterlist dst[2]; dst 114 crypto/chacha20poly1305.c scatterwalk_map_and_copy(rctx->tag, req->dst, dst 130 crypto/chacha20poly1305.c struct scatterlist *src, *dst; dst 139 crypto/chacha20poly1305.c dst = src; dst 140 crypto/chacha20poly1305.c if (req->src != req->dst) dst 141 crypto/chacha20poly1305.c dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen); dst 146 crypto/chacha20poly1305.c skcipher_request_set_crypt(&creq->req, src, dst, dst 239 crypto/chacha20poly1305.c crypt = req->dst; dst 406 crypto/chacha20poly1305.c struct scatterlist *src, *dst; dst 415 crypto/chacha20poly1305.c dst = src; dst 416 crypto/chacha20poly1305.c if (req->src != req->dst) dst 417 crypto/chacha20poly1305.c dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen); dst 422 crypto/chacha20poly1305.c skcipher_request_set_crypt(&creq->req, src, dst, dst 15 crypto/chacha_generic.c static void chacha_docrypt(u32 *state, u8 *dst, const u8 *src, dst 23 crypto/chacha_generic.c crypto_xor_cpy(dst, src, stream, CHACHA_BLOCK_SIZE); dst 25 crypto/chacha_generic.c dst += CHACHA_BLOCK_SIZE; dst 30 crypto/chacha_generic.c crypto_xor_cpy(dst, src, stream, bytes); dst 51 crypto/chacha_generic.c chacha_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr, dst 62 crypto/cipher.c u8 *dst, const u8 *src) dst 71 crypto/cipher.c memcpy(dst, tmp, size); dst 75 crypto/cipher.c u8 *dst, const u8 *src) dst 80 crypto/cipher.c if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) { dst 81 crypto/cipher.c cipher_crypt_unaligned(cipher->cia_encrypt, tfm, dst, src); dst 85 crypto/cipher.c cipher->cia_encrypt(tfm, dst, src); dst 89 crypto/cipher.c u8 *dst, const u8 *src) dst 94 crypto/cipher.c if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) { dst 95 crypto/cipher.c cipher_crypt_unaligned(cipher->cia_decrypt, tfm, dst, src); dst 99 crypto/cipher.c cipher->cia_decrypt(tfm, dst, src); dst 17 crypto/compress.c u8 *dst, unsigned int *dlen) dst 19 crypto/compress.c return tfm->__crt_alg->cra_compress.coa_compress(tfm, src, slen, dst, dst 25 crypto/compress.c u8 *dst, unsigned int *dlen) dst 27 crypto/compress.c return tfm->__crt_alg->cra_compress.coa_decompress(tfm, src, slen, dst, dst 299 crypto/cryptd.c skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, dst 327 crypto/cryptd.c skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, dst 28 crypto/crypto_null.c unsigned int slen, u8 *dst, unsigned int *dlen) dst 32 crypto/crypto_null.c memcpy(dst, src, slen); dst 71 crypto/crypto_null.c static void null_crypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 73 crypto/crypto_null.c memcpy(dst, src, NULL_BLOCK_SIZE); dst 84 
crypto/crypto_null.c if (walk.src.virt.addr != walk.dst.virt.addr) dst 85 crypto/crypto_null.c memcpy(walk.dst.virt.addr, walk.src.virt.addr, dst 36 crypto/ctr.c u8 *dst = walk->dst.virt.addr; dst 40 crypto/ctr.c crypto_xor_cpy(dst, keystream, src, nbytes); dst 53 crypto/ctr.c u8 *dst = walk->dst.virt.addr; dst 58 crypto/ctr.c fn(crypto_cipher_tfm(tfm), dst, ctrblk); dst 59 crypto/ctr.c crypto_xor(dst, src, bsize); dst 65 crypto/ctr.c dst += bsize; dst 110 crypto/ctr.c if (walk.src.virt.addr == walk.dst.virt.addr) dst 217 crypto/ctr.c skcipher_request_set_crypt(subreq, req->src, req->dst, dst 116 crypto/cts.c sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize); dst 166 crypto/cts.c skcipher_request_set_crypt(subreq, req->src, req->dst, nbytes, dst 176 crypto/cts.c skcipher_request_set_crypt(subreq, req->src, req->dst, dst 198 crypto/cts.c sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize); dst 261 crypto/cts.c skcipher_request_set_crypt(subreq, req->src, req->dst, nbytes, dst 280 crypto/cts.c skcipher_request_set_crypt(subreq, req->src, req->dst, dst 177 crypto/deflate.c u8 *dst, unsigned int *dlen, void *ctx) dst 191 crypto/deflate.c stream->next_out = (u8 *)dst; dst 206 crypto/deflate.c unsigned int slen, u8 *dst, unsigned int *dlen) dst 210 crypto/deflate.c return __deflate_compress(src, slen, dst, dlen, dctx); dst 214 crypto/deflate.c unsigned int slen, u8 *dst, unsigned int *dlen, dst 217 crypto/deflate.c return __deflate_compress(src, slen, dst, dlen, ctx); dst 221 crypto/deflate.c u8 *dst, unsigned int *dlen, void *ctx) dst 236 crypto/deflate.c stream->next_out = (u8 *)dst; dst 262 crypto/deflate.c unsigned int slen, u8 *dst, unsigned int *dlen) dst 266 crypto/deflate.c return __deflate_decompress(src, slen, dst, dlen, dctx); dst 270 crypto/deflate.c unsigned int slen, u8 *dst, unsigned int *dlen, dst 273 crypto/deflate.c return __deflate_decompress(src, slen, dst, dlen, ctx); dst 40 crypto/des_generic.c static void crypto_des_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 44 crypto/des_generic.c des_encrypt(dctx, dst, src); dst 47 crypto/des_generic.c static void crypto_des_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 51 crypto/des_generic.c des_decrypt(dctx, dst, src); dst 75 crypto/des_generic.c static void crypto_des3_ede_encrypt(struct crypto_tfm *tfm, u8 *dst, dst 80 crypto/des_generic.c des3_ede_encrypt(dctx, dst, src); dst 83 crypto/des_generic.c static void crypto_des3_ede_decrypt(struct crypto_tfm *tfm, u8 *dst, dst 88 crypto/des_generic.c des3_ede_decrypt(dctx, dst, src); dst 182 crypto/dh.c ret = mpi_write_to_sgl(val, req->dst, req->dst_len, &sign); dst 15 crypto/dh_helper.c static inline u8 *dh_pack_data(u8 *dst, u8 *end, const void *src, size_t size) dst 17 crypto/dh_helper.c if (!dst || size > end - dst) dst 19 crypto/dh_helper.c memcpy(dst, src, size); dst 20 crypto/dh_helper.c return dst + size; dst 23 crypto/dh_helper.c static inline const u8 *dh_unpack_data(void *dst, const void *src, size_t size) dst 25 crypto/dh_helper.c memcpy(dst, src, size); dst 778 crypto/drbg.c static inline void drbg_add_buf(unsigned char *dst, size_t dstlen, dst 787 crypto/drbg.c dstptr = dst + (dstlen-1); dst 941 crypto/drbg.c unsigned char *dst = drbg->scratchpad + drbg_statelen(drbg); dst 953 crypto/drbg.c ret = drbg_kcapi_hash(drbg, dst, &datalist); dst 961 crypto/drbg.c memcpy(buf + len, dst, outlen); dst 1933 crypto/drbg.c u8 *dst, unsigned int dlen) dst 1945 crypto/drbg.c return drbg_generate_long(drbg, dst, dlen, addtl); dst 28 crypto/ecb.c 
u8 *dst = walk.dst.virt.addr; dst 31 crypto/ecb.c fn(crypto_cipher_tfm(cipher), dst, src); dst 34 crypto/ecb.c dst += bsize; dst 119 crypto/ecdh.c copied = sg_copy_from_buffer(req->dst, sg_nents_for_len(req->dst, dst 15 crypto/ecdh_helper.c static inline u8 *ecdh_pack_data(void *dst, const void *src, size_t sz) dst 17 crypto/ecdh_helper.c memcpy(dst, src, sz); dst 18 crypto/ecdh_helper.c return dst + sz; dst 21 crypto/ecdh_helper.c static inline const u8 *ecdh_unpack_data(void *dst, const void *src, size_t sz) dst 23 crypto/ecdh_helper.c memcpy(dst, src, sz); dst 44 crypto/echainiv.c if (req->src != req->dst) { dst 50 crypto/echainiv.c skcipher_request_set_crypt(nreq, req->src, req->dst, dst 61 crypto/echainiv.c aead_request_set_crypt(subreq, req->dst, req->dst, dst 69 crypto/echainiv.c scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1); dst 103 crypto/echainiv.c aead_request_set_crypt(subreq, req->src, req->dst, dst 183 crypto/ecrdsa.c static u8 *ecrdsa_unpack_u32(u32 *dst, void *src) dst 185 crypto/ecrdsa.c memcpy(dst, src, sizeof(u32)); dst 167 crypto/essiv.c skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, dst 213 crypto/essiv.c if (req->src == req->dst || !enc) { dst 214 crypto/essiv.c scatterwalk_map_and_copy(req->iv, req->dst, dst 263 crypto/essiv.c aead_request_set_crypt(subreq, src, req->dst, req->cryptlen, req->iv); dst 236 crypto/fcrypt.c static void fcrypt_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 262 crypto/fcrypt.c memcpy(dst, &X, sizeof(X)); dst 268 crypto/fcrypt.c static void fcrypt_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 294 crypto/fcrypt.c memcpy(dst, &X, sizeof(X)); dst 40 crypto/gcm.c struct scatterlist dst[3]; dst 69 crypto/gcm.c struct scatterlist dst[3]; dst 174 crypto/gcm.c if (req->src != req->dst) { dst 175 crypto/gcm.c sg_init_table(pctx->dst, 3); dst 176 crypto/gcm.c sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag)); dst 177 crypto/gcm.c sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen); dst 178 crypto/gcm.c if (sg != pctx->dst + 1) dst 179 crypto/gcm.c sg_chain(pctx->dst, 2, sg); dst 190 crypto/gcm.c struct scatterlist *dst; dst 192 crypto/gcm.c dst = req->src == req->dst ? pctx->src : pctx->dst; dst 195 crypto/gcm.c skcipher_request_set_crypt(skreq, pctx->src, dst, dst 424 crypto/gcm.c scatterwalk_map_and_copy(auth_tag, req->dst, dst 435 crypto/gcm.c gctx->src = sg_next(req->src == req->dst ? pctx->src : pctx->dst); dst 783 crypto/gcm.c if (req->src != req->dst) { dst 784 crypto/gcm.c sg_init_table(rctx->dst, 3); dst 785 crypto/gcm.c sg_set_buf(rctx->dst, iv + GCM_AES_IV_SIZE, req->assoclen - 8); dst 786 crypto/gcm.c sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen); dst 787 crypto/gcm.c if (sg != rctx->dst + 1) dst 788 crypto/gcm.c sg_chain(rctx->dst, 2, sg); dst 795 crypto/gcm.c req->src == req->dst ? 
rctx->src : rctx->dst, dst 999 crypto/gcm.c if (req->src != req->dst) { dst 1011 crypto/gcm.c aead_request_set_crypt(subreq, req->src, req->dst, dst 1030 crypto/gcm.c skcipher_request_set_crypt(nreq, req->src, req->dst, nbytes, NULL); dst 85 crypto/ghash-generic.c u8 *dst = dctx->buffer; dst 89 crypto/ghash-generic.c u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes); dst 98 crypto/ghash-generic.c gf128mul_4k_lle((be128 *)dst, ctx->gf128); dst 102 crypto/ghash-generic.c crypto_xor(dst, src, GHASH_BLOCK_SIZE); dst 103 crypto/ghash-generic.c gf128mul_4k_lle((be128 *)dst, ctx->gf128); dst 111 crypto/ghash-generic.c *dst++ ^= *src++; dst 119 crypto/ghash-generic.c u8 *dst = dctx->buffer; dst 122 crypto/ghash-generic.c u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes); dst 127 crypto/ghash-generic.c gf128mul_4k_lle((be128 *)dst, ctx->gf128); dst 133 crypto/ghash-generic.c static int ghash_final(struct shash_desc *desc, u8 *dst) dst 140 crypto/ghash-generic.c memcpy(dst, buf, GHASH_BLOCK_SIZE); dst 128 crypto/keywrap.c struct scatterlist *src, *dst; dst 149 crypto/keywrap.c dst = req->dst; dst 170 crypto/keywrap.c crypto_kw_scatterlist_ff(&dst_walk, dst, nbytes); dst 179 crypto/keywrap.c src = req->dst; dst 180 crypto/keywrap.c dst = req->dst; dst 197 crypto/keywrap.c struct scatterlist *src, *dst; dst 222 crypto/keywrap.c dst = req->dst; dst 229 crypto/keywrap.c scatterwalk_start(&dst_walk, dst); dst 251 crypto/keywrap.c src = req->dst; dst 252 crypto/keywrap.c dst = req->dst; dst 806 crypto/khazad.c __be64 *dst = (__be64 *)ciphertext; dst 834 crypto/khazad.c *dst = cpu_to_be64(state); dst 837 crypto/khazad.c static void khazad_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 840 crypto/khazad.c khazad_crypt(ctx->E, dst, src); dst 843 crypto/khazad.c static void khazad_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 846 crypto/khazad.c khazad_crypt(ctx->D, dst, src); dst 176 crypto/lrw.c wdst = w.dst.virt.addr; dst 232 crypto/lrw.c skcipher_request_set_crypt(subreq, req->dst, req->dst, dst 54 crypto/lz4.c u8 *dst, unsigned int *dlen, void *ctx) dst 56 crypto/lz4.c int out_len = LZ4_compress_default(src, dst, dst 67 crypto/lz4.c unsigned int slen, u8 *dst, unsigned int *dlen, dst 70 crypto/lz4.c return __lz4_compress_crypto(src, slen, dst, dlen, ctx); dst 74 crypto/lz4.c unsigned int slen, u8 *dst, unsigned int *dlen) dst 78 crypto/lz4.c return __lz4_compress_crypto(src, slen, dst, dlen, ctx->lz4_comp_mem); dst 82 crypto/lz4.c u8 *dst, unsigned int *dlen, void *ctx) dst 84 crypto/lz4.c int out_len = LZ4_decompress_safe(src, dst, slen, *dlen); dst 94 crypto/lz4.c unsigned int slen, u8 *dst, unsigned int *dlen, dst 97 crypto/lz4.c return __lz4_decompress_crypto(src, slen, dst, dlen, NULL); dst 101 crypto/lz4.c unsigned int slen, u8 *dst, dst 104 crypto/lz4.c return __lz4_decompress_crypto(src, slen, dst, dlen, NULL); dst 53 crypto/lz4hc.c u8 *dst, unsigned int *dlen, void *ctx) dst 55 crypto/lz4hc.c int out_len = LZ4_compress_HC(src, dst, slen, dst 66 crypto/lz4hc.c unsigned int slen, u8 *dst, unsigned int *dlen, dst 69 crypto/lz4hc.c return __lz4hc_compress_crypto(src, slen, dst, dlen, ctx); dst 73 crypto/lz4hc.c unsigned int slen, u8 *dst, dst 78 crypto/lz4hc.c return __lz4hc_compress_crypto(src, slen, dst, dlen, dst 83 crypto/lz4hc.c u8 *dst, unsigned int *dlen, void *ctx) dst 85 crypto/lz4hc.c int out_len = LZ4_decompress_safe(src, dst, slen, *dlen); dst 95 crypto/lz4hc.c unsigned int slen, u8 *dst, unsigned int *dlen, dst 98 crypto/lz4hc.c return 
__lz4hc_decompress_crypto(src, slen, dst, dlen, NULL); dst 102 crypto/lz4hc.c unsigned int slen, u8 *dst, dst 105 crypto/lz4hc.c return __lz4hc_decompress_crypto(src, slen, dst, dlen, NULL); dst 53 crypto/lzo-rle.c u8 *dst, unsigned int *dlen, void *ctx) dst 58 crypto/lzo-rle.c err = lzorle1x_1_compress(src, slen, dst, &tmp_len, ctx); dst 68 crypto/lzo-rle.c unsigned int slen, u8 *dst, unsigned int *dlen) dst 72 crypto/lzo-rle.c return __lzorle_compress(src, slen, dst, dlen, ctx->lzorle_comp_mem); dst 76 crypto/lzo-rle.c unsigned int slen, u8 *dst, unsigned int *dlen, dst 79 crypto/lzo-rle.c return __lzorle_compress(src, slen, dst, dlen, ctx); dst 83 crypto/lzo-rle.c u8 *dst, unsigned int *dlen) dst 88 crypto/lzo-rle.c err = lzo1x_decompress_safe(src, slen, dst, &tmp_len); dst 98 crypto/lzo-rle.c unsigned int slen, u8 *dst, unsigned int *dlen) dst 100 crypto/lzo-rle.c return __lzorle_decompress(src, slen, dst, dlen); dst 104 crypto/lzo-rle.c unsigned int slen, u8 *dst, unsigned int *dlen, dst 107 crypto/lzo-rle.c return __lzorle_decompress(src, slen, dst, dlen); dst 53 crypto/lzo.c u8 *dst, unsigned int *dlen, void *ctx) dst 58 crypto/lzo.c err = lzo1x_1_compress(src, slen, dst, &tmp_len, ctx); dst 68 crypto/lzo.c unsigned int slen, u8 *dst, unsigned int *dlen) dst 72 crypto/lzo.c return __lzo_compress(src, slen, dst, dlen, ctx->lzo_comp_mem); dst 76 crypto/lzo.c unsigned int slen, u8 *dst, unsigned int *dlen, dst 79 crypto/lzo.c return __lzo_compress(src, slen, dst, dlen, ctx); dst 83 crypto/lzo.c u8 *dst, unsigned int *dlen) dst 88 crypto/lzo.c err = lzo1x_decompress_safe(src, slen, dst, &tmp_len); dst 98 crypto/lzo.c unsigned int slen, u8 *dst, unsigned int *dlen) dst 100 crypto/lzo.c return __lzo_decompress(src, slen, dst, dlen); dst 104 crypto/lzo.c unsigned int slen, u8 *dst, unsigned int *dlen, dst 107 crypto/lzo.c return __lzo_decompress(src, slen, dst, dlen); dst 104 crypto/michael_mic.c __le32 *dst = (__le32 *)out; dst 126 crypto/michael_mic.c dst[0] = cpu_to_le32(mctx->l); dst 127 crypto/michael_mic.c dst[1] = cpu_to_le32(mctx->r); dst 197 crypto/nhpoly1305.c int crypto_nhpoly1305_final_helper(struct shash_desc *desc, u8 *dst, nh_t nh_fn) dst 212 crypto/nhpoly1305.c poly1305_core_emit(&state->poly_state, dst); dst 217 crypto/nhpoly1305.c int crypto_nhpoly1305_final(struct shash_desc *desc, u8 *dst) dst 219 crypto/nhpoly1305.c return crypto_nhpoly1305_final_helper(desc, dst, nh_generic); dst 29 crypto/ofb.c u8 *dst = walk.dst.virt.addr; dst 35 crypto/ofb.c crypto_xor_cpy(dst, src, iv, bsize); dst 36 crypto/ofb.c dst += bsize; dst 45 crypto/ofb.c crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, walk.iv, dst 26 crypto/pcbc.c u8 *dst = walk->dst.virt.addr; dst 31 crypto/pcbc.c crypto_cipher_encrypt_one(tfm, dst, iv); dst 32 crypto/pcbc.c crypto_xor_cpy(iv, dst, src, bsize); dst 35 crypto/pcbc.c dst += bsize; dst 74 crypto/pcbc.c if (walk.src.virt.addr == walk.dst.virt.addr) dst 93 crypto/pcbc.c u8 *dst = walk->dst.virt.addr; dst 97 crypto/pcbc.c crypto_cipher_decrypt_one(tfm, dst, src); dst 98 crypto/pcbc.c crypto_xor(dst, iv, bsize); dst 99 crypto/pcbc.c crypto_xor_cpy(iv, dst, src, bsize); dst 102 crypto/pcbc.c dst += bsize; dst 141 crypto/pcbc.c if (walk.src.virt.addr == walk.dst.virt.addr) dst 112 crypto/pcrypt.c aead_request_set_crypt(creq, req->src, req->dst, dst 157 crypto/pcrypt.c aead_request_set_crypt(creq, req->src, req->dst, dst 216 crypto/poly1305_generic.c void poly1305_core_emit(const struct poly1305_state *state, void *dst) dst 257 crypto/poly1305_generic.c 
put_unaligned_le32((h0 >> 0) | (h1 << 26), dst + 0); dst 258 crypto/poly1305_generic.c put_unaligned_le32((h1 >> 6) | (h2 << 20), dst + 4); dst 259 crypto/poly1305_generic.c put_unaligned_le32((h2 >> 12) | (h3 << 14), dst + 8); dst 260 crypto/poly1305_generic.c put_unaligned_le32((h3 >> 18) | (h4 << 8), dst + 12); dst 264 crypto/poly1305_generic.c int crypto_poly1305_final(struct shash_desc *desc, u8 *dst) dst 284 crypto/poly1305_generic.c put_unaligned_le32(f, dst + 0); dst 286 crypto/poly1305_generic.c put_unaligned_le32(f, dst + 4); dst 288 crypto/poly1305_generic.c put_unaligned_le32(f, dst + 8); dst 290 crypto/poly1305_generic.c put_unaligned_le32(f, dst + 12); dst 270 crypto/rmd128.c __le32 *dst = (__le32 *)out; dst 285 crypto/rmd128.c dst[i] = cpu_to_le32p(&rctx->state[i]); dst 314 crypto/rmd160.c __le32 *dst = (__le32 *)out; dst 329 crypto/rmd160.c dst[i] = cpu_to_le32p(&rctx->state[i]); dst 289 crypto/rmd256.c __le32 *dst = (__le32 *)out; dst 304 crypto/rmd256.c dst[i] = cpu_to_le32p(&rctx->state[i]); dst 338 crypto/rmd320.c __le32 *dst = (__le32 *)out; dst 353 crypto/rmd320.c dst[i] = cpu_to_le32p(&rctx->state[i]); dst 197 crypto/rsa-pkcs1pad.c sg_copy_to_buffer(req->dst, sg_nents_for_len(req->dst, len), dst 199 crypto/rsa-pkcs1pad.c sg_copy_from_buffer(req->dst, dst 200 crypto/rsa-pkcs1pad.c sg_nents_for_len(req->dst, ctx->key_size), dst 267 crypto/rsa-pkcs1pad.c req->dst, ctx->key_size - 1, req->dst_len); dst 320 crypto/rsa-pkcs1pad.c sg_copy_from_buffer(req->dst, dst 321 crypto/rsa-pkcs1pad.c sg_nents_for_len(req->dst, req->dst_len), dst 426 crypto/rsa-pkcs1pad.c req->dst, ctx->key_size - 1, req->dst_len); dst 538 crypto/rsa-pkcs1pad.c if (WARN_ON(req->dst) || dst 79 crypto/rsa.c ret = mpi_write_to_sgl(c, req->dst, req->dst_len, &sign); dst 118 crypto/rsa.c ret = mpi_write_to_sgl(m, req->dst, req->dst_len, &sign); dst 84 crypto/salsa20_generic.c static void salsa20_docrypt(u32 *state, u8 *dst, const u8 *src, dst 91 crypto/salsa20_generic.c crypto_xor_cpy(dst, src, (const u8 *)stream, dst 94 crypto/salsa20_generic.c dst += SALSA20_BLOCK_SIZE; dst 99 crypto/salsa20_generic.c crypto_xor_cpy(dst, src, (const u8 *)stream, bytes); dst 171 crypto/salsa20_generic.c salsa20_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr, dst 21 crypto/scatterwalk.c void *dst = out ? 
sgdata : buf; dst 23 crypto/scatterwalk.c memcpy(dst, src, nbytes); dst 72 crypto/scatterwalk.c struct scatterlist *scatterwalk_ffwd(struct scatterlist dst[2], dst 87 crypto/scatterwalk.c sg_init_table(dst, 2); dst 88 crypto/scatterwalk.c sg_set_page(dst, sg_page(src), src->length - len, src->offset + len); dst 89 crypto/scatterwalk.c scatterwalk_crypto_chain(dst, sg_next(src), 2); dst 91 crypto/scatterwalk.c return dst; dst 30 crypto/scompress.c void *dst; dst 77 crypto/scompress.c vfree(scratch->dst); dst 79 crypto/scompress.c scratch->dst = NULL; dst 100 crypto/scompress.c scratch->dst = mem; dst 132 crypto/scompress.c if (req->dst && !req->dlen) dst 144 crypto/scompress.c scratch->dst, &req->dlen, *ctx); dst 147 crypto/scompress.c scratch->dst, &req->dlen, *ctx); dst 149 crypto/scompress.c if (!req->dst) { dst 150 crypto/scompress.c req->dst = sgl_alloc(req->dlen, GFP_ATOMIC, NULL); dst 151 crypto/scompress.c if (!req->dst) { dst 156 crypto/scompress.c scatterwalk_map_and_copy(scratch->dst, req->dst, 0, req->dlen, dst 368 crypto/seed.c __be32 *dst = (__be32 *)out; dst 394 crypto/seed.c dst[0] = cpu_to_be32(x3); dst 395 crypto/seed.c dst[1] = cpu_to_be32(x4); dst 396 crypto/seed.c dst[2] = cpu_to_be32(x1); dst 397 crypto/seed.c dst[3] = cpu_to_be32(x2); dst 406 crypto/seed.c __be32 *dst = (__be32 *)out; dst 432 crypto/seed.c dst[0] = cpu_to_be32(x3); dst 433 crypto/seed.c dst[1] = cpu_to_be32(x4); dst 434 crypto/seed.c dst[2] = cpu_to_be32(x1); dst 435 crypto/seed.c dst[3] = cpu_to_be32(x2); dst 70 crypto/seqiv.c if (req->src != req->dst) { dst 76 crypto/seqiv.c skcipher_request_set_crypt(nreq, req->src, req->dst, dst 98 crypto/seqiv.c aead_request_set_crypt(subreq, req->dst, req->dst, dst 103 crypto/seqiv.c scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1); dst 129 crypto/seqiv.c aead_request_set_crypt(subreq, req->src, req->dst, dst 452 crypto/serpent_generic.c void __serpent_encrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src) dst 456 crypto/serpent_generic.c __le32 *d = (__le32 *)dst; dst 510 crypto/serpent_generic.c static void serpent_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 514 crypto/serpent_generic.c __serpent_encrypt(ctx, dst, src); dst 517 crypto/serpent_generic.c void __serpent_decrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src) dst 521 crypto/serpent_generic.c __le32 *d = (__le32 *)dst; dst 570 crypto/serpent_generic.c static void serpent_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 574 crypto/serpent_generic.c __serpent_decrypt(ctx, dst, src); dst 589 crypto/serpent_generic.c static void tnepres_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 592 crypto/serpent_generic.c u32 * const d = (u32 * const)dst; dst 609 crypto/serpent_generic.c static void tnepres_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 612 crypto/serpent_generic.c u32 * const d = (u32 * const)dst; dst 36 crypto/skcipher.c struct scatter_walk dst; dst 65 crypto/skcipher.c walk->dst.virt.addr = skcipher_map(&walk->out); dst 75 crypto/skcipher.c skcipher_unmap(&walk->out, walk->dst.virt.addr); dst 128 crypto/skcipher.c memcpy(walk->dst.virt.addr, walk->page, n); dst 197 crypto/skcipher.c scatterwalk_copychunks(data, &p->dst, p->len, 1); dst 220 crypto/skcipher.c p->dst = walk->out; dst 273 crypto/skcipher.c walk->dst.virt.addr = PTR_ALIGN(buffer, alignmask + 1); dst 274 crypto/skcipher.c walk->dst.virt.addr = skcipher_get_spot(walk->dst.virt.addr, bsize); dst 275 crypto/skcipher.c walk->src.virt.addr = walk->dst.virt.addr; 
dst 295 crypto/skcipher.c walk->dst.virt.addr = tmp; dst 323 crypto/skcipher.c walk->dst.phys.page = scatterwalk_page(&walk->out); dst 324 crypto/skcipher.c walk->dst.phys.offset = offset_in_page(walk->out.offset); dst 329 crypto/skcipher.c diff = walk->src.phys.offset - walk->dst.phys.offset; dst 330 crypto/skcipher.c diff |= walk->src.virt.page - walk->dst.virt.page; dst 333 crypto/skcipher.c walk->dst.virt.addr = walk->src.virt.addr; dst 389 crypto/skcipher.c walk->dst.phys.page = virt_to_page(walk->dst.virt.addr); dst 391 crypto/skcipher.c walk->dst.phys.offset &= PAGE_SIZE - 1; dst 462 crypto/skcipher.c scatterwalk_start(&walk->out, req->dst); dst 526 crypto/skcipher.c scatterwalk_start(&walk->out, req->dst); dst 635 crypto/skcipher.c return crypt(&desc, req->dst, req->src, req->cryptlen); dst 731 crypto/skcipher.c ablkcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, dst 57 crypto/tea.c static void tea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 63 crypto/tea.c __le32 *out = (__le32 *)dst; dst 85 crypto/tea.c static void tea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 91 crypto/tea.c __le32 *out = (__le32 *)dst; dst 130 crypto/tea.c static void xtea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 136 crypto/tea.c __le32 *out = (__le32 *)dst; dst 151 crypto/tea.c static void xtea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 156 crypto/tea.c __le32 *out = (__le32 *)dst; dst 174 crypto/tea.c static void xeta_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 180 crypto/tea.c __le32 *out = (__le32 *)dst; dst 195 crypto/tea.c static void xeta_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 200 crypto/tea.c __le32 *out = (__le32 *)dst; dst 677 crypto/testmgr.c struct test_sglist dst; dst 690 crypto/testmgr.c if (init_test_sglist(&tsgls->dst) != 0) dst 706 crypto/testmgr.c destroy_test_sglist(&tsgls->dst); dst 733 crypto/testmgr.c tsgls->dst.sgl_ptr = tsgls->src.sgl; dst 734 crypto/testmgr.c tsgls->dst.nents = tsgls->src.nents; dst 737 crypto/testmgr.c return build_test_sglist(&tsgls->dst, dst 1921 crypto/testmgr.c aead_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr, dst 1936 crypto/testmgr.c req->dst != tsgls->dst.sgl_ptr || dst 1951 crypto/testmgr.c if (req->dst != tsgls->dst.sgl_ptr) dst 1968 crypto/testmgr.c if (tsgls->dst.sgl_ptr != tsgls->src.sgl && dst 1969 crypto/testmgr.c is_test_sglist_corrupted(&tsgls->dst)) { dst 1991 crypto/testmgr.c err = verify_correct_output(&tsgls->dst, enc ? vec->ctext : vec->ptext, dst 2066 crypto/testmgr.c struct scatterlist src[2], dst; dst 2116 crypto/testmgr.c sg_init_one(&dst, vec->ctext, vec->alen + vec->clen); dst 2119 crypto/testmgr.c aead_request_set_crypt(req, src, &dst, vec->plen, iv); dst 2511 crypto/testmgr.c skcipher_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr, dst 2524 crypto/testmgr.c req->dst != tsgls->dst.sgl_ptr || dst 2537 crypto/testmgr.c if (req->dst != tsgls->dst.sgl_ptr) dst 2554 crypto/testmgr.c if (tsgls->dst.sgl_ptr != tsgls->src.sgl && dst 2555 crypto/testmgr.c is_test_sglist_corrupted(&tsgls->dst)) { dst 2576 crypto/testmgr.c err = verify_correct_output(&tsgls->dst, enc ? 
vec->ctext : vec->ptext, dst 2655 crypto/testmgr.c struct scatterlist src, dst; dst 2679 crypto/testmgr.c sg_init_one(&dst, vec->ctext, vec->len); dst 2682 crypto/testmgr.c skcipher_request_set_crypt(req, &src, &dst, vec->len, iv); dst 3025 crypto/testmgr.c struct scatterlist src, dst; dst 3053 crypto/testmgr.c sg_init_one(&dst, output, dlen); dst 3064 crypto/testmgr.c acomp_request_set_params(req, &src, &dst, ilen, dlen); dst 3080 crypto/testmgr.c sg_init_one(&dst, decomp_out, dlen); dst 3082 crypto/testmgr.c acomp_request_set_params(req, &src, &dst, ilen, dlen); dst 3130 crypto/testmgr.c sg_init_one(&dst, output, dlen); dst 3141 crypto/testmgr.c acomp_request_set_params(req, &src, &dst, ilen, dlen); dst 3485 crypto/testmgr.c struct scatterlist src, dst; dst 3506 crypto/testmgr.c sg_init_one(&dst, output_buf, out_len_max); dst 3507 crypto/testmgr.c kpp_request_set_output(req, &dst, out_len_max); dst 3521 crypto/testmgr.c a_public = kmemdup(sg_virt(req->dst), out_len_max, GFP_KERNEL); dst 3528 crypto/testmgr.c if (memcmp(vec->expected_a_public, sg_virt(req->dst), dst 3545 crypto/testmgr.c sg_init_one(&dst, output_buf, out_len_max); dst 3547 crypto/testmgr.c kpp_request_set_output(req, &dst, out_len_max); dst 3559 crypto/testmgr.c a_ss = kmemdup(sg_virt(req->dst), vec->expected_ss_size, GFP_KERNEL); dst 3575 crypto/testmgr.c sg_init_one(&dst, output_buf, out_len_max); dst 3577 crypto/testmgr.c kpp_request_set_output(req, &dst, out_len_max); dst 3597 crypto/testmgr.c if (memcmp(shared_secret, sg_virt(req->dst), dst 3651 crypto/testmgr.c static u8 *test_pack_u32(u8 *dst, u32 val) dst 3653 crypto/testmgr.c memcpy(dst, &val, sizeof(val)); dst 3654 crypto/testmgr.c return dst + sizeof(val); dst 3667 crypto/testmgr.c struct scatterlist src, dst, src_tab[3]; dst 3740 crypto/testmgr.c sg_init_one(&dst, outbuf_enc, out_len_max); dst 3741 crypto/testmgr.c akcipher_request_set_crypt(req, src_tab, &dst, m_size, dst 3793 crypto/testmgr.c sg_init_one(&dst, outbuf_dec, out_len_max); dst 3795 crypto/testmgr.c akcipher_request_set_crypt(req, &src, &dst, c_size, out_len_max); dst 553 crypto/tgr192.c __be64 *dst = (__be64 *)out; dst 597 crypto/tgr192.c dst[0] = be64p[0] = cpu_to_be64(tctx->a); dst 598 crypto/tgr192.c dst[1] = be64p[1] = cpu_to_be64(tctx->b); dst 599 crypto/tgr192.c dst[2] = be64p[2] = cpu_to_be64(tctx->c); dst 90 crypto/twofish_generic.c dst[n] = cpu_to_le32(x) dst 99 crypto/twofish_generic.c __le32 *dst = (__le32 *)out; dst 136 crypto/twofish_generic.c __le32 *dst = (__le32 *)out; dst 110 crypto/xts.c wdst = w.dst.virt.addr; dst 174 crypto/xts.c rctx->tail = scatterwalk_ffwd(rctx->sg, req->dst, dst 252 crypto/xts.c skcipher_request_set_crypt(subreq, req->dst, req->dst, dst 151 crypto/zstd.c u8 *dst, unsigned int *dlen, void *ctx) dst 157 crypto/zstd.c out_len = ZSTD_compressCCtx(zctx->cctx, dst, *dlen, src, slen, params); dst 165 crypto/zstd.c unsigned int slen, u8 *dst, unsigned int *dlen) dst 169 crypto/zstd.c return __zstd_compress(src, slen, dst, dlen, ctx); dst 173 crypto/zstd.c unsigned int slen, u8 *dst, unsigned int *dlen, dst 176 crypto/zstd.c return __zstd_compress(src, slen, dst, dlen, ctx); dst 180 crypto/zstd.c u8 *dst, unsigned int *dlen, void *ctx) dst 185 crypto/zstd.c out_len = ZSTD_decompressDCtx(zctx->dctx, dst, *dlen, src, slen); dst 193 crypto/zstd.c unsigned int slen, u8 *dst, unsigned int *dlen) dst 197 crypto/zstd.c return __zstd_decompress(src, slen, dst, dlen, ctx); dst 201 crypto/zstd.c unsigned int slen, u8 *dst, unsigned int *dlen, dst 204 crypto/zstd.c return 
__zstd_decompress(src, slen, dst, dlen, ctx); dst 260 drivers/acpi/apei/erst.c void *src, *dst; dst 275 drivers/acpi/apei/erst.c dst = ioremap(ctx->dst_base + offset, ctx->var2); dst 276 drivers/acpi/apei/erst.c if (!dst) { dst 281 drivers/acpi/apei/erst.c memmove(dst, src, ctx->var2); dst 284 drivers/acpi/apei/erst.c iounmap(dst); dst 223 drivers/acpi/nfit/core.c void *dst; dst 251 drivers/acpi/nfit/core.c dst = buf + 1; dst 254 drivers/acpi/nfit/core.c buf->buffer.pointer = dst; dst 259 drivers/acpi/nfit/core.c memcpy(dst, &obj->integer.value, 4); dst 260 drivers/acpi/nfit/core.c dst += 4; dst 262 drivers/acpi/nfit/core.c memcpy(dst, obj->buffer.pointer, obj->buffer.length); dst 263 drivers/acpi/nfit/core.c dst += obj->buffer.length; dst 274 drivers/acpi/nfit/core.c void *dst = NULL; dst 285 drivers/acpi/nfit/core.c dst = buf + 1; dst 288 drivers/acpi/nfit/core.c buf->buffer.pointer = dst; dst 289 drivers/acpi/nfit/core.c memcpy(dst, &integer->integer.value, 4); dst 44 drivers/ata/ahci_xgene.c #define PORTADDR_SET(dst, src) \ dst 45 drivers/ata/ahci_xgene.c (((dst) & ~0x0000003f) | (((u32)(src)) & 0x0000003f)) dst 47 drivers/ata/ahci_xgene.c #define PORTPHY1CFG_FRCPHYRDY_SET(dst, src) \ dst 48 drivers/ata/ahci_xgene.c (((dst) & ~0x00100000) | (((u32)(src) << 0x14) & 0x00100000)) dst 54 drivers/ata/ahci_xgene.c #define PORTPHY5CFG_RTCHG_SET(dst, src) \ dst 55 drivers/ata/ahci_xgene.c (((dst) & ~0xfff00000) | (((u32)(src) << 0x14) & 0xfff00000)) dst 56 drivers/ata/ahci_xgene.c #define PORTAXICFG_EN_CONTEXT_SET(dst, src) \ dst 57 drivers/ata/ahci_xgene.c (((dst) & ~0x01000000) | (((u32)(src) << 0x18) & 0x01000000)) dst 59 drivers/ata/ahci_xgene.c #define PORTAXICFG_OUTTRANS_SET(dst, src) \ dst 60 drivers/ata/ahci_xgene.c (((dst) & ~0x00f00000) | (((u32)(src) << 0x14) & 0x00f00000)) dst 62 drivers/ata/ahci_xgene.c #define PORTRANSCFG_RXWM_SET(dst, src) \ dst 63 drivers/ata/ahci_xgene.c (((dst) & ~0x0000007f) | (((u32)(src)) & 0x0000007f)) dst 547 drivers/ata/libata-scsi.c u16 __user *dst = arg; dst 553 drivers/ata/libata-scsi.c if (copy_to_user(dst, dev->id, ATA_ID_WORDS * sizeof(u16))) dst 557 drivers/ata/libata-scsi.c if (copy_to_user(dst + ATA_ID_PROD, buf, ATA_ID_PROD_LEN)) dst 561 drivers/ata/libata-scsi.c if (copy_to_user(dst + ATA_ID_FW_REV, buf, ATA_ID_FW_REV_LEN)) dst 565 drivers/ata/libata-scsi.c if (copy_to_user(dst + ATA_ID_SERNO, buf, ATA_ID_SERNO_LEN)) dst 19 drivers/base/regmap/regcache-lzo.c void *dst; dst 48 drivers/base/regmap/regcache-lzo.c lzo_ctx->dst, &compress_size, lzo_ctx->wmem); dst 62 drivers/base/regmap/regcache-lzo.c lzo_ctx->dst, &dst_len); dst 74 drivers/base/regmap/regcache-lzo.c lzo_ctx->dst = kmalloc(lzo_ctx->dst_len, GFP_KERNEL); dst 75 drivers/base/regmap/regcache-lzo.c if (!lzo_ctx->dst) { dst 92 drivers/base/regmap/regcache-lzo.c lzo_ctx->dst = kmalloc(lzo_ctx->dst_len, GFP_KERNEL); dst 93 drivers/base/regmap/regcache-lzo.c if (!lzo_ctx->dst) { dst 218 drivers/base/regmap/regcache-lzo.c kfree(lzo_blocks[i]->dst); dst 244 drivers/base/regmap/regcache-lzo.c tmp_dst = lzo_block->dst; dst 248 drivers/base/regmap/regcache-lzo.c lzo_block->src = lzo_block->dst; dst 255 drivers/base/regmap/regcache-lzo.c *value = regcache_get_val(map, lzo_block->dst, blkpos); dst 257 drivers/base/regmap/regcache-lzo.c kfree(lzo_block->dst); dst 259 drivers/base/regmap/regcache-lzo.c lzo_block->dst = tmp_dst; dst 281 drivers/base/regmap/regcache-lzo.c tmp_dst = lzo_block->dst; dst 285 drivers/base/regmap/regcache-lzo.c lzo_block->src = lzo_block->dst; dst 291 
drivers/base/regmap/regcache-lzo.c kfree(lzo_block->dst); dst 296 drivers/base/regmap/regcache-lzo.c if (regcache_set_val(map, lzo_block->dst, blkpos, value)) { dst 297 drivers/base/regmap/regcache-lzo.c kfree(lzo_block->dst); dst 302 drivers/base/regmap/regcache-lzo.c lzo_block->src = lzo_block->dst; dst 308 drivers/base/regmap/regcache-lzo.c kfree(lzo_block->dst); dst 319 drivers/base/regmap/regcache-lzo.c lzo_block->dst = tmp_dst; dst 340 drivers/base/swnode.c static int property_copy_string_array(struct property_entry *dst, dst 361 drivers/base/swnode.c dst->pointer.str = d; dst 365 drivers/base/swnode.c static int property_entry_copy_data(struct property_entry *dst, dst 377 drivers/base/swnode.c error = property_copy_string_array(dst, src); dst 380 drivers/base/swnode.c new = dst->pointer.str; dst 394 drivers/base/swnode.c dst->length = src->length; dst 395 drivers/base/swnode.c dst->is_array = src->is_array; dst 396 drivers/base/swnode.c dst->type = src->type; dst 398 drivers/base/swnode.c property_set_pointer(dst, new); dst 400 drivers/base/swnode.c dst->name = kstrdup(src->name, GFP_KERNEL); dst 401 drivers/base/swnode.c if (!dst->name) dst 407 drivers/base/swnode.c property_entry_free_data(dst); dst 40 drivers/block/aoe/aoe.h unsigned char dst[6]; dst 134 drivers/block/aoe/aoecmd.c memcpy(h->dst, t->addr, sizeof h->dst); dst 437 drivers/block/aoe/aoecmd.c memset(h->dst, 0xff, sizeof h->dst); dst 477 drivers/block/aoe/aoecmd.c h->src, h->dst, t->nout); dst 484 drivers/block/aoe/aoecmd.c memcpy(h->dst, t->addr, sizeof h->dst); dst 1348 drivers/block/aoe/aoecmd.c h->dst); dst 196 drivers/block/brd.c void *dst; dst 204 drivers/block/brd.c dst = kmap_atomic(page); dst 205 drivers/block/brd.c memcpy(dst + offset, src, copy); dst 206 drivers/block/brd.c kunmap_atomic(dst); dst 215 drivers/block/brd.c dst = kmap_atomic(page); dst 216 drivers/block/brd.c memcpy(dst, src, copy); dst 217 drivers/block/brd.c kunmap_atomic(dst); dst 224 drivers/block/brd.c static void copy_from_brd(void *dst, struct brd_device *brd, dst 236 drivers/block/brd.c memcpy(dst, src + offset, copy); dst 239 drivers/block/brd.c memset(dst, 0, copy); dst 242 drivers/block/brd.c dst += copy; dst 248 drivers/block/brd.c memcpy(dst, src, copy); dst 251 drivers/block/brd.c memset(dst, 0, copy); dst 835 drivers/block/null_blk_main.c void *dst, *src; dst 856 drivers/block/null_blk_main.c dst = kmap_atomic(t_page->page); dst 862 drivers/block/null_blk_main.c memcpy(dst + offset, src + offset, dst 868 drivers/block/null_blk_main.c kunmap_atomic(dst); dst 937 drivers/block/null_blk_main.c void *dst, *src; dst 952 drivers/block/null_blk_main.c dst = kmap_atomic(t_page->page); dst 953 drivers/block/null_blk_main.c memcpy(dst + offset, src + off + count, temp); dst 954 drivers/block/null_blk_main.c kunmap_atomic(dst); dst 974 drivers/block/null_blk_main.c void *dst, *src; dst 983 drivers/block/null_blk_main.c dst = kmap_atomic(dest); dst 985 drivers/block/null_blk_main.c memset(dst + off + count, 0, temp); dst 989 drivers/block/null_blk_main.c memcpy(dst + off + count, src + offset, temp); dst 992 drivers/block/null_blk_main.c kunmap_atomic(dst); dst 395 drivers/block/xen-blkback/common.h static inline void blkif_get_x86_32_req(struct blkif_request *dst, dst 399 drivers/block/xen-blkback/common.h dst->operation = READ_ONCE(src->operation); dst 400 drivers/block/xen-blkback/common.h switch (dst->operation) { dst 405 drivers/block/xen-blkback/common.h dst->u.rw.nr_segments = src->u.rw.nr_segments; dst 406 
drivers/block/xen-blkback/common.h dst->u.rw.handle = src->u.rw.handle; dst 407 drivers/block/xen-blkback/common.h dst->u.rw.id = src->u.rw.id; dst 408 drivers/block/xen-blkback/common.h dst->u.rw.sector_number = src->u.rw.sector_number; dst 410 drivers/block/xen-blkback/common.h if (n > dst->u.rw.nr_segments) dst 411 drivers/block/xen-blkback/common.h n = dst->u.rw.nr_segments; dst 413 drivers/block/xen-blkback/common.h dst->u.rw.seg[i] = src->u.rw.seg[i]; dst 416 drivers/block/xen-blkback/common.h dst->u.discard.flag = src->u.discard.flag; dst 417 drivers/block/xen-blkback/common.h dst->u.discard.id = src->u.discard.id; dst 418 drivers/block/xen-blkback/common.h dst->u.discard.sector_number = src->u.discard.sector_number; dst 419 drivers/block/xen-blkback/common.h dst->u.discard.nr_sectors = src->u.discard.nr_sectors; dst 422 drivers/block/xen-blkback/common.h dst->u.indirect.indirect_op = src->u.indirect.indirect_op; dst 423 drivers/block/xen-blkback/common.h dst->u.indirect.nr_segments = src->u.indirect.nr_segments; dst 424 drivers/block/xen-blkback/common.h dst->u.indirect.handle = src->u.indirect.handle; dst 425 drivers/block/xen-blkback/common.h dst->u.indirect.id = src->u.indirect.id; dst 426 drivers/block/xen-blkback/common.h dst->u.indirect.sector_number = src->u.indirect.sector_number; dst 428 drivers/block/xen-blkback/common.h j = min(MAX_INDIRECT_PAGES, INDIRECT_PAGES(dst->u.indirect.nr_segments)); dst 430 drivers/block/xen-blkback/common.h dst->u.indirect.indirect_grefs[i] = dst 438 drivers/block/xen-blkback/common.h dst->u.other.id = src->u.other.id; dst 443 drivers/block/xen-blkback/common.h static inline void blkif_get_x86_64_req(struct blkif_request *dst, dst 447 drivers/block/xen-blkback/common.h dst->operation = READ_ONCE(src->operation); dst 448 drivers/block/xen-blkback/common.h switch (dst->operation) { dst 453 drivers/block/xen-blkback/common.h dst->u.rw.nr_segments = src->u.rw.nr_segments; dst 454 drivers/block/xen-blkback/common.h dst->u.rw.handle = src->u.rw.handle; dst 455 drivers/block/xen-blkback/common.h dst->u.rw.id = src->u.rw.id; dst 456 drivers/block/xen-blkback/common.h dst->u.rw.sector_number = src->u.rw.sector_number; dst 458 drivers/block/xen-blkback/common.h if (n > dst->u.rw.nr_segments) dst 459 drivers/block/xen-blkback/common.h n = dst->u.rw.nr_segments; dst 461 drivers/block/xen-blkback/common.h dst->u.rw.seg[i] = src->u.rw.seg[i]; dst 464 drivers/block/xen-blkback/common.h dst->u.discard.flag = src->u.discard.flag; dst 465 drivers/block/xen-blkback/common.h dst->u.discard.id = src->u.discard.id; dst 466 drivers/block/xen-blkback/common.h dst->u.discard.sector_number = src->u.discard.sector_number; dst 467 drivers/block/xen-blkback/common.h dst->u.discard.nr_sectors = src->u.discard.nr_sectors; dst 470 drivers/block/xen-blkback/common.h dst->u.indirect.indirect_op = src->u.indirect.indirect_op; dst 471 drivers/block/xen-blkback/common.h dst->u.indirect.nr_segments = src->u.indirect.nr_segments; dst 472 drivers/block/xen-blkback/common.h dst->u.indirect.handle = src->u.indirect.handle; dst 473 drivers/block/xen-blkback/common.h dst->u.indirect.id = src->u.indirect.id; dst 474 drivers/block/xen-blkback/common.h dst->u.indirect.sector_number = src->u.indirect.sector_number; dst 476 drivers/block/xen-blkback/common.h j = min(MAX_INDIRECT_PAGES, INDIRECT_PAGES(dst->u.indirect.nr_segments)); dst 478 drivers/block/xen-blkback/common.h dst->u.indirect.indirect_grefs[i] = dst 486 drivers/block/xen-blkback/common.h dst->u.other.id = src->u.other.id; dst 
247 drivers/block/xsysace.c u8 *dst = ace->data_ptr; dst 250 drivers/block/xsysace.c *dst++ = in_8(r++); dst 251 drivers/block/xsysace.c ace->data_ptr = dst; dst 285 drivers/block/xsysace.c u16 *dst = ace->data_ptr; dst 287 drivers/block/xsysace.c *dst++ = in_le16(ace->baseaddr + 0x40); dst 288 drivers/block/xsysace.c ace->data_ptr = dst; dst 314 drivers/block/xsysace.c u16 *dst = ace->data_ptr; dst 316 drivers/block/xsysace.c *dst++ = in_be16(ace->baseaddr + 0x40); dst 317 drivers/block/xsysace.c ace->data_ptr = dst; dst 149 drivers/block/zram/zcomp.c const void *src, unsigned int src_len, void *dst) dst 155 drivers/block/zram/zcomp.c dst, &dst_len); dst 37 drivers/block/zram/zcomp.h const void *src, unsigned int src_len, void *dst); dst 1218 drivers/block/zram/zram_drv.c void *src, *dst; dst 1251 drivers/block/zram/zram_drv.c dst = kmap_atomic(page); dst 1252 drivers/block/zram/zram_drv.c memcpy(dst, src, PAGE_SIZE); dst 1253 drivers/block/zram/zram_drv.c kunmap_atomic(dst); dst 1258 drivers/block/zram/zram_drv.c dst = kmap_atomic(page); dst 1259 drivers/block/zram/zram_drv.c ret = zcomp_decompress(zstrm, src, size, dst); dst 1260 drivers/block/zram/zram_drv.c kunmap_atomic(dst); dst 1292 drivers/block/zram/zram_drv.c void *dst = kmap_atomic(bvec->bv_page); dst 1295 drivers/block/zram/zram_drv.c memcpy(dst + bvec->bv_offset, src + offset, bvec->bv_len); dst 1297 drivers/block/zram/zram_drv.c kunmap_atomic(dst); dst 1313 drivers/block/zram/zram_drv.c void *src, *dst, *mem; dst 1383 drivers/block/zram/zram_drv.c dst = zs_map_object(zram->mem_pool, handle, ZS_MM_WO); dst 1388 drivers/block/zram/zram_drv.c memcpy(dst, src, comp_len); dst 1432 drivers/block/zram/zram_drv.c void *dst; dst 1446 drivers/block/zram/zram_drv.c dst = kmap_atomic(page); dst 1447 drivers/block/zram/zram_drv.c memcpy(dst + offset, src + bvec->bv_offset, bvec->bv_len); dst 1448 drivers/block/zram/zram_drv.c kunmap_atomic(dst); dst 53 drivers/char/hw_random/xgene-rng.c #define MAX_REFILL_CYCLES_SET(dst, src) \ dst 54 drivers/char/hw_random/xgene-rng.c ((dst & ~0xffff0000) | (((u32)src << 16) & 0xffff0000)) dst 55 drivers/char/hw_random/xgene-rng.c #define MIN_REFILL_CYCLES_SET(dst, src) \ dst 56 drivers/char/hw_random/xgene-rng.c ((dst & ~0x000000ff) | (((u32)src) & 0x000000ff)) dst 57 drivers/char/hw_random/xgene-rng.c #define ALARM_THRESHOLD_SET(dst, src) \ dst 58 drivers/char/hw_random/xgene-rng.c ((dst & ~0x000000ff) | (((u32)src) & 0x000000ff)) dst 59 drivers/char/hw_random/xgene-rng.c #define ENABLE_RNG_SET(dst, src) \ dst 60 drivers/char/hw_random/xgene-rng.c ((dst & ~BIT(10)) | (((u32)src << 10) & BIT(10))) dst 61 drivers/char/hw_random/xgene-rng.c #define REGSPEC_TEST_MODE_SET(dst, src) \ dst 62 drivers/char/hw_random/xgene-rng.c ((dst & ~BIT(8)) | (((u32)src << 8) & BIT(8))) dst 63 drivers/char/hw_random/xgene-rng.c #define MONOBIT_FAIL_MASK_SET(dst, src) \ dst 64 drivers/char/hw_random/xgene-rng.c ((dst & ~BIT(7)) | (((u32)src << 7) & BIT(7))) dst 65 drivers/char/hw_random/xgene-rng.c #define POKER_FAIL_MASK_SET(dst, src) \ dst 66 drivers/char/hw_random/xgene-rng.c ((dst & ~BIT(6)) | (((u32)src << 6) & BIT(6))) dst 67 drivers/char/hw_random/xgene-rng.c #define LONG_RUN_FAIL_MASK_SET(dst, src) \ dst 68 drivers/char/hw_random/xgene-rng.c ((dst & ~BIT(5)) | (((u32)src << 5) & BIT(5))) dst 69 drivers/char/hw_random/xgene-rng.c #define RUN_FAIL_MASK_SET(dst, src) \ dst 70 drivers/char/hw_random/xgene-rng.c ((dst & ~BIT(4)) | (((u32)src << 4) & BIT(4))) dst 71 drivers/char/hw_random/xgene-rng.c #define 
NOISE_FAIL_MASK_SET(dst, src) \ dst 72 drivers/char/hw_random/xgene-rng.c ((dst & ~BIT(3)) | (((u32)src << 3) & BIT(3))) dst 73 drivers/char/hw_random/xgene-rng.c #define STUCK_OUT_MASK_SET(dst, src) \ dst 74 drivers/char/hw_random/xgene-rng.c ((dst & ~BIT(2)) | (((u32)src << 2) & BIT(2))) dst 75 drivers/char/hw_random/xgene-rng.c #define SHUTDOWN_OFLO_MASK_SET(dst, src) \ dst 76 drivers/char/hw_random/xgene-rng.c ((dst & ~BIT(1)) | (((u32)src << 1) & BIT(1))) dst 170 drivers/char/ps3flash.c void *dst; dst 193 drivers/char/ps3flash.c dst = dev->bounce_buf + offset; dst 206 drivers/char/ps3flash.c __func__, __LINE__, n, userbuf, kernelbuf, dst); dst 208 drivers/char/ps3flash.c if (copy_from_user(dst, userbuf, n)) { dst 215 drivers/char/ps3flash.c memcpy(dst, kernelbuf, n); dst 3567 drivers/clk/clk.c const char *dst; dst 3575 drivers/clk/clk.c *dst_p = dst = kstrdup_const(src, GFP_KERNEL); dst 3576 drivers/clk/clk.c if (!dst) dst 516 drivers/cpufreq/s3c24xx-cpufreq.c static void s3c_cpufreq_freq_min(struct s3c_freq *dst, dst 519 drivers/cpufreq/s3c24xx-cpufreq.c dst->fclk = do_min(a->fclk, b->fclk); dst 520 drivers/cpufreq/s3c24xx-cpufreq.c dst->hclk = do_min(a->hclk, b->hclk); dst 521 drivers/cpufreq/s3c24xx-cpufreq.c dst->pclk = do_min(a->pclk, b->pclk); dst 522 drivers/cpufreq/s3c24xx-cpufreq.c dst->armclk = do_min(a->armclk, b->armclk); dst 83 drivers/crypto/amcc/crypto4xx_alg.c return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst, dst 235 drivers/crypto/amcc/crypto4xx_alg.c return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst, dst 250 drivers/crypto/amcc/crypto4xx_alg.c return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst, dst 278 drivers/crypto/amcc/crypto4xx_alg.c skcipher_request_set_crypt(subreq, req->src, req->dst, dst 370 drivers/crypto/amcc/crypto4xx_alg.c aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, dst 487 drivers/crypto/amcc/crypto4xx_alg.c return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst, dst 489 drivers/crypto/amcc/crypto4xx_alg.c sa, ctx->sa_len, req->assoclen, rctx->dst); dst 625 drivers/crypto/amcc/crypto4xx_alg.c return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst, dst 628 drivers/crypto/amcc/crypto4xx_alg.c ctx->sa_len, req->assoclen, rctx->dst); dst 706 drivers/crypto/amcc/crypto4xx_alg.c struct scatterlist dst; dst 709 drivers/crypto/amcc/crypto4xx_alg.c sg_init_one(&dst, req->result, ds); dst 711 drivers/crypto/amcc/crypto4xx_alg.c return crypto4xx_build_pd(&req->base, ctx, req->src, &dst, dst 725 drivers/crypto/amcc/crypto4xx_alg.c struct scatterlist dst; dst 728 drivers/crypto/amcc/crypto4xx_alg.c sg_init_one(&dst, req->result, ds); dst 730 drivers/crypto/amcc/crypto4xx_alg.c return crypto4xx_build_pd(&req->base, ctx, req->src, &dst, dst 449 drivers/crypto/amcc/crypto4xx_core.c struct scatterlist *dst) dst 475 drivers/crypto/amcc/crypto4xx_core.c scatterwalk_map_and_copy(buf, dst, dst_start, to_copy, 1); dst 487 drivers/crypto/amcc/crypto4xx_core.c static void crypto4xx_copy_digest_to_dst(void *dst, dst 494 drivers/crypto/amcc/crypto4xx_core.c memcpy(dst, pd_uinfo->sr_va->save_digest, dst 523 drivers/crypto/amcc/crypto4xx_core.c struct scatterlist *dst; dst 530 drivers/crypto/amcc/crypto4xx_core.c req->cryptlen, req->dst); dst 532 drivers/crypto/amcc/crypto4xx_core.c dst = pd_uinfo->dest_va; dst 533 drivers/crypto/amcc/crypto4xx_core.c addr = dma_map_page(dev->core_dev->device, sg_page(dst), dst 534 drivers/crypto/amcc/crypto4xx_core.c dst->offset, dst->length, DMA_FROM_DEVICE); dst 576 
drivers/crypto/amcc/crypto4xx_core.c struct scatterlist *dst = pd_uinfo->dest_va; dst 585 drivers/crypto/amcc/crypto4xx_core.c dst); dst 587 drivers/crypto/amcc/crypto4xx_core.c dma_unmap_page(dev->core_dev->device, pd->dest, dst->length, dst 596 drivers/crypto/amcc/crypto4xx_core.c scatterwalk_map_and_copy(icv, dst, aead_req->cryptlen, dst 681 drivers/crypto/amcc/crypto4xx_core.c struct scatterlist *dst, dst 733 drivers/crypto/amcc/crypto4xx_core.c dst = scatterwalk_ffwd(_dst, dst, assoclen); dst 737 drivers/crypto/amcc/crypto4xx_core.c if (sg_is_last(dst) && force_sd == false) { dst 814 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->dest_va = dst; dst 879 drivers/crypto/amcc/crypto4xx_core.c sg_page(dst), dst->offset, dst 880 drivers/crypto/amcc/crypto4xx_core.c min(datalen, dst->length), dst 130 drivers/crypto/amcc/crypto4xx_core.h struct scatterlist dst[2]; dst 155 drivers/crypto/amcc/crypto4xx_core.h struct scatterlist *dst, dst 193 drivers/crypto/amcc/crypto4xx_core.h static inline void crypto4xx_memcpy_swab32(u32 *dst, const void *buf, dst 197 drivers/crypto/amcc/crypto4xx_core.h *dst++ = __swab32p((u32 *) buf); dst 204 drivers/crypto/amcc/crypto4xx_core.h *dst = (tmp[2] << 16) | dst 209 drivers/crypto/amcc/crypto4xx_core.h *dst = (tmp[1] << 8) | dst 213 drivers/crypto/amcc/crypto4xx_core.h *dst = tmp[0]; dst 221 drivers/crypto/amcc/crypto4xx_core.h static inline void crypto4xx_memcpy_from_le32(u32 *dst, const void *buf, dst 224 drivers/crypto/amcc/crypto4xx_core.h crypto4xx_memcpy_swab32(dst, buf, len); dst 227 drivers/crypto/amcc/crypto4xx_core.h static inline void crypto4xx_memcpy_to_le32(__le32 *dst, const void *buf, dst 230 drivers/crypto/amcc/crypto4xx_core.h crypto4xx_memcpy_swab32((u32 *)dst, buf, len); dst 122 drivers/crypto/atmel-aes.c struct scatterlist dst[2]; dst 129 drivers/crypto/atmel-aes.c struct scatterlist dst[2]; dst 164 drivers/crypto/atmel-aes.c struct scatterlist dst[2]; dst 210 drivers/crypto/atmel-aes.c struct atmel_aes_dma dst; dst 503 drivers/crypto/atmel-aes.c scatterwalk_map_and_copy(req->info, req->dst, dst 506 drivers/crypto/atmel-aes.c if (req->src == req->dst) dst 612 drivers/crypto/atmel-aes.c struct scatterlist *dst, dst 624 drivers/crypto/atmel-aes.c dd->real_dst = dst; dst 689 drivers/crypto/atmel-aes.c struct scatterlist *dst, dst 697 drivers/crypto/atmel-aes.c dd->dst.sg = dst; dst 698 drivers/crypto/atmel-aes.c dd->real_dst = dst; dst 701 drivers/crypto/atmel-aes.c if (src == dst) dst 704 drivers/crypto/atmel-aes.c dst_aligned = atmel_aes_check_aligned(dd, dst, len, &dd->dst); dst 719 drivers/crypto/atmel-aes.c dd->dst.sg = &dd->aligned_sg; dst 720 drivers/crypto/atmel-aes.c dd->dst.nents = 1; dst 721 drivers/crypto/atmel-aes.c dd->dst.remainder = 0; dst 728 drivers/crypto/atmel-aes.c if (dd->src.sg == dd->dst.sg) { dst 731 drivers/crypto/atmel-aes.c dd->dst.sg_len = dd->src.sg_len; dst 740 drivers/crypto/atmel-aes.c dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents, dst 742 drivers/crypto/atmel-aes.c if (!dd->dst.sg_len) { dst 754 drivers/crypto/atmel-aes.c if (dd->src.sg == dd->dst.sg) { dst 761 drivers/crypto/atmel-aes.c dma_unmap_sg(dd->dev, dd->dst.sg, dd->dst.nents, dst 764 drivers/crypto/atmel-aes.c if (dd->dst.sg != &dd->aligned_sg) dst 765 drivers/crypto/atmel-aes.c atmel_aes_restore_sg(&dd->dst); dst 774 drivers/crypto/atmel-aes.c if (dd->dst.sg == &dd->aligned_sg) dst 805 drivers/crypto/atmel-aes.c dma = &dd->dst; dst 842 drivers/crypto/atmel-aes.c dma = &dd->dst; dst 854 drivers/crypto/atmel-aes.c struct scatterlist 
*dst, dst 889 drivers/crypto/atmel-aes.c err = atmel_aes_map(dd, src, dst, len); dst 997 drivers/crypto/atmel-aes.c return atmel_aes_dma_start(dd, req->src, req->dst, req->nbytes, dst 1000 drivers/crypto/atmel-aes.c return atmel_aes_cpu_start(dd, req->src, req->dst, req->nbytes, dst 1014 drivers/crypto/atmel-aes.c struct scatterlist *src, *dst; dst 1044 drivers/crypto/atmel-aes.c dst = ((req->src == req->dst) ? src : dst 1045 drivers/crypto/atmel-aes.c scatterwalk_ffwd(ctx->dst, req->dst, ctx->offset)); dst 1059 drivers/crypto/atmel-aes.c return atmel_aes_dma_start(dd, src, dst, datalen, dst 1062 drivers/crypto/atmel-aes.c return atmel_aes_cpu_start(dd, src, dst, datalen, dst 1122 drivers/crypto/atmel-aes.c if (!(mode & AES_FLAGS_ENCRYPT) && (req->src == req->dst)) { dst 1627 drivers/crypto/atmel-aes.c struct scatterlist *src, *dst; dst 1650 drivers/crypto/atmel-aes.c dst = ((req->src == req->dst) ? src : dst 1651 drivers/crypto/atmel-aes.c scatterwalk_ffwd(ctx->dst, req->dst, req->assoclen)); dst 1662 drivers/crypto/atmel-aes.c return atmel_aes_dma_start(dd, src, dst, ctx->textlen, dst 1666 drivers/crypto/atmel-aes.c return atmel_aes_cpu_start(dd, src, dst, ctx->textlen, dst 1733 drivers/crypto/atmel-aes.c scatterwalk_map_and_copy(otag, req->dst, offset, authsize, 1); dst 1908 drivers/crypto/atmel-aes.c return atmel_aes_dma_start(dd, req->src, req->dst, req->nbytes, dst 1911 drivers/crypto/atmel-aes.c return atmel_aes_cpu_start(dd, req->src, req->dst, req->nbytes, dst 2040 drivers/crypto/atmel-aes.c struct scatterlist *src, *dst; dst 2051 drivers/crypto/atmel-aes.c dst = src; dst 2053 drivers/crypto/atmel-aes.c if (req->src != req->dst) dst 2054 drivers/crypto/atmel-aes.c dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen); dst 2073 drivers/crypto/atmel-aes.c return atmel_aes_dma_start(dd, src, dst, rctx->textlen, dst 2107 drivers/crypto/atmel-aes.c scatterwalk_map_and_copy(odigest, req->dst, offs, authsize, 1); dst 2403 drivers/crypto/atmel-aes.c dd->dst.chan = dma_request_slave_channel_compat(mask, atmel_aes_filter, dst 2405 drivers/crypto/atmel-aes.c if (!dd->dst.chan) dst 2419 drivers/crypto/atmel-aes.c dma_release_channel(dd->dst.chan); dst 2731 drivers/crypto/atmel-aes.c dma_chan_name(aes_dd->dst.chan)); dst 65 drivers/crypto/atmel-ecc.c copied = sg_copy_from_buffer(req->dst, sg_nents_for_len(req->dst, n_sz), dst 169 drivers/crypto/atmel-ecc.c copied = sg_copy_from_buffer(req->dst, dst 170 drivers/crypto/atmel-ecc.c sg_nents_for_len(req->dst, nbytes), dst 621 drivers/crypto/atmel-tdes.c dd->out_sg = req->dst; dst 578 drivers/crypto/axis/artpec6_crypto.c void *dst, unsigned int len, bool eop) dst 596 drivers/crypto/axis/artpec6_crypto.c memcpy(d->shrt.data, dst, len); dst 705 drivers/crypto/axis/artpec6_crypto.c void *dst, unsigned int len, bool eop, dst 709 drivers/crypto/axis/artpec6_crypto.c return artpec6_crypto_setup_out_descr_short(common, dst, len, dst 715 drivers/crypto/axis/artpec6_crypto.c ret = artpec6_crypto_dma_map_single(common, dst, len, dst 1000 drivers/crypto/axis/artpec6_crypto.c create_hash_pad(int oper, unsigned char *dst, u64 dgstlen, u64 bitcount) dst 1025 drivers/crypto/axis/artpec6_crypto.c memset(dst + 1, 0, pad_bytes); dst 1026 drivers/crypto/axis/artpec6_crypto.c dst[0] = 0x80; dst 1029 drivers/crypto/axis/artpec6_crypto.c memset(dst + 1 + pad_bytes, 0, 8); dst 1030 drivers/crypto/axis/artpec6_crypto.c memcpy(dst + 1 + pad_bytes + 8, &bits, 8); dst 1032 drivers/crypto/axis/artpec6_crypto.c memcpy(dst + 1 + pad_bytes, &bits, 8); dst 1117 
drivers/crypto/axis/artpec6_crypto.c req->dst, req->cryptlen); dst 1162 drivers/crypto/axis/artpec6_crypto.c req->dst, req->cryptlen); dst 1209 drivers/crypto/axis/artpec6_crypto.c skcipher_request_set_crypt(subreq, req->src, req->dst, dst 1795 drivers/crypto/axis/artpec6_crypto.c artpec6_crypto_walk_init(&walk, areq->dst); dst 1978 drivers/crypto/axis/artpec6_crypto.c artpec6_crypto_walk_init(&walk, areq->dst); dst 2178 drivers/crypto/axis/artpec6_crypto.c scatterwalk_map_and_copy(cipher_req->iv, cipher_req->dst, dst 143 drivers/crypto/bcm/cipher.c mssg->spu.dst = kcalloc(rx_frag_num, sizeof(struct scatterlist), dst 145 drivers/crypto/bcm/cipher.c if (!mssg->spu.dst) dst 148 drivers/crypto/bcm/cipher.c sg = mssg->spu.dst; dst 361 drivers/crypto/bcm/cipher.c sg_copy_part_to_buf(req->dst, rctx->msg_buf.iv_ctr, dst 529 drivers/crypto/bcm/cipher.c dump_sg(req->dst, rctx->total_received, payload_len); dst 571 drivers/crypto/bcm/cipher.c mssg->spu.dst = kcalloc(rx_frag_num, sizeof(struct scatterlist), dst 573 drivers/crypto/bcm/cipher.c if (!mssg->spu.dst) dst 576 drivers/crypto/bcm/cipher.c sg = mssg->spu.dst; dst 1117 drivers/crypto/bcm/cipher.c mssg->spu.dst = kcalloc(rx_frag_num, sizeof(struct scatterlist), dst 1119 drivers/crypto/bcm/cipher.c if (!mssg->spu.dst) dst 1122 drivers/crypto/bcm/cipher.c sg = mssg->spu.dst; dst 1598 drivers/crypto/bcm/cipher.c sg_copy_part_from_buf(req->dst, rctx->msg_buf.digest, dst 1604 drivers/crypto/bcm/cipher.c dump_sg(req->dst, req->assoclen, result_len); dst 1632 drivers/crypto/bcm/cipher.c kfree(mssg->spu.dst); dst 1774 drivers/crypto/bcm/cipher.c rctx->dst_sg = req->dst; dst 2744 drivers/crypto/bcm/cipher.c if (req->dst == req->src) { dst 2753 drivers/crypto/bcm/cipher.c if (spu_sg_at_offset(req->dst, req->assoclen, &rctx->dst_sg, dst 2781 drivers/crypto/bcm/cipher.c flow_log(" dst sg: %p\n", req->dst); dst 921 drivers/crypto/caam/caamalg.c struct scatterlist *dst, int src_nents, dst 926 drivers/crypto/caam/caamalg.c if (dst != src) { dst 930 drivers/crypto/caam/caamalg.c dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE); dst 946 drivers/crypto/caam/caamalg.c caam_unmap(dev, req->src, req->dst, dst 957 drivers/crypto/caam/caamalg.c caam_unmap(dev, req->src, req->dst, dst 1037 drivers/crypto/caam/caamalg.c DUMP_PREFIX_ADDRESS, 16, 4, req->dst, dst 1077 drivers/crypto/caam/caamalg.c DUMP_PREFIX_ADDRESS, 16, 4, req->dst, dst 1124 drivers/crypto/caam/caamalg.c if (unlikely(req->src != req->dst)) { dst 1129 drivers/crypto/caam/caamalg.c dst_dma = sg_dma_address(req->dst); dst 1304 drivers/crypto/caam/caamalg.c if (likely(req->src == req->dst)) { dst 1308 drivers/crypto/caam/caamalg.c dst_dma = sg_dma_address(req->dst); dst 1336 drivers/crypto/caam/caamalg.c if (unlikely(req->dst != req->src)) { dst 1347 drivers/crypto/caam/caamalg.c dst_nents = sg_nents_for_len(req->dst, dst_len); dst 1365 drivers/crypto/caam/caamalg.c if (likely(req->src == req->dst)) { dst 1387 drivers/crypto/caam/caamalg.c mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst 1417 drivers/crypto/caam/caamalg.c caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, dst 1437 drivers/crypto/caam/caamalg.c sg_to_sec4_sg_last(req->dst, dst_len, dst 1701 drivers/crypto/caam/caamalg.c if (req->dst != req->src) { dst 1702 drivers/crypto/caam/caamalg.c dst_nents = sg_nents_for_len(req->dst, req->cryptlen); dst 1710 drivers/crypto/caam/caamalg.c if (likely(req->src == req->dst)) { dst 1724 drivers/crypto/caam/caamalg.c mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, dst 1752 
drivers/crypto/caam/caamalg.c if (req->src == req->dst) dst 1770 drivers/crypto/caam/caamalg.c caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, dst 1791 drivers/crypto/caam/caamalg.c caam_unmap(jrdev, req->src, req->dst, src_nents, dst 1803 drivers/crypto/caam/caamalg.c if (req->src != req->dst && (ivsize || mapped_dst_nents > 1)) dst 1804 drivers/crypto/caam/caamalg.c sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg + dst 1821 drivers/crypto/caam/caamalg.c caam_unmap(jrdev, req->src, req->dst, src_nents, dst 870 drivers/crypto/caam/caamalg_qi.c struct scatterlist *dst, int src_nents, dst 875 drivers/crypto/caam/caamalg_qi.c if (dst != src) { dst 879 drivers/crypto/caam/caamalg_qi.c dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE); dst 897 drivers/crypto/caam/caamalg_qi.c caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, dst 909 drivers/crypto/caam/caamalg_qi.c caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, dst 970 drivers/crypto/caam/caamalg_qi.c if (likely(req->src == req->dst)) { dst 1001 drivers/crypto/caam/caamalg_qi.c dst_nents = sg_nents_for_len(req->dst, dst_len); dst 1022 drivers/crypto/caam/caamalg_qi.c mapped_dst_nents = dma_map_sg(qidev, req->dst, dst 1055 drivers/crypto/caam/caamalg_qi.c else if ((req->src == req->dst) && (mapped_src_nents > 1)) dst 1067 drivers/crypto/caam/caamalg_qi.c caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0, dst 1082 drivers/crypto/caam/caamalg_qi.c caam_unmap(qidev, req->src, req->dst, src_nents, dst 1101 drivers/crypto/caam/caamalg_qi.c caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, dst 1117 drivers/crypto/caam/caamalg_qi.c sg_to_qm_sg_last(req->dst, dst_len, sg_table + qm_sg_index, 0); dst 1123 drivers/crypto/caam/caamalg_qi.c caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, dst 1139 drivers/crypto/caam/caamalg_qi.c if (req->dst == req->src) { dst 1148 drivers/crypto/caam/caamalg_qi.c dma_to_qm_sg_one(&fd_sgt[0], sg_dma_address(req->dst), out_len, dst 1228 drivers/crypto/caam/caamalg_qi.c DUMP_PREFIX_ADDRESS, 16, 4, req->dst, dst 1274 drivers/crypto/caam/caamalg_qi.c if (unlikely(req->src != req->dst)) { dst 1275 drivers/crypto/caam/caamalg_qi.c dst_nents = sg_nents_for_len(req->dst, req->cryptlen); dst 1289 drivers/crypto/caam/caamalg_qi.c mapped_dst_nents = dma_map_sg(qidev, req->dst, dst_nents, dst 1316 drivers/crypto/caam/caamalg_qi.c if (req->src != req->dst) dst 1326 drivers/crypto/caam/caamalg_qi.c caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0, dst 1335 drivers/crypto/caam/caamalg_qi.c caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0, dst 1348 drivers/crypto/caam/caamalg_qi.c caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0, dst 1365 drivers/crypto/caam/caamalg_qi.c if (req->src != req->dst) dst 1366 drivers/crypto/caam/caamalg_qi.c sg_to_qm_sg(req->dst, req->cryptlen, sg_table + dst_sg_idx, 0); dst 1375 drivers/crypto/caam/caamalg_qi.c caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, dst 1386 drivers/crypto/caam/caamalg_qi.c if (req->src == req->dst) dst 142 drivers/crypto/caam/caamalg_qi2.c struct scatterlist *dst, int src_nents, dst 147 drivers/crypto/caam/caamalg_qi2.c if (dst != src) { dst 151 drivers/crypto/caam/caamalg_qi2.c dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE); dst 377 drivers/crypto/caam/caamalg_qi2.c if (unlikely(req->dst != req->src)) { dst 389 drivers/crypto/caam/caamalg_qi2.c dst_nents = sg_nents_for_len(req->dst, dst_len); dst 410 
drivers/crypto/caam/caamalg_qi2.c mapped_dst_nents = dma_map_sg(dev, req->dst, dst_nents, dst 461 drivers/crypto/caam/caamalg_qi2.c else if ((req->src == req->dst) && (mapped_src_nents > 1)) dst 474 drivers/crypto/caam/caamalg_qi2.c caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0, dst 489 drivers/crypto/caam/caamalg_qi2.c caam_unmap(dev, req->src, req->dst, src_nents, dst 513 drivers/crypto/caam/caamalg_qi2.c caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, dst 529 drivers/crypto/caam/caamalg_qi2.c sg_to_qm_sg_last(req->dst, dst_len, sg_table + qm_sg_index, 0); dst 535 drivers/crypto/caam/caamalg_qi2.c caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, dst 554 drivers/crypto/caam/caamalg_qi2.c if (req->dst == req->src) { dst 573 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_addr(out_fle, sg_dma_address(req->dst)); dst 1141 drivers/crypto/caam/caamalg_qi2.c if (unlikely(req->dst != req->src)) { dst 1142 drivers/crypto/caam/caamalg_qi2.c dst_nents = sg_nents_for_len(req->dst, req->cryptlen); dst 1156 drivers/crypto/caam/caamalg_qi2.c mapped_dst_nents = dma_map_sg(dev, req->dst, dst_nents, dst 1183 drivers/crypto/caam/caamalg_qi2.c if (req->src != req->dst) dst 1193 drivers/crypto/caam/caamalg_qi2.c caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0, dst 1202 drivers/crypto/caam/caamalg_qi2.c caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0, dst 1215 drivers/crypto/caam/caamalg_qi2.c caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0, dst 1229 drivers/crypto/caam/caamalg_qi2.c if (req->src != req->dst) dst 1230 drivers/crypto/caam/caamalg_qi2.c sg_to_qm_sg(req->dst, req->cryptlen, sg_table + dst_sg_idx, 0); dst 1239 drivers/crypto/caam/caamalg_qi2.c caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, dst 1255 drivers/crypto/caam/caamalg_qi2.c if (req->src == req->dst) dst 1271 drivers/crypto/caam/caamalg_qi2.c caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, dst 1283 drivers/crypto/caam/caamalg_qi2.c caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, dst 1416 drivers/crypto/caam/caamalg_qi2.c DUMP_PREFIX_ADDRESS, 16, 4, req->dst, dst 1454 drivers/crypto/caam/caamalg_qi2.c DUMP_PREFIX_ADDRESS, 16, 4, req->dst, dst 48 drivers/crypto/caam/caampkc.c dma_unmap_sg(dev, req->dst, edesc->dst_nents, DMA_FROM_DEVICE); dst 286 drivers/crypto/caam/caampkc.c dst_nents = sg_nents_for_len(req->dst, req->dst_len); dst 312 drivers/crypto/caam/caampkc.c sgc = dma_map_sg(dev, req->dst, dst_nents, DMA_FROM_DEVICE); dst 328 drivers/crypto/caam/caampkc.c sg_to_sec4_sg_last(req->dst, req->dst_len, dst 354 drivers/crypto/caam/caampkc.c dma_unmap_sg(dev, req->dst, dst_nents, DMA_FROM_DEVICE); dst 399 drivers/crypto/caam/caampkc.c pdb->g_dma = sg_dma_address(req->dst); dst 446 drivers/crypto/caam/caampkc.c pdb->f_dma = sg_dma_address(req->dst); dst 511 drivers/crypto/caam/caampkc.c pdb->f_dma = sg_dma_address(req->dst); dst 600 drivers/crypto/caam/caampkc.c pdb->f_dma = sg_dma_address(req->dst); dst 828 drivers/crypto/caam/caampkc.c u8 *dst; dst 834 drivers/crypto/caam/caampkc.c dst = kzalloc(dstlen, GFP_DMA | GFP_KERNEL); dst 835 drivers/crypto/caam/caampkc.c if (!dst) dst 838 drivers/crypto/caam/caampkc.c memcpy(dst + (dstlen - nbytes), ptr, nbytes); dst 840 drivers/crypto/caam/caampkc.c return dst; dst 188 drivers/crypto/cavium/cpt/cptvf_algs.c update_output_data(req_info, req->dst, req->nbytes, &argcnt); dst 101 drivers/crypto/cavium/nitrox/nitrox_aead.c struct scatterlist *dst, int ivsize, int buflen) dst 
103 drivers/crypto/cavium/nitrox/nitrox_aead.c int nents = sg_nents_for_len(dst, buflen); dst 120 drivers/crypto/cavium/nitrox/nitrox_aead.c nitrox_creq_set_dst_sg(nkreq, nents, ivsize, dst, buflen); dst 132 drivers/crypto/cavium/nitrox/nitrox_aead.c kfree(nkreq->dst); dst 164 drivers/crypto/cavium/nitrox/nitrox_aead.c ret = alloc_dst_sglist(&rctx->nkreq, rctx->dst, rctx->ivsize, dst 209 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->dst = areq->dst; dst 240 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->dst = areq->dst; dst 380 drivers/crypto/cavium/nitrox/nitrox_aead.c if (areq->src != areq->dst) { dst 381 drivers/crypto/cavium/nitrox/nitrox_aead.c sg_init_table(rctx->dst, 3); dst 382 drivers/crypto/cavium/nitrox/nitrox_aead.c sg_set_buf(rctx->dst, rctx->assoc, assoclen); dst 383 drivers/crypto/cavium/nitrox/nitrox_aead.c sg = scatterwalk_ffwd(rctx->dst + 1, areq->dst, areq->assoclen); dst 384 drivers/crypto/cavium/nitrox/nitrox_aead.c if (sg != rctx->dst + 1) dst 385 drivers/crypto/cavium/nitrox/nitrox_aead.c sg_chain(rctx->dst, 2, sg); dst 389 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->dst = (areq->src == areq->dst) ? rctx->src : rctx->dst; dst 78 drivers/crypto/cavium/nitrox/nitrox_req.h struct scatterlist *dst; dst 211 drivers/crypto/cavium/nitrox/nitrox_req.h u8 *dst; dst 240 drivers/crypto/cavium/nitrox/nitrox_req.h struct scatterlist *dst; dst 254 drivers/crypto/cavium/nitrox/nitrox_req.h struct scatterlist dst[3]; dst 624 drivers/crypto/cavium/nitrox/nitrox_req.h static inline void nitrox_creq_copy_iv(char *dst, char *src, int size) dst 626 drivers/crypto/cavium/nitrox/nitrox_req.h memcpy(dst, src, size); dst 664 drivers/crypto/cavium/nitrox/nitrox_req.h nkreq->dst = alloc_req_buf(nents, extralen, creq->gfp); dst 665 drivers/crypto/cavium/nitrox/nitrox_req.h if (!nkreq->dst) dst 675 drivers/crypto/cavium/nitrox/nitrox_req.h creq->orh = (u64 *)(nkreq->dst); dst 683 drivers/crypto/cavium/nitrox/nitrox_req.h creq->comp = (u64 *)(nkreq->dst + ORH_HLEN); dst 687 drivers/crypto/cavium/nitrox/nitrox_req.h static inline struct scatterlist *nitrox_creq_dst_sg(char *dst) dst 689 drivers/crypto/cavium/nitrox/nitrox_req.h return (struct scatterlist *)(dst + ORH_HLEN + COMP_HLEN); dst 694 drivers/crypto/cavium/nitrox/nitrox_req.h struct scatterlist *dst, int buflen) dst 700 drivers/crypto/cavium/nitrox/nitrox_req.h creq->dst = nitrox_creq_dst_sg(nkreq->dst); dst 701 drivers/crypto/cavium/nitrox/nitrox_req.h sg = creq->dst; dst 715 drivers/crypto/cavium/nitrox/nitrox_req.h sg = create_multi_sg(sg, dst, buflen); dst 191 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c nents = dma_map_sg(dev, req->dst, sg_nents(req->dst), dst 196 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c sr->out.sg = req->dst; dst 205 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c dma_unmap_sg(dev, req->dst, nents, DMA_BIDIRECTIONAL); dst 158 drivers/crypto/cavium/nitrox/nitrox_skcipher.c int nents = sg_nents(skreq->dst) + 3; dst 170 drivers/crypto/cavium/nitrox/nitrox_skcipher.c nitrox_creq_set_dst_sg(nkreq, nents, ivsize, skreq->dst, dst 187 drivers/crypto/cavium/nitrox/nitrox_skcipher.c kfree(nkreq->dst); dst 123 drivers/crypto/cavium/zip/zip_crypto.c u8 *dst, unsigned int *dlen, dst 131 drivers/crypto/cavium/zip/zip_crypto.c if (!zip_ctx || !src || !dst || !dlen) dst 152 drivers/crypto/cavium/zip/zip_crypto.c memcpy(dst, zip_ops->output, *dlen); dst 159 drivers/crypto/cavium/zip/zip_crypto.c u8 *dst, unsigned int *dlen, dst 167 drivers/crypto/cavium/zip/zip_crypto.c if (!zip_ctx || !src || !dst || !dlen) dst 192 
drivers/crypto/cavium/zip/zip_crypto.c memcpy(dst, zip_ops->output, *dlen); dst 228 drivers/crypto/cavium/zip/zip_crypto.c u8 *dst, unsigned int *dlen) dst 233 drivers/crypto/cavium/zip/zip_crypto.c ret = zip_compress(src, slen, dst, dlen, zip_ctx); dst 240 drivers/crypto/cavium/zip/zip_crypto.c u8 *dst, unsigned int *dlen) dst 245 drivers/crypto/cavium/zip/zip_crypto.c ret = zip_decompress(src, slen, dst, dlen, zip_ctx); dst 299 drivers/crypto/cavium/zip/zip_crypto.c u8 *dst, unsigned int *dlen, void *ctx) dst 304 drivers/crypto/cavium/zip/zip_crypto.c ret = zip_compress(src, slen, dst, dlen, zip_ctx); dst 311 drivers/crypto/cavium/zip/zip_crypto.c u8 *dst, unsigned int *dlen, void *ctx) dst 316 drivers/crypto/cavium/zip/zip_crypto.c ret = zip_decompress(src, slen, dst, dlen, zip_ctx); dst 65 drivers/crypto/cavium/zip/zip_crypto.h u8 *dst, unsigned int *dlen); dst 68 drivers/crypto/cavium/zip/zip_crypto.h u8 *dst, unsigned int *dlen); dst 75 drivers/crypto/cavium/zip/zip_crypto.h u8 *dst, unsigned int *dlen, void *ctx); dst 78 drivers/crypto/cavium/zip/zip_crypto.h u8 *dst, unsigned int *dlen, void *ctx); dst 168 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->cmd.u.aes.dst = NULL; dst 133 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->cmd.u.aes.dst = req->dst; dst 157 drivers/crypto/ccp/ccp-crypto-aes-xts.c skcipher_request_set_crypt(subreq, req->src, req->dst, dst 181 drivers/crypto/ccp/ccp-crypto-aes-xts.c rctx->cmd.u.xts.dst = req->dst; dst 106 drivers/crypto/ccp/ccp-crypto-aes.c rctx->cmd.u.aes.dst = req->dst; dst 101 drivers/crypto/ccp/ccp-crypto-des3.c rctx->cmd.u.des3.dst = req->dst; dst 87 drivers/crypto/ccp/ccp-crypto-rsa.c rctx->cmd.u.rsa.dst = req->dst; dst 163 drivers/crypto/ccp/ccp-dev-v3.c cr[4] = ccp_addr_lo(&op->dst.u.dma); dst 165 drivers/crypto/ccp/ccp-dev-v3.c | ccp_addr_hi(&op->dst.u.dma); dst 193 drivers/crypto/ccp/ccp-dev-v3.c cr[4] = ccp_addr_lo(&op->dst.u.dma); dst 195 drivers/crypto/ccp/ccp-dev-v3.c | ccp_addr_hi(&op->dst.u.dma); dst 246 drivers/crypto/ccp/ccp-dev-v3.c cr[4] = ccp_addr_lo(&op->dst.u.dma); dst 248 drivers/crypto/ccp/ccp-dev-v3.c | ccp_addr_hi(&op->dst.u.dma); dst 265 drivers/crypto/ccp/ccp-dev-v3.c cr[1] = op->dst.u.dma.length - 1; dst 279 drivers/crypto/ccp/ccp-dev-v3.c if (op->dst.type == CCP_MEMTYPE_SYSTEM) { dst 280 drivers/crypto/ccp/ccp-dev-v3.c cr[4] = ccp_addr_lo(&op->dst.u.dma); dst 282 drivers/crypto/ccp/ccp-dev-v3.c | ccp_addr_hi(&op->dst.u.dma); dst 284 drivers/crypto/ccp/ccp-dev-v3.c cr[4] = op->dst.u.sb * CCP_SB_BYTES; dst 307 drivers/crypto/ccp/ccp-dev-v3.c cr[4] = ccp_addr_lo(&op->dst.u.dma); dst 309 drivers/crypto/ccp/ccp-dev-v3.c | ccp_addr_hi(&op->dst.u.dma); dst 310 drivers/crypto/ccp/ccp-dev-v5.c CCP5_CMD_DST_LO(&desc) = ccp_addr_lo(&op->dst.u.dma); dst 311 drivers/crypto/ccp/ccp-dev-v5.c CCP5_CMD_DST_HI(&desc) = ccp_addr_hi(&op->dst.u.dma); dst 353 drivers/crypto/ccp/ccp-dev-v5.c CCP5_CMD_DST_LO(&desc) = ccp_addr_lo(&op->dst.u.dma); dst 354 drivers/crypto/ccp/ccp-dev-v5.c CCP5_CMD_DST_HI(&desc) = ccp_addr_hi(&op->dst.u.dma); dst 437 drivers/crypto/ccp/ccp-dev-v5.c CCP5_CMD_DST_LO(&desc) = ccp_addr_lo(&op->dst.u.dma); dst 438 drivers/crypto/ccp/ccp-dev-v5.c CCP5_CMD_DST_HI(&desc) = ccp_addr_hi(&op->dst.u.dma); dst 479 drivers/crypto/ccp/ccp-dev-v5.c CCP5_CMD_DST_LO(&desc) = ccp_addr_lo(&op->dst.u.dma); dst 480 drivers/crypto/ccp/ccp-dev-v5.c CCP5_CMD_DST_HI(&desc) = ccp_addr_hi(&op->dst.u.dma); dst 496 drivers/crypto/ccp/ccp-dev-v5.c struct ccp_dma_info *daddr = &op->dst.u.dma; dst 537 drivers/crypto/ccp/ccp-dev-v5.c if 
(op->dst.type == CCP_MEMTYPE_SYSTEM) { dst 538 drivers/crypto/ccp/ccp-dev-v5.c CCP5_CMD_DST_LO(&desc) = ccp_addr_lo(&op->dst.u.dma); dst 539 drivers/crypto/ccp/ccp-dev-v5.c CCP5_CMD_DST_HI(&desc) = ccp_addr_hi(&op->dst.u.dma); dst 542 drivers/crypto/ccp/ccp-dev-v5.c u32 key_addr = op->dst.u.sb * CCP_SB_BYTES; dst 580 drivers/crypto/ccp/ccp-dev-v5.c CCP5_CMD_DST_LO(&desc) = ccp_addr_lo(&op->dst.u.dma); dst 581 drivers/crypto/ccp/ccp-dev-v5.c CCP5_CMD_DST_HI(&desc) = ccp_addr_hi(&op->dst.u.dma); dst 542 drivers/crypto/ccp/ccp-dev.h struct ccp_mem dst; dst 480 drivers/crypto/ccp/ccp-dmaengine.c struct dma_chan *dma_chan, dma_addr_t dst, dma_addr_t src, size_t len, dst 490 drivers/crypto/ccp/ccp-dmaengine.c __func__, &src, &dst, len, flags); dst 493 drivers/crypto/ccp/ccp-dmaengine.c sg_dma_address(&dst_sg) = dst; dst 323 drivers/crypto/ccp/ccp-ops.c static void ccp_prepare_data(struct ccp_data *src, struct ccp_data *dst, dst 337 drivers/crypto/ccp/ccp-ops.c if (dst) { dst 338 drivers/crypto/ccp/ccp-ops.c sg_dst_len = sg_dma_len(dst->sg_wa.sg) - dst->sg_wa.sg_used; dst 375 drivers/crypto/ccp/ccp-ops.c if (dst) { dst 382 drivers/crypto/ccp/ccp-ops.c op->dst.u.dma.address = dst->dm_wa.dma.address; dst 383 drivers/crypto/ccp/ccp-ops.c op->dst.u.dma.offset = 0; dst 384 drivers/crypto/ccp/ccp-ops.c op->dst.u.dma.length = op->src.u.dma.length; dst 389 drivers/crypto/ccp/ccp-ops.c op->dst.u.dma.address = sg_dma_address(dst->sg_wa.sg); dst 390 drivers/crypto/ccp/ccp-ops.c op->dst.u.dma.offset = dst->sg_wa.sg_used; dst 391 drivers/crypto/ccp/ccp-ops.c op->dst.u.dma.length = op->src.u.dma.length; dst 396 drivers/crypto/ccp/ccp-ops.c static void ccp_process_data(struct ccp_data *src, struct ccp_data *dst, dst 401 drivers/crypto/ccp/ccp-ops.c if (dst) { dst 402 drivers/crypto/ccp/ccp-ops.c if (op->dst.u.dma.address == dst->dm_wa.dma.address) dst 403 drivers/crypto/ccp/ccp-ops.c ccp_empty_queue_buf(dst); dst 405 drivers/crypto/ccp/ccp-ops.c ccp_update_sg_workarea(&dst->sg_wa, dst 406 drivers/crypto/ccp/ccp-ops.c op->dst.u.dma.length); dst 426 drivers/crypto/ccp/ccp-ops.c op.dst.type = CCP_MEMTYPE_SYSTEM; dst 427 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.address = wa->dma.address; dst 428 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.length = wa->length; dst 433 drivers/crypto/ccp/ccp-ops.c op.dst.type = CCP_MEMTYPE_SB; dst 434 drivers/crypto/ccp/ccp-ops.c op.dst.u.sb = sb; dst 617 drivers/crypto/ccp/ccp-ops.c struct ccp_data src, dst; dst 668 drivers/crypto/ccp/ccp-ops.c p_outp = scatterwalk_ffwd(sg_outp, aes->dst, aes->aad_len); dst 769 drivers/crypto/ccp/ccp-ops.c dst = src; dst 771 drivers/crypto/ccp/ccp-ops.c ret = ccp_init_data(&dst, cmd_q, p_outp, ilen, dst 781 drivers/crypto/ccp/ccp-ops.c ccp_prepare_data(&src, &dst, &op, AES_BLOCK_SIZE, true); dst 797 drivers/crypto/ccp/ccp-ops.c ccp_process_data(&src, &dst, &op); dst 844 drivers/crypto/ccp/ccp-ops.c op.dst.type = CCP_MEMTYPE_SYSTEM; dst 845 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.address = final_wa.dma.address; dst 846 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.length = AES_BLOCK_SIZE; dst 876 drivers/crypto/ccp/ccp-ops.c ccp_free_data(&dst, cmd_q); dst 900 drivers/crypto/ccp/ccp-ops.c struct ccp_data src, dst; dst 916 drivers/crypto/ccp/ccp-ops.c if (!aes->key || !aes->src || !aes->dst) dst 999 drivers/crypto/ccp/ccp-ops.c if (sg_virt(aes->src) == sg_virt(aes->dst)) dst 1009 drivers/crypto/ccp/ccp-ops.c dst = src; dst 1011 drivers/crypto/ccp/ccp-ops.c ret = ccp_init_data(&dst, cmd_q, aes->dst, aes->src_len, dst 1019 drivers/crypto/ccp/ccp-ops.c 
ccp_prepare_data(&src, &dst, &op, AES_BLOCK_SIZE, true); dst 1037 drivers/crypto/ccp/ccp-ops.c ccp_process_data(&src, &dst, &op); dst 1058 drivers/crypto/ccp/ccp-ops.c ccp_free_data(&dst, cmd_q); dst 1077 drivers/crypto/ccp/ccp-ops.c struct ccp_data src, dst; dst 1119 drivers/crypto/ccp/ccp-ops.c if (!xts->key || !xts->iv || !xts->src || !xts->dst) dst 1209 drivers/crypto/ccp/ccp-ops.c if (sg_virt(xts->src) == sg_virt(xts->dst)) dst 1219 drivers/crypto/ccp/ccp-ops.c dst = src; dst 1221 drivers/crypto/ccp/ccp-ops.c ret = ccp_init_data(&dst, cmd_q, xts->dst, xts->src_len, dst 1229 drivers/crypto/ccp/ccp-ops.c ccp_prepare_data(&src, &dst, &op, unit_size, true); dst 1239 drivers/crypto/ccp/ccp-ops.c ccp_process_data(&src, &dst, &op); dst 1258 drivers/crypto/ccp/ccp-ops.c ccp_free_data(&dst, cmd_q); dst 1278 drivers/crypto/ccp/ccp-ops.c struct ccp_data src, dst; dst 1300 drivers/crypto/ccp/ccp-ops.c if (!des3->key || !des3->src || !des3->dst) dst 1399 drivers/crypto/ccp/ccp-ops.c if (sg_virt(des3->src) == sg_virt(des3->dst)) dst 1409 drivers/crypto/ccp/ccp-ops.c dst = src; dst 1411 drivers/crypto/ccp/ccp-ops.c ret = ccp_init_data(&dst, cmd_q, des3->dst, des3->src_len, dst 1419 drivers/crypto/ccp/ccp-ops.c ccp_prepare_data(&src, &dst, &op, DES3_EDE_BLOCK_SIZE, true); dst 1436 drivers/crypto/ccp/ccp-ops.c ccp_process_data(&src, &dst, &op); dst 1454 drivers/crypto/ccp/ccp-ops.c ccp_free_data(&dst, cmd_q); dst 1818 drivers/crypto/ccp/ccp-ops.c struct ccp_dm_workarea exp, src, dst; dst 1827 drivers/crypto/ccp/ccp-ops.c if (!rsa->exp || !rsa->mod || !rsa->src || !rsa->dst) dst 1907 drivers/crypto/ccp/ccp-ops.c ret = ccp_init_dm_workarea(&dst, cmd_q, o_len, DMA_FROM_DEVICE); dst 1915 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.address = dst.dma.address; dst 1916 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.offset = 0; dst 1917 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.length = o_len; dst 1928 drivers/crypto/ccp/ccp-ops.c ccp_reverse_get_dm_area(&dst, 0, rsa->dst, 0, rsa->mod_len); dst 1931 drivers/crypto/ccp/ccp-ops.c ccp_dm_free(&dst); dst 1951 drivers/crypto/ccp/ccp-ops.c struct ccp_data src, dst; dst 1960 drivers/crypto/ccp/ccp-ops.c if (!pt->src || !pt->dst) dst 2002 drivers/crypto/ccp/ccp-ops.c if (sg_virt(pt->src) == sg_virt(pt->dst)) dst 2012 drivers/crypto/ccp/ccp-ops.c dst = src; dst 2014 drivers/crypto/ccp/ccp-ops.c ret = ccp_init_data(&dst, cmd_q, pt->dst, pt->src_len, dst 2027 drivers/crypto/ccp/ccp-ops.c dst.sg_wa.sg_used = 0; dst 2029 drivers/crypto/ccp/ccp-ops.c if (!dst.sg_wa.sg || dst 2030 drivers/crypto/ccp/ccp-ops.c (dst.sg_wa.sg->length < src.sg_wa.sg->length)) { dst 2045 drivers/crypto/ccp/ccp-ops.c op.dst.type = CCP_MEMTYPE_SYSTEM; dst 2046 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.address = sg_dma_address(dst.sg_wa.sg); dst 2047 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.offset = dst.sg_wa.sg_used; dst 2048 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.length = op.src.u.dma.length; dst 2056 drivers/crypto/ccp/ccp-ops.c dst.sg_wa.sg_used += src.sg_wa.sg->length; dst 2057 drivers/crypto/ccp/ccp-ops.c if (dst.sg_wa.sg_used == dst.sg_wa.sg->length) { dst 2058 drivers/crypto/ccp/ccp-ops.c dst.sg_wa.sg = sg_next(dst.sg_wa.sg); dst 2059 drivers/crypto/ccp/ccp-ops.c dst.sg_wa.sg_used = 0; dst 2066 drivers/crypto/ccp/ccp-ops.c ccp_free_data(&dst, cmd_q); dst 2131 drivers/crypto/ccp/ccp-ops.c op.dst.type = CCP_MEMTYPE_SYSTEM; dst 2132 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.address = pt->dst_dma; dst 2133 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.offset = 0; dst 2134 drivers/crypto/ccp/ccp-ops.c 
op.dst.u.dma.length = pt->src_len; dst 2146 drivers/crypto/ccp/ccp-ops.c struct ccp_dm_workarea src, dst; dst 2209 drivers/crypto/ccp/ccp-ops.c ret = ccp_init_dm_workarea(&dst, cmd_q, CCP_ECC_DST_BUF_SIZE, dst 2218 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.address = dst.dma.address; dst 2219 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.offset = 0; dst 2220 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.length = dst.length; dst 2231 drivers/crypto/ccp/ccp-ops.c (const __le16 *)(dst.address + CCP_ECC_RESULT_OFFSET)); dst 2238 drivers/crypto/ccp/ccp-ops.c ccp_reverse_get_dm_area(&dst, 0, ecc->u.mm.result, 0, dst 2242 drivers/crypto/ccp/ccp-ops.c ccp_dm_free(&dst); dst 2253 drivers/crypto/ccp/ccp-ops.c struct ccp_dm_workarea src, dst; dst 2367 drivers/crypto/ccp/ccp-ops.c ret = ccp_init_dm_workarea(&dst, cmd_q, CCP_ECC_DST_BUF_SIZE, dst 2376 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.address = dst.dma.address; dst 2377 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.offset = 0; dst 2378 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.length = dst.length; dst 2389 drivers/crypto/ccp/ccp-ops.c (const __le16 *)(dst.address + CCP_ECC_RESULT_OFFSET)); dst 2398 drivers/crypto/ccp/ccp-ops.c save = dst.address; dst 2401 drivers/crypto/ccp/ccp-ops.c ccp_reverse_get_dm_area(&dst, 0, ecc->u.pm.result.x, 0, dst 2403 drivers/crypto/ccp/ccp-ops.c dst.address += CCP_ECC_OUTPUT_SIZE; dst 2404 drivers/crypto/ccp/ccp-ops.c ccp_reverse_get_dm_area(&dst, 0, ecc->u.pm.result.y, 0, dst 2406 drivers/crypto/ccp/ccp-ops.c dst.address += CCP_ECC_OUTPUT_SIZE; dst 2409 drivers/crypto/ccp/ccp-ops.c dst.address = save; dst 2412 drivers/crypto/ccp/ccp-ops.c ccp_dm_free(&dst); dst 239 drivers/crypto/ccree/cc_aead.c sg_zero_buffer(areq->dst, sg_nents(areq->dst), dst 811 drivers/crypto/ccree/cc_aead.c mlli_addr = areq_ctx->dst.sram_addr; dst 812 drivers/crypto/ccree/cc_aead.c mlli_nents = areq_ctx->dst.mlli_nents; dst 868 drivers/crypto/ccree/cc_aead.c set_dout_mlli(&desc[idx], areq_ctx->dst.sram_addr, dst 869 drivers/crypto/ccree/cc_aead.c areq_ctx->dst.mlli_nents, NS_BIT, 0); dst 1919 drivers/crypto/ccree/cc_aead.c sg_virt(req->dst), req->dst->offset, req->cryptlen); dst 88 drivers/crypto/ccree/cc_aead.h struct cc_mlli dst; dst 371 drivers/crypto/ccree/cc_buffer_mgr.c struct scatterlist *dst) dst 392 drivers/crypto/ccree/cc_buffer_mgr.c if (src != dst) { dst 393 drivers/crypto/ccree/cc_buffer_mgr.c dma_unmap_sg(dev, dst, req_ctx->out_nents, DMA_BIDIRECTIONAL); dst 394 drivers/crypto/ccree/cc_buffer_mgr.c dev_dbg(dev, "Unmapped req->dst=%pK\n", sg_virt(dst)); dst 401 drivers/crypto/ccree/cc_buffer_mgr.c struct scatterlist *dst, gfp_t flags) dst 441 drivers/crypto/ccree/cc_buffer_mgr.c if (src == dst) { dst 451 drivers/crypto/ccree/cc_buffer_mgr.c rc = cc_map_sg(dev, dst, nbytes, DMA_BIDIRECTIONAL, dst 463 drivers/crypto/ccree/cc_buffer_mgr.c cc_add_sg_entry(dev, &sg_data, req_ctx->out_nents, dst, dst 482 drivers/crypto/ccree/cc_buffer_mgr.c cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst); dst 551 drivers/crypto/ccree/cc_buffer_mgr.c if (req->src != req->dst) { dst 553 drivers/crypto/ccree/cc_buffer_mgr.c sg_virt(req->dst)); dst 554 drivers/crypto/ccree/cc_buffer_mgr.c dma_unmap_sg(dev, req->dst, areq_ctx->dst.mapped_nents, dst 559 drivers/crypto/ccree/cc_buffer_mgr.c req->src == req->dst) { dst 710 drivers/crypto/ccree/cc_buffer_mgr.c if ((req->src == req->dst) || direct == DRV_CRYPTO_DIRECTION_DECRYPT) { dst 734 drivers/crypto/ccree/cc_buffer_mgr.c if (req->src == req->dst) { dst 780 drivers/crypto/ccree/cc_buffer_mgr.c 
cc_add_sg_entry(dev, sg_data, areq_ctx->dst.nents, dst 783 drivers/crypto/ccree/cc_buffer_mgr.c &areq_ctx->dst.mlli_nents); dst 808 drivers/crypto/ccree/cc_buffer_mgr.c cc_add_sg_entry(dev, sg_data, areq_ctx->dst.nents, dst 811 drivers/crypto/ccree/cc_buffer_mgr.c &areq_ctx->dst.mlli_nents); dst 818 drivers/crypto/ccree/cc_buffer_mgr.c cc_is_icv_frag(areq_ctx->dst.nents, authsize, dst 822 drivers/crypto/ccree/cc_buffer_mgr.c sg = &areq_ctx->dst_sgl[areq_ctx->dst.nents - 1]; dst 865 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst_sgl = req->dst; dst 895 drivers/crypto/ccree/cc_buffer_mgr.c if (req->src != req->dst) { dst 906 drivers/crypto/ccree/cc_buffer_mgr.c rc = cc_map_sg(dev, req->dst, size_for_map, DMA_BIDIRECTIONAL, dst 907 drivers/crypto/ccree/cc_buffer_mgr.c &areq_ctx->dst.mapped_nents, dst 914 drivers/crypto/ccree/cc_buffer_mgr.c dst_mapped_nents = cc_get_sgl_nents(dev, req->dst, size_for_map, dst 934 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst.nents = dst_mapped_nents; dst 967 drivers/crypto/ccree/cc_buffer_mgr.c if (req->src == req->dst) { dst 968 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst.mlli_nents = areq_ctx->src.mlli_nents; dst 971 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst.sram_addr = areq_ctx->src.sram_addr; dst 981 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst.sram_addr = dst 989 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst.sram_addr = dst 993 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst.sram_addr + dst 994 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst.mlli_nents * dst 998 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst.mlli_nents; dst 1029 drivers/crypto/ccree/cc_buffer_mgr.c req->src == req->dst) dst 1118 drivers/crypto/ccree/cc_buffer_mgr.c (req->src == req->dst)) { dst 1193 drivers/crypto/ccree/cc_buffer_mgr.c dev_dbg(dev, "dst params mn %d\n", areq_ctx->dst.mlli_nents); dst 47 drivers/crypto/ccree/cc_buffer_mgr.h struct scatterlist *dst, gfp_t flags); dst 50 drivers/crypto/ccree/cc_buffer_mgr.h struct scatterlist *src, struct scatterlist *dst); dst 747 drivers/crypto/ccree/cc_cipher.c struct scatterlist *dst, struct scatterlist *src, dst 774 drivers/crypto/ccree/cc_cipher.c struct scatterlist *dst, struct scatterlist *src, dst 789 drivers/crypto/ccree/cc_cipher.c &sg_dma_address(dst), nbytes); dst 793 drivers/crypto/ccree/cc_cipher.c set_dout_dlli(&desc[*seq_size], sg_dma_address(dst), dst 836 drivers/crypto/ccree/cc_cipher.c struct scatterlist *dst = req->dst; dst 844 drivers/crypto/ccree/cc_cipher.c cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst); dst 859 drivers/crypto/ccree/cc_cipher.c struct scatterlist *dst = req->dst; dst 916 drivers/crypto/ccree/cc_cipher.c req_ctx->iv, src, dst, flags); dst 927 drivers/crypto/ccree/cc_cipher.c cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len); dst 933 drivers/crypto/ccree/cc_cipher.c cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len); dst 945 drivers/crypto/ccree/cc_cipher.c cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst); dst 104 drivers/crypto/ccree/cc_sram_mgr.c void cc_set_sram_desc(const u32 *src, cc_sram_addr_t dst, dst 114 drivers/crypto/ccree/cc_sram_mgr.c set_dout_sram(&seq[idx], dst + (i * sizeof(u32)), sizeof(u32)); dst 61 drivers/crypto/ccree/cc_sram_mgr.h void cc_set_sram_desc(const u32 *src, cc_sram_addr_t dst, dst 642 drivers/crypto/chelsio/chcr_algo.c struct scatterlist *dst, dst 656 drivers/crypto/chelsio/chcr_algo.c if (sg_dma_len(dst) == dstskip) { dst 657 drivers/crypto/chelsio/chcr_algo.c dst = 
sg_next(dst); dst 661 drivers/crypto/chelsio/chcr_algo.c while (src && dst && dst 668 drivers/crypto/chelsio/chcr_algo.c while (dst && ((dstsg + 1) <= MAX_DSGL_ENT) && dst 672 drivers/crypto/chelsio/chcr_algo.c less = min_t(unsigned int, sg_dma_len(dst) - offset - dst 676 drivers/crypto/chelsio/chcr_algo.c if ((offset + dstskip) == sg_dma_len(dst)) { dst 677 drivers/crypto/chelsio/chcr_algo.c dst = sg_next(dst); dst 697 drivers/crypto/chelsio/chcr_algo.c struct scatterlist *dst, dst 708 drivers/crypto/chelsio/chcr_algo.c skcipher_request_set_crypt(subreq, src, dst, dst 1155 drivers/crypto/chelsio/chcr_algo.c req->dst, dst 1222 drivers/crypto/chelsio/chcr_algo.c dnents = sg_nents_xlen(req->dst, req->nbytes, dst 1236 drivers/crypto/chelsio/chcr_algo.c bytes = chcr_sg_ent_in_wr(req->src, req->dst, 0, dst 1270 drivers/crypto/chelsio/chcr_algo.c req->dst, dst 1278 drivers/crypto/chelsio/chcr_algo.c reqctx->dstsg = req->dst; dst 2315 drivers/crypto/chelsio/chcr_algo.c aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, dst 2357 drivers/crypto/chelsio/chcr_algo.c dnents = sg_nents_xlen(req->dst, req->assoclen + req->cryptlen + dst 2482 drivers/crypto/chelsio/chcr_algo.c if (req->src == req->dst) { dst 2492 drivers/crypto/chelsio/chcr_algo.c error = dma_map_sg(dev, req->dst, sg_nents(req->dst), dst 2523 drivers/crypto/chelsio/chcr_algo.c if (req->src == req->dst) { dst 2529 drivers/crypto/chelsio/chcr_algo.c dma_unmap_sg(dev, req->dst, sg_nents(req->dst), dst 2575 drivers/crypto/chelsio/chcr_algo.c dsgl_walk_add_sg(&dsgl_walk, req->dst, temp, 0); dst 2687 drivers/crypto/chelsio/chcr_algo.c if (req->src == req->dst) { dst 2697 drivers/crypto/chelsio/chcr_algo.c error = dma_map_sg(dev, req->dst, sg_nents(req->dst), dst 2714 drivers/crypto/chelsio/chcr_algo.c if (req->src == req->dst) { dst 2720 drivers/crypto/chelsio/chcr_algo.c dma_unmap_sg(dev, req->dst, sg_nents(req->dst), dst 2915 drivers/crypto/chelsio/chcr_algo.c dnents = sg_nents_xlen(req->dst, req->assoclen + req->cryptlen dst 3007 drivers/crypto/chelsio/chcr_algo.c dnents = sg_nents_xlen(req->dst, req->assoclen + req->cryptlen + dst 865 drivers/crypto/chelsio/chtls/chtls_cm.c struct dst_entry *dst; dst 875 drivers/crypto/chelsio/chtls/chtls_cm.c dst = __sk_dst_get(sk); dst 884 drivers/crypto/chelsio/chtls/chtls_cm.c tp->advmss = dst_metric_advmss(dst); dst 1032 drivers/crypto/chelsio/chtls/chtls_cm.c struct dst_entry *dst; dst 1045 drivers/crypto/chelsio/chtls/chtls_cm.c dst = inet_csk_route_child_sock(lsk, newsk, oreq); dst 1046 drivers/crypto/chelsio/chtls/chtls_cm.c if (!dst) dst 1049 drivers/crypto/chelsio/chtls/chtls_cm.c n = dst_neigh_lookup(dst, &iph->saddr); dst 1077 drivers/crypto/chelsio/chtls/chtls_cm.c sk_setup_caps(newsk, dst); dst 1110 drivers/crypto/chelsio/chtls/chtls_cm.c dst_release(dst); dst 141 drivers/crypto/exynos-rng.c u8 *dst, unsigned int dlen, dst 165 drivers/crypto/exynos-rng.c memcpy_fromio(dst, rng->mem + EXYNOS_RNG_OUT_BASE, *read); dst 196 drivers/crypto/exynos-rng.c u8 *dst, unsigned int dlen) dst 209 drivers/crypto/exynos-rng.c ret = exynos_rng_get_random(rng, dst, dlen, &read); dst 214 drivers/crypto/exynos-rng.c dst += read; dst 46 drivers/crypto/geode-aes.c do_crypt(const void *src, void *dst, u32 len, u32 flags) dst 52 drivers/crypto/geode-aes.c iowrite32(virt_to_phys(dst), _iobase + AES_DSTA_REG); dst 70 drivers/crypto/geode-aes.c void *dst, u32 len, u8 *iv, int mode, int dir) dst 98 drivers/crypto/geode-aes.c ret = do_crypt(src, dst, len, flags); dst 300 drivers/crypto/geode-aes.c 
geode_aes_crypt(tctx, walk.src.virt.addr, walk.dst.virt.addr, dst 1319 drivers/crypto/hifn_795x.c struct scatterlist *src, struct scatterlist *dst, dst 1348 drivers/crypto/hifn_795x.c BUG_ON(!sg_page(dst)); dst 1349 drivers/crypto/hifn_795x.c dpage = sg_page(dst); dst 1350 drivers/crypto/hifn_795x.c doff = dst->offset; dst 1351 drivers/crypto/hifn_795x.c len = dst->length; dst 1357 drivers/crypto/hifn_795x.c dst++; dst 1407 drivers/crypto/hifn_795x.c static int ablkcipher_add(unsigned int *drestp, struct scatterlist *dst, dst 1417 drivers/crypto/hifn_795x.c copy = min3(drest, size, dst->length); dst 1426 drivers/crypto/hifn_795x.c dst++; dst 1439 drivers/crypto/hifn_795x.c struct scatterlist *dst, *t; dst 1449 drivers/crypto/hifn_795x.c dst = &req->dst[idx]; dst 1452 drivers/crypto/hifn_795x.c __func__, dst->length, dst->offset, offset, nbytes); dst 1454 drivers/crypto/hifn_795x.c if (!IS_ALIGNED(dst->offset, HIFN_D_DST_DALIGN) || dst 1455 drivers/crypto/hifn_795x.c !IS_ALIGNED(dst->length, HIFN_D_DST_DALIGN) || dst 1457 drivers/crypto/hifn_795x.c unsigned slen = min(dst->length - offset, nbytes); dst 1462 drivers/crypto/hifn_795x.c err = ablkcipher_add(&dlen, dst, slen, &nbytes); dst 1499 drivers/crypto/hifn_795x.c dst = &req->dst[idx]; dst 1501 drivers/crypto/hifn_795x.c err = ablkcipher_add(&dlen, dst, nbytes, &nbytes); dst 1511 drivers/crypto/hifn_795x.c nbytes -= min(dst->length, nbytes); dst 1529 drivers/crypto/hifn_795x.c struct scatterlist *dst; dst 1537 drivers/crypto/hifn_795x.c dst = &req->dst[idx]; dst 1538 drivers/crypto/hifn_795x.c dlen = min(dst->length, nbytes); dst 1540 drivers/crypto/hifn_795x.c if (!IS_ALIGNED(dst->offset, HIFN_D_DST_DALIGN) || dst 1566 drivers/crypto/hifn_795x.c err = hifn_setup_dma(dev, ctx, rctx, req->src, req->dst, req->nbytes, req); dst 1614 drivers/crypto/hifn_795x.c struct scatterlist *dst, unsigned int size, unsigned int *nbytesp) dst 1624 drivers/crypto/hifn_795x.c copy = min3(srest, dst->length, size); dst 1626 drivers/crypto/hifn_795x.c daddr = kmap_atomic(sg_page(dst)); dst 1627 drivers/crypto/hifn_795x.c memcpy(daddr + dst->offset + offset, saddr, copy); dst 1639 drivers/crypto/hifn_795x.c dst++; dst 1670 drivers/crypto/hifn_795x.c struct scatterlist *dst, *t; dst 1675 drivers/crypto/hifn_795x.c dst = &req->dst[idx]; dst 1681 drivers/crypto/hifn_795x.c sg_page(dst), dst->length, nbytes); dst 1684 drivers/crypto/hifn_795x.c nbytes -= min(dst->length, nbytes); dst 1692 drivers/crypto/hifn_795x.c dst, nbytes, &nbytes); dst 533 drivers/crypto/hisilicon/sec/sec_algs.c if (skreq->src != skreq->dst) dst 534 drivers/crypto/hisilicon/sec/sec_algs.c dma_unmap_sg(dev, skreq->dst, sec_req->len_out, dst 722 drivers/crypto/hisilicon/sec/sec_algs.c bool split = skreq->src != skreq->dst; dst 742 drivers/crypto/hisilicon/sec/sec_algs.c sec_req->len_out = sg_nents(skreq->dst); dst 743 drivers/crypto/hisilicon/sec/sec_algs.c ret = sec_map_and_split_sg(skreq->dst, split_sizes, steps, dst 774 drivers/crypto/hisilicon/sec/sec_algs.c skreq->src != skreq->dst, dst 848 drivers/crypto/hisilicon/sec/sec_algs.c sec_unmap_sg_on_err(skreq->dst, steps, splits_out, dst 332 drivers/crypto/hisilicon/zip/zip_crypto.c hisi_acc_sg_buf_unmap(dev, acomp_req->dst, req->hw_dst); dst 391 drivers/crypto/hisilicon/zip/zip_crypto.c static int add_comp_head(struct scatterlist *dst, u8 req_type) dst 397 drivers/crypto/hisilicon/zip/zip_crypto.c ret = sg_copy_from_buffer(dst, sg_nents(dst), head, head_size); dst 474 drivers/crypto/hisilicon/zip/zip_crypto.c if (!a_req->src || !a_req->slen 
|| !a_req->dst || !a_req->dlen) dst 483 drivers/crypto/hisilicon/zip/zip_crypto.c req->hw_dst = hisi_acc_sg_buf_map_to_hw_sgl(dev, a_req->dst, pool, dst 505 drivers/crypto/hisilicon/zip/zip_crypto.c hisi_acc_sg_buf_unmap(dev, a_req->dst, req->hw_dst); dst 520 drivers/crypto/hisilicon/zip/zip_crypto.c head_size = add_comp_head(acomp_req->dst, qp_ctx->qp->req_type); dst 491 drivers/crypto/inside-secure/safexcel_cipher.c struct scatterlist *dst, dst 524 drivers/crypto/inside-secure/safexcel_cipher.c if (src == dst) { dst 528 drivers/crypto/inside-secure/safexcel_cipher.c dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE); dst 537 drivers/crypto/inside-secure/safexcel_cipher.c sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv, dst 550 drivers/crypto/inside-secure/safexcel_cipher.c struct scatterlist *src, struct scatterlist *dst, dst 601 drivers/crypto/inside-secure/safexcel_cipher.c sreq->nr_dst = sg_nents_for_len(dst, totlen_dst); dst 610 drivers/crypto/inside-secure/safexcel_cipher.c if (src == dst) { dst 635 drivers/crypto/inside-secure/safexcel_cipher.c dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE); dst 693 drivers/crypto/inside-secure/safexcel_cipher.c for_each_sg(dst, sg, sreq->nr_dst, i) { dst 762 drivers/crypto/inside-secure/safexcel_cipher.c if (src == dst) { dst 766 drivers/crypto/inside-secure/safexcel_cipher.c dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE); dst 846 drivers/crypto/inside-secure/safexcel_cipher.c req->dst, req->cryptlen, sreq, dst 869 drivers/crypto/inside-secure/safexcel_cipher.c req->dst, dst 918 drivers/crypto/inside-secure/safexcel_cipher.c req->dst, req->cryptlen, 0, 0, input_iv, dst 941 drivers/crypto/inside-secure/safexcel_cipher.c ret = safexcel_send_req(async, ring, sreq, req->src, req->dst, dst 150 drivers/crypto/ixp4xx_crypto.c struct buffer_desc *dst; dst 155 drivers/crypto/ixp4xx_crypto.c struct buffer_desc *dst; dst 351 drivers/crypto/ixp4xx_crypto.c req->dst, decryptlen, authsize, 1); dst 373 drivers/crypto/ixp4xx_crypto.c free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); dst 384 drivers/crypto/ixp4xx_crypto.c if (req_ctx->dst) { dst 385 drivers/crypto/ixp4xx_crypto.c free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); dst 907 drivers/crypto/ixp4xx_crypto.c if (req->src != req->dst) { dst 912 drivers/crypto/ixp4xx_crypto.c req_ctx->dst = NULL; dst 913 drivers/crypto/ixp4xx_crypto.c if (!chainup_buffers(dev, req->dst, nbytes, &dst_hook, dst 917 drivers/crypto/ixp4xx_crypto.c req_ctx->dst = dst_hook.next; dst 920 drivers/crypto/ixp4xx_crypto.c req_ctx->dst = NULL; dst 937 drivers/crypto/ixp4xx_crypto.c if (req->src != req->dst) { dst 938 drivers/crypto/ixp4xx_crypto.c free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); dst 1037 drivers/crypto/ixp4xx_crypto.c req_ctx->dst = NULL; dst 1039 drivers/crypto/ixp4xx_crypto.c if (req->src != req->dst) { dst 1045 drivers/crypto/ixp4xx_crypto.c buf = chainup_buffers(dev, req->dst, crypt->auth_len, dst 1047 drivers/crypto/ixp4xx_crypto.c req_ctx->dst = dst_hook.next; dst 1083 drivers/crypto/ixp4xx_crypto.c free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); dst 304 drivers/crypto/marvell/cesa.h __le32 dst; dst 853 drivers/crypto/marvell/cesa.h dma_addr_t dst, dma_addr_t src, u32 size, dst 35 drivers/crypto/marvell/cipher.c struct mv_cesa_sg_dma_iter dst; dst 44 drivers/crypto/marvell/cipher.c mv_cesa_sg_dma_iter_init(&iter->dst, req->dst, DMA_FROM_DEVICE); dst 51 drivers/crypto/marvell/cipher.c iter->dst.op_offset = 0; dst 61 drivers/crypto/marvell/cipher.c if (req->dst != req->src) { 
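Many of the crypto-driver occurrences in this listing (marvell/cipher.c, ixp4xx_crypto.c, inside-secure/safexcel_cipher.c, atmel-aes.c, the caam files) branch on whether the request is in place: when req->src == req->dst the single scatterlist is DMA-mapped once as bidirectional, otherwise the source and destination lists are mapped, and later unmapped, separately. Below is a minimal sketch of that shared pattern, assuming only the generic dma_map_sg()/dma_unmap_sg() API; the helper names example_map_req() and example_unmap_req() are hypothetical and are not taken from any of the files listed here.

    /*
     * Hedged illustration of the src/dst scatterlist mapping pattern seen in
     * the crypto-driver entries above; not code from any listed driver.
     */
    #include <linux/dma-mapping.h>
    #include <linux/errno.h>
    #include <linux/scatterlist.h>

    static int example_map_req(struct device *dev,
                               struct scatterlist *src, int src_nents,
                               struct scatterlist *dst, int dst_nents)
    {
            if (src == dst)         /* in-place request: one bidirectional mapping */
                    return dma_map_sg(dev, src, src_nents, DMA_BIDIRECTIONAL) ?
                           0 : -ENOMEM;

            if (!dma_map_sg(dev, src, src_nents, DMA_TO_DEVICE))
                    return -ENOMEM;
            if (!dma_map_sg(dev, dst, dst_nents, DMA_FROM_DEVICE)) {
                    dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
                    return -ENOMEM;
            }
            return 0;
    }

    static void example_unmap_req(struct device *dev,
                                  struct scatterlist *src, int src_nents,
                                  struct scatterlist *dst, int dst_nents)
    {
            if (src == dst) {
                    dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
                    return;
            }
            dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
            dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
    }

The same src == dst test on the completion path selects the unmap direction, which is what the marvell/cipher.c 61/62 and inside-secure/safexcel_cipher.c 524/528 occurrences around this point in the listing are doing.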
dst 62 drivers/crypto/marvell/cipher.c dma_unmap_sg(cesa_dev->dev, req->dst, creq->dst_nents, dst 122 drivers/crypto/marvell/cipher.c len = sg_pcopy_from_buffer(req->dst, creq->dst_nents, dst 316 drivers/crypto/marvell/cipher.c if (req->src != req->dst) { dst 322 drivers/crypto/marvell/cipher.c ret = dma_map_sg(cesa_dev->dev, req->dst, creq->dst_nents, dst 363 drivers/crypto/marvell/cipher.c &iter.dst, flags); dst 383 drivers/crypto/marvell/cipher.c if (req->dst != req->src) dst 384 drivers/crypto/marvell/cipher.c dma_unmap_sg(cesa_dev->dev, req->dst, creq->dst_nents, dst 389 drivers/crypto/marvell/cipher.c req->dst != req->src ? DMA_TO_DEVICE : DMA_BIDIRECTIONAL); dst 426 drivers/crypto/marvell/cipher.c creq->dst_nents = sg_nents_for_len(req->dst, req->cryptlen); dst 86 drivers/crypto/marvell/tdma.c tdma->dst = cpu_to_le32(tdma->dst + engine->sram_dma); dst 239 drivers/crypto/marvell/tdma.c tdma->dst = op_desc->src; dst 273 drivers/crypto/marvell/tdma.c tdma->dst = CESA_SA_CFG_SRAM_OFFSET; dst 280 drivers/crypto/marvell/tdma.c dma_addr_t dst, dma_addr_t src, u32 size, dst 291 drivers/crypto/marvell/tdma.c tdma->dst = dst; dst 330 drivers/crypto/marvell/tdma.c dma_addr_t dst, src; dst 335 drivers/crypto/marvell/tdma.c dst = CESA_SA_DATA_SRAM_OFFSET + sgiter->op_offset; dst 338 drivers/crypto/marvell/tdma.c dst = sg_dma_address(sgiter->sg) + sgiter->offset; dst 342 drivers/crypto/marvell/tdma.c ret = mv_cesa_dma_add_data_transfer(chain, dst, src, len, dst 131 drivers/crypto/mediatek/mtk-aes.c struct scatterlist dst[2]; dst 246 drivers/crypto/mediatek/mtk-aes.c static inline void mtk_aes_write_state_le(__le32 *dst, const u32 *src, u32 size) dst 251 drivers/crypto/mediatek/mtk-aes.c dst[i] = cpu_to_le32(src[i]); dst 254 drivers/crypto/mediatek/mtk-aes.c static inline void mtk_aes_write_state_be(__be32 *dst, const u32 *src, u32 size) dst 259 drivers/crypto/mediatek/mtk-aes.c dst[i] = cpu_to_be32(src[i]); dst 281 drivers/crypto/mediatek/mtk-aes.c struct scatterlist *ssg = aes->src.sg, *dsg = aes->dst.sg; dst 282 drivers/crypto/mediatek/mtk-aes.c u32 slen = aes->src.sg_len, dlen = aes->dst.sg_len; dst 346 drivers/crypto/mediatek/mtk-aes.c if (aes->src.sg == aes->dst.sg) { dst 353 drivers/crypto/mediatek/mtk-aes.c dma_unmap_sg(cryp->dev, aes->dst.sg, aes->dst.nents, dst 356 drivers/crypto/mediatek/mtk-aes.c if (aes->dst.sg != &aes->aligned_sg) dst 357 drivers/crypto/mediatek/mtk-aes.c mtk_aes_restore_sg(&aes->dst); dst 366 drivers/crypto/mediatek/mtk-aes.c if (aes->dst.sg == &aes->aligned_sg) dst 383 drivers/crypto/mediatek/mtk-aes.c if (aes->src.sg == aes->dst.sg) { dst 387 drivers/crypto/mediatek/mtk-aes.c aes->dst.sg_len = aes->src.sg_len; dst 396 drivers/crypto/mediatek/mtk-aes.c aes->dst.sg_len = dma_map_sg(cryp->dev, aes->dst.sg, dst 397 drivers/crypto/mediatek/mtk-aes.c aes->dst.nents, DMA_FROM_DEVICE); dst 398 drivers/crypto/mediatek/mtk-aes.c if (unlikely(!aes->dst.sg_len)) { dst 464 drivers/crypto/mediatek/mtk-aes.c struct scatterlist *src, struct scatterlist *dst, dst 472 drivers/crypto/mediatek/mtk-aes.c aes->dst.sg = dst; dst 473 drivers/crypto/mediatek/mtk-aes.c aes->real_dst = dst; dst 476 drivers/crypto/mediatek/mtk-aes.c if (src == dst) dst 479 drivers/crypto/mediatek/mtk-aes.c dst_aligned = mtk_aes_check_aligned(dst, len, &aes->dst); dst 495 drivers/crypto/mediatek/mtk-aes.c aes->dst.sg = &aes->aligned_sg; dst 496 drivers/crypto/mediatek/mtk-aes.c aes->dst.nents = 1; dst 497 drivers/crypto/mediatek/mtk-aes.c aes->dst.remainder = 0; dst 561 drivers/crypto/mediatek/mtk-aes.c 
return mtk_aes_dma(cryp, aes, req->src, req->dst, req->nbytes); dst 575 drivers/crypto/mediatek/mtk-aes.c struct scatterlist *src, *dst; dst 601 drivers/crypto/mediatek/mtk-aes.c dst = ((req->src == req->dst) ? src : dst 602 drivers/crypto/mediatek/mtk-aes.c scatterwalk_ffwd(cctx->dst, req->dst, cctx->offset)); dst 617 drivers/crypto/mediatek/mtk-aes.c return mtk_aes_dma(cryp, aes, src, dst, datalen); dst 916 drivers/crypto/mediatek/mtk-aes.c struct scatterlist *src, struct scatterlist *dst, dst 922 drivers/crypto/mediatek/mtk-aes.c aes->dst.sg = dst; dst 923 drivers/crypto/mediatek/mtk-aes.c aes->real_dst = dst; dst 926 drivers/crypto/mediatek/mtk-aes.c if (src == dst) dst 929 drivers/crypto/mediatek/mtk-aes.c dst_aligned = mtk_aes_check_aligned(dst, len, &aes->dst); dst 943 drivers/crypto/mediatek/mtk-aes.c aes->dst.sg = &aes->aligned_sg; dst 944 drivers/crypto/mediatek/mtk-aes.c aes->dst.nents = 1; dst 945 drivers/crypto/mediatek/mtk-aes.c aes->dst.remainder = 0; dst 974 drivers/crypto/mediatek/mtk-aes.c scatterwalk_map_and_copy(tag, req->dst, len, gctx->authsize, 1); dst 980 drivers/crypto/mediatek/mtk-aes.c return mtk_aes_gcm_dma(cryp, aes, req->src, req->dst, len); dst 153 drivers/crypto/mediatek/mtk-platform.h struct mtk_aes_dma dst; dst 282 drivers/crypto/mxs-dcp.c struct scatterlist *dst = req->dst; dst 349 drivers/crypto/mxs-dcp.c while (dst && actx->fill) { dst 351 drivers/crypto/mxs-dcp.c dst_buf = sg_virt(dst); dst 354 drivers/crypto/mxs-dcp.c rem = min(sg_dma_len(dst) - dst_off, dst 362 drivers/crypto/mxs-dcp.c if (dst_off == sg_dma_len(dst)) { dst 363 drivers/crypto/mxs-dcp.c dst = sg_next(dst); dst 435 drivers/crypto/mxs-dcp.c skcipher_request_set_crypt(subreq, req->src, req->dst, dst 890 drivers/crypto/n2_core.c ablkcipher_walk_init(walk, req->dst, req->src, req->nbytes); dst 915 drivers/crypto/n2_core.c dest_paddr = (page_to_phys(walk->dst.page) + dst 916 drivers/crypto/n2_core.c walk->dst.offset); dst 167 drivers/crypto/nx/nx-842.c u8 *src = p->in, *dst = p->out; dst 194 drivers/crypto/nx/nx-842.c dst += hdrsize; dst 197 drivers/crypto/nx/nx-842.c if ((u64)dst % c->alignment) { dst 198 drivers/crypto/nx/nx-842.c dskip = (int)(PTR_ALIGN(dst, c->alignment) - dst); dst 199 drivers/crypto/nx/nx-842.c dst += dskip; dst 206 drivers/crypto/nx/nx-842.c dst = ctx->dbounce; dst 219 drivers/crypto/nx/nx-842.c ret = ctx->driver->compress(src, slen, dst, &dlen, ctx->wmem); dst 223 drivers/crypto/nx/nx-842.c if (ret == -ENOSPC && dst != ctx->dbounce) dst 231 drivers/crypto/nx/nx-842.c if (dst == ctx->dbounce) dst 232 drivers/crypto/nx/nx-842.c memcpy(p->out + dskip, dst, dlen); dst 251 drivers/crypto/nx/nx-842.c u8 *dst, unsigned int *dlen) dst 266 drivers/crypto/nx/nx-842.c p.out = dst; dst 321 drivers/crypto/nx/nx-842.c ret = nx842_crypto_add_header(hdr, dst); dst 345 drivers/crypto/nx/nx-842.c u8 *src = p->in, *dst = p->out; dst 383 drivers/crypto/nx/nx-842.c if (dlen < required_len || (u64)dst % c->alignment) { dst 384 drivers/crypto/nx/nx-842.c dst = ctx->dbounce; dst 397 drivers/crypto/nx/nx-842.c ret = ctx->driver->decompress(src, slen, dst, &dlen, ctx->wmem); dst 405 drivers/crypto/nx/nx-842.c dst = p->out; dst 408 drivers/crypto/nx/nx-842.c dst = ctx->dbounce; dst 412 drivers/crypto/nx/nx-842.c ret = sw842_decompress(src, slen, dst, &dlen); dst 423 drivers/crypto/nx/nx-842.c if (dst == ctx->dbounce) dst 424 drivers/crypto/nx/nx-842.c memcpy(p->out, dst, dlen); dst 434 drivers/crypto/nx/nx-842.c u8 *dst, unsigned int *dlen) dst 447 drivers/crypto/nx/nx-842.c p.out = dst; dst 
184 drivers/crypto/nx/nx-842.h u8 *dst, unsigned int *dlen); dst 187 drivers/crypto/nx/nx-842.h u8 *dst, unsigned int *dlen); dst 54 drivers/crypto/nx/nx-aes-cbc.c struct scatterlist *dst, dst 75 drivers/crypto/nx/nx-aes-cbc.c rc = nx_build_sg_lists(nx_ctx, desc, dst, src, &to_process, dst 103 drivers/crypto/nx/nx-aes-cbc.c struct scatterlist *dst, dst 107 drivers/crypto/nx/nx-aes-cbc.c return cbc_aes_nx_crypt(desc, dst, src, nbytes, 1); dst 111 drivers/crypto/nx/nx-aes-cbc.c struct scatterlist *dst, dst 115 drivers/crypto/nx/nx-aes-cbc.c return cbc_aes_nx_crypt(desc, dst, src, nbytes, 0); dst 370 drivers/crypto/nx/nx-aes-ccm.c rc = nx_build_sg_lists(nx_ctx, desc, req->dst, req->src, dst 439 drivers/crypto/nx/nx-aes-ccm.c rc = nx_build_sg_lists(nx_ctx, desc, req->dst, req->src, dst 472 drivers/crypto/nx/nx-aes-ccm.c req->dst, nbytes + req->assoclen, authsize, dst 73 drivers/crypto/nx/nx-aes-ctr.c struct scatterlist *dst, dst 88 drivers/crypto/nx/nx-aes-ctr.c rc = nx_build_sg_lists(nx_ctx, desc, dst, src, &to_process, dst 117 drivers/crypto/nx/nx-aes-ctr.c struct scatterlist *dst, dst 132 drivers/crypto/nx/nx-aes-ctr.c return ctr_aes_nx_crypt(desc, dst, src, nbytes); dst 54 drivers/crypto/nx/nx-aes-ecb.c struct scatterlist *dst, dst 75 drivers/crypto/nx/nx-aes-ecb.c rc = nx_build_sg_lists(nx_ctx, desc, dst, src, &to_process, dst 103 drivers/crypto/nx/nx-aes-ecb.c struct scatterlist *dst, dst 107 drivers/crypto/nx/nx-aes-ecb.c return ecb_aes_nx_crypt(desc, dst, src, nbytes, 1); dst 111 drivers/crypto/nx/nx-aes-ecb.c struct scatterlist *dst, dst 115 drivers/crypto/nx/nx-aes-ecb.c return ecb_aes_nx_crypt(desc, dst, src, nbytes, 0); dst 361 drivers/crypto/nx/nx-aes-gcm.c rc = nx_build_sg_lists(nx_ctx, &desc, req->dst, dst 400 drivers/crypto/nx/nx-aes-gcm.c req->dst, req->assoclen + nbytes, dst 157 drivers/crypto/nx/nx.c char *dst; dst 182 drivers/crypto/nx/nx.c dst = scatterwalk_map(&walk); dst 184 drivers/crypto/nx/nx.c nx_sg = nx_build_sg_list(nx_sg, dst, &n, sglen - (nx_sg - nx_dst)); dst 187 drivers/crypto/nx/nx.c scatterwalk_unmap(dst); dst 260 drivers/crypto/nx/nx.c struct scatterlist *dst, dst 282 drivers/crypto/nx/nx.c nx_outsg = nx_walk_and_build(nx_outsg, max_sg_len, dst, dst 63 drivers/crypto/omap-aes-gcm.c dd->aead_req->dst, dst 142 drivers/crypto/omap-aes-gcm.c dd->out_sg = req->dst; dst 143 drivers/crypto/omap-aes-gcm.c dd->orig_out = req->dst; dst 145 drivers/crypto/omap-aes-gcm.c dd->out_sg = scatterwalk_ffwd(sg_arr, req->dst, assoclen); dst 148 drivers/crypto/omap-aes-gcm.c if (req->src == req->dst || dd->out_sg == sg_arr) dst 322 drivers/crypto/omap-aes-gcm.c scatterwalk_map_and_copy(rctx->auth_tag, req->dst, 0, authlen, dst 433 drivers/crypto/omap-aes.c dd->out_sg = req->dst; dst 434 drivers/crypto/omap-aes.c dd->orig_out = req->dst; dst 437 drivers/crypto/omap-aes.c if (req->src == req->dst) dst 526 drivers/crypto/omap-aes.c skcipher_request_set_crypt(subreq, req->src, req->dst, dst 886 drivers/crypto/omap-aes.c u32 *src, *dst; dst 929 drivers/crypto/omap-aes.c dst = sg_virt(dd->out_sg) + _calc_walked(out); dst 932 drivers/crypto/omap-aes.c *dst = omap_aes_read(dd, AES_REG_DATA_N(dd, i)); dst 939 drivers/crypto/omap-aes.c dst = sg_virt(dd->out_sg) + dst 943 drivers/crypto/omap-aes.c dst++; dst 544 drivers/crypto/omap-des.c dd->out_sg = req->dst; dst 545 drivers/crypto/omap-des.c dd->orig_out = req->dst; dst 548 drivers/crypto/omap-des.c if (req->src == req->dst) dst 857 drivers/crypto/omap-des.c u32 *src, *dst; dst 900 drivers/crypto/omap-des.c dst = sg_virt(dd->out_sg) + 
_calc_walked(out); dst 903 drivers/crypto/omap-des.c *dst = omap_des_read(dd, DES_REG_DATA_N(dd, i)); dst 910 drivers/crypto/omap-des.c dst = sg_virt(dd->out_sg) + dst 914 drivers/crypto/omap-des.c dst++; dst 342 drivers/crypto/padlock-aes.c struct scatterlist *dst, struct scatterlist *src, dst 351 drivers/crypto/padlock-aes.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 355 drivers/crypto/padlock-aes.c padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr, dst 368 drivers/crypto/padlock-aes.c struct scatterlist *dst, struct scatterlist *src, dst 377 drivers/crypto/padlock-aes.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 381 drivers/crypto/padlock-aes.c padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr, dst 415 drivers/crypto/padlock-aes.c struct scatterlist *dst, struct scatterlist *src, dst 424 drivers/crypto/padlock-aes.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 429 drivers/crypto/padlock-aes.c walk.dst.virt.addr, ctx->E, dst 443 drivers/crypto/padlock-aes.c struct scatterlist *dst, struct scatterlist *src, dst 452 drivers/crypto/padlock-aes.c blkcipher_walk_init(&walk, dst, src, nbytes); dst 456 drivers/crypto/padlock-aes.c padlock_xcrypt_cbc(walk.src.virt.addr, walk.dst.virt.addr, dst 65 drivers/crypto/padlock-sha.c uint32_t *dst, size_t count) dst 68 drivers/crypto/padlock-sha.c *dst++ = swab32(*src++); dst 294 drivers/crypto/padlock-sha.c u8 *dst = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT); dst 300 drivers/crypto/padlock-sha.c memcpy(dst, (u8 *)(sctx->state), SHA1_DIGEST_SIZE); dst 311 drivers/crypto/padlock-sha.c : "+S"(src), "+D"(dst) \ dst 320 drivers/crypto/padlock-sha.c : "+S"(src), "+D"(dst) dst 328 drivers/crypto/padlock-sha.c memcpy((u8 *)(sctx->state), dst, SHA1_DIGEST_SIZE); dst 378 drivers/crypto/padlock-sha.c u8 *dst = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT); dst 384 drivers/crypto/padlock-sha.c memcpy(dst, (u8 *)(sctx->state), SHA256_DIGEST_SIZE); dst 395 drivers/crypto/padlock-sha.c : "+S"(src), "+D"(dst) dst 404 drivers/crypto/padlock-sha.c : "+S"(src), "+D"(dst) dst 412 drivers/crypto/padlock-sha.c memcpy((u8 *)(sctx->state), dst, SHA256_DIGEST_SIZE); dst 210 drivers/crypto/picoxcell_crypto.c static inline void memcpy_toio32(u32 __iomem *dst, const void *src, dst 216 drivers/crypto/picoxcell_crypto.c writel(*src32++, dst++); dst 334 drivers/crypto/picoxcell_crypto.c if (areq->src != areq->dst) { dst 335 drivers/crypto/picoxcell_crypto.c dst_nents = sg_nents_for_len(areq->dst, total); dst 361 drivers/crypto/picoxcell_crypto.c dst_ents = dma_map_sg(engine->dev, areq->dst, dst_nents, dst 386 drivers/crypto/picoxcell_crypto.c for_each_sg(areq->dst, cur, dst_ents, i) { dst 427 drivers/crypto/picoxcell_crypto.c if (areq->src != areq->dst) { dst 429 drivers/crypto/picoxcell_crypto.c nents = sg_nents_for_len(areq->dst, total); dst 434 drivers/crypto/picoxcell_crypto.c dma_unmap_sg(engine->dev, areq->dst, nents, DMA_FROM_DEVICE); dst 541 drivers/crypto/picoxcell_crypto.c aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, dst 864 drivers/crypto/picoxcell_crypto.c if (ablk_req->src != ablk_req->dst) { dst 867 drivers/crypto/picoxcell_crypto.c spacc_free_ddt(req, req->dst_ddt, req->dst_addr, ablk_req->dst, dst 870 drivers/crypto/picoxcell_crypto.c spacc_free_ddt(req, req->dst_ddt, req->dst_addr, ablk_req->dst, dst 926 drivers/crypto/picoxcell_crypto.c skcipher_request_set_crypt(subreq, req->src, req->dst, dst 957 drivers/crypto/picoxcell_crypto.c if (req->src != req->dst) { dst 963 drivers/crypto/picoxcell_crypto.c dev_req->dst_ddt = 
spacc_sg_to_ddt(engine, req->dst, dst 968 drivers/crypto/picoxcell_crypto.c dev_req->dst_ddt = spacc_sg_to_ddt(engine, req->dst, dst 1001 drivers/crypto/picoxcell_crypto.c spacc_free_ddt(dev_req, dev_req->dst_ddt, dev_req->dst_addr, req->dst, dst 1002 drivers/crypto/picoxcell_crypto.c req->nbytes, req->src == req->dst ? dst 1005 drivers/crypto/picoxcell_crypto.c if (req->src != req->dst) dst 877 drivers/crypto/qat/qat_common/qat_algs.c ret = qat_alg_sgl_to_bufl(ctx->inst, areq->src, areq->dst, qat_req); dst 919 drivers/crypto/qat/qat_common/qat_algs.c ret = qat_alg_sgl_to_bufl(ctx->inst, areq->src, areq->dst, qat_req); dst 1067 drivers/crypto/qat/qat_common/qat_algs.c ret = qat_alg_sgl_to_bufl(ctx->inst, req->src, req->dst, qat_req); dst 1127 drivers/crypto/qat/qat_common/qat_algs.c ret = qat_alg_sgl_to_bufl(ctx->inst, req->src, req->dst, qat_req); dst 206 drivers/crypto/qat/qat_common/qat_asym_algs.c scatterwalk_map_and_copy(req->dst_align, areq->dst, 0, dst 353 drivers/crypto/qat/qat_common/qat_asym_algs.c if (sg_is_last(req->dst) && req->dst_len == ctx->p_size) { dst 355 drivers/crypto/qat/qat_common/qat_asym_algs.c qat_req->out.dh.r = dma_map_single(dev, sg_virt(req->dst), dst 572 drivers/crypto/qat/qat_common/qat_asym_algs.c scatterwalk_map_and_copy(req->dst_align, areq->dst, 0, dst 749 drivers/crypto/qat/qat_common/qat_asym_algs.c if (sg_is_last(req->dst) && req->dst_len == ctx->key_sz) { dst 751 drivers/crypto/qat/qat_common/qat_asym_algs.c qat_req->out.rsa.enc.c = dma_map_single(dev, sg_virt(req->dst), dst 893 drivers/crypto/qat/qat_common/qat_asym_algs.c if (sg_is_last(req->dst) && req->dst_len == ctx->key_sz) { dst 895 drivers/crypto/qat/qat_common/qat_asym_algs.c qat_req->out.rsa.dec.m = dma_map_single(dev, sg_virt(req->dst), dst 29 drivers/crypto/qce/ablkcipher.c diff_dst = (req->src != req->dst) ? true : false; dst 69 drivers/crypto/qce/ablkcipher.c diff_dst = (req->src != req->dst) ? 
true : false; dst 75 drivers/crypto/qce/ablkcipher.c rctx->dst_nents = sg_nents_for_len(req->dst, req->nbytes); dst 98 drivers/crypto/qce/ablkcipher.c sg = qce_sgtable_add(&rctx->dst_tbl, req->dst); dst 229 drivers/crypto/qce/ablkcipher.c skcipher_request_set_crypt(subreq, req->src, req->dst, dst 158 drivers/crypto/qce/common.c void qce_cpu_to_be32p_array(__be32 *dst, const u8 *src, unsigned int len) dst 160 drivers/crypto/qce/common.c __be32 *d = dst; dst 172 drivers/crypto/qce/common.c static void qce_xts_swapiv(__be32 *dst, const u8 *src, unsigned int ivsize) dst 186 drivers/crypto/qce/common.c qce_cpu_to_be32p_array(dst, swap, QCE_AES_IV_LENGTH); dst 88 drivers/crypto/qce/common.h void qce_cpu_to_be32p_array(__be32 *dst, const u8 *src, unsigned int len); dst 282 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c dev->sg_dst = req->dst; dst 283 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c dev->dst_nents = sg_nents(req->dst); dst 347 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c if (!sg_pcopy_from_buffer(req->dst, dev->dst_nents, dst 488 drivers/crypto/s5p-sss.c s5p_sg_copy_buf(sg_virt(dev->sg_dst_cpy), dev->req->dst, dst 517 drivers/crypto/s5p-sss.c struct scatterlist **dst) dst 522 drivers/crypto/s5p-sss.c *dst = kmalloc(sizeof(**dst), GFP_ATOMIC); dst 523 drivers/crypto/s5p-sss.c if (!*dst) dst 529 drivers/crypto/s5p-sss.c kfree(*dst); dst 530 drivers/crypto/s5p-sss.c *dst = NULL; dst 536 drivers/crypto/s5p-sss.c sg_init_table(*dst, 1); dst 537 drivers/crypto/s5p-sss.c sg_set_buf(*dst, pages, len); dst 1906 drivers/crypto/s5p-sss.c sg = req->dst; dst 561 drivers/crypto/sahara.c req->nbytes, req->src, req->dst); dst 566 drivers/crypto/sahara.c dev->out_sg = req->dst; dst 671 drivers/crypto/sahara.c skcipher_request_set_crypt(subreq, req->src, req->dst, dst 693 drivers/crypto/sahara.c skcipher_request_set_crypt(subreq, req->src, req->dst, dst 715 drivers/crypto/sahara.c skcipher_request_set_crypt(subreq, req->src, req->dst, dst 737 drivers/crypto/sahara.c skcipher_request_set_crypt(subreq, req->src, req->dst, dst 982 drivers/crypto/stm32/stm32-cryp.c cryp->out_sg = req ? 
req->dst : areq->dst; dst 1069 drivers/crypto/stm32/stm32-cryp.c static u32 *stm32_cryp_next_out(struct stm32_cryp *cryp, u32 *dst, dst 1082 drivers/crypto/stm32/stm32-cryp.c return (u32 *)((u8 *)dst + n); dst 1103 drivers/crypto/stm32/stm32-cryp.c u32 cfg, size_bit, *dst, d32; dst 1159 drivers/crypto/stm32/stm32-cryp.c dst = sg_virt(cryp->out_sg) + _walked_out; dst 1164 drivers/crypto/stm32/stm32-cryp.c *dst = stm32_cryp_read(cryp, CRYP_DOUT); dst 1166 drivers/crypto/stm32/stm32-cryp.c dst = stm32_cryp_next_out(cryp, dst, dst 1178 drivers/crypto/stm32/stm32-cryp.c *((u8 *)dst) = *(d8++); dst 1179 drivers/crypto/stm32/stm32-cryp.c dst = stm32_cryp_next_out(cryp, dst, 1); dst 1236 drivers/crypto/stm32/stm32-cryp.c u32 d32, *dst; dst 1246 drivers/crypto/stm32/stm32-cryp.c dst = sg_virt(cryp->out_sg) + _walked_out; dst 1251 drivers/crypto/stm32/stm32-cryp.c *dst = stm32_cryp_read(cryp, CRYP_DOUT); dst 1253 drivers/crypto/stm32/stm32-cryp.c dst = stm32_cryp_next_out(cryp, dst, sizeof(u32)); dst 1264 drivers/crypto/stm32/stm32-cryp.c *((u8 *)dst) = *(d8++); dst 1265 drivers/crypto/stm32/stm32-cryp.c dst = stm32_cryp_next_out(cryp, dst, 1); dst 40 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c if (!areq->src || !areq->dst) { dst 60 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c sg_miter_start(&mo, areq->dst, sg_nents(areq->dst), dst 131 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c skcipher_request_set_crypt(subreq, areq->src, areq->dst, dst 150 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct scatterlist *out_sg = areq->dst; dst 177 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c if (!areq->src || !areq->dst) { dst 222 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c sg_miter_start(&mo, areq->dst, sg_nents(areq->dst), dst 16 drivers/crypto/sunxi-ss/sun4i-ss-prng.c unsigned int slen, u8 *dst, unsigned int dlen) dst 22 drivers/crypto/sunxi-ss/sun4i-ss-prng.c u32 *data = (u32 *)dst; dst 214 drivers/crypto/sunxi-ss/sun4i-ss.h unsigned int slen, u8 *dst, unsigned int dlen); dst 966 drivers/crypto/talitos.c struct scatterlist *dst, dst 974 drivers/crypto/talitos.c if (is_sec1 && dst && dst_nents > 1) { dst 977 drivers/crypto/talitos.c sg_pcopy_from_buffer(dst, dst_nents, edesc->buf + offset, len, dst 980 drivers/crypto/talitos.c if (src != dst) { dst 984 drivers/crypto/talitos.c if (dst && (dst_nents == 1 || !is_sec1)) dst 985 drivers/crypto/talitos.c dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE); dst 1008 drivers/crypto/talitos.c talitos_sg_unmap(dev, edesc, areq->src, areq->dst, dst 1018 drivers/crypto/talitos.c sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize, dst 1228 drivers/crypto/talitos.c (areq->src == areq->dst) ? dst 1266 drivers/crypto/talitos.c if (areq->src != areq->dst) { dst 1269 drivers/crypto/talitos.c dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE); dst 1276 drivers/crypto/talitos.c ret = talitos_sg_map_ext(dev, areq->dst, cryptlen, edesc, &desc->ptr[5], dst 1296 drivers/crypto/talitos.c talitos_sg_map(dev, areq->dst, authsize, edesc, &desc->ptr[6], dst 1323 drivers/crypto/talitos.c struct scatterlist *dst, dst 1347 drivers/crypto/talitos.c if (!dst || dst == src) { dst 1355 drivers/crypto/talitos.c dst_nents = dst ? 
src_nents : 0; dst 1366 drivers/crypto/talitos.c dst_nents = sg_nents_for_len(dst, dst_len); dst 1394 drivers/crypto/talitos.c if (is_sec1 && !dst) dst 1428 drivers/crypto/talitos.c return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst, dst 1542 drivers/crypto/talitos.c talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->nbytes, 0); dst 1601 drivers/crypto/talitos.c (areq->src == areq->dst) ? dst 1612 drivers/crypto/talitos.c if (areq->src != areq->dst) { dst 1615 drivers/crypto/talitos.c dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE); dst 1618 drivers/crypto/talitos.c ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4], dst 1648 drivers/crypto/talitos.c return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst, dst 856 drivers/crypto/ux500/cryp/cryp_core.c ctx->device->dma.nents_dst = get_nents(areq->dst, ctx->outlen); dst 862 drivers/crypto/ux500/cryp/cryp_core.c bytes_read = cryp_dma_read(ctx, areq->dst, bytes_written); dst 905 drivers/crypto/ux500/cryp/cryp_core.c ablkcipher_walk_init(&walk, areq->dst, areq->src, areq->nbytes); dst 919 drivers/crypto/ux500/cryp/cryp_core.c dst_paddr = (page_to_phys(walk.dst.page) + walk.dst.offset); dst 368 drivers/crypto/virtio/virtio_crypto_algs.c dst_nents = sg_nents(req->dst); dst 406 drivers/crypto/virtio/virtio_crypto_algs.c dst_len = virtio_crypto_alg_sg_nents_length(req->dst); dst 458 drivers/crypto/virtio/virtio_crypto_algs.c for (sg = req->dst; sg; sg = sg_next(sg)) dst 586 drivers/crypto/virtio/virtio_crypto_algs.c scatterwalk_map_and_copy(req->info, req->dst, dst 80 drivers/crypto/vmx/aes.c static void p8_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 85 drivers/crypto/vmx/aes.c crypto_cipher_encrypt_one(ctx->fallback, dst, src); dst 90 drivers/crypto/vmx/aes.c aes_p8_encrypt(src, dst, &ctx->enc_key); dst 97 drivers/crypto/vmx/aes.c static void p8_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src) dst 102 drivers/crypto/vmx/aes.c crypto_cipher_decrypt_one(ctx->fallback, dst, src); dst 107 drivers/crypto/vmx/aes.c aes_p8_decrypt(src, dst, &ctx->dec_key); dst 94 drivers/crypto/vmx/aes_cbc.c walk.dst.virt.addr, dst 75 drivers/crypto/vmx/aes_ctr.c u8 *dst = walk->dst.virt.addr; dst 86 drivers/crypto/vmx/aes_ctr.c crypto_xor_cpy(dst, keystream, src, nbytes); dst 112 drivers/crypto/vmx/aes_ctr.c walk.dst.virt.addr, dst 119 drivers/crypto/vmx/aes_xts.c walk.dst.virt.addr, dst 124 drivers/crypto/vmx/aes_xts.c walk.dst.virt.addr, dst 311 drivers/dma-buf/dma-resv.c int dma_resv_copy_fences(struct dma_resv *dst, struct dma_resv *src) dst 317 drivers/dma-buf/dma-resv.c dma_resv_assert_held(dst); dst 368 drivers/dma-buf/dma-resv.c src_list = dma_resv_get_list(dst); dst 369 drivers/dma-buf/dma-resv.c old = dma_resv_get_excl(dst); dst 372 drivers/dma-buf/dma-resv.c write_seqcount_begin(&dst->seq); dst 374 drivers/dma-buf/dma-resv.c RCU_INIT_POINTER(dst->fence_excl, new); dst 375 drivers/dma-buf/dma-resv.c RCU_INIT_POINTER(dst->fence, dst_list); dst 376 drivers/dma-buf/dma-resv.c write_seqcount_end(&dst->seq); dst 263 drivers/dma/altera-msgdma.c dma_addr_t dst, dma_addr_t src, size_t len, dst 268 drivers/dma/altera-msgdma.c desc->write_addr_lo = lower_32_bits(dst); dst 272 drivers/dma/altera-msgdma.c desc->write_addr_hi = upper_32_bits(dst); dst 1650 drivers/dma/amba-pl08x.c static u32 pl08x_select_bus(bool ftdmac020, u8 src, u8 dst) dst 1665 drivers/dma/amba-pl08x.c if (!(dst & PL08X_AHB1) || ((dst & PL08X_AHB2) && (src & PL08X_AHB1))) dst 1667 drivers/dma/amba-pl08x.c if (!(src & PL08X_AHB1) || 
((src & PL08X_AHB2) && !(dst & PL08X_AHB2))) dst 65 drivers/dma/at_hdmac.c static inline unsigned int atc_get_xfer_width(dma_addr_t src, dma_addr_t dst, dst 70 drivers/dma/at_hdmac.c if (!((src | dst | len) & 3)) dst 72 drivers/dma/at_hdmac.c else if (!((src | dst | len) & 1)) dst 850 drivers/dma/at_xdmac.c dma_addr_t src, dma_addr_t dst, dst 876 drivers/dma/at_xdmac.c dwidth = at_xdmac_align_width(chan, src | dst | chunk->size); dst 916 drivers/dma/at_xdmac.c desc->lld.mbr_da = dst; dst 653 drivers/dma/bcm-sba-raid.c dma_addr_t dst, dma_addr_t src) dst 698 drivers/dma/bcm-sba-raid.c cmdsp->data = dst + msg_offset; dst 712 drivers/dma/bcm-sba-raid.c dma_addr_t off, dma_addr_t dst, dma_addr_t src, dst 726 drivers/dma/bcm-sba-raid.c off, len, dst, src); dst 736 drivers/dma/bcm-sba-raid.c sba_prep_dma_memcpy(struct dma_chan *dchan, dma_addr_t dst, dma_addr_t src, dst 748 drivers/dma/bcm-sba-raid.c req = sba_prep_dma_memcpy_req(sba, off, dst, src, dst 772 drivers/dma/bcm-sba-raid.c dma_addr_t dst, dma_addr_t *src, u32 src_cnt) dst 837 drivers/dma/bcm-sba-raid.c cmdsp->data = dst + msg_offset; dst 851 drivers/dma/bcm-sba-raid.c dma_addr_t off, dma_addr_t dst, dma_addr_t *src, dst 865 drivers/dma/bcm-sba-raid.c off, len, dst, src, src_cnt); dst 875 drivers/dma/bcm-sba-raid.c sba_prep_dma_xor(struct dma_chan *dchan, dma_addr_t dst, dma_addr_t *src, dst 891 drivers/dma/bcm-sba-raid.c req = sba_prep_dma_xor_req(sba, off, dst, src, src_cnt, dst 1342 drivers/dma/bcm-sba-raid.c sba_prep_dma_pq(struct dma_chan *dchan, dma_addr_t *dst, dma_addr_t *src, dst 1362 drivers/dma/bcm-sba-raid.c dst_p = &dst[0]; dst 1364 drivers/dma/bcm-sba-raid.c dst_q = &dst[1]; dst 58 drivers/dma/bcm2835-dma.c uint32_t dst; dst 305 drivers/dma/bcm2835-dma.c dma_addr_t src, dma_addr_t dst, size_t buf_len, dst 342 drivers/dma/bcm2835-dma.c control_block->dst = dst; dst 364 drivers/dma/bcm2835-dma.c if (dst && (info & BCM2835_DMA_D_INC)) dst 365 drivers/dma/bcm2835-dma.c dst += control_block->length; dst 404 drivers/dma/bcm2835-dma.c cb->cb->dst = addr; dst 551 drivers/dma/bcm2835-dma.c dma = control_block->dst; dst 615 drivers/dma/bcm2835-dma.c struct dma_chan *chan, dma_addr_t dst, dma_addr_t src, dst 626 drivers/dma/bcm2835-dma.c if (!src || !dst || !len) dst 635 drivers/dma/bcm2835-dma.c src, dst, len, 0, GFP_KERNEL); dst 650 drivers/dma/bcm2835-dma.c dma_addr_t src = 0, dst = 0; dst 672 drivers/dma/bcm2835-dma.c dst = c->cfg.dst_addr; dst 682 drivers/dma/bcm2835-dma.c frames, src, dst, 0, 0, dst 702 drivers/dma/bcm2835-dma.c dma_addr_t src, dst; dst 742 drivers/dma/bcm2835-dma.c dst = buf_addr; dst 747 drivers/dma/bcm2835-dma.c dst = c->cfg.dst_addr; dst 769 drivers/dma/bcm2835-dma.c frames, src, dst, buf_len, dst 98 drivers/dma/coh901318.h dma_addr_t dst, u32 ctrl_chained, u32 ctrl_last); dst 150 drivers/dma/coh901318_lli.c dma_addr_t dst = destination; dst 153 drivers/dma/coh901318_lli.c lli->dst_addr = dst; dst 158 drivers/dma/coh901318_lli.c lli->dst_addr = dst; dst 164 drivers/dma/coh901318_lli.c dst += MAX_DMA_PACKET_SIZE; dst 169 drivers/dma/coh901318_lli.c lli->dst_addr = dst; dst 183 drivers/dma/coh901318_lli.c dma_addr_t dst; dst 188 drivers/dma/coh901318_lli.c dst = dev_addr; dst 193 drivers/dma/coh901318_lli.c dst = buf; dst 213 drivers/dma/coh901318_lli.c lli->dst_addr = dst; dst 220 drivers/dma/coh901318_lli.c dst += block_size; dst 225 drivers/dma/coh901318_lli.c lli->dst_addr = dst; dst 242 drivers/dma/coh901318_lli.c dma_addr_t dst = 0; dst 252 drivers/dma/coh901318_lli.c dst = dev_addr; dst 276 
drivers/dma/coh901318_lli.c dst = sg_dma_address(sg); dst 293 drivers/dma/coh901318_lli.c lli->dst_addr = dst; dst 296 drivers/dma/coh901318_lli.c dst += elem_size; dst 218 drivers/dma/dmatest.c struct dmatest_data dst; dst 570 drivers/dma/dmatest.c struct dmatest_data *dst; dst 594 drivers/dma/dmatest.c dst = &thread->dst; dst 598 drivers/dma/dmatest.c src->cnt = dst->cnt = 1; dst 602 drivers/dma/dmatest.c src->cnt = dst->cnt = 1; dst 607 drivers/dma/dmatest.c dst->cnt = 1; dst 613 drivers/dma/dmatest.c dst->cnt = 2; dst 627 drivers/dma/dmatest.c if ((src->cnt + dst->cnt) >= 255) { dst 629 drivers/dma/dmatest.c src->cnt + dst->cnt); dst 643 drivers/dma/dmatest.c if (dmatest_alloc_test_data(dst, buf_size, align) < 0) dst 652 drivers/dma/dmatest.c dma_pq = kcalloc(dst->cnt, sizeof(dma_addr_t), GFP_KERNEL); dst 697 drivers/dma/dmatest.c dst->off = 0; dst 700 drivers/dma/dmatest.c dst->off = dmatest_random() % (buf_size - len + 1); dst 703 drivers/dma/dmatest.c dst->off = (dst->off >> align) << align; dst 710 drivers/dma/dmatest.c dmatest_init_dsts(dst->aligned, dst->off, len, dst 717 drivers/dma/dmatest.c um = dmaengine_get_unmap_data(dev->dev, src->cnt + dst->cnt, dst 722 drivers/dma/dmatest.c src->off, dst->off, len, ret); dst 738 drivers/dma/dmatest.c src->off, dst->off, len, ret); dst 745 drivers/dma/dmatest.c for (i = 0; i < dst->cnt; i++) { dst 746 drivers/dma/dmatest.c void *buf = dst->aligned[i]; dst 755 drivers/dma/dmatest.c src->off, dst->off, len, ret); dst 763 drivers/dma/dmatest.c dsts[0] + dst->off, dst 767 drivers/dma/dmatest.c dsts[0] + dst->off, dst 772 drivers/dma/dmatest.c dsts[0] + dst->off, dst 776 drivers/dma/dmatest.c for (i = 0; i < dst->cnt; i++) dst 777 drivers/dma/dmatest.c dma_pq[i] = dsts[i] + dst->off; dst 785 drivers/dma/dmatest.c dst->off, len, ret); dst 799 drivers/dma/dmatest.c dst->off, len, ret); dst 821 drivers/dma/dmatest.c result("test timed out", total_tests, src->off, dst->off, dst 828 drivers/dma/dmatest.c dst->off, len, ret); dst 836 drivers/dma/dmatest.c dst->off, len, 0); dst 852 drivers/dma/dmatest.c error_count += dmatest_verify(dst->aligned, 0, dst->off, dst 855 drivers/dma/dmatest.c error_count += dmatest_verify(dst->aligned, dst->off, dst 856 drivers/dma/dmatest.c dst->off + len, src->off, dst 859 drivers/dma/dmatest.c error_count += dmatest_verify(dst->aligned, dst->off + len, dst 860 drivers/dma/dmatest.c buf_size, dst->off + len, dst 867 drivers/dma/dmatest.c result("data error", total_tests, src->off, dst->off, dst 872 drivers/dma/dmatest.c dst->off, len, 0); dst 891 drivers/dma/dmatest.c dmatest_free_test_data(dst); dst 186 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c dma_addr_t dst, size_t len) dst 190 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c return __ffs(src | dst | len | BIT(max_width)); dst 373 drivers/dma/fsl-edma-common.c void fsl_edma_fill_tcd(struct fsl_edma_hw_tcd *tcd, u32 src, u32 dst, dst 387 drivers/dma/fsl-edma-common.c tcd->daddr = cpu_to_le32(dst); dst 343 drivers/dma/fsl-qdma.c dma_addr_t dst, dma_addr_t src, u32 len) dst 369 drivers/dma/fsl-qdma.c qdma_desc_addr_set64(csgf_dest, dst); dst 921 drivers/dma/fsl-qdma.c fsl_qdma_prep_memcpy(struct dma_chan *chan, dma_addr_t dst, dst 932 drivers/dma/fsl-qdma.c fsl_qdma_comp_fill_memcpy(fsl_comp, dst, src, len); dst 110 drivers/dma/fsldma.c struct fsl_dma_ld_hw *hw, dma_addr_t dst) dst 116 drivers/dma/fsldma.c hw->dst_addr = CPU_TO_DMA(chan, snoop_bits | dst, 64); dst 208 drivers/dma/img-mdc-dma.c dma_addr_t src, dma_addr_t dst, size_t len) dst 221 
drivers/dma/img-mdc-dma.c ldesc->write_addr = dst; dst 229 drivers/dma/img-mdc-dma.c if (IS_ALIGNED(dst, mdma->bus_width) && dst 370 drivers/dma/ioat/dma.h ioat_prep_pq(struct dma_chan *chan, dma_addr_t *dst, dma_addr_t *src, dst 378 drivers/dma/ioat/dma.h ioat_prep_pqxor(struct dma_chan *chan, dma_addr_t dst, dma_addr_t *src, dst 107 drivers/dma/ioat/prep.c dma_addr_t dst = dma_dest; dst 131 drivers/dma/ioat/prep.c hw->dst_addr = dst; dst 134 drivers/dma/ioat/prep.c dst += copy; dst 340 drivers/dma/ioat/prep.c const dma_addr_t *dst, const dma_addr_t *src, dst 410 drivers/dma/ioat/prep.c pq_set_src(descs, dst[1], offset, 1, s++); dst 412 drivers/dma/ioat/prep.c pq_set_src(descs, dst[0], offset, 0, s++); dst 413 drivers/dma/ioat/prep.c pq_set_src(descs, dst[1], offset, 1, s++); dst 414 drivers/dma/ioat/prep.c pq_set_src(descs, dst[1], offset, 0, s++); dst 417 drivers/dma/ioat/prep.c pq->p_addr = dst[0] + offset; dst 418 drivers/dma/ioat/prep.c pq->q_addr = dst[1] + offset; dst 464 drivers/dma/ioat/prep.c const dma_addr_t *dst, const dma_addr_t *src, dst 523 drivers/dma/ioat/prep.c pq16_set_src(descs, dst[1], offset, 1, s++); dst 525 drivers/dma/ioat/prep.c pq16_set_src(descs, dst[0], offset, 0, s++); dst 526 drivers/dma/ioat/prep.c pq16_set_src(descs, dst[1], offset, 1, s++); dst 527 drivers/dma/ioat/prep.c pq16_set_src(descs, dst[1], offset, 0, s++); dst 531 drivers/dma/ioat/prep.c pq->p_addr = dst[0] + offset; dst 532 drivers/dma/ioat/prep.c pq->q_addr = dst[1] + offset; dst 574 drivers/dma/ioat/prep.c ioat_prep_pq(struct dma_chan *chan, dma_addr_t *dst, dma_addr_t *src, dst 585 drivers/dma/ioat/prep.c dst[0] = dst[1]; dst 587 drivers/dma/ioat/prep.c dst[1] = dst[0]; dst 603 drivers/dma/ioat/prep.c __ioat_prep_pq16_lock(chan, NULL, dst, single_source, dst 606 drivers/dma/ioat/prep.c __ioat_prep_pq_lock(chan, NULL, dst, single_source, 2, dst 611 drivers/dma/ioat/prep.c __ioat_prep_pq16_lock(chan, NULL, dst, src, src_cnt, dst 613 drivers/dma/ioat/prep.c __ioat_prep_pq_lock(chan, NULL, dst, src, src_cnt, dst 647 drivers/dma/ioat/prep.c ioat_prep_pqxor(struct dma_chan *chan, dma_addr_t dst, dma_addr_t *src, dst 661 drivers/dma/ioat/prep.c pq[0] = dst; dst 663 drivers/dma/ioat/prep.c pq[1] = dst; /* specify valid address for disabled result */ dst 607 drivers/dma/iop-adma.c iop_adma_prep_dma_pq(struct dma_chan *chan, dma_addr_t *dst, dma_addr_t *src, dst 645 drivers/dma/iop-adma.c dst[0] = dst[1] & 0x7; dst 647 drivers/dma/iop-adma.c iop_desc_set_pq_addr(g, dst); dst 657 drivers/dma/iop-adma.c iop_desc_set_pq_src_addr(g, i++, dst[1], 1); dst 659 drivers/dma/iop-adma.c iop_desc_set_pq_src_addr(g, i++, dst[0], 0); dst 660 drivers/dma/iop-adma.c iop_desc_set_pq_src_addr(g, i++, dst[1], 1); dst 661 drivers/dma/iop-adma.c iop_desc_set_pq_src_addr(g, i++, dst[1], 0); dst 448 drivers/dma/k3dma.c static void k3_dma_fill_desc(struct k3_dma_desc_sw *ds, dma_addr_t dst, dst 458 drivers/dma/k3dma.c ds->desc_hw[num].daddr = dst; dst 491 drivers/dma/k3dma.c struct dma_chan *chan, dma_addr_t dst, dma_addr_t src, dst 521 drivers/dma/k3dma.c k3_dma_fill_desc(ds, dst, src, copy, num++, c->ccfg); dst 524 drivers/dma/k3dma.c dst += copy; dst 540 drivers/dma/k3dma.c dma_addr_t addr, src = 0, dst = 0; dst 570 drivers/dma/k3dma.c dst = c->dev_addr; dst 573 drivers/dma/k3dma.c dst = addr; dst 576 drivers/dma/k3dma.c k3_dma_fill_desc(ds, dst, src, len, num++, c->ccfg); dst 597 drivers/dma/k3dma.c dma_addr_t addr, src = 0, dst = 0; dst 629 drivers/dma/k3dma.c dst = c->dev_addr; dst 632 drivers/dma/k3dma.c dst = addr; 
dst 642 drivers/dma/k3dma.c k3_dma_fill_desc(ds, dst, src, len, num++, c->ccfg | en_tc2); dst 142 drivers/dma/mic_x100_dma.c dma_addr_t dst, size_t len) dst 159 drivers/dma/mic_x100_dma.c src, dst, current_transfer_len); dst 162 drivers/dma/mic_x100_dma.c dst = dst + current_transfer_len; dst 181 drivers/dma/mic_x100_dma.c dma_addr_t dst, size_t len) dst 183 drivers/dma/mic_x100_dma.c if (len && -ENOMEM == mic_dma_prog_memcpy_desc(ch, src, dst, len)) { dst 270 drivers/dma/mic_x100_dma.c mic_dma_prep_status_lock(struct dma_chan *ch, dma_addr_t dst, u64 src_val, dst 280 drivers/dma/mic_x100_dma.c mic_dma_prep_status_desc(&mic_ch->desc_ring[mic_ch->head], src_val, dst, dst 601 drivers/dma/mpc512x_dma.c mpc_dma_prep_memcpy(struct dma_chan *chan, dma_addr_t dst, dma_addr_t src, dst 632 drivers/dma/mpc512x_dma.c if (IS_ALIGNED(src | dst | len, 32)) { dst 637 drivers/dma/mpc512x_dma.c } else if (!mdma->is_mpc8308 && IS_ALIGNED(src | dst | len, 16)) { dst 643 drivers/dma/mpc512x_dma.c } else if (IS_ALIGNED(src | dst | len, 4)) { dst 648 drivers/dma/mpc512x_dma.c } else if (IS_ALIGNED(src | dst | len, 2)) { dst 661 drivers/dma/mpc512x_dma.c tcd->daddr = dst; dst 501 drivers/dma/nbpfaxi.c dma_addr_t src, dma_addr_t dst, size_t size, bool last) dst 514 drivers/dma/nbpfaxi.c hwdesc->dst_addr = dst; dst 574 drivers/dma/nbpfaxi.c hwdesc->config, size, &src, &dst); dst 983 drivers/dma/nbpfaxi.c struct dma_chan *dchan, dma_addr_t dst, dma_addr_t src, dst 993 drivers/dma/nbpfaxi.c sg_dma_address(&dst_sg) = dst; dst 1000 drivers/dma/nbpfaxi.c __func__, len, &src, &dst); dst 361 drivers/dma/owl-dma.c dma_addr_t src, dma_addr_t dst, dst 410 drivers/dma/owl-dma.c hw->daddr = dst; dst 827 drivers/dma/owl-dma.c dma_addr_t dst, dma_addr_t src, dst 856 drivers/dma/owl-dma.c ret = owl_dma_cfg_lli(vchan, lli, src + offset, dst + offset, dst 887 drivers/dma/owl-dma.c dma_addr_t addr, src = 0, dst = 0; dst 915 drivers/dma/owl-dma.c dst = sconfig->dst_addr; dst 918 drivers/dma/owl-dma.c dst = addr; dst 921 drivers/dma/owl-dma.c ret = owl_dma_cfg_lli(vchan, lli, src, dst, len, dir, sconfig, dst 951 drivers/dma/owl-dma.c dma_addr_t src = 0, dst = 0; dst 971 drivers/dma/owl-dma.c dst = sconfig->dst_addr; dst 974 drivers/dma/owl-dma.c dst = buf_addr + (period_len * i); dst 977 drivers/dma/owl-dma.c ret = owl_dma_cfg_lli(vchan, lli, src, dst, period_len, dst 711 drivers/dma/pl330.c enum dmamov_dst dst, u32 val) dst 717 drivers/dma/pl330.c buf[1] = dst; dst 724 drivers/dma/pl330.c dst == SAR ? "SAR" : (dst == DAR ? 
"DAR" : "CCR"), val); dst 2612 drivers/dma/pl330.c dma_addr_t dst, dma_addr_t src, size_t len) dst 2615 drivers/dma/pl330.c px->dst_addr = dst; dst 2620 drivers/dma/pl330.c __pl330_prep_dma_memcpy(struct dma_pl330_chan *pch, dma_addr_t dst, dst 2641 drivers/dma/pl330.c fill_px(&desc->px, dst, src, len); dst 2673 drivers/dma/pl330.c dma_addr_t dst; dst 2719 drivers/dma/pl330.c dst = pch->fifo_dma; dst 2725 drivers/dma/pl330.c dst = dma_addr; dst 2735 drivers/dma/pl330.c fill_px(&desc->px, dst, src, period_len); dst 2755 drivers/dma/pl330.c pl330_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dst, dst 2768 drivers/dma/pl330.c desc = __pl330_prep_dma_memcpy(pch, dst, src, len); dst 2784 drivers/dma/pl330.c while ((src | dst | len) & (burst - 1)) dst 180 drivers/dma/ppc4xx/adma.c static void prep_dma_xor_dbg(int id, dma_addr_t dst, dma_addr_t *src, dst 188 drivers/dma/ppc4xx/adma.c pr_debug("dst:\n\t0x%016llx\n", dst); dst 191 drivers/dma/ppc4xx/adma.c static void prep_dma_pq_dbg(int id, dma_addr_t *dst, dma_addr_t *src, dst 201 drivers/dma/ppc4xx/adma.c pr_debug("\t0x%016llx ", dst[i]); dst 2087 drivers/dma/ppc4xx/adma.c dma_addr_t *dst, int dst_cnt, dma_addr_t *src, int src_cnt, dst 2126 drivers/dma/ppc4xx/adma.c DMA_CUED_XOR_BASE, dst[0], 0); dst 2127 drivers/dma/ppc4xx/adma.c ppc440spe_desc_set_dest_addr(iter, chan, 0, dst[1], 1); dst 2150 drivers/dma/ppc4xx/adma.c DMA_CUED_XOR_HB, dst[1]); dst 2152 drivers/dma/ppc4xx/adma.c DMA_CUED_XOR_BASE, dst[0], 0); dst 2173 drivers/dma/ppc4xx/adma.c dma_addr_t *dst, dma_addr_t *src, int src_cnt, dst 2209 drivers/dma/ppc4xx/adma.c *dst, 0); dst 2237 drivers/dma/ppc4xx/adma.c *dst, 0); dst 2262 drivers/dma/ppc4xx/adma.c *dst, 0); dst 2277 drivers/dma/ppc4xx/adma.c dma_addr_t *dst, int dst_cnt, dma_addr_t *src, int src_cnt, dst 2388 drivers/dma/ppc4xx/adma.c __func__, dst[0], dst[1]); dst 2389 drivers/dma/ppc4xx/adma.c ppc440spe_adma_pq_set_dest(sw_desc, dst, flags); dst 2422 drivers/dma/ppc4xx/adma.c dma_addr_t *dst, int dst_cnt, dma_addr_t *src, int src_cnt, dst 2487 drivers/dma/ppc4xx/adma.c ppc440spe_adma_pq_set_dest(sw_desc, dst, flags); dst 2510 drivers/dma/ppc4xx/adma.c struct dma_chan *chan, dma_addr_t *dst, dma_addr_t *src, dst 2521 drivers/dma/ppc4xx/adma.c dst, src, src_cnt)); dst 2526 drivers/dma/ppc4xx/adma.c if (src_cnt == 1 && dst[1] == src[0]) { dst 2530 drivers/dma/ppc4xx/adma.c dest[0] = dst[1]; dst 2538 drivers/dma/ppc4xx/adma.c if (src_cnt == 2 && dst[1] == src[1]) { dst 2540 drivers/dma/ppc4xx/adma.c &dst[1], src, 2, scf, len, flags); dst 2545 drivers/dma/ppc4xx/adma.c BUG_ON(!dst[0]); dst 2551 drivers/dma/ppc4xx/adma.c BUG_ON(!dst[1]); dst 2567 drivers/dma/ppc4xx/adma.c dst, dst_cnt, src, src_cnt, scf, dst 2573 drivers/dma/ppc4xx/adma.c dst, dst_cnt, src, src_cnt, scf, dst 166 drivers/dma/sh/rcar-dmac.c struct rcar_dmac_chan_slave dst; dst 868 drivers/dma/sh/rcar-dmac.c xfer_size = chan->dst.xfer_size; dst 1126 drivers/dma/sh/rcar-dmac.c dev_addr = rchan->dst.slave_addr; dst 1127 drivers/dma/sh/rcar-dmac.c dev_size = rchan->dst.xfer_size; dst 1255 drivers/dma/sh/rcar-dmac.c rchan->dst.slave_addr = cfg->dst_addr; dst 1257 drivers/dma/sh/rcar-dmac.c rchan->dst.xfer_size = cfg->dst_addr_width; dst 502 drivers/dma/sh/shdma-base.c unsigned long flags, dma_addr_t *dst, dma_addr_t *src, size_t *len, dst 520 drivers/dma/sh/shdma-base.c ops->desc_setup(schan, new, *src, *dst, ©_size); dst 533 drivers/dma/sh/shdma-base.c copy_size, *len, src, dst, &new->async_tx, dst 545 drivers/dma/sh/shdma-base.c *dst += copy_size; dst 379 
drivers/dma/sh/shdmac.c dma_addr_t src, dma_addr_t dst, size_t *len) dst 388 drivers/dma/sh/shdmac.c sh_desc->hw.dar = dst; dst 720 drivers/dma/sprd-dma.c dma_addr_t src, dma_addr_t dst, u32 len, dst 781 drivers/dma/sprd-dma.c hw->wrap_to = (dst >> SPRD_DMA_HIGH_ADDR_OFFSET) & SPRD_DMA_HIGH_ADDR_MASK; dst 783 drivers/dma/sprd-dma.c hw->des_addr = dst & SPRD_DMA_LOW_ADDR_MASK; dst 846 drivers/dma/sprd-dma.c dma_addr_t src, dma_addr_t dst, u32 len, dst 860 drivers/dma/sprd-dma.c return sprd_dma_fill_desc(chan, hw, sglen, sg_index, src, dst, len, dst 926 drivers/dma/sprd-dma.c dma_addr_t src = 0, dst = 0; dst 968 drivers/dma/sprd-dma.c dst = slave_cfg->dst_addr; dst 971 drivers/dma/sprd-dma.c dst = sg_dma_address(sg); dst 976 drivers/dma/sprd-dma.c start_dst = dst; dst 987 drivers/dma/sprd-dma.c ret = sprd_dma_fill_linklist_desc(chan, sglen, i, src, dst, len, dst 310 drivers/dma/st_fdma.c struct dma_chan *chan, dma_addr_t dst, dma_addr_t src, dst 337 drivers/dma/st_fdma.c hw_node->daddr = dst; dst 662 drivers/dma/ste_dma40.c d40d->lli_log.dst = d40d->lli_log.src + lli_len; dst 667 drivers/dma/ste_dma40.c d40d->lli_phy.dst = d40d->lli_phy.src + lli_len; dst 696 drivers/dma/ste_dma40.c d40d->lli_log.dst = NULL; dst 698 drivers/dma/ste_dma40.c d40d->lli_phy.dst = NULL; dst 808 drivers/dma/ste_dma40.c struct d40_phy_lli *lli_dst = desc->lli_phy.dst; dst 877 drivers/dma/ste_dma40.c &lli->dst[lli_current], dst 905 drivers/dma/ste_dma40.c &lli->dst[lli_current], dst 915 drivers/dma/ste_dma40.c &lli->dst[lli_current], dst 2129 drivers/dma/ste_dma40.c desc->lli_log.dst, dst 2159 drivers/dma/ste_dma40.c desc->lli_phy.dst, dst 2160 drivers/dma/ste_dma40.c virt_to_phys(desc->lli_phy.dst), dst 2476 drivers/dma/ste_dma40.c dma_addr_t dst, dst 2487 drivers/dma/ste_dma40.c sg_dma_address(&dst_sg) = dst; dst 70 drivers/dma/ste_dma40_ll.c u32 dst = 0; dst 86 drivers/dma/ste_dma40_ll.c dst |= BIT(D40_SREG_CFG_MST_POS); dst 87 drivers/dma/ste_dma40_ll.c dst |= D40_TYPE_TO_EVENT(cfg->dev_type); dst 90 drivers/dma/ste_dma40_ll.c dst |= BIT(D40_SREG_CFG_PHY_TM_POS); dst 92 drivers/dma/ste_dma40_ll.c dst |= 3 << D40_SREG_CFG_PHY_TM_POS; dst 95 drivers/dma/ste_dma40_ll.c dst |= BIT(D40_SREG_CFG_TIM_POS); dst 99 drivers/dma/ste_dma40_ll.c dst |= BIT(D40_SREG_CFG_EIM_POS); dst 107 drivers/dma/ste_dma40_ll.c dst |= BIT(D40_SREG_CFG_PHY_PEN_POS); dst 108 drivers/dma/ste_dma40_ll.c dst |= cfg->dst_info.psize << D40_SREG_CFG_PSIZE_POS; dst 114 drivers/dma/ste_dma40_ll.c dst |= d40_width_to_bits(cfg->dst_info.data_width) dst 120 drivers/dma/ste_dma40_ll.c dst |= BIT(D40_SREG_CFG_PRI_POS); dst 126 drivers/dma/ste_dma40_ll.c dst |= BIT(D40_SREG_CFG_LBE_POS); dst 129 drivers/dma/ste_dma40_ll.c *dst_cfg = dst; dst 287 drivers/dma/ste_dma40_ll.c dma_addr_t dst = target ?: sg_addr; dst 297 drivers/dma/ste_dma40_ll.c lli = d40_phy_buf_to_lli(lli, dst, len, l_phys, lli_phys, dst 362 drivers/dma/ste_dma40_ll.h struct d40_phy_lli *dst; dst 392 drivers/dma/ste_dma40_ll.h struct d40_log_lli *dst; dst 148 drivers/dma/sun4i-dma.c dma_addr_t dst; dst 271 drivers/dma/sun4i-dma.c writel_relaxed(d->dst, pchan->base + SUN4I_DDMA_DST_ADDR_REG); dst 277 drivers/dma/sun4i-dma.c writel_relaxed(d->dst, pchan->base + SUN4I_NDMA_DST_ADDR_REG); dst 448 drivers/dma/sun4i-dma.c promise->dst = dest; dst 510 drivers/dma/sun4i-dma.c promise->dst = dest; dst 147 drivers/dma/sun6i-dma.c u32 dst; dst 397 drivers/dma/sun6i-dma.c lli->cfg, lli->src, lli->dst, dst 658 drivers/dma/sun6i-dma.c v_lli->dst = dest; dst 720 drivers/dma/sun6i-dma.c v_lli->dst = 
sconfig->dst_addr; dst 733 drivers/dma/sun6i-dma.c v_lli->dst = sg_dma_address(sg); dst 801 drivers/dma/sun6i-dma.c v_lli->dst = sconfig->dst_addr; dst 807 drivers/dma/sun6i-dma.c v_lli->dst = buf_addr + period_len * i; dst 152 drivers/dma/ti/edma.c u32 dst; dst 574 drivers/dma/ti/edma.c bool dst) dst 580 drivers/dma/ti/edma.c offs += dst ? PARM_DST : PARM_SRC; dst 806 drivers/dma/ti/edma.c edesc->pset[j].param.dst, dst 1036 drivers/dma/ti/edma.c param->dst = dst_addr; dst 1394 drivers/dma/ti/edma.c edesc->pset[i].param.dst, dst 1737 drivers/dma/ti/edma.c bool dst = edesc->direction == DMA_DEV_TO_MEM; dst 1752 drivers/dma/ti/edma.c pos = edma_get_position(echan->ecc, echan->slot[0], dst); dst 1769 drivers/dma/ti/edma.c pos = edma_get_position(echan->ecc, echan->slot[0], dst); dst 413 drivers/dma/xgene-dma.c dma_addr_t *dst, dma_addr_t *src, dst 429 drivers/dma/xgene-dma.c desc1->m3 |= cpu_to_le64(*dst); dst 448 drivers/dma/xgene-dma.c *dst += XGENE_DMA_MAX_BYTE_CNT; dst 836 drivers/dma/xgene-dma.c struct dma_chan *dchan, dma_addr_t dst, dma_addr_t *src, dst 856 drivers/dma/xgene-dma.c xgene_dma_prep_xor_desc(chan, new, &dst, src, dst 884 drivers/dma/xgene-dma.c struct dma_chan *dchan, dma_addr_t *dst, dma_addr_t *src, dst 931 drivers/dma/xgene-dma.c xgene_dma_prep_xor_desc(chan, new, &dst[0], src, dst 941 drivers/dma/xgene-dma.c xgene_dma_prep_xor_desc(chan, new, &dst[1], _src, dst 179 drivers/dma/xilinx/zynqmp_dma.c u64 dst; dst 304 drivers/dma/xilinx/zynqmp_dma.c dma_addr_t src, dma_addr_t dst, size_t len, dst 311 drivers/dma/xilinx/zynqmp_dma.c ddesc->addr = dst; dst 406 drivers/dma/zx_dma.c static void zx_dma_fill_desc(struct zx_dma_desc_sw *ds, dma_addr_t dst, dst 413 drivers/dma/zx_dma.c ds->desc_hw[num].daddr = dst; dst 507 drivers/dma/zx_dma.c struct dma_chan *chan, dma_addr_t dst, dma_addr_t src, dst 532 drivers/dma/zx_dma.c zx_dma_fill_desc(ds, dst, src, copy, num++, c->ccfg); dst 535 drivers/dma/zx_dma.c dst += copy; dst 553 drivers/dma/zx_dma.c dma_addr_t addr, src = 0, dst = 0; dst 584 drivers/dma/zx_dma.c dst = c->dev_addr; dst 587 drivers/dma/zx_dma.c dst = addr; dst 590 drivers/dma/zx_dma.c zx_dma_fill_desc(ds, dst, src, len, num++, c->ccfg); dst 610 drivers/dma/zx_dma.c dma_addr_t src = 0, dst = 0; dst 630 drivers/dma/zx_dma.c dst = c->dev_addr; dst 633 drivers/dma/zx_dma.c dst = dma_addr; dst 635 drivers/dma/zx_dma.c zx_dma_fill_desc(ds, dst, src, period_len, num++, dst 73 drivers/firmware/efi/earlycon.c unsigned long *dst; dst 77 drivers/firmware/efi/earlycon.c dst = efi_earlycon_map(y*len, len); dst 78 drivers/firmware/efi/earlycon.c if (!dst) dst 81 drivers/firmware/efi/earlycon.c memset(dst, 0, len); dst 82 drivers/firmware/efi/earlycon.c efi_earlycon_unmap(dst, len); dst 87 drivers/firmware/efi/earlycon.c unsigned long *dst, *src; dst 95 drivers/firmware/efi/earlycon.c dst = efi_earlycon_map(i*len, len); dst 96 drivers/firmware/efi/earlycon.c if (!dst) dst 101 drivers/firmware/efi/earlycon.c efi_earlycon_unmap(dst, len); dst 105 drivers/firmware/efi/earlycon.c memmove(dst, src, len); dst 108 drivers/firmware/efi/earlycon.c efi_earlycon_unmap(dst, len); dst 112 drivers/firmware/efi/earlycon.c static void efi_earlycon_write_char(u32 *dst, unsigned char c, unsigned int h) dst 125 drivers/firmware/efi/earlycon.c *dst = color_white; dst 127 drivers/firmware/efi/earlycon.c *dst = color_black; dst 128 drivers/firmware/efi/earlycon.c dst++; dst 138 drivers/firmware/efi/earlycon.c void *dst; dst 160 drivers/firmware/efi/earlycon.c dst = efi_earlycon_map((efi_y + h) * len, 
len); dst 161 drivers/firmware/efi/earlycon.c if (!dst) dst 169 drivers/firmware/efi/earlycon.c efi_earlycon_write_char(dst + x*4, *s, h); dst 174 drivers/firmware/efi/earlycon.c efi_earlycon_unmap(dst, len); dst 186 drivers/firmware/efi/efivars.c copy_out_compat(struct efi_variable *dst, struct compat_efi_variable *src) dst 188 drivers/firmware/efi/efivars.c memcpy(dst->VariableName, src->VariableName, EFI_VAR_NAME_LEN); dst 189 drivers/firmware/efi/efivars.c memcpy(dst->Data, src->Data, sizeof(src->Data)); dst 191 drivers/firmware/efi/efivars.c dst->VendorGuid = src->VendorGuid; dst 192 drivers/firmware/efi/efivars.c dst->DataSize = src->DataSize; dst 193 drivers/firmware/efi/efivars.c dst->Attributes = src->Attributes; dst 762 drivers/firmware/efi/libstub/efi-stub-helper.c static u8 *efi_utf16_to_utf8(u8 *dst, const u16 *src, int n) dst 777 drivers/firmware/efi/libstub/efi-stub-helper.c *dst++ = c; dst 781 drivers/firmware/efi/libstub/efi-stub-helper.c *dst++ = 0xc0 + (c >> 6); dst 785 drivers/firmware/efi/libstub/efi-stub-helper.c *dst++ = 0xe0 + (c >> 12); dst 788 drivers/firmware/efi/libstub/efi-stub-helper.c *dst++ = 0xf0 + (c >> 18); dst 789 drivers/firmware/efi/libstub/efi-stub-helper.c *dst++ = 0x80 + ((c >> 12) & 0x3f); dst 791 drivers/firmware/efi/libstub/efi-stub-helper.c *dst++ = 0x80 + ((c >> 6) & 0x3f); dst 793 drivers/firmware/efi/libstub/efi-stub-helper.c *dst++ = 0x80 + (c & 0x3f); dst 796 drivers/firmware/efi/libstub/efi-stub-helper.c return dst; dst 63 drivers/firmware/efi/test/efi_test.c copy_ucs2_from_user_len(efi_char16_t **dst, efi_char16_t __user *src, dst 69 drivers/firmware/efi/test/efi_test.c *dst = NULL; dst 78 drivers/firmware/efi/test/efi_test.c *dst = NULL; dst 81 drivers/firmware/efi/test/efi_test.c *dst = buf; dst 117 drivers/firmware/efi/test/efi_test.c copy_ucs2_from_user(efi_char16_t **dst, efi_char16_t __user *src) dst 127 drivers/firmware/efi/test/efi_test.c return copy_ucs2_from_user_len(dst, src, len); dst 140 drivers/firmware/efi/test/efi_test.c copy_ucs2_to_user_len(efi_char16_t __user *dst, efi_char16_t *src, size_t len) dst 145 drivers/firmware/efi/test/efi_test.c if (!access_ok(dst, 1)) dst 148 drivers/firmware/efi/test/efi_test.c return copy_to_user(dst, src, len); dst 571 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c uint8_t amdgpu_amdkfd_get_xgmi_hops_count(struct kgd_dev *dst, struct kgd_dev *src) dst 574 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c struct amdgpu_device *adev = (struct amdgpu_device *)dst; dst 180 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h uint8_t amdgpu_amdkfd_get_xgmi_hops_count(struct kgd_dev *dst, struct kgd_dev *src); dst 182 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h #define read_user_wptr(mmptr, wptr, dst) \ dst 187 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h valid = !get_user((dst), (wptr)); \ dst 190 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h valid = !get_user((dst), (wptr)); \ dst 1742 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c void amdgpu_atombios_copy_swap(u8 *dst, u8 *src, u8 num_bytes, bool to_le) dst 1753 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c memcpy(dst, dst_tmp, align_num_bytes); dst 1758 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c memcpy(dst, dst_tmp, num_bytes); dst 1761 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c memcpy(dst, src, num_bytes); dst 202 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.h void amdgpu_atombios_copy_swap(u8 *dst, u8 *src, u8 num_bytes, bool to_le); dst 275 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c void *dst) dst 290 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c 
amdgpu_gmc_set_pte_pde(adev, dst, t, page_base, flags); dst 68 drivers/gpu/drm/amd/amdgpu/amdgpu_gart.h void *dst); dst 191 drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h uint32_t wave, uint32_t *dst, int *no_fields); dst 194 drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h uint32_t size, uint32_t *dst); dst 197 drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h uint32_t *dst); dst 108 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.h u64 *dst, u64 *flags); dst 189 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.h #define amdgpu_gmc_get_vm_pde(adev, level, dst, flags) (adev)->gmc.gmc_funcs->get_vm_pde((adev), (level), (dst), (flags)) dst 302 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h void *dst; dst 308 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h dst = (void *)&ring->ring[occupied]; dst 316 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h memcpy(dst, src, chunk1); dst 320 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h dst = (void *)ring->ring; dst 321 drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h memcpy(dst, src, chunk2); dst 305 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c struct amdgpu_copy_mem *dst, dst 330 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dst_mm = amdgpu_find_mm_node(dst->mem, &dst->offset); dst 331 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dst_node_start = amdgpu_mm_node_addr(dst->bo, dst_mm, dst->mem) + dst 332 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dst->offset; dst 333 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dst_node_size = (dst_mm->size << PAGE_SHIFT) - dst->offset; dst 368 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c if (dst->mem->start == AMDGPU_BO_INVALID_OFFSET) { dst 369 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c r = amdgpu_map_buffer(dst->bo, dst->mem, dst 402 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dst_node_start = amdgpu_mm_node_addr(dst->bo, ++dst_mm, dst 403 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dst->mem); dst 431 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c struct amdgpu_copy_mem src, dst; dst 436 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dst.bo = bo; dst 438 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dst.mem = new_mem; dst 440 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dst.offset = 0; dst 442 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c r = amdgpu_ttm_copy_mem_to_mem(adev, &src, &dst, dst 93 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h struct amdgpu_copy_mem *dst, dst 1382 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c uint64_t dst, uint64_t flags) dst 1455 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c pe_start, dst, nptes, incr, dst 1459 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c dst += (uint64_t)nptes * AMDGPU_GPU_PAGE_SIZE << shift; dst 583 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t dst, src, saved; dst 586 drivers/gpu/drm/amd/amdgpu/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 589 drivers/gpu/drm/amd/amdgpu/atom.c dst += src; dst 591 drivers/gpu/drm/amd/amdgpu/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 597 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t dst, src, saved; dst 600 drivers/gpu/drm/amd/amdgpu/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 603 drivers/gpu/drm/amd/amdgpu/atom.c dst &= src; dst 605 drivers/gpu/drm/amd/amdgpu/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 644 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t dst, src; dst 646 drivers/gpu/drm/amd/amdgpu/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1); dst 649 drivers/gpu/drm/amd/amdgpu/atom.c ctx->ctx->cs_equal = (dst == src); dst 650 drivers/gpu/drm/amd/amdgpu/atom.c ctx->ctx->cs_above = (dst > src); dst 670 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t dst, src; dst 672 drivers/gpu/drm/amd/amdgpu/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 
1); dst 676 drivers/gpu/drm/amd/amdgpu/atom.c ctx->ctx->divmul[0] = dst / src; dst 677 drivers/gpu/drm/amd/amdgpu/atom.c ctx->ctx->divmul[1] = dst % src; dst 688 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t dst, src; dst 690 drivers/gpu/drm/amd/amdgpu/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1); dst 694 drivers/gpu/drm/amd/amdgpu/atom.c val64 = dst; dst 766 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t dst, mask, src, saved; dst 769 drivers/gpu/drm/amd/amdgpu/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 774 drivers/gpu/drm/amd/amdgpu/atom.c dst &= mask; dst 775 drivers/gpu/drm/amd/amdgpu/atom.c dst |= src; dst 777 drivers/gpu/drm/amd/amdgpu/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 800 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t dst, src; dst 802 drivers/gpu/drm/amd/amdgpu/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1); dst 805 drivers/gpu/drm/amd/amdgpu/atom.c ctx->ctx->divmul[0] = dst * src; dst 812 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t dst, src; dst 814 drivers/gpu/drm/amd/amdgpu/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1); dst 817 drivers/gpu/drm/amd/amdgpu/atom.c val64 = (uint64_t)dst * (uint64_t)src; dst 830 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t dst, src, saved; dst 833 drivers/gpu/drm/amd/amdgpu/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 836 drivers/gpu/drm/amd/amdgpu/atom.c dst |= src; dst 838 drivers/gpu/drm/amd/amdgpu/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 920 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t saved, dst; dst 925 drivers/gpu/drm/amd/amdgpu/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 928 drivers/gpu/drm/amd/amdgpu/atom.c dst <<= shift; dst 930 drivers/gpu/drm/amd/amdgpu/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 936 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t saved, dst; dst 941 drivers/gpu/drm/amd/amdgpu/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 944 drivers/gpu/drm/amd/amdgpu/atom.c dst >>= shift; dst 946 drivers/gpu/drm/amd/amdgpu/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 952 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t saved, dst; dst 956 drivers/gpu/drm/amd/amdgpu/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 958 drivers/gpu/drm/amd/amdgpu/atom.c dst = saved; dst 961 drivers/gpu/drm/amd/amdgpu/atom.c dst <<= shift; dst 962 drivers/gpu/drm/amd/amdgpu/atom.c dst &= atom_arg_mask[dst_align]; dst 963 drivers/gpu/drm/amd/amdgpu/atom.c dst >>= atom_arg_shift[dst_align]; dst 965 drivers/gpu/drm/amd/amdgpu/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 971 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t saved, dst; dst 975 drivers/gpu/drm/amd/amdgpu/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 977 drivers/gpu/drm/amd/amdgpu/atom.c dst = saved; dst 980 drivers/gpu/drm/amd/amdgpu/atom.c dst >>= shift; dst 981 drivers/gpu/drm/amd/amdgpu/atom.c dst &= atom_arg_mask[dst_align]; dst 982 drivers/gpu/drm/amd/amdgpu/atom.c dst >>= atom_arg_shift[dst_align]; dst 984 drivers/gpu/drm/amd/amdgpu/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 990 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t dst, src, saved; dst 993 drivers/gpu/drm/amd/amdgpu/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 996 drivers/gpu/drm/amd/amdgpu/atom.c dst -= src; dst 998 drivers/gpu/drm/amd/amdgpu/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 1031 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t dst, src; dst 1033 drivers/gpu/drm/amd/amdgpu/atom.c dst = 
atom_get_dst(ctx, arg, attr, ptr, NULL, 1); dst 1036 drivers/gpu/drm/amd/amdgpu/atom.c ctx->ctx->cs_equal = ((dst & src) == 0); dst 1043 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t dst, src, saved; dst 1046 drivers/gpu/drm/amd/amdgpu/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 1049 drivers/gpu/drm/amd/amdgpu/atom.c dst ^= src; dst 1051 drivers/gpu/drm/amd/amdgpu/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 1131 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c static void gfx_v10_0_read_wave_data(struct amdgpu_device *adev, uint32_t simd, uint32_t wave, uint32_t *dst, int *no_fields) dst 1139 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c dst[(*no_fields)++] = 2; dst 1140 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c dst[(*no_fields)++] = wave_read_ind(adev, wave, ixSQ_WAVE_STATUS); dst 1141 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c dst[(*no_fields)++] = wave_read_ind(adev, wave, ixSQ_WAVE_PC_LO); dst 1142 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c dst[(*no_fields)++] = wave_read_ind(adev, wave, ixSQ_WAVE_PC_HI); dst 1143 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c dst[(*no_fields)++] = wave_read_ind(adev, wave, ixSQ_WAVE_EXEC_LO); dst 1144 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c dst[(*no_fields)++] = wave_read_ind(adev, wave, ixSQ_WAVE_EXEC_HI); dst 1145 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c dst[(*no_fields)++] = wave_read_ind(adev, wave, ixSQ_WAVE_HW_ID1); dst 1146 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c dst[(*no_fields)++] = wave_read_ind(adev, wave, ixSQ_WAVE_HW_ID2); dst 1147 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c dst[(*no_fields)++] = wave_read_ind(adev, wave, ixSQ_WAVE_INST_DW0); dst 1148 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c dst[(*no_fields)++] = wave_read_ind(adev, wave, ixSQ_WAVE_GPR_ALLOC); dst 1149 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c dst[(*no_fields)++] = wave_read_ind(adev, wave, ixSQ_WAVE_LDS_ALLOC); dst 1150 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c dst[(*no_fields)++] = wave_read_ind(adev, wave, ixSQ_WAVE_TRAPSTS); dst 1151 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c dst[(*no_fields)++] = wave_read_ind(adev, wave, ixSQ_WAVE_IB_STS); dst 1152 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c dst[(*no_fields)++] = wave_read_ind(adev, wave, ixSQ_WAVE_IB_STS2); dst 1153 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c dst[(*no_fields)++] = wave_read_ind(adev, wave, ixSQ_WAVE_IB_DBG1); dst 1154 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c dst[(*no_fields)++] = wave_read_ind(adev, wave, ixSQ_WAVE_M0); dst 1159 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c uint32_t size, uint32_t *dst) dst 1165 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c dst); dst 1171 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c uint32_t *dst) dst 1175 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c start + SQIND_WAVE_VGPRS_OFFSET, size, dst); dst 3012 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c static void gfx_v6_0_read_wave_data(struct amdgpu_device *adev, uint32_t simd, uint32_t wave, uint32_t *dst, int *no_fields) dst 3015 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = 0; dst 3016 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_STATUS); dst 3017 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_PC_LO); dst 3018 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_PC_HI); dst 3019 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_EXEC_LO); dst 3020 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_EXEC_HI); dst 
3021 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_HW_ID); dst 3022 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_INST_DW0); dst 3023 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_INST_DW1); dst 3024 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_GPR_ALLOC); dst 3025 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_LDS_ALLOC); dst 3026 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_TRAPSTS); dst 3027 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_IB_STS); dst 3028 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_TBA_LO); dst 3029 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_TBA_HI); dst 3030 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_TMA_LO); dst 3031 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_TMA_HI); dst 3032 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_IB_DBG0); dst 3033 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_M0); dst 3038 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c uint32_t size, uint32_t *dst) dst 3042 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c start + SQIND_WAVE_SGPRS_OFFSET, size, dst); dst 4166 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c static void gfx_v7_0_read_wave_data(struct amdgpu_device *adev, uint32_t simd, uint32_t wave, uint32_t *dst, int *no_fields) dst 4169 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = 0; dst 4170 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_STATUS); dst 4171 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_PC_LO); dst 4172 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_PC_HI); dst 4173 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_EXEC_LO); dst 4174 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_EXEC_HI); dst 4175 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_HW_ID); dst 4176 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_INST_DW0); dst 4177 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_INST_DW1); dst 4178 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_GPR_ALLOC); dst 4179 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_LDS_ALLOC); dst 4180 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_TRAPSTS); dst 4181 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_IB_STS); dst 4182 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_TBA_LO); dst 4183 
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_TBA_HI); dst 4184 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_TMA_LO); dst 4185 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_TMA_HI); dst 4186 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_IB_DBG0); dst 4187 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_M0); dst 4192 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c uint32_t size, uint32_t *dst) dst 4196 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c start + SQIND_WAVE_SGPRS_OFFSET, size, dst); dst 5266 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c static void gfx_v8_0_read_wave_data(struct amdgpu_device *adev, uint32_t simd, uint32_t wave, uint32_t *dst, int *no_fields) dst 5269 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = 0; dst 5270 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_STATUS); dst 5271 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_PC_LO); dst 5272 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_PC_HI); dst 5273 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_EXEC_LO); dst 5274 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_EXEC_HI); dst 5275 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_HW_ID); dst 5276 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_INST_DW0); dst 5277 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_INST_DW1); dst 5278 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_GPR_ALLOC); dst 5279 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_LDS_ALLOC); dst 5280 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_TRAPSTS); dst 5281 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_IB_STS); dst 5282 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_TBA_LO); dst 5283 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_TBA_HI); dst 5284 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_TMA_LO); dst 5285 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_TMA_HI); dst 5286 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_IB_DBG0); dst 5287 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_M0); dst 5292 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c uint32_t size, uint32_t *dst) dst 5296 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c start + SQIND_WAVE_SGPRS_OFFSET, size, dst); dst 1801 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c static void gfx_v9_0_read_wave_data(struct amdgpu_device *adev, uint32_t simd, uint32_t wave, uint32_t *dst, int *no_fields) dst 1804 
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c dst[(*no_fields)++] = 1; dst 1805 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_STATUS); dst 1806 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_PC_LO); dst 1807 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_PC_HI); dst 1808 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_EXEC_LO); dst 1809 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_EXEC_HI); dst 1810 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_HW_ID); dst 1811 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_INST_DW0); dst 1812 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_INST_DW1); dst 1813 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_GPR_ALLOC); dst 1814 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_LDS_ALLOC); dst 1815 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_TRAPSTS); dst 1816 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_IB_STS); dst 1817 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_IB_DBG0); dst 1818 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c dst[(*no_fields)++] = wave_read_ind(adev, simd, wave, ixSQ_WAVE_M0); dst 1823 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c uint32_t size, uint32_t *dst) dst 1827 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c start + SQIND_WAVE_SGPRS_OFFSET, size, dst); dst 1833 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c uint32_t *dst) dst 1837 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c start + SQIND_WAVE_VGPRS_OFFSET, size, dst); dst 612 drivers/gpu/drm/amd/amdkfd/kfd_events.c struct kfd_hsa_memory_exception_data __user *dst; dst 621 drivers/gpu/drm/amd/amdkfd/kfd_events.c dst = &data[i].memory_exception_data; dst 623 drivers/gpu/drm/amd/amdkfd/kfd_events.c if (copy_to_user(dst, src, dst 3135 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c struct rect dst = { 0 }; /* stream addressable area */ dst 3144 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dst.width = stream->timing.h_addressable; dst 3145 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dst.height = stream->timing.v_addressable; dst 3150 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c if (src.width * dst.height < dst 3151 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c src.height * dst.width) { dst 3153 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dst.width = src.width * dst 3154 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dst.height / src.height; dst 3157 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dst.height = src.height * dst 3158 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dst.width / src.width; dst 3161 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dst = src; dst 3164 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dst.x = (stream->timing.h_addressable - dst.width) / 2; dst 3165 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dst.y = (stream->timing.v_addressable - dst.height) / 2; dst 3168 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dst.x += dm_state->underscan_hborder / 
2; dst 3169 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dst.y += dm_state->underscan_vborder / 2; dst 3170 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dst.width -= dm_state->underscan_hborder; dst 3171 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dst.height -= dm_state->underscan_vborder; dst 3176 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c stream->dst = dst; dst 3179 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dst.x, dst.y, dst.width, dst.height); dst 5899 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c bundle->stream_update.dst = acrtc_state->stream->dst; dst 6325 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c stream_update.dst = dm_new_crtc_state->stream->dst; dst 7146 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c stream_update.dst = new_dm_crtc_state->stream->dst; dst 2948 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c } else if (pipe[i].stream->dst.width != 0 && dst 2949 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c pipe[i].stream->dst.height != 0 && dst 2955 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c data->h_taps[num_displays + 4] = pipe[i].stream->src.width == pipe[i].stream->dst.width ? bw_int_to_fixed(1) : bw_int_to_fixed(2); dst 2956 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c data->v_taps[num_displays + 4] = pipe[i].stream->src.height == pipe[i].stream->dst.height ? bw_int_to_fixed(1) : bw_int_to_fixed(2); dst 2957 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c data->h_scale_ratio[num_displays + 4] = bw_frc_to_fixed(pipe[i].stream->src.width, pipe[i].stream->dst.width); dst 2958 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c data->v_scale_ratio[num_displays + 4] = bw_frc_to_fixed(pipe[i].stream->src.height, pipe[i].stream->dst.height); dst 1603 drivers/gpu/drm/amd/display/dc/core/dc.c if ((stream_update->dst.height != 0) && dst 1604 drivers/gpu/drm/amd/display/dc/core/dc.c (stream_update->dst.width != 0)) dst 1809 drivers/gpu/drm/amd/display/dc/core/dc.c if (update->dst.height && update->dst.width) dst 1810 drivers/gpu/drm/amd/display/dc/core/dc.c stream->dst = update->dst; dst 666 drivers/gpu/drm/amd/display/dc/core/dc_resource.c pipe_ctx->plane_res.scl_data.recout.x = stream->dst.x; dst 669 drivers/gpu/drm/amd/display/dc/core/dc_resource.c - stream->src.x) * stream->dst.width dst 673 drivers/gpu/drm/amd/display/dc/core/dc_resource.c stream->dst.width / stream->src.width; dst 675 drivers/gpu/drm/amd/display/dc/core/dc_resource.c stream->dst.x + stream->dst.width) dst 677 drivers/gpu/drm/amd/display/dc/core/dc_resource.c stream->dst.x + stream->dst.width dst 680 drivers/gpu/drm/amd/display/dc/core/dc_resource.c pipe_ctx->plane_res.scl_data.recout.y = stream->dst.y; dst 683 drivers/gpu/drm/amd/display/dc/core/dc_resource.c - stream->src.y) * stream->dst.height dst 687 drivers/gpu/drm/amd/display/dc/core/dc_resource.c stream->dst.height / stream->src.height; dst 689 drivers/gpu/drm/amd/display/dc/core/dc_resource.c stream->dst.y + stream->dst.height) dst 691 drivers/gpu/drm/amd/display/dc/core/dc_resource.c stream->dst.y + stream->dst.height dst 720 drivers/gpu/drm/amd/display/dc/core/dc_resource.c const int out_w = stream->dst.width; dst 721 drivers/gpu/drm/amd/display/dc/core/dc_resource.c const int out_h = stream->dst.height; dst 880 drivers/gpu/drm/amd/display/dc/core/dc_resource.c recout_skip_h = data->recout.x - (stream->dst.x + (plane_state->dst_rect.x - stream->src.x) dst 881 drivers/gpu/drm/amd/display/dc/core/dc_resource.c * stream->dst.width / stream->src.width - dst 883 
drivers/gpu/drm/amd/display/dc/core/dc_resource.c * stream->dst.width / stream->src.width); dst 884 drivers/gpu/drm/amd/display/dc/core/dc_resource.c recout_skip_v = data->recout.y - (stream->dst.y + (plane_state->dst_rect.y - stream->src.y) dst 885 drivers/gpu/drm/amd/display/dc/core/dc_resource.c * stream->dst.height / stream->src.height - dst 887 drivers/gpu/drm/amd/display/dc/core/dc_resource.c * stream->dst.height / stream->src.height); dst 1569 drivers/gpu/drm/amd/display/dc/core/dc_resource.c if (memcmp(&old_stream->dst, dst 1570 drivers/gpu/drm/amd/display/dc/core/dc_resource.c &stream->dst, dst 644 drivers/gpu/drm/amd/display/dc/core/dc_stream.c stream->dst.x, dst 645 drivers/gpu/drm/amd/display/dc/core/dc_stream.c stream->dst.y, dst 646 drivers/gpu/drm/amd/display/dc/core/dc_stream.c stream->dst.width, dst 647 drivers/gpu/drm/amd/display/dc/core/dc_stream.c stream->dst.height, dst 132 drivers/gpu/drm/amd/display/dc/dc_stream.h struct rect dst; /* stream addressable area */ dst 221 drivers/gpu/drm/amd/display/dc/dc_stream.h struct rect dst; dst 2768 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c display_pipe_dest_params_st *dst = &pipes[pipe_idx_unsplit].pipe.dest; dst 2770 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dst->vstartup_start = context->bw_ctx.dml.vba.VStartup[pipe_idx_unsplit]; dst 2771 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dst->vupdate_offset = context->bw_ctx.dml.vba.VUpdateOffsetPix[pipe_idx_unsplit]; dst 2772 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dst->vupdate_width = context->bw_ctx.dml.vba.VUpdateWidthPix[pipe_idx_unsplit]; dst 2773 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dst->vready_offset = context->bw_ctx.dml.vba.VReadyOffsetPix[pipe_idx_unsplit]; dst 778 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c const display_pipe_dest_params_st *dst = &e2e_pipe_param[pipe_idx].pipe.dest; dst 788 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c unsigned int htotal = dst->htotal; dst 790 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c unsigned int hblank_end = dst->hblank_end; dst 791 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c unsigned int vblank_start = dst->vblank_start; dst 792 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c unsigned int vblank_end = dst->vblank_end; dst 798 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c double pclk_freq_in_mhz = dst->pixel_rate_mhz; dst 799 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c bool interlaced = dst->interlaced; dst 1000 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c vupdate_offset = dst->vupdate_offset; dst 1001 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c vupdate_width = dst->vupdate_width; dst 1002 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c vready_offset = dst->vready_offset; dst 1024 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c vstartup_start = dst->vstartup_start; dst 1199 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c if (dst->full_recout_width == 0 && !dst->odm_combine) { dst 1202 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c full_recout_width = dst->recout_width * 2; // assume half split for dcn1 dst 1204 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c full_recout_width = dst->full_recout_width; dst 1206 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c full_recout_width = 
dst->recout_width; dst 1212 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c dst->odm_combine, dst 1214 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c dst->hactive, dst 1223 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c dst->odm_combine, dst 1225 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c dst->hactive, dst 1248 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c dst->odm_combine, dst 1250 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c dst->hactive, dst 1259 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c dst->odm_combine, dst 1261 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c dst->hactive, dst 1287 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c dst->odm_combine, dst 1289 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c dst->hactive, dst 1297 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c dst->odm_combine, dst 1299 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c dst->hactive, dst 1319 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c dst->odm_combine, dst 1321 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c dst->hactive, dst 1329 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c dst->odm_combine, dst 1331 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c dst->hactive, dst 778 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c const display_pipe_dest_params_st *dst = &e2e_pipe_param[pipe_idx].pipe.dest; dst 788 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c unsigned int htotal = dst->htotal; dst 790 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c unsigned int hblank_end = dst->hblank_end; dst 791 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c unsigned int vblank_start = dst->vblank_start; dst 792 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c unsigned int vblank_end = dst->vblank_end; dst 798 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c double pclk_freq_in_mhz = dst->pixel_rate_mhz; dst 799 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c bool interlaced = dst->interlaced; dst 1000 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c vupdate_offset = dst->vupdate_offset; dst 1001 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c vupdate_width = dst->vupdate_width; dst 1002 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c vready_offset = dst->vready_offset; dst 1024 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c vstartup_start = dst->vstartup_start; dst 1199 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c if (dst->full_recout_width == 0 && !dst->odm_combine) { dst 1202 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c full_recout_width = dst->recout_width * 2; // assume half split for dcn1 dst 1204 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c full_recout_width = dst->full_recout_width; dst 1206 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c full_recout_width = dst->recout_width; dst 1212 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c dst->odm_combine, dst 1214 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c dst->hactive, dst 1223 
drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c dst->odm_combine, dst 1225 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c dst->hactive, dst 1248 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c dst->odm_combine, dst 1250 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c dst->hactive, dst 1259 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c dst->odm_combine, dst 1261 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c dst->hactive, dst 1287 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c dst->odm_combine, dst 1289 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c dst->hactive, dst 1297 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c dst->odm_combine, dst 1299 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c dst->hactive, dst 1319 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c dst->odm_combine, dst 1321 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c dst->hactive, dst 1329 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c dst->odm_combine, dst 1331 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c dst->hactive, dst 825 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c const display_pipe_dest_params_st *dst = &e2e_pipe_param[pipe_idx].pipe.dest; dst 835 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c unsigned int htotal = dst->htotal; dst 837 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c unsigned int hblank_end = dst->hblank_end; dst 838 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c unsigned int vblank_start = dst->vblank_start; dst 839 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c unsigned int vblank_end = dst->vblank_end; dst 845 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c double pclk_freq_in_mhz = dst->pixel_rate_mhz; dst 846 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c bool interlaced = dst->interlaced; dst 1040 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c vupdate_offset = dst->vupdate_offset; dst 1041 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c vupdate_width = dst->vupdate_width; dst 1042 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c vready_offset = dst->vready_offset; dst 1064 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c vstartup_start = dst->vstartup_start; dst 1251 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c if (dst->full_recout_width == 0 && !dst->odm_combine) { dst 1255 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c full_recout_width = dst->recout_width * 2; // assume half split for dcn1 dst 1257 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c full_recout_width = dst->full_recout_width; dst 1259 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c full_recout_width = dst->recout_width; dst 1266 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c dst->odm_combine, dst 1268 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c dst->hactive, dst 1278 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c dst->odm_combine, dst 1280 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c dst->hactive, dst 1305 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c 
dst->odm_combine, dst 1307 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c dst->hactive, dst 1317 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c dst->odm_combine, dst 1319 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c dst->hactive, dst 1348 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c dst->odm_combine, dst 1350 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c dst->hactive, dst 1359 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c dst->odm_combine, dst 1361 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c dst->hactive, dst 1384 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c dst->odm_combine, dst 1386 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c dst->hactive, dst 1395 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c dst->odm_combine, dst 1397 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c dst->hactive, dst 366 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c display_pipe_dest_params_st *dst = &pipes[j].pipe.dest; dst 406 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c mode_lib->vba.Interlace[mode_lib->vba.NumberOfActivePlanes] = dst->interlaced; dst 407 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c if (dst->interlaced && !ip->ptoi_supported) { dst 415 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c mode_lib->vba.HTotal[mode_lib->vba.NumberOfActivePlanes] = dst->htotal; dst 416 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c mode_lib->vba.VTotal[mode_lib->vba.NumberOfActivePlanes] = dst->vtotal; dst 427 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c mode_lib->vba.HActive[mode_lib->vba.NumberOfActivePlanes] = dst->hactive; dst 428 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c mode_lib->vba.VActive[mode_lib->vba.NumberOfActivePlanes] = dst->vactive; dst 432 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c dst->recout_width; // TODO: or should this be full_recout_width???...maybe only when in hsplit mode? 
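[Illustrative aside, not part of the index.] The display_mode_vba.c entries above all follow one pattern: per-pipe destination timing fields are read from a display_pipe_dest_params_st pointer named dst and copied into the mode-library state (mode_lib->vba.*). The sketch below reproduces that copy pattern in a self-contained form. Only the field names that appear in the entries themselves (htotal, vtotal, hactive, vactive, recout_width, pixel_rate_mhz, interlaced, odm_combine) come from the listed source; the struct definitions, helper name and main() are simplified stand-ins invented for the example, not the real DML types.

    /*
     * Minimal sketch of the dest-params import pattern seen in
     * display_mode_vba.c. Struct layouts here are hypothetical
     * stand-ins; the real display_pipe_dest_params_st and vba state
     * carry many more fields.
     */
    #include <stdbool.h>
    #include <stdio.h>

    struct pipe_dest_params {          /* stand-in for display_pipe_dest_params_st */
        unsigned int htotal, vtotal;
        unsigned int hactive, vactive;
        unsigned int recout_width;
        double pixel_rate_mhz;
        bool interlaced;
        bool odm_combine;
    };

    struct mode_lib_state {            /* stand-in for the mode_lib->vba block */
        unsigned int HTotal, VTotal, HActive, VActive, ViewportWidth;
        double PixelClock;
        bool Interlace, ODMCombineEnabled;
    };

    /* Copy one pipe's destination timing into the mode-library state. */
    static void import_dest_params(struct mode_lib_state *vba,
                                   const struct pipe_dest_params *dst)
    {
        vba->Interlace         = dst->interlaced;
        vba->HTotal            = dst->htotal;
        vba->VTotal            = dst->vtotal;
        vba->HActive           = dst->hactive;
        vba->VActive           = dst->vactive;
        vba->ViewportWidth     = dst->recout_width;  /* cf. the TODO in the real code */
        vba->ODMCombineEnabled = dst->odm_combine;
        vba->PixelClock        = dst->pixel_rate_mhz;
    }

    int main(void)
    {
        struct pipe_dest_params dst = {
            .htotal = 2200, .vtotal = 1125,
            .hactive = 1920, .vactive = 1080,
            .recout_width = 1920,
            .pixel_rate_mhz = 148.5,
            .interlaced = false,
            .odm_combine = false,
        };
        struct mode_lib_state vba = { 0 };

        import_dest_params(&vba, &dst);
        printf("%ux%u @ %.1f MHz\n", vba.HActive, vba.VActive, vba.PixelClock);
        return 0;
    }

The point of the sketch is only the direction of data flow: dst is the per-pipe destination descriptor handed to the mode library, and each listed occurrence is a read of one of its timing fields.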
dst 434 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c dst->odm_combine; dst 505 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c mode_lib->vba.PixelClock[mode_lib->vba.NumberOfActivePlanes] = dst->pixel_rate_mhz; dst 506 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c mode_lib->vba.PixelClockBackEnd[mode_lib->vba.NumberOfActivePlanes] = dst->pixel_rate_mhz; dst 568 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c OTGInstPlane[mode_lib->vba.NumberOfActivePlanes] = dst->otg_inst; dst 571 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c mode_lib->vba.UseMaximumVStartup = dst->use_maximum_vstartup; dst 574 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c || dst->use_maximum_vstartup; dst 576 drivers/gpu/drm/amd/display/dc/dml/display_mode_vba.c if (dst->odm_combine && !src->is_hsplit) dst 269 drivers/gpu/drm/arm/hdlcd_crtc.c dest_h = drm_rect_height(&plane->state->dst); dst 160 drivers/gpu/drm/armada/armada_overlay.c drm_rect_width(&old_state->dst) != drm_rect_width(&state->dst)) { dst 163 drivers/gpu/drm/armada/armada_overlay.c drm_rect_width(&state->dst)) dst 134 drivers/gpu/drm/armada/armada_plane.c if ((state->dst.y1 | state->dst.y2) & 1) dst 137 drivers/gpu/drm/armada/armada_plane.c st->dst_yx = state->dst.y1 >> 1; dst 138 drivers/gpu/drm/armada/armada_plane.c st->dst_hw = drm_rect_height(&state->dst) >> 1; dst 141 drivers/gpu/drm/armada/armada_plane.c st->dst_yx = state->dst.y1; dst 142 drivers/gpu/drm/armada/armada_plane.c st->dst_hw = drm_rect_height(&state->dst); dst 148 drivers/gpu/drm/armada/armada_plane.c st->dst_yx |= state->dst.x1 & 0x0000ffff; dst 150 drivers/gpu/drm/armada/armada_plane.c st->dst_hw |= drm_rect_width(&state->dst) & 0x0000ffff; dst 230 drivers/gpu/drm/armada/armada_plane.c drm_rect_width(&old_state->dst) != drm_rect_width(&state->dst)) { dst 233 drivers/gpu/drm/armada/armada_plane.c drm_rect_width(&state->dst)) dst 1096 drivers/gpu/drm/ast/ast_mode.c static u32 copy_cursor_image(u8 *src, u8 *dst, int width, int height) dst 1116 drivers/gpu/drm/ast/ast_mode.c dstxor = (u8 *)dst + last_alpha_dst_delta + (AST_MAX_HWC_HEIGHT - height) * alpha_dst_delta; dst 1166 drivers/gpu/drm/ast/ast_mode.c u8 *src, *dst; dst 1192 drivers/gpu/drm/ast/ast_mode.c dst = drm_gem_vram_kmap(drm_gem_vram_of_gem(ast->cursor_cache), dst 1194 drivers/gpu/drm/ast/ast_mode.c if (IS_ERR(dst)) { dst 1195 drivers/gpu/drm/ast/ast_mode.c ret = PTR_ERR(dst); dst 1204 drivers/gpu/drm/ast/ast_mode.c dst += (AST_HWC_SIZE + AST_HWC_SIGNATURE_SIZE)*ast->next_cursor; dst 1207 drivers/gpu/drm/ast/ast_mode.c csum = copy_cursor_image(src, dst, width, height); dst 1213 drivers/gpu/drm/ast/ast_mode.c u8 *dst = drm_gem_vram_kmap(dst_gbo, false, NULL); dst 1214 drivers/gpu/drm/ast/ast_mode.c dst += (AST_HWC_SIZE + AST_HWC_SIGNATURE_SIZE)*ast->next_cursor + AST_HWC_SIZE; dst 1215 drivers/gpu/drm/ast/ast_mode.c writel(csum, dst); dst 1216 drivers/gpu/drm/ast/ast_mode.c writel(width, dst + AST_HWC_SIGNATURE_SizeX); dst 1217 drivers/gpu/drm/ast/ast_mode.c writel(height, dst + AST_HWC_SIGNATURE_SizeY); dst 1218 drivers/gpu/drm/ast/ast_mode.c writel(0, dst + AST_HWC_SIGNATURE_HOTSPOTX); dst 1219 drivers/gpu/drm/ast/ast_mode.c writel(0, dst + AST_HWC_SIGNATURE_HOTSPOTY); dst 624 drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_plane.c state->crtc_x = s->dst.x1; dst 625 drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_plane.c state->crtc_y = s->dst.y1; dst 626 drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_plane.c state->crtc_w = drm_rect_width(&s->dst); dst 627 drivers/gpu/drm/atmel-hlcdc/atmel_hlcdc_plane.c 
state->crtc_h = drm_rect_height(&s->dst); dst 472 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_update_array(u8 *dst, u8 *src, int count) dst 475 drivers/gpu/drm/bridge/sil-sii8620.c *src ^= *dst; dst 476 drivers/gpu/drm/bridge/sil-sii8620.c *dst++ ^= *src++; dst 157 drivers/gpu/drm/bridge/synopsys/dw-hdmi-ahb-audio.c u32 *dst = dw->buf_dst + offset; dst 167 drivers/gpu/drm/bridge/synopsys/dw-hdmi-ahb-audio.c *dst++ = sample | b; dst 185 drivers/gpu/drm/bridge/synopsys/dw-hdmi-ahb-audio.c u32 *dst = dw->buf_dst + offset; dst 204 drivers/gpu/drm/bridge/synopsys/dw-hdmi-ahb-audio.c *dst++ = sample; dst 776 drivers/gpu/drm/drm_atomic_helper.c struct drm_rect *dst = &plane_state->dst; dst 784 drivers/gpu/drm/drm_atomic_helper.c *dst = drm_plane_state_dest(plane_state); dst 805 drivers/gpu/drm/drm_atomic_helper.c hscale = drm_rect_calc_hscale(src, dst, min_scale, max_scale); dst 806 drivers/gpu/drm/drm_atomic_helper.c vscale = drm_rect_calc_vscale(src, dst, min_scale, max_scale); dst 810 drivers/gpu/drm/drm_atomic_helper.c drm_rect_debug_print("dst: ", &plane_state->dst, false); dst 817 drivers/gpu/drm/drm_atomic_helper.c plane_state->visible = drm_rect_clip_scaled(src, dst, &clip); dst 831 drivers/gpu/drm/drm_atomic_helper.c if (!can_position && !drm_rect_equals(dst, &clip)) { dst 833 drivers/gpu/drm/drm_atomic_helper.c drm_rect_debug_print("dst: ", dst, false); dst 1959 drivers/gpu/drm/drm_dp_mst_topology.c struct drm_dp_mst_branch *mstb = txmsg->dst; dst 2071 drivers/gpu/drm/drm_dp_mst_topology.c txmsg->dst->tx_slots[txmsg->seqno] = NULL; dst 2091 drivers/gpu/drm/drm_dp_mst_topology.c ARRAY_SIZE(txmsg->dst->tx_slots)); dst 2092 drivers/gpu/drm/drm_dp_mst_topology.c txmsg->dst->tx_slots[txmsg->seqno] = NULL; dst 2117 drivers/gpu/drm/drm_dp_mst_topology.c txmsg->dst = mstb; dst 2171 drivers/gpu/drm/drm_dp_mst_topology.c txmsg->dst = mstb; dst 2273 drivers/gpu/drm/drm_dp_mst_topology.c txmsg->dst = mstb; dst 2317 drivers/gpu/drm/drm_dp_mst_topology.c txmsg->dst = port->parent; dst 2556 drivers/gpu/drm/drm_dp_mst_topology.c txmsg->dst = port->parent; dst 2609 drivers/gpu/drm/drm_dp_mst_topology.c txmsg->dst = mstb; dst 2646 drivers/gpu/drm/drm_dp_mst_topology.c txmsg->dst = mstb; dst 4079 drivers/gpu/drm/drm_dp_mst_topology.c txmsg->dst = mstb; dst 388 drivers/gpu/drm/drm_fb_helper.c void *dst = fb_helper->buffer->vaddr + offset; dst 393 drivers/gpu/drm/drm_fb_helper.c memcpy(dst, src, len); dst 395 drivers/gpu/drm/drm_fb_helper.c dst += fb->pitches[0]; dst 36 drivers/gpu/drm/drm_format_helper.c void drm_fb_memcpy(void *dst, void *vaddr, struct drm_framebuffer *fb, dst 45 drivers/gpu/drm/drm_format_helper.c memcpy(dst, vaddr, len); dst 47 drivers/gpu/drm/drm_format_helper.c dst += len; dst 62 drivers/gpu/drm/drm_format_helper.c void drm_fb_memcpy_dstclip(void __iomem *dst, void *vaddr, dst 72 drivers/gpu/drm/drm_format_helper.c dst += offset; dst 74 drivers/gpu/drm/drm_format_helper.c memcpy_toio(dst, vaddr, len); dst 76 drivers/gpu/drm/drm_format_helper.c dst += fb->pitches[0]; dst 88 drivers/gpu/drm/drm_format_helper.c void drm_fb_swab16(u16 *dst, void *vaddr, struct drm_framebuffer *fb, dst 109 drivers/gpu/drm/drm_format_helper.c *dst++ = swab16(*src++); dst 148 drivers/gpu/drm/drm_format_helper.c void drm_fb_xrgb8888_to_rgb565(void *dst, void *vaddr, dst 169 drivers/gpu/drm/drm_format_helper.c drm_fb_xrgb8888_to_rgb565_line(dst, sbuf, linepixels, swab); dst 171 drivers/gpu/drm/drm_format_helper.c dst += dst_len; dst 193 drivers/gpu/drm/drm_format_helper.c void 
drm_fb_xrgb8888_to_rgb565_dstclip(void __iomem *dst, unsigned int dst_pitch, dst 207 drivers/gpu/drm/drm_format_helper.c dst += clip_offset(clip, dst_pitch, sizeof(u16)); dst 210 drivers/gpu/drm/drm_format_helper.c memcpy_toio(dst, dbuf, dst_len); dst 212 drivers/gpu/drm/drm_format_helper.c dst += dst_len; dst 245 drivers/gpu/drm/drm_format_helper.c void drm_fb_xrgb8888_to_rgb888_dstclip(void __iomem *dst, unsigned int dst_pitch, dst 259 drivers/gpu/drm/drm_format_helper.c dst += clip_offset(clip, dst_pitch, sizeof(u16)); dst 262 drivers/gpu/drm/drm_format_helper.c memcpy_toio(dst, dbuf, dst_len); dst 264 drivers/gpu/drm/drm_format_helper.c dst += dst_len; dst 287 drivers/gpu/drm/drm_format_helper.c void drm_fb_xrgb8888_to_gray8(u8 *dst, void *vaddr, struct drm_framebuffer *fb, dst 316 drivers/gpu/drm/drm_format_helper.c *dst++ = (3 * r + 6 * g + b) / 10; dst 200 drivers/gpu/drm/drm_mipi_dbi.c int mipi_dbi_buf_copy(void *dst, struct drm_framebuffer *fb, dst 220 drivers/gpu/drm/drm_mipi_dbi.c drm_fb_swab16(dst, src, fb, clip); dst 222 drivers/gpu/drm/drm_mipi_dbi.c drm_fb_memcpy(dst, src, fb, clip); dst 225 drivers/gpu/drm/drm_mipi_dbi.c drm_fb_xrgb8888_to_rgb565(dst, src, fb, clip, swap); dst 784 drivers/gpu/drm/drm_mipi_dbi.c u8 *dst; dst 798 drivers/gpu/drm/drm_mipi_dbi.c dst = dbi->tx_buf9; dst 799 drivers/gpu/drm/drm_mipi_dbi.c memset(dst, 0, 9); dst 800 drivers/gpu/drm/drm_mipi_dbi.c dst[8] = *src; dst 818 drivers/gpu/drm/drm_mipi_dbi.c dst = dbi->tx_buf9; dst 824 drivers/gpu/drm/drm_mipi_dbi.c memset(dst, 0, 9); dst 829 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = carry | BIT(8 - i) | (val >> i); dst 833 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = carry | BIT(8 - i) | (val >> i); dst 837 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = carry; dst 841 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = carry | BIT(8 - i) | (val >> i); dst 844 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = carry; dst 852 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = BIT(7) | (src[1] >> 1); dst 853 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = (src[1] << 7) | BIT(6) | (src[0] >> 2); dst 854 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = (src[0] << 6) | BIT(5) | (src[3] >> 3); dst 855 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = (src[3] << 5) | BIT(4) | (src[2] >> 4); dst 856 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = (src[2] << 4) | BIT(3) | (src[5] >> 5); dst 857 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = (src[5] << 3) | BIT(2) | (src[4] >> 6); dst 858 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = (src[4] << 2) | BIT(1) | (src[7] >> 7); dst 859 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = (src[7] << 1) | BIT(0); dst 860 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = src[6]; dst 862 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = BIT(7) | (src[0] >> 1); dst 863 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = (src[0] << 7) | BIT(6) | (src[1] >> 2); dst 864 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = (src[1] << 6) | BIT(5) | (src[2] >> 3); dst 865 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = (src[2] << 5) | BIT(4) | (src[3] >> 4); dst 866 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = (src[3] << 4) | BIT(3) | (src[4] >> 5); dst 867 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = (src[4] << 3) | BIT(2) | (src[5] >> 6); dst 868 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = (src[5] << 2) | BIT(1) | (src[6] >> 7); dst 869 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = (src[6] << 1) | BIT(0); dst 870 drivers/gpu/drm/drm_mipi_dbi.c *dst++ = src[7]; dst 915 drivers/gpu/drm/drm_modes.c void drm_mode_copy(struct drm_display_mode *dst, const struct drm_display_mode *src) dst 917 drivers/gpu/drm/drm_modes.c struct list_head head = dst->head; dst 919 
drivers/gpu/drm/drm_modes.c *dst = *src; dst 920 drivers/gpu/drm/drm_modes.c dst->head = head; dst 105 drivers/gpu/drm/drm_plane_helper.c struct drm_rect *dst, dst 121 drivers/gpu/drm/drm_plane_helper.c .crtc_x = dst->x1, dst 122 drivers/gpu/drm/drm_plane_helper.c .crtc_y = dst->y1, dst 123 drivers/gpu/drm/drm_plane_helper.c .crtc_w = drm_rect_width(dst), dst 124 drivers/gpu/drm/drm_plane_helper.c .crtc_h = drm_rect_height(dst), dst 143 drivers/gpu/drm/drm_plane_helper.c *dst = plane_state.dst; dst 55 drivers/gpu/drm/drm_rect.c static u32 clip_scaled(u32 src, u32 dst, u32 clip) dst 59 drivers/gpu/drm/drm_rect.c if (dst == 0) dst 62 drivers/gpu/drm/drm_rect.c tmp = mul_u32_u32(src, dst - clip); dst 68 drivers/gpu/drm/drm_rect.c if (src < (dst << 16)) dst 69 drivers/gpu/drm/drm_rect.c return DIV_ROUND_UP_ULL(tmp, dst); dst 71 drivers/gpu/drm/drm_rect.c return DIV_ROUND_DOWN_ULL(tmp, dst); dst 87 drivers/gpu/drm/drm_rect.c bool drm_rect_clip_scaled(struct drm_rect *src, struct drm_rect *dst, dst 92 drivers/gpu/drm/drm_rect.c diff = clip->x1 - dst->x1; dst 95 drivers/gpu/drm/drm_rect.c drm_rect_width(dst), diff); dst 98 drivers/gpu/drm/drm_rect.c dst->x1 = clip->x1; dst 100 drivers/gpu/drm/drm_rect.c diff = clip->y1 - dst->y1; dst 103 drivers/gpu/drm/drm_rect.c drm_rect_height(dst), diff); dst 106 drivers/gpu/drm/drm_rect.c dst->y1 = clip->y1; dst 108 drivers/gpu/drm/drm_rect.c diff = dst->x2 - clip->x2; dst 111 drivers/gpu/drm/drm_rect.c drm_rect_width(dst), diff); dst 114 drivers/gpu/drm/drm_rect.c dst->x2 = clip->x2; dst 116 drivers/gpu/drm/drm_rect.c diff = dst->y2 - clip->y2; dst 119 drivers/gpu/drm/drm_rect.c drm_rect_height(dst), diff); dst 122 drivers/gpu/drm/drm_rect.c dst->y2 = clip->y2; dst 125 drivers/gpu/drm/drm_rect.c return drm_rect_visible(dst); dst 129 drivers/gpu/drm/drm_rect.c static int drm_calc_scale(int src, int dst) dst 133 drivers/gpu/drm/drm_rect.c if (WARN_ON(src < 0 || dst < 0)) dst 136 drivers/gpu/drm/drm_rect.c if (dst == 0) dst 139 drivers/gpu/drm/drm_rect.c if (src > (dst << 16)) dst 140 drivers/gpu/drm/drm_rect.c return DIV_ROUND_UP(src, dst); dst 142 drivers/gpu/drm/drm_rect.c scale = src / dst; dst 165 drivers/gpu/drm/drm_rect.c const struct drm_rect *dst, dst 169 drivers/gpu/drm/drm_rect.c int dst_w = drm_rect_width(dst); dst 200 drivers/gpu/drm/drm_rect.c const struct drm_rect *dst, dst 204 drivers/gpu/drm/drm_rect.c int dst_h = drm_rect_height(dst); dst 740 drivers/gpu/drm/exynos/exynos_drm_fimc.c struct drm_exynos_ipp_task_rect *dst) dst 758 drivers/gpu/drm/exynos/exynos_drm_fimc.c dst_w = dst->h; dst 759 drivers/gpu/drm/exynos/exynos_drm_fimc.c dst_h = dst->w; dst 761 drivers/gpu/drm/exynos/exynos_drm_fimc.c dst_w = dst->w; dst 762 drivers/gpu/drm/exynos/exynos_drm_fimc.c dst_h = dst->h; dst 1096 drivers/gpu/drm/exynos/exynos_drm_fimc.c fimc_dst_set_fmt(ctx, task->dst.buf.fourcc, task->dst.buf.modifier); dst 1098 drivers/gpu/drm/exynos/exynos_drm_fimc.c fimc_dst_set_size(ctx, &task->dst); dst 1099 drivers/gpu/drm/exynos/exynos_drm_fimc.c fimc_dst_set_addr(ctx, &task->dst); dst 1100 drivers/gpu/drm/exynos/exynos_drm_fimc.c fimc_set_prescaler(ctx, &ctx->sc, &task->src.rect, &task->dst.rect); dst 708 drivers/gpu/drm/exynos/exynos_drm_gsc.c static int gsc_get_ratio_shift(struct gsc_context *ctx, u32 src, u32 dst, dst 711 drivers/gpu/drm/exynos/exynos_drm_gsc.c DRM_DEV_DEBUG_KMS(ctx->dev, "src[%d]dst[%d]\n", src, dst); dst 713 drivers/gpu/drm/exynos/exynos_drm_gsc.c if (src >= dst * 8) { dst 716 drivers/gpu/drm/exynos/exynos_drm_gsc.c } else if (src >= dst * 
4) dst 718 drivers/gpu/drm/exynos/exynos_drm_gsc.c else if (src >= dst * 2) dst 745 drivers/gpu/drm/exynos/exynos_drm_gsc.c struct drm_exynos_ipp_task_rect *dst) dst 755 drivers/gpu/drm/exynos/exynos_drm_gsc.c dst_w = dst->h; dst 756 drivers/gpu/drm/exynos/exynos_drm_gsc.c dst_h = dst->w; dst 758 drivers/gpu/drm/exynos/exynos_drm_gsc.c dst_w = dst->w; dst 759 drivers/gpu/drm/exynos/exynos_drm_gsc.c dst_h = dst->h; dst 1134 drivers/gpu/drm/exynos/exynos_drm_gsc.c gsc_dst_set_fmt(ctx, task->dst.buf.fourcc, task->dst.buf.modifier); dst 1135 drivers/gpu/drm/exynos/exynos_drm_gsc.c gsc_dst_set_size(ctx, &task->dst); dst 1136 drivers/gpu/drm/exynos/exynos_drm_gsc.c gsc_dst_set_addr(ctx, 0, &task->dst); dst 1137 drivers/gpu/drm/exynos/exynos_drm_gsc.c gsc_set_prescaler(ctx, &ctx->sc, &task->src.rect, &task->dst.rect); dst 269 drivers/gpu/drm/exynos/exynos_drm_ipp.c task->src.rect.w = task->dst.rect.w = UINT_MAX; dst 270 drivers/gpu/drm/exynos/exynos_drm_ipp.c task->src.rect.h = task->dst.rect.h = UINT_MAX; dst 291 drivers/gpu/drm/exynos/exynos_drm_ipp.c offsetof(struct exynos_drm_ipp_task, dst.buf), dst 300 drivers/gpu/drm/exynos/exynos_drm_ipp.c offsetof(struct exynos_drm_ipp_task, dst.rect), dst 399 drivers/gpu/drm/exynos/exynos_drm_ipp.c exynos_drm_ipp_task_release_buf(&task->dst); dst 505 drivers/gpu/drm/exynos/exynos_drm_ipp.c static inline bool __scale_limit_check(unsigned int src, unsigned int dst, dst 508 drivers/gpu/drm/exynos/exynos_drm_ipp.c if ((max && (dst << 16) > src * max) || dst 509 drivers/gpu/drm/exynos/exynos_drm_ipp.c (min && (dst << 16) < src * min)) { dst 511 drivers/gpu/drm/exynos/exynos_drm_ipp.c src, dst, dst 521 drivers/gpu/drm/exynos/exynos_drm_ipp.c struct drm_exynos_ipp_task_rect *dst, dst 537 drivers/gpu/drm/exynos/exynos_drm_ipp.c dw = (!swap) ? dst->w : dst->h; dst 538 drivers/gpu/drm/exynos/exynos_drm_ipp.c dh = (!swap) ? dst->h : dst->w; dst 550 drivers/gpu/drm/exynos/exynos_drm_ipp.c struct exynos_drm_ipp_buffer *dst, dst 592 drivers/gpu/drm/exynos/exynos_drm_ipp.c buf == dst ? 
swap : false); dst 595 drivers/gpu/drm/exynos/exynos_drm_ipp.c ret = exynos_drm_ipp_check_scale_limits(&src->rect, &dst->rect, dst 604 drivers/gpu/drm/exynos/exynos_drm_ipp.c struct exynos_drm_ipp_buffer *src = &task->src, *dst = &task->dst; dst 617 drivers/gpu/drm/exynos/exynos_drm_ipp.c if (dst->rect.w == UINT_MAX) dst 618 drivers/gpu/drm/exynos/exynos_drm_ipp.c dst->rect.w = dst->buf.width; dst 619 drivers/gpu/drm/exynos/exynos_drm_ipp.c if (dst->rect.h == UINT_MAX) dst 620 drivers/gpu/drm/exynos/exynos_drm_ipp.c dst->rect.h = dst->buf.height; dst 624 drivers/gpu/drm/exynos/exynos_drm_ipp.c dst->rect.x + dst->rect.w > (dst->buf.width) || dst 625 drivers/gpu/drm/exynos/exynos_drm_ipp.c dst->rect.y + dst->rect.h > (dst->buf.height)) { dst 632 drivers/gpu/drm/exynos/exynos_drm_ipp.c if ((!swap && (src->rect.w != dst->rect.w || dst 633 drivers/gpu/drm/exynos/exynos_drm_ipp.c src->rect.h != dst->rect.h)) || dst 634 drivers/gpu/drm/exynos/exynos_drm_ipp.c (swap && (src->rect.w != dst->rect.h || dst 635 drivers/gpu/drm/exynos/exynos_drm_ipp.c src->rect.h != dst->rect.w))) dst 639 drivers/gpu/drm/exynos/exynos_drm_ipp.c (src->rect.x || src->rect.y || dst->rect.x || dst->rect.y)) || dst 643 drivers/gpu/drm/exynos/exynos_drm_ipp.c src->buf.fourcc != dst->buf.fourcc)) { dst 649 drivers/gpu/drm/exynos/exynos_drm_ipp.c ret = exynos_drm_ipp_check_format(task, src, src, dst, rotate, swap); dst 653 drivers/gpu/drm/exynos/exynos_drm_ipp.c ret = exynos_drm_ipp_check_format(task, dst, src, dst, false, swap); dst 666 drivers/gpu/drm/exynos/exynos_drm_ipp.c struct exynos_drm_ipp_buffer *src = &task->src, *dst = &task->dst; dst 679 drivers/gpu/drm/exynos/exynos_drm_ipp.c ret = exynos_drm_ipp_task_setup_buffer(dst, filp); dst 88 drivers/gpu/drm/exynos/exynos_drm_ipp.h struct exynos_drm_ipp_buffer dst; dst 229 drivers/gpu/drm/exynos/exynos_drm_rotator.c rotator_dst_set_buf(rot, &task->dst); dst 363 drivers/gpu/drm/exynos/exynos_drm_scaler.c struct drm_exynos_ipp_task_rect *dst_pos = &task->dst.rect; dst 367 drivers/gpu/drm/exynos/exynos_drm_scaler.c dst_fmt = scaler_get_format(task->dst.buf.fourcc); dst 385 drivers/gpu/drm/exynos/exynos_drm_scaler.c scaler_set_dst_base(scaler, &task->dst); dst 386 drivers/gpu/drm/exynos/exynos_drm_scaler.c scaler_set_dst_span(scaler, &task->dst); dst 559 drivers/gpu/drm/gma500/cdv_intel_dp.c unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes) dst 565 drivers/gpu/drm/gma500/cdv_intel_dp.c dst[i] = src >> ((3-i) * 8); dst 688 drivers/gpu/drm/i810/i810_dma.c unsigned int dst = pbox->x1 * cpp + pbox->y1 * pitch; dst 689 drivers/gpu/drm/i810/i810_dma.c unsigned int start = dst; dst 3265 drivers/gpu/drm/i915/display/intel_display.c intel_state->base.dst = drm_plane_state_dest(plane_state); dst 3784 drivers/gpu/drm/i915/display/intel_display.c int crtc_x = plane_state->base.dst.x1; dst 3785 drivers/gpu/drm/i915/display/intel_display.c int crtc_y = plane_state->base.dst.y1; dst 3786 drivers/gpu/drm/i915/display/intel_display.c int crtc_w = drm_rect_width(&plane_state->base.dst); dst 3787 drivers/gpu/drm/i915/display/intel_display.c int crtc_h = drm_rect_height(&plane_state->base.dst); dst 5550 drivers/gpu/drm/i915/display/intel_display.c drm_rect_width(&plane_state->base.dst), dst 5551 drivers/gpu/drm/i915/display/intel_display.c drm_rect_height(&plane_state->base.dst), dst 11484 drivers/gpu/drm/i915/display/intel_display.c drm_rect_width(&new->base.dst) != drm_rect_width(&cur->base.dst) || dst 11485 drivers/gpu/drm/i915/display/intel_display.c drm_rect_height(&new->base.dst) != 
drm_rect_height(&cur->base.dst)) dst 11495 drivers/gpu/drm/i915/display/intel_display.c int dst_w = drm_rect_width(&state->base.dst); dst 11496 drivers/gpu/drm/i915/display/intel_display.c int dst_h = drm_rect_height(&state->base.dst); dst 12067 drivers/gpu/drm/i915/display/intel_display.c DRM_RECT_ARG(&plane_state->base.dst)); dst 673 drivers/gpu/drm/i915/display/intel_dp.c static void intel_dp_unpack_aux(u32 src, u8 *dst, int dst_bytes) dst 679 drivers/gpu/drm/i915/display/intel_dp.c dst[i] = src >> ((3-i) * 8); dst 368 drivers/gpu/drm/i915/display/intel_sprite.c int crtc_x = plane_state->base.dst.x1; dst 369 drivers/gpu/drm/i915/display/intel_sprite.c int crtc_y = plane_state->base.dst.y1; dst 370 drivers/gpu/drm/i915/display/intel_sprite.c u32 crtc_w = drm_rect_width(&plane_state->base.dst); dst 371 drivers/gpu/drm/i915/display/intel_sprite.c u32 crtc_h = drm_rect_height(&plane_state->base.dst); dst 377 drivers/gpu/drm/i915/display/intel_sprite.c &plane_state->base.dst, dst 380 drivers/gpu/drm/i915/display/intel_sprite.c &plane_state->base.dst, dst 551 drivers/gpu/drm/i915/display/intel_sprite.c int crtc_x = plane_state->base.dst.x1; dst 552 drivers/gpu/drm/i915/display/intel_sprite.c int crtc_y = plane_state->base.dst.y1; dst 941 drivers/gpu/drm/i915/display/intel_sprite.c int crtc_x = plane_state->base.dst.x1; dst 942 drivers/gpu/drm/i915/display/intel_sprite.c int crtc_y = plane_state->base.dst.y1; dst 943 drivers/gpu/drm/i915/display/intel_sprite.c u32 crtc_w = drm_rect_width(&plane_state->base.dst); dst 944 drivers/gpu/drm/i915/display/intel_sprite.c u32 crtc_h = drm_rect_height(&plane_state->base.dst); dst 1157 drivers/gpu/drm/i915/display/intel_sprite.c int crtc_x = plane_state->base.dst.x1; dst 1158 drivers/gpu/drm/i915/display/intel_sprite.c int crtc_y = plane_state->base.dst.y1; dst 1159 drivers/gpu/drm/i915/display/intel_sprite.c u32 crtc_w = drm_rect_width(&plane_state->base.dst); dst 1160 drivers/gpu/drm/i915/display/intel_sprite.c u32 crtc_h = drm_rect_height(&plane_state->base.dst); dst 1410 drivers/gpu/drm/i915/display/intel_sprite.c int crtc_x = plane_state->base.dst.x1; dst 1411 drivers/gpu/drm/i915/display/intel_sprite.c int crtc_y = plane_state->base.dst.y1; dst 1412 drivers/gpu/drm/i915/display/intel_sprite.c u32 crtc_w = drm_rect_width(&plane_state->base.dst); dst 1413 drivers/gpu/drm/i915/display/intel_sprite.c u32 crtc_h = drm_rect_height(&plane_state->base.dst); dst 1527 drivers/gpu/drm/i915/display/intel_sprite.c const struct drm_rect *dst = &plane_state->base.dst; dst 1536 drivers/gpu/drm/i915/display/intel_sprite.c crtc_w = drm_rect_width(dst); dst 1537 drivers/gpu/drm/i915/display/intel_sprite.c crtc_h = drm_rect_height(dst); dst 1762 drivers/gpu/drm/i915/display/intel_sprite.c int crtc_x = plane_state->base.dst.x1; dst 1763 drivers/gpu/drm/i915/display/intel_sprite.c int crtc_w = drm_rect_width(&plane_state->base.dst); dst 499 drivers/gpu/drm/i915/gem/i915_gem_context.c static void __set_timeline(struct intel_timeline **dst, dst 502 drivers/gpu/drm/i915/gem/i915_gem_context.c struct intel_timeline *old = *dst; dst 504 drivers/gpu/drm/i915/gem/i915_gem_context.c *dst = src ? 
intel_timeline_get(src) : NULL; dst 1851 drivers/gpu/drm/i915/gem/i915_gem_context.c static int clone_engines(struct i915_gem_context *dst, dst 1884 drivers/gpu/drm/i915/gem/i915_gem_context.c intel_execlists_clone_virtual(dst, engine); dst 1886 drivers/gpu/drm/i915/gem/i915_gem_context.c clone->engines[n] = intel_context_create(dst, engine); dst 1897 drivers/gpu/drm/i915/gem/i915_gem_context.c free_engines(dst->engines); dst 1898 drivers/gpu/drm/i915/gem/i915_gem_context.c RCU_INIT_POINTER(dst->engines, clone); dst 1900 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_gem_context_set_user_engines(dst); dst 1902 drivers/gpu/drm/i915/gem/i915_gem_context.c i915_gem_context_clear_user_engines(dst); dst 1910 drivers/gpu/drm/i915/gem/i915_gem_context.c static int clone_flags(struct i915_gem_context *dst, dst 1913 drivers/gpu/drm/i915/gem/i915_gem_context.c dst->user_flags = src->user_flags; dst 1917 drivers/gpu/drm/i915/gem/i915_gem_context.c static int clone_schedattr(struct i915_gem_context *dst, dst 1920 drivers/gpu/drm/i915/gem/i915_gem_context.c dst->sched = src->sched; dst 1924 drivers/gpu/drm/i915/gem/i915_gem_context.c static int clone_sseu(struct i915_gem_context *dst, dst 1932 drivers/gpu/drm/i915/gem/i915_gem_context.c clone = dst->engines; /* no locking required; sole access */ dst 1962 drivers/gpu/drm/i915/gem/i915_gem_context.c static int clone_timeline(struct i915_gem_context *dst, dst 1966 drivers/gpu/drm/i915/gem/i915_gem_context.c __assign_timeline(dst, src->timeline); dst 1971 drivers/gpu/drm/i915/gem/i915_gem_context.c static int clone_vm(struct i915_gem_context *dst, dst 2008 drivers/gpu/drm/i915/gem/i915_gem_context.c __assign_ppgtt(dst, vm); dst 2017 drivers/gpu/drm/i915/gem/i915_gem_context.c static int (* const fn[])(struct i915_gem_context *dst, dst 2030 drivers/gpu/drm/i915/gem/i915_gem_context.c struct i915_gem_context *dst = arg->ctx; dst 2052 drivers/gpu/drm/i915/gem/i915_gem_context.c GEM_BUG_ON(src == dst); dst 2058 drivers/gpu/drm/i915/gem/i915_gem_context.c err = fn[bit](dst, src); dst 25 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c struct scatterlist *src, *dst; dst 44 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c dst = st->sgl; dst 46 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c sg_set_page(dst, sg_page(src), src->length, 0); dst 47 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c dst = sg_next(dst); dst 200 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c struct i915_vma *dst) dst 211 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c GEM_BUG_ON(src->size != dst->size); dst 216 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c count = div_u64(dst->size, block_size); dst 233 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c dst_offset = dst->node.start; dst 314 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c struct drm_i915_gem_object *dst, dst 317 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c struct drm_gem_object *objs[] = { &src->base, &dst->base }; dst 332 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c vma[1] = i915_vma_instance(dst, vm, NULL); dst 24 drivers/gpu/drm/i915/gem/i915_gem_object_blt.h struct i915_vma *dst); dst 34 drivers/gpu/drm/i915/gem/i915_gem_object_blt.h struct drm_i915_gem_object *dst, dst 28 drivers/gpu/drm/i915/gem/i915_gem_phys.c void *dst; dst 60 drivers/gpu/drm/i915/gem/i915_gem_phys.c dst = vaddr; dst 70 drivers/gpu/drm/i915/gem/i915_gem_phys.c memcpy(dst, src, PAGE_SIZE); dst 71 drivers/gpu/drm/i915/gem/i915_gem_phys.c drm_clflush_virt_range(dst, PAGE_SIZE); dst 75 drivers/gpu/drm/i915/gem/i915_gem_phys.c dst += PAGE_SIZE; dst 109 
drivers/gpu/drm/i915/gem/i915_gem_phys.c char *dst; dst 115 drivers/gpu/drm/i915/gem/i915_gem_phys.c dst = kmap_atomic(page); dst 117 drivers/gpu/drm/i915/gem/i915_gem_phys.c memcpy(dst, src, PAGE_SIZE); dst 118 drivers/gpu/drm/i915/gem/i915_gem_phys.c kunmap_atomic(dst); dst 110 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c struct drm_i915_gem_object *src, *dst; dst 151 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c dst = huge_gem_object(i915, phys_sz, sz); dst 152 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c if (IS_ERR(dst)) { dst 153 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c err = PTR_ERR(dst); dst 157 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c vaddr = i915_gem_object_pin_map(dst, I915_MAP_WB); dst 164 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c huge_gem_object_phys_size(dst) / sizeof(u32)); dst 166 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c if (!(dst->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE)) dst 167 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c dst->cache_dirty = true; dst 170 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c err = i915_gem_object_copy_blt(src, dst, ce); dst 175 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c i915_gem_object_lock(dst); dst 176 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c err = i915_gem_object_set_to_cpu_domain(dst, false); dst 177 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c i915_gem_object_unlock(dst); dst 181 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c for (i = 0; i < huge_gem_object_phys_size(dst) / sizeof(u32); ++i) { dst 190 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c i915_gem_object_unpin_map(dst); dst 193 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c i915_gem_object_put(dst); dst 199 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c i915_gem_object_unpin_map(dst); dst 201 drivers/gpu/drm/i915/gem/selftests/i915_gem_object_blt.c i915_gem_object_put(dst); dst 3853 drivers/gpu/drm/i915/gt/intel_lrc.c struct intel_context *dst; dst 3855 drivers/gpu/drm/i915/gt/intel_lrc.c dst = intel_execlists_create_virtual(ctx, dst 3858 drivers/gpu/drm/i915/gt/intel_lrc.c if (IS_ERR(dst)) dst 3859 drivers/gpu/drm/i915/gt/intel_lrc.c return dst; dst 3862 drivers/gpu/drm/i915/gt/intel_lrc.c struct virtual_engine *de = to_virtual_engine(dst->engine); dst 3868 drivers/gpu/drm/i915/gt/intel_lrc.c intel_context_put(dst); dst 3875 drivers/gpu/drm/i915/gt/intel_lrc.c return dst; dst 587 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c size_t intel_uc_fw_copy_rsa(struct intel_uc_fw *uc_fw, void *dst, u32 max_len) dst 595 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c return sg_pcopy_to_buffer(pages->sgl, pages->nents, dst, size, offset); dst 238 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.h size_t intel_uc_fw_copy_rsa(struct intel_uc_fw *uc_fw, void *dst, u32 max_len); dst 135 drivers/gpu/drm/i915/gvt/scheduler.c void *dst; dst 198 drivers/gpu/drm/i915/gvt/scheduler.c dst = kmap(page); dst 199 drivers/gpu/drm/i915/gvt/scheduler.c intel_gvt_hypervisor_read_gpa(vgpu, context_gpa, dst, dst 1136 drivers/gpu/drm/i915/i915_cmd_parser.c void *dst, *src; dst 1143 drivers/gpu/drm/i915/i915_cmd_parser.c dst = i915_gem_object_pin_map(dst_obj, I915_MAP_FORCE_WB); dst 1145 drivers/gpu/drm/i915/i915_cmd_parser.c if (IS_ERR(dst)) dst 1146 drivers/gpu/drm/i915/i915_cmd_parser.c return dst; dst 1159 drivers/gpu/drm/i915/i915_cmd_parser.c i915_memcpy_from_wc(dst, dst 1182 drivers/gpu/drm/i915/i915_cmd_parser.c ptr = 
dst; dst 1203 drivers/gpu/drm/i915/i915_cmd_parser.c return dst; dst 274 drivers/gpu/drm/i915/i915_gpu_error.c struct drm_i915_error_object *dst) dst 278 drivers/gpu/drm/i915/i915_gpu_error.c if (dst->page_count >= dst->num_pages) dst 285 drivers/gpu/drm/i915/i915_gpu_error.c return dst->pages[dst->page_count++] = page; dst 290 drivers/gpu/drm/i915/i915_gpu_error.c struct drm_i915_error_object *dst) dst 301 drivers/gpu/drm/i915/i915_gpu_error.c zstream->next_out = compress_next_page(c, dst); dst 320 drivers/gpu/drm/i915/i915_gpu_error.c struct drm_i915_error_object *dst) dst 327 drivers/gpu/drm/i915/i915_gpu_error.c zstream->next_out = compress_next_page(c, dst); dst 344 drivers/gpu/drm/i915/i915_gpu_error.c dst->unused = zstream->avail_out; dst 384 drivers/gpu/drm/i915/i915_gpu_error.c struct drm_i915_error_object *dst) dst 394 drivers/gpu/drm/i915/i915_gpu_error.c dst->pages[dst->page_count++] = ptr; dst 400 drivers/gpu/drm/i915/i915_gpu_error.c struct drm_i915_error_object *dst) dst 963 drivers/gpu/drm/i915/i915_gpu_error.c struct drm_i915_error_object *dst; dst 976 drivers/gpu/drm/i915/i915_gpu_error.c dst = kmalloc(sizeof(*dst) + num_pages * sizeof(u32 *), ALLOW_FAIL); dst 977 drivers/gpu/drm/i915/i915_gpu_error.c if (!dst) dst 981 drivers/gpu/drm/i915/i915_gpu_error.c kfree(dst); dst 985 drivers/gpu/drm/i915/i915_gpu_error.c dst->gtt_offset = vma->node.start; dst 986 drivers/gpu/drm/i915/i915_gpu_error.c dst->gtt_size = vma->node.size; dst 987 drivers/gpu/drm/i915/i915_gpu_error.c dst->num_pages = num_pages; dst 988 drivers/gpu/drm/i915/i915_gpu_error.c dst->page_count = 0; dst 989 drivers/gpu/drm/i915/i915_gpu_error.c dst->unused = 0; dst 998 drivers/gpu/drm/i915/i915_gpu_error.c ret = compress_page(compress, (void __force *)s, dst); dst 1004 drivers/gpu/drm/i915/i915_gpu_error.c if (ret || compress_flush(compress, dst)) { dst 1005 drivers/gpu/drm/i915/i915_gpu_error.c while (dst->page_count--) dst 1006 drivers/gpu/drm/i915/i915_gpu_error.c pool_free(&compress->pool, dst->pages[dst->page_count]); dst 1007 drivers/gpu/drm/i915/i915_gpu_error.c kfree(dst); dst 1008 drivers/gpu/drm/i915/i915_gpu_error.c dst = NULL; dst 1012 drivers/gpu/drm/i915/i915_gpu_error.c return dst; dst 33 drivers/gpu/drm/i915/i915_memcpy.c static void __memcpy_ntdqa(void *dst, const void *src, unsigned long len) dst 47 drivers/gpu/drm/i915/i915_memcpy.c :: "r" (src), "r" (dst) : "memory"); dst 49 drivers/gpu/drm/i915/i915_memcpy.c dst += 64; dst 55 drivers/gpu/drm/i915/i915_memcpy.c :: "r" (src), "r" (dst) : "memory"); dst 57 drivers/gpu/drm/i915/i915_memcpy.c dst += 16; dst 81 drivers/gpu/drm/i915/i915_memcpy.c bool i915_memcpy_from_wc(void *dst, const void *src, unsigned long len) dst 83 drivers/gpu/drm/i915/i915_memcpy.c if (unlikely(((unsigned long)dst | (unsigned long)src | len) & 15)) dst 89 drivers/gpu/drm/i915/i915_memcpy.c __memcpy_ntdqa(dst, src, len); dst 14 drivers/gpu/drm/i915/i915_memcpy.h bool i915_memcpy_from_wc(void *dst, const void *src, unsigned long len); dst 26 drivers/gpu/drm/i915/i915_memcpy.h #define i915_can_memcpy_from_wc(dst, src, len) \ dst 27 drivers/gpu/drm/i915/i915_memcpy.h i915_memcpy_from_wc((void *)((unsigned long)(dst) | (unsigned long)(src) | (len)), NULL, 0) dst 299 drivers/gpu/drm/i915/i915_trace.h __array(int, dst, 4) dst 309 drivers/gpu/drm/i915/i915_trace.h memcpy(__entry->dst, &plane->state->dst, sizeof(__entry->dst)); dst 316 drivers/gpu/drm/i915/i915_trace.h DRM_RECT_ARG((const struct drm_rect *)__entry->dst)) dst 1151 drivers/gpu/drm/i915/intel_pm.c width = 
drm_rect_width(&plane_state->base.dst); dst 2527 drivers/gpu/drm/i915/intel_pm.c drm_rect_width(&plane_state->base.dst), dst 2555 drivers/gpu/drm/i915/intel_pm.c drm_rect_width(&plane_state->base.dst), dst 2595 drivers/gpu/drm/i915/intel_pm.c return ilk_wm_fbc(pri_val, drm_rect_width(&plane_state->base.dst), cpp); dst 3145 drivers/gpu/drm/i915/intel_pm.c (drm_rect_width(&sprstate->base.dst) != drm_rect_width(&sprstate->base.src) >> 16 || dst 3146 drivers/gpu/drm/i915/intel_pm.c drm_rect_height(&sprstate->base.dst) != drm_rect_height(&sprstate->base.src) >> 16); dst 4100 drivers/gpu/drm/i915/intel_pm.c dst_w = drm_rect_width(&plane_state->base.dst); dst 4101 drivers/gpu/drm/i915/intel_pm.c dst_h = drm_rect_height(&plane_state->base.dst); dst 384 drivers/gpu/drm/imx/ipuv3-plane.c if (drm_rect_width(&state->dst) < 13) dst 394 drivers/gpu/drm/imx/ipuv3-plane.c if (drm_rect_height(&state->dst) < 2) dst 405 drivers/gpu/drm/imx/ipuv3-plane.c (drm_rect_width(&state->dst) != drm_rect_width(&old_state->dst) || dst 406 drivers/gpu/drm/imx/ipuv3-plane.c drm_rect_height(&state->dst) != drm_rect_height(&old_state->dst) || dst 552 drivers/gpu/drm/imx/ipuv3-plane.c struct drm_rect *dst = &state->dst; dst 563 drivers/gpu/drm/imx/ipuv3-plane.c ipu_dp_set_window_pos(ipu_plane->dp, dst->x1, dst->y1); dst 626 drivers/gpu/drm/imx/ipuv3-plane.c ipu_dmfc_config_wait4eot(ipu_plane->dmfc, drm_rect_width(dst)); dst 131 drivers/gpu/drm/mediatek/mtk_drm_plane.c state->pending.x = plane->state->dst.x1; dst 132 drivers/gpu/drm/mediatek/mtk_drm_plane.c state->pending.y = plane->state->dst.y1; dst 133 drivers/gpu/drm/mediatek/mtk_drm_plane.c state->pending.width = drm_rect_width(&plane->state->dst); dst 134 drivers/gpu/drm/mediatek/mtk_drm_plane.c state->pending.height = drm_rect_height(&plane->state->dst); dst 501 drivers/gpu/drm/mediatek/mtk_hdmi.c static void mtk_hdmi_hw_audio_config(struct mtk_hdmi *hdmi, bool dst) dst 509 drivers/gpu/drm/mediatek/mtk_hdmi.c if (dst) dst 888 drivers/gpu/drm/mediatek/mtk_hdmi.c bool dst; dst 903 drivers/gpu/drm/mediatek/mtk_hdmi.c dst = ((hdmi->aud_param.aud_input_type == HDMI_AUD_INPUT_SPDIF) && dst 905 drivers/gpu/drm/mediatek/mtk_hdmi.c mtk_hdmi_hw_audio_config(hdmi, dst); dst 43 drivers/gpu/drm/mgag200/mgag200_cursor.c u8 *src, *dst; dst 108 drivers/gpu/drm/mgag200/mgag200_cursor.c dst = drm_gem_vram_kmap(pixels_next, true, NULL); dst 109 drivers/gpu/drm/mgag200/mgag200_cursor.c if (IS_ERR(dst)) { dst 110 drivers/gpu/drm/mgag200/mgag200_cursor.c ret = PTR_ERR(dst); dst 200 drivers/gpu/drm/mgag200/mgag200_cursor.c memcpy_toio(dst + row*48, &this_row[0], 48); dst 98 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c void *dst = state_kcalloc(a6xx_state, 1, size); dst 100 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c if (dst) dst 101 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c memcpy(dst, src, size); dst 102 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c return dst; dst 864 drivers/gpu/drm/msm/disp/dpu1/dpu_crtc.c struct drm_rect dst, clip = crtc_rect; dst 893 drivers/gpu/drm/msm/disp/dpu1/dpu_crtc.c dst = drm_plane_state_dest(pstate); dst 894 drivers/gpu/drm/msm/disp/dpu1/dpu_crtc.c if (!drm_rect_intersect(&clip, &dst)) { dst 898 drivers/gpu/drm/msm/disp/dpu1/dpu_crtc.c DRM_RECT_ARG(&dst)); dst 209 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_util.c u32 phase_init, preload, src_y_rgb, src_uv, dst; dst 237 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_util.c dst = (scaler3_cfg->dst_width & 0x1FFFF) | dst 285 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_util.c DPU_REG_WRITE(c, QSEED3_DST_SIZE + scaler_offset, dst); dst 
638 drivers/gpu/drm/msm/disp/dpu1/dpu_plane.c struct drm_rect src[R_MAX], dst[R_MAX]; dst 677 drivers/gpu/drm/msm/disp/dpu1/dpu_plane.c dst[i] = drm_plane_state_dest(drm_state[i]); dst 679 drivers/gpu/drm/msm/disp/dpu1/dpu_plane.c if (drm_rect_calc_hscale(&src[i], &dst[i], 1, 1) != 1 || dst 680 drivers/gpu/drm/msm/disp/dpu1/dpu_plane.c drm_rect_calc_vscale(&src[i], &dst[i], 1, 1) != 1) { dst 722 drivers/gpu/drm/msm/disp/dpu1/dpu_plane.c if (dst[R1].y1 >= dst[R0].y2 + buffer_lines || dst 723 drivers/gpu/drm/msm/disp/dpu1/dpu_plane.c dst[R0].y1 >= dst[R1].y2 + buffer_lines) { dst 849 drivers/gpu/drm/msm/disp/dpu1/dpu_plane.c struct drm_rect src, dst, fb_rect = { 0 }; dst 872 drivers/gpu/drm/msm/disp/dpu1/dpu_plane.c dst = drm_plane_state_dest(state); dst 907 drivers/gpu/drm/msm/disp/dpu1/dpu_plane.c } else if (drm_rect_width(&dst) < 0x1 || drm_rect_height(&dst) < 0x1) { dst 909 drivers/gpu/drm/msm/disp/dpu1/dpu_plane.c DRM_RECT_ARG(&dst)); dst 990 drivers/gpu/drm/msm/disp/dpu1/dpu_plane.c crtc->base.id, DRM_RECT_ARG(&state->dst), dst 1001 drivers/gpu/drm/msm/disp/dpu1/dpu_plane.c pdpu->pipe_cfg.dst_rect = state->dst; dst 26 drivers/gpu/drm/msm/disp/mdp5/mdp5_encoder.c .dst = MSM_BUS_SLAVE_EBI_CH0, \ dst 434 drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c &state->src, &state->dst); dst 511 drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c &new_state->src, &new_state->dst); dst 623 drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c static int calc_phase_step(uint32_t src, uint32_t dst, uint32_t *out_phase) dst 627 drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c if (src == 0 || dst == 0) dst 636 drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c if (src > (dst * DOWN_SCALE_RATIO_MAX)) dst 640 drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c *out_phase = mult_frac(unit, src, dst); dst 692 drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c uint32_t src, uint32_t dst, bool horz) dst 695 drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c bool scaling = format->is_yuv ? true : (src != dst); dst 705 drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c uv_filter = ((src / sub) <= dst) ? dst 708 drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c ya_filter = (src <= dst) ? SCALE_FILTER_BIL : SCALE_FILTER_PCMN; dst 723 drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c uint32_t src, uint32_t dst, uint32_t phase_step[2], dst 727 drivers/gpu/drm/msm/disp/mdp5/mdp5_plane.c bool scaling = format->is_yuv ? 
true : (src != dst); dst 923 drivers/gpu/drm/nouveau/dispnv04/crtc.c struct nouveau_bo *dst) dst 933 drivers/gpu/drm/nouveau/dispnv04/crtc.c nouveau_bo_wr16(dst, i*width + j, (pixel & 0x80000000) >> 16 dst 942 drivers/gpu/drm/nouveau/dispnv04/crtc.c struct nouveau_bo *dst) dst 978 drivers/gpu/drm/nouveau/dispnv04/crtc.c nouveau_bo_wr32(dst, i, pixel); dst 39 drivers/gpu/drm/nouveau/include/nvkm/core/gpuobj.h void nvkm_gpuobj_memcpy_to(struct nvkm_gpuobj *dst, u32 dstoffset, void *src, dst 41 drivers/gpu/drm/nouveau/include/nvkm/core/gpuobj.h void nvkm_gpuobj_memcpy_from(void *dst, struct nvkm_gpuobj *src, u32 srcoffset, dst 155 drivers/gpu/drm/nouveau/nouveau_dmem.c args->dst[0] = migrate_pfn(page_to_pfn(dpage)) | MIGRATE_PFN_LOCKED; dst 170 drivers/gpu/drm/nouveau/nouveau_dmem.c unsigned long src = 0, dst = 0; dst 178 drivers/gpu/drm/nouveau/nouveau_dmem.c .dst = &dst, dst 192 drivers/gpu/drm/nouveau/nouveau_dmem.c if (ret || dst == 0) dst 600 drivers/gpu/drm/nouveau/nouveau_dmem.c args->dst[i] = nouveau_dmem_migrate_copy_one(drm, args->src[i], dst 602 drivers/gpu/drm/nouveau/nouveau_dmem.c if (args->dst[i]) dst 641 drivers/gpu/drm/nouveau/nouveau_dmem.c args.dst = kcalloc(max, sizeof(*args.dst), GFP_KERNEL); dst 642 drivers/gpu/drm/nouveau/nouveau_dmem.c if (!args.dst) dst 665 drivers/gpu/drm/nouveau/nouveau_dmem.c kfree(args.dst); dst 261 drivers/gpu/drm/nouveau/nvkm/core/gpuobj.c nvkm_gpuobj_memcpy_to(struct nvkm_gpuobj *dst, u32 dstoffset, void *src, dst 267 drivers/gpu/drm/nouveau/nvkm/core/gpuobj.c nvkm_wo32(dst, dstoffset + i, *(u32 *)(src + i)); dst 271 drivers/gpu/drm/nouveau/nvkm/core/gpuobj.c nvkm_gpuobj_memcpy_from(void *dst, struct nvkm_gpuobj *src, u32 srcoffset, dst 508 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c char *dst = en; dst 510 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c *dst++ = toupper(*src++); dst 25 drivers/gpu/drm/omapdrm/displays/connector-analog-tv.c struct omap_dss_device *dst) dst 31 drivers/gpu/drm/omapdrm/displays/connector-analog-tv.c struct omap_dss_device *dst) dst 31 drivers/gpu/drm/omapdrm/displays/connector-hdmi.c struct omap_dss_device *dst) dst 37 drivers/gpu/drm/omapdrm/displays/connector-hdmi.c struct omap_dss_device *dst) dst 30 drivers/gpu/drm/omapdrm/displays/encoder-opa362.c struct omap_dss_device *dst) dst 32 drivers/gpu/drm/omapdrm/displays/encoder-opa362.c return omapdss_device_connect(dst->dss, dst, dst->next); dst 36 drivers/gpu/drm/omapdrm/displays/encoder-opa362.c struct omap_dss_device *dst) dst 38 drivers/gpu/drm/omapdrm/displays/encoder-opa362.c omapdss_device_disconnect(dst, dst->next); dst 33 drivers/gpu/drm/omapdrm/displays/encoder-tpd12s015.c struct omap_dss_device *dst) dst 35 drivers/gpu/drm/omapdrm/displays/encoder-tpd12s015.c struct panel_drv_data *ddata = to_panel_data(dst); dst 38 drivers/gpu/drm/omapdrm/displays/encoder-tpd12s015.c r = omapdss_device_connect(dst->dss, dst, dst->next); dst 52 drivers/gpu/drm/omapdrm/displays/encoder-tpd12s015.c struct omap_dss_device *dst) dst 54 drivers/gpu/drm/omapdrm/displays/encoder-tpd12s015.c struct panel_drv_data *ddata = to_panel_data(dst); dst 59 drivers/gpu/drm/omapdrm/displays/encoder-tpd12s015.c omapdss_device_disconnect(dst, dst->next); dst 745 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c struct omap_dss_device *dst) dst 747 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c struct panel_drv_data *ddata = to_panel_data(dst); dst 769 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c struct omap_dss_device *dst) dst 771 drivers/gpu/drm/omapdrm/displays/panel-dsi-cm.c 
struct panel_drv_data *ddata = to_panel_data(dst); dst 177 drivers/gpu/drm/omapdrm/dss/base.c struct omap_dss_device *dst) dst 183 drivers/gpu/drm/omapdrm/dss/base.c dst ? dev_name(dst->dev) : "NULL"); dst 185 drivers/gpu/drm/omapdrm/dss/base.c if (!dst) { dst 195 drivers/gpu/drm/omapdrm/dss/base.c if (omapdss_device_is_connected(dst)) dst 198 drivers/gpu/drm/omapdrm/dss/base.c dst->dss = dss; dst 200 drivers/gpu/drm/omapdrm/dss/base.c ret = dst->ops->connect(src, dst); dst 202 drivers/gpu/drm/omapdrm/dss/base.c dst->dss = NULL; dst 211 drivers/gpu/drm/omapdrm/dss/base.c struct omap_dss_device *dst) dst 213 drivers/gpu/drm/omapdrm/dss/base.c struct dss_device *dss = src ? src->dss : dst->dss; dst 217 drivers/gpu/drm/omapdrm/dss/base.c dst ? dev_name(dst->dev) : "NULL"); dst 219 drivers/gpu/drm/omapdrm/dss/base.c if (!dst) { dst 224 drivers/gpu/drm/omapdrm/dss/base.c if (!dst->id && !omapdss_device_is_connected(dst)) { dst 225 drivers/gpu/drm/omapdrm/dss/base.c WARN_ON(!dst->display); dst 229 drivers/gpu/drm/omapdrm/dss/base.c WARN_ON(dst->state != OMAP_DSS_DISPLAY_DISABLED); dst 231 drivers/gpu/drm/omapdrm/dss/base.c dst->ops->disconnect(src, dst); dst 232 drivers/gpu/drm/omapdrm/dss/base.c dst->dss = NULL; dst 576 drivers/gpu/drm/omapdrm/dss/dpi.c struct omap_dss_device *dst) dst 578 drivers/gpu/drm/omapdrm/dss/dpi.c struct dpi_data *dpi = dpi_get_data_from_dssdev(dst); dst 582 drivers/gpu/drm/omapdrm/dss/dpi.c return omapdss_device_connect(dst->dss, dst, dst->next); dst 586 drivers/gpu/drm/omapdrm/dss/dpi.c struct omap_dss_device *dst) dst 588 drivers/gpu/drm/omapdrm/dss/dpi.c omapdss_device_disconnect(dst, dst->next); dst 4872 drivers/gpu/drm/omapdrm/dss/dsi.c struct omap_dss_device *dst) dst 4874 drivers/gpu/drm/omapdrm/dss/dsi.c return omapdss_device_connect(dst->dss, dst, dst->next); dst 4878 drivers/gpu/drm/omapdrm/dss/dsi.c struct omap_dss_device *dst) dst 4880 drivers/gpu/drm/omapdrm/dss/dsi.c omapdss_device_disconnect(dst, dst->next); dst 397 drivers/gpu/drm/omapdrm/dss/hdmi4.c struct omap_dss_device *dst) dst 399 drivers/gpu/drm/omapdrm/dss/hdmi4.c return omapdss_device_connect(dst->dss, dst, dst->next); dst 403 drivers/gpu/drm/omapdrm/dss/hdmi4.c struct omap_dss_device *dst) dst 405 drivers/gpu/drm/omapdrm/dss/hdmi4.c omapdss_device_disconnect(dst, dst->next); dst 402 drivers/gpu/drm/omapdrm/dss/hdmi5.c struct omap_dss_device *dst) dst 404 drivers/gpu/drm/omapdrm/dss/hdmi5.c return omapdss_device_connect(dst->dss, dst, dst->next); dst 408 drivers/gpu/drm/omapdrm/dss/hdmi5.c struct omap_dss_device *dst) dst 410 drivers/gpu/drm/omapdrm/dss/hdmi5.c omapdss_device_disconnect(dst, dst->next); dst 60 drivers/gpu/drm/omapdrm/dss/omapdss-boot-init.c static void __init omapdss_prefix_strcpy(char *dst, int dst_len, dst 68 drivers/gpu/drm/omapdrm/dss/omapdss-boot-init.c strcpy(dst, prefix); dst 69 drivers/gpu/drm/omapdrm/dss/omapdss-boot-init.c dst += strlen(prefix); dst 71 drivers/gpu/drm/omapdrm/dss/omapdss-boot-init.c strcpy(dst, src); dst 72 drivers/gpu/drm/omapdrm/dss/omapdss-boot-init.c dst += l; dst 348 drivers/gpu/drm/omapdrm/dss/omapdss.h struct omap_dss_device *dst); dst 350 drivers/gpu/drm/omapdrm/dss/omapdss.h struct omap_dss_device *dst); dst 475 drivers/gpu/drm/omapdrm/dss/omapdss.h struct omap_dss_device *dst); dst 477 drivers/gpu/drm/omapdrm/dss/omapdss.h struct omap_dss_device *dst); dst 235 drivers/gpu/drm/omapdrm/dss/sdi.c struct omap_dss_device *dst) dst 237 drivers/gpu/drm/omapdrm/dss/sdi.c return omapdss_device_connect(dst->dss, dst, dst->next); dst 241 
drivers/gpu/drm/omapdrm/dss/sdi.c struct omap_dss_device *dst) dst 243 drivers/gpu/drm/omapdrm/dss/sdi.c omapdss_device_disconnect(dst, dst->next); dst 677 drivers/gpu/drm/omapdrm/dss/venc.c struct omap_dss_device *dst) dst 679 drivers/gpu/drm/omapdrm/dss/venc.c return omapdss_device_connect(dst->dss, dst, dst->next); dst 683 drivers/gpu/drm/omapdrm/dss/venc.c struct omap_dss_device *dst) dst 685 drivers/gpu/drm/omapdrm/dss/venc.c omapdss_device_disconnect(dst, dst->next); dst 83 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c static int dmm_dma_copy(struct dmm *dmm, dma_addr_t src, dma_addr_t dst) dst 90 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c tx = dma_dev->device_prep_dma_memcpy(dmm->wa_dma_chan, dst, src, 4, 0); dst 113 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c dma_addr_t src, dst; dst 117 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c dst = dmm->wa_dma_handle; dst 119 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c r = dmm_dma_copy(dmm, src, dst); dst 136 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c dma_addr_t src, dst; dst 149 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c dst = dmm->phys_base + reg; dst 151 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c r = dmm_dma_copy(dmm, src, dst); dst 589 drivers/gpu/drm/radeon/atom.c uint32_t dst, src, saved; dst 592 drivers/gpu/drm/radeon/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 595 drivers/gpu/drm/radeon/atom.c dst += src; dst 597 drivers/gpu/drm/radeon/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 603 drivers/gpu/drm/radeon/atom.c uint32_t dst, src, saved; dst 606 drivers/gpu/drm/radeon/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 609 drivers/gpu/drm/radeon/atom.c dst &= src; dst 611 drivers/gpu/drm/radeon/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 650 drivers/gpu/drm/radeon/atom.c uint32_t dst, src; dst 652 drivers/gpu/drm/radeon/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1); dst 655 drivers/gpu/drm/radeon/atom.c ctx->ctx->cs_equal = (dst == src); dst 656 drivers/gpu/drm/radeon/atom.c ctx->ctx->cs_above = (dst > src); dst 676 drivers/gpu/drm/radeon/atom.c uint32_t dst, src; dst 678 drivers/gpu/drm/radeon/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1); dst 682 drivers/gpu/drm/radeon/atom.c ctx->ctx->divmul[0] = dst / src; dst 683 drivers/gpu/drm/radeon/atom.c ctx->ctx->divmul[1] = dst % src; dst 751 drivers/gpu/drm/radeon/atom.c uint32_t dst, mask, src, saved; dst 754 drivers/gpu/drm/radeon/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 759 drivers/gpu/drm/radeon/atom.c dst &= mask; dst 760 drivers/gpu/drm/radeon/atom.c dst |= src; dst 762 drivers/gpu/drm/radeon/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 785 drivers/gpu/drm/radeon/atom.c uint32_t dst, src; dst 787 drivers/gpu/drm/radeon/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1); dst 790 drivers/gpu/drm/radeon/atom.c ctx->ctx->divmul[0] = dst * src; dst 801 drivers/gpu/drm/radeon/atom.c uint32_t dst, src, saved; dst 804 drivers/gpu/drm/radeon/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 807 drivers/gpu/drm/radeon/atom.c dst |= src; dst 809 drivers/gpu/drm/radeon/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 891 drivers/gpu/drm/radeon/atom.c uint32_t saved, dst; dst 896 drivers/gpu/drm/radeon/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 899 drivers/gpu/drm/radeon/atom.c dst <<= shift; dst 901 drivers/gpu/drm/radeon/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 907 drivers/gpu/drm/radeon/atom.c uint32_t saved, dst; dst 912 
drivers/gpu/drm/radeon/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 915 drivers/gpu/drm/radeon/atom.c dst >>= shift; dst 917 drivers/gpu/drm/radeon/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 923 drivers/gpu/drm/radeon/atom.c uint32_t saved, dst; dst 927 drivers/gpu/drm/radeon/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 929 drivers/gpu/drm/radeon/atom.c dst = saved; dst 932 drivers/gpu/drm/radeon/atom.c dst <<= shift; dst 933 drivers/gpu/drm/radeon/atom.c dst &= atom_arg_mask[dst_align]; dst 934 drivers/gpu/drm/radeon/atom.c dst >>= atom_arg_shift[dst_align]; dst 936 drivers/gpu/drm/radeon/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 942 drivers/gpu/drm/radeon/atom.c uint32_t saved, dst; dst 946 drivers/gpu/drm/radeon/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 948 drivers/gpu/drm/radeon/atom.c dst = saved; dst 951 drivers/gpu/drm/radeon/atom.c dst >>= shift; dst 952 drivers/gpu/drm/radeon/atom.c dst &= atom_arg_mask[dst_align]; dst 953 drivers/gpu/drm/radeon/atom.c dst >>= atom_arg_shift[dst_align]; dst 955 drivers/gpu/drm/radeon/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 961 drivers/gpu/drm/radeon/atom.c uint32_t dst, src, saved; dst 964 drivers/gpu/drm/radeon/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 967 drivers/gpu/drm/radeon/atom.c dst -= src; dst 969 drivers/gpu/drm/radeon/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 1002 drivers/gpu/drm/radeon/atom.c uint32_t dst, src; dst 1004 drivers/gpu/drm/radeon/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1); dst 1007 drivers/gpu/drm/radeon/atom.c ctx->ctx->cs_equal = ((dst & src) == 0); dst 1014 drivers/gpu/drm/radeon/atom.c uint32_t dst, src, saved; dst 1017 drivers/gpu/drm/radeon/atom.c dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1); dst 1020 drivers/gpu/drm/radeon/atom.c dst ^= src; dst 1022 drivers/gpu/drm/radeon/atom.c atom_put_dst(ctx, arg, attr, &dptr, dst, saved); dst 57 drivers/gpu/drm/radeon/atombios_dp.c void radeon_atom_copy_swap(u8 *dst, u8 *src, u8 num_bytes, bool to_le) dst 68 drivers/gpu/drm/radeon/atombios_dp.c memcpy(dst, dst_tmp, align_num_bytes); dst 73 drivers/gpu/drm/radeon/atombios_dp.c memcpy(dst, dst_tmp, num_bytes); dst 76 drivers/gpu/drm/radeon/atombios_dp.c memcpy(dst, src, num_bytes); dst 799 drivers/gpu/drm/radeon/radeon_fence.c struct radeon_fence_driver *dst, *src; dst 812 drivers/gpu/drm/radeon/radeon_fence.c dst = &fence->rdev->fence_drv[dst_ring]; dst 817 drivers/gpu/drm/radeon/radeon_fence.c dst->sync_seq[i] = max(dst->sync_seq[i], src->sync_seq[i]); dst 779 drivers/gpu/drm/radeon/radeon_mode.h void radeon_atom_copy_swap(u8 *dst, u8 *src, u8 num_bytes, bool to_le); dst 818 drivers/gpu/drm/radeon/radeon_vm.c uint64_t dst, uint32_t flags) dst 856 drivers/gpu/drm/radeon/radeon_vm.c last_dst = dst; dst 862 drivers/gpu/drm/radeon/radeon_vm.c dst += nptes * RADEON_GPU_PAGE_SIZE; dst 520 drivers/gpu/drm/rcar-du/rcar_du_plane.c const struct drm_rect *dst = &state->state.dst; dst 528 drivers/gpu/drm/rcar-du/rcar_du_plane.c rcar_du_plane_write(rgrp, index, PnDSXR, drm_rect_width(dst)); dst 529 drivers/gpu/drm/rcar-du/rcar_du_plane.c rcar_du_plane_write(rgrp, index, PnDSYR, drm_rect_height(dst)); dst 530 drivers/gpu/drm/rcar-du/rcar_du_plane.c rcar_du_plane_write(rgrp, index, PnDPXR, dst->x1); dst 531 drivers/gpu/drm/rcar-du/rcar_du_plane.c rcar_du_plane_write(rgrp, index, PnDPYR, dst->y1); dst 62 drivers/gpu/drm/rcar-du/rcar_du_vsp.c .dst.x1 = 0, dst 63 
drivers/gpu/drm/rcar-du/rcar_du_vsp.c .dst.y1 = 0, dst 64 drivers/gpu/drm/rcar-du/rcar_du_vsp.c .dst.x2 = mode->hdisplay, dst 65 drivers/gpu/drm/rcar-du/rcar_du_vsp.c .dst.y2 = mode->vdisplay, dst 167 drivers/gpu/drm/rcar-du/rcar_du_vsp.c cfg.dst.left = state->state.dst.x1; dst 168 drivers/gpu/drm/rcar-du/rcar_du_vsp.c cfg.dst.top = state->state.dst.y1; dst 169 drivers/gpu/drm/rcar-du/rcar_du_vsp.c cfg.dst.width = drm_rect_width(&state->state.dst); dst 170 drivers/gpu/drm/rcar-du/rcar_du_vsp.c cfg.dst.height = drm_rect_height(&state->state.dst); dst 277 drivers/gpu/drm/rockchip/rockchip_drm_vop.c uint32_t dst, bool is_horizontal, dst 287 drivers/gpu/drm/rockchip/rockchip_drm_vop.c val = GET_SCL_FT_BIC(src, dst); dst 289 drivers/gpu/drm/rockchip/rockchip_drm_vop.c val = GET_SCL_FT_BILI_DN(src, dst); dst 293 drivers/gpu/drm/rockchip/rockchip_drm_vop.c val = GET_SCL_FT_BILI_UP(src, dst); dst 295 drivers/gpu/drm/rockchip/rockchip_drm_vop.c val = GET_SCL_FT_BIC(src, dst); dst 298 drivers/gpu/drm/rockchip/rockchip_drm_vop.c *vskiplines = scl_get_vskiplines(src, dst); dst 299 drivers/gpu/drm/rockchip/rockchip_drm_vop.c val = scl_get_bili_dn_vskip(src, dst, dst 302 drivers/gpu/drm/rockchip/rockchip_drm_vop.c val = GET_SCL_FT_BILI_DN(src, dst); dst 793 drivers/gpu/drm/rockchip/rockchip_drm_vop.c struct drm_rect *dest = &state->dst; dst 306 drivers/gpu/drm/rockchip/rockchip_drm_vop.h static inline uint16_t scl_cal_scale(int src, int dst, int shift) dst 308 drivers/gpu/drm/rockchip/rockchip_drm_vop.h return ((src * 2 - 3) << (shift - 1)) / (dst - 1); dst 311 drivers/gpu/drm/rockchip/rockchip_drm_vop.h static inline uint16_t scl_cal_scale2(int src, int dst) dst 313 drivers/gpu/drm/rockchip/rockchip_drm_vop.h return ((src - 1) << 12) / (dst - 1); dst 316 drivers/gpu/drm/rockchip/rockchip_drm_vop.h #define GET_SCL_FT_BILI_DN(src, dst) scl_cal_scale(src, dst, 12) dst 317 drivers/gpu/drm/rockchip/rockchip_drm_vop.h #define GET_SCL_FT_BILI_UP(src, dst) scl_cal_scale(src, dst, 16) dst 318 drivers/gpu/drm/rockchip/rockchip_drm_vop.h #define GET_SCL_FT_BIC(src, dst) scl_cal_scale(src, dst, 16) dst 333 drivers/gpu/drm/rockchip/rockchip_drm_vop.h static inline enum scale_mode scl_get_scl_mode(int src, int dst) dst 335 drivers/gpu/drm/rockchip/rockchip_drm_vop.h if (src < dst) dst 337 drivers/gpu/drm/rockchip/rockchip_drm_vop.h else if (src > dst) dst 64 drivers/gpu/drm/selftests/test-drm_plane_helper.c if (plane_state->dst.x1 != crtc_x || dst 65 drivers/gpu/drm/selftests/test-drm_plane_helper.c plane_state->dst.y1 != crtc_y || dst 66 drivers/gpu/drm/selftests/test-drm_plane_helper.c drm_rect_width(&plane_state->dst) != crtc_w || dst 67 drivers/gpu/drm/selftests/test-drm_plane_helper.c drm_rect_height(&plane_state->dst) != crtc_h) { dst 68 drivers/gpu/drm/selftests/test-drm_plane_helper.c drm_rect_debug_print("dst: ", &plane_state->dst, false); dst 149 drivers/gpu/drm/sti/sti_cursor.c u8 *dst = cursor->pixmap.base; dst 160 drivers/gpu/drm/sti/sti_cursor.c *dst = a << 6 | r << 4 | g << 2 | b; dst 162 drivers/gpu/drm/sti/sti_cursor.c dst++; dst 602 drivers/gpu/drm/sti/sti_gdp.c static int sti_gdp_get_dst(struct device *dev, int dst, int src) dst 604 drivers/gpu/drm/sti/sti_gdp.c if (dst == src) dst 605 drivers/gpu/drm/sti/sti_gdp.c return dst; dst 607 drivers/gpu/drm/sti/sti_gdp.c if (dst < src) { dst 609 drivers/gpu/drm/sti/sti_gdp.c return dst; dst 93 drivers/gpu/drm/sun4i/sun8i_ui_layer.c dst_w = drm_rect_width(&state->dst); dst 94 drivers/gpu/drm/sun4i/sun8i_ui_layer.c dst_h = drm_rect_height(&state->dst); dst 
161 drivers/gpu/drm/sun4i/sun8i_ui_layer.c state->dst.x1, state->dst.y1); dst 165 drivers/gpu/drm/sun4i/sun8i_ui_layer.c SUN8I_MIXER_COORD(state->dst.x1, state->dst.y1)); dst 90 drivers/gpu/drm/sun4i/sun8i_vi_layer.c dst_w = drm_rect_width(&state->dst); dst 91 drivers/gpu/drm/sun4i/sun8i_vi_layer.c dst_h = drm_rect_height(&state->dst); dst 201 drivers/gpu/drm/sun4i/sun8i_vi_layer.c state->dst.x1, state->dst.y1); dst 205 drivers/gpu/drm/sun4i/sun8i_vi_layer.c SUN8I_MIXER_COORD(state->dst.x1, state->dst.y1)); dst 312 drivers/gpu/drm/tegra/dc.c if (window->src.w == window->dst.w) dst 327 drivers/gpu/drm/tegra/dc.c if (window->src.h == window->dst.h) dst 360 drivers/gpu/drm/tegra/dc.c value = V_POSITION(window->dst.y) | H_POSITION(window->dst.x); dst 363 drivers/gpu/drm/tegra/dc.c value = V_SIZE(window->dst.h) | H_SIZE(window->dst.w); dst 381 drivers/gpu/drm/tegra/dc.c h_dda = compute_dda_inc(window->src.w, window->dst.w, false, bpp); dst 382 drivers/gpu/drm/tegra/dc.c v_dda = compute_dda_inc(window->src.h, window->dst.h, true, bpp); dst 704 drivers/gpu/drm/tegra/dc.c window.dst.x = plane->state->dst.x1; dst 705 drivers/gpu/drm/tegra/dc.c window.dst.y = plane->state->dst.y1; dst 706 drivers/gpu/drm/tegra/dc.c window.dst.w = drm_rect_width(&plane->state->dst); dst 707 drivers/gpu/drm/tegra/dc.c window.dst.h = drm_rect_height(&plane->state->dst); dst 136 drivers/gpu/drm/tegra/dc.h } dst; dst 253 drivers/gpu/drm/tiny/gm12u320.c static void gm12u320_32bpp_to_24bpp_packed(u8 *dst, u8 *src, int len) dst 256 drivers/gpu/drm/tiny/gm12u320.c *dst++ = *src++; dst 257 drivers/gpu/drm/tiny/gm12u320.c *dst++ = *src++; dst 258 drivers/gpu/drm/tiny/gm12u320.c *dst++ = *src++; dst 64 drivers/gpu/drm/tiny/st7586.c static void st7586_xrgb8888_to_gray332(u8 *dst, void *vaddr, dst 84 drivers/gpu/drm/tiny/st7586.c *dst++ = val; dst 91 drivers/gpu/drm/tiny/st7586.c static int st7586_buf_copy(void *dst, struct drm_framebuffer *fb, dst 106 drivers/gpu/drm/tiny/st7586.c st7586_xrgb8888_to_gray332(dst, src, fb, clip); dst 251 drivers/gpu/drm/ttm/ttm_bo_util.c static int ttm_copy_io_page(void *dst, void *src, unsigned long page) dst 254 drivers/gpu/drm/ttm/ttm_bo_util.c (uint32_t *) ((unsigned long)dst + (page << PAGE_SHIFT)); dst 317 drivers/gpu/drm/ttm/ttm_bo_util.c void *dst; dst 323 drivers/gpu/drm/ttm/ttm_bo_util.c dst = ttm_kmap_atomic_prot(d, prot); dst 324 drivers/gpu/drm/ttm/ttm_bo_util.c if (!dst) dst 327 drivers/gpu/drm/ttm/ttm_bo_util.c memcpy_fromio(dst, src, PAGE_SIZE); dst 329 drivers/gpu/drm/ttm/ttm_bo_util.c ttm_kunmap_atomic_prot(dst, prot); dst 334 drivers/gpu/drm/ttm/ttm_bo_util.c static int ttm_copy_ttm_io_page(struct ttm_tt *ttm, void *dst, dst 344 drivers/gpu/drm/ttm/ttm_bo_util.c dst = (void *)((unsigned long)dst + (page << PAGE_SHIFT)); dst 349 drivers/gpu/drm/ttm/ttm_bo_util.c memcpy_toio(dst, src, PAGE_SIZE); dst 369 drivers/gpu/drm/vboxvideo/vbox_mode.c static void copy_cursor_image(u8 *src, u8 *dst, u32 width, u32 height, dst 375 drivers/gpu/drm/vboxvideo/vbox_mode.c memcpy(dst + mask_size, src, width * height * 4); dst 379 drivers/gpu/drm/vboxvideo/vbox_mode.c dst[i * line_size + j / 8] |= (0x80 >> (j % 8)); dst 34 drivers/gpu/drm/vboxvideo/vbva_base.c u8 *dst = &vbva->data[offset]; dst 39 drivers/gpu/drm/vboxvideo/vbva_base.c memcpy(dst, p, len); dst 42 drivers/gpu/drm/vboxvideo/vbva_base.c memcpy(dst, p, bytes_till_boundary); dst 130 drivers/gpu/drm/vc4/vc4_plane.c static enum vc4_scaling_mode vc4_get_scaling_mode(u32 src, u32 dst) dst 132 drivers/gpu/drm/vc4/vc4_plane.c if (dst == src) 
dst 134 drivers/gpu/drm/vc4/vc4_plane.c if (3 * dst >= 2 * src) dst 347 drivers/gpu/drm/vc4/vc4_plane.c vc4_state->crtc_x = state->dst.x1; dst 348 drivers/gpu/drm/vc4/vc4_plane.c vc4_state->crtc_y = state->dst.y1; dst 349 drivers/gpu/drm/vc4/vc4_plane.c vc4_state->crtc_w = state->dst.x2 - state->dst.x1; dst 350 drivers/gpu/drm/vc4/vc4_plane.c vc4_state->crtc_h = state->dst.y2 - state->dst.y1; dst 394 drivers/gpu/drm/vc4/vc4_plane.c static void vc4_write_tpz(struct vc4_plane_state *vc4_state, u32 src, u32 dst) dst 398 drivers/gpu/drm/vc4/vc4_plane.c scale = (1 << 16) * src / dst; dst 412 drivers/gpu/drm/vc4/vc4_plane.c static void vc4_write_ppf(struct vc4_plane_state *vc4_state, u32 src, u32 dst) dst 414 drivers/gpu/drm/vc4/vc4_plane.c u32 scale = (1 << 16) * src / dst; dst 1041 drivers/gpu/drm/vc4/vc4_plane.c plane->state->dst = state->dst; dst 37 drivers/gpu/drm/virtio/virtgpu_ioctl.c static void convert_to_hw_box(struct virtio_gpu_box *dst, dst 40 drivers/gpu/drm/virtio/virtgpu_ioctl.c dst->x = cpu_to_le32(src->x); dst 41 drivers/gpu/drm/virtio/virtgpu_ioctl.c dst->y = cpu_to_le32(src->y); dst 42 drivers/gpu/drm/virtio/virtgpu_ioctl.c dst->z = cpu_to_le32(src->z); dst 43 drivers/gpu/drm/virtio/virtgpu_ioctl.c dst->w = cpu_to_le32(src->w); dst 44 drivers/gpu/drm/virtio/virtgpu_ioctl.c dst->h = cpu_to_le32(src->h); dst 45 drivers/gpu/drm/virtio/virtgpu_ioctl.c dst->d = cpu_to_le32(src->d); dst 70 drivers/gpu/drm/vkms/vkms_composer.c int x_dst = src_composer->dst.x1; dst 71 drivers/gpu/drm/vkms/vkms_composer.c int y_dst = src_composer->dst.y1; dst 72 drivers/gpu/drm/vkms/vkms_composer.c int h_dst = drm_rect_height(&src_composer->dst); dst 73 drivers/gpu/drm/vkms/vkms_composer.c int w_dst = drm_rect_width(&src_composer->dst); dst 25 drivers/gpu/drm/vkms/vkms_drv.h struct drm_rect src, dst; dst 105 drivers/gpu/drm/vkms/vkms_plane.c memcpy(&composer->dst, &plane->state->dst, sizeof(struct drm_rect)); dst 1229 drivers/gpu/drm/vmwgfx/device_include/svga3d_cmd.h SVGA3dSurfaceImageId dst; dst 1246 drivers/gpu/drm/vmwgfx/device_include/svga3d_cmd.h SVGA3dSurfaceImageId dst; dst 1265 drivers/gpu/drm/vmwgfx/device_include/svga3d_cmd.h SVGA3dSurfaceImageId dst; dst 1301 drivers/gpu/drm/vmwgfx/device_include/svga3d_cmd.h SVGA3dSurfaceImageId dst; dst 1324 drivers/gpu/drm/vmwgfx/device_include/svga3d_cmd.h SVGA3dSurfaceImageId dst; dst 37 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c (const _type * dst, const _type * src, size_t size)\ dst 42 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c if (*dst++ != *src++) \ dst 58 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c const _type * dst, const _type * src, size_t size) \ dst 61 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c if (*--dst != *--src) \ dst 104 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c unsigned int spill = SPILL(dst, _type); \ dst 110 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c diff_offs = vmw_find_first_diff_u8(dst, src, spill); \ dst 114 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c dst += spill; \ dst 124 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c ((_type *) dst, (_type *) src, to_copy); \ dst 128 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c dst += diff_offs; \ dst 147 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c static size_t vmw_find_first_diff(const u8 *dst, const u8 *src, size_t size, dst 163 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c return round_down(offset + vmw_find_first_diff_u8(dst, src, size), dst 177 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c unsigned int spill = SPILL(dst, _type); \ dst 182 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c diff_offs = vmw_find_last_diff_u8(dst, src, spill); \ dst 188 
drivers/gpu/drm/vmwgfx/vmwgfx_blit.c dst -= spill; \ dst 197 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c ((_type *) dst, (_type *) src, to_copy); \ dst 202 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c dst -= to_copy - diff_offs; \ dst 219 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c static ssize_t vmw_find_last_diff(const u8 *dst, const u8 *src, size_t size, dst 222 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c dst += size; dst 231 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c return round_down(vmw_find_last_diff_u8(dst, src, size) - 1, dst 445 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c int vmw_bo_cpu_blit(struct ttm_buffer_object *dst, dst 461 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c if (!(dst->mem.placement & TTM_PL_FLAG_NO_EVICT)) dst 462 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c dma_resv_assert_held(dst->base.resv); dst 466 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c if (dst->ttm->state == tt_unpopulated) { dst 467 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c ret = dst->ttm->bdev->driver->ttm_tt_populate(dst->ttm, &ctx); dst 482 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c d.dst_pages = dst->ttm->pages; dst 484 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c d.dst_num_pages = dst->num_pages; dst 486 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c d.dst_prot = ttm_io_prot(dst->mem.placement, PAGE_KERNEL); dst 1380 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h int vmw_bo_cpu_blit(struct ttm_buffer_object *dst, dst 158 drivers/gpu/drm/vmwgfx/vmwgfx_overlay.c items[SVGA_VIDEO_DST_X].value = arg->dst.x; dst 159 drivers/gpu/drm/vmwgfx/vmwgfx_overlay.c items[SVGA_VIDEO_DST_Y].value = arg->dst.y; dst 160 drivers/gpu/drm/vmwgfx/vmwgfx_overlay.c items[SVGA_VIDEO_DST_WIDTH].value = arg->dst.w; dst 161 drivers/gpu/drm/vmwgfx/vmwgfx_overlay.c items[SVGA_VIDEO_DST_HEIGHT].value = arg->dst.h; dst 132 drivers/gpu/drm/zte/zx_plane.c static inline u32 rsz_step_value(u32 src, u32 dst) dst 136 drivers/gpu/drm/zte/zx_plane.c if (src == dst) dst 138 drivers/gpu/drm/zte/zx_plane.c else if (src < dst) dst 139 drivers/gpu/drm/zte/zx_plane.c val = RSZ_PARA_STEP((src << 16) / dst); dst 140 drivers/gpu/drm/zte/zx_plane.c else if (src > dst) dst 141 drivers/gpu/drm/zte/zx_plane.c val = RSZ_DATA_STEP(src / dst) | dst 142 drivers/gpu/drm/zte/zx_plane.c RSZ_PARA_STEP(((src << 16) / dst) & 0xffff); dst 188 drivers/gpu/drm/zte/zx_plane.c struct drm_rect *dst = &state->dst; dst 210 drivers/gpu/drm/zte/zx_plane.c dst_x = dst->x1; dst 211 drivers/gpu/drm/zte/zx_plane.c dst_y = dst->y1; dst 212 drivers/gpu/drm/zte/zx_plane.c dst_w = drm_rect_width(dst); dst 213 drivers/gpu/drm/zte/zx_plane.c dst_h = drm_rect_height(dst); dst 368 drivers/hid/hid-picolcd_debugfs.c static void dump_buff_as_hex(char *dst, size_t dst_sz, const u8 *data, dst 373 drivers/hid/hid-picolcd_debugfs.c dst[j++] = hex_asc[(data[i] >> 4) & 0x0f]; dst 374 drivers/hid/hid-picolcd_debugfs.c dst[j++] = hex_asc[data[i] & 0x0f]; dst 375 drivers/hid/hid-picolcd_debugfs.c dst[j++] = ' '; dst 377 drivers/hid/hid-picolcd_debugfs.c dst[j] = '\0'; dst 379 drivers/hid/hid-picolcd_debugfs.c dst[j-1] = '\n'; dst 381 drivers/hid/hid-picolcd_debugfs.c dst[j-2] = dst[j-3] = '.'; dst 701 drivers/hwtracing/intel_th/core.c int src = 0, dst = 0; dst 703 drivers/hwtracing/intel_th/core.c for (src = 0, dst = 0; dst <= th->num_thdevs; src++, dst++) { dst 718 drivers/hwtracing/intel_th/core.c for (; dst < th->num_thdevs; dst++) { dst 719 drivers/hwtracing/intel_th/core.c if (th->thdev[dst]->type != INTEL_TH_OUTPUT) dst 722 drivers/hwtracing/intel_th/core.c if (th->thdev[dst]->output.type != otype) dst 732 drivers/hwtracing/intel_th/core.c if (dst == th->num_thdevs) dst 89 
drivers/i2c/busses/i2c-highlander.c static void smbus_write_data(u8 *src, u16 *dst, int len) dst 92 drivers/i2c/busses/i2c-highlander.c *dst++ = be16_to_cpup((__be16 *)src); dst 97 drivers/i2c/busses/i2c-highlander.c *dst = *src << 8; dst 100 drivers/i2c/busses/i2c-highlander.c static void smbus_read_data(u16 *src, u8 *dst, int len) dst 103 drivers/i2c/busses/i2c-highlander.c *(__be16 *)dst = cpu_to_be16p(src++); dst 104 drivers/i2c/busses/i2c-highlander.c dst += 2; dst 108 drivers/i2c/busses/i2c-highlander.c *dst = *src >> 8; dst 325 drivers/infiniband/core/addr.c static int dst_fetch_ha(const struct dst_entry *dst, dst 332 drivers/infiniband/core/addr.c n = dst_neigh_lookup(dst, daddr); dst 340 drivers/infiniband/core/addr.c neigh_ha_snapshot(dev_addr->dst_dev_addr, n, dst->dev); dst 348 drivers/infiniband/core/addr.c static bool has_gateway(const struct dst_entry *dst, sa_family_t family) dst 354 drivers/infiniband/core/addr.c rt = container_of(dst, struct rtable, dst); dst 358 drivers/infiniband/core/addr.c rt6 = container_of(dst, struct rt6_info, dst); dst 362 drivers/infiniband/core/addr.c static int fetch_ha(const struct dst_entry *dst, struct rdma_dev_addr *dev_addr, dst 375 drivers/infiniband/core/addr.c if (has_gateway(dst, family) && dev_addr->network == RDMA_NETWORK_IB) dst 378 drivers/infiniband/core/addr.c return dst_fetch_ha(dst, dev_addr, daddr); dst 407 drivers/infiniband/core/addr.c addr->hoplimit = ip4_dst_hoplimit(&rt->dst); dst 423 drivers/infiniband/core/addr.c struct dst_entry *dst; dst 430 drivers/infiniband/core/addr.c dst = ipv6_stub->ipv6_dst_lookup_flow(addr->net, NULL, &fl6, NULL); dst 431 drivers/infiniband/core/addr.c if (IS_ERR(dst)) dst 432 drivers/infiniband/core/addr.c return PTR_ERR(dst); dst 437 drivers/infiniband/core/addr.c addr->hoplimit = ip6_dst_hoplimit(dst); dst 439 drivers/infiniband/core/addr.c *pdst = dst; dst 452 drivers/infiniband/core/addr.c static int addr_resolve_neigh(const struct dst_entry *dst, dst 465 drivers/infiniband/core/addr.c ret = fetch_ha(dst, addr, dst_in, seq); dst 473 drivers/infiniband/core/addr.c const struct dst_entry *dst, dst 478 drivers/infiniband/core/addr.c if (dst->dev->flags & IFF_LOOPBACK) dst 481 drivers/infiniband/core/addr.c rdma_copy_src_l2_addr(dev_addr, dst->dev); dst 488 drivers/infiniband/core/addr.c if (has_gateway(dst, dst_in->sa_family) && dst 502 drivers/infiniband/core/addr.c const struct dst_entry *dst) dst 504 drivers/infiniband/core/addr.c struct net_device *ndev = READ_ONCE(dst->dev); dst 520 drivers/infiniband/core/addr.c return copy_src_l2_addr(dev_addr, dst_in, dst, ndev); dst 556 drivers/infiniband/core/addr.c struct dst_entry *dst = NULL; dst 586 drivers/infiniband/core/addr.c dst = &rt->dst; dst 588 drivers/infiniband/core/addr.c ret = addr6_resolve(src_in, dst_in, addr, &dst); dst 594 drivers/infiniband/core/addr.c ret = rdma_set_src_addr_rcu(addr, &ndev_flags, dst_in, dst); dst 602 drivers/infiniband/core/addr.c ret = addr_resolve_neigh(dst, dst_in, addr, ndev_flags, seq); dst 607 drivers/infiniband/core/addr.c dst_release(dst); dst 1180 drivers/infiniband/core/cma.c static int cma_addr_cmp(const struct sockaddr *src, const struct sockaddr *dst) dst 1182 drivers/infiniband/core/cma.c if (src->sa_family != dst->sa_family) dst 1188 drivers/infiniband/core/cma.c ((struct sockaddr_in *)dst)->sin_addr.s_addr; dst 1191 drivers/infiniband/core/cma.c struct sockaddr_in6 *dst_addr6 = (struct sockaddr_in6 *)dst; dst 1207 drivers/infiniband/core/cma.c &((struct sockaddr_ib *) dst)->sib_addr); dst 
284 drivers/infiniband/core/multicast.c struct ib_sa_mcmember_rec *dst, ib_sa_comp_mask comp_mask) dst 289 drivers/infiniband/core/multicast.c memcmp(&src->port_gid, &dst->port_gid, sizeof src->port_gid)) dst 291 drivers/infiniband/core/multicast.c if (comp_mask & IB_SA_MCMEMBER_REC_QKEY && src->qkey != dst->qkey) dst 293 drivers/infiniband/core/multicast.c if (comp_mask & IB_SA_MCMEMBER_REC_MLID && src->mlid != dst->mlid) dst 296 drivers/infiniband/core/multicast.c IB_SA_MCMEMBER_REC_MTU, dst->mtu_selector, dst 297 drivers/infiniband/core/multicast.c src->mtu, dst->mtu)) dst 300 drivers/infiniband/core/multicast.c src->traffic_class != dst->traffic_class) dst 302 drivers/infiniband/core/multicast.c if (comp_mask & IB_SA_MCMEMBER_REC_PKEY && src->pkey != dst->pkey) dst 305 drivers/infiniband/core/multicast.c IB_SA_MCMEMBER_REC_RATE, dst->rate_selector, dst 306 drivers/infiniband/core/multicast.c src->rate, dst->rate)) dst 311 drivers/infiniband/core/multicast.c dst->packet_life_time_selector, dst 312 drivers/infiniband/core/multicast.c src->packet_life_time, dst->packet_life_time)) dst 314 drivers/infiniband/core/multicast.c if (comp_mask & IB_SA_MCMEMBER_REC_SL && src->sl != dst->sl) dst 317 drivers/infiniband/core/multicast.c src->flow_label != dst->flow_label) dst 320 drivers/infiniband/core/multicast.c src->hop_limit != dst->hop_limit) dst 322 drivers/infiniband/core/multicast.c if (comp_mask & IB_SA_MCMEMBER_REC_SCOPE && src->scope != dst->scope) dst 253 drivers/infiniband/core/ucma.c static void ucma_copy_conn_event(struct rdma_ucm_conn_param *dst, dst 257 drivers/infiniband/core/ucma.c memcpy(dst->private_data, src->private_data, dst 259 drivers/infiniband/core/ucma.c dst->private_data_len = src->private_data_len; dst 260 drivers/infiniband/core/ucma.c dst->responder_resources =src->responder_resources; dst 261 drivers/infiniband/core/ucma.c dst->initiator_depth = src->initiator_depth; dst 262 drivers/infiniband/core/ucma.c dst->flow_control = src->flow_control; dst 263 drivers/infiniband/core/ucma.c dst->retry_count = src->retry_count; dst 264 drivers/infiniband/core/ucma.c dst->rnr_retry_count = src->rnr_retry_count; dst 265 drivers/infiniband/core/ucma.c dst->srq = src->srq; dst 266 drivers/infiniband/core/ucma.c dst->qp_num = src->qp_num; dst 270 drivers/infiniband/core/ucma.c struct rdma_ucm_ud_param *dst, dst 274 drivers/infiniband/core/ucma.c memcpy(dst->private_data, src->private_data, dst 276 drivers/infiniband/core/ucma.c dst->private_data_len = src->private_data_len; dst 277 drivers/infiniband/core/ucma.c ib_copy_ah_attr_to_user(device, &dst->ah_attr, &src->ah_attr); dst 278 drivers/infiniband/core/ucma.c dst->qp_num = src->qp_num; dst 279 drivers/infiniband/core/ucma.c dst->qkey = src->qkey; dst 1055 drivers/infiniband/core/ucma.c struct rdma_conn_param *dst, dst 1058 drivers/infiniband/core/ucma.c dst->private_data = src->private_data; dst 1059 drivers/infiniband/core/ucma.c dst->private_data_len = src->private_data_len; dst 1060 drivers/infiniband/core/ucma.c dst->responder_resources =src->responder_resources; dst 1061 drivers/infiniband/core/ucma.c dst->initiator_depth = src->initiator_depth; dst 1062 drivers/infiniband/core/ucma.c dst->flow_control = src->flow_control; dst 1063 drivers/infiniband/core/ucma.c dst->retry_count = src->retry_count; dst 1064 drivers/infiniband/core/ucma.c dst->rnr_retry_count = src->rnr_retry_count; dst 1065 drivers/infiniband/core/ucma.c dst->srq = src->srq; dst 1066 drivers/infiniband/core/ucma.c dst->qp_num = src->qp_num; dst 1067 
drivers/infiniband/core/ucma.c dst->qkey = (id->route.addr.src_addr.ss_family == AF_IB) ? src->qkey : 0; dst 370 drivers/infiniband/core/umem.c int ib_umem_copy_from(void *dst, struct ib_umem *umem, size_t offset, dst 382 drivers/infiniband/core/umem.c ret = sg_pcopy_to_buffer(umem->sg_head.sgl, umem->sg_nents, dst, length, dst 63 drivers/infiniband/core/uverbs_marshall.c struct ib_uverbs_ah_attr *dst, dst 69 drivers/infiniband/core/uverbs_marshall.c memset(&dst->grh.reserved, 0, sizeof(dst->grh.reserved)); dst 76 drivers/infiniband/core/uverbs_marshall.c dst->dlid = rdma_ah_get_dlid(src); dst 77 drivers/infiniband/core/uverbs_marshall.c dst->sl = rdma_ah_get_sl(src); dst 78 drivers/infiniband/core/uverbs_marshall.c dst->src_path_bits = rdma_ah_get_path_bits(src); dst 79 drivers/infiniband/core/uverbs_marshall.c dst->static_rate = rdma_ah_get_static_rate(src); dst 80 drivers/infiniband/core/uverbs_marshall.c dst->is_global = rdma_ah_get_ah_flags(src) & dst 82 drivers/infiniband/core/uverbs_marshall.c if (dst->is_global) { dst 85 drivers/infiniband/core/uverbs_marshall.c memcpy(dst->grh.dgid, grh->dgid.raw, sizeof(grh->dgid)); dst 86 drivers/infiniband/core/uverbs_marshall.c dst->grh.flow_label = grh->flow_label; dst 87 drivers/infiniband/core/uverbs_marshall.c dst->grh.sgid_index = grh->sgid_index; dst 88 drivers/infiniband/core/uverbs_marshall.c dst->grh.hop_limit = grh->hop_limit; dst 89 drivers/infiniband/core/uverbs_marshall.c dst->grh.traffic_class = grh->traffic_class; dst 91 drivers/infiniband/core/uverbs_marshall.c dst->port_num = rdma_ah_get_port_num(src); dst 92 drivers/infiniband/core/uverbs_marshall.c dst->reserved = 0; dst 97 drivers/infiniband/core/uverbs_marshall.c struct ib_uverbs_qp_attr *dst, dst 100 drivers/infiniband/core/uverbs_marshall.c dst->qp_state = src->qp_state; dst 101 drivers/infiniband/core/uverbs_marshall.c dst->cur_qp_state = src->cur_qp_state; dst 102 drivers/infiniband/core/uverbs_marshall.c dst->path_mtu = src->path_mtu; dst 103 drivers/infiniband/core/uverbs_marshall.c dst->path_mig_state = src->path_mig_state; dst 104 drivers/infiniband/core/uverbs_marshall.c dst->qkey = src->qkey; dst 105 drivers/infiniband/core/uverbs_marshall.c dst->rq_psn = src->rq_psn; dst 106 drivers/infiniband/core/uverbs_marshall.c dst->sq_psn = src->sq_psn; dst 107 drivers/infiniband/core/uverbs_marshall.c dst->dest_qp_num = src->dest_qp_num; dst 108 drivers/infiniband/core/uverbs_marshall.c dst->qp_access_flags = src->qp_access_flags; dst 110 drivers/infiniband/core/uverbs_marshall.c dst->max_send_wr = src->cap.max_send_wr; dst 111 drivers/infiniband/core/uverbs_marshall.c dst->max_recv_wr = src->cap.max_recv_wr; dst 112 drivers/infiniband/core/uverbs_marshall.c dst->max_send_sge = src->cap.max_send_sge; dst 113 drivers/infiniband/core/uverbs_marshall.c dst->max_recv_sge = src->cap.max_recv_sge; dst 114 drivers/infiniband/core/uverbs_marshall.c dst->max_inline_data = src->cap.max_inline_data; dst 116 drivers/infiniband/core/uverbs_marshall.c ib_copy_ah_attr_to_user(device, &dst->ah_attr, &src->ah_attr); dst 117 drivers/infiniband/core/uverbs_marshall.c ib_copy_ah_attr_to_user(device, &dst->alt_ah_attr, &src->alt_ah_attr); dst 119 drivers/infiniband/core/uverbs_marshall.c dst->pkey_index = src->pkey_index; dst 120 drivers/infiniband/core/uverbs_marshall.c dst->alt_pkey_index = src->alt_pkey_index; dst 121 drivers/infiniband/core/uverbs_marshall.c dst->en_sqd_async_notify = src->en_sqd_async_notify; dst 122 drivers/infiniband/core/uverbs_marshall.c dst->sq_draining = 
src->sq_draining; dst 123 drivers/infiniband/core/uverbs_marshall.c dst->max_rd_atomic = src->max_rd_atomic; dst 124 drivers/infiniband/core/uverbs_marshall.c dst->max_dest_rd_atomic = src->max_dest_rd_atomic; dst 125 drivers/infiniband/core/uverbs_marshall.c dst->min_rnr_timer = src->min_rnr_timer; dst 126 drivers/infiniband/core/uverbs_marshall.c dst->port_num = src->port_num; dst 127 drivers/infiniband/core/uverbs_marshall.c dst->timeout = src->timeout; dst 128 drivers/infiniband/core/uverbs_marshall.c dst->retry_cnt = src->retry_cnt; dst 129 drivers/infiniband/core/uverbs_marshall.c dst->rnr_retry = src->rnr_retry; dst 130 drivers/infiniband/core/uverbs_marshall.c dst->alt_port_num = src->alt_port_num; dst 131 drivers/infiniband/core/uverbs_marshall.c dst->alt_timeout = src->alt_timeout; dst 132 drivers/infiniband/core/uverbs_marshall.c memset(dst->reserved, 0, sizeof(dst->reserved)); dst 136 drivers/infiniband/core/uverbs_marshall.c static void __ib_copy_path_rec_to_user(struct ib_user_path_rec *dst, dst 139 drivers/infiniband/core/uverbs_marshall.c memcpy(dst->dgid, src->dgid.raw, sizeof(src->dgid)); dst 140 drivers/infiniband/core/uverbs_marshall.c memcpy(dst->sgid, src->sgid.raw, sizeof(src->sgid)); dst 142 drivers/infiniband/core/uverbs_marshall.c dst->dlid = htons(ntohl(sa_path_get_dlid(src))); dst 143 drivers/infiniband/core/uverbs_marshall.c dst->slid = htons(ntohl(sa_path_get_slid(src))); dst 144 drivers/infiniband/core/uverbs_marshall.c dst->raw_traffic = sa_path_get_raw_traffic(src); dst 145 drivers/infiniband/core/uverbs_marshall.c dst->flow_label = src->flow_label; dst 146 drivers/infiniband/core/uverbs_marshall.c dst->hop_limit = src->hop_limit; dst 147 drivers/infiniband/core/uverbs_marshall.c dst->traffic_class = src->traffic_class; dst 148 drivers/infiniband/core/uverbs_marshall.c dst->reversible = src->reversible; dst 149 drivers/infiniband/core/uverbs_marshall.c dst->numb_path = src->numb_path; dst 150 drivers/infiniband/core/uverbs_marshall.c dst->pkey = src->pkey; dst 151 drivers/infiniband/core/uverbs_marshall.c dst->sl = src->sl; dst 152 drivers/infiniband/core/uverbs_marshall.c dst->mtu_selector = src->mtu_selector; dst 153 drivers/infiniband/core/uverbs_marshall.c dst->mtu = src->mtu; dst 154 drivers/infiniband/core/uverbs_marshall.c dst->rate_selector = src->rate_selector; dst 155 drivers/infiniband/core/uverbs_marshall.c dst->rate = src->rate; dst 156 drivers/infiniband/core/uverbs_marshall.c dst->packet_life_time = src->packet_life_time; dst 157 drivers/infiniband/core/uverbs_marshall.c dst->preference = src->preference; dst 158 drivers/infiniband/core/uverbs_marshall.c dst->packet_life_time_selector = src->packet_life_time_selector; dst 161 drivers/infiniband/core/uverbs_marshall.c void ib_copy_path_rec_to_user(struct ib_user_path_rec *dst, dst 168 drivers/infiniband/core/uverbs_marshall.c __ib_copy_path_rec_to_user(dst, &rec); dst 171 drivers/infiniband/core/uverbs_marshall.c __ib_copy_path_rec_to_user(dst, src); dst 175 drivers/infiniband/core/uverbs_marshall.c void ib_copy_path_rec_from_user(struct sa_path_rec *dst, dst 180 drivers/infiniband/core/uverbs_marshall.c memset(dst, 0, sizeof(*dst)); dst 183 drivers/infiniband/core/uverbs_marshall.c dst->rec_type = SA_PATH_REC_TYPE_OPA; dst 187 drivers/infiniband/core/uverbs_marshall.c dst->rec_type = SA_PATH_REC_TYPE_IB; dst 191 drivers/infiniband/core/uverbs_marshall.c memcpy(dst->dgid.raw, src->dgid, sizeof dst->dgid); dst 192 drivers/infiniband/core/uverbs_marshall.c memcpy(dst->sgid.raw, src->sgid, sizeof 
dst->sgid); dst 194 drivers/infiniband/core/uverbs_marshall.c sa_path_set_dlid(dst, dlid); dst 195 drivers/infiniband/core/uverbs_marshall.c sa_path_set_slid(dst, slid); dst 196 drivers/infiniband/core/uverbs_marshall.c sa_path_set_raw_traffic(dst, src->raw_traffic); dst 197 drivers/infiniband/core/uverbs_marshall.c dst->flow_label = src->flow_label; dst 198 drivers/infiniband/core/uverbs_marshall.c dst->hop_limit = src->hop_limit; dst 199 drivers/infiniband/core/uverbs_marshall.c dst->traffic_class = src->traffic_class; dst 200 drivers/infiniband/core/uverbs_marshall.c dst->reversible = src->reversible; dst 201 drivers/infiniband/core/uverbs_marshall.c dst->numb_path = src->numb_path; dst 202 drivers/infiniband/core/uverbs_marshall.c dst->pkey = src->pkey; dst 203 drivers/infiniband/core/uverbs_marshall.c dst->sl = src->sl; dst 204 drivers/infiniband/core/uverbs_marshall.c dst->mtu_selector = src->mtu_selector; dst 205 drivers/infiniband/core/uverbs_marshall.c dst->mtu = src->mtu; dst 206 drivers/infiniband/core/uverbs_marshall.c dst->rate_selector = src->rate_selector; dst 207 drivers/infiniband/core/uverbs_marshall.c dst->rate = src->rate; dst 208 drivers/infiniband/core/uverbs_marshall.c dst->packet_life_time = src->packet_life_time; dst 209 drivers/infiniband/core/uverbs_marshall.c dst->preference = src->preference; dst 210 drivers/infiniband/core/uverbs_marshall.c dst->packet_life_time_selector = src->packet_life_time_selector; dst 213 drivers/infiniband/core/uverbs_marshall.c sa_path_set_dmac_zero(dst); dst 287 drivers/infiniband/hw/cxgb3/iwch_cm.c dst_release(ep->dst); dst 441 drivers/infiniband/hw/cxgb3/iwch_cm.c mtu_idx = find_best_mtu(T3C_DATA(ep->com.tdev), dst_mtu(ep->dst)); dst 637 drivers/infiniband/hw/cxgb3/iwch_cm.c dst_confirm(ep->dst); dst 1103 drivers/infiniband/hw/cxgb3/iwch_cm.c dst_confirm(ep->dst); dst 1188 drivers/infiniband/hw/cxgb3/iwch_cm.c dst_release(ep->dst); dst 1279 drivers/infiniband/hw/cxgb3/iwch_cm.c mtu_idx = find_best_mtu(T3C_DATA(ep->com.tdev), dst_mtu(ep->dst)); dst 1339 drivers/infiniband/hw/cxgb3/iwch_cm.c struct dst_entry *dst; dst 1371 drivers/infiniband/hw/cxgb3/iwch_cm.c dst = &rt->dst; dst 1372 drivers/infiniband/hw/cxgb3/iwch_cm.c l2t = t3_l2t_get(tdev, dst, NULL, &req->peer_ip); dst 1375 drivers/infiniband/hw/cxgb3/iwch_cm.c dst_release(dst); dst 1382 drivers/infiniband/hw/cxgb3/iwch_cm.c dst_release(dst); dst 1398 drivers/infiniband/hw/cxgb3/iwch_cm.c child_ep->dst = dst; dst 1421 drivers/infiniband/hw/cxgb3/iwch_cm.c dst_confirm(ep->dst); dst 1437 drivers/infiniband/hw/cxgb3/iwch_cm.c dst_confirm(ep->dst); dst 1601 drivers/infiniband/hw/cxgb3/iwch_cm.c dst_confirm(ep->dst); dst 1939 drivers/infiniband/hw/cxgb3/iwch_cm.c ep->dst = &rt->dst; dst 1940 drivers/infiniband/hw/cxgb3/iwch_cm.c ep->l2t = t3_l2t_get(ep->com.tdev, ep->dst, NULL, dst 1962 drivers/infiniband/hw/cxgb3/iwch_cm.c dst_release(ep->dst); dst 2131 drivers/infiniband/hw/cxgb3/iwch_cm.c if (ep->dst != old) dst 2140 drivers/infiniband/hw/cxgb3/iwch_cm.c ep->dst = new; dst 184 drivers/infiniband/hw/cxgb3/iwch_cm.h struct dst_entry *dst; dst 176 drivers/infiniband/hw/cxgb3/iwch_ev.c dst_confirm(qhp->ep->dst); dst 407 drivers/infiniband/hw/cxgb4/cm.c dst_release(ep->dst); dst 1230 drivers/infiniband/hw/cxgb4/cm.c dst_confirm(ep->dst); dst 2069 drivers/infiniband/hw/cxgb4/cm.c struct dst_entry *dst, struct c4iw_dev *cdev, dst 2076 drivers/infiniband/hw/cxgb4/cm.c n = dst_neigh_lookup(dst, peer_ip); dst 2124 drivers/infiniband/hw/cxgb4/cm.c ep->mtu = dst_mtu(dst); dst 2199 
drivers/infiniband/hw/cxgb4/cm.c ep->dst = cxgb_find_route(&ep->com.dev->rdev.lldi, get_real_dev, dst 2207 drivers/infiniband/hw/cxgb4/cm.c ep->dst = cxgb_find_route6(&ep->com.dev->rdev.lldi, dst 2218 drivers/infiniband/hw/cxgb4/cm.c if (!ep->dst) { dst 2223 drivers/infiniband/hw/cxgb4/cm.c err = import_ep(ep, iptype, ra, ep->dst, ep->com.dev, false, dst 2245 drivers/infiniband/hw/cxgb4/cm.c dst_release(ep->dst); dst 2333 drivers/infiniband/hw/cxgb4/cm.c dst_release(ep->dst); dst 2370 drivers/infiniband/hw/cxgb4/cm.c dst_release(ep->dst); dst 2520 drivers/infiniband/hw/cxgb4/cm.c struct dst_entry *dst; dst 2556 drivers/infiniband/hw/cxgb4/cm.c dst = cxgb_find_route(&dev->rdev.lldi, get_real_dev, dst 2564 drivers/infiniband/hw/cxgb4/cm.c dst = cxgb_find_route6(&dev->rdev.lldi, get_real_dev, dst 2570 drivers/infiniband/hw/cxgb4/cm.c if (!dst) { dst 2578 drivers/infiniband/hw/cxgb4/cm.c dst_release(dst); dst 2582 drivers/infiniband/hw/cxgb4/cm.c err = import_ep(child_ep, iptype, peer_ip, dst, dev, false, dst 2586 drivers/infiniband/hw/cxgb4/cm.c dst_release(dst); dst 2644 drivers/infiniband/hw/cxgb4/cm.c child_ep->dst = dst; dst 2694 drivers/infiniband/hw/cxgb4/cm.c dst_confirm(ep->dst); dst 2723 drivers/infiniband/hw/cxgb4/cm.c dst_confirm(ep->dst); dst 2926 drivers/infiniband/hw/cxgb4/cm.c dst_confirm(ep->dst); dst 2960 drivers/infiniband/hw/cxgb4/cm.c dst_release(ep->dst); dst 3073 drivers/infiniband/hw/cxgb4/cm.c dst_confirm(ep->dst); dst 3392 drivers/infiniband/hw/cxgb4/cm.c ep->dst = cxgb_find_route(&dev->rdev.lldi, get_real_dev, dst 3415 drivers/infiniband/hw/cxgb4/cm.c ep->dst = cxgb_find_route6(&dev->rdev.lldi, get_real_dev, dst 3422 drivers/infiniband/hw/cxgb4/cm.c if (!ep->dst) { dst 3428 drivers/infiniband/hw/cxgb4/cm.c err = import_ep(ep, iptype, ra, ep->dst, ep->com.dev, true, dst 3449 drivers/infiniband/hw/cxgb4/cm.c dst_release(ep->dst); dst 3790 drivers/infiniband/hw/cxgb4/cm.c dst_release(ep->dst); dst 4067 drivers/infiniband/hw/cxgb4/cm.c struct dst_entry *dst; dst 4136 drivers/infiniband/hw/cxgb4/cm.c dst = cxgb_find_route(&dev->rdev.lldi, get_real_dev, dst 4139 drivers/infiniband/hw/cxgb4/cm.c if (!dst) { dst 4143 drivers/infiniband/hw/cxgb4/cm.c neigh = dst_neigh_lookup_skb(dst, skb); dst 4189 drivers/infiniband/hw/cxgb4/cm.c dst_release(dst); dst 889 drivers/infiniband/hw/cxgb4/iw_cxgb4.h struct dst_entry *dst; dst 2654 drivers/infiniband/hw/cxgb4/qp.c u64 *src, *dst; dst 2657 drivers/infiniband/hw/cxgb4/qp.c dst = (u64 *)((u8 *)srq->queue + srq->wq_pidx * T4_EQ_ENTRY_SIZE); dst 2659 drivers/infiniband/hw/cxgb4/qp.c *dst++ = *src++; dst 2660 drivers/infiniband/hw/cxgb4/qp.c if (dst >= (u64 *)&srq->queue[srq->size]) dst 2661 drivers/infiniband/hw/cxgb4/qp.c dst = (u64 *)srq->queue; dst 2662 drivers/infiniband/hw/cxgb4/qp.c *dst++ = *src++; dst 2663 drivers/infiniband/hw/cxgb4/qp.c if (dst >= (u64 *)&srq->queue[srq->size]) dst 2664 drivers/infiniband/hw/cxgb4/qp.c dst = (u64 *)srq->queue; dst 580 drivers/infiniband/hw/cxgb4/t4.h static inline void pio_copy(u64 __iomem *dst, u64 *src) dst 585 drivers/infiniband/hw/cxgb4/t4.h writeq(*src, dst); dst 587 drivers/infiniband/hw/cxgb4/t4.h dst++; dst 582 drivers/infiniband/hw/i40iw/i40iw.h void i40iw_copy_ip_ntohl(u32 *dst, __be32 *src); dst 144 drivers/infiniband/hw/i40iw/i40iw_cm.c void i40iw_copy_ip_ntohl(u32 *dst, __be32 *src) dst 146 drivers/infiniband/hw/i40iw/i40iw_cm.c *dst++ = ntohl(*src++); dst 147 drivers/infiniband/hw/i40iw/i40iw_cm.c *dst++ = ntohl(*src++); dst 148 drivers/infiniband/hw/i40iw/i40iw_cm.c *dst++ = 
ntohl(*src++); dst 149 drivers/infiniband/hw/i40iw/i40iw_cm.c *dst = ntohl(*src); dst 157 drivers/infiniband/hw/i40iw/i40iw_cm.c static inline void i40iw_copy_ip_htonl(__be32 *dst, u32 *src) dst 159 drivers/infiniband/hw/i40iw/i40iw_cm.c *dst++ = htonl(*src++); dst 160 drivers/infiniband/hw/i40iw/i40iw_cm.c *dst++ = htonl(*src++); dst 161 drivers/infiniband/hw/i40iw/i40iw_cm.c *dst++ = htonl(*src++); dst 162 drivers/infiniband/hw/i40iw/i40iw_cm.c *dst = htonl(*src); dst 1999 drivers/infiniband/hw/i40iw/i40iw_cm.c neigh = dst_neigh_lookup(&rt->dst, &dst_ipaddr); dst 2038 drivers/infiniband/hw/i40iw/i40iw_cm.c struct dst_entry *dst; dst 2047 drivers/infiniband/hw/i40iw/i40iw_cm.c dst = ip6_route_output(&init_net, NULL, &fl6); dst 2048 drivers/infiniband/hw/i40iw/i40iw_cm.c return dst; dst 2064 drivers/infiniband/hw/i40iw/i40iw_cm.c struct dst_entry *dst; dst 2074 drivers/infiniband/hw/i40iw/i40iw_cm.c dst = i40iw_get_dst_ipv6(&src_addr, &dst_addr); dst 2075 drivers/infiniband/hw/i40iw/i40iw_cm.c if (!dst || dst->error) { dst 2076 drivers/infiniband/hw/i40iw/i40iw_cm.c if (dst) { dst 2077 drivers/infiniband/hw/i40iw/i40iw_cm.c dst_release(dst); dst 2079 drivers/infiniband/hw/i40iw/i40iw_cm.c dst->error); dst 2084 drivers/infiniband/hw/i40iw/i40iw_cm.c neigh = dst_neigh_lookup(dst, dst_addr.sin6_addr.in6_u.u6_addr32); dst 2122 drivers/infiniband/hw/i40iw/i40iw_cm.c dst_release(dst); dst 384 drivers/infiniband/hw/mlx4/mcg.c struct ib_sa_mcmember_data *dst, ib_sa_comp_mask comp_mask) dst 391 drivers/infiniband/hw/mlx4/mcg.c if (comp_mask & IB_SA_MCMEMBER_REC_QKEY && src->qkey != dst->qkey) dst 393 drivers/infiniband/hw/mlx4/mcg.c if (comp_mask & IB_SA_MCMEMBER_REC_MLID && src->mlid != dst->mlid) dst 397 drivers/infiniband/hw/mlx4/mcg.c src->mtusel_mtu, dst->mtusel_mtu)) dst 400 drivers/infiniband/hw/mlx4/mcg.c src->tclass != dst->tclass) dst 402 drivers/infiniband/hw/mlx4/mcg.c if (comp_mask & IB_SA_MCMEMBER_REC_PKEY && src->pkey != dst->pkey) dst 406 drivers/infiniband/hw/mlx4/mcg.c src->ratesel_rate, dst->ratesel_rate)) dst 411 drivers/infiniband/hw/mlx4/mcg.c src->lifetmsel_lifetm, dst->lifetmsel_lifetm)) dst 415 drivers/infiniband/hw/mlx4/mcg.c (be32_to_cpu(dst->sl_flowlabel_hoplimit) & 0xf0000000)) dst 419 drivers/infiniband/hw/mlx4/mcg.c (be32_to_cpu(dst->sl_flowlabel_hoplimit) & 0x0fffff00)) dst 423 drivers/infiniband/hw/mlx4/mcg.c (be32_to_cpu(dst->sl_flowlabel_hoplimit) & 0x000000ff)) dst 427 drivers/infiniband/hw/mlx4/mcg.c (dst->scope_join_state & 0xf0)) dst 3517 drivers/infiniband/hw/mlx5/main.c struct mlx5_flow_destination *dst, dst 3560 drivers/infiniband/hw/mlx5/main.c if (dst && !(flow_act.action & MLX5_FLOW_CONTEXT_ACTION_DROP)) { dst 3561 drivers/infiniband/hw/mlx5/main.c memcpy(&dest_arr[0], dst, sizeof(*dst)); dst 3652 drivers/infiniband/hw/mlx5/main.c struct mlx5_flow_destination *dst) dst 3654 drivers/infiniband/hw/mlx5/main.c return _create_flow_rule(dev, ft_prio, flow_attr, dst, 0, NULL); dst 3660 drivers/infiniband/hw/mlx5/main.c struct mlx5_flow_destination *dst) dst 3668 drivers/infiniband/hw/mlx5/main.c flow_attr, dst); dst 3689 drivers/infiniband/hw/mlx5/main.c struct mlx5_flow_destination *dst) dst 3726 drivers/infiniband/hw/mlx5/main.c dst); dst 3731 drivers/infiniband/hw/mlx5/main.c dst); dst 3748 drivers/infiniband/hw/mlx5/main.c struct mlx5_flow_destination *dst) dst 3758 drivers/infiniband/hw/mlx5/main.c handler_rx = create_flow_rule(dev, ft_rx, &flow_attr, dst); dst 3764 drivers/infiniband/hw/mlx5/main.c handler_tx = create_flow_rule(dev, ft_tx, &flow_attr, 
dst); dst 3790 drivers/infiniband/hw/mlx5/main.c struct mlx5_flow_destination *dst = NULL; dst 3850 drivers/infiniband/hw/mlx5/main.c dst = kzalloc(sizeof(*dst), GFP_KERNEL); dst 3851 drivers/infiniband/hw/mlx5/main.c if (!dst) { dst 3874 drivers/infiniband/hw/mlx5/main.c dst->type = MLX5_FLOW_DESTINATION_TYPE_PORT; dst 3876 drivers/infiniband/hw/mlx5/main.c dst->type = MLX5_FLOW_DESTINATION_TYPE_TIR; dst 3878 drivers/infiniband/hw/mlx5/main.c dst->tir_num = mqp->rss_qp.tirn; dst 3880 drivers/infiniband/hw/mlx5/main.c dst->tir_num = mqp->raw_packet_qp.rq.tirn; dst 3886 drivers/infiniband/hw/mlx5/main.c flow_attr, dst); dst 3891 drivers/infiniband/hw/mlx5/main.c dst, underlay_qpn, ucmd); dst 3896 drivers/infiniband/hw/mlx5/main.c dst); dst 3898 drivers/infiniband/hw/mlx5/main.c handler = create_sniffer_rule(dev, ft_prio, ft_prio_tx, dst); dst 3911 drivers/infiniband/hw/mlx5/main.c kfree(dst); dst 3922 drivers/infiniband/hw/mlx5/main.c kfree(dst); dst 4005 drivers/infiniband/hw/mlx5/main.c struct mlx5_flow_destination *dst, dst 4033 drivers/infiniband/hw/mlx5/main.c flow_act, dst, dst_num); dst 4101 drivers/infiniband/hw/mlx5/main.c struct mlx5_flow_destination *dst; dst 4114 drivers/infiniband/hw/mlx5/main.c dst = kcalloc(2, sizeof(*dst), GFP_KERNEL); dst 4115 drivers/infiniband/hw/mlx5/main.c if (!dst) dst 4128 drivers/infiniband/hw/mlx5/main.c dst[dst_num].type = dest_type; dst 4129 drivers/infiniband/hw/mlx5/main.c dst[dst_num].tir_num = dest_id; dst 4132 drivers/infiniband/hw/mlx5/main.c dst[dst_num].type = MLX5_FLOW_DESTINATION_TYPE_FLOW_TABLE_NUM; dst 4133 drivers/infiniband/hw/mlx5/main.c dst[dst_num].ft_num = dest_id; dst 4136 drivers/infiniband/hw/mlx5/main.c dst[dst_num].type = MLX5_FLOW_DESTINATION_TYPE_PORT; dst 4143 drivers/infiniband/hw/mlx5/main.c dst[dst_num].type = MLX5_FLOW_DESTINATION_TYPE_COUNTER; dst 4144 drivers/infiniband/hw/mlx5/main.c dst[dst_num].counter_id = counter_id; dst 4148 drivers/infiniband/hw/mlx5/main.c handler = _create_raw_flow_rule(dev, ft_prio, dst, fs_matcher, dst 4161 drivers/infiniband/hw/mlx5/main.c kfree(dst); dst 4169 drivers/infiniband/hw/mlx5/main.c kfree(dst); dst 48 drivers/infiniband/hw/ocrdma/ocrdma_hw.h static inline void ocrdma_cpu_to_le32(void *dst, u32 len) dst 52 drivers/infiniband/hw/ocrdma/ocrdma_hw.h u32 *src_ptr = dst; dst 53 drivers/infiniband/hw/ocrdma/ocrdma_hw.h u32 *dst_ptr = dst; dst 59 drivers/infiniband/hw/ocrdma/ocrdma_hw.h static inline void ocrdma_le32_to_cpu(void *dst, u32 len) dst 63 drivers/infiniband/hw/ocrdma/ocrdma_hw.h u32 *src_ptr = dst; dst 64 drivers/infiniband/hw/ocrdma/ocrdma_hw.h u32 *dst_ptr = dst; dst 70 drivers/infiniband/hw/ocrdma/ocrdma_hw.h static inline void ocrdma_copy_cpu_to_le32(void *dst, void *src, u32 len) dst 75 drivers/infiniband/hw/ocrdma/ocrdma_hw.h u32 *dst_ptr = dst; dst 79 drivers/infiniband/hw/ocrdma/ocrdma_hw.h memcpy(dst, src, len); dst 83 drivers/infiniband/hw/ocrdma/ocrdma_hw.h static inline void ocrdma_copy_le32_to_cpu(void *dst, void *src, u32 len) dst 88 drivers/infiniband/hw/ocrdma/ocrdma_hw.h u32 *dst_ptr = dst; dst 92 drivers/infiniband/hw/ocrdma/ocrdma_hw.h memcpy(dst, src, len); dst 447 drivers/infiniband/hw/qedr/qedr_iw_cm.c neigh = dst_neigh_lookup(&rt->dst, &dst_ip); dst 472 drivers/infiniband/hw/qedr/qedr_iw_cm.c struct dst_entry *dst; dst 480 drivers/infiniband/hw/qedr/qedr_iw_cm.c dst = ip6_route_output(&init_net, NULL, &fl6); dst 482 drivers/infiniband/hw/qedr/qedr_iw_cm.c if ((!dst) || dst->error) { dst 483 drivers/infiniband/hw/qedr/qedr_iw_cm.c if (dst) { dst 484 
drivers/infiniband/hw/qedr/qedr_iw_cm.c dst_release(dst); dst 487 drivers/infiniband/hw/qedr/qedr_iw_cm.c dst->error); dst 491 drivers/infiniband/hw/qedr/qedr_iw_cm.c neigh = dst_neigh_lookup(dst, &fl6.daddr); dst 504 drivers/infiniband/hw/qedr/qedr_iw_cm.c dst_release(dst); dst 48 drivers/infiniband/hw/qib/qib_pio_copy.c u64 __iomem *dst = to; dst 53 drivers/infiniband/hw/qib/qib_pio_copy.c __raw_writeq(*src++, dst++); dst 55 drivers/infiniband/hw/qib/qib_pio_copy.c __raw_writel(*(const u32 *)src, dst); dst 57 drivers/infiniband/hw/qib/qib_pio_copy.c u32 __iomem *dst = to; dst 62 drivers/infiniband/hw/qib/qib_pio_copy.c __raw_writel(*src++, dst++); dst 512 drivers/infiniband/hw/vmw_pvrdma/pvrdma.h void pvrdma_qp_cap_to_ib(struct ib_qp_cap *dst, dst 514 drivers/infiniband/hw/vmw_pvrdma/pvrdma.h void ib_qp_cap_to_pvrdma(struct pvrdma_qp_cap *dst, dst 516 drivers/infiniband/hw/vmw_pvrdma/pvrdma.h void pvrdma_gid_to_ib(union ib_gid *dst, const union pvrdma_gid *src); dst 517 drivers/infiniband/hw/vmw_pvrdma/pvrdma.h void ib_gid_to_pvrdma(union pvrdma_gid *dst, const union ib_gid *src); dst 518 drivers/infiniband/hw/vmw_pvrdma/pvrdma.h void pvrdma_global_route_to_ib(struct ib_global_route *dst, dst 520 drivers/infiniband/hw/vmw_pvrdma/pvrdma.h void ib_global_route_to_pvrdma(struct pvrdma_global_route *dst, dst 522 drivers/infiniband/hw/vmw_pvrdma/pvrdma.h void pvrdma_ah_attr_to_rdma(struct rdma_ah_attr *dst, dst 524 drivers/infiniband/hw/vmw_pvrdma/pvrdma.h void rdma_ah_attr_to_pvrdma(struct pvrdma_ah_attr *dst, dst 225 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c void pvrdma_qp_cap_to_ib(struct ib_qp_cap *dst, const struct pvrdma_qp_cap *src) dst 227 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->max_send_wr = src->max_send_wr; dst 228 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->max_recv_wr = src->max_recv_wr; dst 229 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->max_send_sge = src->max_send_sge; dst 230 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->max_recv_sge = src->max_recv_sge; dst 231 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->max_inline_data = src->max_inline_data; dst 234 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c void ib_qp_cap_to_pvrdma(struct pvrdma_qp_cap *dst, const struct ib_qp_cap *src) dst 236 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->max_send_wr = src->max_send_wr; dst 237 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->max_recv_wr = src->max_recv_wr; dst 238 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->max_send_sge = src->max_send_sge; dst 239 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->max_recv_sge = src->max_recv_sge; dst 240 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->max_inline_data = src->max_inline_data; dst 243 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c void pvrdma_gid_to_ib(union ib_gid *dst, const union pvrdma_gid *src) dst 246 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c memcpy(dst, src, sizeof(*src)); dst 249 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c void ib_gid_to_pvrdma(union pvrdma_gid *dst, const union ib_gid *src) dst 252 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c memcpy(dst, src, sizeof(*src)); dst 255 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c void pvrdma_global_route_to_ib(struct ib_global_route *dst, dst 258 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c pvrdma_gid_to_ib(&dst->dgid, &src->dgid); dst 259 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->flow_label = src->flow_label; dst 260 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->sgid_index = 
src->sgid_index; dst 261 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->hop_limit = src->hop_limit; dst 262 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->traffic_class = src->traffic_class; dst 265 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c void ib_global_route_to_pvrdma(struct pvrdma_global_route *dst, dst 268 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c ib_gid_to_pvrdma(&dst->dgid, &src->dgid); dst 269 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->flow_label = src->flow_label; dst 270 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->sgid_index = src->sgid_index; dst 271 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->hop_limit = src->hop_limit; dst 272 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->traffic_class = src->traffic_class; dst 275 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c void pvrdma_ah_attr_to_rdma(struct rdma_ah_attr *dst, dst 278 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->type = RDMA_AH_ATTR_TYPE_ROCE; dst 279 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c pvrdma_global_route_to_ib(rdma_ah_retrieve_grh(dst), &src->grh); dst 280 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c rdma_ah_set_dlid(dst, src->dlid); dst 281 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c rdma_ah_set_sl(dst, src->sl); dst 282 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c rdma_ah_set_path_bits(dst, src->src_path_bits); dst 283 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c rdma_ah_set_static_rate(dst, src->static_rate); dst 284 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c rdma_ah_set_ah_flags(dst, src->ah_flags); dst 285 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c rdma_ah_set_port_num(dst, src->port_num); dst 286 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c memcpy(dst->roce.dmac, &src->dmac, ETH_ALEN); dst 289 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c void rdma_ah_attr_to_pvrdma(struct pvrdma_ah_attr *dst, dst 292 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c ib_global_route_to_pvrdma(&dst->grh, rdma_ah_read_grh(src)); dst 293 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->dlid = rdma_ah_get_dlid(src); dst 294 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->sl = rdma_ah_get_sl(src); dst 295 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->src_path_bits = rdma_ah_get_path_bits(src); dst 296 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->static_rate = rdma_ah_get_static_rate(src); dst 297 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->ah_flags = rdma_ah_get_ah_flags(src); dst 298 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c dst->port_num = rdma_ah_get_port_num(src); dst 299 drivers/infiniband/hw/vmw_pvrdma/pvrdma_misc.c memcpy(&dst->dmac, src->roce.dmac, sizeof(dst->dmac)); dst 134 drivers/infiniband/sw/rdmavt/qp.c static void cacheless_memcpy(void *dst, void *src, size_t n) dst 142 drivers/infiniband/sw/rdmavt/qp.c __copy_user_nocache(dst, (void __user *)src, n, 0); dst 103 drivers/infiniband/sw/rxe/rxe_net.c return &rt->dst; dst 154 drivers/infiniband/sw/rxe/rxe_net.c struct dst_entry *dst = NULL; dst 157 drivers/infiniband/sw/rxe/rxe_net.c dst = sk_dst_get(qp->sk->sk); dst 159 drivers/infiniband/sw/rxe/rxe_net.c if (!dst || !dst_check(dst, qp->dst_cookie)) { dst 160 drivers/infiniband/sw/rxe/rxe_net.c if (dst) dst 161 drivers/infiniband/sw/rxe/rxe_net.c dst_release(dst); dst 169 drivers/infiniband/sw/rxe/rxe_net.c dst = rxe_find_route4(ndev, saddr, daddr); dst 176 drivers/infiniband/sw/rxe/rxe_net.c dst = rxe_find_route6(ndev, saddr6, daddr6); dst 178 drivers/infiniband/sw/rxe/rxe_net.c if (dst) dst 180 
drivers/infiniband/sw/rxe/rxe_net.c rt6_get_cookie((struct rt6_info *)dst); dst 184 drivers/infiniband/sw/rxe/rxe_net.c if (dst && (qp_type(qp) == IB_QPT_RC)) { dst 185 drivers/infiniband/sw/rxe/rxe_net.c dst_hold(dst); dst 186 drivers/infiniband/sw/rxe/rxe_net.c sk_dst_set(qp->sk->sk, dst); dst 189 drivers/infiniband/sw/rxe/rxe_net.c return dst; dst 289 drivers/infiniband/sw/rxe/rxe_net.c static void prepare_ipv4_hdr(struct dst_entry *dst, struct sk_buff *skb, dst 298 drivers/infiniband/sw/rxe/rxe_net.c skb_dst_set(skb, dst_clone(dst)); dst 314 drivers/infiniband/sw/rxe/rxe_net.c __ip_select_ident(dev_net(dst->dev), iph, dst 320 drivers/infiniband/sw/rxe/rxe_net.c static void prepare_ipv6_hdr(struct dst_entry *dst, struct sk_buff *skb, dst 329 drivers/infiniband/sw/rxe/rxe_net.c skb_dst_set(skb, dst_clone(dst)); dst 346 drivers/infiniband/sw/rxe/rxe_net.c struct dst_entry *dst; dst 353 drivers/infiniband/sw/rxe/rxe_net.c dst = rxe_find_route(skb->dev, qp, av); dst 354 drivers/infiniband/sw/rxe/rxe_net.c if (!dst) { dst 362 drivers/infiniband/sw/rxe/rxe_net.c prepare_ipv4_hdr(dst, skb, saddr->s_addr, daddr->s_addr, IPPROTO_UDP, dst 365 drivers/infiniband/sw/rxe/rxe_net.c dst_release(dst); dst 372 drivers/infiniband/sw/rxe/rxe_net.c struct dst_entry *dst; dst 377 drivers/infiniband/sw/rxe/rxe_net.c dst = rxe_find_route(skb->dev, qp, av); dst 378 drivers/infiniband/sw/rxe/rxe_net.c if (!dst) { dst 386 drivers/infiniband/sw/rxe/rxe_net.c prepare_ipv6_hdr(dst, skb, saddr, daddr, IPPROTO_UDP, dst 390 drivers/infiniband/sw/rxe/rxe_net.c dst_release(dst); dst 88 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c __be64 *dst; dst 111 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c for (dst = &cntrs->tx_unicast, src = &vstats.tx_grp.unicast; dst 112 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c dst < &cntrs->reserved[0]; dst++, src++) { dst 113 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c *dst = cpu_to_be64(*src); dst 198 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c struct __opa_vesw_info *dst = &adapter->info.vesw; dst 201 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c dst->fabric_id = be16_to_cpu(info->fabric_id); dst 202 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c dst->vesw_id = be16_to_cpu(info->vesw_id); dst 203 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c memcpy(dst->rsvd0, info->rsvd0, ARRAY_SIZE(info->rsvd0)); dst 204 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c dst->def_port_mask = be16_to_cpu(info->def_port_mask); dst 205 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c memcpy(dst->rsvd1, info->rsvd1, ARRAY_SIZE(info->rsvd1)); dst 206 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c dst->pkey = be16_to_cpu(info->pkey); dst 208 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c memcpy(dst->rsvd2, info->rsvd2, ARRAY_SIZE(info->rsvd2)); dst 209 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c dst->u_mcast_dlid = be32_to_cpu(info->u_mcast_dlid); dst 211 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c dst->u_ucast_dlid[i] = be32_to_cpu(info->u_ucast_dlid[i]); dst 213 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c dst->rc = be32_to_cpu(info->rc); dst 215 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c memcpy(dst->rsvd3, info->rsvd3, ARRAY_SIZE(info->rsvd3)); dst 216 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c dst->eth_mtu = be16_to_cpu(info->eth_mtu); dst 217 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c memcpy(dst->rsvd4, info->rsvd4, ARRAY_SIZE(info->rsvd4)); dst 279 
drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c struct __opa_per_veswport_info *dst = &adapter->info.vport; dst 281 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c dst->port_num = be32_to_cpu(info->port_num); dst 282 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c memcpy(dst->rsvd0, info->rsvd0, ARRAY_SIZE(info->rsvd0)); dst 284 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c memcpy(dst->base_mac_addr, info->base_mac_addr, dst 285 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c ARRAY_SIZE(dst->base_mac_addr)); dst 286 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c dst->config_state = info->config_state; dst 287 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c memcpy(dst->rsvd1, info->rsvd1, ARRAY_SIZE(info->rsvd1)); dst 289 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c dst->encap_slid = be32_to_cpu(info->encap_slid); dst 290 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c memcpy(dst->pcp_to_sc_uc, info->pcp_to_sc_uc, dst 291 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c ARRAY_SIZE(dst->pcp_to_sc_uc)); dst 292 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c memcpy(dst->pcp_to_vl_uc, info->pcp_to_vl_uc, dst 293 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c ARRAY_SIZE(dst->pcp_to_vl_uc)); dst 294 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c memcpy(dst->pcp_to_sc_mc, info->pcp_to_sc_mc, dst 295 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c ARRAY_SIZE(dst->pcp_to_sc_mc)); dst 296 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c memcpy(dst->pcp_to_vl_mc, info->pcp_to_vl_mc, dst 297 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c ARRAY_SIZE(dst->pcp_to_vl_mc)); dst 298 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c dst->non_vlan_sc_uc = info->non_vlan_sc_uc; dst 299 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c dst->non_vlan_vl_uc = info->non_vlan_vl_uc; dst 300 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c dst->non_vlan_sc_mc = info->non_vlan_sc_mc; dst 301 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c dst->non_vlan_vl_mc = info->non_vlan_vl_mc; dst 302 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c memcpy(dst->rsvd2, info->rsvd2, ARRAY_SIZE(info->rsvd2)); dst 303 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema_iface.c memcpy(dst->rsvd3, info->rsvd3, ARRAY_SIZE(info->rsvd3)); dst 356 drivers/infiniband/ulp/srp/ib_srp.c (struct sockaddr *)&target->rdma_cm.dst, dst 360 drivers/infiniband/ulp/srp/ib_srp.c &target->rdma_cm.src, &target->rdma_cm.dst, ret); dst 370 drivers/infiniband/ulp/srp/ib_srp.c &target->rdma_cm.dst, ret); dst 3614 drivers/infiniband/ulp/srp/ib_srp.c ret = srp_parse_in(net, &target->rdma_cm.dst.ss, p, dst 3834 drivers/infiniband/ulp/srp/ib_srp.c &target->rdma_cm.dst); dst 3948 drivers/infiniband/ulp/srp/ib_srp.c char dst[64]; dst 3951 drivers/infiniband/ulp/srp/ib_srp.c snprintf(dst, sizeof(dst), "%pIS", dst 3952 drivers/infiniband/ulp/srp/ib_srp.c &target->rdma_cm.dst); dst 3954 drivers/infiniband/ulp/srp/ib_srp.c snprintf(dst, sizeof(dst), "%pI6", dst 3959 drivers/infiniband/ulp/srp/ib_srp.c target->ch_count, dst); dst 3989 drivers/infiniband/ulp/srp/ib_srp.c target->sgid.raw, &target->rdma_cm.dst); dst 254 drivers/infiniband/ulp/srp/ib_srp.h } dst; dst 14 drivers/input/input-mt.c static void copy_abs(struct input_dev *dev, unsigned int dst, unsigned int src) dst 17 drivers/input/input-mt.c dev->absinfo[dst] = dev->absinfo[src]; dst 18 drivers/input/input-mt.c dev->absinfo[dst].fuzz = 0; dst 19 drivers/input/input-mt.c 
dev->absbit[BIT_WORD(dst)] |= BIT_MASK(dst); dst 919 drivers/input/serio/i8042-x86ia64io.h static void i8042_pnp_id_to_string(struct pnp_id *id, char *dst, int dst_size) dst 921 drivers/input/serio/i8042-x86ia64io.h strlcpy(dst, "PNP:", dst_size); dst 924 drivers/input/serio/i8042-x86ia64io.h strlcat(dst, " ", dst_size); dst 925 drivers/input/serio/i8042-x86ia64io.h strlcat(dst, id->id, dst_size); dst 102 drivers/interconnect/core.c static struct icc_path *path_init(struct device *dev, struct icc_node *dst, dst 105 drivers/interconnect/core.c struct icc_node *node = dst; dst 128 drivers/interconnect/core.c struct icc_node *dst) dst 147 drivers/interconnect/core.c if (node == dst) { dst 188 drivers/interconnect/core.c path = path_init(dev, dst, depth); dst 496 drivers/interconnect/core.c struct icc_node *src, *dst; dst 505 drivers/interconnect/core.c dst = node_find(dst_id); dst 506 drivers/interconnect/core.c if (!dst) dst 509 drivers/interconnect/core.c path = path_find(dev, src, dst); dst 634 drivers/interconnect/core.c struct icc_node *dst; dst 643 drivers/interconnect/core.c dst = node_find(dst_id); dst 644 drivers/interconnect/core.c if (!dst) { dst 645 drivers/interconnect/core.c dst = icc_node_create_nolock(dst_id); dst 647 drivers/interconnect/core.c if (IS_ERR(dst)) { dst 648 drivers/interconnect/core.c ret = PTR_ERR(dst); dst 662 drivers/interconnect/core.c node->links[node->num_links++] = dst; dst 678 drivers/interconnect/core.c int icc_link_destroy(struct icc_node *src, struct icc_node *dst) dst 687 drivers/interconnect/core.c if (IS_ERR_OR_NULL(dst)) dst 693 drivers/interconnect/core.c if (src->links[slot] == dst) dst 339 drivers/interconnect/qcom/qcs404.c static int qcom_icc_set(struct icc_node *src, struct icc_node *dst) dst 673 drivers/interconnect/qcom/sdm845.c static int qcom_icc_set(struct icc_node *src, struct icc_node *dst) dst 683 drivers/interconnect/qcom/sdm845.c node = dst; dst 808 drivers/iommu/arm-smmu-v3.c static void queue_write(__le64 *dst, u64 *src, size_t n_dwords) dst 813 drivers/iommu/arm-smmu-v3.c *dst++ = cpu_to_le64(*src++); dst 816 drivers/iommu/arm-smmu-v3.c static void queue_read(__le64 *dst, u64 *src, size_t n_dwords) dst 821 drivers/iommu/arm-smmu-v3.c *dst++ = le64_to_cpu(*src++); dst 1495 drivers/iommu/arm-smmu-v3.c arm_smmu_write_strtab_l1_desc(__le64 *dst, struct arm_smmu_strtab_l1_desc *desc) dst 1502 drivers/iommu/arm-smmu-v3.c *dst = cpu_to_le64(val); dst 1520 drivers/iommu/arm-smmu-v3.c __le64 *dst) dst 1538 drivers/iommu/arm-smmu-v3.c u64 val = le64_to_cpu(dst[0]); dst 1596 drivers/iommu/arm-smmu-v3.c dst[0] = cpu_to_le64(val); dst 1597 drivers/iommu/arm-smmu-v3.c dst[1] = cpu_to_le64(FIELD_PREP(STRTAB_STE_1_SHCFG, dst 1599 drivers/iommu/arm-smmu-v3.c dst[2] = 0; /* Nuke the VMID */ dst 1611 drivers/iommu/arm-smmu-v3.c dst[1] = cpu_to_le64( dst 1619 drivers/iommu/arm-smmu-v3.c dst[1] |= cpu_to_le64(STRTAB_STE_1_S1STALLD); dst 1627 drivers/iommu/arm-smmu-v3.c dst[2] = cpu_to_le64( dst 1636 drivers/iommu/arm-smmu-v3.c dst[3] = cpu_to_le64(s2_cfg->vttbr & STRTAB_STE_3_S2TTB_MASK); dst 1642 drivers/iommu/arm-smmu-v3.c dst[1] |= cpu_to_le64(FIELD_PREP(STRTAB_STE_1_EATS, dst 1647 drivers/iommu/arm-smmu-v3.c WRITE_ONCE(dst[0], cpu_to_le64(val)); dst 668 drivers/isdn/hardware/mISDN/hfcpci.c u_char *src, *dst, new_f1; dst 714 drivers/isdn/hardware/mISDN/hfcpci.c dst = df->data + le16_to_cpu(df->za[df->f1 & D_FREG_MASK].z1); dst 719 drivers/isdn/hardware/mISDN/hfcpci.c memcpy(dst, src, maxlen); /* first copy */ dst 723 
drivers/isdn/hardware/mISDN/hfcpci.c dst = df->data; /* start of buffer */ dst 725 drivers/isdn/hardware/mISDN/hfcpci.c memcpy(dst, src, count); dst 746 drivers/isdn/hardware/mISDN/hfcpci.c u_char new_f1, *src, *dst; dst 785 drivers/isdn/hardware/mISDN/hfcpci.c dst = bdata + (le16_to_cpu(*z1t) - B_SUB_VAL); dst 791 drivers/isdn/hardware/mISDN/hfcpci.c fcnt, maxlen, new_z1, dst); dst 794 drivers/isdn/hardware/mISDN/hfcpci.c memset(dst, bch->fill[0], maxlen); /* first copy */ dst 797 drivers/isdn/hardware/mISDN/hfcpci.c dst = bdata; /* start of buffer */ dst 798 drivers/isdn/hardware/mISDN/hfcpci.c memset(dst, bch->fill[0], count); dst 821 drivers/isdn/hardware/mISDN/hfcpci.c dst = bdata + (le16_to_cpu(*z1t) - B_SUB_VAL); dst 827 drivers/isdn/hardware/mISDN/hfcpci.c fcnt, maxlen, new_z1, dst); dst 832 drivers/isdn/hardware/mISDN/hfcpci.c memcpy(dst, src, maxlen); /* first copy */ dst 835 drivers/isdn/hardware/mISDN/hfcpci.c dst = bdata; /* start of buffer */ dst 837 drivers/isdn/hardware/mISDN/hfcpci.c memcpy(dst, src, count); dst 883 drivers/isdn/hardware/mISDN/hfcpci.c dst = bdata + (le16_to_cpu(bz->za[bz->f1].z1) - B_SUB_VAL); dst 888 drivers/isdn/hardware/mISDN/hfcpci.c memcpy(dst, src, maxlen); /* first copy */ dst 892 drivers/isdn/hardware/mISDN/hfcpci.c dst = bdata; /* start of buffer */ dst 894 drivers/isdn/hardware/mISDN/hfcpci.c memcpy(dst, src, count); dst 119 drivers/isdn/hardware/mISDN/isdnhdlc.c int *count, u8 *dst, int dsize) dst 289 drivers/isdn/hardware/mISDN/isdnhdlc.c dst[hdlc->dstpos++] = hdlc->shift_reg; dst 342 drivers/isdn/hardware/mISDN/isdnhdlc.c int *count, u8 *dst, int dsize) dst 386 drivers/isdn/hardware/mISDN/isdnhdlc.c *dst++ = 0xff; dst 393 drivers/isdn/hardware/mISDN/isdnhdlc.c *dst++ = bitrev8(hdlc->ffvalue); dst 395 drivers/isdn/hardware/mISDN/isdnhdlc.c *dst++ = hdlc->ffvalue; dst 583 drivers/isdn/hardware/mISDN/isdnhdlc.c *dst++ = bitrev8(hdlc->cbin); dst 585 drivers/isdn/hardware/mISDN/isdnhdlc.c *dst++ = hdlc->cbin; dst 605 drivers/isdn/hardware/mISDN/isdnhdlc.c *dst++ = bitrev8(hdlc->cbin); dst 607 drivers/isdn/hardware/mISDN/isdnhdlc.c *dst++ = hdlc->cbin; dst 62 drivers/isdn/hardware/mISDN/isdnhdlc.h int slen, int *count, u8 *dst, int dsize); dst 67 drivers/isdn/hardware/mISDN/isdnhdlc.h u16 slen, int *count, u8 *dst, int dsize); dst 563 drivers/isdn/mISDN/dsp_blowfish.c encrypt_block(const u32 *P, const u32 *S, u32 *dst, u32 *src) dst 588 drivers/isdn/mISDN/dsp_blowfish.c dst[0] = yr; dst 589 drivers/isdn/mISDN/dsp_blowfish.c dst[1] = yl; dst 197 drivers/lightnvm/pblk-init.c struct nvm_addrf_12 *dst) dst 208 drivers/lightnvm/pblk-init.c dst->ch_len = power_len; dst 215 drivers/lightnvm/pblk-init.c dst->lun_len = power_len; dst 217 drivers/lightnvm/pblk-init.c dst->blk_len = src->blk_len; dst 218 drivers/lightnvm/pblk-init.c dst->pg_len = src->pg_len; dst 219 drivers/lightnvm/pblk-init.c dst->pln_len = src->pln_len; dst 220 drivers/lightnvm/pblk-init.c dst->sec_len = src->sec_len; dst 222 drivers/lightnvm/pblk-init.c dst->sec_offset = 0; dst 223 drivers/lightnvm/pblk-init.c dst->pln_offset = dst->sec_len; dst 224 drivers/lightnvm/pblk-init.c dst->ch_offset = dst->pln_offset + dst->pln_len; dst 225 drivers/lightnvm/pblk-init.c dst->lun_offset = dst->ch_offset + dst->ch_len; dst 226 drivers/lightnvm/pblk-init.c dst->pg_offset = dst->lun_offset + dst->lun_len; dst 227 drivers/lightnvm/pblk-init.c dst->blk_offset = dst->pg_offset + dst->pg_len; dst 229 drivers/lightnvm/pblk-init.c dst->sec_mask = ((1ULL << dst->sec_len) - 1) << dst->sec_offset; dst 230 
drivers/lightnvm/pblk-init.c dst->pln_mask = ((1ULL << dst->pln_len) - 1) << dst->pln_offset; dst 231 drivers/lightnvm/pblk-init.c dst->ch_mask = ((1ULL << dst->ch_len) - 1) << dst->ch_offset; dst 232 drivers/lightnvm/pblk-init.c dst->lun_mask = ((1ULL << dst->lun_len) - 1) << dst->lun_offset; dst 233 drivers/lightnvm/pblk-init.c dst->pg_mask = ((1ULL << dst->pg_len) - 1) << dst->pg_offset; dst 234 drivers/lightnvm/pblk-init.c dst->blk_mask = ((1ULL << dst->blk_len) - 1) << dst->blk_offset; dst 236 drivers/lightnvm/pblk-init.c return dst->blk_offset + src->blk_len; dst 578 drivers/mailbox/bcm-flexrm-mailbox.c if (!msg->spu.src || !msg->spu.dst) dst 589 drivers/mailbox/bcm-flexrm-mailbox.c for (sg = msg->spu.dst; sg; sg = sg_next(sg)) { dst 606 drivers/mailbox/bcm-flexrm-mailbox.c struct scatterlist *src_sg = msg->spu.src, *dst_sg = msg->spu.dst; dst 638 drivers/mailbox/bcm-flexrm-mailbox.c rc = dma_map_sg(dev, msg->spu.dst, sg_nents(msg->spu.dst), dst 651 drivers/mailbox/bcm-flexrm-mailbox.c dma_unmap_sg(dev, msg->spu.dst, sg_nents(msg->spu.dst), dst 665 drivers/mailbox/bcm-flexrm-mailbox.c struct scatterlist *src_sg = msg->spu.src, *dst_sg = msg->spu.dst; dst 1216 drivers/mailbox/bcm-pdc-mailbox.c dst_nent = sg_nents(mssg->spu.dst); dst 1218 drivers/mailbox/bcm-pdc-mailbox.c nent = dma_map_sg(dev, mssg->spu.dst, dst_nent, dst 1237 drivers/mailbox/bcm-pdc-mailbox.c rx_desc_req = pdc_desc_count(mssg->spu.dst); dst 1242 drivers/mailbox/bcm-pdc-mailbox.c err = pdc_rx_list_init(pdcs, mssg->spu.dst, mssg->ctx); dst 1243 drivers/mailbox/bcm-pdc-mailbox.c err |= pdc_rx_list_sg_add(pdcs, mssg->spu.dst); dst 164 drivers/md/bcache/request.c struct bkey *src = op->insert_keys.keys, *dst = op->insert_keys.keys; dst 170 drivers/md/bcache/request.c memmove(dst, src, bkey_bytes(src)); dst 172 drivers/md/bcache/request.c dst = bkey_next(dst); dst 176 drivers/md/bcache/request.c op->insert_keys.top = dst; dst 523 drivers/md/dm-crypt.c u8 *dst; dst 530 drivers/md/dm-crypt.c dst = kmap_atomic(sg_page(sg)); dst 531 drivers/md/dm-crypt.c r = crypt_iv_lmk_one(cc, iv, dmreq, dst + sg->offset); dst 535 drivers/md/dm-crypt.c crypto_xor(dst + sg->offset, iv, cc->iv_size); dst 537 drivers/md/dm-crypt.c kunmap_atomic(dst); dst 677 drivers/md/dm-crypt.c u8 *dst; dst 685 drivers/md/dm-crypt.c dst = kmap_atomic(sg_page(sg)); dst 686 drivers/md/dm-crypt.c r = crypt_iv_tcw_whitening(cc, dmreq, dst + sg->offset); dst 687 drivers/md/dm-crypt.c kunmap_atomic(dst); dst 722 drivers/md/dm-crypt.c struct scatterlist src, dst; dst 734 drivers/md/dm-crypt.c sg_init_one(&dst, iv, cc->iv_size); dst 735 drivers/md/dm-crypt.c skcipher_request_set_crypt(req, &src, &dst, cc->iv_size, buf); dst 605 drivers/md/dm-integrity.c static void block_bitmap_copy(struct dm_integrity_c *ic, struct page_list *dst, struct page_list *src) dst 611 drivers/md/dm-integrity.c unsigned long *dst_data = lowmem_page_address(dst[i].page); dst 905 drivers/md/dm-integrity.c req->dst = target_sg[section]; dst 756 drivers/md/dm-log-writes.c void *src, *dst; dst 769 drivers/md/dm-log-writes.c dst = kmap_atomic(page); dst 770 drivers/md/dm-log-writes.c memcpy(dst, src + bv.bv_offset, bv.bv_len); dst 771 drivers/md/dm-log-writes.c kunmap_atomic(dst); dst 120 drivers/md/dm-zoned-reclaim.c struct dm_io_region src, dst; dst 163 drivers/md/dm-zoned-reclaim.c dst.bdev = dev->bdev; dst 164 drivers/md/dm-zoned-reclaim.c dst.sector = dmz_blk2sect(dst_zone_block + block); dst 165 drivers/md/dm-zoned-reclaim.c dst.count = src.count; dst 169 drivers/md/dm-zoned-reclaim.c 
dm_kcopyd_copy(zrc->kc, &src, 1, &dst, flags, dst 1162 drivers/media/cec/cec-adap.c struct cec_msg *dst = &data->msg; dst 1169 drivers/media/cec/cec-adap.c if (!abort && dst->msg[1] == CEC_MSG_INITIATE_ARC && dst 1172 drivers/media/cec/cec-adap.c (dst->reply == CEC_MSG_REPORT_ARC_INITIATED || dst 1173 drivers/media/cec/cec-adap.c dst->reply == CEC_MSG_REPORT_ARC_TERMINATED)) dst 1174 drivers/media/cec/cec-adap.c dst->reply = cmd; dst 1177 drivers/media/cec/cec-adap.c if ((abort && cmd != dst->msg[1]) || dst 1178 drivers/media/cec/cec-adap.c (!abort && cmd != dst->reply)) dst 1182 drivers/media/cec/cec-adap.c if (msg_init != cec_msg_destination(dst) && dst 1183 drivers/media/cec/cec-adap.c !cec_msg_is_broadcast(dst)) dst 1187 drivers/media/cec/cec-adap.c memcpy(dst->msg, msg->msg, msg->len); dst 1188 drivers/media/cec/cec-adap.c dst->len = msg->len; dst 1189 drivers/media/cec/cec-adap.c dst->rx_ts = msg->rx_ts; dst 1190 drivers/media/cec/cec-adap.c dst->rx_status = msg->rx_status; dst 1192 drivers/media/cec/cec-adap.c dst->rx_status |= CEC_RX_STATUS_FEATURE_ABORT; dst 1193 drivers/media/cec/cec-adap.c msg->flags = dst->flags; dst 576 drivers/media/common/siano/smscoreapi.h #define SMS_INIT_MSG_EX(ptr, type, src, dst, len) do { \ dst 579 drivers/media/common/siano/smscoreapi.h (ptr)->msg_dst_id = dst; \ dst 172 drivers/media/dvb-frontends/rtl2832_sdr.c void *dst, const u8 *src, unsigned int src_len) dst 179 drivers/media/dvb-frontends/rtl2832_sdr.c memcpy(dst, src, src_len); dst 184 drivers/media/dvb-frontends/rtl2832_sdr.c u16 *u16dst = dst; dst 22 drivers/media/i2c/cx25840/cx25840-vbi.c static int decode_vps(u8 * dst, u8 * p) dst 66 drivers/media/i2c/cx25840/cx25840-vbi.c dst[i / 2] = c; dst 451 drivers/media/i2c/ir-kbd-i2c.c static void copy_codes(u8 *dst, u8 *src, unsigned int count) dst 458 drivers/media/i2c/ir-kbd-i2c.c *dst++ = 0x70 | (c & 0xf); dst 460 drivers/media/i2c/ir-kbd-i2c.c *dst++ = c; dst 676 drivers/media/i2c/saa7115.c static int saa711x_decode_vps(u8 *dst, u8 *p) dst 718 drivers/media/i2c/saa7115.c dst[i / 2] = c; dst 442 drivers/media/i2c/tvp7002.c static int tvp7002_read(struct v4l2_subdev *sd, u8 addr, u8 *dst) dst 452 drivers/media/i2c/tvp7002.c *dst = (u8)error; dst 473 drivers/media/i2c/tvp7002.c u8 *dst, int *err) dst 476 drivers/media/i2c/tvp7002.c *err = tvp7002_read(sd, reg, dst); dst 657 drivers/media/pci/bt8xx/dst_ca.c struct dvb_device *dst_ca_attach(struct dst_state *dst, struct dvb_adapter *dvb_adapter) dst 662 drivers/media/pci/bt8xx/dst_ca.c if (dvb_register_device(dvb_adapter, &dvbdev, &dvbdev_ca, dst, dst 664 drivers/media/pci/bt8xx/dst_ca.c dst->dst_ca = dvbdev; dst 665 drivers/media/pci/bt8xx/dst_ca.c return dst->dst_ca; dst 41 drivers/media/pci/bt8xx/dst_ca.h struct dst_state *dst; dst 76 drivers/media/pci/cobalt/cobalt-alsa-pcm.c static void sample_cpy(u8 *dst, const u8 *src, u32 len, bool is_s32) dst 87 drivers/media/pci/cobalt/cobalt-alsa-pcm.c *dst++ = 0; dst 88 drivers/media/pci/cobalt/cobalt-alsa-pcm.c *dst++ = val & 0xff; dst 90 drivers/media/pci/cobalt/cobalt-alsa-pcm.c *dst++ = (val >> 8) & 0xff; dst 91 drivers/media/pci/cobalt/cobalt-alsa-pcm.c *dst++ = (val >> 16) & 0xff; dst 322 drivers/media/pci/cobalt/cobalt-alsa-pcm.c static void pb_sample_cpy(u8 *dst, const u8 *src, u32 len, bool is_s32) dst 329 drivers/media/pci/cobalt/cobalt-alsa-pcm.c u8 *out = dst + offset; dst 68 drivers/media/pci/cx18/cx18-av-vbi.c static int decode_vps(u8 *dst, u8 *p) dst 112 drivers/media/pci/cx18/cx18-av-vbi.c dst[i / 2] = c; dst 92 
drivers/media/pci/cx18/cx18-firmware.c u32 __iomem *dst = (u32 __iomem *)mem; dst 107 drivers/media/pci/cx18/cx18-firmware.c cx18_raw_writel(cx, *src, dst); dst 108 drivers/media/pci/cx18/cx18-firmware.c if (cx18_raw_readl(cx, dst) != *src) { dst 114 drivers/media/pci/cx18/cx18-firmware.c dst++; dst 126 drivers/media/pci/cx18/cx18-firmware.c static int load_apu_fw_direct(const char *fn, u8 __iomem *dst, struct cx18 *cx, dst 177 drivers/media/pci/cx18/cx18-firmware.c dst + seghdr.addr + j); dst 178 drivers/media/pci/cx18/cx18-firmware.c if (cx18_raw_readl(cx, dst + seghdr.addr + j) dst 15 drivers/media/pci/cx18/cx18-io.c u8 __iomem *dst = addr; dst 20 drivers/media/pci/cx18/cx18-io.c if ((count > 0) && ((unsigned long)dst & 1)) { dst 21 drivers/media/pci/cx18/cx18-io.c cx18_writeb(cx, (u8) val, dst); dst 23 drivers/media/pci/cx18/cx18-io.c dst++; dst 25 drivers/media/pci/cx18/cx18-io.c if ((count > 1) && ((unsigned long)dst & 2)) { dst 26 drivers/media/pci/cx18/cx18-io.c cx18_writew(cx, val2, dst); dst 28 drivers/media/pci/cx18/cx18-io.c dst += 2; dst 31 drivers/media/pci/cx18/cx18-io.c cx18_writel(cx, val4, dst); dst 33 drivers/media/pci/cx18/cx18-io.c dst += 4; dst 36 drivers/media/pci/cx18/cx18-io.c cx18_writew(cx, val2, dst); dst 38 drivers/media/pci/cx18/cx18-io.c dst += 2; dst 41 drivers/media/pci/cx18/cx18-io.c cx18_writeb(cx, (u8) val, dst); dst 46 drivers/media/pci/cx18/cx18-vbi.c u8 *dst = &cx->vbi.sliced_mpeg_data[idx][0]; dst 63 drivers/media/pci/cx18/cx18-vbi.c dst[sd + 12 + line * 43] = cx18_service2vbi(sdata->id); dst 64 drivers/media/pci/cx18/cx18-vbi.c memcpy(dst + sd + 12 + line * 43 + 1, sdata->data, 42); dst 67 drivers/media/pci/cx18/cx18-vbi.c memcpy(dst, mpeg_hdr_data, sizeof(mpeg_hdr_data)); dst 72 drivers/media/pci/cx18/cx18-vbi.c memcpy(dst + sd, "ITV0", 4); dst 73 drivers/media/pci/cx18/cx18-vbi.c memmove(dst + sd + 4, dst + sd + 12, line * 43); dst 76 drivers/media/pci/cx18/cx18-vbi.c memcpy(dst + sd, "itv0", 4); dst 79 drivers/media/pci/cx18/cx18-vbi.c memcpy(dst + sd + 4, &linemask[0], 8); dst 82 drivers/media/pci/cx18/cx18-vbi.c dst[4+16] = (size + 10) >> 8; dst 83 drivers/media/pci/cx18/cx18-vbi.c dst[5+16] = (size + 10) & 0xff; dst 84 drivers/media/pci/cx18/cx18-vbi.c dst[9+16] = 0x21 | ((pts_stamp >> 29) & 0x6); dst 85 drivers/media/pci/cx18/cx18-vbi.c dst[10+16] = (pts_stamp >> 22) & 0xff; dst 86 drivers/media/pci/cx18/cx18-vbi.c dst[11+16] = 1 | ((pts_stamp >> 14) & 0xff); dst 87 drivers/media/pci/cx18/cx18-vbi.c dst[12+16] = (pts_stamp >> 7) & 0xff; dst 88 drivers/media/pci/cx18/cx18-vbi.c dst[13+16] = 1 | ((pts_stamp & 0x7f) << 1); dst 53 drivers/media/pci/ddbridge/ddbridge-io.h static inline void ddbcpyfrom(struct ddb *dev, void *dst, u32 adr, long count) dst 55 drivers/media/pci/ddbridge/ddbridge-io.h memcpy_fromio(dst, dev->regs + adr, count); dst 275 drivers/media/pci/ivtv/ivtv-driver.h __le32 dst; dst 281 drivers/media/pci/ivtv/ivtv-driver.h u32 dst; dst 48 drivers/media/pci/ivtv/ivtv-firmware.c volatile u32 __iomem *dst = (volatile u32 __iomem *)mem; dst 63 drivers/media/pci/ivtv/ivtv-firmware.c __raw_writel(*src, dst); dst 64 drivers/media/pci/ivtv/ivtv-firmware.c dst++; dst 222 drivers/media/pci/ivtv/ivtv-irq.c s->sg_pending[idx].dst = buf->dma_handle; dst 356 drivers/media/pci/ivtv/ivtv-irq.c s->sg_pending[idx].dst = offset; dst 368 drivers/media/pci/ivtv/ivtv-irq.c s->sg_pending[idx].dst = offset; dst 375 drivers/media/pci/ivtv/ivtv-irq.c s->sg_pending[idx].dst = offset; dst 383 drivers/media/pci/ivtv/ivtv-irq.c s->sg_pending[idx].dst = offset; 
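The cx18-firmware.c and ivtv-firmware.c entries above share one pattern: each 32-bit firmware word is stored into the device's MMIO window with a raw write and immediately read back, and a mismatch aborts the load. The fragment below is only a sketch of that copy-and-verify idiom, not the kernel's exact code; the helper name fw_copy_verify is invented, while __raw_writel()/__raw_readl() are the real low-level accessors that the drivers wrap (cx18 goes through cx18_raw_writel()/cx18_raw_readl()).

	#include <linux/errno.h>
	#include <linux/io.h>
	#include <linux/types.h>

	/*
	 * Sketch only: copy a firmware image word by word into an MMIO
	 * window and verify each store by reading it back, roughly what
	 * the cx18/ivtv loaders indexed above do through their raw
	 * write/read helpers.
	 */
	static int fw_copy_verify(u32 __iomem *dst, const u32 *src, size_t words)
	{
		size_t i;

		for (i = 0; i < words; i++) {
			__raw_writel(src[i], dst + i);
			if (__raw_readl(dst + i) != src[i])
				return -EIO;	/* readback mismatch */
		}

		return 0;
	}

The readback lets the loader detect a failed copy immediately and abort, rather than starting the device on a corrupt image.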
dst 419 drivers/media/pci/ivtv/ivtv-irq.c s->sg_dma->dst = cpu_to_le32(s->sg_processing[s->sg_processed].dst); dst 435 drivers/media/pci/ivtv/ivtv-irq.c s->sg_dma->dst = cpu_to_le32(s->sg_processing[s->sg_processed].dst); dst 70 drivers/media/pci/ivtv/ivtv-udma.c dma->SGarray[i].dst = cpu_to_le32(buffer_offset); dst 188 drivers/media/pci/ivtv/ivtv-vbi.c u8 *dst = &itv->vbi.sliced_mpeg_data[idx][0]; dst 204 drivers/media/pci/ivtv/ivtv-vbi.c dst[sd + 12 + line * 43] = dst 206 drivers/media/pci/ivtv/ivtv-vbi.c memcpy(dst + sd + 12 + line * 43 + 1, itv->vbi.sliced_data[i].data, 42); dst 209 drivers/media/pci/ivtv/ivtv-vbi.c memcpy(dst, mpeg_hdr_data, sizeof(mpeg_hdr_data)); dst 214 drivers/media/pci/ivtv/ivtv-vbi.c memcpy(dst + sd, "ITV0", 4); dst 215 drivers/media/pci/ivtv/ivtv-vbi.c memmove(dst + sd + 4, dst + sd + 12, line * 43); dst 218 drivers/media/pci/ivtv/ivtv-vbi.c memcpy(dst + sd, "itv0", 4); dst 221 drivers/media/pci/ivtv/ivtv-vbi.c memcpy(dst + sd + 4, &linemask[0], 8); dst 224 drivers/media/pci/ivtv/ivtv-vbi.c dst[4+16] = (size + 10) >> 8; dst 225 drivers/media/pci/ivtv/ivtv-vbi.c dst[5+16] = (size + 10) & 0xff; dst 226 drivers/media/pci/ivtv/ivtv-vbi.c dst[9+16] = 0x21 | ((pts_stamp >> 29) & 0x6); dst 227 drivers/media/pci/ivtv/ivtv-vbi.c dst[10+16] = (pts_stamp >> 22) & 0xff; dst 228 drivers/media/pci/ivtv/ivtv-vbi.c dst[11+16] = 1 | ((pts_stamp >> 14) & 0xff); dst 229 drivers/media/pci/ivtv/ivtv-vbi.c dst[12+16] = (pts_stamp >> 7) & 0xff; dst 230 drivers/media/pci/ivtv/ivtv-vbi.c dst[13+16] = 1 | ((pts_stamp & 0x7f) << 1); dst 130 drivers/media/pci/ivtv/ivtv-yuv.c dma->SGarray[dma->SG_length].dst = cpu_to_le32(IVTV_DECODER_OFFSET + yuv_offset[frame]); dst 982 drivers/media/pci/ivtv/ivtv-yuv.c nf->dst_x = args->dst.left; dst 983 drivers/media/pci/ivtv/ivtv-yuv.c nf->dst_y = args->dst.top; dst 984 drivers/media/pci/ivtv/ivtv-yuv.c nf->dst_w = args->dst.width; dst 985 drivers/media/pci/ivtv/ivtv-yuv.c nf->dst_h = args->dst.height; dst 986 drivers/media/pci/ivtv/ivtv-yuv.c nf->tru_x = args->dst.left; dst 1123 drivers/media/pci/ivtv/ivtv-yuv.c dma_args.dst = yi->main_rect; dst 371 drivers/media/pci/ivtv/ivtvfb.c void *dst; dst 403 drivers/media/pci/ivtv/ivtvfb.c dst = (void __force *) (info->screen_base + p); dst 411 drivers/media/pci/ivtv/ivtvfb.c ((unsigned long)buf & 3) == ((unsigned long)dst & 3)) { dst 413 drivers/media/pci/ivtv/ivtvfb.c if ((unsigned long)dst & 3) { dst 414 drivers/media/pci/ivtv/ivtvfb.c lead = 4 - ((unsigned long)dst & 3); dst 415 drivers/media/pci/ivtv/ivtvfb.c if (copy_from_user(dst, buf, lead)) dst 418 drivers/media/pci/ivtv/ivtvfb.c dst += lead; dst 429 drivers/media/pci/ivtv/ivtvfb.c dst += dma_size; dst 432 drivers/media/pci/ivtv/ivtvfb.c if (tail && copy_from_user(dst, buf, tail)) dst 434 drivers/media/pci/ivtv/ivtvfb.c } else if (copy_from_user(dst, buf, count)) { dst 47 drivers/media/pci/ngene/ngene-core.c #define ngcpyfrom(dst, adr, count) memcpy_fromio((dst), dev->iomem + (adr), (count)) dst 61 drivers/media/pci/saa7164/saa7164-fw.c u32 dlflags, u8 __iomem *dst, u32 dstsize) dst 75 drivers/media/pci/saa7164/saa7164-fw.c __func__, src, srcsize, dlflags, dst, dstsize); dst 77 drivers/media/pci/saa7164/saa7164-fw.c if ((src == NULL) || (dst == NULL)) { dst 125 drivers/media/pci/saa7164/saa7164-fw.c memcpy_toio(dst, srcbuf + offset, dstsize); dst 143 drivers/media/pci/saa7164/saa7164-fw.c memcpy_toio(dst, srcbuf+offset, srcsize); dst 218 drivers/media/pci/solo6x10/solo6x10-g723.c unsigned long pos, void __user *dst, dst 236 
drivers/media/pci/solo6x10/solo6x10-g723.c if (copy_to_user(dst, solo_pcm->g723_buf, G723_PERIOD_BYTES)) dst 238 drivers/media/pci/solo6x10/solo6x10-g723.c dst += G723_PERIOD_BYTES; dst 245 drivers/media/pci/solo6x10/solo6x10-g723.c unsigned long pos, void *dst, dst 263 drivers/media/pci/solo6x10/solo6x10-g723.c memcpy(dst, solo_pcm->g723_buf, G723_PERIOD_BYTES); dst 264 drivers/media/pci/solo6x10/solo6x10-g723.c dst += G723_PERIOD_BYTES; dst 1217 drivers/media/pci/tw5864/tw5864-video.c u8 *dst; dst 1233 drivers/media/pci/tw5864/tw5864-video.c dst = vb2_plane_vaddr(&vb->vb.vb2_buf, 0); dst 1252 drivers/media/pci/tw5864/tw5864-video.c tw5864_h264_put_stream_header(&dst, &dst_space, input->qp, dst 1256 drivers/media/pci/tw5864/tw5864-video.c tw5864_h264_put_slice_header(&dst, &dst_space, input->h264_idr_pic_id, dst 1260 drivers/media/pci/tw5864/tw5864-video.c input->buf_cur_ptr = dst; dst 1368 drivers/media/pci/tw5864/tw5864-video.c u8 *dst = input->buf_cur_ptr; dst 1411 drivers/media/pci/tw5864/tw5864-video.c dst[0] = (input->tail & tail_mask) | (vlc_first_byte & vlc_mask); dst 1413 drivers/media/pci/tw5864/tw5864-video.c dst++; dst 1427 drivers/media/pci/tw5864/tw5864-video.c *dst++ = 0x03; dst 1430 drivers/media/pci/tw5864/tw5864-video.c *dst++ = *src; dst 1434 drivers/media/pci/tw5864/tw5864-video.c dst - (u8 *)vb2_plane_vaddr(&vb->vb.vb2_buf, 0)); dst 2728 drivers/media/platform/coda/coda-common.c u32 *dst = dev->codebuf.vaddr; dst 2734 drivers/media/platform/coda/coda-common.c dst[i] = (src[i] << 16) | (src[i] >> 16); dst 2737 drivers/media/platform/coda/coda-common.c dst[i] = (src[i + 1] << 16) | (src[i + 1] >> 16); dst 2738 drivers/media/platform/coda/coda-common.c dst[i + 1] = (src[i] << 16) | (src[i] >> 16); dst 136 drivers/media/platform/coda/coda-jpeg.c u32 *dst = parabuf + desc->offset; dst 142 drivers/media/platform/coda/coda-jpeg.c dst[i + 1] = swab32(src[i]); dst 143 drivers/media/platform/coda/coda-jpeg.c dst[i] = swab32(src[i + 1]); dst 148 drivers/media/platform/coda/imx-vdoa.c void vdoa_device_run(struct vdoa_ctx *ctx, dma_addr_t dst, dma_addr_t src) dst 175 drivers/media/platform/coda/imx-vdoa.c val = dst; dst 20 drivers/media/platform/coda/imx-vdoa.h void vdoa_device_run(struct vdoa_ctx *ctx, dma_addr_t dst, dma_addr_t src); dst 41 drivers/media/platform/coda/imx-vdoa.h dma_addr_t dst, dma_addr_t src) { }; dst 240 drivers/media/platform/exynos-gsc/gsc-core.c int gsc_cal_prescaler_ratio(struct gsc_variant *var, u32 src, u32 dst, dst 243 drivers/media/platform/exynos-gsc/gsc-core.c if ((dst > src) || (dst >= src / var->poly_sc_down_max)) { dst 248 drivers/media/platform/exynos-gsc/gsc-core.c if ((src / var->poly_sc_down_max / var->pre_sc_down_max) > dst) { dst 253 drivers/media/platform/exynos-gsc/gsc-core.c *ratio = (dst > (src / 8)) ? 
2 : 4; dst 392 drivers/media/platform/exynos-gsc/gsc-core.h int gsc_cal_prescaler_ratio(struct gsc_variant *var, u32 src, u32 dst, dst 580 drivers/media/platform/exynos4-is/fimc-capture.c struct fimc_frame *dst = &ctx->d_frame; dst 626 drivers/media/platform/exynos4-is/fimc-capture.c min_w = dst->offs_h + dst->width; dst 627 drivers/media/platform/exynos4-is/fimc-capture.c min_h = dst->offs_v + dst->height; dst 644 drivers/media/platform/exynos4-is/fimc-capture.c dst->f_width, dst->f_height); dst 33 drivers/media/platform/exynos4-is/fimc-is-param.c static void __hw_param_copy(void *dst, void *src) dst 35 drivers/media/platform/exynos4-is/fimc-is-param.c memcpy(dst, src, FIMC_IS_PARAM_MAX_SIZE); dst 40 drivers/media/platform/exynos4-is/fimc-is-param.c struct param_global_shotmode *dst, *src; dst 42 drivers/media/platform/exynos4-is/fimc-is-param.c dst = &is->is_p_region->parameter.global.shotmode; dst 44 drivers/media/platform/exynos4-is/fimc-is-param.c __hw_param_copy(dst, src); dst 49 drivers/media/platform/exynos4-is/fimc-is-param.c struct param_sensor_framerate *dst, *src; dst 51 drivers/media/platform/exynos4-is/fimc-is-param.c dst = &is->is_p_region->parameter.sensor.frame_rate; dst 53 drivers/media/platform/exynos4-is/fimc-is-param.c __hw_param_copy(dst, src); dst 549 drivers/media/platform/rcar_fdp1.c struct fdp1_field_buffer *dst; dst 994 drivers/media/platform/rcar_fdp1.c fdp1_write(fdp1, job->dst->addrs[0], FD1_WPF_ADDR_Y); dst 995 drivers/media/platform/rcar_fdp1.c fdp1_write(fdp1, job->dst->addrs[1], FD1_WPF_ADDR_C0); dst 996 drivers/media/platform/rcar_fdp1.c fdp1_write(fdp1, job->dst->addrs[2], FD1_WPF_ADDR_C1); dst 1218 drivers/media/platform/rcar_fdp1.c job->dst = &fbuf->fields[0]; dst 1221 drivers/media/platform/rcar_fdp1.c job->dst->vb->sequence = ctx->sequence; dst 1238 drivers/media/platform/rcar_fdp1.c job->dst->vb->vb2_buf.timestamp = job->active->vb->vb2_buf.timestamp; dst 1240 drivers/media/platform/rcar_fdp1.c job->dst->vb->flags = job->active->vb->flags & dst 1334 drivers/media/platform/rcar_fdp1.c v4l2_m2m_buf_done(job->dst->vb, state); dst 1335 drivers/media/platform/rcar_fdp1.c job->dst = NULL; dst 2016 drivers/media/platform/rcar_fdp1.c v4l2_m2m_buf_done(job->dst->vb, VB2_BUF_STATE_ERROR); dst 2017 drivers/media/platform/rcar_fdp1.c job->dst = NULL; dst 32 drivers/media/platform/rockchip/rga/rga-hw.c static unsigned int rga_get_scaling(unsigned int src, unsigned int dst) dst 42 drivers/media/platform/rockchip/rga/rga-hw.c return (src > dst) ? 
((dst << 16) / src) : ((src << 16) / dst); dst 38 drivers/media/platform/rockchip/rga/rga.c struct vb2_v4l2_buffer *src, *dst; dst 46 drivers/media/platform/rockchip/rga/rga.c dst = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx); dst 49 drivers/media/platform/rockchip/rga/rga.c rga_buf_map(&dst->vb2_buf); dst 66 drivers/media/platform/rockchip/rga/rga.c struct vb2_v4l2_buffer *src, *dst; dst 74 drivers/media/platform/rockchip/rga/rga.c dst = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx); dst 77 drivers/media/platform/rockchip/rga/rga.c WARN_ON(!dst); dst 79 drivers/media/platform/rockchip/rga/rga.c dst->timecode = src->timecode; dst 80 drivers/media/platform/rockchip/rga/rga.c dst->vb2_buf.timestamp = src->vb2_buf.timestamp; dst 81 drivers/media/platform/rockchip/rga/rga.c dst->flags &= ~V4L2_BUF_FLAG_TSTAMP_SRC_MASK; dst 82 drivers/media/platform/rockchip/rga/rga.c dst->flags |= src->flags & V4L2_BUF_FLAG_TSTAMP_SRC_MASK; dst 85 drivers/media/platform/rockchip/rga/rga.c v4l2_m2m_buf_done(dst, VB2_BUF_STATE_DONE); dst 84 drivers/media/platform/s5p-g2d/g2d-hw.c struct g2d_frame *dst) dst 89 drivers/media/platform/s5p-g2d/g2d-hw.c w((src->c_width << 16) / dst->c_width, SRC_XSCALE_REG); dst 90 drivers/media/platform/s5p-g2d/g2d-hw.c w((src->c_height << 16) / dst->c_height, SRC_YSCALE_REG); dst 502 drivers/media/platform/s5p-g2d/g2d.c struct vb2_v4l2_buffer *src, *dst; dst 509 drivers/media/platform/s5p-g2d/g2d.c dst = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx); dst 520 drivers/media/platform/s5p-g2d/g2d.c g2d_set_dst_addr(dev, vb2_dma_contig_plane_dma_addr(&dst->vb2_buf, 0)); dst 543 drivers/media/platform/s5p-g2d/g2d.c struct vb2_v4l2_buffer *src, *dst; dst 551 drivers/media/platform/s5p-g2d/g2d.c dst = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx); dst 554 drivers/media/platform/s5p-g2d/g2d.c BUG_ON(dst == NULL); dst 556 drivers/media/platform/s5p-g2d/g2d.c dst->timecode = src->timecode; dst 557 drivers/media/platform/s5p-g2d/g2d.c dst->vb2_buf.timestamp = src->vb2_buf.timestamp; dst 558 drivers/media/platform/s5p-g2d/g2d.c dst->flags &= ~V4L2_BUF_FLAG_TSTAMP_SRC_MASK; dst 559 drivers/media/platform/s5p-g2d/g2d.c dst->flags |= dst 563 drivers/media/platform/s5p-g2d/g2d.c v4l2_m2m_buf_done(dst, VB2_BUF_STATE_DONE); dst 83 drivers/media/platform/s5p-g2d/g2d.h struct g2d_frame *src, struct g2d_frame *dst); dst 1066 drivers/media/platform/sh_veu.c struct vb2_v4l2_buffer *dst; dst 1082 drivers/media/platform/sh_veu.c dst = v4l2_m2m_dst_buf_remove(veu->m2m_ctx); dst 1084 drivers/media/platform/sh_veu.c if (!src || !dst) dst 1087 drivers/media/platform/sh_veu.c dst->vb2_buf.timestamp = src->vb2_buf.timestamp; dst 1088 drivers/media/platform/sh_veu.c dst->flags &= ~V4L2_BUF_FLAG_TSTAMP_SRC_MASK; dst 1089 drivers/media/platform/sh_veu.c dst->flags |= dst 1091 drivers/media/platform/sh_veu.c dst->timecode = src->timecode; dst 1095 drivers/media/platform/sh_veu.c v4l2_m2m_buf_done(dst, VB2_BUF_STATE_DONE); dst 444 drivers/media/platform/sti/bdisp/bdisp-debug.c struct bdisp_frame src, dst; dst 452 drivers/media/platform/sti/bdisp/bdisp-debug.c dst = request->dst; dst 457 drivers/media/platform/sti/bdisp/bdisp-debug.c bdisp_fmt_to_str(src), bdisp_fmt_to_str(dst)); dst 461 drivers/media/platform/sti/bdisp/bdisp-debug.c dst.crop.width, dst.crop.height, dst 462 drivers/media/platform/sti/bdisp/bdisp-debug.c dst.crop.left, dst.crop.top); dst 464 drivers/media/platform/sti/bdisp/bdisp-debug.c src.width, src.height, dst.width, dst.height); dst 634 drivers/media/platform/sti/bdisp/bdisp-hw.c dst_w = ctx->dst.crop.width; dst 635 
drivers/media/platform/sti/bdisp/bdisp-hw.c dst_h = ctx->dst.crop.height; dst 662 drivers/media/platform/sti/bdisp/bdisp-hw.c struct bdisp_frame *dst = &ctx->dst; dst 681 drivers/media/platform/sti/bdisp/bdisp-hw.c c->dst_nbp = dst->fmt->nb_planes; dst 682 drivers/media/platform/sti/bdisp/bdisp-hw.c c->dst_yuv = (dst->fmt->pixelformat == V4L2_PIX_FMT_NV12) || dst 683 drivers/media/platform/sti/bdisp/bdisp-hw.c (dst->fmt->pixelformat == V4L2_PIX_FMT_YUV420); dst 763 drivers/media/platform/sti/bdisp/bdisp-hw.c struct bdisp_frame *dst = &ctx->dst; dst 766 drivers/media/platform/sti/bdisp/bdisp-hw.c struct v4l2_rect dst_rect = dst->crop; dst 768 drivers/media/platform/sti/bdisp/bdisp-hw.c s32 dst_width = dst->crop.width; dst 787 drivers/media/platform/sti/bdisp/bdisp-hw.c dst_fmt = dst->fmt->pixelformat; dst 828 drivers/media/platform/sti/bdisp/bdisp-hw.c node->tba = (t_plan == BDISP_CBCR) ? dst->paddr[1] : dst->paddr[0]; dst 830 drivers/media/platform/sti/bdisp/bdisp-hw.c node->tty = dst->bytesperline; dst 1056 drivers/media/platform/sti/bdisp/bdisp-hw.c request->dst = ctx->dst; dst 171 drivers/media/platform/sti/bdisp/bdisp-v4l2.c return &ctx->dst; dst 283 drivers/media/platform/sti/bdisp/bdisp-v4l2.c struct bdisp_frame *src, *dst; dst 288 drivers/media/platform/sti/bdisp/bdisp-v4l2.c dst = &ctx->dst; dst 296 drivers/media/platform/sti/bdisp/bdisp-v4l2.c ret = bdisp_get_addr(ctx, &dst_vb->vb2_buf, dst, dst->paddr); dst 613 drivers/media/platform/sti/bdisp/bdisp-v4l2.c ctx->dst = bdisp_dflt_fmt; dst 813 drivers/media/platform/sti/bdisp/bdisp-v4l2.c &ctx->src : &ctx->dst; dst 91 drivers/media/platform/sti/bdisp/bdisp.h struct bdisp_frame dst; dst 115 drivers/media/platform/sti/bdisp/bdisp.h struct bdisp_frame dst; dst 1058 drivers/media/platform/sti/c8sectpfe/c8sectpfe-core.c const struct firmware *fw, u8 __iomem *dst, int seg_num) dst 1068 drivers/media/platform/sti/c8sectpfe/c8sectpfe-core.c dst, phdr->p_memsz); dst 1070 drivers/media/platform/sti/c8sectpfe/c8sectpfe-core.c memcpy((void __force *)dst, (void *)fw->data + phdr->p_offset, dst 1073 drivers/media/platform/sti/c8sectpfe/c8sectpfe-core.c memset((void __force *)dst + phdr->p_filesz, 0, dst 1081 drivers/media/platform/sti/c8sectpfe/c8sectpfe-core.c u8 __iomem *dst; dst 1114 drivers/media/platform/sti/c8sectpfe/c8sectpfe-core.c dst = (u8 __iomem *) fei->io + DMA_MEMDMA_IMEM; dst 1119 drivers/media/platform/sti/c8sectpfe/c8sectpfe-core.c dst += (phdr->p_paddr & 0xFFFFF) * sizeof(unsigned int); dst 1120 drivers/media/platform/sti/c8sectpfe/c8sectpfe-core.c load_imem_segment(fei, phdr, fw, dst, i); dst 1122 drivers/media/platform/sti/c8sectpfe/c8sectpfe-core.c dst = (u8 __iomem *) fei->io + DMA_MEMDMA_DMEM; dst 1127 drivers/media/platform/sti/c8sectpfe/c8sectpfe-core.c dst += (phdr->p_paddr & 0xFFFFF) * sizeof(unsigned int); dst 1128 drivers/media/platform/sti/c8sectpfe/c8sectpfe-core.c load_dmem_segment(fei, phdr, fw, dst, i); dst 576 drivers/media/platform/vicodec/codec-fwht.c static void fill_encoder_block(const u8 *input, s16 *dst, dst 583 drivers/media/platform/vicodec/codec-fwht.c *dst++ = *input; dst 642 drivers/media/platform/vicodec/codec-fwht.c static void fill_decoder_block(u8 *dst, const s16 *input, int stride, dst 648 drivers/media/platform/vicodec/codec-fwht.c for (j = 0; j < 8; j++, input++, dst += dst_step) { dst 650 drivers/media/platform/vicodec/codec-fwht.c *dst = 0; dst 652 drivers/media/platform/vicodec/codec-fwht.c *dst = 255; dst 654 drivers/media/platform/vicodec/codec-fwht.c *dst = *input; dst 656 
drivers/media/platform/vicodec/codec-fwht.c dst += stride - (8 * dst_step); dst 836 drivers/media/platform/vicodec/codec-fwht.c unsigned int ref_step, u8 *dst, dst 855 drivers/media/platform/vicodec/codec-fwht.c memcpy(dst, *rlco, width); dst 856 drivers/media/platform/vicodec/codec-fwht.c dst += dst_stride; dst 872 drivers/media/platform/vicodec/codec-fwht.c u8 *dstp = dst + j * 8 * dst_stride + i * 8 * dst_step; dst 913 drivers/media/platform/vicodec/codec-fwht.c struct fwht_raw_frame *dst, unsigned int dst_stride, dst 921 drivers/media/platform/vicodec/codec-fwht.c ref->luma_alpha_step, dst->luma, dst_stride, dst 922 drivers/media/platform/vicodec/codec-fwht.c dst->luma_alpha_step, dst 937 drivers/media/platform/vicodec/codec-fwht.c ref->chroma_step, dst->cb, dst_chroma_stride, dst 938 drivers/media/platform/vicodec/codec-fwht.c dst->chroma_step, dst 943 drivers/media/platform/vicodec/codec-fwht.c ref->chroma_step, dst->cr, dst_chroma_stride, dst 944 drivers/media/platform/vicodec/codec-fwht.c dst->chroma_step, dst 952 drivers/media/platform/vicodec/codec-fwht.c ref->luma_alpha_step, dst->alpha, dst_stride, dst 953 drivers/media/platform/vicodec/codec-fwht.c dst->luma_alpha_step, dst 148 drivers/media/platform/vicodec/codec-fwht.h struct fwht_raw_frame *dst, unsigned int dst_stride, dst 270 drivers/media/platform/vim2m.c u8 *src, u8 *dst, bool reverse) dst 275 drivers/media/platform/vim2m.c memcpy(dst, src, q_data_out->width * depth); dst 278 drivers/media/platform/vim2m.c memcpy(dst, src, depth); dst 279 drivers/media/platform/vim2m.c memcpy(dst + depth, src - depth, depth); dst 281 drivers/media/platform/vim2m.c dst += depth << 1; dst 289 drivers/media/platform/vim2m.c u8 *src[2], u8 **dst, int ypos, bool reverse) dst 348 drivers/media/platform/vim2m.c __le16 *dst_pix = (__le16 *)*dst; dst 355 drivers/media/platform/vim2m.c *dst += 2; dst 361 drivers/media/platform/vim2m.c __be16 *dst_pix = (__be16 *)*dst; dst 368 drivers/media/platform/vim2m.c *dst += 2; dst 373 drivers/media/platform/vim2m.c *(*dst)++ = *r++; dst 374 drivers/media/platform/vim2m.c *(*dst)++ = *g++; dst 375 drivers/media/platform/vim2m.c *(*dst)++ = *b++; dst 380 drivers/media/platform/vim2m.c *(*dst)++ = *b++; dst 381 drivers/media/platform/vim2m.c *(*dst)++ = *g++; dst 382 drivers/media/platform/vim2m.c *(*dst)++ = *r++; dst 399 drivers/media/platform/vim2m.c *(*dst)++ = y; dst 400 drivers/media/platform/vim2m.c *(*dst)++ = u; dst 402 drivers/media/platform/vim2m.c *(*dst)++ = y1; dst 403 drivers/media/platform/vim2m.c *(*dst)++ = v; dst 408 drivers/media/platform/vim2m.c *(*dst)++ = *b; dst 409 drivers/media/platform/vim2m.c *(*dst)++ = *++g; dst 411 drivers/media/platform/vim2m.c *(*dst)++ = *g; dst 412 drivers/media/platform/vim2m.c *(*dst)++ = *++r; dst 417 drivers/media/platform/vim2m.c *(*dst)++ = *g; dst 418 drivers/media/platform/vim2m.c *(*dst)++ = *++b; dst 420 drivers/media/platform/vim2m.c *(*dst)++ = *r; dst 421 drivers/media/platform/vim2m.c *(*dst)++ = *++g; dst 426 drivers/media/platform/vim2m.c *(*dst)++ = *g; dst 427 drivers/media/platform/vim2m.c *(*dst)++ = *++r; dst 429 drivers/media/platform/vim2m.c *(*dst)++ = *b; dst 430 drivers/media/platform/vim2m.c *(*dst)++ = *++g; dst 435 drivers/media/platform/vim2m.c *(*dst)++ = *r; dst 436 drivers/media/platform/vim2m.c *(*dst)++ = *++g; dst 438 drivers/media/platform/vim2m.c *(*dst)++ = *g; dst 439 drivers/media/platform/vim2m.c *(*dst)++ = *++b; dst 111 drivers/media/platform/vivid/vivid-kthread-cap.c static void scale_line(const u8 *src, u8 *dst, 
unsigned srcw, unsigned dstw, unsigned twopixsize) dst 128 drivers/media/platform/vivid/vivid-kthread-cap.c for (x = 0; x < dstw; x++, dst += twopixsize) { dst 129 drivers/media/platform/vivid/vivid-kthread-cap.c memcpy(dst, src + src_x * twopixsize, twopixsize); dst 836 drivers/media/platform/vsp1/vsp1_drm.c cfg->dst.left, cfg->dst.top, cfg->dst.width, cfg->dst.height, dst 856 drivers/media/platform/vsp1/vsp1_drm.c vsp1->drm->inputs[rpf_index].compose = cfg->dst; dst 232 drivers/media/usb/airspy/airspy.c void *dst, void *src, unsigned int src_len) dst 237 drivers/media/usb/airspy/airspy.c memcpy(dst, src, src_len); dst 21 drivers/media/usb/as102/as102_fw.c static unsigned char atohx(unsigned char *dst, char *src) dst 33 drivers/media/usb/as102/as102_fw.c *dst = value = ((msb & 0xF) << 4) | (lsb & 0xF); dst 45 drivers/media/usb/as102/as102_fw.c unsigned char *src, dst; dst 54 drivers/media/usb/as102/as102_fw.c atohx(&dst, src); dst 58 drivers/media/usb/as102/as102_fw.c *dataLength = dst; dst 61 drivers/media/usb/as102/as102_fw.c addr[2] = dst; dst 64 drivers/media/usb/as102/as102_fw.c addr[3] = dst; dst 68 drivers/media/usb/as102/as102_fw.c if (dst == 0x04) dst 76 drivers/media/usb/as102/as102_fw.c addr[(count - 4)] = dst; dst 78 drivers/media/usb/as102/as102_fw.c data[(count - 4)] = dst; dst 81 drivers/media/usb/as102/as102_fw.c data[(count - 4)] = dst; dst 40 drivers/media/usb/dvb-usb/vp702x-fe.c struct vp702x_device_state *dst = st->d->priv; dst 44 drivers/media/usb/dvb-usb/vp702x-fe.c mutex_lock(&dst->buf_mutex); dst 45 drivers/media/usb/dvb-usb/vp702x-fe.c buf = dst->buf; dst 56 drivers/media/usb/dvb-usb/vp702x-fe.c mutex_unlock(&dst->buf_mutex); dst 139 drivers/media/usb/dvb-usb/vp702x-fe.c struct vp702x_device_state *dst = st->d->priv; dst 146 drivers/media/usb/dvb-usb/vp702x-fe.c mutex_lock(&dst->buf_mutex); dst 148 drivers/media/usb/dvb-usb/vp702x-fe.c cmd = dst->buf; dst 192 drivers/media/usb/dvb-usb/vp702x-fe.c mutex_unlock(&dst->buf_mutex); dst 216 drivers/media/usb/dvb-usb/vp702x-fe.c struct vp702x_device_state *dst = st->d->priv; dst 223 drivers/media/usb/dvb-usb/vp702x-fe.c mutex_lock(&dst->buf_mutex); dst 225 drivers/media/usb/dvb-usb/vp702x-fe.c cmd = dst->buf; dst 238 drivers/media/usb/dvb-usb/vp702x-fe.c mutex_unlock(&dst->buf_mutex); dst 254 drivers/media/usb/dvb-usb/vp702x-fe.c struct vp702x_device_state *dst = st->d->priv; dst 268 drivers/media/usb/dvb-usb/vp702x-fe.c mutex_lock(&dst->buf_mutex); dst 270 drivers/media/usb/dvb-usb/vp702x-fe.c buf = dst->buf; dst 279 drivers/media/usb/dvb-usb/vp702x-fe.c mutex_unlock(&dst->buf_mutex); dst 288 drivers/media/usb/dvb-usb/vp702x-fe.c struct vp702x_device_state *dst = st->d->priv; dst 301 drivers/media/usb/dvb-usb/vp702x-fe.c mutex_lock(&dst->buf_mutex); dst 303 drivers/media/usb/dvb-usb/vp702x-fe.c buf = dst->buf; dst 312 drivers/media/usb/dvb-usb/vp702x-fe.c mutex_unlock(&dst->buf_mutex); dst 190 drivers/media/usb/dvb-usb/vp702x.c struct vp702x_device_state *dst = adap->dev->priv; dst 204 drivers/media/usb/dvb-usb/vp702x.c mutex_lock(&dst->buf_mutex); dst 206 drivers/media/usb/dvb-usb/vp702x.c buf = dst->buf; dst 211 drivers/media/usb/dvb-usb/vp702x.c mutex_unlock(&dst->buf_mutex); dst 220 drivers/media/usb/dvb-usb/vp702x.c struct vp702x_device_state *dst = adap->dev->priv; dst 233 drivers/media/usb/dvb-usb/vp702x.c mutex_lock(&dst->buf_mutex); dst 234 drivers/media/usb/dvb-usb/vp702x.c b = dst->buf; dst 239 drivers/media/usb/dvb-usb/vp702x.c mutex_unlock(&dst->buf_mutex); dst 454 drivers/media/usb/hackrf/hackrf.c static 
void hackrf_copy_stream(struct hackrf_dev *dev, void *dst, void *src, dst 457 drivers/media/usb/hackrf/hackrf.c memcpy(dst, src, src_len); dst 242 drivers/media/usb/msi2500/msi2500.c static int msi2500_convert_stream(struct msi2500_dev *dev, u8 *dst, u8 *src, dst 274 drivers/media/usb/msi2500/msi2500.c u8 *u8dst = (u8 *)dst; dst 280 drivers/media/usb/msi2500/msi2500.c dst += 1008; dst 288 drivers/media/usb/msi2500/msi2500.c u16 *u16dst = (u16 *)dst; dst 302 drivers/media/usb/msi2500/msi2500.c dst += 1008; dst 310 drivers/media/usb/msi2500/msi2500.c memcpy(dst, src, 984); dst 312 drivers/media/usb/msi2500/msi2500.c dst += 984; dst 317 drivers/media/usb/msi2500/msi2500.c memcpy(dst, src, 1008); dst 319 drivers/media/usb/msi2500/msi2500.c dst += 1008; dst 324 drivers/media/usb/msi2500/msi2500.c memcpy(dst, src, 1008); dst 326 drivers/media/usb/msi2500/msi2500.c dst += 1008; dst 331 drivers/media/usb/msi2500/msi2500.c memcpy(dst, src, 1008); dst 333 drivers/media/usb/msi2500/msi2500.c dst += 1008; dst 1930 drivers/media/usb/pvrusb2/pvrusb2-hdw.c unsigned short *dst, const unsigned char *src, dst 1936 drivers/media/usb/pvrusb2/pvrusb2-hdw.c dst[cnt] = src[cnt]; dst 1939 drivers/media/usb/pvrusb2/pvrusb2-hdw.c dst[cnt] = I2C_CLIENT_END; dst 349 drivers/media/usb/pwc/pwc-dec23.c static void copy_image_block_Y(const int *src, unsigned char *dst, unsigned int bytes_per_line, unsigned int scalebits) dst 354 drivers/media/usb/pwc/pwc-dec23.c unsigned char *d = dst; dst 361 drivers/media/usb/pwc/pwc-dec23.c d = dst + bytes_per_line; dst 367 drivers/media/usb/pwc/pwc-dec23.c d = dst + bytes_per_line*2; dst 373 drivers/media/usb/pwc/pwc-dec23.c d = dst + bytes_per_line*3; dst 381 drivers/media/usb/pwc/pwc-dec23.c unsigned char *d = dst; dst 385 drivers/media/usb/pwc/pwc-dec23.c d = dst + bytes_per_line; dst 389 drivers/media/usb/pwc/pwc-dec23.c d = dst + bytes_per_line*2; dst 393 drivers/media/usb/pwc/pwc-dec23.c d = dst + bytes_per_line*3; dst 403 drivers/media/usb/pwc/pwc-dec23.c static void copy_image_block_CrCb(const int *src, unsigned char *dst, unsigned int bytes_per_line, unsigned int scalebits) dst 409 drivers/media/usb/pwc/pwc-dec23.c unsigned char *d = dst; dst 420 drivers/media/usb/pwc/pwc-dec23.c d = dst + bytes_per_line; dst 433 drivers/media/usb/pwc/pwc-dec23.c unsigned char *d = dst; dst 440 drivers/media/usb/pwc/pwc-dec23.c d = dst + bytes_per_line; dst 647 drivers/media/usb/pwc/pwc-dec23.c void *dst) dst 664 drivers/media/usb/pwc/pwc-dec23.c pout_planar_y = dst; dst 665 drivers/media/usb/pwc/pwc-dec23.c pout_planar_u = dst + plane_size; dst 666 drivers/media/usb/pwc/pwc-dec23.c pout_planar_v = dst + plane_size + plane_size / 4; dst 48 drivers/media/usb/pwc/pwc-dec23.h void *dst); dst 105 drivers/media/usb/stk1160/stk1160-video.c u8 *dst = buf->mem; dst 128 drivers/media/usb/stk1160/stk1160-video.c dst += bytesperline; dst 131 drivers/media/usb/stk1160/stk1160-video.c dst += linesdone * bytesperline * 2 + lineoff; dst 158 drivers/media/usb/stk1160/stk1160-video.c if ((unsigned long)dst + lencopy > dst 164 drivers/media/usb/stk1160/stk1160-video.c memcpy(dst, src, lencopy); dst 173 drivers/media/usb/stk1160/stk1160-video.c dst += lencopy + bytesperline; dst 200 drivers/media/usb/stk1160/stk1160-video.c if ((unsigned long)dst + lencopy > dst 206 drivers/media/usb/stk1160/stk1160-video.c memcpy(dst, src, lencopy); dst 381 drivers/media/usb/usbtv/usbtv-video.c u32 *dst = &frame[part_index * USBTV_CHUNK/2]; dst 383 drivers/media/usb/usbtv/usbtv-video.c memcpy(dst, src, USBTV_CHUNK/2 * 
sizeof(*src)); dst 1115 drivers/media/usb/uvc/uvc_video.c memcpy(op->dst, op->src, op->len); dst 1144 drivers/media/usb/uvc/uvc_video.c op->dst = buf->mem + buf->bytesused; dst 507 drivers/media/usb/uvc/uvcvideo.h void *dst; dst 201 drivers/memory/ti-emif-pm.c int ti_emif_copy_pm_function_table(struct gen_pool *sram_pool, void *dst) dst 208 drivers/memory/ti-emif-pm.c copy_addr = sram_exec_copy(sram_pool, dst, dst 62 drivers/mfd/max14577.c unsigned int min_ua, unsigned int max_ua, u8 *dst) dst 77 drivers/mfd/max14577.c *dst = 0x0; dst 92 drivers/mfd/max14577.c *dst = 0x1 << CHGCTRL4_MBCICHWRCL_SHIFT; dst 94 drivers/mfd/max14577.c *dst |= current_bits << CHGCTRL4_MBCICHWRCH_SHIFT; dst 827 drivers/misc/fastrpc.c void *dst = (void *)(uintptr_t)rpra[i].pv; dst 831 drivers/misc/fastrpc.c if (copy_from_user(dst, (void __user *)src, dst 837 drivers/misc/fastrpc.c memcpy(dst, src, len); dst 868 drivers/misc/fastrpc.c void *dst = (void *)(uintptr_t)ctx->args[i].ptr; dst 872 drivers/misc/fastrpc.c if (copy_to_user((void __user *)dst, src, len)) dst 875 drivers/misc/fastrpc.c memcpy(dst, src, len); dst 23 drivers/misc/habanalabs/firmware_if.c void __iomem *dst) dst 48 drivers/misc/habanalabs/firmware_if.c memcpy_toio(dst, fw_data, fw_size); dst 2187 drivers/misc/habanalabs/goya/goya.c void __iomem *dst; dst 2190 drivers/misc/habanalabs/goya/goya.c dst = hdev->pcie_bar[SRAM_CFG_BAR_ID] + UBOOT_FW_OFFSET; dst 2192 drivers/misc/habanalabs/goya/goya.c return hl_fw_push_fw_to_device(hdev, fw_name, dst); dst 2206 drivers/misc/habanalabs/goya/goya.c void __iomem *dst; dst 2209 drivers/misc/habanalabs/goya/goya.c dst = hdev->pcie_bar[DDR_BAR_ID] + LINUX_FW_OFFSET; dst 2211 drivers/misc/habanalabs/goya/goya.c return hl_fw_push_fw_to_device(hdev, fw_name, dst); dst 1533 drivers/misc/habanalabs/habanalabs.h void __iomem *dst); dst 46 drivers/misc/lkdtm/perms.c static noinline void execute_location(void *dst, bool write) dst 48 drivers/misc/lkdtm/perms.c void (*func)(void) = dst; dst 54 drivers/misc/lkdtm/perms.c memcpy(dst, do_nothing, EXEC_SIZE); dst 55 drivers/misc/lkdtm/perms.c flush_icache_range((unsigned long)dst, dst 56 drivers/misc/lkdtm/perms.c (unsigned long)dst + EXEC_SIZE); dst 62 drivers/misc/lkdtm/perms.c static void execute_user_location(void *dst) dst 67 drivers/misc/lkdtm/perms.c void (*func)(void) = dst; dst 72 drivers/misc/lkdtm/perms.c copied = access_process_vm(current, (unsigned long)dst, do_nothing, dst 139 drivers/misc/mic/scif/scif_api.c msg.dst = ep->peer; dst 275 drivers/misc/mic/scif/scif_api.c msg.dst.node = conreq->msg.src.node; dst 276 drivers/misc/mic/scif/scif_api.c msg.dst.port = conreq->msg.src.port; dst 493 drivers/misc/mic/scif/scif_api.c msg.dst = ep->conn_port; dst 590 drivers/misc/mic/scif/scif_api.c int __scif_connect(scif_epd_t epd, struct scif_port_id *dst, bool non_block) dst 600 drivers/misc/mic/scif/scif_api.c if (!scif_dev || dst->node > scif_info.maxid) dst 605 drivers/misc/mic/scif/scif_api.c remote_dev = &scif_dev[dst->node]; dst 667 drivers/misc/mic/scif/scif_api.c ep->conn_port = *dst; dst 683 drivers/misc/mic/scif/scif_api.c ep->remote_dev = &scif_dev[dst->node]; dst 710 drivers/misc/mic/scif/scif_api.c int scif_connect(scif_epd_t epd, struct scif_port_id *dst) dst 712 drivers/misc/mic/scif/scif_api.c return __scif_connect(epd, dst, false); dst 905 drivers/misc/mic/scif/scif_api.c msg.dst.node = conreq->msg.src.node; dst 906 drivers/misc/mic/scif/scif_api.c msg.dst.port = conreq->msg.src.port; dst 692 drivers/misc/mic/scif/scif_dma.c 
scif_ordered_memcpy_toio(char *dst, const char *src, size_t count) dst 697 drivers/misc/mic/scif/scif_dma.c memcpy_toio((void __iomem __force *)dst, src, --count); dst 700 drivers/misc/mic/scif/scif_dma.c *(dst + count) = *(src + count); dst 703 drivers/misc/mic/scif/scif_dma.c static inline void scif_unaligned_cpy_toio(char *dst, const char *src, dst 707 drivers/misc/mic/scif/scif_dma.c scif_ordered_memcpy_toio(dst, src, count); dst 709 drivers/misc/mic/scif/scif_dma.c memcpy_toio((void __iomem __force *)dst, src, count); dst 713 drivers/misc/mic/scif/scif_dma.c void scif_ordered_memcpy_fromio(char *dst, const char *src, size_t count) dst 718 drivers/misc/mic/scif/scif_dma.c memcpy_fromio(dst, (void __iomem __force *)src, --count); dst 721 drivers/misc/mic/scif/scif_dma.c *(dst + count) = *(src + count); dst 724 drivers/misc/mic/scif/scif_dma.c static inline void scif_unaligned_cpy_fromio(char *dst, const char *src, dst 728 drivers/misc/mic/scif/scif_dma.c scif_ordered_memcpy_fromio(dst, src, count); dst 730 drivers/misc/mic/scif/scif_dma.c memcpy_fromio(dst, (void __iomem __force *)src, count); dst 129 drivers/misc/mic/scif/scif_epd.c ep = scif_find_listen_ep(msg->dst.port); dst 193 drivers/misc/mic/scif/scif_epd.h int __scif_connect(scif_epd_t epd, struct scif_port_id *dst, bool non_block); dst 196 drivers/misc/mic/scif/scif_fence.c static int _scif_prog_signal(scif_epd_t epd, dma_addr_t dst, u64 val) dst 229 drivers/misc/mic/scif/scif_fence.c tx = ddev->device_prep_dma_imm_data(chan, dst, val, 0); dst 243 drivers/misc/mic/scif/scif_fence.c tx = ddev->device_prep_dma_memcpy(chan, dst, src, sizeof(val), dst 88 drivers/misc/mic/scif/scif_nm.c msg.dst.node = SCIF_MGMT_NODE; dst 96 drivers/misc/mic/scif/scif_nm.c msg.dst.node = dev->node; dst 159 drivers/misc/mic/scif/scif_nm.c notif_msg.dst.node = node; dst 220 drivers/misc/mic/scif/scif_nm.c msg.dst.node = SCIF_MGMT_NODE; dst 298 drivers/misc/mic/scif/scif_nodeqp.c msg.dst.node = scifdev->node; dst 312 drivers/misc/mic/scif/scif_nodeqp.c msg.dst.node = scifdev->node; dst 451 drivers/misc/mic/scif/scif_nodeqp.c static void scif_node_connect(struct scif_dev *scifdev, int dst) dst 463 drivers/misc/mic/scif/scif_nodeqp.c if (dst < 1 || dst > scif_info.maxid) dst 466 drivers/misc/mic/scif/scif_nodeqp.c dev_i = &scif_dev[dst]; dst 499 drivers/misc/mic/scif/scif_nodeqp.c msg.dst.node = dev_i->node; dst 516 drivers/misc/mic/scif/scif_nodeqp.c msg.dst.node = dev_j->node; dst 605 drivers/misc/mic/scif/scif_nodeqp.c msg->dst.node, msg->dst.port, msg->payload[0], msg->payload[1], dst 777 drivers/misc/mic/scif/scif_nodeqp.c scifdev->node, msg->dst.node, msg->src.node); dst 824 drivers/misc/mic/scif/scif_nodeqp.c msg->dst.node = msg->src.node; dst 841 drivers/misc/mic/scif/scif_nodeqp.c msg->dst.node = msg->src.node; dst 890 drivers/misc/mic/scif/scif_nodeqp.c struct scif_dev *dst_dev = &scif_dev[msg->dst.node]; dst 894 drivers/misc/mic/scif/scif_nodeqp.c scifdev->node, msg->src.node, msg->dst.node); dst 941 drivers/misc/mic/scif/scif_nodeqp.c struct scif_dev *dst_dev = &scif_dev[msg->dst.node]; dst 990 drivers/misc/mic/scif/scif_nodeqp.c swap(msg->dst.node, msg->src.node); dst 110 drivers/misc/mic/scif/scif_nodeqp.h struct scif_port_id dst; dst 469 drivers/misc/mic/vop/vop_vringh.c static int vop_sync_dma(struct vop_vdev *vdev, dma_addr_t dst, dma_addr_t src, dst 483 drivers/misc/mic/vop/vop_vringh.c tx = ddev->device_prep_dma_memcpy(vop_ch, dst, src, len, dst 152 drivers/misc/sgi-xp/xpnet.c void *dst; dst 208 drivers/misc/sgi-xp/xpnet.c dst = (void 
*)((u64)skb->data & ~(L1_CACHE_BYTES - 1)); dst 210 drivers/misc/sgi-xp/xpnet.c "xp_remote_memcpy(0x%p, 0x%p, %hu)\n", dst, dst 213 drivers/misc/sgi-xp/xpnet.c ret = xp_remote_memcpy(xp_pa(dst), msg->buf_pa, msg->size); dst 221 drivers/misc/sgi-xp/xpnet.c "returned error=0x%x\n", dst, dst 81 drivers/misc/sram-exec.c void *sram_exec_copy(struct gen_pool *pool, void *dst, void *src, dst 99 drivers/misc/sram-exec.c if (!addr_in_gen_pool(pool, (unsigned long)dst, size)) dst 110 drivers/misc/sram-exec.c dst_cpy = fncpy(dst, src, size); dst 250 drivers/misc/vmw_vmci/vmci_context.c ev.msg.hdr.dst = vmci_handle_arr_get_entry(subscriber_array, i); dst 262 drivers/misc/vmw_vmci/vmci_context.c ev.msg.hdr.dst.context); dst 166 drivers/misc/vmw_vmci/vmci_datagram.c if (dg->dst.context == VMCI_HYPERVISOR_CONTEXT_ID) dst 186 drivers/misc/vmw_vmci/vmci_datagram.c if (dg->dst.context == VMCI_HOST_CONTEXT_ID) { dst 192 drivers/misc/vmw_vmci/vmci_datagram.c dg->dst.resource == VMCI_EVENT_HANDLER) { dst 196 drivers/misc/vmw_vmci/vmci_datagram.c resource = vmci_resource_by_handle(dg->dst, dst 200 drivers/misc/vmw_vmci/vmci_datagram.c dg->dst.context, dg->dst.resource); dst 253 drivers/misc/vmw_vmci/vmci_datagram.c if (context_id != dg->dst.context) { dst 256 drivers/misc/vmw_vmci/vmci_datagram.c (dg->dst.context))) { dst 265 drivers/misc/vmw_vmci/vmci_datagram.c context_id, dg->dst.context); dst 275 drivers/misc/vmw_vmci/vmci_datagram.c retval = vmci_ctx_enqueue_datagram(dg->dst.context, new_dg); dst 329 drivers/misc/vmw_vmci/vmci_datagram.c retval = vmci_route(&dg->src, &dg->dst, from_guest, &route); dst 332 drivers/misc/vmw_vmci/vmci_datagram.c dg->src.context, dg->dst.context, retval); dst 359 drivers/misc/vmw_vmci/vmci_datagram.c resource = vmci_resource_by_handle(dg->dst, dst 363 drivers/misc/vmw_vmci/vmci_datagram.c dg->dst.context, dg->dst.resource); dst 233 drivers/misc/vmw_vmci/vmci_doorbell.c link_msg.hdr.dst = vmci_make_handle(VMCI_HYPERVISOR_CONTEXT_ID, dst 251 drivers/misc/vmw_vmci/vmci_doorbell.c unlink_msg.hdr.dst = vmci_make_handle(VMCI_HYPERVISOR_CONTEXT_ID, dst 268 drivers/misc/vmw_vmci/vmci_doorbell.c notify_msg.hdr.dst = vmci_make_handle(VMCI_HYPERVISOR_CONTEXT_ID, dst 331 drivers/misc/vmw_vmci/vmci_doorbell.c bitmap_set_msg.hdr.dst = vmci_make_handle(VMCI_HYPERVISOR_CONTEXT_ID, dst 579 drivers/misc/vmw_vmci/vmci_doorbell.c int vmci_doorbell_notify(struct vmci_handle dst, u32 priv_flags) dst 585 drivers/misc/vmw_vmci/vmci_doorbell.c if (vmci_handle_is_invalid(dst) || dst 590 drivers/misc/vmw_vmci/vmci_doorbell.c retval = vmci_route(&src, &dst, false, &route); dst 596 drivers/misc/vmw_vmci/vmci_doorbell.c dst, priv_flags); dst 599 drivers/misc/vmw_vmci/vmci_doorbell.c return dbell_notify_as_guest(dst, priv_flags); dst 82 drivers/misc/vmw_vmci/vmci_guest.c get_cid_msg.dst = dst 177 drivers/misc/vmw_vmci/vmci_guest.c check_msg->dst = vmci_make_handle(VMCI_HYPERVISOR_CONTEXT_ID, dst 224 drivers/misc/vmw_vmci/vmci_guest.c while (dg->dst.resource != VMCI_INVALID_ID || dst 232 drivers/misc/vmw_vmci/vmci_guest.c if (dg->dst.resource == VMCI_INVALID_ID) { dst 293 drivers/misc/vmw_vmci/vmci_guest.c dg->dst.resource == VMCI_EVENT_HANDLER) { dst 301 drivers/misc/vmw_vmci/vmci_guest.c dg->dst.resource, result); dst 391 drivers/misc/vmw_vmci/vmci_host.c dg->dst.context, dg->dst.resource, dst 850 drivers/misc/vmw_vmci/vmci_queue_pair.c ev.msg.hdr.dst = vmci_make_handle(context_id, VMCI_EVENT_HANDLER); dst 954 drivers/misc/vmw_vmci/vmci_queue_pair.c alloc_msg->hdr.dst = 
vmci_make_handle(VMCI_HYPERVISOR_CONTEXT_ID, dst 983 drivers/misc/vmw_vmci/vmci_queue_pair.c detach_msg.hdr.dst = vmci_make_handle(VMCI_HYPERVISOR_CONTEXT_ID, dst 1463 drivers/misc/vmw_vmci/vmci_queue_pair.c ev.msg.hdr.dst = vmci_make_handle(peer_id, VMCI_EVENT_HANDLER); dst 2664 drivers/misc/vmw_vmci/vmci_queue_pair.c struct vmci_handle dst = vmci_make_handle(peer, VMCI_INVALID_ID); dst 2690 drivers/misc/vmw_vmci/vmci_queue_pair.c retval = vmci_route(&src, &dst, false, &route); dst 21 drivers/misc/vmw_vmci/vmci_route.c const struct vmci_handle *dst, dst 42 drivers/misc/vmw_vmci/vmci_route.c if (VMCI_INVALID_ID == dst->context) dst 46 drivers/misc/vmw_vmci/vmci_route.c if (VMCI_HYPERVISOR_CONTEXT_ID == dst->context) { dst 85 drivers/misc/vmw_vmci/vmci_route.c if (VMCI_HOST_CONTEXT_ID == dst->context) { dst 153 drivers/misc/vmw_vmci/vmci_route.c if (vmci_ctx_exists(dst->context)) { dst 167 drivers/misc/vmw_vmci/vmci_route.c src->context != dst->context) { dst 19 drivers/misc/vmw_vmci/vmci_route.h int vmci_route(struct vmci_handle *src, const struct vmci_handle *dst, dst 468 drivers/mmc/core/sdio_io.c int sdio_memcpy_fromio(struct sdio_func *func, void *dst, dst 471 drivers/mmc/core/sdio_io.c return sdio_io_rw_ext_helper(func, 0, addr, 1, dst, count); dst 502 drivers/mmc/core/sdio_io.c int sdio_readsb(struct sdio_func *func, void *dst, unsigned int addr, dst 505 drivers/mmc/core/sdio_io.c return sdio_io_rw_ext_helper(func, 0, addr, 0, dst, count); dst 207 drivers/mmc/host/tifm_sd.c static void tifm_sd_copy_page(struct page *dst, unsigned int dst_off, dst 212 drivers/mmc/host/tifm_sd.c unsigned char *dst_buf = kmap_atomic(dst) + dst_off; dst 1313 drivers/mtd/devices/docg3.c static void doc_fill_autooob(u8 *dst, u8 *oobsrc) dst 1315 drivers/mtd/devices/docg3.c memcpy(dst, oobsrc, DOC_LAYOUT_OOB_PAGEINFO_SZ); dst 1316 drivers/mtd/devices/docg3.c dst[DOC_LAYOUT_OOB_UNUSED_OFS] = oobsrc[DOC_LAYOUT_OOB_PAGEINFO_SZ]; dst 716 drivers/mtd/devices/st_spi_fsm.c void __iomem *dst = fsm->base + SPI_FAST_SEQ_TRANSFER_SIZE; dst 723 drivers/mtd/devices/st_spi_fsm.c writel(*src, dst); dst 725 drivers/mtd/devices/st_spi_fsm.c dst += 4; dst 325 drivers/mtd/nand/onenand/omap2.c dma_addr_t src, dma_addr_t dst, dst 331 drivers/mtd/nand/onenand/omap2.c tx = dmaengine_prep_dma_memcpy(c->dma_chan, dst, src, count, dst 515 drivers/mtd/nand/onenand/samsung.c static int (*s5pc110_dma_ops)(dma_addr_t dst, dma_addr_t src, size_t count, int direction); dst 517 drivers/mtd/nand/onenand/samsung.c static int s5pc110_dma_poll(dma_addr_t dst, dma_addr_t src, size_t count, int direction) dst 524 drivers/mtd/nand/onenand/samsung.c writel(dst, base + S5PC110_DMA_DST_ADDR); dst 583 drivers/mtd/nand/onenand/samsung.c static int s5pc110_dma_irq(dma_addr_t dst, dma_addr_t src, size_t count, int direction) dst 595 drivers/mtd/nand/onenand/samsung.c writel(dst, base + S5PC110_DMA_DST_ADDR); dst 852 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c static void gpmi_copy_bits(u8 *dst, size_t dst_bit_off, const u8 *src, dst 870 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c dst += dst_bit_off / 8; dst 902 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c dst[0] &= GENMASK(dst_bit_off - 1, 0); dst 903 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c dst[0] |= src_buffer << dst_bit_off; dst 907 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c dst++; dst 910 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c dst[0] = src_buffer; dst 911 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c dst++; dst 923 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c memcpy(dst, src, nbytes); dst 932 
drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c dst[i] = src_buffer; dst 937 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c dst += nbytes; dst 967 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c (*dst & GENMASK(dst_bit_off - 1, 0)); dst 976 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c src_buffer |= (dst[nbytes] & dst 984 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c dst[i] = src_buffer; dst 228 drivers/mtd/nand/raw/vf610_nfc.c static inline void vf610_nfc_rd_from_sram(void *dst, const void __iomem *src, dst 237 drivers/mtd/nand/raw/vf610_nfc.c memcpy(dst + i, &val, min(sizeof(val), len - i)); dst 240 drivers/mtd/nand/raw/vf610_nfc.c memcpy_fromio(dst, src, len); dst 263 drivers/mtd/nand/raw/vf610_nfc.c static inline void vf610_nfc_wr_to_sram(void __iomem *dst, const void *src, dst 273 drivers/mtd/nand/raw/vf610_nfc.c __raw_writel(swab32(val), dst + i); dst 276 drivers/mtd/nand/raw/vf610_nfc.c memcpy_toio(dst, src, len); dst 241 drivers/mtd/spi-nor/aspeed-smc.c static int aspeed_smc_write_to_ahb(void __iomem *dst, const void *buf, dst 246 drivers/mtd/spi-nor/aspeed-smc.c if (IS_ALIGNED((uintptr_t)dst, sizeof(uintptr_t)) && dst 248 drivers/mtd/spi-nor/aspeed-smc.c iowrite32_rep(dst, buf, len >> 2); dst 252 drivers/mtd/spi-nor/aspeed-smc.c iowrite8_rep(dst, (const u8 *)buf + offset, len); dst 174 drivers/mtd/ubi/eba.c void ubi_eba_copy_table(struct ubi_volume *vol, struct ubi_eba_table *dst, dst 180 drivers/mtd/ubi/eba.c ubi_assert(dst && vol && vol->eba_tbl); dst 185 drivers/mtd/ubi/eba.c dst->entries[i].pnum = src->entries[i].pnum; dst 878 drivers/mtd/ubi/ubi.h void ubi_eba_copy_table(struct ubi_volume *vol, struct ubi_eba_table *dst, dst 2491 drivers/net/bonding/bond_main.c if (rt->dst.dev == bond->dev) dst 2495 drivers/net/bonding/bond_main.c tags = bond_verify_device_path(bond->dev, rt->dst.dev, 0); dst 2503 drivers/net/bonding/bond_main.c &targets[i], rt->dst.dev ? 
rt->dst.dev->name : "NULL"); dst 2509 drivers/net/bonding/bond_main.c addr = bond_confirm_addr(rt->dst.dev, targets[i], 0); dst 335 drivers/net/caif/caif_spi.c u8 *dst = buf; dst 363 drivers/net/caif/caif_spi.c *dst = (u8)(spad - 1); dst 364 drivers/net/caif/caif_spi.c dst += spad; dst 368 drivers/net/caif/caif_spi.c skb_copy_bits(skb, 0, dst, skb->len); dst 369 drivers/net/caif/caif_spi.c dst += skb->len; dst 378 drivers/net/caif/caif_spi.c dst += epad; dst 382 drivers/net/caif/caif_spi.c } while ((dst - buf) < len); dst 384 drivers/net/caif/caif_spi.c return dst - buf; dst 394 drivers/net/can/janz-ican3.c void __iomem *dst; dst 405 drivers/net/can/janz-ican3.c dst = mod->dpm; dst 416 drivers/net/can/janz-ican3.c memcpy_toio(dst, &desc, sizeof(desc)); dst 417 drivers/net/can/janz-ican3.c dst += sizeof(desc); dst 423 drivers/net/can/janz-ican3.c dst = mod->dpm; dst 437 drivers/net/can/janz-ican3.c memcpy_toio(dst, &desc, sizeof(desc)); dst 438 drivers/net/can/janz-ican3.c dst += sizeof(desc); dst 444 drivers/net/can/janz-ican3.c dst = mod->dpm; dst 449 drivers/net/can/janz-ican3.c memcpy_toio(dst, &desc, sizeof(desc)); dst 454 drivers/net/can/janz-ican3.c dst = mod->dpm; dst 459 drivers/net/can/janz-ican3.c memcpy_toio(dst, &desc, sizeof(desc)); dst 474 drivers/net/can/janz-ican3.c void __iomem *dst; dst 500 drivers/net/can/janz-ican3.c dst = mod->dpm + addr; dst 501 drivers/net/can/janz-ican3.c memcpy_toio(dst, &desc, sizeof(desc)); dst 536 drivers/net/can/janz-ican3.c dst = mod->dpm + addr; dst 537 drivers/net/can/janz-ican3.c memcpy_toio(dst, &desc, sizeof(desc)); dst 284 drivers/net/dsa/bcm_sf2_cfp.c reg = be16_to_cpu(ports->dst) >> 8; dst 295 drivers/net/dsa/bcm_sf2_cfp.c reg = (be16_to_cpu(ports->dst) & 0xff) << 24 | dst 297 drivers/net/dsa/bcm_sf2_cfp.c (be32_to_cpu(addrs->dst) & 0x0000ff00) >> 8; dst 308 drivers/net/dsa/bcm_sf2_cfp.c reg = (u32)(be32_to_cpu(addrs->dst) & 0xff) << 24 | dst 309 drivers/net/dsa/bcm_sf2_cfp.c (u32)(be32_to_cpu(addrs->dst) >> 16) << 8 | dst 777 drivers/net/dsa/bcm_sf2_cfp.c bcm_sf2_cfp_slice_ipv6(priv, ipv6.key->dst.in6_u.u6_addr32, dst 778 drivers/net/dsa/bcm_sf2_cfp.c ports.key->dst, slice_num, false); dst 779 drivers/net/dsa/bcm_sf2_cfp.c bcm_sf2_cfp_slice_ipv6(priv, ipv6.mask->dst.in6_u.u6_addr32, dst 780 drivers/net/dsa/bcm_sf2_cfp.c ports.key->dst, SLICE_NUM_MASK, true); dst 1068 drivers/net/dsa/mv88e6xxx/chip.c ds = chip->ds->dst->ds[dev]; dst 2057 drivers/net/dsa/mv88e6xxx/chip.c ds = chip->ds->dst->ds[dev]; dst 71 drivers/net/ethernet/8390/xsurf100.c static void z_memcpy_fromio32(void *dst, const void __iomem *src, size_t bytes) dst 77 drivers/net/ethernet/8390/xsurf100.c "adda.l #32,%1" : "=a"(src), "=a"(dst) dst 78 drivers/net/ethernet/8390/xsurf100.c : "0"(src), "1"(dst) : "d0", "d1", "d2", "d3", "d4", dst 83 drivers/net/ethernet/8390/xsurf100.c *(uint32_t *)dst = z_readl(src); dst 85 drivers/net/ethernet/8390/xsurf100.c dst += 4; dst 90 drivers/net/ethernet/8390/xsurf100.c static void z_memcpy_toio32(void __iomem *dst, const void *src, size_t bytes) dst 93 drivers/net/ethernet/8390/xsurf100.c z_writel(*(const uint32_t *)src, dst); dst 95 drivers/net/ethernet/8390/xsurf100.c dst += 4; dst 127 drivers/net/ethernet/8390/xsurf100.c static void xs100_read(struct net_device *dev, void *dst, unsigned int count) dst 135 drivers/net/ethernet/8390/xsurf100.c z_memcpy_fromio32(dst, xs100->data_area + dst 138 drivers/net/ethernet/8390/xsurf100.c dst += XS100_8390_DATA_AREA_SIZE; dst 142 drivers/net/ethernet/8390/xsurf100.c z_memcpy_fromio32(dst, 
xs100->data_area + XS100_8390_DATA_READ32_BASE, dst 144 drivers/net/ethernet/8390/xsurf100.c dst += count & ~3; dst 146 drivers/net/ethernet/8390/xsurf100.c *(uint16_t *)dst = ei_inw(ei_local->mem + NE_DATAPORT); dst 147 drivers/net/ethernet/8390/xsurf100.c dst += 2; dst 150 drivers/net/ethernet/8390/xsurf100.c *(uint8_t *)dst = ei_inb(ei_local->mem + NE_DATAPORT); dst 117 drivers/net/ethernet/amazon/ena/ena_ethtool.c static void ena_safe_update_stat(u64 *src, u64 *dst, dst 124 drivers/net/ethernet/amazon/ena/ena_ethtool.c *(dst) = *src; dst 357 drivers/net/ethernet/amd/atarilance.c static void *slow_memcpy( void *dst, const void *src, size_t len ) dst 359 drivers/net/ethernet/amd/atarilance.c { char *cto = dst; dst 366 drivers/net/ethernet/amd/atarilance.c return dst; dst 20 drivers/net/ethernet/apm/xgene/xgene_enet_hw.h static inline void xgene_set_bits(u32 *dst, u32 val, u32 start, u32 len) dst 25 drivers/net/ethernet/apm/xgene/xgene_enet_hw.h *dst &= ~mask; dst 26 drivers/net/ethernet/apm/xgene/xgene_enet_hw.h *dst |= (val << start) & mask; dst 139 drivers/net/ethernet/apm/xgene/xgene_enet_hw.h #define PHY_CONTROL_SET(dst, val) xgene_set_bits(dst, val, 0, 16) dst 157 drivers/net/ethernet/apm/xgene/xgene_enet_hw.h #define CFG_TXCLK_MUXSEL0_SET(dst, val) xgene_set_bits(dst, val, 29, 3) dst 158 drivers/net/ethernet/apm/xgene/xgene_enet_hw.h #define CFG_RXCLK_MUXSEL0_SET(dst, val) xgene_set_bits(dst, val, 26, 3) dst 160 drivers/net/ethernet/apm/xgene/xgene_enet_hw.h #define CFG_CLE_IP_PROTOCOL0_SET(dst, val) xgene_set_bits(dst, val, 16, 2) dst 161 drivers/net/ethernet/apm/xgene/xgene_enet_hw.h #define CFG_CLE_IP_HDR_LEN_SET(dst, val) xgene_set_bits(dst, val, 8, 5) dst 162 drivers/net/ethernet/apm/xgene/xgene_enet_hw.h #define CFG_CLE_DSTQID0_SET(dst, val) xgene_set_bits(dst, val, 0, 12) dst 163 drivers/net/ethernet/apm/xgene/xgene_enet_hw.h #define CFG_CLE_FPSEL0_SET(dst, val) xgene_set_bits(dst, val, 16, 4) dst 164 drivers/net/ethernet/apm/xgene/xgene_enet_hw.h #define CFG_CLE_NXTFPSEL0_SET(dst, val) xgene_set_bits(dst, val, 20, 4) dst 165 drivers/net/ethernet/apm/xgene/xgene_enet_hw.h #define CFG_MACMODE_SET(dst, val) xgene_set_bits(dst, val, 18, 2) dst 166 drivers/net/ethernet/apm/xgene/xgene_enet_hw.h #define CFG_WAITASYNCRD_SET(dst, val) xgene_set_bits(dst, val, 0, 16) dst 205 drivers/net/ethernet/apm/xgene/xgene_enet_hw.h #define PHY_ADDR_SET(dst, val) xgene_set_bits(dst, val, 8, 5) dst 206 drivers/net/ethernet/apm/xgene/xgene_enet_hw.h #define REG_ADDR_SET(dst, val) xgene_set_bits(dst, val, 0, 5) dst 207 drivers/net/ethernet/apm/xgene/xgene_enet_hw.h #define ENET_INTERFACE_MODE2_SET(dst, val) xgene_set_bits(dst, val, 8, 2) dst 208 drivers/net/ethernet/apm/xgene/xgene_enet_hw.h #define MGMT_CLOCK_SEL_SET(dst, val) xgene_set_bits(dst, val, 0, 3) dst 58 drivers/net/ethernet/apm/xgene/xgene_enet_xgmac.h #define RSIF_CLE_BUFF_THRESH_SET(dst, val) xgene_set_bits(dst, val, 0, 3) dst 61 drivers/net/ethernet/apm/xgene/xgene_enet_xgmac.h #define RSIF_PLC_CLE_BUFF_THRESH_SET(dst, val) xgene_set_bits(dst, val, 0, 2) dst 4540 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c u8 *dst = (u8 *)(data->rss_key) + sizeof(data->rss_key); dst 4548 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c *--dst = *src++; dst 4192 drivers/net/ethernet/broadcom/bnxt/bnxt.c u16 dst = BNXT_HWRM_CHNL_CHIMP; dst 4204 drivers/net/ethernet/broadcom/bnxt/bnxt.c dst = BNXT_HWRM_CHNL_KONG; dst 4215 drivers/net/ethernet/broadcom/bnxt/bnxt.c req->seq_id = cpu_to_le16(bnxt_get_hwrm_seq_id(bp, dst)); dst 4643 
drivers/net/ethernet/broadcom/bnxt/bnxt.c keys->addrs.v6addrs.dst; dst 4651 drivers/net/ethernet/broadcom/bnxt/bnxt.c req.dst_ipaddr[0] = keys->addrs.v4addrs.dst; dst 4662 drivers/net/ethernet/broadcom/bnxt/bnxt.c req.dst_port = keys->ports.dst; dst 11015 drivers/net/ethernet/broadcom/bnxt/bnxt.c keys1->addrs.v4addrs.dst != keys2->addrs.v4addrs.dst) dst 11020 drivers/net/ethernet/broadcom/bnxt/bnxt.c memcmp(&keys1->addrs.v6addrs.dst, &keys2->addrs.v6addrs.dst, dst 11021 drivers/net/ethernet/broadcom/bnxt/bnxt.c sizeof(keys1->addrs.v6addrs.dst))) dst 1291 drivers/net/ethernet/broadcom/bnxt/bnxt.h struct metadata_dst *dst; dst 1946 drivers/net/ethernet/broadcom/bnxt/bnxt.h static inline u16 bnxt_get_hwrm_seq_id(struct bnxt *bp, u16 dst) dst 1950 drivers/net/ethernet/broadcom/bnxt/bnxt.h if (dst == BNXT_HWRM_CHNL_CHIMP) dst 254 drivers/net/ethernet/broadcom/bnxt/bnxt_devlink.c static void bnxt_copy_to_nvm_data(union bnxt_nvm_data *dst, dst 261 drivers/net/ethernet/broadcom/bnxt/bnxt_devlink.c dst->val8 = src->vbool; dst 270 drivers/net/ethernet/broadcom/bnxt/bnxt_devlink.c dst->val32 = cpu_to_le32(val32); dst 273 drivers/net/ethernet/broadcom/bnxt/bnxt_devlink.c static void bnxt_copy_from_nvm_data(union devlink_param_value *dst, dst 280 drivers/net/ethernet/broadcom/bnxt/bnxt_devlink.c dst->vbool = src->val8; dst 285 drivers/net/ethernet/broadcom/bnxt/bnxt_devlink.c dst->vu32 = val32; dst 287 drivers/net/ethernet/broadcom/bnxt/bnxt_devlink.c dst->vu16 = (u16)val32; dst 289 drivers/net/ethernet/broadcom/bnxt/bnxt_devlink.c dst->vu8 = (u8)val32; dst 961 drivers/net/ethernet/broadcom/bnxt/bnxt_ethtool.c fs->h_u.tcp_ip4_spec.ip4dst = fkeys->addrs.v4addrs.dst; dst 967 drivers/net/ethernet/broadcom/bnxt/bnxt_ethtool.c fs->h_u.tcp_ip4_spec.pdst = fkeys->ports.dst; dst 982 drivers/net/ethernet/broadcom/bnxt/bnxt_ethtool.c fkeys->addrs.v6addrs.dst; dst 990 drivers/net/ethernet/broadcom/bnxt/bnxt_ethtool.c fs->h_u.tcp_ip6_spec.pdst = fkeys->ports.dst; dst 206 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c ether_addr_copy(flow->l2_key.dmac, match.key->dst); dst 207 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c ether_addr_copy(flow->l2_mask.dmac, match.mask->dst); dst 232 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c flow->l3_key.ipv4.daddr.s_addr = match.key->dst; dst 233 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c flow->l3_mask.ipv4.daddr.s_addr = match.mask->dst; dst 241 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c flow->l3_key.ipv6.daddr = match.key->dst; dst 242 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c flow->l3_mask.ipv6.daddr = match.mask->dst; dst 252 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c flow->l4_key.ports.dport = match.key->dst; dst 253 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c flow->l4_mask.ports.dport = match.mask->dst; dst 274 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c flow->tun_key.u.ipv4.dst = match.key->dst; dst 275 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c flow->tun_mask.u.ipv4.dst = match.mask->dst; dst 297 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c flow->tun_key.tp_dst = match.key->dst; dst 298 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c flow->tun_mask.tp_dst = match.mask->dst; dst 562 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c req.dst_ipaddr[0] = tun_key->u.ipv4.dst; dst 635 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c encap_ipv4->dest_ip_addr = encap_key->u.ipv4.dst; dst 965 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c flow.daddr = tun_key->u.ipv4.dst; dst 976 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c dst_dev = rt->dst.dev; dst 1002 
drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c nbr = dst_neigh_lookup(&rt->dst, &flow.daddr); dst 1011 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c tun_key->ttl = ip4_dst_hoplimit(&rt->dst); dst 1060 drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c tun_key.u.ipv4.dst = flow->tun_key.u.ipv4.src; dst 120 drivers/net/ethernet/broadcom/bnxt/bnxt_vfr.c dst_hold((struct dst_entry *)vf_rep->dst); dst 121 drivers/net/ethernet/broadcom/bnxt/bnxt_vfr.c skb_dst_set(skb, (struct dst_entry *)vf_rep->dst); dst 122 drivers/net/ethernet/broadcom/bnxt/bnxt_vfr.c skb->dev = vf_rep->dst->u.port_info.lower_dev; dst 301 drivers/net/ethernet/broadcom/bnxt/bnxt_vfr.c dst_release((struct dst_entry *)vf_rep->dst); dst 442 drivers/net/ethernet/broadcom/bnxt/bnxt_vfr.c vf_rep->dst = metadata_dst_alloc(0, METADATA_HW_PORT_MUX, dst 444 drivers/net/ethernet/broadcom/bnxt/bnxt_vfr.c if (!vf_rep->dst) { dst 449 drivers/net/ethernet/broadcom/bnxt/bnxt_vfr.c vf_rep->dst->u.port_info.port_id = vf_rep->tx_cfa_action; dst 450 drivers/net/ethernet/broadcom/bnxt/bnxt_vfr.c vf_rep->dst->u.port_info.lower_dev = bp->dev; dst 340 drivers/net/ethernet/broadcom/cnic.c memcpy(&path_req.dst.v6_addr, &csk->dst_ip[0], dst 344 drivers/net/ethernet/broadcom/cnic.c memcpy(&path_req.dst.v4_addr, &csk->dst_ip[0], dst 3675 drivers/net/ethernet/broadcom/cnic.c struct dst_entry **dst) dst 3682 drivers/net/ethernet/broadcom/cnic.c *dst = &rt->dst; dst 3692 drivers/net/ethernet/broadcom/cnic.c struct dst_entry **dst) dst 3702 drivers/net/ethernet/broadcom/cnic.c *dst = ip6_route_output(&init_net, NULL, &fl6); dst 3703 drivers/net/ethernet/broadcom/cnic.c if ((*dst)->error) { dst 3704 drivers/net/ethernet/broadcom/cnic.c dst_release(*dst); dst 3705 drivers/net/ethernet/broadcom/cnic.c *dst = NULL; dst 3718 drivers/net/ethernet/broadcom/cnic.c struct dst_entry *dst; dst 3723 drivers/net/ethernet/broadcom/cnic.c err = cnic_get_v4_route(dst_addr, &dst); dst 3728 drivers/net/ethernet/broadcom/cnic.c err = cnic_get_v6_route(dst_addr6, &dst); dst 3735 drivers/net/ethernet/broadcom/cnic.c if (!dst->dev) dst 3738 drivers/net/ethernet/broadcom/cnic.c cnic_get_vlan(dst->dev, &netdev); dst 3743 drivers/net/ethernet/broadcom/cnic.c dst_release(dst); dst 3762 drivers/net/ethernet/broadcom/cnic.c struct dst_entry *dst = NULL; dst 3780 drivers/net/ethernet/broadcom/cnic.c cnic_get_v6_route(&saddr->remote.v6, &dst); dst 3788 drivers/net/ethernet/broadcom/cnic.c cnic_get_v4_route(&saddr->remote.v4, &dst); dst 3797 drivers/net/ethernet/broadcom/cnic.c if (dst && dst->dev) { dst 3798 drivers/net/ethernet/broadcom/cnic.c u16 vlan = cnic_get_vlan(dst->dev, &realdev); dst 3801 drivers/net/ethernet/broadcom/cnic.c csk->mtu = dst_mtu(dst); dst 3824 drivers/net/ethernet/broadcom/cnic.c dst_release(dst); dst 6398 drivers/net/ethernet/broadcom/tg3.c static inline void tg3_rd32_loop(struct tg3 *tp, u32 *dst, u32 off, u32 len) dst 6402 drivers/net/ethernet/broadcom/tg3.c dst = (u32 *)((u8 *)dst + off); dst 6404 drivers/net/ethernet/broadcom/tg3.c *dst++ = tr32(off + i); dst 207 drivers/net/ethernet/brocade/bna/bfa_msgq.c u8 *src, *dst; dst 210 drivers/net/ethernet/brocade/bna/bfa_msgq.c dst = (u8 *)cmdq->addr.kva; dst 211 drivers/net/ethernet/brocade/bna/bfa_msgq.c dst += (cmdq->producer_index * BFI_MSGQ_CMD_ENTRY_SIZE); dst 216 drivers/net/ethernet/brocade/bna/bfa_msgq.c memcpy(dst, src, to_copy); dst 220 drivers/net/ethernet/brocade/bna/bfa_msgq.c dst = (u8 *)cmdq->addr.kva; dst 221 drivers/net/ethernet/brocade/bna/bfa_msgq.c dst += (cmdq->producer_index * 
BFI_MSGQ_CMD_ENTRY_SIZE); dst 643 drivers/net/ethernet/brocade/bna/bfa_msgq.c u8 *src, *dst; dst 648 drivers/net/ethernet/brocade/bna/bfa_msgq.c dst = buf; dst 653 drivers/net/ethernet/brocade/bna/bfa_msgq.c memcpy(dst, src, to_copy); dst 655 drivers/net/ethernet/brocade/bna/bfa_msgq.c dst += BFI_MSGQ_RSP_ENTRY_SIZE; dst 1390 drivers/net/ethernet/cavium/thunder/thunder_bgx.c u8 *dst) dst 1403 drivers/net/ethernet/cavium/thunder/thunder_bgx.c ether_addr_copy(dst, mac); dst 302 drivers/net/ethernet/chelsio/cxgb3/l2t.c struct l2t_entry *t3_l2t_get(struct t3cdev *cdev, struct dst_entry *dst, dst 315 drivers/net/ethernet/chelsio/cxgb3/l2t.c neigh = dst_neigh_lookup(dst, daddr); dst 112 drivers/net/ethernet/chelsio/cxgb3/l2t.h struct l2t_entry *t3_l2t_get(struct t3cdev *cdev, struct dst_entry *dst, dst 124 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c memcpy(&fs->val.lip[0], &match.key->dst, sizeof(match.key->dst)); dst 126 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c memcpy(&fs->mask.lip[0], &match.mask->dst, sizeof(match.mask->dst)); dst 130 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c memcpy(&fs->nat_lip[0], &match.key->dst, sizeof(match.key->dst)); dst 139 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c memcpy(&fs->val.lip[0], match.key->dst.s6_addr, dst 140 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c sizeof(match.key->dst)); dst 143 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c memcpy(&fs->mask.lip[0], match.mask->dst.s6_addr, dst 144 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c sizeof(match.mask->dst)); dst 149 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c memcpy(&fs->nat_lip[0], match.key->dst.s6_addr, dst 150 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c sizeof(match.key->dst)); dst 159 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c fs->val.lport = cpu_to_be16(match.key->dst); dst 160 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c fs->mask.lport = cpu_to_be16(match.mask->dst); dst 165 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c fs->nat_lport = cpu_to_be16(match.key->dst); dst 940 drivers/net/ethernet/chelsio/cxgb4/sge.c static void cxgb_pio_copy(u64 __iomem *dst, u64 *src) dst 945 drivers/net/ethernet/chelsio/cxgb4/sge.c writeq(*src, dst); dst 947 drivers/net/ethernet/chelsio/cxgb4/sge.c dst++; dst 1000 drivers/net/ethernet/chelsio/cxgb4vf/sge.c __be64 __iomem *dst = (__be64 __iomem *)(tq->bar2_addr + dst 1018 drivers/net/ethernet/chelsio/cxgb4vf/sge.c writeq((__force u64)*src, dst); dst 1020 drivers/net/ethernet/chelsio/cxgb4vf/sge.c dst++; dst 105 drivers/net/ethernet/chelsio/libcxgb/libcxgb_cm.c n = dst_neigh_lookup(&rt->dst, &peer_ip); dst 111 drivers/net/ethernet/chelsio/libcxgb/libcxgb_cm.c dst_release(&rt->dst); dst 115 drivers/net/ethernet/chelsio/libcxgb/libcxgb_cm.c return &rt->dst; dst 125 drivers/net/ethernet/chelsio/libcxgb/libcxgb_cm.c struct dst_entry *dst = NULL; dst 135 drivers/net/ethernet/chelsio/libcxgb/libcxgb_cm.c dst = ip6_route_output(&init_net, NULL, &fl6); dst 136 drivers/net/ethernet/chelsio/libcxgb/libcxgb_cm.c if (dst->error || dst 138 drivers/net/ethernet/chelsio/libcxgb/libcxgb_cm.c ip6_dst_idev(dst)->dev) && dst 139 drivers/net/ethernet/chelsio/libcxgb/libcxgb_cm.c !(ip6_dst_idev(dst)->dev->flags & IFF_LOOPBACK))) { dst 140 drivers/net/ethernet/chelsio/libcxgb/libcxgb_cm.c dst_release(dst); dst 145 drivers/net/ethernet/chelsio/libcxgb/libcxgb_cm.c return dst; dst 39 drivers/net/ethernet/cisco/enic/enic_clsf.c data.u.ipv4.dst_addr = ntohl(keys->addrs.v4addrs.dst); dst 41 
drivers/net/ethernet/cisco/enic/enic_clsf.c data.u.ipv4.dst_port = ntohs(keys->ports.dst); dst 163 drivers/net/ethernet/cisco/enic/enic_clsf.c tpos->keys.addrs.v4addrs.dst == k->addrs.v4addrs.dst && dst 469 drivers/net/ethernet/cisco/enic/enic_ethtool.c fsp->h_u.tcp_ip4_spec.pdst = n->keys.ports.dst; dst 36 drivers/net/ethernet/ezchip/nps_enet.c unsigned char *dst, u32 length) dst 40 drivers/net/ethernet/ezchip/nps_enet.c u32 *reg = (u32 *)dst, len = length / sizeof(u32); dst 41 drivers/net/ethernet/ezchip/nps_enet.c bool dst_is_aligned = IS_ALIGNED((unsigned long)dst, sizeof(u32)); dst 301 drivers/net/ethernet/freescale/enetc/enetc_ethtool.c static void ether_addr_copy_swap(u8 *dst, const u8 *src) dst 306 drivers/net/ethernet/freescale/enetc/enetc_ethtool.c dst[i] = src[ETH_ALEN - i - 1]; dst 328 drivers/net/ethernet/freescale/fec_main.c unsigned int *dst = dst_buf; dst 330 drivers/net/ethernet/freescale/fec_main.c for (i = 0; i < len; i += 4, src++, dst++) dst 331 drivers/net/ethernet/freescale/fec_main.c *dst = swab32p(src); dst 1732 drivers/net/ethernet/hisilicon/hns/hns_dsaf_main.c static void hns_dsaf_mc_mask_bit_clear(char *dst, const char *src) dst 1734 drivers/net/ethernet/hisilicon/hns/hns_dsaf_main.c u16 *a = (u16 *)dst; dst 6009 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_main.c #define flow_ip6_dst fkeys->addrs.v6addrs.dst.in6_u.u6_addr32 dst 6013 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_main.c tuples->dst_port = be16_to_cpu(fkeys->ports.dst); dst 6017 drivers/net/ethernet/hisilicon/hns3/hns3pf/hclge_main.c tuples->dst_ip[3] = be32_to_cpu(fkeys->addrs.v4addrs.dst); dst 306 drivers/net/ethernet/huawei/hinic/hinic_hw_cmdq.c static void cmdq_wqe_fill(void *dst, void *src) dst 308 drivers/net/ethernet/huawei/hinic/hinic_hw_cmdq.c memcpy(dst + FIRST_DATA_TO_WRITE_LAST, src + FIRST_DATA_TO_WRITE_LAST, dst 313 drivers/net/ethernet/huawei/hinic/hinic_hw_cmdq.c *(u64 *)dst = *(u64 *)src; dst 253 drivers/net/ethernet/ibm/ibmvnic.c unsigned char *dst; dst 281 drivers/net/ethernet/ibm/ibmvnic.c dst = pool->long_term_buff.buff + offset; dst 282 drivers/net/ethernet/ibm/ibmvnic.c memset(dst, 0, pool->buff_size); dst 284 drivers/net/ethernet/ibm/ibmvnic.c pool->rx_buff[index].data = dst; dst 1425 drivers/net/ethernet/ibm/ibmvnic.c unsigned char *dst; dst 1471 drivers/net/ethernet/ibm/ibmvnic.c dst = tx_pool->long_term_buff.buff + offset; dst 1472 drivers/net/ethernet/ibm/ibmvnic.c memset(dst, 0, tx_pool->buf_size); dst 1479 drivers/net/ethernet/ibm/ibmvnic.c skb_copy_from_linear_data(skb, dst, skb_headlen(skb)); dst 1486 drivers/net/ethernet/ibm/ibmvnic.c memcpy(dst + cur, dst 1492 drivers/net/ethernet/ibm/ibmvnic.c skb_copy_from_linear_data(skb, dst, skb->len); dst 350 drivers/net/ethernet/intel/i40e/i40e.h #define I40E_FLEX_SET_DST_WORD(dst) (((dst) << \ dst 356 drivers/net/ethernet/intel/i40e/i40e.h #define I40E_FLEX_PREP_VAL(dst, fsize, src) (I40E_FLEX_SET_DST_WORD(dst) | \ dst 7797 drivers/net/ethernet/intel/i40e/i40e_main.c if (!is_zero_ether_addr(match.mask->dst)) { dst 7798 drivers/net/ethernet/intel/i40e/i40e_main.c if (is_broadcast_ether_addr(match.mask->dst)) { dst 7802 drivers/net/ethernet/intel/i40e/i40e_main.c match.mask->dst); dst 7816 drivers/net/ethernet/intel/i40e/i40e_main.c ether_addr_copy(filter->dst_mac, match.key->dst); dst 7849 drivers/net/ethernet/intel/i40e/i40e_main.c if (match.mask->dst) { dst 7850 drivers/net/ethernet/intel/i40e/i40e_main.c if (match.mask->dst == cpu_to_be32(0xffffffff)) { dst 7854 drivers/net/ethernet/intel/i40e/i40e_main.c 
&match.mask->dst); dst 7873 drivers/net/ethernet/intel/i40e/i40e_main.c filter->dst_ipv4 = match.key->dst; dst 7885 drivers/net/ethernet/intel/i40e/i40e_main.c if (ipv6_addr_loopback(&match.key->dst) || dst 7891 drivers/net/ethernet/intel/i40e/i40e_main.c if (!ipv6_addr_any(&match.mask->dst) || dst 7897 drivers/net/ethernet/intel/i40e/i40e_main.c memcpy(&filter->dst_ipv6, &match.key->dst.s6_addr32, dst 7915 drivers/net/ethernet/intel/i40e/i40e_main.c if (match.mask->dst) { dst 7916 drivers/net/ethernet/intel/i40e/i40e_main.c if (match.mask->dst == cpu_to_be16(0xffff)) { dst 7920 drivers/net/ethernet/intel/i40e/i40e_main.c be16_to_cpu(match.mask->dst)); dst 7925 drivers/net/ethernet/intel/i40e/i40e_main.c filter->dst_port = match.key->dst; dst 2754 drivers/net/ethernet/intel/iavf/iavf_main.c if (!is_zero_ether_addr(match.mask->dst)) { dst 2755 drivers/net/ethernet/intel/iavf/iavf_main.c if (is_broadcast_ether_addr(match.mask->dst)) { dst 2759 drivers/net/ethernet/intel/iavf/iavf_main.c match.mask->dst); dst 2774 drivers/net/ethernet/intel/iavf/iavf_main.c if (!is_zero_ether_addr(match.key->dst)) dst 2775 drivers/net/ethernet/intel/iavf/iavf_main.c if (is_valid_ether_addr(match.key->dst) || dst 2776 drivers/net/ethernet/intel/iavf/iavf_main.c is_multicast_ether_addr(match.key->dst)) { dst 2781 drivers/net/ethernet/intel/iavf/iavf_main.c match.key->dst); dst 2823 drivers/net/ethernet/intel/iavf/iavf_main.c if (match.mask->dst) { dst 2824 drivers/net/ethernet/intel/iavf/iavf_main.c if (match.mask->dst == cpu_to_be32(0xffffffff)) { dst 2828 drivers/net/ethernet/intel/iavf/iavf_main.c be32_to_cpu(match.mask->dst)); dst 2838 drivers/net/ethernet/intel/iavf/iavf_main.c be32_to_cpu(match.mask->dst)); dst 2847 drivers/net/ethernet/intel/iavf/iavf_main.c if (match.key->dst) { dst 2849 drivers/net/ethernet/intel/iavf/iavf_main.c vf->data.tcp_spec.dst_ip[0] = match.key->dst; dst 2863 drivers/net/ethernet/intel/iavf/iavf_main.c if (ipv6_addr_any(&match.mask->dst)) { dst 2872 drivers/net/ethernet/intel/iavf/iavf_main.c if (ipv6_addr_loopback(&match.key->dst) || dst 2878 drivers/net/ethernet/intel/iavf/iavf_main.c if (!ipv6_addr_any(&match.mask->dst) || dst 2884 drivers/net/ethernet/intel/iavf/iavf_main.c memcpy(&vf->data.tcp_spec.dst_ip, &match.key->dst.s6_addr32, dst 2905 drivers/net/ethernet/intel/iavf/iavf_main.c if (match.mask->dst) { dst 2906 drivers/net/ethernet/intel/iavf/iavf_main.c if (match.mask->dst == cpu_to_be16(0xffff)) { dst 2910 drivers/net/ethernet/intel/iavf/iavf_main.c be16_to_cpu(match.mask->dst)); dst 2914 drivers/net/ethernet/intel/iavf/iavf_main.c if (match.key->dst) { dst 2916 drivers/net/ethernet/intel/iavf/iavf_main.c vf->data.tcp_spec.dst_port = match.key->dst; dst 1242 drivers/net/ethernet/intel/ice/ice_flex_pipe.c u8 *src, *dst; dst 1269 drivers/net/ethernet/intel/ice/ice_flex_pipe.c dst = hw->blk[block_id].xlt1.t; dst 1282 drivers/net/ethernet/intel/ice/ice_flex_pipe.c dst = (u8 *)hw->blk[block_id].xlt2.t; dst 1295 drivers/net/ethernet/intel/ice/ice_flex_pipe.c dst = (u8 *)hw->blk[block_id].prof.t; dst 1308 drivers/net/ethernet/intel/ice/ice_flex_pipe.c dst = hw->blk[block_id].prof_redir.t; dst 1322 drivers/net/ethernet/intel/ice/ice_flex_pipe.c dst = (u8 *)hw->blk[block_id].es.t; dst 1345 drivers/net/ethernet/intel/ice/ice_flex_pipe.c memcpy(dst + offset, src, sect_len); dst 2605 drivers/net/ethernet/intel/igb/igb_main.c if (!is_zero_ether_addr(match.mask->dst)) { dst 2606 drivers/net/ethernet/intel/igb/igb_main.c if (!is_broadcast_ether_addr(match.mask->dst)) { dst 2613 
drivers/net/ethernet/intel/igb/igb_main.c ether_addr_copy(input->filter.dst_addr, match.key->dst); dst 28 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c struct tc_configuration *dst = NULL; dst 46 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c dst = &dcfg->tc_config[i - DCB_PG_ATTR_TC_0]; dst 48 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c if (dst->path[tx].prio_type != src->path[tx].prio_type) { dst 49 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c dst->path[tx].prio_type = src->path[tx].prio_type; dst 53 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c if (dst->path[tx].bwg_id != src->path[tx].bwg_id) { dst 54 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c dst->path[tx].bwg_id = src->path[tx].bwg_id; dst 58 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c if (dst->path[tx].bwg_percent != src->path[tx].bwg_percent) { dst 59 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c dst->path[tx].bwg_percent = src->path[tx].bwg_percent; dst 63 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c if (dst->path[tx].up_to_tc_bitmap != dst 65 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c dst->path[tx].up_to_tc_bitmap = dst 70 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c if (dst->path[rx].prio_type != src->path[rx].prio_type) { dst 71 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c dst->path[rx].prio_type = src->path[rx].prio_type; dst 75 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c if (dst->path[rx].bwg_id != src->path[rx].bwg_id) { dst 76 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c dst->path[rx].bwg_id = src->path[rx].bwg_id; dst 80 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c if (dst->path[rx].bwg_percent != src->path[rx].bwg_percent) { dst 81 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c dst->path[rx].bwg_percent = src->path[rx].bwg_percent; dst 85 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c if (dst->path[rx].up_to_tc_bitmap != dst 87 drivers/net/ethernet/intel/ixgbe/ixgbe_dcb_nl.c dst->path[rx].up_to_tc_bitmap = dst 8471 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c common.port.dst ^= th->source; dst 3090 drivers/net/ethernet/intel/ixgbe/ixgbe_type.h __be16 dst; dst 1288 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c if (match.mask->dst) { dst 1291 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c rule->c2_tcam |= ((u64)ntohs(match.key->dst)) << offs; dst 1292 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c rule->c2_tcam_mask |= ((u64)ntohs(match.mask->dst)) << offs; dst 834 drivers/net/ethernet/mellanox/mlx4/en_netdev.c struct list_head *dst, dst 843 drivers/net/ethernet/mellanox/mlx4/en_netdev.c list_for_each_entry(dst_tmp, dst, list) { dst 860 drivers/net/ethernet/mellanox/mlx4/en_netdev.c list_for_each_entry(dst_tmp, dst, list) { dst 875 drivers/net/ethernet/mellanox/mlx4/en_netdev.c list_add_tail(&new_mc->list, dst); dst 2203 drivers/net/ethernet/mellanox/mlx4/en_netdev.c static int mlx4_en_copy_priv(struct mlx4_en_priv *dst, dst 2209 drivers/net/ethernet/mellanox/mlx4/en_netdev.c memcpy(&dst->hwtstamp_config, &prof->hwtstamp_config, dst 2210 drivers/net/ethernet/mellanox/mlx4/en_netdev.c sizeof(dst->hwtstamp_config)); dst 2211 drivers/net/ethernet/mellanox/mlx4/en_netdev.c dst->num_tx_rings_p_up = prof->num_tx_rings_p_up; dst 2212 drivers/net/ethernet/mellanox/mlx4/en_netdev.c dst->rx_ring_num = prof->rx_ring_num; dst 2213 drivers/net/ethernet/mellanox/mlx4/en_netdev.c dst->flags = prof->flags; dst 2214 drivers/net/ethernet/mellanox/mlx4/en_netdev.c dst->mdev = src->mdev; dst 2215 drivers/net/ethernet/mellanox/mlx4/en_netdev.c dst->port = src->port; dst 2216 
drivers/net/ethernet/mellanox/mlx4/en_netdev.c dst->dev = src->dev; dst 2217 drivers/net/ethernet/mellanox/mlx4/en_netdev.c dst->prof = prof; dst 2218 drivers/net/ethernet/mellanox/mlx4/en_netdev.c dst->stride = roundup_pow_of_two(sizeof(struct mlx4_en_rx_desc) + dst 2222 drivers/net/ethernet/mellanox/mlx4/en_netdev.c dst->tx_ring_num[t] = prof->tx_ring_num[t]; dst 2223 drivers/net/ethernet/mellanox/mlx4/en_netdev.c if (!dst->tx_ring_num[t]) dst 2226 drivers/net/ethernet/mellanox/mlx4/en_netdev.c dst->tx_ring[t] = kcalloc(MAX_TX_RINGS, dst 2229 drivers/net/ethernet/mellanox/mlx4/en_netdev.c if (!dst->tx_ring[t]) dst 2232 drivers/net/ethernet/mellanox/mlx4/en_netdev.c dst->tx_cq[t] = kcalloc(MAX_TX_RINGS, dst 2235 drivers/net/ethernet/mellanox/mlx4/en_netdev.c if (!dst->tx_cq[t]) { dst 2236 drivers/net/ethernet/mellanox/mlx4/en_netdev.c kfree(dst->tx_ring[t]); dst 2245 drivers/net/ethernet/mellanox/mlx4/en_netdev.c kfree(dst->tx_ring[t]); dst 2246 drivers/net/ethernet/mellanox/mlx4/en_netdev.c kfree(dst->tx_cq[t]); dst 2251 drivers/net/ethernet/mellanox/mlx4/en_netdev.c static void mlx4_en_update_priv(struct mlx4_en_priv *dst, dst 2255 drivers/net/ethernet/mellanox/mlx4/en_netdev.c memcpy(dst->rx_ring, src->rx_ring, dst 2257 drivers/net/ethernet/mellanox/mlx4/en_netdev.c memcpy(dst->rx_cq, src->rx_cq, dst 2259 drivers/net/ethernet/mellanox/mlx4/en_netdev.c memcpy(&dst->hwtstamp_config, &src->hwtstamp_config, dst 2260 drivers/net/ethernet/mellanox/mlx4/en_netdev.c sizeof(dst->hwtstamp_config)); dst 2262 drivers/net/ethernet/mellanox/mlx4/en_netdev.c dst->tx_ring_num[t] = src->tx_ring_num[t]; dst 2263 drivers/net/ethernet/mellanox/mlx4/en_netdev.c dst->tx_ring[t] = src->tx_ring[t]; dst 2264 drivers/net/ethernet/mellanox/mlx4/en_netdev.c dst->tx_cq[t] = src->tx_cq[t]; dst 2266 drivers/net/ethernet/mellanox/mlx4/en_netdev.c dst->num_tx_rings_p_up = src->num_tx_rings_p_up; dst 2267 drivers/net/ethernet/mellanox/mlx4/en_netdev.c dst->rx_ring_num = src->rx_ring_num; dst 2268 drivers/net/ethernet/mellanox/mlx4/en_netdev.c memcpy(dst->prof, src->prof, sizeof(struct mlx4_en_port_profile)); dst 711 drivers/net/ethernet/mellanox/mlx4/en_tx.c static void mlx4_bf_copy(void __iomem *dst, const void *src, dst 714 drivers/net/ethernet/mellanox/mlx4/en_tx.c __iowrite64_copy(dst, src, bytecnt / 8); dst 1965 drivers/net/ethernet/mellanox/mlx4/fw.c u8 *dst = (u8 *)(inbox + INIT_HCA_DRIVER_VERSION_OFFSET / 4); dst 1967 drivers/net/ethernet/mellanox/mlx4/fw.c strncpy(dst, DRV_NAME_FOR_FW, INIT_HCA_DRIVER_VERSION_SZ - 1); dst 1968 drivers/net/ethernet/mellanox/mlx4/fw.c mlx4_dbg(dev, "Reporting Driver Version to FW: %s\n", dst); dst 231 drivers/net/ethernet/mellanox/mlx5/core/diag/fs_tracepoint.c const struct mlx5_flow_destination *dst, dst 236 drivers/net/ethernet/mellanox/mlx5/core/diag/fs_tracepoint.c switch (dst->type) { dst 238 drivers/net/ethernet/mellanox/mlx5/core/diag/fs_tracepoint.c trace_seq_printf(p, "vport=%u\n", dst->vport.num); dst 241 drivers/net/ethernet/mellanox/mlx5/core/diag/fs_tracepoint.c trace_seq_printf(p, "ft=%p\n", dst->ft); dst 244 drivers/net/ethernet/mellanox/mlx5/core/diag/fs_tracepoint.c trace_seq_printf(p, "ft_num=%u\n", dst->ft_num); dst 247 drivers/net/ethernet/mellanox/mlx5/core/diag/fs_tracepoint.c trace_seq_printf(p, "tir=%u\n", dst->tir_num); dst 57 drivers/net/ethernet/mellanox/mlx5/core/diag/fs_tracepoint.h #define __parse_fs_dst(dst, counter_id) \ dst 58 drivers/net/ethernet/mellanox/mlx5/core/diag/fs_tracepoint.h parse_fs_dst(p, (const struct mlx5_flow_destination *)dst, 
counter_id) dst 61 drivers/net/ethernet/mellanox/mlx5/core/diag/fs_tracepoint.h const struct mlx5_flow_destination *dst, dst 108 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c ret = get_route_and_out_devs(priv, rt->dst.dev, route_dev, out_dev); dst 115 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c *out_ttl = ip4_dst_hoplimit(&rt->dst); dst 116 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c n = dst_neigh_lookup(&rt->dst, &fl4->daddr); dst 142 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c struct dst_entry *dst; dst 147 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c dst = ipv6_stub->ipv6_dst_lookup_flow(dev_net(mirred_dev), NULL, fl6, dst 149 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c if (IS_ERR(dst)) dst 150 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c return PTR_ERR(dst); dst 153 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c *out_ttl = ip6_dst_hoplimit(dst); dst 155 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c ret = get_route_and_out_devs(priv, dst->dev, route_dev, out_dev); dst 157 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c dst_release(dst); dst 164 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c n = dst_neigh_lookup(dst, &fl6->daddr); dst 165 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c dst_release(dst); dst 225 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c fl4.daddr = tun_key->u.ipv4.dst; dst 345 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c fl6.daddr = tun_key->u.ipv6.dst; dst 532 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c if (memchr_inv(&enc_ports.mask->dst, 0xff, dst 533 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c sizeof(enc_ports.mask->dst))) { dst 547 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c ntohs(enc_ports.mask->dst)); dst 549 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun.c ntohs(enc_ports.key->dst)); dst 37 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun_geneve.c if (be16_to_cpu(enc_ports.key->dst) != GENEVE_UDP_PORT) { dst 42 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun_geneve.c be16_to_cpu(enc_ports.key->dst)); dst 33 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun_vxlan.c be16_to_cpu(enc_ports.key->dst))) { dst 38 drivers/net/ethernet/mellanox/mlx5/core/en/tc_tun_vxlan.c be16_to_cpu(enc_ports.key->dst)); dst 555 drivers/net/ethernet/mellanox/mlx5/core/en_arfs.c struct mlx5_flow_destination dst = {}; dst 558 drivers/net/ethernet/mellanox/mlx5/core/en_arfs.c dst.type = MLX5_FLOW_DESTINATION_TYPE_TIR; dst 559 drivers/net/ethernet/mellanox/mlx5/core/en_arfs.c dst.tir_num = priv->direct_tir[rxq].tirn; dst 560 drivers/net/ethernet/mellanox/mlx5/core/en_arfs.c err = mlx5_modify_rule_destination(rule, &dst, NULL); dst 620 drivers/net/ethernet/mellanox/mlx5/core/en_arfs.c tuple->dst_ipv4 = fk->addrs.v4addrs.dst; dst 624 drivers/net/ethernet/mellanox/mlx5/core/en_arfs.c memcpy(&tuple->dst_ipv6, &fk->addrs.v6addrs.dst, dst 628 drivers/net/ethernet/mellanox/mlx5/core/en_arfs.c tuple->dst_port = fk->ports.dst; dst 641 drivers/net/ethernet/mellanox/mlx5/core/en_arfs.c if (tuple->src_port != fk->ports.src || tuple->dst_port != fk->ports.dst) dst 647 drivers/net/ethernet/mellanox/mlx5/core/en_arfs.c tuple->dst_ipv4 == fk->addrs.v4addrs.dst; dst 651 drivers/net/ethernet/mellanox/mlx5/core/en_arfs.c !memcmp(&tuple->dst_ipv6, &fk->addrs.v6addrs.dst, dst 662 drivers/net/ethernet/mellanox/mlx5/core/en_arfs.c head = arfs_hash_bucket(arfs_t, fk->ports.src, fk->ports.dst); dst 403 drivers/net/ethernet/mellanox/mlx5/core/en_fs_ethtool.c struct mlx5_flow_destination *dst = 
NULL; dst 427 drivers/net/ethernet/mellanox/mlx5/core/en_fs_ethtool.c dst = kzalloc(sizeof(*dst), GFP_KERNEL); dst 428 drivers/net/ethernet/mellanox/mlx5/core/en_fs_ethtool.c if (!dst) { dst 433 drivers/net/ethernet/mellanox/mlx5/core/en_fs_ethtool.c dst->type = MLX5_FLOW_DESTINATION_TYPE_TIR; dst 434 drivers/net/ethernet/mellanox/mlx5/core/en_fs_ethtool.c dst->tir_num = tir[ix].tirn; dst 440 drivers/net/ethernet/mellanox/mlx5/core/en_fs_ethtool.c rule = mlx5_add_flow_rules(ft, spec, &flow_act, dst, dst ? 1 : 0); dst 449 drivers/net/ethernet/mellanox/mlx5/core/en_fs_ethtool.c kfree(dst); dst 1694 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c ntohl(match.mask->dst)); dst 1697 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c ntohl(match.key->dst)); dst 1718 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c &match.mask->dst, MLX5_FLD_SZ_BYTES(ipv6_layout, dst 1722 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c &match.key->dst, MLX5_FLD_SZ_BYTES(ipv6_layout, dst 1963 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c match.mask->dst); dst 1966 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c match.key->dst); dst 1976 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c !is_zero_ether_addr(match.mask->dst)) dst 2031 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c &match.mask->dst, sizeof(match.mask->dst)); dst 2034 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c &match.key->dst, sizeof(match.key->dst)); dst 2036 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c if (match.mask->src || match.mask->dst) dst 2053 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c &match.mask->dst, sizeof(match.mask->dst)); dst 2056 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c &match.key->dst, sizeof(match.key->dst)); dst 2059 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c ipv6_addr_type(&match.mask->dst) != IPV6_ADDR_ANY) dst 2108 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c tcp_dport, ntohs(match.mask->dst)); dst 2110 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c tcp_dport, ntohs(match.key->dst)); dst 2120 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c udp_dport, ntohs(match.mask->dst)); dst 2122 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c udp_dport, ntohs(match.key->dst)); dst 2132 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c if (match.mask->src || match.mask->dst) dst 1243 drivers/net/ethernet/mellanox/mlx5/core/eswitch.c struct mlx5_flow_destination *dst = NULL; dst 1309 drivers/net/ethernet/mellanox/mlx5/core/eswitch.c dst = &drop_ctr_dst; dst 1314 drivers/net/ethernet/mellanox/mlx5/core/eswitch.c &flow_act, dst, dest_num); dst 1336 drivers/net/ethernet/mellanox/mlx5/core/eswitch.c struct mlx5_flow_destination *dst = NULL; dst 1396 drivers/net/ethernet/mellanox/mlx5/core/eswitch.c dst = &drop_ctr_dst; dst 1401 drivers/net/ethernet/mellanox/mlx5/core/eswitch.c &flow_act, dst, dest_num); dst 161 drivers/net/ethernet/mellanox/mlx5/core/eswitch_offloads_termtbl.c struct mlx5_flow_act *dst) dst 167 drivers/net/ethernet/mellanox/mlx5/core/eswitch_offloads_termtbl.c dst->action |= MLX5_FLOW_CONTEXT_ACTION_VLAN_PUSH; dst 168 drivers/net/ethernet/mellanox/mlx5/core/eswitch_offloads_termtbl.c memcpy(&dst->vlan[0], &src->vlan[0], sizeof(src->vlan[0])); dst 175 drivers/net/ethernet/mellanox/mlx5/core/eswitch_offloads_termtbl.c dst->action |= MLX5_FLOW_CONTEXT_ACTION_VLAN_PUSH_2; dst 176 drivers/net/ethernet/mellanox/mlx5/core/eswitch_offloads_termtbl.c memcpy(&dst->vlan[1], &src->vlan[1], sizeof(src->vlan[1])); dst 369 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c struct mlx5_flow_rule *dst; dst 376 
drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c list_for_each_entry(dst, &fte->node.children, node.list) { dst 377 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c if (dst->dest_attr.type == MLX5_FLOW_DESTINATION_TYPE_COUNTER) dst 379 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c if (dst->dest_attr.type == MLX5_FLOW_DESTINATION_TYPE_VPORT && dst 380 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c dst->dest_attr.vport.flags & MLX5_FLOW_DEST_VPORT_REFORMAT_ID) dst 407 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c struct mlx5_flow_rule *dst; dst 487 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c list_for_each_entry(dst, &fte->node.children, node.list) { dst 488 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c unsigned int id, type = dst->dest_attr.type; dst 495 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c id = dst->dest_attr.ft_num; dst 499 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c id = dst->dest_attr.ft->id; dst 502 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c id = dst->dest_attr.vport.num; dst 505 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c !!(dst->dest_attr.vport.flags & dst 509 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c dst->dest_attr.vport.vhca_id); dst 511 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c dst->dest_attr.vport.pkt_reformat) { dst 514 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c !!(dst->dest_attr.vport.flags & dst 518 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c dst->dest_attr.vport.pkt_reformat->id); dst 522 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c id = dst->dest_attr.tir_num; dst 542 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c list_for_each_entry(dst, &fte->node.children, node.list) { dst 543 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c if (dst->dest_attr.type != dst 548 drivers/net/ethernet/mellanox/mlx5/core/fs_cmd.c dst->dest_attr.counter_id); dst 1228 drivers/net/ethernet/mellanox/mlx5/core/fs_core.c static int dst = BIT(MLX5_SET_FTE_MODIFY_ENABLE_MASK_DESTINATION_LIST); dst 1264 drivers/net/ethernet/mellanox/mlx5/core/fs_core.c *modify_mask |= type ? 
count : dst; dst 336 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c static void dr_ste_replace(struct mlx5dr_ste *dst, struct mlx5dr_ste *src) dst 338 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c memcpy(dst->hw_ste, src->hw_ste, DR_STE_SIZE_REDUCED); dst 339 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c dst->next_htbl = src->next_htbl; dst 340 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c if (dst->next_htbl) dst 341 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c dst->next_htbl->pointing_ste = dst; dst 343 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c dst->refcount = src->refcount; dst 345 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c INIT_LIST_HEAD(&dst->rule_list); dst 346 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c list_splice_tail_init(&src->rule_list, &dst->rule_list); dst 510 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c bool mlx5dr_ste_equal_tag(void *src, void *dst) dst 513 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c struct dr_hw_ste_format *d_hw_ste = (struct dr_hw_ste_format *)dst; dst 265 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h bool mlx5dr_ste_equal_tag(void *src, void *dst); dst 177 drivers/net/ethernet/mellanox/mlx5/core/steering/fs_dr.c struct mlx5_flow_rule *dst) dst 179 drivers/net/ethernet/mellanox/mlx5/core/steering/fs_dr.c struct mlx5_flow_destination *dest_attr = &dst->dest_attr; dst 188 drivers/net/ethernet/mellanox/mlx5/core/steering/fs_dr.c struct mlx5_flow_rule *dst) dst 190 drivers/net/ethernet/mellanox/mlx5/core/steering/fs_dr.c struct mlx5_flow_table *dest_ft = dst->dest_attr.ft; dst 224 drivers/net/ethernet/mellanox/mlx5/core/steering/fs_dr.c struct mlx5_flow_rule *dst; dst 353 drivers/net/ethernet/mellanox/mlx5/core/steering/fs_dr.c list_for_each_entry(dst, &fte->node.children, node.list) { dst 354 drivers/net/ethernet/mellanox/mlx5/core/steering/fs_dr.c enum mlx5_flow_destination_type type = dst->dest_attr.type; dst 366 drivers/net/ethernet/mellanox/mlx5/core/steering/fs_dr.c tmp_action = create_ft_action(dev, dst); dst 375 drivers/net/ethernet/mellanox/mlx5/core/steering/fs_dr.c tmp_action = create_vport_action(domain, dst); dst 391 drivers/net/ethernet/mellanox/mlx5/core/steering/fs_dr.c list_for_each_entry(dst, &fte->node.children, node.list) { dst 394 drivers/net/ethernet/mellanox/mlx5/core/steering/fs_dr.c if (dst->dest_attr.type != dst 403 drivers/net/ethernet/mellanox/mlx5/core/steering/fs_dr.c id = dst->dest_attr.counter_id; dst 178 drivers/net/ethernet/mellanox/mlxsw/item.h static inline void __mlxsw_item_memcpy_from(const char *buf, char *dst, dst 184 drivers/net/ethernet/mellanox/mlxsw/item.h memcpy(dst, &buf[offset], item->size.bytes); dst 437 drivers/net/ethernet/mellanox/mlxsw/item.h mlxsw_##_type##_##_cname##_##_iname##_memcpy_from(const char *buf, char *dst) \ dst 439 drivers/net/ethernet/mellanox/mlxsw/item.h __mlxsw_item_memcpy_from(buf, dst, \ dst 466 drivers/net/ethernet/mellanox/mlxsw/item.h char *dst) \ dst 468 drivers/net/ethernet/mellanox/mlxsw/item.h __mlxsw_item_memcpy_from(buf, dst, \ dst 196 drivers/net/ethernet/mellanox/mlxsw/spectrum_flower.c (char *) &match.key->dst, dst 197 drivers/net/ethernet/mellanox/mlxsw/spectrum_flower.c (char *) &match.mask->dst, 4); dst 220 drivers/net/ethernet/mellanox/mlxsw/spectrum_flower.c &match.key->dst.s6_addr[0x0], dst 221 drivers/net/ethernet/mellanox/mlxsw/spectrum_flower.c &match.mask->dst.s6_addr[0x0], 4); dst 223 
drivers/net/ethernet/mellanox/mlxsw/spectrum_flower.c &match.key->dst.s6_addr[0x4], dst 224 drivers/net/ethernet/mellanox/mlxsw/spectrum_flower.c &match.mask->dst.s6_addr[0x4], 4); dst 226 drivers/net/ethernet/mellanox/mlxsw/spectrum_flower.c &match.key->dst.s6_addr[0x8], dst 227 drivers/net/ethernet/mellanox/mlxsw/spectrum_flower.c &match.mask->dst.s6_addr[0x8], 4); dst 229 drivers/net/ethernet/mellanox/mlxsw/spectrum_flower.c &match.key->dst.s6_addr[0xC], dst 230 drivers/net/ethernet/mellanox/mlxsw/spectrum_flower.c &match.mask->dst.s6_addr[0xC], 4); dst 252 drivers/net/ethernet/mellanox/mlxsw/spectrum_flower.c ntohs(match.key->dst), dst 253 drivers/net/ethernet/mellanox/mlxsw/spectrum_flower.c ntohs(match.mask->dst)); dst 394 drivers/net/ethernet/mellanox/mlxsw/spectrum_flower.c match.key->dst, dst 395 drivers/net/ethernet/mellanox/mlxsw/spectrum_flower.c match.mask->dst, 2); dst 398 drivers/net/ethernet/mellanox/mlxsw/spectrum_flower.c match.key->dst + 2, dst 399 drivers/net/ethernet/mellanox/mlxsw/spectrum_flower.c match.mask->dst + 2, 4); dst 4339 drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c union mlxsw_sp_l3addr dip = { .addr4 = htonl(fen_info->dst) }; dst 4447 drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c fib_node = mlxsw_sp_fib_node_lookup(fib, &fen_info->dst, dst 4448 drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c sizeof(fen_info->dst), dst 4872 drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c &fen_info->dst, sizeof(fen_info->dst), dst 317 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c dev = rt->dst.dev; dst 406 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c struct dst_entry *dst; dst 416 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c dst = ip6_route_output(t->net, NULL, &fl6); dst 417 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c if (!dst || dst->error) dst 420 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c rt6 = container_of(dst, struct rt6_info, dst); dst 422 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c dev = dst->dev; dst 427 drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c dst_release(dst); dst 858 drivers/net/ethernet/mscc/ocelot.c u32 val, dst, macl, mach; dst 880 drivers/net/ethernet/mscc/ocelot.c dst = (val & ANA_TABLES_MACACCESS_DEST_IDX_M) >> 3; dst 881 drivers/net/ethernet/mscc/ocelot.c if (dst != port->chip_port) dst 89 drivers/net/ethernet/mscc/ocelot_flower.c match.key->dst); dst 93 drivers/net/ethernet/mscc/ocelot_flower.c match.mask->dst); dst 132 drivers/net/ethernet/mscc/ocelot_flower.c memcpy(tmp, &match.key->dst, 4); dst 135 drivers/net/ethernet/mscc/ocelot_flower.c memcpy(tmp, &match.mask->dst, 4); dst 149 drivers/net/ethernet/mscc/ocelot_flower.c ocelot_rule->frame.ipv4.dport.value = ntohs(match.key->dst); dst 150 drivers/net/ethernet/mscc/ocelot_flower.c ocelot_rule->frame.ipv4.dport.mask = ntohs(match.mask->dst); dst 1161 drivers/net/ethernet/myricom/myri10ge/myri10ge.c myri10ge_submit_8rx(struct mcp_kreq_ether_recv __iomem * dst, dst 1168 drivers/net/ethernet/myricom/myri10ge/myri10ge.c myri10ge_pio_copy(dst, src, 4 * sizeof(*src)); dst 1170 drivers/net/ethernet/myricom/myri10ge/myri10ge.c myri10ge_pio_copy(dst + 4, src + 4, 4 * sizeof(*src)); dst 1173 drivers/net/ethernet/myricom/myri10ge/myri10ge.c put_be32(low, &dst->addr_low); dst 2547 drivers/net/ethernet/myricom/myri10ge/myri10ge.c struct mcp_kreq_ether_send __iomem *dstp, *dst; dst 2556 drivers/net/ethernet/myricom/myri10ge/myri10ge.c dst = dstp = &tx->lanai[idx]; dst 2580 drivers/net/ethernet/myricom/myri10ge/myri10ge.c 
put_be32(*((__be32 *) src + 3), (__be32 __iomem *) dst + 3); dst 294 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_immed(struct nfp_prog *nfp_prog, swreg dst, u16 imm, dst 300 drivers/net/ethernet/netronome/nfp/bpf/jit.c if (swreg_type(dst) == NN_REG_IMM) { dst 305 drivers/net/ethernet/netronome/nfp/bpf/jit.c err = swreg_to_unrestricted(dst, dst, reg_imm(imm & 0xff), &reg); dst 313 drivers/net/ethernet/netronome/nfp/bpf/jit.c swreg_type(dst) == NN_REG_NONE ? reg.dst : reg.areg, dst 319 drivers/net/ethernet/netronome/nfp/bpf/jit.c __emit_shf(struct nfp_prog *nfp_prog, u16 dst, enum alu_dst_ab dst_ab, dst 351 drivers/net/ethernet/netronome/nfp/bpf/jit.c FIELD_PREP(OP_SHF_DST, dst) | dst 363 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf(struct nfp_prog *nfp_prog, swreg dst, dst 369 drivers/net/ethernet/netronome/nfp/bpf/jit.c err = swreg_to_restricted(dst, lreg, rreg, &reg, true); dst 375 drivers/net/ethernet/netronome/nfp/bpf/jit.c __emit_shf(nfp_prog, reg.dst, reg.dst_ab, sc, shift, dst 381 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf_indir(struct nfp_prog *nfp_prog, swreg dst, dst 390 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf(nfp_prog, dst, lreg, op, rreg, sc, 0); dst 394 drivers/net/ethernet/netronome/nfp/bpf/jit.c __emit_alu(struct nfp_prog *nfp_prog, u16 dst, enum alu_dst_ab dst_ab, dst 403 drivers/net/ethernet/netronome/nfp/bpf/jit.c FIELD_PREP(OP_ALU_DST, dst) | dst 415 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(struct nfp_prog *nfp_prog, swreg dst, dst 421 drivers/net/ethernet/netronome/nfp/bpf/jit.c err = swreg_to_unrestricted(dst, lreg, rreg, &reg); dst 427 drivers/net/ethernet/netronome/nfp/bpf/jit.c __emit_alu(nfp_prog, reg.dst, reg.dst_ab, dst 471 drivers/net/ethernet/netronome/nfp/bpf/jit.c areg = reg.dst; dst 511 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_ld_field_any(struct nfp_prog *nfp_prog, swreg dst, u8 bmask, swreg src, dst 518 drivers/net/ethernet/netronome/nfp/bpf/jit.c err = swreg_to_restricted(dst, dst, src, &reg, true); dst 530 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_ld_field(struct nfp_prog *nfp_prog, swreg dst, u8 bmask, swreg src, dst 533 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_ld_field_any(nfp_prog, dst, bmask, src, sc, shift, false); dst 608 drivers/net/ethernet/netronome/nfp/bpf/jit.c static void wrp_immed(struct nfp_prog *nfp_prog, swreg dst, u32 imm) dst 614 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_immed(nfp_prog, dst, val, IMMED_WIDTH_ALL, false, shift); dst 616 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_immed(nfp_prog, dst, val, IMMED_WIDTH_ALL, true, shift); dst 618 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_immed(nfp_prog, dst, imm & 0xffff, IMMED_WIDTH_ALL, dst 620 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_immed(nfp_prog, dst, imm >> 16, IMMED_WIDTH_WORD, dst 626 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_zext(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, u8 dst) dst 629 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, reg_both(dst + 1), 0); dst 633 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed_relo(struct nfp_prog *nfp_prog, swreg dst, u32 imm, dst 641 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_immed(nfp_prog, dst, imm, IMMED_WIDTH_ALL, false, IMMED_SHIFT_0B); dst 679 drivers/net/ethernet/netronome/nfp/bpf/jit.c static void wrp_mov(struct nfp_prog *nfp_prog, swreg dst, swreg src) dst 681 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(nfp_prog, dst, reg_none(), ALU_OP_NONE, src); dst 684
drivers/net/ethernet/netronome/nfp/bpf/jit.c static void wrp_reg_mov(struct nfp_prog *nfp_prog, u16 dst, u16 src) dst 686 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_mov(nfp_prog, reg_both(dst), reg_b(src)); dst 693 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_reg_subpart(struct nfp_prog *nfp_prog, swreg dst, swreg src, u8 field_len, dst 699 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_ld_field_any(nfp_prog, dst, mask, src, sc, offset * 8, true); dst 706 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_reg_or_subpart(struct nfp_prog *nfp_prog, swreg dst, swreg src, dst 712 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_ld_field(nfp_prog, dst, mask, src, sc, 32 - offset * 8); dst 1020 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_lmem_load(struct nfp_prog *nfp_prog, u8 dst, u8 dst_byte, s32 off, dst 1038 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_mov(nfp_prog, reg_both(dst), dst 1080 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_ld_field_any(nfp_prog, reg_both(dst), mask, reg, sc, shf, new_gpr); dst 1265 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_alu_imm(struct nfp_prog *nfp_prog, u8 dst, enum alu_op alu_op, u32 imm) dst 1271 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, reg_both(dst), 0); dst 1277 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, reg_both(dst), ~0U); dst 1283 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(nfp_prog, reg_both(dst), reg_none(), dst 1284 drivers/net/ethernet/netronome/nfp/bpf/jit.c ALU_OP_NOT, reg_b(dst)); dst 1290 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(nfp_prog, reg_both(dst), reg_a(dst), alu_op, tmp_reg); dst 1315 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst = meta->insn.dst_reg * 2, src = meta->insn.src_reg * 2; dst 1317 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(nfp_prog, reg_both(dst), reg_a(dst), alu_op, reg_b(src)); dst 1318 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(nfp_prog, reg_both(dst + 1), dst 1319 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_a(dst + 1), alu_op, reg_b(src + 1)); dst 1329 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst = insn->dst_reg * 2; dst 1331 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_alu_imm(nfp_prog, dst, alu_op, insn->imm); dst 1332 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_zext(nfp_prog, meta, dst); dst 1341 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst = meta->insn.dst_reg * 2, src = meta->insn.src_reg * 2; dst 1343 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(nfp_prog, reg_both(dst), reg_a(dst), alu_op, reg_b(src)); dst 1344 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_zext(nfp_prog, meta, dst); dst 1350 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_test_reg_one(struct nfp_prog *nfp_prog, u8 dst, enum alu_op alu_op, u8 src, dst 1353 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(nfp_prog, reg_none(), reg_a(dst), alu_op, reg_b(src)); dst 1532 drivers/net/ethernet/netronome/nfp/bpf/jit.c static int wrp_div_imm(struct nfp_prog *nfp_prog, u8 dst, u64 imm) dst 1534 drivers/net/ethernet/netronome/nfp/bpf/jit.c swreg dst_both = reg_both(dst), dst_a = reg_a(dst), dst_b = reg_a(dst); dst 1839 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst = insn->dst_reg * 2; dst 1848 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(nfp_prog, reg_both(dst), stack_reg(nfp_prog), dst 1850 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, reg_both(dst + 1), 0); dst 1852 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_reg_mov(nfp_prog, dst, src); dst 1853 
drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_reg_mov(nfp_prog, dst + 1, src + 1); dst 1996 drivers/net/ethernet/netronome/nfp/bpf/jit.c static int __shl_imm64(struct nfp_prog *nfp_prog, u8 dst, u8 shift_amt) dst 2002 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf(nfp_prog, reg_both(dst + 1), reg_a(dst + 1), dst 2003 drivers/net/ethernet/netronome/nfp/bpf/jit.c SHF_OP_NONE, reg_b(dst), SHF_SC_R_DSHF, dst 2005 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf(nfp_prog, reg_both(dst), reg_none(), SHF_OP_NONE, dst 2006 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst), SHF_SC_L_SHF, shift_amt); dst 2008 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_reg_mov(nfp_prog, dst + 1, dst); dst 2009 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, reg_both(dst), 0); dst 2011 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf(nfp_prog, reg_both(dst + 1), reg_none(), SHF_OP_NONE, dst 2012 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst), SHF_SC_L_SHF, shift_amt - 32); dst 2013 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, reg_both(dst), 0); dst 2022 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst = insn->dst_reg * 2; dst 2024 drivers/net/ethernet/netronome/nfp/bpf/jit.c return __shl_imm64(nfp_prog, dst, insn->imm); dst 2027 drivers/net/ethernet/netronome/nfp/bpf/jit.c static void shl_reg64_lt32_high(struct nfp_prog *nfp_prog, u8 dst, u8 src) dst 2032 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf_indir(nfp_prog, reg_both(dst + 1), reg_a(dst + 1), SHF_OP_NONE, dst 2033 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst), SHF_SC_R_DSHF); dst 2037 drivers/net/ethernet/netronome/nfp/bpf/jit.c static void shl_reg64_lt32_low(struct nfp_prog *nfp_prog, u8 dst, u8 src) dst 2040 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf_indir(nfp_prog, reg_both(dst), reg_none(), SHF_OP_NONE, dst 2041 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst), SHF_SC_L_SHF); dst 2044 drivers/net/ethernet/netronome/nfp/bpf/jit.c static void shl_reg64_lt32(struct nfp_prog *nfp_prog, u8 dst, u8 src) dst 2046 drivers/net/ethernet/netronome/nfp/bpf/jit.c shl_reg64_lt32_high(nfp_prog, dst, src); dst 2047 drivers/net/ethernet/netronome/nfp/bpf/jit.c shl_reg64_lt32_low(nfp_prog, dst, src); dst 2050 drivers/net/ethernet/netronome/nfp/bpf/jit.c static void shl_reg64_ge32(struct nfp_prog *nfp_prog, u8 dst, u8 src) dst 2053 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf_indir(nfp_prog, reg_both(dst + 1), reg_none(), SHF_OP_NONE, dst 2054 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst), SHF_SC_L_SHF); dst 2055 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, reg_both(dst), 0); dst 2062 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst, src; dst 2064 drivers/net/ethernet/netronome/nfp/bpf/jit.c dst = insn->dst_reg * 2; dst 2068 drivers/net/ethernet/netronome/nfp/bpf/jit.c return __shl_imm64(nfp_prog, dst, umin); dst 2072 drivers/net/ethernet/netronome/nfp/bpf/jit.c shl_reg64_lt32(nfp_prog, dst, src); dst 2074 drivers/net/ethernet/netronome/nfp/bpf/jit.c shl_reg64_ge32(nfp_prog, dst, src); dst 2084 drivers/net/ethernet/netronome/nfp/bpf/jit.c shl_reg64_lt32_high(nfp_prog, dst, src); dst 2088 drivers/net/ethernet/netronome/nfp/bpf/jit.c shl_reg64_lt32_low(nfp_prog, dst, src); dst 2092 drivers/net/ethernet/netronome/nfp/bpf/jit.c shl_reg64_ge32(nfp_prog, dst, src); dst 2111 drivers/net/ethernet/netronome/nfp/bpf/jit.c static int __shr_imm64(struct nfp_prog *nfp_prog, u8 dst, u8 shift_amt) dst 2117 
drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf(nfp_prog, reg_both(dst), reg_a(dst + 1), SHF_OP_NONE, dst 2118 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst), SHF_SC_R_DSHF, shift_amt); dst 2119 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf(nfp_prog, reg_both(dst + 1), reg_none(), SHF_OP_NONE, dst 2120 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst + 1), SHF_SC_R_SHF, shift_amt); dst 2122 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_reg_mov(nfp_prog, dst, dst + 1); dst 2123 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, reg_both(dst + 1), 0); dst 2125 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf(nfp_prog, reg_both(dst), reg_none(), SHF_OP_NONE, dst 2126 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst + 1), SHF_SC_R_SHF, shift_amt - 32); dst 2127 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, reg_both(dst + 1), 0); dst 2136 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst = insn->dst_reg * 2; dst 2138 drivers/net/ethernet/netronome/nfp/bpf/jit.c return __shr_imm64(nfp_prog, dst, insn->imm); dst 2142 drivers/net/ethernet/netronome/nfp/bpf/jit.c static void shr_reg64_lt32_high(struct nfp_prog *nfp_prog, u8 dst, u8 src) dst 2145 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf_indir(nfp_prog, reg_both(dst + 1), reg_none(), SHF_OP_NONE, dst 2146 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst + 1), SHF_SC_R_SHF); dst 2149 drivers/net/ethernet/netronome/nfp/bpf/jit.c static void shr_reg64_lt32_low(struct nfp_prog *nfp_prog, u8 dst, u8 src) dst 2152 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf_indir(nfp_prog, reg_both(dst), reg_a(dst + 1), SHF_OP_NONE, dst 2153 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst), SHF_SC_R_DSHF); dst 2156 drivers/net/ethernet/netronome/nfp/bpf/jit.c static void shr_reg64_lt32(struct nfp_prog *nfp_prog, u8 dst, u8 src) dst 2158 drivers/net/ethernet/netronome/nfp/bpf/jit.c shr_reg64_lt32_low(nfp_prog, dst, src); dst 2159 drivers/net/ethernet/netronome/nfp/bpf/jit.c shr_reg64_lt32_high(nfp_prog, dst, src); dst 2162 drivers/net/ethernet/netronome/nfp/bpf/jit.c static void shr_reg64_ge32(struct nfp_prog *nfp_prog, u8 dst, u8 src) dst 2165 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf_indir(nfp_prog, reg_both(dst), reg_none(), SHF_OP_NONE, dst 2166 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst + 1), SHF_SC_R_SHF); dst 2167 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, reg_both(dst + 1), 0); dst 2174 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst, src; dst 2176 drivers/net/ethernet/netronome/nfp/bpf/jit.c dst = insn->dst_reg * 2; dst 2180 drivers/net/ethernet/netronome/nfp/bpf/jit.c return __shr_imm64(nfp_prog, dst, umin); dst 2184 drivers/net/ethernet/netronome/nfp/bpf/jit.c shr_reg64_lt32(nfp_prog, dst, src); dst 2186 drivers/net/ethernet/netronome/nfp/bpf/jit.c shr_reg64_ge32(nfp_prog, dst, src); dst 2195 drivers/net/ethernet/netronome/nfp/bpf/jit.c shr_reg64_lt32_low(nfp_prog, dst, src); dst 2199 drivers/net/ethernet/netronome/nfp/bpf/jit.c shr_reg64_lt32_high(nfp_prog, dst, src); dst 2203 drivers/net/ethernet/netronome/nfp/bpf/jit.c shr_reg64_ge32(nfp_prog, dst, src); dst 2215 drivers/net/ethernet/netronome/nfp/bpf/jit.c static int __ashr_imm64(struct nfp_prog *nfp_prog, u8 dst, u8 shift_amt) dst 2221 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf(nfp_prog, reg_both(dst), reg_a(dst + 1), SHF_OP_NONE, dst 2222 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst), SHF_SC_R_DSHF, shift_amt); dst 2224 
drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(nfp_prog, reg_none(), reg_a(dst + 1), ALU_OP_OR, dst 2226 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf(nfp_prog, reg_both(dst + 1), reg_none(), SHF_OP_ASHR, dst 2227 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst + 1), SHF_SC_R_SHF, shift_amt); dst 2230 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_reg_mov(nfp_prog, dst, dst + 1); dst 2231 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf(nfp_prog, reg_both(dst + 1), reg_none(), SHF_OP_ASHR, dst 2232 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst + 1), SHF_SC_R_SHF, 31); dst 2234 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(nfp_prog, reg_none(), reg_a(dst + 1), ALU_OP_OR, dst 2236 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf(nfp_prog, reg_both(dst), reg_none(), SHF_OP_ASHR, dst 2237 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst + 1), SHF_SC_R_SHF, shift_amt - 32); dst 2238 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf(nfp_prog, reg_both(dst + 1), reg_none(), SHF_OP_ASHR, dst 2239 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst + 1), SHF_SC_R_SHF, 31); dst 2248 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst = insn->dst_reg * 2; dst 2250 drivers/net/ethernet/netronome/nfp/bpf/jit.c return __ashr_imm64(nfp_prog, dst, insn->imm); dst 2253 drivers/net/ethernet/netronome/nfp/bpf/jit.c static void ashr_reg64_lt32_high(struct nfp_prog *nfp_prog, u8 dst, u8 src) dst 2258 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(nfp_prog, reg_none(), reg_a(src), ALU_OP_OR, reg_b(dst + 1)); dst 2259 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf_indir(nfp_prog, reg_both(dst + 1), reg_none(), SHF_OP_ASHR, dst 2260 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst + 1), SHF_SC_R_SHF); dst 2263 drivers/net/ethernet/netronome/nfp/bpf/jit.c static void ashr_reg64_lt32_low(struct nfp_prog *nfp_prog, u8 dst, u8 src) dst 2268 drivers/net/ethernet/netronome/nfp/bpf/jit.c return shr_reg64_lt32_low(nfp_prog, dst, src); dst 2271 drivers/net/ethernet/netronome/nfp/bpf/jit.c static void ashr_reg64_lt32(struct nfp_prog *nfp_prog, u8 dst, u8 src) dst 2273 drivers/net/ethernet/netronome/nfp/bpf/jit.c ashr_reg64_lt32_low(nfp_prog, dst, src); dst 2274 drivers/net/ethernet/netronome/nfp/bpf/jit.c ashr_reg64_lt32_high(nfp_prog, dst, src); dst 2277 drivers/net/ethernet/netronome/nfp/bpf/jit.c static void ashr_reg64_ge32(struct nfp_prog *nfp_prog, u8 dst, u8 src) dst 2279 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(nfp_prog, reg_none(), reg_a(src), ALU_OP_OR, reg_b(dst + 1)); dst 2280 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf_indir(nfp_prog, reg_both(dst), reg_none(), SHF_OP_ASHR, dst 2281 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst + 1), SHF_SC_R_SHF); dst 2282 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf(nfp_prog, reg_both(dst + 1), reg_none(), SHF_OP_ASHR, dst 2283 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst + 1), SHF_SC_R_SHF, 31); dst 2291 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst, src; dst 2293 drivers/net/ethernet/netronome/nfp/bpf/jit.c dst = insn->dst_reg * 2; dst 2297 drivers/net/ethernet/netronome/nfp/bpf/jit.c return __ashr_imm64(nfp_prog, dst, umin); dst 2301 drivers/net/ethernet/netronome/nfp/bpf/jit.c ashr_reg64_lt32(nfp_prog, dst, src); dst 2303 drivers/net/ethernet/netronome/nfp/bpf/jit.c ashr_reg64_ge32(nfp_prog, dst, src); dst 2309 drivers/net/ethernet/netronome/nfp/bpf/jit.c ashr_reg64_lt32_low(nfp_prog, dst, src); dst 2313 
drivers/net/ethernet/netronome/nfp/bpf/jit.c ashr_reg64_lt32_high(nfp_prog, dst, src); dst 2317 drivers/net/ethernet/netronome/nfp/bpf/jit.c ashr_reg64_ge32(nfp_prog, dst, src); dst 2418 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst = meta->insn.dst_reg * 2; dst 2420 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(nfp_prog, reg_both(dst), reg_imm(0), ALU_OP_SUB, reg_b(dst)); dst 2421 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_zext(nfp_prog, meta, dst); dst 2427 drivers/net/ethernet/netronome/nfp/bpf/jit.c __ashr_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, u8 dst, dst 2432 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(nfp_prog, reg_none(), reg_a(dst), ALU_OP_OR, dst 2434 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf(nfp_prog, reg_both(dst), reg_none(), SHF_OP_ASHR, dst 2435 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst), SHF_SC_R_SHF, shift_amt); dst 2437 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_zext(nfp_prog, meta, dst); dst 2446 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst, src; dst 2448 drivers/net/ethernet/netronome/nfp/bpf/jit.c dst = insn->dst_reg * 2; dst 2452 drivers/net/ethernet/netronome/nfp/bpf/jit.c return __ashr_imm(nfp_prog, meta, dst, umin); dst 2458 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(nfp_prog, reg_none(), reg_a(src), ALU_OP_OR, reg_b(dst)); dst 2459 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf_indir(nfp_prog, reg_both(dst), reg_none(), SHF_OP_ASHR, dst 2460 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst), SHF_SC_R_SHF); dst 2461 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_zext(nfp_prog, meta, dst); dst 2469 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst = insn->dst_reg * 2; dst 2471 drivers/net/ethernet/netronome/nfp/bpf/jit.c return __ashr_imm(nfp_prog, meta, dst, insn->imm); dst 2475 drivers/net/ethernet/netronome/nfp/bpf/jit.c __shr_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, u8 dst, dst 2479 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf(nfp_prog, reg_both(dst), reg_none(), SHF_OP_NONE, dst 2480 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst), SHF_SC_R_SHF, shift_amt); dst 2481 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_zext(nfp_prog, meta, dst); dst 2488 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst = insn->dst_reg * 2; dst 2490 drivers/net/ethernet/netronome/nfp/bpf/jit.c return __shr_imm(nfp_prog, meta, dst, insn->imm); dst 2497 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst, src; dst 2499 drivers/net/ethernet/netronome/nfp/bpf/jit.c dst = insn->dst_reg * 2; dst 2503 drivers/net/ethernet/netronome/nfp/bpf/jit.c return __shr_imm(nfp_prog, meta, dst, umin); dst 2507 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf_indir(nfp_prog, reg_both(dst), reg_none(), SHF_OP_NONE, dst 2508 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst), SHF_SC_R_SHF); dst 2509 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_zext(nfp_prog, meta, dst); dst 2514 drivers/net/ethernet/netronome/nfp/bpf/jit.c __shl_imm(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, u8 dst, dst 2518 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_shf(nfp_prog, reg_both(dst), reg_none(), SHF_OP_NONE, dst 2519 drivers/net/ethernet/netronome/nfp/bpf/jit.c reg_b(dst), SHF_SC_L_SHF, shift_amt); dst 2520 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_zext(nfp_prog, meta, dst); dst 2527 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst = insn->dst_reg * 2; dst 2529 drivers/net/ethernet/netronome/nfp/bpf/jit.c return __shl_imm(nfp_prog, 
meta, dst, insn->imm); dst 2536 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst, src; dst 2538 drivers/net/ethernet/netronome/nfp/bpf/jit.c dst = insn->dst_reg * 2; dst 2542 drivers/net/ethernet/netronome/nfp/bpf/jit.c return __shl_imm(nfp_prog, meta, dst, umin); dst 2545 drivers/net/ethernet/netronome/nfp/bpf/jit.c shl_reg64_lt32_low(nfp_prog, dst, src); dst 2546 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_zext(nfp_prog, meta, dst); dst 2583 drivers/net/ethernet/netronome/nfp/bpf/jit.c u8 dst; dst 2585 drivers/net/ethernet/netronome/nfp/bpf/jit.c dst = prev->insn.dst_reg * 2; dst 2589 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, reg_both(dst), imm_lo); dst 2593 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_mov(nfp_prog, reg_both(dst + 1), reg_a(dst)); dst 2595 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_immed(nfp_prog, reg_both(dst + 1), imm_hi); dst 2651 drivers/net/ethernet/netronome/nfp/bpf/jit.c swreg dst = reg_both(meta->insn.dst_reg * 2); dst 2657 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_mov(nfp_prog, dst, plen_reg(nfp_prog)); dst 2662 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_mov(nfp_prog, dst, pptr_reg(nfp_prog)); dst 2667 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(nfp_prog, dst, dst 2682 drivers/net/ethernet/netronome/nfp/bpf/jit.c swreg dst = reg_both(meta->insn.dst_reg * 2); dst 2688 drivers/net/ethernet/netronome/nfp/bpf/jit.c wrp_mov(nfp_prog, dst, pptr_reg(nfp_prog)); dst 2693 drivers/net/ethernet/netronome/nfp/bpf/jit.c emit_alu(nfp_prog, dst, dst 434 drivers/net/ethernet/netronome/nfp/bpf/offload.c nfp_bpf_perf_event_copy(void *dst, const void *src, dst 437 drivers/net/ethernet/netronome/nfp/bpf/offload.c memcpy(dst, src + off, len); dst 443 drivers/net/ethernet/netronome/nfp/flower/action.c flow.daddr = ip_tun->key.u.ipv4.dst; dst 448 drivers/net/ethernet/netronome/nfp/flower/action.c set_tun->ttl = ip4_dst_hoplimit(&rt->dst); dst 470 drivers/net/ethernet/netronome/nfp/flower/action.c pre_tun->ipv4_dst = ip_tun->key.u.ipv4.dst; dst 87 drivers/net/ethernet/netronome/nfp/flower/cmsg.c msg->portnum = cpu_to_be32(repr->dst->u.port_info.port_id); dst 113 drivers/net/ethernet/netronome/nfp/flower/cmsg.c msg->portnum = cpu_to_be32(repr->dst->u.port_info.port_id); dst 387 drivers/net/ethernet/netronome/nfp/flower/cmsg.h __be32 dst; dst 858 drivers/net/ethernet/netronome/nfp/flower/main.c app_priv->mtu_conf.portnum = repr->dst->u.port_info.port_id; dst 93 drivers/net/ethernet/netronome/nfp/flower/match.c ether_addr_copy(ext->mac_dst, &match.key->dst[0]); dst 95 drivers/net/ethernet/netronome/nfp/flower/match.c ether_addr_copy(msk->mac_dst, &match.mask->dst[0]); dst 145 drivers/net/ethernet/netronome/nfp/flower/match.c ext->port_dst = match.key->dst; dst 147 drivers/net/ethernet/netronome/nfp/flower/match.c msk->port_dst = match.mask->dst; dst 239 drivers/net/ethernet/netronome/nfp/flower/match.c ext->ipv4_dst = match.key->dst; dst 241 drivers/net/ethernet/netronome/nfp/flower/match.c msk->ipv4_dst = match.mask->dst; dst 262 drivers/net/ethernet/netronome/nfp/flower/match.c ext->ipv6_dst = match.key->dst; dst 264 drivers/net/ethernet/netronome/nfp/flower/match.c msk->ipv6_dst = match.mask->dst; dst 295 drivers/net/ethernet/netronome/nfp/flower/match.c ext->dst = match.key->dst; dst 297 drivers/net/ethernet/netronome/nfp/flower/match.c msk->dst = match.mask->dst; dst 461 drivers/net/ethernet/netronome/nfp/flower/match.c tun_dst = ((struct nfp_flower_ipv4_gre_tun *)ext)->ipv4.dst; dst 477 
drivers/net/ethernet/netronome/nfp/flower/match.c tun_dst = ((struct nfp_flower_ipv4_udp_tun *)ext)->ipv4.dst; dst 175 drivers/net/ethernet/netronome/nfp/flower/offload.c switch (enc_ports->dst) { dst 287 drivers/net/ethernet/netronome/nfp/flower/offload.c if (ipv4_addrs.mask->dst != cpu_to_be32(~0)) { dst 316 drivers/net/ethernet/netronome/nfp/flower/offload.c if (enc_ports.mask->dst != cpu_to_be16(~0)) { dst 410 drivers/net/ethernet/netronome/nfp/flower/tunnel_conf.c n = dst_neigh_lookup(&rt->dst, &flow.daddr); dst 168 drivers/net/ethernet/netronome/nfp/nfp_asm.c int swreg_to_unrestricted(swreg dst, swreg lreg, swreg rreg, dst 174 drivers/net/ethernet/netronome/nfp/nfp_asm.c if (swreg_type(dst) == NN_REG_IMM) dst 177 drivers/net/ethernet/netronome/nfp/nfp_asm.c if (swreg_type(dst) == NN_REG_GPR_B) dst 179 drivers/net/ethernet/netronome/nfp/nfp_asm.c if (swreg_type(dst) == NN_REG_GPR_BOTH) dst 181 drivers/net/ethernet/netronome/nfp/nfp_asm.c reg->dst = nfp_swreg_to_unreg(dst, true); dst 198 drivers/net/ethernet/netronome/nfp/nfp_asm.c reg->dst_lmextn = swreg_lmextn(dst); dst 249 drivers/net/ethernet/netronome/nfp/nfp_asm.c int swreg_to_restricted(swreg dst, swreg lreg, swreg rreg, dst 255 drivers/net/ethernet/netronome/nfp/nfp_asm.c if (swreg_type(dst) == NN_REG_IMM) dst 258 drivers/net/ethernet/netronome/nfp/nfp_asm.c if (swreg_type(dst) == NN_REG_GPR_B) dst 260 drivers/net/ethernet/netronome/nfp/nfp_asm.c if (swreg_type(dst) == NN_REG_GPR_BOTH) dst 262 drivers/net/ethernet/netronome/nfp/nfp_asm.c reg->dst = nfp_swreg_to_rereg(dst, true, false, NULL); dst 279 drivers/net/ethernet/netronome/nfp/nfp_asm.c reg->dst_lmextn = swreg_lmextn(dst); dst 371 drivers/net/ethernet/netronome/nfp/nfp_asm.h u16 dst; dst 381 drivers/net/ethernet/netronome/nfp/nfp_asm.h u8 dst; dst 390 drivers/net/ethernet/netronome/nfp/nfp_asm.h int swreg_to_unrestricted(swreg dst, swreg lreg, swreg rreg, dst 392 drivers/net/ethernet/netronome/nfp/nfp_asm.h int swreg_to_restricted(swreg dst, swreg lreg, swreg rreg, dst 191 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c dst_hold((struct dst_entry *)repr->dst); dst 192 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c skb_dst_set(skb, (struct dst_entry *)repr->dst); dst 193 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c skb->dev = repr->dst->u.port_info.lower_dev; dst 242 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c lower_dev = repr->dst->u.port_info.lower_dev; dst 285 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c if (repr->dst->u.port_info.lower_dev != lower) dst 298 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c dst_release((struct dst_entry *)repr->dst); dst 312 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c repr->dst = metadata_dst_alloc(0, METADATA_HW_PORT_MUX, GFP_KERNEL); dst 313 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c if (!repr->dst) dst 315 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c repr->dst->u.port_info.port_id = cmsg_port_id; dst 316 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c repr->dst->u.port_info.lower_dev = pf_netdev; dst 390 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c dst_release((struct dst_entry *)repr->dst); dst 53 drivers/net/ethernet/netronome/nfp/nfp_net_repr.h struct metadata_dst *dst; dst 87 drivers/net/ethernet/netronome/nfp/nfp_net_repr.h return priv->dst->u.port_info.port_id; dst 1221 drivers/net/ethernet/netronome/nfp/nfpcore/nfp6000_pcie.c u32 *dst = buff; dst 1225 drivers/net/ethernet/netronome/nfp/nfpcore/nfp6000_pcie.c *(dst++) = readl(priv->data + i); dst 116 
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c static void nfp_eth_copy_mac_reverse(u8 *dst, const u8 *src) dst 121 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst[ETH_ALEN - i - 1] = src[i]; dst 126 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c unsigned int index, struct nfp_eth_table_port *dst) dst 135 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->eth_index = FIELD_GET(NSP_ETH_PORT_INDEX, port); dst 136 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->index = index; dst 137 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->nbi = index / NSP_ETH_NBI_PORT_COUNT; dst 138 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->base = index % NSP_ETH_NBI_PORT_COUNT; dst 139 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->lanes = FIELD_GET(NSP_ETH_PORT_LANES, port); dst 141 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->enabled = FIELD_GET(NSP_ETH_STATE_ENABLED, state); dst 142 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->tx_enabled = FIELD_GET(NSP_ETH_STATE_TX_ENABLED, state); dst 143 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->rx_enabled = FIELD_GET(NSP_ETH_STATE_RX_ENABLED, state); dst 146 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->speed = dst->lanes * rate; dst 148 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->interface = FIELD_GET(NSP_ETH_STATE_INTERFACE, state); dst 149 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->media = FIELD_GET(NSP_ETH_STATE_MEDIA, state); dst 151 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c nfp_eth_copy_mac_reverse(dst->mac_addr, src->mac_addr); dst 153 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->label_port = FIELD_GET(NSP_ETH_PORT_PHYLABEL, port); dst 154 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->label_subport = FIELD_GET(NSP_ETH_PORT_LABEL, port); dst 159 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->override_changed = FIELD_GET(NSP_ETH_STATE_OVRD_CHNG, state); dst 160 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->aneg = FIELD_GET(NSP_ETH_STATE_ANEG, state); dst 166 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->fec_modes_supported |= fec << NFP_FEC_BASER_BIT; dst 168 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->fec_modes_supported |= fec << NFP_FEC_REED_SOLOMON_BIT; dst 169 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c if (dst->fec_modes_supported) dst 170 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->fec_modes_supported |= NFP_FEC_AUTO | NFP_FEC_DISABLED; dst 172 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c dst->fec = 1 << FIELD_GET(NSP_ETH_STATE_FEC, state); dst 1272 drivers/net/ethernet/qlogic/qed/qed_iscsi.c ether_addr_copy(con->remote_mac, conn_info->dst.mac); dst 1274 drivers/net/ethernet/qlogic/qed/qed_iscsi.c memcpy(con->remote_ip, conn_info->dst.ip, sizeof(con->remote_ip)); dst 1276 drivers/net/ethernet/qlogic/qed/qed_iscsi.c con->remote_port = conn_info->dst.port; dst 1789 drivers/net/ethernet/qlogic/qede/qede_filter.c (match.key->dst && match.mask->dst != U16_MAX)) { dst 1795 drivers/net/ethernet/qlogic/qede/qede_filter.c t->dst_port = match.key->dst; dst 1816 drivers/net/ethernet/qlogic/qede/qede_filter.c (memcmp(&match.key->dst, &zero_addr, sizeof(addr)) && dst 1817 drivers/net/ethernet/qlogic/qede/qede_filter.c memcmp(&match.mask->dst, &addr, sizeof(addr)))) { dst 1824 
drivers/net/ethernet/qlogic/qede/qede_filter.c memcpy(&t->dst_ipv6, &match.key->dst, sizeof(addr)); dst 1842 drivers/net/ethernet/qlogic/qede/qede_filter.c (match.key->dst && match.mask->dst != U32_MAX)) { dst 1848 drivers/net/ethernet/qlogic/qede/qede_filter.c t->dst_ipv4 = match.key->dst; dst 158 drivers/net/ethernet/qualcomm/qca_spi.c qcaspi_read_burst(struct qcaspi *qca, u8 *dst, u32 len) dst 171 drivers/net/ethernet/qualcomm/qca_spi.c transfer[1].rx_buf = dst; dst 187 drivers/net/ethernet/qualcomm/qca_spi.c qcaspi_read_legacy(struct qcaspi *qca, u8 *dst, u32 len) dst 196 drivers/net/ethernet/qualcomm/qca_spi.c transfer.rx_buf = dst; dst 971 drivers/net/ethernet/rocker/rocker_ofdpa.c __be16 eth_type, __be32 dst, dst 986 drivers/net/ethernet/rocker/rocker_ofdpa.c entry->key.ucast_routing.dst4 = dst; dst 2267 drivers/net/ethernet/rocker/rocker_ofdpa.c static int ofdpa_port_fib_ipv4(struct ofdpa_port *ofdpa_port, __be32 dst, dst 2302 drivers/net/ethernet/rocker/rocker_ofdpa.c err = ofdpa_flow_tbl_ucast4_routing(ofdpa_port, eth_type, dst, dst 2307 drivers/net/ethernet/rocker/rocker_ofdpa.c err, &dst); dst 2746 drivers/net/ethernet/rocker/rocker_ofdpa.c err = ofdpa_port_fib_ipv4(ofdpa_port, htonl(fen_info->dst), dst 2769 drivers/net/ethernet/rocker/rocker_ofdpa.c return ofdpa_port_fib_ipv4(ofdpa_port, htonl(fen_info->dst), dst 862 drivers/net/ethernet/sfc/falcon/rx.c spec.loc_host[0] = fk.addrs.v4addrs.dst; dst 865 drivers/net/ethernet/sfc/falcon/rx.c memcpy(spec.loc_host, &fk.addrs.v6addrs.dst, sizeof(struct in6_addr)); dst 869 drivers/net/ethernet/sfc/falcon/rx.c spec.loc_port = fk.ports.dst; dst 940 drivers/net/ethernet/sfc/rx.c req->spec.loc_host[0] = fk.addrs.v4addrs.dst; dst 944 drivers/net/ethernet/sfc/rx.c memcpy(req->spec.loc_host, &fk.addrs.v6addrs.dst, dst 949 drivers/net/ethernet/sfc/rx.c req->spec.loc_port = fk.ports.dst; dst 38 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c unsigned char *dst; dst 113 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c if (attr->dst) dst 114 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c ether_addr_copy(ehdr->h_dest, attr->dst); dst 222 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c NULL, attr->src, attr->dst); dst 248 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c unsigned char *dst = tpriv->packet->dst; dst 265 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c if (dst) { dst 266 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c if (!ether_addr_equal_unaligned(ehdr->h_dest, dst)) dst 369 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = priv->dev->dev_addr; dst 385 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = priv->dev->dev_addr; dst 556 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = gd_addr; dst 563 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = bd_addr; dst 600 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = gd_addr; dst 607 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = bd_addr; dst 646 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = uc_addr; dst 653 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = mc_addr; dst 692 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = mc_addr; dst 699 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = uc_addr; dst 776 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = priv->dev->dev_addr; dst 823 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = 
priv->dev->dev_addr; dst 865 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c if (!ether_addr_equal_unaligned(ehdr->h_dest, tpriv->packet->dst)) dst 928 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = priv->dev->dev_addr; dst 1004 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = priv->dev->dev_addr; dst 1105 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = priv->dev->dev_addr; dst 1143 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = priv->dev->dev_addr; dst 1164 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = priv->dev->dev_addr; dst 1186 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = priv->dev->dev_addr; dst 1208 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = priv->dev->dev_addr; dst 1252 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = priv->dev->dev_addr; dst 1292 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c static int __stmmac_test_l3filt(struct stmmac_priv *priv, u32 dst, u32 src, dst 1344 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c key.dst = htonl(dst); dst 1346 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c mask.dst = dst_mask; dst 1353 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = priv->dev->dev_addr; dst 1354 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.ip_dst = dst; dst 1388 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c static int __stmmac_test_l3filt(struct stmmac_priv *priv, u32 dst, u32 src, dst 1410 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c static int __stmmac_test_l4filt(struct stmmac_priv *priv, u32 dst, u32 src, dst 1472 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c keys.key.dst = htons(dst); dst 1474 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c masks.mask.dst = dst_mask; dst 1481 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = priv->dev->dev_addr; dst 1484 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dport = dst; dst 1518 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c static int __stmmac_test_l4filt(struct stmmac_priv *priv, u32 dst, u32 src, dst 1580 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c unsigned char dst[ETH_ALEN] = {0xff, 0xff, 0xff, 0xff, 0xff, 0xff}; dst 1607 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = dst; dst 1646 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = priv->dev->dev_addr; dst 1685 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = priv->dev->dev_addr; dst 1699 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c attr.dst = priv->dev->dev_addr; dst 435 drivers/net/ethernet/stmicro/stmmac/stmmac_tc.c hw_match = ntohl(match.key->dst) & ntohl(match.mask->dst); dst 483 drivers/net/ethernet/stmicro/stmmac/stmmac_tc.c hw_match = ntohs(match.key->dst) & ntohs(match.mask->dst); dst 1368 drivers/net/ethernet/sun/sunvnet_common.c skb_dst_set(skb, &rt->dst); dst 123 drivers/net/fddi/defza.c u64 *dst = to; dst 127 drivers/net/fddi/defza.c *dst++ = readq_u(src++); dst 130 drivers/net/fddi/defza.c dst_trail = (u32 *)dst; dst 135 drivers/net/fddi/defza.c u32 *dst = to; dst 138 drivers/net/fddi/defza.c *dst++ = readl_u(src++); dst 148 drivers/net/fddi/defza.c u64 __iomem *dst = to; dst 152 drivers/net/fddi/defza.c writeq_u(*src++, dst++); dst 155 drivers/net/fddi/defza.c dst_trail = (u32 __iomem *)dst; dst 160 drivers/net/fddi/defza.c u32 __iomem *dst = to; dst 163 drivers/net/fddi/defza.c writel_u(*src++, 
dst++); dst 173 drivers/net/fddi/defza.c u64 __iomem *dst = to; dst 177 drivers/net/fddi/defza.c writeq_u(readq_u(src++), dst++); dst 180 drivers/net/fddi/defza.c dst_trail = (u32 __iomem *)dst; dst 185 drivers/net/fddi/defza.c u32 __iomem *dst = to; dst 188 drivers/net/fddi/defza.c writel_u(readl_u(src++), dst++); dst 195 drivers/net/fddi/defza.c u64 __iomem *dst = to; dst 199 drivers/net/fddi/defza.c writeq_u(0, dst++); dst 201 drivers/net/fddi/defza.c dst_trail = (u32 __iomem *)dst; dst 205 drivers/net/fddi/defza.c u32 __iomem *dst = to; dst 208 drivers/net/fddi/defza.c writel_u(0, dst++); dst 136 drivers/net/geneve.c addr == node->geneve->info.key.u.ipv4.dst) dst 155 drivers/net/geneve.c ipv6_addr_equal(&addr6, &node->geneve->info.key.u.ipv6.dst)) dst 255 drivers/net/geneve.c skb_dst_set(skb, &tun_dst->dst); dst 740 drivers/net/geneve.c static int geneve_build_skb(struct dst_entry *dst, struct sk_buff *skb, dst 752 drivers/net/geneve.c min_headroom = LL_RESERVED_SPACE(dst->dev) + dst->header_len + dst 768 drivers/net/geneve.c dst_release(dst); dst 790 drivers/net/geneve.c fl4->daddr = info->key.u.ipv4.dst; dst 811 drivers/net/geneve.c if (rt->dst.dev == dev) { /* is this necessary? */ dst 817 drivers/net/geneve.c dst_cache_set_ip4(dst_cache, &rt->dst, fl4->saddr); dst 830 drivers/net/geneve.c struct dst_entry *dst = NULL; dst 840 drivers/net/geneve.c fl6->daddr = info->key.u.ipv6.dst; dst 852 drivers/net/geneve.c dst = dst_cache_get_ip6(dst_cache, &fl6->saddr); dst 853 drivers/net/geneve.c if (dst) dst 854 drivers/net/geneve.c return dst; dst 856 drivers/net/geneve.c dst = ipv6_stub->ipv6_dst_lookup_flow(geneve->net, gs6->sock->sk, fl6, dst 858 drivers/net/geneve.c if (IS_ERR(dst)) { dst 862 drivers/net/geneve.c if (dst->dev == dev) { /* is this necessary? */ dst 864 drivers/net/geneve.c dst_release(dst); dst 869 drivers/net/geneve.c dst_cache_set_ip6(dst_cache, dst, &fl6->saddr); dst 870 drivers/net/geneve.c return dst; dst 892 drivers/net/geneve.c skb_tunnel_check_pmtu(skb, &rt->dst, dst 907 drivers/net/geneve.c ttl = ttl ? : ip4_dst_hoplimit(&rt->dst); dst 925 drivers/net/geneve.c err = geneve_build_skb(&rt->dst, skb, info, xnet, sizeof(struct iphdr)); dst 944 drivers/net/geneve.c struct dst_entry *dst = NULL; dst 950 drivers/net/geneve.c dst = geneve_get_v6_dst(skb, dev, gs6, &fl6, info); dst 951 drivers/net/geneve.c if (IS_ERR(dst)) dst 952 drivers/net/geneve.c return PTR_ERR(dst); dst 954 drivers/net/geneve.c skb_tunnel_check_pmtu(skb, dst, GENEVE_IPV6_HLEN + info->options_len); dst 967 drivers/net/geneve.c ttl = ttl ? 
: ip6_dst_hoplimit(dst); dst 969 drivers/net/geneve.c err = geneve_build_skb(dst, skb, info, xnet, sizeof(struct ipv6hdr)); dst 973 drivers/net/geneve.c udp_tunnel6_xmit_skb(dst, gs6->sock->sk, skb, dev, dst 1050 drivers/net/geneve.c struct dst_entry *dst; dst 1054 drivers/net/geneve.c dst = geneve_get_v6_dst(skb, dev, gs6, &fl6, info); dst 1055 drivers/net/geneve.c if (IS_ERR(dst)) dst 1056 drivers/net/geneve.c return PTR_ERR(dst); dst 1058 drivers/net/geneve.c dst_release(dst); dst 1252 drivers/net/geneve.c return a->key.u.ipv4.dst == b->key.u.ipv4.dst; dst 1254 drivers/net/geneve.c return ipv6_addr_equal(&a->key.u.ipv6.dst, &b->key.u.ipv6.dst); dst 1347 drivers/net/geneve.c info->key.u.ipv4.dst = dst 1350 drivers/net/geneve.c if (ipv4_is_multicast(info->key.u.ipv4.dst)) { dst 1365 drivers/net/geneve.c info->key.u.ipv6.dst = dst 1368 drivers/net/geneve.c if (ipv6_addr_type(&info->key.u.ipv6.dst) & dst 1374 drivers/net/geneve.c if (ipv6_addr_is_multicast(&info->key.u.ipv6.dst)) { dst 1507 drivers/net/geneve.c struct flowi4 fl4 = { .daddr = info->key.u.ipv4.dst }; dst 1510 drivers/net/geneve.c if (!IS_ERR(rt) && rt->dst.dev) { dst 1511 drivers/net/geneve.c ldev_mtu = rt->dst.dev->mtu - GENEVE_IPV4_HLEN; dst 1523 drivers/net/geneve.c rt = rt6_lookup(geneve->net, &info->key.u.ipv6.dst, NULL, 0, dst 1526 drivers/net/geneve.c if (rt && rt->dst.dev) dst 1527 drivers/net/geneve.c ldev_mtu = rt->dst.dev->mtu - GENEVE_IPV6_HLEN; dst 1697 drivers/net/geneve.c info->key.u.ipv4.dst)) dst 1706 drivers/net/geneve.c &info->key.u.ipv6.dst)) dst 517 drivers/net/gtp.c if (rt->dst.dev == dev) { dst 529 drivers/net/gtp.c mtu = dst_mtu(&rt->dst) - dev->hard_header_len - dst 540 drivers/net/gtp.c mtu = dst_mtu(&rt->dst); dst 543 drivers/net/gtp.c rt->dst.ops->update_pmtu(&rt->dst, NULL, skb, mtu, false); dst 598 drivers/net/gtp.c ip4_dst_hoplimit(&pktinfo.rt->dst), dst 450 drivers/net/ieee802154/ca8210.c struct fulladdr dst; dst 1488 drivers/net/ieee802154/ca8210.c command.pdata.data_req.dst.mode = dst_address_mode; dst 1490 drivers/net/ieee802154/ca8210.c command.pdata.data_req.dst.pan_id[0] = LS_BYTE(dst_pan_id); dst 1491 drivers/net/ieee802154/ca8210.c command.pdata.data_req.dst.pan_id[1] = MS_BYTE(dst_pan_id); dst 1493 drivers/net/ieee802154/ca8210.c command.pdata.data_req.dst.address[0] = LS_BYTE( dst 1496 drivers/net/ieee802154/ca8210.c command.pdata.data_req.dst.address[1] = MS_BYTE( dst 1501 drivers/net/ieee802154/ca8210.c command.pdata.data_req.dst.address, dst 439 drivers/net/ipvlan/ipvlan_core.c skb_dst_set(skb, &rt->dst); dst 459 drivers/net/ipvlan/ipvlan_core.c struct dst_entry *dst; dst 471 drivers/net/ipvlan/ipvlan_core.c dst = ip6_route_output(net, NULL, &fl6); dst 472 drivers/net/ipvlan/ipvlan_core.c if (dst->error) { dst 473 drivers/net/ipvlan/ipvlan_core.c ret = dst->error; dst 474 drivers/net/ipvlan/ipvlan_core.c dst_release(dst); dst 477 drivers/net/ipvlan/ipvlan_core.c skb_dst_set(skb, dst); dst 63 drivers/net/ipvlan/ipvlan_l3s.c struct dst_entry *dst; dst 76 drivers/net/ipvlan/ipvlan_l3s.c dst = ip6_route_input_lookup(dev_net(sdev), sdev, &fl6, dst 78 drivers/net/ipvlan/ipvlan_l3s.c skb_dst_set(skb, dst); dst 2032 drivers/net/phy/phy_device.c static void phy_copy_pause_bits(unsigned long *dst, unsigned long *src) dst 2034 drivers/net/phy/phy_device.c linkmode_mod_bit(ETHTOOL_LINK_MODE_Asym_Pause_BIT, dst, dst 2036 drivers/net/phy/phy_device.c linkmode_mod_bit(ETHTOOL_LINK_MODE_Pause_BIT, dst, dst 1093 drivers/net/phy/phylink.c static void phylink_merge_link_mode(unsigned long *dst, const 
unsigned long *b) dst 1100 drivers/net/phy/phylink.c linkmode_and(dst, dst, mask); dst 1101 drivers/net/phy/phylink.c linkmode_or(dst, dst, b); dst 162 drivers/net/ppp/pptp.c tdev = rt->dst.dev; dst 227 drivers/net/ppp/pptp.c if (ip_dont_fragment(sk, &rt->dst)) dst 235 drivers/net/ppp/pptp.c iph->ttl = ip4_dst_hoplimit(&rt->dst); dst 239 drivers/net/ppp/pptp.c skb_dst_set(skb, &rt->dst); dst 452 drivers/net/ppp/pptp.c sk_setup_caps(sk, &rt->dst); dst 454 drivers/net/ppp/pptp.c po->chan.mtu = dst_mtu(&rt->dst); dst 491 drivers/net/usb/lan78xx.c u32 *dst; dst 508 drivers/net/usb/lan78xx.c dst = (u32 *)data; dst 511 drivers/net/usb/lan78xx.c dst[i] = src[i]; dst 227 drivers/net/vmxnet3/vmxnet3_drv.c u32 *dst = (u32 *)dstDesc + 2; dst 229 drivers/net/vmxnet3/vmxnet3_drv.c *dst = le32_to_cpu(*src); dst 238 drivers/net/vmxnet3/vmxnet3_drv.c u32 *dst = (u32 *)(dstDesc + 1); dst 243 drivers/net/vmxnet3/vmxnet3_drv.c dst--; dst 244 drivers/net/vmxnet3/vmxnet3_drv.c *dst = cpu_to_le32(*src); dst 254 drivers/net/vmxnet3/vmxnet3_drv.c u32 *dst = (u32 *)dstDesc; dst 256 drivers/net/vmxnet3/vmxnet3_drv.c *dst = le32_to_cpu(*src); dst 258 drivers/net/vmxnet3/vmxnet3_drv.c dst++; dst 127 drivers/net/vrf.c struct dst_entry *dst) dst 133 drivers/net/vrf.c skb_dst_set(skb, dst); dst 172 drivers/net/vrf.c struct dst_entry *dst; dst 173 drivers/net/vrf.c struct dst_entry *dst_null = &net->ipv6.ip6_null_entry->dst; dst 191 drivers/net/vrf.c dst = ip6_dst_lookup_flow(net, NULL, &fl6, NULL); dst 192 drivers/net/vrf.c if (IS_ERR(dst) || dst == dst_null) dst 201 drivers/net/vrf.c if (dst->dev == dev) dst 202 drivers/net/vrf.c return vrf_local_xmit(skb, dev, dst); dst 204 drivers/net/vrf.c skb_dst_set(skb, dst); dst 277 drivers/net/vrf.c if (rt->dst.dev == vrf_dev) dst 278 drivers/net/vrf.c return vrf_local_xmit(skb, vrf_dev, &rt->dst); dst 280 drivers/net/vrf.c skb_dst_set(skb, &rt->dst); dst 363 drivers/net/vrf.c struct dst_entry *dst = skb_dst(skb); dst 364 drivers/net/vrf.c struct net_device *dev = dst->dev; dst 375 drivers/net/vrf.c nexthop = rt6_nexthop((struct rt6_info *)dst, &ipv6_hdr(skb)->daddr); dst 376 drivers/net/vrf.c neigh = __ipv6_neigh_lookup_noref(dst->dev, nexthop); dst 378 drivers/net/vrf.c neigh = __neigh_create(&nd_tbl, nexthop, dst->dev, false); dst 387 drivers/net/vrf.c IP6_INC_STATS(dev_net(dst->dev), dst 388 drivers/net/vrf.c ip6_dst_idev(dst), IPSTATS_MIB_OUTNOROUTES); dst 410 drivers/net/vrf.c struct dst_entry *dst = NULL; dst 417 drivers/net/vrf.c dst = &rt6->dst; dst 418 drivers/net/vrf.c dst_hold(dst); dst 423 drivers/net/vrf.c if (unlikely(!dst)) { dst 429 drivers/net/vrf.c skb_dst_set(skb, dst); dst 489 drivers/net/vrf.c struct dst_entry *dst; dst 498 drivers/net/vrf.c dst = &rt6->dst; dst 499 drivers/net/vrf.c dev_put(dst->dev); dst 500 drivers/net/vrf.c dst->dev = net->loopback_dev; dst 501 drivers/net/vrf.c dev_hold(dst->dev); dst 502 drivers/net/vrf.c dst_release(dst); dst 527 drivers/net/vrf.c rt6->dst.output = vrf_output6; dst 556 drivers/net/vrf.c struct dst_entry *dst = skb_dst(skb); dst 557 drivers/net/vrf.c struct rtable *rt = (struct rtable *)dst; dst 558 drivers/net/vrf.c struct net_device *dev = dst->dev; dst 622 drivers/net/vrf.c struct dst_entry *dst = NULL; dst 629 drivers/net/vrf.c dst = &rth->dst; dst 630 drivers/net/vrf.c dst_hold(dst); dst 635 drivers/net/vrf.c if (unlikely(!dst)) { dst 641 drivers/net/vrf.c skb_dst_set(skb, dst); dst 718 drivers/net/vrf.c struct dst_entry *dst; dst 727 drivers/net/vrf.c dst = &rth->dst; dst 728 drivers/net/vrf.c 
dev_put(dst->dev); dst 729 drivers/net/vrf.c dst->dev = net->loopback_dev; dst 730 drivers/net/vrf.c dev_hold(dst->dev); dst 731 drivers/net/vrf.c dst_release(dst); dst 748 drivers/net/vrf.c rth->dst.output = vrf_output; dst 983 drivers/net/vrf.c if (unlikely(&rt6->dst == &net->ipv6.ip6_null_entry->dst)) dst 986 drivers/net/vrf.c skb_dst_set(skb, &rt6->dst); dst 1096 drivers/net/vrf.c struct dst_entry *dst = NULL; dst 1104 drivers/net/vrf.c dst = &net->ipv6.ip6_null_entry->dst; dst 1105 drivers/net/vrf.c return dst; dst 1113 drivers/net/vrf.c dst = &rt->dst; dst 1115 drivers/net/vrf.c return dst; dst 2154 drivers/net/vxlan.c static int vxlan_build_skb(struct sk_buff *skb, struct dst_entry *dst, dst 2176 drivers/net/vxlan.c min_headroom = LL_RESERVED_SPACE(dst->dev) + dst->header_len dst 2252 drivers/net/vxlan.c if (rt->dst.dev == dev) { dst 2260 drivers/net/vxlan.c dst_cache_set_ip4(dst_cache, &rt->dst, fl4.saddr); dst 2383 drivers/net/vxlan.c struct dst_entry *dst, dst 2398 drivers/net/vxlan.c dst_release(dst); dst 2423 drivers/net/vxlan.c union vxlan_addr *dst; dst 2440 drivers/net/vxlan.c dst = &rdst->remote_ip; dst 2441 drivers/net/vxlan.c if (vxlan_addr_any(dst)) { dst 2460 drivers/net/vxlan.c if (!ttl && vxlan_addr_multicast(dst)) dst 2468 drivers/net/vxlan.c if (dst->sa.sa_family == AF_INET) dst 2481 drivers/net/vxlan.c remote_ip.sin.sin_addr.s_addr = info->key.u.ipv4.dst; dst 2484 drivers/net/vxlan.c remote_ip.sin6.sin6_addr = info->key.u.ipv6.dst; dst 2487 drivers/net/vxlan.c dst = &remote_ip; dst 2506 drivers/net/vxlan.c if (dst->sa.sa_family == AF_INET) { dst 2515 drivers/net/vxlan.c dst->sin.sin_addr.s_addr, dst 2526 drivers/net/vxlan.c err = encap_bypass_if_local(skb, dev, vxlan, dst, dst 2528 drivers/net/vxlan.c &rt->dst, rt->rt_flags); dst 2546 drivers/net/vxlan.c ndst = &rt->dst; dst 2550 drivers/net/vxlan.c ttl = ttl ? 
: ip4_dst_hoplimit(&rt->dst); dst 2557 drivers/net/vxlan.c dst->sin.sin_addr.s_addr, tos, ttl, df, dst 2567 drivers/net/vxlan.c label, &dst->sin6.sin6_addr, dst 2580 drivers/net/vxlan.c err = encap_bypass_if_local(skb, dev, vxlan, dst, dst 2599 drivers/net/vxlan.c &dst->sin6.sin6_addr, tos, ttl, dst 2900 drivers/net/vxlan.c struct vxlan_rdst *dst = &vxlan->default_dst; dst 2902 drivers/net/vxlan.c dst->remote_ifindex); dst 2934 drivers/net/vxlan.c info->key.u.ipv4.dst, dst 2946 drivers/net/vxlan.c info->key.label, &info->key.u.ipv6.dst, dst 3507 drivers/net/vxlan.c struct vxlan_rdst *dst = &vxlan->default_dst; dst 3524 drivers/net/vxlan.c dst->remote_vni = conf->vni; dst 3526 drivers/net/vxlan.c memcpy(&dst->remote_ip, &conf->remote_ip, sizeof(conf->remote_ip)); dst 3529 drivers/net/vxlan.c dst->remote_ifindex = conf->remote_ifindex; dst 3583 drivers/net/vxlan.c struct vxlan_rdst *dst; dst 3586 drivers/net/vxlan.c dst = &vxlan->default_dst; dst 3594 drivers/net/vxlan.c if (!vxlan_addr_any(&dst->remote_ip)) { dst 3596 drivers/net/vxlan.c &dst->remote_ip, dst 3599 drivers/net/vxlan.c dst->remote_vni, dst 3600 drivers/net/vxlan.c dst->remote_vni, dst 3601 drivers/net/vxlan.c dst->remote_ifindex, dst 3612 drivers/net/vxlan.c if (dst->remote_ifindex) { dst 3613 drivers/net/vxlan.c remote_dev = __dev_get_by_index(net, dst->remote_ifindex); dst 3627 drivers/net/vxlan.c vxlan_fdb_insert(vxlan, all_zeros_mac, dst->remote_vni, f); dst 3642 drivers/net/vxlan.c dst->remote_dev = remote_dev; dst 3966 drivers/net/vxlan.c struct vxlan_rdst *dst; dst 3969 drivers/net/vxlan.c dst = &vxlan->default_dst; dst 3979 drivers/net/vxlan.c if (dst->remote_dev == lowerdev) dst 3982 drivers/net/vxlan.c err = netdev_adjacent_change_prepare(dst->remote_dev, lowerdev, dev, dst 3988 drivers/net/vxlan.c if (!vxlan_addr_equal(&conf.remote_ip, &dst->remote_ip)) { dst 4003 drivers/net/vxlan.c netdev_adjacent_change_abort(dst->remote_dev, dst 4008 drivers/net/vxlan.c if (!vxlan_addr_any(&dst->remote_ip)) dst 4010 drivers/net/vxlan.c dst->remote_ip, dst 4012 drivers/net/vxlan.c dst->remote_vni, dst 4013 drivers/net/vxlan.c dst->remote_vni, dst 4014 drivers/net/vxlan.c dst->remote_ifindex, dst 4022 drivers/net/vxlan.c netdev_adjacent_change_commit(dst->remote_dev, lowerdev, dev); dst 4023 drivers/net/vxlan.c if (lowerdev && lowerdev != dst->remote_dev) { dst 4024 drivers/net/vxlan.c dst->remote_dev = lowerdev; dst 4076 drivers/net/vxlan.c const struct vxlan_rdst *dst = &vxlan->default_dst; dst 4082 drivers/net/vxlan.c if (nla_put_u32(skb, IFLA_VXLAN_ID, be32_to_cpu(dst->remote_vni))) dst 4085 drivers/net/vxlan.c if (!vxlan_addr_any(&dst->remote_ip)) { dst 4086 drivers/net/vxlan.c if (dst->remote_ip.sa.sa_family == AF_INET) { dst 4088 drivers/net/vxlan.c dst->remote_ip.sin.sin_addr.s_addr)) dst 4093 drivers/net/vxlan.c &dst->remote_ip.sin6.sin6_addr)) dst 4099 drivers/net/vxlan.c if (dst->remote_ifindex && nla_put_u32(skb, IFLA_VXLAN_LINK, dst->remote_ifindex)) dst 4233 drivers/net/vxlan.c struct vxlan_rdst *dst = &vxlan->default_dst; dst 4241 drivers/net/vxlan.c if (dst->remote_ifindex == dev->ifindex) dst 906 drivers/net/wireless/ath/ath10k/htt_rx.c u8 dst[ETH_ALEN]; dst 1290 drivers/net/wireless/ath/ath10k/wmi-tlv.c struct ath10k_fw_stats_vdev *dst) dst 1294 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst->vdev_id = __le32_to_cpu(src->vdev_id); dst 1295 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst->beacon_snr = __le32_to_cpu(src->beacon_snr); dst 1296 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst->data_snr = 
__le32_to_cpu(src->data_snr); dst 1297 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst->num_rx_frames = __le32_to_cpu(src->num_rx_frames); dst 1298 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst->num_rts_fail = __le32_to_cpu(src->num_rts_fail); dst 1299 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst->num_rts_success = __le32_to_cpu(src->num_rts_success); dst 1300 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst->num_rx_err = __le32_to_cpu(src->num_rx_err); dst 1301 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst->num_rx_discard = __le32_to_cpu(src->num_rx_discard); dst 1302 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst->num_tx_not_acked = __le32_to_cpu(src->num_tx_not_acked); dst 1305 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst->num_tx_frames[i] = dst 1309 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst->num_tx_frames_retries[i] = dst 1313 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst->num_tx_frames_failures[i] = dst 1317 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst->tx_rate_history[i] = dst 1321 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst->beacon_rssi_history[i] = dst 1374 drivers/net/wireless/ath/ath10k/wmi-tlv.c struct ath10k_fw_stats_pdev *dst; dst 1385 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst = kzalloc(sizeof(*dst), GFP_ATOMIC); dst 1386 drivers/net/wireless/ath/ath10k/wmi-tlv.c if (!dst) dst 1389 drivers/net/wireless/ath/ath10k/wmi-tlv.c ath10k_wmi_pull_pdev_stats_base(&src->base, dst); dst 1390 drivers/net/wireless/ath/ath10k/wmi-tlv.c ath10k_wmi_pull_pdev_stats_tx(&src->tx, dst); dst 1391 drivers/net/wireless/ath/ath10k/wmi-tlv.c ath10k_wmi_pull_pdev_stats_rx(&src->rx, dst); dst 1392 drivers/net/wireless/ath/ath10k/wmi-tlv.c list_add_tail(&dst->list, &stats->pdevs); dst 1397 drivers/net/wireless/ath/ath10k/wmi-tlv.c struct ath10k_fw_stats_vdev *dst; dst 1408 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst = kzalloc(sizeof(*dst), GFP_ATOMIC); dst 1409 drivers/net/wireless/ath/ath10k/wmi-tlv.c if (!dst) dst 1412 drivers/net/wireless/ath/ath10k/wmi-tlv.c ath10k_wmi_tlv_pull_vdev_stats(src, dst); dst 1413 drivers/net/wireless/ath/ath10k/wmi-tlv.c list_add_tail(&dst->list, &stats->vdevs); dst 1418 drivers/net/wireless/ath/ath10k/wmi-tlv.c struct ath10k_fw_stats_peer *dst; dst 1429 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst = kzalloc(sizeof(*dst), GFP_ATOMIC); dst 1430 drivers/net/wireless/ath/ath10k/wmi-tlv.c if (!dst) dst 1433 drivers/net/wireless/ath/ath10k/wmi-tlv.c ath10k_wmi_pull_peer_stats(&src->old, dst); dst 1434 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst->peer_rx_rate = __le32_to_cpu(src->peer_rx_rate); dst 1443 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst->rx_duration = __le32_to_cpu(extd->rx_duration); dst 1452 drivers/net/wireless/ath/ath10k/wmi-tlv.c dst->rx_duration |= (u64)rx_duration_high << dst 1457 drivers/net/wireless/ath/ath10k/wmi-tlv.c list_add_tail(&dst->list, &stats->peers); dst 2776 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_pdev *dst) dst 2778 drivers/net/wireless/ath/ath10k/wmi.c dst->ch_noise_floor = __le32_to_cpu(src->chan_nf); dst 2779 drivers/net/wireless/ath/ath10k/wmi.c dst->tx_frame_count = __le32_to_cpu(src->tx_frame_count); dst 2780 drivers/net/wireless/ath/ath10k/wmi.c dst->rx_frame_count = __le32_to_cpu(src->rx_frame_count); dst 2781 drivers/net/wireless/ath/ath10k/wmi.c dst->rx_clear_count = __le32_to_cpu(src->rx_clear_count); dst 2782 drivers/net/wireless/ath/ath10k/wmi.c dst->cycle_count = __le32_to_cpu(src->cycle_count); dst 2783 drivers/net/wireless/ath/ath10k/wmi.c dst->phy_err_count = __le32_to_cpu(src->phy_err_count); 
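Editor's note: the ath10k wmi/wmi-tlv entries above and below all share one pattern — each firmware statistics field arrives as a little-endian __le32 and is copied field by field into a host-endian dst struct via __le32_to_cpu() inside the various ath10k_wmi_pull_*_stats helpers. The following is only a minimal userspace sketch of that pull pattern, with hypothetical struct and field names, and with le32toh()/htole32() from <endian.h> standing in for the kernel's __le32_to_cpu()/cpu_to_le32() so it builds outside the kernel:

    /* Hypothetical wire-format struct: fields as sent by firmware (LE). */
    #include <endian.h>
    #include <stdint.h>
    #include <stdio.h>

    struct fw_pdev_stats_wire {
            uint32_t ch_noise_floor;   /* little-endian on the wire */
            uint32_t tx_frame_count;
            uint32_t rx_frame_count;
    };

    /* Hypothetical host-endian copy, analogous to ath10k_fw_stats_pdev. */
    struct fw_pdev_stats_host {
            uint32_t ch_noise_floor;
            uint32_t tx_frame_count;
            uint32_t rx_frame_count;
    };

    static void pull_pdev_stats(const struct fw_pdev_stats_wire *src,
                                struct fw_pdev_stats_host *dst)
    {
            /* one LE-to-host conversion per field, as in the entries above */
            dst->ch_noise_floor = le32toh(src->ch_noise_floor);
            dst->tx_frame_count = le32toh(src->tx_frame_count);
            dst->rx_frame_count = le32toh(src->rx_frame_count);
    }

    int main(void)
    {
            struct fw_pdev_stats_wire wire = {
                    .ch_noise_floor = htole32(42),
                    .tx_frame_count = htole32(1234),
                    .rx_frame_count = htole32(5678),
            };
            struct fw_pdev_stats_host host;

            pull_pdev_stats(&wire, &host);
            printf("nf=%u tx=%u rx=%u\n", host.ch_noise_floor,
                   host.tx_frame_count, host.rx_frame_count);
            return 0;
    }

The per-field copy (rather than a memcpy) is what keeps the host struct usable on both little- and big-endian CPUs; the kernel code indexed here does the same with __le32_to_cpu(), just over many more fields.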
dst 2784 drivers/net/wireless/ath/ath10k/wmi.c dst->chan_tx_power = __le32_to_cpu(src->chan_tx_pwr); dst 2788 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_pdev *dst) dst 2790 drivers/net/wireless/ath/ath10k/wmi.c dst->comp_queued = __le32_to_cpu(src->comp_queued); dst 2791 drivers/net/wireless/ath/ath10k/wmi.c dst->comp_delivered = __le32_to_cpu(src->comp_delivered); dst 2792 drivers/net/wireless/ath/ath10k/wmi.c dst->msdu_enqued = __le32_to_cpu(src->msdu_enqued); dst 2793 drivers/net/wireless/ath/ath10k/wmi.c dst->mpdu_enqued = __le32_to_cpu(src->mpdu_enqued); dst 2794 drivers/net/wireless/ath/ath10k/wmi.c dst->wmm_drop = __le32_to_cpu(src->wmm_drop); dst 2795 drivers/net/wireless/ath/ath10k/wmi.c dst->local_enqued = __le32_to_cpu(src->local_enqued); dst 2796 drivers/net/wireless/ath/ath10k/wmi.c dst->local_freed = __le32_to_cpu(src->local_freed); dst 2797 drivers/net/wireless/ath/ath10k/wmi.c dst->hw_queued = __le32_to_cpu(src->hw_queued); dst 2798 drivers/net/wireless/ath/ath10k/wmi.c dst->hw_reaped = __le32_to_cpu(src->hw_reaped); dst 2799 drivers/net/wireless/ath/ath10k/wmi.c dst->underrun = __le32_to_cpu(src->underrun); dst 2800 drivers/net/wireless/ath/ath10k/wmi.c dst->tx_abort = __le32_to_cpu(src->tx_abort); dst 2801 drivers/net/wireless/ath/ath10k/wmi.c dst->mpdus_requed = __le32_to_cpu(src->mpdus_requed); dst 2802 drivers/net/wireless/ath/ath10k/wmi.c dst->tx_ko = __le32_to_cpu(src->tx_ko); dst 2803 drivers/net/wireless/ath/ath10k/wmi.c dst->data_rc = __le32_to_cpu(src->data_rc); dst 2804 drivers/net/wireless/ath/ath10k/wmi.c dst->self_triggers = __le32_to_cpu(src->self_triggers); dst 2805 drivers/net/wireless/ath/ath10k/wmi.c dst->sw_retry_failure = __le32_to_cpu(src->sw_retry_failure); dst 2806 drivers/net/wireless/ath/ath10k/wmi.c dst->illgl_rate_phy_err = __le32_to_cpu(src->illgl_rate_phy_err); dst 2807 drivers/net/wireless/ath/ath10k/wmi.c dst->pdev_cont_xretry = __le32_to_cpu(src->pdev_cont_xretry); dst 2808 drivers/net/wireless/ath/ath10k/wmi.c dst->pdev_tx_timeout = __le32_to_cpu(src->pdev_tx_timeout); dst 2809 drivers/net/wireless/ath/ath10k/wmi.c dst->pdev_resets = __le32_to_cpu(src->pdev_resets); dst 2810 drivers/net/wireless/ath/ath10k/wmi.c dst->phy_underrun = __le32_to_cpu(src->phy_underrun); dst 2811 drivers/net/wireless/ath/ath10k/wmi.c dst->txop_ovf = __le32_to_cpu(src->txop_ovf); dst 2816 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_pdev *dst) dst 2818 drivers/net/wireless/ath/ath10k/wmi.c dst->comp_queued = __le32_to_cpu(src->comp_queued); dst 2819 drivers/net/wireless/ath/ath10k/wmi.c dst->comp_delivered = __le32_to_cpu(src->comp_delivered); dst 2820 drivers/net/wireless/ath/ath10k/wmi.c dst->msdu_enqued = __le32_to_cpu(src->msdu_enqued); dst 2821 drivers/net/wireless/ath/ath10k/wmi.c dst->mpdu_enqued = __le32_to_cpu(src->mpdu_enqued); dst 2822 drivers/net/wireless/ath/ath10k/wmi.c dst->wmm_drop = __le32_to_cpu(src->wmm_drop); dst 2823 drivers/net/wireless/ath/ath10k/wmi.c dst->local_enqued = __le32_to_cpu(src->local_enqued); dst 2824 drivers/net/wireless/ath/ath10k/wmi.c dst->local_freed = __le32_to_cpu(src->local_freed); dst 2825 drivers/net/wireless/ath/ath10k/wmi.c dst->hw_queued = __le32_to_cpu(src->hw_queued); dst 2826 drivers/net/wireless/ath/ath10k/wmi.c dst->hw_reaped = __le32_to_cpu(src->hw_reaped); dst 2827 drivers/net/wireless/ath/ath10k/wmi.c dst->underrun = __le32_to_cpu(src->underrun); dst 2828 drivers/net/wireless/ath/ath10k/wmi.c dst->tx_abort = __le32_to_cpu(src->tx_abort); dst 2829 
drivers/net/wireless/ath/ath10k/wmi.c dst->mpdus_requed = __le32_to_cpu(src->mpdus_requed); dst 2830 drivers/net/wireless/ath/ath10k/wmi.c dst->tx_ko = __le32_to_cpu(src->tx_ko); dst 2831 drivers/net/wireless/ath/ath10k/wmi.c dst->data_rc = __le32_to_cpu(src->data_rc); dst 2832 drivers/net/wireless/ath/ath10k/wmi.c dst->self_triggers = __le32_to_cpu(src->self_triggers); dst 2833 drivers/net/wireless/ath/ath10k/wmi.c dst->sw_retry_failure = __le32_to_cpu(src->sw_retry_failure); dst 2834 drivers/net/wireless/ath/ath10k/wmi.c dst->illgl_rate_phy_err = __le32_to_cpu(src->illgl_rate_phy_err); dst 2835 drivers/net/wireless/ath/ath10k/wmi.c dst->pdev_cont_xretry = __le32_to_cpu(src->pdev_cont_xretry); dst 2836 drivers/net/wireless/ath/ath10k/wmi.c dst->pdev_tx_timeout = __le32_to_cpu(src->pdev_tx_timeout); dst 2837 drivers/net/wireless/ath/ath10k/wmi.c dst->pdev_resets = __le32_to_cpu(src->pdev_resets); dst 2838 drivers/net/wireless/ath/ath10k/wmi.c dst->phy_underrun = __le32_to_cpu(src->phy_underrun); dst 2839 drivers/net/wireless/ath/ath10k/wmi.c dst->txop_ovf = __le32_to_cpu(src->txop_ovf); dst 2840 drivers/net/wireless/ath/ath10k/wmi.c dst->hw_paused = __le32_to_cpu(src->hw_paused); dst 2841 drivers/net/wireless/ath/ath10k/wmi.c dst->seq_posted = __le32_to_cpu(src->seq_posted); dst 2842 drivers/net/wireless/ath/ath10k/wmi.c dst->seq_failed_queueing = dst 2844 drivers/net/wireless/ath/ath10k/wmi.c dst->seq_completed = __le32_to_cpu(src->seq_completed); dst 2845 drivers/net/wireless/ath/ath10k/wmi.c dst->seq_restarted = __le32_to_cpu(src->seq_restarted); dst 2846 drivers/net/wireless/ath/ath10k/wmi.c dst->mu_seq_posted = __le32_to_cpu(src->mu_seq_posted); dst 2847 drivers/net/wireless/ath/ath10k/wmi.c dst->mpdus_sw_flush = __le32_to_cpu(src->mpdus_sw_flush); dst 2848 drivers/net/wireless/ath/ath10k/wmi.c dst->mpdus_hw_filter = __le32_to_cpu(src->mpdus_hw_filter); dst 2849 drivers/net/wireless/ath/ath10k/wmi.c dst->mpdus_truncated = __le32_to_cpu(src->mpdus_truncated); dst 2850 drivers/net/wireless/ath/ath10k/wmi.c dst->mpdus_ack_failed = __le32_to_cpu(src->mpdus_ack_failed); dst 2851 drivers/net/wireless/ath/ath10k/wmi.c dst->mpdus_hw_filter = __le32_to_cpu(src->mpdus_hw_filter); dst 2852 drivers/net/wireless/ath/ath10k/wmi.c dst->mpdus_expired = __le32_to_cpu(src->mpdus_expired); dst 2856 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_pdev *dst) dst 2858 drivers/net/wireless/ath/ath10k/wmi.c dst->mid_ppdu_route_change = __le32_to_cpu(src->mid_ppdu_route_change); dst 2859 drivers/net/wireless/ath/ath10k/wmi.c dst->status_rcvd = __le32_to_cpu(src->status_rcvd); dst 2860 drivers/net/wireless/ath/ath10k/wmi.c dst->r0_frags = __le32_to_cpu(src->r0_frags); dst 2861 drivers/net/wireless/ath/ath10k/wmi.c dst->r1_frags = __le32_to_cpu(src->r1_frags); dst 2862 drivers/net/wireless/ath/ath10k/wmi.c dst->r2_frags = __le32_to_cpu(src->r2_frags); dst 2863 drivers/net/wireless/ath/ath10k/wmi.c dst->r3_frags = __le32_to_cpu(src->r3_frags); dst 2864 drivers/net/wireless/ath/ath10k/wmi.c dst->htt_msdus = __le32_to_cpu(src->htt_msdus); dst 2865 drivers/net/wireless/ath/ath10k/wmi.c dst->htt_mpdus = __le32_to_cpu(src->htt_mpdus); dst 2866 drivers/net/wireless/ath/ath10k/wmi.c dst->loc_msdus = __le32_to_cpu(src->loc_msdus); dst 2867 drivers/net/wireless/ath/ath10k/wmi.c dst->loc_mpdus = __le32_to_cpu(src->loc_mpdus); dst 2868 drivers/net/wireless/ath/ath10k/wmi.c dst->oversize_amsdu = __le32_to_cpu(src->oversize_amsdu); dst 2869 drivers/net/wireless/ath/ath10k/wmi.c dst->phy_errs = 
__le32_to_cpu(src->phy_errs); dst 2870 drivers/net/wireless/ath/ath10k/wmi.c dst->phy_err_drop = __le32_to_cpu(src->phy_err_drop); dst 2871 drivers/net/wireless/ath/ath10k/wmi.c dst->mpdu_errs = __le32_to_cpu(src->mpdu_errs); dst 2875 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_pdev *dst) dst 2877 drivers/net/wireless/ath/ath10k/wmi.c dst->ack_rx_bad = __le32_to_cpu(src->ack_rx_bad); dst 2878 drivers/net/wireless/ath/ath10k/wmi.c dst->rts_bad = __le32_to_cpu(src->rts_bad); dst 2879 drivers/net/wireless/ath/ath10k/wmi.c dst->rts_good = __le32_to_cpu(src->rts_good); dst 2880 drivers/net/wireless/ath/ath10k/wmi.c dst->fcs_bad = __le32_to_cpu(src->fcs_bad); dst 2881 drivers/net/wireless/ath/ath10k/wmi.c dst->no_beacons = __le32_to_cpu(src->no_beacons); dst 2882 drivers/net/wireless/ath/ath10k/wmi.c dst->mib_int_count = __le32_to_cpu(src->mib_int_count); dst 2886 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_peer *dst) dst 2888 drivers/net/wireless/ath/ath10k/wmi.c ether_addr_copy(dst->peer_macaddr, src->peer_macaddr.addr); dst 2889 drivers/net/wireless/ath/ath10k/wmi.c dst->peer_rssi = __le32_to_cpu(src->peer_rssi); dst 2890 drivers/net/wireless/ath/ath10k/wmi.c dst->peer_tx_rate = __le32_to_cpu(src->peer_tx_rate); dst 2895 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_peer *dst) dst 2897 drivers/net/wireless/ath/ath10k/wmi.c ether_addr_copy(dst->peer_macaddr, src->peer_macaddr.addr); dst 2898 drivers/net/wireless/ath/ath10k/wmi.c dst->peer_rssi = __le32_to_cpu(src->peer_rssi); dst 2899 drivers/net/wireless/ath/ath10k/wmi.c dst->peer_tx_rate = __le32_to_cpu(src->peer_tx_rate); dst 2900 drivers/net/wireless/ath/ath10k/wmi.c dst->peer_rx_rate = __le32_to_cpu(src->peer_rx_rate); dst 2905 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_vdev_extd *dst) dst 2907 drivers/net/wireless/ath/ath10k/wmi.c dst->vdev_id = __le32_to_cpu(src->vdev_id); dst 2908 drivers/net/wireless/ath/ath10k/wmi.c dst->ppdu_aggr_cnt = __le32_to_cpu(src->ppdu_aggr_cnt); dst 2909 drivers/net/wireless/ath/ath10k/wmi.c dst->ppdu_noack = __le32_to_cpu(src->ppdu_noack); dst 2910 drivers/net/wireless/ath/ath10k/wmi.c dst->mpdu_queued = __le32_to_cpu(src->mpdu_queued); dst 2911 drivers/net/wireless/ath/ath10k/wmi.c dst->ppdu_nonaggr_cnt = __le32_to_cpu(src->ppdu_nonaggr_cnt); dst 2912 drivers/net/wireless/ath/ath10k/wmi.c dst->mpdu_sw_requeued = __le32_to_cpu(src->mpdu_sw_requeued); dst 2913 drivers/net/wireless/ath/ath10k/wmi.c dst->mpdu_suc_retry = __le32_to_cpu(src->mpdu_suc_retry); dst 2914 drivers/net/wireless/ath/ath10k/wmi.c dst->mpdu_suc_multitry = __le32_to_cpu(src->mpdu_suc_multitry); dst 2915 drivers/net/wireless/ath/ath10k/wmi.c dst->mpdu_fail_retry = __le32_to_cpu(src->mpdu_fail_retry); dst 2916 drivers/net/wireless/ath/ath10k/wmi.c dst->tx_ftm_suc = __le32_to_cpu(src->tx_ftm_suc); dst 2917 drivers/net/wireless/ath/ath10k/wmi.c dst->tx_ftm_suc_retry = __le32_to_cpu(src->tx_ftm_suc_retry); dst 2918 drivers/net/wireless/ath/ath10k/wmi.c dst->tx_ftm_fail = __le32_to_cpu(src->tx_ftm_fail); dst 2919 drivers/net/wireless/ath/ath10k/wmi.c dst->rx_ftmr_cnt = __le32_to_cpu(src->rx_ftmr_cnt); dst 2920 drivers/net/wireless/ath/ath10k/wmi.c dst->rx_ftmr_dup_cnt = __le32_to_cpu(src->rx_ftmr_dup_cnt); dst 2921 drivers/net/wireless/ath/ath10k/wmi.c dst->rx_iftmr_cnt = __le32_to_cpu(src->rx_iftmr_cnt); dst 2922 drivers/net/wireless/ath/ath10k/wmi.c dst->rx_iftmr_dup_cnt = __le32_to_cpu(src->rx_iftmr_dup_cnt); dst 2941 drivers/net/wireless/ath/ath10k/wmi.c struct 
ath10k_fw_stats_pdev *dst; dst 2947 drivers/net/wireless/ath/ath10k/wmi.c dst = kzalloc(sizeof(*dst), GFP_ATOMIC); dst 2948 drivers/net/wireless/ath/ath10k/wmi.c if (!dst) dst 2951 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_base(&src->base, dst); dst 2952 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_tx(&src->tx, dst); dst 2953 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_rx(&src->rx, dst); dst 2955 drivers/net/wireless/ath/ath10k/wmi.c list_add_tail(&dst->list, &stats->pdevs); dst 2962 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_peer *dst; dst 2968 drivers/net/wireless/ath/ath10k/wmi.c dst = kzalloc(sizeof(*dst), GFP_ATOMIC); dst 2969 drivers/net/wireless/ath/ath10k/wmi.c if (!dst) dst 2972 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_peer_stats(src, dst); dst 2973 drivers/net/wireless/ath/ath10k/wmi.c list_add_tail(&dst->list, &stats->peers); dst 2995 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_pdev *dst; dst 3001 drivers/net/wireless/ath/ath10k/wmi.c dst = kzalloc(sizeof(*dst), GFP_ATOMIC); dst 3002 drivers/net/wireless/ath/ath10k/wmi.c if (!dst) dst 3005 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_base(&src->base, dst); dst 3006 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_tx(&src->tx, dst); dst 3007 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_rx(&src->rx, dst); dst 3008 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_extra(&src->extra, dst); dst 3010 drivers/net/wireless/ath/ath10k/wmi.c list_add_tail(&dst->list, &stats->pdevs); dst 3017 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_peer *dst; dst 3023 drivers/net/wireless/ath/ath10k/wmi.c dst = kzalloc(sizeof(*dst), GFP_ATOMIC); dst 3024 drivers/net/wireless/ath/ath10k/wmi.c if (!dst) dst 3027 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_peer_stats(&src->old, dst); dst 3029 drivers/net/wireless/ath/ath10k/wmi.c dst->peer_rx_rate = __le32_to_cpu(src->peer_rx_rate); dst 3031 drivers/net/wireless/ath/ath10k/wmi.c list_add_tail(&dst->list, &stats->peers); dst 3056 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_pdev *dst; dst 3062 drivers/net/wireless/ath/ath10k/wmi.c dst = kzalloc(sizeof(*dst), GFP_ATOMIC); dst 3063 drivers/net/wireless/ath/ath10k/wmi.c if (!dst) dst 3066 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_base(&src->base, dst); dst 3067 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_tx(&src->tx, dst); dst 3068 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_rx(&src->rx, dst); dst 3069 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_extra(&src->extra, dst); dst 3072 drivers/net/wireless/ath/ath10k/wmi.c list_add_tail(&dst->list, &stats->pdevs); dst 3093 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_peer *dst; dst 3099 drivers/net/wireless/ath/ath10k/wmi.c dst = kzalloc(sizeof(*dst), GFP_ATOMIC); dst 3100 drivers/net/wireless/ath/ath10k/wmi.c if (!dst) dst 3103 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_peer_stats(&src->old, dst); dst 3105 drivers/net/wireless/ath/ath10k/wmi.c dst->peer_rx_rate = __le32_to_cpu(src->peer_rx_rate); dst 3108 drivers/net/wireless/ath/ath10k/wmi.c list_add_tail(&dst->list, &stats->peers); dst 3133 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_pdev *dst; dst 3139 drivers/net/wireless/ath/ath10k/wmi.c dst = kzalloc(sizeof(*dst), GFP_ATOMIC); dst 3140 
drivers/net/wireless/ath/ath10k/wmi.c if (!dst) dst 3143 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_base(&src->base, dst); dst 3144 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_tx(&src->tx, dst); dst 3145 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_rx(&src->rx, dst); dst 3146 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_extra(&src->extra, dst); dst 3149 drivers/net/wireless/ath/ath10k/wmi.c list_add_tail(&dst->list, &stats->pdevs); dst 3170 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_peer *dst; dst 3182 drivers/net/wireless/ath/ath10k/wmi.c dst = kzalloc(sizeof(*dst), GFP_ATOMIC); dst 3183 drivers/net/wireless/ath/ath10k/wmi.c if (!dst) dst 3186 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_peer_stats(&src->common.old, dst); dst 3188 drivers/net/wireless/ath/ath10k/wmi.c dst->peer_rx_rate = __le32_to_cpu(src->common.peer_rx_rate); dst 3191 drivers/net/wireless/ath/ath10k/wmi.c dst->rx_duration = __le32_to_cpu(src->rx_duration); dst 3194 drivers/net/wireless/ath/ath10k/wmi.c list_add_tail(&dst->list, &stats->peers); dst 3225 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_pdev *dst; dst 3231 drivers/net/wireless/ath/ath10k/wmi.c dst = kzalloc(sizeof(*dst), GFP_ATOMIC); dst 3232 drivers/net/wireless/ath/ath10k/wmi.c if (!dst) dst 3235 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_base(&src->base, dst); dst 3236 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_10_4_pull_pdev_stats_tx(&src->tx, dst); dst 3237 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_rx(&src->rx, dst); dst 3238 drivers/net/wireless/ath/ath10k/wmi.c dst->rx_ovfl_errs = __le32_to_cpu(src->rx_ovfl_errs); dst 3239 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_pull_pdev_stats_extra(&src->extra, dst); dst 3241 drivers/net/wireless/ath/ath10k/wmi.c list_add_tail(&dst->list, &stats->pdevs); dst 3271 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_peer *dst; dst 3277 drivers/net/wireless/ath/ath10k/wmi.c dst = kzalloc(sizeof(*dst), GFP_ATOMIC); dst 3278 drivers/net/wireless/ath/ath10k/wmi.c if (!dst) dst 3281 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_10_4_pull_peer_stats(src, dst); dst 3282 drivers/net/wireless/ath/ath10k/wmi.c list_add_tail(&dst->list, &stats->peers); dst 3304 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_extd_stats_peer *dst; dst 3310 drivers/net/wireless/ath/ath10k/wmi.c dst = kzalloc(sizeof(*dst), GFP_ATOMIC); dst 3311 drivers/net/wireless/ath/ath10k/wmi.c if (!dst) dst 3314 drivers/net/wireless/ath/ath10k/wmi.c ether_addr_copy(dst->peer_macaddr, dst 3316 drivers/net/wireless/ath/ath10k/wmi.c dst->rx_duration = __le32_to_cpu(src->rx_duration); dst 3317 drivers/net/wireless/ath/ath10k/wmi.c list_add_tail(&dst->list, &stats->peers_extd); dst 3324 drivers/net/wireless/ath/ath10k/wmi.c struct ath10k_fw_stats_vdev_extd *dst; dst 3330 drivers/net/wireless/ath/ath10k/wmi.c dst = kzalloc(sizeof(*dst), GFP_ATOMIC); dst 3331 drivers/net/wireless/ath/ath10k/wmi.c if (!dst) dst 3333 drivers/net/wireless/ath/ath10k/wmi.c ath10k_wmi_10_4_pull_vdev_stats(src, dst); dst 3334 drivers/net/wireless/ath/ath10k/wmi.c list_add_tail(&dst->list, &stats->vdevs); dst 7300 drivers/net/wireless/ath/ath10k/wmi.h struct ath10k_fw_stats_pdev *dst); dst 7302 drivers/net/wireless/ath/ath10k/wmi.h struct ath10k_fw_stats_pdev *dst); dst 7304 drivers/net/wireless/ath/ath10k/wmi.h struct ath10k_fw_stats_pdev *dst); dst 7306 
drivers/net/wireless/ath/ath10k/wmi.h struct ath10k_fw_stats_pdev *dst); dst 7308 drivers/net/wireless/ath/ath10k/wmi.h struct ath10k_fw_stats_peer *dst); dst 3748 drivers/net/wireless/ath/ath6kl/wmi.c const u8 *dst, const u8 *data, dst 3764 drivers/net/wireless/ath/ath6kl/wmi.c freq, dst, data_len); dst 3767 drivers/net/wireless/ath/ath6kl/wmi.c memcpy(p->destination_addr, dst, ETH_ALEN); dst 2708 drivers/net/wireless/ath/ath6kl/wmi.h const u8 *dst, const u8 *data, dst 153 drivers/net/wireless/ath/ath9k/dynack.c u8 *dst, *src; dst 162 drivers/net/wireless/ath/ath9k/dynack.c dst = da->st_rbf.addr[da->st_rbf.h_rb].h_dest; dst 174 drivers/net/wireless/ath/ath9k/dynack.c sta = ieee80211_find_sta_by_ifaddr(ah->hw, dst, dst 181 drivers/net/wireless/ath/ath9k/dynack.c "%pM to %d [%u]\n", dst, dst 30 drivers/net/wireless/ath/wil6210/fw.c void wil_memset_toio_32(volatile void __iomem *dst, u32 val, dst 33 drivers/net/wireless/ath/wil6210/fw.c volatile u32 __iomem *d = dst; dst 285 drivers/net/wireless/ath/wil6210/fw_inc.c void __iomem *dst; dst 293 drivers/net/wireless/ath/wil6210/fw_inc.c if (!wil_fw_addr_check(wil, &dst, addr, s, "address")) dst 296 drivers/net/wireless/ath/wil6210/fw_inc.c wil_memcpy_toio_32(dst, d->data, s); dst 314 drivers/net/wireless/ath/wil6210/fw_inc.c void __iomem *dst; dst 333 drivers/net/wireless/ath/wil6210/fw_inc.c if (!wil_fw_addr_check(wil, &dst, d->addr, s, "address")) dst 339 drivers/net/wireless/ath/wil6210/fw_inc.c wil_memset_toio_32(dst, v, s); dst 385 drivers/net/wireless/ath/wil6210/fw_inc.c void __iomem *dst; dst 390 drivers/net/wireless/ath/wil6210/fw_inc.c if (!wil_fw_addr_check(wil, &dst, block[i].addr, 0, "address")) dst 393 drivers/net/wireless/ath/wil6210/fw_inc.c x = readl(dst); dst 398 drivers/net/wireless/ath/wil6210/fw_inc.c writel(y, dst); dst 152 drivers/net/wireless/ath/wil6210/main.c void wil_memcpy_fromio_32(void *dst, const volatile void __iomem *src, dst 155 drivers/net/wireless/ath/wil6210/main.c u32 *d = dst; dst 169 drivers/net/wireless/ath/wil6210/main.c void wil_memcpy_toio_32(volatile void __iomem *dst, const void *src, dst 172 drivers/net/wireless/ath/wil6210/main.c volatile u32 __iomem *d = dst; dst 1205 drivers/net/wireless/ath/wil6210/wil6210.h void wil_memcpy_fromio_32(void *dst, const volatile void __iomem *src, dst 1207 drivers/net/wireless/ath/wil6210/wil6210.h void wil_memcpy_toio_32(volatile void __iomem *dst, const void *src, dst 663 drivers/net/wireless/ath/wil6210/wmi.c void __iomem *dst; dst 731 drivers/net/wireless/ath/wil6210/wmi.c dst = wmi_buffer(wil, d_head.addr); dst 732 drivers/net/wireless/ath/wil6210/wmi.c if (!dst) { dst 746 drivers/net/wireless/ath/wil6210/wmi.c wil_memcpy_toio_32(dst, &cmd, sizeof(cmd)); dst 747 drivers/net/wireless/ath/wil6210/wmi.c wil_memcpy_toio_32(dst + sizeof(cmd), buf, len); dst 24653 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_n.c u32 *buf, *src, *dst, sz; dst 24662 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_n.c dst = buf + NPHY_PAPD_EPS_TBL_SIZE; dst 24690 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_n.c dst[end] = ((u32) phy_a7 << 13) | ((u32) phy_a6 & 0x1fff); dst 24696 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_n.c NPHY_TBL_ID_EPSILONTBL1, sz, start, 32, dst); dst 393 drivers/net/wireless/broadcom/brcm80211/brcmsmac/rate.c struct brcms_c_rateset *dst) dst 395 drivers/net/wireless/broadcom/brcm80211/brcmsmac/rate.c memcpy(dst, src, sizeof(struct brcms_c_rateset)); dst 408 drivers/net/wireless/broadcom/brcm80211/brcmsmac/rate.c 
brcms_c_rateset_filter(struct brcms_c_rateset *src, struct brcms_c_rateset *dst, dst 426 drivers/net/wireless/broadcom/brcm80211/brcmsmac/rate.c dst->rates[count++] = r & xmask; dst 428 drivers/net/wireless/broadcom/brcm80211/brcmsmac/rate.c dst->count = count; dst 429 drivers/net/wireless/broadcom/brcm80211/brcmsmac/rate.c dst->htphy_membership = src->htphy_membership; dst 432 drivers/net/wireless/broadcom/brcm80211/brcmsmac/rate.c memcpy(&dst->mcs[0], &src->mcs[0], MCSSET_LEN); dst 434 drivers/net/wireless/broadcom/brcm80211/brcmsmac/rate.c brcms_c_rateset_mcs_clear(dst); dst 224 drivers/net/wireless/broadcom/brcm80211/brcmsmac/rate.h struct brcms_c_rateset *dst); dst 230 drivers/net/wireless/broadcom/brcm80211/brcmsmac/rate.h struct brcms_c_rateset *dst, bool basic_only, dst 10338 drivers/net/wireless/intel/ipw2x00/ipw2200.c struct sk_buff *dst; dst 10348 drivers/net/wireless/intel/ipw2x00/ipw2200.c dst = alloc_skb(len + sizeof(*rt_hdr) + sizeof(u16)*2, GFP_ATOMIC); dst 10349 drivers/net/wireless/intel/ipw2x00/ipw2200.c if (!dst) dst 10352 drivers/net/wireless/intel/ipw2x00/ipw2200.c rt_hdr = skb_put(dst, sizeof(*rt_hdr)); dst 10359 drivers/net/wireless/intel/ipw2x00/ipw2200.c *(__le16*)skb_put(dst, sizeof(u16)) = cpu_to_le16( dst 10362 drivers/net/wireless/intel/ipw2x00/ipw2200.c *(__le16*)skb_put(dst, sizeof(u16)) = dst 10366 drivers/net/wireless/intel/ipw2x00/ipw2200.c *(__le16*)skb_put(dst, sizeof(u16)) = dst 10370 drivers/net/wireless/intel/ipw2x00/ipw2200.c *(__le16*)skb_put(dst, sizeof(u16)) = dst 10374 drivers/net/wireless/intel/ipw2x00/ipw2200.c rt_hdr->it_len = cpu_to_le16(dst->len); dst 10376 drivers/net/wireless/intel/ipw2x00/ipw2200.c skb_copy_from_linear_data(src, skb_put(dst, len), len); dst 10378 drivers/net/wireless/intel/ipw2x00/ipw2200.c if (!libipw_rx(priv->prom_priv->ieee, dst, &dummystats)) dst 10379 drivers/net/wireless/intel/ipw2x00/ipw2200.c dev_kfree_skb_any(dst); dst 58 drivers/net/wireless/intel/ipw2x00/libipw_rx.c u8 * dst) dst 77 drivers/net/wireless/intel/ipw2x00/libipw_rx.c ether_addr_equal(entry->dst_addr, dst)) dst 346 drivers/net/wireless/intel/ipw2x00/libipw_rx.c u8 dst[ETH_ALEN]; dst 493 drivers/net/wireless/intel/ipw2x00/libipw_rx.c memcpy(dst, hdr->addr1, ETH_ALEN); dst 497 drivers/net/wireless/intel/ipw2x00/libipw_rx.c memcpy(dst, hdr->addr3, ETH_ALEN); dst 503 drivers/net/wireless/intel/ipw2x00/libipw_rx.c memcpy(dst, hdr->addr3, ETH_ALEN); dst 507 drivers/net/wireless/intel/ipw2x00/libipw_rx.c memcpy(dst, hdr->addr1, ETH_ALEN); dst 743 drivers/net/wireless/intel/ipw2x00/libipw_rx.c memcpy(skb_push(skb, ETH_ALEN), dst, ETH_ALEN); dst 751 drivers/net/wireless/intel/ipw2x00/libipw_rx.c memcpy(skb_push(skb, ETH_ALEN), dst, ETH_ALEN); dst 771 drivers/net/wireless/intel/ipw2x00/libipw_rx.c if (is_multicast_ether_addr(dst)) { dst 779 drivers/net/wireless/intel/ipw2x00/libipw_rx.c } else if (hostap_is_sta_assoc(ieee->ap, dst)) { dst 1443 drivers/net/wireless/intel/ipw2x00/libipw_rx.c struct libipw_network *dst) dst 1448 drivers/net/wireless/intel/ipw2x00/libipw_rx.c return ((src->ssid_len == dst->ssid_len) && dst 1449 drivers/net/wireless/intel/ipw2x00/libipw_rx.c (src->channel == dst->channel) && dst 1450 drivers/net/wireless/intel/ipw2x00/libipw_rx.c ether_addr_equal_64bits(src->bssid, dst->bssid) && dst 1451 drivers/net/wireless/intel/ipw2x00/libipw_rx.c !memcmp(src->ssid, dst->ssid, src->ssid_len)); dst 1454 drivers/net/wireless/intel/ipw2x00/libipw_rx.c static void update_network(struct libipw_network *dst, dst 1465 
drivers/net/wireless/intel/ipw2x00/libipw_rx.c if (dst->channel == src->stats.received_channel) dst 1466 drivers/net/wireless/intel/ipw2x00/libipw_rx.c memcpy(&dst->stats, &src->stats, dst 1471 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->channel, src->stats.received_channel); dst 1473 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->capability = src->capability; dst 1474 drivers/net/wireless/intel/ipw2x00/libipw_rx.c memcpy(dst->rates, src->rates, src->rates_len); dst 1475 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->rates_len = src->rates_len; dst 1476 drivers/net/wireless/intel/ipw2x00/libipw_rx.c memcpy(dst->rates_ex, src->rates_ex, src->rates_ex_len); dst 1477 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->rates_ex_len = src->rates_ex_len; dst 1479 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->mode = src->mode; dst 1480 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->flags = src->flags; dst 1481 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->time_stamp[0] = src->time_stamp[0]; dst 1482 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->time_stamp[1] = src->time_stamp[1]; dst 1484 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->beacon_interval = src->beacon_interval; dst 1485 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->listen_interval = src->listen_interval; dst 1486 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->atim_window = src->atim_window; dst 1487 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->erp_value = src->erp_value; dst 1488 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->tim = src->tim; dst 1490 drivers/net/wireless/intel/ipw2x00/libipw_rx.c memcpy(dst->wpa_ie, src->wpa_ie, src->wpa_ie_len); dst 1491 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->wpa_ie_len = src->wpa_ie_len; dst 1492 drivers/net/wireless/intel/ipw2x00/libipw_rx.c memcpy(dst->rsn_ie, src->rsn_ie, src->rsn_ie_len); dst 1493 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->rsn_ie_len = src->rsn_ie_len; dst 1495 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->last_scanned = jiffies; dst 1497 drivers/net/wireless/intel/ipw2x00/libipw_rx.c old_param = dst->qos_data.old_param_count; dst 1498 drivers/net/wireless/intel/ipw2x00/libipw_rx.c if (dst->flags & NETWORK_HAS_QOS_MASK) dst 1499 drivers/net/wireless/intel/ipw2x00/libipw_rx.c memcpy(&dst->qos_data, &src->qos_data, dst 1502 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->qos_data.supported = src->qos_data.supported; dst 1503 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->qos_data.param_count = src->qos_data.param_count; dst 1506 drivers/net/wireless/intel/ipw2x00/libipw_rx.c if (dst->qos_data.supported == 1) { dst 1507 drivers/net/wireless/intel/ipw2x00/libipw_rx.c if (dst->ssid_len) dst 1510 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->ssid); dst 1515 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->qos_data.active = qos_active; dst 1516 drivers/net/wireless/intel/ipw2x00/libipw_rx.c dst->qos_data.old_param_count = old_param; dst 50 drivers/net/wireless/intersil/hostap/hostap.h int prism2_sta_send_mgmt(local_info_t *local, u8 *dst, u16 stype, dst 222 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c unsigned int frag, u8 *src, u8 *dst) dst 241 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c memcmp(entry->dst_addr, dst, ETH_ALEN) == 0) dst 731 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c u8 dst[ETH_ALEN]; dst 844 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c memcpy(dst, hdr->addr1, ETH_ALEN); dst 848 
drivers/net/wireless/intersil/hostap/hostap_80211_rx.c memcpy(dst, hdr->addr3, ETH_ALEN); dst 854 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c memcpy(dst, hdr->addr3, ETH_ALEN); dst 858 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c memcpy(dst, hdr->addr1, ETH_ALEN); dst 1043 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c memcpy(skb_push(skb, ETH_ALEN), dst, ETH_ALEN); dst 1051 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c memcpy(skb_push(skb, ETH_ALEN), dst, ETH_ALEN); dst 1070 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c if (dst[0] & 0x01) { dst 1078 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c } else if (hostap_is_sta_authorized(local->ap, dst)) { dst 1026 drivers/net/wireless/intersil/hostap/hostap_main.c int prism2_sta_send_mgmt(local_info_t *local, u8 *dst, u16 stype, dst 1040 drivers/net/wireless/intersil/hostap/hostap_main.c memcpy(mgmt->da, dst, ETH_ALEN); dst 1042 drivers/net/wireless/intersil/hostap/hostap_main.c memcpy(mgmt->bssid, dst, ETH_ALEN); dst 424 drivers/net/wireless/intersil/p54/eeprom.c struct p54_pa_curve_data_sample *dst; dst 427 drivers/net/wireless/intersil/p54/eeprom.c (curve_data->points_per_channel*sizeof(*dst) + 2) * dst 439 drivers/net/wireless/intersil/p54/eeprom.c sizeof(*dst) * curve_data->points_per_channel; dst 451 drivers/net/wireless/intersil/p54/eeprom.c dst = target; dst 454 drivers/net/wireless/intersil/p54/eeprom.c dst->rf_power = src->rf_power; dst 455 drivers/net/wireless/intersil/p54/eeprom.c dst->pa_detector = src->pa_detector; dst 456 drivers/net/wireless/intersil/p54/eeprom.c dst->data_64qam = src->pcv; dst 459 drivers/net/wireless/intersil/p54/eeprom.c dst->data_16qam = SUB(src->pcv, 12); dst 460 drivers/net/wireless/intersil/p54/eeprom.c dst->data_qpsk = SUB(dst->data_16qam, 12); dst 461 drivers/net/wireless/intersil/p54/eeprom.c dst->data_bpsk = SUB(dst->data_qpsk, 12); dst 462 drivers/net/wireless/intersil/p54/eeprom.c dst->data_barker = SUB(dst->data_bpsk, 14); dst 464 drivers/net/wireless/intersil/p54/eeprom.c target += sizeof(*dst); dst 476 drivers/net/wireless/intersil/p54/eeprom.c struct p54_pa_curve_data_sample *dst; dst 479 drivers/net/wireless/intersil/p54/eeprom.c (curve_data->points_per_channel*sizeof(*dst) + 2) * dst 491 drivers/net/wireless/intersil/p54/eeprom.c sizeof(*dst) * curve_data->points_per_channel; dst 505 drivers/net/wireless/intersil/p54/eeprom.c target += sizeof(*dst); dst 705 drivers/net/wireless/intersil/p54/eeprom.c struct p54_cal_database *dst; dst 716 drivers/net/wireless/intersil/p54/eeprom.c dst = kmalloc(sizeof(*dst) + payload_len, GFP_KERNEL); dst 717 drivers/net/wireless/intersil/p54/eeprom.c if (!dst) dst 720 drivers/net/wireless/intersil/p54/eeprom.c dst->entries = entries; dst 721 drivers/net/wireless/intersil/p54/eeprom.c dst->entry_size = entry_size; dst 722 drivers/net/wireless/intersil/p54/eeprom.c dst->offset = offset; dst 723 drivers/net/wireless/intersil/p54/eeprom.c dst->len = payload_len; dst 725 drivers/net/wireless/intersil/p54/eeprom.c memcpy(dst->data, src->data, payload_len); dst 726 drivers/net/wireless/intersil/p54/eeprom.c return dst; dst 831 drivers/net/wireless/intersil/p54/eeprom.c u16 *dst; dst 842 drivers/net/wireless/intersil/p54/eeprom.c dst = (void *) priv->rssi_db->data; dst 845 drivers/net/wireless/intersil/p54/eeprom.c *(dst++) = (s16) le16_to_cpu(*(src++)); dst 1884 drivers/net/wireless/intersil/prism54/isl_ioctl.c struct sockaddr *dst = (struct sockaddr *) extra; dst 1892 
drivers/net/wireless/intersil/prism54/isl_ioctl.c memcpy(dst->sa_data, entry->addr, ETH_ALEN); dst 1893 drivers/net/wireless/intersil/prism54/isl_ioctl.c dst->sa_family = ARPHRD_ETHER; dst 1895 drivers/net/wireless/intersil/prism54/isl_ioctl.c dst++; dst 3327 drivers/net/wireless/mac80211_hwsim.c const u8 *dst; dst 3338 drivers/net/wireless/mac80211_hwsim.c dst = (void *)nla_data(info->attrs[HWSIM_ATTR_ADDR_RECEIVER]); dst 3353 drivers/net/wireless/mac80211_hwsim.c data2 = get_hwsim_data_ref_from_addr(dst); dst 1467 drivers/net/wireless/marvell/mwifiex/sta_cmd.c static u32 mwifiex_parse_cal_cfg(u8 *src, size_t len, u8 *dst) dst 1469 drivers/net/wireless/marvell/mwifiex/sta_cmd.c u8 *s = src, *d = dst; dst 1484 drivers/net/wireless/marvell/mwifiex/sta_cmd.c return d - dst; dst 501 drivers/net/wireless/quantenna/qtnfmac/commands.c qtnf_sta_info_parse_flags(struct nl80211_sta_flag_update *dst, dst 506 drivers/net/wireless/quantenna/qtnfmac/commands.c dst->mask = 0; dst 507 drivers/net/wireless/quantenna/qtnfmac/commands.c dst->set = 0; dst 513 drivers/net/wireless/quantenna/qtnfmac/commands.c dst->mask |= BIT(NL80211_STA_FLAG_AUTHORIZED); dst 515 drivers/net/wireless/quantenna/qtnfmac/commands.c dst->set |= BIT(NL80211_STA_FLAG_AUTHORIZED); dst 519 drivers/net/wireless/quantenna/qtnfmac/commands.c dst->mask |= BIT(NL80211_STA_FLAG_SHORT_PREAMBLE); dst 521 drivers/net/wireless/quantenna/qtnfmac/commands.c dst->set |= BIT(NL80211_STA_FLAG_SHORT_PREAMBLE); dst 525 drivers/net/wireless/quantenna/qtnfmac/commands.c dst->mask |= BIT(NL80211_STA_FLAG_WME); dst 527 drivers/net/wireless/quantenna/qtnfmac/commands.c dst->set |= BIT(NL80211_STA_FLAG_WME); dst 531 drivers/net/wireless/quantenna/qtnfmac/commands.c dst->mask |= BIT(NL80211_STA_FLAG_MFP); dst 533 drivers/net/wireless/quantenna/qtnfmac/commands.c dst->set |= BIT(NL80211_STA_FLAG_MFP); dst 537 drivers/net/wireless/quantenna/qtnfmac/commands.c dst->mask |= BIT(NL80211_STA_FLAG_AUTHENTICATED); dst 539 drivers/net/wireless/quantenna/qtnfmac/commands.c dst->set |= BIT(NL80211_STA_FLAG_AUTHENTICATED); dst 543 drivers/net/wireless/quantenna/qtnfmac/commands.c dst->mask |= BIT(NL80211_STA_FLAG_TDLS_PEER); dst 545 drivers/net/wireless/quantenna/qtnfmac/commands.c dst->set |= BIT(NL80211_STA_FLAG_TDLS_PEER); dst 549 drivers/net/wireless/quantenna/qtnfmac/commands.c dst->mask |= BIT(NL80211_STA_FLAG_ASSOCIATED); dst 551 drivers/net/wireless/quantenna/qtnfmac/commands.c dst->set |= BIT(NL80211_STA_FLAG_ASSOCIATED); dst 454 drivers/net/wireless/realtek/rtw88/mac.c iddma_enable(struct rtw_dev *rtwdev, u32 src, u32 dst, u32 ctrl) dst 457 drivers/net/wireless/realtek/rtw88/mac.c rtw_write32(rtwdev, REG_DDMA_CH0DA, dst); dst 466 drivers/net/wireless/realtek/rtw88/mac.c static int iddma_download_firmware(struct rtw_dev *rtwdev, u32 src, u32 dst, dst 478 drivers/net/wireless/realtek/rtw88/mac.c if (iddma_enable(rtwdev, src, dst, ch0_ctrl)) dst 520 drivers/net/wireless/realtek/rtw88/mac.c u32 src, u32 dst, u32 size) dst 553 drivers/net/wireless/realtek/rtw88/mac.c dst + mem_offset, pkt_size, dst 563 drivers/net/wireless/realtek/rtw88/mac.c if (!check_fw_checksum(rtwdev, dst)) dst 68 drivers/net/wireless/st/cw1200/cw1200_sdio.c void *dst, int count) dst 70 drivers/net/wireless/st/cw1200/cw1200_sdio.c return sdio_memcpy_fromio(self->func, dst, addr, count); dst 59 drivers/net/wireless/st/cw1200/cw1200_spi.c void *dst, int count) dst 70 drivers/net/wireless/st/cw1200/cw1200_spi.c .rx_buf = dst, dst 115 drivers/net/wireless/st/cw1200/cw1200_spi.c uint16_t *buf = 
(uint16_t *)dst; dst 21 drivers/net/wireless/st/cw1200/hwbus.h void *dst, int count); dst 117 drivers/net/wireless/st/cw1200/scan.c struct wsm_ssid *dst = &priv->scan.ssids[priv->scan.n_ssids]; dst 118 drivers/net/wireless/st/cw1200/scan.c memcpy(&dst->ssid[0], req->ssids[i].ssid, sizeof(dst->ssid)); dst 119 drivers/net/wireless/st/cw1200/scan.c dst->length = req->ssids[i].ssid_len; dst 365 drivers/net/wireless/st/cw1200/txrx.c struct wsm_tx_rate_retry_policy *dst = dst 367 drivers/net/wireless/st/cw1200/txrx.c dst->index = i; dst 368 drivers/net/wireless/st/cw1200/txrx.c dst->short_retries = priv->short_frame_max_tx_count; dst 369 drivers/net/wireless/st/cw1200/txrx.c dst->long_retries = priv->long_frame_max_tx_count; dst 371 drivers/net/wireless/st/cw1200/txrx.c dst->flags = WSM_TX_RATE_POLICY_FLAG_TERMINATE_WHEN_FINISHED | dst 373 drivers/net/wireless/st/cw1200/txrx.c memcpy(dst->rate_count_indices, src->tbl, dst 374 drivers/net/wireless/st/cw1200/txrx.c sizeof(dst->rate_count_indices)); dst 194 drivers/net/xen-netback/hash.c memcpy(&data[4], &flow.addrs.v4addrs.dst, 4); dst 196 drivers/net/xen-netback/hash.c memcpy(&data[10], &flow.ports.dst, 2); dst 204 drivers/net/xen-netback/hash.c memcpy(&data[4], &flow.addrs.v4addrs.dst, 4); dst 218 drivers/net/xen-netback/hash.c memcpy(&data[16], &flow.addrs.v6addrs.dst, 16); dst 220 drivers/net/xen-netback/hash.c memcpy(&data[34], &flow.ports.dst, 2); dst 228 drivers/net/xen-netback/hash.c memcpy(&data[16], &flow.addrs.v6addrs.dst, 16); dst 779 drivers/ntb/test/ntb_perf.c void __iomem *dst, void *src, size_t len) dst 787 drivers/ntb/test/ntb_perf.c memcpy_toio(dst, src, len); dst 794 drivers/ntb/test/ntb_perf.c offset_in_page(dst), len)) dst 810 drivers/ntb/test/ntb_perf.c unmap->addr[1] = dma_map_page(dma_dev, virt_to_page(dst), dst 811 drivers/ntb/test/ntb_perf.c offset_in_page(dst), len, DMA_FROM_DEVICE); dst 256 drivers/nvdimm/label.c struct nd_namespace_index *dst, dst 260 drivers/nvdimm/label.c if (!dst || !src) dst 263 drivers/nvdimm/label.c memcpy(dst, src, sizeof_namespace_index(ndd)); dst 260 drivers/nvme/host/lightnvm.c static void nvme_nvm_set_addr_12(struct nvm_addrf_12 *dst, dst 263 drivers/nvme/host/lightnvm.c dst->ch_len = src->ch_len; dst 264 drivers/nvme/host/lightnvm.c dst->lun_len = src->lun_len; dst 265 drivers/nvme/host/lightnvm.c dst->blk_len = src->blk_len; dst 266 drivers/nvme/host/lightnvm.c dst->pg_len = src->pg_len; dst 267 drivers/nvme/host/lightnvm.c dst->pln_len = src->pln_len; dst 268 drivers/nvme/host/lightnvm.c dst->sec_len = src->sec_len; dst 270 drivers/nvme/host/lightnvm.c dst->ch_offset = src->ch_offset; dst 271 drivers/nvme/host/lightnvm.c dst->lun_offset = src->lun_offset; dst 272 drivers/nvme/host/lightnvm.c dst->blk_offset = src->blk_offset; dst 273 drivers/nvme/host/lightnvm.c dst->pg_offset = src->pg_offset; dst 274 drivers/nvme/host/lightnvm.c dst->pln_offset = src->pln_offset; dst 275 drivers/nvme/host/lightnvm.c dst->sec_offset = src->sec_offset; dst 277 drivers/nvme/host/lightnvm.c dst->ch_mask = ((1ULL << dst->ch_len) - 1) << dst->ch_offset; dst 278 drivers/nvme/host/lightnvm.c dst->lun_mask = ((1ULL << dst->lun_len) - 1) << dst->lun_offset; dst 279 drivers/nvme/host/lightnvm.c dst->blk_mask = ((1ULL << dst->blk_len) - 1) << dst->blk_offset; dst 280 drivers/nvme/host/lightnvm.c dst->pg_mask = ((1ULL << dst->pg_len) - 1) << dst->pg_offset; dst 281 drivers/nvme/host/lightnvm.c dst->pln_mask = ((1ULL << dst->pln_len) - 1) << dst->pln_offset; dst 282 drivers/nvme/host/lightnvm.c dst->sec_mask = ((1ULL 
<< dst->sec_len) - 1) << dst->sec_offset; dst 375 drivers/nvme/host/lightnvm.c static void nvme_nvm_set_addr_20(struct nvm_addrf *dst, dst 378 drivers/nvme/host/lightnvm.c dst->ch_len = src->grp_len; dst 379 drivers/nvme/host/lightnvm.c dst->lun_len = src->pu_len; dst 380 drivers/nvme/host/lightnvm.c dst->chk_len = src->chk_len; dst 381 drivers/nvme/host/lightnvm.c dst->sec_len = src->lba_len; dst 383 drivers/nvme/host/lightnvm.c dst->sec_offset = 0; dst 384 drivers/nvme/host/lightnvm.c dst->chk_offset = dst->sec_len; dst 385 drivers/nvme/host/lightnvm.c dst->lun_offset = dst->chk_offset + dst->chk_len; dst 386 drivers/nvme/host/lightnvm.c dst->ch_offset = dst->lun_offset + dst->lun_len; dst 388 drivers/nvme/host/lightnvm.c dst->ch_mask = ((1ULL << dst->ch_len) - 1) << dst->ch_offset; dst 389 drivers/nvme/host/lightnvm.c dst->lun_mask = ((1ULL << dst->lun_len) - 1) << dst->lun_offset; dst 390 drivers/nvme/host/lightnvm.c dst->chk_mask = ((1ULL << dst->chk_len) - 1) << dst->chk_offset; dst 391 drivers/nvme/host/lightnvm.c dst->sec_mask = ((1ULL << dst->sec_len) - 1) << dst->sec_offset; dst 787 drivers/perf/arm_dsu_pmu.c int dst; dst 794 drivers/perf/arm_dsu_pmu.c dst = dsu_pmu_get_online_cpu_any_but(dsu_pmu, cpu); dst 796 drivers/perf/arm_dsu_pmu.c if (dst >= nr_cpu_ids) { dst 801 drivers/perf/arm_dsu_pmu.c perf_pmu_migrate_context(&dsu_pmu->pmu, cpu, dst); dst 802 drivers/perf/arm_dsu_pmu.c dsu_pmu_set_active_cpu(dst, dsu_pmu); dst 94 drivers/phy/phy-xgene.c #define REGSPEC_CFG_I_TX_WORDMODE0_SET(dst, src) \ dst 95 drivers/phy/phy-xgene.c (((dst) & ~0x00070000) | (((u32) (src) << 16) & 0x00070000)) dst 96 drivers/phy/phy-xgene.c #define REGSPEC_CFG_I_RX_WORDMODE0_SET(dst, src) \ dst 97 drivers/phy/phy-xgene.c (((dst) & ~0x00e00000) | (((u32) (src) << 21) & 0x00e00000)) dst 99 drivers/phy/phy-xgene.c #define REGSPEC_CFG_I_CUSTOMER_PIN_MODE0_SET(dst, src) \ dst 100 drivers/phy/phy-xgene.c (((dst) & ~0x00007fff) | (((u32) (src)) & 0x00007fff)) dst 102 drivers/phy/phy-xgene.c #define CFG_I_SPD_SEL_CDR_OVR1_SET(dst, src) \ dst 103 drivers/phy/phy-xgene.c (((dst) & ~0x0000000f) | (((u32) (src)) & 0x0000000f)) dst 109 drivers/phy/phy-xgene.c #define CFG_IND_ADDR_SET(dst, src) \ dst 110 drivers/phy/phy-xgene.c (((dst) & ~0x003ffff0) | (((u32) (src) << 4) & 0x003ffff0)) dst 114 drivers/phy/phy-xgene.c #define I_RESET_B_SET(dst, src) \ dst 115 drivers/phy/phy-xgene.c (((dst) & ~0x00000001) | (((u32) (src)) & 0x00000001)) dst 116 drivers/phy/phy-xgene.c #define I_PLL_FBDIV_SET(dst, src) \ dst 117 drivers/phy/phy-xgene.c (((dst) & ~0x001ff000) | (((u32) (src) << 12) & 0x001ff000)) dst 118 drivers/phy/phy-xgene.c #define I_CUSTOMEROV_SET(dst, src) \ dst 119 drivers/phy/phy-xgene.c (((dst) & ~0x00000f80) | (((u32) (src) << 7) & 0x00000f80)) dst 126 drivers/phy/phy-xgene.c #define CMU_REG0_PLL_REF_SEL_SET(dst, src) \ dst 127 drivers/phy/phy-xgene.c (((dst) & ~0x00002000) | (((u32) (src) << 13) & 0x00002000)) dst 129 drivers/phy/phy-xgene.c #define CMU_REG0_CAL_COUNT_RESOL_SET(dst, src) \ dst 130 drivers/phy/phy-xgene.c (((dst) & ~0x000000e0) | (((u32) (src) << 5) & 0x000000e0)) dst 132 drivers/phy/phy-xgene.c #define CMU_REG1_PLL_CP_SET(dst, src) \ dst 133 drivers/phy/phy-xgene.c (((dst) & ~0x00003c00) | (((u32) (src) << 10) & 0x00003c00)) dst 134 drivers/phy/phy-xgene.c #define CMU_REG1_PLL_MANUALCAL_SET(dst, src) \ dst 135 drivers/phy/phy-xgene.c (((dst) & ~0x00000008) | (((u32) (src) << 3) & 0x00000008)) dst 136 drivers/phy/phy-xgene.c #define CMU_REG1_PLL_CP_SEL_SET(dst, src) \ dst 137 
drivers/phy/phy-xgene.c (((dst) & ~0x000003e0) | (((u32) (src) << 5) & 0x000003e0)) dst 139 drivers/phy/phy-xgene.c #define CMU_REG1_REFCLK_CMOS_SEL_SET(dst, src) \ dst 140 drivers/phy/phy-xgene.c (((dst) & ~0x00000001) | (((u32) (src) << 0) & 0x00000001)) dst 142 drivers/phy/phy-xgene.c #define CMU_REG2_PLL_REFDIV_SET(dst, src) \ dst 143 drivers/phy/phy-xgene.c (((dst) & ~0x0000c000) | (((u32) (src) << 14) & 0x0000c000)) dst 144 drivers/phy/phy-xgene.c #define CMU_REG2_PLL_LFRES_SET(dst, src) \ dst 145 drivers/phy/phy-xgene.c (((dst) & ~0x0000001e) | (((u32) (src) << 1) & 0x0000001e)) dst 146 drivers/phy/phy-xgene.c #define CMU_REG2_PLL_FBDIV_SET(dst, src) \ dst 147 drivers/phy/phy-xgene.c (((dst) & ~0x00003fe0) | (((u32) (src) << 5) & 0x00003fe0)) dst 149 drivers/phy/phy-xgene.c #define CMU_REG3_VCOVARSEL_SET(dst, src) \ dst 150 drivers/phy/phy-xgene.c (((dst) & ~0x0000000f) | (((u32) (src) << 0) & 0x0000000f)) dst 151 drivers/phy/phy-xgene.c #define CMU_REG3_VCO_MOMSEL_INIT_SET(dst, src) \ dst 152 drivers/phy/phy-xgene.c (((dst) & ~0x000003f0) | (((u32) (src) << 4) & 0x000003f0)) dst 153 drivers/phy/phy-xgene.c #define CMU_REG3_VCO_MANMOMSEL_SET(dst, src) \ dst 154 drivers/phy/phy-xgene.c (((dst) & ~0x0000fc00) | (((u32) (src) << 10) & 0x0000fc00)) dst 157 drivers/phy/phy-xgene.c #define CMU_REG5_PLL_LFSMCAP_SET(dst, src) \ dst 158 drivers/phy/phy-xgene.c (((dst) & ~0x0000c000) | (((u32) (src) << 14) & 0x0000c000)) dst 159 drivers/phy/phy-xgene.c #define CMU_REG5_PLL_LOCK_RESOLUTION_SET(dst, src) \ dst 160 drivers/phy/phy-xgene.c (((dst) & ~0x0000000e) | (((u32) (src) << 1) & 0x0000000e)) dst 161 drivers/phy/phy-xgene.c #define CMU_REG5_PLL_LFCAP_SET(dst, src) \ dst 162 drivers/phy/phy-xgene.c (((dst) & ~0x00003000) | (((u32) (src) << 12) & 0x00003000)) dst 165 drivers/phy/phy-xgene.c #define CMU_REG6_PLL_VREGTRIM_SET(dst, src) \ dst 166 drivers/phy/phy-xgene.c (((dst) & ~0x00000600) | (((u32) (src) << 9) & 0x00000600)) dst 167 drivers/phy/phy-xgene.c #define CMU_REG6_MAN_PVT_CAL_SET(dst, src) \ dst 168 drivers/phy/phy-xgene.c (((dst) & ~0x00000004) | (((u32) (src) << 2) & 0x00000004)) dst 182 drivers/phy/phy-xgene.c #define CMU_REG9_TX_WORD_MODE_CH1_SET(dst, src) \ dst 183 drivers/phy/phy-xgene.c (((dst) & ~0x00000380) | (((u32) (src) << 7) & 0x00000380)) dst 184 drivers/phy/phy-xgene.c #define CMU_REG9_TX_WORD_MODE_CH0_SET(dst, src) \ dst 185 drivers/phy/phy-xgene.c (((dst) & ~0x00000070) | (((u32) (src) << 4) & 0x00000070)) dst 186 drivers/phy/phy-xgene.c #define CMU_REG9_PLL_POST_DIVBY2_SET(dst, src) \ dst 187 drivers/phy/phy-xgene.c (((dst) & ~0x00000008) | (((u32) (src) << 3) & 0x00000008)) dst 188 drivers/phy/phy-xgene.c #define CMU_REG9_VBG_BYPASSB_SET(dst, src) \ dst 189 drivers/phy/phy-xgene.c (((dst) & ~0x00000004) | (((u32) (src) << 2) & 0x00000004)) dst 190 drivers/phy/phy-xgene.c #define CMU_REG9_IGEN_BYPASS_SET(dst, src) \ dst 191 drivers/phy/phy-xgene.c (((dst) & ~0x00000002) | (((u32) (src) << 1) & 0x00000002)) dst 193 drivers/phy/phy-xgene.c #define CMU_REG10_VREG_REFSEL_SET(dst, src) \ dst 194 drivers/phy/phy-xgene.c (((dst) & ~0x00000001) | (((u32) (src) << 0) & 0x00000001)) dst 197 drivers/phy/phy-xgene.c #define CMU_REG12_STATE_DELAY9_SET(dst, src) \ dst 198 drivers/phy/phy-xgene.c (((dst) & ~0x000000f0) | (((u32) (src) << 4) & 0x000000f0)) dst 205 drivers/phy/phy-xgene.c #define CMU_REG16_VCOCAL_WAIT_BTW_CODE_SET(dst, src) \ dst 206 drivers/phy/phy-xgene.c (((dst) & ~0x0000001c) | (((u32) (src) << 2) & 0x0000001c)) dst 207 drivers/phy/phy-xgene.c #define 
CMU_REG16_CALIBRATION_DONE_OVERRIDE_SET(dst, src) \ dst 208 drivers/phy/phy-xgene.c (((dst) & ~0x00000040) | (((u32) (src) << 6) & 0x00000040)) dst 209 drivers/phy/phy-xgene.c #define CMU_REG16_BYPASS_PLL_LOCK_SET(dst, src) \ dst 210 drivers/phy/phy-xgene.c (((dst) & ~0x00000020) | (((u32) (src) << 5) & 0x00000020)) dst 212 drivers/phy/phy-xgene.c #define CMU_REG17_PVT_CODE_R2A_SET(dst, src) \ dst 213 drivers/phy/phy-xgene.c (((dst) & ~0x00007f00) | (((u32) (src) << 8) & 0x00007f00)) dst 214 drivers/phy/phy-xgene.c #define CMU_REG17_RESERVED_7_SET(dst, src) \ dst 215 drivers/phy/phy-xgene.c (((dst) & ~0x000000e0) | (((u32) (src) << 5) & 0x000000e0)) dst 226 drivers/phy/phy-xgene.c #define CMU_REG26_FORCE_PLL_LOCK_SET(dst, src) \ dst 227 drivers/phy/phy-xgene.c (((dst) & ~0x00000001) | (((u32) (src) << 0) & 0x00000001)) dst 232 drivers/phy/phy-xgene.c #define CMU_REG30_LOCK_COUNT_SET(dst, src) \ dst 233 drivers/phy/phy-xgene.c (((dst) & ~0x00000006) | (((u32) (src) << 1) & 0x00000006)) dst 234 drivers/phy/phy-xgene.c #define CMU_REG30_PCIE_MODE_SET(dst, src) \ dst 235 drivers/phy/phy-xgene.c (((dst) & ~0x00000008) | (((u32) (src) << 3) & 0x00000008)) dst 239 drivers/phy/phy-xgene.c #define CMU_REG32_PVT_CAL_WAIT_SEL_SET(dst, src) \ dst 240 drivers/phy/phy-xgene.c (((dst) & ~0x00000006) | (((u32) (src) << 1) & 0x00000006)) dst 241 drivers/phy/phy-xgene.c #define CMU_REG32_IREF_ADJ_SET(dst, src) \ dst 242 drivers/phy/phy-xgene.c (((dst) & ~0x00000180) | (((u32) (src) << 7) & 0x00000180)) dst 245 drivers/phy/phy-xgene.c #define CMU_REG34_VCO_CAL_VTH_LO_MAX_SET(dst, src) \ dst 246 drivers/phy/phy-xgene.c (((dst) & ~0x0000000f) | (((u32) (src) << 0) & 0x0000000f)) dst 247 drivers/phy/phy-xgene.c #define CMU_REG34_VCO_CAL_VTH_HI_MAX_SET(dst, src) \ dst 248 drivers/phy/phy-xgene.c (((dst) & ~0x00000f00) | (((u32) (src) << 8) & 0x00000f00)) dst 249 drivers/phy/phy-xgene.c #define CMU_REG34_VCO_CAL_VTH_LO_MIN_SET(dst, src) \ dst 250 drivers/phy/phy-xgene.c (((dst) & ~0x000000f0) | (((u32) (src) << 4) & 0x000000f0)) dst 251 drivers/phy/phy-xgene.c #define CMU_REG34_VCO_CAL_VTH_HI_MIN_SET(dst, src) \ dst 252 drivers/phy/phy-xgene.c (((dst) & ~0x0000f000) | (((u32) (src) << 12) & 0x0000f000)) dst 254 drivers/phy/phy-xgene.c #define CMU_REG35_PLL_SSC_MOD_SET(dst, src) \ dst 255 drivers/phy/phy-xgene.c (((dst) & ~0x0000fe00) | (((u32) (src) << 9) & 0x0000fe00)) dst 257 drivers/phy/phy-xgene.c #define CMU_REG36_PLL_SSC_EN_SET(dst, src) \ dst 258 drivers/phy/phy-xgene.c (((dst) & ~0x00000010) | (((u32) (src) << 4) & 0x00000010)) dst 259 drivers/phy/phy-xgene.c #define CMU_REG36_PLL_SSC_VSTEP_SET(dst, src) \ dst 260 drivers/phy/phy-xgene.c (((dst) & ~0x0000ffc0) | (((u32) (src) << 6) & 0x0000ffc0)) dst 261 drivers/phy/phy-xgene.c #define CMU_REG36_PLL_SSC_DSMSEL_SET(dst, src) \ dst 262 drivers/phy/phy-xgene.c (((dst) & ~0x00000020) | (((u32) (src) << 5) & 0x00000020)) dst 269 drivers/phy/phy-xgene.c #define RXTX_REG0_CTLE_EQ_HR_SET(dst, src) \ dst 270 drivers/phy/phy-xgene.c (((dst) & ~0x0000f800) | (((u32) (src) << 11) & 0x0000f800)) dst 271 drivers/phy/phy-xgene.c #define RXTX_REG0_CTLE_EQ_QR_SET(dst, src) \ dst 272 drivers/phy/phy-xgene.c (((dst) & ~0x000007c0) | (((u32) (src) << 6) & 0x000007c0)) dst 273 drivers/phy/phy-xgene.c #define RXTX_REG0_CTLE_EQ_FR_SET(dst, src) \ dst 274 drivers/phy/phy-xgene.c (((dst) & ~0x0000003e) | (((u32) (src) << 1) & 0x0000003e)) dst 276 drivers/phy/phy-xgene.c #define RXTX_REG1_RXACVCM_SET(dst, src) \ dst 277 drivers/phy/phy-xgene.c (((dst) & ~0x0000f000) | (((u32) 
(src) << 12) & 0x0000f000)) dst 278 drivers/phy/phy-xgene.c #define RXTX_REG1_CTLE_EQ_SET(dst, src) \ dst 279 drivers/phy/phy-xgene.c (((dst) & ~0x00000f80) | (((u32) (src) << 7) & 0x00000f80)) dst 280 drivers/phy/phy-xgene.c #define RXTX_REG1_RXVREG1_SET(dst, src) \ dst 281 drivers/phy/phy-xgene.c (((dst) & ~0x00000060) | (((u32) (src) << 5) & 0x00000060)) dst 282 drivers/phy/phy-xgene.c #define RXTX_REG1_RXIREF_ADJ_SET(dst, src) \ dst 283 drivers/phy/phy-xgene.c (((dst) & ~0x00000006) | (((u32) (src) << 1) & 0x00000006)) dst 285 drivers/phy/phy-xgene.c #define RXTX_REG2_VTT_ENA_SET(dst, src) \ dst 286 drivers/phy/phy-xgene.c (((dst) & ~0x00000100) | (((u32) (src) << 8) & 0x00000100)) dst 287 drivers/phy/phy-xgene.c #define RXTX_REG2_TX_FIFO_ENA_SET(dst, src) \ dst 288 drivers/phy/phy-xgene.c (((dst) & ~0x00000020) | (((u32) (src) << 5) & 0x00000020)) dst 289 drivers/phy/phy-xgene.c #define RXTX_REG2_VTT_SEL_SET(dst, src) \ dst 290 drivers/phy/phy-xgene.c (((dst) & ~0x000000c0) | (((u32) (src) << 6) & 0x000000c0)) dst 293 drivers/phy/phy-xgene.c #define RXTX_REG4_TX_DATA_RATE_SET(dst, src) \ dst 294 drivers/phy/phy-xgene.c (((dst) & ~0x0000c000) | (((u32) (src) << 14) & 0x0000c000)) dst 295 drivers/phy/phy-xgene.c #define RXTX_REG4_TX_WORD_MODE_SET(dst, src) \ dst 296 drivers/phy/phy-xgene.c (((dst) & ~0x00003800) | (((u32) (src) << 11) & 0x00003800)) dst 298 drivers/phy/phy-xgene.c #define RXTX_REG5_TX_CN1_SET(dst, src) \ dst 299 drivers/phy/phy-xgene.c (((dst) & ~0x0000f800) | (((u32) (src) << 11) & 0x0000f800)) dst 300 drivers/phy/phy-xgene.c #define RXTX_REG5_TX_CP1_SET(dst, src) \ dst 301 drivers/phy/phy-xgene.c (((dst) & ~0x000007e0) | (((u32) (src) << 5) & 0x000007e0)) dst 302 drivers/phy/phy-xgene.c #define RXTX_REG5_TX_CN2_SET(dst, src) \ dst 303 drivers/phy/phy-xgene.c (((dst) & ~0x0000001f) | (((u32) (src) << 0) & 0x0000001f)) dst 305 drivers/phy/phy-xgene.c #define RXTX_REG6_TXAMP_CNTL_SET(dst, src) \ dst 306 drivers/phy/phy-xgene.c (((dst) & ~0x00000780) | (((u32) (src) << 7) & 0x00000780)) dst 307 drivers/phy/phy-xgene.c #define RXTX_REG6_TXAMP_ENA_SET(dst, src) \ dst 308 drivers/phy/phy-xgene.c (((dst) & ~0x00000040) | (((u32) (src) << 6) & 0x00000040)) dst 309 drivers/phy/phy-xgene.c #define RXTX_REG6_RX_BIST_ERRCNT_RD_SET(dst, src) \ dst 310 drivers/phy/phy-xgene.c (((dst) & ~0x00000001) | (((u32) (src) << 0) & 0x00000001)) dst 311 drivers/phy/phy-xgene.c #define RXTX_REG6_TX_IDLE_SET(dst, src) \ dst 312 drivers/phy/phy-xgene.c (((dst) & ~0x00000008) | (((u32) (src) << 3) & 0x00000008)) dst 313 drivers/phy/phy-xgene.c #define RXTX_REG6_RX_BIST_RESYNC_SET(dst, src) \ dst 314 drivers/phy/phy-xgene.c (((dst) & ~0x00000002) | (((u32) (src) << 1) & 0x00000002)) dst 318 drivers/phy/phy-xgene.c #define RXTX_REG7_BIST_ENA_RX_SET(dst, src) \ dst 319 drivers/phy/phy-xgene.c (((dst) & ~0x00000040) | (((u32) (src) << 6) & 0x00000040)) dst 320 drivers/phy/phy-xgene.c #define RXTX_REG7_RX_WORD_MODE_SET(dst, src) \ dst 321 drivers/phy/phy-xgene.c (((dst) & ~0x00003800) | (((u32) (src) << 11) & 0x00003800)) dst 323 drivers/phy/phy-xgene.c #define RXTX_REG8_CDR_LOOP_ENA_SET(dst, src) \ dst 324 drivers/phy/phy-xgene.c (((dst) & ~0x00004000) | (((u32) (src) << 14) & 0x00004000)) dst 325 drivers/phy/phy-xgene.c #define RXTX_REG8_CDR_BYPASS_RXLOS_SET(dst, src) \ dst 326 drivers/phy/phy-xgene.c (((dst) & ~0x00000800) | (((u32) (src) << 11) & 0x00000800)) dst 327 drivers/phy/phy-xgene.c #define RXTX_REG8_SSC_ENABLE_SET(dst, src) \ dst 328 drivers/phy/phy-xgene.c (((dst) & ~0x00000200) | (((u32) 
(src) << 9) & 0x00000200)) dst 329 drivers/phy/phy-xgene.c #define RXTX_REG8_SD_VREF_SET(dst, src) \ dst 330 drivers/phy/phy-xgene.c (((dst) & ~0x000000f0) | (((u32) (src) << 4) & 0x000000f0)) dst 331 drivers/phy/phy-xgene.c #define RXTX_REG8_SD_DISABLE_SET(dst, src) \ dst 332 drivers/phy/phy-xgene.c (((dst) & ~0x00000100) | (((u32) (src) << 8) & 0x00000100)) dst 334 drivers/phy/phy-xgene.c #define RXTX_REG7_RESETB_RXD_SET(dst, src) \ dst 335 drivers/phy/phy-xgene.c (((dst) & ~0x00000100) | (((u32) (src) << 8) & 0x00000100)) dst 336 drivers/phy/phy-xgene.c #define RXTX_REG7_RESETB_RXA_SET(dst, src) \ dst 337 drivers/phy/phy-xgene.c (((dst) & ~0x00000080) | (((u32) (src) << 7) & 0x00000080)) dst 339 drivers/phy/phy-xgene.c #define RXTX_REG7_LOOP_BACK_ENA_CTLE_SET(dst, src) \ dst 340 drivers/phy/phy-xgene.c (((dst) & ~0x00004000) | (((u32) (src) << 14) & 0x00004000)) dst 342 drivers/phy/phy-xgene.c #define RXTX_REG11_PHASE_ADJUST_LIMIT_SET(dst, src) \ dst 343 drivers/phy/phy-xgene.c (((dst) & ~0x0000f800) | (((u32) (src) << 11) & 0x0000f800)) dst 345 drivers/phy/phy-xgene.c #define RXTX_REG12_LATCH_OFF_ENA_SET(dst, src) \ dst 346 drivers/phy/phy-xgene.c (((dst) & ~0x00002000) | (((u32) (src) << 13) & 0x00002000)) dst 347 drivers/phy/phy-xgene.c #define RXTX_REG12_SUMOS_ENABLE_SET(dst, src) \ dst 348 drivers/phy/phy-xgene.c (((dst) & ~0x00000004) | (((u32) (src) << 2) & 0x00000004)) dst 350 drivers/phy/phy-xgene.c #define RXTX_REG12_RX_DET_TERM_ENABLE_SET(dst, src) \ dst 351 drivers/phy/phy-xgene.c (((dst) & ~0x00000002) | (((u32) (src) << 1) & 0x00000002)) dst 354 drivers/phy/phy-xgene.c #define RXTX_REG14_CLTE_LATCAL_MAN_PROG_SET(dst, src) \ dst 355 drivers/phy/phy-xgene.c (((dst) & ~0x0000003f) | (((u32) (src) << 0) & 0x0000003f)) dst 356 drivers/phy/phy-xgene.c #define RXTX_REG14_CTLE_LATCAL_MAN_ENA_SET(dst, src) \ dst 357 drivers/phy/phy-xgene.c (((dst) & ~0x00000040) | (((u32) (src) << 6) & 0x00000040)) dst 359 drivers/phy/phy-xgene.c #define RXTX_REG26_PERIOD_ERROR_LATCH_SET(dst, src) \ dst 360 drivers/phy/phy-xgene.c (((dst) & ~0x00003800) | (((u32) (src) << 11) & 0x00003800)) dst 361 drivers/phy/phy-xgene.c #define RXTX_REG26_BLWC_ENA_SET(dst, src) \ dst 362 drivers/phy/phy-xgene.c (((dst) & ~0x00000008) | (((u32) (src) << 3) & 0x00000008)) dst 381 drivers/phy/phy-xgene.c #define RXTX_REG38_CUSTOMER_PINMODE_INV_SET(dst, src) \ dst 382 drivers/phy/phy-xgene.c (((dst) & 0x0000fffe) | (((u32) (src) << 1) & 0x0000fffe)) dst 401 drivers/phy/phy-xgene.c #define RXTX_REG61_ISCAN_INBERT_SET(dst, src) \ dst 402 drivers/phy/phy-xgene.c (((dst) & ~0x00000010) | (((u32) (src) << 4) & 0x00000010)) dst 403 drivers/phy/phy-xgene.c #define RXTX_REG61_LOADFREQ_SHIFT_SET(dst, src) \ dst 404 drivers/phy/phy-xgene.c (((dst) & ~0x00000008) | (((u32) (src) << 3) & 0x00000008)) dst 405 drivers/phy/phy-xgene.c #define RXTX_REG61_EYE_COUNT_WIDTH_SEL_SET(dst, src) \ dst 406 drivers/phy/phy-xgene.c (((dst) & ~0x000000c0) | (((u32) (src) << 6) & 0x000000c0)) dst 407 drivers/phy/phy-xgene.c #define RXTX_REG61_SPD_SEL_CDR_SET(dst, src) \ dst 408 drivers/phy/phy-xgene.c (((dst) & ~0x00003c00) | (((u32) (src) << 10) & 0x00003c00)) dst 410 drivers/phy/phy-xgene.c #define RXTX_REG62_PERIOD_H1_QLATCH_SET(dst, src) \ dst 411 drivers/phy/phy-xgene.c (((dst) & ~0x00003800) | (((u32) (src) << 11) & 0x00003800)) dst 413 drivers/phy/phy-xgene.c #define RXTX_REG89_MU_TH7_SET(dst, src) \ dst 414 drivers/phy/phy-xgene.c (((dst) & ~0x0000f800) | (((u32) (src) << 11) & 0x0000f800)) dst 415 drivers/phy/phy-xgene.c #define 
RXTX_REG89_MU_TH8_SET(dst, src) \ dst 416 drivers/phy/phy-xgene.c (((dst) & ~0x000007c0) | (((u32) (src) << 6) & 0x000007c0)) dst 417 drivers/phy/phy-xgene.c #define RXTX_REG89_MU_TH9_SET(dst, src) \ dst 418 drivers/phy/phy-xgene.c (((dst) & ~0x0000003e) | (((u32) (src) << 1) & 0x0000003e)) dst 420 drivers/phy/phy-xgene.c #define RXTX_REG96_MU_FREQ1_SET(dst, src) \ dst 421 drivers/phy/phy-xgene.c (((dst) & ~0x0000f800) | (((u32) (src) << 11) & 0x0000f800)) dst 422 drivers/phy/phy-xgene.c #define RXTX_REG96_MU_FREQ2_SET(dst, src) \ dst 423 drivers/phy/phy-xgene.c (((dst) & ~0x000007c0) | (((u32) (src) << 6) & 0x000007c0)) dst 424 drivers/phy/phy-xgene.c #define RXTX_REG96_MU_FREQ3_SET(dst, src) \ dst 425 drivers/phy/phy-xgene.c (((dst) & ~0x0000003e) | (((u32) (src) << 1) & 0x0000003e)) dst 427 drivers/phy/phy-xgene.c #define RXTX_REG99_MU_PHASE1_SET(dst, src) \ dst 428 drivers/phy/phy-xgene.c (((dst) & ~0x0000f800) | (((u32) (src) << 11) & 0x0000f800)) dst 429 drivers/phy/phy-xgene.c #define RXTX_REG99_MU_PHASE2_SET(dst, src) \ dst 430 drivers/phy/phy-xgene.c (((dst) & ~0x000007c0) | (((u32) (src) << 6) & 0x000007c0)) dst 431 drivers/phy/phy-xgene.c #define RXTX_REG99_MU_PHASE3_SET(dst, src) \ dst 432 drivers/phy/phy-xgene.c (((dst) & ~0x0000003e) | (((u32) (src) << 1) & 0x0000003e)) dst 434 drivers/phy/phy-xgene.c #define RXTX_REG102_FREQLOOP_LIMIT_SET(dst, src) \ dst 435 drivers/phy/phy-xgene.c (((dst) & ~0x00000060) | (((u32) (src) << 5) & 0x00000060)) dst 440 drivers/phy/phy-xgene.c #define RXTX_REG125_PQ_REG_SET(dst, src) \ dst 441 drivers/phy/phy-xgene.c (((dst) & ~0x0000fe00) | (((u32) (src) << 9) & 0x0000fe00)) dst 442 drivers/phy/phy-xgene.c #define RXTX_REG125_SIGN_PQ_SET(dst, src) \ dst 443 drivers/phy/phy-xgene.c (((dst) & ~0x00000100) | (((u32) (src) << 8) & 0x00000100)) dst 444 drivers/phy/phy-xgene.c #define RXTX_REG125_SIGN_PQ_2C_SET(dst, src) \ dst 445 drivers/phy/phy-xgene.c (((dst) & ~0x00000080) | (((u32) (src) << 7) & 0x00000080)) dst 446 drivers/phy/phy-xgene.c #define RXTX_REG125_PHZ_MANUALCODE_SET(dst, src) \ dst 447 drivers/phy/phy-xgene.c (((dst) & ~0x0000007c) | (((u32) (src) << 2) & 0x0000007c)) dst 448 drivers/phy/phy-xgene.c #define RXTX_REG125_PHZ_MANUAL_SET(dst, src) \ dst 449 drivers/phy/phy-xgene.c (((dst) & ~0x00000002) | (((u32) (src) << 1) & 0x00000002)) dst 453 drivers/phy/phy-xgene.c #define RXTX_REG127_FORCE_SUM_CAL_START_SET(dst, src) \ dst 454 drivers/phy/phy-xgene.c (((dst) & ~0x00000002) | (((u32) (src) << 1) & 0x00000002)) dst 455 drivers/phy/phy-xgene.c #define RXTX_REG127_FORCE_LAT_CAL_START_SET(dst, src) \ dst 456 drivers/phy/phy-xgene.c (((dst) & ~0x00000004) | (((u32) (src) << 2) & 0x00000004)) dst 457 drivers/phy/phy-xgene.c #define RXTX_REG127_LATCH_MAN_CAL_ENA_SET(dst, src) \ dst 458 drivers/phy/phy-xgene.c (((dst) & ~0x00000008) | (((u32) (src) << 3) & 0x00000008)) dst 459 drivers/phy/phy-xgene.c #define RXTX_REG127_DO_LATCH_MANCAL_SET(dst, src) \ dst 460 drivers/phy/phy-xgene.c (((dst) & ~0x0000fc00) | (((u32) (src) << 10) & 0x0000fc00)) dst 461 drivers/phy/phy-xgene.c #define RXTX_REG127_XO_LATCH_MANCAL_SET(dst, src) \ dst 462 drivers/phy/phy-xgene.c (((dst) & ~0x000003f0) | (((u32) (src) << 4) & 0x000003f0)) dst 464 drivers/phy/phy-xgene.c #define RXTX_REG128_LATCH_CAL_WAIT_SEL_SET(dst, src) \ dst 465 drivers/phy/phy-xgene.c (((dst) & ~0x0000000c) | (((u32) (src) << 2) & 0x0000000c)) dst 466 drivers/phy/phy-xgene.c #define RXTX_REG128_EO_LATCH_MANCAL_SET(dst, src) \ dst 467 drivers/phy/phy-xgene.c (((dst) & ~0x0000fc00) | (((u32) 
(src) << 10) & 0x0000fc00)) dst 468 drivers/phy/phy-xgene.c #define RXTX_REG128_SO_LATCH_MANCAL_SET(dst, src) \ dst 469 drivers/phy/phy-xgene.c (((dst) & ~0x000003f0) | (((u32) (src) << 4) & 0x000003f0)) dst 471 drivers/phy/phy-xgene.c #define RXTX_REG129_DE_LATCH_MANCAL_SET(dst, src) \ dst 472 drivers/phy/phy-xgene.c (((dst) & ~0x0000fc00) | (((u32) (src) << 10) & 0x0000fc00)) dst 473 drivers/phy/phy-xgene.c #define RXTX_REG129_XE_LATCH_MANCAL_SET(dst, src) \ dst 474 drivers/phy/phy-xgene.c (((dst) & ~0x000003f0) | (((u32) (src) << 4) & 0x000003f0)) dst 476 drivers/phy/phy-xgene.c #define RXTX_REG130_EE_LATCH_MANCAL_SET(dst, src) \ dst 477 drivers/phy/phy-xgene.c (((dst) & ~0x0000fc00) | (((u32) (src) << 10) & 0x0000fc00)) dst 478 drivers/phy/phy-xgene.c #define RXTX_REG130_SE_LATCH_MANCAL_SET(dst, src) \ dst 479 drivers/phy/phy-xgene.c (((dst) & ~0x000003f0) | (((u32) (src) << 4) & 0x000003f0)) dst 481 drivers/phy/phy-xgene.c #define RXTX_REG145_TX_IDLE_SATA_SET(dst, src) \ dst 482 drivers/phy/phy-xgene.c (((dst) & ~0x00000001) | (((u32) (src) << 0) & 0x00000001)) dst 483 drivers/phy/phy-xgene.c #define RXTX_REG145_RXES_ENA_SET(dst, src) \ dst 484 drivers/phy/phy-xgene.c (((dst) & ~0x00000002) | (((u32) (src) << 1) & 0x00000002)) dst 485 drivers/phy/phy-xgene.c #define RXTX_REG145_RXDFE_CONFIG_SET(dst, src) \ dst 486 drivers/phy/phy-xgene.c (((dst) & ~0x0000c000) | (((u32) (src) << 14) & 0x0000c000)) dst 487 drivers/phy/phy-xgene.c #define RXTX_REG145_RXVWES_LATENA_SET(dst, src) \ dst 488 drivers/phy/phy-xgene.c (((dst) & ~0x00000004) | (((u32) (src) << 2) & 0x00000004)) dst 215 drivers/platform/chrome/cros_ec_rpmsg.c chinfo.dst = RPMSG_ADDR_ANY; dst 44 drivers/ptp/ptp_clock.c struct ptp_extts_event *dst; dst 53 drivers/ptp/ptp_clock.c dst = &queue->buf[queue->tail]; dst 54 drivers/ptp/ptp_clock.c dst->index = src->index; dst 55 drivers/ptp/ptp_clock.c dst->t.sec = seconds; dst 56 drivers/ptp/ptp_clock.c dst->t.nsec = remainder; dst 1399 drivers/rapidio/devices/rio_mport_cdev.c u16 src, u16 dst, u16 info) dst 403 drivers/rapidio/rio.c void (*dinb) (struct rio_mport * mport, void *dev_id, u16 src, u16 dst, dst 438 drivers/rapidio/rio.c u16 dst, u16 info)) dst 1428 drivers/rpmsg/qcom_glink_native.c rpdev->dst = RPMSG_ADDR_ANY; dst 1476 drivers/rpmsg/qcom_glink_native.c chinfo.dst = RPMSG_ADDR_ANY; dst 466 drivers/rpmsg/qcom_smd.c static void smd_copy_to_fifo(void __iomem *dst, dst 472 drivers/rpmsg/qcom_smd.c __iowrite32_copy(dst, src, count / sizeof(u32)); dst 474 drivers/rpmsg/qcom_smd.c memcpy_toio(dst, src, count); dst 481 drivers/rpmsg/qcom_smd.c static void smd_copy_from_fifo(void *dst, dst 487 drivers/rpmsg/qcom_smd.c __ioread32_copy(dst, src, count / sizeof(u32)); dst 489 drivers/rpmsg/qcom_smd.c memcpy_fromio(dst, src, count); dst 1078 drivers/rpmsg/qcom_smd.c rpdev->dst = RPMSG_ADDR_ANY; dst 1309 drivers/rpmsg/qcom_smd.c chinfo.dst = RPMSG_ADDR_ANY; dst 321 drivers/rpmsg/rpmsg_char.c return sprintf(buf, "%d\n", eptdev->chinfo.dst); dst 323 drivers/rpmsg/rpmsg_char.c static DEVICE_ATTR_RO(dst); dst 446 drivers/rpmsg/rpmsg_char.c chinfo.dst = eptinfo.dst; dst 136 drivers/rpmsg/rpmsg_core.c int rpmsg_sendto(struct rpmsg_endpoint *ept, void *data, int len, u32 dst) dst 143 drivers/rpmsg/rpmsg_core.c return ept->ops->sendto(ept, data, len, dst); dst 167 drivers/rpmsg/rpmsg_core.c int rpmsg_send_offchannel(struct rpmsg_endpoint *ept, u32 src, u32 dst, dst 175 drivers/rpmsg/rpmsg_core.c return ept->ops->send_offchannel(ept, src, dst, data, len); dst 224 drivers/rpmsg/rpmsg_core.c int 
rpmsg_trysendto(struct rpmsg_endpoint *ept, void *data, int len, u32 dst) dst 231 drivers/rpmsg/rpmsg_core.c return ept->ops->trysendto(ept, data, len, dst); dst 274 drivers/rpmsg/rpmsg_core.c int rpmsg_trysend_offchannel(struct rpmsg_endpoint *ept, u32 src, u32 dst, dst 282 drivers/rpmsg/rpmsg_core.c return ept->ops->trysend_offchannel(ept, src, dst, data, len); dst 299 drivers/rpmsg/rpmsg_core.c if (chinfo->dst != RPMSG_ADDR_ANY && chinfo->dst != rpdev->dst) dst 369 drivers/rpmsg/rpmsg_core.c rpmsg_show_attr(dst, dst, "0x%x\n"); dst 460 drivers/rpmsg/rpmsg_core.c chinfo.dst = RPMSG_ADDR_ANY; dst 522 drivers/rpmsg/rpmsg_core.c rpdev->id.name, rpdev->src, rpdev->dst); dst 59 drivers/rpmsg/rpmsg_internal.h int (*sendto)(struct rpmsg_endpoint *ept, void *data, int len, u32 dst); dst 60 drivers/rpmsg/rpmsg_internal.h int (*send_offchannel)(struct rpmsg_endpoint *ept, u32 src, u32 dst, dst 64 drivers/rpmsg/rpmsg_internal.h int (*trysendto)(struct rpmsg_endpoint *ept, void *data, int len, u32 dst); dst 65 drivers/rpmsg/rpmsg_internal.h int (*trysend_offchannel)(struct rpmsg_endpoint *ept, u32 src, u32 dst, dst 88 drivers/rpmsg/virtio_rpmsg_bus.c u32 dst; dst 170 drivers/rpmsg/virtio_rpmsg_bus.c u32 dst); dst 172 drivers/rpmsg/virtio_rpmsg_bus.c u32 dst, void *data, int len); dst 175 drivers/rpmsg/virtio_rpmsg_bus.c int len, u32 dst); dst 177 drivers/rpmsg/virtio_rpmsg_bus.c u32 dst, void *data, int len); dst 406 drivers/rpmsg/virtio_rpmsg_bus.c chinfo->name, chinfo->src, chinfo->dst); dst 420 drivers/rpmsg/virtio_rpmsg_bus.c rpdev->dst = chinfo->dst; dst 555 drivers/rpmsg/virtio_rpmsg_bus.c u32 src, u32 dst, dst 566 drivers/rpmsg/virtio_rpmsg_bus.c if (src == RPMSG_ADDR_ANY || dst == RPMSG_ADDR_ANY) { dst 567 drivers/rpmsg/virtio_rpmsg_bus.c dev_err(dev, "invalid addr (src 0x%x, dst 0x%x)\n", src, dst); dst 618 drivers/rpmsg/virtio_rpmsg_bus.c msg->dst = dst; dst 623 drivers/rpmsg/virtio_rpmsg_bus.c msg->src, msg->dst, msg->len, msg->flags, msg->reserved); dst 655 drivers/rpmsg/virtio_rpmsg_bus.c u32 src = ept->addr, dst = rpdev->dst; dst 657 drivers/rpmsg/virtio_rpmsg_bus.c return rpmsg_send_offchannel_raw(rpdev, src, dst, data, len, true); dst 661 drivers/rpmsg/virtio_rpmsg_bus.c u32 dst) dst 666 drivers/rpmsg/virtio_rpmsg_bus.c return rpmsg_send_offchannel_raw(rpdev, src, dst, data, len, true); dst 670 drivers/rpmsg/virtio_rpmsg_bus.c u32 dst, void *data, int len) dst 674 drivers/rpmsg/virtio_rpmsg_bus.c return rpmsg_send_offchannel_raw(rpdev, src, dst, data, len, true); dst 680 drivers/rpmsg/virtio_rpmsg_bus.c u32 src = ept->addr, dst = rpdev->dst; dst 682 drivers/rpmsg/virtio_rpmsg_bus.c return rpmsg_send_offchannel_raw(rpdev, src, dst, data, len, false); dst 686 drivers/rpmsg/virtio_rpmsg_bus.c int len, u32 dst) dst 691 drivers/rpmsg/virtio_rpmsg_bus.c return rpmsg_send_offchannel_raw(rpdev, src, dst, data, len, false); dst 695 drivers/rpmsg/virtio_rpmsg_bus.c u32 dst, void *data, int len) dst 699 drivers/rpmsg/virtio_rpmsg_bus.c return rpmsg_send_offchannel_raw(rpdev, src, dst, data, len, false); dst 710 drivers/rpmsg/virtio_rpmsg_bus.c msg->src, msg->dst, msg->len, msg->flags, msg->reserved); dst 729 drivers/rpmsg/virtio_rpmsg_bus.c ept = idr_find(&vrp->endpoints, msg->dst); dst 855 drivers/rpmsg/virtio_rpmsg_bus.c chinfo.dst = msg->addr; dst 508 drivers/s390/block/dasd_diag.c char *dst; dst 549 drivers/s390/block/dasd_diag.c dst = page_address(bv.bv_page) + bv.bv_offset; dst 554 drivers/s390/block/dasd_diag.c dbio->buffer = dst; dst 556 drivers/s390/block/dasd_diag.c dst += blksize; 
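The long run of phy-xgene.c *_SET entries indexed above all follow a single read-modify-write idiom for packing a field into a 32-bit register image: clear the field's mask in the current value, then OR in the new value shifted to the field's offset and truncated by the same mask. A minimal stand-alone sketch of that idiom follows; the FIELD_SET macro and the DEMO_* names are illustrative placeholders, not macros taken from phy-xgene.c.

    #include <stdint.h>
    #include <stdio.h>

    /* Generic field setter in the same shape as the *_SET macros indexed above:
     * clear the field's bits in reg, insert val at the field's shift, and keep
     * the result confined to the field's mask. */
    #define FIELD_SET(reg, val, mask, shift) \
            (((reg) & ~(mask)) | (((uint32_t)(val) << (shift)) & (mask)))

    /* Hypothetical 3-bit field at bits [18:16], mirroring the 0x00070000 / << 16
     * layout seen in entries such as REGSPEC_CFG_I_TX_WORDMODE0_SET. */
    #define DEMO_FIELD_MASK  0x00070000u
    #define DEMO_FIELD_SHIFT 16

    int main(void)
    {
            uint32_t reg = 0xffffffffu;

            reg = FIELD_SET(reg, 0x5, DEMO_FIELD_MASK, DEMO_FIELD_SHIFT);
            printf("0x%08x\n", reg);        /* 0xfffdffff: bits 18..16 now hold 101 */
            return 0;
    }

Masking the shifted value as well as the destination keeps an oversized src from spilling into neighbouring fields, which is why both operands in the indexed macros are ANDed with the same constant.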
dst 3170 drivers/s390/block/dasd_eckd.c char *dst; dst 3210 drivers/s390/block/dasd_eckd.c dst = page_address(bv.bv_page) + bv.bv_offset; dst 3216 drivers/s390/block/dasd_eckd.c if (dst && !skip_block) { dst 3217 drivers/s390/block/dasd_eckd.c dst += off; dst 3218 drivers/s390/block/dasd_eckd.c memset(dst, 0, blksize); dst 3846 drivers/s390/block/dasd_eckd.c char *dst; dst 3942 drivers/s390/block/dasd_eckd.c dst = page_address(bv.bv_page) + bv.bv_offset; dst 3947 drivers/s390/block/dasd_eckd.c memcpy(copy + bv.bv_offset, dst, bv.bv_len); dst 3949 drivers/s390/block/dasd_eckd.c dst = copy + bv.bv_offset; dst 3963 drivers/s390/block/dasd_eckd.c memset(dst + count, 0xe5, dst 3983 drivers/s390/block/dasd_eckd.c if (idal_is_needed(dst, blksize)) { dst 3986 drivers/s390/block/dasd_eckd.c idaws = idal_create_words(idaws, dst, blksize); dst 3988 drivers/s390/block/dasd_eckd.c ccw->cda = (__u32)(addr_t) dst; dst 3992 drivers/s390/block/dasd_eckd.c dst += blksize; dst 4036 drivers/s390/block/dasd_eckd.c char *dst, *idaw_dst; dst 4109 drivers/s390/block/dasd_eckd.c dst = page_address(bv.bv_page) + bv.bv_offset; dst 4129 drivers/s390/block/dasd_eckd.c idaw_dst = dst; dst 4138 drivers/s390/block/dasd_eckd.c if (__pa(dst) & (IDA_BLOCK_SIZE-1)) { dst 4142 drivers/s390/block/dasd_eckd.c idaw_dst = dst; dst 4144 drivers/s390/block/dasd_eckd.c if ((idaw_dst + idaw_len) != dst) { dst 4150 drivers/s390/block/dasd_eckd.c dst += part_len; dst 4366 drivers/s390/block/dasd_eckd.c char *dst; dst 4452 drivers/s390/block/dasd_eckd.c dst = page_address(bv.bv_page) + bv.bv_offset; dst 4475 drivers/s390/block/dasd_eckd.c dst, part_len); dst 4480 drivers/s390/block/dasd_eckd.c dst += part_len; dst 4485 drivers/s390/block/dasd_eckd.c dst = page_address(bv.bv_page) + bv.bv_offset; dst 4487 drivers/s390/block/dasd_eckd.c dst, bv.bv_len); dst 4625 drivers/s390/block/dasd_eckd.c char *dst; dst 4721 drivers/s390/block/dasd_eckd.c dst = page_address(bv.bv_page) + bv.bv_offset; dst 4724 drivers/s390/block/dasd_eckd.c memset(dst, 0, seg_len); dst 4738 drivers/s390/block/dasd_eckd.c idaws = idal_create_words(idaws, dst, seg_len); dst 4765 drivers/s390/block/dasd_eckd.c char *dst, *cda; dst 4782 drivers/s390/block/dasd_eckd.c dst = page_address(bv.bv_page) + bv.bv_offset; dst 4787 drivers/s390/block/dasd_eckd.c if (dst) { dst 4792 drivers/s390/block/dasd_eckd.c if (dst != cda) { dst 4794 drivers/s390/block/dasd_eckd.c memcpy(dst, cda, bv.bv_len); dst 4798 drivers/s390/block/dasd_eckd.c dst = NULL; dst 449 drivers/s390/block/dasd_fba.c char *dst; dst 512 drivers/s390/block/dasd_fba.c dst = page_address(bv.bv_page) + bv.bv_offset; dst 517 drivers/s390/block/dasd_fba.c memcpy(copy + bv.bv_offset, dst, bv.bv_len); dst 519 drivers/s390/block/dasd_fba.c dst = copy + bv.bv_offset; dst 538 drivers/s390/block/dasd_fba.c if (idal_is_needed(dst, blksize)) { dst 541 drivers/s390/block/dasd_fba.c idaws = idal_create_words(idaws, dst, blksize); dst 543 drivers/s390/block/dasd_fba.c ccw->cda = (__u32)(addr_t) dst; dst 547 drivers/s390/block/dasd_fba.c dst += blksize; dst 581 drivers/s390/block/dasd_fba.c char *dst, *cda; dst 594 drivers/s390/block/dasd_fba.c dst = page_address(bv.bv_page) + bv.bv_offset; dst 599 drivers/s390/block/dasd_fba.c if (dst) { dst 604 drivers/s390/block/dasd_fba.c if (dst != cda) { dst 606 drivers/s390/block/dasd_fba.c memcpy(dst, cda, bv.bv_len); dst 610 drivers/s390/block/dasd_fba.c dst = NULL; dst 88 drivers/s390/char/sclp_ocf.c void sclp_ocf_cpc_name_copy(char *dst) dst 91 drivers/s390/char/sclp_ocf.c memcpy(dst, 
cpc_name, OCF_LENGTH_CPC_NAME); dst 937 drivers/s390/net/qeth_core.h struct dst_entry *dst = skb_dst(skb); dst 940 drivers/s390/net/qeth_core.h rt = (struct rt6_info *) dst; dst 941 drivers/s390/net/qeth_core.h if (dst) dst 942 drivers/s390/net/qeth_core.h dst = dst_check(dst, (ipv == 6) ? rt6_get_cookie(rt) : 0); dst 943 drivers/s390/net/qeth_core.h return dst; dst 57 drivers/s390/net/qeth_ethtool.c static void qeth_add_stat_data(u64 **dst, void *src, dst 66 drivers/s390/net/qeth_ethtool.c **dst = *(u64 *)stat; dst 67 drivers/s390/net/qeth_ethtool.c (*dst)++; dst 1871 drivers/s390/net/qeth_l3_main.c static int qeth_l3_get_cast_type_rcu(struct sk_buff *skb, struct dst_entry *dst, dst 1876 drivers/s390/net/qeth_l3_main.c if (dst) dst 1877 drivers/s390/net/qeth_l3_main.c n = dst_neigh_lookup_skb(dst, skb); dst 1909 drivers/s390/net/qeth_l3_main.c struct dst_entry *dst; dst 1913 drivers/s390/net/qeth_l3_main.c dst = qeth_dst_check_rcu(skb, ipv); dst 1914 drivers/s390/net/qeth_l3_main.c cast_type = qeth_l3_get_cast_type_rcu(skb, dst, ipv); dst 1938 drivers/s390/net/qeth_l3_main.c struct dst_entry *dst; dst 1976 drivers/s390/net/qeth_l3_main.c dst = qeth_dst_check_rcu(skb, ipv); dst 1981 drivers/s390/net/qeth_l3_main.c cast_type = qeth_l3_get_cast_type_rcu(skb, dst, ipv); dst 1985 drivers/s390/net/qeth_l3_main.c struct rtable *rt = (struct rtable *) dst; dst 1991 drivers/s390/net/qeth_l3_main.c struct rt6_info *rt = (struct rt6_info *) dst; dst 129 drivers/scsi/aic7xxx/aic79xx.h #define AHD_COPY_SCB_COL_IDX(dst, src) \ dst 131 drivers/scsi/aic7xxx/aic79xx.h dst->hscb->scsiid = src->hscb->scsiid; \ dst 132 drivers/scsi/aic7xxx/aic79xx.h dst->hscb->lun = src->hscb->lun; \ dst 275 drivers/scsi/aic7xxx/aic79xx_core.c ahd_set_modes(struct ahd_softc *ahd, ahd_mode src, ahd_mode dst) dst 277 drivers/scsi/aic7xxx/aic79xx_core.c if (ahd->src_mode == src && ahd->dst_mode == dst) dst 285 drivers/scsi/aic7xxx/aic79xx_core.c ahd_build_mode_state(ahd, src, dst)); dst 287 drivers/scsi/aic7xxx/aic79xx_core.c ahd_outb(ahd, MODE_PTR, ahd_build_mode_state(ahd, src, dst)); dst 289 drivers/scsi/aic7xxx/aic79xx_core.c ahd->dst_mode = dst; dst 297 drivers/scsi/aic7xxx/aic79xx_core.c ahd_mode dst; dst 304 drivers/scsi/aic7xxx/aic79xx_core.c ahd_extract_mode_state(ahd, mode_ptr, &src, &dst); dst 305 drivers/scsi/aic7xxx/aic79xx_core.c ahd_known_modes(ahd, src, dst); dst 338 drivers/scsi/aic7xxx/aic79xx_core.c ahd_mode dst; dst 340 drivers/scsi/aic7xxx/aic79xx_core.c ahd_extract_mode_state(ahd, state, &src, &dst); dst 341 drivers/scsi/aic7xxx/aic79xx_core.c ahd_set_modes(ahd, src, dst); dst 58 drivers/scsi/aic7xxx/aic79xx_inline.h ahd_mode src, ahd_mode dst); dst 61 drivers/scsi/aic7xxx/aic79xx_inline.h ahd_mode dst); dst 64 drivers/scsi/aic7xxx/aic79xx_inline.h ahd_mode *src, ahd_mode *dst); dst 67 drivers/scsi/aic7xxx/aic79xx_inline.h ahd_mode dst); dst 76 drivers/scsi/aic7xxx/aic79xx_inline.h ahd_known_modes(struct ahd_softc *ahd, ahd_mode src, ahd_mode dst) dst 79 drivers/scsi/aic7xxx/aic79xx_inline.h ahd->dst_mode = dst; dst 81 drivers/scsi/aic7xxx/aic79xx_inline.h ahd->saved_dst_mode = dst; dst 85 drivers/scsi/aic7xxx/aic79xx_inline.h ahd_build_mode_state(struct ahd_softc *ahd, ahd_mode src, ahd_mode dst) dst 87 drivers/scsi/aic7xxx/aic79xx_inline.h return ((src << SRC_MODE_SHIFT) | (dst << DST_MODE_SHIFT)); dst 92 drivers/scsi/aic7xxx/aic79xx_inline.h ahd_mode *src, ahd_mode *dst) dst 95 drivers/scsi/aic7xxx/aic79xx_inline.h *dst = (state & DST_MODE) >> DST_MODE_SHIFT; dst 149 
drivers/scsi/aic7xxx/aic79xx_inline.h #define AHD_COPY_COL_IDX(dst, src) \ dst 151 drivers/scsi/aic7xxx/aic79xx_inline.h dst->hscb->scsiid = src->hscb->scsiid; \ dst 152 drivers/scsi/aic7xxx/aic79xx_inline.h dst->hscb->lun = src->hscb->lun; \ dst 284 drivers/scsi/aic94xx/aic94xx_reg.c void asd_read_reg_string(struct asd_ha_struct *asd_ha, void *dst, dst 287 drivers/scsi/aic94xx/aic94xx_reg.c u8 *p = dst; dst 54 drivers/scsi/aic94xx/aic94xx_reg.h void asd_read_reg_string(struct asd_ha_struct *asd_ha, void *dst, dst 333 drivers/scsi/atari_scsi.c char *src, *dst; dst 346 drivers/scsi/atari_scsi.c dst = phys_to_virt(phys_dst); dst 347 drivers/scsi/atari_scsi.c dprintk(NDEBUG_DMA, " = virt addr %p\n", dst); dst 349 drivers/scsi/atari_scsi.c *dst++ = *src++; dst 134 drivers/scsi/bnx2i/bnx2i.h #define GET_STATS_64(__hba, dst, field) \ dst 137 drivers/scsi/bnx2i/bnx2i.h dst->field##_lo = __hba->stats.field##_lo; \ dst 138 drivers/scsi/bnx2i/bnx2i.h dst->field##_hi = __hba->stats.field##_hi; \ dst 154 drivers/scsi/bnx2i/bnx2i.h #define GET_STATS_64(__hba, dst, field) \ dst 161 drivers/scsi/bnx2i/bnx2i.h out = (u64 *)&dst->field##_lo; \ dst 1060 drivers/scsi/csiostor/csio_mb.c uint8_t *dst; dst 1067 drivers/scsi/csiostor/csio_mb.c dst = (uint8_t *)(&stats) + ((portparams->idx - 1) * 8); dst 1069 drivers/scsi/csiostor/csio_mb.c memcpy(dst, src, (portparams->nstats * 8)); dst 928 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c csk->dst = NULL; dst 956 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c struct dst_entry *dst = csk->dst; dst 971 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c csk->l2t = t3_l2t_get(t3dev, dst, ndev, dst 1000 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c csk->mss_idx = cxgbi_sock_select_mss(csk, dst_mtu(dst)); dst 1638 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c csk->dst = NULL; dst 1716 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c n = dst_neigh_lookup(csk->dst, daddr); dst 1778 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c csk->mtu = dst_mtu(csk->dst); dst 605 drivers/scsi/cxgbi/libcxgbi.c struct dst_entry *dst; dst 625 drivers/scsi/cxgbi/libcxgbi.c dst = &rt->dst; dst 626 drivers/scsi/cxgbi/libcxgbi.c n = dst_neigh_lookup(dst, &daddr->sin_addr.s_addr); dst 680 drivers/scsi/cxgbi/libcxgbi.c csk->dst = dst; dst 721 drivers/scsi/cxgbi/libcxgbi.c struct dst_entry *dst; dst 742 drivers/scsi/cxgbi/libcxgbi.c dst = &rt->dst; dst 744 drivers/scsi/cxgbi/libcxgbi.c n = dst_neigh_lookup(dst, &daddr6->sin6_addr); dst 791 drivers/scsi/cxgbi/libcxgbi.c csk->dst = dst; dst 832 drivers/scsi/cxgbi/libcxgbi.c dst_confirm(csk->dst); dst 862 drivers/scsi/cxgbi/libcxgbi.c if (csk->dst) dst 863 drivers/scsi/cxgbi/libcxgbi.c dst_release(csk->dst); dst 879 drivers/scsi/cxgbi/libcxgbi.c if (csk->dst) dst 880 drivers/scsi/cxgbi/libcxgbi.c dst_confirm(csk->dst); dst 1071 drivers/scsi/cxgbi/libcxgbi.c dst_confirm(csk->dst); dst 1099 drivers/scsi/cxgbi/libcxgbi.c struct dst_entry *dst = csk->dst; dst 1101 drivers/scsi/cxgbi/libcxgbi.c csk->advmss = dst_metric_advmss(dst); dst 2005 drivers/scsi/cxgbi/libcxgbi.c char *dst = skb->data + task->hdr_len; dst 2012 drivers/scsi/cxgbi/libcxgbi.c memcpy(dst, src+frag->offset, frag->size); dst 2013 drivers/scsi/cxgbi/libcxgbi.c dst += frag->size; dst 2017 drivers/scsi/cxgbi/libcxgbi.c memset(dst, 0, padlen); dst 148 drivers/scsi/cxgbi/libcxgbi.h struct dst_entry *dst; dst 2839 drivers/scsi/esp_scsi.c u8 *dst = (u8 *)addr; dst 2848 drivers/scsi/esp_scsi.c *dst++ = readb(esp->fifo_reg); dst 520 drivers/scsi/g_NCR5380.c unsigned char *dst, int len) dst 546 drivers/scsi/g_NCR5380.c dst + start, 64); dst 549 drivers/scsi/g_NCR5380.c dst + start, 
128); dst 551 drivers/scsi/g_NCR5380.c memcpy_fromio(dst + start, dst 2422 drivers/scsi/libfc/fc_exch.c int fc_exch_mgr_list_clone(struct fc_lport *src, struct fc_lport *dst) dst 2427 drivers/scsi/libfc/fc_exch.c if (!fc_exch_mgr_add(dst, ema->mp, ema->match)) dst 2432 drivers/scsi/libfc/fc_exch.c list_for_each_entry_safe(ema, tmp, &dst->ema_list, ema_list) dst 171 drivers/scsi/libsas/sas_host_smp.c u8 *dst = resp_data + 24 + i, *src = dst 173 drivers/scsi/libsas/sas_host_smp.c dst[0] = src[3]; dst 174 drivers/scsi/libsas/sas_host_smp.c dst[1] = src[2]; dst 175 drivers/scsi/libsas/sas_host_smp.c dst[2] = src[1]; dst 176 drivers/scsi/libsas/sas_host_smp.c dst[3] = src[0]; dst 278 drivers/scsi/mac_scsi.c unsigned char *dst, int len) dst 281 drivers/scsi/mac_scsi.c unsigned char *d = dst; dst 320 drivers/scsi/mac_scsi.c "%s: bus error (%d/%d)\n", __func__, d - dst, len); dst 953 drivers/scsi/mpt3sas/mpt3sas_ctl.c Mpi2SGESimple64_t tmp, *src = NULL, *dst = NULL; dst 958 drivers/scsi/mpt3sas/mpt3sas_ctl.c dst = dst 960 drivers/scsi/mpt3sas/mpt3sas_ctl.c src = (void *)dst + ioc->sge_size; dst 963 drivers/scsi/mpt3sas/mpt3sas_ctl.c memcpy(src, dst, ioc->sge_size); dst 964 drivers/scsi/mpt3sas/mpt3sas_ctl.c memcpy(dst, &tmp, ioc->sge_size); dst 1929 drivers/scsi/ncr53c8xx.c (struct ncb *np, ncrcmd *src, ncrcmd *dst, int len); dst 3512 drivers/scsi/ncr53c8xx.c ncr_script_copy_and_bind (struct ncb *np, ncrcmd *src, ncrcmd *dst, int len) dst 3525 drivers/scsi/ncr53c8xx.c *dst++ = cpu_to_scr(opcode); dst 3574 drivers/scsi/ncr53c8xx.c dst[-1] = cpu_to_scr(opcode & ~SCR_NO_FLUSH); dst 3649 drivers/scsi/ncr53c8xx.c *dst++ = cpu_to_scr(new); dst 3652 drivers/scsi/ncr53c8xx.c *dst++ = cpu_to_scr(*src++); dst 493 drivers/scsi/qedi/qedi_iscsi.c ether_addr_copy(conn_info->dst.mac, qedi_ep->dst_mac); dst 496 drivers/scsi/qedi/qedi_iscsi.c conn_info->dst.ip[0] = ntohl(qedi_ep->dst_addr[0]); dst 506 drivers/scsi/qedi/qedi_iscsi.c conn_info->dst.ip[i] = ntohl(qedi_ep->dst_addr[i]); dst 516 drivers/scsi/qedi/qedi_iscsi.c conn_info->dst.port = qedi_ep->dst_port; dst 905 drivers/scsi/qedi/qedi_iscsi.c memcpy(&path_req.dst.v4_addr, &qedi_ep->dst_addr, dst 909 drivers/scsi/qedi/qedi_iscsi.c memcpy(&path_req.dst.v6_addr, &qedi_ep->dst_addr, dst 83 drivers/scsi/qla2xxx/qla_inline.h host_to_adap(uint8_t *src, uint8_t *dst, uint32_t bsize) dst 86 drivers/scsi/qla2xxx/qla_inline.h __le32 *odest = (__le32 *) dst; dst 430 drivers/scsi/stex.c struct st_sgtable *dst; dst 438 drivers/scsi/stex.c dst = (struct st_sgtable *)req->variable; dst 441 drivers/scsi/stex.c dst->sg_count = cpu_to_le16((u16)nseg); dst 442 drivers/scsi/stex.c dst->max_sg_count = cpu_to_le16(hba->host->sg_tablesize); dst 443 drivers/scsi/stex.c dst->sz_in_byte = cpu_to_le32(scsi_bufflen(cmd)); dst 445 drivers/scsi/stex.c table = (struct st_sgitem *)(dst + 1); dst 462 drivers/scsi/stex.c struct st_sgtable *dst; dst 470 drivers/scsi/stex.c dst = (struct st_sgtable *)req->variable; dst 473 drivers/scsi/stex.c dst->sg_count = cpu_to_le16((u16)nseg); dst 474 drivers/scsi/stex.c dst->max_sg_count = cpu_to_le16(hba->host->sg_tablesize); dst 475 drivers/scsi/stex.c dst->sz_in_byte = cpu_to_le32(scsi_bufflen(cmd)); dst 477 drivers/scsi/stex.c table = (struct st_ss_sgitem *)(dst + 1); dst 69 drivers/soc/bcm/brcmstb/pm/pm-mips.c void *dst; dst 278 drivers/soc/qcom/wcnss_ctrl.c chinfo.dst = RPMSG_ADDR_ANY; dst 300 drivers/spi/spi-npcm-fiu.c void __iomem *dst = (void __iomem *)(chip->flash_region_mapped_ptr + dst 307 drivers/spi/spi-npcm-fiu.c iowrite8(*(buf_tx + i), 
dst + i); dst 309 drivers/spi/spi-npcm-fiu.c memcpy_toio(dst, buf_tx, len); dst 863 drivers/spi/spi-sh-msiof.c static void copy_bswap32(u32 *dst, const u32 *src, unsigned int words) dst 868 drivers/spi/spi-sh-msiof.c *dst++ = swab32(get_unaligned(src)); dst 871 drivers/spi/spi-sh-msiof.c } else if ((unsigned long)dst & 3) { dst 873 drivers/spi/spi-sh-msiof.c put_unaligned(swab32(*src++), dst); dst 874 drivers/spi/spi-sh-msiof.c dst++; dst 878 drivers/spi/spi-sh-msiof.c *dst++ = swab32(*src++); dst 882 drivers/spi/spi-sh-msiof.c static void copy_wswap32(u32 *dst, const u32 *src, unsigned int words) dst 887 drivers/spi/spi-sh-msiof.c *dst++ = swahw32(get_unaligned(src)); dst 890 drivers/spi/spi-sh-msiof.c } else if ((unsigned long)dst & 3) { dst 892 drivers/spi/spi-sh-msiof.c put_unaligned(swahw32(*src++), dst); dst 893 drivers/spi/spi-sh-msiof.c dst++; dst 897 drivers/spi/spi-sh-msiof.c *dst++ = swahw32(*src++); dst 901 drivers/spi/spi-sh-msiof.c static void copy_plain32(u32 *dst, const u32 *src, unsigned int words) dst 903 drivers/spi/spi-sh-msiof.c memcpy(dst, src, words * 4); dst 489 drivers/staging/fbtft/fbtft-core.c static void fbtft_merge_fbtftops(struct fbtft_ops *dst, struct fbtft_ops *src) dst 492 drivers/staging/fbtft/fbtft-core.c dst->write = src->write; dst 494 drivers/staging/fbtft/fbtft-core.c dst->read = src->read; dst 496 drivers/staging/fbtft/fbtft-core.c dst->write_vmem = src->write_vmem; dst 498 drivers/staging/fbtft/fbtft-core.c dst->write_register = src->write_register; dst 500 drivers/staging/fbtft/fbtft-core.c dst->set_addr_win = src->set_addr_win; dst 502 drivers/staging/fbtft/fbtft-core.c dst->reset = src->reset; dst 504 drivers/staging/fbtft/fbtft-core.c dst->mkdirty = src->mkdirty; dst 506 drivers/staging/fbtft/fbtft-core.c dst->update_display = src->update_display; dst 508 drivers/staging/fbtft/fbtft-core.c dst->init_display = src->init_display; dst 510 drivers/staging/fbtft/fbtft-core.c dst->blank = src->blank; dst 512 drivers/staging/fbtft/fbtft-core.c dst->request_gpios_match = src->request_gpios_match; dst 514 drivers/staging/fbtft/fbtft-core.c dst->request_gpios = src->request_gpios; dst 516 drivers/staging/fbtft/fbtft-core.c dst->verify_gpios = src->verify_gpios; dst 518 drivers/staging/fbtft/fbtft-core.c dst->register_backlight = src->register_backlight; dst 520 drivers/staging/fbtft/fbtft-core.c dst->unregister_backlight = src->unregister_backlight; dst 522 drivers/staging/fbtft/fbtft-core.c dst->set_var = src->set_var; dst 524 drivers/staging/fbtft/fbtft-core.c dst->set_gamma = src->set_gamma; dst 43 drivers/staging/fbtft/fbtft-io.c u8 *dst = par->extra; dst 75 drivers/staging/fbtft/fbtft-io.c *(__be64 *)dst = cpu_to_be64(tmp); dst 76 drivers/staging/fbtft/fbtft-io.c dst += 8; dst 77 drivers/staging/fbtft/fbtft-io.c *dst++ = (u8)(*src++ & 0x00FF); dst 133 drivers/staging/isdn/gigaset/isocdata.c int read, write, limit, src, dst; dst 193 drivers/staging/isdn/gigaset/isocdata.c dst = BAS_OUTBUFSIZE; dst 194 drivers/staging/isdn/gigaset/isocdata.c while (dst < limit && src < write) dst 195 drivers/staging/isdn/gigaset/isocdata.c iwb->data[dst++] = iwb->data[src++]; dst 196 drivers/staging/isdn/gigaset/isocdata.c if (dst <= limit) { dst 198 drivers/staging/isdn/gigaset/isocdata.c memset(iwb->data + dst, iwb->idle, dst 199 drivers/staging/isdn/gigaset/isocdata.c BAS_OUTBUFSIZE + BAS_OUTBUFPAD - dst); dst 838 drivers/staging/isdn/gigaset/isocdata.c unsigned char *dst; dst 852 drivers/staging/isdn/gigaset/isocdata.c dst = skb_put(skb, count < dobytes ? 
count : dobytes); dst 854 drivers/staging/isdn/gigaset/isocdata.c *dst++ = bitrev8(*src++); dst 217 drivers/staging/isdn/hysdn/boardergo.c unsigned char *dst; dst 224 drivers/staging/isdn/hysdn/boardergo.c dst = card->dpram; /* pointer to start of DPRAM */ dst 225 drivers/staging/isdn/hysdn/boardergo.c dst += (offs + ERG_DPRAM_FILL_SIZE); /* offset in the DPRAM */ dst 227 drivers/staging/isdn/hysdn/boardergo.c *dst++ = *(buf + 1); /* high byte */ dst 228 drivers/staging/isdn/hysdn/boardergo.c *dst++ = *buf; /* low byte */ dst 229 drivers/staging/isdn/hysdn/boardergo.c dst += 2; /* point to next longword */ dst 264 drivers/staging/isdn/hysdn/boardergo.c unsigned char *dst; dst 274 drivers/staging/isdn/hysdn/boardergo.c dst = sp->Data; /* point to data in spool structure */ dst 306 drivers/staging/isdn/hysdn/boardergo.c *(dst + wr_mirror) = *buf++; /* output one byte */ dst 826 drivers/staging/media/allegro-dvt/allegro-core.c void *dst, size_t nbyte) dst 843 drivers/staging/media/allegro-dvt/allegro-core.c dst, sizeof(*header) / 4); dst 844 drivers/staging/media/allegro-dvt/allegro-core.c header = dst; dst 871 drivers/staging/media/allegro-dvt/allegro-core.c dst + sizeof(*header), body_no_wrap / 4); dst 873 drivers/staging/media/allegro-dvt/allegro-core.c dst + sizeof(*header) + body_no_wrap, dst 96 drivers/staging/media/hantro/hantro_drv.c struct vb2_v4l2_buffer *src, *dst; dst 104 drivers/staging/media/hantro/hantro_drv.c dst = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx); dst 108 drivers/staging/media/hantro/hantro_drv.c if (WARN_ON(!dst)) dst 112 drivers/staging/media/hantro/hantro_drv.c dst->sequence = ctx->sequence_cap++; dst 114 drivers/staging/media/hantro/hantro_drv.c ret = ctx->buf_finish(ctx, &dst->vb2_buf, bytesused); dst 119 drivers/staging/media/hantro/hantro_drv.c v4l2_m2m_buf_done(dst, result); dst 179 drivers/staging/media/hantro/hantro_drv.c struct vb2_v4l2_buffer *src, *dst; dst 183 drivers/staging/media/hantro/hantro_drv.c dst = hantro_get_dst_buf(ctx); dst 192 drivers/staging/media/hantro/hantro_drv.c v4l2_m2m_buf_copy_metadata(src, dst, true); dst 206 drivers/staging/media/hantro/hantro_h264.c u32 *dst = (u32 *)tbl->scaling_list; dst 213 drivers/staging/media/hantro/hantro_h264.c *dst++ = swab32(src[j]); dst 220 drivers/staging/media/hantro/hantro_h264.c *dst++ = swab32(src[j]); dst 54 drivers/staging/media/hantro/hantro_vp8.c u8 *dst; dst 57 drivers/staging/media/hantro/hantro_vp8.c dst = ctx->vp8_dec.prob_tbl.cpu; dst 59 drivers/staging/media/hantro/hantro_vp8.c dst[0] = hdr->prob_skip_false; dst 60 drivers/staging/media/hantro/hantro_vp8.c dst[1] = hdr->prob_intra; dst 61 drivers/staging/media/hantro/hantro_vp8.c dst[2] = hdr->prob_last; dst 62 drivers/staging/media/hantro/hantro_vp8.c dst[3] = hdr->prob_gf; dst 63 drivers/staging/media/hantro/hantro_vp8.c dst[4] = hdr->segment_header.segment_probs[0]; dst 64 drivers/staging/media/hantro/hantro_vp8.c dst[5] = hdr->segment_header.segment_probs[1]; dst 65 drivers/staging/media/hantro/hantro_vp8.c dst[6] = hdr->segment_header.segment_probs[2]; dst 66 drivers/staging/media/hantro/hantro_vp8.c dst[7] = 0; dst 68 drivers/staging/media/hantro/hantro_vp8.c dst += 8; dst 69 drivers/staging/media/hantro/hantro_vp8.c dst[0] = entropy->y_mode_probs[0]; dst 70 drivers/staging/media/hantro/hantro_vp8.c dst[1] = entropy->y_mode_probs[1]; dst 71 drivers/staging/media/hantro/hantro_vp8.c dst[2] = entropy->y_mode_probs[2]; dst 72 drivers/staging/media/hantro/hantro_vp8.c dst[3] = entropy->y_mode_probs[3]; dst 73 
drivers/staging/media/hantro/hantro_vp8.c dst[4] = entropy->uv_mode_probs[0]; dst 74 drivers/staging/media/hantro/hantro_vp8.c dst[5] = entropy->uv_mode_probs[1]; dst 75 drivers/staging/media/hantro/hantro_vp8.c dst[6] = entropy->uv_mode_probs[2]; dst 76 drivers/staging/media/hantro/hantro_vp8.c dst[7] = 0; /*unused */ dst 79 drivers/staging/media/hantro/hantro_vp8.c dst += 8; dst 80 drivers/staging/media/hantro/hantro_vp8.c dst[0] = entropy->mv_probs[0][0]; /* is short */ dst 81 drivers/staging/media/hantro/hantro_vp8.c dst[1] = entropy->mv_probs[1][0]; dst 82 drivers/staging/media/hantro/hantro_vp8.c dst[2] = entropy->mv_probs[0][1]; /* sign */ dst 83 drivers/staging/media/hantro/hantro_vp8.c dst[3] = entropy->mv_probs[1][1]; dst 84 drivers/staging/media/hantro/hantro_vp8.c dst[4] = entropy->mv_probs[0][8 + 9]; dst 85 drivers/staging/media/hantro/hantro_vp8.c dst[5] = entropy->mv_probs[0][9 + 9]; dst 86 drivers/staging/media/hantro/hantro_vp8.c dst[6] = entropy->mv_probs[1][8 + 9]; dst 87 drivers/staging/media/hantro/hantro_vp8.c dst[7] = entropy->mv_probs[1][9 + 9]; dst 88 drivers/staging/media/hantro/hantro_vp8.c dst += 8; dst 91 drivers/staging/media/hantro/hantro_vp8.c dst[0] = entropy->mv_probs[i][j + 9 + 0]; dst 92 drivers/staging/media/hantro/hantro_vp8.c dst[1] = entropy->mv_probs[i][j + 9 + 1]; dst 93 drivers/staging/media/hantro/hantro_vp8.c dst[2] = entropy->mv_probs[i][j + 9 + 2]; dst 94 drivers/staging/media/hantro/hantro_vp8.c dst[3] = entropy->mv_probs[i][j + 9 + 3]; dst 95 drivers/staging/media/hantro/hantro_vp8.c dst += 4; dst 99 drivers/staging/media/hantro/hantro_vp8.c dst[0] = entropy->mv_probs[i][0 + 2]; dst 100 drivers/staging/media/hantro/hantro_vp8.c dst[1] = entropy->mv_probs[i][1 + 2]; dst 101 drivers/staging/media/hantro/hantro_vp8.c dst[2] = entropy->mv_probs[i][2 + 2]; dst 102 drivers/staging/media/hantro/hantro_vp8.c dst[3] = entropy->mv_probs[i][3 + 2]; dst 103 drivers/staging/media/hantro/hantro_vp8.c dst[4] = entropy->mv_probs[i][4 + 2]; dst 104 drivers/staging/media/hantro/hantro_vp8.c dst[5] = entropy->mv_probs[i][5 + 2]; dst 105 drivers/staging/media/hantro/hantro_vp8.c dst[6] = entropy->mv_probs[i][6 + 2]; dst 106 drivers/staging/media/hantro/hantro_vp8.c dst[7] = 0; /*unused */ dst 107 drivers/staging/media/hantro/hantro_vp8.c dst += 8; dst 111 drivers/staging/media/hantro/hantro_vp8.c dst = ctx->vp8_dec.prob_tbl.cpu; dst 112 drivers/staging/media/hantro/hantro_vp8.c dst += (8 * 7); dst 116 drivers/staging/media/hantro/hantro_vp8.c dst[0] = entropy->coeff_probs[i][j][k][0]; dst 117 drivers/staging/media/hantro/hantro_vp8.c dst[1] = entropy->coeff_probs[i][j][k][1]; dst 118 drivers/staging/media/hantro/hantro_vp8.c dst[2] = entropy->coeff_probs[i][j][k][2]; dst 119 drivers/staging/media/hantro/hantro_vp8.c dst[3] = entropy->coeff_probs[i][j][k][3]; dst 120 drivers/staging/media/hantro/hantro_vp8.c dst += 4; dst 126 drivers/staging/media/hantro/hantro_vp8.c dst = ctx->vp8_dec.prob_tbl.cpu; dst 127 drivers/staging/media/hantro/hantro_vp8.c dst += (8 * 55); dst 131 drivers/staging/media/hantro/hantro_vp8.c dst[0] = entropy->coeff_probs[i][j][k][4]; dst 132 drivers/staging/media/hantro/hantro_vp8.c dst[1] = entropy->coeff_probs[i][j][k][5]; dst 133 drivers/staging/media/hantro/hantro_vp8.c dst[2] = entropy->coeff_probs[i][j][k][6]; dst 134 drivers/staging/media/hantro/hantro_vp8.c dst[3] = entropy->coeff_probs[i][j][k][7]; dst 135 drivers/staging/media/hantro/hantro_vp8.c dst[4] = entropy->coeff_probs[i][j][k][8]; dst 136 
drivers/staging/media/hantro/hantro_vp8.c dst[5] = entropy->coeff_probs[i][j][k][9]; dst 137 drivers/staging/media/hantro/hantro_vp8.c dst[6] = entropy->coeff_probs[i][j][k][10]; dst 138 drivers/staging/media/hantro/hantro_vp8.c dst[7] = 0; /*unused */ dst 139 drivers/staging/media/hantro/hantro_vp8.c dst += 8; dst 72 drivers/staging/media/sunxi/cedrus/cedrus.h struct vb2_v4l2_buffer *dst; dst 33 drivers/staging/media/sunxi/cedrus/cedrus_dec.c run.dst = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx); dst 66 drivers/staging/media/sunxi/cedrus/cedrus_dec.c v4l2_m2m_buf_copy_metadata(run.src, run.dst, true); dst 143 drivers/staging/media/sunxi/cedrus/cedrus_h264.c output_buf = vb2_to_cedrus_buffer(&run->dst->vb2_buf); dst 181 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c dst_luma_addr = cedrus_dst_buf_addr(ctx, run->dst->vb2_buf.index, 0); dst 182 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c dst_chroma_addr = cedrus_dst_buf_addr(ctx, run->dst->vb2_buf.index, 1); dst 318 drivers/staging/mt7621-dma/mtk-hsdma.c dma_addr_t src, dst; dst 361 drivers/staging/mt7621-dma/mtk-hsdma.c dst = sg->dst_addr; dst 369 drivers/staging/mt7621-dma/mtk-hsdma.c rx_desc->addr0 = dst; dst 372 drivers/staging/mt7621-dma/mtk-hsdma.c dst += tlen; dst 297 drivers/staging/rtl8188eu/core/rtw_mlme.c int is_same_network(struct wlan_bssid_ex *src, struct wlan_bssid_ex *dst) dst 303 drivers/staging/rtl8188eu/core/rtw_mlme.c memcpy((u8 *)&le_dcap, rtw_get_capability_from_ie(dst->ies), 2); dst 308 drivers/staging/rtl8188eu/core/rtw_mlme.c return ((src->ssid.ssid_length == dst->ssid.ssid_length) && dst 309 drivers/staging/rtl8188eu/core/rtw_mlme.c (!memcmp(src->MacAddress, dst->MacAddress, ETH_ALEN)) && dst 310 drivers/staging/rtl8188eu/core/rtw_mlme.c (!memcmp(src->ssid.ssid, dst->ssid.ssid, src->ssid.ssid_length)) && dst 336 drivers/staging/rtl8188eu/core/rtw_mlme.c void update_network(struct wlan_bssid_ex *dst, struct wlan_bssid_ex *src, dst 339 drivers/staging/rtl8188eu/core/rtw_mlme.c long rssi_ori = dst->Rssi; dst 345 drivers/staging/rtl8188eu/core/rtw_mlme.c rtw_hal_antdiv_rssi_compared(padapter, dst, src); /* this will update src.Rssi, need consider again */ dst 355 drivers/staging/rtl8188eu/core/rtw_mlme.c rssi_final = (src->Rssi + dst->Rssi * 4) / 5; dst 360 drivers/staging/rtl8188eu/core/rtw_mlme.c ss_final = ((u32)(src->PhyInfo.SignalStrength)+(u32)(dst->PhyInfo.SignalStrength)*4)/5; dst 361 drivers/staging/rtl8188eu/core/rtw_mlme.c sq_final = ((u32)(src->PhyInfo.SignalQuality)+(u32)(dst->PhyInfo.SignalQuality)*4)/5; dst 362 drivers/staging/rtl8188eu/core/rtw_mlme.c rssi_final = (src->Rssi+dst->Rssi*4)/5; dst 365 drivers/staging/rtl8188eu/core/rtw_mlme.c ss_final = dst->PhyInfo.SignalStrength; dst 366 drivers/staging/rtl8188eu/core/rtw_mlme.c sq_final = dst->PhyInfo.SignalQuality; dst 367 drivers/staging/rtl8188eu/core/rtw_mlme.c rssi_final = dst->Rssi; dst 371 drivers/staging/rtl8188eu/core/rtw_mlme.c memcpy((u8 *)dst, (u8 *)src, get_wlan_bssid_ex_sz(src)); dst 372 drivers/staging/rtl8188eu/core/rtw_mlme.c dst->PhyInfo.SignalStrength = ss_final; dst 373 drivers/staging/rtl8188eu/core/rtw_mlme.c dst->PhyInfo.SignalQuality = sq_final; dst 374 drivers/staging/rtl8188eu/core/rtw_mlme.c dst->Rssi = rssi_final; dst 619 drivers/staging/rtl8188eu/core/rtw_recv.c if (!is_multicast_ether_addr(pattrib->dst)) dst 646 drivers/staging/rtl8188eu/core/rtw_recv.c bool mcast = is_multicast_ether_addr(pattrib->dst); dst 657 drivers/staging/rtl8188eu/core/rtw_recv.c if (memcmp(myhwaddr, pattrib->dst, ETH_ALEN) && !mcast) { dst 687 
drivers/staging/rtl8188eu/core/rtw_recv.c if (memcmp(pattrib->bssid, pattrib->dst, ETH_ALEN)) { dst 725 drivers/staging/rtl8188eu/core/rtw_recv.c bool mcast = is_multicast_ether_addr(pattrib->dst); dst 738 drivers/staging/rtl8188eu/core/rtw_recv.c if (memcmp(myhwaddr, pattrib->dst, ETH_ALEN) && !mcast) { dst 740 drivers/staging/rtl8188eu/core/rtw_recv.c (" %s: compare DA fail; DA=%pM\n", __func__, (pattrib->dst))); dst 787 drivers/staging/rtl8188eu/core/rtw_recv.c if (!memcmp(myhwaddr, pattrib->dst, ETH_ALEN) && !mcast) { dst 1050 drivers/staging/rtl8188eu/core/rtw_recv.c memcpy(pattrib->dst, pda, ETH_ALEN); dst 1305 drivers/staging/rtl8188eu/core/rtw_recv.c memcpy(ptr, pattrib->dst, ETH_ALEN); dst 1576 drivers/staging/rtl8188eu/core/rtw_recv.c memcpy(skb_push(sub_skb, ETH_ALEN), pattrib->dst, ETH_ALEN); dst 1583 drivers/staging/rtl8188eu/core/rtw_recv.c memcpy(skb_push(sub_skb, ETH_ALEN), pattrib->dst, ETH_ALEN); dst 327 drivers/staging/rtl8188eu/core/rtw_security.c void rtw_secgetmic(struct mic_data *pmicdata, u8 *dst) dst 339 drivers/staging/rtl8188eu/core/rtw_security.c secmicputuint32(dst, pmicdata->L); dst 340 drivers/staging/rtl8188eu/core/rtw_security.c secmicputuint32(dst+4, pmicdata->R); dst 409 drivers/staging/rtl8188eu/core/rtw_xmit.c memcpy(pattrib->dst, &etherhdr.h_dest, ETH_ALEN); dst 416 drivers/staging/rtl8188eu/core/rtw_xmit.c memcpy(pattrib->ra, pattrib->dst, ETH_ALEN); dst 422 drivers/staging/rtl8188eu/core/rtw_xmit.c memcpy(pattrib->ra, pattrib->dst, ETH_ALEN); dst 768 drivers/staging/rtl8188eu/core/rtw_xmit.c memcpy(pwlanhdr->addr3, pattrib->dst, ETH_ALEN); dst 775 drivers/staging/rtl8188eu/core/rtw_xmit.c memcpy(pwlanhdr->addr1, pattrib->dst, ETH_ALEN); dst 783 drivers/staging/rtl8188eu/core/rtw_xmit.c memcpy(pwlanhdr->addr1, pattrib->dst, ETH_ALEN); dst 175 drivers/staging/rtl8188eu/hal/rtl8188e_dm.c void rtw_hal_antdiv_rssi_compared(struct adapter *Adapter, struct wlan_bssid_ex *dst, struct wlan_bssid_ex *src) dst 179 drivers/staging/rtl8188eu/hal/rtl8188e_dm.c if (dst->Rssi >= src->Rssi) {/* keep org parameter */ dst 180 drivers/staging/rtl8188eu/hal/rtl8188e_dm.c src->Rssi = dst->Rssi; dst 181 drivers/staging/rtl8188eu/hal/rtl8188e_dm.c src->PhyInfo.Optimum_antenna = dst->PhyInfo.Optimum_antenna; dst 214 drivers/staging/rtl8188eu/include/hal_intf.h struct wlan_bssid_ex *dst, dst 42 drivers/staging/rtl8188eu/include/rtl8188e_dm.h void AntDivCompare8188E(struct adapter *adapt, struct wlan_bssid_ex *dst, dst 352 drivers/staging/rtl8188eu/include/rtw_mlme.h int is_same_network(struct wlan_bssid_ex *src, struct wlan_bssid_ex *dst); dst 485 drivers/staging/rtl8188eu/include/rtw_mlme_ext.h void update_network(struct wlan_bssid_ex *dst, struct wlan_bssid_ex *src, dst 111 drivers/staging/rtl8188eu/include/rtw_recv.h u8 dst[ETH_ALEN]; dst 295 drivers/staging/rtl8188eu/include/rtw_security.h void rtw_secgetmic(struct mic_data *pmicdata, u8 *dst); dst 125 drivers/staging/rtl8188eu/include/rtw_xmit.h u8 dst[ETH_ALEN]; dst 87 drivers/staging/rtl8188eu/os_dep/recv_linux.c bool mcast = is_multicast_ether_addr(pattrib->dst); dst 89 drivers/staging/rtl8188eu/os_dep/recv_linux.c if (memcmp(pattrib->dst, myid(&padapter->eeprompriv), dst 95 drivers/staging/rtl8188eu/os_dep/recv_linux.c psta = rtw_get_stainfo(pstapriv, pattrib->dst); dst 128 drivers/staging/rtl8192e/rtl819x_BAProc.c static struct sk_buff *rtllib_DELBA(struct rtllib_device *ieee, u8 *dst, dst 140 drivers/staging/rtl8192e/rtl819x_BAProc.c __func__, ReasonCode, dst); dst 155 drivers/staging/rtl8192e/rtl819x_BAProc.c
ether_addr_copy(Delba->addr1, dst); dst 179 drivers/staging/rtl8192e/rtl819x_BAProc.c static void rtllib_send_ADDBAReq(struct rtllib_device *ieee, u8 *dst, dst 184 drivers/staging/rtl8192e/rtl819x_BAProc.c skb = rtllib_ADDBA(ieee, dst, pBA, 0, ACT_ADDBAREQ); dst 194 drivers/staging/rtl8192e/rtl819x_BAProc.c static void rtllib_send_ADDBARsp(struct rtllib_device *ieee, u8 *dst, dst 199 drivers/staging/rtl8192e/rtl819x_BAProc.c skb = rtllib_ADDBA(ieee, dst, pBA, StatusCode, ACT_ADDBARSP); dst 206 drivers/staging/rtl8192e/rtl819x_BAProc.c static void rtllib_send_DELBA(struct rtllib_device *ieee, u8 *dst, dst 212 drivers/staging/rtl8192e/rtl819x_BAProc.c skb = rtllib_DELBA(ieee, dst, pBA, TxRxSelect, ReasonCode); dst 223 drivers/staging/rtl8192e/rtl819x_BAProc.c u8 *dst = NULL, *pDialogToken = NULL, *tag = NULL; dst 244 drivers/staging/rtl8192e/rtl819x_BAProc.c dst = (u8 *)(&req->addr2[0]); dst 251 drivers/staging/rtl8192e/rtl819x_BAProc.c RT_TRACE(COMP_DBG, "====>rx ADDBAREQ from : %pM\n", dst); dst 262 drivers/staging/rtl8192e/rtl819x_BAProc.c if (!GetTs(ieee, (struct ts_common_info **)(&pTS), dst, dst 292 drivers/staging/rtl8192e/rtl819x_BAProc.c rtllib_send_ADDBARsp(ieee, dst, pBA, ADDBA_STATUS_SUCCESS); dst 304 drivers/staging/rtl8192e/rtl819x_BAProc.c rtllib_send_ADDBARsp(ieee, dst, &BA, rc); dst 314 drivers/staging/rtl8192e/rtl819x_BAProc.c u8 *dst = NULL, *pDialogToken = NULL, *tag = NULL; dst 327 drivers/staging/rtl8192e/rtl819x_BAProc.c dst = (u8 *)(&rsp->addr2[0]); dst 334 drivers/staging/rtl8192e/rtl819x_BAProc.c RT_TRACE(COMP_DBG, "====>rx ADDBARSP from : %pM\n", dst); dst 348 drivers/staging/rtl8192e/rtl819x_BAProc.c if (!GetTs(ieee, (struct ts_common_info **)(&pTS), dst, dst 408 drivers/staging/rtl8192e/rtl819x_BAProc.c rtllib_send_DELBA(ieee, dst, &BA, TX_DIR, ReasonCode); dst 417 drivers/staging/rtl8192e/rtl819x_BAProc.c u8 *dst = NULL; dst 440 drivers/staging/rtl8192e/rtl819x_BAProc.c dst = (u8 *)(&delba->addr2[0]); dst 446 drivers/staging/rtl8192e/rtl819x_BAProc.c if (!GetTs(ieee, (struct ts_common_info **)&pRxTs, dst, dst 450 drivers/staging/rtl8192e/rtl819x_BAProc.c __func__, dst, dst 459 drivers/staging/rtl8192e/rtl819x_BAProc.c if (!GetTs(ieee, (struct ts_common_info **)&pTxTs, dst, dst 869 drivers/staging/rtl8192e/rtllib.h u8 dst[ETH_ALEN]; dst 59 drivers/staging/rtl8192e/rtllib_rx.c unsigned int frag, u8 tid, u8 *src, u8 *dst) dst 78 drivers/staging/rtl8192e/rtllib_rx.c memcmp(entry->dst_addr, dst, ETH_ALEN) == 0) dst 501 drivers/staging/rtl8192e/rtllib_rx.c memcpy(skb_push(sub_skb, ETH_ALEN), prxb->dst, ETH_ALEN); dst 508 drivers/staging/rtl8192e/rtllib_rx.c memcpy(skb_push(sub_skb, ETH_ALEN), prxb->dst, ETH_ALEN); dst 765 drivers/staging/rtl8192e/rtllib_rx.c struct rtllib_rxb *rxb, u8 *src, u8 *dst) dst 817 drivers/staging/rtl8192e/rtllib_rx.c memcpy(rxb->dst, dst, ETH_ALEN); dst 824 drivers/staging/rtl8192e/rtllib_rx.c memcpy(rxb->dst, dst, ETH_ALEN); dst 957 drivers/staging/rtl8192e/rtllib_rx.c struct rtllib_hdr_4addr *hdr, u8 *dst, dst 964 drivers/staging/rtl8192e/rtllib_rx.c ether_addr_copy(dst, hdr->addr1); dst 969 drivers/staging/rtl8192e/rtllib_rx.c ether_addr_copy(dst, hdr->addr3); dst 974 drivers/staging/rtl8192e/rtllib_rx.c ether_addr_copy(dst, hdr->addr3); dst 979 drivers/staging/rtl8192e/rtllib_rx.c ether_addr_copy(dst, hdr->addr1); dst 987 drivers/staging/rtl8192e/rtllib_rx.c u8 *dst, u8 *src, u8 *bssid, u8 *addr2) dst 1005 drivers/staging/rtl8192e/rtllib_rx.c !ether_addr_equal(dst, ieee->current_network.bssid) && dst 1034 
drivers/staging/rtl8192e/rtllib_rx.c if (is_multicast_ether_addr(dst)) { dst 1228 drivers/staging/rtl8192e/rtllib_rx.c u8 *dst, dst 1259 drivers/staging/rtl8192e/rtllib_rx.c dst); dst 1270 drivers/staging/rtl8192e/rtllib_rx.c dst); dst 1276 drivers/staging/rtl8192e/rtllib_rx.c if (is_multicast_ether_addr(dst)) dst 1303 drivers/staging/rtl8192e/rtllib_rx.c u8 dst[ETH_ALEN]; dst 1363 drivers/staging/rtl8192e/rtllib_rx.c rtllib_rx_extract_addr(ieee, hdr, dst, src, bssid); dst 1366 drivers/staging/rtl8192e/rtllib_rx.c ret = rtllib_rx_data_filter(ieee, fc, dst, src, bssid, hdr->addr2); dst 1420 drivers/staging/rtl8192e/rtllib_rx.c if (parse_subframe(ieee, skb, rx_stats, rxb, src, dst) == 0) { dst 1447 drivers/staging/rtl8192e/rtllib_rx.c rtllib_rx_indicate_pkt_legacy(ieee, rx_stats, rxb, dst, src); dst 2401 drivers/staging/rtl8192e/rtllib_rx.c struct rtllib_network *dst, u8 ssidbroad) dst 2408 drivers/staging/rtl8192e/rtllib_rx.c return (((src->ssid_len == dst->ssid_len) || (!ssidbroad)) && dst 2409 drivers/staging/rtl8192e/rtllib_rx.c (src->channel == dst->channel) && dst 2410 drivers/staging/rtl8192e/rtllib_rx.c !memcmp(src->bssid, dst->bssid, ETH_ALEN) && dst 2411 drivers/staging/rtl8192e/rtllib_rx.c (!memcmp(src->ssid, dst->ssid, src->ssid_len) || dst 2414 drivers/staging/rtl8192e/rtllib_rx.c (dst->capability & WLAN_CAPABILITY_IBSS)) && dst 2416 drivers/staging/rtl8192e/rtllib_rx.c (dst->capability & WLAN_CAPABILITY_ESS))); dst 2421 drivers/staging/rtl8192e/rtllib_rx.c struct rtllib_network *dst, dst 2427 drivers/staging/rtl8192e/rtllib_rx.c memcpy(&dst->stats, &src->stats, sizeof(struct rtllib_rx_stats)); dst 2428 drivers/staging/rtl8192e/rtllib_rx.c dst->capability = src->capability; dst 2429 drivers/staging/rtl8192e/rtllib_rx.c memcpy(dst->rates, src->rates, src->rates_len); dst 2430 drivers/staging/rtl8192e/rtllib_rx.c dst->rates_len = src->rates_len; dst 2431 drivers/staging/rtl8192e/rtllib_rx.c memcpy(dst->rates_ex, src->rates_ex, src->rates_ex_len); dst 2432 drivers/staging/rtl8192e/rtllib_rx.c dst->rates_ex_len = src->rates_ex_len; dst 2434 drivers/staging/rtl8192e/rtllib_rx.c if (dst->ssid_len == 0) { dst 2435 drivers/staging/rtl8192e/rtllib_rx.c memset(dst->hidden_ssid, 0, sizeof(dst->hidden_ssid)); dst 2436 drivers/staging/rtl8192e/rtllib_rx.c dst->hidden_ssid_len = src->ssid_len; dst 2437 drivers/staging/rtl8192e/rtllib_rx.c memcpy(dst->hidden_ssid, src->ssid, src->ssid_len); dst 2439 drivers/staging/rtl8192e/rtllib_rx.c memset(dst->ssid, 0, dst->ssid_len); dst 2440 drivers/staging/rtl8192e/rtllib_rx.c dst->ssid_len = src->ssid_len; dst 2441 drivers/staging/rtl8192e/rtllib_rx.c memcpy(dst->ssid, src->ssid, src->ssid_len); dst 2444 drivers/staging/rtl8192e/rtllib_rx.c dst->mode = src->mode; dst 2445 drivers/staging/rtl8192e/rtllib_rx.c dst->flags = src->flags; dst 2446 drivers/staging/rtl8192e/rtllib_rx.c dst->time_stamp[0] = src->time_stamp[0]; dst 2447 drivers/staging/rtl8192e/rtllib_rx.c dst->time_stamp[1] = src->time_stamp[1]; dst 2449 drivers/staging/rtl8192e/rtllib_rx.c dst->erp_value = src->erp_value; dst 2450 drivers/staging/rtl8192e/rtllib_rx.c dst->berp_info_valid = src->berp_info_valid = true; dst 2452 drivers/staging/rtl8192e/rtllib_rx.c dst->beacon_interval = src->beacon_interval; dst 2453 drivers/staging/rtl8192e/rtllib_rx.c dst->listen_interval = src->listen_interval; dst 2454 drivers/staging/rtl8192e/rtllib_rx.c dst->atim_window = src->atim_window; dst 2455 drivers/staging/rtl8192e/rtllib_rx.c dst->dtim_period = src->dtim_period; dst 2456 
drivers/staging/rtl8192e/rtllib_rx.c dst->dtim_data = src->dtim_data; dst 2457 drivers/staging/rtl8192e/rtllib_rx.c dst->last_dtim_sta_time = src->last_dtim_sta_time; dst 2458 drivers/staging/rtl8192e/rtllib_rx.c memcpy(&dst->tim, &src->tim, sizeof(struct rtllib_tim_parameters)); dst 2460 drivers/staging/rtl8192e/rtllib_rx.c dst->bssht.bdSupportHT = src->bssht.bdSupportHT; dst 2461 drivers/staging/rtl8192e/rtllib_rx.c dst->bssht.bdRT2RTAggregation = src->bssht.bdRT2RTAggregation; dst 2462 drivers/staging/rtl8192e/rtllib_rx.c dst->bssht.bdHTCapLen = src->bssht.bdHTCapLen; dst 2463 drivers/staging/rtl8192e/rtllib_rx.c memcpy(dst->bssht.bdHTCapBuf, src->bssht.bdHTCapBuf, dst 2465 drivers/staging/rtl8192e/rtllib_rx.c dst->bssht.bdHTInfoLen = src->bssht.bdHTInfoLen; dst 2466 drivers/staging/rtl8192e/rtllib_rx.c memcpy(dst->bssht.bdHTInfoBuf, src->bssht.bdHTInfoBuf, dst 2468 drivers/staging/rtl8192e/rtllib_rx.c dst->bssht.bdHTSpecVer = src->bssht.bdHTSpecVer; dst 2469 drivers/staging/rtl8192e/rtllib_rx.c dst->bssht.bdRT2RTLongSlotTime = src->bssht.bdRT2RTLongSlotTime; dst 2470 drivers/staging/rtl8192e/rtllib_rx.c dst->broadcom_cap_exist = src->broadcom_cap_exist; dst 2471 drivers/staging/rtl8192e/rtllib_rx.c dst->ralink_cap_exist = src->ralink_cap_exist; dst 2472 drivers/staging/rtl8192e/rtllib_rx.c dst->atheros_cap_exist = src->atheros_cap_exist; dst 2473 drivers/staging/rtl8192e/rtllib_rx.c dst->realtek_cap_exit = src->realtek_cap_exit; dst 2474 drivers/staging/rtl8192e/rtllib_rx.c dst->marvell_cap_exist = src->marvell_cap_exist; dst 2475 drivers/staging/rtl8192e/rtllib_rx.c dst->cisco_cap_exist = src->cisco_cap_exist; dst 2476 drivers/staging/rtl8192e/rtllib_rx.c dst->airgo_cap_exist = src->airgo_cap_exist; dst 2477 drivers/staging/rtl8192e/rtllib_rx.c dst->unknown_cap_exist = src->unknown_cap_exist; dst 2478 drivers/staging/rtl8192e/rtllib_rx.c memcpy(dst->wpa_ie, src->wpa_ie, src->wpa_ie_len); dst 2479 drivers/staging/rtl8192e/rtllib_rx.c dst->wpa_ie_len = src->wpa_ie_len; dst 2480 drivers/staging/rtl8192e/rtllib_rx.c memcpy(dst->rsn_ie, src->rsn_ie, src->rsn_ie_len); dst 2481 drivers/staging/rtl8192e/rtllib_rx.c dst->rsn_ie_len = src->rsn_ie_len; dst 2482 drivers/staging/rtl8192e/rtllib_rx.c memcpy(dst->wzc_ie, src->wzc_ie, src->wzc_ie_len); dst 2483 drivers/staging/rtl8192e/rtllib_rx.c dst->wzc_ie_len = src->wzc_ie_len; dst 2485 drivers/staging/rtl8192e/rtllib_rx.c dst->last_scanned = jiffies; dst 2487 drivers/staging/rtl8192e/rtllib_rx.c qos_active = dst->qos_data.active; dst 2488 drivers/staging/rtl8192e/rtllib_rx.c old_param = dst->qos_data.param_count; dst 2489 drivers/staging/rtl8192e/rtllib_rx.c dst->qos_data.supported = src->qos_data.supported; dst 2490 drivers/staging/rtl8192e/rtllib_rx.c if (dst->flags & NETWORK_HAS_QOS_PARAMETERS) dst 2491 drivers/staging/rtl8192e/rtllib_rx.c memcpy(&dst->qos_data, &src->qos_data, dst 2493 drivers/staging/rtl8192e/rtllib_rx.c if (dst->qos_data.supported == 1) { dst 2494 drivers/staging/rtl8192e/rtllib_rx.c if (dst->ssid_len) dst 2497 drivers/staging/rtl8192e/rtllib_rx.c dst->ssid); dst 2502 drivers/staging/rtl8192e/rtllib_rx.c dst->qos_data.active = qos_active; dst 2503 drivers/staging/rtl8192e/rtllib_rx.c dst->qos_data.old_param_count = old_param; dst 2505 drivers/staging/rtl8192e/rtllib_rx.c dst->wmm_info = src->wmm_info; dst 2510 drivers/staging/rtl8192e/rtllib_rx.c memcpy(dst->wmm_param, src->wmm_param, WME_AC_PRAM_LEN); dst 2512 drivers/staging/rtl8192e/rtllib_rx.c dst->SignalStrength = src->SignalStrength; dst 2513 
drivers/staging/rtl8192e/rtllib_rx.c dst->RSSI = src->RSSI; dst 2514 drivers/staging/rtl8192e/rtllib_rx.c dst->Turbo_Enable = src->Turbo_Enable; dst 2516 drivers/staging/rtl8192e/rtllib_rx.c dst->CountryIeLen = src->CountryIeLen; dst 2517 drivers/staging/rtl8192e/rtllib_rx.c memcpy(dst->CountryIeBuf, src->CountryIeBuf, src->CountryIeLen); dst 2519 drivers/staging/rtl8192e/rtllib_rx.c dst->bWithAironetIE = src->bWithAironetIE; dst 2520 drivers/staging/rtl8192e/rtllib_rx.c dst->bCkipSupported = src->bCkipSupported; dst 2521 drivers/staging/rtl8192e/rtllib_rx.c memcpy(dst->CcxRmState, src->CcxRmState, 2); dst 2522 drivers/staging/rtl8192e/rtllib_rx.c dst->bCcxRmEnable = src->bCcxRmEnable; dst 2523 drivers/staging/rtl8192e/rtllib_rx.c dst->MBssidMask = src->MBssidMask; dst 2524 drivers/staging/rtl8192e/rtllib_rx.c dst->bMBssidValid = src->bMBssidValid; dst 2525 drivers/staging/rtl8192e/rtllib_rx.c memcpy(dst->MBssid, src->MBssid, 6); dst 2526 drivers/staging/rtl8192e/rtllib_rx.c dst->bWithCcxVerNum = src->bWithCcxVerNum; dst 2527 drivers/staging/rtl8192e/rtllib_rx.c dst->BssCcxVerNumber = src->BssCcxVerNumber; dst 498 drivers/staging/rtl8192e/rtllib_tx.c u8 *dst) dst 502 drivers/staging/rtl8192e/rtllib_tx.c if (is_multicast_ether_addr(dst)) dst 507 drivers/staging/rtl8192e/rtllib_tx.c if (!GetTs(ieee, (struct ts_common_info **)(&pTS), dst, dst 1037 drivers/staging/rtl8192u/ieee80211/ieee80211.h u8 dst[ETH_ALEN]; dst 63 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c unsigned int frag, u8 tid, u8 *src, u8 *dst) dst 83 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcmp(entry->dst_addr, dst, ETH_ALEN) == 0) dst 546 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(skb_push(sub_skb, ETH_ALEN), prxb->dst, ETH_ALEN); dst 551 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(skb_push(sub_skb, ETH_ALEN), prxb->dst, ETH_ALEN); dst 754 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c struct ieee80211_rxb *rxb, u8 *src, u8 *dst) dst 799 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(rxb->dst, dst, ETH_ALEN); dst 805 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(rxb->dst, dst, ETH_ALEN); dst 894 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c u8 dst[ETH_ALEN]; dst 1036 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(dst, hdr->addr1, ETH_ALEN); dst 1041 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(dst, hdr->addr3, ETH_ALEN); dst 1048 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(dst, hdr->addr3, ETH_ALEN); dst 1053 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(dst, hdr->addr1, ETH_ALEN); dst 1253 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c if (parse_subframe(skb, rx_stats, rxb, src, dst) == 0) { dst 1280 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(skb_push(sub_skb, ETH_ALEN), dst, ETH_ALEN); dst 1287 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(skb_push(sub_skb, ETH_ALEN), dst, ETH_ALEN); dst 1292 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c if (is_multicast_ether_addr(dst)) { dst 2162 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c struct ieee80211_network *dst, struct ieee80211_device *ieee) dst 2169 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c (((src->ssid_len == dst->ssid_len) || (ieee->iw_mode == IW_MODE_INFRA)) && dst 2170 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c (src->channel == dst->channel) && dst 2171 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c !memcmp(src->bssid, dst->bssid, ETH_ALEN) && dst 2173 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 
(!memcmp(src->ssid, dst->ssid, src->ssid_len) || (ieee->iw_mode == IW_MODE_INFRA)) && dst 2175 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c (dst->capability & WLAN_CAPABILITY_IBSS)) && dst 2177 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c (dst->capability & WLAN_CAPABILITY_BSS))); dst 2180 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c static inline void update_network(struct ieee80211_network *dst, dst 2186 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(&dst->stats, &src->stats, sizeof(struct ieee80211_rx_stats)); dst 2187 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->capability = src->capability; dst 2188 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(dst->rates, src->rates, src->rates_len); dst 2189 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->rates_len = src->rates_len; dst 2190 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(dst->rates_ex, src->rates_ex, src->rates_ex_len); dst 2191 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->rates_ex_len = src->rates_ex_len; dst 2193 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memset(dst->ssid, 0, dst->ssid_len); dst 2194 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->ssid_len = src->ssid_len; dst 2195 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(dst->ssid, src->ssid, src->ssid_len); dst 2197 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->mode = src->mode; dst 2198 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->flags = src->flags; dst 2199 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->time_stamp[0] = src->time_stamp[0]; dst 2200 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->time_stamp[1] = src->time_stamp[1]; dst 2202 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->erp_value = src->erp_value; dst 2203 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->berp_info_valid = src->berp_info_valid = true; dst 2205 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->beacon_interval = src->beacon_interval; dst 2206 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->listen_interval = src->listen_interval; dst 2207 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->atim_window = src->atim_window; dst 2208 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->dtim_period = src->dtim_period; dst 2209 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->dtim_data = src->dtim_data; dst 2210 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->last_dtim_sta_time[0] = src->last_dtim_sta_time[0]; dst 2211 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->last_dtim_sta_time[1] = src->last_dtim_sta_time[1]; dst 2212 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(&dst->tim, &src->tim, sizeof(struct ieee80211_tim_parameters)); dst 2214 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->bssht.bdSupportHT = src->bssht.bdSupportHT; dst 2215 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->bssht.bdRT2RTAggregation = src->bssht.bdRT2RTAggregation; dst 2216 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->bssht.bdHTCapLen = src->bssht.bdHTCapLen; dst 2217 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(dst->bssht.bdHTCapBuf, src->bssht.bdHTCapBuf, src->bssht.bdHTCapLen); dst 2218 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->bssht.bdHTInfoLen = src->bssht.bdHTInfoLen; dst 2219 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(dst->bssht.bdHTInfoBuf, src->bssht.bdHTInfoBuf, src->bssht.bdHTInfoLen); dst 2220 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 
dst->bssht.bdHTSpecVer = src->bssht.bdHTSpecVer; dst 2221 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->bssht.bdRT2RTLongSlotTime = src->bssht.bdRT2RTLongSlotTime; dst 2222 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->broadcom_cap_exist = src->broadcom_cap_exist; dst 2223 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->ralink_cap_exist = src->ralink_cap_exist; dst 2224 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->atheros_cap_exist = src->atheros_cap_exist; dst 2225 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->cisco_cap_exist = src->cisco_cap_exist; dst 2226 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->unknown_cap_exist = src->unknown_cap_exist; dst 2227 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(dst->wpa_ie, src->wpa_ie, src->wpa_ie_len); dst 2228 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->wpa_ie_len = src->wpa_ie_len; dst 2229 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(dst->rsn_ie, src->rsn_ie, src->rsn_ie_len); dst 2230 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->rsn_ie_len = src->rsn_ie_len; dst 2232 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->last_scanned = jiffies; dst 2235 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c qos_active = dst->qos_data.active; dst 2237 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c old_param = dst->qos_data.param_count; dst 2238 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c if (dst->flags & NETWORK_HAS_QOS_MASK) dst 2239 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(&dst->qos_data, &src->qos_data, dst 2242 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->qos_data.supported = src->qos_data.supported; dst 2243 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->qos_data.param_count = src->qos_data.param_count; dst 2246 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c if (dst->qos_data.supported == 1) { dst 2247 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->QoS_Enable = 1; dst 2248 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c if (dst->ssid_len) dst 2251 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->ssid); dst 2256 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->qos_data.active = qos_active; dst 2257 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->qos_data.old_param_count = old_param; dst 2260 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->wmm_info = src->wmm_info; //sure to exist in beacon or probe response frame. 
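The update_network() entries above (rtllib_rx.c and ieee80211_rx.c, with an rtw_mlme.c variant further down) all refresh a cached BSS descriptor from a freshly parsed one while keeping a few locally maintained values alive: qos_data.active and the previous parameter count are saved before the QoS block is overwritten and written back afterwards, and the rtw_mlme.c version also computes a smoothed Rssi/SignalStrength from the old and new samples before its bulk memcpy and re-applies it after. A minimal user-space sketch of that save/refresh/restore idea, using hypothetical struct and field names rather than the drivers' real types:

#include <string.h>

/* Hypothetical, simplified stand-in for a cached network descriptor. */
struct net_desc {
	unsigned char bssid[6];
	int rssi;
	int qos_active;      /* locally tracked state, not taken from the frame */
	int qos_param_count; /* last QoS parameter set we applied */
};

/* Refresh *dst from a freshly parsed *src, preserving locally owned state. */
static void update_desc(struct net_desc *dst, const struct net_desc *src)
{
	int active = dst->qos_active;               /* save local state */
	int old_params = dst->qos_param_count;
	int rssi = (src->rssi + dst->rssi * 4) / 5; /* smoothing, as rtw_mlme.c does */

	memcpy(dst, src, sizeof(*dst));             /* bulk refresh from the frame */

	dst->qos_active = active;                   /* restore what we own */
	dst->qos_param_count = old_params;
	dst->rssi = rssi;
}

The drivers indexed here do the refresh either field by field or with one memcpy sized by get_wlan_bssid_ex_sz(); the point of the sketch is only that locally owned state has to be re-applied after the copy.
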
dst 2265 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(dst->wmm_param, src->wmm_param, WME_AC_PRAM_LEN); dst 2269 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->Turbo_Enable = src->Turbo_Enable; dst 2272 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->CountryIeLen = src->CountryIeLen; dst 2273 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(dst->CountryIeBuf, src->CountryIeBuf, src->CountryIeLen); dst 2276 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->bWithAironetIE = src->bWithAironetIE; dst 2277 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->bCkipSupported = src->bCkipSupported; dst 2278 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(dst->CcxRmState, src->CcxRmState, 2); dst 2279 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->bCcxRmEnable = src->bCcxRmEnable; dst 2280 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->MBssidMask = src->MBssidMask; dst 2281 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->bMBssidValid = src->bMBssidValid; dst 2282 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(dst->MBssid, src->MBssid, 6); dst 2283 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->bWithCcxVerNum = src->bWithCcxVerNum; dst 2284 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c dst->BssCcxVerNumber = src->BssCcxVerNumber; dst 517 drivers/staging/rtl8192u/ieee80211/ieee80211_tx.c struct sk_buff *skb, u8 *dst) dst 519 drivers/staging/rtl8192u/ieee80211/ieee80211_tx.c if (is_multicast_ether_addr(dst)) dst 523 drivers/staging/rtl8192u/ieee80211/ieee80211_tx.c if (!GetTs(ieee, (struct ts_common_info **)(&pTS), dst, skb->priority, TX_DIR, true)) { dst 182 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c u8 *dst, dst 198 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c __func__, ReasonCode, dst); dst 213 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c memcpy(Delba->addr1, dst, ETH_ALEN); dst 247 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c u8 *dst, struct ba_record *pBA) dst 250 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c skb = ieee80211_ADDBA(ieee, dst, pBA, 0, ACT_ADDBAREQ); //construct ACT_ADDBAREQ frames so set statuscode zero. 
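The BAProc entries around this point build ADDBA/ADDBARSP/DELBA action frames addressed to the peer passed in as dst, copying it straight into addr1 of the management header (ether_addr_copy(Delba->addr1, dst) and the memcpy equivalents). For 802.11 management frames the address layout is fixed: addr1 carries the destination, addr2 the sender, addr3 the BSSID. A small illustrative sketch of filling those fields; the struct below is a hypothetical, non-wire-exact stand-in, not the drivers' header type:

#include <string.h>

#define ETH_ALEN 6

/* Illustrative 802.11 management header layout (field order only). */
struct mgmt_hdr {
	unsigned short frame_ctl;
	unsigned short duration;
	unsigned char addr1[ETH_ALEN]; /* DA: the peer the action frame is sent to */
	unsigned char addr2[ETH_ALEN]; /* SA: our own MAC address */
	unsigned char addr3[ETH_ALEN]; /* BSSID of the association */
	unsigned short seq_ctl;
};

static void fill_mgmt_addrs(struct mgmt_hdr *hdr, const unsigned char *dst,
			    const unsigned char *own, const unsigned char *bssid)
{
	memcpy(hdr->addr1, dst, ETH_ALEN); /* ADDBA/DELBA go to the BA peer */
	memcpy(hdr->addr2, own, ETH_ALEN);
	memcpy(hdr->addr3, bssid, ETH_ALEN);
}
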
dst 270 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c static void ieee80211_send_ADDBARsp(struct ieee80211_device *ieee, u8 *dst, dst 274 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c skb = ieee80211_ADDBA(ieee, dst, pBA, StatusCode, ACT_ADDBARSP); //construct ACT_ADDBARSP frames dst 295 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c static void ieee80211_send_DELBA(struct ieee80211_device *ieee, u8 *dst, dst 300 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c skb = ieee80211_DELBA(ieee, dst, pBA, TxRxSelect, ReasonCode); //construct ACT_ADDBARSP frames dst 319 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c u8 *dst = NULL, *pDialogToken = NULL, *tag = NULL; dst 338 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c dst = &req->addr2[0]; dst 345 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c netdev_info(ieee->dev, "====================>rx ADDBAREQ from :%pM\n", dst); dst 360 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c dst, dst 391 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c ieee80211_send_ADDBARsp(ieee, dst, pBA, ADDBA_STATUS_SUCCESS); dst 403 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c ieee80211_send_ADDBARsp(ieee, dst, &BA, rc); dst 420 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c u8 *dst = NULL, *pDialogToken = NULL, *tag = NULL; dst 434 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c dst = &rsp->addr2[0]; dst 459 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c dst, dst 527 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c ieee80211_send_DELBA(ieee, dst, &BA, TX_DIR, ReasonCode); dst 543 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c u8 *dst = NULL; dst 561 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c dst = &delba->addr2[0]; dst 570 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c dst, dst 585 drivers/staging/rtl8192u/ieee80211/rtl819x_BAProc.c dst, dst 391 drivers/staging/rtl8712/rtl8712_recv.c memcpy(skb_push(sub_skb, ETH_ALEN), pattrib->dst, dst 400 drivers/staging/rtl8712/rtl8712_recv.c memcpy(skb_push(sub_skb, ETH_ALEN), pattrib->dst, dst 236 drivers/staging/rtl8712/rtl871x_mlme.c struct wlan_bssid_ex *dst) dst 241 drivers/staging/rtl8712/rtl871x_mlme.c memcpy((u8 *)&d_cap, r8712_get_capability_from_ie(dst->IEs), 2); dst 242 drivers/staging/rtl8712/rtl871x_mlme.c return (src->Ssid.SsidLength == dst->Ssid.SsidLength) && dst 244 drivers/staging/rtl8712/rtl871x_mlme.c dst->Configuration.DSConfig) && dst 245 drivers/staging/rtl8712/rtl871x_mlme.c ((!memcmp(src->MacAddress, dst->MacAddress, dst 248 drivers/staging/rtl8712/rtl871x_mlme.c dst->Ssid.Ssid, dst 281 drivers/staging/rtl8712/rtl871x_mlme.c static void update_network(struct wlan_bssid_ex *dst, dst 311 drivers/staging/rtl8712/rtl871x_mlme.c src->Rssi = (src->Rssi + dst->Rssi) / 2; dst 313 drivers/staging/rtl8712/rtl871x_mlme.c memcpy((u8 *)dst, (u8 *)src, r8712_get_wlan_bssid_ex_sz(src)); dst 307 drivers/staging/rtl8712/rtl871x_recv.c bool bmcast = is_multicast_ether_addr(pattrib->dst); dst 314 drivers/staging/rtl8712/rtl871x_recv.c if ((memcmp(myhwaddr, pattrib->dst, ETH_ALEN)) && (!bmcast)) dst 339 drivers/staging/rtl8712/rtl871x_recv.c if (memcmp(pattrib->bssid, pattrib->dst, ETH_ALEN)) dst 344 drivers/staging/rtl8712/rtl871x_recv.c memcpy(pattrib->dst, GetAddr1Ptr(ptr), ETH_ALEN); dst 347 drivers/staging/rtl8712/rtl871x_recv.c memcpy(pattrib->ra, pattrib->dst, ETH_ALEN); dst 375 drivers/staging/rtl8712/rtl871x_recv.c bool bmcast = is_multicast_ether_addr(pattrib->dst); dst 396 drivers/staging/rtl8712/rtl871x_recv.c if ((memcmp(myhwaddr, 
pattrib->dst, ETH_ALEN)) && (!bmcast)) dst 411 drivers/staging/rtl8712/rtl871x_recv.c memcpy(pattrib->dst, GetAddr1Ptr(ptr), ETH_ALEN); dst 414 drivers/staging/rtl8712/rtl871x_recv.c memcpy(pattrib->ra, pattrib->dst, ETH_ALEN); dst 479 drivers/staging/rtl8712/rtl871x_recv.c memcpy(pattrib->dst, pda, ETH_ALEN); dst 641 drivers/staging/rtl8712/rtl871x_recv.c memcpy(ptr, pattrib->dst, ETH_ALEN); dst 62 drivers/staging/rtl8712/rtl871x_recv.h u8 dst[ETH_ALEN]; dst 317 drivers/staging/rtl8712/rtl871x_security.c void r8712_secgetmic(struct mic_data *pmicdata, u8 *dst) dst 329 drivers/staging/rtl8712/rtl871x_security.c secmicputuint32(dst, pmicdata->L); dst 330 drivers/staging/rtl8712/rtl871x_security.c secmicputuint32(dst + 4, pmicdata->R); dst 208 drivers/staging/rtl8712/rtl871x_security.h void r8712_secgetmic(struct mic_data *pmicdata, u8 *dst); dst 211 drivers/staging/rtl8712/rtl871x_xmit.c memcpy(pattrib->dst, &etherhdr.h_dest, ETH_ALEN); dst 216 drivers/staging/rtl8712/rtl871x_xmit.c memcpy(pattrib->ra, pattrib->dst, ETH_ALEN); dst 222 drivers/staging/rtl8712/rtl871x_xmit.c memcpy(pattrib->ra, pattrib->dst, ETH_ALEN); dst 233 drivers/staging/rtl8712/rtl871x_xmit.c memcpy(pattrib->ra, pattrib->dst, ETH_ALEN); dst 491 drivers/staging/rtl8712/rtl871x_xmit.c memcpy(pwlanhdr->addr3, pattrib->dst, ETH_ALEN); dst 495 drivers/staging/rtl8712/rtl871x_xmit.c memcpy(pwlanhdr->addr1, pattrib->dst, ETH_ALEN); dst 502 drivers/staging/rtl8712/rtl871x_xmit.c memcpy(pwlanhdr->addr1, pattrib->dst, ETH_ALEN); dst 507 drivers/staging/rtl8712/rtl871x_xmit.c memcpy(pwlanhdr->addr1, pattrib->dst, ETH_ALEN); dst 118 drivers/staging/rtl8712/rtl871x_xmit.h u8 dst[ETH_ALEN]; dst 417 drivers/staging/rtl8723bs/core/rtw_mlme.c int is_same_network(struct wlan_bssid_ex *src, struct wlan_bssid_ex *dst, u8 feature) dst 422 drivers/staging/rtl8723bs/core/rtw_mlme.c if (rtw_bug_check(dst, src, &s_cap, &d_cap) == false) dst 426 drivers/staging/rtl8723bs/core/rtw_mlme.c memcpy((u8 *)&tmpd, rtw_get_capability_from_ie(dst->IEs), 2); dst 432 drivers/staging/rtl8723bs/core/rtw_mlme.c return (src->Ssid.SsidLength == dst->Ssid.SsidLength) && dst 434 drivers/staging/rtl8723bs/core/rtw_mlme.c ((!memcmp(src->MacAddress, dst->MacAddress, ETH_ALEN))) && dst 435 drivers/staging/rtl8723bs/core/rtw_mlme.c ((!memcmp(src->Ssid.Ssid, dst->Ssid.Ssid, src->Ssid.SsidLength))) && dst 496 drivers/staging/rtl8723bs/core/rtw_mlme.c void update_network(struct wlan_bssid_ex *dst, struct wlan_bssid_ex *src, dst 499 drivers/staging/rtl8723bs/core/rtw_mlme.c long rssi_ori = dst->Rssi; dst 508 drivers/staging/rtl8723bs/core/rtw_mlme.c if (strcmp(dst->Ssid.Ssid, DBG_RX_SIGNAL_DISPLAY_SSID_MONITORED) == 0) { dst 525 drivers/staging/rtl8723bs/core/rtw_mlme.c rssi_final = (src->Rssi+dst->Rssi*4)/5; dst 530 drivers/staging/rtl8723bs/core/rtw_mlme.c ss_final = ((u32)(src->PhyInfo.SignalStrength)+(u32)(dst->PhyInfo.SignalStrength)*4)/5; dst 531 drivers/staging/rtl8723bs/core/rtw_mlme.c sq_final = ((u32)(src->PhyInfo.SignalQuality)+(u32)(dst->PhyInfo.SignalQuality)*4)/5; dst 532 drivers/staging/rtl8723bs/core/rtw_mlme.c rssi_final = (src->Rssi+dst->Rssi*4)/5; dst 535 drivers/staging/rtl8723bs/core/rtw_mlme.c ss_final = dst->PhyInfo.SignalStrength; dst 536 drivers/staging/rtl8723bs/core/rtw_mlme.c sq_final = dst->PhyInfo.SignalQuality; dst 537 drivers/staging/rtl8723bs/core/rtw_mlme.c rssi_final = dst->Rssi; dst 543 drivers/staging/rtl8723bs/core/rtw_mlme.c dst->Reserved[0] = src->Reserved[0]; dst 544 drivers/staging/rtl8723bs/core/rtw_mlme.c dst->Reserved[1] =
src->Reserved[1]; dst 545 drivers/staging/rtl8723bs/core/rtw_mlme.c memcpy((u8 *)dst, (u8 *)src, get_wlan_bssid_ex_sz(src)); dst 548 drivers/staging/rtl8723bs/core/rtw_mlme.c dst->PhyInfo.SignalStrength = ss_final; dst 549 drivers/staging/rtl8723bs/core/rtw_mlme.c dst->PhyInfo.SignalQuality = sq_final; dst 550 drivers/staging/rtl8723bs/core/rtw_mlme.c dst->Rssi = rssi_final; dst 553 drivers/staging/rtl8723bs/core/rtw_mlme.c if (strcmp(dst->Ssid.Ssid, DBG_RX_SIGNAL_DISPLAY_SSID_MONITORED) == 0) { dst 556 drivers/staging/rtl8723bs/core/rtw_mlme.c , dst->Ssid.Ssid, MAC_ARG(dst->MacAddress), dst->PhyInfo.SignalStrength, dst->PhyInfo.SignalQuality, dst->Rssi); dst 752 drivers/staging/rtl8723bs/core/rtw_recv.c if ((!MacAddr_isBcst(pattrib->dst)) && (!IS_MCAST(pattrib->dst))) { dst 790 drivers/staging/rtl8723bs/core/rtw_recv.c sint bmcast = IS_MCAST(pattrib->dst); dst 804 drivers/staging/rtl8723bs/core/rtw_recv.c if ((memcmp(myhwaddr, pattrib->dst, ETH_ALEN)) && (!bmcast)) { dst 836 drivers/staging/rtl8723bs/core/rtw_recv.c if (memcmp(pattrib->bssid, pattrib->dst, ETH_ALEN)) { dst 845 drivers/staging/rtl8723bs/core/rtw_recv.c memcpy(pattrib->dst, GetAddr1Ptr(ptr), ETH_ALEN); dst 848 drivers/staging/rtl8723bs/core/rtw_recv.c memcpy(pattrib->ra, pattrib->dst, ETH_ALEN); dst 888 drivers/staging/rtl8723bs/core/rtw_recv.c sint bmcast = IS_MCAST(pattrib->dst); dst 907 drivers/staging/rtl8723bs/core/rtw_recv.c if ((memcmp(myhwaddr, pattrib->dst, ETH_ALEN)) && (!bmcast)) { dst 909 drivers/staging/rtl8723bs/core/rtw_recv.c (" ap2sta_data_frame: compare DA fail; DA ="MAC_FMT"\n", MAC_ARG(pattrib->dst))); dst 911 drivers/staging/rtl8723bs/core/rtw_recv.c DBG_871X("DBG_RX_DROP_FRAME %s DA ="MAC_FMT"\n", __func__, MAC_ARG(pattrib->dst)); dst 966 drivers/staging/rtl8723bs/core/rtw_recv.c memcpy(pattrib->dst, GetAddr1Ptr(ptr), ETH_ALEN); dst 969 drivers/staging/rtl8723bs/core/rtw_recv.c memcpy(pattrib->ra, pattrib->dst, ETH_ALEN); dst 992 drivers/staging/rtl8723bs/core/rtw_recv.c if (!memcmp(myhwaddr, pattrib->dst, ETH_ALEN) && (!bmcast)) { dst 1285 drivers/staging/rtl8723bs/core/rtw_recv.c memcpy(pattrib->dst, pda, ETH_ALEN); dst 1696 drivers/staging/rtl8723bs/core/rtw_recv.c memcpy(ptr, pattrib->dst, ETH_ALEN); dst 390 drivers/staging/rtl8723bs/core/rtw_security.c void rtw_secgetmic(struct mic_data *pmicdata, u8 *dst) dst 403 drivers/staging/rtl8723bs/core/rtw_security.c secmicputuint32(dst, pmicdata->L); dst 404 drivers/staging/rtl8723bs/core/rtw_security.c secmicputuint32(dst+4, pmicdata->R); dst 697 drivers/staging/rtl8723bs/core/rtw_xmit.c memcpy(pattrib->dst, &etherhdr.h_dest, ETH_ALEN); dst 703 drivers/staging/rtl8723bs/core/rtw_xmit.c memcpy(pattrib->ra, pattrib->dst, ETH_ALEN); dst 711 drivers/staging/rtl8723bs/core/rtw_xmit.c memcpy(pattrib->ra, pattrib->dst, ETH_ALEN); dst 1042 drivers/staging/rtl8723bs/core/rtw_xmit.c memcpy(pwlanhdr->addr3, pattrib->dst, ETH_ALEN); dst 1051 drivers/staging/rtl8723bs/core/rtw_xmit.c memcpy(pwlanhdr->addr1, pattrib->dst, ETH_ALEN); dst 1059 drivers/staging/rtl8723bs/core/rtw_xmit.c memcpy(pwlanhdr->addr1, pattrib->dst, ETH_ALEN); dst 130 drivers/staging/rtl8723bs/include/osdep_service_linux.h static inline void rtw_merge_string(char *dst, int dst_len, char *src1, char *src2) dst 133 drivers/staging/rtl8723bs/include/osdep_service_linux.h len += snprintf(dst+len, dst_len - len, "%s", src1); dst 134 drivers/staging/rtl8723bs/include/osdep_service_linux.h len += snprintf(dst+len, dst_len - len, "%s", src2); dst 625 drivers/staging/rtl8723bs/include/rtw_mlme.h int
is_same_network(struct wlan_bssid_ex *src, struct wlan_bssid_ex *dst, u8 feature); dst 597 drivers/staging/rtl8723bs/include/rtw_mlme_ext.h void update_network(struct wlan_bssid_ex *dst, struct wlan_bssid_ex *src, struct adapter *padapter, bool update_ie); dst 158 drivers/staging/rtl8723bs/include/rtw_recv.h u8 dst[ETH_ALEN]; dst 410 drivers/staging/rtl8723bs/include/rtw_security.h void rtw_secgetmic(struct mic_data *pmicdata, u8 * dst); dst 162 drivers/staging/rtl8723bs/include/rtw_xmit.h u8 dst[ETH_ALEN]; dst 86 drivers/staging/rtl8723bs/os_dep/recv_linux.c memcpy(skb_push(sub_skb, ETH_ALEN), pattrib->dst, ETH_ALEN); dst 93 drivers/staging/rtl8723bs/os_dep/recv_linux.c memcpy(skb_push(sub_skb, ETH_ALEN), pattrib->dst, ETH_ALEN); dst 110 drivers/staging/rtl8723bs/os_dep/recv_linux.c int bmcast = IS_MCAST(pattrib->dst); dst 114 drivers/staging/rtl8723bs/os_dep/recv_linux.c if (memcmp(pattrib->dst, myid(&padapter->eeprompriv), ETH_ALEN)) { dst 121 drivers/staging/rtl8723bs/os_dep/recv_linux.c psta = rtw_get_stainfo(pstapriv, pattrib->dst); dst 257 drivers/staging/uwb/uwb.h static inline void uwb_mas_bm_copy_le(void *dst, const struct uwb_mas_bm *mas) dst 259 drivers/staging/uwb/uwb.h bitmap_copy_le(dst, mas->bm, UWB_NUM_MAS); dst 761 drivers/staging/vc04_services/bcm2835-camera/bcm2835-camera.c struct vchiq_mmal_port *dst; dst 787 drivers/staging/vc04_services/bcm2835-camera/bcm2835-camera.c dst = &dev->component[COMP_PREVIEW]->input[0]; dst 793 drivers/staging/vc04_services/bcm2835-camera/bcm2835-camera.c ret = set_overlay_params(dev, dst); dst 807 drivers/staging/vc04_services/bcm2835-camera/bcm2835-camera.c src, dst); dst 808 drivers/staging/vc04_services/bcm2835-camera/bcm2835-camera.c ret = vchiq_mmal_port_connect_tunnel(dev->instance, src, dst); dst 1487 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c struct vchiq_mmal_port *dst) dst 1518 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c if (!dst) { dst 1526 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c dst->format.encoding = src->format.encoding; dst 1527 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c dst->es.video.width = src->es.video.width; dst 1528 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c dst->es.video.height = src->es.video.height; dst 1529 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c dst->es.video.crop.x = src->es.video.crop.x; dst 1530 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c dst->es.video.crop.y = src->es.video.crop.y; dst 1531 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c dst->es.video.crop.width = src->es.video.crop.width; dst 1532 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c dst->es.video.crop.height = src->es.video.crop.height; dst 1533 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c dst->es.video.frame_rate.num = src->es.video.frame_rate.num; dst 1534 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c dst->es.video.frame_rate.den = src->es.video.frame_rate.den; dst 1537 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c ret = port_info_set(instance, dst); dst 1544 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c ret = port_info_get(instance, dst); dst 1553 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c dst->component->handle, dst->handle); dst 1557 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c dst->component->handle, dst->handle); dst 1560 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c src->connected = dst; dst 153 
drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.h struct vchiq_mmal_port *dst); dst 229 drivers/staging/wlan-ng/p80211netdev.h int wep_encrypt(struct wlandevice *wlandev, u8 *buf, u8 *dst, u32 len, dst 218 drivers/staging/wlan-ng/p80211wep.c u8 *dst, u32 len, int keynum, u8 *iv, u8 *icv) dst 268 drivers/staging/wlan-ng/p80211wep.c dst[k] = buf[k] ^ s[(s[i] + s[j]) & 0xff]; dst 990 drivers/staging/wlan-ng/prism2sta.c u32 *dst; dst 1002 drivers/staging/wlan-ng/prism2sta.c dst = (u32 *)&hw->tallies; dst 1004 drivers/staging/wlan-ng/prism2sta.c for (i = 0; i < cnt; i++, dst++, src32++) dst 1005 drivers/staging/wlan-ng/prism2sta.c *dst += le32_to_cpu(*src32); dst 1007 drivers/staging/wlan-ng/prism2sta.c dst = (u32 *)&hw->tallies; dst 1009 drivers/staging/wlan-ng/prism2sta.c for (i = 0; i < cnt; i++, dst++, src16++) dst 1010 drivers/staging/wlan-ng/prism2sta.c *dst += le16_to_cpu(*src16); dst 194 drivers/target/iscsi/cxgbit/cxgbit.h struct dst_entry *dst; dst 812 drivers/target/iscsi/cxgbit/cxgbit_cm.c dst_release(csk->dst); dst 898 drivers/target/iscsi/cxgbit/cxgbit_cm.c u16 local_port, struct dst_entry *dst, dst 909 drivers/target/iscsi/cxgbit/cxgbit_cm.c n = dst_neigh_lookup(dst, peer_ip); dst 971 drivers/target/iscsi/cxgbit/cxgbit_cm.c csk->mtu = dst_mtu(dst); dst 1227 drivers/target/iscsi/cxgbit/cxgbit_cm.c struct dst_entry *dst; dst 1266 drivers/target/iscsi/cxgbit/cxgbit_cm.c dst = cxgb_find_route(&cdev->lldi, cxgbit_get_real_dev, dst 1277 drivers/target/iscsi/cxgbit/cxgbit_cm.c dst = cxgb_find_route6(&cdev->lldi, cxgbit_get_real_dev, dst 1284 drivers/target/iscsi/cxgbit/cxgbit_cm.c if (!dst) { dst 1292 drivers/target/iscsi/cxgbit/cxgbit_cm.c dst_release(dst); dst 1297 drivers/target/iscsi/cxgbit/cxgbit_cm.c dst, cdev); dst 1301 drivers/target/iscsi/cxgbit/cxgbit_cm.c dst_release(dst); dst 1320 drivers/target/iscsi/cxgbit/cxgbit_cm.c csk->dst = dst; dst 1367 drivers/target/iscsi/cxgbit/cxgbit_cm.c dst_release(dst); dst 1639 drivers/target/iscsi/cxgbit/cxgbit_cm.c dst_confirm(csk->dst); dst 1873 drivers/target/iscsi/cxgbit/cxgbit_cm.c dst_confirm(csk->dst); dst 456 drivers/tee/optee/call.c void optee_fill_pages_list(u64 *dst, struct page **pages, int num_pages, dst 480 drivers/tee/optee/call.c pages_data = (void *)dst; dst 173 drivers/tee/optee/optee_private.h void optee_fill_pages_list(u64 *dst, struct page **pages, int num_pages, dst 66 drivers/thunderbolt/path.c const struct tb_port *dst, int dst_hopid) dst 73 drivers/thunderbolt/path.c if (out == dst) dst 101 drivers/thunderbolt/path.c struct tb_port *dst, int dst_hopid, dst 112 drivers/thunderbolt/path.c if (src_hopid < 0 && dst) { dst 120 drivers/thunderbolt/path.c src_hopid = tb_path_find_src_hopid(src, dst, dst_hopid); dst 228 drivers/thunderbolt/path.c struct tb_port *dst, int dst_hopid, int link_nr, dst 246 drivers/thunderbolt/path.c tb_route_length(tb_route(dst->sw))) + 1; dst 258 drivers/thunderbolt/path.c in_port = tb_next_port_on_path(src, dst, out_port); dst 270 drivers/thunderbolt/path.c out_port = tb_next_port_on_path(src, dst, in_port); dst 42 drivers/thunderbolt/property.c static inline void parse_dwdata(void *dst, const void *src, size_t dwords) dst 44 drivers/thunderbolt/property.c be32_to_cpu_array(dst, src, dwords); dst 47 drivers/thunderbolt/property.c static inline void format_dwdata(void *dst, const void *src, size_t dwords) dst 49 drivers/thunderbolt/property.c cpu_to_be32_array(dst, src, dwords); dst 620 drivers/thunderbolt/tb.h struct tb_port *dst, int dst_hopid, dst 623 drivers/thunderbolt/tb.h struct 
tb_port *dst, int dst_hopid, int link_nr, dst 527 drivers/thunderbolt/tunnel.c struct tb_port *dst, int transmit_ring, dst 541 drivers/thunderbolt/tunnel.c tunnel->dst_port = dst; dst 545 drivers/thunderbolt/tunnel.c path = tb_path_alloc(tb, dst, receive_path, nhi, receive_ring, 0, "DMA RX"); dst 554 drivers/thunderbolt/tunnel.c path = tb_path_alloc(tb, nhi, transmit_ring, dst, transmit_path, 0, "DMA TX"); dst 52 drivers/thunderbolt/tunnel.h struct tb_port *dst, int transmit_ring, dst 1928 drivers/tty/moxa.c unsigned char *dst; dst 1954 drivers/tty/moxa.c len = tty_prepare_flip_string(&port->port, &dst, dst 1956 drivers/tty/moxa.c memcpy_fromio(dst, ofs, len); dst 1966 drivers/tty/moxa.c len = tty_prepare_flip_string(&port->port, &dst, dst 1968 drivers/tty/moxa.c memcpy_fromio(dst, ofs, len); dst 471 drivers/tty/vt/vt.c static void vc_uniscr_copy_area(struct uni_screen *dst, dst 481 drivers/tty/vt/vt.c if (!dst) dst 486 drivers/tty/vt/vt.c char32_t *dst_line = dst->lines[dst_row]; dst 495 drivers/tty/vt/vt.c char32_t *dst_line = dst->lines[dst_row]; dst 444 drivers/usb/gadget/function/f_uvc.c #define UVC_COPY_DESCRIPTOR(mem, dst, desc) \ dst 447 drivers/usb/gadget/function/f_uvc.c *(dst)++ = mem; \ dst 451 drivers/usb/gadget/function/f_uvc.c #define UVC_COPY_DESCRIPTORS(mem, dst, src) \ dst 456 drivers/usb/gadget/function/f_uvc.c *dst++ = mem; \ dst 470 drivers/usb/gadget/function/f_uvc.c struct usb_descriptor_header **dst; dst 551 drivers/usb/gadget/function/f_uvc.c dst = mem; dst 555 drivers/usb/gadget/function/f_uvc.c UVC_COPY_DESCRIPTOR(mem, dst, &uvc_iad); dst 556 drivers/usb/gadget/function/f_uvc.c UVC_COPY_DESCRIPTOR(mem, dst, &uvc_control_intf); dst 559 drivers/usb/gadget/function/f_uvc.c UVC_COPY_DESCRIPTORS(mem, dst, dst 565 drivers/usb/gadget/function/f_uvc.c UVC_COPY_DESCRIPTOR(mem, dst, &uvc_control_ep); dst 567 drivers/usb/gadget/function/f_uvc.c UVC_COPY_DESCRIPTOR(mem, dst, &uvc_ss_control_comp); dst 569 drivers/usb/gadget/function/f_uvc.c UVC_COPY_DESCRIPTOR(mem, dst, &uvc_control_cs_ep); dst 570 drivers/usb/gadget/function/f_uvc.c UVC_COPY_DESCRIPTOR(mem, dst, &uvc_streaming_intf_alt0); dst 573 drivers/usb/gadget/function/f_uvc.c UVC_COPY_DESCRIPTORS(mem, dst, dst 578 drivers/usb/gadget/function/f_uvc.c UVC_COPY_DESCRIPTORS(mem, dst, uvc_streaming_std); dst 580 drivers/usb/gadget/function/f_uvc.c *dst = NULL; dst 316 drivers/usb/gadget/udc/udc-xilinx.c dma_addr_t dst, u32 length) dst 329 drivers/usb/gadget/udc/udc-xilinx.c udc->write_fn(udc->addr, XUSB_DMA_DDAR_ADDR_OFFSET, dst); dst 380 drivers/usb/gadget/udc/udc-xilinx.c dma_addr_t dst; dst 390 drivers/usb/gadget/udc/udc-xilinx.c dst = virt_to_phys(eprambase); dst 401 drivers/usb/gadget/udc/udc-xilinx.c dst = virt_to_phys(eprambase); dst 414 drivers/usb/gadget/udc/udc-xilinx.c return xudc_start_dma(ep, src, dst, length); dst 434 drivers/usb/gadget/udc/udc-xilinx.c dma_addr_t dst; dst 437 drivers/usb/gadget/udc/udc-xilinx.c dst = req->usb_req.dma + req->usb_req.actual; dst 463 drivers/usb/gadget/udc/udc-xilinx.c return xudc_start_dma(ep, src, dst, length); dst 868 drivers/usb/host/max3421-hcd.c void *dst = urb->transfer_buffer + urb->actual_length; dst 870 drivers/usb/host/max3421-hcd.c spi_rd_buf(hcd, MAX3421_REG_RCVFIFO, dst, transfer_size); dst 187 drivers/usb/isp1760/isp1760-hcd.c __u32 *dst, u32 bytes) dst 198 drivers/usb/isp1760/isp1760-hcd.c *dst = le32_to_cpu(__raw_readl(src)); dst 201 drivers/usb/isp1760/isp1760-hcd.c dst++; dst 205 drivers/usb/isp1760/isp1760-hcd.c *dst = __raw_readl(src); dst 208 
drivers/usb/isp1760/isp1760-hcd.c dst++; dst 223 drivers/usb/isp1760/isp1760-hcd.c dst_byteptr = (void *) dst; dst 233 drivers/usb/isp1760/isp1760-hcd.c static void mem_reads8(void __iomem *src_base, u32 src_offset, void *dst, dst 238 drivers/usb/isp1760/isp1760-hcd.c bank_reads8(src_base, src_offset, ISP_BANK(0), dst, bytes); dst 244 drivers/usb/isp1760/isp1760-hcd.c __u32 __iomem *dst; dst 246 drivers/usb/isp1760/isp1760-hcd.c dst = dst_base + dst_offset; dst 250 drivers/usb/isp1760/isp1760-hcd.c __raw_writel(cpu_to_le32(*src), dst); dst 253 drivers/usb/isp1760/isp1760-hcd.c dst++; dst 257 drivers/usb/isp1760/isp1760-hcd.c __raw_writel(*src, dst); dst 260 drivers/usb/isp1760/isp1760-hcd.c dst++; dst 271 drivers/usb/isp1760/isp1760-hcd.c __raw_writel(cpu_to_le32(*src), dst); dst 273 drivers/usb/isp1760/isp1760-hcd.c __raw_writel(*src, dst); dst 100 drivers/usb/mtu3/mtu3_gadget_ep0.c static void ep0_read_fifo(struct mtu3_ep *mep, u8 *dst, u16 len) dst 107 drivers/usb/mtu3/mtu3_gadget_ep0.c __func__, mep->epnum, len, dst); dst 110 drivers/usb/mtu3/mtu3_gadget_ep0.c ioread32_rep(fifo, dst, len >> 2); dst 115 drivers/usb/mtu3/mtu3_gadget_ep0.c memcpy(&dst[index], &value, len & 0x3); dst 389 drivers/usb/musb/am35x.c static void am35x_read_fifo(struct musb_hw_ep *hw_ep, u16 len, u8 *dst) dst 396 drivers/usb/musb/am35x.c if (likely((0x03 & (unsigned long) dst) == 0) && len >= 4) { dst 397 drivers/usb/musb/am35x.c readsl(fifo, dst, len >> 2); dst 398 drivers/usb/musb/am35x.c dst += len & ~0x03; dst 407 drivers/usb/musb/am35x.c *(u32 *) dst = musb_readl(fifo, 0); dst 408 drivers/usb/musb/am35x.c dst += 4; dst 414 drivers/usb/musb/am35x.c memcpy(dst, &val, len); dst 325 drivers/usb/musb/musb_core.c static void musb_default_read_fifo(struct musb_hw_ep *hw_ep, u16 len, u8 *dst) dst 334 drivers/usb/musb/musb_core.c 'R', hw_ep->epnum, fifo, len, dst); dst 337 drivers/usb/musb/musb_core.c if (likely((0x01 & (unsigned long) dst) == 0)) { dst 341 drivers/usb/musb/musb_core.c if ((0x02 & (unsigned long) dst) == 0) { dst 343 drivers/usb/musb/musb_core.c ioread32_rep(fifo, dst, len >> 2); dst 347 drivers/usb/musb/musb_core.c *(u16 *)&dst[index] = __raw_readw(fifo); dst 352 drivers/usb/musb/musb_core.c ioread16_rep(fifo, dst, len >> 1); dst 357 drivers/usb/musb/musb_core.c dst[index] = __raw_readb(fifo); dst 360 drivers/usb/musb/musb_core.c ioread8_rep(fifo, dst, len); dst 407 drivers/usb/musb/musb_core.c void musb_read_fifo(struct musb_hw_ep *hw_ep, u16 len, u8 *dst) dst 409 drivers/usb/musb/musb_core.c return hw_ep->musb->io.read_fifo(hw_ep, len, dst); dst 488 drivers/usb/musb/musb_core.h extern void musb_read_fifo(struct musb_hw_ep *ep, u16 len, u8 *dst); dst 623 drivers/usb/musb/musb_dsps.c static void dsps_read_fifo32(struct musb_hw_ep *hw_ep, u16 len, u8 *dst) dst 628 drivers/usb/musb/musb_dsps.c ioread32_rep(fifo, dst, len >> 2); dst 629 drivers/usb/musb/musb_dsps.c dst += len & ~0x03; dst 636 drivers/usb/musb/musb_dsps.c memcpy(dst, &val, len); dst 1999 drivers/usb/serial/ftdi_sio.c static int ftdi_read_eeprom(struct usb_serial *serial, void *dst, u16 addr, dst 2017 drivers/usb/serial/ftdi_sio.c 0, (addr + read) / 2, dst + read, 2, dst 587 drivers/usb/serial/garmin_gps.c unsigned char *dst; dst 656 drivers/usb/serial/garmin_gps.c dst = garmin_data_p->outbuffer+GPS_OUT_BUFSIZ-datalen; dst 657 drivers/usb/serial/garmin_gps.c memcpy(dst, src, datalen); dst 658 drivers/usb/serial/garmin_gps.c src = dst; dst 661 drivers/usb/serial/garmin_gps.c dst = garmin_data_p->outbuffer; dst 663 
drivers/usb/serial/garmin_gps.c *dst++ = DLE; dst 664 drivers/usb/serial/garmin_gps.c *dst++ = pktid; dst 666 drivers/usb/serial/garmin_gps.c *dst++ = datalen; dst 669 drivers/usb/serial/garmin_gps.c *dst++ = DLE; dst 673 drivers/usb/serial/garmin_gps.c *dst++ = c; dst 676 drivers/usb/serial/garmin_gps.c *dst++ = DLE; dst 680 drivers/usb/serial/garmin_gps.c *dst++ = cksum; dst 682 drivers/usb/serial/garmin_gps.c *dst++ = DLE; dst 683 drivers/usb/serial/garmin_gps.c *dst++ = DLE; dst 684 drivers/usb/serial/garmin_gps.c *dst++ = ETX; dst 686 drivers/usb/serial/garmin_gps.c i = dst-garmin_data_p->outbuffer; dst 220 drivers/vhost/vringh.c static int slow_copy(struct vringh *vrh, void *dst, const void *src, dst 230 drivers/vhost/vringh.c int (*copy)(void *dst, const void *src, size_t len)) dst 244 drivers/vhost/vringh.c err = copy(dst, src, part); dst 248 drivers/vhost/vringh.c dst += part; dst 265 drivers/vhost/vringh.c int (*copy)(void *dst, const void *src, size_t len)) dst 407 drivers/vhost/vringh.c int (*putused)(struct vring_used_elem *dst, dst 567 drivers/vhost/vringh.c static inline int copydesc_user(void *dst, const void *src, size_t len) dst 569 drivers/vhost/vringh.c return copy_from_user(dst, (__force void __user *)src, len) ? dst 573 drivers/vhost/vringh.c static inline int putused_user(struct vring_used_elem *dst, dst 577 drivers/vhost/vringh.c return copy_to_user((__force void __user *)dst, src, dst 578 drivers/vhost/vringh.c sizeof(*dst) * num) ? -EFAULT : 0; dst 581 drivers/vhost/vringh.c static inline int xfer_from_user(void *src, void *dst, size_t len) dst 583 drivers/vhost/vringh.c return copy_from_user(dst, (__force void __user *)src, len) ? dst 587 drivers/vhost/vringh.c static inline int xfer_to_user(void *dst, void *src, size_t len) dst 589 drivers/vhost/vringh.c return copy_to_user((__force void __user *)dst, src, len) ? 
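The vringh.c entries just above show one descriptor and data path parameterized by tiny copy callbacks: copydesc_user(), putused_user() and xfer_from_user()/xfer_to_user() wrap copy_from_user()/copy_to_user() and return -EFAULT on failure, while the _kern variants do a plain memcpy, so the same ring-handling code serves user-space and kernel-space vrings. A compact user-space sketch of that shape, with hypothetical names (xfer_fn, kiov, iov_pull); it is not the vringh API itself:

#include <errno.h>
#include <stddef.h>
#include <string.h>

/* Copy callback: 0 on success, negative errno on failure. */
typedef int (*xfer_fn)(void *dst, const void *src, size_t len);

/* "Kernel" flavour: a plain memcpy that cannot fault in this model. */
static int xfer_kern(void *dst, const void *src, size_t len)
{
	memcpy(dst, src, len);
	return 0;
}

/* "User" flavour stand-in: simulates a fault instead of calling copy_from_user(). */
static int xfer_user(void *dst, const void *src, size_t len)
{
	if (!dst || !src)
		return -EFAULT;
	memcpy(dst, src, len);
	return 0;
}

struct kiov {
	void *base;
	size_t len;
};

/* Pull up to len bytes out of an iovec-like array through the callback,
 * in the spirit of vringh_iov_xfer()/vringh_iov_pull_user(). */
static int iov_pull(const struct kiov *iov, size_t niov, void *dst, size_t len,
		    xfer_fn xfer)
{
	size_t i;

	for (i = 0; i < niov && len; i++) {
		size_t part = iov[i].len < len ? iov[i].len : len;
		int err = xfer(dst, iov[i].base, part);

		if (err)
			return err;
		dst = (char *)dst + part;
		len -= part;
	}
	return len ? -ENOBUFS : 0;
}

A caller picks xfer_user or xfer_kern once and everything else stays identical, which is what keeps the user and kernel vring paths from diverging.
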
dst 707 drivers/vhost/vringh.c ssize_t vringh_iov_pull_user(struct vringh_iov *riov, void *dst, size_t len) dst 710 drivers/vhost/vringh.c dst, len, xfer_from_user); dst 835 drivers/vhost/vringh.c static inline int copydesc_kern(void *dst, const void *src, size_t len) dst 837 drivers/vhost/vringh.c memcpy(dst, src, len); dst 841 drivers/vhost/vringh.c static inline int putused_kern(struct vring_used_elem *dst, dst 845 drivers/vhost/vringh.c memcpy(dst, src, num * sizeof(*dst)); dst 849 drivers/vhost/vringh.c static inline int xfer_kern(void *src, void *dst, size_t len) dst 851 drivers/vhost/vringh.c memcpy(dst, src, len); dst 855 drivers/vhost/vringh.c static inline int kern_xfer(void *dst, void *src, size_t len) dst 857 drivers/vhost/vringh.c memcpy(dst, src, len); dst 950 drivers/vhost/vringh.c ssize_t vringh_iov_pull_kern(struct vringh_kiov *riov, void *dst, size_t len) dst 952 drivers/vhost/vringh.c return vringh_iov_xfer(riov, dst, len, xfer_kern); dst 2598 drivers/video/fbdev/amifb.c static void bitcpy(unsigned long *dst, int dst_idx, const unsigned long *src, dst 2620 drivers/video/fbdev/amifb.c *dst = comp(*src, *dst, first); dst 2625 drivers/video/fbdev/amifb.c *dst = comp(*src, *dst, first); dst 2626 drivers/video/fbdev/amifb.c dst++; dst 2634 drivers/video/fbdev/amifb.c *dst++ = *src++; dst 2635 drivers/video/fbdev/amifb.c *dst++ = *src++; dst 2636 drivers/video/fbdev/amifb.c *dst++ = *src++; dst 2637 drivers/video/fbdev/amifb.c *dst++ = *src++; dst 2638 drivers/video/fbdev/amifb.c *dst++ = *src++; dst 2639 drivers/video/fbdev/amifb.c *dst++ = *src++; dst 2640 drivers/video/fbdev/amifb.c *dst++ = *src++; dst 2641 drivers/video/fbdev/amifb.c *dst++ = *src++; dst 2645 drivers/video/fbdev/amifb.c *dst++ = *src++; dst 2649 drivers/video/fbdev/amifb.c *dst = comp(*src, *dst, last); dst 2663 drivers/video/fbdev/amifb.c *dst = comp(*src >> right, *dst, first); dst 2666 drivers/video/fbdev/amifb.c *dst = comp(*src << left, *dst, first); dst 2671 drivers/video/fbdev/amifb.c *dst = comp(d0 << left | d1 >> right, *dst, dst 2680 drivers/video/fbdev/amifb.c *dst = comp(d0 >> right, *dst, first); dst 2681 drivers/video/fbdev/amifb.c dst++; dst 2686 drivers/video/fbdev/amifb.c *dst = comp(d0 << left | d1 >> right, *dst, dst 2689 drivers/video/fbdev/amifb.c dst++; dst 2698 drivers/video/fbdev/amifb.c *dst++ = d0 << left | d1 >> right; dst 2701 drivers/video/fbdev/amifb.c *dst++ = d0 << left | d1 >> right; dst 2704 drivers/video/fbdev/amifb.c *dst++ = d0 << left | d1 >> right; dst 2707 drivers/video/fbdev/amifb.c *dst++ = d0 << left | d1 >> right; dst 2713 drivers/video/fbdev/amifb.c *dst++ = d0 << left | d1 >> right; dst 2721 drivers/video/fbdev/amifb.c *dst = comp(d0 << left, *dst, last); dst 2725 drivers/video/fbdev/amifb.c *dst = comp(d0 << left | d1 >> right, dst 2726 drivers/video/fbdev/amifb.c *dst, last); dst 2738 drivers/video/fbdev/amifb.c static void bitcpy_rev(unsigned long *dst, int dst_idx, dst 2749 drivers/video/fbdev/amifb.c dst += (n - 1) / BITS_PER_LONG; dst 2753 drivers/video/fbdev/amifb.c dst += dst_idx >> SHIFT_PER_LONG; dst 2771 drivers/video/fbdev/amifb.c *dst = comp(*src, *dst, first); dst 2776 drivers/video/fbdev/amifb.c *dst = comp(*src, *dst, first); dst 2777 drivers/video/fbdev/amifb.c dst--; dst 2785 drivers/video/fbdev/amifb.c *dst-- = *src--; dst 2786 drivers/video/fbdev/amifb.c *dst-- = *src--; dst 2787 drivers/video/fbdev/amifb.c *dst-- = *src--; dst 2788 drivers/video/fbdev/amifb.c *dst-- = *src--; dst 2789 drivers/video/fbdev/amifb.c *dst-- = *src--; dst 2790 
drivers/video/fbdev/amifb.c *dst-- = *src--; dst 2791 drivers/video/fbdev/amifb.c *dst-- = *src--; dst 2792 drivers/video/fbdev/amifb.c *dst-- = *src--; dst 2796 drivers/video/fbdev/amifb.c *dst-- = *src--; dst 2800 drivers/video/fbdev/amifb.c *dst = comp(*src, *dst, last); dst 2814 drivers/video/fbdev/amifb.c *dst = comp(*src << left, *dst, first); dst 2817 drivers/video/fbdev/amifb.c *dst = comp(*src >> right, *dst, first); dst 2822 drivers/video/fbdev/amifb.c *dst = comp(d0 >> right | d1 << left, *dst, dst 2831 drivers/video/fbdev/amifb.c *dst = comp(d0 << left, *dst, first); dst 2832 drivers/video/fbdev/amifb.c dst--; dst 2837 drivers/video/fbdev/amifb.c *dst = comp(d0 >> right | d1 << left, *dst, dst 2840 drivers/video/fbdev/amifb.c dst--; dst 2849 drivers/video/fbdev/amifb.c *dst-- = d0 >> right | d1 << left; dst 2852 drivers/video/fbdev/amifb.c *dst-- = d0 >> right | d1 << left; dst 2855 drivers/video/fbdev/amifb.c *dst-- = d0 >> right | d1 << left; dst 2858 drivers/video/fbdev/amifb.c *dst-- = d0 >> right | d1 << left; dst 2864 drivers/video/fbdev/amifb.c *dst-- = d0 >> right | d1 << left; dst 2872 drivers/video/fbdev/amifb.c *dst = comp(d0 >> right, *dst, last); dst 2876 drivers/video/fbdev/amifb.c *dst = comp(d0 >> right | d1 << left, dst 2877 drivers/video/fbdev/amifb.c *dst, last); dst 2890 drivers/video/fbdev/amifb.c static void bitcpy_not(unsigned long *dst, int dst_idx, dst 2912 drivers/video/fbdev/amifb.c *dst = comp(~*src, *dst, first); dst 2917 drivers/video/fbdev/amifb.c *dst = comp(~*src, *dst, first); dst 2918 drivers/video/fbdev/amifb.c dst++; dst 2926 drivers/video/fbdev/amifb.c *dst++ = ~*src++; dst 2927 drivers/video/fbdev/amifb.c *dst++ = ~*src++; dst 2928 drivers/video/fbdev/amifb.c *dst++ = ~*src++; dst 2929 drivers/video/fbdev/amifb.c *dst++ = ~*src++; dst 2930 drivers/video/fbdev/amifb.c *dst++ = ~*src++; dst 2931 drivers/video/fbdev/amifb.c *dst++ = ~*src++; dst 2932 drivers/video/fbdev/amifb.c *dst++ = ~*src++; dst 2933 drivers/video/fbdev/amifb.c *dst++ = ~*src++; dst 2937 drivers/video/fbdev/amifb.c *dst++ = ~*src++; dst 2941 drivers/video/fbdev/amifb.c *dst = comp(~*src, *dst, last); dst 2955 drivers/video/fbdev/amifb.c *dst = comp(~*src >> right, *dst, first); dst 2958 drivers/video/fbdev/amifb.c *dst = comp(~*src << left, *dst, first); dst 2963 drivers/video/fbdev/amifb.c *dst = comp(d0 << left | d1 >> right, *dst, dst 2972 drivers/video/fbdev/amifb.c *dst = comp(d0 >> right, *dst, first); dst 2973 drivers/video/fbdev/amifb.c dst++; dst 2978 drivers/video/fbdev/amifb.c *dst = comp(d0 << left | d1 >> right, *dst, dst 2981 drivers/video/fbdev/amifb.c dst++; dst 2990 drivers/video/fbdev/amifb.c *dst++ = d0 << left | d1 >> right; dst 2993 drivers/video/fbdev/amifb.c *dst++ = d0 << left | d1 >> right; dst 2996 drivers/video/fbdev/amifb.c *dst++ = d0 << left | d1 >> right; dst 2999 drivers/video/fbdev/amifb.c *dst++ = d0 << left | d1 >> right; dst 3005 drivers/video/fbdev/amifb.c *dst++ = d0 << left | d1 >> right; dst 3013 drivers/video/fbdev/amifb.c *dst = comp(d0 << left, *dst, last); dst 3017 drivers/video/fbdev/amifb.c *dst = comp(d0 << left | d1 >> right, dst 3018 drivers/video/fbdev/amifb.c *dst, last); dst 3030 drivers/video/fbdev/amifb.c static void bitfill32(unsigned long *dst, int dst_idx, u32 pat, u32 n) dst 3049 drivers/video/fbdev/amifb.c *dst = comp(val, *dst, first); dst 3054 drivers/video/fbdev/amifb.c *dst = comp(val, *dst, first); dst 3055 drivers/video/fbdev/amifb.c dst++; dst 3062 drivers/video/fbdev/amifb.c *dst++ = val; dst 3063 
drivers/video/fbdev/amifb.c *dst++ = val; dst 3064 drivers/video/fbdev/amifb.c *dst++ = val; dst 3065 drivers/video/fbdev/amifb.c *dst++ = val; dst 3066 drivers/video/fbdev/amifb.c *dst++ = val; dst 3067 drivers/video/fbdev/amifb.c *dst++ = val; dst 3068 drivers/video/fbdev/amifb.c *dst++ = val; dst 3069 drivers/video/fbdev/amifb.c *dst++ = val; dst 3073 drivers/video/fbdev/amifb.c *dst++ = val; dst 3077 drivers/video/fbdev/amifb.c *dst = comp(val, *dst, last); dst 3086 drivers/video/fbdev/amifb.c static void bitxor32(unsigned long *dst, int dst_idx, u32 pat, u32 n) dst 3105 drivers/video/fbdev/amifb.c *dst = xor(val, *dst, first); dst 3110 drivers/video/fbdev/amifb.c *dst = xor(val, *dst, first); dst 3111 drivers/video/fbdev/amifb.c dst++; dst 3118 drivers/video/fbdev/amifb.c *dst++ ^= val; dst 3119 drivers/video/fbdev/amifb.c *dst++ ^= val; dst 3120 drivers/video/fbdev/amifb.c *dst++ ^= val; dst 3121 drivers/video/fbdev/amifb.c *dst++ ^= val; dst 3125 drivers/video/fbdev/amifb.c *dst++ ^= val; dst 3129 drivers/video/fbdev/amifb.c *dst = xor(val, *dst, last); dst 3134 drivers/video/fbdev/amifb.c unsigned long *dst, int dst_idx, u32 n, dst 3138 drivers/video/fbdev/amifb.c dst += dst_idx >> SHIFT_PER_LONG; dst 3140 drivers/video/fbdev/amifb.c bitfill32(dst, dst_idx, color & 1 ? ~0 : 0, n); dst 3149 drivers/video/fbdev/amifb.c unsigned long *dst, int dst_idx, u32 n, dst 3153 drivers/video/fbdev/amifb.c dst += dst_idx >> SHIFT_PER_LONG; dst 3155 drivers/video/fbdev/amifb.c bitxor32(dst, dst_idx, color & 1 ? ~0 : 0, n); dst 3169 drivers/video/fbdev/amifb.c unsigned long *dst; dst 3186 drivers/video/fbdev/amifb.c dst = (unsigned long *) dst 3194 drivers/video/fbdev/amifb.c par->next_plane, dst, dst_idx, width, dst 3200 drivers/video/fbdev/amifb.c dst, dst_idx, width, rect->color); dst 3208 drivers/video/fbdev/amifb.c unsigned long *dst, int dst_idx, dst 3212 drivers/video/fbdev/amifb.c dst += dst_idx >> SHIFT_PER_LONG; dst 3216 drivers/video/fbdev/amifb.c bitcpy(dst, dst_idx, src, src_idx, n); dst 3225 drivers/video/fbdev/amifb.c unsigned long *dst, int dst_idx, dst 3229 drivers/video/fbdev/amifb.c dst += dst_idx >> SHIFT_PER_LONG; dst 3233 drivers/video/fbdev/amifb.c bitcpy_rev(dst, dst_idx, src, src_idx, n); dst 3248 drivers/video/fbdev/amifb.c unsigned long *dst, *src; dst 3279 drivers/video/fbdev/amifb.c dst = (unsigned long *) dst 3281 drivers/video/fbdev/amifb.c src = dst; dst 3291 drivers/video/fbdev/amifb.c par->next_plane, dst, dst_idx, src, dst 3297 drivers/video/fbdev/amifb.c par->next_plane, dst, dst_idx, src, dst 3307 drivers/video/fbdev/amifb.c unsigned long *dst, int dst_idx, u32 n, dst 3314 drivers/video/fbdev/amifb.c dst += dst_idx >> SHIFT_PER_LONG; dst 3321 drivers/video/fbdev/amifb.c bitcpy(dst, dst_idx, src, src_idx, n); dst 3323 drivers/video/fbdev/amifb.c bitcpy_not(dst, dst_idx, src, src_idx, n); dst 3326 drivers/video/fbdev/amifb.c bitfill32(dst, dst_idx, fgcolor & 1 ? 
~0 : 0, n); dst 3340 drivers/video/fbdev/amifb.c unsigned long *dst; dst 3360 drivers/video/fbdev/amifb.c dst = (unsigned long *) dst 3368 drivers/video/fbdev/amifb.c par->next_plane, dst, dst_idx, width, dst 180 drivers/video/fbdev/arkfb.c u32 __iomem *dst; dst 190 drivers/video/fbdev/arkfb.c dst = (u32 __iomem *) dst1; dst 194 drivers/video/fbdev/arkfb.c fb_writel(val, dst++); dst 207 drivers/video/fbdev/arkfb.c u32 __iomem *dst; dst 214 drivers/video/fbdev/arkfb.c dst = (u32 __iomem *) dst1; dst 216 drivers/video/fbdev/arkfb.c fb_writel(fg, dst++); dst 238 drivers/video/fbdev/arkfb.c u32 __iomem *dst; dst 248 drivers/video/fbdev/arkfb.c dst = (u32 __iomem *) dst1; dst 252 drivers/video/fbdev/arkfb.c fb_writel(val, dst++); dst 2578 drivers/video/fbdev/atafb.c unsigned long *dst; dst 2606 drivers/video/fbdev/atafb.c dst = (unsigned long *) dst 39 drivers/video/fbdev/atafb_iplan2p2.c u8 *src, *dst; dst 51 drivers/video/fbdev/atafb_iplan2p2.c dst = (u8 *)info->screen_base + dy * next_line + (dx & ~15) / (8 / BPL); dst 53 drivers/video/fbdev/atafb_iplan2p2.c memmove32_col(dst, src, 0xff00ff, height, next_line - BPL * 2); dst 55 drivers/video/fbdev/atafb_iplan2p2.c dst += BPL * 2; dst 61 drivers/video/fbdev/atafb_iplan2p2.c d = (u32 *)dst; dst 72 drivers/video/fbdev/atafb_iplan2p2.c memmove32_col(dst + width / (8 / BPL), src + width / (8 / BPL), dst 76 drivers/video/fbdev/atafb_iplan2p2.c dst = (u8 *)info->screen_base + (dy - 1) * next_line + ((dx + width + 8) & ~15) / (8 / BPL); dst 80 drivers/video/fbdev/atafb_iplan2p2.c dst -= BPL * 2; dst 81 drivers/video/fbdev/atafb_iplan2p2.c memmove32_col(dst, src, 0xff00ff00, colsize, -next_line - BPL * 2); dst 87 drivers/video/fbdev/atafb_iplan2p2.c d = (u32 *)dst; dst 98 drivers/video/fbdev/atafb_iplan2p2.c memmove32_col(dst - (width - 16) / (8 / BPL), dst 110 drivers/video/fbdev/atafb_iplan2p2.c dst = (u8 *)info->screen_base + dy * next_line + (dx & ~15) / (8 / BPL); dst 124 drivers/video/fbdev/atafb_iplan2p2.c dst32 = (u32 *)dst; dst 144 drivers/video/fbdev/atafb_iplan2p2.c dst += next_line; dst 152 drivers/video/fbdev/atafb_iplan2p2.c dst = (u8 *)info->screen_base + (dy - 1) * next_line + ((dx + width + 8) & ~15) / (8 / BPL); dst 166 drivers/video/fbdev/atafb_iplan2p2.c dst32 = (u32 *)dst; dst 186 drivers/video/fbdev/atafb_iplan2p2.c dst -= next_line; dst 39 drivers/video/fbdev/atafb_iplan2p4.c u8 *src, *dst; dst 51 drivers/video/fbdev/atafb_iplan2p4.c dst = (u8 *)info->screen_base + dy * next_line + (dx & ~15) / (8 / BPL); dst 53 drivers/video/fbdev/atafb_iplan2p4.c memmove32_col(dst, src, 0xff00ff, height, next_line - BPL * 2); dst 55 drivers/video/fbdev/atafb_iplan2p4.c dst += BPL * 2; dst 61 drivers/video/fbdev/atafb_iplan2p4.c d = (u32 *)dst; dst 72 drivers/video/fbdev/atafb_iplan2p4.c memmove32_col(dst + width / (8 / BPL), src + width / (8 / BPL), dst 76 drivers/video/fbdev/atafb_iplan2p4.c dst = (u8 *)info->screen_base + (dy - 1) * next_line + ((dx + width + 8) & ~15) / (8 / BPL); dst 80 drivers/video/fbdev/atafb_iplan2p4.c dst -= BPL * 2; dst 81 drivers/video/fbdev/atafb_iplan2p4.c memmove32_col(dst, src, 0xff00ff00, colsize, -next_line - BPL * 2); dst 87 drivers/video/fbdev/atafb_iplan2p4.c d = (u32 *)dst; dst 98 drivers/video/fbdev/atafb_iplan2p4.c memmove32_col(dst - (width - 16) / (8 / BPL), dst 110 drivers/video/fbdev/atafb_iplan2p4.c dst = (u8 *)info->screen_base + dy * next_line + (dx & ~15) / (8 / BPL); dst 124 drivers/video/fbdev/atafb_iplan2p4.c dst32 = (u32 *)dst; dst 151 drivers/video/fbdev/atafb_iplan2p4.c dst += next_line; 
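The amifb bitcpy()/bitfill32()/bitxor32() entries above, the atafb interleaved-plane helpers around this point, and the cfbcopyarea bitcpy() indexed further down all lean on the same word-at-a-time technique: a span of bits rarely starts or ends on a long-word boundary, so the code builds a "first" mask for the leading partial word and a "last" mask for the trailing one, merges new bits into existing memory with comp(a, b, mask) = (a & mask) | (b & ~mask), and writes full words in between (unrolled by eight in the kernel sources). A simplified, LSB-first user-space sketch of the fill case; the kernel versions go through endian-aware FB_SHIFT/FB_READL/FB_WRITEL handling and the names here are hypothetical:

#include <limits.h>

#define WORD_BITS (sizeof(unsigned long) * CHAR_BIT)

/* Merge the bits of 'a' selected by 'mask' into 'b'. */
static unsigned long comp(unsigned long a, unsigned long b, unsigned long mask)
{
	return (a & mask) | (b & ~mask);
}

/* Fill n bits with pattern 'pat', starting at bit index 'idx' of dst[]
 * (bit 0 = least significant bit of dst[0]). */
static void bitfill(unsigned long *dst, unsigned long idx, unsigned long pat,
		    unsigned long n)
{
	unsigned long first, last, end;

	if (!n)
		return;

	dst += idx / WORD_BITS;
	idx %= WORD_BITS;
	end = (idx + n) % WORD_BITS;

	first = ~0UL << idx;                /* bits idx..WORD_BITS-1 of the first word */
	last = end ? ~(~0UL << end) : ~0UL; /* bits 0..end-1 of the last word */

	if (idx + n <= WORD_BITS) {         /* span lives in a single word */
		*dst = comp(pat, *dst, first & last);
		return;
	}

	*dst = comp(pat, *dst, first);      /* leading partial word */
	dst++;
	n -= WORD_BITS - idx;

	while (n >= WORD_BITS) {            /* full words in the middle */
		*dst++ = pat;
		n -= WORD_BITS;
	}

	if (n)                              /* trailing partial word */
		*dst = comp(pat, *dst, last);
}

The copy variants add a shift between source and destination bit offsets on top of the same edge handling, and bitcpy_rev() walks the words backwards so overlapping moves stay safe.
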
dst 159 drivers/video/fbdev/atafb_iplan2p4.c dst = (u8 *)info->screen_base + (dy - 1) * next_line + ((dx + width + 8) & ~15) / (8 / BPL); dst 173 drivers/video/fbdev/atafb_iplan2p4.c dst32 = (u32 *)dst; dst 200 drivers/video/fbdev/atafb_iplan2p4.c dst -= next_line; dst 46 drivers/video/fbdev/atafb_iplan2p8.c u8 *src, *dst; dst 58 drivers/video/fbdev/atafb_iplan2p8.c dst = (u8 *)info->screen_base + dy * next_line + (dx & ~15) / (8 / BPL); dst 60 drivers/video/fbdev/atafb_iplan2p8.c memmove32_col(dst, src, 0xff00ff, height, next_line - BPL * 2); dst 62 drivers/video/fbdev/atafb_iplan2p8.c dst += BPL * 2; dst 68 drivers/video/fbdev/atafb_iplan2p8.c d = (u32 *)dst; dst 79 drivers/video/fbdev/atafb_iplan2p8.c memmove32_col(dst + width / (8 / BPL), src + width / (8 / BPL), dst 83 drivers/video/fbdev/atafb_iplan2p8.c dst = (u8 *)info->screen_base + (dy - 1) * next_line + ((dx + width + 8) & ~15) / (8 / BPL); dst 87 drivers/video/fbdev/atafb_iplan2p8.c dst -= BPL * 2; dst 88 drivers/video/fbdev/atafb_iplan2p8.c memmove32_col(dst, src, 0xff00ff00, colsize, -next_line - BPL * 2); dst 94 drivers/video/fbdev/atafb_iplan2p8.c d = (u32 *)dst; dst 105 drivers/video/fbdev/atafb_iplan2p8.c memmove32_col(dst - (width - 16) / (8 / BPL), dst 117 drivers/video/fbdev/atafb_iplan2p8.c dst = (u8 *)info->screen_base + dy * next_line + (dx & ~15) / (8 / BPL); dst 131 drivers/video/fbdev/atafb_iplan2p8.c dst32 = (u32 *)dst; dst 172 drivers/video/fbdev/atafb_iplan2p8.c dst += next_line; dst 180 drivers/video/fbdev/atafb_iplan2p8.c dst = (u8 *)info->screen_base + (dy - 1) * next_line + ((dx + width + 8) & ~15) / (8 / BPL); dst 194 drivers/video/fbdev/atafb_iplan2p8.c dst32 = (u32 *)dst; dst 235 drivers/video/fbdev/atafb_iplan2p8.c dst -= next_line; dst 220 drivers/video/fbdev/atafb_utils.h static inline void fast_memmove(char *dst, const char *src, size_t size) dst 224 drivers/video/fbdev/atafb_utils.h if (dst < src) dst 232 drivers/video/fbdev/atafb_utils.h : "=a" (src), "=a" (dst), "=d" (size) dst 233 drivers/video/fbdev/atafb_utils.h : "0" (src), "1" (dst), "2" (size / 16 - 1) dst 243 drivers/video/fbdev/atafb_utils.h : "=a" (src), "=a" (dst), "=d" (size) dst 244 drivers/video/fbdev/atafb_utils.h : "0" (src + size), "1" (dst + size), "2" (size / 16 - 1) dst 280 drivers/video/fbdev/atafb_utils.h static inline void fill8_col(u8 *dst, u32 m[]) dst 283 drivers/video/fbdev/atafb_utils.h dst[0] = tmp; dst 284 drivers/video/fbdev/atafb_utils.h dst[2] = (tmp >>= 8); dst 286 drivers/video/fbdev/atafb_utils.h dst[4] = (tmp >>= 8); dst 287 drivers/video/fbdev/atafb_utils.h dst[6] = tmp >> 8; dst 291 drivers/video/fbdev/atafb_utils.h dst[8] = tmp; dst 292 drivers/video/fbdev/atafb_utils.h dst[10] = (tmp >>= 8); dst 293 drivers/video/fbdev/atafb_utils.h dst[12] = (tmp >>= 8); dst 294 drivers/video/fbdev/atafb_utils.h dst[14] = tmp >> 8; dst 301 drivers/video/fbdev/atafb_utils.h static inline void fill8_2col(u8 *dst, u8 fg, u8 bg, u32 mask) dst 312 drivers/video/fbdev/atafb_utils.h dst[0] = tmp; dst 313 drivers/video/fbdev/atafb_utils.h dst[2] = (tmp >>= 8); dst 315 drivers/video/fbdev/atafb_utils.h dst[4] = (tmp >>= 8); dst 316 drivers/video/fbdev/atafb_utils.h dst[6] = tmp >> 8; dst 320 drivers/video/fbdev/atafb_utils.h dst[8] = tmp; dst 321 drivers/video/fbdev/atafb_utils.h dst[10] = (tmp >>= 8); dst 322 drivers/video/fbdev/atafb_utils.h dst[12] = (tmp >>= 8); dst 323 drivers/video/fbdev/atafb_utils.h dst[14] = tmp >> 8; dst 359 drivers/video/fbdev/atafb_utils.h static inline u32 *fill16_col(u32 *dst, int rows, u32 m[]) dst 
362 drivers/video/fbdev/atafb_utils.h *dst++ = m[0]; dst 364 drivers/video/fbdev/atafb_utils.h *dst++ = m[1]; dst 367 drivers/video/fbdev/atafb_utils.h *dst++ = m[2]; dst 368 drivers/video/fbdev/atafb_utils.h *dst++ = m[3]; dst 372 drivers/video/fbdev/atafb_utils.h return dst; dst 375 drivers/video/fbdev/atafb_utils.h static inline void memmove32_col(void *dst, void *src, u32 mask, u32 h, u32 bytes) dst 380 drivers/video/fbdev/atafb_utils.h d = dst; dst 1043 drivers/video/fbdev/aty/aty128fb.c u32 xres, yres, vxres, vyres, xoffset, yoffset, bpp, dst; dst 1088 drivers/video/fbdev/aty/aty128fb.c dst = depth_to_dst(depth); dst 1090 drivers/video/fbdev/aty/aty128fb.c if (dst == -EINVAL) { dst 1096 drivers/video/fbdev/aty/aty128fb.c bytpp = mode_bytpp[dst]; dst 1137 drivers/video/fbdev/aty/aty128fb.c crtc->gen_cntl = 0x3000000L | c_sync | (dst << 8); dst 147 drivers/video/fbdev/aty/mach64_cursor.c u8 __iomem *dst = (u8 __iomem *)info->sprite.addr; dst 156 drivers/video/fbdev/aty/mach64_cursor.c fb_memset(dst, 0xaa, 1024); dst 187 drivers/video/fbdev/aty/mach64_cursor.c fb_writeb(l & 0xff, dst++); dst 188 drivers/video/fbdev/aty/mach64_cursor.c fb_writeb(l >> 8, dst++); dst 190 drivers/video/fbdev/aty/mach64_cursor.c dst += offset; dst 1014 drivers/video/fbdev/broadsheetfb.c void *dst; dst 1038 drivers/video/fbdev/broadsheetfb.c dst = (void *)(info->screen_base + p); dst 1040 drivers/video/fbdev/broadsheetfb.c if (copy_from_user(dst, buf, count)) dst 13 drivers/video/fbdev/c2p.h extern void c2p_planar(void *dst, const void *src, u32 dx, u32 dy, u32 width, dst 17 drivers/video/fbdev/c2p.h extern void c2p_iplan2(void *dst, const void *src, u32 dx, u32 dy, u32 width, dst 48 drivers/video/fbdev/c2p_iplan2.c static inline void store_iplan2(void *dst, u32 bpp, u32 d[4]) dst 52 drivers/video/fbdev/c2p_iplan2.c for (i = 0; i < bpp/2; i++, dst += 4) dst 53 drivers/video/fbdev/c2p_iplan2.c put_unaligned_be32(d[perm_c2p_16x8[i]], dst); dst 61 drivers/video/fbdev/c2p_iplan2.c static inline void store_iplan2_masked(void *dst, u32 bpp, u32 d[4], u32 mask) dst 65 drivers/video/fbdev/c2p_iplan2.c for (i = 0; i < bpp/2; i++, dst += 4) dst 67 drivers/video/fbdev/c2p_iplan2.c get_unaligned_be32(dst), mask), dst 68 drivers/video/fbdev/c2p_iplan2.c dst); dst 85 drivers/video/fbdev/c2p_iplan2.c void c2p_iplan2(void *dst, const void *src, u32 dx, u32 dy, u32 width, dst 96 drivers/video/fbdev/c2p_iplan2.c dst += dy*dst_nextline+(dx & ~15)*bpp; dst 104 drivers/video/fbdev/c2p_iplan2.c p = dst; dst 148 drivers/video/fbdev/c2p_iplan2.c dst += dst_nextline; dst 48 drivers/video/fbdev/c2p_planar.c static inline void store_planar(void *dst, u32 dst_inc, u32 bpp, u32 d[8]) dst 52 drivers/video/fbdev/c2p_planar.c for (i = 0; i < bpp; i++, dst += dst_inc) dst 53 drivers/video/fbdev/c2p_planar.c put_unaligned_be32(d[perm_c2p_32x8[i]], dst); dst 61 drivers/video/fbdev/c2p_planar.c static inline void store_planar_masked(void *dst, u32 dst_inc, u32 bpp, dst 66 drivers/video/fbdev/c2p_planar.c for (i = 0; i < bpp; i++, dst += dst_inc) dst 68 drivers/video/fbdev/c2p_planar.c get_unaligned_be32(dst), mask), dst 69 drivers/video/fbdev/c2p_planar.c dst); dst 86 drivers/video/fbdev/c2p_planar.c void c2p_planar(void *dst, const void *src, u32 dx, u32 dy, u32 width, dst 98 drivers/video/fbdev/c2p_planar.c dst += dy*dst_nextline+(dx & ~31); dst 104 drivers/video/fbdev/c2p_planar.c p = dst; dst 151 drivers/video/fbdev/c2p_planar.c dst += dst_nextline; dst 174 drivers/video/fbdev/cobalt_lcdfb.c char dst[LCD_CHARS_MAX]; dst 188 
drivers/video/fbdev/cobalt_lcdfb.c if (copy_from_user(dst, buf, count)) dst 202 drivers/video/fbdev/cobalt_lcdfb.c lcd_write_data(info, dst[len]); dst 25 drivers/video/fbdev/core/bitblit.c static void update_attr(u8 *dst, u8 *src, int attribute, dst 42 drivers/video/fbdev/core/bitblit.c dst[i] = c; dst 80 drivers/video/fbdev/core/bitblit.c struct fb_image *image, u8 *buf, u8 *dst) dst 96 drivers/video/fbdev/core/bitblit.c __fb_pad_aligned_buffer(dst, d_pitch, src, idx, dst 99 drivers/video/fbdev/core/bitblit.c fb_pad_aligned_buffer(dst, d_pitch, src, idx, dst 102 drivers/video/fbdev/core/bitblit.c dst += s_pitch; dst 113 drivers/video/fbdev/core/bitblit.c u8 *dst) dst 130 drivers/video/fbdev/core/bitblit.c fb_pad_unaligned_buffer(dst, d_pitch, src, idx, dst 134 drivers/video/fbdev/core/bitblit.c dst += (shift_low >= 8) ? s_pitch : s_pitch - 1; dst 155 drivers/video/fbdev/core/bitblit.c u8 *dst, *buf = NULL; dst 181 drivers/video/fbdev/core/bitblit.c dst = fb_get_buffer_offset(info, &info->pixmap, size); dst 182 drivers/video/fbdev/core/bitblit.c image.data = dst; dst 186 drivers/video/fbdev/core/bitblit.c width, cellsize, &image, buf, dst); dst 190 drivers/video/fbdev/core/bitblit.c buf, dst); dst 270 drivers/video/fbdev/core/bitblit.c u8 *dst; dst 272 drivers/video/fbdev/core/bitblit.c dst = kmalloc_array(w, vc->vc_font.height, GFP_ATOMIC); dst 273 drivers/video/fbdev/core/bitblit.c if (!dst) dst 276 drivers/video/fbdev/core/bitblit.c ops->cursor_data = dst; dst 277 drivers/video/fbdev/core/bitblit.c update_attr(dst, src, attribute, vc); dst 278 drivers/video/fbdev/core/bitblit.c src = dst; dst 46 drivers/video/fbdev/core/cfbcopyarea.c bitcpy(struct fb_info *p, unsigned long __iomem *dst, unsigned dst_idx, dst 58 drivers/video/fbdev/core/cfbcopyarea.c memmove((char *)dst + ((dst_idx & (bits - 1))) / 8, dst 73 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL( comp( FB_READL(src), FB_READL(dst), first), dst); dst 79 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL( comp( FB_READL(src), FB_READL(dst), first), dst); dst 80 drivers/video/fbdev/core/cfbcopyarea.c dst++; dst 88 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src++), dst++); dst 89 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src++), dst++); dst 90 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src++), dst++); dst 91 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src++), dst++); dst 92 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src++), dst++); dst 93 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src++), dst++); dst 94 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src++), dst++); dst 95 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src++), dst++); dst 99 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src++), dst++); dst 103 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL( comp( FB_READL(src), FB_READL(dst), last), dst); dst 132 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(comp(d0, FB_READL(dst), first), dst); dst 157 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(comp(d0, FB_READL(dst), first), dst); dst 159 drivers/video/fbdev/core/cfbcopyarea.c dst++; dst 166 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(d0 >> right | d1 << left, dst++); dst 169 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(d0 >> right | d1 << left, dst++); dst 172 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(d0 >> right | d1 << left, dst++); dst 175 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(d0 >> right | d1 << left, dst++); dst 184 
drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(d0, dst++); dst 201 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(comp(d0, FB_READL(dst), last), dst); dst 212 drivers/video/fbdev/core/cfbcopyarea.c bitcpy_rev(struct fb_info *p, unsigned long __iomem *dst, unsigned dst_idx, dst 224 drivers/video/fbdev/core/cfbcopyarea.c memmove((char *)dst + ((dst_idx & (bits - 1))) / 8, dst 229 drivers/video/fbdev/core/cfbcopyarea.c dst += (dst_idx + n - 1) / bits; dst 246 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL( comp( FB_READL(src), FB_READL(dst), last), dst); dst 252 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL( comp( FB_READL(src), FB_READL(dst), first), dst); dst 253 drivers/video/fbdev/core/cfbcopyarea.c dst--; dst 261 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src--), dst--); dst 262 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src--), dst--); dst 263 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src--), dst--); dst 264 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src--), dst--); dst 265 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src--), dst--); dst 266 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src--), dst--); dst 267 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src--), dst--); dst 268 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src--), dst--); dst 272 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(FB_READL(src--), dst--); dst 276 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL( comp( FB_READL(src), FB_READL(dst), last), dst); dst 304 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(comp(d0, FB_READL(dst), last), dst); dst 328 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(d0, dst); dst 330 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(comp(d0, FB_READL(dst), first), dst); dst 332 drivers/video/fbdev/core/cfbcopyarea.c dst--; dst 340 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(d0 << left | d1 >> right, dst--); dst 343 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(d0 << left | d1 >> right, dst--); dst 346 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(d0 << left | d1 >> right, dst--); dst 349 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(d0 << left | d1 >> right, dst--); dst 358 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(d0, dst--); dst 375 drivers/video/fbdev/core/cfbcopyarea.c FB_WRITEL(comp(d0, FB_READL(dst), last), dst); dst 35 drivers/video/fbdev/core/cfbfillrect.c bitfill_aligned(struct fb_info *p, unsigned long __iomem *dst, int dst_idx, dst 50 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(comp(pat, FB_READL(dst), first), dst); dst 56 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(comp(pat, FB_READL(dst), first), dst); dst 57 drivers/video/fbdev/core/cfbfillrect.c dst++; dst 64 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(pat, dst++); dst 65 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(pat, dst++); dst 66 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(pat, dst++); dst 67 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(pat, dst++); dst 68 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(pat, dst++); dst 69 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(pat, dst++); dst 70 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(pat, dst++); dst 71 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(pat, dst++); dst 75 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(pat, dst++); dst 79 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(comp(pat, FB_READL(dst), last), dst); dst 92 drivers/video/fbdev/core/cfbfillrect.c 
bitfill_unaligned(struct fb_info *p, unsigned long __iomem *dst, int dst_idx, dst 107 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(comp(pat, FB_READL(dst), first), dst); dst 112 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(comp(pat, FB_READL(dst), first), dst); dst 113 drivers/video/fbdev/core/cfbfillrect.c dst++; dst 121 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(pat, dst++); dst 123 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(pat, dst++); dst 125 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(pat, dst++); dst 127 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(pat, dst++); dst 132 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(pat, dst++); dst 138 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(comp(pat, FB_READL(dst), last), dst); dst 146 drivers/video/fbdev/core/cfbfillrect.c bitfill_aligned_rev(struct fb_info *p, unsigned long __iomem *dst, dst 163 drivers/video/fbdev/core/cfbfillrect.c dat = FB_READL(dst); dst 164 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(comp(dat ^ val, dat, first), dst); dst 169 drivers/video/fbdev/core/cfbfillrect.c dat = FB_READL(dst); dst 170 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(comp(dat ^ val, dat, first), dst); dst 171 drivers/video/fbdev/core/cfbfillrect.c dst++; dst 178 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(FB_READL(dst) ^ val, dst); dst 179 drivers/video/fbdev/core/cfbfillrect.c dst++; dst 180 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(FB_READL(dst) ^ val, dst); dst 181 drivers/video/fbdev/core/cfbfillrect.c dst++; dst 182 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(FB_READL(dst) ^ val, dst); dst 183 drivers/video/fbdev/core/cfbfillrect.c dst++; dst 184 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(FB_READL(dst) ^ val, dst); dst 185 drivers/video/fbdev/core/cfbfillrect.c dst++; dst 186 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(FB_READL(dst) ^ val, dst); dst 187 drivers/video/fbdev/core/cfbfillrect.c dst++; dst 188 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(FB_READL(dst) ^ val, dst); dst 189 drivers/video/fbdev/core/cfbfillrect.c dst++; dst 190 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(FB_READL(dst) ^ val, dst); dst 191 drivers/video/fbdev/core/cfbfillrect.c dst++; dst 192 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(FB_READL(dst) ^ val, dst); dst 193 drivers/video/fbdev/core/cfbfillrect.c dst++; dst 197 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(FB_READL(dst) ^ val, dst); dst 198 drivers/video/fbdev/core/cfbfillrect.c dst++; dst 202 drivers/video/fbdev/core/cfbfillrect.c dat = FB_READL(dst); dst 203 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(comp(dat ^ val, dat, last), dst); dst 217 drivers/video/fbdev/core/cfbfillrect.c bitfill_unaligned_rev(struct fb_info *p, unsigned long __iomem *dst, dst 233 drivers/video/fbdev/core/cfbfillrect.c dat = FB_READL(dst); dst 234 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(comp(dat ^ pat, dat, first), dst); dst 240 drivers/video/fbdev/core/cfbfillrect.c dat = FB_READL(dst); dst 241 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(comp(dat ^ pat, dat, first), dst); dst 242 drivers/video/fbdev/core/cfbfillrect.c dst++; dst 250 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(FB_READL(dst) ^ pat, dst); dst 251 drivers/video/fbdev/core/cfbfillrect.c dst++; dst 253 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(FB_READL(dst) ^ pat, dst); dst 254 drivers/video/fbdev/core/cfbfillrect.c dst++; dst 256 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(FB_READL(dst) ^ pat, dst); dst 257 drivers/video/fbdev/core/cfbfillrect.c 
dst++; dst 259 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(FB_READL(dst) ^ pat, dst); dst 260 drivers/video/fbdev/core/cfbfillrect.c dst++; dst 265 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(FB_READL(dst) ^ pat, dst); dst 266 drivers/video/fbdev/core/cfbfillrect.c dst++; dst 272 drivers/video/fbdev/core/cfbfillrect.c dat = FB_READL(dst); dst 273 drivers/video/fbdev/core/cfbfillrect.c FB_WRITEL(comp(dat ^ pat, dat, last), dst); dst 284 drivers/video/fbdev/core/cfbfillrect.c unsigned long __iomem *dst; dst 298 drivers/video/fbdev/core/cfbfillrect.c dst = (unsigned long __iomem *)((unsigned long)p->screen_base & ~(bytes-1)); dst 308 drivers/video/fbdev/core/cfbfillrect.c unsigned long __iomem *dst, int dst_idx, dst 325 drivers/video/fbdev/core/cfbfillrect.c dst += dst_idx >> (ffs(bits) - 1); dst 327 drivers/video/fbdev/core/cfbfillrect.c fill_op32(p, dst, dst_idx, pat, width*bpp, bits, dst 333 drivers/video/fbdev/core/cfbfillrect.c void (*fill_op)(struct fb_info *p, unsigned long __iomem *dst, dst 355 drivers/video/fbdev/core/cfbfillrect.c dst += dst_idx / bits; dst 360 drivers/video/fbdev/core/cfbfillrect.c fill_op(p, dst, dst_idx, pat2, left, right, dst 81 drivers/video/fbdev/core/cfbimgblt.c u32 __iomem *dst, *dst2; dst 92 drivers/video/fbdev/core/cfbimgblt.c dst = (u32 __iomem *) dst1; dst 99 drivers/video/fbdev/core/cfbimgblt.c val = FB_READL(dst) & start_mask; dst 111 drivers/video/fbdev/core/cfbimgblt.c FB_WRITEL(val, dst++); dst 124 drivers/video/fbdev/core/cfbimgblt.c FB_WRITEL((FB_READL(dst) & end_mask) | val, dst); dst 144 drivers/video/fbdev/core/cfbimgblt.c u32 __iomem *dst, *dst2; dst 160 drivers/video/fbdev/core/cfbimgblt.c dst = (u32 __iomem *) dst1; dst 167 drivers/video/fbdev/core/cfbimgblt.c val = FB_READL(dst) & start_mask; dst 178 drivers/video/fbdev/core/cfbimgblt.c FB_WRITEL(val, dst++); dst 192 drivers/video/fbdev/core/cfbimgblt.c FB_WRITEL((FB_READL(dst) & end_mask) | val, dst); dst 223 drivers/video/fbdev/core/cfbimgblt.c u32 __iomem *dst; dst 252 drivers/video/fbdev/core/cfbimgblt.c dst = (u32 __iomem *) dst1, shift = 8; src = s; dst 257 drivers/video/fbdev/core/cfbimgblt.c FB_WRITEL((end_mask & eorx)^bgx, dst++); dst 60 drivers/video/fbdev/core/fb_sys_fops.c void *dst; dst 87 drivers/video/fbdev/core/fb_sys_fops.c dst = (void __force *) (info->screen_base + p); dst 92 drivers/video/fbdev/core/fb_sys_fops.c if (copy_from_user(dst, buf, count)) dst 25 drivers/video/fbdev/core/fbcon_ccw.c static void ccw_update_attr(u8 *dst, u8 *src, int attribute, dst 52 drivers/video/fbdev/core/fbcon_ccw.c *(dst - width) |= c; dst 57 drivers/video/fbdev/core/fbcon_ccw.c *dst++ = c; dst 101 drivers/video/fbdev/core/fbcon_ccw.c struct fb_image *image, u8 *buf, u8 *dst) dst 117 drivers/video/fbdev/core/fbcon_ccw.c __fb_pad_aligned_buffer(dst, d_pitch, src, idx, dst 120 drivers/video/fbdev/core/fbcon_ccw.c fb_pad_aligned_buffer(dst, d_pitch, src, idx, dst 123 drivers/video/fbdev/core/fbcon_ccw.c dst += d_pitch * vc->vc_font.width; dst 142 drivers/video/fbdev/core/fbcon_ccw.c u8 *dst, *buf = NULL; dst 174 drivers/video/fbdev/core/fbcon_ccw.c dst = fb_get_buffer_offset(info, &info->pixmap, size); dst 175 drivers/video/fbdev/core/fbcon_ccw.c image.data = dst; dst 177 drivers/video/fbdev/core/fbcon_ccw.c width, cellsize, &image, buf, dst); dst 259 drivers/video/fbdev/core/fbcon_ccw.c u8 *dst; dst 261 drivers/video/fbdev/core/fbcon_ccw.c dst = kmalloc_array(w, vc->vc_font.width, GFP_ATOMIC); dst 262 drivers/video/fbdev/core/fbcon_ccw.c if (!dst) dst 265 
drivers/video/fbdev/core/fbcon_ccw.c ops->cursor_data = dst; dst 266 drivers/video/fbdev/core/fbcon_ccw.c ccw_update_attr(dst, src, attribute, vc); dst 267 drivers/video/fbdev/core/fbcon_ccw.c src = dst; dst 25 drivers/video/fbdev/core/fbcon_cw.c static void cw_update_attr(u8 *dst, u8 *src, int attribute, dst 42 drivers/video/fbdev/core/fbcon_cw.c *dst++ = c; dst 86 drivers/video/fbdev/core/fbcon_cw.c struct fb_image *image, u8 *buf, u8 *dst) dst 102 drivers/video/fbdev/core/fbcon_cw.c __fb_pad_aligned_buffer(dst, d_pitch, src, idx, dst 105 drivers/video/fbdev/core/fbcon_cw.c fb_pad_aligned_buffer(dst, d_pitch, src, idx, dst 108 drivers/video/fbdev/core/fbcon_cw.c dst += d_pitch * vc->vc_font.width; dst 127 drivers/video/fbdev/core/fbcon_cw.c u8 *dst, *buf = NULL; dst 157 drivers/video/fbdev/core/fbcon_cw.c dst = fb_get_buffer_offset(info, &info->pixmap, size); dst 158 drivers/video/fbdev/core/fbcon_cw.c image.data = dst; dst 160 drivers/video/fbdev/core/fbcon_cw.c width, cellsize, &image, buf, dst); dst 242 drivers/video/fbdev/core/fbcon_cw.c u8 *dst; dst 244 drivers/video/fbdev/core/fbcon_cw.c dst = kmalloc_array(w, vc->vc_font.width, GFP_ATOMIC); dst 245 drivers/video/fbdev/core/fbcon_cw.c if (!dst) dst 248 drivers/video/fbdev/core/fbcon_cw.c ops->cursor_data = dst; dst 249 drivers/video/fbdev/core/fbcon_cw.c cw_update_attr(dst, src, attribute, vc); dst 250 drivers/video/fbdev/core/fbcon_cw.c src = dst; dst 27 drivers/video/fbdev/core/fbcon_rotate.c u8 *dst; dst 49 drivers/video/fbdev/core/fbcon_rotate.c dst = kmalloc_array(len, d_cellsize, GFP_KERNEL); dst 51 drivers/video/fbdev/core/fbcon_rotate.c if (dst == NULL) { dst 58 drivers/video/fbdev/core/fbcon_rotate.c ops->fontbuffer = dst; dst 61 drivers/video/fbdev/core/fbcon_rotate.c dst = ops->fontbuffer; dst 62 drivers/video/fbdev/core/fbcon_rotate.c memset(dst, 0, ops->fd_size); dst 67 drivers/video/fbdev/core/fbcon_rotate.c rotate_ud(src, dst, vc->vc_font.width, dst 71 drivers/video/fbdev/core/fbcon_rotate.c dst += d_cellsize; dst 76 drivers/video/fbdev/core/fbcon_rotate.c rotate_cw(src, dst, vc->vc_font.width, dst 79 drivers/video/fbdev/core/fbcon_rotate.c dst += d_cellsize; dst 84 drivers/video/fbdev/core/fbcon_rotate.c rotate_ccw(src, dst, vc->vc_font.width, dst 87 drivers/video/fbdev/core/fbcon_rotate.c dst += d_cellsize; dst 25 drivers/video/fbdev/core/fbcon_ud.c static void ud_update_attr(u8 *dst, u8 *src, int attribute, dst 43 drivers/video/fbdev/core/fbcon_ud.c dst[i] = c; dst 88 drivers/video/fbdev/core/fbcon_ud.c struct fb_image *image, u8 *buf, u8 *dst) dst 104 drivers/video/fbdev/core/fbcon_ud.c __fb_pad_aligned_buffer(dst, d_pitch, src, idx, dst 107 drivers/video/fbdev/core/fbcon_ud.c fb_pad_aligned_buffer(dst, d_pitch, src, idx, dst 110 drivers/video/fbdev/core/fbcon_ud.c dst += s_pitch; dst 121 drivers/video/fbdev/core/fbcon_ud.c u8 *dst) dst 138 drivers/video/fbdev/core/fbcon_ud.c fb_pad_unaligned_buffer(dst, d_pitch, src, idx, dst 142 drivers/video/fbdev/core/fbcon_ud.c dst += (shift_low >= 8) ? 
s_pitch : s_pitch - 1; dst 164 drivers/video/fbdev/core/fbcon_ud.c u8 *dst, *buf = NULL; dst 197 drivers/video/fbdev/core/fbcon_ud.c dst = fb_get_buffer_offset(info, &info->pixmap, size); dst 198 drivers/video/fbdev/core/fbcon_ud.c image.data = dst; dst 202 drivers/video/fbdev/core/fbcon_ud.c width, cellsize, &image, buf, dst); dst 206 drivers/video/fbdev/core/fbcon_ud.c buf, dst); dst 290 drivers/video/fbdev/core/fbcon_ud.c u8 *dst; dst 292 drivers/video/fbdev/core/fbcon_ud.c dst = kmalloc_array(w, vc->vc_font.height, GFP_ATOMIC); dst 293 drivers/video/fbdev/core/fbcon_ud.c if (!dst) dst 296 drivers/video/fbdev/core/fbcon_ud.c ops->cursor_data = dst; dst 297 drivers/video/fbdev/core/fbcon_ud.c ud_update_attr(dst, src, attribute, vc); dst 298 drivers/video/fbdev/core/fbcon_ud.c src = dst; dst 113 drivers/video/fbdev/core/fbmem.c void fb_pad_aligned_buffer(u8 *dst, u32 d_pitch, u8 *src, u32 s_pitch, u32 height) dst 115 drivers/video/fbdev/core/fbmem.c __fb_pad_aligned_buffer(dst, d_pitch, src, s_pitch, height); dst 119 drivers/video/fbdev/core/fbmem.c void fb_pad_unaligned_buffer(u8 *dst, u32 d_pitch, u8 *src, u32 idx, u32 height, dst 127 drivers/video/fbdev/core/fbmem.c tmp = dst[j]; dst 130 drivers/video/fbdev/core/fbmem.c dst[j] = tmp; dst 132 drivers/video/fbdev/core/fbmem.c dst[j+1] = tmp; dst 135 drivers/video/fbdev/core/fbmem.c tmp = dst[idx]; dst 138 drivers/video/fbdev/core/fbmem.c dst[idx] = tmp; dst 141 drivers/video/fbdev/core/fbmem.c dst[idx+1] = tmp; dst 144 drivers/video/fbdev/core/fbmem.c dst += d_pitch; dst 273 drivers/video/fbdev/core/fbmem.c const struct linux_logo *logo, u8 *dst, dst 301 drivers/video/fbdev/core/fbmem.c *dst++ = *src >> 4; dst 304 drivers/video/fbdev/core/fbmem.c *dst++ = *src & 0x0f; dst 314 drivers/video/fbdev/core/fbmem.c *dst++ = ((d >> k) & 1) ? 
fg : 0; dst 385 drivers/video/fbdev/core/fbmem.c static void fb_rotate_logo(struct fb_info *info, u8 *dst, dst 391 drivers/video/fbdev/core/fbmem.c fb_rotate_logo_ud(image->data, dst, image->width, dst 396 drivers/video/fbdev/core/fbmem.c fb_rotate_logo_cw(image->data, dst, image->width, dst 405 drivers/video/fbdev/core/fbmem.c fb_rotate_logo_ccw(image->data, dst, image->width, dst 415 drivers/video/fbdev/core/fbmem.c image->data = dst; dst 762 drivers/video/fbdev/core/fbmem.c u8 *buffer, *dst; dst 802 drivers/video/fbdev/core/fbmem.c dst = buffer; dst 803 drivers/video/fbdev/core/fbmem.c fb_memcpy_fromfb(dst, src, c); dst 804 drivers/video/fbdev/core/fbmem.c dst += c; dst 828 drivers/video/fbdev/core/fbmem.c u8 __iomem *dst; dst 866 drivers/video/fbdev/core/fbmem.c dst = (u8 __iomem *) (info->screen_base + p); dst 880 drivers/video/fbdev/core/fbmem.c fb_memcpy_tofb(dst, src, c); dst 881 drivers/video/fbdev/core/fbmem.c dst += c; dst 29 drivers/video/fbdev/core/softcursor.c u8 *src, *dst; dst 55 drivers/video/fbdev/core/softcursor.c dst = fb_get_buffer_offset(info, &info->pixmap, size); dst 72 drivers/video/fbdev/core/softcursor.c fb_pad_aligned_buffer(dst, d_pitch, src, s_pitch, image->height); dst 73 drivers/video/fbdev/core/softcursor.c image->data = dst; dst 225 drivers/video/fbdev/core/svgalib.c u16 __iomem *src, *dst; dst 230 drivers/video/fbdev/core/svgalib.c dst = fb + area->dx * colstride + area->dy * rowstride; dst 234 drivers/video/fbdev/core/svgalib.c dst = fb + (area->dx + area->width - 1) * colstride dst 243 drivers/video/fbdev/core/svgalib.c u16 __iomem *dst2 = dst; dst 251 drivers/video/fbdev/core/svgalib.c dst += rowstride; dst 28 drivers/video/fbdev/core/syscopyarea.c bitcpy(struct fb_info *p, unsigned long *dst, unsigned dst_idx, dst 44 drivers/video/fbdev/core/syscopyarea.c *dst = comp(*src, *dst, first); dst 49 drivers/video/fbdev/core/syscopyarea.c *dst = comp(*src, *dst, first); dst 50 drivers/video/fbdev/core/syscopyarea.c dst++; dst 58 drivers/video/fbdev/core/syscopyarea.c *dst++ = *src++; dst 59 drivers/video/fbdev/core/syscopyarea.c *dst++ = *src++; dst 60 drivers/video/fbdev/core/syscopyarea.c *dst++ = *src++; dst 61 drivers/video/fbdev/core/syscopyarea.c *dst++ = *src++; dst 62 drivers/video/fbdev/core/syscopyarea.c *dst++ = *src++; dst 63 drivers/video/fbdev/core/syscopyarea.c *dst++ = *src++; dst 64 drivers/video/fbdev/core/syscopyarea.c *dst++ = *src++; dst 65 drivers/video/fbdev/core/syscopyarea.c *dst++ = *src++; dst 69 drivers/video/fbdev/core/syscopyarea.c *dst++ = *src++; dst 73 drivers/video/fbdev/core/syscopyarea.c *dst = comp(*src, *dst, last); dst 89 drivers/video/fbdev/core/syscopyarea.c *dst = comp(*src << left, *dst, first); dst 92 drivers/video/fbdev/core/syscopyarea.c *dst = comp(*src >> right, *dst, first); dst 97 drivers/video/fbdev/core/syscopyarea.c *dst = comp(d0 >> right | d1 << left, *dst, dst 112 drivers/video/fbdev/core/syscopyarea.c *dst = comp(d0 << left, *dst, first); dst 113 drivers/video/fbdev/core/syscopyarea.c dst++; dst 118 drivers/video/fbdev/core/syscopyarea.c *dst = comp(d0 >> right | d1 << left, *dst, dst 121 drivers/video/fbdev/core/syscopyarea.c dst++; dst 130 drivers/video/fbdev/core/syscopyarea.c *dst++ = d0 >> right | d1 << left; dst 133 drivers/video/fbdev/core/syscopyarea.c *dst++ = d0 >> right | d1 << left; dst 136 drivers/video/fbdev/core/syscopyarea.c *dst++ = d0 >> right | d1 << left; dst 139 drivers/video/fbdev/core/syscopyarea.c *dst++ = d0 >> right | d1 << left; dst 145 drivers/video/fbdev/core/syscopyarea.c 
*dst++ = d0 >> right | d1 << left; dst 159 drivers/video/fbdev/core/syscopyarea.c *dst = comp(d0, *dst, last); dst 170 drivers/video/fbdev/core/syscopyarea.c bitcpy_rev(struct fb_info *p, unsigned long *dst, unsigned dst_idx, dst 177 drivers/video/fbdev/core/syscopyarea.c dst += (dst_idx + n - 1) / bits; dst 193 drivers/video/fbdev/core/syscopyarea.c *dst = comp(*src, *dst, last); dst 199 drivers/video/fbdev/core/syscopyarea.c *dst = comp(*src, *dst, first); dst 200 drivers/video/fbdev/core/syscopyarea.c dst--; dst 208 drivers/video/fbdev/core/syscopyarea.c *dst-- = *src--; dst 209 drivers/video/fbdev/core/syscopyarea.c *dst-- = *src--; dst 210 drivers/video/fbdev/core/syscopyarea.c *dst-- = *src--; dst 211 drivers/video/fbdev/core/syscopyarea.c *dst-- = *src--; dst 212 drivers/video/fbdev/core/syscopyarea.c *dst-- = *src--; dst 213 drivers/video/fbdev/core/syscopyarea.c *dst-- = *src--; dst 214 drivers/video/fbdev/core/syscopyarea.c *dst-- = *src--; dst 215 drivers/video/fbdev/core/syscopyarea.c *dst-- = *src--; dst 219 drivers/video/fbdev/core/syscopyarea.c *dst-- = *src--; dst 222 drivers/video/fbdev/core/syscopyarea.c *dst = comp(*src, *dst, last); dst 236 drivers/video/fbdev/core/syscopyarea.c *dst = comp(*src >> right, *dst, last); dst 239 drivers/video/fbdev/core/syscopyarea.c *dst = comp(*src << left, *dst, last); dst 242 drivers/video/fbdev/core/syscopyarea.c *dst = comp(*src << left | *(src-1) >> right, dst 243 drivers/video/fbdev/core/syscopyarea.c *dst, last); dst 268 drivers/video/fbdev/core/syscopyarea.c *dst = d0; dst 270 drivers/video/fbdev/core/syscopyarea.c *dst = comp(d0, *dst, first); dst 272 drivers/video/fbdev/core/syscopyarea.c dst--; dst 280 drivers/video/fbdev/core/syscopyarea.c *dst-- = d0 << left | d1 >> right; dst 283 drivers/video/fbdev/core/syscopyarea.c *dst-- = d0 << left | d1 >> right; dst 286 drivers/video/fbdev/core/syscopyarea.c *dst-- = d0 << left | d1 >> right; dst 289 drivers/video/fbdev/core/syscopyarea.c *dst-- = d0 << left | d1 >> right; dst 295 drivers/video/fbdev/core/syscopyarea.c *dst-- = d0 << left | d1 >> right; dst 309 drivers/video/fbdev/core/syscopyarea.c *dst = comp(d0, *dst, last); dst 25 drivers/video/fbdev/core/sysfillrect.c bitfill_aligned(struct fb_info *p, unsigned long *dst, int dst_idx, dst 40 drivers/video/fbdev/core/sysfillrect.c *dst = comp(pat, *dst, first); dst 46 drivers/video/fbdev/core/sysfillrect.c *dst = comp(pat, *dst, first); dst 47 drivers/video/fbdev/core/sysfillrect.c dst++; dst 54 drivers/video/fbdev/core/sysfillrect.c *dst++ = pat; dst 55 drivers/video/fbdev/core/sysfillrect.c *dst++ = pat; dst 56 drivers/video/fbdev/core/sysfillrect.c *dst++ = pat; dst 57 drivers/video/fbdev/core/sysfillrect.c *dst++ = pat; dst 58 drivers/video/fbdev/core/sysfillrect.c *dst++ = pat; dst 59 drivers/video/fbdev/core/sysfillrect.c *dst++ = pat; dst 60 drivers/video/fbdev/core/sysfillrect.c *dst++ = pat; dst 61 drivers/video/fbdev/core/sysfillrect.c *dst++ = pat; dst 65 drivers/video/fbdev/core/sysfillrect.c *dst++ = pat; dst 68 drivers/video/fbdev/core/sysfillrect.c *dst = comp(pat, *dst, last); dst 81 drivers/video/fbdev/core/sysfillrect.c bitfill_unaligned(struct fb_info *p, unsigned long *dst, int dst_idx, dst 96 drivers/video/fbdev/core/sysfillrect.c *dst = comp(pat, *dst, first); dst 101 drivers/video/fbdev/core/sysfillrect.c *dst = comp(pat, *dst, first); dst 102 drivers/video/fbdev/core/sysfillrect.c dst++; dst 110 drivers/video/fbdev/core/sysfillrect.c *dst++ = pat; dst 112 drivers/video/fbdev/core/sysfillrect.c *dst++ = 
pat; dst 114 drivers/video/fbdev/core/sysfillrect.c *dst++ = pat; dst 116 drivers/video/fbdev/core/sysfillrect.c *dst++ = pat; dst 121 drivers/video/fbdev/core/sysfillrect.c *dst++ = pat; dst 127 drivers/video/fbdev/core/sysfillrect.c *dst = comp(pat, *dst, last); dst 135 drivers/video/fbdev/core/sysfillrect.c bitfill_aligned_rev(struct fb_info *p, unsigned long *dst, int dst_idx, dst 151 drivers/video/fbdev/core/sysfillrect.c *dst = comp(*dst ^ val, *dst, first); dst 156 drivers/video/fbdev/core/sysfillrect.c *dst = comp(*dst ^ val, *dst, first); dst 157 drivers/video/fbdev/core/sysfillrect.c dst++; dst 164 drivers/video/fbdev/core/sysfillrect.c *dst++ ^= val; dst 165 drivers/video/fbdev/core/sysfillrect.c *dst++ ^= val; dst 166 drivers/video/fbdev/core/sysfillrect.c *dst++ ^= val; dst 167 drivers/video/fbdev/core/sysfillrect.c *dst++ ^= val; dst 168 drivers/video/fbdev/core/sysfillrect.c *dst++ ^= val; dst 169 drivers/video/fbdev/core/sysfillrect.c *dst++ ^= val; dst 170 drivers/video/fbdev/core/sysfillrect.c *dst++ ^= val; dst 171 drivers/video/fbdev/core/sysfillrect.c *dst++ ^= val; dst 175 drivers/video/fbdev/core/sysfillrect.c *dst++ ^= val; dst 178 drivers/video/fbdev/core/sysfillrect.c *dst = comp(*dst ^ val, *dst, last); dst 191 drivers/video/fbdev/core/sysfillrect.c bitfill_unaligned_rev(struct fb_info *p, unsigned long *dst, int dst_idx, dst 207 drivers/video/fbdev/core/sysfillrect.c *dst = comp(*dst ^ pat, *dst, first); dst 213 drivers/video/fbdev/core/sysfillrect.c *dst = comp(*dst ^ pat, *dst, first); dst 214 drivers/video/fbdev/core/sysfillrect.c dst++; dst 222 drivers/video/fbdev/core/sysfillrect.c *dst++ ^= pat; dst 224 drivers/video/fbdev/core/sysfillrect.c *dst++ ^= pat; dst 226 drivers/video/fbdev/core/sysfillrect.c *dst++ ^= pat; dst 228 drivers/video/fbdev/core/sysfillrect.c *dst++ ^= pat; dst 233 drivers/video/fbdev/core/sysfillrect.c *dst ^= pat; dst 239 drivers/video/fbdev/core/sysfillrect.c *dst = comp(*dst ^ pat, *dst, last); dst 249 drivers/video/fbdev/core/sysfillrect.c unsigned long *dst; dst 263 drivers/video/fbdev/core/sysfillrect.c dst = (unsigned long *)((unsigned long)p->screen_base & ~(bytes-1)); dst 271 drivers/video/fbdev/core/sysfillrect.c void (*fill_op32)(struct fb_info *p, unsigned long *dst, dst 289 drivers/video/fbdev/core/sysfillrect.c dst += dst_idx >> (ffs(bits) - 1); dst 291 drivers/video/fbdev/core/sysfillrect.c fill_op32(p, dst, dst_idx, pat, width*bpp, bits); dst 296 drivers/video/fbdev/core/sysfillrect.c void (*fill_op)(struct fb_info *p, unsigned long *dst, dst 319 drivers/video/fbdev/core/sysfillrect.c dst += dst_idx / bits; dst 324 drivers/video/fbdev/core/sysfillrect.c fill_op(p, dst, dst_idx, pat2, left, right, dst 56 drivers/video/fbdev/core/sysimgblt.c u32 *dst, *dst2; dst 66 drivers/video/fbdev/core/sysimgblt.c dst = dst1; dst 73 drivers/video/fbdev/core/sysimgblt.c val = *dst & start_mask; dst 85 drivers/video/fbdev/core/sysimgblt.c *dst++ = val; dst 97 drivers/video/fbdev/core/sysimgblt.c *dst &= end_mask; dst 98 drivers/video/fbdev/core/sysimgblt.c *dst |= val; dst 116 drivers/video/fbdev/core/sysimgblt.c u32 *dst, *dst2; dst 131 drivers/video/fbdev/core/sysimgblt.c dst = dst1; dst 138 drivers/video/fbdev/core/sysimgblt.c val = *dst & start_mask; dst 149 drivers/video/fbdev/core/sysimgblt.c *dst++ = val; dst 162 drivers/video/fbdev/core/sysimgblt.c *dst &= end_mask; dst 163 drivers/video/fbdev/core/sysimgblt.c *dst |= val; dst 193 drivers/video/fbdev/core/sysimgblt.c u32 *dst; dst 222 drivers/video/fbdev/core/sysimgblt.c dst = 
dst1; dst 229 drivers/video/fbdev/core/sysimgblt.c *dst++ = (end_mask & eorx) ^ bgx; dst 159 drivers/video/fbdev/cyber2000fb.c unsigned long dst, col; dst 175 drivers/video/fbdev/cyber2000fb.c dst = rect->dx + rect->dy * cfb->fb.var.xres_virtual; dst 177 drivers/video/fbdev/cyber2000fb.c cyber2000fb_writeb(dst, CO_REG_X_PHASE, cfb); dst 178 drivers/video/fbdev/cyber2000fb.c dst *= 3; dst 181 drivers/video/fbdev/cyber2000fb.c cyber2000fb_writel(dst, CO_REG_DEST_PTR, cfb); dst 192 drivers/video/fbdev/cyber2000fb.c unsigned long src, dst; dst 204 drivers/video/fbdev/cyber2000fb.c dst = region->dx + region->dy * cfb->fb.var.xres_virtual; dst 208 drivers/video/fbdev/cyber2000fb.c dst += region->width - 1; dst 214 drivers/video/fbdev/cyber2000fb.c dst += (region->height - 1) * cfb->fb.var.xres_virtual; dst 219 drivers/video/fbdev/cyber2000fb.c cyber2000fb_writeb(dst, CO_REG_X_PHASE, cfb); dst 221 drivers/video/fbdev/cyber2000fb.c dst *= 3; dst 224 drivers/video/fbdev/cyber2000fb.c cyber2000fb_writel(dst, CO_REG_DEST_PTR, cfb); dst 101 drivers/video/fbdev/efifb.c static void efifb_copy_bmp(u8 *src, u32 *dst, int width, struct screen_info *si) dst 109 drivers/video/fbdev/efifb.c *dst++ = (r << si->red_pos) | dst 147 drivers/video/fbdev/efifb.c u8 *dst = info->screen_base; dst 211 drivers/video/fbdev/efifb.c for (y = 0; y < si->lfb_height; y++, dst += si->lfb_linelength) { dst 215 drivers/video/fbdev/efifb.c memset(dst, 0, 4 * si->lfb_width); dst 224 drivers/video/fbdev/efifb.c memset(dst, 0, bgrt_tab.image_offset_x * 4); dst 228 drivers/video/fbdev/efifb.c (u32 *)dst + dst_x, bmp_width, si); dst 230 drivers/video/fbdev/efifb.c memset((u32 *)dst + dst_x, 0, (si->lfb_width - dst_x) * 4); dst 163 drivers/video/fbdev/hecubafb.c void *dst; dst 187 drivers/video/fbdev/hecubafb.c dst = (void __force *) (info->screen_base + p); dst 189 drivers/video/fbdev/hecubafb.c if (copy_from_user(dst, buf, count)) dst 370 drivers/video/fbdev/i810/i810_accel.c u32 fg = 0, bg = 0, size, dst; dst 390 drivers/video/fbdev/i810/i810_accel.c dst = info->fix.smem_start + (image->dy * info->fix.line_length) + dst 401 drivers/video/fbdev/i810/i810_accel.c PAT_COPY_ROP, dst, (u32 *) image->data, dst 1218 drivers/video/fbdev/imsttfb.c u8 *dst = (u8 *) cursor->dest; dst 1227 drivers/video/fbdev/imsttfb.c dst[s_idx]]; dst 119 drivers/video/fbdev/leo.c u32 dst; dst 145 drivers/video/fbdev/leo.c u32 dst; /* Copy/Scroll/Fill (SS0 only) */ dst 140 drivers/video/fbdev/matrox/matroxfb_maven.c char dst; dst 151 drivers/video/fbdev/matrox/matroxfb_maven.c .len = sizeof(dst), dst 152 drivers/video/fbdev/matrox/matroxfb_maven.c .buf = &dst dst 160 drivers/video/fbdev/matrox/matroxfb_maven.c return dst & 0xFF; dst 398 drivers/video/fbdev/matrox/matroxfb_misc.c unsigned char* dst = bd->pins; dst 403 drivers/video/fbdev/matrox/matroxfb_misc.c *dst++ = 0x2E; dst 404 drivers/video/fbdev/matrox/matroxfb_misc.c *dst++ = 0x41; dst 405 drivers/video/fbdev/matrox/matroxfb_misc.c *dst++ = pins_len; dst 408 drivers/video/fbdev/matrox/matroxfb_misc.c cksum += *dst++ = readb(pins+i); dst 416 drivers/video/fbdev/matrox/matroxfb_misc.c unsigned char* dst = bd->pins; dst 418 drivers/video/fbdev/matrox/matroxfb_misc.c *dst++ = 0x40; dst 419 drivers/video/fbdev/matrox/matroxfb_misc.c *dst++ = 0; dst 421 drivers/video/fbdev/matrox/matroxfb_misc.c *dst++ = readb(pins+i); dst 524 drivers/video/fbdev/metronomefb.c void *dst; dst 548 drivers/video/fbdev/metronomefb.c dst = (void __force *)(info->screen_base + p); dst 550 drivers/video/fbdev/metronomefb.c if 
(copy_from_user(dst, buf, count)) dst 1364 drivers/video/fbdev/neofb.c u_long dst, rop; dst 1366 drivers/video/fbdev/neofb.c dst = rect->dx + rect->dy * info->var.xres_virtual; dst 1389 drivers/video/fbdev/neofb.c writel(dst * ((info->var.bits_per_pixel + 7) >> 3), dst 1400 drivers/video/fbdev/neofb.c u_long src, dst, bltCntl; dst 1415 drivers/video/fbdev/neofb.c dst = dx * (info->var.bits_per_pixel >> 3) + dy*info->fix.line_length; dst 1423 drivers/video/fbdev/neofb.c writel(dst, &par->neo2200->dstStart); dst 34 drivers/video/fbdev/omap2/omapfb/displays/encoder-opa362.c struct omap_dss_device *dst) dst 49 drivers/video/fbdev/omap2/omapfb/displays/encoder-opa362.c dst->src = dssdev; dst 50 drivers/video/fbdev/omap2/omapfb/displays/encoder-opa362.c dssdev->dst = dst; dst 56 drivers/video/fbdev/omap2/omapfb/displays/encoder-opa362.c struct omap_dss_device *dst) dst 67 drivers/video/fbdev/omap2/omapfb/displays/encoder-opa362.c WARN_ON(dst != dssdev->dst); dst 68 drivers/video/fbdev/omap2/omapfb/displays/encoder-opa362.c if (dst != dssdev->dst) dst 71 drivers/video/fbdev/omap2/omapfb/displays/encoder-opa362.c dst->src = NULL; dst 72 drivers/video/fbdev/omap2/omapfb/displays/encoder-opa362.c dssdev->dst = NULL; dst 248 drivers/video/fbdev/omap2/omapfb/displays/encoder-opa362.c opa362_disconnect(dssdev, dssdev->dst); dst 30 drivers/video/fbdev/omap2/omapfb/displays/encoder-tfp410.c struct omap_dss_device *dst) dst 43 drivers/video/fbdev/omap2/omapfb/displays/encoder-tfp410.c dst->src = dssdev; dst 44 drivers/video/fbdev/omap2/omapfb/displays/encoder-tfp410.c dssdev->dst = dst; dst 50 drivers/video/fbdev/omap2/omapfb/displays/encoder-tfp410.c struct omap_dss_device *dst) dst 59 drivers/video/fbdev/omap2/omapfb/displays/encoder-tfp410.c WARN_ON(dst != dssdev->dst); dst 60 drivers/video/fbdev/omap2/omapfb/displays/encoder-tfp410.c if (dst != dssdev->dst) dst 63 drivers/video/fbdev/omap2/omapfb/displays/encoder-tfp410.c dst->src = NULL; dst 64 drivers/video/fbdev/omap2/omapfb/displays/encoder-tfp410.c dssdev->dst = NULL; dst 257 drivers/video/fbdev/omap2/omapfb/displays/encoder-tfp410.c tfp410_disconnect(dssdev, dssdev->dst); dst 33 drivers/video/fbdev/omap2/omapfb/displays/encoder-tpd12s015.c struct omap_dss_device *dst) dst 43 drivers/video/fbdev/omap2/omapfb/displays/encoder-tpd12s015.c dst->src = dssdev; dst 44 drivers/video/fbdev/omap2/omapfb/displays/encoder-tpd12s015.c dssdev->dst = dst; dst 56 drivers/video/fbdev/omap2/omapfb/displays/encoder-tpd12s015.c struct omap_dss_device *dst) dst 61 drivers/video/fbdev/omap2/omapfb/displays/encoder-tpd12s015.c WARN_ON(dst != dssdev->dst); dst 63 drivers/video/fbdev/omap2/omapfb/displays/encoder-tpd12s015.c if (dst != dssdev->dst) dst 68 drivers/video/fbdev/omap2/omapfb/displays/encoder-tpd12s015.c dst->src = NULL; dst 69 drivers/video/fbdev/omap2/omapfb/displays/encoder-tpd12s015.c dssdev->dst = NULL; dst 300 drivers/video/fbdev/omap2/omapfb/displays/encoder-tpd12s015.c tpd_disconnect(dssdev, dssdev->dst); dst 423 drivers/video/fbdev/omap2/omapfb/dss/apply.c while (dssdev->dst) dst 424 drivers/video/fbdev/omap2/omapfb/dss/apply.c dssdev = dssdev->dst; dst 798 drivers/video/fbdev/omap2/omapfb/dss/apply.c struct omap_dss_device *dst) dst 800 drivers/video/fbdev/omap2/omapfb/dss/apply.c return mgr->set_output(mgr, dst); dst 804 drivers/video/fbdev/omap2/omapfb/dss/apply.c struct omap_dss_device *dst) dst 649 drivers/video/fbdev/omap2/omapfb/dss/dpi.c struct omap_dss_device *dst) dst 669 drivers/video/fbdev/omap2/omapfb/dss/dpi.c r = 
omapdss_output_set_device(dssdev, dst); dst 672 drivers/video/fbdev/omap2/omapfb/dss/dpi.c dst->name); dst 681 drivers/video/fbdev/omap2/omapfb/dss/dpi.c struct omap_dss_device *dst) dst 683 drivers/video/fbdev/omap2/omapfb/dss/dpi.c WARN_ON(dst != dssdev->dst); dst 685 drivers/video/fbdev/omap2/omapfb/dss/dpi.c if (dst != dssdev->dst) dst 4962 drivers/video/fbdev/omap2/omapfb/dss/dsi.c struct omap_dss_device *dst) dst 4980 drivers/video/fbdev/omap2/omapfb/dss/dsi.c r = omapdss_output_set_device(dssdev, dst); dst 4992 drivers/video/fbdev/omap2/omapfb/dss/dsi.c struct omap_dss_device *dst) dst 4994 drivers/video/fbdev/omap2/omapfb/dss/dsi.c WARN_ON(dst != dssdev->dst); dst 4996 drivers/video/fbdev/omap2/omapfb/dss/dsi.c if (dst != dssdev->dst) dst 506 drivers/video/fbdev/omap2/omapfb/dss/dss.h struct omap_dss_device *dst); dst 508 drivers/video/fbdev/omap2/omapfb/dss/dss.h struct omap_dss_device *dst); dst 527 drivers/video/fbdev/omap2/omapfb/dss/dss.h struct omap_dss_device *dst); dst 529 drivers/video/fbdev/omap2/omapfb/dss/dss.h struct omap_dss_device *dst); dst 413 drivers/video/fbdev/omap2/omapfb/dss/hdmi4.c struct omap_dss_device *dst) dst 430 drivers/video/fbdev/omap2/omapfb/dss/hdmi4.c r = omapdss_output_set_device(dssdev, dst); dst 433 drivers/video/fbdev/omap2/omapfb/dss/hdmi4.c dst->name); dst 442 drivers/video/fbdev/omap2/omapfb/dss/hdmi4.c struct omap_dss_device *dst) dst 444 drivers/video/fbdev/omap2/omapfb/dss/hdmi4.c WARN_ON(dst != dssdev->dst); dst 446 drivers/video/fbdev/omap2/omapfb/dss/hdmi4.c if (dst != dssdev->dst) dst 443 drivers/video/fbdev/omap2/omapfb/dss/hdmi5.c struct omap_dss_device *dst) dst 460 drivers/video/fbdev/omap2/omapfb/dss/hdmi5.c r = omapdss_output_set_device(dssdev, dst); dst 463 drivers/video/fbdev/omap2/omapfb/dss/hdmi5.c dst->name); dst 472 drivers/video/fbdev/omap2/omapfb/dss/hdmi5.c struct omap_dss_device *dst) dst 474 drivers/video/fbdev/omap2/omapfb/dss/hdmi5.c WARN_ON(dst != dssdev->dst); dst 476 drivers/video/fbdev/omap2/omapfb/dss/hdmi5.c if (dst != dssdev->dst) dst 60 drivers/video/fbdev/omap2/omapfb/dss/omapdss-boot-init.c static void __init omapdss_prefix_strcpy(char *dst, int dst_len, dst 68 drivers/video/fbdev/omap2/omapfb/dss/omapdss-boot-init.c strcpy(dst, prefix); dst 69 drivers/video/fbdev/omap2/omapfb/dss/omapdss-boot-init.c dst += strlen(prefix); dst 71 drivers/video/fbdev/omap2/omapfb/dss/omapdss-boot-init.c strcpy(dst, src); dst 72 drivers/video/fbdev/omap2/omapfb/dss/omapdss-boot-init.c dst += l; dst 27 drivers/video/fbdev/omap2/omapfb/dss/output.c if (out->dst) { dst 29 drivers/video/fbdev/omap2/omapfb/dss/output.c out->dst->name); dst 40 drivers/video/fbdev/omap2/omapfb/dss/output.c out->dst = dssdev; dst 59 drivers/video/fbdev/omap2/omapfb/dss/output.c if (!out->dst) { dst 65 drivers/video/fbdev/omap2/omapfb/dss/output.c if (out->dst->state != OMAP_DSS_DISPLAY_DISABLED) { dst 67 drivers/video/fbdev/omap2/omapfb/dss/output.c out->dst->name); dst 72 drivers/video/fbdev/omap2/omapfb/dss/output.c out->dst->src = NULL; dst 73 drivers/video/fbdev/omap2/omapfb/dss/output.c out->dst = NULL; dst 199 drivers/video/fbdev/omap2/omapfb/dss/output.c struct omap_dss_device *dst) dst 201 drivers/video/fbdev/omap2/omapfb/dss/output.c return dss_mgr_ops->connect(mgr, dst); dst 206 drivers/video/fbdev/omap2/omapfb/dss/output.c struct omap_dss_device *dst) dst 208 drivers/video/fbdev/omap2/omapfb/dss/output.c dss_mgr_ops->disconnect(mgr, dst); dst 270 drivers/video/fbdev/omap2/omapfb/dss/sdi.c struct omap_dss_device *dst) dst 287 
drivers/video/fbdev/omap2/omapfb/dss/sdi.c r = omapdss_output_set_device(dssdev, dst); dst 290 drivers/video/fbdev/omap2/omapfb/dss/sdi.c dst->name); dst 299 drivers/video/fbdev/omap2/omapfb/dss/sdi.c struct omap_dss_device *dst) dst 301 drivers/video/fbdev/omap2/omapfb/dss/sdi.c WARN_ON(dst != dssdev->dst); dst 303 drivers/video/fbdev/omap2/omapfb/dss/sdi.c if (dst != dssdev->dst) dst 720 drivers/video/fbdev/omap2/omapfb/dss/venc.c struct omap_dss_device *dst) dst 737 drivers/video/fbdev/omap2/omapfb/dss/venc.c r = omapdss_output_set_device(dssdev, dst); dst 740 drivers/video/fbdev/omap2/omapfb/dss/venc.c dst->name); dst 749 drivers/video/fbdev/omap2/omapfb/dss/venc.c struct omap_dss_device *dst) dst 751 drivers/video/fbdev/omap2/omapfb/dss/venc.c WARN_ON(dst != dssdev->dst); dst 753 drivers/video/fbdev/omap2/omapfb/dss/venc.c if (dst != dssdev->dst) dst 608 drivers/video/fbdev/ps3fb.c u64 dst; dst 659 drivers/video/fbdev/ps3fb.c for (dst = 0; lines; dst += maxlines * ddr_line_length) { dst 661 drivers/video/fbdev/ps3fb.c ps3fb_sync_image(info->device, 0, dst, 0, vmode->xres, l, dst 644 drivers/video/fbdev/pvr2fb.c unsigned long dst, start, end, len; dst 664 drivers/video/fbdev/pvr2fb.c dst = (unsigned long)fb_info->screen_base + *ppos; dst 678 drivers/video/fbdev/pvr2fb.c dma_write(pvr2dma, 0, dst, len); dst 685 drivers/video/fbdev/pvr2fb.c for (i = 0; i < nr_pages; i++, dst += PAGE_SIZE) { dst 692 drivers/video/fbdev/pvr2fb.c dma_write_page(pvr2dma, 0, dst); dst 435 drivers/video/fbdev/s1d13xxxfb.c u32 dst, src; dst 451 drivers/video/fbdev/s1d13xxxfb.c dst = (((dy + height - 1) * stride) + (bpp * (dx + width - 1))); dst 456 drivers/video/fbdev/s1d13xxxfb.c dst = (dy * stride) + (bpp * dx); dst 466 drivers/video/fbdev/s1d13xxxfb.c s1d13xxxfb_writereg(info->par, S1DREG_BBLT_DST_START0, (dst & 0xff)); dst 467 drivers/video/fbdev/s1d13xxxfb.c s1d13xxxfb_writereg(info->par, S1DREG_BBLT_DST_START1, (dst >> 8) & 0x00ff); dst 468 drivers/video/fbdev/s1d13xxxfb.c s1d13xxxfb_writereg(info->par, S1DREG_BBLT_DST_START2, (dst >> 16) & 0x00ff); dst 353 drivers/video/fbdev/s3fb.c u32 __iomem *dst; dst 363 drivers/video/fbdev/s3fb.c dst = (u32 __iomem *) dst1; dst 367 drivers/video/fbdev/s3fb.c fb_writel(val, dst++); dst 380 drivers/video/fbdev/s3fb.c u32 __iomem *dst; dst 387 drivers/video/fbdev/s3fb.c dst = (u32 __iomem *) dst1; dst 389 drivers/video/fbdev/s3fb.c fb_writel(fg, dst++); dst 410 drivers/video/fbdev/s3fb.c u32 __iomem *dst; dst 420 drivers/video/fbdev/s3fb.c dst = (u32 __iomem *) dst1; dst 424 drivers/video/fbdev/s3fb.c fb_writel(val, dst++); dst 1118 drivers/video/fbdev/sm501fb.c void __iomem *dst = par->cursor.k_addr; dst 1127 drivers/video/fbdev/sm501fb.c smc501_writel(0x0, dst + op); dst 1143 drivers/video/fbdev/sm501fb.c op |= readb(dst + (x / 4)); dst 1144 drivers/video/fbdev/sm501fb.c writeb(op, dst + (x / 4)); dst 1147 drivers/video/fbdev/sm501fb.c dst += (64*2)/8; dst 1025 drivers/video/fbdev/sm712fb.c u32 *buffer, *dst; dst 1061 drivers/video/fbdev/sm712fb.c dst = buffer; dst 1063 drivers/video/fbdev/sm712fb.c *dst = fb_readl(src++); dst 1064 drivers/video/fbdev/sm712fb.c *dst = big_swap(*dst); dst 1065 drivers/video/fbdev/sm712fb.c dst++; dst 1068 drivers/video/fbdev/sm712fb.c u8 *dst8 = (u8 *)dst; dst 1103 drivers/video/fbdev/sm712fb.c u32 __iomem *dst; dst 1137 drivers/video/fbdev/sm712fb.c dst = (u32 __iomem *)(info->screen_base + p); dst 1152 drivers/video/fbdev/sm712fb.c fb_writel(big_swap(*src), dst++); dst 1157 drivers/video/fbdev/sm712fb.c u8 __iomem *dst8 = (u8 
__iomem *)dst; dst 1167 drivers/video/fbdev/sm712fb.c dst = (u32 __iomem *)dst8; dst 227 drivers/video/fbdev/ssd1307fb.c void *dst; dst 240 drivers/video/fbdev/ssd1307fb.c dst = info->screen_buffer + p; dst 242 drivers/video/fbdev/ssd1307fb.c if (copy_from_user(dst, buf, count)) dst 1102 drivers/video/fbdev/tgafb.c void __iomem *dst; dst 1114 drivers/video/fbdev/tgafb.c dst = tga_fb + (dy + height) * width * 4; dst 1118 drivers/video/fbdev/tgafb.c dst -= 64; dst 1121 drivers/video/fbdev/tgafb.c __raw_writel(0xffff, dst); dst 1126 drivers/video/fbdev/tgafb.c dst = tga_fb + dy * width * 4; dst 1131 drivers/video/fbdev/tgafb.c __raw_writel(0xffff, dst); dst 1134 drivers/video/fbdev/tgafb.c dst += 64; dst 884 drivers/video/fbdev/vga16fb.c char __iomem *dst; dst 909 drivers/video/fbdev/vga16fb.c dst = info->screen_base + (rect->dx/8) + rect->dy * info->fix.line_length; dst 923 drivers/video/fbdev/vga16fb.c writeb(0, dst); dst 924 drivers/video/fbdev/vga16fb.c dst++; dst 926 drivers/video/fbdev/vga16fb.c dst += line_ofs; dst 939 drivers/video/fbdev/vga16fb.c rmw(dst); dst 940 drivers/video/fbdev/vga16fb.c dst++; dst 942 drivers/video/fbdev/vga16fb.c dst += line_ofs; dst 1016 drivers/video/fbdev/vga16fb.c char __iomem *dst = NULL; dst 1065 drivers/video/fbdev/vga16fb.c dst = info->screen_base + (dx/8) + dy * info->fix.line_length; dst 1070 drivers/video/fbdev/vga16fb.c writeb(0, dst); dst 1071 drivers/video/fbdev/vga16fb.c dst++; dst 1075 drivers/video/fbdev/vga16fb.c dst += line_ofs; dst 1078 drivers/video/fbdev/vga16fb.c dst = info->screen_base + (dx/8) + width + dst 1084 drivers/video/fbdev/vga16fb.c dst--; dst 1087 drivers/video/fbdev/vga16fb.c writeb(0, dst); dst 1090 drivers/video/fbdev/vga16fb.c dst -= line_ofs; dst 1154 drivers/video/fbdev/vga16fb.c char __iomem *dst; dst 1174 drivers/video/fbdev/vga16fb.c dst = where; dst 1176 drivers/video/fbdev/vga16fb.c writeb(*cdat++, dst++); dst 1189 drivers/video/fbdev/vga16fb.c dst = where; dst 1191 drivers/video/fbdev/vga16fb.c rmw(dst); dst 1196 drivers/video/fbdev/vga16fb.c rmw(dst++); dst 1222 drivers/video/fbdev/vga16fb.c char __iomem *dst; dst 1235 drivers/video/fbdev/vga16fb.c dst = where + x/8; dst 1240 drivers/video/fbdev/vga16fb.c fb_readb(dst); dst 1241 drivers/video/fbdev/vga16fb.c fb_writeb(0, dst); dst 147 drivers/video/fbdev/vt8623fb.c u32 __iomem *dst; dst 157 drivers/video/fbdev/vt8623fb.c dst = (u32 __iomem *) dst1; dst 161 drivers/video/fbdev/vt8623fb.c fb_writel(val, dst++); dst 173 drivers/video/fbdev/vt8623fb.c u32 __iomem *dst; dst 180 drivers/video/fbdev/vt8623fb.c dst = (u32 __iomem *) dst1; dst 182 drivers/video/fbdev/vt8623fb.c fb_writel(fg, dst++); dst 203 drivers/video/fbdev/vt8623fb.c u32 __iomem *dst; dst 213 drivers/video/fbdev/vt8623fb.c dst = (u32 __iomem *) dst1; dst 217 drivers/video/fbdev/vt8623fb.c fb_writel(val, dst++); dst 170 drivers/xen/pvcalls-front.c uint8_t *src, *dst; dst 203 drivers/xen/pvcalls-front.c dst = (uint8_t *)&bedata->rsp[req_id] + dst 206 drivers/xen/pvcalls-front.c memcpy(dst, src, sizeof(*rsp) - sizeof(rsp->req_id)); dst 158 drivers/xen/xen-acpi-processor.c struct xen_processor_performance *dst) dst 168 drivers/xen/xen-acpi-processor.c dst->shared_type = _pr->performance->shared_type; dst 177 drivers/xen/xen-acpi-processor.c dst->shared_type = CPUFREQ_SHARED_TYPE_ALL; dst 179 drivers/xen/xen-acpi-processor.c dst->shared_type = CPUFREQ_SHARED_TYPE_HW; dst 181 drivers/xen/xen-acpi-processor.c dst->shared_type = CPUFREQ_SHARED_TYPE_ANY; dst 184 drivers/xen/xen-acpi-processor.c 
memcpy(&(dst->domain_info), pdomain, sizeof(struct acpi_psd_package)); dst 121 drivers/xen/xenbus/xenbus_comms.c void *dst; dst 137 drivers/xen/xenbus/xenbus_comms.c dst = get_output_chunk(cons, prod, intf->req, &avail); dst 143 drivers/xen/xenbus/xenbus_comms.c memcpy(dst, data, avail); dst 192 fs/binfmt_flat.c static int decompress_exec(struct linux_binprm *bprm, loff_t fpos, char *dst, dst 199 fs/binfmt_flat.c pr_debug("decompress_exec(offset=%llx,buf=%p,len=%lx)\n", fpos, dst, len); dst 273 fs/binfmt_flat.c strm.next_out = dst; dst 67 fs/btrfs/block-rsv.c struct btrfs_block_rsv *dst, u64 num_bytes, dst 76 fs/btrfs/block-rsv.c btrfs_block_rsv_add_bytes(dst, num_bytes, update_size); dst 298 fs/btrfs/check-integrity.c void *dst, u32 offset, size_t len); dst 1206 fs/btrfs/check-integrity.c char *dst = (char *)dstv; dst 1217 fs/btrfs/check-integrity.c memcpy(dst, kaddr + pgoff, cur); dst 1219 fs/btrfs/check-integrity.c dst += cur; dst 24 fs/btrfs/ctree.c struct extent_buffer *dst, dst 745 fs/btrfs/ctree.c static noinline int tree_mod_log_eb_copy(struct extent_buffer *dst, dst 749 fs/btrfs/ctree.c struct btrfs_fs_info *fs_info = dst->fs_info; dst 759 fs/btrfs/ctree.c if (btrfs_header_level(dst) == 0 && btrfs_header_level(src) == 0) dst 777 fs/btrfs/ctree.c tm_list_add[i] = alloc_tree_mod_elem(dst, i + dst_offset, dst 3247 fs/btrfs/ctree.c struct extent_buffer *dst, dst 3257 fs/btrfs/ctree.c dst_nritems = btrfs_header_nritems(dst); dst 3260 fs/btrfs/ctree.c WARN_ON(btrfs_header_generation(dst) != trans->transid); dst 3283 fs/btrfs/ctree.c ret = tree_mod_log_eb_copy(dst, src, dst_nritems, 0, push_items); dst 3288 fs/btrfs/ctree.c copy_extent_buffer(dst, src, dst 3304 fs/btrfs/ctree.c btrfs_set_header_nritems(dst, dst_nritems + push_items); dst 3306 fs/btrfs/ctree.c btrfs_mark_buffer_dirty(dst); dst 3321 fs/btrfs/ctree.c struct extent_buffer *dst, dst 3332 fs/btrfs/ctree.c WARN_ON(btrfs_header_generation(dst) != trans->transid); dst 3335 fs/btrfs/ctree.c dst_nritems = btrfs_header_nritems(dst); dst 3351 fs/btrfs/ctree.c ret = tree_mod_log_insert_move(dst, push_items, 0, dst_nritems); dst 3353 fs/btrfs/ctree.c memmove_extent_buffer(dst, btrfs_node_key_ptr_offset(push_items), dst 3358 fs/btrfs/ctree.c ret = tree_mod_log_eb_copy(dst, src, 0, src_nritems - push_items, dst 3364 fs/btrfs/ctree.c copy_extent_buffer(dst, src, dst 3370 fs/btrfs/ctree.c btrfs_set_header_nritems(dst, dst_nritems + push_items); dst 3373 fs/btrfs/ctree.c btrfs_mark_buffer_dirty(dst); dst 2788 fs/btrfs/ctree.h u8 *dst); dst 5562 fs/btrfs/extent_io.c char *dst = (char *)dstv; dst 5569 fs/btrfs/extent_io.c memset(dst, 0, len); dst 5580 fs/btrfs/extent_io.c memcpy(dst, kaddr + offset, cur); dst 5582 fs/btrfs/extent_io.c dst += cur; dst 5597 fs/btrfs/extent_io.c char __user *dst = (char __user *)dstv; dst 5612 fs/btrfs/extent_io.c if (copy_to_user(dst, kaddr + offset, cur)) { dst 5617 fs/btrfs/extent_io.c dst += cur; dst 5784 fs/btrfs/extent_io.c void copy_extent_buffer_full(struct extent_buffer *dst, dst 5790 fs/btrfs/extent_io.c ASSERT(dst->len == src->len); dst 5792 fs/btrfs/extent_io.c num_pages = num_extent_pages(dst); dst 5794 fs/btrfs/extent_io.c copy_page(page_address(dst->pages[i]), dst 5798 fs/btrfs/extent_io.c void copy_extent_buffer(struct extent_buffer *dst, struct extent_buffer *src, dst 5802 fs/btrfs/extent_io.c u64 dst_len = dst->len; dst 5807 fs/btrfs/extent_io.c size_t start_offset = offset_in_page(dst->start); dst 5815 fs/btrfs/extent_io.c page = dst->pages[i]; dst 5967 fs/btrfs/extent_io.c static inline bool 
areas_overlap(unsigned long src, unsigned long dst, unsigned long len) dst 5969 fs/btrfs/extent_io.c unsigned long distance = (src > dst) ? src - dst : dst - src; dst 5995 fs/btrfs/extent_io.c void memcpy_extent_buffer(struct extent_buffer *dst, unsigned long dst_offset, dst 5998 fs/btrfs/extent_io.c struct btrfs_fs_info *fs_info = dst->fs_info; dst 6002 fs/btrfs/extent_io.c size_t start_offset = offset_in_page(dst->start); dst 6006 fs/btrfs/extent_io.c if (src_offset + len > dst->len) { dst 6009 fs/btrfs/extent_io.c src_offset, len, dst->len); dst 6012 fs/btrfs/extent_io.c if (dst_offset + len > dst->len) { dst 6015 fs/btrfs/extent_io.c dst_offset, len, dst->len); dst 6031 fs/btrfs/extent_io.c copy_pages(dst->pages[dst_i], dst->pages[src_i], dst 6040 fs/btrfs/extent_io.c void memmove_extent_buffer(struct extent_buffer *dst, unsigned long dst_offset, dst 6043 fs/btrfs/extent_io.c struct btrfs_fs_info *fs_info = dst->fs_info; dst 6049 fs/btrfs/extent_io.c size_t start_offset = offset_in_page(dst->start); dst 6053 fs/btrfs/extent_io.c if (src_offset + len > dst->len) { dst 6056 fs/btrfs/extent_io.c src_offset, len, dst->len); dst 6059 fs/btrfs/extent_io.c if (dst_offset + len > dst->len) { dst 6062 fs/btrfs/extent_io.c dst_offset, len, dst->len); dst 6066 fs/btrfs/extent_io.c memcpy_extent_buffer(dst, dst_offset, src_offset, len); dst 6078 fs/btrfs/extent_io.c copy_pages(dst->pages[dst_i], dst->pages[src_i], dst 457 fs/btrfs/extent_io.h void read_extent_buffer(const struct extent_buffer *eb, void *dst, dst 461 fs/btrfs/extent_io.h void __user *dst, unsigned long start, dst 468 fs/btrfs/extent_io.h void copy_extent_buffer_full(struct extent_buffer *dst, dst 470 fs/btrfs/extent_io.h void copy_extent_buffer(struct extent_buffer *dst, struct extent_buffer *src, dst 473 fs/btrfs/extent_io.h void memcpy_extent_buffer(struct extent_buffer *dst, unsigned long dst_offset, dst 475 fs/btrfs/extent_io.h void memmove_extent_buffer(struct extent_buffer *dst, unsigned long dst_offset, dst 152 fs/btrfs/file-item.c u64 logical_offset, u8 *dst, int dio) dst 177 fs/btrfs/file-item.c if (!dst) { dst 190 fs/btrfs/file-item.c csum = dst; dst 293 fs/btrfs/file-item.c u8 *dst) dst 295 fs/btrfs/file-item.c return __btrfs_lookup_bio_sums(inode, bio, 0, dst, 0); dst 3245 fs/btrfs/ioctl.c struct inode *dst, u64 dst_loff) dst 3254 fs/btrfs/ioctl.c btrfs_double_extent_lock(src, loff, dst, dst_loff, len); dst 3255 fs/btrfs/ioctl.c ret = btrfs_clone(src, dst, loff, len, ALIGN(len, bs), dst_loff, 1); dst 3256 fs/btrfs/ioctl.c btrfs_double_extent_unlock(src, loff, dst, dst_loff, len); dst 3264 fs/btrfs/ioctl.c struct inode *dst, u64 dst_loff) dst 3268 fs/btrfs/ioctl.c struct btrfs_root *root_dst = BTRFS_I(dst)->root; dst 3287 fs/btrfs/ioctl.c dst, dst_loff); dst 3296 fs/btrfs/ioctl.c ret = btrfs_extent_same_range(src, loff, tail_len, dst, dst 3364 fs/btrfs/ioctl.c static int clone_copy_inline_extent(struct inode *dst, dst 3374 fs/btrfs/ioctl.c struct btrfs_fs_info *fs_info = btrfs_sb(dst->i_sb); dst 3375 fs/btrfs/ioctl.c struct btrfs_root *root = BTRFS_I(dst)->root; dst 3384 fs/btrfs/ioctl.c key.objectid = btrfs_ino(BTRFS_I(dst)); dst 3399 fs/btrfs/ioctl.c if (key.objectid == btrfs_ino(BTRFS_I(dst)) && dst 3404 fs/btrfs/ioctl.c } else if (i_size_read(dst) <= datal) { dst 3433 fs/btrfs/ioctl.c if (key.objectid == btrfs_ino(BTRFS_I(dst)) && dst 3444 fs/btrfs/ioctl.c if (i_size_read(dst) > datal) { dst 3461 fs/btrfs/ioctl.c ret = btrfs_drop_extents(trans, root, dst, drop_start, aligned_end, 1); dst 3478 fs/btrfs/ioctl.c 
inode_add_bytes(dst, datal); dst 3479 fs/btrfs/ioctl.c set_bit(BTRFS_INODE_NEEDS_FULL_SYNC, &BTRFS_I(dst)->runtime_flags); dst 4815 fs/btrfs/ioctl.c ret = btrfs_add_qgroup_relation(trans, sa->src, sa->dst); dst 4817 fs/btrfs/ioctl.c ret = btrfs_del_qgroup_relation(trans, sa->src, sa->dst); dst 534 fs/btrfs/qgroup.c u64 dst) dst 547 fs/btrfs/qgroup.c key.offset = dst; dst 558 fs/btrfs/qgroup.c u64 dst) dst 571 fs/btrfs/qgroup.c key.offset = dst; dst 1218 fs/btrfs/qgroup.c struct ulist *tmp, u64 src, u64 dst, dst 1230 fs/btrfs/qgroup.c err = __qgroup_excl_accounting(fs_info, tmp, dst, dst 1244 fs/btrfs/qgroup.c u64 dst) dst 1255 fs/btrfs/qgroup.c if (btrfs_qgroup_level(src) >= btrfs_qgroup_level(dst)) dst 1269 fs/btrfs/qgroup.c parent = find_qgroup_rb(fs_info, dst); dst 1283 fs/btrfs/qgroup.c ret = add_qgroup_relation_item(trans, src, dst); dst 1287 fs/btrfs/qgroup.c ret = add_qgroup_relation_item(trans, dst, src); dst 1289 fs/btrfs/qgroup.c del_qgroup_relation_item(trans, src, dst); dst 1294 fs/btrfs/qgroup.c ret = add_relation_rb(fs_info, src, dst); dst 1299 fs/btrfs/qgroup.c ret = quick_update_accounting(fs_info, tmp, src, dst, 1); dst 1308 fs/btrfs/qgroup.c u64 dst) dst 1331 fs/btrfs/qgroup.c parent = find_qgroup_rb(fs_info, dst); dst 1348 fs/btrfs/qgroup.c ret = del_qgroup_relation_item(trans, src, dst); dst 1351 fs/btrfs/qgroup.c ret2 = del_qgroup_relation_item(trans, dst, src); dst 1361 fs/btrfs/qgroup.c del_relation_rb(fs_info, src, dst); dst 1362 fs/btrfs/qgroup.c ret = quick_update_accounting(fs_info, tmp, src, dst, -1); dst 1371 fs/btrfs/qgroup.c u64 dst) dst 1377 fs/btrfs/qgroup.c ret = __del_qgroup_relation(trans, src, dst); dst 2786 fs/btrfs/qgroup.c struct btrfs_qgroup *dst; dst 2792 fs/btrfs/qgroup.c dst = find_qgroup_rb(fs_info, i_qgroups[1]); dst 2794 fs/btrfs/qgroup.c if (!src || !dst) { dst 2799 fs/btrfs/qgroup.c dst->rfer = src->rfer - level_size; dst 2800 fs/btrfs/qgroup.c dst->rfer_cmpr = src->rfer_cmpr - level_size; dst 2804 fs/btrfs/qgroup.c struct btrfs_qgroup *dst; dst 2810 fs/btrfs/qgroup.c dst = find_qgroup_rb(fs_info, i_qgroups[1]); dst 2812 fs/btrfs/qgroup.c if (!src || !dst) { dst 2817 fs/btrfs/qgroup.c dst->excl = src->excl + level_size; dst 2818 fs/btrfs/qgroup.c dst->excl_cmpr = src->excl_cmpr + level_size; dst 242 fs/btrfs/qgroup.h u64 dst); dst 244 fs/btrfs/qgroup.h u64 dst); dst 419 fs/btrfs/zstd.c workspace->out_buf.dst = kmap(out_page); dst 466 fs/btrfs/zstd.c workspace->out_buf.dst = kmap(out_page); dst 526 fs/btrfs/zstd.c workspace->out_buf.dst = kmap(out_page); dst 577 fs/btrfs/zstd.c workspace->out_buf.dst = workspace->buf; dst 596 fs/btrfs/zstd.c ret = btrfs_decompress_buf2page(workspace->out_buf.dst, dst 656 fs/btrfs/zstd.c workspace->out_buf.dst = workspace->buf; dst 698 fs/btrfs/zstd.c memcpy(kaddr + pg_offset, workspace->out_buf.dst + buf_offset, dst 33 fs/ceph/locks.c static void ceph_fl_copy_lock(struct file_lock *dst, struct file_lock *src) dst 35 fs/ceph/locks.c struct ceph_file_info *fi = dst->fl_file->private_data; dst 36 fs/ceph/locks.c struct inode *inode = file_inode(dst->fl_file); dst 423 fs/ceph/snap.c static int dup_array(u64 **dst, __le64 *src, u32 num) dst 427 fs/ceph/snap.c kfree(*dst); dst 429 fs/ceph/snap.c *dst = kcalloc(num, sizeof(u64), GFP_NOFS); dst 430 fs/ceph/snap.c if (!*dst) dst 433 fs/ceph/snap.c (*dst)[i] = get_unaligned_le64(src + i); dst 435 fs/ceph/snap.c *dst = NULL; dst 66 fs/cifs/cache.c char *delim, *dst; dst 80 fs/cifs/cache.c dst = kstrndup(delim, len, GFP_KERNEL); dst 81 fs/cifs/cache.c if (!dst) dst 84 
fs/cifs/cache.c return dst; dst 352 fs/cifs/cifs_unicode.c char *dst; dst 357 fs/cifs/cifs_unicode.c dst = kmalloc(len, GFP_KERNEL); dst 358 fs/cifs/cifs_unicode.c if (!dst) dst 360 fs/cifs/cifs_unicode.c cifs_from_utf16(dst, (__le16 *) src, len, maxlen, codepage, dst 365 fs/cifs/cifs_unicode.c dst = kmalloc(len, GFP_KERNEL); dst 366 fs/cifs/cifs_unicode.c if (!dst) dst 368 fs/cifs/cifs_unicode.c strlcpy(dst, src, len); dst 371 fs/cifs/cifs_unicode.c return dst; dst 620 fs/cifs/cifs_unicode.c __le16 *dst; dst 624 fs/cifs/cifs_unicode.c dst = kmalloc(len, GFP_KERNEL); dst 625 fs/cifs/cifs_unicode.c if (!dst) { dst 629 fs/cifs/cifs_unicode.c cifsConvertToUTF16(dst, src, strlen(src), cp, remap); dst 631 fs/cifs/cifs_unicode.c return dst; dst 272 fs/cifs/cifsacl.c cifs_copy_sid(struct cifs_sid *dst, const struct cifs_sid *src) dst 276 fs/cifs/cifsacl.c dst->revision = src->revision; dst 277 fs/cifs/cifsacl.c dst->num_subauth = min_t(u8, src->num_subauth, SID_MAX_SUB_AUTHORITIES); dst 279 fs/cifs/cifsacl.c dst->authority[i] = src->authority[i]; dst 280 fs/cifs/cifsacl.c for (i = 0; i < dst->num_subauth; ++i) dst 281 fs/cifs/cifsacl.c dst->sub_auth[i] = src->sub_auth[i]; dst 149 fs/cifs/cifsproto.h extern int cifs_convert_address(struct sockaddr *dst, const char *src, int len); dst 590 fs/cifs/cifsproto.h int copy_path_name(char *dst, const char *src); dst 1329 fs/cifs/connect.c char *dst, *delim; dst 1347 fs/cifs/connect.c dst = kmalloc((len + 1), GFP_KERNEL); dst 1348 fs/cifs/connect.c if (dst == NULL) dst 1351 fs/cifs/connect.c memcpy(dst, src, len); dst 1352 fs/cifs/connect.c dst[len] = '\0'; dst 1354 fs/cifs/connect.c return dst; dst 1020 fs/cifs/misc.c int copy_path_name(char *dst, const char *src) dst 1028 fs/cifs/misc.c name_len = strscpy(dst, src, PATH_MAX); dst 126 fs/cifs/netmisc.c cifs_inet_pton(const int address_family, const char *cp, int len, void *dst) dst 132 fs/cifs/netmisc.c ret = in4_pton(cp, len, dst, '\\', NULL); dst 134 fs/cifs/netmisc.c ret = in6_pton(cp, len, dst , '\\', NULL); dst 152 fs/cifs/netmisc.c cifs_convert_address(struct sockaddr *dst, const char *src, int len) dst 157 fs/cifs/netmisc.c struct sockaddr_in *s4 = (struct sockaddr_in *) dst; dst 158 fs/cifs/netmisc.c struct sockaddr_in6 *s6 = (struct sockaddr_in6 *) dst; dst 428 fs/cifs/smb2inode.c move_smb2_info_to_cifs(FILE_ALL_INFO *dst, struct smb2_file_all_info *src) dst 430 fs/cifs/smb2inode.c memcpy(dst, src, (size_t)(&src->CurrentByteOffset) - (size_t)src); dst 431 fs/cifs/smb2inode.c dst->CurrentByteOffset = src->CurrentByteOffset; dst 432 fs/cifs/smb2inode.c dst->Mode = src->Mode; dst 433 fs/cifs/smb2inode.c dst->AlignmentRequirement = src->AlignmentRequirement; dst 434 fs/cifs/smb2inode.c dst->IndexNumber1 = 0; /* we don't use it */ dst 934 fs/cifs/smb2ops.c move_smb2_ea_to_cifs(char *dst, size_t dst_size, dst 969 fs/cifs/smb2ops.c memcpy(dst, value, value_len); dst 981 fs/cifs/smb2ops.c memcpy(dst, "user.", 5); dst 982 fs/cifs/smb2ops.c dst += 5; dst 983 fs/cifs/smb2ops.c memcpy(dst, src->ea_data, name_len); dst 984 fs/cifs/smb2ops.c dst += name_len; dst 985 fs/cifs/smb2ops.c *dst = 0; dst 986 fs/cifs/smb2ops.c ++dst; dst 3820 fs/cifs/smb2ops.c char *dst, *src; dst 3825 fs/cifs/smb2ops.c dst = (char *) kmap(new_rq[i].rq_pages[j]) + offset; dst 3828 fs/cifs/smb2ops.c memcpy(dst, src, len); dst 72 fs/cifs/smb2proto.h extern void move_smb2_info_to_cifs(FILE_ALL_INFO *dst, dst 27 fs/compat.c static inline void compat_nfs_string(struct nfs_string *dst, dst 30 fs/compat.c dst->data = compat_ptr(src->data); 
dst 31 fs/compat.c dst->len = src->len; dst 2 fs/cramfs/internal.h int cramfs_uncompress_block(void *dst, int dstlen, void *src, int srclen); dst 31 fs/cramfs/uncompress.c int cramfs_uncompress_block(void *dst, int dstlen, void *src, int srclen) dst 38 fs/cramfs/uncompress.c stream.next_out = dst; dst 55 fs/cramfs/uncompress.c pr_err("%p(%d)->%p(%d)\n", src, srclen, dst, dstlen); dst 160 fs/crypto/crypto.c struct scatterlist dst, src; dst 180 fs/crypto/crypto.c sg_init_table(&dst, 1); dst 181 fs/crypto/crypto.c sg_set_page(&dst, dest_page, len, offs); dst 184 fs/crypto/crypto.c skcipher_request_set_crypt(req, &src, &dst, len, &iv); dst 139 fs/crypto/fname.c static int base64_encode(const u8 *src, int len, char *dst) dst 142 fs/crypto/fname.c char *cp = dst; dst 155 fs/crypto/fname.c return cp - dst; dst 158 fs/crypto/fname.c static int base64_decode(const char *src, int len, u8 *dst) dst 162 fs/crypto/fname.c u8 *cp = dst; dst 178 fs/crypto/fname.c return cp - dst; dst 33 fs/crypto/keyring.c static void move_master_key_secret(struct fscrypt_master_key_secret *dst, dst 36 fs/crypto/keyring.c memcpy(dst, src, sizeof(*dst)); dst 32 fs/dlm/midcomms.c static void copy_from_cb(void *dst, const void *base, unsigned offset, dst 39 fs/dlm/midcomms.c memcpy(dst, base + offset, copy); dst 42 fs/dlm/midcomms.c memcpy(dst + copy, base, len); dst 39 fs/ecryptfs/crypto.c void ecryptfs_from_hex(char *dst, char *src, int dst_size) dst 47 fs/ecryptfs/crypto.c dst[x] = (unsigned char)simple_strtol(tmp, NULL, 16); dst 52 fs/ecryptfs/crypto.c char *src, int len, char *dst) dst 58 fs/ecryptfs/crypto.c err = crypto_shash_digest(desc, src, len, dst); dst 73 fs/ecryptfs/crypto.c static int ecryptfs_calculate_md5(char *dst, dst 81 fs/ecryptfs/crypto.c rc = ecryptfs_hash_digest(tfm, src, len, dst); dst 129 fs/ecryptfs/crypto.c char dst[MD5_DIGEST_SIZE]; dst 147 fs/ecryptfs/crypto.c rc = ecryptfs_calculate_md5(dst, crypt_stat, src, dst 154 fs/ecryptfs/crypto.c memcpy(iv, dst, crypt_stat->iv_bytes); dst 654 fs/ecryptfs/crypto.c char dst[MD5_DIGEST_SIZE]; dst 664 fs/ecryptfs/crypto.c rc = ecryptfs_calculate_md5(dst, crypt_stat, crypt_stat->key, dst 671 fs/ecryptfs/crypto.c memcpy(crypt_stat->root_iv, dst, crypt_stat->iv_bytes); dst 1774 fs/ecryptfs/crypto.c static void ecryptfs_encode_for_filename(unsigned char *dst, size_t *dst_size, dst 1803 fs/ecryptfs/crypto.c if (!dst) dst 1819 fs/ecryptfs/crypto.c dst[dst_offset++] = portable_filename_chars[dst_block[0]]; dst 1820 fs/ecryptfs/crypto.c dst[dst_offset++] = portable_filename_chars[dst_block[1]]; dst 1821 fs/ecryptfs/crypto.c dst[dst_offset++] = portable_filename_chars[dst_block[2]]; dst 1822 fs/ecryptfs/crypto.c dst[dst_offset++] = portable_filename_chars[dst_block[3]]; dst 1850 fs/ecryptfs/crypto.c ecryptfs_decode_from_filename(unsigned char *dst, size_t *dst_size, dst 1857 fs/ecryptfs/crypto.c if (!dst) { dst 1867 fs/ecryptfs/crypto.c dst[dst_byte_offset] = (src_byte << 2); dst 1871 fs/ecryptfs/crypto.c dst[dst_byte_offset++] |= (src_byte >> 4); dst 1872 fs/ecryptfs/crypto.c dst[dst_byte_offset] = ((src_byte & 0xF) dst 1877 fs/ecryptfs/crypto.c dst[dst_byte_offset++] |= (src_byte >> 2); dst 1878 fs/ecryptfs/crypto.c dst[dst_byte_offset] = (src_byte << 6); dst 1882 fs/ecryptfs/crypto.c dst[dst_byte_offset++] |= (src_byte); dst 42 fs/ecryptfs/ecryptfs_kernel.h ecryptfs_to_hex(char *dst, char *src, size_t src_size) dst 44 fs/ecryptfs/ecryptfs_kernel.h char *end = bin2hex(dst, src, src_size); dst 48 fs/ecryptfs/ecryptfs_kernel.h extern void ecryptfs_from_hex(char 
*dst, char *src, int dst_size); dst 30 fs/efs/inode.c static inline void extent_copy(efs_extent *src, efs_extent *dst) { dst 37 fs/efs/inode.c dst->cooked.ex_magic = (unsigned int) src->raw[0]; dst 38 fs/efs/inode.c dst->cooked.ex_bn = ((unsigned int) src->raw[1] << 16) | dst 41 fs/efs/inode.c dst->cooked.ex_length = (unsigned int) src->raw[4]; dst 42 fs/efs/inode.c dst->cooked.ex_offset = ((unsigned int) src->raw[5] << 16) | dst 192 fs/erofs/decompressor.c static void copy_from_pcpubuf(struct page **out, const char *dst, dst 196 fs/erofs/decompressor.c const char *end = dst + outputsize; dst 198 fs/erofs/decompressor.c const char *cur = dst - pageofs_out; dst 206 fs/erofs/decompressor.c if (cur >= dst) { dst 226 fs/erofs/decompressor.c void *dst; dst 231 fs/erofs/decompressor.c dst = kmap_atomic(*rq->out); dst 242 fs/erofs/decompressor.c dst = erofs_get_pcpubuf(0); dst 243 fs/erofs/decompressor.c if (IS_ERR(dst)) dst 244 fs/erofs/decompressor.c return PTR_ERR(dst); dst 247 fs/erofs/decompressor.c ret = alg->decompress(rq, dst); dst 249 fs/erofs/decompressor.c copy_from_pcpubuf(rq->out, dst, rq->pageofs_out, dst 252 fs/erofs/decompressor.c erofs_put_pcpubuf(dst); dst 260 fs/erofs/decompressor.c dst = page_address(*rq->out); dst 267 fs/erofs/decompressor.c dst = vm_map_ram(rq->out, nrpages_out, -1, PAGE_KERNEL); dst 270 fs/erofs/decompressor.c if (dst || ++i >= 3) dst 275 fs/erofs/decompressor.c if (!dst) dst 281 fs/erofs/decompressor.c ret = alg->decompress(rq, dst + rq->pageofs_out); dst 284 fs/erofs/decompressor.c kunmap_atomic(dst); dst 286 fs/erofs/decompressor.c vm_unmap_ram(dst, nrpages_out); dst 296 fs/erofs/decompressor.c unsigned char *src, *dst; dst 310 fs/erofs/decompressor.c dst = kmap_atomic(rq->out[0]); dst 311 fs/erofs/decompressor.c memcpy(dst + rq->pageofs_out, src, righthalf); dst 312 fs/erofs/decompressor.c kunmap_atomic(dst); dst 320 fs/erofs/decompressor.c dst = kmap_atomic(rq->out[1]); dst 321 fs/erofs/decompressor.c memcpy(dst, src + righthalf, rq->pageofs_out); dst 322 fs/erofs/decompressor.c kunmap_atomic(dst); dst 2374 fs/ext4/ext4.h static inline void ext4_fname_from_fscrypt_name(struct ext4_filename *dst, dst 2377 fs/ext4/ext4.h memset(dst, 0, sizeof(*dst)); dst 2379 fs/ext4/ext4.h dst->usr_fname = src->usr_fname; dst 2380 fs/ext4/ext4.h dst->disk_name = src->disk_name; dst 2381 fs/ext4/ext4.h dst->hinfo.hash = src->hash; dst 2382 fs/ext4/ext4.h dst->hinfo.minor_hash = src->minor_hash; dst 2383 fs/ext4/ext4.h dst->crypto_buf = src->crypto_buf; dst 2166 fs/f2fs/f2fs.h static inline void f2fs_copy_page(struct page *src, struct page *dst) dst 2169 fs/f2fs/f2fs.h char *dst_kaddr = kmap(dst); dst 2172 fs/f2fs/f2fs.h kunmap(dst); dst 1088 fs/f2fs/file.c pgoff_t src, pgoff_t dst, pgoff_t len, bool full) dst 1107 fs/f2fs/file.c ret = f2fs_get_dnode_of_data(&dn, dst + i, ALLOC_NODE); dst 1137 fs/f2fs/file.c new_size = (loff_t)(dst + i) << PAGE_SHIFT; dst 1150 fs/f2fs/file.c pdst = f2fs_get_new_data_page(dst_inode, NULL, dst + i, dst 1172 fs/f2fs/file.c struct inode *dst_inode, pgoff_t src, pgoff_t dst, dst 1203 fs/f2fs/file.c do_replace, src, dst, olen, full); dst 1208 fs/f2fs/file.c dst += olen; dst 2572 fs/f2fs/file.c struct inode *dst = file_inode(file_out); dst 2579 fs/f2fs/file.c src->i_sb != dst->i_sb) dst 2585 fs/f2fs/file.c if (!S_ISREG(src->i_mode) || !S_ISREG(dst->i_mode)) dst 2588 fs/f2fs/file.c if (IS_ENCRYPTED(src) || IS_ENCRYPTED(dst)) dst 2591 fs/f2fs/file.c if (src == dst) { dst 2599 fs/f2fs/file.c if (src != dst) { dst 2601 fs/f2fs/file.c if 
(!inode_trylock(dst)) dst 2617 fs/f2fs/file.c dst_osize = dst->i_size; dst 2618 fs/f2fs/file.c if (pos_out + olen > dst->i_size) dst 2631 fs/f2fs/file.c ret = f2fs_convert_inline_inode(dst); dst 2641 fs/f2fs/file.c ret = filemap_write_and_wait_range(dst->i_mapping, dst 2649 fs/f2fs/file.c if (src != dst) { dst 2651 fs/f2fs/file.c if (!down_write_trylock(&F2FS_I(dst)->i_gc_rwsem[WRITE])) dst 2656 fs/f2fs/file.c ret = __exchange_data_block(src, dst, pos_in >> F2FS_BLKSIZE_BITS, dst 2662 fs/f2fs/file.c f2fs_i_size_write(dst, dst_max_i_size); dst 2663 fs/f2fs/file.c else if (dst_osize != dst->i_size) dst 2664 fs/f2fs/file.c f2fs_i_size_write(dst, dst_osize); dst 2668 fs/f2fs/file.c if (src != dst) dst 2669 fs/f2fs/file.c up_write(&F2FS_I(dst)->i_gc_rwsem[WRITE]); dst 2673 fs/f2fs/file.c if (src != dst) dst 2674 fs/f2fs/file.c inode_unlock(dst); dst 2683 fs/f2fs/file.c struct fd dst; dst 2694 fs/f2fs/file.c dst = fdget(range.dst_fd); dst 2695 fs/f2fs/file.c if (!dst.file) dst 2698 fs/f2fs/file.c if (!(dst.file->f_mode & FMODE_WRITE)) { dst 2707 fs/f2fs/file.c err = f2fs_move_file_range(filp, range.pos_in, dst.file, dst 2718 fs/f2fs/file.c fdput(dst); dst 368 fs/f2fs/inline.c struct f2fs_dentry_ptr src, dst; dst 396 fs/f2fs/inline.c make_dentry_ptr_block(dir, &dst, dentry_blk); dst 399 fs/f2fs/inline.c memcpy(dst.bitmap, src.bitmap, src.nr_bitmap); dst 400 fs/f2fs/inline.c memset(dst.bitmap + src.nr_bitmap, 0, dst.nr_bitmap - src.nr_bitmap); dst 407 fs/f2fs/inline.c memcpy(dst.dentry, src.dentry, SIZE_OF_DIR_ENTRY * src.max); dst 408 fs/f2fs/inline.c memcpy(dst.filename, src.filename, src.max * F2FS_SLOT_LEN); dst 2592 fs/f2fs/node.c struct f2fs_inode *src, *dst; dst 2620 fs/f2fs/node.c dst = F2FS_INODE(ipage); dst 2622 fs/f2fs/node.c memcpy(dst, src, (unsigned long)&src->i_ext - (unsigned long)src); dst 2623 fs/f2fs/node.c dst->i_size = 0; dst 2624 fs/f2fs/node.c dst->i_blocks = cpu_to_le64(1); dst 2625 fs/f2fs/node.c dst->i_links = cpu_to_le32(1); dst 2626 fs/f2fs/node.c dst->i_xattr_nid = 0; dst 2627 fs/f2fs/node.c dst->i_inline = src->i_inline & (F2FS_INLINE_XATTR | F2FS_EXTRA_ATTR); dst 2628 fs/f2fs/node.c if (dst->i_inline & F2FS_EXTRA_ATTR) { dst 2629 fs/f2fs/node.c dst->i_extra_isize = src->i_extra_isize; dst 2634 fs/f2fs/node.c dst->i_inline_xattr_size = src->i_inline_xattr_size; dst 2639 fs/f2fs/node.c dst->i_projid = src->i_projid; dst 2644 fs/f2fs/node.c dst->i_crtime = src->i_crtime; dst 2645 fs/f2fs/node.c dst->i_crtime_nsec = src->i_crtime_nsec; dst 74 fs/f2fs/node.h static inline void copy_node_info(struct node_info *dst, dst 77 fs/f2fs/node.h dst->nid = src->nid; dst 78 fs/f2fs/node.h dst->ino = src->ino; dst 79 fs/f2fs/node.h dst->blk_addr = src->blk_addr; dst 80 fs/f2fs/node.h dst->version = src->version; dst 286 fs/f2fs/node.h static inline void copy_node_footer(struct page *dst, struct page *src) dst 289 fs/f2fs/node.h struct f2fs_node *dst_rn = F2FS_NODE(dst); dst 2338 fs/f2fs/segment.c struct f2fs_summary_block *dst; dst 2340 fs/f2fs/segment.c dst = (struct f2fs_summary_block *)page_address(page); dst 2341 fs/f2fs/segment.c memset(dst, 0, PAGE_SIZE); dst 2346 fs/f2fs/segment.c memcpy(&dst->journal, curseg->journal, SUM_JOURNAL_SIZE); dst 2349 fs/f2fs/segment.c memcpy(dst->entries, src->entries, SUM_ENTRY_SIZE); dst 2350 fs/f2fs/segment.c memcpy(&dst->footer, &src->footer, SUM_FOOTER_SIZE); dst 272 fs/fat/fat.h static inline void fat16_towchar(wchar_t *dst, const __u8 *src, size_t len) dst 276 fs/fat/fat.h *dst++ = src[0] | (src[1] << 8); dst 280 fs/fat/fat.h memcpy(dst, 
src, len * 2); dst 299 fs/fat/fat.h static inline void fatwchar_to16(__u8 *dst, const wchar_t *src, size_t len) dst 303 fs/fat/fat.h dst[0] = *src & 0x00FF; dst 304 fs/fat/fat.h dst[1] = (*src & 0xFF00) >> 8; dst 305 fs/fat/fat.h dst += 2; dst 309 fs/fat/fat.h memcpy(dst, src, len * 2); dst 240 fs/fcntl.c uid_t __user *dst = (void __user *)arg; dst 249 fs/fcntl.c err = put_user(src[0], &dst[0]); dst 250 fs/fcntl.c err |= put_user(src[1], &dst[1]); dst 524 fs/fcntl.c #define copy_flock_fields(dst, src) \ dst 525 fs/fcntl.c (dst)->l_type = (src)->l_type; \ dst 526 fs/fcntl.c (dst)->l_whence = (src)->l_whence; \ dst 527 fs/fcntl.c (dst)->l_start = (src)->l_start; \ dst 528 fs/fcntl.c (dst)->l_len = (src)->l_len; \ dst 529 fs/fcntl.c (dst)->l_pid = (src)->l_pid; dst 2581 fs/fuse/file.c static int fuse_copy_ioctl_iovec_old(struct iovec *dst, void *src, dst 2599 fs/fuse/file.c dst[i].iov_base = compat_ptr(ciov[i].iov_base); dst 2600 fs/fuse/file.c dst[i].iov_len = ciov[i].iov_len; dst 2609 fs/fuse/file.c memcpy(dst, src, transferred); dst 2628 fs/fuse/file.c static int fuse_copy_ioctl_iovec(struct fuse_conn *fc, struct iovec *dst, dst 2636 fs/fuse/file.c return fuse_copy_ioctl_iovec_old(dst, src, transferred, dst 2649 fs/fuse/file.c dst[i].iov_base = (void __user *) (unsigned long) fiov[i].base; dst 2650 fs/fuse/file.c dst[i].iov_len = (size_t) fiov[i].len; dst 2654 fs/fuse/file.c (ptr_to_compat(dst[i].iov_base) != fiov[i].base || dst 2655 fs/fuse/file.c (compat_size_t) dst[i].iov_len != fiov[i].len)) dst 98 fs/hfs/bnode.c void hfs_bnode_copy(struct hfs_bnode *dst_node, int dst, dst 103 fs/hfs/bnode.c hfs_dbg(BNODE_MOD, "copybytes: %u,%u,%u\n", dst, src, len); dst 107 fs/hfs/bnode.c dst += dst_node->page_offset; dst 111 fs/hfs/bnode.c memcpy(kmap(dst_page) + dst, kmap(src_page) + src, len); dst 117 fs/hfs/bnode.c void hfs_bnode_move(struct hfs_bnode *node, int dst, int src, int len) dst 122 fs/hfs/bnode.c hfs_dbg(BNODE_MOD, "movebytes: %u,%u,%u\n", dst, src, len); dst 126 fs/hfs/bnode.c dst += node->page_offset; dst 129 fs/hfs/bnode.c memmove(ptr + dst, ptr + src, len); dst 38 fs/hfs/trans.c char *dst; dst 45 fs/hfs/trans.c dst = out; dst 65 fs/hfs/trans.c size = nls_io->uni2char(ch, dst, dstlen); dst 69 fs/hfs/trans.c *dst = '?'; dst 72 fs/hfs/trans.c dst += size; dst 79 fs/hfs/trans.c *dst++ = (ch = *src++) == '/' ? ':' : ch; dst 82 fs/hfs/trans.c return dst - out; dst 102 fs/hfs/trans.c char *dst; dst 107 fs/hfs/trans.c dst = out->name; dst 123 fs/hfs/trans.c size = nls_disk->uni2char(ch, dst, dstlen); dst 127 fs/hfs/trans.c *dst = '?'; dst 130 fs/hfs/trans.c dst += size; dst 133 fs/hfs/trans.c *dst++ = ch > 0xff ? '?' : ch; dst 143 fs/hfs/trans.c *dst++ = (ch = *src++) == ':' ? 
'/' : ch; dst 146 fs/hfs/trans.c out->len = dst - (char *)out->name; dst 149 fs/hfs/trans.c *dst++ = 0; dst 127 fs/hfsplus/bnode.c void hfs_bnode_copy(struct hfs_bnode *dst_node, int dst, dst 133 fs/hfsplus/bnode.c hfs_dbg(BNODE_MOD, "copybytes: %u,%u,%u\n", dst, src, len); dst 137 fs/hfsplus/bnode.c dst += dst_node->page_offset; dst 140 fs/hfsplus/bnode.c dst_page = dst_node->page + (dst >> PAGE_SHIFT); dst 141 fs/hfsplus/bnode.c dst &= ~PAGE_MASK; dst 143 fs/hfsplus/bnode.c if (src == dst) { dst 162 fs/hfsplus/bnode.c dst_ptr = kmap(*dst_page) + dst; dst 163 fs/hfsplus/bnode.c if (PAGE_SIZE - src < PAGE_SIZE - dst) { dst 166 fs/hfsplus/bnode.c dst += l; dst 168 fs/hfsplus/bnode.c l = PAGE_SIZE - dst; dst 170 fs/hfsplus/bnode.c dst = 0; dst 177 fs/hfsplus/bnode.c if (!dst) dst 185 fs/hfsplus/bnode.c void hfs_bnode_move(struct hfs_bnode *node, int dst, int src, int len) dst 190 fs/hfsplus/bnode.c hfs_dbg(BNODE_MOD, "movebytes: %u,%u,%u\n", dst, src, len); dst 194 fs/hfsplus/bnode.c dst += node->page_offset; dst 195 fs/hfsplus/bnode.c if (dst > src) { dst 199 fs/hfsplus/bnode.c dst += len - 1; dst 200 fs/hfsplus/bnode.c dst_page = node->page + (dst >> PAGE_SHIFT); dst 201 fs/hfsplus/bnode.c dst = (dst & ~PAGE_MASK) + 1; dst 203 fs/hfsplus/bnode.c if (src == dst) { dst 225 fs/hfsplus/bnode.c dst_ptr = kmap(*dst_page) + dst; dst 226 fs/hfsplus/bnode.c if (src < dst) { dst 229 fs/hfsplus/bnode.c dst -= l; dst 231 fs/hfsplus/bnode.c l = dst; dst 233 fs/hfsplus/bnode.c dst = PAGE_SIZE; dst 240 fs/hfsplus/bnode.c if (dst == PAGE_SIZE) dst 249 fs/hfsplus/bnode.c dst_page = node->page + (dst >> PAGE_SHIFT); dst 250 fs/hfsplus/bnode.c dst &= ~PAGE_MASK; dst 252 fs/hfsplus/bnode.c if (src == dst) { dst 273 fs/hfsplus/bnode.c dst_ptr = kmap(*dst_page) + dst; dst 275 fs/hfsplus/bnode.c PAGE_SIZE - dst) { dst 278 fs/hfsplus/bnode.c dst += l; dst 280 fs/hfsplus/bnode.c l = PAGE_SIZE - dst; dst 282 fs/hfsplus/bnode.c dst = 0; dst 289 fs/hfsplus/bnode.c if (!dst) dst 411 fs/hfsplus/hfsplus_fs.h void hfs_bnode_copy(struct hfs_bnode *dst_node, int dst, dst 413 fs/hfsplus/hfsplus_fs.h void hfs_bnode_move(struct hfs_bnode *node, int dst, int src, int len); dst 175 fs/hpfs/hpfs_fn.h static inline void copy_de(struct hpfs_dirent *dst, struct hpfs_dirent *src) dst 179 fs/hpfs/hpfs_fn.h if (!dst || !src) return; dst 180 fs/hpfs/hpfs_fn.h a = dst->down; dst 181 fs/hpfs/hpfs_fn.h n = dst->not_8x3; dst 182 fs/hpfs/hpfs_fn.h memcpy((char *)dst + 2, (char *)src + 2, 28); dst 183 fs/hpfs/hpfs_fn.h dst->down = a; dst 184 fs/hpfs/hpfs_fn.h dst->not_8x3 = n; dst 3421 fs/io_uring.c static int io_copy_iov(struct io_ring_ctx *ctx, struct iovec *dst, dst 3435 fs/io_uring.c dst->iov_base = (void __user *) (unsigned long) ciov.iov_base; dst 3436 fs/io_uring.c dst->iov_len = ciov.iov_len; dst 3441 fs/io_uring.c if (copy_from_user(dst, &src[index], sizeof(*dst))) dst 350 fs/jfs/jfs_logmgr.c caddr_t dst; /* destination address in log page */ dst 432 fs/jfs/jfs_logmgr.c dst = (caddr_t) lp + dstoffset; dst 433 fs/jfs/jfs_logmgr.c memcpy(dst, src, nbytes); dst 480 fs/jfs/jfs_logmgr.c dst = (caddr_t) lp + dstoffset; dst 481 fs/jfs/jfs_logmgr.c memcpy(dst, src, nbytes); dst 1182 fs/nfs/delegation.c bool nfs4_refresh_delegation_stateid(nfs4_stateid *dst, struct inode *inode) dst 1192 fs/nfs/delegation.c nfs4_stateid_match_other(dst, &delegation->stateid)) { dst 1193 fs/nfs/delegation.c dst->seqid = delegation->stateid.seqid; dst 1212 fs/nfs/delegation.c nfs4_stateid *dst, const struct cred **cred) dst 1223 fs/nfs/delegation.c 
nfs4_stateid_copy(dst, &delegation->stateid); dst 68 fs/nfs/delegation.h bool nfs4_copy_delegation_stateid(struct inode *inode, fmode_t flags, nfs4_stateid *dst, const struct cred **cred); dst 69 fs/nfs/delegation.h bool nfs4_refresh_delegation_stateid(nfs4_stateid *dst, struct inode *inode); dst 299 fs/nfs/internal.h nfs4_label_copy(struct nfs4_label *dst, struct nfs4_label *src) dst 301 fs/nfs/internal.h if (!dst || !src) dst 307 fs/nfs/internal.h dst->lfs = src->lfs; dst 308 fs/nfs/internal.h dst->pi = src->pi; dst 309 fs/nfs/internal.h dst->len = src->len; dst 310 fs/nfs/internal.h memcpy(dst->label, src->label, src->len); dst 312 fs/nfs/internal.h return dst; dst 335 fs/nfs/internal.h nfs4_label_copy(struct nfs4_label *dst, struct nfs4_label *src) dst 482 fs/nfs/internal.h int nfs_scan_commit_list(struct list_head *src, struct list_head *dst, dst 485 fs/nfs/internal.h int nfs_scan_commit(struct inode *inode, struct list_head *dst, dst 503 fs/nfs/internal.h struct list_head *dst, dst 20 fs/nfs/nfs42proc.c static int nfs42_do_offload_cancel_async(struct file *dst, nfs4_stateid *std); dst 137 fs/nfs/nfs42proc.c struct file *dst, dst 143 fs/nfs/nfs42proc.c struct nfs_open_context *ctx = nfs_file_open_context(dst); dst 191 fs/nfs/nfs42proc.c nfs42_do_offload_cancel_async(dst, &copy->stateid); dst 196 fs/nfs/nfs42proc.c static int process_copy_commit(struct file *dst, loff_t pos_dst, dst 206 fs/nfs/nfs42proc.c status = nfs4_proc_commit(dst, pos_dst, res->write_res.count, &cres); dst 222 fs/nfs/nfs42proc.c struct file *dst, dst 232 fs/nfs/nfs42proc.c struct inode *dst_inode = file_inode(dst); dst 283 fs/nfs/nfs42proc.c status = handle_async_copy(res, server, src, dst, dst 291 fs/nfs/nfs42proc.c status = process_copy_commit(dst, pos_dst, res); dst 307 fs/nfs/nfs42proc.c struct file *dst, loff_t pos_dst, dst 310 fs/nfs/nfs42proc.c struct nfs_server *server = NFS_SERVER(file_inode(dst)); dst 316 fs/nfs/nfs42proc.c .dst_fh = NFS_FH(file_inode(dst)), dst 327 fs/nfs/nfs42proc.c .inode = file_inode(dst), dst 338 fs/nfs/nfs42proc.c dst_lock = nfs_get_lock_context(nfs_file_open_context(dst)); dst 347 fs/nfs/nfs42proc.c inode_lock(file_inode(dst)); dst 349 fs/nfs/nfs42proc.c dst, dst_lock, dst 351 fs/nfs/nfs42proc.c inode_unlock(file_inode(dst)); dst 416 fs/nfs/nfs42proc.c static int nfs42_do_offload_cancel_async(struct file *dst, dst 419 fs/nfs/nfs42proc.c struct nfs_server *dst_server = NFS_SERVER(file_inode(dst)); dst 421 fs/nfs/nfs42proc.c struct nfs_open_context *ctx = nfs_file_open_context(dst); dst 444 fs/nfs/nfs42proc.c data->args.osa_src_fh = NFS_FH(file_inode(dst)); dst 492 fs/nfs/nfs4_fs.h extern bool nfs4_copy_open_stateid(nfs4_stateid *dst, dst 509 fs/nfs/nfs4_fs.h extern int nfs4_proc_commit(struct file *dst, __u64 offset, __u32 count, struct nfs_commitres *res); dst 550 fs/nfs/nfs4_fs.h static inline void nfs4_stateid_copy(nfs4_stateid *dst, const nfs4_stateid *src) dst 552 fs/nfs/nfs4_fs.h memcpy(dst->data, src->data, sizeof(dst->data)); dst 553 fs/nfs/nfs4_fs.h dst->type = src->type; dst 556 fs/nfs/nfs4_fs.h static inline bool nfs4_stateid_match(const nfs4_stateid *dst, const nfs4_stateid *src) dst 558 fs/nfs/nfs4_fs.h if (dst->type != src->type) dst 560 fs/nfs/nfs4_fs.h return memcmp(dst->data, src->data, sizeof(dst->data)) == 0; dst 563 fs/nfs/nfs4_fs.h static inline bool nfs4_stateid_match_other(const nfs4_stateid *dst, const nfs4_stateid *src) dst 565 fs/nfs/nfs4_fs.h return memcmp(dst->other, src->other, NFS4_STATEID_OTHER_SIZE) == 0; dst 278 fs/nfs/nfs4proc.c static void
nfs4_bitmap_copy_adjust(__u32 *dst, const __u32 *src, dst 283 fs/nfs/nfs4proc.c memcpy(dst, src, NFS4_BITMASK_SZ*sizeof(*dst)); dst 293 fs/nfs/nfs4proc.c dst[0] &= ~FATTR4_WORD0_SIZE; dst 296 fs/nfs/nfs4proc.c dst[0] &= ~FATTR4_WORD0_CHANGE; dst 299 fs/nfs/nfs4proc.c static void nfs4_bitmap_copy_adjust_setattr(__u32 *dst, dst 302 fs/nfs/nfs4proc.c nfs4_bitmap_copy_adjust(dst, src, inode); dst 3344 fs/nfs/nfs4proc.c static void nfs4_sync_open_stateid(nfs4_stateid *dst, dst 3355 fs/nfs/nfs4proc.c if (!nfs4_state_match_open_stateid_other(state, dst)) { dst 3356 fs/nfs/nfs4proc.c nfs4_stateid_copy(dst, &state->open_stateid); dst 3365 fs/nfs/nfs4proc.c dst_seqid = be32_to_cpu(dst->seqid); dst 3367 fs/nfs/nfs4proc.c dst->seqid = seqid_open; dst 3376 fs/nfs/nfs4proc.c static bool nfs4_refresh_open_old_stateid(nfs4_stateid *dst, dst 3389 fs/nfs/nfs4proc.c if (!nfs4_state_match_open_stateid_other(state, dst)) { dst 3398 fs/nfs/nfs4proc.c dst_seqid = be32_to_cpu(dst->seqid); dst 3400 fs/nfs/nfs4proc.c dst->seqid = cpu_to_be32(dst_seqid + 1); dst 3402 fs/nfs/nfs4proc.c dst->seqid = seqid_open; dst 5355 fs/nfs/nfs4proc.c static int _nfs4_proc_commit(struct file *dst, struct nfs_commitargs *args, dst 5358 fs/nfs/nfs4proc.c struct inode *dst_inode = file_inode(dst); dst 5371 fs/nfs/nfs4proc.c int nfs4_proc_commit(struct file *dst, __u64 offset, __u32 count, struct nfs_commitres *res) dst 5377 fs/nfs/nfs4proc.c struct nfs_server *dst_server = NFS_SERVER(file_inode(dst)); dst 5382 fs/nfs/nfs4proc.c status = _nfs4_proc_commit(dst, &args, res); dst 6446 fs/nfs/nfs4proc.c static bool nfs4_refresh_lock_old_stateid(nfs4_stateid *dst, dst 6453 fs/nfs/nfs4proc.c if (!nfs4_stateid_match_other(dst, &lsp->ls_stateid)) dst 6455 fs/nfs/nfs4proc.c if (!nfs4_stateid_is_newer(&lsp->ls_stateid, dst)) dst 6456 fs/nfs/nfs4proc.c nfs4_stateid_seqid_inc(dst); dst 6458 fs/nfs/nfs4proc.c dst->seqid = lsp->ls_stateid.seqid; dst 6465 fs/nfs/nfs4proc.c static bool nfs4_sync_lock_stateid(nfs4_stateid *dst, dst 6472 fs/nfs/nfs4proc.c ret = !nfs4_stateid_match_other(dst, &lsp->ls_stateid); dst 6473 fs/nfs/nfs4proc.c nfs4_stateid_copy(dst, &lsp->ls_stateid); dst 144 fs/nfs/nfs4session.h static inline void nfs4_copy_sessionid(struct nfs4_sessionid *dst, dst 147 fs/nfs/nfs4session.h memcpy(dst->data, src->data, NFS4_MAX_SESSIONID_LEN); dst 953 fs/nfs/nfs4state.c static void nfs4_fl_copy_lock(struct file_lock *dst, struct file_lock *src) dst 957 fs/nfs/nfs4state.c dst->fl_u.nfs4_fl.owner = lsp; dst 985 fs/nfs/nfs4state.c static int nfs4_copy_lock_stateid(nfs4_stateid *dst, dst 1007 fs/nfs/nfs4state.c nfs4_stateid_copy(dst, &lsp->ls_stateid); dst 1016 fs/nfs/nfs4state.c bool nfs4_copy_open_stateid(nfs4_stateid *dst, struct nfs4_state *state) dst 1030 fs/nfs/nfs4state.c nfs4_stateid_copy(dst, src); dst 1041 fs/nfs/nfs4state.c nfs4_stateid *dst, const struct cred **cred) dst 1049 fs/nfs/nfs4state.c ret = nfs4_copy_lock_stateid(dst, state, l_ctx); dst 1054 fs/nfs/nfs4state.c if (nfs4_copy_delegation_stateid(state->inode, fmode, dst, cred)) { dst 1064 fs/nfs/nfs4state.c ret = nfs4_copy_open_stateid(dst, state) ? 
0 : -EAGAIN; dst 1067 fs/nfs/nfs4state.c dst->seqid = 0; dst 365 fs/nfs/pnfs.c bool nfs4_layout_refresh_old_stateid(nfs4_stateid *dst, dst 382 fs/nfs/pnfs.c nfs4_stateid_match_other(dst, &lo->plh_stateid)) { dst 384 fs/nfs/pnfs.c if (!nfs4_stateid_is_newer(&lo->plh_stateid, dst)) { dst 385 fs/nfs/pnfs.c nfs4_stateid_seqid_inc(dst); dst 392 fs/nfs/pnfs.c dst->seqid = lo->plh_stateid.seqid; dst 264 fs/nfs/pnfs.h bool nfs4_layout_refresh_old_stateid(nfs4_stateid *dst, dst 368 fs/nfs/pnfs.h void pnfs_generic_recover_commit_reqs(struct list_head *dst, dst 557 fs/nfs/pnfs.h pnfs_use_threshold(struct nfs4_threshold **dst, struct nfs4_threshold *src, dst 560 fs/nfs/pnfs.h return (dst && src && src->bm != 0 && nfss->pnfs_curr_ld && dst 581 fs/nfs/pnfs.h pnfs_copy_range(struct pnfs_layout_range *dst, dst 584 fs/nfs/pnfs.h memcpy(dst, src, sizeof(*dst)); dst 786 fs/nfs/pnfs.h pnfs_use_threshold(struct nfs4_threshold **dst, struct nfs4_threshold *src, dst 801 fs/nfs/pnfs.h static inline bool nfs4_layout_refresh_old_stateid(nfs4_stateid *dst, dst 96 fs/nfs/pnfs_nfs.c struct list_head *dst = &bucket->committing; dst 100 fs/nfs/pnfs_nfs.c ret = nfs_scan_commit_list(src, dst, cinfo, max); dst 134 fs/nfs/pnfs_nfs.c void pnfs_generic_recover_commit_reqs(struct list_head *dst, dst 145 fs/nfs/pnfs_nfs.c nwritten = nfs_scan_commit_list(&b->written, dst, cinfo, 0); dst 869 fs/nfs/write.c nfs_request_add_commit_list_locked(struct nfs_page *req, struct list_head *dst, dst 873 fs/nfs/write.c nfs_list_add_request(req, dst); dst 1041 fs/nfs/write.c nfs_scan_commit_list(struct list_head *src, struct list_head *dst, dst 1054 fs/nfs/write.c if (!list_empty(dst)) { dst 1069 fs/nfs/write.c nfs_list_add_request(req, dst); dst 1089 fs/nfs/write.c nfs_scan_commit(struct inode *inode, struct list_head *dst, dst 1100 fs/nfs/write.c ret = nfs_scan_commit_list(&cinfo->mds->list, dst, dst 166 fs/nfsd/nfs4proc.c fh_dup2(struct svc_fh *dst, struct svc_fh *src) dst 168 fs/nfsd/nfs4proc.c fh_put(dst); dst 172 fs/nfsd/nfs4proc.c *dst = *src; dst 1029 fs/nfsd/nfs4proc.c stateid_t *dst_stateid, struct nfsd_file **dst) dst 1044 fs/nfsd/nfs4proc.c dst_stateid, WR_STATE, dst); dst 1052 fs/nfsd/nfs4proc.c !S_ISREG(file_inode((*dst)->nf_file)->i_mode)) { dst 1060 fs/nfsd/nfs4proc.c nfsd_file_put(*dst); dst 1071 fs/nfsd/nfs4proc.c struct nfsd_file *src, *dst; dst 1075 fs/nfsd/nfs4proc.c &clone->cl_dst_stateid, &dst); dst 1080 fs/nfsd/nfs4proc.c dst->nf_file, clone->cl_dst_pos, clone->cl_count, dst 1083 fs/nfsd/nfs4proc.c nfsd_file_put(dst); dst 1208 fs/nfsd/nfs4proc.c static void dup_copy_fields(struct nfsd4_copy *src, struct nfsd4_copy *dst) dst 1210 fs/nfsd/nfs4proc.c dst->cp_src_pos = src->cp_src_pos; dst 1211 fs/nfsd/nfs4proc.c dst->cp_dst_pos = src->cp_dst_pos; dst 1212 fs/nfsd/nfs4proc.c dst->cp_count = src->cp_count; dst 1213 fs/nfsd/nfs4proc.c dst->cp_synchronous = src->cp_synchronous; dst 1214 fs/nfsd/nfs4proc.c memcpy(&dst->cp_res, &src->cp_res, sizeof(src->cp_res)); dst 1215 fs/nfsd/nfs4proc.c memcpy(&dst->fh, &src->fh, sizeof(src->fh)); dst 1216 fs/nfsd/nfs4proc.c dst->cp_clp = src->cp_clp; dst 1217 fs/nfsd/nfs4proc.c dst->nf_dst = nfsd_file_get(src->nf_dst); dst 1218 fs/nfsd/nfs4proc.c dst->nf_src = nfsd_file_get(src->nf_src); dst 1219 fs/nfsd/nfs4proc.c memcpy(&dst->cp_stateid, &src->cp_stateid, sizeof(src->cp_stateid)); dst 927 fs/nfsd/nfs4state.c nfs4_inc_and_copy_stateid(stateid_t *dst, struct nfs4_stid *stid) dst 934 fs/nfsd/nfs4state.c memcpy(dst, src, sizeof(*dst)); dst 170 fs/nfsd/nfsfh.h fh_copy(struct svc_fh *dst, 
struct svc_fh *src) dst 174 fs/nfsd/nfsfh.h *dst = *src; dst 175 fs/nfsd/nfsfh.h return dst; dst 179 fs/nfsd/nfsfh.h fh_copy_shallow(struct knfsd_fh *dst, struct knfsd_fh *src) dst 181 fs/nfsd/nfsfh.h dst->fh_size = src->fh_size; dst 182 fs/nfsd/nfsfh.h memcpy(&dst->fh_base, &src->fh_base, src->fh_size); dst 630 fs/nfsd/state.h void nfs4_inc_and_copy_stateid(stateid_t *dst, struct nfs4_stid *stid); dst 533 fs/nfsd/vfs.c __be32 nfsd4_clone_file_range(struct file *src, u64 src_pos, struct file *dst, dst 538 fs/nfsd/vfs.c cloned = vfs_clone_file_range(src, src_pos, dst, dst_pos, count, 0); dst 545 fs/nfsd/vfs.c int status = vfs_fsync_range(dst, dst_pos, dst_end, 0); dst 555 fs/nfsd/vfs.c ssize_t nfsd_copy_file_range(struct file *src, u64 src_pos, struct file *dst, dst 568 fs/nfsd/vfs.c return vfs_copy_file_range(src, src_pos, dst, dst_pos, count, 0); dst 197 fs/nilfs2/page.c static void nilfs_copy_page(struct page *dst, struct page *src, int copy_dirty) dst 202 fs/nilfs2/page.c BUG_ON(PageWriteback(dst)); dst 205 fs/nilfs2/page.c if (!page_has_buffers(dst)) dst 206 fs/nilfs2/page.c create_empty_buffers(dst, sbh->b_size, 0); dst 211 fs/nilfs2/page.c dbh = dbufs = page_buffers(dst); dst 222 fs/nilfs2/page.c copy_highpage(dst, src); dst 224 fs/nilfs2/page.c if (PageUptodate(src) && !PageUptodate(dst)) dst 225 fs/nilfs2/page.c SetPageUptodate(dst); dst 226 fs/nilfs2/page.c else if (!PageUptodate(src) && PageUptodate(dst)) dst 227 fs/nilfs2/page.c ClearPageUptodate(dst); dst 228 fs/nilfs2/page.c if (PageMappedToDisk(src) && !PageMappedToDisk(dst)) dst 229 fs/nilfs2/page.c SetPageMappedToDisk(dst); dst 230 fs/nilfs2/page.c else if (!PageMappedToDisk(src) && PageMappedToDisk(dst)) dst 231 fs/nilfs2/page.c ClearPageMappedToDisk(dst); dst 20 fs/ntfs/runlist.c static inline void ntfs_rl_mm(runlist_element *base, int dst, int src, dst 23 fs/ntfs/runlist.c if (likely((dst != src) && (size > 0))) dst 24 fs/ntfs/runlist.c memmove(base + dst, base + src, size * sizeof(*base)); dst 33 fs/ntfs/runlist.c static inline void ntfs_rl_mc(runlist_element *dstbase, int dst, dst 37 fs/ntfs/runlist.c memcpy(dstbase + dst, srcbase + src, size * sizeof(*dstbase)); dst 141 fs/ntfs/runlist.c static inline bool ntfs_are_rl_mergeable(runlist_element *dst, dst 144 fs/ntfs/runlist.c BUG_ON(!dst); dst 148 fs/ntfs/runlist.c if ((dst->lcn == LCN_RL_NOT_MAPPED) && (src->lcn == LCN_RL_NOT_MAPPED)) dst 151 fs/ntfs/runlist.c if ((dst->vcn + dst->length) != src->vcn) dst 154 fs/ntfs/runlist.c if ((dst->lcn >= 0) && (src->lcn >= 0) && dst 155 fs/ntfs/runlist.c ((dst->lcn + dst->length) == src->lcn)) dst 158 fs/ntfs/runlist.c if ((dst->lcn == LCN_HOLE) && (src->lcn == LCN_HOLE)) dst 175 fs/ntfs/runlist.c static inline void __ntfs_rl_merge(runlist_element *dst, runlist_element *src) dst 177 fs/ntfs/runlist.c dst->length += src->length; dst 204 fs/ntfs/runlist.c static inline runlist_element *ntfs_rl_append(runlist_element *dst, dst 210 fs/ntfs/runlist.c BUG_ON(!dst); dst 215 fs/ntfs/runlist.c right = ntfs_are_rl_mergeable(src + ssize - 1, dst + loc + 1); dst 218 fs/ntfs/runlist.c dst = ntfs_rl_realloc(dst, dsize, dsize + ssize - right); dst 219 fs/ntfs/runlist.c if (IS_ERR(dst)) dst 220 fs/ntfs/runlist.c return dst; dst 228 fs/ntfs/runlist.c __ntfs_rl_merge(src + ssize - 1, dst + loc + 1); dst 234 fs/ntfs/runlist.c ntfs_rl_mm(dst, marker, loc + 1 + right, dsize - (loc + 1 + right)); dst 235 fs/ntfs/runlist.c ntfs_rl_mc(dst, loc + 1, src, 0, ssize); dst 238 fs/ntfs/runlist.c dst[loc].length = dst[loc + 1].vcn - dst[loc].vcn; dst 241 
fs/ntfs/runlist.c if (dst[marker].lcn == LCN_ENOENT) dst 242 fs/ntfs/runlist.c dst[marker].vcn = dst[marker - 1].vcn + dst[marker - 1].length; dst 244 fs/ntfs/runlist.c return dst; dst 271 fs/ntfs/runlist.c static inline runlist_element *ntfs_rl_insert(runlist_element *dst, dst 278 fs/ntfs/runlist.c BUG_ON(!dst); dst 290 fs/ntfs/runlist.c left = ntfs_are_rl_mergeable(dst + loc - 1, src); dst 292 fs/ntfs/runlist.c merged_length = dst[loc - 1].length; dst 296 fs/ntfs/runlist.c disc = (src[0].vcn > dst[loc - 1].vcn + merged_length); dst 302 fs/ntfs/runlist.c dst = ntfs_rl_realloc(dst, dsize, dsize + ssize - left + disc); dst 303 fs/ntfs/runlist.c if (IS_ERR(dst)) dst 304 fs/ntfs/runlist.c return dst; dst 310 fs/ntfs/runlist.c __ntfs_rl_merge(dst + loc - 1, src); dst 321 fs/ntfs/runlist.c ntfs_rl_mm(dst, marker, loc, dsize - loc); dst 322 fs/ntfs/runlist.c ntfs_rl_mc(dst, loc + disc, src, left, ssize - left); dst 325 fs/ntfs/runlist.c dst[marker].vcn = dst[marker - 1].vcn + dst[marker - 1].length; dst 327 fs/ntfs/runlist.c if (dst[marker].lcn == LCN_HOLE || dst[marker].lcn == LCN_RL_NOT_MAPPED) dst 328 fs/ntfs/runlist.c dst[marker].length = dst[marker + 1].vcn - dst[marker].vcn; dst 333 fs/ntfs/runlist.c dst[loc].vcn = dst[loc - 1].vcn + dst[loc - 1].length; dst 334 fs/ntfs/runlist.c dst[loc].length = dst[loc + 1].vcn - dst[loc].vcn; dst 336 fs/ntfs/runlist.c dst[loc].vcn = 0; dst 337 fs/ntfs/runlist.c dst[loc].length = dst[loc + 1].vcn; dst 339 fs/ntfs/runlist.c dst[loc].lcn = LCN_RL_NOT_MAPPED; dst 341 fs/ntfs/runlist.c return dst; dst 367 fs/ntfs/runlist.c static inline runlist_element *ntfs_rl_replace(runlist_element *dst, dst 376 fs/ntfs/runlist.c BUG_ON(!dst); dst 381 fs/ntfs/runlist.c right = ntfs_are_rl_mergeable(src + ssize - 1, dst + loc + 1); dst 383 fs/ntfs/runlist.c left = ntfs_are_rl_mergeable(dst + loc - 1, src); dst 390 fs/ntfs/runlist.c dst = ntfs_rl_realloc(dst, dsize, dsize + delta); dst 391 fs/ntfs/runlist.c if (IS_ERR(dst)) dst 392 fs/ntfs/runlist.c return dst; dst 401 fs/ntfs/runlist.c __ntfs_rl_merge(src + ssize - 1, dst + loc + 1); dst 403 fs/ntfs/runlist.c __ntfs_rl_merge(dst + loc - 1, src); dst 423 fs/ntfs/runlist.c ntfs_rl_mm(dst, marker, tail, dsize - tail); dst 424 fs/ntfs/runlist.c ntfs_rl_mc(dst, loc, src, left, ssize - left); dst 427 fs/ntfs/runlist.c if (dsize - tail > 0 && dst[marker].lcn == LCN_ENOENT) dst 428 fs/ntfs/runlist.c dst[marker].vcn = dst[marker - 1].vcn + dst[marker - 1].length; dst 429 fs/ntfs/runlist.c return dst; dst 456 fs/ntfs/runlist.c static inline runlist_element *ntfs_rl_split(runlist_element *dst, int dsize, dst 459 fs/ntfs/runlist.c BUG_ON(!dst); dst 463 fs/ntfs/runlist.c dst = ntfs_rl_realloc(dst, dsize, dsize + ssize + 1); dst 464 fs/ntfs/runlist.c if (IS_ERR(dst)) dst 465 fs/ntfs/runlist.c return dst; dst 472 fs/ntfs/runlist.c ntfs_rl_mm(dst, loc + 1 + ssize, loc, dsize - loc); dst 473 fs/ntfs/runlist.c ntfs_rl_mc(dst, loc + 1, src, 0, ssize); dst 476 fs/ntfs/runlist.c dst[loc].length = dst[loc+1].vcn - dst[loc].vcn; dst 477 fs/ntfs/runlist.c dst[loc+ssize+1].vcn = dst[loc+ssize].vcn + dst[loc+ssize].length; dst 478 fs/ntfs/runlist.c dst[loc+ssize+1].length = dst[loc+ssize+2].vcn - dst[loc+ssize+1].vcn; dst 480 fs/ntfs/runlist.c return dst; dst 1238 fs/ntfs/runlist.c static inline int ntfs_write_significant_bytes(s8 *dst, const s8 *dst_max, dst 1247 fs/ntfs/runlist.c if (unlikely(dst > dst_max)) dst 1249 fs/ntfs/runlist.c *dst++ = l & 0xffll; dst 1256 fs/ntfs/runlist.c if (unlikely(dst > dst_max)) dst 1259 fs/ntfs/runlist.c *dst = 
(s8)-1; dst 1261 fs/ntfs/runlist.c if (unlikely(dst > dst_max)) dst 1264 fs/ntfs/runlist.c *dst = (s8)0; dst 1309 fs/ntfs/runlist.c int ntfs_mapping_pairs_build(const ntfs_volume *vol, s8 *dst, dst 1329 fs/ntfs/runlist.c *dst = 0; dst 1342 fs/ntfs/runlist.c dst_max = dst + dst_len - 1; dst 1363 fs/ntfs/runlist.c len_len = ntfs_write_significant_bytes(dst + 1, dst_max, dst 1381 fs/ntfs/runlist.c lcn_len = ntfs_write_significant_bytes(dst + 1 + dst 1387 fs/ntfs/runlist.c dst_next = dst + len_len + lcn_len + 1; dst 1391 fs/ntfs/runlist.c *dst = lcn_len << 4 | len_len; dst 1393 fs/ntfs/runlist.c dst = dst_next; dst 1414 fs/ntfs/runlist.c len_len = ntfs_write_significant_bytes(dst + 1, dst_max, dst 1429 fs/ntfs/runlist.c lcn_len = ntfs_write_significant_bytes(dst + 1 + dst 1436 fs/ntfs/runlist.c dst_next = dst + len_len + lcn_len + 1; dst 1440 fs/ntfs/runlist.c *dst = lcn_len << 4 | len_len; dst 1442 fs/ntfs/runlist.c dst = dst_next; dst 1451 fs/ntfs/runlist.c *dst = 0; dst 76 fs/ntfs/runlist.h extern int ntfs_mapping_pairs_build(const ntfs_volume *vol, s8 *dst, dst 220 fs/proc/vmcore.c static int vmcoredd_copy_dumps(void *dst, u64 start, size_t size, int userbuf) dst 233 fs/proc/vmcore.c if (copy_to(dst, buf, tsz, userbuf)) { dst 240 fs/proc/vmcore.c dst += tsz; dst 255 fs/proc/vmcore.c static int vmcoredd_mmap_dumps(struct vm_area_struct *vma, unsigned long dst, dst 269 fs/proc/vmcore.c if (remap_vmalloc_range_partial(vma, dst, buf, 0, dst 277 fs/proc/vmcore.c dst += tsz; dst 413 fs/pstore/platform.c char *dst; dst 428 fs/pstore/platform.c dst = big_oops_buf; dst 431 fs/pstore/platform.c dst = psinfo->buf; dst 436 fs/pstore/platform.c header_size = snprintf(dst, dst_size, "%s#%d Part%u\n", why, dst 441 fs/pstore/platform.c if (!kmsg_dump_get_buffer(dumper, true, dst + header_size, dst 446 fs/pstore/platform.c zipped_len = pstore_compress(dst, psinfo->buf, dst 185 fs/quota/quota.c static void copy_to_if_dqblk(struct if_dqblk *dst, struct qc_dqblk *src) dst 187 fs/quota/quota.c memset(dst, 0, sizeof(*dst)); dst 188 fs/quota/quota.c dst->dqb_bhardlimit = stoqb(src->d_spc_hardlimit); dst 189 fs/quota/quota.c dst->dqb_bsoftlimit = stoqb(src->d_spc_softlimit); dst 190 fs/quota/quota.c dst->dqb_curspace = src->d_space; dst 191 fs/quota/quota.c dst->dqb_ihardlimit = src->d_ino_hardlimit; dst 192 fs/quota/quota.c dst->dqb_isoftlimit = src->d_ino_softlimit; dst 193 fs/quota/quota.c dst->dqb_curinodes = src->d_ino_count; dst 194 fs/quota/quota.c dst->dqb_btime = src->d_spc_timer; dst 195 fs/quota/quota.c dst->dqb_itime = src->d_ino_timer; dst 196 fs/quota/quota.c dst->dqb_valid = QIF_ALL; dst 249 fs/quota/quota.c static void copy_from_if_dqblk(struct qc_dqblk *dst, struct if_dqblk *src) dst 251 fs/quota/quota.c dst->d_spc_hardlimit = qbtos(src->dqb_bhardlimit); dst 252 fs/quota/quota.c dst->d_spc_softlimit = qbtos(src->dqb_bsoftlimit); dst 253 fs/quota/quota.c dst->d_space = src->dqb_curspace; dst 254 fs/quota/quota.c dst->d_ino_hardlimit = src->dqb_ihardlimit; dst 255 fs/quota/quota.c dst->d_ino_softlimit = src->dqb_isoftlimit; dst 256 fs/quota/quota.c dst->d_ino_count = src->dqb_curinodes; dst 257 fs/quota/quota.c dst->d_spc_timer = src->dqb_btime; dst 258 fs/quota/quota.c dst->d_ino_timer = src->dqb_itime; dst 260 fs/quota/quota.c dst->d_fieldmask = 0; dst 262 fs/quota/quota.c dst->d_fieldmask |= QC_SPC_SOFT | QC_SPC_HARD; dst 264 fs/quota/quota.c dst->d_fieldmask |= QC_SPACE; dst 266 fs/quota/quota.c dst->d_fieldmask |= QC_INO_SOFT | QC_INO_HARD; dst 268 fs/quota/quota.c dst->d_fieldmask |= 
QC_INO_COUNT; dst 270 fs/quota/quota.c dst->d_fieldmask |= QC_SPC_TIMER; dst 272 fs/quota/quota.c dst->d_fieldmask |= QC_INO_TIMER; dst 486 fs/quota/quota.c static void copy_from_xfs_dqblk(struct qc_dqblk *dst, struct fs_disk_quota *src) dst 488 fs/quota/quota.c dst->d_spc_hardlimit = quota_bbtob(src->d_blk_hardlimit); dst 489 fs/quota/quota.c dst->d_spc_softlimit = quota_bbtob(src->d_blk_softlimit); dst 490 fs/quota/quota.c dst->d_ino_hardlimit = src->d_ino_hardlimit; dst 491 fs/quota/quota.c dst->d_ino_softlimit = src->d_ino_softlimit; dst 492 fs/quota/quota.c dst->d_space = quota_bbtob(src->d_bcount); dst 493 fs/quota/quota.c dst->d_ino_count = src->d_icount; dst 494 fs/quota/quota.c dst->d_ino_timer = src->d_itimer; dst 495 fs/quota/quota.c dst->d_spc_timer = src->d_btimer; dst 496 fs/quota/quota.c dst->d_ino_warns = src->d_iwarns; dst 497 fs/quota/quota.c dst->d_spc_warns = src->d_bwarns; dst 498 fs/quota/quota.c dst->d_rt_spc_hardlimit = quota_bbtob(src->d_rtb_hardlimit); dst 499 fs/quota/quota.c dst->d_rt_spc_softlimit = quota_bbtob(src->d_rtb_softlimit); dst 500 fs/quota/quota.c dst->d_rt_space = quota_bbtob(src->d_rtbcount); dst 501 fs/quota/quota.c dst->d_rt_spc_timer = src->d_rtbtimer; dst 502 fs/quota/quota.c dst->d_rt_spc_warns = src->d_rtbwarns; dst 503 fs/quota/quota.c dst->d_fieldmask = 0; dst 505 fs/quota/quota.c dst->d_fieldmask |= QC_INO_SOFT; dst 507 fs/quota/quota.c dst->d_fieldmask |= QC_INO_HARD; dst 509 fs/quota/quota.c dst->d_fieldmask |= QC_SPC_SOFT; dst 511 fs/quota/quota.c dst->d_fieldmask |= QC_SPC_HARD; dst 513 fs/quota/quota.c dst->d_fieldmask |= QC_RT_SPC_SOFT; dst 515 fs/quota/quota.c dst->d_fieldmask |= QC_RT_SPC_HARD; dst 517 fs/quota/quota.c dst->d_fieldmask |= QC_SPC_TIMER; dst 519 fs/quota/quota.c dst->d_fieldmask |= QC_INO_TIMER; dst 521 fs/quota/quota.c dst->d_fieldmask |= QC_RT_SPC_TIMER; dst 523 fs/quota/quota.c dst->d_fieldmask |= QC_SPC_WARNS; dst 525 fs/quota/quota.c dst->d_fieldmask |= QC_INO_WARNS; dst 527 fs/quota/quota.c dst->d_fieldmask |= QC_RT_SPC_WARNS; dst 529 fs/quota/quota.c dst->d_fieldmask |= QC_SPACE; dst 531 fs/quota/quota.c dst->d_fieldmask |= QC_INO_COUNT; dst 533 fs/quota/quota.c dst->d_fieldmask |= QC_RT_SPACE; dst 536 fs/quota/quota.c static void copy_qcinfo_from_xfs_dqblk(struct qc_info *dst, dst 539 fs/quota/quota.c memset(dst, 0, sizeof(*dst)); dst 540 fs/quota/quota.c dst->i_spc_timelimit = src->d_btimer; dst 541 fs/quota/quota.c dst->i_ino_timelimit = src->d_itimer; dst 542 fs/quota/quota.c dst->i_rt_spc_timelimit = src->d_rtbtimer; dst 543 fs/quota/quota.c dst->i_ino_warnlimit = src->d_iwarns; dst 544 fs/quota/quota.c dst->i_spc_warnlimit = src->d_bwarns; dst 545 fs/quota/quota.c dst->i_rt_spc_warnlimit = src->d_rtbwarns; dst 547 fs/quota/quota.c dst->i_fieldmask |= QC_SPC_WARNS; dst 549 fs/quota/quota.c dst->i_fieldmask |= QC_INO_WARNS; dst 551 fs/quota/quota.c dst->i_fieldmask |= QC_RT_SPC_WARNS; dst 553 fs/quota/quota.c dst->i_fieldmask |= QC_SPC_TIMER; dst 555 fs/quota/quota.c dst->i_fieldmask |= QC_INO_TIMER; dst 557 fs/quota/quota.c dst->i_fieldmask |= QC_RT_SPC_TIMER; dst 593 fs/quota/quota.c static void copy_to_xfs_dqblk(struct fs_disk_quota *dst, struct qc_dqblk *src, dst 596 fs/quota/quota.c memset(dst, 0, sizeof(*dst)); dst 597 fs/quota/quota.c dst->d_version = FS_DQUOT_VERSION; dst 598 fs/quota/quota.c dst->d_id = id; dst 600 fs/quota/quota.c dst->d_flags = FS_USER_QUOTA; dst 602 fs/quota/quota.c dst->d_flags = FS_PROJ_QUOTA; dst 604 fs/quota/quota.c dst->d_flags = FS_GROUP_QUOTA; dst 605 fs/quota/quota.c 
dst->d_blk_hardlimit = quota_btobb(src->d_spc_hardlimit); dst 606 fs/quota/quota.c dst->d_blk_softlimit = quota_btobb(src->d_spc_softlimit); dst 607 fs/quota/quota.c dst->d_ino_hardlimit = src->d_ino_hardlimit; dst 608 fs/quota/quota.c dst->d_ino_softlimit = src->d_ino_softlimit; dst 609 fs/quota/quota.c dst->d_bcount = quota_btobb(src->d_space); dst 610 fs/quota/quota.c dst->d_icount = src->d_ino_count; dst 611 fs/quota/quota.c dst->d_itimer = src->d_ino_timer; dst 612 fs/quota/quota.c dst->d_btimer = src->d_spc_timer; dst 613 fs/quota/quota.c dst->d_iwarns = src->d_ino_warns; dst 614 fs/quota/quota.c dst->d_bwarns = src->d_spc_warns; dst 615 fs/quota/quota.c dst->d_rtb_hardlimit = quota_btobb(src->d_rt_spc_hardlimit); dst 616 fs/quota/quota.c dst->d_rtb_softlimit = quota_btobb(src->d_rt_spc_softlimit); dst 617 fs/quota/quota.c dst->d_rtbcount = quota_btobb(src->d_rt_space); dst 618 fs/quota/quota.c dst->d_rtbtimer = src->d_rt_spc_timer; dst 619 fs/quota/quota.c dst->d_rtbwarns = src->d_rt_spc_warns; dst 32 fs/readdir.c char __user *dst = (_dst); \ dst 35 fs/readdir.c unsafe_put_user(0, dst+len, label); \ dst 36 fs/readdir.c unsafe_copy_to_user(dst, src, len, label); \ dst 81 fs/squashfs/zstd_wrapper.c out_buf.dst = squashfs_first_page(output); dst 95 fs/squashfs/zstd_wrapper.c out_buf.dst = squashfs_next_page(output); dst 96 fs/squashfs/zstd_wrapper.c if (out_buf.dst == NULL) { dst 11 fs/stack.c void fsstack_copy_inode_size(struct inode *dst, struct inode *src) dst 54 fs/stack.c spin_lock(&dst->i_lock); dst 55 fs/stack.c i_size_write(dst, i_size); dst 56 fs/stack.c dst->i_blocks = i_blocks; dst 58 fs/stack.c spin_unlock(&dst->i_lock); dst 1721 fs/userfaultfd.c ret = validate_range(ctx->mm, &uffdio_copy.dst, uffdio_copy.len); dst 1735 fs/userfaultfd.c ret = mcopy_atomic(ctx->mm, uffdio_copy.dst, uffdio_copy.src, dst 1749 fs/userfaultfd.c range.start = uffdio_copy.dst; dst 268 fs/xfs/libxfs/xfs_attr_remote.c uint8_t **dst) dst 294 fs/xfs/libxfs/xfs_attr_remote.c memcpy(*dst, src + hdr_size, byte_cnt); dst 303 fs/xfs/libxfs/xfs_attr_remote.c *dst += byte_cnt; dst 318 fs/xfs/libxfs/xfs_attr_remote.c char *dst = bp->b_addr; dst 330 fs/xfs/libxfs/xfs_attr_remote.c hdr_size = xfs_attr3_rmt_hdr_set(mp, dst, ino, *offset, dst 333 fs/xfs/libxfs/xfs_attr_remote.c memcpy(dst + hdr_size, *src, byte_cnt); dst 342 fs/xfs/libxfs/xfs_attr_remote.c memset(dst + hdr_size + byte_cnt, 0, dst 348 fs/xfs/libxfs/xfs_attr_remote.c dst += blksize; dst 372 fs/xfs/libxfs/xfs_attr_remote.c uint8_t *dst = args->value; dst 412 fs/xfs/libxfs/xfs_attr_remote.c &dst); dst 183 fs/xfs/libxfs/xfs_bmap_btree.c struct xfs_btree_cur *dst) dst 185 fs/xfs/libxfs/xfs_bmap_btree.c ASSERT((dst->bc_tp->t_firstblock != NULLFSBLOCK) || dst 186 fs/xfs/libxfs/xfs_bmap_btree.c (dst->bc_private.b.ip->i_d.di_flags & XFS_DIFLAG_REALTIME)); dst 188 fs/xfs/libxfs/xfs_bmap_btree.c dst->bc_private.b.allocated += src->bc_private.b.allocated; dst 189 fs/xfs/libxfs/xfs_bmap_btree.c dst->bc_tp->t_firstblock = src->bc_tp->t_firstblock; dst 103 fs/xfs/libxfs/xfs_btree.h struct xfs_btree_cur *dst); dst 154 fs/xfs/libxfs/xfs_dir2_sf.c xfs_dir2_sf_hdr_t *dst; /* temporary data buffer */ dst 167 fs/xfs/libxfs/xfs_dir2_sf.c dst = kmem_alloc(mp->m_sb.sb_inodesize, 0); dst 173 fs/xfs/libxfs/xfs_dir2_sf.c sfp = (xfs_dir2_sf_hdr_t *)dst; dst 243 fs/xfs/libxfs/xfs_dir2_sf.c xfs_init_local_fork(dp, XFS_DATA_FORK, dst, size); dst 251 fs/xfs/libxfs/xfs_dir2_sf.c kmem_free(dst); dst 87 fs/xfs/xfs_icache.h struct xfs_eofblocks *dst) dst 99 fs/xfs/xfs_icache.h 
dst->eof_flags = src->eof_flags; dst 100 fs/xfs/xfs_icache.h dst->eof_prid = src->eof_prid; dst 101 fs/xfs/xfs_icache.h dst->eof_min_file_size = src->eof_min_file_size; dst 103 fs/xfs/xfs_icache.h dst->eof_uid = INVALID_UID; dst 105 fs/xfs/xfs_icache.h dst->eof_uid = make_kuid(current_user_ns(), src->eof_uid); dst 106 fs/xfs/xfs_icache.h if (!uid_valid(dst->eof_uid)) dst 110 fs/xfs/xfs_icache.h dst->eof_gid = INVALID_GID; dst 112 fs/xfs/xfs_icache.h dst->eof_gid = make_kgid(current_user_ns(), src->eof_gid); dst 113 fs/xfs/xfs_icache.h if (!gid_valid(dst->eof_gid)) dst 615 fs/xfs/xfs_qm_syscalls.c struct qc_dqblk *dst) dst 617 fs/xfs/xfs_qm_syscalls.c memset(dst, 0, sizeof(*dst)); dst 618 fs/xfs/xfs_qm_syscalls.c dst->d_spc_hardlimit = dst 620 fs/xfs/xfs_qm_syscalls.c dst->d_spc_softlimit = dst 622 fs/xfs/xfs_qm_syscalls.c dst->d_ino_hardlimit = be64_to_cpu(dqp->q_core.d_ino_hardlimit); dst 623 fs/xfs/xfs_qm_syscalls.c dst->d_ino_softlimit = be64_to_cpu(dqp->q_core.d_ino_softlimit); dst 624 fs/xfs/xfs_qm_syscalls.c dst->d_space = XFS_FSB_TO_B(mp, dqp->q_res_bcount); dst 625 fs/xfs/xfs_qm_syscalls.c dst->d_ino_count = dqp->q_res_icount; dst 626 fs/xfs/xfs_qm_syscalls.c dst->d_spc_timer = be32_to_cpu(dqp->q_core.d_btimer); dst 627 fs/xfs/xfs_qm_syscalls.c dst->d_ino_timer = be32_to_cpu(dqp->q_core.d_itimer); dst 628 fs/xfs/xfs_qm_syscalls.c dst->d_ino_warns = be16_to_cpu(dqp->q_core.d_iwarns); dst 629 fs/xfs/xfs_qm_syscalls.c dst->d_spc_warns = be16_to_cpu(dqp->q_core.d_bwarns); dst 630 fs/xfs/xfs_qm_syscalls.c dst->d_rt_spc_hardlimit = dst 632 fs/xfs/xfs_qm_syscalls.c dst->d_rt_spc_softlimit = dst 634 fs/xfs/xfs_qm_syscalls.c dst->d_rt_space = XFS_FSB_TO_B(mp, dqp->q_res_rtbcount); dst 635 fs/xfs/xfs_qm_syscalls.c dst->d_rt_spc_timer = be32_to_cpu(dqp->q_core.d_rtbtimer); dst 636 fs/xfs/xfs_qm_syscalls.c dst->d_rt_spc_warns = be16_to_cpu(dqp->q_core.d_rtbwarns); dst 649 fs/xfs/xfs_qm_syscalls.c dst->d_spc_timer = 0; dst 650 fs/xfs/xfs_qm_syscalls.c dst->d_ino_timer = 0; dst 651 fs/xfs/xfs_qm_syscalls.c dst->d_rt_spc_timer = 0; dst 659 fs/xfs/xfs_qm_syscalls.c if ((dst->d_space > dst->d_spc_softlimit) && dst 660 fs/xfs/xfs_qm_syscalls.c (dst->d_spc_softlimit > 0)) { dst 661 fs/xfs/xfs_qm_syscalls.c ASSERT(dst->d_spc_timer != 0); dst 663 fs/xfs/xfs_qm_syscalls.c if ((dst->d_ino_count > dst->d_ino_softlimit) && dst 664 fs/xfs/xfs_qm_syscalls.c (dst->d_ino_softlimit > 0)) { dst 665 fs/xfs/xfs_qm_syscalls.c ASSERT(dst->d_ino_timer != 0); dst 677 fs/xfs/xfs_qm_syscalls.c struct qc_dqblk *dst) dst 699 fs/xfs/xfs_qm_syscalls.c xfs_qm_scall_getquota_fill_qc(mp, type, dqp, dst); dst 715 fs/xfs/xfs_qm_syscalls.c struct qc_dqblk *dst) dst 727 fs/xfs/xfs_qm_syscalls.c xfs_qm_scall_getquota_fill_qc(mp, type, dqp, dst); dst 133 fs/xfs/xfs_trans_priv.h xfs_lsn_t *dst, dst 138 fs/xfs/xfs_trans_priv.h *dst = *src; dst 145 fs/xfs/xfs_trans_priv.h xfs_lsn_t *dst, dst 149 fs/xfs/xfs_trans_priv.h *dst = *src; dst 100 include/asm-generic/cacheflush.h #define copy_to_user_page(vma, page, vaddr, dst, src, len) \ dst 102 include/asm-generic/cacheflush.h memcpy(dst, src, len); \ dst 108 include/asm-generic/cacheflush.h #define copy_from_user_page(vma, page, vaddr, dst, src, len) \ dst 109 include/asm-generic/cacheflush.h memcpy(dst, src, len) dst 26 include/asm-generic/checksum.h extern __wsum csum_partial_copy(const void *src, void *dst, int len, __wsum sum); dst 34 include/asm-generic/checksum.h extern __wsum csum_partial_copy_from_user(const void __user *src, void *dst, dst 38 include/asm-generic/checksum.h #define 
csum_partial_copy_nocheck(src, dst, len, sum) \ dst 39 include/asm-generic/checksum.h csum_partial_copy((src), (dst), (len), (sum)) dst 220 include/asm-generic/uaccess.h __strncpy_from_user(char *dst, const char __user *src, long count) dst 223 include/asm-generic/uaccess.h strncpy(dst, (const char __force *)src, count); dst 224 include/asm-generic/uaccess.h for (tmp = dst; *tmp && count > 0; tmp++, count--) dst 226 include/asm-generic/uaccess.h return (tmp - dst); dst 231 include/asm-generic/uaccess.h strncpy_from_user(char *dst, const char __user *src, long count) dst 235 include/asm-generic/uaccess.h return __strncpy_from_user(dst, src, count); dst 29 include/crypto/acompress.h struct scatterlist *dst; dst 50 include/crypto/acompress.h void (*dst_free)(struct scatterlist *dst); dst 79 include/crypto/acompress.h void (*dst_free)(struct scatterlist *dst); dst 219 include/crypto/acompress.h struct scatterlist *dst, dst 224 include/crypto/acompress.h req->dst = dst; dst 228 include/crypto/acompress.h if (!req->dst) dst 90 include/crypto/aead.h struct scatterlist *dst; dst 482 include/crypto/aead.h struct scatterlist *dst, dst 486 include/crypto/aead.h req->dst = dst; dst 36 include/crypto/akcipher.h struct scatterlist *dst; dst 248 include/crypto/akcipher.h struct scatterlist *dst, dst 253 include/crypto/akcipher.h req->dst = dst; dst 99 include/crypto/algapi.h } src, dst; dst 122 include/crypto/algapi.h } src, dst; dst 198 include/crypto/algapi.h void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int size); dst 200 include/crypto/algapi.h static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int size) dst 205 include/crypto/algapi.h unsigned long *d = (unsigned long *)dst; dst 213 include/crypto/algapi.h __crypto_xor(dst, dst, src, size); dst 217 include/crypto/algapi.h static inline void crypto_xor_cpy(u8 *dst, const u8 *src1, const u8 *src2, dst 223 include/crypto/algapi.h unsigned long *d = (unsigned long *)dst; dst 232 include/crypto/algapi.h __crypto_xor(dst, src1, src2, size); dst 323 include/crypto/algapi.h struct scatterlist *dst, dst 328 include/crypto/algapi.h walk->out.sg = dst; dst 333 include/crypto/algapi.h struct scatterlist *dst, dst 338 include/crypto/algapi.h walk->out.sg = dst; dst 21 include/crypto/cast5.h void __cast5_encrypt(struct cast5_ctx *ctx, u8 *dst, const u8 *src); dst 22 include/crypto/cast5.h void __cast5_decrypt(struct cast5_ctx *ctx, u8 *dst, const u8 *src); dst 22 include/crypto/cast6.h void __cast6_encrypt(struct cast6_ctx *ctx, u8 *dst, const u8 *src); dst 23 include/crypto/cast6.h void __cast6_decrypt(struct cast6_ctx *ctx, u8 *dst, const u8 *src); dst 22 include/crypto/cbc.h u8 *dst = walk->dst.virt.addr; dst 27 include/crypto/cbc.h fn(tfm, iv, dst); dst 28 include/crypto/cbc.h memcpy(iv, dst, bsize); dst 31 include/crypto/cbc.h dst += bsize; dst 70 include/crypto/cbc.h if (walk.src.virt.addr == walk.dst.virt.addr) dst 87 include/crypto/cbc.h u8 *dst = walk->dst.virt.addr; dst 91 include/crypto/cbc.h fn(tfm, src, dst); dst 92 include/crypto/cbc.h crypto_xor(dst, iv, bsize); dst 96 include/crypto/cbc.h dst += bsize; dst 135 include/crypto/cbc.h if (walk->src.virt.addr == walk->dst.virt.addr) dst 37 include/crypto/ctr.h u8 *dst = walk.dst.virt.addr; dst 52 include/crypto/ctr.h crypto_xor_cpy(dst, src, buf, bsize); dst 55 include/crypto/ctr.h dst += bsize; dst 27 include/crypto/des.h void des_encrypt(const struct des_ctx *ctx, u8 *dst, const u8 *src); dst 28 include/crypto/des.h void des_decrypt(const struct des_ctx *ctx, u8 
*dst, const u8 *src); dst 30 include/crypto/des.h void des3_ede_encrypt(const struct des3_ede_ctx *dctx, u8 *dst, const u8 *src); dst 31 include/crypto/des.h void des3_ede_decrypt(const struct des3_ede_ctx *dctx, u8 *dst, const u8 *src); dst 226 include/crypto/if_alg.h void af_alg_pull_tsgl(struct sock *sk, size_t used, struct scatterlist *dst, dst 32 include/crypto/internal/scompress.h unsigned int slen, u8 *dst, unsigned int *dlen, dst 35 include/crypto/internal/scompress.h unsigned int slen, u8 *dst, unsigned int *dlen, dst 78 include/crypto/internal/scompress.h u8 *dst, unsigned int *dlen, void *ctx) dst 80 include/crypto/internal/scompress.h return crypto_scomp_alg(tfm)->compress(tfm, src, slen, dst, dlen, ctx); dst 85 include/crypto/internal/scompress.h u8 *dst, unsigned int *dlen, dst 88 include/crypto/internal/scompress.h return crypto_scomp_alg(tfm)->decompress(tfm, src, slen, dst, dlen, dst 45 include/crypto/internal/skcipher.h } src, dst; dst 30 include/crypto/kpp.h struct scatterlist *dst; dst 244 include/crypto/kpp.h req->dst = output; dst 70 include/crypto/nhpoly1305.h int crypto_nhpoly1305_final(struct shash_desc *desc, u8 *dst); dst 71 include/crypto/nhpoly1305.h int crypto_nhpoly1305_final_helper(struct shash_desc *desc, u8 *dst, dst 54 include/crypto/poly1305.h void poly1305_core_emit(const struct poly1305_state *state, void *dst); dst 62 include/crypto/poly1305.h int crypto_poly1305_final(struct shash_desc *desc, u8 *dst); dst 44 include/crypto/rng.h u8 *dst, unsigned int dlen); dst 136 include/crypto/rng.h u8 *dst, unsigned int dlen) dst 142 include/crypto/rng.h ret = crypto_rng_alg(tfm)->generate(tfm, src, slen, dst, dlen); dst 111 include/crypto/scatterwalk.h struct scatterlist *scatterwalk_ffwd(struct scatterlist dst[2], dst 25 include/crypto/serpent.h void __serpent_encrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src); dst 26 include/crypto/serpent.h void __serpent_decrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src); dst 30 include/crypto/skcipher.h struct scatterlist *dst; dst 609 include/crypto/skcipher.h struct scatterlist *src, struct scatterlist *dst, dst 613 include/crypto/skcipher.h req->dst = dst; dst 395 include/drm/drm_dp_mst_helper.h struct drm_dp_mst_branch *dst; dst 12 include/drm/drm_format_helper.h void drm_fb_memcpy(void *dst, void *vaddr, struct drm_framebuffer *fb, dst 14 include/drm/drm_format_helper.h void drm_fb_memcpy_dstclip(void __iomem *dst, void *vaddr, dst 17 include/drm/drm_format_helper.h void drm_fb_swab16(u16 *dst, void *vaddr, struct drm_framebuffer *fb, dst 19 include/drm/drm_format_helper.h void drm_fb_xrgb8888_to_rgb565(void *dst, void *vaddr, dst 22 include/drm/drm_format_helper.h void drm_fb_xrgb8888_to_rgb565_dstclip(void __iomem *dst, unsigned int dst_pitch, dst 25 include/drm/drm_format_helper.h void drm_fb_xrgb8888_to_rgb888_dstclip(void __iomem *dst, unsigned int dst_pitch, dst 28 include/drm/drm_format_helper.h void drm_fb_xrgb8888_to_gray8(u8 *dst, void *vaddr, struct drm_framebuffer *fb, dst 162 include/drm/drm_mipi_dbi.h int mipi_dbi_buf_copy(void *dst, struct drm_framebuffer *fb, dst 510 include/drm/drm_modes.h void drm_mode_copy(struct drm_display_mode *dst, dst 188 include/drm/drm_plane.h struct drm_rect src, dst; dst 177 include/drm/drm_rect.h bool drm_rect_clip_scaled(struct drm_rect *src, struct drm_rect *dst, dst 180 include/drm/drm_rect.h const struct drm_rect *dst, dst 183 include/drm/drm_rect.h const struct drm_rect *dst, dst 100 include/linux/async_tx.h #define async_tx_find_channel(dep, type, dst, 
dst_count, src, src_count, len) \ dst 119 include/linux/async_tx.h enum dma_transaction_type tx_type, struct page **dst, dst 461 include/linux/bio.h extern void bio_copy_data_iter(struct bio *dst, struct bvec_iter *dst_iter, dst 463 include/linux/bio.h extern void bio_copy_data(struct bio *dst, struct bio *src); dst 464 include/linux/bio.h extern void bio_list_copy_data(struct bio *dst, struct bio *src); dst 494 include/linux/bio.h #define bio_copy_dev(dst, src) \ dst 496 include/linux/bio.h (dst)->bi_disk = (src)->bi_disk; \ dst 497 include/linux/bio.h (dst)->bi_partno = (src)->bi_partno; \ dst 498 include/linux/bio.h bio_clone_blkg_association(dst, src); \ dst 516 include/linux/bio.h void bio_clone_blkg_association(struct bio *dst, struct bio *src); dst 523 include/linux/bio.h static inline void bio_clone_blkg_association(struct bio *dst, dst 127 include/linux/bitmap.h extern void __bitmap_complement(unsigned long *dst, const unsigned long *src, dst 129 include/linux/bitmap.h extern void __bitmap_shift_right(unsigned long *dst, const unsigned long *src, dst 131 include/linux/bitmap.h extern void __bitmap_shift_left(unsigned long *dst, const unsigned long *src, dst 133 include/linux/bitmap.h extern int __bitmap_and(unsigned long *dst, const unsigned long *bitmap1, dst 135 include/linux/bitmap.h extern void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1, dst 137 include/linux/bitmap.h extern void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1, dst 139 include/linux/bitmap.h extern int __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1, dst 180 include/linux/bitmap.h unsigned long *dst, int nbits); dst 182 include/linux/bitmap.h unsigned long *dst, int nbits); dst 186 include/linux/bitmap.h unsigned long *dst, int nbits); dst 187 include/linux/bitmap.h extern void bitmap_remap(unsigned long *dst, const unsigned long *src, dst 191 include/linux/bitmap.h extern void bitmap_onto(unsigned long *dst, const unsigned long *orig, dst 193 include/linux/bitmap.h extern void bitmap_fold(unsigned long *dst, const unsigned long *orig, dst 200 include/linux/bitmap.h extern void bitmap_copy_le(unsigned long *dst, const unsigned long *src, unsigned int nbits); dst 219 include/linux/bitmap.h static inline void bitmap_zero(unsigned long *dst, unsigned int nbits) dst 222 include/linux/bitmap.h memset(dst, 0, len); dst 225 include/linux/bitmap.h static inline void bitmap_fill(unsigned long *dst, unsigned int nbits) dst 228 include/linux/bitmap.h memset(dst, 0xff, len); dst 231 include/linux/bitmap.h static inline void bitmap_copy(unsigned long *dst, const unsigned long *src, dst 235 include/linux/bitmap.h memcpy(dst, src, len); dst 241 include/linux/bitmap.h static inline void bitmap_copy_clear_tail(unsigned long *dst, dst 244 include/linux/bitmap.h bitmap_copy(dst, src, nbits); dst 246 include/linux/bitmap.h dst[nbits / BITS_PER_LONG] &= BITMAP_LAST_WORD_MASK(nbits); dst 267 include/linux/bitmap.h static inline int bitmap_and(unsigned long *dst, const unsigned long *src1, dst 271 include/linux/bitmap.h return (*dst = *src1 & *src2 & BITMAP_LAST_WORD_MASK(nbits)) != 0; dst 272 include/linux/bitmap.h return __bitmap_and(dst, src1, src2, nbits); dst 275 include/linux/bitmap.h static inline void bitmap_or(unsigned long *dst, const unsigned long *src1, dst 279 include/linux/bitmap.h *dst = *src1 | *src2; dst 281 include/linux/bitmap.h __bitmap_or(dst, src1, src2, nbits); dst 284 include/linux/bitmap.h static inline void bitmap_xor(unsigned long *dst, const unsigned long 
*src1, dst 288 include/linux/bitmap.h *dst = *src1 ^ *src2; dst 290 include/linux/bitmap.h __bitmap_xor(dst, src1, src2, nbits); dst 293 include/linux/bitmap.h static inline int bitmap_andnot(unsigned long *dst, const unsigned long *src1, dst 297 include/linux/bitmap.h return (*dst = *src1 & ~(*src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0; dst 298 include/linux/bitmap.h return __bitmap_andnot(dst, src1, src2, nbits); dst 301 include/linux/bitmap.h static inline void bitmap_complement(unsigned long *dst, const unsigned long *src, dst 305 include/linux/bitmap.h *dst = ~(*src); dst 307 include/linux/bitmap.h __bitmap_complement(dst, src, nbits); dst 417 include/linux/bitmap.h static inline void bitmap_shift_right(unsigned long *dst, const unsigned long *src, dst 421 include/linux/bitmap.h *dst = (*src & BITMAP_LAST_WORD_MASK(nbits)) >> shift; dst 423 include/linux/bitmap.h __bitmap_shift_right(dst, src, shift, nbits); dst 426 include/linux/bitmap.h static inline void bitmap_shift_left(unsigned long *dst, const unsigned long *src, dst 430 include/linux/bitmap.h *dst = (*src << shift) & BITMAP_LAST_WORD_MASK(nbits); dst 432 include/linux/bitmap.h __bitmap_shift_left(dst, src, shift, nbits); dst 484 include/linux/bitmap.h static inline void bitmap_from_u64(unsigned long *dst, u64 mask) dst 486 include/linux/bitmap.h dst[0] = mask & ULONG_MAX; dst 489 include/linux/bitmap.h dst[1] = mask >> 32; dst 113 include/linux/bpf.h static inline void check_and_init_map_lock(struct bpf_map *map, void *dst) dst 117 include/linux/bpf.h *(struct bpf_spin_lock *)(dst + map->spin_lock_off) = dst 122 include/linux/bpf.h static inline void copy_map_value(struct bpf_map *map, void *dst, void *src) dst 127 include/linux/bpf.h memcpy(dst, src, off); dst 128 include/linux/bpf.h memcpy(dst + off + sizeof(struct bpf_spin_lock), dst 132 include/linux/bpf.h memcpy(dst, src, map->value_size); dst 135 include/linux/bpf.h void copy_map_value_locked(struct bpf_map *map, void *dst, void *src, dst 323 include/linux/bpf.h struct bpf_insn *dst, dst 488 include/linux/bpf.h typedef unsigned long (*bpf_ctx_copy_t)(void *dst, const void *src, dst 492 include/linux/bpf.h struct bpf_insn *dst, dst 661 include/linux/bpf.h void bpf_map_charge_move(struct bpf_map_memory *dst, dst 701 include/linux/bpf.h static inline void bpf_long_memcpy(void *dst, const void *src, u32 size) dst 704 include/linux/bpf.h long *ldst = dst; dst 723 include/linux/bpf.h int dev_map_enqueue(struct bpf_dtab_netdev *dst, struct xdp_buff *xdp, dst 725 include/linux/bpf.h int dev_map_generic_redirect(struct bpf_dtab_netdev *dst, struct sk_buff *skb, dst 822 include/linux/bpf.h int dev_map_enqueue(struct bpf_dtab_netdev *dst, struct xdp_buff *xdp, dst 830 include/linux/bpf.h static inline int dev_map_generic_redirect(struct bpf_dtab_netdev *dst, dst 191 include/linux/byteorder/generic.h static inline void cpu_to_be32_array(__be32 *dst, const u32 *src, size_t len) dst 196 include/linux/byteorder/generic.h dst[i] = cpu_to_be32(src[i]); dst 199 include/linux/byteorder/generic.h static inline void be32_to_cpu_array(u32 *dst, const __be32 *src, size_t len) dst 204 include/linux/byteorder/generic.h dst[i] = be32_to_cpu(src[i]); dst 181 include/linux/ccp.h struct scatterlist *src, *dst; dst 241 include/linux/ccp.h struct scatterlist *src, *dst; dst 352 include/linux/ccp.h struct scatterlist *src, *dst; dst 380 include/linux/ccp.h struct scatterlist *src, *dst; dst 439 include/linux/ccp.h struct scatterlist *src, *dst; dst 906 include/linux/cgroup.h struct cgroup *dst); dst 
846 include/linux/clk-provider.h static inline void __clk_hw_set_clk(struct clk_hw *dst, struct clk_hw *src) dst 848 include/linux/clk-provider.h dst->clk = src->clk; dst 849 include/linux/clk-provider.h dst->core = src->core; dst 174 include/linux/crypto.h struct scatterlist *dst; dst 258 include/linux/crypto.h struct scatterlist *dst, struct scatterlist *src, dst 261 include/linux/crypto.h struct scatterlist *dst, struct scatterlist *src, dst 321 include/linux/crypto.h void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); dst 322 include/linux/crypto.h void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); dst 338 include/linux/crypto.h unsigned int slen, u8 *dst, unsigned int *dlen); dst 340 include/linux/crypto.h unsigned int slen, u8 *dst, unsigned int *dlen); dst 734 include/linux/crypto.h int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst, dst 736 include/linux/crypto.h int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst, dst 743 include/linux/crypto.h void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); dst 744 include/linux/crypto.h void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src); dst 750 include/linux/crypto.h u8 *dst, unsigned int *dlen); dst 753 include/linux/crypto.h u8 *dst, unsigned int *dlen); dst 1257 include/linux/crypto.h struct scatterlist *src, struct scatterlist *dst, dst 1261 include/linux/crypto.h req->dst = dst; dst 1485 include/linux/crypto.h struct scatterlist *dst, dst 1490 include/linux/crypto.h return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes); dst 1513 include/linux/crypto.h struct scatterlist *dst, dst 1517 include/linux/crypto.h return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes); dst 1538 include/linux/crypto.h struct scatterlist *dst, dst 1543 include/linux/crypto.h return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes); dst 1563 include/linux/crypto.h struct scatterlist *dst, dst 1567 include/linux/crypto.h return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes); dst 1596 include/linux/crypto.h u8 *dst, unsigned int len) dst 1598 include/linux/crypto.h memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len); dst 1761 include/linux/crypto.h u8 *dst, const u8 *src) dst 1764 include/linux/crypto.h dst, src); dst 1777 include/linux/crypto.h u8 *dst, const u8 *src) dst 1780 include/linux/crypto.h dst, src); dst 1836 include/linux/crypto.h u8 *dst, unsigned int *dlen) dst 1839 include/linux/crypto.h src, slen, dst, dlen); dst 1844 include/linux/crypto.h u8 *dst, unsigned int *dlen) dst 1847 include/linux/crypto.h src, slen, dst, dlen); dst 288 include/linux/dma-resv.h int dma_resv_copy_fences(struct dma_resv *dst, struct dma_resv *src); dst 754 include/linux/dmaengine.h struct dma_chan *chan, dma_addr_t dst, dma_addr_t src, dst 757 include/linux/dmaengine.h struct dma_chan *chan, dma_addr_t dst, dma_addr_t *src, dst 763 include/linux/dmaengine.h struct dma_chan *chan, dma_addr_t *dst, dma_addr_t *src, dst 791 include/linux/dmaengine.h struct dma_chan *chan, dma_addr_t dst, u64 data, dst 255 include/linux/dmar.h static inline void dmar_copy_shared_irte(struct irte *dst, struct irte *src) dst 257 include/linux/dmar.h dst->present = src->present; dst 258 include/linux/dmar.h dst->fpd = src->fpd; dst 259 include/linux/dmar.h dst->avail = src->avail; dst 260 include/linux/dmar.h dst->pst = src->pst; dst 261 include/linux/dmar.h dst->vector = src->vector; dst 262 include/linux/dmar.h dst->sid = 
src->sid; dst 263 include/linux/dmar.h dst->sq = src->sq; dst 264 include/linux/dmar.h dst->svt = src->svt; dst 276 include/linux/etherdevice.h static inline void ether_addr_copy(u8 *dst, const u8 *src) dst 279 include/linux/etherdevice.h *(u32 *)dst = *(const u32 *)src; dst 280 include/linux/etherdevice.h *(u16 *)(dst + 4) = *(const u16 *)(src + 4); dst 282 include/linux/etherdevice.h u16 *a = (u16 *)dst; dst 299 include/linux/etherdevice.h static inline void eth_hw_addr_inherit(struct net_device *dst, dst 302 include/linux/etherdevice.h dst->addr_assign_type = src->addr_assign_type; dst 303 include/linux/etherdevice.h ether_addr_copy(dst->dev_addr, src->dev_addr); dst 170 include/linux/ethtool.h void ethtool_intersect_link_masks(struct ethtool_link_ksettings *dst, dst 173 include/linux/ethtool.h void ethtool_convert_legacy_u32_to_link_mode(unsigned long *dst, dst 198 include/linux/fb.h void (*writeio)(struct fb_info *info, void __iomem *dst, void *src, unsigned int size); dst 199 include/linux/fb.h void (*readio) (struct fb_info *info, void *dst, void __iomem *src, unsigned int size); dst 617 include/linux/fb.h extern void fb_pad_unaligned_buffer(u8 *dst, u32 d_pitch, u8 *src, u32 idx, dst 619 include/linux/fb.h extern void fb_pad_aligned_buffer(u8 *dst, u32 d_pitch, u8 *src, u32 s_pitch, u32 height); dst 645 include/linux/fb.h static inline void __fb_pad_aligned_buffer(u8 *dst, u32 d_pitch, dst 655 include/linux/fb.h *dst++ = *src++; dst 656 include/linux/fb.h dst += d_pitch; dst 13 include/linux/fs_stack.h extern void fsstack_copy_inode_size(struct inode *dst, struct inode *src); dst 169 include/linux/hugetlb.h #define copy_hugetlb_page_range(src, dst, vma) ({ BUG(); 0; }) dst 477 include/linux/if_vlan.h static inline void __vlan_hwaccel_copy_tag(struct sk_buff *dst, const struct sk_buff *src) dst 479 include/linux/if_vlan.h dst->vlan_present = src->vlan_present; dst 480 include/linux/if_vlan.h dst->vlan_proto = src->vlan_proto; dst 481 include/linux/if_vlan.h dst->vlan_tci = src->vlan_tci; dst 53 include/linux/inet.h extern int in4_pton(const char *src, int srclen, u8 *dst, int delim, const char **end); dst 54 include/linux/inet.h extern int in6_pton(const char *src, int srclen, u8 *dst, int delim, const char **end); dst 183 include/linux/inetdevice.h __be32 inet_select_addr(const struct net_device *dev, __be32 dst, int scope); dst 184 include/linux/inetdevice.h __be32 inet_confirm_addr(struct net *net, struct in_device *in_dev, __be32 dst, dst 49 include/linux/interconnect-provider.h int (*set)(struct icc_node *src, struct icc_node *dst); dst 98 include/linux/interconnect-provider.h int icc_link_destroy(struct icc_node *src, struct icc_node *dst); dst 120 include/linux/interconnect-provider.h int icc_link_destroy(struct icc_node *src, struct icc_node *dst) dst 631 include/linux/kernel.h extern int __must_check hex2bin(u8 *dst, const char *src, size_t count); dst 632 include/linux/kernel.h extern char *bin2hex(char *dst, const void *src, size_t count); dst 8 include/linux/linkmode.h static inline void linkmode_zero(unsigned long *dst) dst 10 include/linux/linkmode.h bitmap_zero(dst, __ETHTOOL_LINK_MODE_MASK_NBITS); dst 13 include/linux/linkmode.h static inline void linkmode_copy(unsigned long *dst, const unsigned long *src) dst 15 include/linux/linkmode.h bitmap_copy(dst, src, __ETHTOOL_LINK_MODE_MASK_NBITS); dst 18 include/linux/linkmode.h static inline void linkmode_and(unsigned long *dst, const unsigned long *a, dst 21 include/linux/linkmode.h bitmap_and(dst, a, b, 
__ETHTOOL_LINK_MODE_MASK_NBITS); dst 24 include/linux/linkmode.h static inline void linkmode_or(unsigned long *dst, const unsigned long *a, dst 27 include/linux/linkmode.h bitmap_or(dst, a, b, __ETHTOOL_LINK_MODE_MASK_NBITS); dst 35 include/linux/linkmode.h static inline int linkmode_andnot(unsigned long *dst, const unsigned long *src1, dst 38 include/linux/linkmode.h return bitmap_andnot(dst, src1, src2, __ETHTOOL_LINK_MODE_MASK_NBITS); dst 346 include/linux/lz4.h int LZ4_compress_HC(const char *src, char *dst, int srcSize, int dstCapacity, dst 416 include/linux/lz4.h char *dst, int srcSize, int maxDstSize); dst 508 include/linux/lz4.h char *dst, int srcSize, int maxDstSize, int acceleration); dst 25 include/linux/lzo.h unsigned char *dst, size_t *dst_len, void *wrkmem); dst 29 include/linux/lzo.h unsigned char *dst, size_t *dst_len, void *wrkmem); dst 33 include/linux/lzo.h unsigned char *dst, size_t *dst_len); dst 48 include/linux/mailbox/brcm-message.h struct scatterlist *dst; dst 130 include/linux/mempolicy.h int vma_dup_policy(struct vm_area_struct *src, struct vm_area_struct *dst); dst 249 include/linux/mempolicy.h vma_dup_policy(struct vm_area_struct *src, struct vm_area_struct *dst) dst 96 include/linux/mfd/max14577.h unsigned int min_ua, unsigned int max_ua, u8 *dst); dst 193 include/linux/migrate.h unsigned long *dst; dst 1467 include/linux/mm.h int copy_page_range(struct mm_struct *dst, struct mm_struct *src, dst 2854 include/linux/mm.h extern void copy_user_huge_page(struct page *dst, struct page *src, dst 142 include/linux/mmc/sdio_func.h extern int sdio_memcpy_fromio(struct sdio_func *func, void *dst, dst 144 include/linux/mmc/sdio_func.h extern int sdio_readsb(struct sdio_func *func, void *dst, dst 207 include/linux/net.h #define DECLARE_SOCKADDR(type, dst, src) \ dst 208 include/linux/net.h type dst = ({ __sockaddr_check_size(sizeof(*dst)); (type) src; }) dst 349 include/linux/netfilter.h int nf_route(struct net *net, struct dst_entry **dst, struct flowi *fl, dst 163 include/linux/netfilter/x_tables.h void (*compat_from_user)(void *dst, const void *src); dst 164 include/linux/netfilter/x_tables.h int (*compat_to_user)(void __user *dst, const void *src); dst 204 include/linux/netfilter/x_tables.h void (*compat_from_user)(void *dst, const void *src); dst 205 include/linux/netfilter/x_tables.h int (*compat_to_user)(void __user *dst, const void *src); dst 301 include/linux/netfilter/x_tables.h int xt_data_to_user(void __user *dst, const void *src, dst 21 include/linux/netfilter_bridge.h struct dst_entry *dst = skb_dst(skb); dst 23 include/linux/netfilter_bridge.h if (dst && (dst->flags & DST_FAKE_RTABLE)) dst 26 include/linux/netfilter_ipv4.h int nf_ip_route(struct net *net, struct dst_entry **dst, struct flowi *fl, dst 34 include/linux/netfilter_ipv4.h static inline int nf_ip_route(struct net *net, struct dst_entry **dst, dst 49 include/linux/netfilter_ipv6.h int (*route)(struct net *net, struct dst_entry **dst, struct flowi *fl, dst 97 include/linux/netfilter_ipv6.h int __nf_ip6_route(struct net *net, struct dst_entry **dst, dst 100 include/linux/netfilter_ipv6.h static inline int nf_ip6_route(struct net *net, struct dst_entry **dst, dst 107 include/linux/netfilter_ipv6.h return v6ops->route(net, dst, fl, strict); dst 112 include/linux/netfilter_ipv6.h return __nf_ip6_route(net, dst, fl, strict); dst 127 include/linux/nodemask.h #define node_set(node, dst) __node_set((node), &(dst)) dst 133 include/linux/nodemask.h #define node_clear(node, dst) __node_clear((node), &(dst)) 
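The linkmode helpers indexed above (include/linux/linkmode.h) are thin fixed-width wrappers around the generic bitmap_* operations from include/linux/bitmap.h. What follows is a hedged userspace sketch of that wrapper pattern, not kernel code: it assumes a reduced mask width (MASK_NBITS standing in for __ETHTOOL_LINK_MODE_MASK_NBITS) and simplified bitmap helpers (the kernel versions special-case small constant sizes and return whether the result is non-empty).

/*
 * Userspace sketch of the linkmode/bitmap wrapper pattern seen in the
 * include/linux/linkmode.h and include/linux/bitmap.h entries above.
 * Assumptions: MASK_NBITS is a stand-in for __ETHTOOL_LINK_MODE_MASK_NBITS,
 * and the bitmap helpers are simplified (no BITMAP_LAST_WORD_MASK handling).
 */
#include <stdio.h>
#include <string.h>
#include <limits.h>

#define MASK_NBITS    64
#define BITS_PER_LONG (sizeof(unsigned long) * CHAR_BIT)
#define BITS_TO_LONGS(n) (((n) + BITS_PER_LONG - 1) / BITS_PER_LONG)

static void bitmap_zero(unsigned long *dst, unsigned int nbits)
{
        memset(dst, 0, BITS_TO_LONGS(nbits) * sizeof(unsigned long));
}

static void bitmap_and(unsigned long *dst, const unsigned long *a,
                       const unsigned long *b, unsigned int nbits)
{
        for (unsigned int i = 0; i < BITS_TO_LONGS(nbits); i++)
                dst[i] = a[i] & b[i];
}

/* linkmode_and() in the kernel is exactly this: bitmap_and() at a fixed width. */
static void linkmode_and(unsigned long *dst, const unsigned long *a,
                         const unsigned long *b)
{
        bitmap_and(dst, a, b, MASK_NBITS);
}

int main(void)
{
        unsigned long advertised[BITS_TO_LONGS(MASK_NBITS)] = { 0x0f };
        unsigned long supported[BITS_TO_LONGS(MASK_NBITS)]  = { 0x3c };
        unsigned long common[BITS_TO_LONGS(MASK_NBITS)];

        bitmap_zero(common, MASK_NBITS);
        linkmode_and(common, advertised, supported);    /* 0x0f & 0x3c = 0x0c */
        printf("common modes: 0x%lx\n", common[0]);
        return 0;
}

Compiled with any C99 compiler this prints "common modes: 0xc", mirroring how helpers such as ethtool_intersect_link_masks() (indexed above) intersect supported and advertised link-mode masks.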
dst 139 include/linux/nodemask.h #define nodes_setall(dst) __nodes_setall(&(dst), MAX_NUMNODES) dst 145 include/linux/nodemask.h #define nodes_clear(dst) __nodes_clear(&(dst), MAX_NUMNODES) dst 161 include/linux/nodemask.h #define nodes_and(dst, src1, src2) \ dst 162 include/linux/nodemask.h __nodes_and(&(dst), &(src1), &(src2), MAX_NUMNODES) dst 169 include/linux/nodemask.h #define nodes_or(dst, src1, src2) \ dst 170 include/linux/nodemask.h __nodes_or(&(dst), &(src1), &(src2), MAX_NUMNODES) dst 177 include/linux/nodemask.h #define nodes_xor(dst, src1, src2) \ dst 178 include/linux/nodemask.h __nodes_xor(&(dst), &(src1), &(src2), MAX_NUMNODES) dst 185 include/linux/nodemask.h #define nodes_andnot(dst, src1, src2) \ dst 186 include/linux/nodemask.h __nodes_andnot(&(dst), &(src1), &(src2), MAX_NUMNODES) dst 193 include/linux/nodemask.h #define nodes_complement(dst, src) \ dst 194 include/linux/nodemask.h __nodes_complement(&(dst), &(src), MAX_NUMNODES) dst 243 include/linux/nodemask.h #define nodes_shift_right(dst, src, n) \ dst 244 include/linux/nodemask.h __nodes_shift_right(&(dst), &(src), (n), MAX_NUMNODES) dst 251 include/linux/nodemask.h #define nodes_shift_left(dst, src, n) \ dst 252 include/linux/nodemask.h __nodes_shift_left(&(dst), &(src), (n), MAX_NUMNODES) dst 331 include/linux/nodemask.h #define nodemask_parse_user(ubuf, ulen, dst) \ dst 332 include/linux/nodemask.h __nodemask_parse_user((ubuf), (ulen), &(dst), MAX_NUMNODES) dst 339 include/linux/nodemask.h #define nodelist_parse(buf, dst) __nodelist_parse((buf), &(dst), MAX_NUMNODES) dst 353 include/linux/nodemask.h #define nodes_remap(dst, src, old, new) \ dst 354 include/linux/nodemask.h __nodes_remap(&(dst), &(src), &(old), &(new), MAX_NUMNODES) dst 361 include/linux/nodemask.h #define nodes_onto(dst, orig, relmap) \ dst 362 include/linux/nodemask.h __nodes_onto(&(dst), &(orig), &(relmap), MAX_NUMNODES) dst 369 include/linux/nodemask.h #define nodes_fold(dst, orig, sz) \ dst 370 include/linux/nodemask.h __nodes_fold(&(dst), &(orig), sz, MAX_NUMNODES) dst 74 include/linux/perf_event.h typedef unsigned long (*perf_copy_f)(void *dst, const void *src, dst 58 include/linux/ptrace.h extern int ptrace_readdata(struct task_struct *tsk, unsigned long src, char __user *dst, int len); dst 59 include/linux/ptrace.h extern int ptrace_writedata(struct task_struct *tsk, char __user *src, unsigned long dst, int len); dst 86 include/linux/qed/qed_iscsi_if.h struct qed_iscsi_id_params dst; dst 234 include/linux/rio.h void (*dinb) (struct rio_mport *mport, void *dev_id, u16 src, u16 dst, u16 info); dst 37 include/linux/rpmsg.h u32 dst; dst 55 include/linux/rpmsg.h u32 dst; dst 126 include/linux/rpmsg.h int rpmsg_sendto(struct rpmsg_endpoint *ept, void *data, int len, u32 dst); dst 127 include/linux/rpmsg.h int rpmsg_send_offchannel(struct rpmsg_endpoint *ept, u32 src, u32 dst, dst 131 include/linux/rpmsg.h int rpmsg_trysendto(struct rpmsg_endpoint *ept, void *data, int len, u32 dst); dst 132 include/linux/rpmsg.h int rpmsg_trysend_offchannel(struct rpmsg_endpoint *ept, u32 src, u32 dst, dst 192 include/linux/rpmsg.h u32 dst) dst 202 include/linux/rpmsg.h u32 dst, void *data, int len) dst 219 include/linux/rpmsg.h int len, u32 dst) dst 228 include/linux/rpmsg.h u32 dst, void *data, int len) dst 18 include/linux/rtnetlink.h extern int rtnl_put_cacheinfo(struct sk_buff *skb, struct dst_entry *dst, dst 274 include/linux/scif.h int scif_connect(scif_epd_t epd, struct scif_port_id *dst); dst 941 include/linux/skbuff.h static inline void 
skb_dst_set(struct sk_buff *skb, struct dst_entry *dst) dst 943 include/linux/skbuff.h skb->_skb_refdst = (unsigned long)dst; dst 956 include/linux/skbuff.h static inline void skb_dst_set_noref(struct sk_buff *skb, struct dst_entry *dst) dst 959 include/linux/skbuff.h skb->_skb_refdst = (unsigned long)dst | SKB_DST_NOREF; dst 1104 include/linux/skbuff.h struct sk_buff *skb_morph(struct sk_buff *dst, struct sk_buff *src); dst 4124 include/linux/skbuff.h static inline void __skb_ext_copy(struct sk_buff *dst, dst 4127 include/linux/skbuff.h dst->active_extensions = src->active_extensions; dst 4133 include/linux/skbuff.h dst->extensions = ext; dst 4137 include/linux/skbuff.h static inline void skb_ext_copy(struct sk_buff *dst, const struct sk_buff *src) dst 4139 include/linux/skbuff.h skb_ext_put(dst); dst 4140 include/linux/skbuff.h __skb_ext_copy(dst, src); dst 4187 include/linux/skbuff.h static inline void skb_ext_copy(struct sk_buff *dst, const struct sk_buff *s) {} dst 4214 include/linux/skbuff.h static inline void __nf_copy(struct sk_buff *dst, const struct sk_buff *src, dst 4218 include/linux/skbuff.h dst->_nfct = src->_nfct; dst 4223 include/linux/skbuff.h dst->nf_trace = src->nf_trace; dst 4227 include/linux/skbuff.h static inline void nf_copy(struct sk_buff *dst, const struct sk_buff *src) dst 4230 include/linux/skbuff.h nf_conntrack_put(skb_nfct(dst)); dst 4232 include/linux/skbuff.h __nf_copy(dst, src, true); dst 113 include/linux/skmsg.h int sk_msg_clone(struct sock *sk, struct sk_msg *dst, struct sk_msg *src, dst 183 include/linux/skmsg.h static inline void sk_msg_xfer(struct sk_msg *dst, struct sk_msg *src, dst 186 include/linux/skmsg.h dst->sg.data[which] = src->sg.data[which]; dst 187 include/linux/skmsg.h dst->sg.data[which].length = size; dst 188 include/linux/skmsg.h dst->sg.size += size; dst 194 include/linux/skmsg.h static inline void sk_msg_xfer_full(struct sk_msg *dst, struct sk_msg *src) dst 196 include/linux/skmsg.h memcpy(dst, src, sizeof(*src)); dst 19 include/linux/sram.h void *sram_exec_copy(struct gen_pool *pool, void *dst, void *src, size_t size); dst 21 include/linux/sram.h static inline void *sram_exec_copy(struct gen_pool *pool, void *dst, void *src, dst 164 include/linux/string.h static inline __must_check unsigned long memcpy_mcsafe(void *dst, dst 167 include/linux/string.h memcpy(dst, src, cnt); dst 172 include/linux/string.h static inline void memcpy_flushcache(void *dst, const void *src, size_t cnt) dst 174 include/linux/string.h memcpy(dst, src, cnt); dst 27 include/linux/string_helpers.h int string_unescape(char *src, char *dst, size_t size, unsigned int flags); dst 34 include/linux/string_helpers.h static inline int string_unescape_any(char *src, char *dst, size_t size) dst 36 include/linux/string_helpers.h return string_unescape(src, dst, size, UNESCAPE_ANY); dst 54 include/linux/string_helpers.h int string_escape_mem(const char *src, size_t isz, char *dst, size_t osz, dst 57 include/linux/string_helpers.h int string_escape_mem_ascii(const char *src, size_t isz, char *dst, dst 61 include/linux/string_helpers.h char *dst, size_t osz, const char *only) dst 63 include/linux/string_helpers.h return string_escape_mem(src, isz, dst, osz, ESCAPE_ANY_NP, only); dst 66 include/linux/string_helpers.h static inline int string_escape_str(const char *src, char *dst, size_t sz, dst 69 include/linux/string_helpers.h return string_escape_mem(src, strlen(src), dst, sz, flags, only); dst 72 include/linux/string_helpers.h static inline int 
string_escape_str_any_np(const char *src, char *dst, dst 75 include/linux/string_helpers.h return string_escape_str(src, dst, sz, ESCAPE_ANY_NP, only); dst 59 include/linux/sunrpc/addr.h static inline bool __rpc_copy_addr4(struct sockaddr *dst, dst 63 include/linux/sunrpc/addr.h struct sockaddr_in *dsin = (struct sockaddr_in *) dst; dst 85 include/linux/sunrpc/addr.h static inline bool __rpc_copy_addr6(struct sockaddr *dst, dst 89 include/linux/sunrpc/addr.h struct sockaddr_in6 *dsin6 = (struct sockaddr_in6 *) dst; dst 103 include/linux/sunrpc/addr.h static inline bool __rpc_copy_addr6(struct sockaddr *dst, dst 157 include/linux/sunrpc/addr.h static inline bool rpc_copy_addr(struct sockaddr *dst, dst 162 include/linux/sunrpc/addr.h return __rpc_copy_addr4(dst, src); dst 164 include/linux/sunrpc/addr.h return __rpc_copy_addr6(dst, src); dst 167 include/linux/sunrpc/xdr.h static inline void xdr_netobj_dup(struct xdr_netobj *dst, dst 170 include/linux/sunrpc/xdr.h dst->data = kmemdup(src->data, src->len, gfp_mask); dst 171 include/linux/sunrpc/xdr.h dst->len = src->len; dst 8 include/linux/sw842.h u8 *dst, unsigned int *destlen, void *wmem); dst 11 include/linux/sw842.h u8 *dst, unsigned int *destlen); dst 49 include/linux/task_io_accounting_ops.h static inline void task_blk_io_accounting_add(struct task_io_accounting *dst, dst 52 include/linux/task_io_accounting_ops.h dst->read_bytes += src->read_bytes; dst 53 include/linux/task_io_accounting_ops.h dst->write_bytes += src->write_bytes; dst 54 include/linux/task_io_accounting_ops.h dst->cancelled_write_bytes += src->cancelled_write_bytes; dst 85 include/linux/task_io_accounting_ops.h static inline void task_blk_io_accounting_add(struct task_io_accounting *dst, dst 93 include/linux/task_io_accounting_ops.h static inline void task_chr_io_accounting_add(struct task_io_accounting *dst, dst 96 include/linux/task_io_accounting_ops.h dst->rchar += src->rchar; dst 97 include/linux/task_io_accounting_ops.h dst->wchar += src->wchar; dst 98 include/linux/task_io_accounting_ops.h dst->syscr += src->syscr; dst 99 include/linux/task_io_accounting_ops.h dst->syscw += src->syscw; dst 102 include/linux/task_io_accounting_ops.h static inline void task_chr_io_accounting_add(struct task_io_accounting *dst, dst 108 include/linux/task_io_accounting_ops.h static inline void task_io_accounting_add(struct task_io_accounting *dst, dst 111 include/linux/task_io_accounting_ops.h task_chr_io_accounting_add(dst, src); dst 112 include/linux/task_io_accounting_ops.h task_blk_io_accounting_add(dst, src); dst 77 include/linux/textsearch.h const u8 **dst, dst 143 include/linux/ti-emif-sram.h int ti_emif_copy_pm_function_table(struct gen_pool *sram_pool, void *dst); dst 284 include/linux/uaccess.h copy_struct_from_user(void *dst, size_t ksize, const void __user *src, dst 292 include/linux/uaccess.h memset(dst + size, 0, rest); dst 299 include/linux/uaccess.h if (copy_from_user(dst, src, size)) dst 313 include/linux/uaccess.h extern long probe_kernel_read(void *dst, const void *src, size_t size); dst 314 include/linux/uaccess.h extern long __probe_kernel_read(void *dst, const void *src, size_t size); dst 325 include/linux/uaccess.h extern long probe_user_read(void *dst, const void __user *src, size_t size); dst 326 include/linux/uaccess.h extern long __probe_user_read(void *dst, const void __user *src, size_t size); dst 337 include/linux/uaccess.h extern long notrace probe_kernel_write(void *dst, const void *src, size_t size); dst 338 include/linux/uaccess.h extern long notrace 
__probe_kernel_write(void *dst, const void *src, size_t size); dst 349 include/linux/uaccess.h extern long notrace probe_user_write(void __user *dst, const void *src, size_t size); dst 350 include/linux/uaccess.h extern long notrace __probe_user_write(void __user *dst, const void *src, size_t size); dst 352 include/linux/uaccess.h extern long strncpy_from_unsafe(char *dst, const void *unsafe_addr, long count); dst 353 include/linux/uaccess.h extern long strncpy_from_unsafe_user(char *dst, const void __user *unsafe_addr, dst 41 include/linux/uuid.h static inline void guid_copy(guid_t *dst, const guid_t *src) dst 43 include/linux/uuid.h memcpy(dst, src, sizeof(guid_t)); dst 56 include/linux/uuid.h static inline void uuid_copy(uuid_t *dst, const uuid_t *src) dst 58 include/linux/uuid.h memcpy(dst, src, sizeof(uuid_t)); dst 457 include/linux/vmw_vmci_defs.h struct vmci_handle dst; dst 139 include/linux/vringh.h ssize_t vringh_iov_pull_user(struct vringh_iov *riov, void *dst, size_t len); dst 198 include/linux/vringh.h ssize_t vringh_iov_pull_kern(struct vringh_kiov *riov, void *dst, size_t len); dst 359 include/linux/xarray.h unsigned int xa_extract(struct xarray *, void **dst, unsigned long start, dst 249 include/linux/xxhash.h void xxh32_copy_state(struct xxh32_state *dst, const struct xxh32_state *src); dst 257 include/linux/xxhash.h void xxh64_copy_state(struct xxh64_state *dst, const struct xxh64_state *src); dst 591 include/linux/zlib.h extern int zlib_inflate_blob(void *dst, unsigned dst_sz, const void *src, unsigned src_sz); dst 256 include/linux/zstd.h size_t ZSTD_compressCCtx(ZSTD_CCtx *ctx, void *dst, size_t dstCapacity, dst 299 include/linux/zstd.h size_t ZSTD_decompressDCtx(ZSTD_DCtx *ctx, void *dst, size_t dstCapacity, dst 325 include/linux/zstd.h size_t ZSTD_compress_usingDict(ZSTD_CCtx *ctx, void *dst, size_t dstCapacity, dst 346 include/linux/zstd.h size_t ZSTD_decompress_usingDict(ZSTD_DCtx *ctx, void *dst, size_t dstCapacity, dst 406 include/linux/zstd.h size_t ZSTD_compress_usingCDict(ZSTD_CCtx *cctx, void *dst, size_t dstCapacity, dst 457 include/linux/zstd.h size_t ZSTD_decompress_usingDDict(ZSTD_DCtx *dctx, void *dst, dst 487 include/linux/zstd.h void *dst; dst 1015 include/linux/zstd.h size_t ZSTD_compressContinue(ZSTD_CCtx *cctx, void *dst, size_t dstCapacity, dst 1017 include/linux/zstd.h size_t ZSTD_compressEnd(ZSTD_CCtx *cctx, void *dst, size_t dstCapacity, dst 1105 include/linux/zstd.h size_t ZSTD_decompressContinue(ZSTD_DCtx *dctx, void *dst, size_t dstCapacity, dst 1150 include/linux/zstd.h size_t ZSTD_compressBlock(ZSTD_CCtx *cctx, void *dst, size_t dstCapacity, dst 1152 include/linux/zstd.h size_t ZSTD_decompressBlock(ZSTD_DCtx *dctx, void *dst, size_t dstCapacity, dst 213 include/media/dvb_demux.h void (*memcopy)(struct dvb_demux_feed *feed, u8 *dst, dst 60 include/media/vsp1.h struct v4l2_rect dst; dst 193 include/net/af_vsock.h struct sockaddr_vm *dst); dst 234 include/net/bluetooth/bluetooth.h static inline void bacpy(bdaddr_t *dst, const bdaddr_t *src) dst 236 include/net/bluetooth/bluetooth.h memcpy(dst, src, sizeof(bdaddr_t)); dst 239 include/net/bluetooth/bluetooth.h void baswap(bdaddr_t *dst, const bdaddr_t *src); dst 457 include/net/bluetooth/hci_core.h bdaddr_t dst; dst 832 include/net/bluetooth/hci_core.h if (c->type == type && !bacmp(&c->dst, ba)) { dst 856 include/net/bluetooth/hci_core.h if (ba_type == c->dst_type && !bacmp(&c->dst, ba)) { dst 911 include/net/bluetooth/hci_core.h struct hci_conn *hci_conn_add(struct hci_dev *hdev, int type, 
bdaddr_t *dst, dst 922 include/net/bluetooth/hci_core.h struct hci_conn *hci_connect_le_scan(struct hci_dev *hdev, bdaddr_t *dst, dst 925 include/net/bluetooth/hci_core.h struct hci_conn *hci_connect_le(struct hci_dev *hdev, bdaddr_t *dst, dst 928 include/net/bluetooth/hci_core.h struct hci_conn *hci_connect_acl(struct hci_dev *hdev, bdaddr_t *dst, dst 930 include/net/bluetooth/hci_core.h struct hci_conn *hci_connect_sco(struct hci_dev *hdev, int type, bdaddr_t *dst, dst 1052 include/net/bluetooth/hci_core.h struct hci_dev *hci_get_route(bdaddr_t *dst, bdaddr_t *src, u8 src_type); dst 506 include/net/bluetooth/l2cap.h bdaddr_t dst; dst 934 include/net/bluetooth/l2cap.h bdaddr_t *dst, u8 dst_type); dst 237 include/net/bluetooth/rfcomm.h int rfcomm_dlc_open(struct rfcomm_dlc *d, bdaddr_t *src, bdaddr_t *dst, dst 245 include/net/bluetooth/rfcomm.h struct rfcomm_dlc *rfcomm_dlc_exists(bdaddr_t *src, bdaddr_t *dst, u8 channel); dst 278 include/net/bluetooth/rfcomm.h bdaddr_t *dst); dst 307 include/net/bluetooth/rfcomm.h bdaddr_t dst; dst 344 include/net/bluetooth/rfcomm.h bdaddr_t dst; dst 353 include/net/bluetooth/rfcomm.h bdaddr_t dst; dst 431 include/net/bonding.h static inline void bond_hw_addr_copy(u8 *dst, const u8 *src, unsigned int len) dst 434 include/net/bonding.h ether_addr_copy(dst, src); dst 438 include/net/bonding.h memcpy(dst, src, len); dst 588 include/net/bonding.h static inline __be32 bond_confirm_addr(struct net_device *dev, __be32 dst, __be32 local) dst 597 include/net/bonding.h addr = inet_confirm_addr(dev_net(dev), in_dev, dst, local, dst 2672 include/net/cfg80211.h __be32 src, dst; dst 3686 include/net/cfg80211.h const u8 *dst, const u8 *next_hop); dst 3688 include/net/cfg80211.h const u8 *dst); dst 3690 include/net/cfg80211.h const u8 *dst, const u8 *next_hop); dst 3692 include/net/cfg80211.h u8 *dst, u8 *next_hop, struct mpath_info *pinfo); dst 3694 include/net/cfg80211.h int idx, u8 *dst, u8 *next_hop, dst 3697 include/net/cfg80211.h u8 *dst, u8 *mpp, struct mpath_info *pinfo); dst 3699 include/net/cfg80211.h int idx, u8 *dst, u8 *mpp, dst 26 include/net/checksum.h __wsum csum_and_copy_from_user (const void __user *src, void *dst, dst 30 include/net/checksum.h return csum_partial_copy_from_user(src, dst, len, sum, err_ptr); dst 41 include/net/checksum.h (const void *src, void __user *dst, int len, __wsum sum, int *err_ptr) dst 45 include/net/checksum.h if (access_ok(dst, len)) { dst 46 include/net/checksum.h if (copy_to_user(dst, src, len) == 0) dst 153 include/net/dn.h __le16 dst; dst 63 include/net/dn_route.h struct dst_entry dst; dst 102 include/net/dn_route.h static inline void dn_rt_finish_output(struct sk_buff *skb, char *dst, char *src) dst 107 include/net/dn_route.h dst = NULL; dst 109 include/net/dn_route.h if (dev_hard_header(skb, dev, ETH_P_DNA_RT, dst, src, skb->len) >= 0) dst 171 include/net/dsa.h struct dsa_switch_tree *dst; dst 223 include/net/dsa.h struct dsa_switch_tree *dst; dst 88 include/net/dst.h u32 *dst_cow_metrics_generic(struct dst_entry *dst, unsigned long old); dst 97 include/net/dst.h static inline bool dst_metrics_read_only(const struct dst_entry *dst) dst 99 include/net/dst.h return dst->_metrics & DST_METRICS_READ_ONLY; dst 102 include/net/dst.h void __dst_destroy_metrics_generic(struct dst_entry *dst, unsigned long old); dst 104 include/net/dst.h static inline void dst_destroy_metrics_generic(struct dst_entry *dst) dst 106 include/net/dst.h unsigned long val = dst->_metrics; dst 108 include/net/dst.h __dst_destroy_metrics_generic(dst, 
val); dst 111 include/net/dst.h static inline u32 *dst_metrics_write_ptr(struct dst_entry *dst) dst 113 include/net/dst.h unsigned long p = dst->_metrics; dst 118 include/net/dst.h return dst->ops->cow_metrics(dst, p); dst 125 include/net/dst.h static inline void dst_init_metrics(struct dst_entry *dst, dst 129 include/net/dst.h dst->_metrics = ((unsigned long) src_metrics) | dst 144 include/net/dst.h static inline u32 *dst_metrics_ptr(struct dst_entry *dst) dst 146 include/net/dst.h return DST_METRICS_PTR(dst); dst 150 include/net/dst.h dst_metric_raw(const struct dst_entry *dst, const int metric) dst 152 include/net/dst.h u32 *p = DST_METRICS_PTR(dst); dst 158 include/net/dst.h dst_metric(const struct dst_entry *dst, const int metric) dst 163 include/net/dst.h return dst_metric_raw(dst, metric); dst 167 include/net/dst.h dst_metric_advmss(const struct dst_entry *dst) dst 169 include/net/dst.h u32 advmss = dst_metric_raw(dst, RTAX_ADVMSS); dst 172 include/net/dst.h advmss = dst->ops->default_advmss(dst); dst 177 include/net/dst.h static inline void dst_metric_set(struct dst_entry *dst, int metric, u32 val) dst 179 include/net/dst.h u32 *p = dst_metrics_write_ptr(dst); dst 192 include/net/dst.h dst_feature(const struct dst_entry *dst, u32 feature) dst 194 include/net/dst.h return dst_metric(dst, RTAX_FEATURES) & feature; dst 197 include/net/dst.h static inline u32 dst_mtu(const struct dst_entry *dst) dst 199 include/net/dst.h return dst->ops->mtu(dst); dst 203 include/net/dst.h static inline unsigned long dst_metric_rtt(const struct dst_entry *dst, int metric) dst 205 include/net/dst.h return msecs_to_jiffies(dst_metric(dst, metric)); dst 209 include/net/dst.h dst_allfrag(const struct dst_entry *dst) dst 211 include/net/dst.h int ret = dst_feature(dst, RTAX_FEATURE_ALLFRAG); dst 216 include/net/dst.h dst_metric_locked(const struct dst_entry *dst, int metric) dst 218 include/net/dst.h return dst_metric(dst, RTAX_LOCK) & (1<<metric); dst 221 include/net/dst.h static inline void dst_hold(struct dst_entry *dst) dst 228 include/net/dst.h WARN_ON(atomic_inc_not_zero(&dst->__refcnt) == 0); dst 231 include/net/dst.h static inline void dst_use_noref(struct dst_entry *dst, unsigned long time) dst 233 include/net/dst.h if (unlikely(time != dst->lastuse)) { dst 234 include/net/dst.h dst->__use++; dst 235 include/net/dst.h dst->lastuse = time; dst 239 include/net/dst.h static inline void dst_hold_and_use(struct dst_entry *dst, unsigned long time) dst 241 include/net/dst.h dst_hold(dst); dst 242 include/net/dst.h dst_use_noref(dst, time); dst 245 include/net/dst.h static inline struct dst_entry *dst_clone(struct dst_entry *dst) dst 247 include/net/dst.h if (dst) dst 248 include/net/dst.h dst_hold(dst); dst 249 include/net/dst.h return dst; dst 252 include/net/dst.h void dst_release(struct dst_entry *dst); dst 254 include/net/dst.h void dst_release_immediate(struct dst_entry *dst); dst 295 include/net/dst.h static inline bool dst_hold_safe(struct dst_entry *dst) dst 297 include/net/dst.h return atomic_inc_not_zero(&dst->__refcnt); dst 310 include/net/dst.h struct dst_entry *dst = skb_dst(skb); dst 313 include/net/dst.h if (!dst_hold_safe(dst)) dst 314 include/net/dst.h dst = NULL; dst 316 include/net/dst.h skb->_skb_refdst = (unsigned long)dst; dst 369 include/net/dst.h const struct dst_entry *dst; dst 371 include/net/dst.h dst = skb_dst(skb); dst 372 include/net/dst.h if (dst) dst 373 include/net/dst.h return dst->tclassid; dst 385 include/net/dst.h void dst_init(struct dst_entry *dst, struct dst_ops 
*ops, dst 388 include/net/dst.h struct dst_entry *dst_destroy(struct dst_entry *dst); dst 389 include/net/dst.h void dst_dev_put(struct dst_entry *dst); dst 391 include/net/dst.h static inline void dst_confirm(struct dst_entry *dst) dst 395 include/net/dst.h static inline struct neighbour *dst_neigh_lookup(const struct dst_entry *dst, const void *daddr) dst 397 include/net/dst.h struct neighbour *n = dst->ops->neigh_lookup(dst, NULL, daddr); dst 401 include/net/dst.h static inline struct neighbour *dst_neigh_lookup_skb(const struct dst_entry *dst, dst 404 include/net/dst.h struct neighbour *n = dst->ops->neigh_lookup(dst, skb, NULL); dst 408 include/net/dst.h static inline void dst_confirm_neigh(const struct dst_entry *dst, dst 411 include/net/dst.h if (dst->ops->confirm_neigh) dst 412 include/net/dst.h dst->ops->confirm_neigh(dst, daddr); dst 417 include/net/dst.h struct dst_entry *dst = skb_dst(skb); dst 418 include/net/dst.h if (dst && dst->ops && dst->ops->link_failure) dst 419 include/net/dst.h dst->ops->link_failure(skb); dst 422 include/net/dst.h static inline void dst_set_expires(struct dst_entry *dst, int timeout) dst 429 include/net/dst.h if (dst->expires == 0 || time_before(expires, dst->expires)) dst 430 include/net/dst.h dst->expires = expires; dst 445 include/net/dst.h static inline struct dst_entry *dst_check(struct dst_entry *dst, u32 cookie) dst 447 include/net/dst.h if (dst->obsolete) dst 448 include/net/dst.h dst = dst->ops->check(dst, cookie); dst 449 include/net/dst.h return dst; dst 487 include/net/dst.h static inline struct xfrm_state *dst_xfrm(const struct dst_entry *dst) dst 508 include/net/dst.h static inline struct xfrm_state *dst_xfrm(const struct dst_entry *dst) dst 510 include/net/dst.h return dst->xfrm; dst 516 include/net/dst.h struct dst_entry *dst = skb_dst(skb); dst 518 include/net/dst.h if (dst && dst->ops->update_pmtu) dst 519 include/net/dst.h dst->ops->update_pmtu(dst, NULL, skb, mtu, true); dst 525 include/net/dst.h struct dst_entry *dst = skb_dst(skb); dst 527 include/net/dst.h if (dst && dst->ops->update_pmtu) dst 528 include/net/dst.h dst->ops->update_pmtu(dst, NULL, skb, mtu, false); dst 43 include/net/dst_cache.h void dst_cache_set_ip4(struct dst_cache *dst_cache, struct dst_entry *dst, dst 56 include/net/dst_cache.h void dst_cache_set_ip6(struct dst_cache *dst_cache, struct dst_entry *dst, dst 20 include/net/dst_metadata.h struct dst_entry dst; dst 32 include/net/dst_metadata.h if (md_dst && md_dst->dst.flags & DST_METADATA) dst 42 include/net/dst_metadata.h struct dst_entry *dst; dst 47 include/net/dst_metadata.h dst = skb_dst(skb); dst 48 include/net/dst_metadata.h if (dst && dst->lwtstate) dst 49 include/net/dst_metadata.h return lwt_tun_info(dst->lwtstate); dst 56 include/net/dst_metadata.h struct dst_entry *dst = skb_dst(skb); dst 58 include/net/dst_metadata.h return dst && !(dst->flags & DST_METADATA); dst 125 include/net/dst_metadata.h dst_hold(&new_md->dst); dst 126 include/net/dst_metadata.h skb_dst_set(skb, &new_md->dst); dst 132 include/net/dst_metadata.h struct metadata_dst *dst; dst 134 include/net/dst_metadata.h dst = tun_dst_unclone(skb); dst 135 include/net/dst_metadata.h if (IS_ERR(dst)) dst 138 include/net/dst_metadata.h return &dst->u.tun_info; dst 196 include/net/dst_metadata.h info->key.u.ipv6.dst = *daddr; dst 29 include/net/dst_ops.h void (*update_pmtu)(struct dst_entry *dst, struct sock *sk, dst 32 include/net/dst_ops.h void (*redirect)(struct dst_entry *dst, struct sock *sk, dst 35 include/net/dst_ops.h struct neighbour 
* (*neigh_lookup)(const struct dst_entry *dst, dst 38 include/net/dst_ops.h void (*confirm_neigh)(const struct dst_entry *dst, dst 46 include/net/dst_ops.h static inline int dst_entries_get_fast(struct dst_ops *dst) dst 48 include/net/dst_ops.h return percpu_counter_read_positive(&dst->pcpuc_entries); dst 51 include/net/dst_ops.h static inline int dst_entries_get_slow(struct dst_ops *dst) dst 53 include/net/dst_ops.h return percpu_counter_sum_positive(&dst->pcpuc_entries); dst 56 include/net/dst_ops.h static inline void dst_entries_add(struct dst_ops *dst, int val) dst 58 include/net/dst_ops.h percpu_counter_add(&dst->pcpuc_entries, val); dst 61 include/net/dst_ops.h static inline int dst_entries_init(struct dst_ops *dst) dst 63 include/net/dst_ops.h return percpu_counter_init(&dst->pcpuc_entries, 0, GFP_KERNEL); dst 66 include/net/dst_ops.h static inline void dst_entries_destroy(struct dst_ops *dst) dst 68 include/net/dst_ops.h percpu_counter_destroy(&dst->pcpuc_entries); dst 90 include/net/flow_dissector.h __be32 dst; dst 101 include/net/flow_dissector.h struct in6_addr dst; dst 154 include/net/flow_dissector.h __be16 dst; dst 183 include/net/flow_dissector.h unsigned char dst[ETH_ALEN]; dst 41 include/net/inet_connection_sock.h struct dst_entry *dst, dst 156 include/net/inet_sock.h struct dst_entry *dst; dst 404 include/net/ip.h static inline int ip_mtu_locked(const struct dst_entry *dst) dst 406 include/net/ip.h const struct rtable *rt = (const struct rtable *)dst; dst 408 include/net/ip.h return rt->rt_mtu_locked || dst_metric_locked(dst, RTAX_MTU); dst 412 include/net/ip.h int ip_dont_fragment(const struct sock *sk, const struct dst_entry *dst) dst 418 include/net/ip.h !ip_mtu_locked(dst)); dst 438 include/net/ip.h static inline unsigned int ip_dst_mtu_maybe_forward(const struct dst_entry *dst, dst 441 include/net/ip.h struct net *net = dev_net(dst->dev); dst 444 include/net/ip.h ip_mtu_locked(dst) || dst 446 include/net/ip.h return dst_mtu(dst); dst 448 include/net/ip.h return min(READ_ONCE(dst->dev->mtu), IP_MAX_MTU); dst 475 include/net/ip.h void ip_dst_init_metrics(struct dst_entry *dst, struct dst_metrics *fib_metrics) dst 477 include/net/ip.h dst_init_metrics(dst, fib_metrics->metrics, true); dst 480 include/net/ip.h dst->_metrics |= DST_METRICS_REFCOUNTED; dst 486 include/net/ip.h void ip_dst_metrics_put(struct dst_entry *dst) dst 488 include/net/ip.h struct dst_metrics *p = (struct dst_metrics *)DST_METRICS_PTR(dst); dst 538 include/net/ip.h BUILD_BUG_ON(offsetof(typeof(flow->addrs), v4addrs.dst) != dst 178 include/net/ip6_fib.h struct dst_entry dst; dst 211 include/net/ip6_fib.h static inline struct inet6_dev *ip6_dst_idev(struct dst_entry *dst) dst 213 include/net/ip6_fib.h return ((struct rt6_info *)dst)->rt6i_idev; dst 283 include/net/ip6_fib.h BUILD_BUG_ON(offsetof(struct rt6_info, dst) != 0); dst 284 include/net/ip6_fib.h dst_release(&rt->dst); dst 526 include/net/ip6_fib.h fl6->fl6_dport = flkeys->ports.dst; dst 214 include/net/ip6_route.h const struct dst_entry *dst = skb_dst(skb); dst 217 include/net/ip6_route.h if (dst) dst 218 include/net/ip6_route.h rt6 = container_of(dst, struct rt6_info, dst); dst 226 include/net/ip6_route.h static inline void ip6_dst_store(struct sock *sk, struct dst_entry *dst, dst 232 include/net/ip6_route.h np->dst_cookie = rt6_get_cookie((struct rt6_info *)dst); dst 233 include/net/ip6_route.h sk_setup_caps(sk, dst); dst 240 include/net/ip6_route.h void ip6_sk_dst_store_flow(struct sock *sk, struct dst_entry *dst, dst 250 
include/net/ip6_route.h static inline bool ipv6_anycast_destination(const struct dst_entry *dst, dst 253 include/net/ip6_route.h struct rt6_info *rt = (struct rt6_info *)dst; dst 310 include/net/ip6_route.h static inline unsigned int ip6_dst_mtu_forward(const struct dst_entry *dst) dst 315 include/net/ip6_route.h if (dst_metric_locked(dst, RTAX_MTU)) { dst 316 include/net/ip6_route.h mtu = dst_metric_raw(dst, RTAX_MTU); dst 323 include/net/ip6_route.h idev = __in6_dev_get(dst->dev); dst 209 include/net/ip_fib.h u32 dst; dst 394 include/net/ip_fib.h fl4->fl4_dport = flkeys->ports.dst; dst 409 include/net/ip_fib.h int fib_validate_source(struct sk_buff *skb, __be32 src, __be32 dst, dst 34 include/net/ip_tunnels.h #define IP_TUNNEL_KEY_IPV4_PAD offsetofend(struct ip_tunnel_key, u.ipv4.dst) dst 44 include/net/ip_tunnels.h __be32 dst; dst 48 include/net/ip_tunnels.h struct in6_addr dst; dst 178 include/net/ip_tunnels.h key->u.ipv4.dst = daddr; dst 415 include/net/ip_tunnels.h __be32 src, __be32 dst, u8 proto, dst 150 include/net/ip_vs.h static inline void ip_vs_addr_copy(int af, union nf_inet_addr *dst, dst 155 include/net/ip_vs.h dst->in6 = src->in6; dst 158 include/net/ip_vs.h dst->ip = src->ip; dst 161 include/net/ip_vs.h static inline void ip_vs_addr_set(int af, union nf_inet_addr *dst, dst 166 include/net/ip_vs.h dst->in6 = src->in6; dst 170 include/net/ip_vs.h dst->ip = src->ip; dst 171 include/net/ip_vs.h dst->all[1] = 0; dst 172 include/net/ip_vs.h dst->all[2] = 0; dst 173 include/net/ip_vs.h dst->all[3] = 0; dst 1458 include/net/ip_vs.h void ip_vs_read_estimator(struct ip_vs_kstats *dst, struct ip_vs_stats *stats); dst 316 include/net/ipv6.h struct in6_addr dst; dst 811 include/net/ipv6.h int ip6_dst_hoplimit(struct dst_entry *dst); dst 814 include/net/ipv6.h struct dst_entry *dst) dst 823 include/net/ipv6.h hlimit = ip6_dst_hoplimit(dst); dst 834 include/net/ipv6.h BUILD_BUG_ON(offsetof(typeof(flow->addrs), v6addrs.dst) != dst 1018 include/net/ipv6.h int ip6_dst_lookup(struct net *net, struct sock *sk, struct dst_entry **dst, dst 132 include/net/lwtunnel.h static inline void lwtunnel_set_redirect(struct dst_entry *dst) dst 134 include/net/lwtunnel.h if (lwtunnel_output_redirect(dst->lwtstate)) { dst 135 include/net/lwtunnel.h dst->lwtstate->orig_output = dst->output; dst 136 include/net/lwtunnel.h dst->output = lwtunnel_output; dst 138 include/net/lwtunnel.h if (lwtunnel_input_redirect(dst->lwtstate)) { dst 139 include/net/lwtunnel.h dst->lwtstate->orig_input = dst->input; dst 140 include/net/lwtunnel.h dst->input = lwtunnel_input; dst 174 include/net/lwtunnel.h static inline void lwtunnel_set_redirect(struct dst_entry *dst) dst 546 include/net/neighbour.h static inline void neigh_ha_snapshot(char *dst, const struct neighbour *n, dst 553 include/net/neighbour.h memcpy(dst, n->ha, dev->addr_len); dst 114 include/net/netfilter/nf_conntrack.h tuplehash[hash->tuple.dst.dir]); dst 124 include/net/netfilter/nf_conntrack.h return ct->tuplehash[IP_CT_DIR_ORIGINAL].tuple.dst.protonum; dst 72 include/net/netfilter/nf_conntrack_tuple.h } dst; dst 86 include/net/netfilter/nf_conntrack_tuple.h t, t->dst.protonum, dst 88 include/net/netfilter/nf_conntrack_tuple.h &t->dst.u3.ip, ntohs(t->dst.u.all)); dst 96 include/net/netfilter/nf_conntrack_tuple.h t, t->dst.protonum, dst 98 include/net/netfilter/nf_conntrack_tuple.h t->dst.u3.all, ntohs(t->dst.u.all)); dst 116 include/net/netfilter/nf_conntrack_tuple.h ((enum ip_conntrack_dir)(h)->tuple.dst.dir) dst 135 include/net/netfilter/nf_conntrack_tuple.h 
return (nf_inet_addr_cmp(&t1->dst.u3, &t2->dst.u3) && dst 136 include/net/netfilter/nf_conntrack_tuple.h t1->dst.u.all == t2->dst.u.all && dst 137 include/net/netfilter/nf_conntrack_tuple.h t1->dst.protonum == t2->dst.protonum); dst 172 include/net/netfilter/nf_conntrack_tuple.h t1->dst.protonum != t2->dst.protonum) dst 85 include/net/netfilter/nf_flow_table.h struct dst_entry *dst; dst 143 include/net/netfilter/nf_tables.h static inline void nft_data_copy(u32 *dst, const struct nft_data *src, dst 146 include/net/netfilter/nf_tables.h memcpy(dst, src, len); dst 780 include/net/netfilter/nf_tables.h int (*clone)(struct nft_expr *dst, dst 442 include/net/netlink.h size_t nla_strlcpy(char *dst, const struct nlattr *nla, size_t dstsize); dst 40 include/net/phonet/pn_dev.h void rtm_phonet_notify(int event, struct net_device *dev, u8 dst); dst 310 include/net/pkt_cls.h void tcf_exts_change(struct tcf_exts *dst, struct tcf_exts *src); dst 50 include/net/route.h struct dst_entry dst; dst 182 include/net/route.h int ip_route_input_noref(struct sk_buff *skb, __be32 dst, __be32 src, dst 184 include/net/route.h int ip_route_input_rcu(struct sk_buff *skb, __be32 dst, __be32 src, dst 188 include/net/route.h static inline int ip_route_input(struct sk_buff *skb, __be32 dst, __be32 src, dst 194 include/net/route.h err = ip_route_input_noref(skb, dst, src, tos, devin); dst 244 include/net/route.h BUILD_BUG_ON(offsetof(struct rtable, dst) != 0); dst 245 include/net/route.h dst_release(&rt->dst); dst 281 include/net/route.h static inline void ip_route_connect_init(struct flowi4 *fl4, __be32 dst, __be32 src, dst 292 include/net/route.h protocol, flow_flags, dst, src, dport, sport, dst 297 include/net/route.h __be32 dst, __be32 src, u32 tos, dst 305 include/net/route.h ip_route_connect_init(fl4, dst, src, tos, oif, protocol, dst 308 include/net/route.h if (!dst || !src) { dst 347 include/net/route.h static inline int ip4_dst_hoplimit(const struct dst_entry *dst) dst 349 include/net/route.h int hoplimit = dst_metric_raw(dst, RTAX_HOPLIMIT); dst 350 include/net/route.h struct net *net = dev_net(dst->dev); dst 373 include/net/route.h struct net_device *dev = rt->dst.dev; dst 565 include/net/sctp/sctp.h if (t->dst && !dst_check(t->dst, t->dst_cookie)) dst 568 include/net/sctp/sctp.h return t->dst; dst 590 include/net/sctp/sctp.h static inline __u32 sctp_dst_mtu(const struct dst_entry *dst) dst 592 include/net/sctp/sctp.h return SCTP_TRUNC4(max_t(__u32, dst_mtu(dst), dst 598 include/net/sctp/sctp.h __u32 pmtu = sctp_dst_mtu(t->dst); dst 460 include/net/sctp/structs.h void (*addr_copy) (union sctp_addr *dst, dst 856 include/net/sctp/structs.h struct dst_entry *dst; dst 1879 include/net/sock.h struct dst_entry *dst; dst 1882 include/net/sock.h dst = rcu_dereference(sk->sk_dst_cache); dst 1883 include/net/sock.h if (dst && !atomic_inc_not_zero(&dst->__refcnt)) dst 1884 include/net/sock.h dst = NULL; dst 1886 include/net/sock.h return dst; dst 1891 include/net/sock.h struct dst_entry *ndst, *dst = __sk_dst_get(sk); dst 1895 include/net/sock.h if (dst && dst->ops->negative_advice) { dst 1896 include/net/sock.h ndst = dst->ops->negative_advice(dst); dst 1898 include/net/sock.h if (ndst != dst) { dst 1907 include/net/sock.h __sk_dst_set(struct sock *sk, struct dst_entry *dst) dst 1915 include/net/sock.h rcu_assign_pointer(sk->sk_dst_cache, dst); dst 1920 include/net/sock.h sk_dst_set(struct sock *sk, struct dst_entry *dst) dst 1926 include/net/sock.h old_dst = xchg((__force struct dst_entry **)&sk->sk_dst_cache, dst); dst 
1973 include/net/sock.h void sk_setup_caps(struct sock *sk, struct dst_entry *dst); dst 388 include/net/tcp.h bool tcp_peer_is_proven(struct request_sock *req, struct dst_entry *dst); dst 438 include/net/tcp.h void tcp_ca_openreq_child(struct sock *sk, const struct dst_entry *dst); dst 441 include/net/tcp.h struct dst_entry *dst, dst 452 include/net/tcp.h struct sk_buff *tcp_make_synack(const struct sock *sk, struct dst_entry *dst, dst 465 include/net/tcp.h struct dst_entry *dst, u32 tsoff); dst 555 include/net/tcp.h const struct net *net, const struct dst_entry *dst); dst 697 include/net/tcp.h const struct dst_entry *dst = __sk_dst_get(sk); dst 700 include/net/tcp.h if (dst && dst_metric_locked(dst, RTAX_RTO_MIN)) dst 701 include/net/tcp.h rto_min = dst_metric_rtt(dst, RTAX_RTO_MIN); dst 710 include/net/tcp.h static inline bool tcp_ca_dst_locked(const struct dst_entry *dst) dst 712 include/net/tcp.h return dst_metric_locked(dst, RTAX_CC_ALGO); dst 1220 include/net/tcp.h __u32 tcp_init_cwnd(const struct tcp_sock *tp, const struct dst_entry *dst); dst 1419 include/net/tcp.h const struct dst_entry *dst); dst 1657 include/net/tcp.h const struct dst_entry *dst); dst 1997 include/net/tcp.h int (*send_synack)(const struct sock *sk, struct dst_entry *dst, dst 278 include/net/udp.h bool udp_sk_rx_dst_set(struct sock *sk, struct dst_entry *dst); dst 288 include/net/udp.h void udp4_hwcsum(struct sk_buff *skb, __be32 src, __be32 dst); dst 142 include/net/udp_tunnel.h __be32 src, __be32 dst, __u8 tos, __u8 ttl, dst 147 include/net/udp_tunnel.h int udp_tunnel6_xmit_skb(struct dst_entry *dst, struct sock *sk, dst 928 include/net/xfrm.h struct dst_entry dst; dst 945 include/net/xfrm.h static inline struct dst_entry *xfrm_dst_path(const struct dst_entry *dst) dst 948 include/net/xfrm.h if (dst->xfrm) { dst 949 include/net/xfrm.h const struct xfrm_dst *xdst = (const struct xfrm_dst *) dst; dst 954 include/net/xfrm.h return (struct dst_entry *) dst; dst 957 include/net/xfrm.h static inline struct dst_entry *xfrm_dst_child(const struct dst_entry *dst) dst 960 include/net/xfrm.h if (dst->xfrm) { dst 961 include/net/xfrm.h struct xfrm_dst *xdst = (struct xfrm_dst *) dst; dst 978 include/net/xfrm.h if (likely(xdst->u.dst.xfrm)) dst 979 include/net/xfrm.h xfrm_state_put(xdst->u.dst.xfrm); dst 983 include/net/xfrm.h void xfrm_dst_ifdown(struct dst_entry *dst, struct net_device *dev); dst 1504 include/net/xfrm.h void xfrm_tmpl_sort(struct xfrm_tmpl **dst, struct xfrm_tmpl **src, int n, dst 1506 include/net/xfrm.h void xfrm_state_sort(struct xfrm_state **dst, struct xfrm_state **src, int n, dst 1863 include/net/xfrm.h static inline bool xfrm_dst_offload_ok(struct dst_entry *dst) dst 1865 include/net/xfrm.h struct xfrm_state *x = dst->xfrm; dst 1871 include/net/xfrm.h xdst = (struct xfrm_dst *) dst; dst 1874 include/net/xfrm.h if (x->xso.offload_handle && (x->xso.dev == xfrm_dst_path(dst)->dev) && dst 1937 include/net/xfrm.h static inline bool xfrm_dst_offload_ok(struct dst_entry *dst) dst 42 include/rdma/ib_marshall.h struct ib_uverbs_qp_attr *dst, dst 46 include/rdma/ib_marshall.h struct ib_uverbs_ah_attr *dst, dst 49 include/rdma/ib_marshall.h void ib_copy_path_rec_to_user(struct ib_user_path_rec *dst, dst 52 include/rdma/ib_marshall.h void ib_copy_path_rec_from_user(struct sa_path_rec *dst, dst 76 include/rdma/ib_umem.h int ib_umem_copy_from(void *dst, struct ib_umem *umem, size_t offset, dst 94 include/rdma/ib_umem.h static inline int ib_umem_copy_from(void *dst, struct ib_umem *umem, size_t offset, dst 344 
include/scsi/iscsi_if.h } dst; dst 990 include/scsi/libfc.h int fc_exch_mgr_list_clone(struct fc_lport *src, struct fc_lport *dst); dst 209 include/sound/core.h int copy_to_user_fromio(void __user *dst, const volatile void __iomem *src, size_t count); dst 210 include/sound/core.h int copy_from_user_toio(volatile void __iomem *dst, const void __user *src, size_t count); dst 1831 include/sound/emu10k1.h int snd_emu1010_fpga_link_dst_src_write(struct snd_emu10k1 * emu, u32 dst, u32 src); dst 77 include/trace/events/bridge.h __string(dev, f->dst ? f->dst->dev->name : "null") dst 84 include/trace/events/bridge.h __assign_str(dev, f->dst ? f->dst->dev->name : "null"); dst 30 include/trace/events/fib.h __array( __u8, dst, 4 ) dst 55 include/trace/events/fib.h p32 = (__be32 *) __entry->dst; dst 96 include/trace/events/fib.h __entry->src, __entry->sport, __entry->dst, __entry->dport, dst 29 include/trace/events/fib6.h __array( __u8, dst, 16 ) dst 52 include/trace/events/fib6.h in6 = (struct in6_addr *)__entry->dst; dst 84 include/trace/events/fib6.h __entry->src, __entry->sport, __entry->dst, __entry->dport, dst 673 include/trace/trace_events.h #define __assign_str(dst, src) \ dst 674 include/trace/trace_events.h strcpy(__get_str(dst), (src) ? (const char *)(src) : "(null)"); dst 683 include/trace/trace_events.h #define __assign_bitmask(dst, src, nr_bits) \ dst 684 include/trace/trace_events.h memcpy(__get_bitmask(dst), (src), __bitmask_size_in_bytes(nr_bits)) dst 495 include/uapi/drm/vmwgfx_drm.h struct drm_vmw_rect dst; dst 285 include/uapi/linux/batadv_packet.h __u8 dst[ETH_ALEN]; dst 308 include/uapi/linux/batadv_packet.h __u8 dst[ETH_ALEN]; dst 337 include/uapi/linux/batadv_packet.h __u8 dst[ETH_ALEN]; dst 376 include/uapi/linux/batadv_packet.h __u8 dst[ETH_ALEN]; dst 539 include/uapi/linux/batadv_packet.h __u8 dst[ETH_ALEN]; dst 693 include/uapi/linux/btrfs.h __u64 dst; dst 57 include/uapi/linux/ivtv.h struct v4l2_rect dst; dst 30 include/uapi/linux/netfilter/xt_HMARK.h __u16 dst; dst 34 include/uapi/linux/netfilter/xt_HMARK.h __be16 dst; dst 73 include/uapi/linux/netfilter_ipv4/ip_tables.h struct in_addr src, dst; dst 66 include/uapi/linux/netfilter_ipv6/ip6_tables.h struct in6_addr src, dst; dst 21 include/uapi/linux/rpmsg.h __u32 dst; dst 203 include/uapi/linux/userfaultfd.h __u64 dst; dst 439 include/video/omapfb_dss.h struct omap_dss_device *dst); dst 441 include/video/omapfb_dss.h struct omap_dss_device *dst); dst 458 include/video/omapfb_dss.h struct omap_dss_device *dst); dst 460 include/video/omapfb_dss.h struct omap_dss_device *dst); dst 477 include/video/omapfb_dss.h struct omap_dss_device *dst); dst 479 include/video/omapfb_dss.h struct omap_dss_device *dst); dst 494 include/video/omapfb_dss.h struct omap_dss_device *dst); dst 496 include/video/omapfb_dss.h struct omap_dss_device *dst); dst 519 include/video/omapfb_dss.h struct omap_dss_device *dst); dst 521 include/video/omapfb_dss.h struct omap_dss_device *dst); dst 543 include/video/omapfb_dss.h struct omap_dss_device *dst); dst 545 include/video/omapfb_dss.h struct omap_dss_device *dst); dst 686 include/video/omapfb_dss.h struct omap_dss_device *dst; dst 118 ipc/msgutil.c struct msg_msg *copy_msg(struct msg_msg *src, struct msg_msg *dst) dst 124 ipc/msgutil.c if (src->m_ts > dst->m_ts) dst 128 ipc/msgutil.c memcpy(dst + 1, src + 1, alen); dst 130 ipc/msgutil.c for (dst_pseg = dst->next, src_pseg = src->next; dst 139 ipc/msgutil.c dst->m_type = src->m_type; dst 140 ipc/msgutil.c dst->m_ts = src->m_ts; dst 142 ipc/msgutil.c 
return dst; dst 145 ipc/msgutil.c struct msg_msg *copy_msg(struct msg_msg *src, struct msg_msg *dst) dst 198 ipc/util.h extern struct msg_msg *copy_msg(struct msg_msg *src, struct msg_msg *dst); dst 1207 kernel/bpf/cgroup.c static int copy_sysctl_value(char *dst, size_t dst_len, char *src, dst 1210 kernel/bpf/cgroup.c if (!dst) dst 1217 kernel/bpf/cgroup.c memset(dst, 0, dst_len); dst 1221 kernel/bpf/cgroup.c memcpy(dst, src, min(dst_len, src_len)); dst 1224 kernel/bpf/cgroup.c memset(dst + src_len, '\0', dst_len - src_len); dst 1228 kernel/bpf/cgroup.c dst[dst_len - 1] = '\0'; dst 270 kernel/bpf/core.c struct bpf_insn *dst; dst 286 kernel/bpf/core.c dst = (void *)raw; dst 288 kernel/bpf/core.c dst[i] = fp->insnsi[i]; dst 290 kernel/bpf/core.c dst[i].code == (BPF_LD | BPF_IMM | BPF_DW) && dst 291 kernel/bpf/core.c (dst[i].src_reg == BPF_PSEUDO_MAP_FD || dst 292 kernel/bpf/core.c dst[i].src_reg == BPF_PSEUDO_MAP_VALUE)) { dst 294 kernel/bpf/core.c dst[i].imm = 0; dst 296 kernel/bpf/core.c dst[i].code == 0 && dst 297 kernel/bpf/core.c dst[i].dst_reg == 0 && dst 298 kernel/bpf/core.c dst[i].src_reg == 0 && dst 299 kernel/bpf/core.c dst[i].off == 0) { dst 301 kernel/bpf/core.c dst[i].imm = 0; dst 461 kernel/bpf/devmap.c int dev_map_enqueue(struct bpf_dtab_netdev *dst, struct xdp_buff *xdp, dst 464 kernel/bpf/devmap.c struct net_device *dev = dst->dev; dst 479 kernel/bpf/devmap.c return bq_enqueue(dst, xdpf, dev_rx); dst 482 kernel/bpf/devmap.c int dev_map_generic_redirect(struct bpf_dtab_netdev *dst, struct sk_buff *skb, dst 487 kernel/bpf/devmap.c err = xdp_ok_fwd_dev(dst->dev, skb->len); dst 490 kernel/bpf/devmap.c skb->dev = dst->dev; dst 299 kernel/bpf/helpers.c void copy_map_value_locked(struct bpf_map *map, void *dst, void *src, dst 307 kernel/bpf/helpers.c lock = dst + map->spin_lock_off; dst 310 kernel/bpf/helpers.c copy_map_value(map, dst, src); dst 205 kernel/bpf/queue_stack_maps.c void *dst; dst 228 kernel/bpf/queue_stack_maps.c dst = &qs->elements[qs->head * qs->map.value_size]; dst 229 kernel/bpf/queue_stack_maps.c memcpy(dst, value, qs->map.value_size); dst 231 kernel/bpf/syscall.c void bpf_map_charge_move(struct bpf_map_memory *dst, dst 234 kernel/bpf/syscall.c *dst = *src; dst 473 kernel/bpf/syscall.c static int bpf_obj_name_cpy(char *dst, const char *src) dst 477 kernel/bpf/syscall.c memset(dst, 0, BPF_OBJ_NAME_LEN); dst 483 kernel/bpf/syscall.c *dst++ = *src++; dst 538 kernel/bpf/verifier.c static int copy_##NAME##_state(struct bpf_func_state *dst, \ dst 543 kernel/bpf/verifier.c if (WARN_ON_ONCE(dst->COUNT < src->COUNT)) { \ dst 545 kernel/bpf/verifier.c memset(dst, 0, sizeof(*dst)); \ dst 548 kernel/bpf/verifier.c memcpy(dst->FIELD, src->FIELD, \ dst 654 kernel/bpf/verifier.c static int transfer_reference_state(struct bpf_func_state *dst, dst 657 kernel/bpf/verifier.c int err = realloc_reference_state(dst, src->acquired_refs, false); dst 660 kernel/bpf/verifier.c err = copy_reference_state(dst, src); dst 699 kernel/bpf/verifier.c static int copy_func_state(struct bpf_func_state *dst, dst 704 kernel/bpf/verifier.c err = realloc_func_state(dst, src->allocated_stack, src->acquired_refs, dst 708 kernel/bpf/verifier.c memcpy(dst, src, offsetof(struct bpf_func_state, acquired_refs)); dst 709 kernel/bpf/verifier.c err = copy_reference_state(dst, src); dst 712 kernel/bpf/verifier.c return copy_stack_state(dst, src); dst 718 kernel/bpf/verifier.c struct bpf_func_state *dst; dst 744 kernel/bpf/verifier.c dst = dst_state->frame[i]; dst 745 kernel/bpf/verifier.c if (!dst) { dst 746 
kernel/bpf/verifier.c dst = kzalloc(sizeof(*dst), GFP_KERNEL); dst 747 kernel/bpf/verifier.c if (!dst) dst 749 kernel/bpf/verifier.c dst_state->frame[i] = dst; dst 751 kernel/bpf/verifier.c err = copy_func_state(dst, src->frame[i]); dst 4372 kernel/bpf/verifier.c u32 dst = insn->dst_reg, src = insn->src_reg; dst 4376 kernel/bpf/verifier.c dst_reg = ®s[dst]; dst 4391 kernel/bpf/verifier.c dst); dst 4398 kernel/bpf/verifier.c dst, reg_type_str[ptr_reg->type]); dst 4410 kernel/bpf/verifier.c dst, reg_type_str[ptr_reg->type]); dst 4415 kernel/bpf/verifier.c off_reg == dst_reg ? dst : src); dst 4437 kernel/bpf/verifier.c verbose(env, "R%d tried to add from different maps or paths\n", dst); dst 4492 kernel/bpf/verifier.c verbose(env, "R%d tried to sub from different maps or paths\n", dst); dst 4498 kernel/bpf/verifier.c dst); dst 4507 kernel/bpf/verifier.c dst); dst 4559 kernel/bpf/verifier.c dst, bpf_alu_string[opcode >> 4]); dst 4564 kernel/bpf/verifier.c dst, bpf_alu_string[opcode >> 4]); dst 4580 kernel/bpf/verifier.c check_map_access(env, dst, dst_reg->off, 1, false)) { dst 4582 kernel/bpf/verifier.c "prohibited for !root\n", dst); dst 4588 kernel/bpf/verifier.c "prohibited for !root\n", dst); dst 4611 kernel/bpf/verifier.c u32 dst = insn->dst_reg; dst 4649 kernel/bpf/verifier.c verbose(env, "R%d tried to add from different pointers or scalars\n", dst); dst 4673 kernel/bpf/verifier.c verbose(env, "R%d tried to sub from different pointers or scalars\n", dst); dst 223 kernel/cgroup/freezer.c struct cgroup *src, struct cgroup *dst) dst 238 kernel/cgroup/freezer.c !test_bit(CGRP_FREEZE, &dst->flags) && dst 248 kernel/cgroup/freezer.c cgroup_inc_frozen_cnt(dst); dst 251 kernel/cgroup/freezer.c cgroup_update_frozen(dst); dst 257 kernel/cgroup/freezer.c cgroup_freeze_task(task, test_bit(CGRP_FREEZE, &dst->flags)); dst 171 kernel/events/internal.h memcpy_common(void *dst, const void *src, unsigned long n) dst 173 kernel/events/internal.h memcpy(dst, src, n); dst 180 kernel/events/internal.h memcpy_skip(void *dst, const void *src, unsigned long n) dst 191 kernel/events/internal.h arch_perf_out_copy_user(void *dst, const void *src, unsigned long n) dst 196 kernel/events/internal.h ret = __copy_from_user_inatomic(dst, src, n); dst 250 kernel/events/uprobes.c static void copy_from_page(struct page *page, unsigned long vaddr, void *dst, int len) dst 253 kernel/events/uprobes.c memcpy(dst, kaddr + (vaddr & ~PAGE_MASK), len); dst 841 kernel/fork.c int __weak arch_dup_task_struct(struct task_struct *dst, dst 844 kernel/fork.c *dst = *src; dst 284 kernel/gcov/clang.c void gcov_info_add(struct gcov_info *dst, struct gcov_info *src) dst 290 kernel/gcov/clang.c list_for_each_entry(dfn_ptr, &dst->functions, head) { dst 235 kernel/gcov/gcc_4_7.c void gcov_info_add(struct gcov_info *dst, struct gcov_info *src) dst 244 kernel/gcov/gcc_4_7.c dci_ptr = dst->functions[fi_idx]->ctrs; dst 917 kernel/kexec_file.c void *src, *dst; dst 940 kernel/kexec_file.c dst = pi->purgatory_buf + offset; dst 941 kernel/kexec_file.c memcpy(dst, src, sechdrs[i].sh_size); dst 214 kernel/locking/lockdep.c static inline void lock_time_add(struct lock_time *src, struct lock_time *dst) dst 219 kernel/locking/lockdep.c if (src->max > dst->max) dst 220 kernel/locking/lockdep.c dst->max = src->max; dst 222 kernel/locking/lockdep.c if (src->min < dst->min || !dst->nr) dst 223 kernel/locking/lockdep.c dst->min = src->min; dst 225 kernel/locking/lockdep.c dst->total += src->total; dst 226 kernel/locking/lockdep.c dst->nr += src->nr; dst 2750 
kernel/module.c Elf_Sym *dst; dst 2767 kernel/module.c mod->core_kallsyms.symtab = dst = mod->core_layout.base + info->symoffs; dst 2778 kernel/module.c dst[ndst] = src[i]; dst 2779 kernel/module.c dst[ndst++].st_name = s - mod->core_kallsyms.strtab; dst 2928 kernel/module.c static int copy_chunked_from_user(void *dst, const void __user *usrc, unsigned long len) dst 2933 kernel/module.c if (copy_from_user(dst, usrc, n) != 0) dst 2936 kernel/module.c dst += n; dst 431 kernel/power/snapshot.c struct rtree_node *node, *block, **dst; dst 463 kernel/power/snapshot.c dst = &zone->rtree; dst 473 kernel/power/snapshot.c *dst = node; dst 478 kernel/power/snapshot.c dst = (struct rtree_node **)&((*dst)->data[index]); dst 479 kernel/power/snapshot.c node = *dst; dst 483 kernel/power/snapshot.c *dst = block; dst 1338 kernel/power/snapshot.c static inline void do_copy_page(long *dst, long *src) dst 1343 kernel/power/snapshot.c *dst++ = *src++; dst 1354 kernel/power/snapshot.c static void safe_copy_page(void *dst, struct page *s_page) dst 1357 kernel/power/snapshot.c do_copy_page(dst, page_address(s_page)); dst 1360 kernel/power/snapshot.c do_copy_page(dst, page_address(s_page)); dst 1375 kernel/power/snapshot.c void *src, *dst; dst 1381 kernel/power/snapshot.c dst = kmap_atomic(d_page); dst 1382 kernel/power/snapshot.c do_copy_page(dst, src); dst 1383 kernel/power/snapshot.c kunmap_atomic(dst); dst 1392 kernel/power/snapshot.c dst = kmap_atomic(d_page); dst 1393 kernel/power/snapshot.c copy_page(dst, buffer); dst 1394 kernel/power/snapshot.c kunmap_atomic(dst); dst 2142 kernel/power/snapshot.c static void duplicate_memory_bitmap(struct memory_bitmap *dst, dst 2150 kernel/power/snapshot.c memory_bm_set_bit(dst, pfn); dst 2406 kernel/power/snapshot.c void *dst; dst 2408 kernel/power/snapshot.c dst = kmap_atomic(last_highmem_page); dst 2409 kernel/power/snapshot.c copy_page(dst, buffer); dst 2410 kernel/power/snapshot.c kunmap_atomic(dst); dst 593 kernel/ptrace.c int ptrace_readdata(struct task_struct *tsk, unsigned long src, char __user *dst, int len) dst 609 kernel/ptrace.c if (copy_to_user(dst, buf, retval)) dst 613 kernel/ptrace.c dst += retval; dst 619 kernel/ptrace.c int ptrace_writedata(struct task_struct *tsk, char __user *src, unsigned long dst, int len) dst 630 kernel/ptrace.c retval = ptrace_access_vm(tsk, dst, buf, this_len, dst 639 kernel/ptrace.c dst += retval; dst 1054 kernel/time/clocksource.c ssize_t sysfs_get_uname(const char *buf, char *dst, size_t cnt) dst 1066 kernel/time/clocksource.c memcpy(dst, buf, cnt); dst 1067 kernel/time/clocksource.c dst[cnt] = 0; dst 60 kernel/time/tick-internal.h extern ssize_t sysfs_get_uname(const char *buf, char *dst, size_t cnt); dst 141 kernel/trace/bpf_trace.c BPF_CALL_3(bpf_probe_read, void *, dst, u32, size, const void *, unsafe_ptr) dst 149 kernel/trace/bpf_trace.c ret = probe_kernel_read(dst, unsafe_ptr, size); dst 152 kernel/trace/bpf_trace.c memset(dst, 0, size); dst 586 kernel/trace/bpf_trace.c BPF_CALL_3(bpf_probe_read_str, void *, dst, u32, size, dst 604 kernel/trace/bpf_trace.c ret = strncpy_from_unsafe(dst, unsafe_ptr, size); dst 607 kernel/trace/bpf_trace.c memset(dst, 0, size); dst 1420 kernel/trace/ftrace.c struct ftrace_hash **dst, struct ftrace_hash *src) dst 1449 kernel/trace/ftrace.c rcu_assign_pointer(*dst, new_hash); dst 154 kernel/trace/trace_uprobe.c u8 *dst = get_loc_data(dest, base); dst 161 kernel/trace/trace_uprobe.c ret = strlcpy(dst, current->comm, maxlen); dst 163 kernel/trace/trace_uprobe.c ret = strncpy_from_user(dst, 
src, maxlen); dst 166 kernel/trace/trace_uprobe.c dst[ret - 1] = '\0'; dst 174 kernel/trace/trace_uprobe.c *(u32 *)dest = make_data_loc(ret, (void *)dst - base); dst 141 lib/bch.c static void load_ecc8(struct bch_control *bch, uint32_t *dst, dst 148 lib/bch.c dst[i] = (src[0] << 24)|(src[1] << 16)|(src[2] << 8)|src[3]; dst 151 lib/bch.c dst[nwords] = (pad[0] << 24)|(pad[1] << 16)|(pad[2] << 8)|pad[3]; dst 157 lib/bch.c static void store_ecc8(struct bch_control *bch, uint8_t *dst, dst 164 lib/bch.c *dst++ = (src[i] >> 24); dst 165 lib/bch.c *dst++ = (src[i] >> 16) & 0xff; dst 166 lib/bch.c *dst++ = (src[i] >> 8) & 0xff; dst 167 lib/bch.c *dst++ = (src[i] >> 0) & 0xff; dst 173 lib/bch.c memcpy(dst, pad, BCH_ECC_BYTES(bch)-4*nwords); dst 381 lib/bch.c static void gf_poly_copy(struct gf_poly *dst, struct gf_poly *src) dst 383 lib/bch.c memcpy(dst, src, GF_POLY_SZ(src->deg)); dst 82 lib/bitmap.c void __bitmap_complement(unsigned long *dst, const unsigned long *src, unsigned int bits) dst 86 lib/bitmap.c dst[k] = ~src[k]; dst 101 lib/bitmap.c void __bitmap_shift_right(unsigned long *dst, const unsigned long *src, dst 126 lib/bitmap.c dst[k] = lower | upper; dst 129 lib/bitmap.c memset(&dst[lim - off], 0, off*sizeof(unsigned long)); dst 146 lib/bitmap.c void __bitmap_shift_left(unsigned long *dst, const unsigned long *src, dst 164 lib/bitmap.c dst[k + off] = lower | upper; dst 167 lib/bitmap.c memset(dst, 0, off*sizeof(unsigned long)); dst 171 lib/bitmap.c int __bitmap_and(unsigned long *dst, const unsigned long *bitmap1, dst 179 lib/bitmap.c result |= (dst[k] = bitmap1[k] & bitmap2[k]); dst 181 lib/bitmap.c result |= (dst[k] = bitmap1[k] & bitmap2[k] & dst 187 lib/bitmap.c void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1, dst 194 lib/bitmap.c dst[k] = bitmap1[k] | bitmap2[k]; dst 198 lib/bitmap.c void __bitmap_xor(unsigned long *dst, const unsigned long *bitmap1, dst 205 lib/bitmap.c dst[k] = bitmap1[k] ^ bitmap2[k]; dst 209 lib/bitmap.c int __bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1, dst 217 lib/bitmap.c result |= (dst[k] = bitmap1[k] & ~bitmap2[k]); dst 219 lib/bitmap.c result |= (dst[k] = bitmap1[k] & ~bitmap2[k] & dst 791 lib/bitmap.c void bitmap_remap(unsigned long *dst, const unsigned long *src, dst 797 lib/bitmap.c if (dst == src) /* following doesn't handle inplace remaps */ dst 799 lib/bitmap.c bitmap_zero(dst, nbits); dst 806 lib/bitmap.c set_bit(oldbit, dst); /* identity map */ dst 808 lib/bitmap.c set_bit(bitmap_ord_to_pos(new, n % w, nbits), dst); dst 955 lib/bitmap.c void bitmap_onto(unsigned long *dst, const unsigned long *orig, dst 960 lib/bitmap.c if (dst == orig) /* following doesn't handle inplace mappings */ dst 962 lib/bitmap.c bitmap_zero(dst, bits); dst 978 lib/bitmap.c set_bit(n, dst); dst 994 lib/bitmap.c void bitmap_fold(unsigned long *dst, const unsigned long *orig, dst 999 lib/bitmap.c if (dst == orig) /* following doesn't handle inplace mappings */ dst 1001 lib/bitmap.c bitmap_zero(dst, nbits); dst 1004 lib/bitmap.c set_bit(oldbit % sz, dst); dst 1157 lib/bitmap.c void bitmap_copy_le(unsigned long *dst, const unsigned long *src, unsigned int nbits) dst 1163 lib/bitmap.c dst[i] = cpu_to_le64(src[i]); dst 1165 lib/bitmap.c dst[i] = cpu_to_le32(src[i]); dst 152 lib/checksum.c csum_partial_copy_from_user(const void __user *src, void *dst, int len, dst 157 lib/checksum.c missing = __copy_from_user(dst, src, len); dst 159 lib/checksum.c memset(dst + len - missing, 0, missing); dst 164 lib/checksum.c return csum_partial(dst, len, sum); 
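The lib/checksum.c entries just above show the generic software fallback for csum_partial_copy_from_user(): copy the user buffer with __copy_from_user(), zero-fill whatever could not be copied, and checksum the kernel-side copy with csum_partial(). A minimal sketch of that shape, assembled from the fragments indexed here (the errp handling and the usual <linux/uaccess.h>/<net/checksum.h> includes are filled in as assumptions, not copied source):

    __wsum
    csum_partial_copy_from_user(const void __user *src, void *dst, int len,
                                __wsum sum, int *errp)
    {
            int missing;

            /* Copy as much as possible; 'missing' is the byte count that faulted. */
            missing = __copy_from_user(dst, src, len);
            if (missing) {
                    /* Zero the uncopied tail so the checksum is still well defined. */
                    memset(dst + len - missing, 0, missing);
                    if (errp)
                            *errp = -EFAULT;  /* assumption: report the fault to the caller */
            }

            /* Checksum the destination buffer we just filled. */
            return csum_partial(dst, len, sum);
    }
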
dst 172 lib/checksum.c csum_partial_copy(const void *src, void *dst, int len, __wsum sum) dst 174 lib/checksum.c memcpy(dst, src, len); dst 175 lib/checksum.c return csum_partial(dst, len, sum); dst 781 lib/crypto/des.c void des_encrypt(const struct des_ctx *ctx, u8 *dst, const u8 *src) dst 797 lib/crypto/des.c put_unaligned_le32(R, dst); dst 798 lib/crypto/des.c put_unaligned_le32(L, dst + 4); dst 802 lib/crypto/des.c void des_decrypt(const struct des_ctx *ctx, u8 *dst, const u8 *src) dst 818 lib/crypto/des.c put_unaligned_le32(R, dst); dst 819 lib/crypto/des.c put_unaligned_le32(L, dst + 4); dst 844 lib/crypto/des.c void des3_ede_encrypt(const struct des3_ede_ctx *dctx, u8 *dst, const u8 *src) dst 868 lib/crypto/des.c put_unaligned_le32(R, dst); dst 869 lib/crypto/des.c put_unaligned_le32(L, dst + 4); dst 873 lib/crypto/des.c void des3_ede_decrypt(const struct des3_ede_ctx *dctx, u8 *dst, const u8 *src) dst 897 lib/crypto/des.c put_unaligned_le32(R, dst); dst 898 lib/crypto/des.c put_unaligned_le32(L, dst + 4); dst 248 lib/crypto/sha256.c __be32 *dst = (__be32 *)out; dst 267 lib/crypto/sha256.c put_unaligned_be32(sctx->state[i], &dst[i]); dst 44 lib/hexdump.c int hex2bin(u8 *dst, const char *src, size_t count) dst 53 lib/hexdump.c *dst++ = (hi << 4) | lo; dst 65 lib/hexdump.c char *bin2hex(char *dst, const void *src, size_t count) dst 70 lib/hexdump.c dst = hex_byte_pack(dst, *_src++); dst 71 lib/hexdump.c return dst; dst 278 lib/iomap.c static inline void mmio_insb(void __iomem *addr, u8 *dst, int count) dst 282 lib/iomap.c *dst = data; dst 283 lib/iomap.c dst++; dst 286 lib/iomap.c static inline void mmio_insw(void __iomem *addr, u16 *dst, int count) dst 290 lib/iomap.c *dst = data; dst 291 lib/iomap.c dst++; dst 294 lib/iomap.c static inline void mmio_insl(void __iomem *addr, u32 *dst, int count) dst 298 lib/iomap.c *dst = data; dst 299 lib/iomap.c dst++; dst 328 lib/iomap.c void ioread8_rep(void __iomem *addr, void *dst, unsigned long count) dst 330 lib/iomap.c IO_COND(addr, insb(port,dst,count), mmio_insb(addr, dst, count)); dst 332 lib/iomap.c void ioread16_rep(void __iomem *addr, void *dst, unsigned long count) dst 334 lib/iomap.c IO_COND(addr, insw(port,dst,count), mmio_insw(addr, dst, count)); dst 336 lib/iomap.c void ioread32_rep(void __iomem *addr, void *dst, unsigned long count) dst 338 lib/iomap.c IO_COND(addr, insl(port,dst,count), mmio_insl(addr, dst, count)); dst 23 lib/iomap_copy.c u32 __iomem *dst = to; dst 28 lib/iomap_copy.c __raw_writel(*src++, dst++); dst 44 lib/iomap_copy.c u32 *dst = to; dst 49 lib/iomap_copy.c *dst++ = __raw_readl(src++); dst 68 lib/iomap_copy.c u64 __iomem *dst = to; dst 73 lib/iomap_copy.c __raw_writeq(*src++, dst++); dst 128 lib/kfifo.c static void kfifo_copy_out(struct __kfifo *fifo, void *dst, dst 143 lib/kfifo.c memcpy(dst, fifo->data + off, l); dst 144 lib/kfifo.c memcpy(dst + l, fifo->data, len - l); dst 525 lib/lz4/lz4_compress.c char * const dst, dst 538 lib/lz4/lz4_compress.c BYTE *op = (BYTE *) dst; dst 717 lib/lz4/lz4_compress.c return (int) (((char *)op) - dst); dst 723 lib/lz4/lz4_compress.c char *dst, dst 738 lib/lz4/lz4_compress.c state, src, dst, *srcSizePtr, dst 744 lib/lz4/lz4_compress.c src, dst, srcSizePtr, dst 749 lib/lz4/lz4_compress.c src, dst, srcSizePtr, dst 757 lib/lz4/lz4_compress.c char *dst, dst 762 lib/lz4/lz4_compress.c return LZ4_compress_destSize_extState(wrkmem, src, dst, srcSizePtr, dst 62 lib/lz4/lz4_decompress.c char * const dst, dst 86 lib/lz4/lz4_decompress.c BYTE *op = (BYTE *) dst; dst 438 
lib/lz4/lz4_decompress.c return (int) (((char *)op) - dst); dst 458 lib/lz4/lz4_decompress.c int LZ4_decompress_safe_partial(const char *src, char *dst, dst 462 lib/lz4/lz4_decompress.c return LZ4_decompress_generic(src, dst, compressedSize, dstCapacity, dst 464 lib/lz4/lz4_decompress.c noDict, (BYTE *)dst, NULL, 0); dst 140 lib/lz4/lz4defs.h static FORCE_INLINE void LZ4_copy8(void *dst, const void *src) dst 145 lib/lz4/lz4defs.h put_unaligned(a, (U64 *)dst); dst 150 lib/lz4/lz4defs.h put_unaligned(a, (U32 *)dst); dst 151 lib/lz4/lz4defs.h put_unaligned(b, (U32 *)dst + 1); dst 584 lib/lz4/lz4hc_compress.c char *dst, dst 601 lib/lz4/lz4hc_compress.c return LZ4HC_compress_generic(ctx, src, dst, dst 604 lib/lz4/lz4hc_compress.c return LZ4HC_compress_generic(ctx, src, dst, dst 608 lib/lz4/lz4hc_compress.c int LZ4_compress_HC(const char *src, char *dst, int srcSize, dst 611 lib/lz4/lz4hc_compress.c return LZ4_compress_HC_extStateHC(wrkmem, src, dst, dst 22 lib/lzo/lzodefs.h #define COPY4(dst, src) \ dst 23 lib/lzo/lzodefs.h put_unaligned(get_unaligned((const u32 *)(src)), (u32 *)(dst)) dst 25 lib/lzo/lzodefs.h #define COPY8(dst, src) \ dst 26 lib/lzo/lzodefs.h put_unaligned(get_unaligned((const u64 *)(src)), (u64 *)(dst)) dst 28 lib/lzo/lzodefs.h #define COPY8(dst, src) \ dst 29 lib/lzo/lzodefs.h COPY4(dst, src); COPY4((dst) + 4, (src) + 4) dst 516 lib/nlattr.c size_t nla_strlcpy(char *dst, const struct nlattr *nla, size_t dstsize) dst 527 lib/nlattr.c memset(dst, 0, dstsize); dst 528 lib/nlattr.c memcpy(dst, src, len); dst 545 lib/nlattr.c char *src = nla_data(nla), *dst; dst 550 lib/nlattr.c dst = kmalloc(srclen + 1, flags); dst 551 lib/nlattr.c if (dst != NULL) { dst 552 lib/nlattr.c memcpy(dst, src, srclen); dst 553 lib/nlattr.c dst[srclen] = '\0'; dst 555 lib/nlattr.c return dst; dst 133 lib/string_helpers.c static bool unescape_space(char **src, char **dst) dst 135 lib/string_helpers.c char *p = *dst, *q = *src; dst 156 lib/string_helpers.c *dst += 1; dst 161 lib/string_helpers.c static bool unescape_octal(char **src, char **dst) dst 163 lib/string_helpers.c char *p = *dst, *q = *src; dst 175 lib/string_helpers.c *dst += 1; dst 180 lib/string_helpers.c static bool unescape_hex(char **src, char **dst) dst 182 lib/string_helpers.c char *p = *dst, *q = *src; dst 199 lib/string_helpers.c *dst += 1; dst 204 lib/string_helpers.c static bool unescape_special(char **src, char **dst) dst 206 lib/string_helpers.c char *p = *dst, *q = *src; dst 224 lib/string_helpers.c *dst += 1; dst 268 lib/string_helpers.c int string_unescape(char *src, char *dst, size_t size, unsigned int flags) dst 270 lib/string_helpers.c char *out = dst; dst 299 lib/string_helpers.c return out - dst; dst 303 lib/string_helpers.c static bool escape_passthrough(unsigned char c, char **dst, char *end) dst 305 lib/string_helpers.c char *out = *dst; dst 309 lib/string_helpers.c *dst = out + 1; dst 313 lib/string_helpers.c static bool escape_space(unsigned char c, char **dst, char *end) dst 315 lib/string_helpers.c char *out = *dst; dst 345 lib/string_helpers.c *dst = out; dst 349 lib/string_helpers.c static bool escape_special(unsigned char c, char **dst, char *end) dst 351 lib/string_helpers.c char *out = *dst; dst 375 lib/string_helpers.c *dst = out; dst 379 lib/string_helpers.c static bool escape_null(unsigned char c, char **dst, char *end) dst 381 lib/string_helpers.c char *out = *dst; dst 393 lib/string_helpers.c *dst = out; dst 397 lib/string_helpers.c static bool escape_octal(unsigned char c, char **dst, char *end) dst 399 
lib/string_helpers.c char *out = *dst; dst 414 lib/string_helpers.c *dst = out; dst 418 lib/string_helpers.c static bool escape_hex(unsigned char c, char **dst, char *end) dst 420 lib/string_helpers.c char *out = *dst; dst 435 lib/string_helpers.c *dst = out; dst 497 lib/string_helpers.c int string_escape_mem(const char *src, size_t isz, char *dst, size_t osz, dst 500 lib/string_helpers.c char *p = dst; dst 542 lib/string_helpers.c return p - dst; dst 546 lib/string_helpers.c int string_escape_mem_ascii(const char *src, size_t isz, char *dst, dst 549 lib/string_helpers.c char *p = dst; dst 561 lib/string_helpers.c return p - dst; dst 572 lib/string_helpers.c char *dst; dst 581 lib/string_helpers.c dst = kmalloc(dlen + 1, gfp); dst 582 lib/string_helpers.c if (!dst) dst 585 lib/string_helpers.c WARN_ON(string_escape_mem(src, slen, dst, dlen, flags, esc) != dlen); dst 586 lib/string_helpers.c dst[dlen] = '\0'; dst 588 lib/string_helpers.c return dst; dst 15 lib/strncpy_from_user.c #define IS_UNALIGNED(src, dst) 0 dst 17 lib/strncpy_from_user.c #define IS_UNALIGNED(src, dst) \ dst 18 lib/strncpy_from_user.c (((long) dst | (long) src) & (sizeof(long) - 1)) dst 27 lib/strncpy_from_user.c static inline long do_strncpy_from_user(char *dst, const char __user *src, dst 33 lib/strncpy_from_user.c if (IS_UNALIGNED(src, dst)) dst 42 lib/strncpy_from_user.c *(unsigned long *)(dst+res) = c; dst 57 lib/strncpy_from_user.c dst[res] = c; dst 97 lib/strncpy_from_user.c long strncpy_from_user(char *dst, const char __user *src, long count) dst 117 lib/strncpy_from_user.c kasan_check_write(dst, count); dst 118 lib/strncpy_from_user.c check_object_size(dst, count, false); dst 120 lib/strncpy_from_user.c retval = do_strncpy_from_user(dst, src, count, max); dst 165 lib/test_firmware.c static int __kstrncpy(char **dst, const char *name, size_t count, gfp_t gfp) dst 167 lib/test_firmware.c *dst = kstrndup(name, count, gfp); dst 168 lib/test_firmware.c if (!*dst) dst 285 lib/test_firmware.c static ssize_t config_test_show_str(char *dst, dst 291 lib/test_firmware.c len = snprintf(dst, PAGE_SIZE, "%s\n", src); dst 604 lib/test_kmod.c static int __kstrncpy(char **dst, const char *name, size_t count, gfp_t gfp) dst 606 lib/test_kmod.c *dst = kstrndup(name, count, gfp); dst 607 lib/test_kmod.c if (!*dst) dst 675 lib/test_kmod.c char *dst, dst 681 lib/test_kmod.c len = snprintf(dst, PAGE_SIZE, "%s\n", src); dst 63 lib/test_ubsan.c volatile char *dst, *src; dst 67 lib/test_ubsan.c dst = (char *)&val; dst 69 lib/test_ubsan.c *dst = *src; dst 204 lib/textsearch.c static unsigned int get_linear_data(unsigned int consumed, const u8 **dst, dst 211 lib/textsearch.c *dst = st->data + consumed; dst 1896 lib/xarray.c static unsigned int xas_extract_present(struct xa_state *xas, void **dst, dst 1906 lib/xarray.c dst[i++] = entry; dst 1915 lib/xarray.c static unsigned int xas_extract_marked(struct xa_state *xas, void **dst, dst 1925 lib/xarray.c dst[i++] = entry; dst 1962 lib/xarray.c unsigned int xa_extract(struct xarray *xa, void **dst, unsigned long start, dst 1971 lib/xarray.c return xas_extract_marked(&xas, dst, max, n, filter); dst 1972 lib/xarray.c return xas_extract_present(&xas, dst, max, n); dst 79 lib/xxhash.c void xxh32_copy_state(struct xxh32_state *dst, const struct xxh32_state *src) dst 81 lib/xxhash.c memcpy(dst, src, sizeof(*dst)); dst 85 lib/xxhash.c void xxh64_copy_state(struct xxh64_state *dst, const struct xxh64_state *src) dst 87 lib/xxhash.c memcpy(dst, src, sizeof(*dst)); dst 434 lib/zstd/compress.c size_t 
ZSTD_noCompressBlock(void *dst, size_t dstCapacity, const void *src, size_t srcSize) dst 438 lib/zstd/compress.c memcpy((BYTE *)dst + ZSTD_blockHeaderSize, src, srcSize); dst 439 lib/zstd/compress.c ZSTD_writeLE24(dst, (U32)(srcSize << 2) + (U32)bt_raw); dst 443 lib/zstd/compress.c static size_t ZSTD_noCompressLiterals(void *dst, size_t dstCapacity, const void *src, size_t srcSize) dst 445 lib/zstd/compress.c BYTE *const ostart = (BYTE * const)dst; dst 462 lib/zstd/compress.c static size_t ZSTD_compressRleLiteralsBlock(void *dst, size_t dstCapacity, const void *src, size_t srcSize) dst 464 lib/zstd/compress.c BYTE *const ostart = (BYTE * const)dst; dst 482 lib/zstd/compress.c static size_t ZSTD_compressLiterals(ZSTD_CCtx *zc, void *dst, size_t dstCapacity, const void *src, size_t srcSize) dst 486 lib/zstd/compress.c BYTE *const ostart = (BYTE *)dst; dst 496 lib/zstd/compress.c return ZSTD_noCompressLiterals(dst, dstCapacity, src, srcSize); dst 520 lib/zstd/compress.c return ZSTD_noCompressLiterals(dst, dstCapacity, src, srcSize); dst 524 lib/zstd/compress.c return ZSTD_compressRleLiteralsBlock(dst, dstCapacity, src, srcSize); dst 586 lib/zstd/compress.c ZSTD_STATIC size_t ZSTD_compressSequences_internal(ZSTD_CCtx *zc, void *dst, size_t dstCapacity) dst 598 lib/zstd/compress.c BYTE *const ostart = (BYTE *)dst; dst 832 lib/zstd/compress.c ZSTD_STATIC size_t ZSTD_compressSequences(ZSTD_CCtx *zc, void *dst, size_t dstCapacity, size_t srcSize) dst 834 lib/zstd/compress.c size_t const cSize = ZSTD_compressSequences_internal(zc, dst, dstCapacity); dst 2333 lib/zstd/compress.c static size_t ZSTD_compressBlock_internal(ZSTD_CCtx *zc, void *dst, size_t dstCapacity, const void *src, size_t srcSize) dst 2345 lib/zstd/compress.c return ZSTD_compressSequences(zc, dst, dstCapacity, srcSize); dst 2355 lib/zstd/compress.c static size_t ZSTD_compress_generic(ZSTD_CCtx *cctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize, U32 lastFrameChunk) dst 2360 lib/zstd/compress.c BYTE *const ostart = (BYTE *)dst; dst 2431 lib/zstd/compress.c static size_t ZSTD_writeFrameHeader(void *dst, size_t dstCapacity, ZSTD_parameters params, U64 pledgedSrcSize, U32 dictID) dst 2433 lib/zstd/compress.c BYTE *const op = (BYTE *)dst; dst 2447 lib/zstd/compress.c ZSTD_writeLE32(dst, ZSTD_MAGICNUMBER); dst 2490 lib/zstd/compress.c static size_t ZSTD_compressContinue_internal(ZSTD_CCtx *cctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize, U32 frame, U32 lastFrameChunk) dst 2499 lib/zstd/compress.c fhSize = ZSTD_writeFrameHeader(dst, dstCapacity, cctx->params, cctx->frameContentSize, cctx->dictID); dst 2503 lib/zstd/compress.c dst = (char *)dst + fhSize; dst 2530 lib/zstd/compress.c size_t const cSize = frame ? 
ZSTD_compress_generic(cctx, dst, dstCapacity, src, srcSize, lastFrameChunk) dst 2531 lib/zstd/compress.c : ZSTD_compressBlock_internal(cctx, dst, dstCapacity, src, srcSize); dst 2539 lib/zstd/compress.c size_t ZSTD_compressContinue(ZSTD_CCtx *cctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize) dst 2541 lib/zstd/compress.c return ZSTD_compressContinue_internal(cctx, dst, dstCapacity, src, srcSize, 1, 0); dst 2546 lib/zstd/compress.c size_t ZSTD_compressBlock(ZSTD_CCtx *cctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize) dst 2551 lib/zstd/compress.c return ZSTD_compressContinue_internal(cctx, dst, dstCapacity, src, srcSize, 0, 0); dst 2765 lib/zstd/compress.c static size_t ZSTD_writeEpilogue(ZSTD_CCtx *cctx, void *dst, size_t dstCapacity) dst 2767 lib/zstd/compress.c BYTE *const ostart = (BYTE *)dst; dst 2776 lib/zstd/compress.c fhSize = ZSTD_writeFrameHeader(dst, dstCapacity, cctx->params, 0, 0); dst 2806 lib/zstd/compress.c size_t ZSTD_compressEnd(ZSTD_CCtx *cctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize) dst 2809 lib/zstd/compress.c size_t const cSize = ZSTD_compressContinue_internal(cctx, dst, dstCapacity, src, srcSize, 1, 1); dst 2812 lib/zstd/compress.c endResult = ZSTD_writeEpilogue(cctx, (char *)dst + cSize, dstCapacity - cSize); dst 2818 lib/zstd/compress.c static size_t ZSTD_compress_internal(ZSTD_CCtx *cctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize, const void *dict, size_t dictSize, dst 2822 lib/zstd/compress.c return ZSTD_compressEnd(cctx, dst, dstCapacity, src, srcSize); dst 2825 lib/zstd/compress.c size_t ZSTD_compress_usingDict(ZSTD_CCtx *ctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize, const void *dict, size_t dictSize, dst 2828 lib/zstd/compress.c return ZSTD_compress_internal(ctx, dst, dstCapacity, src, srcSize, dict, dictSize, params); dst 2831 lib/zstd/compress.c size_t ZSTD_compressCCtx(ZSTD_CCtx *ctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize, ZSTD_parameters params) dst 2833 lib/zstd/compress.c return ZSTD_compress_internal(ctx, dst, dstCapacity, src, srcSize, NULL, 0, params); dst 2930 lib/zstd/compress.c size_t ZSTD_compress_usingCDict(ZSTD_CCtx *cctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize, const ZSTD_CDict *cdict) dst 2941 lib/zstd/compress.c return ZSTD_compressEnd(cctx, dst, dstCapacity, src, srcSize); dst 3122 lib/zstd/compress.c ZSTD_STATIC size_t ZSTD_limitCopy(void *dst, size_t dstCapacity, const void *src, size_t srcSize) dst 3125 lib/zstd/compress.c memcpy(dst, src, length); dst 3129 lib/zstd/compress.c static size_t ZSTD_compressStream_generic(ZSTD_CStream *zcs, void *dst, size_t *dstCapacityPtr, const void *src, size_t *srcSizePtr, ZSTD_flush_e const flush) dst 3135 lib/zstd/compress.c char *const ostart = (char *)dst; dst 3228 lib/zstd/compress.c ZSTD_compressStream_generic(zcs, (char *)(output->dst) + output->pos, &sizeWritten, (const char *)(input->src) + input->pos, &sizeRead, zsf_gather); dst 3242 lib/zstd/compress.c size_t const result = ZSTD_compressStream_generic(zcs, (char *)(output->dst) + output->pos, &sizeWritten, &srcSize, dst 3253 lib/zstd/compress.c BYTE *const ostart = (BYTE *)(output->dst) + output->pos; dst 3254 lib/zstd/compress.c BYTE *const oend = (BYTE *)(output->dst) + output->size; dst 52 lib/zstd/decompress.c static void ZSTD_copy4(void *dst, const void *src) { memcpy(dst, src, 4); } dst 412 lib/zstd/decompress.c static size_t ZSTD_copyRawBlock(void *dst, size_t dstCapacity, const void 
*src, size_t srcSize) dst 416 lib/zstd/decompress.c memcpy(dst, src, srcSize); dst 420 lib/zstd/decompress.c static size_t ZSTD_setRleBlock(void *dst, size_t dstCapacity, const void *src, size_t srcSize, size_t regenSize) dst 426 lib/zstd/decompress.c memset(dst, *(const BYTE *)src, regenSize); dst 1093 lib/zstd/decompress.c static size_t ZSTD_decompressSequences(ZSTD_DCtx *dctx, void *dst, size_t maxDstSize, const void *seqStart, size_t seqSize) dst 1097 lib/zstd/decompress.c BYTE *const ostart = (BYTE * const)dst; dst 1348 lib/zstd/decompress.c static size_t ZSTD_decompressSequencesLong(ZSTD_DCtx *dctx, void *dst, size_t maxDstSize, const void *seqStart, size_t seqSize) dst 1352 lib/zstd/decompress.c BYTE *const ostart = (BYTE * const)dst; dst 1445 lib/zstd/decompress.c static size_t ZSTD_decompressBlock_internal(ZSTD_DCtx *dctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize) dst 1465 lib/zstd/decompress.c return ZSTD_decompressSequencesLong(dctx, dst, dstCapacity, ip, srcSize); dst 1466 lib/zstd/decompress.c return ZSTD_decompressSequences(dctx, dst, dstCapacity, ip, srcSize); dst 1469 lib/zstd/decompress.c static void ZSTD_checkContinuity(ZSTD_DCtx *dctx, const void *dst) dst 1471 lib/zstd/decompress.c if (dst != dctx->previousDstEnd) { /* not contiguous */ dst 1473 lib/zstd/decompress.c dctx->vBase = (const char *)dst - ((const char *)(dctx->previousDstEnd) - (const char *)(dctx->base)); dst 1474 lib/zstd/decompress.c dctx->base = dst; dst 1475 lib/zstd/decompress.c dctx->previousDstEnd = dst; dst 1479 lib/zstd/decompress.c size_t ZSTD_decompressBlock(ZSTD_DCtx *dctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize) dst 1482 lib/zstd/decompress.c ZSTD_checkContinuity(dctx, dst); dst 1483 lib/zstd/decompress.c dSize = ZSTD_decompressBlock_internal(dctx, dst, dstCapacity, src, srcSize); dst 1484 lib/zstd/decompress.c dctx->previousDstEnd = (char *)dst + dSize; dst 1497 lib/zstd/decompress.c size_t ZSTD_generateNxBytes(void *dst, size_t dstCapacity, BYTE byte, size_t length) dst 1501 lib/zstd/decompress.c memset(dst, byte, length); dst 1566 lib/zstd/decompress.c static size_t ZSTD_decompressFrame(ZSTD_DCtx *dctx, void *dst, size_t dstCapacity, const void **srcPtr, size_t *srcSizePtr) dst 1569 lib/zstd/decompress.c BYTE *const ostart = (BYTE * const)dst; dst 1643 lib/zstd/decompress.c static size_t ZSTD_decompressMultiFrame(ZSTD_DCtx *dctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize, const void *dict, size_t dictSize, dst 1646 lib/zstd/decompress.c void *const dststart = dst; dst 1688 lib/zstd/decompress.c ZSTD_checkContinuity(dctx, dst); dst 1691 lib/zstd/decompress.c const size_t res = ZSTD_decompressFrame(dctx, dst, dstCapacity, &src, &srcSize); dst 1696 lib/zstd/decompress.c dst = (BYTE *)dst + res; dst 1704 lib/zstd/decompress.c return (BYTE *)dst - (BYTE *)dststart; dst 1707 lib/zstd/decompress.c size_t ZSTD_decompress_usingDict(ZSTD_DCtx *dctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize, const void *dict, size_t dictSize) dst 1709 lib/zstd/decompress.c return ZSTD_decompressMultiFrame(dctx, dst, dstCapacity, src, srcSize, dict, dictSize, NULL); dst 1712 lib/zstd/decompress.c size_t ZSTD_decompressDCtx(ZSTD_DCtx *dctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize) dst 1714 lib/zstd/decompress.c return ZSTD_decompress_usingDict(dctx, dst, dstCapacity, src, srcSize, NULL, 0); dst 1743 lib/zstd/decompress.c size_t ZSTD_decompressContinue(ZSTD_DCtx *dctx, void *dst, size_t dstCapacity, const void 
*src, size_t srcSize) dst 1749 lib/zstd/decompress.c ZSTD_checkContinuity(dctx, dst); dst 1811 lib/zstd/decompress.c case bt_compressed: rSize = ZSTD_decompressBlock_internal(dctx, dst, dstCapacity, src, srcSize); break; dst 1812 lib/zstd/decompress.c case bt_raw: rSize = ZSTD_copyRawBlock(dst, dstCapacity, src, srcSize); break; dst 1813 lib/zstd/decompress.c case bt_rle: rSize = ZSTD_setRleBlock(dst, dstCapacity, src, srcSize, dctx->rleSize); break; dst 1820 lib/zstd/decompress.c xxh64_update(&dctx->xxhState, dst, rSize); dst 1833 lib/zstd/decompress.c dctx->previousDstEnd = (char *)dst + rSize; dst 2147 lib/zstd/decompress.c size_t ZSTD_decompress_usingDDict(ZSTD_DCtx *dctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize, const ZSTD_DDict *ddict) dst 2150 lib/zstd/decompress.c return ZSTD_decompressMultiFrame(dctx, dst, dstCapacity, src, srcSize, NULL, 0, ddict); dst 2291 lib/zstd/decompress.c ZSTD_STATIC size_t ZSTD_limitCopy(void *dst, size_t dstCapacity, const void *src, size_t srcSize) dst 2294 lib/zstd/decompress.c memcpy(dst, src, length); dst 2303 lib/zstd/decompress.c char *const ostart = (char *)(output->dst) + output->pos; dst 2304 lib/zstd/decompress.c char *const oend = (char *)(output->dst) + output->size; dst 129 lib/zstd/fse.h FSE_PUBLIC_API size_t FSE_compress_usingCTable(void *dst, size_t dstCapacity, const void *src, size_t srcSize, const FSE_CTable *ct); dst 197 lib/zstd/fse.h FSE_PUBLIC_API size_t FSE_decompress_usingDTable(void *dst, size_t dstCapacity, const void *cSrc, size_t cSrcSize, const FSE_DTable *dt); dst 284 lib/zstd/fse.h size_t FSE_decompress_wksp(void *dst, size_t dstCapacity, const void *cSrc, size_t cSrcSize, unsigned maxLog, void *workspace, size_t workspaceSize); dst 724 lib/zstd/fse_compress.c static size_t FSE_compress_usingCTable_generic(void *dst, size_t dstSize, const void *src, size_t srcSize, const FSE_CTable *ct, const unsigned fast) dst 737 lib/zstd/fse_compress.c size_t const initError = BIT_initCStream(&bitC, dst, dstSize); dst 785 lib/zstd/fse_compress.c size_t FSE_compress_usingCTable(void *dst, size_t dstSize, const void *src, size_t srcSize, const FSE_CTable *ct) dst 790 lib/zstd/fse_compress.c return FSE_compress_usingCTable_generic(dst, dstSize, src, srcSize, ct, 1); dst 792 lib/zstd/fse_compress.c return FSE_compress_usingCTable_generic(dst, dstSize, src, srcSize, ct, 0); dst 214 lib/zstd/fse_decompress.c FORCE_INLINE size_t FSE_decompress_usingDTable_generic(void *dst, size_t maxDstSize, const void *cSrc, size_t cSrcSize, const FSE_DTable *dt, dst 217 lib/zstd/fse_decompress.c BYTE *const ostart = (BYTE *)dst; dst 282 lib/zstd/fse_decompress.c size_t FSE_decompress_usingDTable(void *dst, size_t originalSize, const void *cSrc, size_t cSrcSize, const FSE_DTable *dt) dst 290 lib/zstd/fse_decompress.c return FSE_decompress_usingDTable_generic(dst, originalSize, cSrc, cSrcSize, dt, 1); dst 291 lib/zstd/fse_decompress.c return FSE_decompress_usingDTable_generic(dst, originalSize, cSrc, cSrcSize, dt, 0); dst 294 lib/zstd/fse_decompress.c size_t FSE_decompress_wksp(void *dst, size_t dstCapacity, const void *cSrc, size_t cSrcSize, unsigned maxLog, void *workspace, size_t workspaceSize) dst 331 lib/zstd/fse_decompress.c return FSE_decompress_usingDTable(dst, dstCapacity, ip, cSrcSize, dt); /* always return, even if it is an error code */ dst 57 lib/zstd/huf.h size_t HUF_compress4X_wksp(void *dst, size_t dstSize, const void *src, size_t srcSize, unsigned maxSymbolValue, unsigned tableLog, void *workSpace, dst 104 
lib/zstd/huf.h size_t HUF_decompress4X_DCtx_wksp(HUF_DTable *dctx, void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, void *workspace, size_t workspaceSize); /**< decodes RLE and uncompressed */ dst 105 lib/zstd/huf.h size_t HUF_decompress4X_hufOnly_wksp(HUF_DTable *dctx, void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, void *workspace, dst 107 lib/zstd/huf.h size_t HUF_decompress4X2_DCtx_wksp(HUF_DTable *dctx, void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, void *workspace, dst 109 lib/zstd/huf.h size_t HUF_decompress4X4_DCtx_wksp(HUF_DTable *dctx, void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, void *workspace, dst 130 lib/zstd/huf.h size_t HUF_writeCTable_wksp(void *dst, size_t maxDstSize, const HUF_CElt *CTable, unsigned maxSymbolValue, unsigned huffLog, void *workspace, size_t workspaceSize); dst 131 lib/zstd/huf.h size_t HUF_compress4X_usingCTable(void *dst, size_t dstSize, const void *src, size_t srcSize, const HUF_CElt *CTable); dst 144 lib/zstd/huf.h size_t HUF_compress4X_repeat(void *dst, size_t dstSize, const void *src, size_t srcSize, unsigned maxSymbolValue, unsigned tableLog, void *workSpace, dst 183 lib/zstd/huf.h size_t HUF_decompress4X_usingDTable(void *dst, size_t maxDstSize, const void *cSrc, size_t cSrcSize, const HUF_DTable *DTable); dst 184 lib/zstd/huf.h size_t HUF_decompress4X2_usingDTable(void *dst, size_t maxDstSize, const void *cSrc, size_t cSrcSize, const HUF_DTable *DTable); dst 185 lib/zstd/huf.h size_t HUF_decompress4X4_usingDTable(void *dst, size_t maxDstSize, const void *cSrc, size_t cSrcSize, const HUF_DTable *DTable); dst 189 lib/zstd/huf.h size_t HUF_compress1X_wksp(void *dst, size_t dstSize, const void *src, size_t srcSize, unsigned maxSymbolValue, unsigned tableLog, void *workSpace, dst 191 lib/zstd/huf.h size_t HUF_compress1X_usingCTable(void *dst, size_t dstSize, const void *src, size_t srcSize, const HUF_CElt *CTable); dst 197 lib/zstd/huf.h size_t HUF_compress1X_repeat(void *dst, size_t dstSize, const void *src, size_t srcSize, unsigned maxSymbolValue, unsigned tableLog, void *workSpace, dst 201 lib/zstd/huf.h size_t HUF_decompress1X_DCtx_wksp(HUF_DTable *dctx, void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, void *workspace, size_t workspaceSize); dst 202 lib/zstd/huf.h size_t HUF_decompress1X2_DCtx_wksp(HUF_DTable *dctx, void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, void *workspace, dst 204 lib/zstd/huf.h size_t HUF_decompress1X4_DCtx_wksp(HUF_DTable *dctx, void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, void *workspace, dst 207 lib/zstd/huf.h size_t HUF_decompress1X_usingDTable(void *dst, size_t maxDstSize, const void *cSrc, size_t cSrcSize, dst 209 lib/zstd/huf.h size_t HUF_decompress1X2_usingDTable(void *dst, size_t maxDstSize, const void *cSrc, size_t cSrcSize, const HUF_DTable *DTable); dst 210 lib/zstd/huf.h size_t HUF_decompress1X4_usingDTable(void *dst, size_t maxDstSize, const void *cSrc, size_t cSrcSize, const HUF_DTable *DTable); dst 82 lib/zstd/huf_compress.c size_t HUF_compressWeights_wksp(void *dst, size_t dstSize, const void *weightTable, size_t wtSize, void *workspace, size_t workspaceSize) dst 84 lib/zstd/huf_compress.c BYTE *const ostart = (BYTE *)dst; dst 152 lib/zstd/huf_compress.c size_t HUF_writeCTable_wksp(void *dst, size_t maxDstSize, const HUF_CElt *CTable, U32 maxSymbolValue, U32 huffLog, void *workspace, size_t workspaceSize) dst 154 lib/zstd/huf_compress.c BYTE *op = (BYTE *)dst; dst 538 lib/zstd/huf_compress.c size_t 
HUF_compress1X_usingCTable(void *dst, size_t dstSize, const void *src, size_t srcSize, const HUF_CElt *CTable) dst 541 lib/zstd/huf_compress.c BYTE *const ostart = (BYTE *)dst; dst 581 lib/zstd/huf_compress.c size_t HUF_compress4X_usingCTable(void *dst, size_t dstSize, const void *src, size_t srcSize, const HUF_CElt *CTable) dst 586 lib/zstd/huf_compress.c BYTE *const ostart = (BYTE *)dst; dst 653 lib/zstd/huf_compress.c static size_t HUF_compress_internal(void *dst, size_t dstSize, const void *src, size_t srcSize, unsigned maxSymbolValue, unsigned huffLog, dst 656 lib/zstd/huf_compress.c BYTE *const ostart = (BYTE *)dst; dst 748 lib/zstd/huf_compress.c size_t HUF_compress1X_wksp(void *dst, size_t dstSize, const void *src, size_t srcSize, unsigned maxSymbolValue, unsigned huffLog, void *workSpace, dst 751 lib/zstd/huf_compress.c return HUF_compress_internal(dst, dstSize, src, srcSize, maxSymbolValue, huffLog, 1 /* single stream */, workSpace, wkspSize, NULL, NULL, 0); dst 754 lib/zstd/huf_compress.c size_t HUF_compress1X_repeat(void *dst, size_t dstSize, const void *src, size_t srcSize, unsigned maxSymbolValue, unsigned huffLog, void *workSpace, dst 757 lib/zstd/huf_compress.c return HUF_compress_internal(dst, dstSize, src, srcSize, maxSymbolValue, huffLog, 1 /* single stream */, workSpace, wkspSize, hufTable, repeat, dst 761 lib/zstd/huf_compress.c size_t HUF_compress4X_wksp(void *dst, size_t dstSize, const void *src, size_t srcSize, unsigned maxSymbolValue, unsigned huffLog, void *workSpace, dst 764 lib/zstd/huf_compress.c return HUF_compress_internal(dst, dstSize, src, srcSize, maxSymbolValue, huffLog, 0 /* 4 streams */, workSpace, wkspSize, NULL, NULL, 0); dst 767 lib/zstd/huf_compress.c size_t HUF_compress4X_repeat(void *dst, size_t dstSize, const void *src, size_t srcSize, unsigned maxSymbolValue, unsigned huffLog, void *workSpace, dst 770 lib/zstd/huf_compress.c return HUF_compress_internal(dst, dstSize, src, srcSize, maxSymbolValue, huffLog, 0 /* 4 streams */, workSpace, wkspSize, hufTable, repeat, dst 199 lib/zstd/huf_decompress.c static size_t HUF_decompress1X2_usingDTable_internal(void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, const HUF_DTable *DTable) dst 201 lib/zstd/huf_decompress.c BYTE *op = (BYTE *)dst; dst 224 lib/zstd/huf_decompress.c size_t HUF_decompress1X2_usingDTable(void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, const HUF_DTable *DTable) dst 229 lib/zstd/huf_decompress.c return HUF_decompress1X2_usingDTable_internal(dst, dstSize, cSrc, cSrcSize, DTable); dst 232 lib/zstd/huf_decompress.c size_t HUF_decompress1X2_DCtx_wksp(HUF_DTable *DCtx, void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, void *workspace, size_t workspaceSize) dst 244 lib/zstd/huf_decompress.c return HUF_decompress1X2_usingDTable_internal(dst, dstSize, ip, cSrcSize, DCtx); dst 247 lib/zstd/huf_decompress.c static size_t HUF_decompress4X2_usingDTable_internal(void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, const HUF_DTable *DTable) dst 255 lib/zstd/huf_decompress.c BYTE *const ostart = (BYTE *)dst; dst 355 lib/zstd/huf_decompress.c size_t HUF_decompress4X2_usingDTable(void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, const HUF_DTable *DTable) dst 360 lib/zstd/huf_decompress.c return HUF_decompress4X2_usingDTable_internal(dst, dstSize, cSrc, cSrcSize, DTable); dst 363 lib/zstd/huf_decompress.c size_t HUF_decompress4X2_DCtx_wksp(HUF_DTable *dctx, void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, void *workspace, size_t 
workspaceSize) dst 375 lib/zstd/huf_decompress.c return HUF_decompress4X2_usingDTable_internal(dst, dstSize, ip, cSrcSize, dctx); dst 655 lib/zstd/huf_decompress.c static size_t HUF_decompress1X4_usingDTable_internal(void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, const HUF_DTable *DTable) dst 668 lib/zstd/huf_decompress.c BYTE *const ostart = (BYTE *)dst; dst 684 lib/zstd/huf_decompress.c size_t HUF_decompress1X4_usingDTable(void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, const HUF_DTable *DTable) dst 689 lib/zstd/huf_decompress.c return HUF_decompress1X4_usingDTable_internal(dst, dstSize, cSrc, cSrcSize, DTable); dst 692 lib/zstd/huf_decompress.c size_t HUF_decompress1X4_DCtx_wksp(HUF_DTable *DCtx, void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, void *workspace, size_t workspaceSize) dst 704 lib/zstd/huf_decompress.c return HUF_decompress1X4_usingDTable_internal(dst, dstSize, ip, cSrcSize, DCtx); dst 707 lib/zstd/huf_decompress.c static size_t HUF_decompress4X4_usingDTable_internal(void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, const HUF_DTable *DTable) dst 714 lib/zstd/huf_decompress.c BYTE *const ostart = (BYTE *)dst; dst 817 lib/zstd/huf_decompress.c size_t HUF_decompress4X4_usingDTable(void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, const HUF_DTable *DTable) dst 822 lib/zstd/huf_decompress.c return HUF_decompress4X4_usingDTable_internal(dst, dstSize, cSrc, cSrcSize, DTable); dst 825 lib/zstd/huf_decompress.c size_t HUF_decompress4X4_DCtx_wksp(HUF_DTable *dctx, void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, void *workspace, size_t workspaceSize) dst 837 lib/zstd/huf_decompress.c return HUF_decompress4X4_usingDTable_internal(dst, dstSize, ip, cSrcSize, dctx); dst 844 lib/zstd/huf_decompress.c size_t HUF_decompress1X_usingDTable(void *dst, size_t maxDstSize, const void *cSrc, size_t cSrcSize, const HUF_DTable *DTable) dst 847 lib/zstd/huf_decompress.c return dtd.tableType ? HUF_decompress1X4_usingDTable_internal(dst, maxDstSize, cSrc, cSrcSize, DTable) dst 848 lib/zstd/huf_decompress.c : HUF_decompress1X2_usingDTable_internal(dst, maxDstSize, cSrc, cSrcSize, DTable); dst 851 lib/zstd/huf_decompress.c size_t HUF_decompress4X_usingDTable(void *dst, size_t maxDstSize, const void *cSrc, size_t cSrcSize, const HUF_DTable *DTable) dst 854 lib/zstd/huf_decompress.c return dtd.tableType ? HUF_decompress4X4_usingDTable_internal(dst, maxDstSize, cSrc, cSrcSize, DTable) dst 855 lib/zstd/huf_decompress.c : HUF_decompress4X2_usingDTable_internal(dst, maxDstSize, cSrc, cSrcSize, DTable); dst 899 lib/zstd/huf_decompress.c typedef size_t (*decompressionAlgo)(void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize); dst 901 lib/zstd/huf_decompress.c size_t HUF_decompress4X_DCtx_wksp(HUF_DTable *dctx, void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, void *workspace, size_t workspaceSize) dst 909 lib/zstd/huf_decompress.c memcpy(dst, cSrc, dstSize); dst 913 lib/zstd/huf_decompress.c memset(dst, *(const BYTE *)cSrc, dstSize); dst 919 lib/zstd/huf_decompress.c return algoNb ? 
HUF_decompress4X4_DCtx_wksp(dctx, dst, dstSize, cSrc, cSrcSize, workspace, workspaceSize) dst 920 lib/zstd/huf_decompress.c : HUF_decompress4X2_DCtx_wksp(dctx, dst, dstSize, cSrc, cSrcSize, workspace, workspaceSize); dst 924 lib/zstd/huf_decompress.c size_t HUF_decompress4X_hufOnly_wksp(HUF_DTable *dctx, void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, void *workspace, size_t workspaceSize) dst 934 lib/zstd/huf_decompress.c return algoNb ? HUF_decompress4X4_DCtx_wksp(dctx, dst, dstSize, cSrc, cSrcSize, workspace, workspaceSize) dst 935 lib/zstd/huf_decompress.c : HUF_decompress4X2_DCtx_wksp(dctx, dst, dstSize, cSrc, cSrcSize, workspace, workspaceSize); dst 939 lib/zstd/huf_decompress.c size_t HUF_decompress1X_DCtx_wksp(HUF_DTable *dctx, void *dst, size_t dstSize, const void *cSrc, size_t cSrcSize, void *workspace, size_t workspaceSize) dst 947 lib/zstd/huf_decompress.c memcpy(dst, cSrc, dstSize); dst 951 lib/zstd/huf_decompress.c memset(dst, *(const BYTE *)cSrc, dstSize); dst 957 lib/zstd/huf_decompress.c return algoNb ? HUF_decompress1X4_DCtx_wksp(dctx, dst, dstSize, cSrc, cSrcSize, workspace, workspaceSize) dst 958 lib/zstd/huf_decompress.c : HUF_decompress1X2_DCtx_wksp(dctx, dst, dstSize, cSrc, cSrcSize, workspace, workspaceSize); dst 129 lib/zstd/zstd_internal.h ZSTD_STATIC void ZSTD_copy8(void *dst, const void *src) { dst 130 lib/zstd/zstd_internal.h memcpy(dst, src, 8); dst 135 lib/zstd/zstd_internal.h ZSTD_STATIC void ZSTD_wildcopy(void *dst, const void *src, ptrdiff_t length) dst 138 lib/zstd/zstd_internal.h BYTE* op = (BYTE*)dst; dst 146 lib/zstd/zstd_internal.h return ZSTD_copy8(dst, src); dst 3419 mm/hugetlb.c int copy_hugetlb_page_range(struct mm_struct *dst, struct mm_struct *src, dst 3445 mm/hugetlb.c dst_pte = huge_pte_alloc(dst, addr, sz); dst 3464 mm/hugetlb.c dst_ptl = huge_pte_lock(h, dst, dst_pte); dst 3490 mm/hugetlb.c set_huge_swap_pte_at(dst, addr, dst_pte, entry, sz); dst 3506 mm/hugetlb.c set_huge_pte_at(dst, addr, dst_pte, entry); dst 3507 mm/hugetlb.c hugetlb_count_add(pages_per_huge_page(h), dst); dst 546 mm/list_lru.c struct list_lru_one *src, *dst; dst 556 mm/list_lru.c dst = list_lru_from_memcg_idx(nlru, dst_idx); dst 558 mm/list_lru.c list_splice_init(&src->list, &dst->list); dst 559 mm/list_lru.c set = (!dst->nr_items && src->nr_items); dst 560 mm/list_lru.c dst->nr_items += src->nr_items; dst 10 mm/maccess.c probe_read_common(void *dst, const void __user *src, size_t size) dst 15 mm/maccess.c ret = __copy_from_user_inatomic(dst, src, size); dst 22 mm/maccess.c probe_write_common(void __user *dst, const void *src, size_t size) dst 27 mm/maccess.c ret = __copy_to_user_inatomic(dst, src, size); dst 48 mm/maccess.c long __weak probe_kernel_read(void *dst, const void *src, size_t size) dst 51 mm/maccess.c long __probe_kernel_read(void *dst, const void *src, size_t size) dst 57 mm/maccess.c ret = probe_read_common(dst, (__force const void __user *)src, size); dst 74 mm/maccess.c long __weak probe_user_read(void *dst, const void __user *src, size_t size) dst 77 mm/maccess.c long __probe_user_read(void *dst, const void __user *src, size_t size) dst 84 mm/maccess.c ret = probe_read_common(dst, src, size); dst 101 mm/maccess.c long __weak probe_kernel_write(void *dst, const void *src, size_t size) dst 104 mm/maccess.c long __probe_kernel_write(void *dst, const void *src, size_t size) dst 110 mm/maccess.c ret = probe_write_common((__force void __user *)dst, src, size); dst 127 mm/maccess.c long __weak probe_user_write(void __user *dst, const void *src, 
size_t size) dst 130 mm/maccess.c long __probe_user_write(void __user *dst, const void *src, size_t size) dst 136 mm/maccess.c if (access_ok(dst, size)) dst 137 mm/maccess.c ret = probe_write_common(dst, src, size); dst 161 mm/maccess.c long strncpy_from_unsafe(char *dst, const void *unsafe_addr, long count) dst 174 mm/maccess.c ret = __get_user(*dst++, (const char __user __force *)src++); dst 175 mm/maccess.c } while (dst[-1] && ret == 0 && src - unsafe_addr < count); dst 177 mm/maccess.c dst[-1] = '\0'; dst 202 mm/maccess.c long strncpy_from_unsafe_user(char *dst, const void __user *unsafe_addr, dst 213 mm/maccess.c ret = strncpy_from_user(dst, unsafe_addr, count); dst 219 mm/maccess.c dst[ret - 1] = '\0'; dst 2148 mm/memory.c static inline void cow_user_page(struct page *dst, struct page *src, unsigned long va, struct vm_area_struct *vma) dst 2159 mm/memory.c void *kaddr = kmap_atomic(dst); dst 2171 mm/memory.c flush_dcache_page(dst); dst 2173 mm/memory.c copy_user_highpage(dst, src, va, vma); dst 4560 mm/memory.c static void copy_user_gigantic_page(struct page *dst, struct page *src, dst 4566 mm/memory.c struct page *dst_base = dst; dst 4571 mm/memory.c copy_user_highpage(dst, src, addr + i*PAGE_SIZE, vma); dst 4574 mm/memory.c dst = mem_map_next(dst, dst_base, i); dst 4580 mm/memory.c struct page *dst; dst 4589 mm/memory.c copy_user_highpage(copy_arg->dst + idx, copy_arg->src + idx, dst 4593 mm/memory.c void copy_user_huge_page(struct page *dst, struct page *src, dst 4600 mm/memory.c .dst = dst, dst 4606 mm/memory.c copy_user_gigantic_page(dst, src, addr, vma, dst 2200 mm/mempolicy.c int vma_dup_policy(struct vm_area_struct *src, struct vm_area_struct *dst) dst 2206 mm/mempolicy.c dst->vm_policy = pol; dst 548 mm/migrate.c static void __copy_gigantic_page(struct page *dst, struct page *src, dst 552 mm/migrate.c struct page *dst_base = dst; dst 557 mm/migrate.c copy_highpage(dst, src); dst 560 mm/migrate.c dst = mem_map_next(dst, dst_base, i); dst 565 mm/migrate.c static void copy_huge_page(struct page *dst, struct page *src) dst 576 mm/migrate.c __copy_gigantic_page(dst, src, nr_pages); dst 587 mm/migrate.c copy_highpage(dst + i, src + i); dst 2165 mm/migrate.c migrate->dst[migrate->npages] = 0; dst 2181 mm/migrate.c migrate->dst[migrate->npages] = 0; dst 2339 mm/migrate.c migrate->dst[migrate->npages] = 0; dst 2679 mm/migrate.c if (!args->src || !args->dst) dst 2707 mm/migrate.c unsigned long *dst) dst 2851 mm/migrate.c struct page *newpage = migrate_pfn_to_page(migrate->dst[i]); dst 2877 mm/migrate.c &migrate->dst[i]); dst 2935 mm/migrate.c struct page *newpage = migrate_pfn_to_page(migrate->dst[i]); dst 262 mm/rmap.c int anon_vma_clone(struct vm_area_struct *dst, struct vm_area_struct *src) dst 280 mm/rmap.c anon_vma_chain_link(dst, avc, anon_vma); dst 290 mm/rmap.c if (!dst->anon_vma && anon_vma != src->anon_vma && dst 292 mm/rmap.c dst->anon_vma = anon_vma; dst 294 mm/rmap.c if (dst->anon_vma) dst 295 mm/rmap.c dst->anon_vma->degree++; dst 306 mm/rmap.c dst->anon_vma = NULL; dst 307 mm/rmap.c unlink_anon_vmas(dst); dst 1684 mm/vmscan.c struct lruvec *lruvec, struct list_head *dst, dst 1731 mm/vmscan.c list_move(&page->lru, dst); dst 1585 mm/zsmalloc.c static void zs_object_copy(struct size_class *class, unsigned long dst, dst 1598 mm/zsmalloc.c obj_to_location(dst, &d_page, &d_objidx); dst 377 mm/zswap.c u8 *dst; dst 379 mm/zswap.c dst = kmalloc_node(PAGE_SIZE * 2, GFP_KERNEL, cpu_to_node(cpu)); dst 380 mm/zswap.c if (!dst) dst 383 mm/zswap.c per_cpu(zswap_dstmem, cpu) = dst; dst 
389 mm/zswap.c u8 *dst; dst 391 mm/zswap.c dst = per_cpu(zswap_dstmem, cpu); dst 392 mm/zswap.c kfree(dst); dst 849 mm/zswap.c u8 *src, *dst; dst 890 mm/zswap.c dst = kmap_atomic(page); dst 893 mm/zswap.c dst, &dlen); dst 895 mm/zswap.c kunmap_atomic(dst); dst 997 mm/zswap.c u8 *src, *dst; dst 1060 mm/zswap.c dst = get_cpu_var(zswap_dstmem); dst 1063 mm/zswap.c ret = crypto_comp_compress(tfm, src, PAGE_SIZE, dst, &dlen); dst 1087 mm/zswap.c memcpy(buf + hlen, dst, dlen); dst 1135 mm/zswap.c u8 *src, *dst; dst 1150 mm/zswap.c dst = kmap_atomic(page); dst 1151 mm/zswap.c zswap_fill_page(dst, entry->value); dst 1152 mm/zswap.c kunmap_atomic(dst); dst 1161 mm/zswap.c dst = kmap_atomic(page); dst 1163 mm/zswap.c ret = crypto_comp_decompress(tfm, src, entry->length, dst, &dlen); dst 1165 mm/zswap.c kunmap_atomic(dst); dst 332 net/atm/clip.c struct dst_entry *dst = skb_dst(skb); dst 342 net/atm/clip.c if (!dst) { dst 348 net/atm/clip.c rt = (struct rtable *) dst; dst 353 net/atm/clip.c n = dst_neigh_lookup(dst, daddr); dst 469 net/atm/clip.c neigh = __neigh_lookup(&arp_tbl, &ip, rt->dst.dev, 1); dst 212 net/atm/lec.c unsigned char *dst; dst 281 net/atm/lec.c dst = lec_h->h_dest; dst 283 net/atm/lec.c vcc = lec_arp_resolve(priv, dst, is_rdesc, &entry); dst 617 net/atm/lec.c unsigned char *src, *dst; dst 630 net/atm/lec.c dst = ((struct lecdatahdr_8023 *)skb->data)->h_dest; dst 647 net/atm/lec.c if (!(dst[0] & 0x01) && /* Never filter Multi/Broadcast */ dst 649 net/atm/lec.c memcmp(dst, dev->dev_addr, dev->addr_len)) { dst 105 net/ax25/ax25_ip.c ax25_address *src, *dst; dst 111 net/ax25/ax25_ip.c dst = (ax25_address *)(bp + 1); dst 115 net/ax25/ax25_ip.c route = ax25_get_route(dst, NULL); dst 196 net/ax25/ax25_ip.c if ((ourskb = ax25_rt_build_path(skb, src, dst, route->digipeat)) == NULL) { dst 233 net/batman-adv/icmp_socket.c orig_node = batadv_orig_hash_find(bat_priv, icmp_header->dst); dst 561 net/batman-adv/multicast.c static void batadv_mcast_mla_br_addr_cpy(char *dst, const struct br_ip *src) dst 564 net/batman-adv/multicast.c ip_eth_mc_map(src->u.ip4, dst); dst 567 net/batman-adv/multicast.c ipv6_eth_mc_map(&src->u.ip6, dst); dst 570 net/batman-adv/multicast.c eth_zero_addr(dst); dst 649 net/batman-adv/netlink.c int batadv_netlink_tpmeter_notify(struct batadv_priv *bat_priv, const u8 *dst, dst 681 net/batman-adv/netlink.c if (nla_put(msg, BATADV_ATTR_ORIG_ADDRESS, ETH_ALEN, dst)) dst 716 net/batman-adv/netlink.c u8 *dst; dst 725 net/batman-adv/netlink.c dst = nla_data(info->attrs[BATADV_ATTR_ORIG_ADDRESS]); dst 743 net/batman-adv/netlink.c batadv_tp_start(bat_priv, dst, test_length, &cookie); dst 769 net/batman-adv/netlink.c u8 *dst; dst 775 net/batman-adv/netlink.c dst = nla_data(info->attrs[BATADV_ATTR_ORIG_ADDRESS]); dst 777 net/batman-adv/netlink.c batadv_tp_stop(bat_priv, dst, BATADV_TP_REASON_CANCEL); dst 20 net/batman-adv/netlink.h int batadv_netlink_tpmeter_notify(struct batadv_priv *bat_priv, const u8 *dst, dst 481 net/batman-adv/network-coding.c const char *dst) dst 484 net/batman-adv/network-coding.c memcpy(key->next_hop, dst, sizeof(key->next_hop)); dst 955 net/batman-adv/network-coding.c u8 *dst) dst 960 net/batman-adv/network-coding.c batadv_nc_hash_key_gen(&nc_path_key, src, dst); dst 982 net/batman-adv/network-coding.c ether_addr_copy(nc_path->next_hop, dst); dst 1025 net/batman-adv/network-coding.c static void batadv_nc_memxor(char *dst, const char *src, unsigned int len) dst 1030 net/batman-adv/network-coding.c dst[i] ^= src[i]; dst 1243 net/batman-adv/network-coding.c 
static bool batadv_nc_skb_coding_possible(struct sk_buff *skb, u8 *dst, u8 *src) dst 1245 net/batman-adv/network-coding.c if (BATADV_SKB_CB(skb)->decoded && !batadv_compare_eth(dst, src)) dst 256 net/batman-adv/routing.c ether_addr_copy(icmph->dst, icmph->orig); dst 305 net/batman-adv/routing.c icmp_packet->orig, icmp_packet->dst); dst 324 net/batman-adv/routing.c ether_addr_copy(icmp_packet->dst, icmp_packet->orig); dst 408 net/batman-adv/routing.c if (batadv_is_my_mac(bat_priv, icmph->dst)) dst 416 net/batman-adv/routing.c orig_node = batadv_orig_hash_find(bat_priv, icmph->dst); dst 1095 net/batman-adv/routing.c unicast_tvlv_packet->dst, dst 412 net/batman-adv/send.c u8 *src, *dst; dst 416 net/batman-adv/send.c dst = ethhdr->h_dest; dst 421 net/batman-adv/send.c dst = dst_hint; dst 423 net/batman-adv/send.c orig_node = batadv_transtable_search(bat_priv, src, dst, vid); dst 214 net/batman-adv/tp_meter.c const u8 *dst, struct batadv_priv *bat_priv, dst 232 net/batman-adv/tp_meter.c batadv_netlink_tpmeter_notify(bat_priv, dst, result, test_time, dst 244 net/batman-adv/tp_meter.c const u8 *dst, dst 248 net/batman-adv/tp_meter.c batadv_tp_batctl_notify(reason, dst, bat_priv, 0, 0, cookie); dst 262 net/batman-adv/tp_meter.c const u8 *dst) dst 268 net/batman-adv/tp_meter.c if (!batadv_compare_eth(pos->other_end, dst)) dst 300 net/batman-adv/tp_meter.c batadv_tp_list_find_session(struct batadv_priv *bat_priv, const u8 *dst, dst 307 net/batman-adv/tp_meter.c if (!batadv_compare_eth(pos->other_end, dst)) dst 592 net/batman-adv/tp_meter.c ether_addr_copy(icmp->dst, orig_node->orig); dst 936 net/batman-adv/tp_meter.c void batadv_tp_start(struct batadv_priv *bat_priv, const u8 *dst, dst 951 net/batman-adv/tp_meter.c tp_vars = batadv_tp_list_find(bat_priv, dst); dst 958 net/batman-adv/tp_meter.c dst, bat_priv, session_cookie); dst 966 net/batman-adv/tp_meter.c batadv_tp_batctl_error_notify(BATADV_TP_REASON_TOO_MANY, dst, dst 978 net/batman-adv/tp_meter.c dst, bat_priv, session_cookie); dst 983 net/batman-adv/tp_meter.c ether_addr_copy(tp_vars->other_end, dst); dst 1040 net/batman-adv/tp_meter.c dst, test_length); dst 1060 net/batman-adv/tp_meter.c void batadv_tp_stop(struct batadv_priv *bat_priv, const u8 *dst, dst 1067 net/batman-adv/tp_meter.c "Meter: stopping test towards %pM\n", dst); dst 1069 net/batman-adv/tp_meter.c orig_node = batadv_orig_hash_find(bat_priv, dst); dst 1155 net/batman-adv/tp_meter.c static int batadv_tp_send_ack(struct batadv_priv *bat_priv, const u8 *dst, dst 1165 net/batman-adv/tp_meter.c orig_node = batadv_orig_hash_find(bat_priv, dst); dst 1189 net/batman-adv/tp_meter.c ether_addr_copy(icmp->dst, orig_node->orig); dst 16 net/batman-adv/tp_meter.h void batadv_tp_start(struct batadv_priv *bat_priv, const u8 *dst, dst 18 net/batman-adv/tp_meter.h void batadv_tp_stop(struct batadv_priv *bat_priv, const u8 *dst, dst 3972 net/batman-adv/translation-table.c bool batadv_is_ap_isolated(struct batadv_priv *bat_priv, u8 *src, u8 *dst, dst 3987 net/batman-adv/translation-table.c tt_local_entry = batadv_tt_local_hash_find(bat_priv, dst, vid); dst 4268 net/batman-adv/translation-table.c u8 *src, u8 *dst, dst 4299 net/batman-adv/translation-table.c ret = batadv_send_tt_response(bat_priv, tt_data, src, dst); dst 4308 net/batman-adv/translation-table.c dst, tt_flag); dst 4316 net/batman-adv/translation-table.c if (batadv_is_my_mac(bat_priv, dst)) { dst 4328 net/batman-adv/translation-table.c "Routing TT_RESPONSE to %pM [%c]\n", dst, tt_flag); dst 4350 net/batman-adv/translation-table.c u8 
*src, u8 *dst, dst 4361 net/batman-adv/translation-table.c if (!batadv_is_my_mac(bat_priv, dst)) dst 43 net/batman-adv/translation-table.h bool batadv_is_ap_isolated(struct batadv_priv *bat_priv, u8 *src, u8 *dst, dst 363 net/batman-adv/tvlv.c u8 *src, u8 *dst, dst 384 net/batman-adv/tvlv.c if (!dst) dst 391 net/batman-adv/tvlv.c dst, tvlv_value, dst 415 net/batman-adv/tvlv.c u8 *src, u8 *dst, dst 439 net/batman-adv/tvlv.c src, dst, tvlv_value, dst 513 net/batman-adv/tvlv.c u8 *src, u8 *dst, dst 586 net/batman-adv/tvlv.c u8 *dst, u8 type, u8 version, dst 597 net/batman-adv/tvlv.c orig_node = batadv_orig_hash_find(bat_priv, dst); dst 618 net/batman-adv/tvlv.c ether_addr_copy(unicast_tvlv_packet->dst, dst); dst 34 net/batman-adv/tvlv.h u8 *src, u8 *dst, dst 43 net/batman-adv/tvlv.h u8 *src, u8 *dst, dst 46 net/batman-adv/tvlv.h u8 *dst, u8 type, u8 version, dst 2419 net/batman-adv/types.h u8 *src, u8 *dst, dst 117 net/bluetooth/6lowpan.c &peer->chan->dst, peer->chan->dst_type); dst 119 net/bluetooth/6lowpan.c if (bacmp(&peer->chan->dst, ba)) dst 198 net/bluetooth/6lowpan.c &peer->chan->dst, peer->chan->dst_type, dst 429 net/bluetooth/6lowpan.c *peer_addr = peer->chan->dst; dst 511 net/bluetooth/6lowpan.c &pentry->chan->dst, pentry->chan->dst_type, dst 666 net/bluetooth/6lowpan.c baswap((void *)peer->lladdr, &chan->dst); dst 722 net/bluetooth/6lowpan.c netdev->ifindex, &chan->dst, chan->dst_type, dst 1010 net/bluetooth/6lowpan.c BT_DBG("conn %p dst %pMR type %d", *conn, &hcon->dst, hcon->dst_type); dst 1149 net/bluetooth/6lowpan.c &conn->hcon->dst, conn->hcon->dst_type, dst 1185 net/bluetooth/6lowpan.c &peer->chan->dst, peer->chan->dst_type); dst 567 net/bluetooth/a2mp.c &mgr->l2cap_conn->hcon->dst); dst 108 net/bluetooth/amp.c bdaddr_t *dst = &mgr->l2cap_conn->hcon->dst; dst 112 net/bluetooth/amp.c hcon = hci_conn_add(hdev, AMP_LINK, dst, role); dst 116 net/bluetooth/amp.c BT_DBG("hcon %p dst %pMR", hcon, dst); dst 189 net/bluetooth/amp.c key = hci_find_link_key(hdev, &conn->dst); dst 191 net/bluetooth/amp.c BT_DBG("No Link key for conn %p dst %pMR", conn, &conn->dst); dst 534 net/bluetooth/amp.c &chan->conn->hcon->dst); dst 118 net/bluetooth/bnep/bnep.h __u8 dst[ETH_ALEN]; dst 125 net/bluetooth/bnep/bnep.h __u8 dst[ETH_ALEN]; dst 48 net/bluetooth/bnep/core.c static struct bnep_session *__bnep_get_session(u8 *dst) dst 55 net/bluetooth/bnep/core.c if (ether_addr_equal(dst, s->eh.h_source)) dst 561 net/bluetooth/bnep/core.c u8 dst[ETH_ALEN], src[ETH_ALEN]; dst 572 net/bluetooth/bnep/core.c baswap((void *) dst, &l2cap_pi(sock->sk)->chan->dst); dst 585 net/bluetooth/bnep/core.c ss = __bnep_get_session(dst); dst 596 net/bluetooth/bnep/core.c memcpy(s->eh.h_source, &dst, ETH_ALEN); dst 663 net/bluetooth/bnep/core.c s = __bnep_get_session(req->dst); dst 679 net/bluetooth/bnep/core.c memcpy(ci->dst, s->eh.h_source, ETH_ALEN); dst 721 net/bluetooth/bnep/core.c s = __bnep_get_session(ci->dst); dst 350 net/bluetooth/cmtp/core.c s = __cmtp_get_session(&l2cap_pi(sock->sk)->chan->dst); dst 356 net/bluetooth/cmtp/core.c bacpy(&session->bdaddr, &l2cap_pi(sock->sk)->chan->dst); dst 66 net/bluetooth/ecdh_helper.c struct scatterlist src, dst; dst 85 net/bluetooth/ecdh_helper.c sg_init_one(&dst, secret, 32); dst 87 net/bluetooth/ecdh_helper.c kpp_request_set_output(req, &dst, 32); dst 173 net/bluetooth/ecdh_helper.c struct scatterlist dst; dst 187 net/bluetooth/ecdh_helper.c sg_init_one(&dst, tmp, 64); dst 189 net/bluetooth/ecdh_helper.c kpp_request_set_output(req, &dst, 64); dst 71 net/bluetooth/hci_conn.c 
bdaddr = &conn->dst; dst 119 net/bluetooth/hci_conn.c hci_conn_params_del(conn->hdev, &conn->dst, conn->dst_type); dst 205 net/bluetooth/hci_conn.c bacpy(&cp.bdaddr, &conn->dst); dst 208 net/bluetooth/hci_conn.c ie = hci_inquiry_cache_lookup(hdev, &conn->dst); dst 330 net/bluetooth/hci_conn.c params = hci_conn_params_lookup(hdev, &conn->dst, conn->dst_type); dst 466 net/bluetooth/hci_conn.c hci_send_cmd(conn->hdev, HCI_OP_USER_CONFIRM_REPLY, sizeof(conn->dst), dst 467 net/bluetooth/hci_conn.c &conn->dst); dst 494 net/bluetooth/hci_conn.c struct hci_conn *hci_conn_add(struct hci_dev *hdev, int type, bdaddr_t *dst, dst 499 net/bluetooth/hci_conn.c BT_DBG("%s dst %pMR", hdev->name, dst); dst 505 net/bluetooth/hci_conn.c bacpy(&conn->dst, dst); dst 619 net/bluetooth/hci_conn.c struct hci_dev *hci_get_route(bdaddr_t *dst, bdaddr_t *src, uint8_t src_type) dst 624 net/bluetooth/hci_conn.c BT_DBG("%pMR -> %pMR", src, dst); dst 666 net/bluetooth/hci_conn.c if (bacmp(&d->bdaddr, dst)) { dst 686 net/bluetooth/hci_conn.c params = hci_pend_le_action_lookup(&hdev->pend_le_conns, &conn->dst, dst 704 net/bluetooth/hci_conn.c mgmt_connect_failed(hdev, &conn->dst, conn->type, dst 811 net/bluetooth/hci_conn.c bacpy(&cp->peer_addr, &conn->dst); dst 853 net/bluetooth/hci_conn.c bacpy(&cp.peer_addr, &conn->dst); dst 899 net/bluetooth/hci_conn.c bacpy(&cp.peer_addr, &conn->dst); dst 948 net/bluetooth/hci_conn.c bacpy(&cp.direct_addr, &conn->dst); dst 961 net/bluetooth/hci_conn.c struct hci_conn *hci_connect_le(struct hci_dev *hdev, bdaddr_t *dst, dst 990 net/bluetooth/hci_conn.c conn = hci_conn_hash_lookup_le(hdev, dst, dst_type); dst 1004 net/bluetooth/hci_conn.c irk = hci_find_irk_by_addr(hdev, dst, dst_type); dst 1006 net/bluetooth/hci_conn.c dst = &irk->rpa; dst 1011 net/bluetooth/hci_conn.c bacpy(&conn->dst, dst); dst 1013 net/bluetooth/hci_conn.c conn = hci_conn_add(hdev, LE_LINK, dst, role); dst 1054 net/bluetooth/hci_conn.c params = hci_conn_params_lookup(hdev, &conn->dst, conn->dst_type); dst 1143 net/bluetooth/hci_conn.c struct hci_conn *hci_connect_le_scan(struct hci_dev *hdev, bdaddr_t *dst, dst 1166 net/bluetooth/hci_conn.c conn = hci_conn_hash_lookup_le(hdev, dst, dst_type); dst 1175 net/bluetooth/hci_conn.c conn = hci_conn_add(hdev, LE_LINK, dst, HCI_ROLE_MASTER); dst 1179 net/bluetooth/hci_conn.c if (hci_explicit_conn_params_set(hdev, dst, dst_type) < 0) { dst 1198 net/bluetooth/hci_conn.c struct hci_conn *hci_connect_acl(struct hci_dev *hdev, bdaddr_t *dst, dst 1210 net/bluetooth/hci_conn.c acl = hci_conn_hash_lookup_ba(hdev, ACL_LINK, dst); dst 1212 net/bluetooth/hci_conn.c acl = hci_conn_add(hdev, ACL_LINK, dst, HCI_ROLE_MASTER); dst 1229 net/bluetooth/hci_conn.c struct hci_conn *hci_connect_sco(struct hci_dev *hdev, int type, bdaddr_t *dst, dst 1235 net/bluetooth/hci_conn.c acl = hci_connect_acl(hdev, dst, BT_SECURITY_LOW, HCI_AT_NO_BONDING); dst 1239 net/bluetooth/hci_conn.c sco = hci_conn_hash_lookup_ba(hdev, type, dst); dst 1241 net/bluetooth/hci_conn.c sco = hci_conn_add(hdev, type, dst, HCI_ROLE_MASTER); dst 1453 net/bluetooth/hci_conn.c bacpy(&cp.bdaddr, &conn->dst); dst 1571 net/bluetooth/hci_conn.c bacpy(&(ci + n)->bdaddr, &c->dst); dst 1607 net/bluetooth/hci_conn.c bacpy(&ci.bdaddr, &conn->dst); dst 3890 net/bluetooth/hci_core.c &c->dst); dst 2206 net/bluetooth/hci_event.c mgmt_disconnect_failed(hdev, &conn->dst, conn->type, dst 2540 net/bluetooth/hci_event.c mgmt_connect_failed(hdev, &conn->dst, conn->type, dst 2697 net/bluetooth/hci_event.c mgmt_disconnect_failed(hdev, &conn->dst, 
conn->type, dst 2711 net/bluetooth/hci_event.c mgmt_device_disconnected(hdev, &conn->dst, conn->type, conn->dst_type, dst 2716 net/bluetooth/hci_event.c hci_remove_link_key(hdev, &conn->dst); dst 2721 net/bluetooth/hci_event.c params = hci_conn_params_lookup(hdev, &conn->dst, conn->dst_type); dst 3112 net/bluetooth/hci_event.c bacpy(&cp.bdaddr, &conn->dst); dst 4003 net/bluetooth/hci_event.c ie = hci_inquiry_cache_lookup(hdev, &conn->dst); dst 4131 net/bluetooth/hci_event.c ie = hci_inquiry_cache_lookup(hdev, &conn->dst); dst 4159 net/bluetooth/hci_event.c bacpy(&cp.bdaddr, &conn->dst); dst 4382 net/bluetooth/hci_event.c data = hci_find_remote_oob_data(hdev, &conn->dst, BDADDR_BREDR); dst 4610 net/bluetooth/hci_event.c mgmt_user_passkey_notify(hdev, &conn->dst, conn->type, dst 4648 net/bluetooth/hci_event.c mgmt_user_passkey_notify(hdev, &conn->dst, conn->type, dst 4803 net/bluetooth/hci_event.c bacpy(&hcon->dst, &bredr_hcon->dst); dst 4988 net/bluetooth/hci_event.c irk = hci_get_irk(hdev, &conn->dst, conn->dst_type); dst 4990 net/bluetooth/hci_event.c bacpy(&conn->dst, &irk->bdaddr); dst 5005 net/bluetooth/hci_event.c if (hci_bdaddr_list_lookup(&hdev->blacklist, &conn->dst, addr_type)) { dst 5048 net/bluetooth/hci_event.c params = hci_pend_le_action_lookup(&hdev->pend_le_conns, &conn->dst, dst 5584 net/bluetooth/hci_event.c ltk = hci_find_ltk(hdev, &conn->dst, conn->dst_type, conn->role); dst 5673 net/bluetooth/hci_event.c params = hci_conn_params_lookup(hdev, &hcon->dst, dst 5687 net/bluetooth/hci_event.c mgmt_new_conn_param(hdev, &hcon->dst, hcon->dst_type, dst 2237 net/bluetooth/hci_request.c 6, &conn->dst); dst 2244 net/bluetooth/hci_request.c bacpy(&rej.bdaddr, &conn->dst); dst 2252 net/bluetooth/hci_request.c bacpy(&rej.bdaddr, &conn->dst); dst 792 net/bluetooth/hidp/core.c &l2cap_pi(session->ctrl_sock->sk)->chan->dst); dst 1326 net/bluetooth/hidp/core.c bacmp(&ctrl_chan->dst, &intr_chan->dst)) dst 1337 net/bluetooth/hidp/core.c session = hidp_session_find(&ctrl_chan->dst); dst 1374 net/bluetooth/hidp/core.c ret = hidp_session_new(&session, &chan->dst, ctrl_sock, dst 676 net/bluetooth/l2cap_core.c bacpy(&chan->dst, &hcon->dst); dst 1783 net/bluetooth/l2cap_core.c bdaddr_t *dst, dst 1806 net/bluetooth/l2cap_core.c dst_match = !bacmp(&c->dst, dst); dst 1815 net/bluetooth/l2cap_core.c dst_any = !bacmp(&c->dst, BDADDR_ANY); dst 3852 net/bluetooth/l2cap_core.c &conn->hcon->dst, ACL_LINK); dst 3895 net/bluetooth/l2cap_core.c bacpy(&chan->dst, &conn->hcon->dst); dst 4583 net/bluetooth/l2cap_core.c &conn->hcon->dst); dst 5026 net/bluetooth/l2cap_core.c bacmp(&conn->hcon->src, &conn->hcon->dst) > 0) { dst 5320 net/bluetooth/l2cap_core.c mgmt_new_conn_param(hcon->hdev, &hcon->dst, hcon->dst_type, dst 5525 net/bluetooth/l2cap_core.c &conn->hcon->dst, LE_LINK); dst 5563 net/bluetooth/l2cap_core.c bacpy(&chan->dst, &conn->hcon->dst); dst 6987 net/bluetooth/l2cap_core.c chan = l2cap_global_chan_by_psm(0, psm, &hcon->src, &hcon->dst, dst 7001 net/bluetooth/l2cap_core.c bacpy(&bt_cb(skb)->l2cap.bdaddr, &hcon->dst); dst 7041 net/bluetooth/l2cap_core.c hci_bdaddr_list_lookup(&hcon->hdev->blacklist, &hcon->dst, dst 7161 net/bluetooth/l2cap_core.c bdaddr_t *dst, u8 dst_type) dst 7168 net/bluetooth/l2cap_core.c BT_DBG("%pMR -> %pMR (type %u) psm 0x%2.2x", &chan->src, dst, dst 7171 net/bluetooth/l2cap_core.c hdev = hci_get_route(dst, &chan->src, chan->src_type); dst 7232 net/bluetooth/l2cap_core.c bacpy(&chan->dst, dst); dst 7247 net/bluetooth/l2cap_core.c hcon = hci_connect_le(hdev, dst, dst_type, dst 
7252 net/bluetooth/l2cap_core.c hcon = hci_connect_le_scan(hdev, dst, dst_type, dst 7258 net/bluetooth/l2cap_core.c hcon = hci_connect_acl(hdev, dst, chan->sec_level, auth_type); dst 7399 net/bluetooth/l2cap_core.c BT_DBG("hcon %p bdaddr %pMR status %d", hcon, &hcon->dst, status); dst 7413 net/bluetooth/l2cap_core.c if (hci_bdaddr_list_lookup(&hdev->blacklist, &hcon->dst, dst_type)) dst 7433 net/bluetooth/l2cap_core.c bacpy(&chan->dst, &hcon->dst); dst 7698 net/bluetooth/l2cap_core.c &c->src, c->src_type, &c->dst, c->dst_type, dst 380 net/bluetooth/l2cap_sock.c bacpy(&la->l2_bdaddr, &chan->dst); dst 33 net/bluetooth/lib.c void baswap(bdaddr_t *dst, const bdaddr_t *src) dst 36 net/bluetooth/lib.c unsigned char *d = (unsigned char *)dst; dst 2620 net/bluetooth/mgmt.c bacpy(&rp->addr[i].bdaddr, &c->dst); dst 2771 net/bluetooth/mgmt.c bacpy(&rp.addr.bdaddr, &conn->dst); dst 2999 net/bluetooth/mgmt.c if (bacmp(&addr->bdaddr, &conn->dst) != 0) { dst 7240 net/bluetooth/mgmt.c bacpy(&ev->addr.bdaddr, &conn->dst); dst 7527 net/bluetooth/mgmt.c bacpy(&ev.addr.bdaddr, &conn->dst); dst 68 net/bluetooth/rfcomm/core.c bdaddr_t *dst, dst 71 net/bluetooth/rfcomm/core.c static struct rfcomm_session *rfcomm_session_get(bdaddr_t *src, bdaddr_t *dst); dst 371 net/bluetooth/rfcomm/core.c static int __rfcomm_dlc_open(struct rfcomm_dlc *d, bdaddr_t *src, bdaddr_t *dst, u8 channel) dst 378 net/bluetooth/rfcomm/core.c d, d->state, src, dst, channel); dst 386 net/bluetooth/rfcomm/core.c s = rfcomm_session_get(src, dst); dst 388 net/bluetooth/rfcomm/core.c s = rfcomm_session_create(src, dst, d->sec_level, &err); dst 425 net/bluetooth/rfcomm/core.c int rfcomm_dlc_open(struct rfcomm_dlc *d, bdaddr_t *src, bdaddr_t *dst, u8 channel) dst 431 net/bluetooth/rfcomm/core.c r = __rfcomm_dlc_open(d, src, dst, channel); dst 537 net/bluetooth/rfcomm/core.c struct rfcomm_dlc *rfcomm_dlc_exists(bdaddr_t *src, bdaddr_t *dst, u8 channel) dst 547 net/bluetooth/rfcomm/core.c s = rfcomm_session_get(src, dst); dst 694 net/bluetooth/rfcomm/core.c static struct rfcomm_session *rfcomm_session_get(bdaddr_t *src, bdaddr_t *dst) dst 702 net/bluetooth/rfcomm/core.c !bacmp(&chan->dst, dst)) dst 728 net/bluetooth/rfcomm/core.c bdaddr_t *dst, dst 737 net/bluetooth/rfcomm/core.c BT_DBG("%pMR -> %pMR", src, dst); dst 769 net/bluetooth/rfcomm/core.c bacpy(&addr.l2_bdaddr, dst); dst 785 net/bluetooth/rfcomm/core.c void rfcomm_session_getaddr(struct rfcomm_session *s, bdaddr_t *src, bdaddr_t *dst) dst 790 net/bluetooth/rfcomm/core.c if (dst) dst 791 net/bluetooth/rfcomm/core.c bacpy(dst, &chan->dst); dst 2106 net/bluetooth/rfcomm/core.c s = rfcomm_session_get(&conn->hdev->bdaddr, &conn->dst); dst 2159 net/bluetooth/rfcomm/core.c &chan->src, &chan->dst, dst 410 net/bluetooth/rfcomm/sock.c bacpy(&rfcomm_pi(sk)->dst, &sa->rc_bdaddr); dst 552 net/bluetooth/rfcomm/sock.c bacpy(&sa->rc_bdaddr, &rfcomm_pi(sk)->dst); dst 959 net/bluetooth/rfcomm/sock.c bdaddr_t src, dst; dst 964 net/bluetooth/rfcomm/sock.c rfcomm_session_getaddr(s, &src, &dst); dst 987 net/bluetooth/rfcomm/sock.c bacpy(&rfcomm_pi(sk)->dst, &dst); dst 1014 net/bluetooth/rfcomm/sock.c &rfcomm_pi(sk)->src, &rfcomm_pi(sk)->dst, dst 58 net/bluetooth/rfcomm/tty.c bdaddr_t dst; dst 116 net/bluetooth/rfcomm/tty.c err = rfcomm_dlc_open(dev->dlc, &dev->src, &dev->dst, dev->channel); dst 181 net/bluetooth/rfcomm/tty.c hdev = hci_get_route(&dev->dst, &dev->src, BDADDR_BREDR); dst 189 net/bluetooth/rfcomm/tty.c conn = hci_conn_hash_lookup_ba(hdev, ACL_LINK, &dev->dst); dst 204 net/bluetooth/rfcomm/tty.c 
return sprintf(buf, "%pMR\n", &dev->dst); dst 265 net/bluetooth/rfcomm/tty.c bacpy(&dev->dst, &req->dst); dst 413 net/bluetooth/rfcomm/tty.c dlc = rfcomm_dlc_exists(&req.src, &req.dst, req.channel); dst 535 net/bluetooth/rfcomm/tty.c bacpy(&(di + n)->dst, &dev->dst); dst 571 net/bluetooth/rfcomm/tty.c bacpy(&di.dst, &dev->dst); dst 752 net/bluetooth/rfcomm/tty.c BT_DBG("dev %p dst %pMR channel %d opened %d", dev, &dev->dst, dst 66 net/bluetooth/sco.c bdaddr_t dst; dst 221 net/bluetooth/sco.c BT_DBG("%pMR -> %pMR", &sco_pi(sk)->src, &sco_pi(sk)->dst); dst 223 net/bluetooth/sco.c hdev = hci_get_route(&sco_pi(sk)->dst, &sco_pi(sk)->src, BDADDR_BREDR); dst 240 net/bluetooth/sco.c hcon = hci_connect_sco(hdev, type, &sco_pi(sk)->dst, dst 574 net/bluetooth/sco.c bacpy(&sco_pi(sk)->dst, &sa->sco_bdaddr); dst 694 net/bluetooth/sco.c bacpy(&sa->sco_bdaddr, &sco_pi(sk)->dst); dst 738 net/bluetooth/sco.c bacpy(&cp.bdaddr, &conn->dst); dst 745 net/bluetooth/sco.c bacpy(&cp.bdaddr, &conn->dst); dst 1062 net/bluetooth/sco.c bacpy(&sco_pi(sk)->dst, &conn->hcon->dst); dst 1114 net/bluetooth/sco.c BT_DBG("hcon %p bdaddr %pMR status %d", hcon, &hcon->dst, status); dst 1168 net/bluetooth/sco.c &sco_pi(sk)->dst, sk->sk_state); dst 157 net/bluetooth/smp.c static inline void swap_buf(const u8 *src, u8 *dst, size_t len) dst 162 net/bluetooth/smp.c dst[len - 1 - i] = src[i]; dst 696 net/bluetooth/smp.c oob_data = hci_find_remote_oob_data(hdev, &hcon->dst, dst 932 net/bluetooth/smp.c ret = mgmt_user_passkey_request(hcon->hdev, &hcon->dst, dst 935 net/bluetooth/smp.c ret = mgmt_user_confirm_request(hcon->hdev, &hcon->dst, dst 939 net/bluetooth/smp.c ret = mgmt_user_passkey_notify(hcon->hdev, &hcon->dst, dst 1026 net/bluetooth/smp.c hci_add_ltk(hcon->hdev, &hcon->dst, hcon->dst_type, dst 1066 net/bluetooth/smp.c bacpy(&hcon->dst, &smp->remote_irk->bdaddr); dst 1074 net/bluetooth/smp.c bacpy(&smp->csrk->bdaddr, &hcon->dst); dst 1080 net/bluetooth/smp.c bacpy(&smp->slave_csrk->bdaddr, &hcon->dst); dst 1086 net/bluetooth/smp.c bacpy(&smp->ltk->bdaddr, &hcon->dst); dst 1092 net/bluetooth/smp.c bacpy(&smp->slave_ltk->bdaddr, &hcon->dst); dst 1107 net/bluetooth/smp.c key = hci_add_link_key(hdev, smp->conn->hcon, &hcon->dst, dst 1139 net/bluetooth/smp.c smp->ltk = hci_add_ltk(hcon->hdev, &hcon->dst, hcon->dst_type, dst 1202 net/bluetooth/smp.c key = hci_find_link_key(hdev, &hcon->dst); dst 1293 net/bluetooth/smp.c ltk = hci_add_ltk(hdev, &hcon->dst, hcon->dst_type, dst 2197 net/bluetooth/smp.c err = mgmt_user_confirm_request(hcon->hdev, &hcon->dst, hcon->type, dst 2212 net/bluetooth/smp.c key = hci_find_ltk(hcon->hdev, &hcon->dst, hcon->dst_type, hcon->role); dst 2245 net/bluetooth/smp.c hci_find_ltk(hcon->hdev, &hcon->dst, hcon->dst_type, hcon->role)) dst 2497 net/bluetooth/smp.c ltk = hci_add_ltk(hdev, &hcon->dst, hcon->dst_type, SMP_LTK, dst 2570 net/bluetooth/smp.c if (hci_is_identity_address(&hcon->dst, hcon->dst_type) && dst 2571 net/bluetooth/smp.c (bacmp(&info->bdaddr, &hcon->dst) || dst 2581 net/bluetooth/smp.c if (hci_bdaddr_is_rpa(&hcon->dst, hcon->dst_type)) dst 2582 net/bluetooth/smp.c bacpy(&rpa, &hcon->dst); dst 2755 net/bluetooth/smp.c if (mgmt_user_passkey_notify(hdev, &hcon->dst, hcon->type, dst 2778 net/bluetooth/smp.c if (mgmt_user_passkey_request(hdev, &hcon->dst, hcon->type, dst 2986 net/bluetooth/smp.c code, &hcon->dst); dst 198 net/bridge/br_arp_nd_proxy.c (f->dst && (f->dst->flags & (BR_PROXYARP_WIFI | dst 452 net/bridge/br_arp_nd_proxy.c if (f->dst && (f->dst->flags & BR_NEIGH_SUPPRESS)) { dst 31 
net/bridge/br_device.c struct net_bridge_fdb_entry *dst; dst 99 net/bridge/br_device.c } else if ((dst = br_fdb_find_rcu(br, dest, vid)) != NULL) { dst 100 net/bridge/br_device.c br_forward(dst->dst, skb, false, true); dst 233 net/bridge/br_device.c dst_metric_set(&br->fake_rtable.dst, RTAX_MTU, new_mtu); dst 135 net/bridge/br_fdb.c if (f && f->dst) dst 136 net/bridge/br_fdb.c dev = f->dst->dev; dst 226 net/bridge/br_fdb.c f->dst = op; dst 237 net/bridge/br_fdb.c f->dst = NULL; dst 253 net/bridge/br_fdb.c if (f && f->is_local && !f->added_by_user && f->dst == p) dst 268 net/bridge/br_fdb.c if (f->dst == p && f->is_local && !f->added_by_user) { dst 309 net/bridge/br_fdb.c if (f && f->is_local && !f->dst && !f->added_by_user) dst 324 net/bridge/br_fdb.c if (f && f->is_local && !f->dst && !f->added_by_user) dst 396 net/bridge/br_fdb.c if (f->dst != p) dst 426 net/bridge/br_fdb.c ret = fdb && fdb->dst && fdb->dst->dev != dev && dst 427 net/bridge/br_fdb.c fdb->dst->state == BR_STATE_FORWARDING; dst 457 net/bridge/br_fdb.c if (!f->dst) dst 469 net/bridge/br_fdb.c fe->port_no = f->dst->port_no; dst 470 net/bridge/br_fdb.c fe->port_hi = f->dst->port_no >> 8; dst 495 net/bridge/br_fdb.c fdb->dst = source; dst 583 net/bridge/br_fdb.c if (unlikely(source != fdb->dst && !fdb->is_sticky)) { dst 584 net/bridge/br_fdb.c fdb->dst = source; dst 648 net/bridge/br_fdb.c ndm->ndm_ifindex = fdb->dst ? fdb->dst->dev->ifindex : br->dev->ifindex; dst 742 net/bridge/br_fdb.c if (filter_dev && (!f->dst || f->dst->dev != filter_dev)) { dst 750 net/bridge/br_fdb.c if (f->dst) dst 753 net/bridge/br_fdb.c if (!filter_dev && f->dst) dst 835 net/bridge/br_fdb.c if (fdb->dst != source) { dst 836 net/bridge/br_fdb.c fdb->dst = source; dst 986 net/bridge/br_fdb.c if (!fdb || fdb->dst != p) dst 1134 net/bridge/br_fdb.c if (fdb->dst != p) { dst 1135 net/bridge/br_fdb.c fdb->dst = p; dst 1208 net/bridge/br_fdb.c if (f->dst == p && f->key.vlan_id == vid) dst 74 net/bridge/br_input.c struct net_bridge_fdb_entry *dst = NULL; dst 146 net/bridge/br_input.c dst = br_fdb_find_rcu(br, eth_hdr(skb)->h_dest, vid); dst 151 net/bridge/br_input.c if (dst) { dst 154 net/bridge/br_input.c if (dst->is_local) dst 157 net/bridge/br_input.c if (now != dst->used) dst 158 net/bridge/br_input.c dst->used = now; dst 159 net/bridge/br_input.c br_forward(dst->dst, skb, local_rcv, false); dst 63 net/bridge/br_multicast.c struct br_ip *dst) dst 65 net/bridge/br_multicast.c return rhashtable_lookup(&br->mdb_hash_tbl, dst, br_mdb_rht_params); dst 69 net/bridge/br_multicast.c struct br_ip *dst) dst 76 net/bridge/br_multicast.c ent = rhashtable_lookup(&br->mdb_hash_tbl, dst, br_mdb_rht_params); dst 83 net/bridge/br_multicast.c __be32 dst, __u16 vid) dst 88 net/bridge/br_multicast.c br_dst.u.ip4 = dst; dst 97 net/bridge/br_multicast.c const struct in6_addr *dst, dst 103 net/bridge/br_multicast.c br_dst.u.ip6 = *dst; dst 2416 net/bridge/br_multicast.c static void mcast_stats_add_dir(u64 *dst, u64 *src) dst 2418 net/bridge/br_multicast.c dst[BR_MCAST_DIR_RX] += src[BR_MCAST_DIR_RX]; dst 2419 net/bridge/br_multicast.c dst[BR_MCAST_DIR_TX] += src[BR_MCAST_DIR_TX]; dst 269 net/bridge/br_netfilter_hooks.c struct dst_entry *dst; dst 274 net/bridge/br_netfilter_hooks.c dst = skb_dst(skb); dst 275 net/bridge/br_netfilter_hooks.c neigh = dst_neigh_lookup_skb(dst, skb); dst 386 net/bridge/br_netfilter_hooks.c if (rt->dst.dev == dev) { dst 387 net/bridge/br_netfilter_hooks.c skb_dst_set(skb, &rt->dst); dst 416 net/bridge/br_netfilter_hooks.c skb_dst_set_noref(skb, 
&rt->dst); dst 200 net/bridge/br_netfilter_ipv6.c skb_dst_set_noref(skb, &rt->dst); dst 24 net/bridge/br_nf_core.c static void fake_update_pmtu(struct dst_entry *dst, struct sock *sk, dst 30 net/bridge/br_nf_core.c static void fake_redirect(struct dst_entry *dst, struct sock *sk, dst 35 net/bridge/br_nf_core.c static u32 *fake_cow_metrics(struct dst_entry *dst, unsigned long old) dst 40 net/bridge/br_nf_core.c static struct neighbour *fake_neigh_lookup(const struct dst_entry *dst, dst 47 net/bridge/br_nf_core.c static unsigned int fake_mtu(const struct dst_entry *dst) dst 49 net/bridge/br_nf_core.c return dst->dev->mtu; dst 76 net/bridge/br_nf_core.c atomic_set(&rt->dst.__refcnt, 1); dst 77 net/bridge/br_nf_core.c rt->dst.dev = br->dev; dst 78 net/bridge/br_nf_core.c dst_init_metrics(&rt->dst, br_dst_default_metrics, true); dst 79 net/bridge/br_nf_core.c rt->dst.flags = DST_NOXFRM | DST_FAKE_RTABLE; dst 80 net/bridge/br_nf_core.c rt->dst.ops = &fake_dst_ops; dst 182 net/bridge/br_private.h struct net_bridge_port *dst; dst 683 net/bridge/br_private.h br_mdb_ip_get(struct net_bridge *br, struct br_ip *dst); dst 124 net/bridge/br_switchdev.c if (!fdb->dst) dst 131 net/bridge/br_switchdev.c fdb->dst->dev, dst 138 net/bridge/br_switchdev.c fdb->dst->dev, dst 52 net/bridge/br_vlan_tunnel.c dst_release(&vlan->tinfo.tunnel_dst->dst); dst 82 net/bridge/br_vlan_tunnel.c dst_release(&vlan->tinfo.tunnel_dst->dst); dst 197 net/bridge/br_vlan_tunnel.c skb_dst_set(skb, dst_clone(&vlan->tinfo.tunnel_dst->dst)); dst 26 net/bridge/netfilter/ebt_ip.c __be16 dst; dst 75 net/bridge/netfilter/ebt_ip.c u32 dst = ntohs(pptr->tcpudphdr.dst); dst 77 net/bridge/netfilter/ebt_ip.c dst < info->dport[0] || dst 78 net/bridge/netfilter/ebt_ip.c dst > info->dport[1])) dst 29 net/bridge/netfilter/ebt_ip6.c __be16 dst; dst 82 net/bridge/netfilter/ebt_ip6.c u16 dst = ntohs(pptr->tcpudphdr.dst); dst 84 net/bridge/netfilter/ebt_ip6.c dst < info->dport[0] || dst 85 net/bridge/netfilter/ebt_ip6.c dst > info->dport[1])) dst 42 net/bridge/netfilter/ebt_log.c __be16 dst; dst 69 net/bridge/netfilter/ebt_log.c pr_cont(" SPT=%u DPT=%u", ntohs(pptr->src), ntohs(pptr->dst)); dst 62 net/bridge/netfilter/ebt_mark.c static void mark_tg_compat_from_user(void *dst, const void *src) dst 65 net/bridge/netfilter/ebt_mark.c struct ebt_mark_t_info *kern = dst; dst 71 net/bridge/netfilter/ebt_mark.c static int mark_tg_compat_to_user(void __user *dst, const void *src) dst 73 net/bridge/netfilter/ebt_mark.c struct compat_ebt_mark_t_info __user *user = dst; dst 46 net/bridge/netfilter/ebt_mark_m.c static void mark_mt_compat_from_user(void *dst, const void *src) dst 49 net/bridge/netfilter/ebt_mark_m.c struct ebt_mark_m_info *kern = dst; dst 57 net/bridge/netfilter/ebt_mark_m.c static int mark_mt_compat_to_user(void __user *dst, const void *src) dst 59 net/bridge/netfilter/ebt_mark_m.c struct compat_ebt_mark_m_info __user *user = dst; dst 47 net/bridge/netfilter/ebtables.c static void ebt_standard_compat_from_user(void *dst, const void *src) dst 53 net/bridge/netfilter/ebtables.c memcpy(dst, &v, sizeof(v)); dst 56 net/bridge/netfilter/ebtables.c static int ebt_standard_compat_to_user(void __user *dst, const void *src) dst 62 net/bridge/netfilter/ebtables.c return copy_to_user(dst, &cv, sizeof(cv)) ? 
-EFAULT : 0; dst 1912 net/bridge/netfilter/ebtables.c void *dst = NULL; dst 1920 net/bridge/netfilter/ebtables.c dst = state->buf_kern_start + state->buf_kern_offset; dst 1930 net/bridge/netfilter/ebtables.c if (dst) { dst 1932 net/bridge/netfilter/ebtables.c match->compat_from_user(dst, mwt->data); dst 1934 net/bridge/netfilter/ebtables.c memcpy(dst, mwt->data, match_size); dst 1950 net/bridge/netfilter/ebtables.c if (dst) { dst 1952 net/bridge/netfilter/ebtables.c wt->compat_from_user(dst, mwt->data); dst 1954 net/bridge/netfilter/ebtables.c memcpy(dst, mwt->data, match_size); dst 1969 net/bridge/netfilter/ebtables.c if (pad > 0 && dst) { dst 1974 net/bridge/netfilter/ebtables.c memset(dst + size_kern, 0, pad); dst 298 net/caif/cfpkt_skbuff.c struct sk_buff *dst = pkt_to_skb(dstpkt); dst 313 net/caif/cfpkt_skbuff.c if (dst->tail + neededtailspace > dst->end) { dst 316 net/caif/cfpkt_skbuff.c dstlen = skb_headlen(dst); dst 322 net/caif/cfpkt_skbuff.c skb_put_data(tmp, dst->data, dstlen); dst 324 net/caif/cfpkt_skbuff.c dst = tmp; dst 326 net/caif/cfpkt_skbuff.c skb_put_data(dst, add->data, skb_headlen(add)); dst 328 net/caif/cfpkt_skbuff.c return skb_to_pkt(dst); dst 142 net/can/gw.c } dst; dst 203 net/can/gw.c static void canframecpy(struct canfd_frame *dst, struct can_frame *src) dst 210 net/can/gw.c dst->can_id = src->can_id; dst 211 net/can/gw.c dst->len = src->can_dlc; dst 212 net/can/gw.c *(u64 *)dst->data = *(u64 *)src->data; dst 215 net/can/gw.c static void canfdframecpy(struct canfd_frame *dst, struct canfd_frame *src) dst 222 net/can/gw.c dst->can_id = src->can_id; dst 223 net/can/gw.c dst->flags = src->flags; dst 224 net/can/gw.c dst->len = src->len; dst 225 net/can/gw.c memcpy(dst->data, src->data, CANFD_MAX_DLEN); dst 433 net/can/gw.c if (!(gwj->dst.dev->flags & IFF_UP)) { dst 440 net/can/gw.c can_skb_prv(skb)->ifindex == gwj->dst.dev->ifindex) dst 465 net/can/gw.c nskb->dev = gwj->dst.dev; dst 535 net/can/gw.c if (gwj->src.dev == dev || gwj->dst.dev == dev) { dst 1066 net/can/gw.c gwj->dst.dev = __dev_get_by_index(net, gwj->ccgw.dst_idx); dst 1068 net/can/gw.c if (!gwj->dst.dev) dst 1071 net/can/gw.c if (gwj->dst.dev->type != ARPHRD_CAN) dst 5 net/ceph/armor.c int ceph_armor(char *dst, const char *src, const char *end); dst 6 net/ceph/armor.c int ceph_unarmor(char *dst, const char *src, const char *end); dst 37 net/ceph/armor.c int ceph_armor(char *dst, const char *src, const char *end) dst 46 net/ceph/armor.c *dst++ = encode_bits(a >> 2); dst 49 net/ceph/armor.c *dst++ = encode_bits(((a & 3) << 4) | (b >> 4)); dst 52 net/ceph/armor.c *dst++ = encode_bits(((b & 15) << 2) | dst 54 net/ceph/armor.c *dst++ = encode_bits(c & 63); dst 56 net/ceph/armor.c *dst++ = encode_bits((b & 15) << 2); dst 57 net/ceph/armor.c *dst++ = '='; dst 60 net/ceph/armor.c *dst++ = encode_bits(((a & 3) << 4)); dst 61 net/ceph/armor.c *dst++ = '='; dst 62 net/ceph/armor.c *dst++ = '='; dst 68 net/ceph/armor.c *(dst++) = '\n'; dst 75 net/ceph/armor.c int ceph_unarmor(char *dst, const char *src, const char *end) dst 95 net/ceph/armor.c *dst++ = (a << 2) | (b >> 4); dst 98 net/ceph/armor.c *dst++ = ((b & 15) << 4) | (c >> 2); dst 101 net/ceph/armor.c *dst++ = ((c & 3) << 6) | d; dst 320 net/ceph/ceph_common.c static int get_secret(struct ceph_crypto_key *dst, const char *name) { dst 353 net/ceph/ceph_common.c err = ceph_crypto_key_clone(dst, ckey); dst 70 net/ceph/crypto.c int ceph_crypto_key_clone(struct ceph_crypto_key *dst, dst 73 net/ceph/crypto.c memcpy(dst, src, sizeof(struct ceph_crypto_key)); dst 
74 net/ceph/crypto.c return set_secret(dst, src->key); dst 19 net/ceph/crypto.h int ceph_crypto_key_clone(struct ceph_crypto_key *dst, dst 33 net/ceph/crypto.h int ceph_armor(char *dst, const char *src, const char *end); dst 34 net/ceph/crypto.h int ceph_unarmor(char *dst, const char *src, const char *end); dst 980 net/ceph/osd_client.c static u32 osd_req_encode_op(struct ceph_osd_op *dst, dst 991 net/ceph/osd_client.c dst->extent.offset = cpu_to_le64(src->extent.offset); dst 992 net/ceph/osd_client.c dst->extent.length = cpu_to_le64(src->extent.length); dst 993 net/ceph/osd_client.c dst->extent.truncate_size = dst 995 net/ceph/osd_client.c dst->extent.truncate_seq = dst 999 net/ceph/osd_client.c dst->cls.class_len = src->cls.class_len; dst 1000 net/ceph/osd_client.c dst->cls.method_len = src->cls.method_len; dst 1001 net/ceph/osd_client.c dst->cls.indata_len = cpu_to_le32(src->cls.indata_len); dst 1004 net/ceph/osd_client.c dst->watch.cookie = cpu_to_le64(src->watch.cookie); dst 1005 net/ceph/osd_client.c dst->watch.ver = cpu_to_le64(0); dst 1006 net/ceph/osd_client.c dst->watch.op = src->watch.op; dst 1007 net/ceph/osd_client.c dst->watch.gen = cpu_to_le32(src->watch.gen); dst 1012 net/ceph/osd_client.c dst->notify.cookie = cpu_to_le64(src->notify.cookie); dst 1017 net/ceph/osd_client.c dst->alloc_hint.expected_object_size = dst 1019 net/ceph/osd_client.c dst->alloc_hint.expected_write_size = dst 1024 net/ceph/osd_client.c dst->xattr.name_len = cpu_to_le32(src->xattr.name_len); dst 1025 net/ceph/osd_client.c dst->xattr.value_len = cpu_to_le32(src->xattr.value_len); dst 1026 net/ceph/osd_client.c dst->xattr.cmp_op = src->xattr.cmp_op; dst 1027 net/ceph/osd_client.c dst->xattr.cmp_mode = src->xattr.cmp_mode; dst 1033 net/ceph/osd_client.c dst->copy_from.snapid = cpu_to_le64(src->copy_from.snapid); dst 1034 net/ceph/osd_client.c dst->copy_from.src_version = dst 1036 net/ceph/osd_client.c dst->copy_from.flags = src->copy_from.flags; dst 1037 net/ceph/osd_client.c dst->copy_from.src_fadvise_flags = dst 1048 net/ceph/osd_client.c dst->op = cpu_to_le16(src->op); dst 1049 net/ceph/osd_client.c dst->flags = cpu_to_le32(src->flags); dst 1050 net/ceph/osd_client.c dst->payload_len = cpu_to_le32(src->indata_len); dst 9378 net/core/dev.c u64 *dst = (u64 *)stats64; dst 9382 net/core/dev.c dst[i] = src[i]; dst 47 net/core/dst.c void dst_init(struct dst_entry *dst, struct dst_ops *ops, dst 51 net/core/dst.c dst->dev = dev; dst 54 net/core/dst.c dst->ops = ops; dst 55 net/core/dst.c dst_init_metrics(dst, dst_default_metrics.metrics, true); dst 56 net/core/dst.c dst->expires = 0UL; dst 58 net/core/dst.c dst->xfrm = NULL; dst 60 net/core/dst.c dst->input = dst_discard; dst 61 net/core/dst.c dst->output = dst_discard_out; dst 62 net/core/dst.c dst->error = 0; dst 63 net/core/dst.c dst->obsolete = initial_obsolete; dst 64 net/core/dst.c dst->header_len = 0; dst 65 net/core/dst.c dst->trailer_len = 0; dst 67 net/core/dst.c dst->tclassid = 0; dst 69 net/core/dst.c dst->lwtstate = NULL; dst 70 net/core/dst.c atomic_set(&dst->__refcnt, initial_ref); dst 71 net/core/dst.c dst->__use = 0; dst 72 net/core/dst.c dst->lastuse = jiffies; dst 73 net/core/dst.c dst->flags = flags; dst 82 net/core/dst.c struct dst_entry *dst; dst 93 net/core/dst.c dst = kmem_cache_alloc(ops->kmem_cachep, GFP_ATOMIC); dst 94 net/core/dst.c if (!dst) dst 97 net/core/dst.c dst_init(dst, ops, dev, initial_ref, initial_obsolete, flags); dst 99 net/core/dst.c return dst; dst 103 net/core/dst.c struct dst_entry *dst_destroy(struct dst_entry * 
dst) dst 110 net/core/dst.c if (dst->xfrm) { dst 111 net/core/dst.c struct xfrm_dst *xdst = (struct xfrm_dst *) dst; dst 116 net/core/dst.c if (!(dst->flags & DST_NOCOUNT)) dst 117 net/core/dst.c dst_entries_add(dst->ops, -1); dst 119 net/core/dst.c if (dst->ops->destroy) dst 120 net/core/dst.c dst->ops->destroy(dst); dst 121 net/core/dst.c if (dst->dev) dst 122 net/core/dst.c dev_put(dst->dev); dst 124 net/core/dst.c lwtstate_put(dst->lwtstate); dst 126 net/core/dst.c if (dst->flags & DST_METADATA) dst 127 net/core/dst.c metadata_dst_free((struct metadata_dst *)dst); dst 129 net/core/dst.c kmem_cache_free(dst->ops->kmem_cachep, dst); dst 131 net/core/dst.c dst = child; dst 132 net/core/dst.c if (dst) dst 133 net/core/dst.c dst_release_immediate(dst); dst 140 net/core/dst.c struct dst_entry *dst = container_of(head, struct dst_entry, rcu_head); dst 142 net/core/dst.c dst = dst_destroy(dst); dst 154 net/core/dst.c void dst_dev_put(struct dst_entry *dst) dst 156 net/core/dst.c struct net_device *dev = dst->dev; dst 158 net/core/dst.c dst->obsolete = DST_OBSOLETE_DEAD; dst 159 net/core/dst.c if (dst->ops->ifdown) dst 160 net/core/dst.c dst->ops->ifdown(dst, dev, true); dst 161 net/core/dst.c dst->input = dst_discard; dst 162 net/core/dst.c dst->output = dst_discard_out; dst 163 net/core/dst.c dst->dev = blackhole_netdev; dst 164 net/core/dst.c dev_hold(dst->dev); dst 169 net/core/dst.c void dst_release(struct dst_entry *dst) dst 171 net/core/dst.c if (dst) { dst 174 net/core/dst.c newrefcnt = atomic_dec_return(&dst->__refcnt); dst 177 net/core/dst.c __func__, dst, newrefcnt); dst 179 net/core/dst.c call_rcu(&dst->rcu_head, dst_destroy_rcu); dst 184 net/core/dst.c void dst_release_immediate(struct dst_entry *dst) dst 186 net/core/dst.c if (dst) { dst 189 net/core/dst.c newrefcnt = atomic_dec_return(&dst->__refcnt); dst 192 net/core/dst.c __func__, dst, newrefcnt); dst 194 net/core/dst.c dst_destroy(dst); dst 199 net/core/dst.c u32 *dst_cow_metrics_generic(struct dst_entry *dst, unsigned long old) dst 211 net/core/dst.c prev = cmpxchg(&dst->_metrics, old, new); dst 229 net/core/dst.c void __dst_destroy_metrics_generic(struct dst_entry *dst, unsigned long old) dst 234 net/core/dst.c prev = cmpxchg(&dst->_metrics, old, new); dst 262 net/core/dst.c struct dst_entry *dst; dst 264 net/core/dst.c dst = &md_dst->dst; dst 265 net/core/dst.c dst_init(dst, &md_dst_ops, NULL, 1, DST_OBSOLETE_NONE, dst 268 net/core/dst.c dst->input = dst_md_discard; dst 269 net/core/dst.c dst->output = dst_md_discard_out; dst 271 net/core/dst.c memset(dst + 1, 0, sizeof(*md_dst) + optslen - sizeof(*dst)); dst 19 net/core/dst_cache.c struct dst_entry *dst; dst 28 net/core/dst_cache.c struct dst_entry *dst, u32 cookie) dst 30 net/core/dst_cache.c dst_release(dst_cache->dst); dst 31 net/core/dst_cache.c if (dst) dst 32 net/core/dst_cache.c dst_hold(dst); dst 35 net/core/dst_cache.c dst_cache->dst = dst; dst 41 net/core/dst_cache.c struct dst_entry *dst; dst 43 net/core/dst_cache.c dst = idst->dst; dst 44 net/core/dst_cache.c if (!dst) dst 48 net/core/dst_cache.c dst_hold(dst); dst 51 net/core/dst_cache.c (dst->obsolete && !dst->ops->check(dst, idst->cookie)))) { dst 53 net/core/dst_cache.c dst_release(dst); dst 56 net/core/dst_cache.c return dst; dst 75 net/core/dst_cache.c struct dst_entry *dst; dst 81 net/core/dst_cache.c dst = dst_cache_per_cpu_get(dst_cache, idst); dst 82 net/core/dst_cache.c if (!dst) dst 86 net/core/dst_cache.c return container_of(dst, struct rtable, dst); dst 90 net/core/dst_cache.c void 
dst_cache_set_ip4(struct dst_cache *dst_cache, struct dst_entry *dst, dst 99 net/core/dst_cache.c dst_cache_per_cpu_dst_set(idst, dst, 0); dst 105 net/core/dst_cache.c void dst_cache_set_ip6(struct dst_cache *dst_cache, struct dst_entry *dst, dst 114 net/core/dst_cache.c dst_cache_per_cpu_dst_set(this_cpu_ptr(dst_cache->cache), dst, dst 115 net/core/dst_cache.c rt6_get_cookie((struct rt6_info *)dst)); dst 124 net/core/dst_cache.c struct dst_entry *dst; dst 130 net/core/dst_cache.c dst = dst_cache_per_cpu_get(dst_cache, idst); dst 131 net/core/dst_cache.c if (!dst) dst 135 net/core/dst_cache.c return dst; dst 160 net/core/dst_cache.c dst_release(per_cpu_ptr(dst_cache->cache, i)->dst); dst 388 net/core/ethtool.c void ethtool_intersect_link_masks(struct ethtool_link_ksettings *dst, dst 395 net/core/ethtool.c dst->link_modes.supported[idx] &= dst 397 net/core/ethtool.c dst->link_modes.advertising[idx] &= dst 403 net/core/ethtool.c void ethtool_convert_legacy_u32_to_link_mode(unsigned long *dst, dst 406 net/core/ethtool.c bitmap_zero(dst, __ETHTOOL_LINK_MODE_MASK_NBITS); dst 407 net/core/ethtool.c dst[0] = legacy_u32; dst 2907 net/core/ethtool.c ether_addr_copy(match->key.eth_addrs.dst, dst 2909 net/core/ethtool.c ether_addr_copy(match->mask.eth_addrs.dst, dst 2932 net/core/ethtool.c match->key.ipv4.dst = v4_spec->ip4dst; dst 2933 net/core/ethtool.c match->mask.ipv4.dst = v4_m_spec->ip4dst; dst 2947 net/core/ethtool.c match->key.tp.dst = v4_spec->pdst; dst 2948 net/core/ethtool.c match->mask.tp.dst = v4_m_spec->pdst; dst 2982 net/core/ethtool.c memcpy(&match->key.ipv6.dst, v6_spec->ip6dst, dst 2983 net/core/ethtool.c sizeof(match->key.ipv6.dst)); dst 2984 net/core/ethtool.c memcpy(&match->mask.ipv6.dst, v6_m_spec->ip6dst, dst 2985 net/core/ethtool.c sizeof(match->mask.ipv6.dst)); dst 2999 net/core/ethtool.c match->key.tp.dst = v6_spec->pdst; dst 3000 net/core/ethtool.c match->mask.tp.dst = v6_m_spec->pdst; dst 3078 net/core/ethtool.c memcpy(match->key.eth_addrs.dst, ext_h_spec->h_dest, dst 3080 net/core/ethtool.c memcpy(match->mask.eth_addrs.dst, ext_m_spec->h_dest, dst 3524 net/core/filter.c struct bpf_dtab_netdev *dst = fwd; dst 3526 net/core/filter.c err = dev_map_enqueue(dst, xdp, dev_rx); dst 3666 net/core/filter.c struct bpf_dtab_netdev *dst = fwd; dst 3668 net/core/filter.c err = dev_map_generic_redirect(dst, skb, xdp_prog); dst 3984 net/core/filter.c memcpy(&info->key.u.ipv6.dst, from->remote_ipv6, dst 3989 net/core/filter.c info->key.u.ipv4.dst = cpu_to_be32(from->remote_ipv4); dst 4714 net/core/filter.c struct in6_addr *dst = (struct in6_addr *)params->ipv6_dst; dst 4717 net/core/filter.c *dst = nhc->nhc_gw.ipv6; dst 4718 net/core/filter.c neigh = __ipv6_neigh_lookup_noref_stub(dev, dst); dst 4733 net/core/filter.c struct in6_addr *dst = (struct in6_addr *) params->ipv6_dst; dst 4744 net/core/filter.c if (rt6_need_strict(dst) || rt6_need_strict(src)) dst 4769 net/core/filter.c fl6.daddr = *dst; dst 4815 net/core/filter.c mtu = ipv6_stub->ip6_mtu_from_fib6(&res, dst, src); dst 4824 net/core/filter.c *dst = res.nh->fib_nh_gw6; dst 4832 net/core/filter.c neigh = __ipv6_neigh_lookup_noref_stub(dev, dst); dst 338 net/core/flow_dissector.c ipv4->dst = key->u.ipv4.dst; dst 353 net/core/flow_dissector.c ipv6->dst = key->u.ipv6.dst; dst 374 net/core/flow_dissector.c tp->dst = key->tp_dst; dst 806 net/core/flow_dissector.c key_addrs->v4addrs.dst = flow_keys->ipv4_dst; dst 831 net/core/flow_dissector.c key_ports->dst = flow_keys->dport; dst 1448 net/core/flow_dissector.c return 
flow->addrs.v4addrs.dst; dst 1451 net/core/flow_dissector.c &flow->addrs.v6addrs.dst); dst 1464 net/core/flow_dissector.c addr_diff = (__force u32)keys->addrs.v4addrs.dst - dst 1468 net/core/flow_dissector.c ((__force u16)keys->ports.dst < dst 1470 net/core/flow_dissector.c swap(keys->addrs.v4addrs.src, keys->addrs.v4addrs.dst); dst 1471 net/core/flow_dissector.c swap(keys->ports.src, keys->ports.dst); dst 1475 net/core/flow_dissector.c addr_diff = memcmp(&keys->addrs.v6addrs.dst, dst 1477 net/core/flow_dissector.c sizeof(keys->addrs.v6addrs.dst)); dst 1480 net/core/flow_dissector.c ((__force u16)keys->ports.dst < dst 1484 net/core/flow_dissector.c keys->addrs.v6addrs.dst.s6_addr32[i]); dst 1485 net/core/flow_dissector.c swap(keys->ports.src, keys->ports.dst); dst 1529 net/core/flow_dissector.c __be32 dst; dst 1546 net/core/flow_dissector.c data->dst = flow->addrs.v4addrs.dst; dst 1675 net/core/flow_dissector.c memcpy(&keys->addrs.v6addrs.dst, &fl6->daddr, dst 1676 net/core/flow_dissector.c sizeof(keys->addrs.v6addrs.dst)); dst 1679 net/core/flow_dissector.c keys->ports.dst = fl6->fl6_dport; dst 38 net/core/lwt_bpf.c struct dst_entry *dst, bool can_redirect) dst 117 net/core/lwt_bpf.c struct dst_entry *dst = skb_dst(skb); dst 121 net/core/lwt_bpf.c bpf = bpf_lwt_lwtunnel(dst->lwtstate); dst 123 net/core/lwt_bpf.c ret = run_lwt_bpf(skb, &bpf->in, dst, NO_REDIRECT); dst 130 net/core/lwt_bpf.c if (unlikely(!dst->lwtstate->orig_input)) { dst 135 net/core/lwt_bpf.c return dst->lwtstate->orig_input(skb); dst 140 net/core/lwt_bpf.c struct dst_entry *dst = skb_dst(skb); dst 144 net/core/lwt_bpf.c bpf = bpf_lwt_lwtunnel(dst->lwtstate); dst 146 net/core/lwt_bpf.c ret = run_lwt_bpf(skb, &bpf->out, dst, NO_REDIRECT); dst 151 net/core/lwt_bpf.c if (unlikely(!dst->lwtstate->orig_output)) { dst 158 net/core/lwt_bpf.c return dst->lwtstate->orig_output(net, sk, skb); dst 179 net/core/lwt_bpf.c struct dst_entry *dst = NULL; dst 220 net/core/lwt_bpf.c dst = &rt->dst; dst 233 net/core/lwt_bpf.c dst = ipv6_stub->ipv6_dst_lookup_flow(net, skb->sk, &fl6, NULL); dst 234 net/core/lwt_bpf.c if (IS_ERR(dst)) { dst 235 net/core/lwt_bpf.c err = PTR_ERR(dst); dst 239 net/core/lwt_bpf.c if (unlikely(dst->error)) { dst 240 net/core/lwt_bpf.c err = dst->error; dst 241 net/core/lwt_bpf.c dst_release(dst); dst 250 net/core/lwt_bpf.c err = skb_cow_head(skb, LL_RESERVED_SPACE(dst->dev)); dst 255 net/core/lwt_bpf.c skb_dst_set(skb, dst); dst 271 net/core/lwt_bpf.c struct dst_entry *dst = skb_dst(skb); dst 274 net/core/lwt_bpf.c bpf = bpf_lwt_lwtunnel(dst->lwtstate); dst 279 net/core/lwt_bpf.c ret = run_lwt_bpf(skb, &bpf->xmit, dst, CAN_REDIRECT); dst 314 net/core/lwtunnel.c struct dst_entry *dst = skb_dst(skb); dst 319 net/core/lwtunnel.c if (!dst) dst 321 net/core/lwtunnel.c lwtstate = dst->lwtstate; dst 348 net/core/lwtunnel.c struct dst_entry *dst = skb_dst(skb); dst 353 net/core/lwtunnel.c if (!dst) dst 356 net/core/lwtunnel.c lwtstate = dst->lwtstate; dst 383 net/core/lwtunnel.c struct dst_entry *dst = skb_dst(skb); dst 388 net/core/lwtunnel.c if (!dst) dst 390 net/core/lwtunnel.c lwtstate = dst->lwtstate; dst 1365 net/core/neighbour.c struct dst_entry *dst = skb_dst(skb); dst 1379 net/core/neighbour.c if (dst) { dst 1380 net/core/neighbour.c n2 = dst_neigh_lookup_skb(dst, skb); dst 1860 net/core/neighbour.c void *dst, *lladdr; dst 1899 net/core/neighbour.c dst = nla_data(tb[NDA_DST]); dst 1909 net/core/neighbour.c pn = pneigh_lookup(tbl, net, dst, dev, 1); dst 1929 net/core/neighbour.c neigh = neigh_lookup(tbl, dst, 
dev); dst 1940 net/core/neighbour.c neigh = ___neigh_create(tbl, dst, dev, exempt_from_gc, true); dst 2746 net/core/neighbour.c void **dst, int *dev_idx, u8 *ndm_flags, dst 2793 net/core/neighbour.c *dst = nla_data(tb[i]); dst 2870 net/core/neighbour.c void *dst = NULL; dst 2875 net/core/neighbour.c err = neigh_valid_get_req(nlh, &tbl, &dst, &dev_idx, &ndm_flags, dst 2888 net/core/neighbour.c if (!dst) { dst 2896 net/core/neighbour.c pn = pneigh_lookup(tbl, net, dst, dev, 0); dst 2910 net/core/neighbour.c neigh = neigh_lookup(tbl, dst, dev); dst 2514 net/core/pktgen.c skb->_skb_refdst = (unsigned long)&pkt_dev->xdst.u.dst | SKB_DST_NOREF; dst 3659 net/core/pktgen.c pkt_dev->xdst.u.dst.dev = pkt_dev->odev; dst 3660 net/core/pktgen.c dst_init_metrics(&pkt_dev->xdst.u.dst, pktgen_dst_metrics, false); dst 3661 net/core/pktgen.c pkt_dev->xdst.child = &pkt_dev->xdst.u.dst; dst 3662 net/core/pktgen.c pkt_dev->xdst.u.dst.ops = &pkt_dev->dstops; dst 801 net/core/rtnetlink.c int rtnl_put_cacheinfo(struct sk_buff *skb, struct dst_entry *dst, u32 id, dst 809 net/core/rtnetlink.c if (dst) { dst 810 net/core/rtnetlink.c ci.rta_lastuse = jiffies_delta_to_clock_t(jiffies - dst->lastuse); dst 811 net/core/rtnetlink.c ci.rta_used = dst->__use; dst 812 net/core/rtnetlink.c ci.rta_clntref = atomic_read(&dst->__refcnt); dst 1048 net/core/skbuff.c struct sk_buff *skb_morph(struct sk_buff *dst, struct sk_buff *src) dst 1050 net/core/skbuff.c skb_release_all(dst); dst 1051 net/core/skbuff.c return __skb_clone(dst, src); dst 76 net/core/skmsg.c int sk_msg_clone(struct sock *sk, struct sk_msg *dst, struct sk_msg *src, dst 99 net/core/skmsg.c if (dst->sg.end) dst 100 net/core/skmsg.c sgd = sk_msg_elem(dst, dst->sg.end - 1); dst 106 net/core/skmsg.c dst->sg.size += sge_len; dst 107 net/core/skmsg.c } else if (!sk_msg_full(dst)) { dst 109 net/core/skmsg.c sk_msg_page_add(dst, sg_page(sge), sge_len, sge_off); dst 544 net/core/sock.c struct dst_entry *dst = __sk_dst_get(sk); dst 546 net/core/sock.c if (dst && dst->obsolete && dst->ops->check(dst, cookie) == NULL) { dst 550 net/core/sock.c dst_release(dst); dst 554 net/core/sock.c return dst; dst 560 net/core/sock.c struct dst_entry *dst = sk_dst_get(sk); dst 562 net/core/sock.c if (dst && dst->obsolete && dst->ops->check(dst, cookie) == NULL) { dst 564 net/core/sock.c dst_release(dst); dst 568 net/core/sock.c return dst; dst 1203 net/core/sock.c static int groups_to_user(gid_t __user *dst, const struct group_info *src) dst 1209 net/core/sock.c if (put_user(from_kgid_munged(user_ns, src->gid[i]), dst + i)) dst 1922 net/core/sock.c void sk_setup_caps(struct sock *sk, struct dst_entry *dst) dst 1926 net/core/sock.c sk_dst_set(sk, dst); dst 1927 net/core/sock.c sk->sk_route_caps = dst->dev->features | sk->sk_route_forced_caps; dst 1932 net/core/sock.c if (dst->header_len && !xfrm_dst_offload_ok(dst)) { dst 1936 net/core/sock.c sk->sk_gso_max_size = dst->dev->gso_max_size; dst 1937 net/core/sock.c max_segs = max_t(u32, dst->dev->gso_max_segs, 1); dst 119 net/core/utils.c u8 *dst, dst 164 net/core/utils.c memcpy(dst, dbuf, sizeof(dbuf)); dst 185 net/core/utils.c u8 *dst, dst 285 net/core/utils.c dst[i--] = *d--; dst 287 net/core/utils.c dst[i--] = 0; dst 289 net/core/utils.c dst[i--] = *d--; dst 291 net/core/utils.c memcpy(dst, dbuf, sizeof(dbuf)); dst 278 net/dccp/dccp.h struct dst_entry *dst, dst 295 net/dccp/dccp.h struct sk_buff *dccp_make_response(const struct sock *sk, struct dst_entry *dst, dst 114 net/dccp/ipv4.c sk_setup_caps(sk, &rt->dst); dst 147 net/dccp/ipv4.c 
struct dst_entry *dst; dst 158 net/dccp/ipv4.c dst = inet_csk_update_pmtu(sk, mtu); dst 159 net/dccp/ipv4.c if (!dst) dst 165 net/dccp/ipv4.c if (mtu < dst_mtu(dst) && ip_dont_fragment(sk, dst)) dst 168 net/dccp/ipv4.c mtu = dst_mtu(dst); dst 188 net/dccp/ipv4.c struct dst_entry *dst = __sk_dst_check(sk, 0); dst 190 net/dccp/ipv4.c if (dst) dst 191 net/dccp/ipv4.c dst->ops->redirect(dst, sk, skb); dst 362 net/dccp/ipv4.c __be32 src, __be32 dst) dst 364 net/dccp/ipv4.c return csum_tcpudp_magic(src, dst, skb->len, IPPROTO_DCCP, skb->csum); dst 396 net/dccp/ipv4.c struct dst_entry *dst, dst 421 net/dccp/ipv4.c if (dst == NULL && (dst = inet_csk_route_child_sock(sk, newsk, req)) == NULL) dst 424 net/dccp/ipv4.c sk_setup_caps(newsk, dst); dst 426 net/dccp/ipv4.c dccp_sync_mss(newsk, dst_mtu(dst)); dst 440 net/dccp/ipv4.c dst_release(dst); dst 474 net/dccp/ipv4.c return &rt->dst; dst 481 net/dccp/ipv4.c struct dst_entry *dst; dst 484 net/dccp/ipv4.c dst = inet_csk_route_req(sk, &fl4, req); dst 485 net/dccp/ipv4.c if (dst == NULL) dst 488 net/dccp/ipv4.c skb = dccp_make_response(sk, dst, req); dst 504 net/dccp/ipv4.c dst_release(dst); dst 513 net/dccp/ipv4.c struct dst_entry *dst; dst 524 net/dccp/ipv4.c dst = dccp_v4_route_skb(net, ctl_sk, rxskb); dst 525 net/dccp/ipv4.c if (dst == NULL) dst 535 net/dccp/ipv4.c skb_dst_set(skb, dst_clone(dst)); dst 549 net/dccp/ipv4.c dst_release(dst); dst 126 net/dccp/ipv6.c struct dst_entry *dst = __sk_dst_check(sk, np->dst_cookie); dst 128 net/dccp/ipv6.c if (dst) dst 129 net/dccp/ipv6.c dst->ops->redirect(dst, sk, skb); dst 135 net/dccp/ipv6.c struct dst_entry *dst = NULL; dst 145 net/dccp/ipv6.c dst = inet6_csk_update_pmtu(sk, ntohl(info)); dst 146 net/dccp/ipv6.c if (!dst) dst 149 net/dccp/ipv6.c if (inet_csk(sk)->icsk_pmtu_cookie > dst_mtu(dst)) dst 150 net/dccp/ipv6.c dccp_sync_mss(sk, dst_mtu(dst)); dst 196 net/dccp/ipv6.c struct dst_entry *dst; dst 213 net/dccp/ipv6.c dst = ip6_dst_lookup_flow(sock_net(sk), sk, &fl6, final_p); dst 214 net/dccp/ipv6.c if (IS_ERR(dst)) { dst 215 net/dccp/ipv6.c err = PTR_ERR(dst); dst 216 net/dccp/ipv6.c dst = NULL; dst 220 net/dccp/ipv6.c skb = dccp_make_response(sk, dst, req); dst 240 net/dccp/ipv6.c dst_release(dst); dst 258 net/dccp/ipv6.c struct dst_entry *dst; dst 285 net/dccp/ipv6.c dst = ip6_dst_lookup_flow(sock_net(ctl_sk), ctl_sk, &fl6, NULL); dst 286 net/dccp/ipv6.c if (!IS_ERR(dst)) { dst 287 net/dccp/ipv6.c skb_dst_set(skb, dst); dst 399 net/dccp/ipv6.c struct dst_entry *dst, dst 415 net/dccp/ipv6.c newsk = dccp_v4_request_recv_sock(sk, skb, req, dst, dst 458 net/dccp/ipv6.c if (!dst) { dst 461 net/dccp/ipv6.c dst = inet6_csk_route_req(sk, &fl6, req, IPPROTO_DCCP); dst 462 net/dccp/ipv6.c if (!dst) dst 476 net/dccp/ipv6.c ip6_dst_store(newsk, dst, NULL, NULL); dst 477 net/dccp/ipv6.c newsk->sk_route_caps = dst->dev->features & ~(NETIF_F_IP_CSUM | dst 526 net/dccp/ipv6.c dccp_sync_mss(newsk, dst_mtu(dst)); dst 551 net/dccp/ipv6.c dst_release(dst); dst 814 net/dccp/ipv6.c struct dst_entry *dst; dst 915 net/dccp/ipv6.c dst = ip6_dst_lookup_flow(sock_net(sk), sk, &fl6, final_p); dst 916 net/dccp/ipv6.c if (IS_ERR(dst)) { dst 917 net/dccp/ipv6.c err = PTR_ERR(dst); dst 930 net/dccp/ipv6.c ip6_dst_store(sk, dst, NULL, NULL); dst 390 net/dccp/output.c struct sk_buff *dccp_make_response(const struct sock *sk, struct dst_entry *dst, dst 411 net/dccp/output.c skb_dst_set(skb, dst_clone(dst)); dst 536 net/dccp/output.c struct dst_entry *dst = __sk_dst_get(sk); dst 542 net/dccp/output.c dccp_sync_mss(sk, dst_mtu(dst)); 
dst 905 net/decnet/af_decnet.c struct dst_entry *dst; dst 952 net/decnet/af_decnet.c dst = __sk_dst_get(sk); dst 953 net/decnet/af_decnet.c sk->sk_route_caps = dst->dev->features; dst 956 net/decnet/af_decnet.c scp->segsize_loc = dst_metric_advmss(dst); dst 1075 net/decnet/af_decnet.c struct dst_entry *dst; dst 1103 net/decnet/af_decnet.c dst = skb_dst(skb); dst 1104 net/decnet/af_decnet.c sk_dst_set(newsk, dst); dst 1132 net/decnet/af_decnet.c *(__le16 *)(DN_SK(newsk)->addr.sdn_add.a_addr) = cb->dst; dst 1870 net/decnet/af_decnet.c struct dst_entry *dst = __sk_dst_get(sk); dst 1879 net/decnet/af_decnet.c if (dst) { dst 1880 net/decnet/af_decnet.c u32 mtu = dst_mtu(dst); dst 1881 net/decnet/af_decnet.c mss_now = min_t(int, dn_mss_from_pmtu(dst->dev, mtu), mss_now); dst 559 net/decnet/dn_fib.c static void fib_magic(int cmd, int type, __le16 dst, int dst_len, struct dn_ifaddr *ifa) dst 568 net/decnet/dn_fib.c __le16 dst; dst 570 net/decnet/dn_fib.c .dst = dst, dst 175 net/decnet/dn_neigh.c struct dst_entry *dst = skb_dst(skb); dst 176 net/decnet/dn_neigh.c struct dn_route *rt = (struct dn_route *)dst; dst 200 net/decnet/dn_neigh.c struct dst_entry *dst = skb_dst(skb); dst 201 net/decnet/dn_neigh.c struct dn_route *rt = (struct dn_route *)dst; dst 240 net/decnet/dn_neigh.c dn_dn2eth(lp->d_id, cb->dst); dst 285 net/decnet/dn_neigh.c sp->dstnode = cb->dst; dst 327 net/decnet/dn_neigh.c sp->dstnode = cb->dst & cpu_to_le16(0x03ff); dst 340 net/decnet/dn_neigh.c struct dst_entry *dst = skb_dst(skb); dst 341 net/decnet/dn_neigh.c struct dn_route *rt = (struct dn_route *) dst; dst 81 net/decnet/dn_nsp_in.c le16_to_cpu(cb->dst), dst 773 net/decnet/dn_nsp_in.c swap(cb->dst, cb->src); dst 70 net/decnet/dn_nsp_out.c struct dst_entry *dst; dst 76 net/decnet/dn_nsp_out.c dst = sk_dst_check(sk, 0); dst 77 net/decnet/dn_nsp_out.c if (dst) { dst 79 net/decnet/dn_nsp_out.c skb_dst_set(skb, dst); dst 91 net/decnet/dn_nsp_out.c dst = sk_dst_get(sk); dst 92 net/decnet/dn_nsp_out.c sk->sk_route_caps = dst->dev->features; dst 526 net/decnet/dn_nsp_out.c struct dst_entry *dst, dst 533 net/decnet/dn_nsp_out.c if ((dst == NULL) || (rem == 0)) { dst 535 net/decnet/dn_nsp_out.c le16_to_cpu(rem), dst); dst 562 net/decnet/dn_nsp_out.c skb_dst_set(skb, dst_clone(dst)); dst 106 net/decnet/dn_route.c static unsigned int dn_dst_default_advmss(const struct dst_entry *dst); dst 107 net/decnet/dn_route.c static unsigned int dn_dst_mtu(const struct dst_entry *dst); dst 112 net/decnet/dn_route.c static void dn_dst_update_pmtu(struct dst_entry *dst, struct sock *sk, dst 115 net/decnet/dn_route.c static void dn_dst_redirect(struct dst_entry *dst, struct sock *sk, dst 117 net/decnet/dn_route.c static struct neighbour *dn_dst_neigh_lookup(const struct dst_entry *dst, dst 147 net/decnet/dn_route.c static void dn_dst_destroy(struct dst_entry *dst) dst 149 net/decnet/dn_route.c struct dn_route *rt = (struct dn_route *) dst; dst 153 net/decnet/dn_route.c dst_destroy_metrics_generic(dst); dst 156 net/decnet/dn_route.c static void dn_dst_ifdown(struct dst_entry *dst, struct net_device *dev, int how) dst 159 net/decnet/dn_route.c struct dn_route *rt = (struct dn_route *) dst; dst 170 net/decnet/dn_route.c static __inline__ unsigned int dn_hash(__le16 src, __le16 dst) dst 172 net/decnet/dn_route.c __u16 tmp = (__u16 __force)(src ^ dst); dst 193 net/decnet/dn_route.c if (atomic_read(&rt->dst.__refcnt) > 1 || dst 194 net/decnet/dn_route.c (now - rt->dst.lastuse) < expire) { dst 200 net/decnet/dn_route.c dst_dev_put(&rt->dst); dst 201 
net/decnet/dn_route.c dst_release(&rt->dst); dst 227 net/decnet/dn_route.c if (atomic_read(&rt->dst.__refcnt) > 1 || dst 228 net/decnet/dn_route.c (now - rt->dst.lastuse) < expire) { dst 234 net/decnet/dn_route.c dst_dev_put(&rt->dst); dst 235 net/decnet/dn_route.c dst_release(&rt->dst); dst 254 net/decnet/dn_route.c static void dn_dst_update_pmtu(struct dst_entry *dst, struct sock *sk, dst 258 net/decnet/dn_route.c struct dn_route *rt = (struct dn_route *) dst; dst 270 net/decnet/dn_route.c if (dst_metric(dst, RTAX_MTU) > mtu && mtu >= min_mtu) { dst 271 net/decnet/dn_route.c if (!(dst_metric_locked(dst, RTAX_MTU))) { dst 272 net/decnet/dn_route.c dst_metric_set(dst, RTAX_MTU, mtu); dst 273 net/decnet/dn_route.c dst_set_expires(dst, dn_rt_mtu_expires); dst 275 net/decnet/dn_route.c if (!(dst_metric_locked(dst, RTAX_ADVMSS))) { dst 277 net/decnet/dn_route.c u32 existing_mss = dst_metric_raw(dst, RTAX_ADVMSS); dst 279 net/decnet/dn_route.c dst_metric_set(dst, RTAX_ADVMSS, mss); dst 284 net/decnet/dn_route.c static void dn_dst_redirect(struct dst_entry *dst, struct sock *sk, dst 292 net/decnet/dn_route.c static struct dst_entry *dn_dst_check(struct dst_entry *dst, __u32 cookie) dst 297 net/decnet/dn_route.c static struct dst_entry *dn_dst_negative_advice(struct dst_entry *dst) dst 299 net/decnet/dn_route.c dst_release(dst); dst 335 net/decnet/dn_route.c dst_hold_and_use(&rth->dst, now); dst 338 net/decnet/dn_route.c dst_release_immediate(&rt->dst); dst 348 net/decnet/dn_route.c dst_hold_and_use(&rt->dst, now); dst 368 net/decnet/dn_route.c dst_dev_put(&rt->dst); dst 369 net/decnet/dn_route.c dst_release(&rt->dst); dst 423 net/decnet/dn_route.c __le16 *dst; dst 436 net/decnet/dn_route.c dst = (__le16 *)ptr; dst 442 net/decnet/dn_route.c swap(*src, *dst); dst 516 net/decnet/dn_route.c le16_to_cpu(cb->src), le16_to_cpu(cb->dst), dst 546 net/decnet/dn_route.c cb->dst = dn_eth2dn(ptr); dst 584 net/decnet/dn_route.c cb->dst = *(__le16 *)ptr; dst 735 net/decnet/dn_route.c struct dst_entry *dst = skb_dst(skb); dst 736 net/decnet/dn_route.c struct dn_route *rt = (struct dn_route *)dst; dst 737 net/decnet/dn_route.c struct net_device *dev = dst->dev; dst 748 net/decnet/dn_route.c cb->dst = rt->rt_daddr; dst 774 net/decnet/dn_route.c struct dst_entry *dst = skb_dst(skb); dst 775 net/decnet/dn_route.c struct dn_dev *dn_db = rcu_dereference(dst->dev->dn_ptr); dst 786 net/decnet/dn_route.c if (skb_cow(skb, LL_RESERVED_SPACE(rt->dst.dev)+header_len)) dst 795 net/decnet/dn_route.c skb->dev = rt->dst.dev; dst 824 net/decnet/dn_route.c le16_to_cpu(cb->src), le16_to_cpu(cb->dst)); dst 836 net/decnet/dn_route.c le16_to_cpu(cb->src), le16_to_cpu(cb->dst)); dst 843 net/decnet/dn_route.c static unsigned int dn_dst_default_advmss(const struct dst_entry *dst) dst 845 net/decnet/dn_route.c return dn_mss_from_pmtu(dst->dev, dst_mtu(dst)); dst 848 net/decnet/dn_route.c static unsigned int dn_dst_mtu(const struct dst_entry *dst) dst 850 net/decnet/dn_route.c unsigned int mtu = dst_metric_raw(dst, RTAX_MTU); dst 852 net/decnet/dn_route.c return mtu ? 
: dst->dev->mtu; dst 855 net/decnet/dn_route.c static struct neighbour *dn_dst_neigh_lookup(const struct dst_entry *dst, dst 859 net/decnet/dn_route.c return __neigh_lookup_errno(&dn_neigh_table, daddr, dst->dev); dst 865 net/decnet/dn_route.c struct net_device *dev = rt->dst.dev; dst 873 net/decnet/dn_route.c dst_init_metrics(&rt->dst, fi->fib_metrics, true); dst 884 net/decnet/dn_route.c if (dst_metric(&rt->dst, RTAX_MTU) > rt->dst.dev->mtu) dst 885 net/decnet/dn_route.c dst_metric_set(&rt->dst, RTAX_MTU, rt->dst.dev->mtu); dst 886 net/decnet/dn_route.c mss_metric = dst_metric_raw(&rt->dst, RTAX_ADVMSS); dst 888 net/decnet/dn_route.c unsigned int mss = dn_mss_from_pmtu(dev, dst_mtu(&rt->dst)); dst 890 net/decnet/dn_route.c dst_metric_set(&rt->dst, RTAX_ADVMSS, mss); dst 1199 net/decnet/dn_route.c rt->dst.lastuse = jiffies; dst 1200 net/decnet/dn_route.c rt->dst.output = dn_output; dst 1201 net/decnet/dn_route.c rt->dst.input = dn_rt_bug; dst 1204 net/decnet/dn_route.c rt->dst.input = dn_nsp_rx; dst 1234 net/decnet/dn_route.c dst_release_immediate(&rt->dst); dst 1256 net/decnet/dn_route.c dst_hold_and_use(&rt->dst, jiffies); dst 1258 net/decnet/dn_route.c *pprt = &rt->dst; dst 1313 net/decnet/dn_route.c .daddr = cb->dst, dst 1350 net/decnet/dn_route.c if (!dn_dev_islocal(in_dev, cb->dst)) dst 1414 net/decnet/dn_route.c fld.saddr = cb->dst; dst 1459 net/decnet/dn_route.c rt->fld.daddr = cb->dst; dst 1465 net/decnet/dn_route.c rt->dst.lastuse = jiffies; dst 1466 net/decnet/dn_route.c rt->dst.output = dn_rt_bug_out; dst 1469 net/decnet/dn_route.c rt->dst.input = dn_forward; dst 1472 net/decnet/dn_route.c rt->dst.output = dn_output; dst 1473 net/decnet/dn_route.c rt->dst.input = dn_nsp_rx; dst 1474 net/decnet/dn_route.c rt->dst.dev = in_dev; dst 1480 net/decnet/dn_route.c rt->dst.input = dst_discard; dst 1491 net/decnet/dn_route.c skb_dst_set(skb, &rt->dst); dst 1513 net/decnet/dn_route.c dst_release_immediate(&rt->dst); dst 1521 net/decnet/dn_route.c unsigned int hash = dn_hash(cb->src, cb->dst); dst 1530 net/decnet/dn_route.c (rt->fld.daddr == cb->dst) && dst 1534 net/decnet/dn_route.c dst_hold_and_use(&rt->dst, jiffies); dst 1580 net/decnet/dn_route.c if (rt->dst.dev && dst 1581 net/decnet/dn_route.c nla_put_u32(skb, RTA_OIF, rt->dst.dev->ifindex) < 0) dst 1596 net/decnet/dn_route.c if (rtnetlink_put_metrics(skb, dst_metrics_ptr(&rt->dst)) < 0) dst 1599 net/decnet/dn_route.c expires = rt->dst.expires ? rt->dst.expires - jiffies : 0; dst 1600 net/decnet/dn_route.c if (rtnl_put_cacheinfo(skb, &rt->dst, 0, expires, dst 1601 net/decnet/dn_route.c rt->dst.error) < 0) dst 1681 net/decnet/dn_route.c cb->dst = fld.daddr; dst 1687 net/decnet/dn_route.c if (!err && -rt->dst.error) dst 1688 net/decnet/dn_route.c err = rt->dst.error; dst 1699 net/decnet/dn_route.c skb_dst_set(skb, &rt->dst); dst 1751 net/decnet/dn_route.c skb_dst_set(skb, dst_clone(&rt->dst)); dst 1835 net/decnet/dn_route.c rt->dst.dev ? 
rt->dst.dev->name : "*", dst 1838 net/decnet/dn_route.c atomic_read(&rt->dst.__refcnt), dst 1839 net/decnet/dn_route.c rt->dst.__use, 0); dst 47 net/decnet/dn_rules.c __le16 dst; dst 116 net/decnet/dn_rules.c ((daddr ^ r->dst) & r->dstmask)) dst 153 net/decnet/dn_rules.c r->dst = nla_get_le16(tb[FRA_DST]); dst 178 net/decnet/dn_rules.c if (frh->dst_len && (r->dst != nla_get_le16(tb[FRA_DST]))) dst 212 net/decnet/dn_rules.c nla_put_le16(skb, FRA_DST, r->dst)) || dst 96 net/decnet/dn_table.c static inline dn_fib_key_t dz_key(__le16 dst, struct dn_zone *dz) dst 99 net/decnet/dn_table.c k.datum = dst & DZ_MASK(dz); dst 302 net/decnet/dn_table.c u32 tb_id, u8 type, u8 scope, void *dst, int dst_len, dst 327 net/decnet/dn_table.c nla_put(skb, RTA_DST, 2, dst) < 0) dst 550 net/decnet/dn_table.c __le16 dst = nla_get_le16(attrs[RTA_DST]); dst 551 net/decnet/dn_table.c if (dst & ~DZ_MASK(dz)) dst 553 net/decnet/dn_table.c key = dz_key(dst, dz); dst 685 net/decnet/dn_table.c __le16 dst = nla_get_le16(attrs[RTA_DST]); dst 686 net/decnet/dn_table.c if (dst & ~DZ_MASK(dz)) dst 688 net/decnet/dn_table.c key = dz_key(dst, dz); dst 29 net/dsa/dsa2.c struct dsa_switch_tree *dst; dst 31 net/dsa/dsa2.c list_for_each_entry(dst, &dsa_tree_list, list) dst 32 net/dsa/dsa2.c if (dst->index == index) dst 33 net/dsa/dsa2.c return dst; dst 40 net/dsa/dsa2.c struct dsa_switch_tree *dst; dst 42 net/dsa/dsa2.c dst = kzalloc(sizeof(*dst), GFP_KERNEL); dst 43 net/dsa/dsa2.c if (!dst) dst 46 net/dsa/dsa2.c dst->index = index; dst 48 net/dsa/dsa2.c INIT_LIST_HEAD(&dst->list); dst 49 net/dsa/dsa2.c list_add_tail(&dst->list, &dsa_tree_list); dst 51 net/dsa/dsa2.c kref_init(&dst->refcount); dst 53 net/dsa/dsa2.c return dst; dst 56 net/dsa/dsa2.c static void dsa_tree_free(struct dsa_switch_tree *dst) dst 58 net/dsa/dsa2.c list_del(&dst->list); dst 59 net/dsa/dsa2.c kfree(dst); dst 62 net/dsa/dsa2.c static struct dsa_switch_tree *dsa_tree_get(struct dsa_switch_tree *dst) dst 64 net/dsa/dsa2.c if (dst) dst 65 net/dsa/dsa2.c kref_get(&dst->refcount); dst 67 net/dsa/dsa2.c return dst; dst 72 net/dsa/dsa2.c struct dsa_switch_tree *dst; dst 74 net/dsa/dsa2.c dst = dsa_tree_find(index); dst 75 net/dsa/dsa2.c if (dst) dst 76 net/dsa/dsa2.c return dsa_tree_get(dst); dst 83 net/dsa/dsa2.c struct dsa_switch_tree *dst; dst 85 net/dsa/dsa2.c dst = container_of(ref, struct dsa_switch_tree, refcount); dst 87 net/dsa/dsa2.c dsa_tree_free(dst); dst 90 net/dsa/dsa2.c static void dsa_tree_put(struct dsa_switch_tree *dst) dst 92 net/dsa/dsa2.c if (dst) dst 93 net/dsa/dsa2.c kref_put(&dst->refcount, dsa_tree_release); dst 111 net/dsa/dsa2.c static struct dsa_port *dsa_tree_find_port_by_node(struct dsa_switch_tree *dst, dst 119 net/dsa/dsa2.c ds = dst->ds[device]; dst 137 net/dsa/dsa2.c struct dsa_switch_tree *dst = ds->dst; dst 144 net/dsa/dsa2.c link_dp = dsa_tree_find_port_by_node(dst, it.node); dst 178 net/dsa/dsa2.c static bool dsa_tree_setup_routing_table(struct dsa_switch_tree *dst) dst 185 net/dsa/dsa2.c ds = dst->ds[device]; dst 197 net/dsa/dsa2.c static struct dsa_port *dsa_tree_find_first_cpu(struct dsa_switch_tree *dst) dst 204 net/dsa/dsa2.c ds = dst->ds[device]; dst 219 net/dsa/dsa2.c static int dsa_tree_setup_default_cpu(struct dsa_switch_tree *dst) dst 226 net/dsa/dsa2.c dst->cpu_dp = dsa_tree_find_first_cpu(dst); dst 227 net/dsa/dsa2.c if (!dst->cpu_dp) { dst 234 net/dsa/dsa2.c ds = dst->ds[device]; dst 242 net/dsa/dsa2.c dp->cpu_dp = dst->cpu_dp; dst 249 net/dsa/dsa2.c static void dsa_tree_teardown_default_cpu(struct dsa_switch_tree 
*dst) dst 252 net/dsa/dsa2.c dst->cpu_dp = NULL; dst 258 net/dsa/dsa2.c struct dsa_switch_tree *dst = ds->dst; dst 259 net/dsa/dsa2.c const unsigned char *id = (const unsigned char *)&dst->index; dst 260 net/dsa/dsa2.c const unsigned char len = sizeof(dst->index); dst 443 net/dsa/dsa2.c static int dsa_tree_setup_switches(struct dsa_switch_tree *dst) dst 451 net/dsa/dsa2.c ds = dst->ds[device]; dst 472 net/dsa/dsa2.c ds = dst->ds[i]; dst 488 net/dsa/dsa2.c static void dsa_tree_teardown_switches(struct dsa_switch_tree *dst) dst 495 net/dsa/dsa2.c ds = dst->ds[device]; dst 509 net/dsa/dsa2.c static int dsa_tree_setup_master(struct dsa_switch_tree *dst) dst 511 net/dsa/dsa2.c struct dsa_port *cpu_dp = dst->cpu_dp; dst 518 net/dsa/dsa2.c static void dsa_tree_teardown_master(struct dsa_switch_tree *dst) dst 520 net/dsa/dsa2.c struct dsa_port *cpu_dp = dst->cpu_dp; dst 526 net/dsa/dsa2.c static int dsa_tree_setup(struct dsa_switch_tree *dst) dst 531 net/dsa/dsa2.c if (dst->setup) { dst 533 net/dsa/dsa2.c dst->index); dst 537 net/dsa/dsa2.c complete = dsa_tree_setup_routing_table(dst); dst 541 net/dsa/dsa2.c err = dsa_tree_setup_default_cpu(dst); dst 545 net/dsa/dsa2.c err = dsa_tree_setup_switches(dst); dst 549 net/dsa/dsa2.c err = dsa_tree_setup_master(dst); dst 553 net/dsa/dsa2.c dst->setup = true; dst 555 net/dsa/dsa2.c pr_info("DSA: tree %d setup\n", dst->index); dst 560 net/dsa/dsa2.c dsa_tree_teardown_switches(dst); dst 562 net/dsa/dsa2.c dsa_tree_teardown_default_cpu(dst); dst 567 net/dsa/dsa2.c static void dsa_tree_teardown(struct dsa_switch_tree *dst) dst 569 net/dsa/dsa2.c if (!dst->setup) dst 572 net/dsa/dsa2.c dsa_tree_teardown_master(dst); dst 574 net/dsa/dsa2.c dsa_tree_teardown_switches(dst); dst 576 net/dsa/dsa2.c dsa_tree_teardown_default_cpu(dst); dst 578 net/dsa/dsa2.c pr_info("DSA: tree %d torn down\n", dst->index); dst 580 net/dsa/dsa2.c dst->setup = false; dst 583 net/dsa/dsa2.c static void dsa_tree_remove_switch(struct dsa_switch_tree *dst, dst 586 net/dsa/dsa2.c dsa_tree_teardown(dst); dst 588 net/dsa/dsa2.c dst->ds[index] = NULL; dst 589 net/dsa/dsa2.c dsa_tree_put(dst); dst 592 net/dsa/dsa2.c static int dsa_tree_add_switch(struct dsa_switch_tree *dst, dst 598 net/dsa/dsa2.c if (dst->ds[index]) dst 601 net/dsa/dsa2.c dsa_tree_get(dst); dst 602 net/dsa/dsa2.c dst->ds[index] = ds; dst 604 net/dsa/dsa2.c err = dsa_tree_setup(dst); dst 606 net/dsa/dsa2.c dst->ds[index] = NULL; dst 607 net/dsa/dsa2.c dsa_tree_put(dst); dst 634 net/dsa/dsa2.c struct dsa_switch_tree *dst = ds->dst; dst 652 net/dsa/dsa2.c dp->dst = dst; dst 732 net/dsa/dsa2.c ds->dst = dsa_tree_touch(m[0]); dst 733 net/dsa/dsa2.c if (!ds->dst) dst 810 net/dsa/dsa2.c ds->dst = dsa_tree_touch(0); dst 811 net/dsa/dsa2.c if (!ds->dst) dst 819 net/dsa/dsa2.c struct dsa_switch_tree *dst = ds->dst; dst 821 net/dsa/dsa2.c return dsa_tree_add_switch(dst, ds); dst 870 net/dsa/dsa2.c dsa_tree_put(ds->dst); dst 879 net/dsa/dsa2.c struct dsa_switch_tree *dst = ds->dst; dst 882 net/dsa/dsa2.c dsa_tree_remove_switch(dst, index); dst 106 net/dsa/dsa_priv.h struct dsa_switch_tree *dst = cpu_dp->dst; dst 113 net/dsa/dsa_priv.h ds = dst->ds[device]; dst 18 net/dsa/port.c struct raw_notifier_head *nh = &dp->ds->dst->nh; dst 55 net/dsa/slave.c ds->dst->index, ds->index); dst 432 net/dsa/slave.c struct dsa_switch_tree *dst = ds->dst; dst 441 net/dsa/slave.c ppid->id_len = sizeof(dst->index); dst 442 net/dsa/slave.c memcpy(&ppid->id, &dst->index, ppid->id_len); dst 351 net/dsa/switch.c return raw_notifier_chain_register(&ds->dst->nh, 
&ds->nb); dst 358 net/dsa/switch.c err = raw_notifier_chain_unregister(&ds->dst->nh, &ds->nb); dst 91 net/hsr/hsr_forward.c unsigned char *dst, *src; dst 108 net/hsr/hsr_forward.c dst = skb_mac_header(skb); dst 109 net/hsr/hsr_forward.c memcpy(dst, src, copylen); dst 154 net/hsr/hsr_forward.c unsigned char *dst, *src; dst 171 net/hsr/hsr_forward.c dst = skb_push(skb, HSR_HLEN); dst 172 net/hsr/hsr_forward.c memmove(dst, src, movelen); dst 24 net/ieee802154/6lowpan/6lowpan_i.h struct ieee802154_addr dst; dst 65 net/ieee802154/6lowpan/reassembly.c const struct ieee802154_addr *dst) dst 75 net/ieee802154/6lowpan/reassembly.c key.dst = *dst; dst 1211 net/ipv4/af_inet.c sk_setup_caps(sk, &rt->dst); dst 1263 net/ipv4/af_inet.c sk_setup_caps(sk, &rt->dst); dst 303 net/ipv4/arp.c struct dst_entry *dst) dst 316 net/ipv4/arp.c skb_dst_set(skb, dst_clone(dst)); dst 338 net/ipv4/arp.c struct dst_entry *dst = NULL; dst 388 net/ipv4/arp.c dst = skb_dst(skb); dst 390 net/ipv4/arp.c dst_hw, dev->dev_addr, NULL, dst); dst 439 net/ipv4/arp.c if (rt->dst.dev != dev) { dst 456 net/ipv4/arp.c if (rt->dst.dev == dev) dst 469 net/ipv4/arp.c out_dev = __in_dev_get_rcu(rt->dst.dev); dst 500 net/ipv4/arp.c if (rt->dst.dev != dev) dst 840 net/ipv4/arp.c (rt->dst.dev != dev && dst 1036 net/ipv4/arp.c dev = rt->dst.dev; dst 1161 net/ipv4/arp.c dev = rt->dst.dev; dst 78 net/ipv4/datagram.c sk_dst_set(sk, &rt->dst); dst 105 net/ipv4/datagram.c struct dst_entry *dst; dst 111 net/ipv4/datagram.c dst = __sk_dst_get(sk); dst 112 net/ipv4/datagram.c if (!dst || !dst->obsolete || dst->ops->check(dst, 0)) { dst 124 net/ipv4/datagram.c dst = !IS_ERR(rt) ? &rt->dst : NULL; dst 125 net/ipv4/datagram.c sk_dst_set(sk, dst); dst 1301 net/ipv4/devinet.c __be32 inet_select_addr(const struct net_device *dev, __be32 dst, int scope) dst 1323 net/ipv4/devinet.c if (!dst || inet_ifa_match(dst, ifa)) { dst 1371 net/ipv4/devinet.c static __be32 confirm_addr_indev(struct in_device *in_dev, __be32 dst, dst 1394 net/ipv4/devinet.c (!dst || inet_ifa_match(dst, ifa)); dst 1396 net/ipv4/devinet.c if (local || !dst) dst 1424 net/ipv4/devinet.c __be32 dst, __be32 local, int scope) dst 1430 net/ipv4/devinet.c return confirm_addr_indev(in_dev, dst, local, scope); dst 1436 net/ipv4/devinet.c addr = confirm_addr_indev(in_dev, dst, local, scope); dst 115 net/ipv4/esp4.c if (req->src != req->dst) dst 504 net/ipv4/esp4.c struct xfrm_dst *dst = (struct xfrm_dst *)skb_dst(skb); dst 507 net/ipv4/esp4.c padto = min(x->tfcpad, xfrm_state_mtu(x, dst->child_mtu_cached)); dst 353 net/ipv4/fib_frontend.c static int __fib_validate_source(struct sk_buff *skb, __be32 src, __be32 dst, dst 369 net/ipv4/fib_frontend.c fl4.saddr = dst; dst 428 net/ipv4/fib_frontend.c int fib_validate_source(struct sk_buff *skb, __be32 src, __be32 dst, dst 455 net/ipv4/fib_frontend.c return __fib_validate_source(skb, src, dst, tos, oif, dev, r, idev, itag); dst 1059 net/ipv4/fib_frontend.c static void fib_magic(int cmd, int type, __be32 dst, int dst_len, dst 1068 net/ipv4/fib_frontend.c .fc_dst = dst, dst 39 net/ipv4/fib_lookup.h u8 type, __be32 dst, int dst_len, u8 tos, struct fib_info *fi, dst 40 net/ipv4/fib_rules.c __be32 dst; dst 179 net/ipv4/fib_rules.c ((daddr ^ r->dst) & r->dstmask)) dst 255 net/ipv4/fib_rules.c rule4->dst = nla_get_in_addr(tb[FRA_DST]); dst 326 net/ipv4/fib_rules.c if (frh->dst_len && (rule4->dst != nla_get_in_addr(tb[FRA_DST]))) dst 342 net/ipv4/fib_rules.c nla_put_in_addr(skb, FRA_DST, rule4->dst)) || dst 158 net/ipv4/fib_semantics.c dst_dev_put(&rt->dst); dst 
159 net/ipv4/fib_semantics.c dst_release_immediate(&rt->dst); dst 202 net/ipv4/fib_semantics.c dst_dev_put(&rt->dst); dst 203 net/ipv4/fib_semantics.c dst_release_immediate(&rt->dst); dst 1728 net/ipv4/fib_semantics.c u32 tb_id, u8 type, __be32 dst, int dst_len, u8 tos, dst 1756 net/ipv4/fib_semantics.c nla_put_in_addr(skb, RTA_DST, dst)) dst 78 net/ipv4/fib_trie.c enum fib_event_type event_type, u32 dst, dst 82 net/ipv4/fib_trie.c .dst = dst, dst 93 net/ipv4/fib_trie.c enum fib_event_type event_type, u32 dst, dst 99 net/ipv4/fib_trie.c .dst = dst, dst 313 net/ipv4/icmp.c struct dst_entry *dst = &rt->dst; dst 322 net/ipv4/icmp.c if (dst->dev && (dst->dev->flags&IFF_LOOPBACK)) dst 325 net/ipv4/icmp.c vif = l3mdev_master_ifindex(dst->dev); dst 370 net/ipv4/icmp.c sk = icmp_sk(dev_net((*rt)->dst.dev)); dst 402 net/ipv4/icmp.c struct net *net = dev_net(rt->dst.dev); dst 492 net/ipv4/icmp.c rt = (struct rtable *) xfrm_lookup(net, &rt->dst, dst 525 net/ipv4/icmp.c RT_TOS(tos), rt2->dst.dev); dst 527 net/ipv4/icmp.c dst_release(&rt2->dst); dst 535 net/ipv4/icmp.c rt2 = (struct rtable *) xfrm_lookup(net, &rt2->dst, dst 539 net/ipv4/icmp.c dst_release(&rt->dst); dst 544 net/ipv4/icmp.c dst_release(&rt->dst); dst 587 net/ipv4/icmp.c if (rt->dst.dev) dst 588 net/ipv4/icmp.c net = dev_net(rt->dst.dev); dst 728 net/ipv4/icmp.c room = dst_mtu(&rt->dst); dst 1000 net/ipv4/icmp.c struct net *net = dev_net(rt->dst.dev); dst 379 net/ipv4/igmp.c skb_dst_set(skb, &rt->dst); dst 735 net/ipv4/igmp.c __be32 dst; dst 745 net/ipv4/igmp.c dst = IGMP_ALL_ROUTER; dst 747 net/ipv4/igmp.c dst = group; dst 749 net/ipv4/igmp.c rt = ip_route_output_ports(net, &fl4, NULL, dst, 0, dst 764 net/ipv4/igmp.c skb_dst_set(skb, &rt->dst); dst 777 net/ipv4/igmp.c iph->daddr = dst; dst 1831 net/ipv4/igmp.c dev = rt->dst.dev; dst 591 net/ipv4/inet_connection_sock.c return &rt->dst; dst 628 net/ipv4/inet_connection_sock.c return &rt->dst; dst 1098 net/ipv4/inet_connection_sock.c sk_setup_caps(sk, &rt->dst); dst 1101 net/ipv4/inet_connection_sock.c return &rt->dst; dst 1106 net/ipv4/inet_connection_sock.c struct dst_entry *dst = __sk_dst_check(sk, 0); dst 1109 net/ipv4/inet_connection_sock.c if (!dst) { dst 1110 net/ipv4/inet_connection_sock.c dst = inet_csk_rebuild_route(sk, &inet->cork.fl); dst 1111 net/ipv4/inet_connection_sock.c if (!dst) dst 1114 net/ipv4/inet_connection_sock.c dst->ops->update_pmtu(dst, sk, NULL, mtu, true); dst 1116 net/ipv4/inet_connection_sock.c dst = __sk_dst_check(sk, 0); dst 1117 net/ipv4/inet_connection_sock.c if (!dst) dst 1118 net/ipv4/inet_connection_sock.c dst = inet_csk_rebuild_route(sk, &inet->cork.fl); dst 1120 net/ipv4/inet_connection_sock.c return dst; dst 130 net/ipv4/ip_forward.c mtu = ip_dst_mtu_maybe_forward(&rt->dst, true); dst 139 net/ipv4/ip_forward.c if (skb_cow(skb, LL_RESERVED_SPACE(rt->dst.dev)+rt->dst.header_len)) dst 158 net/ipv4/ip_forward.c net, NULL, skb, skb->dev, rt->dst.dev, dst 579 net/ipv4/ip_gre.c ip_tunnel_init_flow(&fl4, IPPROTO_GRE, key->u.ipv4.dst, key->u.ipv4.src, dst 870 net/ipv4/ip_gre.c dev = rt->dst.dev; dst 548 net/ipv4/ip_input.c struct dst_entry *dst; dst 560 net/ipv4/ip_input.c dst = skb_dst(skb); dst 561 net/ipv4/ip_input.c if (curr_dst != dst) { dst 567 net/ipv4/ip_input.c curr_dst = dst; dst 131 net/ipv4/ip_output.c static inline int ip_select_ttl(struct inet_sock *inet, struct dst_entry *dst) dst 136 net/ipv4/ip_output.c ttl = ip4_dst_hoplimit(dst); dst 159 net/ipv4/ip_output.c iph->ttl = ip_select_ttl(inet, &rt->dst); dst 163 net/ipv4/ip_output.c if 
(ip_dont_fragment(sk, &rt->dst)) { dst 187 net/ipv4/ip_output.c struct dst_entry *dst = skb_dst(skb); dst 188 net/ipv4/ip_output.c struct rtable *rt = (struct rtable *)dst; dst 189 net/ipv4/ip_output.c struct net_device *dev = dst->dev; dst 214 net/ipv4/ip_output.c if (lwtunnel_xmit_redirect(dst->lwtstate)) { dst 354 net/ipv4/ip_output.c skb_dst_set(skb, &new_rt->dst); dst 364 net/ipv4/ip_output.c struct net_device *dev = rt->dst.dev; dst 497 net/ipv4/ip_output.c sk_setup_caps(sk, &rt->dst); dst 499 net/ipv4/ip_output.c skb_dst_set_noref(skb, &rt->dst); dst 510 net/ipv4/ip_output.c if (ip_dont_fragment(sk, &rt->dst) && !skb->ignore_df) dst 514 net/ipv4/ip_output.c iph->ttl = ip_select_ttl(inet, &rt->dst); dst 800 net/ipv4/ip_output.c ll_rs = LL_RESERVED_SPACE(rt->dst.dev); dst 976 net/ipv4/ip_output.c struct rtable *rt = (struct rtable *)cork->dst; dst 983 net/ipv4/ip_output.c exthdrlen = !skb ? rt->dst.header_len : 0; dst 991 net/ipv4/ip_output.c hh_len = LL_RESERVED_SPACE(rt->dst.dev); dst 1009 net/ipv4/ip_output.c rt->dst.dev->features & (NETIF_F_HW_CSUM | NETIF_F_IP_CSUM) && dst 1011 net/ipv4/ip_output.c (!exthdrlen || (rt->dst.dev->features & NETIF_F_HW_ESP_TX_CSUM))) dst 1019 net/ipv4/ip_output.c if (rt->dst.dev->features & NETIF_F_SG && dst 1071 net/ipv4/ip_output.c !(rt->dst.dev->features&NETIF_F_SG)) dst 1088 net/ipv4/ip_output.c alloclen += rt->dst.trailer_len; dst 1170 net/ipv4/ip_output.c if (!(rt->dst.dev->features&NETIF_F_SG) && dst 1262 net/ipv4/ip_output.c dst_mtu(&rt->dst) : READ_ONCE(rt->dst.dev->mtu); dst 1269 net/ipv4/ip_output.c cork->dst = &rt->dst; dst 1346 net/ipv4/ip_output.c rt = (struct rtable *)cork->dst; dst 1350 net/ipv4/ip_output.c if (!(rt->dst.dev->features&NETIF_F_SG)) dst 1353 net/ipv4/ip_output.c hh_len = LL_RESERVED_SPACE(rt->dst.dev); dst 1457 net/ipv4/ip_output.c dst_release(cork->dst); dst 1458 net/ipv4/ip_output.c cork->dst = NULL; dst 1475 net/ipv4/ip_output.c struct rtable *rt = (struct rtable *)cork->dst; dst 1510 net/ipv4/ip_output.c (skb->len <= dst_mtu(&rt->dst) && dst 1511 net/ipv4/ip_output.c ip_dont_fragment(sk, &rt->dst))) dst 1522 net/ipv4/ip_output.c ttl = ip_select_ttl(inet, &rt->dst); dst 1546 net/ipv4/ip_output.c cork->dst = NULL; dst 1547 net/ipv4/ip_output.c skb_dst_set(skb, &rt->dst); dst 1412 net/ipv4/ip_sockglue.c struct dst_entry *dst; dst 1414 net/ipv4/ip_sockglue.c dst = sk_dst_get(sk); dst 1415 net/ipv4/ip_sockglue.c if (dst) { dst 1416 net/ipv4/ip_sockglue.c val = dst_mtu(dst); dst 1417 net/ipv4/ip_sockglue.c dst_release(dst); dst 300 net/ipv4/ip_tunnel.c tdev = rt->dst.dev; dst 488 net/ipv4/ip_tunnel.c int tunnel_hlen, __be32 dst, bool md) dst 498 net/ipv4/ip_tunnel.c mtu = dst_mtu(&rt->dst) - dev->hard_header_len dst 522 net/ipv4/ip_tunnel.c daddr = md ? 
dst : tunnel->parms.iph.daddr; dst 571 net/ipv4/ip_tunnel.c ip_tunnel_init_flow(&fl4, proto, key->u.ipv4.dst, key->u.ipv4.src, dst 587 net/ipv4/ip_tunnel.c dst_cache_set_ip4(&tun_info->dst_cache, &rt->dst, dst 590 net/ipv4/ip_tunnel.c if (rt->dst.dev == dev) { dst 599 net/ipv4/ip_tunnel.c key->u.ipv4.dst, true)) { dst 612 net/ipv4/ip_tunnel.c ttl = ip4_dst_hoplimit(&rt->dst); dst 618 net/ipv4/ip_tunnel.c headroom += LL_RESERVED_SPACE(rt->dst.dev) + rt->dst.header_len; dst 652 net/ipv4/ip_tunnel.c __be32 dst; dst 660 net/ipv4/ip_tunnel.c dst = tnl_params->daddr; dst 661 net/ipv4/ip_tunnel.c if (dst == 0) { dst 672 net/ipv4/ip_tunnel.c tun_info->key.u.ipv4.dst) { dst 673 net/ipv4/ip_tunnel.c dst = tun_info->key.u.ipv4.dst; dst 679 net/ipv4/ip_tunnel.c dst = rt_nexthop(rt, inner_iph->daddr); dst 705 net/ipv4/ip_tunnel.c dst = addr6->s6_addr32[3]; dst 731 net/ipv4/ip_tunnel.c ip_tunnel_init_flow(&fl4, protocol, dst, tnl_params->saddr, dst 756 net/ipv4/ip_tunnel.c dst_cache_set_ip4(&tun_info->dst_cache, &rt->dst, dst 759 net/ipv4/ip_tunnel.c dst_cache_set_ip4(&tunnel->dst_cache, &rt->dst, dst 763 net/ipv4/ip_tunnel.c if (rt->dst.dev == dev) { dst 795 net/ipv4/ip_tunnel.c ttl = ip4_dst_hoplimit(&rt->dst); dst 802 net/ipv4/ip_tunnel.c max_headroom = LL_RESERVED_SPACE(rt->dst.dev) + sizeof(struct iphdr) dst 803 net/ipv4/ip_tunnel.c + rt->dst.header_len + ip_encap_hlen(&tunnel->encap); dst 47 net/ipv4/ip_tunnel_core.c __be32 src, __be32 dst, __u8 proto, dst 51 net/ipv4/ip_tunnel_core.c struct net *net = dev_net(rt->dst.dev); dst 59 net/ipv4/ip_tunnel_core.c skb_dst_set(skb, &rt->dst); dst 70 net/ipv4/ip_tunnel_core.c iph->frag_off = ip_mtu_locked(&rt->dst) ? 0 : df; dst 73 net/ipv4/ip_tunnel_core.c iph->daddr = dst; dst 125 net/ipv4/ip_tunnel_core.c struct ip_tunnel_info *dst, *src; dst 136 net/ipv4/ip_tunnel_core.c dst = &res->u.tun_info; dst 138 net/ipv4/ip_tunnel_core.c dst->key.tun_id = src->key.tun_id; dst 140 net/ipv4/ip_tunnel_core.c memcpy(&dst->key.u.ipv6.dst, &src->key.u.ipv6.src, dst 143 net/ipv4/ip_tunnel_core.c dst->key.u.ipv4.dst = src->key.u.ipv4.src; dst 144 net/ipv4/ip_tunnel_core.c dst->key.tun_flags = src->key.tun_flags; dst 145 net/ipv4/ip_tunnel_core.c dst->mode = src->mode | IP_TUNNEL_INFO_TX; dst 257 net/ipv4/ip_tunnel_core.c tun_info->key.u.ipv4.dst = nla_get_in_addr(tb[LWTUNNEL_IP_DST]); dst 295 net/ipv4/ip_tunnel_core.c nla_put_in_addr(skb, LWTUNNEL_IP_DST, tun_info->key.u.ipv4.dst) || dst 366 net/ipv4/ip_tunnel_core.c tun_info->key.u.ipv6.dst = nla_get_in6_addr(tb[LWTUNNEL_IP6_DST]); dst 395 net/ipv4/ip_tunnel_core.c nla_put_in6_addr(skb, LWTUNNEL_IP6_DST, &tun_info->key.u.ipv6.dst) || dst 178 net/ipv4/ip_vti.c static bool vti_state_check(const struct xfrm_state *x, __be32 dst, __be32 src) dst 180 net/ipv4/ip_vti.c xfrm_address_t *daddr = (xfrm_address_t *)&dst; dst 190 net/ipv4/ip_vti.c if (!dst) dst 204 net/ipv4/ip_vti.c struct dst_entry *dst = skb_dst(skb); dst 210 net/ipv4/ip_vti.c if (!dst) { dst 222 net/ipv4/ip_vti.c dst = &rt->dst; dst 223 net/ipv4/ip_vti.c skb_dst_set(skb, dst); dst 230 net/ipv4/ip_vti.c dst = ip6_route_output(dev_net(dev), NULL, &fl->u.ip6); dst 231 net/ipv4/ip_vti.c if (dst->error) { dst 232 net/ipv4/ip_vti.c dst_release(dst); dst 233 net/ipv4/ip_vti.c dst = NULL; dst 237 net/ipv4/ip_vti.c skb_dst_set(skb, dst); dst 246 net/ipv4/ip_vti.c dst_hold(dst); dst 247 net/ipv4/ip_vti.c dst = xfrm_lookup(tunnel->net, dst, fl, NULL, 0); dst 248 net/ipv4/ip_vti.c if (IS_ERR(dst)) { dst 253 net/ipv4/ip_vti.c if (!vti_state_check(dst->xfrm, parms->iph.daddr, 
parms->iph.saddr)) { dst 255 net/ipv4/ip_vti.c dst_release(dst); dst 259 net/ipv4/ip_vti.c tdev = dst->dev; dst 262 net/ipv4/ip_vti.c dst_release(dst); dst 267 net/ipv4/ip_vti.c mtu = dst_mtu(dst); dst 280 net/ipv4/ip_vti.c dst_release(dst); dst 285 net/ipv4/ip_vti.c skb_dst_set(skb, dst); dst 1881 net/ipv4/ipmr.c dev = rt->dst.dev; dst 1883 net/ipv4/ipmr.c if (skb->len+encap > dst_mtu(&rt->dst) && (ntohs(iph->frag_off) & IP_DF)) { dst 1893 net/ipv4/ipmr.c encap += LL_RESERVED_SPACE(dev) + rt->dst.header_len; dst 1904 net/ipv4/ipmr.c skb_dst_set(skb, &rt->dst); dst 55 net/ipv4/netfilter.c skb_dst_set(skb, &rt->dst); dst 63 net/ipv4/netfilter.c struct dst_entry *dst = skb_dst(skb); dst 65 net/ipv4/netfilter.c dst = xfrm_lookup(net, dst, flowi4_to_flowi(&fl4), sk, 0); dst 66 net/ipv4/netfilter.c if (IS_ERR(dst)) dst 67 net/ipv4/netfilter.c return PTR_ERR(dst); dst 68 net/ipv4/netfilter.c skb_dst_set(skb, dst); dst 83 net/ipv4/netfilter.c int nf_ip_route(struct net *net, struct dst_entry **dst, struct flowi *fl, dst 89 net/ipv4/netfilter.c *dst = &rt->dst; dst 717 net/ipv4/netfilter/arp_tables.c static void compat_standard_from_user(void *dst, const void *src) dst 723 net/ipv4/netfilter/arp_tables.c memcpy(dst, &v, sizeof(v)); dst 726 net/ipv4/netfilter/arp_tables.c static int compat_standard_to_user(void __user *dst, const void *src) dst 732 net/ipv4/netfilter/arp_tables.c return copy_to_user(dst, &cv, sizeof(cv)) ? -EFAULT : 0; dst 56 net/ipv4/netfilter/ip_tables.c (ip->daddr & ipinfo->dmsk.s_addr) != ipinfo->dst.s_addr)) dst 872 net/ipv4/netfilter/ip_tables.c static void compat_standard_from_user(void *dst, const void *src) dst 878 net/ipv4/netfilter/ip_tables.c memcpy(dst, &v, sizeof(v)); dst 881 net/ipv4/netfilter/ip_tables.c static int compat_standard_to_user(void __user *dst, const void *src) dst 887 net/ipv4/netfilter/ip_tables.c return copy_to_user(dst, &cv, sizeof(cv)) ? 
-EFAULT : 0; dst 478 net/ipv4/netfilter/ipt_CLUSTERIP.c e->ip.dst.s_addr == 0) { dst 495 net/ipv4/netfilter/ipt_CLUSTERIP.c config = clusterip_config_find_get(par->net, e->ip.dst.s_addr, 1); dst 499 net/ipv4/netfilter/ipt_CLUSTERIP.c &e->ip.dst.s_addr); dst 503 net/ipv4/netfilter/ipt_CLUSTERIP.c e->ip.dst.s_addr, dst 43 net/ipv4/netfilter/nf_dup_ipv4.c skb_dst_set(skb, &rt->dst); dst 44 net/ipv4/netfilter/nf_dup_ipv4.c skb->dev = rt->dst.dev; dst 118 net/ipv4/netfilter/nf_nat_h323.c &ct->tuplehash[!dir].tuple.dst.u3.ip, dst 123 net/ipv4/netfilter/nf_nat_h323.c tuple.dst.u3, dst 125 net/ipv4/netfilter/nf_nat_h323.c } else if (addr.ip == ct->tuplehash[dir].tuple.dst.u3.ip && dst 161 net/ipv4/netfilter/nf_nat_h323.c &ct->tuplehash[!dir].tuple.dst.u3.ip, dst 162 net/ipv4/netfilter/nf_nat_h323.c ntohs(ct->tuplehash[!dir].tuple.dst.u.udp.port)); dst 164 net/ipv4/netfilter/nf_nat_h323.c &ct->tuplehash[!dir].tuple.dst.u3, dst 166 net/ipv4/netfilter/nf_nat_h323.c dst.u.udp.port); dst 188 net/ipv4/netfilter/nf_nat_h323.c rtp_exp->saved_proto.udp.port = rtp_exp->tuple.dst.u.udp.port; dst 191 net/ipv4/netfilter/nf_nat_h323.c rtcp_exp->saved_proto.udp.port = rtcp_exp->tuple.dst.u.udp.port; dst 202 net/ipv4/netfilter/nf_nat_h323.c rtp_exp->tuple.dst.u.udp.port = info->rtp_port[i][dir]; dst 203 net/ipv4/netfilter/nf_nat_h323.c rtcp_exp->tuple.dst.u.udp.port = dst 219 net/ipv4/netfilter/nf_nat_h323.c for (nated_port = ntohs(rtp_exp->tuple.dst.u.udp.port); dst 223 net/ipv4/netfilter/nf_nat_h323.c rtp_exp->tuple.dst.u.udp.port = htons(nated_port); dst 226 net/ipv4/netfilter/nf_nat_h323.c rtcp_exp->tuple.dst.u.udp.port = dst 252 net/ipv4/netfilter/nf_nat_h323.c &ct->tuplehash[!dir].tuple.dst.u3, dst 268 net/ipv4/netfilter/nf_nat_h323.c &rtp_exp->tuple.dst.u3.ip, dst 269 net/ipv4/netfilter/nf_nat_h323.c ntohs(rtp_exp->tuple.dst.u.udp.port)); dst 273 net/ipv4/netfilter/nf_nat_h323.c &rtcp_exp->tuple.dst.u3.ip, dst 274 net/ipv4/netfilter/nf_nat_h323.c ntohs(rtcp_exp->tuple.dst.u.udp.port)); dst 290 net/ipv4/netfilter/nf_nat_h323.c exp->saved_proto.tcp.port = exp->tuple.dst.u.tcp.port; dst 298 net/ipv4/netfilter/nf_nat_h323.c exp->tuple.dst.u.tcp.port = htons(nated_port); dst 315 net/ipv4/netfilter/nf_nat_h323.c &ct->tuplehash[!dir].tuple.dst.u3, dst 324 net/ipv4/netfilter/nf_nat_h323.c &exp->tuple.dst.u3.ip, dst 325 net/ipv4/netfilter/nf_nat_h323.c ntohs(exp->tuple.dst.u.tcp.port)); dst 342 net/ipv4/netfilter/nf_nat_h323.c exp->saved_proto.tcp.port = exp->tuple.dst.u.tcp.port; dst 354 net/ipv4/netfilter/nf_nat_h323.c exp->tuple.dst.u.tcp.port = htons(nated_port); dst 371 net/ipv4/netfilter/nf_nat_h323.c &ct->tuplehash[!dir].tuple.dst.u3, dst 384 net/ipv4/netfilter/nf_nat_h323.c &exp->tuple.dst.u3.ip, dst 385 net/ipv4/netfilter/nf_nat_h323.c ntohs(exp->tuple.dst.u.tcp.port)); dst 434 net/ipv4/netfilter/nf_nat_h323.c exp->saved_proto.tcp.port = exp->tuple.dst.u.tcp.port; dst 446 net/ipv4/netfilter/nf_nat_h323.c exp->tuple.dst.u.tcp.port = htons(nated_port); dst 463 net/ipv4/netfilter/nf_nat_h323.c &ct->tuplehash[!dir].tuple.dst.u3, dst 478 net/ipv4/netfilter/nf_nat_h323.c &ct->tuplehash[!dir].tuple.dst.u3, dst 489 net/ipv4/netfilter/nf_nat_h323.c &exp->tuple.dst.u3.ip, dst 490 net/ipv4/netfilter/nf_nat_h323.c ntohs(exp->tuple.dst.u.tcp.port)); dst 529 net/ipv4/netfilter/nf_nat_h323.c exp->saved_addr = exp->tuple.dst.u3; dst 530 net/ipv4/netfilter/nf_nat_h323.c exp->tuple.dst.u3.ip = ct->tuplehash[!dir].tuple.dst.u3.ip; dst 531 net/ipv4/netfilter/nf_nat_h323.c exp->saved_proto.tcp.port = exp->tuple.dst.u.tcp.port; 
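The nf_nat_h323.c entries above all orbit one conntrack idiom: the address and port a NAT helper writes into the payload come from the tuple of the opposite direction (ct->tuplehash[!dir].tuple.dst...), while the expectation keeps the original value in exp->saved_proto so it can be restored on the return path. A small sketch of that direction flip follows, assuming an IPv4 conntrack entry; the helper name is illustrative, not a function from the tree.

#include <net/netfilter/nf_conntrack.h>

/* Illustrative only: for a packet seen in direction @dir, the peer-visible
 * destination address is the destination of the reply-direction tuple. */
static __be32 example_nat_peer_addr(const struct nf_conn *ct,
				    enum ip_conntrack_dir dir)
{
	return ct->tuplehash[!dir].tuple.dst.u3.ip;
}
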
dst 539 net/ipv4/netfilter/nf_nat_h323.c exp->tuple.dst.u.tcp.port = htons(nated_port); dst 556 net/ipv4/netfilter/nf_nat_h323.c &ct->tuplehash[!dir].tuple.dst.u3, dst 566 net/ipv4/netfilter/nf_nat_h323.c &exp->tuple.dst.u3.ip, dst 567 net/ipv4/netfilter/nf_nat_h323.c ntohs(exp->tuple.dst.u.tcp.port)); dst 69 net/ipv4/netfilter/nf_nat_pptp.c t.dst.u3.ip = master->tuplehash[!exp->dir].tuple.dst.u3.ip; dst 70 net/ipv4/netfilter/nf_nat_pptp.c t.dst.u.gre.key = ct_pptp_info->pns_call_id; dst 71 net/ipv4/netfilter/nf_nat_pptp.c t.dst.protonum = IPPROTO_GRE; dst 78 net/ipv4/netfilter/nf_nat_pptp.c t.dst.u3.ip = master->tuplehash[!exp->dir].tuple.dst.u3.ip; dst 79 net/ipv4/netfilter/nf_nat_pptp.c t.dst.u.gre.key = nat_pptp_info->pac_call_id; dst 80 net/ipv4/netfilter/nf_nat_pptp.c t.dst.protonum = IPPROTO_GRE; dst 100 net/ipv4/netfilter/nf_nat_pptp.c = ct->master->tuplehash[!exp->dir].tuple.dst.u3; dst 156 net/ipv4/netfilter/nf_nat_pptp.c new_callid = ct->tuplehash[IP_CT_DIR_REPLY].tuple.dst.u.tcp.port; dst 219 net/ipv4/netfilter/nf_nat_pptp.c expect_orig->tuple.dst.u.gre.key = ct_pptp_info->pac_call_id; dst 225 net/ipv4/netfilter/nf_nat_pptp.c expect_reply->tuple.dst.u.gre.key = ct_pptp_info->pns_call_id; dst 137 net/ipv4/netfilter/nf_nat_snmp_basic_main.c ctx.to = ct->tuplehash[!dir].tuple.dst.u3.ip; dst 140 net/ipv4/netfilter/nf_nat_snmp_basic_main.c ctx.to = ct->tuplehash[dir].tuple.dst.u3.ip; dst 212 net/ipv4/netfilter/nf_nat_snmp_basic_main.c .tuple.dst.protonum = IPPROTO_UDP, dst 32 net/ipv4/netfilter/nft_fib_ipv4.c u32 *dst = ®s->data[priv->dreg]; dst 53 net/ipv4/netfilter/nft_fib_ipv4.c *dst = inet_dev_addr_type(nft_net(pkt), dev, addr); dst 843 net/ipv4/ping.c dst_confirm_neigh(&rt->dst, &fl4.daddr); dst 357 net/ipv4/raw.c if (length > rt->dst.dev->mtu) { dst 359 net/ipv4/raw.c rt->dst.dev->mtu); dst 368 net/ipv4/raw.c hlen = LL_RESERVED_SPACE(rt->dst.dev); dst 369 net/ipv4/raw.c tlen = rt->dst.dev->needed_tailroom; dst 380 net/ipv4/raw.c skb_dst_set(skb, &rt->dst); dst 429 net/ipv4/raw.c net, sk, skb, NULL, rt->dst.dev, dst 690 net/ipv4/raw.c dst_confirm_neigh(&rt->dst, &fl4.daddr); dst 136 net/ipv4/route.c static struct dst_entry *ipv4_dst_check(struct dst_entry *dst, u32 cookie); dst 137 net/ipv4/route.c static unsigned int ipv4_default_advmss(const struct dst_entry *dst); dst 138 net/ipv4/route.c static unsigned int ipv4_mtu(const struct dst_entry *dst); dst 139 net/ipv4/route.c static struct dst_entry *ipv4_negative_advice(struct dst_entry *dst); dst 141 net/ipv4/route.c static void ip_rt_update_pmtu(struct dst_entry *dst, struct sock *sk, dst 144 net/ipv4/route.c static void ip_do_redirect(struct dst_entry *dst, struct sock *sk, dst 146 net/ipv4/route.c static void ipv4_dst_destroy(struct dst_entry *dst); dst 148 net/ipv4/route.c static u32 *ipv4_cow_metrics(struct dst_entry *dst, unsigned long old) dst 154 net/ipv4/route.c static struct neighbour *ipv4_neigh_lookup(const struct dst_entry *dst, dst 157 net/ipv4/route.c static void ipv4_confirm_neigh(const struct dst_entry *dst, const void *daddr); dst 340 net/ipv4/route.c struct ip_rt_acct *dst, *src; dst 343 net/ipv4/route.c dst = kcalloc(256, sizeof(struct ip_rt_acct), GFP_KERNEL); dst 344 net/ipv4/route.c if (!dst) dst 350 net/ipv4/route.c dst[j].o_bytes += src[j].o_bytes; dst 351 net/ipv4/route.c dst[j].o_packets += src[j].o_packets; dst 352 net/ipv4/route.c dst[j].i_bytes += src[j].i_bytes; dst 353 net/ipv4/route.c dst[j].i_packets += src[j].i_packets; dst 357 net/ipv4/route.c seq_write(m, dst, 256 * sizeof(struct 
ip_rt_acct)); dst 358 net/ipv4/route.c kfree(dst); dst 423 net/ipv4/route.c return rth->rt_genid != rt_genid_ipv4(dev_net(rth->dst.dev)); dst 431 net/ipv4/route.c static struct neighbour *ipv4_neigh_lookup(const struct dst_entry *dst, dst 435 net/ipv4/route.c const struct rtable *rt = container_of(dst, struct rtable, dst); dst 436 net/ipv4/route.c struct net_device *dev = dst->dev; dst 460 net/ipv4/route.c static void ipv4_confirm_neigh(const struct dst_entry *dst, const void *daddr) dst 462 net/ipv4/route.c const struct rtable *rt = container_of(dst, struct rtable, dst); dst 463 net/ipv4/route.c struct net_device *dev = dst->dev; dst 594 net/ipv4/route.c dst_dev_put(&rt->dst); dst 595 net/ipv4/route.c dst_release(&rt->dst); dst 600 net/ipv4/route.c dst_dev_put(&rt->dst); dst 601 net/ipv4/route.c dst_release(&rt->dst); dst 633 net/ipv4/route.c rt->dst.expires = fnhe->fnhe_expires; dst 718 net/ipv4/route.c rt->dst.obsolete = DST_OBSOLETE_KILL; dst 725 net/ipv4/route.c rt->dst.obsolete = DST_OBSOLETE_KILL; dst 780 net/ipv4/route.c n = __ipv4_neigh_lookup(rt->dst.dev, new_gw); dst 782 net/ipv4/route.c n = neigh_create(&arp_tbl, &new_gw, rt->dst.dev); dst 795 net/ipv4/route.c rt->dst.obsolete = DST_OBSOLETE_KILL; dst 818 net/ipv4/route.c static void ip_do_redirect(struct dst_entry *dst, struct sock *sk, struct sk_buff *skb) dst 829 net/ipv4/route.c rt = (struct rtable *) dst; dst 835 net/ipv4/route.c static struct dst_entry *ipv4_negative_advice(struct dst_entry *dst) dst 837 net/ipv4/route.c struct rtable *rt = (struct rtable *)dst; dst 838 net/ipv4/route.c struct dst_entry *ret = dst; dst 841 net/ipv4/route.c if (dst->obsolete > 0) { dst 845 net/ipv4/route.c rt->dst.expires) { dst 879 net/ipv4/route.c in_dev = __in_dev_get_rcu(rt->dst.dev); dst 885 net/ipv4/route.c vif = l3mdev_master_ifindex_rcu(rt->dst.dev); dst 888 net/ipv4/route.c net = dev_net(rt->dst.dev); dst 959 net/ipv4/route.c net = dev_net(rt->dst.dev); dst 961 net/ipv4/route.c switch (rt->dst.error) { dst 973 net/ipv4/route.c switch (rt->dst.error) { dst 1014 net/ipv4/route.c struct dst_entry *dst = &rt->dst; dst 1015 net/ipv4/route.c u32 old_mtu = ipv4_mtu(dst); dst 1019 net/ipv4/route.c if (ip_mtu_locked(dst)) dst 1031 net/ipv4/route.c time_before(jiffies, dst->expires - ip_rt_mtu_expires / 2)) dst 1035 net/ipv4/route.c if (fib_lookup(dev_net(dst->dev), fl4, &res, 0) == 0) { dst 1044 net/ipv4/route.c static void ip_rt_update_pmtu(struct dst_entry *dst, struct sock *sk, dst 1048 net/ipv4/route.c struct rtable *rt = (struct rtable *) dst; dst 1123 net/ipv4/route.c __ip_rt_update_pmtu((struct rtable *) xfrm_dst_path(&rt->dst), &fl4, mtu); dst 1125 net/ipv4/route.c if (!dst_check(&rt->dst, 0)) { dst 1127 net/ipv4/route.c dst_release(&rt->dst); dst 1137 net/ipv4/route.c sk_dst_set(sk, &rt->dst); dst 1178 net/ipv4/route.c static struct dst_entry *ipv4_dst_check(struct dst_entry *dst, u32 cookie) dst 1180 net/ipv4/route.c struct rtable *rt = (struct rtable *) dst; dst 1190 net/ipv4/route.c if (dst->obsolete != DST_OBSOLETE_FORCE_CHK || rt_is_expired(rt)) dst 1192 net/ipv4/route.c return dst; dst 1231 net/ipv4/route.c dst_set_expires(&rt->dst, 0); dst 1266 net/ipv4/route.c .flowi4_oif = rt->dst.dev->ifindex, dst 1272 net/ipv4/route.c if (fib_lookup(dev_net(rt->dst.dev), &fl4, &res, 0) == 0) dst 1273 net/ipv4/route.c src = fib_result_prefsrc(dev_net(rt->dst.dev), &res); dst 1275 net/ipv4/route.c src = inet_select_addr(rt->dst.dev, dst 1286 net/ipv4/route.c if (!(rt->dst.tclassid & 0xFFFF)) dst 1287 net/ipv4/route.c rt->dst.tclassid |= 
tag & 0xFFFF; dst 1288 net/ipv4/route.c if (!(rt->dst.tclassid & 0xFFFF0000)) dst 1289 net/ipv4/route.c rt->dst.tclassid |= tag & 0xFFFF0000; dst 1293 net/ipv4/route.c static unsigned int ipv4_default_advmss(const struct dst_entry *dst) dst 1296 net/ipv4/route.c unsigned int advmss = max_t(unsigned int, ipv4_mtu(dst) - header_size, dst 1302 net/ipv4/route.c static unsigned int ipv4_mtu(const struct dst_entry *dst) dst 1304 net/ipv4/route.c const struct rtable *rt = (const struct rtable *) dst; dst 1307 net/ipv4/route.c if (!mtu || time_after_eq(jiffies, rt->dst.expires)) dst 1308 net/ipv4/route.c mtu = dst_metric_raw(dst, RTAX_MTU); dst 1313 net/ipv4/route.c mtu = READ_ONCE(dst->dev->mtu); dst 1315 net/ipv4/route.c if (unlikely(ip_mtu_locked(dst))) { dst 1322 net/ipv4/route.c return mtu - lwtunnel_headroom(dst->lwtstate, mtu); dst 1426 net/ipv4/route.c int genid = fnhe_genid(dev_net(rt->dst.dev)); dst 1450 net/ipv4/route.c dst_hold(&rt->dst); dst 1453 net/ipv4/route.c dst_dev_put(&orig->dst); dst 1454 net/ipv4/route.c dst_release(&orig->dst); dst 1481 net/ipv4/route.c dst_hold(&rt->dst); dst 1486 net/ipv4/route.c dst_release(&orig->dst); dst 1489 net/ipv4/route.c dst_release(&rt->dst); dst 1525 net/ipv4/route.c static void ipv4_dst_destroy(struct dst_entry *dst) dst 1527 net/ipv4/route.c struct rtable *rt = (struct rtable *)dst; dst 1529 net/ipv4/route.c ip_dst_metrics_put(dst); dst 1543 net/ipv4/route.c if (rt->dst.dev != dev) dst 1545 net/ipv4/route.c rt->dst.dev = blackhole_netdev; dst 1546 net/ipv4/route.c dev_hold(rt->dst.dev); dst 1556 net/ipv4/route.c rt->dst.obsolete == DST_OBSOLETE_FORCE_CHK && dst 1581 net/ipv4/route.c ip_dst_init_metrics(&rt->dst, fi->fib_metrics); dst 1588 net/ipv4/route.c rt->dst.tclassid = nh->nh_tclassid; dst 1591 net/ipv4/route.c rt->dst.lwtstate = lwtstate_get(nhc->nhc_lwtstate); dst 1643 net/ipv4/route.c rt->dst.output = ip_output; dst 1645 net/ipv4/route.c rt->dst.input = ip_local_deliver; dst 1657 net/ipv4/route.c rt->dst.flags); dst 1674 net/ipv4/route.c new_rt->dst.flags |= DST_HOST; dst 1675 net/ipv4/route.c new_rt->dst.input = rt->dst.input; dst 1676 net/ipv4/route.c new_rt->dst.output = rt->dst.output; dst 1677 net/ipv4/route.c new_rt->dst.error = rt->dst.error; dst 1678 net/ipv4/route.c new_rt->dst.lastuse = jiffies; dst 1679 net/ipv4/route.c new_rt->dst.lwtstate = lwtstate_get(rt->dst.lwtstate); dst 1739 net/ipv4/route.c rth->dst.tclassid = itag; dst 1741 net/ipv4/route.c rth->dst.output = ip_rt_bug; dst 1746 net/ipv4/route.c rth->dst.input = ip_mr_input; dst 1750 net/ipv4/route.c skb_dst_set(skb, &rth->dst); dst 1844 net/ipv4/route.c skb_dst_set_noref(skb, &rth->dst); dst 1860 net/ipv4/route.c rth->dst.input = ip_forward; dst 1864 net/ipv4/route.c lwtunnel_set_redirect(&rth->dst); dst 1865 net/ipv4/route.c skb_dst_set(skb, &rth->dst); dst 1912 net/ipv4/route.c hash_keys->addrs.v4addrs.dst = key_iph->daddr; dst 1931 net/ipv4/route.c hash_keys.addrs.v4addrs.dst = fl4->daddr; dst 1953 net/ipv4/route.c hash_keys.addrs.v4addrs.dst = flkeys->addrs.v4addrs.dst; dst 1955 net/ipv4/route.c hash_keys.ports.dst = flkeys->ports.dst; dst 1961 net/ipv4/route.c hash_keys.addrs.v4addrs.dst = fl4->daddr; dst 1963 net/ipv4/route.c hash_keys.ports.dst = fl4->fl4_dport; dst 1978 net/ipv4/route.c hash_keys.addrs.v4addrs.dst = keys.addrs.v4addrs.dst; dst 1982 net/ipv4/route.c hash_keys.addrs.v6addrs.dst = keys.addrs.v6addrs.dst; dst 1994 net/ipv4/route.c hash_keys.addrs.v4addrs.dst = fl4->daddr; dst 2172 net/ipv4/route.c skb_dst_set_noref(skb, &rth->dst); dst 2184 
net/ipv4/route.c rth->dst.output= ip_rt_bug; dst 2186 net/ipv4/route.c rth->dst.tclassid = itag; dst 2192 net/ipv4/route.c rth->dst.input= ip_error; dst 2193 net/ipv4/route.c rth->dst.error= -err; dst 2200 net/ipv4/route.c rth->dst.lwtstate = lwtstate_get(nhc->nhc_lwtstate); dst 2201 net/ipv4/route.c if (lwtunnel_input_redirect(rth->dst.lwtstate)) { dst 2202 net/ipv4/route.c WARN_ON(rth->dst.input == lwtunnel_input); dst 2203 net/ipv4/route.c rth->dst.lwtstate->orig_input = rth->dst.input; dst 2204 net/ipv4/route.c rth->dst.input = lwtunnel_input; dst 2210 net/ipv4/route.c skb_dst_set(skb, &rth->dst); dst 2397 net/ipv4/route.c if (rt_cache_valid(rth) && dst_hold_safe(&rth->dst)) dst 2416 net/ipv4/route.c rth->dst.output = ip_mc_output; dst 2423 net/ipv4/route.c rth->dst.input = ip_mr_input; dst 2424 net/ipv4/route.c rth->dst.output = ip_mc_output; dst 2431 net/ipv4/route.c lwtunnel_set_redirect(&rth->dst); dst 2638 net/ipv4/route.c static struct dst_entry *ipv4_blackhole_dst_check(struct dst_entry *dst, u32 cookie) dst 2643 net/ipv4/route.c static unsigned int ipv4_blackhole_mtu(const struct dst_entry *dst) dst 2645 net/ipv4/route.c unsigned int mtu = dst_metric_raw(dst, RTAX_MTU); dst 2647 net/ipv4/route.c return mtu ? : dst->dev->mtu; dst 2650 net/ipv4/route.c static void ipv4_rt_blackhole_update_pmtu(struct dst_entry *dst, struct sock *sk, dst 2656 net/ipv4/route.c static void ipv4_rt_blackhole_redirect(struct dst_entry *dst, struct sock *sk, dst 2661 net/ipv4/route.c static u32 *ipv4_rt_blackhole_cow_metrics(struct dst_entry *dst, dst 2685 net/ipv4/route.c struct dst_entry *new = &rt->dst; dst 2715 net/ipv4/route.c return rt ? &rt->dst : ERR_PTR(-ENOMEM); dst 2727 net/ipv4/route.c rt = (struct rtable *)xfrm_lookup_route(net, &rt->dst, dst 2736 net/ipv4/route.c static int rt_fill_info(struct net *net, __be32 dst, __be32 src, dst 2768 net/ipv4/route.c if (nla_put_in_addr(skb, RTA_DST, dst)) dst 2775 net/ipv4/route.c if (rt->dst.dev && dst 2776 net/ipv4/route.c nla_put_u32(skb, RTA_OIF, rt->dst.dev->ifindex)) dst 2779 net/ipv4/route.c if (rt->dst.tclassid && dst 2780 net/ipv4/route.c nla_put_u32(skb, RTA_FLOW, rt->dst.tclassid)) dst 2807 net/ipv4/route.c expires = rt->dst.expires; dst 2817 net/ipv4/route.c memcpy(metrics, dst_metrics_ptr(&rt->dst), sizeof(metrics)); dst 2838 net/ipv4/route.c if (ipv4_is_multicast(dst) && dst 2839 net/ipv4/route.c !ipv4_is_local_multicast(dst) && dst 2857 net/ipv4/route.c error = rt->dst.error; dst 2859 net/ipv4/route.c if (rtnl_put_cacheinfo(skb, &rt->dst, 0, expires, error) < 0) dst 2945 net/ipv4/route.c static struct sk_buff *inet_rtm_getroute_build_skb(__be32 src, __be32 dst, dst 2965 net/ipv4/route.c iph->daddr = dst; dst 2991 net/ipv4/route.c src, dst, 0); dst 3088 net/ipv4/route.c __be32 dst = 0; dst 3101 net/ipv4/route.c dst = tb[RTA_DST] ? 
nla_get_in_addr(tb[RTA_DST]) : 0; dst 3122 net/ipv4/route.c skb = inet_rtm_getroute_build_skb(src, dst, ip_proto, sport, dport); dst 3126 net/ipv4/route.c fl4.daddr = dst; dst 3152 net/ipv4/route.c err = ip_route_input_rcu(skb, dst, src, rtm->rtm_tos, dst 3156 net/ipv4/route.c if (err == 0 && rt->dst.error) dst 3157 net/ipv4/route.c err = -rt->dst.error; dst 3166 net/ipv4/route.c skb_dst_set(skb, &rt->dst); dst 3196 net/ipv4/route.c err = rt_fill_info(net, dst, src, rt, table_id, &fl4, skb, dst 203 net/ipv4/syncookies.c struct dst_entry *dst, u32 tsoff) dst 209 net/ipv4/syncookies.c child = icsk->icsk_af_ops->syn_recv_sock(sk, skb, req, dst, dst 265 net/ipv4/syncookies.c const struct net *net, const struct dst_entry *dst) dst 275 net/ipv4/syncookies.c return dst_feature(dst, RTAX_FEATURE_ECN); dst 388 net/ipv4/syncookies.c req->rsk_window_clamp = tp->window_clamp ? :dst_metric(&rt->dst, RTAX_WINDOW); dst 393 net/ipv4/syncookies.c dst_metric(&rt->dst, RTAX_INITRWND)); dst 396 net/ipv4/syncookies.c ireq->ecn_ok = cookie_ecn_ok(&tcp_opt, sock_net(sk), &rt->dst); dst 398 net/ipv4/syncookies.c ret = tcp_get_cookie_sock(sk, skb, req, &rt->dst, tsoff); dst 326 net/ipv4/tcp_fastopen.c const struct dst_entry *dst, dst 331 net/ipv4/tcp_fastopen.c (dst && dst_metric(dst, RTAX_FASTOPEN_NO_COOKIE)); dst 341 net/ipv4/tcp_fastopen.c const struct dst_entry *dst) dst 360 net/ipv4/tcp_fastopen.c tcp_fastopen_no_cookie(sk, dst, TFO_SERVER_COOKIE_NOT_REQD)) dst 409 net/ipv4/tcp_fastopen.c const struct dst_entry *dst; dst 419 net/ipv4/tcp_fastopen.c dst = __sk_dst_get(sk); dst 421 net/ipv4/tcp_fastopen.c if (tcp_fastopen_no_cookie(sk, dst, TFO_CLIENT_NO_COOKIE)) { dst 525 net/ipv4/tcp_fastopen.c struct dst_entry *dst; dst 541 net/ipv4/tcp_fastopen.c dst = sk_dst_get(sk); dst 542 net/ipv4/tcp_fastopen.c if (!(dst && dst->dev && (dst->dev->flags & IFF_LOOPBACK))) dst 544 net/ipv4/tcp_fastopen.c dst_release(dst); dst 242 net/ipv4/tcp_input.c const struct dst_entry *dst = __sk_dst_get(sk); dst 244 net/ipv4/tcp_input.c return (dst && dst_metric(dst, RTAX_QUICKACK)) || dst 863 net/ipv4/tcp_input.c __u32 tcp_init_cwnd(const struct tcp_sock *tp, const struct dst_entry *dst) dst 865 net/ipv4/tcp_input.c __u32 cwnd = (dst ? 
dst_metric(dst, RTAX_INITCWND) : 0); dst 6410 net/ipv4/tcp_input.c const struct dst_entry *dst) dst 6422 net/ipv4/tcp_input.c ecn_ok_dst = dst_feature(dst, DST_FEATURE_ECN_MASK); dst 6570 net/ipv4/tcp_input.c struct dst_entry *dst; dst 6623 net/ipv4/tcp_input.c dst = af_ops->route_req(sk, &fl, req); dst 6624 net/ipv4/tcp_input.c if (!dst) dst 6632 net/ipv4/tcp_input.c !tcp_peer_is_proven(req, dst)) { dst 6648 net/ipv4/tcp_input.c tcp_ecn_create_request(req, skb, sk, dst); dst 6659 net/ipv4/tcp_input.c tcp_openreq_init_rwin(req, sk, dst); dst 6663 net/ipv4/tcp_input.c fastopen_sk = tcp_try_fastopen(sk, skb, req, &foc, dst); dst 6666 net/ipv4/tcp_input.c af_ops->send_synack(fastopen_sk, dst, &fl, req, dst 6683 net/ipv4/tcp_input.c af_ops->send_synack(sk, dst, &fl, req, &foc, dst 6695 net/ipv4/tcp_input.c dst_release(dst); dst 291 net/ipv4/tcp_ipv4.c sk_setup_caps(sk, &rt->dst); dst 341 net/ipv4/tcp_ipv4.c struct dst_entry *dst; dst 347 net/ipv4/tcp_ipv4.c dst = inet_csk_update_pmtu(sk, mtu); dst 348 net/ipv4/tcp_ipv4.c if (!dst) dst 354 net/ipv4/tcp_ipv4.c if (mtu < dst_mtu(dst) && ip_dont_fragment(sk, dst)) dst 357 net/ipv4/tcp_ipv4.c mtu = dst_mtu(dst); dst 376 net/ipv4/tcp_ipv4.c struct dst_entry *dst = __sk_dst_check(sk, 0); dst 378 net/ipv4/tcp_ipv4.c if (dst) dst 379 net/ipv4/tcp_ipv4.c dst->ops->redirect(dst, sk, skb); dst 938 net/ipv4/tcp_ipv4.c static int tcp_v4_send_synack(const struct sock *sk, struct dst_entry *dst, dst 950 net/ipv4/tcp_ipv4.c if (!dst && (dst = inet_csk_route_req(sk, &fl4, req)) == NULL) dst 953 net/ipv4/tcp_ipv4.c skb = tcp_make_synack(sk, dst, req, foc, synack_type); dst 1415 net/ipv4/tcp_ipv4.c struct dst_entry *dst, dst 1455 net/ipv4/tcp_ipv4.c if (!dst) { dst 1456 net/ipv4/tcp_ipv4.c dst = inet_csk_route_child_sock(sk, newsk, req); dst 1457 net/ipv4/tcp_ipv4.c if (!dst) dst 1462 net/ipv4/tcp_ipv4.c sk_setup_caps(newsk, dst); dst 1464 net/ipv4/tcp_ipv4.c tcp_ca_openreq_child(newsk, dst); dst 1466 net/ipv4/tcp_ipv4.c tcp_sync_mss(newsk, dst_mtu(dst)); dst 1467 net/ipv4/tcp_ipv4.c newtp->advmss = tcp_mss_clamp(tcp_sk(sk), dst_metric_advmss(dst)); dst 1502 net/ipv4/tcp_ipv4.c dst_release(dst); dst 1553 net/ipv4/tcp_ipv4.c struct dst_entry *dst = sk->sk_rx_dst; dst 1557 net/ipv4/tcp_ipv4.c if (dst) { dst 1559 net/ipv4/tcp_ipv4.c !dst->ops->check(dst, 0)) { dst 1560 net/ipv4/tcp_ipv4.c dst_release(dst); dst 1636 net/ipv4/tcp_ipv4.c struct dst_entry *dst = READ_ONCE(sk->sk_rx_dst); dst 1638 net/ipv4/tcp_ipv4.c if (dst) dst 1639 net/ipv4/tcp_ipv4.c dst = dst_check(dst, 0); dst 1640 net/ipv4/tcp_ipv4.c if (dst && dst 1642 net/ipv4/tcp_ipv4.c skb_dst_set_noref(skb, dst); dst 2038 net/ipv4/tcp_ipv4.c struct dst_entry *dst = skb_dst(skb); dst 2040 net/ipv4/tcp_ipv4.c if (dst && dst_hold_safe(dst)) { dst 2041 net/ipv4/tcp_ipv4.c sk->sk_rx_dst = dst; dst 94 net/ipv4/tcp_metrics.c const struct dst_entry *dst, dst 103 net/ipv4/tcp_metrics.c if (dst_metric_locked(dst, RTAX_RTT)) dst 105 net/ipv4/tcp_metrics.c if (dst_metric_locked(dst, RTAX_RTTVAR)) dst 107 net/ipv4/tcp_metrics.c if (dst_metric_locked(dst, RTAX_SSTHRESH)) dst 109 net/ipv4/tcp_metrics.c if (dst_metric_locked(dst, RTAX_CWND)) dst 111 net/ipv4/tcp_metrics.c if (dst_metric_locked(dst, RTAX_REORDERING)) dst 115 net/ipv4/tcp_metrics.c msval = dst_metric_raw(dst, RTAX_RTT); dst 118 net/ipv4/tcp_metrics.c msval = dst_metric_raw(dst, RTAX_RTTVAR); dst 120 net/ipv4/tcp_metrics.c tm->tcpm_vals[TCP_METRIC_SSTHRESH] = dst_metric_raw(dst, RTAX_SSTHRESH); dst 121 net/ipv4/tcp_metrics.c tm->tcpm_vals[TCP_METRIC_CWND] = 
dst_metric_raw(dst, RTAX_CWND); dst 122 net/ipv4/tcp_metrics.c tm->tcpm_vals[TCP_METRIC_REORDERING] = dst_metric_raw(dst, RTAX_REORDERING); dst 134 net/ipv4/tcp_metrics.c static void tcpm_check_stamp(struct tcp_metrics_block *tm, struct dst_entry *dst) dst 137 net/ipv4/tcp_metrics.c tcpm_suck_dst(tm, dst, false); dst 146 net/ipv4/tcp_metrics.c static struct tcp_metrics_block *tcpm_new(struct dst_entry *dst, dst 156 net/ipv4/tcp_metrics.c net = dev_net(dst->dev); dst 167 net/ipv4/tcp_metrics.c tcpm_check_stamp(tm, dst); dst 190 net/ipv4/tcp_metrics.c tcpm_suck_dst(tm, dst, true); dst 230 net/ipv4/tcp_metrics.c struct dst_entry *dst) dst 256 net/ipv4/tcp_metrics.c net = dev_net(dst->dev); dst 267 net/ipv4/tcp_metrics.c tcpm_check_stamp(tm, dst); dst 272 net/ipv4/tcp_metrics.c struct dst_entry *dst, dst 301 net/ipv4/tcp_metrics.c net = dev_net(dst->dev); dst 309 net/ipv4/tcp_metrics.c tm = tcpm_new(dst, &saddr, &daddr, hash); dst 311 net/ipv4/tcp_metrics.c tcpm_check_stamp(tm, dst); dst 323 net/ipv4/tcp_metrics.c struct dst_entry *dst = __sk_dst_get(sk); dst 332 net/ipv4/tcp_metrics.c if (net->ipv4.sysctl_tcp_nometrics_save || !dst) dst 341 net/ipv4/tcp_metrics.c tm = tcp_get_metrics(sk, dst, false); dst 346 net/ipv4/tcp_metrics.c tm = tcp_get_metrics(sk, dst, true); dst 442 net/ipv4/tcp_metrics.c struct dst_entry *dst = __sk_dst_get(sk); dst 448 net/ipv4/tcp_metrics.c if (!dst) dst 452 net/ipv4/tcp_metrics.c tm = tcp_get_metrics(sk, dst, true); dst 517 net/ipv4/tcp_metrics.c bool tcp_peer_is_proven(struct request_sock *req, struct dst_entry *dst) dst 522 net/ipv4/tcp_metrics.c if (!dst) dst 526 net/ipv4/tcp_metrics.c tm = __tcp_get_metrics_req(req, dst); dst 565 net/ipv4/tcp_metrics.c struct dst_entry *dst = __sk_dst_get(sk); dst 568 net/ipv4/tcp_metrics.c if (!dst) dst 571 net/ipv4/tcp_metrics.c tm = tcp_get_metrics(sk, dst, true); dst 362 net/ipv4/tcp_minisocks.c const struct dst_entry *dst) dst 372 net/ipv4/tcp_minisocks.c mss = tcp_mss_clamp(tp, dst_metric_advmss(dst)); dst 375 net/ipv4/tcp_minisocks.c req->rsk_window_clamp = window_clamp ? 
: dst_metric(dst, RTAX_WINDOW); dst 384 net/ipv4/tcp_minisocks.c rcv_wnd = dst_metric(dst, RTAX_INITRWND); dst 406 net/ipv4/tcp_minisocks.c void tcp_ca_openreq_child(struct sock *sk, const struct dst_entry *dst) dst 409 net/ipv4/tcp_minisocks.c u32 ca_key = dst_metric(dst, RTAX_CC_ALGO); dst 418 net/ipv4/tcp_minisocks.c icsk->icsk_ca_dst_locked = tcp_ca_dst_locked(dst); dst 122 net/ipv4/tcp_output.c const struct dst_entry *dst = __sk_dst_get(sk); dst 125 net/ipv4/tcp_output.c if (dst) { dst 126 net/ipv4/tcp_output.c unsigned int metric = dst_metric_advmss(dst); dst 329 net/ipv4/tcp_output.c const struct dst_entry *dst = __sk_dst_get(sk); dst 331 net/ipv4/tcp_output.c if (dst && dst_feature(dst, RTAX_FEATURE_ECN)) dst 1480 net/ipv4/tcp_output.c const struct dst_entry *dst = __sk_dst_get(sk); dst 1482 net/ipv4/tcp_output.c if (dst && dst_allfrag(dst)) dst 1520 net/ipv4/tcp_output.c const struct dst_entry *dst = __sk_dst_get(sk); dst 1522 net/ipv4/tcp_output.c if (dst && dst_allfrag(dst)) dst 1596 net/ipv4/tcp_output.c const struct dst_entry *dst = __sk_dst_get(sk); dst 1604 net/ipv4/tcp_output.c if (dst) { dst 1605 net/ipv4/tcp_output.c u32 mtu = dst_mtu(dst); dst 3266 net/ipv4/tcp_output.c struct sk_buff *tcp_make_synack(const struct sock *sk, struct dst_entry *dst, dst 3283 net/ipv4/tcp_output.c dst_release(dst); dst 3306 net/ipv4/tcp_output.c skb_dst_set(skb, dst); dst 3308 net/ipv4/tcp_output.c mss = tcp_mss_clamp(tp, dst_metric_advmss(dst)); dst 3368 net/ipv4/tcp_output.c static void tcp_ca_dst_init(struct sock *sk, const struct dst_entry *dst) dst 3372 net/ipv4/tcp_output.c u32 ca_key = dst_metric(dst, RTAX_CC_ALGO); dst 3381 net/ipv4/tcp_output.c icsk->icsk_ca_dst_locked = tcp_ca_dst_locked(dst); dst 3390 net/ipv4/tcp_output.c const struct dst_entry *dst = __sk_dst_get(sk); dst 3412 net/ipv4/tcp_output.c tcp_sync_mss(sk, dst_mtu(dst)); dst 3414 net/ipv4/tcp_output.c tcp_ca_dst_init(sk, dst); dst 3417 net/ipv4/tcp_output.c tp->window_clamp = dst_metric(dst, RTAX_WINDOW); dst 3418 net/ipv4/tcp_output.c tp->advmss = tcp_mss_clamp(tp, dst_metric_advmss(dst)); dst 3429 net/ipv4/tcp_output.c rcv_wnd = dst_metric(dst, RTAX_INITRWND); dst 749 net/ipv4/udp.c void udp4_hwcsum(struct sk_buff *skb, __be32 src, __be32 dst) dst 763 net/ipv4/udp.c uh->check = ~csum_tcpudp_magic(src, dst, len, dst 781 net/ipv4/udp.c uh->check = csum_tcpudp_magic(src, dst, len, IPPROTO_UDP, csum); dst 1158 net/ipv4/udp.c sk_dst_set(sk, dst_clone(&rt->dst)); dst 1237 net/ipv4/udp.c dst_confirm_neigh(&rt->dst, &fl4->daddr); dst 2125 net/ipv4/udp.c bool udp_sk_rx_dst_set(struct sock *sk, struct dst_entry *dst) dst 2129 net/ipv4/udp.c if (dst_hold_safe(dst)) { dst 2130 net/ipv4/udp.c old = xchg(&sk->sk_rx_dst, dst); dst 2132 net/ipv4/udp.c return old != dst; dst 2316 net/ipv4/udp.c struct dst_entry *dst = skb_dst(skb); dst 2319 net/ipv4/udp.c if (unlikely(sk->sk_rx_dst != dst)) dst 2320 net/ipv4/udp.c udp_sk_rx_dst_set(sk, dst); dst 2441 net/ipv4/udp.c struct dst_entry *dst; dst 2477 net/ipv4/udp.c dst = READ_ONCE(sk->sk_rx_dst); dst 2479 net/ipv4/udp.c if (dst) dst 2480 net/ipv4/udp.c dst = dst_check(dst, 0); dst 2481 net/ipv4/udp.c if (dst) { dst 2488 net/ipv4/udp.c skb_dst_set_noref(skb, dst); dst 174 net/ipv4/udp_tunnel.c __be32 src, __be32 dst, __u8 tos, __u8 ttl, dst 190 net/ipv4/udp_tunnel.c udp_set_csum(nocheck, skb, src, dst, skb->len); dst 192 net/ipv4/udp_tunnel.c iptunnel_xmit(sk, rt, skb, src, dst, IPPROTO_UDP, tos, ttl, df, xnet); dst 40 net/ipv4/xfrm4_policy.c return &rt->dst; dst 59 net/ipv4/xfrm4_policy.c 
struct dst_entry *dst; dst 62 net/ipv4/xfrm4_policy.c dst = __xfrm4_dst_lookup(net, &fl4, 0, oif, NULL, daddr, mark); dst 63 net/ipv4/xfrm4_policy.c if (IS_ERR(dst)) dst 67 net/ipv4/xfrm4_policy.c dst_release(dst); dst 79 net/ipv4/xfrm4_policy.c xdst->u.dst.dev = dev; dst 102 net/ipv4/xfrm4_policy.c static void xfrm4_update_pmtu(struct dst_entry *dst, struct sock *sk, dst 106 net/ipv4/xfrm4_policy.c struct xfrm_dst *xdst = (struct xfrm_dst *)dst; dst 112 net/ipv4/xfrm4_policy.c static void xfrm4_redirect(struct dst_entry *dst, struct sock *sk, dst 115 net/ipv4/xfrm4_policy.c struct xfrm_dst *xdst = (struct xfrm_dst *)dst; dst 121 net/ipv4/xfrm4_policy.c static void xfrm4_dst_destroy(struct dst_entry *dst) dst 123 net/ipv4/xfrm4_policy.c struct xfrm_dst *xdst = (struct xfrm_dst *)dst; dst 125 net/ipv4/xfrm4_policy.c dst_destroy_metrics_generic(dst); dst 131 net/ipv4/xfrm4_policy.c static void xfrm4_dst_ifdown(struct dst_entry *dst, struct net_device *dev, dst 137 net/ipv4/xfrm4_policy.c xfrm_dst_ifdown(dst, dev); dst 1516 net/ipv6/addrconf.c struct ipv6_saddr_dst *dst, dst 1542 net/ipv6/addrconf.c ret = ipv6_addr_equal(&score->ifa->addr, dst->addr); dst 1567 net/ipv6/addrconf.c if (ret >= dst->scope) dst 1588 net/ipv6/addrconf.c int prefhome = !(dst->prefs & IPV6_PREFER_SRC_COA); dst 1595 net/ipv6/addrconf.c ret = (!dst->ifindex || dst 1596 net/ipv6/addrconf.c dst->ifindex == score->ifa->idev->dev->ifindex); dst 1602 net/ipv6/addrconf.c score->ifa->idev->dev->ifindex) == dst->label; dst 1609 net/ipv6/addrconf.c int preftmp = dst->prefs & (IPV6_PREFER_SRC_PUBLIC|IPV6_PREFER_SRC_TMP) ? dst 1610 net/ipv6/addrconf.c !!(dst->prefs & IPV6_PREFER_SRC_TMP) : dst 1620 net/ipv6/addrconf.c ipv6_addr_orchid(dst->addr)); dst 1624 net/ipv6/addrconf.c ret = ipv6_addr_diff(&score->ifa->addr, dst->addr); dst 1649 net/ipv6/addrconf.c struct ipv6_saddr_dst *dst, dst 1688 net/ipv6/addrconf.c minihiscore = ipv6_get_saddr_eval(net, hiscore, dst, i); dst 1689 net/ipv6/addrconf.c miniscore = ipv6_get_saddr_eval(net, score, dst, i); dst 1723 net/ipv6/addrconf.c struct ipv6_saddr_dst *dst, dst 1731 net/ipv6/addrconf.c hiscore_idx = __ipv6_dev_get_saddr(net, dst, idev, dst 1736 net/ipv6/addrconf.c hiscore_idx = __ipv6_dev_get_saddr(net, dst, idev, dst 1747 net/ipv6/addrconf.c struct ipv6_saddr_dst dst; dst 1756 net/ipv6/addrconf.c dst.addr = daddr; dst 1757 net/ipv6/addrconf.c dst.ifindex = dst_dev ? 
dst_dev->ifindex : 0; dst 1758 net/ipv6/addrconf.c dst.scope = __ipv6_addr_src_scope(dst_type); dst 1759 net/ipv6/addrconf.c dst.label = ipv6_addr_label(net, daddr, dst_type, dst.ifindex); dst 1760 net/ipv6/addrconf.c dst.prefs = prefs; dst 1786 net/ipv6/addrconf.c dst.scope <= IPV6_ADDR_SCOPE_LINKLOCAL || dst 1794 net/ipv6/addrconf.c hiscore_idx = __ipv6_dev_get_saddr(net, &dst, idev, scores, hiscore_idx); dst 1808 net/ipv6/addrconf.c master, &dst, dst 1824 net/ipv6/addrconf.c hiscore_idx = __ipv6_dev_get_saddr(net, &dst, idev, scores, hiscore_idx); dst 6384 net/ipv6/addrconf.c rt->dst.flags |= DST_NOPOLICY; dst 6386 net/ipv6/addrconf.c rt->dst.flags &= ~DST_NOPOLICY; dst 742 net/ipv6/af_inet6.c struct dst_entry *dst; dst 744 net/ipv6/af_inet6.c dst = __sk_dst_check(sk, np->dst_cookie); dst 746 net/ipv6/af_inet6.c if (!dst) { dst 768 net/ipv6/af_inet6.c dst = ip6_dst_lookup_flow(sock_net(sk), sk, &fl6, final_p); dst 769 net/ipv6/af_inet6.c if (IS_ERR(dst)) { dst 771 net/ipv6/af_inet6.c sk->sk_err_soft = -PTR_ERR(dst); dst 772 net/ipv6/af_inet6.c return PTR_ERR(dst); dst 775 net/ipv6/af_inet6.c ip6_dst_store(sk, dst, NULL, NULL); dst 97 net/ipv6/anycast.c dev = rt->dst.dev; dst 70 net/ipv6/datagram.c struct dst_entry *dst; dst 88 net/ipv6/datagram.c dst = ip6_dst_lookup_flow(sock_net(sk), sk, &fl6, final_p); dst 89 net/ipv6/datagram.c if (IS_ERR(dst)) { dst 90 net/ipv6/datagram.c err = PTR_ERR(dst); dst 106 net/ipv6/datagram.c ip6_sk_dst_store_flow(sk, dst, &fl6); dst 115 net/ipv6/datagram.c struct dst_entry *dst; dst 121 net/ipv6/datagram.c dst = __sk_dst_get(sk); dst 122 net/ipv6/datagram.c if (!dst || !dst->obsolete || dst 123 net/ipv6/datagram.c dst->ops->check(dst, inet6_sk(sk)->dst_cookie)) { dst 122 net/ipv6/esp6.c if (req->src != req->dst) dst 445 net/ipv6/esp6.c struct xfrm_dst *dst = (struct xfrm_dst *)skb_dst(skb); dst 448 net/ipv6/esp6.c padto = min(x->tfcpad, xfrm_state_mtu(x, dst->child_mtu_cached)); dst 284 net/ipv6/exthdrs.c struct dst_entry *dst = skb_dst(skb); dst 291 net/ipv6/exthdrs.c __IP6_INC_STATS(dev_net(dst->dev), idev, dst 26 net/ipv6/fib6_rules.c struct rt6key dst; dst 34 net/ipv6/fib6_rules.c if (r->dst.plen || r->src.plen || r->tclass) dst 109 net/ipv6/fib6_rules.c return &res.rt6->dst; dst 114 net/ipv6/fib6_rules.c if (rt != net->ipv6.ip6_null_entry && rt->dst.error != -EAGAIN) dst 115 net/ipv6/fib6_rules.c return &rt->dst; dst 118 net/ipv6/fib6_rules.c if (rt->dst.error != -EAGAIN) dst 119 net/ipv6/fib6_rules.c return &rt->dst; dst 124 net/ipv6/fib6_rules.c dst_hold(&net->ipv6.ip6_null_entry->dst); dst 125 net/ipv6/fib6_rules.c return &net->ipv6.ip6_null_entry->dst; dst 231 net/ipv6/fib6_rules.c ip6_dst_idev(&rt->dst)->dev); dst 236 net/ipv6/fib6_rules.c err = rt->dst.error; dst 248 net/ipv6/fib6_rules.c dst_hold(&rt->dst); dst 300 net/ipv6/fib6_rules.c if (r->dst.plen && dst 301 net/ipv6/fib6_rules.c !ipv6_prefix_equal(&fl6->daddr, &r->dst.addr, r->dst.plen)) dst 364 net/ipv6/fib6_rules.c rule6->dst.addr = nla_get_in6_addr(tb[FRA_DST]); dst 367 net/ipv6/fib6_rules.c rule6->dst.plen = frh->dst_len; dst 398 net/ipv6/fib6_rules.c if (frh->dst_len && (rule6->dst.plen != frh->dst_len)) dst 409 net/ipv6/fib6_rules.c nla_memcmp(tb[FRA_DST], &rule6->dst.addr, sizeof(struct in6_addr))) dst 420 net/ipv6/fib6_rules.c frh->dst_len = rule6->dst.plen; dst 424 net/ipv6/fib6_rules.c if ((rule6->dst.plen && dst 425 net/ipv6/fib6_rules.c nla_put_in6_addr(skb, FRA_DST, &rule6->dst.addr)) || dst 197 net/ipv6/icmp.c struct dst_entry *dst; dst 208 net/ipv6/icmp.c dst = 
ip6_route_output(net, sk, fl6); dst 209 net/ipv6/icmp.c if (dst->error) { dst 210 net/ipv6/icmp.c IP6_INC_STATS(net, ip6_dst_idev(dst), dst 212 net/ipv6/icmp.c } else if (dst->dev && (dst->dev->flags&IFF_LOOPBACK)) { dst 215 net/ipv6/icmp.c struct rt6_info *rt = (struct rt6_info *)dst; dst 228 net/ipv6/icmp.c dst_release(dst); dst 337 net/ipv6/icmp.c struct dst_entry *dst, *dst2; dst 341 net/ipv6/icmp.c err = ip6_dst_lookup(net, sk, &dst, fl6); dst 349 net/ipv6/icmp.c if (ipv6_anycast_destination(dst, &fl6->daddr)) { dst 351 net/ipv6/icmp.c dst_release(dst); dst 356 net/ipv6/icmp.c dst2 = dst; dst 358 net/ipv6/icmp.c dst = xfrm_lookup(net, dst, flowi6_to_flowi(fl6), sk, 0); dst 359 net/ipv6/icmp.c if (!IS_ERR(dst)) { dst 360 net/ipv6/icmp.c if (dst != dst2) dst 361 net/ipv6/icmp.c return dst; dst 363 net/ipv6/icmp.c if (PTR_ERR(dst) == -EPERM) dst 364 net/ipv6/icmp.c dst = NULL; dst 366 net/ipv6/icmp.c return dst; dst 379 net/ipv6/icmp.c dst_release(dst); dst 380 net/ipv6/icmp.c dst = dst2; dst 384 net/ipv6/icmp.c dst_release(dst); dst 391 net/ipv6/icmp.c if (dst) dst 392 net/ipv6/icmp.c return dst; dst 432 net/ipv6/icmp.c struct dst_entry *dst; dst 485 net/ipv6/icmp.c dst = skb_dst(skb); dst 486 net/ipv6/icmp.c iif = l3mdev_master_ifindex(dst ? dst->dev : skb->dev); dst 557 net/ipv6/icmp.c dst = icmpv6_route_lookup(net, skb, sk, &fl6); dst 558 net/ipv6/icmp.c if (IS_ERR(dst)) dst 561 net/ipv6/icmp.c ipc6.hlimit = ip6_sk_dst_hoplimit(np, &fl6, dst); dst 581 net/ipv6/icmp.c &ipc6, &fl6, (struct rt6_info *)dst, dst 591 net/ipv6/icmp.c dst_release(dst); dst 639 net/ipv6/icmp.c if (rt && rt->dst.dev) dst 640 net/ipv6/icmp.c skb2->dev = rt->dst.dev; dst 683 net/ipv6/icmp.c struct dst_entry *dst; dst 731 net/ipv6/icmp.c if (ip6_dst_lookup(net, sk, &dst, &fl6)) dst 733 net/ipv6/icmp.c dst = xfrm_lookup(net, dst, flowi6_to_flowi(&fl6), sk, 0); dst 734 net/ipv6/icmp.c if (IS_ERR(dst)) dst 749 net/ipv6/icmp.c ipc6.hlimit = ip6_sk_dst_hoplimit(np, &fl6, dst); dst 755 net/ipv6/icmp.c (struct rt6_info *)dst, MSG_DONTWAIT)) { dst 763 net/ipv6/icmp.c dst_release(dst); dst 43 net/ipv6/ila/ila_lwt.c struct dst_entry *dst; dst 61 net/ipv6/ila/ila_lwt.c dst = dst_cache_get(&ilwt->dst_cache); dst 62 net/ipv6/ila/ila_lwt.c if (unlikely(!dst)) { dst 76 net/ipv6/ila/ila_lwt.c dst = ip6_route_output(net, NULL, &fl6); dst 77 net/ipv6/ila/ila_lwt.c if (dst->error) { dst 79 net/ipv6/ila/ila_lwt.c dst_release(dst); dst 83 net/ipv6/ila/ila_lwt.c dst = xfrm_lookup(net, dst, flowi6_to_flowi(&fl6), NULL, 0); dst 84 net/ipv6/ila/ila_lwt.c if (IS_ERR(dst)) { dst 85 net/ipv6/ila/ila_lwt.c err = PTR_ERR(dst); dst 90 net/ipv6/ila/ila_lwt.c dst_cache_set_ip6(&ilwt->dst_cache, dst, &fl6.saddr); dst 93 net/ipv6/ila/ila_lwt.c skb_dst_set(skb, dst); dst 103 net/ipv6/ila/ila_lwt.c struct dst_entry *dst = skb_dst(skb); dst 104 net/ipv6/ila/ila_lwt.c struct ila_lwt *ilwt = ila_lwt_lwtunnel(dst->lwtstate); dst 111 net/ipv6/ila/ila_lwt.c ila_params_lwtunnel(dst->lwtstate), dst 114 net/ipv6/ila/ila_lwt.c return dst->lwtstate->orig_input(skb); dst 35 net/ipv6/inet6_connection_sock.c struct dst_entry *dst; dst 51 net/ipv6/inet6_connection_sock.c dst = ip6_dst_lookup_flow(sock_net(sk), sk, fl6, final_p); dst 52 net/ipv6/inet6_connection_sock.c if (IS_ERR(dst)) dst 55 net/ipv6/inet6_connection_sock.c return dst; dst 85 net/ipv6/inet6_connection_sock.c struct dst_entry *dst; dst 104 net/ipv6/inet6_connection_sock.c dst = __inet6_csk_dst_check(sk, np->dst_cookie); dst 105 net/ipv6/inet6_connection_sock.c if (!dst) { dst 106 
net/ipv6/inet6_connection_sock.c dst = ip6_dst_lookup_flow(sock_net(sk), sk, fl6, final_p); dst 108 net/ipv6/inet6_connection_sock.c if (!IS_ERR(dst)) dst 109 net/ipv6/inet6_connection_sock.c ip6_dst_store(sk, dst, NULL, NULL); dst 111 net/ipv6/inet6_connection_sock.c return dst; dst 118 net/ipv6/inet6_connection_sock.c struct dst_entry *dst; dst 121 net/ipv6/inet6_connection_sock.c dst = inet6_csk_route_socket(sk, &fl6); dst 122 net/ipv6/inet6_connection_sock.c if (IS_ERR(dst)) { dst 123 net/ipv6/inet6_connection_sock.c sk->sk_err_soft = -PTR_ERR(dst); dst 126 net/ipv6/inet6_connection_sock.c return PTR_ERR(dst); dst 130 net/ipv6/inet6_connection_sock.c skb_dst_set_noref(skb, dst); dst 145 net/ipv6/inet6_connection_sock.c struct dst_entry *dst = inet6_csk_route_socket(sk, &fl6); dst 147 net/ipv6/inet6_connection_sock.c if (IS_ERR(dst)) dst 149 net/ipv6/inet6_connection_sock.c dst->ops->update_pmtu(dst, sk, NULL, mtu, true); dst 151 net/ipv6/inet6_connection_sock.c dst = inet6_csk_route_socket(sk, &fl6); dst 152 net/ipv6/inet6_connection_sock.c return IS_ERR(dst) ? NULL : dst; dst 318 net/ipv6/ip6_fib.c if (rt->dst.error == -EAGAIN) { dst 322 net/ipv6/ip6_fib.c dst_hold(&rt->dst); dst 325 net/ipv6/ip6_fib.c return &rt->dst; dst 436 net/ipv6/ip6_flowlabel.c fl->dst = freq->flr_dst; dst 521 net/ipv6/ip6_flowlabel.c freq->flr_dst = sfl->fl->dst; dst 825 net/ipv6/ip6_flowlabel.c &fl->dst, dst 741 net/ipv6/ip6_gre.c fl6->daddr = key->u.ipv6.dst; dst 916 net/ipv6/ip6_gre.c struct dst_entry *dst = skb_dst(skb); dst 976 net/ipv6/ip6_gre.c fl6.daddr = key->u.ipv6.dst; dst 1042 net/ipv6/ip6_gre.c if (!t->parms.collect_md && dst && dst_mtu(dst) > dst->dev->mtu) dst 1043 net/ipv6/ip6_gre.c dst->ops->update_pmtu(dst, NULL, skb, dst->dev->mtu, false); dst 1119 net/ipv6/ip6_gre.c if (rt->dst.dev) { dst 1120 net/ipv6/ip6_gre.c dev->needed_headroom = rt->dst.dev->hard_header_len + dst 1124 net/ipv6/ip6_gre.c dev->mtu = rt->dst.dev->mtu - t_hlen; dst 98 net/ipv6/ip6_input.c struct dst_entry *dst; dst 108 net/ipv6/ip6_input.c dst = skb_dst(skb); dst 109 net/ipv6/ip6_input.c if (curr_dst != dst) { dst 115 net/ipv6/ip6_input.c curr_dst = dst; dst 60 net/ipv6/ip6_output.c struct dst_entry *dst = skb_dst(skb); dst 61 net/ipv6/ip6_output.c struct net_device *dev = dst->dev; dst 102 net/ipv6/ip6_output.c if (lwtunnel_xmit_redirect(dst->lwtstate)) { dst 110 net/ipv6/ip6_output.c nexthop = rt6_nexthop((struct rt6_info *)dst, &ipv6_hdr(skb)->daddr); dst 111 net/ipv6/ip6_output.c neigh = __ipv6_neigh_lookup_noref(dst->dev, nexthop); dst 113 net/ipv6/ip6_output.c neigh = __neigh_create(&nd_tbl, nexthop, dst->dev, false); dst 122 net/ipv6/ip6_output.c IP6_INC_STATS(net, ip6_dst_idev(dst), IPSTATS_MIB_OUTNOROUTES); dst 201 net/ipv6/ip6_output.c struct dst_entry *dst = skb_dst(skb); dst 209 net/ipv6/ip6_output.c head_room = sizeof(struct ipv6hdr) + LL_RESERVED_SPACE(dst->dev); dst 248 net/ipv6/ip6_output.c hlimit = ip6_dst_hoplimit(dst); dst 264 net/ipv6/ip6_output.c mtu = dst_mtu(dst); dst 280 net/ipv6/ip6_output.c net, (struct sock *)sk, skb, NULL, dst->dev, dst 284 net/ipv6/ip6_output.c skb->dev = dst->dev; dst 386 net/ipv6/ip6_output.c struct dst_entry *dst = skb_dst(skb); dst 388 net/ipv6/ip6_output.c __IP6_INC_STATS(net, ip6_dst_idev(dst), IPSTATS_MIB_OUTFORWDATAGRAMS); dst 389 net/ipv6/ip6_output.c __IP6_ADD_STATS(net, ip6_dst_idev(dst), IPSTATS_MIB_OUTOCTETS, skb->len); dst 423 net/ipv6/ip6_output.c struct dst_entry *dst = skb_dst(skb); dst 426 net/ipv6/ip6_output.c struct net *net = dev_net(dst->dev); dst 471 
net/ipv6/ip6_output.c skb->dev = dst->dev; dst 495 net/ipv6/ip6_output.c dst = skb_dst(skb); dst 501 net/ipv6/ip6_output.c if (IP6CB(skb)->iif == dst->dev->ifindex && dst 512 net/ipv6/ip6_output.c rt = (struct rt6_info *) dst; dst 541 net/ipv6/ip6_output.c mtu = ip6_dst_mtu_forward(dst); dst 547 net/ipv6/ip6_output.c skb->dev = dst->dev; dst 550 net/ipv6/ip6_output.c __IP6_INC_STATS(net, ip6_dst_idev(dst), dst 556 net/ipv6/ip6_output.c if (skb_cow(skb, dst->dev->hard_header_len)) { dst 557 net/ipv6/ip6_output.c __IP6_INC_STATS(net, ip6_dst_idev(dst), dst 569 net/ipv6/ip6_output.c net, NULL, skb, skb->dev, dst->dev, dst 817 net/ipv6/ip6_output.c hroom = LL_RESERVED_SPACE(rt->dst.dev); dst 862 net/ipv6/ip6_output.c IP6_INC_STATS(net, ip6_dst_idev(&rt->dst), dst 874 net/ipv6/ip6_output.c IP6_INC_STATS(net, ip6_dst_idev(&rt->dst), dst 881 net/ipv6/ip6_output.c IP6_INC_STATS(net, ip6_dst_idev(&rt->dst), dst 900 net/ipv6/ip6_output.c ip6_frag_init(skb, hlen, mtu, rt->dst.dev->needed_tailroom, dst 901 net/ipv6/ip6_output.c LL_RESERVED_SPACE(rt->dst.dev), prevhdr, nexthdr, frag_id, dst 954 net/ipv6/ip6_output.c struct dst_entry *dst, dst 960 net/ipv6/ip6_output.c if (!dst) dst 963 net/ipv6/ip6_output.c if (dst->ops->family != AF_INET6) { dst 964 net/ipv6/ip6_output.c dst_release(dst); dst 968 net/ipv6/ip6_output.c rt = (struct rt6_info *)dst; dst 991 net/ipv6/ip6_output.c (fl6->flowi6_oif && fl6->flowi6_oif != dst->dev->ifindex))) { dst 992 net/ipv6/ip6_output.c dst_release(dst); dst 993 net/ipv6/ip6_output.c dst = NULL; dst 997 net/ipv6/ip6_output.c return dst; dst 1001 net/ipv6/ip6_output.c struct dst_entry **dst, struct flowi6 *fl6) dst 1019 net/ipv6/ip6_output.c if (ipv6_addr_any(&fl6->saddr) && (!*dst || !(*dst)->error)) { dst 1022 net/ipv6/ip6_output.c bool had_dst = *dst != NULL; dst 1025 net/ipv6/ip6_output.c *dst = ip6_route_output(net, sk, fl6); dst 1026 net/ipv6/ip6_output.c rt = (*dst)->error ? 
NULL : (struct rt6_info *)*dst; dst 1042 net/ipv6/ip6_output.c if (!had_dst && (*dst)->error) { dst 1043 net/ipv6/ip6_output.c dst_release(*dst); dst 1044 net/ipv6/ip6_output.c *dst = NULL; dst 1051 net/ipv6/ip6_output.c if (!*dst) dst 1052 net/ipv6/ip6_output.c *dst = ip6_route_output_flags(net, sk, fl6, flags); dst 1054 net/ipv6/ip6_output.c err = (*dst)->error; dst 1067 net/ipv6/ip6_output.c rt = (struct rt6_info *) *dst; dst 1069 net/ipv6/ip6_output.c n = __ipv6_neigh_lookup_noref(rt->dst.dev, dst 1080 net/ipv6/ip6_output.c (*dst)->dev, 1); dst 1091 net/ipv6/ip6_output.c dst_release(*dst); dst 1094 net/ipv6/ip6_output.c *dst = ip6_route_output(net, sk, &fl_gw6); dst 1095 net/ipv6/ip6_output.c err = (*dst)->error; dst 1110 net/ipv6/ip6_output.c dst_release(*dst); dst 1111 net/ipv6/ip6_output.c *dst = NULL; dst 1128 net/ipv6/ip6_output.c int ip6_dst_lookup(struct net *net, struct sock *sk, struct dst_entry **dst, dst 1131 net/ipv6/ip6_output.c *dst = NULL; dst 1132 net/ipv6/ip6_output.c return ip6_dst_lookup_tail(net, sk, dst, fl6); dst 1150 net/ipv6/ip6_output.c struct dst_entry *dst = NULL; dst 1153 net/ipv6/ip6_output.c err = ip6_dst_lookup_tail(net, sk, &dst, fl6); dst 1159 net/ipv6/ip6_output.c return xfrm_lookup_route(net, dst, flowi6_to_flowi(fl6), sk, 0); dst 1185 net/ipv6/ip6_output.c struct dst_entry *dst = sk_dst_check(sk, inet6_sk(sk)->dst_cookie); dst 1187 net/ipv6/ip6_output.c dst = ip6_sk_dst_check(sk, dst, fl6); dst 1188 net/ipv6/ip6_output.c if (dst) dst 1189 net/ipv6/ip6_output.c return dst; dst 1191 net/ipv6/ip6_output.c dst = ip6_dst_lookup_flow(sock_net(sk), sk, fl6, final_dst); dst 1192 net/ipv6/ip6_output.c if (connected && !IS_ERR(dst)) dst 1193 net/ipv6/ip6_output.c ip6_sk_dst_store_flow(sk, dst_clone(dst), fl6); dst 1195 net/ipv6/ip6_output.c return dst; dst 1218 net/ipv6/ip6_output.c if (!(rt->dst.flags & DST_XFRM_TUNNEL)) { dst 1221 net/ipv6/ip6_output.c *mtu = orig_mtu - rt->dst.header_len; dst 1280 net/ipv6/ip6_output.c dst_hold(&rt->dst); dst 1281 net/ipv6/ip6_output.c cork->base.dst = &rt->dst; dst 1285 net/ipv6/ip6_output.c if (rt->dst.flags & DST_XFRM_TUNNEL) dst 1287 net/ipv6/ip6_output.c READ_ONCE(rt->dst.dev->mtu) : dst_mtu(&rt->dst); dst 1290 net/ipv6/ip6_output.c READ_ONCE(rt->dst.dev->mtu) : dst_mtu(xfrm_dst_path(&rt->dst)); dst 1303 net/ipv6/ip6_output.c if (dst_allfrag(xfrm_dst_path(&rt->dst))) dst 1333 net/ipv6/ip6_output.c struct rt6_info *rt = (struct rt6_info *)cork->dst; dst 1343 net/ipv6/ip6_output.c dst_exthdrlen = rt->dst.header_len - rt->rt6i_nfheader_len; dst 1354 net/ipv6/ip6_output.c hh_len = LL_RESERVED_SPACE(rt->dst.dev); dst 1363 net/ipv6/ip6_output.c (dst_allfrag(&rt->dst) ? 
dst 1400 net/ipv6/ip6_output.c rt->dst.dev->features & (NETIF_F_IPV6_CSUM | NETIF_F_HW_CSUM)) dst 1408 net/ipv6/ip6_output.c if (rt->dst.dev->features & NETIF_F_SG && dst 1471 net/ipv6/ip6_output.c datalen = maxfraglen - fragheaderlen - rt->dst.trailer_len; dst 1476 net/ipv6/ip6_output.c !(rt->dst.dev->features&NETIF_F_SG)) dst 1492 net/ipv6/ip6_output.c datalen += rt->dst.trailer_len; dst 1495 net/ipv6/ip6_output.c alloclen += rt->dst.trailer_len; dst 1591 net/ipv6/ip6_output.c if (!(rt->dst.dev->features&NETIF_F_SG) && dst 1705 net/ipv6/ip6_output.c if (cork->base.dst) { dst 1706 net/ipv6/ip6_output.c dst_release(cork->base.dst); dst 1707 net/ipv6/ip6_output.c cork->base.dst = NULL; dst 1725 net/ipv6/ip6_output.c struct rt6_info *rt = (struct rt6_info *)cork->base.dst; dst 1775 net/ipv6/ip6_output.c skb_dst_set(skb, dst_clone(&rt->dst)); dst 1864 net/ipv6/ip6_output.c cork->base.dst = NULL; dst 617 net/ipv6/ip6_tunnel.c skb2->dev = rt->dst.dev; dst 625 net/ipv6/ip6_tunnel.c if (IS_ERR(rt) || rt->dst.dev->type != ARPHRD_TUNNEL6) { dst 630 net/ipv6/ip6_tunnel.c skb_dst_set(skb2, &rt->dst); dst 682 net/ipv6/ip6_tunnel.c if (rt && rt->dst.dev) dst 683 net/ipv6/ip6_tunnel.c skb2->dev = rt->dst.dev; dst 1041 net/ipv6/ip6_tunnel.c struct dst_entry *dst = NULL, *ndst = NULL; dst 1092 net/ipv6/ip6_tunnel.c dst = dst_cache_get(&t->dst_cache); dst 1097 net/ipv6/ip6_tunnel.c if (!dst) { dst 1102 net/ipv6/ip6_tunnel.c dst = ip6_route_output(net, NULL, fl6); dst 1104 net/ipv6/ip6_tunnel.c if (dst->error) dst 1106 net/ipv6/ip6_tunnel.c dst = xfrm_lookup(net, dst, flowi6_to_flowi(fl6), NULL, 0); dst 1107 net/ipv6/ip6_tunnel.c if (IS_ERR(dst)) { dst 1108 net/ipv6/ip6_tunnel.c err = PTR_ERR(dst); dst 1109 net/ipv6/ip6_tunnel.c dst = NULL; dst 1113 net/ipv6/ip6_tunnel.c ipv6_dev_get_saddr(net, ip6_dst_idev(dst)->dev, dst 1116 net/ipv6/ip6_tunnel.c ndst = dst; dst 1119 net/ipv6/ip6_tunnel.c tdev = dst->dev; dst 1127 net/ipv6/ip6_tunnel.c mtu = dst_mtu(dst) - eth_hlen - psh_hlen - t->tun_hlen; dst 1181 net/ipv6/ip6_tunnel.c skb_dst_set(skb, dst); dst 1189 net/ipv6/ip6_tunnel.c hop_limit = ip6_dst_hoplimit(dst); dst 1195 net/ipv6/ip6_tunnel.c max_headroom = LL_RESERVED_SPACE(dst->dev) + sizeof(struct ipv6hdr) dst 1196 net/ipv6/ip6_tunnel.c + dst->header_len + t->hlen; dst 1224 net/ipv6/ip6_tunnel.c dst_release(dst); dst 1260 net/ipv6/ip6_tunnel.c fl6.daddr = key->u.ipv6.dst; dst 1332 net/ipv6/ip6_tunnel.c fl6.daddr = key->u.ipv6.dst; dst 1464 net/ipv6/ip6_tunnel.c if (rt->dst.dev) { dst 1465 net/ipv6/ip6_tunnel.c dev->hard_header_len = rt->dst.dev->hard_header_len + dst 1468 net/ipv6/ip6_tunnel.c dev->mtu = rt->dst.dev->mtu - t_hlen; dst 81 net/ipv6/ip6_udp_tunnel.c int udp_tunnel6_xmit_skb(struct dst_entry *dst, struct sock *sk, dst 100 net/ipv6/ip6_udp_tunnel.c skb_dst_set(skb, dst); dst 412 net/ipv6/ip6_vti.c const struct in6_addr *dst, dst 415 net/ipv6/ip6_vti.c xfrm_address_t *daddr = (xfrm_address_t *)dst; dst 425 net/ipv6/ip6_vti.c if (ipv6_addr_any(dst)) dst 445 net/ipv6/ip6_vti.c struct dst_entry *dst = skb_dst(skb); dst 452 net/ipv6/ip6_vti.c if (!dst) { dst 462 net/ipv6/ip6_vti.c dst = &rt->dst; dst 463 net/ipv6/ip6_vti.c skb_dst_set(skb, dst); dst 469 net/ipv6/ip6_vti.c dst = ip6_route_output(dev_net(dev), NULL, &fl->u.ip6); dst 470 net/ipv6/ip6_vti.c if (dst->error) { dst 471 net/ipv6/ip6_vti.c dst_release(dst); dst 472 net/ipv6/ip6_vti.c dst = NULL; dst 475 net/ipv6/ip6_vti.c skb_dst_set(skb, dst); dst 482 net/ipv6/ip6_vti.c dst_hold(dst); dst 483 net/ipv6/ip6_vti.c dst = xfrm_lookup(t->net, 
dst, fl, NULL, 0); dst 484 net/ipv6/ip6_vti.c if (IS_ERR(dst)) { dst 485 net/ipv6/ip6_vti.c err = PTR_ERR(dst); dst 486 net/ipv6/ip6_vti.c dst = NULL; dst 490 net/ipv6/ip6_vti.c x = dst->xfrm; dst 498 net/ipv6/ip6_vti.c tdev = dst->dev; dst 507 net/ipv6/ip6_vti.c mtu = dst_mtu(dst); dst 526 net/ipv6/ip6_vti.c skb_dst_set(skb, dst); dst 539 net/ipv6/ip6_vti.c dst_release(dst); dst 677 net/ipv6/ip6_vti.c tdev = rt->dst.dev; dst 2002 net/ipv6/ip6mr.c struct dst_entry *dst; dst 2026 net/ipv6/ip6mr.c dst = ip6_route_output(net, NULL, &fl6); dst 2027 net/ipv6/ip6mr.c if (dst->error) { dst 2028 net/ipv6/ip6mr.c dst_release(dst); dst 2033 net/ipv6/ip6mr.c skb_dst_set(skb, dst); dst 1125 net/ipv6/ipv6_sockglue.c struct dst_entry *dst; dst 1129 net/ipv6/ipv6_sockglue.c dst = __sk_dst_get(sk); dst 1130 net/ipv6/ipv6_sockglue.c if (dst) dst 1131 net/ipv6/ipv6_sockglue.c val = dst_mtu(dst); dst 1218 net/ipv6/ipv6_sockglue.c struct dst_entry *dst; dst 1228 net/ipv6/ipv6_sockglue.c dst = __sk_dst_get(sk); dst 1229 net/ipv6/ipv6_sockglue.c if (dst) dst 1230 net/ipv6/ipv6_sockglue.c mtuinfo.ip6m_mtu = dst_mtu(dst); dst 1258 net/ipv6/ipv6_sockglue.c struct dst_entry *dst; dst 1267 net/ipv6/ipv6_sockglue.c dst = __sk_dst_get(sk); dst 1268 net/ipv6/ipv6_sockglue.c if (dst) dst 1269 net/ipv6/ipv6_sockglue.c val = ip6_dst_hoplimit(dst); dst 169 net/ipv6/mcast.c dev = rt->dst.dev; dst 270 net/ipv6/mcast.c dev = rt->dst.dev; dst 1652 net/ipv6/mcast.c struct dst_entry *dst; dst 1671 net/ipv6/mcast.c dst = icmp6_dst_alloc(skb->dev, &fl6); dst 1674 net/ipv6/mcast.c if (IS_ERR(dst)) { dst 1675 net/ipv6/mcast.c err = PTR_ERR(dst); dst 1676 net/ipv6/mcast.c dst = NULL; dst 1678 net/ipv6/mcast.c skb_dst_set(skb, dst); dst 1997 net/ipv6/mcast.c struct dst_entry *dst; dst 2052 net/ipv6/mcast.c dst = icmp6_dst_alloc(skb->dev, &fl6); dst 2053 net/ipv6/mcast.c if (IS_ERR(dst)) { dst 2054 net/ipv6/mcast.c err = PTR_ERR(dst); dst 2058 net/ipv6/mcast.c skb_dst_set(skb, dst); dst 112 net/ipv6/mip6.c struct in6_addr dst; dst 176 net/ipv6/mip6.c const struct in6_addr *dst, dst 185 net/ipv6/mip6.c !ipv6_addr_equal(&mip6_report_rl.dst, dst)) { dst 189 net/ipv6/mip6.c mip6_report_rl.dst = *dst; dst 470 net/ipv6/ndisc.c struct dst_entry *dst = skb_dst(skb); dst 480 net/ipv6/ndisc.c if (!dst) { dst 485 net/ipv6/ndisc.c dst = icmp6_dst_alloc(skb->dev, &fl6); dst 486 net/ipv6/ndisc.c if (IS_ERR(dst)) { dst 491 net/ipv6/ndisc.c skb_dst_set(skb, dst); dst 502 net/ipv6/ndisc.c idev = __in6_dev_get(dst->dev); dst 506 net/ipv6/ndisc.c net, sk, skb, NULL, dst->dev, dst 1587 net/ipv6/ndisc.c struct dst_entry *dst; dst 1616 net/ipv6/ndisc.c dst = ip6_route_output(net, NULL, &fl6); dst 1617 net/ipv6/ndisc.c if (dst->error) { dst 1618 net/ipv6/ndisc.c dst_release(dst); dst 1621 net/ipv6/ndisc.c dst = xfrm_lookup(net, dst, flowi6_to_flowi(&fl6), NULL, 0); dst 1622 net/ipv6/ndisc.c if (IS_ERR(dst)) dst 1625 net/ipv6/ndisc.c rt = (struct rt6_info *) dst; dst 1694 net/ipv6/ndisc.c skb_dst_set(buff, dst); dst 1699 net/ipv6/ndisc.c dst_release(dst); dst 28 net/ipv6/netfilter.c struct dst_entry *dst; dst 41 net/ipv6/netfilter.c dst = ip6_route_output(net, sk, &fl6); dst 42 net/ipv6/netfilter.c err = dst->error; dst 44 net/ipv6/netfilter.c IP6_INC_STATS(net, ip6_dst_idev(dst), IPSTATS_MIB_OUTNOROUTES); dst 46 net/ipv6/netfilter.c dst_release(dst); dst 53 net/ipv6/netfilter.c skb_dst_set(skb, dst); dst 59 net/ipv6/netfilter.c dst = xfrm_lookup(net, dst, flowi6_to_flowi(&fl6), sk, 0); dst 60 net/ipv6/netfilter.c if (IS_ERR(dst)) dst 61 net/ipv6/netfilter.c 
return PTR_ERR(dst); dst 62 net/ipv6/netfilter.c skb_dst_set(skb, dst); dst 92 net/ipv6/netfilter.c int __nf_ip6_route(struct net *net, struct dst_entry **dst, dst 110 net/ipv6/netfilter.c *dst = result; dst 64 net/ipv6/netfilter/ip6_tables.c &ip6info->dst))) dst 888 net/ipv6/netfilter/ip6_tables.c static void compat_standard_from_user(void *dst, const void *src) dst 894 net/ipv6/netfilter/ip6_tables.c memcpy(dst, &v, sizeof(v)); dst 897 net/ipv6/netfilter/ip6_tables.c static int compat_standard_to_user(void __user *dst, const void *src) dst 903 net/ipv6/netfilter/ip6_tables.c return copy_to_user(dst, &cv, sizeof(cv)) ? -EFAULT : 0; dst 64 net/ipv6/netfilter/ip6t_rpfilter.c if (rt->dst.error) dst 23 net/ipv6/netfilter/nf_dup_ipv6.c struct dst_entry *dst; dst 34 net/ipv6/netfilter/nf_dup_ipv6.c dst = ip6_route_output(net, NULL, &fl6); dst 35 net/ipv6/netfilter/nf_dup_ipv6.c if (dst->error) { dst 36 net/ipv6/netfilter/nf_dup_ipv6.c dst_release(dst); dst 40 net/ipv6/netfilter/nf_dup_ipv6.c skb_dst_set(skb, dst); dst 41 net/ipv6/netfilter/nf_dup_ipv6.c skb->dev = dst->dev; dst 138 net/ipv6/netfilter/nf_reject_ipv6.c struct dst_entry *dst = NULL; dst 160 net/ipv6/netfilter/nf_reject_ipv6.c dst = ip6_route_output(net, NULL, &fl6); dst 161 net/ipv6/netfilter/nf_reject_ipv6.c if (dst->error) { dst 162 net/ipv6/netfilter/nf_reject_ipv6.c dst_release(dst); dst 165 net/ipv6/netfilter/nf_reject_ipv6.c dst = xfrm_lookup(net, dst, flowi6_to_flowi(&fl6), NULL, 0); dst 166 net/ipv6/netfilter/nf_reject_ipv6.c if (IS_ERR(dst)) dst 169 net/ipv6/netfilter/nf_reject_ipv6.c hh_len = (dst->dev->hard_header_len + 15)&~15; dst 170 net/ipv6/netfilter/nf_reject_ipv6.c nskb = alloc_skb(hh_len + 15 + dst->header_len + sizeof(struct ipv6hdr) dst 171 net/ipv6/netfilter/nf_reject_ipv6.c + sizeof(struct tcphdr) + dst->trailer_len, dst 176 net/ipv6/netfilter/nf_reject_ipv6.c dst_release(dst); dst 180 net/ipv6/netfilter/nf_reject_ipv6.c skb_dst_set(nskb, dst); dst 184 net/ipv6/netfilter/nf_reject_ipv6.c skb_reserve(nskb, hh_len + dst->header_len); dst 186 net/ipv6/netfilter/nf_reject_ipv6.c ip6_dst_hoplimit(dst)); dst 82 net/ipv6/netfilter/nft_fib_ipv6.c route_err = rt->dst.error; dst 83 net/ipv6/netfilter/nft_fib_ipv6.c dst_release(&rt->dst); dst 92 net/ipv6/netfilter/nft_fib_ipv6.c dst_release(&rt->dst); dst 175 net/ipv6/netfilter/nft_fib_ipv6.c if (rt->dst.error) dst 15 net/ipv6/output_core.c const struct in6_addr *dst, dst 19 net/ipv6/output_core.c struct in6_addr dst; dst 22 net/ipv6/output_core.c .dst = *dst, dst 128 net/ipv6/output_core.c int ip6_dst_hoplimit(struct dst_entry *dst) dst 130 net/ipv6/output_core.c int hoplimit = dst_metric_raw(dst, RTAX_HOPLIMIT); dst 132 net/ipv6/output_core.c struct net_device *dev = dst->dev; dst 57 net/ipv6/ping.c struct dst_entry *dst; dst 119 net/ipv6/ping.c dst = ip6_sk_dst_lookup_flow(sk, &fl6, daddr, false); dst 120 net/ipv6/ping.c if (IS_ERR(dst)) dst 121 net/ipv6/ping.c return PTR_ERR(dst); dst 122 net/ipv6/ping.c rt = (struct rt6_info *) dst; dst 138 net/ipv6/ping.c ipc6.hlimit = ip6_sk_dst_hoplimit(np, &fl6, dst); dst 155 net/ipv6/ping.c dst_release(dst); dst 628 net/ipv6/raw.c int hlen = LL_RESERVED_SPACE(rt->dst.dev); dst 629 net/ipv6/raw.c int tlen = rt->dst.dev->needed_tailroom; dst 631 net/ipv6/raw.c if (length > rt->dst.dev->mtu) { dst 632 net/ipv6/raw.c ipv6_local_error(sk, EMSGSIZE, fl6, rt->dst.dev->mtu); dst 671 net/ipv6/raw.c skb_dst_set(skb, &rt->dst); dst 688 net/ipv6/raw.c NULL, rt->dst.dev, dst_output); dst 777 net/ipv6/raw.c struct dst_entry *dst = NULL; 
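The netfilter.c, nf_dup_ipv6.c and nf_reject_ipv6.c entries above all follow the same route-then-attach shape: ip6_route_output() never returns an error pointer, so callers test dst->error, drop the reference with dst_release() on failure, optionally rewrap the result through xfrm_lookup() (which does return ERR_PTR() and takes over the reference it was given), and finally hand the route to the packet with skb_dst_set(). A minimal sketch of that shape, assuming a caller that already has the destination address; the function name and flow setup are illustrative, not taken from any file in this listing:

	#include <net/dst.h>
	#include <net/ip6_route.h>
	#include <net/xfrm.h>

	static int example_route_and_attach(struct net *net, struct sk_buff *skb,
					    const struct in6_addr *daddr)
	{
		struct flowi6 fl6 = { .daddr = *daddr };
		struct dst_entry *dst;

		/* ip6_route_output() always returns a dst; failures show up
		 * in dst->error, not as an ERR_PTR() return value. */
		dst = ip6_route_output(net, NULL, &fl6);
		if (dst->error) {
			int err = dst->error;

			dst_release(dst);
			return err;
		}

		/* xfrm_lookup() may substitute an IPsec dst; it returns
		 * ERR_PTR() on error and releases the dst passed in. */
		dst = xfrm_lookup(net, dst, flowi6_to_flowi(&fl6), NULL, 0);
		if (IS_ERR(dst))
			return PTR_ERR(dst);

		skb_dst_set(skb, dst);	/* reference now owned by the skb */
		return 0;
	}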
dst 928 net/ipv6/raw.c dst = ip6_dst_lookup_flow(sock_net(sk), sk, &fl6, final_p); dst 929 net/ipv6/raw.c if (IS_ERR(dst)) { dst 930 net/ipv6/raw.c err = PTR_ERR(dst); dst 934 net/ipv6/raw.c ipc6.hlimit = ip6_sk_dst_hoplimit(np, &fl6, dst); dst 944 net/ipv6/raw.c err = rawv6_send_hdrinc(sk, msg, len, &fl6, &dst, dst 950 net/ipv6/raw.c len, 0, &ipc6, &fl6, (struct rt6_info *)dst, dst 960 net/ipv6/raw.c dst_release(dst); dst 967 net/ipv6/raw.c dst_confirm_neigh(dst, &fl6.daddr); dst 83 net/ipv6/route.c static struct dst_entry *ip6_dst_check(struct dst_entry *dst, u32 cookie); dst 84 net/ipv6/route.c static unsigned int ip6_default_advmss(const struct dst_entry *dst); dst 85 net/ipv6/route.c static unsigned int ip6_mtu(const struct dst_entry *dst); dst 97 net/ipv6/route.c static void ip6_rt_update_pmtu(struct dst_entry *dst, struct sock *sk, dst 100 net/ipv6/route.c static void rt6_do_redirect(struct dst_entry *dst, struct sock *sk, dst 106 net/ipv6/route.c struct fib6_info *rt, struct dst_entry *dst, dst 148 net/ipv6/route.c struct net *net = dev_net(rt->dst.dev); dst 172 net/ipv6/route.c struct net_device *rt_dev = rt->dst.dev; dst 180 net/ipv6/route.c rt->dst.dev = blackhole_netdev; dst 181 net/ipv6/route.c dev_hold(rt->dst.dev); dst 216 net/ipv6/route.c static struct neighbour *ip6_dst_neigh_lookup(const struct dst_entry *dst, dst 220 net/ipv6/route.c const struct rt6_info *rt = container_of(dst, struct rt6_info, dst); dst 223 net/ipv6/route.c dst->dev, skb, daddr); dst 226 net/ipv6/route.c static void ip6_confirm_neigh(const struct dst_entry *dst, const void *daddr) dst 228 net/ipv6/route.c struct net_device *dev = dst->dev; dst 229 net/ipv6/route.c struct rt6_info *rt = (struct rt6_info *)dst; dst 260 net/ipv6/route.c static unsigned int ip6_blackhole_mtu(const struct dst_entry *dst) dst 262 net/ipv6/route.c unsigned int mtu = dst_metric_raw(dst, RTAX_MTU); dst 264 net/ipv6/route.c return mtu ? 
: dst->dev->mtu; dst 267 net/ipv6/route.c static void ip6_rt_blackhole_update_pmtu(struct dst_entry *dst, struct sock *sk, dst 273 net/ipv6/route.c static void ip6_rt_blackhole_redirect(struct dst_entry *dst, struct sock *sk, dst 304 net/ipv6/route.c .dst = { dst 318 net/ipv6/route.c .dst = { dst 330 net/ipv6/route.c .dst = { dst 345 net/ipv6/route.c struct dst_entry *dst = &rt->dst; dst 347 net/ipv6/route.c memset(dst + 1, 0, sizeof(*rt) - sizeof(*dst)); dst 367 net/ipv6/route.c static void ip6_dst_destroy(struct dst_entry *dst) dst 369 net/ipv6/route.c struct rt6_info *rt = (struct rt6_info *)dst; dst 373 net/ipv6/route.c ip_dst_metrics_put(dst); dst 386 net/ipv6/route.c static void ip6_dst_ifdown(struct dst_entry *dst, struct net_device *dev, dst 389 net/ipv6/route.c struct rt6_info *rt = (struct rt6_info *)dst; dst 406 net/ipv6/route.c return time_after(jiffies, rt->dst.expires); dst 418 net/ipv6/route.c if (time_after(jiffies, rt->dst.expires)) dst 421 net/ipv6/route.c return rt->dst.obsolete != DST_OBSOLETE_FORCE_CHK || dst 1073 net/ipv6/route.c rt->dst.error = ip6_rt_type_to_error(fib6_type); dst 1077 net/ipv6/route.c rt->dst.output = dst_discard_out; dst 1078 net/ipv6/route.c rt->dst.input = dst_discard; dst 1081 net/ipv6/route.c rt->dst.output = ip6_pkt_prohibit_out; dst 1082 net/ipv6/route.c rt->dst.input = ip6_pkt_prohibit; dst 1087 net/ipv6/route.c rt->dst.output = ip6_pkt_discard_out; dst 1088 net/ipv6/route.c rt->dst.input = ip6_pkt_discard; dst 1102 net/ipv6/route.c rt->dst.error = 0; dst 1103 net/ipv6/route.c rt->dst.output = ip6_output; dst 1106 net/ipv6/route.c rt->dst.input = ip6_input; dst 1108 net/ipv6/route.c rt->dst.input = ip6_mc_input; dst 1110 net/ipv6/route.c rt->dst.input = ip6_forward; dst 1114 net/ipv6/route.c rt->dst.lwtstate = lwtstate_get(res->nh->fib_nh_lws); dst 1115 net/ipv6/route.c lwtunnel_set_redirect(&rt->dst); dst 1118 net/ipv6/route.c rt->dst.lastuse = jiffies; dst 1126 net/ipv6/route.c ip_dst_init_metrics(&rt->dst, from->fib6_metrics); dst 1173 net/ipv6/route.c if (dst_hold_safe(&rt->dst)) dst 1177 net/ipv6/route.c dst_hold(&rt->dst); dst 1208 net/ipv6/route.c dst_hold(&nrt->dst); dst 1241 net/ipv6/route.c dst_hold(&rt->dst); dst 1254 net/ipv6/route.c dst_use_noref(&rt->dst, jiffies); dst 1283 net/ipv6/route.c struct dst_entry *dst; dst 1291 net/ipv6/route.c dst = fib6_rule_lookup(net, &fl6, skb, flags, ip6_pol_route_lookup); dst 1292 net/ipv6/route.c if (dst->error == 0) dst 1293 net/ipv6/route.c return (struct rt6_info *) dst; dst 1295 net/ipv6/route.c dst_release(dst); dst 1352 net/ipv6/route.c rt->dst.flags |= DST_HOST; dst 1400 net/ipv6/route.c return rt6->sernum == rt_genid_ipv6(dev_net(rt6->dst.dev)); dst 1416 net/ipv6/route.c dst_dev_put(&prev->dst); dst 1417 net/ipv6/route.c dst_release(&prev->dst); dst 1465 net/ipv6/route.c net = dev_net(rt6_ex->rt6i->dst.dev); dst 1473 net/ipv6/route.c dst_dev_put(&rt6_ex->rt6i->dst); dst 1476 net/ipv6/route.c dst_release(&rt6_ex->rt6i->dst); dst 1499 net/ipv6/route.c static u32 rt6_exception_hash(const struct in6_addr *dst, dst 1506 net/ipv6/route.c val = jhash(dst, sizeof(*dst), seed); dst 1661 net/ipv6/route.c struct net *net = dev_net(nrt->dst.dev); dst 1700 net/ipv6/route.c if (dst_metric_raw(&nrt->dst, RTAX_MTU) >= fib6_mtu(res)) { dst 1975 net/ipv6/route.c .dev = rt->dst.dev, dst 2005 net/ipv6/route.c if (dst_mtu(&rt->dst) >= mtu) dst 2008 net/ipv6/route.c if (dst_mtu(&rt->dst) == idev->cnf.mtu6) dst 2033 net/ipv6/route.c if (dst_metric_raw(&entry->dst, RTAX_MTU) && dst 2035 net/ipv6/route.c 
dst_metric_set(&entry->dst, RTAX_MTU, mtu); dst 2090 net/ipv6/route.c if (time_after_eq(now, rt->dst.lastuse + gc_args->timeout)) { dst 2095 net/ipv6/route.c } else if (time_after(jiffies, rt->dst.expires)) { dst 2105 net/ipv6/route.c neigh = __ipv6_neigh_lookup_noref(rt->dst.dev, &rt->rt6i_gateway); dst 2339 net/ipv6/route.c keys->addrs.v6addrs.dst = _flkeys->addrs.v6addrs.dst; dst 2344 net/ipv6/route.c keys->addrs.v6addrs.dst = key_iph->daddr; dst 2365 net/ipv6/route.c hash_keys.addrs.v6addrs.dst = fl6->daddr; dst 2387 net/ipv6/route.c hash_keys.addrs.v6addrs.dst = flkeys->addrs.v6addrs.dst; dst 2389 net/ipv6/route.c hash_keys.ports.dst = flkeys->ports.dst; dst 2395 net/ipv6/route.c hash_keys.addrs.v6addrs.dst = fl6->daddr; dst 2397 net/ipv6/route.c hash_keys.ports.dst = fl6->fl6_dport; dst 2416 net/ipv6/route.c hash_keys.addrs.v4addrs.dst = flkeys->addrs.v4addrs.dst; dst 2420 net/ipv6/route.c hash_keys.addrs.v6addrs.dst = flkeys->addrs.v6addrs.dst; dst 2432 net/ipv6/route.c hash_keys.addrs.v6addrs.dst = fl6->daddr; dst 2491 net/ipv6/route.c struct dst_entry *dst; dst 2494 net/ipv6/route.c dst = l3mdev_link_scope_lookup(net, fl6); dst 2495 net/ipv6/route.c if (dst) dst 2496 net/ipv6/route.c return dst; dst 2521 net/ipv6/route.c struct dst_entry *dst; dst 2525 net/ipv6/route.c dst = ip6_route_output_flags_noref(net, sk, fl6, flags); dst 2526 net/ipv6/route.c rt6 = (struct rt6_info *)dst; dst 2528 net/ipv6/route.c if (list_empty(&rt6->rt6i_uncached) && !dst_hold_safe(dst)) { dst 2529 net/ipv6/route.c dst = &net->ipv6.ip6_null_entry->dst; dst 2530 net/ipv6/route.c dst_hold(dst); dst 2534 net/ipv6/route.c return dst; dst 2550 net/ipv6/route.c new = &rt->dst; dst 2555 net/ipv6/route.c dst_copy_metrics(new, &ort->dst); dst 2601 net/ipv6/route.c return &rt->dst; dst 2609 net/ipv6/route.c rt->dst.obsolete == DST_OBSOLETE_FORCE_CHK && dst 2611 net/ipv6/route.c return &rt->dst; dst 2616 net/ipv6/route.c static struct dst_entry *ip6_dst_check(struct dst_entry *dst, u32 cookie) dst 2622 net/ipv6/route.c rt = container_of(dst, struct rt6_info, dst); dst 2625 net/ipv6/route.c return rt6_is_valid(rt) ? dst : NULL; dst 2647 net/ipv6/route.c static struct dst_entry *ip6_negative_advice(struct dst_entry *dst) dst 2649 net/ipv6/route.c struct rt6_info *rt = (struct rt6_info *) dst; dst 2656 net/ipv6/route.c dst = NULL; dst 2660 net/ipv6/route.c dst_release(dst); dst 2661 net/ipv6/route.c dst = NULL; dst 2664 net/ipv6/route.c return dst; dst 2701 net/ipv6/route.c rt0->dst.expires = from->expires; dst 2705 net/ipv6/route.c dst_set_expires(&rt0->dst, timeout); dst 2711 net/ipv6/route.c struct net *net = dev_net(rt->dst.dev); dst 2713 net/ipv6/route.c dst_metric_set(&rt->dst, RTAX_MTU, mtu); dst 2724 net/ipv6/route.c static void __ip6_rt_update_pmtu(struct dst_entry *dst, const struct sock *sk, dst 2729 net/ipv6/route.c struct rt6_info *rt6 = (struct rt6_info *)dst; dst 2748 net/ipv6/route.c dst_confirm_neigh(dst, daddr); dst 2751 net/ipv6/route.c if (mtu >= dst_mtu(dst)) dst 2773 net/ipv6/route.c .dev = dst->dev, dst 2795 net/ipv6/route.c dst_release_immediate(&nrt6->dst); dst 2802 net/ipv6/route.c static void ip6_rt_update_pmtu(struct dst_entry *dst, struct sock *sk, dst 2806 net/ipv6/route.c __ip6_rt_update_pmtu(dst, sk, skb ? 
ipv6_hdr(skb) : NULL, mtu, dst 2814 net/ipv6/route.c struct dst_entry *dst; dst 2824 net/ipv6/route.c dst = ip6_route_output(net, NULL, &fl6); dst 2825 net/ipv6/route.c if (!dst->error) dst 2826 net/ipv6/route.c __ip6_rt_update_pmtu(dst, NULL, iph, ntohl(mtu), true); dst 2827 net/ipv6/route.c dst_release(dst); dst 2834 net/ipv6/route.c struct dst_entry *dst; dst 2841 net/ipv6/route.c dst = __sk_dst_get(sk); dst 2842 net/ipv6/route.c if (!dst || !dst->obsolete || dst 2843 net/ipv6/route.c dst->ops->check(dst, inet6_sk(sk)->dst_cookie)) dst 2853 net/ipv6/route.c void ip6_sk_dst_store_flow(struct sock *sk, struct dst_entry *dst, dst 2860 net/ipv6/route.c ip6_dst_store(sk, dst, dst 3029 net/ipv6/route.c struct dst_entry *dst; dst 3040 net/ipv6/route.c dst = ip6_route_redirect(net, &fl6, skb, &ipv6_hdr(skb)->saddr); dst 3041 net/ipv6/route.c rt6_do_redirect(dst, NULL, skb); dst 3042 net/ipv6/route.c dst_release(dst); dst 3050 net/ipv6/route.c struct dst_entry *dst; dst 3059 net/ipv6/route.c dst = ip6_route_redirect(net, &fl6, skb, &iph->saddr); dst 3060 net/ipv6/route.c rt6_do_redirect(dst, NULL, skb); dst 3061 net/ipv6/route.c dst_release(dst); dst 3071 net/ipv6/route.c static unsigned int ip6_default_advmss(const struct dst_entry *dst) dst 3073 net/ipv6/route.c struct net_device *dev = dst->dev; dst 3074 net/ipv6/route.c unsigned int mtu = dst_mtu(dst); dst 3093 net/ipv6/route.c static unsigned int ip6_mtu(const struct dst_entry *dst) dst 3098 net/ipv6/route.c mtu = dst_metric_raw(dst, RTAX_MTU); dst 3105 net/ipv6/route.c idev = __in6_dev_get(dst->dev); dst 3113 net/ipv6/route.c return mtu - lwtunnel_headroom(dst->lwtstate, mtu); dst 3142 net/ipv6/route.c mtu = dst_metric_raw(&rt->dst, RTAX_MTU); dst 3160 net/ipv6/route.c struct dst_entry *dst; dst 3171 net/ipv6/route.c dst = ERR_PTR(-ENOMEM); dst 3175 net/ipv6/route.c rt->dst.flags |= DST_HOST; dst 3176 net/ipv6/route.c rt->dst.input = ip6_input; dst 3177 net/ipv6/route.c rt->dst.output = ip6_output; dst 3182 net/ipv6/route.c dst_metric_set(&rt->dst, RTAX_HOPLIMIT, 0); dst 3190 net/ipv6/route.c dst = xfrm_lookup(net, &rt->dst, flowi6_to_flowi(fl6), NULL, 0); dst 3193 net/ipv6/route.c return dst; dst 3562 net/ipv6/route.c dst_dev_put(&pcpu_rt->dst); dst 3563 net/ipv6/route.c dst_release(&pcpu_rt->dst); dst 3837 net/ipv6/route.c if (cfg->fc_ifindex && rt->dst.dev->ifindex != cfg->fc_ifindex) dst 3976 net/ipv6/route.c static void rt6_do_redirect(struct dst_entry *dst, struct sock *sk, struct sk_buff *skb) dst 4038 net/ipv6/route.c rt = (struct rt6_info *) dst; dst 4048 net/ipv6/route.c dst_confirm_neigh(&rt->dst, &ipv6_hdr(skb)->saddr); dst 4072 net/ipv6/route.c .dev = dst->dev, dst 4103 net/ipv6/route.c dst_release_immediate(&nrt->dst); dst 4107 net/ipv6/route.c netevent.old = &rt->dst; dst 4108 net/ipv6/route.c netevent.new = &nrt->dst; dst 4364 net/ipv6/route.c struct dst_entry *dst = skb_dst(skb); dst 4365 net/ipv6/route.c struct net *net = dev_net(dst->dev); dst 4370 net/ipv6/route.c dst->dev == net->loopback_dev) dst 4373 net/ipv6/route.c idev = ip6_dst_idev(dst); dst 5397 net/ipv6/route.c struct fib6_info *rt, struct dst_entry *dst, dst 5402 net/ipv6/route.c struct rt6_info *rt6 = (struct rt6_info *)dst; dst 5488 net/ipv6/route.c pmetrics = dst ? dst_metrics_ptr(dst) : rt->fib6_metrics->metrics; dst 5503 net/ipv6/route.c if (dst->dev && nla_put_u32(skb, RTA_OIF, dst->dev->ifindex)) dst 5546 net/ipv6/route.c expires = dst ? dst->expires : rt->expires; dst 5550 net/ipv6/route.c if (rtnl_put_cacheinfo(skb, dst, 0, expires, dst ? 
dst->error : 0) < 0) dst 5646 net/ipv6/route.c &rt6_ex->rt6i->dst, NULL, NULL, 0, dst 5803 net/ipv6/route.c struct dst_entry *dst; dst 5880 net/ipv6/route.c dst = ip6_route_input_lookup(net, dev, &fl6, NULL, flags); dst 5886 net/ipv6/route.c dst = ip6_route_output(net, NULL, &fl6); dst 5890 net/ipv6/route.c rt = container_of(dst, struct rt6_info, dst); dst 5891 net/ipv6/route.c if (rt->dst.error) { dst 5892 net/ipv6/route.c err = rt->dst.error; dst 5898 net/ipv6/route.c err = rt->dst.error; dst 5910 net/ipv6/route.c skb_dst_set(skb, &rt->dst); dst 5921 net/ipv6/route.c err = rt6_fill_node(net, skb, from, dst, &fl6.daddr, dst 6014 net/ipv6/route.c net->ipv6.ip6_null_entry->dst.dev = dev; dst 6017 net/ipv6/route.c net->ipv6.ip6_prohibit_entry->dst.dev = dev; dst 6019 net/ipv6/route.c net->ipv6.ip6_blk_hole_entry->dst.dev = dev; dst 6215 net/ipv6/route.c net->ipv6.ip6_null_entry->dst.ops = &net->ipv6.ip6_dst_ops; dst 6216 net/ipv6/route.c dst_init_metrics(&net->ipv6.ip6_null_entry->dst, dst 6227 net/ipv6/route.c net->ipv6.ip6_prohibit_entry->dst.ops = &net->ipv6.ip6_dst_ops; dst 6228 net/ipv6/route.c dst_init_metrics(&net->ipv6.ip6_prohibit_entry->dst, dst 6237 net/ipv6/route.c net->ipv6.ip6_blk_hole_entry->dst.ops = &net->ipv6.ip6_dst_ops; dst 6238 net/ipv6/route.c dst_init_metrics(&net->ipv6.ip6_blk_hole_entry->dst, dst 6349 net/ipv6/route.c init_net.ipv6.ip6_null_entry->dst.dev = init_net.loopback_dev; dst 6352 net/ipv6/route.c init_net.ipv6.ip6_prohibit_entry->dst.dev = init_net.loopback_dev; dst 6354 net/ipv6/route.c init_net.ipv6.ip6_blk_hole_entry->dst.dev = init_net.loopback_dev; dst 110 net/ipv6/seg6_iptunnel.c struct dst_entry *dst = skb_dst(skb); dst 111 net/ipv6/seg6_iptunnel.c struct net *net = dev_net(dst->dev); dst 156 net/ipv6/seg6_iptunnel.c set_tun_src(net, dst->dev, &hdr->daddr, &hdr->saddr); dst 226 net/ipv6/seg6_iptunnel.c struct dst_entry *dst = skb_dst(skb); dst 230 net/ipv6/seg6_iptunnel.c tinfo = seg6_encap_lwtunnel(dst->lwtstate); dst 288 net/ipv6/seg6_iptunnel.c struct dst_entry *dst = NULL; dst 301 net/ipv6/seg6_iptunnel.c dst = dst_cache_get(&slwt->cache); dst 306 net/ipv6/seg6_iptunnel.c if (!dst) { dst 308 net/ipv6/seg6_iptunnel.c dst = skb_dst(skb); dst 309 net/ipv6/seg6_iptunnel.c if (!dst->error) { dst 311 net/ipv6/seg6_iptunnel.c dst_cache_set_ip6(&slwt->cache, dst, dst 316 net/ipv6/seg6_iptunnel.c skb_dst_set(skb, dst); dst 319 net/ipv6/seg6_iptunnel.c err = skb_cow_head(skb, LL_RESERVED_SPACE(dst->dev)); dst 329 net/ipv6/seg6_iptunnel.c struct dst_entry *dst = NULL; dst 340 net/ipv6/seg6_iptunnel.c dst = dst_cache_get(&slwt->cache); dst 343 net/ipv6/seg6_iptunnel.c if (unlikely(!dst)) { dst 354 net/ipv6/seg6_iptunnel.c dst = ip6_route_output(net, NULL, &fl6); dst 355 net/ipv6/seg6_iptunnel.c if (dst->error) { dst 356 net/ipv6/seg6_iptunnel.c err = dst->error; dst 357 net/ipv6/seg6_iptunnel.c dst_release(dst); dst 362 net/ipv6/seg6_iptunnel.c dst_cache_set_ip6(&slwt->cache, dst, &fl6.saddr); dst 367 net/ipv6/seg6_iptunnel.c skb_dst_set(skb, dst); dst 369 net/ipv6/seg6_iptunnel.c err = skb_cow_head(skb, LL_RESERVED_SPACE(dst->dev)); dst 160 net/ipv6/seg6_local.c struct dst_entry *dst = NULL; dst 175 net/ipv6/seg6_local.c dst = ip6_route_input_lookup(net, skb->dev, &fl6, skb, flags); dst 184 net/ipv6/seg6_local.c dst = &rt->dst; dst 187 net/ipv6/seg6_local.c if (dst && dst->dev->flags & IFF_LOOPBACK && !dst->error) { dst 188 net/ipv6/seg6_local.c dst_release(dst); dst 189 net/ipv6/seg6_local.c dst = NULL; dst 193 net/ipv6/seg6_local.c if (!dst) { dst 195 
net/ipv6/seg6_local.c dst = &rt->dst; dst 196 net/ipv6/seg6_local.c dst_hold(dst); dst 200 net/ipv6/seg6_local.c skb_dst_set(skb, dst); dst 201 net/ipv6/seg6_local.c return dst->error; dst 806 net/ipv6/sit.c __be32 dst = 0; dst 807 net/ipv6/sit.c check_6rd(tunnel, v6dst, &dst); dst 808 net/ipv6/sit.c return dst; dst 827 net/ipv6/sit.c __be32 dst = tiph->daddr; dst 857 net/ipv6/sit.c dst = addr6->s6_addr32[3]; dst 866 net/ipv6/sit.c if (!dst) dst 867 net/ipv6/sit.c dst = try_6rd(tunnel, &iph6->daddr); dst 869 net/ipv6/sit.c if (!dst) { dst 890 net/ipv6/sit.c dst = addr6->s6_addr32[3]; dst 901 net/ipv6/sit.c 0, dst, tiph->saddr, 0, 0, dst 911 net/ipv6/sit.c dst_cache_set_ip4(&tunnel->dst_cache, &rt->dst, fl4.saddr); dst 919 net/ipv6/sit.c tdev = rt->dst.dev; dst 933 net/ipv6/sit.c mtu = dst_mtu(&rt->dst) - t_hlen; dst 1079 net/ipv6/sit.c tdev = rt->dst.dev; dst 140 net/ipv6/syncookies.c struct dst_entry *dst; dst 238 net/ipv6/syncookies.c dst = ip6_dst_lookup_flow(sock_net(sk), sk, &fl6, final_p); dst 239 net/ipv6/syncookies.c if (IS_ERR(dst)) dst 243 net/ipv6/syncookies.c req->rsk_window_clamp = tp->window_clamp ? :dst_metric(dst, RTAX_WINDOW); dst 247 net/ipv6/syncookies.c dst_metric(dst, RTAX_INITRWND)); dst 250 net/ipv6/syncookies.c ireq->ecn_ok = cookie_ecn_ok(&tcp_opt, sock_net(sk), dst); dst 252 net/ipv6/syncookies.c ret = tcp_get_cookie_sock(sk, skb, req, dst, tsoff); dst 104 net/ipv6/tcp_ipv6.c struct dst_entry *dst = skb_dst(skb); dst 106 net/ipv6/tcp_ipv6.c if (dst && dst_hold_safe(dst)) { dst 107 net/ipv6/tcp_ipv6.c const struct rt6_info *rt = (const struct rt6_info *)dst; dst 109 net/ipv6/tcp_ipv6.c sk->sk_rx_dst = dst; dst 155 net/ipv6/tcp_ipv6.c struct dst_entry *dst; dst 278 net/ipv6/tcp_ipv6.c dst = ip6_dst_lookup_flow(sock_net(sk), sk, &fl6, final_p); dst 279 net/ipv6/tcp_ipv6.c if (IS_ERR(dst)) { dst 280 net/ipv6/tcp_ipv6.c err = PTR_ERR(dst); dst 294 net/ipv6/tcp_ipv6.c ip6_dst_store(sk, dst, NULL, NULL); dst 345 net/ipv6/tcp_ipv6.c struct dst_entry *dst; dst 350 net/ipv6/tcp_ipv6.c dst = inet6_csk_update_pmtu(sk, tcp_sk(sk)->mtu_info); dst 351 net/ipv6/tcp_ipv6.c if (!dst) dst 354 net/ipv6/tcp_ipv6.c if (inet_csk(sk)->icsk_pmtu_cookie > dst_mtu(dst)) { dst 355 net/ipv6/tcp_ipv6.c tcp_sync_mss(sk, dst_mtu(dst)); dst 422 net/ipv6/tcp_ipv6.c struct dst_entry *dst = __sk_dst_check(sk, np->dst_cookie); dst 424 net/ipv6/tcp_ipv6.c if (dst) dst 425 net/ipv6/tcp_ipv6.c dst->ops->redirect(dst, sk, skb); dst 484 net/ipv6/tcp_ipv6.c static int tcp_v6_send_synack(const struct sock *sk, struct dst_entry *dst, dst 498 net/ipv6/tcp_ipv6.c if (!dst && (dst = inet6_csk_route_req(sk, fl6, req, dst 502 net/ipv6/tcp_ipv6.c skb = tcp_make_synack(sk, dst, req, foc, synack_type); dst 817 net/ipv6/tcp_ipv6.c struct dst_entry *dst; dst 909 net/ipv6/tcp_ipv6.c dst = ip6_dst_lookup_flow(sock_net(ctl_sk), ctl_sk, &fl6, NULL); dst 910 net/ipv6/tcp_ipv6.c if (!IS_ERR(dst)) { dst 911 net/ipv6/tcp_ipv6.c skb_dst_set(buff, dst); dst 1116 net/ipv6/tcp_ipv6.c struct dst_entry *dst, dst 1137 net/ipv6/tcp_ipv6.c newsk = tcp_v4_syn_recv_sock(sk, skb, req, dst, dst 1190 net/ipv6/tcp_ipv6.c if (!dst) { dst 1191 net/ipv6/tcp_ipv6.c dst = inet6_csk_route_req(sk, &fl6, req, IPPROTO_TCP); dst 1192 net/ipv6/tcp_ipv6.c if (!dst) dst 1207 net/ipv6/tcp_ipv6.c ip6_dst_store(newsk, dst, NULL, NULL); dst 1261 net/ipv6/tcp_ipv6.c tcp_ca_openreq_child(newsk, dst); dst 1263 net/ipv6/tcp_ipv6.c tcp_sync_mss(newsk, dst_mtu(dst)); dst 1264 net/ipv6/tcp_ipv6.c newtp->advmss = tcp_mss_clamp(tcp_sk(sk), dst_metric_advmss(dst)); 
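By contrast, the af_inet6.c, datagram.c, syncookies.c and tcp_ipv6.c entries above use ip6_dst_lookup_flow(), which reports failure as an ERR_PTR() rather than through dst->error; connected sockets then cache the result with ip6_dst_store() and revalidate it later via __sk_dst_check() against the stored cookie. A minimal sketch of that pattern, assuming the flowi6 has already been filled in (the helper name is illustrative only):

	#include <net/ip6_route.h>
	#include <net/ipv6.h>

	static int example_connect_route(struct sock *sk, struct flowi6 *fl6,
					 const struct in6_addr *final_p)
	{
		struct dst_entry *dst;

		/* Errors come back as ERR_PTR(), never via dst->error here. */
		dst = ip6_dst_lookup_flow(sock_net(sk), sk, fl6, final_p);
		if (IS_ERR(dst))
			return PTR_ERR(dst);

		/* Cache the route on the socket for later sends. */
		ip6_dst_store(sk, dst, NULL, NULL);
		return 0;
	}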
dst 1313 net/ipv6/tcp_ipv6.c dst_release(dst); dst 1366 net/ipv6/tcp_ipv6.c struct dst_entry *dst = sk->sk_rx_dst; dst 1370 net/ipv6/tcp_ipv6.c if (dst) { dst 1372 net/ipv6/tcp_ipv6.c dst->ops->check(dst, np->rx_dst_cookie) == NULL) { dst 1373 net/ipv6/tcp_ipv6.c dst_release(dst); dst 1725 net/ipv6/tcp_ipv6.c struct dst_entry *dst = READ_ONCE(sk->sk_rx_dst); dst 1727 net/ipv6/tcp_ipv6.c if (dst) dst 1728 net/ipv6/tcp_ipv6.c dst = dst_check(dst, tcp_inet6_sk(sk)->rx_dst_cookie); dst 1729 net/ipv6/tcp_ipv6.c if (dst && dst 1731 net/ipv6/tcp_ipv6.c skb_dst_set_noref(skb, dst); dst 812 net/ipv6/udp.c static void udp6_sk_rx_dst_set(struct sock *sk, struct dst_entry *dst) dst 814 net/ipv6/udp.c if (udp_sk_rx_dst_set(sk, dst)) { dst 815 net/ipv6/udp.c const struct rt6_info *rt = (const struct rt6_info *)dst; dst 885 net/ipv6/udp.c struct dst_entry *dst = skb_dst(skb); dst 888 net/ipv6/udp.c if (unlikely(sk->sk_rx_dst != dst)) dst 889 net/ipv6/udp.c udp6_sk_rx_dst_set(sk, dst); dst 977 net/ipv6/udp.c struct dst_entry *dst; dst 1000 net/ipv6/udp.c dst = READ_ONCE(sk->sk_rx_dst); dst 1002 net/ipv6/udp.c if (dst) dst 1003 net/ipv6/udp.c dst = dst_check(dst, inet6_sk(sk)->rx_dst_cookie); dst 1004 net/ipv6/udp.c if (dst) { dst 1009 net/ipv6/udp.c skb_dst_set_noref(skb, dst); dst 1226 net/ipv6/udp.c struct dst_entry *dst; dst 1309 net/ipv6/udp.c dst = NULL; dst 1446 net/ipv6/udp.c dst = ip6_sk_dst_lookup_flow(sk, &fl6, final_p, connected); dst 1447 net/ipv6/udp.c if (IS_ERR(dst)) { dst 1448 net/ipv6/udp.c err = PTR_ERR(dst); dst 1449 net/ipv6/udp.c dst = NULL; dst 1454 net/ipv6/udp.c ipc6.hlimit = ip6_sk_dst_hoplimit(np, &fl6, dst); dst 1467 net/ipv6/udp.c &fl6, (struct rt6_info *)dst, dst 1493 net/ipv6/udp.c &ipc6, &fl6, (struct rt6_info *)dst, dst 1507 net/ipv6/udp.c dst_release(dst); dst 1528 net/ipv6/udp.c dst_confirm_neigh(dst, &fl6.daddr); dst 104 net/ipv6/xfrm6_input.c xfrm_address_t *dst, *src; dst 108 net/ipv6/xfrm6_input.c dst = daddr; dst 113 net/ipv6/xfrm6_input.c dst = daddr; dst 118 net/ipv6/xfrm6_input.c dst = (xfrm_address_t *)&in6addr_any; dst 123 net/ipv6/xfrm6_input.c x = xfrm_state_lookup_byaddr(net, skb->mark, dst, src, proto, AF_INET6); dst 70 net/ipv6/xfrm6_output.c struct dst_entry *dst = skb_dst(skb); dst 75 net/ipv6/xfrm6_output.c mtu = dst_mtu(dst); dst 82 net/ipv6/xfrm6_output.c skb->dev = dst->dev; dst 145 net/ipv6/xfrm6_output.c struct dst_entry *dst = skb_dst(skb); dst 146 net/ipv6/xfrm6_output.c struct xfrm_state *x = dst->xfrm; dst 32 net/ipv6/xfrm6_policy.c struct dst_entry *dst; dst 43 net/ipv6/xfrm6_policy.c dst = ip6_route_output(net, NULL, &fl6); dst 45 net/ipv6/xfrm6_policy.c err = dst->error; dst 46 net/ipv6/xfrm6_policy.c if (dst->error) { dst 47 net/ipv6/xfrm6_policy.c dst_release(dst); dst 48 net/ipv6/xfrm6_policy.c dst = ERR_PTR(err); dst 51 net/ipv6/xfrm6_policy.c return dst; dst 58 net/ipv6/xfrm6_policy.c struct dst_entry *dst; dst 61 net/ipv6/xfrm6_policy.c dst = xfrm6_dst_lookup(net, 0, oif, NULL, daddr, mark); dst 62 net/ipv6/xfrm6_policy.c if (IS_ERR(dst)) dst 65 net/ipv6/xfrm6_policy.c dev = ip6_dst_idev(dst)->dev; dst 67 net/ipv6/xfrm6_policy.c dst_release(dst); dst 76 net/ipv6/xfrm6_policy.c xdst->u.dst.dev = dev; dst 100 net/ipv6/xfrm6_policy.c static void xfrm6_update_pmtu(struct dst_entry *dst, struct sock *sk, dst 104 net/ipv6/xfrm6_policy.c struct xfrm_dst *xdst = (struct xfrm_dst *)dst; dst 110 net/ipv6/xfrm6_policy.c static void xfrm6_redirect(struct dst_entry *dst, struct sock *sk, dst 113 net/ipv6/xfrm6_policy.c struct xfrm_dst *xdst = 
(struct xfrm_dst *)dst; dst 119 net/ipv6/xfrm6_policy.c static void xfrm6_dst_destroy(struct dst_entry *dst) dst 121 net/ipv6/xfrm6_policy.c struct xfrm_dst *xdst = (struct xfrm_dst *)dst; dst 125 net/ipv6/xfrm6_policy.c dst_destroy_metrics_generic(dst); dst 131 net/ipv6/xfrm6_policy.c static void xfrm6_dst_ifdown(struct dst_entry *dst, struct net_device *dev, dst 139 net/ipv6/xfrm6_policy.c xdst = (struct xfrm_dst *)dst; dst 148 net/ipv6/xfrm6_policy.c xdst = (struct xfrm_dst *)xfrm_dst_child(&xdst->u.dst); dst 149 net/ipv6/xfrm6_policy.c } while (xdst->u.dst.xfrm); dst 154 net/ipv6/xfrm6_policy.c xfrm_dst_ifdown(dst, dev); dst 120 net/iucv/af_iucv.c static inline void high_nmcpy(unsigned char *dst, char *src) dst 122 net/iucv/af_iucv.c memcpy(dst, src, 8); dst 125 net/iucv/af_iucv.c static inline void low_nmcpy(unsigned char *dst, char *src) dst 127 net/iucv/af_iucv.c memcpy(&dst[8], src, 8); dst 490 net/key/af_key.c const struct sadb_address *dst) dst 494 net/key/af_key.c if (!src || !dst) dst 498 net/key/af_key.c d_addr = (const struct sockaddr *)(dst + 1); dst 3496 net/key/af_key.c const xfrm_address_t *src, const xfrm_address_t *dst) dst 3515 net/key/af_key.c !pfkey_sockaddr_fill(dst, 0, (struct sockaddr *)(sa + socklen), family)) dst 273 net/l2tp/l2tp_core.h struct dst_entry *dst; dst 276 net/l2tp/l2tp_core.h dst = sk_dst_get(tunnel->sock); dst 277 net/l2tp/l2tp_core.h if (!dst) dst 280 net/l2tp/l2tp_core.h mtu = dst_mtu(dst); dst 281 net/l2tp/l2tp_core.h dst_release(dst); dst 494 net/l2tp/l2tp_ip.c sk_setup_caps(sk, &rt->dst); dst 496 net/l2tp/l2tp_ip.c skb_dst_set(skb, &rt->dst); dst 504 net/l2tp/l2tp_ip.c skb_dst_set_noref(skb, &rt->dst); dst 513 net/l2tp/l2tp_ip6.c struct dst_entry *dst = NULL; dst 632 net/l2tp/l2tp_ip6.c dst = ip6_dst_lookup_flow(sock_net(sk), sk, &fl6, final_p); dst 633 net/l2tp/l2tp_ip6.c if (IS_ERR(dst)) { dst 634 net/l2tp/l2tp_ip6.c err = PTR_ERR(dst); dst 639 net/l2tp/l2tp_ip6.c ipc6.hlimit = ip6_sk_dst_hoplimit(np, &fl6, dst); dst 651 net/l2tp/l2tp_ip6.c &fl6, (struct rt6_info *)dst, dst 659 net/l2tp/l2tp_ip6.c dst_release(dst); dst 668 net/l2tp/l2tp_ip6.c dst_confirm_neigh(dst, &fl6.daddr); dst 128 net/l3mdev/l3mdev.c struct dst_entry *dst = NULL; dst 139 net/l3mdev/l3mdev.c dst = dev->l3mdev_ops->l3mdev_link_scope_lookup(dev, fl6); dst 142 net/l3mdev/l3mdev.c return dst; dst 1722 net/mac80211/cfg.c const u8 *dst, const u8 *next_hop) dst 1737 net/mac80211/cfg.c mpath = mesh_path_add(sdata, dst); dst 1750 net/mac80211/cfg.c const u8 *dst) dst 1754 net/mac80211/cfg.c if (dst) dst 1755 net/mac80211/cfg.c return mesh_path_del(sdata, dst); dst 1762 net/mac80211/cfg.c const u8 *dst, const u8 *next_hop) dst 1778 net/mac80211/cfg.c mpath = mesh_path_lookup(sdata, dst); dst 1837 net/mac80211/cfg.c u8 *dst, u8 *next_hop, struct mpath_info *pinfo) dst 1846 net/mac80211/cfg.c mpath = mesh_path_lookup(sdata, dst); dst 1851 net/mac80211/cfg.c memcpy(dst, mpath->dst, ETH_ALEN); dst 1858 net/mac80211/cfg.c int idx, u8 *dst, u8 *next_hop, dst 1872 net/mac80211/cfg.c memcpy(dst, mpath->dst, ETH_ALEN); dst 1888 net/mac80211/cfg.c u8 *dst, u8 *mpp, struct mpath_info *pinfo) dst 1897 net/mac80211/cfg.c mpath = mpp_path_lookup(sdata, dst); dst 1902 net/mac80211/cfg.c memcpy(dst, mpath->dst, ETH_ALEN); dst 1909 net/mac80211/cfg.c int idx, u8 *dst, u8 *mpp, dst 1923 net/mac80211/cfg.c memcpy(dst, mpath->dst, ETH_ALEN); dst 76 net/mac80211/fils_aead.c struct scatterlist src[1], dst[1]; dst 133 net/mac80211/fils_aead.c sg_init_one(dst, out + AES_BLOCK_SIZE, plain_len); dst 134 
net/mac80211/fils_aead.c skcipher_request_set_crypt(req, src, dst, plain_len, v); dst 152 net/mac80211/fils_aead.c struct scatterlist src[1], dst[1]; dst 192 net/mac80211/fils_aead.c sg_init_one(dst, out, crypt_len); dst 193 net/mac80211/fils_aead.c skcipher_request_set_crypt(req, src, dst, crypt_len, iv); dst 328 net/mac80211/ieee80211_i.h u8 dst[ETH_ALEN]; dst 2120 net/mac80211/ieee80211_i.h const u8 *src, const u8 *dst, dst 104 net/mac80211/mesh.h u8 dst[ETH_ALEN]; dst 263 net/mac80211/mesh.h const u8 *dst); dst 265 net/mac80211/mesh.h const u8 *dst); dst 267 net/mac80211/mesh.h const u8 *dst, const u8 *mpp); dst 277 net/mac80211/mesh.h mesh_path_add(struct ieee80211_sub_if_data *sdata, const u8 *dst); dst 1000 net/mac80211/mesh_hwmp.c memcpy(preq_node->dst, mpath->dst, ETH_ALEN); dst 1053 net/mac80211/mesh_hwmp.c mpath = mesh_path_lookup(sdata, preq_node->dst); dst 1104 net/mac80211/mesh_hwmp.c target_flags, mpath->dst, mpath->sn, da, 0, dst 30 net/mac80211/mesh_pathtbl.c .key_offset = offsetof(struct mesh_path, dst), dst 190 net/mac80211/mesh_pathtbl.c prepare_for_gate(skb, gate_mpath->dst, gate_mpath); dst 201 net/mac80211/mesh_pathtbl.c gate_mpath->dst, skb_queue_len(&gate_mpath->frame_queue)); dst 212 net/mac80211/mesh_pathtbl.c static struct mesh_path *mpath_lookup(struct mesh_table *tbl, const u8 *dst, dst 217 net/mac80211/mesh_pathtbl.c mpath = rhashtable_lookup(&tbl->rhead, dst, mesh_rht_params); dst 237 net/mac80211/mesh_pathtbl.c mesh_path_lookup(struct ieee80211_sub_if_data *sdata, const u8 *dst) dst 239 net/mac80211/mesh_pathtbl.c return mpath_lookup(sdata->u.mesh.mesh_paths, dst, sdata); dst 243 net/mac80211/mesh_pathtbl.c mpp_path_lookup(struct ieee80211_sub_if_data *sdata, const u8 *dst) dst 245 net/mac80211/mesh_pathtbl.c return mpath_lookup(sdata->u.mesh.mpp_paths, dst, sdata); dst 329 net/mac80211/mesh_pathtbl.c mpath->dst, mpath->sdata->u.mesh.num_gates); dst 355 net/mac80211/mesh_pathtbl.c mpath->dst, mpath->sdata->u.mesh.num_gates); dst 369 net/mac80211/mesh_pathtbl.c const u8 *dst, gfp_t gfp_flags) dst 377 net/mac80211/mesh_pathtbl.c memcpy(new_mpath->dst, dst, ETH_ALEN); dst 400 net/mac80211/mesh_pathtbl.c const u8 *dst) dst 405 net/mac80211/mesh_pathtbl.c if (ether_addr_equal(dst, sdata->vif.addr)) dst 409 net/mac80211/mesh_pathtbl.c if (is_multicast_ether_addr(dst)) dst 415 net/mac80211/mesh_pathtbl.c new_mpath = mesh_path_new(sdata, dst, GFP_ATOMIC); dst 442 net/mac80211/mesh_pathtbl.c const u8 *dst, const u8 *mpp) dst 448 net/mac80211/mesh_pathtbl.c if (ether_addr_equal(dst, sdata->vif.addr)) dst 452 net/mac80211/mesh_pathtbl.c if (is_multicast_ether_addr(dst)) dst 455 net/mac80211/mesh_pathtbl.c new_mpath = mesh_path_new(sdata, dst, GFP_ATOMIC); dst 505 net/mac80211/mesh_pathtbl.c mpath->dst, mpath->sn, dst 687 net/mac80211/mesh_pathtbl.c mpath_dbg(sdata, "Forwarding to %pM\n", gate->dst); dst 694 net/mac80211/mesh_pathtbl.c gate->dst, gate->flags); dst 699 net/mac80211/mesh_pathtbl.c mpath_dbg(sdata, "Sending to %pM\n", gate->dst); dst 2478 net/mac80211/mlme.c const u8 *src, const u8 *dst, dst 2484 net/mac80211/mlme.c skb = ieee80211_build_probe_req(sdata, src, dst, (u32)-1, channel, dst 2495 net/mac80211/mlme.c u8 *dst = ifmgd->associated->bssid; dst 2505 net/mac80211/mlme.c dst = NULL; dst 2516 net/mac80211/mlme.c if (dst) { dst 2518 net/mac80211/mlme.c sta = sta_info_get(sdata, dst); dst 2540 net/mac80211/mlme.c ieee80211_mlme_send_probe_req(sdata, sdata->vif.addr, dst, dst 580 net/mac80211/scan.c const u8 *src, const u8 *dst, dst 589 net/mac80211/scan.c 
skb = ieee80211_build_probe_req(sdata, src, dst, ratemask, channel, dst 2573 net/mac80211/tx.c mesh_path_del(sdata, mpath->dst); dst 2600 net/mac80211/tx.c mesh_da = mpath->dst; dst 1877 net/mac80211/util.c const u8 *src, const u8 *dst, dst 1915 net/mac80211/util.c if (dst) { dst 1917 net/mac80211/util.c memcpy(mgmt->da, dst, ETH_ALEN); dst 1918 net/mac80211/util.c memcpy(mgmt->bssid, dst, ETH_ALEN); dst 598 net/mpls/af_mpls.c dev = rt->dst.dev; dst 618 net/mpls/af_mpls.c struct dst_entry *dst; dst 626 net/mpls/af_mpls.c dst = ipv6_stub->ipv6_dst_lookup_flow(net, NULL, &fl6, NULL); dst 627 net/mpls/af_mpls.c if (IS_ERR(dst)) dst 628 net/mpls/af_mpls.c return ERR_CAST(dst); dst 630 net/mpls/af_mpls.c dev = dst->dev; dst 632 net/mpls/af_mpls.c dst_release(dst); dst 44 net/mpls/mpls_iptunnel.c struct dst_entry *dst = skb_dst(skb); dst 55 net/mpls/mpls_iptunnel.c out_dev = dst->dev; dst 61 net/mpls/mpls_iptunnel.c !dst->lwtstate || skb_warn_if_lro(skb)) dst 66 net/mpls/mpls_iptunnel.c tun_encap_info = mpls_lwtunnel_encap(dst->lwtstate); dst 78 net/mpls/mpls_iptunnel.c if (dst->ops->family == AF_INET) { dst 86 net/mpls/mpls_iptunnel.c rt = (struct rtable *)dst; dst 87 net/mpls/mpls_iptunnel.c } else if (dst->ops->family == AF_INET6) { dst 95 net/mpls/mpls_iptunnel.c rt6 = (struct rt6_info *)dst; dst 748 net/netfilter/ipvs/ip_vs_core.c struct dst_entry *dst = skb_dst(skb); dst 750 net/netfilter/ipvs/ip_vs_core.c if (dst->dev && !(dst->dev->flags & IFF_LOOPBACK) && dst 78 net/netfilter/ipvs/ip_vs_ctl.c struct dst_entry *dst = ip6_route_output(net, NULL, &fl6); dst 81 net/netfilter/ipvs/ip_vs_ctl.c is_local = !dst->error && dst->dev && (dst->dev->flags & IFF_LOOPBACK); dst 83 net/netfilter/ipvs/ip_vs_ctl.c dst_release(dst); dst 802 net/netfilter/ipvs/ip_vs_ctl.c ip_vs_copy_stats(struct ip_vs_kstats *dst, struct ip_vs_stats *src) dst 804 net/netfilter/ipvs/ip_vs_ctl.c #define IP_VS_SHOW_STATS_COUNTER(c) dst->c = src->kstats.c - src->kstats0.c dst 814 net/netfilter/ipvs/ip_vs_ctl.c ip_vs_read_estimator(dst, src); dst 820 net/netfilter/ipvs/ip_vs_ctl.c ip_vs_export_stats_user(struct ip_vs_stats_user *dst, struct ip_vs_kstats *src) dst 822 net/netfilter/ipvs/ip_vs_ctl.c dst->conns = (u32)src->conns; dst 823 net/netfilter/ipvs/ip_vs_ctl.c dst->inpkts = (u32)src->inpkts; dst 824 net/netfilter/ipvs/ip_vs_ctl.c dst->outpkts = (u32)src->outpkts; dst 825 net/netfilter/ipvs/ip_vs_ctl.c dst->inbytes = src->inbytes; dst 826 net/netfilter/ipvs/ip_vs_ctl.c dst->outbytes = src->outbytes; dst 827 net/netfilter/ipvs/ip_vs_ctl.c dst->cps = (u32)src->cps; dst 828 net/netfilter/ipvs/ip_vs_ctl.c dst->inpps = (u32)src->inpps; dst 829 net/netfilter/ipvs/ip_vs_ctl.c dst->outpps = (u32)src->outpps; dst 830 net/netfilter/ipvs/ip_vs_ctl.c dst->inbps = (u32)src->inbps; dst 831 net/netfilter/ipvs/ip_vs_ctl.c dst->outbps = (u32)src->outbps; dst 2556 net/netfilter/ipvs/ip_vs_ctl.c ip_vs_copy_service(struct ip_vs_service_entry *dst, struct ip_vs_service *src) dst 2564 net/netfilter/ipvs/ip_vs_ctl.c dst->protocol = src->protocol; dst 2565 net/netfilter/ipvs/ip_vs_ctl.c dst->addr = src->addr.ip; dst 2566 net/netfilter/ipvs/ip_vs_ctl.c dst->port = src->port; dst 2567 net/netfilter/ipvs/ip_vs_ctl.c dst->fwmark = src->fwmark; dst 2568 net/netfilter/ipvs/ip_vs_ctl.c strlcpy(dst->sched_name, sched_name, sizeof(dst->sched_name)); dst 2569 net/netfilter/ipvs/ip_vs_ctl.c dst->flags = src->flags; dst 2570 net/netfilter/ipvs/ip_vs_ctl.c dst->timeout = src->timeout / HZ; dst 2571 net/netfilter/ipvs/ip_vs_ctl.c dst->netmask = src->netmask; dst 
2572 net/netfilter/ipvs/ip_vs_ctl.c dst->num_dests = src->num_dests; dst 2574 net/netfilter/ipvs/ip_vs_ctl.c ip_vs_export_stats_user(&dst->stats, &kstats); dst 176 net/netfilter/ipvs/ip_vs_est.c void ip_vs_read_estimator(struct ip_vs_kstats *dst, struct ip_vs_stats *stats) dst 180 net/netfilter/ipvs/ip_vs_est.c dst->cps = (e->cps + 0x1FF) >> 10; dst 181 net/netfilter/ipvs/ip_vs_est.c dst->inpps = (e->inpps + 0x1FF) >> 10; dst 182 net/netfilter/ipvs/ip_vs_est.c dst->outpps = (e->outpps + 0x1FF) >> 10; dst 183 net/netfilter/ipvs/ip_vs_est.c dst->inbps = (e->inbps + 0xF) >> 5; dst 184 net/netfilter/ipvs/ip_vs_est.c dst->outbps = (e->outbps + 0xF) >> 5; dst 57 net/netfilter/ipvs/ip_vs_nfct.c IP_VS_DBG_ADDR((T)->src.l3num, &(T)->dst.u3), \ dst 58 net/netfilter/ipvs/ip_vs_nfct.c ntohs((T)->dst.u.all), \ dst 59 net/netfilter/ipvs/ip_vs_nfct.c (T)->dst.protonum dst 110 net/netfilter/ipvs/ip_vs_nfct.c if (new_tuple.dst.protonum != IPPROTO_ICMP && dst 111 net/netfilter/ipvs/ip_vs_nfct.c new_tuple.dst.protonum != IPPROTO_ICMPV6) dst 114 net/netfilter/ipvs/ip_vs_nfct.c new_tuple.dst.u3 = cp->vaddr; dst 115 net/netfilter/ipvs/ip_vs_nfct.c if (new_tuple.dst.protonum != IPPROTO_ICMP && dst 116 net/netfilter/ipvs/ip_vs_nfct.c new_tuple.dst.protonum != IPPROTO_ICMPV6) dst 117 net/netfilter/ipvs/ip_vs_nfct.c new_tuple.dst.u.tcp.port = cp->vport; dst 158 net/netfilter/ipvs/ip_vs_nfct.c ip_vs_conn_fill_param(net_ipvs(net), exp->tuple.src.l3num, orig->dst.protonum, dst 160 net/netfilter/ipvs/ip_vs_nfct.c &orig->dst.u3, orig->dst.u.tcp.port, &p); dst 171 net/netfilter/ipvs/ip_vs_nfct.c new_reply.dst.u3 = cp->vaddr; dst 172 net/netfilter/ipvs/ip_vs_nfct.c new_reply.dst.u.tcp.port = cp->vport; dst 252 net/netfilter/ipvs/ip_vs_nfct.c .dst = { .protonum = cp->protocol, .dir = IP_CT_DIR_ORIGINAL } }; dst 256 net/netfilter/ipvs/ip_vs_nfct.c tuple.dst.u3 = cp->vaddr; dst 257 net/netfilter/ipvs/ip_vs_nfct.c tuple.dst.u.all = cp->vport; dst 74 net/netfilter/ipvs/ip_vs_xmit.c struct dst_entry *dst, u32 dst_cookie) dst 82 net/netfilter/ipvs/ip_vs_xmit.c dest_dst->dst_cache = dst; dst 95 net/netfilter/ipvs/ip_vs_xmit.c struct dst_entry *dst; dst 99 net/netfilter/ipvs/ip_vs_xmit.c dst = dest_dst->dst_cache; dst 100 net/netfilter/ipvs/ip_vs_xmit.c if (dst->obsolete && dst 101 net/netfilter/ipvs/ip_vs_xmit.c dst->ops->check(dst, dest_dst->dst_cookie) == NULL) dst 161 net/netfilter/ipvs/ip_vs_xmit.c return rt->dst.dev && rt->dst.dev->flags & IFF_LOOPBACK; dst 211 net/netfilter/ipvs/ip_vs_xmit.c ort->dst.ops->update_pmtu(&ort->dst, sk, NULL, mtu, true); dst 264 net/netfilter/ipvs/ip_vs_xmit.c struct dst_entry *dst = skb_dst(skb); dst 271 net/netfilter/ipvs/ip_vs_xmit.c skb->dev = dst->dev; dst 338 net/netfilter/ipvs/ip_vs_xmit.c __ip_vs_dst_set(dest, dest_dst, &rt->dst, 0); dst 342 net/netfilter/ipvs/ip_vs_xmit.c atomic_read(&rt->dst.__refcnt)); dst 381 net/netfilter/ipvs/ip_vs_xmit.c mtu = dst_mtu(&rt->dst); dst 383 net/netfilter/ipvs/ip_vs_xmit.c mtu = dst_mtu(&rt->dst) - sizeof(struct iphdr); dst 412 net/netfilter/ipvs/ip_vs_xmit.c skb_dst_set_noref(skb, &rt->dst); dst 414 net/netfilter/ipvs/ip_vs_xmit.c skb_dst_set(skb, dst_clone(&rt->dst)); dst 416 net/netfilter/ipvs/ip_vs_xmit.c skb_dst_set(skb, &rt->dst); dst 435 net/netfilter/ipvs/ip_vs_xmit.c struct dst_entry *dst; dst 443 net/netfilter/ipvs/ip_vs_xmit.c dst = ip6_route_output(net, NULL, &fl6); dst 444 net/netfilter/ipvs/ip_vs_xmit.c if (dst->error) dst 447 net/netfilter/ipvs/ip_vs_xmit.c return dst; dst 449 net/netfilter/ipvs/ip_vs_xmit.c ipv6_dev_get_saddr(net, 
ip6_dst_idev(dst)->dev, dst 453 net/netfilter/ipvs/ip_vs_xmit.c dst = xfrm_lookup(net, dst, flowi6_to_flowi(&fl6), NULL, 0); dst 454 net/netfilter/ipvs/ip_vs_xmit.c if (IS_ERR(dst)) { dst 455 net/netfilter/ipvs/ip_vs_xmit.c dst = NULL; dst 460 net/netfilter/ipvs/ip_vs_xmit.c return dst; dst 463 net/netfilter/ipvs/ip_vs_xmit.c dst_release(dst); dst 480 net/netfilter/ipvs/ip_vs_xmit.c struct dst_entry *dst; dst 498 net/netfilter/ipvs/ip_vs_xmit.c dst = __ip_vs_route_output_v6(net, &dest->addr.in6, dst 501 net/netfilter/ipvs/ip_vs_xmit.c if (!dst) { dst 507 net/netfilter/ipvs/ip_vs_xmit.c rt = (struct rt6_info *) dst; dst 509 net/netfilter/ipvs/ip_vs_xmit.c __ip_vs_dst_set(dest, dest_dst, &rt->dst, cookie); dst 513 net/netfilter/ipvs/ip_vs_xmit.c atomic_read(&rt->dst.__refcnt)); dst 519 net/netfilter/ipvs/ip_vs_xmit.c dst = __ip_vs_route_output_v6(net, daddr, ret_saddr, do_xfrm, dst 521 net/netfilter/ipvs/ip_vs_xmit.c if (!dst) dst 523 net/netfilter/ipvs/ip_vs_xmit.c rt = (struct rt6_info *) dst; dst 538 net/netfilter/ipvs/ip_vs_xmit.c dst_release(&rt->dst); dst 547 net/netfilter/ipvs/ip_vs_xmit.c mtu = dst_mtu(&rt->dst); dst 549 net/netfilter/ipvs/ip_vs_xmit.c mtu = dst_mtu(&rt->dst) - sizeof(struct ipv6hdr); dst 579 net/netfilter/ipvs/ip_vs_xmit.c skb_dst_set_noref(skb, &rt->dst); dst 581 net/netfilter/ipvs/ip_vs_xmit.c skb_dst_set(skb, dst_clone(&rt->dst)); dst 583 net/netfilter/ipvs/ip_vs_xmit.c skb_dst_set(skb, &rt->dst); dst 589 net/netfilter/ipvs/ip_vs_xmit.c dst_release(&rt->dst); dst 822 net/netfilter/ipvs/ip_vs_xmit.c if (skb_cow(skb, rt->dst.dev->hard_header_len)) dst 911 net/netfilter/ipvs/ip_vs_xmit.c if (skb_cow(skb, rt->dst.dev->hard_header_len)) dst 1165 net/netfilter/ipvs/ip_vs_xmit.c tdev = rt->dst.dev; dst 1315 net/netfilter/ipvs/ip_vs_xmit.c tdev = rt->dst.dev; dst 1577 net/netfilter/ipvs/ip_vs_xmit.c if (skb_cow(skb, rt->dst.dev->hard_header_len)) dst 1666 net/netfilter/ipvs/ip_vs_xmit.c if (skb_cow(skb, rt->dst.dev->hard_header_len)) dst 155 net/netfilter/nf_conntrack_amanda.c &tuple->src.u3, &tuple->dst.u3, dst 185 net/netfilter/nf_conntrack_amanda.c .tuple.dst.protonum = IPPROTO_UDP, dst 195 net/netfilter/nf_conntrack_amanda.c .tuple.dst.protonum = IPPROTO_UDP, dst 38 net/netfilter/nf_conntrack_broadcast.c in_dev = __in_dev_get_rcu(rt->dst.dev); dst 197 net/netfilter/nf_conntrack_core.c n = (sizeof(tuple->src) + sizeof(tuple->dst.u3)) / sizeof(u32); dst 199 net/netfilter/nf_conntrack_core.c (((__force __u16)tuple->dst.u.all << 16) | dst 200 net/netfilter/nf_conntrack_core.c tuple->dst.protonum)); dst 235 net/netfilter/nf_conntrack_core.c tuple->dst.u.udp.port = inet_hdr->dport; dst 275 net/netfilter/nf_conntrack_core.c tuple->dst.u3.ip = ap[1]; dst 279 net/netfilter/nf_conntrack_core.c memcpy(tuple->dst.u3.ip6, ap + 4, sizeof(tuple->dst.u3.ip6)); dst 283 net/netfilter/nf_conntrack_core.c tuple->dst.protonum = protonum; dst 284 net/netfilter/nf_conntrack_core.c tuple->dst.dir = IP_CT_DIR_ORIGINAL; dst 419 net/netfilter/nf_conntrack_core.c inverse->src.u3.ip = orig->dst.u3.ip; dst 420 net/netfilter/nf_conntrack_core.c inverse->dst.u3.ip = orig->src.u3.ip; dst 423 net/netfilter/nf_conntrack_core.c inverse->src.u3.in6 = orig->dst.u3.in6; dst 424 net/netfilter/nf_conntrack_core.c inverse->dst.u3.in6 = orig->src.u3.in6; dst 430 net/netfilter/nf_conntrack_core.c inverse->dst.dir = !orig->dst.dir; dst 432 net/netfilter/nf_conntrack_core.c inverse->dst.protonum = orig->dst.protonum; dst 434 net/netfilter/nf_conntrack_core.c switch (orig->dst.protonum) { dst 443 
net/netfilter/nf_conntrack_core.c inverse->src.u.all = orig->dst.u.all; dst 444 net/netfilter/nf_conntrack_core.c inverse->dst.u.all = orig->src.u.all; dst 1824 net/netfilter/nf_conntrack_core.c nla_put_be16(skb, CTA_PROTO_DST_PORT, tuple->dst.u.tcp.port)) dst 1846 net/netfilter/nf_conntrack_core.c t->dst.u.tcp.port = nla_get_be16(tb[CTA_PROTO_DST_PORT]); dst 1913 net/netfilter/nf_conntrack_core.c memcpy(tuple.dst.u3.all, dst 1914 net/netfilter/nf_conntrack_core.c ct->tuplehash[IP_CT_DIR_ORIGINAL].tuple.dst.u3.all, dst 1915 net/netfilter/nf_conntrack_core.c sizeof(tuple.dst.u3.all)); dst 1916 net/netfilter/nf_conntrack_core.c tuple.dst.u.all = dst 1917 net/netfilter/nf_conntrack_core.c ct->tuplehash[IP_CT_DIR_ORIGINAL].tuple.dst.u.all; dst 2048 net/netfilter/nf_conntrack_core.c src_tuple = nf_ct_tuple(ct, !hash->tuple.dst.dir); dst 87 net/netfilter/nf_conntrack_expect.c hash = jhash2(tuple->dst.u3.all, ARRAY_SIZE(tuple->dst.u3.all), dst 88 net/netfilter/nf_conntrack_expect.c (((tuple->dst.protonum ^ tuple->src.l3num) << 16) | dst 89 net/netfilter/nf_conntrack_expect.c (__force __u16)tuple->dst.u.all) ^ seed); dst 300 net/netfilter/nf_conntrack_expect.c u_int8_t proto, const __be16 *src, const __be16 *dst) dst 314 net/netfilter/nf_conntrack_expect.c exp->tuple.dst.protonum = proto; dst 339 net/netfilter/nf_conntrack_expect.c memcpy(&exp->tuple.dst.u3, daddr, len); dst 340 net/netfilter/nf_conntrack_expect.c if (sizeof(exp->tuple.dst.u3) > len) dst 342 net/netfilter/nf_conntrack_expect.c memset((void *)&exp->tuple.dst.u3 + len, 0x00, dst 343 net/netfilter/nf_conntrack_expect.c sizeof(exp->tuple.dst.u3) - len); dst 345 net/netfilter/nf_conntrack_expect.c exp->tuple.dst.u.all = *dst; dst 622 net/netfilter/nf_conntrack_expect.c expect->tuple.dst.protonum); dst 624 net/netfilter/nf_conntrack_expect.c nf_ct_l4proto_find(expect->tuple.dst.protonum)); dst 113 net/netfilter/nf_conntrack_ftp.c get_ipv6_addr(const char *src, size_t dlen, struct in6_addr *dst, u_int8_t term) dst 116 net/netfilter/nf_conntrack_ftp.c int ret = in6_pton(src, min_t(size_t, dlen, 0xffff), (u8 *)dst, term, &end); dst 485 net/netfilter/nf_conntrack_ftp.c daddr = &ct->tuplehash[!dir].tuple.dst.u3; dst 285 net/netfilter/nf_conntrack_h323_main.c &ct->tuplehash[!dir].tuple.dst.u3, dst 295 net/netfilter/nf_conntrack_h323_main.c &ct->tuplehash[!dir].tuple.dst.u3, dst 299 net/netfilter/nf_conntrack_h323_main.c &ct->tuplehash[!dir].tuple.dst.u3, dst 353 net/netfilter/nf_conntrack_h323_main.c &ct->tuplehash[!dir].tuple.dst.u3, dst 358 net/netfilter/nf_conntrack_h323_main.c &ct->tuplehash[!dir].tuple.dst.u3, dst 631 net/netfilter/nf_conntrack_h323_main.c .tuple.dst.protonum = IPPROTO_UDP, dst 690 net/netfilter/nf_conntrack_h323_main.c &ct->tuplehash[!dir].tuple.dst.u3, dst 695 net/netfilter/nf_conntrack_h323_main.c &ct->tuplehash[!dir].tuple.dst.u3, dst 721 net/netfilter/nf_conntrack_h323_main.c const union nf_inet_addr *dst, dst 735 net/netfilter/nf_conntrack_h323_main.c fl2.daddr = dst->ip; dst 742 net/netfilter/nf_conntrack_h323_main.c rt1->dst.dev == rt2->dst.dev) dst 744 net/netfilter/nf_conntrack_h323_main.c dst_release(&rt2->dst); dst 746 net/netfilter/nf_conntrack_h323_main.c dst_release(&rt1->dst); dst 759 net/netfilter/nf_conntrack_h323_main.c fl2.daddr = dst->in6; dst 766 net/netfilter/nf_conntrack_h323_main.c rt1->dst.dev == rt2->dst.dev) dst 768 net/netfilter/nf_conntrack_h323_main.c dst_release(&rt2->dst); dst 770 net/netfilter/nf_conntrack_h323_main.c dst_release(&rt1->dst); dst 818 net/netfilter/nf_conntrack_h323_main.c 
&ct->tuplehash[!dir].tuple.dst.u3, dst 885 net/netfilter/nf_conntrack_h323_main.c memcmp(&addr, &ct->tuplehash[!dir].tuple.dst.u3, sizeof(addr))) { dst 887 net/netfilter/nf_conntrack_h323_main.c &addr, ntohs(port), &ct->tuplehash[!dir].tuple.dst.u3, dst 888 net/netfilter/nf_conntrack_h323_main.c ntohs(ct->tuplehash[!dir].tuple.dst.u.tcp.port)); dst 891 net/netfilter/nf_conntrack_h323_main.c &ct->tuplehash[!dir].tuple.dst.u3, dst 892 net/netfilter/nf_conntrack_h323_main.c ct->tuplehash[!dir].tuple.dst.u.tcp.port); dst 1193 net/netfilter/nf_conntrack_h323_main.c .tuple.dst.protonum = IPPROTO_TCP, dst 1202 net/netfilter/nf_conntrack_h323_main.c .tuple.dst.protonum = IPPROTO_TCP, dst 1235 net/netfilter/nf_conntrack_h323_main.c memcpy(&tuple.dst.u3, addr, sizeof(tuple.dst.u3)); dst 1236 net/netfilter/nf_conntrack_h323_main.c tuple.dst.u.tcp.port = port; dst 1237 net/netfilter/nf_conntrack_h323_main.c tuple.dst.protonum = IPPROTO_TCP; dst 1276 net/netfilter/nf_conntrack_h323_main.c &ct->tuplehash[!dir].tuple.dst.u3, dst 1434 net/netfilter/nf_conntrack_h323_main.c exp = find_expect(ct, &ct->tuplehash[dir].tuple.dst.u3, dst 1507 net/netfilter/nf_conntrack_h323_main.c &ct->tuplehash[!dir].tuple.dst.u3, dst 1520 net/netfilter/nf_conntrack_h323_main.c &ct->tuplehash[!dir].tuple.dst.u3, dst 1545 net/netfilter/nf_conntrack_h323_main.c if (!memcmp(&addr, &ct->tuplehash[dir].tuple.dst.u3, sizeof(addr))) { dst 1761 net/netfilter/nf_conntrack_h323_main.c .tuple.dst.protonum = IPPROTO_UDP, dst 1770 net/netfilter/nf_conntrack_h323_main.c .tuple.dst.protonum = IPPROTO_UDP, dst 50 net/netfilter/nf_conntrack_helper.c return (((tuple->src.l3num << 8) | tuple->dst.protonum) ^ dst 87 net/netfilter/nf_conntrack_helper.c if (h->tuple.dst.protonum == protonum) dst 416 net/netfilter/nf_conntrack_helper.c cur->tuple.dst.protonum == me->tuple.dst.protonum) { dst 491 net/netfilter/nf_conntrack_helper.c helper->tuple.dst.protonum = protonum; dst 189 net/netfilter/nf_conntrack_irc.c tuple->dst.u3.ip != dcc_ip) { dst 207 net/netfilter/nf_conntrack_irc.c NULL, &tuple->dst.u3, dst 37 net/netfilter/nf_conntrack_labels.c u32 *dst; dst 47 net/netfilter/nf_conntrack_labels.c dst = (u32 *) labels->bits; dst 49 net/netfilter/nf_conntrack_labels.c changed |= replace_u32(&dst[i], mask ? 
~mask[i] : 0, data[i]); dst 53 net/netfilter/nf_conntrack_labels.c replace_u32(&dst[i], 0, 0); dst 50 net/netfilter/nf_conntrack_netbios_ns.c .tuple.dst.protonum = IPPROTO_UDP, dst 69 net/netfilter/nf_conntrack_netlink.c if (nla_put_u8(skb, CTA_PROTO_NUM, tuple->dst.protonum)) dst 87 net/netfilter/nf_conntrack_netlink.c nla_put_in_addr(skb, CTA_IP_V4_DST, tuple->dst.u3.ip)) dst 96 net/netfilter/nf_conntrack_netlink.c nla_put_in6_addr(skb, CTA_IP_V6_DST, &tuple->dst.u3.in6)) dst 138 net/netfilter/nf_conntrack_netlink.c l4proto = nf_ct_l4proto_find(tuple->dst.protonum); dst 1003 net/netfilter/nf_conntrack_netlink.c t->dst.u3.ip = nla_get_in_addr(tb[CTA_IP_V4_DST]); dst 1015 net/netfilter/nf_conntrack_netlink.c t->dst.u3.in6 = nla_get_in6_addr(tb[CTA_IP_V6_DST]); dst 1065 net/netfilter/nf_conntrack_netlink.c tuple->dst.protonum = nla_get_u8(tb[CTA_PROTO_NUM]); dst 1068 net/netfilter/nf_conntrack_netlink.c l4proto = nf_ct_l4proto_find(tuple->dst.protonum); dst 1169 net/netfilter/nf_conntrack_netlink.c tuple->dst.dir = IP_CT_DIR_REPLY; dst 1171 net/netfilter/nf_conntrack_netlink.c tuple->dst.dir = IP_CT_DIR_ORIGINAL; dst 2141 net/netfilter/nf_conntrack_netlink.c if (otuple.dst.protonum != rtuple.dst.protonum) dst 2681 net/netfilter/nf_conntrack_netlink.c m.dst.protonum = tuple->dst.protonum; dst 2690 net/netfilter/nf_conntrack_netlink.c l4proto = nf_ct_l4proto_find(tuple->dst.protonum); dst 2764 net/netfilter/nf_conntrack_netlink.c nat_tuple.dst.protonum = nf_ct_protonum(master); dst 193 net/netfilter/nf_conntrack_pptp.c t.dst.protonum = IPPROTO_GRE; dst 195 net/netfilter/nf_conntrack_pptp.c t.dst.u.gre.key = ct_pptp_info->pac_call_id; dst 201 net/netfilter/nf_conntrack_pptp.c t.dst.protonum = IPPROTO_GRE; dst 203 net/netfilter/nf_conntrack_pptp.c t.dst.u.gre.key = ct_pptp_info->pns_call_id; dst 229 net/netfilter/nf_conntrack_pptp.c &ct->tuplehash[dir].tuple.dst.u3, dst 238 net/netfilter/nf_conntrack_pptp.c &ct->tuplehash[dir].tuple.dst.u3, dst 616 net/netfilter/nf_conntrack_pptp.c .tuple.dst.protonum = IPPROTO_TCP, dst 250 net/netfilter/nf_conntrack_proto.c tuple.dst.u3.ip = inet->inet_daddr; dst 251 net/netfilter/nf_conntrack_proto.c tuple.dst.u.tcp.port = inet->inet_dport; dst 253 net/netfilter/nf_conntrack_proto.c tuple.dst.protonum = sk->sk_protocol; dst 257 net/netfilter/nf_conntrack_proto.c if (tuple.dst.protonum != IPPROTO_TCP && dst 258 net/netfilter/nf_conntrack_proto.c tuple.dst.protonum != IPPROTO_SCTP) { dst 276 net/netfilter/nf_conntrack_proto.c .tuple.dst.u.tcp.port; dst 278 net/netfilter/nf_conntrack_proto.c .tuple.dst.u3.ip; dst 291 net/netfilter/nf_conntrack_proto.c &tuple.dst.u3.ip, ntohs(tuple.dst.u.tcp.port)); dst 319 net/netfilter/nf_conntrack_proto.c tuple.dst.u3.in6 = sk->sk_v6_daddr; dst 320 net/netfilter/nf_conntrack_proto.c tuple.dst.u.tcp.port = inet->inet_dport; dst 321 net/netfilter/nf_conntrack_proto.c tuple.dst.protonum = sk->sk_protocol; dst 326 net/netfilter/nf_conntrack_proto.c if (tuple.dst.protonum != IPPROTO_TCP && dst 327 net/netfilter/nf_conntrack_proto.c tuple.dst.protonum != IPPROTO_SCTP) dst 337 net/netfilter/nf_conntrack_proto.c &tuple.dst.u3.ip6, ntohs(tuple.dst.u.tcp.port)); dst 344 net/netfilter/nf_conntrack_proto.c sin6.sin6_port = ct->tuplehash[IP_CT_DIR_ORIGINAL].tuple.dst.u.tcp.port; dst 347 net/netfilter/nf_conntrack_proto.c &ct->tuplehash[IP_CT_DIR_ORIGINAL].tuple.dst.u3.in6, dst 76 net/netfilter/nf_conntrack_proto_gre.c !memcmp(&km->tuple.dst.u3, &t->dst.u3, sizeof(t->dst.u3)) && dst 77 net/netfilter/nf_conntrack_proto_gre.c 
km->tuple.dst.protonum == t->dst.protonum && dst 78 net/netfilter/nf_conntrack_proto_gre.c km->tuple.dst.u.all == t->dst.u.all; dst 178 net/netfilter/nf_conntrack_proto_gre.c tuple->dst.u.all = 0; dst 192 net/netfilter/nf_conntrack_proto_gre.c tuple->dst.u.gre.key = pgrehdr->call_id; dst 35 net/netfilter/nf_conntrack_proto_icmp.c tuple->dst.u.icmp.type = hp->type; dst 37 net/netfilter/nf_conntrack_proto_icmp.c tuple->dst.u.icmp.code = hp->code; dst 57 net/netfilter/nf_conntrack_proto_icmp.c if (orig->dst.u.icmp.type >= sizeof(invmap) || dst 58 net/netfilter/nf_conntrack_proto_icmp.c !invmap[orig->dst.u.icmp.type]) dst 62 net/netfilter/nf_conntrack_proto_icmp.c tuple->dst.u.icmp.type = invmap[orig->dst.u.icmp.type] - 1; dst 63 net/netfilter/nf_conntrack_proto_icmp.c tuple->dst.u.icmp.code = orig->dst.u.icmp.code; dst 87 net/netfilter/nf_conntrack_proto_icmp.c if (ct->tuplehash[0].tuple.dst.u.icmp.type >= sizeof(valid_new) || dst 88 net/netfilter/nf_conntrack_proto_icmp.c !valid_new[ct->tuplehash[0].tuple.dst.u.icmp.type]) { dst 91 net/netfilter/nf_conntrack_proto_icmp.c ct->tuplehash[0].tuple.dst.u.icmp.type); dst 168 net/netfilter/nf_conntrack_proto_icmp.c ct_daddr = &ct->tuplehash[dir].tuple.dst.u3; dst 262 net/netfilter/nf_conntrack_proto_icmp.c nla_put_u8(skb, CTA_PROTO_ICMP_TYPE, t->dst.u.icmp.type) || dst 263 net/netfilter/nf_conntrack_proto_icmp.c nla_put_u8(skb, CTA_PROTO_ICMP_CODE, t->dst.u.icmp.code)) dst 285 net/netfilter/nf_conntrack_proto_icmp.c tuple->dst.u.icmp.type = nla_get_u8(tb[CTA_PROTO_ICMP_TYPE]); dst 286 net/netfilter/nf_conntrack_proto_icmp.c tuple->dst.u.icmp.code = nla_get_u8(tb[CTA_PROTO_ICMP_CODE]); dst 289 net/netfilter/nf_conntrack_proto_icmp.c if (tuple->dst.u.icmp.type >= sizeof(invmap) || dst 290 net/netfilter/nf_conntrack_proto_icmp.c !invmap[tuple->dst.u.icmp.type]) dst 40 net/netfilter/nf_conntrack_proto_icmpv6.c tuple->dst.u.icmp.type = hp->icmp6_type; dst 42 net/netfilter/nf_conntrack_proto_icmpv6.c tuple->dst.u.icmp.code = hp->icmp6_code; dst 69 net/netfilter/nf_conntrack_proto_icmpv6.c int type = orig->dst.u.icmp.type - 128; dst 74 net/netfilter/nf_conntrack_proto_icmpv6.c tuple->dst.u.icmp.type = invmap[type] - 1; dst 75 net/netfilter/nf_conntrack_proto_icmpv6.c tuple->dst.u.icmp.code = orig->dst.u.icmp.code; dst 100 net/netfilter/nf_conntrack_proto_icmpv6.c int type = ct->tuplehash[0].tuple.dst.u.icmp.type - 128; dst 180 net/netfilter/nf_conntrack_proto_icmpv6.c nla_put_u8(skb, CTA_PROTO_ICMPV6_TYPE, t->dst.u.icmp.type) || dst 181 net/netfilter/nf_conntrack_proto_icmpv6.c nla_put_u8(skb, CTA_PROTO_ICMPV6_CODE, t->dst.u.icmp.code)) dst 203 net/netfilter/nf_conntrack_proto_icmpv6.c tuple->dst.u.icmp.type = nla_get_u8(tb[CTA_PROTO_ICMPV6_TYPE]); dst 204 net/netfilter/nf_conntrack_proto_icmpv6.c tuple->dst.u.icmp.code = nla_get_u8(tb[CTA_PROTO_ICMPV6_CODE]); dst 207 net/netfilter/nf_conntrack_proto_icmpv6.c if (tuple->dst.u.icmp.type < 128 || dst 208 net/netfilter/nf_conntrack_proto_icmpv6.c tuple->dst.u.icmp.type - 128 >= sizeof(invmap) || dst 209 net/netfilter/nf_conntrack_proto_icmpv6.c !invmap[tuple->dst.u.icmp.type - 128]) dst 149 net/netfilter/nf_conntrack_sane.c &tuple->src.u3, &tuple->dst.u3, dst 814 net/netfilter/nf_conntrack_sip.c !nf_inet_addr_cmp(&exp->tuple.dst.u3, addr) || dst 815 net/netfilter/nf_conntrack_sip.c exp->tuple.dst.protonum != proto || dst 816 net/netfilter/nf_conntrack_sip.c exp->tuple.dst.u.udp.port != port) dst 874 net/netfilter/nf_conntrack_sip.c struct dst_entry *dst = NULL; dst 881 net/netfilter/nf_conntrack_sip.c 
nf_ip_route(net, &dst, &fl, false); dst 886 net/netfilter/nf_conntrack_sip.c nf_ip6_route(net, &dst, &fl, false); dst 893 net/netfilter/nf_conntrack_sip.c if (dst) { dst 894 net/netfilter/nf_conntrack_sip.c bool external_media = (dst->dev == dev); dst 896 net/netfilter/nf_conntrack_sip.c dst_release(dst); dst 919 net/netfilter/nf_conntrack_sip.c tuple.dst.protonum = IPPROTO_UDP; dst 920 net/netfilter/nf_conntrack_sip.c tuple.dst.u3 = *daddr; dst 921 net/netfilter/nf_conntrack_sip.c tuple.dst.u.udp.port = port; dst 932 net/netfilter/nf_conntrack_sip.c (!nf_inet_addr_cmp(&exp->saved_addr, &exp->tuple.dst.u3) || dst 933 net/netfilter/nf_conntrack_sip.c exp->saved_proto.udp.port != exp->tuple.dst.u.udp.port) && dst 936 net/netfilter/nf_conntrack_sip.c tuple.dst.u3 = exp->saved_addr; dst 937 net/netfilter/nf_conntrack_sip.c tuple.dst.u.udp.port = exp->saved_proto.udp.port; dst 944 net/netfilter/nf_conntrack_sip.c base_port = ntohs(tuple.dst.u.udp.port) & ~1; dst 1367 net/netfilter/nf_conntrack_sip.c if (!nf_inet_addr_cmp(&ct->tuplehash[dir].tuple.dst.u3, &addr)) dst 57 net/netfilter/nf_conntrack_snmp.c .tuple.dst.protonum = IPPROTO_UDP, dst 41 net/netfilter/nf_conntrack_standalone.c &tuple->src.u3.ip, &tuple->dst.u3.ip); dst 45 net/netfilter/nf_conntrack_standalone.c tuple->src.u3.ip6, tuple->dst.u3.ip6); dst 54 net/netfilter/nf_conntrack_standalone.c tuple->dst.u.icmp.type, dst 55 net/netfilter/nf_conntrack_standalone.c tuple->dst.u.icmp.code, dst 61 net/netfilter/nf_conntrack_standalone.c ntohs(tuple->dst.u.tcp.port)); dst 67 net/netfilter/nf_conntrack_standalone.c ntohs(tuple->dst.u.udp.port)); dst 73 net/netfilter/nf_conntrack_standalone.c ntohs(tuple->dst.u.dccp.port)); dst 78 net/netfilter/nf_conntrack_standalone.c ntohs(tuple->dst.u.sctp.port)); dst 82 net/netfilter/nf_conntrack_standalone.c tuple->dst.u.icmp.type, dst 83 net/netfilter/nf_conntrack_standalone.c tuple->dst.u.icmp.code, dst 89 net/netfilter/nf_conntrack_standalone.c ntohs(tuple->dst.u.gre.key)); dst 72 net/netfilter/nf_conntrack_tftp.c &tuple->src.u3, &tuple->dst.u3, dst 73 net/netfilter/nf_conntrack_tftp.c IPPROTO_UDP, NULL, &tuple->dst.u.udp.port); dst 33 net/netfilter/nf_flow_table_core.c struct dst_entry *other_dst = route->tuple[!dir].dst; dst 34 net/netfilter/nf_flow_table_core.c struct dst_entry *dst = route->tuple[dir].dst; dst 41 net/netfilter/nf_flow_table_core.c ft->dst_v4 = ctt->dst.u3.in; dst 42 net/netfilter/nf_flow_table_core.c ft->mtu = ip_dst_mtu_maybe_forward(dst, true); dst 46 net/netfilter/nf_flow_table_core.c ft->dst_v6 = ctt->dst.u3.in6; dst 47 net/netfilter/nf_flow_table_core.c ft->mtu = ip6_dst_mtu_forward(dst); dst 52 net/netfilter/nf_flow_table_core.c ft->l4proto = ctt->dst.protonum; dst 54 net/netfilter/nf_flow_table_core.c ft->dst_port = ctt->dst.u.tcp.port; dst 57 net/netfilter/nf_flow_table_core.c ft->dst_cache = dst; dst 76 net/netfilter/nf_flow_table_core.c if (!dst_hold_safe(route->tuple[FLOW_OFFLOAD_DIR_ORIGINAL].dst)) dst 79 net/netfilter/nf_flow_table_core.c if (!dst_hold_safe(route->tuple[FLOW_OFFLOAD_DIR_REPLY].dst)) dst 95 net/netfilter/nf_flow_table_core.c dst_release(route->tuple[FLOW_OFFLOAD_DIR_ORIGINAL].dst); dst 218 net/netfilter/nf_flow_table_ip.c static int nf_flow_offload_dst_check(struct dst_entry *dst) dst 220 net/netfilter/nf_flow_table_ip.c if (unlikely(dst_xfrm(dst))) dst 221 net/netfilter/nf_flow_table_ip.c return dst_check(dst, 0) ? 
0 : -1; dst 228 net/netfilter/nf_flow_table_ip.c struct dst_entry *dst) dst 231 net/netfilter/nf_flow_table_ip.c skb_dst_set_noref(skb, dst); dst 264 net/netfilter/nf_flow_table_ip.c outdev = rt->dst.dev; dst 276 net/netfilter/nf_flow_table_ip.c if (nf_flow_offload_dst_check(&rt->dst)) { dst 289 net/netfilter/nf_flow_table_ip.c if (unlikely(dst_xfrm(&rt->dst))) { dst 293 net/netfilter/nf_flow_table_ip.c return nf_flow_xmit_xfrm(skb, state, &rt->dst); dst 298 net/netfilter/nf_flow_table_ip.c skb_dst_set_noref(skb, &rt->dst); dst 494 net/netfilter/nf_flow_table_ip.c outdev = rt->dst.dev; dst 503 net/netfilter/nf_flow_table_ip.c if (nf_flow_offload_dst_check(&rt->dst)) { dst 519 net/netfilter/nf_flow_table_ip.c if (unlikely(dst_xfrm(&rt->dst))) { dst 523 net/netfilter/nf_flow_table_ip.c return nf_flow_xmit_xfrm(skb, state, &rt->dst); dst 528 net/netfilter/nf_flow_table_ip.c skb_dst_set_noref(skb, &rt->dst); dst 39 net/netfilter/nf_nat_amanda.c exp->saved_proto.tcp.port = exp->tuple.dst.u.tcp.port; dst 50 net/netfilter/nf_nat_amanda.c exp->tuple.dst.u.tcp.port = htons(port); dst 65 net/netfilter/nf_nat_core.c fl4->daddr = t->dst.u3.ip; dst 66 net/netfilter/nf_nat_core.c if (t->dst.protonum == IPPROTO_TCP || dst 67 net/netfilter/nf_nat_core.c t->dst.protonum == IPPROTO_UDP || dst 68 net/netfilter/nf_nat_core.c t->dst.protonum == IPPROTO_UDPLITE || dst 69 net/netfilter/nf_nat_core.c t->dst.protonum == IPPROTO_DCCP || dst 70 net/netfilter/nf_nat_core.c t->dst.protonum == IPPROTO_SCTP) dst 71 net/netfilter/nf_nat_core.c fl4->fl4_dport = t->dst.u.all; dst 78 net/netfilter/nf_nat_core.c if (t->dst.protonum == IPPROTO_TCP || dst 79 net/netfilter/nf_nat_core.c t->dst.protonum == IPPROTO_UDP || dst 80 net/netfilter/nf_nat_core.c t->dst.protonum == IPPROTO_UDPLITE || dst 81 net/netfilter/nf_nat_core.c t->dst.protonum == IPPROTO_DCCP || dst 82 net/netfilter/nf_nat_core.c t->dst.protonum == IPPROTO_SCTP) dst 98 net/netfilter/nf_nat_core.c fl6->daddr = t->dst.u3.in6; dst 99 net/netfilter/nf_nat_core.c if (t->dst.protonum == IPPROTO_TCP || dst 100 net/netfilter/nf_nat_core.c t->dst.protonum == IPPROTO_UDP || dst 101 net/netfilter/nf_nat_core.c t->dst.protonum == IPPROTO_UDPLITE || dst 102 net/netfilter/nf_nat_core.c t->dst.protonum == IPPROTO_DCCP || dst 103 net/netfilter/nf_nat_core.c t->dst.protonum == IPPROTO_SCTP) dst 104 net/netfilter/nf_nat_core.c fl6->fl6_dport = t->dst.u.all; dst 111 net/netfilter/nf_nat_core.c if (t->dst.protonum == IPPROTO_TCP || dst 112 net/netfilter/nf_nat_core.c t->dst.protonum == IPPROTO_UDP || dst 113 net/netfilter/nf_nat_core.c t->dst.protonum == IPPROTO_UDPLITE || dst 114 net/netfilter/nf_nat_core.c t->dst.protonum == IPPROTO_DCCP || dst 115 net/netfilter/nf_nat_core.c t->dst.protonum == IPPROTO_SCTP) dst 154 net/netfilter/nf_nat_core.c struct dst_entry *dst; dst 162 net/netfilter/nf_nat_core.c dst = skb_dst(skb); dst 163 net/netfilter/nf_nat_core.c if (dst->xfrm) dst 164 net/netfilter/nf_nat_core.c dst = ((struct xfrm_dst *)dst)->route; dst 165 net/netfilter/nf_nat_core.c if (!dst_hold_safe(dst)) dst 171 net/netfilter/nf_nat_core.c dst = xfrm_lookup(net, dst, &fl, sk, 0); dst 172 net/netfilter/nf_nat_core.c if (IS_ERR(dst)) dst 173 net/netfilter/nf_nat_core.c return PTR_ERR(dst); dst 176 net/netfilter/nf_nat_core.c skb_dst_set(skb, dst); dst 198 net/netfilter/nf_nat_core.c tuple->dst.protonum ^ nf_nat_hash_rnd ^ net_hash_mix(n)); dst 239 net/netfilter/nf_nat_core.c switch (tuple->dst.protonum) { dst 253 net/netfilter/nf_nat_core.c port = tuple->dst.u.all; dst 289 
net/netfilter/nf_nat_core.c return (t->dst.protonum == tuple->dst.protonum && dst 312 net/netfilter/nf_nat_core.c result->dst = tuple->dst; dst 347 net/netfilter/nf_nat_core.c var_ipp = &tuple->dst.u3; dst 369 net/netfilter/nf_nat_core.c 0 : (__force u32)tuple->dst.u3.all[max] ^ zone->id); dst 391 net/netfilter/nf_nat_core.c j ^= (__force u32)tuple->dst.u3.all[i]; dst 410 net/netfilter/nf_nat_core.c switch (tuple->dst.protonum) { dst 434 net/netfilter/nf_nat_core.c keyptr = &tuple->dst.u.gre.key; dst 453 net/netfilter/nf_nat_core.c keyptr = &tuple->dst.u.all; dst 677 net/netfilter/nf_nat_core.c ct->tuplehash[IP_CT_DIR_REPLY].tuple.dst.u3 : dst 81 net/netfilter/nf_nat_ftp.c newaddr = ct->tuplehash[!dir].tuple.dst.u3; dst 82 net/netfilter/nf_nat_ftp.c exp->saved_proto.tcp.port = exp->tuple.dst.u.tcp.port; dst 93 net/netfilter/nf_nat_ftp.c exp->tuple.dst.u.tcp.port = htons(port); dst 190 net/netfilter/nf_nat_helper.c = ct->master->tuplehash[!exp->dir].tuple.dst.u3; dst 45 net/netfilter/nf_nat_irc.c newaddr = ct->tuplehash[IP_CT_DIR_REPLY].tuple.dst.u3; dst 47 net/netfilter/nf_nat_irc.c exp->saved_proto.tcp.port = exp->tuple.dst.u.tcp.port; dst 55 net/netfilter/nf_nat_irc.c exp->tuple.dst.u.tcp.port = htons(port); dst 106 net/netfilter/nf_nat_masquerade.c return ifa->ifa_address == tuple->dst.u3.ip; dst 207 net/netfilter/nf_nat_masquerade.c return ipv6_addr_equal(&w->addr, &tuple->dst.u3.in6); dst 52 net/netfilter/nf_nat_proto.c newport = tuple->dst.u.udp.port; dst 125 net/netfilter/nf_nat_proto.c hdr->dest = tuple->dst.u.sctp.port; dst 167 net/netfilter/nf_nat_proto.c newport = tuple->dst.u.tcp.port; dst 205 net/netfilter/nf_nat_proto.c newport = tuple->dst.u.dccp.port; dst 306 net/netfilter/nf_nat_proto.c pr_debug("call_id -> 0x%04x\n", ntohs(tuple->dst.u.gre.key)); dst 307 net/netfilter/nf_nat_proto.c pgreh->call_id = tuple->dst.u.gre.key; dst 322 net/netfilter/nf_nat_proto.c switch (tuple->dst.protonum) { dst 375 net/netfilter/nf_nat_proto.c csum_replace4(&iph->check, iph->daddr, target->dst.u3.ip); dst 376 net/netfilter/nf_nat_proto.c iph->daddr = target->dst.u3.ip; dst 413 net/netfilter/nf_nat_proto.c ipv6h->daddr = target->dst.u3.in6; dst 458 net/netfilter/nf_nat_proto.c newip = t->dst.u3.ip; dst 477 net/netfilter/nf_nat_proto.c newip = &t->dst.u3.in6; dst 616 net/netfilter/nf_nat_proto.c target.dst.protonum = IPPROTO_ICMP; dst 686 net/netfilter/nf_nat_proto.c ct->tuplehash[!dir].tuple.dst.u3.ip || dst 687 net/netfilter/nf_nat_proto.c (ct->tuplehash[dir].tuple.dst.protonum != IPPROTO_ICMP && dst 689 net/netfilter/nf_nat_proto.c ct->tuplehash[!dir].tuple.dst.u.all)) { dst 716 net/netfilter/nf_nat_proto.c if (ct->tuplehash[dir].tuple.dst.u3.ip != dst 724 net/netfilter/nf_nat_proto.c ct->tuplehash[dir].tuple.dst.protonum != IPPROTO_ICMP && dst 725 net/netfilter/nf_nat_proto.c ct->tuplehash[dir].tuple.dst.u.all != dst 840 net/netfilter/nf_nat_proto.c target.dst.protonum = IPPROTO_ICMPV6; dst 923 net/netfilter/nf_nat_proto.c &ct->tuplehash[!dir].tuple.dst.u3) || dst 924 net/netfilter/nf_nat_proto.c (ct->tuplehash[dir].tuple.dst.protonum != IPPROTO_ICMPV6 && dst 926 net/netfilter/nf_nat_proto.c ct->tuplehash[!dir].tuple.dst.u.all)) { dst 954 net/netfilter/nf_nat_proto.c if (!nf_inet_addr_cmp(&ct->tuplehash[dir].tuple.dst.u3, dst 962 net/netfilter/nf_nat_proto.c ct->tuplehash[dir].tuple.dst.protonum != IPPROTO_ICMPV6 && dst 963 net/netfilter/nf_nat_proto.c ct->tuplehash[dir].tuple.dst.u.all != dst 109 net/netfilter/nf_nat_sip.c newaddr = ct->tuplehash[!dir].tuple.dst.u3; dst 110 
net/netfilter/nf_nat_sip.c newport = ct->tuplehash[!dir].tuple.dst.u.udp.port; dst 111 net/netfilter/nf_nat_sip.c } else if (nf_inet_addr_cmp(&ct->tuplehash[dir].tuple.dst.u3, addr) && dst 112 net/netfilter/nf_nat_sip.c ct->tuplehash[dir].tuple.dst.u.udp.port == port) { dst 194 net/netfilter/nf_nat_sip.c &ct->tuplehash[dir].tuple.dst.u3) || dst 195 net/netfilter/nf_nat_sip.c port != ct->tuplehash[dir].tuple.dst.u.udp.port) dst 214 net/netfilter/nf_nat_sip.c !nf_inet_addr_cmp(&addr, &ct->tuplehash[!dir].tuple.dst.u3)) { dst 216 net/netfilter/nf_nat_sip.c &ct->tuplehash[!dir].tuple.dst.u3, dst 230 net/netfilter/nf_nat_sip.c nf_inet_addr_cmp(&addr, &ct->tuplehash[dir].tuple.dst.u3) && dst 247 net/netfilter/nf_nat_sip.c htons(n) == ct->tuplehash[dir].tuple.dst.u.udp.port && dst 342 net/netfilter/nf_nat_sip.c pair_exp->tuple.dst.protonum == ct->tuplehash[IP_CT_DIR_ORIGINAL].tuple.dst.protonum && dst 346 net/netfilter/nf_nat_sip.c range.min_proto.all = range.max_proto.all = pair_exp->tuple.dst.u.all; dst 347 net/netfilter/nf_nat_sip.c range.min_addr = range.max_addr = pair_exp->tuple.dst.u3; dst 364 net/netfilter/nf_nat_sip.c = ct->master->tuplehash[!exp->dir].tuple.dst.u3; dst 392 net/netfilter/nf_nat_sip.c &ct->tuplehash[!dir].tuple.dst.u3)) dst 393 net/netfilter/nf_nat_sip.c newaddr = exp->tuple.dst.u3; dst 395 net/netfilter/nf_nat_sip.c newaddr = ct->tuplehash[!dir].tuple.dst.u3; dst 402 net/netfilter/nf_nat_sip.c if (exp->tuple.dst.u.udp.port == srcport) dst 403 net/netfilter/nf_nat_sip.c port = ntohs(ct->tuplehash[!dir].tuple.dst.u.udp.port); dst 405 net/netfilter/nf_nat_sip.c port = ntohs(exp->tuple.dst.u.udp.port); dst 407 net/netfilter/nf_nat_sip.c exp->saved_addr = exp->tuple.dst.u3; dst 408 net/netfilter/nf_nat_sip.c exp->tuple.dst.u3 = newaddr; dst 409 net/netfilter/nf_nat_sip.c exp->saved_proto.udp.port = exp->tuple.dst.u.udp.port; dst 416 net/netfilter/nf_nat_sip.c exp->tuple.dst.u.udp.port = htons(port); dst 431 net/netfilter/nf_nat_sip.c if (!nf_inet_addr_cmp(&exp->tuple.dst.u3, &exp->saved_addr) || dst 432 net/netfilter/nf_nat_sip.c exp->tuple.dst.u.udp.port != exp->saved_proto.udp.port) { dst 587 net/netfilter/nf_nat_sip.c &ct->tuplehash[!dir].tuple.dst.u3)) dst 588 net/netfilter/nf_nat_sip.c *rtp_addr = rtp_exp->tuple.dst.u3; dst 590 net/netfilter/nf_nat_sip.c *rtp_addr = ct->tuplehash[!dir].tuple.dst.u3; dst 592 net/netfilter/nf_nat_sip.c rtp_exp->saved_addr = rtp_exp->tuple.dst.u3; dst 593 net/netfilter/nf_nat_sip.c rtp_exp->tuple.dst.u3 = *rtp_addr; dst 594 net/netfilter/nf_nat_sip.c rtp_exp->saved_proto.udp.port = rtp_exp->tuple.dst.u.udp.port; dst 598 net/netfilter/nf_nat_sip.c rtcp_exp->saved_addr = rtcp_exp->tuple.dst.u3; dst 599 net/netfilter/nf_nat_sip.c rtcp_exp->tuple.dst.u3 = *rtp_addr; dst 600 net/netfilter/nf_nat_sip.c rtcp_exp->saved_proto.udp.port = rtcp_exp->tuple.dst.u.udp.port; dst 605 net/netfilter/nf_nat_sip.c for (port = ntohs(rtp_exp->tuple.dst.u.udp.port); dst 609 net/netfilter/nf_nat_sip.c rtp_exp->tuple.dst.u.udp.port = htons(port); dst 618 net/netfilter/nf_nat_sip.c rtcp_exp->tuple.dst.u.udp.port = htons(port + 1); dst 639 net/netfilter/nf_nat_sip.c if (rtp_exp->tuple.dst.u.udp.port != rtp_exp->saved_proto.udp.port && dst 837 net/netfilter/nf_synproxy_core.c struct dst_entry *dst; dst 854 net/netfilter/nf_synproxy_core.c err = nf_ip6_route(net, &dst, flowi6_to_flowi(&fl6), false); dst 859 net/netfilter/nf_synproxy_core.c dst = xfrm_lookup(net, dst, flowi6_to_flowi(&fl6), NULL, 0); dst 860 net/netfilter/nf_synproxy_core.c if (IS_ERR(dst)) dst 863 
net/netfilter/nf_synproxy_core.c skb_dst_set(nskb, dst); dst 90 net/netfilter/nfnetlink_cthelper.c tuple->dst.protonum = nla_get_u8(tb[NFCTH_TUPLE_L4PROTONUM]); dst 441 net/netfilter/nfnetlink_cthelper.c tuple.dst.protonum != cur->tuple.dst.protonum)) dst 473 net/netfilter/nfnetlink_cthelper.c if (nla_put_u8(skb, NFCTH_TUPLE_L4PROTONUM, helper->tuple.dst.protonum)) dst 657 net/netfilter/nfnetlink_cthelper.c tuple.dst.protonum != cur->tuple.dst.protonum)) dst 723 net/netfilter/nfnetlink_cthelper.c tuple.dst.protonum != cur->tuple.dst.protonum)) dst 31 net/netfilter/nft_bitwise.c u32 *dst = ®s->data[priv->dreg]; dst 35 net/netfilter/nft_bitwise.c dst[i] = (src[i] & priv->mask.data[i]) ^ priv->xor.data[i]; dst 32 net/netfilter/nft_byteorder.c u32 *dst = ®s->data[priv->dreg]; dst 37 net/netfilter/nft_byteorder.c d = (void *)dst; dst 47 net/netfilter/nft_byteorder.c nft_reg_store64(&dst[i], be64_to_cpu(src64)); dst 54 net/netfilter/nft_byteorder.c nft_reg_store64(&dst[i], src64); dst 198 net/netfilter/nft_connlimit.c static int nft_connlimit_clone(struct nft_expr *dst, const struct nft_expr *src) dst 200 net/netfilter/nft_connlimit.c struct nft_connlimit *priv_dst = nft_expr_priv(dst); dst 227 net/netfilter/nft_counter.c static int nft_counter_clone(struct nft_expr *dst, const struct nft_expr *src) dst 230 net/netfilter/nft_counter.c struct nft_counter_percpu_priv *priv_clone = nft_expr_priv(dst); dst 195 net/netfilter/nft_ct.c memcpy(dest, tuple->dst.u3.all, dst 202 net/netfilter/nft_ct.c nft_reg_store16(dest, (__force u16)tuple->dst.u.all); dst 212 net/netfilter/nft_ct.c *dest = tuple->dst.u3.ip; dst 222 net/netfilter/nft_ct.c memcpy(dest, tuple->dst.u3.ip6, sizeof(struct in6_addr)); dst 1251 net/netfilter/nft_ct.c &ct->tuplehash[!dir].tuple.dst.u3, dst 27 net/netfilter/nft_dynset.c static int nft_expr_clone(struct nft_expr *dst, struct nft_expr *src) dst 32 net/netfilter/nft_dynset.c dst->ops = src->ops; dst 33 net/netfilter/nft_dynset.c err = src->ops->clone(dst, src); dst 37 net/netfilter/nft_dynset.c memcpy(dst, src, src->ops->size); dst 47 net/netfilter/nft_flow_offload.c route->tuple[dir].dst = this_dst; dst 48 net/netfilter/nft_flow_offload.c route->tuple[!dir].dst = other_dst; dst 91 net/netfilter/nft_flow_offload.c switch (ct->tuplehash[IP_CT_DIR_ORIGINAL].tuple.dst.protonum) { dst 131 net/netfilter/nft_flow_offload.c dst_release(route.tuple[!dir].dst); dst 137 net/netfilter/nft_flow_offload.c dst_release(route.tuple[!dir].dst); dst 147 net/netfilter/nft_meta.c const struct dst_entry *dst = skb_dst(skb); dst 149 net/netfilter/nft_meta.c if (dst == NULL) dst 151 net/netfilter/nft_meta.c *dest = dst->tclassid; dst 176 net/netfilter/nft_payload.c dst, ETH_ALEN, reg); dst 203 net/netfilter/nft_payload.c NFT_OFFLOAD_MATCH(FLOW_DISSECTOR_KEY_IPV4_ADDRS, ipv4, dst, dst 239 net/netfilter/nft_payload.c NFT_OFFLOAD_MATCH(FLOW_DISSECTOR_KEY_IPV6_ADDRS, ipv6, dst, dst 295 net/netfilter/nft_payload.c NFT_OFFLOAD_MATCH(FLOW_DISSECTOR_KEY_PORTS, tp, dst, dst 323 net/netfilter/nft_payload.c NFT_OFFLOAD_MATCH(FLOW_DISSECTOR_KEY_PORTS, tp, dst, dst 25 net/netfilter/nft_rt.c struct dst_entry *dst = NULL; dst 41 net/netfilter/nft_rt.c nf_route(nft_net(pkt), &dst, &fl, false, nft_pf(pkt)); dst 42 net/netfilter/nft_rt.c if (dst) { dst 43 net/netfilter/nft_rt.c mtu = min(mtu, dst_mtu(dst)); dst 44 net/netfilter/nft_rt.c dst_release(dst); dst 60 net/netfilter/nft_rt.c const struct dst_entry *dst; dst 62 net/netfilter/nft_rt.c dst = skb_dst(skb); dst 63 net/netfilter/nft_rt.c if (!dst) dst 69 
net/netfilter/nft_rt.c *dest = dst->tclassid; dst 76 net/netfilter/nft_rt.c *dest = (__force u32)rt_nexthop((const struct rtable *)dst, dst 83 net/netfilter/nft_rt.c memcpy(dest, rt6_nexthop((struct rt6_info *)dst, dst 88 net/netfilter/nft_rt.c nft_reg_store16(dest, get_tcpmss(pkt, dst)); dst 92 net/netfilter/nft_rt.c nft_reg_store8(dest, !!dst->xfrm); dst 180 net/netfilter/nft_tunnel.c info->key.u.ipv4.dst = nla_get_be32(tb[NFTA_TUNNEL_KEY_IP_DST]); dst 212 net/netfilter/nft_tunnel.c memcpy(&info->key.u.ipv6.dst, dst 452 net/netfilter/nft_tunnel.c nla_put_in6_addr(skb, NFTA_TUNNEL_KEY_IP6_DST, &info->key.u.ipv6.dst) < 0 || dst 463 net/netfilter/nft_tunnel.c nla_put_in_addr(skb, NFTA_TUNNEL_KEY_IP_DST, info->key.u.ipv4.dst) < 0) dst 178 net/netfilter/nft_xfrm.c const struct dst_entry *dst = skb_dst(pkt->skb); dst 181 net/netfilter/nft_xfrm.c for (i = 0; dst && dst->xfrm; dst 182 net/netfilter/nft_xfrm.c dst = ((const struct xfrm_dst *)dst)->child, i++) { dst 186 net/netfilter/nft_xfrm.c nft_xfrm_state_get_key(priv, regs, dst->xfrm); dst 163 net/netfilter/utils.c int nf_route(struct net *net, struct dst_entry **dst, struct flowi *fl, dst 171 net/netfilter/utils.c ret = nf_ip_route(net, dst, fl, strict); dst 174 net/netfilter/utils.c ret = nf_ip6_route(net, dst, fl, strict); dst 292 net/netfilter/x_tables.c int xt_data_to_user(void __user *dst, const void *src, dst 296 net/netfilter/x_tables.c if (copy_to_user(dst, src, usersize)) dst 299 net/netfilter/x_tables.c clear_user(dst + usersize, aligned_size - usersize)) dst 35 net/netfilter/xt_HMARK.c __be32 dst; dst 64 net/netfilter/xt_HMARK.c u16 src, dst; dst 68 net/netfilter/xt_HMARK.c dst = ntohs(hp.b16.dst); dst 70 net/netfilter/xt_HMARK.c if (dst > src) dst 71 net/netfilter/xt_HMARK.c uports->v32 = (dst << 16) | src; dst 73 net/netfilter/xt_HMARK.c uports->v32 = (src << 16) | dst; dst 94 net/netfilter/xt_HMARK.c t->dst = hmark_addr_mask(otuple->src.l3num, rtuple->src.u3.ip6, dst 103 net/netfilter/xt_HMARK.c t->uports.b16.dst = rtuple->src.u.all; dst 120 net/netfilter/xt_HMARK.c u32 dst = ntohl(t->dst); dst 122 net/netfilter/xt_HMARK.c if (dst < src) dst 123 net/netfilter/xt_HMARK.c swap(src, dst); dst 125 net/netfilter/xt_HMARK.c hash = jhash_3words(src, dst, t->uports.v32, info->hashrnd); dst 194 net/netfilter/xt_HMARK.c t->dst = hmark_addr6_mask(ip6->daddr.s6_addr32, info->dst_mask.ip6); dst 271 net/netfilter/xt_HMARK.c t->dst = ip->daddr & info->dst_mask.ip; dst 64 net/netfilter/xt_TCPMSS.c mtu = dst_mtu(&rt->dst); dst 65 net/netfilter/xt_TCPMSS.c dst_release(&rt->dst); dst 64 net/netfilter/xt_addrtype.c dst_release(&rt->dst); dst 53 net/netfilter/xt_conntrack.c return conntrack_addrcmp(&ct->tuplehash[IP_CT_DIR_ORIGINAL].tuple.dst.u3, dst 71 net/netfilter/xt_conntrack.c return conntrack_addrcmp(&ct->tuplehash[IP_CT_DIR_REPLY].tuple.dst.u3, dst 94 net/netfilter/xt_conntrack.c (tuple->dst.u.all == info->origdst_port) ^ dst 106 net/netfilter/xt_conntrack.c (tuple->dst.u.all == info->repldst_port) ^ dst 140 net/netfilter/xt_conntrack.c ntohs(tuple->dst.u.all), dst 154 net/netfilter/xt_conntrack.c ntohs(tuple->dst.u.all), dst 76 net/netfilter/xt_hashlimit.c __be32 dst; dst 81 net/netfilter/xt_hashlimit.c __be32 dst[4]; dst 92 net/netfilter/xt_hashlimit.c struct dsthash_dst dst; dst 180 net/netfilter/xt_hashlimit.c return !memcmp(&ent->dst, b, sizeof(ent->dst)); dst 184 net/netfilter/xt_hashlimit.c hash_dst(const struct xt_hashlimit_htable *ht, const struct dsthash_dst *dst) dst 186 net/netfilter/xt_hashlimit.c u_int32_t hash = jhash2((const 
u32 *)dst, dst 187 net/netfilter/xt_hashlimit.c sizeof(*dst)/sizeof(u32), dst 200 net/netfilter/xt_hashlimit.c const struct dsthash_dst *dst) dst 203 net/netfilter/xt_hashlimit.c u_int32_t hash = hash_dst(ht, dst); dst 207 net/netfilter/xt_hashlimit.c if (dst_cmp(ent, dst)) { dst 218 net/netfilter/xt_hashlimit.c const struct dsthash_dst *dst, bool *race) dst 227 net/netfilter/xt_hashlimit.c ent = dsthash_find(ht, dst); dst 248 net/netfilter/xt_hashlimit.c memcpy(&ent->dst, dst, sizeof(ent->dst)); dst 252 net/netfilter/xt_hashlimit.c hlist_add_head_rcu(&ent->node, &ht->hash[hash_dst(ht, dst)]); dst 638 net/netfilter/xt_hashlimit.c struct dsthash_dst *dst, dst 645 net/netfilter/xt_hashlimit.c memset(dst, 0, sizeof(*dst)); dst 650 net/netfilter/xt_hashlimit.c dst->ip.dst = maskl(ip_hdr(skb)->daddr, dst 653 net/netfilter/xt_hashlimit.c dst->ip.src = maskl(ip_hdr(skb)->saddr, dst 667 net/netfilter/xt_hashlimit.c memcpy(&dst->ip6.dst, &ipv6_hdr(skb)->daddr, dst 668 net/netfilter/xt_hashlimit.c sizeof(dst->ip6.dst)); dst 669 net/netfilter/xt_hashlimit.c hashlimit_ipv6_mask(dst->ip6.dst, hinfo->cfg.dstmask); dst 672 net/netfilter/xt_hashlimit.c memcpy(&dst->ip6.src, &ipv6_hdr(skb)->saddr, dst 673 net/netfilter/xt_hashlimit.c sizeof(dst->ip6.src)); dst 674 net/netfilter/xt_hashlimit.c hashlimit_ipv6_mask(dst->ip6.src, hinfo->cfg.srcmask); dst 703 net/netfilter/xt_hashlimit.c dst->src_port = ports[0]; dst 705 net/netfilter/xt_hashlimit.c dst->dst_port = ports[1]; dst 731 net/netfilter/xt_hashlimit.c struct dsthash_dst dst; dst 735 net/netfilter/xt_hashlimit.c if (hashlimit_init_dst(hinfo, &dst, skb, par->thoff) < 0) dst 739 net/netfilter/xt_hashlimit.c dh = dsthash_find(hinfo, &dst); dst 741 net/netfilter/xt_hashlimit.c dh = dsthash_alloc_init(hinfo, &dst, &race); dst 1101 net/netfilter/xt_hashlimit.c &ent->dst.ip.src, dst 1102 net/netfilter/xt_hashlimit.c ntohs(ent->dst.src_port), dst 1103 net/netfilter/xt_hashlimit.c &ent->dst.ip.dst, dst 1104 net/netfilter/xt_hashlimit.c ntohs(ent->dst.dst_port), dst 1112 net/netfilter/xt_hashlimit.c &ent->dst.ip6.src, dst 1113 net/netfilter/xt_hashlimit.c ntohs(ent->dst.src_port), dst 1114 net/netfilter/xt_hashlimit.c &ent->dst.ip6.dst, dst 1115 net/netfilter/xt_hashlimit.c ntohs(ent->dst.dst_port), dst 151 net/netfilter/xt_limit.c static void limit_mt_compat_from_user(void *dst, const void *src) dst 162 net/netfilter/xt_limit.c memcpy(dst, &m, sizeof(m)); dst 165 net/netfilter/xt_limit.c static int limit_mt_compat_to_user(void __user *dst, const void *src) dst 177 net/netfilter/xt_limit.c return copy_to_user(dst, &cm, sizeof(cm)) ? 
-EFAULT : 0; dst 29 net/netfilter/xt_multiport.c u_int16_t src, u_int16_t dst) dst 48 net/netfilter/xt_multiport.c if (dst >= s && dst <= e) dst 52 net/netfilter/xt_multiport.c if ((dst >= s && dst <= e) || dst 69 net/netfilter/xt_multiport.c if (dst == s) dst 73 net/netfilter/xt_multiport.c if (src == s || dst == s) dst 37 net/netfilter/xt_nat.c static void xt_nat_convert_range(struct nf_nat_range2 *dst, dst 40 net/netfilter/xt_nat.c memset(&dst->min_addr, 0, sizeof(dst->min_addr)); dst 41 net/netfilter/xt_nat.c memset(&dst->max_addr, 0, sizeof(dst->max_addr)); dst 42 net/netfilter/xt_nat.c memset(&dst->base_proto, 0, sizeof(dst->base_proto)); dst 44 net/netfilter/xt_nat.c dst->flags = src->flags; dst 45 net/netfilter/xt_nat.c dst->min_addr.ip = src->min_ip; dst 46 net/netfilter/xt_nat.c dst->max_addr.ip = src->max_ip; dst 47 net/netfilter/xt_nat.c dst->min_proto = src->min; dst 48 net/netfilter/xt_nat.c dst->max_proto = src->max; dst 86 net/netfilter/xt_policy.c const struct dst_entry *dst = skb_dst(skb); dst 90 net/netfilter/xt_policy.c if (dst->xfrm == NULL) dst 93 net/netfilter/xt_policy.c for (i = 0; dst && dst->xfrm; dst 94 net/netfilter/xt_policy.c dst = ((struct xfrm_dst *)dst)->child, i++) { dst 100 net/netfilter/xt_policy.c if (match_xfrm_state(dst->xfrm, e, family)) { dst 25 net/netfilter/xt_realm.c const struct dst_entry *dst = skb_dst(skb); dst 27 net/netfilter/xt_realm.c return (info->id == (dst->tclassid & info->mask)) ^ info->invert; dst 330 net/netlink/af_netlink.c static void netlink_deliver_tap_kernel(struct sock *dst, struct sock *src, dst 333 net/netlink/af_netlink.c if (!(netlink_is_kernel(dst) && netlink_is_kernel(src))) dst 334 net/netlink/af_netlink.c netlink_deliver_tap(sock_net(dst), skb); dst 46 net/openvswitch/actions.c unsigned long dst; dst 239 net/openvswitch/actions.c u16 *dst = (u16 *)dst_; dst 243 net/openvswitch/actions.c OVS_SET_MASKED(dst[0], src[0], mask[0]); dst 244 net/openvswitch/actions.c OVS_SET_MASKED(dst[1], src[1], mask[1]); dst 245 net/openvswitch/actions.c OVS_SET_MASKED(dst[2], src[2], mask[2]); dst 268 net/openvswitch/actions.c ether_addr_copy(flow_key->eth.dst, eth_hdr(skb)->h_dest); dst 476 net/openvswitch/actions.c flow_key->ipv4.addr.dst = new_addr; dst 545 net/openvswitch/actions.c memcpy(&flow_key->ipv6.addr.dst, masked, dst 546 net/openvswitch/actions.c sizeof(flow_key->ipv6.addr.dst)); dst 643 net/openvswitch/actions.c __be16 src, dst; dst 654 net/openvswitch/actions.c dst = OVS_MASKED(uh->dest, key->udp_dst, mask->udp_dst); dst 661 net/openvswitch/actions.c if (likely(dst != uh->dest)) { dst 662 net/openvswitch/actions.c set_tp_port(skb, &uh->dest, dst, &uh->check); dst 663 net/openvswitch/actions.c flow_key->tp.dst = dst; dst 670 net/openvswitch/actions.c uh->dest = dst; dst 672 net/openvswitch/actions.c flow_key->tp.dst = dst; dst 685 net/openvswitch/actions.c __be16 src, dst; dst 699 net/openvswitch/actions.c dst = OVS_MASKED(th->dest, key->tcp_dst, mask->tcp_dst); dst 700 net/openvswitch/actions.c if (likely(dst != th->dest)) { dst 701 net/openvswitch/actions.c set_tp_port(skb, &th->dest, dst, &th->check); dst 702 net/openvswitch/actions.c flow_key->tp.dst = dst; dst 736 net/openvswitch/actions.c flow_key->tp.dst = sh->dest; dst 751 net/openvswitch/actions.c __skb_dst_copy(skb, data->dst); dst 776 net/openvswitch/actions.c ovs_dst_get_mtu(const struct dst_entry *dst) dst 778 net/openvswitch/actions.c return dst->dev->mtu; dst 796 net/openvswitch/actions.c data->dst = skb->_skb_refdst; dst 857 net/openvswitch/actions.c 
dst_init(&ovs_rt.dst, &ovs_dst_ops, NULL, 1, dst 859 net/openvswitch/actions.c ovs_rt.dst.dev = vport->dev; dst 862 net/openvswitch/actions.c skb_dst_set_noref(skb, &ovs_rt.dst); dst 179 net/openvswitch/conntrack.c key->ct_orig_proto = orig->dst.protonum; dst 180 net/openvswitch/conntrack.c if (orig->dst.protonum == icmp_proto) { dst 181 net/openvswitch/conntrack.c key->ct.orig_tp.src = htons(orig->dst.u.icmp.type); dst 182 net/openvswitch/conntrack.c key->ct.orig_tp.dst = htons(orig->dst.u.icmp.code); dst 185 net/openvswitch/conntrack.c key->ct.orig_tp.dst = orig->dst.u.all; dst 210 net/openvswitch/conntrack.c key->ipv4.ct_orig.dst = orig->dst.u3.ip; dst 217 net/openvswitch/conntrack.c key->ipv6.ct_orig.dst = orig->dst.u3.in6; dst 306 net/openvswitch/conntrack.c output->ipv4.ct_orig.dst, dst 308 net/openvswitch/conntrack.c output->ct.orig_tp.dst, dst 317 net/openvswitch/conntrack.c IN6_ADDR_INITIALIZER(output->ipv6.ct_orig.dst), dst 319 net/openvswitch/conntrack.c output->ct.orig_tp.dst, dst 390 net/openvswitch/conntrack.c u32 *dst = (u32 *)cl->bits; dst 394 net/openvswitch/conntrack.c dst[i] = (dst[i] & ~mask->ct_labels_32[i]) | dst 642 net/openvswitch/conntrack.c h = &ct->tuplehash[!h->tuple.dst.dir]; dst 841 net/openvswitch/conntrack.c __be16 dst; dst 845 net/openvswitch/conntrack.c key->ipv4.addr.dst = ip_hdr(skb)->daddr; dst 847 net/openvswitch/conntrack.c memcpy(&key->ipv6.addr.dst, &ipv6_hdr(skb)->daddr, dst 848 net/openvswitch/conntrack.c sizeof(key->ipv6.addr.dst)); dst 853 net/openvswitch/conntrack.c dst = udp_hdr(skb)->dest; dst 855 net/openvswitch/conntrack.c dst = tcp_hdr(skb)->dest; dst 857 net/openvswitch/conntrack.c dst = sctp_hdr(skb)->dest; dst 861 net/openvswitch/conntrack.c key->tp.dst = dst; dst 262 net/openvswitch/flow.c key->ipv6.addr.dst = nh->daddr; dst 414 net/openvswitch/flow.c key->tp.dst = htons(icmp->icmp6_code); dst 554 net/openvswitch/flow.c key->ipv4.addr.dst = nh->daddr; dst 577 net/openvswitch/flow.c key->tp.dst = tcp->dest; dst 587 net/openvswitch/flow.c key->tp.dst = udp->dest; dst 595 net/openvswitch/flow.c key->tp.dst = sctp->dest; dst 606 net/openvswitch/flow.c key->tp.dst = htons(icmp->code); dst 632 net/openvswitch/flow.c memcpy(&key->ipv4.addr.dst, arp->ar_tip, sizeof(key->ipv4.addr.dst)); dst 693 net/openvswitch/flow.c key->tp.dst = tcp->dest; dst 702 net/openvswitch/flow.c key->tp.dst = udp->dest; dst 710 net/openvswitch/flow.c key->tp.dst = sctp->dest; dst 776 net/openvswitch/flow.c ether_addr_copy(key->eth.dst, eth->h_dest); dst 76 net/openvswitch/flow.h u8 dst[ETH_ALEN]; /* Ethernet destination address. */ dst 100 net/openvswitch/flow.h __be16 dst; /* TCP/UDP/SCTP destination port. */ dst 107 net/openvswitch/flow.h __be32 dst; /* IP destination address. */ dst 112 net/openvswitch/flow.h __be32 dst; dst 123 net/openvswitch/flow.h struct in6_addr dst; /* IPv6 destination address. */ dst 129 net/openvswitch/flow.h struct in6_addr dst; dst 144 net/openvswitch/flow.h __be16 dst; /* CT orig tuple tp dst port. 
*/ dst 156 net/openvswitch/flow.h key->tp.dst == 0 && dst 693 net/openvswitch/flow_netlink.c SW_FLOW_KEY_PUT(match, tun_key.u.ipv4.dst, dst 703 net/openvswitch/flow_netlink.c SW_FLOW_KEY_PUT(match, tun_key.u.ipv6.dst, dst 812 net/openvswitch/flow_netlink.c match->key->tun_key.u.ipv4.dst || dst 821 net/openvswitch/flow_netlink.c } else if (!match->key->tun_key.u.ipv4.dst) { dst 826 net/openvswitch/flow_netlink.c if (ipv6 && ipv6_addr_any(&match->key->tun_key.u.ipv6.dst)) { dst 877 net/openvswitch/flow_netlink.c if (output->u.ipv4.dst && dst 879 net/openvswitch/flow_netlink.c output->u.ipv4.dst)) dst 887 net/openvswitch/flow_netlink.c if (!ipv6_addr_any(&output->u.ipv6.dst) && dst 889 net/openvswitch/flow_netlink.c &output->u.ipv6.dst)) dst 1240 net/openvswitch/flow_netlink.c SW_FLOW_KEY_PUT(match, ipv4.ct_orig.dst, ct->ipv4_dst, is_mask); dst 1242 net/openvswitch/flow_netlink.c SW_FLOW_KEY_PUT(match, ct.orig_tp.dst, ct->dst_port, is_mask); dst 1254 net/openvswitch/flow_netlink.c SW_FLOW_KEY_MEMCPY(match, ipv6.ct_orig.dst, &ct->ipv6_dst, dst 1255 net/openvswitch/flow_netlink.c sizeof(match->key->ipv6.ct_orig.dst), dst 1258 net/openvswitch/flow_netlink.c SW_FLOW_KEY_PUT(match, ct.orig_tp.dst, ct->dst_port, is_mask); dst 1511 net/openvswitch/flow_netlink.c SW_FLOW_KEY_MEMCPY(match, eth.dst, dst 1555 net/openvswitch/flow_netlink.c SW_FLOW_KEY_PUT(match, ipv4.addr.dst, dst 1590 net/openvswitch/flow_netlink.c SW_FLOW_KEY_MEMCPY(match, ipv6.addr.dst, dst 1592 net/openvswitch/flow_netlink.c sizeof(match->key->ipv6.addr.dst), dst 1610 net/openvswitch/flow_netlink.c SW_FLOW_KEY_PUT(match, ipv4.addr.dst, dst 1644 net/openvswitch/flow_netlink.c SW_FLOW_KEY_PUT(match, tp.dst, tcp_key->tcp_dst, is_mask); dst 1660 net/openvswitch/flow_netlink.c SW_FLOW_KEY_PUT(match, tp.dst, udp_key->udp_dst, is_mask); dst 1669 net/openvswitch/flow_netlink.c SW_FLOW_KEY_PUT(match, tp.dst, sctp_key->sctp_dst, is_mask); dst 1679 net/openvswitch/flow_netlink.c SW_FLOW_KEY_PUT(match, tp.dst, dst 1690 net/openvswitch/flow_netlink.c SW_FLOW_KEY_PUT(match, tp.dst, dst 2025 net/openvswitch/flow_netlink.c ether_addr_copy(eth_key->eth_dst, output->eth.dst); dst 2078 net/openvswitch/flow_netlink.c ipv4_key->ipv4_dst = output->ipv4.addr.dst; dst 2092 net/openvswitch/flow_netlink.c memcpy(ipv6_key->ipv6_dst, &output->ipv6.addr.dst, dst 2112 net/openvswitch/flow_netlink.c arp_key->arp_tip = output->ipv4.addr.dst; dst 2138 net/openvswitch/flow_netlink.c tcp_key->tcp_dst = output->tp.dst; dst 2150 net/openvswitch/flow_netlink.c udp_key->udp_dst = output->tp.dst; dst 2159 net/openvswitch/flow_netlink.c sctp_key->sctp_dst = output->tp.dst; dst 2169 net/openvswitch/flow_netlink.c icmp_key->icmp_code = ntohs(output->tp.dst); dst 2180 net/openvswitch/flow_netlink.c icmpv6_key->icmpv6_code = ntohs(output->tp.dst); dst 2621 net/openvswitch/flow_netlink.c else if (key.tun_proto == AF_INET && key.tun_key.u.ipv4.dst == 0) dst 47 net/openvswitch/flow_table.c void ovs_flow_mask_key(struct sw_flow_key *dst, const struct sw_flow_key *src, dst 51 net/openvswitch/flow_table.c int len = full ? 
sizeof *dst : range_n_bytes(&mask->range); dst 54 net/openvswitch/flow_table.c long *d = (long *)((u8 *)dst + start); dst 74 net/openvswitch/flow_table.h void ovs_flow_mask_key(struct sw_flow_key *dst, const struct sw_flow_key *src, dst 149 net/phonet/af_phonet.c u16 dst, u16 src, u8 res, u8 irq) dst 161 net/phonet/af_phonet.c if (pn_addr(dst) == PNADDR_BROADCAST) { dst 171 net/phonet/af_phonet.c ph->pn_rdev = pn_dev(dst); dst 175 net/phonet/af_phonet.c ph->pn_robj = pn_obj(dst); dst 205 net/phonet/af_phonet.c u16 dst, u16 src, u8 res) dst 211 net/phonet/af_phonet.c if (phonet_address_lookup(dev_net(dev), pn_addr(dst)) == 0) dst 217 net/phonet/af_phonet.c return pn_send(skb, dev, dst, src, res, 1); dst 231 net/phonet/af_phonet.c u16 src, dst; dst 236 net/phonet/af_phonet.c dst = pn_sockaddr_get_object(target); dst 239 net/phonet/af_phonet.c dst = pn->dobject; dst 242 net/phonet/af_phonet.c daddr = pn_addr(dst); dst 250 net/phonet/af_phonet.c } else if (dst == 0) { dst 272 net/phonet/af_phonet.c err = pn_send(skb, dev, dst, src, res, 0); dst 196 net/phonet/pep.c struct sockaddr_pn dst; dst 213 net/phonet/pep.c pn_skb_get_src_sockaddr(oskb, &dst); dst 214 net/phonet/pep.c return pn_skb_send(sk, skb, &dst); dst 626 net/phonet/pep.c const struct sockaddr_pn *dst, dst 630 net/phonet/pep.c u16 dobj = pn_sockaddr_get_object(dst); dst 659 net/phonet/pep.c struct sockaddr_pn dst; dst 670 net/phonet/pep.c pn_skb_get_dst_sockaddr(skb, &dst); dst 673 net/phonet/pep.c sknode = pep_find_pipe(&pn->hlist, &dst, pipe_handle); dst 769 net/phonet/pep.c struct sockaddr_pn dst, src; dst 828 net/phonet/pep.c newsk = pep_find_pipe(&pn->hlist, &dst, pipe_handle); dst 852 net/phonet/pep.c pn_skb_get_dst_sockaddr(skb, &dst); dst 854 net/phonet/pep.c newpn->pn_sk.sobject = pn_sockaddr_get_object(&dst); dst 856 net/phonet/pep.c newpn->pn_sk.resource = pn_sockaddr_get_resource(&dst); dst 160 net/phonet/pn_netlink.c static int fill_route(struct sk_buff *skb, struct net_device *dev, u8 dst, dst 180 net/phonet/pn_netlink.c if (nla_put_u8(skb, RTA_DST, dst) || dst 191 net/phonet/pn_netlink.c void rtm_phonet_notify(int event, struct net_device *dev, u8 dst) dst 200 net/phonet/pn_netlink.c err = fill_route(skb, dev, dst, 0, 0, event); dst 226 net/phonet/pn_netlink.c u8 dst; dst 246 net/phonet/pn_netlink.c dst = nla_get_u8(tb[RTA_DST]); dst 247 net/phonet/pn_netlink.c if (dst & 3) /* Phonet addresses only have 6 high-order bits */ dst 255 net/phonet/pn_netlink.c err = phonet_route_add(dev, dst); dst 257 net/phonet/pn_netlink.c err = phonet_route_del(dev, dst); dst 259 net/phonet/pn_netlink.c rtm_phonet_notify(nlh->nlmsg_type, dev, dst); dst 366 net/qrtr/qrtr.c struct sockaddr_qrtr dst; dst 378 net/qrtr/qrtr.c dst.sq_node = cb->dst_node; dst 379 net/qrtr/qrtr.c dst.sq_port = cb->dst_port; dst 400 net/qrtr/qrtr.c pkt->client.node = cpu_to_le32(dst.sq_node); dst 401 net/qrtr/qrtr.c pkt->client.port = cpu_to_le32(dst.sq_port); dst 404 net/qrtr/qrtr.c &dst, &src)) dst 455 net/qrtr/qrtr.c struct sockaddr_qrtr dst = {AF_QIPCRTR, qrtr_local_nid, QRTR_PORT_CTRL}; dst 467 net/qrtr/qrtr.c qrtr_local_enqueue(NULL, skb, QRTR_TYPE_BYE, &src, &dst); dst 803 net/rds/ib_recv.c __le64 *src, *dst; dst 812 net/rds/ib_recv.c dst = (void *)map->m_page_addrs[map_page] + map_off; dst 816 net/rds/ib_recv.c uncongested |= ~(*src) & *dst; dst 817 net/rds/ib_recv.c *dst++ = *src++; dst 202 net/rds/message.c unsigned char *dst; dst 213 net/rds/message.c dst = hdr->h_exthdr; dst 215 net/rds/message.c *dst++ = type; dst 216 net/rds/message.c memcpy(dst, 
data, len); dst 218 net/rds/message.c dst[len] = RDS_EXTHDR_NONE; dst 156 net/rxrpc/peer_object.c struct dst_entry *dst; dst 177 net/rxrpc/peer_object.c dst = &rt->dst; dst 189 net/rxrpc/peer_object.c dst = ip6_route_output(net, NULL, fl6); dst 190 net/rxrpc/peer_object.c if (dst->error) { dst 191 net/rxrpc/peer_object.c _leave(" [route err %d]", dst->error); dst 201 net/rxrpc/peer_object.c peer->if_mtu = dst_mtu(dst); dst 202 net/rxrpc/peer_object.c dst_release(dst); dst 43 net/sched/act_tunnel_key.c skb_dst_set(skb, dst_clone(¶ms->tcft_enc_metadata->dst)); dst 68 net/sched/act_tunnel_key.c tunnel_key_copy_geneve_opt(const struct nlattr *nla, void *dst, int dst_len, dst 100 net/sched/act_tunnel_key.c if (dst) { dst 101 net/sched/act_tunnel_key.c struct geneve_opt *opt = dst; dst 119 net/sched/act_tunnel_key.c static int tunnel_key_copy_opts(const struct nlattr *nla, u8 *dst, dst 133 net/sched/act_tunnel_key.c opt_len = tunnel_key_copy_geneve_opt(attr, dst, dst 142 net/sched/act_tunnel_key.c if (dst) { dst 144 net/sched/act_tunnel_key.c dst += opt_len; dst 207 net/sched/act_tunnel_key.c dst_release(&p->tcft_enc_metadata->dst); dst 406 net/sched/act_tunnel_key.c dst_release(&metadata->dst); dst 491 net/sched/act_tunnel_key.c __be32 daddr = info->key.u.ipv4.dst; dst 500 net/sched/act_tunnel_key.c const struct in6_addr *daddr6 = &info->key.u.ipv6.dst; dst 3053 net/sched/cls_api.c void tcf_exts_change(struct tcf_exts *dst, struct tcf_exts *src) dst 3056 net/sched/cls_api.c struct tcf_exts old = *dst; dst 3058 net/sched/cls_api.c *dst = *src; dst 78 net/sched/cls_flow.c __be32 dst = flow_get_u32_dst(flow); dst 80 net/sched/cls_flow.c if (dst) dst 81 net/sched/cls_flow.c return ntohl(dst); dst 105 net/sched/cls_flow.c return ntohs(flow->ports.dst); dst 169 net/sched/cls_flow.c return ntohl(CTTUPLE(skb, dst.u3.ip)); dst 171 net/sched/cls_flow.c return ntohl(CTTUPLE(skb, dst.u3.ip6[3])); dst 188 net/sched/cls_flow.c return ntohs(CTTUPLE(skb, dst.u.all)); dst 206 net/sched/cls_flower.c min_mask = htons(filter->mask->key.tp_range.tp_min.dst); dst 207 net/sched/cls_flower.c max_mask = htons(filter->mask->key.tp_range.tp_max.dst); dst 208 net/sched/cls_flower.c min_val = htons(filter->key.tp_range.tp_min.dst); dst 209 net/sched/cls_flower.c max_val = htons(filter->key.tp_range.tp_max.dst); dst 212 net/sched/cls_flower.c if (htons(key->tp_range.tp.dst) < min_val || dst 213 net/sched/cls_flower.c htons(key->tp_range.tp.dst) > max_val) dst 217 net/sched/cls_flower.c mkey->tp_range.tp_min.dst = filter->mkey.tp_range.tp_min.dst; dst 218 net/sched/cls_flower.c mkey->tp_range.tp_max.dst = filter->mkey.tp_range.tp_max.dst; dst 725 net/sched/cls_flower.c fl_set_key_val(tb, &key->tp_range.tp_min.dst, dst 726 net/sched/cls_flower.c TCA_FLOWER_KEY_PORT_DST_MIN, &mask->tp_range.tp_min.dst, dst 727 net/sched/cls_flower.c TCA_FLOWER_UNSPEC, sizeof(key->tp_range.tp_min.dst)); dst 728 net/sched/cls_flower.c fl_set_key_val(tb, &key->tp_range.tp_max.dst, dst 729 net/sched/cls_flower.c TCA_FLOWER_KEY_PORT_DST_MAX, &mask->tp_range.tp_max.dst, dst 730 net/sched/cls_flower.c TCA_FLOWER_UNSPEC, sizeof(key->tp_range.tp_max.dst)); dst 738 net/sched/cls_flower.c if ((mask->tp_range.tp_min.dst && mask->tp_range.tp_max.dst && dst 739 net/sched/cls_flower.c htons(key->tp_range.tp_max.dst) <= dst 740 net/sched/cls_flower.c htons(key->tp_range.tp_min.dst)) || dst 1067 net/sched/cls_flower.c fl_set_key_val(tb, key->eth.dst, TCA_FLOWER_KEY_ETH_DST, dst 1068 net/sched/cls_flower.c mask->eth.dst, TCA_FLOWER_KEY_ETH_DST_MASK, dst 1069 
net/sched/cls_flower.c sizeof(key->eth.dst)); dst 1119 net/sched/cls_flower.c fl_set_key_val(tb, &key->ipv4.dst, TCA_FLOWER_KEY_IPV4_DST, dst 1120 net/sched/cls_flower.c &mask->ipv4.dst, TCA_FLOWER_KEY_IPV4_DST_MASK, dst 1121 net/sched/cls_flower.c sizeof(key->ipv4.dst)); dst 1128 net/sched/cls_flower.c fl_set_key_val(tb, &key->ipv6.dst, TCA_FLOWER_KEY_IPV6_DST, dst 1129 net/sched/cls_flower.c &mask->ipv6.dst, TCA_FLOWER_KEY_IPV6_DST_MASK, dst 1130 net/sched/cls_flower.c sizeof(key->ipv6.dst)); dst 1137 net/sched/cls_flower.c fl_set_key_val(tb, &key->tp.dst, TCA_FLOWER_KEY_TCP_DST, dst 1138 net/sched/cls_flower.c &mask->tp.dst, TCA_FLOWER_KEY_TCP_DST_MASK, dst 1139 net/sched/cls_flower.c sizeof(key->tp.dst)); dst 1147 net/sched/cls_flower.c fl_set_key_val(tb, &key->tp.dst, TCA_FLOWER_KEY_UDP_DST, dst 1148 net/sched/cls_flower.c &mask->tp.dst, TCA_FLOWER_KEY_UDP_DST_MASK, dst 1149 net/sched/cls_flower.c sizeof(key->tp.dst)); dst 1154 net/sched/cls_flower.c fl_set_key_val(tb, &key->tp.dst, TCA_FLOWER_KEY_SCTP_DST, dst 1155 net/sched/cls_flower.c &mask->tp.dst, TCA_FLOWER_KEY_SCTP_DST_MASK, dst 1156 net/sched/cls_flower.c sizeof(key->tp.dst)); dst 1218 net/sched/cls_flower.c fl_set_key_val(tb, &key->enc_ipv4.dst, dst 1220 net/sched/cls_flower.c &mask->enc_ipv4.dst, dst 1222 net/sched/cls_flower.c sizeof(key->enc_ipv4.dst)); dst 1234 net/sched/cls_flower.c fl_set_key_val(tb, &key->enc_ipv6.dst, dst 1236 net/sched/cls_flower.c &mask->enc_ipv6.dst, dst 1238 net/sched/cls_flower.c sizeof(key->enc_ipv6.dst)); dst 1249 net/sched/cls_flower.c fl_set_key_val(tb, &key->enc_tp.dst, TCA_FLOWER_KEY_ENC_UDP_DST_PORT, dst 1250 net/sched/cls_flower.c &mask->enc_tp.dst, TCA_FLOWER_KEY_ENC_UDP_DST_PORT_MASK, dst 1251 net/sched/cls_flower.c sizeof(key->enc_tp.dst)); dst 1271 net/sched/cls_flower.c static void fl_mask_copy(struct fl_flow_mask *dst, dst 1275 net/sched/cls_flower.c void *pdst = fl_key_get_start(&dst->key, src); dst 1278 net/sched/cls_flower.c dst->range = src->range; dst 1384 net/sched/cls_flower.c if ((newmask->key.tp_range.tp_min.dst && dst 1385 net/sched/cls_flower.c newmask->key.tp_range.tp_max.dst) || dst 1985 net/sched/cls_flower.c if (fl_dump_key_val(skb, &key->tp_range.tp_min.dst, dst 1987 net/sched/cls_flower.c &mask->tp_range.tp_min.dst, TCA_FLOWER_UNSPEC, dst 1988 net/sched/cls_flower.c sizeof(key->tp_range.tp_min.dst)) || dst 1989 net/sched/cls_flower.c fl_dump_key_val(skb, &key->tp_range.tp_max.dst, dst 1991 net/sched/cls_flower.c &mask->tp_range.tp_max.dst, TCA_FLOWER_UNSPEC, dst 1992 net/sched/cls_flower.c sizeof(key->tp_range.tp_max.dst)) || dst 2242 net/sched/cls_flower.c if (fl_dump_key_val(skb, key->eth.dst, TCA_FLOWER_KEY_ETH_DST, dst 2243 net/sched/cls_flower.c mask->eth.dst, TCA_FLOWER_KEY_ETH_DST_MASK, dst 2244 net/sched/cls_flower.c sizeof(key->eth.dst)) || dst 2292 net/sched/cls_flower.c fl_dump_key_val(skb, &key->ipv4.dst, TCA_FLOWER_KEY_IPV4_DST, dst 2293 net/sched/cls_flower.c &mask->ipv4.dst, TCA_FLOWER_KEY_IPV4_DST_MASK, dst 2294 net/sched/cls_flower.c sizeof(key->ipv4.dst)))) dst 2300 net/sched/cls_flower.c fl_dump_key_val(skb, &key->ipv6.dst, TCA_FLOWER_KEY_IPV6_DST, dst 2301 net/sched/cls_flower.c &mask->ipv6.dst, TCA_FLOWER_KEY_IPV6_DST_MASK, dst 2302 net/sched/cls_flower.c sizeof(key->ipv6.dst)))) dst 2309 net/sched/cls_flower.c fl_dump_key_val(skb, &key->tp.dst, TCA_FLOWER_KEY_TCP_DST, dst 2310 net/sched/cls_flower.c &mask->tp.dst, TCA_FLOWER_KEY_TCP_DST_MASK, dst 2311 net/sched/cls_flower.c sizeof(key->tp.dst)) || dst 2320 net/sched/cls_flower.c 
fl_dump_key_val(skb, &key->tp.dst, TCA_FLOWER_KEY_UDP_DST, dst 2321 net/sched/cls_flower.c &mask->tp.dst, TCA_FLOWER_KEY_UDP_DST_MASK, dst 2322 net/sched/cls_flower.c sizeof(key->tp.dst)))) dst 2328 net/sched/cls_flower.c fl_dump_key_val(skb, &key->tp.dst, TCA_FLOWER_KEY_SCTP_DST, dst 2329 net/sched/cls_flower.c &mask->tp.dst, TCA_FLOWER_KEY_SCTP_DST_MASK, dst 2330 net/sched/cls_flower.c sizeof(key->tp.dst)))) dst 2387 net/sched/cls_flower.c fl_dump_key_val(skb, &key->enc_ipv4.dst, dst 2388 net/sched/cls_flower.c TCA_FLOWER_KEY_ENC_IPV4_DST, &mask->enc_ipv4.dst, dst 2390 net/sched/cls_flower.c sizeof(key->enc_ipv4.dst)))) dst 2397 net/sched/cls_flower.c fl_dump_key_val(skb, &key->enc_ipv6.dst, dst 2399 net/sched/cls_flower.c &mask->enc_ipv6.dst, dst 2401 net/sched/cls_flower.c sizeof(key->enc_ipv6.dst)))) dst 2412 net/sched/cls_flower.c fl_dump_key_val(skb, &key->enc_tp.dst, dst 2414 net/sched/cls_flower.c &mask->enc_tp.dst, dst 2416 net/sched/cls_flower.c sizeof(key->enc_tp.dst)) || dst 128 net/sched/cls_route.c struct dst_entry *dst; dst 134 net/sched/cls_route.c dst = skb_dst(skb); dst 135 net/sched/cls_route.c if (!dst) dst 138 net/sched/cls_route.c id = dst->tclassid; dst 75 net/sched/cls_rsvp.h __be32 dst[RSVP_DST_LEN]; dst 99 net/sched/cls_rsvp.h static inline unsigned int hash_dst(__be32 *dst, u8 protocol, u8 tunnelid) dst 101 net/sched/cls_rsvp.h unsigned int h = (__force __u32)dst[RSVP_DST_LEN - 1]; dst 134 net/sched/cls_rsvp.h __be32 *dst, *src; dst 155 net/sched/cls_rsvp.h dst = &nhptr->daddr.s6_addr32[0]; dst 160 net/sched/cls_rsvp.h dst = &nhptr->daddr; dst 167 net/sched/cls_rsvp.h h1 = hash_dst(dst, protocol, tunnelid); dst 172 net/sched/cls_rsvp.h if (dst[RSVP_DST_LEN-1] == s->dst[RSVP_DST_LEN - 1] && dst 177 net/sched/cls_rsvp.h dst[0] == s->dst[0] && dst 178 net/sched/cls_rsvp.h dst[1] == s->dst[1] && dst 179 net/sched/cls_rsvp.h dst[2] == s->dst[2] && dst 488 net/sched/cls_rsvp.h __be32 *dst; dst 566 net/sched/cls_rsvp.h dst = nla_data(tb[TCA_RSVP_DST]); dst 567 net/sched/cls_rsvp.h h1 = hash_dst(dst, pinfo ? pinfo->protocol : 0, pinfo ? 
pinfo->tunnelid : 0); dst 587 net/sched/cls_rsvp.h if (dst[RSVP_DST_LEN-1] == s->dst[RSVP_DST_LEN-1] && dst 591 net/sched/cls_rsvp.h dst[0] == s->dst[0] && dst 592 net/sched/cls_rsvp.h dst[1] == s->dst[1] && dst 593 net/sched/cls_rsvp.h dst[2] == s->dst[2] && dst 630 net/sched/cls_rsvp.h memcpy(s->dst, dst, sizeof(s->dst)); dst 708 net/sched/cls_rsvp.h if (nla_put(skb, TCA_RSVP_DST, sizeof(s->dst), &s->dst)) dst 101 net/sched/em_meta.c struct meta_obj *dst, int *err) dst 109 net/sched/em_meta.c get_random_bytes(&dst->value, sizeof(dst->value)); dst 122 net/sched/em_meta.c dst->value = fixed_loadavg(avenrun[0]); dst 127 net/sched/em_meta.c dst->value = fixed_loadavg(avenrun[1]); dst 132 net/sched/em_meta.c dst->value = fixed_loadavg(avenrun[2]); dst 139 net/sched/em_meta.c static inline int int_dev(struct net_device *dev, struct meta_obj *dst) dst 144 net/sched/em_meta.c dst->value = dev->ifindex; dst 148 net/sched/em_meta.c static inline int var_dev(struct net_device *dev, struct meta_obj *dst) dst 153 net/sched/em_meta.c dst->value = (unsigned long) dev->name; dst 154 net/sched/em_meta.c dst->len = strlen(dev->name); dst 160 net/sched/em_meta.c *err = int_dev(skb->dev, dst); dst 165 net/sched/em_meta.c *err = var_dev(skb->dev, dst); dst 177 net/sched/em_meta.c dst->value = skb_vlan_tag_get(skb); dst 179 net/sched/em_meta.c dst->value = tag; dst 192 net/sched/em_meta.c dst->value = skb->priority; dst 198 net/sched/em_meta.c dst->value = tc_skb_protocol(skb); dst 203 net/sched/em_meta.c dst->value = skb->pkt_type; dst 208 net/sched/em_meta.c dst->value = skb->len; dst 213 net/sched/em_meta.c dst->value = skb->data_len; dst 218 net/sched/em_meta.c dst->value = skb->mac_len; dst 223 net/sched/em_meta.c dst->value = skb_get_hash(skb); dst 232 net/sched/em_meta.c dst->value = skb->mark; dst 241 net/sched/em_meta.c dst->value = skb->tc_index; dst 254 net/sched/em_meta.c dst->value = skb_dst(skb)->tclassid; dst 256 net/sched/em_meta.c dst->value = 0; dst 265 net/sched/em_meta.c dst->value = inet_iif(skb); dst 281 net/sched/em_meta.c dst->value = skb->sk->sk_family; dst 290 net/sched/em_meta.c dst->value = skb->sk->sk_state; dst 299 net/sched/em_meta.c dst->value = skb->sk->sk_reuse; dst 309 net/sched/em_meta.c dst->value = skb->sk->sk_bound_dev_if; dst 320 net/sched/em_meta.c dst->value = (unsigned long) "any"; dst 321 net/sched/em_meta.c dst->len = 3; dst 328 net/sched/em_meta.c *err = var_dev(dev, dst); dst 339 net/sched/em_meta.c dst->value = refcount_read(&skb->sk->sk_refcnt); dst 350 net/sched/em_meta.c dst->value = sk->sk_rcvbuf; dst 361 net/sched/em_meta.c dst->value = sk->sk_shutdown; dst 372 net/sched/em_meta.c dst->value = sk->sk_protocol; dst 383 net/sched/em_meta.c dst->value = sk->sk_type; dst 394 net/sched/em_meta.c dst->value = sk_rmem_alloc_get(sk); dst 405 net/sched/em_meta.c dst->value = sk_wmem_alloc_get(sk); dst 416 net/sched/em_meta.c dst->value = atomic_read(&sk->sk_omem_alloc); dst 427 net/sched/em_meta.c dst->value = sk->sk_receive_queue.qlen; dst 438 net/sched/em_meta.c dst->value = sk->sk_write_queue.qlen; dst 449 net/sched/em_meta.c dst->value = READ_ONCE(sk->sk_wmem_queued); dst 460 net/sched/em_meta.c dst->value = sk->sk_forward_alloc; dst 471 net/sched/em_meta.c dst->value = sk->sk_sndbuf; dst 482 net/sched/em_meta.c dst->value = (__force int) sk->sk_allocation; dst 491 net/sched/em_meta.c dst->value = skb->sk->sk_hash; dst 502 net/sched/em_meta.c dst->value = sk->sk_lingertime / HZ; dst 513 net/sched/em_meta.c dst->value = sk->sk_error_queue.qlen; dst 524 
net/sched/em_meta.c dst->value = sk->sk_ack_backlog; dst 535 net/sched/em_meta.c dst->value = sk->sk_max_ack_backlog; dst 546 net/sched/em_meta.c dst->value = sk->sk_priority; dst 557 net/sched/em_meta.c dst->value = READ_ONCE(sk->sk_rcvlowat); dst 568 net/sched/em_meta.c dst->value = sk->sk_rcvtimeo / HZ; dst 579 net/sched/em_meta.c dst->value = sk->sk_sndtimeo / HZ; dst 590 net/sched/em_meta.c dst->value = sk->sk_frag.offset; dst 601 net/sched/em_meta.c dst->value = sk->sk_write_pending; dst 692 net/sched/em_meta.c static int meta_var_change(struct meta_value *dst, struct nlattr *nla) dst 696 net/sched/em_meta.c dst->val = (unsigned long)kmemdup(nla_data(nla), len, GFP_KERNEL); dst 697 net/sched/em_meta.c if (dst->val == 0UL) dst 699 net/sched/em_meta.c dst->len = len; dst 709 net/sched/em_meta.c struct meta_obj *dst) dst 713 net/sched/em_meta.c if (shift && shift < dst->len) dst 714 net/sched/em_meta.c dst->len -= shift; dst 745 net/sched/em_meta.c static int meta_int_change(struct meta_value *dst, struct nlattr *nla) dst 748 net/sched/em_meta.c dst->val = *(unsigned long *) nla_data(nla); dst 749 net/sched/em_meta.c dst->len = sizeof(unsigned long); dst 751 net/sched/em_meta.c dst->val = nla_get_u32(nla); dst 752 net/sched/em_meta.c dst->len = sizeof(u32); dst 760 net/sched/em_meta.c struct meta_obj *dst) dst 763 net/sched/em_meta.c dst->value >>= v->hdr.shift; dst 766 net/sched/em_meta.c dst->value &= v->val; dst 823 net/sched/em_meta.c struct meta_value *v, struct meta_obj *dst) dst 828 net/sched/em_meta.c dst->value = v->val; dst 829 net/sched/em_meta.c dst->len = v->len; dst 833 net/sched/em_meta.c meta_ops(v)->get(skb, info, v, dst, &err); dst 838 net/sched/em_meta.c meta_type_ops(v)->apply_extras(v, dst); dst 882 net/sched/em_meta.c static inline int meta_change_data(struct meta_value *dst, struct nlattr *nla) dst 888 net/sched/em_meta.c return meta_type_ops(dst)->change(dst, nla); dst 601 net/sched/sch_cake.c keys->addrs.v4addrs.src = rev ? tuple.dst.u3.ip : tuple.src.u3.ip; dst 602 net/sched/sch_cake.c keys->addrs.v4addrs.dst = rev ? tuple.src.u3.ip : tuple.dst.u3.ip; dst 605 net/sched/sch_cake.c keys->ports.src = rev ? tuple.dst.u.all : tuple.src.u.all; dst 606 net/sched/sch_cake.c keys->ports.dst = rev ? tuple.src.u.all : tuple.dst.u.all; dst 661 net/sched/sch_cake.c host_keys.addrs.v4addrs.dst = 0; dst 670 net/sched/sch_cake.c memset(&host_keys.addrs.v6addrs.dst, 0, dst 671 net/sched/sch_cake.c sizeof(host_keys.addrs.v6addrs.dst)); dst 770 net/sched/sch_qfq.c int src, int dst) dst 772 net/sched/sch_qfq.c q->bitmaps[dst] |= q->bitmaps[src] & mask; dst 218 net/sched/sch_teql.c struct dst_entry *dst) dst 223 net/sched/sch_teql.c n = dst_neigh_lookup_skb(dst, skb); dst 227 net/sched/sch_teql.c if (dst->dev != dev) { dst 259 net/sched/sch_teql.c struct dst_entry *dst = skb_dst(skb); dst 265 net/sched/sch_teql.c if (!dev->header_ops || !dst) dst 269 net/sched/sch_teql.c res = __teql_resolve(skb, skb_res, dev, txq, dst); dst 1434 net/sctp/associola.c if (t->pmtu_pending && t->dst) { dst 31 net/sctp/diag.c struct dst_entry *dst; dst 37 net/sctp/diag.c dst = asoc->peer.primary_path->dst; dst 42 net/sctp/diag.c r->id.idiag_if = dst ? 
dst->dev->ifindex : 0; dst 422 net/sctp/input.c struct dst_entry *dst; dst 426 net/sctp/input.c dst = sctp_transport_dst_check(t); dst 427 net/sctp/input.c if (dst) dst 428 net/sctp/input.c dst->ops->redirect(dst, sk, skb); dst 230 net/sctp/ipv6.c struct dst_entry *dst = NULL; dst 279 net/sctp/ipv6.c dst = ip6_dst_lookup_flow(sock_net(sk), sk, fl6, final_p); dst 281 net/sctp/ipv6.c t->dst = dst; dst 291 net/sctp/ipv6.c if (!IS_ERR(dst)) { dst 307 net/sctp/ipv6.c t->dst = dst; dst 316 net/sctp/ipv6.c dst_release(dst); dst 317 net/sctp/ipv6.c dst = NULL; dst 344 net/sctp/ipv6.c if (!IS_ERR_OR_NULL(dst)) dst 345 net/sctp/ipv6.c dst_release(dst); dst 346 net/sctp/ipv6.c dst = bdst; dst 347 net/sctp/ipv6.c t->dst = dst; dst 358 net/sctp/ipv6.c if (!IS_ERR_OR_NULL(dst)) dst 359 net/sctp/ipv6.c dst_release(dst); dst 360 net/sctp/ipv6.c dst = bdst; dst 362 net/sctp/ipv6.c t->dst = dst; dst 368 net/sctp/ipv6.c if (!IS_ERR_OR_NULL(dst)) { dst 371 net/sctp/ipv6.c rt = (struct rt6_info *)dst; dst 377 net/sctp/ipv6.c t->dst = NULL; dst 401 net/sctp/ipv6.c pr_debug("%s: asoc:%p dst:%p\n", __func__, t->asoc, t->dst); dst 403 net/sctp/ipv6.c if (t->dst) { dst 127 net/sctp/output.c if (!tp->dst) dst 132 net/sctp/output.c if (__sk_dst_get(sk) != tp->dst) { dst 133 net/sctp/output.c dst_hold(tp->dst); dst 134 net/sctp/output.c sk_setup_caps(sk, tp->dst); dst 136 net/sctp/output.c packet->max_size = sk_can_gso(sk) ? tp->dst->dev->gso_max_size dst 512 net/sctp/output.c if (skb_dst(head) != tp->dst) { dst 513 net/sctp/output.c dst_hold(tp->dst); dst 514 net/sctp/output.c sk_setup_caps(sk, tp->dst); dst 551 net/sctp/output.c struct dst_entry *dst; dst 588 net/sctp/output.c dst = dst_clone(tp->dst); dst 589 net/sctp/output.c if (!dst) { dst 594 net/sctp/output.c skb_dst_set(head, dst); dst 1664 net/sctp/outqueue.c if (transport->dst) dst 416 net/sctp/protocol.c struct dst_entry *dst = NULL; dst 443 net/sctp/protocol.c dst = &rt->dst; dst 444 net/sctp/protocol.c t->dst = dst; dst 456 net/sctp/protocol.c if (dst) { dst 475 net/sctp/protocol.c dst_release(dst); dst 476 net/sctp/protocol.c dst = NULL; dst 509 net/sctp/protocol.c if (!dst) { dst 510 net/sctp/protocol.c dst = &rt->dst; dst 511 net/sctp/protocol.c t->dst = dst; dst 514 net/sctp/protocol.c dst_release(&rt->dst); dst 519 net/sctp/protocol.c dst_release(dst); dst 520 net/sctp/protocol.c dst = &rt->dst; dst 521 net/sctp/protocol.c t->dst = dst; dst 529 net/sctp/protocol.c if (dst) { dst 533 net/sctp/protocol.c t->dst = NULL; dst 546 net/sctp/protocol.c struct rtable *rt = (struct rtable *)t->dst; dst 764 net/sctp/sm_sideeffect.c if (t->dst) dst 147 net/sctp/transport.c dst_release(transport->dst); dst 225 net/sctp/transport.c if (!transport->dst || transport->dst->obsolete) { dst 240 net/sctp/transport.c if (transport->dst) dst 241 net/sctp/transport.c transport->pathmtu = sctp_dst_mtu(transport->dst); dst 248 net/sctp/transport.c struct dst_entry *dst = sctp_transport_dst_check(t); dst 260 net/sctp/transport.c if (dst) { dst 261 net/sctp/transport.c struct sctp_pf *pf = sctp_get_pf_specific(dst->ops->family); dst 266 net/sctp/transport.c dst->ops->update_pmtu(dst, sk, NULL, pmtu, true); dst 269 net/sctp/transport.c dst = sctp_transport_dst_check(t); dst 272 net/sctp/transport.c if (!dst) { dst 274 net/sctp/transport.c dst = t->dst; dst 277 net/sctp/transport.c if (dst) { dst 279 net/sctp/transport.c pmtu = sctp_dst_mtu(dst); dst 309 net/sctp/transport.c if (transport->dst && asoc && dst 687 net/sctp/transport.c dst_release(t->dst); dst 688 
net/sctp/transport.c t->dst = NULL; dst 96 net/smc/smc_clc.c static int smc_clc_prfx_set4_rcu(struct dst_entry *dst, __be32 ipv4, dst 99 net/smc/smc_clc.c struct in_device *in_dev = __in_dev_get_rcu(dst->dev); dst 117 net/smc/smc_clc.c static int smc_clc_prfx_set6_rcu(struct dst_entry *dst, dst 122 net/smc/smc_clc.c struct inet6_dev *in6_dev = __in6_dev_get(dst->dev); dst 151 net/smc/smc_clc.c struct dst_entry *dst = sk_dst_get(clcsock->sk); dst 158 net/smc/smc_clc.c if (!dst) { dst 162 net/smc/smc_clc.c if (!dst->dev) { dst 174 net/smc/smc_clc.c rc = smc_clc_prfx_set4_rcu(dst, addr->sin_addr.s_addr, prop); dst 177 net/smc/smc_clc.c rc = smc_clc_prfx_set4_rcu(dst, addr6->sin6_addr.s6_addr32[3], dst 181 net/smc/smc_clc.c rc = smc_clc_prfx_set6_rcu(dst, prop, ipv6_prfx); dst 185 net/smc/smc_clc.c dst_release(dst); dst 241 net/smc/smc_clc.c struct dst_entry *dst = sk_dst_get(clcsock->sk); dst 244 net/smc/smc_clc.c if (!dst) { dst 248 net/smc/smc_clc.c if (!dst->dev) { dst 254 net/smc/smc_clc.c rc = smc_clc_prfx_match4_rcu(dst->dev, prop); dst 256 net/smc/smc_clc.c rc = smc_clc_prfx_match6_rcu(dst->dev, prop); dst 259 net/smc/smc_clc.c dst_release(dst); dst 548 net/smc/smc_core.c struct dst_entry *dst = sk_dst_get(clcsock->sk); dst 553 net/smc/smc_core.c if (!dst) { dst 557 net/smc/smc_core.c if (!dst->dev) { dst 562 net/smc/smc_core.c ndev = dst->dev; dst 585 net/smc/smc_core.c dst_release(dst); dst 861 net/smc/smc_pnet.c struct dst_entry *dst = sk_dst_get(sk); dst 865 net/smc/smc_pnet.c if (!dst) dst 867 net/smc/smc_pnet.c if (!dst->dev) dst 870 net/smc/smc_pnet.c smc_pnet_find_roce_by_pnetid(dst->dev, ini); dst 873 net/smc/smc_pnet.c dst_release(dst); dst 880 net/smc/smc_pnet.c struct dst_entry *dst = sk_dst_get(sk); dst 883 net/smc/smc_pnet.c if (!dst) dst 885 net/smc/smc_pnet.c if (!dst->dev) dst 888 net/smc/smc_pnet.c smc_pnet_find_ism_by_pnetid(dst->dev, ini); dst 891 net/smc/smc_pnet.c dst_release(dst); dst 123 net/sunrpc/auth_gss/svcauth_gss.c static int dup_to_netobj(struct xdr_netobj *dst, char *src, int len) dst 125 net/sunrpc/auth_gss/svcauth_gss.c dst->len = len; dst 126 net/sunrpc/auth_gss/svcauth_gss.c dst->data = (len ? 
kmemdup(src, len, GFP_KERNEL) : NULL); dst 127 net/sunrpc/auth_gss/svcauth_gss.c if (len && !dst->data) dst 132 net/sunrpc/auth_gss/svcauth_gss.c static inline int dup_netobj(struct xdr_netobj *dst, struct xdr_netobj *src) dst 134 net/sunrpc/auth_gss/svcauth_gss.c return dup_to_netobj(dst, src->data, src->len); dst 105 net/sunrpc/rpc_pipe.c char __user *dst, size_t buflen) dst 111 net/sunrpc/rpc_pipe.c left = copy_to_user(dst, data, mlen); dst 42 net/sunrpc/sunrpc.h struct dst_entry *dst; dst 45 net/sunrpc/sunrpc.h dst = rcu_dereference(sk->sk_dst_cache); dst 46 net/sunrpc/sunrpc.h if (dst && dst->dev && dst 47 net/sunrpc/sunrpc.h (dst->dev->features & NETIF_F_LOOPBACK)) dst 1691 net/sunrpc/svc.c char *result, *dst; dst 1697 net/sunrpc/svc.c dst = result; dst 1702 net/sunrpc/svc.c memcpy(dst, first->iov_base, len); dst 1703 net/sunrpc/svc.c dst += len; dst 1709 net/sunrpc/svc.c memcpy(dst, p, len); dst 1710 net/sunrpc/svc.c dst += len; dst 1713 net/sunrpc/svc.c *dst = '\0'; dst 964 net/sunrpc/svcsock.c struct kvec *src, *dst; dst 985 net/sunrpc/svcsock.c dst = &req->rq_private_buf.head[0]; dst 987 net/sunrpc/svcsock.c if (dst->iov_len < src->iov_len) dst 989 net/sunrpc/svcsock.c memcpy(dst->iov_base, src->iov_base, src->iov_len); dst 31 net/sunrpc/xprtrdma/svc_rdma_backchannel.c struct kvec *dst, *src = &rcvbuf->head[0]; dst 61 net/sunrpc/xprtrdma/svc_rdma_backchannel.c dst = &req->rq_private_buf.head[0]; dst 63 net/sunrpc/xprtrdma/svc_rdma_backchannel.c if (dst->iov_len < len) dst 65 net/sunrpc/xprtrdma/svc_rdma_backchannel.c memcpy(dst->iov_base, p, len); dst 361 net/sunrpc/xprtrdma/svc_rdma_sendto.c static unsigned int xdr_encode_write_chunk(__be32 *dst, __be32 *src, dst 368 net/sunrpc/xprtrdma/svc_rdma_sendto.c *dst++ = *src++; dst 372 net/sunrpc/xprtrdma/svc_rdma_sendto.c *dst++ = *src++; dst 376 net/sunrpc/xprtrdma/svc_rdma_sendto.c *dst++ = *src++; dst 382 net/sunrpc/xprtrdma/svc_rdma_sendto.c *dst = *src; dst 386 net/sunrpc/xprtrdma/svc_rdma_sendto.c *dst = cpu_to_be32(remaining); dst 389 net/sunrpc/xprtrdma/svc_rdma_sendto.c dst++; src++; dst 392 net/sunrpc/xprtrdma/svc_rdma_sendto.c *dst++ = *src++; dst 393 net/sunrpc/xprtrdma/svc_rdma_sendto.c *dst++ = *src++; dst 582 net/sunrpc/xprtrdma/svc_rdma_sendto.c unsigned char *dst, *tailbase; dst 585 net/sunrpc/xprtrdma/svc_rdma_sendto.c dst = ctxt->sc_xprt_buf; dst 586 net/sunrpc/xprtrdma/svc_rdma_sendto.c dst += ctxt->sc_sges[0].length; dst 588 net/sunrpc/xprtrdma/svc_rdma_sendto.c memcpy(dst, xdr->head[0].iov_base, xdr->head[0].iov_len); dst 589 net/sunrpc/xprtrdma/svc_rdma_sendto.c dst += xdr->head[0].iov_len; dst 612 net/sunrpc/xprtrdma/svc_rdma_sendto.c memcpy(dst, page_address(*ppages), len); dst 614 net/sunrpc/xprtrdma/svc_rdma_sendto.c dst += len; dst 620 net/sunrpc/xprtrdma/svc_rdma_sendto.c memcpy(dst, tailbase, taillen); dst 284 net/tipc/bcast.c struct tipc_dest *dst, *tmp; dst 291 net/tipc/bcast.c list_for_each_entry_safe(dst, tmp, &dests->list, list) { dst 292 net/tipc/bcast.c dnode = dst->node; dst 518 net/tipc/bearer.c struct tipc_media_addr *dst) dst 533 net/tipc/bearer.c b->media->send_msg(net, skb, b, dst); dst 232 net/tipc/bearer.h struct tipc_media_addr *dst); dst 100 net/tipc/discover.c static void tipc_disc_msg_xmit(struct net *net, u32 mtyp, u32 dst, dst 114 net/tipc/discover.c msg_set_dest_domain(hdr, dst); dst 142 net/tipc/discover.c u32 dst, u32 src, dst 157 net/tipc/discover.c if (dst != tn->trial_addr) dst 201 net/tipc/discover.c u32 dst = msg_dest_domain(hdr); dst 230 net/tipc/discover.c if 
(tipc_disc_addr_trial_msg(b->disc, &maddr, b, dst, dst 240 net/tipc/discover.c if (!tipc_in_scope(legacy, dst, self)) dst 738 net/tipc/msg.c bool tipc_msg_pskb_copy(u32 dst, struct sk_buff_head *msg, dst 749 net/tipc/msg.c msg_set_destnode(buf_msg(_skb), dst); dst 1071 net/tipc/msg.h bool tipc_msg_pskb_copy(u32 dst, struct sk_buff_head *msg, dst 985 net/tipc/name_table.c struct tipc_dest *dst; dst 987 net/tipc/name_table.c list_for_each_entry(dst, l, list) { dst 988 net/tipc/name_table.c if (dst->node == node && dst->port == port) dst 989 net/tipc/name_table.c return dst; dst 996 net/tipc/name_table.c struct tipc_dest *dst; dst 1001 net/tipc/name_table.c dst = kmalloc(sizeof(*dst), GFP_ATOMIC); dst 1002 net/tipc/name_table.c if (unlikely(!dst)) dst 1004 net/tipc/name_table.c dst->node = node; dst 1005 net/tipc/name_table.c dst->port = port; dst 1006 net/tipc/name_table.c list_add(&dst->list, l); dst 1012 net/tipc/name_table.c struct tipc_dest *dst; dst 1016 net/tipc/name_table.c dst = list_first_entry(l, typeof(*dst), list); dst 1018 net/tipc/name_table.c *port = dst->port; dst 1020 net/tipc/name_table.c *node = dst->node; dst 1021 net/tipc/name_table.c list_del(&dst->list); dst 1022 net/tipc/name_table.c kfree(dst); dst 1028 net/tipc/name_table.c struct tipc_dest *dst; dst 1030 net/tipc/name_table.c dst = tipc_dest_find(l, node, port); dst 1031 net/tipc/name_table.c if (!dst) dst 1033 net/tipc/name_table.c list_del(&dst->list); dst 1034 net/tipc/name_table.c kfree(dst); dst 1040 net/tipc/name_table.c struct tipc_dest *dst, *tmp; dst 1042 net/tipc/name_table.c list_for_each_entry_safe(dst, tmp, l, list) { dst 1043 net/tipc/name_table.c list_del(&dst->list); dst 1044 net/tipc/name_table.c kfree(dst); dst 1050 net/tipc/name_table.c struct tipc_dest *dst; dst 1053 net/tipc/name_table.c list_for_each_entry(dst, l, list) { dst 1521 net/tipc/node.c u32 dst; dst 1525 net/tipc/node.c dst = n->addr; dst 1526 net/tipc/node.c if (in_own_node(net, dst)) dst 1533 net/tipc/node.c msg_set_destnode(buf_msg(txskb), dst); dst 1534 net/tipc/node.c tipc_node_xmit_skb(net, txskb, dst, 0); dst 2387 net/tipc/socket.c struct sockaddr_tipc *dst = (struct sockaddr_tipc *)dest; dst 2403 net/tipc/socket.c if (dst->family == AF_UNSPEC) { dst 2409 net/tipc/socket.c if (!tipc_sockaddr_is_sane(dst)) { dst 2417 net/tipc/socket.c } else if (dst->addrtype == TIPC_SERVICE_RANGE) { dst 162 net/tipc/udp_media.c struct udp_media_addr *dst, struct dst_cache *cache) dst 169 net/tipc/udp_media.c if (dst->proto == htons(ETH_P_IP)) { dst 174 net/tipc/udp_media.c .daddr = dst->ipv4.s_addr, dst 184 net/tipc/udp_media.c dst_cache_set_ip4(cache, &rt->dst, fl.saddr); dst 187 net/tipc/udp_media.c ttl = ip4_dst_hoplimit(&rt->dst); dst 189 net/tipc/udp_media.c dst->ipv4.s_addr, 0, ttl, 0, src->port, dst 190 net/tipc/udp_media.c dst->port, false, true); dst 196 net/tipc/udp_media.c .daddr = dst->ipv6, dst 211 net/tipc/udp_media.c &src->ipv6, &dst->ipv6, 0, ttl, 0, dst 212 net/tipc/udp_media.c src->port, dst->port, false); dst 229 net/tipc/udp_media.c struct udp_media_addr *dst = (struct udp_media_addr *)&addr->value; dst 248 net/tipc/udp_media.c return tipc_udp_xmit(net, skb, ub, src, dst, dst 327 net/tipc/udp_media.c struct udp_media_addr *dst; dst 329 net/tipc/udp_media.c dst = (struct udp_media_addr *)&b->bcast_addr.value; dst 330 net/tipc/udp_media.c if (tipc_udp_is_mcast_addr(dst)) dst 538 net/tipc/udp_media.c struct udp_media_addr *dst; dst 553 net/tipc/udp_media.c dst = (struct udp_media_addr *)&b->bcast_addr.value; dst 554 
net/tipc/udp_media.c if (__tipc_nl_add_udp_addr(msg->skb, dst, TIPC_NLA_UDP_REMOTE)) dst 622 net/tipc/udp_media.c struct udp_media_addr *dst; dst 634 net/tipc/udp_media.c dst = (struct udp_media_addr *)&b->bcast_addr.value; dst 635 net/tipc/udp_media.c if (tipc_udp_is_mcast_addr(dst)) { dst 110 net/tls/tls_device.c struct dst_entry *dst = sk_dst_get(sk); dst 113 net/tls/tls_device.c if (likely(dst)) { dst 114 net/tls/tls_device.c netdev = dst->dev; dst 118 net/tls/tls_device.c dst_release(dst); dst 154 net/tls/tls_sw.c struct scatterlist *sgout = aead_req->dst; dst 165 net/vmw_vsock/af_vsock.c #define VSOCK_CONN_HASH(src, dst) \ dst 166 net/vmw_vsock/af_vsock.c (((src)->svm_cid ^ (dst)->svm_port) % VSOCK_HASH_SIZE) dst 167 net/vmw_vsock/af_vsock.c #define vsock_connected_sockets(src, dst) \ dst 168 net/vmw_vsock/af_vsock.c (&vsock_connected_table[VSOCK_CONN_HASH(src, dst)]) dst 241 net/vmw_vsock/af_vsock.c struct sockaddr_vm *dst) dst 245 net/vmw_vsock/af_vsock.c list_for_each_entry(vsk, vsock_connected_sockets(src, dst), dst 248 net/vmw_vsock/af_vsock.c dst->svm_port == vsk->local_addr.svm_port) { dst 308 net/vmw_vsock/af_vsock.c struct sockaddr_vm *dst) dst 313 net/vmw_vsock/af_vsock.c sk = __vsock_find_connected_socket(src, dst); dst 1065 net/vmw_vsock/virtio_transport_common.c struct sockaddr_vm src, dst; dst 1072 net/vmw_vsock/virtio_transport_common.c vsock_addr_init(&dst, le64_to_cpu(pkt->hdr.dst_cid), dst 1076 net/vmw_vsock/virtio_transport_common.c dst.svm_cid, dst.svm_port, dst 1092 net/vmw_vsock/virtio_transport_common.c sk = vsock_find_connected_socket(&src, &dst); dst 1094 net/vmw_vsock/virtio_transport_common.c sk = vsock_find_bound_socket(&dst); dst 1108 net/vmw_vsock/virtio_transport_common.c vsk->local_addr.svm_cid = dst.svm_cid; dst 120 net/vmw_vsock/vmci_transport.c struct sockaddr_vm *dst, dst 133 net/vmw_vsock/vmci_transport.c pkt->dg.dst = vmci_make_handle(dst->svm_cid, dst 134 net/vmw_vsock/vmci_transport.c vmci_transport_peer_rid(dst->svm_cid)); dst 139 net/vmw_vsock/vmci_transport.c pkt->dst_port = dst->svm_port; dst 186 net/vmw_vsock/vmci_transport.c vsock_addr_init(local, pkt->dg.dst.context, pkt->dst_port); dst 193 net/vmw_vsock/vmci_transport.c struct sockaddr_vm *dst, dst 204 net/vmw_vsock/vmci_transport.c vmci_transport_packet_init(pkt, src, dst, type, size, mode, wait, dst 222 net/vmw_vsock/vmci_transport.c struct sockaddr_vm src, dst; dst 227 net/vmw_vsock/vmci_transport.c vmci_transport_packet_get_addresses(pkt, &src, &dst); dst 228 net/vmw_vsock/vmci_transport.c return __vmci_transport_send_control_pkt(&reply, &src, &dst, dst 238 net/vmw_vsock/vmci_transport.c struct sockaddr_vm *dst, dst 252 net/vmw_vsock/vmci_transport.c return __vmci_transport_send_control_pkt(&pkt, src, dst, type, dst 260 net/vmw_vsock/vmci_transport.c struct sockaddr_vm *dst, dst 275 net/vmw_vsock/vmci_transport.c err = __vmci_transport_send_control_pkt(pkt, src, dst, type, size, dst 308 net/vmw_vsock/vmci_transport.c static int vmci_transport_send_reset_bh(struct sockaddr_vm *dst, dst 315 net/vmw_vsock/vmci_transport.c dst, src, dst 324 net/vmw_vsock/vmci_transport.c struct sockaddr_vm dst; dst 338 net/vmw_vsock/vmci_transport.c vsock_addr_init(&dst, pkt->dg.src.context, dst 340 net/vmw_vsock/vmci_transport.c dst_ptr = &dst; dst 395 net/vmw_vsock/vmci_transport.c static int vmci_transport_send_invalid_bh(struct sockaddr_vm *dst, dst 399 net/vmw_vsock/vmci_transport.c dst, src, dst 404 net/vmw_vsock/vmci_transport.c int vmci_transport_send_wrote_bh(struct sockaddr_vm *dst, dst 408 
net/vmw_vsock/vmci_transport.c dst, src, dst 413 net/vmw_vsock/vmci_transport.c int vmci_transport_send_read_bh(struct sockaddr_vm *dst, dst 417 net/vmw_vsock/vmci_transport.c dst, src, dst 681 net/vmw_vsock/vmci_transport.c struct sockaddr_vm dst; dst 711 net/vmw_vsock/vmci_transport.c vsock_addr_init(&dst, pkt->dg.dst.context, pkt->dst_port); dst 713 net/vmw_vsock/vmci_transport.c sk = vsock_find_connected_socket(&src, &dst); dst 715 net/vmw_vsock/vmci_transport.c sk = vsock_find_bound_socket(&dst); dst 726 net/vmw_vsock/vmci_transport.c if (vmci_transport_send_reset_bh(&dst, &src, pkt) < 0) dst 739 net/vmw_vsock/vmci_transport.c vmci_transport_send_invalid_bh(&dst, &src); dst 768 net/vmw_vsock/vmci_transport.c vsk->local_addr.svm_cid = dst.svm_cid; dst 772 net/vmw_vsock/vmci_transport.c sk, pkt, true, &dst, &src, dst 783 net/vmw_vsock/vmci_transport.c if (vmci_transport_send_reset_bh(&dst, &src, pkt) < 0) dst 911 net/vmw_vsock/vmci_transport.c vsock_sk(sk)->local_addr.svm_cid = pkt->dg.dst.context; dst 970 net/vmw_vsock/vmci_transport.c vsock_sk(pending)->local_addr.svm_cid = pkt->dg.dst.context; dst 1025 net/vmw_vsock/vmci_transport.c vsock_addr_init(&vpending->local_addr, pkt->dg.dst.context, dst 1409 net/vmw_vsock/vmci_transport.c vsk->local_addr.svm_cid = pkt->dg.dst.context; dst 1731 net/vmw_vsock/vmci_transport.c dg->dst = vmci_make_handle(remote_addr->svm_cid, dst 125 net/vmw_vsock/vmci_transport.h int vmci_transport_send_wrote_bh(struct sockaddr_vm *dst, dst 127 net/vmw_vsock/vmci_transport.h int vmci_transport_send_read_bh(struct sockaddr_vm *dst, dst 108 net/vmw_vsock/vmci_transport_notify.c struct sockaddr_vm *dst, dst 124 net/vmw_vsock/vmci_transport_notify.c sent = vmci_transport_send_wrote_bh(dst, src) > 0; dst 138 net/vmw_vsock/vmci_transport_notify.c struct sockaddr_vm *dst, dst 154 net/vmw_vsock/vmci_transport_notify.c sent = vmci_transport_send_read_bh(dst, src) > 0; dst 168 net/vmw_vsock/vmci_transport_notify.c struct sockaddr_vm *dst, struct sockaddr_vm *src) dst 304 net/vmw_vsock/vmci_transport_notify.c struct sockaddr_vm *dst, struct sockaddr_vm *src) dst 602 net/vmw_vsock/vmci_transport_notify.c struct sockaddr_vm *dst, dst 609 net/vmw_vsock/vmci_transport_notify.c vmci_transport_handle_wrote(sk, pkt, bottom_half, dst, src); dst 613 net/vmw_vsock/vmci_transport_notify.c vmci_transport_handle_read(sk, pkt, bottom_half, dst, src); dst 618 net/vmw_vsock/vmci_transport_notify.c dst, src); dst 624 net/vmw_vsock/vmci_transport_notify.c dst, src); dst 48 net/vmw_vsock/vmci_transport_notify.h bool bottom_half, struct sockaddr_vm *dst, dst 76 net/vmw_vsock/vmci_transport_notify_qstate.c struct sockaddr_vm *dst, struct sockaddr_vm *src) dst 85 net/vmw_vsock/vmci_transport_notify_qstate.c struct sockaddr_vm *dst, struct sockaddr_vm *src) dst 346 net/vmw_vsock/vmci_transport_notify_qstate.c struct sockaddr_vm *dst, dst 353 net/vmw_vsock/vmci_transport_notify_qstate.c vmci_transport_handle_wrote(sk, pkt, bottom_half, dst, src); dst 357 net/vmw_vsock/vmci_transport_notify_qstate.c vmci_transport_handle_read(sk, pkt, bottom_half, dst, src); dst 6101 net/wireless/nl80211.c u8 *dst, u8 *next_hop, dst 6112 net/wireless/nl80211.c nla_put(msg, NL80211_ATTR_MAC, ETH_ALEN, dst) || dst 6165 net/wireless/nl80211.c u8 dst[ETH_ALEN]; dst 6186 net/wireless/nl80211.c err = rdev_dump_mpath(rdev, wdev->netdev, path_idx, dst, dst 6195 net/wireless/nl80211.c wdev->netdev, dst, next_hop, dst 6217 net/wireless/nl80211.c u8 *dst = NULL; dst 6225 net/wireless/nl80211.c dst = 
nla_data(info->attrs[NL80211_ATTR_MAC]); dst 6233 net/wireless/nl80211.c err = rdev_get_mpath(rdev, dev, dst, next_hop, &pinfo); dst 6242 net/wireless/nl80211.c dev, dst, next_hop, &pinfo) < 0) { dst 6254 net/wireless/nl80211.c u8 *dst = NULL; dst 6263 net/wireless/nl80211.c dst = nla_data(info->attrs[NL80211_ATTR_MAC]); dst 6272 net/wireless/nl80211.c return rdev_change_mpath(rdev, dev, dst, next_hop); dst 6279 net/wireless/nl80211.c u8 *dst = NULL; dst 6288 net/wireless/nl80211.c dst = nla_data(info->attrs[NL80211_ATTR_MAC]); dst 6297 net/wireless/nl80211.c return rdev_add_mpath(rdev, dev, dst, next_hop); dst 6304 net/wireless/nl80211.c u8 *dst = NULL; dst 6307 net/wireless/nl80211.c dst = nla_data(info->attrs[NL80211_ATTR_MAC]); dst 6315 net/wireless/nl80211.c return rdev_del_mpath(rdev, dev, dst); dst 6325 net/wireless/nl80211.c u8 *dst = NULL; dst 6333 net/wireless/nl80211.c dst = nla_data(info->attrs[NL80211_ATTR_MAC]); dst 6341 net/wireless/nl80211.c err = rdev_get_mpp(rdev, dev, dst, mpp, &pinfo); dst 6350 net/wireless/nl80211.c dev, dst, mpp, &pinfo) < 0) { dst 6364 net/wireless/nl80211.c u8 dst[ETH_ALEN]; dst 6385 net/wireless/nl80211.c err = rdev_dump_mpp(rdev, wdev->netdev, path_idx, dst, dst 6394 net/wireless/nl80211.c wdev->netdev, dst, mpp, dst 11183 net/wireless/nl80211.c nla_put_in_addr(msg, NL80211_WOWLAN_TCP_DST_IPV4, tcp->dst) || dst 11463 net/wireless/nl80211.c cfg->dst = nla_get_in_addr(tb[NL80211_WOWLAN_TCP_DST_IPV4]); dst 227 net/wireless/rdev-ops.h struct net_device *dev, u8 *dst, u8 *next_hop) dst 230 net/wireless/rdev-ops.h trace_rdev_add_mpath(&rdev->wiphy, dev, dst, next_hop); dst 231 net/wireless/rdev-ops.h ret = rdev->ops->add_mpath(&rdev->wiphy, dev, dst, next_hop); dst 237 net/wireless/rdev-ops.h struct net_device *dev, u8 *dst) dst 240 net/wireless/rdev-ops.h trace_rdev_del_mpath(&rdev->wiphy, dev, dst); dst 241 net/wireless/rdev-ops.h ret = rdev->ops->del_mpath(&rdev->wiphy, dev, dst); dst 247 net/wireless/rdev-ops.h struct net_device *dev, u8 *dst, dst 251 net/wireless/rdev-ops.h trace_rdev_change_mpath(&rdev->wiphy, dev, dst, next_hop); dst 252 net/wireless/rdev-ops.h ret = rdev->ops->change_mpath(&rdev->wiphy, dev, dst, next_hop); dst 258 net/wireless/rdev-ops.h struct net_device *dev, u8 *dst, u8 *next_hop, dst 262 net/wireless/rdev-ops.h trace_rdev_get_mpath(&rdev->wiphy, dev, dst, next_hop); dst 263 net/wireless/rdev-ops.h ret = rdev->ops->get_mpath(&rdev->wiphy, dev, dst, next_hop, pinfo); dst 270 net/wireless/rdev-ops.h struct net_device *dev, u8 *dst, u8 *mpp, dst 275 net/wireless/rdev-ops.h trace_rdev_get_mpp(&rdev->wiphy, dev, dst, mpp); dst 276 net/wireless/rdev-ops.h ret = rdev->ops->get_mpp(&rdev->wiphy, dev, dst, mpp, pinfo); dst 282 net/wireless/rdev-ops.h struct net_device *dev, int idx, u8 *dst, dst 287 net/wireless/rdev-ops.h trace_rdev_dump_mpath(&rdev->wiphy, dev, idx, dst, next_hop); dst 288 net/wireless/rdev-ops.h ret = rdev->ops->dump_mpath(&rdev->wiphy, dev, idx, dst, next_hop, dst 295 net/wireless/rdev-ops.h struct net_device *dev, int idx, u8 *dst, dst 301 net/wireless/rdev-ops.h trace_rdev_dump_mpp(&rdev->wiphy, dev, idx, dst, mpp); dst 302 net/wireless/rdev-ops.h ret = rdev->ops->dump_mpp(&rdev->wiphy, dev, idx, dst, mpp, pinfo); dst 855 net/wireless/trace.h TP_PROTO(struct wiphy *wiphy, struct net_device *netdev, u8 *dst, dst 857 net/wireless/trace.h TP_ARGS(wiphy, netdev, dst, next_hop), dst 861 net/wireless/trace.h MAC_ENTRY(dst) dst 867 net/wireless/trace.h MAC_ASSIGN(dst, dst); dst 871 net/wireless/trace.h WIPHY_PR_ARG, 
NETDEV_PR_ARG, MAC_PR_ARG(dst), dst 876 net/wireless/trace.h TP_PROTO(struct wiphy *wiphy, struct net_device *netdev, u8 *dst, dst 878 net/wireless/trace.h TP_ARGS(wiphy, netdev, dst, next_hop) dst 882 net/wireless/trace.h TP_PROTO(struct wiphy *wiphy, struct net_device *netdev, u8 *dst, dst 884 net/wireless/trace.h TP_ARGS(wiphy, netdev, dst, next_hop) dst 888 net/wireless/trace.h TP_PROTO(struct wiphy *wiphy, struct net_device *netdev, u8 *dst, dst 890 net/wireless/trace.h TP_ARGS(wiphy, netdev, dst, next_hop) dst 895 net/wireless/trace.h u8 *dst, u8 *next_hop), dst 896 net/wireless/trace.h TP_ARGS(wiphy, netdev, _idx, dst, next_hop), dst 900 net/wireless/trace.h MAC_ENTRY(dst) dst 907 net/wireless/trace.h MAC_ASSIGN(dst, dst); dst 913 net/wireless/trace.h WIPHY_PR_ARG, NETDEV_PR_ARG, __entry->idx, MAC_PR_ARG(dst), dst 919 net/wireless/trace.h u8 *dst, u8 *mpp), dst 920 net/wireless/trace.h TP_ARGS(wiphy, netdev, dst, mpp), dst 924 net/wireless/trace.h MAC_ENTRY(dst) dst 930 net/wireless/trace.h MAC_ASSIGN(dst, dst); dst 935 net/wireless/trace.h MAC_PR_ARG(dst), MAC_PR_ARG(mpp)) dst 940 net/wireless/trace.h u8 *dst, u8 *mpp), dst 941 net/wireless/trace.h TP_ARGS(wiphy, netdev, _idx, mpp, dst), dst 945 net/wireless/trace.h MAC_ENTRY(dst) dst 952 net/wireless/trace.h MAC_ASSIGN(dst, dst); dst 958 net/wireless/trace.h WIPHY_PR_ARG, NETDEV_PR_ARG, __entry->idx, MAC_PR_ARG(dst), dst 194 net/xfrm/xfrm_device.c struct dst_entry *dst; dst 217 net/xfrm/xfrm_device.c dst = __xfrm_dst_lookup(net, 0, 0, saddr, daddr, dst 220 net/xfrm/xfrm_device.c if (IS_ERR(dst)) dst 223 net/xfrm/xfrm_device.c dev = dst->dev; dst 226 net/xfrm/xfrm_device.c dst_release(dst); dst 264 net/xfrm/xfrm_device.c struct dst_entry *dst = skb_dst(skb); dst 265 net/xfrm/xfrm_device.c struct xfrm_dst *xdst = (struct xfrm_dst *)dst; dst 271 net/xfrm/xfrm_device.c if ((!dev || (dev == xfrm_dst_path(dst)->dev)) && dst 264 net/xfrm/xfrm_interface.c struct dst_entry *dst = skb_dst(skb); dst 271 net/xfrm/xfrm_interface.c dst_hold(dst); dst 272 net/xfrm/xfrm_interface.c dst = xfrm_lookup_with_ifid(xi->net, dst, fl, NULL, 0, xi->p.if_id); dst 273 net/xfrm/xfrm_interface.c if (IS_ERR(dst)) { dst 274 net/xfrm/xfrm_interface.c err = PTR_ERR(dst); dst 275 net/xfrm/xfrm_interface.c dst = NULL; dst 279 net/xfrm/xfrm_interface.c x = dst->xfrm; dst 286 net/xfrm/xfrm_interface.c tdev = dst->dev; dst 295 net/xfrm/xfrm_interface.c mtu = dst_mtu(dst); dst 309 net/xfrm/xfrm_interface.c dst_release(dst); dst 314 net/xfrm/xfrm_interface.c skb_dst_set(skb, dst); dst 335 net/xfrm/xfrm_interface.c dst_release(dst); dst 343 net/xfrm/xfrm_interface.c struct dst_entry *dst = skb_dst(skb); dst 353 net/xfrm/xfrm_interface.c if (!dst) { dst 356 net/xfrm/xfrm_interface.c dst = ip6_route_output(dev_net(dev), NULL, &fl.u.ip6); dst 357 net/xfrm/xfrm_interface.c if (dst->error) { dst 358 net/xfrm/xfrm_interface.c dst_release(dst); dst 362 net/xfrm/xfrm_interface.c skb_dst_set(skb, dst); dst 368 net/xfrm/xfrm_interface.c if (!dst) { dst 378 net/xfrm/xfrm_interface.c skb_dst_set(skb, &rt->dst); dst 26 net/xfrm/xfrm_output.c struct dst_entry *dst = skb_dst(skb); dst 27 net/xfrm/xfrm_output.c int nhead = dst->header_len + LL_RESERVED_SPACE(dst->dev) dst 29 net/xfrm/xfrm_output.c int ntail = dst->dev->needed_tailroom - skb_tailroom(skb); dst 193 net/xfrm/xfrm_output.c struct dst_entry *dst = skb_dst(skb); dst 226 net/xfrm/xfrm_output.c top_iph->ttl = ip4_dst_hoplimit(xfrm_dst_child(dst)); dst 230 net/xfrm/xfrm_output.c ip_select_ident(dev_net(dst->dev), skb, NULL); dst 
238 net/xfrm/xfrm_output.c struct dst_entry *dst = skb_dst(skb); dst 265 net/xfrm/xfrm_output.c top_iph->hop_limit = ip6_dst_hoplimit(xfrm_dst_child(dst)); dst 409 net/xfrm/xfrm_output.c struct dst_entry *dst = skb_dst(skb); dst 410 net/xfrm/xfrm_output.c struct xfrm_state *x = dst->xfrm; dst 480 net/xfrm/xfrm_output.c dst = skb_dst_pop(skb); dst 481 net/xfrm/xfrm_output.c if (!dst) { dst 486 net/xfrm/xfrm_output.c skb_dst_set(skb, dst); dst 487 net/xfrm/xfrm_output.c x = dst->xfrm; dst 161 net/xfrm/xfrm_policy.c static int stale_bundle(struct dst_entry *dst); dst 256 net/xfrm/xfrm_policy.c struct dst_entry *dst; dst 262 net/xfrm/xfrm_policy.c dst = afinfo->dst_lookup(net, tos, oif, saddr, daddr, mark); dst 266 net/xfrm/xfrm_policy.c return dst; dst 279 net/xfrm/xfrm_policy.c struct dst_entry *dst; dst 290 net/xfrm/xfrm_policy.c dst = __xfrm_dst_lookup(net, tos, oif, saddr, daddr, family, mark); dst 292 net/xfrm/xfrm_policy.c if (!IS_ERR(dst)) { dst 299 net/xfrm/xfrm_policy.c return dst; dst 2486 net/xfrm/xfrm_policy.c struct dst_entry *dst = &xdst->u.dst; dst 2488 net/xfrm/xfrm_policy.c memset(dst + 1, 0, sizeof(*xdst) - sizeof(*dst)); dst 2497 net/xfrm/xfrm_policy.c static void xfrm_init_path(struct xfrm_dst *path, struct dst_entry *dst, dst 2500 net/xfrm/xfrm_policy.c if (dst->ops->family == AF_INET6) { dst 2501 net/xfrm/xfrm_policy.c struct rt6_info *rt = (struct rt6_info *)dst; dst 2511 net/xfrm/xfrm_policy.c xfrm_policy_get_afinfo(xdst->u.dst.ops->family); dst 2534 net/xfrm/xfrm_policy.c struct dst_entry *dst) dst 2556 net/xfrm/xfrm_policy.c dst_hold(dst); dst 2560 net/xfrm/xfrm_policy.c struct dst_entry *dst1 = &xdst->u.dst; dst 2564 net/xfrm/xfrm_policy.c dst_release(dst); dst 2575 net/xfrm/xfrm_policy.c xfrm_dst_set_child(xdst_prev, &xdst->u.dst); dst 2582 net/xfrm/xfrm_policy.c dst_release(dst); dst 2588 net/xfrm/xfrm_policy.c xdst->route = dst; dst 2589 net/xfrm/xfrm_policy.c dst_copy_metrics(dst1, dst); dst 2598 net/xfrm/xfrm_policy.c dst = xfrm_dst_lookup(xfrm[i], tos, fl->flowi_oif, dst 2600 net/xfrm/xfrm_policy.c err = PTR_ERR(dst); dst 2601 net/xfrm/xfrm_policy.c if (IS_ERR(dst)) dst 2604 net/xfrm/xfrm_policy.c dst_hold(dst); dst 2631 net/xfrm/xfrm_policy.c xfrm_dst_set_child(xdst_prev, dst); dst 2632 net/xfrm/xfrm_policy.c xdst0->path = dst; dst 2635 net/xfrm/xfrm_policy.c dev = dst->dev; dst 2639 net/xfrm/xfrm_policy.c xfrm_init_path(xdst0, dst, nfheader_len); dst 2642 net/xfrm/xfrm_policy.c for (xdst_prev = xdst0; xdst_prev != (struct xfrm_dst *)dst; dst 2643 net/xfrm/xfrm_policy.c xdst_prev = (struct xfrm_dst *) xfrm_dst_child(&xdst_prev->u.dst)) { dst 2648 net/xfrm/xfrm_policy.c xdst_prev->u.dst.header_len = header_len; dst 2649 net/xfrm/xfrm_policy.c xdst_prev->u.dst.trailer_len = trailer_len; dst 2650 net/xfrm/xfrm_policy.c header_len -= xdst_prev->u.dst.xfrm->props.header_len; dst 2651 net/xfrm/xfrm_policy.c trailer_len -= xdst_prev->u.dst.xfrm->props.trailer_len; dst 2654 net/xfrm/xfrm_policy.c return &xdst0->u.dst; dst 2661 net/xfrm/xfrm_policy.c dst_release_immediate(&xdst0->u.dst); dst 2720 net/xfrm/xfrm_policy.c struct dst_entry *dst; dst 2734 net/xfrm/xfrm_policy.c dst = xfrm_bundle_create(pols[0], xfrm, bundle, err, fl, dst_orig); dst 2735 net/xfrm/xfrm_policy.c if (IS_ERR(dst)) { dst 2737 net/xfrm/xfrm_policy.c return ERR_CAST(dst); dst 2740 net/xfrm/xfrm_policy.c xdst = (struct xfrm_dst *)dst; dst 2753 net/xfrm/xfrm_policy.c struct dst_entry *dst; dst 2766 net/xfrm/xfrm_policy.c dst = skb_dst(skb); dst 2768 net/xfrm/xfrm_policy.c xfrm_decode_session(skb, 
&fl, dst->ops->family); dst 2771 net/xfrm/xfrm_policy.c dst_hold(xfrm_dst_path(dst)); dst 2772 net/xfrm/xfrm_policy.c dst = xfrm_lookup(net, xfrm_dst_path(dst), &fl, sk, XFRM_LOOKUP_QUEUE); dst 2773 net/xfrm/xfrm_policy.c if (IS_ERR(dst)) dst 2776 net/xfrm/xfrm_policy.c if (dst->flags & DST_XFRM_QUEUE) { dst 2777 net/xfrm/xfrm_policy.c dst_release(dst); dst 2788 net/xfrm/xfrm_policy.c dst_release(dst); dst 2802 net/xfrm/xfrm_policy.c dst = xfrm_lookup(net, xfrm_dst_path(skb_dst(skb)), &fl, skb->sk, 0); dst 2803 net/xfrm/xfrm_policy.c if (IS_ERR(dst)) { dst 2810 net/xfrm/xfrm_policy.c skb_dst_set(skb, dst); dst 2828 net/xfrm/xfrm_policy.c struct dst_entry *dst = skb_dst(skb); dst 2829 net/xfrm/xfrm_policy.c struct xfrm_dst *xdst = (struct xfrm_dst *) dst; dst 2875 net/xfrm/xfrm_policy.c struct dst_entry *dst; dst 2888 net/xfrm/xfrm_policy.c dst = xflo->dst_orig; dst 2889 net/xfrm/xfrm_policy.c dst1 = &xdst->u.dst; dst 2890 net/xfrm/xfrm_policy.c dst_hold(dst); dst 2891 net/xfrm/xfrm_policy.c xdst->route = dst; dst 2893 net/xfrm/xfrm_policy.c dst_copy_metrics(dst1, dst); dst 2902 net/xfrm/xfrm_policy.c dst_hold(dst); dst 2903 net/xfrm/xfrm_policy.c xfrm_dst_set_child(xdst, dst); dst 2904 net/xfrm/xfrm_policy.c xdst->path = dst; dst 2906 net/xfrm/xfrm_policy.c xfrm_init_path((struct xfrm_dst *)dst1, dst, 0); dst 2909 net/xfrm/xfrm_policy.c dev = dst->dev; dst 3022 net/xfrm/xfrm_policy.c struct dst_entry *dst, *route; dst 3027 net/xfrm/xfrm_policy.c dst = NULL; dst 3093 net/xfrm/xfrm_policy.c dst = &xdst->u.dst; dst 3136 net/xfrm/xfrm_policy.c dst_release(dst); dst 3137 net/xfrm/xfrm_policy.c dst = dst_orig; dst 3141 net/xfrm/xfrm_policy.c if (dst && dst->xfrm && dst 3142 net/xfrm/xfrm_policy.c dst->xfrm->props.mode == XFRM_MODE_TUNNEL) dst 3143 net/xfrm/xfrm_policy.c dst->flags |= DST_XFRM_TUNNEL; dst 3144 net/xfrm/xfrm_policy.c return dst; dst 3148 net/xfrm/xfrm_policy.c dst = dst_orig; dst 3153 net/xfrm/xfrm_policy.c dst_release(dst); dst 3182 net/xfrm/xfrm_policy.c struct dst_entry *dst = xfrm_lookup(net, dst_orig, fl, sk, dst 3186 net/xfrm/xfrm_policy.c if (IS_ERR(dst) && PTR_ERR(dst) == -EREMOTE) dst 3189 net/xfrm/xfrm_policy.c if (IS_ERR(dst)) dst 3192 net/xfrm/xfrm_policy.c return dst; dst 3672 net/xfrm/xfrm_policy.c struct dst_entry *dst; dst 3686 net/xfrm/xfrm_policy.c dst = xfrm_lookup(net, skb_dst(skb), &fl, NULL, XFRM_LOOKUP_QUEUE); dst 3687 net/xfrm/xfrm_policy.c if (IS_ERR(dst)) { dst 3689 net/xfrm/xfrm_policy.c dst = NULL; dst 3691 net/xfrm/xfrm_policy.c skb_dst_set(skb, dst); dst 3698 net/xfrm/xfrm_policy.c static struct dst_entry *xfrm_dst_check(struct dst_entry *dst, u32 cookie) dst 3721 net/xfrm/xfrm_policy.c if (dst->obsolete < 0 && !stale_bundle(dst)) dst 3722 net/xfrm/xfrm_policy.c return dst; dst 3727 net/xfrm/xfrm_policy.c static int stale_bundle(struct dst_entry *dst) dst 3729 net/xfrm/xfrm_policy.c return !xfrm_bundle_ok((struct xfrm_dst *)dst); dst 3732 net/xfrm/xfrm_policy.c void xfrm_dst_ifdown(struct dst_entry *dst, struct net_device *dev) dst 3734 net/xfrm/xfrm_policy.c while ((dst = xfrm_dst_child(dst)) && dst->xfrm && dst->dev == dev) { dst 3735 net/xfrm/xfrm_policy.c dst->dev = dev_net(dev)->loopback_dev; dst 3736 net/xfrm/xfrm_policy.c dev_hold(dst->dev); dst 3747 net/xfrm/xfrm_policy.c static struct dst_entry *xfrm_negative_advice(struct dst_entry *dst) dst 3749 net/xfrm/xfrm_policy.c if (dst) { dst 3750 net/xfrm/xfrm_policy.c if (dst->obsolete) { dst 3751 net/xfrm/xfrm_policy.c dst_release(dst); dst 3752 net/xfrm/xfrm_policy.c dst = NULL; dst 3755 
net/xfrm/xfrm_policy.c return dst; dst 3763 net/xfrm/xfrm_policy.c struct dst_entry *dst; dst 3765 net/xfrm/xfrm_policy.c dst = &xdst->u.dst; dst 3766 net/xfrm/xfrm_policy.c pmtu = dst_mtu(xfrm_dst_child(dst)); dst 3769 net/xfrm/xfrm_policy.c pmtu = xfrm_state_mtu(dst->xfrm, pmtu); dst 3777 net/xfrm/xfrm_policy.c dst_metric_set(dst, RTAX_MTU, pmtu); dst 3788 net/xfrm/xfrm_policy.c struct dst_entry *dst = &first->u.dst; dst 3793 net/xfrm/xfrm_policy.c if (!dst_check(xfrm_dst_path(dst), ((struct xfrm_dst *)dst)->path_cookie) || dst 3794 net/xfrm/xfrm_policy.c (dst->dev && !netif_running(dst->dev))) dst 3797 net/xfrm/xfrm_policy.c if (dst->flags & DST_XFRM_QUEUE) dst 3802 net/xfrm/xfrm_policy.c struct xfrm_dst *xdst = (struct xfrm_dst *)dst; dst 3804 net/xfrm/xfrm_policy.c if (dst->xfrm->km.state != XFRM_STATE_VALID) dst 3806 net/xfrm/xfrm_policy.c if (xdst->xfrm_genid != dst->xfrm->genid) dst 3814 net/xfrm/xfrm_policy.c mtu = dst_mtu(xfrm_dst_child(dst)); dst 3828 net/xfrm/xfrm_policy.c dst = xfrm_dst_child(dst); dst 3829 net/xfrm/xfrm_policy.c } while (dst->xfrm); dst 3837 net/xfrm/xfrm_policy.c dst = &xdst->u.dst; dst 3839 net/xfrm/xfrm_policy.c mtu = xfrm_state_mtu(dst->xfrm, mtu); dst 3842 net/xfrm/xfrm_policy.c dst_metric_set(dst, RTAX_MTU, mtu); dst 3853 net/xfrm/xfrm_policy.c static unsigned int xfrm_default_advmss(const struct dst_entry *dst) dst 3855 net/xfrm/xfrm_policy.c return dst_metric_advmss(xfrm_dst_path(dst)); dst 3858 net/xfrm/xfrm_policy.c static unsigned int xfrm_mtu(const struct dst_entry *dst) dst 3860 net/xfrm/xfrm_policy.c unsigned int mtu = dst_metric_raw(dst, RTAX_MTU); dst 3862 net/xfrm/xfrm_policy.c return mtu ? : dst_mtu(xfrm_dst_path(dst)); dst 3865 net/xfrm/xfrm_policy.c static const void *xfrm_get_dst_nexthop(const struct dst_entry *dst, dst 3868 net/xfrm/xfrm_policy.c while (dst->xfrm) { dst 3869 net/xfrm/xfrm_policy.c const struct xfrm_state *xfrm = dst->xfrm; dst 3871 net/xfrm/xfrm_policy.c dst = xfrm_dst_child(dst); dst 3883 net/xfrm/xfrm_policy.c static struct neighbour *xfrm_neigh_lookup(const struct dst_entry *dst, dst 3887 net/xfrm/xfrm_policy.c const struct dst_entry *path = xfrm_dst_path(dst); dst 3890 net/xfrm/xfrm_policy.c daddr = xfrm_get_dst_nexthop(dst, daddr); dst 3894 net/xfrm/xfrm_policy.c static void xfrm_confirm_neigh(const struct dst_entry *dst, const void *daddr) dst 3896 net/xfrm/xfrm_policy.c const struct dst_entry *path = xfrm_dst_path(dst); dst 3898 net/xfrm/xfrm_policy.c daddr = xfrm_get_dst_nexthop(dst, daddr); dst 1773 net/xfrm/xfrm_state.c __xfrm6_sort(void **dst, void **src, int n, dst 1791 net/xfrm/xfrm_state.c dst[count[class[i] - 1]++] = src[i]; dst 1856 net/xfrm/xfrm_state.c __xfrm6_sort(void **dst, void **src, int n, dst 1862 net/xfrm/xfrm_state.c dst[i] = src[i]; dst 1867 net/xfrm/xfrm_state.c xfrm_tmpl_sort(struct xfrm_tmpl **dst, struct xfrm_tmpl **src, int n, dst 1873 net/xfrm/xfrm_state.c __xfrm6_sort((void **)dst, (void **)src, n, dst 1877 net/xfrm/xfrm_state.c dst[i] = src[i]; dst 1881 net/xfrm/xfrm_state.c xfrm_state_sort(struct xfrm_state **dst, struct xfrm_state **src, int n, dst 1887 net/xfrm/xfrm_state.c __xfrm6_sort((void **)dst, (void **)src, n, dst 1891 net/xfrm/xfrm_state.c dst[i] = src[i]; dst 19 samples/bpf/sockex2_kern.c __be32 dst; dst 73 samples/bpf/sockex2_kern.c flow->dst = load_word(skb, nhoff + offsetof(struct iphdr, daddr)); dst 92 samples/bpf/sockex2_kern.c flow->dst = ipv6_addr_hash(skb, dst 209 samples/bpf/sockex2_kern.c key = flow.dst; dst 66 samples/bpf/sockex3_kern.c __be32 dst; dst 206 
samples/bpf/sockex3_kern.c g->flow.dst = load_word(skb, nhoff + offsetof(struct iphdr, daddr)); dst 231 samples/bpf/sockex3_kern.c g->flow.dst = ipv6_addr_hash(skb, dst 18 samples/bpf/sockex3_user.c __be32 dst; dst 77 samples/bpf/sockex3_user.c inet_ntoa((struct in_addr){htonl(next_key.dst)}), dst 27 samples/bpf/xdp2_kern.c unsigned short dst[3]; dst 29 samples/bpf/xdp2_kern.c dst[0] = p[0]; dst 30 samples/bpf/xdp2_kern.c dst[1] = p[1]; dst 31 samples/bpf/xdp2_kern.c dst[2] = p[2]; dst 35 samples/bpf/xdp2_kern.c p[3] = dst[0]; dst 36 samples/bpf/xdp2_kern.c p[4] = dst[1]; dst 37 samples/bpf/xdp2_kern.c p[5] = dst[2]; dst 81 samples/bpf/xdp_fwd_kern.c struct in6_addr *dst = (struct in6_addr *) fib_params.ipv6_dst; dst 97 samples/bpf/xdp_fwd_kern.c *dst = ip6h->daddr; dst 42 samples/bpf/xdp_redirect_kern.c unsigned short dst[3]; dst 44 samples/bpf/xdp_redirect_kern.c dst[0] = p[0]; dst 45 samples/bpf/xdp_redirect_kern.c dst[1] = p[1]; dst 46 samples/bpf/xdp_redirect_kern.c dst[2] = p[2]; dst 50 samples/bpf/xdp_redirect_kern.c p[3] = dst[0]; dst 51 samples/bpf/xdp_redirect_kern.c p[4] = dst[1]; dst 52 samples/bpf/xdp_redirect_kern.c p[5] = dst[2]; dst 42 samples/bpf/xdp_redirect_map_kern.c unsigned short dst[3]; dst 44 samples/bpf/xdp_redirect_map_kern.c dst[0] = p[0]; dst 45 samples/bpf/xdp_redirect_map_kern.c dst[1] = p[1]; dst 46 samples/bpf/xdp_redirect_map_kern.c dst[2] = p[2]; dst 50 samples/bpf/xdp_redirect_map_kern.c p[3] = dst[0]; dst 51 samples/bpf/xdp_redirect_map_kern.c p[4] = dst[1]; dst 52 samples/bpf/xdp_redirect_map_kern.c p[5] = dst[2]; dst 35 samples/bpf/xdp_router_ipv4_kern.c __be32 dst; dst 85 samples/bpf/xdp_router_ipv4_kern.c static inline void set_src_dst_mac(void *data, void *src, void *dst) dst 88 samples/bpf/xdp_router_ipv4_kern.c unsigned short *dest = dst; dst 134 samples/bpf/xdp_router_ipv4_user.c __be32 dst, gw; dst 139 samples/bpf/xdp_router_ipv4_user.c __be32 dst; dst 190 samples/bpf/xdp_router_ipv4_user.c route.dst = atoi(dsts); dst 219 samples/bpf/xdp_router_ipv4_user.c direct_entry.arp.dst = 0; dst 223 samples/bpf/xdp_router_ipv4_user.c &route.dst) == 0); dst 226 samples/bpf/xdp_router_ipv4_user.c &route.dst, dst 228 samples/bpf/xdp_router_ipv4_user.c direct_entry.arp.dst = route.dst; dst 230 samples/bpf/xdp_router_ipv4_user.c &route.dst, dst 235 samples/bpf/xdp_router_ipv4_user.c prefix_key->data[i] = (route.dst >> i * 8) & 0xff; dst 386 samples/bpf/xdp_router_ipv4_user.c __be32 dst; dst 416 samples/bpf/xdp_router_ipv4_user.c arp_entry.dst = atoi(dsts); dst 418 samples/bpf/xdp_router_ipv4_user.c printf("%x\t\t%llx\n", arp_entry.dst, arp_entry.mac); dst 421 samples/bpf/xdp_router_ipv4_user.c &arp_entry.dst, dst 424 samples/bpf/xdp_router_ipv4_user.c direct_entry.arp.dst = 0; dst 427 samples/bpf/xdp_router_ipv4_user.c direct_entry.arp.dst = arp_entry.dst; dst 431 samples/bpf/xdp_router_ipv4_user.c &arp_entry.dst, dst 438 samples/bpf/xdp_router_ipv4_user.c &arp_entry.dst) == 0); dst 441 samples/bpf/xdp_router_ipv4_user.c &arp_entry.dst, dst 61 samples/bpf/xdp_rxq_info_kern.c unsigned short dst[3]; dst 63 samples/bpf/xdp_rxq_info_kern.c dst[0] = p[0]; dst 64 samples/bpf/xdp_rxq_info_kern.c dst[1] = p[1]; dst 65 samples/bpf/xdp_rxq_info_kern.c dst[2] = p[2]; dst 69 samples/bpf/xdp_rxq_info_kern.c p[3] = dst[0]; dst 70 samples/bpf/xdp_rxq_info_kern.c p[4] = dst[1]; dst 71 samples/bpf/xdp_rxq_info_kern.c p[5] = dst[2]; dst 57 samples/rpmsg/rpmsg_client_sample.c rpdev->src, rpdev->dst); dst 72 scripts/kconfig/gconf.c static void update_tree(struct menu *src, 
GtkTreeIter * dst); dst 1247 scripts/kconfig/gconf.c static void update_tree(struct menu *src, GtkTreeIter * dst) dst 1260 scripts/kconfig/gconf.c valid = gtk_tree_model_iter_children(model2, child2, dst); dst 1284 scripts/kconfig/gconf.c if (gtktree_iter_find_node(dst, menu1) != NULL) { dst 1298 scripts/kconfig/gconf.c if (gtktree_iter_find_node(dst, menu1) == NULL) { // add node dst 1305 scripts/kconfig/gconf.c dst, sibling); dst 144 security/keys/dh.c u8 *dst, unsigned int dlen, unsigned int zlen) dst 149 security/keys/dh.c u8 *dst_orig = dst; dst 183 security/keys/dh.c err = crypto_shash_final(desc, dst); dst 188 security/keys/dh.c dst += h; dst 31 security/safesetid/lsm.c kuid_t src, kuid_t dst) dst 39 security/safesetid/lsm.c if (uid_eq(rule->dst_uid, dst)) dst 50 security/safesetid/lsm.c static enum sid_policy_type setuid_policy_lookup(kuid_t src, kuid_t dst) dst 58 security/safesetid/lsm.c result = _setuid_policy_lookup(pol, src, dst); dst 49 security/safesetid/lsm.h kuid_t src, kuid_t dst); dst 41 security/selinux/ss/context.h static inline int mls_context_cpy(struct context *dst, struct context *src) dst 45 security/selinux/ss/context.h dst->range.level[0].sens = src->range.level[0].sens; dst 46 security/selinux/ss/context.h rc = ebitmap_cpy(&dst->range.level[0].cat, &src->range.level[0].cat); dst 50 security/selinux/ss/context.h dst->range.level[1].sens = src->range.level[1].sens; dst 51 security/selinux/ss/context.h rc = ebitmap_cpy(&dst->range.level[1].cat, &src->range.level[1].cat); dst 53 security/selinux/ss/context.h ebitmap_destroy(&dst->range.level[0].cat); dst 61 security/selinux/ss/context.h static inline int mls_context_cpy_low(struct context *dst, struct context *src) dst 65 security/selinux/ss/context.h dst->range.level[0].sens = src->range.level[0].sens; dst 66 security/selinux/ss/context.h rc = ebitmap_cpy(&dst->range.level[0].cat, &src->range.level[0].cat); dst 70 security/selinux/ss/context.h dst->range.level[1].sens = src->range.level[0].sens; dst 71 security/selinux/ss/context.h rc = ebitmap_cpy(&dst->range.level[1].cat, &src->range.level[0].cat); dst 73 security/selinux/ss/context.h ebitmap_destroy(&dst->range.level[0].cat); dst 81 security/selinux/ss/context.h static inline int mls_context_cpy_high(struct context *dst, struct context *src) dst 85 security/selinux/ss/context.h dst->range.level[0].sens = src->range.level[1].sens; dst 86 security/selinux/ss/context.h rc = ebitmap_cpy(&dst->range.level[0].cat, &src->range.level[1].cat); dst 90 security/selinux/ss/context.h dst->range.level[1].sens = src->range.level[1].sens; dst 91 security/selinux/ss/context.h rc = ebitmap_cpy(&dst->range.level[1].cat, &src->range.level[1].cat); dst 93 security/selinux/ss/context.h ebitmap_destroy(&dst->range.level[0].cat); dst 118 security/selinux/ss/context.h static inline int context_cpy(struct context *dst, struct context *src) dst 122 security/selinux/ss/context.h dst->user = src->user; dst 123 security/selinux/ss/context.h dst->role = src->role; dst 124 security/selinux/ss/context.h dst->type = src->type; dst 126 security/selinux/ss/context.h dst->str = kstrdup(src->str, GFP_ATOMIC); dst 127 security/selinux/ss/context.h if (!dst->str) dst 129 security/selinux/ss/context.h dst->len = src->len; dst 131 security/selinux/ss/context.h dst->str = NULL; dst 132 security/selinux/ss/context.h dst->len = 0; dst 134 security/selinux/ss/context.h rc = mls_context_cpy(dst, src); dst 136 security/selinux/ss/context.h kfree(dst->str); dst 52 security/selinux/ss/ebitmap.c int 
ebitmap_cpy(struct ebitmap *dst, struct ebitmap *src) dst 56 security/selinux/ss/ebitmap.c ebitmap_init(dst); dst 62 security/selinux/ss/ebitmap.c ebitmap_destroy(dst); dst 71 security/selinux/ss/ebitmap.c dst->node = new; dst 76 security/selinux/ss/ebitmap.c dst->highbit = src->highbit; dst 126 security/selinux/ss/ebitmap.h int ebitmap_cpy(struct ebitmap *dst, struct ebitmap *src); dst 248 security/selinux/ss/sidtab.c struct context *dst, *dst_convert; dst 292 security/selinux/ss/sidtab.c dst = sidtab_do_lookup(s, count, 1); dst 293 security/selinux/ss/sidtab.c if (!dst) dst 296 security/selinux/ss/sidtab.c rc = context_cpy(dst, context); dst 308 security/selinux/ss/sidtab.c context_destroy(dst); dst 314 security/selinux/ss/sidtab.c context_destroy(dst); dst 214 security/selinux/xfrm.c struct dst_entry *dst = skb_dst(skb); dst 217 security/selinux/xfrm.c if (dst == NULL) dst 219 security/selinux/xfrm.c x = dst->xfrm; dst 441 security/selinux/xfrm.c struct dst_entry *dst; dst 455 security/selinux/xfrm.c dst = skb_dst(skb); dst 456 security/selinux/xfrm.c if (dst) { dst 459 security/selinux/xfrm.c for (iter = dst; iter != NULL; iter = xfrm_dst_child(iter)) { dst 642 security/smack/smack_lsm.c struct smack_mnt_opts *dst, *src = src_fc->security; dst 650 security/smack/smack_lsm.c dst = fc->security; dst 653 security/smack/smack_lsm.c dst->fsdefault = kstrdup(src->fsdefault, GFP_KERNEL); dst 654 security/smack/smack_lsm.c if (!dst->fsdefault) dst 658 security/smack/smack_lsm.c dst->fsfloor = kstrdup(src->fsfloor, GFP_KERNEL); dst 659 security/smack/smack_lsm.c if (!dst->fsfloor) dst 663 security/smack/smack_lsm.c dst->fshat = kstrdup(src->fshat, GFP_KERNEL); dst 664 security/smack/smack_lsm.c if (!dst->fshat) dst 668 security/smack/smack_lsm.c dst->fsroot = kstrdup(src->fsroot, GFP_KERNEL); dst 669 security/smack/smack_lsm.c if (!dst->fsroot) dst 673 security/smack/smack_lsm.c dst->fstransmute = kstrdup(src->fstransmute, GFP_KERNEL); dst 674 security/smack/smack_lsm.c if (!dst->fstransmute) dst 1080 sound/core/control.c char *dst = ue->elem_data + dst 1083 sound/core/control.c change = memcmp(&ucontrol->value, dst, size) != 0; dst 1085 sound/core/control.c memcpy(dst, &ucontrol->value, size); dst 22 sound/core/hwdep_compat.c struct snd_hwdep_dsp_image __user *dst; dst 26 sound/core/hwdep_compat.c dst = compat_alloc_user_space(sizeof(*dst)); dst 29 sound/core/hwdep_compat.c if (copy_in_user(dst, src, 4 + 64)) dst 32 sound/core/hwdep_compat.c put_user(compat_ptr(ptr), &dst->image)) dst 35 sound/core/hwdep_compat.c put_user(val, &dst->length)) dst 38 sound/core/hwdep_compat.c put_user(val, &dst->driver_data)) dst 41 sound/core/hwdep_compat.c return snd_hwdep_dsp_load(hw, dst); dst 23 sound/core/memory.c int copy_to_user_fromio(void __user *dst, const volatile void __iomem *src, size_t count) dst 26 sound/core/memory.c return copy_to_user(dst, (const void __force*)src, count) ? -EFAULT : 0; dst 34 sound/core/memory.c if (copy_to_user(dst, buf, c)) dst 37 sound/core/memory.c dst += c; dst 55 sound/core/memory.c int copy_from_user_toio(volatile void __iomem *dst, const void __user *src, size_t count) dst 58 sound/core/memory.c return copy_from_user((void __force *)dst, src, count) ? 
-EFAULT : 0; dst 67 sound/core/memory.c memcpy_toio(dst, buf, c); dst 69 sound/core/memory.c dst += c; dst 43 sound/core/oss/linear.c unsigned char *dst, unsigned char *src) dst 52 sound/core/oss/linear.c memcpy(dst, p + data->dst_ofs, data->dst_bytes); dst 65 sound/core/oss/linear.c char *dst; dst 76 sound/core/oss/linear.c dst = dst_channels[channel].area.addr + dst_channels[channel].area.first / 8; dst 81 sound/core/oss/linear.c do_convert(data, dst, src); dst 83 sound/core/oss/linear.c dst += dst_step; dst 157 sound/core/oss/mulaw.c unsigned char *dst, u16 sample) dst 163 sound/core/oss/mulaw.c memset(dst, 0, data->native_bytes); dst 164 sound/core/oss/mulaw.c memcpy(dst + data->native_ofs, (char *)&sample + data->copy_ofs, dst 178 sound/core/oss/mulaw.c char *dst; dst 189 sound/core/oss/mulaw.c dst = dst_channels[channel].area.addr + dst_channels[channel].area.first / 8; dst 195 sound/core/oss/mulaw.c cvt_s16_to_native(data, dst, sample); dst 197 sound/core/oss/mulaw.c dst += dst_step; dst 224 sound/core/oss/mulaw.c char *dst; dst 235 sound/core/oss/mulaw.c dst = dst_channels[channel].area.addr + dst_channels[channel].area.first / 8; dst 241 sound/core/oss/mulaw.c *dst = linear2ulaw(sample); dst 243 sound/core/oss/mulaw.c dst += dst_step; dst 691 sound/core/oss/pcm_plugin.c unsigned char *dst; dst 697 sound/core/oss/pcm_plugin.c dst = dst_area->addr + (dst_area->first + dst_area->step * dst_offset) / 8; dst 702 sound/core/oss/pcm_plugin.c return snd_pcm_format_set_silence(format, dst, samples); dst 713 sound/core/oss/pcm_plugin.c *dst &= 0xf0; dst 715 sound/core/oss/pcm_plugin.c *dst &= 0x0f; dst 716 sound/core/oss/pcm_plugin.c dst += dst_step; dst 719 sound/core/oss/pcm_plugin.c dst++; dst 726 sound/core/oss/pcm_plugin.c memcpy(dst, silence, width); dst 727 sound/core/oss/pcm_plugin.c dst += dst_step; dst 738 sound/core/oss/pcm_plugin.c char *src, *dst; dst 744 sound/core/oss/pcm_plugin.c dst = dst_area->addr + (dst_area->first + dst_area->step * dst_offset) / 8; dst 753 sound/core/oss/pcm_plugin.c memcpy(dst, src, bytes); dst 771 sound/core/oss/pcm_plugin.c *dst = (*dst & 0xf0) | srcval; dst 773 sound/core/oss/pcm_plugin.c *dst = (*dst & 0x0f) | (srcval << 4); dst 780 sound/core/oss/pcm_plugin.c dst += dst_step; dst 783 sound/core/oss/pcm_plugin.c dst++; dst 790 sound/core/oss/pcm_plugin.c memcpy(dst, src, width); dst 792 sound/core/oss/pcm_plugin.c dst += dst_step; dst 72 sound/core/oss/rate.c signed short *src, *dst; dst 92 sound/core/oss/rate.c dst = (signed short *)dst_channels[channel].area.addr + dst 112 sound/core/oss/rate.c *dst = val; dst 113 sound/core/oss/rate.c dst += dst_step; dst 131 sound/core/oss/rate.c signed short *src, *dst; dst 151 sound/core/oss/rate.c dst = (signed short *)dst_channels[channel].area.addr + dst 170 sound/core/oss/rate.c *dst = val; dst 171 sound/core/oss/rate.c dst += dst_step; dst 30 sound/core/oss/route.c int dst = 0; dst 31 sound/core/oss/route.c for (; dst < ndsts; ++dst) { dst 52 sound/core/oss/route.c int nsrcs, ndsts, dst; dst 70 sound/core/oss/route.c for (dst = 0; dst < ndsts; ++dst) { dst 77 sound/core/oss/route.c for (dst = 0; dst < ndsts && dst < nsrcs; ++dst) { dst 82 sound/core/oss/route.c if (dst < ndsts) dst 83 sound/core/oss/route.c zero_areas(dvp, ndsts - dst, frames, format); dst 252 sound/core/pcm_compat.c #define put_timespec(src, dst) copy_to_user(dst, src, sizeof(*dst)) dst 2359 sound/core/pcm_lib.c unsigned int __user *dst; dst 2369 sound/core/pcm_lib.c dst = tlv + 2; dst 2376 sound/core/pcm_lib.c if 
(put_user(SNDRV_CTL_TLVT_CHMAP_FIXED, dst) || dst 2377 sound/core/pcm_lib.c put_user(chs_bytes, dst + 1)) dst 2379 sound/core/pcm_lib.c dst += 2; dst 2387 sound/core/pcm_lib.c if (put_user(map->map[c], dst)) dst 2389 sound/core/pcm_lib.c dst++; dst 418 sound/core/pcm_misc.c unsigned char *dst, *pat; dst 436 sound/core/pcm_misc.c dst = data; dst 439 sound/core/pcm_misc.c memcpy(dst, pat, width); dst 440 sound/core/pcm_misc.c dst += width; dst 447 sound/core/pcm_misc.c memcpy(dst, pat, 2); dst 448 sound/core/pcm_misc.c dst += 2; dst 453 sound/core/pcm_misc.c memcpy(dst, pat, 3); dst 454 sound/core/pcm_misc.c dst += 3; dst 459 sound/core/pcm_misc.c memcpy(dst, pat, 4); dst 460 sound/core/pcm_misc.c dst += 4; dst 465 sound/core/pcm_misc.c memcpy(dst, pat, 8); dst 466 sound/core/pcm_misc.c dst += 8; dst 97 sound/core/rawmidi_compat.c #define put_timespec(src, dst) copy_to_user(dst, src, sizeof(*dst)) dst 361 sound/drivers/aloop.c char *dst = runtime->dma_area; dst 373 sound/drivers/aloop.c snd_pcm_format_set_silence(runtime->format, dst + dst_off, dst 390 sound/drivers/aloop.c char *dst = capt->substream->runtime->dma_area; dst 418 sound/drivers/aloop.c memcpy(dst + dst_off, src + src_off, size); dst 630 sound/drivers/dummy.c void __user *dst, unsigned long bytes) dst 637 sound/drivers/dummy.c void *dst, unsigned long bytes) dst 485 sound/drivers/ml403-ac97cr.c u16 *dst; dst 490 sound/drivers/ml403-ac97cr.c dst = (u16 *)(substream->runtime->dma_area + rec->sw_data); dst 495 sound/drivers/ml403-ac97cr.c dst[copied_words] = CR_RECDATA(in_be32(CR_REG(ml403_ac97cr, dst 189 sound/firewire/amdtp-am824.c u32 *dst; dst 195 sound/firewire/amdtp-am824.c dst = (void *)runtime->dma_area + dst 201 sound/firewire/amdtp-am824.c *dst = be32_to_cpu(buffer[p->pcm_positions[c]]) << 8; dst 202 sound/firewire/amdtp-am824.c dst++; dst 206 sound/firewire/amdtp-am824.c dst = (void *)runtime->dma_area; dst 186 sound/firewire/digi00x/amdtp-dot.c u32 *dst; dst 192 sound/firewire/digi00x/amdtp-dot.c dst = (void *)runtime->dma_area + dst 199 sound/firewire/digi00x/amdtp-dot.c *dst = be32_to_cpu(buffer[c]) << 8; dst 200 sound/firewire/digi00x/amdtp-dot.c dst++; dst 204 sound/firewire/digi00x/amdtp-dot.c dst = (void *)runtime->dma_area; dst 69 sound/firewire/fireface/amdtp-ff.c u32 *dst; dst 75 sound/firewire/fireface/amdtp-ff.c dst = (void *)runtime->dma_area + dst 81 sound/firewire/fireface/amdtp-ff.c *dst = le32_to_cpu(buffer[c]) & 0xffffff00; dst 82 sound/firewire/fireface/amdtp-ff.c dst++; dst 86 sound/firewire/fireface/amdtp-ff.c dst = (void *)runtime->dma_area; dst 26 sound/firewire/motu/amdtp-motu-trace.h __field(int, dst) dst 33 sound/firewire/motu/amdtp-motu-trace.h __entry->dst = fw_parent_device(s->unit)->card->node_id; dst 36 sound/firewire/motu/amdtp-motu-trace.h __entry->dst = fw_parent_device(s->unit)->node_id; dst 44 sound/firewire/motu/amdtp-motu-trace.h __entry->dst, dst 55 sound/firewire/motu/amdtp-motu-trace.h __field(int, dst) dst 62 sound/firewire/motu/amdtp-motu-trace.h __entry->dst = fw_parent_device(s->unit)->card->node_id; dst 65 sound/firewire/motu/amdtp-motu-trace.h __entry->dst = fw_parent_device(s->unit)->node_id; dst 73 sound/firewire/motu/amdtp-motu-trace.h __entry->dst, dst 130 sound/firewire/motu/amdtp-motu.c u32 *dst; dst 136 sound/firewire/motu/amdtp-motu.c dst = (void *)runtime->dma_area + dst 144 sound/firewire/motu/amdtp-motu.c *dst = (byte[0] << 24) | dst 148 sound/firewire/motu/amdtp-motu.c dst++; dst 152 sound/firewire/motu/amdtp-motu.c dst = (void *)runtime->dma_area; dst 74 
sound/firewire/tascam/amdtp-tascam.c u32 *dst; dst 80 sound/firewire/tascam/amdtp-tascam.c dst = (void *)runtime->dma_area + dst 89 sound/firewire/tascam/amdtp-tascam.c *dst = be32_to_cpu(buffer[c]); dst 90 sound/firewire/tascam/amdtp-tascam.c dst++; dst 94 sound/firewire/tascam/amdtp-tascam.c dst = (void *)runtime->dma_area; dst 670 sound/hda/hdmi_chmap.c unsigned int __user *dst; dst 681 sound/hda/hdmi_chmap.c dst = tlv + 2; dst 710 sound/hda/hdmi_chmap.c if (put_user(type, dst) || dst 711 sound/hda/hdmi_chmap.c put_user(chs_bytes, dst + 1)) dst 714 sound/hda/hdmi_chmap.c dst += 2; dst 726 sound/hda/hdmi_chmap.c if (copy_to_user(dst, tlv_chmap, chs_bytes)) dst 728 sound/hda/hdmi_chmap.c dst += chs; dst 528 sound/isa/wavefront/wavefront_synth.c unsigned char *dst, dst 534 sound/isa/wavefront/wavefront_synth.c *dst = src & 0x7F; /* Mask high bit of LSB */ dst 538 sound/isa/wavefront/wavefront_synth.c dst++; dst 540 sound/isa/wavefront/wavefront_synth.c return dst; dst 559 sound/isa/wavefront/wavefront_synth.c munge_buf (unsigned char *src, unsigned char *dst, unsigned int dst_size) dst 566 sound/isa/wavefront/wavefront_synth.c *dst++ = src[i] & 0x7f; dst 567 sound/isa/wavefront/wavefront_synth.c *dst++ = src[i] >> 7; dst 569 sound/isa/wavefront/wavefront_synth.c return dst; dst 574 sound/isa/wavefront/wavefront_synth.c demunge_buf (unsigned char *src, unsigned char *dst, unsigned int src_bytes) dst 585 sound/isa/wavefront/wavefront_synth.c dst[i] = *src++; dst 586 sound/isa/wavefront/wavefront_synth.c dst[i] |= (*src++)<<7; dst 589 sound/isa/wavefront/wavefront_synth.c return dst; dst 352 sound/mips/sgio2audio.c s16 *dst; dst 372 sound/mips/sgio2audio.c dst = (s16 *)(dst_base + dst_pos); dst 375 sound/mips/sgio2audio.c dst[0] = (x >> CHANNEL_LEFT_SHIFT) & 0xffff; dst 376 sound/mips/sgio2audio.c dst[1] = (x >> CHANNEL_RIGHT_SHIFT) & 0xffff; dst 400 sound/mips/sgio2audio.c u64 *dst; dst 420 sound/mips/sgio2audio.c dst = (u64 *)(dst_base + dst_pos); dst 425 sound/mips/sgio2audio.c *dst = ((l & 0x00ffffff) << CHANNEL_LEFT_SHIFT) | dst 2588 sound/pci/ac97/ac97_codec.c static void set_ctl_name(char *dst, const char *src, const char *suffix) dst 2591 sound/pci/ac97/ac97_codec.c sprintf(dst, "%s %s", src, suffix); dst 2593 sound/pci/ac97/ac97_codec.c strcpy(dst, src); dst 2618 sound/pci/ac97/ac97_codec.c const char *dst, const char *suffix) dst 2622 sound/pci/ac97/ac97_codec.c set_ctl_name(kctl->id.name, dst, suffix); dst 2630 sound/pci/ac97/ac97_codec.c const char *dst) dst 2632 sound/pci/ac97/ac97_codec.c snd_ac97_rename_ctl(ac97, src, dst, "Switch"); dst 2633 sound/pci/ac97/ac97_codec.c snd_ac97_rename_ctl(ac97, src, dst, "Volume"); dst 66 sound/pci/ac97/ac97_patch.h const char *dst, const char *suffix); dst 70 sound/pci/ac97/ac97_patch.h const char *dst); dst 719 sound/pci/ca0106/ca0106_mixer.c static int rename_ctl(struct snd_card *card, const char *src, const char *dst) dst 723 sound/pci/ca0106/ca0106_mixer.c strcpy(kctl->id.name, dst); dst 309 sound/pci/cs46xx/cs46xx_lib.c void __iomem *dst; dst 315 sound/pci/cs46xx/cs46xx_lib.c dst = chip->region.idx[bank+1].remap_addr + offset; dst 320 sound/pci/cs46xx/cs46xx_lib.c writel(*src++, dst); dst 321 sound/pci/cs46xx/cs46xx_lib.c dst += sizeof(u32); dst 326 sound/pci/cs46xx/cs46xx_lib.c static inline void memcpy_le32(void *dst, const void *src, unsigned int len) dst 329 sound/pci/cs46xx/cs46xx_lib.c memcpy(dst, src, len); dst 331 sound/pci/cs46xx/cs46xx_lib.c u32 *_dst = dst; dst 472 sound/pci/cs46xx/cs46xx_lib.c void __iomem *dst; dst 478 
sound/pci/cs46xx/cs46xx_lib.c dst = chip->region.idx[bank+1].remap_addr + offset; dst 483 sound/pci/cs46xx/cs46xx_lib.c writel(0, dst); dst 484 sound/pci/cs46xx/cs46xx_lib.c dst += sizeof(u32); dst 550 sound/pci/cs46xx/dsp_spos.c void __iomem *dst = chip->region.idx[1].remap_addr + DSP_PARAMETER_BYTE_OFFSET; dst 563 sound/pci/cs46xx/dsp_spos.c val = readl(dst + (ins->tasks[i].address + j) * sizeof(u32)); dst 611 sound/pci/cs46xx/dsp_spos.c void __iomem *dst = chip->region.idx[1].remap_addr + DSP_PARAMETER_BYTE_OFFSET; dst 629 sound/pci/cs46xx/dsp_spos.c snd_iprintf(buffer,"%08X ",readl(dst + i)); dst 638 sound/pci/cs46xx/dsp_spos.c void __iomem *dst = chip->region.idx[2].remap_addr; dst 651 sound/pci/cs46xx/dsp_spos.c snd_iprintf(buffer,"%08X ",readl(dst + i)); dst 667 sound/pci/cs46xx/dsp_spos.c snd_iprintf(buffer,"%08X ",readl(dst + i)); dst 682 sound/pci/cs46xx/dsp_spos.c snd_iprintf(buffer,"%08X ",readl(dst + i)); dst 698 sound/pci/cs46xx/dsp_spos.c snd_iprintf(buffer,"%08X ",readl(dst + i)); dst 714 sound/pci/cs46xx/dsp_spos.c snd_iprintf(buffer,"%08X ",readl(dst + i)); dst 730 sound/pci/cs46xx/dsp_spos.c snd_iprintf(buffer,"%08X ",readl(dst + i)); dst 745 sound/pci/cs46xx/dsp_spos.c snd_iprintf(buffer,"%08X ",readl(dst + i)); dst 760 sound/pci/cs46xx/dsp_spos.c snd_iprintf(buffer,"%08X ",readl(dst + i)); dst 776 sound/pci/cs46xx/dsp_spos.c snd_iprintf(buffer,"%08X ",readl(dst + i)); dst 64 sound/pci/cs46xx/dsp_spos_scb_lib.c void __iomem *dst = chip->region.idx[1].remap_addr + DSP_PARAMETER_BYTE_OFFSET; dst 74 sound/pci/cs46xx/dsp_spos_scb_lib.c snd_iprintf(buffer,"%08x ",readl(dst + (scb->address + j) * sizeof(u32))); dst 151 sound/pci/cs46xx/dsp_spos_scb_lib.c void __iomem *dst = chip->region.idx[2].remap_addr + sample_buffer_addr; dst 155 sound/pci/cs46xx/dsp_spos_scb_lib.c writel(0, dst); dst 156 sound/pci/cs46xx/dsp_spos_scb_lib.c dst += 4; dst 1443 sound/pci/emu10k1/emufx.c #define _A_SWITCH(icode, ptr, dst, src, sw) \ dst 1444 sound/pci/emu10k1/emufx.c A_OP((icode), ptr, iMACINT0, dst, A_C_00000000, src, sw); dst 1445 sound/pci/emu10k1/emufx.c #define A_SWITCH(icode, ptr, dst, src, sw) \ dst 1446 sound/pci/emu10k1/emufx.c _A_SWITCH(icode, ptr, A_GPR(dst), A_GPR(src), A_GPR(sw)) dst 1447 sound/pci/emu10k1/emufx.c #define _A_SWITCH_NEG(icode, ptr, dst, src) \ dst 1448 sound/pci/emu10k1/emufx.c A_OP((icode), ptr, iANDXOR, dst, src, A_C_00000001, A_C_00000001); dst 1449 sound/pci/emu10k1/emufx.c #define A_SWITCH_NEG(icode, ptr, dst, src) \ dst 1450 sound/pci/emu10k1/emufx.c _A_SWITCH_NEG(icode, ptr, A_GPR(dst), A_GPR(src)) dst 1780 sound/pci/emu10k1/emufx.c static void _volume(struct snd_emu10k1_fx8010_code *icode, u32 *ptr, u32 dst, u32 src, u32 vol) dst 1782 sound/pci/emu10k1/emufx.c OP(icode, ptr, iMAC0, dst, C_00000000, src, vol); dst 1785 sound/pci/emu10k1/emufx.c OP(icode, ptr, iACC3, dst, src, C_00000000, C_00000000); dst 1787 sound/pci/emu10k1/emufx.c static void _volume_add(struct snd_emu10k1_fx8010_code *icode, u32 *ptr, u32 dst, u32 src, u32 vol) dst 1791 sound/pci/emu10k1/emufx.c OP(icode, ptr, iMACINT0, dst, dst, src, C_00000001); dst 1793 sound/pci/emu10k1/emufx.c OP(icode, ptr, iMAC0, dst, dst, src, vol); dst 1795 sound/pci/emu10k1/emufx.c static void _volume_out(struct snd_emu10k1_fx8010_code *icode, u32 *ptr, u32 dst, u32 src, u32 vol) dst 1799 sound/pci/emu10k1/emufx.c OP(icode, ptr, iACC3, dst, src, C_00000000, C_00000000); dst 1801 sound/pci/emu10k1/emufx.c OP(icode, ptr, iMAC0, dst, C_00000000, src, vol); dst 1804 sound/pci/emu10k1/emufx.c #define 
VOLUME(icode, ptr, dst, src, vol) \ dst 1805 sound/pci/emu10k1/emufx.c _volume(icode, ptr, GPR(dst), GPR(src), GPR(vol)) dst 1806 sound/pci/emu10k1/emufx.c #define VOLUME_IN(icode, ptr, dst, src, vol) \ dst 1807 sound/pci/emu10k1/emufx.c _volume(icode, ptr, GPR(dst), EXTIN(src), GPR(vol)) dst 1808 sound/pci/emu10k1/emufx.c #define VOLUME_ADD(icode, ptr, dst, src, vol) \ dst 1809 sound/pci/emu10k1/emufx.c _volume_add(icode, ptr, GPR(dst), GPR(src), GPR(vol)) dst 1810 sound/pci/emu10k1/emufx.c #define VOLUME_ADDIN(icode, ptr, dst, src, vol) \ dst 1811 sound/pci/emu10k1/emufx.c _volume_add(icode, ptr, GPR(dst), EXTIN(src), GPR(vol)) dst 1812 sound/pci/emu10k1/emufx.c #define VOLUME_OUT(icode, ptr, dst, src, vol) \ dst 1813 sound/pci/emu10k1/emufx.c _volume_out(icode, ptr, EXTOUT(dst), GPR(src), GPR(vol)) dst 1814 sound/pci/emu10k1/emufx.c #define _SWITCH(icode, ptr, dst, src, sw) \ dst 1815 sound/pci/emu10k1/emufx.c OP((icode), ptr, iMACINT0, dst, C_00000000, src, sw); dst 1816 sound/pci/emu10k1/emufx.c #define SWITCH(icode, ptr, dst, src, sw) \ dst 1817 sound/pci/emu10k1/emufx.c _SWITCH(icode, ptr, GPR(dst), GPR(src), GPR(sw)) dst 1818 sound/pci/emu10k1/emufx.c #define SWITCH_IN(icode, ptr, dst, src, sw) \ dst 1819 sound/pci/emu10k1/emufx.c _SWITCH(icode, ptr, GPR(dst), EXTIN(src), GPR(sw)) dst 1820 sound/pci/emu10k1/emufx.c #define _SWITCH_NEG(icode, ptr, dst, src) \ dst 1821 sound/pci/emu10k1/emufx.c OP((icode), ptr, iANDXOR, dst, src, C_00000001, C_00000001); dst 1822 sound/pci/emu10k1/emufx.c #define SWITCH_NEG(icode, ptr, dst, src) \ dst 1823 sound/pci/emu10k1/emufx.c _SWITCH_NEG(icode, ptr, GPR(dst), GPR(src)) dst 2486 sound/pci/emu10k1/emufx.c static void copy_string(char *dst, char *src, char *null, int idx) dst 2489 sound/pci/emu10k1/emufx.c sprintf(dst, "%s %02X", null, idx); dst 2491 sound/pci/emu10k1/emufx.c strcpy(dst, src); dst 1765 sound/pci/emu10k1/emumixer.c static int rename_ctl(struct snd_card *card, const char *src, const char *dst) dst 1769 sound/pci/emu10k1/emumixer.c strcpy(kctl->id.name, dst); dst 278 sound/pci/emu10k1/io.c int snd_emu1010_fpga_link_dst_src_write(struct snd_emu10k1 * emu, u32 dst, u32 src) dst 280 sound/pci/emu10k1/io.c snd_emu1010_fpga_write(emu, 0x00, ((dst >> 8) & 0x3f) ); dst 281 sound/pci/emu10k1/io.c snd_emu1010_fpga_write(emu, 0x01, (dst & 0x3f) ); dst 828 sound/pci/es1938.c void __user *dst, unsigned long count) dst 836 sound/pci/es1938.c if (copy_to_user(dst, runtime->dma_area + pos + 1, count)) dst 839 sound/pci/es1938.c if (copy_to_user(dst, runtime->dma_area + pos + 1, count - 1)) dst 842 sound/pci/es1938.c ((unsigned char __user *)dst) + count - 1)) dst 850 sound/pci/es1938.c void *dst, unsigned long count) dst 858 sound/pci/es1938.c memcpy(dst, runtime->dma_area + pos + 1, count); dst 860 sound/pci/es1938.c memcpy(dst, runtime->dma_area + pos + 1, count - 1); dst 861 sound/pci/es1938.c runtime->dma_area[0] = *((unsigned char *)dst + count - 1); dst 1264 sound/pci/korg1212/korg1212.c struct KorgAudioFrame * dst = korg1212->playDataBufsPtr[0].bufferData + pos; dst 1274 sound/pci/korg1212/korg1212.c if ( (void *) dst < (void *) korg1212->playDataBufsPtr || dst 1275 sound/pci/korg1212/korg1212.c (void *) dst > (void *) korg1212->playDataBufsPtr[8].bufferData ) { dst 1277 sound/pci/korg1212/korg1212.c dst, i); dst 1281 sound/pci/korg1212/korg1212.c memset((void*) dst + offset, 0, size); dst 1282 sound/pci/korg1212/korg1212.c dst++; dst 1289 sound/pci/korg1212/korg1212.c void __user *dst, int pos, int count, dst 1310 
sound/pci/korg1212/korg1212.c printk(KERN_DEBUG "K1212_DEBUG: snd_korg1212_copy_to KERNEL EFAULT, src=%p dst=%p iter=%d\n", src, dst, i); dst 1315 sound/pci/korg1212/korg1212.c memcpy((__force void *)dst, src, size); dst 1316 sound/pci/korg1212/korg1212.c else if (copy_to_user(dst, src, size)) dst 1319 sound/pci/korg1212/korg1212.c dst += size; dst 1331 sound/pci/korg1212/korg1212.c struct KorgAudioFrame *dst; dst 1337 sound/pci/korg1212/korg1212.c dst = korg1212->playDataBufsPtr[0].bufferData + pos; dst 1347 sound/pci/korg1212/korg1212.c if ( (void *) dst < (void *) korg1212->playDataBufsPtr || dst 1348 sound/pci/korg1212/korg1212.c (void *) dst > (void *) korg1212->playDataBufsPtr[8].bufferData ) { dst 1349 sound/pci/korg1212/korg1212.c printk(KERN_DEBUG "K1212_DEBUG: snd_korg1212_copy_from KERNEL EFAULT, src=%p dst=%p iter=%d\n", src, dst, i); dst 1354 sound/pci/korg1212/korg1212.c memcpy(dst, (__force void *)src, size); dst 1355 sound/pci/korg1212/korg1212.c else if (copy_from_user(dst, src, size)) dst 1357 sound/pci/korg1212/korg1212.c dst++; dst 1673 sound/pci/korg1212/korg1212.c void __user *dst, unsigned long count) dst 1675 sound/pci/korg1212/korg1212.c return snd_korg1212_copy_to(substream, dst, pos, count, false); dst 1680 sound/pci/korg1212/korg1212.c void *dst, unsigned long count) dst 1682 sound/pci/korg1212/korg1212.c return snd_korg1212_copy_to(substream, (void __user *)dst, dst 489 sound/pci/lola/lola_mixer.c int dst = chip->mixer.dest_phys_out_ofs + dst 491 sound/pci/lola/lola_mixer.c lola_mixer_set_mapping_gain(chip, src, dst, 336, true); dst 759 sound/pci/lola/lola_mixer.c unsigned int dst, mask, i; dst 761 sound/pci/lola/lola_mixer.c dst = snd_ctl_get_ioffidx(kcontrol, &ucontrol->id) + dst_ofs; dst 762 sound/pci/lola/lola_mixer.c mask = readl(&chip->mixer.array->dest_mix_gain_enable[dst]); dst 768 sound/pci/lola/lola_mixer.c if (mask & (1 << dst)) dst 769 sound/pci/lola/lola_mixer.c val = readw(&chip->mixer.array->dest_mix_gain[dst][src]) + 1; dst 784 sound/pci/lola/lola_mixer.c unsigned int dst, mask; dst 798 sound/pci/lola/lola_mixer.c dst = snd_ctl_get_ioffidx(kcontrol, &ucontrol->id) + dst_ofs; dst 799 sound/pci/lola/lola_mixer.c return lola_mixer_set_dest_gains(chip, dst, mask, gains); dst 724 sound/pci/nm256/nm256.c void __user *dst, unsigned long count) dst 729 sound/pci/nm256/nm256.c if (copy_to_user_fromio(dst, s->bufptr + pos, count)) dst 737 sound/pci/nm256/nm256.c void *dst, unsigned long count) dst 742 sound/pci/nm256/nm256.c memcpy_fromio(dst, s->bufptr + pos, count); dst 279 sound/pci/rme32.c void __user *dst, unsigned long count) dst 283 sound/pci/rme32.c if (copy_to_user_fromio(dst, dst 292 sound/pci/rme32.c void *dst, unsigned long count) dst 296 sound/pci/rme32.c memcpy_fromio(dst, rme32->iobase + RME32_IO_DATA_BUFFER + pos, count); dst 350 sound/pci/rme96.c void __user *dst, unsigned long count) dst 354 sound/pci/rme96.c return copy_to_user_fromio(dst, dst 362 sound/pci/rme96.c void *dst, unsigned long count) dst 366 sound/pci/rme96.c memcpy_fromio(dst, rme96->iobase + RME96_IO_REC_BUFFER + pos, count); dst 3930 sound/pci/rme9652/hdsp.c void __user *dst, unsigned long count) dst 3941 sound/pci/rme9652/hdsp.c if (copy_to_user(dst, channel_buf + pos, count)) dst 3948 sound/pci/rme9652/hdsp.c void *dst, unsigned long count) dst 3956 sound/pci/rme9652/hdsp.c memcpy(dst, channel_buf + pos, count); dst 1909 sound/pci/rme9652/rme9652.c void __user *dst, unsigned long count) dst 1922 sound/pci/rme9652/rme9652.c if (copy_to_user(dst, channel_buf + pos, 
count)) dst 1929 sound/pci/rme9652/rme9652.c void *dst, unsigned long count) dst 1939 sound/pci/rme9652/rme9652.c memcpy(dst, channel_buf + pos, count); dst 41 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c static inline void pdacf_transfer_mono16(u16 *dst, u16 xor, unsigned int size, unsigned long rdp_port) dst 44 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = inw(rdp_port) ^ xor; dst 49 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c static inline void pdacf_transfer_mono32(u32 *dst, u32 xor, unsigned int size, unsigned long rdp_port) dst 57 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = ((((u32)val2 & 0xff) << 24) | ((u32)val1 << 8)) ^ xor; dst 61 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c static inline void pdacf_transfer_stereo16(u16 *dst, u16 xor, unsigned int size, unsigned long rdp_port) dst 64 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = inw(rdp_port) ^ xor; dst 65 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = inw(rdp_port) ^ xor; dst 69 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c static inline void pdacf_transfer_stereo32(u32 *dst, u32 xor, unsigned int size, unsigned long rdp_port) dst 77 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = ((((u32)val2 & 0xff) << 24) | ((u32)val1 << 8)) ^ xor; dst 78 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (((u32)val3 << 16) | (val2 & 0xff00)) ^ xor; dst 82 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c static inline void pdacf_transfer_mono16sw(u16 *dst, u16 xor, unsigned int size, unsigned long rdp_port) dst 85 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = swab16(inw(rdp_port) ^ xor); dst 90 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c static inline void pdacf_transfer_mono32sw(u32 *dst, u32 xor, unsigned int size, unsigned long rdp_port) dst 98 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = swab32((((val2 & 0xff) << 24) | ((u32)val1 << 8)) ^ xor); dst 102 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c static inline void pdacf_transfer_stereo16sw(u16 *dst, u16 xor, unsigned int size, unsigned long rdp_port) dst 105 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = swab16(inw(rdp_port) ^ xor); dst 106 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = swab16(inw(rdp_port) ^ xor); dst 110 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c static inline void pdacf_transfer_stereo32sw(u32 *dst, u32 xor, unsigned int size, unsigned long rdp_port) dst 118 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = swab32((((val2 & 0xff) << 24) | ((u32)val1 << 8)) ^ xor); dst 119 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = swab32((((u32)val3 << 16) | (val2 & 0xff00)) ^ xor); dst 123 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c static inline void pdacf_transfer_mono24le(u8 *dst, u16 xor, unsigned int size, unsigned long rdp_port) dst 133 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval1 >> 8); dst 134 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval1 >> 16); dst 135 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval1 >> 24); dst 139 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c static inline void pdacf_transfer_mono24be(u8 *dst, u16 xor, unsigned int size, unsigned long rdp_port) dst 149 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval1 >> 24); dst 150 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval1 >> 16); dst 151 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval1 >> 8); dst 155 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c static inline void pdacf_transfer_stereo24le(u8 *dst, u32 xor, unsigned int size, unsigned long rdp_port) dst 166 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval1 >> 8); dst 167 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval1 >> 16); 
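The hdsp.c, rme9652.c and nm256.c entries above all follow the same ALSA PCM copy_user callback shape: copy count bytes at pos from a driver-owned buffer to the user pointer dst. A minimal sketch of that shape, with a hypothetical driver name and a deliberately simplified flat buffer layout (not taken from any of the drivers listed):

#include <linux/errno.h>
#include <linux/uaccess.h>
#include <sound/pcm.h>

/* Hypothetical capture copy_user callback: the flat dma_area layout and the
 * absence of per-channel handling are simplifying assumptions for illustration. */
static int example_copy_user(struct snd_pcm_substream *substream, int channel,
			     unsigned long pos, void __user *dst,
			     unsigned long count)
{
	struct snd_pcm_runtime *runtime = substream->runtime;

	if (copy_to_user(dst, runtime->dma_area + pos, count))
		return -EFAULT;
	return 0;
}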
dst 168 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval1 >> 24); dst 169 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval2 >> 8); dst 170 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval2 >> 16); dst 171 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval2 >> 24); dst 175 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c static inline void pdacf_transfer_stereo24be(u8 *dst, u32 xor, unsigned int size, unsigned long rdp_port) dst 186 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval1 >> 24); dst 187 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval1 >> 16); dst 188 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval1 >> 8); dst 189 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval2 >> 24); dst 190 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval2 >> 16); dst 191 sound/pcmcia/pdaudiocf/pdaudiocf_irq.c *dst++ = (u8)(xval2 >> 8); dst 97 sound/soc/codecs/rt5677-spi.c static void rt5677_spi_reverse(u8 *dst, u32 dstlen, const u8 *src, u32 srclen) dst 105 sound/soc/codecs/rt5677-spi.c dst[w + i] = si < srclen ? src[si] : 0; dst 109 sound/soc/intel/atom/sst-atom-controls.c SST_FILL_DEFAULT_DESTINATION(cmd.header.dst); dst 263 sound/soc/intel/atom/sst-atom-controls.c len = sizeof(cmd->dst) + sizeof(cmd->command_id) + bc->max; dst 269 sound/soc/intel/atom/sst-atom-controls.c SST_FILL_DESTINATION(2, cmd->dst, bc->pipe_id, bc->module_id); dst 400 sound/soc/intel/atom/sst-atom-controls.c SST_FILL_DEFAULT_DESTINATION(cmd.header.dst); dst 651 sound/soc/intel/atom/sst-atom-controls.c SST_FILL_DEFAULT_DESTINATION(cmd.header.dst); dst 737 sound/soc/intel/atom/sst-atom-controls.c SST_FILL_DEFAULT_DESTINATION(cmd.header.dst); dst 941 sound/soc/intel/atom/sst-atom-controls.c SST_FILL_DEFAULT_DESTINATION(drv->ssp_cmd.header.dst); dst 997 sound/soc/intel/atom/sst-atom-controls.c SST_FILL_DESTINATION(2, cmd.header.dst, dst 1031 sound/soc/intel/atom/sst-atom-controls.c SST_FILL_DESTINATION(2, cmd.header.dst, dst 313 sound/soc/intel/atom/sst-atom-controls.h #define SST_FILL_LOCATION_IDS(dst, cell_idx, pipe_id) do { \ dst 314 sound/soc/intel/atom/sst-atom-controls.h dst.location_id.p.cell_nbr_idx = (cell_idx); \ dst 315 sound/soc/intel/atom/sst-atom-controls.h dst.location_id.p.path_id = (pipe_id); \ dst 317 sound/soc/intel/atom/sst-atom-controls.h #define SST_FILL_LOCATION_ID(dst, loc_id) (\ dst 318 sound/soc/intel/atom/sst-atom-controls.h dst.location_id.f = (loc_id)) dst 319 sound/soc/intel/atom/sst-atom-controls.h #define SST_FILL_MODULE_ID(dst, mod_id) (\ dst 320 sound/soc/intel/atom/sst-atom-controls.h dst.module_id = (mod_id)) dst 322 sound/soc/intel/atom/sst-atom-controls.h #define SST_FILL_DESTINATION1(dst, id) do { \ dst 323 sound/soc/intel/atom/sst-atom-controls.h SST_FILL_LOCATION_ID(dst, (id) & 0xFFFF); \ dst 324 sound/soc/intel/atom/sst-atom-controls.h SST_FILL_MODULE_ID(dst, ((id) & 0xFFFF0000) >> 16); \ dst 326 sound/soc/intel/atom/sst-atom-controls.h #define SST_FILL_DESTINATION2(dst, loc_id, mod_id) do { \ dst 327 sound/soc/intel/atom/sst-atom-controls.h SST_FILL_LOCATION_ID(dst, loc_id); \ dst 328 sound/soc/intel/atom/sst-atom-controls.h SST_FILL_MODULE_ID(dst, mod_id); \ dst 330 sound/soc/intel/atom/sst-atom-controls.h #define SST_FILL_DESTINATION3(dst, cell_idx, path_id, mod_id) do { \ dst 331 sound/soc/intel/atom/sst-atom-controls.h SST_FILL_LOCATION_IDS(dst, cell_idx, path_id); \ dst 332 sound/soc/intel/atom/sst-atom-controls.h SST_FILL_MODULE_ID(dst, mod_id); \ dst 335 sound/soc/intel/atom/sst-atom-controls.h #define 
SST_FILL_DESTINATION(level, dst, ...) \ dst 336 sound/soc/intel/atom/sst-atom-controls.h SST_FILL_DESTINATION##level(dst, __VA_ARGS__) dst 337 sound/soc/intel/atom/sst-atom-controls.h #define SST_FILL_DEFAULT_DESTINATION(dst) \ dst 338 sound/soc/intel/atom/sst-atom-controls.h SST_FILL_DESTINATION(2, dst, SST_DEFAULT_LOCATION_ID, SST_DEFAULT_MODULE_ID) dst 351 sound/soc/intel/atom/sst-atom-controls.h struct sst_destination_id dst; dst 411 sound/soc/intel/atom/sst-atom-controls.h struct sst_destination_id dst; dst 368 sound/soc/intel/atom/sst/sst.c kfree(ctx->fw_sg_list.dst); dst 265 sound/soc/intel/atom/sst/sst.h struct scatterlist *dst; dst 523 sound/soc/intel/atom/sst/sst.h void memcpy32_toio(void __iomem *dst, const void *src, int count); dst 524 sound/soc/intel/atom/sst/sst.h void memcpy32_fromio(void *dst, const void __iomem *src, int count); dst 34 sound/soc/intel/atom/sst/sst_loader.c void memcpy32_toio(void __iomem *dst, const void *src, int count) dst 39 sound/soc/intel/atom/sst/sst_loader.c __iowrite32_copy(dst, src, count / 4); dst 42 sound/soc/intel/atom/sst/sst_loader.c void memcpy32_fromio(void *dst, const void __iomem *src, int count) dst 47 sound/soc/intel/atom/sst/sst_loader.c __ioread32_copy(dst, src, count / 4); dst 699 sound/soc/intel/skylake/skl-topology.c if (modules->dst == module) dst 701 sound/soc/intel/skylake/skl-topology.c modules->dst); dst 881 sound/soc/intel/skylake/skl-topology.c struct skl_module_cfg *src, struct skl_module_cfg *dst) dst 887 sound/soc/intel/skylake/skl-topology.c for (i = 0; i < dst->module->max_input_pins; i++) { dst 888 sound/soc/intel/skylake/skl-topology.c struct skl_module_pin *pin = &dst->m_in_pin[i]; dst 898 sound/soc/intel/skylake/skl-topology.c if (modules->src == src && modules->dst == dst) dst 908 sound/soc/intel/skylake/skl-topology.c m_list->dst = dst; dst 1189 sound/soc/intel/skylake/skl-topology.c if (modules->dst == src_module) { dst 1191 sound/soc/intel/skylake/skl-topology.c modules->dst); dst 1201 sound/soc/intel/skylake/skl-topology.c modules->dst = NULL; dst 424 sound/soc/intel/skylake/skl-topology.h struct skl_module_cfg *dst; dst 289 sound/soc/mediatek/common/mtk-btcvsd.c u8 *src, u8 *dst, dst 297 sound/soc/mediatek/common/mtk-btcvsd.c u32 *dst_32 = (u32 *)dst; dst 303 sound/soc/mediatek/common/mtk-btcvsd.c u16 *dst_16 = (u16 *)dst; dst 340 sound/soc/mediatek/common/mtk-btcvsd.c void *dst; dst 345 sound/soc/mediatek/common/mtk-btcvsd.c dst = (void *)bt->tx->buffer_info.bt_sram_addr[i]; dst 348 sound/soc/mediatek/common/mtk-btcvsd.c bt->tx->temp_packet_buf, dst, dst 418 sound/soc/mediatek/common/mtk-btcvsd.c u8 *dst; dst 445 sound/soc/mediatek/common/mtk-btcvsd.c dst = (u8 *)ap_addr_tx; dst 449 sound/soc/mediatek/common/mtk-btcvsd.c bt->tx->temp_packet_buf, dst, dst 518 sound/soc/mediatek/common/mtk-btcvsd.c u8 *src, *dst; dst 539 sound/soc/mediatek/common/mtk-btcvsd.c dst = (u8 *)ap_addr_tx; dst 546 sound/soc/mediatek/common/mtk-btcvsd.c bt->tx->temp_packet_buf, dst, dst 46 sound/soc/sof/ipc.c u8 *dst; dst 514 sound/soc/sof/ipc.c struct sof_ipc_ctrl_data *dst, dst 521 sound/soc/sof/ipc.c sparams->dst = (u8 *)dst->chanv; dst 526 sound/soc/sof/ipc.c sparams->dst = (u8 *)dst->compv; dst 531 sound/soc/sof/ipc.c sparams->dst = (u8 *)dst->data->data; dst 592 sound/soc/sof/ipc.c memcpy(sparams->dst, sparams->src + offset, send_bytes); dst 604 sound/soc/sof/ipc.c memcpy(sparams->dst + offset, sparams->src, send_bytes); dst 541 sound/soc/sprd/sprd-pcm-compress.c void *dst; dst 551 sound/soc/sprd/sprd-pcm-compress.c dst = 
stream->iram_buffer.area + stream->received_stage0; dst 558 sound/soc/sprd/sprd-pcm-compress.c if (copy_from_user(dst, buf, data_count)) dst 571 sound/soc/sprd/sprd-pcm-compress.c if (copy_from_user(dst, buf, avail_bytes)) dst 585 sound/soc/sprd/sprd-pcm-compress.c dst = stream->compr_buffer.area + stream->stage1_pointer; dst 587 sound/soc/sprd/sprd-pcm-compress.c if (copy_from_user(dst, buf, data_count)) dst 594 sound/soc/sprd/sprd-pcm-compress.c if (copy_from_user(dst, buf, avail_bytes)) dst 281 sound/soc/uniphier/aio-compress.c int src = 0, dst = 0, ret; dst 310 sound/soc/uniphier/aio-compress.c dstbuf[dst++] = frm_a; dst 311 sound/soc/uniphier/aio-compress.c dstbuf[dst++] = frm_b; dst 507 sound/usb/card.c unsigned int src, dst; dst 511 sound/usb/card.c sscanf(quirk_alias[i], "%x:%x", &src, &dst) != 2 || dst 517 sound/usb/card.c USB_ID_VENDOR(dst), USB_ID_PRODUCT(dst)); dst 518 sound/usb/card.c *id = dst; dst 350 sound/usb/mixer_scarlett.c static void scarlett_generate_name(int i, char *dst, int offsets[]) dst 353 sound/usb/mixer_scarlett.c sprintf(dst, "Mix %c", dst 356 sound/usb/mixer_scarlett.c sprintf(dst, "ADAT %d", i - offsets[SCARLETT_OFFSET_ADAT]); dst 358 sound/usb/mixer_scarlett.c sprintf(dst, "SPDIF %d", i - offsets[SCARLETT_OFFSET_SPDIF]); dst 360 sound/usb/mixer_scarlett.c sprintf(dst, "Analog %d", i - offsets[SCARLETT_OFFSET_ANALOG]); dst 362 sound/usb/mixer_scarlett.c sprintf(dst, "PCM %d", i - offsets[SCARLETT_OFFSET_PCM]); dst 364 sound/usb/mixer_scarlett.c sprintf(dst, "Off"); dst 1462 sound/usb/pcm.c u8 *dst = urb->transfer_buffer; dst 1485 sound/usb/pcm.c dst[dst_idx++] = marker[subs->dsd_dop.marker]; dst 1500 sound/usb/pcm.c dst[dst_idx++] = bitrev8(src[idx]); dst 1502 sound/usb/pcm.c dst[dst_idx++] = src[idx]; dst 152 sound/usb/stream.c unsigned int __user *dst; dst 160 sound/usb/stream.c dst = tlv + 2; dst 172 sound/usb/stream.c if (put_user(SNDRV_CTL_TLVT_CHMAP_FIXED, dst) || dst 173 sound/usb/stream.c put_user(ch_bytes, dst + 1)) dst 175 sound/usb/stream.c dst += 2; dst 176 sound/usb/stream.c for (i = 0; i < fp->chmap->channels; i++, dst++) { dst 177 sound/usb/stream.c if (put_user(fp->chmap->map[i], dst)) dst 635 sound/xen/xen_snd_front_alsa.c int channel, unsigned long pos, void __user *dst, dst 648 sound/xen/xen_snd_front_alsa.c return copy_to_user(dst, stream->buffer + pos, count) ? 
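The chmap TLV entries above (sound/core/pcm_lib.c, sound/hda/hdmi_chmap.c, sound/usb/stream.c) all build a SNDRV_CTL_TLVT_CHMAP_FIXED blob in user memory with put_user(). A hedged sketch of that pattern with a hypothetical fixed stereo map; the callback name and the hard-coded layout are illustrative assumptions, not code from the files listed:

#include <linux/errno.h>
#include <linux/uaccess.h>
#include <sound/control.h>
#include <sound/pcm.h>
#include <sound/tlv.h>

/* Hypothetical TLV read callback writing a fixed 2-channel map to user space. */
static int example_chmap_tlv(struct snd_kcontrol *kcontrol, int op_flag,
			     unsigned int size, unsigned int __user *tlv)
{
	unsigned int ch_bytes = 2 * sizeof(int);
	unsigned int __user *dst = tlv + 2;	/* payload follows type + length */

	if (size < 8 + ch_bytes)
		return -ENOMEM;
	if (put_user(SNDRV_CTL_TLVT_CHMAP_FIXED, tlv) ||
	    put_user(ch_bytes, tlv + 1))
		return -EFAULT;
	if (put_user(SNDRV_CHMAP_FL, dst) ||
	    put_user(SNDRV_CHMAP_FR, dst + 1))
		return -EFAULT;
	return 0;
}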
dst 653 sound/xen/xen_snd_front_alsa.c int channel, unsigned long pos, void *dst, dst 666 sound/xen/xen_snd_front_alsa.c memcpy(dst, stream->buffer + pos, count); dst 41 tools/bpf/bpftool/cfg.c struct bb_node *dst; dst 256 tools/bpf/bpftool/cfg.c static struct edge_node *new_edge(struct bb_node *src, struct bb_node *dst, dst 269 tools/bpf/bpftool/cfg.c if (dst) dst 270 tools/bpf/bpftool/cfg.c e->dst = dst; dst 306 tools/bpf/bpftool/cfg.c e->dst = bb_next(bb); dst 311 tools/bpf/bpftool/cfg.c e->dst = func_search_bb_with_head(func, dst 318 tools/bpf/bpftool/cfg.c e->dst = bb_next(bb); dst 326 tools/bpf/bpftool/cfg.c e->dst = func_search_bb_with_head(func, insn + insn->off + 1); dst 427 tools/bpf/bpftool/cfg.c func_idx, e->src->idx, func_idx, e->dst->idx, dst 6 tools/build/feature/test-libelf-getphdrnum.c size_t dst; dst 8 tools/build/feature/test-libelf-getphdrnum.c return elf_getphdrnum(0, &dst); dst 6 tools/build/feature/test-libelf-getshdrstrndx.c size_t dst; dst 8 tools/build/feature/test-libelf-getshdrstrndx.c return elf_getshdrstrndx(0, &dst); dst 14 tools/include/linux/bitmap.h void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1, dst 16 tools/include/linux/bitmap.h int __bitmap_and(unsigned long *dst, const unsigned long *bitmap1, dst 31 tools/include/linux/bitmap.h static inline void bitmap_zero(unsigned long *dst, int nbits) dst 34 tools/include/linux/bitmap.h *dst = 0UL; dst 37 tools/include/linux/bitmap.h memset(dst, 0, len); dst 41 tools/include/linux/bitmap.h static inline void bitmap_fill(unsigned long *dst, unsigned int nbits) dst 46 tools/include/linux/bitmap.h memset(dst, 0xff, len); dst 48 tools/include/linux/bitmap.h dst[nlongs - 1] = BITMAP_LAST_WORD_MASK(nbits); dst 74 tools/include/linux/bitmap.h static inline void bitmap_or(unsigned long *dst, const unsigned long *src1, dst 78 tools/include/linux/bitmap.h *dst = *src1 | *src2; dst 80 tools/include/linux/bitmap.h __bitmap_or(dst, src1, src2, nbits); dst 143 tools/include/linux/bitmap.h static inline int bitmap_and(unsigned long *dst, const unsigned long *src1, dst 147 tools/include/linux/bitmap.h return (*dst = *src1 & *src2 & BITMAP_LAST_WORD_MASK(nbits)) != 0; dst 148 tools/include/linux/bitmap.h return __bitmap_and(dst, src1, src2, nbits); dst 2284 tools/include/nolibc/nolibc.h void *memmove(void *dst, const void *src, size_t len) dst 2286 tools/include/nolibc/nolibc.h ssize_t pos = (dst <= src) ? -1 : (long)len; dst 2287 tools/include/nolibc/nolibc.h void *ret = dst; dst 2290 tools/include/nolibc/nolibc.h pos += (dst <= src) ? 
1 : -1; dst 2291 tools/include/nolibc/nolibc.h ((char *)dst)[pos] = ((char *)src)[pos]; dst 2297 tools/include/nolibc/nolibc.h void *memset(void *dst, int b, size_t len) dst 2299 tools/include/nolibc/nolibc.h char *p = dst; dst 2303 tools/include/nolibc/nolibc.h return dst; dst 2319 tools/include/nolibc/nolibc.h char *strcpy(char *dst, const char *src) dst 2321 tools/include/nolibc/nolibc.h char *ret = dst; dst 2323 tools/include/nolibc/nolibc.h while ((*dst++ = *src++)); dst 2424 tools/include/nolibc/nolibc.h void *memcpy(void *dst, const void *src, size_t len) dst 2426 tools/include/nolibc/nolibc.h return memmove(dst, src, len); dst 21 tools/lib/bitmap.c void __bitmap_or(unsigned long *dst, const unsigned long *bitmap1, dst 28 tools/lib/bitmap.c dst[k] = bitmap1[k] | bitmap2[k]; dst 60 tools/lib/bitmap.c int __bitmap_and(unsigned long *dst, const unsigned long *bitmap1, dst 68 tools/lib/bitmap.c result |= (dst[k] = bitmap1[k] & bitmap2[k]); dst 70 tools/lib/bitmap.c result |= (dst[k] = bitmap1[k] & bitmap2[k] & dst 12 tools/lib/bpf/str_error.c char *libbpf_strerror_r(int err, char *dst, int len) dst 14 tools/lib/bpf/str_error.c int ret = strerror_r(err < 0 ? -err : err, dst, len); dst 16 tools/lib/bpf/str_error.c snprintf(dst, len, "ERROR: strerror_r(%d)=%d", err, ret); dst 17 tools/lib/bpf/str_error.c return dst; dst 5 tools/lib/bpf/str_error.h char *libbpf_strerror_r(int err, char *dst, int len); dst 112 tools/perf/bench/mem-functions.c u64 (*do_cycles)(const struct function *r, size_t size, void *src, void *dst); dst 113 tools/perf/bench/mem-functions.c double (*do_gettimeofday)(const struct function *r, size_t size, void *src, void *dst); dst 123 tools/perf/bench/mem-functions.c void *src = NULL, *dst = zalloc(size); dst 127 tools/perf/bench/mem-functions.c if (dst == NULL) dst 140 tools/perf/bench/mem-functions.c result_cycles = info->do_cycles(r, size, src, dst); dst 142 tools/perf/bench/mem-functions.c result_bps = info->do_gettimeofday(r, size, src, dst); dst 169 tools/perf/bench/mem-functions.c free(dst); dst 226 tools/perf/bench/mem-functions.c static u64 do_memcpy_cycles(const struct function *r, size_t size, void *src, void *dst) dst 239 tools/perf/bench/mem-functions.c fn(dst, src, size); dst 243 tools/perf/bench/mem-functions.c fn(dst, src, size); dst 249 tools/perf/bench/mem-functions.c static double do_memcpy_gettimeofday(const struct function *r, size_t size, void *src, void *dst) dst 259 tools/perf/bench/mem-functions.c fn(dst, src, size); dst 263 tools/perf/bench/mem-functions.c fn(dst, src, size); dst 303 tools/perf/bench/mem-functions.c static u64 do_memset_cycles(const struct function *r, size_t size, void *src __maybe_unused, void *dst) dst 313 tools/perf/bench/mem-functions.c fn(dst, -1, size); dst 317 tools/perf/bench/mem-functions.c fn(dst, i, size); dst 323 tools/perf/bench/mem-functions.c static double do_memset_gettimeofday(const struct function *r, size_t size, void *src __maybe_unused, void *dst) dst 333 tools/perf/bench/mem-functions.c fn(dst, -1, size); dst 337 tools/perf/bench/mem-functions.c fn(dst, i, size); dst 8 tools/perf/bench/mem-memcpy-x86-64-lib.c unsigned long __memcpy_mcsafe(void *dst, const void *src, size_t cnt); dst 143 tools/perf/builtin-record.c static size_t zstd_compress(struct perf_session *session, void *dst, size_t dst_size, dst 931 tools/perf/builtin-record.c static size_t zstd_compress(struct perf_session *session, void *dst, size_t dst_size, dst 937 tools/perf/builtin-record.c compressed = 
zstd_compress_stream_to_records(&session->zstd_data, dst, dst_size, src, src_size, dst 65 tools/perf/include/bpf/bpf.h static int (*probe_read)(void *dst, int size, const void *unsafe_addr) = (void *)BPF_FUNC_probe_read; dst 66 tools/perf/include/bpf/bpf.h static int (*probe_read_str)(void *dst, int size, const void *unsafe_addr) = (void *)BPF_FUNC_probe_read_str; dst 79 tools/perf/pmu-events/jevents.c static void addfield(char *map, char **dst, const char *sep, dst 83 tools/perf/pmu-events/jevents.c int olen = *dst ? strlen(*dst) : 0; dst 87 tools/perf/pmu-events/jevents.c out = realloc(*dst, len + olen + blen); dst 92 tools/perf/pmu-events/jevents.c *dst = out; dst 95 tools/perf/pmu-events/jevents.c *(*dst) = 0; dst 97 tools/perf/pmu-events/jevents.c strcat(*dst, sep); dst 98 tools/perf/pmu-events/jevents.c strcat(*dst, a); dst 100 tools/perf/pmu-events/jevents.c strncat(*dst, map + bt->start, blen); dst 997 tools/perf/util/callchain.c struct callchain_node *dst, struct callchain_node *src) dst 1017 tools/perf/util/callchain.c if (append_chain_children(dst, cursor, src->hit) < 0) dst 1027 tools/perf/util/callchain.c err = merge_chain_branch(cursor, dst, child); dst 1041 tools/perf/util/callchain.c struct callchain_root *dst, struct callchain_root *src) dst 1043 tools/perf/util/callchain.c return merge_chain_branch(cursor, &dst->node, &src->node); dst 1559 tools/perf/util/callchain.c int callchain_cursor__copy(struct callchain_cursor *dst, dst 1564 tools/perf/util/callchain.c callchain_cursor_reset(dst); dst 1574 tools/perf/util/callchain.c rc = callchain_cursor_append(dst, node->ip, node->map, node->sym, dst 193 tools/perf/util/callchain.h struct callchain_root *dst, struct callchain_root *src); dst 226 tools/perf/util/callchain.h int callchain_cursor__copy(struct callchain_cursor *dst, dst 32 tools/perf/util/compress.h size_t zstd_compress_stream_to_records(struct zstd_data *data, void *dst, size_t dst_size, dst 37 tools/perf/util/compress.h void *dst, size_t dst_size); dst 52 tools/perf/util/compress.h void *dst __maybe_unused, size_t dst_size __maybe_unused, dst 61 tools/perf/util/compress.h size_t src_size __maybe_unused, void *dst __maybe_unused, dst 98 tools/perf/util/expr.y char *dst = res->id; dst 102 tools/perf/util/expr.y *dst++ = *p++; dst 112 tools/perf/util/expr.y *dst++ = '/'; dst 114 tools/perf/util/expr.y *dst++ = *++p; dst 116 tools/perf/util/expr.y *dst++ = *p; dst 119 tools/perf/util/expr.y *dst = 0; dst 121 tools/perf/util/expr.y dst = res->id; dst 122 tools/perf/util/expr.y switch (dst[0]) { dst 124 tools/perf/util/expr.y if (!strcmp(dst, "min")) dst 126 tools/perf/util/expr.y if (!strcmp(dst, "max")) dst 130 tools/perf/util/expr.y if (!strcmp(dst, "if")) dst 134 tools/perf/util/expr.y if (!strcmp(dst, "else")) dst 138 tools/perf/util/expr.y if (!strcasecmp(dst, "#smt_on")) dst 55 tools/perf/util/mmap.c void *dst = map->core.event_copy; dst 59 tools/perf/util/mmap.c memcpy(dst, &data[offset & map->core.mask], cpy); dst 61 tools/perf/util/mmap.c dst += cpy; dst 2241 tools/perf/util/probe-event.c static int perf_probe_point__copy(struct perf_probe_point *dst, dst 2244 tools/perf/util/probe-event.c dst->file = strdup_or_goto(src->file, out_err); dst 2245 tools/perf/util/probe-event.c dst->function = strdup_or_goto(src->function, out_err); dst 2246 tools/perf/util/probe-event.c dst->lazy_line = strdup_or_goto(src->lazy_line, out_err); dst 2247 tools/perf/util/probe-event.c dst->line = src->line; dst 2248 tools/perf/util/probe-event.c dst->retprobe = src->retprobe; dst 
2249 tools/perf/util/probe-event.c dst->offset = src->offset; dst 2253 tools/perf/util/probe-event.c clear_perf_probe_point(dst); dst 2257 tools/perf/util/probe-event.c static int perf_probe_arg__copy(struct perf_probe_arg *dst, dst 2262 tools/perf/util/probe-event.c dst->name = strdup_or_goto(src->name, out_err); dst 2263 tools/perf/util/probe-event.c dst->var = strdup_or_goto(src->var, out_err); dst 2264 tools/perf/util/probe-event.c dst->type = strdup_or_goto(src->type, out_err); dst 2267 tools/perf/util/probe-event.c ppfield = &(dst->field); dst 2283 tools/perf/util/probe-event.c int perf_probe_event__copy(struct perf_probe_event *dst, dst 2288 tools/perf/util/probe-event.c dst->event = strdup_or_goto(src->event, out_err); dst 2289 tools/perf/util/probe-event.c dst->group = strdup_or_goto(src->group, out_err); dst 2290 tools/perf/util/probe-event.c dst->target = strdup_or_goto(src->target, out_err); dst 2291 tools/perf/util/probe-event.c dst->uprobes = src->uprobes; dst 2293 tools/perf/util/probe-event.c if (perf_probe_point__copy(&dst->point, &src->point) < 0) dst 2296 tools/perf/util/probe-event.c dst->args = zalloc(sizeof(struct perf_probe_arg) * src->nargs); dst 2297 tools/perf/util/probe-event.c if (!dst->args) dst 2299 tools/perf/util/probe-event.c dst->nargs = src->nargs; dst 2302 tools/perf/util/probe-event.c if (perf_probe_arg__copy(&dst->args[i], &src->args[i]) < 0) dst 2307 tools/perf/util/probe-event.c clear_perf_probe_event(dst); dst 138 tools/perf/util/probe-event.h int perf_probe_event__copy(struct perf_probe_event *dst, dst 75 tools/perf/util/symbol-elf.c static int elf_getphdrnum(Elf *elf, size_t *dst) dst 84 tools/perf/util/symbol-elf.c *dst = ehdr->e_phnum; dst 91 tools/perf/util/symbol-elf.c static int elf_getshdrstrndx(Elf *elf __maybe_unused, size_t *dst __maybe_unused) dst 2048 tools/perf/util/symbol-elf.c Elf_Data dst = { dst 2055 tools/perf/util/symbol-elf.c .d_version = EV_CURRENT, .d_size = dst.d_size, .d_off = 0, dst 2067 tools/perf/util/symbol-elf.c if (len < dst.d_size + 3) dst 2071 tools/perf/util/symbol-elf.c if (gelf_xlatetom(*elf, &dst, &src, dst 2078 tools/perf/util/symbol-elf.c provider = data + dst.d_size; dst 57 tools/perf/util/zstd.c size_t zstd_compress_stream_to_records(struct zstd_data *data, void *dst, size_t dst_size, dst 67 tools/perf/util/zstd.c record = dst; dst 70 tools/perf/util/zstd.c dst += size; dst 72 tools/perf/util/zstd.c output = (ZSTD_outBuffer){ dst, (dst_size > max_record_size) ? 
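The perf_probe_point__copy()/perf_probe_arg__copy() entries above use the same strdup-every-field, roll-back-on-failure idiom. A standalone sketch of that idiom with a hypothetical two-field struct (not the perf types themselves); src fields are assumed non-NULL here:

#include <errno.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical struct standing in for perf_probe_point; only the copy idiom
 * is the point of this example. */
struct example_point {
	char *file;
	char *function;
};

static void example_point_clear(struct example_point *p)
{
	free(p->file);
	free(p->function);
	memset(p, 0, sizeof(*p));
}

static int example_point_copy(struct example_point *dst,
			      const struct example_point *src)
{
	memset(dst, 0, sizeof(*dst));
	dst->file = strdup(src->file);
	if (!dst->file)
		goto out_err;
	dst->function = strdup(src->function);
	if (!dst->function)
		goto out_err;
	return 0;
out_err:
	example_point_clear(dst);
	return -ENOMEM;
}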
dst 79 tools/perf/util/zstd.c memcpy(dst, src, src_size); dst 85 tools/perf/util/zstd.c dst += size; dst 93 tools/perf/util/zstd.c void *dst, size_t dst_size) dst 97 tools/perf/util/zstd.c ZSTD_outBuffer output = { dst, dst_size, 0 }; dst 106 tools/perf/util/zstd.c output.dst = dst + output.pos; dst 94 tools/spi/spidev_test.c char *dst = _dst; dst 104 tools/spi/spidev_test.c *dst++ = (unsigned char)ch; dst 106 tools/spi/spidev_test.c *dst++ = *src++; dst 3063 tools/testing/nvdimm/test/nfit.c static void mcsafe_test_init(char *dst, char *src, size_t size) dst 3067 tools/testing/nvdimm/test/nfit.c memset(dst, 0xff, size); dst 3072 tools/testing/nvdimm/test/nfit.c static bool mcsafe_test_validate(unsigned char *dst, unsigned char *src, dst 3078 tools/testing/nvdimm/test/nfit.c if (dst[i] != (unsigned char) i) { dst 3080 tools/testing/nvdimm/test/nfit.c __func__, __LINE__, i, dst[i], dst 3085 tools/testing/nvdimm/test/nfit.c if (dst[i] != 0xffU) { dst 3087 tools/testing/nvdimm/test/nfit.c __func__, __LINE__, i, dst[i]); dst 3111 tools/testing/nvdimm/test/nfit.c void *src, *dst; dst 3118 tools/testing/nvdimm/test/nfit.c dst = &mcsafe_buf[2048]; dst 3125 tools/testing/nvdimm/test/nfit.c dst = &mcsafe_buf[2048]; dst 3132 tools/testing/nvdimm/test/nfit.c dst = &mcsafe_buf[2048 - i]; dst 3138 tools/testing/nvdimm/test/nfit.c mcsafe_test_init(dst, src, 512); dst 3139 tools/testing/nvdimm/test/nfit.c rem = __memcpy_mcsafe(dst, src, 512); dst 3140 tools/testing/nvdimm/test/nfit.c valid = mcsafe_test_validate(dst, src, 512, expect); dst 3145 tools/testing/nvdimm/test/nfit.c ((unsigned long) dst) & ~PAGE_MASK, dst 39 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_probe_read)(void *dst, int size, const void *unsafe_ptr) = dst 70 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_probe_write_user)(void *dst, const void *src, int size) = dst 531 tools/testing/selftests/bpf/bpf_helpers.h #define BPF_CORE_READ(dst, src) \ dst 532 tools/testing/selftests/bpf/bpf_helpers.h bpf_probe_read((dst), sizeof(*(src)), \ dst 19 tools/testing/selftests/bpf/prog_tests/l4lb_all.c __be32 dst; dst 23 tools/testing/selftests/bpf/prog_tests/l4lb_all.c } real_def = {.dst = MAGIC_VAL}; dst 20 tools/testing/selftests/bpf/prog_tests/xdp_noinline.c __be32 dst; dst 24 tools/testing/selftests/bpf/prog_tests/xdp_noinline.c } real_def = {.dst = MAGIC_VAL}; dst 129 tools/testing/selftests/bpf/progs/test_l4lb.c __be32 dst; dst 155 tools/testing/selftests/bpf/progs/test_l4lb.c __be32 dst; dst 279 tools/testing/selftests/bpf/progs/test_l4lb.c pckt->dst = iph->saddr; dst 332 tools/testing/selftests/bpf/progs/test_l4lb.c struct real_definition *dst; dst 391 tools/testing/selftests/bpf/progs/test_l4lb.c pckt.dst = iph->daddr; dst 409 tools/testing/selftests/bpf/progs/test_l4lb.c vip.daddr.v4 = pckt.dst; dst 425 tools/testing/selftests/bpf/progs/test_l4lb.c if (!get_packet_dst(&dst, &pckt, vip_info, is_ipv6)) dst 428 tools/testing/selftests/bpf/progs/test_l4lb.c if (dst->flags & F_IPV6) { dst 433 tools/testing/selftests/bpf/progs/test_l4lb.c memcpy(tkey.remote_ipv6, dst->dstv6, 16); dst 440 tools/testing/selftests/bpf/progs/test_l4lb.c tkey.remote_ipv4 = dst->dst; dst 125 tools/testing/selftests/bpf/progs/test_l4lb_noinline.c __be32 dst; dst 151 tools/testing/selftests/bpf/progs/test_l4lb_noinline.c __be32 dst; dst 279 tools/testing/selftests/bpf/progs/test_l4lb_noinline.c pckt->dst = iph->saddr; dst 332 tools/testing/selftests/bpf/progs/test_l4lb_noinline.c struct real_definition *dst; dst 391 
tools/testing/selftests/bpf/progs/test_l4lb_noinline.c pckt.dst = iph->daddr; dst 409 tools/testing/selftests/bpf/progs/test_l4lb_noinline.c vip.daddr.v4 = pckt.dst; dst 425 tools/testing/selftests/bpf/progs/test_l4lb_noinline.c if (!get_packet_dst(&dst, &pckt, vip_info, is_ipv6)) dst 428 tools/testing/selftests/bpf/progs/test_l4lb_noinline.c if (dst->flags & F_IPV6) { dst 433 tools/testing/selftests/bpf/progs/test_l4lb_noinline.c memcpy(tkey.remote_ipv6, dst->dstv6, 16); dst 440 tools/testing/selftests/bpf/progs/test_l4lb_noinline.c tkey.remote_ipv4 = dst->dst; dst 82 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c static void skcpy(struct bpf_sock *dst, dst 85 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->bound_dev_if = src->bound_dev_if; dst 86 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->family = src->family; dst 87 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->type = src->type; dst 88 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->protocol = src->protocol; dst 89 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->mark = src->mark; dst 90 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->priority = src->priority; dst 91 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->src_ip4 = src->src_ip4; dst 92 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->src_ip6[0] = src->src_ip6[0]; dst 93 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->src_ip6[1] = src->src_ip6[1]; dst 94 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->src_ip6[2] = src->src_ip6[2]; dst 95 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->src_ip6[3] = src->src_ip6[3]; dst 96 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->src_port = src->src_port; dst 97 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->dst_ip4 = src->dst_ip4; dst 98 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->dst_ip6[0] = src->dst_ip6[0]; dst 99 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->dst_ip6[1] = src->dst_ip6[1]; dst 100 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->dst_ip6[2] = src->dst_ip6[2]; dst 101 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->dst_ip6[3] = src->dst_ip6[3]; dst 102 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->dst_port = src->dst_port; dst 103 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->state = src->state; dst 106 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c static void tpcpy(struct bpf_tcp_sock *dst, dst 109 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->snd_cwnd = src->snd_cwnd; dst 110 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->srtt_us = src->srtt_us; dst 111 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->rtt_min = src->rtt_min; dst 112 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->snd_ssthresh = src->snd_ssthresh; dst 113 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->rcv_nxt = src->rcv_nxt; dst 114 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->snd_nxt = src->snd_nxt; dst 115 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->snd_una = src->snd_una; dst 116 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->mss_cache = src->mss_cache; dst 117 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->ecn_flags = src->ecn_flags; dst 118 
tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->rate_delivered = src->rate_delivered; dst 119 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->rate_interval_us = src->rate_interval_us; dst 120 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->packets_out = src->packets_out; dst 121 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->retrans_out = src->retrans_out; dst 122 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->total_retrans = src->total_retrans; dst 123 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->segs_in = src->segs_in; dst 124 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->data_segs_in = src->data_segs_in; dst 125 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->segs_out = src->segs_out; dst 126 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->data_segs_out = src->data_segs_out; dst 127 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->lost_out = src->lost_out; dst 128 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->sacked_out = src->sacked_out; dst 129 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->bytes_received = src->bytes_received; dst 130 tools/testing/selftests/bpf/progs/test_sock_fields_kern.c dst->bytes_acked = src->bytes_acked; dst 111 tools/testing/selftests/bpf/progs/test_xdp_noinline.c __be32 dst; dst 156 tools/testing/selftests/bpf/progs/test_xdp_noinline.c __be32 dst; dst 280 tools/testing/selftests/bpf/progs/test_xdp_noinline.c struct real_definition *dst, __u32 pkt_bytes) dst 316 tools/testing/selftests/bpf/progs/test_xdp_noinline.c memcpy(ip6h->daddr.in6_u.u6_addr32, dst->dstv6, 16); dst 323 tools/testing/selftests/bpf/progs/test_xdp_noinline.c struct real_definition *dst, __u32 pkt_bytes) dst 361 tools/testing/selftests/bpf/progs/test_xdp_noinline.c iph->saddr = ((0xFFFF0000 & ip_suffix) | 4268) ^ dst->dst; dst 531 tools/testing/selftests/bpf/progs/test_xdp_noinline.c pckt->flow.dst = iph->saddr; dst 685 tools/testing/selftests/bpf/progs/test_xdp_noinline.c pckt->flow.dst = iph->daddr; dst 695 tools/testing/selftests/bpf/progs/test_xdp_noinline.c struct real_definition *dst = NULL; dst 734 tools/testing/selftests/bpf/progs/test_xdp_noinline.c vip.vip = pckt.flow.dst; dst 752 tools/testing/selftests/bpf/progs/test_xdp_noinline.c if (!dst) { dst 756 tools/testing/selftests/bpf/progs/test_xdp_noinline.c connection_table_lookup(&dst, &pckt, lru_map); dst 757 tools/testing/selftests/bpf/progs/test_xdp_noinline.c if (dst) dst 770 tools/testing/selftests/bpf/progs/test_xdp_noinline.c if (!get_packet_dst(&dst, &pckt, vip_info, is_ipv6, lru_map)) dst 778 tools/testing/selftests/bpf/progs/test_xdp_noinline.c if (dst->flags & (1 << 0)) { dst 779 tools/testing/selftests/bpf/progs/test_xdp_noinline.c if (!encap_v6(xdp, cval, &pckt, dst, pkt_bytes)) dst 782 tools/testing/selftests/bpf/progs/test_xdp_noinline.c if (!encap_v4(xdp, cval, &pckt, dst, pkt_bytes)) dst 796 tools/testing/selftests/bpf/progs/test_xdp_noinline.c *(u32 *)data = dst->dst; dst 30 tools/testing/selftests/bpf/progs/xdping_kern.c unsigned short dst[3]; dst 32 tools/testing/selftests/bpf/progs/xdping_kern.c dst[0] = p[0]; dst 33 tools/testing/selftests/bpf/progs/xdping_kern.c dst[1] = p[1]; dst 34 tools/testing/selftests/bpf/progs/xdping_kern.c dst[2] = p[2]; dst 38 tools/testing/selftests/bpf/progs/xdping_kern.c p[3] = dst[0]; dst 39 tools/testing/selftests/bpf/progs/xdping_kern.c p[4] = dst[1]; dst 40 
tools/testing/selftests/bpf/progs/xdping_kern.c p[5] = dst[2]; dst 166 tools/testing/selftests/bpf/test_flow_dissector.c uint32_t src, uint32_t dst, dst 179 tools/testing/selftests/bpf/test_flow_dissector.c iph->daddr = dst; dst 195 tools/testing/selftests/bpf/test_flow_dissector.c struct sockaddr_in6 *dst, dst 207 tools/testing/selftests/bpf/test_flow_dissector.c memcpy(&ip6h->daddr, &dst->sin6_addr, sizeof(ip6h->daddr)); dst 28 tools/testing/selftests/bpf/test_skb_cgroup_id_user.c struct sockaddr_in6 *dst) dst 30 tools/testing/selftests/bpf/test_skb_cgroup_id_user.c memset(dst, 0, sizeof(*dst)); dst 32 tools/testing/selftests/bpf/test_skb_cgroup_id_user.c dst->sin6_family = AF_INET6; dst 33 tools/testing/selftests/bpf/test_skb_cgroup_id_user.c dst->sin6_port = htons(1025); dst 35 tools/testing/selftests/bpf/test_skb_cgroup_id_user.c if (inet_pton(AF_INET6, ip, &dst->sin6_addr) != 1) { dst 40 tools/testing/selftests/bpf/test_skb_cgroup_id_user.c dst->sin6_scope_id = if_nametoindex(iface); dst 41 tools/testing/selftests/bpf/test_skb_cgroup_id_user.c if (!dst->sin6_scope_id) { dst 51 tools/testing/selftests/bpf/test_skb_cgroup_id_user.c struct sockaddr_in6 dst; dst 56 tools/testing/selftests/bpf/test_skb_cgroup_id_user.c if (mk_dst_addr(LINKLOCAL_MULTICAST, iface, &dst)) dst 65 tools/testing/selftests/bpf/test_skb_cgroup_id_user.c if (sendto(fd, &msg, sizeof(msg), 0, (const struct sockaddr *)&dst, dst 66 tools/testing/selftests/bpf/test_skb_cgroup_id_user.c sizeof(dst)) == -1) { dst 78 tools/testing/selftests/bpf/test_tag.c static int hex2bin(uint8_t *dst, const char *src, size_t count) dst 86 tools/testing/selftests/bpf/test_tag.c *dst++ = (hi << 4) | lo; dst 279 tools/testing/selftests/networking/timestamping/rxtimestamp.c int src, dst, rcv; dst 285 tools/testing/selftests/networking/timestamping/rxtimestamp.c dst = socket(AF_INET, s.type, s.protocol); dst 286 tools/testing/selftests/networking/timestamping/rxtimestamp.c if (dst < 0) dst 294 tools/testing/selftests/networking/timestamping/rxtimestamp.c if (bind(dst, (struct sockaddr *)&addr, sizeof(addr)) < 0) dst 297 tools/testing/selftests/networking/timestamping/rxtimestamp.c if (s.type == SOCK_STREAM && (listen(dst, 1) < 0)) dst 304 tools/testing/selftests/networking/timestamping/rxtimestamp.c rcv = accept(dst, NULL, NULL); dst 307 tools/testing/selftests/networking/timestamping/rxtimestamp.c close(dst); dst 309 tools/testing/selftests/networking/timestamping/rxtimestamp.c rcv = dst; dst 116 tools/testing/selftests/powerpc/alignment/alignment_handler.c void preload_data(void *dst, int offset, int width) dst 118 tools/testing/selftests/powerpc/alignment/alignment_handler.c char *c = dst; dst 127 tools/testing/selftests/powerpc/alignment/alignment_handler.c int test_memcpy(void *dst, void *src, int size, int offset, dst 134 tools/testing/selftests/powerpc/alignment/alignment_handler.c d = dst; dst 83 tools/testing/selftests/powerpc/copyloops/exc_validate.c unsigned long src, dst, len; dst 106 tools/testing/selftests/powerpc/copyloops/exc_validate.c for (dst = 0; dst < MAX_LEN; dst++) { dst 109 tools/testing/selftests/powerpc/copyloops/exc_validate.c do_one_test(q+dst, q+src, len); dst 17 tools/testing/selftests/powerpc/copyloops/validate.c static void do_one(char *src, char *dst, unsigned long src_off, dst 26 tools/testing/selftests/powerpc/copyloops/validate.c dstp = dst + MIN_REDZONE + dst_off; dst 29 tools/testing/selftests/powerpc/copyloops/validate.c memset(dst, POISON, BUFLEN); dst 50 
tools/testing/selftests/powerpc/copyloops/validate.c if (memcmp(dst, redzone, dstp - dst)) { dst 56 tools/testing/selftests/powerpc/copyloops/validate.c if (memcmp(dstp+len, redzone, dst+BUFLEN-(dstp+len))) { dst 65 tools/testing/selftests/powerpc/copyloops/validate.c char *src, *dst, *redzone, *fill; dst 70 tools/testing/selftests/powerpc/copyloops/validate.c dst = memalign(BUFLEN, BUFLEN); dst 74 tools/testing/selftests/powerpc/copyloops/validate.c if (!src || !dst || !redzone || !fill) { dst 88 tools/testing/selftests/powerpc/copyloops/validate.c do_one(src, dst, src_off, dst_off, len, dst 578 tools/testing/selftests/rseq/rseq-arm.h void *dst, void *src, size_t len, dst 667 tools/testing/selftests/rseq/rseq-arm.h [dst] "r" (dst), dst 702 tools/testing/selftests/rseq/rseq-arm.h void *dst, void *src, size_t len, dst 792 tools/testing/selftests/rseq/rseq-arm.h [dst] "r" (dst), dst 196 tools/testing/selftests/rseq/rseq-arm64.h #define RSEQ_ASM_OP_R_BAD_MEMCPY(dst, src, len) \ dst 202 tools/testing/selftests/rseq/rseq-arm64.h " strb " RSEQ_ASM_TMP_REG32 ", [%[" __rseq_str(dst) "]" \ dst 544 tools/testing/selftests/rseq/rseq-arm64.h void *dst, void *src, size_t len, dst 565 tools/testing/selftests/rseq/rseq-arm64.h RSEQ_ASM_OP_R_BAD_MEMCPY(dst, src, len) dst 577 tools/testing/selftests/rseq/rseq-arm64.h [dst] "r" (dst), dst 604 tools/testing/selftests/rseq/rseq-arm64.h void *dst, void *src, size_t len, dst 625 tools/testing/selftests/rseq/rseq-arm64.h RSEQ_ASM_OP_R_BAD_MEMCPY(dst, src, len) dst 637 tools/testing/selftests/rseq/rseq-arm64.h [dst] "r" (dst), dst 573 tools/testing/selftests/rseq/rseq-mips.h void *dst, void *src, size_t len, dst 659 tools/testing/selftests/rseq/rseq-mips.h [dst] "r" (dst), dst 694 tools/testing/selftests/rseq/rseq-mips.h void *dst, void *src, size_t len, dst 781 tools/testing/selftests/rseq/rseq-mips.h [dst] "r" (dst), dst 598 tools/testing/selftests/rseq/rseq-ppc.h void *dst, void *src, size_t len, dst 645 tools/testing/selftests/rseq/rseq-ppc.h [dst] "r" (dst), dst 672 tools/testing/selftests/rseq/rseq-ppc.h void *dst, void *src, size_t len, dst 721 tools/testing/selftests/rseq/rseq-ppc.h [dst] "r" (dst), dst 464 tools/testing/selftests/rseq/rseq-s390.h void *dst, void *src, size_t len, dst 544 tools/testing/selftests/rseq/rseq-s390.h [dst] "r" (dst), dst 575 tools/testing/selftests/rseq/rseq-s390.h void *dst, void *src, size_t len, dst 578 tools/testing/selftests/rseq/rseq-s390.h return rseq_cmpeqv_trymemcpy_storev(v, expect, dst, src, len, dst 53 tools/testing/selftests/rseq/rseq-skip.h void *dst, void *src, size_t len, dst 61 tools/testing/selftests/rseq/rseq-skip.h void *dst, void *src, size_t len, dst 428 tools/testing/selftests/rseq/rseq-x86.h void *dst, void *src, size_t len, dst 507 tools/testing/selftests/rseq/rseq-x86.h [dst] "r" (dst), dst 537 tools/testing/selftests/rseq/rseq-x86.h void *dst, void *src, size_t len, dst 540 tools/testing/selftests/rseq/rseq-x86.h return rseq_cmpeqv_trymemcpy_storev(v, expect, dst, src, len, dst 1012 tools/testing/selftests/rseq/rseq-x86.h void *dst, void *src, size_t len, dst 1094 tools/testing/selftests/rseq/rseq-x86.h [dst] "r" (dst), dst 1124 tools/testing/selftests/rseq/rseq-x86.h void *dst, void *src, size_t len, dst 1207 tools/testing/selftests/rseq/rseq-x86.h [dst] "r" (dst), dst 28 tools/testing/selftests/timers/set-tz.c int set_tz(int min, int dst) dst 33 tools/testing/selftests/timers/set-tz.c tz.tz_dsttime = dst; dst 61 tools/testing/selftests/timers/set-tz.c int min, dst; dst 64 
tools/testing/selftests/timers/set-tz.c dst = get_tz_dst(); dst 65 tools/testing/selftests/timers/set-tz.c printf("tz_minuteswest started at %i, dst at %i\n", min, dst); dst 70 tools/testing/selftests/timers/set-tz.c ret = set_tz(i, dst); dst 82 tools/testing/selftests/timers/set-tz.c if (!set_tz(-15*60-1, dst)) { dst 87 tools/testing/selftests/timers/set-tz.c if (!set_tz(15*60+1, dst)) { dst 92 tools/testing/selftests/timers/set-tz.c if (!set_tz(-24*60, dst)) { dst 97 tools/testing/selftests/timers/set-tz.c if (!set_tz(24*60, dst)) { dst 104 tools/testing/selftests/timers/set-tz.c set_tz(min, dst); dst 108 tools/testing/selftests/timers/set-tz.c set_tz(min, dst); dst 398 tools/testing/selftests/vm/userfaultfd.c uffd_test_ops->alias_mapping(&uffdio_copy->dst, dst 419 tools/testing/selftests/vm/userfaultfd.c uffdio_copy.dst = (unsigned long) area_dst + offset;