shadow_addr       111 arch/powerpc/platforms/ps3/spu.c 	u64 shadow_addr;
shadow_addr       161 arch/powerpc/platforms/ps3/spu.c 		&spu_pdata(spu)->shadow_addr,
shadow_addr       195 arch/powerpc/platforms/ps3/spu.c 	spu_pdata(spu)->shadow = ioremap_prot(spu_pdata(spu)->shadow_addr,
shadow_addr       228 arch/powerpc/platforms/ps3/spu.c 		spu_pdata(spu)->shadow_addr);
shadow_addr       212 arch/x86/kvm/mmu.c 	hpa_t shadow_addr;
shadow_addr      2645 arch/x86/kvm/mmu.c 	iterator->shadow_addr = root;
shadow_addr      2660 arch/x86/kvm/mmu.c 		iterator->shadow_addr
shadow_addr      2662 arch/x86/kvm/mmu.c 		iterator->shadow_addr &= PT64_BASE_ADDR_MASK;
shadow_addr      2664 arch/x86/kvm/mmu.c 		if (!iterator->shadow_addr)
shadow_addr      2682 arch/x86/kvm/mmu.c 	iterator->sptep	= ((u64 *)__va(iterator->shadow_addr)) + iterator->index;
shadow_addr      2694 arch/x86/kvm/mmu.c 	iterator->shadow_addr = spte & PT64_BASE_ADDR_MASK;
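
The arch/x86/kvm/mmu.c hits above come from the shadow page-table walk iterator: shadow_addr starts at the root, each level's SPTE is read at the computed index, and the next table's base is recovered by masking off the flag bits (PT64_BASE_ADDR_MASK). Below is a minimal userspace model of that walk, not the kernel code: the 512-entry tables, the PRESENT and ADDR_MASK constants, and plain pointers standing in for __va() are all assumptions made for illustration.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define ENTRIES_PER_LEVEL 512          /* 9 index bits per level                 */
#define PRESENT           0x1ULL       /* low flag bit, like a present SPTE      */
#define ADDR_MASK         (~0xfffULL)  /* stand-in for PT64_BASE_ADDR_MASK       */

struct walk_iterator {
	uint64_t *shadow_addr;  /* current table (a host pointer in this model) */
	unsigned int index;     /* entry index at the current level             */
	int level;              /* 4 .. 1                                       */
};

/* The 9-bit index a guest frame number selects at a given level. */
static unsigned int level_index(uint64_t gfn, int level)
{
	return (gfn >> ((level - 1) * 9)) & (ENTRIES_PER_LEVEL - 1);
}

/* Walk from the root, descending while entries are present. */
static void walk(uint64_t *root, uint64_t gfn)
{
	struct walk_iterator it = { .shadow_addr = root, .level = 4 };

	while (it.level >= 1) {
		it.index = level_index(gfn, it.level);
		uint64_t spte = it.shadow_addr[it.index];

		printf("level %d index %u spte %#llx\n",
		       it.level, it.index, (unsigned long long)spte);
		if (!(spte & PRESENT))
			break;
		/* Next table base: mask the flag bits, as PT64_BASE_ADDR_MASK does. */
		it.shadow_addr = (uint64_t *)(uintptr_t)(spte & ADDR_MASK);
		it.level--;
	}
}

int main(void)
{
	uint64_t *l4 = aligned_alloc(4096, ENTRIES_PER_LEVEL * sizeof(uint64_t));
	uint64_t *l3 = aligned_alloc(4096, ENTRIES_PER_LEVEL * sizeof(uint64_t));
	uint64_t gfn = 0x12345;

	if (!l4 || !l3)
		return 1;
	for (int i = 0; i < ENTRIES_PER_LEVEL; i++)
		l4[i] = l3[i] = 0;
	/* Link level 4 to the level-3 table; level 3 stays empty, so the walk stops there. */
	l4[level_index(gfn, 4)] = (uint64_t)(uintptr_t)l3 | PRESENT;

	walk(l4, gfn);
	free(l3);
	free(l4);
	return 0;
}
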
shadow_addr       745 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	uint64_t shadow_addr, parent_addr;
shadow_addr       747 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	shadow_addr = amdgpu_bo_gpu_offset(shadow);
shadow_addr       750 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	return amdgpu_copy_buffer(ring, shadow_addr, parent_addr,
shadow_addr        72 drivers/misc/habanalabs/habanalabs.h 	u64			shadow_addr;
shadow_addr        14 drivers/misc/habanalabs/mmu.c static inline u64 get_phys_addr(struct hl_ctx *ctx, u64 shadow_addr);
shadow_addr        22 drivers/misc/habanalabs/mmu.c 		if (hop_addr == pgt_info->shadow_addr)
shadow_addr        36 drivers/misc/habanalabs/mmu.c 	kfree((u64 *) (uintptr_t) pgt_info->shadow_addr);
shadow_addr        45 drivers/misc/habanalabs/mmu.c 	u64 phys_addr, shadow_addr;
shadow_addr        58 drivers/misc/habanalabs/mmu.c 	shadow_addr = (u64) (uintptr_t) kzalloc(prop->mmu_hop_table_size,
shadow_addr        60 drivers/misc/habanalabs/mmu.c 	if (!shadow_addr)
shadow_addr        64 drivers/misc/habanalabs/mmu.c 	pgt_info->shadow_addr = shadow_addr;
shadow_addr        67 drivers/misc/habanalabs/mmu.c 	hash_add(ctx->mmu_shadow_hash, &pgt_info->node, shadow_addr);
shadow_addr        69 drivers/misc/habanalabs/mmu.c 	return shadow_addr;
shadow_addr       221 drivers/misc/habanalabs/mmu.c static inline u64 get_phys_addr(struct hl_ctx *ctx, u64 shadow_addr)
shadow_addr       224 drivers/misc/habanalabs/mmu.c 	u64 shadow_hop_addr = shadow_addr & ~page_mask;
shadow_addr       225 drivers/misc/habanalabs/mmu.c 	u64 pte_offset = shadow_addr & page_mask;
shadow_addr       510 drivers/misc/habanalabs/mmu.c 		free_hop(ctx, pgt_info->shadow_addr);
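
The habanalabs hits show the driver's host-side shadow of the device MMU: each hop table gets a zeroed host copy, its virtual address (cast to u64) becomes shadow_addr and the hash key, and get_phys_addr() turns the shadow address of a PTE back into the device physical address by splitting it into hop base plus offset. A rough standalone sketch of that bookkeeping follows; the hop table size, the flat hops[] array standing in for mmu_shadow_hash, and aligned_alloc standing in for kzalloc's natural alignment are assumptions.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define HOP_TABLE_SIZE 4096      /* assumed stand-in for prop->mmu_hop_table_size */
#define MAX_HOPS       8

struct pgt_info {
	uint64_t shadow_addr;    /* host copy of the hop (kzalloc'd in the driver) */
	uint64_t phys_addr;      /* where the real hop lives on the device         */
};

static struct pgt_info hops[MAX_HOPS];
static int nr_hops;

/* Allocate a zeroed host shadow for a hop and remember its device address. */
static uint64_t alloc_hop(uint64_t phys_addr)
{
	/* The driver relies on kzalloc returning a naturally aligned buffer for a
	 * power-of-two size; aligned_alloc plays that role here. */
	void *shadow = aligned_alloc(HOP_TABLE_SIZE, HOP_TABLE_SIZE);

	if (!shadow)
		return 0;
	memset(shadow, 0, HOP_TABLE_SIZE);
	hops[nr_hops].shadow_addr = (uint64_t)(uintptr_t)shadow;
	hops[nr_hops].phys_addr = phys_addr;
	nr_hops++;
	return hops[nr_hops - 1].shadow_addr;
}

/* Translate a shadow PTE address into the matching device physical address. */
static uint64_t get_phys_addr(uint64_t shadow_addr)
{
	uint64_t page_mask = HOP_TABLE_SIZE - 1;
	uint64_t hop_base = shadow_addr & ~page_mask;
	uint64_t offset = shadow_addr & page_mask;

	for (int i = 0; i < nr_hops; i++)
		if (hops[i].shadow_addr == hop_base)
			return hops[i].phys_addr + offset;
	return 0;  /* not found */
}

int main(void)
{
	uint64_t hop = alloc_hop(0x80000000ULL);

	if (!hop)
		return 1;
	/* A PTE 16 entries (of 8 bytes each) into the hop. */
	printf("device addr: %#llx\n",
	       (unsigned long long)get_phys_addr(hop + 16 * 8));
	return 0;
}
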
shadow_addr       701 drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c static void copy_wqe_to_shadow(struct hinic_wq *wq, void *shadow_addr,
shadow_addr       712 drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c 		memcpy(shadow_addr, wqebb_addr, wq->wqebb_size);
shadow_addr       714 drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c 		shadow_addr += wq->wqebb_size;
shadow_addr       718 drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c static void copy_wqe_from_shadow(struct hinic_wq *wq, void *shadow_addr,
shadow_addr       729 drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c 		memcpy(wqebb_addr, shadow_addr, wq->wqebb_size);
shadow_addr       730 drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c 		shadow_addr += wq->wqebb_size;
shadow_addr       772 drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c 		void *shadow_addr = &wq->shadow_wqe[curr_pg * wq->max_wqe_size];
shadow_addr       774 drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c 		copy_wqe_to_shadow(wq, shadow_addr, num_wqebbs, *prod_idx);
shadow_addr       777 drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c 		return shadow_addr;
shadow_addr       842 drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c 		void *shadow_addr = &wq->shadow_wqe[curr_pg * wq->max_wqe_size];
shadow_addr       844 drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c 		copy_wqe_to_shadow(wq, shadow_addr, num_wqebbs, *cons_idx);
shadow_addr       845 drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c 		return shadow_addr;
shadow_addr       888 drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c 	void *shadow_addr;
shadow_addr       896 drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c 		shadow_addr = &wq->shadow_wqe[curr_pg * wq->max_wqe_size];
shadow_addr       898 drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c 		copy_wqe_from_shadow(wq, shadow_addr, num_wqebbs, prod_idx);
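
The hinic hits are the shadow-WQE helpers: a work-queue entry whose WQEBBs wrap past the end of a queue page is copied block by block into a contiguous shadow buffer (copy_wqe_to_shadow) so callers can treat it as one flat WQE, and copy_wqe_from_shadow writes it back. A standalone model of both directions, with a power-of-two ring and an index mask assumed in place of the driver's own paging and indexing:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define WQEBB_SIZE 64                 /* bytes per work-queue element block */
#define Q_DEPTH    8                  /* WQEBBs in the ring (power of two)  */

static uint8_t ring[Q_DEPTH][WQEBB_SIZE];         /* the "real" queue memory */
static uint8_t shadow_wqe[Q_DEPTH * WQEBB_SIZE];  /* contiguous shadow area  */

/* Copy num_wqebbs blocks starting at idx into the shadow, wrapping the ring. */
static void copy_wqe_to_shadow(void *shadow_addr, int num_wqebbs, unsigned int idx)
{
	uint8_t *dst = shadow_addr;

	for (int i = 0; i < num_wqebbs; i++) {
		void *wqebb_addr = ring[idx & (Q_DEPTH - 1)];

		memcpy(dst, wqebb_addr, WQEBB_SIZE);
		dst += WQEBB_SIZE;
		idx++;
	}
}

/* Copy the (possibly modified) shadow copy back into the ring. */
static void copy_wqe_from_shadow(const void *shadow_addr, int num_wqebbs, unsigned int idx)
{
	const uint8_t *src = shadow_addr;

	for (int i = 0; i < num_wqebbs; i++) {
		void *wqebb_addr = ring[idx & (Q_DEPTH - 1)];

		memcpy(wqebb_addr, src, WQEBB_SIZE);
		src += WQEBB_SIZE;
		idx++;
	}
}

int main(void)
{
	/* A 3-WQEBB entry starting at index 6 wraps to indices 6, 7, 0. */
	for (int i = 0; i < Q_DEPTH; i++)
		memset(ring[i], 'a' + i, WQEBB_SIZE);
	copy_wqe_to_shadow(shadow_wqe, 3, 6);
	printf("%c %c %c\n", shadow_wqe[0], shadow_wqe[WQEBB_SIZE], shadow_wqe[2 * WQEBB_SIZE]);
	copy_wqe_from_shadow(shadow_wqe, 3, 6);
	return 0;
}
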
shadow_addr        64 mm/kasan/generic.c 	u8 *shadow_addr = (u8 *)kasan_mem_to_shadow((void *)addr);
shadow_addr        71 mm/kasan/generic.c 		return *shadow_addr || memory_is_poisoned_1(addr + size - 1);
shadow_addr        78 mm/kasan/generic.c 	u16 *shadow_addr = (u16 *)kasan_mem_to_shadow((void *)addr);
shadow_addr        82 mm/kasan/generic.c 		return *shadow_addr || memory_is_poisoned_1(addr + 15);
shadow_addr        84 mm/kasan/generic.c 	return *shadow_addr;
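
These mm/kasan/generic.c hits are the fast-path poison checks. In generic KASAN each shadow byte describes an 8-byte granule: 0 means fully accessible, 1..7 means only that many leading bytes are valid, and other values mark poison, so a 1-byte access is bad when the byte's offset inside its granule is >= the shadow value. A userspace sketch of that rule, with a tiny flat shadow array assumed in place of kasan_mem_to_shadow():

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define SHADOW_SCALE_SHIFT 3                     /* one shadow byte per 8 bytes */
#define MEM_SIZE           64

static int8_t shadow[MEM_SIZE >> SHADOW_SCALE_SHIFT];

/* Model of kasan_mem_to_shadow(): map an offset in "memory" to its shadow byte. */
static int8_t *mem_to_shadow(unsigned long addr)
{
	return &shadow[addr >> SHADOW_SCALE_SHIFT];
}

/* Model of memory_is_poisoned_1(): is a 1-byte access at addr invalid? */
static bool memory_is_poisoned_1(unsigned long addr)
{
	int8_t shadow_value = *mem_to_shadow(addr);

	if (shadow_value) {
		/* Offset of the accessed byte within its 8-byte granule. */
		int8_t last_accessible_byte = addr & ((1 << SHADOW_SCALE_SHIFT) - 1);

		return last_accessible_byte >= shadow_value;
	}
	return false;
}

int main(void)
{
	shadow[1] = 5;       /* granule 8..15: bytes 8..12 valid, 13..15 poisoned */
	printf("addr 12: %d\n", memory_is_poisoned_1(12));   /* 0: valid    */
	printf("addr 13: %d\n", memory_is_poisoned_1(13));   /* 1: poisoned */
	return 0;
}
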
shadow_addr        49 mm/kasan/generic_report.c 	u8 *shadow_addr;
shadow_addr        51 mm/kasan/generic_report.c 	shadow_addr = (u8 *)kasan_mem_to_shadow(info->first_bad_addr);
shadow_addr        57 mm/kasan/generic_report.c 	if (*shadow_addr > 0 && *shadow_addr <= KASAN_SHADOW_SCALE_SIZE - 1)
shadow_addr        58 mm/kasan/generic_report.c 		shadow_addr++;
shadow_addr        60 mm/kasan/generic_report.c 	switch (*shadow_addr) {
shadow_addr       128 mm/kasan/kasan.h static inline const void *kasan_shadow_to_mem(const void *shadow_addr)
shadow_addr       130 mm/kasan/kasan.h 	return (void *)(((unsigned long)shadow_addr - KASAN_SHADOW_OFFSET)
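
This last hit, kasan_shadow_to_mem(), is the inverse of kasan_mem_to_shadow(): with one shadow byte per 8 bytes of memory, the two directions are a shift by KASAN_SHADOW_SCALE_SHIFT (3) plus or minus KASAN_SHADOW_OFFSET. A small sketch of both directions, assuming a 64-bit build; the offset shown is the x86_64 value and is only illustrative, since the real one is chosen per architecture and configuration:

#include <stdint.h>
#include <stdio.h>

#define SHADOW_SCALE_SHIFT 3                    /* 8 bytes of memory per shadow byte  */
#define SHADOW_OFFSET      0xdffffc0000000000UL /* illustrative; arch/config specific */

/* mem -> shadow: like kasan_mem_to_shadow() */
static unsigned long mem_to_shadow(unsigned long addr)
{
	return (addr >> SHADOW_SCALE_SHIFT) + SHADOW_OFFSET;
}

/* shadow -> mem: like the kasan_shadow_to_mem() line shown above */
static unsigned long shadow_to_mem(unsigned long shadow_addr)
{
	return (shadow_addr - SHADOW_OFFSET) << SHADOW_SCALE_SHIFT;
}

int main(void)
{
	unsigned long addr = 0xffff888012345678UL;
	unsigned long shadow = mem_to_shadow(addr);

	printf("mem    %#lx\n", addr);
	printf("shadow %#lx\n", shadow);
	printf("back   %#lx\n", shadow_to_mem(shadow));  /* low 3 bits are lost */
	return 0;
}

The round trip only recovers the address rounded down to its 8-byte granule, which is all the report code above needs when it turns a shadow byte back into a memory region.
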