va 351 arch/alpha/include/asm/core_apecs.h unsigned long va; /* Effective VA of fault or miss. */
va 192 arch/alpha/include/asm/core_lca.h unsigned long va; /* virtual address register */
va 125 arch/alpha/include/asm/hwrpb.h unsigned long va;
va 41 arch/alpha/include/asm/mce.h unsigned long va; /* Effective VA of fault or miss. */
va 497 arch/alpha/kernel/core_mcpcia.c frame->va);
va 1135 arch/alpha/kernel/setup.c unsigned long count, va, pc;
va 1199 arch/alpha/kernel/setup.c unaligned[0].count, unaligned[0].pc, unaligned[0].va,
va 1200 arch/alpha/kernel/setup.c unaligned[1].count, unaligned[1].pc, unaligned[1].va,
va 428 arch/alpha/kernel/traps.c unsigned long count, va, pc;
va 437 arch/alpha/kernel/traps.c do_entUna(void * va, unsigned long opcode, unsigned long reg,
va 446 arch/alpha/kernel/traps.c unaligned[0].va = (unsigned long) va;
va 464 arch/alpha/kernel/traps.c : "r"(va), "0"(0));
va 480 arch/alpha/kernel/traps.c : "r"(va), "0"(0));
va 496 arch/alpha/kernel/traps.c : "r"(va), "0"(0));
va 524 arch/alpha/kernel/traps.c : "r"(va), "r"(una_reg(reg)), "0"(0));
va 548 arch/alpha/kernel/traps.c : "r"(va), "r"(una_reg(reg)), "0"(0));
va 572 arch/alpha/kernel/traps.c : "r"(va), "r"(una_reg(reg)), "0"(0));
va 579 arch/alpha/kernel/traps.c pc, va, opcode, reg);
va 722 arch/alpha/kernel/traps.c do_entUnaUser(void __user * va, unsigned long opcode,
va 739 arch/alpha/kernel/traps.c regs->pc - 4, va, opcode, reg);
va 751 arch/alpha/kernel/traps.c if ((unsigned long)va >= TASK_SIZE)
va 755 arch/alpha/kernel/traps.c unaligned[1].va = (unsigned long)va;
va 787 arch/alpha/kernel/traps.c : "r"(va), "0"(0));
va 803 arch/alpha/kernel/traps.c : "r"(va), "0"(0));
va 819 arch/alpha/kernel/traps.c : "r"(va), "0"(0));
va 835 arch/alpha/kernel/traps.c : "r"(va), "0"(0));
va 851 arch/alpha/kernel/traps.c : "r"(va), "0"(0));
va 879 arch/alpha/kernel/traps.c : "r"(va), "r"(*reg_addr), "0"(0));
va 907 arch/alpha/kernel/traps.c : "r"(va), "r"(*reg_addr), "0"(0));
va 935 arch/alpha/kernel/traps.c : "r"(va), "r"(*reg_addr), "0"(0));
va 956 arch/alpha/kernel/traps.c if ((unsigned long)va >= TASK_SIZE)
va 961 arch/alpha/kernel/traps.c if (find_vma(mm, (unsigned long)va))
va 967 arch/alpha/kernel/traps.c send_sig_fault(SIGSEGV, si_code, va, 0, current);
va 972 arch/alpha/kernel/traps.c send_sig_fault(SIGBUS, BUS_ADRALN, va, 0, current);
va 108 arch/alpha/math-emu/math.c unsigned long res, va, vb, vc, swcr, fpcr;
va 130 arch/alpha/math-emu/math.c va = alpha_read_fp_reg_s(fa);
va 133 arch/alpha/math-emu/math.c FP_UNPACK_SP(SA, &va);
va 160 arch/alpha/math-emu/math.c va = alpha_read_fp_reg(fa);
va 164 arch/alpha/math-emu/math.c FP_UNPACK_RAW_DP(DA, &va);
va 195 arch/alpha/math-emu/math.c FP_UNPACK_DP(DA, &va);
va 163 arch/alpha/mm/init.c - crb->map[0].va);
va 166 arch/alpha/mm/init.c - crb->map[0].va);
va 212 arch/alpha/mm/init.c crb->map[i].va = vaddr;
va 15 arch/arm/include/asm/edac.h static inline void edac_atomic_scrub(void *va, u32 size)
va 18 arch/arm/include/asm/edac.h unsigned int *virt_addr = va;
va 225 arch/arm/include/asm/kvm_mmu.h void *va = kmap_atomic_pfn(pfn);
va 227 arch/arm/include/asm/kvm_mmu.h kvm_flush_dcache_to_poc(va, PAGE_SIZE);
va 232 arch/arm/include/asm/kvm_mmu.h kunmap_atomic(va);
va 275 arch/arm/include/asm/kvm_mmu.h void *va = kmap_atomic_pfn(pfn);
va 276 arch/arm/include/asm/kvm_mmu.h void *end = va + PAGE_SIZE;
va 277 arch/arm/include/asm/kvm_mmu.h void *addr = va;
va 290 arch/arm/include/asm/kvm_mmu.h kunmap_atomic(va);
va 303 arch/arm/include/asm/kvm_mmu.h void *va = kmap_atomic(pte_page(pte));
va 305 arch/arm/include/asm/kvm_mmu.h kvm_flush_dcache_to_poc(va, PAGE_SIZE);
va 307 arch/arm/include/asm/kvm_mmu.h kunmap_atomic(va);
va 316 arch/arm/include/asm/kvm_mmu.h void *va = kmap_atomic_pfn(pfn);
va 318 arch/arm/include/asm/kvm_mmu.h kvm_flush_dcache_to_poc(va, PAGE_SIZE);
va 323 arch/arm/include/asm/kvm_mmu.h kunmap_atomic(va);
va 451 arch/arm/kernel/hw_breakpoint.c unsigned long va;
va 453 arch/arm/kernel/hw_breakpoint.c va = hw->address;
va 456 arch/arm/kernel/hw_breakpoint.c return (va >= TASK_SIZE) && ((va + len - 1) >= TASK_SIZE);
va 29 arch/arm/mach-davinci/include/mach/hardware.h #define io_v2p(va) ((va) - IO_OFFSET)
va 60 arch/arm/mach-omap1/irq.c void __iomem *va;
va 72 arch/arm/mach-omap1/irq.c return readl_relaxed(irq_banks[bank].va + offset);
va 76 arch/arm/mach-omap1/irq.c writel_relaxed(value, irq_banks[bank].va + offset);
va 82 arch/arm/mach-omap1/irq.c writel_relaxed(0x1, irq_banks[1].va + IRQ_CONTROL_REG_OFFSET);
va 84 arch/arm/mach-omap1/irq.c writel_relaxed(0x1, irq_banks[0].va + IRQ_CONTROL_REG_OFFSET);
va 146 arch/arm/mach-omap1/irq.c void __iomem *l1 = irq_banks[0].va;
va 147 arch/arm/mach-omap1/irq.c void __iomem *l2 = irq_banks[1].va;
va 223 arch/arm/mach-omap1/irq.c irq_banks[i].va = ioremap(irq_banks[i].base_reg, 0xff);
va 224 arch/arm/mach-omap1/irq.c if (WARN_ON(!irq_banks[i].va))
va 267 arch/arm/mach-omap1/irq.c omap_alloc_gc(irq_banks[i].va, irq_base + i * 32, 32);
va 52 arch/arm/mach-omap2/cm2xxx_3xxx.h return readl_relaxed(cm_base.va + module + idx);
va 57 arch/arm/mach-omap2/cm2xxx_3xxx.h writel_relaxed(val, cm_base.va + module + idx);
va 53 arch/arm/mach-omap2/cm33xx.c return readl_relaxed(cm_base.va + inst + idx);
va 59 arch/arm/mach-omap2/cm33xx.c writel_relaxed(val, cm_base.va + inst + idx);
va 669 arch/arm/mach-omap2/cm3xxx.c omap2_clk_legacy_provider_init(TI_CLKM_CM, cm_base.va +
va 49 arch/arm/mach-omap2/cm_common.c cm_base.va = cm;
va 50 arch/arm/mach-omap2/cm_common.c cm2_base.va = cm2;
va 349 arch/arm/mach-omap2/cm_common.c mem->va = data->mem + data->offset;
va 356 arch/arm/mach-omap2/cm_common.c (cm_base.va && cm2_base.va)))
va 117 arch/arm/mach-omap2/cminst44xx.c !_cm_bases[part].va);
va 118 arch/arm/mach-omap2/cminst44xx.c return readl_relaxed(_cm_bases[part].va + inst + idx);
va 126 arch/arm/mach-omap2/cminst44xx.c !_cm_bases[part].va);
va 127 arch/arm/mach-omap2/cminst44xx.c writel_relaxed(val, _cm_bases[part].va + inst + idx);
va 54 arch/arm/mach-omap2/omap_hwmod_reset.c void __iomem *va;
va 56 arch/arm/mach-omap2/omap_hwmod_reset.c va = omap_hwmod_get_mpu_rt_va(oh);
va 57 arch/arm/mach-omap2/omap_hwmod_reset.c if (!va)
va 60 arch/arm/mach-omap2/omap_hwmod_reset.c aess_enable_autogating(va);
va 527 arch/arm/mach-omap2/prcm-common.h void __iomem *va;
va 58 arch/arm/mach-omap2/prcm_mpu44xx.c prcm_mpu_base.va = prcm_mpu;
va 55 arch/arm/mach-omap2/prm2xxx_3xxx.h return readl_relaxed(prm_base.va + module + idx);
va 60 arch/arm/mach-omap2/prm2xxx_3xxx.h writel_relaxed(val, prm_base.va + module + idx);
va 33 arch/arm/mach-omap2/prm33xx.c return readl_relaxed(prm_base.va + inst + idx);
va 39 arch/arm/mach-omap2/prm33xx.c writel_relaxed(val, prm_base.va + inst + idx);
va 676 arch/arm/mach-omap2/prm3xxx.c prm_base.va + OMAP3430_IVA2_MOD);
va 98 arch/arm/mach-omap2/prm44xx.c return readl_relaxed(prm_base.va + inst + reg);
va 104 arch/arm/mach-omap2/prm44xx.c writel_relaxed(val, prm_base.va + inst + reg);
va 318 arch/arm/mach-omap2/prm_common.c irq_setup->base_irq + i * 32, prm_base.va,
va 355 arch/arm/mach-omap2/prm_common.c prm_base.va = prm;
va 761 arch/arm/mach-omap2/prm_common.c prm_base.va = data->mem + data->offset;
va 62 arch/arm/mach-omap2/prminst44xx.c !_prm_bases[part].va);
va 63 arch/arm/mach-omap2/prminst44xx.c return readl_relaxed(_prm_bases[part].va + inst + idx);
va 71 arch/arm/mach-omap2/prminst44xx.c !_prm_bases[part].va);
va 72 arch/arm/mach-omap2/prminst44xx.c writel_relaxed(val, _prm_bases[part].va + inst + idx);
va 58 arch/arm/mm/cache-xsc3l2.c static inline void l2_unmap_va(unsigned long va)
va 61 arch/arm/mm/cache-xsc3l2.c if (va != -1)
va 62 arch/arm/mm/cache-xsc3l2.c kunmap_atomic((void *)va);
va 69 arch/arm/mm/cache-xsc3l2.c unsigned long va = prev_va & PAGE_MASK;
va 78 arch/arm/mm/cache-xsc3l2.c va = (unsigned long)kmap_atomic_pfn(pa >> PAGE_SHIFT);
va 80 arch/arm/mm/cache-xsc3l2.c return va + (pa_offset >> (32 - PAGE_SHIFT));
va 54 arch/arm/mm/flush.c unsigned long va = FLUSH_ALIAS_START + (CACHE_COLOUR(vaddr) << PAGE_SHIFT);
va 58 arch/arm/mm/flush.c set_top_pte(va, pfn_pte(pfn, PAGE_KERNEL));
va 59 arch/arm/mm/flush.c to = va + offset;
va 26 arch/arm/mm/mm.h static inline void set_top_pte(unsigned long va, pte_t pte)
va 28 arch/arm/mm/mm.h pte_t *ptep = pte_offset_kernel(top_pmd, va);
va 30 arch/arm/mm/mm.h local_flush_tlb_kernel_page(va);
va 33 arch/arm/mm/mm.h static inline pte_t get_top_pte(unsigned long va)
va 35 arch/arm/mm/mm.h pte_t *ptep = pte_offset_kernel(top_pmd, va);
va 1613 arch/arm/mm/mmu.c unsigned long va = fix_to_virt(__end_of_permanent_fixed_addresses - 1);
va 1616 arch/arm/mm/mmu.c pmd_clear(fixmap_pmd(va));
va 1617 arch/arm/mm/mmu.c local_flush_tlb_kernel_page(va);
va 36 arch/arm/plat-samsung/pm-debug.c va_list va;
va 39 arch/arm/plat-samsung/pm-debug.c va_start(va, fmt);
va 40 arch/arm/plat-samsung/pm-debug.c vsnprintf(buff, sizeof(buff), fmt, va);
va 41 arch/arm/plat-samsung/pm-debug.c va_end(va);
va 33 arch/arm64/include/asm/kasan.h #define _KASAN_SHADOW_START(va) (KASAN_SHADOW_END - (1UL << ((va) - KASAN_SHADOW_SCALE_SHIFT)))
va 312 arch/arm64/include/asm/kvm_mmu.h void *va = page_address(pfn_to_page(pfn));
va 323 arch/arm64/include/asm/kvm_mmu.h kvm_flush_dcache_to_poc(va, size);
va 334 arch/arm64/include/asm/kvm_mmu.h void *va = page_address(pfn_to_page(pfn));
va 336 arch/arm64/include/asm/kvm_mmu.h invalidate_icache_range((unsigned long)va,
va 337 arch/arm64/include/asm/kvm_mmu.h (unsigned long)va + size);
va 47 arch/arm64/include/asm/memory.h #define _PAGE_OFFSET(va) (-(UL(1) << (va)))
va 67 arch/arm64/include/asm/memory.h #define _PAGE_END(va) (-(UL(1) << ((va) - 1)))
va 338 arch/arm64/kernel/hw_breakpoint.c unsigned long va;
va 340 arch/arm64/kernel/hw_breakpoint.c va = hw->address;
va 343 arch/arm64/kernel/hw_breakpoint.c return (va >= TASK_SIZE) && ((va + len - 1) >= TASK_SIZE);
va 24 arch/ia64/include/asm/tlbflush.h extern int ia64_itr_entry(u64 target_mask, u64 va, u64 pte, u64 log_size);
va 30 arch/ia64/kernel/brl_emu.c #define unimplemented_virtual_address(va) ( \
va 31 arch/ia64/kernel/brl_emu.c ((va) & local_cpu_data->unimpl_va_mask) != 0 && \
va 32 arch/ia64/kernel/brl_emu.c ((va) & local_cpu_data->unimpl_va_mask) != local_cpu_data->unimpl_va_mask \
va 970 arch/ia64/kernel/mca.c ia64_va va;
va 998 arch/ia64/kernel/mca.c va.l = r12;
va 999 arch/ia64/kernel/mca.c if (va.f.reg == 0) {
va 1000 arch/ia64/kernel/mca.c va.f.reg = 7;
va 1001 arch/ia64/kernel/mca.c r12 = va.l;
va 1003 arch/ia64/kernel/mca.c va.l = r13;
va 1004 arch/ia64/kernel/mca.c if (va.f.reg == 0) {
va 1005 arch/ia64/kernel/mca.c va.f.reg = 7;
va 1006 arch/ia64/kernel/mca.c r13 = va.l;
va 1010 arch/ia64/kernel/mca.c va.l = ar_bspstore;
va 1011 arch/ia64/kernel/mca.c if (va.f.reg == 0) {
va 1012 arch/ia64/kernel/mca.c va.f.reg = 7;
va 1013 arch/ia64/kernel/mca.c ar_bspstore = va.l;
va 1015 arch/ia64/kernel/mca.c va.l = ar_bsp;
va 1016 arch/ia64/kernel/mca.c if (va.f.reg == 0) {
va 1017 arch/ia64/kernel/mca.c va.f.reg = 7;
va 1018 arch/ia64/kernel/mca.c ar_bsp = va.l;
va 1062 arch/ia64/kernel/mca.c va.p = old_bspstore;
va 1063 arch/ia64/kernel/mca.c if (va.f.reg < 5) {
va 422 arch/ia64/mm/tlb.c static int is_tr_overlap(struct ia64_tr_entry *p, u64 va, u64 log_size)
va 426 arch/ia64/mm/tlb.c u64 va_rr = ia64_get_rr(va);
va 428 arch/ia64/mm/tlb.c u64 va_end = va + (1<<log_size) - 1;
va 435 arch/ia64/mm/tlb.c if (va > tr_end || p->ifa > va_end)
va 455 arch/ia64/mm/tlb.c int ia64_itr_entry(u64 target_mask, u64 va, u64 pte, u64 log_size)
va 476 arch/ia64/mm/tlb.c if (is_tr_overlap(p, va, log_size)) {
va 488 arch/ia64/mm/tlb.c if (is_tr_overlap(p, va, log_size)) {
va 526 arch/ia64/mm/tlb.c ia64_itr(0x1, i, va, pte, log_size);
va 529 arch/ia64/mm/tlb.c p->ifa = va;
va 532 arch/ia64/mm/tlb.c p->rr = ia64_get_rr(va);
va 535 arch/ia64/mm/tlb.c ia64_itr(0x2, i, va, pte, log_size);
va 538 arch/ia64/mm/tlb.c p->ifa = va;
va 541 arch/ia64/mm/tlb.c p->rr = ia64_get_rr(va);
va 41 arch/m68k/include/asm/openprom.h char * (*v2_dumb_mem_alloc)(char *va, unsigned sz);
va 42 arch/m68k/include/asm/openprom.h void (*v2_dumb_mem_free)(char *va, unsigned sz);
va 177 arch/m68k/include/asm/openprom.h void (*pv_setctxt)(int ctxt, char *va, int pmeg);
va 245 arch/m68k/include/asm/openprom.h void (*pv_setctxt)(int ctxt, char *va, int pmeg);
va 57 arch/microblaze/include/asm/mmu.h extern void _tlbie(unsigned long va); /* invalidate a TLB entry */
va 29 arch/microblaze/include/asm/mmu_context_mm.h # define CTX_TO_VSID(ctx, va) (((ctx) * (897 * 16) + ((va) >> 28) * 0x111) \
va 531 arch/microblaze/include/asm/pgtable.h int map_page(unsigned long va, phys_addr_t pa, int flags);
va 135 arch/microblaze/mm/pgtable.c int map_page(unsigned long va, phys_addr_t pa, int flags)
va 141 arch/microblaze/mm/pgtable.c pd = pmd_offset(pgd_offset_k(va), va);
va 143 arch/microblaze/mm/pgtable.c pg = pte_alloc_kernel(pd, va); /* from powerpc - pgtable.c */
va 148 arch/microblaze/mm/pgtable.c set_pte_at(&init_mm, va, pg, pfn_pte(pa >> PAGE_SHIFT,
va 151 arch/microblaze/mm/pgtable.c _tlbie(va);
va 9 arch/mips/include/asm/edac.h static inline void edac_atomic_scrub(void *va, u32 size)
va 11 arch/mips/include/asm/edac.h unsigned long *virt_addr = va;
va 286 arch/mips/include/asm/kvm_host.h #define TLB_LO_IDX(x, va) (((va) >> PAGE_SHIFT) & 1)
va 287 arch/mips/include/asm/kvm_host.h #define TLB_IS_VALID(x, va) ((x).tlb_lo[TLB_LO_IDX(x, va)] & ENTRYLO_V)
va 288 arch/mips/include/asm/kvm_host.h #define TLB_IS_DIRTY(x, va) ((x).tlb_lo[TLB_LO_IDX(x, va)] & ENTRYLO_D)
va 1806 arch/mips/kvm/emulate.c unsigned long va;
va 1827 arch/mips/kvm/emulate.c va = arch->gprs[base] + offset;
va 1889 arch/mips/kvm/emulate.c curr_pc, va, run, vcpu, cause);
va 1902 arch/mips/kvm/emulate.c curr_pc, va, run, vcpu, cause);
va 1906 arch/mips/kvm/emulate.c curr_pc, va, run, vcpu, cause);
va 2765 arch/mips/kvm/emulate.c unsigned long va = vcpu->arch.host_cp0_badvaddr;
va 2778 arch/mips/kvm/emulate.c (va & VPN2_MASK) |
va 2798 arch/mips/kvm/emulate.c if (!TLB_IS_VALID(*tlb, va)) {
va 2817 arch/mips/kvm/emulate.c if (kvm_mips_handle_mapped_seg_tlb_fault(vcpu, tlb, va,
va 2820 arch/mips/kvm/emulate.c __func__, va, index, vcpu,
va 166 arch/mips/kvm/tlb.c int kvm_mips_host_tlb_inv(struct kvm_vcpu *vcpu, unsigned long va,
va 181 arch/mips/kvm/tlb.c idx_user = _kvm_mips_host_tlb_inv((va & VPN2_MASK) |
va 184 arch/mips/kvm/tlb.c idx_kernel = _kvm_mips_host_tlb_inv((va & VPN2_MASK) |
va 201 arch/mips/kvm/tlb.c __func__, (va & VPN2_MASK) |
va 205 arch/mips/kvm/tlb.c __func__, (va & VPN2_MASK) |
va 251 arch/mips/kvm/tlb.c int kvm_vz_host_tlb_inv(struct kvm_vcpu *vcpu, unsigned long va)
va 264 arch/mips/kvm/tlb.c idx = _kvm_mips_host_tlb_inv((va & VPN2_MASK) |
va 283 arch/mips/kvm/tlb.c __func__, (va & VPN2_MASK) |
va 1072 arch/mips/kvm/vz.c unsigned long va, curr_pc;
va 1092 arch/mips/kvm/vz.c va = arch->gprs[base] + offset;
va 1103 arch/mips/kvm/vz.c flush_icache_line_indexed(va);
va 1106 arch/mips/kvm/vz.c flush_dcache_line_indexed(va);
va 1475 arch/mips/math-emu/cp1emu.c u32 __user *va;
va 1480 arch/mips/math-emu/cp1emu.c va = (void __user *) (xcp->regs[MIPSInst_FR(ir)] +
va 1484 arch/mips/math-emu/cp1emu.c if (!access_ok(va, sizeof(u32))) {
va 1486 arch/mips/math-emu/cp1emu.c *fault_addr = va;
va 1489 arch/mips/math-emu/cp1emu.c if (__get_user(val, va)) {
va 1491 arch/mips/math-emu/cp1emu.c *fault_addr = va;
va 1498 arch/mips/math-emu/cp1emu.c va = (void __user *) (xcp->regs[MIPSInst_FR(ir)] +
va 1504 arch/mips/math-emu/cp1emu.c if (!access_ok(va, sizeof(u32))) {
va 1506 arch/mips/math-emu/cp1emu.c *fault_addr = va;
va 1509 arch/mips/math-emu/cp1emu.c if (put_user(val, va)) {
va 1511 arch/mips/math-emu/cp1emu.c *fault_addr = va;
va 1572 arch/mips/math-emu/cp1emu.c u64 __user *va;
va 1577 arch/mips/math-emu/cp1emu.c va = (void __user *) (xcp->regs[MIPSInst_FR(ir)] +
va 1581 arch/mips/math-emu/cp1emu.c if (!access_ok(va, sizeof(u64))) {
va 1583 arch/mips/math-emu/cp1emu.c *fault_addr = va;
va 1586 arch/mips/math-emu/cp1emu.c if (__get_user(val, va)) {
va 1588 arch/mips/math-emu/cp1emu.c *fault_addr = va;
va 1595 arch/mips/math-emu/cp1emu.c va = (void __user *) (xcp->regs[MIPSInst_FR(ir)] +
va 1600 arch/mips/math-emu/cp1emu.c if (!access_ok(va, sizeof(u64))) {
va 1602 arch/mips/math-emu/cp1emu.c *fault_addr = va;
va 1605 arch/mips/math-emu/cp1emu.c if (__put_user(val, va)) {
va 1607 arch/mips/math-emu/cp1emu.c *fault_addr = va;
va 324 arch/mips/mm/cerr-sb1.c unsigned long long taglo, va;
va 357 arch/mips/mm/cerr-sb1.c va = (taglo & 0xC0000FFFFFFFE000ULL) | addr;
va 359 arch/mips/mm/cerr-sb1.c va |= 0x3FFFF00000000000ULL;
va 377 arch/mips/mm/cerr-sb1.c way, va, valid, taghi, taglo);
va 90 arch/openrisc/kernel/dma.c unsigned long va;
va 100 arch/openrisc/kernel/dma.c va = (unsigned long)page;
va 106 arch/openrisc/kernel/dma.c if (walk_page_range(&init_mm, va, va + size, &set_nocache_walk_ops,
va 112 arch/openrisc/kernel/dma.c return (void *)va;
va 119 arch/openrisc/kernel/dma.c unsigned long va = (unsigned long)vaddr;
va 122 arch/openrisc/kernel/dma.c WARN_ON(walk_page_range(&init_mm, va, va + size,
va 5 arch/parisc/include/asm/special_insns.h #define lpa(va) ({ \
va 11 arch/parisc/include/asm/special_insns.h : "r" (va) \
va 17 arch/parisc/include/asm/special_insns.h #define lpa_user(va) ({ \
va 23 arch/parisc/include/asm/special_insns.h : "r" (va) \
va 140 arch/powerpc/include/asm/book3s/32/pgtable.h int map_kernel_page(unsigned long va, phys_addr_t pa, pgprot_t prot);
va 231 arch/powerpc/include/asm/book3s/32/pgtable.h extern int flush_hash_pages(unsigned context, unsigned long va,
va 235 arch/powerpc/include/asm/book3s/32/pgtable.h extern void add_hash_page(unsigned context, unsigned long va,
va 338 arch/powerpc/include/asm/book3s/64/pgtable.h #define pte_iterate_hashed_subpages(rpte, psize, va, index, shift) \
va 19 arch/powerpc/include/asm/edac.h static __inline__ void edac_atomic_scrub(void *va, u32 size)
va 21 arch/powerpc/include/asm/edac.h unsigned int *virt_addr = va;
va 776 arch/powerpc/include/asm/kvm_ppc.h unsigned long va);
va 67 arch/powerpc/include/asm/nohash/32/pgtable.h int map_kernel_page(unsigned long va, phys_addr_t pa, pgprot_t prot);
va 264 arch/powerpc/kernel/vecemu.c unsigned int va, vb, vc, vd;
va 272 arch/powerpc/kernel/vecemu.c va = (instr >> 16) & 0x1f;
va 281 arch/powerpc/kernel/vecemu.c vaddfp(&vrs[vd], &vrs[va], &vrs[vb]);
va 284 arch/powerpc/kernel/vecemu.c vsubfp(&vrs[vd], &vrs[va], &vrs[vb]);
va 324 arch/powerpc/kernel/vecemu.c vrs[vd].u[i] = ctuxs(vrs[vb].u[i], va,
va 329 arch/powerpc/kernel/vecemu.c vrs[vd].u[i] = ctsxs(vrs[vb].u[i], va,
va 337 arch/powerpc/kernel/vecemu.c vmaddfp(&vrs[vd], &vrs[va], &vrs[vb], &vrs[vc]);
va 340 arch/powerpc/kernel/vecemu.c vnmsubfp(&vrs[vd], &vrs[va], &vrs[vb], &vrs[vc]);
va 544 arch/powerpc/kvm/book3s_64_mmu.c static void kvmppc_mmu_book3s_64_tlbie(struct kvm_vcpu *vcpu, ulong va,
va 551 arch/powerpc/kvm/book3s_64_mmu.c dprintk("KVM MMU: tlbie(0x%lx)\n", va);
va 561 arch/powerpc/kvm/book3s_64_mmu.c if (va & 1) { /* L bit */
va 562 arch/powerpc/kvm/book3s_64_mmu.c if ((va & 0xf000) == 0x1000)
va 574 arch/powerpc/kvm/book3s_64_mmu.c kvmppc_mmu_pte_vflush(v, va >> 12, mask);
va 1201 arch/powerpc/kvm/book3s_64_mmu_hv.c void kvmppc_unpin_guest_page(struct kvm *kvm, void *va, unsigned long gpa,
va 1204 arch/powerpc/kvm/book3s_64_mmu_hv.c struct page *page = virt_to_page(va);
va 496 arch/powerpc/kvm/book3s_hv.c void *va;
va 514 arch/powerpc/kvm/book3s_hv.c va = kvmppc_pin_guest_page(kvm, vpa, &nb);
va 515 arch/powerpc/kvm/book3s_hv.c if (va == NULL)
va 518 arch/powerpc/kvm/book3s_hv.c len = be16_to_cpu(((struct reg_vpa *)va)->length.hword);
va 520 arch/powerpc/kvm/book3s_hv.c len = be32_to_cpu(((struct reg_vpa *)va)->length.word);
va 521 arch/powerpc/kvm/book3s_hv.c kvmppc_unpin_guest_page(kvm, va, vpa, false);
va 610 arch/powerpc/kvm/book3s_hv.c void *va;
va 625 arch/powerpc/kvm/book3s_hv.c va = NULL;
va 628 arch/powerpc/kvm/book3s_hv.c va = kvmppc_pin_guest_page(kvm, gpa, &nb);
va 633 arch/powerpc/kvm/book3s_hv.c if (va)
va 634 arch/powerpc/kvm/book3s_hv.c kvmppc_unpin_guest_page(kvm, va, gpa, false);
va 638 arch/powerpc/kvm/book3s_hv.c if (va && nb < vpap->len) {
va 644 arch/powerpc/kvm/book3s_hv.c kvmppc_unpin_guest_page(kvm, va, gpa, false);
va 645 arch/powerpc/kvm/book3s_hv.c va = NULL;
va 651 arch/powerpc/kvm/book3s_hv.c vpap->pinned_addr = va;
va 653 arch/powerpc/kvm/book3s_hv.c if (va)
va 654 arch/powerpc/kvm/book3s_hv.c vpap->pinned_end = va + vpap->len;
va 687 arch/powerpc/kvm/book3s_hv_rm_mmu.c unsigned long va)
va 32 arch/powerpc/kvm/trace_pr.h TP_PROTO(int rflags, ulong hpteg, ulong va, kvm_pfn_t hpaddr,
va 34 arch/powerpc/kvm/trace_pr.h TP_ARGS(rflags, hpteg, va, hpaddr, orig_pte),
va 41 arch/powerpc/kvm/trace_pr.h __field( unsigned long, va )
va 51 arch/powerpc/kvm/trace_pr.h __entry->va = va;
va 58 arch/powerpc/kvm/trace_pr.h __entry->hpteg, __entry->va, __entry->vpage, __entry->hpaddr)
va 182 arch/powerpc/math-emu/math_efp.c union dw_union vc, va, vb;
va 201 arch/powerpc/math-emu/math_efp.c va.wp[0] = current->thread.evr[fa];
va 202 arch/powerpc/math-emu/math_efp.c va.wp[1] = regs->gpr[fa];
va 210 arch/powerpc/math-emu/math_efp.c pr_debug("va: %08x %08x\n", va.wp[0], va.wp[1]);
va 220 arch/powerpc/math-emu/math_efp.c FP_UNPACK_SP(SA, va.wp + 1);
va 225 arch/powerpc/math-emu/math_efp.c FP_UNPACK_SP(SA, va.wp + 1);
va 234 arch/powerpc/math-emu/math_efp.c vc.wp[1] = va.wp[1] & ~SIGN_BIT_S;
va 238 arch/powerpc/math-emu/math_efp.c vc.wp[1] = va.wp[1] | SIGN_BIT_S;
va 242 arch/powerpc/math-emu/math_efp.c vc.wp[1] = va.wp[1] ^ SIGN_BIT_S;
va 348 arch/powerpc/math-emu/math_efp.c FP_UNPACK_DP(DA, va.dp);
va 353 arch/powerpc/math-emu/math_efp.c FP_UNPACK_DP(DA, va.dp);
va 364 arch/powerpc/math-emu/math_efp.c vc.dp[0] = va.dp[0] & ~SIGN_BIT_D;
va 368 arch/powerpc/math-emu/math_efp.c vc.dp[0] = va.dp[0] | SIGN_BIT_D;
va 372 arch/powerpc/math-emu/math_efp.c vc.dp[0] = va.dp[0] ^ SIGN_BIT_D;
va 493 arch/powerpc/math-emu/math_efp.c FP_UNPACK_SP(SA0, va.wp);
va 494 arch/powerpc/math-emu/math_efp.c FP_UNPACK_SP(SA1, va.wp + 1);
va 500 arch/powerpc/math-emu/math_efp.c FP_UNPACK_SP(SA0, va.wp);
va 501 arch/powerpc/math-emu/math_efp.c FP_UNPACK_SP(SA1, va.wp + 1);
va 516 arch/powerpc/math-emu/math_efp.c vc.wp[0] = va.wp[0] & ~SIGN_BIT_S;
va 517 arch/powerpc/math-emu/math_efp.c vc.wp[1] = va.wp[1] & ~SIGN_BIT_S;
va 521 arch/powerpc/math-emu/math_efp.c vc.wp[0] = va.wp[0] | SIGN_BIT_S;
va 522 arch/powerpc/math-emu/math_efp.c vc.wp[1] = va.wp[1] | SIGN_BIT_S;
va 526 arch/powerpc/math-emu/math_efp.c vc.wp[0] = va.wp[0] ^ SIGN_BIT_S;
va 527 arch/powerpc/math-emu/math_efp.c vc.wp[1] = va.wp[1] ^ SIGN_BIT_S;
va 688 arch/powerpc/math-emu/math_efp.c pr_debug("va: %08x %08x\n", va.wp[0], va.wp[1]);
va 50 arch/powerpc/mm/book3s32/mmu.c phys_addr_t v_block_mapped(unsigned long va)
va 54 arch/powerpc/mm/book3s32/mmu.c if (va >= bat_addrs[b].start && va < bat_addrs[b].limit)
va 55 arch/powerpc/mm/book3s32/mmu.c return bat_addrs[b].phys + (va - bat_addrs[b].start);
va 146 arch/powerpc/mm/book3s64/hash_native.c unsigned long va;
va 157 arch/powerpc/mm/book3s64/hash_native.c va = vpn << VPN_SHIFT;
va 164 arch/powerpc/mm/book3s64/hash_native.c va &= ~(0xffffULL << 48);
va 169 arch/powerpc/mm/book3s64/hash_native.c va &= ~((1ul << (64 - 52)) - 1);
va 170 arch/powerpc/mm/book3s64/hash_native.c va |= ssize << 8;
va 172 arch/powerpc/mm/book3s64/hash_native.c va |= sllp << 5;
va 174 arch/powerpc/mm/book3s64/hash_native.c : : "r" (va), "r"(0), "i" (CPU_FTR_ARCH_206)
va 180 arch/powerpc/mm/book3s64/hash_native.c va &= ~((1ul << mmu_psize_defs[apsize].shift) - 1);
va 181 arch/powerpc/mm/book3s64/hash_native.c va |= penc << 12;
va 182 arch/powerpc/mm/book3s64/hash_native.c va |= ssize << 8;
va 190 arch/powerpc/mm/book3s64/hash_native.c va |= (vpn & 0xfe); /* AVAL */
va 191 arch/powerpc/mm/book3s64/hash_native.c va |= 1; /* L */
va 193 arch/powerpc/mm/book3s64/hash_native.c : : "r" (va), "r"(0), "i" (CPU_FTR_ARCH_206)
va 197 arch/powerpc/mm/book3s64/hash_native.c return va;
va 242 arch/powerpc/mm/book3s64/hash_native.c unsigned long va;
va 247 arch/powerpc/mm/book3s64/hash_native.c va = vpn << VPN_SHIFT;
va 254 arch/powerpc/mm/book3s64/hash_native.c va &= ~(0xffffULL << 48);
va 259 arch/powerpc/mm/book3s64/hash_native.c va &= ~((1ul << (64 - 52)) - 1);
va 260 arch/powerpc/mm/book3s64/hash_native.c va |= ssize << 8;
va 262 arch/powerpc/mm/book3s64/hash_native.c va |= sllp << 5;
va 264 arch/powerpc/mm/book3s64/hash_native.c : : "r" (va), "i" (CPU_FTR_ARCH_206)
va 270 arch/powerpc/mm/book3s64/hash_native.c va &= ~((1ul << mmu_psize_defs[apsize].shift) - 1);
va 271 arch/powerpc/mm/book3s64/hash_native.c va |= penc << 12;
va 272 arch/powerpc/mm/book3s64/hash_native.c va |= ssize << 8;
va 280 arch/powerpc/mm/book3s64/hash_native.c va |= (vpn & 0xfe);
va 281 arch/powerpc/mm/book3s64/hash_native.c va |= 1; /* L */
va 283 arch/powerpc/mm/book3s64/hash_native.c : : "r" (va), "i" (CPU_FTR_ARCH_206)
va 287 arch/powerpc/mm/book3s64/hash_native.c trace_tlbie(0, 1, va, 0, 0, 0, 0);
va 345 arch/powerpc/mm/book3s64/iommu_api.c u64 *va;
va 358 arch/powerpc/mm/book3s64/iommu_api.c va = &mem->hpas[entry];
va 359 arch/powerpc/mm/book3s64/iommu_api.c *hpa = (*va & MM_IOMMU_TABLE_GROUP_PAGE_MASK) | (ua & ~PAGE_MASK);
va 395 arch/powerpc/mm/book3s64/iommu_api.c void *va;
va 406 arch/powerpc/mm/book3s64/iommu_api.c va = &mem->hpas[entry];
va 408 arch/powerpc/mm/book3s64/iommu_api.c pa = (void *) vmalloc_to_phys(va);
va 151 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void __tlbiel_va(unsigned long va, unsigned long pid,
va 156 arch/powerpc/mm/book3s64/radix_tlb.c rb = va & ~(PPC_BITMASK(52, 63));
va 167 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void __tlbie_va(unsigned long va, unsigned long pid,
va 172 arch/powerpc/mm/book3s64/radix_tlb.c rb = va & ~(PPC_BITMASK(52, 63));
va 183 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void __tlbie_lpid_va(unsigned long va, unsigned long lpid,
va 188 arch/powerpc/mm/book3s64/radix_tlb.c rb = va & ~(PPC_BITMASK(52, 63));
va 200 arch/powerpc/mm/book3s64/radix_tlb.c static inline void fixup_tlbie_va(unsigned long va, unsigned long pid,
va 205 arch/powerpc/mm/book3s64/radix_tlb.c __tlbie_va(va, 0, ap, RIC_FLUSH_TLB);
va 210 arch/powerpc/mm/book3s64/radix_tlb.c __tlbie_va(va, pid, ap, RIC_FLUSH_TLB);
va 214 arch/powerpc/mm/book3s64/radix_tlb.c static inline void fixup_tlbie_va_range(unsigned long va, unsigned long pid,
va 224 arch/powerpc/mm/book3s64/radix_tlb.c __tlbie_va(va, pid, ap, RIC_FLUSH_TLB);
va 234 arch/powerpc/mm/book3s64/radix_tlb.c unsigned long va = ((1UL << 52) - 1);
va 243 arch/powerpc/mm/book3s64/radix_tlb.c __tlbie_va(va, pid, mmu_get_ap(MMU_PAGE_64K), RIC_FLUSH_TLB);
va 248 arch/powerpc/mm/book3s64/radix_tlb.c static inline void fixup_tlbie_lpid_va(unsigned long va, unsigned long lpid,
va 253 arch/powerpc/mm/book3s64/radix_tlb.c __tlbie_lpid_va(va, 0, ap, RIC_FLUSH_TLB);
va 258 arch/powerpc/mm/book3s64/radix_tlb.c __tlbie_lpid_va(va, lpid, ap, RIC_FLUSH_TLB);
va 268 arch/powerpc/mm/book3s64/radix_tlb.c unsigned long va = ((1UL << 52) - 1);
va 277 arch/powerpc/mm/book3s64/radix_tlb.c __tlbie_lpid_va(va, lpid, mmu_get_ap(MMU_PAGE_64K), RIC_FLUSH_TLB);
va 426 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void _tlbiel_va(unsigned long va, unsigned long pid,
va 432 arch/powerpc/mm/book3s64/radix_tlb.c __tlbiel_va(va, pid, ap, ric);
va 460 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void _tlbie_va(unsigned long va, unsigned long pid,
va 466 arch/powerpc/mm/book3s64/radix_tlb.c __tlbie_va(va, pid, ap, ric);
va 467 arch/powerpc/mm/book3s64/radix_tlb.c fixup_tlbie_va(va, pid, ap);
va 473 arch/powerpc/mm/book3s64/radix_tlb.c unsigned long va;
va 483 arch/powerpc/mm/book3s64/radix_tlb.c _tlbiel_va(t->va, t->pid, t->psize, RIC_FLUSH_TLB);
va 485 arch/powerpc/mm/book3s64/radix_tlb.c _tlbiel_va(t->va, t->pid, t->psize, RIC_FLUSH_PWC);
va 487 arch/powerpc/mm/book3s64/radix_tlb.c _tlbiel_va(t->va, t->pid, t->psize, RIC_FLUSH_ALL);
va 491 arch/powerpc/mm/book3s64/radix_tlb.c unsigned long va, unsigned long pid,
va 495 arch/powerpc/mm/book3s64/radix_tlb.c struct tlbiel_va t = { .va = va, .pid = pid, .psize = psize, .ric = ric };
va 498 arch/powerpc/mm/book3s64/radix_tlb.c _tlbie_va(va, pid, psize, RIC_FLUSH_TLB);
va 518 arch/powerpc/mm/book3s64/radix_tlb.c static __always_inline void _tlbie_lpid_va(unsigned long va, unsigned long lpid,
va 524 arch/powerpc/mm/book3s64/radix_tlb.c __tlbie_lpid_va(va, lpid, ap, ric);
va 525 arch/powerpc/mm/book3s64/radix_tlb.c fixup_tlbie_lpid_va(va, lpid, ap);
va 82 arch/powerpc/mm/ioremap.c unsigned long va;
va 89 arch/powerpc/mm/ioremap.c va = (unsigned long)area->addr;
va 91 arch/powerpc/mm/ioremap.c ret = ioremap_page_range(va, va + size, pa, prot);
va 95 arch/powerpc/mm/ioremap.c unmap_kernel_range(va, size);
va 13 arch/powerpc/mm/ioremap_64.c unsigned long va = (unsigned long)ea;
va 29 arch/powerpc/mm/ioremap_64.c ret = ioremap_page_range(va, va + size, pa, prot);
va 31 arch/powerpc/mm/ioremap_64.c unmap_kernel_range(va, size);
va 33 arch/powerpc/mm/ioremap_64.c ret = early_ioremap_range(va, pa, size, prot);
va 25 arch/powerpc/mm/kasan/kasan_init_32.c unsigned long va = (unsigned long)kasan_early_shadow_page;
va 30 arch/powerpc/mm/kasan/kasan_init_32.c __set_pte_at(&init_mm, va, ptep, pfn_pte(PHYS_PFN(pa), prot), 0);
va 98 arch/powerpc/mm/kasan/kasan_init_32.c void *va = block ? block + k_cur - k_start : kasan_get_one_page();
va 99 arch/powerpc/mm/kasan/kasan_init_32.c pte_t pte = pfn_pte(PHYS_PFN(__pa(va)), PAGE_KERNEL);
va 101 arch/powerpc/mm/kasan/kasan_init_32.c if (!va)
va 159 arch/powerpc/mm/mmu_decl.h phys_addr_t v_block_mapped(unsigned long va);
va 162 arch/powerpc/mm/mmu_decl.h static inline phys_addr_t v_block_mapped(unsigned long va) { return 0; }
va 27 arch/powerpc/mm/nohash/8xx.c phys_addr_t v_block_mapped(unsigned long va)
va 33 arch/powerpc/mm/nohash/8xx.c if (va >= VIRT_IMMR_BASE && va < VIRT_IMMR_BASE + IMMR_SIZE)
va 34 arch/powerpc/mm/nohash/8xx.c return p + va - VIRT_IMMR_BASE;
va 35 arch/powerpc/mm/nohash/8xx.c if (va >= PAGE_OFFSET && va < PAGE_OFFSET + block_mapped_ram)
va 36 arch/powerpc/mm/nohash/8xx.c return __pa(va);
va 74 arch/powerpc/mm/nohash/fsl_booke.c phys_addr_t v_block_mapped(unsigned long va)
va 78 arch/powerpc/mm/nohash/fsl_booke.c if (va >= tlbcam_addrs[b].start && va < tlbcam_addrs[b].limit)
va 79 arch/powerpc/mm/nohash/fsl_booke.c return tlbcam_addrs[b].phys + (va - tlbcam_addrs[b].start);
va 291 arch/powerpc/mm/pgtable.c unsigned long vmalloc_to_phys(void *va)
va 293 arch/powerpc/mm/pgtable.c unsigned long pfn = vmalloc_to_pfn(va);
va 296 arch/powerpc/mm/pgtable.c return __pa(pfn_to_kaddr(pfn)) + offset_in_page(va);
va 48 arch/powerpc/mm/pgtable_32.c static pte_t __init *early_pte_alloc_kernel(pmd_t *pmdp, unsigned long va)
va 55 arch/powerpc/mm/pgtable_32.c return pte_offset_kernel(pmdp, va);
va 59 arch/powerpc/mm/pgtable_32.c int __ref map_kernel_page(unsigned long va, phys_addr_t pa, pgprot_t prot)
va 66 arch/powerpc/mm/pgtable_32.c pd = pmd_offset(pud_offset(pgd_offset_k(va), va), va);
va 69 arch/powerpc/mm/pgtable_32.c pg = pte_alloc_kernel(pd, va);
va 71 arch/powerpc/mm/pgtable_32.c pg = early_pte_alloc_kernel(pd, va);
va 78 arch/powerpc/mm/pgtable_32.c set_pte_at(&init_mm, va, pg, pfn_pte(pa >> PAGE_SHIFT, prot));
va 65 arch/powerpc/platforms/powernv/pci-ioda.c vaf.va = &args;
va 188 arch/riscv/mm/init.c static phys_addr_t __init alloc_pte(uintptr_t va)
va 200 arch/riscv/mm/init.c uintptr_t va, phys_addr_t pa,
va 203 arch/riscv/mm/init.c uintptr_t pte_index = pte_index(va);
va 233 arch/riscv/mm/init.c static phys_addr_t __init alloc_pmd(uintptr_t va)
va 240 arch/riscv/mm/init.c pmd_num = (va - PAGE_OFFSET) >> PGDIR_SHIFT;
va 246 arch/riscv/mm/init.c uintptr_t va, phys_addr_t pa,
va 251 arch/riscv/mm/init.c uintptr_t pmd_index = pmd_index(va);
va 260 arch/riscv/mm/init.c pte_phys = alloc_pte(va);
va 269 arch/riscv/mm/init.c create_pte_mapping(ptep, va, pa, sz, prot);
va 290 arch/riscv/mm/init.c uintptr_t va, phys_addr_t pa,
va 295 arch/riscv/mm/init.c uintptr_t pgd_index = pgd_index(va);
va 304 arch/riscv/mm/init.c next_phys = alloc_pgd_next(va);
va 313 arch/riscv/mm/init.c create_pgd_next_mapping(nextp, va, pa, sz, prot);
va 348 arch/riscv/mm/init.c uintptr_t va, end_va;
va 392 arch/riscv/mm/init.c for (va = PAGE_OFFSET; va < end_va; va += map_size)
va 393 arch/riscv/mm/init.c create_pgd_mapping(early_pg_dir, va,
va 394 arch/riscv/mm/init.c load_pa + (va - PAGE_OFFSET),
va 399 arch/riscv/mm/init.c for (va = __fix_to_virt(FIX_FDT); va < end_va; va += PAGE_SIZE)
va 400 arch/riscv/mm/init.c create_pte_mapping(fixmap_pte, va,
va 401 arch/riscv/mm/init.c dtb_pa + (va - __fix_to_virt(FIX_FDT)),
va 412 arch/riscv/mm/init.c uintptr_t va, map_size;
va 439 arch/riscv/mm/init.c va = (uintptr_t)__va(pa);
va 440 arch/riscv/mm/init.c create_pgd_mapping(swapper_pg_dir, va, pa,
va 127 arch/sh/kernel/hw_breakpoint.c unsigned long va;
va 129 arch/sh/kernel/hw_breakpoint.c va = hw->address;
va 132 arch/sh/kernel/hw_breakpoint.c return (va >= TASK_SIZE) && ((va + len - 1) >= TASK_SIZE);
va 33 arch/sparc/include/asm/openprom.h char * (*v2_dumb_mem_alloc)(char *va, unsigned int sz);
va 34 arch/sparc/include/asm/openprom.h void (*v2_dumb_mem_free)(char *va, unsigned int sz);
va 142 arch/sparc/include/asm/openprom.h void (*pv_setctxt)(int ctxt, char *va, int pmeg);
va 185 arch/sparc/kernel/ioport.c void __iomem *va; /* P3 diag */
va 208 arch/sparc/kernel/ioport.c va = _sparc_ioremap(res, busno, phys, size);
va 210 arch/sparc/kernel/ioport.c return va;
va 322 arch/sparc/kernel/ioport.c void *va;
va 328 arch/sparc/kernel/ioport.c va = (void *) __get_free_pages(gfp | __GFP_ZERO, get_order(size));
va 329 arch/sparc/kernel/ioport.c if (!va) {
va 338 arch/sparc/kernel/ioport.c srmmu_mapiorange(0, virt_to_phys(va), addr, size);
va 340 arch/sparc/kernel/ioport.c *dma_handle = virt_to_phys(va);
va 344 arch/sparc/kernel/ioport.c free_pages((unsigned long)va, get_order(size));
va 218 arch/sparc/kernel/module.c unsigned long va;
va 221 arch/sparc/kernel/module.c for (va = 0; va < (PAGE_SIZE << 1); va += 32)
va 222 arch/sparc/kernel/module.c spitfire_put_icache_tag(va, 0x0);
va 417 arch/sparc/kernel/traps_64.c unsigned long va;
va 423 arch/sparc/kernel/traps_64.c for (va = 0; va < (PAGE_SIZE << 1); va += 32) {
va 424 arch/sparc/kernel/traps_64.c spitfire_put_icache_tag(va, 0x0);
va 425 arch/sparc/kernel/traps_64.c spitfire_put_dcache_tag(va, 0x0);
va 748 arch/sparc/mm/init_64.c unsigned long va;
va 753 arch/sparc/mm/init_64.c for (va = start; va < end; va += 32) {
va 754 arch/sparc/mm/init_64.c spitfire_put_dcache_tag(va & 0x3fe0, 0x0);
va 761 arch/sparc/mm/init_64.c for (va = start; va < end; va += 32)
va 765 arch/sparc/mm/init_64.c : "r" (va),
va 218 arch/sparc/mm/io-unit.c unsigned long va, addr, page, end, ret;
va 227 arch/sparc/mm/io-unit.c va = __get_free_pages(gfp | __GFP_ZERO, get_order(len));
va 228 arch/sparc/mm/io-unit.c if (!va)
va 239 arch/sparc/mm/io-unit.c page = va;
va 258 arch/sparc/mm/io-unit.c va += PAGE_SIZE;
va 266 arch/sparc/mm/io-unit.c free_pages(va, get_order(len));
va 311 arch/sparc/mm/iommu.c unsigned long va, addr, page, end, ret;
va 321 arch/sparc/mm/iommu.c va = __get_free_pages(gfp | __GFP_ZERO, get_order(len));
va 322 arch/sparc/mm/iommu.c if (va == 0)
va 329 arch/sparc/mm/iommu.c BUG_ON((va & ~PAGE_MASK) != 0);
va 343 arch/sparc/mm/iommu.c page = va;
va 365 arch/sparc/mm/iommu.c va += PAGE_SIZE;
va 387 arch/sparc/mm/iommu.c free_pages(va, get_order(len));
va 7 arch/x86/include/asm/edac.h static inline void edac_atomic_scrub(void *va, u32 size)
va 9 arch/x86/include/asm/edac.h u32 i, *virt_addr = va;
va 344 arch/x86/include/asm/xen/hypercall.h HYPERVISOR_update_va_mapping(unsigned long va, pte_t new_val,
va 348 arch/x86/include/asm/xen/hypercall.h return _hypercall3(int, update_va_mapping, va,
va 351 arch/x86/include/asm/xen/hypercall.h return _hypercall4(int, update_va_mapping, va,
va 459 arch/x86/include/asm/xen/hypercall.h MULTI_update_va_mapping(struct multicall_entry *mcl, unsigned long va,
va 463 arch/x86/include/asm/xen/hypercall.h mcl->args[0] = va;
va 216 arch/x86/kernel/hw_breakpoint.c unsigned long va;
va 219 arch/x86/kernel/hw_breakpoint.c va = hw->address;
va 227 arch/x86/kernel/hw_breakpoint.c return (va >= TASK_SIZE_MAX) || ((va + len - 1) >= TASK_SIZE_MAX);
va 37 arch/x86/kernel/irq_64.c void *va;
va 46 arch/x86/kernel/irq_64.c va = vmap(pages, IRQ_STACK_SIZE / PAGE_SIZE, GFP_KERNEL, PAGE_KERNEL);
va 47 arch/x86/kernel/irq_64.c if (!va)
va 50 arch/x86/kernel/irq_64.c per_cpu(hardirq_stack_ptr, cpu) = va + IRQ_STACK_SIZE;
va 60 arch/x86/kernel/irq_64.c void *va = per_cpu_ptr(&irq_stack_backing_store, cpu);
va 62 arch/x86/kernel/irq_64.c per_cpu(hardirq_stack_ptr, cpu) = va + IRQ_STACK_SIZE;
va 131 arch/x86/kernel/ldt.c static pmd_t *pgd_to_pmd_walk(pgd_t *pgd, unsigned long va)
va 139 arch/x86/kernel/ldt.c p4d = p4d_offset(pgd, va);
va 143 arch/x86/kernel/ldt.c pud = pud_offset(p4d, va);
va 147 arch/x86/kernel/ldt.c return pmd_offset(pud, va);
va 206 arch/x86/kernel/ldt.c unsigned long va;
va 234 arch/x86/kernel/ldt.c va = (unsigned long)ldt_slot_va(slot) + offset;
va 242 arch/x86/kernel/ldt.c ptep = get_locked_pte(mm, va, &ptl);
va 254 arch/x86/kernel/ldt.c set_pte_at(mm, va, ptep, pte);
va 267 arch/x86/kernel/ldt.c unsigned long va;
va 284 arch/x86/kernel/ldt.c va = (unsigned long)ldt_slot_va(ldt->slot) + offset;
va 285 arch/x86/kernel/ldt.c ptep = get_locked_pte(mm, va, &ptl);
va 286 arch/x86/kernel/ldt.c pte_clear(mm, va, ptep);
va 290 arch/x86/kernel/ldt.c va = (unsigned long)ldt_slot_va(ldt->slot);
va 291 arch/x86/kernel/ldt.c flush_tlb_mm_range(mm, va, va + nr_pages * PAGE_SIZE, PAGE_SHIFT, false);
va 129 arch/x86/kernel/umip.c vaf.va = &args;
va 4305 arch/x86/kvm/x86.c struct kvm_vapic_addr va;
va 4312 arch/x86/kvm/x86.c if (copy_from_user(&va, argp, sizeof(va)))
va 4315 arch/x86/kvm/x86.c r = kvm_lapic_set_vapic_addr(vcpu, va.vapic_addr);
va 22 arch/x86/mm/cpu_entry_area.c unsigned long va = CPU_ENTRY_AREA_PER_CPU + cpu * CPU_ENTRY_AREA_SIZE;
va 25 arch/x86/mm/cpu_entry_area.c return (struct cpu_entry_area *) va;
va 31 arch/x86/mm/cpu_entry_area.c unsigned long va = (unsigned long) cea_vaddr;
va 45 arch/x86/mm/cpu_entry_area.c set_pte_vaddr(va, pte);
va 473 arch/x86/mm/init_32.c unsigned long pfn, va;
va 490 arch/x86/mm/init_32.c va = PAGE_OFFSET + (pfn<<PAGE_SHIFT);
va 491 arch/x86/mm/init_32.c pgd = base + pgd_index(va);
va 495 arch/x86/mm/init_32.c p4d = p4d_offset(pgd, va);
va 496 arch/x86/mm/init_32.c pud = pud_offset(p4d, va);
va 497 arch/x86/mm/init_32.c pmd = pmd_offset(pud, va);
va 508 arch/x86/mm/init_32.c pte = pte_offset_kernel(pmd, va);
va 514 arch/x86/mm/init_32.c pte_clear(NULL, va, pte);
va 459 arch/x86/mm/pti.c unsigned long va = (unsigned long)&per_cpu(cpu_tss_rw, cpu);
va 460 arch/x86/mm/pti.c phys_addr_t pa = per_cpu_ptr_to_phys((void *)va);
va 463 arch/x86/mm/pti.c target_pte = pti_user_pagetable_walk_pte(va);
va 616 arch/x86/platform/efi/efi.c void *va;
va 624 arch/x86/platform/efi/efi.c va = __va(md->phys_addr);
va 627 arch/x86/platform/efi/efi.c efi_memory_uc((u64)(unsigned long)va, size);
va 629 arch/x86/platform/efi/efi.c va = efi_ioremap(md->phys_addr, size,
va 632 arch/x86/platform/efi/efi.c md->virt_addr = (u64) (unsigned long) va;
va 633 arch/x86/platform/efi/efi.c if (!va)
va 317 arch/x86/platform/efi/efi_64.c virt_to_phys_or_null_size(void *va, unsigned long size)
va 321 arch/x86/platform/efi/efi_64.c if (!va)
va 324 arch/x86/platform/efi/efi_64.c if (virt_addr_valid(va))
va 325 arch/x86/platform/efi/efi_64.c return virt_to_phys(va);
va 327 arch/x86/platform/efi/efi_64.c pa = slow_virt_to_phys(va);
va 412 arch/x86/platform/efi/efi_64.c static void __init __map_region(efi_memory_desc_t *md, u64 va)
va 425 arch/x86/platform/efi/efi_64.c if (kernel_map_pages_in_pgd(pgd, pfn, va, md->num_pages, flags))
va 427 arch/x86/platform/efi/efi_64.c md->phys_addr, va);
va 380 arch/x86/platform/efi/quirks.c u64 va = md->virt_addr;
va 401 arch/x86/platform/efi/quirks.c if (kernel_unmap_pages_in_pgd(pgd, va, md->num_pages))
va 402 arch/x86/platform/efi/quirks.c pr_err("Failed to unmap VA mapping for 0x%llx\n", va);
va 975 arch/x86/tools/relocs.c static int cmp_relocs(const void *va, const void *vb)
va 978 arch/x86/tools/relocs.c a = va; b = vb;
va 452 arch/x86/xen/enlighten_pv.c unsigned long va = dtr->address;
va 461 arch/x86/xen/enlighten_pv.c BUG_ON(va & ~PAGE_MASK);
va 470 arch/x86/xen/enlighten_pv.c ptep = lookup_address(va, &level);
va 477 arch/x86/xen/enlighten_pv.c make_lowmem_page_readonly((void *)va);
va 489 arch/x86/xen/enlighten_pv.c unsigned long va = dtr->address;
va 496 arch/x86/xen/enlighten_pv.c BUG_ON(va & ~PAGE_MASK);
va 498 arch/x86/xen/enlighten_pv.c pfn = virt_to_pfn(va);
va 503 arch/x86/xen/enlighten_pv.c if (HYPERVISOR_update_va_mapping((unsigned long)va, pte, 0))
va 58 arch/xtensa/include/asm/cacheflush.h static inline void __flush_dcache_page(unsigned long va)
va 61 arch/xtensa/include/asm/cacheflush.h static inline void __flush_dcache_range(unsigned long va, unsigned long sz)
va 67 arch/xtensa/include/asm/io.h unsigned long va = (unsigned long) addr;
va 69 arch/xtensa/include/asm/io.h if (!(va >= XCHAL_KIO_CACHED_VADDR &&
va 70 arch/xtensa/include/asm/io.h va - XCHAL_KIO_CACHED_VADDR < XCHAL_KIO_SIZE) &&
va 71 arch/xtensa/include/asm/io.h !(va >= XCHAL_KIO_BYPASS_VADDR &&
va 72 arch/xtensa/include/asm/io.h va - XCHAL_KIO_BYPASS_VADDR < XCHAL_KIO_SIZE))
va 165 arch/xtensa/include/asm/page.h static inline unsigned long ___pa(unsigned long va)
va 167 arch/xtensa/include/asm/page.h unsigned long off = va - PAGE_OFFSET;
va 39 arch/xtensa/kernel/hw_breakpoint.c unsigned long va;
va 41 arch/xtensa/kernel/hw_breakpoint.c va = hw->address;
va 44 arch/xtensa/kernel/hw_breakpoint.c return (va >= TASK_SIZE) && ((va + len - 1) >= TASK_SIZE);
va 49 block/partitions/ldm.c vaf.va = &args;
va 491 drivers/acpi/utils.c vaf.va = &args;
va 520 drivers/acpi/utils.c vaf.va = &args;
va 7266 drivers/ata/libata-core.c vaf.va = &args;
va 7283 drivers/ata/libata-core.c vaf.va = &args;
va 7305 drivers/ata/libata-core.c vaf.va = &args;
va 3353 drivers/base/core.c vaf.va = &args;
va 3370 drivers/base/core.c vaf.va = &args; \
va 594 drivers/block/rbd.c vaf.va = &args;
va 36 drivers/char/agp/hp-agp.c #define HP_ZX1_IOVA_TO_PDIR(va) ((va - hp_private.iova_base) >> hp_private.io_tlb_shift)
va 146 drivers/clk/sifive/fu540-prci.c void __iomem *va;
va 210 drivers/clk/sifive/fu540-prci.c return readl_relaxed(pd->va + offs);
va 215 drivers/clk/sifive/fu540-prci.c writel_relaxed(v, pd->va + offs);
va 594 drivers/clk/sifive/fu540-prci.c pd->va = devm_ioremap_resource(dev, res);
va 595 drivers/clk/sifive/fu540-prci.c if (IS_ERR(pd->va))
va 596 drivers/clk/sifive/fu540-prci.c return PTR_ERR(pd->va);
va 47 drivers/clk/ti/fapll.c #define fapll_is_ddr_pll(va) (((u32)(va) & 0xffff) == 0x0440)
va 53 drivers/clk/ti/fapll.c #define is_ddr_pll_clk1(va) (((u32)(va) & 0xffff) == 0x044c)
va 54 drivers/clk/ti/fapll.c #define is_audio_pll_clk1(va) (((u32)(va) & 0xffff) == 0x04a8)
va 229 drivers/crypto/chelsio/chcr_core.c work_handlers[rpl->opcode](dev, pgl->va);
va 351 drivers/crypto/chelsio/chtls/chtls_main.c , gl->va + pktshift,
va 1134 drivers/crypto/hisilicon/qm.c qp->qdma.va = dma_alloc_coherent(dev, qp->qdma.size,
va 1136 drivers/crypto/hisilicon/qm.c if (!qp->qdma.va) {
va 1142 drivers/crypto/hisilicon/qm.c qp->qdma.va, &qp->qdma.dma, qp->qdma.size);
va 1159 drivers/crypto/hisilicon/qm.c dma_free_coherent(dev, qp->qdma.size, qp->qdma.va,
va 1184 drivers/crypto/hisilicon/qm.c if (qm->use_dma_api && qdma->va)
va 1185 drivers/crypto/hisilicon/qm.c dma_free_coherent(dev, qdma->size, qdma->va, qdma->dma);
va 1282 drivers/crypto/hisilicon/qm.c (qp)->type = ((qp)->qdma.va + (off)); \
va 1489 drivers/crypto/hisilicon/qm.c if (qm->use_dma_api && qm->qdma.va) {
va 1492 drivers/crypto/hisilicon/qm.c qm->qdma.va, qm->qdma.dma);
va 1628 drivers/crypto/hisilicon/qm.c (qm)->type = ((qm)->qdma.va + (off)); \
va 1711 drivers/crypto/hisilicon/qm.c } else if (!qm->qdma.va) {
va 1716 drivers/crypto/hisilicon/qm.c qm->qdma.va = dma_alloc_coherent(dev, qm->qdma.size,
va 1719 drivers/crypto/hisilicon/qm.c qm->qdma.va, &qm->qdma.dma, qm->qdma.size);
va 1720 drivers/crypto/hisilicon/qm.c if (!qm->qdma.va)
va 113 drivers/crypto/hisilicon/qm.h void *va;
va 122 drivers/dio/dio.c void *va;
va 134 drivers/dio/dio.c va = (void *)(pa + DIO_VIRADDRBASE);
va 136 drivers/dio/dio.c va = ioremap(pa, PAGE_SIZE);
va 138 drivers/dio/dio.c if (probe_kernel_read(&i, (unsigned char *)va + DIO_IDOFF, 1)) {
va 140 drivers/dio/dio.c iounmap(va);
va 144 drivers/dio/dio.c prid = DIO_ID(va);
va 147 drivers/dio/dio.c secid = DIO_SECID(va);
va 154 drivers/dio/dio.c iounmap(va);
va 195 drivers/dio/dio.c u_char *va;
va 207 drivers/dio/dio.c va = (void *)(pa + DIO_VIRADDRBASE);
va 209 drivers/dio/dio.c va = ioremap(pa, PAGE_SIZE);
va 211 drivers/dio/dio.c if (probe_kernel_read(&i, (unsigned char *)va + DIO_IDOFF, 1)) {
va 213 drivers/dio/dio.c iounmap(va);
va 227 drivers/dio/dio.c dev->resource.end = pa + DIO_SIZE(scode, va);
va 231 drivers/dio/dio.c prid = DIO_ID(va);
va 234 drivers/dio/dio.c secid = DIO_SECID(va);
va 239 drivers/dio/dio.c dev->ipl = DIO_IPL(va);
va 247 drivers/dio/dio.c iounmap(va);
va 40 drivers/edac/edac_mc.c #define edac_atomic_scrub(va, size) do { } while (0)
va 35 drivers/firewire/core-card.c vaf.va = &args; \
va 241 drivers/firmware/efi/esrt.c void *va;
va 278 drivers/firmware/efi/esrt.c va = early_memremap(efi.esrt, size);
va 279 drivers/firmware/efi/esrt.c if (!va) {
va 285 drivers/firmware/efi/esrt.c memcpy(&tmpesrt, va, sizeof(tmpesrt));
va 286 drivers/firmware/efi/esrt.c early_memunmap(va, size);
va 609 drivers/firmware/stratix10-svc.c void *va;
va 618 drivers/firmware/stratix10-svc.c va = memremap(paddr, size, MEMREMAP_WC);
va 619 drivers/firmware/stratix10-svc.c if (!va) {
va 623 drivers/firmware/stratix10-svc.c vaddr = (unsigned long)va;
va 626 drivers/firmware/stratix10-svc.c va, (unsigned int)paddr, (unsigned int)size);
va 900 drivers/firmware/stratix10-svc.c unsigned long va;
va 909 drivers/firmware/stratix10-svc.c va = gen_pool_alloc(genpool, s);
va 910 drivers/firmware/stratix10-svc.c if (!va)
va 913 drivers/firmware/stratix10-svc.c memset((void *)va, 0, s);
va 914 drivers/firmware/stratix10-svc.c pa = gen_pool_virt_to_phys(genpool, va);
va 916 drivers/firmware/stratix10-svc.c pmem->vaddr = (void *)va;
va 923 drivers/firmware/stratix10-svc.c return (void *)va;
va 626 drivers/gpu/drm/amd/amdgpu/amdgpu.h void *va;
va 45 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h uint64_t va;
va 58 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h uint64_t va;
va 211 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h struct kgd_dev *kgd, uint64_t va, uint64_t size,
va 232 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h uint64_t va, void *vm,
va 141 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v7.c uint64_t va, uint32_t vmid);
va 782 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v7.c uint64_t va, uint32_t vmid)
va 787 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v7.c WREG32(mmSH_HIDDEN_PRIVATE_BASE_VMID, va);
va 97 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v8.c uint64_t va, uint32_t vmid);
va 741 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v8.c uint64_t va, uint32_t vmid)
va 746 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v8.c WREG32(mmSH_HIDDEN_PRIVATE_BASE_VMID, va);
va 781 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v9.c uint64_t va, uint32_t vmid)
va 65 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gfx_v9.h uint64_t va, uint32_t vmid);
va 378 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c uint64_t va = mem->va;
va 382 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c if (!va) {
va 388 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c va += bo_size;
va 394 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c pr_debug("\t add VA 0x%llx - 0x%llx to vm %p\n", va,
va 395 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c va + bo_size, vm);
va 406 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c bo_va_entry->va = va;
va 436 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c entry->va,
va 437 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c entry->va + size, entry);
va 713 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c amdgpu_vm_bo_unmap(adev, bo_va, entry->va);
va 746 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c ret = amdgpu_vm_bo_map(adev, entry->bo_va, entry->va, 0,
va 751 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c entry->va, ret);
va 1071 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c struct kgd_dev *kgd, uint64_t va, uint64_t size,
va 1167 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c va, size, domain_string(alloc_domain));
va 1191 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c (*mem)->va = va;
va 1240 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c mem->va, bo_size);
va 1269 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c pr_debug("Release VA 0x%llx - 0x%llx\n", mem->va,
va 1270 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c mem->va + bo_size * (1 + mem->aql_queue));
va 1341 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c mem->va,
va 1342 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c mem->va + bo_size * (1 + mem->aql_queue),
va 1391 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c entry->va, entry->va + bo_size,
va 1463 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c mem->va,
va 1464 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c mem->va + bo_size * (1 + mem->aql_queue),
va 1470 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c entry->va,
va 1471 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c entry->va + bo_size,
va 1479 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c mem->va);
va 1593 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c uint64_t va, void *vm,
va 1635 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c (*mem)->va = va;
va 1630 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c NULL, &adev->fw_vram_usage.va);
va 1645 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c adev->fw_vram_usage.va = NULL;
va 1657 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c &adev->fw_vram_usage.va);
va 340 drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c if (adev->fw_vram_usage.va != NULL) {
va 343 drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c adev->fw_vram_usage.va + AMDGIM_DATAEXCHANGE_OFFSET);
va 2086 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_it_insert(mapping, &vm->va);
va 2139 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c tmp = amdgpu_vm_it_iter_first(&vm->va, saddr, eaddr);
va 2267 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_it_remove(mapping, &vm->va);
va 2319 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c tmp = amdgpu_vm_it_iter_first(&vm->va, saddr, eaddr);
va 2350 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_it_remove(tmp, &vm->va);
va 2365 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_it_insert(before, &vm->va);
va 2374 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_it_insert(after, &vm->va);
va 2399 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c return amdgpu_vm_it_iter_first(&vm->va, addr, addr);
va 2417 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c for (mapping = amdgpu_vm_it_iter_first(&vm->va, 0, U64_MAX); mapping;
va 2470 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_it_remove(mapping, &vm->va);
va 2477 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_it_remove(mapping, &vm->va);
va 2680 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c vm->va = RB_ROOT_CACHED;
va 2960 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if (!RB_EMPTY_ROOT(&vm->va.rb_root)) {
va 2964 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c &vm->va.rb_root, rb) {
va 232 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.h struct rb_root_cached va;
va 185 drivers/gpu/drm/amd/amdgpu/mxgpu_ai.c if (!r && adev->fw_vram_usage.va != NULL) {
va 909 drivers/gpu/drm/amd/amdkfd/kfd_events.c memory_exception_data.va = address;
va 989 drivers/gpu/drm/amd/amdkfd/kfd_events.c memory_exception_data.va = (info->page_addr) << PAGE_SHIFT;
va 365 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c vaf.va = &args;
va 302 drivers/gpu/drm/amd/include/kgd_kfd_interface.h uint64_t va, uint32_t vmid);
va 190 drivers/gpu/drm/drm_print.c vaf.va = &args;
va 214 drivers/gpu/drm/drm_print.c vaf.va = &args;
va 237 drivers/gpu/drm/drm_print.c vaf.va = &args;
va 253 drivers/gpu/drm/drm_print.c vaf.va = &args;
va 252 drivers/gpu/drm/etnaviv/etnaviv_gem.c u64 va)
va 313 drivers/gpu/drm/etnaviv/etnaviv_gem.c mapping, va);
va 80 drivers/gpu/drm/etnaviv/etnaviv_gem.h u64 va;
va 124 drivers/gpu/drm/etnaviv/etnaviv_gem.h u64 va);
va 81 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c submit->bos[i].va = bo->presumed;
va 236 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c submit->bos[i].va);
va 243 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c submit->bos[i].va != mapping->iova) {
va 224 drivers/gpu/drm/etnaviv/etnaviv_mmu.c struct drm_mm_node *node, size_t size, u64 va)
va 226 drivers/gpu/drm/etnaviv/etnaviv_mmu.c return drm_mm_insert_node_in_range(&context->mm, node, size, 0, 0, va,
va 227 drivers/gpu/drm/etnaviv/etnaviv_mmu.c va + size, DRM_MM_INSERT_LOWEST);
va 232 drivers/gpu/drm/etnaviv/etnaviv_mmu.c struct etnaviv_vram_mapping *mapping, u64 va)
va 258 drivers/gpu/drm/etnaviv/etnaviv_mmu.c if (va)
va 260 drivers/gpu/drm/etnaviv/etnaviv_mmu.c etnaviv_obj->base.size, va);
va 91 drivers/gpu/drm/etnaviv/etnaviv_mmu.h struct etnaviv_vram_mapping *mapping, u64 va);
va 12510 drivers/gpu/drm/i915/display/intel_display.c vaf.va = &args;
va 505 drivers/gpu/drm/i915/display/intel_dpio_phy.c vaf.va = &args;
va 1692 drivers/gpu/drm/i915/gvt/cmd_parser.c unsigned long gma, unsigned long end_gma, void *va)
va 1710 drivers/gpu/drm/i915/gvt/cmd_parser.c intel_gvt_hypervisor_read_gpa(vgpu, gpa, va + len, copy_len);
va 1794 drivers/gpu/drm/i915/gvt/cmd_parser.c static int audit_bb_end(struct parser_exec_state *s, void *va)
va 1797 drivers/gpu/drm/i915/gvt/cmd_parser.c u32 cmd = *(u32 *)va;
va 1870 drivers/gpu/drm/i915/gvt/cmd_parser.c bb->va = i915_gem_object_pin_map(bb->obj, I915_MAP_WB);
va 1871 drivers/gpu/drm/i915/gvt/cmd_parser.c if (IS_ERR(bb->va)) {
va 1872 drivers/gpu/drm/i915/gvt/cmd_parser.c ret = PTR_ERR(bb->va);
va 1877 drivers/gpu/drm/i915/gvt/cmd_parser.c drm_clflush_virt_range(bb->va, bb->obj->base.size);
va 1883 drivers/gpu/drm/i915/gvt/cmd_parser.c bb->va + start_offset);
va 1890 drivers/gpu/drm/i915/gvt/cmd_parser.c ret = audit_bb_end(s, bb->va + start_offset + bb_end_cmd_offset);
va 1913 drivers/gpu/drm/i915/gvt/cmd_parser.c s->ip_va = bb->va + start_offset;
va 121 drivers/gpu/drm/i915/gvt/gvt.h void *va;
va 590 drivers/gpu/drm/i915/gvt/kvmgt.c base = vgpu_opregion(vgpu)->va;
va 229 drivers/gpu/drm/i915/gvt/opregion.c vgpu_opregion(vgpu)->va = (void *)__get_free_pages(GFP_KERNEL |
va 232 drivers/gpu/drm/i915/gvt/opregion.c if (!vgpu_opregion(vgpu)->va) {
va 238 drivers/gpu/drm/i915/gvt/opregion.c buf = (u8 *)vgpu_opregion(vgpu)->va;
va 265 drivers/gpu/drm/i915/gvt/opregion.c mfn = intel_gvt_hypervisor_virt_to_mfn(vgpu_opregion(vgpu)->va
va 337 drivers/gpu/drm/i915/gvt/opregion.c if (!vgpu_opregion(vgpu)->va)
va 346 drivers/gpu/drm/i915/gvt/opregion.c free_pages((unsigned long)vgpu_opregion(vgpu)->va,
va 349 drivers/gpu/drm/i915/gvt/opregion.c vgpu_opregion(vgpu)->va = NULL;
va 476 drivers/gpu/drm/i915/gvt/opregion.c scic = *((u32 *)vgpu_opregion(vgpu)->va +
va 478 drivers/gpu/drm/i915/gvt/opregion.c parm = *((u32 *)vgpu_opregion(vgpu)->va +
va 541 drivers/gpu/drm/i915/gvt/opregion.c *((u32 *)vgpu_opregion(vgpu)->va +
va 543 drivers/gpu/drm/i915/gvt/opregion.c *((u32 *)vgpu_opregion(vgpu)->va +
va 482 drivers/gpu/drm/i915/gvt/scheduler.c drm_clflush_virt_range(bb->va,
va 504 drivers/gpu/drm/i915/gvt/scheduler.c drm_clflush_virt_range(bb->va,
va 604 drivers/gpu/drm/i915/gvt/scheduler.c if (bb->va && !IS_ERR(bb->va))
va 124 drivers/gpu/drm/i915/gvt/scheduler.h void *va;
va 164 drivers/gpu/drm/i915/i915_gpu_error.c i915_error_vprintf(p->arg, vaf->fmt, *vaf->va);
va 31 drivers/gpu/drm/i915/i915_utils.c vaf.va = &args;
va 337 drivers/gpu/drm/i915/selftests/i915_selftest.c va_list va;
va 346 drivers/gpu/drm/i915/selftests/i915_selftest.c va_start(va, fmt);
va 347 drivers/gpu/drm/i915/selftests/i915_selftest.c vprintk(fmt, va);
va 348 drivers/gpu/drm/i915/selftests/i915_selftest.c va_end(va);
va 85 drivers/gpu/drm/lima/lima_drv.c return lima_gem_get_info(file, args->handle, &args->va, &args->offset);
va 43 drivers/gpu/drm/lima/lima_gem.c if (!list_empty(&bo->va))
va 67 drivers/gpu/drm/lima/lima_gem.c int lima_gem_get_info(struct drm_file *file, u32 handle, u32 *va, u64 *offset)
va 81 drivers/gpu/drm/lima/lima_gem.c *va = lima_vm_get_va(vm, bo);
va 18 drivers/gpu/drm/lima/lima_gem.h int lima_gem_get_info(struct drm_file *file, u32 handle, u32 *va, u64 *offset);
va 48 drivers/gpu/drm/lima/lima_object.c INIT_LIST_HEAD(&bo->va);
va 20 drivers/gpu/drm/lima/lima_object.h struct list_head va;
va 29 drivers/gpu/drm/lima/lima_vm.c #define LIMA_PDE(va) (va >> LIMA_VM_PD_SHIFT)
va 30 drivers/gpu/drm/lima/lima_vm.c #define LIMA_PTE(va) ((va & LIMA_VM_PT_MASK) >> LIMA_VM_PT_SHIFT)
va 31 drivers/gpu/drm/lima/lima_vm.c #define LIMA_PBE(va) (va >> LIMA_VM_PB_SHIFT)
va 32 drivers/gpu/drm/lima/lima_vm.c #define LIMA_BTE(va) ((va & LIMA_VM_BT_MASK) >> LIMA_VM_BT_SHIFT)
va 90 drivers/gpu/drm/lima/lima_vm.c list_for_each_entry(bo_va, &bo->va, list) {
va 142 drivers/gpu/drm/lima/lima_vm.c list_add_tail(&bo_va->list, &bo->va);
va 64 drivers/gpu/drm/nouveau/include/nvkm/core/memory.h #define nvkm_memory_map(p,o,vm,va,av,ac) \
va 65 drivers/gpu/drm/nouveau/include/nvkm/core/memory.h (p)->func->map((p),(o),(vm),(va),(av),(ac))
va 505 drivers/gpu/drm/radeon/radeon.h struct list_head va;
va 919 drivers/gpu/drm/radeon/radeon.h struct rb_root_cached va;
va 87 drivers/gpu/drm/radeon/radeon_object.c WARN_ON_ONCE(!list_empty(&bo->va));
va 216 drivers/gpu/drm/radeon/radeon_object.c INIT_LIST_HEAD(&bo->va);
va 298 drivers/gpu/drm/radeon/radeon_vm.c list_for_each_entry(bo_va, &bo->va, bo_list) {
va 339 drivers/gpu/drm/radeon/radeon_vm.c list_add_tail(&bo_va->bo_list, &bo->va);
va 482 drivers/gpu/drm/radeon/radeon_vm.c it = interval_tree_iter_first(&vm->va, soffset, eoffset);
va 510 drivers/gpu/drm/radeon/radeon_vm.c interval_tree_remove(&bo_va->it, &vm->va);
va 525 drivers/gpu/drm/radeon/radeon_vm.c interval_tree_insert(&bo_va->it, &vm->va);
va 1128 drivers/gpu/drm/radeon/radeon_vm.c interval_tree_remove(&bo_va->it, &vm->va);
va 1158 drivers/gpu/drm/radeon/radeon_vm.c list_for_each_entry(bo_va, &bo->va, bo_list) {
va 1189 drivers/gpu/drm/radeon/radeon_vm.c vm->va = RB_ROOT_CACHED;
va 1236 drivers/gpu/drm/radeon/radeon_vm.c if (!RB_EMPTY_ROOT(&vm->va.rb_root)) {
va 1240 drivers/gpu/drm/radeon/radeon_vm.c &vm->va.rb_root, it.rb) {
va 1241 drivers/gpu/drm/radeon/radeon_vm.c interval_tree_remove(&bo_va->it, &vm->va);
va 169 drivers/gpu/drm/tegra/vic.c dma_addr_t iova, void *va)
va 173 drivers/gpu/drm/tegra/vic.c return tegra_drm_free(tegra, size, va, iova);
va 223 drivers/infiniband/core/device.c vaf.va = &args;
va 240 drivers/infiniband/core/device.c vaf.va = &args; \
va 150 drivers/infiniband/core/umem.c unsigned long va, pgoff;
va 158 drivers/infiniband/core/umem.c va = virt;
va 168 drivers/infiniband/core/umem.c mask |= (sg_dma_address(sg) + pgoff) ^ va;
va 169 drivers/infiniband/core/umem.c va += sg_dma_len(sg) - pgoff;
va 175 drivers/infiniband/core/umem.c mask |= va;
va 418 drivers/infiniband/hw/bnxt_re/ib_verbs.c wqe->bind.va = (u64)(unsigned long)fence->va;
va 496 drivers/infiniband/hw/bnxt_re/ib_verbs.c dma_addr = dma_map_single(dev, fence->va, BNXT_RE_FENCE_BYTES,
va 526 drivers/infiniband/hw/bnxt_re/ib_verbs.c mr->qplib_mr.va = (u64)(unsigned long)fence->va;
va 2209 drivers/infiniband/hw/bnxt_re/ib_verbs.c wqe->frmr.va = wr->mr->iova;
va 3543 drivers/infiniband/hw/bnxt_re/ib_verbs.c mr->qplib_mr.va = virt_addr;
va 50 drivers/infiniband/hw/bnxt_re/ib_verbs.h u8 va[BNXT_RE_FENCE_BYTES];
va 1727 drivers/infiniband/hw/bnxt_re/qplib_fp.c sqe->va = cpu_to_le64(wqe->frmr.va);
va 1742 drivers/infiniband/hw/bnxt_re/qplib_fp.c sqe->va = cpu_to_le64(wqe->bind.va);
va 208 drivers/infiniband/hw/bnxt_re/qplib_fp.h u64 va;
va 223 drivers/infiniband/hw/bnxt_re/qplib_fp.h u64 va;
va 647 drivers/infiniband/hw/bnxt_re/qplib_sp.c mrw->va = 0;
va 724 drivers/infiniband/hw/bnxt_re/qplib_sp.c req.va = cpu_to_le64(mr->va);
va 116 drivers/infiniband/hw/bnxt_re/qplib_sp.h u64 va;
va 379 drivers/infiniband/hw/bnxt_re/roce_hsi.h __le64 va;
va 411 drivers/infiniband/hw/bnxt_re/roce_hsi.h __le64 va;
va 1467 drivers/infiniband/hw/bnxt_re/roce_hsi.h __le64 va;
va 1136 drivers/infiniband/hw/cxgb4/device.c gl->va + pktshift,
va 1188 drivers/infiniband/hw/cxgb4/device.c } else if (unlikely(*(u8 *)rsp != *(u8 *)gl->va)) {
va 1193 drivers/infiniband/hw/cxgb4/device.c pci_name(ctx->lldi.pdev), gl->va,
va 1195 drivers/infiniband/hw/cxgb4/device.c be64_to_cpu(*(__force __be64 *)gl->va),
va 1596 drivers/infiniband/hw/efa/efa_verbs.c unsigned long va;
va 1622 drivers/infiniband/hw/efa/efa_verbs.c for (va = vma->vm_start; va < vma->vm_end;
va 1623 drivers/infiniband/hw/efa/efa_verbs.c va += PAGE_SIZE, pfn++) {
va 1624 drivers/infiniband/hw/efa/efa_verbs.c err = vm_insert_page(vma, va, pfn_to_page(pfn));
va 409 drivers/infiniband/hw/hfi1/file_ops.c memvirt = dd->cr_base[uctxt->numa_id].va;
va 412 drivers/infiniband/hw/hfi1/file_ops.c (u64)dd->cr_base[uctxt->numa_id].va) & PAGE_MASK);
va 1278 drivers/infiniband/hw/hfi1/file_ops.c (u64)dd->cr_base[uctxt->numa_id].va) % PAGE_SIZE;
va 566 drivers/infiniband/hw/hfi1/pio.c sc->hw_free = &sc->dd->cr_base[sc->node].va[gc].cr[index];
va 2122 drivers/infiniband/hw/hfi1/pio.c dd->cr_base[i].va = dma_alloc_coherent(&dd->pcidev->dev,
va 2126 drivers/infiniband/hw/hfi1/pio.c if (!dd->cr_base[i].va) {
va 2149 drivers/infiniband/hw/hfi1/pio.c if (dd->cr_base[i].va) {
va 2153 drivers/infiniband/hw/hfi1/pio.c dd->cr_base[i].va,
va 161 drivers/infiniband/hw/hfi1/pio.h struct credit_return *va;
va 76 drivers/infiniband/hw/hfi1/trace_dbg.h *vaf->va) >=
va 104 drivers/infiniband/hw/hfi1/trace_dbg.h vaf.va = &args; \
va 89 drivers/infiniband/hw/hfi1/trace_tid.h unsigned long va, unsigned long pa, dma_addr_t dma),
va 90 drivers/infiniband/hw/hfi1/trace_tid.h TP_ARGS(ctxt, subctxt, rarr, npages, va, pa, dma),
va 96 drivers/infiniband/hw/hfi1/trace_tid.h __field(unsigned long, va)
va 105 drivers/infiniband/hw/hfi1/trace_tid.h __entry->va = va;
va 115 drivers/infiniband/hw/hfi1/trace_tid.h __entry->va,
va 123 drivers/infiniband/hw/hfi1/trace_tid.h unsigned long va, unsigned long pa, dma_addr_t dma),
va 124 drivers/infiniband/hw/hfi1/trace_tid.h TP_ARGS(ctxt, subctxt, rarr, npages, va, pa, dma)
va 130 drivers/infiniband/hw/hfi1/trace_tid.h unsigned long va, unsigned long pa, dma_addr_t dma),
va 131 drivers/infiniband/hw/hfi1/trace_tid.h TP_ARGS(ctxt, subctxt, rarr, npages, va, pa, dma)
va 164 drivers/infiniband/hw/hfi1/trace_tid.h TP_PROTO(unsigned int ctxt, u16 subctxt, unsigned long va, u32 rarr,
va 166 drivers/infiniband/hw/hfi1/trace_tid.h TP_ARGS(ctxt, subctxt, va, rarr, npages, dma),
va 170 drivers/infiniband/hw/hfi1/trace_tid.h __field(unsigned long, va)
va 178 drivers/infiniband/hw/hfi1/trace_tid.h __entry->va = va;
va 188 drivers/infiniband/hw/hfi1/trace_tid.h __entry->va,
va 89 drivers/infiniband/hw/hns/hns_roce_hw_v2.c rc_sq_wqe->va = cpu_to_le64(wr->mr->iova);
va 479 drivers/infiniband/hw/hns/hns_roce_hw_v2.c rc_sq_wqe->va =
va 486 drivers/infiniband/hw/hns/hns_roce_hw_v2.c rc_sq_wqe->va =
va 493 drivers/infiniband/hw/hns/hns_roce_hw_v2.c rc_sq_wqe->va =
va 521 drivers/infiniband/hw/hns/hns_roce_hw_v2.c rc_sq_wqe->va =
va 528 drivers/infiniband/hw/hns/hns_roce_hw_v2.c rc_sq_wqe->va =
va 1166 drivers/infiniband/hw/hns/hns_roce_hw_v2.h __le64 va;
va 396 drivers/infiniband/hw/i40iw/i40iw_cm.c buf = sqbuf->mem.va;
va 3144 drivers/infiniband/hw/i40iw/i40iw_cm.c rbuf->mem.va,
va 3146 drivers/infiniband/hw/i40iw/i40iw_cm.c ethh = (struct vlan_ethhdr *)rbuf->mem.va;
va 3423 drivers/infiniband/hw/i40iw/i40iw_cm.c dev->iw_priv_qp_ops->qp_setctx(&iwqp->sc_qp, (u64 *)(iwqp->host_ctx.va), ctx_info);
va 3486 drivers/infiniband/hw/i40iw/i40iw_cm.c if (iwqp->ietf_mem.va) {
va 3698 drivers/infiniband/hw/i40iw/i40iw_cm.c accept.addr = iwqp->ietf_mem.va;
va 3709 drivers/infiniband/hw/i40iw/i40iw_cm.c tagged_offset = (uintptr_t)iwqp->ietf_mem.va;
va 3726 drivers/infiniband/hw/i40iw/i40iw_cm.c iwqp->ietf_mem.va,
va 3023 drivers/infiniband/hw/i40iw/i40iw_ctrl.c temp = (info->addr_type == I40IW_ADDR_TYPE_VA_BASED) ? (uintptr_t)info->va : info->fbo;
va 3096 drivers/infiniband/hw/i40iw/i40iw_ctrl.c va64 = (uintptr_t)(info->va);
va 3102 drivers/infiniband/hw/i40iw/i40iw_ctrl.c (info->addr_type == I40IW_ADDR_TYPE_VA_BASED ? (uintptr_t)info->va : fbo));
va 3275 drivers/infiniband/hw/i40iw/i40iw_ctrl.c temp = (info->addr_type == I40IW_ADDR_TYPE_VA_BASED) ? (uintptr_t)info->va : info->fbo;
va 3485 drivers/infiniband/hw/i40iw/i40iw_ctrl.c query_fpm_mem.va = dev->fpm_query_buf;
va 3496 drivers/infiniband/hw/i40iw/i40iw_ctrl.c if (!dev->vf_fpm_query_buf[iw_vf_idx].va) {
va 3502 drivers/infiniband/hw/i40iw/i40iw_ctrl.c vf_dev->fpm_query_buf = dev->vf_fpm_query_buf[iw_vf_idx].va;
va 3506 drivers/infiniband/hw/i40iw/i40iw_ctrl.c query_fpm_mem.va = vf_dev->fpm_query_buf;
va 3538 drivers/infiniband/hw/i40iw/i40iw_ctrl.c i40iw_sc_parse_fpm_query_buf((u64 *)query_fpm_mem.va,
va 3544 drivers/infiniband/hw/i40iw/i40iw_ctrl.c query_fpm_mem.va, I40IW_QUERY_FPM_BUF_SIZE);
va 3550 drivers/infiniband/hw/i40iw/i40iw_ctrl.c i40iw_sc_parse_fpm_commit_buf((u64 *)query_fpm_mem.va, hmc_info->hmc_obj, &hmc_info->sd_table.sd_cnt);
va 3556 drivers/infiniband/hw/i40iw/i40iw_ctrl.c hmc_info->sd_table.sd_entry = virt_mem.va;
va 3604 drivers/infiniband/hw/i40iw/i40iw_ctrl.c commit_fpm_mem.va = dev->fpm_commit_buf;
va 3622 drivers/infiniband/hw/i40iw/i40iw_ctrl.c commit_fpm_mem.va, I40IW_COMMIT_FPM_BUF_SIZE);
va 3659 drivers/infiniband/hw/i40iw/i40iw_ctrl.c memcpy((char *)sdbuf->va + offset, &info->entry[3],
va 4024 drivers/infiniband/hw/i40iw/i40iw_ctrl.c hmc_info->sd_table.sd_entry = virt_mem.va;
va 4250 drivers/infiniband/hw/i40iw/i40iw_ctrl.c values_mem.va = pcmdinfo->in.u.query_fpm_values.fpm_values_va;
va 4259 drivers/infiniband/hw/i40iw/i40iw_ctrl.c values_mem.va = pcmdinfo->in.u.commit_fpm_values.fpm_values_va;
va 428 drivers/infiniband/hw/i40iw/i40iw_hmc.c if (!mem || !mem->va)
va 576 drivers/infiniband/hw/i40iw/i40iw_hmc.c sd_entry->u.pd_table.pd_entry_virt_mem.va;
va 657 drivers/infiniband/hw/i40iw/i40iw_hmc.c pd_addr = (u64 *)pd_table->pd_page_addr.va;
va 721 drivers/infiniband/hw/i40iw/i40iw_hmc.c pd_addr = (u64 *)pd_table->pd_page_addr.va;
va 732 drivers/infiniband/hw/i40iw/i40iw_hmc.c if (!mem || !mem->va)
va 423 drivers/infiniband/hw/i40iw/i40iw_hw.c iwqp->host_ctx.va,
va 531 drivers/infiniband/hw/i40iw/i40iw_main.c unsigned long va, newva;
va 534 drivers/infiniband/hw/i40iw/i40iw_main.c va = (unsigned long)iwdev->obj_next.va;
va 535 drivers/infiniband/hw/i40iw/i40iw_main.c newva = va;
va 537 drivers/infiniband/hw/i40iw/i40iw_main.c newva = ALIGN(va, (mask + 1));
va 538 drivers/infiniband/hw/i40iw/i40iw_main.c extra = newva - va;
va 539 drivers/infiniband/hw/i40iw/i40iw_main.c memptr->va = (u8 *)va + extra;
va 542 drivers/infiniband/hw/i40iw/i40iw_main.c if ((memptr->va + size) >
(iwdev->obj_mem.va + iwdev->obj_mem.size)) va 545 drivers/infiniband/hw/i40iw/i40iw_main.c iwdev->obj_next.va = memptr->va + size; va 589 drivers/infiniband/hw/i40iw/i40iw_main.c dev->cqp->host_ctx = mem.va; va 593 drivers/infiniband/hw/i40iw/i40iw_main.c cqp_init_info.sq = cqp->sq.va; va 596 drivers/infiniband/hw/i40iw/i40iw_main.c cqp_init_info.host_ctx = mem.va; va 657 drivers/infiniband/hw/i40iw/i40iw_main.c info.cq_base = ccq->mem_cq.va; va 660 drivers/infiniband/hw/i40iw/i40iw_main.c info.shadow_area = mem.va; va 740 drivers/infiniband/hw/i40iw/i40iw_main.c info.ceqe_base = iwceq->mem.va; va 880 drivers/infiniband/hw/i40iw/i40iw_main.c info.aeqe_base = aeq->mem.va; va 1342 drivers/infiniband/hw/i40iw/i40iw_main.c info.fpm_query_buf = mem.va; va 1348 drivers/infiniband/hw/i40iw/i40iw_main.c info.fpm_commit_buf = mem.va; va 1855 drivers/infiniband/hw/i40iw/i40iw_main.c vf_dev_mem.va = tmp_vfdev; va 87 drivers/infiniband/hw/i40iw/i40iw_osdep.h void *va; va 93 drivers/infiniband/hw/i40iw/i40iw_osdep.h void *va; va 156 drivers/infiniband/hw/i40iw/i40iw_pble.c chunk->vaddr = ((u8 *)sd_entry->u.bp.addr.va + offset); va 288 drivers/infiniband/hw/i40iw/i40iw_pble.c mem.va = (void *)(addr); va 176 drivers/infiniband/hw/i40iw/i40iw_puda.c buf = (struct i40iw_puda_buf *)buf_mem.va; va 184 drivers/infiniband/hw/i40iw/i40iw_puda.c buf->buf_mem.va = buf_mem.va; va 574 drivers/infiniband/hw/i40iw/i40iw_puda.c memset(mem->va, 0, t_size); va 584 drivers/infiniband/hw/i40iw/i40iw_puda.c ukqp->sq_base = mem->va; va 699 drivers/infiniband/hw/i40iw/i40iw_puda.c init_info->cq_base = mem->va; va 700 drivers/infiniband/hw/i40iw/i40iw_puda.c init_info->shadow_area = (u64 *)((u8 *)mem->va + cqsize); va 911 drivers/infiniband/hw/i40iw/i40iw_puda.c rsrc = (struct i40iw_puda_rsrc *)vmem->va; va 914 drivers/infiniband/hw/i40iw/i40iw_puda.c vsi->ilq = (struct i40iw_puda_rsrc *)vmem->va; va 921 drivers/infiniband/hw/i40iw/i40iw_puda.c vsi->ieq = (struct i40iw_puda_rsrc *)vmem->va; va 927 drivers/infiniband/hw/i40iw/i40iw_puda.c rsrc->sq_wrtrk_array = (struct i40iw_sq_uk_wr_trk_info *)((u8 *)vmem->va + pudasize); va 928 drivers/infiniband/hw/i40iw/i40iw_puda.c rsrc->rq_wrid_array = (u64 *)((u8 *)vmem->va + pudasize + sqwridsize); va 1026 drivers/infiniband/hw/i40iw/i40iw_puda.c void *mem1 = (u8 *)buf->mem.va + buf_offset; va 1027 drivers/infiniband/hw/i40iw/i40iw_puda.c void *mem2 = (u8 *)txbuf->mem.va + txbuf_offset; va 1089 drivers/infiniband/hw/i40iw/i40iw_puda.c txbuf->data = (u8 *)txbuf->mem.va + buf->hdrlen; va 1093 drivers/infiniband/hw/i40iw/i40iw_puda.c bufoffset = (u16)(buf->data - (u8 *)buf->mem.va); va 1112 drivers/infiniband/hw/i40iw/i40iw_puda.c bufoffset = (u16)(buf->data - (u8 *)buf->mem.va); va 1216 drivers/infiniband/hw/i40iw/i40iw_puda.c txbuf->mem.va, txbuf->totallen); va 1256 drivers/infiniband/hw/i40iw/i40iw_puda.c ioffset = (u16)(buf->data - (u8 *)buf->mem.va); va 787 drivers/infiniband/hw/i40iw/i40iw_type.h void *va; va 806 drivers/infiniband/hw/i40iw/i40iw_type.h void *va; va 833 drivers/infiniband/hw/i40iw/i40iw_type.h void *va; va 627 drivers/infiniband/hw/i40iw/i40iw_uk.c set_64bit_val(wqe, 0, (uintptr_t)op_info->va); va 226 drivers/infiniband/hw/i40iw/i40iw_user.h void *va; va 761 drivers/infiniband/hw/i40iw/i40iw_utils.c mem->va = dma_alloc_coherent(&pcidev->dev, mem->size, va 763 drivers/infiniband/hw/i40iw/i40iw_utils.c if (!mem->va) va 777 drivers/infiniband/hw/i40iw/i40iw_utils.c if (!mem || !mem->va) va 781 drivers/infiniband/hw/i40iw/i40iw_utils.c mem->va, (dma_addr_t)mem->pa); va 
782 drivers/infiniband/hw/i40iw/i40iw_utils.c mem->va = NULL; va 799 drivers/infiniband/hw/i40iw/i40iw_utils.c mem->va = kzalloc(size, GFP_KERNEL); va 801 drivers/infiniband/hw/i40iw/i40iw_utils.c if (mem->va) va 821 drivers/infiniband/hw/i40iw/i40iw_utils.c kfree(mem->va); va 1112 drivers/infiniband/hw/i40iw/i40iw_utils.c cqp_info->in.u.query_fpm_values.fpm_values_va = values_mem->va; va 1145 drivers/infiniband/hw/i40iw/i40iw_utils.c cqp_info->in.u.commit_fpm_values.fpm_values_va = values_mem->va; va 1440 drivers/infiniband/hw/i40iw/i40iw_utils.c u8 *addr = (u8 *)buf->mem.va; va 1464 drivers/infiniband/hw/i40iw/i40iw_utils.c u8 *mem = (u8 *)buf->mem.va; va 1465 drivers/infiniband/hw/i40iw/i40iw_utils.c struct ethhdr *ethh = (struct ethhdr *)buf->mem.va; va 348 drivers/infiniband/hw/i40iw/i40iw_verbs.c static struct i40iw_pbl *i40iw_get_pbl(unsigned long va, va 354 drivers/infiniband/hw/i40iw/i40iw_verbs.c if (iwpbl->user_base == va) { va 496 drivers/infiniband/hw/i40iw/i40iw_verbs.c ukinfo->sq = mem->va; va 593 drivers/infiniband/hw/i40iw/i40iw_verbs.c init_info.q2 = iwqp->q2_ctx_mem.va; va 613 drivers/infiniband/hw/i40iw/i40iw_verbs.c iwqp->host_ctx.va = init_info.host_ctx; va 693 drivers/infiniband/hw/i40iw/i40iw_verbs.c (u64 *)iwqp->host_ctx.va, va 966 drivers/infiniband/hw/i40iw/i40iw_verbs.c (u64 *)iwqp->host_ctx.va, va 1171 drivers/infiniband/hw/i40iw/i40iw_verbs.c ukinfo->cq_base = iwcq->kmem.va; va 1174 drivers/infiniband/hw/i40iw/i40iw_verbs.c ukinfo->shadow_area = iwcq->kmem.va + rsize; va 1692 drivers/infiniband/hw/i40iw/i40iw_verbs.c stag_info->va = (void *)(unsigned long)iwpbl->user_base; va 2249 drivers/infiniband/hw/i40iw/i40iw_verbs.c info.va = (void *)(uintptr_t)iwmr->ibmr.iova; va 316 drivers/infiniband/hw/i40iw/i40iw_virtchnl.c vf_dev_mem.va = vf_dev; va 477 drivers/infiniband/hw/i40iw/i40iw_virtchnl.c vf_dev = vf_dev_mem.va; va 2082 drivers/infiniband/hw/mlx5/mr.c klms[i].va = cpu_to_be64(sg_dma_address(sg) + sg_offset); va 2102 drivers/infiniband/hw/mlx5/mr.c klms[i + j].va = cpu_to_be64(sg_dma_address(sg) + va 169 drivers/infiniband/hw/mlx5/odp.c unsigned long va; va 176 drivers/infiniband/hw/mlx5/odp.c pklm->va = 0; va 209 drivers/infiniband/hw/mlx5/odp.c va = (offset + i) * MLX5_IMR_MTT_SIZE; va 210 drivers/infiniband/hw/mlx5/odp.c if (odp && ib_umem_start(odp) == va) { va 219 drivers/infiniband/hw/mlx5/odp.c i, va, be32_to_cpu(pklm->key)); va 892 drivers/infiniband/hw/mlx5/odp.c frame->io_virt = be64_to_cpu(pklm->va) + offset; va 4634 drivers/infiniband/hw/mlx5/qp.c data_klm->va = cpu_to_be64(data_va); va 4673 drivers/infiniband/hw/mlx5/qp.c data_sentry->va = cpu_to_be64(data_va); va 4678 drivers/infiniband/hw/mlx5/qp.c prot_sentry->va = cpu_to_be64(prot_va); va 121 drivers/infiniband/hw/ocrdma/ocrdma.h void *va; va 127 drivers/infiniband/hw/ocrdma/ocrdma.h void *va; va 132 drivers/infiniband/hw/ocrdma/ocrdma.h void *va; va 190 drivers/infiniband/hw/ocrdma/ocrdma.h u64 va; va 223 drivers/infiniband/hw/ocrdma/ocrdma.h void *va; va 266 drivers/infiniband/hw/ocrdma/ocrdma.h struct ocrdma_av *va; va 319 drivers/infiniband/hw/ocrdma/ocrdma.h struct ocrdma_cqe *va; va 364 drivers/infiniband/hw/ocrdma/ocrdma.h u8 *va; /* virtual address */ va 443 drivers/infiniband/hw/ocrdma/ocrdma.h u32 *va; va 195 drivers/infiniband/hw/ocrdma/ocrdma_ah.c if ((pd->uctx) && (pd->uctx->ah_tbl.va)) { va 196 drivers/infiniband/hw/ocrdma/ocrdma_ah.c ahid_addr = pd->uctx->ah_tbl.va + rdma_ah_get_dlid(attr); va 113 drivers/infiniband/hw/ocrdma/ocrdma_hw.c return eq->q.va + (eq->q.tail * 
sizeof(struct ocrdma_eqe)); va 124 drivers/infiniband/hw/ocrdma/ocrdma_hw.c (dev->mq.cq.va + (dev->mq.cq.tail * sizeof(struct ocrdma_mcqe))); va 138 drivers/infiniband/hw/ocrdma/ocrdma_hw.c return dev->mq.sq.va + (dev->mq.sq.head * sizeof(struct ocrdma_mqe)); va 148 drivers/infiniband/hw/ocrdma/ocrdma_hw.c return dev->mq.sq.va + (dev->mqe_ctx.tag * sizeof(struct ocrdma_mqe)); va 373 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dma_free_coherent(&dev->nic_info.pdev->dev, q->size, q->va, q->dma); va 383 drivers/infiniband/hw/ocrdma/ocrdma_hw.c q->va = dma_alloc_coherent(&dev->nic_info.pdev->dev, q->size, &q->dma, va 385 drivers/infiniband/hw/ocrdma/ocrdma_hw.c if (!q->va) va 538 drivers/infiniband/hw/ocrdma/ocrdma_hw.c cmd->pgsz_pgcnt |= PAGES_4K_SPANNED(cq->va, cq->size); va 574 drivers/infiniband/hw/ocrdma/ocrdma_hw.c num_pages = PAGES_4K_SPANNED(mq->va, mq->size); va 1288 drivers/infiniband/hw/ocrdma/ocrdma_hw.c struct ocrdma_rdma_stats_req *req = dev->stats_mem.va; va 1317 drivers/infiniband/hw/ocrdma/ocrdma_hw.c status = ocrdma_nonemb_mbx_cmd(dev, mqe, dev->stats_mem.va); va 1341 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dma.va = dma_alloc_coherent(&dev->nic_info.pdev->dev, va 1343 drivers/infiniband/hw/ocrdma/ocrdma_hw.c if (!dma.va) va 1354 drivers/infiniband/hw/ocrdma/ocrdma_hw.c ocrdma_init_mch((struct ocrdma_mbx_hdr *)dma.va, va 1359 drivers/infiniband/hw/ocrdma/ocrdma_hw.c status = ocrdma_nonemb_mbx_cmd(dev, mqe, dma.va); va 1361 drivers/infiniband/hw/ocrdma/ocrdma_hw.c ctrl_attr_rsp = (struct ocrdma_get_ctrl_attribs_rsp *)dma.va; va 1371 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dma_free_coherent(&dev->nic_info.pdev->dev, dma.size, dma.va, dma.pa); va 1680 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dev->av_tbl.pbl.va = dma_alloc_coherent(&pdev->dev, PAGE_SIZE, va 1683 drivers/infiniband/hw/ocrdma/ocrdma_hw.c if (dev->av_tbl.pbl.va == NULL) va 1686 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dev->av_tbl.va = dma_alloc_coherent(&pdev->dev, dev->av_tbl.size, va 1688 drivers/infiniband/hw/ocrdma/ocrdma_hw.c if (dev->av_tbl.va == NULL) va 1693 drivers/infiniband/hw/ocrdma/ocrdma_hw.c pbes = (struct ocrdma_pbe *)dev->av_tbl.pbl.va; va 1710 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dma_free_coherent(&pdev->dev, dev->av_tbl.size, dev->av_tbl.va, va 1712 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dev->av_tbl.va = NULL; va 1714 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dma_free_coherent(&pdev->dev, PAGE_SIZE, dev->av_tbl.pbl.va, va 1716 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dev->av_tbl.pbl.va = NULL; va 1728 drivers/infiniband/hw/ocrdma/ocrdma_hw.c if (dev->av_tbl.va == NULL) va 1737 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dma_free_coherent(&pdev->dev, dev->av_tbl.size, dev->av_tbl.va, va 1739 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dev->av_tbl.va = NULL; va 1740 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dma_free_coherent(&pdev->dev, PAGE_SIZE, dev->av_tbl.pbl.va, va 1820 drivers/infiniband/hw/ocrdma/ocrdma_hw.c cq->va = dma_alloc_coherent(&pdev->dev, cq->len, &cq->pa, GFP_KERNEL); va 1821 drivers/infiniband/hw/ocrdma/ocrdma_hw.c if (!cq->va) { va 1883 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dma_free_coherent(&pdev->dev, cq->len, cq->va, cq->pa); va 1905 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dma_free_coherent(&dev->nic_info.pdev->dev, cq->len, cq->va, cq->pa); va 1993 drivers/infiniband/hw/ocrdma/ocrdma_hw.c cmd->va_loaddr = (u32) hwmr->va; va 1994 drivers/infiniband/hw/ocrdma/ocrdma_hw.c cmd->va_hiaddr = (u32) upper_32_bits(hwmr->va); va 2208 drivers/infiniband/hw/ocrdma/ocrdma_hw.c qp->sq.va = 
dma_alloc_coherent(&pdev->dev, len, &pa, GFP_KERNEL); va 2209 drivers/infiniband/hw/ocrdma/ocrdma_hw.c if (!qp->sq.va) va 2258 drivers/infiniband/hw/ocrdma/ocrdma_hw.c qp->rq.va = dma_alloc_coherent(&pdev->dev, len, &pa, GFP_KERNEL); va 2259 drivers/infiniband/hw/ocrdma/ocrdma_hw.c if (!qp->rq.va) va 2455 drivers/infiniband/hw/ocrdma/ocrdma_hw.c if (qp->rq.va) va 2456 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dma_free_coherent(&pdev->dev, qp->rq.len, qp->rq.va, qp->rq.pa); va 2459 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dma_free_coherent(&pdev->dev, qp->sq.len, qp->sq.va, qp->sq.pa); va 2734 drivers/infiniband/hw/ocrdma/ocrdma_hw.c if (qp->sq.va) va 2735 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dma_free_coherent(&pdev->dev, qp->sq.len, qp->sq.va, qp->sq.pa); va 2736 drivers/infiniband/hw/ocrdma/ocrdma_hw.c if (!qp->srq && qp->rq.va) va 2737 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dma_free_coherent(&pdev->dev, qp->rq.len, qp->rq.va, qp->rq.pa); va 2772 drivers/infiniband/hw/ocrdma/ocrdma_hw.c srq->rq.va = dma_alloc_coherent(&pdev->dev, len, &pa, GFP_KERNEL); va 2773 drivers/infiniband/hw/ocrdma/ocrdma_hw.c if (!srq->rq.va) { va 2812 drivers/infiniband/hw/ocrdma/ocrdma_hw.c dma_free_coherent(&pdev->dev, srq->rq.len, srq->rq.va, pa); va 2871 drivers/infiniband/hw/ocrdma/ocrdma_hw.c if (srq->rq.va) va 2873 drivers/infiniband/hw/ocrdma/ocrdma_hw.c srq->rq.va, srq->rq.pa); va 3045 drivers/infiniband/hw/ocrdma/ocrdma_hw.c av = dev->av_tbl.va; va 76 drivers/infiniband/hw/ocrdma/ocrdma_stats.c mem->va = dma_alloc_coherent(&dev->nic_info.pdev->dev, mem->size, va 78 drivers/infiniband/hw/ocrdma/ocrdma_stats.c if (!mem->va) { va 95 drivers/infiniband/hw/ocrdma/ocrdma_stats.c if (mem->va) va 97 drivers/infiniband/hw/ocrdma/ocrdma_stats.c mem->va, mem->pa); va 98 drivers/infiniband/hw/ocrdma/ocrdma_stats.c mem->va = NULL; va 106 drivers/infiniband/hw/ocrdma/ocrdma_stats.c (struct ocrdma_rdma_stats_resp *)dev->stats_mem.va; va 223 drivers/infiniband/hw/ocrdma/ocrdma_stats.c (struct ocrdma_rdma_stats_resp *)dev->stats_mem.va; va 269 drivers/infiniband/hw/ocrdma/ocrdma_stats.c (struct ocrdma_rdma_stats_resp *)dev->stats_mem.va; va 280 drivers/infiniband/hw/ocrdma/ocrdma_stats.c (struct ocrdma_rdma_stats_resp *)dev->stats_mem.va; va 291 drivers/infiniband/hw/ocrdma/ocrdma_stats.c (struct ocrdma_rdma_stats_resp *)dev->stats_mem.va; va 333 drivers/infiniband/hw/ocrdma/ocrdma_stats.c (struct ocrdma_rdma_stats_resp *)dev->stats_mem.va; va 348 drivers/infiniband/hw/ocrdma/ocrdma_stats.c (struct ocrdma_rdma_stats_resp *)dev->stats_mem.va; va 365 drivers/infiniband/hw/ocrdma/ocrdma_stats.c (struct ocrdma_rdma_stats_resp *)dev->stats_mem.va; va 398 drivers/infiniband/hw/ocrdma/ocrdma_stats.c (struct ocrdma_rdma_stats_resp *)dev->stats_mem.va; va 419 drivers/infiniband/hw/ocrdma/ocrdma_stats.c (struct ocrdma_rdma_stats_resp *)dev->stats_mem.va; va 445 drivers/infiniband/hw/ocrdma/ocrdma_stats.c (struct ocrdma_rdma_stats_resp *)dev->stats_mem.va; va 470 drivers/infiniband/hw/ocrdma/ocrdma_stats.c (struct ocrdma_rdma_stats_resp *)dev->stats_mem.va; va 488 drivers/infiniband/hw/ocrdma/ocrdma_stats.c (struct ocrdma_rdma_stats_resp *)dev->stats_mem.va; va 611 drivers/infiniband/hw/ocrdma/ocrdma_stats.c (struct ocrdma_rdma_stats_resp *)dev->stats_mem.va; va 486 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c ctx->ah_tbl.va = dma_alloc_coherent(&pdev->dev, map_len, va 488 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c if (!ctx->ah_tbl.va) va 494 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c resp.ah_tbl_page = 
virt_to_phys(ctx->ah_tbl.va); va 521 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c dma_free_coherent(&pdev->dev, ctx->ah_tbl.len, ctx->ah_tbl.va, va 536 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c dma_free_coherent(&pdev->dev, uctx->ah_tbl.len, uctx->ah_tbl.va, va 757 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c if (!mr->pbl_table[i].va) va 760 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c mr->pbl_table[i].va, va 799 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c void *va; va 809 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c va = dma_alloc_coherent(&pdev->dev, dma_len, &pa, GFP_KERNEL); va 810 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c if (!va) { va 815 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c mr->pbl_table[i].va = va; va 834 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c pbe = (struct ocrdma_pbe *)pbl_tbl->va; va 855 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c pbe = (struct ocrdma_pbe *)pbl_tbl->va; va 890 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c mr->hwmr.va = usr_addr; va 956 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c uresp.page_addr[0] = virt_to_phys(cq->va); va 1050 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c cqe = cq->va; va 1201 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c uresp.sq_page_addr[0] = virt_to_phys(qp->sq.va); va 1207 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c uresp.rq_page_addr[0] = virt_to_phys(qp->rq.va); va 1581 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c return q->va + (q->head * q->entry_size); va 1587 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c return q->va + (idx * q->entry_size); va 1626 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c cqe = cq->va + cur_getp; va 1756 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c uresp.rq_page_addr[0] = virt_to_phys(srq->rq.va); va 2073 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c pbe = pbl_tbl->va; va 2087 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c pbe = (struct ocrdma_pbe *)pbl_tbl->va; va 2781 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c cqe = cq->va + cur_getp; va 225 drivers/infiniband/hw/qedr/qedr.h void *va; va 493 drivers/infiniband/hw/qedr/verbs.c if (!pbl[i].va) va 496 drivers/infiniband/hw/qedr/verbs.c pbl[i].va, pbl[i].pa); va 517 drivers/infiniband/hw/qedr/verbs.c void *va; va 525 drivers/infiniband/hw/qedr/verbs.c va = dma_alloc_coherent(&pdev->dev, pbl_info->pbl_size, &pa, va 527 drivers/infiniband/hw/qedr/verbs.c if (!va) va 530 drivers/infiniband/hw/qedr/verbs.c pbl_table[i].va = va; va 537 drivers/infiniband/hw/qedr/verbs.c pbl_main_tbl = (dma_addr_t *)pbl_table[0].va; va 546 drivers/infiniband/hw/qedr/verbs.c pbl_table[i].va, pbl_table[i].pa); va 623 drivers/infiniband/hw/qedr/verbs.c pbe = (struct regpair *)pbl_tbl->va; va 652 drivers/infiniband/hw/qedr/verbs.c pbe = (struct regpair *)pbl_tbl->va; va 1321 drivers/infiniband/hw/qedr/verbs.c void *va; va 1324 drivers/infiniband/hw/qedr/verbs.c va = dma_alloc_coherent(&dev->pdev->dev, va 1327 drivers/infiniband/hw/qedr/verbs.c if (!va) { va 1334 drivers/infiniband/hw/qedr/verbs.c hw_srq->virt_prod_pair_addr = va; va 1353 drivers/infiniband/hw/qedr/verbs.c va, phy_prod_pair_addr); va 1563 drivers/infiniband/hw/qedr/verbs.c qp->usq.pbl_tbl->va = out_params->sq_pbl_virt; va 1569 drivers/infiniband/hw/qedr/verbs.c qp->urq.pbl_tbl->va = out_params->rq_pbl_virt; va 2812 drivers/infiniband/hw/qedr/verbs.c pbe = (struct regpair *)pbl_table->va; va 638 drivers/infiniband/hw/usnic/usnic_ib_verbs.c usnic_dbg("va 0x%lx length 0x%zx\n", mr->umem->va, mr->umem->length); va 199 drivers/infiniband/hw/usnic/usnic_uiom.c long unsigned va, size; va 202 drivers/infiniband/hw/usnic/usnic_uiom.c va 
= interval->start << PAGE_SHIFT; va 206 drivers/infiniband/hw/usnic/usnic_uiom.c usnic_dbg("va 0x%lx size 0x%lx", va, PAGE_SIZE); va 207 drivers/infiniband/hw/usnic/usnic_uiom.c iommu_unmap(pd->domain, va, PAGE_SIZE); va 208 drivers/infiniband/hw/usnic/usnic_uiom.c va += PAGE_SIZE; va 225 drivers/infiniband/hw/usnic/usnic_uiom.c vpn_start = (uiomr->va & PAGE_MASK) >> PAGE_SHIFT; va 256 drivers/infiniband/hw/usnic/usnic_uiom.c long int va = uiomr->va & PAGE_MASK; va 264 drivers/infiniband/hw/usnic/usnic_uiom.c for (i = 0; i < chunk->nents; i++, va += PAGE_SIZE) { va 266 drivers/infiniband/hw/usnic/usnic_uiom.c if ((va >> PAGE_SHIFT) < interval_node->start) va 269 drivers/infiniband/hw/usnic/usnic_uiom.c if ((va >> PAGE_SHIFT) == interval_node->start) { va 271 drivers/infiniband/hw/usnic/usnic_uiom.c va_start = va; va 291 drivers/infiniband/hw/usnic/usnic_uiom.c va_start = va; va 296 drivers/infiniband/hw/usnic/usnic_uiom.c if ((va >> PAGE_SHIFT) == interval_node->last) { va 363 drivers/infiniband/hw/usnic/usnic_uiom.c uiomr->va = va_base; va 67 drivers/infiniband/hw/usnic/usnic_uiom.h unsigned long va; va 555 drivers/infiniband/sw/rxe/rxe_hdr.h __be64 va; va 564 drivers/infiniband/sw/rxe/rxe_hdr.h return be64_to_cpu(reth->va); va 567 drivers/infiniband/sw/rxe/rxe_hdr.h static inline void __reth_set_va(void *arg, u64 va) va 571 drivers/infiniband/sw/rxe/rxe_hdr.h reth->va = cpu_to_be64(va); va 608 drivers/infiniband/sw/rxe/rxe_hdr.h static inline void reth_set_va(struct rxe_pkt_info *pkt, u64 va) va 611 drivers/infiniband/sw/rxe/rxe_hdr.h + rxe_opcode[pkt->opcode].offset[RXE_RETH], va); va 642 drivers/infiniband/sw/rxe/rxe_hdr.h __be64 va; va 652 drivers/infiniband/sw/rxe/rxe_hdr.h return be64_to_cpu(atmeth->va); va 655 drivers/infiniband/sw/rxe/rxe_hdr.h static inline void __atmeth_set_va(void *arg, u64 va) va 659 drivers/infiniband/sw/rxe/rxe_hdr.h atmeth->va = cpu_to_be64(va); va 710 drivers/infiniband/sw/rxe/rxe_hdr.h static inline void atmeth_set_va(struct rxe_pkt_info *pkt, u64 va) va 713 drivers/infiniband/sw/rxe/rxe_hdr.h + rxe_opcode[pkt->opcode].offset[RXE_ATMETH], va); va 227 drivers/infiniband/sw/rxe/rxe_mr.c mem->va = start; va 349 drivers/infiniband/sw/rxe/rxe_mr.c u8 *va; va 394 drivers/infiniband/sw/rxe/rxe_mr.c va = (u8 *)(uintptr_t)buf->addr + offset; va 395 drivers/infiniband/sw/rxe/rxe_mr.c src = (dir == to_mem_obj) ? addr : va; va 396 drivers/infiniband/sw/rxe/rxe_mr.c dest = (dir == to_mem_obj) ? 
va : addr; va 625 drivers/infiniband/sw/rxe/rxe_mr.c mem->va = iova; va 421 drivers/infiniband/sw/rxe/rxe_resp.c u64 va; va 431 drivers/infiniband/sw/rxe/rxe_resp.c qp->resp.va = reth_va(pkt); va 439 drivers/infiniband/sw/rxe/rxe_resp.c qp->resp.va = atmeth_va(pkt); va 454 drivers/infiniband/sw/rxe/rxe_resp.c va = qp->resp.va; va 470 drivers/infiniband/sw/rxe/rxe_resp.c if (mem_check_range(mem, va, resid)) { va 528 drivers/infiniband/sw/rxe/rxe_resp.c err = rxe_mem_copy(qp->resp.mr, qp->resp.va, payload_addr(pkt), va 535 drivers/infiniband/sw/rxe/rxe_resp.c qp->resp.va += data_len; va 683 drivers/infiniband/sw/rxe/rxe_resp.c res->read.va = qp->resp.va; va 684 drivers/infiniband/sw/rxe/rxe_resp.c res->read.va_org = qp->resp.va; va 730 drivers/infiniband/sw/rxe/rxe_resp.c err = rxe_mem_copy(res->read.mr, res->read.va, payload_addr(&ack_pkt), va 751 drivers/infiniband/sw/rxe/rxe_resp.c res->read.va += payload; va 1127 drivers/infiniband/sw/rxe/rxe_resp.c res->read.va = iova; va 1036 drivers/infiniband/sw/rxe/rxe_verbs.c mr->va = ibmr->iova; va 190 drivers/infiniband/sw/rxe/rxe_verbs.h u64 va; va 212 drivers/infiniband/sw/rxe/rxe_verbs.h u64 va; va 333 drivers/infiniband/sw/rxe/rxe_verbs.h u64 va; va 161 drivers/infiniband/sw/siw/siw.h u64 va; /* VA of memory */ va 103 drivers/infiniband/sw/siw/siw_mem.c mem->va = start; va 190 drivers/infiniband/sw/siw/siw_mem.c if (addr < mem->va || addr + len > mem->va + mem->len) { va 196 drivers/infiniband/sw/siw/siw_mem.c (void *)(uintptr_t)mem->va, va 197 drivers/infiniband/sw/siw/siw_mem.c (void *)(uintptr_t)(mem->va + mem->len), va 131 drivers/infiniband/sw/siw/siw_qp_rx.c u64 offset = addr - mem->va; va 28 drivers/infiniband/sw/siw/siw_qp_tx.c u64 offset = addr - mem->va; va 526 drivers/infiniband/sw/siw/siw_qp_tx.c u64 va = sge->laddr + sge_off; va 528 drivers/infiniband/sw/siw/siw_qp_tx.c page_array[seg] = virt_to_page(va & PAGE_MASK); va 532 drivers/infiniband/sw/siw/siw_qp_tx.c (void *)(uintptr_t)va, va 959 drivers/infiniband/sw/siw/siw_qp_tx.c mem->va = base_mr->iova; va 1510 drivers/infiniband/sw/siw/siw_verbs.c mem->va = base_mr->iova; va 1513 drivers/infiniband/sw/siw/siw_verbs.c mem->len, (void *)(uintptr_t)mem->va, num_sle, va 1456 drivers/infiniband/ulp/srp/ib_srp.c desc->va = cpu_to_be64(dma_addr); va 1876 drivers/infiniband/ulp/srp/ib_srp.c buf->va = cpu_to_be64(sg_dma_address(scat)); va 1960 drivers/infiniband/ulp/srp/ib_srp.c indirect_hdr->table_desc.va = cpu_to_be64(req->indirect_dma_addr); va 909 drivers/infiniband/ulp/srpt/ib_srpt.c u64 remote_addr = be64_to_cpu(db->va); va 33 drivers/input/rmi4/rmi_bus.c vaf.va = &args; va 1232 drivers/iommu/arm-smmu.c unsigned long va, flags; va 1240 drivers/iommu/arm-smmu.c va = iova & ~0xfffUL; va 1242 drivers/iommu/arm-smmu.c arm_smmu_cb_writeq(smmu, idx, ARM_SMMU_CB_ATS1PR, va); va 1244 drivers/iommu/arm-smmu.c arm_smmu_cb_write(smmu, idx, ARM_SMMU_CB_ATS1PR, va); va 534 drivers/iommu/msm_iommu.c dma_addr_t va) va 563 drivers/iommu/msm_iommu.c SET_V2PPR(iommu->base, master->num, va & V2Pxx_VA); va 569 drivers/iommu/msm_iommu.c ret = (par & 0xFF000000) | (va & 0x00FFFFFF); va 571 drivers/iommu/msm_iommu.c ret = (par & 0xFFFFF000) | (va & 0x00000FFF); va 57 drivers/iommu/msm_iommu_hw-8xxx.h #define FL_OFFSET(va) (((va) & 0xFFF00000) >> 20) va 71 drivers/iommu/msm_iommu_hw-8xxx.h #define SL_OFFSET(va) (((va) & 0xFF000) >> 12) va 66 drivers/iommu/omap-iopgtable.h static inline phys_addr_t omap_iommu_translate(u32 d, u32 va, u32 mask) va 68 drivers/iommu/omap-iopgtable.h return (d & mask) | (va & 
(~mask)); va 1058 drivers/irqchip/irq-gic-v3-its.c void *va; va 1066 drivers/irqchip/irq-gic-v3-its.c va = page_address(its_dev->event_map.vm->vprop_page); va 1074 drivers/irqchip/irq-gic-v3-its.c va = gic_rdists->prop_table_va; va 1078 drivers/irqchip/irq-gic-v3-its.c cfg = va + hwirq - 8192; va 1628 drivers/irqchip/irq-gic-v3-its.c static void gic_reset_prop_table(void *va) va 1631 drivers/irqchip/irq-gic-v3-its.c memset(va, LPI_PROP_DEFAULT_PRIO | LPI_PROP_GROUP1, LPI_PROPBASE_SZ); va 1634 drivers/irqchip/irq-gic-v3-its.c gic_flush_dcache_to_poc(va, LPI_PROPBASE_SZ); va 96 drivers/isdn/mISDN/layer1.c va_list va; va 98 drivers/isdn/mISDN/layer1.c va_start(va, fmt); va 101 drivers/isdn/mISDN/layer1.c vaf.va = &va; va 105 drivers/isdn/mISDN/layer1.c va_end(va); va 94 drivers/isdn/mISDN/layer2.c va_list va; va 99 drivers/isdn/mISDN/layer2.c va_start(va, fmt); va 102 drivers/isdn/mISDN/layer2.c vaf.va = &va; va 107 drivers/isdn/mISDN/layer2.c va_end(va); va 375 drivers/isdn/mISDN/layer2.c p1 = (l2->vs - l2->va) % 128; va 377 drivers/isdn/mISDN/layer2.c p1 = (l2->vs - l2->va) % 8; va 586 drivers/isdn/mISDN/layer2.c return ((nr - l2->va) % 128) <= ((l2->vs - l2->va) % 128); va 588 drivers/isdn/mISDN/layer2.c return ((nr - l2->va) % 8) <= ((l2->vs - l2->va) % 8); va 596 drivers/isdn/mISDN/layer2.c while (l2->va != nr) { va 597 drivers/isdn/mISDN/layer2.c l2->va++; va 599 drivers/isdn/mISDN/layer2.c l2->va %= 128; va 601 drivers/isdn/mISDN/layer2.c l2->va %= 8; va 913 drivers/isdn/mISDN/layer2.c l2->va = 0; va 955 drivers/isdn/mISDN/layer2.c if (l2->vs != l2->va) { va 962 drivers/isdn/mISDN/layer2.c l2->va = 0; va 1013 drivers/isdn/mISDN/layer2.c } else if (l2->vs != l2->va) { va 1020 drivers/isdn/mISDN/layer2.c l2->va = 0; va 1164 drivers/isdn/mISDN/layer2.c p1 = (l2->vs - l2->va) % 128; va 1167 drivers/isdn/mISDN/layer2.c p1 = (l2->vs - l2->va) % 8; va 1231 drivers/isdn/mISDN/layer2.c } else if ((l2->va != nr) || (typ == RNR)) { va 1329 drivers/isdn/mISDN/layer2.c } else if (nr != l2->va) va 1493 drivers/isdn/mISDN/layer2.c p1 = (l2->vs - l2->va) % 128; va 1496 drivers/isdn/mISDN/layer2.c p1 = (l2->vs - l2->va) % 8; va 54 drivers/isdn/mISDN/layer2.h u_int vs, va, vr; va 74 drivers/isdn/mISDN/tei.c va_list va; va 79 drivers/isdn/mISDN/tei.c va_start(va, fmt); va 82 drivers/isdn/mISDN/tei.c vaf.va = &va; va 86 drivers/isdn/mISDN/tei.c va_end(va); va 223 drivers/isdn/mISDN/tei.c va_list va; va 228 drivers/isdn/mISDN/tei.c va_start(va, fmt); va 231 drivers/isdn/mISDN/tei.c vaf.va = &va; va 236 drivers/isdn/mISDN/tei.c va_end(va); va 658 drivers/md/dm-integrity.c char *va; va 665 drivers/md/dm-integrity.c va = lowmem_page_address(pl[pl_index].page); va 667 drivers/md/dm-integrity.c return (struct journal_sector *)(va + pl_offset); va 3222 drivers/md/dm-integrity.c char *va = lowmem_page_address(pl[idx].page); va 3228 drivers/md/dm-integrity.c sg_set_buf(&s[idx - start_index], va + start, end - start); va 3403 drivers/md/dm-integrity.c char *va = lowmem_page_address(ic->journal_xor[i].page); va 3404 drivers/md/dm-integrity.c clear_page(va); va 3405 drivers/md/dm-integrity.c sg_set_buf(&sg[i], va, PAGE_SIZE); va 372 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c pfb->base_y.va = vb2_plane_vaddr(&dst_buf->vb2_buf, 0); va 376 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c pfb->base_c.va = vb2_plane_vaddr(&dst_buf->vb2_buf, 1); va 385 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c pfb->base_y.va, &pfb->base_y.dma_addr, va 409 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c buf.va = 
vb2_plane_vaddr(&src_buf->vb2_buf, 0); va 412 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c if (!buf.va) { va 419 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c ctx->id, buf.va, &buf.dma_addr, buf.size, src_buf); va 1176 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c src_mem.va = vb2_plane_vaddr(&src_buf->vb2_buf, 0); va 1182 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c src_mem.va, &src_mem.dma_addr, va 947 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c bs_buf.va = vb2_plane_vaddr(&dst_buf->vb2_buf, 0); va 954 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c dst_buf->vb2_buf.index, bs_buf.va, va 1085 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c bs_buf.va = vb2_plane_vaddr(&dst_buf->vb2_buf, 0); va 44 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c mem->va = dma_alloc_coherent(dev, size, &mem->dma_addr, GFP_KERNEL); va 45 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c if (!mem->va) { va 51 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c mtk_v4l2_debug(3, "[%d] - va = %p", ctx->id, mem->va); va 67 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c if (!mem->va) { va 73 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c mtk_v4l2_debug(3, "[%d] - va = %p", ctx->id, mem->va); va 78 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c dma_free_coherent(dev, size, mem->va, mem->dma_addr); va 79 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c mem->va = NULL; va 16 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.h void *va; va 163 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c if (mem->va) va 176 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c if (mem->va) va 198 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c if (mem->va) va 236 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c fb->base_y.va, (u64)fb->base_y.dma_addr); va 360 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c buf = (unsigned char *)bs->va; va 385 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c if (mem->va) va 468 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c bs_va = (unsigned char *)bs->va; va 36 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c unsigned long va; va 222 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c if (fb->base_y.va == addr) { va 272 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c vsi->frm_bufs[ref_idx].buf.fb->base_y.va); va 290 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c if (vsi->sf_ref_fb[i].fb.base_y.va) { va 315 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c if (vsi->sf_ref_fb[idx].fb.base_y.va && va 324 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c if (vsi->sf_ref_fb[idx].fb.base_y.va == NULL) va 389 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c if (mem->va) va 401 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c vsi->mv_buf.va = (unsigned long)mem->va; va 407 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c if (mem->va) va 418 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c vsi->seg_id_buf.va = (unsigned long)mem->va; va 479 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c memcpy((void *)inst->cur_fb->base_y.va, va 480 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c (void *)frm_to_show->fb->base_y.va, va 482 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c memcpy((void *)inst->cur_fb->base_c.va, va 483 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c (void *)frm_to_show->fb->base_c.va, va 516 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c vsi->frm_bufs[vsi->new_fb_idx].buf.fb->base_y.va); va 558 
drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c inst = mem.va; va 569 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c if (mem.va) va 667 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c inst->vsi->mv_buf.va = (unsigned long)inst->mv_buf.va; va 672 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c inst->vsi->seg_id_buf.va = (unsigned long)inst->seg_id_buf.va; va 771 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c if (mem->va) va 775 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c if (mem->va) va 842 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c data[0] = *((unsigned int *)bs->va); va 843 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c data[1] = *((unsigned int *)(bs->va + 4)); va 844 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c data[2] = *((unsigned int *)(bs->va + 8)); va 860 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c sf_bs_src = bs->va + bs->size - sf_bs_sz; va 868 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c memcpy((void *)bs->va, va 869 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c (void *)(bs->va + va 874 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c memset(inst->seg_id_buf.va, 0, inst->seg_id_buf.size); va 260 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c inst->work_bufs[i].va = vpu_mapping_dm_addr( va 282 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c memcpy(inst->work_bufs[i].va, tmp_va, va 290 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c i, inst->work_bufs[i].va, va 396 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c memcpy(bs_buf->va + bs_size_sps, inst->pps_buf.va, bs_size_pps); va 423 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c memcpy(bs_buf->va, va 424 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c inst->work_bufs[VENC_H264_VPU_WORK_BUF_SKIP_FRAME].va, va 549 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c h264_encode_filler(inst, bs_buf->va + hdr_sz, va 553 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c tmp_bs_buf.va = bs_buf->va + hdr_sz + filler_sz; va 197 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c memcpy(inst->work_bufs[i].va, tmp_va, wb[i].size); va 203 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c i, inst->work_bufs[i].va, va 283 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c memmove(bs_buf->va + bs_hdr_len + ac_tag_size, va 284 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c bs_buf->va, bs_frm_size); va 285 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c memcpy(bs_buf->va + ac_tag_size, va 286 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c inst->work_bufs[VENC_VP8_VPU_WORK_BUF_BS_HEADER].va, va 288 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c memcpy(bs_buf->va, ac_tag, ac_tag_size); va 84 drivers/media/platform/mtk-vpu/mtk_vpu.c void *va; va 446 drivers/media/platform/mtk-vpu/mtk_vpu.c return vpu->extmem[D_FW].va + (dtcm_dmem_addr - VPU_DTCM_SIZE); va 518 drivers/media/platform/mtk-vpu/mtk_vpu.c dest = vpu->extmem[fw_type].va; va 667 drivers/media/platform/mtk-vpu/mtk_vpu.c dma_free_coherent(dev, fw_ext_size, vpu->extmem[fw_type].va, va 679 drivers/media/platform/mtk-vpu/mtk_vpu.c vpu->extmem[fw_type].va = dma_alloc_coherent(dev, va 683 drivers/media/platform/mtk-vpu/mtk_vpu.c if (!vpu->extmem[fw_type].va) { va 696 drivers/media/platform/mtk-vpu/mtk_vpu.c vpu->extmem[fw_type].va); va 25 drivers/media/platform/qcom/venus/helpers.c void *va; va 111 drivers/media/platform/qcom/venus/helpers.c dma_free_attrs(inst->core->dev, buf->size, buf->va, buf->da, va 164 
drivers/media/platform/qcom/venus/helpers.c buf->va = dma_alloc_attrs(dev, buf->size, &buf->da, GFP_KERNEL, va 166 drivers/media/platform/qcom/venus/helpers.c if (!buf->va) { va 211 drivers/media/platform/qcom/venus/helpers.c buf->va = dma_alloc_attrs(dev, buf->size, &buf->da, GFP_KERNEL, va 213 drivers/media/platform/qcom/venus/helpers.c if (!buf->va) { va 236 drivers/media/platform/qcom/venus/helpers.c dma_free_attrs(dev, buf->size, buf->va, buf->da, buf->attrs); va 258 drivers/media/platform/qcom/venus/helpers.c dma_free_attrs(inst->core->dev, buf->size, buf->va, buf->da, va 337 drivers/media/platform/qcom/venus/helpers.c dma_free_attrs(inst->core->dev, buf->size, buf->va, buf->da, va 714 drivers/media/tuners/tda18271-common.c vaf.va = &args; va 143 drivers/misc/fastrpc.c void *va; va 632 drivers/misc/fastrpc.c map->va = sg_virt(map->table->sgl); va 4916 drivers/misc/habanalabs/goya/goya.c bool is_hard, u32 asid, u64 va, u64 size) va 567 drivers/misc/habanalabs/habanalabs.h u32 asid, u64 va, u64 size); va 101 drivers/misc/mic/bus/scif_bus.h void (*unmap)(struct scif_hw_dev *sdev, void __iomem *va); va 98 drivers/misc/mic/bus/vop_bus.h void (*unmap)(struct vop_device *vpdev, void __iomem *va); va 222 drivers/misc/mic/card/mic_device.c static void ___mic_iounmap(struct scif_hw_dev *scdev, void __iomem *va) va 226 drivers/misc/mic/card/mic_device.c mic_card_unmap(&mdrv->mdev, va); va 293 drivers/misc/mic/card/mic_device.c static void __mic_iounmap(struct vop_device *vpdev, void __iomem *va) va 297 drivers/misc/mic/card/mic_device.c mic_card_unmap(&mdrv->mdev, va); va 100 drivers/misc/mic/card/mic_device.h return ioread32(mw->va + offset); va 114 drivers/misc/mic/card/mic_device.h iowrite32(val, mw->va + offset); va 245 drivers/misc/mic/card/mic_x100.c mdev->mmio.va = devm_ioremap(&pdev->dev, MIC_X100_MMIO_BASE, va 247 drivers/misc/mic/card/mic_x100.c if (!mdev->mmio.va) { va 256 drivers/misc/mic/card/mic_x100.c mdrv->mdev.mmio.va); va 34 drivers/misc/mic/common/mic_dev.h void __iomem *va; va 51 drivers/misc/mic/cosm/cosm_debugfs.c log_buf_len_va = cdev->hw_ops->aper(cdev)->va + aper_offset; va 53 drivers/misc/mic/cosm/cosm_debugfs.c log_buf_va = cdev->hw_ops->aper(cdev)->va + aper_offset; va 31 drivers/misc/mic/host/mic_boot.c void *va = phys_to_virt(page_to_phys(page)) + offset; va 34 drivers/misc/mic/host/mic_boot.c return mic_map_single(mdev, va, size); va 108 drivers/misc/mic/host/mic_boot.c return mdev->aper.va + pa; va 111 drivers/misc/mic/host/mic_boot.c static void __mic_iounmap(struct vop_device *vpdev, void __iomem *va) va 140 drivers/misc/mic/host/mic_boot.c void *va = kmalloc(size, gfp | __GFP_ZERO); va 142 drivers/misc/mic/host/mic_boot.c if (va) { va 143 drivers/misc/mic/host/mic_boot.c tmp = mic_map_single(mdev, va, size); va 145 drivers/misc/mic/host/mic_boot.c kfree(va); va 146 drivers/misc/mic/host/mic_boot.c va = NULL; va 151 drivers/misc/mic/host/mic_boot.c return va; va 169 drivers/misc/mic/host/mic_boot.c void *va = phys_to_virt(page_to_phys(page)) + offset; va 173 drivers/misc/mic/host/mic_boot.c return mic_map_single(mdev, va, size); va 292 drivers/misc/mic/host/mic_boot.c return mdev->aper.va + pa; va 295 drivers/misc/mic/host/mic_boot.c static void ___mic_iounmap(struct scif_hw_dev *scdev, void __iomem *va) va 320 drivers/misc/mic/host/mic_boot.c void *va = phys_to_virt(page_to_phys(page)) + offset; va 323 drivers/misc/mic/host/mic_boot.c return mic_map_single(mdev, va, size); va 464 drivers/misc/mic/host/mic_boot.c &mbus_hw_ops, id, mdev->mmio.va); va 135 
drivers/misc/mic/host/mic_device.h return ioread32(mw->va + offset); va 149 drivers/misc/mic/host/mic_device.h iowrite32(val, mw->va + offset); va 201 drivers/misc/mic/host/mic_main.c mdev->mmio.va = pci_ioremap_bar(pdev, mdev->ops->mmio_bar); va 202 drivers/misc/mic/host/mic_main.c if (!mdev->mmio.va) { va 210 drivers/misc/mic/host/mic_main.c mdev->aper.va = ioremap_wc(mdev->aper.pa, mdev->aper.len); va 211 drivers/misc/mic/host/mic_main.c if (!mdev->aper.va) { va 254 drivers/misc/mic/host/mic_main.c iounmap(mdev->aper.va); va 256 drivers/misc/mic/host/mic_main.c iounmap(mdev->mmio.va); va 290 drivers/misc/mic/host/mic_main.c iounmap(mdev->aper.va); va 291 drivers/misc/mic/host/mic_main.c iounmap(mdev->mmio.va); va 295 drivers/misc/mic/host/mic_smpt.c dma_addr_t mic_map_single(struct mic_device *mdev, void *va, size_t size) va 300 drivers/misc/mic/host/mic_smpt.c pci_map_single(pdev, va, size, PCI_DMA_BIDIRECTIONAL); va 63 drivers/misc/mic/host/mic_smpt.h dma_addr_t mic_map_single(struct mic_device *mdev, void *va, size_t size); va 345 drivers/misc/mic/host/mic_x100.c void __iomem *cmd_line_va = mdev->aper.va + mdev->bootaddr + fw->size; va 374 drivers/misc/mic/host/mic_x100.c struct boot_params __iomem *bp = mdev->aper.va + mdev->bootaddr; va 387 drivers/misc/mic/host/mic_x100.c memcpy_toio(mdev->aper.va + (mdev->bootaddr << 1), fw->data, fw->size); va 456 drivers/misc/mic/host/mic_x100.c memcpy_toio(mdev->aper.va + mdev->bootaddr, fw->data, fw->size); va 650 drivers/misc/mic/scif/scif_dma.c void *va = NULL; va 655 drivers/misc/mic/scif/scif_dma.c va = page_address(pages[page_nr]) + page_off; va 657 drivers/misc/mic/scif/scif_dma.c return va; va 19 drivers/misc/mic/scif/scif_map.h void *va; va 22 drivers/misc/mic/scif/scif_map.h va = kmalloc(size, gfp); va 23 drivers/misc/mic/scif/scif_map.h if (va) va 24 drivers/misc/mic/scif/scif_map.h *dma_handle = virt_to_phys(va); va 26 drivers/misc/mic/scif/scif_map.h va = dma_alloc_coherent(&scifdev->sdev->dev, va 28 drivers/misc/mic/scif/scif_map.h if (va && scifdev_is_p2p(scifdev)) va 31 drivers/misc/mic/scif/scif_map.h return va; va 35 drivers/misc/mic/scif/scif_map.h scif_free_coherent(void *va, dma_addr_t local, va 39 drivers/misc/mic/scif/scif_map.h kfree(va); va 44 drivers/misc/mic/scif/scif_map.h size, va, local); va 267 drivers/misc/mic/scif/scif_mmap.c ((*pages)->va = scif_zalloc(nr_pages * sizeof(void *))); va 268 drivers/misc/mic/scif/scif_mmap.c if (!(*pages)->va) { va 285 drivers/misc/mic/scif/scif_mmap.c (*pages)->va[i] = va 286 drivers/misc/mic/scif/scif_mmap.c ep->remote_dev->sdev->aper->va + va 298 drivers/misc/mic/scif/scif_mmap.c scif_free((*pages)->va, va 357 drivers/misc/mic/scif/scif_mmap.c scif_free(pages->va, pages->nr_pages * sizeof(void *)); va 791 drivers/misc/mic/scif/scif_nodeqp.c newdev->mmio.va = ioremap_nocache(msg->payload[1], sdev->mmio->len); va 792 drivers/misc/mic/scif/scif_nodeqp.c if (!newdev->mmio.va) { va 834 drivers/misc/mic/scif/scif_nodeqp.c iounmap(newdev->mmio.va); va 835 drivers/misc/mic/scif/scif_nodeqp.c newdev->mmio.va = NULL; va 308 drivers/misc/mic/vop/vop_main.c void __iomem *va; va 325 drivers/misc/mic/vop/vop_main.c va = vpdev->hw_ops->remap(vpdev, le64_to_cpu(config.address), vr_size); va 326 drivers/misc/mic/vop/vop_main.c if (!va) va 328 drivers/misc/mic/vop/vop_main.c vdev->vr[index] = va; va 329 drivers/misc/mic/vop/vop_main.c memset_io(va, 0x0, _vr_size); va 331 drivers/misc/mic/vop/vop_main.c info = va + _vr_size; va 353 drivers/misc/mic/vop/vop_main.c (void __force *)va, vop_notify, 
callback, va 301 drivers/misc/mic/vop/vop_vringh.c vr->va = (void *) va 304 drivers/misc/mic/vop/vop_vringh.c if (!vr->va) { va 311 drivers/misc/mic/vop/vop_vringh.c vr->info = vr->va + vring_size(num, MIC_VIRTIO_RING_ALIGN); va 313 drivers/misc/mic/vop/vop_vringh.c vr_addr = dma_map_single(&vpdev->dev, vr->va, vr_size, va 316 drivers/misc/mic/vop/vop_vringh.c free_pages((unsigned long)vr->va, get_order(vr_size)); va 324 drivers/misc/mic/vop/vop_vringh.c vring_init(&vr->vr, num, vr->va, MIC_VIRTIO_RING_ALIGN); va 341 drivers/misc/mic/vop/vop_vringh.c __func__, __LINE__, i, vr->va, vr->info, vr_size); va 387 drivers/misc/mic/vop/vop_vringh.c free_pages((unsigned long)vvr->vring.va, va 445 drivers/misc/mic/vop/vop_vringh.c free_pages((unsigned long)vvr->vring.va, va 1063 drivers/misc/mic/vop/vop_vringh.c *pa = virt_to_phys(vvr->vring.va); va 525 drivers/misc/vmw_vmci/vmci_host.c if (set_va_info.va) { va 532 drivers/misc/vmw_vmci/vmci_host.c set_va_info.va); va 342 drivers/misc/vmw_vmci/vmci_queue_pair.c void *va; va 346 drivers/misc/vmw_vmci/vmci_queue_pair.c va = kmap(kernel_if->u.h.page[page_index]); va 348 drivers/misc/vmw_vmci/vmci_queue_pair.c va = kernel_if->u.g.vas[page_index + 1]; va 357 drivers/misc/vmw_vmci/vmci_queue_pair.c if (!copy_from_iter_full((u8 *)va + page_offset, to_copy, va 389 drivers/misc/vmw_vmci/vmci_queue_pair.c void *va; va 394 drivers/misc/vmw_vmci/vmci_queue_pair.c va = kmap(kernel_if->u.h.page[page_index]); va 396 drivers/misc/vmw_vmci/vmci_queue_pair.c va = kernel_if->u.g.vas[page_index + 1]; va 405 drivers/misc/vmw_vmci/vmci_queue_pair.c err = copy_to_iter((u8 *)va + page_offset, to_copy, to); va 44 drivers/misc/vmw_vmci/vmci_queue_pair.h u64 va; /* Start VA of queue pair PPNs. */ va 153 drivers/mtd/ubi/misc.c vaf.va = &args; va 169 drivers/mtd/ubi/misc.c vaf.va = &args; va 186 drivers/mtd/ubi/misc.c vaf.va = &args; va 870 drivers/net/ethernet/amazon/ena/ena_netdev.c void *va; va 887 drivers/net/ethernet/amazon/ena/ena_netdev.c va = page_address(rx_info->page) + rx_info->page_offset; va 888 drivers/net/ethernet/amazon/ena/ena_netdev.c prefetch(va + NET_IP_ALIGN); va 904 drivers/net/ethernet/amazon/ena/ena_netdev.c skb_copy_to_linear_data(skb, va, len); va 130 drivers/net/ethernet/amd/hplance.c unsigned long va = (d->resource.start + DIO_VIRADDRBASE); va 135 drivers/net/ethernet/amd/hplance.c out_8(va + DIO_IDOFF, 0xff); va 139 drivers/net/ethernet/amd/hplance.c dev->base_addr = va; va 147 drivers/net/ethernet/amd/hplance.c dev->dev_addr[i] = ((in_8(va + HPLANCE_NVRAMOFF + i*4 + 1) & 0xF) << 4) va 148 drivers/net/ethernet/amd/hplance.c | (in_8(va + HPLANCE_NVRAMOFF + i*4 + 3) & 0xF); va 153 drivers/net/ethernet/amd/hplance.c lp->lance.base = va; va 154 drivers/net/ethernet/amd/hplance.c lp->lance.init_block = (struct lance_init_block *)(va + HPLANCE_MEMOFF); /* CPU addr */ va 600 drivers/net/ethernet/cavium/liquidio/lio_core.c unsigned char *va; va 605 drivers/net/ethernet/cavium/liquidio/lio_core.c va = page_address(pg_info->page) + va 607 drivers/net/ethernet/cavium/liquidio/lio_core.c memcpy(skb->data, va, MIN_SKB_SIZE); va 267 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c unsigned char *va; va 271 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c va = page_address(pg_info->page) + va 273 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c memcpy(skb->data, va, MIN_SKB_SIZE); va 441 drivers/net/ethernet/cavium/liquidio/octeon_network.h unsigned char *va; va 444 drivers/net/ethernet/cavium/liquidio/octeon_network.h va = page_address(pg_info->page) + 
pg_info->page_offset; va 446 drivers/net/ethernet/cavium/liquidio/octeon_network.h return va; va 93 drivers/net/ethernet/chelsio/cxgb3/adapter.h void *va; va 411 drivers/net/ethernet/chelsio/cxgb3/sge.c static inline int add_one_rx_buf(void *va, unsigned int len, va 417 drivers/net/ethernet/chelsio/cxgb3/sge.c mapping = pci_map_single(pdev, va, len, PCI_DMA_FROMDEVICE); va 452 drivers/net/ethernet/chelsio/cxgb3/sge.c q->pg_chunk.va = page_address(q->pg_chunk.page); va 453 drivers/net/ethernet/chelsio/cxgb3/sge.c q->pg_chunk.p_cnt = q->pg_chunk.va + (PAGE_SIZE << order) - va 473 drivers/net/ethernet/chelsio/cxgb3/sge.c q->pg_chunk.va += q->buf_size; va 858 drivers/net/ethernet/chelsio/cxgb3/sge.c memcpy(newskb->data, sd->pg_chunk.va, len); va 895 drivers/net/ethernet/chelsio/cxgb3/sge.c memcpy(newskb->data, sd->pg_chunk.va, SGE_RX_PULL_LEN); va 2170 drivers/net/ethernet/chelsio/cxgb3/sge.c cpl = qs->lro_va = sd->pg_chunk.va + 2; va 2373 drivers/net/ethernet/chelsio/cxgb3/sge.c void *addr = fl->sdesc[fl->cidx].pg_chunk.va; va 657 drivers/net/ethernet/chelsio/cxgb4/cxgb4.h void *va; /* virtual address of first byte */ va 2657 drivers/net/ethernet/chelsio/cxgb4/sge.c skb_copy_to_linear_data(skb, gl->va, gl->tot_len); va 2663 drivers/net/ethernet/chelsio/cxgb4/sge.c skb_copy_to_linear_data(skb, gl->va, pull_len); va 3191 drivers/net/ethernet/chelsio/cxgb4/sge.c si.va = page_address(si.frags[0].page) + va 3193 drivers/net/ethernet/chelsio/cxgb4/sge.c prefetch(si.va); va 154 drivers/net/ethernet/chelsio/cxgb4vf/adapter.h void *va; /* virtual address of first byte */ va 1524 drivers/net/ethernet/chelsio/cxgb4vf/sge.c skb_copy_to_linear_data(skb, gl->va, gl->tot_len); va 1530 drivers/net/ethernet/chelsio/cxgb4vf/sge.c skb_copy_to_linear_data(skb, gl->va, pull_len); va 1830 drivers/net/ethernet/chelsio/cxgb4vf/sge.c gl.va = (page_address(gl.frags[0].page) + va 1832 drivers/net/ethernet/chelsio/cxgb4vf/sge.c prefetch(gl.va); va 121 drivers/net/ethernet/emulex/benet/be.h void *va; va 154 drivers/net/ethernet/emulex/benet/be.h return q->dma_mem.va + q->head * q->entry_size; va 159 drivers/net/ethernet/emulex/benet/be.h return q->dma_mem.va + q->tail * q->entry_size; va 164 drivers/net/ethernet/emulex/benet/be.h return q->dma_mem.va + index * q->entry_size; va 668 drivers/net/ethernet/emulex/benet/be_cmds.c struct be_mcc_mailbox *mbox = mbox_mem->va; va 820 drivers/net/ethernet/emulex/benet/be_cmds.c int i, buf_pages = min(PAGES_4K_SPANNED(mem->va, mem->size), max_pages); va 834 drivers/net/ethernet/emulex/benet/be_cmds.c = &((struct be_mcc_mailbox *)(mbox_mem->va))->wrb; va 1014 drivers/net/ethernet/emulex/benet/be_cmds.c req->num_pages = cpu_to_le16(PAGES_4K_SPANNED(q_mem->va, q_mem->size)); va 1176 drivers/net/ethernet/emulex/benet/be_cmds.c req->num_pages = cpu_to_le16(PAGES_4K_SPANNED(q_mem->va, q_mem->size)); va 1254 drivers/net/ethernet/emulex/benet/be_cmds.c req->num_pages = cpu_to_le16(PAGES_4K_SPANNED(q_mem->va, q_mem->size)); va 1319 drivers/net/ethernet/emulex/benet/be_cmds.c req->num_pages = cpu_to_le16(PAGES_4K_SPANNED(q_mem->va, q_mem->size)); va 1381 drivers/net/ethernet/emulex/benet/be_cmds.c req->num_pages = PAGES_4K_SPANNED(q_mem->va, q_mem->size); va 1600 drivers/net/ethernet/emulex/benet/be_cmds.c hdr = nonemb_cmd->va; va 1644 drivers/net/ethernet/emulex/benet/be_cmds.c req = nonemb_cmd->va; va 1805 drivers/net/ethernet/emulex/benet/be_cmds.c get_fat_cmd.va = dma_alloc_coherent(&adapter->pdev->dev, va 1808 drivers/net/ethernet/emulex/benet/be_cmds.c if (!get_fat_cmd.va) va 1822 
drivers/net/ethernet/emulex/benet/be_cmds.c req = get_fat_cmd.va; va 1836 drivers/net/ethernet/emulex/benet/be_cmds.c struct be_cmd_resp_get_fat *resp = get_fat_cmd.va; va 1850 drivers/net/ethernet/emulex/benet/be_cmds.c get_fat_cmd.va, get_fat_cmd.dma); va 1979 drivers/net/ethernet/emulex/benet/be_cmds.c struct be_cmd_req_rx_filter *req = mem->va; va 2299 drivers/net/ethernet/emulex/benet/be_cmds.c cmd.va = dma_alloc_coherent(&adapter->pdev->dev, cmd.size, &cmd.dma, va 2301 drivers/net/ethernet/emulex/benet/be_cmds.c if (!cmd.va) { va 2313 drivers/net/ethernet/emulex/benet/be_cmds.c req = cmd.va; va 2323 drivers/net/ethernet/emulex/benet/be_cmds.c struct be_cmd_resp_port_type *resp = cmd.va; va 2329 drivers/net/ethernet/emulex/benet/be_cmds.c dma_free_coherent(&adapter->pdev->dev, cmd.size, cmd.va, cmd.dma); va 2545 drivers/net/ethernet/emulex/benet/be_cmds.c req = cmd->va; va 2690 drivers/net/ethernet/emulex/benet/be_cmds.c struct be_cmd_write_flashrom *req = flash_cmd->va; va 3063 drivers/net/ethernet/emulex/benet/be_cmds.c flash_cmd.va = dma_alloc_coherent(dev, flash_cmd.size, &flash_cmd.dma, va 3065 drivers/net/ethernet/emulex/benet/be_cmds.c if (!flash_cmd.va) va 3068 drivers/net/ethernet/emulex/benet/be_cmds.c dest_image_ptr = flash_cmd.va + va 3101 drivers/net/ethernet/emulex/benet/be_cmds.c dma_free_coherent(dev, flash_cmd.size, flash_cmd.va, flash_cmd.dma); va 3181 drivers/net/ethernet/emulex/benet/be_cmds.c flash_cmd.va = dma_alloc_coherent(dev, flash_cmd.size, &flash_cmd.dma, va 3183 drivers/net/ethernet/emulex/benet/be_cmds.c if (!flash_cmd.va) va 3203 drivers/net/ethernet/emulex/benet/be_cmds.c dma_free_coherent(dev, flash_cmd.size, flash_cmd.va, flash_cmd.dma); va 3224 drivers/net/ethernet/emulex/benet/be_cmds.c req = nonemb_cmd->va; va 3355 drivers/net/ethernet/emulex/benet/be_cmds.c req = cmd->va; va 3374 drivers/net/ethernet/emulex/benet/be_cmds.c resp = cmd->va; va 3400 drivers/net/ethernet/emulex/benet/be_cmds.c req = nonemb_cmd->va; va 3432 drivers/net/ethernet/emulex/benet/be_cmds.c cmd.va = dma_alloc_coherent(&adapter->pdev->dev, cmd.size, &cmd.dma, va 3434 drivers/net/ethernet/emulex/benet/be_cmds.c if (!cmd.va) { va 3440 drivers/net/ethernet/emulex/benet/be_cmds.c req = cmd.va; va 3449 drivers/net/ethernet/emulex/benet/be_cmds.c cmd.va + sizeof(struct be_cmd_req_hdr); va 3467 drivers/net/ethernet/emulex/benet/be_cmds.c dma_free_coherent(&adapter->pdev->dev, cmd.size, cmd.va, cmd.dma); va 3519 drivers/net/ethernet/emulex/benet/be_cmds.c attribs_cmd.va = dma_alloc_coherent(&adapter->pdev->dev, va 3522 drivers/net/ethernet/emulex/benet/be_cmds.c if (!attribs_cmd.va) { va 3533 drivers/net/ethernet/emulex/benet/be_cmds.c req = attribs_cmd.va; va 3541 drivers/net/ethernet/emulex/benet/be_cmds.c attribs = attribs_cmd.va + sizeof(struct be_cmd_resp_hdr); va 3556 drivers/net/ethernet/emulex/benet/be_cmds.c if (attribs_cmd.va) va 3558 drivers/net/ethernet/emulex/benet/be_cmds.c attribs_cmd.va, attribs_cmd.dma); va 3696 drivers/net/ethernet/emulex/benet/be_cmds.c get_mac_list_cmd.va = dma_alloc_coherent(&adapter->pdev->dev, va 3701 drivers/net/ethernet/emulex/benet/be_cmds.c if (!get_mac_list_cmd.va) { va 3715 drivers/net/ethernet/emulex/benet/be_cmds.c req = get_mac_list_cmd.va; va 3733 drivers/net/ethernet/emulex/benet/be_cmds.c get_mac_list_cmd.va; va 3773 drivers/net/ethernet/emulex/benet/be_cmds.c get_mac_list_cmd.va, get_mac_list_cmd.dma); va 3826 drivers/net/ethernet/emulex/benet/be_cmds.c cmd.va = dma_alloc_coherent(&adapter->pdev->dev, cmd.size, &cmd.dma, va 3828 
drivers/net/ethernet/emulex/benet/be_cmds.c if (!cmd.va) va 3839 drivers/net/ethernet/emulex/benet/be_cmds.c req = cmd.va; va 3852 drivers/net/ethernet/emulex/benet/be_cmds.c dma_free_coherent(&adapter->pdev->dev, cmd.size, cmd.va, cmd.dma); va 4032 drivers/net/ethernet/emulex/benet/be_cmds.c cmd.va = dma_alloc_coherent(&adapter->pdev->dev, cmd.size, &cmd.dma, va 4034 drivers/net/ethernet/emulex/benet/be_cmds.c if (!cmd.va) { va 4046 drivers/net/ethernet/emulex/benet/be_cmds.c req = cmd.va; va 4059 drivers/net/ethernet/emulex/benet/be_cmds.c resp = (struct be_cmd_resp_acpi_wol_magic_config_v1 *)cmd.va; va 4070 drivers/net/ethernet/emulex/benet/be_cmds.c if (cmd.va) va 4071 drivers/net/ethernet/emulex/benet/be_cmds.c dma_free_coherent(&adapter->pdev->dev, cmd.size, cmd.va, va 4086 drivers/net/ethernet/emulex/benet/be_cmds.c extfat_cmd.va = dma_alloc_coherent(&adapter->pdev->dev, va 4089 drivers/net/ethernet/emulex/benet/be_cmds.c if (!extfat_cmd.va) va 4097 drivers/net/ethernet/emulex/benet/be_cmds.c (extfat_cmd.va + sizeof(struct be_cmd_resp_hdr)); va 4110 drivers/net/ethernet/emulex/benet/be_cmds.c dma_free_coherent(&adapter->pdev->dev, extfat_cmd.size, extfat_cmd.va, va 4124 drivers/net/ethernet/emulex/benet/be_cmds.c extfat_cmd.va = dma_alloc_coherent(&adapter->pdev->dev, va 4128 drivers/net/ethernet/emulex/benet/be_cmds.c if (!extfat_cmd.va) { va 4136 drivers/net/ethernet/emulex/benet/be_cmds.c cfgs = (struct be_fat_conf_params *)(extfat_cmd.va + va 4144 drivers/net/ethernet/emulex/benet/be_cmds.c dma_free_coherent(&adapter->pdev->dev, extfat_cmd.size, extfat_cmd.va, va 4170 drivers/net/ethernet/emulex/benet/be_cmds.c req = cmd->va; va 4198 drivers/net/ethernet/emulex/benet/be_cmds.c req = cmd->va; va 4351 drivers/net/ethernet/emulex/benet/be_cmds.c cmd.va = dma_alloc_coherent(&adapter->pdev->dev, cmd.size, &cmd.dma, va 4353 drivers/net/ethernet/emulex/benet/be_cmds.c if (!cmd.va) { va 4365 drivers/net/ethernet/emulex/benet/be_cmds.c req = cmd.va; va 4376 drivers/net/ethernet/emulex/benet/be_cmds.c struct be_cmd_resp_get_func_config *resp = cmd.va; va 4400 drivers/net/ethernet/emulex/benet/be_cmds.c if (cmd.va) va 4401 drivers/net/ethernet/emulex/benet/be_cmds.c dma_free_coherent(&adapter->pdev->dev, cmd.size, cmd.va, va 4449 drivers/net/ethernet/emulex/benet/be_cmds.c cmd.va = dma_alloc_coherent(&adapter->pdev->dev, cmd.size, &cmd.dma, va 4451 drivers/net/ethernet/emulex/benet/be_cmds.c if (!cmd.va) va 4454 drivers/net/ethernet/emulex/benet/be_cmds.c req = cmd.va; va 4475 drivers/net/ethernet/emulex/benet/be_cmds.c resp = cmd.va; va 4519 drivers/net/ethernet/emulex/benet/be_cmds.c if (cmd.va) va 4520 drivers/net/ethernet/emulex/benet/be_cmds.c dma_free_coherent(&adapter->pdev->dev, cmd.size, cmd.va, va 4536 drivers/net/ethernet/emulex/benet/be_cmds.c cmd.va = dma_alloc_coherent(&adapter->pdev->dev, cmd.size, &cmd.dma, va 4538 drivers/net/ethernet/emulex/benet/be_cmds.c if (!cmd.va) va 4541 drivers/net/ethernet/emulex/benet/be_cmds.c req = cmd.va; va 4552 drivers/net/ethernet/emulex/benet/be_cmds.c if (cmd.va) va 4553 drivers/net/ethernet/emulex/benet/be_cmds.c dma_free_coherent(&adapter->pdev->dev, cmd.size, cmd.va, va 1010 drivers/net/ethernet/emulex/benet/be_cmds.h struct lancer_cmd_resp_pport_stats *cmd = adapter->stats_cmd.va; va 273 drivers/net/ethernet/emulex/benet/be_ethtool.c read_cmd.va = dma_alloc_coherent(&adapter->pdev->dev, read_cmd.size, va 276 drivers/net/ethernet/emulex/benet/be_ethtool.c if (!read_cmd.va) { va 290 drivers/net/ethernet/emulex/benet/be_ethtool.c 
memcpy(buf + total_read_len, read_cmd.va, read_len); va 298 drivers/net/ethernet/emulex/benet/be_ethtool.c dma_free_coherent(&adapter->pdev->dev, read_cmd.size, read_cmd.va, va 815 drivers/net/ethernet/emulex/benet/be_ethtool.c cmd.va = dma_alloc_coherent(dev, cmd.size, &cmd.dma, GFP_KERNEL); va 816 drivers/net/ethernet/emulex/benet/be_ethtool.c if (!cmd.va) va 838 drivers/net/ethernet/emulex/benet/be_ethtool.c dma_free_coherent(dev, cmd.size, cmd.va, cmd.dma); va 851 drivers/net/ethernet/emulex/benet/be_ethtool.c ddrdma_cmd.va = dma_alloc_coherent(&adapter->pdev->dev, va 854 drivers/net/ethernet/emulex/benet/be_ethtool.c if (!ddrdma_cmd.va) va 865 drivers/net/ethernet/emulex/benet/be_ethtool.c dma_free_coherent(&adapter->pdev->dev, ddrdma_cmd.size, ddrdma_cmd.va, va 1030 drivers/net/ethernet/emulex/benet/be_ethtool.c eeprom_cmd.va = dma_alloc_coherent(&adapter->pdev->dev, va 1034 drivers/net/ethernet/emulex/benet/be_ethtool.c if (!eeprom_cmd.va) va 1040 drivers/net/ethernet/emulex/benet/be_ethtool.c resp = eeprom_cmd.va; va 1043 drivers/net/ethernet/emulex/benet/be_ethtool.c dma_free_coherent(&adapter->pdev->dev, eeprom_cmd.size, eeprom_cmd.va, va 150 drivers/net/ethernet/emulex/benet/be_main.c if (mem->va) { va 151 drivers/net/ethernet/emulex/benet/be_main.c dma_free_coherent(&adapter->pdev->dev, mem->size, mem->va, va 153 drivers/net/ethernet/emulex/benet/be_main.c mem->va = NULL; va 166 drivers/net/ethernet/emulex/benet/be_main.c mem->va = dma_alloc_coherent(&adapter->pdev->dev, mem->size, va 168 drivers/net/ethernet/emulex/benet/be_main.c if (!mem->va) va 385 drivers/net/ethernet/emulex/benet/be_main.c struct be_cmd_resp_get_stats_v0 *cmd = adapter->stats_cmd.va; va 389 drivers/net/ethernet/emulex/benet/be_main.c struct be_cmd_resp_get_stats_v1 *cmd = adapter->stats_cmd.va; va 393 drivers/net/ethernet/emulex/benet/be_main.c struct be_cmd_resp_get_stats_v2 *cmd = adapter->stats_cmd.va; va 1437 drivers/net/ethernet/emulex/benet/be_main.c entry = txo->q.dma_mem.va; va 1447 drivers/net/ethernet/emulex/benet/be_main.c entry = txo->cq.dma_mem.va; va 5746 drivers/net/ethernet/emulex/benet/be_main.c if (mem->va) va 5747 drivers/net/ethernet/emulex/benet/be_main.c dma_free_coherent(dev, mem->size, mem->va, mem->dma); va 5750 drivers/net/ethernet/emulex/benet/be_main.c if (mem->va) va 5751 drivers/net/ethernet/emulex/benet/be_main.c dma_free_coherent(dev, mem->size, mem->va, mem->dma); va 5754 drivers/net/ethernet/emulex/benet/be_main.c if (mem->va) va 5755 drivers/net/ethernet/emulex/benet/be_main.c dma_free_coherent(dev, mem->size, mem->va, mem->dma); va 5769 drivers/net/ethernet/emulex/benet/be_main.c mbox_mem_alloc->va = dma_alloc_coherent(dev, mbox_mem_alloc->size, va 5772 drivers/net/ethernet/emulex/benet/be_main.c if (!mbox_mem_alloc->va) va 5776 drivers/net/ethernet/emulex/benet/be_main.c mbox_mem_align->va = PTR_ALIGN(mbox_mem_alloc->va, 16); va 5780 drivers/net/ethernet/emulex/benet/be_main.c rx_filter->va = dma_alloc_coherent(dev, rx_filter->size, va 5782 drivers/net/ethernet/emulex/benet/be_main.c if (!rx_filter->va) { va 5795 drivers/net/ethernet/emulex/benet/be_main.c stats_cmd->va = dma_alloc_coherent(dev, stats_cmd->size, va 5797 drivers/net/ethernet/emulex/benet/be_main.c if (!stats_cmd->va) { va 5827 drivers/net/ethernet/emulex/benet/be_main.c dma_free_coherent(dev, rx_filter->size, rx_filter->va, rx_filter->dma); va 5829 drivers/net/ethernet/emulex/benet/be_main.c dma_free_coherent(dev, mbox_mem_alloc->size, mbox_mem_alloc->va, va 234 drivers/net/ethernet/google/gve/gve_rx.c 
void *va = page_info->page_address + GVE_RX_PAD + va 242 drivers/net/ethernet/google/gve/gve_rx.c skb_copy_to_linear_data(skb, va, len); va 547 drivers/net/ethernet/hisilicon/hns/hns_enet.c unsigned char *va; va 557 drivers/net/ethernet/hisilicon/hns/hns_enet.c va = (unsigned char *)desc_cb->buf + desc_cb->page_offset; va 560 drivers/net/ethernet/hisilicon/hns/hns_enet.c prefetch(va); va 562 drivers/net/ethernet/hisilicon/hns/hns_enet.c prefetch(va + L1_CACHE_BYTES); va 579 drivers/net/ethernet/hisilicon/hns/hns_enet.c memcpy(__skb_put(skb, length), va, ALIGN(length, sizeof(long))); va 596 drivers/net/ethernet/hisilicon/hns/hns_enet.c pull_len = eth_get_headlen(ndev, va, HNS_RX_HEAD_SIZE); va 597 drivers/net/ethernet/hisilicon/hns/hns_enet.c memcpy(__skb_put(skb, pull_len), va, va 2634 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c unsigned char *va) va 2659 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c memcpy(__skb_put(skb, length), va, ALIGN(length, sizeof(long))); va 2674 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c ring->pull_len = eth_get_headlen(netdev, va, HNS3_RX_HEAD_SIZE); va 2893 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c ring->va = (unsigned char *)desc_cb->buf + desc_cb->page_offset; va 2902 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c prefetch(ring->va); va 2904 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c prefetch(ring->va + L1_CACHE_BYTES); va 2908 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c ret = hns3_alloc_skb(ring, length, ring->va); va 2921 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c memcpy(skb->data, ring->va, va 2932 drivers/net/ethernet/hisilicon/hns3/hns3_enet.c memcpy(skb->data, ring->va, va 426 drivers/net/ethernet/hisilicon/hns3/hns3_enet.h unsigned char *va; /* first buffer address for current packet */ va 27 drivers/net/ethernet/hisilicon/hns3/hns3vf/hclgevf_cmd.h void *va; va 257 drivers/net/ethernet/intel/fm10k/fm10k_main.c unsigned char *va = page_address(page) + rx_buffer->page_offset; va 269 drivers/net/ethernet/intel/fm10k/fm10k_main.c memcpy(__skb_put(skb, size), va, ALIGN(size, sizeof(long))); va 283 drivers/net/ethernet/intel/fm10k/fm10k_main.c pull_len = eth_get_headlen(skb->dev, va, FM10K_RX_HDR_LEN); va 286 drivers/net/ethernet/intel/fm10k/fm10k_main.c memcpy(__skb_put(skb, pull_len), va, ALIGN(pull_len, sizeof(long))); va 289 drivers/net/ethernet/intel/fm10k/fm10k_main.c va += pull_len; va 294 drivers/net/ethernet/intel/fm10k/fm10k_main.c (unsigned long)va & ~PAGE_MASK, size, truesize); va 134 drivers/net/ethernet/intel/i40e/i40e_adminq.c hw->aq.arq.r.arq_bi = (struct i40e_dma_mem *)hw->aq.arq.dma_head.va; va 196 drivers/net/ethernet/intel/i40e/i40e_adminq.c hw->aq.asq.r.asq_bi = (struct i40e_dma_mem *)hw->aq.asq.dma_head.va; va 657 drivers/net/ethernet/intel/i40e/i40e_adminq.c if (hw->nvm_buff.va) va 827 drivers/net/ethernet/intel/i40e/i40e_adminq.c memcpy(dma_buff->va, buff, buff_size); va 870 drivers/net/ethernet/intel/i40e/i40e_adminq.c memcpy(buff, dma_buff->va, buff_size); va 997 drivers/net/ethernet/intel/i40e/i40e_adminq.c memcpy(e->msg_buf, hw->aq.arq.r.arq_bi[desc_idx].va, va 12 drivers/net/ethernet/intel/i40e/i40e_adminq.h (&(((struct i40e_aq_desc *)((R).desc_buf.va))[i])) va 53 drivers/net/ethernet/intel/i40e/i40e_adminq.h (&(((struct i40e_asq_cmd_details *)((R).cmd_buf.va))[i])) va 567 drivers/net/ethernet/intel/i40e/i40e_dcb.c lldpmib = (u8 *)mem.va; va 69 drivers/net/ethernet/intel/i40e/i40e_hmc.c sd_entry->u.pd_table.pd_entry_virt_mem.va; va 158 drivers/net/ethernet/intel/i40e/i40e_hmc.c pd_addr = (u64 
*)pd_table->pd_page_addr.va; va 223 drivers/net/ethernet/intel/i40e/i40e_hmc.c pd_addr = (u64 *)pd_table->pd_page_addr.va; va 95 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c hw->hmc.hmc_obj_virt_mem.va; va 209 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c (struct i40e_hmc_sd_entry *)hw->hmc.sd_table.addr.va; va 1030 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c *object_base = (u8 *)pd_entry->bp.addr.va + obj_offset_in_pd; va 1034 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c *object_base = (u8 *)sd_entry->u.bp.addr.va + obj_offset_in_sd; va 121 drivers/net/ethernet/intel/i40e/i40e_main.c mem->va = dma_alloc_coherent(&pf->pdev->dev, mem->size, &mem->pa, va 123 drivers/net/ethernet/intel/i40e/i40e_main.c if (!mem->va) va 138 drivers/net/ethernet/intel/i40e/i40e_main.c dma_free_coherent(&pf->pdev->dev, mem->size, mem->va, mem->pa); va 139 drivers/net/ethernet/intel/i40e/i40e_main.c mem->va = NULL; va 156 drivers/net/ethernet/intel/i40e/i40e_main.c mem->va = kzalloc(size, GFP_KERNEL); va 158 drivers/net/ethernet/intel/i40e/i40e_main.c if (!mem->va) va 172 drivers/net/ethernet/intel/i40e/i40e_main.c kfree(mem->va); va 173 drivers/net/ethernet/intel/i40e/i40e_main.c mem->va = NULL; va 609 drivers/net/ethernet/intel/i40e/i40e_nvm.c data = (u16 *)vmem.va; va 1405 drivers/net/ethernet/intel/i40e/i40e_nvm.c if (!hw->nvm_buff.va) { va 1414 drivers/net/ethernet/intel/i40e/i40e_nvm.c if (hw->nvm_buff.va) { va 1415 drivers/net/ethernet/intel/i40e/i40e_nvm.c buff = hw->nvm_buff.va; va 1497 drivers/net/ethernet/intel/i40e/i40e_nvm.c buff = hw->nvm_buff.va; va 1499 drivers/net/ethernet/intel/i40e/i40e_nvm.c buff = hw->nvm_buff.va + (cmd->offset - aq_desc_len); va 1503 drivers/net/ethernet/intel/i40e/i40e_nvm.c int start_byte = buff - (u8 *)hw->nvm_buff.va; va 35 drivers/net/ethernet/intel/i40e/i40e_osdep.h void *va; va 45 drivers/net/ethernet/intel/i40e/i40e_osdep.h void *va; va 120 drivers/net/ethernet/intel/iavf/iavf_adminq.c hw->aq.arq.r.arq_bi = (struct iavf_dma_mem *)hw->aq.arq.dma_head.va; va 183 drivers/net/ethernet/intel/iavf/iavf_adminq.c hw->aq.asq.r.asq_bi = (struct iavf_dma_mem *)hw->aq.asq.dma_head.va; va 729 drivers/net/ethernet/intel/iavf/iavf_adminq.c memcpy(dma_buff->va, buff, buff_size); va 772 drivers/net/ethernet/intel/iavf/iavf_adminq.c memcpy(buff, dma_buff->va, buff_size); va 898 drivers/net/ethernet/intel/iavf/iavf_adminq.c memcpy(e->msg_buf, hw->aq.arq.r.arq_bi[desc_idx].va, va 12 drivers/net/ethernet/intel/iavf/iavf_adminq.h (&(((struct iavf_aq_desc *)((R).desc_buf.va))[i])) va 53 drivers/net/ethernet/intel/iavf/iavf_adminq.h (&(((struct iavf_asq_cmd_details *)((R).cmd_buf.va))[i])) va 82 drivers/net/ethernet/intel/iavf/iavf_main.c mem->va = dma_alloc_coherent(&adapter->pdev->dev, mem->size, va 84 drivers/net/ethernet/intel/iavf/iavf_main.c if (mem->va) va 100 drivers/net/ethernet/intel/iavf/iavf_main.c if (!mem || !mem->va) va 103 drivers/net/ethernet/intel/iavf/iavf_main.c mem->va, (dma_addr_t)mem->pa); va 120 drivers/net/ethernet/intel/iavf/iavf_main.c mem->va = kzalloc(size, GFP_KERNEL); va 122 drivers/net/ethernet/intel/iavf/iavf_main.c if (mem->va) va 140 drivers/net/ethernet/intel/iavf/iavf_main.c kfree(mem->va); va 31 drivers/net/ethernet/intel/iavf/iavf_osdep.h void *va; va 41 drivers/net/ethernet/intel/iavf/iavf_osdep.h void *va; va 1299 drivers/net/ethernet/intel/iavf/iavf_txrx.c void *va; va 1311 drivers/net/ethernet/intel/iavf/iavf_txrx.c va = page_address(rx_buffer->page) + rx_buffer->page_offset; va 1312 drivers/net/ethernet/intel/iavf/iavf_txrx.c prefetch(va); 
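Note: the Intel rx paths indexed above (fm10k, i40e/iavf, ice, and the igb/igc hits that follow) all derive va the same way: page_address() of the receive page plus the buffer's page offset, prefetch of the first cache lines, then a header-only pull into the skb's linear area. A minimal sketch of that recurring pattern, with hypothetical names (my_rx_buffer, MY_RX_HDR_LEN, my_pull_headers) standing in for the per-driver types:

    #include <linux/etherdevice.h>   /* eth_get_headlen() */
    #include <linux/skbuff.h>
    #include <linux/prefetch.h>

    /* Hypothetical per-driver receive buffer: a page plus an offset into it. */
    struct my_rx_buffer {
            struct page *page;
            unsigned int page_offset;
    };

    #define MY_RX_HDR_LEN 256       /* assumed header pull budget */

    static void my_pull_headers(struct sk_buff *skb, struct my_rx_buffer *rx_buffer)
    {
            /* va: CPU-visible address of the received frame inside the page */
            void *va = page_address(rx_buffer->page) + rx_buffer->page_offset;
            unsigned int headlen;

            /* warm the first cache line(s) before parsing the headers */
            prefetch(va);
    #if L1_CACHE_BYTES < 128
            prefetch(va + L1_CACHE_BYTES);
    #endif

            /* let the core stack estimate how much of the frame is headers */
            headlen = eth_get_headlen(skb->dev, va, MY_RX_HDR_LEN);

            /* copy only the headers into the linear area; the skb is assumed
             * to be allocated with enough tailroom that the long-aligned
             * over-copy is safe (as in the drivers above); the payload stays
             * in the page and is attached as a fragment by the caller */
            memcpy(__skb_put(skb, headlen), va, ALIGN(headlen, sizeof(long)));
    }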
va 1314 drivers/net/ethernet/intel/iavf/iavf_txrx.c prefetch(va + L1_CACHE_BYTES); va 1327 drivers/net/ethernet/intel/iavf/iavf_txrx.c headlen = eth_get_headlen(skb->dev, va, IAVF_RX_HDR_SIZE); va 1330 drivers/net/ethernet/intel/iavf/iavf_txrx.c memcpy(__skb_put(skb, headlen), va, ALIGN(headlen, sizeof(long))); va 1366 drivers/net/ethernet/intel/iavf/iavf_txrx.c void *va; va 1378 drivers/net/ethernet/intel/iavf/iavf_txrx.c va = page_address(rx_buffer->page) + rx_buffer->page_offset; va 1379 drivers/net/ethernet/intel/iavf/iavf_txrx.c prefetch(va); va 1381 drivers/net/ethernet/intel/iavf/iavf_txrx.c prefetch(va + L1_CACHE_BYTES); va 1384 drivers/net/ethernet/intel/iavf/iavf_txrx.c skb = build_skb(va - IAVF_SKB_PAD, truesize); va 80 drivers/net/ethernet/intel/ice/ice_controlq.c cq->sq.desc_buf.va = dmam_alloc_coherent(ice_hw_to_dev(hw), size, va 83 drivers/net/ethernet/intel/ice/ice_controlq.c if (!cq->sq.desc_buf.va) va 91 drivers/net/ethernet/intel/ice/ice_controlq.c cq->sq.desc_buf.va, cq->sq.desc_buf.pa); va 92 drivers/net/ethernet/intel/ice/ice_controlq.c cq->sq.desc_buf.va = NULL; va 111 drivers/net/ethernet/intel/ice/ice_controlq.c cq->rq.desc_buf.va = dmam_alloc_coherent(ice_hw_to_dev(hw), size, va 114 drivers/net/ethernet/intel/ice/ice_controlq.c if (!cq->rq.desc_buf.va) va 131 drivers/net/ethernet/intel/ice/ice_controlq.c ring->desc_buf.va, ring->desc_buf.pa); va 132 drivers/net/ethernet/intel/ice/ice_controlq.c ring->desc_buf.va = NULL; va 162 drivers/net/ethernet/intel/ice/ice_controlq.c bi->va = dmam_alloc_coherent(ice_hw_to_dev(hw), va 165 drivers/net/ethernet/intel/ice/ice_controlq.c if (!bi->va) va 197 drivers/net/ethernet/intel/ice/ice_controlq.c cq->rq.r.rq_bi[i].va, cq->rq.r.rq_bi[i].pa); va 198 drivers/net/ethernet/intel/ice/ice_controlq.c cq->rq.r.rq_bi[i].va = NULL; va 229 drivers/net/ethernet/intel/ice/ice_controlq.c bi->va = dmam_alloc_coherent(ice_hw_to_dev(hw), va 232 drivers/net/ethernet/intel/ice/ice_controlq.c if (!bi->va) va 243 drivers/net/ethernet/intel/ice/ice_controlq.c cq->sq.r.sq_bi[i].va, cq->sq.r.sq_bi[i].pa); va 244 drivers/net/ethernet/intel/ice/ice_controlq.c cq->sq.r.sq_bi[i].va = NULL; va 433 drivers/net/ethernet/intel/ice/ice_controlq.c (qi)->ring.r.ring##_bi[i].va,\ va 435 drivers/net/ethernet/intel/ice/ice_controlq.c (qi)->ring.r.ring##_bi[i].va = NULL; \ va 924 drivers/net/ethernet/intel/ice/ice_controlq.c memcpy(dma_buf->va, buf, buf_size); va 968 drivers/net/ethernet/intel/ice/ice_controlq.c memcpy(buf, dma_buf->va, copy_size); va 1085 drivers/net/ethernet/intel/ice/ice_controlq.c memcpy(e->msg_buf, cq->rq.r.rq_bi[desc_idx].va, e->msg_len); va 14 drivers/net/ethernet/intel/ice/ice_controlq.h (&(((struct ice_aq_desc *)((R).desc_buf.va))[i])) va 22 drivers/net/ethernet/intel/ice/ice_osdep.h void *va; va 695 drivers/net/ethernet/intel/ice/ice_txrx.c void *va = page_address(rx_buf->page) + rx_buf->page_offset; va 700 drivers/net/ethernet/intel/ice/ice_txrx.c prefetch(va); va 702 drivers/net/ethernet/intel/ice/ice_txrx.c prefetch((u8 *)va + L1_CACHE_BYTES); va 715 drivers/net/ethernet/intel/ice/ice_txrx.c headlen = eth_get_headlen(skb->dev, va, ICE_RX_HDR_SIZE); va 718 drivers/net/ethernet/intel/ice/ice_txrx.c memcpy(__skb_put(skb, headlen), va, ALIGN(headlen, sizeof(long))); va 678 drivers/net/ethernet/intel/igb/igb.h void igb_ptp_rx_pktstamp(struct igb_q_vector *q_vector, void *va, va 8011 drivers/net/ethernet/intel/igb/igb_main.c void *va = page_address(rx_buffer->page) + rx_buffer->page_offset; va 8021 drivers/net/ethernet/intel/igb/igb_main.c 
prefetch(va); va 8023 drivers/net/ethernet/intel/igb/igb_main.c prefetch(va + L1_CACHE_BYTES); va 8032 drivers/net/ethernet/intel/igb/igb_main.c igb_ptp_rx_pktstamp(rx_ring->q_vector, va, skb); va 8033 drivers/net/ethernet/intel/igb/igb_main.c va += IGB_TS_HDR_LEN; va 8040 drivers/net/ethernet/intel/igb/igb_main.c headlen = eth_get_headlen(skb->dev, va, IGB_RX_HDR_LEN); va 8043 drivers/net/ethernet/intel/igb/igb_main.c memcpy(__skb_put(skb, headlen), va, ALIGN(headlen, sizeof(long))); va 8049 drivers/net/ethernet/intel/igb/igb_main.c (va + headlen) - page_address(rx_buffer->page), va 8068 drivers/net/ethernet/intel/igb/igb_main.c void *va = page_address(rx_buffer->page) + rx_buffer->page_offset; va 8078 drivers/net/ethernet/intel/igb/igb_main.c prefetch(va); va 8080 drivers/net/ethernet/intel/igb/igb_main.c prefetch(va + L1_CACHE_BYTES); va 8084 drivers/net/ethernet/intel/igb/igb_main.c skb = build_skb(va - IGB_SKB_PAD, truesize); va 869 drivers/net/ethernet/intel/igb/igb_ptp.c void igb_ptp_rx_pktstamp(struct igb_q_vector *q_vector, void *va, va 872 drivers/net/ethernet/intel/igb/igb_ptp.c __le64 *regval = (__le64 *)va; va 1252 drivers/net/ethernet/intel/igc/igc_main.c void *va = page_address(rx_buffer->page) + rx_buffer->page_offset; va 1262 drivers/net/ethernet/intel/igc/igc_main.c prefetch(va); va 1264 drivers/net/ethernet/intel/igc/igc_main.c prefetch(va + L1_CACHE_BYTES); va 1268 drivers/net/ethernet/intel/igc/igc_main.c skb = build_skb(va - IGC_SKB_PAD, truesize); va 1291 drivers/net/ethernet/intel/igc/igc_main.c void *va = page_address(rx_buffer->page) + rx_buffer->page_offset; va 1301 drivers/net/ethernet/intel/igc/igc_main.c prefetch(va); va 1303 drivers/net/ethernet/intel/igc/igc_main.c prefetch(va + L1_CACHE_BYTES); va 1314 drivers/net/ethernet/intel/igc/igc_main.c headlen = eth_get_headlen(skb->dev, va, IGC_RX_HDR_LEN); va 1317 drivers/net/ethernet/intel/igc/igc_main.c memcpy(__skb_put(skb, headlen), va, ALIGN(headlen, sizeof(long))); va 1323 drivers/net/ethernet/intel/igc/igc_main.c (va + headlen) - page_address(rx_buffer->page), va 1790 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c unsigned char *va; va 1798 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c va = skb_frag_address(frag); va 1804 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c pull_len = eth_get_headlen(skb->dev, va, IXGBE_RX_HDR_SIZE); va 1807 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c skb_copy_to_linear_data(skb, va, ALIGN(pull_len, sizeof(long))); va 90 drivers/net/ethernet/mellanox/mlx4/en_main.c vaf.va = &args; va 534 drivers/net/ethernet/mellanox/mlx4/en_rx.c static void validate_loopback(struct mlx4_en_priv *priv, void *va) va 536 drivers/net/ethernet/mellanox/mlx4/en_rx.c const unsigned char *data = va + ETH_HLEN; va 625 drivers/net/ethernet/mellanox/mlx4/en_rx.c static int check_csum(struct mlx4_cqe *cqe, struct sk_buff *skb, void *va, va 642 drivers/net/ethernet/mellanox/mlx4/en_rx.c hdr = (u8 *)va + sizeof(struct ethhdr); va 702 drivers/net/ethernet/mellanox/mlx4/en_rx.c void *va; va 706 drivers/net/ethernet/mellanox/mlx4/en_rx.c va = page_address(frags[0].page) + frags[0].page_offset; va 707 drivers/net/ethernet/mellanox/mlx4/en_rx.c prefetchw(va); va 730 drivers/net/ethernet/mellanox/mlx4/en_rx.c const struct ethhdr *ethh = va; va 756 drivers/net/ethernet/mellanox/mlx4/en_rx.c validate_loopback(priv, va); va 779 drivers/net/ethernet/mellanox/mlx4/en_rx.c xdp.data_hard_start = va - frags[0].page_offset; va 780 drivers/net/ethernet/mellanox/mlx4/en_rx.c xdp.data = va; va 791 
drivers/net/ethernet/mellanox/mlx4/en_rx.c va = xdp.data; va 857 drivers/net/ethernet/mellanox/mlx4/en_rx.c if (check_csum(cqe, skb, va, dev->features)) va 122 drivers/net/ethernet/mellanox/mlx5/core/en/xdp.c void *va, u16 *rx_headroom, u32 *len, bool xsk) va 133 drivers/net/ethernet/mellanox/mlx5/core/en/xdp.c xdp.data = va + *rx_headroom; va 136 drivers/net/ethernet/mellanox/mlx5/core/en/xdp.c xdp.data_hard_start = va; va 66 drivers/net/ethernet/mellanox/mlx5/core/en/xdp.h void *va, u16 *rx_headroom, u32 *len, bool xsk); va 96 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/rx.c void *va, *data; va 113 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/rx.c va = di->xsk.data; va 114 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/rx.c data = va + rx_headroom; va 121 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/rx.c consumed = mlx5e_xdp_handle(rq, di, va, &rx_headroom, &cqe_bcnt32, true); va 158 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/rx.c void *va, *data; va 169 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/rx.c va = di->xsk.data; va 170 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/rx.c data = va + rx_headroom; va 182 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/rx.c consumed = mlx5e_xdp_handle(rq, di, va, &rx_headroom, &cqe_bcnt, true); va 1040 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c struct sk_buff *mlx5e_build_linear_skb(struct mlx5e_rq *rq, void *va, va 1044 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c struct sk_buff *skb = build_skb(va, frag_size); va 1064 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c void *va, *data; va 1068 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c va = page_address(di->page) + wi->offset; va 1069 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c data = va + rx_headroom; va 1074 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c prefetchw(va); /* xdp_frame data area */ va 1078 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c consumed = mlx5e_xdp_handle(rq, di, va, &rx_headroom, &cqe_bcnt, false); va 1083 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c skb = mlx5e_build_linear_skb(rq, va, frag_size, rx_headroom, cqe_bcnt); va 1290 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c void *va, *data; va 1300 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c va = page_address(di->page) + head_offset; va 1301 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c data = va + rx_headroom; va 1306 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c prefetchw(va); /* xdp_frame data area */ va 1310 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c consumed = mlx5e_xdp_handle(rq, di, va, &rx_headroom, &cqe_bcnt32, false); va 1318 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c skb = mlx5e_build_linear_skb(rq, va, frag_size, rx_headroom, cqe_bcnt32); va 1287 drivers/net/ethernet/myricom/myri10ge/myri10ge.c u8 *va; va 1292 drivers/net/ethernet/myricom/myri10ge/myri10ge.c va = addr; va 1293 drivers/net/ethernet/myricom/myri10ge/myri10ge.c va += MXGEFW_PAD; va 1294 drivers/net/ethernet/myricom/myri10ge/myri10ge.c veh = (struct vlan_ethhdr *)va; va 1300 drivers/net/ethernet/myricom/myri10ge/myri10ge.c vsum = csum_partial(va + ETH_HLEN, VLAN_HLEN, 0); va 1305 drivers/net/ethernet/myricom/myri10ge/myri10ge.c memmove(va + VLAN_HLEN, va, 2 * ETH_ALEN); va 1326 drivers/net/ethernet/myricom/myri10ge/myri10ge.c u8 *va; va 1338 drivers/net/ethernet/myricom/myri10ge/myri10ge.c va = page_address(rx->info[idx].page) + rx->info[idx].page_offset; va 1339 drivers/net/ethernet/myricom/myri10ge/myri10ge.c prefetch(va); va 1378 drivers/net/ethernet/myricom/myri10ge/myri10ge.c 
myri10ge_vlan_rx(mgp->dev, va, skb); va 7278 drivers/net/ethernet/qlogic/qed/qed_hsi.h struct regpair va; va 1540 drivers/net/ethernet/qlogic/qed/qed_rdma.c p_ramrod->va.hi = 0; va 1541 drivers/net/ethernet/qlogic/qed/qed_rdma.c p_ramrod->va.lo = cpu_to_le32(params->fbo); va 1543 drivers/net/ethernet/qlogic/qed/qed_rdma.c DMA_REGPAIR_LE(p_ramrod->va, params->vaddr); va 157 drivers/net/ethernet/tehuti/tehuti.c f->va = pci_alloc_consistent(priv->pdev, va 159 drivers/net/ethernet/tehuti/tehuti.c if (!f->va) { va 185 drivers/net/ethernet/tehuti/tehuti.c if (f->va) { va 187 drivers/net/ethernet/tehuti/tehuti.c f->memsz + FIFO_EXTRA_SPACE, f->va, f->da); va 188 drivers/net/ethernet/tehuti/tehuti.c f->va = NULL; va 1104 drivers/net/ethernet/tehuti/tehuti.c rxfd = (struct rxf_desc *)(f->m.va + f->m.wptr); va 1117 drivers/net/ethernet/tehuti/tehuti.c memcpy(f->m.va, f->m.va + f->m.memsz, delta); va 1159 drivers/net/ethernet/tehuti/tehuti.c rxfd = (struct rxf_desc *)(f->m.va + f->m.wptr); va 1172 drivers/net/ethernet/tehuti/tehuti.c memcpy(f->m.va, f->m.va + f->m.memsz, delta); va 1219 drivers/net/ethernet/tehuti/tehuti.c rxdd = (struct rxd_desc *)(f->m.va + f->m.rptr); va 1242 drivers/net/ethernet/tehuti/tehuti.c memcpy(f->m.va + f->m.memsz, f->m.va, tmp_len); va 1622 drivers/net/ethernet/tehuti/tehuti.c txdd = (struct txd_desc *)(f->m.va + f->m.wptr); va 1660 drivers/net/ethernet/tehuti/tehuti.c memcpy(f->m.va, f->m.va + f->m.memsz, len); va 1819 drivers/net/ethernet/tehuti/tehuti.c memcpy(f->m.va + f->m.wptr, data, size); va 1822 drivers/net/ethernet/tehuti/tehuti.c memcpy(f->m.va + f->m.wptr, data, i); va 1824 drivers/net/ethernet/tehuti/tehuti.c memcpy(f->m.va, data + i, f->m.wptr); va 144 drivers/net/ethernet/tehuti/tehuti.h char *va; /* virtual address of fifo (used by SW) */ va 32 drivers/net/wireless/ath/ath10k/debug.c vaf.va = &args; va 124 drivers/net/wireless/ath/ath10k/debug.c vaf.va = &args; va 139 drivers/net/wireless/ath/ath10k/debug.c vaf.va = &args; va 2676 drivers/net/wireless/ath/ath10k/debug.c vaf.va = &args; va 63 drivers/net/wireless/ath/ath10k/trace.h *vaf->va) >= ATH10K_MSG_MAX); va 104 drivers/net/wireless/ath/ath10k/trace.h *vaf->va) >= ATH10K_MSG_MAX); va 3194 drivers/net/wireless/ath/ath5k/base.c vaf.va = &args; va 48 drivers/net/wireless/ath/ath6kl/debug.c vaf.va = &args; va 64 drivers/net/wireless/ath/ath6kl/debug.c vaf.va = &args; va 79 drivers/net/wireless/ath/ath6kl/debug.c vaf.va = &args; va 94 drivers/net/wireless/ath/ath6kl/debug.c vaf.va = &args; va 138 drivers/net/wireless/ath/ath6kl/debug.c vaf.va = &args; va 262 drivers/net/wireless/ath/ath6kl/trace.h *vaf->va) >= ATH6KL_MSG_MAX); va 294 drivers/net/wireless/ath/ath6kl/trace.h *vaf->va) >= ATH6KL_MSG_MAX); va 80 drivers/net/wireless/ath/main.c vaf.va = &args; va 52 drivers/net/wireless/ath/trace.h *vaf->va) >= ATH_DBG_MAX_LEN); va 28 drivers/net/wireless/ath/wil6210/debug.c vaf.va = &args; va 44 drivers/net/wireless/ath/wil6210/debug.c vaf.va = &args; va 60 drivers/net/wireless/ath/wil6210/debug.c vaf.va = &args; va 73 drivers/net/wireless/ath/wil6210/debug.c vaf.va = &args; va 86 drivers/net/wireless/ath/wil6210/debug.c vaf.va = &args; va 63 drivers/net/wireless/ath/wil6210/debugfs.c &ring->va[idx].rx.enhanced; va 73 drivers/net/wireless/ath/wil6210/debugfs.c &ring->va[idx].tx.enhanced; va 94 drivers/net/wireless/ath/wil6210/debugfs.c seq_printf(s, " va = 0x%p\n", ring->va); va 97 drivers/net/wireless/ath/wil6210/debugfs.c seq_printf(s, " swtail = %u\n", *ring->edma_rx_swtail.va); va 123 
drivers/net/wireless/ath/wil6210/debugfs.c if (ring->va && (ring->size <= (1 << WIL_RING_SIZE_ORDER_MAX))) { va 133 drivers/net/wireless/ath/wil6210/debugfs.c &ring->va[i].tx.legacy; va 154 drivers/net/wireless/ath/wil6210/debugfs.c if (ring->va) { va 214 drivers/net/wireless/ath/wil6210/debugfs.c seq_printf(s, " va = 0x%pK\n", sring->va); va 241 drivers/net/wireless/ath/wil6210/debugfs.c if (sring->va && (sring->size <= (1 << WIL_RING_SIZE_ORDER_MAX))) { va 246 drivers/net/wireless/ath/wil6210/debugfs.c (u32 *)(sring->va + (sring->elem_size * i)); va 268 drivers/net/wireless/ath/wil6210/debugfs.c if (wil->srings[i].va) va 764 drivers/net/wireless/ath/wil6210/debugfs.c if (!wil->ring_rx.va || val > wil->ring_rx.size) { va 1108 drivers/net/wireless/ath/wil6210/debugfs.c if (!ring->va) { va 1129 drivers/net/wireless/ath/wil6210/debugfs.c d = &ring->va[txdesc_idx].tx.legacy; va 1139 drivers/net/wireless/ath/wil6210/debugfs.c &ring->va[txdesc_idx].rx.enhanced; va 1191 drivers/net/wireless/ath/wil6210/debugfs.c if (!sring->va) { va 1202 drivers/net/wireless/ath/wil6210/debugfs.c u = sring->va + (sring->elem_size * status_msg_idx); va 216 drivers/net/wireless/ath/wil6210/main.c if (!ring->va) va 618 drivers/net/wireless/ath/wil6210/main.c if (!wil->ring_tx[i].va) va 657 drivers/net/wireless/ath/wil6210/main.c if (ri >= 0 && wil->ring_tx[ri].va) va 157 drivers/net/wireless/ath/wil6210/netdev.c if (!ring->va || !txdata->enabled || va 189 drivers/net/wireless/ath/wil6210/netdev.c if (!sring->va) va 29 drivers/net/wireless/ath/wil6210/pmc.c void *va; va 157 drivers/net/wireless/ath/wil6210/pmc.c pmc->descriptors[i].va = dma_alloc_coherent(dev, va 162 drivers/net/wireless/ath/wil6210/pmc.c if (unlikely(!pmc->descriptors[i].va)) { va 168 drivers/net/wireless/ath/wil6210/pmc.c u32 *p = (u32 *)pmc->descriptors[i].va + j; va 208 drivers/net/wireless/ath/wil6210/pmc.c for (i = 0; i < num_descriptors && pmc->descriptors[i].va; i++) { va 211 drivers/net/wireless/ath/wil6210/pmc.c pmc->descriptors[i].va, va 214 drivers/net/wireless/ath/wil6210/pmc.c pmc->descriptors[i].va = NULL; va 292 drivers/net/wireless/ath/wil6210/pmc.c i < pmc->num_descriptors && pmc->descriptors[i].va; i++) { va 295 drivers/net/wireless/ath/wil6210/pmc.c pmc->descriptors[i].va, va 297 drivers/net/wireless/ath/wil6210/pmc.c pmc->descriptors[i].va = NULL; va 376 drivers/net/wireless/ath/wil6210/pmc.c pmc->descriptors[idx].va, va 90 drivers/net/wireless/ath/wil6210/trace.h *vaf->va) >= WIL6210_MSG_MAX); va 88 drivers/net/wireless/ath/wil6210/txrx.c if (!vring->va || !txdata->enabled) { va 109 drivers/net/wireless/ath/wil6210/txrx.c if (!vring->va || !txdata->enabled) va 123 drivers/net/wireless/ath/wil6210/txrx.c size_t sz = vring->size * sizeof(vring->va[0]); va 128 drivers/net/wireless/ath/wil6210/txrx.c BUILD_BUG_ON(sizeof(vring->va[0]) != 32); va 134 drivers/net/wireless/ath/wil6210/txrx.c vring->va = NULL; va 153 drivers/net/wireless/ath/wil6210/txrx.c vring->va = dma_alloc_coherent(dev, sz, &vring->pa, GFP_KERNEL); va 154 drivers/net/wireless/ath/wil6210/txrx.c if (!vring->va) { va 170 drivers/net/wireless/ath/wil6210/txrx.c &vring->va[i].tx.legacy; va 176 drivers/net/wireless/ath/wil6210/txrx.c vring->va, &vring->pa, vring->ctx); va 203 drivers/net/wireless/ath/wil6210/txrx.c size_t sz = vring->size * sizeof(vring->va[0]); va 210 drivers/net/wireless/ath/wil6210/txrx.c vring_index, vring->size, vring->va, va 214 drivers/net/wireless/ath/wil6210/txrx.c vring->size, vring->va, va 226 drivers/net/wireless/ath/wil6210/txrx.c 
&vring->va[vring->swtail].tx.legacy; va 244 drivers/net/wireless/ath/wil6210/txrx.c &vring->va[vring->swhead].rx.legacy; va 255 drivers/net/wireless/ath/wil6210/txrx.c dma_free_coherent(dev, sz, (void *)vring->va, vring->pa); va 258 drivers/net/wireless/ath/wil6210/txrx.c vring->va = NULL; va 273 drivers/net/wireless/ath/wil6210/txrx.c volatile struct vring_rx_desc *_d = &vring->va[i].rx.legacy; va 368 drivers/net/wireless/ath/wil6210/txrx.c _d = (struct vring_rx_desc *)&ring->va[ring->swhead].rx.legacy; va 477 drivers/net/wireless/ath/wil6210/txrx.c _d = &vring->va[i].rx.legacy; va 1044 drivers/net/wireless/ath/wil6210/txrx.c if (unlikely(!v->va)) { va 1088 drivers/net/wireless/ath/wil6210/txrx.c if (vring->va) { va 1122 drivers/net/wireless/ath/wil6210/txrx.c if (vring->va) va 1211 drivers/net/wireless/ath/wil6210/txrx.c if (vring->va) { va 1306 drivers/net/wireless/ath/wil6210/txrx.c if (!vring->va) { va 1380 drivers/net/wireless/ath/wil6210/txrx.c if (vring->va) { va 1456 drivers/net/wireless/ath/wil6210/txrx.c if (v->va && txdata->enabled) { va 1490 drivers/net/wireless/ath/wil6210/txrx.c if (!ring->va || !txdata->enabled || txdata->mid != vif->mid) va 1534 drivers/net/wireless/ath/wil6210/txrx.c if (!v->va || !txdata->enabled) va 1568 drivers/net/wireless/ath/wil6210/txrx.c if (!v->va || !txdata->enabled || txdata->mid != vif->mid) va 1597 drivers/net/wireless/ath/wil6210/txrx.c if (!v2->va || txdata2->mid != vif->mid) va 1825 drivers/net/wireless/ath/wil6210/txrx.c _hdr_desc = &vring->va[i].tx.legacy; va 1891 drivers/net/wireless/ath/wil6210/txrx.c _desc = &vring->va[i].tx.legacy; va 2020 drivers/net/wireless/ath/wil6210/txrx.c d = (struct vring_tx_desc *)&vring->va[i].tx.legacy; va 2021 drivers/net/wireless/ath/wil6210/txrx.c _desc = &vring->va[i].tx.legacy; va 2063 drivers/net/wireless/ath/wil6210/txrx.c _d = &ring->va[i].tx.legacy; va 2103 drivers/net/wireless/ath/wil6210/txrx.c _d = &ring->va[i].tx.legacy; va 2178 drivers/net/wireless/ath/wil6210/txrx.c _d = &ring->va[i].tx.legacy; va 2281 drivers/net/wireless/ath/wil6210/txrx.c if (txdata->mid != vif->mid || !cur_ring->va || va 2441 drivers/net/wireless/ath/wil6210/txrx.c if (unlikely(!vring->va)) { va 2469 drivers/net/wireless/ath/wil6210/txrx.c _d = &vring->va[lf].tx.legacy; va 2481 drivers/net/wireless/ath/wil6210/txrx.c _d = &vring->va[vring->swtail].tx.legacy; va 63 drivers/net/wireless/ath/wil6210/txrx_edma.c if (!wil->srings[i].va) va 76 drivers/net/wireless/ath/wil6210/txrx_edma.c if (!sring || !sring->va) va 82 drivers/net/wireless/ath/wil6210/txrx_edma.c sz, sring->va, &sring->pa); va 84 drivers/net/wireless/ath/wil6210/txrx_edma.c dma_free_coherent(dev, sz, (void *)sring->va, sring->pa); va 86 drivers/net/wireless/ath/wil6210/txrx_edma.c sring->va = NULL; va 107 drivers/net/wireless/ath/wil6210/txrx_edma.c sring->va = dma_alloc_coherent(dev, sz, &sring->pa, GFP_KERNEL); va 108 drivers/net/wireless/ath/wil6210/txrx_edma.c if (!sring->va) va 111 drivers/net/wireless/ath/wil6210/txrx_edma.c wil_dbg_misc(wil, "status_ring[%d] 0x%p:%pad\n", sring->size, sring->va, va 178 drivers/net/wireless/ath/wil6210/txrx_edma.c &ring->va[i].rx.enhanced; va 230 drivers/net/wireless/ath/wil6210/txrx_edma.c (sring->va + (sring->elem_size * sring->swhead)); va 249 drivers/net/wireless/ath/wil6210/txrx_edma.c ring->swtail = *ring->edma_rx_swtail.va; va 390 drivers/net/wireless/ath/wil6210/txrx_edma.c size_t sz = ring->size * sizeof(ring->va[0]); va 394 drivers/net/wireless/ath/wil6210/txrx_edma.c BUILD_BUG_ON(sizeof(ring->va[0]) != 32); va 
402 drivers/net/wireless/ath/wil6210/txrx_edma.c ring->va = dma_alloc_coherent(dev, sz, &ring->pa, GFP_KERNEL); va 403 drivers/net/wireless/ath/wil6210/txrx_edma.c if (!ring->va) va 407 drivers/net/wireless/ath/wil6210/txrx_edma.c sz = sizeof(*ring->edma_rx_swtail.va); va 408 drivers/net/wireless/ath/wil6210/txrx_edma.c ring->edma_rx_swtail.va = va 411 drivers/net/wireless/ath/wil6210/txrx_edma.c if (!ring->edma_rx_swtail.va) va 417 drivers/net/wireless/ath/wil6210/txrx_edma.c ring->size, ring->va, &ring->pa, ring->ctx); va 421 drivers/net/wireless/ath/wil6210/txrx_edma.c dma_free_coherent(dev, ring->size * sizeof(ring->va[0]), va 422 drivers/net/wireless/ath/wil6210/txrx_edma.c (void *)ring->va, ring->pa); va 423 drivers/net/wireless/ath/wil6210/txrx_edma.c ring->va = NULL; va 437 drivers/net/wireless/ath/wil6210/txrx_edma.c if (!ring->va) va 440 drivers/net/wireless/ath/wil6210/txrx_edma.c sz = ring->size * sizeof(ring->va[0]); va 445 drivers/net/wireless/ath/wil6210/txrx_edma.c ring->size, ring->va, va 449 drivers/net/wireless/ath/wil6210/txrx_edma.c dma_free_coherent(dev, sizeof(*ring->edma_rx_swtail.va), va 450 drivers/net/wireless/ath/wil6210/txrx_edma.c ring->edma_rx_swtail.va, va 459 drivers/net/wireless/ath/wil6210/txrx_edma.c ring_index, ring->size, ring->va, va 468 drivers/net/wireless/ath/wil6210/txrx_edma.c &ring->va[ring->swtail].tx.enhanced; va 486 drivers/net/wireless/ath/wil6210/txrx_edma.c dma_free_coherent(dev, sz, (void *)ring->va, ring->pa); va 489 drivers/net/wireless/ath/wil6210/txrx_edma.c ring->va = NULL; va 594 drivers/net/wireless/ath/wil6210/txrx_edma.c if (!sring->va) va 909 drivers/net/wireless/ath/wil6210/txrx_edma.c (sring->va + (sring->elem_size * sring->swhead)); va 1075 drivers/net/wireless/ath/wil6210/txrx_edma.c if (unlikely(!ring->va)) { va 1083 drivers/net/wireless/ath/wil6210/txrx_edma.c if (unlikely(!sring->va)) { va 1148 drivers/net/wireless/ath/wil6210/txrx_edma.c (sring->va + (sring->elem_size * sring->swhead)); va 1199 drivers/net/wireless/ath/wil6210/txrx_edma.c if (unlikely(!ring->va)) { va 1235 drivers/net/wireless/ath/wil6210/txrx_edma.c &ring->va[ring->swtail].tx.enhanced; va 1363 drivers/net/wireless/ath/wil6210/txrx_edma.c &ring->va[i].tx.enhanced; va 1538 drivers/net/wireless/ath/wil6210/txrx_edma.c &ring->va[i].tx.enhanced; va 433 drivers/net/wireless/ath/wil6210/txrx_edma.h (s->va + (s->elem_size * s->swhead)))->buff_id = 0; va 522 drivers/net/wireless/ath/wil6210/wil6210.h u32 *va; va 533 drivers/net/wireless/ath/wil6210/wil6210.h volatile union wil_ring_desc *va; va 560 drivers/net/wireless/ath/wil6210/wil6210.h void *va; /* pointer to ring_[tr]x_status elements */ va 1614 drivers/net/wireless/ath/wil6210/wmi.c if (!ring->va || !txdata->enabled || txdata->mid != vif->mid) va 395 drivers/net/wireless/broadcom/b43/main.c vaf.va = &args; va 416 drivers/net/wireless/broadcom/b43/main.c vaf.va = &args; va 437 drivers/net/wireless/broadcom/b43/main.c vaf.va = &args; va 456 drivers/net/wireless/broadcom/b43/main.c vaf.va = &args; va 177 drivers/net/wireless/broadcom/b43legacy/main.c vaf.va = &args; va 196 drivers/net/wireless/broadcom/b43legacy/main.c vaf.va = &args; va 215 drivers/net/wireless/broadcom/b43legacy/main.c vaf.va = &args; va 232 drivers/net/wireless/broadcom/b43legacy/main.c vaf.va = &args; va 352 drivers/net/wireless/broadcom/brcm80211/brcmfmac/common.c vaf.va = &args; va 371 drivers/net/wireless/broadcom/brcm80211/brcmfmac/common.c vaf.va = &args; va 23 drivers/net/wireless/broadcom/brcm80211/brcmfmac/tracepoint.c vaf.va = 
&args; va 42 drivers/net/wireless/broadcom/brcm80211/brcmfmac/tracepoint.h *vaf->va) >= MAX_MSG_LEN); va 60 drivers/net/wireless/broadcom/brcm80211/brcmfmac/tracepoint.h *vaf->va) >= MAX_MSG_LEN); va 36 drivers/net/wireless/broadcom/brcm80211/brcmsmac/brcms_trace_brcmsmac_msg.h *vaf->va) >= MAX_MSG_LEN); va 74 drivers/net/wireless/broadcom/brcm80211/brcmsmac/brcms_trace_brcmsmac_msg.h *vaf->va) >= MAX_MSG_LEN); va 222 drivers/net/wireless/broadcom/brcm80211/brcmsmac/debug.c vaf.va = &args; \ va 243 drivers/net/wireless/broadcom/brcm80211/brcmsmac/debug.c vaf.va = &args; va 473 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c void *va; va 477 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c va = dma_alloc_consistent(di, size, *alignbits, alloced, descpa); va 479 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c if (NULL == va) va 482 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c desc_strtaddr = (u32) roundup((unsigned long)va, alignbytes); va 486 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c dma_free_coherent(di->dmadev, size, va, *descpa); va 487 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c va = dma_alloc_consistent(di, size, *alignbits, va 490 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c return va; va 497 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c void *va; va 509 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c va = dma_ringalloc(di, D64RINGALIGN, size, &align_bits, va 511 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c if (va == NULL) { va 519 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c roundup((unsigned long)va, align); va 520 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c di->txdalign = (uint) ((s8 *)di->txd64 - (s8 *) va); va 524 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c va = dma_ringalloc(di, D64RINGALIGN, size, &align_bits, va 526 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c if (va == NULL) { va 534 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c roundup((unsigned long)va, align); va 535 drivers/net/wireless/broadcom/brcm80211/brcmsmac/dma.c di->rxdalign = (uint) ((s8 *)di->rxd64 - (s8 *) va); va 318 drivers/net/wireless/broadcom/brcm80211/brcmutil/utils.c vaf.va = &args; va 75 drivers/net/wireless/intel/iwlwifi/iwl-debug.c vaf.va = &args; \ va 97 drivers/net/wireless/intel/iwlwifi/iwl-debug.c vaf.va = &args; va 120 drivers/net/wireless/intel/iwlwifi/iwl-debug.c vaf.va = &args; va 31 drivers/net/wireless/intel/iwlwifi/iwl-devtrace-msg.h *vaf->va) >= MAX_MSG_LEN); va 72 drivers/net/wireless/intel/iwlwifi/iwl-devtrace-msg.h *vaf->va) >= MAX_MSG_LEN); va 1767 drivers/net/wireless/marvell/mwifiex/main.c vaf.va = &args; va 416 drivers/net/wireless/mediatek/mt76/dma.c if (!q->rx_page.va) va 419 drivers/net/wireless/mediatek/mt76/dma.c page = virt_to_page(q->rx_page.va); va 620 drivers/net/wireless/mediatek/mt76/usb.c if (!q->rx_page.va) va 623 drivers/net/wireless/mediatek/mt76/usb.c page = virt_to_page(q->rx_page.va); va 22 drivers/net/wireless/realtek/rtlwifi/debug.c vaf.va = &args; va 42 drivers/net/wireless/realtek/rtlwifi/debug.c vaf.va = &args; va 740 drivers/net/wireless/realtek/rtw88/debug.c vaf.va = &args; va 62 drivers/net/wireless/rsi/rsi_91x_main.c vaf.va = &args; va 70 drivers/pcmcia/soc_common.c vaf.va = &args; va 70 drivers/platform/mellanox/mlxbf-tmfifo.c void *va; va 216 drivers/platform/mellanox/mlxbf-tmfifo.c if (vring->va) { va 219 drivers/platform/mellanox/mlxbf-tmfifo.c vring->va, vring->dma); va 220 
drivers/platform/mellanox/mlxbf-tmfifo.c vring->va = NULL; va 237 drivers/platform/mellanox/mlxbf-tmfifo.c void *va; va 249 drivers/platform/mellanox/mlxbf-tmfifo.c va = dma_alloc_coherent(dev->parent, size, &dma, GFP_KERNEL); va 250 drivers/platform/mellanox/mlxbf-tmfifo.c if (!va) { va 256 drivers/platform/mellanox/mlxbf-tmfifo.c vring->va = va; va 939 drivers/platform/mellanox/mlxbf-tmfifo.c memset(vring->va, 0, size); va 941 drivers/platform/mellanox/mlxbf-tmfifo.c false, false, vring->va, va 214 drivers/remoteproc/imx_rproc.c void *va = NULL; va 233 drivers/remoteproc/imx_rproc.c va = (__force void *)(priv->mem[i].cpu_addr + offset); va 238 drivers/remoteproc/imx_rproc.c dev_dbg(&rproc->dev, "da = 0x%llx len = 0x%x va = 0x%p\n", da, len, va); va 240 drivers/remoteproc/imx_rproc.c return va; va 252 drivers/remoteproc/keystone_remoteproc.c void __iomem *va = NULL; va 271 drivers/remoteproc/keystone_remoteproc.c va = ksproc->mem[i].cpu_addr + offset; va 279 drivers/remoteproc/keystone_remoteproc.c va = ksproc->mem[i].cpu_addr + offset; va 285 drivers/remoteproc/keystone_remoteproc.c return (__force void *)va; va 205 drivers/remoteproc/remoteproc_core.c if (!carveout->va) va 216 drivers/remoteproc/remoteproc_core.c ptr = carveout->va + offset; va 744 drivers/remoteproc/remoteproc_core.c void *va; va 747 drivers/remoteproc/remoteproc_core.c va = dma_alloc_coherent(dev->parent, mem->len, &dma, GFP_KERNEL); va 748 drivers/remoteproc/remoteproc_core.c if (!va) { va 755 drivers/remoteproc/remoteproc_core.c va, &dma, mem->len); va 824 drivers/remoteproc/remoteproc_core.c mem->va = va; va 831 drivers/remoteproc/remoteproc_core.c dma_free_coherent(dev->parent, mem->len, va, dma); va 849 drivers/remoteproc/remoteproc_core.c dma_free_coherent(dev->parent, mem->len, mem->va, mem->dma); va 961 drivers/remoteproc/remoteproc_core.c void *va, dma_addr_t dma, int len, u32 da, va 973 drivers/remoteproc/remoteproc_core.c mem->va = va; va 1213 drivers/remoteproc/remoteproc_core.c if (entry->va) va 1214 drivers/remoteproc/remoteproc_core.c pa = (u64)rproc_va_to_pa(entry->va); va 44 drivers/remoteproc/remoteproc_debugfs.c void *va; va 48 drivers/remoteproc/remoteproc_debugfs.c va = rproc_da_to_va(data->rproc, trace->da, trace->len); va 50 drivers/remoteproc/remoteproc_debugfs.c if (!va) { va 53 drivers/remoteproc/remoteproc_debugfs.c va = buf; va 55 drivers/remoteproc/remoteproc_debugfs.c len = strnlen(va, trace->len); va 58 drivers/remoteproc/remoteproc_debugfs.c return simple_read_from_buffer(userbuf, count, ppos, va, len); va 293 drivers/remoteproc/remoteproc_debugfs.c seq_printf(seq, "\tVirtual address: %pK\n", carveout->va); va 90 drivers/remoteproc/remoteproc_virtio.c if (!mem || !mem->va) va 94 drivers/remoteproc/remoteproc_virtio.c addr = mem->va; va 360 drivers/remoteproc/remoteproc_virtio.c if (mem->va) { va 363 drivers/remoteproc/remoteproc_virtio.c pa = rproc_va_to_pa(mem->va); va 96 drivers/remoteproc/st_remoteproc.c void *va; va 98 drivers/remoteproc/st_remoteproc.c va = ioremap_wc(mem->dma, mem->len); va 99 drivers/remoteproc/st_remoteproc.c if (!va) { va 106 drivers/remoteproc/st_remoteproc.c mem->va = va; va 114 drivers/remoteproc/st_remoteproc.c iounmap(mem->va); va 180 drivers/remoteproc/st_slim_rproc.c void *va = NULL; va 189 drivers/remoteproc/st_slim_rproc.c va = (__force void *)slim_rproc->mem[i].cpu_addr; va 195 drivers/remoteproc/st_slim_rproc.c da, len, va); va 197 drivers/remoteproc/st_slim_rproc.c return va; va 98 drivers/remoteproc/stm32_rproc.c void *va; va 101 
drivers/remoteproc/stm32_rproc.c va = ioremap_wc(mem->dma, mem->len); va 102 drivers/remoteproc/stm32_rproc.c if (IS_ERR_OR_NULL(va)) { va 109 drivers/remoteproc/stm32_rproc.c mem->va = va; va 118 drivers/remoteproc/stm32_rproc.c iounmap(mem->va); va 86 drivers/remoteproc/wkup_m3_rproc.c void *va = NULL; va 98 drivers/remoteproc/wkup_m3_rproc.c va = (__force void *)(wkupm3->mem[i].cpu_addr + offset); va 103 drivers/remoteproc/wkup_m3_rproc.c return va; va 422 drivers/sbus/char/oradax.c static int dax_lock_page(void *va, struct page **p) va 426 drivers/sbus/char/oradax.c dax_dbg("uva %p", va); va 428 drivers/sbus/char/oradax.c ret = get_user_pages_fast((unsigned long)va, 1, FOLL_WRITE, p); va 430 drivers/sbus/char/oradax.c dax_dbg("locked page %p, for VA %p", *p, va); va 434 drivers/sbus/char/oradax.c dax_dbg("get_user_pages failed, va=%p, ret=%d", va, ret); va 25 drivers/scsi/be2iscsi/be.h void *va; va 53 drivers/scsi/be2iscsi/be.h return q->dma_mem.va + q->head * q->entry_size; va 58 drivers/scsi/be2iscsi/be.h return q->dma_mem.va + wrb_num * q->entry_size; va 63 drivers/scsi/be2iscsi/be.h return q->dma_mem.va + q->tail * q->entry_size; va 187 drivers/scsi/be2iscsi/be_cmds.c mbx_hdr = (struct be_cmd_req_hdr *)mbx_cmd_mem->va; va 285 drivers/scsi/be2iscsi/be_cmds.c tag_mem->va = mbx_cmd_mem->va; va 519 drivers/scsi/be2iscsi/be_cmds.c tag_mem->va, tag_mem->dma); va 641 drivers/scsi/be2iscsi/be_cmds.c struct be_mcc_mailbox *mbox = mbox_mem->va; va 700 drivers/scsi/be2iscsi/be_cmds.c buf_pages = min(PAGES_4K_SPANNED(mem->va, mem->size), max_pages); va 732 drivers/scsi/be2iscsi/be_cmds.c return &((struct be_mcc_mailbox *)(mbox_mem->va))->wrb; va 752 drivers/scsi/be2iscsi/be_cmds.c req->num_pages = cpu_to_le16(PAGES_4K_SPANNED(q_mem->va, q_mem->size)); va 795 drivers/scsi/be2iscsi/be_cmds.c req->num_pages = cpu_to_le16(PAGES_4K_SPANNED(q_mem->va, q_mem->size)); va 873 drivers/scsi/be2iscsi/be_cmds.c req->num_pages = PAGES_4K_SPANNED(q_mem->va, q_mem->size); va 995 drivers/scsi/be2iscsi/be_cmds.c req->num_pages = PAGES_4K_SPANNED(q_mem->va, q_mem->size); va 1094 drivers/scsi/be2iscsi/be_cmds.c req->num_pages = PAGES_4K_SPANNED(q_mem->va, q_mem->size); va 1137 drivers/scsi/be2iscsi/be_cmds.c req->num_pages = PAGES_4K_SPANNED(q_mem->va, q_mem->size); va 1267 drivers/scsi/be2iscsi/be_cmds.c nonemb_cmd.va = dma_alloc_coherent(&ctrl->pdev->dev, va 1270 drivers/scsi/be2iscsi/be_cmds.c if (nonemb_cmd.va == NULL) { va 1277 drivers/scsi/be2iscsi/be_cmds.c req = nonemb_cmd.va; va 1289 drivers/scsi/be2iscsi/be_cmds.c struct be_mgmt_controller_attributes_resp *resp = nonemb_cmd.va; va 1311 drivers/scsi/be2iscsi/be_cmds.c if (nonemb_cmd.va) va 1313 drivers/scsi/be2iscsi/be_cmds.c nonemb_cmd.va, nonemb_cmd.dma); va 1071 drivers/scsi/be2iscsi/be_iscsi.c nonemb_cmd.va = dma_alloc_coherent(&phba->ctrl.pdev->dev, va 1074 drivers/scsi/be2iscsi/be_iscsi.c if (nonemb_cmd.va == NULL) { va 1084 drivers/scsi/be2iscsi/be_iscsi.c memset(nonemb_cmd.va, 0, nonemb_cmd.size); va 1092 drivers/scsi/be2iscsi/be_iscsi.c nonemb_cmd.va, nonemb_cmd.dma); va 1105 drivers/scsi/be2iscsi/be_iscsi.c nonemb_cmd.size, nonemb_cmd.va, va 1112 drivers/scsi/be2iscsi/be_iscsi.c ptcpcnct_out = (struct tcp_connect_and_offload_out *)nonemb_cmd.va; va 1120 drivers/scsi/be2iscsi/be_iscsi.c nonemb_cmd.va, nonemb_cmd.dma); va 537 drivers/scsi/be2iscsi/be_main.c mbox_mem_alloc->va = dma_alloc_coherent(&pdev->dev, va 539 drivers/scsi/be2iscsi/be_main.c if (!mbox_mem_alloc->va) { va 545 drivers/scsi/be2iscsi/be_main.c mbox_mem_align->va = 
PTR_ALIGN(mbox_mem_alloc->va, 16); va 547 drivers/scsi/be2iscsi/be_main.c memset(mbox_mem_align->va, 0, sizeof(struct be_mcc_mailbox)); va 2942 drivers/scsi/be2iscsi/be_main.c sgl->va = virtual_address; va 2958 drivers/scsi/be2iscsi/be_main.c if (sgl->va) va 2970 drivers/scsi/be2iscsi/be_main.c if (sgl->va) va 2987 drivers/scsi/be2iscsi/be_main.c mem->va = vaddress; va 2988 drivers/scsi/be2iscsi/be_main.c if (!mem->va) va 2990 drivers/scsi/be2iscsi/be_main.c memset(mem->va, 0, mem->size); va 3023 drivers/scsi/be2iscsi/be_main.c mem->va = eq_vaddress; va 3052 drivers/scsi/be2iscsi/be_main.c if (mem->va) va 3055 drivers/scsi/be2iscsi/be_main.c mem->va, mem->dma); va 3117 drivers/scsi/be2iscsi/be_main.c if (mem->va) va 3120 drivers/scsi/be2iscsi/be_main.c mem->va, mem->dma); va 3309 drivers/scsi/be2iscsi/be_main.c if (mem->va) { va 3311 drivers/scsi/be2iscsi/be_main.c mem->va, mem->dma); va 3312 drivers/scsi/be2iscsi/be_main.c mem->va = NULL; va 3325 drivers/scsi/be2iscsi/be_main.c mem->va = dma_alloc_coherent(&phba->pcidev->dev, mem->size, &mem->dma, va 3327 drivers/scsi/be2iscsi/be_main.c if (!mem->va) va 3466 drivers/scsi/be2iscsi/be_main.c ptag_mem->va, va 4837 drivers/scsi/be2iscsi/be_main.c nonemb_cmd.va = dma_alloc_coherent(&phba->ctrl.pdev->dev, va 4840 drivers/scsi/be2iscsi/be_main.c if (nonemb_cmd.va == NULL) { va 4853 drivers/scsi/be2iscsi/be_main.c nonemb_cmd.va, nonemb_cmd.dma); va 4867 drivers/scsi/be2iscsi/be_main.c nonemb_cmd.va, nonemb_cmd.dma); va 4874 drivers/scsi/be2iscsi/be_main.c resp = (struct be_cmd_resp_hdr *)nonemb_cmd.va; va 4877 drivers/scsi/be2iscsi/be_main.c nonemb_cmd.va, (resp->response_length va 4884 drivers/scsi/be2iscsi/be_main.c nonemb_cmd.va, nonemb_cmd.dma); va 5741 drivers/scsi/be2iscsi/be_main.c phba->ctrl.mbox_mem_alloced.va, va 5785 drivers/scsi/be2iscsi/be_main.c phba->ctrl.mbox_mem_alloced.va, va 965 drivers/scsi/be2iscsi/be_main.h void *va; /* The virtual address of the ring. 
This va 42 drivers/scsi/be2iscsi/be_mgmt.c struct be_bsg_vendor_cmd *req = nonemb_cmd->va; va 46 drivers/scsi/be2iscsi/be_mgmt.c memset(nonemb_cmd->va, 0, nonemb_cmd->size); va 64 drivers/scsi/be2iscsi/be_mgmt.c nonemb_cmd->va + offset, job->request_len); va 155 drivers/scsi/be2iscsi/be_mgmt.c req = nonemb_cmd->va; va 256 drivers/scsi/be2iscsi/be_mgmt.c tag_mem->va = nonemb_cmd->va; va 270 drivers/scsi/be2iscsi/be_mgmt.c memcpy(resp_buf, nonemb_cmd->va, resp_buf_len); va 288 drivers/scsi/be2iscsi/be_mgmt.c nonemb_cmd->va, nonemb_cmd->dma); va 296 drivers/scsi/be2iscsi/be_mgmt.c cmd->va = dma_alloc_coherent(&phba->ctrl.pdev->dev, size, &cmd->dma, va 298 drivers/scsi/be2iscsi/be_mgmt.c if (!cmd->va) { va 304 drivers/scsi/be2iscsi/be_mgmt.c be_cmd_hdr_prepare(cmd->va, subsystem, opcode, size); va 320 drivers/scsi/be2iscsi/be_mgmt.c tag_mem->va, tag_mem->dma); va 337 drivers/scsi/be2iscsi/be_mgmt.c req = nonemb_cmd.va; va 369 drivers/scsi/be2iscsi/be_mgmt.c req = nonemb_cmd.va; va 446 drivers/scsi/be2iscsi/be_mgmt.c req = nonemb_cmd.va; va 498 drivers/scsi/be2iscsi/be_mgmt.c req = nonemb_cmd.va; va 519 drivers/scsi/be2iscsi/be_mgmt.c req = nonemb_cmd.va; va 557 drivers/scsi/be2iscsi/be_mgmt.c req = nonemb_cmd.va; va 606 drivers/scsi/be2iscsi/be_mgmt.c reldhcp = nonemb_cmd.va; va 685 drivers/scsi/be2iscsi/be_mgmt.c dhcpreq = nonemb_cmd.va; va 753 drivers/scsi/be2iscsi/be_mgmt.c req = nonemb_cmd.va; va 767 drivers/scsi/be2iscsi/be_mgmt.c nonemb_cmd.va, va 780 drivers/scsi/be2iscsi/be_mgmt.c nonemb_cmd.va)->actual_resp_len; va 785 drivers/scsi/be2iscsi/be_mgmt.c nonemb_cmd.va, va 863 drivers/scsi/be2iscsi/be_mgmt.c sess_resp = bs->nonemb_cmd.va; va 874 drivers/scsi/be2iscsi/be_mgmt.c bs->nonemb_cmd.va, bs->nonemb_cmd.dma); va 875 drivers/scsi/be2iscsi/be_mgmt.c bs->nonemb_cmd.va = NULL; va 1016 drivers/scsi/be2iscsi/be_mgmt.c nonemb_cmd->va = dma_alloc_coherent(&phba->ctrl.pdev->dev, va 1020 drivers/scsi/be2iscsi/be_mgmt.c if (!nonemb_cmd->va) { va 1025 drivers/scsi/be2iscsi/be_mgmt.c req = nonemb_cmd->va; va 1513 drivers/scsi/be2iscsi/be_mgmt.c nonemb_cmd.va = dma_alloc_coherent(&phba->ctrl.pdev->dev, va 1516 drivers/scsi/be2iscsi/be_mgmt.c if (!nonemb_cmd.va) { va 1527 drivers/scsi/be2iscsi/be_mgmt.c nonemb_cmd.va, nonemb_cmd.dma); va 1531 drivers/scsi/be2iscsi/be_mgmt.c req = nonemb_cmd.va; va 1554 drivers/scsi/be2iscsi/be_mgmt.c nonemb_cmd.va, nonemb_cmd.dma); va 28 drivers/scsi/bnx2fc/bnx2fc_debug.c vaf.va = &args; va 52 drivers/scsi/bnx2fc/bnx2fc_debug.c vaf.va = &args; va 76 drivers/scsi/bnx2fc/bnx2fc_debug.c vaf.va = &args; va 2199 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c if (unlikely(*(u8 *)rsp != *(u8 *)pgl->va)) { va 2201 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c pgl->va, be64_to_cpu(*rsp), va 2202 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c be64_to_cpu(*(u64 *)pgl->va), va 1178 drivers/scsi/ibmvscsi/ibmvfc.c login_info->async.va = cpu_to_be64(vhost->async_crq.msg_token); va 1304 drivers/scsi/ibmvscsi/ibmvfc.c md[i].va = cpu_to_be64(sg_dma_address(sg)); va 1371 drivers/scsi/ibmvscsi/ibmvfc.c data->va = cpu_to_be64(evt->ext_list_token); va 1627 drivers/scsi/ibmvscsi/ibmvfc.c vfc_cmd->resp.va = cpu_to_be64(be64_to_cpu(evt->crq.ioba) + offsetof(struct ibmvfc_cmd, rsp)); va 1899 drivers/scsi/ibmvscsi/ibmvfc.c mad->cmd_ioba.va = cpu_to_be64(be64_to_cpu(evt->crq.ioba) + va 1908 drivers/scsi/ibmvscsi/ibmvfc.c mad->iu.cmd.va = cpu_to_be64(sg_dma_address(job->request_payload.sg_list)); va 1910 drivers/scsi/ibmvscsi/ibmvfc.c mad->iu.rsp.va = cpu_to_be64(sg_dma_address(job->reply_payload.sg_list)); va 1978 
drivers/scsi/ibmvscsi/ibmvfc.c tmf->resp.va = cpu_to_be64(be64_to_cpu(evt->crq.ioba) + offsetof(struct ibmvfc_cmd, rsp)); va 2308 drivers/scsi/ibmvscsi/ibmvfc.c tmf->resp.va = cpu_to_be64(be64_to_cpu(evt->crq.ioba) + offsetof(struct ibmvfc_cmd, rsp)); va 3638 drivers/scsi/ibmvscsi/ibmvfc.c mad->cmd_ioba.va = cpu_to_be64((u64)be64_to_cpu(evt->crq.ioba) + va 3643 drivers/scsi/ibmvscsi/ibmvfc.c mad->iu.cmd.va = cpu_to_be64((u64)be64_to_cpu(evt->crq.ioba) + va 3647 drivers/scsi/ibmvscsi/ibmvfc.c mad->iu.rsp.va = cpu_to_be64((u64)be64_to_cpu(evt->crq.ioba) + va 3986 drivers/scsi/ibmvscsi/ibmvfc.c mad->buffer.va = cpu_to_be64(vhost->disc_buf_dma); va 4101 drivers/scsi/ibmvscsi/ibmvfc.c mad->buffer.va = cpu_to_be64(vhost->login_buf_dma); va 661 drivers/scsi/ibmvscsi/ibmvscsi.c descr->va = cpu_to_be64(sg_dma_address(sg)); va 704 drivers/scsi/ibmvscsi/ibmvscsi.c indirect->table_desc.va = 0; va 735 drivers/scsi/ibmvscsi/ibmvscsi.c indirect->table_desc.va = cpu_to_be64(evt_struct->ext_list_token); va 1067 drivers/scsi/ibmvscsi/ibmvscsi.c indirect->table_desc.va == 0) { va 1068 drivers/scsi/ibmvscsi/ibmvscsi.c indirect->table_desc.va = va 3234 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.c client_ioba = be64_to_cpu(md[md_idx].va); va 361 drivers/scsi/libsas/sas_ata.c vaf.va = &args; va 13 drivers/scsi/qedf/qedf_dbg.c va_list va; va 16 drivers/scsi/qedf/qedf_dbg.c va_start(va, fmt); va 19 drivers/scsi/qedf/qedf_dbg.c vaf.va = &va; va 27 drivers/scsi/qedf/qedf_dbg.c va_end(va); va 34 drivers/scsi/qedf/qedf_dbg.c va_list va; va 37 drivers/scsi/qedf/qedf_dbg.c va_start(va, fmt); va 40 drivers/scsi/qedf/qedf_dbg.c vaf.va = &va; va 52 drivers/scsi/qedf/qedf_dbg.c va_end(va); va 59 drivers/scsi/qedf/qedf_dbg.c va_list va; va 62 drivers/scsi/qedf/qedf_dbg.c va_start(va, fmt); va 65 drivers/scsi/qedf/qedf_dbg.c vaf.va = &va; va 78 drivers/scsi/qedf/qedf_dbg.c va_end(va); va 85 drivers/scsi/qedf/qedf_dbg.c va_list va; va 88 drivers/scsi/qedf/qedf_dbg.c va_start(va, fmt); va 91 drivers/scsi/qedf/qedf_dbg.c vaf.va = &va; va 103 drivers/scsi/qedf/qedf_dbg.c va_end(va); va 14 drivers/scsi/qedi/qedi_dbg.c va_list va; va 17 drivers/scsi/qedi/qedi_dbg.c va_start(va, fmt); va 20 drivers/scsi/qedi/qedi_dbg.c vaf.va = &va; va 28 drivers/scsi/qedi/qedi_dbg.c va_end(va); va 35 drivers/scsi/qedi/qedi_dbg.c va_list va; va 38 drivers/scsi/qedi/qedi_dbg.c va_start(va, fmt); va 41 drivers/scsi/qedi/qedi_dbg.c vaf.va = &va; va 53 drivers/scsi/qedi/qedi_dbg.c va_end(va); va 60 drivers/scsi/qedi/qedi_dbg.c va_list va; va 63 drivers/scsi/qedi/qedi_dbg.c va_start(va, fmt); va 66 drivers/scsi/qedi/qedi_dbg.c vaf.va = &va; va 79 drivers/scsi/qedi/qedi_dbg.c va_end(va); va 86 drivers/scsi/qedi/qedi_dbg.c va_list va; va 89 drivers/scsi/qedi/qedi_dbg.c va_start(va, fmt); va 92 drivers/scsi/qedi/qedi_dbg.c vaf.va = &va; va 104 drivers/scsi/qedi/qedi_dbg.c va_end(va); va 2538 drivers/scsi/qla2xxx/qla_dbg.c va_list va; va 2544 drivers/scsi/qla2xxx/qla_dbg.c va_start(va, fmt); va 2547 drivers/scsi/qla2xxx/qla_dbg.c vaf.va = &va; va 2560 drivers/scsi/qla2xxx/qla_dbg.c va_end(va); va 2581 drivers/scsi/qla2xxx/qla_dbg.c va_list va; va 2589 drivers/scsi/qla2xxx/qla_dbg.c va_start(va, fmt); va 2592 drivers/scsi/qla2xxx/qla_dbg.c vaf.va = &va; va 2598 drivers/scsi/qla2xxx/qla_dbg.c va_end(va); va 2617 drivers/scsi/qla2xxx/qla_dbg.c va_list va; va 2635 drivers/scsi/qla2xxx/qla_dbg.c va_start(va, fmt); va 2638 drivers/scsi/qla2xxx/qla_dbg.c vaf.va = &va; va 2655 drivers/scsi/qla2xxx/qla_dbg.c va_end(va); va 2675 drivers/scsi/qla2xxx/qla_dbg.c va_list va; 
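Note: the qedf/qedi and qla2xxx debug helpers indexed here all wrap their varargs in a struct va_format so that a single printk with %pV can expand the caller's format string; "vaf.va = &va" is the line this index keeps finding (the same idiom accounts for the earlier ath/b43/brcm/iwlwifi "vaf.va = &args" hits). A minimal sketch of that idiom; the function name and message prefix are made up:

    #include <linux/kernel.h>
    #include <linux/printk.h>

    /* Hypothetical driver-local debug helper using the %pV idiom. */
    static void my_dbg_info(const char *fmt, ...)
    {
            struct va_format vaf;
            va_list va;

            va_start(va, fmt);

            vaf.fmt = fmt;
            vaf.va = &va;   /* %pV dereferences this va_list when printing */

            /* one printk; the caller's format is expanded in place */
            pr_info("mydrv: %pV", &vaf);

            va_end(va);
    }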
va 2689 drivers/scsi/qla2xxx/qla_dbg.c va_start(va, fmt);
va 2692 drivers/scsi/qla2xxx/qla_dbg.c vaf.va = &va;
va 2709 drivers/scsi/qla2xxx/qla_dbg.c va_end(va);
va 2776 drivers/scsi/qla2xxx/qla_dbg.c va_list va;
va 2794 drivers/scsi/qla2xxx/qla_dbg.c va_start(va, fmt);
va 2797 drivers/scsi/qla2xxx/qla_dbg.c vaf.va = &va;
va 2814 drivers/scsi/qla2xxx/qla_dbg.c va_end(va);
va 2834 drivers/scsi/qla2xxx/qla_dbg.c va_list va;
va 2840 drivers/scsi/qla2xxx/qla_dbg.c va_start(va, fmt);
va 2843 drivers/scsi/qla2xxx/qla_dbg.c vaf.va = &va;
va 2856 drivers/scsi/qla2xxx/qla_dbg.c va_end(va);
va 4541 drivers/scsi/scsi_transport_iscsi.c vaf.va = &args;
va 241 drivers/staging/comedi/drivers/ni_routes.c static int _ni_sort_destcmp(const void *va, const void *vb)
va 243 drivers/staging/comedi/drivers/ni_routes.c const struct ni_route_set *a = va;
va 1363 drivers/staging/qlge/qlge.h char *va; /* virt addr for this chunk */
va 1115 drivers/staging/qlge/qlge_main.c rx_ring->pg_chunk.va = page_address(rx_ring->pg_chunk.page);
va 1131 drivers/staging/qlge/qlge_main.c rx_ring->pg_chunk.va += rx_ring->lbq_buf_size;
va 1517 drivers/staging/qlge/qlge_main.c prefetch(lbq_desc->p.pg_chunk.va);
va 1558 drivers/staging/qlge/qlge_main.c addr = lbq_desc->p.pg_chunk.va;
va 1893 drivers/staging/qlge/qlge_main.c lbq_desc->p.pg_chunk.va,
va 1950 drivers/staging/qlge/qlge_main.c ql_update_mac_hdr_len(qdev, ib_mac_rsp, lbq_desc->p.pg_chunk.va,
va 243 drivers/target/iscsi/cxgbit/cxgbit_main.c struct cpl_iscsi_hdr *cpl = (struct cpl_iscsi_hdr *)gl->va;
va 249 drivers/target/iscsi/cxgbit/cxgbit_main.c pdu_cb->hdr = gl->va + offset;
va 258 drivers/target/iscsi/cxgbit/cxgbit_main.c struct cpl_iscsi_data *cpl = (struct cpl_iscsi_data *)gl->va;
va 271 drivers/target/iscsi/cxgbit/cxgbit_main.c cpl = (struct cpl_rx_iscsi_cmp *)gl->va;
va 275 drivers/target/iscsi/cxgbit/cxgbit_main.c pdu_cb->hdr = gl->va + offset;
va 455 drivers/target/iscsi/cxgbit/cxgbit_main.c rpl = gl ? (struct cpl_tx_data *)gl->va :
va 483 drivers/target/iscsi/cxgbit/cxgbit_main.c if (unlikely(op != *(u8 *)gl->va)) {
va 485 drivers/target/iscsi/cxgbit/cxgbit_main.c gl->va, be64_to_cpu(*rsp),
va 486 drivers/target/iscsi/cxgbit/cxgbit_main.c get_unaligned_be64(gl->va),
va 452 drivers/tee/optee/core.c void *va;
va 479 drivers/tee/optee/core.c va = memremap(paddr, size, MEMREMAP_WB);
va 480 drivers/tee/optee/core.c if (!va) {
va 484 drivers/tee/optee/core.c vaddr = (unsigned long)va;
va 505 drivers/tee/optee/core.c *memremaped_shm = va;
va 514 drivers/tee/optee/core.c memunmap(va);
va 383 drivers/tee/tee_shm.c int tee_shm_va2pa(struct tee_shm *shm, void *va, phys_addr_t *pa)
va 388 drivers/tee/tee_shm.c if ((char *)va < (char *)shm->kaddr)
va 390 drivers/tee/tee_shm.c if ((char *)va >= ((char *)shm->kaddr + shm->size))
va 394 drivers/tee/tee_shm.c shm, (unsigned long)va - (unsigned long)shm->kaddr, pa);
va 405 drivers/tee/tee_shm.c int tee_shm_pa2va(struct tee_shm *shm, phys_addr_t pa, void **va)
va 415 drivers/tee/tee_shm.c if (va) {
va 420 drivers/tee/tee_shm.c *va = v;
va 15 drivers/tee/tee_shm_pool.c unsigned long va;
va 19 drivers/tee/tee_shm_pool.c va = gen_pool_alloc(genpool, s);
va 20 drivers/tee/tee_shm_pool.c if (!va)
va 23 drivers/tee/tee_shm_pool.c memset((void *)va, 0, s);
va 24 drivers/tee/tee_shm_pool.c shm->kaddr = (void *)va;
va 25 drivers/tee/tee_shm_pool.c shm->paddr = gen_pool_virt_to_phys(genpool, va);
va 60 drivers/tty/serial/samsung.c va_list va;
va 63 drivers/tty/serial/samsung.c va_start(va, fmt);
va 64 drivers/tty/serial/samsung.c vscnprintf(buff, sizeof(buff), fmt, va);
va 65 drivers/tty/serial/samsung.c va_end(va);
va 104 drivers/usb/gadget/udc/s3c2410_udc.c vaf.va = &args;
va 30 drivers/usb/host/xhci-dbg.c vaf.va = &args;
va 35 drivers/usb/host/xhci-trace.h vsnprintf(__get_str(msg), XHCI_MSG_MAX, vaf->fmt, *vaf->va);
va 20 drivers/usb/mtu3/mtu3_trace.c vaf.va = &args;
va 32 drivers/usb/mtu3/mtu3_trace.h vsnprintf(__get_str(msg), MTU3_MSG_MAX, vaf->fmt, *vaf->va);
va 20 drivers/usb/musb/musb_trace.c vaf.va = &args;
va 35 drivers/usb/musb/musb_trace.h vsnprintf(__get_str(msg), MUSB_MSG_MAX, vaf->fmt, *vaf->va);
va 128 drivers/video/fbdev/matrox/matroxfb_base.h static inline unsigned int mga_readb(vaddr_t va, unsigned int offs) {
va 129 drivers/video/fbdev/matrox/matroxfb_base.h return readb(va.vaddr + offs);
va 132 drivers/video/fbdev/matrox/matroxfb_base.h static inline void mga_writeb(vaddr_t va, unsigned int offs, u_int8_t value) {
va 133 drivers/video/fbdev/matrox/matroxfb_base.h writeb(value, va.vaddr + offs);
va 136 drivers/video/fbdev/matrox/matroxfb_base.h static inline void mga_writew(vaddr_t va, unsigned int offs, u_int16_t value) {
va 137 drivers/video/fbdev/matrox/matroxfb_base.h writew(value, va.vaddr + offs);
va 140 drivers/video/fbdev/matrox/matroxfb_base.h static inline u_int32_t mga_readl(vaddr_t va, unsigned int offs) {
va 141 drivers/video/fbdev/matrox/matroxfb_base.h return readl(va.vaddr + offs);
va 144 drivers/video/fbdev/matrox/matroxfb_base.h static inline void mga_writel(vaddr_t va, unsigned int offs, u_int32_t value) {
va 145 drivers/video/fbdev/matrox/matroxfb_base.h writel(value, va.vaddr + offs);
va 148 drivers/video/fbdev/matrox/matroxfb_base.h static inline void mga_memcpy_toio(vaddr_t va, const void* src, int len) {
va 157 drivers/video/fbdev/matrox/matroxfb_base.h iowrite32_rep(va.vaddr, src, len >> 2);
va 159 drivers/video/fbdev/matrox/matroxfb_base.h u_int32_t __iomem* addr = va.vaddr;
va 179 drivers/video/fbdev/matrox/matroxfb_base.h static inline void vaddr_add(vaddr_t* va, unsigned long offs) {
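The drivers/tee/tee_shm.c hits above (lines 383-394) show the shape of tee_shm_va2pa(): bounds-check a kernel virtual address against the shared-memory region, then convert by offset. A simplified model follows, assuming illustrative struct shm_region and region_va2pa() names rather than the kernel's own types.

	#include <stddef.h>
	#include <stdint.h>

	/* A va inside [kaddr, kaddr + size) maps to the physical address
	 * at the same offset from the region's physical base. */
	struct shm_region {
		char *kaddr;    /* kernel virtual base of the region */
		uint64_t paddr; /* physical base of the same region  */
		size_t size;
	};

	static int region_va2pa(const struct shm_region *shm, void *va,
				uint64_t *pa)
	{
		if ((char *)va < shm->kaddr)
			return -1;
		if ((char *)va >= shm->kaddr + shm->size)
			return -1;
		*pa = shm->paddr + (uint64_t)((char *)va - shm->kaddr);
		return 0;
	}

tee_shm_pa2va() at line 405 is the same translation run in the opposite direction.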
va 180 drivers/video/fbdev/matrox/matroxfb_base.h va->vaddr += offs;
va 183 drivers/video/fbdev/matrox/matroxfb_base.h static inline void __iomem* vaddr_va(vaddr_t va) {
va 184 drivers/video/fbdev/matrox/matroxfb_base.h return va.vaddr;
va 422 drivers/video/fbdev/tgafb.c min_diff = delta, vm = m, va = a, vr = r; \
va 430 drivers/video/fbdev/tgafb.c int r,a,m,vm = 34, va = 1, vr = 30;
va 498 drivers/video/fbdev/tgafb.c TGA_WRITE_REG(par, (va >> r) & 1, TGA_CLOCK_REG);
va 73 drivers/video/fbdev/vermilion/vermilion.c static int vmlfb_alloc_vram_area(struct vram_area *va, unsigned max_order,
va 89 drivers/video/fbdev/vermilion/vermilion.c va->logical =
va 91 drivers/video/fbdev/vermilion/vermilion.c } while (va->logical == 0 && max_order > min_order);
va 93 drivers/video/fbdev/vermilion/vermilion.c if (!va->logical)
va 96 drivers/video/fbdev/vermilion/vermilion.c va->phys = virt_to_phys((void *)va->logical);
va 97 drivers/video/fbdev/vermilion/vermilion.c va->size = PAGE_SIZE << max_order;
va 98 drivers/video/fbdev/vermilion/vermilion.c va->order = max_order;
va 107 drivers/video/fbdev/vermilion/vermilion.c memset((void *)va->logical, 0x00, va->size);
va 108 drivers/video/fbdev/vermilion/vermilion.c for (i = va->logical; i < va->logical + va->size; i += PAGE_SIZE) {
va 116 drivers/video/fbdev/vermilion/vermilion.c set_pages_uc(virt_to_page(va->logical), va->size >> PAGE_SHIFT);
va 120 drivers/video/fbdev/vermilion/vermilion.c va->size, va->phys);
va 130 drivers/video/fbdev/vermilion/vermilion.c static void vmlfb_free_vram_area(struct vram_area *va)
va 134 drivers/video/fbdev/vermilion/vermilion.c if (va->logical) {
va 140 drivers/video/fbdev/vermilion/vermilion.c set_pages_wb(virt_to_page(va->logical),
va 141 drivers/video/fbdev/vermilion/vermilion.c va->size >> PAGE_SHIFT);
va 148 drivers/video/fbdev/vermilion/vermilion.c for (j = va->logical; j < va->logical + va->size;
va 155 drivers/video/fbdev/vermilion/vermilion.c va->size, va->phys);
va 156 drivers/video/fbdev/vermilion/vermilion.c free_pages(va->logical, va->order);
va 158 drivers/video/fbdev/vermilion/vermilion.c va->logical = 0;
va 191 drivers/video/fbdev/vermilion/vermilion.c struct vram_area *va;
va 196 drivers/video/fbdev/vermilion/vermilion.c va = &vinfo->vram[i];
va 202 drivers/video/fbdev/vermilion/vermilion.c err = vmlfb_alloc_vram_area(va, order, 0);
va 208 drivers/video/fbdev/vermilion/vermilion.c vinfo->vram_start = va->phys;
va 209 drivers/video/fbdev/vermilion/vermilion.c vinfo->vram_logical = (void __iomem *) va->logical;
va 210 drivers/video/fbdev/vermilion/vermilion.c vinfo->vram_contig_size = va->size;
va 217 drivers/video/fbdev/vermilion/vermilion.c if (va->phys + va->size == va2->phys ||
va 218 drivers/video/fbdev/vermilion/vermilion.c va2->phys + va2->size == va->phys) {
va 226 drivers/video/fbdev/vermilion/vermilion.c if (va->phys < vinfo->vram_start) {
va 227 drivers/video/fbdev/vermilion/vermilion.c vinfo->vram_start = va->phys;
va 229 drivers/video/fbdev/vermilion/vermilion.c (void __iomem *)va->logical;
va 231 drivers/video/fbdev/vermilion/vermilion.c vinfo->vram_contig_size += va->size;
va 233 drivers/video/fbdev/vermilion/vermilion.c vmlfb_free_vram_area(va);
va 238 drivers/video/fbdev/vermilion/vermilion.c if (requested < va->size)
va 241 drivers/video/fbdev/vermilion/vermilion.c requested -= va->size;
va 218 drivers/xen/privcmd.c unsigned long va;
va 232 drivers/xen/privcmd.c ((unsigned long)(msg->npages << PAGE_SHIFT) >= -st->va))
va 236 drivers/xen/privcmd.c if ((msg->va != st->va) ||
va 237 drivers/xen/privcmd.c ((msg->va+(msg->npages<<PAGE_SHIFT)) > vma->vm_end))
va 241 drivers/xen/privcmd.c msg->va & PAGE_MASK,
va 248 drivers/xen/privcmd.c st->va += msg->npages << PAGE_SHIFT;
va 288 drivers/xen/privcmd.c vma = find_vma(mm, msg->va);
va 291 drivers/xen/privcmd.c if (!vma || (msg->va != vma->vm_start) || vma->vm_private_data)
va 296 drivers/xen/privcmd.c state.va = vma->vm_start;
va 316 drivers/xen/privcmd.c unsigned long va;
va 350 drivers/xen/privcmd.c ret = xen_remap_domain_gfn_array(st->vma, st->va & PAGE_MASK, gfnp, nr,
va 364 drivers/xen/privcmd.c st->va += XEN_PAGE_SIZE * nr;
va 548 drivers/xen/privcmd.c state.va = m.addr;
va 31 fs/adfs/super.c vaf.va = &args;
va 47 fs/adfs/super.c vaf.va = &args;
va 453 fs/affs/amigaffs.c vaf.va = &args;
va 469 fs/affs/amigaffs.c vaf.va = &args;
va 36 fs/befs/debug.c vaf.va = &args;
va 49 fs/befs/debug.c vaf.va = &args;
va 64 fs/befs/debug.c vaf.va = &args;
va 125 fs/btrfs/super.c vaf.va = &args;
va 214 fs/btrfs/super.c vaf.va = &args;
va 279 fs/btrfs/super.c vaf.va = &args;
va 59 fs/btrfs/tree-checker.c vaf.va = &args;
va 86 fs/btrfs/tree-checker.c vaf.va = &args;
va 305 fs/btrfs/tree-checker.c vaf.va = &args;
va 439 fs/btrfs/tree-checker.c vaf.va = &args;
va 549 fs/btrfs/tree-checker.c vaf.va = &args;
va 690 fs/btrfs/tree-checker.c vaf.va = &args;
va 941 fs/btrfs/tree-checker.c vaf.va = &args;
va 10 fs/ceph/ceph_frag.c unsigned va = ceph_frag_value(a);
va 12 fs/ceph/ceph_frag.c if (va < vb)
va 14 fs/ceph/ceph_frag.c if (va > vb)
va 16 fs/ceph/ceph_frag.c va = ceph_frag_bits(a);
va 18 fs/ceph/ceph_frag.c if (va < vb)
va 20 fs/ceph/ceph_frag.c if (va > vb)
va 467 fs/crypto/crypto.c vaf.va = &args;
va 28 fs/erofs/super.c vaf.va = &args;
va 43 fs/erofs/super.c vaf.va = &args;
va 68 fs/ext2/super.c vaf.va = &args;
va 93 fs/ext2/super.c vaf.va = &args;
va 510 fs/ext4/super.c vaf.va = &args;
va 537 fs/ext4/super.c vaf.va = &args;
va 576 fs/ext4/super.c vaf.va = &args;
va 685 fs/ext4/super.c vaf.va = &args;
va 722 fs/ext4/super.c vaf.va = &args;
va 742 fs/ext4/super.c vaf.va = &args;
va 759 fs/ext4/super.c vaf.va = &args;
va 788 fs/ext4/super.c vaf.va = &args;
va 219 fs/f2fs/super.c vaf.va = &args;
va 30 fs/fat/misc.c vaf.va = &args;
va 55 fs/fat/misc.c vaf.va = &args;
va 393 fs/fs_context.c va_list va;
va 397 fs/fs_context.c va_start(va, fmt);
va 403 fs/fs_context.c p = va_arg(va, const char *);
va 407 fs/fs_context.c q = kvasprintf(GFP_KERNEL, fmt, va);
va 462 fs/fs_context.c va_end(va);
va 183 fs/fscache/cache.c va_list va;
va 189 fs/fscache/cache.c va_start(va, idfmt);
va 190 fs/fscache/cache.c vsnprintf(cache->identifier, sizeof(cache->identifier), idfmt, va);
va 191 fs/fscache/cache.c va_end(va);
va 1090 fs/gfs2/glock.c vaf.va = &args;
va 51 fs/gfs2/util.c vaf.va = &args;
va 65 fs/hpfs/super.c vaf.va = &args;
va 113 fs/jbd2/journal.c vaf.va = &args;
va 91 fs/jfs/super.c vaf.va = &args;
va 73 fs/nilfs2/super.c vaf.va = &args;
va 123 fs/nilfs2/super.c vaf.va = &args;
va 44 fs/ntfs/debug.c vaf.va = &args;
va 87 fs/ntfs/debug.c vaf.va = &args;
va 114 fs/ntfs/debug.c vaf.va = &args;
va 77 fs/ocfs2/cluster/masklog.c vaf.va = &args;
va 2547 fs/ocfs2/super.c vaf.va = &args;
va 2571 fs/ocfs2/super.c vaf.va = &args;
va 1106 fs/proc/proc_sysctl.c vaf.va = &args;
va 444 fs/pstore/ram_core.c void *va;
va 454 fs/pstore/ram_core.c va = ioremap(start, size);
va 456 fs/pstore/ram_core.c va = ioremap_wc(start, size);
va 463 fs/pstore/ram_core.c return va;
va 145 fs/quota/dquot.c vaf.va = &args;
va 14 fs/ubifs/misc.c vaf.va = &args;
va 31 fs/ubifs/misc.c vaf.va = &args;
va 50 fs/ubifs/misc.c vaf.va = &args;
va 2326 fs/udf/super.c vaf.va = &args;
va 2342 fs/udf/super.c vaf.va = &args;
va 291 fs/ufs/super.c vaf.va = &args;
va 324 fs/ufs/super.c vaf.va = &args;
va 339 fs/ufs/super.c vaf.va = &args;
va 25 fs/verity/init.c vaf.va = &args;
va 40 fs/xfs/xfs_message.c vaf.va = &args; \
va 80 fs/xfs/xfs_message.c vaf.va = &args;
va 100 include/drm/drm_print.h drm_vprintf(struct drm_printer *p, const char *fmt, va_list *va)
va 102 include/drm/drm_print.h struct va_format vaf = { .fmt = fmt, .va = va };
va 42 include/linux/fsl/bestcomm/sram.h static inline phys_addr_t bcom_sram_va2pa(void *va) {
va 44 include/linux/fsl/bestcomm/sram.h (unsigned long)(va - bcom_sram->base_virt);
va 371 include/linux/mlx5/qp.h __be64 va;
va 385 include/linux/mlx5/qp.h __be64 va;
va 458 include/linux/mlx5/qp.h __be64 va;
va 465 include/linux/mlx5/qp.h __be64 va;
va 241 include/linux/mm_types.h void * va;
va 89 include/linux/printk.h va_list *va;
va 330 include/linux/remoteproc.h void *va;
va 552 include/linux/remoteproc.h void *va;
va 602 include/linux/remoteproc.h void *va, dma_addr_t dma, int len, u32 da,
va 115 include/linux/scif.h void __iomem **va;
va 374 include/linux/tee_drv.h int tee_shm_va2pa(struct tee_shm *shm, void *va, phys_addr_t *pa);
va 383 include/linux/tee_drv.h int tee_shm_pa2va(struct tee_shm *shm, phys_addr_t pa, void **va);
va 248 include/net/ax25.h unsigned short vs, vr, va;
va 90 include/net/lapb.h unsigned short vs, vr, va;
va 73 include/net/netrom.h unsigned short vs, vr, va, vl;
va 138 include/net/rose.h unsigned short vs, vr, va, vl;
va 155 include/net/x25.h unsigned short vs, vr, va, vl;
va 98 include/scsi/srp.h __be64 va;
va 34 include/trace/events/iscsi.h vsnprintf(__get_str(msg), ISCSI_MSG_MAX, vaf->fmt, *vaf->va);
va 50 include/uapi/drm/lima_drm.h __u32 va; /* out, virtual address mapped into GPU MMU */
va 107 include/uapi/linux/ax25.h unsigned int vs, vr, va, vs_max;
va 257 include/uapi/linux/kfd_ioctl.h __u64 va;
va 170 include/uapi/linux/mic_common.h void *va;
va 47 include/uapi/xen/privcmd.h __u64 va;
va 78 include/xen/arm/hypercall.h MULTI_update_va_mapping(struct multicall_entry *mcl, unsigned long va,
va 702 kernel/module.c void *va = (void *)addr;
va 704 kernel/module.c if (va >= start && va < start + mod->percpu_size) {
va 706 kernel/module.c *can_addr = (unsigned long) (va - start);
va 525 kernel/power/main.c vaf.va = &args;
va 2104 kernel/trace/trace_events_filter.c #define DATA_REC(m, va, vb, vc, vd, ve, vf, vg, vh, nvisit) \
va 2107 kernel/trace/trace_events_filter.c .rec = { .a = va, .b = vb, .c = vc, .d = vd, \
va 565 lib/dynamic_debug.c vaf.va = &args;
va 585 lib/dynamic_debug.c vaf.va = &args;
va 616 lib/dynamic_debug.c vaf.va = &args;
va 652 lib/dynamic_debug.c vaf.va = &args;
va 25 lib/test_debug_virtual.c void *va;
va 27 lib/test_debug_virtual.c va = (void *)VMALLOC_START;
va 28 lib/test_debug_virtual.c pa = virt_to_phys(va);
va 30 lib/test_debug_virtual.c pr_info("PA: %pa for VA: 0x%lx\n", &pa, (unsigned long)va);
va 37 lib/test_debug_virtual.c va = foo;
va 38 lib/test_debug_virtual.c pr_info("PA: %pa for VA: 0x%lx\n", &pa, (unsigned long)va);
va 1614 lib/vsprintf.c va_list va;
va 1619 lib/vsprintf.c va_copy(va, *va_fmt->va);
va 1620 lib/vsprintf.c buf += vsnprintf(buf, end > buf ? end - buf : 0, va_fmt->fmt, va);
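The lib/vsprintf.c hits just above (lines 1614-1620) are the consumer side of %pV: before expanding the nested format, the caller's va_list is duplicated with va_copy(), because C forbids walking the same va_list twice. A standalone sketch of that step, with emit_nested() as an illustrative stand-in rather than the kernel function:

	#include <stdarg.h>
	#include <stdio.h>

	struct va_fmt {
		const char *fmt;
		va_list *va;
	};

	/* Expand vf->fmt against a private copy of the caller's va_list
	 * so the original remains usable after we return. */
	static int emit_nested(char *buf, size_t len, const struct va_fmt *vf)
	{
		va_list va;
		int n;

		va_copy(va, *vf->va);
		n = vsnprintf(buf, len, vf->fmt, va);
		va_end(va);
		return n;
	}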
va 1621 lib/vsprintf.c va_end(va);
va 2148 mm/memory.c static inline void cow_user_page(struct page *dst, struct page *src, unsigned long va, struct vm_area_struct *vma)
va 2160 mm/memory.c void __user *uaddr = (void __user *)(va & PAGE_MASK);
va 2173 mm/memory.c copy_user_highpage(dst, src, va, vma);
va 3756 mm/page_alloc.c vaf.va = &args;
va 4863 mm/page_alloc.c nc->va = page ? page_address(page) : NULL;
va 4884 mm/page_alloc.c if (unlikely(!nc->va)) {
va 4907 mm/page_alloc.c page = virt_to_page(nc->va);
va 4927 mm/page_alloc.c return nc->va + offset;
va 2006 mm/percpu.c void *va = (void *)addr;
va 2008 mm/percpu.c if (va >= start && va < start + static_size) {
va 2010 mm/percpu.c *can_addr = (unsigned long) (va - start);
va 626 mm/slub.c vaf.va = &args;
va 642 mm/slub.c vaf.va = &args;
va 375 mm/vmalloc.c va_size(struct vmap_area *va)
va 377 mm/vmalloc.c return (va->va_end - va->va_start);
va 383 mm/vmalloc.c struct vmap_area *va;
va 385 mm/vmalloc.c va = rb_entry_safe(node, struct vmap_area, rb_node);
va 386 mm/vmalloc.c return va ? va->subtree_max_size : 0;
va 393 mm/vmalloc.c compute_subtree_max_size(struct vmap_area *va)
va 395 mm/vmalloc.c return max3(va_size(va),
va 396 mm/vmalloc.c get_subtree_max_size(va->rb_node.rb_left),
va 397 mm/vmalloc.c get_subtree_max_size(va->rb_node.rb_right));
va 419 mm/vmalloc.c struct vmap_area *va;
va 421 mm/vmalloc.c va = rb_entry(n, struct vmap_area, rb_node);
va 422 mm/vmalloc.c if (addr < va->va_start)
va 424 mm/vmalloc.c else if (addr >= va->va_end)
va 427 mm/vmalloc.c return va;
va 438 mm/vmalloc.c find_va_links(struct vmap_area *va,
va 468 mm/vmalloc.c if (va->va_start < tmp_va->va_end &&
va 469 mm/vmalloc.c va->va_end <= tmp_va->va_start)
va 471 mm/vmalloc.c else if (va->va_end > tmp_va->va_start &&
va 472 mm/vmalloc.c va->va_start >= tmp_va->va_end)
va 501 mm/vmalloc.c link_va(struct vmap_area *va, struct rb_root *root,
va 515 mm/vmalloc.c rb_link_node(&va->rb_node, parent, link);
va 528 mm/vmalloc.c rb_insert_augmented(&va->rb_node,
va 530 mm/vmalloc.c va->subtree_max_size = 0;
va 532 mm/vmalloc.c rb_insert_color(&va->rb_node, root);
va 536 mm/vmalloc.c list_add(&va->list, head);
va 540 mm/vmalloc.c unlink_va(struct vmap_area *va, struct rb_root *root)
va 542 mm/vmalloc.c if (WARN_ON(RB_EMPTY_NODE(&va->rb_node)))
va 546 mm/vmalloc.c rb_erase_augmented(&va->rb_node,
va 549 mm/vmalloc.c rb_erase(&va->rb_node, root);
va 551 mm/vmalloc.c list_del(&va->list);
va 552 mm/vmalloc.c RB_CLEAR_NODE(&va->rb_node);
va 559 mm/vmalloc.c struct vmap_area *va;
va 567 mm/vmalloc.c va = rb_entry(n, struct vmap_area, rb_node);
va 568 mm/vmalloc.c size = va->subtree_max_size;
va 572 mm/vmalloc.c va = rb_entry(node, struct vmap_area, rb_node);
va 577 mm/vmalloc.c if (va_size(va) == size) {
va 587 mm/vmalloc.c va = rb_entry(n, struct vmap_area, rb_node);
va 589 mm/vmalloc.c va_size(va), va->subtree_max_size);
va 625 mm/vmalloc.c augment_tree_propagate_from(struct vmap_area *va)
va 627 mm/vmalloc.c struct rb_node *node = &va->rb_node;
va 631 mm/vmalloc.c va = rb_entry(node, struct vmap_area, rb_node);
va 632 mm/vmalloc.c new_va_sub_max_size = compute_subtree_max_size(va);
va 640 mm/vmalloc.c if (va->subtree_max_size == new_va_sub_max_size)
va 643 mm/vmalloc.c va->subtree_max_size = new_va_sub_max_size;
va 644 mm/vmalloc.c node = rb_parent(&va->rb_node);
va 653 mm/vmalloc.c insert_vmap_area(struct vmap_area *va,
va 659 mm/vmalloc.c link = find_va_links(va, root, NULL, &parent);
va 660 mm/vmalloc.c link_va(va, root, parent, link, head);
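The mm/vmalloc.c hits at lines 375-397 above spell out the augmentation invariant of the free-space tree: each vmap_area caches, in subtree_max_size, the largest free block anywhere in its subtree, computed as max3(own size, left subtree max, right subtree max), so the allocator can skip whole subtrees that cannot satisfy a request. A toy restatement with plain child pointers (the kernel uses an augmented rbtree; struct node is illustrative):

	/* Every node caches the biggest gap anywhere below it. */
	struct node {
		unsigned long va_start, va_end;  /* one free range          */
		unsigned long subtree_max_size;  /* cached max over subtree */
		struct node *left, *right;
	};

	static unsigned long node_size(const struct node *n)
	{
		return n->va_end - n->va_start;
	}

	static unsigned long subtree_max(const struct node *n)
	{
		return n ? n->subtree_max_size : 0;
	}

	/* Recompute n's cached value; the real code propagates this
	 * bottom-up toward the root (augment_tree_propagate_from()). */
	static void update_subtree_max(struct node *n)
	{
		unsigned long m = node_size(n);

		if (subtree_max(n->left) > m)
			m = subtree_max(n->left);
		if (subtree_max(n->right) > m)
			m = subtree_max(n->right);
		n->subtree_max_size = m;
	}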
va 664 mm/vmalloc.c insert_vmap_area_augment(struct vmap_area *va,
va 672 mm/vmalloc.c link = find_va_links(va, NULL, from, &parent);
va 674 mm/vmalloc.c link = find_va_links(va, root, NULL, &parent);
va 676 mm/vmalloc.c link_va(va, root, parent, link, head);
va 677 mm/vmalloc.c augment_tree_propagate_from(va);
va 687 mm/vmalloc.c merge_or_add_vmap_area(struct vmap_area *va,
va 700 mm/vmalloc.c link = find_va_links(va, root, NULL, &parent);
va 718 mm/vmalloc.c if (sibling->va_start == va->va_end) {
va 719 mm/vmalloc.c sibling->va_start = va->va_start;
va 725 mm/vmalloc.c kmem_cache_free(vmap_area_cachep, va);
va 728 mm/vmalloc.c va = sibling;
va 742 mm/vmalloc.c if (sibling->va_end == va->va_start) {
va 743 mm/vmalloc.c sibling->va_end = va->va_end;
va 749 mm/vmalloc.c unlink_va(va, root);
va 752 mm/vmalloc.c kmem_cache_free(vmap_area_cachep, va);
va 759 mm/vmalloc.c link_va(va, root, parent, link, head);
va 760 mm/vmalloc.c augment_tree_propagate_from(va);
va 765 mm/vmalloc.c is_within_this_va(struct vmap_area *va, unsigned long size,
va 770 mm/vmalloc.c if (va->va_start > vstart)
va 771 mm/vmalloc.c nva_start_addr = ALIGN(va->va_start, align);
va 780 mm/vmalloc.c return (nva_start_addr + size <= va->va_end);
va 792 mm/vmalloc.c struct vmap_area *va;
va 803 mm/vmalloc.c va = rb_entry(node, struct vmap_area, rb_node);
va 806 mm/vmalloc.c vstart < va->va_start) {
va 809 mm/vmalloc.c if (is_within_this_va(va, size, align, vstart))
va 810 mm/vmalloc.c return va;
va 828 mm/vmalloc.c va = rb_entry(node, struct vmap_area, rb_node);
va 829 mm/vmalloc.c if (is_within_this_va(va, size, align, vstart))
va 830 mm/vmalloc.c return va;
va 833 mm/vmalloc.c vstart <= va->va_start) {
va 851 mm/vmalloc.c struct vmap_area *va;
va 853 mm/vmalloc.c list_for_each_entry(va, &free_vmap_area_list, list) {
va 854 mm/vmalloc.c if (!is_within_this_va(va, size, align, vstart))
va 857 mm/vmalloc.c return va;
va 891 mm/vmalloc.c classify_va_fit_type(struct vmap_area *va,
va 897 mm/vmalloc.c if (nva_start_addr < va->va_start ||
va 898 mm/vmalloc.c nva_start_addr + size > va->va_end)
va 902 mm/vmalloc.c if (va->va_start == nva_start_addr) {
va 903 mm/vmalloc.c if (va->va_end == nva_start_addr + size)
va 907 mm/vmalloc.c } else if (va->va_end == nva_start_addr + size) {
va 917 mm/vmalloc.c adjust_va_to_fit_type(struct vmap_area *va,
va 931 mm/vmalloc.c unlink_va(va, &free_vmap_area_root);
va 932 mm/vmalloc.c kmem_cache_free(vmap_area_cachep, va);
va 941 mm/vmalloc.c va->va_start += size;
va 950 mm/vmalloc.c va->va_end = nva_start_addr;
va 981 mm/vmalloc.c lva->va_start = va->va_start;
va 987 mm/vmalloc.c va->va_start = nva_start_addr + size;
va 993 mm/vmalloc.c augment_tree_propagate_from(va);
va 996 mm/vmalloc.c insert_vmap_area_augment(lva, &va->rb_node,
va 1012 mm/vmalloc.c struct vmap_area *va;
va 1016 mm/vmalloc.c va = find_vmap_lowest_match(size, align, vstart);
va 1017 mm/vmalloc.c if (unlikely(!va))
va 1020 mm/vmalloc.c if (va->va_start > vstart)
va 1021 mm/vmalloc.c nva_start_addr = ALIGN(va->va_start, align);
va 1030 mm/vmalloc.c type = classify_va_fit_type(va, nva_start_addr, size);
va 1035 mm/vmalloc.c ret = adjust_va_to_fit_type(va, nva_start_addr, size, type);
va 1055 mm/vmalloc.c struct vmap_area *va, *pva;
va 1068 mm/vmalloc.c va = kmem_cache_alloc_node(vmap_area_cachep,
va 1070 mm/vmalloc.c if (unlikely(!va))
va 1077 mm/vmalloc.c kmemleak_scan_area(&va->rb_node, SIZE_MAX, gfp_mask & GFP_RECLAIM_MASK);
va 1115 mm/vmalloc.c va->va_start = addr;
va 1116 mm/vmalloc.c va->va_end = addr + size;
va 1117 mm/vmalloc.c va->vm = NULL;
va 1118 mm/vmalloc.c insert_vmap_area(va, &vmap_area_root, &vmap_area_list);
va 1122 mm/vmalloc.c BUG_ON(!IS_ALIGNED(va->va_start, align));
va 1123 mm/vmalloc.c BUG_ON(va->va_start < vstart);
va 1124 mm/vmalloc.c BUG_ON(va->va_end > vend);
va 1126 mm/vmalloc.c return va;
va 1149 mm/vmalloc.c kmem_cache_free(vmap_area_cachep, va);
va 1165 mm/vmalloc.c static void __free_vmap_area(struct vmap_area *va)
va 1170 mm/vmalloc.c unlink_va(va, &vmap_area_root);
va 1175 mm/vmalloc.c merge_or_add_vmap_area(va,
va 1182 mm/vmalloc.c static void free_vmap_area(struct vmap_area *va)
va 1185 mm/vmalloc.c __free_vmap_area(va);
va 1192 mm/vmalloc.c static void unmap_vmap_area(struct vmap_area *va)
va 1194 mm/vmalloc.c vunmap_page_range(va->va_start, va->va_end);
va 1250 mm/vmalloc.c struct vmap_area *va;
va 1269 mm/vmalloc.c llist_for_each_entry(va, valist, purge_list) {
va 1270 mm/vmalloc.c if (va->va_start < start)
va 1271 mm/vmalloc.c start = va->va_start;
va 1272 mm/vmalloc.c if (va->va_end > end)
va 1273 mm/vmalloc.c end = va->va_end;
va 1280 mm/vmalloc.c llist_for_each_entry_safe(va, n_va, valist, purge_list) {
va 1281 mm/vmalloc.c unsigned long nr = (va->va_end - va->va_start) >> PAGE_SHIFT;
va 1288 mm/vmalloc.c merge_or_add_vmap_area(va,
va 1328 mm/vmalloc.c static void free_vmap_area_noflush(struct vmap_area *va)
va 1333 mm/vmalloc.c unlink_va(va, &vmap_area_root);
va 1336 mm/vmalloc.c nr_lazy = atomic_long_add_return((va->va_end - va->va_start) >>
va 1340 mm/vmalloc.c llist_add(&va->purge_list, &vmap_purge_list);
va 1349 mm/vmalloc.c static void free_unmap_vmap_area(struct vmap_area *va)
va 1351 mm/vmalloc.c flush_cache_vunmap(va->va_start, va->va_end);
va 1352 mm/vmalloc.c unmap_vmap_area(va);
va 1354 mm/vmalloc.c flush_tlb_kernel_range(va->va_start, va->va_end);
va 1356 mm/vmalloc.c free_vmap_area_noflush(va);
va 1361 mm/vmalloc.c struct vmap_area *va;
va 1364 mm/vmalloc.c va = __find_vmap_area(addr);
va 1367 mm/vmalloc.c return va;
va 1407 mm/vmalloc.c struct vmap_area *va;
va 1461 mm/vmalloc.c struct vmap_area *va;
va 1473 mm/vmalloc.c va = alloc_vmap_area(VMAP_BLOCK_SIZE, VMAP_BLOCK_SIZE,
va 1476 mm/vmalloc.c if (IS_ERR(va)) {
va 1478 mm/vmalloc.c return ERR_CAST(va);
va 1484 mm/vmalloc.c free_vmap_area(va);
va 1488 mm/vmalloc.c vaddr = vmap_block_vaddr(va->va_start, 0);
va 1490 mm/vmalloc.c vb->va = va;
va 1499 mm/vmalloc.c vb_idx = addr_to_vb_idx(va->va_start);
va 1520 mm/vmalloc.c vb_idx = addr_to_vb_idx(vb->va->va_start);
va 1526 mm/vmalloc.c free_vmap_area_noflush(vb->va);
va 1604 mm/vmalloc.c vaddr = vmap_block_vaddr(vb->va->va_start, pages_off);
va 1687 mm/vmalloc.c unsigned long va_start = vb->va->va_start;
va 1741 mm/vmalloc.c struct vmap_area *va;
va 1755 mm/vmalloc.c va = find_vmap_area(addr);
va 1756 mm/vmalloc.c BUG_ON(!va);
va 1757 mm/vmalloc.c debug_check_no_locks_freed((void *)va->va_start,
va 1758 mm/vmalloc.c (va->va_end - va->va_start));
va 1759 mm/vmalloc.c free_unmap_vmap_area(va);
va 1790 mm/vmalloc.c struct vmap_area *va;
va 1791 mm/vmalloc.c va = alloc_vmap_area(size, PAGE_SIZE,
va 1793 mm/vmalloc.c if (IS_ERR(va))
va 1796 mm/vmalloc.c addr = va->va_start;
va 1903 mm/vmalloc.c struct vmap_area *va;
va 1926 mm/vmalloc.c va = kmem_cache_zalloc(vmap_area_cachep, GFP_NOWAIT);
va 1927 mm/vmalloc.c if (WARN_ON_ONCE(!va))
va 1930 mm/vmalloc.c va->va_start = (unsigned long)tmp->addr;
va 1931 mm/vmalloc.c va->va_end = va->va_start + tmp->size;
va 1932 mm/vmalloc.c va->vm = tmp;
va 1933 mm/vmalloc.c insert_vmap_area(va, &vmap_area_root, &vmap_area_list);
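Lines 891-950 above (classify_va_fit_type() and adjust_va_to_fit_type()) encode the four ways a request can sit inside a free area: consume it exactly, clip its head, clip its tail, or split it in two. A compact restatement follows, with enum names that describe the cases rather than reusing the kernel's own:

	/* Given a free [va_start, va_end) and a request [addr, addr+size),
	 * decide how the leftover free space is shaped. */
	enum fit {
		NO_FIT,    /* request not fully inside the free range        */
		FULL_FIT,  /* exact match: free area is consumed entirely    */
		LEFT_FIT,  /* request starts at va_start: shrink from left   */
		RIGHT_FIT, /* request ends at va_end: shrink from right      */
		SPLIT_FIT, /* request in the middle: free area splits in two */
	};

	static enum fit classify_fit(unsigned long va_start,
				     unsigned long va_end,
				     unsigned long addr, unsigned long size)
	{
		if (addr < va_start || addr + size > va_end)
			return NO_FIT;
		if (addr == va_start)
			return (addr + size == va_end) ? FULL_FIT : LEFT_FIT;
		if (addr + size == va_end)
			return RIGHT_FIT;
		return SPLIT_FIT;
	}

Only the split case needs a second vmap_area (the lva at line 981), which is why it is the one path that can fail on allocation.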
va 2018 mm/vmalloc.c static void setup_vmalloc_vm(struct vm_struct *vm, struct vmap_area *va,
va 2023 mm/vmalloc.c vm->addr = (void *)va->va_start;
va 2024 mm/vmalloc.c vm->size = va->va_end - va->va_start;
va 2026 mm/vmalloc.c va->vm = vm;
va 2045 mm/vmalloc.c struct vmap_area *va;
va 2064 mm/vmalloc.c va = alloc_vmap_area(size, align, start, end, node, gfp_mask);
va 2065 mm/vmalloc.c if (IS_ERR(va)) {
va 2070 mm/vmalloc.c setup_vmalloc_vm(area, va, flags, caller);
va 2128 mm/vmalloc.c struct vmap_area *va;
va 2130 mm/vmalloc.c va = find_vmap_area((unsigned long)addr);
va 2131 mm/vmalloc.c if (!va)
va 2134 mm/vmalloc.c return va->vm;
va 2149 mm/vmalloc.c struct vmap_area *va;
va 2154 mm/vmalloc.c va = __find_vmap_area((unsigned long)addr);
va 2155 mm/vmalloc.c if (va && va->vm) {
va 2156 mm/vmalloc.c struct vm_struct *vm = va->vm;
va 2158 mm/vmalloc.c va->vm = NULL;
va 2162 mm/vmalloc.c free_unmap_vmap_area(va);
va 2848 mm/vmalloc.c struct vmap_area *va;
va 2859 mm/vmalloc.c list_for_each_entry(va, &vmap_area_list, list) {
va 2863 mm/vmalloc.c if (!va->vm)
va 2866 mm/vmalloc.c vm = va->vm;
va 2927 mm/vmalloc.c struct vmap_area *va;
va 2939 mm/vmalloc.c list_for_each_entry(va, &vmap_area_list, list) {
va 2943 mm/vmalloc.c if (!va->vm)
va 2946 mm/vmalloc.c vm = va->vm;
va 3152 mm/vmalloc.c struct vmap_area *va, *tmp;
va 3156 mm/vmalloc.c va = NULL;
va 3161 mm/vmalloc.c va = tmp;
va 3171 mm/vmalloc.c return va;
va 3184 mm/vmalloc.c pvm_determine_end_from_reverse(struct vmap_area **va, unsigned long align)
va 3189 mm/vmalloc.c if (likely(*va)) {
va 3190 mm/vmalloc.c list_for_each_entry_from_reverse((*va),
va 3192 mm/vmalloc.c addr = min((*va)->va_end & ~(align - 1), vmalloc_end);
va 3193 mm/vmalloc.c if ((*va)->va_start < addr)
va 3231 mm/vmalloc.c struct vmap_area **vas, *va;
va 3285 mm/vmalloc.c va = pvm_find_va_enclose_addr(vmalloc_end);
va 3286 mm/vmalloc.c base = pvm_determine_end_from_reverse(&va, align) - end;
va 3299 mm/vmalloc.c if (va == NULL)
va 3306 mm/vmalloc.c if (base + end > va->va_end) {
va 3307 mm/vmalloc.c base = pvm_determine_end_from_reverse(&va, align) - end;
va 3315 mm/vmalloc.c if (base + start < va->va_start) {
va 3316 mm/vmalloc.c va = node_to_va(rb_prev(&va->rb_node));
va 3317 mm/vmalloc.c base = pvm_determine_end_from_reverse(&va, align) - end;
va 3332 mm/vmalloc.c va = pvm_find_va_enclose_addr(base + end);
va 3342 mm/vmalloc.c va = pvm_find_va_enclose_addr(start);
va 3343 mm/vmalloc.c if (WARN_ON_ONCE(va == NULL))
va 3347 mm/vmalloc.c type = classify_va_fit_type(va, start, size);
va 3352 mm/vmalloc.c ret = adjust_va_to_fit_type(va, start, size, type);
va 3357 mm/vmalloc.c va = vas[area];
va 3358 mm/vmalloc.c va->va_start = start;
va 3359 mm/vmalloc.c va->va_end = start + size;
va 3361 mm/vmalloc.c insert_vmap_area(va, &vmap_area_root, &vmap_area_list);
va 3477 mm/vmalloc.c struct vmap_area *va;
va 3483 mm/vmalloc.c llist_for_each_entry(va, head, purge_list) {
va 3485 mm/vmalloc.c (void *)va->va_start, (void *)va->va_end,
va 3486 mm/vmalloc.c va->va_end - va->va_start);
va 3492 mm/vmalloc.c struct vmap_area *va;
va 3495 mm/vmalloc.c va = list_entry(p, struct vmap_area, list);
va 3501 mm/vmalloc.c if (!va->vm) {
va 3503 mm/vmalloc.c (void *)va->va_start, (void *)va->va_end,
va 3504 mm/vmalloc.c va->va_end - va->va_start);
va 3509 mm/vmalloc.c v = va->vm;
va 3550 mm/vmalloc.c if (list_is_last(&va->list, &vmap_area_list))
va 44 net/9p/mod.c vaf.va = &args;
va 1787 net/ax25/af_ax25.c ax25_info.va = ax25->va;
va 1899 net/ax25/af_ax25.c ax25->vs, ax25->vr, ax25->va,
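setup_vmalloc_vm() at line 2018 above is small enough to restate whole: it publishes an allocated vmap_area through a vm_struct view and back-links the two, which is what the later va->vm lookups (find_vmap_area() callers, /proc/vmallocinfo) rely on. Struct layouts below are pared down to just the fields the listing shows:

	/* Illustrative, minimal mirror of the two structures involved. */
	struct vm_struct_min {
		void *addr;
		unsigned long size;
	};

	struct vmap_area_min {
		unsigned long va_start, va_end;
		struct vm_struct_min *vm;
	};

	static void setup_vm(struct vm_struct_min *vm, struct vmap_area_min *va)
	{
		vm->addr = (void *)va->va_start;
		vm->size = va->va_end - va->va_start;
		va->vm = vm;   /* back-link: area knows its public view */
	}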
va 57 net/ax25/ax25_ds_in.c ax25->va = 0;
va 165 net/ax25/ax25_ds_in.c ax25->va = 0;
va 204 net/ax25/ax25_ds_in.c if (ax25->va != nr)
va 256 net/ax25/ax25_out.c start = (skb_peek(&ax25->ack_queue) == NULL) ? ax25->va : ax25->vs;
va 257 net/ax25/ax25_out.c end = (ax25->va + ax25->window) % ax25->modulus;
va 385 net/ax25/ax25_out.c if (ax25->va != nr) {
va 65 net/ax25/ax25_std_in.c ax25->va = 0;
va 162 net/ax25/ax25_std_in.c ax25->va = 0;
va 287 net/ax25/ax25_std_in.c ax25->va = 0;
va 314 net/ax25/ax25_std_in.c if (ax25->vs == ax25->va) {
va 343 net/ax25/ax25_std_in.c if (ax25->vs == ax25->va) {
va 53 net/ax25/ax25_subr.c if (ax25->va != nr) {
va 54 net/ax25/ax25_subr.c while (skb_peek(&ax25->ack_queue) != NULL && ax25->va != nr) {
va 57 net/ax25/ax25_subr.c ax25->va = (ax25->va + 1) % ax25->modulus;
va 81 net/ax25/ax25_subr.c unsigned short vc = ax25->va;
va 232 net/batman-adv/log.c vaf.va = &args;
va 53 net/batman-adv/trace.h *vaf->va) >= BATADV_MAX_MSG_LEN);
va 146 net/bluetooth/lib.c vaf.va = &args;
va 162 net/bluetooth/lib.c vaf.va = &args;
va 178 net/bluetooth/lib.c vaf.va = &args;
va 194 net/bluetooth/lib.c vaf.va = &args;
va 10006 net/core/dev.c vaf.va = &args;
va 10023 net/core/dev.c vaf.va = &args; \
va 63 net/lapb/lapb_in.c lapb->va = 0;
va 83 net/lapb/lapb_in.c lapb->va = 0;
va 161 net/lapb/lapb_in.c lapb->va = 0;
va 272 net/lapb/lapb_in.c lapb->va = 0;
va 290 net/lapb/lapb_in.c lapb->va = 0;
va 496 net/lapb/lapb_in.c lapb->va = 0;
va 516 net/lapb/lapb_in.c lapb->va = 0;
va 72 net/lapb/lapb_out.c start = !skb_peek(&lapb->ack_queue) ? lapb->va : lapb->vs;
va 73 net/lapb/lapb_out.c end = (lapb->va + lapb->window) % modulus;
va 194 net/lapb/lapb_out.c } else if (lapb->va != nr) {
va 56 net/lapb/lapb_subr.c if (lapb->va != nr)
va 57 net/lapb/lapb_subr.c while (skb_peek(&lapb->ack_queue) && lapb->va != nr) {
va 60 net/lapb/lapb_subr.c lapb->va = (lapb->va + 1) % modulus;
va 88 net/lapb/lapb_subr.c unsigned short vc = lapb->va;
va 23 net/mac80211/trace.c vaf.va = &args;
va 38 net/mac80211/trace.c vaf.va = &args;
va 54 net/mac80211/trace.c vaf.va = &args;
va 69 net/mac80211/trace.c vaf.va = &args;
va 33 net/mac80211/trace_msg.h *vaf->va) >= MAX_MSG_LEN);
va 381 net/netfilter/nf_conntrack_helper.c vaf.va = &args;
va 65 net/netfilter/nf_conntrack_proto.c vaf.va = &args;
va 88 net/netfilter/nf_conntrack_proto.c vaf.va = &args;
va 989 net/netrom/af_netrom.c nr_make->va = 0;
va 1283 net/netrom/af_netrom.c nr->va,
va 83 net/netrom/nr_in.c nr->va = 0;
va 108 net/netrom/nr_out.c skbn->data[2] = nr->va;
va 137 net/netrom/nr_out.c start = (skb_peek(&nr->ack_queue) == NULL) ? nr->va : nr->vs;
va 138 net/netrom/nr_out.c end = (nr->va + nr->window) % NR_MODULUS;
va 265 net/netrom/nr_out.c if (nrom->va != nr) {
va 54 net/netrom/nr_subr.c if (nrom->va != nr) {
va 55 net/netrom/nr_subr.c while (skb_peek(&nrom->ack_queue) != NULL && nrom->va != nr) {
va 58 net/netrom/nr_subr.c nrom->va = (nrom->va + 1) % NR_MODULUS;
va 88 net/netrom/nr_subr.c unsigned short vc = nrom->va;
va 1011 net/rose/af_rose.c make_rose->va = 0;
va 1334 net/rose/af_rose.c rose->va = 0;
va 1405 net/rose/af_rose.c rose->va,
va 46 net/rose/rose_in.c rose->va = 0;
va 114 net/rose/rose_in.c rose->va = 0;
va 132 net/rose/rose_in.c rose->va = 0;
va 154 net/rose/rose_in.c rose->va = 0;
va 174 net/rose/rose_in.c rose->va = 0;
va 224 net/rose/rose_in.c rose->va = 0;
va 60 net/rose/rose_out.c start = (skb_peek(&rose->ack_queue) == NULL) ? rose->va : rose->vs;
va 61 net/rose/rose_out.c end = (rose->va + sysctl_rose_window_size) % ROSE_MODULUS;
va 51 net/rose/rose_subr.c if (rose->va != nr) {
va 52 net/rose/rose_subr.c while (skb_peek(&rose->ack_queue) != NULL && rose->va != nr) {
va 55 net/rose/rose_subr.c rose->va = (rose->va + 1) % ROSE_MODULUS;
va 85 net/rose/rose_subr.c unsigned short vc = rose->va;
va 1161 net/sunrpc/svc.c vaf.va = &args;
va 99 net/x25/x25_in.c x25->va = 0;
va 224 net/x25/x25_in.c x25->va = 0;
va 246 net/x25/x25_in.c x25->va = 0;
va 268 net/x25/x25_in.c x25->va = 0;
va 286 net/x25/x25_in.c x25->va = 0;
va 356 net/x25/x25_in.c x25->va = 0;
va 164 net/x25/x25_out.c start = skb_peek(&x25->ack_queue) ? x25->vs : x25->va;
va 165 net/x25/x25_out.c end = (x25->va + x25->facilities.winsize_out) % modulus;
va 104 net/x25/x25_proc.c x25->va, x25_display_timer(s) / HZ, x25->t2 / HZ,
va 60 net/x25/x25_subr.c if (x25->va != nr)
va 61 net/x25/x25_subr.c while (skb_peek(&x25->ack_queue) && x25->va != nr) {
va 64 net/x25/x25_subr.c x25->va = (x25->va + 1) % modulus;
va 93 net/x25/x25_subr.c unsigned short vc = x25->va;
va 420 samples/mic/mpssd/mpssd.c char *va;
va 425 samples/mic/mpssd/mpssd.c va = mmap(NULL, MIC_DEVICE_PAGE_END + vr_size * num_vq,
va 427 samples/mic/mpssd/mpssd.c if (MAP_FAILED == va) {
va 433 samples/mic/mpssd/mpssd.c set_dp(mic, type, va);
va 434 samples/mic/mpssd/mpssd.c vr0->va = (struct mic_vring *)&va[MIC_DEVICE_PAGE_END];
va 435 samples/mic/mpssd/mpssd.c vr0->info = vr0->va +
va 438 samples/mic/mpssd/mpssd.c MIC_VRING_ENTRIES, vr0->va, MIC_VIRTIO_RING_ALIGN);
va 440 samples/mic/mpssd/mpssd.c __func__, mic->name, vr0->va, vr0->info, vr_size,
va 446 samples/mic/mpssd/mpssd.c vr1->va = (struct mic_vring *)
va 447 samples/mic/mpssd/mpssd.c &va[MIC_DEVICE_PAGE_END + vr_size];
va 448 samples/mic/mpssd/mpssd.c vr1->info = vr1->va + _vring_size(MIC_VRING_ENTRIES,
va 451 samples/mic/mpssd/mpssd.c MIC_VRING_ENTRIES, vr1->va, MIC_VIRTIO_RING_ALIGN);
va 453 samples/mic/mpssd/mpssd.c __func__, mic->name, vr1->va, vr1->info, vr_size,
va 460 samples/mic/mpssd/mpssd.c return va;
va 1266 scripts/asn1_compiler.c va_list va;
va 1270 scripts/asn1_compiler.c va_start(va, fmt);
va 1271 scripts/asn1_compiler.c vfprintf(out, fmt, va);
va 1272 scripts/asn1_compiler.c va_end(va);
va 1280 scripts/asn1_compiler.c va_list va;
va 1283 scripts/asn1_compiler.c va_start(va, fmt);
va 1284 scripts/asn1_compiler.c vfprintf(out, fmt, va);
va 1285 scripts/asn1_compiler.c va_end(va);
va 374 scripts/dtc/srcpos.c const char *fmt, va_list va)
va 381 scripts/dtc/srcpos.c vfprintf(stderr, fmt, va);
va 390 scripts/dtc/srcpos.c va_list va;
va 392 scripts/dtc/srcpos.c va_start(va, fmt);
va 393 scripts/dtc/srcpos.c srcpos_verror(pos, prefix, fmt, va);
va 394 scripts/dtc/srcpos.c va_end(va);
va 97 scripts/dtc/srcpos.h const char *fmt, va_list va);
va 44 security/apparmor/capability.c static void audit_cb(struct audit_buffer *ab, void *va)
va 46 security/apparmor/capability.c struct common_audit_data *sa = va;
va 56 security/apparmor/file.c static void file_audit_cb(struct audit_buffer *ab, void *va)
va 58 security/apparmor/file.c struct common_audit_data *sa = va;
va 91 security/apparmor/include/net.h void audit_net_cb(struct audit_buffer *ab, void *va);
va 45 security/apparmor/ipc.c static void audit_ptrace_cb(struct audit_buffer *ab, void *va)
va 47 security/apparmor/ipc.c struct common_audit_data *sa = va;
va 162 security/apparmor/ipc.c static void audit_signal_cb(struct audit_buffer *ab, void *va)
va 164 security/apparmor/ipc.c struct common_audit_data *sa = va;
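The ax25_subr.c, lapb_subr.c, nr_subr.c, rose_subr.c and x25_subr.c hits above are five copies of one sliding-window idiom: on receiving the peer's N(R), pop acknowledged frames off the ack queue and advance V(A), the oldest unacknowledged sequence number, modulo the link's modulus. A distilled sketch, with queue_pop() standing in for the skb_dequeue()/kfree_skb() pair and MODULUS for the per-protocol constant:

	#define MODULUS 8  /* modulo-8 operation, as in standard AX.25/LAPB */

	struct link_state {
		unsigned short va;  /* V(A): oldest unacknowledged frame */
		int queued;         /* frames sitting on the ack queue   */
	};

	static void queue_pop(struct link_state *s)
	{
		s->queued--;  /* stand-in for skb_dequeue() + kfree_skb() */
	}

	/* Advance V(A) up to the peer's N(R), dropping one queued frame
	 * per step, exactly the while-loop shape at ax25_subr.c:54. */
	static void frames_acked(struct link_state *s, unsigned short nr)
	{
		while (s->queued > 0 && s->va != nr) {
			queue_pop(s);
			s->va = (s->va + 1) % MODULUS;
		}
	}

The "va = 0" resets scattered through the *_in.c files are the same variable being cleared on (re)connection, when both ends restart their windows.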
va 262 security/apparmor/lib.c static void aa_audit_perms_cb(struct audit_buffer *ab, void *va)
va 264 security/apparmor/lib.c struct common_audit_data *sa = va;
va 86 security/apparmor/mount.c static void audit_cb(struct audit_buffer *ab, void *va)
va 88 security/apparmor/mount.c struct common_audit_data *sa = va;
va 71 security/apparmor/net.c void audit_net_cb(struct audit_buffer *ab, void *va)
va 73 security/apparmor/net.c struct common_audit_data *sa = va;
va 596 security/apparmor/policy.c static void audit_cb(struct audit_buffer *ab, void *va)
va 598 security/apparmor/policy.c struct common_audit_data *sa = va;
va 77 security/apparmor/policy_unpack.c static void audit_cb(struct audit_buffer *ab, void *va)
va 79 security/apparmor/policy_unpack.c struct common_audit_data *sa = va;
va 30 security/apparmor/resource.c static void audit_cb(struct audit_buffer *ab, void *va)
va 32 security/apparmor/resource.c struct common_audit_data *sa = va;
va 69 sound/core/misc.c vaf.va = &args;
va 48 sound/firewire/cmp.c va_list va;
va 50 sound/firewire/cmp.c va_start(va, fmt);
va 53 sound/firewire/cmp.c c->pcr_index, &(struct va_format){ fmt, &va });
va 54 sound/firewire/cmp.c va_end(va);
va 783 tools/lib/subcmd/parse-options.c static int option__cmp(const void *va, const void *vb)
va 785 tools/lib/subcmd/parse-options.c const struct option *a = va, *b = vb;
va 1058 tools/perf/util/machine.c u64 va = X86_64_CPU_ENTRY_AREA_PER_CPU +
va 1062 tools/perf/util/machine.c .start = va,
va 1063 tools/perf/util/machine.c .end = va + page_size,
va 52 tools/perf/util/syscalltbl.c static int syscallcmp(const void *va, const void *vb)
va 54 tools/perf/util/syscalltbl.c const struct syscall *a = va, *b = vb;
va 79 tools/testing/selftests/kvm/lib/aarch64/ucall.c va_list va;
va 84 tools/testing/selftests/kvm/lib/aarch64/ucall.c va_start(va, nargs);
va 86 tools/testing/selftests/kvm/lib/aarch64/ucall.c uc.args[i] = va_arg(va, uint64_t);
va 87 tools/testing/selftests/kvm/lib/aarch64/ucall.c va_end(va);
va 22 tools/testing/selftests/kvm/lib/s390x/ucall.c va_list va;
va 27 tools/testing/selftests/kvm/lib/s390x/ucall.c va_start(va, nargs);
va 29 tools/testing/selftests/kvm/lib/s390x/ucall.c uc.args[i] = va_arg(va, uint64_t);
va 30 tools/testing/selftests/kvm/lib/s390x/ucall.c va_end(va);
va 24 tools/testing/selftests/kvm/lib/x86_64/ucall.c va_list va;
va 29 tools/testing/selftests/kvm/lib/x86_64/ucall.c va_start(va, nargs);
va 31 tools/testing/selftests/kvm/lib/x86_64/ucall.c uc.args[i] = va_arg(va, uint64_t);
va 32 tools/testing/selftests/kvm/lib/x86_64/ucall.c va_end(va);
va 55 tools/testing/selftests/proc/proc-self-map-files-002.c unsigned long va;
va 64 tools/testing/selftests/proc/proc-self-map-files-002.c for (va = 0; va < va_max; va += PAGE_SIZE) {
va 65 tools/testing/selftests/proc/proc-self-map-files-002.c p = mmap((void *)va, PAGE_SIZE, PROT_NONE, MAP_PRIVATE|MAP_FILE|MAP_FIXED, fd, 0);
va 66 tools/testing/selftests/proc/proc-self-map-files-002.c if (p == (void *)va)
va 69 tools/testing/selftests/proc/proc-self-map-files-002.c if (va == va_max) {
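The three KVM selftest ucall() implementations listed above (aarch64, s390x, x86_64) share the same varargs loop: copy nargs 64-bit values out of the va_list into a fixed argument array. A self-contained sketch of that loop; MAX_ARGS and fill_ucall() are illustrative names, and struct ucall is trimmed to the one field the listing shows:

	#include <stdarg.h>
	#include <stdint.h>

	#define MAX_ARGS 6  /* illustrative cap, not the selftests' value */

	struct ucall {
		uint64_t args[MAX_ARGS];
	};

	/* Mirror of the loop at e.g. x86_64/ucall.c:29-32: each variadic
	 * argument is consumed as a uint64_t, in order. */
	static void fill_ucall(struct ucall *uc, int nargs, ...)
	{
		va_list va;
		int i;

		if (nargs > MAX_ARGS)
			nargs = MAX_ARGS;
		va_start(va, nargs);
		for (i = 0; i < nargs; ++i)
			uc->args[i] = va_arg(va, uint64_t);
		va_end(va);
	}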