mem               233 arch/alpha/include/asm/atomic.h 	: [mem] "m"(*v), [a] "rI"(a), [u] "rI"((long)u)
mem               265 arch/alpha/include/asm/atomic.h 	: [mem] "m"(*v), [a] "rI"(a), [u] "rI"(u)
mem               294 arch/alpha/include/asm/atomic.h 	: [mem] "m"(*v)
mem                94 arch/alpha/include/uapi/asm/compiler.h #define __kernel_ldbu(mem)	(mem)
mem                95 arch/alpha/include/uapi/asm/compiler.h #define __kernel_ldwu(mem)	(mem)
mem                96 arch/alpha/include/uapi/asm/compiler.h #define __kernel_stb(val,mem)	((mem) = (val))
mem                97 arch/alpha/include/uapi/asm/compiler.h #define __kernel_stw(val,mem)	((mem) = (val))
mem                99 arch/alpha/include/uapi/asm/compiler.h #define __kernel_ldbu(mem)				\
mem               102 arch/alpha/include/uapi/asm/compiler.h 	      ldbu %0,%1" : "=r"(__kir) : "m"(mem));	\
mem               104 arch/alpha/include/uapi/asm/compiler.h #define __kernel_ldwu(mem)				\
mem               107 arch/alpha/include/uapi/asm/compiler.h 	      ldwu %0,%1" : "=r"(__kir) : "m"(mem));	\
mem               109 arch/alpha/include/uapi/asm/compiler.h #define __kernel_stb(val,mem)				\
mem               111 arch/alpha/include/uapi/asm/compiler.h 	   stb %1,%0" : "=m"(mem) : "r"(val))
mem               112 arch/alpha/include/uapi/asm/compiler.h #define __kernel_stw(val,mem)				\
mem               114 arch/alpha/include/uapi/asm/compiler.h 	   stw %1,%0" : "=m"(mem) : "r"(val))
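
A minimal usage sketch (not part of the listing) for the __kernel_ldbu()/__kernel_stw() helpers above: the macros take an lvalue, reduce to a plain access when the compiler targets BWX, and otherwise force ldbu/stw through the inline-asm variants. The helper names read_u8()/write_u16() are made up for illustration.

	static inline unsigned char read_u8(const unsigned char *p)
	{
		return __kernel_ldbu(*(const unsigned char *)p);	/* byte load */
	}

	static inline void write_u16(unsigned short *p, unsigned short val)
	{
		__kernel_stw(val, *(unsigned short *)p);		/* word store */
	}
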
mem              1023 arch/alpha/kernel/core_marvel.c marvel_agp_bind_memory(alpha_agp_info *agp, off_t pg_start, struct agp_memory *mem)
mem              1027 arch/alpha/kernel/core_marvel.c 			  mem->page_count, mem->pages);
mem              1031 arch/alpha/kernel/core_marvel.c marvel_agp_unbind_memory(alpha_agp_info *agp, off_t pg_start, struct agp_memory *mem)
mem              1035 arch/alpha/kernel/core_marvel.c 			    mem->page_count);
mem               292 arch/alpha/kernel/core_mcpcia.c 	struct resource *io, *mem, *hae_mem;
mem               299 arch/alpha/kernel/core_mcpcia.c 	mem = alloc_resource();
mem               316 arch/alpha/kernel/core_mcpcia.c 	mem->start = MCPCIA_DENSE(mid) - MCPCIA_MEM_BIAS;
mem               317 arch/alpha/kernel/core_mcpcia.c 	mem->end = mem->start + 0xffffffff;
mem               318 arch/alpha/kernel/core_mcpcia.c 	mem->name = pci_mem_names[h];
mem               319 arch/alpha/kernel/core_mcpcia.c 	mem->flags = IORESOURCE_MEM;
mem               321 arch/alpha/kernel/core_mcpcia.c 	hae_mem->start = mem->start;
mem               322 arch/alpha/kernel/core_mcpcia.c 	hae_mem->end = mem->start + MCPCIA_MEM_MASK;
mem               328 arch/alpha/kernel/core_mcpcia.c 	if (request_resource(&iomem_resource, mem) < 0)
mem               330 arch/alpha/kernel/core_mcpcia.c 	if (request_resource(mem, hae_mem) < 0)
mem               684 arch/alpha/kernel/core_titan.c titan_agp_bind_memory(alpha_agp_info *agp, off_t pg_start, struct agp_memory *mem)
mem               688 arch/alpha/kernel/core_titan.c 			  mem->page_count, mem->pages);
mem               692 arch/alpha/kernel/core_titan.c titan_agp_unbind_memory(alpha_agp_info *agp, off_t pg_start, struct agp_memory *mem)
mem               696 arch/alpha/kernel/core_titan.c 			    mem->page_count);
mem                52 arch/alpha/kernel/pci_iommu.c 	unsigned long mem = max_low_pfn << PAGE_SHIFT;
mem                53 arch/alpha/kernel/pci_iommu.c 	if (mem < max)
mem                54 arch/alpha/kernel/pci_iommu.c 		max = roundup_pow_of_two(mem);
mem               132 arch/arc/include/asm/entry-compact.h .macro PROLOG_FREEUP_REG	reg, mem
mem               140 arch/arc/include/asm/entry-compact.h .macro PROLOG_RESTORE_REG	reg, mem
mem               172 arch/arm/boot/compressed/atags_to_fdt.c 			if (!atag->u.mem.size)
mem               183 arch/arm/boot/compressed/atags_to_fdt.c 					cpu_to_fdt64(atag->u.mem.start);
mem               185 arch/arm/boot/compressed/atags_to_fdt.c 					cpu_to_fdt64(atag->u.mem.size);
mem               188 arch/arm/boot/compressed/atags_to_fdt.c 					cpu_to_fdt32(atag->u.mem.start);
mem               190 arch/arm/boot/compressed/atags_to_fdt.c 					cpu_to_fdt32(atag->u.mem.size);
mem               368 arch/arm/common/locomo.c __locomo_probe(struct device *me, struct resource *mem, int irq)
mem               384 arch/arm/common/locomo.c 	lchip->phys = mem->start;
mem               392 arch/arm/common/locomo.c 	lchip->base = ioremap(mem->start, PAGE_SIZE);
mem               485 arch/arm/common/locomo.c 	struct resource *mem;
mem               488 arch/arm/common/locomo.c 	mem = platform_get_resource(dev, IORESOURCE_MEM, 0);
mem               489 arch/arm/common/locomo.c 	if (!mem)
mem               495 arch/arm/common/locomo.c 	return __locomo_probe(&dev->dev, mem, irq);
mem               803 arch/arm/common/sa1111.c static int __sa1111_probe(struct device *me, struct resource *mem, int irq)
mem               832 arch/arm/common/sa1111.c 	sachip->phys = mem->start;
mem               839 arch/arm/common/sa1111.c 	sachip->base = ioremap(mem->start, PAGE_SIZE * 2);
mem               914 arch/arm/common/sa1111.c 			sa1111_init_one_child(sachip, mem, &sa1111_devices[i]);
mem              1115 arch/arm/common/sa1111.c 	struct resource *mem;
mem              1118 arch/arm/common/sa1111.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem              1119 arch/arm/common/sa1111.c 	if (!mem)
mem              1125 arch/arm/common/sa1111.c 	return __sa1111_probe(&pdev->dev, mem, irq);
mem               179 arch/arm/common/scoop.c 	struct resource *mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               182 arch/arm/common/scoop.c 	if (!mem)
mem               192 arch/arm/common/scoop.c 	devptr->base = ioremap(mem->start, resource_size(mem));
mem               201 arch/arm/common/scoop.c 	printk("Sharp Scoop Device found at 0x%08x -> 0x%8p\n",(unsigned int)mem->start, devptr->base);
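
The locomo, sa1111 and scoop hits above all follow the same platform-device probe shape: fetch the MEM resource and IRQ, then ioremap the register window. A hedged sketch of that pattern; example_probe() and the chip-specific step are purely illustrative.

	static int example_probe(struct platform_device *pdev)
	{
		struct resource *mem;
		void __iomem *base;
		int irq;

		mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
		if (!mem)
			return -EINVAL;

		irq = platform_get_irq(pdev, 0);
		if (irq < 0)
			return irq;

		base = ioremap(mem->start, resource_size(mem));
		if (!base)
			return -ENOMEM;

		/* ...chip-specific setup using base and irq... */
		return 0;
	}
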
mem               151 arch/arm/include/uapi/asm/setup.h 		struct tag_mem32	mem;
mem                86 arch/arm/kernel/atags_compat.c 	tag->u.mem.size = size;
mem                87 arch/arm/kernel/atags_compat.c 	tag->u.mem.start = start;
mem                43 arch/arm/kernel/atags_parse.c 	struct tag_mem32  mem;
mem                67 arch/arm/kernel/atags_parse.c 	return arm_add_memory(tag->u.mem.start, tag->u.mem.size);
mem               185 arch/arm/kernel/atags_parse.c 	default_tags.mem.start = PHYS_OFFSET;
mem                51 arch/arm/kernel/kgdb.c char *dbg_get_reg(int regno, void *mem, struct pt_regs *regs)
mem                57 arch/arm/kernel/kgdb.c 		memcpy(mem, (void *)regs + dbg_reg_def[regno].offset,
mem                60 arch/arm/kernel/kgdb.c 		memset(mem, 0, dbg_reg_def[regno].size);
mem                64 arch/arm/kernel/kgdb.c int dbg_set_reg(int regno, void *mem, struct pt_regs *regs)
mem                70 arch/arm/kernel/kgdb.c 		memcpy((void *)regs + dbg_reg_def[regno].offset, mem,
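
The dbg_get_reg()/dbg_set_reg() pattern here recurs in the arm64, h8300, hexagon, mips, nios2 and powerpc hits further down. A consolidated sketch of the common shape, mirroring the arm lines above (the per-arch variants add FP/EVR special cases): dbg_reg_def[] maps a register number to an offset inside struct pt_regs, and the helper copies between that slot and the caller's buffer (mem).

	char *dbg_get_reg(int regno, void *mem, struct pt_regs *regs)
	{
		if (regno >= DBG_MAX_REG_NUM || regno < 0)
			return NULL;

		if (dbg_reg_def[regno].offset != -1)
			memcpy(mem, (void *)regs + dbg_reg_def[regno].offset,
			       dbg_reg_def[regno].size);
		else
			memset(mem, 0, dbg_reg_def[regno].size);

		return dbg_reg_def[regno].name;
	}
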
mem                64 arch/arm/kernel/machine_kexec.c 		if (!memblock_is_region_memory(idmap_to_phys(current_segment->mem),
mem                73 arch/arm/kernel/machine_kexec.c 			image->arch.kernel_r2 = current_segment->mem;
mem               330 arch/arm/mach-omap2/cm_common.c 	struct omap_domain_base *mem = NULL;
mem               340 arch/arm/mach-omap2/cm_common.c 			mem = &cm_base;
mem               343 arch/arm/mach-omap2/cm_common.c 			mem = &cm2_base;
mem               345 arch/arm/mach-omap2/cm_common.c 		data->mem = ioremap(res.start, resource_size(&res));
mem               347 arch/arm/mach-omap2/cm_common.c 		if (mem) {
mem               348 arch/arm/mach-omap2/cm_common.c 			mem->pa = res.start + data->offset;
mem               349 arch/arm/mach-omap2/cm_common.c 			mem->va = data->mem + data->offset;
mem               350 arch/arm/mach-omap2/cm_common.c 			mem->offset = data->offset;
mem               382 arch/arm/mach-omap2/cm_common.c 		ret = omap2_clk_provider_init(np, data->index, NULL, data->mem);
mem               728 arch/arm/mach-omap2/control.c 	void __iomem *mem;
mem               771 arch/arm/mach-omap2/control.c 	void __iomem *mem;
mem               776 arch/arm/mach-omap2/control.c 		mem = of_iomap(np, 0);
mem               777 arch/arm/mach-omap2/control.c 		if (!mem)
mem               781 arch/arm/mach-omap2/control.c 			omap2_ctrl_base = mem;
mem               785 arch/arm/mach-omap2/control.c 		data->mem = mem;
mem               831 arch/arm/mach-omap2/control.c 						      data->mem);
mem               235 arch/arm/mach-omap2/dma.c 	struct resource				*mem;
mem               258 arch/arm/mach-omap2/dma.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               259 arch/arm/mach-omap2/dma.c 	if (!mem) {
mem               264 arch/arm/mach-omap2/dma.c 	dma_base = ioremap(mem->start, resource_size(mem));
mem               544 arch/arm/mach-omap2/prcm-common.h 	void __iomem *mem;
mem               758 arch/arm/mach-omap2/prm_common.c 		data->mem = ioremap(res.start, resource_size(&res));
mem               761 arch/arm/mach-omap2/prm_common.c 			prm_base.va = data->mem + data->offset;
mem               801 arch/arm/mach-omap2/prm_common.c 		ret = omap2_clk_provider_init(np, data->index, NULL, data->mem);
mem               384 arch/arm/mach-orion5x/common.c 		    (!t->u.mem.size || t->u.mem.size & ~PAGE_MASK ||
mem               385 arch/arm/mach-orion5x/common.c 		     t->u.mem.start & ~PAGE_MASK)) {
mem               388 arch/arm/mach-orion5x/common.c 			       t->u.mem.size / 1024, t->u.mem.start);
mem               866 arch/arm/mach-pxa/cm-x300.c 			tags->u.mem.start == 0x80000000) {
mem               867 arch/arm/mach-pxa/cm-x300.c 			tags->u.mem.start = 0xa0000000;
mem               267 arch/arm/mach-pxa/corgi.c 	.mem        = &corgi_fb_mem,
mem               866 arch/arm/mach-pxa/eseries.c 	.mem        = &e800_w100_mem_info,
mem               509 arch/arm/mach-pxa/hx4700.c 	.mem       = &w3220_mem_info,
mem               130 arch/arm/mach-rpc/include/mach/uncompress.h 				nr_pages += (t->u.mem.size / PAGE_SIZE);
mem               418 arch/arm/mm/init.c 	struct memblock_region *mem, *res;
mem               421 arch/arm/mm/init.c 	for_each_memblock(memory, mem) {
mem               422 arch/arm/mm/init.c 		unsigned long start = memblock_region_memory_base_pfn(mem);
mem               423 arch/arm/mm/init.c 		unsigned long end = memblock_region_memory_end_pfn(mem);
mem               429 arch/arm/mm/init.c 		if (memblock_is_nomap(mem))
mem                25 arch/arm/mm/pmsa-v7.c static struct region __initdata mem[MPU_MAX_REGIONS];
mem               292 arch/arm/mm/pmsa-v7.c 	memset(mem, 0, sizeof(mem));
mem               293 arch/arm/mm/pmsa-v7.c 	num = allocate_region(mem_start, specified_mem_size, mem_max_regions, mem);
mem               296 arch/arm/mm/pmsa-v7.c 		unsigned long  subreg = mem[i].size / PMSAv7_NR_SUBREGS;
mem               298 arch/arm/mm/pmsa-v7.c 		total_mem_size += mem[i].size - subreg * hweight_long(mem[i].subreg);
mem               301 arch/arm/mm/pmsa-v7.c 			 &mem[i].base, &mem[i].size, PMSAv7_NR_SUBREGS, &mem[i].subreg);
mem               452 arch/arm/mm/pmsa-v7.c 	for (i = 0; i < ARRAY_SIZE(mem); i++) {
mem               453 arch/arm/mm/pmsa-v7.c 		if (!mem[i].size)
mem               456 arch/arm/mm/pmsa-v7.c 		err |= mpu_setup_region(region++, mem[i].base, ilog2(mem[i].size),
mem               458 arch/arm/mm/pmsa-v7.c 					mem[i].subreg, false);
mem                79 arch/arm/mm/pmsa-v8.c static struct range __initdata mem[MPU_MAX_REGIONS];
mem               247 arch/arm/mm/pmsa-v8.c 	add_range(mem,  ARRAY_SIZE(mem), 0,  memblock.memory.regions[0].base,
mem               254 arch/arm/mm/pmsa-v8.c 	subtract_range(mem, ARRAY_SIZE(mem), __pa(KERNEL_START), __pa(KERNEL_END));
mem               259 arch/arm/mm/pmsa-v8.c 	subtract_range(mem, ARRAY_SIZE(mem), CONFIG_XIP_PHYS_ADDR, __pa(_exiprom));
mem               265 arch/arm/mm/pmsa-v8.c 	subtract_range(mem, ARRAY_SIZE(mem),  vectors_base, vectors_base + 2 * PAGE_SIZE);
mem               269 arch/arm/mm/pmsa-v8.c 	for (i = 0; i < ARRAY_SIZE(mem); i++)
mem               270 arch/arm/mm/pmsa-v8.c 		subtract_range(io, ARRAY_SIZE(io), mem[i].start, mem[i].end);
mem               291 arch/arm/mm/pmsa-v8.c 	for (i = 0; i < ARRAY_SIZE(mem); i++) {
mem               292 arch/arm/mm/pmsa-v8.c 		if (!mem[i].end)
mem               295 arch/arm/mm/pmsa-v8.c 		err |= pmsav8_setup_ram(region++, mem[i].start, mem[i].end);
mem              1279 arch/arm/probes/kprobes/test-core.c static void print_memory(u32 *mem, size_t size)
mem              1283 arch/arm/probes/kprobes/test-core.c 		pr_err("%08x %08x %08x %08x\n", mem[i], mem[i+1],
mem              1284 arch/arm/probes/kprobes/test-core.c 						mem[i+2], mem[i+3]);
mem              1434 arch/arm/probes/kprobes/test-core.c 	u32 *mem = 0;
mem              1442 arch/arm/probes/kprobes/test-core.c 		mem = (u32 *)result_regs.ARM_sp;
mem              1443 arch/arm/probes/kprobes/test-core.c 		mem_size = expected_memory_size(mem);
mem              1444 arch/arm/probes/kprobes/test-core.c 		if (memcmp(expected_memory, mem, mem_size)) {
mem              1460 arch/arm/probes/kprobes/test-core.c 	if (mem) {
mem              1464 arch/arm/probes/kprobes/test-core.c 		print_memory(mem, mem_size);
mem              1499 arch/arm/probes/kprobes/test-core.c 		u32 *mem = (u32 *)result_regs.ARM_sp;
mem              1501 arch/arm/probes/kprobes/test-core.c 		memcpy(expected_memory, mem, expected_memory_size(mem));
mem                87 arch/arm64/kernel/kexec_image.c 	kbuf.mem = KEXEC_BUF_MEM_UNKNOWN;
mem               100 arch/arm64/kernel/kexec_image.c 	kernel_segment->mem += text_offset;
mem               102 arch/arm64/kernel/kexec_image.c 	image->start = kernel_segment->mem;
mem               105 arch/arm64/kernel/kexec_image.c 				kernel_segment->mem, kbuf.bufsz,
mem               110 arch/arm64/kernel/kexec_image.c 				kernel_segment->mem, kernel_segment->memsz,
mem               103 arch/arm64/kernel/kgdb.c char *dbg_get_reg(int regno, void *mem, struct pt_regs *regs)
mem               109 arch/arm64/kernel/kgdb.c 		memcpy(mem, (void *)regs + dbg_reg_def[regno].offset,
mem               112 arch/arm64/kernel/kgdb.c 		memset(mem, 0, dbg_reg_def[regno].size);
mem               116 arch/arm64/kernel/kgdb.c int dbg_set_reg(int regno, void *mem, struct pt_regs *regs)
mem               122 arch/arm64/kernel/kgdb.c 		memcpy((void *)regs + dbg_reg_def[regno].offset, mem,
mem                49 arch/arm64/kernel/machine_kexec.c 			kimage->segment[i].mem,
mem                50 arch/arm64/kernel/machine_kexec.c 			kimage->segment[i].mem + kimage->segment[i].memsz,
mem               129 arch/arm64/kernel/machine_kexec.c 			kimage->segment[i].mem,
mem               130 arch/arm64/kernel/machine_kexec.c 			kimage->segment[i].mem + kimage->segment[i].memsz,
mem               134 arch/arm64/kernel/machine_kexec.c 		__flush_dcache_area(phys_to_virt(kimage->segment[i].mem),
mem               284 arch/arm64/kernel/machine_kexec.c 			__phys_to_virt(kexec_crash_image->segment[i].mem),
mem               294 arch/arm64/kernel/machine_kexec.c 			__phys_to_virt(kexec_crash_image->segment[i].mem),
mem               347 arch/arm64/kernel/machine_kexec.c 		if (addr >= kexec_crash_image->segment[i].mem &&
mem               348 arch/arm64/kernel/machine_kexec.c 				addr < (kexec_crash_image->segment[i].mem +
mem               196 arch/arm64/kernel/machine_kexec_file.c 		kbuf.mem = KEXEC_BUF_MEM_UNKNOWN;
mem               207 arch/arm64/kernel/machine_kexec_file.c 		initrd_load_addr = kbuf.mem;
mem               223 arch/arm64/kernel/machine_kexec_file.c 	kbuf.mem = KEXEC_BUF_MEM_UNKNOWN;
mem               234 arch/arm64/kernel/machine_kexec_file.c 	image->arch.dtb_mem = kbuf.mem;
mem               237 arch/arm64/kernel/machine_kexec_file.c 			kbuf.mem, dtb_len, dtb_len);
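
These machine_kexec_file.c hits (and the parisc/powerpc kexec_file hits later in the listing) show the kexec_file placement convention: kbuf.mem starts as KEXEC_BUF_MEM_UNKNOWN, kexec_add_buffer() picks a hole, and the chosen address is read back from kbuf.mem. A hedged sketch; the initrd naming and the load_initrd_example() wrapper are illustrative only.

	static int load_initrd_example(struct kimage *image, void *initrd,
				       unsigned long initrd_len,
				       unsigned long *load_addr)
	{
		struct kexec_buf kbuf = { .image = image, .buf_min = 0,
					  .buf_max = ULONG_MAX };
		int ret;

		kbuf.buffer	= initrd;
		kbuf.bufsz	= initrd_len;
		kbuf.memsz	= initrd_len;
		kbuf.buf_align	= PAGE_SIZE;
		kbuf.mem	= KEXEC_BUF_MEM_UNKNOWN;

		ret = kexec_add_buffer(&kbuf);
		if (ret)
			return ret;

		*load_addr = kbuf.mem;	/* placement chosen by the kexec core */
		return 0;
	}
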
mem               254 arch/arm64/kernel/setup.c 		struct resource *mem = &standard_resources[i];
mem               255 arch/arm64/kernel/setup.c 		phys_addr_t r_start, r_end, mem_size = resource_size(mem);
mem               257 arch/arm64/kernel/setup.c 		if (!memblock_is_region_reserved(mem->start, mem_size))
mem               263 arch/arm64/kernel/setup.c 			start = max(PFN_PHYS(PFN_DOWN(r_start)), mem->start);
mem               264 arch/arm64/kernel/setup.c 			end = min(PFN_PHYS(PFN_UP(r_end)) - 1, mem->end);
mem               266 arch/arm64/kernel/setup.c 			if (start > mem->end || end < mem->start)
mem               269 arch/arm64/kernel/setup.c 			reserve_region_with_split(mem, start, end, "reserved");
mem                35 arch/h8300/kernel/kgdb.c char *dbg_get_reg(int regno, void *mem, struct pt_regs *regs)
mem                45 arch/h8300/kernel/kgdb.c 		*(u32 *)mem = *(u16 *)((void *)regs +
mem                50 arch/h8300/kernel/kgdb.c 			memcpy(mem, (void *)regs + dbg_reg_def[regno].offset,
mem                53 arch/h8300/kernel/kgdb.c 			memset(mem, 0, dbg_reg_def[regno].size);
mem                59 arch/h8300/kernel/kgdb.c int dbg_set_reg(int regno, void *mem, struct pt_regs *regs)
mem                70 arch/h8300/kernel/kgdb.c 			 dbg_reg_def[regno].offset) = *(u32 *)mem;
mem                73 arch/h8300/kernel/kgdb.c 		memcpy((void *)regs + dbg_reg_def[regno].offset, mem,
mem                78 arch/hexagon/kernel/kgdb.c char *dbg_get_reg(int regno, void *mem, struct pt_regs *regs)
mem                83 arch/hexagon/kernel/kgdb.c 	*((unsigned long *) mem) = *((unsigned long *) ((void *)regs +
mem                89 arch/hexagon/kernel/kgdb.c int dbg_set_reg(int regno, void *mem, struct pt_regs *regs)
mem                95 arch/hexagon/kernel/kgdb.c 		*((unsigned long *) mem);
mem               100 arch/ia64/kernel/minstate.h .mem.offset 0,0; st8.spill [r16]=r8,16;								\
mem               101 arch/ia64/kernel/minstate.h .mem.offset 8,0; st8.spill [r17]=r9,16;								\
mem               103 arch/ia64/kernel/minstate.h .mem.offset 0,0; st8.spill [r16]=r10,24;							\
mem               104 arch/ia64/kernel/minstate.h .mem.offset 8,0; st8.spill [r17]=r11,24;							\
mem               130 arch/ia64/kernel/minstate.h .mem.offset 0,0; st8.spill [r16]=r20,16;	/* save original r1 */				\
mem               131 arch/ia64/kernel/minstate.h .mem.offset 8,0; st8.spill [r17]=r12,16;							\
mem               134 arch/ia64/kernel/minstate.h .mem.offset 0,0; st8.spill [r16]=r13,16;							\
mem               135 arch/ia64/kernel/minstate.h .mem.offset 8,0; st8.spill [r17]=r21,16;	/* save ar.fpsr */				\
mem               138 arch/ia64/kernel/minstate.h .mem.offset 0,0; st8.spill [r16]=r15,16;							\
mem               139 arch/ia64/kernel/minstate.h .mem.offset 8,0; st8.spill [r17]=r14,16;							\
mem               141 arch/ia64/kernel/minstate.h .mem.offset 0,0; st8.spill [r16]=r2,16;								\
mem               142 arch/ia64/kernel/minstate.h .mem.offset 8,0; st8.spill [r17]=r3,16;								\
mem               168 arch/ia64/kernel/minstate.h .mem.offset 0,0; st8.spill [r2]=r16,16;		\
mem               169 arch/ia64/kernel/minstate.h .mem.offset 8,0; st8.spill [r3]=r17,16;		\
mem               171 arch/ia64/kernel/minstate.h .mem.offset 0,0; st8.spill [r2]=r18,16;		\
mem               172 arch/ia64/kernel/minstate.h .mem.offset 8,0; st8.spill [r3]=r19,16;		\
mem               174 arch/ia64/kernel/minstate.h .mem.offset 0,0; st8.spill [r2]=r20,16;		\
mem               175 arch/ia64/kernel/minstate.h .mem.offset 8,0; st8.spill [r3]=r21,16;		\
mem               178 arch/ia64/kernel/minstate.h .mem.offset 0,0; st8.spill [r2]=r22,16;		\
mem               179 arch/ia64/kernel/minstate.h .mem.offset 8,0; st8.spill [r3]=r23,16;		\
mem               182 arch/ia64/kernel/minstate.h .mem.offset 0,0; st8.spill [r2]=r24,16;		\
mem               183 arch/ia64/kernel/minstate.h .mem.offset 8,0; st8.spill [r3]=r25,16;		\
mem               185 arch/ia64/kernel/minstate.h .mem.offset 0,0; st8.spill [r2]=r26,16;		\
mem               186 arch/ia64/kernel/minstate.h .mem.offset 8,0; st8.spill [r3]=r27,16;		\
mem               188 arch/ia64/kernel/minstate.h .mem.offset 0,0; st8.spill [r2]=r28,16;		\
mem               189 arch/ia64/kernel/minstate.h .mem.offset 8,0; st8.spill [r3]=r29,16;		\
mem               191 arch/ia64/kernel/minstate.h .mem.offset 0,0; st8.spill [r2]=r30,16;		\
mem               192 arch/ia64/kernel/minstate.h .mem.offset 8,0; st8.spill [r3]=r31,32;		\
mem               503 arch/m68k/kernel/setup_mm.c 	unsigned long mem;
mem               512 arch/m68k/kernel/setup_mm.c 	for (mem = 0, i = 0; i < m68k_num_memory; i++)
mem               513 arch/m68k/kernel/setup_mm.c 		mem += m68k_memory[i].size;
mem               514 arch/m68k/kernel/setup_mm.c 	seq_printf(m, "System Memory:\t%ldK\n", mem >> 10);
mem               462 arch/m68k/kernel/sys_m68k.c 		      unsigned long __user * mem)
mem               474 arch/m68k/kernel/sys_m68k.c 		pgd = pgd_offset(mm, (unsigned long)mem);
mem               477 arch/m68k/kernel/sys_m68k.c 		pmd = pmd_offset(pgd, (unsigned long)mem);
mem               480 arch/m68k/kernel/sys_m68k.c 		pte = pte_offset_map_lock(mm, pmd, (unsigned long)mem, &ptl);
mem               491 arch/m68k/kernel/sys_m68k.c 		__get_user(mem_value, mem);
mem               493 arch/m68k/kernel/sys_m68k.c 			__put_user(newval, mem);
mem               511 arch/m68k/kernel/sys_m68k.c 			if (do_page_fault(fp, (unsigned long)mem, 3))
mem               535 arch/m68k/kernel/sys_m68k.c 		      unsigned long __user * mem)
mem               542 arch/m68k/kernel/sys_m68k.c 	mem_value = *mem;
mem               544 arch/m68k/kernel/sys_m68k.c 		*mem = newval;
mem                57 arch/mips/bcm47xx/prom.c 	unsigned long mem;
mem                81 arch/mips/bcm47xx/prom.c 	for (mem = 1 << 20; mem < max; mem += 1 << 20) {
mem                83 arch/mips/bcm47xx/prom.c 		if (off + mem >= max) {
mem                84 arch/mips/bcm47xx/prom.c 			mem = max;
mem                88 arch/mips/bcm47xx/prom.c 		if (!memcmp(prom_init, prom_init + mem, 32))
mem                91 arch/mips/bcm47xx/prom.c 	lowmem = mem;
mem                98 arch/mips/bcm47xx/prom.c 	if (c->cputype == CPU_74K && (mem == (128  << 20)))
mem                99 arch/mips/bcm47xx/prom.c 		mem -= 0x1000;
mem               100 arch/mips/bcm47xx/prom.c 	add_memory_region(0, mem, BOOT_MEM_RAM);
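
A hedged consolidation of the RAM-sizing loop these bcm47xx prom.c hits come from (mem, max, off and prom_init are the variables visible in the lines above): memory is probed in 1 MiB steps until the start of the kernel image begins to alias, which marks the top of usable low memory, and the detected size is then registered.

	for (mem = 1 << 20; mem < max; mem += 1 << 20) {
		if (off + mem >= max) {		/* hit the probe limit */
			mem = max;
			break;
		}
		/* kernel image repeats here -> address space wrapped */
		if (!memcmp(prom_init, prom_init + mem, 32))
			break;
	}
	add_memory_region(0, mem, BOOT_MEM_RAM);
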
mem               456 arch/mips/boot/tools/relocs.c 		void *mem = realloc(r->offset, newsize * sizeof(r->offset[0]));
mem               458 arch/mips/boot/tools/relocs.c 		if (!mem)
mem               461 arch/mips/boot/tools/relocs.c 		r->offset = mem;
mem               193 arch/mips/cavium-octeon/dma-octeon.c 	struct memblock_region *mem;
mem               202 arch/mips/cavium-octeon/dma-octeon.c 	for_each_memblock(memory, mem) {
mem               204 arch/mips/cavium-octeon/dma-octeon.c 		if (mem->base > 0x410000000ull && !OCTEON_IS_OCTEON2())
mem               207 arch/mips/cavium-octeon/dma-octeon.c 		addr_size += mem->size;
mem               209 arch/mips/cavium-octeon/dma-octeon.c 		if (max_addr < mem->base + mem->size)
mem               210 arch/mips/cavium-octeon/dma-octeon.c 			max_addr = mem->base + mem->size;
mem               134 arch/mips/cavium-octeon/executive/cvmx-boot-vector.c static void cvmx_boot_vector_init(void *mem)
mem               139 arch/mips/cavium-octeon/executive/cvmx-boot-vector.c 	memset(mem, 0, VECTOR_TABLE_SIZE);
mem               140 arch/mips/cavium-octeon/executive/cvmx-boot-vector.c 	kseg0_mem = cvmx_ptr_to_phys(mem) | 0x8000000000000000ull;
mem               176 arch/mips/cavium-octeon/setup.c 						image->segment[i].mem + offt;
mem               231 arch/mips/cavium-octeon/setup.c 				kimage_ptr->segment[i].mem - PAGE_SIZE,
mem               929 arch/mips/cavium-octeon/setup.c static __init void memory_exclude_page(u64 addr, u64 *mem, u64 *size)
mem               931 arch/mips/cavium-octeon/setup.c 	if (addr > *mem && addr < *mem + *size) {
mem               932 arch/mips/cavium-octeon/setup.c 		u64 inc = addr - *mem;
mem               933 arch/mips/cavium-octeon/setup.c 		add_memory_region(*mem, inc, BOOT_MEM_RAM);
mem               934 arch/mips/cavium-octeon/setup.c 		*mem += inc;
mem               938 arch/mips/cavium-octeon/setup.c 	if (addr == *mem && *size > PAGE_SIZE) {
mem               939 arch/mips/cavium-octeon/setup.c 		*mem += PAGE_SIZE;
mem               318 arch/mips/include/asm/io.h 				    volatile void __iomem *mem)		\
mem               328 arch/mips/include/asm/io.h 	__mem = (void *)__swizzle_addr_##bwlq((unsigned long)(mem));	\
mem               357 arch/mips/include/asm/io.h static inline type pfx##read##bwlq(const volatile void __iomem *mem)	\
mem               362 arch/mips/include/asm/io.h 	__mem = (void *)__swizzle_addr_##bwlq((unsigned long)(mem));	\
mem               522 arch/mips/include/asm/io.h static inline void writes##bwlq(volatile void __iomem *mem,		\
mem               528 arch/mips/include/asm/io.h 		__mem_write##bwlq(*__addr, mem);			\
mem               533 arch/mips/include/asm/io.h static inline void reads##bwlq(volatile void __iomem *mem, void *addr,	\
mem               539 arch/mips/include/asm/io.h 		*__addr = __mem_read##bwlq(mem);			\
mem                29 arch/mips/include/asm/mach-cavium-octeon/kernel-entry-init.h 	# Read the cavium mem control register
mem                34 arch/mips/include/asm/mach-cavium-octeon/kernel-entry-init.h 	dmtc0	v0, CP0_CVMMEMCTL_REG	# Write the cavium mem control register
mem               107 arch/mips/include/asm/mach-jazz/floppy.h 	unsigned long mem;
mem               109 arch/mips/include/asm/mach-jazz/floppy.h 	mem = __get_dma_pages(GFP_KERNEL, get_order(size));
mem               110 arch/mips/include/asm/mach-jazz/floppy.h 	if(!mem)
mem               112 arch/mips/include/asm/mach-jazz/floppy.h 	vdma_alloc(CPHYSADDR(mem), size);	/* XXX error checking */
mem               114 arch/mips/include/asm/mach-jazz/floppy.h 	return mem;
mem                43 arch/mips/include/asm/octeon/cvmx-rnm-defs.h 		uint64_t mem:1;
mem                45 arch/mips/include/asm/octeon/cvmx-rnm-defs.h 		uint64_t mem:1;
mem               131 arch/mips/kernel/kgdb.c int dbg_set_reg(int regno, void *mem, struct pt_regs *regs)
mem               139 arch/mips/kernel/kgdb.c 		memcpy((void *)regs + dbg_reg_def[regno].offset, mem,
mem               147 arch/mips/kernel/kgdb.c 			memcpy((void *)&current->thread.fpu.fcr31, mem,
mem               155 arch/mips/kernel/kgdb.c 		memcpy((void *)&current->thread.fpu.fpr[fp_reg], mem,
mem               164 arch/mips/kernel/kgdb.c char *dbg_get_reg(int regno, void *mem, struct pt_regs *regs)
mem               173 arch/mips/kernel/kgdb.c 		memcpy(mem, (void *)regs + dbg_reg_def[regno].offset,
mem               182 arch/mips/kernel/kgdb.c 			memcpy(mem, (void *)&current->thread.fpu.fcr31,
mem               187 arch/mips/kernel/kgdb.c 			memset(mem, 0, dbg_reg_def[regno].size);
mem               191 arch/mips/kernel/kgdb.c 		memcpy(mem, (void *)&current->thread.fpu.fpr[fp_reg],
mem                46 arch/mips/kernel/machine_kexec.c 			kimage->segment[i].mem,
mem                47 arch/mips/kernel/machine_kexec.c 			kimage->segment[i].mem + kimage->segment[i].memsz,
mem                78 arch/mips/kernel/machine_kexec.c 			phys_to_virt((unsigned long)kimage->segment[i].mem);
mem               300 arch/mips/kernel/setup.c 	struct memblock_region *mem;
mem               338 arch/mips/kernel/setup.c 	for_each_memblock(memory, mem) {
mem               339 arch/mips/kernel/setup.c 		unsigned long start = memblock_region_memory_base_pfn(mem);
mem               340 arch/mips/kernel/setup.c 		unsigned long end = memblock_region_memory_end_pfn(mem);
mem               348 arch/mips/kernel/setup.c 		if (memblock_is_nomap(mem))
mem               458 arch/mips/kernel/setup.c 	struct memblock_region *mem;
mem               462 arch/mips/kernel/setup.c 	 for_each_memblock(memory, mem) {
mem               463 arch/mips/kernel/setup.c 		unsigned long start = mem->base;
mem               464 arch/mips/kernel/setup.c 		unsigned long end = start + mem->size;
mem               225 arch/mips/kvm/mips.c 				   const struct kvm_userspace_memory_region *mem,
mem               232 arch/mips/kvm/mips.c 				   const struct kvm_userspace_memory_region *mem,
mem               240 arch/mips/kvm/mips.c 		  __func__, kvm, mem->slot, mem->guest_phys_addr,
mem               241 arch/mips/kvm/mips.c 		  mem->memory_size, mem->userspace_addr);
mem                71 arch/mips/netlogic/xlp/setup.c 	struct memblock_region *mem;
mem                73 arch/mips/netlogic/xlp/setup.c 	for_each_memblock(memory, mem) {
mem                74 arch/mips/netlogic/xlp/setup.c 		memblock_remove(mem->base + mem->size - pref_backup,
mem                84 arch/mips/pci/pci-lantiq.c 	u32 mem, bar11mask;
mem                87 arch/mips/pci/pci-lantiq.c 	mem = get_num_physpages() * PAGE_SIZE;
mem                88 arch/mips/pci/pci-lantiq.c 	bar11mask = (0x0ffffff0 & ~((1 << (fls(mem) - 1)) - 1)) | 8;
mem               468 arch/mips/pci/pci-xtalk-bridge.c 	pci_add_resource_offset(&host->windows, &bd->mem, bd->mem_offset);
mem                92 arch/mips/pci/pcie-octeon.c 	} mem;
mem               143 arch/mips/pci/pcie-octeon.c 	pcie_addr.mem.upper = 0;
mem               144 arch/mips/pci/pcie-octeon.c 	pcie_addr.mem.io = 1;
mem               145 arch/mips/pci/pcie-octeon.c 	pcie_addr.mem.did = 3;
mem               146 arch/mips/pci/pcie-octeon.c 	pcie_addr.mem.subdid = 3 + pcie_port;
mem                54 arch/mips/sgi-ip22/ip22-mc.c 	struct mem bank[4];
mem                55 arch/mips/sgi-ip22/ip22-mc.c 	struct mem space[2] = {{SGIMC_SEG0_BADDR, 0}, {SGIMC_SEG1_BADDR, 0}};
mem                48 arch/mips/sgi-ip27/ip27-xtalk.c 	bd->mem.name	= "Bridge PCI MEM";
mem                49 arch/mips/sgi-ip27/ip27-xtalk.c 	bd->mem.start	= offset + (widget << SWIN_SIZE_BITS);
mem                50 arch/mips/sgi-ip27/ip27-xtalk.c 	bd->mem.end	= bd->mem.start + SWIN_SIZE - 1;
mem                51 arch/mips/sgi-ip27/ip27-xtalk.c 	bd->mem.flags	= IORESOURCE_MEM;
mem                71 arch/nios2/kernel/kgdb.c char *dbg_get_reg(int regno, void *mem, struct pt_regs *regs)
mem                77 arch/nios2/kernel/kgdb.c 		memcpy(mem, (void *)regs + dbg_reg_def[regno].offset,
mem                80 arch/nios2/kernel/kgdb.c 		memset(mem, 0, dbg_reg_def[regno].size);
mem                85 arch/nios2/kernel/kgdb.c int dbg_set_reg(int regno, void *mem, struct pt_regs *regs)
mem                91 arch/nios2/kernel/kgdb.c 		memcpy((void *)regs + dbg_reg_def[regno].offset, mem,
mem                25 arch/parisc/kernel/kexec.c 			kimage->segment[n].mem,
mem                26 arch/parisc/kernel/kexec.c 			kimage->segment[n].mem + kimage->segment[n].memsz,
mem                39 arch/parisc/kernel/kexec_file.c 		image->segment[i].mem = __pa(image->segment[i].mem);
mem                49 arch/parisc/kernel/kexec_file.c 		kbuf.mem = KEXEC_BUF_MEM_UNKNOWN;
mem                54 arch/parisc/kernel/kexec_file.c 		pr_debug("Loaded initrd at 0x%lx\n", kbuf.mem);
mem                55 arch/parisc/kernel/kexec_file.c 		image->arch.initrd_start = kbuf.mem;
mem                56 arch/parisc/kernel/kexec_file.c 		image->arch.initrd_end = kbuf.mem + initrd_len;
mem                66 arch/parisc/kernel/kexec_file.c 		kbuf.mem = KEXEC_BUF_MEM_UNKNOWN;
mem                71 arch/parisc/kernel/kexec_file.c 		pr_debug("Loaded cmdline at 0x%lx\n", kbuf.mem);
mem                72 arch/parisc/kernel/kexec_file.c 		image->arch.cmdline = kbuf.mem;
mem               122 arch/powerpc/boot/cuboot-pq2.c 	struct pci_range *mem = NULL, *mmio = NULL,
mem               162 arch/powerpc/boot/cuboot-pq2.c 			mem = &pci_ranges_buf[i];
mem               169 arch/powerpc/boot/cuboot-pq2.c 	if (!mem || !mmio || !io)
mem               171 arch/powerpc/boot/cuboot-pq2.c 	if (mem->size[1] != mmio->size[1])
mem               173 arch/powerpc/boot/cuboot-pq2.c 	if (mem->size[1] & (mem->size[1] - 1))
mem               178 arch/powerpc/boot/cuboot-pq2.c 	if (mem->phys_addr + mem->size[1] == mmio->phys_addr)
mem               179 arch/powerpc/boot/cuboot-pq2.c 		mem_base = mem;
mem               180 arch/powerpc/boot/cuboot-pq2.c 	else if (mmio->phys_addr + mmio->size[1] == mem->phys_addr)
mem               186 arch/powerpc/boot/cuboot-pq2.c 	out_be32(&pci_regs[2][0], ~(mem->size[1] + mmio->size[1] - 1));
mem               191 arch/powerpc/boot/cuboot-pq2.c 	out_le32(&pci_regs[0][0], mem->pci_addr[1] >> 12);
mem               192 arch/powerpc/boot/cuboot-pq2.c 	out_le32(&pci_regs[0][2], mem->phys_addr >> 12);
mem               193 arch/powerpc/boot/cuboot-pq2.c 	out_le32(&pci_regs[0][4], (~(mem->size[1] - 1) >> 12) | 0xa0000000);
mem               101 arch/powerpc/boot/wii.c 	void *mem;
mem               107 arch/powerpc/boot/wii.c 	mem = finddevice("/memory");
mem               108 arch/powerpc/boot/wii.c 	if (!mem)
mem               112 arch/powerpc/boot/wii.c 	len = getprop(mem, "reg", reg, sizeof(reg));
mem               128 arch/powerpc/boot/wii.c 		setprop(mem, "reg", reg, sizeof(reg));
mem                 5 arch/powerpc/include/asm/io-defs.h DEF_PCI_AC_RET(readb, u8, (const PCI_IO_ADDR addr), (addr), mem, addr)
mem                 6 arch/powerpc/include/asm/io-defs.h DEF_PCI_AC_RET(readw, u16, (const PCI_IO_ADDR addr), (addr), mem, addr)
mem                 7 arch/powerpc/include/asm/io-defs.h DEF_PCI_AC_RET(readl, u32, (const PCI_IO_ADDR addr), (addr), mem, addr)
mem                 8 arch/powerpc/include/asm/io-defs.h DEF_PCI_AC_RET(readw_be, u16, (const PCI_IO_ADDR addr), (addr), mem, addr)
mem                 9 arch/powerpc/include/asm/io-defs.h DEF_PCI_AC_RET(readl_be, u32, (const PCI_IO_ADDR addr), (addr), mem, addr)
mem                10 arch/powerpc/include/asm/io-defs.h DEF_PCI_AC_NORET(writeb, (u8 val, PCI_IO_ADDR addr), (val, addr), mem, addr)
mem                11 arch/powerpc/include/asm/io-defs.h DEF_PCI_AC_NORET(writew, (u16 val, PCI_IO_ADDR addr), (val, addr), mem, addr)
mem                12 arch/powerpc/include/asm/io-defs.h DEF_PCI_AC_NORET(writel, (u32 val, PCI_IO_ADDR addr), (val, addr), mem, addr)
mem                13 arch/powerpc/include/asm/io-defs.h DEF_PCI_AC_NORET(writew_be, (u16 val, PCI_IO_ADDR addr), (val, addr), mem, addr)
mem                14 arch/powerpc/include/asm/io-defs.h DEF_PCI_AC_NORET(writel_be, (u32 val, PCI_IO_ADDR addr), (val, addr), mem, addr)
mem                17 arch/powerpc/include/asm/io-defs.h DEF_PCI_AC_RET(readq, u64, (const PCI_IO_ADDR addr), (addr), mem, addr)
mem                18 arch/powerpc/include/asm/io-defs.h DEF_PCI_AC_RET(readq_be, u64, (const PCI_IO_ADDR addr), (addr), mem, addr)
mem                19 arch/powerpc/include/asm/io-defs.h DEF_PCI_AC_NORET(writeq, (u64 val, PCI_IO_ADDR addr), (val, addr), mem, addr)
mem                20 arch/powerpc/include/asm/io-defs.h DEF_PCI_AC_NORET(writeq_be, (u64 val, PCI_IO_ADDR addr), (val, addr), mem, addr)
mem                31 arch/powerpc/include/asm/io-defs.h 		 (a, b, c), mem, a)
mem                33 arch/powerpc/include/asm/io-defs.h 		 (a, b, c), mem, a)
mem                35 arch/powerpc/include/asm/io-defs.h 		 (a, b, c), mem, a)
mem                37 arch/powerpc/include/asm/io-defs.h 		 (a, b, c), mem, a)
mem                39 arch/powerpc/include/asm/io-defs.h 		 (a, b, c), mem, a)
mem                41 arch/powerpc/include/asm/io-defs.h 		 (a, b, c), mem, a)
mem                57 arch/powerpc/include/asm/io-defs.h 		 (a, c, n), mem, a)
mem                59 arch/powerpc/include/asm/io-defs.h 		 (d, s, n), mem, s)
mem                61 arch/powerpc/include/asm/io-defs.h 		 (d, s, n), mem, d)
mem               169 arch/powerpc/include/asm/kvm_ppc.h 				struct kvm_userspace_memory_region *mem);
mem               211 arch/powerpc/include/asm/kvm_ppc.h 				const struct kvm_userspace_memory_region *mem);
mem               213 arch/powerpc/include/asm/kvm_ppc.h 				const struct kvm_userspace_memory_region *mem,
mem               283 arch/powerpc/include/asm/kvm_ppc.h 				     const struct kvm_userspace_memory_region *mem);
mem               285 arch/powerpc/include/asm/kvm_ppc.h 				     const struct kvm_userspace_memory_region *mem,
mem                31 arch/powerpc/include/asm/mmu_context.h 		struct mm_iommu_table_group_mem_t *mem);
mem                40 arch/powerpc/include/asm/mmu_context.h extern long mm_iommu_ua_to_hpa(struct mm_iommu_table_group_mem_t *mem,
mem                42 arch/powerpc/include/asm/mmu_context.h extern long mm_iommu_ua_to_hpa_rm(struct mm_iommu_table_group_mem_t *mem,
mem                47 arch/powerpc/include/asm/mmu_context.h extern long mm_iommu_mapped_inc(struct mm_iommu_table_group_mem_t *mem);
mem                48 arch/powerpc/include/asm/mmu_context.h extern void mm_iommu_mapped_dec(struct mm_iommu_table_group_mem_t *mem);
mem               164 arch/powerpc/include/asm/sstep.h 			     const void *mem, bool cross_endian);
mem               166 arch/powerpc/include/asm/sstep.h 			      const union vsx_reg *reg, void *mem,
mem                43 arch/powerpc/kernel/kexec_elf_64.c 				  .mem = KEXEC_BUF_MEM_UNKNOWN };
mem                61 arch/powerpc/kernel/kexec_elf_64.c 	pr_debug("Loaded purgatory at 0x%lx\n", pbuf.mem);
mem                68 arch/powerpc/kernel/kexec_elf_64.c 		kbuf.mem = KEXEC_BUF_MEM_UNKNOWN;
mem                72 arch/powerpc/kernel/kexec_elf_64.c 		initrd_load_addr = kbuf.mem;
mem               101 arch/powerpc/kernel/kexec_elf_64.c 	kbuf.mem = KEXEC_BUF_MEM_UNKNOWN;
mem               105 arch/powerpc/kernel/kexec_elf_64.c 	fdt_load_addr = kbuf.mem;
mem               321 arch/powerpc/kernel/kgdb.c char *dbg_get_reg(int regno, void *mem, struct pt_regs *regs)
mem               329 arch/powerpc/kernel/kgdb.c 		memcpy(mem, (void *)regs + dbg_reg_def[regno].offset,
mem               336 arch/powerpc/kernel/kgdb.c 			memcpy(mem, &current->thread.evr[regno-32],
mem               340 arch/powerpc/kernel/kgdb.c 		memset(mem, 0, dbg_reg_def[regno].size);
mem               347 arch/powerpc/kernel/kgdb.c int dbg_set_reg(int regno, void *mem, struct pt_regs *regs)
mem               355 arch/powerpc/kernel/kgdb.c 		memcpy((void *)regs + dbg_reg_def[regno].offset, mem,
mem               361 arch/powerpc/kernel/kgdb.c 		memcpy(&current->thread.evr[regno-32], mem,
mem                50 arch/powerpc/kernel/machine_kexec_64.c 		if (image->segment[i].mem < __pa(_end))
mem                64 arch/powerpc/kernel/machine_kexec_64.c 			begin = image->segment[i].mem;
mem               129 arch/powerpc/kernel/machine_kexec_64.c 		flush_icache_range((unsigned long)__va(ranges[i].mem),
mem               130 arch/powerpc/kernel/machine_kexec_64.c 			(unsigned long)__va(ranges[i].mem + ranges[i].memsz));
mem               856 arch/powerpc/kvm/book3s.c 				const struct kvm_userspace_memory_region *mem)
mem               858 arch/powerpc/kvm/book3s.c 	return kvm->arch.kvm_ops->prepare_memory_region(kvm, memslot, mem);
mem               862 arch/powerpc/kvm/book3s.c 				const struct kvm_userspace_memory_region *mem,
mem               867 arch/powerpc/kvm/book3s.c 	kvm->arch.kvm_ops->commit_memory_region(kvm, mem, old, new, change);
mem               369 arch/powerpc/kvm/book3s_64_vio.c 		struct mm_iommu_table_group_mem_t *mem;
mem               372 arch/powerpc/kvm/book3s_64_vio.c 		mem = mm_iommu_lookup(stt->kvm->mm, ua, 1ULL << shift);
mem               373 arch/powerpc/kvm/book3s_64_vio.c 		if (!mem)
mem               376 arch/powerpc/kvm/book3s_64_vio.c 		if (mm_iommu_ua_to_hpa(mem, ua, shift, &hpa))
mem               425 arch/powerpc/kvm/book3s_64_vio.c 	struct mm_iommu_table_group_mem_t *mem = NULL;
mem               432 arch/powerpc/kvm/book3s_64_vio.c 	mem = mm_iommu_lookup(kvm->mm, be64_to_cpu(*pua), pgsize);
mem               433 arch/powerpc/kvm/book3s_64_vio.c 	if (!mem)
mem               436 arch/powerpc/kvm/book3s_64_vio.c 	mm_iommu_mapped_dec(mem);
mem               488 arch/powerpc/kvm/book3s_64_vio.c 	struct mm_iommu_table_group_mem_t *mem;
mem               494 arch/powerpc/kvm/book3s_64_vio.c 	mem = mm_iommu_lookup(kvm->mm, ua, 1ULL << tbl->it_page_shift);
mem               495 arch/powerpc/kvm/book3s_64_vio.c 	if (!mem)
mem               499 arch/powerpc/kvm/book3s_64_vio.c 	if (WARN_ON_ONCE(mm_iommu_ua_to_hpa(mem, ua, tbl->it_page_shift, &hpa)))
mem               502 arch/powerpc/kvm/book3s_64_vio.c 	if (mm_iommu_mapped_inc(mem))
mem               507 arch/powerpc/kvm/book3s_64_vio.c 		mm_iommu_mapped_dec(mem);
mem               125 arch/powerpc/kvm/book3s_64_vio_hv.c 		struct mm_iommu_table_group_mem_t *mem;
mem               128 arch/powerpc/kvm/book3s_64_vio_hv.c 		mem = mm_iommu_lookup_rm(stt->kvm->mm, ua, 1ULL << shift);
mem               129 arch/powerpc/kvm/book3s_64_vio_hv.c 		if (!mem)
mem               132 arch/powerpc/kvm/book3s_64_vio_hv.c 		if (mm_iommu_ua_to_hpa_rm(mem, ua, shift, &hpa))
mem               263 arch/powerpc/kvm/book3s_64_vio_hv.c 	struct mm_iommu_table_group_mem_t *mem = NULL;
mem               271 arch/powerpc/kvm/book3s_64_vio_hv.c 	mem = mm_iommu_lookup_rm(kvm->mm, be64_to_cpu(*pua), pgsize);
mem               272 arch/powerpc/kvm/book3s_64_vio_hv.c 	if (!mem)
mem               275 arch/powerpc/kvm/book3s_64_vio_hv.c 	mm_iommu_mapped_dec(mem);
mem               330 arch/powerpc/kvm/book3s_64_vio_hv.c 	struct mm_iommu_table_group_mem_t *mem;
mem               336 arch/powerpc/kvm/book3s_64_vio_hv.c 	mem = mm_iommu_lookup_rm(kvm->mm, ua, 1ULL << tbl->it_page_shift);
mem               337 arch/powerpc/kvm/book3s_64_vio_hv.c 	if (!mem)
mem               340 arch/powerpc/kvm/book3s_64_vio_hv.c 	if (WARN_ON_ONCE_RM(mm_iommu_ua_to_hpa_rm(mem, ua, tbl->it_page_shift,
mem               344 arch/powerpc/kvm/book3s_64_vio_hv.c 	if (WARN_ON_ONCE_RM(mm_iommu_mapped_inc(mem)))
mem               349 arch/powerpc/kvm/book3s_64_vio_hv.c 		mm_iommu_mapped_dec(mem);
mem               517 arch/powerpc/kvm/book3s_64_vio_hv.c 		struct mm_iommu_table_group_mem_t *mem;
mem               522 arch/powerpc/kvm/book3s_64_vio_hv.c 		mem = mm_iommu_lookup_rm(vcpu->kvm->mm, ua, IOMMU_PAGE_SIZE_4K);
mem               523 arch/powerpc/kvm/book3s_64_vio_hv.c 		if (mem)
mem               524 arch/powerpc/kvm/book3s_64_vio_hv.c 			prereg = mm_iommu_ua_to_hpa_rm(mem, ua,
mem              4479 arch/powerpc/kvm/book3s_hv.c 					const struct kvm_userspace_memory_region *mem)
mem              4485 arch/powerpc/kvm/book3s_hv.c 				const struct kvm_userspace_memory_region *mem,
mem              4490 arch/powerpc/kvm/book3s_hv.c 	unsigned long npages = mem->memory_size >> PAGE_SHIFT;
mem              1908 arch/powerpc/kvm/book3s_pr.c 					const struct kvm_userspace_memory_region *mem)
mem              1914 arch/powerpc/kvm/book3s_pr.c 				const struct kvm_userspace_memory_region *mem,
mem              1817 arch/powerpc/kvm/booke.c 				      const struct kvm_userspace_memory_region *mem)
mem              1823 arch/powerpc/kvm/booke.c 				const struct kvm_userspace_memory_region *mem,
mem               698 arch/powerpc/kvm/powerpc.c 				   const struct kvm_userspace_memory_region *mem,
mem               701 arch/powerpc/kvm/powerpc.c 	return kvmppc_core_prepare_memory_region(kvm, memslot, mem);
mem               705 arch/powerpc/kvm/powerpc.c 				   const struct kvm_userspace_memory_region *mem,
mem               710 arch/powerpc/kvm/powerpc.c 	kvmppc_core_commit_memory_region(kvm, mem, old, new, change);
mem               657 arch/powerpc/lib/sstep.c 		      const void *mem, bool rev)
mem               673 arch/powerpc/lib/sstep.c 		memcpy(reg, mem, size);
mem               683 arch/powerpc/lib/sstep.c 		memcpy(&reg->b[i], mem, read_size);
mem               698 arch/powerpc/lib/sstep.c 				unsigned long v = *(unsigned long *)(mem + 8);
mem               706 arch/powerpc/lib/sstep.c 		wp = mem;
mem               721 arch/powerpc/lib/sstep.c 		hp = mem;
mem               729 arch/powerpc/lib/sstep.c 		bp = mem;
mem               741 arch/powerpc/lib/sstep.c 		       void *mem, bool rev)
mem               765 arch/powerpc/lib/sstep.c 		memcpy(mem, reg, size);
mem               778 arch/powerpc/lib/sstep.c 		memcpy(mem, &reg->b[i], write_size);
mem               780 arch/powerpc/lib/sstep.c 			memcpy(mem + 8, &reg->d[IS_BE], 8);
mem               782 arch/powerpc/lib/sstep.c 			do_byte_reverse(mem, write_size);
mem               784 arch/powerpc/lib/sstep.c 				do_byte_reverse(mem + 8, 8);
mem               789 arch/powerpc/lib/sstep.c 		wp = mem;
mem               797 arch/powerpc/lib/sstep.c 		hp = mem;
mem               805 arch/powerpc/lib/sstep.c 		bp = mem;
mem               821 arch/powerpc/lib/sstep.c 	u8 mem[16];
mem               825 arch/powerpc/lib/sstep.c 	if (!address_ok(regs, ea, size) || copy_mem_in(mem, ea, size, regs))
mem               828 arch/powerpc/lib/sstep.c 	emulate_vsx_load(op, &buf, mem, cross_endian);
mem               853 arch/powerpc/lib/sstep.c 	u8 mem[16];
mem               876 arch/powerpc/lib/sstep.c 	emulate_vsx_store(op, &buf, mem, cross_endian);
mem               877 arch/powerpc/lib/sstep.c 	return  copy_mem_out(mem, ea, size, regs);
mem                60 arch/powerpc/mm/book3s64/iommu_api.c 	struct mm_iommu_table_group_mem_t *mem, *mem2;
mem                73 arch/powerpc/mm/book3s64/iommu_api.c 	mem = kzalloc(sizeof(*mem), GFP_KERNEL);
mem                74 arch/powerpc/mm/book3s64/iommu_api.c 	if (!mem) {
mem                80 arch/powerpc/mm/book3s64/iommu_api.c 		mem->pageshift = __ffs(dev_hpa | (entries << PAGE_SHIFT));
mem                81 arch/powerpc/mm/book3s64/iommu_api.c 		mem->dev_hpa = dev_hpa;
mem                84 arch/powerpc/mm/book3s64/iommu_api.c 	mem->dev_hpa = MM_IOMMU_TABLE_INVALID_HPA;
mem                91 arch/powerpc/mm/book3s64/iommu_api.c 	mem->pageshift = __ffs(ua | (entries << PAGE_SHIFT));
mem                92 arch/powerpc/mm/book3s64/iommu_api.c 	mem->hpas = vzalloc(array_size(entries, sizeof(mem->hpas[0])));
mem                93 arch/powerpc/mm/book3s64/iommu_api.c 	if (!mem->hpas) {
mem                94 arch/powerpc/mm/book3s64/iommu_api.c 		kfree(mem);
mem               108 arch/powerpc/mm/book3s64/iommu_api.c 				mem->hpages + entry, NULL);
mem               126 arch/powerpc/mm/book3s64/iommu_api.c 		struct page *page = mem->hpages[i];
mem               132 arch/powerpc/mm/book3s64/iommu_api.c 		if ((mem->pageshift > PAGE_SHIFT) && PageHuge(page))
mem               134 arch/powerpc/mm/book3s64/iommu_api.c 		mem->pageshift = min(mem->pageshift, pageshift);
mem               139 arch/powerpc/mm/book3s64/iommu_api.c 		mem->hpas[i] = page_to_pfn(page) << PAGE_SHIFT;
mem               143 arch/powerpc/mm/book3s64/iommu_api.c 	atomic64_set(&mem->mapped, 1);
mem               144 arch/powerpc/mm/book3s64/iommu_api.c 	mem->used = 1;
mem               145 arch/powerpc/mm/book3s64/iommu_api.c 	mem->ua = ua;
mem               146 arch/powerpc/mm/book3s64/iommu_api.c 	mem->entries = entries;
mem               161 arch/powerpc/mm/book3s64/iommu_api.c 	list_add_rcu(&mem->next, &mm->context.iommu_group_mem_list);
mem               165 arch/powerpc/mm/book3s64/iommu_api.c 	*pmem = mem;
mem               172 arch/powerpc/mm/book3s64/iommu_api.c 		put_page(mem->hpages[i]);
mem               174 arch/powerpc/mm/book3s64/iommu_api.c 	vfree(mem->hpas);
mem               175 arch/powerpc/mm/book3s64/iommu_api.c 	kfree(mem);
mem               199 arch/powerpc/mm/book3s64/iommu_api.c static void mm_iommu_unpin(struct mm_iommu_table_group_mem_t *mem)
mem               204 arch/powerpc/mm/book3s64/iommu_api.c 	if (!mem->hpas)
mem               207 arch/powerpc/mm/book3s64/iommu_api.c 	for (i = 0; i < mem->entries; ++i) {
mem               208 arch/powerpc/mm/book3s64/iommu_api.c 		if (!mem->hpas[i])
mem               211 arch/powerpc/mm/book3s64/iommu_api.c 		page = pfn_to_page(mem->hpas[i] >> PAGE_SHIFT);
mem               215 arch/powerpc/mm/book3s64/iommu_api.c 		if (mem->hpas[i] & MM_IOMMU_TABLE_GROUP_PAGE_DIRTY)
mem               219 arch/powerpc/mm/book3s64/iommu_api.c 		mem->hpas[i] = 0;
mem               223 arch/powerpc/mm/book3s64/iommu_api.c static void mm_iommu_do_free(struct mm_iommu_table_group_mem_t *mem)
mem               226 arch/powerpc/mm/book3s64/iommu_api.c 	mm_iommu_unpin(mem);
mem               227 arch/powerpc/mm/book3s64/iommu_api.c 	vfree(mem->hpas);
mem               228 arch/powerpc/mm/book3s64/iommu_api.c 	kfree(mem);
mem               233 arch/powerpc/mm/book3s64/iommu_api.c 	struct mm_iommu_table_group_mem_t *mem = container_of(head,
mem               236 arch/powerpc/mm/book3s64/iommu_api.c 	mm_iommu_do_free(mem);
mem               239 arch/powerpc/mm/book3s64/iommu_api.c static void mm_iommu_release(struct mm_iommu_table_group_mem_t *mem)
mem               241 arch/powerpc/mm/book3s64/iommu_api.c 	list_del_rcu(&mem->next);
mem               242 arch/powerpc/mm/book3s64/iommu_api.c 	call_rcu(&mem->rcu, mm_iommu_free);
mem               245 arch/powerpc/mm/book3s64/iommu_api.c long mm_iommu_put(struct mm_struct *mm, struct mm_iommu_table_group_mem_t *mem)
mem               252 arch/powerpc/mm/book3s64/iommu_api.c 	if (mem->used == 0) {
mem               257 arch/powerpc/mm/book3s64/iommu_api.c 	--mem->used;
mem               259 arch/powerpc/mm/book3s64/iommu_api.c 	if (mem->used)
mem               263 arch/powerpc/mm/book3s64/iommu_api.c 	if (atomic_cmpxchg(&mem->mapped, 1, 0) != 1) {
mem               264 arch/powerpc/mm/book3s64/iommu_api.c 		++mem->used;
mem               269 arch/powerpc/mm/book3s64/iommu_api.c 	if (mem->dev_hpa == MM_IOMMU_TABLE_INVALID_HPA)
mem               270 arch/powerpc/mm/book3s64/iommu_api.c 		unlock_entries = mem->entries;
mem               273 arch/powerpc/mm/book3s64/iommu_api.c 	mm_iommu_release(mem);
mem               287 arch/powerpc/mm/book3s64/iommu_api.c 	struct mm_iommu_table_group_mem_t *mem, *ret = NULL;
mem               289 arch/powerpc/mm/book3s64/iommu_api.c 	list_for_each_entry_rcu(mem, &mm->context.iommu_group_mem_list, next) {
mem               290 arch/powerpc/mm/book3s64/iommu_api.c 		if ((mem->ua <= ua) &&
mem               291 arch/powerpc/mm/book3s64/iommu_api.c 				(ua + size <= mem->ua +
mem               292 arch/powerpc/mm/book3s64/iommu_api.c 				 (mem->entries << PAGE_SHIFT))) {
mem               293 arch/powerpc/mm/book3s64/iommu_api.c 			ret = mem;
mem               305 arch/powerpc/mm/book3s64/iommu_api.c 	struct mm_iommu_table_group_mem_t *mem, *ret = NULL;
mem               307 arch/powerpc/mm/book3s64/iommu_api.c 	list_for_each_entry_lockless(mem, &mm->context.iommu_group_mem_list,
mem               309 arch/powerpc/mm/book3s64/iommu_api.c 		if ((mem->ua <= ua) &&
mem               310 arch/powerpc/mm/book3s64/iommu_api.c 				(ua + size <= mem->ua +
mem               311 arch/powerpc/mm/book3s64/iommu_api.c 				 (mem->entries << PAGE_SHIFT))) {
mem               312 arch/powerpc/mm/book3s64/iommu_api.c 			ret = mem;
mem               323 arch/powerpc/mm/book3s64/iommu_api.c 	struct mm_iommu_table_group_mem_t *mem, *ret = NULL;
mem               327 arch/powerpc/mm/book3s64/iommu_api.c 	list_for_each_entry_rcu(mem, &mm->context.iommu_group_mem_list, next) {
mem               328 arch/powerpc/mm/book3s64/iommu_api.c 		if ((mem->ua == ua) && (mem->entries == entries)) {
mem               329 arch/powerpc/mm/book3s64/iommu_api.c 			ret = mem;
mem               330 arch/powerpc/mm/book3s64/iommu_api.c 			++mem->used;
mem               341 arch/powerpc/mm/book3s64/iommu_api.c long mm_iommu_ua_to_hpa(struct mm_iommu_table_group_mem_t *mem,
mem               344 arch/powerpc/mm/book3s64/iommu_api.c 	const long entry = (ua - mem->ua) >> PAGE_SHIFT;
mem               347 arch/powerpc/mm/book3s64/iommu_api.c 	if (entry >= mem->entries)
mem               350 arch/powerpc/mm/book3s64/iommu_api.c 	if (pageshift > mem->pageshift)
mem               353 arch/powerpc/mm/book3s64/iommu_api.c 	if (!mem->hpas) {
mem               354 arch/powerpc/mm/book3s64/iommu_api.c 		*hpa = mem->dev_hpa + (ua - mem->ua);
mem               358 arch/powerpc/mm/book3s64/iommu_api.c 	va = &mem->hpas[entry];
mem               365 arch/powerpc/mm/book3s64/iommu_api.c long mm_iommu_ua_to_hpa_rm(struct mm_iommu_table_group_mem_t *mem,
mem               368 arch/powerpc/mm/book3s64/iommu_api.c 	const long entry = (ua - mem->ua) >> PAGE_SHIFT;
mem               371 arch/powerpc/mm/book3s64/iommu_api.c 	if (entry >= mem->entries)
mem               374 arch/powerpc/mm/book3s64/iommu_api.c 	if (pageshift > mem->pageshift)
mem               377 arch/powerpc/mm/book3s64/iommu_api.c 	if (!mem->hpas) {
mem               378 arch/powerpc/mm/book3s64/iommu_api.c 		*hpa = mem->dev_hpa + (ua - mem->ua);
mem               382 arch/powerpc/mm/book3s64/iommu_api.c 	pa = (void *) vmalloc_to_phys(&mem->hpas[entry]);
mem               393 arch/powerpc/mm/book3s64/iommu_api.c 	struct mm_iommu_table_group_mem_t *mem;
mem               398 arch/powerpc/mm/book3s64/iommu_api.c 	mem = mm_iommu_lookup_rm(mm, ua, PAGE_SIZE);
mem               399 arch/powerpc/mm/book3s64/iommu_api.c 	if (!mem)
mem               402 arch/powerpc/mm/book3s64/iommu_api.c 	if (mem->dev_hpa != MM_IOMMU_TABLE_INVALID_HPA)
mem               405 arch/powerpc/mm/book3s64/iommu_api.c 	entry = (ua - mem->ua) >> PAGE_SHIFT;
mem               406 arch/powerpc/mm/book3s64/iommu_api.c 	va = &mem->hpas[entry];
mem               418 arch/powerpc/mm/book3s64/iommu_api.c 	struct mm_iommu_table_group_mem_t *mem;
mem               421 arch/powerpc/mm/book3s64/iommu_api.c 	list_for_each_entry_rcu(mem, &mm->context.iommu_group_mem_list, next) {
mem               422 arch/powerpc/mm/book3s64/iommu_api.c 		if (mem->dev_hpa == MM_IOMMU_TABLE_INVALID_HPA)
mem               425 arch/powerpc/mm/book3s64/iommu_api.c 		end = mem->dev_hpa + (mem->entries << PAGE_SHIFT);
mem               426 arch/powerpc/mm/book3s64/iommu_api.c 		if ((mem->dev_hpa <= hpa) && (hpa < end)) {
mem               442 arch/powerpc/mm/book3s64/iommu_api.c long mm_iommu_mapped_inc(struct mm_iommu_table_group_mem_t *mem)
mem               444 arch/powerpc/mm/book3s64/iommu_api.c 	if (atomic64_inc_not_zero(&mem->mapped))
mem               452 arch/powerpc/mm/book3s64/iommu_api.c void mm_iommu_mapped_dec(struct mm_iommu_table_group_mem_t *mem)
mem               454 arch/powerpc/mm/book3s64/iommu_api.c 	atomic64_add_unless(&mem->mapped, -1, 1);
mem                69 arch/powerpc/mm/nohash/8xx.c 		unsigned long mem = total_lowmem;
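
The iommu_api.c hits above center on registering pinned user memory and translating a userspace address (ua) into a host physical address. A hedged reconstruction of that translation, based only on the lines listed here; example_ua_to_hpa() is a made-up name and the low-bit masking is simplified relative to the real helper.

	long example_ua_to_hpa(struct mm_iommu_table_group_mem_t *mem,
			       unsigned long ua, unsigned int pageshift,
			       unsigned long *hpa)
	{
		const long entry = (ua - mem->ua) >> PAGE_SHIFT;

		if (entry >= mem->entries)
			return -EFAULT;

		if (pageshift > mem->pageshift)
			return -EFAULT;

		if (!mem->hpas) {
			/* region backed by device memory: plain offset */
			*hpa = mem->dev_hpa + (ua - mem->ua);
			return 0;
		}

		/* pinned-page array holds the page-aligned host address */
		*hpa = (mem->hpas[entry] & PAGE_MASK) | (ua & ~PAGE_MASK);
		return 0;
	}
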
mem                71 arch/powerpc/mm/nohash/8xx.c 		for (; i < 32 && mem >= LARGE_PAGE_SIZE_8M; i++) {
mem                77 arch/powerpc/mm/nohash/8xx.c 			mem -= LARGE_PAGE_SIZE_8M;
mem                88 arch/powerpc/mm/numa.c 	unsigned long long mem;
mem               107 arch/powerpc/mm/numa.c 	mem = memparse(p, &p);
mem               108 arch/powerpc/mm/numa.c 	if (!mem)
mem               111 arch/powerpc/mm/numa.c 	if (mem < curr_boundary)
mem               114 arch/powerpc/mm/numa.c 	curr_boundary = mem;
mem               116 arch/powerpc/mm/numa.c 	if ((end_pfn << PAGE_SHIFT) > mem) {
mem              1157 arch/powerpc/perf/imc-pmu.c static int trace_imc_prepare_sample(struct trace_imc_data *mem,
mem              1164 arch/powerpc/perf/imc-pmu.c 	if (be64_to_cpu(READ_ONCE(mem->tb1)) > *prev_tb)
mem              1165 arch/powerpc/perf/imc-pmu.c 		*prev_tb = be64_to_cpu(READ_ONCE(mem->tb1));
mem              1169 arch/powerpc/perf/imc-pmu.c 	if ((be64_to_cpu(READ_ONCE(mem->tb1)) & IMC_TRACE_RECORD_TB1_MASK) !=
mem              1170 arch/powerpc/perf/imc-pmu.c 			 be64_to_cpu(READ_ONCE(mem->tb2)))
mem              1174 arch/powerpc/perf/imc-pmu.c 	data->ip =  be64_to_cpu(READ_ONCE(mem->ip));
mem              1193 arch/powerpc/perf/imc-pmu.c 	struct trace_imc_data *mem;
mem              1197 arch/powerpc/perf/imc-pmu.c 	mem = (struct trace_imc_data *)get_trace_imc_event_base_addr();
mem              1199 arch/powerpc/perf/imc-pmu.c 		i++, mem++) {
mem              1203 arch/powerpc/perf/imc-pmu.c 		ret = trace_imc_prepare_sample(mem, &data, &prev_tb, &header, event);
mem               160 arch/powerpc/perf/power9-pmu.c GENERIC_EVENT_ATTR(mem-loads,			MEM_LOADS);
mem               161 arch/powerpc/perf/power9-pmu.c GENERIC_EVENT_ATTR(mem-stores,			MEM_STORES);
mem               126 arch/powerpc/platforms/4xx/hsta_msi.c 	struct resource *mem;
mem               130 arch/powerpc/platforms/4xx/hsta_msi.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               131 arch/powerpc/platforms/4xx/hsta_msi.c 	if (!mem) {
mem               143 arch/powerpc/platforms/4xx/hsta_msi.c 	ppc4xx_hsta_msi.address = mem->start;
mem               144 arch/powerpc/platforms/4xx/hsta_msi.c 	ppc4xx_hsta_msi.data = ioremap(mem->start, resource_size(mem));
mem               308 arch/powerpc/platforms/maple/setup.c 	volatile void __iomem *mem;
mem               327 arch/powerpc/platforms/maple/setup.c 	mem = ioremap(r.start, resource_size(&r));
mem               328 arch/powerpc/platforms/maple/setup.c 	if (!mem) {
mem               334 arch/powerpc/platforms/maple/setup.c 	rev = __raw_readl(mem);
mem               335 arch/powerpc/platforms/maple/setup.c 	iounmap(mem);
mem               109 arch/powerpc/platforms/powermac/bootx_init.c #define dt_push_token(token, mem) \
mem               111 arch/powerpc/platforms/powermac/bootx_init.c 		*(mem) = _ALIGN_UP(*(mem),4); \
mem               112 arch/powerpc/platforms/powermac/bootx_init.c 		*((u32 *)*(mem)) = token; \
mem               113 arch/powerpc/platforms/powermac/bootx_init.c 		*(mem) += 4; \
mem                25 arch/powerpc/platforms/powernv/memtrace.c 	void *mem;
mem                44 arch/powerpc/platforms/powernv/memtrace.c 	return simple_read_from_buffer(ubuf, count, ppos, ent->mem, ent->size);
mem                53 arch/powerpc/platforms/powernv/memtrace.c static int check_memblock_online(struct memory_block *mem, void *arg)
mem                55 arch/powerpc/platforms/powernv/memtrace.c 	if (mem->state != MEM_ONLINE)
mem                61 arch/powerpc/platforms/powernv/memtrace.c static int change_memblock_state(struct memory_block *mem, void *arg)
mem                65 arch/powerpc/platforms/powernv/memtrace.c 	mem->state = state;
mem               179 arch/powerpc/platforms/powernv/memtrace.c 		ent->mem = ioremap(ent->start, ent->size);
mem               181 arch/powerpc/platforms/powernv/memtrace.c 		if (!ent->mem) {
mem               205 arch/powerpc/platforms/powernv/memtrace.c static int online_mem_block(struct memory_block *mem, void *arg)
mem               207 arch/powerpc/platforms/powernv/memtrace.c 	return device_online(&mem->dev);
mem               227 arch/powerpc/platforms/powernv/memtrace.c 		if (ent->mem) {
mem               228 arch/powerpc/platforms/powernv/memtrace.c 			iounmap(ent->mem);
mem               229 arch/powerpc/platforms/powernv/memtrace.c 			ent->mem = 0;
mem                70 arch/powerpc/platforms/pseries/ibmebus.c 	void *mem;
mem                72 arch/powerpc/platforms/pseries/ibmebus.c 	mem = kmalloc(size, flag);
mem                73 arch/powerpc/platforms/pseries/ibmebus.c 	*dma_handle = (dma_addr_t)mem;
mem                75 arch/powerpc/platforms/pseries/ibmebus.c 	return mem;
mem               197 arch/powerpc/sysdev/fsl_pci.c 	u64 mem, sz, paddr_hi = 0;
mem               315 arch/powerpc/sysdev/fsl_pci.c 	mem = memblock_end_of_DRAM();
mem               316 arch/powerpc/sysdev/fsl_pci.c 	pr_info("%s: end of DRAM %llx\n", __func__, mem);
mem               334 arch/powerpc/sysdev/fsl_pci.c 		if ((address >= mem) && (address < (mem + PAGE_SIZE))) {
mem               336 arch/powerpc/sysdev/fsl_pci.c 			mem += PAGE_SIZE;
mem               344 arch/powerpc/sysdev/fsl_pci.c 	sz = min(mem, paddr_lo);
mem               350 arch/powerpc/sysdev/fsl_pci.c 		if ((1ull << mem_log) != mem) {
mem               352 arch/powerpc/sysdev/fsl_pci.c 			if ((1ull << mem_log) > mem)
mem               375 arch/powerpc/sysdev/fsl_pci.c 		if (sz != mem) {
mem               376 arch/powerpc/sysdev/fsl_pci.c 			mem_log = ilog2(mem);
mem               379 arch/powerpc/sysdev/fsl_pci.c 			if ((1ull << mem_log) != mem)
mem               438 arch/powerpc/sysdev/fsl_pci.c 	if (hose->dma_window_size < mem) {
mem               120 arch/s390/include/asm/sclp.h int sclp_early_get_memsize(unsigned long *mem);
mem              1724 arch/s390/kernel/ipl.c 	comp->entry.addr = kbuf->mem;
mem                42 arch/s390/kernel/kexec_elf.c 		buf.mem = ALIGN(phdr->p_paddr, phdr->p_align);
mem                44 arch/s390/kernel/kexec_elf.c 			buf.mem += crashk_res.start;
mem                50 arch/s390/kernel/kexec_elf.c 			data->kernel_mem = buf.mem;
mem                26 arch/s390/kernel/kexec_image.c 	buf.mem = 0;
mem                28 arch/s390/kernel/kexec_image.c 		buf.mem += crashk_res.start;
mem                32 arch/s390/kernel/kexec_image.c 	data->kernel_mem = buf.mem;
mem               125 arch/s390/kernel/machine_kexec_file.c 	buf.mem = data->memsz;
mem               127 arch/s390/kernel/machine_kexec_file.c 		buf.mem += crashk_res.start;
mem               149 arch/s390/kernel/machine_kexec_file.c 	buf.mem = data->memsz;
mem               151 arch/s390/kernel/machine_kexec_file.c 		buf.mem += crashk_res.start;
mem               177 arch/s390/kernel/machine_kexec_file.c 	buf.mem = data->memsz;
mem               179 arch/s390/kernel/machine_kexec_file.c 		buf.mem += crashk_res.start;
mem               210 arch/s390/kernel/machine_kexec_file.c 	*lc_ipl_parmblock_ptr = (__u32)buf.mem;
mem              4507 arch/s390/kvm/kvm-s390.c 				   const struct kvm_userspace_memory_region *mem,
mem              4515 arch/s390/kvm/kvm-s390.c 	if (mem->userspace_addr & 0xffffful)
mem              4518 arch/s390/kvm/kvm-s390.c 	if (mem->memory_size & 0xffffful)
mem              4521 arch/s390/kvm/kvm-s390.c 	if (mem->guest_phys_addr + mem->memory_size > kvm->arch.mem_limit)
mem              4528 arch/s390/kvm/kvm-s390.c 				const struct kvm_userspace_memory_region *mem,
mem              4547 arch/s390/kvm/kvm-s390.c 		rc = gmap_map_segment(kvm->arch.gmap, mem->userspace_addr,
mem              4548 arch/s390/kvm/kvm-s390.c 				      mem->guest_phys_addr, mem->memory_size);
mem               806 arch/s390/kvm/priv.c static void handle_stsi_3_2_2(struct kvm_vcpu *vcpu, struct sysinfo_3_2_2 *mem)
mem               814 arch/s390/kvm/priv.c 	if (stsi(mem, 3, 2, 2))
mem               815 arch/s390/kvm/priv.c 		mem->count = 0;
mem               816 arch/s390/kvm/priv.c 	if (mem->count < 8)
mem               817 arch/s390/kvm/priv.c 		mem->count++;
mem               818 arch/s390/kvm/priv.c 	for (n = mem->count - 1; n > 0 ; n--)
mem               819 arch/s390/kvm/priv.c 		memcpy(&mem->vm[n], &mem->vm[n - 1], sizeof(mem->vm[0]));
mem               821 arch/s390/kvm/priv.c 	memset(&mem->vm[0], 0, sizeof(mem->vm[0]));
mem               822 arch/s390/kvm/priv.c 	mem->vm[0].cpus_total = cpus;
mem               823 arch/s390/kvm/priv.c 	mem->vm[0].cpus_configured = cpus;
mem               824 arch/s390/kvm/priv.c 	mem->vm[0].cpus_standby = 0;
mem               825 arch/s390/kvm/priv.c 	mem->vm[0].cpus_reserved = 0;
mem               826 arch/s390/kvm/priv.c 	mem->vm[0].caf = 1000;
mem               827 arch/s390/kvm/priv.c 	memcpy(mem->vm[0].name, "KVMguest", 8);
mem               828 arch/s390/kvm/priv.c 	ASCEBC(mem->vm[0].name, 8);
mem               829 arch/s390/kvm/priv.c 	memcpy(mem->vm[0].cpi, "KVM/Linux       ", 16);
mem               830 arch/s390/kvm/priv.c 	ASCEBC(mem->vm[0].cpi, 16);
mem               849 arch/s390/kvm/priv.c 	unsigned long mem = 0;
mem               883 arch/s390/kvm/priv.c 		mem = get_zeroed_page(GFP_KERNEL);
mem               884 arch/s390/kvm/priv.c 		if (!mem)
mem               886 arch/s390/kvm/priv.c 		if (stsi((void *) mem, fc, sel1, sel2))
mem               892 arch/s390/kvm/priv.c 		mem = get_zeroed_page(GFP_KERNEL);
mem               893 arch/s390/kvm/priv.c 		if (!mem)
mem               895 arch/s390/kvm/priv.c 		handle_stsi_3_2_2(vcpu, (void *) mem);
mem               899 arch/s390/kvm/priv.c 	rc = write_guest(vcpu, operand2, ar, (void *)mem, PAGE_SIZE);
mem               909 arch/s390/kvm/priv.c 	free_page(mem);
mem               916 arch/s390/kvm/priv.c 	free_page(mem);
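The handle_stsi_3_2_2() hits above show a small in-place array rotation: every vm[] slot is copied one position up so a synthetic entry for the KVM host can be written into slot 0. A minimal userspace sketch of that shift-and-insert move follows; the struct, the MAX_VM limit, and the sample names are invented for illustration and are not the s390 sysinfo layout.

#include <stdio.h>
#include <string.h>

struct vm_entry { char name[9]; };	/* illustrative stand-in, not sysinfo_3_2_2 */

#define MAX_VM 8

int main(void)
{
	struct vm_entry vm[MAX_VM] = { { "guest-a" }, { "guest-b" } };
	int count = 2;
	int n;

	/* make room at slot 0, as the listed loop does for mem->vm[] */
	if (count < MAX_VM)
		count++;
	for (n = count - 1; n > 0; n--)
		memcpy(&vm[n], &vm[n - 1], sizeof(vm[0]));

	memset(&vm[0], 0, sizeof(vm[0]));
	strcpy(vm[0].name, "KVMguest");	/* 8 chars + NUL fits name[9] */

	for (n = 0; n < count; n++)
		printf("vm[%d] = %s\n", n, vm[n].name);
	return 0;
}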
mem                93 arch/sh/include/asm/io.h pfx##writes##bwlq(volatile void __iomem *mem, const void *addr,		\
mem                99 arch/sh/include/asm/io.h 		__raw_write##bwlq(*__addr, mem);			\
mem               104 arch/sh/include/asm/io.h static inline void pfx##reads##bwlq(volatile void __iomem *mem,		\
mem               110 arch/sh/include/asm/io.h 		*__addr = __raw_read##bwlq(mem);			\
mem               193 arch/sh/kernel/kgdb.c int dbg_set_reg(int regno, void *mem, struct pt_regs *regs)
mem               199 arch/sh/kernel/kgdb.c 		memcpy((void *)regs + dbg_reg_def[regno].offset, mem,
mem               205 arch/sh/kernel/kgdb.c char *dbg_get_reg(int regno, void *mem, struct pt_regs *regs)
mem               211 arch/sh/kernel/kgdb.c 		memcpy(mem, (void *)regs + dbg_reg_def[regno].offset,
mem               216 arch/sh/kernel/kgdb.c 		__asm__ __volatile__ ("stc vbr, %0" : "=r" (mem));
mem                59 arch/sh/kernel/machine_kexec.c 		       (unsigned int)image->segment[i].mem,
mem                60 arch/sh/kernel/machine_kexec.c 		       (unsigned int)image->segment[i].mem +
mem                28 arch/um/include/asm/uaccess.h extern unsigned long __clear_user(void __user *mem, unsigned long len);
mem               211 arch/um/kernel/skas/uaccess.c unsigned long __clear_user(void __user *mem, unsigned long len)
mem               214 arch/um/kernel/skas/uaccess.c 		memset((__force void*)mem, 0, len);
mem               218 arch/um/kernel/skas/uaccess.c 	return buffer_op((unsigned long) mem, len, 1, clear_chunk, NULL);
mem               273 arch/x86/events/intel/core.c EVENT_ATTR_STR(mem-loads,	mem_ld_nhm,	"event=0x0b,umask=0x10,ldlat=3");
mem               274 arch/x86/events/intel/core.c EVENT_ATTR_STR(mem-loads,	mem_ld_snb,	"event=0xcd,umask=0x1,ldlat=3");
mem               275 arch/x86/events/intel/core.c EVENT_ATTR_STR(mem-stores,	mem_st_snb,	"event=0xcd,umask=0x2");
mem              4222 arch/x86/events/intel/core.c EVENT_ATTR_STR(mem-loads,	mem_ld_hsw,	"event=0xcd,umask=0x1,ldlat=3");
mem              4223 arch/x86/events/intel/core.c EVENT_ATTR_STR(mem-stores,	mem_st_hsw,	"event=0xd0,umask=0x82")
mem               251 arch/x86/include/asm/kvm_emulate.h 		} mem;
mem                79 arch/x86/include/asm/realmode.h static inline void set_real_mode_mem(phys_addr_t mem)
mem                81 arch/x86/include/asm/realmode.h 	real_mode_header = (struct real_mode_header *) __va(mem);
mem               584 arch/x86/include/asm/uaccess.h unsigned long __must_check clear_user(void __user *mem, unsigned long len);
mem               585 arch/x86/include/asm/uaccess.h unsigned long __must_check __clear_user(void __user *mem, unsigned long len);
mem              2633 arch/x86/kernel/apic/io_apic.c 	char *mem;
mem              2642 arch/x86/kernel/apic/io_apic.c 	mem = memblock_alloc(n, SMP_CACHE_BYTES);
mem              2643 arch/x86/kernel/apic/io_apic.c 	if (!mem)
mem              2645 arch/x86/kernel/apic/io_apic.c 	res = (void *)mem;
mem              2647 arch/x86/kernel/apic/io_apic.c 	mem += sizeof(struct resource) * nr_ioapics;
mem              2650 arch/x86/kernel/apic/io_apic.c 		res[i].name = mem;
mem              2652 arch/x86/kernel/apic/io_apic.c 		snprintf(mem, IOAPIC_RESOURCE_NAME_SIZE, "IOAPIC %u", i);
mem              2653 arch/x86/kernel/apic/io_apic.c 		mem += IOAPIC_RESOURCE_NAME_SIZE;
mem               455 arch/x86/kernel/crash.c 		image->arch.backup_load_addr = kbuf.mem;
mem               471 arch/x86/kernel/crash.c 	kbuf.mem = KEXEC_BUF_MEM_UNKNOWN;
mem               477 arch/x86/kernel/crash.c 	image->arch.elf_load_addr = kbuf.mem;
mem               398 arch/x86/kernel/kexec-bzimage64.c 	pr_debug("Loaded purgatory at 0x%lx\n", pbuf.mem);
mem               436 arch/x86/kernel/kexec-bzimage64.c 	bootparam_load_addr = kbuf.mem;
mem               446 arch/x86/kernel/kexec-bzimage64.c 	kbuf.mem = KEXEC_BUF_MEM_UNKNOWN;
mem               450 arch/x86/kernel/kexec-bzimage64.c 	kernel_load_addr = kbuf.mem;
mem               461 arch/x86/kernel/kexec-bzimage64.c 		kbuf.mem = KEXEC_BUF_MEM_UNKNOWN;
mem               465 arch/x86/kernel/kexec-bzimage64.c 		initrd_load_addr = kbuf.mem;
mem                91 arch/x86/kernel/kgdb.c int dbg_set_reg(int regno, void *mem, struct pt_regs *regs)
mem               101 arch/x86/kernel/kgdb.c 		memcpy((void *)regs + dbg_reg_def[regno].offset, mem,
mem               106 arch/x86/kernel/kgdb.c char *dbg_get_reg(int regno, void *mem, struct pt_regs *regs)
mem               109 arch/x86/kernel/kgdb.c 		memcpy(mem, &regs->orig_ax, sizeof(regs->orig_ax));
mem               116 arch/x86/kernel/kgdb.c 		memcpy(mem, (void *)regs + dbg_reg_def[regno].offset,
mem               123 arch/x86/kernel/kgdb.c 		*(unsigned long *)mem = 0xFFFF;
mem               235 arch/x86/kernel/machine_kexec_64.c 		mstart = image->segment[i].mem;
mem              1391 arch/x86/kvm/emulate.c 	op->addr.mem.ea = modrm_ea;
mem              1393 arch/x86/kvm/emulate.c 		ctxt->memop.addr.mem.ea = (u32)ctxt->memop.addr.mem.ea;
mem              1407 arch/x86/kvm/emulate.c 		op->addr.mem.ea = insn_fetch(u16, ctxt);
mem              1410 arch/x86/kvm/emulate.c 		op->addr.mem.ea = insn_fetch(u32, ctxt);
mem              1413 arch/x86/kvm/emulate.c 		op->addr.mem.ea = insn_fetch(u64, ctxt);
mem              1434 arch/x86/kvm/emulate.c 		ctxt->dst.addr.mem.ea = address_mask(ctxt,
mem              1435 arch/x86/kvm/emulate.c 					   ctxt->dst.addr.mem.ea + (sv >> 3));
mem              1848 arch/x86/kvm/emulate.c 						 op->addr.mem,
mem              1854 arch/x86/kvm/emulate.c 					       op->addr.mem,
mem              1860 arch/x86/kvm/emulate.c 				       op->addr.mem,
mem              3435 arch/x86/kvm/emulate.c 	op->addr.mem.ea = register_address(ctxt, reg);
mem              3813 arch/x86/kvm/emulate.c 	rc = linearize(ctxt, ctxt->src.addr.mem, 1, false, &linear);
mem              3864 arch/x86/kvm/emulate.c 	return segmented_write_std(ctxt, ctxt->dst.addr.mem,
mem              3885 arch/x86/kvm/emulate.c 	rc = read_descriptor(ctxt, ctxt->src.addr.mem,
mem              4135 arch/x86/kvm/emulate.c 	return segmented_write_std(ctxt, ctxt->memop.addr.mem, &fx_state,
mem              4170 arch/x86/kvm/emulate.c 	rc = segmented_read_std(ctxt, ctxt->memop.addr.mem, &fx_state, size);
mem              4984 arch/x86/kvm/emulate.c 	op->addr.mem.ea = ctxt->_eip;
mem              5069 arch/x86/kvm/emulate.c 		op->addr.mem.ea =
mem              5071 arch/x86/kvm/emulate.c 		op->addr.mem.seg = VCPU_SREG_ES;
mem              5123 arch/x86/kvm/emulate.c 		op->addr.mem.ea =
mem              5125 arch/x86/kvm/emulate.c 		op->addr.mem.seg = ctxt->seg_override;
mem              5132 arch/x86/kvm/emulate.c 		op->addr.mem.ea =
mem              5136 arch/x86/kvm/emulate.c 		op->addr.mem.seg = ctxt->seg_override;
mem              5141 arch/x86/kvm/emulate.c 		op->addr.mem.ea = ctxt->_eip;
mem              5448 arch/x86/kvm/emulate.c 	ctxt->memop.addr.mem.seg = ctxt->seg_override;
mem              5470 arch/x86/kvm/emulate.c 		ctxt->memopp->addr.mem.ea = address_mask(ctxt,
mem              5471 arch/x86/kvm/emulate.c 					ctxt->memopp->addr.mem.ea + ctxt->_eip);
mem              5655 arch/x86/kvm/emulate.c 		rc = segmented_read(ctxt, ctxt->src.addr.mem,
mem              5663 arch/x86/kvm/emulate.c 		rc = segmented_read(ctxt, ctxt->src2.addr.mem,
mem              5675 arch/x86/kvm/emulate.c 		rc = segmented_read(ctxt, ctxt->dst.addr.mem,
mem              5727 arch/x86/kvm/emulate.c 		ctxt->dst.val = ctxt->src.addr.mem.ea;
mem              9828 arch/x86/kvm/x86.c 				const struct kvm_userspace_memory_region *mem,
mem              9833 arch/x86/kvm/x86.c 					       mem->memory_size >> PAGE_SHIFT);
mem              9889 arch/x86/kvm/x86.c 				const struct kvm_userspace_memory_region *mem,
mem               126 arch/x86/platform/olpc/olpc_dt.c 	static u8 *mem;
mem               147 arch/x86/platform/olpc/olpc_dt.c 		mem = res;
mem               152 arch/x86/platform/olpc/olpc_dt.c 	res = mem;
mem               153 arch/x86/platform/olpc/olpc_dt.c 	mem += size;
mem                20 arch/x86/realmode/init.c 	phys_addr_t mem;
mem                29 arch/x86/realmode/init.c 	mem = memblock_find_in_range(0, 1<<20, size, PAGE_SIZE);
mem                30 arch/x86/realmode/init.c 	if (!mem) {
mem                35 arch/x86/realmode/init.c 	memblock_reserve(mem, size);
mem                36 arch/x86/realmode/init.c 	set_real_mode_mem(mem);
mem               653 arch/x86/tools/relocs.c 		void *mem = realloc(r->offset, newsize * sizeof(r->offset[0]));
mem               655 arch/x86/tools/relocs.c 		if (!mem)
mem               658 arch/x86/tools/relocs.c 		r->offset = mem;
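The relocs.c hits above use the realloc-into-a-temporary idiom: the result of realloc() goes into a separate mem pointer, the failure case bails out before anything is overwritten, and only then is r->offset updated. A small self-contained sketch of the same idiom follows; the struct and the growth policy are invented for the example.

#include <stdio.h>
#include <stdlib.h>

struct offset_list {
	unsigned long *offset;	/* grows on demand, like r->offset above */
	size_t count;
	size_t size;
};

static int offset_list_add(struct offset_list *r, unsigned long value)
{
	if (r->count == r->size) {
		size_t newsize = r->size ? r->size * 2 : 16;	/* arbitrary policy */
		void *mem = realloc(r->offset, newsize * sizeof(r->offset[0]));

		if (!mem)
			return -1;	/* old array is still intact, nothing leaked */
		r->offset = mem;
		r->size = newsize;
	}
	r->offset[r->count++] = value;
	return 0;
}

int main(void)
{
	struct offset_list r = { NULL, 0, 0 };
	unsigned long i;

	for (i = 0; i < 100; i++)
		if (offset_list_add(&r, i))
			return 1;
	printf("stored %zu offsets\n", r.count);
	free(r.offset);
	return 0;
}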
mem                92 arch/xtensa/mm/init.c 	struct memblock_region *mem, *res;
mem                96 arch/xtensa/mm/init.c 	for_each_memblock(memory, mem) {
mem                97 arch/xtensa/mm/init.c 		unsigned long start = memblock_region_memory_base_pfn(mem);
mem                98 arch/xtensa/mm/init.c 		unsigned long end = memblock_region_memory_end_pfn(mem);
mem               104 arch/xtensa/mm/init.c 		if (memblock_is_nomap(mem))
mem               451 crypto/api.c   	char *mem;
mem               460 crypto/api.c   	mem = kzalloc(total, GFP_KERNEL);
mem               461 crypto/api.c   	if (mem == NULL)
mem               464 crypto/api.c   	tfm = (struct crypto_tfm *)(mem + tfmsize);
mem               481 crypto/api.c   	kfree(mem);
mem               483 crypto/api.c   	mem = ERR_PTR(err);
mem               485 crypto/api.c   	return mem;
mem               567 crypto/api.c   void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
mem               571 crypto/api.c   	if (unlikely(!mem))
mem               580 crypto/api.c   	kzfree(mem);
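The crypto/api.c hits above return errors through the pointer itself (mem = ERR_PTR(err)). The sketch below re-creates that convention in plain userspace C with simplified macros; they only imitate the shape of the kernel's error-pointer helpers, and alloc_tfm() is an invented example function, not the crypto API.

#include <stdio.h>
#include <stdlib.h>
#include <errno.h>
#include <stdint.h>

#define MAX_ERRNO	4095
#define ERR_PTR(err)	((void *)(intptr_t)(err))
#define PTR_ERR(ptr)	((long)(intptr_t)(ptr))
#define IS_ERR(ptr)	((uintptr_t)(ptr) >= (uintptr_t)-MAX_ERRNO)

static void *alloc_tfm(size_t size)
{
	void *mem = calloc(1, size);

	if (!mem)
		return ERR_PTR(-ENOMEM);	/* the error travels in the pointer */
	return mem;
}

int main(void)
{
	void *tfm = alloc_tfm(128);

	if (IS_ERR(tfm)) {
		printf("allocation failed: %ld\n", PTR_ERR(tfm));
		return 1;
	}
	printf("tfm at %p\n", tfm);
	free(tfm);
	return 0;
}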
mem                79 crypto/jitterentropy.c 	unsigned char *mem;	/* Memory access location with size of
mem               269 crypto/jitterentropy.c 	if (NULL == ec || NULL == ec->mem)
mem               281 crypto/jitterentropy.c 		unsigned char *tmpval = ec->mem + ec->memlocation;
mem               488 crypto/jitterentropy.c 		entropy_collector->mem = jent_zalloc(JENT_MEMORY_SIZE);
mem               489 crypto/jitterentropy.c 		if (!entropy_collector->mem) {
mem               511 crypto/jitterentropy.c 	jent_zfree(entropy_collector->mem);
mem               512 crypto/jitterentropy.c 	entropy_collector->mem = NULL;
mem                89 crypto/scompress.c 		void *mem;
mem                93 crypto/scompress.c 		mem = vmalloc_node(SCOMP_SCRATCH_SIZE, cpu_to_node(i));
mem                94 crypto/scompress.c 		if (!mem)
mem                96 crypto/scompress.c 		scratch->src = mem;
mem                97 crypto/scompress.c 		mem = vmalloc_node(SCOMP_SCRATCH_SIZE, cpu_to_node(i));
mem                98 crypto/scompress.c 		if (!mem)
mem               100 crypto/scompress.c 		scratch->dst = mem;
mem               172 drivers/acpi/acpi_extlog.c 		struct cper_sec_mem_err *mem = (void *)(gdata + 1);
mem               173 drivers/acpi/acpi_extlog.c 		if (gdata->error_data_length >= sizeof(*mem))
mem               174 drivers/acpi/acpi_extlog.c 			trace_extlog_mem_event(mem, err_seq, fru_id, fru_text,
mem                87 drivers/acpi/acpi_memhotplug.c 		if ((info->caching == address64.info.mem.caching) &&
mem                88 drivers/acpi/acpi_memhotplug.c 		    (info->write_protect == address64.info.mem.write_protect) &&
mem               100 drivers/acpi/acpi_memhotplug.c 	new->caching = address64.info.mem.caching;
mem               101 drivers/acpi/acpi_memhotplug.c 	new->write_protect = address64.info.mem.write_protect;
mem               158 drivers/acpi/acpi_memhotplug.c static int acpi_bind_memblk(struct memory_block *mem, void *arg)
mem               160 drivers/acpi/acpi_memhotplug.c 	return acpi_bind_one(&mem->dev, arg);
mem               170 drivers/acpi/acpi_memhotplug.c static int acpi_unbind_memblk(struct memory_block *mem, void *arg)
mem               172 drivers/acpi/acpi_memhotplug.c 	acpi_unbind_one(&mem->dev);
mem               214 drivers/acpi/acpica/rsaddr.c 	{ACPI_RSC_1BITFLAG, ACPI_RS_OFFSET(data.address.info.mem.write_protect),
mem               218 drivers/acpi/acpica/rsaddr.c 	{ACPI_RSC_2BITFLAG, ACPI_RS_OFFSET(data.address.info.mem.caching),
mem               222 drivers/acpi/acpica/rsaddr.c 	{ACPI_RSC_2BITFLAG, ACPI_RS_OFFSET(data.address.info.mem.range_type),
mem               226 drivers/acpi/acpica/rsaddr.c 	{ACPI_RSC_1BITFLAG, ACPI_RS_OFFSET(data.address.info.mem.translation),
mem               502 drivers/acpi/acpica/rsdumpinfo.c 	{ACPI_RSD_1BITFLAG, ACPI_RSD_OFFSET(address.info.mem.write_protect),
mem               504 drivers/acpi/acpica/rsdumpinfo.c 	{ACPI_RSD_2BITFLAG, ACPI_RSD_OFFSET(address.info.mem.caching),
mem               506 drivers/acpi/acpica/rsdumpinfo.c 	{ACPI_RSD_2BITFLAG, ACPI_RSD_OFFSET(address.info.mem.range_type),
mem               508 drivers/acpi/acpica/rsdumpinfo.c 	{ACPI_RSD_1BITFLAG, ACPI_RSD_OFFSET(address.info.mem.translation),
mem               210 drivers/acpi/resource.c 	bool wp = addr->info.mem.write_protect;
mem               267 drivers/acpi/resource.c 	if (addr->info.mem.caching == ACPI_PREFETCHABLE_MEMORY)
mem               282 drivers/ata/acard-ahci.c 	void *mem;
mem               313 drivers/ata/acard-ahci.c 	mem = dmam_alloc_coherent(dev, dma_sz, &mem_dma, GFP_KERNEL);
mem               314 drivers/ata/acard-ahci.c 	if (!mem)
mem               321 drivers/ata/acard-ahci.c 	pp->cmd_slot = mem;
mem               324 drivers/ata/acard-ahci.c 	mem += AHCI_CMD_SLOT_SZ;
mem               330 drivers/ata/acard-ahci.c 	pp->rx_fis = mem;
mem               333 drivers/ata/acard-ahci.c 	mem += rx_fis_sz;
mem               340 drivers/ata/acard-ahci.c 	pp->cmd_tbl = mem;
mem              2323 drivers/ata/libahci.c 	void *mem;
mem              2364 drivers/ata/libahci.c 	mem = dmam_alloc_coherent(dev, dma_sz, &mem_dma, GFP_KERNEL);
mem              2365 drivers/ata/libahci.c 	if (!mem)
mem              2372 drivers/ata/libahci.c 	pp->cmd_slot = mem;
mem              2375 drivers/ata/libahci.c 	mem += AHCI_CMD_SLOT_SZ;
mem              2381 drivers/ata/libahci.c 	pp->rx_fis = mem;
mem              2384 drivers/ata/libahci.c 	mem += rx_fis_sz;
mem              2391 drivers/ata/libahci.c 	pp->cmd_tbl = mem;
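The acard-ahci/libahci lines above allocate one DMA-coherent block and then carve it up by advancing mem past the command-slot and RX FIS areas before recording cmd_tbl. Below is a minimal userspace analogue of that carve-one-allocation idiom; calloc() stands in for dmam_alloc_coherent(), and the three sizes are placeholders rather than the real AHCI constants.

#include <stdio.h>
#include <stdlib.h>

#define CMD_SLOT_SZ	1024	/* placeholder, not AHCI_CMD_SLOT_SZ */
#define RX_FIS_SZ	256	/* placeholder for the RX FIS area */
#define CMD_TBL_SZ	4096	/* placeholder for the command table */

struct port_priv {
	void *cmd_slot;
	void *rx_fis;
	void *cmd_tbl;
};

static int port_alloc(struct port_priv *pp)
{
	char *mem = calloc(1, CMD_SLOT_SZ + RX_FIS_SZ + CMD_TBL_SZ);

	if (!mem)
		return -1;

	pp->cmd_slot = mem;	/* first region */
	mem += CMD_SLOT_SZ;

	pp->rx_fis = mem;	/* second region */
	mem += RX_FIS_SZ;

	pp->cmd_tbl = mem;	/* remainder */
	return 0;
}

int main(void)
{
	struct port_priv pp;

	if (port_alloc(&pp))
		return 1;
	printf("cmd_slot=%p rx_fis=%p cmd_tbl=%p\n",
	       pp.cmd_slot, pp.rx_fis, pp.cmd_tbl);
	free(pp.cmd_slot);	/* one free() releases the whole carved block */
	return 0;
}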
mem               294 drivers/ata/pata_bk3710.c 	struct resource *mem;
mem               313 drivers/ata/pata_bk3710.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               321 drivers/ata/pata_bk3710.c 	base = devm_ioremap_resource(&pdev->dev, mem);
mem                52 drivers/ata/pata_palmld.c 	void __iomem *mem;
mem                66 drivers/ata/pata_palmld.c 	mem = devm_ioremap(dev, PALMLD_IDE_PHYS, 0x1000);
mem                67 drivers/ata/pata_palmld.c 	if (!mem)
mem                93 drivers/ata/pata_palmld.c 	ap->ioaddr.cmd_addr = mem + 0x10;
mem                94 drivers/ata/pata_palmld.c 	ap->ioaddr.altstatus_addr = mem + 0xe;
mem                95 drivers/ata/pata_palmld.c 	ap->ioaddr.ctl_addr = mem + 0xe;
mem               717 drivers/ata/sata_fsl.c 	void *mem;
mem               727 drivers/ata/sata_fsl.c 	mem = dma_alloc_coherent(dev, SATA_FSL_PORT_PRIV_DMA_SZ, &mem_dma,
mem               729 drivers/ata/sata_fsl.c 	if (!mem) {
mem               734 drivers/ata/sata_fsl.c 	pp->cmdslot = mem;
mem               737 drivers/ata/sata_fsl.c 	mem += SATA_FSL_CMD_SLOT_SIZE;
mem               740 drivers/ata/sata_fsl.c 	pp->cmdentry = mem;
mem               457 drivers/ata/sata_highbank.c 	struct resource *mem;
mem               465 drivers/ata/sata_highbank.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               466 drivers/ata/sata_highbank.c 	if (!mem) {
mem               491 drivers/ata/sata_highbank.c 	hpriv->mmio = devm_ioremap(dev, mem->start, resource_size(mem));
mem               493 drivers/ata/sata_highbank.c 		dev_err(dev, "can't map %pR\n", mem);
mem               539 drivers/ata/sata_highbank.c 		ata_port_desc(ap, "mmio %pR", mem);
mem              1091 drivers/ata/sata_nv.c 	void *mem;
mem              1131 drivers/ata/sata_nv.c 	mem = dmam_alloc_coherent(dev, NV_ADMA_PORT_PRIV_DMA_SZ,
mem              1133 drivers/ata/sata_nv.c 	if (!mem)
mem              1141 drivers/ata/sata_nv.c 	pp->cpb     = mem;
mem              1147 drivers/ata/sata_nv.c 	mem     += NV_ADMA_MAX_CPBS * NV_ADMA_CPB_SZ;
mem              1153 drivers/ata/sata_nv.c 	pp->aprd = mem;
mem               889 drivers/ata/sata_rcar.c 	struct resource *mem;
mem               918 drivers/ata/sata_rcar.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               919 drivers/ata/sata_rcar.c 	priv->base = devm_ioremap_resource(dev, mem);
mem              1296 drivers/atm/eni.c 	void __iomem *mem;
mem              1320 drivers/atm/eni.c 	mem = NULL; /* for gcc */
mem              1323 drivers/atm/eni.c 		mem = eni_alloc_mem(eni_dev,&size);
mem              1324 drivers/atm/eni.c 		if (!mem) return -ENOBUFS;
mem              1327 drivers/atm/eni.c 			eni_free_mem(eni_dev,mem,size);
mem              1332 drivers/atm/eni.c 		tx->send = mem;
mem              1355 drivers/atm/eni.c 			eni_free_mem(eni_dev,mem,size);
mem              1764 drivers/atm/eni.c 	eni_dev->mem = i;
mem              1765 drivers/atm/eni.c 	memset_io(eni_dev->ram,0,eni_dev->mem);
mem              1767 drivers/atm/eni.c 	printk("mem=%dkB (",eni_dev->mem >> 10);
mem              1848 drivers/atm/eni.c 	buffer_mem = eni_dev->mem - (buf - eni_dev->ram);
mem              2112 drivers/atm/eni.c 		    eni_dev->mem >> 10,eni_dev->tx_bw);
mem               114 drivers/atm/eni.h 	int mem;			/* RAM on board (in bytes) */
mem              1818 drivers/atm/horizon.c   HDW * mem;
mem              1844 drivers/atm/horizon.c   for (mem = (HDW *) memmap; mem < (HDW *) (memmap + 1); ++mem)
mem              1845 drivers/atm/horizon.c     wr_mem (dev, mem, 0);
mem              1016 drivers/atm/iphase.c 	RAM_BASE*((iadev->mem)/(128 * 1024))  
mem              1018 drivers/atm/iphase.c 	IPHASE5575_FRAG_CONTROL_RAM_BASE*((iadev->mem)/(128 * 1024))  
mem              1020 drivers/atm/iphase.c 	IPHASE5575_REASS_CONTROL_RAM_BASE*((iadev->mem)/(128 * 1024))  
mem              2380 drivers/atm/iphase.c 	iadev->mem = iadev->pci_map_size /2;  
mem              1024 drivers/atm/iphase.h 	int mem;  
mem              1230 drivers/atm/zatm.c 	zatm_dev->mem = i << 2;
mem              1238 drivers/atm/zatm.c 	printk("mem=%dkB,%s (",zatm_dev->mem >> 10,zatm_dev->copper ? "UTP" :
mem              1285 drivers/atm/zatm.c 	vccs = (zatm_dev->mem-NR_SHAPERS*SHAPER_SIZE-pools*POOL_SIZE)/
mem              1305 drivers/atm/zatm.c 	    (zatm_dev->mem-curr*4)/VC_SIZE);
mem                83 drivers/atm/zatm.h 	int mem;			/* RAM on board (in bytes) */
mem                92 drivers/base/memory.c 	struct memory_block *mem = to_memory_block(dev);
mem                94 drivers/base/memory.c 	kfree(mem);
mem               109 drivers/base/memory.c 	struct memory_block *mem = to_memory_block(dev);
mem               112 drivers/base/memory.c 	phys_index = mem->start_section_nr / sections_per_block;
mem               132 drivers/base/memory.c 	struct memory_block *mem = to_memory_block(dev);
mem               139 drivers/base/memory.c 	switch (mem->state) {
mem               151 drivers/base/memory.c 				mem->state);
mem               241 drivers/base/memory.c static int memory_block_change_state(struct memory_block *mem,
mem               246 drivers/base/memory.c 	if (mem->state != from_state_req)
mem               250 drivers/base/memory.c 		mem->state = MEM_GOING_OFFLINE;
mem               252 drivers/base/memory.c 	ret = memory_block_action(mem->start_section_nr, to_state,
mem               253 drivers/base/memory.c 				mem->online_type);
mem               255 drivers/base/memory.c 	mem->state = ret ? from_state_req : to_state;
mem               263 drivers/base/memory.c 	struct memory_block *mem = to_memory_block(dev);
mem               266 drivers/base/memory.c 	if (mem->state == MEM_ONLINE)
mem               274 drivers/base/memory.c 	if (mem->online_type < 0)
mem               275 drivers/base/memory.c 		mem->online_type = MMOP_ONLINE_KEEP;
mem               277 drivers/base/memory.c 	ret = memory_block_change_state(mem, MEM_ONLINE, MEM_OFFLINE);
mem               280 drivers/base/memory.c 	mem->online_type = -1;
mem               287 drivers/base/memory.c 	struct memory_block *mem = to_memory_block(dev);
mem               289 drivers/base/memory.c 	if (mem->state == MEM_OFFLINE)
mem               293 drivers/base/memory.c 	if (mem->section_count != sections_per_block)
mem               296 drivers/base/memory.c 	return memory_block_change_state(mem, MEM_OFFLINE, MEM_ONLINE);
mem               302 drivers/base/memory.c 	struct memory_block *mem = to_memory_block(dev);
mem               327 drivers/base/memory.c 		mem->online_type = online_type;
mem               328 drivers/base/memory.c 		ret = device_online(&mem->dev);
mem               331 drivers/base/memory.c 		ret = device_offline(&mem->dev);
mem               360 drivers/base/memory.c 	struct memory_block *mem = to_memory_block(dev);
mem               361 drivers/base/memory.c 	return sprintf(buf, "%d\n", mem->phys_device);
mem               381 drivers/base/memory.c 	struct memory_block *mem = to_memory_block(dev);
mem               382 drivers/base/memory.c 	unsigned long start_pfn = section_nr_to_pfn(mem->start_section_nr);
mem               392 drivers/base/memory.c 	if (mem->state == MEM_ONLINE) {
mem               405 drivers/base/memory.c 	nid = mem->nid;
mem               631 drivers/base/memory.c 	struct memory_block *mem;
mem               635 drivers/base/memory.c 	mem = find_memory_block_by_id(block_id);
mem               636 drivers/base/memory.c 	if (mem) {
mem               637 drivers/base/memory.c 		put_device(&mem->dev);
mem               640 drivers/base/memory.c 	mem = kzalloc(sizeof(*mem), GFP_KERNEL);
mem               641 drivers/base/memory.c 	if (!mem)
mem               644 drivers/base/memory.c 	mem->start_section_nr = block_id * sections_per_block;
mem               645 drivers/base/memory.c 	mem->state = state;
mem               646 drivers/base/memory.c 	start_pfn = section_nr_to_pfn(mem->start_section_nr);
mem               647 drivers/base/memory.c 	mem->phys_device = arch_get_memory_phys_device(start_pfn);
mem               648 drivers/base/memory.c 	mem->nid = NUMA_NO_NODE;
mem               650 drivers/base/memory.c 	ret = register_memory(mem);
mem               652 drivers/base/memory.c 	*memory = mem;
mem               659 drivers/base/memory.c 	struct memory_block *mem;
mem               669 drivers/base/memory.c 	ret = init_memory_block(&mem, base_memory_block_id(base_section_nr),
mem               673 drivers/base/memory.c 	mem->section_count = section_count;
mem               696 drivers/base/memory.c 	struct memory_block *mem;
mem               706 drivers/base/memory.c 		ret = init_memory_block(&mem, block_id, MEM_OFFLINE);
mem               709 drivers/base/memory.c 		mem->section_count = sections_per_block;
mem               715 drivers/base/memory.c 			mem = find_memory_block_by_id(block_id);
mem               716 drivers/base/memory.c 			mem->section_count = 0;
mem               717 drivers/base/memory.c 			unregister_memory(mem);
mem               733 drivers/base/memory.c 	struct memory_block *mem;
mem               742 drivers/base/memory.c 		mem = find_memory_block_by_id(block_id);
mem               743 drivers/base/memory.c 		if (WARN_ON_ONCE(!mem))
mem               745 drivers/base/memory.c 		mem->section_count = 0;
mem               746 drivers/base/memory.c 		unregister_memory_block_under_nodes(mem);
mem               747 drivers/base/memory.c 		unregister_memory(mem);
mem               753 drivers/base/memory.c bool is_memblock_offlined(struct memory_block *mem)
mem               755 drivers/base/memory.c 	return mem->state == MEM_OFFLINE;
mem               839 drivers/base/memory.c 	struct memory_block *mem;
mem               847 drivers/base/memory.c 		mem = find_memory_block_by_id(block_id);
mem               848 drivers/base/memory.c 		if (!mem)
mem               851 drivers/base/memory.c 		ret = func(mem, arg);
mem               852 drivers/base/memory.c 		put_device(&mem->dev);
mem               866 drivers/base/memory.c 	struct memory_block *mem = to_memory_block(dev);
mem               869 drivers/base/memory.c 	return cb_data->func(mem, cb_data->arg);
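The drivers/base/memory.c hits above end with a walker that looks up each memory block, passes it to a caller-supplied func(mem, arg), and stops on the first non-zero return. The sketch below models only that callback-walk shape with invented types; it is not the kernel's block walker.

#include <stdio.h>

struct memory_block_stub { unsigned long id; int online; };	/* invented */

typedef int (*walk_fn)(struct memory_block_stub *blk, void *arg);

static struct memory_block_stub blocks[] = {
	{ 0, 1 }, { 1, 1 }, { 2, 0 },
};

static int walk_blocks(walk_fn func, void *arg)
{
	unsigned long i;
	int ret;

	for (i = 0; i < sizeof(blocks) / sizeof(blocks[0]); i++) {
		ret = func(&blocks[i], arg);
		if (ret)
			return ret;	/* first non-zero return stops the walk */
	}
	return 0;
}

static int check_online(struct memory_block_stub *blk, void *arg)
{
	(void)arg;
	return blk->online ? 0 : -1;	/* loosely echoes check_memblock_online() above */
}

int main(void)
{
	printf("all online: %s\n", walk_blocks(check_online, NULL) ? "no" : "yes");
	return 0;
}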
mem               262 drivers/block/brd.c 	void *mem;
mem               271 drivers/block/brd.c 	mem = kmap_atomic(page);
mem               273 drivers/block/brd.c 		copy_from_brd(mem + off, brd, sector, len);
mem               277 drivers/block/brd.c 		copy_to_brd(brd, mem + off, sector, len);
mem               279 drivers/block/brd.c 	kunmap_atomic(mem);
mem               560 drivers/block/sx8.c 	void *mem;
mem               569 drivers/block/sx8.c 	mem = carm_ref_msg(host, rq->tag);
mem               571 drivers/block/sx8.c 	msg_size = func(host, rq->tag, mem);
mem               573 drivers/block/sx8.c 	ioc = mem;
mem               587 drivers/block/sx8.c 					unsigned int idx, void *mem)
mem               589 drivers/block/sx8.c 	struct carm_msg_sync_time *st = mem;
mem               603 drivers/block/sx8.c 					unsigned int idx, void *mem)
mem               605 drivers/block/sx8.c 	struct carm_msg_allocbuf *ab = mem;
mem               628 drivers/block/sx8.c 					    unsigned int idx, void *mem)
mem               630 drivers/block/sx8.c 	struct carm_msg_ioctl *ioc = mem;
mem               641 drivers/block/sx8.c 	mem += IOC_SCAN_CHAN_OFFSET;
mem               642 drivers/block/sx8.c 	memset(mem, IOC_SCAN_CHAN_NODEV, CARM_MAX_PORTS);
mem               648 drivers/block/sx8.c 					 unsigned int idx, void *mem)
mem               650 drivers/block/sx8.c 	struct carm_msg_get_fw_ver *ioc = mem;
mem               803 drivers/block/sx8.c 				   struct carm_request *crq, u8 *mem,
mem               807 drivers/block/sx8.c 	u8 *msg_data = mem + sizeof(struct carm_array_info);
mem               861 drivers/block/sx8.c 				  struct carm_request *crq, u8 *mem,
mem               864 drivers/block/sx8.c 	u8 *msg_data = mem + IOC_SCAN_CHAN_OFFSET;
mem               913 drivers/block/sx8.c 	u8 *mem;
mem               937 drivers/block/sx8.c 	mem = carm_ref_msg(host, msg_idx);
mem               943 drivers/block/sx8.c 			carm_handle_scan_chan(host, crq, mem, error);
mem               964 drivers/block/sx8.c 				(mem + sizeof(struct carm_msg_get_fw_ver));
mem               983 drivers/block/sx8.c 			carm_handle_array_info(host, crq, mem, error);
mem              1237 drivers/block/zram/zram_drv.c 		void *mem;
mem              1240 drivers/block/zram/zram_drv.c 		mem = kmap_atomic(page);
mem              1241 drivers/block/zram/zram_drv.c 		zram_fill_page(mem, PAGE_SIZE, value);
mem              1242 drivers/block/zram/zram_drv.c 		kunmap_atomic(mem);
mem              1313 drivers/block/zram/zram_drv.c 	void *src, *dst, *mem;
mem              1319 drivers/block/zram/zram_drv.c 	mem = kmap_atomic(page);
mem              1320 drivers/block/zram/zram_drv.c 	if (page_same_filled(mem, &element)) {
mem              1321 drivers/block/zram/zram_drv.c 		kunmap_atomic(mem);
mem              1327 drivers/block/zram/zram_drv.c 	kunmap_atomic(mem);
mem              1277 drivers/bus/mvebu-mbus.c 						 struct resource *mem,
mem              1287 drivers/bus/mvebu-mbus.c 	memset(mem, 0, sizeof(struct resource));
mem              1288 drivers/bus/mvebu-mbus.c 	mem->end = -1;
mem              1294 drivers/bus/mvebu-mbus.c 		mem->start = reg[0];
mem              1295 drivers/bus/mvebu-mbus.c 		mem->end = mem->start + reg[1] - 1;
mem              1296 drivers/bus/mvebu-mbus.c 		mem->flags = IORESOURCE_MEM;
mem               197 drivers/char/agp/agp.h int agp_generic_insert_memory(struct agp_memory *mem, off_t pg_start, int type);
mem               198 drivers/char/agp/agp.h int agp_generic_remove_memory(struct agp_memory *mem, off_t pg_start, int type);
mem               221 drivers/char/agp/agp.h void agp_alloc_page_array(size_t size, struct agp_memory *mem);
mem               222 drivers/char/agp/agp.h static inline void agp_free_page_array(struct agp_memory *mem)
mem               224 drivers/char/agp/agp.h 	kvfree(mem->pages);
mem               230 drivers/char/agp/agp.h void agp3_generic_tlbflush(struct agp_memory *mem);
mem                43 drivers/char/agp/ali-agp.c static void ali_tlbflush(struct agp_memory *mem)
mem                65 drivers/char/agp/alpha-agp.c static void alpha_core_agp_tlbflush(struct agp_memory *mem)
mem                84 drivers/char/agp/alpha-agp.c static int alpha_core_agp_insert_memory(struct agp_memory *mem, off_t pg_start,
mem                91 drivers/char/agp/alpha-agp.c 	if (type >= AGP_USER_TYPES || mem->type >= AGP_USER_TYPES)
mem                96 drivers/char/agp/alpha-agp.c 	if ((pg_start + mem->page_count) > num_entries)
mem                99 drivers/char/agp/alpha-agp.c 	status = agp->ops->bind(agp, pg_start, mem);
mem               101 drivers/char/agp/alpha-agp.c 	alpha_core_agp_tlbflush(mem);
mem               106 drivers/char/agp/alpha-agp.c static int alpha_core_agp_remove_memory(struct agp_memory *mem, off_t pg_start,
mem               112 drivers/char/agp/alpha-agp.c 	status = agp->ops->unbind(agp, pg_start, mem);
mem               113 drivers/char/agp/alpha-agp.c 	alpha_core_agp_tlbflush(mem);
mem               285 drivers/char/agp/amd-k7-agp.c static int amd_insert_memory(struct agp_memory *mem, off_t pg_start, int type)
mem               293 drivers/char/agp/amd-k7-agp.c 	if (type != mem->type ||
mem               297 drivers/char/agp/amd-k7-agp.c 	if ((pg_start + mem->page_count) > num_entries)
mem               301 drivers/char/agp/amd-k7-agp.c 	while (j < (pg_start + mem->page_count)) {
mem               309 drivers/char/agp/amd-k7-agp.c 	if (!mem->is_flushed) {
mem               311 drivers/char/agp/amd-k7-agp.c 		mem->is_flushed = true;
mem               314 drivers/char/agp/amd-k7-agp.c 	for (i = 0, j = pg_start; i < mem->page_count; i++, j++) {
mem               318 drivers/char/agp/amd-k7-agp.c 					       page_to_phys(mem->pages[i]),
mem               319 drivers/char/agp/amd-k7-agp.c 					       mem->type),
mem               323 drivers/char/agp/amd-k7-agp.c 	amd_irongate_tlbflush(mem);
mem               327 drivers/char/agp/amd-k7-agp.c static int amd_remove_memory(struct agp_memory *mem, off_t pg_start, int type)
mem               333 drivers/char/agp/amd-k7-agp.c 	if (type != mem->type ||
mem               337 drivers/char/agp/amd-k7-agp.c 	for (i = pg_start; i < (mem->page_count + pg_start); i++) {
mem               344 drivers/char/agp/amd-k7-agp.c 	amd_irongate_tlbflush(mem);
mem                45 drivers/char/agp/amd64-agp.c static int amd64_insert_memory(struct agp_memory *mem, off_t pg_start, int type)
mem                50 drivers/char/agp/amd64-agp.c 	struct agp_bridge_data *bridge = mem->bridge;
mem                55 drivers/char/agp/amd64-agp.c 	if (type != mem->type)
mem                64 drivers/char/agp/amd64-agp.c 	if (((unsigned long)pg_start + mem->page_count) > num_entries)
mem                70 drivers/char/agp/amd64-agp.c 	while (j < (pg_start + mem->page_count)) {
mem                76 drivers/char/agp/amd64-agp.c 	if (!mem->is_flushed) {
mem                78 drivers/char/agp/amd64-agp.c 		mem->is_flushed = true;
mem                81 drivers/char/agp/amd64-agp.c 	for (i = 0, j = pg_start; i < mem->page_count; i++, j++) {
mem                83 drivers/char/agp/amd64-agp.c 						      page_to_phys(mem->pages[i]),
mem                94 drivers/char/agp/amd64-agp.c 	amd64_tlbflush(mem);
mem               172 drivers/char/agp/ati-agp.c static void ati_tlbflush(struct agp_memory * mem)
mem               268 drivers/char/agp/ati-agp.c static int ati_insert_memory(struct agp_memory * mem,
mem               278 drivers/char/agp/ati-agp.c 	mask_type = agp_generic_type_to_mask_type(mem->bridge, type);
mem               279 drivers/char/agp/ati-agp.c 	if (mask_type != 0 || type != mem->type)
mem               282 drivers/char/agp/ati-agp.c 	if (mem->page_count == 0)
mem               285 drivers/char/agp/ati-agp.c 	if ((pg_start + mem->page_count) > num_entries)
mem               289 drivers/char/agp/ati-agp.c 	while (j < (pg_start + mem->page_count)) {
mem               297 drivers/char/agp/ati-agp.c 	if (!mem->is_flushed) {
mem               300 drivers/char/agp/ati-agp.c 		mem->is_flushed = true;
mem               303 drivers/char/agp/ati-agp.c 	for (i = 0, j = pg_start; i < mem->page_count; i++, j++) {
mem               307 drivers/char/agp/ati-agp.c 						       page_to_phys(mem->pages[i]),
mem               308 drivers/char/agp/ati-agp.c 						       mem->type),
mem               312 drivers/char/agp/ati-agp.c 	agp_bridge->driver->tlb_flush(mem);
mem               316 drivers/char/agp/ati-agp.c static int ati_remove_memory(struct agp_memory * mem, off_t pg_start,
mem               324 drivers/char/agp/ati-agp.c 	mask_type = agp_generic_type_to_mask_type(mem->bridge, type);
mem               325 drivers/char/agp/ati-agp.c 	if (mask_type != 0 || type != mem->type)
mem               328 drivers/char/agp/ati-agp.c 	if (mem->page_count == 0)
mem               331 drivers/char/agp/ati-agp.c 	for (i = pg_start; i < (mem->page_count + pg_start); i++) {
mem               338 drivers/char/agp/ati-agp.c 	agp_bridge->driver->tlb_flush(mem);
mem               101 drivers/char/agp/backend.c static const struct { int mem, agp; } maxes_table[] = {
mem               124 drivers/char/agp/backend.c 	while ((memory > maxes_table[index].mem) && (index < 8))
mem               128 drivers/char/agp/backend.c 	   ( (memory - maxes_table[index - 1].mem)  *
mem               130 drivers/char/agp/backend.c 	   (maxes_table[index].mem - maxes_table[index - 1].mem);
mem               109 drivers/char/agp/efficeon-agp.c static void efficeon_tlbflush(struct agp_memory * mem)
mem               236 drivers/char/agp/efficeon-agp.c static int efficeon_insert_memory(struct agp_memory * mem, off_t pg_start, int type)
mem               238 drivers/char/agp/efficeon-agp.c 	int i, count = mem->page_count, num_entries;
mem               246 drivers/char/agp/efficeon-agp.c 	if ((pg_start + mem->page_count) > num_entries)
mem               248 drivers/char/agp/efficeon-agp.c 	if (type != 0 || mem->type != 0)
mem               251 drivers/char/agp/efficeon-agp.c 	if (!mem->is_flushed) {
mem               253 drivers/char/agp/efficeon-agp.c 		mem->is_flushed = true;
mem               259 drivers/char/agp/efficeon-agp.c 		unsigned long insert = efficeon_mask_memory(mem->pages[i]);
mem               281 drivers/char/agp/efficeon-agp.c 	agp_bridge->driver->tlb_flush(mem);
mem               285 drivers/char/agp/efficeon-agp.c static int efficeon_remove_memory(struct agp_memory * mem, off_t pg_start, int type)
mem               287 drivers/char/agp/efficeon-agp.c 	int i, count = mem->page_count, num_entries;
mem               293 drivers/char/agp/efficeon-agp.c 	if ((pg_start + mem->page_count) > num_entries)
mem               295 drivers/char/agp/efficeon-agp.c 	if (type != 0 || mem->type != 0)
mem               307 drivers/char/agp/efficeon-agp.c 	agp_bridge->driver->tlb_flush(mem);
mem                91 drivers/char/agp/generic.c void agp_alloc_page_array(size_t size, struct agp_memory *mem)
mem                93 drivers/char/agp/generic.c 	mem->pages = kvmalloc(size, GFP_KERNEL);
mem              1029 drivers/char/agp/generic.c int agp_generic_insert_memory(struct agp_memory * mem, off_t pg_start, int type)
mem              1038 drivers/char/agp/generic.c 	bridge = mem->bridge;
mem              1042 drivers/char/agp/generic.c 	if (mem->page_count == 0)
mem              1071 drivers/char/agp/generic.c 	if (type != mem->type)
mem              1080 drivers/char/agp/generic.c 	if (((pg_start + mem->page_count) > num_entries) ||
mem              1081 drivers/char/agp/generic.c 	    ((pg_start + mem->page_count) < pg_start))
mem              1086 drivers/char/agp/generic.c 	while (j < (pg_start + mem->page_count)) {
mem              1092 drivers/char/agp/generic.c 	if (!mem->is_flushed) {
mem              1094 drivers/char/agp/generic.c 		mem->is_flushed = true;
mem              1097 drivers/char/agp/generic.c 	for (i = 0, j = pg_start; i < mem->page_count; i++, j++) {
mem              1099 drivers/char/agp/generic.c 						   page_to_phys(mem->pages[i]),
mem              1105 drivers/char/agp/generic.c 	bridge->driver->tlb_flush(mem);
mem              1111 drivers/char/agp/generic.c int agp_generic_remove_memory(struct agp_memory *mem, off_t pg_start, int type)
mem              1117 drivers/char/agp/generic.c 	bridge = mem->bridge;
mem              1121 drivers/char/agp/generic.c 	if (mem->page_count == 0)
mem              1124 drivers/char/agp/generic.c 	if (type != mem->type)
mem              1128 drivers/char/agp/generic.c 	if (((pg_start + mem->page_count) > num_entries) ||
mem              1129 drivers/char/agp/generic.c 	    ((pg_start + mem->page_count) < pg_start))
mem              1139 drivers/char/agp/generic.c 	for (i = pg_start; i < (mem->page_count + pg_start); i++) {
mem              1144 drivers/char/agp/generic.c 	bridge->driver->tlb_flush(mem);
mem              1191 drivers/char/agp/generic.c int agp_generic_alloc_pages(struct agp_bridge_data *bridge, struct agp_memory *mem, size_t num_pages)
mem              1208 drivers/char/agp/generic.c 		mem->pages[i] = page;
mem              1209 drivers/char/agp/generic.c 		mem->page_count++;
mem              1213 drivers/char/agp/generic.c 	set_pages_array_uc(mem->pages, num_pages);
mem              1237 drivers/char/agp/generic.c void agp_generic_destroy_pages(struct agp_memory *mem)
mem              1242 drivers/char/agp/generic.c 	if (!mem)
mem              1246 drivers/char/agp/generic.c 	set_pages_array_wb(mem->pages, mem->page_count);
mem              1249 drivers/char/agp/generic.c 	for (i = 0; i < mem->page_count; i++) {
mem              1250 drivers/char/agp/generic.c 		page = mem->pages[i];
mem              1258 drivers/char/agp/generic.c 		mem->pages[i] = NULL;
mem              1366 drivers/char/agp/generic.c void agp3_generic_tlbflush(struct agp_memory *mem)
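agp_generic_insert_memory()/agp_generic_remove_memory() above validate the requested window with a pair of tests: the end must not run past num_entries, and the addition must not wrap below pg_start. A compact standalone version of that overflow-aware range check, with illustrative values, follows.

#include <stdio.h>
#include <stddef.h>

static int range_fits(size_t pg_start, size_t page_count, size_t num_entries)
{
	size_t end = pg_start + page_count;

	/* end < pg_start catches unsigned wrap-around of the sum */
	return end <= num_entries && end >= pg_start;
}

int main(void)
{
	printf("%d\n", range_fits(10, 5, 32));		/* fits          -> 1 */
	printf("%d\n", range_fits(30, 5, 32));		/* past the end  -> 0 */
	printf("%d\n", range_fits((size_t)-4, 8, 32));	/* wraps around  -> 0 */
	return 0;
}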
mem               279 drivers/char/agp/hp-agp.c hp_zx1_tlbflush (struct agp_memory *mem)
mem               329 drivers/char/agp/hp-agp.c hp_zx1_insert_memory (struct agp_memory *mem, off_t pg_start, int type)
mem               336 drivers/char/agp/hp-agp.c 	if (type != mem->type ||
mem               342 drivers/char/agp/hp-agp.c 	io_pg_count = hp->io_pages_per_kpage * mem->page_count;
mem               355 drivers/char/agp/hp-agp.c 	if (!mem->is_flushed) {
mem               357 drivers/char/agp/hp-agp.c 		mem->is_flushed = true;
mem               360 drivers/char/agp/hp-agp.c 	for (i = 0, j = io_pg_start; i < mem->page_count; i++) {
mem               363 drivers/char/agp/hp-agp.c 		paddr = page_to_phys(mem->pages[i]);
mem               371 drivers/char/agp/hp-agp.c 	agp_bridge->driver->tlb_flush(mem);
mem               376 drivers/char/agp/hp-agp.c hp_zx1_remove_memory (struct agp_memory *mem, off_t pg_start, int type)
mem               381 drivers/char/agp/hp-agp.c 	if (type != mem->type ||
mem               387 drivers/char/agp/hp-agp.c 	io_pg_count = hp->io_pages_per_kpage * mem->page_count;
mem               392 drivers/char/agp/hp-agp.c 	agp_bridge->driver->tlb_flush(mem);
mem               170 drivers/char/agp/i460-agp.c static void i460_tlb_flush (struct agp_memory *mem)
mem               293 drivers/char/agp/i460-agp.c static int i460_insert_memory_small_io_page (struct agp_memory *mem,
mem               301 drivers/char/agp/i460-agp.c 		 mem, pg_start, type, page_to_phys(mem->pages[0]));
mem               303 drivers/char/agp/i460-agp.c 	if (type >= AGP_USER_TYPES || mem->type >= AGP_USER_TYPES)
mem               311 drivers/char/agp/i460-agp.c 	if ((io_pg_start + I460_IOPAGES_PER_KPAGE * mem->page_count) > num_entries) {
mem               317 drivers/char/agp/i460-agp.c 	while (j < (io_pg_start + I460_IOPAGES_PER_KPAGE * mem->page_count)) {
mem               327 drivers/char/agp/i460-agp.c 	for (i = 0, j = io_pg_start; i < mem->page_count; i++) {
mem               328 drivers/char/agp/i460-agp.c 		paddr = page_to_phys(mem->pages[i]);
mem               330 drivers/char/agp/i460-agp.c 			WR_GATT(j, i460_mask_memory(agp_bridge, paddr, mem->type));
mem               336 drivers/char/agp/i460-agp.c static int i460_remove_memory_small_io_page(struct agp_memory *mem,
mem               342 drivers/char/agp/i460-agp.c 		 mem, pg_start, type);
mem               346 drivers/char/agp/i460-agp.c 	for (i = pg_start; i < (pg_start + I460_IOPAGES_PER_KPAGE * mem->page_count); i++)
mem               400 drivers/char/agp/i460-agp.c static int i460_insert_memory_large_io_page (struct agp_memory *mem,
mem               407 drivers/char/agp/i460-agp.c 	if (type >= AGP_USER_TYPES || mem->type >= AGP_USER_TYPES)
mem               415 drivers/char/agp/i460-agp.c 	end = &i460.lp_desc[(pg_start + mem->page_count - 1) / I460_KPAGES_PER_IOPAGE];
mem               417 drivers/char/agp/i460-agp.c 	end_offset = (pg_start + mem->page_count - 1) % I460_KPAGES_PER_IOPAGE;
mem               453 drivers/char/agp/i460-agp.c 			mem->pages[i] = lp->page;
mem               461 drivers/char/agp/i460-agp.c static int i460_remove_memory_large_io_page (struct agp_memory *mem,
mem               473 drivers/char/agp/i460-agp.c 	end = &i460.lp_desc[(pg_start + mem->page_count - 1) / I460_KPAGES_PER_IOPAGE];
mem               475 drivers/char/agp/i460-agp.c 	end_offset = (pg_start + mem->page_count - 1) % I460_KPAGES_PER_IOPAGE;
mem               482 drivers/char/agp/i460-agp.c 			mem->pages[i] = NULL;
mem               500 drivers/char/agp/i460-agp.c static int i460_insert_memory (struct agp_memory *mem,
mem               504 drivers/char/agp/i460-agp.c 		return i460_insert_memory_small_io_page(mem, pg_start, type);
mem               506 drivers/char/agp/i460-agp.c 		return i460_insert_memory_large_io_page(mem, pg_start, type);
mem               509 drivers/char/agp/i460-agp.c static int i460_remove_memory (struct agp_memory *mem,
mem               513 drivers/char/agp/i460-agp.c 		return i460_remove_memory_small_io_page(mem, pg_start, type);
mem               515 drivers/char/agp/i460-agp.c 		return i460_remove_memory_large_io_page(mem, pg_start, type);
mem                75 drivers/char/agp/intel-agp.c static void intel_tlbflush(struct agp_memory *mem)
mem                82 drivers/char/agp/intel-agp.c static void intel_8xx_tlbflush(struct agp_memory *mem)
mem               186 drivers/char/agp/intel-agp.c static void intel_820_tlbflush(struct agp_memory *mem)
mem               127 drivers/char/agp/intel-gtt.c 	DBG("try unmapping %lu pages\n", (unsigned long)mem->page_count);
mem               212 drivers/char/agp/intel-gtt.c static int i810_insert_dcache_entries(struct agp_memory *mem, off_t pg_start,
mem               217 drivers/char/agp/intel-gtt.c 	if ((pg_start + mem->page_count)
mem               221 drivers/char/agp/intel-gtt.c 	if (!mem->is_flushed)
mem               224 drivers/char/agp/intel-gtt.c 	for (i = pg_start; i < (pg_start + mem->page_count); i++) {
mem               897 drivers/char/agp/intel-gtt.c static int intel_fake_agp_insert_entries(struct agp_memory *mem,
mem               910 drivers/char/agp/intel-gtt.c 		return i810_insert_dcache_entries(mem, pg_start, type);
mem               912 drivers/char/agp/intel-gtt.c 	if (mem->page_count == 0)
mem               915 drivers/char/agp/intel-gtt.c 	if (pg_start + mem->page_count > intel_private.gtt_total_entries)
mem               918 drivers/char/agp/intel-gtt.c 	if (type != mem->type)
mem               924 drivers/char/agp/intel-gtt.c 	if (!mem->is_flushed)
mem               930 drivers/char/agp/intel-gtt.c 		ret = intel_gtt_map_memory(mem->pages, mem->page_count, &st);
mem               935 drivers/char/agp/intel-gtt.c 		mem->sg_list = st.sgl;
mem               936 drivers/char/agp/intel-gtt.c 		mem->num_sg = st.nents;
mem               938 drivers/char/agp/intel-gtt.c 		intel_gtt_insert_pages(pg_start, mem->page_count, mem->pages,
mem               944 drivers/char/agp/intel-gtt.c 	mem->is_flushed = true;
mem               962 drivers/char/agp/intel-gtt.c static int intel_fake_agp_remove_entries(struct agp_memory *mem,
mem               965 drivers/char/agp/intel-gtt.c 	if (mem->page_count == 0)
mem               968 drivers/char/agp/intel-gtt.c 	intel_gtt_clear_range(pg_start, mem->page_count);
mem               971 drivers/char/agp/intel-gtt.c 		intel_gtt_unmap_memory(mem->sg_list, mem->num_sg);
mem               972 drivers/char/agp/intel-gtt.c 		mem->sg_list = NULL;
mem               973 drivers/char/agp/intel-gtt.c 		mem->num_sg = 0;
mem               201 drivers/char/agp/nvidia-agp.c static int nvidia_insert_memory(struct agp_memory *mem, off_t pg_start, int type)
mem               206 drivers/char/agp/nvidia-agp.c 	mask_type = agp_generic_type_to_mask_type(mem->bridge, type);
mem               207 drivers/char/agp/nvidia-agp.c 	if (mask_type != 0 || type != mem->type)
mem               210 drivers/char/agp/nvidia-agp.c 	if (mem->page_count == 0)
mem               213 drivers/char/agp/nvidia-agp.c 	if ((pg_start + mem->page_count) >
mem               217 drivers/char/agp/nvidia-agp.c 	for (j = pg_start; j < (pg_start + mem->page_count); j++) {
mem               222 drivers/char/agp/nvidia-agp.c 	if (!mem->is_flushed) {
mem               224 drivers/char/agp/nvidia-agp.c 		mem->is_flushed = true;
mem               226 drivers/char/agp/nvidia-agp.c 	for (i = 0, j = pg_start; i < mem->page_count; i++, j++) {
mem               228 drivers/char/agp/nvidia-agp.c 			       page_to_phys(mem->pages[i]), mask_type),
mem               235 drivers/char/agp/nvidia-agp.c 	agp_bridge->driver->tlb_flush(mem);
mem               240 drivers/char/agp/nvidia-agp.c static int nvidia_remove_memory(struct agp_memory *mem, off_t pg_start, int type)
mem               246 drivers/char/agp/nvidia-agp.c 	mask_type = agp_generic_type_to_mask_type(mem->bridge, type);
mem               247 drivers/char/agp/nvidia-agp.c 	if (mask_type != 0 || type != mem->type)
mem               250 drivers/char/agp/nvidia-agp.c 	if (mem->page_count == 0)
mem               253 drivers/char/agp/nvidia-agp.c 	for (i = pg_start; i < (mem->page_count + pg_start); i++)
mem               256 drivers/char/agp/nvidia-agp.c 	agp_bridge->driver->tlb_flush(mem);
mem               261 drivers/char/agp/nvidia-agp.c static void nvidia_tlbflush(struct agp_memory *mem)
mem                89 drivers/char/agp/parisc-agp.c parisc_agp_tlbflush(struct agp_memory *mem)
mem               121 drivers/char/agp/parisc-agp.c parisc_agp_insert_memory(struct agp_memory *mem, off_t pg_start, int type)
mem               128 drivers/char/agp/parisc-agp.c 	if (type != mem->type ||
mem               134 drivers/char/agp/parisc-agp.c 	io_pg_count = info->io_pages_per_kpage * mem->page_count;
mem               146 drivers/char/agp/parisc-agp.c 	if (!mem->is_flushed) {
mem               148 drivers/char/agp/parisc-agp.c 		mem->is_flushed = true;
mem               151 drivers/char/agp/parisc-agp.c 	for (i = 0, j = io_pg_start; i < mem->page_count; i++) {
mem               154 drivers/char/agp/parisc-agp.c 		paddr = page_to_phys(mem->pages[i]);
mem               164 drivers/char/agp/parisc-agp.c 	agp_bridge->driver->tlb_flush(mem);
mem               170 drivers/char/agp/parisc-agp.c parisc_agp_remove_memory(struct agp_memory *mem, off_t pg_start, int type)
mem               175 drivers/char/agp/parisc-agp.c 	if (type != mem->type ||
mem               181 drivers/char/agp/parisc-agp.c 	io_pg_count = info->io_pages_per_kpage * mem->page_count;
mem               186 drivers/char/agp/parisc-agp.c 	agp_bridge->driver->tlb_flush(mem);
mem                46 drivers/char/agp/sis-agp.c static void sis_tlbflush(struct agp_memory *mem)
mem               320 drivers/char/agp/sworks-agp.c static int serverworks_insert_memory(struct agp_memory *mem,
mem               329 drivers/char/agp/sworks-agp.c 	if (type != 0 || mem->type != 0) {
mem               332 drivers/char/agp/sworks-agp.c 	if ((pg_start + mem->page_count) > num_entries) {
mem               337 drivers/char/agp/sworks-agp.c 	while (j < (pg_start + mem->page_count)) {
mem               345 drivers/char/agp/sworks-agp.c 	if (!mem->is_flushed) {
mem               347 drivers/char/agp/sworks-agp.c 		mem->is_flushed = true;
mem               350 drivers/char/agp/sworks-agp.c 	for (i = 0, j = pg_start; i < mem->page_count; i++, j++) {
mem               354 drivers/char/agp/sworks-agp.c 				page_to_phys(mem->pages[i]), mem->type),
mem               357 drivers/char/agp/sworks-agp.c 	serverworks_tlbflush(mem);
mem               361 drivers/char/agp/sworks-agp.c static int serverworks_remove_memory(struct agp_memory *mem, off_t pg_start,
mem               368 drivers/char/agp/sworks-agp.c 	if (type != 0 || mem->type != 0) {
mem               373 drivers/char/agp/sworks-agp.c 	serverworks_tlbflush(mem);
mem               375 drivers/char/agp/sworks-agp.c 	for (i = pg_start; i < (mem->page_count + pg_start); i++) {
mem               381 drivers/char/agp/sworks-agp.c 	serverworks_tlbflush(mem);
mem                73 drivers/char/agp/uninorth-agp.c static void uninorth_tlbflush(struct agp_memory *mem)
mem                83 drivers/char/agp/uninorth-agp.c 	if (!mem && uninorth_rev <= 0x30) {
mem               149 drivers/char/agp/uninorth-agp.c static int uninorth_insert_memory(struct agp_memory *mem, off_t pg_start, int type)
mem               156 drivers/char/agp/uninorth-agp.c 	if (type != mem->type)
mem               165 drivers/char/agp/uninorth-agp.c 	if (mem->page_count == 0)
mem               171 drivers/char/agp/uninorth-agp.c 	if ((pg_start + mem->page_count) > num_entries)
mem               175 drivers/char/agp/uninorth-agp.c 	for (i = 0; i < mem->page_count; ++i) {
mem               184 drivers/char/agp/uninorth-agp.c 	for (i = 0; i < mem->page_count; i++) {
mem               186 drivers/char/agp/uninorth-agp.c 			gp[i] = (page_to_phys(mem->pages[i]) >> PAGE_SHIFT) | 0x80000000UL;
mem               188 drivers/char/agp/uninorth-agp.c 			gp[i] =	cpu_to_le32((page_to_phys(mem->pages[i]) & 0xFFFFF000UL) |
mem               190 drivers/char/agp/uninorth-agp.c 		flush_dcache_range((unsigned long)__va(page_to_phys(mem->pages[i])),
mem               191 drivers/char/agp/uninorth-agp.c 				   (unsigned long)__va(page_to_phys(mem->pages[i]))+0x1000);
mem               194 drivers/char/agp/uninorth-agp.c 	uninorth_tlbflush(mem);
mem               199 drivers/char/agp/uninorth-agp.c static int uninorth_remove_memory(struct agp_memory *mem, off_t pg_start, int type)
mem               205 drivers/char/agp/uninorth-agp.c 	if (type != mem->type)
mem               214 drivers/char/agp/uninorth-agp.c 	if (mem->page_count == 0)
mem               218 drivers/char/agp/uninorth-agp.c 	for (i = 0; i < mem->page_count; ++i) {
mem               222 drivers/char/agp/uninorth-agp.c 	uninorth_tlbflush(mem);
mem                80 drivers/char/agp/via-agp.c static void via_tlbflush(struct agp_memory *mem)
mem               163 drivers/char/agp/via-agp.c static void via_tlbflush_agp3(struct agp_memory *mem)
mem                97 drivers/char/applicom.c static unsigned long mem = 0;	/* physical segment of board  */
mem               101 drivers/char/applicom.c module_param_hw(mem, ulong, iomem, 0);
mem               102 drivers/char/applicom.c MODULE_PARM_DESC(mem, "Shared Memory Address of Applicom board");
mem               249 drivers/char/applicom.c 	if (!mem || !irq) {
mem               262 drivers/char/applicom.c 		RamIO = ioremap_nocache(mem + (LEN_RAM_IO * i), LEN_RAM_IO);
mem               269 drivers/char/applicom.c 		if (!(boardno = ac_register_board((unsigned long)mem+ (LEN_RAM_IO*i),
mem               275 drivers/char/applicom.c 		printk(KERN_NOTICE "Applicom ISA card found at mem 0x%lx, irq %d\n", mem + (LEN_RAM_IO*i), irq);
mem               294 drivers/char/applicom.c 				"at mem 0x%lx\n", mem);
mem               794 drivers/char/applicom.c 		printk(KERN_INFO "Segment of board ........... %X\n", (int) mem);
mem                50 drivers/char/hw_random/exynos-trng.c 	void __iomem     *mem;
mem                65 drivers/char/hw_random/exynos-trng.c 	writel_relaxed(max * 8, trng->mem + EXYNOS_TRNG_FIFO_CTRL);
mem                66 drivers/char/hw_random/exynos-trng.c 	val = readl_poll_timeout(trng->mem + EXYNOS_TRNG_FIFO_CTRL, val,
mem                71 drivers/char/hw_random/exynos-trng.c 	memcpy_fromio(data, trng->mem + EXYNOS_TRNG_FIFO_0, max);
mem                94 drivers/char/hw_random/exynos-trng.c 	writel_relaxed(val, trng->mem + EXYNOS_TRNG_CLKDIV);
mem                98 drivers/char/hw_random/exynos-trng.c 	writel_relaxed(val, trng->mem + EXYNOS_TRNG_CTRL);
mem               104 drivers/char/hw_random/exynos-trng.c 	writel_relaxed(0, trng->mem + EXYNOS_TRNG_POST_CTRL);
mem               132 drivers/char/hw_random/exynos-trng.c 	trng->mem = devm_ioremap_resource(&pdev->dev, res);
mem               133 drivers/char/hw_random/exynos-trng.c 	if (IS_ERR(trng->mem))
mem               134 drivers/char/hw_random/exynos-trng.c 		return PTR_ERR(trng->mem);
mem                57 drivers/char/hw_random/geode-rng.c 	void __iomem *mem = (void __iomem *)rng->priv;
mem                59 drivers/char/hw_random/geode-rng.c 	*data = readl(mem + GEODE_RNG_DATA_REG);
mem                66 drivers/char/hw_random/geode-rng.c 	void __iomem *mem = (void __iomem *)rng->priv;
mem                70 drivers/char/hw_random/geode-rng.c 		data = !!(readl(mem + GEODE_RNG_STATUS_REG));
mem                91 drivers/char/hw_random/geode-rng.c 	void __iomem *mem;
mem               107 drivers/char/hw_random/geode-rng.c 	mem = ioremap(rng_base, 0x58);
mem               108 drivers/char/hw_random/geode-rng.c 	if (!mem)
mem               110 drivers/char/hw_random/geode-rng.c 	geode_rng.priv = (unsigned long)mem;
mem               123 drivers/char/hw_random/geode-rng.c 	iounmap(mem);
mem               129 drivers/char/hw_random/geode-rng.c 	void __iomem *mem = (void __iomem *)geode_rng.priv;
mem               132 drivers/char/hw_random/geode-rng.c 	iounmap(mem);
mem               155 drivers/char/hw_random/intel-rng.c static inline u8 hwstatus_get(void __iomem *mem)
mem               157 drivers/char/hw_random/intel-rng.c 	return readb(mem + INTEL_RNG_HW_STATUS);
mem               160 drivers/char/hw_random/intel-rng.c static inline u8 hwstatus_set(void __iomem *mem,
mem               163 drivers/char/hw_random/intel-rng.c 	writeb(hw_status, mem + INTEL_RNG_HW_STATUS);
mem               164 drivers/char/hw_random/intel-rng.c 	return hwstatus_get(mem);
mem               169 drivers/char/hw_random/intel-rng.c 	void __iomem *mem = (void __iomem *)rng->priv;
mem               173 drivers/char/hw_random/intel-rng.c 		data = !!(readb(mem + INTEL_RNG_STATUS) &
mem               184 drivers/char/hw_random/intel-rng.c 	void __iomem *mem = (void __iomem *)rng->priv;
mem               186 drivers/char/hw_random/intel-rng.c 	*data = readb(mem + INTEL_RNG_DATA);
mem               193 drivers/char/hw_random/intel-rng.c 	void __iomem *mem = (void __iomem *)rng->priv;
mem               197 drivers/char/hw_random/intel-rng.c 	hw_status = hwstatus_get(mem);
mem               200 drivers/char/hw_random/intel-rng.c 		hw_status = hwstatus_set(mem, hw_status | INTEL_RNG_ENABLED);
mem               212 drivers/char/hw_random/intel-rng.c 	void __iomem *mem = (void __iomem *)rng->priv;
mem               215 drivers/char/hw_random/intel-rng.c 	hw_status = hwstatus_get(mem);
mem               217 drivers/char/hw_random/intel-rng.c 		hwstatus_set(mem, hw_status & ~INTEL_RNG_ENABLED);
mem               233 drivers/char/hw_random/intel-rng.c 	void __iomem *mem;
mem               258 drivers/char/hw_random/intel-rng.c 	writeb(INTEL_FWH_RESET_CMD, intel_rng_hw->mem);
mem               259 drivers/char/hw_random/intel-rng.c 	writeb(INTEL_FWH_READ_ID_CMD, intel_rng_hw->mem);
mem               260 drivers/char/hw_random/intel-rng.c 	mfc = readb(intel_rng_hw->mem + INTEL_FWH_MANUFACTURER_CODE_ADDRESS);
mem               261 drivers/char/hw_random/intel-rng.c 	dvc = readb(intel_rng_hw->mem + INTEL_FWH_DEVICE_CODE_ADDRESS);
mem               262 drivers/char/hw_random/intel-rng.c 	writeb(INTEL_FWH_RESET_CMD, intel_rng_hw->mem);
mem               320 drivers/char/hw_random/intel-rng.c 	intel_rng_hw->mem = ioremap_nocache(INTEL_FWH_ADDR, INTEL_FWH_ADDR_LEN);
mem               321 drivers/char/hw_random/intel-rng.c 	if (intel_rng_hw->mem == NULL)
mem               333 drivers/char/hw_random/intel-rng.c 	void __iomem *mem = mem;
mem               374 drivers/char/hw_random/intel-rng.c 	iounmap(intel_rng_hw->mem);
mem               381 drivers/char/hw_random/intel-rng.c 	mem = ioremap(INTEL_RNG_ADDR, INTEL_RNG_ADDR_LEN);
mem               382 drivers/char/hw_random/intel-rng.c 	if (!mem)
mem               384 drivers/char/hw_random/intel-rng.c 	intel_rng.priv = (unsigned long)mem;
mem               388 drivers/char/hw_random/intel-rng.c 	hw_status = hwstatus_get(mem);
mem               390 drivers/char/hw_random/intel-rng.c 		iounmap(mem);
mem               399 drivers/char/hw_random/intel-rng.c 		iounmap(mem);
mem               408 drivers/char/hw_random/intel-rng.c 	void __iomem *mem = (void __iomem *)intel_rng.priv;
mem               411 drivers/char/hw_random/intel-rng.c 	iounmap(mem);
mem               177 drivers/char/hw_random/ks-sa-rng.c 	struct resource		*mem;
mem               193 drivers/char/hw_random/ks-sa-rng.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               194 drivers/char/hw_random/ks-sa-rng.c 	ks_sa_rng->reg_rng = devm_ioremap_resource(dev, mem);
mem                57 drivers/char/hw_random/mxc-rnga.c 	void __iomem *mem;
mem                68 drivers/char/hw_random/mxc-rnga.c 		int level = (__raw_readl(mxc_rng->mem + RNGA_STATUS) &
mem                84 drivers/char/hw_random/mxc-rnga.c 	*data = __raw_readl(mxc_rng->mem + RNGA_OUTPUT_FIFO);
mem                87 drivers/char/hw_random/mxc-rnga.c 	err = __raw_readl(mxc_rng->mem + RNGA_STATUS) & RNGA_STATUS_ERROR_INT;
mem                92 drivers/char/hw_random/mxc-rnga.c 		ctrl = __raw_readl(mxc_rng->mem + RNGA_CONTROL);
mem                94 drivers/char/hw_random/mxc-rnga.c 					mxc_rng->mem + RNGA_CONTROL);
mem               106 drivers/char/hw_random/mxc-rnga.c 	ctrl = __raw_readl(mxc_rng->mem + RNGA_CONTROL);
mem               107 drivers/char/hw_random/mxc-rnga.c 	__raw_writel(ctrl & ~RNGA_CONTROL_SLEEP, mxc_rng->mem + RNGA_CONTROL);
mem               110 drivers/char/hw_random/mxc-rnga.c 	osc = __raw_readl(mxc_rng->mem + RNGA_STATUS);
mem               117 drivers/char/hw_random/mxc-rnga.c 	ctrl = __raw_readl(mxc_rng->mem + RNGA_CONTROL);
mem               118 drivers/char/hw_random/mxc-rnga.c 	__raw_writel(ctrl | RNGA_CONTROL_GO, mxc_rng->mem + RNGA_CONTROL);
mem               128 drivers/char/hw_random/mxc-rnga.c 	ctrl = __raw_readl(mxc_rng->mem + RNGA_CONTROL);
mem               131 drivers/char/hw_random/mxc-rnga.c 	__raw_writel(ctrl & ~RNGA_CONTROL_GO, mxc_rng->mem + RNGA_CONTROL);
mem               160 drivers/char/hw_random/mxc-rnga.c 	mxc_rng->mem = devm_platform_ioremap_resource(pdev, 0);
mem               161 drivers/char/hw_random/mxc-rnga.c 	if (IS_ERR(mxc_rng->mem)) {
mem               162 drivers/char/hw_random/mxc-rnga.c 		err = PTR_ERR(mxc_rng->mem);
mem               175 drivers/clk/axs10x/i2s_pll_clock.c 	struct resource *mem;
mem               181 drivers/clk/axs10x/i2s_pll_clock.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               182 drivers/clk/axs10x/i2s_pll_clock.c 	pll_clk->base = devm_ioremap_resource(dev, mem);
mem               224 drivers/clk/axs10x/pll_clock.c 	struct resource *mem;
mem               232 drivers/clk/axs10x/pll_clock.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               233 drivers/clk/axs10x/pll_clock.c 	pll_clk->base = devm_ioremap_resource(dev, mem);
mem               237 drivers/clk/axs10x/pll_clock.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 1);
mem               238 drivers/clk/axs10x/pll_clock.c 	pll_clk->lock = devm_ioremap_resource(dev, mem);
mem               415 drivers/clk/clk-axi-clkgen.c 	struct resource *mem;
mem               430 drivers/clk/clk-axi-clkgen.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               431 drivers/clk/clk-axi-clkgen.c 	axi_clkgen->base = devm_ioremap_resource(&pdev->dev, mem);
mem               302 drivers/clk/clk-hsdk-pll.c 	struct resource *mem;
mem               313 drivers/clk/clk-hsdk-pll.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               314 drivers/clk/clk-hsdk-pll.c 	pll_clk->regs = devm_ioremap_resource(dev, mem);
mem               241 drivers/clk/samsung/clk-s3c2410-dclk.c 	struct resource *mem;
mem               260 drivers/clk/samsung/clk-s3c2410-dclk.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               261 drivers/clk/samsung/clk-s3c2410-dclk.c 	s3c24xx_dclk->base = devm_ioremap_resource(&pdev->dev, mem);
mem               142 drivers/clk/sirf/clk-atlas6.c 	clk_register_clkdev(atlas6_clks[mem],  NULL, "mem");
mem               143 drivers/clk/sirf/clk-atlas6.c 	clk_register_clkdev(atlas6_clks[mem],  NULL, "osc");
mem               141 drivers/clk/sirf/clk-prima2.c 	clk_register_clkdev(prima2_clks[mem],  NULL, "mem");
mem               142 drivers/clk/sirf/clk-prima2.c 	clk_register_clkdev(prima2_clks[mem],  NULL, "osc");
mem              1882 drivers/clk/tegra/clk-dfll.c 	struct resource *mem;
mem              1932 drivers/clk/tegra/clk-dfll.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem              1933 drivers/clk/tegra/clk-dfll.c 	if (!mem) {
mem              1938 drivers/clk/tegra/clk-dfll.c 	td->base = devm_ioremap(td->dev, mem->start, resource_size(mem));
mem              1944 drivers/clk/tegra/clk-dfll.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 1);
mem              1945 drivers/clk/tegra/clk-dfll.c 	if (!mem) {
mem              1950 drivers/clk/tegra/clk-dfll.c 	td->i2c_base = devm_ioremap(td->dev, mem->start, resource_size(mem));
mem              1956 drivers/clk/tegra/clk-dfll.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 2);
mem              1957 drivers/clk/tegra/clk-dfll.c 	if (!mem) {
mem              1962 drivers/clk/tegra/clk-dfll.c 	td->i2c_controller_base = devm_ioremap(td->dev, mem->start,
mem              1963 drivers/clk/tegra/clk-dfll.c 					       resource_size(mem));
mem              1970 drivers/clk/tegra/clk-dfll.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 3);
mem              1971 drivers/clk/tegra/clk-dfll.c 	if (!mem) {
mem              1976 drivers/clk/tegra/clk-dfll.c 	td->lut_base = devm_ioremap(td->dev, mem->start, resource_size(mem));
mem                43 drivers/clk/ti/clk.c 	void __iomem *mem;
mem                57 drivers/clk/ti/clk.c 		writel_relaxed(val, io->mem + reg->offset);
mem                79 drivers/clk/ti/clk.c 		_clk_rmw(val, mask, io->mem + reg->offset);
mem                93 drivers/clk/ti/clk.c 		val = readl_relaxed(io->mem + reg->offset);
mem               321 drivers/clk/ti/clk.c 				   struct regmap *syscon, void __iomem *mem)
mem               341 drivers/clk/ti/clk.c 	io->mem = mem;
mem               355 drivers/clk/ti/clk.c void __init omap2_clk_legacy_provider_init(int index, void __iomem *mem)
mem               364 drivers/clk/ti/clk.c 	io->mem = mem;
mem               896 drivers/clocksource/sh_cmt.c 	struct resource *mem;
mem               898 drivers/clocksource/sh_cmt.c 	mem = platform_get_resource(cmt->pdev, IORESOURCE_MEM, 0);
mem               899 drivers/clocksource/sh_cmt.c 	if (!mem) {
mem               904 drivers/clocksource/sh_cmt.c 	cmt->mapbase = ioremap_nocache(mem->start, resource_size(mem));
mem               783 drivers/clocksource/timer-ti-dm.c 	struct resource *mem, *irq;
mem               805 drivers/clocksource/timer-ti-dm.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               806 drivers/clocksource/timer-ti-dm.c 	if (unlikely(!mem)) {
mem               816 drivers/clocksource/timer-ti-dm.c 	timer->io_base = devm_ioremap_resource(dev, mem);
mem               655 drivers/crypto/ccp/psp-dev.c 	void *id_blob, *mem;
mem               670 drivers/crypto/ccp/psp-dev.c 	mem = kzalloc(data_size + user_size, GFP_KERNEL);
mem               671 drivers/crypto/ccp/psp-dev.c 	if (!mem)
mem               674 drivers/crypto/ccp/psp-dev.c 	data = mem;
mem               675 drivers/crypto/ccp/psp-dev.c 	id_blob = mem + data_size;
mem               686 drivers/crypto/ccp/psp-dev.c 	kfree(mem);
mem                73 drivers/crypto/exynos-rng.c 	void __iomem			*mem;
mem                89 drivers/crypto/exynos-rng.c 	return readl_relaxed(rng->mem + offset);
mem                94 drivers/crypto/exynos-rng.c 	writel_relaxed(val, rng->mem + offset);
mem               165 drivers/crypto/exynos-rng.c 	memcpy_fromio(dst, rng->mem + EXYNOS_RNG_OUT_BASE, *read);
mem               291 drivers/crypto/exynos-rng.c 	rng->mem = devm_platform_ioremap_resource(pdev, 0);
mem               292 drivers/crypto/exynos-rng.c 	if (IS_ERR(rng->mem))
mem               293 drivers/crypto/exynos-rng.c 		return PTR_ERR(rng->mem);
mem                47 drivers/dma/acpi-dma.c 	resource_size_t mem = 0, irq = 0;
mem                60 drivers/dma/acpi-dma.c 			mem = rentry->res->start;
mem                68 drivers/dma/acpi-dma.c 	if (mem == 0 && irq == 0)
mem                74 drivers/dma/acpi-dma.c 	if (si->mmio_base_low != lower_32_bits(mem) ||
mem                75 drivers/dma/acpi-dma.c 	    si->mmio_base_high != upper_32_bits(mem) ||
mem              1108 drivers/dma/at_hdmac.c 			u32		mem;
mem              1114 drivers/dma/at_hdmac.c 			mem = sg_dma_address(sg);
mem              1122 drivers/dma/at_hdmac.c 			if (unlikely(mem & 3 || len & 3))
mem              1125 drivers/dma/at_hdmac.c 			desc->lli.saddr = mem;
mem              1149 drivers/dma/at_hdmac.c 			u32		mem;
mem              1155 drivers/dma/at_hdmac.c 			mem = sg_dma_address(sg);
mem              1163 drivers/dma/at_hdmac.c 			if (unlikely(mem & 3 || len & 3))
mem              1167 drivers/dma/at_hdmac.c 			desc->lli.daddr = mem;
mem               667 drivers/dma/at_xdmac.c 		u32			len, mem, dwidth, fixed_dwidth;
mem               670 drivers/dma/at_xdmac.c 		mem = sg_dma_address(sg);
mem               676 drivers/dma/at_xdmac.c 			 __func__, i, len, mem);
mem               689 drivers/dma/at_xdmac.c 			desc->lld.mbr_da = mem;
mem               691 drivers/dma/at_xdmac.c 			desc->lld.mbr_sa = mem;
mem               824 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	struct resource *mem;
mem               850 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               851 drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c 	chip->regs = devm_ioremap_resource(chip->dev, mem);
mem               665 drivers/dma/dw/core.c 			u32		len, mem;
mem               668 drivers/dma/dw/core.c 			mem = sg_dma_address(sg);
mem               671 drivers/dma/dw/core.c 			mem_width = __ffs(data_width | mem | len);
mem               680 drivers/dma/dw/core.c 			lli_write(desc, sar, mem);
mem               694 drivers/dma/dw/core.c 			mem += dlen;
mem               715 drivers/dma/dw/core.c 			u32		len, mem;
mem               718 drivers/dma/dw/core.c 			mem = sg_dma_address(sg);
mem               729 drivers/dma/dw/core.c 			lli_write(desc, dar, mem);
mem               731 drivers/dma/dw/core.c 			mem_width = __ffs(data_width | mem | dlen);
mem               743 drivers/dma/dw/core.c 			mem += dlen;
mem               632 drivers/dma/idma64.c 	struct resource *mem;
mem               643 drivers/dma/idma64.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               644 drivers/dma/idma64.c 	chip->regs = devm_ioremap_resource(dev, mem);
mem                85 drivers/dma/sh/rcar-dmac.c 		struct rcar_dmac_hw_desc *mem;
mem               703 drivers/dma/sh/rcar-dmac.c 	if (desc->hwdescs.mem) {
mem               705 drivers/dma/sh/rcar-dmac.c 				  desc->hwdescs.mem, desc->hwdescs.dma);
mem               706 drivers/dma/sh/rcar-dmac.c 		desc->hwdescs.mem = NULL;
mem               713 drivers/dma/sh/rcar-dmac.c 	desc->hwdescs.mem = dma_alloc_coherent(chan->chan.device->dev, size,
mem               715 drivers/dma/sh/rcar-dmac.c 	if (!desc->hwdescs.mem)
mem               729 drivers/dma/sh/rcar-dmac.c 	hwdesc = desc->hwdescs.mem;
mem              1816 drivers/dma/sh/rcar-dmac.c 	struct resource *mem;
mem              1851 drivers/dma/sh/rcar-dmac.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem              1852 drivers/dma/sh/rcar-dmac.c 	dmac->iomem = devm_ioremap_resource(&pdev->dev, mem);
mem               769 drivers/dma/sh/usb-dmac.c 	struct resource *mem;
mem               790 drivers/dma/sh/usb-dmac.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               791 drivers/dma/sh/usb-dmac.c 	dmac->iomem = devm_ioremap_resource(&pdev->dev, mem);
mem               185 drivers/dma/st_fdma.h 	readl((fchan)->fdev->slim_rproc->mem[ST_SLIM_DMEM].cpu_addr \
mem               190 drivers/dma/st_fdma.h 	writel((val), (fchan)->fdev->slim_rproc->mem[ST_SLIM_DMEM].cpu_addr \
mem               197 drivers/dma/st_fdma.h 	writel((val), (fchan)->fdev->slim_rproc->mem[ST_SLIM_DMEM].cpu_addr \
mem               207 drivers/dma/st_fdma.h 	readl((fchan)->fdev->slim_rproc->mem[ST_SLIM_DMEM].cpu_addr \
mem               212 drivers/dma/st_fdma.h 	writel((val), (fchan)->fdev->slim_rproc->mem[ST_SLIM_DMEM].cpu_addr \
mem              1065 drivers/dma/tegra20-apb-dma.c 		u32 len, mem;
mem              1067 drivers/dma/tegra20-apb-dma.c 		mem = sg_dma_address(sg);
mem              1070 drivers/dma/tegra20-apb-dma.c 		if ((len & 3) || (mem & 3) ||
mem              1089 drivers/dma/tegra20-apb-dma.c 		sg_req->ch_regs.ahb_ptr = mem;
mem              1134 drivers/dma/tegra20-apb-dma.c 	dma_addr_t mem = buf_addr;
mem              1223 drivers/dma/tegra20-apb-dma.c 		sg_req->ch_regs.ahb_ptr = mem;
mem              1235 drivers/dma/tegra20-apb-dma.c 		mem += len;
mem               278 drivers/dma/ti/cppi41.c static void cppi_writel(u32 val, void *__iomem *mem)
mem               280 drivers/dma/ti/cppi41.c 	__raw_writel(val, mem);
mem               283 drivers/dma/ti/cppi41.c static u32 cppi_readl(void *__iomem *mem)
mem               285 drivers/dma/ti/cppi41.c 	return __raw_readl(mem);
mem              1042 drivers/dma/ti/cppi41.c 	struct resource *mem;
mem              1075 drivers/dma/ti/cppi41.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, index);
mem              1076 drivers/dma/ti/cppi41.c 	cdd->ctrl_mem = devm_ioremap_resource(dev, mem);
mem              1080 drivers/dma/ti/cppi41.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, index + 1);
mem              1081 drivers/dma/ti/cppi41.c 	cdd->sched_mem = devm_ioremap_resource(dev, mem);
mem              1085 drivers/dma/ti/cppi41.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, index + 2);
mem              1086 drivers/dma/ti/cppi41.c 	cdd->qmgr_mem = devm_ioremap_resource(dev, mem);
mem              2257 drivers/dma/ti/edma.c 	struct resource		*mem;
mem              2303 drivers/dma/ti/edma.c 	mem = platform_get_resource_byname(pdev, IORESOURCE_MEM, "edma3_cc");
mem              2304 drivers/dma/ti/edma.c 	if (!mem) {
mem              2306 drivers/dma/ti/edma.c 		mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem              2307 drivers/dma/ti/edma.c 		if (!mem) {
mem              2312 drivers/dma/ti/edma.c 	ecc->base = devm_ioremap_resource(dev, mem);
mem               829 drivers/dma/txx9dmac.c 		dma_addr_t mem;
mem               838 drivers/dma/txx9dmac.c 		mem = sg_dma_address(sg);
mem               842 drivers/dma/txx9dmac.c 				desc->hwdesc.SAR = mem;
mem               846 drivers/dma/txx9dmac.c 				desc->hwdesc.DAR = mem;
mem               851 drivers/dma/txx9dmac.c 				desc->hwdesc32.SAR = mem;
mem               855 drivers/dma/txx9dmac.c 				desc->hwdesc32.DAR = mem;
mem               207 drivers/edac/thunderx_edac.c 	struct page *mem;
mem               344 drivers/edac/thunderx_edac.c 	addr = (uintptr_t)page_address(lmc->mem);
mem               345 drivers/edac/thunderx_edac.c 	phys = (uintptr_t)page_to_phys(lmc->mem);
mem               418 drivers/edac/thunderx_edac.c 	lmc->mem = alloc_pages_node(lmc->node, GFP_KERNEL, 0);
mem               419 drivers/edac/thunderx_edac.c 	if (!lmc->mem)
mem               424 drivers/edac/thunderx_edac.c 		__free_pages(lmc->mem, 0);
mem               428 drivers/edac/thunderx_edac.c 	addr = page_address(lmc->mem);
mem               444 drivers/edac/thunderx_edac.c 	__free_pages(lmc->mem, 0);
mem               198 drivers/firmware/arm_scmi/driver.c 				struct scmi_shared_mem __iomem *mem)
mem               200 drivers/firmware/arm_scmi/driver.c 	xfer->hdr.status = ioread32(mem->msg_payload);
mem               202 drivers/firmware/arm_scmi/driver.c 	xfer->rx.len = min_t(size_t, xfer->rx.len, ioread32(&mem->length) - 8);
mem               205 drivers/firmware/arm_scmi/driver.c 	memcpy_fromio(xfer->rx.buf, mem->msg_payload + 4, xfer->rx.len);
mem               248 drivers/firmware/arm_scmi/driver.c 	struct scmi_shared_mem __iomem *mem = cinfo->payload;
mem               256 drivers/firmware/arm_scmi/driver.c 	spin_until_cond(ioread32(&mem->channel_status) &
mem               259 drivers/firmware/arm_scmi/driver.c 	iowrite32(0x0, &mem->channel_status);
mem               261 drivers/firmware/arm_scmi/driver.c 		  &mem->flags);
mem               262 drivers/firmware/arm_scmi/driver.c 	iowrite32(sizeof(mem->msg_header) + t->tx.len, &mem->length);
mem               263 drivers/firmware/arm_scmi/driver.c 	iowrite32(pack_scmi_header(&t->hdr), &mem->msg_header);
mem               265 drivers/firmware/arm_scmi/driver.c 		memcpy_toio(mem->msg_payload, t->tx.buf, t->tx.len);
mem               356 drivers/firmware/arm_scmi/driver.c 	struct scmi_shared_mem __iomem *mem = cinfo->payload;
mem               358 drivers/firmware/arm_scmi/driver.c 	msg_hdr = ioread32(&mem->msg_header);
mem               375 drivers/firmware/arm_scmi/driver.c 	scmi_fetch_response(xfer, mem);
mem               399 drivers/firmware/arm_scmi/driver.c 	struct scmi_shared_mem __iomem *mem = cinfo->payload;
mem               400 drivers/firmware/arm_scmi/driver.c 	u16 xfer_id = MSG_XTRACT_TOKEN(ioread32(&mem->msg_header));
mem               405 drivers/firmware/arm_scmi/driver.c 	return ioread32(&mem->channel_status) &
mem               385 drivers/firmware/arm_scpi.c 			struct legacy_scpi_shared_mem __iomem *mem =
mem               391 drivers/firmware/arm_scpi.c 			match->status = ioread32(&mem->status);
mem               392 drivers/firmware/arm_scpi.c 			memcpy_fromio(match->rx_buf, mem->payload, len);
mem               394 drivers/firmware/arm_scpi.c 			struct scpi_shared_mem __iomem *mem = ch->rx_payload;
mem               398 drivers/firmware/arm_scpi.c 			match->status = ioread32(&mem->status);
mem               399 drivers/firmware/arm_scpi.c 			memcpy_fromio(match->rx_buf, mem->payload, len);
mem               412 drivers/firmware/arm_scpi.c 	struct scpi_shared_mem __iomem *mem = ch->rx_payload;
mem               416 drivers/firmware/arm_scpi.c 		cmd = ioread32(&mem->command);
mem               426 drivers/firmware/arm_scpi.c 	struct scpi_shared_mem __iomem *mem = ch->tx_payload;
mem               432 drivers/firmware/arm_scpi.c 			memcpy_toio(mem->payload, t->tx_buf, t->tx_len);
mem               445 drivers/firmware/arm_scpi.c 		iowrite32(t->cmd, &mem->command);
mem               216 drivers/firmware/efi/cper.c static int cper_mem_err_location(struct cper_mem_err_compact *mem, char *msg)
mem               225 drivers/firmware/efi/cper.c 	if (mem->validation_bits & CPER_MEM_VALID_NODE)
mem               226 drivers/firmware/efi/cper.c 		n += scnprintf(msg + n, len - n, "node: %d ", mem->node);
mem               227 drivers/firmware/efi/cper.c 	if (mem->validation_bits & CPER_MEM_VALID_CARD)
mem               228 drivers/firmware/efi/cper.c 		n += scnprintf(msg + n, len - n, "card: %d ", mem->card);
mem               229 drivers/firmware/efi/cper.c 	if (mem->validation_bits & CPER_MEM_VALID_MODULE)
mem               230 drivers/firmware/efi/cper.c 		n += scnprintf(msg + n, len - n, "module: %d ", mem->module);
mem               231 drivers/firmware/efi/cper.c 	if (mem->validation_bits & CPER_MEM_VALID_RANK_NUMBER)
mem               232 drivers/firmware/efi/cper.c 		n += scnprintf(msg + n, len - n, "rank: %d ", mem->rank);
mem               233 drivers/firmware/efi/cper.c 	if (mem->validation_bits & CPER_MEM_VALID_BANK)
mem               234 drivers/firmware/efi/cper.c 		n += scnprintf(msg + n, len - n, "bank: %d ", mem->bank);
mem               235 drivers/firmware/efi/cper.c 	if (mem->validation_bits & CPER_MEM_VALID_DEVICE)
mem               236 drivers/firmware/efi/cper.c 		n += scnprintf(msg + n, len - n, "device: %d ", mem->device);
mem               237 drivers/firmware/efi/cper.c 	if (mem->validation_bits & CPER_MEM_VALID_ROW)
mem               238 drivers/firmware/efi/cper.c 		n += scnprintf(msg + n, len - n, "row: %d ", mem->row);
mem               239 drivers/firmware/efi/cper.c 	if (mem->validation_bits & CPER_MEM_VALID_COLUMN)
mem               240 drivers/firmware/efi/cper.c 		n += scnprintf(msg + n, len - n, "column: %d ", mem->column);
mem               241 drivers/firmware/efi/cper.c 	if (mem->validation_bits & CPER_MEM_VALID_BIT_POSITION)
mem               243 drivers/firmware/efi/cper.c 			       mem->bit_pos);
mem               244 drivers/firmware/efi/cper.c 	if (mem->validation_bits & CPER_MEM_VALID_REQUESTOR_ID)
mem               246 drivers/firmware/efi/cper.c 			       mem->requestor_id);
mem               247 drivers/firmware/efi/cper.c 	if (mem->validation_bits & CPER_MEM_VALID_RESPONDER_ID)
mem               249 drivers/firmware/efi/cper.c 			       mem->responder_id);
mem               250 drivers/firmware/efi/cper.c 	if (mem->validation_bits & CPER_MEM_VALID_TARGET_ID)
mem               252 drivers/firmware/efi/cper.c 			  mem->target_id);
mem               258 drivers/firmware/efi/cper.c static int cper_dimm_err_location(struct cper_mem_err_compact *mem, char *msg)
mem               263 drivers/firmware/efi/cper.c 	if (!msg || !(mem->validation_bits & CPER_MEM_VALID_MODULE_HANDLE))
mem               268 drivers/firmware/efi/cper.c 	dmi_memdev_name(mem->mem_dev_handle, &bank, &device);
mem               274 drivers/firmware/efi/cper.c 			     mem->mem_dev_handle);
mem               280 drivers/firmware/efi/cper.c void cper_mem_err_pack(const struct cper_sec_mem_err *mem,
mem               283 drivers/firmware/efi/cper.c 	cmem->validation_bits = mem->validation_bits;
mem               284 drivers/firmware/efi/cper.c 	cmem->node = mem->node;
mem               285 drivers/firmware/efi/cper.c 	cmem->card = mem->card;
mem               286 drivers/firmware/efi/cper.c 	cmem->module = mem->module;
mem               287 drivers/firmware/efi/cper.c 	cmem->bank = mem->bank;
mem               288 drivers/firmware/efi/cper.c 	cmem->device = mem->device;
mem               289 drivers/firmware/efi/cper.c 	cmem->row = mem->row;
mem               290 drivers/firmware/efi/cper.c 	cmem->column = mem->column;
mem               291 drivers/firmware/efi/cper.c 	cmem->bit_pos = mem->bit_pos;
mem               292 drivers/firmware/efi/cper.c 	cmem->requestor_id = mem->requestor_id;
mem               293 drivers/firmware/efi/cper.c 	cmem->responder_id = mem->responder_id;
mem               294 drivers/firmware/efi/cper.c 	cmem->target_id = mem->target_id;
mem               295 drivers/firmware/efi/cper.c 	cmem->rank = mem->rank;
mem               296 drivers/firmware/efi/cper.c 	cmem->mem_array_handle = mem->mem_array_handle;
mem               297 drivers/firmware/efi/cper.c 	cmem->mem_dev_handle = mem->mem_dev_handle;
mem               314 drivers/firmware/efi/cper.c static void cper_print_mem(const char *pfx, const struct cper_sec_mem_err *mem,
mem               321 drivers/firmware/efi/cper.c 	    (mem->validation_bits & ~(CPER_MEM_VALID_RANK_NUMBER - 1))) {
mem               325 drivers/firmware/efi/cper.c 	if (mem->validation_bits & CPER_MEM_VALID_ERROR_STATUS)
mem               326 drivers/firmware/efi/cper.c 		printk("%s""error_status: 0x%016llx\n", pfx, mem->error_status);
mem               327 drivers/firmware/efi/cper.c 	if (mem->validation_bits & CPER_MEM_VALID_PA)
mem               329 drivers/firmware/efi/cper.c 		       pfx, mem->physical_addr);
mem               330 drivers/firmware/efi/cper.c 	if (mem->validation_bits & CPER_MEM_VALID_PA_MASK)
mem               332 drivers/firmware/efi/cper.c 		       pfx, mem->physical_addr_mask);
mem               333 drivers/firmware/efi/cper.c 	cper_mem_err_pack(mem, &cmem);
mem               336 drivers/firmware/efi/cper.c 	if (mem->validation_bits & CPER_MEM_VALID_ERROR_TYPE) {
mem               337 drivers/firmware/efi/cper.c 		u8 etype = mem->error_type;
mem               258 drivers/firmware/efi/memmap.c 			      struct efi_mem_range *mem)
mem               266 drivers/firmware/efi/memmap.c 	m_start = mem->range.start;
mem               267 drivers/firmware/efi/memmap.c 	m_end = mem->range.end;
mem               268 drivers/firmware/efi/memmap.c 	m_attr = mem->attribute;
mem                35 drivers/gpio/gpio-menz127.c 	struct resource *mem;
mem               142 drivers/gpio/gpio-menz127.c 	men_z127_gpio->mem = mcb_request_mem(mdev, dev_name(dev));
mem               143 drivers/gpio/gpio-menz127.c 	if (IS_ERR(men_z127_gpio->mem)) {
mem               145 drivers/gpio/gpio-menz127.c 		return PTR_ERR(men_z127_gpio->mem);
mem               148 drivers/gpio/gpio-menz127.c 	men_z127_gpio->reg_base = ioremap(men_z127_gpio->mem->start,
mem               149 drivers/gpio/gpio-menz127.c 					  resource_size(men_z127_gpio->mem));
mem               181 drivers/gpio/gpio-menz127.c 	mcb_release_mem(men_z127_gpio->mem);
mem               191 drivers/gpio/gpio-menz127.c 	mcb_release_mem(men_z127_gpio->mem);
mem               707 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c int amdgpu_amdkfd_evict_userptr(struct kgd_mem *mem, struct mm_struct *mm)
mem               133 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h int amdgpu_amdkfd_evict_userptr(struct kgd_mem *mem, struct mm_struct *mm);
mem               161 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h int amdgpu_amdkfd_add_gws_to_process(void *info, void *gws, struct kgd_mem **mem);
mem               162 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h int amdgpu_amdkfd_remove_gws_from_process(void *info, void *mem);
mem               212 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h 		void *vm, struct kgd_mem **mem,
mem               215 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h 		struct kgd_dev *kgd, struct kgd_mem *mem);
mem               217 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h 		struct kgd_dev *kgd, struct kgd_mem *mem, void *vm);
mem               219 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h 		struct kgd_dev *kgd, struct kgd_mem *mem, void *vm);
mem               221 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h 		struct kgd_dev *kgd, struct kgd_mem *mem, bool intr);
mem               223 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h 		struct kgd_mem *mem, void **kptr, uint64_t *size);
mem               233 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h 				      struct kgd_mem **mem, uint64_t *size,
mem                84 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		struct kgd_mem *mem)
mem                88 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	list_for_each_entry(entry, &mem->bo_va_list, bo_list)
mem               102 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	uint64_t mem;
mem               105 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mem = si.totalram - si.totalhigh;
mem               106 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mem *= si.mem_unit;
mem               109 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	kfd_mem_limit.max_system_mem_limit = (mem >> 1) + (mem >> 2);
mem               110 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	kfd_mem_limit.max_ttm_mem_limit = (mem >> 1) - (mem >> 3);
mem               371 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c static int add_bo_to_vm(struct amdgpu_device *adev, struct kgd_mem *mem,
mem               377 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *bo = mem->bo;
mem               378 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	uint64_t va = mem->va;
mem               379 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct list_head *list_bo_va = &mem->bo_va_list;
mem               380 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	unsigned long bo_size = bo->tbo.mem.size;
mem               408 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 							 mem->mapping_flags);
mem               443 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c static void add_kgd_mem_to_kfd_bo_list(struct kgd_mem *mem,
mem               447 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct ttm_validate_buffer *entry = &mem->validate_list;
mem               448 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *bo = mem->bo;
mem               461 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c static void remove_kgd_mem_from_kfd_bo_list(struct kgd_mem *mem,
mem               466 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	bo_list_entry = &mem->validate_list;
mem               484 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c static int init_user_pages(struct kgd_mem *mem, struct mm_struct *mm,
mem               487 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdkfd_process_info *process_info = mem->process_info;
mem               488 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *bo = mem->bo;
mem               518 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_bo_placement_from_domain(bo, mem->domain);
mem               561 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c static int reserve_bo_and_vm(struct kgd_mem *mem,
mem               565 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *bo = mem->bo;
mem               572 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->sync = &mem->sync;
mem               611 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c static int reserve_bo_and_cond_vms(struct kgd_mem *mem,
mem               615 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *bo = mem->bo;
mem               623 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->sync = &mem->sync;
mem               628 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	list_for_each_entry(entry, &mem->bo_va_list, bo_list) {
mem               650 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	list_for_each_entry(entry, &mem->bo_va_list, bo_list) {
mem              1072 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		void *vm, struct kgd_mem **mem,
mem              1122 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	*mem = kzalloc(sizeof(struct kgd_mem), GFP_KERNEL);
mem              1123 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (!*mem) {
mem              1127 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	INIT_LIST_HEAD(&(*mem)->bo_va_list);
mem              1128 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_init(&(*mem)->lock);
mem              1129 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->aql_queue = !!(flags & ALLOC_MEM_FLAGS_AQL_QUEUE_MEM);
mem              1135 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if ((*mem)->aql_queue)
mem              1156 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->mapping_flags = mapping_flags;
mem              1158 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_sync_create(&(*mem)->sync);
mem              1186 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	bo->kfd_bo = *mem;
mem              1187 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->bo = bo;
mem              1191 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->va = va;
mem              1192 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->domain = domain;
mem              1193 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->mapped_to_gpu_memory = 0;
mem              1194 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->process_info = avm->process_info;
mem              1195 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	add_kgd_mem_to_kfd_bo_list(*mem, avm->process_info, user_addr);
mem              1198 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		ret = init_user_pages(*mem, current->mm, user_addr);
mem              1209 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	remove_kgd_mem_from_kfd_bo_list(*mem, avm->process_info);
mem              1216 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_destroy(&(*mem)->lock);
mem              1217 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	kfree(*mem);
mem              1227 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		struct kgd_dev *kgd, struct kgd_mem *mem)
mem              1229 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdkfd_process_info *process_info = mem->process_info;
mem              1230 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	unsigned long bo_size = mem->bo->tbo.mem.size;
mem              1236 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_lock(&mem->lock);
mem              1238 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (mem->mapped_to_gpu_memory > 0) {
mem              1240 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 				mem->va, bo_size);
mem              1241 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		mutex_unlock(&mem->lock);
mem              1245 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_unlock(&mem->lock);
mem              1251 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_mn_unregister(mem->bo);
mem              1254 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	bo_list_entry = &mem->validate_list;
mem              1259 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = reserve_bo_and_cond_vms(mem, NULL, BO_VM_ALL, &ctx);
mem              1267 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_amdkfd_remove_eviction_fence(mem->bo,
mem              1269 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	pr_debug("Release VA 0x%llx - 0x%llx\n", mem->va,
mem              1270 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		mem->va + bo_size * (1 + mem->aql_queue));
mem              1273 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	list_for_each_entry_safe(entry, tmp, &mem->bo_va_list, bo_list)
mem              1280 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_sync_free(&mem->sync);
mem              1285 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (mem->bo->tbo.sg) {
mem              1286 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		sg_free_table(mem->bo->tbo.sg);
mem              1287 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		kfree(mem->bo->tbo.sg);
mem              1291 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	drm_gem_object_put_unlocked(&mem->bo->tbo.base);
mem              1292 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_destroy(&mem->lock);
mem              1293 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	kfree(mem);
mem              1299 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		struct kgd_dev *kgd, struct kgd_mem *mem, void *vm)
mem              1313 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	bo = mem->bo;
mem              1323 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_lock(&mem->process_info->lock);
mem              1331 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		is_invalid_userptr = atomic_read(&mem->invalid);
mem              1335 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_lock(&mem->lock);
mem              1337 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	domain = mem->domain;
mem              1338 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	bo_size = bo->tbo.mem.size;
mem              1341 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 			mem->va,
mem              1342 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 			mem->va + bo_size * (1 + mem->aql_queue),
mem              1345 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = reserve_bo_and_vm(mem, vm, &ctx);
mem              1355 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	    bo->tbo.mem.mem_type == TTM_PL_SYSTEM)
mem              1358 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (check_if_add_bo_to_vm(avm, mem)) {
mem              1359 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		ret = add_bo_to_vm(adev, mem, avm, false,
mem              1363 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		if (mem->aql_queue) {
mem              1364 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 			ret = add_bo_to_vm(adev, mem, avm,
mem              1375 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (mem->mapped_to_gpu_memory == 0 &&
mem              1388 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	list_for_each_entry(entry, &mem->bo_va_list, bo_list) {
mem              1408 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 			mem->mapped_to_gpu_memory++;
mem              1410 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 					mem->mapped_to_gpu_memory);
mem              1431 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_unlock(&mem->process_info->lock);
mem              1432 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_unlock(&mem->lock);
mem              1437 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		struct kgd_dev *kgd, struct kgd_mem *mem, void *vm)
mem              1442 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	unsigned long bo_size = mem->bo->tbo.mem.size;
mem              1447 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_lock(&mem->lock);
mem              1449 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = reserve_bo_and_cond_vms(mem, vm, BO_VM_MAPPED, &ctx);
mem              1463 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		mem->va,
mem              1464 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		mem->va + bo_size * (1 + mem->aql_queue),
mem              1467 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	list_for_each_entry(entry, &mem->bo_va_list, bo_list) {
mem              1479 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 						mem->va);
mem              1483 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 			mem->mapped_to_gpu_memory--;
mem              1485 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 					mem->mapped_to_gpu_memory);
mem              1492 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (mem->mapped_to_gpu_memory == 0 &&
mem              1493 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	    !amdgpu_ttm_tt_get_usermm(mem->bo->tbo.ttm) && !mem->bo->pin_count)
mem              1494 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		amdgpu_amdkfd_remove_eviction_fence(mem->bo,
mem              1500 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_unlock(&mem->lock);
mem              1505 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		struct kgd_dev *kgd, struct kgd_mem *mem, bool intr)
mem              1512 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_lock(&mem->lock);
mem              1513 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_sync_clone(&mem->sync, &sync);
mem              1514 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_unlock(&mem->lock);
mem              1522 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		struct kgd_mem *mem, void **kptr, uint64_t *size)
mem              1525 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdgpu_bo *bo = mem->bo;
mem              1535 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_lock(&mem->process_info->lock);
mem              1556 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		bo, mem->process_info->eviction_fence);
mem              1557 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	list_del_init(&mem->validate_list.head);
mem              1564 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_unlock(&mem->process_info->lock);
mem              1572 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_unlock(&mem->process_info->lock);
mem              1578 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 					      struct kfd_vm_fault_info *mem)
mem              1584 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		*mem = *adev->gmc.vm_fault_info;
mem              1594 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 				      struct kgd_mem **mem, uint64_t *size,
mem              1617 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	*mem = kzalloc(sizeof(struct kgd_mem), GFP_KERNEL);
mem              1618 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (!*mem)
mem              1627 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	INIT_LIST_HEAD(&(*mem)->bo_va_list);
mem              1628 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_init(&(*mem)->lock);
mem              1629 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->mapping_flags =
mem              1634 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->bo = bo;
mem              1635 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->va = va;
mem              1636 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->domain = (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM) ?
mem              1638 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->mapped_to_gpu_memory = 0;
mem              1639 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->process_info = avm->process_info;
mem              1640 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	add_kgd_mem_to_kfd_bo_list(*mem, avm->process_info, false);
mem              1641 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_sync_create(&(*mem)->sync);
mem              1657 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c int amdgpu_amdkfd_evict_userptr(struct kgd_mem *mem,
mem              1660 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct amdkfd_process_info *process_info = mem->process_info;
mem              1664 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	invalid = atomic_inc_return(&mem->invalid);
mem              1687 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct kgd_mem *mem, *tmp_mem;
mem              1695 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	list_for_each_entry_safe(mem, tmp_mem,
mem              1698 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		if (!atomic_read(&mem->invalid))
mem              1701 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		bo = mem->bo;
mem              1714 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		list_move_tail(&mem->validate_list.head,
mem              1722 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	list_for_each_entry(mem, &process_info->userptr_inval_list,
mem              1724 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		invalid = atomic_read(&mem->invalid);
mem              1731 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		bo = mem->bo;
mem              1748 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		if (atomic_cmpxchg(&mem->invalid, invalid, 0) != invalid)
mem              1769 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct kgd_mem *mem, *tmp_mem;
mem              1793 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	list_for_each_entry(mem, &process_info->userptr_inval_list,
mem              1795 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		list_add_tail(&mem->resv_list.head, &resv_list);
mem              1796 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		mem->resv_list.bo = mem->validate_list.bo;
mem              1797 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		mem->resv_list.num_shared = mem->validate_list.num_shared;
mem              1814 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	list_for_each_entry_safe(mem, tmp_mem,
mem              1819 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		bo = mem->bo;
mem              1823 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 			amdgpu_bo_placement_from_domain(bo, mem->domain);
mem              1831 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		list_move_tail(&mem->validate_list.head,
mem              1840 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		list_for_each_entry(bo_va_entry, &mem->bo_va_list, bo_list) {
mem              1850 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 				atomic_inc(&mem->invalid);
mem              1965 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct kgd_mem *mem;
mem              1991 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	list_for_each_entry(mem, &process_info->kfd_bo_list,
mem              1994 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		list_add_tail(&mem->resv_list.head, &ctx.list);
mem              1995 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		mem->resv_list.bo = mem->validate_list.bo;
mem              1996 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		mem->resv_list.num_shared = mem->validate_list.num_shared;
mem              2020 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	list_for_each_entry(mem, &process_info->kfd_bo_list,
mem              2023 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		struct amdgpu_bo *bo = mem->bo;
mem              2024 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		uint32_t domain = mem->domain;
mem              2037 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		list_for_each_entry(bo_va_entry, &mem->bo_va_list,
mem              2077 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	list_for_each_entry(mem, &process_info->kfd_bo_list,
mem              2079 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		amdgpu_bo_fence(mem->bo,
mem              2099 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c int amdgpu_amdkfd_add_gws_to_process(void *info, void *gws, struct kgd_mem **mem)
mem              2108 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	*mem = kzalloc(sizeof(struct kgd_mem), GFP_KERNEL);
mem              2109 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (!*mem)
mem              2112 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_init(&(*mem)->lock);
mem              2113 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->bo = amdgpu_bo_ref(gws_bo);
mem              2114 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->domain = AMDGPU_GEM_DOMAIN_GWS;
mem              2115 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	(*mem)->process_info = process_info;
mem              2116 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	add_kgd_mem_to_kfd_bo_list(*mem, process_info, false);
mem              2117 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_sync_create(&(*mem)->sync);
mem              2121 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_lock(&(*mem)->process_info->lock);
mem              2142 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_unlock(&(*mem)->process_info->lock);
mem              2150 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_unlock(&(*mem)->process_info->lock);
mem              2151 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_sync_free(&(*mem)->sync);
mem              2152 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	remove_kgd_mem_from_kfd_bo_list(*mem, process_info);
mem              2154 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	mutex_destroy(&(*mem)->lock);
mem              2155 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	kfree(*mem);
mem              2156 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	*mem = NULL;
mem              2160 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c int amdgpu_amdkfd_remove_gws_from_process(void *info, void *mem)
mem              2164 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct kgd_mem *kgd_mem = (struct kgd_mem *)mem;
mem              2184 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	kfree(mem);
mem               480 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		other = amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type);
mem              3408 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c 		if (shadow->tbo.mem.mem_type != TTM_PL_TT ||
mem              3409 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c 		    shadow->tbo.mem.start == AMDGPU_BO_INVALID_OFFSET ||
mem              3410 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c 		    shadow->parent->tbo.mem.mem_type != TTM_PL_VRAM)
mem               695 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 		info.alignment = robj->tbo.mem.page_alignment << PAGE_SHIFT;
mem               805 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	domain = amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type);
mem                47 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	switch (bo->tbo.mem.mem_type) {
mem                59 drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c 	*flags = amdgpu_ttm_tt_pde_flags(bo->tbo.ttm, &bo->tbo.mem);
mem               151 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c bool amdgpu_gtt_mgr_has_gart_addr(struct ttm_mem_reg *mem)
mem               153 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 	struct amdgpu_gtt_node *node = mem->mm_node;
mem               171 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 				struct ttm_mem_reg *mem)
mem               175 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 	struct amdgpu_gtt_node *node = mem->mm_node;
mem               180 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 	if (amdgpu_gtt_mgr_has_gart_addr(mem))
mem               198 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 	r = drm_mm_insert_node_in_range(&mgr->mm, &node->node, mem->num_pages,
mem               199 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 					mem->page_alignment, 0, fpfn, lpfn,
mem               204 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 		mem->start = node->node.start;
mem               222 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 			      struct ttm_mem_reg *mem)
mem               229 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 	if ((&tbo->mem == mem || tbo->mem.mem_type != TTM_PL_TT) &&
mem               230 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 	    atomic64_read(&mgr->available) < mem->num_pages) {
mem               234 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 	atomic64_sub(mem->num_pages, &mgr->available);
mem               244 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 	node->node.size = mem->num_pages;
mem               246 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 	mem->mm_node = node;
mem               249 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 		r = amdgpu_gtt_mgr_alloc(man, tbo, place, mem);
mem               252 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 			mem->mm_node = NULL;
mem               257 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 		mem->start = node->node.start;
mem               262 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 	atomic64_add(mem->num_pages, &mgr->available);
mem               278 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 			       struct ttm_mem_reg *mem)
mem               281 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 	struct amdgpu_gtt_node *node = mem->mm_node;
mem               290 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 	atomic64_add(mem->num_pages, &mgr->available);
mem               293 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c 	mem->mm_node = NULL;
mem               596 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		struct drm_amdgpu_memory_info mem;
mem               598 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		memset(&mem, 0, sizeof(mem));
mem               599 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		mem.vram.total_heap_size = adev->gmc.real_vram_size;
mem               600 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		mem.vram.usable_heap_size = adev->gmc.real_vram_size -
mem               602 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		mem.vram.heap_usage =
mem               604 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		mem.vram.max_allocation = mem.vram.usable_heap_size * 3 / 4;
mem               606 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		mem.cpu_accessible_vram.total_heap_size =
mem               608 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		mem.cpu_accessible_vram.usable_heap_size = adev->gmc.visible_vram_size -
mem               610 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		mem.cpu_accessible_vram.heap_usage =
mem               612 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		mem.cpu_accessible_vram.max_allocation =
mem               613 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 			mem.cpu_accessible_vram.usable_heap_size * 3 / 4;
mem               615 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		mem.gtt.total_heap_size = adev->mman.bdev.man[TTM_PL_TT].size;
mem               616 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		mem.gtt.total_heap_size *= PAGE_SIZE;
mem               617 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		mem.gtt.usable_heap_size = mem.gtt.total_heap_size -
mem               619 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		mem.gtt.heap_usage =
mem               621 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		mem.gtt.max_allocation = mem.gtt.usable_heap_size * 3 / 4;
mem               623 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 		return copy_to_user(out, &mem,
mem               624 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 				    min((size_t)size, sizeof(mem)))
mem               277 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 			struct kgd_mem *mem = bo->kfd_bo;
mem               281 drivers/gpu/drm/amd/amdgpu/amdgpu_mn.c 				amdgpu_amdkfd_evict_userptr(mem, amn->mm);
mem                66 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->tbo.mem.mem_type == TTM_PL_VRAM) {
mem                70 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	} else if (bo->tbo.mem.mem_type == TTM_PL_TT) {
mem               385 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	ttm_bo_mem_put(&(*bo_ptr)->tbo, &(*bo_ptr)->tbo.mem);
mem               387 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 			     &(*bo_ptr)->tbo.mem, &ctx);
mem               582 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	    bo->tbo.mem.mem_type == TTM_PL_VRAM &&
mem               583 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	    bo->tbo.mem.start < adev->gmc.visible_vram_size >> PAGE_SHIFT)
mem               590 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	    bo->tbo.mem.placement & TTM_PL_FLAG_VRAM) {
mem               909 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		uint32_t mem_type = bo->tbo.mem.mem_type;
mem               952 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	domain = amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type);
mem              1261 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct ttm_mem_reg *old_mem = &bo->mem;
mem              1304 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->mem.mem_type != TTM_PL_VRAM || !bo->mem.mm_node ||
mem              1346 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->mem.mem_type != TTM_PL_VRAM)
mem              1349 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	size = bo->mem.num_pages << PAGE_SHIFT;
mem              1350 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	offset = bo->mem.start << PAGE_SHIFT;
mem              1371 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	offset = bo->mem.start << PAGE_SHIFT;
mem              1373 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	if (bo->mem.mem_type == TTM_PL_VRAM &&
mem              1435 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	WARN_ON_ONCE(bo->tbo.mem.mem_type == TTM_PL_SYSTEM);
mem              1438 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	WARN_ON_ONCE(bo->tbo.mem.start == AMDGPU_BO_INVALID_OFFSET);
mem              1439 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	WARN_ON_ONCE(bo->tbo.mem.mem_type == TTM_PL_VRAM &&
mem               183 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h 	return (bo->tbo.mem.page_alignment << PAGE_SHIFT) / AMDGPU_GPU_PAGE_SIZE;
mem               204 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h 	struct drm_mm_node *node = bo->tbo.mem.mm_node;
mem               207 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h 	if (bo->tbo.mem.mem_type != TTM_PL_VRAM)
mem               210 drivers/gpu/drm/amd/amdgpu/amdgpu_object.h 	for (pages_left = bo->tbo.mem.num_pages; pages_left;
mem               131 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			   __entry->type = bo->tbo.mem.mem_type;
mem                61 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			     struct ttm_mem_reg *mem, unsigned num_pages,
mem               169 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	switch (bo->mem.mem_type) {
mem               246 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
mem               263 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 				    struct ttm_mem_reg *mem)
mem               269 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		addr += bo->bdev->man[mem->mem_type].gpu_offset;
mem               282 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static struct drm_mm_node *amdgpu_find_mm_node(struct ttm_mem_reg *mem,
mem               285 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct drm_mm_node *mm_node = mem->mm_node;
mem               324 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	src_mm = amdgpu_find_mm_node(src->mem, &src->offset);
mem               325 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	src_node_start = amdgpu_mm_node_addr(src->bo, src_mm, src->mem) +
mem               330 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	dst_mm = amdgpu_find_mm_node(dst->mem, &dst->offset);
mem               331 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	dst_node_start = amdgpu_mm_node_addr(dst->bo, dst_mm, dst->mem) +
mem               355 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		if (src->mem->start == AMDGPU_BO_INVALID_OFFSET) {
mem               356 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			r = amdgpu_map_buffer(src->bo, src->mem,
mem               368 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		if (dst->mem->start == AMDGPU_BO_INVALID_OFFSET) {
mem               369 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			r = amdgpu_map_buffer(dst->bo, dst->mem,
mem               393 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 							     src->mem);
mem               403 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 							     dst->mem);
mem               437 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	src.mem = old_mem;
mem               438 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	dst.mem = new_mem;
mem               489 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
mem               548 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
mem               594 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			       struct ttm_mem_reg *mem)
mem               596 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct drm_mm_node *nodes = mem->mm_node;
mem               598 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (mem->mem_type == TTM_PL_SYSTEM ||
mem               599 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	    mem->mem_type == TTM_PL_TT)
mem               601 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (mem->mem_type != TTM_PL_VRAM)
mem               605 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (nodes->size != mem->num_pages)
mem               623 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
mem               705 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static int amdgpu_ttm_io_mem_reserve(struct ttm_bo_device *bdev, struct ttm_mem_reg *mem)
mem               707 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];
mem               709 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct drm_mm_node *mm_node = mem->mm_node;
mem               711 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	mem->bus.addr = NULL;
mem               712 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	mem->bus.offset = 0;
mem               713 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	mem->bus.size = mem->num_pages << PAGE_SHIFT;
mem               714 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	mem->bus.base = 0;
mem               715 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	mem->bus.is_iomem = false;
mem               718 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	switch (mem->mem_type) {
mem               725 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		mem->bus.offset = mem->start << PAGE_SHIFT;
mem               727 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		if ((mem->bus.offset + mem->bus.size) > adev->gmc.visible_vram_size)
mem               734 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		    (mm_node->size == mem->num_pages))
mem               735 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			mem->bus.addr = (u8 *)adev->mman.aper_base_kaddr +
mem               736 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 					mem->bus.offset;
mem               738 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		mem->bus.base = adev->gmc.aper_base;
mem               739 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		mem->bus.is_iomem = true;
mem               747 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c static void amdgpu_ttm_io_mem_free(struct ttm_bo_device *bdev, struct ttm_mem_reg *mem)
mem               757 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	mm = amdgpu_find_mm_node(&bo->mem, &offset);
mem               758 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	return (bo->mem.bus.base >> PAGE_SHIFT) + mm->start +
mem              1105 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (bo->mem.start != AMDGPU_BO_INVALID_OFFSET)
mem              1110 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		bo->mem.start = addr >> PAGE_SHIFT;
mem              1114 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		tmp = bo->mem;
mem              1122 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		placements.flags = (bo->mem.placement & ~TTM_PL_MASK_MEM) |
mem              1140 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		ttm_bo_mem_put(bo, &bo->mem);
mem              1141 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		bo->mem = tmp;
mem              1144 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	bo->offset = (bo->mem.start << PAGE_SHIFT) +
mem              1145 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		bo->bdev->man[bo->mem.mem_type].gpu_offset;
mem              1165 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	flags = amdgpu_ttm_tt_pte_flags(adev, tbo->ttm, &tbo->mem);
mem              1426 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c uint64_t amdgpu_ttm_tt_pde_flags(struct ttm_tt *ttm, struct ttm_mem_reg *mem)
mem              1430 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (mem && mem->mem_type != TTM_PL_SYSTEM)
mem              1433 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (mem && mem->mem_type == TTM_PL_TT) {
mem              1452 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 				 struct ttm_mem_reg *mem)
mem              1454 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	uint64_t flags = amdgpu_ttm_tt_pde_flags(ttm, mem);
mem              1477 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	unsigned long num_pages = bo->mem.num_pages;
mem              1478 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct drm_mm_node *node = bo->mem.mm_node;
mem              1504 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	switch (bo->mem.mem_type) {
mem              1551 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (bo->mem.mem_type != TTM_PL_VRAM)
mem              1554 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	nodes = amdgpu_find_mm_node(&abo->tbo.mem, &offset);
mem              1898 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			     struct ttm_mem_reg *mem, unsigned num_pages,
mem              1943 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	flags = amdgpu_ttm_tt_pte_flags(adev, ttm, mem);
mem              2058 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (bo->tbo.mem.mem_type == TTM_PL_TT) {
mem              2065 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	mm_node = bo->tbo.mem.mm_node;
mem              2093 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	mm_node = bo->tbo.mem.mm_node;
mem              2099 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		dst_addr = amdgpu_mm_node_addr(&bo->tbo, mm_node, &bo->tbo.mem);
mem                65 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h 	struct ttm_mem_reg		*mem;
mem                72 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h bool amdgpu_gtt_mgr_has_gart_addr(struct ttm_mem_reg *mem);
mem               132 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h uint64_t amdgpu_ttm_tt_pde_flags(struct ttm_tt *ttm, struct ttm_mem_reg *mem);
mem               134 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.h 				 struct ttm_mem_reg *mem);
mem               315 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	    amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type))
mem               577 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (bo->mem.placement & TTM_PL_FLAG_NO_EVICT)
mem              1689 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	struct ttm_mem_reg *mem;
mem              1697 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		mem = NULL;
mem              1703 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		mem = &bo->tbo.mem;
mem              1704 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		nodes = mem->mm_node;
mem              1705 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		if (mem->mem_type == TTM_PL_TT) {
mem              1713 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		flags = amdgpu_ttm_tt_pte_flags(adev, bo->tbo.ttm, mem);
mem              1751 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		uint32_t mem_type = bo->tbo.mem.mem_type;
mem               218 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	struct ttm_mem_reg *mem = &bo->tbo.mem;
mem               219 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	struct drm_mm_node *nodes = mem->mm_node;
mem               220 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	unsigned pages = mem->num_pages;
mem               226 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	if (mem->start >= adev->gmc.visible_vram_size >> PAGE_SHIFT)
mem               244 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c static void amdgpu_vram_mgr_virt_start(struct ttm_mem_reg *mem,
mem               250 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	if (start > mem->num_pages)
mem               251 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 		start -= mem->num_pages;
mem               254 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	mem->start = max(mem->start, start);
mem               270 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 			       struct ttm_mem_reg *mem)
mem               287 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	mem_bytes = (u64)mem->num_pages << PAGE_SHIFT;
mem               290 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 		mem->mm_node = NULL;
mem               304 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 		pages_per_node = max((uint32_t)pages_per_node, mem->page_alignment);
mem               305 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 		num_nodes = DIV_ROUND_UP(mem->num_pages, pages_per_node);
mem               319 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	mem->start = 0;
mem               320 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	pages_left = mem->num_pages;
mem               334 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 		amdgpu_vram_mgr_virt_start(mem, &nodes[i]);
mem               340 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 		uint32_t alignment = mem->page_alignment;
mem               353 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 		amdgpu_vram_mgr_virt_start(mem, &nodes[i]);
mem               360 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	mem->mm_node = nodes;
mem               368 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	atomic64_sub(mem->num_pages << PAGE_SHIFT, &mgr->usage);
mem               385 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 				struct ttm_mem_reg *mem)
mem               389 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	struct drm_mm_node *nodes = mem->mm_node;
mem               391 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	unsigned pages = mem->num_pages;
mem               393 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	if (!mem->mm_node)
mem               409 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	kvfree(mem->mm_node);
mem               410 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c 	mem->mm_node = NULL;
mem              1078 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c 	memset(hpd, 0, adev->gfx.mec.hpd_eop_obj->tbo.mem.size);
mem              1745 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c 	memset(hpd, 0, adev->gfx.mec.hpd_eop_obj->tbo.mem.size);
mem              1011 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 		void *mem, *kern_addr;
mem              1032 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 		mem = kfd_process_device_translate_handle(pdd,
mem              1034 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 		if (!mem) {
mem              1043 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 						mem, &kern_addr, &size);
mem              1252 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	void *mem;
mem              1295 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 		pdd->vm, (struct kgd_mem **) &mem, &offset,
mem              1301 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	idr_handle = kfd_process_device_create_obj_handle(pdd, mem);
mem              1323 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	amdgpu_amdkfd_gpuvm_free_memory_of_gpu(dev->kgd, (struct kgd_mem *)mem);
mem              1334 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	void *mem;
mem              1351 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	mem = kfd_process_device_translate_handle(
mem              1353 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	if (!mem) {
mem              1359 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 						(struct kgd_mem *)mem);
mem              1378 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	void *mem;
mem              1418 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	mem = kfd_process_device_translate_handle(pdd,
mem              1420 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	if (!mem) {
mem              1440 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 			peer->kgd, (struct kgd_mem *)mem, peer_pdd->vm);
mem              1451 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	err = amdgpu_amdkfd_gpuvm_sync_memory(dev->kgd, (struct kgd_mem *) mem, true);
mem              1488 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	void *mem;
mem              1527 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	mem = kfd_process_device_translate_handle(pdd,
mem              1529 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	if (!mem) {
mem              1547 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 			peer->kgd, (struct kgd_mem *)mem, peer_pdd->vm);
mem              1634 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	void *mem;
mem              1655 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 					      (struct kgd_mem **)&mem, &size,
mem              1660 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	idr_handle = kfd_process_device_create_obj_handle(pdd, mem);
mem              1673 drivers/gpu/drm/amd/amdkfd/kfd_chardev.c 	amdgpu_amdkfd_gpuvm_free_memory_of_gpu(dev->kgd, (struct kgd_mem *)mem);
mem               215 drivers/gpu/drm/amd/amdkfd/kfd_crat.c static int kfd_parse_subtype_mem(struct crat_subtype_memory *mem,
mem               226 drivers/gpu/drm/amd/amdkfd/kfd_crat.c 			mem->proximity_domain);
mem               228 drivers/gpu/drm/amd/amdkfd/kfd_crat.c 		if (mem->proximity_domain == dev->proximity_domain) {
mem               232 drivers/gpu/drm/amd/amdkfd/kfd_crat.c 				if (mem->visibility_type == 0)
mem               237 drivers/gpu/drm/amd/amdkfd/kfd_crat.c 					heap_type = mem->visibility_type;
mem               241 drivers/gpu/drm/amd/amdkfd/kfd_crat.c 			if (mem->flags & CRAT_MEM_FLAGS_HOT_PLUGGABLE)
mem               243 drivers/gpu/drm/amd/amdkfd/kfd_crat.c 			if (mem->flags & CRAT_MEM_FLAGS_NON_VOLATILE)
mem               247 drivers/gpu/drm/amd/amdkfd/kfd_crat.c 				((uint64_t)mem->length_high << 32) +
mem               248 drivers/gpu/drm/amd/amdkfd/kfd_crat.c 							mem->length_low;
mem               249 drivers/gpu/drm/amd/amdkfd/kfd_crat.c 			width = mem->width;
mem               430 drivers/gpu/drm/amd/amdkfd/kfd_crat.c 	struct crat_subtype_memory *mem;
mem               441 drivers/gpu/drm/amd/amdkfd/kfd_crat.c 		mem = (struct crat_subtype_memory *)sub_type_hdr;
mem               442 drivers/gpu/drm/amd/amdkfd/kfd_crat.c 		ret = kfd_parse_subtype_mem(mem, device_list);
mem               786 drivers/gpu/drm/amd/amdkfd/kfd_priv.h 					void *mem);
mem               161 drivers/gpu/drm/amd/amdkfd/kfd_process.c static void kfd_process_free_gpuvm(struct kgd_mem *mem,
mem               166 drivers/gpu/drm/amd/amdkfd/kfd_process.c 	amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu(dev->kgd, mem, pdd->vm);
mem               167 drivers/gpu/drm/amd/amdkfd/kfd_process.c 	amdgpu_amdkfd_gpuvm_free_memory_of_gpu(dev->kgd, mem);
mem               181 drivers/gpu/drm/amd/amdkfd/kfd_process.c 	struct kgd_mem *mem = NULL;
mem               186 drivers/gpu/drm/amd/amdkfd/kfd_process.c 						 pdd->vm, &mem, NULL, flags);
mem               190 drivers/gpu/drm/amd/amdkfd/kfd_process.c 	err = amdgpu_amdkfd_gpuvm_map_memory_to_gpu(kdev->kgd, mem, pdd->vm);
mem               194 drivers/gpu/drm/amd/amdkfd/kfd_process.c 	err = amdgpu_amdkfd_gpuvm_sync_memory(kdev->kgd, mem, true);
mem               205 drivers/gpu/drm/amd/amdkfd/kfd_process.c 	handle = kfd_process_device_create_obj_handle(pdd, mem);
mem               214 drivers/gpu/drm/amd/amdkfd/kfd_process.c 				(struct kgd_mem *)mem, kptr, NULL);
mem               227 drivers/gpu/drm/amd/amdkfd/kfd_process.c 	kfd_process_free_gpuvm(mem, pdd);
mem               231 drivers/gpu/drm/amd/amdkfd/kfd_process.c 	amdgpu_amdkfd_gpuvm_free_memory_of_gpu(kdev->kgd, mem);
mem               382 drivers/gpu/drm/amd/amdkfd/kfd_process.c 	void *mem;
mem               389 drivers/gpu/drm/amd/amdkfd/kfd_process.c 	idr_for_each_entry(&pdd->alloc_idr, mem, id) {
mem               397 drivers/gpu/drm/amd/amdkfd/kfd_process.c 				peer_pdd->dev->kgd, mem, peer_pdd->vm);
mem               400 drivers/gpu/drm/amd/amdkfd/kfd_process.c 		amdgpu_amdkfd_gpuvm_free_memory_of_gpu(pdd->dev->kgd, mem);
mem               877 drivers/gpu/drm/amd/amdkfd/kfd_process.c 					void *mem)
mem               879 drivers/gpu/drm/amd/amdkfd/kfd_process.c 	return idr_alloc(&pdd->alloc_idr, mem, 0, 0, GFP_KERNEL);
mem                84 drivers/gpu/drm/amd/amdkfd/kfd_process_queue_manager.c 	struct kgd_mem *mem = NULL;
mem               113 drivers/gpu/drm/amd/amdkfd/kfd_process_queue_manager.c 			gws, &mem);
mem               120 drivers/gpu/drm/amd/amdkfd/kfd_process_queue_manager.c 	pqn->q->gws = mem;
mem               136 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 	struct kfd_mem_properties *mem;
mem               144 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 		mem = container_of(dev->mem_props.next,
mem               146 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 		list_del(&mem->list);
mem               147 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 		kfree(mem);
mem               302 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 	struct kfd_mem_properties *mem;
mem               307 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 	mem = container_of(attr, struct kfd_mem_properties, attr);
mem               308 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 	sysfs_show_32bit_prop(buffer, "heap_type", mem->heap_type);
mem               309 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 	sysfs_show_64bit_prop(buffer, "size_in_bytes", mem->size_in_bytes);
mem               310 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 	sysfs_show_32bit_prop(buffer, "flags", mem->flags);
mem               311 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 	sysfs_show_32bit_prop(buffer, "width", mem->width);
mem               312 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 	ret = sysfs_show_32bit_prop(buffer, "mem_clk_max", mem->mem_clk_max);
mem               534 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 	struct kfd_mem_properties *mem;
mem               562 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 		list_for_each_entry(mem, &dev->mem_props, list)
mem               563 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 			if (mem->kobj) {
mem               564 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 				kfd_remove_sysfs_file(mem->kobj, &mem->attr);
mem               565 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 				mem->kobj = NULL;
mem               597 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 	struct kfd_mem_properties *mem;
mem               657 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 	list_for_each_entry(mem, &dev->mem_props, list) {
mem               658 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 		mem->kobj = kzalloc(sizeof(struct kobject), GFP_KERNEL);
mem               659 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 		if (!mem->kobj)
mem               661 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 		ret = kobject_init_and_add(mem->kobj, &mem_type,
mem               666 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 		mem->attr.name = "properties";
mem               667 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 		mem->attr.mode = KFD_SYSFS_FILE_MODE;
mem               668 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 		sysfs_attr_init(&mem->attr);
mem               669 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 		ret = sysfs_create_file(mem->kobj, &mem->attr);
mem               887 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 	struct kfd_mem_properties *mem;
mem               896 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 		list_for_each_entry(mem, &kdev->mem_props, list) {
mem               898 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 				mem->width = mem_width;
mem               900 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 				mem->mem_clk_max = mem_clock;
mem              1134 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 	struct kfd_mem_properties *mem;
mem              1148 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 	list_for_each_entry(mem, &dev->mem_props, list)
mem              1149 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 		mem->mem_clk_max = local_mem_info.mem_clk_max;
mem                66 drivers/gpu/drm/armada/armada_drv.c 	struct resource *mem = NULL;
mem                77 drivers/gpu/drm/armada/armada_drv.c 			mem = r;
mem                82 drivers/gpu/drm/armada/armada_drv.c 	if (!mem)
mem                85 drivers/gpu/drm/armada/armada_drv.c 	if (!devm_request_mem_region(dev, mem->start, resource_size(mem),
mem               136 drivers/gpu/drm/armada/armada_drv.c 	drm_mm_init(&priv->linear, mem->start, resource_size(mem));
mem               113 drivers/gpu/drm/bochs/bochs_hw.c 	unsigned long addr, size, mem, ioaddr, iosize;
mem               140 drivers/gpu/drm/bochs/bochs_hw.c 	mem = bochs_dispi_read(bochs, VBE_DISPI_INDEX_VIDEO_MEMORY_64K)
mem               153 drivers/gpu/drm/bochs/bochs_hw.c 	if (size != mem) {
mem               155 drivers/gpu/drm/bochs/bochs_hw.c 			size, mem);
mem               156 drivers/gpu/drm/bochs/bochs_hw.c 		size = min(size, mem);
mem               102 drivers/gpu/drm/drm_vram_mm_helper.c 				    struct ttm_mem_reg *mem)
mem               104 drivers/gpu/drm/drm_vram_mm_helper.c 	struct ttm_mem_type_manager *man = bdev->man + mem->mem_type;
mem               110 drivers/gpu/drm/drm_vram_mm_helper.c 	mem->bus.addr = NULL;
mem               111 drivers/gpu/drm/drm_vram_mm_helper.c 	mem->bus.size = mem->num_pages << PAGE_SHIFT;
mem               113 drivers/gpu/drm/drm_vram_mm_helper.c 	switch (mem->mem_type) {
mem               115 drivers/gpu/drm/drm_vram_mm_helper.c 		mem->bus.offset = 0;
mem               116 drivers/gpu/drm/drm_vram_mm_helper.c 		mem->bus.base = 0;
mem               117 drivers/gpu/drm/drm_vram_mm_helper.c 		mem->bus.is_iomem = false;
mem               120 drivers/gpu/drm/drm_vram_mm_helper.c 		mem->bus.offset = mem->start << PAGE_SHIFT;
mem               121 drivers/gpu/drm/drm_vram_mm_helper.c 		mem->bus.base = vmm->vram_base;
mem               122 drivers/gpu/drm/drm_vram_mm_helper.c 		mem->bus.is_iomem = true;
mem               132 drivers/gpu/drm/drm_vram_mm_helper.c 				  struct ttm_mem_reg *mem)
mem               128 drivers/gpu/drm/hisilicon/hibmc/hibmc_drm_fbdev.c 	info->fix.smem_start = gbo->bo.mem.bus.offset + gbo->bo.mem.bus.base;
mem               160 drivers/gpu/drm/i915/gvt/firmware.c 	const void *mem;
mem               167 drivers/gpu/drm/i915/gvt/firmware.c 	mem = fw->data + crc32_start;
mem               177 drivers/gpu/drm/i915/gvt/firmware.c 	VERIFY("crc32", h->crc32, crc32_le(0, mem, fw->size - crc32_start));
mem               181 drivers/gpu/drm/i915/gvt/firmware.c 	mem = (fw->data + h->cfg_space_offset);
mem               183 drivers/gpu/drm/i915/gvt/firmware.c 	id = *(u16 *)(mem + PCI_VENDOR_ID);
mem               186 drivers/gpu/drm/i915/gvt/firmware.c 	id = *(u16 *)(mem + PCI_DEVICE_ID);
mem               189 drivers/gpu/drm/i915/gvt/firmware.c 	id = *(u8 *)(mem + PCI_REVISION_ID);
mem               217 drivers/gpu/drm/i915/gvt/firmware.c 	void *mem;
mem               224 drivers/gpu/drm/i915/gvt/firmware.c 	mem = kmalloc(info->cfg_space_size, GFP_KERNEL);
mem               225 drivers/gpu/drm/i915/gvt/firmware.c 	if (!mem) {
mem               230 drivers/gpu/drm/i915/gvt/firmware.c 	firmware->cfg_space = mem;
mem               232 drivers/gpu/drm/i915/gvt/firmware.c 	mem = kmalloc(info->mmio_size, GFP_KERNEL);
mem               233 drivers/gpu/drm/i915/gvt/firmware.c 	if (!mem) {
mem               239 drivers/gpu/drm/i915/gvt/firmware.c 	firmware->mmio = mem;
mem              1440 drivers/gpu/drm/i915/gvt/gtt.c 		ops->get_entry(oos_page->mem, &old, index, false, 0, vgpu);
mem              1456 drivers/gpu/drm/i915/gvt/gtt.c 		ops->set_entry(oos_page->mem, &new, index, false, 0, vgpu);
mem              1491 drivers/gpu/drm/i915/gvt/gtt.c 			oos_page->mem, I915_GTT_PAGE_SIZE);
mem              1760 drivers/gpu/drm/i915/gvt/gtt.c 		ops->set_entry(spt->guest_page.oos_page->mem, &we, index,
mem              2543 drivers/gpu/drm/i915/gvt/gtt.c 		free_page((unsigned long)oos_page->mem);
mem              2564 drivers/gpu/drm/i915/gvt/gtt.c 		oos_page->mem = (void *)__get_free_pages(GFP_KERNEL, 0);
mem              2565 drivers/gpu/drm/i915/gvt/gtt.c 		if (!oos_page->mem) {
mem               226 drivers/gpu/drm/i915/gvt/gtt.h 	void *mem;
mem               308 drivers/gpu/drm/i915/intel_pm.c 							 int mem)
mem               313 drivers/gpu/drm/i915/intel_pm.c 	if (fsb == 0 || mem == 0)
mem               320 drivers/gpu/drm/i915/intel_pm.c 		    fsb == latency->fsb_freq && mem == latency->mem_freq)
mem               681 drivers/gpu/drm/mediatek/mtk_dpi.c 	struct resource *mem;
mem               692 drivers/gpu/drm/mediatek/mtk_dpi.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               693 drivers/gpu/drm/mediatek/mtk_dpi.c 	dpi->regs = devm_ioremap_resource(dev, mem);
mem               488 drivers/gpu/drm/mediatek/mtk_drm_drv.c 	struct resource *mem;
mem               502 drivers/gpu/drm/mediatek/mtk_drm_drv.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               503 drivers/gpu/drm/mediatek/mtk_drm_drv.c 	private->config_regs = devm_ioremap_resource(dev, mem);
mem              1445 drivers/gpu/drm/mediatek/mtk_hdmi.c 	struct resource *mem;
mem              1489 drivers/gpu/drm/mediatek/mtk_hdmi.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem              1490 drivers/gpu/drm/mediatek/mtk_hdmi.c 	hdmi->regs = devm_ioremap_resource(dev, mem);
mem               271 drivers/gpu/drm/mediatek/mtk_hdmi_ddc.c 	struct resource *mem;
mem               284 drivers/gpu/drm/mediatek/mtk_hdmi_ddc.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               285 drivers/gpu/drm/mediatek/mtk_hdmi_ddc.c 	ddc->regs = devm_ioremap_resource(&pdev->dev, mem);
mem               314 drivers/gpu/drm/mediatek/mtk_hdmi_ddc.c 	dev_dbg(dev, "physical adr: %pa, end: %pa\n", &mem->start,
mem               315 drivers/gpu/drm/mediatek/mtk_hdmi_ddc.c 		&mem->end);
mem               102 drivers/gpu/drm/mediatek/mtk_hdmi_phy.c 	struct resource *mem;
mem               118 drivers/gpu/drm/mediatek/mtk_hdmi_phy.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               119 drivers/gpu/drm/mediatek/mtk_hdmi_phy.c 	hdmi_phy->regs = devm_ioremap_resource(dev, mem);
mem               385 drivers/gpu/drm/mediatek/mtk_mipi_tx.c 	struct resource *mem;
mem               403 drivers/gpu/drm/mediatek/mtk_mipi_tx.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               404 drivers/gpu/drm/mediatek/mtk_mipi_tx.c 	mipi_tx->regs = devm_ioremap_resource(dev, mem);
mem                21 drivers/gpu/drm/mgag200/mgag200_main.c static int mga_probe_vram(struct mga_device *mdev, void __iomem *mem)
mem                30 drivers/gpu/drm/mgag200/mgag200_main.c 	orig = ioread16(mem);
mem                31 drivers/gpu/drm/mgag200/mgag200_main.c 	iowrite16(0, mem);
mem                40 drivers/gpu/drm/mgag200/mgag200_main.c 		orig1 = ioread8(mem + offset);
mem                41 drivers/gpu/drm/mgag200/mgag200_main.c 		orig2 = ioread8(mem + offset + 0x100);
mem                43 drivers/gpu/drm/mgag200/mgag200_main.c 		iowrite16(0xaa55, mem + offset);
mem                44 drivers/gpu/drm/mgag200/mgag200_main.c 		iowrite16(0xaa55, mem + offset + 0x100);
mem                46 drivers/gpu/drm/mgag200/mgag200_main.c 		test1 = ioread16(mem + offset);
mem                47 drivers/gpu/drm/mgag200/mgag200_main.c 		test2 = ioread16(mem);
mem                49 drivers/gpu/drm/mgag200/mgag200_main.c 		iowrite16(orig1, mem + offset);
mem                50 drivers/gpu/drm/mgag200/mgag200_main.c 		iowrite16(orig2, mem + offset + 0x100);
mem                61 drivers/gpu/drm/mgag200/mgag200_main.c 	iowrite16(orig, mem);
mem                68 drivers/gpu/drm/mgag200/mgag200_main.c 	void __iomem *mem;
mem                80 drivers/gpu/drm/mgag200/mgag200_main.c 	mem = pci_iomap(mdev->dev->pdev, 0, 0);
mem                81 drivers/gpu/drm/mgag200/mgag200_main.c 	if (!mem)
mem                84 drivers/gpu/drm/mgag200/mgag200_main.c 	mdev->mc.vram_size = mga_probe_vram(mdev, mem);
mem                86 drivers/gpu/drm/mgag200/mgag200_main.c 	pci_iounmap(mdev->dev->pdev, mem);
mem               258 drivers/gpu/drm/nouveau/dispnv50/head507d.c head507d_olut_load(struct drm_color_lut *in, int size, void __iomem *mem)
mem               260 drivers/gpu/drm/nouveau/dispnv50/head507d.c 	for (; size--; in++, mem += 8) {
mem               261 drivers/gpu/drm/nouveau/dispnv50/head507d.c 		writew(drm_color_lut_extract(in->  red, 11) << 3, mem + 0);
mem               262 drivers/gpu/drm/nouveau/dispnv50/head507d.c 		writew(drm_color_lut_extract(in->green, 11) << 3, mem + 2);
mem               263 drivers/gpu/drm/nouveau/dispnv50/head507d.c 		writew(drm_color_lut_extract(in-> blue, 11) << 3, mem + 4);
mem               269 drivers/gpu/drm/nouveau/dispnv50/head507d.c 	writew(readw(mem - 8), mem + 0);
mem               270 drivers/gpu/drm/nouveau/dispnv50/head507d.c 	writew(readw(mem - 6), mem + 2);
mem               271 drivers/gpu/drm/nouveau/dispnv50/head507d.c 	writew(readw(mem - 4), mem + 4);
mem               217 drivers/gpu/drm/nouveau/dispnv50/head907d.c head907d_olut_load(struct drm_color_lut *in, int size, void __iomem *mem)
mem               219 drivers/gpu/drm/nouveau/dispnv50/head907d.c 	for (; size--; in++, mem += 8) {
mem               220 drivers/gpu/drm/nouveau/dispnv50/head907d.c 		writew(drm_color_lut_extract(in->  red, 14) + 0x6000, mem + 0);
mem               221 drivers/gpu/drm/nouveau/dispnv50/head907d.c 		writew(drm_color_lut_extract(in->green, 14) + 0x6000, mem + 2);
mem               222 drivers/gpu/drm/nouveau/dispnv50/head907d.c 		writew(drm_color_lut_extract(in-> blue, 14) + 0x6000, mem + 4);
mem               228 drivers/gpu/drm/nouveau/dispnv50/head907d.c 	writew(readw(mem - 8), mem + 0);
mem               229 drivers/gpu/drm/nouveau/dispnv50/head907d.c 	writew(readw(mem - 6), mem + 2);
mem               230 drivers/gpu/drm/nouveau/dispnv50/head907d.c 	writew(readw(mem - 4), mem + 4);
mem               102 drivers/gpu/drm/nouveau/dispnv50/headc57d.c headc57d_olut_load_8(struct drm_color_lut *in, int size, void __iomem *mem)
mem               104 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 	memset_io(mem, 0x00, 0x20); /* VSS header. */
mem               105 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 	mem += 0x20;
mem               119 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 		for (i = 0; i < 4; i++, mem += 8) {
mem               120 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 			writew(r + ri * i, mem + 0);
mem               121 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 			writew(g + gi * i, mem + 2);
mem               122 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 			writew(b + bi * i, mem + 4);
mem               129 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 	writew(readw(mem - 8), mem + 0);
mem               130 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 	writew(readw(mem - 6), mem + 2);
mem               131 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 	writew(readw(mem - 4), mem + 4);
mem               135 drivers/gpu/drm/nouveau/dispnv50/headc57d.c headc57d_olut_load(struct drm_color_lut *in, int size, void __iomem *mem)
mem               137 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 	memset_io(mem, 0x00, 0x20); /* VSS header. */
mem               138 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 	mem += 0x20;
mem               140 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 	for (; size--; in++, mem += 0x08) {
mem               141 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 		writew(drm_color_lut_extract(in->  red, 16), mem + 0);
mem               142 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 		writew(drm_color_lut_extract(in->green, 16), mem + 2);
mem               143 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 		writew(drm_color_lut_extract(in-> blue, 16), mem + 4);
mem               149 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 	writew(readw(mem - 8), mem + 0);
mem               150 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 	writew(readw(mem - 6), mem + 2);
mem               151 drivers/gpu/drm/nouveau/dispnv50/headc57d.c 	writew(readw(mem - 4), mem + 4);
mem                36 drivers/gpu/drm/nouveau/dispnv50/lut.c 	void __iomem *mem = lut->mem[buffer].object.map.ptr;
mem                37 drivers/gpu/drm/nouveau/dispnv50/lut.c 	const u32 addr = lut->mem[buffer].addr;
mem                48 drivers/gpu/drm/nouveau/dispnv50/lut.c 			load(in, 1024, mem);
mem                52 drivers/gpu/drm/nouveau/dispnv50/lut.c 		load(in, blob->length / sizeof(*in), mem);
mem                62 drivers/gpu/drm/nouveau/dispnv50/lut.c 	for (i = 0; i < ARRAY_SIZE(lut->mem); i++)
mem                63 drivers/gpu/drm/nouveau/dispnv50/lut.c 		nvif_mem_fini(&lut->mem[i]);
mem                72 drivers/gpu/drm/nouveau/dispnv50/lut.c 	for (i = 0; i < ARRAY_SIZE(lut->mem); i++) {
mem                74 drivers/gpu/drm/nouveau/dispnv50/lut.c 					    &lut->mem[i]);
mem                 9 drivers/gpu/drm/nouveau/dispnv50/lut.h 	struct nvif_mem mem[2];
mem               137 drivers/gpu/drm/nouveau/dispnv50/wndwc57e.c wndwc57e_ilut_load(struct drm_color_lut *in, int size, void __iomem *mem)
mem               139 drivers/gpu/drm/nouveau/dispnv50/wndwc57e.c 	memset_io(mem, 0x00, 0x20); /* VSS header. */
mem               140 drivers/gpu/drm/nouveau/dispnv50/wndwc57e.c 	mem += 0x20;
mem               142 drivers/gpu/drm/nouveau/dispnv50/wndwc57e.c 	for (; size--; in++, mem += 0x08) {
mem               146 drivers/gpu/drm/nouveau/dispnv50/wndwc57e.c 		writew(r, mem + 0);
mem               147 drivers/gpu/drm/nouveau/dispnv50/wndwc57e.c 		writew(g, mem + 2);
mem               148 drivers/gpu/drm/nouveau/dispnv50/wndwc57e.c 		writew(b, mem + 4);
mem               154 drivers/gpu/drm/nouveau/dispnv50/wndwc57e.c 	writew(readw(mem - 8), mem + 0);
mem               155 drivers/gpu/drm/nouveau/dispnv50/wndwc57e.c 	writew(readw(mem - 6), mem + 2);
mem               156 drivers/gpu/drm/nouveau/dispnv50/wndwc57e.c 	writew(readw(mem - 4), mem + 4);
mem                11 drivers/gpu/drm/nouveau/include/nvif/mmu.h 	s32 mem;
mem                66 drivers/gpu/drm/nouveau/include/nvkm/subdev/mmu.h 	struct nvkm_mm_node *mem;
mem               314 drivers/gpu/drm/nouveau/nouveau_abi16.c 	if (chan->chan->push.buffer->bo.mem.mem_type == TTM_PL_VRAM)
mem               305 drivers/gpu/drm/nouveau/nouveau_bo.c 	nvbo->bo.mem.num_pages = size >> PAGE_SHIFT;
mem               363 drivers/gpu/drm/nouveau/nouveau_bo.c 	    nvbo->bo.mem.num_pages < vram_pages / 4) {
mem               429 drivers/gpu/drm/nouveau/nouveau_bo.c 		if (!(memtype & (1 << bo->mem.mem_type)) || evict) {
mem               432 drivers/gpu/drm/nouveau/nouveau_bo.c 				 1 << bo->mem.mem_type, memtype);
mem               459 drivers/gpu/drm/nouveau/nouveau_bo.c 	switch (bo->mem.mem_type) {
mem               461 drivers/gpu/drm/nouveau/nouveau_bo.c 		drm->gem.vram_available -= bo->mem.size;
mem               464 drivers/gpu/drm/nouveau/nouveau_bo.c 		drm->gem.gart_available -= bo->mem.size;
mem               493 drivers/gpu/drm/nouveau/nouveau_bo.c 	nouveau_bo_placement_set(nvbo, bo->mem.placement, 0);
mem               497 drivers/gpu/drm/nouveau/nouveau_bo.c 		switch (bo->mem.mem_type) {
mem               499 drivers/gpu/drm/nouveau/nouveau_bo.c 			drm->gem.vram_available += bo->mem.size;
mem               502 drivers/gpu/drm/nouveau/nouveau_bo.c 			drm->gem.gart_available += bo->mem.size;
mem               523 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_kmap(&nvbo->bo, 0, nvbo->bo.mem.num_pages, &nvbo->kmap);
mem               597 drivers/gpu/drm/nouveau/nouveau_bo.c 	u16 *mem = ttm_kmap_obj_virtual(&nvbo->kmap, &is_iomem);
mem               599 drivers/gpu/drm/nouveau/nouveau_bo.c 	mem += index;
mem               602 drivers/gpu/drm/nouveau/nouveau_bo.c 		iowrite16_native(val, (void __force __iomem *)mem);
mem               604 drivers/gpu/drm/nouveau/nouveau_bo.c 		*mem = val;
mem               611 drivers/gpu/drm/nouveau/nouveau_bo.c 	u32 *mem = ttm_kmap_obj_virtual(&nvbo->kmap, &is_iomem);
mem               613 drivers/gpu/drm/nouveau/nouveau_bo.c 	mem += index;
mem               616 drivers/gpu/drm/nouveau/nouveau_bo.c 		return ioread32_native((void __force __iomem *)mem);
mem               618 drivers/gpu/drm/nouveau/nouveau_bo.c 		return *mem;
mem               625 drivers/gpu/drm/nouveau/nouveau_bo.c 	u32 *mem = ttm_kmap_obj_virtual(&nvbo->kmap, &is_iomem);
mem               627 drivers/gpu/drm/nouveau/nouveau_bo.c 	mem += index;
mem               630 drivers/gpu/drm/nouveau/nouveau_bo.c 		iowrite32_native(val, (void __force __iomem *)mem);
mem               632 drivers/gpu/drm/nouveau/nouveau_bo.c 		*mem = val;
mem               724 drivers/gpu/drm/nouveau/nouveau_bo.c 	switch (bo->mem.mem_type) {
mem               754 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_mem *mem = nouveau_mem(old_reg);
mem               758 drivers/gpu/drm/nouveau/nouveau_bo.c 		OUT_RING  (chan, upper_32_bits(mem->vma[0].addr));
mem               759 drivers/gpu/drm/nouveau/nouveau_bo.c 		OUT_RING  (chan, lower_32_bits(mem->vma[0].addr));
mem               760 drivers/gpu/drm/nouveau/nouveau_bo.c 		OUT_RING  (chan, upper_32_bits(mem->vma[1].addr));
mem               761 drivers/gpu/drm/nouveau/nouveau_bo.c 		OUT_RING  (chan, lower_32_bits(mem->vma[1].addr));
mem               786 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_mem *mem = nouveau_mem(old_reg);
mem               787 drivers/gpu/drm/nouveau/nouveau_bo.c 	u64 src_offset = mem->vma[0].addr;
mem               788 drivers/gpu/drm/nouveau/nouveau_bo.c 	u64 dst_offset = mem->vma[1].addr;
mem               824 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_mem *mem = nouveau_mem(old_reg);
mem               825 drivers/gpu/drm/nouveau/nouveau_bo.c 	u64 src_offset = mem->vma[0].addr;
mem               826 drivers/gpu/drm/nouveau/nouveau_bo.c 	u64 dst_offset = mem->vma[1].addr;
mem               863 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_mem *mem = nouveau_mem(old_reg);
mem               864 drivers/gpu/drm/nouveau/nouveau_bo.c 	u64 src_offset = mem->vma[0].addr;
mem               865 drivers/gpu/drm/nouveau/nouveau_bo.c 	u64 dst_offset = mem->vma[1].addr;
mem               901 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_mem *mem = nouveau_mem(old_reg);
mem               905 drivers/gpu/drm/nouveau/nouveau_bo.c 		OUT_RING  (chan, upper_32_bits(mem->vma[0].addr));
mem               906 drivers/gpu/drm/nouveau/nouveau_bo.c 		OUT_RING  (chan, lower_32_bits(mem->vma[0].addr));
mem               907 drivers/gpu/drm/nouveau/nouveau_bo.c 		OUT_RING  (chan, upper_32_bits(mem->vma[1].addr));
mem               908 drivers/gpu/drm/nouveau/nouveau_bo.c 		OUT_RING  (chan, lower_32_bits(mem->vma[1].addr));
mem               919 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_mem *mem = nouveau_mem(old_reg);
mem               924 drivers/gpu/drm/nouveau/nouveau_bo.c 		OUT_RING  (chan, upper_32_bits(mem->vma[0].addr));
mem               925 drivers/gpu/drm/nouveau/nouveau_bo.c 		OUT_RING  (chan, lower_32_bits(mem->vma[0].addr));
mem               926 drivers/gpu/drm/nouveau/nouveau_bo.c 		OUT_RING  (chan, upper_32_bits(mem->vma[1].addr));
mem               927 drivers/gpu/drm/nouveau/nouveau_bo.c 		OUT_RING  (chan, lower_32_bits(mem->vma[1].addr));
mem               953 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_mem *mem = nouveau_mem(old_reg);
mem               955 drivers/gpu/drm/nouveau/nouveau_bo.c 	u64 src_offset = mem->vma[0].addr;
mem               956 drivers/gpu/drm/nouveau/nouveau_bo.c 	u64 dst_offset = mem->vma[1].addr;
mem               957 drivers/gpu/drm/nouveau/nouveau_bo.c 	int src_tiled = !!mem->kind;
mem              1095 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_mem *old_mem = nouveau_mem(&bo->mem);
mem              1100 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = nvif_vmm_get(vmm, LAZY, false, old_mem->mem.page, 0,
mem              1101 drivers/gpu/drm/nouveau/nouveau_bo.c 			   old_mem->mem.size, &old_mem->vma[0]);
mem              1105 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = nvif_vmm_get(vmm, LAZY, false, new_mem->mem.page, 0,
mem              1106 drivers/gpu/drm/nouveau/nouveau_bo.c 			   new_mem->mem.size, &old_mem->vma[1]);
mem              1146 drivers/gpu/drm/nouveau/nouveau_bo.c 		ret = drm->ttm.move(chan, bo, &bo->mem, new_reg);
mem              1307 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_mem *mem = new_reg ? nouveau_mem(new_reg) : NULL;
mem              1315 drivers/gpu/drm/nouveau/nouveau_bo.c 	if (mem && new_reg->mem_type != TTM_PL_SYSTEM &&
mem              1316 drivers/gpu/drm/nouveau/nouveau_bo.c 	    mem->mem.page == nvbo->page) {
mem              1318 drivers/gpu/drm/nouveau/nouveau_bo.c 			nouveau_vma_map(vma, mem);
mem              1369 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct ttm_mem_reg *old_reg = &bo->mem;
mem              1388 drivers/gpu/drm/nouveau/nouveau_bo.c 		BUG_ON(bo->mem.mm_node != NULL);
mem              1389 drivers/gpu/drm/nouveau/nouveau_bo.c 		bo->mem = *new_reg;
mem              1443 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_mem *mem = nouveau_mem(reg);
mem              1464 drivers/gpu/drm/nouveau/nouveau_bo.c 		if (drm->client.mem->oclass < NVIF_CLASS_MEM_NV50 || !mem->kind)
mem              1472 drivers/gpu/drm/nouveau/nouveau_bo.c 		if (drm->client.mem->oclass >= NVIF_CLASS_MEM_NV50) {
mem              1481 drivers/gpu/drm/nouveau/nouveau_bo.c 			switch (mem->mem.object.oclass) {
mem              1485 drivers/gpu/drm/nouveau/nouveau_bo.c 				args.nv50.kind = mem->kind;
mem              1486 drivers/gpu/drm/nouveau/nouveau_bo.c 				args.nv50.comp = mem->comp;
mem              1492 drivers/gpu/drm/nouveau/nouveau_bo.c 				args.gf100.kind = mem->kind;
mem              1500 drivers/gpu/drm/nouveau/nouveau_bo.c 			ret = nvif_object_map_handle(&mem->mem.object,
mem              1520 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct nouveau_mem *mem = nouveau_mem(reg);
mem              1522 drivers/gpu/drm/nouveau/nouveau_bo.c 	if (drm->client.mem->oclass >= NVIF_CLASS_MEM_NV50) {
mem              1525 drivers/gpu/drm/nouveau/nouveau_bo.c 			if (mem->kind)
mem              1526 drivers/gpu/drm/nouveau/nouveau_bo.c 				nvif_object_unmap_handle(&mem->mem.object);
mem              1529 drivers/gpu/drm/nouveau/nouveau_bo.c 			nvif_object_unmap_handle(&mem->mem.object);
mem              1549 drivers/gpu/drm/nouveau/nouveau_bo.c 	if (bo->mem.mem_type != TTM_PL_VRAM) {
mem              1554 drivers/gpu/drm/nouveau/nouveau_bo.c 		if (bo->mem.mem_type == TTM_PL_SYSTEM) {
mem              1566 drivers/gpu/drm/nouveau/nouveau_bo.c 	    bo->mem.start + bo->mem.num_pages < mappable)
mem               183 drivers/gpu/drm/nouveau/nouveau_chan.c 	if (chan->push.buffer->bo.mem.mem_type == TTM_PL_VRAM) {
mem                37 drivers/gpu/drm/nouveau/nouveau_dma.c 	u32 *mem = ttm_kmap_obj_virtual(&chan->push.buffer->kmap, &is_iomem);
mem                38 drivers/gpu/drm/nouveau/nouveau_dma.c 	mem = &mem[chan->dma.cur];
mem                40 drivers/gpu/drm/nouveau/nouveau_dma.c 		memcpy_toio((void __force __iomem *)mem, data, nr_dwords * 4);
mem                42 drivers/gpu/drm/nouveau/nouveau_dma.c 		memcpy(mem, data, nr_dwords * 4);
mem               281 drivers/gpu/drm/nouveau/nouveau_drm.c 	cli->mem = &mems[ret];
mem               101 drivers/gpu/drm/nouveau/nouveau_drv.h 	const struct nvif_mclass *mem;
mem               379 drivers/gpu/drm/nouveau/nouveau_fbcon.c 	info->fix.smem_start = fb->nvbo->bo.mem.bus.base +
mem               380 drivers/gpu/drm/nouveau/nouveau_fbcon.c 			       fb->nvbo->bo.mem.bus.offset;
mem               381 drivers/gpu/drm/nouveau/nouveau_fbcon.c 	info->fix.smem_len = fb->nvbo->bo.mem.num_pages << PAGE_SHIFT;
mem               384 drivers/gpu/drm/nouveau/nouveau_fbcon.c 	info->screen_size = fb->nvbo->bo.mem.num_pages << PAGE_SHIFT;
mem               231 drivers/gpu/drm/nouveau/nouveau_gem.c 	else if (nvbo->bo.mem.mem_type == TTM_PL_TT)
mem               244 drivers/gpu/drm/nouveau/nouveau_gem.c 	rep->size = nvbo->bo.mem.num_pages << PAGE_SHIFT;
mem               306 drivers/gpu/drm/nouveau/nouveau_gem.c 	    bo->mem.mem_type == TTM_PL_VRAM)
mem               310 drivers/gpu/drm/nouveau/nouveau_gem.c 		 bo->mem.mem_type == TTM_PL_TT)
mem               523 drivers/gpu/drm/nouveau/nouveau_gem.c 			    ((nvbo->bo.mem.mem_type == TTM_PL_VRAM &&
mem               525 drivers/gpu/drm/nouveau/nouveau_gem.c 			     (nvbo->bo.mem.mem_type == TTM_PL_TT &&
mem               529 drivers/gpu/drm/nouveau/nouveau_gem.c 			if (nvbo->bo.mem.mem_type == TTM_PL_TT)
mem               588 drivers/gpu/drm/nouveau/nouveau_gem.c 	void *mem;
mem               593 drivers/gpu/drm/nouveau/nouveau_gem.c 	mem = kvmalloc(size, GFP_KERNEL);
mem               594 drivers/gpu/drm/nouveau/nouveau_gem.c 	if (!mem)
mem               597 drivers/gpu/drm/nouveau/nouveau_gem.c 	if (copy_from_user(mem, userptr, size)) {
mem               598 drivers/gpu/drm/nouveau/nouveau_gem.c 		u_free(mem);
mem               602 drivers/gpu/drm/nouveau/nouveau_gem.c 	return mem;
mem               642 drivers/gpu/drm/nouveau/nouveau_gem.c 			     nvbo->bo.mem.num_pages << PAGE_SHIFT)) {
mem               649 drivers/gpu/drm/nouveau/nouveau_gem.c 			ret = ttm_bo_kmap(&nvbo->bo, 0, nvbo->bo.mem.num_pages,
mem               821 drivers/gpu/drm/nouveau/nouveau_gem.c 							  nvbo->bo.mem.
mem                36 drivers/gpu/drm/nouveau/nouveau_mem.c nouveau_mem_map(struct nouveau_mem *mem,
mem                54 drivers/gpu/drm/nouveau/nouveau_mem.c 		args.nv50.kind = mem->kind;
mem                55 drivers/gpu/drm/nouveau/nouveau_mem.c 		args.nv50.comp = mem->comp;
mem                62 drivers/gpu/drm/nouveau/nouveau_mem.c 		if (mem->mem.type & NVIF_MEM_VRAM)
mem                68 drivers/gpu/drm/nouveau/nouveau_mem.c 		args.gf100.kind = mem->kind;
mem                78 drivers/gpu/drm/nouveau/nouveau_mem.c 	ret = nvif_vmm_map(vmm, vma->addr, mem->mem.size, &args, argc,
mem                79 drivers/gpu/drm/nouveau/nouveau_mem.c 			   &mem->mem, 0);
mem                85 drivers/gpu/drm/nouveau/nouveau_mem.c nouveau_mem_fini(struct nouveau_mem *mem)
mem                87 drivers/gpu/drm/nouveau/nouveau_mem.c 	nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[1]);
mem                88 drivers/gpu/drm/nouveau/nouveau_mem.c 	nvif_vmm_put(&mem->cli->drm->client.vmm.vmm, &mem->vma[0]);
mem                89 drivers/gpu/drm/nouveau/nouveau_mem.c 	mutex_lock(&mem->cli->drm->master.lock);
mem                90 drivers/gpu/drm/nouveau/nouveau_mem.c 	nvif_mem_fini(&mem->mem);
mem                91 drivers/gpu/drm/nouveau/nouveau_mem.c 	mutex_unlock(&mem->cli->drm->master.lock);
mem                97 drivers/gpu/drm/nouveau/nouveau_mem.c 	struct nouveau_mem *mem = nouveau_mem(reg);
mem                98 drivers/gpu/drm/nouveau/nouveau_mem.c 	struct nouveau_cli *cli = mem->cli;
mem               107 drivers/gpu/drm/nouveau/nouveau_mem.c 		type = drm->ttm.type_ncoh[!!mem->kind];
mem               111 drivers/gpu/drm/nouveau/nouveau_mem.c 	if (mem->kind && !(mmu->type[type].type & NVIF_MEM_KIND))
mem               112 drivers/gpu/drm/nouveau/nouveau_mem.c 		mem->comp = mem->kind = 0;
mem               113 drivers/gpu/drm/nouveau/nouveau_mem.c 	if (mem->comp && !(mmu->type[type].type & NVIF_MEM_COMP)) {
mem               115 drivers/gpu/drm/nouveau/nouveau_mem.c 			mem->kind = mmu->kind[mem->kind];
mem               116 drivers/gpu/drm/nouveau/nouveau_mem.c 		mem->comp = 0;
mem               124 drivers/gpu/drm/nouveau/nouveau_mem.c 	ret = nvif_mem_init_type(mmu, cli->mem->oclass, type, PAGE_SHIFT,
mem               126 drivers/gpu/drm/nouveau/nouveau_mem.c 				 &args, sizeof(args), &mem->mem);
mem               135 drivers/gpu/drm/nouveau/nouveau_mem.c 	struct nouveau_mem *mem = nouveau_mem(reg);
mem               136 drivers/gpu/drm/nouveau/nouveau_mem.c 	struct nouveau_cli *cli = mem->cli;
mem               145 drivers/gpu/drm/nouveau/nouveau_mem.c 	switch (cli->mem->oclass) {
mem               147 drivers/gpu/drm/nouveau/nouveau_mem.c 		ret = nvif_mem_init_type(mmu, cli->mem->oclass,
mem               152 drivers/gpu/drm/nouveau/nouveau_mem.c 					 &mem->mem);
mem               155 drivers/gpu/drm/nouveau/nouveau_mem.c 		ret = nvif_mem_init_type(mmu, cli->mem->oclass,
mem               158 drivers/gpu/drm/nouveau/nouveau_mem.c 						.bankswz = mmu->kind[mem->kind] == 2,
mem               161 drivers/gpu/drm/nouveau/nouveau_mem.c 					 &mem->mem);
mem               171 drivers/gpu/drm/nouveau/nouveau_mem.c 	reg->start = mem->mem.addr >> PAGE_SHIFT;
mem               178 drivers/gpu/drm/nouveau/nouveau_mem.c 	struct nouveau_mem *mem = nouveau_mem(reg);
mem               179 drivers/gpu/drm/nouveau/nouveau_mem.c 	nouveau_mem_fini(mem);
mem               188 drivers/gpu/drm/nouveau/nouveau_mem.c 	struct nouveau_mem *mem;
mem               190 drivers/gpu/drm/nouveau/nouveau_mem.c 	if (!(mem = kzalloc(sizeof(*mem), GFP_KERNEL)))
mem               192 drivers/gpu/drm/nouveau/nouveau_mem.c 	mem->cli = cli;
mem               193 drivers/gpu/drm/nouveau/nouveau_mem.c 	mem->kind = kind;
mem               194 drivers/gpu/drm/nouveau/nouveau_mem.c 	mem->comp = comp;
mem               196 drivers/gpu/drm/nouveau/nouveau_mem.c 	reg->mm_node = mem;
mem                19 drivers/gpu/drm/nouveau/nouveau_mem.h 	struct nvif_mem mem;
mem                14 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	struct nouveau_mem *mem;
mem                32 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	struct nouveau_mem *mem = nouveau_mem(reg);
mem                39 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	ret = nouveau_mem_map(mem, &mem->cli->vmm.vmm, &mem->vma[0]);
mem                41 drivers/gpu/drm/nouveau/nouveau_sgdma.c 		nouveau_mem_fini(mem);
mem                45 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	nvbe->mem = mem;
mem                53 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	nouveau_mem_fini(nvbe->mem);
mem                67 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	struct nouveau_mem *mem = nouveau_mem(reg);
mem                74 drivers/gpu/drm/nouveau/nouveau_sgdma.c 	nvbe->mem = mem;
mem               130 drivers/gpu/drm/nouveau/nouveau_ttm.c 	struct nouveau_mem *mem;
mem               134 drivers/gpu/drm/nouveau/nouveau_ttm.c 	mem = nouveau_mem(reg);
mem               138 drivers/gpu/drm/nouveau/nouveau_ttm.c 	ret = nvif_vmm_get(&mem->cli->vmm.vmm, PTES, false, 12, 0,
mem               139 drivers/gpu/drm/nouveau/nouveau_ttm.c 			   reg->num_pages << PAGE_SHIFT, &mem->vma[0]);
mem               149 drivers/gpu/drm/nouveau/nouveau_ttm.c 	reg->start = mem->vma[0].addr >> PAGE_SHIFT;
mem                31 drivers/gpu/drm/nouveau/nouveau_vmm.c 	if (vma->mem) {
mem                33 drivers/gpu/drm/nouveau/nouveau_vmm.c 		vma->mem = NULL;
mem                38 drivers/gpu/drm/nouveau/nouveau_vmm.c nouveau_vma_map(struct nouveau_vma *vma, struct nouveau_mem *mem)
mem                41 drivers/gpu/drm/nouveau/nouveau_vmm.c 	int ret = nouveau_mem_map(mem, &vma->vmm->vmm, &tmp);
mem                44 drivers/gpu/drm/nouveau/nouveau_vmm.c 	vma->mem = mem;
mem                80 drivers/gpu/drm/nouveau/nouveau_vmm.c 	struct nouveau_mem *mem = nouveau_mem(&nvbo->bo.mem);
mem                95 drivers/gpu/drm/nouveau/nouveau_vmm.c 	vma->mem = NULL;
mem                99 drivers/gpu/drm/nouveau/nouveau_vmm.c 	if (nvbo->bo.mem.mem_type != TTM_PL_SYSTEM &&
mem               100 drivers/gpu/drm/nouveau/nouveau_vmm.c 	    mem->mem.page == nvbo->page) {
mem               101 drivers/gpu/drm/nouveau/nouveau_vmm.c 		ret = nvif_vmm_get(&vmm->vmm, LAZY, false, mem->mem.page, 0,
mem               102 drivers/gpu/drm/nouveau/nouveau_vmm.c 				   mem->mem.size, &tmp);
mem               107 drivers/gpu/drm/nouveau/nouveau_vmm.c 		ret = nouveau_vma_map(vma, mem);
mem               109 drivers/gpu/drm/nouveau/nouveau_vmm.c 		ret = nvif_vmm_get(&vmm->vmm, PTES, false, mem->mem.page, 0,
mem               110 drivers/gpu/drm/nouveau/nouveau_vmm.c 				   mem->mem.size, &tmp);
mem                13 drivers/gpu/drm/nouveau/nouveau_vmm.h 	struct nouveau_mem *mem;
mem                79 drivers/gpu/drm/nouveau/nv17_fence.c 	struct ttm_mem_reg *reg = &priv->bo->bo.mem;
mem                40 drivers/gpu/drm/nouveau/nv50_fence.c 	struct ttm_mem_reg *reg = &priv->bo->bo.mem;
mem                28 drivers/gpu/drm/nouveau/nvif/mem.c nvif_mem_init_map(struct nvif_mmu *mmu, u8 type, u64 size, struct nvif_mem *mem)
mem                30 drivers/gpu/drm/nouveau/nvif/mem.c 	int ret = nvif_mem_init(mmu, mmu->mem, NVIF_MEM_MAPPABLE | type, 0,
mem                31 drivers/gpu/drm/nouveau/nvif/mem.c 				size, NULL, 0, mem);
mem                33 drivers/gpu/drm/nouveau/nvif/mem.c 		ret = nvif_object_map(&mem->object, NULL, 0);
mem                35 drivers/gpu/drm/nouveau/nvif/mem.c 			nvif_mem_fini(mem);
mem                41 drivers/gpu/drm/nouveau/nvif/mem.c nvif_mem_fini(struct nvif_mem *mem)
mem                43 drivers/gpu/drm/nouveau/nvif/mem.c 	nvif_object_fini(&mem->object);
mem                48 drivers/gpu/drm/nouveau/nvif/mem.c 		   u64 size, void *argv, u32 argc, struct nvif_mem *mem)
mem                54 drivers/gpu/drm/nouveau/nvif/mem.c 	mem->object.client = NULL;
mem                71 drivers/gpu/drm/nouveau/nvif/mem.c 			       sizeof(*args) + argc, &mem->object);
mem                73 drivers/gpu/drm/nouveau/nvif/mem.c 		mem->type = mmu->type[type].type;
mem                74 drivers/gpu/drm/nouveau/nvif/mem.c 		mem->page = args->page;
mem                75 drivers/gpu/drm/nouveau/nvif/mem.c 		mem->addr = args->addr;
mem                76 drivers/gpu/drm/nouveau/nvif/mem.c 		mem->size = args->size;
mem                87 drivers/gpu/drm/nouveau/nvif/mem.c 	      u64 size, void *argv, u32 argc, struct nvif_mem *mem)
mem                91 drivers/gpu/drm/nouveau/nvif/mem.c 	mem->object.client = NULL;
mem                96 drivers/gpu/drm/nouveau/nvif/mem.c 						 argv, argc, mem);
mem                66 drivers/gpu/drm/nouveau/nvif/mmu.c 	mmu->mem = mems[ret].oclass;
mem                37 drivers/gpu/drm/nouveau/nvif/vmm.c 	     struct nvif_mem *mem, u64 offset)
mem                53 drivers/gpu/drm/nouveau/nvif/vmm.c 	args->memory = nvif_handle(&mem->object);
mem                61 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gf100.c 	cur = fifo->runlist.mem[fifo->runlist.active];
mem               589 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gf100.c 			      false, &fifo->runlist.mem[0]);
mem               594 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gf100.c 			      false, &fifo->runlist.mem[1]);
mem               601 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gf100.c 			      0x1000, false, &fifo->user.mem);
mem               605 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gf100.c 	ret = nvkm_vmm_get(bar, 12, nvkm_memory_size(fifo->user.mem),
mem               610 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gf100.c 	return nvkm_memory_map(fifo->user.mem, 0, bar, fifo->user.bar, NULL, 0);
mem               662 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gf100.c 	nvkm_memory_unref(&fifo->user.mem);
mem               663 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gf100.c 	nvkm_memory_unref(&fifo->runlist.mem[0]);
mem               664 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gf100.c 	nvkm_memory_unref(&fifo->runlist.mem[1]);
mem                23 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gf100.h 		struct nvkm_memory *mem[2];
mem                29 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gf100.h 		struct nvkm_memory *mem;
mem               153 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 			  struct nvkm_memory *mem, int nr)
mem               159 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 	switch (nvkm_memory_target(mem)) {
mem               167 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 	nvkm_wr32(device, 0x002270, (nvkm_memory_addr(mem) >> 12) |
mem               184 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 	struct nvkm_memory *mem;
mem               189 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 	mem = fifo->runlist[runl].mem[fifo->runlist[runl].next];
mem               192 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 	nvkm_kmap(mem);
mem               194 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 		func->chan(chan, mem, nr++ * func->size);
mem               198 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 		func->cgrp(cgrp, mem, nr++ * func->size);
mem               200 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 			func->chan(chan, mem, nr++ * func->size);
mem               203 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 	nvkm_done(mem);
mem               205 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 	func->commit(fifo, runl, mem, nr);
mem               948 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 		for (j = 0; j < ARRAY_SIZE(fifo->runlist[i].mem); j++) {
mem               953 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 					      &fifo->runlist[i].mem[j]);
mem               965 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 			      &fifo->user.mem);
mem               969 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 	ret = nvkm_vmm_get(bar, 12, nvkm_memory_size(fifo->user.mem),
mem               974 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 	return nvkm_memory_map(fifo->user.mem, 0, bar, fifo->user.bar, NULL, 0);
mem              1017 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 	nvkm_memory_unref(&fifo->user.mem);
mem              1020 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 		nvkm_memory_unref(&fifo->runlist[i].mem[1]);
mem              1021 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c 		nvkm_memory_unref(&fifo->runlist[i].mem[0]);
mem                32 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.h 		struct nvkm_memory *mem[2];
mem                42 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.h 		struct nvkm_memory *mem;
mem               266 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogf100.c 	nvkm_kmap(fifo->user.mem);
mem               268 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogf100.c 		nvkm_wo32(fifo->user.mem, usermem + i, 0x00000000);
mem               269 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogf100.c 	nvkm_done(fifo->user.mem);
mem               270 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogf100.c 	usermem = nvkm_memory_addr(fifo->user.mem) + usermem;
mem               301 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogk104.c 	nvkm_kmap(fifo->user.mem);
mem               303 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogk104.c 		nvkm_wo32(fifo->user.mem, usermem + i, 0x00000000);
mem               304 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogk104.c 	nvkm_done(fifo->user.mem);
mem               305 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogk104.c 	usermem = nvkm_memory_addr(fifo->user.mem) + usermem;
mem               181 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogv100.c 	nvkm_kmap(fifo->user.mem);
mem               183 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogv100.c 		nvkm_wo32(fifo->user.mem, usermem + i, 0x00000000);
mem               184 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogv100.c 	nvkm_done(fifo->user.mem);
mem               185 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogv100.c 	usermem = nvkm_memory_addr(fifo->user.mem) + usermem;
mem                35 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gv100.c 	struct nvkm_memory *usermem = chan->fifo->user.mem;
mem                33 drivers/gpu/drm/nouveau/nvkm/engine/fifo/tu102.c 			  struct nvkm_memory *mem, int nr)
mem                36 drivers/gpu/drm/nouveau/nvkm/engine/fifo/tu102.c 	u64 addr = nvkm_memory_addr(mem);
mem               661 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c nv40_grctx_fill(struct nvkm_device *device, struct nvkm_gpuobj *mem)
mem               666 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			     .data = mem,
mem               256 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_grctx_fill(struct nvkm_device *device, struct nvkm_gpuobj *mem)
mem               261 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			     .data = mem,
mem               362 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c 		nvkm_memory_unref(&chan->data[i].mem);
mem               419 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c 				      &chan->data[i].mem);
mem               424 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c 				   nvkm_memory_size(chan->data[i].mem),
mem               431 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.c 		ret = nvkm_memory_map(chan->data[i].mem, 0, chan->vmm,
mem               265 drivers/gpu/drm/nouveau/nvkm/engine/gr/gf100.h 		struct nvkm_memory *mem;
mem                89 drivers/gpu/drm/nouveau/nvkm/subdev/bar/nv50.c 	nvkm_wr32(device, 0x001704, 0x00000000 | bar->mem->addr >> 12);
mem                90 drivers/gpu/drm/nouveau/nvkm/subdev/bar/nv50.c 	nvkm_wr32(device, 0x001704, 0x40000000 | bar->mem->addr >> 12);
mem               115 drivers/gpu/drm/nouveau/nvkm/subdev/bar/nv50.c 	ret = nvkm_gpuobj_new(device, 0x20000, 0, false, NULL, &bar->mem);
mem               119 drivers/gpu/drm/nouveau/nvkm/subdev/bar/nv50.c 	ret = nvkm_gpuobj_new(device, bar->pgd_addr, 0, false, bar->mem,
mem               124 drivers/gpu/drm/nouveau/nvkm/subdev/bar/nv50.c 	ret = nvkm_gpuobj_new(device, 0x4000, 0, false, bar->mem, &bar->pgd);
mem               147 drivers/gpu/drm/nouveau/nvkm/subdev/bar/nv50.c 	ret = nvkm_vmm_join(bar->bar2_vmm, bar->mem->memory);
mem               151 drivers/gpu/drm/nouveau/nvkm/subdev/bar/nv50.c 	ret = nvkm_gpuobj_new(device, 24, 16, false, bar->mem, &bar->bar2);
mem               183 drivers/gpu/drm/nouveau/nvkm/subdev/bar/nv50.c 	ret = nvkm_vmm_join(bar->bar1_vmm, bar->mem->memory);
mem               187 drivers/gpu/drm/nouveau/nvkm/subdev/bar/nv50.c 	ret = nvkm_gpuobj_new(device, 24, 16, false, bar->mem, &bar->bar1);
mem               207 drivers/gpu/drm/nouveau/nvkm/subdev/bar/nv50.c 	if (bar->mem) {
mem               209 drivers/gpu/drm/nouveau/nvkm/subdev/bar/nv50.c 		nvkm_vmm_part(bar->bar1_vmm, bar->mem->memory);
mem               212 drivers/gpu/drm/nouveau/nvkm/subdev/bar/nv50.c 		nvkm_vmm_part(bar->bar2_vmm, bar->mem->memory);
mem               216 drivers/gpu/drm/nouveau/nvkm/subdev/bar/nv50.c 		nvkm_gpuobj_del(&bar->mem);
mem                10 drivers/gpu/drm/nouveau/nvkm/subdev/bar/nv50.h 	struct nvkm_gpuobj *mem;
mem               106 drivers/gpu/drm/nouveau/nvkm/subdev/fault/base.c 			      &buffer->mem);
mem               111 drivers/gpu/drm/nouveau/nvkm/subdev/fault/base.c 	buffer->addr = nvkm_memory_bar2(buffer->mem);
mem               154 drivers/gpu/drm/nouveau/nvkm/subdev/fault/base.c 			nvkm_memory_unref(&fault->buffer[i]->mem);
mem                34 drivers/gpu/drm/nouveau/nvkm/subdev/fault/gv100.c 	struct nvkm_memory *mem = buffer->mem;
mem                40 drivers/gpu/drm/nouveau/nvkm/subdev/fault/gv100.c 	nvkm_kmap(mem);
mem                43 drivers/gpu/drm/nouveau/nvkm/subdev/fault/gv100.c 		const u32 instlo = nvkm_ro32(mem, base + 0x00);
mem                44 drivers/gpu/drm/nouveau/nvkm/subdev/fault/gv100.c 		const u32 insthi = nvkm_ro32(mem, base + 0x04);
mem                45 drivers/gpu/drm/nouveau/nvkm/subdev/fault/gv100.c 		const u32 addrlo = nvkm_ro32(mem, base + 0x08);
mem                46 drivers/gpu/drm/nouveau/nvkm/subdev/fault/gv100.c 		const u32 addrhi = nvkm_ro32(mem, base + 0x0c);
mem                47 drivers/gpu/drm/nouveau/nvkm/subdev/fault/gv100.c 		const u32 timelo = nvkm_ro32(mem, base + 0x10);
mem                48 drivers/gpu/drm/nouveau/nvkm/subdev/fault/gv100.c 		const u32 timehi = nvkm_ro32(mem, base + 0x14);
mem                49 drivers/gpu/drm/nouveau/nvkm/subdev/fault/gv100.c 		const u32  info0 = nvkm_ro32(mem, base + 0x18);
mem                50 drivers/gpu/drm/nouveau/nvkm/subdev/fault/gv100.c 		const u32  info1 = nvkm_ro32(mem, base + 0x1c);
mem                70 drivers/gpu/drm/nouveau/nvkm/subdev/fault/gv100.c 	nvkm_done(mem);
mem                17 drivers/gpu/drm/nouveau/nvkm/subdev/fault/priv.h 	struct nvkm_memory *mem;
mem                38 drivers/gpu/drm/nouveau/nvkm/subdev/fault/user.c 	*size = nvkm_memory_size(buffer->mem);
mem                45 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ram.c 		.mem = vram->mn,
mem                30 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv1a.c 	u32 mem, mib;
mem                47 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv1a.c 		pci_read_config_dword(bridge, 0x7c, &mem);
mem                48 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv1a.c 		mib = ((mem >> 6) & 31) + 1;
mem                50 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv1a.c 		pci_read_config_dword(bridge, 0x84, &mem);
mem                51 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv1a.c 		mib = ((mem >> 4) & 127) + 1;
mem               288 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c 		.mem = node->mn,
mem                31 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/g84.c 	.mem = {{ -1,  0, NVIF_CLASS_MEM_NV50}, nv50_mem_new, nv50_mem_map },
mem                79 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gf100.c 	.mem = {{ -1,  0, NVIF_CLASS_MEM_GF100}, gf100_mem_new, gf100_mem_map },
mem                31 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gk104.c 	.mem = {{ -1,  0, NVIF_CLASS_MEM_GF100}, gf100_mem_new, gf100_mem_map },
mem                31 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gk20a.c 	.mem = {{ -1, -1, NVIF_CLASS_MEM_GF100}, .umap = gf100_mem_map },
mem                75 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gm200.c 	.mem = {{ -1,  0, NVIF_CLASS_MEM_GF100}, gf100_mem_new, gf100_mem_map },
mem                85 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gm200.c 	.mem = {{ -1,  0, NVIF_CLASS_MEM_GF100}, gf100_mem_new, gf100_mem_map },
mem                33 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gm20b.c 	.mem = {{ -1, -1, NVIF_CLASS_MEM_GF100}, .umap = gf100_mem_map },
mem                43 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gm20b.c 	.mem = {{ -1, -1, NVIF_CLASS_MEM_GF100}, .umap = gf100_mem_map },
mem                33 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gp100.c 	.mem = {{ -1,  0, NVIF_CLASS_MEM_GF100}, gf100_mem_new, gf100_mem_map },
mem                33 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gp10b.c 	.mem = {{ -1, -1, NVIF_CLASS_MEM_GF100}, .umap = gf100_mem_map },
mem                33 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/gv100.c 	.mem = {{ -1,  0, NVIF_CLASS_MEM_GF100}, gf100_mem_new, gf100_mem_map },
mem                31 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mcp77.c 	.mem = {{ -1,  0, NVIF_CLASS_MEM_NV50}, nv50_mem_new, nv50_mem_map },
mem                35 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	struct page **mem;
mem                57 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	struct nvkm_mem *mem = nvkm_mem(memory);
mem                58 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	if (mem->pages == 1 && mem->mem)
mem                59 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		return mem->dma[0];
mem                73 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	struct nvkm_mem *mem = nvkm_mem(memory);
mem                75 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		.memory = &mem->memory,
mem                77 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		.dma = mem->dma,
mem                85 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	struct nvkm_mem *mem = nvkm_mem(memory);
mem                86 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	if (mem->mem) {
mem                87 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		while (mem->pages--) {
mem                88 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 			dma_unmap_page(mem->mmu->subdev.device->dev,
mem                89 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 				       mem->dma[mem->pages], PAGE_SIZE,
mem                91 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 			__free_page(mem->mem[mem->pages]);
mem                93 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		kvfree(mem->dma);
mem                94 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		kvfree(mem->mem);
mem                96 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	return mem;
mem               113 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	struct nvkm_mem *mem = nvkm_mem(memory);
mem               115 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		.memory = &mem->memory,
mem               117 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		.sgl = mem->sgl,
mem               135 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	struct nvkm_mem *mem = nvkm_mem(memory);
mem               136 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	if (mem->mem) {
mem               137 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		*pmap = vmap(mem->mem, mem->pages, VM_MAP, PAGE_KERNEL);
mem               154 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	struct nvkm_mem *mem;
mem               166 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	if (!(mem = kzalloc(sizeof(*mem), GFP_KERNEL)))
mem               168 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	mem->target = target;
mem               169 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	mem->mmu = mmu;
mem               170 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	*pmemory = &mem->memory;
mem               174 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 			nvkm_memory_ctor(&nvkm_mem_dma, &mem->memory);
mem               175 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 			mem->dma = args->v0.dma;
mem               177 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 			nvkm_memory_ctor(&nvkm_mem_sgl, &mem->memory);
mem               178 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 			mem->sgl = args->v0.sgl;
mem               183 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		mem->pages = size >> PAGE_SHIFT;
mem               187 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		kfree(mem);
mem               191 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	nvkm_memory_ctor(&nvkm_mem_dma, &mem->memory);
mem               194 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	if (!(mem->mem = kvmalloc_array(size, sizeof(*mem->mem), GFP_KERNEL)))
mem               196 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	if (!(mem->dma = kvmalloc_array(size, sizeof(*mem->dma), GFP_KERNEL)))
mem               204 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 	for (mem->pages = 0; size; size--, mem->pages++) {
mem               209 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		mem->dma[mem->pages] = dma_map_page(mmu->subdev.device->dev,
mem               212 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		if (dma_mapping_error(dev, mem->dma[mem->pages])) {
mem               217 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		mem->mem[mem->pages] = p;
mem               231 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c 		ret = mmu->func->mem.vram(mmu, type, page, size,
mem                33 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/nv04.c 	.mem = {{ -1, -1, NVIF_CLASS_MEM_NV04}, nv04_mem_new, nv04_mem_map },
mem                45 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/nv41.c 	.mem = {{ -1, -1, NVIF_CLASS_MEM_NV04}, nv04_mem_new, nv04_mem_map },
mem                60 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/nv44.c 	.mem = {{ -1, -1, NVIF_CLASS_MEM_NV04}, nv04_mem_new, nv04_mem_map },
mem                67 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/nv50.c 	.mem = {{ -1,  0, NVIF_CLASS_MEM_NV50}, nv50_mem_new, nv50_mem_map },
mem                27 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/priv.h 	} mem;
mem                33 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/tu102.c 	.mem = {{ -1,  0, NVIF_CLASS_MEM_GF100}, gf100_mem_new, gf100_mem_map },
mem               110 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/umem.c 		int ret = mmu->func->mem.umap(mmu, umem->memory, argv, argc,
mem                37 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ummu.c 	if (mmu->func->mem.user.oclass && oclass->client->super) {
mem                39 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ummu.c 			oclass->base = mmu->func->mem.user;
mem              1467 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 	if (map->mem) {
mem              1468 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		for (; map->off; map->mem = map->mem->next) {
mem              1469 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			u64 size = (u64)map->mem->length << NVKM_RAM_MM_SHIFT;
mem              1474 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 		func = map->page->desc->func->mem;
mem              1550 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			const bool mem = next->memory != NULL;
mem              1559 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 			       (next->memory != NULL) == mem &&
mem              1570 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 							!mem);
mem                67 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h 	nvkm_vmm_pte_func mem;
mem               313 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h 		     ((u64)MAP->mem->offset << NVKM_RAM_MM_SHIFT),             \
mem               314 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h 		     ((u64)MAP->mem->length << NVKM_RAM_MM_SHIFT),             \
mem               315 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h 		     (MAP->mem = MAP->mem->next))
mem               100 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c 	.mem = gf100_vmm_pgt_mem,
mem                36 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgk104.c 	.mem = gf100_vmm_pgt_mem,
mem                39 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c 	.mem = gf100_vmm_pgt_mem,
mem                49 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c 	.mem = gf100_vmm_pgt_mem,
mem               167 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	.mem = gp100_vmm_pgt_mem,
mem               188 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	.mem = gp100_vmm_pgt_mem,
mem               266 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c 	.mem = gp100_vmm_pd0_mem,
mem               125 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c 	struct nvkm_memory *mem;
mem               135 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c 	mem = vmm->pd->pt[0]->memory;
mem               136 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c 	nvkm_kmap(mem);
mem               137 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c 	nvkm_wo32(mem, 0x00000, 0x0002103d); /* PCI, RW, PT, !LN */
mem               138 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c 	nvkm_wo32(mem, 0x00004, vmm->limit - 1);
mem               139 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c 	nvkm_done(mem);
mem               100 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 	.mem = nv50_vmm_pgt_mem,
mem               287 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 	if (map->mem && map->mem->type != kindm[kind]) {
mem               289 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c 			  kindm[kind], map->mem->type);
mem               784 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 	struct resource *mem;
mem               810 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 	mem = platform_get_resource(dev, IORESOURCE_MEM, 0);
mem               811 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 	if (!mem) {
mem               816 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 	omap_dmm->phys_base = mem->start;
mem               817 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 	omap_dmm->base = ioremap(mem->start, SZ_2K);
mem               397 drivers/gpu/drm/qxl/qxl_cmd.c 	create->mem = qxl_bo_physical_address(qdev, bo, 0);
mem               399 drivers/gpu/drm/qxl/qxl_cmd.c 	DRM_DEBUG_DRIVER("mem = %llx, from %p\n", create->mem, bo->kptr);
mem               323 drivers/gpu/drm/qxl/qxl_dev.h 	QXLPHYSICAL mem;
mem               310 drivers/gpu/drm/qxl/qxl_drv.h 		(bo->tbo.mem.mem_type == TTM_PL_VRAM)
mem               151 drivers/gpu/drm/qxl/qxl_object.c 	struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type];
mem               156 drivers/gpu/drm/qxl/qxl_object.c 	if (bo->tbo.mem.mem_type == TTM_PL_VRAM)
mem               158 drivers/gpu/drm/qxl/qxl_object.c 	else if (bo->tbo.mem.mem_type == TTM_PL_PRIV)
mem               164 drivers/gpu/drm/qxl/qxl_object.c 	ret = ttm_mem_io_reserve(bo->tbo.bdev, &bo->tbo.mem);
mem               167 drivers/gpu/drm/qxl/qxl_object.c 	return io_mapping_map_atomic_wc(map, bo->tbo.mem.bus.offset + page_offset);
mem               196 drivers/gpu/drm/qxl/qxl_object.c 	struct ttm_mem_type_manager *man = &bo->tbo.bdev->man[bo->tbo.mem.mem_type];
mem               198 drivers/gpu/drm/qxl/qxl_object.c 	if ((bo->tbo.mem.mem_type != TTM_PL_VRAM) &&
mem               199 drivers/gpu/drm/qxl/qxl_object.c 	    (bo->tbo.mem.mem_type != TTM_PL_PRIV))
mem               205 drivers/gpu/drm/qxl/qxl_object.c 	ttm_mem_io_free(bo->tbo.bdev, &bo->tbo.mem);
mem                82 drivers/gpu/drm/qxl/qxl_object.h 		*mem_type = bo->tbo.mem.mem_type;
mem               163 drivers/gpu/drm/qxl/qxl_ttm.c 				  struct ttm_mem_reg *mem)
mem               165 drivers/gpu/drm/qxl/qxl_ttm.c 	struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];
mem               168 drivers/gpu/drm/qxl/qxl_ttm.c 	mem->bus.addr = NULL;
mem               169 drivers/gpu/drm/qxl/qxl_ttm.c 	mem->bus.offset = 0;
mem               170 drivers/gpu/drm/qxl/qxl_ttm.c 	mem->bus.size = mem->num_pages << PAGE_SHIFT;
mem               171 drivers/gpu/drm/qxl/qxl_ttm.c 	mem->bus.base = 0;
mem               172 drivers/gpu/drm/qxl/qxl_ttm.c 	mem->bus.is_iomem = false;
mem               175 drivers/gpu/drm/qxl/qxl_ttm.c 	switch (mem->mem_type) {
mem               180 drivers/gpu/drm/qxl/qxl_ttm.c 		mem->bus.is_iomem = true;
mem               181 drivers/gpu/drm/qxl/qxl_ttm.c 		mem->bus.base = qdev->vram_base;
mem               182 drivers/gpu/drm/qxl/qxl_ttm.c 		mem->bus.offset = mem->start << PAGE_SHIFT;
mem               185 drivers/gpu/drm/qxl/qxl_ttm.c 		mem->bus.is_iomem = true;
mem               186 drivers/gpu/drm/qxl/qxl_ttm.c 		mem->bus.base = qdev->surfaceram_base;
mem               187 drivers/gpu/drm/qxl/qxl_ttm.c 		mem->bus.offset = mem->start << PAGE_SHIFT;
mem               196 drivers/gpu/drm/qxl/qxl_ttm.c 				struct ttm_mem_reg *mem)
mem               265 drivers/gpu/drm/qxl/qxl_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
mem               276 drivers/gpu/drm/qxl/qxl_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
mem               302 drivers/gpu/drm/qxl/qxl_ttm.c 	if (bo->mem.mem_type == TTM_PL_PRIV && qbo->surface_id)
mem              2854 drivers/gpu/drm/radeon/radeon.h 			struct ttm_mem_reg *mem);
mem               518 drivers/gpu/drm/radeon/radeon_cs.c 				&rdev->ring_tmp_bo.bo->tbo.mem);
mem               532 drivers/gpu/drm/radeon/radeon_cs.c 		r = radeon_vm_bo_update(rdev, bo_va, &bo->tbo.mem);
mem               460 drivers/gpu/drm/radeon/radeon_gem.c 	cur_placement = READ_ONCE(robj->tbo.mem.mem_type);
mem               490 drivers/gpu/drm/radeon/radeon_gem.c 	cur_placement = READ_ONCE(robj->tbo.mem.mem_type);
mem               574 drivers/gpu/drm/radeon/radeon_gem.c 		domain = radeon_mem_type_to_domain(entry->bo->mem.mem_type);
mem               587 drivers/gpu/drm/radeon/radeon_gem.c 		r = radeon_vm_bo_update(rdev, bo_va, &bo_va->bo->tbo.mem);
mem               793 drivers/gpu/drm/radeon/radeon_gem.c 		domain = radeon_mem_type_to_domain(rbo->tbo.mem.mem_type);
mem                81 drivers/gpu/drm/radeon/radeon_object.c 	radeon_update_memory_usage(bo, bo->tbo.mem.mem_type, -1);
mem               412 drivers/gpu/drm/radeon/radeon_object.c 		if (bo->tbo.mem.mem_type == TTM_PL_VRAM)
mem               556 drivers/gpu/drm/radeon/radeon_object.c 				radeon_mem_type_to_domain(bo->tbo.mem.mem_type);
mem               655 drivers/gpu/drm/radeon/radeon_object.c 			       bo->tbo.mem.start << PAGE_SHIFT,
mem               761 drivers/gpu/drm/radeon/radeon_object.c 	if (bo->tbo.mem.mem_type != TTM_PL_VRAM) {
mem               793 drivers/gpu/drm/radeon/radeon_object.c 	radeon_update_memory_usage(rbo, bo->mem.mem_type, -1);
mem               810 drivers/gpu/drm/radeon/radeon_object.c 	if (bo->mem.mem_type != TTM_PL_VRAM)
mem               813 drivers/gpu/drm/radeon/radeon_object.c 	size = bo->mem.num_pages << PAGE_SHIFT;
mem               814 drivers/gpu/drm/radeon/radeon_object.c 	offset = bo->mem.start << PAGE_SHIFT;
mem               839 drivers/gpu/drm/radeon/radeon_object.c 	offset = bo->mem.start << PAGE_SHIFT;
mem               855 drivers/gpu/drm/radeon/radeon_object.c 		*mem_type = bo->tbo.mem.mem_type;
mem               108 drivers/gpu/drm/radeon/radeon_object.h 	return (bo->tbo.mem.page_alignment << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;
mem               157 drivers/gpu/drm/radeon/radeon_pm.c 		if (bo->tbo.mem.mem_type == TTM_PL_VRAM)
mem               144 drivers/gpu/drm/radeon/radeon_ttm.c 	switch (bo->mem.mem_type) {
mem               149 drivers/gpu/drm/radeon/radeon_ttm.c 			 bo->mem.start < (rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT)) {
mem               194 drivers/gpu/drm/radeon/radeon_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
mem               262 drivers/gpu/drm/radeon/radeon_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
mem               307 drivers/gpu/drm/radeon/radeon_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
mem               345 drivers/gpu/drm/radeon/radeon_ttm.c 	struct ttm_mem_reg *old_mem = &bo->mem;
mem               402 drivers/gpu/drm/radeon/radeon_ttm.c static int radeon_ttm_io_mem_reserve(struct ttm_bo_device *bdev, struct ttm_mem_reg *mem)
mem               404 drivers/gpu/drm/radeon/radeon_ttm.c 	struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];
mem               407 drivers/gpu/drm/radeon/radeon_ttm.c 	mem->bus.addr = NULL;
mem               408 drivers/gpu/drm/radeon/radeon_ttm.c 	mem->bus.offset = 0;
mem               409 drivers/gpu/drm/radeon/radeon_ttm.c 	mem->bus.size = mem->num_pages << PAGE_SHIFT;
mem               410 drivers/gpu/drm/radeon/radeon_ttm.c 	mem->bus.base = 0;
mem               411 drivers/gpu/drm/radeon/radeon_ttm.c 	mem->bus.is_iomem = false;
mem               414 drivers/gpu/drm/radeon/radeon_ttm.c 	switch (mem->mem_type) {
mem               422 drivers/gpu/drm/radeon/radeon_ttm.c 			mem->bus.offset = mem->start << PAGE_SHIFT;
mem               423 drivers/gpu/drm/radeon/radeon_ttm.c 			mem->bus.base = rdev->mc.agp_base;
mem               424 drivers/gpu/drm/radeon/radeon_ttm.c 			mem->bus.is_iomem = !rdev->ddev->agp->cant_use_aperture;
mem               429 drivers/gpu/drm/radeon/radeon_ttm.c 		mem->bus.offset = mem->start << PAGE_SHIFT;
mem               431 drivers/gpu/drm/radeon/radeon_ttm.c 		if ((mem->bus.offset + mem->bus.size) > rdev->mc.visible_vram_size)
mem               433 drivers/gpu/drm/radeon/radeon_ttm.c 		mem->bus.base = rdev->mc.aper_base;
mem               434 drivers/gpu/drm/radeon/radeon_ttm.c 		mem->bus.is_iomem = true;
mem               440 drivers/gpu/drm/radeon/radeon_ttm.c 		if (mem->placement & TTM_PL_FLAG_WC)
mem               441 drivers/gpu/drm/radeon/radeon_ttm.c 			mem->bus.addr =
mem               442 drivers/gpu/drm/radeon/radeon_ttm.c 				ioremap_wc(mem->bus.base + mem->bus.offset,
mem               443 drivers/gpu/drm/radeon/radeon_ttm.c 					   mem->bus.size);
mem               445 drivers/gpu/drm/radeon/radeon_ttm.c 			mem->bus.addr =
mem               446 drivers/gpu/drm/radeon/radeon_ttm.c 				ioremap_nocache(mem->bus.base + mem->bus.offset,
mem               447 drivers/gpu/drm/radeon/radeon_ttm.c 						mem->bus.size);
mem               448 drivers/gpu/drm/radeon/radeon_ttm.c 		if (!mem->bus.addr)
mem               457 drivers/gpu/drm/radeon/radeon_ttm.c 		mem->bus.base = (mem->bus.base & 0x0ffffffffUL) +
mem               467 drivers/gpu/drm/radeon/radeon_ttm.c static void radeon_ttm_io_mem_free(struct ttm_bo_device *bdev, struct ttm_mem_reg *mem)
mem               914 drivers/gpu/drm/radeon/radeon_vm.c 			struct ttm_mem_reg *mem)
mem               930 drivers/gpu/drm/radeon/radeon_vm.c 	if (mem) {
mem               948 drivers/gpu/drm/radeon/radeon_vm.c 	if (mem) {
mem               949 drivers/gpu/drm/radeon/radeon_vm.c 		addr = (u64)mem->start << PAGE_SHIFT;
mem               950 drivers/gpu/drm/radeon/radeon_vm.c 		if (mem->mem_type != TTM_PL_SYSTEM) {
mem               953 drivers/gpu/drm/radeon/radeon_vm.c 		if (mem->mem_type == TTM_PL_TT) {
mem               510 drivers/gpu/drm/rcar-du/rcar_du_drv.c 	struct resource *mem;
mem               524 drivers/gpu/drm/rcar-du/rcar_du_drv.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               525 drivers/gpu/drm/rcar-du/rcar_du_drv.c 	rcdu->mmio = devm_ioremap_resource(&pdev->dev, mem);
mem               173 drivers/gpu/drm/rcar-du/rcar_du_vsp.c 		cfg.mem[i] = sg_dma_address(state->sg_tables[i].sgl)
mem               235 drivers/gpu/drm/rcar-du/rcar_du_writeback.c 		cfg->mem[i] = sg_dma_address(rjob->sg_tables[i].sgl)
mem               864 drivers/gpu/drm/rcar-du/rcar_lvds.c 	struct resource *mem;
mem               888 drivers/gpu/drm/rcar-du/rcar_lvds.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               889 drivers/gpu/drm/rcar-du/rcar_lvds.c 	lvds->mmio = devm_ioremap_resource(&pdev->dev, mem);
mem                89 drivers/gpu/drm/sis/sis_mm.c 	drm_sis_mem_t *mem = data;
mem               111 drivers/gpu/drm/sis/sis_mm.c 	mem->size = (mem->size + SIS_MM_ALIGN_MASK) >> SIS_MM_ALIGN_SHIFT;
mem               115 drivers/gpu/drm/sis/sis_mm.c 					    mem->size);
mem               119 drivers/gpu/drm/sis/sis_mm.c 		item->req.size = mem->size;
mem               127 drivers/gpu/drm/sis/sis_mm.c 					    mem->size);
mem               142 drivers/gpu/drm/sis/sis_mm.c 	mem->offset = ((pool == 0) ?
mem               145 drivers/gpu/drm/sis/sis_mm.c 	mem->free = user_key;
mem               146 drivers/gpu/drm/sis/sis_mm.c 	mem->size = mem->size << SIS_MM_ALIGN_SHIFT;
mem               156 drivers/gpu/drm/sis/sis_mm.c 	mem->offset = 0;
mem               157 drivers/gpu/drm/sis/sis_mm.c 	mem->size = 0;
mem               158 drivers/gpu/drm/sis/sis_mm.c 	mem->free = 0;
mem               160 drivers/gpu/drm/sis/sis_mm.c 	DRM_DEBUG("alloc %d, size = %ld, offset = %ld\n", pool, mem->size,
mem               161 drivers/gpu/drm/sis/sis_mm.c 		  mem->offset);
mem               169 drivers/gpu/drm/sis/sis_mm.c 	drm_sis_mem_t *mem = data;
mem               173 drivers/gpu/drm/sis/sis_mm.c 	obj = idr_find(&dev_priv->object_idr, mem->free);
mem               179 drivers/gpu/drm/sis/sis_mm.c 	idr_remove(&dev_priv->object_idr, mem->free);
mem               189 drivers/gpu/drm/sis/sis_mm.c 	DRM_DEBUG("free = 0x%lx\n", mem->free);
mem                47 drivers/gpu/drm/ttm/ttm_agp_backend.c 	struct agp_memory *mem;
mem                56 drivers/gpu/drm/ttm/ttm_agp_backend.c 	struct agp_memory *mem;
mem                60 drivers/gpu/drm/ttm/ttm_agp_backend.c 	mem = agp_allocate_memory(agp_be->bridge, ttm->num_pages, AGP_USER_MEMORY);
mem                61 drivers/gpu/drm/ttm/ttm_agp_backend.c 	if (unlikely(mem == NULL))
mem                64 drivers/gpu/drm/ttm/ttm_agp_backend.c 	mem->page_count = 0;
mem                71 drivers/gpu/drm/ttm/ttm_agp_backend.c 		mem->pages[mem->page_count++] = page;
mem                73 drivers/gpu/drm/ttm/ttm_agp_backend.c 	agp_be->mem = mem;
mem                75 drivers/gpu/drm/ttm/ttm_agp_backend.c 	mem->is_flushed = 1;
mem                76 drivers/gpu/drm/ttm/ttm_agp_backend.c 	mem->type = (cached) ? AGP_USER_CACHED_MEMORY : AGP_USER_MEMORY;
mem                78 drivers/gpu/drm/ttm/ttm_agp_backend.c 	ret = agp_bind_memory(mem, node->start);
mem                89 drivers/gpu/drm/ttm/ttm_agp_backend.c 	if (agp_be->mem) {
mem                90 drivers/gpu/drm/ttm/ttm_agp_backend.c 		if (agp_be->mem->is_bound)
mem                91 drivers/gpu/drm/ttm/ttm_agp_backend.c 			return agp_unbind_memory(agp_be->mem);
mem                92 drivers/gpu/drm/ttm/ttm_agp_backend.c 		agp_free_memory(agp_be->mem);
mem                93 drivers/gpu/drm/ttm/ttm_agp_backend.c 		agp_be->mem = NULL;
mem               102 drivers/gpu/drm/ttm/ttm_agp_backend.c 	if (agp_be->mem)
mem               124 drivers/gpu/drm/ttm/ttm_agp_backend.c 	agp_be->mem = NULL;
mem               102 drivers/gpu/drm/ttm/ttm_bo.c 		   bo, bo->mem.num_pages, bo->mem.size >> 10,
mem               103 drivers/gpu/drm/ttm/ttm_bo.c 		   bo->mem.size >> 20);
mem               157 drivers/gpu/drm/ttm/ttm_bo.c 	BUG_ON(bo->mem.mm_node != NULL);
mem               171 drivers/gpu/drm/ttm/ttm_bo.c 				  struct ttm_mem_reg *mem)
mem               181 drivers/gpu/drm/ttm/ttm_bo.c 	if (mem->placement & TTM_PL_FLAG_NO_EVICT)
mem               184 drivers/gpu/drm/ttm/ttm_bo.c 	man = &bdev->man[mem->mem_type];
mem               198 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_bo_add_mem_to_lru(bo, &bo->mem);
mem               253 drivers/gpu/drm/ttm/ttm_bo.c 	if (bulk && !(bo->mem.placement & TTM_PL_FLAG_NO_EVICT)) {
mem               254 drivers/gpu/drm/ttm/ttm_bo.c 		switch (bo->mem.mem_type) {
mem               321 drivers/gpu/drm/ttm/ttm_bo.c 				  struct ttm_mem_reg *mem, bool evict,
mem               325 drivers/gpu/drm/ttm/ttm_bo.c 	bool old_is_pci = ttm_mem_reg_is_pci(bdev, &bo->mem);
mem               326 drivers/gpu/drm/ttm/ttm_bo.c 	bool new_is_pci = ttm_mem_reg_is_pci(bdev, mem);
mem               327 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_mem_type_manager *old_man = &bdev->man[bo->mem.mem_type];
mem               328 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_mem_type_manager *new_man = &bdev->man[mem->mem_type];
mem               332 drivers/gpu/drm/ttm/ttm_bo.c 	    ((mem->placement & bo->mem.placement & TTM_PL_MASK_CACHING) == 0)) {
mem               352 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_tt_set_placement_caching(bo->ttm, mem->placement);
mem               356 drivers/gpu/drm/ttm/ttm_bo.c 		if (mem->mem_type != TTM_PL_SYSTEM) {
mem               357 drivers/gpu/drm/ttm/ttm_bo.c 			ret = ttm_tt_bind(bo->ttm, mem, ctx);
mem               362 drivers/gpu/drm/ttm/ttm_bo.c 		if (bo->mem.mem_type == TTM_PL_SYSTEM) {
mem               364 drivers/gpu/drm/ttm/ttm_bo.c 				bdev->driver->move_notify(bo, evict, mem);
mem               365 drivers/gpu/drm/ttm/ttm_bo.c 			bo->mem = *mem;
mem               366 drivers/gpu/drm/ttm/ttm_bo.c 			mem->mm_node = NULL;
mem               372 drivers/gpu/drm/ttm/ttm_bo.c 		bdev->driver->move_notify(bo, evict, mem);
mem               376 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_move_ttm(bo, ctx, mem);
mem               378 drivers/gpu/drm/ttm/ttm_bo.c 		ret = bdev->driver->move(bo, evict, ctx, mem);
mem               380 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_move_memcpy(bo, ctx, mem);
mem               384 drivers/gpu/drm/ttm/ttm_bo.c 			swap(*mem, bo->mem);
mem               385 drivers/gpu/drm/ttm/ttm_bo.c 			bdev->driver->move_notify(bo, false, mem);
mem               386 drivers/gpu/drm/ttm/ttm_bo.c 			swap(*mem, bo->mem);
mem               395 drivers/gpu/drm/ttm/ttm_bo.c 			ret = bdev->driver->invalidate_caches(bdev, bo->mem.placement);
mem               402 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->mem.mm_node)
mem               403 drivers/gpu/drm/ttm/ttm_bo.c 		bo->offset = (bo->mem.start << PAGE_SHIFT) +
mem               404 drivers/gpu/drm/ttm/ttm_bo.c 		    bdev->man[bo->mem.mem_type].gpu_offset;
mem               412 drivers/gpu/drm/ttm/ttm_bo.c 	new_man = &bdev->man[bo->mem.mem_type];
mem               436 drivers/gpu/drm/ttm/ttm_bo.c 	ttm_bo_mem_put(bo, &bo->mem);
mem               513 drivers/gpu/drm/ttm/ttm_bo.c 		if (bo->mem.placement & TTM_PL_FLAG_NO_EVICT) {
mem               514 drivers/gpu/drm/ttm/ttm_bo.c 			bo->mem.placement &= ~TTM_PL_FLAG_NO_EVICT;
mem               676 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_mem_type_manager *man = &bdev->man[bo->mem.mem_type];
mem               731 drivers/gpu/drm/ttm/ttm_bo.c 	evict_mem = bo->mem;
mem               764 drivers/gpu/drm/ttm/ttm_bo.c 	if (place->fpfn >= (bo->mem.start + bo->mem.size) ||
mem               765 drivers/gpu/drm/ttm/ttm_bo.c 	    (place->lpfn && place->lpfn <= bo->mem.start))
mem               917 drivers/gpu/drm/ttm/ttm_bo.c void ttm_bo_mem_put(struct ttm_buffer_object *bo, struct ttm_mem_reg *mem)
mem               919 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_mem_type_manager *man = &bo->bdev->man[mem->mem_type];
mem               921 drivers/gpu/drm/ttm/ttm_bo.c 	if (mem->mm_node)
mem               922 drivers/gpu/drm/ttm/ttm_bo.c 		(*man->func->put_node)(man, mem);
mem               931 drivers/gpu/drm/ttm/ttm_bo.c 				 struct ttm_mem_reg *mem,
mem               966 drivers/gpu/drm/ttm/ttm_bo.c 				  struct ttm_mem_reg *mem,
mem               970 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];
mem               976 drivers/gpu/drm/ttm/ttm_bo.c 		ret = (*man->func->get_node)(man, bo, place, mem);
mem               979 drivers/gpu/drm/ttm/ttm_bo.c 		if (mem->mm_node)
mem               981 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_mem_evict_first(bdev, mem->mem_type, place, ctx,
mem               987 drivers/gpu/drm/ttm/ttm_bo.c 	return ttm_bo_add_move_fence(bo, man, mem, ctx->no_wait_gpu);
mem              1047 drivers/gpu/drm/ttm/ttm_bo.c 				struct ttm_mem_reg *mem,
mem              1067 drivers/gpu/drm/ttm/ttm_bo.c 	cur_flags = ttm_bo_select_caching(man, bo->mem.placement, cur_flags);
mem              1074 drivers/gpu/drm/ttm/ttm_bo.c 	mem->mem_type = mem_type;
mem              1075 drivers/gpu/drm/ttm/ttm_bo.c 	mem->placement = cur_flags;
mem              1077 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->mem.mem_type < mem_type && !list_empty(&bo->lru)) {
mem              1080 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_bo_add_mem_to_lru(bo, mem);
mem              1097 drivers/gpu/drm/ttm/ttm_bo.c 			struct ttm_mem_reg *mem,
mem              1108 drivers/gpu/drm/ttm/ttm_bo.c 	mem->mm_node = NULL;
mem              1113 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_mem_placement(bo, place, mem, ctx);
mem              1120 drivers/gpu/drm/ttm/ttm_bo.c 		mem->mm_node = NULL;
mem              1121 drivers/gpu/drm/ttm/ttm_bo.c 		if (mem->mem_type == TTM_PL_SYSTEM)
mem              1124 drivers/gpu/drm/ttm/ttm_bo.c 		man = &bdev->man[mem->mem_type];
mem              1125 drivers/gpu/drm/ttm/ttm_bo.c 		ret = (*man->func->get_node)(man, bo, place, mem);
mem              1129 drivers/gpu/drm/ttm/ttm_bo.c 		if (!mem->mm_node)
mem              1132 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_add_move_fence(bo, man, mem, ctx->no_wait_gpu);
mem              1134 drivers/gpu/drm/ttm/ttm_bo.c 			(*man->func->put_node)(man, mem);
mem              1146 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_mem_placement(bo, place, mem, ctx);
mem              1153 drivers/gpu/drm/ttm/ttm_bo.c 		mem->mm_node = NULL;
mem              1154 drivers/gpu/drm/ttm/ttm_bo.c 		if (mem->mem_type == TTM_PL_SYSTEM)
mem              1157 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_mem_force_space(bo, place, mem, ctx);
mem              1158 drivers/gpu/drm/ttm/ttm_bo.c 		if (ret == 0 && mem->mm_node)
mem              1172 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->mem.mem_type == TTM_PL_SYSTEM && !list_empty(&bo->lru)) {
mem              1187 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_mem_reg mem;
mem              1191 drivers/gpu/drm/ttm/ttm_bo.c 	mem.num_pages = bo->num_pages;
mem              1192 drivers/gpu/drm/ttm/ttm_bo.c 	mem.size = mem.num_pages << PAGE_SHIFT;
mem              1193 drivers/gpu/drm/ttm/ttm_bo.c 	mem.page_alignment = bo->mem.page_alignment;
mem              1194 drivers/gpu/drm/ttm/ttm_bo.c 	mem.bus.io_reserved_vm = false;
mem              1195 drivers/gpu/drm/ttm/ttm_bo.c 	mem.bus.io_reserved_count = 0;
mem              1199 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_mem_space(bo, placement, &mem, ctx);
mem              1202 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_handle_move_mem(bo, &mem, false, ctx);
mem              1204 drivers/gpu/drm/ttm/ttm_bo.c 	if (ret && mem.mm_node)
mem              1205 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_bo_mem_put(bo, &mem);
mem              1211 drivers/gpu/drm/ttm/ttm_bo.c 				 struct ttm_mem_reg *mem,
mem              1219 drivers/gpu/drm/ttm/ttm_bo.c 		if (mem->mm_node && (mem->start < heap->fpfn ||
mem              1220 drivers/gpu/drm/ttm/ttm_bo.c 		     (heap->lpfn != 0 && (mem->start + mem->num_pages) > heap->lpfn)))
mem              1224 drivers/gpu/drm/ttm/ttm_bo.c 		if ((*new_flags & mem->placement & TTM_PL_MASK_CACHING) &&
mem              1225 drivers/gpu/drm/ttm/ttm_bo.c 		    (*new_flags & mem->placement & TTM_PL_MASK_MEM) &&
mem              1227 drivers/gpu/drm/ttm/ttm_bo.c 		     (mem->placement & TTM_PL_FLAG_CONTIGUOUS)))
mem              1234 drivers/gpu/drm/ttm/ttm_bo.c 		       struct ttm_mem_reg *mem,
mem              1238 drivers/gpu/drm/ttm/ttm_bo.c 				 mem, new_flags))
mem              1245 drivers/gpu/drm/ttm/ttm_bo.c 				 mem, new_flags))
mem              1263 drivers/gpu/drm/ttm/ttm_bo.c 	if (!ttm_bo_mem_compat(placement, &bo->mem, &new_flags)) {
mem              1272 drivers/gpu/drm/ttm/ttm_bo.c 		ttm_flag_masked(&bo->mem.placement, new_flags,
mem              1278 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->mem.mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) {
mem              1337 drivers/gpu/drm/ttm/ttm_bo.c 	bo->mem.size = num_pages << PAGE_SHIFT;
mem              1338 drivers/gpu/drm/ttm/ttm_bo.c 	bo->mem.mem_type = TTM_PL_SYSTEM;
mem              1339 drivers/gpu/drm/ttm/ttm_bo.c 	bo->mem.num_pages = bo->num_pages;
mem              1340 drivers/gpu/drm/ttm/ttm_bo.c 	bo->mem.mm_node = NULL;
mem              1341 drivers/gpu/drm/ttm/ttm_bo.c 	bo->mem.page_alignment = page_alignment;
mem              1342 drivers/gpu/drm/ttm/ttm_bo.c 	bo->mem.bus.io_reserved_vm = false;
mem              1343 drivers/gpu/drm/ttm/ttm_bo.c 	bo->mem.bus.io_reserved_count = 0;
mem              1345 drivers/gpu/drm/ttm/ttm_bo.c 	bo->mem.placement = (TTM_PL_FLAG_SYSTEM | TTM_PL_FLAG_CACHED);
mem              1371 drivers/gpu/drm/ttm/ttm_bo.c 					 bo->mem.num_pages);
mem              1392 drivers/gpu/drm/ttm/ttm_bo.c 	if (resv && !(bo->mem.placement & TTM_PL_FLAG_NO_EVICT)) {
mem              1777 drivers/gpu/drm/ttm/ttm_bo.c bool ttm_mem_reg_is_pci(struct ttm_bo_device *bdev, struct ttm_mem_reg *mem)
mem              1779 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];
mem              1782 drivers/gpu/drm/ttm/ttm_bo.c 		if (mem->mem_type == TTM_PL_SYSTEM)
mem              1788 drivers/gpu/drm/ttm/ttm_bo.c 		if (mem->placement & TTM_PL_FLAG_CACHED)
mem              1805 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_mem_type_manager *man = &bdev->man[bo->mem.mem_type];
mem              1909 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->mem.mem_type != TTM_PL_SYSTEM ||
mem              1914 drivers/gpu/drm/ttm/ttm_bo.c 		evict_mem = bo->mem;
mem                54 drivers/gpu/drm/ttm/ttm_bo_manager.c 			       struct ttm_mem_reg *mem)
mem                77 drivers/gpu/drm/ttm/ttm_bo_manager.c 					  mem->num_pages,
mem                78 drivers/gpu/drm/ttm/ttm_bo_manager.c 					  mem->page_alignment, 0,
mem                85 drivers/gpu/drm/ttm/ttm_bo_manager.c 		mem->mm_node = node;
mem                86 drivers/gpu/drm/ttm/ttm_bo_manager.c 		mem->start = node->start;
mem                93 drivers/gpu/drm/ttm/ttm_bo_manager.c 				struct ttm_mem_reg *mem)
mem                97 drivers/gpu/drm/ttm/ttm_bo_manager.c 	if (mem->mm_node) {
mem                99 drivers/gpu/drm/ttm/ttm_bo_manager.c 		drm_mm_remove_node(mem->mm_node);
mem               102 drivers/gpu/drm/ttm/ttm_bo_manager.c 		kfree(mem->mm_node);
mem               103 drivers/gpu/drm/ttm/ttm_bo_manager.c 		mem->mm_node = NULL;
mem                50 drivers/gpu/drm/ttm/ttm_bo_util.c 	ttm_bo_mem_put(bo, &bo->mem);
mem                58 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_reg *old_mem = &bo->mem;
mem               134 drivers/gpu/drm/ttm/ttm_bo_util.c 		       struct ttm_mem_reg *mem)
mem               136 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];
mem               142 drivers/gpu/drm/ttm/ttm_bo_util.c 		return bdev->driver->io_mem_reserve(bdev, mem);
mem               145 drivers/gpu/drm/ttm/ttm_bo_util.c 	    mem->bus.io_reserved_count++ == 0) {
mem               147 drivers/gpu/drm/ttm/ttm_bo_util.c 		ret = bdev->driver->io_mem_reserve(bdev, mem);
mem               159 drivers/gpu/drm/ttm/ttm_bo_util.c 		     struct ttm_mem_reg *mem)
mem               161 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];
mem               167 drivers/gpu/drm/ttm/ttm_bo_util.c 	    --mem->bus.io_reserved_count == 0 &&
mem               169 drivers/gpu/drm/ttm/ttm_bo_util.c 		bdev->driver->io_mem_free(bdev, mem);
mem               176 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_reg *mem = &bo->mem;
mem               179 drivers/gpu/drm/ttm/ttm_bo_util.c 	if (!mem->bus.io_reserved_vm) {
mem               181 drivers/gpu/drm/ttm/ttm_bo_util.c 			&bo->bdev->man[mem->mem_type];
mem               183 drivers/gpu/drm/ttm/ttm_bo_util.c 		ret = ttm_mem_io_reserve(bo->bdev, mem);
mem               186 drivers/gpu/drm/ttm/ttm_bo_util.c 		mem->bus.io_reserved_vm = true;
mem               196 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_reg *mem = &bo->mem;
mem               198 drivers/gpu/drm/ttm/ttm_bo_util.c 	if (mem->bus.io_reserved_vm) {
mem               199 drivers/gpu/drm/ttm/ttm_bo_util.c 		mem->bus.io_reserved_vm = false;
mem               201 drivers/gpu/drm/ttm/ttm_bo_util.c 		ttm_mem_io_free(bo->bdev, mem);
mem               205 drivers/gpu/drm/ttm/ttm_bo_util.c static int ttm_mem_reg_ioremap(struct ttm_bo_device *bdev, struct ttm_mem_reg *mem,
mem               208 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];
mem               214 drivers/gpu/drm/ttm/ttm_bo_util.c 	ret = ttm_mem_io_reserve(bdev, mem);
mem               216 drivers/gpu/drm/ttm/ttm_bo_util.c 	if (ret || !mem->bus.is_iomem)
mem               219 drivers/gpu/drm/ttm/ttm_bo_util.c 	if (mem->bus.addr) {
mem               220 drivers/gpu/drm/ttm/ttm_bo_util.c 		addr = mem->bus.addr;
mem               222 drivers/gpu/drm/ttm/ttm_bo_util.c 		if (mem->placement & TTM_PL_FLAG_WC)
mem               223 drivers/gpu/drm/ttm/ttm_bo_util.c 			addr = ioremap_wc(mem->bus.base + mem->bus.offset, mem->bus.size);
mem               225 drivers/gpu/drm/ttm/ttm_bo_util.c 			addr = ioremap_nocache(mem->bus.base + mem->bus.offset, mem->bus.size);
mem               228 drivers/gpu/drm/ttm/ttm_bo_util.c 			ttm_mem_io_free(bdev, mem);
mem               237 drivers/gpu/drm/ttm/ttm_bo_util.c static void ttm_mem_reg_iounmap(struct ttm_bo_device *bdev, struct ttm_mem_reg *mem,
mem               242 drivers/gpu/drm/ttm/ttm_bo_util.c 	man = &bdev->man[mem->mem_type];
mem               244 drivers/gpu/drm/ttm/ttm_bo_util.c 	if (virtual && mem->bus.addr == NULL)
mem               247 drivers/gpu/drm/ttm/ttm_bo_util.c 	ttm_mem_io_free(bdev, mem);
mem               363 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_reg *old_mem = &bo->mem;
mem               496 drivers/gpu/drm/ttm/ttm_bo_util.c 	fbo->base.mem.placement |= TTM_PL_FLAG_NO_EVICT;
mem               560 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_reg *mem = &bo->mem;
mem               562 drivers/gpu/drm/ttm/ttm_bo_util.c 	if (bo->mem.bus.addr) {
mem               564 drivers/gpu/drm/ttm/ttm_bo_util.c 		map->virtual = (void *)(((u8 *)bo->mem.bus.addr) + offset);
mem               567 drivers/gpu/drm/ttm/ttm_bo_util.c 		if (mem->placement & TTM_PL_FLAG_WC)
mem               568 drivers/gpu/drm/ttm/ttm_bo_util.c 			map->virtual = ioremap_wc(bo->mem.bus.base + bo->mem.bus.offset + offset,
mem               571 drivers/gpu/drm/ttm/ttm_bo_util.c 			map->virtual = ioremap_nocache(bo->mem.bus.base + bo->mem.bus.offset + offset,
mem               582 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_reg *mem = &bo->mem;
mem               597 drivers/gpu/drm/ttm/ttm_bo_util.c 	if (num_pages == 1 && (mem->placement & TTM_PL_FLAG_CACHED)) {
mem               611 drivers/gpu/drm/ttm/ttm_bo_util.c 		prot = ttm_io_prot(mem->placement, PAGE_KERNEL);
mem               624 drivers/gpu/drm/ttm/ttm_bo_util.c 		&bo->bdev->man[bo->mem.mem_type];
mem               636 drivers/gpu/drm/ttm/ttm_bo_util.c 	ret = ttm_mem_io_reserve(bo->bdev, &bo->mem);
mem               640 drivers/gpu/drm/ttm/ttm_bo_util.c 	if (!bo->mem.bus.is_iomem) {
mem               654 drivers/gpu/drm/ttm/ttm_bo_util.c 		&bo->bdev->man[bo->mem.mem_type];
mem               674 drivers/gpu/drm/ttm/ttm_bo_util.c 	ttm_mem_io_free(map->bo->bdev, &map->bo->mem);
mem               688 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_reg *old_mem = &bo->mem;
mem               748 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_mem_reg *old_mem = &bo->mem;
mem               849 drivers/gpu/drm/ttm/ttm_bo_util.c 	memset(&bo->mem, 0, sizeof(bo->mem));
mem               850 drivers/gpu/drm/ttm/ttm_bo_util.c 	bo->mem.mem_type = TTM_PL_SYSTEM;
mem               105 drivers/gpu/drm/ttm/ttm_bo_vm.c 	return ((bo->mem.bus.base + bo->mem.bus.offset) >> PAGE_SHIFT)
mem               125 drivers/gpu/drm/ttm/ttm_bo_vm.c 		&bdev->man[bo->mem.mem_type];
mem               231 drivers/gpu/drm/ttm/ttm_bo_vm.c 	if (bo->mem.bus.is_iomem) {
mem               232 drivers/gpu/drm/ttm/ttm_bo_vm.c 		cvma.vm_page_prot = ttm_io_prot(bo->mem.placement,
mem               243 drivers/gpu/drm/ttm/ttm_bo_vm.c 		cvma.vm_page_prot = ttm_io_prot(bo->mem.placement,
mem               258 drivers/gpu/drm/ttm/ttm_bo_vm.c 		if (bo->mem.bus.is_iomem) {
mem               372 drivers/gpu/drm/ttm/ttm_bo_vm.c 	switch (bo->mem.mem_type) {
mem               303 drivers/gpu/drm/ttm/ttm_memory.c 	uint64_t mem;
mem               309 drivers/gpu/drm/ttm/ttm_memory.c 	mem = si->totalram - si->totalhigh;
mem               310 drivers/gpu/drm/ttm/ttm_memory.c 	mem *= si->mem_unit;
mem               313 drivers/gpu/drm/ttm/ttm_memory.c 	zone->zone_mem = mem;
mem               314 drivers/gpu/drm/ttm/ttm_memory.c 	zone->max_mem = mem >> 1;
mem               315 drivers/gpu/drm/ttm/ttm_memory.c 	zone->emer_mem = (mem >> 1) + (mem >> 2);
mem               316 drivers/gpu/drm/ttm/ttm_memory.c 	zone->swap_limit = zone->max_mem - (mem >> 3);
mem               335 drivers/gpu/drm/ttm/ttm_memory.c 	uint64_t mem;
mem               345 drivers/gpu/drm/ttm/ttm_memory.c 	mem = si->totalram;
mem               346 drivers/gpu/drm/ttm/ttm_memory.c 	mem *= si->mem_unit;
mem               349 drivers/gpu/drm/ttm/ttm_memory.c 	zone->zone_mem = mem;
mem               350 drivers/gpu/drm/ttm/ttm_memory.c 	zone->max_mem = mem >> 1;
mem               351 drivers/gpu/drm/ttm/ttm_memory.c 	zone->emer_mem = (mem >> 1) + (mem >> 2);
mem               352 drivers/gpu/drm/ttm/ttm_memory.c 	zone->swap_limit = zone->max_mem - (mem >> 3);
mem               371 drivers/gpu/drm/ttm/ttm_memory.c 	uint64_t mem;
mem               377 drivers/gpu/drm/ttm/ttm_memory.c 	mem = si->totalram;
mem               378 drivers/gpu/drm/ttm/ttm_memory.c 	mem *= si->mem_unit;
mem               384 drivers/gpu/drm/ttm/ttm_memory.c 	if (mem <= ((uint64_t) 1ULL << 32)) {
mem               395 drivers/gpu/drm/ttm/ttm_memory.c 	mem = ((uint64_t) 1ULL << 32);
mem               397 drivers/gpu/drm/ttm/ttm_memory.c 	zone->zone_mem = mem;
mem               398 drivers/gpu/drm/ttm/ttm_memory.c 	zone->max_mem = mem >> 1;
mem               399 drivers/gpu/drm/ttm/ttm_memory.c 	zone->emer_mem = (mem >> 1) + (mem >> 2);
mem               400 drivers/gpu/drm/ttm/ttm_memory.c 	zone->swap_limit = zone->max_mem - (mem >> 3);
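
All three zone initialisers in the ttm_memory.c lines above derive their limits from a total byte count with the same shift arithmetic: half for the soft limit, three quarters for the emergency limit, and the soft limit minus one eighth for the swap threshold. A self-contained sketch of just that math (the struct is illustrative, not struct ttm_mem_zone):

#include <stdint.h>
#include <stdio.h>

/* Illustrative only: mirrors the shift arithmetic seen in ttm_memory.c. */
struct zone_limits {
	uint64_t zone_mem;   /* total memory attributed to the zone */
	uint64_t max_mem;    /* soft limit: half of the zone        */
	uint64_t emer_mem;   /* emergency limit: three quarters     */
	uint64_t swap_limit; /* start swapping: max_mem minus 1/8   */
};

static void zone_limits_init(struct zone_limits *z, uint64_t mem)
{
	z->zone_mem   = mem;
	z->max_mem    = mem >> 1;
	z->emer_mem   = (mem >> 1) + (mem >> 2);
	z->swap_limit = z->max_mem - (mem >> 3);
}

int main(void)
{
	struct zone_limits z;

	zone_limits_init(&z, 16ULL << 30);	/* e.g. 16 GiB of RAM */
	printf("max %llu emer %llu swap %llu\n",
	       (unsigned long long)z.max_mem,
	       (unsigned long long)z.emer_mem,
	       (unsigned long long)z.swap_limit);
	return 0;
}
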
mem               118 drivers/gpu/drm/via/via_mm.c 	drm_via_mem_t *mem = data;
mem               125 drivers/gpu/drm/via/via_mm.c 	if (mem->type > VIA_MEM_AGP) {
mem               130 drivers/gpu/drm/via/via_mm.c 	if (0 == ((mem->type == VIA_MEM_VIDEO) ? dev_priv->vram_initialized :
mem               144 drivers/gpu/drm/via/via_mm.c 	tmpSize = (mem->size + VIA_MM_ALIGN_MASK) >> VIA_MM_ALIGN_SHIFT;
mem               145 drivers/gpu/drm/via/via_mm.c 	if (mem->type == VIA_MEM_AGP)
mem               164 drivers/gpu/drm/via/via_mm.c 	mem->offset = ((mem->type == VIA_MEM_VIDEO) ?
mem               167 drivers/gpu/drm/via/via_mm.c 	mem->index = user_key;
mem               177 drivers/gpu/drm/via/via_mm.c 	mem->offset = 0;
mem               178 drivers/gpu/drm/via/via_mm.c 	mem->size = 0;
mem               179 drivers/gpu/drm/via/via_mm.c 	mem->index = 0;
mem               188 drivers/gpu/drm/via/via_mm.c 	drm_via_mem_t *mem = data;
mem               192 drivers/gpu/drm/via/via_mm.c 	obj = idr_find(&dev_priv->object_idr, mem->index);
mem               198 drivers/gpu/drm/via/via_mm.c 	idr_remove(&dev_priv->object_idr, mem->index);
mem               204 drivers/gpu/drm/via/via_mm.c 	DRM_DEBUG("free = 0x%lx\n", mem->index);
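
The via_mm.c free path above resolves a user handle with idr_find() and tears the entry down with idr_remove(). A hedged sketch of that handle-to-object free pattern, with a hypothetical alloc_rec type standing in for the driver's own allocation record:

#include <linux/idr.h>
#include <linux/slab.h>

/* Hypothetical allocation record tracked by handle. */
struct alloc_rec {
	unsigned long offset;
};

/* Drop the record registered under 'handle'; -EINVAL if the handle is
 * unknown. */
static int free_by_handle(struct idr *idr, unsigned long handle)
{
	struct alloc_rec *rec;

	rec = idr_find(idr, handle);
	if (!rec)
		return -EINVAL;

	idr_remove(idr, handle);
	kfree(rec);
	return 0;
}
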
mem                79 drivers/gpu/drm/virtio/virtgpu_ttm.c 			       struct ttm_mem_reg *mem)
mem                81 drivers/gpu/drm/virtio/virtgpu_ttm.c 	mem->mm_node = (void *)1;
mem                86 drivers/gpu/drm/virtio/virtgpu_ttm.c 				struct ttm_mem_reg *mem)
mem                88 drivers/gpu/drm/virtio/virtgpu_ttm.c 	mem->mm_node = (void *)NULL;
mem               160 drivers/gpu/drm/virtio/virtgpu_ttm.c 					 struct ttm_mem_reg *mem)
mem               162 drivers/gpu/drm/virtio/virtgpu_ttm.c 	struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];
mem               164 drivers/gpu/drm/virtio/virtgpu_ttm.c 	mem->bus.addr = NULL;
mem               165 drivers/gpu/drm/virtio/virtgpu_ttm.c 	mem->bus.offset = 0;
mem               166 drivers/gpu/drm/virtio/virtgpu_ttm.c 	mem->bus.size = mem->num_pages << PAGE_SHIFT;
mem               167 drivers/gpu/drm/virtio/virtgpu_ttm.c 	mem->bus.base = 0;
mem               168 drivers/gpu/drm/virtio/virtgpu_ttm.c 	mem->bus.is_iomem = false;
mem               171 drivers/gpu/drm/virtio/virtgpu_ttm.c 	switch (mem->mem_type) {
mem               183 drivers/gpu/drm/virtio/virtgpu_ttm.c 				       struct ttm_mem_reg *mem)
mem               461 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	if (!(dst->mem.placement & TTM_PL_FLAG_NO_EVICT))
mem               463 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	if (!(src->mem.placement & TTM_PL_FLAG_NO_EVICT))
mem               486 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	d.dst_prot = ttm_io_prot(dst->mem.placement, PAGE_KERNEL);
mem               487 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	d.src_prot = ttm_io_prot(src->mem.placement, PAGE_KERNEL);
mem               110 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		ret = ttm_bo_mem_compat(placement, &bo->mem,
mem               159 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		ret = ttm_bo_mem_compat(&vmw_vram_gmr_placement, &bo->mem,
mem               246 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	if (bo->mem.mem_type == TTM_PL_VRAM &&
mem               247 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	    bo->mem.start < bo->num_pages &&
mem               248 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	    bo->mem.start > 0 &&
mem               255 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		ret = ttm_bo_mem_compat(&placement, &bo->mem,
mem               318 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	if (bo->mem.mem_type == TTM_PL_VRAM) {
mem               322 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		ptr->gmrId = bo->mem.start;
mem               341 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	uint32_t old_mem_type = bo->mem.mem_type;
mem               368 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	BUG_ON(ret != 0 || bo->mem.mem_type != old_mem_type);
mem              1134 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 			struct ttm_mem_reg *mem)
mem              1138 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	if (mem == NULL)
mem              1153 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	if (mem->mem_type == TTM_PL_VRAM || bo->mem.mem_type == TTM_PL_VRAM)
mem              1161 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	if (mem->mem_type != VMW_PL_MOB && bo->mem.mem_type == VMW_PL_MOB)
mem               890 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 		cb_hdr->ptr.mob.mobid = man->cmd_space->mem.start;
mem               344 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
mem               353 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	cmd->body.mobid = bo->mem.start;
mem               383 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
mem               511 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
mem               520 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	cmd->body.mobid = bo->mem.start;
mem               590 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
mem               173 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	WARN_ON_ONCE(bo->mem.mem_type != VMW_PL_MOB);
mem               181 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	WARN_ON(bo->mem.mem_type != VMW_PL_MOB);
mem               186 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	cmd->body.mobid = bo->mem.start;
mem               315 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	WARN_ON_ONCE(bo->mem.mem_type != VMW_PL_MOB);
mem               713 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h 				  struct ttm_mem_reg *mem);
mem               794 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h 			       struct ttm_mem_reg *mem);
mem               730 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	cmd->body.mobid = dx_query_mob->base.mem.start;
mem              3306 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 		switch (bo->mem.mem_type) {
mem              3312 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 			reloc->location->gmrId = bo->mem.start;
mem              3315 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 			*reloc->mob_loc = bo->mem.start;
mem               613 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c 	if (bo->mem.mem_type == TTM_PL_VRAM) {
mem               617 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c 		cmd->body.guestResult.gmrId = bo->mem.start;
mem               658 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
mem               659 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c 	cmd->body.mobid = bo->mem.start;
mem                50 drivers/gpu/drm/vmwgfx/vmwgfx_gmrid_manager.c 				  struct ttm_mem_reg *mem)
mem                56 drivers/gpu/drm/vmwgfx/vmwgfx_gmrid_manager.c 	mem->mm_node = NULL;
mem                70 drivers/gpu/drm/vmwgfx/vmwgfx_gmrid_manager.c 	mem->mm_node = gman;
mem                71 drivers/gpu/drm/vmwgfx/vmwgfx_gmrid_manager.c 	mem->start = id;
mem                72 drivers/gpu/drm/vmwgfx/vmwgfx_gmrid_manager.c 	mem->num_pages = bo->num_pages;
mem                85 drivers/gpu/drm/vmwgfx/vmwgfx_gmrid_manager.c 				   struct ttm_mem_reg *mem)
mem                90 drivers/gpu/drm/vmwgfx/vmwgfx_gmrid_manager.c 	if (mem->mm_node) {
mem                91 drivers/gpu/drm/vmwgfx/vmwgfx_gmrid_manager.c 		ida_free(&gman->gmr_ida, mem->start);
mem                93 drivers/gpu/drm/vmwgfx/vmwgfx_gmrid_manager.c 		gman->used_gmr_pages -= mem->num_pages;
mem                95 drivers/gpu/drm/vmwgfx/vmwgfx_gmrid_manager.c 		mem->mm_node = NULL;
mem               790 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 			   struct ttm_mem_reg *mem)
mem               802 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 	if (mem == NULL || !dx_query_mob || !dx_query_mob->dx_query_ctx) {
mem               808 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 	if (mem->mem_type == TTM_PL_SYSTEM && bo->mem.mem_type == VMW_PL_MOB) {
mem               257 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
mem               266 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	cmd->body.mobid = bo->mem.start;
mem               285 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	BUG_ON(res->backup->base.mem.mem_type != VMW_PL_MOB);
mem               405 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	cmd->body.mobid = res->backup->base.mem.start;
mem               453 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
mem               516 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	BUG_ON(res->backup->base.mem.mem_type != VMW_PL_MOB);
mem              1154 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
mem              1165 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c 	cmd1->body.mobid = bo->mem.start;
mem              1202 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c 	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
mem               797 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static int vmw_ttm_io_mem_reserve(struct ttm_bo_device *bdev, struct ttm_mem_reg *mem)
mem               799 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	struct ttm_mem_type_manager *man = &bdev->man[mem->mem_type];
mem               802 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	mem->bus.addr = NULL;
mem               803 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	mem->bus.is_iomem = false;
mem               804 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	mem->bus.offset = 0;
mem               805 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	mem->bus.size = mem->num_pages << PAGE_SHIFT;
mem               806 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	mem->bus.base = 0;
mem               809 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	switch (mem->mem_type) {
mem               815 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		mem->bus.offset = mem->start << PAGE_SHIFT;
mem               816 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		mem->bus.base = dev_priv->vram_start;
mem               817 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		mem->bus.is_iomem = true;
mem               825 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static void vmw_ttm_io_mem_free(struct ttm_bo_device *bdev, struct ttm_mem_reg *mem)
mem               846 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 			    struct ttm_mem_reg *mem)
mem               848 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vmw_bo_move_notify(bo, mem);
mem               849 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	vmw_query_move_notify(bo, mem);
mem                25 drivers/gpu/host1x/hw/channel_hw.c 	void *mem = NULL;
mem                28 drivers/gpu/host1x/hw/channel_hw.c 		mem = host1x_bo_mmap(bo);
mem                30 drivers/gpu/host1x/hw/channel_hw.c 	if (mem) {
mem                43 drivers/gpu/host1x/hw/channel_hw.c 						      mem);
mem                46 drivers/gpu/host1x/hw/channel_hw.c 		host1x_bo_munmap(bo, mem);
mem                31 drivers/gpu/host1x/job.c 	void *mem;
mem                43 drivers/gpu/host1x/job.c 	mem = job = kzalloc(total, GFP_KERNEL);
mem                51 drivers/gpu/host1x/job.c 	mem += sizeof(struct host1x_job);
mem                52 drivers/gpu/host1x/job.c 	job->relocs = num_relocs ? mem : NULL;
mem                53 drivers/gpu/host1x/job.c 	mem += num_relocs * sizeof(struct host1x_reloc);
mem                54 drivers/gpu/host1x/job.c 	job->unpins = num_unpins ? mem : NULL;
mem                55 drivers/gpu/host1x/job.c 	mem += num_unpins * sizeof(struct host1x_job_unpin_data);
mem                56 drivers/gpu/host1x/job.c 	job->gathers = num_cmdbufs ? mem : NULL;
mem                57 drivers/gpu/host1x/job.c 	mem += num_cmdbufs * sizeof(struct host1x_job_gather);
mem                58 drivers/gpu/host1x/job.c 	job->addr_phys = num_unpins ? mem : NULL;
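
host1x/job.c above sizes one kzalloc() for the job header plus all of its trailing arrays and then carves the arrays out by advancing a byte cursor, so a single free releases everything. A sketch of that layout trick with illustrative struct names (standard C, so the cursor uses char * rather than void * arithmetic):

#include <stdlib.h>

/* Illustrative structs; not the real host1x_job layout. */
struct reloc { unsigned long addr; };
struct gather { unsigned int words; };

struct job {
	struct reloc *relocs;
	struct gather *gathers;
};

static struct job *job_alloc(unsigned int num_relocs, unsigned int num_gathers)
{
	size_t total = sizeof(struct job) +
		       num_relocs  * sizeof(struct reloc) +
		       num_gathers * sizeof(struct gather);
	void *mem;
	struct job *job;

	mem = job = calloc(1, total);	/* one allocation for everything */
	if (!job)
		return NULL;

	/* Carve the trailing arrays out of the same block. */
	mem = (char *)mem + sizeof(struct job);
	job->relocs = num_relocs ? mem : NULL;
	mem = (char *)mem + num_relocs * sizeof(struct reloc);
	job->gathers = num_gathers ? mem : NULL;

	return job;	/* a single free(job) releases header and arrays */
}
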
mem               329 drivers/hsi/controllers/omap_ssi_core.c 	struct resource *mem;
mem               333 drivers/hsi/controllers/omap_ssi_core.c 	mem = platform_get_resource_byname(pd, IORESOURCE_MEM, name);
mem               334 drivers/hsi/controllers/omap_ssi_core.c 	base = devm_ioremap_resource(&ssi->device, mem);
mem               341 drivers/hsi/controllers/omap_ssi_core.c 		*phy = mem->start;
mem              1096 drivers/hsi/controllers/omap_ssi_port.c 	struct resource *mem;
mem              1100 drivers/hsi/controllers/omap_ssi_port.c 	mem = platform_get_resource_byname(pd, IORESOURCE_MEM, name);
mem              1101 drivers/hsi/controllers/omap_ssi_port.c 	if (!mem) {
mem              1105 drivers/hsi/controllers/omap_ssi_port.c 	ioarea = devm_request_mem_region(&port->device, mem->start,
mem              1106 drivers/hsi/controllers/omap_ssi_port.c 					resource_size(mem), dev_name(&pd->dev));
mem              1109 drivers/hsi/controllers/omap_ssi_port.c 								mem->name);
mem              1112 drivers/hsi/controllers/omap_ssi_port.c 	base = devm_ioremap(&port->device, mem->start, resource_size(mem));
mem              1114 drivers/hsi/controllers/omap_ssi_port.c 		dev_err(&pd->dev, "%s IO remap failed\n", mem->name);
mem              1120 drivers/hsi/controllers/omap_ssi_port.c 		*phy = mem->start;
mem               629 drivers/hv/hv_balloon.c 	struct memory_notify *mem = (struct memory_notify *)v;
mem               643 drivers/hv/hv_balloon.c 		pfn_count = hv_page_offline_check(mem->start_pfn,
mem               644 drivers/hv/hv_balloon.c 						  mem->nr_pages);
mem               199 drivers/i2c/busses/i2c-at91-core.c 	struct resource *mem;
mem               209 drivers/i2c/busses/i2c-at91-core.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               210 drivers/i2c/busses/i2c-at91-core.c 	if (!mem)
mem               212 drivers/i2c/busses/i2c-at91-core.c 	phy_addr = mem->start;
mem               218 drivers/i2c/busses/i2c-at91-core.c 	dev->base = devm_ioremap_resource(&pdev->dev, mem);
mem               405 drivers/i2c/busses/i2c-bcm2835.c 	struct resource *mem, *irq;
mem               418 drivers/i2c/busses/i2c-bcm2835.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               419 drivers/i2c/busses/i2c-bcm2835.c 	i2c_dev->regs = devm_ioremap_resource(&pdev->dev, mem);
mem               764 drivers/i2c/busses/i2c-davinci.c 	struct resource *mem;
mem               817 drivers/i2c/busses/i2c-davinci.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               818 drivers/i2c/busses/i2c-davinci.c 	dev->base = devm_ioremap_resource(&pdev->dev, mem);
mem               159 drivers/i2c/busses/i2c-designware-platdrv.c 	struct resource *mem;
mem               163 drivers/i2c/busses/i2c-designware-platdrv.c 		mem = platform_get_resource(pdev, IORESOURCE_MEM, 1);
mem               164 drivers/i2c/busses/i2c-designware-platdrv.c 		dev->ext = devm_ioremap_resource(&pdev->dev, mem);
mem               258 drivers/i2c/busses/i2c-designware-platdrv.c 	struct resource *mem;
mem               272 drivers/i2c/busses/i2c-designware-platdrv.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               273 drivers/i2c/busses/i2c-designware-platdrv.c 	dev->base = devm_ioremap_resource(&pdev->dev, mem);
mem               743 drivers/i2c/busses/i2c-exynos5.c 	struct resource *mem;
mem               769 drivers/i2c/busses/i2c-exynos5.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               770 drivers/i2c/busses/i2c-exynos5.c 	i2c->regs = devm_ioremap_resource(&pdev->dev, mem);
mem               393 drivers/i2c/busses/i2c-hix5hd2.c 	struct resource *mem;
mem               414 drivers/i2c/busses/i2c-hix5hd2.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               415 drivers/i2c/busses/i2c-hix5hd2.c 	priv->regs = devm_ioremap_resource(&pdev->dev, mem);
mem               367 drivers/i2c/busses/i2c-meson.c 	struct resource *mem;
mem               392 drivers/i2c/busses/i2c-meson.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               393 drivers/i2c/busses/i2c-meson.c 	i2c->regs = devm_ioremap_resource(&pdev->dev, mem);
mem              1358 drivers/i2c/busses/i2c-omap.c 	struct resource		*mem;
mem              1378 drivers/i2c/busses/i2c-omap.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem              1379 drivers/i2c/busses/i2c-omap.c 	omap->base = devm_ioremap_resource(&pdev->dev, mem);
mem               186 drivers/i2c/busses/i2c-puv3.c 	struct resource *mem;
mem               189 drivers/i2c/busses/i2c-puv3.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               190 drivers/i2c/busses/i2c-puv3.c 	if (!mem)
mem               193 drivers/i2c/busses/i2c-puv3.c 	if (!request_mem_region(mem->start, resource_size(mem), "puv3_i2c"))
mem               203 drivers/i2c/busses/i2c-puv3.c 			mem->start);
mem               221 drivers/i2c/busses/i2c-puv3.c 	release_mem_region(mem->start, resource_size(mem));
mem               229 drivers/i2c/busses/i2c-puv3.c 	struct resource *mem;
mem               235 drivers/i2c/busses/i2c-puv3.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               236 drivers/i2c/busses/i2c-puv3.c 	release_mem_region(mem->start, resource_size(mem));
mem              1196 drivers/i2c/busses/i2c-rk3x.c 	struct resource *mem;
mem              1226 drivers/i2c/busses/i2c-rk3x.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem              1227 drivers/i2c/busses/i2c-rk3x.c 	i2c->regs = devm_ioremap_resource(&pdev->dev, mem);
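
The i2c (and, further down, IIO) probe functions listed here all follow the same resource idiom: fetch the first MEM resource from the platform device and map it with devm_ioremap_resource(), which also rejects a NULL resource. A minimal probe sketch of that pattern, using a hypothetical foo_i2c_dev state struct:

#include <linux/platform_device.h>
#include <linux/io.h>
#include <linux/err.h>
#include <linux/slab.h>

/* Hypothetical driver state; only the regs field matters here. */
struct foo_i2c_dev {
	void __iomem *regs;
};

static int foo_i2c_probe(struct platform_device *pdev)
{
	struct foo_i2c_dev *dev;
	struct resource *mem;

	dev = devm_kzalloc(&pdev->dev, sizeof(*dev), GFP_KERNEL);
	if (!dev)
		return -ENOMEM;

	/* First (and only) MEM resource of the device node. */
	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	dev->regs = devm_ioremap_resource(&pdev->dev, mem);
	if (IS_ERR(dev->regs))
		return PTR_ERR(dev->regs);	/* also covers mem == NULL */

	platform_set_drvdata(pdev, dev);
	return 0;
}
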
mem               303 drivers/ide/palm_bk3710.c 	struct resource *mem, *irq;
mem               321 drivers/ide/palm_bk3710.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               322 drivers/ide/palm_bk3710.c 	if (mem == NULL) {
mem               333 drivers/ide/palm_bk3710.c 	mem_size = resource_size(mem);
mem               334 drivers/ide/palm_bk3710.c 	if (request_mem_region(mem->start, mem_size, "palm_bk3710") == NULL) {
mem               339 drivers/ide/palm_bk3710.c 	base = ioremap(mem->start, mem_size);
mem               342 drivers/ide/palm_bk3710.c 		release_mem_region(mem->start, mem_size);
mem               772 drivers/iio/adc/exynos_adc.c 	struct resource	*mem;
mem               791 drivers/iio/adc/exynos_adc.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               792 drivers/iio/adc/exynos_adc.c 	info->regs = devm_ioremap_resource(&pdev->dev, mem);
mem               298 drivers/iio/adc/fsl-imx25-gcq.c 	void __iomem *mem;
mem               309 drivers/iio/adc/fsl-imx25-gcq.c 	mem = devm_ioremap_resource(dev, res);
mem               310 drivers/iio/adc/fsl-imx25-gcq.c 	if (IS_ERR(mem))
mem               311 drivers/iio/adc/fsl-imx25-gcq.c 		return PTR_ERR(mem);
mem               313 drivers/iio/adc/fsl-imx25-gcq.c 	priv->regs = devm_regmap_init_mmio(dev, mem, &mx25_gcq_regconfig);
mem                25 drivers/iio/adc/men_z188_adc.c 	struct resource *mem;
mem               105 drivers/iio/adc/men_z188_adc.c 	struct resource *mem;
mem               119 drivers/iio/adc/men_z188_adc.c 	mem = mcb_request_mem(dev, "z188-adc");
mem               120 drivers/iio/adc/men_z188_adc.c 	if (IS_ERR(mem))
mem               121 drivers/iio/adc/men_z188_adc.c 		return PTR_ERR(mem);
mem               123 drivers/iio/adc/men_z188_adc.c 	adc->base = ioremap(mem->start, resource_size(mem));
mem               129 drivers/iio/adc/men_z188_adc.c 	adc->mem = mem;
mem               135 drivers/iio/adc/men_z188_adc.c 	mcb_release_mem(mem);
mem               146 drivers/iio/adc/men_z188_adc.c 	mcb_release_mem(adc->mem);
mem               484 drivers/iio/adc/rcar-gyroadc.c 	struct resource *mem;
mem               494 drivers/iio/adc/rcar-gyroadc.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               495 drivers/iio/adc/rcar-gyroadc.c 	priv->regs = devm_ioremap_resource(dev, mem);
mem               201 drivers/iio/adc/rockchip_saradc.c 	struct resource	*mem;
mem               224 drivers/iio/adc/rockchip_saradc.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               225 drivers/iio/adc/rockchip_saradc.c 	info->regs = devm_ioremap_resource(&pdev->dev, mem);
mem               499 drivers/iio/adc/sun4i-gpadc-iio.c 	struct resource *mem;
mem               511 drivers/iio/adc/sun4i-gpadc-iio.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               512 drivers/iio/adc/sun4i-gpadc-iio.c 	base = devm_ioremap_resource(&pdev->dev, mem);
mem               805 drivers/iio/adc/vf610_adc.c 	struct resource *mem;
mem               818 drivers/iio/adc/vf610_adc.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               819 drivers/iio/adc/vf610_adc.c 	info->regs = devm_ioremap_resource(&pdev->dev, mem);
mem              1191 drivers/iio/adc/xilinx-xadc-core.c 	struct resource *mem;
mem              1221 drivers/iio/adc/xilinx-xadc-core.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem              1222 drivers/iio/adc/xilinx-xadc-core.c 	xadc->base = devm_ioremap_resource(&pdev->dev, mem);
mem               175 drivers/iio/dac/vf610_dac.c 	struct resource *mem;
mem               188 drivers/iio/dac/vf610_dac.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               189 drivers/iio/dac/vf610_dac.c 	info->regs = devm_ioremap_resource(&pdev->dev, mem);
mem               284 drivers/infiniband/hw/hns/hns_roce_hem.c 	struct scatterlist *mem;
mem               307 drivers/infiniband/hw/hns/hns_roce_hem.c 			sg_init_table(chunk->mem, HNS_ROCE_HEM_CHUNK_LEN);
mem               321 drivers/infiniband/hw/hns/hns_roce_hem.c 		mem = &chunk->mem[chunk->npages];
mem               323 drivers/infiniband/hw/hns/hns_roce_hem.c 				&sg_dma_address(mem), gfp_mask);
mem               328 drivers/infiniband/hw/hns/hns_roce_hem.c 		sg_dma_len(mem) = PAGE_SIZE << order;
mem               353 drivers/infiniband/hw/hns/hns_roce_hem.c 				   sg_dma_len(&chunk->mem[i]),
mem               355 drivers/infiniband/hw/hns/hns_roce_hem.c 				   sg_dma_address(&chunk->mem[i]));
mem               853 drivers/infiniband/hw/hns/hns_roce_hem.c 			length = sg_dma_len(&chunk->mem[i]);
mem               857 drivers/infiniband/hw/hns/hns_roce_hem.c 						&chunk->mem[i]) + dma_offset;
mem                85 drivers/infiniband/hw/hns/hns_roce_hem.h 	struct scatterlist	 mem[HNS_ROCE_HEM_CHUNK_LEN];
mem               181 drivers/infiniband/hw/hns/hns_roce_hem.h 	return sg_dma_address(&iter->chunk->mem[iter->page_idx]);
mem               173 drivers/infiniband/hw/i40iw/i40iw.h 	struct i40iw_dma_mem mem;
mem               182 drivers/infiniband/hw/i40iw/i40iw.h 	struct i40iw_dma_mem mem;
mem               396 drivers/infiniband/hw/i40iw/i40iw_cm.c 	buf = sqbuf->mem.va;
mem              3144 drivers/infiniband/hw/i40iw/i40iw_cm.c 			rbuf->mem.va,
mem              3146 drivers/infiniband/hw/i40iw/i40iw_cm.c 	ethh = (struct vlan_ethhdr *)rbuf->mem.va;
mem               409 drivers/infiniband/hw/i40iw/i40iw_hmc.c 	struct i40iw_dma_mem *mem;
mem               424 drivers/infiniband/hw/i40iw/i40iw_hmc.c 		mem = (sd_entry->entry_type == I40IW_SD_TYPE_PAGED) ?
mem               428 drivers/infiniband/hw/i40iw/i40iw_hmc.c 		if (!mem || !mem->va)
mem               431 drivers/infiniband/hw/i40iw/i40iw_hmc.c 			i40iw_free_dma_mem(dev->hw, mem);
mem               553 drivers/infiniband/hw/i40iw/i40iw_hmc.c 	struct i40iw_dma_mem mem;
mem               564 drivers/infiniband/hw/i40iw/i40iw_hmc.c 		ret_code = i40iw_allocate_dma_mem(hw, &mem, alloc_len,
mem               578 drivers/infiniband/hw/i40iw/i40iw_hmc.c 			memcpy(&sd_entry->u.pd_table.pd_page_addr, &mem, sizeof(struct i40iw_dma_mem));
mem               580 drivers/infiniband/hw/i40iw/i40iw_hmc.c 			memcpy(&sd_entry->u.bp.addr, &mem, sizeof(struct i40iw_dma_mem));
mem               593 drivers/infiniband/hw/i40iw/i40iw_hmc.c 			i40iw_free_dma_mem(hw, &mem);
mem               623 drivers/infiniband/hw/i40iw/i40iw_hmc.c 	struct i40iw_dma_mem mem;
mem               624 drivers/infiniband/hw/i40iw/i40iw_hmc.c 	struct i40iw_dma_mem *page = &mem;
mem               701 drivers/infiniband/hw/i40iw/i40iw_hmc.c 	struct i40iw_dma_mem *mem;
mem               731 drivers/infiniband/hw/i40iw/i40iw_hmc.c 		mem = &pd_entry->bp.addr;
mem               732 drivers/infiniband/hw/i40iw/i40iw_hmc.c 		if (!mem || !mem->va)
mem               734 drivers/infiniband/hw/i40iw/i40iw_hmc.c 		i40iw_free_dma_mem(hw, mem);
mem               302 drivers/infiniband/hw/i40iw/i40iw_main.c 	i40iw_free_dma_mem(dev->hw, &aeq->mem);
mem               332 drivers/infiniband/hw/i40iw/i40iw_main.c 	i40iw_free_dma_mem(dev->hw, &iwceq->mem);
mem               561 drivers/infiniband/hw/i40iw/i40iw_main.c 	struct i40iw_dma_mem mem;
mem               584 drivers/infiniband/hw/i40iw/i40iw_main.c 	status = i40iw_obj_aligned_mem(iwdev, &mem, sizeof(struct i40iw_cqp_ctx),
mem               588 drivers/infiniband/hw/i40iw/i40iw_main.c 	dev->cqp->host_ctx_pa = mem.pa;
mem               589 drivers/infiniband/hw/i40iw/i40iw_main.c 	dev->cqp->host_ctx = mem.va;
mem               595 drivers/infiniband/hw/i40iw/i40iw_main.c 	cqp_init_info.host_ctx_pa = mem.pa;
mem               596 drivers/infiniband/hw/i40iw/i40iw_main.c 	cqp_init_info.host_ctx = mem.va;
mem               636 drivers/infiniband/hw/i40iw/i40iw_main.c 	struct i40iw_dma_mem mem;
mem               651 drivers/infiniband/hw/i40iw/i40iw_main.c 	status = i40iw_obj_aligned_mem(iwdev, &mem, ccq->shadow_area.size,
mem               660 drivers/infiniband/hw/i40iw/i40iw_main.c 	info.shadow_area = mem.va;
mem               661 drivers/infiniband/hw/i40iw/i40iw_main.c 	info.shadow_area_pa = mem.pa;
mem               733 drivers/infiniband/hw/i40iw/i40iw_main.c 	iwceq->mem.size = sizeof(struct i40iw_ceqe) *
mem               735 drivers/infiniband/hw/i40iw/i40iw_main.c 	status = i40iw_allocate_dma_mem(dev->hw, &iwceq->mem, iwceq->mem.size,
mem               740 drivers/infiniband/hw/i40iw/i40iw_main.c 	info.ceqe_base = iwceq->mem.va;
mem               741 drivers/infiniband/hw/i40iw/i40iw_main.c 	info.ceqe_pa = iwceq->mem.pa;
mem               753 drivers/infiniband/hw/i40iw/i40iw_main.c 		i40iw_free_dma_mem(dev->hw, &iwceq->mem);
mem               874 drivers/infiniband/hw/i40iw/i40iw_main.c 	aeq->mem.size = sizeof(struct i40iw_sc_aeqe) * aeq_size;
mem               875 drivers/infiniband/hw/i40iw/i40iw_main.c 	status = i40iw_allocate_dma_mem(dev->hw, &aeq->mem, aeq->mem.size,
mem               880 drivers/infiniband/hw/i40iw/i40iw_main.c 	info.aeqe_base = aeq->mem.va;
mem               881 drivers/infiniband/hw/i40iw/i40iw_main.c 	info.aeq_elem_pa = aeq->mem.pa;
mem               892 drivers/infiniband/hw/i40iw/i40iw_main.c 		i40iw_free_dma_mem(dev->hw, &aeq->mem);
mem              1318 drivers/infiniband/hw/i40iw/i40iw_main.c 	struct i40iw_dma_mem mem;
mem              1337 drivers/infiniband/hw/i40iw/i40iw_main.c 	status = i40iw_obj_aligned_mem(iwdev, &mem, I40IW_QUERY_FPM_BUF_SIZE,
mem              1341 drivers/infiniband/hw/i40iw/i40iw_main.c 	info.fpm_query_buf_pa = mem.pa;
mem              1342 drivers/infiniband/hw/i40iw/i40iw_main.c 	info.fpm_query_buf = mem.va;
mem              1343 drivers/infiniband/hw/i40iw/i40iw_main.c 	status = i40iw_obj_aligned_mem(iwdev, &mem, I40IW_COMMIT_FPM_BUF_SIZE,
mem              1347 drivers/infiniband/hw/i40iw/i40iw_main.c 	info.fpm_commit_buf_pa = mem.pa;
mem              1348 drivers/infiniband/hw/i40iw/i40iw_main.c 	info.fpm_commit_buf = mem.va;
mem               195 drivers/infiniband/hw/i40iw/i40iw_osdep.h 						 struct i40iw_dma_mem *mem);
mem               118 drivers/infiniband/hw/i40iw/i40iw_p.h 					      struct i40iw_dma_mem *mem, u64 size,
mem               120 drivers/infiniband/hw/i40iw/i40iw_p.h void i40iw_free_dma_mem(struct i40iw_hw *hw, struct i40iw_dma_mem *mem);
mem               122 drivers/infiniband/hw/i40iw/i40iw_p.h 					       struct i40iw_virt_mem *mem, u32 size);
mem               124 drivers/infiniband/hw/i40iw/i40iw_p.h 					   struct i40iw_virt_mem *mem);
mem               255 drivers/infiniband/hw/i40iw/i40iw_pble.c 	struct i40iw_dma_mem mem;
mem               286 drivers/infiniband/hw/i40iw/i40iw_pble.c 		mem.pa = chunk->dmaaddrs[i];
mem               287 drivers/infiniband/hw/i40iw/i40iw_pble.c 		mem.size = PAGE_SIZE;
mem               288 drivers/infiniband/hw/i40iw/i40iw_pble.c 		mem.va = (void *)(addr);
mem               291 drivers/infiniband/hw/i40iw/i40iw_pble.c 			status = i40iw_add_pd_table_entry(dev->hw, hmc_info, pd_idx++, &mem);
mem               126 drivers/infiniband/hw/i40iw/i40iw_puda.c 	set_64bit_val(wqe, 0, buf->mem.pa);
mem               128 drivers/infiniband/hw/i40iw/i40iw_puda.c 		      LS_64(buf->mem.size, I40IWQPSQ_FRAG_LEN));
mem               177 drivers/infiniband/hw/i40iw/i40iw_puda.c 	ret = i40iw_allocate_dma_mem(dev->hw, &buf->mem, length, 1);
mem               197 drivers/infiniband/hw/i40iw/i40iw_puda.c 	i40iw_free_dma_mem(dev->hw, &buf->mem);
mem               452 drivers/infiniband/hw/i40iw/i40iw_puda.c 	info.paddr = buf->mem.pa;
mem               558 drivers/infiniband/hw/i40iw/i40iw_puda.c 	struct i40iw_dma_mem *mem;
mem               573 drivers/infiniband/hw/i40iw/i40iw_puda.c 	mem = &rsrc->qpmem;
mem               574 drivers/infiniband/hw/i40iw/i40iw_puda.c 	memset(mem->va, 0, t_size);
mem               581 drivers/infiniband/hw/i40iw/i40iw_puda.c 	qp->sq_pa = mem->pa;
mem               584 drivers/infiniband/hw/i40iw/i40iw_puda.c 	ukqp->sq_base = mem->va;
mem               678 drivers/infiniband/hw/i40iw/i40iw_puda.c 	struct i40iw_dma_mem *mem;
mem               690 drivers/infiniband/hw/i40iw/i40iw_puda.c 	mem = &rsrc->cqmem;
mem               697 drivers/infiniband/hw/i40iw/i40iw_puda.c 	info.cq_base_pa = mem->pa;
mem               698 drivers/infiniband/hw/i40iw/i40iw_puda.c 	info.shadow_area_pa = mem->pa + cqsize;
mem               699 drivers/infiniband/hw/i40iw/i40iw_puda.c 	init_info->cq_base = mem->va;
mem               700 drivers/infiniband/hw/i40iw/i40iw_puda.c 	init_info->shadow_area = (u64 *)((u8 *)mem->va + cqsize);
mem              1026 drivers/infiniband/hw/i40iw/i40iw_puda.c 	void *mem1 = (u8 *)buf->mem.va + buf_offset;
mem              1027 drivers/infiniband/hw/i40iw/i40iw_puda.c 	void *mem2 = (u8 *)txbuf->mem.va + txbuf_offset;
mem              1089 drivers/infiniband/hw/i40iw/i40iw_puda.c 	txbuf->data = (u8 *)txbuf->mem.va + buf->hdrlen;
mem              1093 drivers/infiniband/hw/i40iw/i40iw_puda.c 	bufoffset = (u16)(buf->data - (u8 *)buf->mem.va);
mem              1112 drivers/infiniband/hw/i40iw/i40iw_puda.c 		bufoffset = (u16)(buf->data - (u8 *)buf->mem.va);
mem              1216 drivers/infiniband/hw/i40iw/i40iw_puda.c 			txbuf->mem.va, txbuf->totallen);
mem              1256 drivers/infiniband/hw/i40iw/i40iw_puda.c 	ioffset = (u16)(buf->data - (u8 *)buf->mem.va);
mem                81 drivers/infiniband/hw/i40iw/i40iw_puda.h 	struct i40iw_dma_mem mem;	/* DMA memory for the buffer */
mem               752 drivers/infiniband/hw/i40iw/i40iw_utils.c 					      struct i40iw_dma_mem *mem,
mem               758 drivers/infiniband/hw/i40iw/i40iw_utils.c 	if (!mem)
mem               760 drivers/infiniband/hw/i40iw/i40iw_utils.c 	mem->size = ALIGN(size, alignment);
mem               761 drivers/infiniband/hw/i40iw/i40iw_utils.c 	mem->va = dma_alloc_coherent(&pcidev->dev, mem->size,
mem               762 drivers/infiniband/hw/i40iw/i40iw_utils.c 				     (dma_addr_t *)&mem->pa, GFP_KERNEL);
mem               763 drivers/infiniband/hw/i40iw/i40iw_utils.c 	if (!mem->va)
mem               773 drivers/infiniband/hw/i40iw/i40iw_utils.c void i40iw_free_dma_mem(struct i40iw_hw *hw, struct i40iw_dma_mem *mem)
mem               777 drivers/infiniband/hw/i40iw/i40iw_utils.c 	if (!mem || !mem->va)
mem               780 drivers/infiniband/hw/i40iw/i40iw_utils.c 	dma_free_coherent(&pcidev->dev, mem->size,
mem               781 drivers/infiniband/hw/i40iw/i40iw_utils.c 			  mem->va, (dma_addr_t)mem->pa);
mem               782 drivers/infiniband/hw/i40iw/i40iw_utils.c 	mem->va = NULL;
mem               792 drivers/infiniband/hw/i40iw/i40iw_utils.c 					       struct i40iw_virt_mem *mem,
mem               795 drivers/infiniband/hw/i40iw/i40iw_utils.c 	if (!mem)
mem               798 drivers/infiniband/hw/i40iw/i40iw_utils.c 	mem->size = size;
mem               799 drivers/infiniband/hw/i40iw/i40iw_utils.c 	mem->va = kzalloc(size, GFP_KERNEL);
mem               801 drivers/infiniband/hw/i40iw/i40iw_utils.c 	if (mem->va)
mem               813 drivers/infiniband/hw/i40iw/i40iw_utils.c 					   struct i40iw_virt_mem *mem)
mem               815 drivers/infiniband/hw/i40iw/i40iw_utils.c 	if (!mem)
mem               821 drivers/infiniband/hw/i40iw/i40iw_utils.c 	kfree(mem->va);
mem              1353 drivers/infiniband/hw/i40iw/i40iw_utils.c 						 struct i40iw_dma_mem *mem)
mem              1358 drivers/infiniband/hw/i40iw/i40iw_utils.c 	status = i40iw_obj_aligned_mem(iwdev, mem, I40IW_QUERY_FPM_BUF_SIZE,
mem              1440 drivers/infiniband/hw/i40iw/i40iw_utils.c 	u8 *addr = (u8 *)buf->mem.va;
mem              1464 drivers/infiniband/hw/i40iw/i40iw_utils.c 	u8 *mem = (u8 *)buf->mem.va;
mem              1465 drivers/infiniband/hw/i40iw/i40iw_utils.c 	struct ethhdr *ethh = (struct ethhdr *)buf->mem.va;
mem              1474 drivers/infiniband/hw/i40iw/i40iw_utils.c 	buf->iph = mem + buf->maclen;
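
i40iw_utils.c above wraps dma_alloc_coherent()/dma_free_coherent() behind a small descriptor carrying the kernel virtual address, bus address and aligned size. A sketch of that wrapper shape; the field names follow the listing, but the struct itself stands in for the driver's own i40iw_dma_mem:

#include <linux/dma-mapping.h>
#include <linux/kernel.h>

/* Minimal stand-in for the DMA descriptor used in the lines above. */
struct dma_mem {
	void *va;	/* kernel virtual address */
	dma_addr_t pa;	/* bus address            */
	u32 size;	/* size after alignment   */
};

static int dma_mem_alloc(struct device *dev, struct dma_mem *mem,
			 u64 size, u32 alignment)
{
	if (!mem)
		return -EINVAL;

	mem->size = ALIGN(size, alignment);
	mem->va = dma_alloc_coherent(dev, mem->size, &mem->pa, GFP_KERNEL);
	return mem->va ? 0 : -ENOMEM;
}

static void dma_mem_free(struct device *dev, struct dma_mem *mem)
{
	if (!mem || !mem->va)
		return;
	dma_free_coherent(dev, mem->size, mem->va, mem->pa);
	mem->va = NULL;
}
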
mem               461 drivers/infiniband/hw/i40iw/i40iw_verbs.c 	struct i40iw_dma_mem *mem = &iwqp->kqp.dma_mem;
mem               489 drivers/infiniband/hw/i40iw/i40iw_verbs.c 	status = i40iw_allocate_dma_mem(iwdev->sc_dev.hw, mem, size, 256);
mem               496 drivers/infiniband/hw/i40iw/i40iw_verbs.c 	ukinfo->sq = mem->va;
mem               497 drivers/infiniband/hw/i40iw/i40iw_verbs.c 	info->sq_pa = mem->pa;
mem               530 drivers/infiniband/hw/i40iw/i40iw_verbs.c 	void *mem;
mem               572 drivers/infiniband/hw/i40iw/i40iw_verbs.c 	mem = kzalloc(sizeof(*iwqp), GFP_KERNEL);
mem               573 drivers/infiniband/hw/i40iw/i40iw_verbs.c 	if (!mem)
mem               576 drivers/infiniband/hw/i40iw/i40iw_verbs.c 	iwqp = (struct i40iw_qp *)mem;
mem               577 drivers/infiniband/hw/i40iw/i40iw_verbs.c 	iwqp->allocated_buffer = mem;
mem              4537 drivers/infiniband/hw/mlx5/qp.c 	struct ib_sig_domain *mem = &sig_attrs->mem;
mem              4549 drivers/infiniband/hw/mlx5/qp.c 	switch (sig_attrs->mem.sig_type) {
mem              4553 drivers/infiniband/hw/mlx5/qp.c 		basic->mem.bs_selector = bs_selector(mem->sig.dif.pi_interval);
mem              4555 drivers/infiniband/hw/mlx5/qp.c 		mlx5_fill_inl_bsf(mem, &bsf->m_inl);
mem              4566 drivers/infiniband/hw/mlx5/qp.c 		if (mem->sig.dif.pi_interval == wire->sig.dif.pi_interval &&
mem              4567 drivers/infiniband/hw/mlx5/qp.c 		    mem->sig_type == wire->sig_type) {
mem              4570 drivers/infiniband/hw/mlx5/qp.c 			if (mem->sig.dif.bg_type == wire->sig.dif.bg_type)
mem              4572 drivers/infiniband/hw/mlx5/qp.c 			if (mem->sig.dif.app_tag == wire->sig.dif.app_tag)
mem              4574 drivers/infiniband/hw/mlx5/qp.c 			if (mem->sig.dif.ref_tag == wire->sig.dif.ref_tag)
mem              4653 drivers/infiniband/hw/mlx5/qp.c 		u16 block_size = sig_attrs->mem.sig.dif.pi_interval;
mem              4660 drivers/infiniband/hw/mlx5/qp.c 		prot_size = prot_field_size(sig_attrs->mem.sig_type);
mem              4756 drivers/infiniband/hw/mlx5/qp.c 	if (sig_attrs->mem.sig_type != IB_SIG_TYPE_NONE)
mem              5171 drivers/infiniband/hw/mlx5/qp.c 				err = set_psv_wr(&sig_attrs->mem,
mem                59 drivers/infiniband/hw/mthca/mthca_memfree.c 		struct scatterlist mem;
mem                69 drivers/infiniband/hw/mthca/mthca_memfree.c 		pci_unmap_sg(dev->pdev, chunk->mem, chunk->npages,
mem                73 drivers/infiniband/hw/mthca/mthca_memfree.c 		__free_pages(sg_page(&chunk->mem[i]),
mem                74 drivers/infiniband/hw/mthca/mthca_memfree.c 			     get_order(chunk->mem[i].length));
mem                82 drivers/infiniband/hw/mthca/mthca_memfree.c 		dma_free_coherent(&dev->pdev->dev, chunk->mem[i].length,
mem                83 drivers/infiniband/hw/mthca/mthca_memfree.c 				  lowmem_page_address(sg_page(&chunk->mem[i])),
mem                84 drivers/infiniband/hw/mthca/mthca_memfree.c 				  sg_dma_address(&chunk->mem[i]));
mem               107 drivers/infiniband/hw/mthca/mthca_memfree.c static int mthca_alloc_icm_pages(struct scatterlist *mem, int order, gfp_t gfp_mask)
mem               119 drivers/infiniband/hw/mthca/mthca_memfree.c 	sg_set_page(mem, page, PAGE_SIZE << order, 0);
mem               123 drivers/infiniband/hw/mthca/mthca_memfree.c static int mthca_alloc_icm_coherent(struct device *dev, struct scatterlist *mem,
mem               126 drivers/infiniband/hw/mthca/mthca_memfree.c 	void *buf = dma_alloc_coherent(dev, PAGE_SIZE << order, &sg_dma_address(mem),
mem               131 drivers/infiniband/hw/mthca/mthca_memfree.c 	sg_set_buf(mem, buf, PAGE_SIZE << order);
mem               132 drivers/infiniband/hw/mthca/mthca_memfree.c 	BUG_ON(mem->offset);
mem               133 drivers/infiniband/hw/mthca/mthca_memfree.c 	sg_dma_len(mem) = PAGE_SIZE << order;
mem               164 drivers/infiniband/hw/mthca/mthca_memfree.c 			sg_init_table(chunk->mem, MTHCA_ICM_CHUNK_LEN);
mem               175 drivers/infiniband/hw/mthca/mthca_memfree.c 						       &chunk->mem[chunk->npages],
mem               178 drivers/infiniband/hw/mthca/mthca_memfree.c 			ret = mthca_alloc_icm_pages(&chunk->mem[chunk->npages],
mem               187 drivers/infiniband/hw/mthca/mthca_memfree.c 				chunk->nsg = pci_map_sg(dev->pdev, chunk->mem,
mem               207 drivers/infiniband/hw/mthca/mthca_memfree.c 		chunk->nsg = pci_map_sg(dev->pdev, chunk->mem,
mem               300 drivers/infiniband/hw/mthca/mthca_memfree.c 				if (sg_dma_len(&chunk->mem[i]) > dma_offset)
mem               301 drivers/infiniband/hw/mthca/mthca_memfree.c 					*dma_handle = sg_dma_address(&chunk->mem[i]) +
mem               303 drivers/infiniband/hw/mthca/mthca_memfree.c 				dma_offset -= sg_dma_len(&chunk->mem[i]);
mem               308 drivers/infiniband/hw/mthca/mthca_memfree.c 			if (chunk->mem[i].length > offset) {
mem               309 drivers/infiniband/hw/mthca/mthca_memfree.c 				page = sg_page(&chunk->mem[i]);
mem               312 drivers/infiniband/hw/mthca/mthca_memfree.c 			offset -= chunk->mem[i].length;
mem               480 drivers/infiniband/hw/mthca/mthca_memfree.c 	sg_set_page(&db_tab->page[i].mem, pages[0], MTHCA_ICM_PAGE_SIZE,
mem               483 drivers/infiniband/hw/mthca/mthca_memfree.c 	ret = pci_map_sg(dev->pdev, &db_tab->page[i].mem, 1, PCI_DMA_TODEVICE);
mem               489 drivers/infiniband/hw/mthca/mthca_memfree.c 	ret = mthca_MAP_ICM_page(dev, sg_dma_address(&db_tab->page[i].mem),
mem               492 drivers/infiniband/hw/mthca/mthca_memfree.c 		pci_unmap_sg(dev->pdev, &db_tab->page[i].mem, 1, PCI_DMA_TODEVICE);
mem               493 drivers/infiniband/hw/mthca/mthca_memfree.c 		put_user_page(sg_page(&db_tab->page[i].mem));
mem               541 drivers/infiniband/hw/mthca/mthca_memfree.c 		sg_init_table(&db_tab->page[i].mem, 1);
mem               558 drivers/infiniband/hw/mthca/mthca_memfree.c 			pci_unmap_sg(dev->pdev, &db_tab->page[i].mem, 1, PCI_DMA_TODEVICE);
mem               559 drivers/infiniband/hw/mthca/mthca_memfree.c 			put_user_page(sg_page(&db_tab->page[i].mem));
mem                55 drivers/infiniband/hw/mthca/mthca_memfree.h 	struct scatterlist mem[MTHCA_ICM_CHUNK_LEN];
mem               130 drivers/infiniband/hw/mthca/mthca_memfree.h 	return sg_dma_address(&iter->chunk->mem[iter->page_idx]);
mem               135 drivers/infiniband/hw/mthca/mthca_memfree.h 	return sg_dma_len(&iter->chunk->mem[iter->page_idx]);
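
The mthca ICM code above fills each chunk with scatterlist entries that each describe one physically contiguous higher-order allocation. A small sketch of the per-entry step, close in shape to the mthca_alloc_icm_pages() signature shown in the listing:

#include <linux/scatterlist.h>
#include <linux/gfp.h>
#include <linux/mm.h>

/* Allocate a physically contiguous run of 2^order pages and describe it
 * with a single scatterlist entry. */
static int icm_alloc_pages(struct scatterlist *mem, int order, gfp_t gfp_mask)
{
	struct page *page;

	page = alloc_pages(gfp_mask | __GFP_ZERO, order);
	if (!page)
		return -ENOMEM;

	sg_set_page(mem, page, PAGE_SIZE << order, 0);
	return 0;
}
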
mem               583 drivers/infiniband/hw/mthca/mthca_mr.c 	    mr->attr.max_pages * sizeof *mr->mem.arbel.mtts > PAGE_SIZE)
mem               601 drivers/infiniband/hw/mthca/mthca_mr.c 		mr->mem.arbel.mpt = mthca_table_find(dev->mr_table.mpt_table, key, NULL);
mem               602 drivers/infiniband/hw/mthca/mthca_mr.c 		BUG_ON(!mr->mem.arbel.mpt);
mem               604 drivers/infiniband/hw/mthca/mthca_mr.c 		mr->mem.tavor.mpt = dev->mr_table.tavor_fmr.mpt_base +
mem               605 drivers/infiniband/hw/mthca/mthca_mr.c 			sizeof *(mr->mem.tavor.mpt) * idx;
mem               616 drivers/infiniband/hw/mthca/mthca_mr.c 		mr->mem.arbel.mtts = mthca_table_find(dev->mr_table.mtt_table,
mem               618 drivers/infiniband/hw/mthca/mthca_mr.c 						      &mr->mem.arbel.dma_handle);
mem               619 drivers/infiniband/hw/mthca/mthca_mr.c 		BUG_ON(!mr->mem.arbel.mtts);
mem               621 drivers/infiniband/hw/mthca/mthca_mr.c 		mr->mem.tavor.mtts = dev->mr_table.tavor_fmr.mtt_base + mtt_seg;
mem               736 drivers/infiniband/hw/mthca/mthca_mr.c 	writeb(MTHCA_MPT_STATUS_SW, fmr->mem.tavor.mpt);
mem               741 drivers/infiniband/hw/mthca/mthca_mr.c 		mthca_write64_raw(mtt_entry, fmr->mem.tavor.mtts + i);
mem               748 drivers/infiniband/hw/mthca/mthca_mr.c 	__raw_writel((__force u32) mpt_entry.lkey, &fmr->mem.tavor.mpt->key);
mem               749 drivers/infiniband/hw/mthca/mthca_mr.c 	memcpy_toio(&fmr->mem.tavor.mpt->start, &mpt_entry.start,
mem               753 drivers/infiniband/hw/mthca/mthca_mr.c 	writeb(MTHCA_MPT_STATUS_HW, fmr->mem.tavor.mpt);
mem               779 drivers/infiniband/hw/mthca/mthca_mr.c 	*(u8 *) fmr->mem.arbel.mpt = MTHCA_MPT_STATUS_SW;
mem               783 drivers/infiniband/hw/mthca/mthca_mr.c 	dma_sync_single_for_cpu(&dev->pdev->dev, fmr->mem.arbel.dma_handle,
mem               787 drivers/infiniband/hw/mthca/mthca_mr.c 		fmr->mem.arbel.mtts[i] = cpu_to_be64(page_list[i] |
mem               790 drivers/infiniband/hw/mthca/mthca_mr.c 	dma_sync_single_for_device(&dev->pdev->dev, fmr->mem.arbel.dma_handle,
mem               793 drivers/infiniband/hw/mthca/mthca_mr.c 	fmr->mem.arbel.mpt->key    = cpu_to_be32(key);
mem               794 drivers/infiniband/hw/mthca/mthca_mr.c 	fmr->mem.arbel.mpt->lkey   = cpu_to_be32(key);
mem               795 drivers/infiniband/hw/mthca/mthca_mr.c 	fmr->mem.arbel.mpt->length = cpu_to_be64(list_len * (1ull << fmr->attr.page_shift));
mem               796 drivers/infiniband/hw/mthca/mthca_mr.c 	fmr->mem.arbel.mpt->start  = cpu_to_be64(iova);
mem               800 drivers/infiniband/hw/mthca/mthca_mr.c 	*(u8 *) fmr->mem.arbel.mpt = MTHCA_MPT_STATUS_HW;
mem               814 drivers/infiniband/hw/mthca/mthca_mr.c 	writeb(MTHCA_MPT_STATUS_SW, fmr->mem.tavor.mpt);
mem               824 drivers/infiniband/hw/mthca/mthca_mr.c 	*(u8 *) fmr->mem.arbel.mpt = MTHCA_MPT_STATUS_SW;
mem                94 drivers/infiniband/hw/mthca/mthca_provider.h 	} mem;
mem                69 drivers/infiniband/hw/ocrdma/ocrdma_stats.c 	struct stats_mem *mem = &dev->stats_mem;
mem                73 drivers/infiniband/hw/ocrdma/ocrdma_stats.c 	mem->size = max_t(u32, sizeof(struct ocrdma_rdma_stats_req),
mem                76 drivers/infiniband/hw/ocrdma/ocrdma_stats.c 	mem->va = dma_alloc_coherent(&dev->nic_info.pdev->dev, mem->size,
mem                77 drivers/infiniband/hw/ocrdma/ocrdma_stats.c 				     &mem->pa, GFP_KERNEL);
mem                78 drivers/infiniband/hw/ocrdma/ocrdma_stats.c 	if (!mem->va) {
mem                84 drivers/infiniband/hw/ocrdma/ocrdma_stats.c 	mem->debugfs_mem = kzalloc(OCRDMA_MAX_DBGFS_MEM, GFP_KERNEL);
mem                85 drivers/infiniband/hw/ocrdma/ocrdma_stats.c 	if (!mem->debugfs_mem)
mem                93 drivers/infiniband/hw/ocrdma/ocrdma_stats.c 	struct stats_mem *mem = &dev->stats_mem;
mem                95 drivers/infiniband/hw/ocrdma/ocrdma_stats.c 	if (mem->va)
mem                96 drivers/infiniband/hw/ocrdma/ocrdma_stats.c 		dma_free_coherent(&dev->nic_info.pdev->dev, mem->size,
mem                97 drivers/infiniband/hw/ocrdma/ocrdma_stats.c 				  mem->va, mem->pa);
mem                98 drivers/infiniband/hw/ocrdma/ocrdma_stats.c 	mem->va = NULL;
mem                99 drivers/infiniband/hw/ocrdma/ocrdma_stats.c 	kfree(mem->debugfs_mem);
mem               107 drivers/infiniband/sw/rxe/rxe_loc.h 		     int access, struct rxe_mem *mem);
mem               114 drivers/infiniband/sw/rxe/rxe_loc.h 		      int max_pages, struct rxe_mem *mem);
mem               116 drivers/infiniband/sw/rxe/rxe_loc.h int rxe_mem_copy(struct rxe_mem *mem, u64 iova, void *addr,
mem               123 drivers/infiniband/sw/rxe/rxe_loc.h void *iova_to_vaddr(struct rxe_mem *mem, u64 iova, int length);
mem               133 drivers/infiniband/sw/rxe/rxe_loc.h int mem_check_range(struct rxe_mem *mem, u64 iova, size_t length);
mem               135 drivers/infiniband/sw/rxe/rxe_loc.h int rxe_mem_map_pages(struct rxe_dev *rxe, struct rxe_mem *mem,
mem                54 drivers/infiniband/sw/rxe/rxe_mr.c int mem_check_range(struct rxe_mem *mem, u64 iova, size_t length)
mem                56 drivers/infiniband/sw/rxe/rxe_mr.c 	switch (mem->type) {
mem                62 drivers/infiniband/sw/rxe/rxe_mr.c 		if (iova < mem->iova ||
mem                63 drivers/infiniband/sw/rxe/rxe_mr.c 		    length > mem->length ||
mem                64 drivers/infiniband/sw/rxe/rxe_mr.c 		    iova > mem->iova + mem->length - length)
mem                77 drivers/infiniband/sw/rxe/rxe_mr.c static void rxe_mem_init(int access, struct rxe_mem *mem)
mem                79 drivers/infiniband/sw/rxe/rxe_mr.c 	u32 lkey = mem->pelem.index << 8 | rxe_get_key();
mem                82 drivers/infiniband/sw/rxe/rxe_mr.c 	if (mem->pelem.pool->type == RXE_TYPE_MR) {
mem                83 drivers/infiniband/sw/rxe/rxe_mr.c 		mem->ibmr.lkey		= lkey;
mem                84 drivers/infiniband/sw/rxe/rxe_mr.c 		mem->ibmr.rkey		= rkey;
mem                87 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->lkey		= lkey;
mem                88 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->rkey		= rkey;
mem                89 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->state		= RXE_MEM_STATE_INVALID;
mem                90 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->type		= RXE_MEM_TYPE_NONE;
mem                91 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->map_shift		= ilog2(RXE_BUF_PER_MAP);
mem                96 drivers/infiniband/sw/rxe/rxe_mr.c 	struct rxe_mem *mem = container_of(arg, typeof(*mem), pelem);
mem                99 drivers/infiniband/sw/rxe/rxe_mr.c 	ib_umem_release(mem->umem);
mem               101 drivers/infiniband/sw/rxe/rxe_mr.c 	if (mem->map) {
mem               102 drivers/infiniband/sw/rxe/rxe_mr.c 		for (i = 0; i < mem->num_map; i++)
mem               103 drivers/infiniband/sw/rxe/rxe_mr.c 			kfree(mem->map[i]);
mem               105 drivers/infiniband/sw/rxe/rxe_mr.c 		kfree(mem->map);
mem               109 drivers/infiniband/sw/rxe/rxe_mr.c static int rxe_mem_alloc(struct rxe_mem *mem, int num_buf)
mem               113 drivers/infiniband/sw/rxe/rxe_mr.c 	struct rxe_map **map = mem->map;
mem               117 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->map = kmalloc_array(num_map, sizeof(*map), GFP_KERNEL);
mem               118 drivers/infiniband/sw/rxe/rxe_mr.c 	if (!mem->map)
mem               122 drivers/infiniband/sw/rxe/rxe_mr.c 		mem->map[i] = kmalloc(sizeof(**map), GFP_KERNEL);
mem               123 drivers/infiniband/sw/rxe/rxe_mr.c 		if (!mem->map[i])
mem               129 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->map_shift	= ilog2(RXE_BUF_PER_MAP);
mem               130 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->map_mask	= RXE_BUF_PER_MAP - 1;
mem               132 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->num_buf = num_buf;
mem               133 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->num_map = num_map;
mem               134 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->max_buf = num_map * RXE_BUF_PER_MAP;
mem               140 drivers/infiniband/sw/rxe/rxe_mr.c 		kfree(mem->map[i]);
mem               142 drivers/infiniband/sw/rxe/rxe_mr.c 	kfree(mem->map);
mem               148 drivers/infiniband/sw/rxe/rxe_mr.c 		     int access, struct rxe_mem *mem)
mem               150 drivers/infiniband/sw/rxe/rxe_mr.c 	rxe_mem_init(access, mem);
mem               152 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->pd			= pd;
mem               153 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->access		= access;
mem               154 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->state		= RXE_MEM_STATE_VALID;
mem               155 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->type		= RXE_MEM_TYPE_DMA;
mem               162 drivers/infiniband/sw/rxe/rxe_mr.c 		      struct rxe_mem *mem)
mem               180 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->umem = umem;
mem               183 drivers/infiniband/sw/rxe/rxe_mr.c 	rxe_mem_init(access, mem);
mem               185 drivers/infiniband/sw/rxe/rxe_mr.c 	err = rxe_mem_alloc(mem, num_buf);
mem               192 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->page_shift		= PAGE_SHIFT;
mem               193 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->page_mask = PAGE_SIZE - 1;
mem               196 drivers/infiniband/sw/rxe/rxe_mr.c 	map			= mem->map;
mem               222 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->pd			= pd;
mem               223 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->umem		= umem;
mem               224 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->access		= access;
mem               225 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->length		= length;
mem               226 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->iova		= iova;
mem               227 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->va			= start;
mem               228 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->offset		= ib_umem_offset(umem);
mem               229 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->state		= RXE_MEM_STATE_VALID;
mem               230 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->type		= RXE_MEM_TYPE_MR;
mem               239 drivers/infiniband/sw/rxe/rxe_mr.c 		      int max_pages, struct rxe_mem *mem)
mem               243 drivers/infiniband/sw/rxe/rxe_mr.c 	rxe_mem_init(0, mem);
mem               246 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->ibmr.rkey = mem->ibmr.lkey;
mem               248 drivers/infiniband/sw/rxe/rxe_mr.c 	err = rxe_mem_alloc(mem, max_pages);
mem               252 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->pd			= pd;
mem               253 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->max_buf		= max_pages;
mem               254 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->state		= RXE_MEM_STATE_FREE;
mem               255 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->type		= RXE_MEM_TYPE_MR;
mem               264 drivers/infiniband/sw/rxe/rxe_mr.c 	struct rxe_mem	*mem,
mem               270 drivers/infiniband/sw/rxe/rxe_mr.c 	size_t			offset = iova - mem->iova + mem->offset;
mem               275 drivers/infiniband/sw/rxe/rxe_mr.c 	if (likely(mem->page_shift)) {
mem               276 drivers/infiniband/sw/rxe/rxe_mr.c 		*offset_out = offset & mem->page_mask;
mem               277 drivers/infiniband/sw/rxe/rxe_mr.c 		offset >>= mem->page_shift;
mem               278 drivers/infiniband/sw/rxe/rxe_mr.c 		*n_out = offset & mem->map_mask;
mem               279 drivers/infiniband/sw/rxe/rxe_mr.c 		*m_out = offset >> mem->map_shift;
mem               284 drivers/infiniband/sw/rxe/rxe_mr.c 		length = mem->map[map_index]->buf[buf_index].size;
mem               294 drivers/infiniband/sw/rxe/rxe_mr.c 			length = mem->map[map_index]->buf[buf_index].size;
mem               303 drivers/infiniband/sw/rxe/rxe_mr.c void *iova_to_vaddr(struct rxe_mem *mem, u64 iova, int length)
mem               309 drivers/infiniband/sw/rxe/rxe_mr.c 	if (mem->state != RXE_MEM_STATE_VALID) {
mem               315 drivers/infiniband/sw/rxe/rxe_mr.c 	if (!mem->map) {
mem               320 drivers/infiniband/sw/rxe/rxe_mr.c 	if (mem_check_range(mem, iova, length)) {
mem               326 drivers/infiniband/sw/rxe/rxe_mr.c 	lookup_iova(mem, iova, &m, &n, &offset);
mem               328 drivers/infiniband/sw/rxe/rxe_mr.c 	if (offset + length > mem->map[m]->buf[n].size) {
mem               334 drivers/infiniband/sw/rxe/rxe_mr.c 	addr = (void *)(uintptr_t)mem->map[m]->buf[n].addr + offset;
mem               344 drivers/infiniband/sw/rxe/rxe_mr.c int rxe_mem_copy(struct rxe_mem *mem, u64 iova, void *addr, int length,
mem               360 drivers/infiniband/sw/rxe/rxe_mr.c 	if (mem->type == RXE_MEM_TYPE_DMA) {
mem               372 drivers/infiniband/sw/rxe/rxe_mr.c 			*crcp = rxe_crc32(to_rdev(mem->pd->ibpd.device),
mem               378 drivers/infiniband/sw/rxe/rxe_mr.c 	WARN_ON_ONCE(!mem->map);
mem               380 drivers/infiniband/sw/rxe/rxe_mr.c 	err = mem_check_range(mem, iova, length);
mem               386 drivers/infiniband/sw/rxe/rxe_mr.c 	lookup_iova(mem, iova, &m, &i, &offset);
mem               388 drivers/infiniband/sw/rxe/rxe_mr.c 	map	= mem->map + m;
mem               406 drivers/infiniband/sw/rxe/rxe_mr.c 			crc = rxe_crc32(to_rdev(mem->pd->ibpd.device),
mem               448 drivers/infiniband/sw/rxe/rxe_mr.c 	struct rxe_mem		*mem	= NULL;
mem               461 drivers/infiniband/sw/rxe/rxe_mr.c 		mem = lookup_mem(pd, access, sge->lkey, lookup_local);
mem               462 drivers/infiniband/sw/rxe/rxe_mr.c 		if (!mem) {
mem               472 drivers/infiniband/sw/rxe/rxe_mr.c 			if (mem) {
mem               473 drivers/infiniband/sw/rxe/rxe_mr.c 				rxe_drop_ref(mem);
mem               474 drivers/infiniband/sw/rxe/rxe_mr.c 				mem = NULL;
mem               486 drivers/infiniband/sw/rxe/rxe_mr.c 				mem = lookup_mem(pd, access, sge->lkey,
mem               488 drivers/infiniband/sw/rxe/rxe_mr.c 				if (!mem) {
mem               503 drivers/infiniband/sw/rxe/rxe_mr.c 			err = rxe_mem_copy(mem, iova, addr, bytes, dir, crcp);
mem               517 drivers/infiniband/sw/rxe/rxe_mr.c 	if (mem)
mem               518 drivers/infiniband/sw/rxe/rxe_mr.c 		rxe_drop_ref(mem);
mem               523 drivers/infiniband/sw/rxe/rxe_mr.c 	if (mem)
mem               524 drivers/infiniband/sw/rxe/rxe_mr.c 		rxe_drop_ref(mem);
mem               571 drivers/infiniband/sw/rxe/rxe_mr.c 	struct rxe_mem *mem;
mem               575 drivers/infiniband/sw/rxe/rxe_mr.c 	mem = rxe_pool_get_index(&rxe->mr_pool, index);
mem               576 drivers/infiniband/sw/rxe/rxe_mr.c 	if (!mem)
mem               579 drivers/infiniband/sw/rxe/rxe_mr.c 	if (unlikely((type == lookup_local && mem->lkey != key) ||
mem               580 drivers/infiniband/sw/rxe/rxe_mr.c 		     (type == lookup_remote && mem->rkey != key) ||
mem               581 drivers/infiniband/sw/rxe/rxe_mr.c 		     mem->pd != pd ||
mem               582 drivers/infiniband/sw/rxe/rxe_mr.c 		     (access && !(access & mem->access)) ||
mem               583 drivers/infiniband/sw/rxe/rxe_mr.c 		     mem->state != RXE_MEM_STATE_VALID)) {
mem               584 drivers/infiniband/sw/rxe/rxe_mr.c 		rxe_drop_ref(mem);
mem               585 drivers/infiniband/sw/rxe/rxe_mr.c 		mem = NULL;
mem               588 drivers/infiniband/sw/rxe/rxe_mr.c 	return mem;
mem               591 drivers/infiniband/sw/rxe/rxe_mr.c int rxe_mem_map_pages(struct rxe_dev *rxe, struct rxe_mem *mem,
mem               601 drivers/infiniband/sw/rxe/rxe_mr.c 	if (num_pages > mem->max_buf) {
mem               607 drivers/infiniband/sw/rxe/rxe_mr.c 	page_size	= 1 << mem->page_shift;
mem               608 drivers/infiniband/sw/rxe/rxe_mr.c 	map		= mem->map;
mem               624 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->iova	= iova;
mem               625 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->va		= iova;
mem               626 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->length	= num_pages << mem->page_shift;
mem               627 drivers/infiniband/sw/rxe/rxe_mr.c 	mem->state	= RXE_MEM_STATE_VALID;
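The rxe_mr.c entries above revolve around translating an iova into a (map, buffer, offset) triple before copying data. As a purely hypothetical illustration (the real lookup_iova() uses the driver's own map_shift/map_mask fields, which the index does not show), the arithmetic for a fixed power-of-two geometry looks roughly like this:

#include <linux/types.h>

/* Hypothetical geometry, for illustration only. */
#define BUF_SHIFT	12	/* assumed 4 KiB per buffer */
#define MAP_SHIFT	8	/* assumed 256 buffers per map */

static void example_lookup_iova(u64 iova, u64 base_iova,
				int *m, int *n, size_t *offset)
{
	u64 off = iova - base_iova;		/* byte offset into the region */
	u64 index = off >> BUF_SHIFT;		/* which buffer overall */

	*m = index >> MAP_SHIFT;		/* which map array */
	*n = index & ((1 << MAP_SHIFT) - 1);	/* buffer within that map */
	*offset = off & ((1 << BUF_SHIFT) - 1);	/* byte offset inside the buffer */
}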
mem               420 drivers/infiniband/sw/rxe/rxe_resp.c 	struct rxe_mem *mem = NULL;
mem               459 drivers/infiniband/sw/rxe/rxe_resp.c 	mem = lookup_mem(qp->pd, access, rkey, lookup_remote);
mem               460 drivers/infiniband/sw/rxe/rxe_resp.c 	if (!mem) {
mem               465 drivers/infiniband/sw/rxe/rxe_resp.c 	if (unlikely(mem->state == RXE_MEM_STATE_FREE)) {
mem               470 drivers/infiniband/sw/rxe/rxe_resp.c 	if (mem_check_range(mem, va, resid)) {
mem               498 drivers/infiniband/sw/rxe/rxe_resp.c 	qp->resp.mr = mem;
mem               502 drivers/infiniband/sw/rxe/rxe_resp.c 	if (mem)
mem               503 drivers/infiniband/sw/rxe/rxe_resp.c 		rxe_drop_ref(mem);
mem               178 drivers/infiniband/sw/siw/siw.h 	struct siw_mem *mem;
mem               207 drivers/infiniband/sw/siw/siw.h 	struct siw_mem *mem[SIW_MAX_SGE]; /* per sge's resolved mem */
mem               495 drivers/infiniband/sw/siw/siw.h #define rx_mem(rctx) ((rctx)->wqe_active.mem[0])
mem               732 drivers/infiniband/sw/siw/siw.h #define siw_dbg_mem(mem, fmt, ...)                                             \
mem               733 drivers/infiniband/sw/siw/siw.h 	ibdev_dbg(&mem->sdev->base_dev,                                        \
mem               734 drivers/infiniband/sw/siw/siw.h 		  "MEM[0x%08x] %s: " fmt, mem->stag, __func__, ##__VA_ARGS__)
mem                50 drivers/infiniband/sw/siw/siw_mem.c 	struct siw_mem *mem;
mem                53 drivers/infiniband/sw/siw/siw_mem.c 	mem = xa_load(&sdev->mem_xa, stag_index);
mem                54 drivers/infiniband/sw/siw/siw_mem.c 	if (likely(mem && kref_get_unless_zero(&mem->ref))) {
mem                56 drivers/infiniband/sw/siw/siw_mem.c 		return mem;
mem                93 drivers/infiniband/sw/siw/siw_mem.c 	struct siw_mem *mem = kzalloc(sizeof(*mem), GFP_KERNEL);
mem                97 drivers/infiniband/sw/siw/siw_mem.c 	if (!mem)
mem               100 drivers/infiniband/sw/siw/siw_mem.c 	mem->mem_obj = mem_obj;
mem               101 drivers/infiniband/sw/siw/siw_mem.c 	mem->stag_valid = 0;
mem               102 drivers/infiniband/sw/siw/siw_mem.c 	mem->sdev = sdev;
mem               103 drivers/infiniband/sw/siw/siw_mem.c 	mem->va = start;
mem               104 drivers/infiniband/sw/siw/siw_mem.c 	mem->len = len;
mem               105 drivers/infiniband/sw/siw/siw_mem.c 	mem->pd = pd;
mem               106 drivers/infiniband/sw/siw/siw_mem.c 	mem->perms = rights & IWARP_ACCESS_MASK;
mem               107 drivers/infiniband/sw/siw/siw_mem.c 	kref_init(&mem->ref);
mem               109 drivers/infiniband/sw/siw/siw_mem.c 	mr->mem = mem;
mem               114 drivers/infiniband/sw/siw/siw_mem.c 	if (xa_alloc_cyclic(&sdev->mem_xa, &id, mem, limit, &next,
mem               116 drivers/infiniband/sw/siw/siw_mem.c 		kfree(mem);
mem               120 drivers/infiniband/sw/siw/siw_mem.c 	mem->stag = id << 8;
mem               121 drivers/infiniband/sw/siw/siw_mem.c 	mr->base_mr.lkey = mr->base_mr.rkey = mem->stag;
mem               128 drivers/infiniband/sw/siw/siw_mem.c 	struct siw_mem *mem = mr->mem, *found;
mem               130 drivers/infiniband/sw/siw/siw_mem.c 	mem->stag_valid = 0;
mem               135 drivers/infiniband/sw/siw/siw_mem.c 	found = xa_erase(&mem->sdev->mem_xa, mem->stag >> 8);
mem               136 drivers/infiniband/sw/siw/siw_mem.c 	WARN_ON(found != mem);
mem               137 drivers/infiniband/sw/siw/siw_mem.c 	siw_mem_put(mem);
mem               142 drivers/infiniband/sw/siw/siw_mem.c 	struct siw_mem *mem = container_of(ref, struct siw_mem, ref);
mem               144 drivers/infiniband/sw/siw/siw_mem.c 	siw_dbg_mem(mem, "free mem, pbl: %s\n", mem->is_pbl ? "y" : "n");
mem               146 drivers/infiniband/sw/siw/siw_mem.c 	if (!mem->is_mw && mem->mem_obj) {
mem               147 drivers/infiniband/sw/siw/siw_mem.c 		if (mem->is_pbl == 0)
mem               148 drivers/infiniband/sw/siw/siw_mem.c 			siw_umem_release(mem->umem, true);
mem               150 drivers/infiniband/sw/siw/siw_mem.c 			kfree(mem->pbl);
mem               152 drivers/infiniband/sw/siw/siw_mem.c 	kfree(mem);
mem               168 drivers/infiniband/sw/siw/siw_mem.c int siw_check_mem(struct ib_pd *pd, struct siw_mem *mem, u64 addr,
mem               171 drivers/infiniband/sw/siw/siw_mem.c 	if (!mem->stag_valid) {
mem               172 drivers/infiniband/sw/siw/siw_mem.c 		siw_dbg_pd(pd, "STag 0x%08x invalid\n", mem->stag);
mem               175 drivers/infiniband/sw/siw/siw_mem.c 	if (mem->pd != pd) {
mem               176 drivers/infiniband/sw/siw/siw_mem.c 		siw_dbg_pd(pd, "STag 0x%08x: PD mismatch\n", mem->stag);
mem               182 drivers/infiniband/sw/siw/siw_mem.c 	if ((mem->perms & perms) < perms) {
mem               184 drivers/infiniband/sw/siw/siw_mem.c 			   mem->perms, perms);
mem               190 drivers/infiniband/sw/siw/siw_mem.c 	if (addr < mem->va || addr + len > mem->va + mem->len) {
mem               196 drivers/infiniband/sw/siw/siw_mem.c 			   (void *)(uintptr_t)mem->va,
mem               197 drivers/infiniband/sw/siw/siw_mem.c 			   (void *)(uintptr_t)(mem->va + mem->len),
mem               198 drivers/infiniband/sw/siw/siw_mem.c 			   mem->stag);
mem               222 drivers/infiniband/sw/siw/siw_mem.c int siw_check_sge(struct ib_pd *pd, struct siw_sge *sge, struct siw_mem *mem[],
mem               233 drivers/infiniband/sw/siw/siw_mem.c 	if (*mem == NULL) {
mem               240 drivers/infiniband/sw/siw/siw_mem.c 		*mem = new;
mem               243 drivers/infiniband/sw/siw/siw_mem.c 	if (unlikely((*mem)->stag != sge->lkey)) {
mem               244 drivers/infiniband/sw/siw/siw_mem.c 		siw_dbg_mem((*mem), "STag mismatch: 0x%08x\n", sge->lkey);
mem               248 drivers/infiniband/sw/siw/siw_mem.c 	rv = siw_check_mem(pd, *mem, sge->laddr + off, perms, len);
mem               256 drivers/infiniband/sw/siw/siw_mem.c 		*mem = NULL;
mem               272 drivers/infiniband/sw/siw/siw_mem.c 			siw_unref_mem_sgl(wqe->mem, wqe->sqe.num_sge);
mem               276 drivers/infiniband/sw/siw/siw_mem.c 		siw_unref_mem_sgl(wqe->mem, wqe->rqe.num_sge);
mem               280 drivers/infiniband/sw/siw/siw_mem.c 		siw_unref_mem_sgl(wqe->mem, 1);
mem               295 drivers/infiniband/sw/siw/siw_mem.c 	struct siw_mem *mem = siw_mem_id2obj(sdev, stag >> 8);
mem               298 drivers/infiniband/sw/siw/siw_mem.c 	if (unlikely(!mem)) {
mem               302 drivers/infiniband/sw/siw/siw_mem.c 	if (unlikely(mem->pd != pd)) {
mem               311 drivers/infiniband/sw/siw/siw_mem.c 	mem->stag_valid = 0;
mem               315 drivers/infiniband/sw/siw/siw_mem.c 	siw_mem_put(mem);
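The siw_mem.c lines above combine an XArray with cyclic ID allocation (xa_alloc_cyclic() hands out the STag index, which is then shifted left by eight bits) and a locked lookup that takes a reference with kref_get_unless_zero(). A minimal sketch of that pattern, assuming a simplified object that carries only a kref:

#include <linux/xarray.h>
#include <linux/kref.h>

struct obj {
	struct kref ref;
};

static DEFINE_XARRAY_ALLOC(obj_xa);
static u32 obj_next;

/* Insert an object and hand back a cyclically allocated index. */
static int obj_insert(struct obj *o, u32 *id)
{
	kref_init(&o->ref);
	return xa_alloc_cyclic(&obj_xa, id, o, XA_LIMIT(1, 0x00ffffff),
			       &obj_next, GFP_KERNEL);
}

/* Look up an object and take a reference, unless it is already dying. */
static struct obj *obj_get(u32 id)
{
	struct obj *o;

	xa_lock(&obj_xa);
	o = xa_load(&obj_xa, id);
	if (o && !kref_get_unless_zero(&o->ref))
		o = NULL;
	xa_unlock(&obj_xa);
	return o;
}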
mem                16 drivers/infiniband/sw/siw/siw_mem.h int siw_check_mem(struct ib_pd *pd, struct siw_mem *mem, u64 addr,
mem                19 drivers/infiniband/sw/siw/siw_mem.h 		  struct siw_mem *mem[], enum ib_access_flags perms,
mem                27 drivers/infiniband/sw/siw/siw_mem.h static inline void siw_mem_put(struct siw_mem *mem)
mem                29 drivers/infiniband/sw/siw/siw_mem.h 	kref_put(&mem->ref, siw_free_mem);
mem                34 drivers/infiniband/sw/siw/siw_mem.h 	return container_of(m, struct siw_mr, mem);
mem                37 drivers/infiniband/sw/siw/siw_mem.h static inline void siw_unref_mem_sgl(struct siw_mem **mem, unsigned int num_sge)
mem                40 drivers/infiniband/sw/siw/siw_mem.h 		if (*mem == NULL)
mem                43 drivers/infiniband/sw/siw/siw_mem.h 		siw_mem_put(*mem);
mem                44 drivers/infiniband/sw/siw/siw_mem.h 		*mem = NULL;
mem                45 drivers/infiniband/sw/siw/siw_mem.h 		mem++;
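The siw_mem.h helpers above show the standard kref release idiom: siw_mem_put() is a thin wrapper around kref_put() with a release callback, and siw_unref_mem_sgl() drops one reference per resolved SGE and clears the slot. A minimal, self-contained sketch of the same idiom, with hypothetical object and function names:

#include <linux/kernel.h>
#include <linux/kref.h>
#include <linux/slab.h>

struct buf {
	struct kref ref;
	void *data;
};

static void buf_release(struct kref *ref)
{
	struct buf *b = container_of(ref, struct buf, ref);

	kfree(b->data);
	kfree(b);
}

static inline void buf_put(struct buf *b)
{
	kref_put(&b->ref, buf_release);	/* frees once the count hits zero */
}

/* Drop one reference per slot and clear it, like siw_unref_mem_sgl(). */
static void buf_put_array(struct buf **bufs, unsigned int n)
{
	while (n--) {
		if (*bufs)
			buf_put(*bufs);
		*bufs++ = NULL;
	}
}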
mem               272 drivers/infiniband/sw/siw/siw_qp.c 	memset(wqe->mem, 0, sizeof(*wqe->mem) * SIW_MAX_SGE);
mem               905 drivers/infiniband/sw/siw/siw_qp.c 		memset(wqe->mem, 0, sizeof(*wqe->mem) * SIW_MAX_SGE);
mem               939 drivers/infiniband/sw/siw/siw_qp.c 		memset(wqe->mem, 0, sizeof(*wqe->mem) * SIW_MAX_SGE);
mem               128 drivers/infiniband/sw/siw/siw_qp_rx.c 		      struct siw_mem *mem, u64 addr, int len)
mem               130 drivers/infiniband/sw/siw/siw_qp_rx.c 	struct siw_pbl *pbl = mem->pbl;
mem               131 drivers/infiniband/sw/siw/siw_qp_rx.c 	u64 offset = addr - mem->va;
mem               370 drivers/infiniband/sw/siw/siw_qp_rx.c 				wqe->mem[i] = NULL;
mem               455 drivers/infiniband/sw/siw/siw_qp_rx.c 		struct siw_mem **mem, *mem_p;
mem               469 drivers/infiniband/sw/siw/siw_qp_rx.c 		mem = &wqe->mem[frx->sge_idx];
mem               476 drivers/infiniband/sw/siw/siw_qp_rx.c 		rv = siw_check_sge(pd, sge, mem, IB_ACCESS_LOCAL_WRITE,
mem               486 drivers/infiniband/sw/siw/siw_qp_rx.c 		mem_p = *mem;
mem               543 drivers/infiniband/sw/siw/siw_qp_rx.c 	struct siw_mem *mem;
mem               576 drivers/infiniband/sw/siw/siw_qp_rx.c 	mem = rx_mem(frx);
mem               582 drivers/infiniband/sw/siw/siw_qp_rx.c 	if (unlikely(mem->stag != srx->ddp_stag)) {
mem               588 drivers/infiniband/sw/siw/siw_qp_rx.c 	rv = siw_check_mem(qp->pd, mem, srx->ddp_to + srx->fpdu_part_rcvd,
mem               600 drivers/infiniband/sw/siw/siw_qp_rx.c 	if (mem->mem_obj == NULL)
mem               604 drivers/infiniband/sw/siw/siw_qp_rx.c 	else if (!mem->is_pbl)
mem               605 drivers/infiniband/sw/siw/siw_qp_rx.c 		rv = siw_rx_umem(srx, mem->umem,
mem               608 drivers/infiniband/sw/siw/siw_qp_rx.c 		rv = siw_rx_pbl(srx, &frx->pbl_idx, mem,
mem               688 drivers/infiniband/sw/siw/siw_qp_rx.c 		tx_work->mem[0] = NULL;
mem               758 drivers/infiniband/sw/siw/siw_qp_rx.c 		wqe->mem[0] = NULL;
mem               782 drivers/infiniband/sw/siw/siw_qp_rx.c 	struct siw_mem **mem, *mem_p;
mem               819 drivers/infiniband/sw/siw/siw_qp_rx.c 	mem = &wqe->mem[0];
mem               821 drivers/infiniband/sw/siw/siw_qp_rx.c 	if (!(*mem)) {
mem               825 drivers/infiniband/sw/siw/siw_qp_rx.c 		rv = siw_check_sge(qp->pd, sge, mem, IB_ACCESS_LOCAL_WRITE, 0,
mem               840 drivers/infiniband/sw/siw/siw_qp_rx.c 	mem_p = *mem;
mem                25 drivers/infiniband/sw/siw/siw_qp_tx.c static struct page *siw_get_pblpage(struct siw_mem *mem, u64 addr, int *idx)
mem                27 drivers/infiniband/sw/siw/siw_qp_tx.c 	struct siw_pbl *pbl = mem->pbl;
mem                28 drivers/infiniband/sw/siw/siw_qp_tx.c 	u64 offset = addr - mem->va;
mem                55 drivers/infiniband/sw/siw/siw_qp_tx.c 		struct siw_mem *mem = wqe->mem[0];
mem                57 drivers/infiniband/sw/siw/siw_qp_tx.c 		if (!mem->mem_obj) {
mem                71 drivers/infiniband/sw/siw/siw_qp_tx.c 			if (!mem->is_pbl)
mem                72 drivers/infiniband/sw/siw/siw_qp_tx.c 				p = siw_get_upage(mem->umem, sge->laddr);
mem                74 drivers/infiniband/sw/siw/siw_qp_tx.c 				p = siw_get_pblpage(mem, sge->laddr, &pbl_idx);
mem                89 drivers/infiniband/sw/siw/siw_qp_tx.c 				if (!mem->is_pbl)
mem                90 drivers/infiniband/sw/siw/siw_qp_tx.c 					p = siw_get_upage(mem->umem,
mem                93 drivers/infiniband/sw/siw/siw_qp_tx.c 					p = siw_get_pblpage(mem,
mem               459 drivers/infiniband/sw/siw/siw_qp_tx.c 		struct siw_mem *mem;
mem               462 drivers/infiniband/sw/siw/siw_qp_tx.c 			mem = wqe->mem[sge_idx];
mem               463 drivers/infiniband/sw/siw/siw_qp_tx.c 			is_kva = mem->mem_obj == NULL ? 1 : 0;
mem               492 drivers/infiniband/sw/siw/siw_qp_tx.c 				if (mem->is_pbl)
mem               494 drivers/infiniband/sw/siw/siw_qp_tx.c 						mem, sge->laddr + sge_off,
mem               497 drivers/infiniband/sw/siw/siw_qp_tx.c 					p = siw_get_upage(mem->umem,
mem               765 drivers/infiniband/sw/siw/siw_qp_tx.c 			int rv = siw_check_sge(pd, sge, &wqe->mem[i], perms, 0,
mem               923 drivers/infiniband/sw/siw/siw_qp_tx.c 	struct siw_mem *mem;
mem               938 drivers/infiniband/sw/siw/siw_qp_tx.c 	mem = siw_mem_id2obj(sdev, sqe->rkey  >> 8);
mem               939 drivers/infiniband/sw/siw/siw_qp_tx.c 	if (unlikely(!mem)) {
mem               944 drivers/infiniband/sw/siw/siw_qp_tx.c 	if (unlikely(mem->pd != pd)) {
mem               949 drivers/infiniband/sw/siw/siw_qp_tx.c 	if (unlikely(mem->stag_valid)) {
mem               955 drivers/infiniband/sw/siw/siw_qp_tx.c 	mem->stag = sqe->rkey;
mem               956 drivers/infiniband/sw/siw/siw_qp_tx.c 	mem->perms = sqe->access;
mem               958 drivers/infiniband/sw/siw/siw_qp_tx.c 	siw_dbg_mem(mem, "STag 0x%08x now valid\n", sqe->rkey);
mem               959 drivers/infiniband/sw/siw/siw_qp_tx.c 	mem->va = base_mr->iova;
mem               960 drivers/infiniband/sw/siw/siw_qp_tx.c 	mem->stag_valid = 1;
mem               962 drivers/infiniband/sw/siw/siw_qp_tx.c 	siw_mem_put(mem);
mem              1273 drivers/infiniband/sw/siw/siw_verbs.c 	siw_dbg_mem(mr->mem, "deregister MR\n");
mem              1350 drivers/infiniband/sw/siw/siw_verbs.c 		struct siw_mem *mem = mr->mem;
mem              1362 drivers/infiniband/sw/siw/siw_verbs.c 		mem->stag |= ureq.stag_key;
mem              1363 drivers/infiniband/sw/siw/siw_verbs.c 		uresp.stag = mem->stag;
mem              1373 drivers/infiniband/sw/siw/siw_verbs.c 	mr->mem->stag_valid = 1;
mem              1380 drivers/infiniband/sw/siw/siw_verbs.c 		if (mr->mem)
mem              1429 drivers/infiniband/sw/siw/siw_verbs.c 	mr->mem->is_pbl = 1;
mem              1431 drivers/infiniband/sw/siw/siw_verbs.c 	siw_dbg_pd(pd, "[MEM %u]: success\n", mr->mem->stag);
mem              1441 drivers/infiniband/sw/siw/siw_verbs.c 		if (mr->mem)
mem              1461 drivers/infiniband/sw/siw/siw_verbs.c 	struct siw_mem *mem = mr->mem;
mem              1462 drivers/infiniband/sw/siw/siw_verbs.c 	struct siw_pbl *pbl = mem->pbl;
mem              1468 drivers/infiniband/sw/siw/siw_verbs.c 		siw_dbg_mem(mem, "no PBL allocated\n");
mem              1474 drivers/infiniband/sw/siw/siw_verbs.c 		siw_dbg_mem(mem, "too many SGE's: %d > %d\n",
mem              1475 drivers/infiniband/sw/siw/siw_verbs.c 			    mem->pbl->max_buf, num_sle);
mem              1480 drivers/infiniband/sw/siw/siw_verbs.c 			siw_dbg_mem(mem, "empty SGE\n");
mem              1502 drivers/infiniband/sw/siw/siw_verbs.c 		siw_dbg_mem(mem,
mem              1509 drivers/infiniband/sw/siw/siw_verbs.c 		mem->len = base_mr->length;
mem              1510 drivers/infiniband/sw/siw/siw_verbs.c 		mem->va = base_mr->iova;
mem              1511 drivers/infiniband/sw/siw/siw_verbs.c 		siw_dbg_mem(mem,
mem              1513 drivers/infiniband/sw/siw/siw_verbs.c 			mem->len, (void *)(uintptr_t)mem->va, num_sle,
mem              1544 drivers/infiniband/sw/siw/siw_verbs.c 	mr->mem->stag_valid = 1;
mem              1546 drivers/infiniband/sw/siw/siw_verbs.c 	siw_dbg_pd(pd, "[MEM %u]: success\n", mr->mem->stag);
mem               334 drivers/infiniband/ulp/iser/iscsi_iser.h 				  struct iser_data_buf *mem,
mem               591 drivers/infiniband/ulp/iser/iscsi_iser.h 				     struct iser_data_buf *mem,
mem                43 drivers/infiniband/ulp/iser/iser_memory.c 		      struct iser_data_buf *mem,
mem                48 drivers/infiniband/ulp/iser/iser_memory.c 		     struct iser_data_buf *mem,
mem               191 drivers/infiniband/ulp/iser/iser_memory.c iser_reg_dma(struct iser_device *device, struct iser_data_buf *mem,
mem               194 drivers/infiniband/ulp/iser/iser_memory.c 	struct scatterlist *sg = mem->sg;
mem               228 drivers/infiniband/ulp/iser/iser_memory.c 		      struct iser_data_buf *mem,
mem               241 drivers/infiniband/ulp/iser/iser_memory.c 	plen = ib_sg_to_pages(&page_vec->fake_mr, mem->sg,
mem               242 drivers/infiniband/ulp/iser/iser_memory.c 			      mem->dma_nents, NULL, iser_set_page);
mem               243 drivers/infiniband/ulp/iser/iser_memory.c 	if (unlikely(plen < mem->dma_nents)) {
mem               245 drivers/infiniband/ulp/iser/iser_memory.c 		iser_data_buf_dump(mem, device->ib_device);
mem               327 drivers/infiniband/ulp/iser/iser_memory.c 		sig_attrs->mem.sig_type = IB_SIG_TYPE_NONE;
mem               334 drivers/infiniband/ulp/iser/iser_memory.c 		iser_set_dif_domain(sc, &sig_attrs->mem);
mem               335 drivers/infiniband/ulp/iser/iser_memory.c 		sig_attrs->mem.sig.dif.bg_type = sc->prot_flags & SCSI_PROT_IP_CHECKSUM ?
mem               342 drivers/infiniband/ulp/iser/iser_memory.c 		iser_set_dif_domain(sc, &sig_attrs->mem);
mem               343 drivers/infiniband/ulp/iser/iser_memory.c 		sig_attrs->mem.sig.dif.bg_type = sc->prot_flags & SCSI_PROT_IP_CHECKSUM ?
mem               381 drivers/infiniband/ulp/iser/iser_memory.c 		struct iser_data_buf *mem,
mem               405 drivers/infiniband/ulp/iser/iser_memory.c 	ret = ib_map_mr_sg_pi(mr, mem->sg, mem->dma_nents, NULL,
mem               409 drivers/infiniband/ulp/iser/iser_memory.c 			 mem->dma_nents + sig_mem->dma_nents);
mem               439 drivers/infiniband/ulp/iser/iser_memory.c 			    struct iser_data_buf *mem,
mem               454 drivers/infiniband/ulp/iser/iser_memory.c 	n = ib_map_mr_sg(mr, mem->sg, mem->dma_nents, NULL, SIZE_4K);
mem               455 drivers/infiniband/ulp/iser/iser_memory.c 	if (unlikely(n != mem->dma_nents)) {
mem               457 drivers/infiniband/ulp/iser/iser_memory.c 			 n, mem->dma_nents);
mem               487 drivers/infiniband/ulp/iser/iser_memory.c 		 struct iser_data_buf *mem,
mem               495 drivers/infiniband/ulp/iser/iser_memory.c 		return iser_reg_dma(device, mem, reg);
mem               497 drivers/infiniband/ulp/iser/iser_memory.c 	return device->reg_ops->reg_mem(task, mem, &desc->rsc, reg);
mem               506 drivers/infiniband/ulp/iser/iser_memory.c 	struct iser_data_buf *mem = &task->data[dir];
mem               512 drivers/infiniband/ulp/iser/iser_memory.c 	use_dma_key = mem->dma_nents == 1 && (all_imm || !iser_always_reg) &&
mem               521 drivers/infiniband/ulp/iser/iser_memory.c 		err = iser_reg_data_sg(task, mem, desc, use_dma_key, reg);
mem               525 drivers/infiniband/ulp/iser/iser_memory.c 		err = iser_reg_sig_mr(task, mem, &task->prot[dir],
mem              2089 drivers/infiniband/ulp/isert/ib_isert.c 		sig_attrs->mem.sig_type = IB_SIG_TYPE_NONE;
mem              2095 drivers/infiniband/ulp/isert/ib_isert.c 		isert_set_dif_domain(se_cmd, &sig_attrs->mem);
mem              2100 drivers/infiniband/ulp/isert/ib_isert.c 		isert_set_dif_domain(se_cmd, &sig_attrs->mem);
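The iser_memory.c entries above feed a data buffer's scatterlist into ib_map_mr_sg() and check that every DMA entry was consumed. A hedged sketch of that verbs call, assuming an already-allocated struct ib_mr and a DMA-mapped scatterlist, with the same SZ_4K page size the driver uses:

#include <rdma/ib_verbs.h>
#include <linux/sizes.h>
#include <linux/errno.h>

/*
 * Sketch only: map 'nents' scatterlist entries into the MR's page list.
 * ib_map_mr_sg() returns how many entries it consumed; anything short of
 * nents means the buffer was too fragmented for this MR.
 */
static int example_map_mr(struct ib_mr *mr, struct scatterlist *sg, int nents)
{
	int n = ib_map_mr_sg(mr, sg, nents, NULL, SZ_4K);

	if (n != nents)
		return n < 0 ? n : -EINVAL;
	/* mr->iova, mr->length and mr->rkey now describe the mapping. */
	return 0;
}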
mem               892 drivers/input/evdev.c 	unsigned long *mem;
mem               894 drivers/input/evdev.c 	mem = bitmap_alloc(maxbit, GFP_KERNEL);
mem               895 drivers/input/evdev.c 	if (!mem)
mem               901 drivers/input/evdev.c 	bitmap_copy(mem, bits, maxbit);
mem               909 drivers/input/evdev.c 	ret = bits_to_user(mem, maxbit, maxlen, p, compat);
mem               913 drivers/input/evdev.c 	bitmap_free(mem);
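The evdev.c lines above snapshot a kernel bitmap before copying it out to user space: allocate with bitmap_alloc(), copy with bitmap_copy(), release with bitmap_free(). A minimal sketch of that allocate/copy/free sequence:

#include <linux/bitmap.h>
#include <linux/slab.h>

/* Take a private snapshot of 'src' (maxbit bits) that can be processed
 * without holding whatever lock protects the live bitmap. */
static unsigned long *bitmap_snapshot(const unsigned long *src,
				      unsigned int maxbit)
{
	unsigned long *copy = bitmap_alloc(maxbit, GFP_KERNEL);

	if (!copy)
		return NULL;
	bitmap_copy(copy, src, maxbit);
	return copy;	/* caller releases with bitmap_free() */
}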
mem               156 drivers/input/keyboard/davinci_keyscan.c 	struct resource *res, *mem;
mem               209 drivers/input/keyboard/davinci_keyscan.c 	mem = request_mem_region(davinci_ks->pbase, davinci_ks->base_size,
mem               211 drivers/input/keyboard/davinci_keyscan.c 	if (!mem) {
mem                43 drivers/input/mouse/pxa930_trkball.c 	struct resource *mem;
mem               270 drivers/input/touchscreen/atmel_mxt_ts.c 	u8 *mem;
mem              1372 drivers/input/touchscreen/atmel_mxt_ts.c 				*(cfg->mem + byte_offset) = val;
mem              1398 drivers/input/touchscreen/atmel_mxt_ts.c 					size, cfg->mem + byte_offset);
mem              1531 drivers/input/touchscreen/atmel_mxt_ts.c 	cfg.mem = kzalloc(cfg.mem_size, GFP_KERNEL);
mem              1532 drivers/input/touchscreen/atmel_mxt_ts.c 	if (!cfg.mem) {
mem              1550 drivers/input/touchscreen/atmel_mxt_ts.c 		calculated_crc = mxt_calculate_crc(cfg.mem,
mem              1575 drivers/input/touchscreen/atmel_mxt_ts.c 	kfree(cfg.mem);
mem               506 drivers/input/touchscreen/fsl-imx25-tcq.c 	void __iomem *mem;
mem               514 drivers/input/touchscreen/fsl-imx25-tcq.c 	mem = devm_platform_ioremap_resource(pdev, 0);
mem               515 drivers/input/touchscreen/fsl-imx25-tcq.c 	if (IS_ERR(mem))
mem               516 drivers/input/touchscreen/fsl-imx25-tcq.c 		return PTR_ERR(mem);
mem               522 drivers/input/touchscreen/fsl-imx25-tcq.c 	priv->regs = devm_regmap_init_mmio(dev, mem, &mx25_tcq_regconfig);
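fsl-imx25-tcq.c above maps its register window with devm_platform_ioremap_resource() and then wraps it in an MMIO regmap. A small sketch of that probe-time sequence, with an assumed register layout (32-bit registers, 4-byte stride):

#include <linux/platform_device.h>
#include <linux/regmap.h>

static const struct regmap_config example_regconfig = {
	.reg_bits = 32,
	.val_bits = 32,
	.reg_stride = 4,
};

static int example_probe(struct platform_device *pdev)
{
	void __iomem *base;
	struct regmap *map;

	base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(base))
		return PTR_ERR(base);

	map = devm_regmap_init_mmio(&pdev->dev, base, &example_regconfig);
	if (IS_ERR(map))
		return PTR_ERR(map);

	/* regmap_read()/regmap_write() can now be used on 'map'. */
	return 0;
}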
mem               420 drivers/iommu/virtio-iommu.c 			       struct virtio_iommu_probe_resv_mem *mem,
mem               429 drivers/iommu/virtio-iommu.c 	start = start64 = le64_to_cpu(mem->start);
mem               430 drivers/iommu/virtio-iommu.c 	end = end64 = le64_to_cpu(mem->end);
mem               437 drivers/iommu/virtio-iommu.c 	if (len < sizeof(*mem))
mem               440 drivers/iommu/virtio-iommu.c 	switch (mem->subtype) {
mem               443 drivers/iommu/virtio-iommu.c 			 mem->subtype);
mem               100 drivers/irqchip/irq-tb10x.c 	struct resource mem;
mem               105 drivers/irqchip/irq-tb10x.c 	if (of_address_to_resource(ictl, 0, &mem)) {
mem               111 drivers/irqchip/irq-tb10x.c 	if (!request_mem_region(mem.start, resource_size(&mem),
mem               117 drivers/irqchip/irq-tb10x.c 	reg_base = ioremap(mem.start, resource_size(&mem));
mem               181 drivers/irqchip/irq-tb10x.c 	release_mem_region(mem.start, resource_size(&mem));
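irq-tb10x.c above takes the non-devm path: translate the DT node's "reg" property into a struct resource with of_address_to_resource(), claim it with request_mem_region(), ioremap() it, and release the region again on the error and teardown paths. A condensed sketch of that sequence, assuming a device_node pointer "np":

#include <linux/of_address.h>
#include <linux/ioport.h>
#include <linux/io.h>

static void __iomem *example_map_node(struct device_node *np)
{
	struct resource mem;
	void __iomem *base;

	if (of_address_to_resource(np, 0, &mem))
		return NULL;

	if (!request_mem_region(mem.start, resource_size(&mem), np->full_name))
		return NULL;

	base = ioremap(mem.start, resource_size(&mem));
	if (!base)
		release_mem_region(mem.start, resource_size(&mem));

	return base;
}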
mem                65 drivers/leds/leds-bcm6328.c 	void __iomem *mem;
mem               112 drivers/leds/leds-bcm6328.c 		mode = led->mem + BCM6328_REG_MODE_HI;
mem               114 drivers/leds/leds-bcm6328.c 		mode = led->mem + BCM6328_REG_MODE_LO;
mem               187 drivers/leds/leds-bcm6328.c 		val = bcm6328_led_read(led->mem + BCM6328_REG_INIT);
mem               190 drivers/leds/leds-bcm6328.c 		bcm6328_led_write(led->mem + BCM6328_REG_INIT, val);
mem               205 drivers/leds/leds-bcm6328.c 			 void __iomem *mem, spinlock_t *lock)
mem               211 drivers/leds/leds-bcm6328.c 	val = bcm6328_led_read(mem + BCM6328_REG_HWDIS);
mem               213 drivers/leds/leds-bcm6328.c 	bcm6328_led_write(mem + BCM6328_REG_HWDIS, val);
mem               227 drivers/leds/leds-bcm6328.c 			addr = mem + BCM6328_REG_LNKACTSEL_LO;
mem               229 drivers/leds/leds-bcm6328.c 			addr = mem + BCM6328_REG_LNKACTSEL_HI;
mem               254 drivers/leds/leds-bcm6328.c 			addr = mem + BCM6328_REG_LNKACTSEL_LO;
mem               256 drivers/leds/leds-bcm6328.c 			addr = mem + BCM6328_REG_LNKACTSEL_HI;
mem               277 drivers/leds/leds-bcm6328.c 		       void __iomem *mem, spinlock_t *lock,
mem               289 drivers/leds/leds-bcm6328.c 	led->mem = mem;
mem               311 drivers/leds/leds-bcm6328.c 				mode = mem + BCM6328_REG_MODE_HI;
mem               313 drivers/leds/leds-bcm6328.c 				mode = mem + BCM6328_REG_MODE_LO;
mem               350 drivers/leds/leds-bcm6328.c 	void __iomem *mem;
mem               358 drivers/leds/leds-bcm6328.c 	mem = devm_ioremap_resource(dev, mem_r);
mem               359 drivers/leds/leds-bcm6328.c 	if (IS_ERR(mem))
mem               360 drivers/leds/leds-bcm6328.c 		return PTR_ERR(mem);
mem               376 drivers/leds/leds-bcm6328.c 	bcm6328_led_write(mem + BCM6328_REG_HWDIS, ~0);
mem               377 drivers/leds/leds-bcm6328.c 	bcm6328_led_write(mem + BCM6328_REG_LNKACTSEL_HI, 0);
mem               378 drivers/leds/leds-bcm6328.c 	bcm6328_led_write(mem + BCM6328_REG_LNKACTSEL_LO, 0);
mem               380 drivers/leds/leds-bcm6328.c 	val = bcm6328_led_read(mem + BCM6328_REG_INIT);
mem               392 drivers/leds/leds-bcm6328.c 	bcm6328_led_write(mem + BCM6328_REG_INIT, val);
mem               408 drivers/leds/leds-bcm6328.c 			rc = bcm6328_hwled(dev, child, reg, mem, lock);
mem               410 drivers/leds/leds-bcm6328.c 			rc = bcm6328_led(dev, child, reg, mem, lock,
mem                40 drivers/leds/leds-bcm6358.c 	void __iomem *mem;
mem                64 drivers/leds/leds-bcm6358.c static unsigned long bcm6358_led_busy(void __iomem *mem)
mem                68 drivers/leds/leds-bcm6358.c 	while ((val = bcm6358_led_read(mem + BCM6358_REG_CTRL)) &
mem                83 drivers/leds/leds-bcm6358.c 	bcm6358_led_busy(led->mem);
mem                84 drivers/leds/leds-bcm6358.c 	val = bcm6358_led_read(led->mem + BCM6358_REG_MODE);
mem                90 drivers/leds/leds-bcm6358.c 	bcm6358_led_write(led->mem + BCM6358_REG_MODE, val);
mem                95 drivers/leds/leds-bcm6358.c 		       void __iomem *mem, spinlock_t *lock)
mem               106 drivers/leds/leds-bcm6358.c 	led->mem = mem;
mem               122 drivers/leds/leds-bcm6358.c 			val = bcm6358_led_read(led->mem + BCM6358_REG_MODE);
mem               155 drivers/leds/leds-bcm6358.c 	void __iomem *mem;
mem               164 drivers/leds/leds-bcm6358.c 	mem = devm_ioremap_resource(dev, mem_r);
mem               165 drivers/leds/leds-bcm6358.c 	if (IS_ERR(mem))
mem               166 drivers/leds/leds-bcm6358.c 		return PTR_ERR(mem);
mem               174 drivers/leds/leds-bcm6358.c 	val = bcm6358_led_busy(mem);
mem               193 drivers/leds/leds-bcm6358.c 	bcm6358_led_write(mem + BCM6358_REG_CTRL, val);
mem               208 drivers/leds/leds-bcm6358.c 		rc = bcm6358_led(dev, child, reg, mem, lock);
mem               944 drivers/lightnvm/pblk-core.c 	line->emeta->mem = 0;
mem              1481 drivers/lightnvm/pblk-core.c 		while (emeta->mem < lm->emeta_len[0]) {
mem                95 drivers/lightnvm/pblk-rb.c 	rb->mem = rb->subm = rb->sync = rb->l2p_update = 0;
mem               194 drivers/lightnvm/pblk-rb.c 	unsigned int mem = READ_ONCE(rb->mem);
mem               197 drivers/lightnvm/pblk-rb.c 	return pblk_rb_ring_space(rb, mem, sync, rb->nr_entries);
mem               212 drivers/lightnvm/pblk-rb.c 	unsigned int mem = READ_ONCE(rb->mem);
mem               215 drivers/lightnvm/pblk-rb.c 	return pblk_rb_ring_count(mem, subm, rb->nr_entries);
mem               220 drivers/lightnvm/pblk-rb.c 	unsigned int mem = READ_ONCE(rb->mem);
mem               223 drivers/lightnvm/pblk-rb.c 	return pblk_rb_ring_count(mem, sync, rb->nr_entries);
mem               280 drivers/lightnvm/pblk-rb.c 			      unsigned int mem, unsigned int sync)
mem               288 drivers/lightnvm/pblk-rb.c 	space = pblk_rb_ring_space(rb, mem, rb->l2p_update, rb->nr_entries);
mem               421 drivers/lightnvm/pblk-rb.c 	unsigned int mem;
mem               426 drivers/lightnvm/pblk-rb.c 	mem = READ_ONCE(rb->mem);
mem               430 drivers/lightnvm/pblk-rb.c 	if (pblk_rb_ring_space(rb, mem, sync, rb->nr_entries) < threshold)
mem               433 drivers/lightnvm/pblk-rb.c 	if (pblk_rb_update_l2p(rb, nr_entries, mem, sync))
mem               436 drivers/lightnvm/pblk-rb.c 	*pos = mem;
mem               448 drivers/lightnvm/pblk-rb.c 	smp_store_release(&rb->mem, pblk_rb_ptr_wrap(rb, *pos, nr_entries));
mem               455 drivers/lightnvm/pblk-rb.c 	unsigned int mem = READ_ONCE(rb->mem);
mem               457 drivers/lightnvm/pblk-rb.c 	if (pblk_rb_flush_point_set(rb, NULL, mem))
mem               467 drivers/lightnvm/pblk-rb.c 	unsigned int mem;
mem               472 drivers/lightnvm/pblk-rb.c 	mem = pblk_rb_ptr_wrap(rb, *pos, nr_entries);
mem               479 drivers/lightnvm/pblk-rb.c 		if (pblk_rb_flush_point_set(&pblk->rwb, bio, mem))
mem               484 drivers/lightnvm/pblk-rb.c 	smp_store_release(&rb->mem, mem);
mem               772 drivers/lightnvm/pblk-rb.c 	if ((rb->mem == rb->subm) && (rb->subm == rb->sync) &&
mem               825 drivers/lightnvm/pblk-rb.c 			rb->mem,
mem               843 drivers/lightnvm/pblk-rb.c 			rb->mem,
mem               810 drivers/lightnvm/pblk-recovery.c 			line->emeta->mem = 0;
mem               391 drivers/lightnvm/pblk-write.c 	data = ((void *)emeta->buf) + emeta->mem;
mem               407 drivers/lightnvm/pblk-write.c 	emeta->mem += rq_len;
mem               408 drivers/lightnvm/pblk-write.c 	if (emeta->mem >= lm->emeta_len[0])
mem               482 drivers/lightnvm/pblk-write.c 	if (meta_line->emeta->mem >= lm->emeta_len[0]) {
mem               165 drivers/lightnvm/pblk.h 	unsigned int mem;		/* Write offset - points to next
mem               412 drivers/lightnvm/pblk.h 	int mem;			/* Write offset - points to next
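The pblk-rb.c lines above implement a lockless write pointer: readers snapshot rb->mem with READ_ONCE(), and the producer publishes the advanced pointer with smp_store_release() only after the new entries are filled in. A generic sketch of that single-producer idiom (not pblk's actual structure), assuming a power-of-two ring size:

#include <linux/types.h>
#include <linux/compiler.h>
#include <asm/barrier.h>

struct ring {
	unsigned int mem;		/* producer: next slot to write */
	unsigned int sync;		/* consumer: oldest slot in use */
	unsigned int nr_entries;	/* power of two */
	void **slots;
};

static unsigned int ring_space(struct ring *r)
{
	unsigned int mem = READ_ONCE(r->mem);
	unsigned int sync = READ_ONCE(r->sync);

	return (sync - mem - 1) & (r->nr_entries - 1);
}

/* Producer side: fill the slot, then publish the new write pointer so a
 * consumer that sees the updated 'mem' also sees the slot contents. */
static bool ring_push(struct ring *r, void *entry)
{
	unsigned int mem = READ_ONCE(r->mem);

	if (!ring_space(r))
		return false;

	r->slots[mem] = entry;
	smp_store_release(&r->mem, (mem + 1) & (r->nr_entries - 1));
	return true;
}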
mem               698 drivers/mailbox/omap-mailbox.c 	struct resource *mem;
mem               775 drivers/mailbox/omap-mailbox.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               776 drivers/mailbox/omap-mailbox.c 	mdev->mbox_base = devm_ioremap_resource(&pdev->dev, mem);
mem               425 drivers/mcb/mcb-core.c 		return &dev->mem;
mem               443 drivers/mcb/mcb-core.c 	struct resource *mem;
mem               449 drivers/mcb/mcb-core.c 	size = resource_size(&dev->mem);
mem               451 drivers/mcb/mcb-core.c 	mem = request_mem_region(dev->mem.start, size, name);
mem               452 drivers/mcb/mcb-core.c 	if (!mem)
mem               455 drivers/mcb/mcb-core.c 	return mem;
mem               465 drivers/mcb/mcb-core.c void mcb_release_mem(struct resource *mem)
mem               469 drivers/mcb/mcb-core.c 	size = resource_size(mem);
mem               470 drivers/mcb/mcb-core.c 	release_mem_region(mem->start, size);
mem                18 drivers/mcb/mcb-lpc.c 	struct resource *mem;
mem                32 drivers/mcb/mcb-lpc.c 	priv->mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem                33 drivers/mcb/mcb-lpc.c 	if (!priv->mem) {
mem                38 drivers/mcb/mcb-lpc.c 	res = devm_request_mem_region(&pdev->dev, priv->mem->start,
mem                39 drivers/mcb/mcb-lpc.c 				      resource_size(priv->mem),
mem                46 drivers/mcb/mcb-lpc.c 	priv->base = devm_ioremap(&pdev->dev, priv->mem->start,
mem                47 drivers/mcb/mcb-lpc.c 				  resource_size(priv->mem));
mem                59 drivers/mcb/mcb-lpc.c 	ret = chameleon_parse_cells(priv->bus, priv->mem->start, priv->base);
mem                97 drivers/mcb/mcb-parse.c 	mdev->mem.start = dev_mapbase + offset;
mem                99 drivers/mcb/mcb-parse.c 	mdev->mem.end = mdev->mem.start + size - 1;
mem               100 drivers/mcb/mcb-parse.c 	mdev->mem.flags = IORESOURCE_MEM;
mem               572 drivers/md/dm-bufio.c 		io_req.mem.type = DM_IO_KMEM;
mem               573 drivers/md/dm-bufio.c 		io_req.mem.ptr.addr = (char *)b->data + offset;
mem               575 drivers/md/dm-bufio.c 		io_req.mem.type = DM_IO_VMA;
mem               576 drivers/md/dm-bufio.c 		io_req.mem.ptr.vma = (char *)b->data + offset;
mem              1312 drivers/md/dm-bufio.c 		.mem.type = DM_IO_KMEM,
mem              1313 drivers/md/dm-bufio.c 		.mem.ptr.addr = NULL,
mem              1924 drivers/md/dm-bufio.c 	__u64 mem;
mem              1931 drivers/md/dm-bufio.c 	mem = (__u64)mult_frac(totalram_pages() - totalhigh_pages(),
mem              1934 drivers/md/dm-bufio.c 	if (mem > ULONG_MAX)
mem              1935 drivers/md/dm-bufio.c 		mem = ULONG_MAX;
mem              1938 drivers/md/dm-bufio.c 	if (mem > mult_frac(VMALLOC_TOTAL, DM_BUFIO_VMALLOC_PERCENT, 100))
mem              1939 drivers/md/dm-bufio.c 		mem = mult_frac(VMALLOC_TOTAL, DM_BUFIO_VMALLOC_PERCENT, 100);
mem              1942 drivers/md/dm-bufio.c 	dm_bufio_default_cache_size = mem;
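The dm-bufio.c lines above size the default buffer cache as a percentage of low memory, clamped to ULONG_MAX and to a percentage of the vmalloc area, with mult_frac() applying the percentage without intermediate overflow. A rough sketch of the same clamping arithmetic, under assumed percentage constants and with the vmalloc total passed in as a parameter:

#include <linux/kernel.h>
#include <linux/mm.h>

#define EXAMPLE_MEMORY_PERCENT	2	/* assumed, in the spirit of DM_BUFIO_MEMORY_PERCENT */

static unsigned long example_default_cache_size(unsigned long vmalloc_total,
						unsigned int vmalloc_percent)
{
	/* low-memory pages * 2%, then converted to bytes */
	__u64 mem = (__u64)mult_frac(totalram_pages() - totalhigh_pages(),
				     EXAMPLE_MEMORY_PERCENT, 100) << PAGE_SHIFT;

	if (mem > ULONG_MAX)		/* only meaningful on 32-bit */
		mem = ULONG_MAX;
	if (mem > mult_frac(vmalloc_total, vmalloc_percent, 100))
		mem = mult_frac(vmalloc_total, vmalloc_percent, 100);

	return mem;
}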
mem               481 drivers/md/dm-integrity.c 	io_req.mem.type = DM_IO_KMEM;
mem               482 drivers/md/dm-integrity.c 	io_req.mem.ptr.addr = ic->sb;
mem               954 drivers/md/dm-integrity.c 	io_req.mem.type = DM_IO_PAGE_LIST;
mem               956 drivers/md/dm-integrity.c 		io_req.mem.ptr.pl = &ic->journal_io[pl_index];
mem               958 drivers/md/dm-integrity.c 		io_req.mem.ptr.pl = &ic->journal[pl_index];
mem               959 drivers/md/dm-integrity.c 	io_req.mem.offset = pl_offset;
mem              1075 drivers/md/dm-integrity.c 	io_req.mem.type = DM_IO_PAGE_LIST;
mem              1076 drivers/md/dm-integrity.c 	io_req.mem.ptr.pl = &ic->journal[pl_index];
mem              1077 drivers/md/dm-integrity.c 	io_req.mem.offset = pl_offset;
mem              1537 drivers/md/dm-integrity.c 			char *mem, *checksums_ptr;
mem              1540 drivers/md/dm-integrity.c 			mem = (char *)kmap_atomic(bv.bv_page) + bv.bv_offset;
mem              1544 drivers/md/dm-integrity.c 				integrity_sector_checksum(ic, sector, mem + pos, checksums_ptr);
mem              1550 drivers/md/dm-integrity.c 			kunmap_atomic(mem);
mem              1707 drivers/md/dm-integrity.c 		char *mem;
mem              1714 drivers/md/dm-integrity.c 		mem = kmap_atomic(bv.bv_page);
mem              1728 drivers/md/dm-integrity.c 					kunmap_atomic(mem);
mem              1736 drivers/md/dm-integrity.c 				mem_ptr = mem + bv.bv_offset;
mem              1748 drivers/md/dm-integrity.c 					integrity_sector_checksum(ic, logical_sector, mem + bv.bv_offset, checksums_onstack);
mem              1786 drivers/md/dm-integrity.c 				memcpy(js, mem + bv.bv_offset, ic->sectors_per_block << SECTOR_SHIFT);
mem              1819 drivers/md/dm-integrity.c 		kunmap_atomic(mem);
mem              2417 drivers/md/dm-integrity.c 	io_req.mem.type = DM_IO_VMA;
mem              2418 drivers/md/dm-integrity.c 	io_req.mem.ptr.addr = ic->recalc_buffer;
mem               500 drivers/md/dm-io.c 	switch (io_req->mem.type) {
mem               502 drivers/md/dm-io.c 		list_dp_init(dp, io_req->mem.ptr.pl, io_req->mem.offset);
mem               506 drivers/md/dm-io.c 		bio_dp_init(dp, io_req->mem.ptr.bio);
mem               510 drivers/md/dm-io.c 		flush_kernel_vmap_range(io_req->mem.ptr.vma, size);
mem               512 drivers/md/dm-io.c 			dp->vma_invalidate_address = io_req->mem.ptr.vma;
mem               515 drivers/md/dm-io.c 		vm_dp_init(dp, io_req->mem.ptr.vma);
mem               519 drivers/md/dm-io.c 		km_dp_init(dp, io_req->mem.ptr.addr);
mem               556 drivers/md/dm-kcopyd.c 		.mem.type = DM_IO_PAGE_LIST,
mem               557 drivers/md/dm-kcopyd.c 		.mem.ptr.pl = job->pages,
mem               558 drivers/md/dm-kcopyd.c 		.mem.offset = 0,
mem               459 drivers/md/dm-log.c 		lc->io_req.mem.type = DM_IO_VMA;
mem               477 drivers/md/dm-log.c 		lc->io_req.mem.ptr.vma = lc->disk_header;
mem               265 drivers/md/dm-raid1.c 		.mem.type = DM_IO_KMEM,
mem               266 drivers/md/dm-raid1.c 		.mem.ptr.addr = NULL,
mem               540 drivers/md/dm-raid1.c 		.mem.type = DM_IO_BIO,
mem               541 drivers/md/dm-raid1.c 		.mem.ptr.bio = bio,
mem               654 drivers/md/dm-raid1.c 		.mem.type = DM_IO_BIO,
mem               655 drivers/md/dm-raid1.c 		.mem.ptr.bio = bio,
mem               663 drivers/md/dm-raid1.c 		io_req.mem.type = DM_IO_KMEM;
mem               664 drivers/md/dm-raid1.c 		io_req.mem.ptr.addr = NULL;
mem               240 drivers/md/dm-snap-persistent.c 		.mem.type = DM_IO_VMA,
mem               241 drivers/md/dm-snap-persistent.c 		.mem.ptr.vma = area,
mem               672 drivers/md/dm-snap.c 				    struct kmem_cache *mem)
mem               684 drivers/md/dm-snap.c 			kmem_cache_free(mem, ex);
mem               868 drivers/md/dm-snap.c 	unsigned long mem = 2 * 1024 * 1024;
mem               869 drivers/md/dm-snap.c 	mem /= sizeof(struct hlist_bl_head);
mem               871 drivers/md/dm-snap.c 	return mem;
mem               483 drivers/md/dm-writecache.c 		req.mem.type = DM_IO_VMA;
mem               484 drivers/md/dm-writecache.c 		req.mem.ptr.vma = (char *)wc->memory_map + (size_t)i * BITMAP_GRANULARITY;
mem               524 drivers/md/dm-writecache.c 	req.mem.type = DM_IO_KMEM;
mem               525 drivers/md/dm-writecache.c 	req.mem.ptr.addr = NULL;
mem               891 drivers/md/dm-writecache.c 	req.mem.type = DM_IO_VMA;
mem               892 drivers/md/dm-writecache.c 	req.mem.ptr.vma = (char *)wc->memory_map;
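Most of the dm-* entries above fill in the mem union of a struct dm_io_request before handing it to the dm-io layer: DM_IO_KMEM and DM_IO_VMA point at kernel memory, DM_IO_PAGE_LIST at a page list, DM_IO_BIO at an existing bio. A hedged sketch of a synchronous read into a kernel-virtual buffer (field names as used in the listing; the exact request fields vary by kernel version):

#include <linux/blkdev.h>
#include <linux/dm-io.h>

/*
 * Sketch: synchronously read 'count' sectors from 'bdev' at 'sector' into a
 * kernel-virtual buffer. Assumes an existing dm_io_client.
 */
static int example_read_vma(struct dm_io_client *client,
			    struct block_device *bdev, sector_t sector,
			    sector_t count, void *buf)
{
	struct dm_io_region region = {
		.bdev = bdev,
		.sector = sector,
		.count = count,
	};
	struct dm_io_request io_req = {
		.bi_op = REQ_OP_READ,
		.mem.type = DM_IO_VMA,
		.mem.ptr.vma = buf,
		.notify.fn = NULL,	/* NULL notify => synchronous */
		.client = client,
	};

	return dm_io(&io_req, 1, &region, NULL);
}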
mem               167 drivers/media/common/saa7146/saa7146_core.c 	void *mem = vmalloc_32(length);
mem               170 drivers/media/common/saa7146/saa7146_core.c 	if (NULL == mem)
mem               173 drivers/media/common/saa7146/saa7146_core.c 	if (!(pt->slist = vmalloc_to_sg(mem, pages)))
mem               187 drivers/media/common/saa7146/saa7146_core.c 	return mem;
mem               197 drivers/media/common/saa7146/saa7146_core.c 	vfree(mem);
mem               202 drivers/media/common/saa7146/saa7146_core.c void saa7146_vfree_destroy_pgtable(struct pci_dev *pci, void *mem, struct saa7146_pgtable *pt)
mem               208 drivers/media/common/saa7146/saa7146_core.c 	vfree(mem);
mem               380 drivers/media/common/saa7146/saa7146_core.c 	dev->mem = ioremap(pci_resource_start(pci, 0),
mem               382 drivers/media/common/saa7146/saa7146_core.c 	if (!dev->mem) {
mem               433 drivers/media/common/saa7146/saa7146_core.c 		dev->mem, dev->revision, pci->irq,
mem               485 drivers/media/common/saa7146/saa7146_core.c 	iounmap(dev->mem);
mem               524 drivers/media/common/saa7146/saa7146_core.c 	iounmap(dev->mem);
mem                32 drivers/media/common/saa7146/saa7146_i2c.c 	int mem = 0, op_count = 0;
mem                36 drivers/media/common/saa7146/saa7146_i2c.c 		mem += m[i].len + 1;
mem                41 drivers/media/common/saa7146/saa7146_i2c.c 	mem = 1 + ((mem-1) / 3);
mem                46 drivers/media/common/saa7146/saa7146_i2c.c 	if ((4 * mem) > SAA7146_I2C_MEM) {
mem                52 drivers/media/common/saa7146/saa7146_i2c.c 	memset(op,0,sizeof(__le32)*mem);
mem                83 drivers/media/common/saa7146/saa7146_i2c.c 	return mem;
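In saa7146_i2c.c above, the driver first counts how many bytes the combined i2c messages need (each message contributes its length plus one address byte) and then converts that byte count into 32-bit words holding three payload bytes each; mem = 1 + ((mem - 1) / 3) is simply a ceiling division by three. For example, five messages of two data bytes each give 5 * 3 = 15 bytes, which packs into 1 + 14/3 = 5 words. A tiny sketch of the same arithmetic:

/* Ceiling division: 32-bit words needed for 'bytes' payload bytes (bytes > 0). */
static inline unsigned int example_words_for(unsigned int bytes)
{
	return 1 + ((bytes - 1) / 3);	/* equivalent to DIV_ROUND_UP(bytes, 3) */
}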
mem               170 drivers/media/dvb-core/dmxdev.c 		void *mem;
mem               176 drivers/media/dvb-core/dmxdev.c 		mem = vmalloc(DVR_BUFFER_SIZE);
mem               177 drivers/media/dvb-core/dmxdev.c 		if (!mem) {
mem               181 drivers/media/dvb-core/dmxdev.c 		dvb_ringbuffer_init(&dmxdev->dvr_buffer, mem, DVR_BUFFER_SIZE);
mem               232 drivers/media/dvb-core/dmxdev.c 			void *mem = dmxdev->dvr_buffer.data;
mem               238 drivers/media/dvb-core/dmxdev.c 			vfree(mem);
mem               677 drivers/media/dvb-core/dmxdev.c 	void *mem;
mem               687 drivers/media/dvb-core/dmxdev.c 		mem = vmalloc(filter->buffer.size);
mem               688 drivers/media/dvb-core/dmxdev.c 		if (!mem)
mem               691 drivers/media/dvb-core/dmxdev.c 		filter->buffer.data = mem;
mem               849 drivers/media/dvb-core/dmxdev.c 		void *mem = dmxdevfilter->buffer.data;
mem               854 drivers/media/dvb-core/dmxdev.c 		vfree(mem);
mem                97 drivers/media/dvb-frontends/sp2.c 	int mem, ret;
mem               125 drivers/media/dvb-frontends/sp2.c 		ret = ci_op_cam(s->priv, read, addr, data, &mem);
mem               137 drivers/media/dvb-frontends/sp2.c 			(read) ? mem : data);
mem               140 drivers/media/dvb-frontends/sp2.c 		return mem;
mem               143 drivers/media/dvb-frontends/sp887x.c 	const unsigned char *mem = fw->data;
mem               151 drivers/media/dvb-frontends/sp887x.c 	mem = fw->data + 10;
mem               180 drivers/media/dvb-frontends/sp887x.c 		memcpy(&buf[2], mem + i, c);
mem               307 drivers/media/dvb-frontends/tda1004x.c 			      const unsigned char *mem, unsigned int len,
mem               328 drivers/media/dvb-frontends/tda1004x.c 		memcpy(buf + 1, mem + pos, tx_size);
mem                87 drivers/media/pci/cx18/cx18-firmware.c static int load_cpu_fw_direct(const char *fn, u8 __iomem *mem, struct cx18 *cx)
mem                92 drivers/media/pci/cx18/cx18-firmware.c 	u32 __iomem *dst = (u32 __iomem *)mem;
mem               265 drivers/media/pci/cx23885/altera-ci.c 	int mem = 0;
mem               280 drivers/media/pci/cx23885/altera-ci.c 	mem = netup_fpga_op_rw(inter, NETUP_CI_DATA, val, read);
mem               287 drivers/media/pci/cx23885/altera-ci.c 			(read) ? mem : val);
mem               289 drivers/media/pci/cx23885/altera-ci.c 	return mem;
mem               150 drivers/media/pci/cx23885/cimax2.c 	int mem;
mem               154 drivers/media/pci/cx23885/cimax2.c 		mem = cx_read(MC417_RWD);
mem               155 drivers/media/pci/cx23885/cimax2.c 		if ((mem & NETUP_ACK) == 0)
mem               164 drivers/media/pci/cx23885/cimax2.c 	return mem & 0xff;
mem               175 drivers/media/pci/cx23885/cimax2.c 	int mem;
mem               218 drivers/media/pci/cx23885/cimax2.c 	mem = netup_ci_get_mem(dev);
mem               223 drivers/media/pci/cx23885/cimax2.c 		if (mem < 0)
mem               229 drivers/media/pci/cx23885/cimax2.c 			(read) ? mem : data);
mem               232 drivers/media/pci/cx23885/cimax2.c 		return mem;
mem               664 drivers/media/pci/cx23885/cx23885-dvb.c 				u8 data, int *mem)
mem               718 drivers/media/pci/cx23885/cx23885-dvb.c 	*mem = tmp & 0xff;
mem               723 drivers/media/pci/cx23885/cx23885-dvb.c 		if (*mem < 0) {
mem               938 drivers/media/pci/cx23885/cx23885-dvb.c 	uint32_t mem = 0;
mem               940 drivers/media/pci/cx23885/cx23885-dvb.c 	mem = cx_read(MC417_RWD);
mem               945 drivers/media/pci/cx23885/cx23885-dvb.c 		mem &= ~ALT_DATA;
mem               946 drivers/media/pci/cx23885/cx23885-dvb.c 		mem |= (data & ALT_DATA);
mem               950 drivers/media/pci/cx23885/cx23885-dvb.c 		mem |= ALT_AD_RG;
mem               952 drivers/media/pci/cx23885/cx23885-dvb.c 		mem &= ~ALT_AD_RG;
mem               954 drivers/media/pci/cx23885/cx23885-dvb.c 	mem &= ~ALT_CS;
mem               956 drivers/media/pci/cx23885/cx23885-dvb.c 		mem = (mem & ~ALT_RD) | ALT_WR;
mem               958 drivers/media/pci/cx23885/cx23885-dvb.c 		mem = (mem & ~ALT_WR) | ALT_RD;
mem               960 drivers/media/pci/cx23885/cx23885-dvb.c 	cx_write(MC417_RWD, mem);  /* start RW cycle */
mem               963 drivers/media/pci/cx23885/cx23885-dvb.c 		mem = cx_read(MC417_RWD);
mem               964 drivers/media/pci/cx23885/cx23885-dvb.c 		if ((mem & ALT_RDY) == 0)
mem               973 drivers/media/pci/cx23885/cx23885-dvb.c 		return mem & ALT_DATA;
mem               137 drivers/media/pci/ddbridge/ddbridge-core.c 	u64 mem;
mem               142 drivers/media/pci/ddbridge/ddbridge-core.c 		mem = dma->pbuf[i];
mem               143 drivers/media/pci/ddbridge/ddbridge-core.c 		ddbwritel(dev, mem & 0xffffffff, dma->bufregs + i * 8);
mem               144 drivers/media/pci/ddbridge/ddbridge-core.c 		ddbwritel(dev, mem >> 32, dma->bufregs + i * 8 + 4);
mem               174 drivers/media/pci/ddbridge/ddbridge-core.c 	u64 mem;
mem               179 drivers/media/pci/ddbridge/ddbridge-core.c 		mem = ddma->pbuf[i];
mem               180 drivers/media/pci/ddbridge/ddbridge-core.c 		ddbwritel(dev, mem & 0xffffffff, base + i * 8);
mem               181 drivers/media/pci/ddbridge/ddbridge-core.c 		ddbwritel(dev, mem >> 32, base + i * 8 + 4);
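The ddbridge-core.c entries above (and the smipcie ones further down) program a 64-bit DMA buffer address into a pair of 32-bit registers. The kernel's lower_32_bits()/upper_32_bits() helpers express the same split more explicitly; a minimal sketch, assuming writel()-style MMIO accessors:

#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/io.h>

/* Program one 64-bit DMA address into a low/high register pair. */
static void example_write_dma_addr(void __iomem *lo_reg, void __iomem *hi_reg,
				   dma_addr_t addr)
{
	writel(lower_32_bits(addr), lo_reg);
	writel(upper_32_bits(addr), hi_reg);
}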
mem                40 drivers/media/pci/ivtv/ivtv-firmware.c static int load_fw_direct(const char *fn, volatile u8 __iomem *mem, struct ivtv *itv, long size)
mem                48 drivers/media/pci/ivtv/ivtv-firmware.c 		volatile u32 __iomem *dst = (volatile u32 __iomem *)mem;
mem               166 drivers/media/pci/ivtv/ivtv-firmware.c static volatile struct ivtv_mailbox __iomem *ivtv_search_mailbox(const volatile u8 __iomem *mem, u32 size)
mem               173 drivers/media/pci/ivtv/ivtv-firmware.c 		if (readl(mem + i)      == 0x12345678 &&
mem               174 drivers/media/pci/ivtv/ivtv-firmware.c 		    readl(mem + i + 4)  == 0x34567812 &&
mem               175 drivers/media/pci/ivtv/ivtv-firmware.c 		    readl(mem + i + 8)  == 0x56781234 &&
mem               176 drivers/media/pci/ivtv/ivtv-firmware.c 		    readl(mem + i + 12) == 0x78123456) {
mem               177 drivers/media/pci/ivtv/ivtv-firmware.c 			return (volatile struct ivtv_mailbox __iomem *)(mem + i + 16);
mem                65 drivers/media/pci/meye/meye.c 	void *mem;
mem                69 drivers/media/pci/meye/meye.c 	mem = vmalloc_32(size);
mem                70 drivers/media/pci/meye/meye.c 	if (mem) {
mem                71 drivers/media/pci/meye/meye.c 		memset(mem, 0, size);
mem                72 drivers/media/pci/meye/meye.c 		adr = (unsigned long) mem;
mem                79 drivers/media/pci/meye/meye.c 	return mem;
mem                82 drivers/media/pci/meye/meye.c static void rvfree(void * mem, unsigned long size)
mem                86 drivers/media/pci/meye/meye.c 	if (mem) {
mem                87 drivers/media/pci/meye/meye.c 		adr = (unsigned long) mem;
mem                93 drivers/media/pci/meye/meye.c 		vfree(mem);
mem               774 drivers/media/pci/smipcie/smipcie-main.c 	u64 mem;
mem               780 drivers/media/pci/smipcie/smipcie-main.c 		mem = port->dma_addr[0];
mem               781 drivers/media/pci/smipcie/smipcie-main.c 		dmaMemPtrLow = mem & 0xffffffff;
mem               782 drivers/media/pci/smipcie/smipcie-main.c 		dmaMemPtrHi = mem >> 32;
mem               795 drivers/media/pci/smipcie/smipcie-main.c 		mem = port->dma_addr[1];
mem               796 drivers/media/pci/smipcie/smipcie-main.c 		dmaMemPtrLow = mem & 0xffffffff;
mem               797 drivers/media/pci/smipcie/smipcie-main.c 		dmaMemPtrHi = mem >> 32;
mem              1235 drivers/media/pci/ttpci/av7110.c 	u8 *mem = (u8 *) (budget->grabbing);
mem              1257 drivers/media/pci/ttpci/av7110.c 	       mem[olddma],
mem              1263 drivers/media/pci/ttpci/av7110.c 		dvb_dmx_swfilter_packets(demux, mem + olddma, (newdma - olddma) / 188);
mem              1266 drivers/media/pci/ttpci/av7110.c 		dvb_dmx_swfilter_packets(demux, mem + olddma, (TS_BUFLEN - olddma) / 188);
mem              1267 drivers/media/pci/ttpci/av7110.c 		dvb_dmx_swfilter_packets(demux, mem, newdma / 188);
mem               177 drivers/media/pci/ttpci/budget-core.c 	u8 *mem = (u8 *) (budget->grabbing);
mem               198 drivers/media/pci/ttpci/budget-core.c 		dvb_dmx_swfilter_packets(&budget->demux, mem + olddma, count / 188);
mem               201 drivers/media/pci/ttpci/budget-core.c 		dvb_dmx_swfilter_packets(&budget->demux, mem + olddma, count / 188);
mem               203 drivers/media/pci/ttpci/budget-core.c 		dvb_dmx_swfilter_packets(&budget->demux, mem, newdma / 188);
mem                38 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 			struct mtk_vcodec_mem *mem)
mem                40 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	unsigned long size = mem->size;
mem                44 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	mem->va = dma_alloc_coherent(dev, size, &mem->dma_addr, GFP_KERNEL);
mem                45 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	if (!mem->va) {
mem                51 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	mtk_v4l2_debug(3, "[%d]  - va      = %p", ctx->id, mem->va);
mem                53 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 		       (unsigned long)mem->dma_addr);
mem                61 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 			struct mtk_vcodec_mem *mem)
mem                63 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	unsigned long size = mem->size;
mem                67 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	if (!mem->va) {
mem                73 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	mtk_v4l2_debug(3, "[%d]  - va      = %p", ctx->id, mem->va);
mem                75 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 		       (unsigned long)mem->dma_addr);
mem                78 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	dma_free_coherent(dev, size, mem->va, mem->dma_addr);
mem                79 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	mem->va = NULL;
mem                80 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	mem->dma_addr = 0;
mem                81 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	mem->size = 0;
mem                79 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.h 				struct mtk_vcodec_mem *mem);
mem                81 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.h 				struct mtk_vcodec_mem *mem);
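mtk_vcodec_util.c above is a thin wrapper around coherent DMA allocations: dma_alloc_coherent() returns the CPU virtual address and fills in the device-visible DMA address, and dma_free_coherent() releases the pair. A minimal sketch of that allocate/free pairing, with an assumed context struct holding both addresses:

#include <linux/device.h>
#include <linux/dma-mapping.h>

struct example_mem {
	size_t size;
	void *va;		/* CPU mapping */
	dma_addr_t dma_addr;	/* device address */
};

static int example_mem_alloc(struct device *dev, struct example_mem *mem)
{
	mem->va = dma_alloc_coherent(dev, mem->size, &mem->dma_addr,
				     GFP_KERNEL);
	return mem->va ? 0 : -ENOMEM;
}

static void example_mem_free(struct device *dev, struct example_mem *mem)
{
	if (!mem->va)
		return;
	dma_free_coherent(dev, mem->size, mem->va, mem->dma_addr);
	mem->va = NULL;
	mem->dma_addr = 0;
	mem->size = 0;
}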
mem               157 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 	struct mtk_vcodec_mem *mem = NULL;
mem               162 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 	mem = &inst->pred_buf;
mem               163 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 	if (mem->va)
mem               164 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 		mtk_vcodec_mem_free(inst->ctx, mem);
mem               171 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 	struct mtk_vcodec_mem *mem = NULL;
mem               175 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 		mem = &inst->mv_buf[i];
mem               176 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 		if (mem->va)
mem               177 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 			mtk_vcodec_mem_free(inst->ctx, mem);
mem               178 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 		mem->size = buf_sz;
mem               179 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 		err = mtk_vcodec_mem_alloc(inst->ctx, mem);
mem               184 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 		inst->vsi->mv_buf_dma[i] = mem->dma_addr;
mem               193 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 	struct mtk_vcodec_mem *mem = NULL;
mem               197 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 		mem = &inst->mv_buf[i];
mem               198 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 		if (mem->va)
mem               199 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 			mtk_vcodec_mem_free(inst->ctx, mem);
mem               368 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	struct mtk_vcodec_mem *mem = &inst->working_buf;
mem               370 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	mem->size = VP8_WORKING_BUF_SZ;
mem               371 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	err = mtk_vcodec_mem_alloc(inst->ctx, mem);
mem               377 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	inst->vsi->dec.working_buf_dma = (uint64_t)mem->dma_addr;
mem               383 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	struct mtk_vcodec_mem *mem = &inst->working_buf;
mem               385 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	if (mem->va)
mem               386 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 		mtk_vcodec_mem_free(inst->ctx, mem);
mem               199 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	struct mtk_vcodec_mem mem;
mem               359 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	struct mtk_vcodec_mem *mem;
mem               388 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	mem = &inst->mv_buf;
mem               389 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	if (mem->va)
mem               390 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 		mtk_vcodec_mem_free(inst->ctx, mem);
mem               392 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	mem->size = ((vsi->buf_w / 64) *
mem               394 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	result = mtk_vcodec_mem_alloc(inst->ctx, mem);
mem               396 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 		mem->size = 0;
mem               401 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	vsi->mv_buf.va = (unsigned long)mem->va;
mem               402 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	vsi->mv_buf.pa = (unsigned long)mem->dma_addr;
mem               403 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	vsi->mv_buf.sz = (unsigned int)mem->size;
mem               406 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	mem = &inst->seg_id_buf;
mem               407 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	if (mem->va)
mem               408 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 		mtk_vcodec_mem_free(inst->ctx, mem);
mem               410 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	mem->size = VP9_SEG_ID_SZ;
mem               411 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	result = mtk_vcodec_mem_alloc(inst->ctx, mem);
mem               413 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 		mem->size = 0;
mem               418 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	vsi->seg_id_buf.va = (unsigned long)mem->va;
mem               419 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	vsi->seg_id_buf.pa = (unsigned long)mem->dma_addr;
mem               420 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	vsi->seg_id_buf.sz = (unsigned int)mem->size;
mem               549 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	struct mtk_vcodec_mem mem;
mem               552 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	memset(&mem, 0, sizeof(mem));
mem               553 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	mem.size = sizeof(struct vdec_vp9_inst);
mem               554 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	result = mtk_vcodec_mem_alloc(ctx, &mem);
mem               558 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	inst = mem.va;
mem               559 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	inst->mem = mem;
mem               566 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	struct mtk_vcodec_mem mem;
mem               568 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	mem = inst->mem;
mem               569 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	if (mem.va)
mem               570 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 		mtk_vcodec_mem_free(inst->ctx, &mem);
mem               763 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	struct mtk_vcodec_mem *mem;
mem               770 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	mem = &inst->mv_buf;
mem               771 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	if (mem->va)
mem               772 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 		mtk_vcodec_mem_free(inst->ctx, mem);
mem               774 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	mem = &inst->seg_id_buf;
mem               775 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	if (mem->va)
mem               776 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 		mtk_vcodec_mem_free(inst->ctx, mem);
mem              2267 drivers/media/platform/omap3isp/isp.c 	struct resource *mem;
mem              2330 drivers/media/platform/omap3isp/isp.c 		mem = platform_get_resource(pdev, IORESOURCE_MEM, i);
mem              2332 drivers/media/platform/omap3isp/isp.c 			devm_ioremap_resource(isp->dev, mem);
mem              2386 drivers/media/platform/omap3isp/isp.c 		mem->start + isp_res_maps[m].offset[OMAP3_ISP_IOMEM_HIST];
mem                74 drivers/media/platform/qcom/venus/hfi_cmds.c 		res->mem = addr;
mem               930 drivers/media/platform/qcom/venus/hfi_helper.h 	u32 mem;
mem               341 drivers/media/platform/qcom/venus/hfi_venus.c static void venus_free(struct venus_hfi_device *hdev, struct mem_desc *mem)
mem               345 drivers/media/platform/qcom/venus/hfi_venus.c 	dma_free_attrs(dev, mem->size, mem->kva, mem->da, mem->attrs);
mem              1285 drivers/media/platform/rcar-vin/rcar-core.c 	struct resource *mem;
mem              1304 drivers/media/platform/rcar-vin/rcar-core.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem              1305 drivers/media/platform/rcar-vin/rcar-core.c 	if (mem == NULL)
mem              1308 drivers/media/platform/rcar-vin/rcar-core.c 	vin->base = devm_ioremap_resource(vin->dev, mem);
mem                43 drivers/media/platform/s5p-mfc/s5p_mfc.c module_param_named(mem, mfc_mem_size, charp, 0644);
mem                44 drivers/media/platform/s5p-mfc/s5p_mfc.c MODULE_PARM_DESC(mem, "Preallocated memory size for the firmware and context buffers");
mem               134 drivers/media/platform/vsp1/vsp1_dl.c 	void *mem;
mem               158 drivers/media/platform/vsp1/vsp1_dl.c 	void *mem;
mem               276 drivers/media/platform/vsp1/vsp1_dl.c 	pool->mem = dma_alloc_wc(vsp1->bus_master, pool->size, &pool->dma,
mem               278 drivers/media/platform/vsp1/vsp1_dl.c 	if (!pool->mem) {
mem               294 drivers/media/platform/vsp1/vsp1_dl.c 		dlb->entries = pool->mem + i * dlb_size;
mem               313 drivers/media/platform/vsp1/vsp1_dl.c 	if (pool->mem)
mem               314 drivers/media/platform/vsp1/vsp1_dl.c 		dma_free_wc(pool->vsp1->bus_master, pool->size, pool->mem,
mem               448 drivers/media/platform/vsp1/vsp1_dl.c 	pool->mem = dma_alloc_wc(vsp1->bus_master, pool->size, &pool->dma,
mem               450 drivers/media/platform/vsp1/vsp1_dl.c 	if (!pool->mem) {
mem               471 drivers/media/platform/vsp1/vsp1_dl.c 		cmd->cmds = pool->mem + cmd_offset;
mem               474 drivers/media/platform/vsp1/vsp1_dl.c 		cmd->data = pool->mem + data_offset;
mem               522 drivers/media/platform/vsp1/vsp1_dl.c 	if (pool->mem)
mem               523 drivers/media/platform/vsp1/vsp1_dl.c 		dma_free_wc(pool->vsp1->bus_master, pool->size, pool->mem,
mem               837 drivers/media/platform/vsp1/vsp1_drm.c 		cfg->pixelformat, cfg->pitch, &cfg->mem[0], &cfg->mem[1],
mem               838 drivers/media/platform/vsp1/vsp1_drm.c 		&cfg->mem[2], cfg->zpos);
mem               851 drivers/media/platform/vsp1/vsp1_drm.c 	rpf->mem.addr[0] = cfg->mem[0];
mem               852 drivers/media/platform/vsp1/vsp1_drm.c 	rpf->mem.addr[1] = cfg->mem[1];
mem               853 drivers/media/platform/vsp1/vsp1_drm.c 	rpf->mem.addr[2] = cfg->mem[2];
mem               892 drivers/media/platform/vsp1/vsp1_drm.c 		pipe->output->mem.addr[0] = wb_cfg->mem[0];
mem               893 drivers/media/platform/vsp1/vsp1_drm.c 		pipe->output->mem.addr[1] = wb_cfg->mem[1];
mem               894 drivers/media/platform/vsp1/vsp1_drm.c 		pipe->output->mem.addr[2] = wb_cfg->mem[2];
mem               212 drivers/media/platform/vsp1/vsp1_rpf.c 	auto_fld->top_y0 = rpf->mem.addr[0];
mem               213 drivers/media/platform/vsp1/vsp1_rpf.c 	auto_fld->top_c0 = rpf->mem.addr[1];
mem               214 drivers/media/platform/vsp1/vsp1_rpf.c 	auto_fld->top_c1 = rpf->mem.addr[2];
mem               219 drivers/media/platform/vsp1/vsp1_rpf.c 	auto_fld->bottom_y0 = rpf->mem.addr[0] + offset_y;
mem               220 drivers/media/platform/vsp1/vsp1_rpf.c 	auto_fld->bottom_c0 = rpf->mem.addr[1] + offset_c;
mem               221 drivers/media/platform/vsp1/vsp1_rpf.c 	auto_fld->bottom_c1 = rpf->mem.addr[2] + offset_c;
mem               247 drivers/media/platform/vsp1/vsp1_rpf.c 	struct vsp1_rwpf_memory mem = rpf->mem;
mem               290 drivers/media/platform/vsp1/vsp1_rpf.c 	mem.addr[0] += crop.top * format->plane_fmt[0].bytesperline
mem               299 drivers/media/platform/vsp1/vsp1_rpf.c 		mem.addr[1] += offset;
mem               300 drivers/media/platform/vsp1/vsp1_rpf.c 		mem.addr[2] += offset;
mem               309 drivers/media/platform/vsp1/vsp1_rpf.c 		swap(mem.addr[1], mem.addr[2]);
mem               318 drivers/media/platform/vsp1/vsp1_rpf.c 		vsp1_rpf_write(rpf, dlb, VI6_RPF_SRCM_ADDR_Y, mem.addr[0]);
mem               319 drivers/media/platform/vsp1/vsp1_rpf.c 		vsp1_rpf_write(rpf, dlb, VI6_RPF_SRCM_ADDR_C0, mem.addr[1]);
mem               320 drivers/media/platform/vsp1/vsp1_rpf.c 		vsp1_rpf_write(rpf, dlb, VI6_RPF_SRCM_ADDR_C1, mem.addr[2]);
mem                63 drivers/media/platform/vsp1/vsp1_rwpf.h 	struct vsp1_rwpf_memory mem;
mem               359 drivers/media/platform/vsp1/vsp1_video.c 	video->rwpf->mem = buf->mem;
mem               745 drivers/media/platform/vsp1/vsp1_video.c 		buf->mem.addr[i] = vb2_dma_contig_plane_dma_addr(vb, i);
mem               752 drivers/media/platform/vsp1/vsp1_video.c 		buf->mem.addr[i] = 0;
mem               776 drivers/media/platform/vsp1/vsp1_video.c 	video->rwpf->mem = buf->mem;
mem                22 drivers/media/platform/vsp1/vsp1_video.h 	struct vsp1_rwpf_memory mem;
mem               398 drivers/media/platform/vsp1/vsp1_wpf.c 	struct vsp1_rwpf_memory mem = wpf->mem;
mem               469 drivers/media/platform/vsp1/vsp1_wpf.c 			mem.addr[i] += offset / vsub
mem               472 drivers/media/platform/vsp1/vsp1_wpf.c 			mem.addr[i] += offset / hsub
mem               488 drivers/media/platform/vsp1/vsp1_wpf.c 		mem.addr[0] += (height - 1)
mem               494 drivers/media/platform/vsp1/vsp1_wpf.c 			mem.addr[1] += offset;
mem               495 drivers/media/platform/vsp1/vsp1_wpf.c 			mem.addr[2] += offset;
mem               509 drivers/media/platform/vsp1/vsp1_wpf.c 			mem.addr[i] += hoffset / hsub
mem               520 drivers/media/platform/vsp1/vsp1_wpf.c 		swap(mem.addr[1], mem.addr[2]);
mem               522 drivers/media/platform/vsp1/vsp1_wpf.c 	vsp1_wpf_write(wpf, dlb, VI6_WPF_DSTM_ADDR_Y, mem.addr[0]);
mem               523 drivers/media/platform/vsp1/vsp1_wpf.c 	vsp1_wpf_write(wpf, dlb, VI6_WPF_DSTM_ADDR_C0, mem.addr[1]);
mem               524 drivers/media/platform/vsp1/vsp1_wpf.c 	vsp1_wpf_write(wpf, dlb, VI6_WPF_DSTM_ADDR_C1, mem.addr[2]);
mem                61 drivers/media/usb/au0828/au0828-vbi.c 	buf->mem = vb2_plane_vaddr(vb, 0);
mem               407 drivers/media/usb/au0828/au0828-video.c 	(*buf)->vb_buf = (*buf)->mem;
mem               482 drivers/media/usb/au0828/au0828-video.c 	(*buf)->vb_buf = (*buf)->mem;
mem               727 drivers/media/usb/au0828/au0828-video.c 	buf->mem = vb2_plane_vaddr(vb, 0);
mem               160 drivers/media/usb/au0828/au0828.h 	void *mem;
mem                82 drivers/media/usb/cpia2/cpia2_core.c 	void *mem;
mem                88 drivers/media/usb/cpia2/cpia2_core.c 	mem = vmalloc_32(size);
mem                89 drivers/media/usb/cpia2/cpia2_core.c 	if (!mem)
mem                92 drivers/media/usb/cpia2/cpia2_core.c 	memset(mem, 0, size);	/* Clear the ram out, no junk to the user */
mem                93 drivers/media/usb/cpia2/cpia2_core.c 	adr = (unsigned long) mem;
mem               100 drivers/media/usb/cpia2/cpia2_core.c 	return mem;
mem               103 drivers/media/usb/cpia2/cpia2_core.c static void rvfree(void *mem, unsigned long size)
mem               107 drivers/media/usb/cpia2/cpia2_core.c 	if (!mem)
mem               112 drivers/media/usb/cpia2/cpia2_core.c 	adr = (unsigned long) mem;
mem               118 drivers/media/usb/cpia2/cpia2_core.c 	vfree(mem);
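
The cpia2 lines above are the driver's rvmalloc()/rvfree() pair (the usbvision entries further down repeat the same pattern). The loop bodies do not contain "mem" and therefore do not appear in this listing; the sketch below fills them in with the page-reservation loop conventional for these older V4L2 USB drivers, so treat that part as an assumption rather than a quote. Usual kernel headers (<linux/vmalloc.h>, <linux/mm.h>) are assumed.

	static void *rvmalloc(unsigned long size)
	{
		void *mem;
		unsigned long adr;

		size = PAGE_ALIGN(size);
		mem = vmalloc_32(size);		/* 32-bit addressable, suitable for USB DMA */
		if (!mem)
			return NULL;

		memset(mem, 0, size);	/* Clear the ram out, no junk to the user */
		adr = (unsigned long) mem;
		while ((long) size > 0) {
			SetPageReserved(vmalloc_to_page((void *)adr));
			adr += PAGE_SIZE;
			size -= PAGE_SIZE;
		}
		return mem;
	}

	static void rvfree(void *mem, unsigned long size)
	{
		unsigned long adr;

		if (!mem)
			return;

		adr = (unsigned long) mem;
		while ((long) size > 0) {
			ClearPageReserved(vmalloc_to_page((void *)adr));
			adr += PAGE_SIZE;
			size -= PAGE_SIZE;
		}
		vfree(mem);
	}
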
mem               345 drivers/media/usb/dvb-usb-v2/dvbsky.c 					u8 data, int *mem)
mem               368 drivers/media/usb/dvb-usb-v2/dvbsky.c 		*mem = respond[1];
mem                83 drivers/media/usb/em28xx/em28xx-vbi.c 	buf->mem = vb2_plane_vaddr(vb, 0);
mem               612 drivers/media/usb/em28xx/em28xx-video.c 	buf->vb_buf = buf->mem;
mem              1228 drivers/media/usb/em28xx/em28xx-video.c 	buf->mem = vb2_plane_vaddr(vb, 0);
mem               285 drivers/media/usb/em28xx/em28xx.h 	void			*mem;
mem               368 drivers/media/usb/go7007/go7007-fw.c 	u16 mem = 0x3e00;
mem               390 drivers/media/usb/go7007/go7007-fw.c 		code[off + 1] = __cpu_to_le16(0x8000 | mem);
mem               393 drivers/media/usb/go7007/go7007-fw.c 		if (mem + chunk > 0x4000)
mem               394 drivers/media/usb/go7007/go7007-fw.c 			chunk = 0x4000 - mem;
mem               401 drivers/media/usb/go7007/go7007-fw.c 			mem = 0x3e00;
mem               405 drivers/media/usb/go7007/go7007-fw.c 			mem += 28;
mem               635 drivers/media/usb/go7007/go7007-fw.c 	u16 mem = 0x3e00;
mem               679 drivers/media/usb/go7007/go7007-fw.c 		code[off + 1] = __cpu_to_le16(0x8000 | mem);
mem               682 drivers/media/usb/go7007/go7007-fw.c 		if (mem + chunk > 0x4000)
mem               683 drivers/media/usb/go7007/go7007-fw.c 			chunk = 0x4000 - mem;
mem               690 drivers/media/usb/go7007/go7007-fw.c 			if (mem + chunk == 0x4000) {
mem               691 drivers/media/usb/go7007/go7007-fw.c 				mem = 0x3e00;
mem               697 drivers/media/usb/go7007/go7007-fw.c 			mem += 28;
mem               821 drivers/media/usb/go7007/go7007-fw.c 	u16 mem = 0x3e00;
mem               847 drivers/media/usb/go7007/go7007-fw.c 		code[off + 1] = __cpu_to_le16(0x8000 | mem);
mem               850 drivers/media/usb/go7007/go7007-fw.c 		if (mem + chunk > 0x4000)
mem               851 drivers/media/usb/go7007/go7007-fw.c 			chunk = 0x4000 - mem;
mem               858 drivers/media/usb/go7007/go7007-fw.c 			if (mem + chunk == 0x4000) {
mem               859 drivers/media/usb/go7007/go7007-fw.c 				mem = 0x3e00;
mem               865 drivers/media/usb/go7007/go7007-fw.c 			mem += 28;
mem               871 drivers/media/usb/go7007/go7007-fw.c 	mem = 0x3e00;
mem               890 drivers/media/usb/go7007/go7007-fw.c 		code[off + 1] = __cpu_to_le16(0x8000 | mem);
mem               893 drivers/media/usb/go7007/go7007-fw.c 		if (mem + chunk > 0x4000)
mem               894 drivers/media/usb/go7007/go7007-fw.c 			chunk = 0x4000 - mem;
mem               901 drivers/media/usb/go7007/go7007-fw.c 			if (mem + chunk == 0x4000) {
mem               902 drivers/media/usb/go7007/go7007-fw.c 				mem = 0x3e00;
mem               908 drivers/media/usb/go7007/go7007-fw.c 			mem += 28;
mem               136 drivers/media/usb/hdpvr/hdpvr-video.c 	u8 *mem;
mem               157 drivers/media/usb/hdpvr/hdpvr-video.c 		mem = usb_alloc_coherent(dev->udev, dev->bulk_in_size, GFP_KERNEL,
mem               159 drivers/media/usb/hdpvr/hdpvr-video.c 		if (!mem) {
mem               168 drivers/media/usb/hdpvr/hdpvr-video.c 				  mem, dev->bulk_in_size,
mem               697 drivers/media/usb/stk1160/stk1160-v4l.c 		buf->mem = vb2_plane_vaddr(vb, 0);
mem               105 drivers/media/usb/stk1160/stk1160-video.c 	u8 *dst = buf->mem;
mem               159 drivers/media/usb/stk1160/stk1160-video.c 		(unsigned long)buf->mem + buf->length) {
mem               201 drivers/media/usb/stk1160/stk1160-video.c 			(unsigned long)buf->mem + buf->length) {
mem                75 drivers/media/usb/stk1160/stk1160.h 	void *mem;
mem               103 drivers/media/usb/usbvision/usbvision-core.c 	void *mem;
mem               107 drivers/media/usb/usbvision/usbvision-core.c 	mem = vmalloc_32(size);
mem               108 drivers/media/usb/usbvision/usbvision-core.c 	if (!mem)
mem               111 drivers/media/usb/usbvision/usbvision-core.c 	memset(mem, 0, size); /* Clear the ram out, no junk to the user */
mem               112 drivers/media/usb/usbvision/usbvision-core.c 	adr = (unsigned long) mem;
mem               119 drivers/media/usb/usbvision/usbvision-core.c 	return mem;
mem               122 drivers/media/usb/usbvision/usbvision-core.c static void usbvision_rvfree(void *mem, unsigned long size)
mem               126 drivers/media/usb/usbvision/usbvision-core.c 	if (!mem)
mem               131 drivers/media/usb/usbvision/usbvision-core.c 	adr = (unsigned long) mem;
mem               138 drivers/media/usb/usbvision/usbvision-core.c 	vfree(mem);
mem                44 drivers/media/usb/uvc/uvc_isight.c 	u8 *mem;
mem                82 drivers/media/usb/uvc/uvc_isight.c 		mem = buf->mem + buf->bytesused;
mem                84 drivers/media/usb/uvc/uvc_isight.c 		memcpy(mem, data, nbytes);
mem               121 drivers/media/usb/uvc/uvc_queue.c 	buf->mem = vb2_plane_vaddr(vb, 0);
mem              1144 drivers/media/usb/uvc/uvc_video.c 	op->dst = buf->mem + buf->bytesused;
mem              1198 drivers/media/usb/uvc/uvc_video.c 	void *mem;
mem              1201 drivers/media/usb/uvc/uvc_video.c 	mem = buf->mem + queue->buf_used;
mem              1205 drivers/media/usb/uvc/uvc_video.c 	memcpy(data, mem, nbytes);
mem              1229 drivers/media/usb/uvc/uvc_video.c 				  const u8 *mem, unsigned int length)
mem              1248 drivers/media/usb/uvc/uvc_video.c 	has_pts = mem[1] & UVC_STREAM_PTS;
mem              1249 drivers/media/usb/uvc/uvc_video.c 	has_scr = mem[1] & UVC_STREAM_SCR;
mem              1253 drivers/media/usb/uvc/uvc_video.c 		scr = mem + 6;
mem              1255 drivers/media/usb/uvc/uvc_video.c 		scr = mem + 2;
mem              1268 drivers/media/usb/uvc/uvc_video.c 	meta = (struct uvc_meta_buf *)((u8 *)meta_buf->mem + meta_buf->bytesused);
mem              1279 drivers/media/usb/uvc/uvc_video.c 	memcpy(&meta->length, mem, length);
mem              1337 drivers/media/usb/uvc/uvc_video.c 	u8 *mem;
mem              1351 drivers/media/usb/uvc/uvc_video.c 		mem = urb->transfer_buffer + urb->iso_frame_desc[i].offset;
mem              1353 drivers/media/usb/uvc/uvc_video.c 			ret = uvc_video_decode_start(stream, buf, mem,
mem              1362 drivers/media/usb/uvc/uvc_video.c 		uvc_video_decode_meta(stream, meta_buf, mem, ret);
mem              1365 drivers/media/usb/uvc/uvc_video.c 		uvc_video_decode_data(uvc_urb, buf, mem + ret,
mem              1369 drivers/media/usb/uvc/uvc_video.c 		uvc_video_decode_end(stream, buf, mem,
mem              1382 drivers/media/usb/uvc/uvc_video.c 	u8 *mem;
mem              1392 drivers/media/usb/uvc/uvc_video.c 	mem = urb->transfer_buffer;
mem              1401 drivers/media/usb/uvc/uvc_video.c 			ret = uvc_video_decode_start(stream, buf, mem, len);
mem              1410 drivers/media/usb/uvc/uvc_video.c 			memcpy(stream->bulk.header, mem, ret);
mem              1413 drivers/media/usb/uvc/uvc_video.c 			uvc_video_decode_meta(stream, meta_buf, mem, ret);
mem              1415 drivers/media/usb/uvc/uvc_video.c 			mem += ret;
mem              1427 drivers/media/usb/uvc/uvc_video.c 		uvc_video_decode_data(uvc_urb, buf, mem, len);
mem              1453 drivers/media/usb/uvc/uvc_video.c 	u8 *mem = urb->transfer_buffer;
mem              1463 drivers/media/usb/uvc/uvc_video.c 		ret = uvc_video_encode_header(stream, buf, mem, len);
mem              1466 drivers/media/usb/uvc/uvc_video.c 		mem += ret;
mem              1471 drivers/media/usb/uvc/uvc_video.c 	ret = uvc_video_encode_data(stream, buf, mem, len);
mem               412 drivers/media/usb/uvc/uvcvideo.h 	void *mem;
mem                38 drivers/media/v4l2-core/videobuf-dma-contig.c 			       struct videobuf_dma_contig_memory *mem,
mem                41 drivers/media/v4l2-core/videobuf-dma-contig.c 	mem->size = size;
mem                42 drivers/media/v4l2-core/videobuf-dma-contig.c 	mem->vaddr = dma_alloc_coherent(dev, mem->size,
mem                43 drivers/media/v4l2-core/videobuf-dma-contig.c 					&mem->dma_handle, flags);
mem                45 drivers/media/v4l2-core/videobuf-dma-contig.c 	if (!mem->vaddr) {
mem                46 drivers/media/v4l2-core/videobuf-dma-contig.c 		dev_err(dev, "memory alloc size %ld failed\n", mem->size);
mem                50 drivers/media/v4l2-core/videobuf-dma-contig.c 	dev_dbg(dev, "dma mapped data is at %p (%ld)\n", mem->vaddr, mem->size);
mem                56 drivers/media/v4l2-core/videobuf-dma-contig.c 			       struct videobuf_dma_contig_memory *mem)
mem                58 drivers/media/v4l2-core/videobuf-dma-contig.c 	dma_free_coherent(dev, mem->size, mem->vaddr, mem->dma_handle);
mem                60 drivers/media/v4l2-core/videobuf-dma-contig.c 	mem->vaddr = NULL;
mem                84 drivers/media/v4l2-core/videobuf-dma-contig.c 		struct videobuf_dma_contig_memory *mem;
mem               100 drivers/media/v4l2-core/videobuf-dma-contig.c 			mem = q->bufs[i]->priv;
mem               101 drivers/media/v4l2-core/videobuf-dma-contig.c 			if (mem) {
mem               108 drivers/media/v4l2-core/videobuf-dma-contig.c 				MAGIC_CHECK(mem->magic, MAGIC_DC_MEM);
mem               114 drivers/media/v4l2-core/videobuf-dma-contig.c 					i, mem->vaddr);
mem               116 drivers/media/v4l2-core/videobuf-dma-contig.c 				__videobuf_dc_free(q->dev, mem);
mem               117 drivers/media/v4l2-core/videobuf-dma-contig.c 				mem->vaddr = NULL;
mem               141 drivers/media/v4l2-core/videobuf-dma-contig.c static void videobuf_dma_contig_user_put(struct videobuf_dma_contig_memory *mem)
mem               143 drivers/media/v4l2-core/videobuf-dma-contig.c 	mem->dma_handle = 0;
mem               144 drivers/media/v4l2-core/videobuf-dma-contig.c 	mem->size = 0;
mem               157 drivers/media/v4l2-core/videobuf-dma-contig.c static int videobuf_dma_contig_user_get(struct videobuf_dma_contig_memory *mem,
mem               169 drivers/media/v4l2-core/videobuf-dma-contig.c 	mem->size = PAGE_ALIGN(vb->size + offset);
mem               178 drivers/media/v4l2-core/videobuf-dma-contig.c 	if ((untagged_baddr + mem->size) > vma->vm_end)
mem               185 drivers/media/v4l2-core/videobuf-dma-contig.c 	while (pages_done < (mem->size >> PAGE_SHIFT)) {
mem               191 drivers/media/v4l2-core/videobuf-dma-contig.c 			mem->dma_handle = (this_pfn << PAGE_SHIFT) + offset;
mem               211 drivers/media/v4l2-core/videobuf-dma-contig.c 	struct videobuf_dma_contig_memory *mem;
mem               214 drivers/media/v4l2-core/videobuf-dma-contig.c 	vb = kzalloc(size + sizeof(*mem), GFP_KERNEL);
mem               217 drivers/media/v4l2-core/videobuf-dma-contig.c 		mem = vb->priv;
mem               218 drivers/media/v4l2-core/videobuf-dma-contig.c 		mem->magic = MAGIC_DC_MEM;
mem               226 drivers/media/v4l2-core/videobuf-dma-contig.c 	struct videobuf_dma_contig_memory *mem = buf->priv;
mem               228 drivers/media/v4l2-core/videobuf-dma-contig.c 	BUG_ON(!mem);
mem               229 drivers/media/v4l2-core/videobuf-dma-contig.c 	MAGIC_CHECK(mem->magic, MAGIC_DC_MEM);
mem               231 drivers/media/v4l2-core/videobuf-dma-contig.c 	return mem->vaddr;
mem               238 drivers/media/v4l2-core/videobuf-dma-contig.c 	struct videobuf_dma_contig_memory *mem = vb->priv;
mem               240 drivers/media/v4l2-core/videobuf-dma-contig.c 	BUG_ON(!mem);
mem               241 drivers/media/v4l2-core/videobuf-dma-contig.c 	MAGIC_CHECK(mem->magic, MAGIC_DC_MEM);
mem               248 drivers/media/v4l2-core/videobuf-dma-contig.c 		if (!mem->vaddr) {
mem               258 drivers/media/v4l2-core/videobuf-dma-contig.c 			return videobuf_dma_contig_user_get(mem, vb);
mem               261 drivers/media/v4l2-core/videobuf-dma-contig.c 		if (__videobuf_dc_alloc(q->dev, mem, PAGE_ALIGN(vb->size),
mem               278 drivers/media/v4l2-core/videobuf-dma-contig.c 	struct videobuf_dma_contig_memory *mem;
mem               294 drivers/media/v4l2-core/videobuf-dma-contig.c 	mem = buf->priv;
mem               295 drivers/media/v4l2-core/videobuf-dma-contig.c 	BUG_ON(!mem);
mem               296 drivers/media/v4l2-core/videobuf-dma-contig.c 	MAGIC_CHECK(mem->magic, MAGIC_DC_MEM);
mem               298 drivers/media/v4l2-core/videobuf-dma-contig.c 	if (__videobuf_dc_alloc(q->dev, mem, PAGE_ALIGN(buf->bsize),
mem               313 drivers/media/v4l2-core/videobuf-dma-contig.c 	retval = vm_iomap_memory(vma, mem->dma_handle, mem->size);
mem               317 drivers/media/v4l2-core/videobuf-dma-contig.c 		dma_free_coherent(q->dev, mem->size,
mem               318 drivers/media/v4l2-core/videobuf-dma-contig.c 				  mem->vaddr, mem->dma_handle);
mem               364 drivers/media/v4l2-core/videobuf-dma-contig.c 	struct videobuf_dma_contig_memory *mem = buf->priv;
mem               366 drivers/media/v4l2-core/videobuf-dma-contig.c 	BUG_ON(!mem);
mem               367 drivers/media/v4l2-core/videobuf-dma-contig.c 	MAGIC_CHECK(mem->magic, MAGIC_DC_MEM);
mem               369 drivers/media/v4l2-core/videobuf-dma-contig.c 	return mem->dma_handle;
mem               376 drivers/media/v4l2-core/videobuf-dma-contig.c 	struct videobuf_dma_contig_memory *mem = buf->priv;
mem               387 drivers/media/v4l2-core/videobuf-dma-contig.c 	if (!mem)
mem               390 drivers/media/v4l2-core/videobuf-dma-contig.c 	MAGIC_CHECK(mem->magic, MAGIC_DC_MEM);
mem               394 drivers/media/v4l2-core/videobuf-dma-contig.c 		videobuf_dma_contig_user_put(mem);
mem               399 drivers/media/v4l2-core/videobuf-dma-contig.c 	if (mem->vaddr) {
mem               400 drivers/media/v4l2-core/videobuf-dma-contig.c 		__videobuf_dc_free(q->dev, mem);
mem               401 drivers/media/v4l2-core/videobuf-dma-contig.c 		mem->vaddr = NULL;
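
Taken together, the videobuf-dma-contig lines above describe a thin wrapper around the coherent DMA API. The sketch below reassembles the two helpers from the lines shown; only the return statements and closing braces are inferred, and the memory descriptor is assumed to carry just the fields the listing uses (vaddr, dma_handle, size, magic).

	static int __videobuf_dc_alloc(struct device *dev,
				       struct videobuf_dma_contig_memory *mem,
				       unsigned long size, gfp_t flags)
	{
		mem->size = size;
		mem->vaddr = dma_alloc_coherent(dev, mem->size,
						&mem->dma_handle, flags);
		if (!mem->vaddr) {
			dev_err(dev, "memory alloc size %ld failed\n", mem->size);
			return -ENOMEM;
		}

		dev_dbg(dev, "dma mapped data is at %p (%ld)\n", mem->vaddr, mem->size);
		return 0;
	}

	static void __videobuf_dc_free(struct device *dev,
				       struct videobuf_dma_contig_memory *mem)
	{
		dma_free_coherent(dev, mem->size, mem->vaddr, mem->dma_handle);
		mem->vaddr = NULL;
	}
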
mem               136 drivers/media/v4l2-core/videobuf-dma-sg.c 	struct videobuf_dma_sg_memory *mem = buf->priv;
mem               137 drivers/media/v4l2-core/videobuf-dma-sg.c 	BUG_ON(!mem);
mem               139 drivers/media/v4l2-core/videobuf-dma-sg.c 	MAGIC_CHECK(mem->magic, MAGIC_SG_MEM);
mem               141 drivers/media/v4l2-core/videobuf-dma-sg.c 	return &mem->dma;
mem               401 drivers/media/v4l2-core/videobuf-dma-sg.c 	struct videobuf_dma_sg_memory *mem;
mem               414 drivers/media/v4l2-core/videobuf-dma-sg.c 			mem = q->bufs[i]->priv;
mem               415 drivers/media/v4l2-core/videobuf-dma-sg.c 			if (!mem)
mem               418 drivers/media/v4l2-core/videobuf-dma-sg.c 			MAGIC_CHECK(mem->magic, MAGIC_SG_MEM);
mem               473 drivers/media/v4l2-core/videobuf-dma-sg.c 	struct videobuf_dma_sg_memory *mem;
mem               476 drivers/media/v4l2-core/videobuf-dma-sg.c 	vb = kzalloc(size + sizeof(*mem), GFP_KERNEL);
mem               480 drivers/media/v4l2-core/videobuf-dma-sg.c 	mem = vb->priv = ((char *)vb) + size;
mem               481 drivers/media/v4l2-core/videobuf-dma-sg.c 	mem->magic = MAGIC_SG_MEM;
mem               483 drivers/media/v4l2-core/videobuf-dma-sg.c 	videobuf_dma_init(&mem->dma);
mem               487 drivers/media/v4l2-core/videobuf-dma-sg.c 		mem, (long)sizeof(*mem));
mem               494 drivers/media/v4l2-core/videobuf-dma-sg.c 	struct videobuf_dma_sg_memory *mem = buf->priv;
mem               495 drivers/media/v4l2-core/videobuf-dma-sg.c 	BUG_ON(!mem);
mem               497 drivers/media/v4l2-core/videobuf-dma-sg.c 	MAGIC_CHECK(mem->magic, MAGIC_SG_MEM);
mem               499 drivers/media/v4l2-core/videobuf-dma-sg.c 	return mem->dma.vaddr;
mem               508 drivers/media/v4l2-core/videobuf-dma-sg.c 	struct videobuf_dma_sg_memory *mem = vb->priv;
mem               509 drivers/media/v4l2-core/videobuf-dma-sg.c 	BUG_ON(!mem);
mem               511 drivers/media/v4l2-core/videobuf-dma-sg.c 	MAGIC_CHECK(mem->magic, MAGIC_SG_MEM);
mem               513 drivers/media/v4l2-core/videobuf-dma-sg.c 	if (!mem->dma.dev)
mem               514 drivers/media/v4l2-core/videobuf-dma-sg.c 		mem->dma.dev = q->dev;
mem               516 drivers/media/v4l2-core/videobuf-dma-sg.c 		WARN_ON(mem->dma.dev != q->dev);
mem               524 drivers/media/v4l2-core/videobuf-dma-sg.c 			err = videobuf_dma_init_kernel(&mem->dma,
mem               531 drivers/media/v4l2-core/videobuf-dma-sg.c 			err = videobuf_dma_init_user(&mem->dma,
mem               542 drivers/media/v4l2-core/videobuf-dma-sg.c 			err = videobuf_dma_init_user_locked(&mem->dma,
mem               560 drivers/media/v4l2-core/videobuf-dma-sg.c 		err = videobuf_dma_init_overlay(&mem->dma, DMA_FROM_DEVICE,
mem               568 drivers/media/v4l2-core/videobuf-dma-sg.c 	err = videobuf_dma_map(q->dev, &mem->dma);
mem               578 drivers/media/v4l2-core/videobuf-dma-sg.c 	struct videobuf_dma_sg_memory *mem = buf->priv;
mem               579 drivers/media/v4l2-core/videobuf-dma-sg.c 	BUG_ON(!mem || !mem->dma.sglen);
mem               581 drivers/media/v4l2-core/videobuf-dma-sg.c 	MAGIC_CHECK(mem->magic, MAGIC_SG_MEM);
mem               582 drivers/media/v4l2-core/videobuf-dma-sg.c 	MAGIC_CHECK(mem->dma.magic, MAGIC_DMABUF);
mem               584 drivers/media/v4l2-core/videobuf-dma-sg.c 	dma_sync_sg_for_cpu(q->dev, mem->dma.sglist,
mem               585 drivers/media/v4l2-core/videobuf-dma-sg.c 			    mem->dma.nr_pages, mem->dma.direction);
mem               594 drivers/media/v4l2-core/videobuf-dma-sg.c 	struct videobuf_dma_sg_memory *mem = buf->priv;
mem               601 drivers/media/v4l2-core/videobuf-dma-sg.c 	BUG_ON(!mem);
mem               602 drivers/media/v4l2-core/videobuf-dma-sg.c 	MAGIC_CHECK(mem->magic, MAGIC_SG_MEM);
mem                72 drivers/media/v4l2-core/videobuf-vmalloc.c 		struct videobuf_vmalloc_memory *mem;
mem                88 drivers/media/v4l2-core/videobuf-vmalloc.c 			mem = q->bufs[i]->priv;
mem                89 drivers/media/v4l2-core/videobuf-vmalloc.c 			if (mem) {
mem                96 drivers/media/v4l2-core/videobuf-vmalloc.c 				MAGIC_CHECK(mem->magic, MAGIC_VMAL_MEM);
mem               102 drivers/media/v4l2-core/videobuf-vmalloc.c 					__func__, i, mem->vaddr);
mem               104 drivers/media/v4l2-core/videobuf-vmalloc.c 				vfree(mem->vaddr);
mem               105 drivers/media/v4l2-core/videobuf-vmalloc.c 				mem->vaddr = NULL;
mem               137 drivers/media/v4l2-core/videobuf-vmalloc.c 	struct videobuf_vmalloc_memory *mem;
mem               140 drivers/media/v4l2-core/videobuf-vmalloc.c 	vb = kzalloc(size + sizeof(*mem), GFP_KERNEL);
mem               144 drivers/media/v4l2-core/videobuf-vmalloc.c 	mem = vb->priv = ((char *)vb) + size;
mem               145 drivers/media/v4l2-core/videobuf-vmalloc.c 	mem->magic = MAGIC_VMAL_MEM;
mem               149 drivers/media/v4l2-core/videobuf-vmalloc.c 		mem, (long)sizeof(*mem));
mem               158 drivers/media/v4l2-core/videobuf-vmalloc.c 	struct videobuf_vmalloc_memory *mem = vb->priv;
mem               161 drivers/media/v4l2-core/videobuf-vmalloc.c 	BUG_ON(!mem);
mem               163 drivers/media/v4l2-core/videobuf-vmalloc.c 	MAGIC_CHECK(mem->magic, MAGIC_VMAL_MEM);
mem               170 drivers/media/v4l2-core/videobuf-vmalloc.c 		if (!mem->vaddr) {
mem               189 drivers/media/v4l2-core/videobuf-vmalloc.c 		mem->vaddr = vmalloc_user(pages);
mem               190 drivers/media/v4l2-core/videobuf-vmalloc.c 		if (!mem->vaddr) {
mem               195 drivers/media/v4l2-core/videobuf-vmalloc.c 			mem->vaddr, pages);
mem               213 drivers/media/v4l2-core/videobuf-vmalloc.c 	struct videobuf_vmalloc_memory *mem;
mem               229 drivers/media/v4l2-core/videobuf-vmalloc.c 	mem = buf->priv;
mem               230 drivers/media/v4l2-core/videobuf-vmalloc.c 	BUG_ON(!mem);
mem               231 drivers/media/v4l2-core/videobuf-vmalloc.c 	MAGIC_CHECK(mem->magic, MAGIC_VMAL_MEM);
mem               234 drivers/media/v4l2-core/videobuf-vmalloc.c 	mem->vaddr = vmalloc_user(pages);
mem               235 drivers/media/v4l2-core/videobuf-vmalloc.c 	if (!mem->vaddr) {
mem               239 drivers/media/v4l2-core/videobuf-vmalloc.c 	dprintk(1, "vmalloc is at addr %p (%d pages)\n", mem->vaddr, pages);
mem               242 drivers/media/v4l2-core/videobuf-vmalloc.c 	retval = remap_vmalloc_range(vma, mem->vaddr, 0);
mem               245 drivers/media/v4l2-core/videobuf-vmalloc.c 		vfree(mem->vaddr);
mem               263 drivers/media/v4l2-core/videobuf-vmalloc.c 	mem = NULL;
mem               294 drivers/media/v4l2-core/videobuf-vmalloc.c 	struct videobuf_vmalloc_memory *mem = buf->priv;
mem               295 drivers/media/v4l2-core/videobuf-vmalloc.c 	BUG_ON(!mem);
mem               296 drivers/media/v4l2-core/videobuf-vmalloc.c 	MAGIC_CHECK(mem->magic, MAGIC_VMAL_MEM);
mem               298 drivers/media/v4l2-core/videobuf-vmalloc.c 	return mem->vaddr;
mem               304 drivers/media/v4l2-core/videobuf-vmalloc.c 	struct videobuf_vmalloc_memory *mem = buf->priv;
mem               315 drivers/media/v4l2-core/videobuf-vmalloc.c 	if (!mem)
mem               318 drivers/media/v4l2-core/videobuf-vmalloc.c 	MAGIC_CHECK(mem->magic, MAGIC_VMAL_MEM);
mem               320 drivers/media/v4l2-core/videobuf-vmalloc.c 	vfree(mem->vaddr);
mem               321 drivers/media/v4l2-core/videobuf-vmalloc.c 	mem->vaddr = NULL;
mem               550 drivers/memory/brcmstb_dpfe.c static int __write_firmware(u32 __iomem *mem, const u32 *fw,
mem               560 drivers/memory/brcmstb_dpfe.c 		writel_relaxed(0, mem + i);
mem               565 drivers/memory/brcmstb_dpfe.c 			writel_relaxed(be32_to_cpu(fw[i]), mem + i);
mem               568 drivers/memory/brcmstb_dpfe.c 			writel_relaxed(le32_to_cpu(fw[i]), mem + i);
mem               201 drivers/memory/omap-gpmc.c 	struct resource mem;
mem               910 drivers/memory/omap-gpmc.c 	struct resource *res = &gpmc->mem;
mem               926 drivers/memory/omap-gpmc.c 	struct resource *res = &gpmc->mem;
mem               984 drivers/memory/omap-gpmc.c 	struct resource *res = &gpmc->mem;
mem              1030 drivers/memory/omap-gpmc.c 	struct resource *res = &gpmc->mem;
mem              1644 drivers/message/fusion/mptbase.c 	u8		__iomem *mem;
mem              1723 drivers/message/fusion/mptbase.c 	mem = NULL;
mem              1726 drivers/message/fusion/mptbase.c 	mem = ioremap(mem_phys, msize);
mem              1727 drivers/message/fusion/mptbase.c 	if (mem == NULL) {
mem              1733 drivers/message/fusion/mptbase.c 	ioc->memmap = mem;
mem              1735 drivers/message/fusion/mptbase.c 	    ioc->name, mem, (unsigned long long)mem_phys));
mem              1738 drivers/message/fusion/mptbase.c 	ioc->chip = (SYSIF_REGS __iomem *)mem;
mem              4327 drivers/message/fusion/mptbase.c 	u8		*mem;
mem              4336 drivers/message/fusion/mptbase.c 		mem = kmalloc(sz, GFP_ATOMIC);
mem              4337 drivers/message/fusion/mptbase.c 		if (mem == NULL)
mem              4340 drivers/message/fusion/mptbase.c 		ioc->ReqToChain = (int *) mem;
mem              4342 drivers/message/fusion/mptbase.c 			 	ioc->name, mem, sz));
mem              4343 drivers/message/fusion/mptbase.c 		mem = kmalloc(sz, GFP_ATOMIC);
mem              4344 drivers/message/fusion/mptbase.c 		if (mem == NULL)
mem              4347 drivers/message/fusion/mptbase.c 		ioc->RequestNB = (int *) mem;
mem              4349 drivers/message/fusion/mptbase.c 			 	ioc->name, mem, sz));
mem              4410 drivers/message/fusion/mptbase.c 		mem = kmalloc(sz, GFP_ATOMIC);
mem              4411 drivers/message/fusion/mptbase.c 		if (mem == NULL)
mem              4414 drivers/message/fusion/mptbase.c 		ioc->ChainToChain = (int *) mem;
mem              4416 drivers/message/fusion/mptbase.c 			 	ioc->name, mem, sz));
mem              4418 drivers/message/fusion/mptbase.c 		mem = (u8 *) ioc->ChainToChain;
mem              4420 drivers/message/fusion/mptbase.c 	memset(mem, 0xFF, sz);
mem              4441 drivers/message/fusion/mptbase.c 	u8 *mem;
mem              4500 drivers/message/fusion/mptbase.c 		mem = pci_alloc_consistent(ioc->pcidev, total_size, &alloc_dma);
mem              4501 drivers/message/fusion/mptbase.c 		if (mem == NULL) {
mem              4508 drivers/message/fusion/mptbase.c 			 	ioc->name, mem, (void *)(ulong)alloc_dma, total_size, total_size));
mem              4510 drivers/message/fusion/mptbase.c 		memset(mem, 0, total_size);
mem              4512 drivers/message/fusion/mptbase.c 		ioc->alloc = mem;
mem              4515 drivers/message/fusion/mptbase.c 		ioc->reply_frames = (MPT_FRAME_HDR *) mem;
mem              4522 drivers/message/fusion/mptbase.c 		mem += reply_sz;
mem              4526 drivers/message/fusion/mptbase.c 		ioc->req_frames = (MPT_FRAME_HDR *) mem;
mem              4530 drivers/message/fusion/mptbase.c 			 	ioc->name, mem, (void *)(ulong)alloc_dma));
mem              4536 drivers/message/fusion/mptbase.c 			mem += ioc->req_sz;
mem              4539 drivers/message/fusion/mptbase.c 		ioc->ChainBuffer = mem;
mem              4552 drivers/message/fusion/mptbase.c 		mem = (u8 *)ioc->ChainBuffer;
mem              4554 drivers/message/fusion/mptbase.c 			mf = (MPT_FRAME_HDR *) mem;
mem              4556 drivers/message/fusion/mptbase.c 			mem += ioc->req_sz;
mem              4562 drivers/message/fusion/mptbase.c 		mem = (u8 *) ioc->req_frames;
mem              4567 drivers/message/fusion/mptbase.c 			mf = (MPT_FRAME_HDR *) mem;
mem              4572 drivers/message/fusion/mptbase.c 			mem += ioc->req_sz;
mem              5375 drivers/message/fusion/mptbase.c 		u8	*mem;
mem              5377 drivers/message/fusion/mptbase.c 		mem = kmalloc(sz, GFP_ATOMIC);
mem              5378 drivers/message/fusion/mptbase.c 		if (mem == NULL)
mem              5381 drivers/message/fusion/mptbase.c 		ioc->spi_data.nvram = (int *) mem;
mem              5959 drivers/message/fusion/mptbase.c 	u8			*mem;
mem              6004 drivers/message/fusion/mptbase.c 	mem = kmemdup(pIoc2, iocpage2sz, GFP_KERNEL);
mem              6005 drivers/message/fusion/mptbase.c 	if (!mem) {
mem              6010 drivers/message/fusion/mptbase.c 	ioc->raid_data.pIocPg2 = (IOCPage2_t *) mem;
mem              6029 drivers/message/fusion/mptbase.c 	u8			*mem;
mem              6072 drivers/message/fusion/mptbase.c 		mem = kmalloc(iocpage3sz, GFP_KERNEL);
mem              6073 drivers/message/fusion/mptbase.c 		if (mem) {
mem              6074 drivers/message/fusion/mptbase.c 			memcpy(mem, (u8 *)pIoc3, iocpage3sz);
mem              6075 drivers/message/fusion/mptbase.c 			ioc->raid_data.pIocPg3 = (IOCPage3_t *) mem;
mem               877 drivers/mfd/asic3.c 				  struct resource *mem)
mem               915 drivers/mfd/asic3.c 			&asic3_cell_ds1wm, 1, mem, asic->irq_base, NULL);
mem               956 drivers/mfd/asic3.c 	struct resource *mem;
mem               969 drivers/mfd/asic3.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               970 drivers/mfd/asic3.c 	if (!mem) {
mem               975 drivers/mfd/asic3.c 	asic->mapping = ioremap(mem->start, resource_size(mem));
mem               984 drivers/mfd/asic3.c 	asic->bus_shift = 2 - (resource_size(mem) >> 12);
mem              1017 drivers/mfd/asic3.c 	asic3_mfd_probe(pdev, pdata, mem);
mem               115 drivers/mfd/intel-lpss-acpi.c 	info->mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem                35 drivers/mfd/intel-lpss-pci.c 	info->mem = &pdev->resource[0];
mem               246 drivers/mfd/intel-lpss.c 	resource_size_t addr = lpss->info->mem->start;
mem               380 drivers/mfd/intel-lpss.c 	if (!info || !info->mem || info->irq <= 0)
mem               387 drivers/mfd/intel-lpss.c 	lpss->priv = devm_ioremap_uc(dev, info->mem->start + LPSS_PRIV_OFFSET,
mem               422 drivers/mfd/intel-lpss.c 				      1, info->mem, info->irq, NULL);
mem               429 drivers/mfd/intel-lpss.c 			      1, info->mem, info->irq, NULL);
mem                21 drivers/mfd/intel-lpss.h 	struct resource *mem;
mem                97 drivers/mfd/sun4i-gpadc.c 	struct resource *mem;
mem               128 drivers/mfd/sun4i-gpadc.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               129 drivers/mfd/sun4i-gpadc.c 	dev->base = devm_ioremap_resource(&pdev->dev, mem);
mem               158 drivers/mfd/vexpress-sysreg.c 	struct resource *mem;
mem               164 drivers/mfd/vexpress-sysreg.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               165 drivers/mfd/vexpress-sysreg.c 	if (!mem)
mem               168 drivers/mfd/vexpress-sysreg.c 	base = devm_ioremap(&pdev->dev, mem->start, resource_size(mem));
mem               202 drivers/mfd/vexpress-sysreg.c 			ARRAY_SIZE(vexpress_sysreg_cells), mem, 0, NULL);
mem               140 drivers/misc/pvpanic.c 	struct resource *mem;
mem               142 drivers/misc/pvpanic.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               143 drivers/misc/pvpanic.c 	if (!mem)
mem               146 drivers/misc/pvpanic.c 	base = devm_ioremap_resource(&pdev->dev, mem);
mem                83 drivers/mmc/core/mmc_test.c 	struct mmc_test_mem *mem;
mem               316 drivers/mmc/core/mmc_test.c static void mmc_test_free_mem(struct mmc_test_mem *mem)
mem               318 drivers/mmc/core/mmc_test.c 	if (!mem)
mem               320 drivers/mmc/core/mmc_test.c 	while (mem->cnt--)
mem               321 drivers/mmc/core/mmc_test.c 		__free_pages(mem->arr[mem->cnt].page,
mem               322 drivers/mmc/core/mmc_test.c 			     mem->arr[mem->cnt].order);
mem               323 drivers/mmc/core/mmc_test.c 	kfree(mem->arr);
mem               324 drivers/mmc/core/mmc_test.c 	kfree(mem);
mem               343 drivers/mmc/core/mmc_test.c 	struct mmc_test_mem *mem;
mem               356 drivers/mmc/core/mmc_test.c 	mem = kzalloc(sizeof(*mem), GFP_KERNEL);
mem               357 drivers/mmc/core/mmc_test.c 	if (!mem)
mem               360 drivers/mmc/core/mmc_test.c 	mem->arr = kcalloc(max_segs, sizeof(*mem->arr), GFP_KERNEL);
mem               361 drivers/mmc/core/mmc_test.c 	if (!mem->arr)
mem               382 drivers/mmc/core/mmc_test.c 		mem->arr[mem->cnt].page = page;
mem               383 drivers/mmc/core/mmc_test.c 		mem->arr[mem->cnt].order = order;
mem               384 drivers/mmc/core/mmc_test.c 		mem->cnt += 1;
mem               389 drivers/mmc/core/mmc_test.c 		if (mem->cnt >= max_segs) {
mem               396 drivers/mmc/core/mmc_test.c 	return mem;
mem               399 drivers/mmc/core/mmc_test.c 	mmc_test_free_mem(mem);
mem               407 drivers/mmc/core/mmc_test.c static int mmc_test_map_sg(struct mmc_test_mem *mem, unsigned long size,
mem               422 drivers/mmc/core/mmc_test.c 		for (i = 0; i < mem->cnt; i++) {
mem               423 drivers/mmc/core/mmc_test.c 			unsigned long len = PAGE_SIZE << mem->arr[i].order;
mem               437 drivers/mmc/core/mmc_test.c 			sg_set_page(sg, mem->arr[i].page, len, 0);
mem               458 drivers/mmc/core/mmc_test.c static int mmc_test_map_sg_max_scatter(struct mmc_test_mem *mem,
mem               466 drivers/mmc/core/mmc_test.c 	unsigned int i = mem->cnt, cnt;
mem               474 drivers/mmc/core/mmc_test.c 		base = page_address(mem->arr[--i].page);
mem               475 drivers/mmc/core/mmc_test.c 		cnt = 1 << mem->arr[i].order;
mem               497 drivers/mmc/core/mmc_test.c 			i = mem->cnt;
mem              1407 drivers/mmc/core/mmc_test.c 		err = mmc_test_map_sg_max_scatter(t->mem, sz, t->sg,
mem              1411 drivers/mmc/core/mmc_test.c 		err = mmc_test_map_sg(t->mem, sz, t->sg, 1, t->max_segs,
mem              1528 drivers/mmc/core/mmc_test.c 	mmc_test_free_mem(t->mem);
mem              1576 drivers/mmc/core/mmc_test.c 	t->mem = mmc_test_alloc_mem(min_sz, t->max_tfr, t->max_segs,
mem              1578 drivers/mmc/core/mmc_test.c 	if (!t->mem)
mem              1195 drivers/mmc/host/davinci_mmc.c 	struct resource *r, *mem = NULL;
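
The mmc_test entries above track test buffers as an array of page/order pairs plus a count; freeing them is the clearest illustration of that layout. The sketch below is reassembled from the lines shown (only the closing braces are added); the struct definition itself is not in the listing and is implied by the usage.

	static void mmc_test_free_mem(struct mmc_test_mem *mem)
	{
		if (!mem)
			return;

		while (mem->cnt--)
			__free_pages(mem->arr[mem->cnt].page,
				     mem->arr[mem->cnt].order);

		kfree(mem->arr);
		kfree(mem);
	}
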
mem              1208 drivers/mmc/host/davinci_mmc.c 	mem = devm_request_mem_region(&pdev->dev, r->start, mem_size,
mem              1210 drivers/mmc/host/davinci_mmc.c 	if (!mem)
mem              1220 drivers/mmc/host/davinci_mmc.c 	host->mem_res = mem;
mem              1221 drivers/mmc/host/davinci_mmc.c 	host->base = devm_ioremap(&pdev->dev, mem->start, mem_size);
mem              1326 drivers/mmc/host/davinci_mmc.c 	rename_region(mem, mmc_hostname(mmc));
mem              1075 drivers/mmc/host/s3cmci.c 		.src_addr = host->mem->start + host->sdidata,
mem              1076 drivers/mmc/host/s3cmci.c 		.dst_addr = host->mem->start + host->sdidata,
mem              1590 drivers/mmc/host/s3cmci.c 	host->mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem              1591 drivers/mmc/host/s3cmci.c 	if (!host->mem) {
mem              1599 drivers/mmc/host/s3cmci.c 	host->mem = request_mem_region(host->mem->start,
mem              1600 drivers/mmc/host/s3cmci.c 				       resource_size(host->mem), pdev->name);
mem              1602 drivers/mmc/host/s3cmci.c 	if (!host->mem) {
mem              1608 drivers/mmc/host/s3cmci.c 	host->base = ioremap(host->mem->start, resource_size(host->mem));
mem              1727 drivers/mmc/host/s3cmci.c 	release_mem_region(host->mem->start, resource_size(host->mem));
mem              1777 drivers/mmc/host/s3cmci.c 	release_mem_region(host->mem->start, resource_size(host->mem));
mem                21 drivers/mmc/host/s3cmci.h 	struct resource		*mem;
mem               392 drivers/mtd/nand/raw/lpc32xx_mlc.c static int lpc32xx_xmit_dma(struct mtd_info *mtd, void *mem, int len,
mem               401 drivers/mtd/nand/raw/lpc32xx_mlc.c 	sg_init_one(&host->sgl, mem, len);
mem               428 drivers/mtd/nand/raw/lpc32xx_slc.c 			    void *mem, int len, enum dma_transfer_direction dir)
mem               450 drivers/mtd/nand/raw/lpc32xx_slc.c 	sg_init_one(&host->sgl, mem, len);
mem               969 drivers/mtd/nand/raw/nandsim.c 	size_t mem;
mem               972 drivers/mtd/nand/raw/nandsim.c 	mem = wear_eb_count * sizeof(unsigned long);
mem               973 drivers/mtd/nand/raw/nandsim.c 	if (mem / sizeof(unsigned long) != wear_eb_count) {
mem               977 drivers/mtd/nand/raw/nandsim.c 	erase_block_wear = kzalloc(mem, GFP_KERNEL);
mem              1080 drivers/mtd/nand/spi/core.c static int spinand_probe(struct spi_mem *mem)
mem              1086 drivers/mtd/nand/spi/core.c 	spinand = devm_kzalloc(&mem->spi->dev, sizeof(*spinand),
mem              1091 drivers/mtd/nand/spi/core.c 	spinand->spimem = mem;
mem              1092 drivers/mtd/nand/spi/core.c 	spi_mem_set_drvdata(mem, spinand);
mem              1093 drivers/mtd/nand/spi/core.c 	spinand_set_of_node(spinand, mem->spi->dev.of_node);
mem              1096 drivers/mtd/nand/spi/core.c 	mtd->dev.parent = &mem->spi->dev;
mem              1114 drivers/mtd/nand/spi/core.c static int spinand_remove(struct spi_mem *mem)
mem              1120 drivers/mtd/nand/spi/core.c 	spinand = spi_mem_get_drvdata(mem);
mem                19 drivers/mtd/spi-nor/intel-spi-platform.c 	struct resource *mem;
mem                25 drivers/mtd/spi-nor/intel-spi-platform.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem                26 drivers/mtd/spi-nor/intel-spi-platform.c 	ispi = intel_spi_probe(&pdev->dev, mem, info);
mem               868 drivers/mtd/spi-nor/intel-spi.c 	struct resource *mem, const struct intel_spi_boardinfo *info)
mem               879 drivers/mtd/spi-nor/intel-spi.c 	if (!info || !mem)
mem               886 drivers/mtd/spi-nor/intel-spi.c 	ispi->base = devm_ioremap_resource(dev, mem);
mem                18 drivers/mtd/spi-nor/intel-spi.h 	struct resource *mem, const struct intel_spi_boardinfo *info);
mem                66 drivers/net/appletalk/ltpc.h 	struct lt_mem mem;
mem               257 drivers/net/can/c_can/c_can_platform.c 	struct resource *mem;
mem               287 drivers/net/can/c_can/c_can_platform.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               288 drivers/net/can/c_can/c_can_platform.c 	addr = devm_ioremap_resource(&pdev->dev, mem);
mem               305 drivers/net/can/c_can/c_can_platform.c 		switch (mem->flags & IORESOURCE_MEM_TYPE_MASK) {
mem                69 drivers/net/can/cc770/cc770_isa.c static unsigned long mem[MAXDEV];
mem                80 drivers/net/can/cc770/cc770_isa.c module_param_hw_array(mem, ulong, iomem, NULL, 0444);
mem                81 drivers/net/can/cc770/cc770_isa.c MODULE_PARM_DESC(mem, "I/O memory address");
mem               172 drivers/net/can/cc770/cc770_isa.c 		idx, port[idx], mem[idx], irq[idx]);
mem               173 drivers/net/can/cc770/cc770_isa.c 	if (mem[idx]) {
mem               174 drivers/net/can/cc770/cc770_isa.c 		if (!request_mem_region(mem[idx], iosize, KBUILD_MODNAME)) {
mem               178 drivers/net/can/cc770/cc770_isa.c 		base = ioremap_nocache(mem[idx], iosize);
mem               202 drivers/net/can/cc770/cc770_isa.c 	if (mem[idx]) {
mem               204 drivers/net/can/cc770/cc770_isa.c 		dev->base_addr = mem[idx];
mem               275 drivers/net/can/cc770/cc770_isa.c 	if (mem[idx])
mem               278 drivers/net/can/cc770/cc770_isa.c 	if (mem[idx])
mem               279 drivers/net/can/cc770/cc770_isa.c 		release_mem_region(mem[idx], iosize);
mem               294 drivers/net/can/cc770/cc770_isa.c 	if (mem[idx]) {
mem               296 drivers/net/can/cc770/cc770_isa.c 		release_mem_region(mem[idx], CC770_IOSIZE);
mem               321 drivers/net/can/cc770/cc770_isa.c 		if ((port[idx] || mem[idx]) && irq[idx]) {
mem               335 drivers/net/can/cc770/cc770_isa.c 				 idx, port[idx], mem[idx], irq[idx]);
mem               336 drivers/net/can/cc770/cc770_isa.c 		} else if (idx == 0 || port[idx] || mem[idx]) {
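
The cc770_isa block above (and the near-identical sja1000_isa block further down) shows the classic ISA driver arrangement: a module-parameter array of I/O memory addresses, with a per-card reserve-and-map step for every non-zero entry. A condensed sketch, assuming the driver's MAXDEV constant and using ioremap_nocache() as the listing does; the cc770_isa_map() helper is illustrative, since the real driver open-codes this inside its probe path.

	static unsigned long mem[MAXDEV];
	module_param_hw_array(mem, ulong, iomem, NULL, 0444);
	MODULE_PARM_DESC(mem, "I/O memory address");

	/* Reserve and map the memory window given on the command line. */
	static void __iomem *cc770_isa_map(int idx, unsigned long iosize)
	{
		void __iomem *base;

		if (!request_mem_region(mem[idx], iosize, KBUILD_MODNAME))
			return NULL;

		base = ioremap_nocache(mem[idx], iosize);
		if (!base)
			release_mem_region(mem[idx], iosize);

		return base;
	}
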
mem               163 drivers/net/can/cc770/cc770_platform.c 	struct resource *mem;
mem               168 drivers/net/can/cc770/cc770_platform.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               170 drivers/net/can/cc770/cc770_platform.c 	if (!mem || irq <= 0)
mem               173 drivers/net/can/cc770/cc770_platform.c 	mem_size = resource_size(mem);
mem               174 drivers/net/can/cc770/cc770_platform.c 	if (!request_mem_region(mem->start, mem_size, pdev->name))
mem               177 drivers/net/can/cc770/cc770_platform.c 	base = ioremap(mem->start, mem_size);
mem               228 drivers/net/can/cc770/cc770_platform.c 	release_mem_region(mem->start, mem_size);
mem               237 drivers/net/can/cc770/cc770_platform.c 	struct resource *mem;
mem               243 drivers/net/can/cc770/cc770_platform.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               244 drivers/net/can/cc770/cc770_platform.c 	release_mem_region(mem->start, resource_size(mem));
mem              1538 drivers/net/can/flexcan.c 	struct resource *mem;
mem              1573 drivers/net/can/flexcan.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem              1578 drivers/net/can/flexcan.c 	regs = devm_ioremap_resource(&pdev->dev, mem);
mem               747 drivers/net/can/rcar/rcar_can.c 	struct resource *mem;
mem               762 drivers/net/can/rcar/rcar_can.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               763 drivers/net/can/rcar/rcar_can.c 	addr = devm_ioremap_resource(&pdev->dev, mem);
mem              1633 drivers/net/can/rcar/rcar_canfd.c 	struct resource *mem;
mem              1707 drivers/net/can/rcar/rcar_canfd.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem              1708 drivers/net/can/rcar/rcar_canfd.c 	addr = devm_ioremap_resource(&pdev->dev, mem);
mem                32 drivers/net/can/sja1000/sja1000_isa.c static unsigned long mem[MAXDEV];
mem                43 drivers/net/can/sja1000/sja1000_isa.c module_param_hw_array(mem, ulong, iomem, NULL, 0444);
mem                44 drivers/net/can/sja1000/sja1000_isa.c MODULE_PARM_DESC(mem, "I/O memory address");
mem               126 drivers/net/can/sja1000/sja1000_isa.c 		idx, port[idx], mem[idx], irq[idx]);
mem               128 drivers/net/can/sja1000/sja1000_isa.c 	if (mem[idx]) {
mem               129 drivers/net/can/sja1000/sja1000_isa.c 		if (!request_mem_region(mem[idx], iosize, DRV_NAME)) {
mem               133 drivers/net/can/sja1000/sja1000_isa.c 		base = ioremap_nocache(mem[idx], iosize);
mem               157 drivers/net/can/sja1000/sja1000_isa.c 	if (mem[idx]) {
mem               159 drivers/net/can/sja1000/sja1000_isa.c 		dev->base_addr = mem[idx];
mem               213 drivers/net/can/sja1000/sja1000_isa.c 	if (mem[idx])
mem               216 drivers/net/can/sja1000/sja1000_isa.c 	if (mem[idx])
mem               217 drivers/net/can/sja1000/sja1000_isa.c 		release_mem_region(mem[idx], iosize);
mem               232 drivers/net/can/sja1000/sja1000_isa.c 	if (mem[idx]) {
mem               234 drivers/net/can/sja1000/sja1000_isa.c 		release_mem_region(mem[idx], SJA1000_IOSIZE);
mem               259 drivers/net/can/sja1000/sja1000_isa.c 		if ((port[idx] || mem[idx]) && irq[idx]) {
mem               273 drivers/net/can/sja1000/sja1000_isa.c 				 DRV_NAME, idx, port[idx], mem[idx], irq[idx]);
mem               274 drivers/net/can/sja1000/sja1000_isa.c 		} else if (idx == 0 || port[idx] || mem[idx]) {
mem                98 drivers/net/can/softing/softing_fw.c 	const uint8_t *mem;
mem               116 drivers/net/can/softing/softing_fw.c 	mem = *pmem;
mem               117 drivers/net/can/softing/softing_fw.c 	*ptype = le16_to_cpup((void *)&mem[0]);
mem               118 drivers/net/can/softing/softing_fw.c 	*paddr = le32_to_cpup((void *)&mem[2]);
mem               119 drivers/net/can/softing/softing_fw.c 	*plen = le16_to_cpup((void *)&mem[6]);
mem               120 drivers/net/can/softing/softing_fw.c 	*pdat = &mem[8];
mem               122 drivers/net/can/softing/softing_fw.c 	end = &mem[8 + *plen];
mem               124 drivers/net/can/softing/softing_fw.c 	for (checksum[1] = 0; mem < end; ++mem)
mem               125 drivers/net/can/softing/softing_fw.c 		checksum[1] += *mem;
mem               138 drivers/net/can/softing/softing_fw.c 	const uint8_t *mem, *end, *dat;
mem               153 drivers/net/can/softing/softing_fw.c 	mem = fw->data;
mem               154 drivers/net/can/softing/softing_fw.c 	end = &mem[fw->size];
mem               156 drivers/net/can/softing/softing_fw.c 	ret = fw_parse(&mem, &type, &addr, &len, &dat);
mem               166 drivers/net/can/softing/softing_fw.c 	while (mem < end) {
mem               167 drivers/net/can/softing/softing_fw.c 		ret = fw_parse(&mem, &type, &addr, &len, &dat);
mem               221 drivers/net/can/softing/softing_fw.c 	const uint8_t *mem, *end, *dat;
mem               237 drivers/net/can/softing/softing_fw.c 	mem = fw->data;
mem               238 drivers/net/can/softing/softing_fw.c 	end = &mem[fw->size];
mem               240 drivers/net/can/softing/softing_fw.c 	ret = fw_parse(&mem, &type, &addr, &len, &dat);
mem               255 drivers/net/can/softing/softing_fw.c 	while (mem < end) {
mem               256 drivers/net/can/softing/softing_fw.c 		ret = fw_parse(&mem, &type, &addr, &len, &dat);
mem               774 drivers/net/can/sun4i_can.c 	struct resource *mem;
mem               794 drivers/net/can/sun4i_can.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               795 drivers/net/can/sun4i_can.c 	addr = devm_ioremap_resource(&pdev->dev, mem);
mem               562 drivers/net/dsa/sja1105/sja1105_static_config.c 	int i, mem = 0;
mem               567 drivers/net/dsa/sja1105/sja1105_static_config.c 		mem += l2_fwd_params->part_spc[i];
mem               569 drivers/net/dsa/sja1105/sja1105_static_config.c 	if (mem > SJA1105_MAX_FRAME_MEMORY)
mem                75 drivers/net/ethernet/8390/8390.h 	void __iomem *mem;
mem               113 drivers/net/ethernet/8390/ax88796.c 	void __iomem *ioaddr = ei_local->mem;
mem               183 drivers/net/ethernet/8390/ax88796.c 	void __iomem *nic_base = ei_local->mem;
mem               227 drivers/net/ethernet/8390/ax88796.c 	void __iomem *nic_base = ei_local->mem;
mem               264 drivers/net/ethernet/8390/ax88796.c 	void __iomem *nic_base = ei_local->mem;
mem               397 drivers/net/ethernet/8390/ax88796.c 	ei_outb(reg_gpoc, ei_local->mem + EI_SHIFT(0x17));
mem               460 drivers/net/ethernet/8390/ax88796.c 	ax->addr_memr = ei_local->mem + AX_MEMR;
mem               608 drivers/net/ethernet/8390/ax88796.c 	u8 reg = ei_inb(ei_local->mem + AX_MEMR);
mem               619 drivers/net/ethernet/8390/ax88796.c 	u8 reg = ei_inb(ei_local->mem + AX_MEMR);
mem               630 drivers/net/ethernet/8390/ax88796.c 	ei_outb(reg, ei_local->mem + AX_MEMR);
mem               655 drivers/net/ethernet/8390/ax88796.c 	void __iomem *ioaddr = ei_local->mem;
mem               678 drivers/net/ethernet/8390/ax88796.c 	void __iomem *ioaddr = ei_local->mem;
mem               734 drivers/net/ethernet/8390/ax88796.c 		ei_outb(ax->plat->dcr_val, ei_local->mem + EN0_DCFG);
mem               745 drivers/net/ethernet/8390/ax88796.c 			ei_local->mem + E8390_CMD); /* 0x61 */
mem               810 drivers/net/ethernet/8390/ax88796.c 	struct resource *mem;
mem               814 drivers/net/ethernet/8390/ax88796.c 	iounmap(ei_local->mem);
mem               815 drivers/net/ethernet/8390/ax88796.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               816 drivers/net/ethernet/8390/ax88796.c 	release_mem_region(mem->start, resource_size(mem));
mem               820 drivers/net/ethernet/8390/ax88796.c 		mem = platform_get_resource(pdev, IORESOURCE_MEM, 1);
mem               821 drivers/net/ethernet/8390/ax88796.c 		release_mem_region(mem->start, resource_size(mem));
mem               842 drivers/net/ethernet/8390/ax88796.c 	struct resource *irq, *mem, *mem2;
mem               874 drivers/net/ethernet/8390/ax88796.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               875 drivers/net/ethernet/8390/ax88796.c 	if (!mem) {
mem               881 drivers/net/ethernet/8390/ax88796.c 	mem_size = resource_size(mem);
mem               895 drivers/net/ethernet/8390/ax88796.c 	if (!request_mem_region(mem->start, mem_size, pdev->name)) {
mem               901 drivers/net/ethernet/8390/ax88796.c 	ei_local->mem = ioremap(mem->start, mem_size);
mem               902 drivers/net/ethernet/8390/ax88796.c 	dev->base_addr = (unsigned long)ei_local->mem;
mem               904 drivers/net/ethernet/8390/ax88796.c 	if (ei_local->mem == NULL) {
mem               905 drivers/net/ethernet/8390/ax88796.c 		dev_err(&pdev->dev, "Cannot ioremap area %pR\n", mem);
mem               934 drivers/net/ethernet/8390/ax88796.c 		ei_local->reg_offset[0x1f] = ax->map2 - ei_local->mem;
mem               952 drivers/net/ethernet/8390/ax88796.c 	iounmap(ei_local->mem);
mem               955 drivers/net/ethernet/8390/ax88796.c 	release_mem_region(mem->start, mem_size);
mem               409 drivers/net/ethernet/8390/mcf8390.c 	struct resource *mem, *irq;
mem               419 drivers/net/ethernet/8390/mcf8390.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               420 drivers/net/ethernet/8390/mcf8390.c 	if (mem == NULL) {
mem               424 drivers/net/ethernet/8390/mcf8390.c 	msize = resource_size(mem);
mem               425 drivers/net/ethernet/8390/mcf8390.c 	if (!request_mem_region(mem->start, msize, pdev->name))
mem               430 drivers/net/ethernet/8390/mcf8390.c 		release_mem_region(mem->start, msize);
mem               438 drivers/net/ethernet/8390/mcf8390.c 	dev->base_addr = mem->start;
mem               442 drivers/net/ethernet/8390/mcf8390.c 		release_mem_region(mem->start, msize);
mem               452 drivers/net/ethernet/8390/mcf8390.c 	struct resource *mem;
mem               455 drivers/net/ethernet/8390/mcf8390.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               456 drivers/net/ethernet/8390/mcf8390.c 	if (mem)
mem               457 drivers/net/ethernet/8390/mcf8390.c 		release_mem_region(mem->start, resource_size(mem));
mem              1358 drivers/net/ethernet/8390/pcnet_cs.c     void __iomem *xfer_start = ei_status.mem + (TX_PAGES<<8)
mem              1372 drivers/net/ethernet/8390/pcnet_cs.c     void __iomem *base = ei_status.mem;
mem              1393 drivers/net/ethernet/8390/pcnet_cs.c     void __iomem *shmem = ei_status.mem + (start_page << 8);
mem              1449 drivers/net/ethernet/8390/pcnet_cs.c     ei_status.mem = info->base + offset;
mem              1451 drivers/net/ethernet/8390/pcnet_cs.c     dev->mem_start = (u_long)ei_status.mem;
mem               298 drivers/net/ethernet/8390/smc-ultra.c 	ei_status.mem = ioremap(dev->mem_start, (ei_status.stop_page - START_PG)*256);
mem               299 drivers/net/ethernet/8390/smc-ultra.c 	if (!ei_status.mem) {
mem               444 drivers/net/ethernet/8390/smc-ultra.c 	void __iomem *hdr_start = ei_status.mem + ((ring_page - START_PG)<<8);
mem               464 drivers/net/ethernet/8390/smc-ultra.c 	void __iomem *xfer_start = ei_status.mem + ring_offset - (START_PG<<8);
mem               474 drivers/net/ethernet/8390/smc-ultra.c 		memcpy_fromio(skb->data + semi_count, ei_status.mem + TX_PAGES * 256, count);
mem               486 drivers/net/ethernet/8390/smc-ultra.c 	void __iomem *shmem = ei_status.mem + ((start_page - START_PG)<<8);
mem               613 drivers/net/ethernet/8390/smc-ultra.c 	iounmap(ei_status.mem);
mem               337 drivers/net/ethernet/8390/wd.c 	ei_status.mem = ioremap(dev->mem_start, ei_status.priv);
mem               338 drivers/net/ethernet/8390/wd.c 	if (!ei_status.mem) {
mem               365 drivers/net/ethernet/8390/wd.c 		iounmap(ei_status.mem);
mem               415 drivers/net/ethernet/8390/wd.c 	void __iomem *hdr_start = ei_status.mem + ((ring_page - WD_START_PG)<<8);
mem               442 drivers/net/ethernet/8390/wd.c 	void __iomem *xfer_start = ei_status.mem + offset;
mem               449 drivers/net/ethernet/8390/wd.c 		memcpy_fromio(skb->data + semi_count, ei_status.mem + TX_PAGES * 256, count);
mem               465 drivers/net/ethernet/8390/wd.c 	void __iomem *shmem = ei_status.mem + ((start_page - WD_START_PG)<<8);
mem               503 drivers/net/ethernet/8390/wd.c static int mem[MAX_WD_CARDS];
mem               508 drivers/net/ethernet/8390/wd.c module_param_hw_array(mem, int, iomem, NULL, 0);
mem               513 drivers/net/ethernet/8390/wd.c MODULE_PARM_DESC(mem, "memory base address(es)(ignored for PureData boards)");
mem               537 drivers/net/ethernet/8390/wd.c 		dev->mem_start = mem[this_dev];
mem               556 drivers/net/ethernet/8390/wd.c 	iounmap(ei_status.mem);
mem               120 drivers/net/ethernet/8390/xsurf100.c 		ei_outw(*(uint16_t *)src, ei_local->mem + NE_DATAPORT);
mem               124 drivers/net/ethernet/8390/xsurf100.c 		ei_outb(*(uint8_t *)src, ei_local->mem + NE_DATAPORT);
mem               146 drivers/net/ethernet/8390/xsurf100.c 		*(uint16_t *)dst = ei_inw(ei_local->mem + NE_DATAPORT);
mem               150 drivers/net/ethernet/8390/xsurf100.c 		*(uint8_t *)dst = ei_inb(ei_local->mem + NE_DATAPORT);
mem               163 drivers/net/ethernet/8390/xsurf100.c 	void __iomem *nic_base = ei_local->mem;
mem               192 drivers/net/ethernet/8390/xsurf100.c 	void __iomem *nic_base = ei_local->mem;
mem               199 drivers/net/ethernet/amd/atarilance.c /* base+0xf */	volatile unsigned char	mem;
mem               221 drivers/net/ethernet/amd/atarilance.c 	struct lance_memory	*mem;
mem               233 drivers/net/ethernet/amd/atarilance.c #define	MEM		lp->mem
mem               544 drivers/net/ethernet/amd/atarilance.c 		i = IO->mem;
mem               600 drivers/net/ethernet/amd/atarilance.c 		i = IO->mem;
mem               152 drivers/net/ethernet/amd/sun3lance.c 	struct lance_memory	*mem;
mem               162 drivers/net/ethernet/amd/sun3lance.c #define	MEM	lp->mem
mem               190 drivers/net/ethernet/amd/xgbe/xgbe-desc.c 	void *mem;
mem               192 drivers/net/ethernet/amd/xgbe/xgbe-desc.c 	mem = kzalloc_node(size, GFP_KERNEL, node);
mem               193 drivers/net/ethernet/amd/xgbe/xgbe-desc.c 	if (!mem)
mem               194 drivers/net/ethernet/amd/xgbe/xgbe-desc.c 		mem = kzalloc(size, GFP_KERNEL);
mem               196 drivers/net/ethernet/amd/xgbe/xgbe-desc.c 	return mem;
mem               202 drivers/net/ethernet/amd/xgbe/xgbe-desc.c 	void *mem;
mem               206 drivers/net/ethernet/amd/xgbe/xgbe-desc.c 	mem = dma_alloc_coherent(dev, size, dma, GFP_KERNEL);
mem               209 drivers/net/ethernet/amd/xgbe/xgbe-desc.c 	if (!mem)
mem               210 drivers/net/ethernet/amd/xgbe/xgbe-desc.c 		mem = dma_alloc_coherent(dev, size, dma, GFP_KERNEL);
mem               212 drivers/net/ethernet/amd/xgbe/xgbe-desc.c 	return mem;
mem               163 drivers/net/ethernet/amd/xgbe/xgbe-drv.c 	void *mem;
mem               165 drivers/net/ethernet/amd/xgbe/xgbe-drv.c 	mem = kzalloc_node(size, GFP_KERNEL, node);
mem               166 drivers/net/ethernet/amd/xgbe/xgbe-drv.c 	if (!mem)
mem               167 drivers/net/ethernet/amd/xgbe/xgbe-drv.c 		mem = kzalloc(size, GFP_KERNEL);
mem               169 drivers/net/ethernet/amd/xgbe/xgbe-drv.c 	return mem;
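The two xgbe excerpts above (xgbe-desc.c and xgbe-drv.c) repeat the same node-aware allocation idiom: try to allocate on the device's NUMA node first and fall back to any node. A minimal sketch of that idiom follows; the helper name is hypothetical, not taken from the driver.

	#include <linux/slab.h>

	/* Allocate zeroed memory, preferring the given NUMA node (illustrative). */
	static void *demo_alloc_node(size_t size, int node)
	{
		void *mem;

		mem = kzalloc_node(size, GFP_KERNEL, node);
		if (!mem)
			mem = kzalloc(size, GFP_KERNEL);

		return mem;
	}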
mem              1103 drivers/net/ethernet/broadcom/cnic.c 	uinfo->mem[0].addr = pci_resource_start(dev->pcidev, 0);
mem              1104 drivers/net/ethernet/broadcom/cnic.c 	uinfo->mem[0].internal_addr = dev->regview;
mem              1105 drivers/net/ethernet/broadcom/cnic.c 	uinfo->mem[0].memtype = UIO_MEM_PHYS;
mem              1108 drivers/net/ethernet/broadcom/cnic.c 		uinfo->mem[0].size = MB_GET_CID_ADDR(TX_TSS_CID +
mem              1110 drivers/net/ethernet/broadcom/cnic.c 		uinfo->mem[1].addr = (unsigned long) cp->status_blk.gen &
mem              1113 drivers/net/ethernet/broadcom/cnic.c 			uinfo->mem[1].size = BNX2_SBLK_MSIX_ALIGN_SIZE * 9;
mem              1115 drivers/net/ethernet/broadcom/cnic.c 			uinfo->mem[1].size = BNX2_SBLK_MSIX_ALIGN_SIZE;
mem              1119 drivers/net/ethernet/broadcom/cnic.c 		uinfo->mem[0].size = pci_resource_len(dev->pcidev, 0);
mem              1121 drivers/net/ethernet/broadcom/cnic.c 		uinfo->mem[1].addr = (unsigned long) cp->bnx2x_def_status_blk &
mem              1123 drivers/net/ethernet/broadcom/cnic.c 		uinfo->mem[1].size = sizeof(*cp->bnx2x_def_status_blk);
mem              1128 drivers/net/ethernet/broadcom/cnic.c 	uinfo->mem[1].memtype = UIO_MEM_LOGICAL;
mem              1130 drivers/net/ethernet/broadcom/cnic.c 	uinfo->mem[2].addr = (unsigned long) udev->l2_ring;
mem              1131 drivers/net/ethernet/broadcom/cnic.c 	uinfo->mem[2].size = udev->l2_ring_size;
mem              1132 drivers/net/ethernet/broadcom/cnic.c 	uinfo->mem[2].memtype = UIO_MEM_LOGICAL;
mem              1134 drivers/net/ethernet/broadcom/cnic.c 	uinfo->mem[3].addr = (unsigned long) udev->l2_buf;
mem              1135 drivers/net/ethernet/broadcom/cnic.c 	uinfo->mem[3].size = udev->l2_buf_size;
mem              1136 drivers/net/ethernet/broadcom/cnic.c 	uinfo->mem[3].memtype = UIO_MEM_LOGICAL;
mem              3351 drivers/net/ethernet/cadence/macb_main.c static void macb_probe_queues(void __iomem *mem,
mem              3367 drivers/net/ethernet/cadence/macb_main.c 	if (!hw_is_gem(mem, native_io))
mem              3371 drivers/net/ethernet/cadence/macb_main.c 	*queue_mask = readl_relaxed(mem + GEM_DCFG6) & 0xff;
mem              4193 drivers/net/ethernet/cadence/macb_main.c 	void __iomem *mem;
mem              4199 drivers/net/ethernet/cadence/macb_main.c 	mem = devm_ioremap_resource(&pdev->dev, regs);
mem              4200 drivers/net/ethernet/cadence/macb_main.c 	if (IS_ERR(mem))
mem              4201 drivers/net/ethernet/cadence/macb_main.c 		return PTR_ERR(mem);
mem              4223 drivers/net/ethernet/cadence/macb_main.c 	native_io = hw_is_native_io(mem);
mem              4225 drivers/net/ethernet/cadence/macb_main.c 	macb_probe_queues(mem, native_io, &queue_mask, &num_queues);
mem              4239 drivers/net/ethernet/cadence/macb_main.c 	bp->regs = mem;
mem              2446 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c 		struct mc7 *mem;
mem              2462 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c 			mem = &adapter->cm;
mem              2464 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c 			mem = &adapter->pmrx;
mem              2466 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c 			mem = &adapter->pmtx;
mem              2489 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c 				t3_mc7_bd_read(mem, t.addr / 8, chunk / 8,
mem               311 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 		struct mc7 *mem;
mem               316 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 			mem = &adapter->cm;
mem               318 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 			mem = &adapter->pmrx;
mem               320 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 			mem = &adapter->pmtx;
mem               325 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 			t3_mc7_bd_read(mem, t->addr / 8, t->len / 8,
mem               115 drivers/net/ethernet/chelsio/cxgb4/cudbg_entity.h 	struct cudbg_mem_desc mem[ARRAY_SIZE(cudbg_region) + 3];
mem               139 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 	memset(meminfo_buff->mem, 0,
mem               141 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 	md  = meminfo_buff->mem;
mem               143 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 	for (i = 0; i < ARRAY_SIZE(meminfo_buff->mem); i++) {
mem               144 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 		meminfo_buff->mem[i].limit = 0;
mem               145 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 		meminfo_buff->mem[i].idx = i;
mem               324 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 	n = md - meminfo_buff->mem;
mem               327 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 	sort(meminfo_buff->mem, n, sizeof(struct cudbg_mem_desc),
mem               798 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 		if (meminfo->mem[i].idx >= ARRAY_SIZE(cudbg_region))
mem               801 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 		if (!(meminfo->mem[i].limit))
mem               802 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 			meminfo->mem[i].limit =
mem               804 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 				meminfo->mem[i + 1].base - 1 : ~0;
mem               806 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 		if (meminfo->mem[i].idx == idx) {
mem               808 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 			if (meminfo->mem[i].base < meminfo->avail[mc].base &&
mem               809 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 			    meminfo->mem[i].limit < meminfo->avail[mc].base)
mem               812 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 			if (meminfo->mem[i].base > meminfo->avail[mc].limit)
mem               815 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c 			memcpy(mem_desc, &meminfo->mem[i],
mem              3065 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c 	unsigned int mem;
mem              3070 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c 	mem = (uintptr_t)file->private_data & 0x7;
mem              3071 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c 	adap = file->private_data - mem;
mem              3083 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c 	unsigned int mem = (uintptr_t)file->private_data & 0x7;
mem              3084 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c 	struct adapter *adap = file->private_data - mem;
mem              3100 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c 	ret = t4_memory_rw(adap, 0, mem, pos, count, data, T4_MEMORY_READ);
mem              3290 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c 		if (meminfo.mem[i].idx >= ARRAY_SIZE(cudbg_region))
mem              3292 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c 		if (!meminfo.mem[i].limit)
mem              3293 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c 			meminfo.mem[i].limit =
mem              3295 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c 				meminfo.mem[i + 1].base - 1 : ~0;
mem              3296 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c 		mem_region_show(seq, cudbg_region[meminfo.mem[i].idx],
mem              3297 drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c 				meminfo.mem[i].base, meminfo.mem[i].limit);
mem               766 drivers/net/ethernet/cirrus/ep93xx_eth.c 	struct resource *mem;
mem               782 drivers/net/ethernet/cirrus/ep93xx_eth.c 		mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               783 drivers/net/ethernet/cirrus/ep93xx_eth.c 		release_mem_region(mem->start, resource_size(mem));
mem               796 drivers/net/ethernet/cirrus/ep93xx_eth.c 	struct resource *mem;
mem               804 drivers/net/ethernet/cirrus/ep93xx_eth.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               806 drivers/net/ethernet/cirrus/ep93xx_eth.c 	if (!mem || irq < 0)
mem               821 drivers/net/ethernet/cirrus/ep93xx_eth.c 	ep->res = request_mem_region(mem->start, resource_size(mem),
mem               829 drivers/net/ethernet/cirrus/ep93xx_eth.c 	ep->base_addr = ioremap(mem->start, resource_size(mem));
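The ep93xx probe/remove excerpts above show the classic three-step platform MMIO setup: fetch the IORESOURCE_MEM resource, reserve the region, then ioremap() it (and release it again on remove). A minimal sketch under those assumptions; the helper name is hypothetical, and newer drivers often collapse the steps into devm_platform_ioremap_resource().

	#include <linux/platform_device.h>
	#include <linux/ioport.h>
	#include <linux/io.h>

	/* Map the first MMIO resource of a platform device (illustrative). */
	static void __iomem *demo_map_regs(struct platform_device *pdev)
	{
		struct resource *mem;

		mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
		if (!mem)
			return NULL;

		if (!request_mem_region(mem->start, resource_size(mem),
					dev_name(&pdev->dev)))
			return NULL;

		return ioremap(mem->start, resource_size(mem));
	}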
mem               795 drivers/net/ethernet/emulex/benet/be_cmds.c 				   struct be_dma_mem *mem)
mem               805 drivers/net/ethernet/emulex/benet/be_cmds.c 	if (mem) {
mem               809 drivers/net/ethernet/emulex/benet/be_cmds.c 		sge->pa_hi = cpu_to_le32(upper_32_bits(mem->dma));
mem               810 drivers/net/ethernet/emulex/benet/be_cmds.c 		sge->pa_lo = cpu_to_le32(mem->dma & 0xFFFFFFFF);
mem               811 drivers/net/ethernet/emulex/benet/be_cmds.c 		sge->len = cpu_to_le32(mem->size);
mem               818 drivers/net/ethernet/emulex/benet/be_cmds.c 				      struct be_dma_mem *mem)
mem               820 drivers/net/ethernet/emulex/benet/be_cmds.c 	int i, buf_pages = min(PAGES_4K_SPANNED(mem->va, mem->size), max_pages);
mem               821 drivers/net/ethernet/emulex/benet/be_cmds.c 	u64 dma = (u64)mem->dma;
mem              1978 drivers/net/ethernet/emulex/benet/be_cmds.c 	struct be_dma_mem *mem = &adapter->rx_filter;
mem              1979 drivers/net/ethernet/emulex/benet/be_cmds.c 	struct be_cmd_req_rx_filter *req = mem->va;
mem              1992 drivers/net/ethernet/emulex/benet/be_cmds.c 			       wrb, mem);
mem               148 drivers/net/ethernet/emulex/benet/be_main.c 	struct be_dma_mem *mem = &q->dma_mem;
mem               150 drivers/net/ethernet/emulex/benet/be_main.c 	if (mem->va) {
mem               151 drivers/net/ethernet/emulex/benet/be_main.c 		dma_free_coherent(&adapter->pdev->dev, mem->size, mem->va,
mem               152 drivers/net/ethernet/emulex/benet/be_main.c 				  mem->dma);
mem               153 drivers/net/ethernet/emulex/benet/be_main.c 		mem->va = NULL;
mem               160 drivers/net/ethernet/emulex/benet/be_main.c 	struct be_dma_mem *mem = &q->dma_mem;
mem               165 drivers/net/ethernet/emulex/benet/be_main.c 	mem->size = len * entry_size;
mem               166 drivers/net/ethernet/emulex/benet/be_main.c 	mem->va = dma_alloc_coherent(&adapter->pdev->dev, mem->size,
mem               167 drivers/net/ethernet/emulex/benet/be_main.c 				     &mem->dma, GFP_KERNEL);
mem               168 drivers/net/ethernet/emulex/benet/be_main.c 	if (!mem->va)
mem              5743 drivers/net/ethernet/emulex/benet/be_main.c 	struct be_dma_mem *mem = &adapter->mbox_mem_alloced;
mem              5746 drivers/net/ethernet/emulex/benet/be_main.c 	if (mem->va)
mem              5747 drivers/net/ethernet/emulex/benet/be_main.c 		dma_free_coherent(dev, mem->size, mem->va, mem->dma);
mem              5749 drivers/net/ethernet/emulex/benet/be_main.c 	mem = &adapter->rx_filter;
mem              5750 drivers/net/ethernet/emulex/benet/be_main.c 	if (mem->va)
mem              5751 drivers/net/ethernet/emulex/benet/be_main.c 		dma_free_coherent(dev, mem->size, mem->va, mem->dma);
mem              5753 drivers/net/ethernet/emulex/benet/be_main.c 	mem = &adapter->stats_cmd;
mem              5754 drivers/net/ethernet/emulex/benet/be_main.c 	if (mem->va)
mem              5755 drivers/net/ethernet/emulex/benet/be_main.c 		dma_free_coherent(dev, mem->size, mem->va, mem->dma);
mem              1025 drivers/net/ethernet/ethoc.c 	struct resource *mem = NULL;
mem              1063 drivers/net/ethernet/ethoc.c 		mem = devm_request_mem_region(&pdev->dev, res->start,
mem              1065 drivers/net/ethernet/ethoc.c 		if (!mem) {
mem              1071 drivers/net/ethernet/ethoc.c 		netdev->mem_start = mem->start;
mem              1072 drivers/net/ethernet/ethoc.c 		netdev->mem_end   = mem->end;
mem              1100 drivers/net/ethernet/ethoc.c 			netdev->mem_start, resource_size(mem));
mem               420 drivers/net/ethernet/fealnx.c 	void __iomem *mem;
mem               549 drivers/net/ethernet/fealnx.c 	np->mem = ioaddr;
mem               698 drivers/net/ethernet/fealnx.c 		pci_iounmap(pdev, np->mem);
mem               758 drivers/net/ethernet/fealnx.c 	void __iomem *miiport = np->mem + MANAGEMENT;
mem               797 drivers/net/ethernet/fealnx.c 	void __iomem *miiport = np->mem + MANAGEMENT;
mem               829 drivers/net/ethernet/fealnx.c 	void __iomem *ioaddr = np->mem;
mem               936 drivers/net/ethernet/fealnx.c 			if (ioread32(np->mem + BMCRSR) & LinkIsUp2) {
mem               959 drivers/net/ethernet/fealnx.c 		if (ioread32(np->mem + TCRRCR) & CR_R_FD)
mem               963 drivers/net/ethernet/fealnx.c 		if (ioread32(np->mem + TCRRCR) & CR_R_PS10)
mem              1085 drivers/net/ethernet/fealnx.c 	void __iomem *ioaddr = np->mem;
mem              1122 drivers/net/ethernet/fealnx.c 	void __iomem *ioaddr = np->mem;
mem              1148 drivers/net/ethernet/fealnx.c 	void __iomem *ioaddr = np->mem;
mem              1197 drivers/net/ethernet/fealnx.c 	void __iomem *ioaddr = np->mem;
mem              1364 drivers/net/ethernet/fealnx.c 	iowrite32(0, np->mem + TXPDR);
mem              1422 drivers/net/ethernet/fealnx.c 		np->mem + RXLBA);
mem              1432 drivers/net/ethernet/fealnx.c 	void __iomem *ioaddr = np->mem;
mem              1610 drivers/net/ethernet/fealnx.c 	void __iomem *ioaddr = np->mem;
mem              1745 drivers/net/ethernet/fealnx.c 	void __iomem *ioaddr = np->mem;
mem              1774 drivers/net/ethernet/fealnx.c 	void __iomem *ioaddr = np->mem;
mem              1892 drivers/net/ethernet/fealnx.c 	void __iomem *ioaddr = np->mem;
mem               822 drivers/net/ethernet/freescale/fec_mpc52xx.c 	struct resource mem;
mem               840 drivers/net/ethernet/freescale/fec_mpc52xx.c 	rv = of_address_to_resource(np, 0, &mem);
mem               845 drivers/net/ethernet/freescale/fec_mpc52xx.c 	if (resource_size(&mem) < sizeof(struct mpc52xx_fec)) {
mem               847 drivers/net/ethernet/freescale/fec_mpc52xx.c 		       (unsigned long)resource_size(&mem),
mem               853 drivers/net/ethernet/freescale/fec_mpc52xx.c 	if (!request_mem_region(mem.start, sizeof(struct mpc52xx_fec),
mem               863 drivers/net/ethernet/freescale/fec_mpc52xx.c 	ndev->base_addr		= mem.start;
mem               869 drivers/net/ethernet/freescale/fec_mpc52xx.c 	priv->fec = ioremap(mem.start, sizeof(struct mpc52xx_fec));
mem               979 drivers/net/ethernet/freescale/fec_mpc52xx.c 	release_mem_region(mem.start, sizeof(struct mpc52xx_fec));
mem               170 drivers/net/ethernet/freescale/fs_enet/fs_enet.h 			void __iomem *mem;	/* FCC DPRAM */
mem               108 drivers/net/ethernet/freescale/fs_enet/mac-fcc.c 	fep->fcc.mem = (void __iomem *)cpm2_immr;
mem               290 drivers/net/ethernet/freescale/fs_enet/mac-fcc.c 	memset_io(fep->fcc.mem + fpi->dpram_offset + 64, 0x88, 32);
mem                20 drivers/net/ethernet/huawei/hinic/hinic_common.c 	u32 *mem = data;
mem                26 drivers/net/ethernet/huawei/hinic/hinic_common.c 		*mem = cpu_to_be32(*mem);
mem                27 drivers/net/ethernet/huawei/hinic/hinic_common.c 		mem++;
mem                38 drivers/net/ethernet/huawei/hinic/hinic_common.c 	u32 *mem = data;
mem                44 drivers/net/ethernet/huawei/hinic/hinic_common.c 		*mem = be32_to_cpu(*mem);
mem                45 drivers/net/ethernet/huawei/hinic/hinic_common.c 		mem++;
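The hinic_common.c excerpts above walk a buffer one 32-bit word at a time, converting each word between CPU and big-endian order in place. A minimal sketch of the same loop; the helper name is illustrative and len is assumed to be a multiple of 4.

	#include <linux/types.h>
	#include <asm/byteorder.h>

	/* Convert a buffer of 32-bit words to big-endian in place (illustrative). */
	static void demo_cpu_to_be32_buf(void *data, int len)
	{
		u32 *mem = data;
		int i;

		for (i = 0; i < len / 4; i++) {
			*mem = cpu_to_be32(*mem);
			mem++;
		}
	}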
mem               582 drivers/net/ethernet/intel/e100.c 	struct mem *mem;
mem               653 drivers/net/ethernet/intel/e100.c 	u32 dma_addr = nic->dma_addr + offsetof(struct mem, selftest);
mem               658 drivers/net/ethernet/intel/e100.c 	nic->mem->selftest.signature = 0;
mem               659 drivers/net/ethernet/intel/e100.c 	nic->mem->selftest.result = 0xFFFFFFFF;
mem               670 drivers/net/ethernet/intel/e100.c 	if (nic->mem->selftest.result != 0) {
mem               673 drivers/net/ethernet/intel/e100.c 			  nic->mem->selftest.result);
mem               676 drivers/net/ethernet/intel/e100.c 	if (nic->mem->selftest.signature == 0) {
mem              1392 drivers/net/ethernet/intel/e100.c 		offsetof(struct mem, dump_buf));
mem              1553 drivers/net/ethernet/intel/e100.c 		nic->dma_addr + offsetof(struct mem, stats))))
mem              1608 drivers/net/ethernet/intel/e100.c 	struct stats *s = &nic->mem->stats;
mem              2442 drivers/net/ethernet/intel/e100.c 	return 1 + E100_PHY_REGS + sizeof(nic->mem->dump_buf);
mem              2459 drivers/net/ethernet/intel/e100.c 	memset(nic->mem->dump_buf, 0, sizeof(nic->mem->dump_buf));
mem              2462 drivers/net/ethernet/intel/e100.c 	memcpy(&buff[2 + E100_PHY_REGS], nic->mem->dump_buf,
mem              2463 drivers/net/ethernet/intel/e100.c 		sizeof(nic->mem->dump_buf));
mem              2758 drivers/net/ethernet/intel/e100.c 	nic->mem = pci_alloc_consistent(nic->pdev, sizeof(struct mem),
mem              2760 drivers/net/ethernet/intel/e100.c 	return nic->mem ? 0 : -ENOMEM;
mem              2765 drivers/net/ethernet/intel/e100.c 	if (nic->mem) {
mem              2766 drivers/net/ethernet/intel/e100.c 		pci_free_consistent(nic->pdev, sizeof(struct mem),
mem              2767 drivers/net/ethernet/intel/e100.c 			nic->mem, nic->dma_addr);
mem              2768 drivers/net/ethernet/intel/e100.c 		nic->mem = NULL;
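The e100 excerpts above rely on a single DMA-coherent block that backs several sub-buffers, with each sub-buffer's bus address computed as the block's base address plus an offsetof() into the backing struct. A minimal sketch of that layout; the struct and field sizes below are illustrative stand-ins, not e100's.

	#include <linux/dma-mapping.h>
	#include <linux/stddef.h>
	#include <linux/types.h>

	/* One coherent block backing several sub-buffers (illustrative layout). */
	struct demo_mem {
		u32 selftest[16];
		u32 stats[64];
		u8  dump_buf[596];
	};

	/* Bus address of the selftest area inside the shared block. */
	static dma_addr_t demo_selftest_addr(dma_addr_t base)
	{
		return base + offsetof(struct demo_mem, selftest);
	}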
mem                24 drivers/net/ethernet/intel/i40e/i40e_alloc.h 					    struct i40e_dma_mem *mem,
mem                28 drivers/net/ethernet/intel/i40e/i40e_alloc.h 					struct i40e_dma_mem *mem);
mem                30 drivers/net/ethernet/intel/i40e/i40e_alloc.h 					     struct i40e_virt_mem *mem,
mem                33 drivers/net/ethernet/intel/i40e/i40e_alloc.h 					 struct i40e_virt_mem *mem);
mem               559 drivers/net/ethernet/intel/i40e/i40e_dcb.c 	struct i40e_virt_mem mem;
mem               563 drivers/net/ethernet/intel/i40e/i40e_dcb.c 	ret = i40e_allocate_virt_mem(hw, &mem, I40E_LLDPDU_SIZE);
mem               567 drivers/net/ethernet/intel/i40e/i40e_dcb.c 	lldpmib = (u8 *)mem.va;
mem               578 drivers/net/ethernet/intel/i40e/i40e_dcb.c 	i40e_free_virt_mem(hw, &mem);
mem               949 drivers/net/ethernet/intel/i40e/i40e_dcb.c 	u16 mem;
mem               961 drivers/net/ethernet/intel/i40e/i40e_dcb.c 	mem = le16_to_cpu(raw_mem);
mem               965 drivers/net/ethernet/intel/i40e/i40e_dcb.c 	if (mem & I40E_PTR_TYPE)
mem               966 drivers/net/ethernet/intel/i40e/i40e_dcb.c 		address = (0x7FFF & mem) * 4096;
mem               968 drivers/net/ethernet/intel/i40e/i40e_dcb.c 		address = (0x7FFF & mem) * 2;
mem               980 drivers/net/ethernet/intel/i40e/i40e_dcb.c 	mem = le16_to_cpu(raw_mem);
mem               981 drivers/net/ethernet/intel/i40e/i40e_dcb.c 	offset = mem + word_offset;
mem              1008 drivers/net/ethernet/intel/i40e/i40e_dcb.c 	u32 mem;
mem              1017 drivers/net/ethernet/intel/i40e/i40e_dcb.c 	ret = i40e_aq_read_nvm(hw, I40E_SR_NVM_CONTROL_WORD, 0, sizeof(mem),
mem              1018 drivers/net/ethernet/intel/i40e/i40e_dcb.c 			       &mem, true, NULL);
mem              1027 drivers/net/ethernet/intel/i40e/i40e_dcb.c 	if (mem & I40E_SR_NVM_MAP_STRUCTURE_TYPE) {
mem                29 drivers/net/ethernet/intel/i40e/i40e_hmc.c 	struct i40e_dma_mem mem;
mem                56 drivers/net/ethernet/intel/i40e/i40e_hmc.c 		ret_code = i40e_allocate_dma_mem(hw, &mem, mem_type, alloc_len,
mem                70 drivers/net/ethernet/intel/i40e/i40e_hmc.c 			sd_entry->u.pd_table.pd_page_addr = mem;
mem                72 drivers/net/ethernet/intel/i40e/i40e_hmc.c 			sd_entry->u.bp.addr = mem;
mem                87 drivers/net/ethernet/intel/i40e/i40e_hmc.c 			i40e_free_dma_mem(hw, &mem);
mem               117 drivers/net/ethernet/intel/i40e/i40e_hmc.c 	struct i40e_dma_mem mem;
mem               118 drivers/net/ethernet/intel/i40e/i40e_hmc.c 	struct i40e_dma_mem *page = &mem;
mem               115 drivers/net/ethernet/intel/i40e/i40e_main.c int i40e_allocate_dma_mem_d(struct i40e_hw *hw, struct i40e_dma_mem *mem,
mem               120 drivers/net/ethernet/intel/i40e/i40e_main.c 	mem->size = ALIGN(size, alignment);
mem               121 drivers/net/ethernet/intel/i40e/i40e_main.c 	mem->va = dma_alloc_coherent(&pf->pdev->dev, mem->size, &mem->pa,
mem               123 drivers/net/ethernet/intel/i40e/i40e_main.c 	if (!mem->va)
mem               134 drivers/net/ethernet/intel/i40e/i40e_main.c int i40e_free_dma_mem_d(struct i40e_hw *hw, struct i40e_dma_mem *mem)
mem               138 drivers/net/ethernet/intel/i40e/i40e_main.c 	dma_free_coherent(&pf->pdev->dev, mem->size, mem->va, mem->pa);
mem               139 drivers/net/ethernet/intel/i40e/i40e_main.c 	mem->va = NULL;
mem               140 drivers/net/ethernet/intel/i40e/i40e_main.c 	mem->pa = 0;
mem               141 drivers/net/ethernet/intel/i40e/i40e_main.c 	mem->size = 0;
mem               152 drivers/net/ethernet/intel/i40e/i40e_main.c int i40e_allocate_virt_mem_d(struct i40e_hw *hw, struct i40e_virt_mem *mem,
mem               155 drivers/net/ethernet/intel/i40e/i40e_main.c 	mem->size = size;
mem               156 drivers/net/ethernet/intel/i40e/i40e_main.c 	mem->va = kzalloc(size, GFP_KERNEL);
mem               158 drivers/net/ethernet/intel/i40e/i40e_main.c 	if (!mem->va)
mem               169 drivers/net/ethernet/intel/i40e/i40e_main.c int i40e_free_virt_mem_d(struct i40e_hw *hw, struct i40e_virt_mem *mem)
mem               172 drivers/net/ethernet/intel/i40e/i40e_main.c 	kfree(mem->va);
mem               173 drivers/net/ethernet/intel/i40e/i40e_main.c 	mem->va = NULL;
mem               174 drivers/net/ethernet/intel/i40e/i40e_main.c 	mem->size = 0;
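The i40e_main.c excerpts above pair an allocate helper with a free helper for both DMA-coherent and plain virtual memory. A minimal sketch of the DMA pair under the same assumptions; the struct and function names below are stand-ins, not the driver's own definitions.

	#include <linux/kernel.h>
	#include <linux/dma-mapping.h>
	#include <linux/types.h>

	struct demo_dma_mem {
		void *va;
		dma_addr_t pa;
		u32 size;
	};

	/* Round up to the requested alignment and allocate coherent DMA memory. */
	static int demo_alloc_dma_mem(struct device *dev, struct demo_dma_mem *mem,
				      u64 size, u32 alignment)
	{
		mem->size = ALIGN(size, alignment);
		mem->va = dma_alloc_coherent(dev, mem->size, &mem->pa, GFP_KERNEL);
		return mem->va ? 0 : -ENOMEM;
	}

	/* Release the buffer and clear the bookkeeping fields. */
	static void demo_free_dma_mem(struct device *dev, struct demo_dma_mem *mem)
	{
		if (!mem->va)
			return;
		dma_free_coherent(dev, mem->size, mem->va, mem->pa);
		mem->va = NULL;
		mem->pa = 0;
		mem->size = 0;
	}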
mem                24 drivers/net/ethernet/intel/iavf/iavf_alloc.h 				       struct iavf_dma_mem *mem,
mem                28 drivers/net/ethernet/intel/iavf/iavf_alloc.h 				   struct iavf_dma_mem *mem);
mem                30 drivers/net/ethernet/intel/iavf/iavf_alloc.h 					struct iavf_virt_mem *mem, u32 size);
mem                32 drivers/net/ethernet/intel/iavf/iavf_alloc.h 				    struct iavf_virt_mem *mem);
mem                73 drivers/net/ethernet/intel/iavf/iavf_main.c 					 struct iavf_dma_mem *mem,
mem                78 drivers/net/ethernet/intel/iavf/iavf_main.c 	if (!mem)
mem                81 drivers/net/ethernet/intel/iavf/iavf_main.c 	mem->size = ALIGN(size, alignment);
mem                82 drivers/net/ethernet/intel/iavf/iavf_main.c 	mem->va = dma_alloc_coherent(&adapter->pdev->dev, mem->size,
mem                83 drivers/net/ethernet/intel/iavf/iavf_main.c 				     (dma_addr_t *)&mem->pa, GFP_KERNEL);
mem                84 drivers/net/ethernet/intel/iavf/iavf_main.c 	if (mem->va)
mem                96 drivers/net/ethernet/intel/iavf/iavf_main.c 				     struct iavf_dma_mem *mem)
mem               100 drivers/net/ethernet/intel/iavf/iavf_main.c 	if (!mem || !mem->va)
mem               102 drivers/net/ethernet/intel/iavf/iavf_main.c 	dma_free_coherent(&adapter->pdev->dev, mem->size,
mem               103 drivers/net/ethernet/intel/iavf/iavf_main.c 			  mem->va, (dma_addr_t)mem->pa);
mem               114 drivers/net/ethernet/intel/iavf/iavf_main.c 					  struct iavf_virt_mem *mem, u32 size)
mem               116 drivers/net/ethernet/intel/iavf/iavf_main.c 	if (!mem)
mem               119 drivers/net/ethernet/intel/iavf/iavf_main.c 	mem->size = size;
mem               120 drivers/net/ethernet/intel/iavf/iavf_main.c 	mem->va = kzalloc(size, GFP_KERNEL);
mem               122 drivers/net/ethernet/intel/iavf/iavf_main.c 	if (mem->va)
mem               134 drivers/net/ethernet/intel/iavf/iavf_main.c 				      struct iavf_virt_mem *mem)
mem               136 drivers/net/ethernet/intel/iavf/iavf_main.c 	if (!mem)
mem               140 drivers/net/ethernet/intel/iavf/iavf_main.c 	kfree(mem->va);
mem              2514 drivers/net/ethernet/marvell/skge.c 	u64 base = skge->dma + (e->desc - skge->mem);
mem              2549 drivers/net/ethernet/marvell/skge.c 	skge->mem = pci_alloc_consistent(hw->pdev, skge->mem_size, &skge->dma);
mem              2550 drivers/net/ethernet/marvell/skge.c 	if (!skge->mem)
mem              2561 drivers/net/ethernet/marvell/skge.c 	err = skge_ring_alloc(&skge->rx_ring, skge->mem, skge->dma);
mem              2569 drivers/net/ethernet/marvell/skge.c 	err = skge_ring_alloc(&skge->tx_ring, skge->mem + rx_size,
mem              2627 drivers/net/ethernet/marvell/skge.c 	pci_free_consistent(hw->pdev, skge->mem_size, skge->mem, skge->dma);
mem              2628 drivers/net/ethernet/marvell/skge.c 	skge->mem = NULL;
mem              2648 drivers/net/ethernet/marvell/skge.c 	if (!skge->mem)
mem              2717 drivers/net/ethernet/marvell/skge.c 	pci_free_consistent(hw->pdev, skge->mem_size, skge->mem, skge->dma);
mem              2718 drivers/net/ethernet/marvell/skge.c 	skge->mem = NULL;
mem              2472 drivers/net/ethernet/marvell/skge.h 	void		     *mem;	/* PCI memory for rings */
mem                98 drivers/net/ethernet/mellanox/mlx4/icm.c static int mlx4_alloc_icm_pages(struct scatterlist *mem, int order,
mem               110 drivers/net/ethernet/mellanox/mlx4/icm.c 	sg_set_page(mem, page, PAGE_SIZE << order, 0);
mem                74 drivers/net/ethernet/mellanox/mlx5/core/en/xdp.c 	if (xdp->rxq->mem.type == MEM_TYPE_ZERO_COPY) {
mem               334 drivers/net/ethernet/netronome/nfp/bpf/main.c 	u8 __iomem *mem, *start;
mem               336 drivers/net/ethernet/netronome/nfp/bpf/main.c 	mem = nfp_rtsym_map(app->pf->rtbl, "_abi_bpf_capabilities", "bpf.cap",
mem               338 drivers/net/ethernet/netronome/nfp/bpf/main.c 	if (IS_ERR(mem))
mem               339 drivers/net/ethernet/netronome/nfp/bpf/main.c 		return PTR_ERR(mem) == -ENOENT ? 0 : PTR_ERR(mem);
mem               341 drivers/net/ethernet/netronome/nfp/bpf/main.c 	start = mem;
mem               342 drivers/net/ethernet/netronome/nfp/bpf/main.c 	while (mem - start + 8 <= nfp_cpp_area_size(area)) {
mem               346 drivers/net/ethernet/netronome/nfp/bpf/main.c 		type = readl(mem);
mem               347 drivers/net/ethernet/netronome/nfp/bpf/main.c 		length = readl(mem + 4);
mem               348 drivers/net/ethernet/netronome/nfp/bpf/main.c 		value = mem + 8;
mem               350 drivers/net/ethernet/netronome/nfp/bpf/main.c 		mem += 8 + length;
mem               351 drivers/net/ethernet/netronome/nfp/bpf/main.c 		if (mem - start > nfp_cpp_area_size(area))
mem               396 drivers/net/ethernet/netronome/nfp/bpf/main.c 	if (mem - start != nfp_cpp_area_size(area)) {
mem               398 drivers/net/ethernet/netronome/nfp/bpf/main.c 			mem - start, nfp_cpp_area_size(area));
mem               407 drivers/net/ethernet/netronome/nfp/bpf/main.c 	nfp_err(cpp, "invalid BPF capabilities at offset:%zd\n", mem - start);
mem               546 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c nfp_vnic_get_hw_stats(u64 *data, u8 __iomem *mem, unsigned int num_vecs)
mem               551 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c 		*data++ = readq(mem + nfp_net_et_stats[i].off);
mem               554 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c 		*data++ = readq(mem + NFP_NET_CFG_RXR_STATS(i));
mem               555 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c 		*data++ = readq(mem + NFP_NET_CFG_RXR_STATS(i) + 8);
mem               556 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c 		*data++ = readq(mem + NFP_NET_CFG_TXR_STATS(i));
mem               557 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c 		*data++ = readq(mem + NFP_NET_CFG_TXR_STATS(i) + 8);
mem               449 drivers/net/ethernet/netronome/nfp/nfp_net_main.c 	u8 __iomem *mem;
mem               453 drivers/net/ethernet/netronome/nfp/nfp_net_main.c 	mem = nfp_pf_map_rtsym(pf, "net.bar0", "_pf%d_net_bar0",
mem               455 drivers/net/ethernet/netronome/nfp/nfp_net_main.c 	if (IS_ERR(mem)) {
mem               457 drivers/net/ethernet/netronome/nfp/nfp_net_main.c 		return PTR_ERR(mem);
mem               498 drivers/net/ethernet/netronome/nfp/nfp_net_main.c 	mem = nfp_cpp_map_area(pf->cpp, "net.qc", cpp_id, NFP_PCIE_QUEUE(0),
mem               500 drivers/net/ethernet/netronome/nfp/nfp_net_main.c 	if (IS_ERR(mem)) {
mem               502 drivers/net/ethernet/netronome/nfp/nfp_net_main.c 		err = PTR_ERR(mem);
mem                62 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c 	u8 __iomem *mem = port->eth_stats;
mem                64 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c 	stats->tx_packets = readq(mem + NFP_MAC_STATS_TX_FRAMES_TRANSMITTED_OK);
mem                65 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c 	stats->tx_bytes = readq(mem + NFP_MAC_STATS_TX_OUT_OCTETS);
mem                66 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c 	stats->tx_dropped = readq(mem + NFP_MAC_STATS_TX_OUT_ERRORS);
mem                68 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c 	stats->rx_packets = readq(mem + NFP_MAC_STATS_RX_FRAMES_RECEIVED_OK);
mem                69 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c 	stats->rx_bytes = readq(mem + NFP_MAC_STATS_RX_IN_OCTETS);
mem                70 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c 	stats->rx_dropped = readq(mem + NFP_MAC_STATS_RX_IN_ERRORS);
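The nfp ethtool and representor excerpts above scrape 64-bit counters straight out of a mapped BAR with readq() at fixed offsets. A minimal sketch of that pattern; the offsets below are placeholders, not the real NFP_MAC_STATS_* values.

	#include <linux/io-64-nonatomic-hi-lo.h>
	#include <linux/netdevice.h>

	#define DEMO_STATS_TX_PKTS	0x00
	#define DEMO_STATS_TX_BYTES	0x08
	#define DEMO_STATS_RX_PKTS	0x10
	#define DEMO_STATS_RX_BYTES	0x18

	/* Copy 64-bit hardware counters from a mapped BAR (illustrative). */
	static void demo_read_stats(u8 __iomem *mem, struct rtnl_link_stats64 *stats)
	{
		stats->tx_packets = readq(mem + DEMO_STATS_TX_PKTS);
		stats->tx_bytes   = readq(mem + DEMO_STATS_TX_BYTES);
		stats->rx_packets = readq(mem + DEMO_STATS_RX_PKTS);
		stats->rx_bytes   = readq(mem + DEMO_STATS_RX_BYTES);
	}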
mem               527 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c 	u8 __iomem *mem;
mem               548 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c 	mem = nfp_cpp_map_area(rtbl->cpp, id, cpp_id, addr, sym->size, area);
mem               549 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c 	if (IS_ERR(mem)) {
mem               551 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c 			name, PTR_ERR(mem));
mem               552 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c 		return mem;
mem               555 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c 	return mem;
mem              3193 drivers/net/ethernet/qlogic/qed/qed_debug.c 			const struct dbg_dump_mem *mem =
mem              3196 drivers/net/ethernet/qlogic/qed/qed_debug.c 			u8 mem_group_id = GET_FIELD(mem->dword0,
mem              3215 drivers/net/ethernet/qlogic/qed/qed_debug.c 			mem_addr = GET_FIELD(mem->dword0, DBG_DUMP_MEM_ADDRESS);
mem              3216 drivers/net/ethernet/qlogic/qed/qed_debug.c 			mem_len = GET_FIELD(mem->dword1, DBG_DUMP_MEM_LENGTH);
mem              3217 drivers/net/ethernet/qlogic/qed/qed_debug.c 			mem_wide_bus = GET_FIELD(mem->dword1,
mem               235 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 		struct __mem		mem;
mem               564 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 	struct __mem *rom = &entry->region.mem;
mem               662 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 					 struct __mem *mem, __le32 *buffer,
mem               668 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 	reg_read = mem->size;
mem               669 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 	addr = mem->addr;
mem               710 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 	return mem->size;
mem               722 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 				struct __mem *mem)
mem               735 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 	ret = qlcnic_ind_wr(adapter, temp_addr, mem->desc_card_addr);
mem               745 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 	ret = qlcnic_ind_wr(adapter, temp_addr, mem->start_dma_cmd);
mem               769 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 				     struct __mem *mem,
mem               811 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 	temp = mem->dma_desc_cmd & 0xff0f;
mem               819 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 	while (read_size < mem->size) {
mem               820 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 		if (mem->size - read_size >= QLC_PEX_DMA_READ_SIZE)
mem               823 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 			size = mem->size - read_size;
mem               825 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 		dma_descr->src_addr_low = mem->addr + read_size;
mem               830 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 		*ret = qlcnic_ms_mem_write128(adapter, mem->desc_card_addr,
mem               834 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 				 mem->desc_card_addr);
mem               838 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 		*ret = qlcnic_start_pex_dma(adapter, mem);
mem               860 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 	struct __mem *mem = &entry->region.mem;
mem               865 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 		data_size = qlcnic_read_memory_pexdma(adapter, mem, buffer,
mem               875 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 	data_size = qlcnic_read_memory_test_agent(adapter, mem, buffer, &ret);
mem              1022 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 	struct __mem *rom = &entry->region.mem;
mem              1116 drivers/net/ethernet/realtek/8139cp.c 	void *mem;
mem              1119 drivers/net/ethernet/realtek/8139cp.c 	mem = dma_alloc_coherent(d, CP_RING_BYTES, &cp->ring_dma, GFP_KERNEL);
mem              1120 drivers/net/ethernet/realtek/8139cp.c 	if (!mem)
mem              1123 drivers/net/ethernet/realtek/8139cp.c 	cp->rx_ring = mem;
mem               273 drivers/net/ethernet/sun/sunbmac.h #define bib_offset(mem, elem) \
mem               274 drivers/net/ethernet/sun/sunbmac.h ((__u32)((unsigned long)(&(((struct bmac_init_block *)0)->mem[elem]))))
mem               380 drivers/net/ethernet/sun/sunhme.h #define hblock_offset(mem, elem) \
mem               381 drivers/net/ethernet/sun/sunhme.h ((__u32)((unsigned long)(&(((struct hmeal_init_block *)0)->mem[elem]))))
mem               309 drivers/net/ethernet/sun/sunqe.h #define qib_offset(mem, elem) \
mem               310 drivers/net/ethernet/sun/sunqe.h ((__u32)((unsigned long)(&(((struct qe_init_block *)0)->mem[elem]))))
mem               331 drivers/net/ethernet/sun/sunqe.h #define qebuf_offset(mem, elem) \
mem               332 drivers/net/ethernet/sun/sunqe.h ((__u32)((unsigned long)(&(((struct sunqe_buffers *)0)->mem[elem][0]))))
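The sunbmac/sunhme/sunqe macros above compute a ring element's byte offset inside an init block by taking the address of the member in a NULL struct pointer, i.e. an open-coded offsetof(). A minimal equivalent with illustrative structure and macro names (both members hold u32 entries in this sketch):

	#include <linux/stddef.h>
	#include <linux/types.h>

	struct demo_init_block {
		u32 tx_ring[16];
		u32 rx_ring[16];
	};

	/* Byte offset of element 'elem' of member 'mem' inside the init block. */
	#define demo_block_offset(mem, elem) \
		((u32)(offsetof(struct demo_init_block, mem) + (elem) * sizeof(u32)))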
mem               913 drivers/net/ethernet/ti/cpmac.c 	struct resource *mem;
mem               917 drivers/net/ethernet/ti/cpmac.c 	mem = platform_get_resource_byname(priv->pdev, IORESOURCE_MEM, "regs");
mem               918 drivers/net/ethernet/ti/cpmac.c 	if (!request_mem_region(mem->start, resource_size(mem), dev->name)) {
mem               926 drivers/net/ethernet/ti/cpmac.c 	priv->regs = ioremap(mem->start, resource_size(mem));
mem              1004 drivers/net/ethernet/ti/cpmac.c 	release_mem_region(mem->start, resource_size(mem));
mem              1014 drivers/net/ethernet/ti/cpmac.c 	struct resource *mem;
mem              1031 drivers/net/ethernet/ti/cpmac.c 	mem = platform_get_resource_byname(priv->pdev, IORESOURCE_MEM, "regs");
mem              1032 drivers/net/ethernet/ti/cpmac.c 	release_mem_region(mem->start, resource_size(mem));
mem              1068 drivers/net/ethernet/ti/cpmac.c 	struct resource *mem;
mem              1107 drivers/net/ethernet/ti/cpmac.c 	mem = platform_get_resource_byname(pdev, IORESOURCE_MEM, "regs");
mem              1108 drivers/net/ethernet/ti/cpmac.c 	if (!mem) {
mem              1149 drivers/net/ethernet/ti/cpmac.c 			 "mac: %pM\n", (void *)mem->start, dev->irq,
mem               268 drivers/net/ethernet/wiznet/w5100.c 	struct resource *mem;
mem               272 drivers/net/ethernet/wiznet/w5100.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               273 drivers/net/ethernet/wiznet/w5100.c 	mmio_priv->base = devm_ioremap_resource(&pdev->dev, mem);
mem               277 drivers/net/ethernet/wiznet/w5100.c 	netdev_info(ndev, "at 0x%llx irq %d\n", (u64)mem->start, priv->irq);
mem              1047 drivers/net/ethernet/wiznet/w5100.c 	struct resource *mem;
mem              1054 drivers/net/ethernet/wiznet/w5100.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem              1055 drivers/net/ethernet/wiznet/w5100.c 	if (resource_size(mem) < W5100_BUS_DIRECT_SIZE)
mem               531 drivers/net/ethernet/wiznet/w5300.c 	struct resource *mem;
mem               542 drivers/net/ethernet/wiznet/w5300.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               543 drivers/net/ethernet/wiznet/w5300.c 	priv->base = devm_ioremap_resource(&pdev->dev, mem);
mem               547 drivers/net/ethernet/wiznet/w5300.c 	mem_size = resource_size(mem);
mem               585 drivers/net/ethernet/wiznet/w5300.c 	netdev_info(ndev, "at 0x%llx irq %d\n", (u64)mem->start, irq);
mem               829 drivers/net/ethernet/xscale/ixp4xx_eth.c 	void *mem;
mem               849 drivers/net/ethernet/xscale/ixp4xx_eth.c 	mem = skb->data;
mem               853 drivers/net/ethernet/xscale/ixp4xx_eth.c 	if (!(mem = kmalloc(bytes, GFP_ATOMIC))) {
mem               858 drivers/net/ethernet/xscale/ixp4xx_eth.c 	memcpy_swab32(mem, (u32 *)((int)skb->data & ~3), bytes / 4);
mem               861 drivers/net/ethernet/xscale/ixp4xx_eth.c 	phys = dma_map_single(&dev->dev, mem, bytes, DMA_TO_DEVICE);
mem               865 drivers/net/ethernet/xscale/ixp4xx_eth.c 		kfree(mem);
mem               878 drivers/net/ethernet/xscale/ixp4xx_eth.c 	port->tx_buff_tab[n] = mem;
mem               365 drivers/net/fddi/defxx.c 	writel(data, bp->base.mem + offset);
mem               390 drivers/net/fddi/defxx.c 	*data = readl(bp->base.mem + offset);
mem               617 drivers/net/fddi/defxx.c 		bp->base.mem = ioremap_nocache(bar_start[0], bar_len[0]);
mem               618 drivers/net/fddi/defxx.c 		if (!bp->base.mem) {
mem               660 drivers/net/fddi/defxx.c 		iounmap(bp->base.mem);
mem              3725 drivers/net/fddi/defxx.c 		iounmap(bp->base.mem);
mem              1775 drivers/net/fddi/defxx.h 		void __iomem *mem;
mem               204 drivers/net/fddi/skfp/skfddi.c 	void __iomem *mem;
mem               229 drivers/net/fddi/skfp/skfddi.c 	mem = ioremap(pci_resource_start(pdev, 0), 0x4000);
mem               237 drivers/net/fddi/skfp/skfddi.c 	mem = ioport_map(pci_resource_start(pdev, 1), FP_IO_LEN);
mem               239 drivers/net/fddi/skfp/skfddi.c 	if (!mem) {
mem               268 drivers/net/fddi/skfp/skfddi.c 	smc->hw.iop = mem;
mem               272 drivers/net/fddi/skfp/skfddi.c 	dev->base_addr = (unsigned long)mem;
mem               305 drivers/net/fddi/skfp/skfddi.c 	iounmap(mem);
mem               307 drivers/net/fddi/skfp/skfddi.c 	ioport_unmap(mem);
mem               115 drivers/net/fjes/fjes_hw.c 	void *mem;
mem               117 drivers/net/fjes/fjes_hw.c 	mem = vzalloc(EP_BUFFER_SIZE);
mem               118 drivers/net/fjes/fjes_hw.c 	if (!mem)
mem               121 drivers/net/fjes/fjes_hw.c 	epbh->buffer = mem;
mem               124 drivers/net/fjes/fjes_hw.c 	epbh->info = (union ep_buffer_info *)mem;
mem               125 drivers/net/fjes/fjes_hw.c 	epbh->ring = (u8 *)(mem + sizeof(union ep_buffer_info));
mem               542 drivers/net/veth.c 			xdp.rxq->mem = frame->mem;
mem               554 drivers/net/veth.c 			xdp.rxq->mem = frame->mem;
mem               670 drivers/net/veth.c 		xdp.rxq->mem = rq->xdp_mem;
mem               681 drivers/net/veth.c 		xdp.rxq->mem = rq->xdp_mem;
mem               869 drivers/net/veth.c 			rq->xdp_mem = rq->xdp_rxq.mem;
mem               901 drivers/net/veth.c 		rq->xdp_rxq.mem = rq->xdp_mem;
mem               446 drivers/net/wan/farsync.c 	char __iomem *mem;	/* Card memory mapped to kernel space */
mem               494 drivers/net/wan/farsync.c #define FST_RDB(C,E)    readb ((C)->mem + WIN_OFFSET(E))
mem               495 drivers/net/wan/farsync.c #define FST_RDW(C,E)    readw ((C)->mem + WIN_OFFSET(E))
mem               496 drivers/net/wan/farsync.c #define FST_RDL(C,E)    readl ((C)->mem + WIN_OFFSET(E))
mem               498 drivers/net/wan/farsync.c #define FST_WRB(C,E,B)  writeb ((B), (C)->mem + WIN_OFFSET(E))
mem               499 drivers/net/wan/farsync.c #define FST_WRW(C,E,W)  writew ((W), (C)->mem + WIN_OFFSET(E))
mem               500 drivers/net/wan/farsync.c #define FST_WRL(C,E,L)  writel ((L), (C)->mem + WIN_OFFSET(E))
mem               726 drivers/net/wan/farsync.c 		(void) readb(card->mem);
mem               881 drivers/net/wan/farsync.c fst_rx_dma(struct fst_card_info *card, dma_addr_t dma, u32 mem, int len)
mem               887 drivers/net/wan/farsync.c 	dbg(DBG_RX, "In fst_rx_dma %x %x %d\n", (u32)dma, mem, len);
mem               893 drivers/net/wan/farsync.c 	outl(mem, card->pci_conf + DMALADR0);	/* from here */
mem               908 drivers/net/wan/farsync.c fst_tx_dma(struct fst_card_info *card, dma_addr_t dma, u32 mem, int len)
mem               914 drivers/net/wan/farsync.c 	dbg(DBG_TX, "In fst_tx_dma %x %x %d\n", (u32)dma, mem, len);
mem               920 drivers/net/wan/farsync.c 	outl(mem, card->pci_conf + DMALADR1);	/* to here */
mem              1278 drivers/net/wan/farsync.c 			      card->mem + BUF_OFFSET(rxBuffer[pi][rxp][0]),
mem              1377 drivers/net/wan/farsync.c 					memcpy_toio(card->mem +
mem              2034 drivers/net/wan/farsync.c 		memcpy_toio(card->mem + wrthdr.offset, buf, wrthdr.size);
mem              2452 drivers/net/wan/farsync.c 	if ((card->mem = ioremap(card->phys_mem, FST_MEMSIZE)) == NULL) {
mem              2462 drivers/net/wan/farsync.c 	dbg(DBG_PCI, "kernel mem %p, ctlmem %p\n", card->mem, card->ctlmem);
mem              2592 drivers/net/wan/farsync.c 	iounmap(card->mem);
mem              2622 drivers/net/wan/farsync.c 	iounmap(card->mem);
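The farsync excerpts above reach every shared-memory field through the ioremap()ed card->mem base plus a window offset, so all accesses funnel through readb()/readw()/writel() and friends. A minimal sketch of that accessor-macro pattern with an illustrative shared-memory layout, not the card's real one:

	#include <linux/io.h>
	#include <linux/stddef.h>
	#include <linux/types.h>

	struct demo_card {
		char __iomem *mem;	/* card memory mapped into kernel space */
	};

	/* Illustrative layout of the card's shared memory window. */
	struct demo_shared_mem {
		u8  state;
		u8  pad[3];
		u32 tx_count;
	};

	#define DEMO_RDB(card, field) \
		readb((card)->mem + offsetof(struct demo_shared_mem, field))
	#define DEMO_WRL(card, field, val) \
		writel((val), (card)->mem + offsetof(struct demo_shared_mem, field))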
mem               832 drivers/net/wan/ixp4xx_hss.c 	void *mem;
mem               852 drivers/net/wan/ixp4xx_hss.c 	mem = skb->data;
mem               856 drivers/net/wan/ixp4xx_hss.c 	if (!(mem = kmalloc(bytes, GFP_ATOMIC))) {
mem               861 drivers/net/wan/ixp4xx_hss.c 	memcpy_swab32(mem, (u32 *)((uintptr_t)skb->data & ~3), bytes / 4);
mem               865 drivers/net/wan/ixp4xx_hss.c 	phys = dma_map_single(&dev->dev, mem, bytes, DMA_TO_DEVICE);
mem               870 drivers/net/wan/ixp4xx_hss.c 		kfree(mem);
mem               883 drivers/net/wan/ixp4xx_hss.c 	port->tx_buff_tab[n] = mem;
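Both ixp4xx excerpts (the ethernet and HSS drivers) bounce a possibly unaligned skb payload into a freshly allocated buffer before handing it to dma_map_single(), and free the bounce buffer again if the mapping fails. A minimal sketch of that sequence, with a plain memcpy standing in for the drivers' memcpy_swab32() word-swapping copy and bytes assumed to be the aligned length (at least skb->len):

	#include <linux/dma-mapping.h>
	#include <linux/skbuff.h>
	#include <linux/slab.h>
	#include <linux/string.h>

	/* Copy skb data to an aligned bounce buffer and map it for TX (illustrative). */
	static void *demo_tx_bounce(struct device *dev, struct sk_buff *skb,
				    int bytes, dma_addr_t *phys)
	{
		void *mem;

		mem = kmalloc(bytes, GFP_ATOMIC);
		if (!mem)
			return NULL;

		memcpy(mem, skb->data, skb->len);

		*phys = dma_map_single(dev, mem, bytes, DMA_TO_DEVICE);
		if (dma_mapping_error(dev, *phys)) {
			kfree(mem);
			return NULL;
		}

		return mem;
	}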
mem              1194 drivers/net/wan/sdla.c 	struct sdla_mem mem;
mem              1197 drivers/net/wan/sdla.c 	if(copy_from_user(&mem, info, sizeof(mem)))
mem              1202 drivers/net/wan/sdla.c 		temp = kzalloc(mem.len, GFP_KERNEL);
mem              1205 drivers/net/wan/sdla.c 		sdla_read(dev, mem.addr, temp, mem.len);
mem              1206 drivers/net/wan/sdla.c 		if(copy_to_user(mem.data, temp, mem.len))
mem              1215 drivers/net/wan/sdla.c 		temp = memdup_user(mem.data, mem.len);
mem              1218 drivers/net/wan/sdla.c 		sdla_write(dev, mem.addr, temp, mem.len);
mem               564 drivers/net/wan/wanxl.c 	u8 __iomem *mem;	/* memory virtual base addr */
mem               707 drivers/net/wan/wanxl.c 	mem = ioremap_nocache(mem_phy, PDM_OFFSET + sizeof(firmware));
mem               708 drivers/net/wan/wanxl.c 	if (!mem) {
mem               715 drivers/net/wan/wanxl.c 		writel(ntohl(*(__be32*)(firmware + i)), mem + PDM_OFFSET + i);
mem               720 drivers/net/wan/wanxl.c 		       (void *)card->status, mem + PDM_OFFSET + 4 + i * 4);
mem               721 drivers/net/wan/wanxl.c 	writel(card->status_address, mem + PDM_OFFSET + 20);
mem               722 drivers/net/wan/wanxl.c 	writel(PDM_OFFSET, mem);
mem               723 drivers/net/wan/wanxl.c 	iounmap(mem);
mem                37 drivers/net/wireless/ath/ath10k/ahb.c 	iowrite32(value, ar_ahb->mem + offset);
mem                44 drivers/net/wireless/ath/ath10k/ahb.c 	return ioread32(ar_ahb->mem + offset);
mem               452 drivers/net/wireless/ath/ath10k/ahb.c 	ar_ahb->mem = devm_ioremap_resource(&pdev->dev, res);
mem               453 drivers/net/wireless/ath/ath10k/ahb.c 	if (IS_ERR(ar_ahb->mem)) {
mem               455 drivers/net/wireless/ath/ath10k/ahb.c 		ret = PTR_ERR(ar_ahb->mem);
mem               508 drivers/net/wireless/ath/ath10k/ahb.c 		   ar_ahb->mem, ar_ahb->mem_len,
mem               524 drivers/net/wireless/ath/ath10k/ahb.c 	devm_iounmap(&pdev->dev, ar_ahb->mem);
mem               527 drivers/net/wireless/ath/ath10k/ahb.c 	ar_ahb->mem = NULL;
mem               538 drivers/net/wireless/ath/ath10k/ahb.c 	if (ar_ahb->mem)
mem               539 drivers/net/wireless/ath/ath10k/ahb.c 		devm_iounmap(dev, ar_ahb->mem);
mem               547 drivers/net/wireless/ath/ath10k/ahb.c 	ar_ahb->mem = NULL;
mem               774 drivers/net/wireless/ath/ath10k/ahb.c 	ar_pci->mem = ar_ahb->mem;
mem                14 drivers/net/wireless/ath/ath10k/ahb.h 	void __iomem *mem;
mem               432 drivers/net/wireless/ath/ath10k/pci.c 	u32 val = ioread32(ar_pci->mem + PCIE_LOCAL_BASE_ADDRESS +
mem               448 drivers/net/wireless/ath/ath10k/pci.c 		  ar_pci->mem + PCIE_LOCAL_BASE_ADDRESS +
mem               462 drivers/net/wireless/ath/ath10k/pci.c 		  ar_pci->mem + PCIE_LOCAL_BASE_ADDRESS +
mem               503 drivers/net/wireless/ath/ath10k/pci.c 			  ar_pci->mem + PCIE_LOCAL_BASE_ADDRESS +
mem               524 drivers/net/wireless/ath/ath10k/pci.c 		  ar_pci->mem + PCIE_LOCAL_BASE_ADDRESS +
mem               647 drivers/net/wireless/ath/ath10k/pci.c 	iowrite32(value, ar_pci->mem + offset);
mem               670 drivers/net/wireless/ath/ath10k/pci.c 	val = ioread32(ar_pci->mem + offset);
mem              1589 drivers/net/wireless/ath/ath10k/pci.c 	base_addr = ioread32(ar_pci->mem + QCA99X0_PCIE_BAR0_START_REG);
mem              1593 drivers/net/wireless/ath/ath10k/pci.c 		iowrite32(base_addr + i, ar_pci->mem + QCA99X0_CPU_MEM_ADDR_REG);
mem              1594 drivers/net/wireless/ath/ath10k/pci.c 		*(u32 *)(buf + i) = ioread32(ar_pci->mem + QCA99X0_CPU_MEM_DATA_REG);
mem              1617 drivers/net/wireless/ath/ath10k/pci.c 		*(u32 *)(buf + i) = ioread32(ar_pci->mem + region->start + i);
mem              3409 drivers/net/wireless/ath/ath10k/pci.c 	ar_pci->mem = pci_iomap(pdev, BAR_NUM, 0);
mem              3410 drivers/net/wireless/ath/ath10k/pci.c 	if (!ar_pci->mem) {
mem              3416 drivers/net/wireless/ath/ath10k/pci.c 	ath10k_dbg(ar, ATH10K_DBG_BOOT, "boot pci_mem 0x%pK\n", ar_pci->mem);
mem              3436 drivers/net/wireless/ath/ath10k/pci.c 	pci_iounmap(pdev, ar_pci->mem);
mem               111 drivers/net/wireless/ath/ath10k/pci.h 	void __iomem *mem;
mem               471 drivers/net/wireless/ath/ath10k/snoc.c 	iowrite32(value, ar_snoc->mem + offset);
mem               479 drivers/net/wireless/ath/ath10k/snoc.c 	val = ioread32(ar_snoc->mem + offset);
mem              1242 drivers/net/wireless/ath/ath10k/snoc.c 	ar_snoc->mem = devm_ioremap(&pdev->dev, ar_snoc->mem_pa,
mem              1244 drivers/net/wireless/ath/ath10k/snoc.c 	if (!ar_snoc->mem) {
mem                72 drivers/net/wireless/ath/ath10k/snoc.h 	void __iomem *mem;
mem              4672 drivers/net/wireless/ath/ath10k/wmi.h 	struct wmi_pdev_stats_mem mem;
mem              4682 drivers/net/wireless/ath/ath10k/wmi.h 	struct wmi_pdev_stats_mem mem;
mem                91 drivers/net/wireless/ath/ath5k/ahb.c 	void __iomem *mem;
mem               109 drivers/net/wireless/ath/ath5k/ahb.c 	mem = ioremap_nocache(res->start, resource_size(res));
mem               110 drivers/net/wireless/ath/ath5k/ahb.c 	if (mem == NULL) {
mem               135 drivers/net/wireless/ath/ath5k/ahb.c 	ah->iobase = mem;
mem               186 drivers/net/wireless/ath/ath5k/ahb.c         iounmap(mem);
mem               161 drivers/net/wireless/ath/ath5k/pci.c 	void __iomem *mem;
mem               238 drivers/net/wireless/ath/ath5k/pci.c 	mem = pci_iomap(pdev, 0, 0);
mem               239 drivers/net/wireless/ath/ath5k/pci.c 	if (!mem) {
mem               264 drivers/net/wireless/ath/ath5k/pci.c 	ah->iobase = mem; /* So we can unmap it on detach */
mem               278 drivers/net/wireless/ath/ath5k/pci.c 	pci_iounmap(pdev, mem);
mem                74 drivers/net/wireless/ath/ath9k/ahb.c 	void __iomem *mem;
mem                95 drivers/net/wireless/ath/ath9k/ahb.c 	mem = devm_ioremap_nocache(&pdev->dev, res->start, resource_size(res));
mem                96 drivers/net/wireless/ath/ath9k/ahb.c 	if (mem == NULL) {
mem               122 drivers/net/wireless/ath/ath9k/ahb.c 	sc->mem = mem;
mem               140 drivers/net/wireless/ath/ath9k/ahb.c 		   hw_name, (unsigned long)mem, irq);
mem               993 drivers/net/wireless/ath/ath9k/ath9k.h 	void __iomem *mem;
mem                35 drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c 	void __iomem *mem;
mem                58 drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c 	mem = pcim_iomap(pdev, 0, 0);
mem                59 drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c 	if (!mem) {
mem                87 drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c 		iowrite32(val, mem + reg);
mem                96 drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c 	pcim_iounmap(pdev, mem);
mem               182 drivers/net/wireless/ath/ath9k/init.c 		iowrite32(val, sc->mem + reg_offset);
mem               185 drivers/net/wireless/ath/ath9k/init.c 		iowrite32(val, sc->mem + reg_offset);
mem               198 drivers/net/wireless/ath/ath9k/init.c 		val = ioread32(sc->mem + reg_offset);
mem               201 drivers/net/wireless/ath/ath9k/init.c 		val = ioread32(sc->mem + reg_offset);
mem               220 drivers/net/wireless/ath/ath9k/init.c 	val = ioread32(sc->mem + reg_offset);
mem               223 drivers/net/wireless/ath/ath9k/init.c 	iowrite32(val, sc->mem + reg_offset);
mem               962 drivers/net/wireless/ath/ath9k/pci.c 	sc->mem = pcim_iomap_table(pdev)[0];
mem               997 drivers/net/wireless/ath/ath9k/pci.c 		   hw_name, (unsigned long)sc->mem, pdev->irq);
mem               704 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	struct brcmf_core *mem;
mem               706 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	mem = brcmf_chip_get_core(&ci->pub, BCMA_CORE_ARM_CR4);
mem               707 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	if (mem) {
mem               708 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		mem_core = container_of(mem, struct brcmf_core_priv, pub);
mem               716 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		mem = brcmf_chip_get_core(&ci->pub, BCMA_CORE_SYS_MEM);
mem               717 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		if (mem) {
mem               718 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 			mem_core = container_of(mem, struct brcmf_core_priv,
mem               727 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 			mem = brcmf_chip_get_core(&ci->pub,
mem               729 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 			if (!mem) {
mem               733 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 			mem_core = container_of(mem, struct brcmf_core_priv,
mem               294 drivers/net/wireless/intersil/hostap/hostap_pci.c 	void __iomem *mem = NULL;
mem               316 drivers/net/wireless/intersil/hostap/hostap_pci.c 	mem = pci_ioremap_bar(pdev, 0);
mem               317 drivers/net/wireless/intersil/hostap/hostap_pci.c 	if (mem == NULL) {
mem               332 drivers/net/wireless/intersil/hostap/hostap_pci.c         hw_priv->mem_start = mem;
mem               333 drivers/net/wireless/intersil/hostap/hostap_pci.c 	dev->base_addr = (unsigned long) mem;
mem               361 drivers/net/wireless/intersil/hostap/hostap_pci.c 	if (mem)
mem               362 drivers/net/wireless/intersil/hostap/hostap_pci.c 		iounmap(mem);
mem               139 drivers/net/wireless/intersil/orinoco/orinoco_cs.c 	void __iomem *mem;
mem               154 drivers/net/wireless/intersil/orinoco/orinoco_cs.c 	mem = ioport_map(link->resource[0]->start,
mem               156 drivers/net/wireless/intersil/orinoco/orinoco_cs.c 	if (!mem)
mem               162 drivers/net/wireless/intersil/orinoco/orinoco_cs.c 	hermes_struct_init(hw, mem, HERMES_16BIT_REGSPACING);
mem               201 drivers/net/wireless/intersil/orinoco/spectrum_cs.c 	void __iomem *mem;
mem               216 drivers/net/wireless/intersil/orinoco/spectrum_cs.c 	mem = ioport_map(link->resource[0]->start,
mem               218 drivers/net/wireless/intersil/orinoco/spectrum_cs.c 	if (!mem)
mem               224 drivers/net/wireless/intersil/orinoco/spectrum_cs.c 	hermes_struct_init(hw, mem, HERMES_16BIT_REGSPACING);
mem               747 drivers/net/wireless/intersil/prism54/islpci_dev.c 		kfree(buf->mem);
mem               749 drivers/net/wireless/intersil/prism54/islpci_dev.c 		buf->mem = NULL;
mem                51 drivers/net/wireless/intersil/prism54/islpci_dev.h 	void *mem;                  /* address of memory as seen by CPU */
mem               111 drivers/net/wireless/intersil/prism54/islpci_mgt.c 		if (buf->mem == NULL) {
mem               112 drivers/net/wireless/intersil/prism54/islpci_mgt.c 			buf->mem = kmalloc(MGMT_FRAME_SIZE, GFP_ATOMIC);
mem               113 drivers/net/wireless/intersil/prism54/islpci_mgt.c 			if (!buf->mem)
mem               118 drivers/net/wireless/intersil/prism54/islpci_mgt.c 			buf->pci_addr = pci_map_single(priv->pdev, buf->mem,
mem               177 drivers/net/wireless/intersil/prism54/islpci_mgt.c 	p = buf.mem = kmalloc(frag_len, GFP_KERNEL);
mem               178 drivers/net/wireless/intersil/prism54/islpci_mgt.c 	if (!buf.mem)
mem               194 drivers/net/wireless/intersil/prism54/islpci_mgt.c 		pimfor_header_t *h = buf.mem;
mem               206 drivers/net/wireless/intersil/prism54/islpci_mgt.c 	buf.pci_addr = pci_map_single(priv->pdev, buf.mem, frag_len,
mem               245 drivers/net/wireless/intersil/prism54/islpci_mgt.c 	kfree(buf.mem);
mem               309 drivers/net/wireless/intersil/prism54/islpci_mgt.c 		header = pimfor_decode_header(buf->mem, frag_len);
mem               420 drivers/net/wireless/intersil/prism54/islpci_mgt.c 		kfree(buf->mem);
mem               421 drivers/net/wireless/intersil/prism54/islpci_mgt.c 		buf->mem = NULL;
mem              2353 drivers/net/wireless/marvell/mwifiex/fw.h 		struct host_cmd_ds_mem_access mem;
mem              1121 drivers/net/wireless/marvell/mwifiex/sta_cmd.c 	struct host_cmd_ds_mem_access *mem_access = (void *)&cmd->params.mem;
mem               749 drivers/net/wireless/marvell/mwifiex/sta_cmdresp.c 	struct host_cmd_ds_mem_access *mem = (void *)&resp->params.mem;
mem               751 drivers/net/wireless/marvell/mwifiex/sta_cmdresp.c 	priv->mem_rw.addr = le32_to_cpu(mem->addr);
mem               752 drivers/net/wireless/marvell/mwifiex/sta_cmdresp.c 	priv->mem_rw.value = le32_to_cpu(mem->value);
mem               131 drivers/net/wireless/ti/wl1251/wl1251.h 	struct wl1251_partition mem;
mem               294 drivers/net/wireless/ti/wl12xx/main.c 	.mem = {
mem               491 drivers/net/wireless/ti/wl12xx/main.c 		.mem = {
mem               511 drivers/net/wireless/ti/wl12xx/main.c 		.mem = {
mem               530 drivers/net/wireless/ti/wl12xx/main.c 		.mem = {
mem               549 drivers/net/wireless/ti/wl12xx/main.c 		.mem = {
mem               642 drivers/net/wireless/ti/wl12xx/main.c 		memcpy(&wl->conf.mem, &wl12xx_default_priv_conf.mem_wl127x,
mem               643 drivers/net/wireless/ti/wl12xx/main.c 		       sizeof(wl->conf.mem));
mem               667 drivers/net/wireless/ti/wl12xx/main.c 		memcpy(&wl->conf.mem, &wl12xx_default_priv_conf.mem_wl127x,
mem               668 drivers/net/wireless/ti/wl12xx/main.c 		       sizeof(wl->conf.mem));
mem               432 drivers/net/wireless/ti/wl18xx/main.c 	.mem = {
mem               582 drivers/net/wireless/ti/wl18xx/main.c 		.mem  = { .start = 0x00A00000, .size  = 0x00012000 },
mem               588 drivers/net/wireless/ti/wl18xx/main.c 		.mem  = { .start = 0x00000000, .size  = 0x00014000 },
mem               594 drivers/net/wireless/ti/wl18xx/main.c 		.mem  = { .start = 0x00700000, .size = 0x0000030c },
mem               600 drivers/net/wireless/ti/wl18xx/main.c 		.mem  = { .start = 0x00800000, .size  = 0x000050FC },
mem               606 drivers/net/wireless/ti/wl18xx/main.c 		.mem  = { .start = WL18XX_PHY_INIT_MEM_ADDR,
mem               959 drivers/net/wireless/ti/wlcore/acx.c 	struct conf_memory_settings *mem;
mem               970 drivers/net/wireless/ti/wlcore/acx.c 	mem = &wl->conf.mem;
mem               973 drivers/net/wireless/ti/wlcore/acx.c 	mem_conf->num_stations = mem->num_stations;
mem               974 drivers/net/wireless/ti/wlcore/acx.c 	mem_conf->rx_mem_block_num = mem->rx_block_num;
mem               975 drivers/net/wireless/ti/wlcore/acx.c 	mem_conf->tx_min_mem_block_num = mem->tx_min_block_num;
mem               976 drivers/net/wireless/ti/wlcore/acx.c 	mem_conf->num_ssid_profiles = mem->ssid_profiles;
mem               978 drivers/net/wireless/ti/wlcore/acx.c 	mem_conf->dyn_mem_enable = mem->dynamic_memory;
mem               979 drivers/net/wireless/ti/wlcore/acx.c 	mem_conf->tx_free_req = mem->min_req_tx_blocks;
mem               980 drivers/net/wireless/ti/wlcore/acx.c 	mem_conf->rx_free_req = mem->min_req_rx_blocks;
mem               981 drivers/net/wireless/ti/wlcore/acx.c 	mem_conf->tx_min = mem->tx_min;
mem               187 drivers/net/wireless/ti/wlcore/boot.c 	partition.mem.start = dest;
mem               194 drivers/net/wireless/ti/wlcore/boot.c 	partition_limit = wl->ptable[PART_DOWN].mem.size;
mem               202 drivers/net/wireless/ti/wlcore/boot.c 				wl->ptable[PART_DOWN].mem.size;
mem               203 drivers/net/wireless/ti/wlcore/boot.c 			partition.mem.start = addr;
mem              1141 drivers/net/wireless/ti/wlcore/conf.h 	struct conf_memory_settings mem;
mem              1080 drivers/net/wireless/ti/wlcore/debugfs.c 	part.mem.start = *ppos;
mem              1081 drivers/net/wireless/ti/wlcore/debugfs.c 	part.mem.size = bytes;
mem              1162 drivers/net/wireless/ti/wlcore/debugfs.c 	part.mem.start = *ppos;
mem              1163 drivers/net/wireless/ti/wlcore/debugfs.c 	part.mem.size = bytes;
mem              1322 drivers/net/wireless/ti/wlcore/debugfs.c 	DEBUGFS_ADD_PREFIX(dev, mem, rootdir);
mem                69 drivers/net/wireless/ti/wlcore/io.c 	if ((addr >= part->mem.start) &&
mem                70 drivers/net/wireless/ti/wlcore/io.c 	    (addr < part->mem.start + part->mem.size))
mem                71 drivers/net/wireless/ti/wlcore/io.c 		return addr - part->mem.start;
mem                74 drivers/net/wireless/ti/wlcore/io.c 		return addr - part->reg.start + part->mem.size;
mem                77 drivers/net/wireless/ti/wlcore/io.c 		return addr - part->mem2.start + part->mem.size +
mem                81 drivers/net/wireless/ti/wlcore/io.c 		return addr - part->mem3.start + part->mem.size +
mem               132 drivers/net/wireless/ti/wlcore/io.c 		     p->mem.start, p->mem.size);
mem               140 drivers/net/wireless/ti/wlcore/io.c 	ret = wlcore_raw_write32(wl, HW_PART0_START_ADDR, p->mem.start);
mem               144 drivers/net/wireless/ti/wlcore/io.c 	ret = wlcore_raw_write32(wl, HW_PART0_SIZE_ADDR, p->mem.size);
mem               137 drivers/net/wireless/ti/wlcore/wlcore.h 	struct wlcore_partition mem;
mem              1644 drivers/ntb/hw/intel/ntb_hw_gen1.c 	int rc, mem;
mem              1736 drivers/ntb/hw/intel/ntb_hw_gen1.c 		mem = pci_select_bars(pdev, IORESOURCE_MEM);
mem              1737 drivers/ntb/hw/intel/ntb_hw_gen1.c 		ndev->bar4_split = hweight32(mem) ==
mem              1740 drivers/ntb/hw/intel/ntb_hw_gen1.c 			mem, ndev->bar4_split);
mem              1111 drivers/nvdimm/btt.c 	void *mem = kmap_atomic(page);
mem              1113 drivers/nvdimm/btt.c 	ret = arena_read_bytes(arena, nsoff, mem + off, len, NVDIMM_IO_ATOMIC);
mem              1114 drivers/nvdimm/btt.c 	kunmap_atomic(mem);
mem              1124 drivers/nvdimm/btt.c 	void *mem = kmap_atomic(page);
mem              1126 drivers/nvdimm/btt.c 	ret = arena_write_bytes(arena, nsoff, mem + off, len, NVDIMM_IO_ATOMIC);
mem              1127 drivers/nvdimm/btt.c 	kunmap_atomic(mem);
mem              1134 drivers/nvdimm/btt.c 	void *mem = kmap_atomic(page);
mem              1136 drivers/nvdimm/btt.c 	memset(mem + off, 0, len);
mem              1137 drivers/nvdimm/btt.c 	kunmap_atomic(mem);
mem              1156 drivers/nvdimm/btt.c 		void *mem;
mem              1166 drivers/nvdimm/btt.c 		mem = kmap_atomic(bv.bv_page);
mem              1169 drivers/nvdimm/btt.c 					mem + bv.bv_offset, cur_len,
mem              1173 drivers/nvdimm/btt.c 					mem + bv.bv_offset, cur_len,
mem              1176 drivers/nvdimm/btt.c 		kunmap_atomic(mem);
mem                59 drivers/nvdimm/core.c 		void *mem;
mem               101 drivers/nvdimm/core.c 		nvdimm_map->mem = memremap(offset, size, flags);
mem               105 drivers/nvdimm/core.c 	if (!nvdimm_map->mem)
mem               132 drivers/nvdimm/core.c 		memunmap(nvdimm_map->mem);
mem               175 drivers/nvdimm/core.c 	return nvdimm_map->mem;
mem               104 drivers/nvdimm/pmem.c 	void *mem;
mem               107 drivers/nvdimm/pmem.c 		mem = kmap_atomic(page);
mem               109 drivers/nvdimm/pmem.c 		memcpy_flushcache(pmem_addr, mem + off, chunk);
mem               110 drivers/nvdimm/pmem.c 		kunmap_atomic(mem);
mem               123 drivers/nvdimm/pmem.c 	void *mem;
mem               126 drivers/nvdimm/pmem.c 		mem = kmap_atomic(page);
mem               128 drivers/nvdimm/pmem.c 		rem = memcpy_mcsafe(mem + off, pmem_addr, chunk);
mem               129 drivers/nvdimm/pmem.c 		kunmap_atomic(mem);
mem               884 drivers/of/address.c 	void __iomem *mem;
mem               894 drivers/of/address.c 	mem = ioremap(res.start, resource_size(&res));
mem               895 drivers/of/address.c 	if (!mem) {
mem               900 drivers/of/address.c 	return mem;
mem                95 drivers/of/fdt.c static void *unflatten_dt_alloc(void **mem, unsigned long size,
mem               100 drivers/of/fdt.c 	*mem = PTR_ALIGN(*mem, align);
mem               101 drivers/of/fdt.c 	res = *mem;
mem               102 drivers/of/fdt.c 	*mem += size;
mem               109 drivers/of/fdt.c 				void **mem,
mem               140 drivers/of/fdt.c 		pp = unflatten_dt_alloc(mem, sizeof(struct property),
mem               189 drivers/of/fdt.c 		pp = unflatten_dt_alloc(mem, sizeof(struct property) + len,
mem               210 drivers/of/fdt.c 			  void **mem,
mem               227 drivers/of/fdt.c 	np = unflatten_dt_alloc(mem, sizeof(struct device_node) + allocl,
mem               243 drivers/of/fdt.c 	populate_properties(blob, offset, mem, np, pathp, dryrun);
mem               288 drivers/of/fdt.c 			      void *mem,
mem               296 drivers/of/fdt.c 	void *base = mem;
mem               325 drivers/of/fdt.c 		if (!populate_node(blob, offset, &mem, nps[depth],
mem               327 drivers/of/fdt.c 			return mem - base;
mem               347 drivers/of/fdt.c 	return mem - base;
mem               374 drivers/of/fdt.c 	void *mem;
mem               402 drivers/of/fdt.c 	mem = dt_alloc(size + 4, __alignof__(struct device_node));
mem               403 drivers/of/fdt.c 	if (!mem)
mem               406 drivers/of/fdt.c 	memset(mem, 0, size);
mem               408 drivers/of/fdt.c 	*(__be32 *)(mem + size) = cpu_to_be32(0xdeadbeef);
mem               410 drivers/of/fdt.c 	pr_debug("  unflattening %p...\n", mem);
mem               413 drivers/of/fdt.c 	unflatten_dt_nodes(blob, mem, dad, mynodes);
mem               414 drivers/of/fdt.c 	if (be32_to_cpup(mem + size) != 0xdeadbeef)
mem               416 drivers/of/fdt.c 			   be32_to_cpup(mem + size));
mem               424 drivers/of/fdt.c 	return mem;
mem               452 drivers/of/fdt.c 	void *mem;
mem               455 drivers/of/fdt.c 	mem = __unflatten_device_tree(blob, dad, mynodes, &kernel_tree_alloc,
mem               459 drivers/of/fdt.c 	return mem;
mem               403 drivers/pci/controller/dwc/pci-keystone.c 	u64 start = pp->mem->start;
mem               404 drivers/pci/controller/dwc/pci-keystone.c 	u64 end = pp->mem->end;
mem               393 drivers/pci/controller/dwc/pcie-designware-ep.c 	aligned_offset = msg_addr_lower & (epc->mem->page_size - 1);
mem               397 drivers/pci/controller/dwc/pcie-designware-ep.c 				  epc->mem->page_size);
mem               456 drivers/pci/controller/dwc/pcie-designware-ep.c 				  epc->mem->page_size);
mem               472 drivers/pci/controller/dwc/pcie-designware-ep.c 			      epc->mem->page_size);
mem               585 drivers/pci/controller/dwc/pcie-designware-ep.c 					     epc->mem->page_size);
mem               373 drivers/pci/controller/dwc/pcie-designware-host.c 			pp->mem = win->res;
mem               374 drivers/pci/controller/dwc/pcie-designware-host.c 			pp->mem->name = "MEM";
mem               375 drivers/pci/controller/dwc/pcie-designware-host.c 			pp->mem_size = resource_size(pp->mem);
mem               376 drivers/pci/controller/dwc/pcie-designware-host.c 			pp->mem_bus_addr = pp->mem->start - win->offset;
mem               401 drivers/pci/controller/dwc/pcie-designware-host.c 	pp->mem_base = pp->mem->start;
mem               181 drivers/pci/controller/dwc/pcie-designware.h 	struct resource		*mem;
mem               436 drivers/pci/controller/pci-ftpci100.c 	struct resource *mem;
mem               514 drivers/pci/controller/pci-ftpci100.c 			mem = win->res;
mem               515 drivers/pci/controller/pci-ftpci100.c 			mem->name = "Gemini PCI MEM";
mem                77 drivers/pci/controller/pci-mvebu.c 	struct resource mem;
mem               977 drivers/pci/controller/pci-mvebu.c 	mvebu_mbus_get_pcie_mem_aperture(&pcie->mem);
mem               978 drivers/pci/controller/pci-mvebu.c 	if (resource_size(&pcie->mem) == 0) {
mem               983 drivers/pci/controller/pci-mvebu.c 	pcie->mem.name = "PCI MEM";
mem               984 drivers/pci/controller/pci-mvebu.c 	pci_add_resource(&pcie->resources, &pcie->mem);
mem               360 drivers/pci/controller/pci-tegra.c 	struct resource mem;
mem               365 drivers/pci/controller/pci-tegra.c 		resource_size_t mem;
mem               808 drivers/pci/controller/pci-tegra.c 	pci_add_resource_offset(windows, &pcie->mem, pcie->offset.mem);
mem               809 drivers/pci/controller/pci-tegra.c 	pci_add_resource_offset(windows, &pcie->prefetch, pcie->offset.mem);
mem               936 drivers/pci/controller/pci-tegra.c 	fpci_bar = (((pcie->mem.start >> 12) & 0x0fffffff) << 4) | 0x1;
mem               937 drivers/pci/controller/pci-tegra.c 	size = resource_size(&pcie->mem);
mem               938 drivers/pci/controller/pci-tegra.c 	axi_address = pcie->mem.start;
mem              2208 drivers/pci/controller/pci-tegra.c 			pcie->offset.mem = res.start - range.pci_addr;
mem              2214 drivers/pci/controller/pci-tegra.c 				memcpy(&pcie->mem, &res, sizeof(res));
mem              2215 drivers/pci/controller/pci-tegra.c 				pcie->mem.name = "non-prefetchable";
mem               528 drivers/pci/controller/pci-v3-semi.c 	struct resource *mem;
mem               555 drivers/pci/controller/pci-v3-semi.c 		mem = win->res;
mem               556 drivers/pci/controller/pci-v3-semi.c 		if (mem->flags & IORESOURCE_PREFETCH) {
mem               557 drivers/pci/controller/pci-v3-semi.c 			mem->name = "V3 PCI PRE-MEM";
mem               558 drivers/pci/controller/pci-v3-semi.c 			v3->pre_mem = mem->start;
mem               559 drivers/pci/controller/pci-v3-semi.c 			v3->pre_bus_addr = mem->start - win->offset;
mem               561 drivers/pci/controller/pci-v3-semi.c 				mem, &v3->pre_bus_addr);
mem               562 drivers/pci/controller/pci-v3-semi.c 			if (resource_size(mem) != SZ_256M) {
mem               567 drivers/pci/controller/pci-v3-semi.c 			    (mem->start != v3->non_pre_mem + SZ_256M)) {
mem               582 drivers/pci/controller/pci-v3-semi.c 			mem->name = "V3 PCI NON-PRE-MEM";
mem               583 drivers/pci/controller/pci-v3-semi.c 			v3->non_pre_mem = mem->start;
mem               584 drivers/pci/controller/pci-v3-semi.c 			v3->non_pre_bus_addr = mem->start - win->offset;
mem               586 drivers/pci/controller/pci-v3-semi.c 				mem, &v3->non_pre_bus_addr);
mem               587 drivers/pci/controller/pci-v3-semi.c 			if (resource_size(mem) != SZ_256M) {
mem                68 drivers/pci/controller/pci-versatile.c 	int err, mem = 1, res_valid = 0;
mem                95 drivers/pci/controller/pci-versatile.c 			writel(res->start >> 28, PCI_IMAP(mem));
mem                96 drivers/pci/controller/pci-versatile.c 			writel(PHYS_OFFSET >> 28, PCI_SMAP(mem));
mem                97 drivers/pci/controller/pci-versatile.c 			mem++;
mem                59 drivers/pci/controller/pcie-iproc-bcma.c 	pcie->mem.start = bdev->addr_s[0];
mem                60 drivers/pci/controller/pcie-iproc-bcma.c 	pcie->mem.end = bdev->addr_s[0] + SZ_128M - 1;
mem                61 drivers/pci/controller/pcie-iproc-bcma.c 	pcie->mem.name = "PCIe MEM space";
mem                62 drivers/pci/controller/pcie-iproc-bcma.c 	pcie->mem.flags = IORESOURCE_MEM;
mem                63 drivers/pci/controller/pcie-iproc-bcma.c 	pci_add_resource(&resources, &pcie->mem);
mem                87 drivers/pci/controller/pcie-iproc.h 	struct resource mem;
mem               219 drivers/pci/controller/pcie-mediatek.c 	struct resource mem;
mem               664 drivers/pci/controller/pcie-mediatek.c 	struct resource *mem = &pcie->mem;
mem               721 drivers/pci/controller/pcie-mediatek.c 	val = lower_32_bits(mem->start) |
mem               722 drivers/pci/controller/pcie-mediatek.c 	      AHB2PCIE_SIZE(fls(resource_size(mem)));
mem               725 drivers/pci/controller/pcie-mediatek.c 	val = upper_32_bits(mem->start);
mem              1051 drivers/pci/controller/pcie-mediatek.c 			memcpy(&pcie->mem, win->res, sizeof(*win->res));
mem              1052 drivers/pci/controller/pcie-mediatek.c 			pcie->mem.name = "non-prefetchable";
mem               955 drivers/pci/controller/pcie-rockchip-host.c 	struct resource	*mem;
mem              1024 drivers/pci/controller/pcie-rockchip-host.c 			mem = win->res;
mem              1025 drivers/pci/controller/pcie-rockchip-host.c 			mem->name = "MEM";
mem              1026 drivers/pci/controller/pcie-rockchip-host.c 			rockchip->mem_size = resource_size(mem);
mem              1027 drivers/pci/controller/pcie-rockchip-host.c 			rockchip->mem_bus_addr = mem->start - win->offset;
mem                23 drivers/pci/endpoint/pci-epc-mem.c static int pci_epc_mem_get_order(struct pci_epc_mem *mem, size_t size)
mem                26 drivers/pci/endpoint/pci-epc-mem.c 	unsigned int page_shift = ilog2(mem->page_size);
mem                52 drivers/pci/endpoint/pci-epc-mem.c 	struct pci_epc_mem *mem;
mem                65 drivers/pci/endpoint/pci-epc-mem.c 	mem = kzalloc(sizeof(*mem), GFP_KERNEL);
mem                66 drivers/pci/endpoint/pci-epc-mem.c 	if (!mem) {
mem                77 drivers/pci/endpoint/pci-epc-mem.c 	mem->bitmap = bitmap;
mem                78 drivers/pci/endpoint/pci-epc-mem.c 	mem->phys_base = phys_base;
mem                79 drivers/pci/endpoint/pci-epc-mem.c 	mem->page_size = page_size;
mem                80 drivers/pci/endpoint/pci-epc-mem.c 	mem->pages = pages;
mem                81 drivers/pci/endpoint/pci-epc-mem.c 	mem->size = size;
mem                82 drivers/pci/endpoint/pci-epc-mem.c 	mutex_init(&mem->lock);
mem                84 drivers/pci/endpoint/pci-epc-mem.c 	epc->mem = mem;
mem                89 drivers/pci/endpoint/pci-epc-mem.c 	kfree(mem);
mem               105 drivers/pci/endpoint/pci-epc-mem.c 	struct pci_epc_mem *mem = epc->mem;
mem               107 drivers/pci/endpoint/pci-epc-mem.c 	epc->mem = NULL;
mem               108 drivers/pci/endpoint/pci-epc-mem.c 	kfree(mem->bitmap);
mem               109 drivers/pci/endpoint/pci-epc-mem.c 	kfree(mem);
mem               127 drivers/pci/endpoint/pci-epc-mem.c 	struct pci_epc_mem *mem = epc->mem;
mem               128 drivers/pci/endpoint/pci-epc-mem.c 	unsigned int page_shift = ilog2(mem->page_size);
mem               131 drivers/pci/endpoint/pci-epc-mem.c 	size = ALIGN(size, mem->page_size);
mem               132 drivers/pci/endpoint/pci-epc-mem.c 	order = pci_epc_mem_get_order(mem, size);
mem               134 drivers/pci/endpoint/pci-epc-mem.c 	mutex_lock(&mem->lock);
mem               135 drivers/pci/endpoint/pci-epc-mem.c 	pageno = bitmap_find_free_region(mem->bitmap, mem->pages, order);
mem               139 drivers/pci/endpoint/pci-epc-mem.c 	*phys_addr = mem->phys_base + (pageno << page_shift);
mem               142 drivers/pci/endpoint/pci-epc-mem.c 		bitmap_release_region(mem->bitmap, pageno, order);
mem               145 drivers/pci/endpoint/pci-epc-mem.c 	mutex_unlock(&mem->lock);
mem               163 drivers/pci/endpoint/pci-epc-mem.c 	struct pci_epc_mem *mem = epc->mem;
mem               164 drivers/pci/endpoint/pci-epc-mem.c 	unsigned int page_shift = ilog2(mem->page_size);
mem               168 drivers/pci/endpoint/pci-epc-mem.c 	pageno = (phys_addr - mem->phys_base) >> page_shift;
mem               169 drivers/pci/endpoint/pci-epc-mem.c 	size = ALIGN(size, mem->page_size);
mem               170 drivers/pci/endpoint/pci-epc-mem.c 	order = pci_epc_mem_get_order(mem, size);
mem               171 drivers/pci/endpoint/pci-epc-mem.c 	mutex_lock(&mem->lock);
mem               172 drivers/pci/endpoint/pci-epc-mem.c 	bitmap_release_region(mem->bitmap, pageno, order);
mem               173 drivers/pci/endpoint/pci-epc-mem.c 	mutex_unlock(&mem->lock);
mem               361 drivers/pci/hotplug/ibmphp.h 	u32 mem;
mem               682 drivers/pci/hotplug/ibmphp.h 	struct resource_node *mem[6];
mem               317 drivers/pci/hotplug/ibmphp_pci.c 		} else if (cur_func->mem[i]) {
mem               318 drivers/pci/hotplug/ibmphp_pci.c 			ibmphp_remove_resource(cur_func->mem[i]);
mem               319 drivers/pci/hotplug/ibmphp_pci.c 			cur_func->mem[i] = NULL;
mem               346 drivers/pci/hotplug/ibmphp_pci.c 	struct resource_node *mem[6];
mem               489 drivers/pci/hotplug/ibmphp_pci.c 				mem[count] = kzalloc(sizeof(struct resource_node), GFP_KERNEL);
mem               490 drivers/pci/hotplug/ibmphp_pci.c 				if (!mem[count])
mem               493 drivers/pci/hotplug/ibmphp_pci.c 				mem[count]->type = MEM;
mem               494 drivers/pci/hotplug/ibmphp_pci.c 				mem[count]->busno = func->busno;
mem               495 drivers/pci/hotplug/ibmphp_pci.c 				mem[count]->devfunc = PCI_DEVFN(func->device,
mem               497 drivers/pci/hotplug/ibmphp_pci.c 				mem[count]->len = len[count];
mem               498 drivers/pci/hotplug/ibmphp_pci.c 				if (ibmphp_check_resource(mem[count], 0) == 0) {
mem               499 drivers/pci/hotplug/ibmphp_pci.c 					ibmphp_add_resource(mem[count]);
mem               500 drivers/pci/hotplug/ibmphp_pci.c 					func->mem[count] = mem[count];
mem               504 drivers/pci/hotplug/ibmphp_pci.c 					kfree(mem[count]);
mem               507 drivers/pci/hotplug/ibmphp_pci.c 				pci_bus_write_config_dword(ibmphp_pci_bus, devfn, address[count], func->mem[count]->start);
mem               509 drivers/pci/hotplug/ibmphp_pci.c 				debug("b4 writing, start address is %x\n", func->mem[count]->start);
mem               562 drivers/pci/hotplug/ibmphp_pci.c 	struct resource_node *mem = NULL;
mem               751 drivers/pci/hotplug/ibmphp_pci.c 					func->mem[count] = bus_mem[count];
mem               759 drivers/pci/hotplug/ibmphp_pci.c 				pci_bus_write_config_dword(ibmphp_pci_bus, devfn, address[count], func->mem[count]->start);
mem               781 drivers/pci/hotplug/ibmphp_pci.c 	debug("amount_needed->mem = %x\n", amount_needed->mem);
mem               796 drivers/pci/hotplug/ibmphp_pci.c 				func->mem[count] = NULL;
mem               825 drivers/pci/hotplug/ibmphp_pci.c 	if (!amount_needed->mem) {
mem               829 drivers/pci/hotplug/ibmphp_pci.c 		debug("it wants %x memory behind the bridge\n", amount_needed->mem);
mem               830 drivers/pci/hotplug/ibmphp_pci.c 		mem = kzalloc(sizeof(*mem), GFP_KERNEL);
mem               831 drivers/pci/hotplug/ibmphp_pci.c 		if (!mem) {
mem               835 drivers/pci/hotplug/ibmphp_pci.c 		mem->type = MEM;
mem               836 drivers/pci/hotplug/ibmphp_pci.c 		mem->busno = func->busno;
mem               837 drivers/pci/hotplug/ibmphp_pci.c 		mem->devfunc = PCI_DEVFN(func->device, func->function);
mem               838 drivers/pci/hotplug/ibmphp_pci.c 		mem->len = amount_needed->mem;
mem               839 drivers/pci/hotplug/ibmphp_pci.c 		if (ibmphp_check_resource(mem, 1) == 0) {
mem               840 drivers/pci/hotplug/ibmphp_pci.c 			ibmphp_add_resource(mem);
mem               902 drivers/pci/hotplug/ibmphp_pci.c 			rc = add_new_bus(bus, io, mem, pfmem, func->busno);
mem               904 drivers/pci/hotplug/ibmphp_pci.c 			rc = add_new_bus(bus, io, mem, pfmem, 0xFF);
mem              1024 drivers/pci/hotplug/ibmphp_pci.c 	if (mem)
mem              1025 drivers/pci/hotplug/ibmphp_pci.c 		ibmphp_remove_resource(mem);
mem              1035 drivers/pci/hotplug/ibmphp_pci.c 			func->mem[i] = NULL;
mem              1151 drivers/pci/hotplug/ibmphp_pci.c 							amount->mem += len[count];
mem              1169 drivers/pci/hotplug/ibmphp_pci.c 	if ((amount->mem) && (amount->mem < MEMBRIDGE))
mem              1170 drivers/pci/hotplug/ibmphp_pci.c 		amount->mem = MEMBRIDGE;
mem              1197 drivers/pci/hotplug/ibmphp_pci.c 	struct resource_node *mem;
mem              1280 drivers/pci/hotplug/ibmphp_pci.c 				if (ibmphp_find_resource(bus, start_address, &mem, MEM) < 0) {
mem              1284 drivers/pci/hotplug/ibmphp_pci.c 				if (mem) {
mem              1285 drivers/pci/hotplug/ibmphp_pci.c 					debug("mem->start = %x\n", mem->start);
mem              1287 drivers/pci/hotplug/ibmphp_pci.c 					ibmphp_remove_resource(mem);
mem              1311 drivers/pci/hotplug/ibmphp_pci.c 	struct resource_node *mem = NULL;
mem              1396 drivers/pci/hotplug/ibmphp_pci.c 				if (ibmphp_find_resource(bus, start_address, &mem, MEM) < 0) {
mem              1400 drivers/pci/hotplug/ibmphp_pci.c 				if (mem) {
mem              1401 drivers/pci/hotplug/ibmphp_pci.c 					debug("mem->start = %x\n", mem->start);
mem              1403 drivers/pci/hotplug/ibmphp_pci.c 					ibmphp_remove_resource(mem);
mem              1571 drivers/pci/hotplug/ibmphp_pci.c 				if (cur_func->mem[i]) {
mem              1574 drivers/pci/hotplug/ibmphp_pci.c 						ibmphp_remove_resource(cur_func->mem[i]);
mem              1575 drivers/pci/hotplug/ibmphp_pci.c 					cur_func->mem[i] = NULL;
mem              1605 drivers/pci/hotplug/ibmphp_pci.c static int add_new_bus(struct bus_node *bus, struct resource_node *io, struct resource_node *mem, struct resource_node *pfmem, u8 parent_busno)
mem              1633 drivers/pci/hotplug/ibmphp_pci.c 	if (mem) {
mem              1638 drivers/pci/hotplug/ibmphp_pci.c 		mem_range->start = mem->start;
mem              1639 drivers/pci/hotplug/ibmphp_pci.c 		mem_range->end = mem->end;
mem              1667 drivers/pci/hotplug/ibmphp_res.c 	struct resource_node *mem;
mem              1690 drivers/pci/hotplug/ibmphp_res.c 				mem = kzalloc(sizeof(struct resource_node), GFP_KERNEL);
mem              1691 drivers/pci/hotplug/ibmphp_res.c 				if (!mem)
mem              1694 drivers/pci/hotplug/ibmphp_res.c 				mem->type = MEM;
mem              1695 drivers/pci/hotplug/ibmphp_res.c 				mem->busno = pfmem_cur->busno;
mem              1696 drivers/pci/hotplug/ibmphp_res.c 				mem->devfunc = pfmem_cur->devfunc;
mem              1697 drivers/pci/hotplug/ibmphp_res.c 				mem->start = pfmem_cur->start;
mem              1698 drivers/pci/hotplug/ibmphp_res.c 				mem->end = pfmem_cur->end;
mem              1699 drivers/pci/hotplug/ibmphp_res.c 				mem->len = pfmem_cur->len;
mem              1700 drivers/pci/hotplug/ibmphp_res.c 				if (ibmphp_add_resource(mem) < 0)
mem              1702 drivers/pci/hotplug/ibmphp_res.c 				pfmem_cur->rangeno = mem->rangeno;
mem              1914 drivers/pci/hotplug/ibmphp_res.c 	struct resource_node *mem;
mem              2043 drivers/pci/hotplug/ibmphp_res.c 							if (ibmphp_find_resource(bus_cur, start_address, &mem, MEM)) {
mem              2044 drivers/pci/hotplug/ibmphp_res.c 								mem = kzalloc(sizeof(struct resource_node), GFP_KERNEL);
mem              2045 drivers/pci/hotplug/ibmphp_res.c 								if (!mem) {
mem              2049 drivers/pci/hotplug/ibmphp_res.c 								mem->type = MEM;
mem              2050 drivers/pci/hotplug/ibmphp_res.c 								mem->busno = bus_cur->busno;
mem              2051 drivers/pci/hotplug/ibmphp_res.c 								mem->devfunc = ((device << 3) | (function & 0x7));
mem              2052 drivers/pci/hotplug/ibmphp_res.c 								mem->start = start_address;
mem              2053 drivers/pci/hotplug/ibmphp_res.c 								mem->end = end_address + 0xfffff;
mem              2054 drivers/pci/hotplug/ibmphp_res.c 								mem->len = mem->end - mem->start + 1;
mem              2055 drivers/pci/hotplug/ibmphp_res.c 								ibmphp_add_resource(mem);
mem                88 drivers/pcmcia/cistpl.c 	pccard_mem_map *mem = &s->cis_mem;
mem                91 drivers/pcmcia/cistpl.c 	if (!(s->features & SS_CAP_STATIC_MAP) && (mem->res == NULL)) {
mem                92 drivers/pcmcia/cistpl.c 		mem->res = pcmcia_find_mem_region(0, s->map_size,
mem                94 drivers/pcmcia/cistpl.c 		if (mem->res == NULL) {
mem               102 drivers/pcmcia/cistpl.c 		s->cis_virt = ioremap(mem->res->start, s->map_size);
mem               104 drivers/pcmcia/cistpl.c 	mem->card_start = card_offset;
mem               105 drivers/pcmcia/cistpl.c 	mem->flags = flags;
mem               107 drivers/pcmcia/cistpl.c 	ret = s->ops->set_mem_map(s, mem);
mem               117 drivers/pcmcia/cistpl.c 		s->cis_virt = ioremap(mem->static_start, s->map_size);
mem              1021 drivers/pcmcia/cistpl.c static u_char *parse_mem(u_char *p, u_char *q, cistpl_mem_t *mem)
mem              1029 drivers/pcmcia/cistpl.c 	mem->nwin = (*p & 0x07) + 1;
mem              1036 drivers/pcmcia/cistpl.c 	for (i = 0; i < mem->nwin; i++) {
mem              1054 drivers/pcmcia/cistpl.c 		mem->win[i].len = len << 8;
mem              1055 drivers/pcmcia/cistpl.c 		mem->win[i].card_addr = ca << 8;
mem              1056 drivers/pcmcia/cistpl.c 		mem->win[i].host_addr = ha << 8;
mem              1157 drivers/pcmcia/cistpl.c 		entry->mem.nwin = 0;
mem              1160 drivers/pcmcia/cistpl.c 		entry->mem.nwin = 1;
mem              1161 drivers/pcmcia/cistpl.c 		entry->mem.win[0].len = get_unaligned_le16(p) << 8;
mem              1162 drivers/pcmcia/cistpl.c 		entry->mem.win[0].card_addr = 0;
mem              1163 drivers/pcmcia/cistpl.c 		entry->mem.win[0].host_addr = 0;
mem              1169 drivers/pcmcia/cistpl.c 		entry->mem.nwin = 1;
mem              1170 drivers/pcmcia/cistpl.c 		entry->mem.win[0].len = get_unaligned_le16(p) << 8;
mem              1171 drivers/pcmcia/cistpl.c 		entry->mem.win[0].card_addr = get_unaligned_le16(p + 2) << 8;
mem              1172 drivers/pcmcia/cistpl.c 		entry->mem.win[0].host_addr = 0;
mem              1178 drivers/pcmcia/cistpl.c 		p = parse_mem(p, q, &entry->mem);
mem                34 drivers/pcmcia/cs_internal.h 	struct resource mem[MAX_WIN];   /* mem areas */
mem               565 drivers/pcmcia/ds.c 			c->mem[i].name = p_dev->devname;
mem               566 drivers/pcmcia/ds.c 			c->mem[i].flags = IORESOURCE_MEM;
mem               572 drivers/pcmcia/ds.c 		p_dev->resource[i] = &p_dev->function_config->mem[i-MAX_IO_WIN];
mem               180 drivers/pcmcia/electra_cf.c 	struct resource mem, io;
mem               186 drivers/pcmcia/electra_cf.c 	err = of_address_to_resource(np, 0, &mem);
mem               202 drivers/pcmcia/electra_cf.c 	cf->mem_phys = mem.start;
mem               203 drivers/pcmcia/electra_cf.c 	cf->mem_size = PAGE_ALIGN(resource_size(&mem));
mem               230 drivers/pcmcia/electra_cf.c 	cf->iomem.end = (unsigned long)cf->mem_base + (mem.end - mem.start);
mem               401 drivers/pcmcia/i82092.c 	pccard_mem_map mem = { .res = &res, };
mem               410 drivers/pcmcia/i82092.c         	mem.map = i;
mem               411 drivers/pcmcia/i82092.c                 i82092aa_set_mem_map(sock, &mem);
mem               602 drivers/pcmcia/i82092.c static int i82092aa_set_mem_map(struct pcmcia_socket *socket, struct pccard_mem_map *mem)
mem               612 drivers/pcmcia/i82092.c 	pcibios_resource_to_bus(sock_info->dev->bus, &region, mem->res);
mem               614 drivers/pcmcia/i82092.c 	map = mem->map;
mem               621 drivers/pcmcia/i82092.c 	if ( (mem->card_start > 0x3ffffff) || (region.start > region.end) ||
mem               622 drivers/pcmcia/i82092.c 	     (mem->speed > 1000) ) {
mem               629 drivers/pcmcia/i82092.c 			mem->card_start);
mem               643 drivers/pcmcia/i82092.c 	if (mem->flags & MAP_16BIT) 
mem               645 drivers/pcmcia/i82092.c 	if (mem->flags & MAP_0WS)
mem               652 drivers/pcmcia/i82092.c 	switch (to_cycles(mem->speed)) {
mem               670 drivers/pcmcia/i82092.c 	i = ((mem->card_start - region.start) >> 12) & 0x3fff;
mem               671 drivers/pcmcia/i82092.c 	if (mem->flags & MAP_WRPROT)
mem               673 drivers/pcmcia/i82092.c 	if (mem->flags & MAP_ATTRIB) {
mem               682 drivers/pcmcia/i82092.c 	if (mem->flags & MAP_ACTIVE)
mem                33 drivers/pcmcia/i82092aa.h static int i82092aa_set_mem_map(struct pcmcia_socket *socket, struct pccard_mem_map *mem);
mem              1061 drivers/pcmcia/i82365.c static int i365_set_mem_map(u_short sock, struct pccard_mem_map *mem)
mem              1067 drivers/pcmcia/i82365.c 	  "%#x)\n", sock, mem->map, mem->flags, mem->speed,
mem              1068 drivers/pcmcia/i82365.c 	  (unsigned long long)mem->res->start,
mem              1069 drivers/pcmcia/i82365.c 	  (unsigned long long)mem->res->end, mem->card_start);
mem              1071 drivers/pcmcia/i82365.c     map = mem->map;
mem              1072 drivers/pcmcia/i82365.c     if ((map > 4) || (mem->card_start > 0x3ffffff) ||
mem              1073 drivers/pcmcia/i82365.c 	(mem->res->start > mem->res->end) || (mem->speed > 1000))
mem              1075 drivers/pcmcia/i82365.c     if ((mem->res->start > 0xffffff) || (mem->res->end > 0xffffff))
mem              1083 drivers/pcmcia/i82365.c     i = (mem->res->start >> 12) & 0x0fff;
mem              1084 drivers/pcmcia/i82365.c     if (mem->flags & MAP_16BIT) i |= I365_MEM_16BIT;
mem              1085 drivers/pcmcia/i82365.c     if (mem->flags & MAP_0WS) i |= I365_MEM_0WS;
mem              1088 drivers/pcmcia/i82365.c     i = (mem->res->end >> 12) & 0x0fff;
mem              1089 drivers/pcmcia/i82365.c     switch (to_cycles(mem->speed)) {
mem              1097 drivers/pcmcia/i82365.c     i = ((mem->card_start - mem->res->start) >> 12) & 0x3fff;
mem              1098 drivers/pcmcia/i82365.c     if (mem->flags & MAP_WRPROT) i |= I365_MEM_WRPROT;
mem              1099 drivers/pcmcia/i82365.c     if (mem->flags & MAP_ATTRIB) i |= I365_MEM_REG;
mem              1103 drivers/pcmcia/i82365.c     if (mem->flags & MAP_ACTIVE)
mem              1195 drivers/pcmcia/i82365.c static int pcic_set_mem_map(struct pcmcia_socket *s, struct pccard_mem_map *mem)
mem              1201 drivers/pcmcia/i82365.c 	LOCKED(i365_set_mem_map(sock, mem));
mem              1209 drivers/pcmcia/i82365.c 	pccard_mem_map mem = { .res = &res, };
mem              1216 drivers/pcmcia/i82365.c 		mem.map = i;
mem              1217 drivers/pcmcia/i82365.c 		pcic_set_mem_map(s, &mem);
mem               225 drivers/pcmcia/pcmcia_cis.c 		cistpl_mem_t *mem = (cfg->mem.nwin) ? &cfg->mem : &dflt->mem;
mem               228 drivers/pcmcia/pcmcia_cis.c 		if (mem->nwin == 0)
mem               231 drivers/pcmcia/pcmcia_cis.c 		p_dev->resource[2]->start = mem->win[0].host_addr;
mem               232 drivers/pcmcia/pcmcia_cis.c 		p_dev->resource[2]->end = mem->win[0].len;
mem               235 drivers/pcmcia/pcmcia_cis.c 		p_dev->card_addr = mem->win[0].card_addr;
mem               467 drivers/pcmcia/pd6729.c 			      struct pccard_mem_map *mem)
mem               474 drivers/pcmcia/pd6729.c 	map = mem->map;
mem               480 drivers/pcmcia/pd6729.c 	if ((mem->res->start > mem->res->end) || (mem->speed > 1000)) {
mem               491 drivers/pcmcia/pd6729.c 	i = (mem->res->start >> 12) & 0x0fff;
mem               492 drivers/pcmcia/pd6729.c 	if (mem->flags & MAP_16BIT)
mem               494 drivers/pcmcia/pd6729.c 	if (mem->flags & MAP_0WS)
mem               500 drivers/pcmcia/pd6729.c 	i = (mem->res->end >> 12) & 0x0fff;
mem               501 drivers/pcmcia/pd6729.c 	switch (to_cycles(mem->speed)) {
mem               519 drivers/pcmcia/pd6729.c 	indirect_write(socket, PD67_EXT_DATA, mem->res->start >> 24);
mem               523 drivers/pcmcia/pd6729.c 	i = ((mem->card_start - mem->res->start) >> 12) & 0x3fff;
mem               524 drivers/pcmcia/pd6729.c 	if (mem->flags & MAP_WRPROT)
mem               526 drivers/pcmcia/pd6729.c 	if (mem->flags & MAP_ATTRIB) {
mem               537 drivers/pcmcia/pd6729.c 	if (mem->flags & MAP_ACTIVE)
mem               548 drivers/pcmcia/pd6729.c 	pccard_mem_map mem = { .res = &res, };
mem               556 drivers/pcmcia/pd6729.c 		mem.map = i;
mem               557 drivers/pcmcia/pd6729.c 		pd6729_set_mem_map(sock, &mem);
mem               171 drivers/pcmcia/pxa2xx_base.c 	pxa2xx_pcmcia_set_mcmem(sock, timing.mem, clk);
mem                89 drivers/pcmcia/sa11xx_base.c 	bs_mem = skt->ops->get_timing(skt, cpu_clock, timing.mem);
mem               159 drivers/pcmcia/sa11xx_base.c 	p+=sprintf(p, "common   : %uns (%uns)\n", timing.mem,
mem               139 drivers/pcmcia/soc_common.c 	timing->mem =
mem               149 drivers/pcmcia/soc_common.h 	unsigned short mem;
mem               731 drivers/pcmcia/tcic.c static int tcic_set_mem_map(struct pcmcia_socket *sock, struct pccard_mem_map *mem)
mem               738 drivers/pcmcia/tcic.c 	  "%#llx-%#llx, %#x)\n", psock, mem->map, mem->flags,
mem               739 drivers/pcmcia/tcic.c 	  mem->speed, (unsigned long long)mem->res->start,
mem               740 drivers/pcmcia/tcic.c 	  (unsigned long long)mem->res->end, mem->card_start);
mem               741 drivers/pcmcia/tcic.c     if ((mem->map > 3) || (mem->card_start > 0x3ffffff) ||
mem               742 drivers/pcmcia/tcic.c 	(mem->res->start > 0xffffff) || (mem->res->end > 0xffffff) ||
mem               743 drivers/pcmcia/tcic.c 	(mem->res->start > mem->res->end) || (mem->speed > 1000))
mem               746 drivers/pcmcia/tcic.c     addr = TCIC_MWIN(psock, mem->map);
mem               748 drivers/pcmcia/tcic.c     base = mem->res->start; len = mem->res->end - mem->res->start;
mem               757 drivers/pcmcia/tcic.c     mmap = mem->card_start - mem->res->start;
mem               759 drivers/pcmcia/tcic.c     if (mem->flags & MAP_ATTRIB) mmap |= TCIC_MMAP_REG;
mem               764 drivers/pcmcia/tcic.c     ctl |= to_cycles(mem->speed) & TCIC_MCTL_WSCNT_MASK;
mem               765 drivers/pcmcia/tcic.c     ctl |= (mem->flags & MAP_16BIT) ? 0 : TCIC_MCTL_B8;
mem               766 drivers/pcmcia/tcic.c     ctl |= (mem->flags & MAP_WRPROT) ? TCIC_MCTL_WP : 0;
mem               767 drivers/pcmcia/tcic.c     ctl |= (mem->flags & MAP_ACTIVE) ? TCIC_MCTL_ENA : 0;
mem               781 drivers/pcmcia/tcic.c 	pccard_mem_map mem = { .res = &res, };
mem               788 drivers/pcmcia/tcic.c 		mem.map = i;
mem               789 drivers/pcmcia/tcic.c 		tcic_set_mem_map(s, &mem);
mem               409 drivers/pcmcia/vrc4171_card.c static int pccard_set_mem_map(struct pcmcia_socket *sock, struct pccard_mem_map *mem)
mem               417 drivers/pcmcia/vrc4171_card.c 	    mem == NULL || mem->map >= MEM_MAX_MAPS ||
mem               418 drivers/pcmcia/vrc4171_card.c 	    mem->res->start < CARD_MEM_START || mem->res->start > CARD_MEM_END ||
mem               419 drivers/pcmcia/vrc4171_card.c 	    mem->res->end < CARD_MEM_START || mem->res->end > CARD_MEM_END ||
mem               420 drivers/pcmcia/vrc4171_card.c 	    mem->res->start > mem->res->end ||
mem               421 drivers/pcmcia/vrc4171_card.c 	    mem->card_start > CARD_MAX_MEM_OFFSET ||
mem               422 drivers/pcmcia/vrc4171_card.c 	    mem->speed > CARD_MAX_MEM_SPEED)
mem               426 drivers/pcmcia/vrc4171_card.c 	map = mem->map;
mem               434 drivers/pcmcia/vrc4171_card.c 	start = (mem->res->start >> 12) & 0x3fff;
mem               435 drivers/pcmcia/vrc4171_card.c 	if (mem->flags & MAP_16BIT)
mem               439 drivers/pcmcia/vrc4171_card.c 	stop = (mem->res->end >> 12) & 0x3fff;
mem               440 drivers/pcmcia/vrc4171_card.c 	switch (mem->speed) {
mem               455 drivers/pcmcia/vrc4171_card.c 	offset = (mem->card_start >> 12) & 0x3fff;
mem               456 drivers/pcmcia/vrc4171_card.c 	if (mem->flags & MAP_ATTRIB)
mem               458 drivers/pcmcia/vrc4171_card.c 	if (mem->flags & MAP_WRPROT)
mem               462 drivers/pcmcia/vrc4171_card.c 	if (mem->flags & MAP_ACTIVE) {
mem               305 drivers/pcmcia/vrc4173_cardu.c static int cardu_get_mem_map(unsigned int sock, struct pccard_mem_map *mem)
mem               312 drivers/pcmcia/vrc4173_cardu.c 	map = mem->map;
mem               317 drivers/pcmcia/vrc4173_cardu.c 	mem->flags = (window & MEM_WIN_EN(map)) ? MAP_ACTIVE : 0;
mem               320 drivers/pcmcia/vrc4173_cardu.c 	mem->flags |= (start & MEM_WIN_DSIZE) ? MAP_16BIT : 0;
mem               327 drivers/pcmcia/vrc4173_cardu.c 	mem->flags |= (offset & MEM_WIN_WP) ? MAP_WRPROT : 0;
mem               328 drivers/pcmcia/vrc4173_cardu.c 	mem->flags |= (offset & MEM_WIN_REGSET) ? MAP_ATTRIB : 0;
mem               330 drivers/pcmcia/vrc4173_cardu.c 	mem->card_start = offset & 0x03ffffff;
mem               333 drivers/pcmcia/vrc4173_cardu.c 	mem->sys_start = start + page;
mem               334 drivers/pcmcia/vrc4173_cardu.c 	mem->sys_stop = start + page;
mem               339 drivers/pcmcia/vrc4173_cardu.c static int cardu_set_mem_map(unsigned int sock, struct pccard_mem_map *mem)
mem               347 drivers/pcmcia/vrc4173_cardu.c 	map = mem->map;
mem               348 drivers/pcmcia/vrc4173_cardu.c 	sys_start = mem->sys_start;
mem               349 drivers/pcmcia/vrc4173_cardu.c 	sys_stop = mem->sys_stop;
mem               350 drivers/pcmcia/vrc4173_cardu.c 	card_start = mem->card_start;
mem               366 drivers/pcmcia/vrc4173_cardu.c 	if (mem->flags & MAP_16BIT) value |= MEM_WIN_DSIZE;
mem               373 drivers/pcmcia/vrc4173_cardu.c 	if (mem->flags & MAP_WRPROT) value |= MEM_WIN_WP;
mem               374 drivers/pcmcia/vrc4173_cardu.c 	if (mem->flags & MAP_ATTRIB) value |= MEM_WIN_REGSET;
mem               377 drivers/pcmcia/vrc4173_cardu.c 	if (mem->flags & MAP_ACTIVE)
mem               441 drivers/pcmcia/yenta_socket.c static int yenta_set_mem_map(struct pcmcia_socket *sock, struct pccard_mem_map *mem)
mem               450 drivers/pcmcia/yenta_socket.c 	pcibios_resource_to_bus(socket->dev->bus, &region, mem->res);
mem               452 drivers/pcmcia/yenta_socket.c 	map = mem->map;
mem               455 drivers/pcmcia/yenta_socket.c 	card_start = mem->card_start;
mem               458 drivers/pcmcia/yenta_socket.c 	    (card_start >> 26) || mem->speed > 1000)
mem               471 drivers/pcmcia/yenta_socket.c 	if (mem->flags & MAP_16BIT)
mem               473 drivers/pcmcia/yenta_socket.c 	if (mem->flags & MAP_0WS)
mem               478 drivers/pcmcia/yenta_socket.c 	switch (to_cycles(mem->speed)) {
mem               494 drivers/pcmcia/yenta_socket.c 	if (mem->flags & MAP_WRPROT)
mem               496 drivers/pcmcia/yenta_socket.c 	if (mem->flags & MAP_ATTRIB)
mem               500 drivers/pcmcia/yenta_socket.c 	if (mem->flags & MAP_ACTIVE)
mem               553 drivers/pcmcia/yenta_socket.c 	pccard_mem_map mem = { .res = &res, };
mem               561 drivers/pcmcia/yenta_socket.c 		mem.map = i;
mem               562 drivers/pcmcia/yenta_socket.c 		yenta_set_mem_map(&socket->socket, &mem);
mem               146 drivers/phy/samsung/phy-samsung-usb2.c 	struct resource *mem;
mem               170 drivers/phy/samsung/phy-samsung-usb2.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               171 drivers/phy/samsung/phy-samsung-usb2.c 	drv->reg_phy = devm_ioremap_resource(dev, mem);
mem               246 drivers/pinctrl/pinctrl-falcon.c 	void __iomem *mem = info->membase[PORT(pin)];
mem               251 drivers/pinctrl/pinctrl-falcon.c 			!!pad_getbit(mem, LTQ_PADC_DCC, PORT_PIN(pin)));
mem               256 drivers/pinctrl/pinctrl-falcon.c 			!!pad_getbit(mem, LTQ_PADC_SRC, PORT_PIN(pin)));
mem               260 drivers/pinctrl/pinctrl-falcon.c 		if (pad_getbit(mem, LTQ_PADC_PDEN, PORT_PIN(pin)))
mem               262 drivers/pinctrl/pinctrl-falcon.c 		else if (pad_getbit(mem, LTQ_PADC_PUEN, PORT_PIN(pin)))
mem               283 drivers/pinctrl/pinctrl-falcon.c 	void __iomem *mem = info->membase[PORT(pin)];
mem               313 drivers/pinctrl/pinctrl-falcon.c 		pad_w32(mem, BIT(PORT_PIN(pin)), reg);
mem               314 drivers/pinctrl/pinctrl-falcon.c 		if (!(pad_r32(mem, reg) & BIT(PORT_PIN(pin))))
mem                47 drivers/pinctrl/pinctrl-rza1.c #define RZA1_ADDR(mem, reg, port)	((mem) + (reg) + ((port) * 4))
mem               576 drivers/pinctrl/pinctrl-rza1.c 	void __iomem *mem = RZA1_ADDR(port->base, reg, port->id);
mem               577 drivers/pinctrl/pinctrl-rza1.c 	u16 val = ioread16(mem);
mem               584 drivers/pinctrl/pinctrl-rza1.c 	iowrite16(val, mem);
mem               590 drivers/pinctrl/pinctrl-rza1.c 	void __iomem *mem = RZA1_ADDR(port->base, reg, port->id);
mem               592 drivers/pinctrl/pinctrl-rza1.c 	return ioread16(mem) & BIT(bit);
mem               750 drivers/pinctrl/pinctrl-tb10x.c 	struct resource *mem;
mem               771 drivers/pinctrl/pinctrl-tb10x.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               772 drivers/pinctrl/pinctrl-tb10x.c 	state->base = devm_ioremap_resource(dev, mem);
mem                33 drivers/pinctrl/sh-pfc/gpio.c 	struct sh_pfc_window		*mem;
mem                59 drivers/pinctrl/sh-pfc/gpio.c 	void __iomem *mem = address - chip->mem->phys + chip->mem->virt;
mem                61 drivers/pinctrl/sh-pfc/gpio.c 	return sh_pfc_read_raw_reg(mem, dreg->reg_width);
mem                68 drivers/pinctrl/sh-pfc/gpio.c 	void __iomem *mem = address - chip->mem->phys + chip->mem->virt;
mem                70 drivers/pinctrl/sh-pfc/gpio.c 	sh_pfc_write_raw_reg(mem, dreg->reg_width, value);
mem               298 drivers/pinctrl/sh-pfc/gpio.c 		    struct sh_pfc_window *mem)
mem               307 drivers/pinctrl/sh-pfc/gpio.c 	chip->mem = mem;
mem                77 drivers/pnp/base.h 		struct pnp_mem mem;
mem               153 drivers/pnp/interface.c 			  struct pnp_mem *mem)
mem               158 drivers/pnp/interface.c 		   space, (unsigned long long) mem->min,
mem               159 drivers/pnp/interface.c 		   (unsigned long long) mem->max,
mem               160 drivers/pnp/interface.c 		   (unsigned long long) mem->align,
mem               161 drivers/pnp/interface.c 		   (unsigned long long) mem->size);
mem               162 drivers/pnp/interface.c 	if (mem->flags & IORESOURCE_MEM_WRITEABLE)
mem               164 drivers/pnp/interface.c 	if (mem->flags & IORESOURCE_MEM_CACHEABLE)
mem               166 drivers/pnp/interface.c 	if (mem->flags & IORESOURCE_MEM_RANGELENGTH)
mem               168 drivers/pnp/interface.c 	if (mem->flags & IORESOURCE_MEM_SHADOWABLE)
mem               170 drivers/pnp/interface.c 	if (mem->flags & IORESOURCE_MEM_EXPANSIONROM)
mem               172 drivers/pnp/interface.c 	switch (mem->flags & IORESOURCE_MEM_TYPE_MASK) {
mem               196 drivers/pnp/interface.c 		pnp_print_mem(buffer, space, &option->u.mem);
mem               277 drivers/pnp/manager.c 			ret = pnp_assign_mem(dev, &option->u.mem, nmem++);
mem               418 drivers/pnp/pnpacpi/rsparser.c 		if (p->info.mem.write_protect == ACPI_READ_WRITE_MEMORY)
mem               437 drivers/pnp/pnpacpi/rsparser.c 		if (p->info.mem.write_protect == ACPI_READ_WRITE_MEMORY)
mem               882 drivers/pnp/pnpacpi/rsparser.c 	unsigned int port = 0, irq = 0, dma = 0, mem = 0;
mem               910 drivers/pnp/pnpacpi/rsparser.c 				pnp_get_resource(dev, IORESOURCE_MEM, mem));
mem               911 drivers/pnp/pnpacpi/rsparser.c 			mem++;
mem               915 drivers/pnp/pnpacpi/rsparser.c 				pnp_get_resource(dev, IORESOURCE_MEM, mem));
mem               916 drivers/pnp/pnpacpi/rsparser.c 			mem++;
mem               920 drivers/pnp/pnpacpi/rsparser.c 				pnp_get_resource(dev, IORESOURCE_MEM, mem));
mem               921 drivers/pnp/pnpacpi/rsparser.c 			mem++;
mem               664 drivers/pnp/pnpbios/rsparser.c 	int port = 0, irq = 0, dma = 0, mem = 0;
mem               686 drivers/pnp/pnpbios/rsparser.c 				pnp_get_resource(dev, IORESOURCE_MEM, mem));
mem               687 drivers/pnp/pnpbios/rsparser.c 			mem++;
mem               694 drivers/pnp/pnpbios/rsparser.c 				pnp_get_resource(dev, IORESOURCE_MEM, mem));
mem               695 drivers/pnp/pnpbios/rsparser.c 			mem++;
mem               702 drivers/pnp/pnpbios/rsparser.c 				pnp_get_resource(dev, IORESOURCE_MEM, mem));
mem               703 drivers/pnp/pnpbios/rsparser.c 			mem++;
mem               126 drivers/pnp/resource.c 	struct pnp_mem *mem;
mem               132 drivers/pnp/resource.c 	mem = &option->u.mem;
mem               133 drivers/pnp/resource.c 	mem->min = min;
mem               134 drivers/pnp/resource.c 	mem->max = max;
mem               135 drivers/pnp/resource.c 	mem->align = align;
mem               136 drivers/pnp/resource.c 	mem->size = size;
mem               137 drivers/pnp/resource.c 	mem->flags = flags;
mem               649 drivers/pnp/resource.c 	struct pnp_mem *mem;
mem               664 drivers/pnp/resource.c 			mem = &option->u.mem;
mem               665 drivers/pnp/resource.c 			if (mem->min == start && mem->size == size)
mem               110 drivers/pnp/support.c 	struct pnp_mem *mem;
mem               134 drivers/pnp/support.c 		mem = &option->u.mem;
mem               137 drivers/pnp/support.c 				 (unsigned long long) mem->min,
mem               138 drivers/pnp/support.c 				 (unsigned long long) mem->max,
mem               139 drivers/pnp/support.c 				 (unsigned long long) mem->align,
mem               140 drivers/pnp/support.c 				 (unsigned long long) mem->size, mem->flags);
mem               827 drivers/power/avs/smartreflex.c 	struct resource *mem, *irq;
mem               847 drivers/power/avs/smartreflex.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               848 drivers/power/avs/smartreflex.c 	sr_info->base = devm_ioremap_resource(&pdev->dev, mem);
mem                39 drivers/power/reset/msm-poweroff.c 	struct resource *mem;
mem                41 drivers/power/reset/msm-poweroff.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem                42 drivers/power/reset/msm-poweroff.c 	msm_ps_hold = devm_ioremap_resource(dev, mem);
mem                68 drivers/remoteproc/da8xx_remoteproc.c 	struct da8xx_rproc_mem *mem;
mem               207 drivers/remoteproc/da8xx_remoteproc.c 	drproc->mem = devm_kcalloc(dev, num_mems, sizeof(*drproc->mem),
mem               209 drivers/remoteproc/da8xx_remoteproc.c 	if (!drproc->mem)
mem               215 drivers/remoteproc/da8xx_remoteproc.c 		drproc->mem[i].cpu_addr = devm_ioremap_resource(dev, res);
mem               216 drivers/remoteproc/da8xx_remoteproc.c 		if (IS_ERR(drproc->mem[i].cpu_addr)) {
mem               219 drivers/remoteproc/da8xx_remoteproc.c 			return PTR_ERR(drproc->mem[i].cpu_addr);
mem               221 drivers/remoteproc/da8xx_remoteproc.c 		drproc->mem[i].bus_addr = res->start;
mem               222 drivers/remoteproc/da8xx_remoteproc.c 		drproc->mem[i].dev_addr =
mem               224 drivers/remoteproc/da8xx_remoteproc.c 		drproc->mem[i].size = resource_size(res);
mem               227 drivers/remoteproc/da8xx_remoteproc.c 			mem_names[i], &drproc->mem[i].bus_addr,
mem               228 drivers/remoteproc/da8xx_remoteproc.c 			drproc->mem[i].size, drproc->mem[i].cpu_addr,
mem               229 drivers/remoteproc/da8xx_remoteproc.c 			drproc->mem[i].dev_addr);
mem                86 drivers/remoteproc/imx_rproc.c 	struct imx_rproc_mem		mem[IMX7D_RPROC_MEM_MAX];
mem               229 drivers/remoteproc/imx_rproc.c 		if (sys >= priv->mem[i].sys_addr && sys + len <
mem               230 drivers/remoteproc/imx_rproc.c 		    priv->mem[i].sys_addr +  priv->mem[i].size) {
mem               231 drivers/remoteproc/imx_rproc.c 			unsigned int offset = sys - priv->mem[i].sys_addr;
mem               233 drivers/remoteproc/imx_rproc.c 			va = (__force void *)(priv->mem[i].cpu_addr + offset);
mem               267 drivers/remoteproc/imx_rproc.c 		priv->mem[b].cpu_addr = devm_ioremap(&pdev->dev,
mem               269 drivers/remoteproc/imx_rproc.c 		if (!priv->mem[b].cpu_addr) {
mem               273 drivers/remoteproc/imx_rproc.c 		priv->mem[b].sys_addr = att->sa;
mem               274 drivers/remoteproc/imx_rproc.c 		priv->mem[b].size = att->size;
mem               298 drivers/remoteproc/imx_rproc.c 		priv->mem[b].cpu_addr = devm_ioremap_resource(&pdev->dev, &res);
mem               299 drivers/remoteproc/imx_rproc.c 		if (IS_ERR(priv->mem[b].cpu_addr)) {
mem               301 drivers/remoteproc/imx_rproc.c 			err = PTR_ERR(priv->mem[b].cpu_addr);
mem               304 drivers/remoteproc/imx_rproc.c 		priv->mem[b].sys_addr = res.start;
mem               305 drivers/remoteproc/imx_rproc.c 		priv->mem[b].size = resource_size(&res);
mem                58 drivers/remoteproc/keystone_remoteproc.c 	struct keystone_rproc_mem *mem;
mem               262 drivers/remoteproc/keystone_remoteproc.c 		bus_addr = ksproc->mem[i].bus_addr;
mem               263 drivers/remoteproc/keystone_remoteproc.c 		dev_addr = ksproc->mem[i].dev_addr;
mem               264 drivers/remoteproc/keystone_remoteproc.c 		size = ksproc->mem[i].size;
mem               271 drivers/remoteproc/keystone_remoteproc.c 				va = ksproc->mem[i].cpu_addr + offset;
mem               279 drivers/remoteproc/keystone_remoteproc.c 				va = ksproc->mem[i].cpu_addr + offset;
mem               305 drivers/remoteproc/keystone_remoteproc.c 	ksproc->mem = devm_kcalloc(ksproc->dev, num_mems,
mem               306 drivers/remoteproc/keystone_remoteproc.c 				   sizeof(*ksproc->mem), GFP_KERNEL);
mem               307 drivers/remoteproc/keystone_remoteproc.c 	if (!ksproc->mem)
mem               313 drivers/remoteproc/keystone_remoteproc.c 		ksproc->mem[i].cpu_addr = devm_ioremap_resource(dev, res);
mem               314 drivers/remoteproc/keystone_remoteproc.c 		if (IS_ERR(ksproc->mem[i].cpu_addr)) {
mem               317 drivers/remoteproc/keystone_remoteproc.c 			return PTR_ERR(ksproc->mem[i].cpu_addr);
mem               319 drivers/remoteproc/keystone_remoteproc.c 		ksproc->mem[i].bus_addr = res->start;
mem               320 drivers/remoteproc/keystone_remoteproc.c 		ksproc->mem[i].dev_addr =
mem               322 drivers/remoteproc/keystone_remoteproc.c 		ksproc->mem[i].size = resource_size(res);
mem               325 drivers/remoteproc/keystone_remoteproc.c 		memset((__force void *)ksproc->mem[i].cpu_addr, 0,
mem               326 drivers/remoteproc/keystone_remoteproc.c 		       ksproc->mem[i].size);
mem                53 drivers/remoteproc/remoteproc_core.c 				struct rproc_mem_entry *mem);
mem                55 drivers/remoteproc/remoteproc_core.c 				  struct rproc_mem_entry *mem);
mem               248 drivers/remoteproc/remoteproc_core.c 	struct rproc_mem_entry *carveout, *mem = NULL;
mem               260 drivers/remoteproc/remoteproc_core.c 			mem = carveout;
mem               265 drivers/remoteproc/remoteproc_core.c 	return mem;
mem               283 drivers/remoteproc/remoteproc_core.c 				   struct rproc_mem_entry *mem, u32 da, u32 len)
mem               289 drivers/remoteproc/remoteproc_core.c 	if (len > mem->len) {
mem               294 drivers/remoteproc/remoteproc_core.c 	if (da != FW_RSC_ADDR_ANY && mem->da == FW_RSC_ADDR_ANY) {
mem               297 drivers/remoteproc/remoteproc_core.c 	} else if (da != FW_RSC_ADDR_ANY && mem->da != FW_RSC_ADDR_ANY) {
mem               298 drivers/remoteproc/remoteproc_core.c 		delta = da - mem->da;
mem               307 drivers/remoteproc/remoteproc_core.c 		if (delta + len > mem->len) {
mem               324 drivers/remoteproc/remoteproc_core.c 	struct rproc_mem_entry *mem;
mem               332 drivers/remoteproc/remoteproc_core.c 	mem = rproc_find_carveout_by_name(rproc, "vdev%dvring%d", rvdev->index,
mem               334 drivers/remoteproc/remoteproc_core.c 	if (mem) {
mem               335 drivers/remoteproc/remoteproc_core.c 		if (rproc_check_carveout_da(rproc, mem, rsc->vring[i].da, size))
mem               339 drivers/remoteproc/remoteproc_core.c 		mem = rproc_mem_entry_init(dev, 0, 0, size, rsc->vring[i].da,
mem               344 drivers/remoteproc/remoteproc_core.c 		if (!mem) {
mem               349 drivers/remoteproc/remoteproc_core.c 		rproc_add_carveout(rproc, mem);
mem               739 drivers/remoteproc/remoteproc_core.c 				struct rproc_mem_entry *mem)
mem               747 drivers/remoteproc/remoteproc_core.c 	va = dma_alloc_coherent(dev->parent, mem->len, &dma, GFP_KERNEL);
mem               750 drivers/remoteproc/remoteproc_core.c 			"failed to allocate dma memory: len 0x%x\n", mem->len);
mem               755 drivers/remoteproc/remoteproc_core.c 		va, &dma, mem->len);
mem               757 drivers/remoteproc/remoteproc_core.c 	if (mem->da != FW_RSC_ADDR_ANY && !rproc->domain) {
mem               764 drivers/remoteproc/remoteproc_core.c 		if (mem->da != (u32)dma)
mem               786 drivers/remoteproc/remoteproc_core.c 	if (mem->da != FW_RSC_ADDR_ANY && rproc->domain) {
mem               793 drivers/remoteproc/remoteproc_core.c 		ret = iommu_map(rproc->domain, mem->da, dma, mem->len,
mem               794 drivers/remoteproc/remoteproc_core.c 				mem->flags);
mem               807 drivers/remoteproc/remoteproc_core.c 		mapping->da = mem->da;
mem               808 drivers/remoteproc/remoteproc_core.c 		mapping->len = mem->len;
mem               812 drivers/remoteproc/remoteproc_core.c 			mem->da, &dma);
mem               815 drivers/remoteproc/remoteproc_core.c 	if (mem->da == FW_RSC_ADDR_ANY) {
mem               820 drivers/remoteproc/remoteproc_core.c 		mem->da = (u32)dma;
mem               823 drivers/remoteproc/remoteproc_core.c 	mem->dma = dma;
mem               824 drivers/remoteproc/remoteproc_core.c 	mem->va = va;
mem               831 drivers/remoteproc/remoteproc_core.c 	dma_free_coherent(dev->parent, mem->len, va, dma);
mem               844 drivers/remoteproc/remoteproc_core.c 				  struct rproc_mem_entry *mem)
mem               849 drivers/remoteproc/remoteproc_core.c 	dma_free_coherent(dev->parent, mem->len, mem->va, mem->dma);
mem               939 drivers/remoteproc/remoteproc_core.c void rproc_add_carveout(struct rproc *rproc, struct rproc_mem_entry *mem)
mem               941 drivers/remoteproc/remoteproc_core.c 	list_add_tail(&mem->node, &rproc->carveouts);
mem               966 drivers/remoteproc/remoteproc_core.c 	struct rproc_mem_entry *mem;
mem               969 drivers/remoteproc/remoteproc_core.c 	mem = kzalloc(sizeof(*mem), GFP_KERNEL);
mem               970 drivers/remoteproc/remoteproc_core.c 	if (!mem)
mem               971 drivers/remoteproc/remoteproc_core.c 		return mem;
mem               973 drivers/remoteproc/remoteproc_core.c 	mem->va = va;
mem               974 drivers/remoteproc/remoteproc_core.c 	mem->dma = dma;
mem               975 drivers/remoteproc/remoteproc_core.c 	mem->da = da;
mem               976 drivers/remoteproc/remoteproc_core.c 	mem->len = len;
mem               977 drivers/remoteproc/remoteproc_core.c 	mem->alloc = alloc;
mem               978 drivers/remoteproc/remoteproc_core.c 	mem->release = release;
mem               979 drivers/remoteproc/remoteproc_core.c 	mem->rsc_offset = FW_RSC_ADDR_ANY;
mem               980 drivers/remoteproc/remoteproc_core.c 	mem->of_resm_idx = -1;
mem               983 drivers/remoteproc/remoteproc_core.c 	vsnprintf(mem->name, sizeof(mem->name), name, args);
mem               986 drivers/remoteproc/remoteproc_core.c 	return mem;
mem              1006 drivers/remoteproc/remoteproc_core.c 	struct rproc_mem_entry *mem;
mem              1009 drivers/remoteproc/remoteproc_core.c 	mem = kzalloc(sizeof(*mem), GFP_KERNEL);
mem              1010 drivers/remoteproc/remoteproc_core.c 	if (!mem)
mem              1011 drivers/remoteproc/remoteproc_core.c 		return mem;
mem              1013 drivers/remoteproc/remoteproc_core.c 	mem->da = da;
mem              1014 drivers/remoteproc/remoteproc_core.c 	mem->len = len;
mem              1015 drivers/remoteproc/remoteproc_core.c 	mem->rsc_offset = FW_RSC_ADDR_ANY;
mem              1016 drivers/remoteproc/remoteproc_core.c 	mem->of_resm_idx = of_resm_idx;
mem              1019 drivers/remoteproc/remoteproc_core.c 	vsnprintf(mem->name, sizeof(mem->name), name, args);
mem              1022 drivers/remoteproc/remoteproc_core.c 	return mem;
mem                73 drivers/remoteproc/remoteproc_virtio.c 	struct rproc_mem_entry *mem;
mem                88 drivers/remoteproc/remoteproc_virtio.c 	mem = rproc_find_carveout_by_name(rproc, "vdev%dvring%d", rvdev->index,
mem                90 drivers/remoteproc/remoteproc_virtio.c 	if (!mem || !mem->va)
mem                94 drivers/remoteproc/remoteproc_virtio.c 	addr = mem->va;
mem               121 drivers/remoteproc/remoteproc_virtio.c 	rsc->vring[id].da = mem->da;
mem               334 drivers/remoteproc/remoteproc_virtio.c 	struct rproc_mem_entry *mem;
mem               345 drivers/remoteproc/remoteproc_virtio.c 	mem = rproc_find_carveout_by_name(rproc, "vdev%dbuffer", rvdev->index);
mem               346 drivers/remoteproc/remoteproc_virtio.c 	if (mem) {
mem               349 drivers/remoteproc/remoteproc_virtio.c 		if (mem->of_resm_idx != -1) {
mem               354 drivers/remoteproc/remoteproc_virtio.c 								 mem->of_resm_idx);
mem               360 drivers/remoteproc/remoteproc_virtio.c 			if (mem->va) {
mem               363 drivers/remoteproc/remoteproc_virtio.c 				pa = rproc_va_to_pa(mem->va);
mem               366 drivers/remoteproc/remoteproc_virtio.c 				pa = (phys_addr_t)mem->dma;
mem               371 drivers/remoteproc/remoteproc_virtio.c 							   mem->da,
mem               372 drivers/remoteproc/remoteproc_virtio.c 							   mem->len);
mem                93 drivers/remoteproc/st_remoteproc.c 			      struct rproc_mem_entry *mem)
mem                98 drivers/remoteproc/st_remoteproc.c 	va = ioremap_wc(mem->dma, mem->len);
mem               101 drivers/remoteproc/st_remoteproc.c 			&mem->dma, mem->len);
mem               106 drivers/remoteproc/st_remoteproc.c 	mem->va = va;
mem               112 drivers/remoteproc/st_remoteproc.c 				struct rproc_mem_entry *mem)
mem               114 drivers/remoteproc/st_remoteproc.c 	iounmap(mem->va);
mem               123 drivers/remoteproc/st_remoteproc.c 	struct rproc_mem_entry *mem;
mem               139 drivers/remoteproc/st_remoteproc.c 			mem = rproc_mem_entry_init(dev, NULL,
mem               147 drivers/remoteproc/st_remoteproc.c 			mem = rproc_of_resm_mem_entry_init(dev, index,
mem               153 drivers/remoteproc/st_remoteproc.c 		if (!mem)
mem               156 drivers/remoteproc/st_remoteproc.c 		rproc_add_carveout(rproc, mem);
mem               144 drivers/remoteproc/st_slim_rproc.c 	fw_rev = readl(slim_rproc->mem[ST_SLIM_DMEM].cpu_addr +
mem               184 drivers/remoteproc/st_slim_rproc.c 		if (da != slim_rproc->mem[i].bus_addr)
mem               187 drivers/remoteproc/st_slim_rproc.c 		if (len <= slim_rproc->mem[i].size) {
mem               189 drivers/remoteproc/st_slim_rproc.c 			va = (__force void *)slim_rproc->mem[i].cpu_addr;
mem               253 drivers/remoteproc/st_slim_rproc.c 		slim_rproc->mem[i].cpu_addr = devm_ioremap_resource(dev, res);
mem               254 drivers/remoteproc/st_slim_rproc.c 		if (IS_ERR(slim_rproc->mem[i].cpu_addr)) {
mem               256 drivers/remoteproc/st_slim_rproc.c 			err = PTR_ERR(slim_rproc->mem[i].cpu_addr);
mem               259 drivers/remoteproc/st_slim_rproc.c 		slim_rproc->mem[i].bus_addr = res->start;
mem               260 drivers/remoteproc/st_slim_rproc.c 		slim_rproc->mem[i].size = resource_size(res);
mem                95 drivers/remoteproc/stm32_rproc.c 				 struct rproc_mem_entry *mem)
mem               100 drivers/remoteproc/stm32_rproc.c 	dev_dbg(dev, "map memory: %pa+%x\n", &mem->dma, mem->len);
mem               101 drivers/remoteproc/stm32_rproc.c 	va = ioremap_wc(mem->dma, mem->len);
mem               104 drivers/remoteproc/stm32_rproc.c 			&mem->dma, mem->len);
mem               109 drivers/remoteproc/stm32_rproc.c 	mem->va = va;
mem               115 drivers/remoteproc/stm32_rproc.c 				   struct rproc_mem_entry *mem)
mem               117 drivers/remoteproc/stm32_rproc.c 	dev_dbg(rproc->dev.parent, "unmap memory: %pa\n", &mem->dma);
mem               118 drivers/remoteproc/stm32_rproc.c 	iounmap(mem->va);
mem               204 drivers/remoteproc/stm32_rproc.c 	struct rproc_mem_entry *mem;
mem               227 drivers/remoteproc/stm32_rproc.c 			mem = rproc_mem_entry_init(dev, NULL,
mem               234 drivers/remoteproc/stm32_rproc.c 			if (mem)
mem               239 drivers/remoteproc/stm32_rproc.c 			mem = rproc_of_resm_mem_entry_init(dev, index,
mem               245 drivers/remoteproc/stm32_rproc.c 		if (!mem)
mem               248 drivers/remoteproc/stm32_rproc.c 		rproc_add_carveout(rproc, mem);
mem                50 drivers/remoteproc/wkup_m3_rproc.c 	struct wkup_m3_mem mem[WKUPM3_MEM_MAX];
mem                94 drivers/remoteproc/wkup_m3_rproc.c 		if (da >= wkupm3->mem[i].dev_addr && da + len <=
mem                95 drivers/remoteproc/wkup_m3_rproc.c 		    wkupm3->mem[i].dev_addr +  wkupm3->mem[i].size) {
mem                96 drivers/remoteproc/wkup_m3_rproc.c 			offset = da -  wkupm3->mem[i].dev_addr;
mem                98 drivers/remoteproc/wkup_m3_rproc.c 			va = (__force void *)(wkupm3->mem[i].cpu_addr + offset);
mem               171 drivers/remoteproc/wkup_m3_rproc.c 		wkupm3->mem[i].cpu_addr = devm_ioremap_resource(dev, res);
mem               172 drivers/remoteproc/wkup_m3_rproc.c 		if (IS_ERR(wkupm3->mem[i].cpu_addr)) {
mem               175 drivers/remoteproc/wkup_m3_rproc.c 			ret = PTR_ERR(wkupm3->mem[i].cpu_addr);
mem               178 drivers/remoteproc/wkup_m3_rproc.c 		wkupm3->mem[i].bus_addr = res->start;
mem               179 drivers/remoteproc/wkup_m3_rproc.c 		wkupm3->mem[i].size = resource_size(res);
mem               189 drivers/remoteproc/wkup_m3_rproc.c 		wkupm3->mem[i].dev_addr = be32_to_cpu(*addrp) - l4_offset;
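The remoteproc hits above (remoteproc_core.c, remoteproc_virtio.c, st_remoteproc.c, stm32_rproc.c, wkup_m3_rproc.c) all orbit the carveout API: a platform driver wraps each firmware memory region in a struct rproc_mem_entry via rproc_mem_entry_init() or rproc_of_resm_mem_entry_init(), registers it with rproc_add_carveout(), and the core later resolves it by name with rproc_find_carveout_by_name(). A minimal sketch of that registration path follows; my_rproc_mem_alloc(), my_rproc_mem_release(), my_register_carveout() and the "vdev0buffer" label are placeholders, not code from any of the drivers listed.

#include <linux/io.h>
#include <linux/remoteproc.h>

/*
 * Illustrative only: mirrors the alloc/release + rproc_add_carveout() pattern
 * visible in st_remoteproc.c and stm32_rproc.c above.  The callback names,
 * the "vdev0buffer" label and the region parameters are placeholders.
 */
static int my_rproc_mem_alloc(struct rproc *rproc, struct rproc_mem_entry *mem)
{
	void *va = ioremap_wc(mem->dma, mem->len);

	if (!va)
		return -ENOMEM;
	mem->va = va;			/* CPU mapping the core uses at load time */
	return 0;
}

static int my_rproc_mem_release(struct rproc *rproc, struct rproc_mem_entry *mem)
{
	iounmap(mem->va);
	return 0;
}

static int my_register_carveout(struct rproc *rproc, struct device *dev,
				dma_addr_t base, int size, u32 da)
{
	struct rproc_mem_entry *mem;

	/* va == NULL: the mapping is created lazily by my_rproc_mem_alloc() */
	mem = rproc_mem_entry_init(dev, NULL, base, size, da,
				   my_rproc_mem_alloc, my_rproc_mem_release,
				   "vdev0buffer");
	if (!mem)
		return -ENOMEM;

	rproc_add_carveout(rproc, mem);	/* released by the core during cleanup */
	return 0;
}

The deferred ioremap_wc() in the alloc callback is the same design choice the st_remoteproc.c and stm32_rproc.c hits show: the CPU mapping is only created when the core actually needs it.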
mem                47 drivers/reset/reset-axs10x.c 	struct resource *mem;
mem                53 drivers/reset/reset-axs10x.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem                54 drivers/reset/reset-axs10x.c 	rst->regs_rst = devm_ioremap_resource(&pdev->dev, mem);
mem                95 drivers/reset/reset-hsdk.c 	struct resource *mem;
mem               101 drivers/reset/reset-hsdk.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               102 drivers/reset/reset-hsdk.c 	rst->regs_ctl = devm_ioremap_resource(&pdev->dev, mem);
mem               106 drivers/reset/reset-hsdk.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 1);
mem               107 drivers/reset/reset-hsdk.c 	rst->regs_rst = devm_ioremap_resource(&pdev->dev, mem);
mem               310 drivers/rtc/rtc-jz4740.c 	struct resource *mem;
mem               329 drivers/rtc/rtc-jz4740.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               330 drivers/rtc/rtc-jz4740.c 	rtc->base = devm_ioremap_resource(&pdev->dev, mem);
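Several of the hits in this stretch (reset-axs10x.c, reset-hsdk.c, rtc-jz4740.c, and later the serial and SPI controller drivers) are the same probe idiom: fetch the IORESOURCE_MEM entry with platform_get_resource() and map it with devm_ioremap_resource(), which returns an ERR_PTR on failure (including a NULL resource). A self-contained sketch of the idiom, with my_device_probe() and struct my_device as placeholder names:

#include <linux/err.h>
#include <linux/io.h>
#include <linux/platform_device.h>

struct my_device {
	void __iomem *regs;
};

static int my_device_probe(struct platform_device *pdev)
{
	struct my_device *priv;
	struct resource *mem;

	priv = devm_kzalloc(&pdev->dev, sizeof(*priv), GFP_KERNEL);
	if (!priv)
		return -ENOMEM;

	/* First (and here only) register window of the device */
	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	priv->regs = devm_ioremap_resource(&pdev->dev, mem);
	if (IS_ERR(priv->regs))
		return PTR_ERR(priv->regs);	/* also covers mem == NULL */

	platform_set_drvdata(pdev, priv);
	return 0;
}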
mem               132 drivers/rtc/rtc-pcf8583.c static int pcf8583_read_mem(struct i2c_client *client, struct rtc_mem *mem)
mem               144 drivers/rtc/rtc-pcf8583.c 			.len = mem->nr,
mem               145 drivers/rtc/rtc-pcf8583.c 			.buf = mem->data,
mem               149 drivers/rtc/rtc-pcf8583.c 	if (mem->loc < 8)
mem               152 drivers/rtc/rtc-pcf8583.c 	addr[0] = mem->loc;
mem               157 drivers/rtc/rtc-pcf8583.c static int pcf8583_write_mem(struct i2c_client *client, struct rtc_mem *mem)
mem               162 drivers/rtc/rtc-pcf8583.c 	if (mem->loc < 8 || mem->nr > 8)
mem               165 drivers/rtc/rtc-pcf8583.c 	buf[0] = mem->loc;
mem               166 drivers/rtc/rtc-pcf8583.c 	memcpy(buf + 1, mem->data, mem->nr);
mem               168 drivers/rtc/rtc-pcf8583.c 	ret = i2c_master_send(client, buf, mem->nr + 1);
mem               169 drivers/rtc/rtc-pcf8583.c 	return ret == mem->nr + 1 ? 0 : -EIO;
mem               176 drivers/rtc/rtc-pcf8583.c 	struct rtc_mem mem = {
mem               199 drivers/rtc/rtc-pcf8583.c 	    pcf8583_read_mem(client, &mem))
mem               648 drivers/s390/block/dasd_int.h dasd_init_chunklist(struct list_head *chunk_list, void *mem,
mem               654 drivers/s390/block/dasd_int.h 	chunk = (struct dasd_mchunk *) mem;
mem               682 drivers/s390/block/dasd_int.h dasd_free_chunk(struct list_head *chunk_list, void *mem)
mem               688 drivers/s390/block/dasd_int.h 		((char *) mem - sizeof(struct dasd_mchunk));
mem               277 drivers/s390/char/raw3270.h add_string_memory(struct list_head *free_list, void *mem, unsigned long size)
mem               281 drivers/s390/char/raw3270.h 	cs = (struct string *) mem;
mem               275 drivers/s390/char/sclp_early_core.c int __init sclp_early_get_memsize(unsigned long *mem)
mem               287 drivers/s390/char/sclp_early_core.c 	*mem = rnsize * rnmax;
mem               377 drivers/s390/cio/cmf.c 	struct cmb *mem;
mem               454 drivers/s390/cio/cmf.c 	cmb = cmb_area.mem;
mem               462 drivers/s390/cio/cmf.c 	if (cmb - cmb_area.mem >= cmb_area.num_channels) {
mem               480 drivers/s390/cio/cmf.c 	struct cmb *mem;
mem               497 drivers/s390/cio/cmf.c 	if (!cmb_area.mem) {
mem               503 drivers/s390/cio/cmf.c 		mem = (void*)__get_free_pages(GFP_KERNEL | GFP_DMA,
mem               507 drivers/s390/cio/cmf.c 		if (cmb_area.mem) {
mem               509 drivers/s390/cio/cmf.c 			free_pages((unsigned long)mem, get_order(size));
mem               510 drivers/s390/cio/cmf.c 		} else if (!mem) {
mem               516 drivers/s390/cio/cmf.c 			memset(mem, 0, size);
mem               517 drivers/s390/cio/cmf.c 			cmb_area.mem = mem;
mem               518 drivers/s390/cio/cmf.c 			cmf_activate(cmb_area.mem, CMF_ON);
mem               553 drivers/s390/cio/cmf.c 		free_pages((unsigned long)cmb_area.mem, get_order(size));
mem               554 drivers/s390/cio/cmf.c 		cmb_area.mem = NULL;
mem               572 drivers/s390/cio/cmf.c 	offset = mme ? (struct cmb *)cmb_data->hw_block - cmb_area.mem : 0;
mem              1250 drivers/s390/cio/cmf.c 		cmf_activate(cmb_area.mem, CMF_ON);
mem               226 drivers/s390/crypto/zcrypt_ccamisc.c static void free_cprbmem(void *mem, size_t paramblen, int scrub)
mem               229 drivers/s390/crypto/zcrypt_ccamisc.c 		memzero_explicit(mem, 2 * (sizeof(struct CPRBX) + paramblen));
mem               230 drivers/s390/crypto/zcrypt_ccamisc.c 	kfree(mem);
mem               277 drivers/s390/crypto/zcrypt_ccamisc.c 	u8 *mem;
mem               315 drivers/s390/crypto/zcrypt_ccamisc.c 	rc = alloc_and_prep_cprbmem(PARMBSIZE, &mem, &preqcblk, &prepcblk);
mem               406 drivers/s390/crypto/zcrypt_ccamisc.c 	free_cprbmem(mem, PARMBSIZE, 0);
mem               418 drivers/s390/crypto/zcrypt_ccamisc.c 	u8 *mem;
mem               455 drivers/s390/crypto/zcrypt_ccamisc.c 	rc = alloc_and_prep_cprbmem(PARMBSIZE, &mem, &preqcblk, &prepcblk);
mem               544 drivers/s390/crypto/zcrypt_ccamisc.c 	free_cprbmem(mem, PARMBSIZE, 1);
mem               557 drivers/s390/crypto/zcrypt_ccamisc.c 	u8 *mem;
mem               600 drivers/s390/crypto/zcrypt_ccamisc.c 	rc = alloc_and_prep_cprbmem(PARMBSIZE, &mem, &preqcblk, &prepcblk);
mem               688 drivers/s390/crypto/zcrypt_ccamisc.c 	free_cprbmem(mem, PARMBSIZE, 0);
mem               716 drivers/s390/crypto/zcrypt_ccamisc.c 	u8 *mem;
mem               789 drivers/s390/crypto/zcrypt_ccamisc.c 	rc = alloc_and_prep_cprbmem(PARMBSIZE, &mem, &preqcblk, &prepcblk);
mem               901 drivers/s390/crypto/zcrypt_ccamisc.c 	free_cprbmem(mem, PARMBSIZE, 0);
mem               919 drivers/s390/crypto/zcrypt_ccamisc.c 	u8 *mem;
mem               967 drivers/s390/crypto/zcrypt_ccamisc.c 	rc = alloc_and_prep_cprbmem(PARMBSIZE, &mem, &preqcblk, &prepcblk);
mem              1056 drivers/s390/crypto/zcrypt_ccamisc.c 	free_cprbmem(mem, PARMBSIZE, 0);
mem              1153 drivers/s390/crypto/zcrypt_ccamisc.c 	u8 *mem;
mem              1202 drivers/s390/crypto/zcrypt_ccamisc.c 	rc = alloc_and_prep_cprbmem(PARMBSIZE, &mem, &preqcblk, &prepcblk);
mem              1306 drivers/s390/crypto/zcrypt_ccamisc.c 	free_cprbmem(mem, PARMBSIZE, 0);
mem              1321 drivers/s390/crypto/zcrypt_ccamisc.c 	u8 *mem, *ptr;
mem              1341 drivers/s390/crypto/zcrypt_ccamisc.c 	rc = alloc_and_prep_cprbmem(parmbsize, &mem, &preqcblk, &prepcblk);
mem              1408 drivers/s390/crypto/zcrypt_ccamisc.c 	free_cprbmem(mem, parmbsize, 0);
mem               259 drivers/scsi/aacraid/comminit.c static void aac_queue_init(struct aac_dev * dev, struct aac_queue * q, u32 *mem, int qsize)
mem               268 drivers/scsi/aacraid/comminit.c 	q->headers.producer = (__le32 *)mem;
mem               269 drivers/scsi/aacraid/comminit.c 	q->headers.consumer = (__le32 *)(mem+1);
mem               695 drivers/scsi/be2iscsi/be_cmds.c 							struct be_dma_mem *mem)
mem               698 drivers/scsi/be2iscsi/be_cmds.c 	u64 dma = (u64) mem->dma;
mem               700 drivers/scsi/be2iscsi/be_cmds.c 	buf_pages = min(PAGES_4K_SPANNED(mem->va, mem->size), max_pages);
mem              2981 drivers/scsi/be2iscsi/be_main.c 	struct be_dma_mem *mem = &q->dma_mem;
mem              2986 drivers/scsi/be2iscsi/be_main.c 	mem->size = len * entry_size;
mem              2987 drivers/scsi/be2iscsi/be_main.c 	mem->va = vaddress;
mem              2988 drivers/scsi/be2iscsi/be_main.c 	if (!mem->va)
mem              2990 drivers/scsi/be2iscsi/be_main.c 	memset(mem->va, 0, mem->size);
mem              3000 drivers/scsi/be2iscsi/be_main.c 	struct be_dma_mem *mem;
mem              3013 drivers/scsi/be2iscsi/be_main.c 		mem = &eq->dma_mem;
mem              3023 drivers/scsi/be2iscsi/be_main.c 		mem->va = eq_vaddress;
mem              3032 drivers/scsi/be2iscsi/be_main.c 		mem->dma = paddr;
mem              3051 drivers/scsi/be2iscsi/be_main.c 		mem = &eq->dma_mem;
mem              3052 drivers/scsi/be2iscsi/be_main.c 		if (mem->va)
mem              3055 drivers/scsi/be2iscsi/be_main.c 					    mem->va, mem->dma);
mem              3065 drivers/scsi/be2iscsi/be_main.c 	struct be_dma_mem *mem;
mem              3080 drivers/scsi/be2iscsi/be_main.c 		mem = &cq->dma_mem;
mem              3098 drivers/scsi/be2iscsi/be_main.c 		mem->dma = paddr;
mem              3116 drivers/scsi/be2iscsi/be_main.c 		mem = &cq->dma_mem;
mem              3117 drivers/scsi/be2iscsi/be_main.c 		if (mem->va)
mem              3120 drivers/scsi/be2iscsi/be_main.c 					    mem->va, mem->dma);
mem              3134 drivers/scsi/be2iscsi/be_main.c 	struct be_dma_mem *mem;
mem              3141 drivers/scsi/be2iscsi/be_main.c 	mem = &dq->dma_mem;
mem              3156 drivers/scsi/be2iscsi/be_main.c 	mem->dma = (unsigned long)mem_descr->mem_array[idx].
mem              3186 drivers/scsi/be2iscsi/be_main.c 	struct be_dma_mem *mem;
mem              3193 drivers/scsi/be2iscsi/be_main.c 	mem = &dataq->dma_mem;
mem              3209 drivers/scsi/be2iscsi/be_main.c 	mem->dma = (unsigned long)mem_descr->mem_array[idx].
mem              3308 drivers/scsi/be2iscsi/be_main.c 	struct be_dma_mem *mem = &q->dma_mem;
mem              3309 drivers/scsi/be2iscsi/be_main.c 	if (mem->va) {
mem              3310 drivers/scsi/be2iscsi/be_main.c 		dma_free_coherent(&phba->pcidev->dev, mem->size,
mem              3311 drivers/scsi/be2iscsi/be_main.c 			mem->va, mem->dma);
mem              3312 drivers/scsi/be2iscsi/be_main.c 		mem->va = NULL;
mem              3319 drivers/scsi/be2iscsi/be_main.c 	struct be_dma_mem *mem = &q->dma_mem;
mem              3324 drivers/scsi/be2iscsi/be_main.c 	mem->size = len * entry_size;
mem              3325 drivers/scsi/be2iscsi/be_main.c 	mem->va = dma_alloc_coherent(&phba->pcidev->dev, mem->size, &mem->dma,
mem              3327 drivers/scsi/be2iscsi/be_main.c 	if (!mem->va)
mem                68 drivers/scsi/csiostor/csio_init.c 	unsigned int mem = (uintptr_t)file->private_data & 3;
mem                69 drivers/scsi/csiostor/csio_init.c 	struct csio_hw *hw = file->private_data - mem;
mem                83 drivers/scsi/csiostor/csio_init.c 		if (mem == MEM_MC)
mem                87 drivers/scsi/csiostor/csio_init.c 			ret = hw->chip_ops->chip_edc_read(hw, mem, pos,
mem               203 drivers/scsi/mpt3sas/mpt3sas_config.c 	struct config_request *mem)
mem               207 drivers/scsi/mpt3sas/mpt3sas_config.c 	if (mem->sz > ioc->config_page_sz) {
mem               208 drivers/scsi/mpt3sas/mpt3sas_config.c 		mem->page = dma_alloc_coherent(&ioc->pdev->dev, mem->sz,
mem               209 drivers/scsi/mpt3sas/mpt3sas_config.c 		    &mem->page_dma, GFP_KERNEL);
mem               210 drivers/scsi/mpt3sas/mpt3sas_config.c 		if (!mem->page) {
mem               212 drivers/scsi/mpt3sas/mpt3sas_config.c 				__func__, mem->sz);
mem               216 drivers/scsi/mpt3sas/mpt3sas_config.c 		mem->page = ioc->config_page;
mem               217 drivers/scsi/mpt3sas/mpt3sas_config.c 		mem->page_dma = ioc->config_page_dma;
mem               219 drivers/scsi/mpt3sas/mpt3sas_config.c 	ioc->config_vaddr = mem->page;
mem               234 drivers/scsi/mpt3sas/mpt3sas_config.c 	struct config_request *mem)
mem               236 drivers/scsi/mpt3sas/mpt3sas_config.c 	if (mem->sz > ioc->config_page_sz)
mem               237 drivers/scsi/mpt3sas/mpt3sas_config.c 		dma_free_coherent(&ioc->pdev->dev, mem->sz, mem->page,
mem               238 drivers/scsi/mpt3sas/mpt3sas_config.c 		    mem->page_dma);
mem               306 drivers/scsi/mpt3sas/mpt3sas_config.c 	struct config_request mem;
mem               317 drivers/scsi/mpt3sas/mpt3sas_config.c 	memset(&mem, 0, sizeof(struct config_request));
mem               330 drivers/scsi/mpt3sas/mpt3sas_config.c 			mem.sz = mpi_request->Header.PageLength * 4;
mem               332 drivers/scsi/mpt3sas/mpt3sas_config.c 			mem.sz = le16_to_cpu(mpi_reply->ExtPageLength) * 4;
mem               333 drivers/scsi/mpt3sas/mpt3sas_config.c 		r = _config_alloc_config_dma_memory(ioc, &mem);
mem               341 drivers/scsi/mpt3sas/mpt3sas_config.c 			    MPT3_CONFIG_COMMON_WRITE_SGLFLAGS | mem.sz,
mem               342 drivers/scsi/mpt3sas/mpt3sas_config.c 			    mem.page_dma);
mem               343 drivers/scsi/mpt3sas/mpt3sas_config.c 			memcpy(mem.page, config_page, min_t(u16, mem.sz,
mem               348 drivers/scsi/mpt3sas/mpt3sas_config.c 			    MPT3_CONFIG_COMMON_SGLFLAGS | mem.sz, mem.page_dma);
mem               349 drivers/scsi/mpt3sas/mpt3sas_config.c 			memset(mem.page, 0, min_t(u16, mem.sz, config_page_sz));
mem               436 drivers/scsi/mpt3sas/mpt3sas_config.c 		u8 *p = (u8 *)mem.page;
mem               444 drivers/scsi/mpt3sas/mpt3sas_config.c 				_debug_dump_config(p, min_t(u16, mem.sz,
mem               457 drivers/scsi/mpt3sas/mpt3sas_config.c 				_debug_dump_config(p, min_t(u16, mem.sz,
mem               464 drivers/scsi/mpt3sas/mpt3sas_config.c 		memcpy(config_page, mem.page, min_t(u16, mem.sz,
mem               470 drivers/scsi/mpt3sas/mpt3sas_config.c 		_config_free_config_dma_memory(ioc, &mem);
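The be2iscsi and mpt3sas hits above share the coherent-DMA buffer pattern: dma_alloc_coherent() returns a CPU pointer plus a DMA handle that the driver programs into the hardware, and dma_free_coherent() releases the pair with the same size and handle. A hedged sketch of that round trip; struct my_ring, my_setup_ring() and the 4 KiB size are illustrative rather than taken from either driver:

#include <linux/dma-mapping.h>
#include <linux/string.h>

struct my_ring {
	void *va;	/* CPU-visible address, used for descriptor setup */
	dma_addr_t dma;	/* device-visible address, written into hardware */
	size_t size;
};

static int my_setup_ring(struct device *dev, struct my_ring *ring)
{
	ring->size = 4096;	/* illustrative length */
	ring->va = dma_alloc_coherent(dev, ring->size, &ring->dma, GFP_KERNEL);
	if (!ring->va)
		return -ENOMEM;

	memset(ring->va, 0, ring->size);	/* matches the zeroing seen in be_main.c */
	return 0;
}

static void my_teardown_ring(struct device *dev, struct my_ring *ring)
{
	if (ring->va)
		dma_free_coherent(dev, ring->size, ring->va, ring->dma);
	ring->va = NULL;
}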
mem               308 drivers/scsi/qedi/qedi_main.c 	uinfo->mem[0].addr = (unsigned long)udev->uctrl;
mem               309 drivers/scsi/qedi/qedi_main.c 	uinfo->mem[0].size = sizeof(struct qedi_uio_ctrl);
mem               310 drivers/scsi/qedi/qedi_main.c 	uinfo->mem[0].memtype = UIO_MEM_LOGICAL;
mem               312 drivers/scsi/qedi/qedi_main.c 	uinfo->mem[1].addr = (unsigned long)udev->ll2_ring;
mem               313 drivers/scsi/qedi/qedi_main.c 	uinfo->mem[1].size = udev->ll2_ring_size;
mem               314 drivers/scsi/qedi/qedi_main.c 	uinfo->mem[1].memtype = UIO_MEM_LOGICAL;
mem               316 drivers/scsi/qedi/qedi_main.c 	uinfo->mem[2].addr = (unsigned long)udev->ll2_buf;
mem               317 drivers/scsi/qedi/qedi_main.c 	uinfo->mem[2].size = udev->ll2_buf_size;
mem               318 drivers/scsi/qedi/qedi_main.c 	uinfo->mem[2].memtype = UIO_MEM_LOGICAL;
mem              1165 drivers/scsi/qla2xxx/qla_bsg.c 				ql84_mgmt->mgmt.mgmtp.u.mem.start_addr);
mem              1216 drivers/scsi/qla2xxx/qla_bsg.c 			cpu_to_le32(ql84_mgmt->mgmt.mgmtp.u.mem.start_addr);
mem                84 drivers/scsi/qla2xxx/qla_bsg.h 		} mem; /* for QLA84_MGMT_READ/WRITE_MEM */
mem                35 drivers/scsi/qla2xxx/qla_tmpl.c qla27xx_insertbuf(void *mem, ulong size, void *buf, ulong *len)
mem                37 drivers/scsi/qla2xxx/qla_tmpl.c 	if (buf && mem && size) {
mem                39 drivers/scsi/qla2xxx/qla_tmpl.c 		memcpy(buf, mem, size);
mem               515 drivers/scsi/sun3_scsi.c 	struct resource *irq, *mem;
mem               537 drivers/scsi/sun3_scsi.c 		mem = platform_get_resource(pdev, IORESOURCE_MEM, i);
mem               538 drivers/scsi/sun3_scsi.c 		if (!irq || !mem)
mem               541 drivers/scsi/sun3_scsi.c 		ioaddr = sun3_ioremap(mem->start, resource_size(mem),
mem               564 drivers/scsi/sun3_scsi.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               565 drivers/scsi/sun3_scsi.c 	if (!irq || !mem)
mem               568 drivers/scsi/sun3_scsi.c 	ioaddr = ioremap(mem->start, resource_size(mem));
mem               589 drivers/scsi/sun3_scsi.c 	hostdata->base = mem->start;
mem               179 drivers/soundwire/intel.c static ssize_t intel_sprintf(void __iomem *mem, bool l,
mem               185 drivers/soundwire/intel.c 		value = intel_readl(mem, reg);
mem               187 drivers/soundwire/intel.c 		value = intel_readw(mem, reg);
mem               147 drivers/spi/atmel-quadspi.c 	void __iomem		*mem;
mem               202 drivers/spi/atmel-quadspi.c static bool atmel_qspi_supports_op(struct spi_mem *mem,
mem               327 drivers/spi/atmel-quadspi.c static int atmel_qspi_exec_op(struct spi_mem *mem, const struct spi_mem_op *op)
mem               329 drivers/spi/atmel-quadspi.c 	struct atmel_qspi *aq = spi_controller_get_devdata(mem->spi->master);
mem               352 drivers/spi/atmel-quadspi.c 			_memcpy_fromio(op->data.buf.in, aq->mem + offset,
mem               355 drivers/spi/atmel-quadspi.c 			_memcpy_toio(aq->mem + offset, op->data.buf.out,
mem               485 drivers/spi/atmel-quadspi.c 	aq->mem = devm_ioremap_resource(&pdev->dev, res);
mem               486 drivers/spi/atmel-quadspi.c 	if (IS_ERR(aq->mem)) {
mem               488 drivers/spi/atmel-quadspi.c 		err = PTR_ERR(aq->mem);
mem               945 drivers/spi/spi-bcm-qspi.c static int bcm_qspi_exec_mem_op(struct spi_mem *mem,
mem               948 drivers/spi/spi-bcm-qspi.c 	struct spi_device *spi = mem->spi;
mem               661 drivers/spi/spi-fsl-espi.c static int fsl_espi_probe(struct device *dev, struct resource *mem,
mem               701 drivers/spi/spi-fsl-espi.c 	espi->reg_base = devm_ioremap_resource(dev, mem);
mem               759 drivers/spi/spi-fsl-espi.c 	struct resource mem;
mem               772 drivers/spi/spi-fsl-espi.c 	ret = of_address_to_resource(np, 0, &mem);
mem               780 drivers/spi/spi-fsl-espi.c 	return fsl_espi_probe(dev, &mem, irq, num_cs);
mem                80 drivers/spi/spi-fsl-lib.c void mpc8xxx_spi_probe(struct device *dev, struct resource *mem,
mem               109 drivers/spi/spi-fsl-lib.h extern void mpc8xxx_spi_probe(struct device *dev, struct resource *mem,
mem               356 drivers/spi/spi-fsl-qspi.c static bool fsl_qspi_supports_op(struct spi_mem *mem,
mem               359 drivers/spi/spi-fsl-qspi.c 	struct fsl_qspi *q = spi_controller_get_devdata(mem->spi->master);
mem               401 drivers/spi/spi-fsl-qspi.c 	return spi_mem_default_supports_op(mem, op);
mem               630 drivers/spi/spi-fsl-qspi.c static int fsl_qspi_exec_op(struct spi_mem *mem, const struct spi_mem_op *op)
mem               632 drivers/spi/spi-fsl-qspi.c 	struct fsl_qspi *q = spi_controller_get_devdata(mem->spi->master);
mem               643 drivers/spi/spi-fsl-qspi.c 	fsl_qspi_select_mem(q, mem->spi);
mem               687 drivers/spi/spi-fsl-qspi.c static int fsl_qspi_adjust_op_size(struct spi_mem *mem, struct spi_mem_op *op)
mem               689 drivers/spi/spi-fsl-qspi.c 	struct fsl_qspi *q = spi_controller_get_devdata(mem->spi->master);
mem               793 drivers/spi/spi-fsl-qspi.c static const char *fsl_qspi_get_name(struct spi_mem *mem)
mem               795 drivers/spi/spi-fsl-qspi.c 	struct fsl_qspi *q = spi_controller_get_devdata(mem->spi->master);
mem               796 drivers/spi/spi-fsl-qspi.c 	struct device *dev = &mem->spi->dev;
mem               809 drivers/spi/spi-fsl-qspi.c 			      mem->spi->chip_select);
mem               593 drivers/spi/spi-fsl-spi.c 		struct resource *mem, unsigned int irq)
mem               610 drivers/spi/spi-fsl-spi.c 	mpc8xxx_spi_probe(dev, mem, irq);
mem               625 drivers/spi/spi-fsl-spi.c 	mpc8xxx_spi->reg_base = devm_ioremap_resource(dev, mem);
mem               709 drivers/spi/spi-fsl-spi.c 	struct resource mem;
mem               746 drivers/spi/spi-fsl-spi.c 	ret = of_address_to_resource(np, 0, &mem);
mem               756 drivers/spi/spi-fsl-spi.c 	master = fsl_spi_probe(dev, &mem, irq);
mem               796 drivers/spi/spi-fsl-spi.c 	struct resource *mem;
mem               803 drivers/spi/spi-fsl-spi.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               804 drivers/spi/spi-fsl-spi.c 	if (!mem)
mem               811 drivers/spi/spi-fsl-spi.c 	master = fsl_spi_probe(&pdev->dev, mem, irq);
mem               102 drivers/spi/spi-mem.c static int spi_check_buswidth_req(struct spi_mem *mem, u8 buswidth, bool tx)
mem               104 drivers/spi/spi-mem.c 	u32 mode = mem->spi->mode;
mem               138 drivers/spi/spi-mem.c bool spi_mem_default_supports_op(struct spi_mem *mem,
mem               141 drivers/spi/spi-mem.c 	if (spi_check_buswidth_req(mem, op->cmd.buswidth, true))
mem               145 drivers/spi/spi-mem.c 	    spi_check_buswidth_req(mem, op->addr.buswidth, true))
mem               149 drivers/spi/spi-mem.c 	    spi_check_buswidth_req(mem, op->dummy.buswidth, true))
mem               153 drivers/spi/spi-mem.c 	    spi_check_buswidth_req(mem, op->data.buswidth,
mem               188 drivers/spi/spi-mem.c static bool spi_mem_internal_supports_op(struct spi_mem *mem,
mem               191 drivers/spi/spi-mem.c 	struct spi_controller *ctlr = mem->spi->controller;
mem               194 drivers/spi/spi-mem.c 		return ctlr->mem_ops->supports_op(mem, op);
mem               196 drivers/spi/spi-mem.c 	return spi_mem_default_supports_op(mem, op);
mem               214 drivers/spi/spi-mem.c bool spi_mem_supports_op(struct spi_mem *mem, const struct spi_mem_op *op)
mem               219 drivers/spi/spi-mem.c 	return spi_mem_internal_supports_op(mem, op);
mem               223 drivers/spi/spi-mem.c static int spi_mem_access_start(struct spi_mem *mem)
mem               225 drivers/spi/spi-mem.c 	struct spi_controller *ctlr = mem->spi->controller;
mem               250 drivers/spi/spi-mem.c static void spi_mem_access_end(struct spi_mem *mem)
mem               252 drivers/spi/spi-mem.c 	struct spi_controller *ctlr = mem->spi->controller;
mem               273 drivers/spi/spi-mem.c int spi_mem_exec_op(struct spi_mem *mem, const struct spi_mem_op *op)
mem               276 drivers/spi/spi-mem.c 	struct spi_controller *ctlr = mem->spi->controller;
mem               286 drivers/spi/spi-mem.c 	if (!spi_mem_internal_supports_op(mem, op))
mem               290 drivers/spi/spi-mem.c 		ret = spi_mem_access_start(mem);
mem               294 drivers/spi/spi-mem.c 		ret = ctlr->mem_ops->exec_op(mem, op);
mem               296 drivers/spi/spi-mem.c 		spi_mem_access_end(mem);
mem               369 drivers/spi/spi-mem.c 	ret = spi_sync(mem->spi, &msg);
mem               395 drivers/spi/spi-mem.c const char *spi_mem_get_name(struct spi_mem *mem)
mem               397 drivers/spi/spi-mem.c 	return mem->name;
mem               416 drivers/spi/spi-mem.c int spi_mem_adjust_op_size(struct spi_mem *mem, struct spi_mem_op *op)
mem               418 drivers/spi/spi-mem.c 	struct spi_controller *ctlr = mem->spi->controller;
mem               424 drivers/spi/spi-mem.c 		return ctlr->mem_ops->adjust_op_size(mem, op);
mem               427 drivers/spi/spi-mem.c 		if (len > spi_max_transfer_size(mem->spi))
mem               431 drivers/spi/spi-mem.c 				       spi_max_transfer_size(mem->spi),
mem               432 drivers/spi/spi-mem.c 				       spi_max_message_size(mem->spi) -
mem               451 drivers/spi/spi-mem.c 	ret = spi_mem_adjust_op_size(desc->mem, &op);
mem               455 drivers/spi/spi-mem.c 	ret = spi_mem_exec_op(desc->mem, &op);
mem               471 drivers/spi/spi-mem.c 	ret = spi_mem_adjust_op_size(desc->mem, &op);
mem               475 drivers/spi/spi-mem.c 	ret = spi_mem_exec_op(desc->mem, &op);
mem               496 drivers/spi/spi-mem.c spi_mem_dirmap_create(struct spi_mem *mem,
mem               499 drivers/spi/spi-mem.c 	struct spi_controller *ctlr = mem->spi->controller;
mem               515 drivers/spi/spi-mem.c 	desc->mem = mem;
mem               522 drivers/spi/spi-mem.c 		if (!spi_mem_supports_op(desc->mem, &desc->info.op_tmpl))
mem               546 drivers/spi/spi-mem.c 	struct spi_controller *ctlr = desc->mem->spi->controller;
mem               575 drivers/spi/spi-mem.c devm_spi_mem_dirmap_create(struct device *dev, struct spi_mem *mem,
mem               585 drivers/spi/spi-mem.c 	desc = spi_mem_dirmap_create(mem, info);
mem               643 drivers/spi/spi-mem.c 	struct spi_controller *ctlr = desc->mem->spi->controller;
mem               655 drivers/spi/spi-mem.c 		ret = spi_mem_access_start(desc->mem);
mem               661 drivers/spi/spi-mem.c 		spi_mem_access_end(desc->mem);
mem               689 drivers/spi/spi-mem.c 	struct spi_controller *ctlr = desc->mem->spi->controller;
mem               701 drivers/spi/spi-mem.c 		ret = spi_mem_access_start(desc->mem);
mem               707 drivers/spi/spi-mem.c 		spi_mem_access_end(desc->mem);
mem               725 drivers/spi/spi-mem.c 	struct spi_mem *mem;
mem               727 drivers/spi/spi-mem.c 	mem = devm_kzalloc(&spi->dev, sizeof(*mem), GFP_KERNEL);
mem               728 drivers/spi/spi-mem.c 	if (!mem)
mem               731 drivers/spi/spi-mem.c 	mem->spi = spi;
mem               734 drivers/spi/spi-mem.c 		mem->name = ctlr->mem_ops->get_name(mem);
mem               736 drivers/spi/spi-mem.c 		mem->name = dev_name(&spi->dev);
mem               738 drivers/spi/spi-mem.c 	if (IS_ERR_OR_NULL(mem->name))
mem               739 drivers/spi/spi-mem.c 		return PTR_ERR(mem->name);
mem               741 drivers/spi/spi-mem.c 	spi_set_drvdata(spi, mem);
mem               743 drivers/spi/spi-mem.c 	return memdrv->probe(mem);
mem               749 drivers/spi/spi-mem.c 	struct spi_mem *mem = spi_get_drvdata(spi);
mem               752 drivers/spi/spi-mem.c 		return memdrv->remove(mem);
mem               760 drivers/spi/spi-mem.c 	struct spi_mem *mem = spi_get_drvdata(spi);
mem               763 drivers/spi/spi-mem.c 		memdrv->shutdown(mem);
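The spi-mem.c hits above form the core of the spi-mem API: a memory-device client describes one transaction as a struct spi_mem_op, and the core checks it against the controller (spi_mem_supports_op(), spi_mem_adjust_op_size()) before dispatching it through the controller's exec_op() or falling back to plain SPI transfers in spi_mem_exec_op(). A minimal caller-side sketch, assuming the SPI_MEM_OP helpers from <linux/spi/spi-mem.h>; the 0x9f opcode, 3-byte length and single-bit bus widths are assumptions for illustration:

#include <linux/spi/spi-mem.h>

/*
 * Read a 3-byte JEDEC-style ID through the spi-mem interface.  Purely
 * illustrative: opcode, length and error handling are kept minimal.
 */
static int my_read_id(struct spi_mem *mem, u8 *id)
{
	struct spi_mem_op op = SPI_MEM_OP(SPI_MEM_OP_CMD(0x9f, 1),
					  SPI_MEM_OP_NO_ADDR,
					  SPI_MEM_OP_NO_DUMMY,
					  SPI_MEM_OP_DATA_IN(3, id, 1));
	int ret;

	/* Let the controller shrink op.data.nbytes if it has to */
	ret = spi_mem_adjust_op_size(mem, &op);
	if (ret)
		return ret;

	if (!spi_mem_supports_op(mem, &op))
		return -EOPNOTSUPP;

	return spi_mem_exec_op(mem, &op);
}

The controller-side counterparts of this call are the exec_op()/supports_op()/adjust_op_size() hooks seen in the atmel-quadspi, fsl-qspi, nxp-fspi, npcm-fiu and zynq-qspi hits that follow.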
mem               335 drivers/spi/spi-mxic.c static bool mxic_spi_mem_supports_op(struct spi_mem *mem,
mem               352 drivers/spi/spi-mxic.c static int mxic_spi_mem_exec_op(struct spi_mem *mem,
mem               355 drivers/spi/spi-mxic.c 	struct mxic_spi *mxic = spi_master_get_devdata(mem->spi->master);
mem               360 drivers/spi/spi-mxic.c 	ret = mxic_spi_set_freq(mxic, mem->spi->max_speed_hz);
mem               364 drivers/spi/spi-mxic.c 	if (mem->spi->mode & (SPI_TX_QUAD | SPI_RX_QUAD))
mem               366 drivers/spi/spi-mxic.c 	else if (mem->spi->mode & (SPI_TX_DUAL | SPI_RX_DUAL))
mem               370 drivers/spi/spi-mxic.c 	       HC_CFG_TYPE(mem->spi->chip_select, HC_CFG_TYPE_SPI_NOR) |
mem               371 drivers/spi/spi-mxic.c 	       HC_CFG_SLV_ACT(mem->spi->chip_select) | HC_CFG_IDLE_SIO_LVL(1) |
mem               391 drivers/spi/spi-mxic.c 	writel(ss_ctrl, mxic->regs + SS_CTRL(mem->spi->chip_select));
mem               271 drivers/spi/spi-npcm-fiu.c 		spi_controller_get_devdata(desc->mem->spi->master);
mem               272 drivers/spi/spi-npcm-fiu.c 	struct npcm_fiu_chip *chip = &fiu->chip[desc->mem->spi->chip_select];
mem               298 drivers/spi/spi-npcm-fiu.c 		spi_controller_get_devdata(desc->mem->spi->master);
mem               299 drivers/spi/spi-npcm-fiu.c 	struct npcm_fiu_chip *chip = &fiu->chip[desc->mem->spi->chip_select];
mem               314 drivers/spi/spi-npcm-fiu.c static int npcm_fiu_uma_read(struct spi_mem *mem,
mem               319 drivers/spi/spi-npcm-fiu.c 		spi_controller_get_devdata(mem->spi->master);
mem               328 drivers/spi/spi-npcm-fiu.c 			   (mem->spi->chip_select <<
mem               369 drivers/spi/spi-npcm-fiu.c static int npcm_fiu_uma_write(struct spi_mem *mem,
mem               374 drivers/spi/spi-npcm-fiu.c 		spi_controller_get_devdata(mem->spi->master);
mem               382 drivers/spi/spi-npcm-fiu.c 			   (mem->spi->chip_select <<
mem               419 drivers/spi/spi-npcm-fiu.c static int npcm_fiu_manualwrite(struct spi_mem *mem,
mem               423 drivers/spi/spi-npcm-fiu.c 		spi_controller_get_devdata(mem->spi->master);
mem               435 drivers/spi/spi-npcm-fiu.c 			   (mem->spi->chip_select <<
mem               440 drivers/spi/spi-npcm-fiu.c 	ret = npcm_fiu_uma_write(mem, op, op->cmd.opcode, true, NULL, 0);
mem               446 drivers/spi/spi-npcm-fiu.c 		ret = npcm_fiu_uma_write(mem, op, data[0], false,
mem               456 drivers/spi/spi-npcm-fiu.c 		ret = npcm_fiu_uma_write(mem, op, data[0], false,
mem               468 drivers/spi/spi-npcm-fiu.c static int npcm_fiu_read(struct spi_mem *mem, const struct spi_mem_op *op)
mem               487 drivers/spi/spi-npcm-fiu.c 		ret = npcm_fiu_uma_read(mem, op, addr, true, buf_ptr,
mem               525 drivers/spi/spi-npcm-fiu.c static int npcm_fiu_exec_op(struct spi_mem *mem, const struct spi_mem_op *op)
mem               528 drivers/spi/spi-npcm-fiu.c 		spi_controller_get_devdata(mem->spi->master);
mem               529 drivers/spi/spi-npcm-fiu.c 	struct npcm_fiu_chip *chip = &fiu->chip[mem->spi->chip_select];
mem               553 drivers/spi/spi-npcm-fiu.c 			ret = npcm_fiu_uma_read(mem, op, op->addr.val, false,
mem               556 drivers/spi/spi-npcm-fiu.c 			ret = npcm_fiu_read(mem, op);
mem               560 drivers/spi/spi-npcm-fiu.c 			ret = npcm_fiu_uma_write(mem, op, op->cmd.opcode, false,
mem               571 drivers/spi/spi-npcm-fiu.c 			ret = npcm_fiu_uma_write(mem, op, op->cmd.opcode, false,
mem               575 drivers/spi/spi-npcm-fiu.c 			ret = npcm_fiu_uma_write(mem, op, op->cmd.opcode, false,
mem               579 drivers/spi/spi-npcm-fiu.c 			ret = npcm_fiu_manualwrite(mem, op);
mem               588 drivers/spi/spi-npcm-fiu.c 		spi_controller_get_devdata(desc->mem->spi->master);
mem               589 drivers/spi/spi-npcm-fiu.c 	struct npcm_fiu_chip *chip = &fiu->chip[desc->mem->spi->chip_select];
mem               608 drivers/spi/spi-npcm-fiu.c 						    desc->mem->spi->chip_select)),
mem               392 drivers/spi/spi-nxp-fspi.c static bool nxp_fspi_supports_op(struct spi_mem *mem,
mem               395 drivers/spi/spi-nxp-fspi.c 	struct nxp_fspi *f = spi_controller_get_devdata(mem->spi->master);
mem               442 drivers/spi/spi-nxp-fspi.c 	return spi_mem_default_supports_op(mem, op);
mem               787 drivers/spi/spi-nxp-fspi.c static int nxp_fspi_exec_op(struct spi_mem *mem, const struct spi_mem_op *op)
mem               789 drivers/spi/spi-nxp-fspi.c 	struct nxp_fspi *f = spi_controller_get_devdata(mem->spi->master);
mem               799 drivers/spi/spi-nxp-fspi.c 	nxp_fspi_select_mem(f, mem->spi);
mem               825 drivers/spi/spi-nxp-fspi.c static int nxp_fspi_adjust_op_size(struct spi_mem *mem, struct spi_mem_op *op)
mem               827 drivers/spi/spi-nxp-fspi.c 	struct nxp_fspi *f = spi_controller_get_devdata(mem->spi->master);
mem               914 drivers/spi/spi-nxp-fspi.c static const char *nxp_fspi_get_name(struct spi_mem *mem)
mem               916 drivers/spi/spi-nxp-fspi.c 	struct nxp_fspi *f = spi_controller_get_devdata(mem->spi->master);
mem               917 drivers/spi/spi-nxp-fspi.c 	struct device *dev = &mem->spi->dev;
mem               926 drivers/spi/spi-nxp-fspi.c 			      mem->spi->chip_select);
mem               702 drivers/spi/spi-pic32.c 	struct resource *mem;
mem               705 drivers/spi/spi-pic32.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               706 drivers/spi/spi-pic32.c 	pic32s->regs = devm_ioremap_resource(&pdev->dev, mem);
mem               710 drivers/spi/spi-pic32.c 	pic32s->dma_base = mem->start;
mem               590 drivers/spi/spi-rockchip.c 	struct resource *mem;
mem               602 drivers/spi/spi-rockchip.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               603 drivers/spi/spi-rockchip.c 	rs->regs = devm_ioremap_resource(&pdev->dev, mem);
mem               713 drivers/spi/spi-rockchip.c 		rs->dma_addr_tx = mem->start + ROCKCHIP_SPI_TXDR;
mem               714 drivers/spi/spi-rockchip.c 		rs->dma_addr_rx = mem->start + ROCKCHIP_SPI_RXDR;
mem               327 drivers/spi/spi-stm32-qspi.c static int stm32_qspi_send(struct spi_mem *mem, const struct spi_mem_op *op)
mem               329 drivers/spi/spi-stm32-qspi.c 	struct stm32_qspi *qspi = spi_controller_get_devdata(mem->spi->master);
mem               330 drivers/spi/spi-stm32-qspi.c 	struct stm32_qspi_flash *flash = &qspi->flash[mem->spi->chip_select];
mem               429 drivers/spi/spi-stm32-qspi.c static int stm32_qspi_exec_op(struct spi_mem *mem, const struct spi_mem_op *op)
mem               431 drivers/spi/spi-stm32-qspi.c 	struct stm32_qspi *qspi = spi_controller_get_devdata(mem->spi->master);
mem               435 drivers/spi/spi-stm32-qspi.c 	ret = stm32_qspi_send(mem, op);
mem               530 drivers/spi/spi-ti-qspi.c static int ti_qspi_exec_mem_op(struct spi_mem *mem,
mem               533 drivers/spi/spi-ti-qspi.c 	struct ti_qspi *qspi = spi_master_get_devdata(mem->spi->master);
mem               549 drivers/spi/spi-ti-qspi.c 	if (!qspi->mmap_enabled || qspi->current_cs != mem->spi->chip_select)
mem               550 drivers/spi/spi-ti-qspi.c 		ti_qspi_enable_memory_map(mem->spi);
mem               551 drivers/spi/spi-ti-qspi.c 	ti_qspi_setup_mmap_read(mem->spi, op->cmd.opcode, op->data.buswidth,
mem               558 drivers/spi/spi-ti-qspi.c 		    !spi_controller_dma_map_mem_op_data(mem->spi->master, op,
mem               561 drivers/spi/spi-ti-qspi.c 			spi_controller_dma_unmap_mem_op_data(mem->spi->master,
mem                83 drivers/spi/spi-xtensa-xtfpga.c 	struct resource *mem;
mem               101 drivers/spi/spi-xtensa-xtfpga.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               102 drivers/spi/spi-xtensa-xtfpga.c 	if (!mem) {
mem               107 drivers/spi/spi-xtensa-xtfpga.c 	xspi->regs = devm_ioremap_resource(&pdev->dev, mem);
mem               220 drivers/spi/spi-zynq-qspi.c static bool zynq_qspi_supports_op(struct spi_mem *mem,
mem               223 drivers/spi/spi-zynq-qspi.c 	if (!spi_mem_default_supports_op(mem, op))
mem               513 drivers/spi/spi-zynq-qspi.c static int zynq_qspi_exec_mem_op(struct spi_mem *mem,
mem               516 drivers/spi/spi-zynq-qspi.c 	struct zynq_qspi *xqspi = spi_controller_get_devdata(mem->spi->master);
mem               524 drivers/spi/spi-zynq-qspi.c 	zynq_qspi_chipselect(mem->spi, true);
mem               525 drivers/spi/spi-zynq-qspi.c 	zynq_qspi_config_op(xqspi, mem->spi);
mem               598 drivers/spi/spi-zynq-qspi.c 	zynq_qspi_chipselect(mem->spi, false);
mem               128 drivers/staging/axis-fifo/axis-fifo.c 	struct resource *mem; /* physical memory */
mem               775 drivers/staging/axis-fifo/axis-fifo.c 	fifo->mem = r_mem;
mem               778 drivers/staging/axis-fifo/axis-fifo.c 	if (!request_mem_region(fifo->mem->start, resource_size(fifo->mem),
mem               782 drivers/staging/axis-fifo/axis-fifo.c 			&fifo->mem->start);
mem               787 drivers/staging/axis-fifo/axis-fifo.c 		&fifo->mem->start, &fifo->mem->end);
mem               790 drivers/staging/axis-fifo/axis-fifo.c 	fifo->base_addr = ioremap(fifo->mem->start, resource_size(fifo->mem));
mem               800 drivers/staging/axis-fifo/axis-fifo.c 		 DRIVER_NAME, &fifo->mem->start);
mem               979 drivers/staging/axis-fifo/axis-fifo.c 			&fifo->mem->start);
mem              1032 drivers/staging/axis-fifo/axis-fifo.c 		 &fifo->mem->start, &fifo->base_addr, fifo->irq,
mem              1048 drivers/staging/axis-fifo/axis-fifo.c 	release_mem_region(fifo->mem->start, resource_size(fifo->mem));
mem              1066 drivers/staging/axis-fifo/axis-fifo.c 	release_mem_region(fifo->mem->start, resource_size(fifo->mem));
mem               138 drivers/staging/clocking-wizard/clk-xlnx-clock-wizard.c 	struct resource *mem;
mem               146 drivers/staging/clocking-wizard/clk-xlnx-clock-wizard.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               147 drivers/staging/clocking-wizard/clk-xlnx-clock-wizard.c 	clk_wzrd->base = devm_ioremap_resource(&pdev->dev, mem);
mem              1256 drivers/staging/gasket/gasket_page_table.c 	void *mem;
mem              1268 drivers/staging/gasket/gasket_page_table.c 	mem = dma_alloc_coherent(gasket_get_device(gasket_dev),
mem              1270 drivers/staging/gasket/gasket_page_table.c 	if (!mem)
mem              1286 drivers/staging/gasket/gasket_page_table.c 	gasket_dev->coherent_buffer.virt_base = mem;
mem              1293 drivers/staging/gasket/gasket_page_table.c 			(u64)mem + j * PAGE_SIZE;
mem              1299 drivers/staging/gasket/gasket_page_table.c 	if (mem) {
mem              1301 drivers/staging/gasket/gasket_page_table.c 				  num_pages * PAGE_SIZE, mem, handle);
mem               316 drivers/staging/kpc2000/kpc2000/cell_probe.c 	kudev->uioinfo.mem[0].name = "uiomap";
mem               317 drivers/staging/kpc2000/kpc2000/cell_probe.c 	kudev->uioinfo.mem[0].addr = pci_resource_start(pcard->pdev, REG_BAR) + cte.offset;
mem               318 drivers/staging/kpc2000/kpc2000/cell_probe.c 	kudev->uioinfo.mem[0].size = (cte.length + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1); // Round up to nearest PAGE_SIZE boundary
mem               319 drivers/staging/kpc2000/kpc2000/cell_probe.c 	kudev->uioinfo.mem[0].memtype = UIO_MEM_PHYS;
mem                74 drivers/staging/media/ipu3/ipu3-css-fw.c 				  enum imgu_abi_memories mem,
mem                82 drivers/staging/media/ipu3/ipu3-css-fw.c 	    bi->info.isp.sp.mem_initializers.params[cls][mem].size)
mem               184 drivers/staging/media/ipu3/ipu3-css-fw.h 				  enum imgu_abi_memories mem,
mem               189 drivers/staging/media/ipu3/ipu3-css.c static inline void writes(const void *mem, ssize_t count, void __iomem *addr)
mem               192 drivers/staging/media/ipu3/ipu3-css.c 		const u32 *buf = mem;
mem               811 drivers/staging/media/ipu3/ipu3-css.c 			css_pipe->aux_frames[IPU3_CSS_AUX_FRAME_REF].mem[i].daddr;
mem               813 drivers/staging/media/ipu3/ipu3-css.c 			css_pipe->aux_frames[IPU3_CSS_AUX_FRAME_REF].mem[i].daddr +
mem               861 drivers/staging/media/ipu3/ipu3-css.c 					.mem[i].daddr;
mem              1216 drivers/staging/media/ipu3/ipu3-css.c 				 &css_pipe->aux_frames[j].mem[i]);
mem              1221 drivers/staging/media/ipu3/ipu3-css.c 				 &css_pipe->aux_frames[j].mem[i]);
mem              1242 drivers/staging/media/ipu3/ipu3-css.c 				       mem[i], CSS_BDS_SIZE))
mem              1248 drivers/staging/media/ipu3/ipu3-css.c 				       mem[i], CSS_GDC_SIZE))
mem              1297 drivers/staging/media/ipu3/ipu3-css.c 			&css_pipe->aux_frames[IPU3_CSS_AUX_FRAME_REF].mem[i],
mem              1318 drivers/staging/media/ipu3/ipu3-css.c 			&css_pipe->aux_frames[IPU3_CSS_AUX_FRAME_TNR].mem[i],
mem               129 drivers/staging/media/ipu3/ipu3-css.h 		struct imgu_css_map mem[IPU3_CSS_AUX_FRAMES];
mem               911 drivers/staging/media/omap4iss/iss.c 	struct resource *mem;
mem               913 drivers/staging/media/omap4iss/iss.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, res);
mem               915 drivers/staging/media/omap4iss/iss.c 	iss->regs[res] = devm_ioremap_resource(iss->dev, mem);
mem               135 drivers/staging/mt7621-pci/pci-mt7621.c 	struct resource mem;
mem               138 drivers/staging/mt7621-pci/pci-mt7621.c 		resource_size_t mem;
mem               261 drivers/staging/mt7621-pci/pci-mt7621.c 	struct resource *mem_resource = &pcie->mem;
mem               304 drivers/staging/mt7621-pci/pci-mt7621.c 			res = &pcie->mem;
mem               305 drivers/staging/mt7621-pci/pci-mt7621.c 			pcie->offset.mem = 0x00000000UL;
mem               624 drivers/staging/mt7621-pci/pci-mt7621.c 	pci_add_resource_offset(res, &pcie->mem, pcie->offset.mem);
mem               424 drivers/staging/rtl8723bs/hal/sdio_ops.c 	u8 *mem
mem               445 drivers/staging/rtl8723bs/hal/sdio_ops.c 	err = _sd_read(intfhdl, addr, cnt, mem);
mem               472 drivers/staging/rtl8723bs/hal/sdio_ops.c 	u8 *mem
mem               478 drivers/staging/rtl8723bs/hal/sdio_ops.c 	struct xmit_buf *xmitbuf = (struct xmit_buf *)mem;
mem               304 drivers/staging/rtl8723bs/include/rtw_io.h #define rtw_write_port(adapter, addr, cnt, mem) _rtw_write_port((adapter), (addr), (cnt), (mem))
mem                25 drivers/staging/rtl8723bs/include/rtw_mp.h 	uint mem[(MAX_MP_XMITBUF_SZ >> 2)];
mem               376 drivers/staging/rtl8723bs/include/rtw_recv.h 		uint mem[RECVFRAME_HDR_ALIGN>>2];
mem              3555 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c 	const u8  *mem = void_mem;
mem              3565 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c 				s += scnprintf(s, 4, "%02x ", mem[offset]);
mem              3572 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c 				u8 ch = mem[offset];
mem              3589 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c 		mem += 16;
mem              2300 drivers/target/target_core_configfs.c 	struct t10_alua_lba_map_member *mem;
mem              2313 drivers/target/target_core_configfs.c 		list_for_each_entry(mem, &map->lba_map_mem_list,
mem              2315 drivers/target/target_core_configfs.c 			switch (mem->lba_map_mem_alua_state) {
mem              2333 drivers/target/target_core_configfs.c 				      mem->lba_map_mem_alua_pg_id, state);
mem              1496 drivers/target/target_core_user.c 		if (info->mem[vma->vm_pgoff].size == 0)
mem              1546 drivers/target/target_core_user.c 		addr = (void *)(unsigned long)info->mem[mi].addr + offset;
mem              1917 drivers/target/target_core_user.c 	info->mem[0].name = "tcm-user command & data buffer";
mem              1918 drivers/target/target_core_user.c 	info->mem[0].addr = (phys_addr_t)(uintptr_t)udev->mb_addr;
mem              1919 drivers/target/target_core_user.c 	info->mem[0].size = udev->ring_size = udev->data_size + CMDR_SIZE;
mem              1920 drivers/target/target_core_user.c 	info->mem[0].memtype = UIO_MEM_NONE;
mem                74 drivers/tty/serial/8250/8250_men_mcb.c 	struct resource *mem;
mem                79 drivers/tty/serial/8250/8250_men_mcb.c 	mem = mcb_get_resource(mdev, IORESOURCE_MEM);
mem                80 drivers/tty/serial/8250/8250_men_mcb.c 	if (mem == NULL)
mem                82 drivers/tty/serial/8250/8250_men_mcb.c 	membase = devm_ioremap_resource(&mdev->dev, mem);
mem               118 drivers/tty/serial/8250/8250_men_mcb.c 		data[i].uart.port.mapbase = (unsigned long) mem->start
mem               113 drivers/tty/serial/cpm_uart/cpm_uart.h 	u32 mem = (u32)pinfo->mem_addr;
mem               115 drivers/tty/serial/cpm_uart/cpm_uart.h 	if (likely(val >= mem && val < mem + pinfo->mem_size)) {
mem               116 drivers/tty/serial/cpm_uart/cpm_uart.h 		offset = val - mem;
mem              1147 drivers/tty/serial/cpm_uart/cpm_uart_core.c 	void __iomem *mem, *pram;
mem              1176 drivers/tty/serial/cpm_uart/cpm_uart_core.c 	mem = of_iomap(np, 0);
mem              1177 drivers/tty/serial/cpm_uart/cpm_uart_core.c 	if (!mem)
mem              1182 drivers/tty/serial/cpm_uart/cpm_uart_core.c 		pinfo->sccp = mem;
mem              1187 drivers/tty/serial/cpm_uart/cpm_uart_core.c 		pinfo->smcp = mem;
mem              1205 drivers/tty/serial/cpm_uart/cpm_uart_core.c 	pinfo->port.mapbase = (unsigned long)mem;
mem              1254 drivers/tty/serial/cpm_uart/cpm_uart_core.c 	iounmap(mem);
mem               122 drivers/tty/serial/men_z135_uart.c 	struct resource *mem;
mem               737 drivers/tty/serial/men_z135_uart.c 	mcb_release_mem(uart->mem);
mem               744 drivers/tty/serial/men_z135_uart.c 	struct resource *mem;
mem               746 drivers/tty/serial/men_z135_uart.c 	mem = mcb_request_mem(uart->mdev, dev_name(&mdev->dev));
mem               747 drivers/tty/serial/men_z135_uart.c 	if (IS_ERR(mem))
mem               748 drivers/tty/serial/men_z135_uart.c 		return PTR_ERR(mem);
mem               750 drivers/tty/serial/men_z135_uart.c 	port->mapbase = mem->start;
mem               751 drivers/tty/serial/men_z135_uart.c 	uart->mem = mem;
mem               753 drivers/tty/serial/men_z135_uart.c 	port->membase = ioremap(mem->start, resource_size(mem));
mem               755 drivers/tty/serial/men_z135_uart.c 		mcb_release_mem(mem);
mem               813 drivers/tty/serial/men_z135_uart.c 	struct resource *mem;
mem               827 drivers/tty/serial/men_z135_uart.c 	mem = &mdev->mem;
mem               841 drivers/tty/serial/men_z135_uart.c 	uart->port.mapbase = mem->start;
mem              1646 drivers/tty/serial/omap-serial.c 	struct resource *mem;
mem              1670 drivers/tty/serial/omap-serial.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem              1671 drivers/tty/serial/omap-serial.c 	base = devm_ioremap_resource(&pdev->dev, mem);
mem              1713 drivers/tty/serial/omap-serial.c 	up->port.mapbase = mem->start;
mem               894 drivers/tty/serial/sifive.c 	struct resource *mem;
mem               903 drivers/tty/serial/sifive.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               904 drivers/tty/serial/sifive.c 	base = devm_ioremap_resource(&pdev->dev, mem);
mem               940 drivers/tty/serial/sifive.c 	ssp->port.mapbase = mem->start;
mem                44 drivers/uio/uio.c 	struct uio_mem *mem;
mem                48 drivers/uio/uio.c static ssize_t map_name_show(struct uio_mem *mem, char *buf)
mem                50 drivers/uio/uio.c 	if (unlikely(!mem->name))
mem                51 drivers/uio/uio.c 		mem->name = "";
mem                53 drivers/uio/uio.c 	return sprintf(buf, "%s\n", mem->name);
mem                56 drivers/uio/uio.c static ssize_t map_addr_show(struct uio_mem *mem, char *buf)
mem                58 drivers/uio/uio.c 	return sprintf(buf, "%pa\n", &mem->addr);
mem                61 drivers/uio/uio.c static ssize_t map_size_show(struct uio_mem *mem, char *buf)
mem                63 drivers/uio/uio.c 	return sprintf(buf, "%pa\n", &mem->size);
mem                66 drivers/uio/uio.c static ssize_t map_offset_show(struct uio_mem *mem, char *buf)
mem                68 drivers/uio/uio.c 	return sprintf(buf, "0x%llx\n", (unsigned long long)mem->offs);
mem               104 drivers/uio/uio.c 	struct uio_mem *mem = map->mem;
mem               112 drivers/uio/uio.c 	return entry->show(mem, buf);
mem               288 drivers/uio/uio.c 	struct uio_mem *mem;
mem               294 drivers/uio/uio.c 		mem = &idev->info->mem[mi];
mem               295 drivers/uio/uio.c 		if (mem->size == 0)
mem               312 drivers/uio/uio.c 		map->mem = mem;
mem               313 drivers/uio/uio.c 		mem->map = map;
mem               367 drivers/uio/uio.c 		mem = &idev->info->mem[mi];
mem               368 drivers/uio/uio.c 		map = mem->map;
mem               379 drivers/uio/uio.c 	struct uio_mem *mem;
mem               383 drivers/uio/uio.c 		mem = &idev->info->mem[i];
mem               384 drivers/uio/uio.c 		if (mem->size == 0)
mem               386 drivers/uio/uio.c 		kobject_put(&mem->map->kobj);
mem               660 drivers/uio/uio.c 		if (idev->info->mem[vma->vm_pgoff].size == 0)
mem               694 drivers/uio/uio.c 	addr = (void *)(unsigned long)idev->info->mem[mi].addr + offset;
mem               695 drivers/uio/uio.c 	if (idev->info->mem[mi].memtype == UIO_MEM_LOGICAL)
mem               729 drivers/uio/uio.c 	struct uio_mem *mem;
mem               733 drivers/uio/uio.c 	mem = idev->info->mem + mi;
mem               735 drivers/uio/uio.c 	if (mem->addr & ~PAGE_MASK)
mem               737 drivers/uio/uio.c 	if (vma->vm_end - vma->vm_start > mem->size)
mem               741 drivers/uio/uio.c 	if (idev->info->mem[mi].memtype == UIO_MEM_PHYS)
mem               755 drivers/uio/uio.c 			       mem->addr >> PAGE_SHIFT,
mem               786 drivers/uio/uio.c 	actual_pages = ((idev->info->mem[mi].addr & ~PAGE_MASK)
mem               787 drivers/uio/uio.c 			+ idev->info->mem[mi].size + PAGE_SIZE -1) >> PAGE_SHIFT;
mem               798 drivers/uio/uio.c 	switch (idev->info->mem[mi].memtype) {
mem                29 drivers/uio/uio_cif.c 	void __iomem *plx_intscr = dev_info->mem[0].internal_addr
mem                56 drivers/uio/uio_cif.c 	info->mem[0].addr = pci_resource_start(dev, 0);
mem                57 drivers/uio/uio_cif.c 	if (!info->mem[0].addr)
mem                59 drivers/uio/uio_cif.c 	info->mem[0].internal_addr = pci_ioremap_bar(dev, 0);
mem                60 drivers/uio/uio_cif.c 	if (!info->mem[0].internal_addr)
mem                63 drivers/uio/uio_cif.c 	info->mem[0].size = pci_resource_len(dev, 0);
mem                64 drivers/uio/uio_cif.c 	info->mem[0].memtype = UIO_MEM_PHYS;
mem                65 drivers/uio/uio_cif.c 	info->mem[1].addr = pci_resource_start(dev, 2);
mem                66 drivers/uio/uio_cif.c 	info->mem[1].size = pci_resource_len(dev, 2);
mem                67 drivers/uio/uio_cif.c 	info->mem[1].memtype = UIO_MEM_PHYS;
mem                90 drivers/uio/uio_cif.c 	iounmap(info->mem[0].internal_addr);
mem               107 drivers/uio/uio_cif.c 	iounmap(info->mem[0].internal_addr);
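The uio.c core lines and the uio_cif.c driver just above revolve around the struct uio_info mem[] array: a driver fills up to MAX_UIO_MAPS entries with an address, size and memtype, and the UIO core exposes each one to user space as an mmap()-able map. A hedged sketch of a PCI-style registration in the spirit of uio_cif.c; "my_uio", the BAR choice and the interrupt handler are placeholders, and pci_enable_device()/region claiming are assumed to have been done by the caller:

#include <linux/interrupt.h>
#include <linux/pci.h>
#include <linux/uio_driver.h>

static irqreturn_t my_uio_handler(int irq, struct uio_info *info)
{
	/* A real driver must check and acknowledge its own interrupt here */
	return IRQ_HANDLED;
}

static int my_uio_setup(struct pci_dev *pdev, struct uio_info *info)
{
	info->name = "my_uio";
	info->version = "0.1";

	/* Map 0: the device's register BAR, exposed as physical memory */
	info->mem[0].name = "registers";
	info->mem[0].addr = pci_resource_start(pdev, 0);
	info->mem[0].size = pci_resource_len(pdev, 0);
	info->mem[0].internal_addr = pci_ioremap_bar(pdev, 0);
	info->mem[0].memtype = UIO_MEM_PHYS;
	if (!info->mem[0].addr || !info->mem[0].internal_addr)
		return -ENODEV;

	info->irq = pdev->irq;
	info->irq_flags = IRQF_SHARED;
	info->handler = my_uio_handler;

	return uio_register_device(&pdev->dev, info);
}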
mem                50 drivers/uio/uio_dmem_genirq.c 	uiomem = &priv->uioinfo->mem[priv->dmem_region_start];
mem                53 drivers/uio/uio_dmem_genirq.c 	while (!priv->refcnt && uiomem < &priv->uioinfo->mem[MAX_UIO_MAPS]) {
mem                83 drivers/uio/uio_dmem_genirq.c 	uiomem = &priv->uioinfo->mem[priv->dmem_region_start];
mem                88 drivers/uio/uio_dmem_genirq.c 	while (!priv->refcnt && uiomem < &priv->uioinfo->mem[MAX_UIO_MAPS]) {
mem               209 drivers/uio/uio_dmem_genirq.c 	uiomem = &uioinfo->mem[0];
mem               217 drivers/uio/uio_dmem_genirq.c 		if (uiomem >= &uioinfo->mem[MAX_UIO_MAPS]) {
mem               230 drivers/uio/uio_dmem_genirq.c 	priv->dmem_region_start = uiomem - &uioinfo->mem[0];
mem               234 drivers/uio/uio_dmem_genirq.c 		if (uiomem >= &uioinfo->mem[MAX_UIO_MAPS]) {
mem               246 drivers/uio/uio_dmem_genirq.c 	while (uiomem < &uioinfo->mem[MAX_UIO_MAPS]) {
mem               158 drivers/uio/uio_fsl_elbc_gpcm.c 	void __iomem *reg_int_en = info->mem[0].internal_addr +
mem               161 drivers/uio/uio_fsl_elbc_gpcm.c 	void __iomem *reg_int_stat = info->mem[0].internal_addr +
mem               190 drivers/uio/uio_fsl_elbc_gpcm.c 	iowrite32(0, info->mem[0].internal_addr + win0_offset +
mem               200 drivers/uio/uio_fsl_elbc_gpcm.c 	iowrite32(0, info->mem[0].internal_addr + DPM_HOST_WIN0_OFFSET +
mem               382 drivers/uio/uio_fsl_elbc_gpcm.c 	info->mem[0].internal_addr = ioremap(res.start, resource_size(&res));
mem               383 drivers/uio/uio_fsl_elbc_gpcm.c 	if (!info->mem[0].internal_addr) {
mem               390 drivers/uio/uio_fsl_elbc_gpcm.c 	info->mem[0].name = kasprintf(GFP_KERNEL, "%pOFn", node);
mem               391 drivers/uio/uio_fsl_elbc_gpcm.c 	info->mem[0].addr = res.start;
mem               392 drivers/uio/uio_fsl_elbc_gpcm.c 	info->mem[0].size = resource_size(&res);
mem               393 drivers/uio/uio_fsl_elbc_gpcm.c 	info->mem[0].memtype = UIO_MEM_PHYS;
mem               430 drivers/uio/uio_fsl_elbc_gpcm.c 	iounmap(info->mem[0].internal_addr);
mem               432 drivers/uio/uio_fsl_elbc_gpcm.c 	kfree(info->mem[0].name);
mem               449 drivers/uio/uio_fsl_elbc_gpcm.c 	iounmap(info->mem[0].internal_addr);
mem               450 drivers/uio/uio_fsl_elbc_gpcm.c 	kfree(info->mem[0].name);
mem               271 drivers/uio/uio_hv_generic.c 	pdata->info.mem[TXRX_RING_MAP].name = "txrx_rings";
mem               273 drivers/uio/uio_hv_generic.c 	pdata->info.mem[TXRX_RING_MAP].addr
mem               275 drivers/uio/uio_hv_generic.c 	pdata->info.mem[TXRX_RING_MAP].size
mem               277 drivers/uio/uio_hv_generic.c 	pdata->info.mem[TXRX_RING_MAP].memtype = UIO_MEM_IOVA;
mem               279 drivers/uio/uio_hv_generic.c 	pdata->info.mem[INT_PAGE_MAP].name = "int_page";
mem               280 drivers/uio/uio_hv_generic.c 	pdata->info.mem[INT_PAGE_MAP].addr
mem               282 drivers/uio/uio_hv_generic.c 	pdata->info.mem[INT_PAGE_MAP].size = PAGE_SIZE;
mem               283 drivers/uio/uio_hv_generic.c 	pdata->info.mem[INT_PAGE_MAP].memtype = UIO_MEM_LOGICAL;
mem               285 drivers/uio/uio_hv_generic.c 	pdata->info.mem[MON_PAGE_MAP].name = "monitor_page";
mem               286 drivers/uio/uio_hv_generic.c 	pdata->info.mem[MON_PAGE_MAP].addr
mem               288 drivers/uio/uio_hv_generic.c 	pdata->info.mem[MON_PAGE_MAP].size = PAGE_SIZE;
mem               289 drivers/uio/uio_hv_generic.c 	pdata->info.mem[MON_PAGE_MAP].memtype = UIO_MEM_LOGICAL;
mem               305 drivers/uio/uio_hv_generic.c 	pdata->info.mem[RECV_BUF_MAP].name = pdata->recv_name;
mem               306 drivers/uio/uio_hv_generic.c 	pdata->info.mem[RECV_BUF_MAP].addr
mem               308 drivers/uio/uio_hv_generic.c 	pdata->info.mem[RECV_BUF_MAP].size = RECV_BUFFER_SIZE;
mem               309 drivers/uio/uio_hv_generic.c 	pdata->info.mem[RECV_BUF_MAP].memtype = UIO_MEM_VIRTUAL;
mem               324 drivers/uio/uio_hv_generic.c 	pdata->info.mem[SEND_BUF_MAP].name = pdata->send_name;
mem               325 drivers/uio/uio_hv_generic.c 	pdata->info.mem[SEND_BUF_MAP].addr
mem               327 drivers/uio/uio_hv_generic.c 	pdata->info.mem[SEND_BUF_MAP].size = SEND_BUFFER_SIZE;
mem               328 drivers/uio/uio_hv_generic.c 	pdata->info.mem[SEND_BUF_MAP].memtype = UIO_MEM_VIRTUAL;
mem                35 drivers/uio/uio_mf624.c 	void __iomem *INTCSR_reg = info->mem[0].internal_addr + INTCSR;
mem                63 drivers/uio/uio_mf624.c 	void __iomem *INTCSR_reg = info->mem[0].internal_addr + INTCSR;
mem                90 drivers/uio/uio_mf624.c 	void __iomem *INTCSR_reg = info->mem[0].internal_addr + INTCSR;
mem               117 drivers/uio/uio_mf624.c static int mf624_setup_mem(struct pci_dev *dev, int bar, struct uio_mem *mem, const char *name)
mem               122 drivers/uio/uio_mf624.c 	mem->name = name;
mem               123 drivers/uio/uio_mf624.c 	mem->addr = start & PAGE_MASK;
mem               124 drivers/uio/uio_mf624.c 	mem->offs = start & ~PAGE_MASK;
mem               125 drivers/uio/uio_mf624.c 	if (!mem->addr)
mem               127 drivers/uio/uio_mf624.c 	mem->size = ((start & ~PAGE_MASK) + len + PAGE_SIZE - 1) & PAGE_MASK;
mem               128 drivers/uio/uio_mf624.c 	mem->memtype = UIO_MEM_PHYS;
mem               129 drivers/uio/uio_mf624.c 	mem->internal_addr = pci_ioremap_bar(dev, bar);
mem               130 drivers/uio/uio_mf624.c 	if (!mem->internal_addr)
mem               155 drivers/uio/uio_mf624.c 	if (mf624_setup_mem(dev, 0, &info->mem[0], "PCI chipset, interrupts, status "
mem               159 drivers/uio/uio_mf624.c 	if (mf624_setup_mem(dev, 2, &info->mem[1], "ADC, DAC, DIO"))
mem               163 drivers/uio/uio_mf624.c 	if (mf624_setup_mem(dev, 4, &info->mem[2], "Counter/timer chip"))
mem               180 drivers/uio/uio_mf624.c 	iounmap(info->mem[2].internal_addr);
mem               182 drivers/uio/uio_mf624.c 	iounmap(info->mem[1].internal_addr);
mem               184 drivers/uio/uio_mf624.c 	iounmap(info->mem[0].internal_addr);
mem               207 drivers/uio/uio_mf624.c 	iounmap(info->mem[0].internal_addr);
mem               208 drivers/uio/uio_mf624.c 	iounmap(info->mem[1].internal_addr);
mem               209 drivers/uio/uio_mf624.c 	iounmap(info->mem[2].internal_addr);
mem                34 drivers/uio/uio_netx.c 	void __iomem *int_enable_reg = dev_info->mem[0].internal_addr
mem                36 drivers/uio/uio_netx.c 	void __iomem *int_status_reg = dev_info->mem[0].internal_addr
mem                81 drivers/uio/uio_netx.c 	info->mem[0].addr = pci_resource_start(dev, bar);
mem                82 drivers/uio/uio_netx.c 	if (!info->mem[0].addr)
mem                84 drivers/uio/uio_netx.c 	info->mem[0].internal_addr = ioremap(pci_resource_start(dev, bar),
mem                87 drivers/uio/uio_netx.c 	if (!info->mem[0].internal_addr)
mem                90 drivers/uio/uio_netx.c 	info->mem[0].size = pci_resource_len(dev, bar);
mem                91 drivers/uio/uio_netx.c 	info->mem[0].memtype = UIO_MEM_PHYS;
mem                98 drivers/uio/uio_netx.c 	iowrite32(0, info->mem[0].internal_addr + DPM_HOST_INT_EN0);
mem               110 drivers/uio/uio_netx.c 	iounmap(info->mem[0].internal_addr);
mem               125 drivers/uio/uio_netx.c 	iowrite32(0, info->mem[0].internal_addr + DPM_HOST_INT_EN0);
mem               129 drivers/uio/uio_netx.c 	iounmap(info->mem[0].internal_addr);
mem               165 drivers/uio/uio_pdrv_genirq.c 	uiomem = &uioinfo->mem[0];
mem               173 drivers/uio/uio_pdrv_genirq.c 		if (uiomem >= &uioinfo->mem[MAX_UIO_MAPS]) {
mem               187 drivers/uio/uio_pdrv_genirq.c 	while (uiomem < &uioinfo->mem[MAX_UIO_MAPS]) {
mem               197 drivers/uio/uio_pruss.c 		p->mem[0].addr = regs_prussio->start;
mem               198 drivers/uio/uio_pruss.c 		p->mem[0].size = resource_size(regs_prussio);
mem               199 drivers/uio/uio_pruss.c 		p->mem[0].memtype = UIO_MEM_PHYS;
mem               201 drivers/uio/uio_pruss.c 		p->mem[1].addr = gdev->sram_paddr;
mem               202 drivers/uio/uio_pruss.c 		p->mem[1].size = sram_pool_sz;
mem               203 drivers/uio/uio_pruss.c 		p->mem[1].memtype = UIO_MEM_PHYS;
mem               205 drivers/uio/uio_pruss.c 		p->mem[2].addr = gdev->ddr_paddr;
mem               206 drivers/uio/uio_pruss.c 		p->mem[2].size = extram_pool_sz;
mem               207 drivers/uio/uio_pruss.c 		p->mem[2].memtype = UIO_MEM_PHYS;
mem                55 drivers/uio/uio_sercos3.c 	void __iomem *ier0 = info->mem[3].internal_addr + IER0_OFFSET;
mem                68 drivers/uio/uio_sercos3.c 	void __iomem *ier0 = info->mem[3].internal_addr + IER0_OFFSET;
mem                78 drivers/uio/uio_sercos3.c 	void __iomem *isr0 = info->mem[3].internal_addr + ISR0_OFFSET;
mem                79 drivers/uio/uio_sercos3.c 	void __iomem *ier0 = info->mem[3].internal_addr + IER0_OFFSET;
mem               108 drivers/uio/uio_sercos3.c 	info->mem[n].addr = pci_resource_start(dev, pci_bar);
mem               109 drivers/uio/uio_sercos3.c 	if (!info->mem[n].addr)
mem               111 drivers/uio/uio_sercos3.c 	info->mem[n].internal_addr = ioremap(pci_resource_start(dev, pci_bar),
mem               113 drivers/uio/uio_sercos3.c 	if (!info->mem[n].internal_addr)
mem               115 drivers/uio/uio_sercos3.c 	info->mem[n].size = pci_resource_len(dev, pci_bar);
mem               116 drivers/uio/uio_sercos3.c 	info->mem[n].memtype = UIO_MEM_PHYS;
mem               171 drivers/uio/uio_sercos3.c 		if (info->mem[i].internal_addr)
mem               172 drivers/uio/uio_sercos3.c 			iounmap(info->mem[i].internal_addr);
mem               193 drivers/uio/uio_sercos3.c 		if (info->mem[i].internal_addr)
mem               194 drivers/uio/uio_sercos3.c 			iounmap(info->mem[i].internal_addr);
mem                87 drivers/usb/core/devio.c 	void *mem;
mem               184 drivers/usb/core/devio.c 		usb_free_coherent(ps->dev, usbm->size, usbm->mem,
mem               222 drivers/usb/core/devio.c 	void *mem;
mem               237 drivers/usb/core/devio.c 	mem = usb_alloc_coherent(ps->dev, size, GFP_USER | __GFP_NOWARN,
mem               239 drivers/usb/core/devio.c 	if (!mem) {
mem               244 drivers/usb/core/devio.c 	memset(mem, 0, size);
mem               246 drivers/usb/core/devio.c 	usbm->mem = mem;
mem               256 drivers/usb/core/devio.c 				    virt_to_phys(usbm->mem) >> PAGE_SHIFT,
mem               262 drivers/usb/core/devio.c 		if (dma_mmap_coherent(hcd->self.sysdev, vma, mem, dma_handle,
mem              1725 drivers/usb/core/devio.c 			as->urb->transfer_buffer = as->usbm->mem +
mem              1243 drivers/usb/gadget/composite.c 	unsigned mem;
mem              1248 drivers/usb/gadget/composite.c 	mem = sizeof(*uc);
mem              1249 drivers/usb/gadget/composite.c 	mem += sizeof(void *) * (n_gstrings + 1);
mem              1250 drivers/usb/gadget/composite.c 	mem += sizeof(struct usb_gadget_strings) * n_gstrings;
mem              1251 drivers/usb/gadget/composite.c 	mem += sizeof(struct usb_string) * (n_strings + 1) * (n_gstrings);
mem              1252 drivers/usb/gadget/composite.c 	uc = kmalloc(mem, GFP_KERNEL);
mem               127 drivers/usb/gadget/config.c 	void *mem;
mem               135 drivers/usb/gadget/config.c 	mem = kmalloc(bytes, GFP_KERNEL);
mem               136 drivers/usb/gadget/config.c 	if (!mem)
mem               143 drivers/usb/gadget/config.c 	tmp = mem;
mem               144 drivers/usb/gadget/config.c 	ret = mem;
mem               145 drivers/usb/gadget/config.c 	mem += (n_desc + 1) * sizeof(*tmp);
mem               147 drivers/usb/gadget/config.c 		memcpy(mem, *src, (*src)->bLength);
mem               148 drivers/usb/gadget/config.c 		*tmp = mem;
mem               150 drivers/usb/gadget/config.c 		mem += (*src)->bLength;
mem               444 drivers/usb/gadget/function/f_uvc.c #define UVC_COPY_DESCRIPTOR(mem, dst, desc) \
mem               446 drivers/usb/gadget/function/f_uvc.c 		memcpy(mem, desc, (desc)->bLength); \
mem               447 drivers/usb/gadget/function/f_uvc.c 		*(dst)++ = mem; \
mem               448 drivers/usb/gadget/function/f_uvc.c 		mem += (desc)->bLength; \
mem               451 drivers/usb/gadget/function/f_uvc.c #define UVC_COPY_DESCRIPTORS(mem, dst, src) \
mem               455 drivers/usb/gadget/function/f_uvc.c 			memcpy(mem, *__src, (*__src)->bLength); \
mem               456 drivers/usb/gadget/function/f_uvc.c 			*dst++ = mem; \
mem               457 drivers/usb/gadget/function/f_uvc.c 			mem += (*__src)->bLength; \
mem               476 drivers/usb/gadget/function/f_uvc.c 	void *mem;
mem               546 drivers/usb/gadget/function/f_uvc.c 	mem = kmalloc((n_desc + 1) * sizeof(*src) + bytes, GFP_KERNEL);
mem               547 drivers/usb/gadget/function/f_uvc.c 	if (mem == NULL)
mem               550 drivers/usb/gadget/function/f_uvc.c 	hdr = mem;
mem               551 drivers/usb/gadget/function/f_uvc.c 	dst = mem;
mem               552 drivers/usb/gadget/function/f_uvc.c 	mem += (n_desc + 1) * sizeof(*src);
mem               555 drivers/usb/gadget/function/f_uvc.c 	UVC_COPY_DESCRIPTOR(mem, dst, &uvc_iad);
mem               556 drivers/usb/gadget/function/f_uvc.c 	UVC_COPY_DESCRIPTOR(mem, dst, &uvc_control_intf);
mem               558 drivers/usb/gadget/function/f_uvc.c 	uvc_control_header = mem;
mem               559 drivers/usb/gadget/function/f_uvc.c 	UVC_COPY_DESCRIPTORS(mem, dst,
mem               565 drivers/usb/gadget/function/f_uvc.c 	UVC_COPY_DESCRIPTOR(mem, dst, &uvc_control_ep);
mem               567 drivers/usb/gadget/function/f_uvc.c 		UVC_COPY_DESCRIPTOR(mem, dst, &uvc_ss_control_comp);
mem               569 drivers/usb/gadget/function/f_uvc.c 	UVC_COPY_DESCRIPTOR(mem, dst, &uvc_control_cs_ep);
mem               570 drivers/usb/gadget/function/f_uvc.c 	UVC_COPY_DESCRIPTOR(mem, dst, &uvc_streaming_intf_alt0);
mem               572 drivers/usb/gadget/function/f_uvc.c 	uvc_streaming_header = mem;
mem               573 drivers/usb/gadget/function/f_uvc.c 	UVC_COPY_DESCRIPTORS(mem, dst,
mem               578 drivers/usb/gadget/function/f_uvc.c 	UVC_COPY_DESCRIPTORS(mem, dst, uvc_streaming_std);
mem                73 drivers/usb/gadget/function/uvc_queue.c 	buf->mem = vb2_plane_vaddr(vb, 0);
mem                36 drivers/usb/gadget/function/uvc_queue.h 	void *mem;
mem                45 drivers/usb/gadget/function/uvc_video.c 	void *mem;
mem                48 drivers/usb/gadget/function/uvc_video.c 	mem = buf->mem + queue->buf_used;
mem                51 drivers/usb/gadget/function/uvc_video.c 	memcpy(data, mem, nbytes);
mem                61 drivers/usb/gadget/function/uvc_video.c 	void *mem = req->buf;
mem                67 drivers/usb/gadget/function/uvc_video.c 		ret = uvc_video_encode_header(video, buf, mem, len);
mem                69 drivers/usb/gadget/function/uvc_video.c 		mem += ret;
mem                75 drivers/usb/gadget/function/uvc_video.c 	ret = uvc_video_encode_data(video, buf, mem, len);
mem               101 drivers/usb/gadget/function/uvc_video.c 	void *mem = req->buf;
mem               106 drivers/usb/gadget/function/uvc_video.c 	ret = uvc_video_encode_header(video, buf, mem, len);
mem               107 drivers/usb/gadget/function/uvc_video.c 	mem += ret;
mem               111 drivers/usb/gadget/function/uvc_video.c 	ret = uvc_video_encode_data(video, buf, mem, len);
mem               438 drivers/usb/gadget/udc/bdc/bdc.h 	void	*mem;
mem               811 drivers/usb/gadget/udc/fusb300_udc.c #define UVC_COPY_DESCRIPTORS(mem, src) \
mem               815 drivers/usb/gadget/udc/fusb300_udc.c 			memcpy(mem, *__src, (*__src)->bLength); \
mem               816 drivers/usb/gadget/udc/fusb300_udc.c 			mem += (*__src)->bLength; \
mem               379 drivers/usb/host/ohci-da8xx.c 	struct resource *mem;
mem               438 drivers/usb/host/ohci-da8xx.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               439 drivers/usb/host/ohci-da8xx.c 	hcd->regs = devm_ioremap_resource(dev, mem);
mem               444 drivers/usb/host/ohci-da8xx.c 	hcd->rsrc_start = mem->start;
mem               445 drivers/usb/host/ohci-da8xx.c 	hcd->rsrc_len = resource_size(mem);
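The ohci-da8xx.c lines above are one instance of the platform_get_resource()/devm_ioremap_resource() idiom that recurs throughout this index (omapfb vrfb, isp1760 and virtio_mmio follow the same shape). A minimal hedged sketch, with an illustrative function name:

#include <linux/io.h>
#include <linux/platform_device.h>

static int demo_plat_probe(struct platform_device *pdev)
{
	struct resource *mem;
	void __iomem *base;

	/* devm_ioremap_resource() validates mem (including NULL) and logs errors. */
	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	base = devm_ioremap_resource(&pdev->dev, mem);
	if (IS_ERR(base))
		return PTR_ERR(base);

	dev_info(&pdev->dev, "registers %pR mapped at %p\n", mem, base);
	return 0;
}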
mem                92 drivers/usb/host/ohci-sm501.c 	struct resource	*res, *mem;
mem               100 drivers/usb/host/ohci-sm501.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 1);
mem               101 drivers/usb/host/ohci-sm501.c 	if (mem == NULL) {
mem               107 drivers/usb/host/ohci-sm501.c 	if (!request_mem_region(mem->start, resource_size(mem), pdev->name)) {
mem               160 drivers/usb/host/ohci-sm501.c 	if (usb_hcd_setup_local_mem(hcd, mem->start,
mem               161 drivers/usb/host/ohci-sm501.c 				    mem->start - mem->parent->start,
mem               162 drivers/usb/host/ohci-sm501.c 				    resource_size(mem)) < 0)
mem               182 drivers/usb/host/ohci-sm501.c 	release_mem_region(mem->start, resource_size(mem));
mem               190 drivers/usb/host/ohci-sm501.c 	struct resource	*mem;
mem               195 drivers/usb/host/ohci-sm501.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 1);
mem               196 drivers/usb/host/ohci-sm501.c 	if (mem)
mem               197 drivers/usb/host/ohci-sm501.c 		release_mem_region(mem->start, resource_size(mem));
mem               410 drivers/usb/host/oxu210hp-hcd.c 	struct oxu_onchip_mem	__iomem *mem;
mem               942 drivers/usb/host/oxu210hp-hcd.c 		qtd->buffer = (void *) &oxu->mem->db_pool[i];
mem               966 drivers/usb/host/oxu210hp-hcd.c 	index = (qtd->buffer - (void *) &oxu->mem->db_pool[0])
mem               995 drivers/usb/host/oxu210hp-hcd.c 	index = qtd - &oxu->mem->qtd_pool[0];
mem              1013 drivers/usb/host/oxu210hp-hcd.c 		qtd = (struct ehci_qtd *) &oxu->mem->qtd_pool[i];
mem              1037 drivers/usb/host/oxu210hp-hcd.c 	index = qh - &oxu->mem->qh_pool[0];
mem              1070 drivers/usb/host/oxu210hp-hcd.c 		qh = (struct ehci_qh *) &oxu->mem->qh_pool[i];
mem              1171 drivers/usb/host/oxu210hp-hcd.c 		oxu->mem->frame_list[i] = EHCI_LIST_END;
mem              1188 drivers/usb/host/oxu210hp-hcd.c 	oxu->periodic = (__le32 *) &oxu->mem->frame_list;
mem              3096 drivers/usb/host/oxu210hp-hcd.c 		oxu->mem = hcd->regs + OXU_SPH_MEM;
mem              3102 drivers/usb/host/oxu210hp-hcd.c 		oxu->mem = hcd->regs + OXU_OTG_MEM;
mem               108 drivers/usb/isp1760/isp1760-core.c int isp1760_register(struct resource *mem, int irq, unsigned long irqflags,
mem               134 drivers/usb/isp1760/isp1760-core.c 	isp->regs = devm_ioremap_resource(dev, mem);
mem               141 drivers/usb/isp1760/isp1760-core.c 		ret = isp1760_hcd_register(&isp->hcd, isp->regs, mem, irq,
mem                49 drivers/usb/isp1760/isp1760-core.h int isp1760_register(struct resource *mem, int irq, unsigned long irqflags,
mem              2190 drivers/usb/isp1760/isp1760-hcd.c 			 struct resource *mem, int irq, unsigned long irqflags,
mem              2208 drivers/usb/isp1760/isp1760-hcd.c 	hcd->rsrc_start = mem->start;
mem              2209 drivers/usb/isp1760/isp1760-hcd.c 	hcd->rsrc_len = resource_size(mem);
mem                74 drivers/usb/isp1760/isp1760-hcd.h 			 struct resource *mem, int irq, unsigned long irqflags,
mem                82 drivers/usb/isp1760/isp1760-hcd.h 				       void __iomem *regs, struct resource *mem,
mem               755 drivers/usb/mon/mon_text.c static void mon_text_ctor(void *mem)
mem               761 drivers/usb/mon/mon_text.c 	memset(mem, 0xe5, sizeof(struct mon_event_text));
mem              1093 drivers/usb/musb/tusb6010.c 	struct resource		*mem;
mem              1104 drivers/usb/musb/tusb6010.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem              1105 drivers/usb/musb/tusb6010.c 	musb->async = mem->start;
mem              1108 drivers/usb/musb/tusb6010.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 1);
mem              1109 drivers/usb/musb/tusb6010.c 	if (!mem) {
mem              1114 drivers/usb/musb/tusb6010.c 	musb->sync = mem->start;
mem              1116 drivers/usb/musb/tusb6010.c 	sync = ioremap(mem->start, resource_size(mem));
mem                37 drivers/vfio/pci/vfio_pci_nvlink2.c 	struct mm_iommu_table_group_mem_t *mem; /* Pre-registered RAM descr. */
mem               100 drivers/vfio/pci/vfio_pci_nvlink2.c 		if (data->mem) {
mem               101 drivers/vfio/pci/vfio_pci_nvlink2.c 			ret = mm_iommu_put(data->mm, data->mem);
mem               166 drivers/vfio/pci/vfio_pci_nvlink2.c 			vma_pages(vma), data->gpu_hpa, &data->mem);
mem                53 drivers/vfio/vfio_iommu_spapr_tce.c 	struct mm_iommu_table_group_mem_t *mem;
mem                92 drivers/vfio/vfio_iommu_spapr_tce.c 	ret = mm_iommu_put(container->mm, tcemem->mem);
mem               105 drivers/vfio/vfio_iommu_spapr_tce.c 	struct mm_iommu_table_group_mem_t *mem;
mem               113 drivers/vfio/vfio_iommu_spapr_tce.c 	mem = mm_iommu_get(container->mm, vaddr, size >> PAGE_SHIFT);
mem               114 drivers/vfio/vfio_iommu_spapr_tce.c 	if (!mem)
mem               118 drivers/vfio/vfio_iommu_spapr_tce.c 		if (tcemem->mem == mem) {
mem               129 drivers/vfio/vfio_iommu_spapr_tce.c 	mm_iommu_put(container->mm, mem);
mem               138 drivers/vfio/vfio_iommu_spapr_tce.c 	struct mm_iommu_table_group_mem_t *mem = NULL;
mem               146 drivers/vfio/vfio_iommu_spapr_tce.c 	mem = mm_iommu_get(container->mm, vaddr, entries);
mem               147 drivers/vfio/vfio_iommu_spapr_tce.c 	if (mem) {
mem               149 drivers/vfio/vfio_iommu_spapr_tce.c 			if (tcemem->mem == mem) {
mem               155 drivers/vfio/vfio_iommu_spapr_tce.c 		ret = mm_iommu_new(container->mm, vaddr, entries, &mem);
mem               166 drivers/vfio/vfio_iommu_spapr_tce.c 	tcemem->mem = mem;
mem               174 drivers/vfio/vfio_iommu_spapr_tce.c 	mm_iommu_put(container->mm, mem);
mem               394 drivers/vfio/vfio_iommu_spapr_tce.c 	struct mm_iommu_table_group_mem_t *mem;
mem               396 drivers/vfio/vfio_iommu_spapr_tce.c 	mem = mm_iommu_lookup(container->mm, tce, 1ULL << shift);
mem               397 drivers/vfio/vfio_iommu_spapr_tce.c 	if (!mem)
mem               400 drivers/vfio/vfio_iommu_spapr_tce.c 	ret = mm_iommu_ua_to_hpa(mem, tce, shift, phpa);
mem               404 drivers/vfio/vfio_iommu_spapr_tce.c 	*pmem = mem;
mem               412 drivers/vfio/vfio_iommu_spapr_tce.c 	struct mm_iommu_table_group_mem_t *mem = NULL;
mem               421 drivers/vfio/vfio_iommu_spapr_tce.c 			tbl->it_page_shift, &hpa, &mem);
mem               425 drivers/vfio/vfio_iommu_spapr_tce.c 	if (mem)
mem               426 drivers/vfio/vfio_iommu_spapr_tce.c 		mm_iommu_mapped_dec(mem);
mem               557 drivers/vfio/vfio_iommu_spapr_tce.c 		struct mm_iommu_table_group_mem_t *mem = NULL;
mem               561 drivers/vfio/vfio_iommu_spapr_tce.c 				tce, tbl->it_page_shift, &hpa, &mem);
mem               576 drivers/vfio/vfio_iommu_spapr_tce.c 		if (mm_iommu_mapped_inc(mem))
mem              1421 drivers/vhost/vhost.c 	struct vhost_memory mem, *newmem;
mem              1427 drivers/vhost/vhost.c 	if (copy_from_user(&mem, m, size))
mem              1429 drivers/vhost/vhost.c 	if (mem.padding)
mem              1431 drivers/vhost/vhost.c 	if (mem.nregions > max_mem_regions)
mem              1433 drivers/vhost/vhost.c 	newmem = kvzalloc(struct_size(newmem, regions, mem.nregions),
mem              1438 drivers/vhost/vhost.c 	memcpy(newmem, &mem, size);
mem              1440 drivers/vhost/vhost.c 			   mem.nregions * sizeof *m->regions)) {
mem              1452 drivers/vhost/vhost.c 	     region < newmem->regions + mem.nregions;
mem               567 drivers/video/fbdev/arkfb.c 	int rv, mem, step;
mem               590 drivers/video/fbdev/arkfb.c 	mem = ((var->bits_per_pixel * var->xres_virtual) >> 3) * var->yres_virtual;
mem               591 drivers/video/fbdev/arkfb.c 	if (mem > info->screen_size)
mem               594 drivers/video/fbdev/arkfb.c 		       mem >> 10, (unsigned int) (info->screen_size >> 10));
mem               451 drivers/video/fbdev/aty/aty128fb.c 	const struct aty128_meminfo *mem;   /* onboard mem info    */
mem              1003 drivers/video/fbdev/aty/aty128fb.c 		par->mem = &sdr_128;
mem              1006 drivers/video/fbdev/aty/aty128fb.c 		par->mem = &sdr_sgram;
mem              1009 drivers/video/fbdev/aty/aty128fb.c 		par->mem = &ddr_sgram;
mem              1012 drivers/video/fbdev/aty/aty128fb.c 		par->mem = &sdr_sgram;
mem              1434 drivers/video/fbdev/aty/aty128fb.c 	const struct aty128_meminfo *m = par->mem;
mem              1929 drivers/video/fbdev/aty/aty128fb.c 		printk("%dM %s\n", par->vram_size / (1024*1024), par->mem->name);
mem              1931 drivers/video/fbdev/aty/aty128fb.c 		printk("%dk %s\n", par->vram_size / 1024, par->mem->name);
mem              2914 drivers/video/fbdev/aty/atyfb_base.c 	u32 mem, chip_id;
mem              3005 drivers/video/fbdev/aty/atyfb_base.c 		mem = aty_ld_le32(MEM_CNTL, par);
mem              3008 drivers/video/fbdev/aty/atyfb_base.c 			switch (mem & 0x0f) {
mem              3010 drivers/video/fbdev/aty/atyfb_base.c 				mem = (mem & ~(0x0f)) | 2;
mem              3013 drivers/video/fbdev/aty/atyfb_base.c 				mem = (mem & ~(0x0f)) | 3;
mem              3016 drivers/video/fbdev/aty/atyfb_base.c 				mem = (mem & ~(0x0f)) | 4;
mem              3019 drivers/video/fbdev/aty/atyfb_base.c 				mem = (mem & ~(0x0f)) | 5;
mem              3025 drivers/video/fbdev/aty/atyfb_base.c 				mem &= ~(0x00700000);
mem              3027 drivers/video/fbdev/aty/atyfb_base.c 		mem &= ~(0xcf80e000);	/* Turn off all undocumented bits. */
mem              3028 drivers/video/fbdev/aty/atyfb_base.c 		aty_st_le32(MEM_CNTL, mem, par);
mem              1866 drivers/video/fbdev/cirrusfb.c 	unsigned long mem;
mem              1872 drivers/video/fbdev/cirrusfb.c 		mem = ((SR14 & 7) + 1) << 20;
mem              1877 drivers/video/fbdev/cirrusfb.c 			mem = 512 * 1024;
mem              1880 drivers/video/fbdev/cirrusfb.c 			mem = 1024 * 1024;
mem              1886 drivers/video/fbdev/cirrusfb.c 			mem = 2048 * 1024;
mem              1890 drivers/video/fbdev/cirrusfb.c 			mem = 1024 * 1024;
mem              1896 drivers/video/fbdev/cirrusfb.c 			mem *= 2;
mem              1900 drivers/video/fbdev/cirrusfb.c 	return mem;
mem               759 drivers/video/fbdev/cyber2000fb.c 	unsigned int mem;
mem               837 drivers/video/fbdev/cyber2000fb.c 	mem = var->xres_virtual * var->yres_virtual * (var->bits_per_pixel / 8);
mem               838 drivers/video/fbdev/cyber2000fb.c 	if (mem > cfb->fb.fix.smem_len)
mem               863 drivers/video/fbdev/cyber2000fb.c 	unsigned int mem;
mem               942 drivers/video/fbdev/cyber2000fb.c 	mem = cfb->fb.fix.line_length * var->yres_virtual;
mem               943 drivers/video/fbdev/cyber2000fb.c 	BUG_ON(mem > cfb->fb.fix.smem_len);
mem               201 drivers/video/fbdev/g364fb.c 	int mem, i, j;
mem               237 drivers/video/fbdev/g364fb.c 	mem = (r4030_read_reg32(JAZZ_R4030_CONFIG) >> 8) & 3;
mem               238 drivers/video/fbdev/g364fb.c 	fb_fix.smem_len = (1 << (mem * 2)) * 512 * 1024;
mem               207 drivers/video/fbdev/hyperv_fb.c 	struct resource *mem;
mem               699 drivers/video/fbdev/hyperv_fb.c 	ret = vmbus_allocate_mmio(&par->mem, hdev, pot_start, pot_end,
mem               706 drivers/video/fbdev/hyperv_fb.c 	fb_virt = ioremap(par->mem->start, screen_fb_size);
mem               724 drivers/video/fbdev/hyperv_fb.c 	info->fix.smem_start = par->mem->start;
mem               737 drivers/video/fbdev/hyperv_fb.c 	vmbus_free_mmio(par->mem->start, screen_fb_size);
mem               738 drivers/video/fbdev/hyperv_fb.c 	par->mem = NULL;
mem               752 drivers/video/fbdev/hyperv_fb.c 	vmbus_free_mmio(par->mem->start, screen_fb_size);
mem               753 drivers/video/fbdev/hyperv_fb.c 	par->mem = NULL;
mem               403 drivers/video/fbdev/i740fb.c 	u32 bpp, base, dacspeed24, mem;
mem               488 drivers/video/fbdev/i740fb.c 	mem = vxres * vyres * ((bpp + 1) / 8);
mem               489 drivers/video/fbdev/i740fb.c 	if (mem > info->screen_size) {
mem               491 drivers/video/fbdev/i740fb.c 			mem >> 10, info->screen_size >> 10);
mem              1240 drivers/video/fbdev/matrox/matroxfb_base.c static unsigned int mem;		/* "matroxfb:mem:xxxxxM" */
mem              1706 drivers/video/fbdev/matrox/matroxfb_base.c 	if (mem < 1024) mem *= 1024;
mem              1707 drivers/video/fbdev/matrox/matroxfb_base.c 	if (mem < 0x00100000) mem *= 1024;
mem              1709 drivers/video/fbdev/matrox/matroxfb_base.c 	if (mem && (mem < memsize))
mem              1710 drivers/video/fbdev/matrox/matroxfb_base.c 		memsize = mem;
mem              2373 drivers/video/fbdev/matrox/matroxfb_base.c 			mem = simple_strtoul(this_opt+4, NULL, 0);
mem              2488 drivers/video/fbdev/matrox/matroxfb_base.c module_param(mem, int, 0);
mem              2489 drivers/video/fbdev/matrox/matroxfb_base.c MODULE_PARM_DESC(mem, "Size of available memory in MB, KB or B (2,4,8,12,16MB, default=autodetect)");
mem                24 drivers/video/fbdev/matrox/matroxfb_crtc2.c static int mem = 8192;
mem                26 drivers/video/fbdev/matrox/matroxfb_crtc2.c module_param(mem, int, 0);
mem                27 drivers/video/fbdev/matrox/matroxfb_crtc2.c MODULE_PARM_DESC(mem, "Memory size reserved for dualhead (default=8MB)");
mem               612 drivers/video/fbdev/matrox/matroxfb_crtc2.c 	if (mem < 64)
mem               613 drivers/video/fbdev/matrox/matroxfb_crtc2.c 		mem *= 1024;
mem               614 drivers/video/fbdev/matrox/matroxfb_crtc2.c 	if (mem < 64*1024)
mem               615 drivers/video/fbdev/matrox/matroxfb_crtc2.c 		mem *= 1024;
mem               616 drivers/video/fbdev/matrox/matroxfb_crtc2.c 	mem &= ~0x00000FFF;	/* PAGE_MASK? */
mem               617 drivers/video/fbdev/matrox/matroxfb_crtc2.c 	if (minfo->video.len_usable + mem <= minfo->video.len)
mem               618 drivers/video/fbdev/matrox/matroxfb_crtc2.c 		m2info->video.offbase = minfo->video.len - mem;
mem               619 drivers/video/fbdev/matrox/matroxfb_crtc2.c 	else if (minfo->video.len < mem) {
mem               622 drivers/video/fbdev/matrox/matroxfb_crtc2.c 		m2info->video.borrowed = mem;
mem               623 drivers/video/fbdev/matrox/matroxfb_crtc2.c 		minfo->video.len_usable -= mem;
mem               627 drivers/video/fbdev/matrox/matroxfb_crtc2.c 	m2info->video.len = m2info->video.len_usable = m2info->video.len_maximum = mem;
mem               148 drivers/video/fbdev/metronomefb.c static u8 calc_cksum(int start, int end, u8 *mem)
mem               154 drivers/video/fbdev/metronomefb.c 		tmp += mem[i];
mem               170 drivers/video/fbdev/metronomefb.c static int load_waveform(u8 *mem, size_t size, int m, int t,
mem               195 drivers/video/fbdev/metronomefb.c 	wfm_hdr = (struct waveform_hdr *) mem;
mem               205 drivers/video/fbdev/metronomefb.c 	cksum = calc_cksum(32, 47, mem);
mem               228 drivers/video/fbdev/metronomefb.c 		if (mem[i] > t) {
mem               238 drivers/video/fbdev/metronomefb.c 	cksum = calc_cksum(sizeof(*wfm_hdr), cksum_idx, mem);
mem               239 drivers/video/fbdev/metronomefb.c 	if (cksum != mem[cksum_idx]) {
mem               241 drivers/video/fbdev/metronomefb.c 				" %x != %x\n", cksum, mem[cksum_idx]);
mem               250 drivers/video/fbdev/metronomefb.c 	cksum = calc_cksum(cksum_idx - 3, cksum_idx, mem);
mem               251 drivers/video/fbdev/metronomefb.c 	if (cksum != mem[cksum_idx]) {
mem               253 drivers/video/fbdev/metronomefb.c 				" %x != %x\n", cksum, mem[cksum_idx]);
mem               258 drivers/video/fbdev/metronomefb.c 	tta = get_unaligned_le32(mem + wmta + m * 4) & 0x00FFFFFF;
mem               262 drivers/video/fbdev/metronomefb.c 	cksum = calc_cksum(cksum_idx - 3, cksum_idx, mem);
mem               263 drivers/video/fbdev/metronomefb.c 	if (cksum != mem[cksum_idx]) {
mem               265 drivers/video/fbdev/metronomefb.c 			" %x != %x\n", cksum, mem[cksum_idx]);
mem               271 drivers/video/fbdev/metronomefb.c 	wfm_idx = get_unaligned_le32(mem + tta + trn * 4) & 0x00FFFFFF;
mem               277 drivers/video/fbdev/metronomefb.c 		v = mem[wfm_idx++];
mem               279 drivers/video/fbdev/metronomefb.c 			while (((v = mem[wfm_idx++]) != wfm_hdr->swtb) &&
mem               289 drivers/video/fbdev/metronomefb.c 		rl = mem[wfm_idx++];
mem               297 drivers/video/fbdev/metronomefb.c 	cksum = calc_cksum(owfm_idx, cksum_idx, mem);
mem               298 drivers/video/fbdev/metronomefb.c 	if (cksum != mem[cksum_idx]) {
mem               300 drivers/video/fbdev/metronomefb.c 				" %x != %x\n", cksum, mem[cksum_idx]);
mem               338 drivers/video/fbdev/omap2/omapfb/vrfb.c 	struct resource *mem;
mem               343 drivers/video/fbdev/omap2/omapfb/vrfb.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               344 drivers/video/fbdev/omap2/omapfb/vrfb.c 	vrfb_base = devm_ioremap_resource(&pdev->dev, mem);
mem               358 drivers/video/fbdev/omap2/omapfb/vrfb.c 		mem = platform_get_resource(pdev, IORESOURCE_MEM, 1 + i);
mem               359 drivers/video/fbdev/omap2/omapfb/vrfb.c 		if (!mem) {
mem               365 drivers/video/fbdev/omap2/omapfb/vrfb.c 		ctxs[i].base = mem->start;
mem               875 drivers/video/fbdev/s3c2410fb.c 	info->mem = request_mem_region(res->start, size, pdev->name);
mem               876 drivers/video/fbdev/s3c2410fb.c 	if (info->mem == NULL) {
mem              1051 drivers/video/fbdev/s3c2410fb.c 	release_mem_region(info->mem->start, resource_size(info->mem));
mem                25 drivers/video/fbdev/s3c2410fb.h 	struct resource		*mem;
mem               549 drivers/video/fbdev/s3fb.c 	int rv, mem, step;
mem               577 drivers/video/fbdev/s3fb.c 	mem = ((var->bits_per_pixel * var->xres_virtual) >> 3) * var->yres_virtual;
mem               578 drivers/video/fbdev/s3fb.c 	if (mem > info->screen_size) {
mem               580 drivers/video/fbdev/s3fb.c 		       mem >> 10, (unsigned int) (info->screen_size >> 10));
mem               408 drivers/video/fbdev/simplefb.c 	struct resource *mem;
mem               422 drivers/video/fbdev/simplefb.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               423 drivers/video/fbdev/simplefb.c 	if (!mem) {
mem               436 drivers/video/fbdev/simplefb.c 	info->fix.smem_start = mem->start;
mem               437 drivers/video/fbdev/simplefb.c 	info->fix.smem_len = resource_size(mem);
mem              6602 drivers/video/fbdev/sis/sis_main.c static unsigned int	mem = 0;
mem              6669 drivers/video/fbdev/sis/sis_main.c 	if(mem)
mem              6670 drivers/video/fbdev/sis/sis_main.c 		sisfb_parm_mem = mem;
mem              6717 drivers/video/fbdev/sis/sis_main.c module_param(mem, int, 0);
mem              6742 drivers/video/fbdev/sis/sis_main.c MODULE_PARM_DESC(mem,
mem               167 drivers/video/fbdev/sm501fb.c static int sm501_alloc_mem(struct sm501fb_info *inf, struct sm501_mem *mem,
mem               238 drivers/video/fbdev/sm501fb.c 	mem->size    = size;
mem               239 drivers/video/fbdev/sm501fb.c 	mem->sm_addr = ptr;
mem               240 drivers/video/fbdev/sm501fb.c 	mem->k_addr  = inf->fbmem + ptr;
mem               243 drivers/video/fbdev/sm501fb.c 		__func__, mem->sm_addr, mem->k_addr, why, size);
mem              1225 drivers/video/fbdev/sm501fb.c 	void __iomem *mem = info->regs;
mem              1231 drivers/video/fbdev/sm501fb.c 				smc501_readl(mem + reg));
mem                98 drivers/video/fbdev/sstfb.c static int mem;			/* mem size in MB, 0 = autodetect */
mem               831 drivers/video/fbdev/sstfb.c 	if (mem >= 1  && mem <= 4) {
mem               832 drivers/video/fbdev/sstfb.c 		*memsize = (mem * 0x100000);
mem              1300 drivers/video/fbdev/sstfb.c 			mem = simple_strtoul (this_opt+4, NULL, 0);
mem              1521 drivers/video/fbdev/sstfb.c module_param(mem, int, 0);
mem              1522 drivers/video/fbdev/sstfb.c MODULE_PARM_DESC(mem, "Size of frame buffer memory in MB (1, 2, 4 MB, default=autodetect)");
mem                74 drivers/video/fbdev/udlfb.c 	void *mem;
mem              1015 drivers/video/fbdev/udlfb.c 		vfree(d->mem);
mem              1205 drivers/video/fbdev/udlfb.c static void dlfb_deferred_vfree(struct dlfb_data *dlfb, void *mem)
mem              1210 drivers/video/fbdev/udlfb.c 	d->mem = mem;
mem               587 drivers/video/fbdev/vermilion/vermilion.c 	u64 mem;
mem               640 drivers/video/fbdev/vermilion/vermilion.c 	mem = (u64)pitch * var->yres_virtual;
mem               641 drivers/video/fbdev/vermilion/vermilion.c 	if (mem > vinfo->vram_contig_size) {
mem               322 drivers/video/fbdev/vt8623fb.c 	int rv, mem, step;
mem               344 drivers/video/fbdev/vt8623fb.c 	mem = ((var->bits_per_pixel * var->xres_virtual) >> 3) * var->yres_virtual;
mem               345 drivers/video/fbdev/vt8623fb.c 	if (mem > info->screen_size)
mem               348 drivers/video/fbdev/vt8623fb.c 		       mem >> 10, (unsigned int) (info->screen_size >> 10));
mem               353 drivers/video/fbdev/vt8623fb.c 	if ((var->bits_per_pixel == 0) && (mem > (256*1024)))
mem               356 drivers/video/fbdev/vt8623fb.c 		       mem >> 10);
mem               481 drivers/video/fbdev/w100fb.c 	if (par->mach->mem && ((var->xres*var->yres*BITS_PER_PIXEL/8) > (par->mach->mem->size+1)))
mem               484 drivers/video/fbdev/w100fb.c 	if (!par->mach->mem && ((var->xres*var->yres*BITS_PER_PIXEL/8) > (MEM_INT_SIZE+1)))
mem               536 drivers/video/fbdev/w100fb.c 			info->fix.smem_len = par->mach->mem->size+1;
mem               570 drivers/video/fbdev/w100fb.c 		memsize=par->mach->mem->size;
mem               588 drivers/video/fbdev/w100fb.c 		memsize=par->mach->mem->size;
mem               646 drivers/video/fbdev/w100fb.c 	struct resource *mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               649 drivers/video/fbdev/w100fb.c 	if (!mem)
mem               653 drivers/video/fbdev/w100fb.c 	remapped_base = ioremap_nocache(mem->start+W100_CFG_BASE, W100_CFG_LEN);
mem               658 drivers/video/fbdev/w100fb.c 	remapped_regs = ioremap_nocache(mem->start+W100_REG_BASE, W100_REG_LEN);
mem               674 drivers/video/fbdev/w100fb.c 	printk(" at 0x%08lx.\n", (unsigned long) mem->start+W100_CFG_BASE);
mem               677 drivers/video/fbdev/w100fb.c 	remapped_fbuf = ioremap_nocache(mem->start+MEM_WINDOW_BASE, MEM_WINDOW_SIZE);
mem               721 drivers/video/fbdev/w100fb.c 	info->fix.smem_start = mem->start+W100_FB_BASE;
mem               722 drivers/video/fbdev/w100fb.c 	info->fix.mmio_start = mem->start+W100_REG_BASE;
mem              1364 drivers/video/fbdev/w100fb.c 	struct w100_mem_info *mem = par->mach->mem;
mem              1388 drivers/video/fbdev/w100fb.c 		extmem_location.f.mc_ext_mem_top = (W100_FB_BASE+par->mach->mem->size) >> 8;
mem              1392 drivers/video/fbdev/w100fb.c 		writel(mem->ext_cntl, remapped_regs + mmMEM_EXT_CNTL);
mem              1397 drivers/video/fbdev/w100fb.c 		writel(mem->sdram_mode_reg, remapped_regs + mmMEM_SDRAM_MODE_REG);
mem              1399 drivers/video/fbdev/w100fb.c 		writel(mem->ext_timing_cntl, remapped_regs + mmMEM_EXT_TIMING_CNTL);
mem              1400 drivers/video/fbdev/w100fb.c 		writel(mem->io_cntl, remapped_regs + mmMEM_IO_CNTL);
mem               534 drivers/virtio/virtio_mmio.c 	struct resource *mem;
mem               538 drivers/virtio/virtio_mmio.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               539 drivers/virtio/virtio_mmio.c 	if (!mem)
mem               542 drivers/virtio/virtio_mmio.c 	if (!devm_request_mem_region(&pdev->dev, mem->start,
mem               543 drivers/virtio/virtio_mmio.c 			resource_size(mem), pdev->name))
mem               557 drivers/virtio/virtio_mmio.c 	vm_dev->base = devm_ioremap(&pdev->dev, mem->start, resource_size(mem));
mem                16 drivers/watchdog/menz69_wdt.c 	struct resource *mem;
mem               113 drivers/watchdog/menz69_wdt.c 	struct resource *mem;
mem               119 drivers/watchdog/menz69_wdt.c 	mem = mcb_request_mem(dev, "z069-wdt");
mem               120 drivers/watchdog/menz69_wdt.c 	if (IS_ERR(mem))
mem               121 drivers/watchdog/menz69_wdt.c 		return PTR_ERR(mem);
mem               123 drivers/watchdog/menz69_wdt.c 	drv->base = devm_ioremap(&dev->dev, mem->start, resource_size(mem));
mem               127 drivers/watchdog/menz69_wdt.c 	drv->mem = mem;
mem               139 drivers/watchdog/menz69_wdt.c 	mcb_release_mem(mem);
mem               148 drivers/watchdog/menz69_wdt.c 	mcb_release_mem(drv->mem);
mem               108 drivers/xen/xen-acpi-memhotplug.c 		if ((info->caching == address64.info.mem.caching) &&
mem               109 drivers/xen/xen-acpi-memhotplug.c 		    (info->write_protect == address64.info.mem.write_protect) &&
mem               121 drivers/xen/xen-acpi-memhotplug.c 	new->caching = address64.info.mem.caching;
mem               122 drivers/xen/xen-acpi-memhotplug.c 	new->write_protect = address64.info.mem.write_protect;
mem                58 fs/btrfs/lzo.c 	void *mem;
mem                92 fs/btrfs/lzo.c 	kvfree(workspace->mem);
mem               104 fs/btrfs/lzo.c 	workspace->mem = kvmalloc(LZO1X_MEM_COMPRESS, GFP_KERNEL);
mem               107 fs/btrfs/lzo.c 	if (!workspace->mem || !workspace->buf || !workspace->cbuf)
mem               189 fs/btrfs/lzo.c 				       &out_len, workspace->mem);
mem                43 fs/btrfs/zstd.c 	void *mem;
mem               339 fs/btrfs/zstd.c 	kvfree(workspace->mem);
mem               356 fs/btrfs/zstd.c 	workspace->mem = kvmalloc(workspace->size, GFP_KERNEL);
mem               358 fs/btrfs/zstd.c 	if (!workspace->mem || !workspace->buf)
mem               397 fs/btrfs/zstd.c 	stream = ZSTD_initCStream(params, len, workspace->mem,
mem               566 fs/btrfs/zstd.c 			ZSTD_BTRFS_MAX_INPUT, workspace->mem, workspace->size);
mem               643 fs/btrfs/zstd.c 			ZSTD_BTRFS_MAX_INPUT, workspace->mem, workspace->size);
mem              1627 fs/nfsd/nfs4state.c 	int mem, i;
mem              1631 fs/nfsd/nfs4state.c 	mem = numslots * sizeof(struct nfsd4_slot *);
mem              1633 fs/nfsd/nfs4state.c 	new = kzalloc(sizeof(*new) + mem, GFP_KERNEL);
mem               287 fs/reiserfs/journal.c 	int mem = bmap_nr * sizeof(struct reiserfs_bitmap_node *);
mem               292 fs/reiserfs/journal.c 		jb->bitmaps = vzalloc(mem);
mem                24 fs/squashfs/zstd_wrapper.c 	void *mem;
mem                38 fs/squashfs/zstd_wrapper.c 	wksp->mem = vmalloc(wksp->mem_size);
mem                39 fs/squashfs/zstd_wrapper.c 	if (wksp->mem == NULL)
mem                56 fs/squashfs/zstd_wrapper.c 		vfree(wksp->mem);
mem                73 fs/squashfs/zstd_wrapper.c 	stream = ZSTD_initDStream(wksp->window_size, wksp->mem, wksp->mem_size);
mem               150 fs/ufs/util.c  	unsigned char * mem, struct ufs_buffer_head * ubh, unsigned size)
mem               158 fs/ufs/util.c  		memcpy (mem, ubh->bh[bhno]->b_data, len);
mem               159 fs/ufs/util.c  		mem += uspi->s_fsize;
mem               166 fs/ufs/util.c  	struct ufs_buffer_head * ubh, unsigned char * mem, unsigned size)
mem               174 fs/ufs/util.c  		memcpy (ubh->bh[bhno]->b_data, mem, len);
mem               175 fs/ufs/util.c  		mem += uspi->s_fsize;
mem               276 fs/ufs/util.h  #define ubh_ubhcpymem(mem,ubh,size) _ubh_ubhcpymem_(uspi,mem,ubh,size)
mem               278 fs/ufs/util.h  #define ubh_memcpyubh(ubh,mem,size) _ubh_memcpyubh_(uspi,ubh,mem,size)
mem              1934 fs/userfaultfd.c static void init_once_userfaultfd_ctx(void *mem)
mem              1936 fs/userfaultfd.c 	struct userfaultfd_ctx *ctx = (struct userfaultfd_ctx *) mem;
mem               250 include/acpi/acrestyp.h 	struct acpi_memory_attribute mem;
mem               105 include/asm-generic/vmlinux.lds.h #define MEM_KEEP(sec)    *(.mem##sec)
mem               109 include/asm-generic/vmlinux.lds.h #define MEM_DISCARD(sec) *(.mem##sec)
mem               196 include/drm/ttm/ttm_bo_api.h 	struct ttm_mem_reg mem;
mem               339 include/drm/ttm/ttm_bo_api.h bool ttm_bo_mem_compat(struct ttm_placement *placement, struct ttm_mem_reg *mem,
mem               109 include/drm/ttm/ttm_bo_driver.h 			 struct ttm_mem_reg *mem);
mem               122 include/drm/ttm/ttm_bo_driver.h 			 struct ttm_mem_reg *mem);
mem               355 include/drm/ttm/ttm_bo_driver.h 			      struct ttm_mem_reg *mem);
mem               357 include/drm/ttm/ttm_bo_driver.h 			    struct ttm_mem_reg *mem);
mem               559 include/drm/ttm/ttm_bo_driver.h bool ttm_mem_reg_is_pci(struct ttm_bo_device *bdev, struct ttm_mem_reg *mem);
mem               582 include/drm/ttm/ttm_bo_driver.h 		     struct ttm_mem_reg *mem,
mem               585 include/drm/ttm/ttm_bo_driver.h void ttm_bo_mem_put(struct ttm_buffer_object *bo, struct ttm_mem_reg *mem);
mem               587 include/drm/ttm/ttm_bo_driver.h 			   struct ttm_mem_reg *mem);
mem               794 include/drm/ttm/ttm_bo_driver.h 		       struct ttm_mem_reg *mem);
mem               796 include/drm/ttm/ttm_bo_driver.h 		     struct ttm_mem_reg *mem);
mem               659 include/linux/bpf.h int bpf_map_charge_init(struct bpf_map_memory *mem, u64 size);
mem               660 include/linux/bpf.h void bpf_map_charge_finish(struct bpf_map_memory *mem);
mem               266 include/linux/clk/ti.h 			    struct regmap *syscon, void __iomem *mem);
mem               267 include/linux/clk/ti.h void omap2_clk_legacy_provider_init(int index, void __iomem *mem);
mem               826 include/linux/crypto.h void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);
mem                62 include/linux/dm-io.h 	struct dm_io_memory mem;	/* Memory to use for io */
mem               398 include/linux/efi.h 	efi_pci_io_protocol_access_32_t mem;
mem               418 include/linux/efi.h 	efi_pci_io_protocol_access_64_t mem;
mem               438 include/linux/efi.h 	efi_pci_io_protocol_access_t mem;
mem              1067 include/linux/efi.h 				     void *buf, struct efi_mem_range *mem);
mem                10 include/linux/kbuild.h #define OFFSET(sym, str, mem) \
mem                11 include/linux/kbuild.h 	DEFINE(sym, offsetof(struct str, mem))
mem                87 include/linux/kexec.h 	unsigned long mem;
mem                95 include/linux/kexec.h 	compat_ulong_t mem;	/* User space sees this as a (void *) ... */
mem               172 include/linux/kexec.h 	unsigned long mem;
mem               214 include/linux/kexec.h extern int crash_exclude_mem_range(struct crash_mem *mem,
mem               217 include/linux/kexec.h extern int crash_prepare_elf64_headers(struct crash_mem *mem, int kernel_map,
mem                93 include/linux/kgdb.h extern char *dbg_get_reg(int regno, void *mem, struct pt_regs *regs);
mem                94 include/linux/kgdb.h extern int dbg_set_reg(int regno, void *mem, struct pt_regs *regs);
mem               309 include/linux/kgdb.h extern char *kgdb_mem2hex(char *mem, char *buf, int count);
mem               310 include/linux/kgdb.h extern int kgdb_hex2mem(char *buf, char *mem, int count);
mem               677 include/linux/kvm_host.h 			  const struct kvm_userspace_memory_region *mem);
mem               679 include/linux/kvm_host.h 			    const struct kvm_userspace_memory_region *mem);
mem               687 include/linux/kvm_host.h 				const struct kvm_userspace_memory_region *mem,
mem               690 include/linux/kvm_host.h 				const struct kvm_userspace_memory_region *mem,
mem                75 include/linux/mcb.h 	struct resource mem;
mem               134 include/linux/mcb.h extern void mcb_release_mem(struct resource *mem);
mem               351 include/linux/memory_hotplug.h extern bool is_memblock_offlined(struct memory_block *mem);
mem               436 include/linux/mlx5/qp.h 		} mem;
mem               424 include/linux/netfilter/x_tables.h 	const char __percpu *mem;
mem               100 include/linux/pci-epc.h 	struct pci_epc_mem		*mem;
mem                12 include/linux/platform_data/xtalk-bridge.h 	struct resource	mem;
mem               179 include/linux/plist.h #define plist_for_each_entry(pos, head, mem)	\
mem               180 include/linux/plist.h 	 list_for_each_entry(pos, &(head)->node_list, mem.node_list)
mem               340 include/linux/remoteproc.h 	int (*alloc)(struct rproc *rproc, struct rproc_mem_entry *mem);
mem               341 include/linux/remoteproc.h 	int (*release)(struct rproc *rproc, struct rproc_mem_entry *mem);
mem               598 include/linux/remoteproc.h void rproc_add_carveout(struct rproc *rproc, struct rproc_mem_entry *mem);
mem                42 include/linux/remoteproc/st_slim_rproc.h 	struct st_slim_mem mem[ST_SLIM_MEM_MAX];
mem               124 include/linux/seq_buf.h extern int seq_buf_putmem(struct seq_buf *s, const void *mem, unsigned int len);
mem               125 include/linux/seq_buf.h extern int seq_buf_putmem_hex(struct seq_buf *s, const void *mem,
mem              3553 include/linux/skbuff.h 	__wsum (*update)(const void *mem, int len, __wsum wsum);
mem               164 include/linux/spi/spi-mem.h 	struct spi_mem *mem;
mem               194 include/linux/spi/spi-mem.h static inline void spi_mem_set_drvdata(struct spi_mem *mem, void *data)
mem               196 include/linux/spi/spi-mem.h 	mem->drvpriv = data;
mem               206 include/linux/spi/spi-mem.h static inline void *spi_mem_get_drvdata(struct spi_mem *mem)
mem               208 include/linux/spi/spi-mem.h 	return mem->drvpriv;
mem               253 include/linux/spi/spi-mem.h 	int (*adjust_op_size)(struct spi_mem *mem, struct spi_mem_op *op);
mem               254 include/linux/spi/spi-mem.h 	bool (*supports_op)(struct spi_mem *mem,
mem               256 include/linux/spi/spi-mem.h 	int (*exec_op)(struct spi_mem *mem,
mem               258 include/linux/spi/spi-mem.h 	const char *(*get_name)(struct spi_mem *mem);
mem               285 include/linux/spi/spi-mem.h 	int (*probe)(struct spi_mem *mem);
mem               286 include/linux/spi/spi-mem.h 	int (*remove)(struct spi_mem *mem);
mem               287 include/linux/spi/spi-mem.h 	void (*shutdown)(struct spi_mem *mem);
mem               299 include/linux/spi/spi-mem.h bool spi_mem_default_supports_op(struct spi_mem *mem,
mem               319 include/linux/spi/spi-mem.h bool spi_mem_default_supports_op(struct spi_mem *mem,
mem               327 include/linux/spi/spi-mem.h int spi_mem_adjust_op_size(struct spi_mem *mem, struct spi_mem_op *op);
mem               329 include/linux/spi/spi-mem.h bool spi_mem_supports_op(struct spi_mem *mem,
mem               332 include/linux/spi/spi-mem.h int spi_mem_exec_op(struct spi_mem *mem,
mem               335 include/linux/spi/spi-mem.h const char *spi_mem_get_name(struct spi_mem *mem);
mem               338 include/linux/spi/spi-mem.h spi_mem_dirmap_create(struct spi_mem *mem,
mem               346 include/linux/spi/spi-mem.h devm_spi_mem_dirmap_create(struct device *dev, struct spi_mem *mem,
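A hedged sketch of a driver built on the spi-mem API indexed above: shrink an operation to the controller's limits with spi_mem_adjust_op_size(), then run it with spi_mem_exec_op(). The SPI_MEM_OP_* initializer macros, the spidrv member of struct spi_mem_driver and module_spi_mem_driver() are assumed from <linux/spi/spi-mem.h>; the 0x9f JEDEC-ID opcode and the driver name are illustrative only.

#include <linux/module.h>
#include <linux/spi/spi-mem.h>

static int demo_mem_probe(struct spi_mem *mem)
{
	u8 id[3];
	struct spi_mem_op op =
		SPI_MEM_OP(SPI_MEM_OP_CMD(0x9f, 1),
			   SPI_MEM_OP_NO_ADDR,
			   SPI_MEM_OP_NO_DUMMY,
			   SPI_MEM_OP_DATA_IN(sizeof(id), id, 1));
	int ret;

	/* Let the controller trim the data phase if it cannot do it in one go. */
	ret = spi_mem_adjust_op_size(mem, &op);
	if (ret)
		return ret;

	ret = spi_mem_exec_op(mem, &op);
	if (ret)
		return ret;

	dev_info(&mem->spi->dev, "JEDEC ID: %3phN\n", id);
	return 0;
}

static struct spi_mem_driver demo_mem_driver = {
	.spidrv = {
		.driver = {
			.name = "demo-spi-mem",
		},
	},
	.probe = demo_mem_probe,
};
module_spi_mem_driver(demo_mem_driver);

MODULE_LICENSE("GPL");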
mem               359 include/linux/swap.h extern unsigned long mem_cgroup_shrink_node(struct mem_cgroup *mem,
mem               642 include/linux/swap.h static inline int mem_cgroup_swappiness(struct mem_cgroup *mem)
mem                87 include/linux/trace_seq.h extern void trace_seq_putmem(struct trace_seq *s, const void *mem, unsigned int len);
mem                88 include/linux/trace_seq.h extern void trace_seq_putmem_hex(struct trace_seq *s, const void *mem,
mem               126 include/linux/trace_seq.h trace_seq_putmem(struct trace_seq *s, const void *mem, unsigned int len)
mem               129 include/linux/trace_seq.h static inline void trace_seq_putmem_hex(struct trace_seq *s, const void *mem,
mem               102 include/linux/uio_driver.h 	struct uio_mem		mem[MAX_UIO_MAPS];
mem                76 include/linux/vmalloc.h extern void vm_unmap_ram(const void *mem, unsigned int count);
mem                22 include/media/drv-intf/saa7146.h #define saa7146_write(sxy,adr,dat)    writel((dat),(sxy->mem+(adr)))
mem                23 include/media/drv-intf/saa7146.h #define saa7146_read(sxy,adr)         readl(sxy->mem+(adr))
mem               128 include/media/drv-intf/saa7146.h 	unsigned char			__iomem *mem;		/* pointer to mapped IO memory */
mem               175 include/media/drv-intf/saa7146.h void saa7146_vfree_destroy_pgtable(struct pci_dev *pci, void *mem, struct saa7146_pgtable *pt);
mem                58 include/media/vsp1.h 	dma_addr_t mem[3];
mem                96 include/media/vsp1.h 	dma_addr_t mem[3];
mem                22 include/net/inet_frag.h 	atomic_long_t		mem ____cacheline_aligned_in_smp;
mem               141 include/net/inet_frag.h 	return atomic_long_read(&fqdir->mem);
mem               146 include/net/inet_frag.h 	atomic_long_sub(val, &fqdir->mem);
mem               151 include/net/inet_frag.h 	atomic_long_add(val, &fqdir->mem);
mem              1637 include/net/sock.h void sock_kfree_s(struct sock *sk, void *mem, int size);
mem              1638 include/net/sock.h void sock_kzfree_s(struct sock *sk, void *mem, int size);
mem                63 include/net/xdp.h 	struct xdp_mem_info mem;
mem                83 include/net/xdp.h 	struct xdp_mem_info mem;
mem               104 include/net/xdp.h 	if (xdp->rxq->mem.type == MEM_TYPE_ZERO_COPY)
mem               123 include/net/xdp.h 	xdp_frame->mem = xdp->rxq->mem;
mem               137 include/net/xdp.h void __xdp_release_frame(void *data, struct xdp_mem_info *mem);
mem               140 include/net/xdp.h 	struct xdp_mem_info *mem = &xdpf->mem;
mem               143 include/net/xdp.h 	if (mem->type == MEM_TYPE_PAGE_POOL)
mem               144 include/net/xdp.h 		__xdp_release_frame(xdpf->data, mem);
mem                 9 include/net/xdp_priv.h 	struct xdp_mem_info mem;
mem               463 include/pcmcia/cistpl.h     cistpl_mem_t	mem;
mem               483 include/pcmcia/cistpl.h     u_char		mem;
mem               127 include/pcmcia/ss.h 	int (*set_mem_map)(struct pcmcia_socket *s, struct pccard_mem_map *mem);
mem                28 include/ras/ras_event.h 	TP_PROTO(struct cper_sec_mem_err *mem,
mem                34 include/ras/ras_event.h 	TP_ARGS(mem, err_seq, fru_id, fru_text, sev),
mem                49 include/ras/ras_event.h 		if (mem->validation_bits & CPER_MEM_VALID_ERROR_TYPE)
mem                50 include/ras/ras_event.h 			__entry->etype = mem->error_type;
mem                54 include/ras/ras_event.h 		if (mem->validation_bits & CPER_MEM_VALID_PA)
mem                55 include/ras/ras_event.h 			__entry->pa = mem->physical_addr;
mem                59 include/ras/ras_event.h 		if (mem->validation_bits & CPER_MEM_VALID_PA_MASK)
mem                60 include/ras/ras_event.h 			__entry->pa_mask_lsb = (u8)__ffs64(mem->physical_addr_mask);
mem                65 include/ras/ras_event.h 		cper_mem_err_pack(mem, &__entry->data);
mem                89 include/rdma/signature.h 	struct ib_sig_domain	mem;
mem              1548 include/sound/emu10k1.h 	struct snd_util_memblk mem;
mem              1556 include/sound/emu10k1.h #define snd_emu10k1_memblk_offset(blk)	(((blk)->mapped_page << PAGE_SHIFT) | ((blk)->mem.offset & (PAGE_SIZE - 1)))
mem               333 include/trace/events/xdp.h 		__entry->mem_id		= xa->mem.id;
mem               334 include/trace/events/xdp.h 		__entry->mem_type	= xa->mem.type;
mem               363 include/trace/events/xdp.h 		__entry->mem_id		= xa->mem.id;
mem               364 include/trace/events/xdp.h 		__entry->mem_type	= xa->mem.type;
mem               381 include/trace/events/xdp.h 	TP_PROTO(const struct xdp_mem_info *mem,
mem               384 include/trace/events/xdp.h 	TP_ARGS(mem, page),
mem               394 include/trace/events/xdp.h 		__entry->mem_id		= mem->id;
mem               395 include/trace/events/xdp.h 		__entry->mem_type	= mem->type;
mem                57 include/uapi/linux/kexec.h 	const void *mem;
mem               119 include/video/w100fb.h 	struct w100_mem_info *mem;
mem               275 include/xen/interface/platform.h 			} mem;
mem                78 kernel/bpf/arraymap.c 	struct bpf_map_memory mem;
mem               115 kernel/bpf/arraymap.c 	ret = bpf_map_charge_init(&mem, cost);
mem               122 kernel/bpf/arraymap.c 		bpf_map_charge_finish(&mem);
mem               130 kernel/bpf/arraymap.c 	bpf_map_charge_move(&array->map.memory, &mem);
mem               275 kernel/bpf/local_storage.c 	struct bpf_map_memory mem;
mem               295 kernel/bpf/local_storage.c 	ret = bpf_map_charge_init(&mem, sizeof(struct bpf_cgroup_storage_map));
mem               302 kernel/bpf/local_storage.c 		bpf_map_charge_finish(&mem);
mem               306 kernel/bpf/local_storage.c 	bpf_map_charge_move(&map->map.memory, &mem);
mem                70 kernel/bpf/queue_stack_maps.c 	struct bpf_map_memory mem = {0};
mem                77 kernel/bpf/queue_stack_maps.c 	ret = bpf_map_charge_init(&mem, cost);
mem                83 kernel/bpf/queue_stack_maps.c 		bpf_map_charge_finish(&mem);
mem                91 kernel/bpf/queue_stack_maps.c 	bpf_map_charge_move(&qs->map.memory, &mem);
mem               154 kernel/bpf/reuseport_array.c 	struct bpf_map_memory mem;
mem               163 kernel/bpf/reuseport_array.c 	err = bpf_map_charge_init(&mem, array_size);
mem               170 kernel/bpf/reuseport_array.c 		bpf_map_charge_finish(&mem);
mem               176 kernel/bpf/reuseport_array.c 	bpf_map_charge_move(&array->map.memory, &mem);
mem                89 kernel/bpf/stackmap.c 	struct bpf_map_memory mem;
mem               118 kernel/bpf/stackmap.c 	err = bpf_map_charge_init(&mem, cost);
mem               124 kernel/bpf/stackmap.c 		bpf_map_charge_finish(&mem);
mem               140 kernel/bpf/stackmap.c 	bpf_map_charge_move(&smap->map.memory, &mem);
mem               147 kernel/bpf/stackmap.c 	bpf_map_charge_finish(&mem);
mem               203 kernel/bpf/syscall.c int bpf_map_charge_init(struct bpf_map_memory *mem, u64 size)
mem               219 kernel/bpf/syscall.c 	mem->pages = pages;
mem               220 kernel/bpf/syscall.c 	mem->user = user;
mem               225 kernel/bpf/syscall.c void bpf_map_charge_finish(struct bpf_map_memory *mem)
mem               227 kernel/bpf/syscall.c 	bpf_uncharge_memlock(mem->user, mem->pages);
mem               228 kernel/bpf/syscall.c 	free_uid(mem->user);
mem               305 kernel/bpf/syscall.c 	struct bpf_map_memory mem;
mem               307 kernel/bpf/syscall.c 	bpf_map_charge_move(&mem, &map->memory);
mem               311 kernel/bpf/syscall.c 	bpf_map_charge_finish(&mem);
mem               553 kernel/bpf/syscall.c 	struct bpf_map_memory mem;
mem               637 kernel/bpf/syscall.c 	bpf_map_charge_move(&mem, &map->memory);
mem               639 kernel/bpf/syscall.c 	bpf_map_charge_finish(&mem);
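The kernel/bpf entries above all follow the same memlock-accounting lifecycle: charge a temporary struct bpf_map_memory with bpf_map_charge_init() before allocating, call bpf_map_charge_finish() on the temporary if the allocation fails, and bpf_map_charge_move() the charge into map->memory on success; at teardown the charge is moved back out and finished after the map is freed. The sketch below is a simplified userspace model of that lifecycle only; the limit value, struct names and main() flow are invented for illustration and do not reproduce the kernel's per-user memlock bookkeeping.

/*
 * Simplified userspace model of the bpf_map_charge_init() /
 * bpf_map_charge_move() / bpf_map_charge_finish() pattern listed above.
 * The limit and the fake map are made up; only the charge/move/finish
 * lifecycle mirrors the call sites in kernel/bpf/.
 */
#include <stdio.h>
#include <stdlib.h>

struct map_memory { unsigned long pages; };
struct fake_map   { struct map_memory memory; };

static unsigned long charged_pages;              /* stand-in for the per-user memlock counter */
static const unsigned long memlock_limit = 1024;

static int map_charge_init(struct map_memory *mem, unsigned long pages)
{
    if (charged_pages + pages > memlock_limit)
        return -1;                               /* the kernel returns -EPERM here */
    charged_pages += pages;
    mem->pages = pages;
    return 0;
}

static void map_charge_finish(struct map_memory *mem)
{
    charged_pages -= mem->pages;
    mem->pages = 0;
}

static void map_charge_move(struct map_memory *dst, struct map_memory *src)
{
    *dst = *src;
    src->pages = 0;                              /* ownership of the charge moves to dst */
}

int main(void)
{
    struct map_memory mem;
    struct fake_map *map;

    if (map_charge_init(&mem, 16))               /* charge before allocating */
        return 1;

    map = calloc(1, sizeof(*map));
    if (!map) {
        map_charge_finish(&mem);                 /* allocation failed: drop the charge */
        return 1;
    }
    map_charge_move(&map->memory, &mem);         /* success: the map now owns the charge */

    /* teardown mirrors kernel/bpf/syscall.c: move the charge out, free, then uncharge */
    map_charge_move(&mem, &map->memory);
    free(map);
    map_charge_finish(&mem);
    printf("charged pages after teardown: %lu\n", charged_pages);
    return 0;
}

Charging before the allocation is the point of the pattern: if the charge would exceed the limit, no map memory is ever allocated, and the error path only has to undo the charge.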
mem               239 kernel/debug/gdbstub.c char *kgdb_mem2hex(char *mem, char *buf, int count)
mem               250 kernel/debug/gdbstub.c 	err = probe_kernel_read(tmp, mem, count);
mem               268 kernel/debug/gdbstub.c int kgdb_hex2mem(char *buf, char *mem, int count)
mem               286 kernel/debug/gdbstub.c 	return probe_kernel_write(mem, tmp_raw, count);
mem               326 kernel/debug/gdbstub.c static int kgdb_ebin2mem(char *buf, char *mem, int count)
mem               338 kernel/debug/gdbstub.c 	return probe_kernel_write(mem, c, size);
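The kernel/debug/gdbstub.c entries above convert between raw memory and the two-hex-digits-per-byte encoding used by the GDB remote serial protocol, going through probe_kernel_read()/probe_kernel_write() so a bad address fails gracefully instead of faulting. A minimal userspace sketch of the encoding direction, with no fault handling:

/* Minimal sketch of the memory -> two-hex-digits-per-byte encoding that
 * kgdb_mem2hex() performs; the in-kernel version copies the bytes through
 * probe_kernel_read() first so unreadable addresses are reported as errors. */
#include <stdio.h>

static char *mem2hex(const unsigned char *mem, char *buf, int count)
{
    static const char hexchars[] = "0123456789abcdef";

    while (count-- > 0) {
        unsigned char c = *mem++;

        *buf++ = hexchars[c >> 4];
        *buf++ = hexchars[c & 0x0f];
    }
    *buf = '\0';
    return buf;
}

int main(void)
{
    char out[64];

    mem2hex((const unsigned char *)"OK", out, 2);
    printf("%s\n", out);    /* prints "4f4b" */
    return 0;
}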
mem                32 kernel/dma/coherent.c 					     struct dma_coherent_mem * mem)
mem                34 kernel/dma/coherent.c 	if (mem->use_dev_dma_pfn_offset)
mem                35 kernel/dma/coherent.c 		return (mem->pfn_base - dev->dma_pfn_offset) << PAGE_SHIFT;
mem                37 kernel/dma/coherent.c 		return mem->device_base;
mem                42 kernel/dma/coherent.c 		struct dma_coherent_mem **mem)
mem                77 kernel/dma/coherent.c 	*mem = dma_mem;
mem                87 kernel/dma/coherent.c static void dma_release_coherent_memory(struct dma_coherent_mem *mem)
mem                89 kernel/dma/coherent.c 	if (!mem)
mem                92 kernel/dma/coherent.c 	memunmap(mem->virt_base);
mem                93 kernel/dma/coherent.c 	kfree(mem->bitmap);
mem                94 kernel/dma/coherent.c 	kfree(mem);
mem                98 kernel/dma/coherent.c 				      struct dma_coherent_mem *mem)
mem               106 kernel/dma/coherent.c 	dev->dma_mem = mem;
mem               113 kernel/dma/coherent.c 	struct dma_coherent_mem *mem;
mem               116 kernel/dma/coherent.c 	ret = dma_init_coherent_memory(phys_addr, device_addr, size, &mem);
mem               120 kernel/dma/coherent.c 	ret = dma_assign_coherent_memory(dev, mem);
mem               122 kernel/dma/coherent.c 		dma_release_coherent_memory(mem);
mem               127 kernel/dma/coherent.c 				       struct dma_coherent_mem *mem,
mem               135 kernel/dma/coherent.c 	spin_lock_irqsave(&mem->spinlock, flags);
mem               137 kernel/dma/coherent.c 	if (unlikely(size > ((dma_addr_t)mem->size << PAGE_SHIFT)))
mem               140 kernel/dma/coherent.c 	pageno = bitmap_find_free_region(mem->bitmap, mem->size, order);
mem               147 kernel/dma/coherent.c 	*dma_handle = dma_get_device_base(dev, mem) +
mem               149 kernel/dma/coherent.c 	ret = mem->virt_base + ((dma_addr_t)pageno << PAGE_SHIFT);
mem               150 kernel/dma/coherent.c 	spin_unlock_irqrestore(&mem->spinlock, flags);
mem               154 kernel/dma/coherent.c 	spin_unlock_irqrestore(&mem->spinlock, flags);
mem               175 kernel/dma/coherent.c 	struct dma_coherent_mem *mem = dev_get_coherent_memory(dev);
mem               177 kernel/dma/coherent.c 	if (!mem)
mem               180 kernel/dma/coherent.c 	*ret = __dma_alloc_from_coherent(dev, mem, size, dma_handle);
mem               194 kernel/dma/coherent.c static int __dma_release_from_coherent(struct dma_coherent_mem *mem,
mem               197 kernel/dma/coherent.c 	if (mem && vaddr >= mem->virt_base && vaddr <
mem               198 kernel/dma/coherent.c 		   (mem->virt_base + ((dma_addr_t)mem->size << PAGE_SHIFT))) {
mem               199 kernel/dma/coherent.c 		int page = (vaddr - mem->virt_base) >> PAGE_SHIFT;
mem               202 kernel/dma/coherent.c 		spin_lock_irqsave(&mem->spinlock, flags);
mem               203 kernel/dma/coherent.c 		bitmap_release_region(mem->bitmap, page, order);
mem               204 kernel/dma/coherent.c 		spin_unlock_irqrestore(&mem->spinlock, flags);
mem               224 kernel/dma/coherent.c 	struct dma_coherent_mem *mem = dev_get_coherent_memory(dev);
mem               226 kernel/dma/coherent.c 	return __dma_release_from_coherent(mem, order, vaddr);
mem               238 kernel/dma/coherent.c static int __dma_mmap_from_coherent(struct dma_coherent_mem *mem,
mem               241 kernel/dma/coherent.c 	if (mem && vaddr >= mem->virt_base && vaddr + size <=
mem               242 kernel/dma/coherent.c 		   (mem->virt_base + ((dma_addr_t)mem->size << PAGE_SHIFT))) {
mem               244 kernel/dma/coherent.c 		int start = (vaddr - mem->virt_base) >> PAGE_SHIFT;
mem               250 kernel/dma/coherent.c 			unsigned long pfn = mem->pfn_base + start + off;
mem               278 kernel/dma/coherent.c 	struct dma_coherent_mem *mem = dev_get_coherent_memory(dev);
mem               280 kernel/dma/coherent.c 	return __dma_mmap_from_coherent(mem, vma, vaddr, size, ret);
mem               305 kernel/dma/coherent.c 	struct dma_coherent_mem *mem = rmem->priv;
mem               308 kernel/dma/coherent.c 	if (!mem) {
mem               310 kernel/dma/coherent.c 					       rmem->size, &mem);
mem               317 kernel/dma/coherent.c 	mem->use_dev_dma_pfn_offset = true;
mem               318 kernel/dma/coherent.c 	rmem->priv = mem;
mem               319 kernel/dma/coherent.c 	dma_assign_coherent_memory(dev, mem);
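The kernel/dma/coherent.c entries above implement a per-device coherent pool: a bitmap tracks page-sized chunks of a remapped region, __dma_alloc_from_coherent() reserves 2^order pages under mem->spinlock and returns both the kernel virtual address (virt_base plus the page offset) and the matching device address from dma_get_device_base(). Below is a deliberately simplified userspace model at single-page granularity; allocation order, pfn offsets and locking are omitted, and the field names are only borrowed from the listing.

/* Simplified userspace model of the per-device coherent pool pattern in
 * kernel/dma/coherent.c: a bitmap over a fixed region, where an allocation
 * yields a "cpu" pointer and a "device" address at the same page index.
 * The real code uses bitmap_find_free_region() with an order and a spinlock;
 * this sketch handles single pages only. */
#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT  12
#define POOL_PAGES  8

struct coherent_pool {
    unsigned char bitmap[POOL_PAGES];            /* one byte per page, for simplicity */
    uint8_t       virt_base[POOL_PAGES << PAGE_SHIFT];
    uint64_t      device_base;
};

static void *pool_alloc(struct coherent_pool *p, uint64_t *dma_handle)
{
    for (int i = 0; i < POOL_PAGES; i++) {
        if (p->bitmap[i])
            continue;
        p->bitmap[i] = 1;
        *dma_handle = p->device_base + ((uint64_t)i << PAGE_SHIFT);
        return p->virt_base + ((size_t)i << PAGE_SHIFT);
    }
    return NULL;                                 /* pool exhausted */
}

static void pool_release(struct coherent_pool *p, void *vaddr)
{
    size_t page = ((uint8_t *)vaddr - p->virt_base) >> PAGE_SHIFT;

    p->bitmap[page] = 0;
}

int main(void)
{
    static struct coherent_pool pool = { .device_base = 0x80000000ull };
    uint64_t handle;
    void *buf = pool_alloc(&pool, &handle);

    printf("cpu %p dev 0x%llx\n", buf, (unsigned long long)handle);
    pool_release(&pool, buf);
    return 0;
}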
mem               292 kernel/kexec.c 		out.mem   = in.mem;
mem               171 kernel/kexec_core.c 		mstart = image->segment[i].mem;
mem               190 kernel/kexec_core.c 		mstart = image->segment[i].mem;
mem               195 kernel/kexec_core.c 			pstart = image->segment[j].mem;
mem               242 kernel/kexec_core.c 			mstart = image->segment[i].mem;
mem               290 kernel/kexec_core.c 		mstart = image->segment[i].mem;
mem               458 kernel/kexec_core.c 			mstart = image->segment[i].mem;
mem               791 kernel/kexec_core.c 	maddr = segment->mem;
mem               864 kernel/kexec_core.c 	maddr = segment->mem;
mem               416 kernel/kexec_elf.c 		kbuf->mem = KEXEC_BUF_MEM_UNKNOWN;
mem               420 kernel/kexec_elf.c 		load_addr = kbuf->mem;
mem               434 kernel/kexec_file.c 			 i, ksegment->buf, ksegment->bufsz, ksegment->mem,
mem               492 kernel/kexec_file.c 	kbuf->mem = temp_start;
mem               526 kernel/kexec_file.c 	kbuf->mem = temp_start;
mem               640 kernel/kexec_file.c 	if (kbuf->mem != KEXEC_BUF_MEM_UNKNOWN)
mem               699 kernel/kexec_file.c 	ksegment->mem = kbuf->mem;
mem               788 kernel/kexec_file.c 		sha_regions[j].start = ksegment->mem;
mem               912 kernel/kexec_file.c 	bss_addr = kbuf->mem + kbuf->bufsz;
mem               936 kernel/kexec_file.c 			kbuf->image->start += kbuf->mem + offset;
mem               943 kernel/kexec_file.c 		sechdrs[i].sh_addr = kbuf->mem + offset;
mem              1169 kernel/kexec_file.c int crash_exclude_mem_range(struct crash_mem *mem,
mem              1176 kernel/kexec_file.c 	for (i = 0; i < mem->nr_ranges; i++) {
mem              1177 kernel/kexec_file.c 		start = mem->ranges[i].start;
mem              1178 kernel/kexec_file.c 		end = mem->ranges[i].end;
mem              1191 kernel/kexec_file.c 			mem->ranges[i].start = 0;
mem              1192 kernel/kexec_file.c 			mem->ranges[i].end = 0;
mem              1193 kernel/kexec_file.c 			if (i < mem->nr_ranges - 1) {
mem              1195 kernel/kexec_file.c 				for (j = i; j < mem->nr_ranges - 1; j++) {
mem              1196 kernel/kexec_file.c 					mem->ranges[j].start =
mem              1197 kernel/kexec_file.c 						mem->ranges[j+1].start;
mem              1198 kernel/kexec_file.c 					mem->ranges[j].end =
mem              1199 kernel/kexec_file.c 							mem->ranges[j+1].end;
mem              1202 kernel/kexec_file.c 			mem->nr_ranges--;
mem              1208 kernel/kexec_file.c 			mem->ranges[i].end = mstart - 1;
mem              1212 kernel/kexec_file.c 			mem->ranges[i].end = mstart - 1;
mem              1214 kernel/kexec_file.c 			mem->ranges[i].start = mend + 1;
mem              1223 kernel/kexec_file.c 	if (i == mem->max_nr_ranges - 1)
mem              1228 kernel/kexec_file.c 	if (j < mem->nr_ranges) {
mem              1230 kernel/kexec_file.c 		for (i = mem->nr_ranges - 1; i >= j; i--)
mem              1231 kernel/kexec_file.c 			mem->ranges[i + 1] = mem->ranges[i];
mem              1234 kernel/kexec_file.c 	mem->ranges[j].start = temp_range.start;
mem              1235 kernel/kexec_file.c 	mem->ranges[j].end = temp_range.end;
mem              1236 kernel/kexec_file.c 	mem->nr_ranges++;
mem              1240 kernel/kexec_file.c int crash_prepare_elf64_headers(struct crash_mem *mem, int kernel_map,
mem              1253 kernel/kexec_file.c 	nr_phdr += mem->nr_ranges;
mem              1315 kernel/kexec_file.c 	for (i = 0; i < mem->nr_ranges; i++) {
mem              1316 kernel/kexec_file.c 		mstart = mem->ranges[i].start;
mem              1317 kernel/kexec_file.c 		mend = mem->ranges[i].end;
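crash_exclude_mem_range(), listed above, punches an exclusion window out of a sorted array of ranges: an entry fully inside the window is removed (shifting the tail down), a partial overlap is truncated, and a window strictly inside one entry splits it in two, which fails if the array is already at max_nr_ranges. The following is a compact userspace model of that behaviour; it handles one overlapping entry per call and skips corner cases the kernel version covers, and the fixed array size is arbitrary.

/* Simplified model of crash_exclude_mem_range(): ranges are sorted,
 * non-overlapping, with inclusive ends.  Excluding [mstart, mend] may
 * delete, truncate or split an entry; a split needs one spare slot. */
#include <stdio.h>

struct range      { unsigned long long start, end; };
struct mem_ranges { struct range r[16]; int nr, max; };

static int exclude_range(struct mem_ranges *m,
                         unsigned long long mstart, unsigned long long mend)
{
    for (int i = 0; i < m->nr; i++) {
        unsigned long long start = m->r[i].start, end = m->r[i].end;

        if (mend < start || mstart > end)
            continue;                            /* no overlap with this entry */

        if (mstart <= start && mend >= end) {    /* fully covered: drop the entry */
            for (int j = i; j < m->nr - 1; j++)
                m->r[j] = m->r[j + 1];
            m->nr--;
            return 0;
        }
        if (mstart > start && mend < end) {      /* strictly inside: split in two */
            if (m->nr == m->max)
                return -1;
            for (int j = m->nr; j > i + 1; j--)
                m->r[j] = m->r[j - 1];
            m->r[i + 1].start = mend + 1;
            m->r[i + 1].end = end;
            m->r[i].end = mstart - 1;
            m->nr++;
            return 0;
        }
        if (mstart > start)                      /* clip the tail of the entry */
            m->r[i].end = mstart - 1;
        else                                     /* clip the head of the entry */
            m->r[i].start = mend + 1;
        return 0;
    }
    return 0;
}

int main(void)
{
    struct mem_ranges m = { .r = { { 0x0, 0xffff } }, .nr = 1, .max = 16 };

    exclude_range(&m, 0x1000, 0x1fff);
    for (int i = 0; i < m.nr; i++)
        printf("[%#llx-%#llx]\n", m.r[i].start, m.r[i].end);
    return 0;
}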
mem               125 kernel/relay.c 	void *mem;
mem               141 kernel/relay.c 	mem = vmap(buf->page_array, n_pages, VM_MAP, PAGE_KERNEL);
mem               142 kernel/relay.c 	if (!mem)
mem               145 kernel/relay.c 	memset(mem, 0, *size);
mem               147 kernel/relay.c 	return mem;
mem               262 kernel/trace/trace_seq.c void trace_seq_putmem(struct trace_seq *s, const void *mem, unsigned int len)
mem               274 kernel/trace/trace_seq.c 	seq_buf_putmem(&s->seq, mem, len);
mem               288 kernel/trace/trace_seq.c void trace_seq_putmem_hex(struct trace_seq *s, const void *mem,
mem               305 kernel/trace/trace_seq.c 	seq_buf_putmem_hex(&s->seq, mem, len);
mem               193 lib/seq_buf.c  int seq_buf_putmem(struct seq_buf *s, const void *mem, unsigned int len)
mem               198 lib/seq_buf.c  		memcpy(s->buffer + s->len, mem, len);
mem               221 lib/seq_buf.c  int seq_buf_putmem_hex(struct seq_buf *s, const void *mem,
mem               225 lib/seq_buf.c  	const unsigned char *data = mem;
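seq_buf_putmem() in the lib/seq_buf.c entries copies raw bytes into the buffer only when they fit and otherwise records an overflow rather than truncating; seq_buf_putmem_hex() expands each byte to two hex characters first, and the trace_seq_putmem() lines above are thin wrappers over these. A minimal sketch of the bounded-append part, with the field names borrowed from the listing and the error handling simplified:

/* Minimal sketch of the seq_buf_putmem() bounded-append pattern: copy the
 * bytes only if they fit, otherwise flag an overflow instead of truncating. */
#include <stdio.h>
#include <string.h>

struct seq_buf_model {
    char         buffer[32];
    unsigned int len;
    int          overflow;
};

static int putmem(struct seq_buf_model *s, const void *mem, unsigned int len)
{
    if (s->len + len > sizeof(s->buffer)) {
        s->overflow = 1;
        return -1;
    }
    memcpy(s->buffer + s->len, mem, len);
    s->len += len;
    return 0;
}

int main(void)
{
    struct seq_buf_model s = { .len = 0 };

    putmem(&s, "abc", 3);
    printf("len=%u overflow=%d\n", s.len, s.overflow);
    return 0;
}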
mem               178 lib/zlib_deflate/deflate.c     deflate_workspace *mem;
mem               192 lib/zlib_deflate/deflate.c     mem = (deflate_workspace *) strm->workspace;
mem               208 lib/zlib_deflate/deflate.c     next = (char *) mem;
mem               209 lib/zlib_deflate/deflate.c     next += sizeof(*mem);
mem               210 lib/zlib_deflate/deflate.c     mem->window_memory = (Byte *) next;
mem               212 lib/zlib_deflate/deflate.c     mem->prev_memory = (Pos *) next;
mem               214 lib/zlib_deflate/deflate.c     mem->head_memory = (Pos *) next;
mem               216 lib/zlib_deflate/deflate.c     mem->overlay_memory = next;
mem               218 lib/zlib_deflate/deflate.c     s = (deflate_state *) &(mem->deflate_memory);
mem               232 lib/zlib_deflate/deflate.c     s->window = (Byte *) mem->window_memory;
mem               233 lib/zlib_deflate/deflate.c     s->prev   = (Pos *)  mem->prev_memory;
mem               234 lib/zlib_deflate/deflate.c     s->head   = (Pos *)  mem->head_memory;
mem               238 lib/zlib_deflate/deflate.c     overlay = (ush *) mem->overlay_memory;
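The lib/zlib_deflate/deflate.c entries show deflate init carving one preallocated workspace into sub-buffers by walking a char pointer forward: the workspace struct itself first, then window_memory, prev_memory, head_memory and overlay_memory at increasing offsets, so nothing more needs to be allocated on the compression path. A sketch of that carving pattern; the region sizes below are invented, since the real offsets depend on the configured window and memory level.

/* Sketch of the single-workspace carving pattern used by zlib_deflate:
 * one contiguous allocation split into sub-buffers by advancing a char
 * pointer.  The sizes here are made up for the example. */
#include <stdio.h>
#include <stdlib.h>

struct workspace {
    unsigned char  *window;
    unsigned short *prev;
    unsigned short *head;
};

int main(void)
{
    size_t window_sz = 1 << 15;
    size_t prev_sz   = (1 << 15) * sizeof(unsigned short);
    size_t head_sz   = (1 << 15) * sizeof(unsigned short);
    char *next = malloc(sizeof(struct workspace) + window_sz + prev_sz + head_sz);
    struct workspace *ws = (struct workspace *)next;

    if (!ws)
        return 1;
    next += sizeof(*ws);                 /* carve regions right after the header */
    ws->window = (unsigned char *)next;  next += window_sz;
    ws->prev   = (unsigned short *)next; next += prev_sz;
    ws->head   = (unsigned short *)next;

    printf("window at %p, prev at %p, head at %p\n",
           (void *)ws->window, (void *)ws->prev, (void *)ws->head);
    free(ws);
    return 0;
}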
mem                71 mm/cma_debug.c static void cma_add_to_cma_mem_list(struct cma *cma, struct cma_mem *mem)
mem                74 mm/cma_debug.c 	hlist_add_head(&mem->node, &cma->mem_head);
mem                80 mm/cma_debug.c 	struct cma_mem *mem = NULL;
mem                84 mm/cma_debug.c 		mem = hlist_entry(cma->mem_head.first, struct cma_mem, node);
mem                85 mm/cma_debug.c 		hlist_del_init(&mem->node);
mem                89 mm/cma_debug.c 	return mem;
mem                94 mm/cma_debug.c 	struct cma_mem *mem = NULL;
mem                97 mm/cma_debug.c 		mem = cma_get_entry_from_list(cma);
mem                98 mm/cma_debug.c 		if (mem == NULL)
mem               101 mm/cma_debug.c 		if (mem->n <= count) {
mem               102 mm/cma_debug.c 			cma_release(cma, mem->p, mem->n);
mem               103 mm/cma_debug.c 			count -= mem->n;
mem               104 mm/cma_debug.c 			kfree(mem);
mem               106 mm/cma_debug.c 			cma_release(cma, mem->p, count);
mem               107 mm/cma_debug.c 			mem->p += count;
mem               108 mm/cma_debug.c 			mem->n -= count;
mem               110 mm/cma_debug.c 			cma_add_to_cma_mem_list(cma, mem);
mem               113 mm/cma_debug.c 			cma_add_to_cma_mem_list(cma, mem);
mem               133 mm/cma_debug.c 	struct cma_mem *mem;
mem               136 mm/cma_debug.c 	mem = kzalloc(sizeof(*mem), GFP_KERNEL);
mem               137 mm/cma_debug.c 	if (!mem)
mem               142 mm/cma_debug.c 		kfree(mem);
mem               146 mm/cma_debug.c 	mem->p = p;
mem               147 mm/cma_debug.c 	mem->n = count;
mem               149 mm/cma_debug.c 	cma_add_to_cma_mem_list(cma, mem);
mem              1287 mm/filemap.c   static inline bool clear_bit_unlock_is_negative_byte(long nr, volatile void *mem)
mem              1289 mm/filemap.c   	clear_bit_unlock(nr, mem);
mem              1291 mm/filemap.c   	return test_bit(PG_waiters, mem);
mem               799 mm/memory_hotplug.c 	struct memory_block *mem;
mem               807 mm/memory_hotplug.c 	mem = find_memory_block(__pfn_to_section(pfn));
mem               808 mm/memory_hotplug.c 	nid = mem->nid;
mem               809 mm/memory_hotplug.c 	put_device(&mem->dev);
mem              1021 mm/memory_hotplug.c static int online_memory_block(struct memory_block *mem, void *arg)
mem              1023 mm/memory_hotplug.c 	return device_online(&mem->dev);
mem              1633 mm/memory_hotplug.c static int check_memblock_offlined_cb(struct memory_block *mem, void *arg)
mem              1635 mm/memory_hotplug.c 	int ret = !is_memblock_offlined(mem);
mem              1640 mm/memory_hotplug.c 		beginpa = PFN_PHYS(section_nr_to_pfn(mem->start_section_nr));
mem              1666 mm/memory_hotplug.c static int check_no_memblock_for_node_cb(struct memory_block *mem, void *arg)
mem              1675 mm/memory_hotplug.c 	return mem->nid == nid ? -EEXIST : 0;
mem               511 mm/mempool.c   	struct kmem_cache *mem = pool_data;
mem               512 mm/mempool.c   	VM_BUG_ON(mem->ctor);
mem               513 mm/mempool.c   	return kmem_cache_alloc(mem, gfp_mask);
mem               519 mm/mempool.c   	struct kmem_cache *mem = pool_data;
mem               520 mm/mempool.c   	kmem_cache_free(mem, element);
mem               350 mm/nommu.c     void vm_unmap_ram(const void *mem, unsigned int count)
mem                66 mm/page_poison.c static void check_poison_mem(unsigned char *mem, size_t bytes)
mem                75 mm/page_poison.c 	start = memchr_inv(mem, PAGE_POISON, bytes);
mem                79 mm/page_poison.c 	for (end = mem + bytes - 1; end > start; end--) {
mem              1738 mm/slab_common.c 	void *mem = (void *)p;
mem              1740 mm/slab_common.c 	if (unlikely(ZERO_OR_NULL_PTR(mem)))
mem              1742 mm/slab_common.c 	ks = ksize(mem);
mem              1743 mm/slab_common.c 	memset(mem, 0, ks);
mem              1744 mm/slab_common.c 	kfree(mem);
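The mm/slab_common.c lines above are the body of kzfree(): look up the full usable size with ksize(), zero it, then kfree(), so any slack beyond the originally requested size is wiped as well. A userspace analog of the same idea, using malloc_usable_size() (a glibc extension, assumed available) in place of ksize():

/* Userspace analog of the kzfree() pattern shown above: zero the whole
 * usable allocation, not just the requested size, before freeing.
 * malloc_usable_size() is a glibc extension standing in for ksize(). */
#include <malloc.h>
#include <stdlib.h>
#include <string.h>

static void zfree(void *mem)
{
    if (!mem)
        return;
    memset(mem, 0, malloc_usable_size(mem));
    free(mem);
}

int main(void)
{
    char *secret = strdup("hunter2");

    zfree(secret);
    return 0;
}

A hardened userspace version would use explicit_bzero() instead of memset() so the compiler cannot drop the store as dead code before free().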
mem              1737 mm/vmalloc.c   void vm_unmap_ram(const void *mem, unsigned int count)
mem              1740 mm/vmalloc.c   	unsigned long addr = (unsigned long)mem;
mem              1750 mm/vmalloc.c   		debug_check_no_locks_freed(mem, size);
mem              1751 mm/vmalloc.c   		vb_free(mem, size);
mem              1782 mm/vmalloc.c   	void *mem;
mem              1785 mm/vmalloc.c   		mem = vb_alloc(size, GFP_KERNEL);
mem              1786 mm/vmalloc.c   		if (IS_ERR(mem))
mem              1788 mm/vmalloc.c   		addr = (unsigned long)mem;
mem              1797 mm/vmalloc.c   		mem = (void *)addr;
mem              1800 mm/vmalloc.c   		vm_unmap_ram(mem, count);
mem              1803 mm/vmalloc.c   	return mem;
mem               199 mm/vmstat.c    	int mem;	/* memory in 128 MB units */
mem               231 mm/vmstat.c    	mem = zone_managed_pages(zone) >> (27 - PAGE_SHIFT);
mem               233 mm/vmstat.c    	threshold = 2 * fls(num_online_cpus()) * (1 + fls(mem));
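The mm/vmstat.c lines compute the per-cpu stat threshold from the zone size expressed in 128 MB units and the number of online CPUs: threshold = 2 * fls(cpus) * (1 + fls(mem)). A small worked example follows; fls() is modelled with __builtin_clz(), and any clamping the real code applies afterwards is omitted here.

/* Worked example of the per-cpu stat threshold formula from mm/vmstat.c:
 * threshold = 2 * fls(num_online_cpus()) * (1 + fls(zone_size_in_128MB)). */
#include <stdio.h>

static int fls32(unsigned int x)
{
    return x ? 32 - __builtin_clz(x) : 0;   /* position of the highest set bit */
}

int main(void)
{
    unsigned int cpus = 8;
    unsigned int mem = 4096 / 128;          /* a 4 GB zone in 128 MB units = 32 */
    int threshold = 2 * fls32(cpus) * (1 + fls32(mem));

    printf("threshold = %d\n", threshold);  /* 2 * 4 * (1 + 6) = 56 */
    return 0;
}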
mem              2143 net/core/sock.c 		void *mem;
mem              2148 net/core/sock.c 		mem = kmalloc(size, priority);
mem              2149 net/core/sock.c 		if (mem)
mem              2150 net/core/sock.c 			return mem;
mem              2161 net/core/sock.c static inline void __sock_kfree_s(struct sock *sk, void *mem, int size,
mem              2164 net/core/sock.c 	if (WARN_ON_ONCE(!mem))
mem              2167 net/core/sock.c 		kzfree(mem);
mem              2169 net/core/sock.c 		kfree(mem);
mem              2173 net/core/sock.c void sock_kfree_s(struct sock *sk, void *mem, int size)
mem              2175 net/core/sock.c 	__sock_kfree_s(sk, mem, size, false);
mem              2179 net/core/sock.c void sock_kzfree_s(struct sock *sk, void *mem, int size)
mem              2181 net/core/sock.c 	__sock_kfree_s(sk, mem, size, true);
mem              3209 net/core/sock.c void sk_get_meminfo(const struct sock *sk, u32 *mem)
mem              3211 net/core/sock.c 	memset(mem, 0, sizeof(*mem) * SK_MEMINFO_VARS);
mem              3213 net/core/sock.c 	mem[SK_MEMINFO_RMEM_ALLOC] = sk_rmem_alloc_get(sk);
mem              3214 net/core/sock.c 	mem[SK_MEMINFO_RCVBUF] = READ_ONCE(sk->sk_rcvbuf);
mem              3215 net/core/sock.c 	mem[SK_MEMINFO_WMEM_ALLOC] = sk_wmem_alloc_get(sk);
mem              3216 net/core/sock.c 	mem[SK_MEMINFO_SNDBUF] = READ_ONCE(sk->sk_sndbuf);
mem              3217 net/core/sock.c 	mem[SK_MEMINFO_FWD_ALLOC] = sk->sk_forward_alloc;
mem              3218 net/core/sock.c 	mem[SK_MEMINFO_WMEM_QUEUED] = READ_ONCE(sk->sk_wmem_queued);
mem              3219 net/core/sock.c 	mem[SK_MEMINFO_OPTMEM] = atomic_read(&sk->sk_omem_alloc);
mem              3220 net/core/sock.c 	mem[SK_MEMINFO_BACKLOG] = READ_ONCE(sk->sk_backlog.len);
mem              3221 net/core/sock.c 	mem[SK_MEMINFO_DROPS] = atomic_read(&sk->sk_drops);
mem                62 net/core/sock_diag.c 	u32 mem[SK_MEMINFO_VARS];
mem                64 net/core/sock_diag.c 	sk_get_meminfo(sk, mem);
mem                66 net/core/sock_diag.c 	return nla_put(skb, attrtype, sizeof(mem), &mem);
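sk_get_meminfo() fills a u32 array indexed by the SK_MEMINFO_* constants, and the net/core/sock_diag.c lines put that array on the netlink reply as a single attribute. On reasonably recent kernels, userspace can fetch the same array for its own socket with getsockopt(SO_MEMINFO); the sketch below assumes headers that provide SO_MEMINFO and <linux/sock_diag.h>, and the fallback #define of 55 is an assumption taken from asm-generic/socket.h.

/* Hedged example: reading the SK_MEMINFO_* array for a socket via
 * getsockopt(SOL_SOCKET, SO_MEMINFO).  Assumes a kernel and headers recent
 * enough to provide SO_MEMINFO and <linux/sock_diag.h>. */
#include <stdio.h>
#include <string.h>
#include <sys/socket.h>
#include <unistd.h>
#include <linux/sock_diag.h>    /* SK_MEMINFO_* indexes */

#ifndef SO_MEMINFO
#define SO_MEMINFO 55           /* value from asm-generic/socket.h; assumption */
#endif

int main(void)
{
    unsigned int mem[SK_MEMINFO_VARS];
    socklen_t len = sizeof(mem);
    int fd = socket(AF_INET, SOCK_DGRAM, 0);

    if (fd < 0)
        return 1;
    memset(mem, 0, sizeof(mem));
    if (getsockopt(fd, SOL_SOCKET, SO_MEMINFO, mem, &len) < 0) {
        perror("SO_MEMINFO");
        close(fd);
        return 1;
    }
    printf("rmem_alloc=%u rcvbuf=%u wmem_alloc=%u sndbuf=%u\n",
           mem[SK_MEMINFO_RMEM_ALLOC], mem[SK_MEMINFO_RCVBUF],
           mem[SK_MEMINFO_WMEM_ALLOC], mem[SK_MEMINFO_SNDBUF]);
    close(fd);
    return 0;
}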
mem                39 net/core/xdp.c 	BUILD_BUG_ON(FIELD_SIZEOF(struct xdp_mem_allocator, mem.id)
mem                52 net/core/xdp.c 	return xa->mem.id != mem_id;
mem                58 net/core/xdp.c 	.key_offset  = offsetof(struct xdp_mem_allocator, mem.id),
mem                59 net/core/xdp.c 	.key_len = FIELD_SIZEOF(struct xdp_mem_allocator, mem.id),
mem                74 net/core/xdp.c 	ida_simple_remove(&mem_id_pool, xa->mem.id);
mem                77 net/core/xdp.c 	xa->mem.id = 0xFFFF;
mem                78 net/core/xdp.c 	xa->mem.type = 0xF0F0;
mem               140 net/core/xdp.c 	int id = xdp_rxq->mem.id;
mem               150 net/core/xdp.c 	if (xdp_rxq->mem.type == MEM_TYPE_ZERO_COPY)
mem               153 net/core/xdp.c 	if (xdp_rxq->mem.type == MEM_TYPE_PAGE_POOL) {
mem               176 net/core/xdp.c 	xdp_rxq->mem.id = 0;
mem               177 net/core/xdp.c 	xdp_rxq->mem.type = 0;
mem               305 net/core/xdp.c 	xdp_rxq->mem.type = type;
mem               334 net/core/xdp.c 	xdp_rxq->mem.id = id;
mem               335 net/core/xdp.c 	xdp_alloc->mem  = xdp_rxq->mem;
mem               341 net/core/xdp.c 		ida_simple_remove(&mem_id_pool, xdp_rxq->mem.id);
mem               342 net/core/xdp.c 		xdp_rxq->mem.id = 0;
mem               367 net/core/xdp.c static void __xdp_return(void *data, struct xdp_mem_info *mem, bool napi_direct,
mem               373 net/core/xdp.c 	switch (mem->type) {
mem               377 net/core/xdp.c 		xa = rhashtable_lookup(mem_id_ht, &mem->id, mem_id_rht_params);
mem               394 net/core/xdp.c 		xa = rhashtable_lookup(mem_id_ht, &mem->id, mem_id_rht_params);
mem               405 net/core/xdp.c 	__xdp_return(xdpf->data, &xdpf->mem, false, 0);
mem               411 net/core/xdp.c 	__xdp_return(xdpf->data, &xdpf->mem, true, 0);
mem               417 net/core/xdp.c 	__xdp_return(xdp->data, &xdp->rxq->mem, true, xdp->handle);
mem               422 net/core/xdp.c void __xdp_release_frame(void *data, struct xdp_mem_info *mem)
mem               428 net/core/xdp.c 	xa = rhashtable_lookup(mem_id_ht, &mem->id, mem_id_rht_params);
mem               498 net/core/xdp.c 	xdpf->mem.type = MEM_TYPE_PAGE_ORDER0;
mem               697 net/netfilter/x_tables.c 	size_t mem;
mem               707 net/netfilter/x_tables.c 	mem = sizeof(struct compat_delta) * number;
mem               708 net/netfilter/x_tables.c 	if (mem > XT_MAX_TABLE_SIZE)
mem               711 net/netfilter/x_tables.c 	xt[af].compat_tab = vmalloc(mem);
mem              1054 net/netfilter/x_tables.c 	void *mem;
mem              1092 net/netfilter/x_tables.c 	mem = vmalloc(len);
mem              1093 net/netfilter/x_tables.c 	if (!mem)
mem              1096 net/netfilter/x_tables.c 	if (copy_from_user(mem, user, len) == 0)
mem              1097 net/netfilter/x_tables.c 		return mem;
mem              1099 net/netfilter/x_tables.c 	vfree(mem);
mem              1339 net/netfilter/x_tables.c 	struct xt_counters *mem;
mem              1341 net/netfilter/x_tables.c 	if (counters == 0 || counters > INT_MAX / sizeof(*mem))
mem              1344 net/netfilter/x_tables.c 	counters *= sizeof(*mem);
mem              1841 net/netfilter/x_tables.c 	if (!state->mem) {
mem              1842 net/netfilter/x_tables.c 		state->mem = __alloc_percpu(XT_PCPU_BLOCK_SIZE,
mem              1844 net/netfilter/x_tables.c 		if (!state->mem)
mem              1847 net/netfilter/x_tables.c 	counter->pcnt = (__force unsigned long)(state->mem + state->off);
mem              1850 net/netfilter/x_tables.c 		state->mem = NULL;
mem               146 net/sched/sch_fq_codel.c 	unsigned int mem = 0;
mem               171 net/sched/sch_fq_codel.c 		mem += get_codel_cb(skb)->mem_usage;
mem               178 net/sched/sch_fq_codel.c 	q->memory_usage -= mem;
mem               160 net/sctp/diag.c 		u32 mem[SK_MEMINFO_VARS];
mem               167 net/sctp/diag.c 		mem[SK_MEMINFO_WMEM_ALLOC] = amt;
mem               172 net/sctp/diag.c 		mem[SK_MEMINFO_RMEM_ALLOC] = amt;
mem               173 net/sctp/diag.c 		mem[SK_MEMINFO_RCVBUF] = sk->sk_rcvbuf;
mem               174 net/sctp/diag.c 		mem[SK_MEMINFO_SNDBUF] = sk->sk_sndbuf;
mem               175 net/sctp/diag.c 		mem[SK_MEMINFO_FWD_ALLOC] = sk->sk_forward_alloc;
mem               176 net/sctp/diag.c 		mem[SK_MEMINFO_WMEM_QUEUED] = sk->sk_wmem_queued;
mem               177 net/sctp/diag.c 		mem[SK_MEMINFO_OPTMEM] = atomic_read(&sk->sk_omem_alloc);
mem               178 net/sctp/diag.c 		mem[SK_MEMINFO_BACKLOG] = READ_ONCE(sk->sk_backlog.len);
mem               179 net/sctp/diag.c 		mem[SK_MEMINFO_DROPS] = atomic_read(&sk->sk_drops);
mem               181 net/sctp/diag.c 		if (nla_put(skb, INET_DIAG_SKMEMINFO, sizeof(mem), &mem) < 0)
mem              1402 net/tls/tls_sw.c 	u8 *aad, *iv, *mem = NULL;
mem              1439 net/tls/tls_sw.c 	mem = kmalloc(mem_size, sk->sk_allocation);
mem              1440 net/tls/tls_sw.c 	if (!mem)
mem              1444 net/tls/tls_sw.c 	aead_req = (struct aead_request *)mem;
mem              1445 net/tls/tls_sw.c 	sgin = (struct scatterlist *)(mem + aead_size);
mem              1461 net/tls/tls_sw.c 		kfree(mem);
mem              1485 net/tls/tls_sw.c 		kfree(mem);
mem              1523 net/tls/tls_sw.c 	kfree(mem);
mem               212 net/xdp/xsk.c  	return (xdp->rxq->mem.type == MEM_TYPE_ZERO_COPY) ?
mem               108 samples/seccomp/user-trap.c 	int ret = -1, mem;
mem               128 samples/seccomp/user-trap.c 	mem = open(path, O_RDONLY);
mem               129 samples/seccomp/user-trap.c 	if (mem < 0) {
mem               155 samples/seccomp/user-trap.c 	if (lseek(mem, req->data.args[0], SEEK_SET) < 0) {
mem               160 samples/seccomp/user-trap.c 	ret = read(mem, source, sizeof(source));
mem               166 samples/seccomp/user-trap.c 	if (lseek(mem, req->data.args[1], SEEK_SET) < 0) {
mem               171 samples/seccomp/user-trap.c 	ret = read(mem, target, sizeof(target));
mem               197 samples/seccomp/user-trap.c 	close(mem);
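The samples/seccomp/user-trap.c lines open /proc/<pid>/mem of the trapped task, lseek to the addresses carried in the intercepted syscall arguments and read the data from there. A standalone sketch of just that technique follows; the pid and address are command-line placeholders, and access is subject to the usual ptrace permission checks.

/* Standalone sketch of the /proc/<pid>/mem technique used by the seccomp
 * user-trap sample: seek to a remote address and read the bytes there. */
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>

int main(int argc, char **argv)
{
    char path[64], buf[64];
    ssize_t n;
    int fd;

    if (argc != 3) {
        fprintf(stderr, "usage: %s <pid> <hex-address>\n", argv[0]);
        return 1;
    }
    snprintf(path, sizeof(path), "/proc/%s/mem", argv[1]);
    fd = open(path, O_RDONLY);
    if (fd < 0) {
        perror("open");
        return 1;
    }
    if (lseek(fd, strtoull(argv[2], NULL, 16), SEEK_SET) < 0) {
        perror("lseek");
        close(fd);
        return 1;
    }
    n = read(fd, buf, sizeof(buf));
    printf("read %zd bytes\n", n);
    close(fd);
    return 0;
}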
mem                96 samples/vfio-mdev/mbochs.c MODULE_PARM_DESC(mem, "megabytes available to " MBOCHS_NAME " devices");
mem                44 scripts/dtc/data.c struct data data_copy_mem(const char *mem, int len)
mem                51 scripts/dtc/data.c 	memcpy(d.val, mem, len);
mem               103 scripts/dtc/dtc.h struct data data_copy_mem(const char *mem, int len);
mem               126 security/integrity/ima/ima_kexec.c 	ret = arch_ima_add_kexec_buffer(image, kbuf.mem, kexec_segment_size);
mem               133 security/integrity/ima/ima_kexec.c 		 kbuf.mem);
mem               684 sound/isa/msnd/msnd_pinnacle.c static int snd_msnd_write_cfg_mem(int cfg, int num, int mem)
mem               688 sound/isa/msnd/msnd_pinnacle.c 	mem >>= 8;
mem               689 sound/isa/msnd/msnd_pinnacle.c 	wmem = (u16)(mem & 0xfff);
mem               712 sound/isa/msnd/msnd_pinnacle.c 				      u16 io1, u16 irq, int mem)
mem               722 sound/isa/msnd/msnd_pinnacle.c 	if (snd_msnd_write_cfg_mem(cfg, num, mem))
mem               753 sound/isa/msnd/msnd_pinnacle.c static long mem[SNDRV_CARDS] = SNDRV_DEFAULT_PORT;
mem               793 sound/isa/msnd/msnd_pinnacle.c module_param_hw_array(mem, long, iomem, NULL, 0444);
mem               814 sound/isa/msnd/msnd_pinnacle.c 	if (irq[i] == SNDRV_AUTO_PORT || mem[i] == SNDRV_AUTO_PORT) {
mem               853 sound/isa/msnd/msnd_pinnacle.c 	if (!(mem[i] == 0xb0000 ||
mem               854 sound/isa/msnd/msnd_pinnacle.c 	      mem[i] == 0xc8000 ||
mem               855 sound/isa/msnd/msnd_pinnacle.c 	      mem[i] == 0xd0000 ||
mem               856 sound/isa/msnd/msnd_pinnacle.c 	      mem[i] == 0xd8000 ||
mem               857 sound/isa/msnd/msnd_pinnacle.c 	      mem[i] == 0xe0000 ||
mem               858 sound/isa/msnd/msnd_pinnacle.c 	      mem[i] == 0xe8000)) {
mem               918 sound/isa/msnd/msnd_pinnacle.c 	switch (mem[idx]) {
mem               951 sound/isa/msnd/msnd_pinnacle.c 					 irq[idx], mem[idx]);
mem              1011 sound/isa/msnd/msnd_pinnacle.c 	chip->base = mem[idx];
mem              1124 sound/isa/msnd/msnd_pinnacle.c 	mem[idx] = pnp_mem_start(pnp_dev, 0);
mem              1136 sound/isa/msnd/msnd_pinnacle.c 	chip->base = mem[idx];
mem               104 sound/pci/cs5530.c 	void __iomem *mem;
mem               133 sound/pci/cs5530.c 	mem = pci_ioremap_bar(pci, 0);
mem               134 sound/pci/cs5530.c 	if (mem == NULL) {
mem               139 sound/pci/cs5530.c 	map = readw(mem + 0x18);
mem               140 sound/pci/cs5530.c 	iounmap(mem);
mem                83 sound/pci/emu10k1/memory.c 	blk->first_page = get_aligned_page(blk->mem.offset);
mem                84 sound/pci/emu10k1/memory.c 	blk->last_page = get_aligned_page(blk->mem.offset + blk->mem.size - 1);
mem               211 sound/pci/emu10k1/memory.c 		blk = get_emu10k1_memblk(p, mem.list);
mem               224 sound/pci/emu10k1/memory.c 	blk->mem.offset = aligned_page_offset(page); /* set aligned offset */
mem               457 sound/pci/emu10k1/memory.c 	if ((p = blk->mem.list.prev) != &hdr->block) {
mem               458 sound/pci/emu10k1/memory.c 		q = get_emu10k1_memblk(p, mem.list);
mem               463 sound/pci/emu10k1/memory.c 	if ((p = blk->mem.list.next) != &hdr->block) {
mem               464 sound/pci/emu10k1/memory.c 		q = get_emu10k1_memblk(p, mem.list);
mem              1104 sound/pci/mixart/mixart.c 		iounmap(mgr->mem[i].virt);
mem              1281 sound/pci/mixart/mixart.c 		mgr->mem[i].phys = pci_resource_start(pci, i);
mem              1282 sound/pci/mixart/mixart.c 		mgr->mem[i].virt = pci_ioremap_bar(pci, i);
mem              1283 sound/pci/mixart/mixart.c 		if (!mgr->mem[i].virt) {
mem              1285 sound/pci/mixart/mixart.c 			       mgr->mem[i].phys);
mem              1338 sound/pci/mixart/mixart.c 			mgr->mem[0].phys, mgr->mem[1].phys, mgr->irq, i);
mem                62 sound/pci/mixart/mixart.h 	struct mem_area mem[2];
mem                31 sound/pci/mixart/mixart_hwdep.h #define MIXART_MEM(mgr,x)	((mgr)->mem[0].virt + (x))
mem                32 sound/pci/mixart/mixart_hwdep.h #define MIXART_REG(mgr,x)	((mgr)->mem[1].virt + (x))
mem               594 sound/soc/atmel/atmel-i2s.c 	struct resource *mem;
mem               613 sound/soc/atmel/atmel-i2s.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               614 sound/soc/atmel/atmel-i2s.c 	base = devm_ioremap_resource(&pdev->dev, mem);
mem               684 sound/soc/atmel/atmel-i2s.c 	dev->playback.addr	= (dma_addr_t)mem->start + ATMEL_I2SC_THR;
mem               686 sound/soc/atmel/atmel-i2s.c 	dev->capture.addr	= (dma_addr_t)mem->start + ATMEL_I2SC_RHR;
mem               884 sound/soc/atmel/mchp-i2s-mcc.c 	struct resource *mem;
mem               895 sound/soc/atmel/mchp-i2s-mcc.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               896 sound/soc/atmel/mchp-i2s-mcc.c 	base = devm_ioremap_resource(&pdev->dev, mem);
mem               952 sound/soc/atmel/mchp-i2s-mcc.c 	dev->playback.addr	= (dma_addr_t)mem->start + MCHP_I2SMCC_THR;
mem               953 sound/soc/atmel/mchp-i2s-mcc.c 	dev->capture.addr	= (dma_addr_t)mem->start + MCHP_I2SMCC_RHR;
mem              1400 sound/soc/codecs/cs47l15.c 	cs47l15->core.adsp[0].mem = cs47l15_dsp1_regions;
mem              1244 sound/soc/codecs/cs47l24.c 		cs47l24->core.adsp[i].mem = cs47l24_dsp_regions[i - 1];
mem              1693 sound/soc/codecs/cs47l35.c 		cs47l35->core.adsp[i].mem = cs47l35_dsp_regions[i];
mem              2641 sound/soc/codecs/cs47l85.c 		cs47l85->core.adsp[i].mem = cs47l85_dsp_regions[i];
mem              2544 sound/soc/codecs/cs47l90.c 		cs47l90->core.adsp[i].mem = cs47l90_dsp_regions[i];
mem              1952 sound/soc/codecs/cs47l92.c 	cs47l92->core.adsp[0].mem = cs47l92_dsp1_regions;
mem              2224 sound/soc/codecs/wm2200.c 	wm2200->dsp[0].mem = wm2200_dsp1_regions;
mem              2228 sound/soc/codecs/wm2200.c 	wm2200->dsp[1].mem = wm2200_dsp2_regions;
mem              2056 sound/soc/codecs/wm5102.c 	wm5102->core.adsp[0].mem = wm5102_dsp1_regions;
mem              2418 sound/soc/codecs/wm5110.c 		wm5110->core.adsp[i].mem = wm5110_dsp_regions[i];
mem               832 sound/soc/codecs/wm_adsp.c 		if (dsp->mem[i].type == type)
mem               833 sound/soc/codecs/wm_adsp.c 			return &dsp->mem[i];
mem               838 sound/soc/codecs/wm_adsp.c static unsigned int wm_adsp_region_to_reg(struct wm_adsp_region const *mem,
mem               841 sound/soc/codecs/wm_adsp.c 	switch (mem->type) {
mem               843 sound/soc/codecs/wm_adsp.c 		return mem->base + (offset * 3);
mem               848 sound/soc/codecs/wm_adsp.c 		return mem->base + (offset * 2);
mem               855 sound/soc/codecs/wm_adsp.c static unsigned int wm_halo_region_to_reg(struct wm_adsp_region const *mem,
mem               858 sound/soc/codecs/wm_adsp.c 	switch (mem->type) {
mem               861 sound/soc/codecs/wm_adsp.c 		return mem->base + (offset * 4);
mem               864 sound/soc/codecs/wm_adsp.c 		return (mem->base + (offset * 3)) & ~0x3;
mem               866 sound/soc/codecs/wm_adsp.c 		return mem->base + (offset * 5);
mem               932 sound/soc/codecs/wm_adsp.c 	const struct wm_adsp_region *mem;
mem               934 sound/soc/codecs/wm_adsp.c 	mem = wm_adsp_find_region(dsp, alg_region->type);
mem               935 sound/soc/codecs/wm_adsp.c 	if (!mem) {
mem               941 sound/soc/codecs/wm_adsp.c 	*reg = dsp->ops->region_to_reg(mem, ctl->alg_region.base + ctl->offset);
mem              1807 sound/soc/codecs/wm_adsp.c 	const struct wm_adsp_region *mem;
mem              1912 sound/soc/codecs/wm_adsp.c 			mem = wm_adsp_find_region(dsp, type);
mem              1913 sound/soc/codecs/wm_adsp.c 			if (!mem) {
mem              1919 sound/soc/codecs/wm_adsp.c 			reg = dsp->ops->region_to_reg(mem, offset);
mem              2013 sound/soc/codecs/wm_adsp.c 			       const struct wm_adsp_region *mem,
mem              2032 sound/soc/codecs/wm_adsp.c 	reg = dsp->ops->region_to_reg(mem, pos + len);
mem              2052 sound/soc/codecs/wm_adsp.c 	reg = dsp->ops->region_to_reg(mem, pos);
mem              2158 sound/soc/codecs/wm_adsp.c 	const struct wm_adsp_region *mem;
mem              2163 sound/soc/codecs/wm_adsp.c 	mem = wm_adsp_find_region(dsp, WMFW_ADSP1_DM);
mem              2164 sound/soc/codecs/wm_adsp.c 	if (WARN_ON(!mem))
mem              2167 sound/soc/codecs/wm_adsp.c 	ret = regmap_raw_read(dsp->regmap, mem->base, &adsp1_id,
mem              2193 sound/soc/codecs/wm_adsp.c 	adsp1_alg = wm_adsp_read_algs(dsp, n_algs, mem, pos, len);
mem              2259 sound/soc/codecs/wm_adsp.c 	const struct wm_adsp_region *mem;
mem              2264 sound/soc/codecs/wm_adsp.c 	mem = wm_adsp_find_region(dsp, WMFW_ADSP2_XM);
mem              2265 sound/soc/codecs/wm_adsp.c 	if (WARN_ON(!mem))
mem              2268 sound/soc/codecs/wm_adsp.c 	ret = regmap_raw_read(dsp->regmap, mem->base, &adsp2_id,
mem              2299 sound/soc/codecs/wm_adsp.c 	adsp2_alg = wm_adsp_read_algs(dsp, n_algs, mem, pos, len);
mem              2399 sound/soc/codecs/wm_adsp.c 	const struct wm_adsp_region *mem;
mem              2404 sound/soc/codecs/wm_adsp.c 	mem = wm_adsp_find_region(dsp, WMFW_ADSP2_XM);
mem              2405 sound/soc/codecs/wm_adsp.c 	if (WARN_ON(!mem))
mem              2408 sound/soc/codecs/wm_adsp.c 	ret = regmap_raw_read(dsp->regmap, mem->base, &halo_id,
mem              2429 sound/soc/codecs/wm_adsp.c 	halo_alg = wm_adsp_read_algs(dsp, n_algs, mem, pos, len);
mem              2462 sound/soc/codecs/wm_adsp.c 	const struct wm_adsp_region *mem;
mem              2544 sound/soc/codecs/wm_adsp.c 				mem = wm_adsp_find_region(dsp, type);
mem              2545 sound/soc/codecs/wm_adsp.c 				if (!mem) {
mem              2549 sound/soc/codecs/wm_adsp.c 				reg = dsp->ops->region_to_reg(mem, 0);
mem              2568 sound/soc/codecs/wm_adsp.c 			mem = wm_adsp_find_region(dsp, type);
mem              2569 sound/soc/codecs/wm_adsp.c 			if (!mem) {
mem              2578 sound/soc/codecs/wm_adsp.c 				reg = dsp->ops->region_to_reg(mem, reg);
mem              3558 sound/soc/codecs/wm_adsp.c 	struct wm_adsp_region const *mem = wm_adsp_find_region(dsp, mem_type);
mem              3562 sound/soc/codecs/wm_adsp.c 	if (!mem)
mem              3565 sound/soc/codecs/wm_adsp.c 	reg = dsp->ops->region_to_reg(mem, mem_addr);
mem              3587 sound/soc/codecs/wm_adsp.c 	struct wm_adsp_region const *mem = wm_adsp_find_region(dsp, mem_type);
mem              3590 sound/soc/codecs/wm_adsp.c 	if (!mem)
mem              3593 sound/soc/codecs/wm_adsp.c 	reg = dsp->ops->region_to_reg(mem, mem_addr);
mem                81 sound/soc/codecs/wm_adsp.h 	const struct wm_adsp_region *mem;
mem               120 sound/soc/codecs/wm_adsp.h 	unsigned int (*region_to_reg)(struct wm_adsp_region const *mem,
mem               177 sound/soc/intel/common/sst-firmware.c static struct dw_dma_chip *dw_probe(struct device *dev, struct resource *mem,
mem               188 sound/soc/intel/common/sst-firmware.c 	chip->regs = devm_ioremap_resource(dev, mem);
mem               268 sound/soc/intel/common/sst-firmware.c 	struct resource mem;
mem               292 sound/soc/intel/common/sst-firmware.c 	memset(&mem, 0, sizeof(mem));
mem               294 sound/soc/intel/common/sst-firmware.c 	mem.start = sst->addr.lpe_base + sst_pdata->dma_base;
mem               295 sound/soc/intel/common/sst-firmware.c 	mem.end   = sst->addr.lpe_base + sst_pdata->dma_base + sst_pdata->dma_size - 1;
mem               296 sound/soc/intel/common/sst-firmware.c 	mem.flags = IORESOURCE_MEM;
mem               299 sound/soc/intel/common/sst-firmware.c 	dma->chip = dw_probe(sst->dma_dev, &mem, sst_pdata->irq);
mem                97 sound/soc/jz4740/jz4740-i2s.c 	struct resource *mem;
mem               498 sound/soc/jz4740/jz4740-i2s.c 	struct resource *mem;
mem               510 sound/soc/jz4740/jz4740-i2s.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               511 sound/soc/jz4740/jz4740-i2s.c 	i2s->base = devm_ioremap_resource(&pdev->dev, mem);
mem               515 sound/soc/jz4740/jz4740-i2s.c 	i2s->phys_base = mem->start;
mem                62 sound/soc/sh/rcar/dma.c static struct rsnd_mod mem = {
mem               722 sound/soc/sh/rcar/dma.c 			rsnd_mod_name(mod[i] ? mod[i] : &mem),
mem               789 sound/soc/sh/rcar/dma.c 		rsnd_mod_name(mod_from ? mod_from : &mem),
mem               790 sound/soc/sh/rcar/dma.c 		rsnd_mod_name(mod_to   ? mod_to   : &mem));
mem               848 sound/soc/sh/rcar/dma.c 	return rsnd_mod_init(NULL, &mem, &mem_ops, NULL, 0, 0);
mem               305 sound/soc/tegra/tegra20_ac97.c 	struct resource *mem;
mem               324 sound/soc/tegra/tegra20_ac97.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               325 sound/soc/tegra/tegra20_ac97.c 	regs = devm_ioremap_resource(&pdev->dev, mem);
mem               360 sound/soc/tegra/tegra20_ac97.c 	ac97->capture_dma_data.addr = mem->start + TEGRA20_AC97_FIFO_RX1;
mem               364 sound/soc/tegra/tegra20_ac97.c 	ac97->playback_dma_data.addr = mem->start + TEGRA20_AC97_FIFO_TX1;
mem               328 sound/soc/tegra/tegra20_i2s.c 	struct resource *mem;
mem               349 sound/soc/tegra/tegra20_i2s.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               350 sound/soc/tegra/tegra20_i2s.c 	regs = devm_ioremap_resource(&pdev->dev, mem);
mem               364 sound/soc/tegra/tegra20_i2s.c 	i2s->capture_dma_data.addr = mem->start + TEGRA20_I2S_FIFO2;
mem               368 sound/soc/tegra/tegra20_i2s.c 	i2s->playback_dma_data.addr = mem->start + TEGRA20_I2S_FIFO1;
mem               254 sound/soc/tegra/tegra20_spdif.c 	struct resource *mem, *dmareq;
mem               272 sound/soc/tegra/tegra20_spdif.c 	mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               273 sound/soc/tegra/tegra20_spdif.c 	regs = devm_ioremap_resource(&pdev->dev, mem);
mem               291 sound/soc/tegra/tegra20_spdif.c 	spdif->playback_dma_data.addr = mem->start + TEGRA20_SPDIF_DATA_OUT;
mem               650 sound/soc/ti/davinci-i2s.c 	struct resource *mem, *res;
mem               655 sound/soc/ti/davinci-i2s.c 	mem = platform_get_resource_byname(pdev, IORESOURCE_MEM, "mpu");
mem               656 sound/soc/ti/davinci-i2s.c 	if (!mem) {
mem               659 sound/soc/ti/davinci-i2s.c 		mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem               660 sound/soc/ti/davinci-i2s.c 		if (!mem) {
mem               666 sound/soc/ti/davinci-i2s.c 	io_base = devm_ioremap_resource(&pdev->dev, mem);
mem               679 sound/soc/ti/davinci-i2s.c 	dma_data->addr = (dma_addr_t)(mem->start + DAVINCI_MCBSP_DXR_REG);
mem               694 sound/soc/ti/davinci-i2s.c 	dma_data->addr = (dma_addr_t)(mem->start + DAVINCI_MCBSP_DRR_REG);
mem              2104 sound/soc/ti/davinci-mcasp.c 	struct resource *mem, *res, *dat;
mem              2128 sound/soc/ti/davinci-mcasp.c 	mem = platform_get_resource_byname(pdev, IORESOURCE_MEM, "mpu");
mem              2129 sound/soc/ti/davinci-mcasp.c 	if (!mem) {
mem              2132 sound/soc/ti/davinci-mcasp.c 		mem = platform_get_resource(pdev, IORESOURCE_MEM, 0);
mem              2133 sound/soc/ti/davinci-mcasp.c 		if (!mem) {
mem              2139 sound/soc/ti/davinci-mcasp.c 	mcasp->base = devm_ioremap_resource(&pdev->dev, mem);
mem              2246 sound/soc/ti/davinci-mcasp.c 		dma_data->addr = mem->start + davinci_mcasp_txdma_offset(pdata);
mem              2268 sound/soc/ti/davinci-mcasp.c 				mem->start + davinci_mcasp_rxdma_offset(pdata);
mem                37 tools/bpf/bpftool/map_perf_ring.c 	void *mem;
mem                40 tools/perf/builtin-mem.c 	struct perf_mem *mem = *(struct perf_mem **)opt->value;
mem                45 tools/perf/builtin-mem.c 			mem->operation = 0;
mem                71 tools/perf/builtin-mem.c static int __cmd_record(int argc, const char **argv, struct perf_mem *mem)
mem                78 tools/perf/builtin-mem.c 	OPT_CALLBACK('e', "event", &mem, "event",
mem                99 tools/perf/builtin-mem.c 	if (mem->operation & MEM_OPERATION_LOAD)
mem               102 tools/perf/builtin-mem.c 	if (mem->operation & MEM_OPERATION_STORE)
mem               110 tools/perf/builtin-mem.c 	if (mem->phys_addr)
mem               158 tools/perf/builtin-mem.c 	struct perf_mem *mem = container_of(tool, struct perf_mem, tool);
mem               168 tools/perf/builtin-mem.c 	if (al.filtered || (mem->hide_unresolved && al.sym == NULL))
mem               174 tools/perf/builtin-mem.c 	if (mem->phys_addr) {
mem               242 tools/perf/builtin-mem.c static int report_raw_events(struct perf_mem *mem)
mem               247 tools/perf/builtin-mem.c 		.force = mem->force,
mem               251 tools/perf/builtin-mem.c 							 &mem->tool);
mem               256 tools/perf/builtin-mem.c 	if (mem->cpu_list) {
mem               257 tools/perf/builtin-mem.c 		ret = perf_session__cpu_bitmap(session, mem->cpu_list,
mem               258 tools/perf/builtin-mem.c 					       mem->cpu_bitmap);
mem               267 tools/perf/builtin-mem.c 	if (mem->phys_addr)
mem               279 tools/perf/builtin-mem.c static int report_events(int argc, const char **argv, struct perf_mem *mem)
mem               284 tools/perf/builtin-mem.c 	if (mem->dump_raw)
mem               285 tools/perf/builtin-mem.c 		return report_raw_events(mem);
mem               300 tools/perf/builtin-mem.c 	if (!(mem->operation & MEM_OPERATION_LOAD)) {
mem               301 tools/perf/builtin-mem.c 		if (mem->phys_addr)
mem               307 tools/perf/builtin-mem.c 	} else if (mem->phys_addr)
mem               391 tools/perf/builtin-mem.c 	struct perf_mem mem = {
mem               410 tools/perf/builtin-mem.c 	OPT_CALLBACK('t', "type", &mem.operation,
mem               413 tools/perf/builtin-mem.c 	OPT_BOOLEAN('D', "dump-raw-samples", &mem.dump_raw,
mem               415 tools/perf/builtin-mem.c 	OPT_BOOLEAN('U', "hide-unresolved", &mem.hide_unresolved,
mem               419 tools/perf/builtin-mem.c 	OPT_STRING('C', "cpu", &mem.cpu_list, "cpu",
mem               425 tools/perf/builtin-mem.c 	OPT_BOOLEAN('f', "force", &mem.force, "don't complain, do it"),
mem               426 tools/perf/builtin-mem.c 	OPT_BOOLEAN('p', "phys-data", &mem.phys_addr, "Record/Report sample physical addresses"),
mem               443 tools/perf/builtin-mem.c 	if (!argc || !(strncmp(argv[0], "rec", 3) || mem.operation))
mem               446 tools/perf/builtin-mem.c 	if (!mem.input_name || !strlen(mem.input_name)) {
mem               448 tools/perf/builtin-mem.c 			mem.input_name = "-";
mem               450 tools/perf/builtin-mem.c 			mem.input_name = "perf.data";
mem               454 tools/perf/builtin-mem.c 		return __cmd_record(argc, argv, &mem);
mem               456 tools/perf/builtin-mem.c 		return report_events(argc, argv, &mem);
mem               411 tools/perf/util/cpumap.c 	unsigned int cpu, mem;
mem               437 tools/perf/util/cpumap.c 		if (dent1->d_type != DT_DIR || sscanf(dent1->d_name, "node%u", &mem) < 1)
mem               452 tools/perf/util/cpumap.c 			cpunode_map[cpu] = mem;
mem               241 tools/perf/util/cputopo.c 	u64 mem;
mem               255 tools/perf/util/cputopo.c 		if (sscanf(buf, "%*s %*d %31s %"PRIu64, field, &mem) != 2)
mem               258 tools/perf/util/cputopo.c 			node->mem_total = mem;
mem               260 tools/perf/util/cputopo.c 			node->mem_free = mem;
mem               649 tools/perf/util/header.c 	uint64_t mem;
mem               661 tools/perf/util/header.c 		n = sscanf(buf, "%*s %"PRIu64, &mem);
mem               663 tools/perf/util/header.c 			ret = do_write(ff, &mem, sizeof(mem));
mem              1210 tools/perf/util/header.c #define for_each_memory(mem, dir)					\
mem              1214 tools/perf/util/header.c 		    sscanf(ent->d_name, "memory%u", &mem) == 1)
mem              1442 tools/perf/util/header.c 			char *mem = argv_i;
mem              1452 tools/perf/util/header.c 			free(mem);
mem              1550 tools/perf/util/probe-finder.c 	GElf_Shdr mem, *shdr;
mem              1569 tools/perf/util/probe-finder.c 			shdr = gelf_getshdr(scn, &mem);
mem               532 tools/power/acpi/os_specific/service_layers/osunixxf.c 	void *mem;
mem               534 tools/power/acpi/os_specific/service_layers/osunixxf.c 	mem = (void *)malloc((size_t) size);
mem               535 tools/power/acpi/os_specific/service_layers/osunixxf.c 	return (mem);
mem               553 tools/power/acpi/os_specific/service_layers/osunixxf.c 	void *mem;
mem               555 tools/power/acpi/os_specific/service_layers/osunixxf.c 	mem = (void *)calloc(1, (size_t) size);
mem               556 tools/power/acpi/os_specific/service_layers/osunixxf.c 	return (mem);
mem               572 tools/power/acpi/os_specific/service_layers/osunixxf.c void acpi_os_free(void *mem)
mem               575 tools/power/acpi/os_specific/service_layers/osunixxf.c 	free(mem);
mem               154 tools/power/cpupower/debug/i386/dump_psb.c 	char *mem = NULL;
mem               184 tools/power/cpupower/debug/i386/dump_psb.c 	mem = mmap(mem, 0x100000 - 0xc0000, PROT_READ, MAP_SHARED, fd, 0xc0000);
mem               187 tools/power/cpupower/debug/i386/dump_psb.c 	for (p = mem; p - mem < LEN; p+=16) {
mem               194 tools/power/cpupower/debug/i386/dump_psb.c 	munmap(mem, LEN);
mem                 9 tools/testing/selftests/bpf/progs/test_xdp_meta.c #define ctx_ptr(ctx, mem) (void *)(unsigned long)ctx->mem
mem                28 tools/testing/selftests/drivers/dma-buf/udmabuf.c 	void *mem;
mem               301 tools/testing/selftests/net/tls.c 	void *mem = malloc(16384);
mem               310 tools/testing/selftests/net/tls.c 		struct iovec vec = { (void *)mem, send_len };
mem               318 tools/testing/selftests/net/tls.c 		EXPECT_NE(recv(self->fd, mem, send_len, 0), -1);
mem               320 tools/testing/selftests/net/tls.c 	free(mem);
mem               144 tools/usb/usbip/libsrc/names.c 	void *mem;
mem               157 tools/usb/usbip/libsrc/names.c 	p->mem = calloc(1, size);
mem               158 tools/usb/usbip/libsrc/names.c 	if (!p->mem) {
mem               166 tools/usb/usbip/libsrc/names.c 	return p->mem;
mem               179 tools/usb/usbip/libsrc/names.c 		if (pool->mem)
mem               180 tools/usb/usbip/libsrc/names.c 			free(pool->mem);
mem                43 tools/virtio/virtio_test.c 	struct vhost_memory *mem;
mem               126 tools/virtio/virtio_test.c 	dev->mem = malloc(offsetof(struct vhost_memory, regions) +
mem               127 tools/virtio/virtio_test.c 			  sizeof dev->mem->regions[0]);
mem               128 tools/virtio/virtio_test.c 	assert(dev->mem);
mem               129 tools/virtio/virtio_test.c 	memset(dev->mem, 0, offsetof(struct vhost_memory, regions) +
mem               130 tools/virtio/virtio_test.c                           sizeof dev->mem->regions[0]);
mem               131 tools/virtio/virtio_test.c 	dev->mem->nregions = 1;
mem               132 tools/virtio/virtio_test.c 	dev->mem->regions[0].guest_phys_addr = (long)dev->buf;
mem               133 tools/virtio/virtio_test.c 	dev->mem->regions[0].userspace_addr = (long)dev->buf;
mem               134 tools/virtio/virtio_test.c 	dev->mem->regions[0].memory_size = dev->buf_size;
mem               135 tools/virtio/virtio_test.c 	r = ioctl(dev->control, VHOST_SET_MEM_TABLE, dev->mem);
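tools/virtio/virtio_test.c, per the lines above, builds a single-region struct vhost_memory describing its buffer, with guest_phys_addr and userspace_addr both set to the buffer address, and hands it to the vhost device with the VHOST_SET_MEM_TABLE ioctl. A trimmed-down sketch of just that step; it assumes <linux/vhost.h> is installed and a vhost device node exists, and the /dev/vhost-net path is only an example.

/* Trimmed sketch of the VHOST_SET_MEM_TABLE step from virtio_test.c:
 * describe one memory region whose guest-physical and userspace addresses
 * are identical.  Device path and sizes are illustrative. */
#include <fcntl.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/vhost.h>

int main(void)
{
    size_t buf_size = 0x100000;
    void *buf = malloc(buf_size);
    struct vhost_memory *mem;
    int fd = open("/dev/vhost-net", O_RDWR);    /* example device node */

    if (fd < 0 || !buf) {
        perror("setup");
        return 1;
    }
    if (ioctl(fd, VHOST_SET_OWNER, NULL) < 0)
        perror("VHOST_SET_OWNER");

    mem = calloc(1, offsetof(struct vhost_memory, regions) +
                    sizeof(mem->regions[0]));
    if (!mem)
        return 1;
    mem->nregions = 1;
    mem->regions[0].guest_phys_addr = (unsigned long)buf;
    mem->regions[0].userspace_addr  = (unsigned long)buf;
    mem->regions[0].memory_size     = buf_size;

    if (ioctl(fd, VHOST_SET_MEM_TABLE, mem) < 0)
        perror("VHOST_SET_MEM_TABLE");

    free(mem);
    free(buf);
    close(fd);
    return 0;
}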
mem              2262 virt/kvm/arm/mmu.c 				   const struct kvm_userspace_memory_region *mem,
mem              2272 virt/kvm/arm/mmu.c 	if (change != KVM_MR_DELETE && mem->flags & KVM_MEM_LOG_DIRTY_PAGES)
mem              2273 virt/kvm/arm/mmu.c 		kvm_mmu_wp_memory_region(kvm, mem->slot);
mem              2278 virt/kvm/arm/mmu.c 				   const struct kvm_userspace_memory_region *mem,
mem              2281 virt/kvm/arm/mmu.c 	hva_t hva = mem->userspace_addr;
mem              2282 virt/kvm/arm/mmu.c 	hva_t reg_end = hva + mem->memory_size;
mem              2283 virt/kvm/arm/mmu.c 	bool writable = !(mem->flags & KVM_MEM_READONLY);
mem              2334 virt/kvm/arm/mmu.c 			gpa_t gpa = mem->guest_phys_addr +
mem              2335 virt/kvm/arm/mmu.c 				    (vm_start - mem->userspace_addr);
mem              2361 virt/kvm/arm/mmu.c 		unmap_stage2_range(kvm, mem->guest_phys_addr, mem->memory_size);
mem               933 virt/kvm/kvm_main.c static int check_memory_region_flags(const struct kvm_userspace_memory_region *mem)
mem               941 virt/kvm/kvm_main.c 	if (mem->flags & ~valid_flags)
mem               992 virt/kvm/kvm_main.c 			    const struct kvm_userspace_memory_region *mem)
mem              1003 virt/kvm/kvm_main.c 	r = check_memory_region_flags(mem);
mem              1008 virt/kvm/kvm_main.c 	as_id = mem->slot >> 16;
mem              1009 virt/kvm/kvm_main.c 	id = (u16)mem->slot;
mem              1012 virt/kvm/kvm_main.c 	if (mem->memory_size & (PAGE_SIZE - 1))
mem              1014 virt/kvm/kvm_main.c 	if (mem->guest_phys_addr & (PAGE_SIZE - 1))
mem              1018 virt/kvm/kvm_main.c 	    ((mem->userspace_addr & (PAGE_SIZE - 1)) ||
mem              1019 virt/kvm/kvm_main.c 	     !access_ok((void __user *)(unsigned long)mem->userspace_addr,
mem              1020 virt/kvm/kvm_main.c 			mem->memory_size)))
mem              1024 virt/kvm/kvm_main.c 	if (mem->guest_phys_addr + mem->memory_size < mem->guest_phys_addr)
mem              1028 virt/kvm/kvm_main.c 	base_gfn = mem->guest_phys_addr >> PAGE_SHIFT;
mem              1029 virt/kvm/kvm_main.c 	npages = mem->memory_size >> PAGE_SHIFT;
mem              1039 virt/kvm/kvm_main.c 	new.flags = mem->flags;
mem              1045 virt/kvm/kvm_main.c 			if ((mem->userspace_addr != old.userspace_addr) ||
mem              1086 virt/kvm/kvm_main.c 		new.userspace_addr = mem->userspace_addr;
mem              1126 virt/kvm/kvm_main.c 	r = kvm_arch_prepare_memory_region(kvm, &new, mem, change);
mem              1139 virt/kvm/kvm_main.c 	kvm_arch_commit_memory_region(kvm, mem, &old, &new, change);
mem              1155 virt/kvm/kvm_main.c 			  const struct kvm_userspace_memory_region *mem)
mem              1160 virt/kvm/kvm_main.c 	r = __kvm_set_memory_region(kvm, mem);
mem              1167 virt/kvm/kvm_main.c 					  struct kvm_userspace_memory_region *mem)
mem              1169 virt/kvm/kvm_main.c 	if ((u16)mem->slot >= KVM_USER_MEM_SLOTS)
mem              1172 virt/kvm/kvm_main.c 	return kvm_set_memory_region(kvm, mem);
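__kvm_set_memory_region(), listed above, validates the userspace request: only known flags, memory_size and guest_phys_addr page-aligned, userspace_addr page-aligned and accessible, the slot field carrying an address-space id in its upper 16 bits, and a gpa range that must not wrap. The corresponding userspace call is the KVM_SET_USER_MEMORY_REGION ioctl on a VM fd; a minimal sketch, assuming /dev/kvm and <linux/kvm.h> are available:

/* Minimal userspace sketch of registering a memory slot, the request that
 * __kvm_set_memory_region() validates: page-aligned size, guest physical
 * address and host address. */
#include <fcntl.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <linux/kvm.h>

int main(void)
{
    int kvm = open("/dev/kvm", O_RDWR);
    int vm, ret;
    void *host_mem;
    struct kvm_userspace_memory_region region = {
        .slot = 0,
        .flags = 0,
        .guest_phys_addr = 0x100000,    /* page aligned */
        .memory_size = 0x10000,         /* page aligned */
    };

    if (kvm < 0)
        return 1;
    vm = ioctl(kvm, KVM_CREATE_VM, 0);
    if (vm < 0)
        return 1;
    host_mem = mmap(NULL, region.memory_size, PROT_READ | PROT_WRITE,
                    MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);    /* page aligned by mmap */
    if (host_mem == MAP_FAILED)
        return 1;
    region.userspace_addr = (unsigned long)host_mem;

    ret = ioctl(vm, KVM_SET_USER_MEMORY_REGION, &region);
    if (ret < 0)
        perror("KVM_SET_USER_MEMORY_REGION");

    munmap(host_mem, region.memory_size);
    close(vm);
    close(kvm);
    return ret < 0 ? 1 : 0;
}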