mr                 72 arch/arm/mach-omap2/sdrc.h 	u32 mr;
mr                 49 arch/mips/generic/yamon-dt.c 	const struct yamon_mem_region *mr;
mr                 53 arch/mips/generic/yamon-dt.c 	for (mr = regions; mr->size && memsize; ++mr) {
mr                 61 arch/mips/generic/yamon-dt.c 		size = min_t(unsigned long, memsize, mr->size);
mr                 65 arch/mips/generic/yamon-dt.c 		*(mem_array++) = cpu_to_be32(mr->start);
mr                 70 arch/mips/generic/yamon-dt.c 		memsize -= min_t(unsigned long, memsize, mr->discard);
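
The yamon-dt.c hits above walk an array of YAMON memory regions, clamp each one to the remaining reported memory size, emit (start, size) cells and subtract a per-region discard. A standalone sketch of that loop, assuming a hypothetical region struct and plain 32-bit output in place of the kernel's min_t()/cpu_to_be32() helpers; the real function's exact bookkeeping is not reproduced:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for the kernel's struct yamon_mem_region. */
struct mem_region {
	unsigned long start;
	unsigned long size;
	unsigned long discard;	/* bytes to subtract after this region */
};

static unsigned long min_ul(unsigned long a, unsigned long b)
{
	return a < b ? a : b;
}

/* Emit (start, size) cell pairs until the reported memsize is consumed. */
static unsigned int fill_mem_cells(const struct mem_region *regions,
				   unsigned long memsize, uint32_t *out)
{
	const struct mem_region *mr;
	unsigned int n = 0;

	for (mr = regions; mr->size && memsize; ++mr) {
		unsigned long size = min_ul(memsize, mr->size);

		out[n++] = (uint32_t)mr->start;
		out[n++] = (uint32_t)size;
		memsize -= size;
		memsize -= min_ul(memsize, mr->discard);
	}
	return n;
}

int main(void)
{
	static const struct mem_region regions[] = {
		{ 0x00000000, 0x10000000, 0x1000 },
		{ 0x20000000, 0x10000000, 0 },
		{ 0, 0, 0 },
	};
	uint32_t cells[8];
	unsigned int i, n = fill_mem_cells(regions, 0x18000000, cells);

	for (i = 0; i < n; i += 2)
		printf("start=0x%08x size=0x%08x\n", cells[i], cells[i + 1]);
	return 0;
}
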
mr                 78 arch/mips/include/asm/txx9/tx4939.h 		__u64 mr;
mr                 56 arch/powerpc/kernel/head_booke.h 	mr	r11, r1;						     \
mr                 77 arch/powerpc/kernel/head_booke.h 	mr	r1, r11;						     \
mr                 99 arch/powerpc/kernel/head_booke.h 	mr	r12, r13
mr                118 arch/powerpc/kernel/head_booke.h 	mr	r1, r11
mr                255 arch/powerpc/kernel/head_booke.h 1:	mr	r11, r8;							     \
mr                268 arch/powerpc/kernel/head_booke.h 	mr	r1,r11;							     \
mr                477 arch/powerpc/kernel/head_booke.h 	mr      r4,r12;                 /* Pass SRR0 as arg2 */		      \
mr               1289 arch/powerpc/kvm/mpic.c static void add_mmio_region(struct openpic *opp, const struct mem_reg *mr)
mr               1296 arch/powerpc/kvm/mpic.c 	opp->mmio_regions[opp->num_mmio_regions++] = mr;
mr               1345 arch/powerpc/kvm/mpic.c 		const struct mem_reg *mr = opp->mmio_regions[i];
mr               1347 arch/powerpc/kvm/mpic.c 		if (mr->start_addr > addr || addr >= mr->start_addr + mr->size)
mr               1350 arch/powerpc/kvm/mpic.c 		return mr->read(opp, addr - mr->start_addr, ptr);
mr               1361 arch/powerpc/kvm/mpic.c 		const struct mem_reg *mr = opp->mmio_regions[i];
mr               1363 arch/powerpc/kvm/mpic.c 		if (mr->start_addr > addr || addr >= mr->start_addr + mr->size)
mr               1366 arch/powerpc/kvm/mpic.c 		return mr->write(opp, addr - mr->start_addr, val);
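
The mpic.c hits register mem_reg descriptors in an array and dispatch MMIO accesses by testing whether the address falls inside each region, then calling that region's handler with the offset relative to its base. A minimal userspace sketch of the same dispatch pattern; all types and the timer handler are hypothetical, not the KVM MPIC code:

#include <stdint.h>
#include <stdio.h>

struct dev;	/* opaque device state */

/* Hypothetical equivalent of the mem_reg descriptors seen above. */
struct mem_reg {
	int (*read)(struct dev *d, uint32_t offset, uint32_t *val);
	uint32_t start_addr;
	uint32_t size;
};

#define MAX_REGIONS 8

struct dev_mmio {
	const struct mem_reg *regions[MAX_REGIONS];
	int num_regions;
};

static void add_mmio_region(struct dev_mmio *m, const struct mem_reg *mr)
{
	if (m->num_regions < MAX_REGIONS)
		m->regions[m->num_regions++] = mr;
}

/* Walk the registered regions and hand the access to the first match. */
static int mmio_read(struct dev_mmio *m, struct dev *d,
		     uint32_t addr, uint32_t *val)
{
	for (int i = 0; i < m->num_regions; i++) {
		const struct mem_reg *mr = m->regions[i];

		if (mr->start_addr > addr || addr >= mr->start_addr + mr->size)
			continue;

		return mr->read(d, addr - mr->start_addr, val);
	}
	return -1;	/* no region claimed the address */
}

static int timer_read(struct dev *d, uint32_t offset, uint32_t *val)
{
	(void)d;
	*val = 0x1000 + offset;
	return 0;
}

int main(void)
{
	static const struct mem_reg timer = { timer_read, 0x1100, 0x100 };
	struct dev_mmio m = { 0 };
	uint32_t v;

	add_mmio_region(&m, &timer);
	if (!mmio_read(&m, NULL, 0x1120, &v))
		printf("read 0x%x\n", v);
	return 0;
}
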
mr               1159 arch/s390/include/asm/pgtable.h 			       bool nq, bool mr, bool mc);
mr               1009 arch/s390/kvm/priv.c 	bool mr = false, mc = false, nq;
mr               1035 arch/s390/kvm/priv.c 		mr = vcpu->run->s.regs.gprs[reg1] & PFMF_MR;
mr               1091 arch/s390/kvm/priv.c 							key, NULL, nq, mr, mc);
mr                828 arch/s390/mm/pgtable.c 			       bool nq, bool mr, bool mc)
mr                834 arch/s390/mm/pgtable.c 	if (mr | mc) {
mr                840 arch/s390/mm/pgtable.c 		if (!mr)
mr                733 arch/x86/include/asm/uv/uv_bau.h static inline void write_mmr_sw_ack(unsigned long mr)
mr                735 arch/x86/include/asm/uv/uv_bau.h 	uv_write_local_mmr(UVH_LB_BAU_INTD_SOFTWARE_ACKNOWLEDGE_ALIAS, mr);
mr                738 arch/x86/include/asm/uv/uv_bau.h static inline void write_gmmr_sw_ack(int pnode, unsigned long mr)
mr                740 arch/x86/include/asm/uv/uv_bau.h 	write_gmmr(pnode, UVH_LB_BAU_INTD_SOFTWARE_ACKNOWLEDGE_ALIAS, mr);
mr                753 arch/x86/include/asm/uv/uv_bau.h static inline void write_mmr_proc_sw_ack(unsigned long mr)
mr                755 arch/x86/include/asm/uv/uv_bau.h 	uv_write_local_mmr(UV4H_LB_PROC_INTD_SOFT_ACK_CLEAR, mr);
mr                758 arch/x86/include/asm/uv/uv_bau.h static inline void write_gmmr_proc_sw_ack(int pnode, unsigned long mr)
mr                760 arch/x86/include/asm/uv/uv_bau.h 	write_gmmr(pnode, UV4H_LB_PROC_INTD_SOFT_ACK_CLEAR, mr);
mr                773 arch/x86/include/asm/uv/uv_bau.h static inline void write_mmr_data_config(int pnode, unsigned long mr)
mr                775 arch/x86/include/asm/uv/uv_bau.h 	uv_write_global_mmr64(pnode, UVH_BAU_DATA_CONFIG, mr);
mr                174 arch/x86/kernel/tboot.c 	struct tboot_mac_region *mr;
mr                181 arch/x86/kernel/tboot.c 		mr = &tboot->mac_regions[tboot->num_mac_regions++];
mr                182 arch/x86/kernel/tboot.c 		mr->start = round_down(start, PAGE_SIZE);
mr                183 arch/x86/kernel/tboot.c 		mr->size  = round_up(end, PAGE_SIZE) - mr->start;
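
The tboot.c hits record a MAC region by rounding its start down and its end up to page boundaries. A tiny sketch of that arithmetic; the round_down()/round_up() macros below are simplified power-of-two-only stand-ins for the kernel's versions:

#include <stdio.h>

#define PAGE_SIZE		4096UL
#define round_down(x, y)	((x) & ~((y) - 1))
#define round_up(x, y)		round_down((x) + (y) - 1, (y))

struct mac_region {
	unsigned long start;
	unsigned long size;
};

int main(void)
{
	unsigned long start = 0x100234, end = 0x102010;
	struct mac_region mr;

	mr.start = round_down(start, PAGE_SIZE);
	mr.size  = round_up(end, PAGE_SIZE) - mr.start;
	printf("start=0x%lx size=0x%lx\n", mr.start, mr.size);	/* 0x100000, 0x3000 */
	return 0;
}
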
mr                261 arch/x86/mm/init.c static int __meminit save_mr(struct map_range *mr, int nr_range,
mr                268 arch/x86/mm/init.c 		mr[nr_range].start = start_pfn<<PAGE_SHIFT;
mr                269 arch/x86/mm/init.c 		mr[nr_range].end   = end_pfn<<PAGE_SHIFT;
mr                270 arch/x86/mm/init.c 		mr[nr_range].page_size_mask = page_size_mask;
mr                281 arch/x86/mm/init.c static void __ref adjust_range_page_size_mask(struct map_range *mr,
mr                288 arch/x86/mm/init.c 		    !(mr[i].page_size_mask & (1<<PG_LEVEL_2M))) {
mr                289 arch/x86/mm/init.c 			unsigned long start = round_down(mr[i].start, PMD_SIZE);
mr                290 arch/x86/mm/init.c 			unsigned long end = round_up(mr[i].end, PMD_SIZE);
mr                298 arch/x86/mm/init.c 				mr[i].page_size_mask |= 1<<PG_LEVEL_2M;
mr                301 arch/x86/mm/init.c 		    !(mr[i].page_size_mask & (1<<PG_LEVEL_1G))) {
mr                302 arch/x86/mm/init.c 			unsigned long start = round_down(mr[i].start, PUD_SIZE);
mr                303 arch/x86/mm/init.c 			unsigned long end = round_up(mr[i].end, PUD_SIZE);
mr                306 arch/x86/mm/init.c 				mr[i].page_size_mask |= 1<<PG_LEVEL_1G;
mr                311 arch/x86/mm/init.c static const char *page_size_string(struct map_range *mr)
mr                318 arch/x86/mm/init.c 	if (mr->page_size_mask & (1<<PG_LEVEL_1G))
mr                327 arch/x86/mm/init.c 	    mr->page_size_mask & (1<<PG_LEVEL_2M))
mr                330 arch/x86/mm/init.c 	if (mr->page_size_mask & (1<<PG_LEVEL_2M))
mr                336 arch/x86/mm/init.c static int __meminit split_mem_range(struct map_range *mr, int nr_range,
mr                365 arch/x86/mm/init.c 		nr_range = save_mr(mr, nr_range, start_pfn, end_pfn, 0);
mr                380 arch/x86/mm/init.c 		nr_range = save_mr(mr, nr_range, start_pfn, end_pfn,
mr                390 arch/x86/mm/init.c 		nr_range = save_mr(mr, nr_range, start_pfn, end_pfn,
mr                400 arch/x86/mm/init.c 		nr_range = save_mr(mr, nr_range, start_pfn, end_pfn,
mr                409 arch/x86/mm/init.c 	nr_range = save_mr(mr, nr_range, start_pfn, end_pfn, 0);
mr                412 arch/x86/mm/init.c 		adjust_range_page_size_mask(mr, nr_range);
mr                417 arch/x86/mm/init.c 		if (mr[i].end != mr[i+1].start ||
mr                418 arch/x86/mm/init.c 		    mr[i].page_size_mask != mr[i+1].page_size_mask)
mr                421 arch/x86/mm/init.c 		old_start = mr[i].start;
mr                422 arch/x86/mm/init.c 		memmove(&mr[i], &mr[i+1],
mr                424 arch/x86/mm/init.c 		mr[i--].start = old_start;
mr                430 arch/x86/mm/init.c 				mr[i].start, mr[i].end - 1,
mr                431 arch/x86/mm/init.c 				page_size_string(&mr[i]));
mr                472 arch/x86/mm/init.c 	struct map_range mr[NR_RANGE_MR];
mr                479 arch/x86/mm/init.c 	memset(mr, 0, sizeof(mr));
mr                480 arch/x86/mm/init.c 	nr_range = split_mem_range(mr, 0, start, end);
mr                483 arch/x86/mm/init.c 		ret = kernel_physical_mapping_init(mr[i].start, mr[i].end,
mr                484 arch/x86/mm/init.c 						   mr[i].page_size_mask);
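
The mm/init.c hits belong to the map_range machinery: split_mem_range() fills an mr[] array through save_mr(), adjust_range_page_size_mask() widens entries to 2M/1G page sizes where alignment allows, adjacent entries with the same mask are merged with memmove(), and init_memory_mapping() then maps each remaining range. A condensed userspace sketch of the save-and-merge part, with a hypothetical struct and an arbitrary mask encoding:

#include <stdio.h>
#include <string.h>

#define NR_RANGE_MR 5

/* Hypothetical stand-in for the kernel's struct map_range. */
struct map_range {
	unsigned long start;
	unsigned long end;
	unsigned int page_size_mask;
};

static int save_mr(struct map_range *mr, int nr_range,
		   unsigned long start, unsigned long end, unsigned int mask)
{
	if (start < end) {
		mr[nr_range].start = start;
		mr[nr_range].end = end;
		mr[nr_range].page_size_mask = mask;
		nr_range++;
	}
	return nr_range;
}

/* Merge neighbours that are contiguous and share a page size mask. */
static int merge_ranges(struct map_range *mr, int nr_range)
{
	for (int i = 0; nr_range > 1 && i < nr_range - 1; i++) {
		unsigned long old_start;

		if (mr[i].end != mr[i + 1].start ||
		    mr[i].page_size_mask != mr[i + 1].page_size_mask)
			continue;

		old_start = mr[i].start;
		memmove(&mr[i], &mr[i + 1],
			(nr_range - 1 - i) * sizeof(struct map_range));
		mr[i--].start = old_start;
		nr_range--;
	}
	return nr_range;
}

int main(void)
{
	struct map_range mr[NR_RANGE_MR];
	int i, n = 0;

	memset(mr, 0, sizeof(mr));
	n = save_mr(mr, n, 0x0, 0x200000, 0);
	n = save_mr(mr, n, 0x200000, 0x40000000, 0);	/* same mask: merges */
	n = save_mr(mr, n, 0x40000000, 0x80000000, 1);

	n = merge_ranges(mr, n);
	for (i = 0; i < n; i++)
		printf("[%d] 0x%lx-0x%lx mask=%u\n", i,
		       mr[i].start, mr[i].end, mr[i].page_size_mask);
	return 0;
}
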
mr                247 arch/x86/platform/efi/quirks.c 	struct efi_mem_range mr;
mr                267 arch/x86/platform/efi/quirks.c 	mr.range.start = addr;
mr                268 arch/x86/platform/efi/quirks.c 	mr.range.end = addr + size - 1;
mr                269 arch/x86/platform/efi/quirks.c 	mr.attribute = md.attribute | EFI_MEMORY_RUNTIME;
mr                271 arch/x86/platform/efi/quirks.c 	num_entries = efi_memmap_split_count(&md, &mr.range);
mr                288 arch/x86/platform/efi/quirks.c 	efi_memmap_insert(&efi.memmap, new, &mr);
mr                261 arch/x86/platform/uv/tlb_uv.c 				unsigned long mr;
mr                269 arch/x86/platform/uv/tlb_uv.c 				mr = (msg_res << UV_SW_ACK_NPENDING) | msg_res;
mr                270 arch/x86/platform/uv/tlb_uv.c 				ops.write_l_sw_ack(mr);
mr                395 arch/x86/platform/uv/tlb_uv.c 			unsigned long mr;
mr                405 arch/x86/platform/uv/tlb_uv.c 			mr = (msg_res << UV_SW_ACK_NPENDING) | msg_res;
mr                408 arch/x86/platform/uv/tlb_uv.c 				ops.write_l_sw_ack(mr);
mr               1628 drivers/crypto/atmel-aes.c 	u32 isr, mr;
mr               1655 drivers/crypto/atmel-aes.c 		mr = atmel_aes_read(dd, AES_MR);
mr               1656 drivers/crypto/atmel-aes.c 		mr &= ~(AES_MR_SMOD_MASK | AES_MR_DUALBUFF);
mr               1657 drivers/crypto/atmel-aes.c 		mr |= AES_MR_SMOD_IDATAR0;
mr               1659 drivers/crypto/atmel-aes.c 			mr |= AES_MR_DUALBUFF;
mr               1660 drivers/crypto/atmel-aes.c 		atmel_aes_write(dd, AES_MR, mr);
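
The atmel-aes.c hits are a standard read-modify-write of a peripheral mode register (MR): read it, clear the fields being changed, OR in the new mode bits, write it back. A generic sketch of that idiom with hypothetical register accessors and bit definitions:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical MR field definitions, loosely modelled on the hits above. */
#define MR_SMOD_MASK	(0x3u << 0)
#define MR_SMOD_IDATAR0	(0x2u << 0)
#define MR_DUALBUFF	(1u << 3)

static uint32_t fake_mr = 0x1;	/* stands in for the memory-mapped register */

static uint32_t reg_read(void)        { return fake_mr; }
static void     reg_write(uint32_t v) { fake_mr = v; }

/* Switch the block to "start on IDATAR0 write" mode, optionally dual-buffered. */
static void set_start_mode(int dualbuff)
{
	uint32_t mr = reg_read();

	mr &= ~(MR_SMOD_MASK | MR_DUALBUFF);	/* clear the fields we own */
	mr |= MR_SMOD_IDATAR0;
	if (dualbuff)
		mr |= MR_DUALBUFF;
	reg_write(mr);
}

int main(void)
{
	set_start_mode(1);
	printf("MR = 0x%x\n", reg_read());
	return 0;
}
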
mr               1658 drivers/crypto/atmel-sha.c 	u32 mr = SHA_MR_MODE_AUTO;
mr               1663 drivers/crypto/atmel-sha.c 	mr |= (ctx->flags & SHA_FLAGS_ALGO_MASK);
mr               1664 drivers/crypto/atmel-sha.c 	atmel_sha_write(dd, SHA_MR, mr);
mr               1963 drivers/crypto/atmel-sha.c 	u32 mr;
mr               1976 drivers/crypto/atmel-sha.c 	mr = SHA_MR_MODE_AUTO | SHA_MR_UIHV;
mr               1977 drivers/crypto/atmel-sha.c 	mr |= (ctx->flags & SHA_FLAGS_ALGO_MASK);
mr               1978 drivers/crypto/atmel-sha.c 	atmel_sha_write(dd, SHA_MR, mr);
mr               2020 drivers/crypto/atmel-sha.c 	u32 mr;
mr               2041 drivers/crypto/atmel-sha.c 	mr = (SHA_MR_HMAC | SHA_MR_DUALBUFF);
mr               2042 drivers/crypto/atmel-sha.c 	mr |= ctx->flags & SHA_FLAGS_ALGO_MASK;
mr               2044 drivers/crypto/atmel-sha.c 		mr |= SHA_MR_MODE_IDATAR0;
mr               2046 drivers/crypto/atmel-sha.c 		mr |= SHA_MR_MODE_AUTO;
mr               2047 drivers/crypto/atmel-sha.c 	atmel_sha_write(dd, SHA_MR, mr);
mr               2426 drivers/crypto/atmel-sha.c 	u32 mr, msg_size;
mr               2436 drivers/crypto/atmel-sha.c 	mr = (SHA_MR_MODE_IDATAR0 |
mr               2439 drivers/crypto/atmel-sha.c 	mr |= ctx->flags & SHA_FLAGS_ALGO_MASK;
mr               2440 drivers/crypto/atmel-sha.c 	atmel_sha_write(dd, SHA_MR, mr);
mr                 61 drivers/dma/fsldma.c 	FSL_DMA_OUT(chan, &chan->regs->mr, val, 32);
mr                 66 drivers/dma/fsldma.c 	return FSL_DMA_IN(chan, &chan->regs->mr, 32);
mr               1339 drivers/dma/fsldma.c 		chan->regs_save.mr = get_mr(chan);
mr               1369 drivers/dma/fsldma.c 		mode = chan->regs_save.mr
mr                108 drivers/dma/fsldma.h 	u32 mr;		/* 0x00 - Mode Register */
mr                142 drivers/dma/fsldma.h 	u32 mr;
mr                133 drivers/gpu/drm/nouveau/include/nvkm/subdev/fb.h 	u32 mr[16];
mr                 89 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr3.c 		DLL = !(ram->mr[1] & 0x1);
mr                 90 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr3.c 		RON = !((ram->mr[1] & 0x300) >> 8);
mr                 98 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr3.c 		ODT =  (ram->mr[1] & 0xc) >> 2;
mr                101 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr3.c 	hi = ram->mr[2] & 0x1;
mr                107 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr3.c 	ram->mr[0] &= ~0xf74;
mr                108 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr3.c 	ram->mr[0] |= (CWL & 0x07) << 9;
mr                109 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr3.c 	ram->mr[0] |= (CL & 0x07) << 4;
mr                110 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr3.c 	ram->mr[0] |= (CL & 0x08) >> 1;
mr                112 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr3.c 	ram->mr[1] &= ~0x3fc;
mr                113 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr3.c 	ram->mr[1] |= (ODT & 0x03) << 2;
mr                114 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr3.c 	ram->mr[1] |= (RON & 0x03) << 8;
mr                115 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr3.c 	ram->mr[1] |= (WR  & 0x03) << 4;
mr                116 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr3.c 	ram->mr[1] |= (WR  & 0x04) << 5;
mr                117 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr3.c 	ram->mr[1] |= !DLL << 6;
mr                 75 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[0] &= ~0xf7f;
mr                 76 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[0] |= (WR & 0x0f) << 8;
mr                 77 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[0] |= (CL & 0x0f) << 3;
mr                 78 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[0] |= (WL & 0x07) << 0;
mr                 80 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[1] &= ~0x0bf;
mr                 81 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[1] |= (xd & 0x01) << 7;
mr                 82 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[1] |= (at[0] & 0x03) << 4;
mr                 83 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[1] |= (dt & 0x03) << 2;
mr                 84 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[1] |= (ds & 0x03) << 0;
mr                 89 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr1_nuts = ram->mr[1];
mr                 91 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 		ram->mr[1] &= ~0x030;
mr                 92 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 		ram->mr[1] |= (at[1] & 0x03) << 4;
mr                 95 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[3] &= ~0x020;
mr                 96 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[3] |= (rq & 0x01) << 5;
mr                 98 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[5] &= ~0x004;
mr                 99 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[5] |= (l3 << 2);
mr                102 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 		vo = (ram->mr[6] & 0xff0) >> 4;
mr                103 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	if (ram->mr[6] & 0x001)
mr                105 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[6] &= ~0xff1;
mr                106 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[6] |= (vo & 0xff) << 4;
mr                107 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[6] |= (pd & 0x01) << 0;
mr                110 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 		ram->mr[7] &= ~0x300;
mr                111 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 		ram->mr[7] |= (vr & 0x03) << 8;
mr                113 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[7] &= ~0x088;
mr                114 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[7] |= (vh & 0x01) << 7;
mr                115 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[7] |= (lf & 0x01) << 3;
mr                117 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[8] &= ~0x003;
mr                118 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[8] |= (WR & 0x10) >> 3;
mr                119 drivers/gpu/drm/nouveau/nvkm/subdev/fb/gddr5.c 	ram->mr[8] |= (CL & 0x10) >> 4;
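
The nouveau sddr/gddr hits pack DRAM timing parameters (CL, WR, WL, ODT, ...) into the cached mode-register words in ram->mr[] by clearing each field and OR-ing in the new value, with wide fields split across registers (e.g. bit 4 of CL spilling into MR8). A reduced sketch of that mask-and-shift packing; the field layout is illustrative, loosely following the GDDR5 hits:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical cached mode-register image. */
struct ram_mr {
	uint32_t mr[16];
};

static void pack_timings(struct ram_mr *ram, unsigned CL, unsigned WR,
			 unsigned WL)
{
	ram->mr[0] &= ~0xf7f;
	ram->mr[0] |= (WR & 0x0f) << 8;
	ram->mr[0] |= (CL & 0x0f) << 3;
	ram->mr[0] |= (WL & 0x07) << 0;

	/* 5-bit CL/WR don't fit in MR0; their top bits spill into MR8. */
	ram->mr[8] &= ~0x003;
	ram->mr[8] |= (WR & 0x10) >> 3;
	ram->mr[8] |= (CL & 0x10) >> 4;
}

int main(void)
{
	struct ram_mr ram = { { 0 } };

	pack_timings(&ram, 0x13, 0x0a, 0x5);	/* CL=19 needs the MR8 bit */
	printf("MR0=0x%03x MR8=0x%03x\n",
	       (unsigned)ram.mr[0], (unsigned)ram.mr[8]);
	return 0;
}
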
mr                267 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	if ((ram->base.mr[1] & 0x03c) != 0x030) {
mr                268 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 		ram_mask(fuc, mr[1], 0x03c, ram->base.mr[1] & 0x03c);
mr                269 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 		ram_nuts(ram, mr[1], 0x03c, ram->base.mr1_nuts & 0x03c, 0x000);
mr                592 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	ram_mask(fuc, mr[3], 0xfff, ram->base.mr[3]);
mr                593 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	ram_wr32(fuc, mr[0], ram->base.mr[0]);
mr                594 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	ram_mask(fuc, mr[8], 0xfff, ram->base.mr[8]);
mr                596 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	ram_mask(fuc, mr[1], 0xfff, ram->base.mr[1]);
mr                597 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	ram_mask(fuc, mr[5], 0xfff, ram->base.mr[5] & ~0x004); /* LP3 later */
mr                598 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	ram_mask(fuc, mr[6], 0xfff, ram->base.mr[6]);
mr                599 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	ram_mask(fuc, mr[7], 0xfff, ram->base.mr[7]);
mr                651 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	if (ram_mask(fuc, mr[5], 0x004, ram->base.mr[5]) != ram->base.mr[5])
mr                683 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	ram_nuke(fuc, mr[0]);
mr                684 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	ram_mask(fuc, mr[0], 0x100, 0x100);
mr                685 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	ram_mask(fuc, mr[0], 0x100, 0x000);
mr                691 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	u32 mr1_old = ram_rd32(fuc, mr[1]);
mr                694 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 		ram_mask(fuc, mr[1], 0x1, 0x1);
mr                912 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 		ram_mask(fuc, mr[1], 0x1, 0x0);
mr                916 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	ram_mask(fuc, mr[2], 0x00000fff, ram->base.mr[2]);
mr                917 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	ram_mask(fuc, mr[1], 0xffffffff, ram->base.mr[1]);
mr                918 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 	ram_wr32(fuc, mr[0], ram->base.mr[0]);
mr               1087 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 		if (ram_have(fuc, mr[i]))
mr               1088 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c 			ram->base.mr[i] = ram_rd32(fuc, mr[i]);
mr                426 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c 	ram_mask(fuc, mr[0], 0x100, 0x100);
mr                428 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c 	ram_mask(fuc, mr[0], 0x100, 0x000);
mr                433 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c nvkm_sddr3_dll_disable(struct gt215_ramfuc *fuc, u32 *mr)
mr                435 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c 	u32 mr1_old = ram_rd32(fuc, mr[1]);
mr                439 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c 		ram_wr32(fuc, mr[1], mr[1]);
mr                445 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c nvkm_gddr3_dll_disable(struct gt215_ramfuc *fuc, u32 *mr)
mr                447 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c 	u32 mr1_old = ram_rd32(fuc, mr[1]);
mr                450 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c 		ram_wr32(fuc, mr[1], mr[1]);
mr                564 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c 	ram->base.mr[0] = ram_rd32(fuc, mr[0]);
mr                565 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c 	ram->base.mr[1] = ram_rd32(fuc, mr[1]);
mr                566 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c 	ram->base.mr[2] = ram_rd32(fuc, mr[2]);
mr                639 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c 		nvkm_sddr3_dll_disable(fuc, ram->base.mr);
mr                642 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c 		nvkm_gddr3_dll_disable(fuc, ram->base.mr);
mr                749 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c 		if (ram_rd32(fuc, mr[i]) != ram->base.mr[i]) {
mr                750 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c 			ram_wr32(fuc, mr[i], ram->base.mr[i]);
mr                186 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv50.c 	ram_mask(hwsq, mr[0], 0x100, 0x100);
mr                187 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv50.c 	ram_mask(hwsq, mr[0], 0x100, 0x000);
mr                287 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv50.c 	ram->base.mr[0] = ram_rd32(hwsq, mr[0]);
mr                288 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv50.c 	ram->base.mr[1] = ram_rd32(hwsq, mr[1]);
mr                289 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv50.c 	ram->base.mr[2] = ram_rd32(hwsq, mr[2]);
mr                377 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv50.c 		ram_nuke(hwsq, mr[0]); /* force update */
mr                378 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv50.c 		ram_mask(hwsq, mr[0], 0x000, 0x000);
mr                381 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv50.c 		ram_nuke(hwsq, mr[1]); /* force update */
mr                382 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv50.c 		ram_wr32(hwsq, mr[1], ram->base.mr[1]);
mr                383 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv50.c 		ram_nuke(hwsq, mr[0]); /* force update */
mr                384 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv50.c 		ram_wr32(hwsq, mr[0], ram->base.mr[0]);
mr                454 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv50.c 	ram_mask(hwsq, mr[1], 0xffffffff, ram->base.mr[1]);
mr                 82 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr2.c 		ODT =  (ram->mr[1] & 0x004) >> 2 |
mr                 83 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr2.c 		       (ram->mr[1] & 0x040) >> 5;
mr                 91 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr2.c 	ram->mr[0] &= ~0xf70;
mr                 92 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr2.c 	ram->mr[0] |= (WR & 0x07) << 9;
mr                 93 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr2.c 	ram->mr[0] |= (CL & 0x07) << 4;
mr                 95 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr2.c 	ram->mr[1] &= ~0x045;
mr                 96 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr2.c 	ram->mr[1] |= (ODT & 0x1) << 2;
mr                 97 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr2.c 	ram->mr[1] |= (ODT & 0x2) << 5;
mr                 98 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr2.c 	ram->mr[1] |= !DLL;
mr                 92 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c 		ODT =   (ram->mr[1] & 0x004) >> 2 |
mr                 93 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c 			(ram->mr[1] & 0x040) >> 5 |
mr                 94 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c 		        (ram->mr[1] & 0x200) >> 7;
mr                106 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c 	ram->mr[0] &= ~0xf74;
mr                107 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c 	ram->mr[0] |= (WR & 0x07) << 9;
mr                108 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c 	ram->mr[0] |= (CL & 0x0e) << 3;
mr                109 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c 	ram->mr[0] |= (CL & 0x01) << 2;
mr                111 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c 	ram->mr[1] &= ~0x245;
mr                112 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c 	ram->mr[1] |= (ODT & 0x1) << 2;
mr                113 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c 	ram->mr[1] |= (ODT & 0x2) << 5;
mr                114 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c 	ram->mr[1] |= (ODT & 0x4) << 7;
mr                115 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c 	ram->mr[1] |= !DLL;
mr                117 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c 	ram->mr[2] &= ~0x038;
mr                118 drivers/gpu/drm/nouveau/nvkm/subdev/fb/sddr3.c 	ram->mr[2] |= (CWL & 0x07) << 3;
mr               1160 drivers/iio/adc/at91-sama5d2_adc.c 	unsigned f_per, prescal, startup, mr;
mr               1168 drivers/iio/adc/at91-sama5d2_adc.c 	mr = at91_adc_readl(st, AT91_SAMA5D2_MR);
mr               1169 drivers/iio/adc/at91-sama5d2_adc.c 	mr &= ~(AT91_SAMA5D2_MR_STARTUP_MASK | AT91_SAMA5D2_MR_PRESCAL_MASK);
mr               1170 drivers/iio/adc/at91-sama5d2_adc.c 	mr |= AT91_SAMA5D2_MR_STARTUP(startup);
mr               1171 drivers/iio/adc/at91-sama5d2_adc.c 	mr |= AT91_SAMA5D2_MR_PRESCAL(prescal);
mr               1172 drivers/iio/adc/at91-sama5d2_adc.c 	at91_adc_writel(st, AT91_SAMA5D2_MR, mr);
mr                 10 drivers/infiniband/core/mr_pool.c 	struct ib_mr *mr;
mr                 14 drivers/infiniband/core/mr_pool.c 	mr = list_first_entry_or_null(list, struct ib_mr, qp_entry);
mr                 15 drivers/infiniband/core/mr_pool.c 	if (mr) {
mr                 16 drivers/infiniband/core/mr_pool.c 		list_del(&mr->qp_entry);
mr                 21 drivers/infiniband/core/mr_pool.c 	return mr;
mr                 25 drivers/infiniband/core/mr_pool.c void ib_mr_pool_put(struct ib_qp *qp, struct list_head *list, struct ib_mr *mr)
mr                 30 drivers/infiniband/core/mr_pool.c 	list_add(&mr->qp_entry, list);
mr                 39 drivers/infiniband/core/mr_pool.c 	struct ib_mr *mr;
mr                 45 drivers/infiniband/core/mr_pool.c 			mr = ib_alloc_mr_integrity(qp->pd, max_num_sg,
mr                 48 drivers/infiniband/core/mr_pool.c 			mr = ib_alloc_mr(qp->pd, type, max_num_sg);
mr                 49 drivers/infiniband/core/mr_pool.c 		if (IS_ERR(mr)) {
mr                 50 drivers/infiniband/core/mr_pool.c 			ret = PTR_ERR(mr);
mr                 55 drivers/infiniband/core/mr_pool.c 		list_add_tail(&mr->qp_entry, list);
mr                 68 drivers/infiniband/core/mr_pool.c 	struct ib_mr *mr;
mr                 73 drivers/infiniband/core/mr_pool.c 		mr = list_first_entry(list, struct ib_mr, qp_entry);
mr                 74 drivers/infiniband/core/mr_pool.c 		list_del(&mr->qp_entry);
mr                 77 drivers/infiniband/core/mr_pool.c 		ib_dereg_mr(mr);
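
The mr_pool.c hits keep pre-allocated MRs on a per-QP list under a lock: get pops the first entry or returns NULL, put pushes it back, and destroy walks the list deregistering each MR. A userspace toy version of the same free-list discipline, with a mutex and a made-up resource type standing in for struct ib_mr:

#include <pthread.h>
#include <stdio.h>

/* Toy resource standing in for struct ib_mr. */
struct res {
	struct res *next;
	int id;
};

struct res_pool {
	pthread_mutex_t lock;
	struct res *free_list;
};

/* Pop one pre-allocated resource, or NULL if the pool is empty. */
static struct res *pool_get(struct res_pool *p)
{
	struct res *r;

	pthread_mutex_lock(&p->lock);
	r = p->free_list;
	if (r)
		p->free_list = r->next;
	pthread_mutex_unlock(&p->lock);
	return r;
}

/* Return a resource to the pool for reuse. */
static void pool_put(struct res_pool *p, struct res *r)
{
	pthread_mutex_lock(&p->lock);
	r->next = p->free_list;
	p->free_list = r;
	pthread_mutex_unlock(&p->lock);
}

int main(void)
{
	static struct res_pool pool = { PTHREAD_MUTEX_INITIALIZER, NULL };
	struct res a = { NULL, 1 }, b = { NULL, 2 };
	struct res *r;

	pool_put(&pool, &a);
	pool_put(&pool, &b);
	r = pool_get(&pool);
	printf("got id %d\n", r ? r->id : -1);
	pool_put(&pool, r);
	return 0;
}
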
mr                581 drivers/infiniband/core/nldev.c 	struct ib_mr *mr = container_of(res, struct ib_mr, res);
mr                582 drivers/infiniband/core/nldev.c 	struct ib_device *dev = mr->pd->device;
mr                585 drivers/infiniband/core/nldev.c 		if (nla_put_u32(msg, RDMA_NLDEV_ATTR_RES_RKEY, mr->rkey))
mr                587 drivers/infiniband/core/nldev.c 		if (nla_put_u32(msg, RDMA_NLDEV_ATTR_RES_LKEY, mr->lkey))
mr                591 drivers/infiniband/core/nldev.c 	if (nla_put_u64_64bit(msg, RDMA_NLDEV_ATTR_RES_MRLEN, mr->length,
mr                599 drivers/infiniband/core/nldev.c 	    nla_put_u32(msg, RDMA_NLDEV_ATTR_RES_PDN, mr->pd->res.id))
mr               1418 drivers/infiniband/core/nldev.c RES_GET_FUNCS(mr, RDMA_RESTRACK_MR);
mr                 72 drivers/infiniband/core/rw.c 	if (reg->mr->need_inval) {
mr                 74 drivers/infiniband/core/rw.c 		reg->inv_wr.ex.invalidate_rkey = reg->mr->lkey;
mr                 94 drivers/infiniband/core/rw.c 	reg->mr = ib_mr_pool_get(qp, &qp->rdma_mrs);
mr                 95 drivers/infiniband/core/rw.c 	if (!reg->mr)
mr                100 drivers/infiniband/core/rw.c 	ret = ib_map_mr_sg(reg->mr, sg, nents, &offset, PAGE_SIZE);
mr                102 drivers/infiniband/core/rw.c 		ib_mr_pool_put(qp, &qp->rdma_mrs, reg->mr);
mr                107 drivers/infiniband/core/rw.c 	reg->reg_wr.mr = reg->mr;
mr                113 drivers/infiniband/core/rw.c 	reg->sge.addr = reg->mr->iova;
mr                114 drivers/infiniband/core/rw.c 	reg->sge.length = reg->mr->length;
mr                145 drivers/infiniband/core/rw.c 			if (reg->mr->need_inval)
mr                163 drivers/infiniband/core/rw.c 			reg->wr.wr.ex.invalidate_rkey = reg->mr->lkey;
mr                183 drivers/infiniband/core/rw.c 		ib_mr_pool_put(qp, &qp->rdma_mrs, ctx->reg[i].mr);
mr                411 drivers/infiniband/core/rw.c 	ctx->reg->mr = ib_mr_pool_get(qp, &qp->sig_mrs);
mr                412 drivers/infiniband/core/rw.c 	if (!ctx->reg->mr) {
mr                419 drivers/infiniband/core/rw.c 	memcpy(ctx->reg->mr->sig_attrs, sig_attrs, sizeof(struct ib_sig_attrs));
mr                421 drivers/infiniband/core/rw.c 	ret = ib_map_mr_sg_pi(ctx->reg->mr, sg, sg_cnt, NULL, prot_sg,
mr                435 drivers/infiniband/core/rw.c 	ctx->reg->reg_wr.mr = ctx->reg->mr;
mr                436 drivers/infiniband/core/rw.c 	ctx->reg->reg_wr.key = ctx->reg->mr->lkey;
mr                439 drivers/infiniband/core/rw.c 	ctx->reg->sge.addr = ctx->reg->mr->iova;
mr                440 drivers/infiniband/core/rw.c 	ctx->reg->sge.length = ctx->reg->mr->length;
mr                442 drivers/infiniband/core/rw.c 		ctx->reg->sge.length -= ctx->reg->mr->sig_attrs->meta_length;
mr                459 drivers/infiniband/core/rw.c 	ib_mr_pool_put(qp, &qp->sig_mrs, ctx->reg->mr);
mr                479 drivers/infiniband/core/rw.c 	reg->mr->need_inval = need_inval;
mr                480 drivers/infiniband/core/rw.c 	ib_update_fast_reg_key(reg->mr, ib_inc_rkey(reg->mr->lkey));
mr                481 drivers/infiniband/core/rw.c 	reg->reg_wr.key = reg->mr->lkey;
mr                482 drivers/infiniband/core/rw.c 	reg->sge.lkey = reg->mr->lkey;
mr                585 drivers/infiniband/core/rw.c 			ib_mr_pool_put(qp, &qp->rdma_mrs, ctx->reg[i].mr);
mr                623 drivers/infiniband/core/rw.c 	ib_mr_pool_put(qp, &qp->sig_mrs, ctx->reg->mr);
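
The rw.c hits show the consumer side of fast registration: an MR is taken from the QP's rdma_mrs pool, the scatterlist is mapped with ib_map_mr_sg(), the key is bumped with ib_update_fast_reg_key()/ib_inc_rkey(), and an IB_WR_REG_MR work request is filled in. A kernel-style sketch of that sequence (it only builds in-tree); demo_fast_reg() is hypothetical, error handling is trimmed, and posting, invalidation and the completion path are left out:

#include <linux/errno.h>
#include <linux/string.h>
#include <rdma/ib_verbs.h>
#include <rdma/mr_pool.h>

/*
 * Illustrative only, not the rw.c code itself: take a pre-allocated MR from
 * the QP's rdma_mrs pool, map a scatterlist through it, and prepare an
 * IB_WR_REG_MR work request for posting by the caller.
 */
static int demo_fast_reg(struct ib_qp *qp, struct scatterlist *sg, int nents,
			 struct ib_reg_wr *reg_wr)
{
	struct ib_mr *mr;
	int n;

	mr = ib_mr_pool_get(qp, &qp->rdma_mrs);
	if (!mr)
		return -EAGAIN;	/* pool exhausted */

	n = ib_map_mr_sg(mr, sg, nents, NULL, PAGE_SIZE);
	if (n < nents) {
		ib_mr_pool_put(qp, &qp->rdma_mrs, mr);
		return n < 0 ? n : -EINVAL;
	}

	/* Bump the key so stale remote references cannot reuse the old one. */
	ib_update_fast_reg_key(mr, ib_inc_rkey(mr->lkey));

	memset(reg_wr, 0, sizeof(*reg_wr));
	reg_wr->wr.opcode = IB_WR_REG_MR;
	reg_wr->mr = mr;
	reg_wr->key = mr->lkey;
	reg_wr->access = IB_ACCESS_LOCAL_WRITE;	/* flags depend on the use */
	return 0;
}
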
mr                710 drivers/infiniband/core/uverbs_cmd.c 	struct ib_mr                *mr;
mr                744 drivers/infiniband/core/uverbs_cmd.c 	mr = pd->device->ops.reg_user_mr(pd, cmd.start, cmd.length, cmd.hca_va,
mr                747 drivers/infiniband/core/uverbs_cmd.c 	if (IS_ERR(mr)) {
mr                748 drivers/infiniband/core/uverbs_cmd.c 		ret = PTR_ERR(mr);
mr                752 drivers/infiniband/core/uverbs_cmd.c 	mr->device  = pd->device;
mr                753 drivers/infiniband/core/uverbs_cmd.c 	mr->pd      = pd;
mr                754 drivers/infiniband/core/uverbs_cmd.c 	mr->type    = IB_MR_TYPE_USER;
mr                755 drivers/infiniband/core/uverbs_cmd.c 	mr->dm	    = NULL;
mr                756 drivers/infiniband/core/uverbs_cmd.c 	mr->sig_attrs = NULL;
mr                757 drivers/infiniband/core/uverbs_cmd.c 	mr->uobject = uobj;
mr                759 drivers/infiniband/core/uverbs_cmd.c 	mr->res.type = RDMA_RESTRACK_MR;
mr                760 drivers/infiniband/core/uverbs_cmd.c 	rdma_restrack_uadd(&mr->res);
mr                762 drivers/infiniband/core/uverbs_cmd.c 	uobj->object = mr;
mr                765 drivers/infiniband/core/uverbs_cmd.c 	resp.lkey      = mr->lkey;
mr                766 drivers/infiniband/core/uverbs_cmd.c 	resp.rkey      = mr->rkey;
mr                778 drivers/infiniband/core/uverbs_cmd.c 	ib_dereg_mr_user(mr, uverbs_get_cleared_udata(attrs));
mr                793 drivers/infiniband/core/uverbs_cmd.c 	struct ib_mr                *mr;
mr                814 drivers/infiniband/core/uverbs_cmd.c 	mr = uobj->object;
mr                816 drivers/infiniband/core/uverbs_cmd.c 	if (mr->dm) {
mr                836 drivers/infiniband/core/uverbs_cmd.c 	old_pd = mr->pd;
mr                837 drivers/infiniband/core/uverbs_cmd.c 	ret = mr->device->ops.rereg_user_mr(mr, cmd.flags, cmd.start,
mr                846 drivers/infiniband/core/uverbs_cmd.c 		mr->pd = pd;
mr                851 drivers/infiniband/core/uverbs_cmd.c 	resp.lkey      = mr->lkey;
mr                852 drivers/infiniband/core/uverbs_cmd.c 	resp.rkey      = mr->rkey;
mr                 93 drivers/infiniband/core/uverbs_std_types_mr.c 	struct ib_mr *mr;
mr                125 drivers/infiniband/core/uverbs_std_types_mr.c 	mr = pd->device->ops.reg_dm_mr(pd, dm, &attr, attrs);
mr                126 drivers/infiniband/core/uverbs_std_types_mr.c 	if (IS_ERR(mr))
mr                127 drivers/infiniband/core/uverbs_std_types_mr.c 		return PTR_ERR(mr);
mr                129 drivers/infiniband/core/uverbs_std_types_mr.c 	mr->device  = pd->device;
mr                130 drivers/infiniband/core/uverbs_std_types_mr.c 	mr->pd      = pd;
mr                131 drivers/infiniband/core/uverbs_std_types_mr.c 	mr->type    = IB_MR_TYPE_DM;
mr                132 drivers/infiniband/core/uverbs_std_types_mr.c 	mr->dm      = dm;
mr                133 drivers/infiniband/core/uverbs_std_types_mr.c 	mr->uobject = uobj;
mr                137 drivers/infiniband/core/uverbs_std_types_mr.c 	uobj->object = mr;
mr                139 drivers/infiniband/core/uverbs_std_types_mr.c 	ret = uverbs_copy_to(attrs, UVERBS_ATTR_REG_DM_MR_RESP_LKEY, &mr->lkey,
mr                140 drivers/infiniband/core/uverbs_std_types_mr.c 			     sizeof(mr->lkey));
mr                145 drivers/infiniband/core/uverbs_std_types_mr.c 			     &mr->rkey, sizeof(mr->rkey));
mr                152 drivers/infiniband/core/uverbs_std_types_mr.c 	ib_dereg_mr_user(mr, uverbs_get_cleared_udata(attrs));
mr                292 drivers/infiniband/core/verbs.c 		struct ib_mr *mr;
mr                294 drivers/infiniband/core/verbs.c 		mr = pd->device->ops.get_dma_mr(pd, mr_access_flags);
mr                295 drivers/infiniband/core/verbs.c 		if (IS_ERR(mr)) {
mr                297 drivers/infiniband/core/verbs.c 			return ERR_CAST(mr);
mr                300 drivers/infiniband/core/verbs.c 		mr->device	= pd->device;
mr                301 drivers/infiniband/core/verbs.c 		mr->pd		= pd;
mr                302 drivers/infiniband/core/verbs.c 		mr->type        = IB_MR_TYPE_DMA;
mr                303 drivers/infiniband/core/verbs.c 		mr->uobject	= NULL;
mr                304 drivers/infiniband/core/verbs.c 		mr->need_inval	= false;
mr                306 drivers/infiniband/core/verbs.c 		pd->__internal_mr = mr;
mr               1981 drivers/infiniband/core/verbs.c int ib_dereg_mr_user(struct ib_mr *mr, struct ib_udata *udata)
mr               1983 drivers/infiniband/core/verbs.c 	struct ib_pd *pd = mr->pd;
mr               1984 drivers/infiniband/core/verbs.c 	struct ib_dm *dm = mr->dm;
mr               1985 drivers/infiniband/core/verbs.c 	struct ib_sig_attrs *sig_attrs = mr->sig_attrs;
mr               1988 drivers/infiniband/core/verbs.c 	rdma_restrack_del(&mr->res);
mr               1989 drivers/infiniband/core/verbs.c 	ret = mr->device->ops.dereg_mr(mr, udata);
mr               2017 drivers/infiniband/core/verbs.c 	struct ib_mr *mr;
mr               2025 drivers/infiniband/core/verbs.c 	mr = pd->device->ops.alloc_mr(pd, mr_type, max_num_sg, udata);
mr               2026 drivers/infiniband/core/verbs.c 	if (!IS_ERR(mr)) {
mr               2027 drivers/infiniband/core/verbs.c 		mr->device  = pd->device;
mr               2028 drivers/infiniband/core/verbs.c 		mr->pd      = pd;
mr               2029 drivers/infiniband/core/verbs.c 		mr->dm      = NULL;
mr               2030 drivers/infiniband/core/verbs.c 		mr->uobject = NULL;
mr               2032 drivers/infiniband/core/verbs.c 		mr->need_inval = false;
mr               2033 drivers/infiniband/core/verbs.c 		mr->res.type = RDMA_RESTRACK_MR;
mr               2034 drivers/infiniband/core/verbs.c 		rdma_restrack_kadd(&mr->res);
mr               2035 drivers/infiniband/core/verbs.c 		mr->type = mr_type;
mr               2036 drivers/infiniband/core/verbs.c 		mr->sig_attrs = NULL;
mr               2039 drivers/infiniband/core/verbs.c 	return mr;
mr               2059 drivers/infiniband/core/verbs.c 	struct ib_mr *mr;
mr               2073 drivers/infiniband/core/verbs.c 	mr = pd->device->ops.alloc_mr_integrity(pd, max_num_data_sg,
mr               2075 drivers/infiniband/core/verbs.c 	if (IS_ERR(mr)) {
mr               2077 drivers/infiniband/core/verbs.c 		return mr;
mr               2080 drivers/infiniband/core/verbs.c 	mr->device = pd->device;
mr               2081 drivers/infiniband/core/verbs.c 	mr->pd = pd;
mr               2082 drivers/infiniband/core/verbs.c 	mr->dm = NULL;
mr               2083 drivers/infiniband/core/verbs.c 	mr->uobject = NULL;
mr               2085 drivers/infiniband/core/verbs.c 	mr->need_inval = false;
mr               2086 drivers/infiniband/core/verbs.c 	mr->res.type = RDMA_RESTRACK_MR;
mr               2087 drivers/infiniband/core/verbs.c 	rdma_restrack_kadd(&mr->res);
mr               2088 drivers/infiniband/core/verbs.c 	mr->type = IB_MR_TYPE_INTEGRITY;
mr               2089 drivers/infiniband/core/verbs.c 	mr->sig_attrs = sig_attrs;
mr               2091 drivers/infiniband/core/verbs.c 	return mr;
mr               2402 drivers/infiniband/core/verbs.c int ib_check_mr_status(struct ib_mr *mr, u32 check_mask,
mr               2405 drivers/infiniband/core/verbs.c 	if (!mr->device->ops.check_mr_status)
mr               2408 drivers/infiniband/core/verbs.c 	return mr->device->ops.check_mr_status(mr, check_mask, mr_status);
mr               2472 drivers/infiniband/core/verbs.c int ib_map_mr_sg_pi(struct ib_mr *mr, struct scatterlist *data_sg,
mr               2477 drivers/infiniband/core/verbs.c 	if (unlikely(!mr->device->ops.map_mr_sg_pi ||
mr               2478 drivers/infiniband/core/verbs.c 		     WARN_ON_ONCE(mr->type != IB_MR_TYPE_INTEGRITY)))
mr               2481 drivers/infiniband/core/verbs.c 	mr->page_size = page_size;
mr               2483 drivers/infiniband/core/verbs.c 	return mr->device->ops.map_mr_sg_pi(mr, data_sg, data_sg_nents,
mr               2514 drivers/infiniband/core/verbs.c int ib_map_mr_sg(struct ib_mr *mr, struct scatterlist *sg, int sg_nents,
mr               2517 drivers/infiniband/core/verbs.c 	if (unlikely(!mr->device->ops.map_mr_sg))
mr               2520 drivers/infiniband/core/verbs.c 	mr->page_size = page_size;
mr               2522 drivers/infiniband/core/verbs.c 	return mr->device->ops.map_mr_sg(mr, sg, sg_nents, sg_offset);
mr               2546 drivers/infiniband/core/verbs.c int ib_sg_to_pages(struct ib_mr *mr, struct scatterlist *sgl, int sg_nents,
mr               2553 drivers/infiniband/core/verbs.c 	u64 page_mask = ~((u64)mr->page_size - 1);
mr               2559 drivers/infiniband/core/verbs.c 	mr->iova = sg_dma_address(&sgl[0]) + sg_offset;
mr               2560 drivers/infiniband/core/verbs.c 	mr->length = 0;
mr               2588 drivers/infiniband/core/verbs.c 			ret = set_page(mr, page_addr);
mr               2591 drivers/infiniband/core/verbs.c 				mr->length += prev_addr - dma_addr;
mr               2598 drivers/infiniband/core/verbs.c 			page_addr += mr->page_size;
mr               2601 drivers/infiniband/core/verbs.c 		mr->length += dma_len;
mr                408 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	struct ib_mr *ib_mr = &fence->mr->ib_mr;
mr                462 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	struct bnxt_re_mr *mr = fence->mr;
mr                468 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	if (mr) {
mr                469 drivers/infiniband/hw/bnxt_re/ib_verbs.c 		if (mr->ib_mr.rkey)
mr                470 drivers/infiniband/hw/bnxt_re/ib_verbs.c 			bnxt_qplib_dereg_mrw(&rdev->qplib_res, &mr->qplib_mr,
mr                472 drivers/infiniband/hw/bnxt_re/ib_verbs.c 		if (mr->ib_mr.lkey)
mr                473 drivers/infiniband/hw/bnxt_re/ib_verbs.c 			bnxt_qplib_free_mrw(&rdev->qplib_res, &mr->qplib_mr);
mr                474 drivers/infiniband/hw/bnxt_re/ib_verbs.c 		kfree(mr);
mr                475 drivers/infiniband/hw/bnxt_re/ib_verbs.c 		fence->mr = NULL;
mr                490 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	struct bnxt_re_mr *mr = NULL;
mr                508 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr                509 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	if (!mr) {
mr                513 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	fence->mr = mr;
mr                514 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->rdev = rdev;
mr                515 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.pd = &pd->qplib_pd;
mr                516 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.type = CMDQ_ALLOCATE_MRW_MRW_FLAGS_PMR;
mr                517 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.flags = __from_ib_access_flags(mr_access_flags);
mr                518 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	rc = bnxt_qplib_alloc_mrw(&rdev->qplib_res, &mr->qplib_mr);
mr                525 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->ib_mr.lkey = mr->qplib_mr.lkey;
mr                526 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.va = (u64)(unsigned long)fence->va;
mr                527 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.total_size = BNXT_RE_FENCE_BYTES;
mr                529 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	rc = bnxt_qplib_reg_mr(&rdev->qplib_res, &mr->qplib_mr, &pbl_tbl,
mr                535 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->ib_mr.rkey = mr->qplib_mr.rkey;
mr               2175 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	struct bnxt_re_mr *mr = container_of(wr->mr, struct bnxt_re_mr, ib_mr);
mr               2176 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	struct bnxt_qplib_frpl *qplib_frpl = &mr->qplib_frpl;
mr               2181 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	wqe->frmr.page_list = mr->pages;
mr               2182 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	wqe->frmr.page_list_len = mr->npages;
mr               2207 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	wqe->frmr.length = wr->mr->length;
mr               2208 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	wqe->frmr.pbl_pg_sz_log = (wr->mr->page_size >> PAGE_SHIFT_4K) - 1;
mr               2209 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	wqe->frmr.va = wr->mr->iova;
mr               3279 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	struct bnxt_re_mr *mr;
mr               3283 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr               3284 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	if (!mr)
mr               3287 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->rdev = rdev;
mr               3288 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.pd = &pd->qplib_pd;
mr               3289 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.flags = __from_ib_access_flags(mr_access_flags);
mr               3290 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.type = CMDQ_ALLOCATE_MRW_MRW_FLAGS_PMR;
mr               3293 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	rc = bnxt_qplib_alloc_mrw(&rdev->qplib_res, &mr->qplib_mr);
mr               3297 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.hwq.level = PBL_LVL_MAX;
mr               3298 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.total_size = -1; /* Infinte length */
mr               3299 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	rc = bnxt_qplib_reg_mr(&rdev->qplib_res, &mr->qplib_mr, &pbl, 0, false,
mr               3304 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->ib_mr.lkey = mr->qplib_mr.lkey;
mr               3307 drivers/infiniband/hw/bnxt_re/ib_verbs.c 		mr->ib_mr.rkey = mr->ib_mr.lkey;
mr               3310 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	return &mr->ib_mr;
mr               3313 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	bnxt_qplib_free_mrw(&rdev->qplib_res, &mr->qplib_mr);
mr               3315 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	kfree(mr);
mr               3321 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	struct bnxt_re_mr *mr = container_of(ib_mr, struct bnxt_re_mr, ib_mr);
mr               3322 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	struct bnxt_re_dev *rdev = mr->rdev;
mr               3325 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	rc = bnxt_qplib_free_mrw(&rdev->qplib_res, &mr->qplib_mr);
mr               3331 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	if (mr->pages) {
mr               3333 drivers/infiniband/hw/bnxt_re/ib_verbs.c 							&mr->qplib_frpl);
mr               3334 drivers/infiniband/hw/bnxt_re/ib_verbs.c 		kfree(mr->pages);
mr               3335 drivers/infiniband/hw/bnxt_re/ib_verbs.c 		mr->npages = 0;
mr               3336 drivers/infiniband/hw/bnxt_re/ib_verbs.c 		mr->pages = NULL;
mr               3338 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	ib_umem_release(mr->ib_umem);
mr               3340 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	kfree(mr);
mr               3347 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	struct bnxt_re_mr *mr = container_of(ib_mr, struct bnxt_re_mr, ib_mr);
mr               3349 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	if (unlikely(mr->npages == mr->qplib_frpl.max_pg_ptrs))
mr               3352 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->pages[mr->npages++] = addr;
mr               3359 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	struct bnxt_re_mr *mr = container_of(ib_mr, struct bnxt_re_mr, ib_mr);
mr               3361 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->npages = 0;
mr               3370 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	struct bnxt_re_mr *mr = NULL;
mr               3380 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr               3381 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	if (!mr)
mr               3384 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->rdev = rdev;
mr               3385 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.pd = &pd->qplib_pd;
mr               3386 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.flags = BNXT_QPLIB_FR_PMR;
mr               3387 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.type = CMDQ_ALLOCATE_MRW_MRW_FLAGS_PMR;
mr               3389 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	rc = bnxt_qplib_alloc_mrw(&rdev->qplib_res, &mr->qplib_mr);
mr               3393 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->ib_mr.lkey = mr->qplib_mr.lkey;
mr               3394 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->ib_mr.rkey = mr->ib_mr.lkey;
mr               3396 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->pages = kcalloc(max_num_sg, sizeof(u64), GFP_KERNEL);
mr               3397 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	if (!mr->pages) {
mr               3402 drivers/infiniband/hw/bnxt_re/ib_verbs.c 						 &mr->qplib_frpl, max_num_sg);
mr               3410 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	return &mr->ib_mr;
mr               3413 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	kfree(mr->pages);
mr               3415 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	bnxt_qplib_free_mrw(&rdev->qplib_res, &mr->qplib_mr);
mr               3417 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	kfree(mr);
mr               3507 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	struct bnxt_re_mr *mr;
mr               3518 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr               3519 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	if (!mr)
mr               3522 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->rdev = rdev;
mr               3523 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.pd = &pd->qplib_pd;
mr               3524 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.flags = __from_ib_access_flags(mr_access_flags);
mr               3525 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.type = CMDQ_ALLOCATE_MRW_MRW_FLAGS_MR;
mr               3527 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	rc = bnxt_qplib_alloc_mrw(&rdev->qplib_res, &mr->qplib_mr);
mr               3533 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->ib_mr.rkey = mr->qplib_mr.rkey;
mr               3541 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->ib_umem = umem;
mr               3543 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.va = virt_addr;
mr               3550 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->qplib_mr.total_size = length;
mr               3578 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	rc = bnxt_qplib_reg_mr(&rdev->qplib_res, &mr->qplib_mr, pbl_tbl,
mr               3587 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->ib_mr.lkey = mr->qplib_mr.lkey;
mr               3588 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	mr->ib_mr.rkey = mr->qplib_mr.lkey;
mr               3591 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	return &mr->ib_mr;
mr               3597 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	bnxt_qplib_free_mrw(&rdev->qplib_res, &mr->qplib_mr);
mr               3599 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	kfree(mr);
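
The bnxt_re hits above also show the driver half of ib_map_mr_sg(): a set_page() callback that appends each page address to a driver-owned array (failing once the array is full), and a map_mr_sg() method that resets the count before the core walks the scatterlist via ib_sg_to_pages(). A stripped-down, kernel-style sketch of that pairing; struct demo_mr and the demo_* functions are hypothetical, only ib_sg_to_pages() is a real core helper:

#include <linux/errno.h>
#include <rdma/ib_verbs.h>

/* Hypothetical driver MR wrapper, modelled loosely on bnxt_re_mr. */
struct demo_mr {
	struct ib_mr ib_mr;
	u64 *pages;
	u32 npages;
	u32 max_pages;
};

/* Called by ib_sg_to_pages() once per page-sized chunk of the SG list. */
static int demo_set_page(struct ib_mr *ib_mr, u64 addr)
{
	struct demo_mr *mr = container_of(ib_mr, struct demo_mr, ib_mr);

	if (unlikely(mr->npages == mr->max_pages))
		return -ENOMEM;

	mr->pages[mr->npages++] = addr;
	return 0;
}

/* ->map_mr_sg() method: reset the page array and let the core fill it. */
static int demo_map_mr_sg(struct ib_mr *ib_mr, struct scatterlist *sg,
			  int sg_nents, unsigned int *sg_offset)
{
	struct demo_mr *mr = container_of(ib_mr, struct demo_mr, ib_mr);

	mr->npages = 0;
	return ib_sg_to_pages(ib_mr, sg, sg_nents, sg_offset, demo_set_page);
}
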
mr                 52 drivers/infiniband/hw/bnxt_re/ib_verbs.h 	struct bnxt_re_mr *mr;
mr                204 drivers/infiniband/hw/bnxt_re/ib_verbs.h int bnxt_re_dereg_mr(struct ib_mr *mr, struct ib_udata *udata);
mr                655 drivers/infiniband/hw/bnxt_re/qplib_sp.c int bnxt_qplib_reg_mr(struct bnxt_qplib_res *res, struct bnxt_qplib_mrw *mr,
mr                682 drivers/infiniband/hw/bnxt_re/qplib_sp.c 		if (mr->hwq.max_elements)
mr                683 drivers/infiniband/hw/bnxt_re/qplib_sp.c 			bnxt_qplib_free_hwq(res->pdev, &mr->hwq);
mr                685 drivers/infiniband/hw/bnxt_re/qplib_sp.c 		mr->hwq.max_elements = pages;
mr                687 drivers/infiniband/hw/bnxt_re/qplib_sp.c 		rc = bnxt_qplib_alloc_init_hwq(res->pdev, &mr->hwq, NULL,
mr                688 drivers/infiniband/hw/bnxt_re/qplib_sp.c 					       &mr->hwq.max_elements,
mr                697 drivers/infiniband/hw/bnxt_re/qplib_sp.c 		pbl_ptr = (dma_addr_t **)mr->hwq.pbl_ptr;
mr                706 drivers/infiniband/hw/bnxt_re/qplib_sp.c 	if (mr->hwq.level == PBL_LVL_MAX) {
mr                712 drivers/infiniband/hw/bnxt_re/qplib_sp.c 		level = mr->hwq.level + 1;
mr                713 drivers/infiniband/hw/bnxt_re/qplib_sp.c 		req.pbl = cpu_to_le64(mr->hwq.pbl[PBL_LVL_0].pg_map_arr[0]);
mr                723 drivers/infiniband/hw/bnxt_re/qplib_sp.c 	req.access = (mr->flags & 0xFFFF);
mr                724 drivers/infiniband/hw/bnxt_re/qplib_sp.c 	req.va = cpu_to_le64(mr->va);
mr                725 drivers/infiniband/hw/bnxt_re/qplib_sp.c 	req.key = cpu_to_le32(mr->lkey);
mr                726 drivers/infiniband/hw/bnxt_re/qplib_sp.c 	req.mr_size = cpu_to_le64(mr->total_size);
mr                736 drivers/infiniband/hw/bnxt_re/qplib_sp.c 	if (mr->hwq.max_elements)
mr                737 drivers/infiniband/hw/bnxt_re/qplib_sp.c 		bnxt_qplib_free_hwq(res->pdev, &mr->hwq);
mr                257 drivers/infiniband/hw/bnxt_re/qplib_sp.h int bnxt_qplib_reg_mr(struct bnxt_qplib_res *res, struct bnxt_qplib_mrw *mr,
mr                259 drivers/infiniband/hw/bnxt_re/qplib_sp.h int bnxt_qplib_free_mrw(struct bnxt_qplib_res *res, struct bnxt_qplib_mrw *mr);
mr                261 drivers/infiniband/hw/bnxt_re/qplib_sp.h 				 struct bnxt_qplib_mrw *mr, int max);
mr                152 drivers/infiniband/hw/cxgb3/iwch_qp.c 	struct iwch_mr *mhp = to_iwch_mr(wr->mr);
mr                166 drivers/infiniband/hw/cxgb3/iwch_qp.c 		V_FR_PAGE_SIZE(ilog2(wr->mr->page_size) - 12) |
mr                804 drivers/infiniband/hw/cxgb4/qp.c 		FW_RI_TPTE_PS_V(ilog2(wr->mr->page_size) - 12));
mr                833 drivers/infiniband/hw/cxgb4/qp.c 	wqe->fr.pgsz_shift = ilog2(wr->mr->page_size) - 12;
mr               1192 drivers/infiniband/hw/cxgb4/qp.c 			struct c4iw_mr *mhp = to_c4iw_mr(reg_wr(wr)->mr);
mr               1340 drivers/infiniband/hw/efa/efa_verbs.c static int efa_create_inline_pbl(struct efa_dev *dev, struct efa_mr *mr,
mr               1346 drivers/infiniband/hw/efa/efa_verbs.c 	err = umem_to_page_list(dev, mr->umem, params->pbl.inline_pbl_array,
mr               1359 drivers/infiniband/hw/efa/efa_verbs.c 			  struct efa_mr *mr,
mr               1364 drivers/infiniband/hw/efa/efa_verbs.c 	err = pbl_create(dev, pbl, mr->umem, params->page_num,
mr               1400 drivers/infiniband/hw/efa/efa_verbs.c 	struct efa_mr *mr;
mr               1420 drivers/infiniband/hw/efa/efa_verbs.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr               1421 drivers/infiniband/hw/efa/efa_verbs.c 	if (!mr) {
mr               1426 drivers/infiniband/hw/efa/efa_verbs.c 	mr->umem = ib_umem_get(udata, start, length, access_flags, 0);
mr               1427 drivers/infiniband/hw/efa/efa_verbs.c 	if (IS_ERR(mr->umem)) {
mr               1428 drivers/infiniband/hw/efa/efa_verbs.c 		err = PTR_ERR(mr->umem);
mr               1439 drivers/infiniband/hw/efa/efa_verbs.c 	pg_sz = ib_umem_find_best_pgsz(mr->umem,
mr               1459 drivers/infiniband/hw/efa/efa_verbs.c 		err = efa_create_inline_pbl(dev, mr, &params);
mr               1467 drivers/infiniband/hw/efa/efa_verbs.c 		err = efa_create_pbl(dev, &pbl, mr, &params);
mr               1478 drivers/infiniband/hw/efa/efa_verbs.c 	mr->ibmr.lkey = result.l_key;
mr               1479 drivers/infiniband/hw/efa/efa_verbs.c 	mr->ibmr.rkey = result.r_key;
mr               1480 drivers/infiniband/hw/efa/efa_verbs.c 	mr->ibmr.length = length;
mr               1481 drivers/infiniband/hw/efa/efa_verbs.c 	ibdev_dbg(&dev->ibdev, "Registered mr[%d]\n", mr->ibmr.lkey);
mr               1483 drivers/infiniband/hw/efa/efa_verbs.c 	return &mr->ibmr;
mr               1486 drivers/infiniband/hw/efa/efa_verbs.c 	ib_umem_release(mr->umem);
mr               1488 drivers/infiniband/hw/efa/efa_verbs.c 	kfree(mr);
mr               1498 drivers/infiniband/hw/efa/efa_verbs.c 	struct efa_mr *mr = to_emr(ibmr);
mr               1503 drivers/infiniband/hw/efa/efa_verbs.c 	params.l_key = mr->ibmr.lkey;
mr               1508 drivers/infiniband/hw/efa/efa_verbs.c 	ib_umem_release(mr->umem);
mr               1509 drivers/infiniband/hw/efa/efa_verbs.c 	kfree(mr);
mr                188 drivers/infiniband/hw/hfi1/rc.c 			if (len && !e->rdma_sge.mr) {
mr                197 drivers/infiniband/hw/hfi1/rc.c 			ps->s_txreq->mr = e->rdma_sge.mr;
mr                198 drivers/infiniband/hw/hfi1/rc.c 			if (ps->s_txreq->mr)
mr                199 drivers/infiniband/hw/hfi1/rc.c 				rvt_get_mr(ps->s_txreq->mr);
mr                237 drivers/infiniband/hw/hfi1/rc.c 			if (len && !e->rdma_sge.mr) {
mr                246 drivers/infiniband/hw/hfi1/rc.c 			ps->s_txreq->mr = e->rdma_sge.mr;
mr                247 drivers/infiniband/hw/hfi1/rc.c 			if (ps->s_txreq->mr)
mr                248 drivers/infiniband/hw/hfi1/rc.c 				rvt_get_mr(ps->s_txreq->mr);
mr                273 drivers/infiniband/hw/hfi1/rc.c 		ps->s_txreq->mr = qp->s_ack_rdma_sge.sge.mr;
mr                274 drivers/infiniband/hw/hfi1/rc.c 		if (ps->s_txreq->mr)
mr                275 drivers/infiniband/hw/hfi1/rc.c 			rvt_get_mr(ps->s_txreq->mr);
mr               3036 drivers/infiniband/hw/hfi1/rc.c 			qp->r_sge.sge.mr = NULL;
mr               3094 drivers/infiniband/hw/hfi1/rc.c 			e->rdma_sge.mr = NULL;
mr               3172 drivers/infiniband/hw/hfi1/rc.c 		rvt_put_mr(qp->r_sge.sge.mr);
mr                 46 drivers/infiniband/hw/hfi1/rc.h 	if (e->rdma_sge.mr) {
mr                 47 drivers/infiniband/hw/hfi1/rc.h 		rvt_put_mr(e->rdma_sge.mr);
mr                 48 drivers/infiniband/hw/hfi1/rc.h 		e->rdma_sge.mr = NULL;
mr               1099 drivers/infiniband/hw/hfi1/tid_rdma.c 		} else if (sge->length == 0 && sge->mr->lkey) {
mr               1104 drivers/infiniband/hw/hfi1/tid_rdma.c 			sge->vaddr = sge->mr->map[sge->m]->segs[sge->n].vaddr;
mr               1105 drivers/infiniband/hw/hfi1/tid_rdma.c 			sge->length = sge->mr->map[sge->m]->segs[sge->n].length;
mr               1727 drivers/infiniband/hw/hfi1/tid_rdma.c 	wpriv->ss.sge.mr = NULL;
mr               3888 drivers/infiniband/hw/hfi1/tid_rdma.c 	epriv->ss.sge.mr = NULL;
mr                507 drivers/infiniband/hw/hfi1/uc.c 			qp->r_sge.sge.mr = NULL;
mr                 66 drivers/infiniband/hw/hfi1/verbs_txreq.c 	if (tx->mr)
mr                 67 drivers/infiniband/hw/hfi1/verbs_txreq.c 		rvt_put_mr(tx->mr);
mr                 63 drivers/infiniband/hw/hfi1/verbs_txreq.h 	struct rvt_mregion	*mr;
mr                 91 drivers/infiniband/hw/hfi1/verbs_txreq.h 	tx->mr = NULL;
mr                945 drivers/infiniband/hw/hns/hns_roce_device.h 	int (*write_mtpt)(void *mb_buf, struct hns_roce_mr *mr,
mr                948 drivers/infiniband/hw/hns/hns_roce_device.h 				struct hns_roce_mr *mr, int flags, u32 pdn,
mr                951 drivers/infiniband/hw/hns/hns_roce_device.h 	int (*frmr_write_mtpt)(void *mb_buf, struct hns_roce_mr *mr);
mr                975 drivers/infiniband/hw/hns/hns_roce_device.h 	int (*dereg_mr)(struct hns_roce_dev *hr_dev, struct hns_roce_mr *mr,
mr               1192 drivers/infiniband/hw/hns/hns_roce_device.h int hns_roce_rereg_user_mr(struct ib_mr *mr, int flags, u64 start, u64 length,
mr               1045 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	hr_mr = (struct hns_roce_mr *)mr_work->mr;
mr               1098 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 				struct hns_roce_mr *mr, struct ib_udata *udata)
mr               1113 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	if (mr->enabled) {
mr               1114 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 		if (hns_roce_hw2sw_mpt(hr_dev, NULL, key_to_hw_index(mr->key)
mr               1130 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	mr_work->mr = (void *)mr;
mr               1146 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	dev_warn(dev, "Free mr work 0x%x over 50s and failed!\n", mr->key);
mr               1151 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 		mr->key, jiffies_to_usecs(jiffies) - jiffies_to_usecs(start));
mr               1153 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	if (mr->size != ~0ULL) {
mr               1154 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 		npages = ib_umem_page_count(mr->umem);
mr               1155 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 		dma_free_coherent(dev, npages * 8, mr->pbl_buf,
mr               1156 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 				  mr->pbl_dma_addr);
mr               1160 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 			     key_to_hw_index(mr->key), 0);
mr               1162 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	ib_umem_release(mr->umem);
mr               1164 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	kfree(mr);
mr               1830 drivers/infiniband/hw/hns/hns_roce_hw_v1.c static int hns_roce_v1_write_mtpt(void *mb_buf, struct hns_roce_mr *mr,
mr               1845 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 		       MPT_BYTE_4_KEY_S, mr->key);
mr               1850 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 		     (mr->access & IB_ACCESS_MW_BIND ? 1 : 0));
mr               1853 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 		       MPT_BYTE_4_MEMORY_LOCATION_TYPE_S, mr->type);
mr               1856 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 		     (mr->access & IB_ACCESS_LOCAL_WRITE ? 1 : 0));
mr               1858 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 		     (mr->access & IB_ACCESS_REMOTE_WRITE ? 1 : 0));
mr               1860 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 		     (mr->access & IB_ACCESS_REMOTE_READ ? 1 : 0));
mr               1870 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	mpt_entry->virt_addr_l = cpu_to_le32((u32)mr->iova);
mr               1871 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	mpt_entry->virt_addr_h = cpu_to_le32((u32)(mr->iova >> 32));
mr               1872 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	mpt_entry->length = cpu_to_le32((u32)mr->size);
mr               1875 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 		       MPT_BYTE_28_PD_S, mr->pd);
mr               1882 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	if (mr->type == MR_TYPE_DMA)
mr               1890 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	for_each_sg_dma_page(mr->umem->sg_head.sgl, &sg_iter, mr->umem->nmap, 0) {
mr               1968 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	mpt_entry->pbl_addr_l = cpu_to_le32((u32)(mr->pbl_dma_addr));
mr               1972 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 		       ((u32)(mr->pbl_dma_addr >> 32)));
mr               1070 drivers/infiniband/hw/hns/hns_roce_hw_v1.h 	void	*mr;
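
A standalone sketch of the 64-bit-to-two-32-bit-words split seen in the v1 write_mtpt entries above (virt_addr_l/virt_addr_h, pbl_addr_l and the shifted high half). The field and helper names are invented for the example, and the kernel's cpu_to_le32() endian conversion is elided.

	#include <stdint.h>
	#include <stdio.h>

	struct mpt_words {
		uint32_t virt_addr_l;	/* low 32 bits of the MR's IOVA */
		uint32_t virt_addr_h;	/* high 32 bits of the MR's IOVA */
	};

	static void split_iova(struct mpt_words *w, uint64_t iova)
	{
		w->virt_addr_l = (uint32_t)iova;
		w->virt_addr_h = (uint32_t)(iova >> 32);
	}

	int main(void)
	{
		struct mpt_words w;

		split_iova(&w, 0x0000123480001000ULL);
		printf("low=0x%08x high=0x%08x\n", w.virt_addr_l, w.virt_addr_h);
		return 0;
	}
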
mr                 63 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	struct hns_roce_mr *mr = to_hr_mr(wr->mr);
mr                 83 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	rc_sq_wqe->msg_len = cpu_to_le32(mr->pbl_ba & 0xffffffff);
mr                 84 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	rc_sq_wqe->inv_key = cpu_to_le32(mr->pbl_ba >> 32);
mr                 86 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	rc_sq_wqe->byte_16 = cpu_to_le32(wr->mr->length & 0xffffffff);
mr                 87 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	rc_sq_wqe->byte_20 = cpu_to_le32(wr->mr->length >> 32);
mr                 89 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	rc_sq_wqe->va = cpu_to_le64(wr->mr->iova);
mr                 91 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	fseg->pbl_size = cpu_to_le32(mr->pbl_size);
mr                 95 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		       mr->pbl_buf_pg_sz + PG_SHIFT_OFFSET);
mr               2223 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 			struct hns_roce_mr *mr)
mr               2230 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	mpt_entry->pbl_size = cpu_to_le32(mr->pbl_size);
mr               2231 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	mpt_entry->pbl_ba_l = cpu_to_le32(lower_32_bits(mr->pbl_ba >> 3));
mr               2234 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		       upper_32_bits(mr->pbl_ba >> 3));
mr               2241 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	for_each_sg_dma_page(mr->umem->sg_head.sgl, &sg_iter, mr->umem->nmap, 0) {
mr               2261 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		       mr->pbl_buf_pg_sz + PG_SHIFT_OFFSET);
mr               2268 drivers/infiniband/hw/hns/hns_roce_hw_v2.c static int hns_roce_v2_write_mtpt(void *mb_buf, struct hns_roce_mr *mr,
mr               2280 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		       V2_MPT_BYTE_4_PBL_HOP_NUM_S, mr->pbl_hop_num ==
mr               2281 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		       HNS_ROCE_HOP_NUM_0 ? 0 : mr->pbl_hop_num);
mr               2285 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		       mr->pbl_ba_pg_sz + PG_SHIFT_OFFSET);
mr               2287 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		       V2_MPT_BYTE_4_PD_S, mr->pd);
mr               2293 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		     (mr->access & IB_ACCESS_MW_BIND ? 1 : 0));
mr               2295 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		     mr->access & IB_ACCESS_REMOTE_ATOMIC ? 1 : 0);
mr               2297 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		     (mr->access & IB_ACCESS_REMOTE_READ ? 1 : 0));
mr               2299 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		     (mr->access & IB_ACCESS_REMOTE_WRITE ? 1 : 0));
mr               2301 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		     (mr->access & IB_ACCESS_LOCAL_WRITE ? 1 : 0));
mr               2304 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		     mr->type == MR_TYPE_MR ? 0 : 1);
mr               2308 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	mpt_entry->len_l = cpu_to_le32(lower_32_bits(mr->size));
mr               2309 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	mpt_entry->len_h = cpu_to_le32(upper_32_bits(mr->size));
mr               2310 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	mpt_entry->lkey = cpu_to_le32(mr->key);
mr               2311 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	mpt_entry->va_l = cpu_to_le32(lower_32_bits(mr->iova));
mr               2312 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	mpt_entry->va_h = cpu_to_le32(upper_32_bits(mr->iova));
mr               2314 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	if (mr->type == MR_TYPE_DMA)
mr               2317 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	ret = set_mtpt_pbl(mpt_entry, mr);
mr               2323 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 					struct hns_roce_mr *mr, int flags,
mr               2336 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		mr->pd = pdn;
mr               2360 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		mr->iova = iova;
mr               2361 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		mr->size = size;
mr               2363 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		ret = set_mtpt_pbl(mpt_entry, mr);
mr               2369 drivers/infiniband/hw/hns/hns_roce_hw_v2.c static int hns_roce_v2_frmr_write_mtpt(void *mb_buf, struct hns_roce_mr *mr)
mr               2383 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		       mr->pbl_ba_pg_sz + PG_SHIFT_OFFSET);
mr               2385 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		       V2_MPT_BYTE_4_PD_S, mr->pd);
mr               2396 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	mpt_entry->pbl_size = cpu_to_le32(mr->pbl_size);
mr               2398 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	mpt_entry->pbl_ba_l = cpu_to_le32(lower_32_bits(mr->pbl_ba >> 3));
mr               2401 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		       upper_32_bits(mr->pbl_ba >> 3));
mr               2406 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 		       mr->pbl_buf_pg_sz + PG_SHIFT_OFFSET);
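
A standalone sketch of the access-flag packing done in the v2 write_mtpt entries above: each access bit carried by the MR is turned into a 0/1 field of the MPT entry. The flag values and bit positions below are made up for the example and do not correspond to the real V2_MPT layout.

	#include <stdint.h>
	#include <stdio.h>

	#define ACC_LOCAL_WRITE	 (1u << 0)
	#define ACC_REMOTE_WRITE (1u << 1)
	#define ACC_REMOTE_READ	 (1u << 2)

	static uint32_t pack_access(uint32_t access)
	{
		uint32_t mpt_bits = 0;

		/* one hardware bit per software access flag */
		mpt_bits |= (access & ACC_LOCAL_WRITE  ? 1u : 0u) << 0;
		mpt_bits |= (access & ACC_REMOTE_WRITE ? 1u : 0u) << 1;
		mpt_bits |= (access & ACC_REMOTE_READ  ? 1u : 0u) << 2;
		return mpt_bits;
	}

	int main(void)
	{
		printf("packed=0x%x\n",
		       pack_access(ACC_LOCAL_WRITE | ACC_REMOTE_READ));
		return 0;
	}
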
mr                296 drivers/infiniband/hw/hns/hns_roce_mr.c 			       struct hns_roce_mr *mr, int err_loop_index,
mr                311 drivers/infiniband/hw/hns/hns_roce_mr.c 			dma_free_coherent(dev, pbl_bt_sz, mr->pbl_bt_l1[i],
mr                312 drivers/infiniband/hw/hns/hns_roce_mr.c 					  mr->pbl_l1_dma_addr[i]);
mr                320 drivers/infiniband/hw/hns/hns_roce_mr.c 						  mr->pbl_bt_l2[bt_idx],
mr                321 drivers/infiniband/hw/hns/hns_roce_mr.c 						  mr->pbl_l2_dma_addr[bt_idx]);
mr                326 drivers/infiniband/hw/hns/hns_roce_mr.c 			dma_free_coherent(dev, pbl_bt_sz, mr->pbl_bt_l1[i],
mr                327 drivers/infiniband/hw/hns/hns_roce_mr.c 					  mr->pbl_l1_dma_addr[i]);
mr                332 drivers/infiniband/hw/hns/hns_roce_mr.c 						  mr->pbl_bt_l2[bt_idx],
mr                333 drivers/infiniband/hw/hns/hns_roce_mr.c 						  mr->pbl_l2_dma_addr[bt_idx]);
mr                338 drivers/infiniband/hw/hns/hns_roce_mr.c 			dma_free_coherent(dev, pbl_bt_sz, mr->pbl_bt_l1[i],
mr                339 drivers/infiniband/hw/hns/hns_roce_mr.c 					  mr->pbl_l1_dma_addr[i]);
mr                346 drivers/infiniband/hw/hns/hns_roce_mr.c 	dma_free_coherent(dev, pbl_bt_sz, mr->pbl_bt_l0, mr->pbl_l0_dma_addr);
mr                347 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_bt_l0 = NULL;
mr                348 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_l0_dma_addr = 0;
mr                351 drivers/infiniband/hw/hns/hns_roce_mr.c 			       struct hns_roce_mr *mr, u32 pbl_bt_sz)
mr                360 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_buf = dma_alloc_coherent(dev, npages * 8,
mr                361 drivers/infiniband/hw/hns/hns_roce_mr.c 					 &(mr->pbl_dma_addr),
mr                363 drivers/infiniband/hw/hns/hns_roce_mr.c 	if (!mr->pbl_buf)
mr                366 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_size = npages;
mr                367 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_ba = mr->pbl_dma_addr;
mr                368 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_hop_num = 1;
mr                369 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_ba_pg_sz = hr_dev->caps.pbl_ba_pg_sz;
mr                370 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_buf_pg_sz = hr_dev->caps.pbl_buf_pg_sz;
mr                377 drivers/infiniband/hw/hns/hns_roce_mr.c 			       struct hns_roce_mr *mr, u32 pbl_bt_sz)
mr                396 drivers/infiniband/hw/hns/hns_roce_mr.c 		mr->pbl_bt_l1[i] = dma_alloc_coherent(dev, size,
mr                397 drivers/infiniband/hw/hns/hns_roce_mr.c 					    &(mr->pbl_l1_dma_addr[i]),
mr                399 drivers/infiniband/hw/hns/hns_roce_mr.c 		if (!mr->pbl_bt_l1[i]) {
mr                400 drivers/infiniband/hw/hns/hns_roce_mr.c 			hns_roce_loop_free(hr_dev, mr, 1, i, 0);
mr                404 drivers/infiniband/hw/hns/hns_roce_mr.c 		*(mr->pbl_bt_l0 + i) = mr->pbl_l1_dma_addr[i];
mr                411 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->l0_chunk_last_num = i + 1;
mr                417 drivers/infiniband/hw/hns/hns_roce_mr.c 			       struct hns_roce_mr *mr, u32 pbl_bt_sz)
mr                431 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_l2_dma_addr = kcalloc(pbl_last_bt_num,
mr                432 drivers/infiniband/hw/hns/hns_roce_mr.c 				      sizeof(*mr->pbl_l2_dma_addr),
mr                434 drivers/infiniband/hw/hns/hns_roce_mr.c 	if (!mr->pbl_l2_dma_addr)
mr                437 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_bt_l2 = kcalloc(pbl_last_bt_num,
mr                438 drivers/infiniband/hw/hns/hns_roce_mr.c 				sizeof(*mr->pbl_bt_l2),
mr                440 drivers/infiniband/hw/hns/hns_roce_mr.c 	if (!mr->pbl_bt_l2)
mr                445 drivers/infiniband/hw/hns/hns_roce_mr.c 		mr->pbl_bt_l1[i] = dma_alloc_coherent(dev, pbl_bt_sz,
mr                446 drivers/infiniband/hw/hns/hns_roce_mr.c 					    &(mr->pbl_l1_dma_addr[i]),
mr                448 drivers/infiniband/hw/hns/hns_roce_mr.c 		if (!mr->pbl_bt_l1[i]) {
mr                449 drivers/infiniband/hw/hns/hns_roce_mr.c 			hns_roce_loop_free(hr_dev, mr, 1, i, 0);
mr                453 drivers/infiniband/hw/hns/hns_roce_mr.c 		*(mr->pbl_bt_l0 + i) = mr->pbl_l1_dma_addr[i];
mr                465 drivers/infiniband/hw/hns/hns_roce_mr.c 			mr->pbl_bt_l2[bt_idx] = dma_alloc_coherent(
mr                467 drivers/infiniband/hw/hns/hns_roce_mr.c 				      &(mr->pbl_l2_dma_addr[bt_idx]),
mr                469 drivers/infiniband/hw/hns/hns_roce_mr.c 			if (!mr->pbl_bt_l2[bt_idx]) {
mr                470 drivers/infiniband/hw/hns/hns_roce_mr.c 				hns_roce_loop_free(hr_dev, mr, 2, i, j);
mr                474 drivers/infiniband/hw/hns/hns_roce_mr.c 			*(mr->pbl_bt_l1[i] + j) =
mr                475 drivers/infiniband/hw/hns/hns_roce_mr.c 					mr->pbl_l2_dma_addr[bt_idx];
mr                488 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->l0_chunk_last_num = i + 1;
mr                489 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->l1_chunk_last_num = j + 1;
mr                495 drivers/infiniband/hw/hns/hns_roce_mr.c 	kfree(mr->pbl_bt_l2);
mr                496 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_bt_l2 = NULL;
mr                499 drivers/infiniband/hw/hns/hns_roce_mr.c 	kfree(mr->pbl_l2_dma_addr);
mr                500 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_l2_dma_addr = NULL;
mr                508 drivers/infiniband/hw/hns/hns_roce_mr.c 			       struct hns_roce_mr *mr)
mr                514 drivers/infiniband/hw/hns/hns_roce_mr.c 	mhop_num = (mr->type == MR_TYPE_FRMR ? 1 : hr_dev->caps.pbl_hop_num);
mr                521 drivers/infiniband/hw/hns/hns_roce_mr.c 		return pbl_1hop_alloc(hr_dev, npages, mr, pbl_bt_sz);
mr                523 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_l1_dma_addr = kcalloc(pbl_bt_sz / 8,
mr                524 drivers/infiniband/hw/hns/hns_roce_mr.c 				      sizeof(*mr->pbl_l1_dma_addr),
mr                526 drivers/infiniband/hw/hns/hns_roce_mr.c 	if (!mr->pbl_l1_dma_addr)
mr                529 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_bt_l1 = kcalloc(pbl_bt_sz / 8, sizeof(*mr->pbl_bt_l1),
mr                531 drivers/infiniband/hw/hns/hns_roce_mr.c 	if (!mr->pbl_bt_l1)
mr                535 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_bt_l0 = dma_alloc_coherent(dev, pbl_bt_sz,
mr                536 drivers/infiniband/hw/hns/hns_roce_mr.c 					   &(mr->pbl_l0_dma_addr),
mr                538 drivers/infiniband/hw/hns/hns_roce_mr.c 	if (!mr->pbl_bt_l0)
mr                542 drivers/infiniband/hw/hns/hns_roce_mr.c 		if (pbl_2hop_alloc(hr_dev, npages, mr, pbl_bt_sz))
mr                547 drivers/infiniband/hw/hns/hns_roce_mr.c 		if (pbl_3hop_alloc(hr_dev, npages, mr, pbl_bt_sz))
mr                552 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_size = npages;
mr                553 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_ba = mr->pbl_l0_dma_addr;
mr                554 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_hop_num = hr_dev->caps.pbl_hop_num;
mr                555 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_ba_pg_sz = hr_dev->caps.pbl_ba_pg_sz;
mr                556 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_buf_pg_sz = hr_dev->caps.pbl_buf_pg_sz;
mr                561 drivers/infiniband/hw/hns/hns_roce_mr.c 	kfree(mr->pbl_bt_l1);
mr                562 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_bt_l1 = NULL;
mr                565 drivers/infiniband/hw/hns/hns_roce_mr.c 	kfree(mr->pbl_l1_dma_addr);
mr                566 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_l1_dma_addr = NULL;
mr                573 drivers/infiniband/hw/hns/hns_roce_mr.c 			     struct hns_roce_mr *mr)
mr                584 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->iova = iova;			/* MR va starting addr */
mr                585 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->size = size;			/* MR addr range */
mr                586 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pd = pd;				/* MR num */
mr                587 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->access = access;			/* MR access permit */
mr                588 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->enabled = 0;			/* MR active status */
mr                589 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->key = hw_index_to_key(index);	/* MR key */
mr                592 drivers/infiniband/hw/hns/hns_roce_mr.c 		mr->pbl_buf = NULL;
mr                593 drivers/infiniband/hw/hns/hns_roce_mr.c 		mr->pbl_dma_addr = 0;
mr                595 drivers/infiniband/hw/hns/hns_roce_mr.c 		mr->pbl_bt_l2 = NULL;
mr                596 drivers/infiniband/hw/hns/hns_roce_mr.c 		mr->pbl_bt_l1 = NULL;
mr                597 drivers/infiniband/hw/hns/hns_roce_mr.c 		mr->pbl_bt_l0 = NULL;
mr                598 drivers/infiniband/hw/hns/hns_roce_mr.c 		mr->pbl_l2_dma_addr = NULL;
mr                599 drivers/infiniband/hw/hns/hns_roce_mr.c 		mr->pbl_l1_dma_addr = NULL;
mr                600 drivers/infiniband/hw/hns/hns_roce_mr.c 		mr->pbl_l0_dma_addr = 0;
mr                603 drivers/infiniband/hw/hns/hns_roce_mr.c 			mr->pbl_buf = dma_alloc_coherent(dev,
mr                605 drivers/infiniband/hw/hns/hns_roce_mr.c 							 &(mr->pbl_dma_addr),
mr                607 drivers/infiniband/hw/hns/hns_roce_mr.c 			if (!mr->pbl_buf)
mr                610 drivers/infiniband/hw/hns/hns_roce_mr.c 			ret = hns_roce_mhop_alloc(hr_dev, npages, mr);
mr                618 drivers/infiniband/hw/hns/hns_roce_mr.c 			       struct hns_roce_mr *mr)
mr                628 drivers/infiniband/hw/hns/hns_roce_mr.c 	npages = mr->pbl_size;
mr                630 drivers/infiniband/hw/hns/hns_roce_mr.c 	mhop_num = (mr->type == MR_TYPE_FRMR) ? 1 : hr_dev->caps.pbl_hop_num;
mr                637 drivers/infiniband/hw/hns/hns_roce_mr.c 				  mr->pbl_buf, mr->pbl_dma_addr);
mr                641 drivers/infiniband/hw/hns/hns_roce_mr.c 	dma_free_coherent(dev, pbl_bt_sz, mr->pbl_bt_l0,
mr                642 drivers/infiniband/hw/hns/hns_roce_mr.c 			  mr->pbl_l0_dma_addr);
mr                645 drivers/infiniband/hw/hns/hns_roce_mr.c 		for (i = 0; i < mr->l0_chunk_last_num; i++) {
mr                646 drivers/infiniband/hw/hns/hns_roce_mr.c 			if (i == mr->l0_chunk_last_num - 1) {
mr                652 drivers/infiniband/hw/hns/hns_roce_mr.c 				       mr->pbl_bt_l1[i],
mr                653 drivers/infiniband/hw/hns/hns_roce_mr.c 				       mr->pbl_l1_dma_addr[i]);
mr                658 drivers/infiniband/hw/hns/hns_roce_mr.c 			dma_free_coherent(dev, pbl_bt_sz, mr->pbl_bt_l1[i],
mr                659 drivers/infiniband/hw/hns/hns_roce_mr.c 					  mr->pbl_l1_dma_addr[i]);
mr                662 drivers/infiniband/hw/hns/hns_roce_mr.c 		for (i = 0; i < mr->l0_chunk_last_num; i++) {
mr                663 drivers/infiniband/hw/hns/hns_roce_mr.c 			dma_free_coherent(dev, pbl_bt_sz, mr->pbl_bt_l1[i],
mr                664 drivers/infiniband/hw/hns/hns_roce_mr.c 					  mr->pbl_l1_dma_addr[i]);
mr                669 drivers/infiniband/hw/hns/hns_roce_mr.c 				if ((i == mr->l0_chunk_last_num - 1)
mr                670 drivers/infiniband/hw/hns/hns_roce_mr.c 				    && j == mr->l1_chunk_last_num - 1) {
mr                677 drivers/infiniband/hw/hns/hns_roce_mr.c 					      mr->pbl_bt_l2[bt_idx],
mr                678 drivers/infiniband/hw/hns/hns_roce_mr.c 					      mr->pbl_l2_dma_addr[bt_idx]);
mr                684 drivers/infiniband/hw/hns/hns_roce_mr.c 						mr->pbl_bt_l2[bt_idx],
mr                685 drivers/infiniband/hw/hns/hns_roce_mr.c 						mr->pbl_l2_dma_addr[bt_idx]);
mr                690 drivers/infiniband/hw/hns/hns_roce_mr.c 	kfree(mr->pbl_bt_l1);
mr                691 drivers/infiniband/hw/hns/hns_roce_mr.c 	kfree(mr->pbl_l1_dma_addr);
mr                692 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_bt_l1 = NULL;
mr                693 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_l1_dma_addr = NULL;
mr                695 drivers/infiniband/hw/hns/hns_roce_mr.c 		kfree(mr->pbl_bt_l2);
mr                696 drivers/infiniband/hw/hns/hns_roce_mr.c 		kfree(mr->pbl_l2_dma_addr);
mr                697 drivers/infiniband/hw/hns/hns_roce_mr.c 		mr->pbl_bt_l2 = NULL;
mr                698 drivers/infiniband/hw/hns/hns_roce_mr.c 		mr->pbl_l2_dma_addr = NULL;
mr                703 drivers/infiniband/hw/hns/hns_roce_mr.c 			     struct hns_roce_mr *mr)
mr                709 drivers/infiniband/hw/hns/hns_roce_mr.c 	if (mr->enabled) {
mr                710 drivers/infiniband/hw/hns/hns_roce_mr.c 		ret = hns_roce_hw2sw_mpt(hr_dev, NULL, key_to_hw_index(mr->key)
mr                716 drivers/infiniband/hw/hns/hns_roce_mr.c 	if (mr->size != ~0ULL) {
mr                717 drivers/infiniband/hw/hns/hns_roce_mr.c 		if (mr->type == MR_TYPE_MR)
mr                718 drivers/infiniband/hw/hns/hns_roce_mr.c 			npages = ib_umem_page_count(mr->umem);
mr                723 drivers/infiniband/hw/hns/hns_roce_mr.c 					  mr->pbl_buf, mr->pbl_dma_addr);
mr                725 drivers/infiniband/hw/hns/hns_roce_mr.c 			hns_roce_mhop_free(hr_dev, mr);
mr                728 drivers/infiniband/hw/hns/hns_roce_mr.c 	if (mr->enabled)
mr                730 drivers/infiniband/hw/hns/hns_roce_mr.c 				   key_to_hw_index(mr->key));
mr                733 drivers/infiniband/hw/hns/hns_roce_mr.c 			     key_to_hw_index(mr->key), BITMAP_NO_RR);
mr                737 drivers/infiniband/hw/hns/hns_roce_mr.c 			      struct hns_roce_mr *mr)
mr                740 drivers/infiniband/hw/hns/hns_roce_mr.c 	unsigned long mtpt_idx = key_to_hw_index(mr->key);
mr                757 drivers/infiniband/hw/hns/hns_roce_mr.c 	if (mr->type != MR_TYPE_FRMR)
mr                758 drivers/infiniband/hw/hns/hns_roce_mr.c 		ret = hr_dev->hw->write_mtpt(mailbox->buf, mr, mtpt_idx);
mr                760 drivers/infiniband/hw/hns/hns_roce_mr.c 		ret = hr_dev->hw->frmr_write_mtpt(mailbox->buf, mr);
mr                773 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->enabled = 1;
mr                987 drivers/infiniband/hw/hns/hns_roce_mr.c 	struct hns_roce_mr *mr;
mr                990 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr = kmalloc(sizeof(*mr), GFP_KERNEL);
mr                991 drivers/infiniband/hw/hns/hns_roce_mr.c 	if (mr == NULL)
mr                994 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->type = MR_TYPE_DMA;
mr                998 drivers/infiniband/hw/hns/hns_roce_mr.c 				~0ULL, acc, 0, mr);
mr               1002 drivers/infiniband/hw/hns/hns_roce_mr.c 	ret = hns_roce_mr_enable(to_hr_dev(pd->device), mr);
mr               1006 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->ibmr.rkey = mr->ibmr.lkey = mr->key;
mr               1007 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->umem = NULL;
mr               1009 drivers/infiniband/hw/hns/hns_roce_mr.c 	return &mr->ibmr;
mr               1012 drivers/infiniband/hw/hns/hns_roce_mr.c 	hns_roce_mr_free(to_hr_dev(pd->device), mr);
mr               1015 drivers/infiniband/hw/hns/hns_roce_mr.c 	kfree(mr);
mr               1091 drivers/infiniband/hw/hns/hns_roce_mr.c 				     struct hns_roce_mr *mr,
mr               1107 drivers/infiniband/hw/hns/hns_roce_mr.c 			mr->pbl_buf[i++] = page_addr >> 12;
mr               1109 drivers/infiniband/hw/hns/hns_roce_mr.c 			mr->pbl_buf[i++] = page_addr;
mr               1112 drivers/infiniband/hw/hns/hns_roce_mr.c 				mr->pbl_bt_l1[i][j] = page_addr;
mr               1114 drivers/infiniband/hw/hns/hns_roce_mr.c 				mr->pbl_bt_l2[i][j] = page_addr;
mr               1136 drivers/infiniband/hw/hns/hns_roce_mr.c 	struct hns_roce_mr *mr;
mr               1142 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr = kmalloc(sizeof(*mr), GFP_KERNEL);
mr               1143 drivers/infiniband/hw/hns/hns_roce_mr.c 	if (!mr)
mr               1146 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->umem = ib_umem_get(udata, start, length, access_flags, 0);
mr               1147 drivers/infiniband/hw/hns/hns_roce_mr.c 	if (IS_ERR(mr->umem)) {
mr               1148 drivers/infiniband/hw/hns/hns_roce_mr.c 		ret = PTR_ERR(mr->umem);
mr               1152 drivers/infiniband/hw/hns/hns_roce_mr.c 	n = ib_umem_page_count(mr->umem);
mr               1178 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->type = MR_TYPE_MR;
mr               1181 drivers/infiniband/hw/hns/hns_roce_mr.c 				access_flags, n, mr);
mr               1185 drivers/infiniband/hw/hns/hns_roce_mr.c 	ret = hns_roce_ib_umem_write_mr(hr_dev, mr, mr->umem);
mr               1189 drivers/infiniband/hw/hns/hns_roce_mr.c 	ret = hns_roce_mr_enable(hr_dev, mr);
mr               1193 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->ibmr.rkey = mr->ibmr.lkey = mr->key;
mr               1195 drivers/infiniband/hw/hns/hns_roce_mr.c 	return &mr->ibmr;
mr               1198 drivers/infiniband/hw/hns/hns_roce_mr.c 	hns_roce_mr_free(hr_dev, mr);
mr               1201 drivers/infiniband/hw/hns/hns_roce_mr.c 	ib_umem_release(mr->umem);
mr               1204 drivers/infiniband/hw/hns/hns_roce_mr.c 	kfree(mr);
mr               1215 drivers/infiniband/hw/hns/hns_roce_mr.c 	struct hns_roce_mr *mr = to_hr_mr(ibmr);
mr               1220 drivers/infiniband/hw/hns/hns_roce_mr.c 	if (mr->size != ~0ULL) {
mr               1221 drivers/infiniband/hw/hns/hns_roce_mr.c 		npages = ib_umem_page_count(mr->umem);
mr               1224 drivers/infiniband/hw/hns/hns_roce_mr.c 			hns_roce_mhop_free(hr_dev, mr);
mr               1227 drivers/infiniband/hw/hns/hns_roce_mr.c 					  mr->pbl_buf, mr->pbl_dma_addr);
mr               1229 drivers/infiniband/hw/hns/hns_roce_mr.c 	ib_umem_release(mr->umem);
mr               1231 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->umem = ib_umem_get(udata, start, length, mr_access_flags, 0);
mr               1232 drivers/infiniband/hw/hns/hns_roce_mr.c 	if (IS_ERR(mr->umem)) {
mr               1233 drivers/infiniband/hw/hns/hns_roce_mr.c 		ret = PTR_ERR(mr->umem);
mr               1234 drivers/infiniband/hw/hns/hns_roce_mr.c 		mr->umem = NULL;
mr               1237 drivers/infiniband/hw/hns/hns_roce_mr.c 	npages = ib_umem_page_count(mr->umem);
mr               1240 drivers/infiniband/hw/hns/hns_roce_mr.c 		ret = hns_roce_mhop_alloc(hr_dev, npages, mr);
mr               1244 drivers/infiniband/hw/hns/hns_roce_mr.c 		mr->pbl_buf = dma_alloc_coherent(dev, npages * 8,
mr               1245 drivers/infiniband/hw/hns/hns_roce_mr.c 						 &(mr->pbl_dma_addr),
mr               1247 drivers/infiniband/hw/hns/hns_roce_mr.c 		if (!mr->pbl_buf) {
mr               1253 drivers/infiniband/hw/hns/hns_roce_mr.c 	ret = hr_dev->hw->rereg_write_mtpt(hr_dev, mr, flags, pdn,
mr               1260 drivers/infiniband/hw/hns/hns_roce_mr.c 	ret = hns_roce_ib_umem_write_mr(hr_dev, mr, mr->umem);
mr               1262 drivers/infiniband/hw/hns/hns_roce_mr.c 		if (mr->size != ~0ULL) {
mr               1263 drivers/infiniband/hw/hns/hns_roce_mr.c 			npages = ib_umem_page_count(mr->umem);
mr               1266 drivers/infiniband/hw/hns/hns_roce_mr.c 				hns_roce_mhop_free(hr_dev, mr);
mr               1269 drivers/infiniband/hw/hns/hns_roce_mr.c 						  mr->pbl_buf,
mr               1270 drivers/infiniband/hw/hns/hns_roce_mr.c 						  mr->pbl_dma_addr);
mr               1279 drivers/infiniband/hw/hns/hns_roce_mr.c 	ib_umem_release(mr->umem);
mr               1290 drivers/infiniband/hw/hns/hns_roce_mr.c 	struct hns_roce_mr *mr = to_hr_mr(ibmr);
mr               1297 drivers/infiniband/hw/hns/hns_roce_mr.c 	if (!mr->enabled)
mr               1304 drivers/infiniband/hw/hns/hns_roce_mr.c 	mtpt_idx = key_to_hw_index(mr->key) & (hr_dev->caps.num_mtpts - 1);
mr               1315 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->enabled = 0;
mr               1328 drivers/infiniband/hw/hns/hns_roce_mr.c 		ret = hr_dev->hw->rereg_write_mtpt(hr_dev, mr, flags, pdn,
mr               1338 drivers/infiniband/hw/hns/hns_roce_mr.c 		ib_umem_release(mr->umem);
mr               1342 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->enabled = 1;
mr               1344 drivers/infiniband/hw/hns/hns_roce_mr.c 		mr->access = mr_access_flags;
mr               1359 drivers/infiniband/hw/hns/hns_roce_mr.c 	struct hns_roce_mr *mr = to_hr_mr(ibmr);
mr               1363 drivers/infiniband/hw/hns/hns_roce_mr.c 		ret = hr_dev->hw->dereg_mr(hr_dev, mr, udata);
mr               1365 drivers/infiniband/hw/hns/hns_roce_mr.c 		hns_roce_mr_free(hr_dev, mr);
mr               1367 drivers/infiniband/hw/hns/hns_roce_mr.c 		ib_umem_release(mr->umem);
mr               1368 drivers/infiniband/hw/hns/hns_roce_mr.c 		kfree(mr);
mr               1379 drivers/infiniband/hw/hns/hns_roce_mr.c 	struct hns_roce_mr *mr;
mr               1396 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr               1397 drivers/infiniband/hw/hns/hns_roce_mr.c 	if (!mr)
mr               1400 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->type = MR_TYPE_FRMR;
mr               1404 drivers/infiniband/hw/hns/hns_roce_mr.c 				0, max_num_sg, mr);
mr               1408 drivers/infiniband/hw/hns/hns_roce_mr.c 	ret = hns_roce_mr_enable(hr_dev, mr);
mr               1412 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->ibmr.rkey = mr->ibmr.lkey = mr->key;
mr               1413 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->umem = NULL;
mr               1415 drivers/infiniband/hw/hns/hns_roce_mr.c 	return &mr->ibmr;
mr               1418 drivers/infiniband/hw/hns/hns_roce_mr.c 	hns_roce_mr_free(to_hr_dev(pd->device), mr);
mr               1421 drivers/infiniband/hw/hns/hns_roce_mr.c 	kfree(mr);
mr               1427 drivers/infiniband/hw/hns/hns_roce_mr.c 	struct hns_roce_mr *mr = to_hr_mr(ibmr);
mr               1429 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->pbl_buf[mr->npages++] = addr;
mr               1437 drivers/infiniband/hw/hns/hns_roce_mr.c 	struct hns_roce_mr *mr = to_hr_mr(ibmr);
mr               1439 drivers/infiniband/hw/hns/hns_roce_mr.c 	mr->npages = 0;
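
A standalone sketch of the single-hop PBL bookkeeping visible in the pbl_1hop_alloc() entries above: one flat array of 64-bit page addresses, with its entry count and base address remembered in the MR. Since this is plain userspace C, calloc() stands in for dma_alloc_coherent() and a pointer value stands in for the DMA address.

	#include <stdint.h>
	#include <stdio.h>
	#include <stdlib.h>

	struct fake_mr {
		uint64_t *pbl_buf;	/* flat page-address table */
		uint32_t  pbl_size;	/* number of entries */
		uint64_t  pbl_ba;	/* "bus address" of the table */
	};

	static int pbl_1hop_alloc(struct fake_mr *mr, uint32_t npages)
	{
		mr->pbl_buf = calloc(npages, sizeof(*mr->pbl_buf));
		if (!mr->pbl_buf)
			return -1;

		mr->pbl_size = npages;
		mr->pbl_ba   = (uint64_t)(uintptr_t)mr->pbl_buf; /* stand-in for the DMA address */
		return 0;
	}

	int main(void)
	{
		struct fake_mr mr = { 0 };

		if (pbl_1hop_alloc(&mr, 16))
			return 1;
		printf("pbl entries: %u\n", mr.pbl_size);
		free(mr.pbl_buf);
		return 0;
	}
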
mr               3159 drivers/infiniband/hw/i40iw/i40iw_ctrl.c 		 LS_64(info->mr, I40IW_CQPSQ_STAG_MR) |
mr                828 drivers/infiniband/hw/i40iw/i40iw_type.h 	bool mr;
mr               2028 drivers/infiniband/hw/i40iw/i40iw_verbs.c 	info->mr = true;
mr               2234 drivers/infiniband/hw/i40iw/i40iw_verbs.c 			struct i40iw_mr *iwmr = to_iwmr(reg_wr(ib_wr)->mr);
mr               2245 drivers/infiniband/hw/i40iw/i40iw_verbs.c 			info.page_size = reg_wr(ib_wr)->mr->page_size;
mr                736 drivers/infiniband/hw/mlx4/mlx4_ib.h int mlx4_ib_dereg_mr(struct ib_mr *mr, struct ib_udata *udata);
mr                894 drivers/infiniband/hw/mlx4/mlx4_ib.h int mlx4_ib_rereg_user_mr(struct ib_mr *mr, int flags,
mr                 60 drivers/infiniband/hw/mlx4/mr.c 	struct mlx4_ib_mr *mr;
mr                 63 drivers/infiniband/hw/mlx4/mr.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr                 64 drivers/infiniband/hw/mlx4/mr.c 	if (!mr)
mr                 68 drivers/infiniband/hw/mlx4/mr.c 			    ~0ull, convert_access(acc), 0, 0, &mr->mmr);
mr                 72 drivers/infiniband/hw/mlx4/mr.c 	err = mlx4_mr_enable(to_mdev(pd->device)->dev, &mr->mmr);
mr                 76 drivers/infiniband/hw/mlx4/mr.c 	mr->ibmr.rkey = mr->ibmr.lkey = mr->mmr.key;
mr                 77 drivers/infiniband/hw/mlx4/mr.c 	mr->umem = NULL;
mr                 79 drivers/infiniband/hw/mlx4/mr.c 	return &mr->ibmr;
mr                 82 drivers/infiniband/hw/mlx4/mr.c 	(void) mlx4_mr_free(to_mdev(pd->device)->dev, &mr->mmr);
mr                 85 drivers/infiniband/hw/mlx4/mr.c 	kfree(mr);
mr                409 drivers/infiniband/hw/mlx4/mr.c 	struct mlx4_ib_mr *mr;
mr                414 drivers/infiniband/hw/mlx4/mr.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr                415 drivers/infiniband/hw/mlx4/mr.c 	if (!mr)
mr                418 drivers/infiniband/hw/mlx4/mr.c 	mr->umem = mlx4_get_umem_mr(udata, start, length, access_flags);
mr                419 drivers/infiniband/hw/mlx4/mr.c 	if (IS_ERR(mr->umem)) {
mr                420 drivers/infiniband/hw/mlx4/mr.c 		err = PTR_ERR(mr->umem);
mr                424 drivers/infiniband/hw/mlx4/mr.c 	n = ib_umem_page_count(mr->umem);
mr                425 drivers/infiniband/hw/mlx4/mr.c 	shift = mlx4_ib_umem_calc_optimal_mtt_size(mr->umem, start, &n);
mr                428 drivers/infiniband/hw/mlx4/mr.c 			    convert_access(access_flags), n, shift, &mr->mmr);
mr                432 drivers/infiniband/hw/mlx4/mr.c 	err = mlx4_ib_umem_write_mtt(dev, &mr->mmr.mtt, mr->umem);
mr                436 drivers/infiniband/hw/mlx4/mr.c 	err = mlx4_mr_enable(dev->dev, &mr->mmr);
mr                440 drivers/infiniband/hw/mlx4/mr.c 	mr->ibmr.rkey = mr->ibmr.lkey = mr->mmr.key;
mr                441 drivers/infiniband/hw/mlx4/mr.c 	mr->ibmr.length = length;
mr                442 drivers/infiniband/hw/mlx4/mr.c 	mr->ibmr.iova = virt_addr;
mr                443 drivers/infiniband/hw/mlx4/mr.c 	mr->ibmr.page_size = 1U << shift;
mr                445 drivers/infiniband/hw/mlx4/mr.c 	return &mr->ibmr;
mr                448 drivers/infiniband/hw/mlx4/mr.c 	(void) mlx4_mr_free(to_mdev(pd->device)->dev, &mr->mmr);
mr                451 drivers/infiniband/hw/mlx4/mr.c 	ib_umem_release(mr->umem);
mr                454 drivers/infiniband/hw/mlx4/mr.c 	kfree(mr);
mr                459 drivers/infiniband/hw/mlx4/mr.c int mlx4_ib_rereg_user_mr(struct ib_mr *mr, int flags,
mr                464 drivers/infiniband/hw/mlx4/mr.c 	struct mlx4_ib_dev *dev = to_mdev(mr->device);
mr                465 drivers/infiniband/hw/mlx4/mr.c 	struct mlx4_ib_mr *mmr = to_mmr(mr);
mr                551 drivers/infiniband/hw/mlx4/mr.c 		      struct mlx4_ib_mr *mr,
mr                561 drivers/infiniband/hw/mlx4/mr.c 	mr->page_map_size = roundup(max_pages * sizeof(u64),
mr                565 drivers/infiniband/hw/mlx4/mr.c 	mr->pages = (__be64 *)get_zeroed_page(GFP_KERNEL);
mr                566 drivers/infiniband/hw/mlx4/mr.c 	if (!mr->pages)
mr                569 drivers/infiniband/hw/mlx4/mr.c 	mr->page_map = dma_map_single(device->dev.parent, mr->pages,
mr                570 drivers/infiniband/hw/mlx4/mr.c 				      mr->page_map_size, DMA_TO_DEVICE);
mr                572 drivers/infiniband/hw/mlx4/mr.c 	if (dma_mapping_error(device->dev.parent, mr->page_map)) {
mr                580 drivers/infiniband/hw/mlx4/mr.c 	free_page((unsigned long)mr->pages);
mr                585 drivers/infiniband/hw/mlx4/mr.c mlx4_free_priv_pages(struct mlx4_ib_mr *mr)
mr                587 drivers/infiniband/hw/mlx4/mr.c 	if (mr->pages) {
mr                588 drivers/infiniband/hw/mlx4/mr.c 		struct ib_device *device = mr->ibmr.device;
mr                590 drivers/infiniband/hw/mlx4/mr.c 		dma_unmap_single(device->dev.parent, mr->page_map,
mr                591 drivers/infiniband/hw/mlx4/mr.c 				 mr->page_map_size, DMA_TO_DEVICE);
mr                592 drivers/infiniband/hw/mlx4/mr.c 		free_page((unsigned long)mr->pages);
mr                593 drivers/infiniband/hw/mlx4/mr.c 		mr->pages = NULL;
mr                599 drivers/infiniband/hw/mlx4/mr.c 	struct mlx4_ib_mr *mr = to_mmr(ibmr);
mr                602 drivers/infiniband/hw/mlx4/mr.c 	mlx4_free_priv_pages(mr);
mr                604 drivers/infiniband/hw/mlx4/mr.c 	ret = mlx4_mr_free(to_mdev(ibmr->device)->dev, &mr->mmr);
mr                607 drivers/infiniband/hw/mlx4/mr.c 	if (mr->umem)
mr                608 drivers/infiniband/hw/mlx4/mr.c 		ib_umem_release(mr->umem);
mr                609 drivers/infiniband/hw/mlx4/mr.c 	kfree(mr);
mr                661 drivers/infiniband/hw/mlx4/mr.c 	struct mlx4_ib_mr *mr;
mr                668 drivers/infiniband/hw/mlx4/mr.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr                669 drivers/infiniband/hw/mlx4/mr.c 	if (!mr)
mr                673 drivers/infiniband/hw/mlx4/mr.c 			    max_num_sg, 0, &mr->mmr);
mr                677 drivers/infiniband/hw/mlx4/mr.c 	err = mlx4_alloc_priv_pages(pd->device, mr, max_num_sg);
mr                681 drivers/infiniband/hw/mlx4/mr.c 	mr->max_pages = max_num_sg;
mr                682 drivers/infiniband/hw/mlx4/mr.c 	err = mlx4_mr_enable(dev->dev, &mr->mmr);
mr                686 drivers/infiniband/hw/mlx4/mr.c 	mr->ibmr.rkey = mr->ibmr.lkey = mr->mmr.key;
mr                687 drivers/infiniband/hw/mlx4/mr.c 	mr->umem = NULL;
mr                689 drivers/infiniband/hw/mlx4/mr.c 	return &mr->ibmr;
mr                692 drivers/infiniband/hw/mlx4/mr.c 	mr->ibmr.device = pd->device;
mr                693 drivers/infiniband/hw/mlx4/mr.c 	mlx4_free_priv_pages(mr);
mr                695 drivers/infiniband/hw/mlx4/mr.c 	(void) mlx4_mr_free(dev->dev, &mr->mmr);
mr                697 drivers/infiniband/hw/mlx4/mr.c 	kfree(mr);
mr                722 drivers/infiniband/hw/mlx4/mr.c 	fmr->ibfmr.rkey = fmr->ibfmr.lkey = fmr->mfmr.mr.key;
mr                727 drivers/infiniband/hw/mlx4/mr.c 	(void) mlx4_mr_free(to_mdev(pd->device)->dev, &fmr->mfmr.mr);
mr                796 drivers/infiniband/hw/mlx4/mr.c 	struct mlx4_ib_mr *mr = to_mmr(ibmr);
mr                798 drivers/infiniband/hw/mlx4/mr.c 	if (unlikely(mr->npages == mr->max_pages))
mr                801 drivers/infiniband/hw/mlx4/mr.c 	mr->pages[mr->npages++] = cpu_to_be64(addr | MLX4_MTT_FLAG_PRESENT);
mr                809 drivers/infiniband/hw/mlx4/mr.c 	struct mlx4_ib_mr *mr = to_mmr(ibmr);
mr                812 drivers/infiniband/hw/mlx4/mr.c 	mr->npages = 0;
mr                814 drivers/infiniband/hw/mlx4/mr.c 	ib_dma_sync_single_for_cpu(ibmr->device, mr->page_map,
mr                815 drivers/infiniband/hw/mlx4/mr.c 				   mr->page_map_size, DMA_TO_DEVICE);
mr                819 drivers/infiniband/hw/mlx4/mr.c 	ib_dma_sync_single_for_device(ibmr->device, mr->page_map,
mr                820 drivers/infiniband/hw/mlx4/mr.c 				      mr->page_map_size, DMA_TO_DEVICE);
mr               3321 drivers/infiniband/hw/mlx4/qp.c 	struct mlx4_ib_mr *mr = to_mmr(wr->mr);
mr               3325 drivers/infiniband/hw/mlx4/qp.c 	fseg->buf_list		= cpu_to_be64(mr->page_map);
mr               3326 drivers/infiniband/hw/mlx4/qp.c 	fseg->start_addr	= cpu_to_be64(mr->ibmr.iova);
mr               3327 drivers/infiniband/hw/mlx4/qp.c 	fseg->reg_len		= cpu_to_be64(mr->ibmr.length);
mr               3329 drivers/infiniband/hw/mlx4/qp.c 	fseg->page_size		= cpu_to_be32(ilog2(mr->ibmr.page_size));
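
A standalone sketch of the bounded page collector behind the mlx4 set_page/map_mr_sg entries above: reset the counter before collecting, refuse new pages once the table is full, otherwise append the address. The real driver's MLX4_MTT_FLAG_PRESENT and cpu_to_be64() conversion are represented by a plain bit-OR here, and all names are illustrative.

	#include <stdint.h>
	#include <stdio.h>

	#define PAGE_PRESENT 1ull	/* stand-in for the MTT "present" flag */

	struct fake_mr {
		uint64_t pages[8];
		unsigned int npages;
		unsigned int max_pages;
	};

	static int mr_set_page(struct fake_mr *mr, uint64_t addr)
	{
		if (mr->npages == mr->max_pages)
			return -1;	/* table full */

		mr->pages[mr->npages++] = addr | PAGE_PRESENT;
		return 0;
	}

	int main(void)
	{
		struct fake_mr mr = { .max_pages = 8 };
		uint64_t addr;

		mr.npages = 0;	/* map_mr_sg resets the counter before collecting */
		for (addr = 0x1000; addr < 0x1000 + 10 * 0x1000; addr += 0x1000)
			if (mr_set_page(&mr, addr))
				break;
		printf("collected %u pages\n", mr.npages);
		return 0;
	}
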
mr                450 drivers/infiniband/hw/mlx5/cq.c 	struct mlx5_ib_mr *mr;
mr                551 drivers/infiniband/hw/mlx5/cq.c 		mr = to_mibmr(mmkey);
mr                552 drivers/infiniband/hw/mlx5/cq.c 		get_sig_err_item(sig_err_cqe, &mr->sig->err_item);
mr                553 drivers/infiniband/hw/mlx5/cq.c 		mr->sig->sig_err_exists = true;
mr                554 drivers/infiniband/hw/mlx5/cq.c 		mr->sig->sigerr_count++;
mr                557 drivers/infiniband/hw/mlx5/cq.c 			     cq->mcq.cqn, mr->sig->err_item.key,
mr                558 drivers/infiniband/hw/mlx5/cq.c 			     mr->sig->err_item.err_type,
mr                559 drivers/infiniband/hw/mlx5/cq.c 			     mr->sig->err_item.sig_err_offset,
mr                560 drivers/infiniband/hw/mlx5/cq.c 			     mr->sig->err_item.expected,
mr                561 drivers/infiniband/hw/mlx5/cq.c 			     mr->sig->err_item.actual);
mr                628 drivers/infiniband/hw/mlx5/mlx5_ib.h static inline bool is_odp_mr(struct mlx5_ib_mr *mr)
mr                630 drivers/infiniband/hw/mlx5/mlx5_ib.h 	return IS_ENABLED(CONFIG_INFINIBAND_ON_DEMAND_PAGING) && mr->umem &&
mr                631 drivers/infiniband/hw/mlx5/mlx5_ib.h 	       mr->umem->is_odp;
mr               1160 drivers/infiniband/hw/mlx5/mlx5_ib.h int mlx5_ib_update_xlt(struct mlx5_ib_mr *mr, u64 idx, int npages,
mr               1165 drivers/infiniband/hw/mlx5/mlx5_ib.h void mlx5_ib_free_implicit_mr(struct mlx5_ib_mr *mr);
mr               1226 drivers/infiniband/hw/mlx5/mlx5_ib.h void mlx5_mr_cache_free(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr);
mr               1259 drivers/infiniband/hw/mlx5/mlx5_ib.h 			   size_t nentries, struct mlx5_ib_mr *mr, int flags);
mr               1276 drivers/infiniband/hw/mlx5/mlx5_ib.h 					 size_t nentries, struct mlx5_ib_mr *mr,
mr                 50 drivers/infiniband/hw/mlx5/mr.c static void clean_mr(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr);
mr                 51 drivers/infiniband/hw/mlx5/mr.c static void dereg_mr(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr);
mr                 53 drivers/infiniband/hw/mlx5/mr.c static int unreg_umr(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr);
mr                 60 drivers/infiniband/hw/mlx5/mr.c static int destroy_mkey(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr)
mr                 62 drivers/infiniband/hw/mlx5/mr.c 	int err = mlx5_core_destroy_mkey(dev->mdev, &mr->mmkey);
mr                 81 drivers/infiniband/hw/mlx5/mr.c static bool use_umr_mtt_update(struct mlx5_ib_mr *mr, u64 start, u64 length)
mr                 83 drivers/infiniband/hw/mlx5/mr.c 	return ((u64)1 << mr->order) * MLX5_ADAPTER_PAGE_SIZE >=
mr                 89 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr =
mr                 91 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_dev *dev = mr->dev;
mr                 93 drivers/infiniband/hw/mlx5/mr.c 	int c = order2idx(dev, mr->order);
mr                105 drivers/infiniband/hw/mlx5/mr.c 		kfree(mr);
mr                111 drivers/infiniband/hw/mlx5/mr.c 	mr->mmkey.type = MLX5_MKEY_MR;
mr                115 drivers/infiniband/hw/mlx5/mr.c 	mr->mmkey.key = mlx5_idx_to_mkey(MLX5_GET(create_mkey_out, mr->out, mkey_index)) | key;
mr                120 drivers/infiniband/hw/mlx5/mr.c 	list_add_tail(&mr->list, &ent->head);
mr                126 drivers/infiniband/hw/mlx5/mr.c 	err = xa_err(__xa_store(mkeys, mlx5_base_mkey(mr->mmkey.key),
mr                127 drivers/infiniband/hw/mlx5/mr.c 				&mr->mmkey, GFP_ATOMIC));
mr                141 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr;
mr                158 drivers/infiniband/hw/mlx5/mr.c 		mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr                159 drivers/infiniband/hw/mlx5/mr.c 		if (!mr) {
mr                163 drivers/infiniband/hw/mlx5/mr.c 		mr->order = ent->order;
mr                164 drivers/infiniband/hw/mlx5/mr.c 		mr->allocated_from_cache = 1;
mr                165 drivers/infiniband/hw/mlx5/mr.c 		mr->dev = dev;
mr                180 drivers/infiniband/hw/mlx5/mr.c 		err = mlx5_core_create_mkey_cb(dev->mdev, &mr->mmkey,
mr                182 drivers/infiniband/hw/mlx5/mr.c 					       mr->out, sizeof(mr->out),
mr                183 drivers/infiniband/hw/mlx5/mr.c 					       reg_mr_callback, &mr->cb_work);
mr                189 drivers/infiniband/hw/mlx5/mr.c 			kfree(mr);
mr                203 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr;
mr                213 drivers/infiniband/hw/mlx5/mr.c 		mr = list_first_entry(&ent->head, struct mlx5_ib_mr, list);
mr                214 drivers/infiniband/hw/mlx5/mr.c 		list_move(&mr->list, &del_list);
mr                218 drivers/infiniband/hw/mlx5/mr.c 		mlx5_core_destroy_mkey(dev->mdev, &mr->mmkey);
mr                224 drivers/infiniband/hw/mlx5/mr.c 	list_for_each_entry_safe(mr, tmp_mr, &del_list, list) {
mr                225 drivers/infiniband/hw/mlx5/mr.c 		list_del(&mr->list);
mr                226 drivers/infiniband/hw/mlx5/mr.c 		kfree(mr);
mr                426 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr;
mr                446 drivers/infiniband/hw/mlx5/mr.c 			mr = list_first_entry(&ent->head, struct mlx5_ib_mr,
mr                448 drivers/infiniband/hw/mlx5/mr.c 			list_del(&mr->list);
mr                453 drivers/infiniband/hw/mlx5/mr.c 			return mr;
mr                461 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr = NULL;
mr                481 drivers/infiniband/hw/mlx5/mr.c 			mr = list_first_entry(&ent->head, struct mlx5_ib_mr,
mr                483 drivers/infiniband/hw/mlx5/mr.c 			list_del(&mr->list);
mr                495 drivers/infiniband/hw/mlx5/mr.c 	if (!mr)
mr                498 drivers/infiniband/hw/mlx5/mr.c 	return mr;
mr                501 drivers/infiniband/hw/mlx5/mr.c void mlx5_mr_cache_free(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr)
mr                508 drivers/infiniband/hw/mlx5/mr.c 	if (!mr->allocated_from_cache)
mr                511 drivers/infiniband/hw/mlx5/mr.c 	c = order2idx(dev, mr->order);
mr                514 drivers/infiniband/hw/mlx5/mr.c 	if (unreg_umr(dev, mr)) {
mr                515 drivers/infiniband/hw/mlx5/mr.c 		mr->allocated_from_cache = false;
mr                516 drivers/infiniband/hw/mlx5/mr.c 		destroy_mkey(dev, mr);
mr                525 drivers/infiniband/hw/mlx5/mr.c 	list_add_tail(&mr->list, &ent->head);
mr                540 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr;
mr                550 drivers/infiniband/hw/mlx5/mr.c 		mr = list_first_entry(&ent->head, struct mlx5_ib_mr, list);
mr                551 drivers/infiniband/hw/mlx5/mr.c 		list_move(&mr->list, &del_list);
mr                555 drivers/infiniband/hw/mlx5/mr.c 		mlx5_core_destroy_mkey(dev->mdev, &mr->mmkey);
mr                562 drivers/infiniband/hw/mlx5/mr.c 	list_for_each_entry_safe(mr, tmp_mr, &del_list, list) {
mr                563 drivers/infiniband/hw/mlx5/mr.c 		list_del(&mr->list);
mr                564 drivers/infiniband/hw/mlx5/mr.c 		kfree(mr);
mr                687 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr;
mr                692 drivers/infiniband/hw/mlx5/mr.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr                693 drivers/infiniband/hw/mlx5/mr.c 	if (!mr)
mr                716 drivers/infiniband/hw/mlx5/mr.c 	err = mlx5_core_create_mkey(mdev, &mr->mmkey, in, inlen);
mr                721 drivers/infiniband/hw/mlx5/mr.c 	mr->mmkey.type = MLX5_MKEY_MR;
mr                722 drivers/infiniband/hw/mlx5/mr.c 	mr->ibmr.lkey = mr->mmkey.key;
mr                723 drivers/infiniband/hw/mlx5/mr.c 	mr->ibmr.rkey = mr->mmkey.key;
mr                724 drivers/infiniband/hw/mlx5/mr.c 	mr->umem = NULL;
mr                726 drivers/infiniband/hw/mlx5/mr.c 	return &mr->ibmr;
mr                732 drivers/infiniband/hw/mlx5/mr.c 	kfree(mr);
mr                855 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr;
mr                860 drivers/infiniband/hw/mlx5/mr.c 		mr = alloc_cached_mr(dev, order);
mr                861 drivers/infiniband/hw/mlx5/mr.c 		if (mr)
mr                871 drivers/infiniband/hw/mlx5/mr.c 	if (!mr)
mr                874 drivers/infiniband/hw/mlx5/mr.c 	mr->ibmr.pd = pd;
mr                875 drivers/infiniband/hw/mlx5/mr.c 	mr->umem = umem;
mr                876 drivers/infiniband/hw/mlx5/mr.c 	mr->access_flags = access_flags;
mr                877 drivers/infiniband/hw/mlx5/mr.c 	mr->desc_size = sizeof(struct mlx5_mtt);
mr                878 drivers/infiniband/hw/mlx5/mr.c 	mr->mmkey.iova = virt_addr;
mr                879 drivers/infiniband/hw/mlx5/mr.c 	mr->mmkey.size = len;
mr                880 drivers/infiniband/hw/mlx5/mr.c 	mr->mmkey.pd = to_mpd(pd)->pdn;
mr                882 drivers/infiniband/hw/mlx5/mr.c 	return mr;
mr                885 drivers/infiniband/hw/mlx5/mr.c static inline int populate_xlt(struct mlx5_ib_mr *mr, int idx, int npages,
mr                889 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_dev *dev = mr->dev;
mr                890 drivers/infiniband/hw/mlx5/mr.c 	struct ib_umem *umem = mr->umem;
mr                895 drivers/infiniband/hw/mlx5/mr.c 		mlx5_odp_populate_klm(xlt, idx, npages, mr, flags);
mr                919 drivers/infiniband/hw/mlx5/mr.c int mlx5_ib_update_xlt(struct mlx5_ib_mr *mr, u64 idx, int npages,
mr                922 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_dev *dev = mr->dev;
mr                995 drivers/infiniband/hw/mlx5/mr.c 	wr.pd = mr->ibmr.pd;
mr                996 drivers/infiniband/hw/mlx5/mr.c 	wr.mkey = mr->mmkey.key;
mr                997 drivers/infiniband/hw/mlx5/mr.c 	wr.length = mr->mmkey.size;
mr                998 drivers/infiniband/hw/mlx5/mr.c 	wr.virt_addr = mr->mmkey.iova;
mr                999 drivers/infiniband/hw/mlx5/mr.c 	wr.access_flags = mr->access_flags;
mr               1007 drivers/infiniband/hw/mlx5/mr.c 		npages = populate_xlt(mr, idx, npages, xlt,
mr               1057 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr;
mr               1065 drivers/infiniband/hw/mlx5/mr.c 	mr = ibmr ? to_mmr(ibmr) : kzalloc(sizeof(*mr), GFP_KERNEL);
mr               1066 drivers/infiniband/hw/mlx5/mr.c 	if (!mr)
mr               1069 drivers/infiniband/hw/mlx5/mr.c 	mr->ibmr.pd = pd;
mr               1070 drivers/infiniband/hw/mlx5/mr.c 	mr->access_flags = access_flags;
mr               1112 drivers/infiniband/hw/mlx5/mr.c 	err = mlx5_core_create_mkey(dev->mdev, &mr->mmkey, in, inlen);
mr               1117 drivers/infiniband/hw/mlx5/mr.c 	mr->mmkey.type = MLX5_MKEY_MR;
mr               1118 drivers/infiniband/hw/mlx5/mr.c 	mr->desc_size = sizeof(struct mlx5_mtt);
mr               1119 drivers/infiniband/hw/mlx5/mr.c 	mr->dev = dev;
mr               1122 drivers/infiniband/hw/mlx5/mr.c 	mlx5_ib_dbg(dev, "mkey = 0x%x\n", mr->mmkey.key);
mr               1124 drivers/infiniband/hw/mlx5/mr.c 	return mr;
mr               1131 drivers/infiniband/hw/mlx5/mr.c 		kfree(mr);
mr               1136 drivers/infiniband/hw/mlx5/mr.c static void set_mr_fields(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr,
mr               1139 drivers/infiniband/hw/mlx5/mr.c 	mr->npages = npages;
mr               1141 drivers/infiniband/hw/mlx5/mr.c 	mr->ibmr.lkey = mr->mmkey.key;
mr               1142 drivers/infiniband/hw/mlx5/mr.c 	mr->ibmr.rkey = mr->mmkey.key;
mr               1143 drivers/infiniband/hw/mlx5/mr.c 	mr->ibmr.length = length;
mr               1144 drivers/infiniband/hw/mlx5/mr.c 	mr->access_flags = access_flags;
mr               1153 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr;
mr               1158 drivers/infiniband/hw/mlx5/mr.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr               1159 drivers/infiniband/hw/mlx5/mr.c 	if (!mr)
mr               1183 drivers/infiniband/hw/mlx5/mr.c 	err = mlx5_core_create_mkey(mdev, &mr->mmkey, in, inlen);
mr               1189 drivers/infiniband/hw/mlx5/mr.c 	mr->umem = NULL;
mr               1190 drivers/infiniband/hw/mlx5/mr.c 	set_mr_fields(dev, mr, 0, length, acc);
mr               1192 drivers/infiniband/hw/mlx5/mr.c 	return &mr->ibmr;
mr               1198 drivers/infiniband/hw/mlx5/mr.c 	kfree(mr);
mr               1255 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr = NULL;
mr               1276 drivers/infiniband/hw/mlx5/mr.c 		mr = mlx5_ib_alloc_implicit_mr(to_mpd(pd), udata, access_flags);
mr               1277 drivers/infiniband/hw/mlx5/mr.c 		if (IS_ERR(mr))
mr               1278 drivers/infiniband/hw/mlx5/mr.c 			return ERR_CAST(mr);
mr               1279 drivers/infiniband/hw/mlx5/mr.c 		return &mr->ibmr;
mr               1291 drivers/infiniband/hw/mlx5/mr.c 		mr = alloc_mr_from_cache(pd, umem, virt_addr, length, ncont,
mr               1293 drivers/infiniband/hw/mlx5/mr.c 		if (PTR_ERR(mr) == -EAGAIN) {
mr               1295 drivers/infiniband/hw/mlx5/mr.c 			mr = NULL;
mr               1306 drivers/infiniband/hw/mlx5/mr.c 	if (!mr) {
mr               1308 drivers/infiniband/hw/mlx5/mr.c 		mr = reg_create(NULL, pd, virt_addr, length, umem, ncont,
mr               1313 drivers/infiniband/hw/mlx5/mr.c 	if (IS_ERR(mr)) {
mr               1314 drivers/infiniband/hw/mlx5/mr.c 		err = PTR_ERR(mr);
mr               1318 drivers/infiniband/hw/mlx5/mr.c 	mlx5_ib_dbg(dev, "mkey 0x%x\n", mr->mmkey.key);
mr               1320 drivers/infiniband/hw/mlx5/mr.c 	mr->umem = umem;
mr               1321 drivers/infiniband/hw/mlx5/mr.c 	set_mr_fields(dev, mr, npages, length, access_flags);
mr               1329 drivers/infiniband/hw/mlx5/mr.c 		err = mlx5_ib_update_xlt(mr, 0, ncont, page_shift,
mr               1333 drivers/infiniband/hw/mlx5/mr.c 			dereg_mr(dev, mr);
mr               1338 drivers/infiniband/hw/mlx5/mr.c 	if (is_odp_mr(mr)) {
mr               1339 drivers/infiniband/hw/mlx5/mr.c 		to_ib_umem_odp(mr->umem)->private = mr;
mr               1340 drivers/infiniband/hw/mlx5/mr.c 		atomic_set(&mr->num_pending_prefetch, 0);
mr               1343 drivers/infiniband/hw/mlx5/mr.c 		smp_store_release(&mr->live, 1);
mr               1345 drivers/infiniband/hw/mlx5/mr.c 	return &mr->ibmr;
mr               1351 drivers/infiniband/hw/mlx5/mr.c static int unreg_umr(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr)
mr               1363 drivers/infiniband/hw/mlx5/mr.c 	umrwr.mkey = mr->mmkey.key;
mr               1369 drivers/infiniband/hw/mlx5/mr.c static int rereg_umr(struct ib_pd *pd, struct mlx5_ib_mr *mr,
mr               1379 drivers/infiniband/hw/mlx5/mr.c 	umrwr.mkey = mr->mmkey.key;
mr               1397 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr = to_mmr(ib_mr);
mr               1401 drivers/infiniband/hw/mlx5/mr.c 			    mr->access_flags;
mr               1413 drivers/infiniband/hw/mlx5/mr.c 	atomic_sub(mr->npages, &dev->mdev->priv.reg_pages);
mr               1415 drivers/infiniband/hw/mlx5/mr.c 	if (!mr->umem)
mr               1418 drivers/infiniband/hw/mlx5/mr.c 	if (is_odp_mr(mr))
mr               1425 drivers/infiniband/hw/mlx5/mr.c 		addr = mr->umem->address;
mr               1426 drivers/infiniband/hw/mlx5/mr.c 		len = mr->umem->length;
mr               1435 drivers/infiniband/hw/mlx5/mr.c 		ib_umem_release(mr->umem);
mr               1436 drivers/infiniband/hw/mlx5/mr.c 		mr->umem = NULL;
mr               1438 drivers/infiniband/hw/mlx5/mr.c 				  &mr->umem, &npages, &page_shift, &ncont,
mr               1445 drivers/infiniband/hw/mlx5/mr.c 	    (flags & IB_MR_REREG_TRANS && !use_umr_mtt_update(mr, addr, len))) {
mr               1449 drivers/infiniband/hw/mlx5/mr.c 		if (mr->allocated_from_cache)
mr               1450 drivers/infiniband/hw/mlx5/mr.c 			err = unreg_umr(dev, mr);
mr               1452 drivers/infiniband/hw/mlx5/mr.c 			err = destroy_mkey(dev, mr);
mr               1456 drivers/infiniband/hw/mlx5/mr.c 		mr = reg_create(ib_mr, pd, addr, len, mr->umem, ncont,
mr               1459 drivers/infiniband/hw/mlx5/mr.c 		if (IS_ERR(mr)) {
mr               1460 drivers/infiniband/hw/mlx5/mr.c 			err = PTR_ERR(mr);
mr               1461 drivers/infiniband/hw/mlx5/mr.c 			mr = to_mmr(ib_mr);
mr               1465 drivers/infiniband/hw/mlx5/mr.c 		mr->allocated_from_cache = 0;
mr               1470 drivers/infiniband/hw/mlx5/mr.c 		mr->ibmr.pd = pd;
mr               1471 drivers/infiniband/hw/mlx5/mr.c 		mr->access_flags = access_flags;
mr               1472 drivers/infiniband/hw/mlx5/mr.c 		mr->mmkey.iova = addr;
mr               1473 drivers/infiniband/hw/mlx5/mr.c 		mr->mmkey.size = len;
mr               1474 drivers/infiniband/hw/mlx5/mr.c 		mr->mmkey.pd = to_mpd(pd)->pdn;
mr               1482 drivers/infiniband/hw/mlx5/mr.c 			err = mlx5_ib_update_xlt(mr, 0, npages, page_shift,
mr               1485 drivers/infiniband/hw/mlx5/mr.c 			err = rereg_umr(pd, mr, access_flags, flags);
mr               1492 drivers/infiniband/hw/mlx5/mr.c 	set_mr_fields(dev, mr, npages, len, access_flags);
mr               1497 drivers/infiniband/hw/mlx5/mr.c 	ib_umem_release(mr->umem);
mr               1498 drivers/infiniband/hw/mlx5/mr.c 	mr->umem = NULL;
mr               1500 drivers/infiniband/hw/mlx5/mr.c 	clean_mr(dev, mr);
mr               1506 drivers/infiniband/hw/mlx5/mr.c 		      struct mlx5_ib_mr *mr,
mr               1516 drivers/infiniband/hw/mlx5/mr.c 	mr->descs_alloc = kzalloc(size + add_size, GFP_KERNEL);
mr               1517 drivers/infiniband/hw/mlx5/mr.c 	if (!mr->descs_alloc)
mr               1520 drivers/infiniband/hw/mlx5/mr.c 	mr->descs = PTR_ALIGN(mr->descs_alloc, MLX5_UMR_ALIGN);
mr               1522 drivers/infiniband/hw/mlx5/mr.c 	mr->desc_map = dma_map_single(device->dev.parent, mr->descs,
mr               1524 drivers/infiniband/hw/mlx5/mr.c 	if (dma_mapping_error(device->dev.parent, mr->desc_map)) {
mr               1531 drivers/infiniband/hw/mlx5/mr.c 	kfree(mr->descs_alloc);
mr               1537 drivers/infiniband/hw/mlx5/mr.c mlx5_free_priv_descs(struct mlx5_ib_mr *mr)
mr               1539 drivers/infiniband/hw/mlx5/mr.c 	if (mr->descs) {
mr               1540 drivers/infiniband/hw/mlx5/mr.c 		struct ib_device *device = mr->ibmr.device;
mr               1541 drivers/infiniband/hw/mlx5/mr.c 		int size = mr->max_descs * mr->desc_size;
mr               1543 drivers/infiniband/hw/mlx5/mr.c 		dma_unmap_single(device->dev.parent, mr->desc_map,
mr               1545 drivers/infiniband/hw/mlx5/mr.c 		kfree(mr->descs_alloc);
mr               1546 drivers/infiniband/hw/mlx5/mr.c 		mr->descs = NULL;
mr               1550 drivers/infiniband/hw/mlx5/mr.c static void clean_mr(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr)
mr               1552 drivers/infiniband/hw/mlx5/mr.c 	int allocated_from_cache = mr->allocated_from_cache;
mr               1554 drivers/infiniband/hw/mlx5/mr.c 	if (mr->sig) {
mr               1556 drivers/infiniband/hw/mlx5/mr.c 					  mr->sig->psv_memory.psv_idx))
mr               1558 drivers/infiniband/hw/mlx5/mr.c 				     mr->sig->psv_memory.psv_idx);
mr               1560 drivers/infiniband/hw/mlx5/mr.c 					  mr->sig->psv_wire.psv_idx))
mr               1562 drivers/infiniband/hw/mlx5/mr.c 				     mr->sig->psv_wire.psv_idx);
mr               1563 drivers/infiniband/hw/mlx5/mr.c 		kfree(mr->sig);
mr               1564 drivers/infiniband/hw/mlx5/mr.c 		mr->sig = NULL;
mr               1568 drivers/infiniband/hw/mlx5/mr.c 		destroy_mkey(dev, mr);
mr               1569 drivers/infiniband/hw/mlx5/mr.c 		mlx5_free_priv_descs(mr);
mr               1573 drivers/infiniband/hw/mlx5/mr.c static void dereg_mr(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr)
mr               1575 drivers/infiniband/hw/mlx5/mr.c 	int npages = mr->npages;
mr               1576 drivers/infiniband/hw/mlx5/mr.c 	struct ib_umem *umem = mr->umem;
mr               1578 drivers/infiniband/hw/mlx5/mr.c 	if (is_odp_mr(mr)) {
mr               1584 drivers/infiniband/hw/mlx5/mr.c 		WRITE_ONCE(mr->live, 0);
mr               1590 drivers/infiniband/hw/mlx5/mr.c 		if (atomic_read(&mr->num_pending_prefetch))
mr               1592 drivers/infiniband/hw/mlx5/mr.c 		WARN_ON(atomic_read(&mr->num_pending_prefetch));
mr               1600 drivers/infiniband/hw/mlx5/mr.c 			mlx5_ib_free_implicit_mr(mr);
mr               1613 drivers/infiniband/hw/mlx5/mr.c 	clean_mr(dev, mr);
mr               1619 drivers/infiniband/hw/mlx5/mr.c 	mlx5_mr_cache_free(dev, mr);
mr               1624 drivers/infiniband/hw/mlx5/mr.c 	if (!mr->allocated_from_cache)
mr               1625 drivers/infiniband/hw/mlx5/mr.c 		kfree(mr);
mr               1659 drivers/infiniband/hw/mlx5/mr.c static int _mlx5_alloc_mkey_descs(struct ib_pd *pd, struct mlx5_ib_mr *mr,
mr               1666 drivers/infiniband/hw/mlx5/mr.c 	mr->access_mode = access_mode;
mr               1667 drivers/infiniband/hw/mlx5/mr.c 	mr->desc_size = desc_size;
mr               1668 drivers/infiniband/hw/mlx5/mr.c 	mr->max_descs = ndescs;
mr               1670 drivers/infiniband/hw/mlx5/mr.c 	err = mlx5_alloc_priv_descs(pd->device, mr, ndescs, desc_size);
mr               1676 drivers/infiniband/hw/mlx5/mr.c 	err = mlx5_core_create_mkey(dev->mdev, &mr->mmkey, in, inlen);
mr               1680 drivers/infiniband/hw/mlx5/mr.c 	mr->mmkey.type = MLX5_MKEY_MR;
mr               1681 drivers/infiniband/hw/mlx5/mr.c 	mr->ibmr.lkey = mr->mmkey.key;
mr               1682 drivers/infiniband/hw/mlx5/mr.c 	mr->ibmr.rkey = mr->mmkey.key;
mr               1687 drivers/infiniband/hw/mlx5/mr.c 	mlx5_free_priv_descs(mr);
mr               1698 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr;
mr               1702 drivers/infiniband/hw/mlx5/mr.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr               1703 drivers/infiniband/hw/mlx5/mr.c 	if (!mr)
mr               1706 drivers/infiniband/hw/mlx5/mr.c 	mr->ibmr.pd = pd;
mr               1707 drivers/infiniband/hw/mlx5/mr.c 	mr->ibmr.device = pd->device;
mr               1718 drivers/infiniband/hw/mlx5/mr.c 	err = _mlx5_alloc_mkey_descs(pd, mr, ndescs, desc_size, page_shift,
mr               1723 drivers/infiniband/hw/mlx5/mr.c 	mr->umem = NULL;
mr               1726 drivers/infiniband/hw/mlx5/mr.c 	return mr;
mr               1731 drivers/infiniband/hw/mlx5/mr.c 	kfree(mr);
mr               1735 drivers/infiniband/hw/mlx5/mr.c static int mlx5_alloc_mem_reg_descs(struct ib_pd *pd, struct mlx5_ib_mr *mr,
mr               1738 drivers/infiniband/hw/mlx5/mr.c 	return _mlx5_alloc_mkey_descs(pd, mr, ndescs, sizeof(struct mlx5_mtt),
mr               1743 drivers/infiniband/hw/mlx5/mr.c static int mlx5_alloc_sg_gaps_descs(struct ib_pd *pd, struct mlx5_ib_mr *mr,
mr               1746 drivers/infiniband/hw/mlx5/mr.c 	return _mlx5_alloc_mkey_descs(pd, mr, ndescs, sizeof(struct mlx5_klm),
mr               1750 drivers/infiniband/hw/mlx5/mr.c static int mlx5_alloc_integrity_descs(struct ib_pd *pd, struct mlx5_ib_mr *mr,
mr               1759 drivers/infiniband/hw/mlx5/mr.c 	mr->sig = kzalloc(sizeof(*mr->sig), GFP_KERNEL);
mr               1760 drivers/infiniband/hw/mlx5/mr.c 	if (!mr->sig)
mr               1768 drivers/infiniband/hw/mlx5/mr.c 	mr->sig->psv_memory.psv_idx = psv_index[0];
mr               1769 drivers/infiniband/hw/mlx5/mr.c 	mr->sig->psv_wire.psv_idx = psv_index[1];
mr               1771 drivers/infiniband/hw/mlx5/mr.c 	mr->sig->sig_status_checked = true;
mr               1772 drivers/infiniband/hw/mlx5/mr.c 	mr->sig->sig_err_exists = false;
mr               1774 drivers/infiniband/hw/mlx5/mr.c 	++mr->sig->sigerr_count;
mr               1775 drivers/infiniband/hw/mlx5/mr.c 	mr->klm_mr = mlx5_ib_alloc_pi_mr(pd, max_num_sg, max_num_meta_sg,
mr               1778 drivers/infiniband/hw/mlx5/mr.c 	if (IS_ERR(mr->klm_mr)) {
mr               1779 drivers/infiniband/hw/mlx5/mr.c 		err = PTR_ERR(mr->klm_mr);
mr               1782 drivers/infiniband/hw/mlx5/mr.c 	mr->mtt_mr = mlx5_ib_alloc_pi_mr(pd, max_num_sg, max_num_meta_sg,
mr               1785 drivers/infiniband/hw/mlx5/mr.c 	if (IS_ERR(mr->mtt_mr)) {
mr               1786 drivers/infiniband/hw/mlx5/mr.c 		err = PTR_ERR(mr->mtt_mr);
mr               1795 drivers/infiniband/hw/mlx5/mr.c 	err = _mlx5_alloc_mkey_descs(pd, mr, 4, sizeof(struct mlx5_klm), 0,
mr               1803 drivers/infiniband/hw/mlx5/mr.c 	dereg_mr(to_mdev(mr->mtt_mr->ibmr.device), mr->mtt_mr);
mr               1804 drivers/infiniband/hw/mlx5/mr.c 	mr->mtt_mr = NULL;
mr               1806 drivers/infiniband/hw/mlx5/mr.c 	dereg_mr(to_mdev(mr->klm_mr->ibmr.device), mr->klm_mr);
mr               1807 drivers/infiniband/hw/mlx5/mr.c 	mr->klm_mr = NULL;
mr               1809 drivers/infiniband/hw/mlx5/mr.c 	if (mlx5_core_destroy_psv(dev->mdev, mr->sig->psv_memory.psv_idx))
mr               1811 drivers/infiniband/hw/mlx5/mr.c 			     mr->sig->psv_memory.psv_idx);
mr               1812 drivers/infiniband/hw/mlx5/mr.c 	if (mlx5_core_destroy_psv(dev->mdev, mr->sig->psv_wire.psv_idx))
mr               1814 drivers/infiniband/hw/mlx5/mr.c 			     mr->sig->psv_wire.psv_idx);
mr               1816 drivers/infiniband/hw/mlx5/mr.c 	kfree(mr->sig);
mr               1828 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr;
mr               1832 drivers/infiniband/hw/mlx5/mr.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr               1833 drivers/infiniband/hw/mlx5/mr.c 	if (!mr)
mr               1842 drivers/infiniband/hw/mlx5/mr.c 	mr->ibmr.device = pd->device;
mr               1843 drivers/infiniband/hw/mlx5/mr.c 	mr->umem = NULL;
mr               1847 drivers/infiniband/hw/mlx5/mr.c 		err = mlx5_alloc_mem_reg_descs(pd, mr, ndescs, in, inlen);
mr               1850 drivers/infiniband/hw/mlx5/mr.c 		err = mlx5_alloc_sg_gaps_descs(pd, mr, ndescs, in, inlen);
mr               1853 drivers/infiniband/hw/mlx5/mr.c 		err = mlx5_alloc_integrity_descs(pd, mr, max_num_sg,
mr               1866 drivers/infiniband/hw/mlx5/mr.c 	return &mr->ibmr;
mr               1871 drivers/infiniband/hw/mlx5/mr.c 	kfree(mr);
mr               2033 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr = to_mmr(ibmr);
mr               2037 drivers/infiniband/hw/mlx5/mr.c 	mr->meta_length = 0;
mr               2040 drivers/infiniband/hw/mlx5/mr.c 		mr->ndescs = 1;
mr               2043 drivers/infiniband/hw/mlx5/mr.c 		mr->data_length = sg_dma_len(data_sg) - sg_offset;
mr               2044 drivers/infiniband/hw/mlx5/mr.c 		mr->data_iova = sg_dma_address(data_sg) + sg_offset;
mr               2047 drivers/infiniband/hw/mlx5/mr.c 			mr->meta_ndescs = 1;
mr               2052 drivers/infiniband/hw/mlx5/mr.c 			mr->meta_length = sg_dma_len(meta_sg) - sg_offset;
mr               2053 drivers/infiniband/hw/mlx5/mr.c 			mr->pi_iova = sg_dma_address(meta_sg) + sg_offset;
mr               2055 drivers/infiniband/hw/mlx5/mr.c 		ibmr->length = mr->data_length + mr->meta_length;
mr               2062 drivers/infiniband/hw/mlx5/mr.c mlx5_ib_sg_to_klms(struct mlx5_ib_mr *mr,
mr               2071 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_klm *klms = mr->descs;
mr               2073 drivers/infiniband/hw/mlx5/mr.c 	u32 lkey = mr->ibmr.pd->local_dma_lkey;
mr               2076 drivers/infiniband/hw/mlx5/mr.c 	mr->ibmr.iova = sg_dma_address(sg) + sg_offset;
mr               2077 drivers/infiniband/hw/mlx5/mr.c 	mr->ibmr.length = 0;
mr               2080 drivers/infiniband/hw/mlx5/mr.c 		if (unlikely(i >= mr->max_descs))
mr               2085 drivers/infiniband/hw/mlx5/mr.c 		mr->ibmr.length += sg_dma_len(sg) - sg_offset;
mr               2093 drivers/infiniband/hw/mlx5/mr.c 	mr->ndescs = i;
mr               2094 drivers/infiniband/hw/mlx5/mr.c 	mr->data_length = mr->ibmr.length;
mr               2100 drivers/infiniband/hw/mlx5/mr.c 			if (unlikely(i + j >= mr->max_descs))
mr               2107 drivers/infiniband/hw/mlx5/mr.c 			mr->ibmr.length += sg_dma_len(sg) - sg_offset;
mr               2114 drivers/infiniband/hw/mlx5/mr.c 		mr->meta_ndescs = j;
mr               2115 drivers/infiniband/hw/mlx5/mr.c 		mr->meta_length = mr->ibmr.length - mr->data_length;
mr               2123 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr = to_mmr(ibmr);
mr               2126 drivers/infiniband/hw/mlx5/mr.c 	if (unlikely(mr->ndescs == mr->max_descs))
mr               2129 drivers/infiniband/hw/mlx5/mr.c 	descs = mr->descs;
mr               2130 drivers/infiniband/hw/mlx5/mr.c 	descs[mr->ndescs++] = cpu_to_be64(addr | MLX5_EN_RD | MLX5_EN_WR);
mr               2137 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr = to_mmr(ibmr);
mr               2140 drivers/infiniband/hw/mlx5/mr.c 	if (unlikely(mr->ndescs + mr->meta_ndescs == mr->max_descs))
mr               2143 drivers/infiniband/hw/mlx5/mr.c 	descs = mr->descs;
mr               2144 drivers/infiniband/hw/mlx5/mr.c 	descs[mr->ndescs + mr->meta_ndescs++] =
mr               2156 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr = to_mmr(ibmr);
mr               2157 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *pi_mr = mr->mtt_mr;
mr               2221 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr = to_mmr(ibmr);
mr               2222 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *pi_mr = mr->klm_mr;
mr               2254 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr = to_mmr(ibmr);
mr               2260 drivers/infiniband/hw/mlx5/mr.c 	mr->ndescs = 0;
mr               2261 drivers/infiniband/hw/mlx5/mr.c 	mr->data_length = 0;
mr               2262 drivers/infiniband/hw/mlx5/mr.c 	mr->data_iova = 0;
mr               2263 drivers/infiniband/hw/mlx5/mr.c 	mr->meta_ndescs = 0;
mr               2264 drivers/infiniband/hw/mlx5/mr.c 	mr->pi_iova = 0;
mr               2284 drivers/infiniband/hw/mlx5/mr.c 	pi_mr = mr->mtt_mr;
mr               2291 drivers/infiniband/hw/mlx5/mr.c 	pi_mr = mr->klm_mr;
mr               2301 drivers/infiniband/hw/mlx5/mr.c 	mr->pi_mr = pi_mr;
mr               2305 drivers/infiniband/hw/mlx5/mr.c 		ibmr->sig_attrs->meta_length = mr->meta_length;
mr               2313 drivers/infiniband/hw/mlx5/mr.c 	struct mlx5_ib_mr *mr = to_mmr(ibmr);
mr               2316 drivers/infiniband/hw/mlx5/mr.c 	mr->ndescs = 0;
mr               2318 drivers/infiniband/hw/mlx5/mr.c 	ib_dma_sync_single_for_cpu(ibmr->device, mr->desc_map,
mr               2319 drivers/infiniband/hw/mlx5/mr.c 				   mr->desc_size * mr->max_descs,
mr               2322 drivers/infiniband/hw/mlx5/mr.c 	if (mr->access_mode == MLX5_MKC_ACCESS_MODE_KLMS)
mr               2323 drivers/infiniband/hw/mlx5/mr.c 		n = mlx5_ib_sg_to_klms(mr, sg, sg_nents, sg_offset, NULL, 0,
mr               2329 drivers/infiniband/hw/mlx5/mr.c 	ib_dma_sync_single_for_device(ibmr->device, mr->desc_map,
mr               2330 drivers/infiniband/hw/mlx5/mr.c 				      mr->desc_size * mr->max_descs,
mr                 99 drivers/infiniband/hw/mlx5/odp.c 	struct mlx5_ib_mr *mr = odp->private;
mr                101 drivers/infiniband/hw/mlx5/odp.c 	return mr && mr->parent == parent && !odp->dying;
mr                104 drivers/infiniband/hw/mlx5/odp.c static struct ib_ucontext_per_mm *mr_to_per_mm(struct mlx5_ib_mr *mr)
mr                106 drivers/infiniband/hw/mlx5/odp.c 	if (WARN_ON(!mr || !is_odp_mr(mr)))
mr                109 drivers/infiniband/hw/mlx5/odp.c 	return to_ib_umem_odp(mr->umem)->per_mm;
mr                114 drivers/infiniband/hw/mlx5/odp.c 	struct mlx5_ib_mr *mr = odp->private, *parent = mr->parent;
mr                164 drivers/infiniband/hw/mlx5/odp.c 			   size_t nentries, struct mlx5_ib_mr *mr, int flags)
mr                166 drivers/infiniband/hw/mlx5/odp.c 	struct ib_pd *pd = mr->ibmr.pd;
mr                201 drivers/infiniband/hw/mlx5/odp.c 	lockdep_assert_held(&to_ib_umem_odp(mr->umem)->umem_mutex);
mr                202 drivers/infiniband/hw/mlx5/odp.c 	lockdep_assert_held(&mr->dev->mr_srcu);
mr                205 drivers/infiniband/hw/mlx5/odp.c 			 nentries * MLX5_IMR_MTT_SIZE, mr);
mr                227 drivers/infiniband/hw/mlx5/odp.c 	struct mlx5_ib_mr *mr = odp->private, *imr = mr->parent;
mr                231 drivers/infiniband/hw/mlx5/odp.c 	mr->parent = NULL;
mr                232 drivers/infiniband/hw/mlx5/odp.c 	synchronize_srcu(&mr->dev->mr_srcu);
mr                235 drivers/infiniband/hw/mlx5/odp.c 		srcu_key = srcu_read_lock(&mr->dev->mr_srcu);
mr                241 drivers/infiniband/hw/mlx5/odp.c 		srcu_read_unlock(&mr->dev->mr_srcu, srcu_key);
mr                244 drivers/infiniband/hw/mlx5/odp.c 	mlx5_mr_cache_free(mr->dev, mr);
mr                253 drivers/infiniband/hw/mlx5/odp.c 	struct mlx5_ib_mr *mr;
mr                265 drivers/infiniband/hw/mlx5/odp.c 	mr = umem_odp->private;
mr                267 drivers/infiniband/hw/mlx5/odp.c 	if (!mr || !mr->ibmr.pd)
mr                298 drivers/infiniband/hw/mlx5/odp.c 				mlx5_ib_update_xlt(mr, blk_start_idx,
mr                307 drivers/infiniband/hw/mlx5/odp.c 		mlx5_ib_update_xlt(mr, blk_start_idx,
mr                319 drivers/infiniband/hw/mlx5/odp.c 	if (unlikely(!umem_odp->npages && mr->parent &&
mr                321 drivers/infiniband/hw/mlx5/odp.c 		WRITE_ONCE(mr->live, 0);
mr                323 drivers/infiniband/hw/mlx5/odp.c 		atomic_inc(&mr->parent->num_leaf_free);
mr                424 drivers/infiniband/hw/mlx5/odp.c 	struct mlx5_ib_mr *mr;
mr                427 drivers/infiniband/hw/mlx5/odp.c 	mr = mlx5_mr_cache_alloc(dev, ksm ? MLX5_IMR_KSM_CACHE_ENTRY :
mr                430 drivers/infiniband/hw/mlx5/odp.c 	if (IS_ERR(mr))
mr                431 drivers/infiniband/hw/mlx5/odp.c 		return mr;
mr                433 drivers/infiniband/hw/mlx5/odp.c 	mr->ibmr.pd = pd;
mr                435 drivers/infiniband/hw/mlx5/odp.c 	mr->dev = dev;
mr                436 drivers/infiniband/hw/mlx5/odp.c 	mr->access_flags = access_flags;
mr                437 drivers/infiniband/hw/mlx5/odp.c 	mr->mmkey.iova = 0;
mr                438 drivers/infiniband/hw/mlx5/odp.c 	mr->umem = &umem_odp->umem;
mr                441 drivers/infiniband/hw/mlx5/odp.c 		err = mlx5_ib_update_xlt(mr, 0,
mr                449 drivers/infiniband/hw/mlx5/odp.c 		err = mlx5_ib_update_xlt(mr, 0,
mr                460 drivers/infiniband/hw/mlx5/odp.c 	mr->ibmr.lkey = mr->mmkey.key;
mr                461 drivers/infiniband/hw/mlx5/odp.c 	mr->ibmr.rkey = mr->mmkey.key;
mr                464 drivers/infiniband/hw/mlx5/odp.c 		    mr->mmkey.key, dev->mdev, mr);
mr                466 drivers/infiniband/hw/mlx5/odp.c 	return mr;
mr                470 drivers/infiniband/hw/mlx5/odp.c 	mlx5_mr_cache_free(dev, mr);
mr                475 drivers/infiniband/hw/mlx5/odp.c static struct ib_umem_odp *implicit_mr_get_data(struct mlx5_ib_mr *mr,
mr                478 drivers/infiniband/hw/mlx5/odp.c 	struct mlx5_ib_dev *dev = to_mdev(mr->ibmr.pd->device);
mr                480 drivers/infiniband/hw/mlx5/odp.c 	struct ib_umem_odp *odp_mr = to_ib_umem_odp(mr->umem);
mr                486 drivers/infiniband/hw/mlx5/odp.c 	odp = odp_lookup(addr, 1, mr);
mr                502 drivers/infiniband/hw/mlx5/odp.c 		mtt = implicit_mr_alloc(mr->ibmr.pd, odp, 0,
mr                503 drivers/infiniband/hw/mlx5/odp.c 					mr->access_flags);
mr                513 drivers/infiniband/hw/mlx5/odp.c 		mtt->parent = mr;
mr                536 drivers/infiniband/hw/mlx5/odp.c 		ret = mlx5_ib_update_xlt(mr, start_idx, nentries, 0,
mr                585 drivers/infiniband/hw/mlx5/odp.c 		struct mlx5_ib_mr *mr = umem_odp->private;
mr                587 drivers/infiniband/hw/mlx5/odp.c 		if (mr->parent != imr)
mr                611 drivers/infiniband/hw/mlx5/odp.c static int pagefault_mr(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr,
mr                616 drivers/infiniband/hw/mlx5/odp.c 	struct ib_umem_odp *odp_mr = to_ib_umem_odp(mr->umem);
mr                625 drivers/infiniband/hw/mlx5/odp.c 		odp = implicit_mr_get_data(mr, io_virt, bcnt);
mr                629 drivers/infiniband/hw/mlx5/odp.c 		mr = odp->private;
mr                639 drivers/infiniband/hw/mlx5/odp.c 	start_idx = (io_virt - (mr->mmkey.iova & page_mask)) >> page_shift;
mr                675 drivers/infiniband/hw/mlx5/odp.c 		ret = mlx5_ib_update_xlt(mr, start_idx, np,
mr                708 drivers/infiniband/hw/mlx5/odp.c 		mr = odp->private;
mr                784 drivers/infiniband/hw/mlx5/odp.c 	struct mlx5_ib_mr *mr;
mr                811 drivers/infiniband/hw/mlx5/odp.c 		mr = container_of(mmkey, struct mlx5_ib_mr, mmkey);
mr                812 drivers/infiniband/hw/mlx5/odp.c 		if (!smp_load_acquire(&mr->live) || !mr->ibmr.pd) {
mr                819 drivers/infiniband/hw/mlx5/odp.c 			if (!is_odp_mr(mr) ||
mr                820 drivers/infiniband/hw/mlx5/odp.c 			    mr->ibmr.pd != pd) {
mr                822 drivers/infiniband/hw/mlx5/odp.c 					    is_odp_mr(mr) ?  "MR is not ODP" :
mr                829 drivers/infiniband/hw/mlx5/odp.c 		if (!is_odp_mr(mr)) {
mr                838 drivers/infiniband/hw/mlx5/odp.c 		ret = pagefault_mr(dev, mr, io_virt, bcnt, bytes_mapped, flags);
mr               1644 drivers/infiniband/hw/mlx5/odp.c 		struct mlx5_ib_mr *mr;
mr               1648 drivers/infiniband/hw/mlx5/odp.c 		mr = container_of(mmkey, struct mlx5_ib_mr, mmkey);
mr               1649 drivers/infiniband/hw/mlx5/odp.c 		atomic_dec(&mr->num_pending_prefetch);
mr               1664 drivers/infiniband/hw/mlx5/odp.c 		struct mlx5_ib_mr *mr;
mr               1678 drivers/infiniband/hw/mlx5/odp.c 		mr = container_of(mmkey, struct mlx5_ib_mr, mmkey);
mr               1680 drivers/infiniband/hw/mlx5/odp.c 		if (!smp_load_acquire(&mr->live)) {
mr               1685 drivers/infiniband/hw/mlx5/odp.c 		if (mr->ibmr.pd != pd) {
mr               1690 drivers/infiniband/hw/mlx5/odp.c 		atomic_inc(&mr->num_pending_prefetch);
mr               4208 drivers/infiniband/hw/mlx5/qp.c 			    struct mlx5_ib_mr *mr, u8 flags, bool atomic)
mr               4210 drivers/infiniband/hw/mlx5/qp.c 	int size = (mr->ndescs + mr->meta_ndescs) * mr->desc_size;
mr               4342 drivers/infiniband/hw/mlx5/qp.c 			     struct mlx5_ib_mr *mr,
mr               4345 drivers/infiniband/hw/mlx5/qp.c 	int ndescs = ALIGN(mr->ndescs + mr->meta_ndescs, 8) >> 1;
mr               4349 drivers/infiniband/hw/mlx5/qp.c 	if (mr->access_mode == MLX5_MKC_ACCESS_MODE_MTT)
mr               4350 drivers/infiniband/hw/mlx5/qp.c 		seg->log2_page_size = ilog2(mr->ibmr.page_size);
mr               4351 drivers/infiniband/hw/mlx5/qp.c 	else if (mr->access_mode == MLX5_MKC_ACCESS_MODE_KLMS)
mr               4355 drivers/infiniband/hw/mlx5/qp.c 	seg->flags = get_umr_flags(access) | mr->access_mode;
mr               4358 drivers/infiniband/hw/mlx5/qp.c 	seg->start_addr = cpu_to_be64(mr->ibmr.iova);
mr               4359 drivers/infiniband/hw/mlx5/qp.c 	seg->len = cpu_to_be64(mr->ibmr.length);
mr               4393 drivers/infiniband/hw/mlx5/qp.c 			     struct mlx5_ib_mr *mr,
mr               4396 drivers/infiniband/hw/mlx5/qp.c 	int bcount = mr->desc_size * (mr->ndescs + mr->meta_ndescs);
mr               4398 drivers/infiniband/hw/mlx5/qp.c 	dseg->addr = cpu_to_be64(mr->desc_map);
mr               4605 drivers/infiniband/hw/mlx5/qp.c 	struct mlx5_ib_mr *mr = to_mmr(sig_mr);
mr               4606 drivers/infiniband/hw/mlx5/qp.c 	struct mlx5_ib_mr *pi_mr = mr->pi_mr;
mr               4735 drivers/infiniband/hw/mlx5/qp.c 	struct mlx5_ib_mr *sig_mr = to_mmr(wr->mr);
mr               4766 drivers/infiniband/hw/mlx5/qp.c 	set_sig_mkey_segment(*seg, wr->mr, wr->access, xlt_size, region_len,
mr               4772 drivers/infiniband/hw/mlx5/qp.c 	ret = set_sig_data_segment(send_wr, wr->mr, sig_attrs, qp, seg, size,
mr               4813 drivers/infiniband/hw/mlx5/qp.c 	struct mlx5_ib_mr *mr = to_mmr(wr->mr);
mr               4816 drivers/infiniband/hw/mlx5/qp.c 	int mr_list_size = (mr->ndescs + mr->meta_ndescs) * mr->desc_size;
mr               4842 drivers/infiniband/hw/mlx5/qp.c 	set_reg_umr_seg(*seg, mr, flags, atomic);
mr               4847 drivers/infiniband/hw/mlx5/qp.c 	set_reg_mkey_seg(*seg, mr, wr->key, wr->access);
mr               4853 drivers/infiniband/hw/mlx5/qp.c 		memcpy_send_wqe(&qp->sq, cur_edge, seg, size, mr->descs,
mr               4857 drivers/infiniband/hw/mlx5/qp.c 		set_reg_data_seg(*seg, mr, pd);
mr               4970 drivers/infiniband/hw/mlx5/qp.c 	struct mlx5_ib_mr *mr;
mr               5089 drivers/infiniband/hw/mlx5/qp.c 				mr = to_mmr(reg_wr(wr)->mr);
mr               5090 drivers/infiniband/hw/mlx5/qp.c 				pi_mr = mr->pi_mr;
mr               5096 drivers/infiniband/hw/mlx5/qp.c 					reg_pi_wr.mr = &pi_mr->ibmr;
mr               5128 drivers/infiniband/hw/mlx5/qp.c 						mr->ibmr.pd->local_dma_lkey;
mr               5130 drivers/infiniband/hw/mlx5/qp.c 					pa_pi_mr.ndescs = mr->ndescs;
mr               5131 drivers/infiniband/hw/mlx5/qp.c 					pa_pi_mr.data_length = mr->data_length;
mr               5132 drivers/infiniband/hw/mlx5/qp.c 					pa_pi_mr.data_iova = mr->data_iova;
mr               5133 drivers/infiniband/hw/mlx5/qp.c 					if (mr->meta_ndescs) {
mr               5135 drivers/infiniband/hw/mlx5/qp.c 							mr->meta_ndescs;
mr               5137 drivers/infiniband/hw/mlx5/qp.c 							mr->meta_length;
mr               5138 drivers/infiniband/hw/mlx5/qp.c 						pa_pi_mr.pi_iova = mr->pi_iova;
mr               5141 drivers/infiniband/hw/mlx5/qp.c 					pa_pi_mr.ibmr.length = mr->ibmr.length;
mr               5142 drivers/infiniband/hw/mlx5/qp.c 					mr->pi_mr = &pa_pi_mr;
mr               5144 drivers/infiniband/hw/mlx5/qp.c 				ctrl->imm = cpu_to_be32(mr->ibmr.rkey);
mr               5161 drivers/infiniband/hw/mlx5/qp.c 				sig_attrs = mr->ibmr.sig_attrs;
mr               5172 drivers/infiniband/hw/mlx5/qp.c 						 mr->sig->psv_memory.psv_idx,
mr               5193 drivers/infiniband/hw/mlx5/qp.c 						 mr->sig->psv_wire.psv_idx,
mr                197 drivers/infiniband/hw/mthca/mthca_allocator.c 		    int hca_write, struct mthca_mr *mr)
mr                267 drivers/infiniband/hw/mthca/mthca_allocator.c 				  mr);
mr                285 drivers/infiniband/hw/mthca/mthca_allocator.c 		    int is_direct, struct mthca_mr *mr)
mr                289 drivers/infiniband/hw/mthca/mthca_allocator.c 	if (mr)
mr                290 drivers/infiniband/hw/mthca/mthca_allocator.c 		mthca_free_mr(dev, mr);
mr                356 drivers/infiniband/hw/mthca/mthca_cq.c 			      &dev->driver_pd, 1, &buf->mr);
mr                369 drivers/infiniband/hw/mthca/mthca_cq.c 		       buf->is_direct, &buf->mr);
mr                834 drivers/infiniband/hw/mthca/mthca_cq.c 	cq_context->lkey            = cpu_to_be32(cq->buf.mr.ibmr.lkey);
mr                426 drivers/infiniband/hw/mthca/mthca_dev.h 		    int hca_write, struct mthca_mr *mr);
mr                428 drivers/infiniband/hw/mthca/mthca_dev.h 		    int is_direct, struct mthca_mr *mr);
mr                472 drivers/infiniband/hw/mthca/mthca_dev.h 		   u64 iova, u64 total_size, u32 access, struct mthca_mr *mr);
mr                474 drivers/infiniband/hw/mthca/mthca_dev.h 			   u32 access, struct mthca_mr *mr);
mr                478 drivers/infiniband/hw/mthca/mthca_dev.h 			u32 access, struct mthca_mr *mr);
mr                479 drivers/infiniband/hw/mthca/mthca_dev.h void mthca_free_mr(struct mthca_dev *dev,  struct mthca_mr *mr);
mr                523 drivers/infiniband/hw/mthca/mthca_eq.c 				  &eq->mr);
mr                543 drivers/infiniband/hw/mthca/mthca_eq.c 	eq_context->lkey            = cpu_to_be32(eq->mr.ibmr.lkey);
mr                565 drivers/infiniband/hw/mthca/mthca_eq.c 	mthca_free_mr(dev, &eq->mr);
mr                618 drivers/infiniband/hw/mthca/mthca_eq.c 	mthca_free_mr(dev, &eq->mr);
mr                430 drivers/infiniband/hw/mthca/mthca_mr.c 		   u64 iova, u64 total_size, u32 access, struct mthca_mr *mr)
mr                444 drivers/infiniband/hw/mthca/mthca_mr.c 	mr->ibmr.rkey = mr->ibmr.lkey = hw_index_to_key(dev, key);
mr                463 drivers/infiniband/hw/mthca/mthca_mr.c 	if (!mr->mtt)
mr                475 drivers/infiniband/hw/mthca/mthca_mr.c 	if (mr->mtt)
mr                478 drivers/infiniband/hw/mthca/mthca_mr.c 				    mr->mtt->first_seg * dev->limits.mtt_seg_size);
mr                481 drivers/infiniband/hw/mthca/mthca_mr.c 		mthca_dbg(dev, "Dumping MPT entry %08x:\n", mr->ibmr.lkey);
mr                513 drivers/infiniband/hw/mthca/mthca_mr.c 			   u32 access, struct mthca_mr *mr)
mr                515 drivers/infiniband/hw/mthca/mthca_mr.c 	mr->mtt = NULL;
mr                516 drivers/infiniband/hw/mthca/mthca_mr.c 	return mthca_mr_alloc(dev, pd, 12, 0, ~0ULL, access, mr);
mr                522 drivers/infiniband/hw/mthca/mthca_mr.c 			u32 access, struct mthca_mr *mr)
mr                526 drivers/infiniband/hw/mthca/mthca_mr.c 	mr->mtt = mthca_alloc_mtt(dev, list_len);
mr                527 drivers/infiniband/hw/mthca/mthca_mr.c 	if (IS_ERR(mr->mtt))
mr                528 drivers/infiniband/hw/mthca/mthca_mr.c 		return PTR_ERR(mr->mtt);
mr                530 drivers/infiniband/hw/mthca/mthca_mr.c 	err = mthca_write_mtt(dev, mr->mtt, 0, buffer_list, list_len);
mr                532 drivers/infiniband/hw/mthca/mthca_mr.c 		mthca_free_mtt(dev, mr->mtt);
mr                537 drivers/infiniband/hw/mthca/mthca_mr.c 			     total_size, access, mr);
mr                539 drivers/infiniband/hw/mthca/mthca_mr.c 		mthca_free_mtt(dev, mr->mtt);
mr                553 drivers/infiniband/hw/mthca/mthca_mr.c void mthca_free_mr(struct mthca_dev *dev, struct mthca_mr *mr)
mr                558 drivers/infiniband/hw/mthca/mthca_mr.c 			      key_to_hw_index(dev, mr->ibmr.lkey) &
mr                563 drivers/infiniband/hw/mthca/mthca_mr.c 	mthca_free_region(dev, mr->ibmr.lkey);
mr                564 drivers/infiniband/hw/mthca/mthca_mr.c 	mthca_free_mtt(dev, mr->mtt);
mr                568 drivers/infiniband/hw/mthca/mthca_mr.c 		    u32 access, struct mthca_fmr *mr)
mr                574 drivers/infiniband/hw/mthca/mthca_mr.c 	int list_len = mr->attr.max_pages;
mr                578 drivers/infiniband/hw/mthca/mthca_mr.c 	if (mr->attr.page_shift < 12 || mr->attr.page_shift >= 32)
mr                583 drivers/infiniband/hw/mthca/mthca_mr.c 	    mr->attr.max_pages * sizeof *mr->mem.arbel.mtts > PAGE_SIZE)
mr                586 drivers/infiniband/hw/mthca/mthca_mr.c 	mr->maps = 0;
mr                594 drivers/infiniband/hw/mthca/mthca_mr.c 	mr->ibmr.rkey = mr->ibmr.lkey = hw_index_to_key(dev, key);
mr                601 drivers/infiniband/hw/mthca/mthca_mr.c 		mr->mem.arbel.mpt = mthca_table_find(dev->mr_table.mpt_table, key, NULL);
mr                602 drivers/infiniband/hw/mthca/mthca_mr.c 		BUG_ON(!mr->mem.arbel.mpt);
mr                604 drivers/infiniband/hw/mthca/mthca_mr.c 		mr->mem.tavor.mpt = dev->mr_table.tavor_fmr.mpt_base +
mr                605 drivers/infiniband/hw/mthca/mthca_mr.c 			sizeof *(mr->mem.tavor.mpt) * idx;
mr                607 drivers/infiniband/hw/mthca/mthca_mr.c 	mr->mtt = __mthca_alloc_mtt(dev, list_len, dev->mr_table.fmr_mtt_buddy);
mr                608 drivers/infiniband/hw/mthca/mthca_mr.c 	if (IS_ERR(mr->mtt)) {
mr                609 drivers/infiniband/hw/mthca/mthca_mr.c 		err = PTR_ERR(mr->mtt);
mr                613 drivers/infiniband/hw/mthca/mthca_mr.c 	mtt_seg = mr->mtt->first_seg * dev->limits.mtt_seg_size;
mr                616 drivers/infiniband/hw/mthca/mthca_mr.c 		mr->mem.arbel.mtts = mthca_table_find(dev->mr_table.mtt_table,
mr                617 drivers/infiniband/hw/mthca/mthca_mr.c 						      mr->mtt->first_seg,
mr                618 drivers/infiniband/hw/mthca/mthca_mr.c 						      &mr->mem.arbel.dma_handle);
mr                619 drivers/infiniband/hw/mthca/mthca_mr.c 		BUG_ON(!mr->mem.arbel.mtts);
mr                621 drivers/infiniband/hw/mthca/mthca_mr.c 		mr->mem.tavor.mtts = dev->mr_table.tavor_fmr.mtt_base + mtt_seg;
mr                636 drivers/infiniband/hw/mthca/mthca_mr.c 	mpt_entry->page_size = cpu_to_be32(mr->attr.page_shift - 12);
mr                644 drivers/infiniband/hw/mthca/mthca_mr.c 		mthca_dbg(dev, "Dumping MPT entry %08x:\n", mr->ibmr.lkey);
mr                668 drivers/infiniband/hw/mthca/mthca_mr.c 	mthca_free_mtt(dev, mr->mtt);
mr                430 drivers/infiniband/hw/mthca/mthca_provider.c 		srq->mr.ibmr.lkey = ucmd.lkey;
mr                516 drivers/infiniband/hw/mthca/mthca_provider.c 			qp->mr.ibmr.lkey = ucmd.lkey;
mr                643 drivers/infiniband/hw/mthca/mthca_provider.c 		cq->buf.mr.ibmr.lkey = ucmd.lkey;
mr                748 drivers/infiniband/hw/mthca/mthca_provider.c 		lkey = cq->resize_buf->buf.mr.ibmr.lkey;
mr                833 drivers/infiniband/hw/mthca/mthca_provider.c 	struct mthca_mr *mr;
mr                836 drivers/infiniband/hw/mthca/mthca_provider.c 	mr = kmalloc(sizeof *mr, GFP_KERNEL);
mr                837 drivers/infiniband/hw/mthca/mthca_provider.c 	if (!mr)
mr                842 drivers/infiniband/hw/mthca/mthca_provider.c 				     convert_access(acc), mr);
mr                845 drivers/infiniband/hw/mthca/mthca_provider.c 		kfree(mr);
mr                849 drivers/infiniband/hw/mthca/mthca_provider.c 	mr->umem = NULL;
mr                851 drivers/infiniband/hw/mthca/mthca_provider.c 	return &mr->ibmr;
mr                861 drivers/infiniband/hw/mthca/mthca_provider.c 	struct mthca_mr *mr;
mr                879 drivers/infiniband/hw/mthca/mthca_provider.c 	mr = kmalloc(sizeof *mr, GFP_KERNEL);
mr                880 drivers/infiniband/hw/mthca/mthca_provider.c 	if (!mr)
mr                883 drivers/infiniband/hw/mthca/mthca_provider.c 	mr->umem = ib_umem_get(udata, start, length, acc,
mr                886 drivers/infiniband/hw/mthca/mthca_provider.c 	if (IS_ERR(mr->umem)) {
mr                887 drivers/infiniband/hw/mthca/mthca_provider.c 		err = PTR_ERR(mr->umem);
mr                891 drivers/infiniband/hw/mthca/mthca_provider.c 	n = ib_umem_num_pages(mr->umem);
mr                893 drivers/infiniband/hw/mthca/mthca_provider.c 	mr->mtt = mthca_alloc_mtt(dev, n);
mr                894 drivers/infiniband/hw/mthca/mthca_provider.c 	if (IS_ERR(mr->mtt)) {
mr                895 drivers/infiniband/hw/mthca/mthca_provider.c 		err = PTR_ERR(mr->mtt);
mr                909 drivers/infiniband/hw/mthca/mthca_provider.c 	for_each_sg_dma_page(mr->umem->sg_head.sgl, &sg_iter, mr->umem->nmap, 0) {
mr                917 drivers/infiniband/hw/mthca/mthca_provider.c 			err = mthca_write_mtt(dev, mr->mtt, n, pages, i);
mr                926 drivers/infiniband/hw/mthca/mthca_provider.c 		err = mthca_write_mtt(dev, mr->mtt, n, pages, i);
mr                933 drivers/infiniband/hw/mthca/mthca_provider.c 			     convert_access(acc), mr);
mr                938 drivers/infiniband/hw/mthca/mthca_provider.c 	return &mr->ibmr;
mr                941 drivers/infiniband/hw/mthca/mthca_provider.c 	mthca_free_mtt(dev, mr->mtt);
mr                944 drivers/infiniband/hw/mthca/mthca_provider.c 	ib_umem_release(mr->umem);
mr                947 drivers/infiniband/hw/mthca/mthca_provider.c 	kfree(mr);
mr                951 drivers/infiniband/hw/mthca/mthca_provider.c static int mthca_dereg_mr(struct ib_mr *mr, struct ib_udata *udata)
mr                953 drivers/infiniband/hw/mthca/mthca_provider.c 	struct mthca_mr *mmr = to_mmr(mr);
mr                955 drivers/infiniband/hw/mthca/mthca_provider.c 	mthca_free_mr(to_mdev(mr->device), mmr);
mr                115 drivers/infiniband/hw/mthca/mthca_provider.h 	struct mthca_mr        mr;
mr                187 drivers/infiniband/hw/mthca/mthca_provider.h 	struct mthca_mr		mr;
mr                240 drivers/infiniband/hw/mthca/mthca_provider.h 	struct mthca_mr		mr;
mr                273 drivers/infiniband/hw/mthca/mthca_provider.h 	struct mthca_mr        mr;
mr                711 drivers/infiniband/hw/mthca/mthca_qp.c 	qp_context->wqe_lkey   = cpu_to_be32(qp->mr.ibmr.lkey);
mr               1069 drivers/infiniband/hw/mthca/mthca_qp.c 			      &qp->queue, &qp->is_direct, pd, 0, &qp->mr);
mr               1085 drivers/infiniband/hw/mthca/mthca_qp.c 		       &qp->queue, qp->is_direct, &qp->mr);
mr                110 drivers/infiniband/hw/mthca/mthca_srq.c 	context->lkey        = cpu_to_be32(srq->mr.ibmr.lkey);
mr                137 drivers/infiniband/hw/mthca/mthca_srq.c 	context->lkey = cpu_to_be32(srq->mr.ibmr.lkey);
mr                150 drivers/infiniband/hw/mthca/mthca_srq.c 		       srq->is_direct, &srq->mr);
mr                171 drivers/infiniband/hw/mthca/mthca_srq.c 			      &srq->queue, &srq->is_direct, pd, 1, &srq->mr);
mr                699 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c static int ocrdma_alloc_lkey(struct ocrdma_dev *dev, struct ocrdma_mr *mr,
mr                704 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.fr_mr = 0;
mr                705 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.local_rd = 1;
mr                706 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.remote_rd = (acc & IB_ACCESS_REMOTE_READ) ? 1 : 0;
mr                707 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.remote_wr = (acc & IB_ACCESS_REMOTE_WRITE) ? 1 : 0;
mr                708 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.local_wr = (acc & IB_ACCESS_LOCAL_WRITE) ? 1 : 0;
mr                709 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.mw_bind = (acc & IB_ACCESS_MW_BIND) ? 1 : 0;
mr                710 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.remote_atomic = (acc & IB_ACCESS_REMOTE_ATOMIC) ? 1 : 0;
mr                711 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.num_pbls = num_pbls;
mr                713 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	status = ocrdma_mbx_alloc_lkey(dev, &mr->hwmr, pdid, addr_check);
mr                717 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->ibmr.lkey = mr->hwmr.lkey;
mr                718 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	if (mr->hwmr.remote_wr || mr->hwmr.remote_rd)
mr                719 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 		mr->ibmr.rkey = mr->hwmr.lkey;
mr                726 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	struct ocrdma_mr *mr;
mr                735 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr                736 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	if (!mr)
mr                739 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	status = ocrdma_alloc_lkey(dev, mr, pd->id, acc, 0,
mr                742 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 		kfree(mr);
mr                746 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	return &mr->ibmr;
mr                750 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 				   struct ocrdma_hw_mr *mr)
mr                755 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	if (mr->pbl_table) {
mr                756 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 		for (i = 0; i < mr->num_pbls; i++) {
mr                757 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 			if (!mr->pbl_table[i].va)
mr                759 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 			dma_free_coherent(&pdev->dev, mr->pbl_size,
mr                760 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 					  mr->pbl_table[i].va,
mr                761 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 					  mr->pbl_table[i].pa);
mr                763 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 		kfree(mr->pbl_table);
mr                764 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 		mr->pbl_table = NULL;
mr                768 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c static int ocrdma_get_pbl_info(struct ocrdma_dev *dev, struct ocrdma_mr *mr,
mr                787 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.num_pbes = num_pbes;
mr                788 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.num_pbls = num_pbls;
mr                789 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.pbl_size = pbl_size;
mr                793 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c static int ocrdma_build_pbl_tbl(struct ocrdma_dev *dev, struct ocrdma_hw_mr *mr)
mr                797 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	u32 dma_len = mr->pbl_size;
mr                802 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->pbl_table = kcalloc(mr->num_pbls, sizeof(struct ocrdma_pbl),
mr                805 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	if (!mr->pbl_table)
mr                808 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	for (i = 0; i < mr->num_pbls; i++) {
mr                811 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 			ocrdma_free_mr_pbl_tbl(dev, mr);
mr                815 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 		mr->pbl_table[i].va = va;
mr                816 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 		mr->pbl_table[i].pa = pa;
mr                821 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c static void build_user_pbes(struct ocrdma_dev *dev, struct ocrdma_mr *mr,
mr                826 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	struct ocrdma_pbl *pbl_tbl = mr->hwmr.pbl_table;
mr                827 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	struct ib_umem *umem = mr->umem;
mr                831 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	if (!mr->hwmr.num_pbes)
mr                853 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 		if (pbe_cnt == (mr->hwmr.pbl_size / sizeof(u64))) {
mr                866 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	struct ocrdma_mr *mr;
mr                875 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr                876 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	if (!mr)
mr                878 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->umem = ib_umem_get(udata, start, len, acc, 0);
mr                879 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	if (IS_ERR(mr->umem)) {
mr                883 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	num_pbes = ib_umem_page_count(mr->umem);
mr                884 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	status = ocrdma_get_pbl_info(dev, mr, num_pbes);
mr                888 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.pbe_size = PAGE_SIZE;
mr                889 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.fbo = ib_umem_offset(mr->umem);
mr                890 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.va = usr_addr;
mr                891 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.len = len;
mr                892 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.remote_wr = (acc & IB_ACCESS_REMOTE_WRITE) ? 1 : 0;
mr                893 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.remote_rd = (acc & IB_ACCESS_REMOTE_READ) ? 1 : 0;
mr                894 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.local_wr = (acc & IB_ACCESS_LOCAL_WRITE) ? 1 : 0;
mr                895 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.local_rd = 1;
mr                896 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.remote_atomic = (acc & IB_ACCESS_REMOTE_ATOMIC) ? 1 : 0;
mr                897 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	status = ocrdma_build_pbl_tbl(dev, &mr->hwmr);
mr                900 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	build_user_pbes(dev, mr, num_pbes);
mr                901 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	status = ocrdma_reg_mr(dev, &mr->hwmr, pd->id, acc);
mr                904 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->ibmr.lkey = mr->hwmr.lkey;
mr                905 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	if (mr->hwmr.remote_wr || mr->hwmr.remote_rd)
mr                906 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 		mr->ibmr.rkey = mr->hwmr.lkey;
mr                908 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	return &mr->ibmr;
mr                911 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	ocrdma_free_mr_pbl_tbl(dev, &mr->hwmr);
mr                913 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	kfree(mr);
mr                919 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	struct ocrdma_mr *mr = get_ocrdma_mr(ib_mr);
mr                922 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	(void) ocrdma_mbx_dealloc_lkey(dev, mr->hwmr.fr_mr, mr->hwmr.lkey);
mr                924 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	kfree(mr->pages);
mr                925 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	ocrdma_free_mr_pbl_tbl(dev, &mr->hwmr);
mr                928 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	ib_umem_release(mr->umem);
mr                929 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	kfree(mr);
mr               2044 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	struct ocrdma_mr *mr = get_ocrdma_mr(wr->mr);
mr               2045 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	struct ocrdma_pbl *pbl_tbl = mr->hwmr.pbl_table;
mr               2062 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	hdr->total_len = mr->ibmr.length;
mr               2064 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	fbo = mr->ibmr.iova - mr->pages[0];
mr               2066 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	fast_reg->va_hi = upper_32_bits(mr->ibmr.iova);
mr               2067 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	fast_reg->va_lo = (u32) (mr->ibmr.iova & 0xffffffff);
mr               2070 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	fast_reg->num_sges = mr->npages;
mr               2071 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	fast_reg->size_sge = get_encoded_page_size(mr->ibmr.page_size);
mr               2074 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	for (i = 0; i < mr->npages; i++) {
mr               2075 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 		u64 buf_addr = mr->pages[i];
mr               2085 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 		if (num_pbes == (mr->hwmr.pbl_size/sizeof(u64))) {
mr               2915 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	struct ocrdma_mr *mr;
mr               2925 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr               2926 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	if (!mr)
mr               2929 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->pages = kcalloc(max_num_sg, sizeof(u64), GFP_KERNEL);
mr               2930 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	if (!mr->pages) {
mr               2935 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	status = ocrdma_get_pbl_info(dev, mr, max_num_sg);
mr               2938 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.fr_mr = 1;
mr               2939 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.remote_rd = 0;
mr               2940 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.remote_wr = 0;
mr               2941 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.local_rd = 0;
mr               2942 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.local_wr = 0;
mr               2943 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->hwmr.mw_bind = 0;
mr               2944 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	status = ocrdma_build_pbl_tbl(dev, &mr->hwmr);
mr               2947 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	status = ocrdma_reg_mr(dev, &mr->hwmr, pd->id, 0);
mr               2950 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->ibmr.rkey = mr->hwmr.lkey;
mr               2951 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->ibmr.lkey = mr->hwmr.lkey;
mr               2952 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	dev->stag_arr[(mr->hwmr.lkey >> 8) & (OCRDMA_MAX_STAG - 1)] =
mr               2953 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 		(unsigned long) mr;
mr               2954 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	return &mr->ibmr;
mr               2956 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	ocrdma_free_mr_pbl_tbl(dev, &mr->hwmr);
mr               2958 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	kfree(mr->pages);
mr               2960 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	kfree(mr);
mr               2966 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	struct ocrdma_mr *mr = get_ocrdma_mr(ibmr);
mr               2968 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	if (unlikely(mr->npages == mr->hwmr.num_pbes))
mr               2971 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->pages[mr->npages++] = addr;
mr               2979 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	struct ocrdma_mr *mr = get_ocrdma_mr(ibmr);
mr               2981 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mr->npages = 0;
mr                435 drivers/infiniband/hw/qedr/qedr.h 		struct qedr_mr *mr;
mr               2606 drivers/infiniband/hw/qedr/verbs.c 	struct qedr_mr *mr;
mr               2618 drivers/infiniband/hw/qedr/verbs.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr               2619 drivers/infiniband/hw/qedr/verbs.c 	if (!mr)
mr               2622 drivers/infiniband/hw/qedr/verbs.c 	mr->type = QEDR_MR_USER;
mr               2624 drivers/infiniband/hw/qedr/verbs.c 	mr->umem = ib_umem_get(udata, start, len, acc, 0);
mr               2625 drivers/infiniband/hw/qedr/verbs.c 	if (IS_ERR(mr->umem)) {
mr               2630 drivers/infiniband/hw/qedr/verbs.c 	rc = init_mr_info(dev, &mr->info, ib_umem_page_count(mr->umem), 1);
mr               2634 drivers/infiniband/hw/qedr/verbs.c 	qedr_populate_pbls(dev, mr->umem, mr->info.pbl_table,
mr               2635 drivers/infiniband/hw/qedr/verbs.c 			   &mr->info.pbl_info, PAGE_SHIFT);
mr               2637 drivers/infiniband/hw/qedr/verbs.c 	rc = dev->ops->rdma_alloc_tid(dev->rdma_ctx, &mr->hw_mr.itid);
mr               2644 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.tid_type = QED_RDMA_TID_REGISTERED_MR;
mr               2645 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.key = 0;
mr               2646 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.pd = pd->pd_id;
mr               2647 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.local_read = 1;
mr               2648 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.local_write = (acc & IB_ACCESS_LOCAL_WRITE) ? 1 : 0;
mr               2649 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.remote_read = (acc & IB_ACCESS_REMOTE_READ) ? 1 : 0;
mr               2650 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.remote_write = (acc & IB_ACCESS_REMOTE_WRITE) ? 1 : 0;
mr               2651 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.remote_atomic = (acc & IB_ACCESS_REMOTE_ATOMIC) ? 1 : 0;
mr               2652 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.mw_bind = false;
mr               2653 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.pbl_ptr = mr->info.pbl_table[0].pa;
mr               2654 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.pbl_two_level = mr->info.pbl_info.two_layered;
mr               2655 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.pbl_page_size_log = ilog2(mr->info.pbl_info.pbl_size);
mr               2656 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.page_size_log = PAGE_SHIFT;
mr               2657 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.fbo = ib_umem_offset(mr->umem);
mr               2658 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.length = len;
mr               2659 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.vaddr = usr_addr;
mr               2660 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.zbva = false;
mr               2661 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.phy_mr = false;
mr               2662 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.dma_mr = false;
mr               2664 drivers/infiniband/hw/qedr/verbs.c 	rc = dev->ops->rdma_register_tid(dev->rdma_ctx, &mr->hw_mr);
mr               2670 drivers/infiniband/hw/qedr/verbs.c 	mr->ibmr.lkey = mr->hw_mr.itid << 8 | mr->hw_mr.key;
mr               2671 drivers/infiniband/hw/qedr/verbs.c 	if (mr->hw_mr.remote_write || mr->hw_mr.remote_read ||
mr               2672 drivers/infiniband/hw/qedr/verbs.c 	    mr->hw_mr.remote_atomic)
mr               2673 drivers/infiniband/hw/qedr/verbs.c 		mr->ibmr.rkey = mr->hw_mr.itid << 8 | mr->hw_mr.key;
mr               2676 drivers/infiniband/hw/qedr/verbs.c 		 mr->ibmr.lkey);
mr               2677 drivers/infiniband/hw/qedr/verbs.c 	return &mr->ibmr;
mr               2680 drivers/infiniband/hw/qedr/verbs.c 	dev->ops->rdma_free_tid(dev->rdma_ctx, mr->hw_mr.itid);
mr               2682 drivers/infiniband/hw/qedr/verbs.c 	qedr_free_pbl(dev, &mr->info.pbl_info, mr->info.pbl_table);
mr               2684 drivers/infiniband/hw/qedr/verbs.c 	kfree(mr);
mr               2690 drivers/infiniband/hw/qedr/verbs.c 	struct qedr_mr *mr = get_qedr_mr(ib_mr);
mr               2694 drivers/infiniband/hw/qedr/verbs.c 	rc = dev->ops->rdma_deregister_tid(dev->rdma_ctx, mr->hw_mr.itid);
mr               2698 drivers/infiniband/hw/qedr/verbs.c 	dev->ops->rdma_free_tid(dev->rdma_ctx, mr->hw_mr.itid);
mr               2700 drivers/infiniband/hw/qedr/verbs.c 	if (mr->type != QEDR_MR_DMA)
mr               2701 drivers/infiniband/hw/qedr/verbs.c 		free_mr_info(dev, &mr->info);
mr               2704 drivers/infiniband/hw/qedr/verbs.c 	ib_umem_release(mr->umem);
mr               2706 drivers/infiniband/hw/qedr/verbs.c 	kfree(mr);
mr               2716 drivers/infiniband/hw/qedr/verbs.c 	struct qedr_mr *mr;
mr               2723 drivers/infiniband/hw/qedr/verbs.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr               2724 drivers/infiniband/hw/qedr/verbs.c 	if (!mr)
mr               2727 drivers/infiniband/hw/qedr/verbs.c 	mr->dev = dev;
mr               2728 drivers/infiniband/hw/qedr/verbs.c 	mr->type = QEDR_MR_FRMR;
mr               2730 drivers/infiniband/hw/qedr/verbs.c 	rc = init_mr_info(dev, &mr->info, max_page_list_len, 1);
mr               2734 drivers/infiniband/hw/qedr/verbs.c 	rc = dev->ops->rdma_alloc_tid(dev->rdma_ctx, &mr->hw_mr.itid);
mr               2741 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.tid_type = QED_RDMA_TID_FMR;
mr               2742 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.key = 0;
mr               2743 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.pd = pd->pd_id;
mr               2744 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.local_read = 1;
mr               2745 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.local_write = 0;
mr               2746 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.remote_read = 0;
mr               2747 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.remote_write = 0;
mr               2748 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.remote_atomic = 0;
mr               2749 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.mw_bind = false;
mr               2750 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.pbl_ptr = 0;
mr               2751 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.pbl_two_level = mr->info.pbl_info.two_layered;
mr               2752 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.pbl_page_size_log = ilog2(mr->info.pbl_info.pbl_size);
mr               2753 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.fbo = 0;
mr               2754 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.length = 0;
mr               2755 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.vaddr = 0;
mr               2756 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.zbva = false;
mr               2757 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.phy_mr = true;
mr               2758 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.dma_mr = false;
mr               2760 drivers/infiniband/hw/qedr/verbs.c 	rc = dev->ops->rdma_register_tid(dev->rdma_ctx, &mr->hw_mr);
mr               2766 drivers/infiniband/hw/qedr/verbs.c 	mr->ibmr.lkey = mr->hw_mr.itid << 8 | mr->hw_mr.key;
mr               2767 drivers/infiniband/hw/qedr/verbs.c 	mr->ibmr.rkey = mr->ibmr.lkey;
mr               2769 drivers/infiniband/hw/qedr/verbs.c 	DP_DEBUG(dev, QEDR_MSG_MR, "alloc frmr: %x\n", mr->ibmr.lkey);
mr               2770 drivers/infiniband/hw/qedr/verbs.c 	return mr;
mr               2773 drivers/infiniband/hw/qedr/verbs.c 	dev->ops->rdma_free_tid(dev->rdma_ctx, mr->hw_mr.itid);
mr               2775 drivers/infiniband/hw/qedr/verbs.c 	kfree(mr);
mr               2782 drivers/infiniband/hw/qedr/verbs.c 	struct qedr_mr *mr;
mr               2787 drivers/infiniband/hw/qedr/verbs.c 	mr = __qedr_alloc_mr(ibpd, max_num_sg);
mr               2789 drivers/infiniband/hw/qedr/verbs.c 	if (IS_ERR(mr))
mr               2792 drivers/infiniband/hw/qedr/verbs.c 	return &mr->ibmr;
mr               2797 drivers/infiniband/hw/qedr/verbs.c 	struct qedr_mr *mr = get_qedr_mr(ibmr);
mr               2802 drivers/infiniband/hw/qedr/verbs.c 	if (unlikely(mr->npages == mr->info.pbl_info.num_pbes)) {
mr               2803 drivers/infiniband/hw/qedr/verbs.c 		DP_ERR(mr->dev, "qedr_set_page fails when %d\n", mr->npages);
mr               2807 drivers/infiniband/hw/qedr/verbs.c 	DP_DEBUG(mr->dev, QEDR_MSG_MR, "qedr_set_page pages[%d] = 0x%llx\n",
mr               2808 drivers/infiniband/hw/qedr/verbs.c 		 mr->npages, addr);
mr               2810 drivers/infiniband/hw/qedr/verbs.c 	pbes_in_page = mr->info.pbl_info.pbl_size / sizeof(u64);
mr               2811 drivers/infiniband/hw/qedr/verbs.c 	pbl_table = mr->info.pbl_table + (mr->npages / pbes_in_page);
mr               2813 drivers/infiniband/hw/qedr/verbs.c 	pbe +=  mr->npages % pbes_in_page;
mr               2817 drivers/infiniband/hw/qedr/verbs.c 	mr->npages++;
mr               2845 drivers/infiniband/hw/qedr/verbs.c 	struct qedr_mr *mr = get_qedr_mr(ibmr);
mr               2847 drivers/infiniband/hw/qedr/verbs.c 	mr->npages = 0;
mr               2849 drivers/infiniband/hw/qedr/verbs.c 	handle_completed_mrs(mr->dev, &mr->info);
mr               2857 drivers/infiniband/hw/qedr/verbs.c 	struct qedr_mr *mr;
mr               2860 drivers/infiniband/hw/qedr/verbs.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr               2861 drivers/infiniband/hw/qedr/verbs.c 	if (!mr)
mr               2864 drivers/infiniband/hw/qedr/verbs.c 	mr->type = QEDR_MR_DMA;
mr               2866 drivers/infiniband/hw/qedr/verbs.c 	rc = dev->ops->rdma_alloc_tid(dev->rdma_ctx, &mr->hw_mr.itid);
mr               2873 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.tid_type = QED_RDMA_TID_REGISTERED_MR;
mr               2874 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.pd = pd->pd_id;
mr               2875 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.local_read = 1;
mr               2876 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.local_write = (acc & IB_ACCESS_LOCAL_WRITE) ? 1 : 0;
mr               2877 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.remote_read = (acc & IB_ACCESS_REMOTE_READ) ? 1 : 0;
mr               2878 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.remote_write = (acc & IB_ACCESS_REMOTE_WRITE) ? 1 : 0;
mr               2879 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.remote_atomic = (acc & IB_ACCESS_REMOTE_ATOMIC) ? 1 : 0;
mr               2880 drivers/infiniband/hw/qedr/verbs.c 	mr->hw_mr.dma_mr = true;
mr               2882 drivers/infiniband/hw/qedr/verbs.c 	rc = dev->ops->rdma_register_tid(dev->rdma_ctx, &mr->hw_mr);
mr               2888 drivers/infiniband/hw/qedr/verbs.c 	mr->ibmr.lkey = mr->hw_mr.itid << 8 | mr->hw_mr.key;
mr               2889 drivers/infiniband/hw/qedr/verbs.c 	if (mr->hw_mr.remote_write || mr->hw_mr.remote_read ||
mr               2890 drivers/infiniband/hw/qedr/verbs.c 	    mr->hw_mr.remote_atomic)
mr               2891 drivers/infiniband/hw/qedr/verbs.c 		mr->ibmr.rkey = mr->hw_mr.itid << 8 | mr->hw_mr.key;
mr               2893 drivers/infiniband/hw/qedr/verbs.c 	DP_DEBUG(dev, QEDR_MSG_MR, "get dma mr: lkey = %x\n", mr->ibmr.lkey);
mr               2894 drivers/infiniband/hw/qedr/verbs.c 	return &mr->ibmr;
mr               2897 drivers/infiniband/hw/qedr/verbs.c 	dev->ops->rdma_free_tid(dev->rdma_ctx, mr->hw_mr.itid);
mr               2899 drivers/infiniband/hw/qedr/verbs.c 	kfree(mr);
mr               3079 drivers/infiniband/hw/qedr/verbs.c 	struct qedr_mr *mr = get_qedr_mr(wr->mr);
mr               3083 drivers/infiniband/hw/qedr/verbs.c 	fwqe1->addr.hi = upper_32_bits(mr->ibmr.iova);
mr               3084 drivers/infiniband/hw/qedr/verbs.c 	fwqe1->addr.lo = lower_32_bits(mr->ibmr.iova);
mr               3101 drivers/infiniband/hw/qedr/verbs.c 		   ilog2(mr->ibmr.page_size) - 12);
mr               3104 drivers/infiniband/hw/qedr/verbs.c 	fwqe2->length_lo = mr->ibmr.length;
mr               3105 drivers/infiniband/hw/qedr/verbs.c 	fwqe2->pbl_addr.hi = upper_32_bits(mr->info.pbl_table->pa);
mr               3106 drivers/infiniband/hw/qedr/verbs.c 	fwqe2->pbl_addr.lo = lower_32_bits(mr->info.pbl_table->pa);
mr               3108 drivers/infiniband/hw/qedr/verbs.c 	qp->wqe_wr_id[qp->sq.prod].mr = mr;
mr               3694 drivers/infiniband/hw/qedr/verbs.c 		qp->wqe_wr_id[qp->sq.cons].mr->info.completed++;
mr               3730 drivers/infiniband/hw/qedr/verbs.c 			qp->wqe_wr_id[qp->sq.cons].mr->info.completed++;
mr                255 drivers/infiniband/hw/qib/qib.h 	struct rvt_mregion	*mr;
mr                 82 drivers/infiniband/hw/qib/qib_rc.c 		if (e->rdma_sge.mr) {
mr                 83 drivers/infiniband/hw/qib/qib_rc.c 			rvt_put_mr(e->rdma_sge.mr);
mr                 84 drivers/infiniband/hw/qib/qib_rc.c 			e->rdma_sge.mr = NULL;
mr                114 drivers/infiniband/hw/qib/qib_rc.c 			if (len && !e->rdma_sge.mr) {
mr                119 drivers/infiniband/hw/qib/qib_rc.c 			qp->s_rdma_mr = e->rdma_sge.mr;
mr                155 drivers/infiniband/hw/qib/qib_rc.c 		qp->s_rdma_mr = qp->s_ack_rdma_sge.sge.mr;
mr               1600 drivers/infiniband/hw/qib/qib_rc.c 		if (e->rdma_sge.mr) {
mr               1601 drivers/infiniband/hw/qib/qib_rc.c 			rvt_put_mr(e->rdma_sge.mr);
mr               1602 drivers/infiniband/hw/qib/qib_rc.c 			e->rdma_sge.mr = NULL;
mr               1915 drivers/infiniband/hw/qib/qib_rc.c 			qp->r_sge.sge.mr = NULL;
mr               1954 drivers/infiniband/hw/qib/qib_rc.c 		if (e->opcode == OP(RDMA_READ_REQUEST) && e->rdma_sge.mr) {
mr               1955 drivers/infiniband/hw/qib/qib_rc.c 			rvt_put_mr(e->rdma_sge.mr);
mr               1956 drivers/infiniband/hw/qib/qib_rc.c 			e->rdma_sge.mr = NULL;
mr               1976 drivers/infiniband/hw/qib/qib_rc.c 			e->rdma_sge.mr = NULL;
mr               2025 drivers/infiniband/hw/qib/qib_rc.c 		if (e->opcode == OP(RDMA_READ_REQUEST) && e->rdma_sge.mr) {
mr               2026 drivers/infiniband/hw/qib/qib_rc.c 			rvt_put_mr(e->rdma_sge.mr);
mr               2027 drivers/infiniband/hw/qib/qib_rc.c 			e->rdma_sge.mr = NULL;
mr               2047 drivers/infiniband/hw/qib/qib_rc.c 		rvt_put_mr(qp->r_sge.sge.mr);
mr                432 drivers/infiniband/hw/qib/qib_uc.c 			qp->r_sge.sge.mr = NULL;
mr                185 drivers/infiniband/hw/qib/qib_ud.c 		} else if (sge->length == 0 && sge->mr->lkey) {
mr                187 drivers/infiniband/hw/qib/qib_ud.c 				if (++sge->m >= sge->mr->mapsz)
mr                192 drivers/infiniband/hw/qib/qib_ud.c 				sge->mr->map[sge->m]->segs[sge->n].vaddr;
mr                194 drivers/infiniband/hw/qib/qib_ud.c 				sge->mr->map[sge->m]->segs[sge->n].length;
mr                161 drivers/infiniband/hw/qib/qib_verbs.c 		} else if (sge.length == 0 && sge.mr->lkey) {
mr                163 drivers/infiniband/hw/qib/qib_verbs.c 				if (++sge.m >= sge.mr->mapsz)
mr                168 drivers/infiniband/hw/qib/qib_verbs.c 				sge.mr->map[sge.m]->segs[sge.n].vaddr;
mr                170 drivers/infiniband/hw/qib/qib_verbs.c 				sge.mr->map[sge.m]->segs[sge.n].length;
mr                194 drivers/infiniband/hw/qib/qib_verbs.c 		} else if (sge->length == 0 && sge->mr->lkey) {
mr                196 drivers/infiniband/hw/qib/qib_verbs.c 				if (++sge->m >= sge->mr->mapsz)
mr                201 drivers/infiniband/hw/qib/qib_verbs.c 				sge->mr->map[sge->m]->segs[sge->n].vaddr;
mr                203 drivers/infiniband/hw/qib/qib_verbs.c 				sge->mr->map[sge->m]->segs[sge->n].length;
mr                624 drivers/infiniband/hw/qib/qib_verbs.c 	if (tx->mr) {
mr                625 drivers/infiniband/hw/qib/qib_verbs.c 		rvt_put_mr(tx->mr);
mr                626 drivers/infiniband/hw/qib/qib_verbs.c 		tx->mr = NULL;
mr                802 drivers/infiniband/hw/qib/qib_verbs.c 	tx->mr = qp->s_rdma_mr;
mr                609 drivers/infiniband/hw/usnic/usnic_ib_verbs.c 	struct usnic_ib_mr *mr;
mr                615 drivers/infiniband/hw/usnic/usnic_ib_verbs.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr                616 drivers/infiniband/hw/usnic/usnic_ib_verbs.c 	if (!mr)
mr                619 drivers/infiniband/hw/usnic/usnic_ib_verbs.c 	mr->umem = usnic_uiom_reg_get(to_upd(pd)->umem_pd, start, length,
mr                621 drivers/infiniband/hw/usnic/usnic_ib_verbs.c 	if (IS_ERR_OR_NULL(mr->umem)) {
mr                622 drivers/infiniband/hw/usnic/usnic_ib_verbs.c 		err = mr->umem ? PTR_ERR(mr->umem) : -EFAULT;
mr                626 drivers/infiniband/hw/usnic/usnic_ib_verbs.c 	mr->ibmr.lkey = mr->ibmr.rkey = 0;
mr                627 drivers/infiniband/hw/usnic/usnic_ib_verbs.c 	return &mr->ibmr;
mr                630 drivers/infiniband/hw/usnic/usnic_ib_verbs.c 	kfree(mr);
mr                636 drivers/infiniband/hw/usnic/usnic_ib_verbs.c 	struct usnic_ib_mr *mr = to_umr(ibmr);
mr                638 drivers/infiniband/hw/usnic/usnic_ib_verbs.c 	usnic_dbg("va 0x%lx length 0x%zx\n", mr->umem->va, mr->umem->length);
mr                640 drivers/infiniband/hw/usnic/usnic_ib_verbs.c 	usnic_uiom_reg_release(mr->umem);
mr                641 drivers/infiniband/hw/usnic/usnic_ib_verbs.c 	kfree(mr);
mr                 61 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	struct pvrdma_user_mr *mr;
mr                 75 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr                 76 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	if (!mr)
mr                 89 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 		kfree(mr);
mr                 93 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->mmr.mr_handle = resp->mr_handle;
mr                 94 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->ibmr.lkey = resp->lkey;
mr                 95 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->ibmr.rkey = resp->rkey;
mr                 97 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	return &mr->ibmr;
mr                116 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	struct pvrdma_user_mr *mr = NULL;
mr                144 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr                145 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	if (!mr) {
mr                150 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->mmr.iova = virt_addr;
mr                151 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->mmr.size = length;
mr                152 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->umem = umem;
mr                154 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	ret = pvrdma_page_dir_init(dev, &mr->pdir, npages, false);
mr                161 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	ret = pvrdma_page_dir_insert_umem(&mr->pdir, mr->umem, 0);
mr                172 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	cmd->pdir_dma = mr->pdir.dir_dma;
mr                181 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->mmr.mr_handle = resp->mr_handle;
mr                182 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->ibmr.lkey = resp->lkey;
mr                183 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->ibmr.rkey = resp->rkey;
mr                185 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	return &mr->ibmr;
mr                188 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	pvrdma_page_dir_cleanup(dev, &mr->pdir);
mr                191 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	kfree(mr);
mr                208 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	struct pvrdma_user_mr *mr;
mr                220 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr                221 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	if (!mr)
mr                224 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->pages = kzalloc(size, GFP_KERNEL);
mr                225 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	if (!mr->pages) {
mr                230 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	ret = pvrdma_page_dir_init(dev, &mr->pdir, max_num_sg, false);
mr                252 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->max_pages = max_num_sg;
mr                253 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->mmr.mr_handle = resp->mr_handle;
mr                254 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->ibmr.lkey = resp->lkey;
mr                255 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->ibmr.rkey = resp->rkey;
mr                256 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->page_shift = PAGE_SHIFT;
mr                257 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->umem = NULL;
mr                259 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	return &mr->ibmr;
mr                262 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	pvrdma_page_dir_cleanup(dev, &mr->pdir);
mr                264 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	kfree(mr->pages);
mr                266 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	kfree(mr);
mr                278 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	struct pvrdma_user_mr *mr = to_vmr(ibmr);
mr                286 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	cmd->mr_handle = mr->mmr.mr_handle;
mr                292 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	pvrdma_page_dir_cleanup(dev, &mr->pdir);
mr                293 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	ib_umem_release(mr->umem);
mr                295 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	kfree(mr->pages);
mr                296 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	kfree(mr);
mr                303 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	struct pvrdma_user_mr *mr = to_vmr(ibmr);
mr                305 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	if (mr->npages == mr->max_pages)
mr                308 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->pages[mr->npages++] = addr;
mr                315 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	struct pvrdma_user_mr *mr = to_vmr(ibmr);
mr                319 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	mr->npages = 0;
mr                596 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c 	struct pvrdma_user_mr *mr = to_vmr(wr->mr);
mr                598 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c 	wqe_hdr->wr.fast_reg.iova_start = mr->ibmr.iova;
mr                599 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c 	wqe_hdr->wr.fast_reg.pl_pdir_dma = mr->pdir.dir_dma;
mr                600 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c 	wqe_hdr->wr.fast_reg.page_shift = mr->page_shift;
mr                601 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c 	wqe_hdr->wr.fast_reg.page_list_len = mr->npages;
mr                602 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c 	wqe_hdr->wr.fast_reg.length = mr->ibmr.length;
mr                606 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c 	return pvrdma_page_dir_insert_page_list(&mr->pdir, mr->pages,
mr                607 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c 						mr->npages);
mr                407 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.h int pvrdma_dereg_mr(struct ib_mr *mr, struct ib_udata *udata);
mr                118 drivers/infiniband/sw/rdmavt/mr.c static void rvt_deinit_mregion(struct rvt_mregion *mr)
mr                120 drivers/infiniband/sw/rdmavt/mr.c 	int i = mr->mapsz;
mr                122 drivers/infiniband/sw/rdmavt/mr.c 	mr->mapsz = 0;
mr                124 drivers/infiniband/sw/rdmavt/mr.c 		kfree(mr->map[--i]);
mr                125 drivers/infiniband/sw/rdmavt/mr.c 	percpu_ref_exit(&mr->refcount);
mr                130 drivers/infiniband/sw/rdmavt/mr.c 	struct rvt_mregion *mr = container_of(ref, struct rvt_mregion,
mr                133 drivers/infiniband/sw/rdmavt/mr.c 	complete(&mr->comp);
mr                136 drivers/infiniband/sw/rdmavt/mr.c static int rvt_init_mregion(struct rvt_mregion *mr, struct ib_pd *pd,
mr                142 drivers/infiniband/sw/rdmavt/mr.c 	mr->mapsz = 0;
mr                145 drivers/infiniband/sw/rdmavt/mr.c 		mr->map[i] = kzalloc_node(sizeof(*mr->map[0]), GFP_KERNEL,
mr                147 drivers/infiniband/sw/rdmavt/mr.c 		if (!mr->map[i])
mr                149 drivers/infiniband/sw/rdmavt/mr.c 		mr->mapsz++;
mr                151 drivers/infiniband/sw/rdmavt/mr.c 	init_completion(&mr->comp);
mr                153 drivers/infiniband/sw/rdmavt/mr.c 	if (percpu_ref_init(&mr->refcount, &__rvt_mregion_complete,
mr                157 drivers/infiniband/sw/rdmavt/mr.c 	atomic_set(&mr->lkey_invalid, 0);
mr                158 drivers/infiniband/sw/rdmavt/mr.c 	mr->pd = pd;
mr                159 drivers/infiniband/sw/rdmavt/mr.c 	mr->max_segs = count;
mr                162 drivers/infiniband/sw/rdmavt/mr.c 	rvt_deinit_mregion(mr);
mr                178 drivers/infiniband/sw/rdmavt/mr.c static int rvt_alloc_lkey(struct rvt_mregion *mr, int dma_region)
mr                184 drivers/infiniband/sw/rdmavt/mr.c 	struct rvt_dev_info *dev = ib_to_rvt(mr->pd->device);
mr                187 drivers/infiniband/sw/rdmavt/mr.c 	rvt_get_mr(mr);
mr                196 drivers/infiniband/sw/rdmavt/mr.c 			mr->lkey_published = 1;
mr                198 drivers/infiniband/sw/rdmavt/mr.c 			rcu_assign_pointer(dev->dma_mr, mr);
mr                199 drivers/infiniband/sw/rdmavt/mr.c 			rvt_get_mr(mr);
mr                223 drivers/infiniband/sw/rdmavt/mr.c 	mr->lkey = (r << (32 - dev->dparms.lkey_table_size)) |
mr                226 drivers/infiniband/sw/rdmavt/mr.c 	if (mr->lkey == 0) {
mr                227 drivers/infiniband/sw/rdmavt/mr.c 		mr->lkey |= 1 << 8;
mr                230 drivers/infiniband/sw/rdmavt/mr.c 	mr->lkey_published = 1;
mr                232 drivers/infiniband/sw/rdmavt/mr.c 	rcu_assign_pointer(rkt->table[r], mr);
mr                238 drivers/infiniband/sw/rdmavt/mr.c 	rvt_put_mr(mr);
mr                248 drivers/infiniband/sw/rdmavt/mr.c static void rvt_free_lkey(struct rvt_mregion *mr)
mr                251 drivers/infiniband/sw/rdmavt/mr.c 	u32 lkey = mr->lkey;
mr                253 drivers/infiniband/sw/rdmavt/mr.c 	struct rvt_dev_info *dev = ib_to_rvt(mr->pd->device);
mr                259 drivers/infiniband/sw/rdmavt/mr.c 		if (mr->lkey_published) {
mr                260 drivers/infiniband/sw/rdmavt/mr.c 			mr->lkey_published = 0;
mr                263 drivers/infiniband/sw/rdmavt/mr.c 			rvt_put_mr(mr);
mr                266 drivers/infiniband/sw/rdmavt/mr.c 		if (!mr->lkey_published)
mr                269 drivers/infiniband/sw/rdmavt/mr.c 		mr->lkey_published = 0;
mr                277 drivers/infiniband/sw/rdmavt/mr.c 		percpu_ref_kill(&mr->refcount);
mr                282 drivers/infiniband/sw/rdmavt/mr.c 	struct rvt_mr *mr;
mr                288 drivers/infiniband/sw/rdmavt/mr.c 	mr = kzalloc(struct_size(mr, mr.map, m), GFP_KERNEL);
mr                289 drivers/infiniband/sw/rdmavt/mr.c 	if (!mr)
mr                292 drivers/infiniband/sw/rdmavt/mr.c 	rval = rvt_init_mregion(&mr->mr, pd, count, 0);
mr                299 drivers/infiniband/sw/rdmavt/mr.c 	rval = rvt_alloc_lkey(&mr->mr, 0);
mr                302 drivers/infiniband/sw/rdmavt/mr.c 	mr->ibmr.lkey = mr->mr.lkey;
mr                303 drivers/infiniband/sw/rdmavt/mr.c 	mr->ibmr.rkey = mr->mr.lkey;
mr                305 drivers/infiniband/sw/rdmavt/mr.c 	return mr;
mr                308 drivers/infiniband/sw/rdmavt/mr.c 	rvt_deinit_mregion(&mr->mr);
mr                310 drivers/infiniband/sw/rdmavt/mr.c 	kfree(mr);
mr                311 drivers/infiniband/sw/rdmavt/mr.c 	mr = ERR_PTR(rval);
mr                315 drivers/infiniband/sw/rdmavt/mr.c static void __rvt_free_mr(struct rvt_mr *mr)
mr                317 drivers/infiniband/sw/rdmavt/mr.c 	rvt_free_lkey(&mr->mr);
mr                318 drivers/infiniband/sw/rdmavt/mr.c 	rvt_deinit_mregion(&mr->mr);
mr                319 drivers/infiniband/sw/rdmavt/mr.c 	kfree(mr);
mr                333 drivers/infiniband/sw/rdmavt/mr.c 	struct rvt_mr *mr;
mr                340 drivers/infiniband/sw/rdmavt/mr.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr                341 drivers/infiniband/sw/rdmavt/mr.c 	if (!mr) {
mr                346 drivers/infiniband/sw/rdmavt/mr.c 	rval = rvt_init_mregion(&mr->mr, pd, 0, 0);
mr                352 drivers/infiniband/sw/rdmavt/mr.c 	rval = rvt_alloc_lkey(&mr->mr, 1);
mr                358 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.access_flags = acc;
mr                359 drivers/infiniband/sw/rdmavt/mr.c 	ret = &mr->ibmr;
mr                364 drivers/infiniband/sw/rdmavt/mr.c 	rvt_deinit_mregion(&mr->mr);
mr                366 drivers/infiniband/sw/rdmavt/mr.c 	kfree(mr);
mr                384 drivers/infiniband/sw/rdmavt/mr.c 	struct rvt_mr *mr;
mr                399 drivers/infiniband/sw/rdmavt/mr.c 	mr = __rvt_alloc_mr(n, pd);
mr                400 drivers/infiniband/sw/rdmavt/mr.c 	if (IS_ERR(mr)) {
mr                401 drivers/infiniband/sw/rdmavt/mr.c 		ret = (struct ib_mr *)mr;
mr                405 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.user_base = start;
mr                406 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.iova = virt_addr;
mr                407 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.length = length;
mr                408 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.offset = ib_umem_offset(umem);
mr                409 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.access_flags = mr_access_flags;
mr                410 drivers/infiniband/sw/rdmavt/mr.c 	mr->umem = umem;
mr                412 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.page_shift = PAGE_SHIFT;
mr                423 drivers/infiniband/sw/rdmavt/mr.c 		mr->mr.map[m]->segs[n].vaddr = vaddr;
mr                424 drivers/infiniband/sw/rdmavt/mr.c 		mr->mr.map[m]->segs[n].length = PAGE_SIZE;
mr                425 drivers/infiniband/sw/rdmavt/mr.c 		trace_rvt_mr_user_seg(&mr->mr, m, n, vaddr, PAGE_SIZE);
mr                431 drivers/infiniband/sw/rdmavt/mr.c 	return &mr->ibmr;
mr                434 drivers/infiniband/sw/rdmavt/mr.c 	__rvt_free_mr(mr);
mr                453 drivers/infiniband/sw/rdmavt/mr.c 	struct rvt_mregion *mr = (struct rvt_mregion *)v;
mr                456 drivers/infiniband/sw/rdmavt/mr.c 	if (mr->pd != qp->ibqp.pd)
mr                458 drivers/infiniband/sw/rdmavt/mr.c 	rvt_qp_mr_clean(qp, mr->lkey);
mr                468 drivers/infiniband/sw/rdmavt/mr.c static void rvt_dereg_clean_qps(struct rvt_mregion *mr)
mr                470 drivers/infiniband/sw/rdmavt/mr.c 	struct rvt_dev_info *rdi = ib_to_rvt(mr->pd->device);
mr                472 drivers/infiniband/sw/rdmavt/mr.c 	rvt_qp_iter(rdi, (u64)mr, rvt_dereg_clean_qp_cb);
mr                486 drivers/infiniband/sw/rdmavt/mr.c static int rvt_check_refs(struct rvt_mregion *mr, const char *t)
mr                489 drivers/infiniband/sw/rdmavt/mr.c 	struct rvt_dev_info *rdi = ib_to_rvt(mr->pd->device);
mr                491 drivers/infiniband/sw/rdmavt/mr.c 	if (mr->lkey) {
mr                493 drivers/infiniband/sw/rdmavt/mr.c 		rvt_dereg_clean_qps(mr);
mr                498 drivers/infiniband/sw/rdmavt/mr.c 	timeout = wait_for_completion_timeout(&mr->comp, 5 * HZ);
mr                502 drivers/infiniband/sw/rdmavt/mr.c 			   t, mr, mr->pd, mr->lkey,
mr                503 drivers/infiniband/sw/rdmavt/mr.c 			   atomic_long_read(&mr->refcount.count));
mr                504 drivers/infiniband/sw/rdmavt/mr.c 		rvt_get_mr(mr);
mr                515 drivers/infiniband/sw/rdmavt/mr.c bool rvt_mr_has_lkey(struct rvt_mregion *mr, u32 lkey)
mr                517 drivers/infiniband/sw/rdmavt/mr.c 	return mr && lkey == mr->lkey;
mr                536 drivers/infiniband/sw/rdmavt/mr.c 	rval = rvt_mr_has_lkey(ss->sge.mr, lkey);
mr                539 drivers/infiniband/sw/rdmavt/mr.c 		rval = rvt_mr_has_lkey(ss->sg_list[i].mr, lkey);
mr                555 drivers/infiniband/sw/rdmavt/mr.c 	struct rvt_mr *mr = to_imr(ibmr);
mr                558 drivers/infiniband/sw/rdmavt/mr.c 	rvt_free_lkey(&mr->mr);
mr                560 drivers/infiniband/sw/rdmavt/mr.c 	rvt_put_mr(&mr->mr); /* will set completion if last */
mr                561 drivers/infiniband/sw/rdmavt/mr.c 	ret = rvt_check_refs(&mr->mr, __func__);
mr                564 drivers/infiniband/sw/rdmavt/mr.c 	rvt_deinit_mregion(&mr->mr);
mr                565 drivers/infiniband/sw/rdmavt/mr.c 	ib_umem_release(mr->umem);
mr                566 drivers/infiniband/sw/rdmavt/mr.c 	kfree(mr);
mr                582 drivers/infiniband/sw/rdmavt/mr.c 	struct rvt_mr *mr;
mr                587 drivers/infiniband/sw/rdmavt/mr.c 	mr = __rvt_alloc_mr(max_num_sg, pd);
mr                588 drivers/infiniband/sw/rdmavt/mr.c 	if (IS_ERR(mr))
mr                589 drivers/infiniband/sw/rdmavt/mr.c 		return (struct ib_mr *)mr;
mr                591 drivers/infiniband/sw/rdmavt/mr.c 	return &mr->ibmr;
mr                603 drivers/infiniband/sw/rdmavt/mr.c 	struct rvt_mr *mr = to_imr(ibmr);
mr                604 drivers/infiniband/sw/rdmavt/mr.c 	u32 ps = 1 << mr->mr.page_shift;
mr                605 drivers/infiniband/sw/rdmavt/mr.c 	u32 mapped_segs = mr->mr.length >> mr->mr.page_shift;
mr                608 drivers/infiniband/sw/rdmavt/mr.c 	if (unlikely(mapped_segs == mr->mr.max_segs))
mr                613 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.map[m]->segs[n].vaddr = (void *)addr;
mr                614 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.map[m]->segs[n].length = ps;
mr                615 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.length += ps;
mr                616 drivers/infiniband/sw/rdmavt/mr.c 	trace_rvt_mr_page_seg(&mr->mr, m, n, (void *)addr, ps);
mr                635 drivers/infiniband/sw/rdmavt/mr.c 	struct rvt_mr *mr = to_imr(ibmr);
mr                638 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.length = 0;
mr                639 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.page_shift = PAGE_SHIFT;
mr                641 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.user_base = ibmr->iova;
mr                642 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.iova = ibmr->iova;
mr                643 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.offset = ibmr->iova - (u64)mr->mr.map[0]->segs[0].vaddr;
mr                644 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.length = (size_t)ibmr->length;
mr                661 drivers/infiniband/sw/rdmavt/mr.c 	struct rvt_mr *mr = to_imr(ibmr);
mr                663 drivers/infiniband/sw/rdmavt/mr.c 	if (qp->ibqp.pd != mr->mr.pd)
mr                667 drivers/infiniband/sw/rdmavt/mr.c 	if (!mr->mr.lkey || mr->umem)
mr                670 drivers/infiniband/sw/rdmavt/mr.c 	if ((key & 0xFFFFFF00) != (mr->mr.lkey & 0xFFFFFF00))
mr                675 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.lkey = key;
mr                676 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.access_flags = access;
mr                677 drivers/infiniband/sw/rdmavt/mr.c 	mr->mr.iova = ibmr->iova;
mr                678 drivers/infiniband/sw/rdmavt/mr.c 	atomic_set(&mr->mr.lkey_invalid, 0);
mr                695 drivers/infiniband/sw/rdmavt/mr.c 	struct rvt_mregion *mr;
mr                701 drivers/infiniband/sw/rdmavt/mr.c 	mr = rcu_dereference(
mr                703 drivers/infiniband/sw/rdmavt/mr.c 	if (unlikely(!mr || mr->lkey != rkey || qp->ibqp.pd != mr->pd))
mr                706 drivers/infiniband/sw/rdmavt/mr.c 	atomic_set(&mr->lkey_invalid, 1);
mr                734 drivers/infiniband/sw/rdmavt/mr.c 	fmr = kzalloc(struct_size(fmr, mr.map, m), GFP_KERNEL);
mr                738 drivers/infiniband/sw/rdmavt/mr.c 	rval = rvt_init_mregion(&fmr->mr, pd, fmr_attr->max_pages,
mr                747 drivers/infiniband/sw/rdmavt/mr.c 	rval = rvt_alloc_lkey(&fmr->mr, 0);
mr                750 drivers/infiniband/sw/rdmavt/mr.c 	fmr->ibfmr.rkey = fmr->mr.lkey;
mr                751 drivers/infiniband/sw/rdmavt/mr.c 	fmr->ibfmr.lkey = fmr->mr.lkey;
mr                756 drivers/infiniband/sw/rdmavt/mr.c 	fmr->mr.access_flags = mr_access_flags;
mr                757 drivers/infiniband/sw/rdmavt/mr.c 	fmr->mr.max_segs = fmr_attr->max_pages;
mr                758 drivers/infiniband/sw/rdmavt/mr.c 	fmr->mr.page_shift = fmr_attr->page_shift;
mr                765 drivers/infiniband/sw/rdmavt/mr.c 	rvt_deinit_mregion(&fmr->mr);
mr                795 drivers/infiniband/sw/rdmavt/mr.c 	i = atomic_long_read(&fmr->mr.refcount.count);
mr                799 drivers/infiniband/sw/rdmavt/mr.c 	if (list_len > fmr->mr.max_segs)
mr                804 drivers/infiniband/sw/rdmavt/mr.c 	fmr->mr.user_base = iova;
mr                805 drivers/infiniband/sw/rdmavt/mr.c 	fmr->mr.iova = iova;
mr                806 drivers/infiniband/sw/rdmavt/mr.c 	ps = 1 << fmr->mr.page_shift;
mr                807 drivers/infiniband/sw/rdmavt/mr.c 	fmr->mr.length = list_len * ps;
mr                811 drivers/infiniband/sw/rdmavt/mr.c 		fmr->mr.map[m]->segs[n].vaddr = (void *)page_list[i];
mr                812 drivers/infiniband/sw/rdmavt/mr.c 		fmr->mr.map[m]->segs[n].length = ps;
mr                813 drivers/infiniband/sw/rdmavt/mr.c 		trace_rvt_mr_fmr_seg(&fmr->mr, m, n, (void *)page_list[i], ps);
mr                840 drivers/infiniband/sw/rdmavt/mr.c 		fmr->mr.user_base = 0;
mr                841 drivers/infiniband/sw/rdmavt/mr.c 		fmr->mr.iova = 0;
mr                842 drivers/infiniband/sw/rdmavt/mr.c 		fmr->mr.length = 0;
mr                859 drivers/infiniband/sw/rdmavt/mr.c 	rvt_free_lkey(&fmr->mr);
mr                860 drivers/infiniband/sw/rdmavt/mr.c 	rvt_put_mr(&fmr->mr); /* will set completion if last */
mr                861 drivers/infiniband/sw/rdmavt/mr.c 	ret = rvt_check_refs(&fmr->mr, __func__);
mr                864 drivers/infiniband/sw/rdmavt/mr.c 	rvt_deinit_mregion(&fmr->mr);
mr                882 drivers/infiniband/sw/rdmavt/mr.c 	if (last_sge && sge->lkey == last_sge->mr->lkey &&
mr                885 drivers/infiniband/sw/rdmavt/mr.c 			if (unlikely((sge->addr - last_sge->mr->user_base +
mr                886 drivers/infiniband/sw/rdmavt/mr.c 			      sge->length > last_sge->mr->length)))
mr                918 drivers/infiniband/sw/rdmavt/mr.c 	struct rvt_mregion *mr;
mr                934 drivers/infiniband/sw/rdmavt/mr.c 		mr = rcu_dereference(dev->dma_mr);
mr                935 drivers/infiniband/sw/rdmavt/mr.c 		if (!mr)
mr                937 drivers/infiniband/sw/rdmavt/mr.c 		rvt_get_mr(mr);
mr                940 drivers/infiniband/sw/rdmavt/mr.c 		isge->mr = mr;
mr                951 drivers/infiniband/sw/rdmavt/mr.c 	mr = rcu_dereference(rkt->table[sge->lkey >> rkt->shift]);
mr                952 drivers/infiniband/sw/rdmavt/mr.c 	if (!mr)
mr                954 drivers/infiniband/sw/rdmavt/mr.c 	rvt_get_mr(mr);
mr                955 drivers/infiniband/sw/rdmavt/mr.c 	if (!READ_ONCE(mr->lkey_published))
mr                958 drivers/infiniband/sw/rdmavt/mr.c 	if (unlikely(atomic_read(&mr->lkey_invalid) ||
mr                959 drivers/infiniband/sw/rdmavt/mr.c 		     mr->lkey != sge->lkey || mr->pd != &pd->ibpd))
mr                962 drivers/infiniband/sw/rdmavt/mr.c 	off = sge->addr - mr->user_base;
mr                963 drivers/infiniband/sw/rdmavt/mr.c 	if (unlikely(sge->addr < mr->user_base ||
mr                964 drivers/infiniband/sw/rdmavt/mr.c 		     off + sge->length > mr->length ||
mr                965 drivers/infiniband/sw/rdmavt/mr.c 		     (mr->access_flags & acc) != acc))
mr                969 drivers/infiniband/sw/rdmavt/mr.c 	off += mr->offset;
mr                970 drivers/infiniband/sw/rdmavt/mr.c 	if (mr->page_shift) {
mr                978 drivers/infiniband/sw/rdmavt/mr.c 		entries_spanned_by_off = off >> mr->page_shift;
mr                979 drivers/infiniband/sw/rdmavt/mr.c 		off -= (entries_spanned_by_off << mr->page_shift);
mr                985 drivers/infiniband/sw/rdmavt/mr.c 		while (off >= mr->map[m]->segs[n].length) {
mr                986 drivers/infiniband/sw/rdmavt/mr.c 			off -= mr->map[m]->segs[n].length;
mr                994 drivers/infiniband/sw/rdmavt/mr.c 	isge->mr = mr;
mr                995 drivers/infiniband/sw/rdmavt/mr.c 	isge->vaddr = mr->map[m]->segs[n].vaddr + off;
mr                996 drivers/infiniband/sw/rdmavt/mr.c 	isge->length = mr->map[m]->segs[n].length - off;
mr               1004 drivers/infiniband/sw/rdmavt/mr.c 	rvt_put_mr(mr);
mr               1029 drivers/infiniband/sw/rdmavt/mr.c 	struct rvt_mregion *mr;
mr               1044 drivers/infiniband/sw/rdmavt/mr.c 		mr = rcu_dereference(rdi->dma_mr);
mr               1045 drivers/infiniband/sw/rdmavt/mr.c 		if (!mr)
mr               1047 drivers/infiniband/sw/rdmavt/mr.c 		rvt_get_mr(mr);
mr               1050 drivers/infiniband/sw/rdmavt/mr.c 		sge->mr = mr;
mr               1059 drivers/infiniband/sw/rdmavt/mr.c 	mr = rcu_dereference(rkt->table[rkey >> rkt->shift]);
mr               1060 drivers/infiniband/sw/rdmavt/mr.c 	if (!mr)
mr               1062 drivers/infiniband/sw/rdmavt/mr.c 	rvt_get_mr(mr);
mr               1064 drivers/infiniband/sw/rdmavt/mr.c 	if (!READ_ONCE(mr->lkey_published))
mr               1066 drivers/infiniband/sw/rdmavt/mr.c 	if (unlikely(atomic_read(&mr->lkey_invalid) ||
mr               1067 drivers/infiniband/sw/rdmavt/mr.c 		     mr->lkey != rkey || qp->ibqp.pd != mr->pd))
mr               1070 drivers/infiniband/sw/rdmavt/mr.c 	off = vaddr - mr->iova;
mr               1071 drivers/infiniband/sw/rdmavt/mr.c 	if (unlikely(vaddr < mr->iova || off + len > mr->length ||
mr               1072 drivers/infiniband/sw/rdmavt/mr.c 		     (mr->access_flags & acc) == 0))
mr               1076 drivers/infiniband/sw/rdmavt/mr.c 	off += mr->offset;
mr               1077 drivers/infiniband/sw/rdmavt/mr.c 	if (mr->page_shift) {
mr               1085 drivers/infiniband/sw/rdmavt/mr.c 		entries_spanned_by_off = off >> mr->page_shift;
mr               1086 drivers/infiniband/sw/rdmavt/mr.c 		off -= (entries_spanned_by_off << mr->page_shift);
mr               1092 drivers/infiniband/sw/rdmavt/mr.c 		while (off >= mr->map[m]->segs[n].length) {
mr               1093 drivers/infiniband/sw/rdmavt/mr.c 			off -= mr->map[m]->segs[n].length;
mr               1101 drivers/infiniband/sw/rdmavt/mr.c 	sge->mr = mr;
mr               1102 drivers/infiniband/sw/rdmavt/mr.c 	sge->vaddr = mr->map[m]->segs[n].vaddr + off;
mr               1103 drivers/infiniband/sw/rdmavt/mr.c 	sge->length = mr->map[m]->segs[n].length - off;
mr               1110 drivers/infiniband/sw/rdmavt/mr.c 	rvt_put_mr(mr);
mr                 54 drivers/infiniband/sw/rdmavt/mr.h 	struct rvt_mregion mr;        /* must be last */
mr                 60 drivers/infiniband/sw/rdmavt/mr.h 	struct rvt_mregion mr;  /* must be last */
mr                646 drivers/infiniband/sw/rdmavt/qp.c 		if (e->rdma_sge.mr) {
mr                647 drivers/infiniband/sw/rdmavt/qp.c 			rvt_put_mr(e->rdma_sge.mr);
mr                648 drivers/infiniband/sw/rdmavt/qp.c 			e->rdma_sge.mr = NULL;
mr                667 drivers/infiniband/sw/rdmavt/qp.c 		if (rvt_mr_has_lkey(sge->mr, lkey))
mr                710 drivers/infiniband/sw/rdmavt/qp.c 		if (rvt_mr_has_lkey(e->rdma_sge.mr, lkey))
mr               2043 drivers/infiniband/sw/rdmavt/qp.c 					      reg_wr(wr)->mr,
mr               2167 drivers/infiniband/sw/rdmavt/qp.c 		rvt_put_mr(sge->mr);
mr               2340 drivers/infiniband/sw/rdmavt/qp.c 		rvt_put_mr(sge->mr);
mr               3149 drivers/infiniband/sw/rdmavt/qp.c 		rvt_put_mr(qp->r_sge.sge.mr);
mr                 63 drivers/infiniband/sw/rdmavt/trace_mr.h 	TP_PROTO(struct rvt_mregion *mr, u16 m, u16 n, void *v, size_t len),
mr                 64 drivers/infiniband/sw/rdmavt/trace_mr.h 	TP_ARGS(mr, m, n, v, len),
mr                 66 drivers/infiniband/sw/rdmavt/trace_mr.h 		RDI_DEV_ENTRY(ib_to_rvt(mr->pd->device))
mr                 79 drivers/infiniband/sw/rdmavt/trace_mr.h 		RDI_DEV_ASSIGN(ib_to_rvt(mr->pd->device));
mr                 82 drivers/infiniband/sw/rdmavt/trace_mr.h 		__entry->iova = mr->iova;
mr                 83 drivers/infiniband/sw/rdmavt/trace_mr.h 		__entry->user_base = mr->user_base;
mr                 84 drivers/infiniband/sw/rdmavt/trace_mr.h 		__entry->lkey = mr->lkey;
mr                 88 drivers/infiniband/sw/rdmavt/trace_mr.h 		__entry->length = mr->length;
mr                 89 drivers/infiniband/sw/rdmavt/trace_mr.h 		__entry->offset = mr->offset;
mr                109 drivers/infiniband/sw/rdmavt/trace_mr.h 	TP_PROTO(struct rvt_mregion *mr, u16 m, u16 n, void *v, size_t len),
mr                110 drivers/infiniband/sw/rdmavt/trace_mr.h 	TP_ARGS(mr, m, n, v, len));
mr                114 drivers/infiniband/sw/rdmavt/trace_mr.h 	TP_PROTO(struct rvt_mregion *mr, u16 m, u16 n, void *v, size_t len),
mr                115 drivers/infiniband/sw/rdmavt/trace_mr.h 	TP_ARGS(mr, m, n, v, len));
mr                119 drivers/infiniband/sw/rdmavt/trace_mr.h 	TP_PROTO(struct rvt_mregion *mr, u16 m, u16 n, void *v, size_t len),
mr                120 drivers/infiniband/sw/rdmavt/trace_mr.h 	TP_ARGS(mr, m, n, v, len));
mr                127 drivers/infiniband/sw/rdmavt/trace_mr.h 		RDI_DEV_ENTRY(ib_to_rvt(sge->mr->pd->device))
mr                128 drivers/infiniband/sw/rdmavt/trace_mr.h 		__field(struct rvt_mregion *, mr)
mr                142 drivers/infiniband/sw/rdmavt/trace_mr.h 		RDI_DEV_ASSIGN(ib_to_rvt(sge->mr->pd->device));
mr                143 drivers/infiniband/sw/rdmavt/trace_mr.h 		__entry->mr = sge->mr;
mr                148 drivers/infiniband/sw/rdmavt/trace_mr.h 		__entry->lkey = sge->mr->lkey;
mr                154 drivers/infiniband/sw/rdmavt/trace_mr.h 		__entry->user = ibpd_to_rvtpd(sge->mr->pd)->user;
mr                159 drivers/infiniband/sw/rdmavt/trace_mr.h 		__entry->mr,
mr                189 drivers/infiniband/sw/rdmavt/trace_mr.h 		RDI_DEV_ENTRY(ib_to_rvt(to_imr(ibmr)->mr.pd->device))
mr                198 drivers/infiniband/sw/rdmavt/trace_mr.h 		RDI_DEV_ASSIGN(ib_to_rvt(to_imr(ibmr)->mr.pd->device))
mr                200 drivers/infiniband/sw/rdmavt/trace_mr.h 		__entry->iova = to_imr(ibmr)->mr.iova;
mr                201 drivers/infiniband/sw/rdmavt/trace_mr.h 		__entry->user_base = to_imr(ibmr)->mr.user_base;
mr                202 drivers/infiniband/sw/rdmavt/trace_mr.h 		__entry->ibmr_length = to_imr(ibmr)->mr.length;
mr                111 drivers/infiniband/sw/rxe/rxe_loc.h 		      struct rxe_mem *mr);
mr                158 drivers/infiniband/sw/rxe/rxe_qp.c 		if (res->read.mr)
mr                159 drivers/infiniband/sw/rxe/rxe_qp.c 			rxe_drop_ref(res->read.mr);
mr                538 drivers/infiniband/sw/rxe/rxe_qp.c 	if (qp->resp.mr) {
mr                539 drivers/infiniband/sw/rxe/rxe_qp.c 		rxe_drop_ref(qp->resp.mr);
mr                540 drivers/infiniband/sw/rxe/rxe_qp.c 		qp->resp.mr = NULL;
mr                827 drivers/infiniband/sw/rxe/rxe_qp.c 	if (qp->resp.mr) {
mr                828 drivers/infiniband/sw/rxe/rxe_qp.c 		rxe_drop_ref(qp->resp.mr);
mr                829 drivers/infiniband/sw/rxe/rxe_qp.c 		qp->resp.mr = NULL;
mr                646 drivers/infiniband/sw/rxe/rxe_req.c 			struct rxe_mem *rmr = to_rmr(wqe->wr.wr.reg.mr);
mr                652 drivers/infiniband/sw/rxe/rxe_req.c 			rmr->iova = wqe->wr.wr.reg.mr->iova;
mr                496 drivers/infiniband/sw/rxe/rxe_resp.c 	WARN_ON_ONCE(qp->resp.mr);
mr                498 drivers/infiniband/sw/rxe/rxe_resp.c 	qp->resp.mr = mem;
mr                528 drivers/infiniband/sw/rxe/rxe_resp.c 	err = rxe_mem_copy(qp->resp.mr, qp->resp.va, payload_addr(pkt),
mr                551 drivers/infiniband/sw/rxe/rxe_resp.c 	struct rxe_mem *mr = qp->resp.mr;
mr                553 drivers/infiniband/sw/rxe/rxe_resp.c 	if (mr->state != RXE_MEM_STATE_VALID) {
mr                558 drivers/infiniband/sw/rxe/rxe_resp.c 	vaddr = iova_to_vaddr(mr, iova, sizeof(u64));
mr                702 drivers/infiniband/sw/rxe/rxe_resp.c 		res->read.mr		= qp->resp.mr;
mr                703 drivers/infiniband/sw/rxe/rxe_resp.c 		qp->resp.mr		= NULL;
mr                730 drivers/infiniband/sw/rxe/rxe_resp.c 	err = rxe_mem_copy(res->read.mr, res->read.va, payload_addr(&ack_pkt),
mr               1049 drivers/infiniband/sw/rxe/rxe_resp.c 	if (qp->resp.mr) {
mr               1050 drivers/infiniband/sw/rxe/rxe_resp.c 		rxe_drop_ref(qp->resp.mr);
mr               1051 drivers/infiniband/sw/rxe/rxe_resp.c 		qp->resp.mr = NULL;
mr               1195 drivers/infiniband/sw/rxe/rxe_resp.c 		if (qp->resp.mr) {
mr               1196 drivers/infiniband/sw/rxe/rxe_resp.c 			rxe_drop_ref(qp->resp.mr);
mr               1197 drivers/infiniband/sw/rxe/rxe_resp.c 			qp->resp.mr = NULL;
mr                576 drivers/infiniband/sw/rxe/rxe_verbs.c 			wr->wr.reg.mr = reg_wr(ibwr)->mr;
mr                898 drivers/infiniband/sw/rxe/rxe_verbs.c 	struct rxe_mem *mr;
mr                901 drivers/infiniband/sw/rxe/rxe_verbs.c 	mr = rxe_alloc(&rxe->mr_pool);
mr                902 drivers/infiniband/sw/rxe/rxe_verbs.c 	if (!mr) {
mr                907 drivers/infiniband/sw/rxe/rxe_verbs.c 	rxe_add_index(mr);
mr                911 drivers/infiniband/sw/rxe/rxe_verbs.c 	err = rxe_mem_init_dma(pd, access, mr);
mr                915 drivers/infiniband/sw/rxe/rxe_verbs.c 	return &mr->ibmr;
mr                919 drivers/infiniband/sw/rxe/rxe_verbs.c 	rxe_drop_index(mr);
mr                920 drivers/infiniband/sw/rxe/rxe_verbs.c 	rxe_drop_ref(mr);
mr                934 drivers/infiniband/sw/rxe/rxe_verbs.c 	struct rxe_mem *mr;
mr                936 drivers/infiniband/sw/rxe/rxe_verbs.c 	mr = rxe_alloc(&rxe->mr_pool);
mr                937 drivers/infiniband/sw/rxe/rxe_verbs.c 	if (!mr) {
mr                942 drivers/infiniband/sw/rxe/rxe_verbs.c 	rxe_add_index(mr);
mr                947 drivers/infiniband/sw/rxe/rxe_verbs.c 				access, udata, mr);
mr                951 drivers/infiniband/sw/rxe/rxe_verbs.c 	return &mr->ibmr;
mr                955 drivers/infiniband/sw/rxe/rxe_verbs.c 	rxe_drop_index(mr);
mr                956 drivers/infiniband/sw/rxe/rxe_verbs.c 	rxe_drop_ref(mr);
mr                963 drivers/infiniband/sw/rxe/rxe_verbs.c 	struct rxe_mem *mr = to_rmr(ibmr);
mr                965 drivers/infiniband/sw/rxe/rxe_verbs.c 	mr->state = RXE_MEM_STATE_ZOMBIE;
mr                966 drivers/infiniband/sw/rxe/rxe_verbs.c 	rxe_drop_ref(mr->pd);
mr                967 drivers/infiniband/sw/rxe/rxe_verbs.c 	rxe_drop_index(mr);
mr                968 drivers/infiniband/sw/rxe/rxe_verbs.c 	rxe_drop_ref(mr);
mr                977 drivers/infiniband/sw/rxe/rxe_verbs.c 	struct rxe_mem *mr;
mr                983 drivers/infiniband/sw/rxe/rxe_verbs.c 	mr = rxe_alloc(&rxe->mr_pool);
mr                984 drivers/infiniband/sw/rxe/rxe_verbs.c 	if (!mr) {
mr                989 drivers/infiniband/sw/rxe/rxe_verbs.c 	rxe_add_index(mr);
mr                993 drivers/infiniband/sw/rxe/rxe_verbs.c 	err = rxe_mem_init_fast(pd, max_num_sg, mr);
mr                997 drivers/infiniband/sw/rxe/rxe_verbs.c 	return &mr->ibmr;
mr               1001 drivers/infiniband/sw/rxe/rxe_verbs.c 	rxe_drop_index(mr);
mr               1002 drivers/infiniband/sw/rxe/rxe_verbs.c 	rxe_drop_ref(mr);
mr               1009 drivers/infiniband/sw/rxe/rxe_verbs.c 	struct rxe_mem *mr = to_rmr(ibmr);
mr               1013 drivers/infiniband/sw/rxe/rxe_verbs.c 	if (unlikely(mr->nbuf == mr->num_buf))
mr               1016 drivers/infiniband/sw/rxe/rxe_verbs.c 	map = mr->map[mr->nbuf / RXE_BUF_PER_MAP];
mr               1017 drivers/infiniband/sw/rxe/rxe_verbs.c 	buf = &map->buf[mr->nbuf % RXE_BUF_PER_MAP];
mr               1021 drivers/infiniband/sw/rxe/rxe_verbs.c 	mr->nbuf++;
mr               1029 drivers/infiniband/sw/rxe/rxe_verbs.c 	struct rxe_mem *mr = to_rmr(ibmr);
mr               1032 drivers/infiniband/sw/rxe/rxe_verbs.c 	mr->nbuf = 0;
mr               1036 drivers/infiniband/sw/rxe/rxe_verbs.c 	mr->va = ibmr->iova;
mr               1037 drivers/infiniband/sw/rxe/rxe_verbs.c 	mr->iova = ibmr->iova;
mr               1038 drivers/infiniband/sw/rxe/rxe_verbs.c 	mr->length = ibmr->length;
mr               1039 drivers/infiniband/sw/rxe/rxe_verbs.c 	mr->page_shift = ilog2(ibmr->page_size);
mr               1040 drivers/infiniband/sw/rxe/rxe_verbs.c 	mr->page_mask = ibmr->page_size - 1;
mr               1041 drivers/infiniband/sw/rxe/rxe_verbs.c 	mr->offset = mr->iova & mr->page_mask;
mr                186 drivers/infiniband/sw/rxe/rxe_verbs.h 			struct rxe_mem	*mr;
mr                213 drivers/infiniband/sw/rxe/rxe_verbs.h 	struct rxe_mem		*mr;
mr                458 drivers/infiniband/sw/rxe/rxe_verbs.h static inline struct rxe_mem *to_rmr(struct ib_mr *mr)
mr                460 drivers/infiniband/sw/rxe/rxe_verbs.h 	return mr ? container_of(mr, struct rxe_mem, ibmr) : NULL;
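Note: the pvrdma, rdmavt and rxe lines above all follow the same map_mr_sg/set_page pattern: reset the MR's page counter, then let ib_sg_to_pages() walk the scatterlist and invoke a per-page callback that appends each page address to the MR's page array, failing once max_pages is reached. A minimal sketch of that pattern follows; the example_* names and the struct layout are placeholders for illustration, not taken from any of the drivers listed here.

	#include <rdma/ib_verbs.h>
	#include <linux/scatterlist.h>

	struct example_mr {
		struct ib_mr	ibmr;
		u64		*pages;		/* page address array */
		u32		npages;
		u32		max_pages;
	};

	/* Called once per page by ib_sg_to_pages(). */
	static int example_set_page(struct ib_mr *ibmr, u64 addr)
	{
		struct example_mr *mr = container_of(ibmr, struct example_mr, ibmr);

		if (mr->npages == mr->max_pages)
			return -ENOMEM;

		mr->pages[mr->npages++] = addr;
		return 0;
	}

	/* Driver map_mr_sg hook: translate the scatterlist into page addresses. */
	static int example_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,
				     int sg_nents, unsigned int *sg_offset)
	{
		struct example_mr *mr = container_of(ibmr, struct example_mr, ibmr);

		mr->npages = 0;
		return ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, example_set_page);
	}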
mr                 89 drivers/infiniband/sw/siw/siw_mem.c int siw_mr_add_mem(struct siw_mr *mr, struct ib_pd *pd, void *mem_obj,
mr                109 drivers/infiniband/sw/siw/siw_mem.c 	mr->mem = mem;
mr                121 drivers/infiniband/sw/siw/siw_mem.c 	mr->base_mr.lkey = mr->base_mr.rkey = mem->stag;
mr                126 drivers/infiniband/sw/siw/siw_mem.c void siw_mr_drop_mem(struct siw_mr *mr)
mr                128 drivers/infiniband/sw/siw/siw_mem.c 	struct siw_mem *mem = mr->mem, *found;
mr                 22 drivers/infiniband/sw/siw/siw_mem.h int siw_mr_add_mem(struct siw_mr *mr, struct ib_pd *pd, void *mem_obj,
mr                 24 drivers/infiniband/sw/siw/siw_mem.h void siw_mr_drop_mem(struct siw_mr *mr);
mr                895 drivers/infiniband/sw/siw/siw_verbs.c 			sqe->base_mr = (uintptr_t)reg_wr(wr)->mr;
mr               1270 drivers/infiniband/sw/siw/siw_verbs.c 	struct siw_mr *mr = to_siw_mr(base_mr);
mr               1273 drivers/infiniband/sw/siw/siw_verbs.c 	siw_dbg_mem(mr->mem, "deregister MR\n");
mr               1277 drivers/infiniband/sw/siw/siw_verbs.c 	siw_mr_drop_mem(mr);
mr               1278 drivers/infiniband/sw/siw/siw_verbs.c 	kfree_rcu(mr, rcu);
mr               1298 drivers/infiniband/sw/siw/siw_verbs.c 	struct siw_mr *mr = NULL;
mr               1339 drivers/infiniband/sw/siw/siw_verbs.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr               1340 drivers/infiniband/sw/siw/siw_verbs.c 	if (!mr) {
mr               1344 drivers/infiniband/sw/siw/siw_verbs.c 	rv = siw_mr_add_mem(mr, pd, umem, start, len, rights);
mr               1350 drivers/infiniband/sw/siw/siw_verbs.c 		struct siw_mem *mem = mr->mem;
mr               1360 drivers/infiniband/sw/siw/siw_verbs.c 		mr->base_mr.lkey |= ureq.stag_key;
mr               1361 drivers/infiniband/sw/siw/siw_verbs.c 		mr->base_mr.rkey |= ureq.stag_key;
mr               1373 drivers/infiniband/sw/siw/siw_verbs.c 	mr->mem->stag_valid = 1;
mr               1375 drivers/infiniband/sw/siw/siw_verbs.c 	return &mr->base_mr;
mr               1379 drivers/infiniband/sw/siw/siw_verbs.c 	if (mr) {
mr               1380 drivers/infiniband/sw/siw/siw_verbs.c 		if (mr->mem)
mr               1381 drivers/infiniband/sw/siw/siw_verbs.c 			siw_mr_drop_mem(mr);
mr               1382 drivers/infiniband/sw/siw/siw_verbs.c 		kfree_rcu(mr, rcu);
mr               1394 drivers/infiniband/sw/siw/siw_verbs.c 	struct siw_mr *mr = NULL;
mr               1420 drivers/infiniband/sw/siw/siw_verbs.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr               1421 drivers/infiniband/sw/siw/siw_verbs.c 	if (!mr) {
mr               1425 drivers/infiniband/sw/siw/siw_verbs.c 	rv = siw_mr_add_mem(mr, pd, pbl, 0, max_sge * PAGE_SIZE, 0);
mr               1429 drivers/infiniband/sw/siw/siw_verbs.c 	mr->mem->is_pbl = 1;
mr               1431 drivers/infiniband/sw/siw/siw_verbs.c 	siw_dbg_pd(pd, "[MEM %u]: success\n", mr->mem->stag);
mr               1433 drivers/infiniband/sw/siw/siw_verbs.c 	return &mr->base_mr;
mr               1438 drivers/infiniband/sw/siw/siw_verbs.c 	if (!mr) {
mr               1441 drivers/infiniband/sw/siw/siw_verbs.c 		if (mr->mem)
mr               1442 drivers/infiniband/sw/siw/siw_verbs.c 			siw_mr_drop_mem(mr);
mr               1443 drivers/infiniband/sw/siw/siw_verbs.c 		kfree_rcu(mr, rcu);
mr               1460 drivers/infiniband/sw/siw/siw_verbs.c 	struct siw_mr *mr = to_siw_mr(base_mr);
mr               1461 drivers/infiniband/sw/siw/siw_verbs.c 	struct siw_mem *mem = mr->mem;
mr               1527 drivers/infiniband/sw/siw/siw_verbs.c 	struct siw_mr *mr = NULL;
mr               1535 drivers/infiniband/sw/siw/siw_verbs.c 	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr               1536 drivers/infiniband/sw/siw/siw_verbs.c 	if (!mr) {
mr               1540 drivers/infiniband/sw/siw/siw_verbs.c 	rv = siw_mr_add_mem(mr, pd, NULL, 0, ULONG_MAX, rights);
mr               1544 drivers/infiniband/sw/siw/siw_verbs.c 	mr->mem->stag_valid = 1;
mr               1546 drivers/infiniband/sw/siw/siw_verbs.c 	siw_dbg_pd(pd, "[MEM %u]: success\n", mr->mem->stag);
mr               1548 drivers/infiniband/sw/siw/siw_verbs.c 	return &mr->base_mr;
mr               1552 drivers/infiniband/sw/siw/siw_verbs.c 		kfree(mr);
mr                382 drivers/infiniband/ulp/iser/iscsi_iser.h 		struct ib_mr             *mr;
mr                595 drivers/infiniband/ulp/iser/iser_initiator.c 	if (unlikely((!desc->sig_protected && rkey != desc->rsc.mr->rkey) ||
mr                216 drivers/infiniband/ulp/iser/iser_memory.c static int iser_set_page(struct ib_mr *mr, u64 addr)
mr                219 drivers/infiniband/ulp/iser/iser_memory.c 		container_of(mr, struct iser_page_vec, fake_mr);
mr                367 drivers/infiniband/ulp/iser/iser_memory.c 	      struct ib_mr *mr,
mr                373 drivers/infiniband/ulp/iser/iser_memory.c 	inv_wr->ex.invalidate_rkey = mr->rkey;
mr                388 drivers/infiniband/ulp/iser/iser_memory.c 	struct ib_mr *mr = rsc->sig_mr;
mr                389 drivers/infiniband/ulp/iser/iser_memory.c 	struct ib_sig_attrs *sig_attrs = mr->sig_attrs;
mr                401 drivers/infiniband/ulp/iser/iser_memory.c 		iser_inv_rkey(&tx_desc->inv_wr, mr, cqe, &wr->wr);
mr                403 drivers/infiniband/ulp/iser/iser_memory.c 	ib_update_fast_reg_key(mr, ib_inc_rkey(mr->rkey));
mr                405 drivers/infiniband/ulp/iser/iser_memory.c 	ret = ib_map_mr_sg_pi(mr, mem->sg, mem->dma_nents, NULL,
mr                419 drivers/infiniband/ulp/iser/iser_memory.c 	wr->mr = mr;
mr                420 drivers/infiniband/ulp/iser/iser_memory.c 	wr->key = mr->rkey;
mr                426 drivers/infiniband/ulp/iser/iser_memory.c 	sig_reg->sge.lkey = mr->lkey;
mr                427 drivers/infiniband/ulp/iser/iser_memory.c 	sig_reg->rkey = mr->rkey;
mr                428 drivers/infiniband/ulp/iser/iser_memory.c 	sig_reg->sge.addr = mr->iova;
mr                429 drivers/infiniband/ulp/iser/iser_memory.c 	sig_reg->sge.length = mr->length;
mr                445 drivers/infiniband/ulp/iser/iser_memory.c 	struct ib_mr *mr = rsc->mr;
mr                450 drivers/infiniband/ulp/iser/iser_memory.c 		iser_inv_rkey(&tx_desc->inv_wr, mr, cqe, &wr->wr);
mr                452 drivers/infiniband/ulp/iser/iser_memory.c 	ib_update_fast_reg_key(mr, ib_inc_rkey(mr->rkey));
mr                454 drivers/infiniband/ulp/iser/iser_memory.c 	n = ib_map_mr_sg(mr, mem->sg, mem->dma_nents, NULL, SIZE_4K);
mr                466 drivers/infiniband/ulp/iser/iser_memory.c 	wr->mr = mr;
mr                467 drivers/infiniband/ulp/iser/iser_memory.c 	wr->key = mr->rkey;
mr                474 drivers/infiniband/ulp/iser/iser_memory.c 	reg->sge.lkey = mr->lkey;
mr                475 drivers/infiniband/ulp/iser/iser_memory.c 	reg->rkey = mr->rkey;
mr                476 drivers/infiniband/ulp/iser/iser_memory.c 	reg->sge.addr = mr->iova;
mr                477 drivers/infiniband/ulp/iser/iser_memory.c 	reg->sge.length = mr->length;
mr                256 drivers/infiniband/ulp/iser/iser_verbs.c 	desc->rsc.mr = ib_alloc_mr(pd, mr_type, size);
mr                257 drivers/infiniband/ulp/iser/iser_verbs.c 	if (IS_ERR(desc->rsc.mr)) {
mr                258 drivers/infiniband/ulp/iser/iser_verbs.c 		ret = PTR_ERR(desc->rsc.mr);
mr                276 drivers/infiniband/ulp/iser/iser_verbs.c 	ib_dereg_mr(desc->rsc.mr);
mr                287 drivers/infiniband/ulp/iser/iser_verbs.c 	ib_dereg_mr(res->mr);
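Note: the iser lines above use the core verbs fast-registration sequence: allocate an MR with ib_alloc_mr(), refresh its key with ib_update_fast_reg_key(ib_inc_rkey(...)) so stale rkeys are rejected, load the scatterlist with ib_map_mr_sg(), and describe the registration in a struct ib_reg_wr posted as IB_WR_REG_MR. A minimal sketch, assuming a placeholder helper name (example_fast_reg) and illustrative access flags; the caller would own the MR through reg_wr->mr and post reg_wr->wr ahead of the RDMA operation.

	#include <rdma/ib_verbs.h>
	#include <linux/scatterlist.h>

	static int example_fast_reg(struct ib_pd *pd, struct scatterlist *sg,
				    int sg_nents, struct ib_reg_wr *reg_wr)
	{
		struct ib_mr *mr;
		int n;

		/* One MR sized for the largest expected page list. */
		mr = ib_alloc_mr(pd, IB_MR_TYPE_MEM_REG, sg_nents);
		if (IS_ERR(mr))
			return PTR_ERR(mr);

		/* New key for this registration cycle. */
		ib_update_fast_reg_key(mr, ib_inc_rkey(mr->rkey));

		/* Translate the scatterlist into the MR's page list. */
		n = ib_map_mr_sg(mr, sg, sg_nents, NULL, PAGE_SIZE);
		if (n < sg_nents) {
			ib_dereg_mr(mr);
			return n < 0 ? n : -EINVAL;
		}

		/* Registration work request; posting it activates the MR. */
		reg_wr->wr.opcode = IB_WR_REG_MR;
		reg_wr->mr	   = mr;
		reg_wr->key	   = mr->rkey;
		reg_wr->access	   = IB_ACCESS_LOCAL_WRITE |
				     IB_ACCESS_REMOTE_READ |
				     IB_ACCESS_REMOTE_WRITE;
		return 0;
	}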
mr               1672 drivers/infiniband/ulp/isert/ib_isert.c 	ret = isert_check_pi_status(cmd, isert_cmd->rw.reg->mr);
mr               1718 drivers/infiniband/ulp/isert/ib_isert.c 		ret = isert_check_pi_status(se_cmd, isert_cmd->rw.reg->mr);
mr                422 drivers/infiniband/ulp/srp/ib_srp.c 		if (d->mr)
mr                423 drivers/infiniband/ulp/srp/ib_srp.c 			ib_dereg_mr(d->mr);
mr                441 drivers/infiniband/ulp/srp/ib_srp.c 	struct ib_mr *mr;
mr                462 drivers/infiniband/ulp/srp/ib_srp.c 		mr = ib_alloc_mr(pd, mr_type, max_page_list_len);
mr                463 drivers/infiniband/ulp/srp/ib_srp.c 		if (IS_ERR(mr)) {
mr                464 drivers/infiniband/ulp/srp/ib_srp.c 			ret = PTR_ERR(mr);
mr                470 drivers/infiniband/ulp/srp/ib_srp.c 		d->mr = mr;
mr               1260 drivers/infiniband/ulp/srp/ib_srp.c 			res = srp_inv_rkey(req, ch, (*pfr)->mr->rkey);
mr               1264 drivers/infiniband/ulp/srp/ib_srp.c 				  (*pfr)->mr->rkey, res);
mr               1556 drivers/infiniband/ulp/srp/ib_srp.c 	rkey = ib_inc_rkey(desc->mr->rkey);
mr               1557 drivers/infiniband/ulp/srp/ib_srp.c 	ib_update_fast_reg_key(desc->mr, rkey);
mr               1559 drivers/infiniband/ulp/srp/ib_srp.c 	n = ib_map_mr_sg(desc->mr, state->sg, sg_nents, sg_offset_p,
mr               1569 drivers/infiniband/ulp/srp/ib_srp.c 	WARN_ON_ONCE(desc->mr->length == 0);
mr               1578 drivers/infiniband/ulp/srp/ib_srp.c 	wr.mr = desc->mr;
mr               1579 drivers/infiniband/ulp/srp/ib_srp.c 	wr.key = desc->mr->rkey;
mr               1587 drivers/infiniband/ulp/srp/ib_srp.c 	srp_map_desc(state, desc->mr->iova,
mr               1588 drivers/infiniband/ulp/srp/ib_srp.c 		     desc->mr->length, desc->mr->rkey);
mr               1779 drivers/infiniband/ulp/srp/ib_srp.c 			mr_len += (*pfr)->mr->length;
mr                289 drivers/infiniband/ulp/srp/ib_srp.h 	struct ib_mr			*mr;
mr                332 drivers/ipack/devices/ipoctal.c 			 &channel->regs->w.mr); /* mr1 */
mr                333 drivers/ipack/devices/ipoctal.c 		iowrite8(0, &channel->regs->w.mr); /* mr2 */
mr                603 drivers/ipack/devices/ipoctal.c 	iowrite8(mr1, &channel->regs->w.mr);
mr                604 drivers/ipack/devices/ipoctal.c 	iowrite8(mr2, &channel->regs->w.mr);
mr                 23 drivers/ipack/devices/scc2698.h 		u8 d0, mr;  /* Mode register 1/2*/
mr                 30 drivers/ipack/devices/scc2698.h 		u8 d0, mr;  /* Mode register 1/2 */
mr                 86 drivers/mailbox/stm32-ipcc.c 	u32 status, mr, tosr, chan;
mr                 93 drivers/mailbox/stm32-ipcc.c 	mr = readl_relaxed(ipcc->reg_proc + IPCC_XMR);
mr                 96 drivers/mailbox/stm32-ipcc.c 	status = tosr & FIELD_GET(RX_BIT_MASK, ~mr);
mr                119 drivers/mailbox/stm32-ipcc.c 	u32 status, mr, tosr, chan;
mr                123 drivers/mailbox/stm32-ipcc.c 	mr = readl_relaxed(ipcc->reg_proc + IPCC_XMR);
mr                126 drivers/mailbox/stm32-ipcc.c 	status = ~tosr & FIELD_GET(TX_BIT_MASK, ~mr);
mr                179 drivers/media/usb/usbvision/usbvision.h #define YUV_TO_RGB_BY_THE_BOOK(my, mu, mv, mr, mg, mb) { \
mr                190 drivers/media/usb/usbvision/usbvision.h 	mr = LIMIT_RGB(mm_r); \
mr                446 drivers/message/fusion/mptbase.c 	MPT_FRAME_HDR *mr = NULL;
mr                478 drivers/message/fusion/mptbase.c 		mr = (MPT_FRAME_HDR *) CAST_U32_TO_PTR(pa);
mr                482 drivers/message/fusion/mptbase.c 		mr = (MPT_FRAME_HDR *) CAST_U32_TO_PTR(pa);
mr                497 drivers/message/fusion/mptbase.c 	if (MptCallbacks[cb_idx](ioc, mf, mr))
mr                507 drivers/message/fusion/mptbase.c 	MPT_FRAME_HDR	*mr;
mr                526 drivers/message/fusion/mptbase.c 	mr = (MPT_FRAME_HDR *)((u8 *)ioc->reply_frames +
mr                529 drivers/message/fusion/mptbase.c 	req_idx = le16_to_cpu(mr->u.frame.hwhdr.msgctxu.fld.req_idx);
mr                530 drivers/message/fusion/mptbase.c 	cb_idx = mr->u.frame.hwhdr.msgctxu.fld.cb_idx;
mr                534 drivers/message/fusion/mptbase.c 			ioc->name, mr, req_idx, cb_idx, mr->u.hdr.Function));
mr                535 drivers/message/fusion/mptbase.c 	DBG_DUMP_REPLY_FRAME(ioc, (u32 *)mr);
mr                539 drivers/message/fusion/mptbase.c 	ioc_stat = le16_to_cpu(mr->u.reply.IOCStatus);
mr                541 drivers/message/fusion/mptbase.c 		u32	 log_info = le32_to_cpu(mr->u.reply.IOCLogInfo);
mr                562 drivers/message/fusion/mptbase.c 	freeme = MptCallbacks[cb_idx](ioc, mf, mr);
mr                286 drivers/message/fusion/mptctl.c mptctl_taskmgmt_reply(MPT_ADAPTER *ioc, MPT_FRAME_HDR *mf, MPT_FRAME_HDR *mr)
mr                293 drivers/message/fusion/mptctl.c 		ioc->name, mf, mr));
mr                297 drivers/message/fusion/mptctl.c 	if (!mr)
mr                301 drivers/message/fusion/mptctl.c 	memcpy(ioc->taskmgmt_cmds.reply, mr,
mr                302 drivers/message/fusion/mptctl.c 	    min(MPT_DEFAULT_FRAME_SIZE, 4 * mr->u.reply.MsgLength));
mr               1193 drivers/message/fusion/mptsas.c mptsas_taskmgmt_complete(MPT_ADAPTER *ioc, MPT_FRAME_HDR *mf, MPT_FRAME_HDR *mr)
mr               1202 drivers/message/fusion/mptsas.c 	    "(mf = %p, mr = %p)\n", ioc->name, mf, mr));
mr               1204 drivers/message/fusion/mptsas.c 	pScsiTmReply = (SCSITaskMgmtReply_t *)mr;
mr               1229 drivers/message/fusion/mptsas.c 		memcpy(ioc->taskmgmt_cmds.reply, mr,
mr               1230 drivers/message/fusion/mptsas.c 		    min(MPT_DEFAULT_FRAME_SIZE, 4 * mr->u.reply.MsgLength));
mr                588 drivers/message/fusion/mptscsih.c mptscsih_io_done(MPT_ADAPTER *ioc, MPT_FRAME_HDR *mf, MPT_FRAME_HDR *mr)
mr                600 drivers/message/fusion/mptscsih.c 	req_idx_MR = (mr != NULL) ?
mr                601 drivers/message/fusion/mptscsih.c 	    le16_to_cpu(mr->u.frame.hwhdr.msgctxu.fld.req_idx) : req_idx;
mr                645 drivers/message/fusion/mptscsih.c 	pScsiReply = (SCSIIOReply_t *) mr;
mr                650 drivers/message/fusion/mptscsih.c 			ioc->name, mf, mr, sc, req_idx, pScsiReply->TaskTag));
mr                654 drivers/message/fusion/mptscsih.c 			ioc->name, mf, mr, sc, req_idx));
mr               2055 drivers/message/fusion/mptscsih.c 	MPT_FRAME_HDR *mr)
mr               2058 drivers/message/fusion/mptscsih.c 		"TaskMgmt completed (mf=%p, mr=%p)\n", ioc->name, mf, mr));
mr               2062 drivers/message/fusion/mptscsih.c 	if (!mr)
mr               2066 drivers/message/fusion/mptscsih.c 	memcpy(ioc->taskmgmt_cmds.reply, mr,
mr               2067 drivers/message/fusion/mptscsih.c 	    min(MPT_DEFAULT_FRAME_SIZE, 4 * mr->u.reply.MsgLength));
mr                384 drivers/net/ethernet/ibm/ehea/ehea.h 	struct ehea_mr mr;
mr                213 drivers/net/ethernet/ibm/ehea/ehea_main.c 		if (adapter->mr.handle) {
mr                215 drivers/net/ethernet/ibm/ehea/ehea_main.c 			arr[i++].fwh = adapter->mr.handle;
mr               1395 drivers/net/ethernet/ibm/ehea/ehea_main.c 	ret = ehea_gen_smr(adapter, &adapter->mr, &pr->send_mr);
mr               1399 drivers/net/ethernet/ibm/ehea/ehea_main.c 	ret = ehea_gen_smr(adapter, &adapter->mr, &pr->recv_mr);
mr               2305 drivers/net/ethernet/ibm/ehea/ehea_main.c 	ehea_rem_mr(&adapter->mr);
mr               2313 drivers/net/ethernet/ibm/ehea/ehea_main.c 	return ehea_reg_kernel_mr(adapter, &adapter->mr);
mr               2744 drivers/net/ethernet/ibm/ehea/ehea_main.c 			ret = ehea_rem_mr(&adapter->mr);
mr               2756 drivers/net/ethernet/ibm/ehea/ehea_main.c 			ret = ehea_reg_kernel_mr(adapter, &adapter->mr);
mr                435 drivers/net/ethernet/ibm/ehea/ehea_phyp.c 			struct ehea_mr *mr)
mr                449 drivers/net/ethernet/ibm/ehea/ehea_phyp.c 	mr->handle = outs[0];
mr                450 drivers/net/ethernet/ibm/ehea/ehea_phyp.c 	mr->lkey = (u32)outs[2];
mr                402 drivers/net/ethernet/ibm/ehea/ehea_phyp.h 			struct ehea_mr *mr);
mr                795 drivers/net/ethernet/ibm/ehea/ehea_qmr.c 			       struct ehea_mr *mr)
mr                810 drivers/net/ethernet/ibm/ehea/ehea_qmr.c 		hret = ehea_h_register_rpage_mr(adapter->handle, mr->handle, 0,
mr                815 drivers/net/ethernet/ibm/ehea/ehea_qmr.c 			ehea_h_free_resource(adapter->handle, mr->handle,
mr                826 drivers/net/ethernet/ibm/ehea/ehea_qmr.c 				struct ehea_mr *mr)
mr                835 drivers/net/ethernet/ibm/ehea/ehea_qmr.c 		hret = ehea_reg_mr_section(top, dir, idx, pt, adapter, mr);
mr                844 drivers/net/ethernet/ibm/ehea/ehea_qmr.c 				    struct ehea_mr *mr)
mr                853 drivers/net/ethernet/ibm/ehea/ehea_qmr.c 		hret = ehea_reg_mr_sections(top, dir, pt, adapter, mr);
mr                860 drivers/net/ethernet/ibm/ehea/ehea_qmr.c int ehea_reg_kernel_mr(struct ehea_adapter *adapter, struct ehea_mr *mr)
mr                878 drivers/net/ethernet/ibm/ehea/ehea_qmr.c 					&mr->handle, &mr->lkey);
mr                887 drivers/net/ethernet/ibm/ehea/ehea_qmr.c 		ehea_h_free_resource(adapter->handle, mr->handle, FORCE_FREE);
mr                897 drivers/net/ethernet/ibm/ehea/ehea_qmr.c 		hret = ehea_reg_mr_dir_sections(top, pt, adapter, mr);
mr                903 drivers/net/ethernet/ibm/ehea/ehea_qmr.c 		ehea_h_free_resource(adapter->handle, mr->handle, FORCE_FREE);
mr                909 drivers/net/ethernet/ibm/ehea/ehea_qmr.c 	mr->vaddr = EHEA_BUSMAP_START;
mr                910 drivers/net/ethernet/ibm/ehea/ehea_qmr.c 	mr->adapter = adapter;
mr                917 drivers/net/ethernet/ibm/ehea/ehea_qmr.c int ehea_rem_mr(struct ehea_mr *mr)
mr                921 drivers/net/ethernet/ibm/ehea/ehea_qmr.c 	if (!mr || !mr->adapter)
mr                924 drivers/net/ethernet/ibm/ehea/ehea_qmr.c 	hret = ehea_h_free_resource(mr->adapter->handle, mr->handle,
mr                374 drivers/net/ethernet/ibm/ehea/ehea_qmr.h int ehea_reg_kernel_mr(struct ehea_adapter *adapter, struct ehea_mr *mr);
mr                379 drivers/net/ethernet/ibm/ehea/ehea_qmr.h int ehea_rem_mr(struct ehea_mr *mr);
mr                 51 drivers/net/ethernet/ibm/emac/tah.c 	out_be32(&p->mr, TAH_MR_SR);
mr                 53 drivers/net/ethernet/ibm/emac/tah.c 	while ((in_be32(&p->mr) & TAH_MR_SR) && n)
mr                 60 drivers/net/ethernet/ibm/emac/tah.c 	out_be32(&p->mr,
mr                 25 drivers/net/ethernet/ibm/emac/tah.h 	u32 mr;
mr                242 drivers/net/ethernet/mellanox/mlx4/en_main.c 	(void) mlx4_mr_free(dev, &mdev->mr);
mr                305 drivers/net/ethernet/mellanox/mlx4/en_main.c 			 0, 0, &mdev->mr)) {
mr                309 drivers/net/ethernet/mellanox/mlx4/en_main.c 	if (mlx4_mr_enable(mdev->dev, &mdev->mr)) {
mr                340 drivers/net/ethernet/mellanox/mlx4/en_main.c 	(void) mlx4_mr_free(dev, &mdev->mr);
mr                119 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		rx_desc->data[i].lkey = cpu_to_be32(priv->mdev->mr.key);
mr                199 drivers/net/ethernet/mellanox/mlx4/en_tx.c 	ring->mr_key = cpu_to_be32(mdev->mr.key);
mr                427 drivers/net/ethernet/mellanox/mlx4/mlx4_en.h 	struct mlx4_mr		mr;
mr                419 drivers/net/ethernet/mellanox/mlx4/mr.c 			   int page_shift, struct mlx4_mr *mr)
mr                421 drivers/net/ethernet/mellanox/mlx4/mr.c 	mr->iova       = iova;
mr                422 drivers/net/ethernet/mellanox/mlx4/mr.c 	mr->size       = size;
mr                423 drivers/net/ethernet/mellanox/mlx4/mr.c 	mr->pd	       = pd;
mr                424 drivers/net/ethernet/mellanox/mlx4/mr.c 	mr->access     = access;
mr                425 drivers/net/ethernet/mellanox/mlx4/mr.c 	mr->enabled    = MLX4_MPT_DISABLED;
mr                426 drivers/net/ethernet/mellanox/mlx4/mr.c 	mr->key	       = hw_index_to_key(mridx);
mr                428 drivers/net/ethernet/mellanox/mlx4/mr.c 	return mlx4_mtt_init(dev, npages, page_shift, &mr->mtt);
mr                528 drivers/net/ethernet/mellanox/mlx4/mr.c 		  int npages, int page_shift, struct mlx4_mr *mr)
mr                538 drivers/net/ethernet/mellanox/mlx4/mr.c 				     access, npages, page_shift, mr);
mr                546 drivers/net/ethernet/mellanox/mlx4/mr.c static int mlx4_mr_free_reserved(struct mlx4_dev *dev, struct mlx4_mr *mr)
mr                550 drivers/net/ethernet/mellanox/mlx4/mr.c 	if (mr->enabled == MLX4_MPT_EN_HW) {
mr                552 drivers/net/ethernet/mellanox/mlx4/mr.c 				     key_to_hw_index(mr->key) &
mr                560 drivers/net/ethernet/mellanox/mlx4/mr.c 		mr->enabled = MLX4_MPT_EN_SW;
mr                562 drivers/net/ethernet/mellanox/mlx4/mr.c 	mlx4_mtt_cleanup(dev, &mr->mtt);
mr                567 drivers/net/ethernet/mellanox/mlx4/mr.c int mlx4_mr_free(struct mlx4_dev *dev, struct mlx4_mr *mr)
mr                571 drivers/net/ethernet/mellanox/mlx4/mr.c 	ret = mlx4_mr_free_reserved(dev, mr);
mr                574 drivers/net/ethernet/mellanox/mlx4/mr.c 	if (mr->enabled)
mr                575 drivers/net/ethernet/mellanox/mlx4/mr.c 		mlx4_mpt_free_icm(dev, key_to_hw_index(mr->key));
mr                576 drivers/net/ethernet/mellanox/mlx4/mr.c 	mlx4_mpt_release(dev, key_to_hw_index(mr->key));
mr                582 drivers/net/ethernet/mellanox/mlx4/mr.c void mlx4_mr_rereg_mem_cleanup(struct mlx4_dev *dev, struct mlx4_mr *mr)
mr                584 drivers/net/ethernet/mellanox/mlx4/mr.c 	mlx4_mtt_cleanup(dev, &mr->mtt);
mr                585 drivers/net/ethernet/mellanox/mlx4/mr.c 	mr->mtt.order = -1;
mr                589 drivers/net/ethernet/mellanox/mlx4/mr.c int mlx4_mr_rereg_mem_write(struct mlx4_dev *dev, struct mlx4_mr *mr,
mr                595 drivers/net/ethernet/mellanox/mlx4/mr.c 	err = mlx4_mtt_init(dev, npages, page_shift, &mr->mtt);
mr                604 drivers/net/ethernet/mellanox/mlx4/mr.c 	if (mr->mtt.order < 0) {
mr                609 drivers/net/ethernet/mellanox/mlx4/mr.c 						  &mr->mtt));
mr                610 drivers/net/ethernet/mellanox/mlx4/mr.c 		if (mr->mtt.page_shift == 0)
mr                611 drivers/net/ethernet/mellanox/mlx4/mr.c 			mpt_entry->mtt_sz    = cpu_to_be32(1 << mr->mtt.order);
mr                613 drivers/net/ethernet/mellanox/mlx4/mr.c 	if (mr->mtt.order >= 0 && mr->mtt.page_shift == 0) {
mr                621 drivers/net/ethernet/mellanox/mlx4/mr.c 	mr->enabled = MLX4_MPT_EN_SW;
mr                627 drivers/net/ethernet/mellanox/mlx4/mr.c int mlx4_mr_enable(struct mlx4_dev *dev, struct mlx4_mr *mr)
mr                633 drivers/net/ethernet/mellanox/mlx4/mr.c 	err = mlx4_mpt_alloc_icm(dev, key_to_hw_index(mr->key));
mr                645 drivers/net/ethernet/mellanox/mlx4/mr.c 				       mr->access);
mr                647 drivers/net/ethernet/mellanox/mlx4/mr.c 	mpt_entry->key	       = cpu_to_be32(key_to_hw_index(mr->key));
mr                648 drivers/net/ethernet/mellanox/mlx4/mr.c 	mpt_entry->pd_flags    = cpu_to_be32(mr->pd | MLX4_MPT_PD_FLAG_EN_INV);
mr                649 drivers/net/ethernet/mellanox/mlx4/mr.c 	mpt_entry->start       = cpu_to_be64(mr->iova);
mr                650 drivers/net/ethernet/mellanox/mlx4/mr.c 	mpt_entry->length      = cpu_to_be64(mr->size);
mr                651 drivers/net/ethernet/mellanox/mlx4/mr.c 	mpt_entry->entity_size = cpu_to_be32(mr->mtt.page_shift);
mr                653 drivers/net/ethernet/mellanox/mlx4/mr.c 	if (mr->mtt.order < 0) {
mr                658 drivers/net/ethernet/mellanox/mlx4/mr.c 						  &mr->mtt));
mr                661 drivers/net/ethernet/mellanox/mlx4/mr.c 	if (mr->mtt.order >= 0 && mr->mtt.page_shift == 0) {
mr                666 drivers/net/ethernet/mellanox/mlx4/mr.c 		mpt_entry->mtt_sz    = cpu_to_be32(1 << mr->mtt.order);
mr                672 drivers/net/ethernet/mellanox/mlx4/mr.c 			     key_to_hw_index(mr->key) & (dev->caps.num_mpts - 1));
mr                677 drivers/net/ethernet/mellanox/mlx4/mr.c 	mr->enabled = MLX4_MPT_EN_HW;
mr                687 drivers/net/ethernet/mellanox/mlx4/mr.c 	mlx4_mpt_free_icm(dev, key_to_hw_index(mr->key));
mr               1008 drivers/net/ethernet/mellanox/mlx4/mr.c 	key = key_to_hw_index(fmr->mr.key);
mr               1010 drivers/net/ethernet/mellanox/mlx4/mr.c 	*lkey = *rkey = fmr->mr.key = hw_index_to_key(key);
mr               1065 drivers/net/ethernet/mellanox/mlx4/mr.c 			    page_shift, &fmr->mr);
mr               1070 drivers/net/ethernet/mellanox/mlx4/mr.c 				    fmr->mr.mtt.offset,
mr               1081 drivers/net/ethernet/mellanox/mlx4/mr.c 	(void) mlx4_mr_free(dev, &fmr->mr);
mr               1091 drivers/net/ethernet/mellanox/mlx4/mr.c 	err = mlx4_mr_enable(dev, &fmr->mr);
mr               1096 drivers/net/ethernet/mellanox/mlx4/mr.c 				    key_to_hw_index(fmr->mr.key), NULL);
mr               1126 drivers/net/ethernet/mellanox/mlx4/mr.c 	if (fmr->mr.enabled == MLX4_MPT_EN_HW) {
mr               1143 drivers/net/ethernet/mellanox/mlx4/mr.c 	ret = mlx4_mr_free(dev, &fmr->mr);
mr               1146 drivers/net/ethernet/mellanox/mlx4/mr.c 	fmr->mr.enabled = MLX4_MPT_DISABLED;
mr                346 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 	send_info->read.lkey = send_ring->mr->mkey.key;
mr                373 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 		send_info->write.addr = (uintptr_t)send_ring->mr->dma_addr + buff_offset;
mr                374 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 		send_info->write.lkey = send_ring->mr->mkey.key;
mr                821 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 	struct mlx5dr_mr *mr = kzalloc(sizeof(*mr), GFP_KERNEL);
mr                826 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 	if (!mr)
mr                835 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 		kfree(mr);
mr                839 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 	err = dr_create_mkey(mdev, pdn, &mr->mkey);
mr                844 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 		kfree(mr);
mr                848 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 	mr->dma_addr = dma_addr;
mr                849 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 	mr->size = size;
mr                850 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 	mr->addr = buf;
mr                852 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 	return mr;
mr                855 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c static void dr_dereg_mr(struct mlx5_core_dev *mdev, struct mlx5dr_mr *mr)
mr                857 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 	mlx5_core_destroy_mkey(mdev, &mr->mkey);
mr                858 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 	dma_unmap_single(&mdev->pdev->dev, mr->dma_addr, mr->size,
mr                860 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 	kfree(mr);
mr                920 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 	dmn->send_ring->mr = dr_reg_mr(dmn->mdev,
mr                922 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 	if (!dmn->send_ring->mr) {
mr                938 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 	dr_dereg_mr(dmn->mdev, dmn->send_ring->mr);
mr                957 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c 	dr_dereg_mr(dmn->mdev, send_ring->mr);
mr               1014 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h 	struct mlx5dr_mr *mr;
mr                153 drivers/net/ethernet/mellanox/mlxsw/spectrum.h 	struct mlxsw_sp_mr *mr;
mr                251 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	struct mlxsw_sp_mr *mr = mlxsw_sp->mr;
mr                261 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 		mr_route->route_priv = kzalloc(mr->mr_ops->route_priv_size,
mr                271 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 		err = mr->mr_ops->route_create(mlxsw_sp, mr->priv,
mr                277 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 		err = mr->mr_ops->route_update(mlxsw_sp, mr_route->route_priv,
mr                289 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	struct mlxsw_sp_mr *mr = mlxsw_sp->mr;
mr                291 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	mr->mr_ops->route_destroy(mlxsw_sp, mr->priv, mr_route->route_priv);
mr                474 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	struct mlxsw_sp_mr *mr = mlxsw_sp->mr;
mr                484 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	err = mr->mr_ops->route_irif_update(mlxsw_sp, rve->mr_route->route_priv,
mr                489 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	err = mr->mr_ops->route_action_update(mlxsw_sp,
mr                508 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	struct mlxsw_sp_mr *mr = mlxsw_sp->mr;
mr                510 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	mr->mr_ops->route_action_update(mlxsw_sp, rve->mr_route->route_priv,
mr                523 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	struct mlxsw_sp_mr *mr = mlxsw_sp->mr;
mr                532 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 		err = mr->mr_ops->route_action_update(mlxsw_sp,
mr                542 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 		err = mr->mr_ops->route_erif_add(mlxsw_sp,
mr                552 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 		err = mr->mr_ops->route_min_mtu_update(mlxsw_sp,
mr                565 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 		mr->mr_ops->route_erif_del(mlxsw_sp, rve->mr_route->route_priv,
mr                569 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 		mr->mr_ops->route_action_update(mlxsw_sp,
mr                582 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	struct mlxsw_sp_mr *mr = mlxsw_sp->mr;
mr                599 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 		mr->mr_ops->route_action_update(mlxsw_sp,
mr                605 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	mr->mr_ops->route_erif_del(mlxsw_sp, rve->mr_route->route_priv, rifi);
mr                743 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	struct mlxsw_sp_mr *mr = mlxsw_sp->mr;
mr                758 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 			mr->mr_ops->route_min_mtu_update(mlxsw_sp,
mr                898 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	struct mlxsw_sp_mr *mr = mlxsw_sp->mr;
mr                903 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	mr_table = kzalloc(sizeof(*mr_table) + mr->mr_ops->route_priv_size,
mr                925 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	err = mr->mr_ops->route_create(mlxsw_sp, mr->priv,
mr                930 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	list_add_tail(&mr_table->node, &mr->table_list);
mr                943 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	struct mlxsw_sp_mr *mr = mlxsw_sp->mr;
mr                947 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	mr->mr_ops->route_destroy(mlxsw_sp, mr->priv,
mr                980 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	struct mlxsw_sp_mr *mr = mlxsw_sp->mr;
mr                986 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	mr->mr_ops->route_stats(mlxsw_sp, mr_route->route_priv, &packets,
mr                997 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	struct mlxsw_sp_mr *mr = container_of(work, struct mlxsw_sp_mr,
mr               1004 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	list_for_each_entry(mr_table, &mr->table_list, node)
mr               1011 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	mlxsw_core_schedule_dw(&mr->stats_update_dw, interval);
mr               1017 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	struct mlxsw_sp_mr *mr;
mr               1021 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	mr = kzalloc(sizeof(*mr) + mr_ops->priv_size, GFP_KERNEL);
mr               1022 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	if (!mr)
mr               1024 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	mr->mr_ops = mr_ops;
mr               1025 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	mlxsw_sp->mr = mr;
mr               1026 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	INIT_LIST_HEAD(&mr->table_list);
mr               1028 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	err = mr_ops->init(mlxsw_sp, mr->priv);
mr               1033 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	INIT_DELAYED_WORK(&mr->stats_update_dw, mlxsw_sp_mr_stats_update);
mr               1035 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	mlxsw_core_schedule_dw(&mr->stats_update_dw, interval);
mr               1038 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	kfree(mr);
mr               1044 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	struct mlxsw_sp_mr *mr = mlxsw_sp->mr;
mr               1046 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	cancel_delayed_work_sync(&mr->stats_update_dw);
mr               1047 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	mr->mr_ops->fini(mlxsw_sp, mr->priv);
mr               1048 drivers/net/ethernet/mellanox/mlxsw/spectrum_mr.c 	kfree(mr);
mr                334 drivers/net/wireless/broadcom/brcm80211/brcmfmac/bcmsdh.c static int mmc_submit_one(struct mmc_data *md, struct mmc_request *mr,
mr                351 drivers/net/wireless/broadcom/brcm80211/brcmfmac/bcmsdh.c 	mmc_wait_for_req(func->card->host, mr);
mr                 54 drivers/nvme/host/rdma.c 	struct ib_mr		*mr;
mr               1140 drivers/nvme/host/rdma.c 		.ex.invalidate_rkey = req->mr->rkey,
mr               1159 drivers/nvme/host/rdma.c 	if (req->mr) {
mr               1160 drivers/nvme/host/rdma.c 		ib_mr_pool_put(queue->qp, &queue->qp->rdma_mrs, req->mr);
mr               1161 drivers/nvme/host/rdma.c 		req->mr = NULL;
mr               1225 drivers/nvme/host/rdma.c 	req->mr = ib_mr_pool_get(queue->qp, &queue->qp->rdma_mrs);
mr               1226 drivers/nvme/host/rdma.c 	if (WARN_ON_ONCE(!req->mr))
mr               1233 drivers/nvme/host/rdma.c 	nr = ib_map_mr_sg(req->mr, req->sg_table.sgl, count, NULL, SZ_4K);
mr               1235 drivers/nvme/host/rdma.c 		ib_mr_pool_put(queue->qp, &queue->qp->rdma_mrs, req->mr);
mr               1236 drivers/nvme/host/rdma.c 		req->mr = NULL;
mr               1242 drivers/nvme/host/rdma.c 	ib_update_fast_reg_key(req->mr, ib_inc_rkey(req->mr->rkey));
mr               1249 drivers/nvme/host/rdma.c 	req->reg_wr.mr = req->mr;
mr               1250 drivers/nvme/host/rdma.c 	req->reg_wr.key = req->mr->rkey;
mr               1255 drivers/nvme/host/rdma.c 	sg->addr = cpu_to_le64(req->mr->iova);
mr               1256 drivers/nvme/host/rdma.c 	put_unaligned_le24(req->mr->length, sg->length);
mr               1257 drivers/nvme/host/rdma.c 	put_unaligned_le32(req->mr->rkey, sg->key);
mr               1461 drivers/nvme/host/rdma.c 		if (unlikely(wc->ex.invalidate_rkey != req->mr->rkey)) {
mr               1464 drivers/nvme/host/rdma.c 				req->mr->rkey);
mr               1467 drivers/nvme/host/rdma.c 	} else if (req->mr) {
mr               1474 drivers/nvme/host/rdma.c 				req->mr->rkey, ret);
mr               1783 drivers/nvme/host/rdma.c 			req->mr ? &req->reg_wr.wr : NULL);
mr                199 drivers/pinctrl/sirf/pinctrl-atlas7.c #define PADCONF(pad, t, mr, pr, dsr, adr, mb, pb, dsb, adb)	\
mr                203 drivers/pinctrl/sirf/pinctrl-atlas7.c 		.mux_reg = mr,					\
mr                133 drivers/rtc/rtc-at91sam9.c 	u32 offset, alarm, mr;
mr                140 drivers/rtc/rtc-at91sam9.c 	mr = rtt_readl(rtc, MR);
mr                143 drivers/rtc/rtc-at91sam9.c 	rtt_writel(rtc, MR, mr & ~(AT91_RTT_ALMIEN | AT91_RTT_RTTINCIEN));
mr                164 drivers/rtc/rtc-at91sam9.c 			mr &= ~AT91_RTT_ALMIEN;
mr                170 drivers/rtc/rtc-at91sam9.c 	rtt_writel(rtc, MR, mr | AT91_RTT_RTTRST);
mr                205 drivers/rtc/rtc-at91sam9.c 	u32 mr;
mr                214 drivers/rtc/rtc-at91sam9.c 	mr = rtt_readl(rtc, MR);
mr                215 drivers/rtc/rtc-at91sam9.c 	rtt_writel(rtc, MR, mr & ~AT91_RTT_ALMIEN);
mr                226 drivers/rtc/rtc-at91sam9.c 		rtt_writel(rtc, MR, mr | AT91_RTT_ALMIEN);
mr                236 drivers/rtc/rtc-at91sam9.c 	u32 mr = rtt_readl(rtc, MR);
mr                238 drivers/rtc/rtc-at91sam9.c 	dev_dbg(dev, "alarm_irq_enable: enabled=%08x, mr %08x\n", enabled, mr);
mr                240 drivers/rtc/rtc-at91sam9.c 		rtt_writel(rtc, MR, mr | AT91_RTT_ALMIEN);
mr                242 drivers/rtc/rtc-at91sam9.c 		rtt_writel(rtc, MR, mr & ~AT91_RTT_ALMIEN);
mr                252 drivers/rtc/rtc-at91sam9.c 	u32 mr = rtt_readl(rtc, MR);
mr                255 drivers/rtc/rtc-at91sam9.c 		   (mr & AT91_RTT_RTTINCIEN) ? "yes" : "no");
mr                261 drivers/rtc/rtc-at91sam9.c 	u32 sr, mr;
mr                266 drivers/rtc/rtc-at91sam9.c 	mr = rtt_readl(rtc, MR) & (AT91_RTT_ALMIEN | AT91_RTT_RTTINCIEN);
mr                267 drivers/rtc/rtc-at91sam9.c 	sr = rtt_readl(rtc, SR) & (mr >> 16);
mr                340 drivers/rtc/rtc-at91sam9.c 	u32		mr;
mr                396 drivers/rtc/rtc-at91sam9.c 	mr = rtt_readl(rtc, MR);
mr                399 drivers/rtc/rtc-at91sam9.c 	if ((mr & AT91_RTT_RTPRES) != sclk_rate) {
mr                400 drivers/rtc/rtc-at91sam9.c 		mr = AT91_RTT_RTTRST | (sclk_rate & AT91_RTT_RTPRES);
mr                405 drivers/rtc/rtc-at91sam9.c 	mr &= ~(AT91_RTT_ALMIEN | AT91_RTT_RTTINCIEN);
mr                406 drivers/rtc/rtc-at91sam9.c 	rtt_writel(rtc, MR, mr);
mr                450 drivers/rtc/rtc-at91sam9.c 	u32		mr = rtt_readl(rtc, MR);
mr                453 drivers/rtc/rtc-at91sam9.c 	rtt_writel(rtc, MR, mr & ~(AT91_RTT_ALMIEN | AT91_RTT_RTTINCIEN));
mr                463 drivers/rtc/rtc-at91sam9.c 	u32		mr = rtt_readl(rtc, MR);
mr                465 drivers/rtc/rtc-at91sam9.c 	rtc->imr = mr & (AT91_RTT_ALMIEN | AT91_RTT_RTTINCIEN);
mr                466 drivers/rtc/rtc-at91sam9.c 	rtt_writel(rtc, MR, mr & ~rtc->imr);
mr                476 drivers/rtc/rtc-at91sam9.c 	u32		mr = rtt_readl(rtc, MR);
mr                482 drivers/rtc/rtc-at91sam9.c 	rtc->imr = mr & (AT91_RTT_ALMIEN | AT91_RTT_RTTINCIEN);
mr                484 drivers/rtc/rtc-at91sam9.c 		if (device_may_wakeup(dev) && (mr & AT91_RTT_ALMIEN)) {
mr                492 drivers/rtc/rtc-at91sam9.c 			if (mr & AT91_RTT_RTTINCIEN)
mr                493 drivers/rtc/rtc-at91sam9.c 				rtt_writel(rtc, MR, mr & ~AT91_RTT_RTTINCIEN);
mr                495 drivers/rtc/rtc-at91sam9.c 			rtt_writel(rtc, MR, mr & ~rtc->imr);
mr                505 drivers/rtc/rtc-at91sam9.c 	u32		mr;
mr                512 drivers/rtc/rtc-at91sam9.c 		mr = rtt_readl(rtc, MR);
mr                513 drivers/rtc/rtc-at91sam9.c 		rtt_writel(rtc, MR, mr | rtc->imr);
mr                287 drivers/scsi/NCR5380.c 	unsigned char status, basr, mr, icr, i;
mr                290 drivers/scsi/NCR5380.c 	mr = NCR5380_read(MODE_REG);
mr                306 drivers/scsi/NCR5380.c 	printk(KERN_CONT "\nMR =   0x%02x : ", mr);
mr                308 drivers/scsi/NCR5380.c 		if (mr & mrs[i].mask)
mr                861 drivers/scsi/NCR5380.c 		unsigned char mr = NCR5380_read(MODE_REG);
mr                865 drivers/scsi/NCR5380.c 		         irq, basr, sr, mr);
mr                867 drivers/scsi/NCR5380.c 		if ((mr & MR_DMA_MODE) || (mr & MR_MONITOR_BSY)) {
mr                305 drivers/scsi/mesh.c 	volatile struct mesh_regs __iomem *mr = ms->mesh;
mr                311 drivers/scsi/mesh.c 	       ms, mr, md);
mr                314 drivers/scsi/mesh.c 	       (mr->count_hi << 8) + mr->count_lo, mr->sequence,
mr                315 drivers/scsi/mesh.c 	       (mr->bus_status1 << 8) + mr->bus_status0, mr->fifo_count,
mr                316 drivers/scsi/mesh.c 	       mr->exception, mr->error, mr->intr_mask, mr->interrupt,
mr                317 drivers/scsi/mesh.c 	       mr->sync_params);
mr                318 drivers/scsi/mesh.c 	while(in_8(&mr->fifo_count))
mr                319 drivers/scsi/mesh.c 		printk(KERN_DEBUG " fifo data=%.2x\n",in_8(&mr->fifo));
mr                339 drivers/scsi/mesh.c static inline void mesh_flush_io(volatile struct mesh_regs __iomem *mr)
mr                341 drivers/scsi/mesh.c 	(void)in_8(&mr->mesh_id);
mr                360 drivers/scsi/mesh.c 	volatile struct mesh_regs __iomem *mr = ms->mesh;
mr                363 drivers/scsi/mesh.c 	mesh_flush_io(mr);
mr                368 drivers/scsi/mesh.c 	out_8(&mr->exception, 0xff);	/* clear all exception bits */
mr                369 drivers/scsi/mesh.c 	out_8(&mr->error, 0xff);	/* clear all error bits */
mr                370 drivers/scsi/mesh.c 	out_8(&mr->sequence, SEQ_RESETMESH);
mr                371 drivers/scsi/mesh.c 	mesh_flush_io(mr);
mr                373 drivers/scsi/mesh.c 	out_8(&mr->intr_mask, INT_ERROR | INT_EXCEPTION | INT_CMDDONE);
mr                374 drivers/scsi/mesh.c 	out_8(&mr->source_id, ms->host->this_id);
mr                375 drivers/scsi/mesh.c 	out_8(&mr->sel_timeout, 25);	/* 250ms */
mr                376 drivers/scsi/mesh.c 	out_8(&mr->sync_params, ASYNC_PARAMS);
mr                382 drivers/scsi/mesh.c 		out_8(&mr->bus_status1, BS1_RST);	/* assert RST */
mr                383 drivers/scsi/mesh.c 		mesh_flush_io(mr);
mr                385 drivers/scsi/mesh.c 		out_8(&mr->bus_status1, 0);	/* negate RST */
mr                386 drivers/scsi/mesh.c 		mesh_flush_io(mr);
mr                393 drivers/scsi/mesh.c 	out_8(&mr->interrupt, 0xff);	/* clear all interrupt bits */
mr                394 drivers/scsi/mesh.c 	out_8(&mr->sequence, SEQ_FLUSHFIFO);
mr                395 drivers/scsi/mesh.c 	mesh_flush_io(mr);
mr                397 drivers/scsi/mesh.c 	out_8(&mr->sync_params, ASYNC_PARAMS);
mr                398 drivers/scsi/mesh.c 	out_8(&mr->sequence, SEQ_ENBRESEL);
mr                407 drivers/scsi/mesh.c 	volatile struct mesh_regs __iomem *mr = ms->mesh;
mr                446 drivers/scsi/mesh.c 	     MKWORD(mr->interrupt, mr->exception, mr->error, mr->fifo_count));
mr                447 drivers/scsi/mesh.c 	out_8(&mr->interrupt, INT_CMDDONE);
mr                448 drivers/scsi/mesh.c 	out_8(&mr->sequence, SEQ_ENBRESEL);
mr                449 drivers/scsi/mesh.c 	mesh_flush_io(mr);
mr                452 drivers/scsi/mesh.c 	if (in_8(&mr->bus_status1) & (BS1_BSY | BS1_SEL)) {
mr                458 drivers/scsi/mesh.c 		     MKWORD(mr->interrupt, mr->exception,
mr                459 drivers/scsi/mesh.c 			    mr->error, mr->fifo_count));
mr                461 drivers/scsi/mesh.c 			if ((in_8(&mr->bus_status1) & (BS1_BSY | BS1_SEL)) == 0)
mr                463 drivers/scsi/mesh.c 			if (in_8(&mr->interrupt) != 0) {
mr                465 drivers/scsi/mesh.c 				     MKWORD(mr->interrupt, mr->exception,
mr                466 drivers/scsi/mesh.c 					    mr->error, mr->fifo_count));
mr                473 drivers/scsi/mesh.c 		if (in_8(&mr->bus_status1) & (BS1_BSY | BS1_SEL)) {
mr                486 drivers/scsi/mesh.c 	out_8(&mr->dest_id, mr->source_id);
mr                500 drivers/scsi/mesh.c 	out_8(&mr->sequence, SEQ_DISRESEL);
mr                501 drivers/scsi/mesh.c 	if (in_8(&mr->interrupt) != 0) {
mr                503 drivers/scsi/mesh.c 		     MKWORD(mr->interrupt, mr->exception,
mr                504 drivers/scsi/mesh.c 			    mr->error, mr->fifo_count));
mr                509 drivers/scsi/mesh.c 		     MKWORD(mr->interrupt, mr->exception,
mr                510 drivers/scsi/mesh.c 			    mr->error, mr->fifo_count));
mr                513 drivers/scsi/mesh.c 	out_8(&mr->sequence, SEQ_ARBITRATE);
mr                516 drivers/scsi/mesh.c 		if (in_8(&mr->interrupt) != 0)
mr                521 drivers/scsi/mesh.c 	     MKWORD(mr->interrupt, mr->exception, mr->error, mr->fifo_count));
mr                522 drivers/scsi/mesh.c 	if (in_8(&mr->interrupt) == 0 && (in_8(&mr->bus_status1) & BS1_SEL)
mr                523 drivers/scsi/mesh.c 	    && (in_8(&mr->bus_status0) & BS0_IO)) {
mr                526 drivers/scsi/mesh.c 		     MKWORD(mr->interrupt, mr->exception, mr->error, mr->fifo_count));
mr                527 drivers/scsi/mesh.c 		out_8(&mr->sequence, SEQ_RESETMESH);
mr                528 drivers/scsi/mesh.c 		mesh_flush_io(mr);
mr                530 drivers/scsi/mesh.c 		out_8(&mr->interrupt, INT_ERROR | INT_EXCEPTION | INT_CMDDONE);
mr                531 drivers/scsi/mesh.c 		out_8(&mr->intr_mask, INT_ERROR | INT_EXCEPTION | INT_CMDDONE);
mr                532 drivers/scsi/mesh.c 		out_8(&mr->sequence, SEQ_ENBRESEL);
mr                533 drivers/scsi/mesh.c 		mesh_flush_io(mr);
mr                534 drivers/scsi/mesh.c 		for (t = 10; t > 0 && in_8(&mr->interrupt) == 0; --t)
mr                537 drivers/scsi/mesh.c 		     MKWORD(mr->interrupt, mr->exception, mr->error, mr->fifo_count));
mr                539 drivers/scsi/mesh.c 		if (in_8(&mr->interrupt) == 0 && (in_8(&mr->bus_status1) & BS1_SEL)
mr                540 drivers/scsi/mesh.c 		    && (in_8(&mr->bus_status0) & BS0_IO)) {
mr                641 drivers/scsi/mesh.c 	volatile struct mesh_regs __iomem *mr = ms->mesh;
mr                651 drivers/scsi/mesh.c 		out_8(&mr->sync_params, ASYNC_PARAMS);
mr                674 drivers/scsi/mesh.c 	out_8(&mr->sync_params, tp->sync_params);
mr                682 drivers/scsi/mesh.c 	volatile struct mesh_regs __iomem *mr = ms->mesh;
mr                688 drivers/scsi/mesh.c 	     MKWORD(ms->n_msgout, mr->exception, mr->fifo_count, mr->sequence));
mr                689 drivers/scsi/mesh.c 	out_8(&mr->interrupt, INT_ERROR | INT_EXCEPTION | INT_CMDDONE);
mr                696 drivers/scsi/mesh.c 		out_8(&mr->count_hi, 0);
mr                697 drivers/scsi/mesh.c 		out_8(&mr->count_lo, 1);
mr                698 drivers/scsi/mesh.c 		out_8(&mr->sequence, SEQ_MSGIN + seq);
mr                724 drivers/scsi/mesh.c 		out_8(&mr->count_hi, 0);
mr                725 drivers/scsi/mesh.c 		out_8(&mr->sequence, SEQ_FLUSHFIFO);
mr                726 drivers/scsi/mesh.c 		mesh_flush_io(mr);
mr                732 drivers/scsi/mesh.c 		if ((in_8(&mr->bus_status0) & BS0_ATN) == 0) {
mr                733 drivers/scsi/mesh.c 			dlog(ms, "bus0 was %.2x explicitly asserting ATN", mr->bus_status0);
mr                734 drivers/scsi/mesh.c 			out_8(&mr->bus_status0, BS0_ATN); /* explicit ATN */
mr                735 drivers/scsi/mesh.c 			mesh_flush_io(mr);
mr                737 drivers/scsi/mesh.c 			out_8(&mr->count_lo, 1);
mr                738 drivers/scsi/mesh.c 			out_8(&mr->sequence, SEQ_MSGOUT + seq);
mr                739 drivers/scsi/mesh.c 			out_8(&mr->bus_status0, 0); /* release explicit ATN */
mr                740 drivers/scsi/mesh.c 			dlog(ms,"hace: after explicit ATN bus0=%.2x",mr->bus_status0);
mr                751 drivers/scsi/mesh.c 			out_8(&mr->count_lo, ms->n_msgout - 1);
mr                752 drivers/scsi/mesh.c 			out_8(&mr->sequence, SEQ_MSGOUT + seq);
mr                754 drivers/scsi/mesh.c 				out_8(&mr->fifo, ms->msgout[i]);
mr                765 drivers/scsi/mesh.c 		out_8(&mr->dest_id, ms->conn_tgt);
mr                766 drivers/scsi/mesh.c 		out_8(&mr->sequence, SEQ_SELECT + SEQ_ATN);
mr                769 drivers/scsi/mesh.c 		out_8(&mr->sync_params, tp->sync_params);
mr                770 drivers/scsi/mesh.c 		out_8(&mr->count_hi, 0);
mr                772 drivers/scsi/mesh.c 			out_8(&mr->count_lo, cmd->cmd_len);
mr                773 drivers/scsi/mesh.c 			out_8(&mr->sequence, SEQ_COMMAND + seq);
mr                775 drivers/scsi/mesh.c 				out_8(&mr->fifo, cmd->cmnd[i]);
mr                777 drivers/scsi/mesh.c 			out_8(&mr->count_lo, 6);
mr                778 drivers/scsi/mesh.c 			out_8(&mr->sequence, SEQ_COMMAND + seq);
mr                780 drivers/scsi/mesh.c 				out_8(&mr->fifo, 0);
mr                796 drivers/scsi/mesh.c 		out_8(&mr->count_lo, nb);
mr                797 drivers/scsi/mesh.c 		out_8(&mr->count_hi, nb >> 8);
mr                798 drivers/scsi/mesh.c 		out_8(&mr->sequence, (tp->data_goes_out?
mr                802 drivers/scsi/mesh.c 		out_8(&mr->count_hi, 0);
mr                803 drivers/scsi/mesh.c 		out_8(&mr->count_lo, 1);
mr                804 drivers/scsi/mesh.c 		out_8(&mr->sequence, SEQ_STATUS + seq);
mr                808 drivers/scsi/mesh.c 		out_8(&mr->sequence, SEQ_ENBRESEL);
mr                809 drivers/scsi/mesh.c 		mesh_flush_io(mr);
mr                812 drivers/scsi/mesh.c 		     MKWORD(mr->interrupt, mr->exception, mr->error,
mr                813 drivers/scsi/mesh.c 			    mr->fifo_count));
mr                814 drivers/scsi/mesh.c 		out_8(&mr->sequence, SEQ_BUSFREE);
mr                826 drivers/scsi/mesh.c 	volatile struct mesh_regs __iomem *mr = ms->mesh;
mr                829 drivers/scsi/mesh.c 	n = mr->fifo_count;
mr                834 drivers/scsi/mesh.c 			ms->msgin[i++] = in_8(&mr->fifo);
mr                858 drivers/scsi/mesh.c 	volatile struct mesh_regs __iomem *mr = ms->mesh;
mr                904 drivers/scsi/mesh.c 	while ((in_8(&mr->bus_status1) & BS1_BSY) == 0) {
mr                907 drivers/scsi/mesh.c 		out_8(&mr->interrupt, INT_ERROR | INT_EXCEPTION | INT_CMDDONE);
mr                908 drivers/scsi/mesh.c 		mesh_flush_io(mr);
mr                910 drivers/scsi/mesh.c 		out_8(&mr->sequence, SEQ_ENBRESEL);
mr                911 drivers/scsi/mesh.c 		mesh_flush_io(mr);
mr                914 drivers/scsi/mesh.c 		     MKWORD(0, mr->error, mr->exception, mr->fifo_count));
mr                916 drivers/scsi/mesh.c 	out_8(&mr->interrupt, INT_ERROR | INT_EXCEPTION | INT_CMDDONE);
mr                917 drivers/scsi/mesh.c        	mesh_flush_io(mr);
mr                919 drivers/scsi/mesh.c 	out_8(&mr->sequence, SEQ_ENBRESEL);
mr                920 drivers/scsi/mesh.c        	mesh_flush_io(mr);
mr                922 drivers/scsi/mesh.c 	out_8(&mr->sync_params, ASYNC_PARAMS);
mr                927 drivers/scsi/mesh.c 	if (in_8(&mr->fifo_count) == 0) {
mr                934 drivers/scsi/mesh.c 		b = in_8(&mr->fifo);
mr                936 drivers/scsi/mesh.c 	} while (in_8(&mr->fifo_count));
mr                952 drivers/scsi/mesh.c 	out_8(&mr->sync_params, tp->sync_params);
mr                965 drivers/scsi/mesh.c 	dlog(ms, "resel err/exc=%.4x", MKWORD(0, 0, mr->error, mr->exception));
mr                991 drivers/scsi/mesh.c 	volatile struct mesh_regs __iomem *mr = ms->mesh;
mr               1011 drivers/scsi/mesh.c 	out_8(&mr->interrupt, INT_ERROR | INT_EXCEPTION | INT_CMDDONE);
mr               1012 drivers/scsi/mesh.c 	out_8(&mr->sequence, SEQ_FLUSHFIFO);
mr               1013 drivers/scsi/mesh.c        	mesh_flush_io(mr);
mr               1015 drivers/scsi/mesh.c 	out_8(&mr->sync_params, ASYNC_PARAMS);
mr               1016 drivers/scsi/mesh.c 	out_8(&mr->sequence, SEQ_ENBRESEL);
mr               1034 drivers/scsi/mesh.c 	volatile struct mesh_regs __iomem *mr = ms->mesh;
mr               1036 drivers/scsi/mesh.c 	err = in_8(&mr->error);
mr               1037 drivers/scsi/mesh.c 	exc = in_8(&mr->exception);
mr               1038 drivers/scsi/mesh.c 	out_8(&mr->interrupt, INT_ERROR | INT_EXCEPTION | INT_CMDDONE);
mr               1040 drivers/scsi/mesh.c 	     MKWORD(err, exc, mr->fifo_count, mr->count_lo));
mr               1045 drivers/scsi/mesh.c 		while ((in_8(&mr->bus_status1) & BS1_RST) != 0)
mr               1064 drivers/scsi/mesh.c 		out_8(&mr->interrupt, INT_CMDDONE);
mr               1084 drivers/scsi/mesh.c 		count = (mr->count_hi << 8) + mr->count_lo;
mr               1089 drivers/scsi/mesh.c 			out_8(&mr->sequence, mr->sequence);
mr               1115 drivers/scsi/mesh.c 	if (ms->phase > selecting && (in_8(&mr->bus_status1) & BS1_BSY)) {
mr               1128 drivers/scsi/mesh.c 	volatile struct mesh_regs __iomem *mr = ms->mesh;
mr               1130 drivers/scsi/mesh.c 	exc = in_8(&mr->exception);
mr               1131 drivers/scsi/mesh.c 	out_8(&mr->interrupt, INT_EXCEPTION | INT_CMDDONE);
mr               1322 drivers/scsi/mesh.c 	volatile struct mesh_regs __iomem *mr = ms->mesh;
mr               1329 drivers/scsi/mesh.c 		while (t > 0 && in_8(&mr->fifo_count) != 0
mr               1336 drivers/scsi/mesh.c 	nb = (mr->count_hi << 8) + mr->count_lo;
mr               1338 drivers/scsi/mesh.c 	     MKWORD(0, mr->fifo_count, 0, nb));
mr               1340 drivers/scsi/mesh.c 		nb += mr->fifo_count;
mr               1366 drivers/scsi/mesh.c 	volatile struct mesh_regs __iomem *mr = ms->mesh;
mr               1370 drivers/scsi/mesh.c 	     MKWORD(mr->count_hi, mr->count_lo, mr->sequence, mr->fifo_count));
mr               1371 drivers/scsi/mesh.c 	phase = in_8(&mr->bus_status0) & BS0_PHASE;
mr               1374 drivers/scsi/mesh.c 		out_8(&mr->count_lo, 1);
mr               1375 drivers/scsi/mesh.c 		out_8(&mr->sequence, SEQ_MSGOUT + use_active_neg);
mr               1376 drivers/scsi/mesh.c 		mesh_flush_io(mr);
mr               1378 drivers/scsi/mesh.c 		out_8(&mr->fifo, ms->msgout[ms->n_msgout-1]);
mr               1391 drivers/scsi/mesh.c 	if (mr->fifo_count) {
mr               1392 drivers/scsi/mesh.c 		out_8(&mr->sequence, SEQ_FLUSHFIFO);
mr               1393 drivers/scsi/mesh.c 		mesh_flush_io(mr);
mr               1445 drivers/scsi/mesh.c 	volatile struct mesh_regs __iomem *mr = ms->mesh;
mr               1450 drivers/scsi/mesh.c 	dlog(ms, "cmd_complete fc=%x", mr->fifo_count);
mr               1464 drivers/scsi/mesh.c 			out_8(&mr->count_lo, n - ms->n_msgin);
mr               1465 drivers/scsi/mesh.c 			out_8(&mr->sequence, SEQ_MSGIN + seq);
mr               1474 drivers/scsi/mesh.c 		out_8(&mr->sequence, SEQ_FLUSHFIFO);
mr               1475 drivers/scsi/mesh.c 		mesh_flush_io(mr);
mr               1477 drivers/scsi/mesh.c 		out_8(&mr->count_lo, 1);
mr               1478 drivers/scsi/mesh.c 		out_8(&mr->sequence, SEQ_MSGIN + SEQ_ATN + use_active_neg);
mr               1493 drivers/scsi/mesh.c 		out_8(&mr->count_lo, 1);
mr               1494 drivers/scsi/mesh.c 		out_8(&mr->sequence, SEQ_MSGOUT + use_active_neg + SEQ_ATN);
mr               1496 drivers/scsi/mesh.c 		while ((in_8(&mr->bus_status0) & BS0_REQ) == 0 && --t >= 0)
mr               1499 drivers/scsi/mesh.c 		     MKWORD(mr->error, mr->exception,
mr               1500 drivers/scsi/mesh.c 			    mr->fifo_count, mr->count_lo));
mr               1501 drivers/scsi/mesh.c 		if (in_8(&mr->interrupt) & (INT_ERROR | INT_EXCEPTION)) {
mr               1505 drivers/scsi/mesh.c 			if (in_8(&mr->interrupt) & INT_ERROR) {
mr               1507 drivers/scsi/mesh.c 				       in_8(&mr->error));
mr               1511 drivers/scsi/mesh.c 			if (in_8(&mr->exception) != EXC_PHASEMM)
mr               1513 drivers/scsi/mesh.c 				       in_8(&mr->exception));
mr               1516 drivers/scsi/mesh.c 				       in_8(&mr->bus_status0));
mr               1520 drivers/scsi/mesh.c 		if (in_8(&mr->bus_status0) & BS0_REQ) {
mr               1521 drivers/scsi/mesh.c 			out_8(&mr->sequence, SEQ_MSGOUT + use_active_neg);
mr               1522 drivers/scsi/mesh.c 			mesh_flush_io(mr);
mr               1524 drivers/scsi/mesh.c 			out_8(&mr->fifo, ms->msgout[ms->n_msgout-1]);
mr               1527 drivers/scsi/mesh.c 			out_8(&mr->sequence, SEQ_MSGIN + use_active_neg + SEQ_ATN);
mr               1569 drivers/scsi/mesh.c 			while ((in_8(&mr->bus_status0) & BS0_REQ) == 0) {
mr               1593 drivers/scsi/mesh.c 			out_8(&mr->sequence, 0);
mr               1594 drivers/scsi/mesh.c 			out_8(&mr->interrupt,
mr               1600 drivers/scsi/mesh.c 				cmd->SCp.Status = mr->fifo;
mr               1659 drivers/scsi/mesh.c 	volatile struct mesh_regs __iomem *mr = ms->mesh;
mr               1665 drivers/scsi/mesh.c 		       "phase=%d msgphase=%d\n", mr->bus_status0,
mr               1666 drivers/scsi/mesh.c 		       mr->interrupt, mr->exception, mr->error,
mr               1669 drivers/scsi/mesh.c 	while ((intr = in_8(&mr->interrupt)) != 0) {
mr               1671 drivers/scsi/mesh.c 		     MKWORD(intr, mr->error, mr->exception, mr->sequence));
mr               1677 drivers/scsi/mesh.c 			out_8(&mr->interrupt, INT_CMDDONE);
mr               1707 drivers/scsi/mesh.c 	volatile struct mesh_regs __iomem *mr = ms->mesh;
mr               1717 drivers/scsi/mesh.c 	out_8(&mr->exception, 0xff);	/* clear all exception bits */
mr               1718 drivers/scsi/mesh.c 	out_8(&mr->error, 0xff);	/* clear all error bits */
mr               1719 drivers/scsi/mesh.c 	out_8(&mr->sequence, SEQ_RESETMESH);
mr               1720 drivers/scsi/mesh.c        	mesh_flush_io(mr);
mr               1722 drivers/scsi/mesh.c 	out_8(&mr->intr_mask, INT_ERROR | INT_EXCEPTION | INT_CMDDONE);
mr               1723 drivers/scsi/mesh.c 	out_8(&mr->source_id, ms->host->this_id);
mr               1724 drivers/scsi/mesh.c 	out_8(&mr->sel_timeout, 25);	/* 250ms */
mr               1725 drivers/scsi/mesh.c 	out_8(&mr->sync_params, ASYNC_PARAMS);
mr               1728 drivers/scsi/mesh.c 	out_8(&mr->bus_status1, BS1_RST);	/* assert RST */
mr               1729 drivers/scsi/mesh.c        	mesh_flush_io(mr);
mr               1731 drivers/scsi/mesh.c 	out_8(&mr->bus_status1, 0);	/* negate RST */
mr               1815 drivers/scsi/mesh.c 	volatile struct mesh_regs __iomem *mr;
mr               1820 drivers/scsi/mesh.c        	mr = ms->mesh;
mr               1821 drivers/scsi/mesh.c 	out_8(&mr->intr_mask, 0);
mr               1822 drivers/scsi/mesh.c 	out_8(&mr->interrupt, INT_ERROR | INT_EXCEPTION | INT_CMDDONE);
mr               1823 drivers/scsi/mesh.c 	out_8(&mr->bus_status1, BS1_RST);
mr               1824 drivers/scsi/mesh.c 	mesh_flush_io(mr);
mr               1826 drivers/scsi/mesh.c 	out_8(&mr->bus_status1, 0);
mr               1117 drivers/scsi/qla1280.c 	uint8_t mr;
mr               1124 drivers/scsi/qla1280.c 	mr = BIT_3 | BIT_2 | BIT_1 | BIT_0;
mr               1143 drivers/scsi/qla1280.c 		mr |= BIT_6;
mr               1149 drivers/scsi/qla1280.c 	status = qla1280_mailbox_command(ha, mr, mb);
mr               2415 drivers/scsi/qla1280.c qla1280_mailbox_command(struct scsi_qla_host *ha, uint8_t mr, uint16_t *mb)
mr               2440 drivers/scsi/qla1280.c 		if (mr & BIT_0) {
mr               2444 drivers/scsi/qla1280.c 		mr >>= 1;
mr               2484 drivers/scsi/qla1280.c 	mr = MAILBOX_REGISTER_COUNT;
mr               1066 drivers/scsi/qla2xxx/qla_attr.c 		    vha->hw->mr.serial_num);
mr               1095 drivers/scsi/qla2xxx/qla_attr.c 		    vha->hw->mr.hw_version);
mr               4269 drivers/scsi/qla2xxx/qla_def.h 	struct mr_data_fx00 mr;
mr                854 drivers/scsi/qla2xxx/qla_gs.c 		    ha->mr.fw_version, qla2x00_version_str);
mr               1863 drivers/scsi/qla2xxx/qla_init.c 		return qlafx00_fx_disc(vha, &vha->hw->mr.fcport,
mr                705 drivers/scsi/qla2xxx/qla_mr.c 	snprintf(str, size, "%s", ha->mr.fw_version);
mr               1255 drivers/scsi/qla2xxx/qla_mr.c 	rval = qlafx00_fx_disc(vha, &vha->hw->mr.fcport,
mr               1361 drivers/scsi/qla2xxx/qla_mr.c 	ha->mr.fw_hbt_en = 0;
mr               1474 drivers/scsi/qla2xxx/qla_mr.c 		    &vha->hw->mr.fcport, FXDISC_REG_HOST_INFO))
mr               1492 drivers/scsi/qla2xxx/qla_mr.c 	if (ha->mr.fw_hbt_cnt)
mr               1493 drivers/scsi/qla2xxx/qla_mr.c 		ha->mr.fw_hbt_cnt--;
mr               1498 drivers/scsi/qla2xxx/qla_mr.c 		    (ha->mr.fw_hbt_en)) {
mr               1500 drivers/scsi/qla2xxx/qla_mr.c 			if (fw_heart_beat != ha->mr.old_fw_hbt_cnt) {
mr               1501 drivers/scsi/qla2xxx/qla_mr.c 				ha->mr.old_fw_hbt_cnt = fw_heart_beat;
mr               1502 drivers/scsi/qla2xxx/qla_mr.c 				ha->mr.fw_hbt_miss_cnt = 0;
mr               1504 drivers/scsi/qla2xxx/qla_mr.c 				ha->mr.fw_hbt_miss_cnt++;
mr               1505 drivers/scsi/qla2xxx/qla_mr.c 				if (ha->mr.fw_hbt_miss_cnt ==
mr               1510 drivers/scsi/qla2xxx/qla_mr.c 					ha->mr.fw_hbt_miss_cnt = 0;
mr               1514 drivers/scsi/qla2xxx/qla_mr.c 		ha->mr.fw_hbt_cnt = QLAFX00_HEARTBEAT_INTERVAL;
mr               1520 drivers/scsi/qla2xxx/qla_mr.c 		if (ha->mr.fw_reset_timer_exp) {
mr               1523 drivers/scsi/qla2xxx/qla_mr.c 			ha->mr.fw_reset_timer_exp = 0;
mr               1529 drivers/scsi/qla2xxx/qla_mr.c 			ha->mr.fw_reset_timer_tick = QLAFX00_RESET_INTERVAL;
mr               1531 drivers/scsi/qla2xxx/qla_mr.c 		    (!ha->mr.fw_hbt_en)) {
mr               1532 drivers/scsi/qla2xxx/qla_mr.c 			ha->mr.fw_hbt_en = 1;
mr               1533 drivers/scsi/qla2xxx/qla_mr.c 		} else if (!ha->mr.fw_reset_timer_tick) {
mr               1534 drivers/scsi/qla2xxx/qla_mr.c 			if (aenmbx0 == ha->mr.old_aenmbx0_state)
mr               1535 drivers/scsi/qla2xxx/qla_mr.c 				ha->mr.fw_reset_timer_exp = 1;
mr               1536 drivers/scsi/qla2xxx/qla_mr.c 			ha->mr.fw_reset_timer_tick = QLAFX00_RESET_INTERVAL;
mr               1552 drivers/scsi/qla2xxx/qla_mr.c 			ha->mr.fw_reset_timer_tick =
mr               1555 drivers/scsi/qla2xxx/qla_mr.c 			ha->mr.fw_reset_timer_tick =
mr               1558 drivers/scsi/qla2xxx/qla_mr.c 		if (ha->mr.old_aenmbx0_state != aenmbx0) {
mr               1559 drivers/scsi/qla2xxx/qla_mr.c 			ha->mr.old_aenmbx0_state = aenmbx0;
mr               1560 drivers/scsi/qla2xxx/qla_mr.c 			ha->mr.fw_reset_timer_tick = QLAFX00_RESET_INTERVAL;
mr               1562 drivers/scsi/qla2xxx/qla_mr.c 		ha->mr.fw_reset_timer_tick--;
mr               1569 drivers/scsi/qla2xxx/qla_mr.c 		if (ha->mr.fw_critemp_timer_tick == 0) {
mr               1575 drivers/scsi/qla2xxx/qla_mr.c 			if (tempc < ha->mr.critical_temperature) {
mr               1581 drivers/scsi/qla2xxx/qla_mr.c 			ha->mr.fw_critemp_timer_tick =
mr               1584 drivers/scsi/qla2xxx/qla_mr.c 			ha->mr.fw_critemp_timer_tick--;
mr               1587 drivers/scsi/qla2xxx/qla_mr.c 	if (ha->mr.host_info_resend) {
mr               1592 drivers/scsi/qla2xxx/qla_mr.c 		if (ha->mr.hinfo_resend_timer_tick == 0) {
mr               1593 drivers/scsi/qla2xxx/qla_mr.c 			ha->mr.host_info_resend = false;
mr               1595 drivers/scsi/qla2xxx/qla_mr.c 			ha->mr.hinfo_resend_timer_tick =
mr               1599 drivers/scsi/qla2xxx/qla_mr.c 			ha->mr.hinfo_resend_timer_tick--;
mr               1885 drivers/scsi/qla2xxx/qla_mr.c 				ha->mr.host_info_resend = true;
mr               1944 drivers/scsi/qla2xxx/qla_mr.c 		memcpy(&vha->hw->mr.symbolic_name, pinfo->symbolic_name,
mr               1945 drivers/scsi/qla2xxx/qla_mr.c 		    sizeof(vha->hw->mr.symbolic_name));
mr               1946 drivers/scsi/qla2xxx/qla_mr.c 		memcpy(&vha->hw->mr.serial_num, pinfo->serial_num,
mr               1947 drivers/scsi/qla2xxx/qla_mr.c 		    sizeof(vha->hw->mr.serial_num));
mr               1948 drivers/scsi/qla2xxx/qla_mr.c 		memcpy(&vha->hw->mr.hw_version, pinfo->hw_version,
mr               1949 drivers/scsi/qla2xxx/qla_mr.c 		    sizeof(vha->hw->mr.hw_version));
mr               1950 drivers/scsi/qla2xxx/qla_mr.c 		memcpy(&vha->hw->mr.fw_version, pinfo->fw_version,
mr               1951 drivers/scsi/qla2xxx/qla_mr.c 		    sizeof(vha->hw->mr.fw_version));
mr               1952 drivers/scsi/qla2xxx/qla_mr.c 		strim(vha->hw->mr.fw_version);
mr               1953 drivers/scsi/qla2xxx/qla_mr.c 		memcpy(&vha->hw->mr.uboot_version, pinfo->uboot_version,
mr               1954 drivers/scsi/qla2xxx/qla_mr.c 		    sizeof(vha->hw->mr.uboot_version));
mr               1955 drivers/scsi/qla2xxx/qla_mr.c 		memcpy(&vha->hw->mr.fru_serial_num, pinfo->fru_serial_num,
mr               1956 drivers/scsi/qla2xxx/qla_mr.c 		    sizeof(vha->hw->mr.fru_serial_num));
mr               1957 drivers/scsi/qla2xxx/qla_mr.c 		vha->hw->mr.critical_temperature =
mr               1960 drivers/scsi/qla2xxx/qla_mr.c 		ha->mr.extended_io_enabled = (pinfo->enabled_capabilities &
mr               2996 drivers/scsi/qla2xxx/qla_os.c 		ha->mr.fw_hbt_cnt = QLAFX00_HEARTBEAT_INTERVAL;
mr               2997 drivers/scsi/qla2xxx/qla_os.c 		ha->mr.fw_reset_timer_tick = QLAFX00_RESET_INTERVAL;
mr               2998 drivers/scsi/qla2xxx/qla_os.c 		ha->mr.fw_critemp_timer_tick = QLAFX00_CRITEMP_INTERVAL;
mr               2999 drivers/scsi/qla2xxx/qla_os.c 		ha->mr.fw_hbt_en = 1;
mr               3000 drivers/scsi/qla2xxx/qla_os.c 		ha->mr.host_info_resend = false;
mr               3001 drivers/scsi/qla2xxx/qla_os.c 		ha->mr.hinfo_resend_timer_tick = QLAFX00_HINFO_RESEND_INTERVAL;
mr               3107 drivers/scsi/qla2xxx/qla_os.c 	ha->mr.fcport.vha = base_vha;
mr               3108 drivers/scsi/qla2xxx/qla_os.c 	ha->mr.fcport.port_type = FCT_UNKNOWN;
mr               3109 drivers/scsi/qla2xxx/qla_os.c 	ha->mr.fcport.loop_id = FC_NO_LOOP_ID;
mr               3110 drivers/scsi/qla2xxx/qla_os.c 	qla2x00_set_fcport_state(&ha->mr.fcport, FCS_UNCONFIGURED);
mr               3111 drivers/scsi/qla2xxx/qla_os.c 	ha->mr.fcport.supported_classes = FC_COS_UNSPECIFIED;
mr               3112 drivers/scsi/qla2xxx/qla_os.c 	ha->mr.fcport.scan_state = 1;
mr               3387 drivers/scsi/qla2xxx/qla_os.c 			&base_vha->hw->mr.fcport, FXDISC_GET_CONFIG_INFO);
mr               3388 drivers/scsi/qla2xxx/qla_os.c 		host->sg_tablesize = (ha->mr.extended_io_enabled) ?
mr               3414 drivers/scsi/qla2xxx/qla_os.c 			&base_vha->hw->mr.fcport, FXDISC_GET_PORT_INFO);
mr               3418 drivers/scsi/qla2xxx/qla_os.c 			&base_vha->hw->mr.fcport, FXDISC_REG_HOST_INFO);
mr               6259 drivers/scsi/qla2xxx/qla_os.c 				    &base_vha->hw->mr.fcport,
mr                 44 drivers/sh/intc/balancing.c 	struct intc_mask_reg *mr = desc->hw.mask_regs;
mr                 48 drivers/sh/intc/balancing.c 	for (i = 0; mr && enum_id && i < desc->hw.nr_mask_regs; i++) {
mr                 49 drivers/sh/intc/balancing.c 		mr = desc->hw.mask_regs + i;
mr                 55 drivers/sh/intc/balancing.c 		if (!mr->dist_reg)
mr                 58 drivers/sh/intc/balancing.c 		for (j = 0; j < ARRAY_SIZE(mr->enum_ids); j++) {
mr                 59 drivers/sh/intc/balancing.c 			if (mr->enum_ids[j] != enum_id)
mr                 64 drivers/sh/intc/balancing.c 			reg_e = mr->dist_reg;
mr                 65 drivers/sh/intc/balancing.c 			reg_d = mr->dist_reg;
mr                 67 drivers/sh/intc/balancing.c 			fn += (mr->reg_width >> 3) - 1;
mr                 72 drivers/sh/intc/balancing.c 					(mr->reg_width - 1) - j);
mr                 44 drivers/sh/intc/handle.c 	struct intc_mask_reg *mr = desc->hw.mask_regs;
mr                 48 drivers/sh/intc/handle.c 	while (mr && enum_id && *reg_idx < desc->hw.nr_mask_regs) {
mr                 49 drivers/sh/intc/handle.c 		mr = desc->hw.mask_regs + *reg_idx;
mr                 51 drivers/sh/intc/handle.c 		for (; *fld_idx < ARRAY_SIZE(mr->enum_ids); (*fld_idx)++) {
mr                 52 drivers/sh/intc/handle.c 			if (mr->enum_ids[*fld_idx] != enum_id)
mr                 55 drivers/sh/intc/handle.c 			if (mr->set_reg && mr->clr_reg) {
mr                 58 drivers/sh/intc/handle.c 				reg_e = mr->clr_reg;
mr                 59 drivers/sh/intc/handle.c 				reg_d = mr->set_reg;
mr                 62 drivers/sh/intc/handle.c 				if (mr->set_reg) {
mr                 64 drivers/sh/intc/handle.c 					reg_e = mr->set_reg;
mr                 65 drivers/sh/intc/handle.c 					reg_d = mr->set_reg;
mr                 68 drivers/sh/intc/handle.c 					reg_e = mr->clr_reg;
mr                 69 drivers/sh/intc/handle.c 					reg_d = mr->clr_reg;
mr                 73 drivers/sh/intc/handle.c 			fn += (mr->reg_width >> 3) - 1;
mr                 78 drivers/sh/intc/handle.c 					(mr->reg_width - 1) - *fld_idx);
mr                178 drivers/sh/intc/handle.c 	struct intc_mask_reg *mr = desc->hw.ack_regs;
mr                182 drivers/sh/intc/handle.c 	for (i = 0; mr && enum_id && i < desc->hw.nr_ack_regs; i++) {
mr                183 drivers/sh/intc/handle.c 		mr = desc->hw.ack_regs + i;
mr                185 drivers/sh/intc/handle.c 		for (j = 0; j < ARRAY_SIZE(mr->enum_ids); j++) {
mr                186 drivers/sh/intc/handle.c 			if (mr->enum_ids[j] != enum_id)
mr                191 drivers/sh/intc/handle.c 			reg_e = mr->set_reg;
mr                192 drivers/sh/intc/handle.c 			reg_d = mr->set_reg;
mr                194 drivers/sh/intc/handle.c 			fn += (mr->reg_width >> 3) - 1;
mr                199 drivers/sh/intc/handle.c 					(mr->reg_width - 1) - j);
mr                171 drivers/soc/fsl/dpio/qbman-portal.c 		p->mr.valid_bit = QB_VALID_BIT;
mr                359 drivers/soc/fsl/dpio/qbman-portal.c 		if (p->mr.valid_bit != (ret[0] & QB_VALID_BIT))
mr                365 drivers/soc/fsl/dpio/qbman-portal.c 		p->mr.valid_bit ^= QB_VALID_BIT;
mr                116 drivers/soc/fsl/dpio/qbman-portal.h 	} mr;
mr                358 drivers/soc/fsl/qbman/qman.c 	struct qm_mr mr;
mr                770 drivers/soc/fsl/qbman/qman.c 	struct qm_mr *mr = &portal->mr;
mr                773 drivers/soc/fsl/qbman/qman.c 	mr->ring = portal->addr.ce + QM_CL_MR;
mr                774 drivers/soc/fsl/qbman/qman.c 	mr->pi = qm_in(portal, QM_REG_MR_PI_CINH) & (QM_MR_SIZE - 1);
mr                775 drivers/soc/fsl/qbman/qman.c 	mr->ci = qm_in(portal, QM_REG_MR_CI_CINH) & (QM_MR_SIZE - 1);
mr                776 drivers/soc/fsl/qbman/qman.c 	mr->cursor = mr->ring + mr->ci;
mr                777 drivers/soc/fsl/qbman/qman.c 	mr->fill = dpaa_cyc_diff(QM_MR_SIZE, mr->ci, mr->pi);
mr                778 drivers/soc/fsl/qbman/qman.c 	mr->vbit = (qm_in(portal, QM_REG_MR_PI_CINH) & QM_MR_SIZE)
mr                780 drivers/soc/fsl/qbman/qman.c 	mr->ithresh = qm_in(portal, QM_REG_MR_ITR);
mr                782 drivers/soc/fsl/qbman/qman.c 	mr->pmode = pmode;
mr                783 drivers/soc/fsl/qbman/qman.c 	mr->cmode = cmode;
mr                793 drivers/soc/fsl/qbman/qman.c 	struct qm_mr *mr = &portal->mr;
mr                795 drivers/soc/fsl/qbman/qman.c 	if (mr->ci != mr_ptr2idx(mr->cursor))
mr                801 drivers/soc/fsl/qbman/qman.c 	struct qm_mr *mr = &portal->mr;
mr                803 drivers/soc/fsl/qbman/qman.c 	if (!mr->fill)
mr                805 drivers/soc/fsl/qbman/qman.c 	return mr->cursor;
mr                810 drivers/soc/fsl/qbman/qman.c 	struct qm_mr *mr = &portal->mr;
mr                812 drivers/soc/fsl/qbman/qman.c 	DPAA_ASSERT(mr->fill);
mr                813 drivers/soc/fsl/qbman/qman.c 	mr->cursor = mr_inc(mr->cursor);
mr                814 drivers/soc/fsl/qbman/qman.c 	return --mr->fill;
mr                819 drivers/soc/fsl/qbman/qman.c 	struct qm_mr *mr = &portal->mr;
mr                820 drivers/soc/fsl/qbman/qman.c 	union qm_mr_entry *res = qm_cl(mr->ring, mr->pi);
mr                822 drivers/soc/fsl/qbman/qman.c 	DPAA_ASSERT(mr->pmode == qm_mr_pvb);
mr                824 drivers/soc/fsl/qbman/qman.c 	if ((res->verb & QM_MR_VERB_VBIT) == mr->vbit) {
mr                825 drivers/soc/fsl/qbman/qman.c 		mr->pi = (mr->pi + 1) & (QM_MR_SIZE - 1);
mr                826 drivers/soc/fsl/qbman/qman.c 		if (!mr->pi)
mr                827 drivers/soc/fsl/qbman/qman.c 			mr->vbit ^= QM_MR_VERB_VBIT;
mr                828 drivers/soc/fsl/qbman/qman.c 		mr->fill++;
mr                836 drivers/soc/fsl/qbman/qman.c 	struct qm_mr *mr = &portal->mr;
mr                838 drivers/soc/fsl/qbman/qman.c 	DPAA_ASSERT(mr->cmode == qm_mr_cci);
mr                839 drivers/soc/fsl/qbman/qman.c 	mr->ci = (mr->ci + num) & (QM_MR_SIZE - 1);
mr                840 drivers/soc/fsl/qbman/qman.c 	qm_out(portal, QM_REG_MR_CI_CINH, mr->ci);
mr                845 drivers/soc/fsl/qbman/qman.c 	struct qm_mr *mr = &portal->mr;
mr                847 drivers/soc/fsl/qbman/qman.c 	DPAA_ASSERT(mr->cmode == qm_mr_cci);
mr                848 drivers/soc/fsl/qbman/qman.c 	mr->ci = mr_ptr2idx(mr->cursor);
mr                849 drivers/soc/fsl/qbman/qman.c 	qm_out(portal, QM_REG_MR_CI_CINH, mr->ci);
mr                154 drivers/spi/atmel-quadspi.c 	u32			mr;
mr                295 drivers/spi/atmel-quadspi.c 	if (aq->mr != QSPI_MR_SMM) {
mr                297 drivers/spi/atmel-quadspi.c 		aq->mr = QSPI_MR_SMM;
mr                425 drivers/spi/atmel-quadspi.c 	aq->mr = QSPI_MR_SMM;
mr                364 drivers/spi/spi-at91-usart.c 	unsigned int mr = at91_usart_spi_readl(aus, MR);
mr                367 drivers/spi/spi-at91-usart.c 		mr |= US_MR_CPOL;
mr                369 drivers/spi/spi-at91-usart.c 		mr &= ~US_MR_CPOL;
mr                372 drivers/spi/spi-at91-usart.c 		mr |= US_MR_CPHA;
mr                374 drivers/spi/spi-at91-usart.c 		mr &= ~US_MR_CPHA;
mr                377 drivers/spi/spi-at91-usart.c 		mr |= US_MR_LOOP;
mr                379 drivers/spi/spi-at91-usart.c 		mr &= ~US_MR_LOOP;
mr                389 drivers/spi/spi-at91-usart.c 	*ausd = mr;
mr                393 drivers/spi/spi-at91-usart.c 		spi->bits_per_word, spi->mode, spi->chip_select, mr);
mr                354 drivers/spi/spi-atmel.c 	u32 mr;
mr                375 drivers/spi/spi-atmel.c 		mr = spi_readl(as, MR);
mr                391 drivers/spi/spi-atmel.c 		mr = spi_readl(as, MR);
mr                392 drivers/spi/spi-atmel.c 		mr = SPI_BFINS(PCS, ~(1 << spi->chip_select), mr);
mr                395 drivers/spi/spi-atmel.c 		spi_writel(as, MR, mr);
mr                398 drivers/spi/spi-atmel.c 	dev_dbg(&spi->dev, "activate NPCS, mr %08x\n", mr);
mr                404 drivers/spi/spi-atmel.c 	u32 mr;
mr                409 drivers/spi/spi-atmel.c 	mr = spi_readl(as, MR);
mr                410 drivers/spi/spi-atmel.c 	if (~SPI_BFEXT(PCS, mr) & (1 << spi->chip_select)) {
mr                411 drivers/spi/spi-atmel.c 		mr = SPI_BFINS(PCS, 0xf, mr);
mr                412 drivers/spi/spi-atmel.c 		spi_writel(as, MR, mr);
mr                415 drivers/spi/spi-atmel.c 	dev_dbg(&spi->dev, "DEactivate NPCS, mr %08x\n", mr);
mr                178 drivers/tty/serial/atmel_serial.c 		u32		mr;
mr               2601 drivers/tty/serial/atmel_serial.c 	unsigned int mr, quot;
mr               2611 drivers/tty/serial/atmel_serial.c 	mr = atmel_uart_readl(port, ATMEL_US_MR) & ATMEL_US_CHRL;
mr               2612 drivers/tty/serial/atmel_serial.c 	if (mr == ATMEL_US_CHRL_8)
mr               2617 drivers/tty/serial/atmel_serial.c 	mr = atmel_uart_readl(port, ATMEL_US_MR) & ATMEL_US_PAR;
mr               2618 drivers/tty/serial/atmel_serial.c 	if (mr == ATMEL_US_PAR_EVEN)
mr               2620 drivers/tty/serial/atmel_serial.c 	else if (mr == ATMEL_US_PAR_ODD)
mr               2729 drivers/tty/serial/atmel_serial.c 		atmel_port->cache.mr = atmel_uart_readl(port, ATMEL_US_MR);
mr               2762 drivers/tty/serial/atmel_serial.c 		atmel_uart_writel(port, ATMEL_US_MR, atmel_port->cache.mr);
mr                983 drivers/tty/serial/msm_serial.c 	unsigned int mr;
mr                992 drivers/tty/serial/msm_serial.c 	mr = msm_read(port, UART_MR1);
mr                993 drivers/tty/serial/msm_serial.c 	mr &= ~UART_MR1_RX_RDY_CTL;
mr                994 drivers/tty/serial/msm_serial.c 	msm_write(port, mr, UART_MR1);
mr               1003 drivers/tty/serial/msm_serial.c 	unsigned int mr;
mr               1005 drivers/tty/serial/msm_serial.c 	mr = msm_read(port, UART_MR1);
mr               1008 drivers/tty/serial/msm_serial.c 		mr &= ~UART_MR1_RX_RDY_CTL;
mr               1009 drivers/tty/serial/msm_serial.c 		msm_write(port, mr, UART_MR1);
mr               1012 drivers/tty/serial/msm_serial.c 		mr |= UART_MR1_RX_RDY_CTL;
mr               1013 drivers/tty/serial/msm_serial.c 		msm_write(port, mr, UART_MR1);
mr               1245 drivers/tty/serial/msm_serial.c 	unsigned int baud, mr;
mr               1259 drivers/tty/serial/msm_serial.c 	mr = msm_read(port, UART_MR2);
mr               1260 drivers/tty/serial/msm_serial.c 	mr &= ~UART_MR2_PARITY_MODE;
mr               1263 drivers/tty/serial/msm_serial.c 			mr |= UART_MR2_PARITY_MODE_ODD;
mr               1265 drivers/tty/serial/msm_serial.c 			mr |= UART_MR2_PARITY_MODE_SPACE;
mr               1267 drivers/tty/serial/msm_serial.c 			mr |= UART_MR2_PARITY_MODE_EVEN;
mr               1271 drivers/tty/serial/msm_serial.c 	mr &= ~UART_MR2_BITS_PER_CHAR;
mr               1274 drivers/tty/serial/msm_serial.c 		mr |= UART_MR2_BITS_PER_CHAR_5;
mr               1277 drivers/tty/serial/msm_serial.c 		mr |= UART_MR2_BITS_PER_CHAR_6;
mr               1280 drivers/tty/serial/msm_serial.c 		mr |= UART_MR2_BITS_PER_CHAR_7;
mr               1284 drivers/tty/serial/msm_serial.c 		mr |= UART_MR2_BITS_PER_CHAR_8;
mr               1289 drivers/tty/serial/msm_serial.c 	mr &= ~(UART_MR2_STOP_BIT_LEN_ONE | UART_MR2_STOP_BIT_LEN_TWO);
mr               1291 drivers/tty/serial/msm_serial.c 		mr |= UART_MR2_STOP_BIT_LEN_TWO;
mr               1293 drivers/tty/serial/msm_serial.c 		mr |= UART_MR2_STOP_BIT_LEN_ONE;
mr               1296 drivers/tty/serial/msm_serial.c 	msm_write(port, mr, UART_MR2);
mr               1299 drivers/tty/serial/msm_serial.c 	mr = msm_read(port, UART_MR1);
mr               1300 drivers/tty/serial/msm_serial.c 	mr &= ~(UART_MR1_CTS_CTL | UART_MR1_RX_RDY_CTL);
mr               1302 drivers/tty/serial/msm_serial.c 		mr |= UART_MR1_CTS_CTL;
mr               1303 drivers/tty/serial/msm_serial.c 		mr |= UART_MR1_RX_RDY_CTL;
mr               1305 drivers/tty/serial/msm_serial.c 	msm_write(port, mr, UART_MR1);
mr               1162 drivers/tty/serial/xilinx_uartps.c 		u32 mr;
mr               1167 drivers/tty/serial/xilinx_uartps.c 		mr = CDNS_UART_MR_PARITY_NONE;
mr               1169 drivers/tty/serial/xilinx_uartps.c 			mr |= CDNS_UART_MR_CLKSEL;
mr               1171 drivers/tty/serial/xilinx_uartps.c 		writel(mr,   port->membase + CDNS_UART_MR);
mr                476 drivers/video/fbdev/omap2/omapfb/omapfb-ioctl.c 		struct omapfb_memory_read *mr)
mr                485 drivers/video/fbdev/omap2/omapfb/omapfb-ioctl.c 	if (!access_ok(mr->buffer, mr->buffer_size))
mr                488 drivers/video/fbdev/omap2/omapfb/omapfb-ioctl.c 	if (mr->w > 4096 || mr->h > 4096)
mr                491 drivers/video/fbdev/omap2/omapfb/omapfb-ioctl.c 	if (mr->w * mr->h * 3 > mr->buffer_size)
mr                494 drivers/video/fbdev/omap2/omapfb/omapfb-ioctl.c 	buf = vmalloc(mr->buffer_size);
mr                500 drivers/video/fbdev/omap2/omapfb/omapfb-ioctl.c 	r = display->driver->memory_read(display, buf, mr->buffer_size,
mr                501 drivers/video/fbdev/omap2/omapfb/omapfb-ioctl.c 			mr->x, mr->y, mr->w, mr->h);
mr                504 drivers/video/fbdev/omap2/omapfb/omapfb-ioctl.c 		if (copy_to_user(mr->buffer, buf, r))
mr                 86 drivers/watchdog/at91sam9_wdt.c 	u32 mr;
mr                164 drivers/watchdog/at91sam9_wdt.c 	if ((tmp & mask) != (wdt->mr & mask)) {
mr                166 drivers/watchdog/at91sam9_wdt.c 			wdt_write(wdt, AT91_WDT_MR, wdt->mr);
mr                172 drivers/watchdog/at91sam9_wdt.c 		if (wdt->mr & AT91_WDT_WDDIS)
mr                217 drivers/watchdog/at91sam9_wdt.c 	if ((tmp & wdt->mr_mask) != (wdt->mr & wdt->mr_mask))
mr                220 drivers/watchdog/at91sam9_wdt.c 			 tmp & wdt->mr_mask, wdt->mr & wdt->mr_mask);
mr                293 drivers/watchdog/at91sam9_wdt.c 	wdt->mr = 0;
mr                296 drivers/watchdog/at91sam9_wdt.c 		wdt->mr |= AT91_WDT_WDFIEN;
mr                299 drivers/watchdog/at91sam9_wdt.c 		wdt->mr |= AT91_WDT_WDRSTEN;
mr                304 drivers/watchdog/at91sam9_wdt.c 		wdt->mr |= AT91_WDT_WDRPROC;
mr                307 drivers/watchdog/at91sam9_wdt.c 		wdt->mr |= AT91_WDT_WDDIS;
mr                312 drivers/watchdog/at91sam9_wdt.c 		wdt->mr |= AT91_WDT_WDIDLEHLT;
mr                315 drivers/watchdog/at91sam9_wdt.c 		wdt->mr |= AT91_WDT_WDDBGHLT;
mr                317 drivers/watchdog/at91sam9_wdt.c 	wdt->mr |= max | ((max - min) << 16);
mr                337 drivers/watchdog/at91sam9_wdt.c 	wdt->mr = (WDT_HW_TIMEOUT * 256) | AT91_WDT_WDRSTEN | AT91_WDT_WDD |
mr                 31 drivers/watchdog/sama5d4_wdt.c 	u32			mr;
mr                 48 drivers/watchdog/sama5d4_wdt.c #define wdt_enabled (!(wdt->mr & AT91_WDT_WDDIS))
mr                 81 drivers/watchdog/sama5d4_wdt.c 	wdt->mr &= ~AT91_WDT_WDDIS;
mr                 82 drivers/watchdog/sama5d4_wdt.c 	wdt_write(wdt, AT91_WDT_MR, wdt->mr);
mr                 91 drivers/watchdog/sama5d4_wdt.c 	wdt->mr |= AT91_WDT_WDDIS;
mr                 92 drivers/watchdog/sama5d4_wdt.c 	wdt_write(wdt, AT91_WDT_MR, wdt->mr);
mr                112 drivers/watchdog/sama5d4_wdt.c 	wdt->mr &= ~AT91_WDT_WDV;
mr                113 drivers/watchdog/sama5d4_wdt.c 	wdt->mr |= AT91_WDT_SET_WDV(value);
mr                123 drivers/watchdog/sama5d4_wdt.c 		wdt_write(wdt, AT91_WDT_MR, wdt->mr & ~AT91_WDT_WDDIS);
mr                160 drivers/watchdog/sama5d4_wdt.c 	wdt->mr = AT91_WDT_WDDIS;
mr                164 drivers/watchdog/sama5d4_wdt.c 		wdt->mr |= AT91_WDT_WDFIEN;
mr                166 drivers/watchdog/sama5d4_wdt.c 		wdt->mr |= AT91_WDT_WDRSTEN;
mr                169 drivers/watchdog/sama5d4_wdt.c 		wdt->mr |= AT91_WDT_WDIDLEHLT;
mr                172 drivers/watchdog/sama5d4_wdt.c 		wdt->mr |= AT91_WDT_WDDBGHLT;
mr                187 drivers/watchdog/sama5d4_wdt.c 		wdt_write_nosleep(wdt, AT91_WDT_MR, wdt->mr);
mr                235 drivers/watchdog/sama5d4_wdt.c 	if ((wdt->mr & AT91_WDT_WDFIEN) && irq) {
mr                249 drivers/watchdog/sama5d4_wdt.c 	wdt->mr |= AT91_WDT_SET_WDD(WDT_SEC2TICKS(MAX_WDT_TIMEOUT));
mr                250 drivers/watchdog/sama5d4_wdt.c 	wdt->mr |= AT91_WDT_SET_WDV(timeout);
mr               1328 fs/cifs/cifsglob.h 	struct smbd_mr			*mr;
mr               1354 fs/cifs/cifsglob.h 	struct smbd_mr			*mr;
mr               1664 fs/cifs/cifssmb.c 	use_rdma_mr = rdata->mr;
mr               2050 fs/cifs/cifssmb.c 	if (wdata->mr) {
mr               2051 fs/cifs/cifssmb.c 		smbd_deregister_mr(wdata->mr);
mr               2052 fs/cifs/cifssmb.c 		wdata->mr = NULL;
mr               3306 fs/cifs/file.c 	if (rdata->mr) {
mr               3307 fs/cifs/file.c 		smbd_deregister_mr(rdata->mr);
mr               3308 fs/cifs/file.c 		rdata->mr = NULL;
mr               3450 fs/cifs/file.c 		else if (rdata->mr)
mr               4194 fs/cifs/file.c 		else if (rdata->mr)
mr               4004 fs/cifs/smb2ops.c 	use_rdma_mr = rdata->mr;
mr               3576 fs/cifs/smb2pdu.c 		rdata->mr = smbd_register_mr(
mr               3580 fs/cifs/smb2pdu.c 		if (!rdata->mr)
mr               3591 fs/cifs/smb2pdu.c 		v1->offset = cpu_to_le64(rdata->mr->mr->iova);
mr               3592 fs/cifs/smb2pdu.c 		v1->token = cpu_to_le32(rdata->mr->mr->rkey);
mr               3593 fs/cifs/smb2pdu.c 		v1->length = cpu_to_le32(rdata->mr->mr->length);
mr               3687 fs/cifs/smb2pdu.c 	if (rdata->mr) {
mr               3688 fs/cifs/smb2pdu.c 		smbd_deregister_mr(rdata->mr);
mr               3689 fs/cifs/smb2pdu.c 		rdata->mr = NULL;
mr               3912 fs/cifs/smb2pdu.c 	if (wdata->mr) {
mr               3913 fs/cifs/smb2pdu.c 		smbd_deregister_mr(wdata->mr);
mr               3914 fs/cifs/smb2pdu.c 		wdata->mr = NULL;
mr               3984 fs/cifs/smb2pdu.c 		wdata->mr = smbd_register_mr(
mr               3988 fs/cifs/smb2pdu.c 		if (!wdata->mr) {
mr               4010 fs/cifs/smb2pdu.c 		v1->offset = cpu_to_le64(wdata->mr->mr->iova);
mr               4011 fs/cifs/smb2pdu.c 		v1->token = cpu_to_le32(wdata->mr->mr->rkey);
mr               4012 fs/cifs/smb2pdu.c 		v1->length = cpu_to_le32(wdata->mr->mr->length);
mr               4026 fs/cifs/smb2pdu.c 	if (wdata->mr) {
mr               4036 fs/cifs/smb2pdu.c 	if (!wdata->mr)
mr               2246 fs/cifs/smbdirect.c 	struct smbd_mr *mr;
mr               2252 fs/cifs/smbdirect.c 		mr = container_of(cqe, struct smbd_mr, cqe);
mr               2253 fs/cifs/smbdirect.c 		smbd_disconnect_rdma_connection(mr->conn);
mr               2277 fs/cifs/smbdirect.c 			rc = ib_dereg_mr(smbdirect_mr->mr);
mr               2286 fs/cifs/smbdirect.c 			smbdirect_mr->mr = ib_alloc_mr(
mr               2289 fs/cifs/smbdirect.c 			if (IS_ERR(smbdirect_mr->mr)) {
mr               2318 fs/cifs/smbdirect.c 	struct smbd_mr *mr, *tmp;
mr               2321 fs/cifs/smbdirect.c 	list_for_each_entry_safe(mr, tmp, &info->mr_list, list) {
mr               2322 fs/cifs/smbdirect.c 		if (mr->state == MR_INVALIDATED)
mr               2323 fs/cifs/smbdirect.c 			ib_dma_unmap_sg(info->id->device, mr->sgl,
mr               2324 fs/cifs/smbdirect.c 				mr->sgl_count, mr->dir);
mr               2325 fs/cifs/smbdirect.c 		ib_dereg_mr(mr->mr);
mr               2326 fs/cifs/smbdirect.c 		kfree(mr->sgl);
mr               2327 fs/cifs/smbdirect.c 		kfree(mr);
mr               2354 fs/cifs/smbdirect.c 		smbdirect_mr->mr = ib_alloc_mr(info->pd, info->mr_type,
mr               2356 fs/cifs/smbdirect.c 		if (IS_ERR(smbdirect_mr->mr)) {
mr               2368 fs/cifs/smbdirect.c 			ib_dereg_mr(smbdirect_mr->mr);
mr               2384 fs/cifs/smbdirect.c 		ib_dereg_mr(smbdirect_mr->mr);
mr               2499 fs/cifs/smbdirect.c 	rc = ib_map_mr_sg(smbdirect_mr->mr, smbdirect_mr->sgl, num_pages,
mr               2508 fs/cifs/smbdirect.c 	ib_update_fast_reg_key(smbdirect_mr->mr,
mr               2509 fs/cifs/smbdirect.c 		ib_inc_rkey(smbdirect_mr->mr->rkey));
mr               2516 fs/cifs/smbdirect.c 	reg_wr->mr = smbdirect_mr->mr;
mr               2517 fs/cifs/smbdirect.c 	reg_wr->key = smbdirect_mr->mr->rkey;
mr               2583 fs/cifs/smbdirect.c 		wr->ex.invalidate_rkey = smbdirect_mr->mr->rkey;
mr                292 fs/cifs/smbdirect.h 	struct ib_mr		*mr;
mr                309 fs/cifs/smbdirect.h int smbd_deregister_mr(struct smbd_mr *mr);
mr                711 include/linux/mlx4/device.h 	struct mlx4_mr		mr;
mr               1118 include/linux/mlx4/device.h 		  int npages, int page_shift, struct mlx4_mr *mr);
mr               1119 include/linux/mlx4/device.h int mlx4_mr_free(struct mlx4_dev *dev, struct mlx4_mr *mr);
mr               1120 include/linux/mlx4/device.h int mlx4_mr_enable(struct mlx4_dev *dev, struct mlx4_mr *mr);
mr               1536 include/linux/mlx4/device.h void mlx4_mr_rereg_mem_cleanup(struct mlx4_dev *dev, struct mlx4_mr *mr);
mr               1537 include/linux/mlx4/device.h int mlx4_mr_rereg_mem_write(struct mlx4_dev *dev, struct mlx4_mr *mr,
mr                 10 include/net/netfilter/nf_nat_redirect.h 		     const struct nf_nat_ipv4_multi_range_compat *mr,
mr               1385 include/rdma/ib_verbs.h 	struct ib_mr		*mr;
mr               2398 include/rdma/ib_verbs.h 	int (*rereg_user_mr)(struct ib_mr *mr, int flags, u64 start, u64 length,
mr               2401 include/rdma/ib_verbs.h 	int (*dereg_mr)(struct ib_mr *mr, struct ib_udata *udata);
mr               2411 include/rdma/ib_verbs.h 	int (*map_mr_sg)(struct ib_mr *mr, struct scatterlist *sg, int sg_nents,
mr               2413 include/rdma/ib_verbs.h 	int (*check_mr_status)(struct ib_mr *mr, u32 check_mask,
mr               2477 include/rdma/ib_verbs.h 	int (*map_mr_sg_pi)(struct ib_mr *mr, struct scatterlist *data_sg,
mr               4119 include/rdma/ib_verbs.h int ib_dereg_mr_user(struct ib_mr *mr, struct ib_udata *udata);
mr               4130 include/rdma/ib_verbs.h static inline int ib_dereg_mr(struct ib_mr *mr)
mr               4132 include/rdma/ib_verbs.h 	return ib_dereg_mr_user(mr, NULL);
mr               4154 include/rdma/ib_verbs.h static inline void ib_update_fast_reg_key(struct ib_mr *mr, u8 newkey)
mr               4156 include/rdma/ib_verbs.h 	mr->lkey = (mr->lkey & 0xffffff00) | newkey;
mr               4157 include/rdma/ib_verbs.h 	mr->rkey = (mr->rkey & 0xffffff00) | newkey;
mr               4290 include/rdma/ib_verbs.h int ib_check_mr_status(struct ib_mr *mr, u32 check_mask,
mr               4333 include/rdma/ib_verbs.h int ib_map_mr_sg(struct ib_mr *mr, struct scatterlist *sg, int sg_nents,
mr               4335 include/rdma/ib_verbs.h int ib_map_mr_sg_pi(struct ib_mr *mr, struct scatterlist *data_sg,
mr               4341 include/rdma/ib_verbs.h ib_map_mr_sg_zbva(struct ib_mr *mr, struct scatterlist *sg, int sg_nents,
mr               4346 include/rdma/ib_verbs.h 	n = ib_map_mr_sg(mr, sg, sg_nents, sg_offset, page_size);
mr               4347 include/rdma/ib_verbs.h 	mr->iova = 0;
mr               4352 include/rdma/ib_verbs.h int ib_sg_to_pages(struct ib_mr *mr, struct scatterlist *sgl, int sg_nents,
mr                 11 include/rdma/mr_pool.h void ib_mr_pool_put(struct ib_qp *qp, struct list_head *list, struct ib_mr *mr);
mr                110 include/rdma/rdmavt_mr.h 	struct rvt_mregion *mr;
mr                125 include/rdma/rdmavt_mr.h static inline void rvt_put_mr(struct rvt_mregion *mr)
mr                127 include/rdma/rdmavt_mr.h 	percpu_ref_put(&mr->refcount);
mr                130 include/rdma/rdmavt_mr.h static inline void rvt_get_mr(struct rvt_mregion *mr)
mr                132 include/rdma/rdmavt_mr.h 	percpu_ref_get(&mr->refcount);
mr                138 include/rdma/rdmavt_mr.h 		rvt_put_mr(ss->sge.mr);
mr                166 include/rdma/rdmavt_mr.h 			rvt_put_mr(sge->mr);
mr                169 include/rdma/rdmavt_mr.h 	} else if (sge->length == 0 && sge->mr->lkey) {
mr                171 include/rdma/rdmavt_mr.h 			if (++sge->m >= sge->mr->mapsz)
mr                175 include/rdma/rdmavt_mr.h 		sge->vaddr = sge->mr->map[sge->m]->segs[sge->n].vaddr;
mr                176 include/rdma/rdmavt_mr.h 		sge->length = sge->mr->map[sge->m]->segs[sge->n].length;
mr                195 include/rdma/rdmavt_mr.h bool rvt_mr_has_lkey(struct rvt_mregion *mr, u32 lkey);
mr                589 include/rdma/rdmavt_qp.h 		rvt_put_mr(sge->mr);
mr                 40 include/rdma/rw.h 			struct ib_mr		*mr;
mr                 92 include/trace/events/rpcrdma.h 		struct rpcrdma_mr *mr,
mr                 96 include/trace/events/rpcrdma.h 	TP_ARGS(task, pos, mr, nsegs),
mr                113 include/trace/events/rpcrdma.h 		__entry->nents = mr->mr_nents;
mr                114 include/trace/events/rpcrdma.h 		__entry->handle = mr->mr_handle;
mr                115 include/trace/events/rpcrdma.h 		__entry->length = mr->mr_length;
mr                116 include/trace/events/rpcrdma.h 		__entry->offset = mr->mr_offset;
mr                133 include/trace/events/rpcrdma.h 					struct rpcrdma_mr *mr,		\
mr                136 include/trace/events/rpcrdma.h 				TP_ARGS(task, pos, mr, nsegs))
mr                141 include/trace/events/rpcrdma.h 		struct rpcrdma_mr *mr,
mr                145 include/trace/events/rpcrdma.h 	TP_ARGS(task, mr, nsegs),
mr                160 include/trace/events/rpcrdma.h 		__entry->nents = mr->mr_nents;
mr                161 include/trace/events/rpcrdma.h 		__entry->handle = mr->mr_handle;
mr                162 include/trace/events/rpcrdma.h 		__entry->length = mr->mr_length;
mr                163 include/trace/events/rpcrdma.h 		__entry->offset = mr->mr_offset;
mr                179 include/trace/events/rpcrdma.h 					struct rpcrdma_mr *mr,		\
mr                182 include/trace/events/rpcrdma.h 				TP_ARGS(task, mr, nsegs))
mr                193 include/trace/events/rpcrdma.h 		__field(const void *, mr)
mr                199 include/trace/events/rpcrdma.h 		__entry->mr = container_of(frwr, struct rpcrdma_mr, frwr);
mr                206 include/trace/events/rpcrdma.h 		__entry->mr, rdma_show_wc_status(__entry->status),
mr                233 include/trace/events/rpcrdma.h 		const struct rpcrdma_mr *mr
mr                236 include/trace/events/rpcrdma.h 	TP_ARGS(mr),
mr                239 include/trace/events/rpcrdma.h 		__field(const void *, mr)
mr                247 include/trace/events/rpcrdma.h 		__entry->mr = mr;
mr                248 include/trace/events/rpcrdma.h 		__entry->handle = mr->mr_handle;
mr                249 include/trace/events/rpcrdma.h 		__entry->length = mr->mr_length;
mr                250 include/trace/events/rpcrdma.h 		__entry->offset = mr->mr_offset;
mr                251 include/trace/events/rpcrdma.h 		__entry->dir    = mr->mr_dir;
mr                255 include/trace/events/rpcrdma.h 		__entry->mr, __entry->length,
mr                264 include/trace/events/rpcrdma.h 					const struct rpcrdma_mr *mr \
mr                266 include/trace/events/rpcrdma.h 				TP_ARGS(mr))
mr                837 include/trace/events/rpcrdma.h 		const struct rpcrdma_mr *mr,
mr                841 include/trace/events/rpcrdma.h 	TP_ARGS(mr, rc),
mr                844 include/trace/events/rpcrdma.h 		__field(const void *, mr)
mr                849 include/trace/events/rpcrdma.h 		__entry->mr = mr;
mr                854 include/trace/events/rpcrdma.h 		__entry->mr, __entry->rc
mr                860 include/trace/events/rpcrdma.h 		const struct rpcrdma_mr *mr,
mr                864 include/trace/events/rpcrdma.h 	TP_ARGS(mr, rc),
mr                867 include/trace/events/rpcrdma.h 		__field(const void *, mr)
mr                876 include/trace/events/rpcrdma.h 		__entry->mr = mr;
mr                877 include/trace/events/rpcrdma.h 		__entry->handle = mr->mr_handle;
mr                878 include/trace/events/rpcrdma.h 		__entry->length = mr->mr_length;
mr                879 include/trace/events/rpcrdma.h 		__entry->offset = mr->mr_offset;
mr                880 include/trace/events/rpcrdma.h 		__entry->dir    = mr->mr_dir;
mr                885 include/trace/events/rpcrdma.h 		__entry->mr, __entry->length,
mr                894 include/trace/events/rpcrdma.h 		const struct rpcrdma_mr *mr,
mr                898 include/trace/events/rpcrdma.h 	TP_ARGS(mr, sg_nents),
mr                901 include/trace/events/rpcrdma.h 		__field(const void *, mr)
mr                908 include/trace/events/rpcrdma.h 		__entry->mr = mr;
mr                909 include/trace/events/rpcrdma.h 		__entry->addr = mr->mr_sg->dma_address;
mr                910 include/trace/events/rpcrdma.h 		__entry->dir = mr->mr_dir;
mr                915 include/trace/events/rpcrdma.h 		__entry->mr, __entry->addr,
mr                923 include/trace/events/rpcrdma.h 		const struct rpcrdma_mr *mr,
mr                927 include/trace/events/rpcrdma.h 	TP_ARGS(mr, num_mapped),
mr                930 include/trace/events/rpcrdma.h 		__field(const void *, mr)
mr                938 include/trace/events/rpcrdma.h 		__entry->mr = mr;
mr                939 include/trace/events/rpcrdma.h 		__entry->addr = mr->mr_sg->dma_address;
mr                940 include/trace/events/rpcrdma.h 		__entry->dir = mr->mr_dir;
mr                942 include/trace/events/rpcrdma.h 		__entry->nents = mr->mr_nents;
mr                946 include/trace/events/rpcrdma.h 		__entry->mr, __entry->addr,
mr                 99 include/uapi/rdma/rdma_user_rxe.h 				struct ib_mr *mr;
mr                 73 net/mac80211/rc80211_minstrel.c int minstrel_get_tp_avg(struct minstrel_rate *mr, int prob_ewma)
mr                 77 net/mac80211/rc80211_minstrel.c 	usecs = mr->perfect_tx_time;
mr                 82 net/mac80211/rc80211_minstrel.c 	if (mr->stats.prob_ewma < MINSTREL_FRAC(10, 100))
mr                198 net/mac80211/rc80211_minstrel.c 		struct minstrel_rate *mr = &mi->r[i];
mr                209 net/mac80211/rc80211_minstrel.c 			mr->adjusted_retry_count = mrs->retry_count >> 1;
mr                210 net/mac80211/rc80211_minstrel.c 			if (mr->adjusted_retry_count > 2)
mr                211 net/mac80211/rc80211_minstrel.c 				mr->adjusted_retry_count = 2;
mr                212 net/mac80211/rc80211_minstrel.c 			mr->sample_limit = 4;
mr                214 net/mac80211/rc80211_minstrel.c 			mr->sample_limit = -1;
mr                215 net/mac80211/rc80211_minstrel.c 			mr->adjusted_retry_count = mrs->retry_count;
mr                217 net/mac80211/rc80211_minstrel.c 		if (!mr->adjusted_retry_count)
mr                218 net/mac80211/rc80211_minstrel.c 			mr->adjusted_retry_count = 2;
mr                229 net/mac80211/rc80211_minstrel.c 			tmp_cur_tp = minstrel_get_tp_avg(mr, mrs->prob_ewma);
mr                299 net/mac80211/rc80211_minstrel.c minstrel_get_retry_count(struct minstrel_rate *mr,
mr                302 net/mac80211/rc80211_minstrel.c 	u8 retry = mr->adjusted_retry_count;
mr                305 net/mac80211/rc80211_minstrel.c 		retry = max_t(u8, 2, min(mr->stats.retry_count_rtscts, retry));
mr                307 net/mac80211/rc80211_minstrel.c 		retry = max_t(u8, 2, min(mr->retry_count_cts, retry));
mr                336 net/mac80211/rc80211_minstrel.c 	struct minstrel_rate *msr, *mr;
mr                394 net/mac80211/rc80211_minstrel.c 	mr = &mi->r[mi->max_tp_rate[0]];
mr                401 net/mac80211/rc80211_minstrel.c 	    msr->perfect_tx_time > mr->perfect_tx_time &&
mr                499 net/mac80211/rc80211_minstrel.c 		struct minstrel_rate *mr = &mi->r[n];
mr                512 net/mac80211/rc80211_minstrel.c 		memset(mr, 0, sizeof(*mr));
mr                515 net/mac80211/rc80211_minstrel.c 		mr->rix = i;
mr                517 net/mac80211/rc80211_minstrel.c 		mr->bitrate = DIV_ROUND_UP(sband->bitrates[i].bitrate,
mr                519 net/mac80211/rc80211_minstrel.c 		calc_rate_durations(sband->band, mr, &sband->bitrates[i],
mr                524 net/mac80211/rc80211_minstrel.c 		mr->sample_limit = -1;
mr                526 net/mac80211/rc80211_minstrel.c 		mr->retry_count_cts = 1;
mr                528 net/mac80211/rc80211_minstrel.c 		tx_time = mr->perfect_tx_time + mi->sp_ack_dur;
mr                531 net/mac80211/rc80211_minstrel.c 			tx_time_single = mr->ack_time + mr->perfect_tx_time;
mr                541 net/mac80211/rc80211_minstrel.c 				(mr->retry_count_cts < mp->max_retry))
mr                542 net/mac80211/rc80211_minstrel.c 				mr->retry_count_cts++;
mr                547 net/mac80211/rc80211_minstrel.c 				(++mr->stats.retry_count < mp->max_retry));
mr                548 net/mac80211/rc80211_minstrel.c 		mr->adjusted_retry_count = mrs->retry_count;
mr                550 net/mac80211/rc80211_minstrel.c 			mr->retry_count_cts = mrs->retry_count;
mr                554 net/mac80211/rc80211_minstrel.c 		struct minstrel_rate *mr = &mi->r[i];
mr                555 net/mac80211/rc80211_minstrel.c 		mr->rix = -1;
mr                130 net/mac80211/rc80211_minstrel.h int minstrel_get_tp_avg(struct minstrel_rate *mr, int prob_ewma);
mr                 78 net/mac80211/rc80211_minstrel_debugfs.c 		struct minstrel_rate *mr = &mi->r[i];
mr                 87 net/mac80211/rc80211_minstrel_debugfs.c 		p += sprintf(p, " %3u%s ", mr->bitrate / 2,
mr                 88 net/mac80211/rc80211_minstrel_debugfs.c 				(mr->bitrate & 1 ? ".5" : "  "));
mr                 90 net/mac80211/rc80211_minstrel_debugfs.c 		p += sprintf(p, "%6u ", mr->perfect_tx_time);
mr                 92 net/mac80211/rc80211_minstrel_debugfs.c 		tp_max = minstrel_get_tp_avg(mr, MINSTREL_FRAC(100,100));
mr                 93 net/mac80211/rc80211_minstrel_debugfs.c 		tp_avg = minstrel_get_tp_avg(mr, mrs->prob_ewma);
mr                135 net/mac80211/rc80211_minstrel_debugfs.c 		struct minstrel_rate *mr = &mi->r[i];
mr                144 net/mac80211/rc80211_minstrel_debugfs.c 		p += sprintf(p, ",%u%s", mr->bitrate / 2,
mr                145 net/mac80211/rc80211_minstrel_debugfs.c 				(mr->bitrate & 1 ? ".5," : ","));
mr                147 net/mac80211/rc80211_minstrel_debugfs.c 		p += sprintf(p, "%u,",mr->perfect_tx_time);
mr                149 net/mac80211/rc80211_minstrel_debugfs.c 		tp_max = minstrel_get_tp_avg(mr, MINSTREL_FRAC(100,100));
mr                150 net/mac80211/rc80211_minstrel_debugfs.c 		tp_avg = minstrel_get_tp_avg(mr, mrs->prob_ewma);
mr                 29 net/netfilter/nf_nat_redirect.c 		     const struct nf_nat_ipv4_multi_range_compat *mr,
mr                 67 net/netfilter/nf_nat_redirect.c 	newrange.flags	     = mr->range[0].flags | NF_NAT_RANGE_MAP_IPS;
mr                 70 net/netfilter/nf_nat_redirect.c 	newrange.min_proto   = mr->range[0].min;
mr                 71 net/netfilter/nf_nat_redirect.c 	newrange.max_proto   = mr->range[0].max;
mr                110 net/netfilter/nft_redir.c 	struct nf_nat_ipv4_multi_range_compat mr;
mr                112 net/netfilter/nft_redir.c 	memset(&mr, 0, sizeof(mr));
mr                114 net/netfilter/nft_redir.c 		mr.range[0].min.all = (__force __be16)nft_reg_load16(
mr                116 net/netfilter/nft_redir.c 		mr.range[0].max.all = (__force __be16)nft_reg_load16(
mr                118 net/netfilter/nft_redir.c 		mr.range[0].flags |= NF_NAT_RANGE_PROTO_SPECIFIED;
mr                121 net/netfilter/nft_redir.c 	mr.range[0].flags |= priv->flags;
mr                123 net/netfilter/nft_redir.c 	regs->verdict.code = nf_nat_redirect_ipv4(pkt->skb, &mr, nft_hook(pkt));
mr                 21 net/netfilter/xt_MASQUERADE.c 	const struct nf_nat_ipv4_multi_range_compat *mr = par->targinfo;
mr                 23 net/netfilter/xt_MASQUERADE.c 	if (mr->range[0].flags & NF_NAT_RANGE_MAP_IPS) {
mr                 27 net/netfilter/xt_MASQUERADE.c 	if (mr->rangesize != 1) {
mr                 28 net/netfilter/xt_MASQUERADE.c 		pr_debug("bad rangesize %u\n", mr->rangesize);
mr                 38 net/netfilter/xt_MASQUERADE.c 	const struct nf_nat_ipv4_multi_range_compat *mr;
mr                 40 net/netfilter/xt_MASQUERADE.c 	mr = par->targinfo;
mr                 41 net/netfilter/xt_MASQUERADE.c 	range.flags = mr->range[0].flags;
mr                 42 net/netfilter/xt_MASQUERADE.c 	range.min_proto = mr->range[0].min;
mr                 43 net/netfilter/xt_MASQUERADE.c 	range.max_proto = mr->range[0].max;
mr                 74 net/netfilter/xt_NETMAP.c 	const struct nf_nat_ipv4_multi_range_compat *mr = par->targinfo;
mr                 83 net/netfilter/xt_NETMAP.c 	netmask = ~(mr->range[0].min_ip ^ mr->range[0].max_ip);
mr                 90 net/netfilter/xt_NETMAP.c 	new_ip |= mr->range[0].min_ip & netmask;
mr                 94 net/netfilter/xt_NETMAP.c 	newrange.flags	     = mr->range[0].flags | NF_NAT_RANGE_MAP_IPS;
mr                 97 net/netfilter/xt_NETMAP.c 	newrange.min_proto   = mr->range[0].min;
mr                 98 net/netfilter/xt_NETMAP.c 	newrange.max_proto   = mr->range[0].max;
mr                106 net/netfilter/xt_NETMAP.c 	const struct nf_nat_ipv4_multi_range_compat *mr = par->targinfo;
mr                108 net/netfilter/xt_NETMAP.c 	if (!(mr->range[0].flags & NF_NAT_RANGE_MAP_IPS)) {
mr                112 net/netfilter/xt_NETMAP.c 	if (mr->rangesize != 1) {
mr                113 net/netfilter/xt_NETMAP.c 		pr_debug("bad rangesize %u.\n", mr->rangesize);
mr                 52 net/netfilter/xt_REDIRECT.c 	const struct nf_nat_ipv4_multi_range_compat *mr = par->targinfo;
mr                 54 net/netfilter/xt_REDIRECT.c 	if (mr->range[0].flags & NF_NAT_RANGE_MAP_IPS) {
mr                 58 net/netfilter/xt_REDIRECT.c 	if (mr->rangesize != 1) {
mr                 59 net/netfilter/xt_REDIRECT.c 		pr_debug("bad rangesize %u.\n", mr->rangesize);
mr                 18 net/netfilter/xt_nat.c 	const struct nf_nat_ipv4_multi_range_compat *mr = par->targinfo;
mr                 20 net/netfilter/xt_nat.c 	if (mr->rangesize != 1) {
mr                 54 net/netfilter/xt_nat.c 	const struct nf_nat_ipv4_multi_range_compat *mr = par->targinfo;
mr                 64 net/netfilter/xt_nat.c 	xt_nat_convert_range(&range, &mr->range[0]);
mr                 71 net/netfilter/xt_nat.c 	const struct nf_nat_ipv4_multi_range_compat *mr = par->targinfo;
mr                 80 net/netfilter/xt_nat.c 	xt_nat_convert_range(&range, &mr->range[0]);
mr                 78 net/rds/ib_frmr.c 	frmr->mr = ib_alloc_mr(rds_ibdev->pd, IB_MR_TYPE_MEM_REG,
mr                 80 net/rds/ib_frmr.c 	if (IS_ERR(frmr->mr)) {
mr                 82 net/rds/ib_frmr.c 		err = PTR_ERR(frmr->mr);
mr                134 net/rds/ib_frmr.c 	ret = ib_map_mr_sg_zbva(frmr->mr, ibmr->sg, ibmr->sg_len,
mr                150 net/rds/ib_frmr.c 	ib_update_fast_reg_key(frmr->mr, ibmr->remap_count++);
mr                157 net/rds/ib_frmr.c 	reg_wr.mr = frmr->mr;
mr                158 net/rds/ib_frmr.c 	reg_wr.key = frmr->mr->rkey;
mr                273 net/rds/ib_frmr.c 	if (!i_cm_id || !i_cm_id->qp || !frmr->mr)
mr                290 net/rds/ib_frmr.c 	s_wr->ex.invalidate_rkey = frmr->mr->rkey;
mr                392 net/rds/ib_frmr.c 			if (frmr->mr)
mr                393 net/rds/ib_frmr.c 				ib_dereg_mr(frmr->mr);
mr                428 net/rds/ib_frmr.c 		*key = frmr->mr->rkey;
mr                 57 net/rds/ib_mr.h 	struct ib_mr		*mr;
mr                 70 net/rds/rdma.c 	struct rds_mr *mr;
mr                 74 net/rds/rdma.c 		mr = rb_entry(parent, struct rds_mr, r_rb_node);
mr                 76 net/rds/rdma.c 		if (key < mr->r_key)
mr                 78 net/rds/rdma.c 		else if (key > mr->r_key)
mr                 81 net/rds/rdma.c 			return mr;
mr                 95 net/rds/rdma.c static void rds_destroy_mr(struct rds_mr *mr)
mr                 97 net/rds/rdma.c 	struct rds_sock *rs = mr->r_sock;
mr                102 net/rds/rdma.c 			mr->r_key, refcount_read(&mr->r_refcount));
mr                104 net/rds/rdma.c 	if (test_and_set_bit(RDS_MR_DEAD, &mr->r_state))
mr                108 net/rds/rdma.c 	if (!RB_EMPTY_NODE(&mr->r_rb_node))
mr                109 net/rds/rdma.c 		rb_erase(&mr->r_rb_node, &rs->rs_rdma_keys);
mr                110 net/rds/rdma.c 	trans_private = mr->r_trans_private;
mr                111 net/rds/rdma.c 	mr->r_trans_private = NULL;
mr                115 net/rds/rdma.c 		mr->r_trans->free_mr(trans_private, mr->r_invalidate);
mr                118 net/rds/rdma.c void __rds_put_mr_final(struct rds_mr *mr)
mr                120 net/rds/rdma.c 	rds_destroy_mr(mr);
mr                121 net/rds/rdma.c 	kfree(mr);
mr                130 net/rds/rdma.c 	struct rds_mr *mr;
mr                137 net/rds/rdma.c 		mr = rb_entry(node, struct rds_mr, r_rb_node);
mr                138 net/rds/rdma.c 		if (mr->r_trans == rs->rs_transport)
mr                139 net/rds/rdma.c 			mr->r_invalidate = 0;
mr                140 net/rds/rdma.c 		rb_erase(&mr->r_rb_node, &rs->rs_rdma_keys);
mr                141 net/rds/rdma.c 		RB_CLEAR_NODE(&mr->r_rb_node);
mr                143 net/rds/rdma.c 		rds_destroy_mr(mr);
mr                144 net/rds/rdma.c 		rds_mr_put(mr);
mr                177 net/rds/rdma.c 	struct rds_mr *mr = NULL, *found;
mr                222 net/rds/rdma.c 	mr = kzalloc(sizeof(struct rds_mr), GFP_KERNEL);
mr                223 net/rds/rdma.c 	if (!mr) {
mr                228 net/rds/rdma.c 	refcount_set(&mr->r_refcount, 1);
mr                229 net/rds/rdma.c 	RB_CLEAR_NODE(&mr->r_rb_node);
mr                230 net/rds/rdma.c 	mr->r_trans = rs->rs_transport;
mr                231 net/rds/rdma.c 	mr->r_sock = rs;
mr                234 net/rds/rdma.c 		mr->r_use_once = 1;
mr                236 net/rds/rdma.c 		mr->r_invalidate = 1;
mr                238 net/rds/rdma.c 		mr->r_write = 1;
mr                274 net/rds/rdma.c 						 &mr->r_key,
mr                285 net/rds/rdma.c 	mr->r_trans_private = trans_private;
mr                288 net/rds/rdma.c 	       mr->r_key, (void *)(unsigned long) args->cookie_addr);
mr                294 net/rds/rdma.c 	cookie = rds_rdma_make_cookie(mr->r_key, args->vec.addr & ~PAGE_MASK);
mr                306 net/rds/rdma.c 	found = rds_mr_tree_walk(&rs->rs_rdma_keys, mr->r_key, mr);
mr                309 net/rds/rdma.c 	BUG_ON(found && found != mr);
mr                311 net/rds/rdma.c 	rdsdebug("RDS: get_mr key is %x\n", mr->r_key);
mr                313 net/rds/rdma.c 		refcount_inc(&mr->r_refcount);
mr                314 net/rds/rdma.c 		*mr_ret = mr;
mr                320 net/rds/rdma.c 	if (mr)
mr                321 net/rds/rdma.c 		rds_mr_put(mr);
mr                369 net/rds/rdma.c 	struct rds_mr *mr;
mr                392 net/rds/rdma.c 	mr = rds_mr_tree_walk(&rs->rs_rdma_keys, rds_rdma_cookie_key(args.cookie), NULL);
mr                393 net/rds/rdma.c 	if (mr) {
mr                394 net/rds/rdma.c 		rb_erase(&mr->r_rb_node, &rs->rs_rdma_keys);
mr                395 net/rds/rdma.c 		RB_CLEAR_NODE(&mr->r_rb_node);
mr                397 net/rds/rdma.c 			mr->r_invalidate = 1;
mr                401 net/rds/rdma.c 	if (!mr)
mr                409 net/rds/rdma.c 	rds_destroy_mr(mr);
mr                410 net/rds/rdma.c 	rds_mr_put(mr);
mr                421 net/rds/rdma.c 	struct rds_mr *mr;
mr                426 net/rds/rdma.c 	mr = rds_mr_tree_walk(&rs->rs_rdma_keys, r_key, NULL);
mr                427 net/rds/rdma.c 	if (!mr) {
mr                434 net/rds/rdma.c 	if (mr->r_use_once || force) {
mr                435 net/rds/rdma.c 		rb_erase(&mr->r_rb_node, &rs->rs_rdma_keys);
mr                436 net/rds/rdma.c 		RB_CLEAR_NODE(&mr->r_rb_node);
mr                444 net/rds/rdma.c 	if (mr->r_trans->sync_mr)
mr                445 net/rds/rdma.c 		mr->r_trans->sync_mr(mr->r_trans_private, DMA_FROM_DEVICE);
mr                450 net/rds/rdma.c 		rds_destroy_mr(mr);
mr                451 net/rds/rdma.c 		rds_mr_put(mr);
mr                734 net/rds/rdma.c 	struct rds_mr *mr;
mr                752 net/rds/rdma.c 	mr = rds_mr_tree_walk(&rs->rs_rdma_keys, r_key, NULL);
mr                753 net/rds/rdma.c 	if (!mr)
mr                756 net/rds/rdma.c 		refcount_inc(&mr->r_refcount);
mr                759 net/rds/rdma.c 	if (mr) {
mr                760 net/rds/rdma.c 		mr->r_trans->sync_mr(mr->r_trans_private, DMA_TO_DEVICE);
mr                761 net/rds/rdma.c 		rm->rdma.op_rdma_mr = mr;
mr                945 net/rds/rds.h  void __rds_put_mr_final(struct rds_mr *mr);
mr                946 net/rds/rds.h  static inline void rds_mr_put(struct rds_mr *mr)
mr                948 net/rds/rds.h  	if (refcount_dec_and_test(&mr->r_refcount))
mr                949 net/rds/rds.h  		__rds_put_mr_final(mr);
mr                357 net/smc/smc_ib.c void smc_ib_put_memory_region(struct ib_mr *mr)
mr                359 net/smc/smc_ib.c 	ib_dereg_mr(mr);
mr                 74 net/smc/smc_ib.h void smc_ib_put_memory_region(struct ib_mr *mr);
mr                256 net/smc/smc_wr.c int smc_wr_reg_send(struct smc_link *link, struct ib_mr *mr)
mr                263 net/smc/smc_wr.c 	link->wr_reg.wr.wr_id = (u64)(uintptr_t)mr;
mr                264 net/smc/smc_wr.c 	link->wr_reg.mr = mr;
mr                265 net/smc/smc_wr.c 	link->wr_reg.key = mr->rkey;
mr                103 net/smc/smc_wr.h int smc_wr_reg_send(struct smc_link *link, struct ib_mr *mr);
mr                 80 net/sunrpc/xprtrdma/frwr_ops.c void frwr_release_mr(struct rpcrdma_mr *mr)
mr                 84 net/sunrpc/xprtrdma/frwr_ops.c 	rc = ib_dereg_mr(mr->frwr.fr_mr);
mr                 86 net/sunrpc/xprtrdma/frwr_ops.c 		trace_xprtrdma_frwr_dereg(mr, rc);
mr                 87 net/sunrpc/xprtrdma/frwr_ops.c 	kfree(mr->mr_sg);
mr                 88 net/sunrpc/xprtrdma/frwr_ops.c 	kfree(mr);
mr                 91 net/sunrpc/xprtrdma/frwr_ops.c static void frwr_mr_recycle(struct rpcrdma_xprt *r_xprt, struct rpcrdma_mr *mr)
mr                 93 net/sunrpc/xprtrdma/frwr_ops.c 	trace_xprtrdma_mr_recycle(mr);
mr                 95 net/sunrpc/xprtrdma/frwr_ops.c 	if (mr->mr_dir != DMA_NONE) {
mr                 96 net/sunrpc/xprtrdma/frwr_ops.c 		trace_xprtrdma_mr_unmap(mr);
mr                 98 net/sunrpc/xprtrdma/frwr_ops.c 				mr->mr_sg, mr->mr_nents, mr->mr_dir);
mr                 99 net/sunrpc/xprtrdma/frwr_ops.c 		mr->mr_dir = DMA_NONE;
mr                103 net/sunrpc/xprtrdma/frwr_ops.c 	list_del(&mr->mr_all);
mr                107 net/sunrpc/xprtrdma/frwr_ops.c 	frwr_release_mr(mr);
mr                116 net/sunrpc/xprtrdma/frwr_ops.c 	struct rpcrdma_mr *mr = container_of(work, struct rpcrdma_mr,
mr                119 net/sunrpc/xprtrdma/frwr_ops.c 	frwr_mr_recycle(mr->mr_xprt, mr);
mr                130 net/sunrpc/xprtrdma/frwr_ops.c 	struct rpcrdma_mr *mr;
mr                132 net/sunrpc/xprtrdma/frwr_ops.c 	while ((mr = rpcrdma_mr_pop(&req->rl_registered)))
mr                133 net/sunrpc/xprtrdma/frwr_ops.c 		frwr_mr_recycle(mr->mr_xprt, mr);
mr                148 net/sunrpc/xprtrdma/frwr_ops.c 	struct rpcrdma_mr *mr;
mr                150 net/sunrpc/xprtrdma/frwr_ops.c 	while ((mr = rpcrdma_mr_pop(&req->rl_registered)))
mr                151 net/sunrpc/xprtrdma/frwr_ops.c 		rpcrdma_mr_put(mr);
mr                162 net/sunrpc/xprtrdma/frwr_ops.c int frwr_init_mr(struct rpcrdma_ia *ia, struct rpcrdma_mr *mr)
mr                180 net/sunrpc/xprtrdma/frwr_ops.c 	mr->frwr.fr_mr = frmr;
mr                181 net/sunrpc/xprtrdma/frwr_ops.c 	mr->mr_dir = DMA_NONE;
mr                182 net/sunrpc/xprtrdma/frwr_ops.c 	INIT_LIST_HEAD(&mr->mr_list);
mr                183 net/sunrpc/xprtrdma/frwr_ops.c 	INIT_WORK(&mr->mr_recycle, frwr_mr_recycle_worker);
mr                184 net/sunrpc/xprtrdma/frwr_ops.c 	init_completion(&mr->frwr.fr_linv_done);
mr                187 net/sunrpc/xprtrdma/frwr_ops.c 	mr->mr_sg = sg;
mr                192 net/sunrpc/xprtrdma/frwr_ops.c 	trace_xprtrdma_frwr_alloc(mr, rc);
mr                325 net/sunrpc/xprtrdma/frwr_ops.c 				struct rpcrdma_mr *mr)
mr                337 net/sunrpc/xprtrdma/frwr_ops.c 			sg_set_page(&mr->mr_sg[i],
mr                342 net/sunrpc/xprtrdma/frwr_ops.c 			sg_set_buf(&mr->mr_sg[i], seg->mr_offset,
mr                353 net/sunrpc/xprtrdma/frwr_ops.c 	mr->mr_dir = rpcrdma_data_dir(writing);
mr                354 net/sunrpc/xprtrdma/frwr_ops.c 	mr->mr_nents = i;
mr                356 net/sunrpc/xprtrdma/frwr_ops.c 	dma_nents = ib_dma_map_sg(ia->ri_id->device, mr->mr_sg, mr->mr_nents,
mr                357 net/sunrpc/xprtrdma/frwr_ops.c 				  mr->mr_dir);
mr                361 net/sunrpc/xprtrdma/frwr_ops.c 	ibmr = mr->frwr.fr_mr;
mr                362 net/sunrpc/xprtrdma/frwr_ops.c 	n = ib_map_mr_sg(ibmr, mr->mr_sg, dma_nents, NULL, PAGE_SIZE);
mr                371 net/sunrpc/xprtrdma/frwr_ops.c 	reg_wr = &mr->frwr.fr_regwr;
mr                372 net/sunrpc/xprtrdma/frwr_ops.c 	reg_wr->mr = ibmr;
mr                378 net/sunrpc/xprtrdma/frwr_ops.c 	mr->mr_handle = ibmr->rkey;
mr                379 net/sunrpc/xprtrdma/frwr_ops.c 	mr->mr_length = ibmr->length;
mr                380 net/sunrpc/xprtrdma/frwr_ops.c 	mr->mr_offset = ibmr->iova;
mr                381 net/sunrpc/xprtrdma/frwr_ops.c 	trace_xprtrdma_mr_map(mr);
mr                386 net/sunrpc/xprtrdma/frwr_ops.c 	mr->mr_dir = DMA_NONE;
mr                387 net/sunrpc/xprtrdma/frwr_ops.c 	trace_xprtrdma_frwr_sgerr(mr, i);
mr                391 net/sunrpc/xprtrdma/frwr_ops.c 	trace_xprtrdma_frwr_maperr(mr, n);
mr                426 net/sunrpc/xprtrdma/frwr_ops.c 	struct rpcrdma_mr *mr;
mr                429 net/sunrpc/xprtrdma/frwr_ops.c 	list_for_each_entry(mr, &req->rl_registered, mr_list) {
mr                432 net/sunrpc/xprtrdma/frwr_ops.c 		frwr = &mr->frwr;
mr                458 net/sunrpc/xprtrdma/frwr_ops.c 	struct rpcrdma_mr *mr;
mr                460 net/sunrpc/xprtrdma/frwr_ops.c 	list_for_each_entry(mr, mrs, mr_list)
mr                461 net/sunrpc/xprtrdma/frwr_ops.c 		if (mr->mr_handle == rep->rr_inv_rkey) {
mr                462 net/sunrpc/xprtrdma/frwr_ops.c 			list_del_init(&mr->mr_list);
mr                463 net/sunrpc/xprtrdma/frwr_ops.c 			trace_xprtrdma_mr_remoteinv(mr);
mr                464 net/sunrpc/xprtrdma/frwr_ops.c 			rpcrdma_mr_put(mr);
mr                469 net/sunrpc/xprtrdma/frwr_ops.c static void __frwr_release_mr(struct ib_wc *wc, struct rpcrdma_mr *mr)
mr                472 net/sunrpc/xprtrdma/frwr_ops.c 		rpcrdma_mr_recycle(mr);
mr                474 net/sunrpc/xprtrdma/frwr_ops.c 		rpcrdma_mr_put(mr);
mr                488 net/sunrpc/xprtrdma/frwr_ops.c 	struct rpcrdma_mr *mr = container_of(frwr, struct rpcrdma_mr, frwr);
mr                492 net/sunrpc/xprtrdma/frwr_ops.c 	__frwr_release_mr(wc, mr);
mr                507 net/sunrpc/xprtrdma/frwr_ops.c 	struct rpcrdma_mr *mr = container_of(frwr, struct rpcrdma_mr, frwr);
mr                511 net/sunrpc/xprtrdma/frwr_ops.c 	__frwr_release_mr(wc, mr);
mr                531 net/sunrpc/xprtrdma/frwr_ops.c 	struct rpcrdma_mr *mr;
mr                541 net/sunrpc/xprtrdma/frwr_ops.c 	while ((mr = rpcrdma_mr_pop(&req->rl_registered))) {
mr                543 net/sunrpc/xprtrdma/frwr_ops.c 		trace_xprtrdma_mr_localinv(mr);
mr                546 net/sunrpc/xprtrdma/frwr_ops.c 		frwr = &mr->frwr;
mr                555 net/sunrpc/xprtrdma/frwr_ops.c 		last->ex.invalidate_rkey = mr->mr_handle;
mr                590 net/sunrpc/xprtrdma/frwr_ops.c 		mr = container_of(frwr, struct rpcrdma_mr, frwr);
mr                593 net/sunrpc/xprtrdma/frwr_ops.c 		list_del_init(&mr->mr_list);
mr                594 net/sunrpc/xprtrdma/frwr_ops.c 		rpcrdma_mr_recycle(mr);
mr                609 net/sunrpc/xprtrdma/frwr_ops.c 	struct rpcrdma_mr *mr = container_of(frwr, struct rpcrdma_mr, frwr);
mr                610 net/sunrpc/xprtrdma/frwr_ops.c 	struct rpcrdma_rep *rep = mr->mr_req->rl_reply;
mr                614 net/sunrpc/xprtrdma/frwr_ops.c 	__frwr_release_mr(wc, mr);
mr                636 net/sunrpc/xprtrdma/frwr_ops.c 	struct rpcrdma_mr *mr;
mr                644 net/sunrpc/xprtrdma/frwr_ops.c 	while ((mr = rpcrdma_mr_pop(&req->rl_registered))) {
mr                646 net/sunrpc/xprtrdma/frwr_ops.c 		trace_xprtrdma_mr_localinv(mr);
mr                649 net/sunrpc/xprtrdma/frwr_ops.c 		frwr = &mr->frwr;
mr                658 net/sunrpc/xprtrdma/frwr_ops.c 		last->ex.invalidate_rkey = mr->mr_handle;
mr                685 net/sunrpc/xprtrdma/frwr_ops.c 		mr = container_of(frwr, struct rpcrdma_mr, frwr);
mr                688 net/sunrpc/xprtrdma/frwr_ops.c 		rpcrdma_mr_recycle(mr);
mr                309 net/sunrpc/xprtrdma/rpc_rdma.c xdr_encode_rdma_segment(__be32 *iptr, struct rpcrdma_mr *mr)
mr                311 net/sunrpc/xprtrdma/rpc_rdma.c 	*iptr++ = cpu_to_be32(mr->mr_handle);
mr                312 net/sunrpc/xprtrdma/rpc_rdma.c 	*iptr++ = cpu_to_be32(mr->mr_length);
mr                313 net/sunrpc/xprtrdma/rpc_rdma.c 	xdr_encode_hyper(iptr, mr->mr_offset);
mr                317 net/sunrpc/xprtrdma/rpc_rdma.c encode_rdma_segment(struct xdr_stream *xdr, struct rpcrdma_mr *mr)
mr                325 net/sunrpc/xprtrdma/rpc_rdma.c 	xdr_encode_rdma_segment(p, mr);
mr                330 net/sunrpc/xprtrdma/rpc_rdma.c encode_read_segment(struct xdr_stream *xdr, struct rpcrdma_mr *mr,
mr                341 net/sunrpc/xprtrdma/rpc_rdma.c 	xdr_encode_rdma_segment(p, mr);
mr                349 net/sunrpc/xprtrdma/rpc_rdma.c 						 struct rpcrdma_mr **mr)
mr                351 net/sunrpc/xprtrdma/rpc_rdma.c 	*mr = rpcrdma_mr_pop(&req->rl_free_mrs);
mr                352 net/sunrpc/xprtrdma/rpc_rdma.c 	if (!*mr) {
mr                353 net/sunrpc/xprtrdma/rpc_rdma.c 		*mr = rpcrdma_mr_get(r_xprt);
mr                354 net/sunrpc/xprtrdma/rpc_rdma.c 		if (!*mr)
mr                357 net/sunrpc/xprtrdma/rpc_rdma.c 		(*mr)->mr_req = req;
mr                360 net/sunrpc/xprtrdma/rpc_rdma.c 	rpcrdma_mr_push(*mr, &req->rl_registered);
mr                361 net/sunrpc/xprtrdma/rpc_rdma.c 	return frwr_map(r_xprt, seg, nsegs, writing, req->rl_slot.rq_xid, *mr);
mr                392 net/sunrpc/xprtrdma/rpc_rdma.c 	struct rpcrdma_mr *mr;
mr                409 net/sunrpc/xprtrdma/rpc_rdma.c 		seg = rpcrdma_mr_prepare(r_xprt, req, seg, nsegs, false, &mr);
mr                413 net/sunrpc/xprtrdma/rpc_rdma.c 		if (encode_read_segment(xdr, mr, pos) < 0)
mr                416 net/sunrpc/xprtrdma/rpc_rdma.c 		trace_xprtrdma_chunk_read(rqst->rq_task, pos, mr, nsegs);
mr                418 net/sunrpc/xprtrdma/rpc_rdma.c 		nsegs -= mr->mr_nents;
mr                447 net/sunrpc/xprtrdma/rpc_rdma.c 	struct rpcrdma_mr *mr;
mr                470 net/sunrpc/xprtrdma/rpc_rdma.c 		seg = rpcrdma_mr_prepare(r_xprt, req, seg, nsegs, true, &mr);
mr                474 net/sunrpc/xprtrdma/rpc_rdma.c 		if (encode_rdma_segment(xdr, mr) < 0)
mr                477 net/sunrpc/xprtrdma/rpc_rdma.c 		trace_xprtrdma_chunk_write(rqst->rq_task, mr, nsegs);
mr                479 net/sunrpc/xprtrdma/rpc_rdma.c 		r_xprt->rx_stats.total_rdma_request += mr->mr_length;
mr                481 net/sunrpc/xprtrdma/rpc_rdma.c 		nsegs -= mr->mr_nents;
mr                510 net/sunrpc/xprtrdma/rpc_rdma.c 	struct rpcrdma_mr *mr;
mr                531 net/sunrpc/xprtrdma/rpc_rdma.c 		seg = rpcrdma_mr_prepare(r_xprt, req, seg, nsegs, true, &mr);
mr                535 net/sunrpc/xprtrdma/rpc_rdma.c 		if (encode_rdma_segment(xdr, mr) < 0)
mr                538 net/sunrpc/xprtrdma/rpc_rdma.c 		trace_xprtrdma_chunk_reply(rqst->rq_task, mr, nsegs);
mr                540 net/sunrpc/xprtrdma/rpc_rdma.c 		r_xprt->rx_stats.total_rdma_request += mr->mr_length;
mr                542 net/sunrpc/xprtrdma/rpc_rdma.c 		nsegs -= mr->mr_nents;
mr                953 net/sunrpc/xprtrdma/verbs.c 		struct rpcrdma_mr *mr;
mr                956 net/sunrpc/xprtrdma/verbs.c 		mr = kzalloc(sizeof(*mr), GFP_NOFS);
mr                957 net/sunrpc/xprtrdma/verbs.c 		if (!mr)
mr                960 net/sunrpc/xprtrdma/verbs.c 		rc = frwr_init_mr(ia, mr);
mr                962 net/sunrpc/xprtrdma/verbs.c 			kfree(mr);
mr                966 net/sunrpc/xprtrdma/verbs.c 		mr->mr_xprt = r_xprt;
mr                969 net/sunrpc/xprtrdma/verbs.c 		rpcrdma_mr_push(mr, &buf->rb_mrs);
mr                970 net/sunrpc/xprtrdma/verbs.c 		list_add(&mr->mr_all, &buf->rb_all_mrs);
mr               1196 net/sunrpc/xprtrdma/verbs.c 	struct rpcrdma_mr *mr;
mr               1200 net/sunrpc/xprtrdma/verbs.c 	while ((mr = rpcrdma_mr_pop(&req->rl_free_mrs))) {
mr               1201 net/sunrpc/xprtrdma/verbs.c 		struct rpcrdma_buffer *buf = &mr->mr_xprt->rx_buf;
mr               1204 net/sunrpc/xprtrdma/verbs.c 		list_del(&mr->mr_all);
mr               1207 net/sunrpc/xprtrdma/verbs.c 		frwr_release_mr(mr);
mr               1227 net/sunrpc/xprtrdma/verbs.c 	struct rpcrdma_mr *mr;
mr               1230 net/sunrpc/xprtrdma/verbs.c 	while ((mr = list_first_entry_or_null(&buf->rb_all_mrs,
mr               1233 net/sunrpc/xprtrdma/verbs.c 		list_del(&mr->mr_list);
mr               1234 net/sunrpc/xprtrdma/verbs.c 		list_del(&mr->mr_all);
mr               1237 net/sunrpc/xprtrdma/verbs.c 		frwr_release_mr(mr);
mr               1283 net/sunrpc/xprtrdma/verbs.c 	struct rpcrdma_mr *mr;
mr               1286 net/sunrpc/xprtrdma/verbs.c 	mr = rpcrdma_mr_pop(&buf->rb_mrs);
mr               1288 net/sunrpc/xprtrdma/verbs.c 	return mr;
mr               1296 net/sunrpc/xprtrdma/verbs.c void rpcrdma_mr_put(struct rpcrdma_mr *mr)
mr               1298 net/sunrpc/xprtrdma/verbs.c 	struct rpcrdma_xprt *r_xprt = mr->mr_xprt;
mr               1300 net/sunrpc/xprtrdma/verbs.c 	if (mr->mr_dir != DMA_NONE) {
mr               1301 net/sunrpc/xprtrdma/verbs.c 		trace_xprtrdma_mr_unmap(mr);
mr               1303 net/sunrpc/xprtrdma/verbs.c 				mr->mr_sg, mr->mr_nents, mr->mr_dir);
mr               1304 net/sunrpc/xprtrdma/verbs.c 		mr->mr_dir = DMA_NONE;
mr               1307 net/sunrpc/xprtrdma/verbs.c 	rpcrdma_mr_push(mr, &mr->mr_req->rl_free_mrs);
mr                342 net/sunrpc/xprtrdma/xprt_rdma.h rpcrdma_mr_push(struct rpcrdma_mr *mr, struct list_head *list)
mr                344 net/sunrpc/xprtrdma/xprt_rdma.h 	list_add(&mr->mr_list, list);
mr                350 net/sunrpc/xprtrdma/xprt_rdma.h 	struct rpcrdma_mr *mr;
mr                352 net/sunrpc/xprtrdma/xprt_rdma.h 	mr = list_first_entry_or_null(list, struct rpcrdma_mr, mr_list);
mr                353 net/sunrpc/xprtrdma/xprt_rdma.h 	if (mr)
mr                354 net/sunrpc/xprtrdma/xprt_rdma.h 		list_del_init(&mr->mr_list);
mr                355 net/sunrpc/xprtrdma/xprt_rdma.h 	return mr;
mr                492 net/sunrpc/xprtrdma/xprt_rdma.h void rpcrdma_mr_put(struct rpcrdma_mr *mr);
mr                495 net/sunrpc/xprtrdma/xprt_rdma.h rpcrdma_mr_recycle(struct rpcrdma_mr *mr)
mr                497 net/sunrpc/xprtrdma/xprt_rdma.h 	schedule_work(&mr->mr_recycle);
mr                551 net/sunrpc/xprtrdma/xprt_rdma.h int frwr_init_mr(struct rpcrdma_ia *ia, struct rpcrdma_mr *mr);
mr                552 net/sunrpc/xprtrdma/xprt_rdma.h void frwr_release_mr(struct rpcrdma_mr *mr);
mr                557 net/sunrpc/xprtrdma/xprt_rdma.h 				struct rpcrdma_mr *mr);
mr                340 net/xdp/xdp_umem.c static int xdp_umem_reg(struct xdp_umem *umem, struct xdp_umem_reg *mr)
mr                342 net/xdp/xdp_umem.c 	bool unaligned_chunks = mr->flags & XDP_UMEM_UNALIGNED_CHUNK_FLAG;
mr                343 net/xdp/xdp_umem.c 	u32 chunk_size = mr->chunk_size, headroom = mr->headroom;
mr                344 net/xdp/xdp_umem.c 	u64 npgs, addr = mr->addr, size = mr->len;
mr                358 net/xdp/xdp_umem.c 	if (mr->flags & ~(XDP_UMEM_UNALIGNED_CHUNK_FLAG |
mr                401 net/xdp/xdp_umem.c 	umem->flags = mr->flags;
mr                434 net/xdp/xdp_umem.c struct xdp_umem *xdp_umem_create(struct xdp_umem_reg *mr)
mr                450 net/xdp/xdp_umem.c 	err = xdp_umem_reg(umem, mr);
mr                 19 net/xdp/xdp_umem.h struct xdp_umem *xdp_umem_create(struct xdp_umem_reg *mr);
mr                772 net/xdp/xsk.c  		struct xdp_umem_reg mr = {};
mr                777 net/xdp/xsk.c  		else if (optlen < sizeof(mr))
mr                780 net/xdp/xsk.c  		if (copy_from_user(&mr, optval, mr_size))
mr                789 net/xdp/xsk.c  		umem = xdp_umem_create(&mr);
mr                324 sound/soc/atmel/atmel-i2s.c 	unsigned int mr = 0;
mr                329 sound/soc/atmel/atmel-i2s.c 		mr |= ATMEL_I2SC_MR_FORMAT_I2S;
mr                340 sound/soc/atmel/atmel-i2s.c 		mr |= ATMEL_I2SC_MR_MODE_MASTER;
mr                348 sound/soc/atmel/atmel-i2s.c 		mr |= ATMEL_I2SC_MR_MODE_SLAVE;
mr                360 sound/soc/atmel/atmel-i2s.c 			mr |= ATMEL_I2SC_MR_TXMONO;
mr                362 sound/soc/atmel/atmel-i2s.c 			mr |= ATMEL_I2SC_MR_RXMONO;
mr                373 sound/soc/atmel/atmel-i2s.c 		mr |= ATMEL_I2SC_MR_DATALENGTH_8_BITS;
mr                377 sound/soc/atmel/atmel-i2s.c 		mr |= ATMEL_I2SC_MR_DATALENGTH_16_BITS;
mr                381 sound/soc/atmel/atmel-i2s.c 		mr |= ATMEL_I2SC_MR_DATALENGTH_18_BITS | ATMEL_I2SC_MR_IWS;
mr                385 sound/soc/atmel/atmel-i2s.c 		mr |= ATMEL_I2SC_MR_DATALENGTH_20_BITS | ATMEL_I2SC_MR_IWS;
mr                389 sound/soc/atmel/atmel-i2s.c 		mr |= ATMEL_I2SC_MR_DATALENGTH_24_BITS | ATMEL_I2SC_MR_IWS;
mr                393 sound/soc/atmel/atmel-i2s.c 		mr |= ATMEL_I2SC_MR_DATALENGTH_24_BITS;
mr                397 sound/soc/atmel/atmel-i2s.c 		mr |= ATMEL_I2SC_MR_DATALENGTH_32_BITS;
mr                405 sound/soc/atmel/atmel-i2s.c 	return regmap_write(dev->regmap, ATMEL_I2SC_MR, mr);
mr                411 sound/soc/atmel/atmel-i2s.c 	unsigned int mr, mr_mask;
mr                415 sound/soc/atmel/atmel-i2s.c 	mr = 0;
mr                429 sound/soc/atmel/atmel-i2s.c 					 mr_mask, mr);
mr                453 sound/soc/atmel/atmel-i2s.c 	mr |= ATMEL_I2SC_MR_IMCKDIV(dev->gck_param->imckdiv);
mr                454 sound/soc/atmel/atmel-i2s.c 	mr |= ATMEL_I2SC_MR_IMCKFS(dev->gck_param->imckfs);
mr                455 sound/soc/atmel/atmel-i2s.c 	mr |= ATMEL_I2SC_MR_IMCKMODE_I2SMCK;
mr                456 sound/soc/atmel/atmel-i2s.c 	ret = regmap_update_bits(dev->regmap, ATMEL_I2SC_MR, mr_mask, mr);
mr                471 sound/soc/atmel/atmel-i2s.c 	unsigned int cr, mr;
mr                492 sound/soc/atmel/atmel-i2s.c 	err = regmap_read(dev->regmap, ATMEL_I2SC_MR, &mr);
mr                495 sound/soc/atmel/atmel-i2s.c 	is_master = (mr & ATMEL_I2SC_MR_MODE_MASK) == ATMEL_I2SC_MR_MODE_MASTER;
mr                395 sound/soc/fsl/fsl_dma.c 	u32 mr;
mr                481 sound/soc/fsl/fsl_dma.c 	mr = in_be32(&dma_channel->mr) &
mr                499 sound/soc/fsl/fsl_dma.c 	mr |= CCSR_DMA_MR_EOSIE | CCSR_DMA_MR_EIE | CCSR_DMA_MR_EMP_EN |
mr                504 sound/soc/fsl/fsl_dma.c 	mr |= (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) ?
mr                507 sound/soc/fsl/fsl_dma.c 	out_be32(&dma_channel->mr, mr);
mr                567 sound/soc/fsl/fsl_dma.c 	u32 mr; /* DMA Mode Register */
mr                582 sound/soc/fsl/fsl_dma.c 	mr = in_be32(&dma_channel->mr) & ~(CCSR_DMA_MR_BWC_MASK |
mr                592 sound/soc/fsl/fsl_dma.c 		mr |= CCSR_DMA_MR_DAHTS_1 | CCSR_DMA_MR_SAHTS_1;
mr                596 sound/soc/fsl/fsl_dma.c 		mr |= CCSR_DMA_MR_DAHTS_2 | CCSR_DMA_MR_SAHTS_2;
mr                600 sound/soc/fsl/fsl_dma.c 		mr |= CCSR_DMA_MR_DAHTS_4 | CCSR_DMA_MR_SAHTS_4;
mr                639 sound/soc/fsl/fsl_dma.c 	mr |= CCSR_DMA_MR_BWC((dma_private->ssi_fifo_depth - 2) * sample_bytes);
mr                641 sound/soc/fsl/fsl_dma.c 	out_be32(&dma_channel->mr, mr);
mr                777 sound/soc/fsl/fsl_dma.c 		out_be32(&dma_channel->mr, CCSR_DMA_MR_CA);
mr                778 sound/soc/fsl/fsl_dma.c 		out_be32(&dma_channel->mr, 0);
mr                 12 sound/soc/fsl/fsl_dma.h 		__be32 mr;      /* Mode register */
mr                209 tools/lib/bpf/xsk.c 	struct xdp_umem_reg mr;
mr                232 tools/lib/bpf/xsk.c 	memset(&mr, 0, sizeof(mr));
mr                233 tools/lib/bpf/xsk.c 	mr.addr = (uintptr_t)umem_area;
mr                234 tools/lib/bpf/xsk.c 	mr.len = size;
mr                235 tools/lib/bpf/xsk.c 	mr.chunk_size = umem->config.frame_size;
mr                236 tools/lib/bpf/xsk.c 	mr.headroom = umem->config.frame_headroom;
mr                237 tools/lib/bpf/xsk.c 	mr.flags = umem->config.flags;
mr                239 tools/lib/bpf/xsk.c 	err = setsockopt(umem->fd, SOL_XDP, XDP_UMEM_REG, &mr, sizeof(mr));