ib_umem_odp        51 drivers/infiniband/core/umem_odp.c static void ib_umem_notifier_start_account(struct ib_umem_odp *umem_odp)
ib_umem_odp        64 drivers/infiniband/core/umem_odp.c static void ib_umem_notifier_end_account(struct ib_umem_odp *umem_odp)
ib_umem_odp        90 drivers/infiniband/core/umem_odp.c 		struct ib_umem_odp *umem_odp =
ib_umem_odp        91 drivers/infiniband/core/umem_odp.c 			rb_entry(node, struct ib_umem_odp, interval_tree.rb);
ib_umem_odp       108 drivers/infiniband/core/umem_odp.c static int invalidate_range_start_trampoline(struct ib_umem_odp *item,
ib_umem_odp       148 drivers/infiniband/core/umem_odp.c static int invalidate_range_end_trampoline(struct ib_umem_odp *item, u64 start,
ib_umem_odp       207 drivers/infiniband/core/umem_odp.c static inline int ib_init_umem_odp(struct ib_umem_odp *umem_odp)
ib_umem_odp       293 drivers/infiniband/core/umem_odp.c struct ib_umem_odp *ib_umem_odp_alloc_implicit(struct ib_udata *udata,
ib_umem_odp       300 drivers/infiniband/core/umem_odp.c 	struct ib_umem_odp *umem_odp;
ib_umem_odp       339 drivers/infiniband/core/umem_odp.c struct ib_umem_odp *ib_umem_odp_alloc_child(struct ib_umem_odp *root,
ib_umem_odp       346 drivers/infiniband/core/umem_odp.c 	struct ib_umem_odp *odp_data;
ib_umem_odp       385 drivers/infiniband/core/umem_odp.c struct ib_umem_odp *ib_umem_odp_get(struct ib_udata *udata, unsigned long addr,
ib_umem_odp       388 drivers/infiniband/core/umem_odp.c 	struct ib_umem_odp *umem_odp;
ib_umem_odp       405 drivers/infiniband/core/umem_odp.c 	umem_odp = kzalloc(sizeof(struct ib_umem_odp), GFP_KERNEL);
ib_umem_odp       443 drivers/infiniband/core/umem_odp.c void ib_umem_odp_release(struct ib_umem_odp *umem_odp)
ib_umem_odp       503 drivers/infiniband/core/umem_odp.c 		struct ib_umem_odp *umem_odp,
ib_umem_odp       587 drivers/infiniband/core/umem_odp.c int ib_umem_odp_map_dma_pages(struct ib_umem_odp *umem_odp, u64 user_virt,
ib_umem_odp       717 drivers/infiniband/core/umem_odp.c void ib_umem_odp_unmap_dma_pages(struct ib_umem_odp *umem_odp, u64 virt,
ib_umem_odp       777 drivers/infiniband/core/umem_odp.c 	struct ib_umem_odp *umem;
ib_umem_odp       788 drivers/infiniband/core/umem_odp.c 		umem = container_of(node, struct ib_umem_odp, interval_tree);
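
The umem_odp.c hits above cover the whole lifecycle of an ODP umem: ib_umem_odp_get() creates one for a user virtual range, ib_umem_odp_map_dma_pages() faults in and DMA-maps pages, ib_umem_odp_unmap_dma_pages() drops the mapping, and ib_umem_odp_release() frees the umem. A minimal sketch of that flow follows; the parameters beyond those visible in the listing (size/access for get, the access mask and notifier sequence for map) are assumptions for illustration, not taken from the listing.

#include <linux/err.h>
#include <rdma/ib_umem_odp.h>

/* Illustrative only: the ODP_READ_ALLOWED_BIT mask and the 0 notifier
 * sequence are assumptions; a real driver samples the notifier sequence
 * under the umem mutex and retries on invalidation. */
static int example_odp_lifecycle(struct ib_udata *udata, unsigned long addr,
				 size_t size, int access)
{
	struct ib_umem_odp *umem_odp;
	int ret;

	/* Create an ODP umem covering [addr, addr + size). */
	umem_odp = ib_umem_odp_get(udata, addr, size, access);
	if (IS_ERR(umem_odp))
		return PTR_ERR(umem_odp);

	/* Fault in and DMA-map the pages of the range (read-only here);
	 * a non-negative return is the number of pages mapped. */
	ret = ib_umem_odp_map_dma_pages(umem_odp, addr, size,
					ODP_READ_ALLOWED_BIT, 0);
	if (ret < 0)
		goto out_release;

	/* ... hand the DMA addresses to the HCA ... */

	/* Unmap before tearing the umem down. */
	ib_umem_odp_unmap_dma_pages(umem_odp, addr, addr + size);
	ret = 0;

out_release:
	ib_umem_odp_release(umem_odp);
	return ret;
}
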
ib_umem_odp      1255 drivers/infiniband/hw/mlx5/mlx5_ib.h void mlx5_ib_invalidate_range(struct ib_umem_odp *umem_odp, unsigned long start,
ib_umem_odp      1286 drivers/infiniband/hw/mlx5/mlx5_ib.h static inline void mlx5_ib_invalidate_range(struct ib_umem_odp *umem_odp,
ib_umem_odp       765 drivers/infiniband/hw/mlx5/mr.c 		struct ib_umem_odp *odp;
ib_umem_odp      1579 drivers/infiniband/hw/mlx5/mr.c 		struct ib_umem_odp *umem_odp = to_ib_umem_odp(umem);
ib_umem_odp        96 drivers/infiniband/hw/mlx5/odp.c static int check_parent(struct ib_umem_odp *odp,
ib_umem_odp       112 drivers/infiniband/hw/mlx5/odp.c static struct ib_umem_odp *odp_next(struct ib_umem_odp *odp)
ib_umem_odp       123 drivers/infiniband/hw/mlx5/odp.c 		odp = rb_entry(rb, struct ib_umem_odp, interval_tree.rb);
ib_umem_odp       134 drivers/infiniband/hw/mlx5/odp.c static struct ib_umem_odp *odp_lookup(u64 start, u64 length,
ib_umem_odp       138 drivers/infiniband/hw/mlx5/odp.c 	struct ib_umem_odp *odp;
ib_umem_odp       152 drivers/infiniband/hw/mlx5/odp.c 		odp = rb_entry(rb, struct ib_umem_odp, interval_tree.rb);
ib_umem_odp       168 drivers/infiniband/hw/mlx5/odp.c 	struct ib_umem_odp *odp;
ib_umem_odp       225 drivers/infiniband/hw/mlx5/odp.c 	struct ib_umem_odp *odp = container_of(work, struct ib_umem_odp, work);
ib_umem_odp       228 drivers/infiniband/hw/mlx5/odp.c 	struct ib_umem_odp *odp_imr = to_ib_umem_odp(imr->umem);
ib_umem_odp       250 drivers/infiniband/hw/mlx5/odp.c void mlx5_ib_invalidate_range(struct ib_umem_odp *umem_odp, unsigned long start,
ib_umem_odp       420 drivers/infiniband/hw/mlx5/odp.c 					    struct ib_umem_odp *umem_odp,
ib_umem_odp       475 drivers/infiniband/hw/mlx5/odp.c static struct ib_umem_odp *implicit_mr_get_data(struct mlx5_ib_mr *mr,
ib_umem_odp       479 drivers/infiniband/hw/mlx5/odp.c 	struct ib_umem_odp *odp, *result = NULL;
ib_umem_odp       480 drivers/infiniband/hw/mlx5/odp.c 	struct ib_umem_odp *odp_mr = to_ib_umem_odp(mr->umem);
ib_umem_odp       554 drivers/infiniband/hw/mlx5/odp.c 	struct ib_umem_odp *umem_odp;
ib_umem_odp       583 drivers/infiniband/hw/mlx5/odp.c 		struct ib_umem_odp *umem_odp =
ib_umem_odp       584 drivers/infiniband/hw/mlx5/odp.c 			rb_entry(node, struct ib_umem_odp, interval_tree.rb);
ib_umem_odp       616 drivers/infiniband/hw/mlx5/odp.c 	struct ib_umem_odp *odp_mr = to_ib_umem_odp(mr->umem);
ib_umem_odp       621 drivers/infiniband/hw/mlx5/odp.c 	struct ib_umem_odp *odp;
ib_umem_odp       698 drivers/infiniband/hw/mlx5/odp.c 		struct ib_umem_odp *next;
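
The odp.c hits for odp_next()/odp_lookup() show how the child ODP umems of an implicit MR are kept in an interval tree and walked via interval_tree.rb. The helper below is an illustrative reconstruction of that traversal step, not the mlx5 code; the real odp_next() additionally checks (via check_parent()) that the next entry still belongs to the same implicit MR.

#include <linux/rbtree.h>
#include <rdma/ib_umem_odp.h>

/* Step to the next child umem in the parent's interval tree, if any. */
static struct ib_umem_odp *example_next_child(struct ib_umem_odp *odp)
{
	struct rb_node *rb = rb_next(&odp->interval_tree.rb);

	if (!rb)
		return NULL;	/* no further children in the tree */
	return rb_entry(rb, struct ib_umem_odp, interval_tree.rb);
}
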
ib_umem_odp        42 include/rdma/ib_umem.h struct ib_umem_odp;
ib_umem_odp        86 include/rdma/ib_umem_odp.h static inline struct ib_umem_odp *to_ib_umem_odp(struct ib_umem *umem)
ib_umem_odp        88 include/rdma/ib_umem_odp.h 	return container_of(umem, struct ib_umem_odp, umem);
ib_umem_odp        92 include/rdma/ib_umem_odp.h static inline unsigned long ib_umem_start(struct ib_umem_odp *umem_odp)
ib_umem_odp        98 include/rdma/ib_umem_odp.h static inline unsigned long ib_umem_end(struct ib_umem_odp *umem_odp)
ib_umem_odp       103 include/rdma/ib_umem_odp.h static inline size_t ib_umem_odp_num_pages(struct ib_umem_odp *umem_odp)
ib_umem_odp       133 include/rdma/ib_umem_odp.h struct ib_umem_odp *ib_umem_odp_get(struct ib_udata *udata, unsigned long addr,
ib_umem_odp       135 include/rdma/ib_umem_odp.h struct ib_umem_odp *ib_umem_odp_alloc_implicit(struct ib_udata *udata,
ib_umem_odp       137 include/rdma/ib_umem_odp.h struct ib_umem_odp *ib_umem_odp_alloc_child(struct ib_umem_odp *root_umem,
ib_umem_odp       139 include/rdma/ib_umem_odp.h void ib_umem_odp_release(struct ib_umem_odp *umem_odp);
ib_umem_odp       141 include/rdma/ib_umem_odp.h int ib_umem_odp_map_dma_pages(struct ib_umem_odp *umem_odp, u64 start_offset,
ib_umem_odp       145 include/rdma/ib_umem_odp.h void ib_umem_odp_unmap_dma_pages(struct ib_umem_odp *umem_odp, u64 start_offset,
ib_umem_odp       148 include/rdma/ib_umem_odp.h typedef int (*umem_call_back)(struct ib_umem_odp *item, u64 start, u64 end,
ib_umem_odp       163 include/rdma/ib_umem_odp.h static inline struct ib_umem_odp *
ib_umem_odp       171 include/rdma/ib_umem_odp.h 	return container_of(node, struct ib_umem_odp, interval_tree);
ib_umem_odp       175 include/rdma/ib_umem_odp.h static inline int ib_umem_mmu_notifier_retry(struct ib_umem_odp *umem_odp,
ib_umem_odp       194 include/rdma/ib_umem_odp.h static inline struct ib_umem_odp *ib_umem_odp_get(struct ib_udata *udata,
ib_umem_odp       201 include/rdma/ib_umem_odp.h static inline void ib_umem_odp_release(struct ib_umem_odp *umem_odp) {}
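
The ib_umem_odp.h hits above are the public helpers: to_ib_umem_odp() converts a generic ib_umem to its ODP wrapper, and ib_umem_start()/ib_umem_end()/ib_umem_odp_num_pages() describe the covered range. A minimal usage sketch, assuming only the signatures shown in the listing:

#include <linux/printk.h>
#include <rdma/ib_umem_odp.h>

static void example_dump_odp_range(struct ib_umem *umem)
{
	struct ib_umem_odp *umem_odp = to_ib_umem_odp(umem);
	unsigned long start = ib_umem_start(umem_odp);	/* start of covered range */
	unsigned long end = ib_umem_end(umem_odp);	/* end of covered range (exclusive) */
	size_t npages = ib_umem_odp_num_pages(umem_odp);

	pr_info("ODP umem [%#lx, %#lx), %zu pages\n", start, end, npages);
}
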
ib_umem_odp        74 include/rdma/ib_verbs.h struct ib_umem_odp;
ib_umem_odp      2424 include/rdma/ib_verbs.h 	void (*invalidate_range)(struct ib_umem_odp *umem_odp,
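
The last hit is the invalidate_range hook in ib_device_ops, which the core calls from its MMU notifier so a driver can tear down hardware mappings for the invalidated range. A sketch of how a driver might wire it, using the mlx5 handler named in this listing; the ops-struct name and the registration call are assumptions for illustration.

static const struct ib_device_ops example_odp_ops = {
	.invalidate_range = mlx5_ib_invalidate_range,
};

/* During device setup (illustrative):
 *	ib_set_device_ops(&dev->ib_dev, &example_odp_ops);
 */
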