Lines matching refs:base. Each entry gives the source line number and the matching line; definition sites are tagged member or argument, and uses name their enclosing function.
67 struct gk20a_instobj base; member
74 container_of(gk20a_instobj(p), struct gk20a_instobj_dma, base)
80 struct gk20a_instobj base; member
88 container_of(gk20a_instobj(p), struct gk20a_instobj_iommu, base)
91 struct nvkm_instmem base; member
113 #define gk20a_instmem(p) container_of((p), struct gk20a_instmem, base)
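The struct-and-macro pairs above (lines 67-113) are the kernel's embed-and-downcast idiom: each specialized type embeds its parent by value in a member named base, and a matching container_of() macro recovers the wrapper from a pointer to that member. A minimal, self-contained sketch of the idiom, with illustrative names and a simplified container_of() (the kernel's version adds type checking):

    #include <stddef.h>

    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct base_obj {
            int id;
    };

    struct derived_obj {
            struct base_obj base;   /* embedded by value, not a pointer */
            int extra;
    };

    /* Recover the wrapper from a pointer to its embedded base. */
    static struct derived_obj *derived_obj(struct base_obj *p)
    {
            return container_of(p, struct derived_obj, base);
    }

Lines 74 and 88 chain two such downcasts: the inner gk20a_instobj() first recovers the common gk20a_instobj from the nvkm_memory it embeds, and the outer container_of() then recovers the DMA or IOMMU specialization from that.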
138 struct device *dev = node->base.imem->base.subdev.device->dev; in gk20a_instobj_cpu_map_dma()
185 nvkm_debug(&imem->base.subdev, "(GC) vaddr used: %x/%x\n", in gk20a_instmem_vaddr_gc()
196 struct nvkm_ltc *ltc = imem->base.subdev.device->ltc; in gk20a_instobj_acquire()
217 nvkm_error(&imem->base.subdev, "cannot map instobj - " in gk20a_instobj_acquire()
223 nvkm_debug(&imem->base.subdev, "vaddr used: %x/%x\n", in gk20a_instobj_acquire()
237 struct nvkm_ltc *ltc = imem->base.subdev.device->ltc; in gk20a_instobj_release()
299 nvkm_debug(&imem->base.subdev, "vaddr used: %x/%x\n", in gk20a_instobj_dtor()
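Every logging call above reaches the same nvkm_subdev through one or two levels of base: gk20a_instmem embeds nvkm_instmem, which in turn embeds the subdev, and an instobj additionally goes through its imem back-pointer (line 138). Abbreviated layouts, with unrelated fields omitted:

    struct nvkm_subdev {
            struct nvkm_device *device;     /* device and logging context */
            /* ... */
    };

    struct nvkm_instmem {
            struct nvkm_subdev subdev;
            /* ... */
    };

    struct gk20a_instmem {
            struct nvkm_instmem base;
            /* ... gk20a-specific state ... */
    };

Hence nvkm_debug(&imem->base.subdev, ...) from instmem code, and the longer node->base.imem->base.subdev.device->dev chain from an instobj, which stores only a back-pointer to its owning imem.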
310 struct gk20a_instmem *imem = node->base.imem; in gk20a_instobj_dtor_dma()
311 struct device *dev = imem->base.subdev.device->dev; in gk20a_instobj_dtor_dma()
313 gk20a_instobj_dtor(&node->base); in gk20a_instobj_dtor_dma()
318 dma_free_attrs(dev, node->base.mem.size << PAGE_SHIFT, node->cpuaddr, in gk20a_instobj_dtor_dma()
329 struct gk20a_instmem *imem = node->base.imem; in gk20a_instobj_dtor_iommu()
330 struct device *dev = imem->base.subdev.device->dev; in gk20a_instobj_dtor_iommu()
334 gk20a_instobj_dtor(&node->base); in gk20a_instobj_dtor_iommu()
336 if (unlikely(list_empty(&node->base.mem.regions))) in gk20a_instobj_dtor_iommu()
339 r = list_first_entry(&node->base.mem.regions, struct nvkm_mm_node, in gk20a_instobj_dtor_iommu()
346 for (i = 0; i < node->base.mem.size; i++) { in gk20a_instobj_dtor_iommu()
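Both destructors share one shape: resolve the owning imem and struct device through the base chain, run the common gk20a_instobj_dtor() on the embedded base first, then release the backend-specific storage: a single dma_free_attrs() for the DMA variant (line 318), a walk of mem.regions plus a per-page loop for the IOMMU variant (lines 336-346). A rough sketch of that ordering for the DMA variant; the cpuaddr guard and the final attrs argument (truncated in the listing) are assumptions here, written against the modern unsigned-long attrs form of dma_free_attrs():

    #include <linux/dma-mapping.h>

    static void gk20a_instobj_dtor_dma_sketch(struct gk20a_instobj_dma *node)
    {
            struct gk20a_instmem *imem = node->base.imem;
            struct device *dev = imem->base.subdev.device->dev;

            /* common teardown of the embedded base first */
            gk20a_instobj_dtor(&node->base);

            if (unlikely(!node->cpuaddr))   /* assumed guard */
                    return;

            /* mem.size counts GPU pages, hence the shift to bytes */
            dma_free_attrs(dev, node->base.mem.size << PAGE_SHIFT,
                           node->cpuaddr, node->handle, 0 /* attrs: assumed */);
    }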
394 struct nvkm_subdev *subdev = &imem->base.subdev; in gk20a_instobj_ctor_dma()
399 *_node = &node->base; in gk20a_instobj_ctor_dma()
401 nvkm_memory_ctor(&gk20a_instobj_func_dma, &node->base.memory); in gk20a_instobj_ctor_dma()
422 node->base.mem.offset = node->handle; in gk20a_instobj_ctor_dma()
424 INIT_LIST_HEAD(&node->base.mem.regions); in gk20a_instobj_ctor_dma()
425 list_add_tail(&node->r.rl_entry, &node->base.mem.regions); in gk20a_instobj_ctor_dma()
435 struct nvkm_subdev *subdev = &imem->base.subdev; in gk20a_instobj_ctor_iommu()
448 *_node = &node->base; in gk20a_instobj_ctor_iommu()
451 nvkm_memory_ctor(&gk20a_instobj_func_iommu, &node->base.memory); in gk20a_instobj_ctor_iommu()
502 node->base.mem.offset = ((u64)r->offset) << imem->iommu_pgshift; in gk20a_instobj_ctor_iommu()
504 INIT_LIST_HEAD(&node->base.mem.regions); in gk20a_instobj_ctor_iommu()
505 list_add_tail(&r->rl_entry, &node->base.mem.regions); in gk20a_instobj_ctor_iommu()
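The constructors mirror this: both hand back the embedded base through *_node (lines 399/448), wire the embedded nvkm_memory to the variant's function table (lines 401/451), record where the backing landed (the DMA handle directly at line 422, or the IOMMU region's offset scaled by the IOMMU page shift at line 502), and thread a single region onto mem.regions. A condensed, hypothetical sketch of that shared tail; funcs, gpu_offset and r stand in for the per-variant values:

    static void
    gk20a_instobj_ctor_tail_sketch(struct gk20a_instobj *node,
                                   const struct nvkm_memory_func *funcs,
                                   u64 gpu_offset, struct nvkm_mm_node *r,
                                   struct gk20a_instobj **_node)
    {
            *_node = node;                          /* publish as the base type */
            nvkm_memory_ctor(funcs, &node->memory); /* hook up the vtable */

            node->mem.offset = gpu_offset;          /* handle, or offset << pgshift */

            INIT_LIST_HEAD(&node->mem.regions);     /* exactly one backing region */
            list_add_tail(&r->rl_entry, &node->mem.regions);
    }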
527 gk20a_instobj_new(struct nvkm_instmem *base, u32 size, u32 align, bool zero, in gk20a_instobj_new() argument
530 struct gk20a_instmem *imem = gk20a_instmem(base); in gk20a_instobj_new()
531 struct nvkm_subdev *subdev = &imem->base.subdev; in gk20a_instobj_new()
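gk20a_instobj_new() shows the opposite direction of the downcast idiom: installed as a virtual entry point, it receives the nvkm_instmem base pointer and immediately recovers the implementation with gk20a_instmem() (line 530) before touching the subdev. A sketch of that boundary; the trailing output parameter is truncated in the listing, so struct nvkm_memory **pmemory is an assumption:

    static int
    gk20a_instobj_new_sketch(struct nvkm_instmem *base, u32 size, u32 align,
                             bool zero, struct nvkm_memory **pmemory)
    {
            struct gk20a_instmem *imem = gk20a_instmem(base);
            struct nvkm_subdev *subdev = &imem->base.subdev;

            nvkm_debug(subdev, "size: %x align: %x zero: %d\n",
                       size, align, zero);

            /* ... dispatch to the DMA or IOMMU ctor from here ... */
            return 0;
    }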
566 gk20a_instmem_dtor(struct nvkm_instmem *base) in gk20a_instmem_dtor() argument
568 struct gk20a_instmem *imem = gk20a_instmem(base); in gk20a_instmem_dtor()
572 nvkm_warn(&base->subdev, "instobj LRU not empty!\n"); in gk20a_instmem_dtor()
575 nvkm_warn(&base->subdev, "instobj vmap area not empty! " in gk20a_instmem_dtor()
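The two warnings in the dtor (lines 572-575) are leak checks: by the time the subdev is torn down, every cached CPU mapping should already have been released, so a non-empty LRU list or a non-zero vmap-use counter means instobjs leaked. A hedged sketch; the imem field names (vaddr_lru, vaddr_use, vaddr_max) are assumed from the "vaddr used" messages at lines 185 and 223:

    if (!list_empty(&imem->vaddr_lru))
            nvkm_warn(&base->subdev, "instobj LRU not empty!\n");

    if (imem->vaddr_use)
            nvkm_warn(&base->subdev, "instobj vmap area not empty! %x/%x\n",
                      imem->vaddr_use, imem->vaddr_max);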
598 nvkm_instmem_ctor(&gk20a_instmem, device, index, &imem->base); in gk20a_instmem_new()
600 *pimem = &imem->base; in gk20a_instmem_new()
615 nvkm_info(&imem->base.subdev, "using IOMMU\n"); in gk20a_instmem_new()
625 nvkm_info(&imem->base.subdev, "using DMA API\n"); in gk20a_instmem_new()
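Construction (lines 598-625) is the mirror image of the dtor: nvkm_instmem_ctor() initializes the embedded base in place, the caller receives &imem->base as the public handle, and the driver logs which backing strategy it settled on, the IOMMU path when one is usable and the DMA API otherwise. A sketch of the shape, with the allocation error path kept and the backing-strategy probe elided:

    static int
    gk20a_instmem_new_sketch(struct nvkm_device *device, int index,
                             struct nvkm_instmem **pimem)
    {
            struct gk20a_instmem *imem;

            if (!(imem = kzalloc(sizeof(*imem), GFP_KERNEL)))
                    return -ENOMEM;

            /* initialize the embedded base in place... */
            nvkm_instmem_ctor(&gk20a_instmem, device, index, &imem->base);
            /* ...and export the base as the public handle */
            *pimem = &imem->base;

            /* IOMMU vs. DMA-API probe elided; see lines 615 and 625 */
            return 0;
    }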