Searched refs:iobj (Results 1 - 4 of 4) sorted by relevance

/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/

base.c

67 struct nvkm_instobj *iobj = nvkm_instobj(memory); nvkm_instobj_release() local
68 nvkm_bar_flush(iobj->imem->subdev.device->bar); nvkm_instobj_release()
99 struct nvkm_instobj *iobj = nvkm_instobj(memory); nvkm_instobj_dtor() local
100 spin_lock(&iobj->imem->lock); nvkm_instobj_dtor()
101 list_del(&iobj->head); nvkm_instobj_dtor()
102 spin_unlock(&iobj->imem->lock); nvkm_instobj_dtor()
103 nvkm_memory_del(&iobj->parent); nvkm_instobj_dtor()
104 return iobj; nvkm_instobj_dtor()
130 struct nvkm_instobj *iobj = nvkm_instobj(memory); nvkm_instobj_release_slow() local
132 nvkm_done(iobj->parent); nvkm_instobj_release_slow()
138 struct nvkm_instobj *iobj = nvkm_instobj(memory); nvkm_instobj_acquire_slow() local
139 iobj->map = nvkm_kmap(iobj->parent); nvkm_instobj_acquire_slow()
140 if (iobj->map) nvkm_instobj_acquire_slow()
142 return iobj->map; nvkm_instobj_acquire_slow()
148 struct nvkm_instobj *iobj = nvkm_instobj(memory); nvkm_instobj_rd32_slow() local
149 return nvkm_ro32(iobj->parent, offset); nvkm_instobj_rd32_slow()
155 struct nvkm_instobj *iobj = nvkm_instobj(memory); nvkm_instobj_wr32_slow() local
156 return nvkm_wo32(iobj->parent, offset, data); nvkm_instobj_wr32_slow()
178 struct nvkm_instobj *iobj; nvkm_instobj_new() local
187 if (!(iobj = kzalloc(sizeof(*iobj), GFP_KERNEL))) { nvkm_instobj_new()
192 nvkm_memory_ctor(&nvkm_instobj_func_slow, &iobj->memory); nvkm_instobj_new()
193 iobj->parent = memory; nvkm_instobj_new()
194 iobj->imem = imem; nvkm_instobj_new()
195 spin_lock(&iobj->imem->lock); nvkm_instobj_new()
196 list_add_tail(&iobj->head, &imem->list); nvkm_instobj_new()
197 spin_unlock(&iobj->imem->lock); nvkm_instobj_new()
198 memory = &iobj->memory; nvkm_instobj_new()
239 struct nvkm_instobj *iobj; nvkm_instmem_fini() local
246 list_for_each_entry(iobj, &imem->list, head) { nvkm_instmem_fini()
247 struct nvkm_memory *memory = iobj->parent; nvkm_instmem_fini()
250 iobj->suspend = vmalloc(size); nvkm_instmem_fini()
251 if (!iobj->suspend) nvkm_instmem_fini()
255 iobj->suspend[i / 4] = nvkm_ro32(memory, i); nvkm_instmem_fini()
275 struct nvkm_instobj *iobj; nvkm_instmem_init() local
278 list_for_each_entry(iobj, &imem->list, head) { nvkm_instmem_init()
279 if (iobj->suspend) { nvkm_instmem_init()
280 struct nvkm_memory *memory = iobj->parent; nvkm_instmem_init()
283 nvkm_wo32(memory, i, iobj->suspend[i / 4]); nvkm_instmem_init()
284 vfree(iobj->suspend); nvkm_instmem_init()
285 iobj->suspend = NULL; nvkm_instmem_init()
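
The base.c hits above show the generic slow path: an nvkm_instobj wraps a parent
nvkm_memory, forwards 32-bit accesses to it, and across suspend/resume the whole
object is shadowed into a vmalloc'd buffer and written back. Below is a rough,
self-contained userspace illustration of that pattern, not the kernel code
itself; struct memory, mem_ro32(), mem_wo32(), instobj_suspend() and
instobj_resume() are made-up stand-ins for the kernel types and helpers.

#include <stdint.h>
#include <stdlib.h>

struct memory {
	uint32_t *words;
	size_t    size;                 /* bytes, multiple of 4 */
};

static uint32_t mem_ro32(struct memory *m, size_t off)             { return m->words[off / 4]; }
static void     mem_wo32(struct memory *m, size_t off, uint32_t v) { m->words[off / 4] = v; }

struct instobj {
	struct memory *parent;          /* backing object, like iobj->parent */
	uint32_t      *suspend;         /* shadow copy kept across suspend   */
};

/* Mirrors the loop seen in nvkm_instmem_fini(): copy every word out. */
static int instobj_suspend(struct instobj *iobj)
{
	struct memory *memory = iobj->parent;
	size_t i;

	iobj->suspend = malloc(memory->size);
	if (!iobj->suspend)
		return -1;
	for (i = 0; i < memory->size; i += 4)
		iobj->suspend[i / 4] = mem_ro32(memory, i);
	return 0;
}

/* Mirrors the loop seen in nvkm_instmem_init(): write every word back. */
static void instobj_resume(struct instobj *iobj)
{
	struct memory *memory = iobj->parent;
	size_t i;

	if (!iobj->suspend)
		return;
	for (i = 0; i < memory->size; i += 4)
		mem_wo32(memory, i, iobj->suspend[i / 4]);
	free(iobj->suspend);
	iobj->suspend = NULL;
}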

nv50.c

73 struct nv50_instobj *iobj = nv50_instobj(memory); nv50_instobj_boot() local
74 struct nvkm_subdev *subdev = &iobj->imem->base.subdev; nv50_instobj_boot()
80 iobj->map = ERR_PTR(-ENOMEM); nv50_instobj_boot()
82 ret = nvkm_vm_get(vm, size, 12, NV_MEM_ACCESS_RW, &iobj->bar); nv50_instobj_boot()
85 (u32)iobj->bar.offset, size); nv50_instobj_boot()
87 nvkm_memory_map(memory, &iobj->bar, 0); nv50_instobj_boot()
88 iobj->map = map; nv50_instobj_boot()
91 nvkm_vm_put(&iobj->bar); nv50_instobj_boot()
108 struct nv50_instobj *iobj = nv50_instobj(memory); nv50_instobj_acquire() local
109 struct nv50_instmem *imem = iobj->imem; nv50_instobj_acquire()
114 if (!iobj->map && (vm = nvkm_bar_kmap(bar))) nv50_instobj_acquire()
116 if (!IS_ERR_OR_NULL(iobj->map)) nv50_instobj_acquire()
117 return iobj->map; nv50_instobj_acquire()
127 struct nv50_instobj *iobj = nv50_instobj(memory); nv50_instobj_rd32() local
128 struct nv50_instmem *imem = iobj->imem; nv50_instobj_rd32()
130 u64 base = (iobj->mem->offset + offset) & 0xffffff00000ULL; nv50_instobj_rd32()
131 u64 addr = (iobj->mem->offset + offset) & 0x000000fffffULL; nv50_instobj_rd32()
145 struct nv50_instobj *iobj = nv50_instobj(memory); nv50_instobj_wr32() local
146 struct nv50_instmem *imem = iobj->imem; nv50_instobj_wr32()
148 u64 base = (iobj->mem->offset + offset) & 0xffffff00000ULL; nv50_instobj_wr32()
149 u64 addr = (iobj->mem->offset + offset) & 0x000000fffffULL; nv50_instobj_wr32()
161 struct nv50_instobj *iobj = nv50_instobj(memory); nv50_instobj_map() local
162 nvkm_vm_map_at(vma, offset, iobj->mem); nv50_instobj_map()
168 struct nv50_instobj *iobj = nv50_instobj(memory); nv50_instobj_dtor() local
169 struct nvkm_ram *ram = iobj->imem->base.subdev.device->fb->ram; nv50_instobj_dtor()
170 if (!IS_ERR_OR_NULL(iobj->map)) { nv50_instobj_dtor()
171 nvkm_vm_put(&iobj->bar); nv50_instobj_dtor()
172 iounmap(iobj->map); nv50_instobj_dtor()
174 ram->func->put(ram, &iobj->mem); nv50_instobj_dtor()
175 return iobj; nv50_instobj_dtor()
197 struct nv50_instobj *iobj; nv50_instobj_new() local
201 if (!(iobj = kzalloc(sizeof(*iobj), GFP_KERNEL))) nv50_instobj_new()
203 *pmemory = &iobj->memory; nv50_instobj_new()
205 nvkm_memory_ctor(&nv50_instobj_func, &iobj->memory); nv50_instobj_new()
206 iobj->imem = imem; nv50_instobj_new()
211 ret = ram->func->get(ram, size, align, 0, 0x800, &iobj->mem); nv50_instobj_new()
215 iobj->mem->page_shift = 12; nv50_instobj_new()
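
The nv50.c hits reach VRAM-backed objects through a sliding window: the object's
VRAM offset plus the access offset is split into a 1 MiB-aligned window base and
a low 20-bit in-window offset, using the masks visible in nv50_instobj_rd32()
and nv50_instobj_wr32(). The standalone snippet below only reproduces that
address split; nv50_split() and the sample addresses are illustrative.

#include <stdint.h>
#include <stdio.h>

static void nv50_split(uint64_t obj_offset, uint64_t offset,
		       uint64_t *base, uint64_t *addr)
{
	*base = (obj_offset + offset) & 0xffffff00000ULL; /* 1 MiB-aligned window base */
	*addr = (obj_offset + offset) & 0x000000fffffULL; /* offset inside the window  */
}

int main(void)
{
	uint64_t base, addr;

	nv50_split(0x20100000ULL, 0x1234, &base, &addr);
	printf("window base %#llx, in-window offset %#llx\n",
	       (unsigned long long)base, (unsigned long long)addr);
	return 0;
}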

nv04.c

67 struct nv04_instobj *iobj = nv04_instobj(memory); nv04_instobj_acquire() local
68 struct nvkm_device *device = iobj->imem->base.subdev.device; nv04_instobj_acquire()
69 return device->pri + 0x700000 + iobj->node->offset; nv04_instobj_acquire()
80 struct nv04_instobj *iobj = nv04_instobj(memory); nv04_instobj_rd32() local
81 struct nvkm_device *device = iobj->imem->base.subdev.device; nv04_instobj_rd32()
82 return nvkm_rd32(device, 0x700000 + iobj->node->offset + offset); nv04_instobj_rd32()
88 struct nv04_instobj *iobj = nv04_instobj(memory); nv04_instobj_wr32() local
89 struct nvkm_device *device = iobj->imem->base.subdev.device; nv04_instobj_wr32()
90 nvkm_wr32(device, 0x700000 + iobj->node->offset + offset, data); nv04_instobj_wr32()
96 struct nv04_instobj *iobj = nv04_instobj(memory); nv04_instobj_dtor() local
97 mutex_lock(&iobj->imem->base.subdev.mutex); nv04_instobj_dtor()
98 nvkm_mm_free(&iobj->imem->heap, &iobj->node); nv04_instobj_dtor()
99 mutex_unlock(&iobj->imem->base.subdev.mutex); nv04_instobj_dtor()
100 return iobj; nv04_instobj_dtor()
120 struct nv04_instobj *iobj; nv04_instobj_new() local
123 if (!(iobj = kzalloc(sizeof(*iobj), GFP_KERNEL))) nv04_instobj_new()
125 *pmemory = &iobj->memory; nv04_instobj_new()
127 nvkm_memory_ctor(&nv04_instobj_func, &iobj->memory); nv04_instobj_new()
128 iobj->imem = imem; nv04_instobj_new()
132 align ? align : 1, &iobj->node); nv04_instobj_new()
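
In the nv04.c hits, instance memory sits behind a fixed 0x700000 aperture in the
register BAR, so rd32/wr32 reduce to offset arithmetic from the node's heap
offset. A minimal userspace model of that arithmetic follows; pri is an
illustrative stand-in for the mapped register BAR (device->pri in the hits).

#include <stdint.h>

#define PRAMIN_BASE 0x700000u           /* aperture offset, as in the hits above */

static uint32_t nv04_style_rd32(volatile uint32_t *pri,
				uint32_t node_offset, uint32_t offset)
{
	return pri[(PRAMIN_BASE + node_offset + offset) / 4];
}

static void nv04_style_wr32(volatile uint32_t *pri,
			    uint32_t node_offset, uint32_t offset, uint32_t data)
{
	pri[(PRAMIN_BASE + node_offset + offset) / 4] = data;
}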

nv40.c

69 struct nv40_instobj *iobj = nv40_instobj(memory); nv40_instobj_acquire() local
70 return iobj->imem->iomem + iobj->node->offset; nv40_instobj_acquire()
81 struct nv40_instobj *iobj = nv40_instobj(memory); nv40_instobj_rd32() local
82 return ioread32_native(iobj->imem->iomem + iobj->node->offset + offset); nv40_instobj_rd32()
88 struct nv40_instobj *iobj = nv40_instobj(memory); nv40_instobj_wr32() local
89 iowrite32_native(data, iobj->imem->iomem + iobj->node->offset + offset); nv40_instobj_wr32()
95 struct nv40_instobj *iobj = nv40_instobj(memory); nv40_instobj_dtor() local
96 mutex_lock(&iobj->imem->base.subdev.mutex); nv40_instobj_dtor()
97 nvkm_mm_free(&iobj->imem->heap, &iobj->node); nv40_instobj_dtor()
98 mutex_unlock(&iobj->imem->base.subdev.mutex); nv40_instobj_dtor()
99 return iobj; nv40_instobj_dtor()
119 struct nv40_instobj *iobj; nv40_instobj_new() local
122 if (!(iobj = kzalloc(sizeof(*iobj), GFP_KERNEL))) nv40_instobj_new()
124 *pmemory = &iobj->memory; nv40_instobj_new()
126 nvkm_memory_ctor(&nv40_instobj_func, &iobj->memory); nv40_instobj_new()
127 iobj->imem = imem; nv40_instobj_new()
131 align ? align : 1, &iobj->node); nv40_instobj_new()
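
nv40.c differs from nv04.c mainly in how the window is reached: the whole
instance-memory window is already mapped (iobj->imem->iomem), so acquire and the
32-bit accessors are plain pointer arithmetic on that mapping, with each
object's node carved out of a shared heap under the subdev mutex. A rough
self-contained model follows; the bump allocator stands in for nvkm_mm, align is
assumed to be a power of two, and the locking is omitted.

#include <stdint.h>
#include <stddef.h>

struct node { size_t offset, size; };

struct instmem {
	volatile uint8_t *iomem;        /* pre-mapped window, like imem->iomem   */
	size_t size, next;              /* "next" stands in for the nvkm_mm heap */
};

static int heap_alloc(struct instmem *imem, size_t size, size_t align, struct node *node)
{
	size_t off = (imem->next + align - 1) & ~(align - 1);

	if (off + size > imem->size)
		return -1;
	node->offset = off;
	node->size   = size;
	imem->next   = off + size;
	return 0;
}

static volatile void *obj_acquire(struct instmem *imem, struct node *node)
{
	return imem->iomem + node->offset;      /* cf. nv40_instobj_acquire() */
}

static uint32_t obj_rd32(struct instmem *imem, struct node *node, size_t offset)
{
	return *(volatile uint32_t *)(imem->iomem + node->offset + offset);
}

static void obj_wr32(struct instmem *imem, struct node *node, size_t offset, uint32_t data)
{
	*(volatile uint32_t *)(imem->iomem + node->offset + offset) = data;
}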
