iobj               32 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c nvkm_instobj_load(struct nvkm_instobj *iobj)
iobj               34 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 	struct nvkm_memory *memory = &iobj->memory;
iobj               41 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 			nvkm_wo32(memory, i, iobj->suspend[i / 4]);
iobj               43 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 		memcpy_toio(map, iobj->suspend, size);
iobj               47 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 	kvfree(iobj->suspend);
iobj               48 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 	iobj->suspend = NULL;
iobj               52 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c nvkm_instobj_save(struct nvkm_instobj *iobj)
iobj               54 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 	struct nvkm_memory *memory = &iobj->memory;
iobj               59 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 	iobj->suspend = kvmalloc(size, GFP_KERNEL);
iobj               60 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 	if (!iobj->suspend)
iobj               65 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 			iobj->suspend[i / 4] = nvkm_ro32(memory, i);
iobj               67 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 		memcpy_fromio(iobj->suspend, map, size);
iobj               74 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c nvkm_instobj_dtor(struct nvkm_instmem *imem, struct nvkm_instobj *iobj)
iobj               77 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 	list_del(&iobj->head);
iobj               83 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 		  struct nvkm_instmem *imem, struct nvkm_instobj *iobj)
iobj               85 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 	nvkm_memory_ctor(func, &iobj->memory);
iobj               86 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 	iobj->suspend = NULL;
iobj               88 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 	list_add_tail(&iobj->head, &imem->list);
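The base.c hits up to this point cover the per-object plumbing: nvkm_instobj_ctor()/nvkm_instobj_dtor() link and unlink an object on the instmem list, and nvkm_instobj_save()/nvkm_instobj_load() snapshot its contents into a kvmalloc'd buffer across suspend. A minimal sketch of the save/load pair, assembled from the hits above; the nvkm_memory_size(), nvkm_kmap() and nvkm_done() calls and the exact error handling are filled in from the surrounding driver code and should be read as assumptions, not a verbatim copy.

/* Sketch: snapshot an instobj into system memory before suspend.
 * Falls back to 32-bit accessors when no direct CPU mapping exists. */
static int
nvkm_instobj_save(struct nvkm_instobj *iobj)
{
	struct nvkm_memory *memory = &iobj->memory;
	const u64 size = nvkm_memory_size(memory);	/* assumed helper */
	void __iomem *map;
	int i;

	iobj->suspend = kvmalloc(size, GFP_KERNEL);
	if (!iobj->suspend)
		return -ENOMEM;

	if (!(map = nvkm_kmap(memory))) {
		for (i = 0; i < size; i += 4)
			iobj->suspend[i / 4] = nvkm_ro32(memory, i);
	} else {
		memcpy_fromio(iobj->suspend, map, size);
	}
	nvkm_done(memory);
	return 0;
}

/* Sketch: the inverse, run on resume; the suspend buffer is freed
 * once its contents have been written back. */
static void
nvkm_instobj_load(struct nvkm_instobj *iobj)
{
	struct nvkm_memory *memory = &iobj->memory;
	const u64 size = nvkm_memory_size(memory);	/* assumed helper */
	void __iomem *map;
	int i;

	if (!(map = nvkm_kmap(memory))) {
		for (i = 0; i < size; i += 4)
			nvkm_wo32(memory, i, iobj->suspend[i / 4]);
	} else {
		memcpy_toio(map, iobj->suspend, size);
	}
	nvkm_done(memory);

	kvfree(iobj->suspend);
	iobj->suspend = NULL;
}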
iobj              151 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 	struct nvkm_instobj *iobj, *itmp;
iobj              153 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 	list_for_each_entry_safe(iobj, itmp, &imem->list, head) {
iobj              154 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 		list_move_tail(&iobj->head, &imem->boot);
iobj              163 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 	struct nvkm_instobj *iobj;
iobj              166 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 		list_for_each_entry(iobj, &imem->list, head) {
iobj              167 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 			int ret = nvkm_instobj_save(iobj);
iobj              174 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 		list_for_each_entry(iobj, &imem->boot, head) {
iobj              175 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 			int ret = nvkm_instobj_save(iobj);
iobj              191 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 	struct nvkm_instobj *iobj;
iobj              193 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 	list_for_each_entry(iobj, &imem->boot, head) {
iobj              194 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 		if (iobj->suspend)
iobj              195 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 			nvkm_instobj_load(iobj);
iobj              200 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 	list_for_each_entry(iobj, &imem->list, head) {
iobj              201 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 		if (iobj->suspend)
iobj              202 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/base.c 			nvkm_instobj_load(iobj);
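The remaining base.c hits are the subdev-level suspend/resume ordering. nvkm_instmem_boot() (lines 151-154) moves objects allocated while bootstrapping BAR2 onto a separate imem->boot list, because they have to be saved after BAR2 is shut down and restored before it is brought back up; the fini hits (163-175) and init hits (191-202) reflect exactly that ordering. A sketch of the suspend half follows; nvkm_bar_bar2_fini() and the omitted hardware-specific fini hook are assumptions taken from the surrounding driver, not part of the listing.

/* Sketch of the suspend ordering implied by the fini hits: ordinary
 * objects first, then BAR2, then the boot-list objects that BAR2
 * itself depends on. */
static int
nvkm_instmem_fini(struct nvkm_subdev *subdev, bool suspend)
{
	struct nvkm_instmem *imem = nvkm_instmem(subdev);
	struct nvkm_instobj *iobj;

	if (suspend) {
		list_for_each_entry(iobj, &imem->list, head) {
			int ret = nvkm_instobj_save(iobj);
			if (ret)
				return ret;
		}

		nvkm_bar_bar2_fini(subdev->device);	/* assumed call */

		list_for_each_entry(iobj, &imem->boot, head) {
			int ret = nvkm_instobj_save(iobj);
			if (ret)
				return ret;
		}
	}

	/* The real driver invokes the implementation's fini hook here. */
	return 0;
}

nvkm_instmem_init() runs the mirror image: load the boot list, re-initialise BAR2, then load everything else, which is the order the init hits above appear in.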
iobj               48 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	struct nv04_instobj *iobj = nv04_instobj(memory);
iobj               49 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	struct nvkm_device *device = iobj->imem->base.subdev.device;
iobj               50 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	nvkm_wr32(device, 0x700000 + iobj->node->offset + offset, data);
iobj               56 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	struct nv04_instobj *iobj = nv04_instobj(memory);
iobj               57 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	struct nvkm_device *device = iobj->imem->base.subdev.device;
iobj               58 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	return nvkm_rd32(device, 0x700000 + iobj->node->offset + offset);
iobj               75 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	struct nv04_instobj *iobj = nv04_instobj(memory);
iobj               76 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	struct nvkm_device *device = iobj->imem->base.subdev.device;
iobj               77 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	return device->pri + 0x700000 + iobj->node->offset;
iobj              101 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	struct nv04_instobj *iobj = nv04_instobj(memory);
iobj              102 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	mutex_lock(&iobj->imem->base.subdev.mutex);
iobj              103 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	nvkm_mm_free(&iobj->imem->heap, &iobj->node);
iobj              104 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	mutex_unlock(&iobj->imem->base.subdev.mutex);
iobj              105 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	nvkm_instobj_dtor(&iobj->imem->base, &iobj->base);
iobj              106 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	return iobj;
iobj              124 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	struct nv04_instobj *iobj;
iobj              127 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	if (!(iobj = kzalloc(sizeof(*iobj), GFP_KERNEL)))
iobj              129 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	*pmemory = &iobj->base.memory;
iobj              131 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	nvkm_instobj_ctor(&nv04_instobj_func, &imem->base, &iobj->base);
iobj              132 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	iobj->base.memory.ptrs = &nv04_instobj_ptrs;
iobj              133 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 	iobj->imem = imem;
iobj              137 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c 			   align ? align : 1, &iobj->node);
iobj               50 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	struct nv40_instobj *iobj = nv40_instobj(memory);
iobj               51 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	iowrite32_native(data, iobj->imem->iomem + iobj->node->offset + offset);
iobj               57 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	struct nv40_instobj *iobj = nv40_instobj(memory);
iobj               58 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	return ioread32_native(iobj->imem->iomem + iobj->node->offset + offset);
iobj               76 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	struct nv40_instobj *iobj = nv40_instobj(memory);
iobj               77 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	return iobj->imem->iomem + iobj->node->offset;
iobj              101 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	struct nv40_instobj *iobj = nv40_instobj(memory);
iobj              102 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	mutex_lock(&iobj->imem->base.subdev.mutex);
iobj              103 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	nvkm_mm_free(&iobj->imem->heap, &iobj->node);
iobj              104 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	mutex_unlock(&iobj->imem->base.subdev.mutex);
iobj              105 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	nvkm_instobj_dtor(&iobj->imem->base, &iobj->base);
iobj              106 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	return iobj;
iobj              124 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	struct nv40_instobj *iobj;
iobj              127 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	if (!(iobj = kzalloc(sizeof(*iobj), GFP_KERNEL)))
iobj              129 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	*pmemory = &iobj->base.memory;
iobj              131 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	nvkm_instobj_ctor(&nv40_instobj_func, &imem->base, &iobj->base);
iobj              132 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	iobj->base.memory.ptrs = &nv40_instobj_ptrs;
iobj              133 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 	iobj->imem = imem;
iobj              137 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c 			   align ? align : 1, &iobj->node);
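The nv04.c and nv40.c hits are the two pre-G80 backends. Both suballocate the object from a per-instmem nvkm_mm heap in the ctor and give the node back in the dtor; they differ only in how the CPU reaches the memory. nv04 bounces every access through the PRAMIN window at PRI offset 0x700000 with nvkm_rd32()/nvkm_wr32(), while nv40 ioremaps the aperture once (imem->iomem) and uses plain ioread32_native()/iowrite32_native(). The contrast, lifted from the hits above; the function signatures are reconstructed from the nvkm_memory_ptrs wr32 contract and the comments are added.

/* nv04: each dword is a full MMIO cycle through the PRAMIN window. */
static void
nv04_instobj_wr32(struct nvkm_memory *memory, u64 offset, u32 data)
{
	struct nv04_instobj *iobj = nv04_instobj(memory);
	struct nvkm_device *device = iobj->imem->base.subdev.device;

	nvkm_wr32(device, 0x700000 + iobj->node->offset + offset, data);
}

/* nv40: the aperture is already mapped, so the accessor is a plain
 * store into the ioremapped region. */
static void
nv40_instobj_wr32(struct nvkm_memory *memory, u64 offset, u32 data)
{
	struct nv40_instobj *iobj = nv40_instobj(memory);

	iowrite32_native(data, iobj->imem->iomem + iobj->node->offset + offset);
}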
iobj               58 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	struct nv50_instobj *iobj = nv50_instobj(memory);
iobj               59 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	struct nv50_instmem *imem = iobj->imem;
iobj               61 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	u64 base = (nvkm_memory_addr(iobj->ram) + offset) & 0xffffff00000ULL;
iobj               62 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	u64 addr = (nvkm_memory_addr(iobj->ram) + offset) & 0x000000fffffULL;
iobj               77 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	struct nv50_instobj *iobj = nv50_instobj(memory);
iobj               78 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	struct nv50_instmem *imem = iobj->imem;
iobj               80 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	u64 base = (nvkm_memory_addr(iobj->ram) + offset) & 0xffffff00000ULL;
iobj               81 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	u64 addr = (nvkm_memory_addr(iobj->ram) + offset) & 0x000000fffffULL;
iobj              120 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c nv50_instobj_kmap(struct nv50_instobj *iobj, struct nvkm_vmm *vmm)
iobj              122 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	struct nv50_instmem *imem = iobj->imem;
iobj              124 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	struct nvkm_memory *memory = &iobj->base.memory;
iobj              164 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	if (ret || iobj->bar) {
iobj              173 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	iobj->bar = bar;
iobj              174 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	iobj->map = ioremap_wc(device->func->resource_addr(device, 3) +
iobj              175 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 			       (u32)iobj->bar->addr, size);
iobj              176 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	if (!iobj->map) {
iobj              178 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 		nvkm_vmm_put(vmm, &iobj->bar);
iobj              193 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	struct nv50_instobj *iobj = nv50_instobj(memory);
iobj              194 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	struct nv50_instmem *imem = iobj->imem;
iobj              200 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	if (refcount_dec_and_mutex_lock(&iobj->maps, &subdev->mutex)) {
iobj              204 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 		if (likely(iobj->lru.next) && iobj->map) {
iobj              205 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 			BUG_ON(!list_empty(&iobj->lru));
iobj              206 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 			list_add_tail(&iobj->lru, &imem->lru);
iobj              210 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 		iobj->base.memory.ptrs = NULL;
iobj              218 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	struct nv50_instobj *iobj = nv50_instobj(memory);
iobj              219 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	struct nvkm_instmem *imem = &iobj->imem->base;
iobj              224 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	if (refcount_inc_not_zero(&iobj->maps))
iobj              225 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 		return iobj->map;
iobj              231 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	if (refcount_inc_not_zero(&iobj->maps)) {
iobj              233 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 		return iobj->map;
iobj              238 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 		if (!iobj->map)
iobj              239 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 			nv50_instobj_kmap(iobj, vmm);
iobj              240 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 		map = iobj->map;
iobj              243 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	if (!refcount_inc_not_zero(&iobj->maps)) {
iobj              245 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 		if (likely(iobj->lru.next))
iobj              246 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 			list_del_init(&iobj->lru);
iobj              249 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 			iobj->base.memory.ptrs = &nv50_instobj_fast;
iobj              251 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 			iobj->base.memory.ptrs = &nv50_instobj_slow;
iobj              252 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 		refcount_set(&iobj->maps, 1);
iobj              262 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	struct nv50_instobj *iobj = nv50_instobj(memory);
iobj              263 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	struct nvkm_instmem *imem = &iobj->imem->base;
iobj              269 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	if (likely(iobj->lru.next)) {
iobj              270 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 		list_del_init(&iobj->lru);
iobj              271 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 		iobj->lru.next = NULL;
iobj              274 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	nv50_instobj_kmap(iobj, vmm);
iobj              294 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	struct nv50_instobj *iobj = nv50_instobj(memory);
iobj              296 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	if (nv50_instobj_acquire(&iobj->base.memory)) {
iobj              297 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 		iobj->lru.next = NULL; /* Exclude from eviction. */
iobj              298 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 		addr = iobj->bar->addr;
iobj              300 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	nv50_instobj_release(&iobj->base.memory);
iobj              313 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	struct nv50_instobj *iobj = nv50_instobj(memory);
iobj              314 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	struct nvkm_instmem *imem = &iobj->imem->base;
iobj              319 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	if (likely(iobj->lru.next))
iobj              320 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 		list_del(&iobj->lru);
iobj              321 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	map = iobj->map;
iobj              322 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	bar = iobj->bar;
iobj              332 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	nvkm_memory_unref(&iobj->ram);
iobj              333 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	nvkm_instobj_dtor(imem, &iobj->base);
iobj              334 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	return iobj;
iobj              355 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	struct nv50_instobj *iobj;
iobj              359 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	if (!(iobj = kzalloc(sizeof(*iobj), GFP_KERNEL)))
iobj              361 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	*pmemory = &iobj->base.memory;
iobj              363 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	nvkm_instobj_ctor(&nv50_instobj_func, &imem->base, &iobj->base);
iobj              364 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	iobj->imem = imem;
iobj              365 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	refcount_set(&iobj->maps, 0);
iobj              366 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	INIT_LIST_HEAD(&iobj->lru);
iobj              368 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c 	return nvkm_ram_get(device, 0, 1, page, size, true, true, &iobj->ram);
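The nv50.c hits are the G80+ backend, where objects live in VRAM (nvkm_ram_get() in the ctor) and CPU access needs a BAR2 mapping. nv50_instobj_kmap() creates that mapping lazily, reclaiming older mappings from imem->lru when BAR2 space runs short; nv50_instobj_acquire()/nv50_instobj_release() refcount it through iobj->maps, switch memory.ptrs between the fast (direct iobj->map) and slow (PRAMIN window) accessors, and park an unused mapping on the LRU rather than tearing it down. Callers never see any of this, they go through the generic helpers, roughly as below; example_fill_instobj and the offsets written are hypothetical, while nvkm_kmap()/nvkm_wo32()/nvkm_done() are the existing core helpers that resolve to the acquire/wr32/release hooks listed above.

/* Hypothetical caller: fill the first two dwords of an instobj. */
static void
example_fill_instobj(struct nvkm_memory *inst)
{
	/* Ends up in nv50_instobj_acquire(): takes a reference on
	 * iobj->maps and maps the object through BAR2 if needed. */
	nvkm_kmap(inst);

	/* Resolved through memory.ptrs: the "fast" accessors write
	 * straight through iobj->map, the "slow" ones fall back to
	 * the PRAMIN window if the BAR2 map could not be created. */
	nvkm_wo32(inst, 0x00, 0x00000000);
	nvkm_wo32(inst, 0x04, 0x00000001);

	/* Ends up in nv50_instobj_release(): the last reference parks
	 * the mapping on imem->lru so a later nvkm_kmap() is cheap. */
	nvkm_done(inst);
}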