/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/
nv40.c:
    struct nv40_instobj
         44  struct nv40_instmem *imem;   /* member */
    nv40_instobj_acquire()
         70  return iobj->imem->iomem + iobj->node->offset;
    nv40_instobj_rd32()
         82  return ioread32_native(iobj->imem->iomem + iobj->node->offset + offset);
    nv40_instobj_wr32()
         89  iowrite32_native(data, iobj->imem->iomem + iobj->node->offset + offset);
    nv40_instobj_dtor()
         96  mutex_lock(&iobj->imem->base.subdev.mutex);
         97  nvkm_mm_free(&iobj->imem->heap, &iobj->node);
         98  mutex_unlock(&iobj->imem->base.subdev.mutex);
    nv40_instobj_new()
        118  struct nv40_instmem *imem = nv40_instmem(base);
        127  iobj->imem = imem;
        129  mutex_lock(&imem->base.subdev.mutex);
        130  ret = nvkm_mm_head(&imem->heap, 0, 1, size, size,
        132  mutex_unlock(&imem->base.subdev.mutex);
    nv40_instmem_oneinit()
        155  struct nv40_instmem *imem = nv40_instmem(base);
        156  struct nvkm_device *device = imem->base.subdev.device;
        164  if (device->chipset == 0x40) imem->base.reserved = 0x6aa0 * vs;
        165  else if (device->chipset < 0x43) imem->base.reserved = 0x4f00 * vs;
        166  else if (nv44_gr_class(device)) imem->base.reserved = 0x4980 * vs;
        167  else imem->base.reserved = 0x4a40 * vs;
        168  imem->base.reserved += 16 * 1024;
        169  imem->base.reserved *= 32;          /* per-channel */
        170  imem->base.reserved += 512 * 1024;  /* pci(e)gart table */
        171  imem->base.reserved += 512 * 1024;  /* object storage */
        172  imem->base.reserved = round_up(imem->base.reserved, 4096);
        174  ret = nvkm_mm_init(&imem->heap, 0, imem->base.reserved, 1);
        180  &imem->base.vbios);
        185  ret = nvkm_ramht_new(device, 0x08000, 0, NULL, &imem->base.ramht);
        193  &imem->base.ramro);
        201  &imem->base.ramfc);
    nv40_instmem_dtor()
        211  struct nv40_instmem *imem = nv40_instmem(base);
        212  nvkm_memory_del(&imem->base.ramfc);
        213  nvkm_memory_del(&imem->base.ramro);
        214  nvkm_ramht_del(&imem->base.ramht);
        215  nvkm_memory_del(&imem->base.vbios);
        216  nvkm_mm_fini(&imem->heap);
        217  if (imem->iomem)
        218  iounmap(imem->iomem);
        219  return imem;
    nv40_instmem_new()
        237  struct nv40_instmem *imem;
        240  if (!(imem = kzalloc(sizeof(*imem), GFP_KERNEL)))
        242  nvkm_instmem_ctor(&nv40_instmem, device, index, &imem->base);
        243  *pimem = &imem->base;
        251  imem->iomem = ioremap(device->func->resource_addr(device, bar),
        253  if (!imem->iomem) {
        254  nvkm_error(&imem->base.subdev, "unable to map PRAMIN BAR\n");
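The nv40 path is the simplest to see end to end: nv40_instmem_new() ioremap()s the PRAMIN BAR once, objects are carved out of a heap with nvkm_mm_head() under the subdev mutex, and every access is then a plain MMIO read or write at the object's heap offset. A minimal sketch of the access path, using cut-down stand-in types (the sk_* names are hypothetical, not the driver's):

#include <linux/io.h>
#include <linux/types.h>

struct sk_node { u32 offset; };                 /* allocation within the heap */
struct sk_imem { void __iomem *iomem; };        /* ioremap()ed PRAMIN BAR */
struct sk_iobj { struct sk_imem *imem; struct sk_node *node; };

static u32 sk_rd32(struct sk_iobj *iobj, u64 offset)
{
        /* no window register to program: the whole aperture is mapped */
        return ioread32_native(iobj->imem->iomem + iobj->node->offset + offset);
}

static void sk_wr32(struct sk_iobj *iobj, u64 offset, u32 data)
{
        iowrite32_native(data, iobj->imem->iomem + iobj->node->offset + offset);
}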
gk20a.c:
    struct gk20a_instobj
         55  struct gk20a_instmem *imem;   /* member */
    gk20a_instobj_cpu_map_dma()
        138  struct device *dev = node->base.imem->base.subdev.device->dev;
    gk20a_instmem_vaddr_gc()
        170  gk20a_instmem_vaddr_gc(struct gk20a_instmem *imem, const u64 size)
        172  while (imem->vaddr_use + size > imem->vaddr_max) {
        176  if (list_empty(&imem->vaddr_lru))
        179  obj = list_first_entry(&imem->vaddr_lru, struct gk20a_instobj,
        184  imem->vaddr_use -= nvkm_memory_size(&obj->memory);
        185  nvkm_debug(&imem->base.subdev, "(GC) vaddr used: %x/%x\n",
        186  imem->vaddr_use, imem->vaddr_max);
    gk20a_instobj_acquire()
        195  struct gk20a_instmem *imem = node->imem;
        196  struct nvkm_ltc *ltc = imem->base.subdev.device->ltc;
        202  spin_lock_irqsave(&imem->lock, flags);
        212  gk20a_instmem_vaddr_gc(imem, size);
        214  node->vaddr = imem->cpu_map(memory);
        217  nvkm_error(&imem->base.subdev, "cannot map instobj - "
        222  imem->vaddr_use += size;
        223  nvkm_debug(&imem->base.subdev, "vaddr used: %x/%x\n",
        224  imem->vaddr_use, imem->vaddr_max);
        227  spin_unlock_irqrestore(&imem->lock, flags);
    gk20a_instobj_release()
        236  struct gk20a_instmem *imem = node->imem;
        237  struct nvkm_ltc *ltc = imem->base.subdev.device->ltc;
        240  spin_lock_irqsave(&imem->lock, flags);
        243  list_add_tail(&node->vaddr_node, &imem->vaddr_lru);
        245  spin_unlock_irqrestore(&imem->lock, flags);
    gk20a_instobj_dtor()
        281  struct gk20a_instmem *imem = node->imem;
        285  spin_lock_irqsave(&imem->lock, flags);
        290  list_for_each_entry(obj, &imem->vaddr_lru, vaddr_node) {
        298  imem->vaddr_use -= nvkm_memory_size(&node->memory);
        299  nvkm_debug(&imem->base.subdev, "vaddr used: %x/%x\n",
        300  imem->vaddr_use, imem->vaddr_max);
        303  spin_unlock_irqrestore(&imem->lock, flags);
    gk20a_instobj_dtor_dma()
        310  struct gk20a_instmem *imem = node->base.imem;
        311  struct device *dev = imem->base.subdev.device->dev;
        319  node->handle, &imem->attrs);
    gk20a_instobj_dtor_iommu()
        329  struct gk20a_instmem *imem = node->base.imem;
        330  struct device *dev = imem->base.subdev.device->dev;
        343  r->offset &= ~BIT(imem->iommu_bit - imem->iommu_pgshift);
        347  iommu_unmap(imem->domain,
        348  (r->offset + i) << imem->iommu_pgshift, PAGE_SIZE);
        355  mutex_lock(imem->mm_mutex);
        356  nvkm_mm_free(imem->mm, &r);
        357  mutex_unlock(imem->mm_mutex);
    gk20a_instobj_ctor_dma()
        390  gk20a_instobj_ctor_dma(struct gk20a_instmem *imem, u32 npages, u32 align,
        394  struct nvkm_subdev *subdev = &imem->base.subdev;
        405  &imem->attrs);
    gk20a_instobj_ctor_iommu()
        431  gk20a_instobj_ctor_iommu(struct gk20a_instmem *imem, u32 npages, u32 align,
        435  struct nvkm_subdev *subdev = &imem->base.subdev;
        472  mutex_lock(imem->mm_mutex);
        474  ret = nvkm_mm_head(imem->mm, 0, 1, npages, npages,
        475  align >> imem->iommu_pgshift, &r);
        476  mutex_unlock(imem->mm_mutex);
        484  u32 offset = (r->offset + i) << imem->iommu_pgshift;
        486  ret = iommu_map(imem->domain, offset, node->dma_addrs[i],
        493  iommu_unmap(imem->domain, offset, PAGE_SIZE);
        500  r->offset |= BIT(imem->iommu_bit - imem->iommu_pgshift);
        502  node->base.mem.offset = ((u64)r->offset) << imem->iommu_pgshift;
        510  mutex_lock(imem->mm_mutex);
        511  nvkm_mm_free(imem->mm, &r);
        512  mutex_unlock(imem->mm_mutex);
    gk20a_instobj_new()
        530  struct gk20a_instmem *imem = gk20a_instmem(base);
        531  struct nvkm_subdev *subdev = &imem->base.subdev;
        536  imem->domain ? "IOMMU" : "DMA", size, align);
        542  if (imem->domain)
        543  ret = gk20a_instobj_ctor_iommu(imem, size >> PAGE_SHIFT,
        546  ret = gk20a_instobj_ctor_dma(imem, size >> PAGE_SHIFT,
        552  node->imem = imem;
    gk20a_instmem_dtor()
        568  struct gk20a_instmem *imem = gk20a_instmem(base);
        571  if (!list_empty(&imem->vaddr_lru))
        574  if (imem->vaddr_use != 0)
        576  "0x%x bytes still mapped\n", imem->vaddr_use);
        578  return imem;
    gk20a_instmem_new()
        594  struct gk20a_instmem *imem;
        596  if (!(imem = kzalloc(sizeof(*imem), GFP_KERNEL)))
        598  nvkm_instmem_ctor(&gk20a_instmem, device, index, &imem->base);
        599  spin_lock_init(&imem->lock);
        600  *pimem = &imem->base;
        603  imem->vaddr_use = 0;
        604  imem->vaddr_max = 0x100000;
        605  INIT_LIST_HEAD(&imem->vaddr_lru);
        608  imem->mm_mutex = &tdev->iommu.mutex;
        609  imem->mm = &tdev->iommu.mm;
        610  imem->domain = tdev->iommu.domain;
        611  imem->iommu_pgshift = tdev->iommu.pgshift;
        612  imem->cpu_map = gk20a_instobj_cpu_map_iommu;
        613  imem->iommu_bit = tdev->func->iommu_bit;
        615  nvkm_info(&imem->base.subdev, "using IOMMU\n");
        617  init_dma_attrs(&imem->attrs);
        619  dma_set_attr(DMA_ATTR_NON_CONSISTENT, &imem->attrs);
        620  dma_set_attr(DMA_ATTR_WEAK_ORDERING, &imem->attrs);
        621  dma_set_attr(DMA_ATTR_WRITE_COMBINE, &imem->attrs);
        622  dma_set_attr(DMA_ATTR_NO_KERNEL_MAPPING, &imem->attrs);
        623  imem->cpu_map = gk20a_instobj_cpu_map_dma;
        625  nvkm_info(&imem->base.subdev, "using DMA API\n");
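gk20a caps how many bytes of instance memory may hold a CPU mapping at once (vaddr_max, set to 0x100000 above); mappings released by gk20a_instobj_release() stay cached on the vaddr_lru list and are reclaimed oldest-first when a new acquire would exceed the cap. A sketch of that recycling loop, again with hypothetical sk_* stand-in types; the real gk20a_instmem_vaddr_gc() runs with imem->lock held, which is why the listing shows spin_lock_irqsave() around acquire and release:

#include <linux/list.h>
#include <linux/types.h>

struct sk_obj {
        struct list_head vaddr_node;    /* link on the LRU of idle mappings */
        void *vaddr;                    /* cached CPU mapping */
        u64 size;
};

struct sk_imem {
        struct list_head vaddr_lru;     /* released but still-mapped objects */
        u32 vaddr_use, vaddr_max;       /* mapped bytes: current / ceiling */
};

/* Evict least-recently-used mappings until `size` more bytes fit. */
static void sk_vaddr_gc(struct sk_imem *imem, u64 size)
{
        while (imem->vaddr_use + size > imem->vaddr_max) {
                struct sk_obj *obj;

                if (list_empty(&imem->vaddr_lru))
                        break;          /* everything left is in active use */

                obj = list_first_entry(&imem->vaddr_lru, struct sk_obj,
                                       vaddr_node);
                list_del(&obj->vaddr_node);
                /* the real driver unmaps obj->vaddr here before reuse */
                imem->vaddr_use -= obj->size;
        }
}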
base.c:
    struct nvkm_instobj
         37  struct nvkm_instmem *imem;   /* member */
    nvkm_instobj_release()
         68  nvkm_bar_flush(iobj->imem->subdev.device->bar);
    nvkm_instobj_dtor()
        100  spin_lock(&iobj->imem->lock);
        102  spin_unlock(&iobj->imem->lock);
    nvkm_instobj_new()
        174  nvkm_instobj_new(struct nvkm_instmem *imem, u32 size, u32 align, bool zero,
        182  ret = imem->func->memory_new(imem, size, align, zero, &memory);
        186  if (!imem->func->persistent) {
        194  iobj->imem = imem;
        195  spin_lock(&iobj->imem->lock);
        196  list_add_tail(&iobj->head, &imem->list);
        197  spin_unlock(&iobj->imem->lock);
        201  if (!imem->func->zero && zero) {
    nvkm_instmem_rd32()
        224  nvkm_instmem_rd32(struct nvkm_instmem *imem, u32 addr)
        226  return imem->func->rd32(imem, addr);
    nvkm_instmem_wr32()
        230  nvkm_instmem_wr32(struct nvkm_instmem *imem, u32 addr, u32 data)
        232  return imem->func->wr32(imem, addr, data);
    nvkm_instmem_fini()
        238  struct nvkm_instmem *imem = nvkm_instmem(subdev);
        242  if (imem->func->fini)
        243  imem->func->fini(imem);
        246  list_for_each_entry(iobj, &imem->list, head) {
    nvkm_instmem_oneinit()
        265  struct nvkm_instmem *imem = nvkm_instmem(subdev);
        266  if (imem->func->oneinit)
        267  return imem->func->oneinit(imem);
    nvkm_instmem_init()
        274  struct nvkm_instmem *imem = nvkm_instmem(subdev);
        278  list_for_each_entry(iobj, &imem->list, head) {
    nvkm_instmem_dtor()
        295  struct nvkm_instmem *imem = nvkm_instmem(subdev);
        296  if (imem->func->dtor)
        297  return imem->func->dtor(imem);
        298  return imem;
    nvkm_instmem_ctor()
        310  nvkm_instmem_ctor(const struct nvkm_instmem_func *func,
             struct nvkm_device *device, int index, struct nvkm_instmem *imem)
        314  nvkm_subdev_ctor(&nvkm_instmem, device, index, 0, &imem->subdev);
        315  imem->func = func;
        316  spin_lock_init(&imem->lock);
        317  INIT_LIST_HEAD(&imem->list);
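base.c itself is pure dispatch: each chipset backend fills in a function table at construction time, and the generic helpers just forward through imem->func, exactly as lines 226 and 232 show. A sketch of the shape, with abbreviated hypothetical types:

#include <linux/types.h>

struct sk_imem;

struct sk_imem_func {                   /* per-chipset backend ops */
        u32  (*rd32)(struct sk_imem *, u32 addr);
        void (*wr32)(struct sk_imem *, u32 addr, u32 data);
};

struct sk_imem {
        const struct sk_imem_func *func;        /* set by the backend's ctor */
};

static u32 sk_instmem_rd32(struct sk_imem *imem, u32 addr)
{
        return imem->func->rd32(imem, addr);    /* e.g. the nv04 backend */
}

static void sk_instmem_wr32(struct sk_imem *imem, u32 addr, u32 data)
{
        imem->func->wr32(imem, addr, data);
}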
nv04.c:
    struct nv04_instobj
         42  struct nv04_instmem *imem;   /* member */
    nv04_instobj_acquire()
         68  struct nvkm_device *device = iobj->imem->base.subdev.device;
    nv04_instobj_rd32()
         81  struct nvkm_device *device = iobj->imem->base.subdev.device;
    nv04_instobj_wr32()
         89  struct nvkm_device *device = iobj->imem->base.subdev.device;
    nv04_instobj_dtor()
         97  mutex_lock(&iobj->imem->base.subdev.mutex);
         98  nvkm_mm_free(&iobj->imem->heap, &iobj->node);
         99  mutex_unlock(&iobj->imem->base.subdev.mutex);
    nv04_instobj_new()
        119  struct nv04_instmem *imem = nv04_instmem(base);
        128  iobj->imem = imem;
        130  mutex_lock(&imem->base.subdev.mutex);
        131  ret = nvkm_mm_head(&imem->heap, 0, 1, size, size,
        133  mutex_unlock(&imem->base.subdev.mutex);
    nv04_instmem_rd32()
        142  nv04_instmem_rd32(struct nvkm_instmem *imem, u32 addr)
        144  return nvkm_rd32(imem->subdev.device, 0x700000 + addr);
    nv04_instmem_wr32()
        148  nv04_instmem_wr32(struct nvkm_instmem *imem, u32 addr, u32 data)
        150  nvkm_wr32(imem->subdev.device, 0x700000 + addr, data);
    nv04_instmem_oneinit()
        156  struct nv04_instmem *imem = nv04_instmem(base);
        157  struct nvkm_device *device = imem->base.subdev.device;
        161  imem->base.reserved = 512 * 1024;
        163  ret = nvkm_mm_init(&imem->heap, 0, imem->base.reserved, 1);
        169  &imem->base.vbios);
        174  ret = nvkm_ramht_new(device, 0x08000, 0, NULL, &imem->base.ramht);
        180  &imem->base.ramfc);
        186  &imem->base.ramro);
    nv04_instmem_dtor()
        196  struct nv04_instmem *imem = nv04_instmem(base);
        197  nvkm_memory_del(&imem->base.ramfc);
        198  nvkm_memory_del(&imem->base.ramro);
        199  nvkm_ramht_del(&imem->base.ramht);
        200  nvkm_memory_del(&imem->base.vbios);
        201  nvkm_mm_fini(&imem->heap);
        202  return imem;
    nv04_instmem_new()
        220  struct nv04_instmem *imem;
        222  if (!(imem = kzalloc(sizeof(*imem), GFP_KERNEL)))
        224  nvkm_instmem_ctor(&nv04_instmem, device, index, &imem->base);
        225  *pimem = &imem->base;
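On nv04-class hardware the backend needs no BAR mapping of its own: instance memory is visible through the PRAMIN window at MMIO offset 0x700000 (lines 144 and 150), so rd32/wr32 just add that base to the instance address. A sketch, substituting a raw __iomem pointer for the driver's nvkm_rd32()/nvkm_wr32() helpers; sk_dev is a hypothetical stand-in:

#include <linux/io.h>
#include <linux/types.h>

#define SK_PRAMIN 0x700000              /* window base, from the listing */

struct sk_dev { void __iomem *mmio; };  /* assumed ioremap()ed register BAR */

static u32 sk_nv04_imem_rd32(struct sk_dev *dev, u32 addr)
{
        return readl(dev->mmio + SK_PRAMIN + addr);
}

static void sk_nv04_imem_wr32(struct sk_dev *dev, u32 addr, u32 data)
{
        writel(data, dev->mmio + SK_PRAMIN + addr);
}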
nv50.c:
    struct nv50_instobj
         46  struct nv50_instmem *imem;   /* member */
    nv50_instobj_boot()
         74  struct nvkm_subdev *subdev = &iobj->imem->base.subdev;
    nv50_instobj_release()
        101  struct nv50_instmem *imem = nv50_instobj(memory)->imem;
        102  spin_unlock_irqrestore(&imem->lock, imem->lock_flags);
    nv50_instobj_acquire()
        109  struct nv50_instmem *imem = iobj->imem;
        110  struct nvkm_bar *bar = imem->base.subdev.device->bar;
        119  spin_lock_irqsave(&imem->lock, flags);
        120  imem->lock_flags = flags;
    nv50_instobj_rd32()
        128  struct nv50_instmem *imem = iobj->imem;
        129  struct nvkm_device *device = imem->base.subdev.device;
        134  if (unlikely(imem->addr != base)) {
        136  imem->addr = base;
    nv50_instobj_wr32()
        146  struct nv50_instmem *imem = iobj->imem;
        147  struct nvkm_device *device = imem->base.subdev.device;
        151  if (unlikely(imem->addr != base)) {
        153  imem->addr = base;
    nv50_instobj_dtor()
        169  struct nvkm_ram *ram = iobj->imem->base.subdev.device->fb->ram;
    nv50_instobj_new()
        196  struct nv50_instmem *imem = nv50_instmem(base);
        198  struct nvkm_ram *ram = imem->base.subdev.device->fb->ram;
        206  iobj->imem = imem;
    nv50_instmem_new()
        241  struct nv50_instmem *imem;
        243  if (!(imem = kzalloc(sizeof(*imem), GFP_KERNEL)))
        245  nvkm_instmem_ctor(&nv50_instmem, device, index, &imem->base);
        246  spin_lock_init(&imem->lock);
        247  *pimem = &imem->base;
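nv50 has far more instance memory than a fixed aperture can expose, so its rd32/wr32 slide a window: imem->addr caches the base currently mapped, and the window is reprogrammed only when an access falls outside it, which is the unlikely() branch at lines 134 and 151. The window-select register and the granularity are elided in the listing, so both are assumptions in this sketch:

#include <linux/io.h>
#include <linux/types.h>

static void __iomem *sk_mmio;           /* assumed ioremap()ed register BAR */

static u32  sk_mmio_rd32(u32 reg)        { return readl(sk_mmio + reg); }
static void sk_mmio_wr32(u32 reg, u32 v) { writel(v, sk_mmio + reg); }

struct sk_imem {
        u64 addr;                       /* base currently behind the window */
};

static u32 sk_nv50_imem_rd32(struct sk_imem *imem, u64 addr)
{
        u64 base = addr & ~0xffffULL;   /* assumed 64KiB window granularity */

        if (imem->addr != base) {
                /* 0x001700 as the window-select register is an assumption */
                sk_mmio_wr32(0x001700, base >> 16);
                imem->addr = base;
        }
        return sk_mmio_rd32(0x700000 + (u32)(addr & 0xffff));
}

The cached window state is also why nv50 serializes accesses with imem->lock, taken in acquire (line 119) and dropped in release (line 102).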
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
dmanv40.c:
    nv40_fifo_dma_engine_fini()
         62  struct nvkm_instmem *imem = device->imem;
         76  nvkm_kmap(imem->ramfc);
         77  nvkm_wo32(imem->ramfc, chan->ramfc + ctx, 0x00000000);
         78  nvkm_done(imem->ramfc);
    nv40_fifo_dma_engine_init()
         92  struct nvkm_instmem *imem = device->imem;
        107  nvkm_kmap(imem->ramfc);
        108  nvkm_wo32(imem->ramfc, chan->ramfc + ctx, inst);
        109  nvkm_done(imem->ramfc);
    nv40_fifo_dma_object_ctor()
        144  struct nvkm_instmem *imem = chan->fifo->base.engine.subdev.device->imem;
        160  hash = nvkm_ramht_insert(imem->ramht, object, chan->base.chid, 4,
    nv40_fifo_dma_new()
        190  struct nvkm_instmem *imem = device->imem;
        221  nvkm_kmap(imem->ramfc);
        222  nvkm_wo32(imem->ramfc, chan->ramfc + 0x00, args->v0.offset);
        223  nvkm_wo32(imem->ramfc, chan->ramfc + 0x04, args->v0.offset);
        224  nvkm_wo32(imem->ramfc, chan->ramfc + 0x0c, chan->base.push->addr >> 4);
        225  nvkm_wo32(imem->ramfc, chan->ramfc + 0x18, 0x30000000 |
        232  nvkm_wo32(imem->ramfc, chan->ramfc + 0x3c, 0x0001ffff);
        233  nvkm_done(imem->ramfc);
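Every RAMFC update in these files follows the same bracket: nvkm_kmap() makes the instmem-backed object CPU-addressable, nvkm_wo32() writes 32-bit words at chan->ramfc plus a register-specific offset, and nvkm_done() finishes the access. A sketch of the engine-context bind/unbind seen at lines 77 and 108, assuming the nvkm core headers for struct nvkm_memory:

/* Bind an engine context in a channel's RAMFC slot; inst == 0 unbinds. */
static void sk_set_engine_ctx(struct nvkm_memory *ramfc, u32 chan_ramfc,
                              u32 ctx, u32 inst)
{
        nvkm_kmap(ramfc);                       /* map for CPU access */
        nvkm_wo32(ramfc, chan_ramfc + ctx, inst);
        nvkm_done(ramfc);                       /* flush and unmap */
}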
dmanv04.c:
    nv04_fifo_dma_object_dtor()
         38  struct nvkm_instmem *imem = chan->fifo->base.engine.subdev.device->imem;
         39  nvkm_ramht_remove(imem->ramht, cookie);
    nv04_fifo_dma_object_ctor()
         47  struct nvkm_instmem *imem = chan->fifo->base.engine.subdev.device->imem;
         63  hash = nvkm_ramht_insert(imem->ramht, object, chan->base.chid, 4,
    nv04_fifo_dma_fini()
         75  struct nvkm_memory *fctx = device->imem->ramfc;
    nv04_fifo_dma_dtor()
        138  struct nvkm_instmem *imem = fifo->base.engine.subdev.device->imem;
        141  nvkm_kmap(imem->ramfc);
        143  nvkm_wo32(imem->ramfc, chan->ramfc + c->ctxp, 0x00000000);
        145  nvkm_done(imem->ramfc);
    nv04_fifo_dma_new()
        169  struct nvkm_instmem *imem = device->imem;
        199  nvkm_kmap(imem->ramfc);
        200  nvkm_wo32(imem->ramfc, chan->ramfc + 0x00, args->v0.offset);
        201  nvkm_wo32(imem->ramfc, chan->ramfc + 0x04, args->v0.offset);
        202  nvkm_wo32(imem->ramfc, chan->ramfc + 0x08, chan->base.push->addr >> 4);
        203  nvkm_wo32(imem->ramfc, chan->ramfc + 0x10,
        210  nvkm_done(imem->ramfc);
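Objects become visible to the hardware once hashed into RAMHT, keyed by channel id; the dtor removes them by the cookie the insert returned. A sketch of the pairing; the trailing arguments to nvkm_ramht_insert() are truncated in the listing above, so the handle/context names here are illustrative only:

/* assumes the nvkm core headers for nvkm_ramht / nvkm_object */
static int sk_object_bind(struct nvkm_ramht *ramht, struct nvkm_object *object,
                          int chid, u32 handle, u32 context)
{
        /* 4 is the entry alignment used by both call sites in the listing */
        return nvkm_ramht_insert(ramht, object, chid, 4, handle, context);
}

static void sk_object_unbind(struct nvkm_ramht *ramht, int cookie)
{
        nvkm_ramht_remove(ramht, cookie);       /* cookie = value bind returned */
}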
dmanv10.c:
    nv10_fifo_dma_new()
         45  struct nvkm_instmem *imem = device->imem;
         75  nvkm_kmap(imem->ramfc);
         76  nvkm_wo32(imem->ramfc, chan->ramfc + 0x00, args->v0.offset);
         77  nvkm_wo32(imem->ramfc, chan->ramfc + 0x04, args->v0.offset);
         78  nvkm_wo32(imem->ramfc, chan->ramfc + 0x0c, chan->base.push->addr >> 4);
         79  nvkm_wo32(imem->ramfc, chan->ramfc + 0x14,
         86  nvkm_done(imem->ramfc);
dmanv17.c:
    nv17_fifo_dma_new()
         45  struct nvkm_instmem *imem = device->imem;
         76  nvkm_kmap(imem->ramfc);
         77  nvkm_wo32(imem->ramfc, chan->ramfc + 0x00, args->v0.offset);
         78  nvkm_wo32(imem->ramfc, chan->ramfc + 0x04, args->v0.offset);
         79  nvkm_wo32(imem->ramfc, chan->ramfc + 0x0c, chan->base.push->addr >> 4);
         80  nvkm_wo32(imem->ramfc, chan->ramfc + 0x14,
         87  nvkm_done(imem->ramfc);
nv17.c:
    nv17_fifo_init()
         55  struct nvkm_instmem *imem = device->imem;
         56  struct nvkm_ramht *ramht = imem->ramht;
         57  struct nvkm_memory *ramro = imem->ramro;
         58  struct nvkm_memory *ramfc = imem->ramfc;
nv40.c:
    nv40_fifo_init()
         65  struct nvkm_instmem *imem = device->imem;
         66  struct nvkm_ramht *ramht = imem->ramht;
         67  struct nvkm_memory *ramro = imem->ramro;
         68  struct nvkm_memory *ramfc = imem->ramfc;
nv04.c:
    nv04_fifo_init()
        301  struct nvkm_instmem *imem = device->imem;
        302  struct nvkm_ramht *ramht = imem->ramht;
        303  struct nvkm_memory *ramro = imem->ramro;
        304  struct nvkm_memory *ramfc = imem->ramfc;
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/mpeg/
nv40.c:
    nv40_mpeg_mthd_dma()
         33  struct nvkm_instmem *imem = device->imem;
         35  u32 dma0 = nvkm_instmem_rd32(imem, inst + 0);
         36  u32 dma1 = nvkm_instmem_rd32(imem, inst + 4);
         37  u32 dma2 = nvkm_instmem_rd32(imem, inst + 8);
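This is the read side of the instmem accessors: a DMA object occupies three consecutive 32-bit words at instance offset inst, and the engine fetches them through the generic nvkm_instmem_rd32() dispatcher from base.c. A sketch, assuming the nvkm core headers:

static void sk_read_dmaobj(struct nvkm_instmem *imem, u32 inst,
                           u32 *dma0, u32 *dma1, u32 *dma2)
{
        /* the three words encode the object's flags, limit, and base */
        *dma0 = nvkm_instmem_rd32(imem, inst + 0);
        *dma1 = nvkm_instmem_rd32(imem, inst + 4);
        *dma2 = nvkm_instmem_rd32(imem, inst + 8);
}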
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/core/
memory.c:
    nvkm_memory_new()
         51  struct nvkm_instmem *imem = device->imem;
         55  if (unlikely(target != NVKM_MEM_TARGET_INST || !imem))
         58  ret = nvkm_instobj_new(imem, size, align, zero, &memory);
subdev.c:
         42  [NVKM_SUBDEV_INSTMEM] = "imem",
/linux-4.4.14/arch/blackfin/mach-common/
cache-c.c:
    blackfin_invalidate_entire_icache()
         29  u32 imem = bfin_read_IMEM_CONTROL();
         30  bfin_write_IMEM_CONTROL(imem & ~0x4);
         32  bfin_write_IMEM_CONTROL(imem);
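Here `imem` is just a scratch copy of Blackfin's IMEM_CONTROL register: clearing bit 2 turns the instruction cache off, which drops its contents, and restoring the saved value re-enables it, so the whole i-cache is invalidated without touching lines individually. A sketch; the in-tree function also brackets the writes with SSYNC() barriers, whose exact placement is assumed here:

static void sk_invalidate_entire_icache(void)
{
        u32 imem = bfin_read_IMEM_CONTROL();

        bfin_write_IMEM_CONTROL(imem & ~0x4);   /* cache off: contents dropped */
        SSYNC();
        bfin_write_IMEM_CONTROL(imem);          /* restore previous mode */
        SSYNC();
}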
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/device/
base.c:
    .imem = nv04_instmem_new  at lines 85, 106, 128, 148, 170, 192, 214, 236,
        258, 280, 302, 324, 346, 368, 390, 413, 436, 458
    .imem = nv40_instmem_new  at lines 481, 507, 533, 559, 585, 611, 637, 663,
        689, 715, 741, 767, 793, 848, 874, 900
    .imem = nv50_instmem_new  at lines 821, 928, 960, 992, 1024, 1056, 1088,
        1120, 1152, 1186, 1219, 1252, 1284, 1316, 1350, 1386, 1421, 1456, 1492,
        1528, 1564, 1599, 1632, 1667, 1704, 1741, 1802, 1838, 1874, 1910, 1946,
        1977, 2008
    .imem = gk20a_instmem_new  at lines 1774, 2035
    nvkm_device_subdev()
       2085  _(INSTMEM, device->imem, &device->imem->subdev);
       2530  _(NVKM_SUBDEV_INSTMEM, imem);
user.c:
    nvkm_udevice_info()
         47  struct nvkm_instmem *imem = device->imem;
        115  if (imem && args->v0.ram_size > 0)
        116  args->v0.ram_user = args->v0.ram_user - imem->reserved;
/linux-4.4.14/arch/ia64/sn/kernel/
bte_error.c:
    shub1_bte_error_handler()
         43  ii_imem_u_t imem;   /* II IMEM Register */
        109  imem.ii_imem_regval = REMOTE_HUB_L(nasid, IIO_IMEM);
        110  imem.ii_imem_fld_s.i_b0_esd = imem.ii_imem_fld_s.i_b1_esd = 1;
        111  REMOTE_HUB_S(nasid, IIO_IMEM, imem.ii_imem_regval);
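On SN2, `imem` is an overlay union for the shub's II IMEM register; the error handler sets the error-state-dump bit for both block transfer engines in one read-modify-write. A sketch following the listing (types and accessors come from the ia64 sn headers):

static void sk_bte_request_error_dump(nasid_t nasid)
{
        ii_imem_u_t imem;                       /* II IMEM register overlay */

        imem.ii_imem_regval = REMOTE_HUB_L(nasid, IIO_IMEM);
        imem.ii_imem_fld_s.i_b0_esd = 1;        /* BTE block 0: dump error state */
        imem.ii_imem_fld_s.i_b1_esd = 1;        /* BTE block 1: dump error state */
        REMOTE_HUB_S(nasid, IIO_IMEM, imem.ii_imem_regval);
}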
/linux-4.4.14/drivers/gpu/drm/nouveau/include/nvkm/core/
device.h:
    struct nvkm_device
        112  struct nvkm_instmem *imem;   /* member */
    struct nvkm_device_chip
        177  int (*imem)(struct nvkm_device *, int idx, struct nvkm_instmem **);   /* member */
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/dma/
user.c:
    nvkm_dmaobj_ctor()
         68  struct nvkm_instmem *instmem = device->imem;
/linux-4.4.14/drivers/clk/samsung/
clk-s5pv210.c:
        672  GATE(CLK_IMEM, "imem", "dout_hclkm", CLK_GATE_IP0, 5, 0, 0),
        734  GATE(CLK_IMEM, "imem", "dout_hclkd", CLK_GATE_IP0, 5, 0, 0),