Lines Matching refs:device
33 return nvkm_rd32(gr->engine.subdev.device, 0x1540); in nv50_gr_units()
44 int ret = nvkm_gpuobj_new(object->engine->subdev.device, 16, in nv50_gr_object_bind()
71 int ret = nvkm_gpuobj_new(gr->base.engine.subdev.device, gr->size, in nv50_gr_chan_bind()
75 nv50_grctx_fill(gr->base.engine.subdev.device, *pgpuobj); in nv50_gr_chan_bind()
241 struct nvkm_device *device = subdev->device; in nv50_gr_prop_trap() local
242 u32 e0c = nvkm_rd32(device, ustatus_addr + 0x04); in nv50_gr_prop_trap()
243 u32 e10 = nvkm_rd32(device, ustatus_addr + 0x08); in nv50_gr_prop_trap()
244 u32 e14 = nvkm_rd32(device, ustatus_addr + 0x0c); in nv50_gr_prop_trap()
245 u32 e18 = nvkm_rd32(device, ustatus_addr + 0x10); in nv50_gr_prop_trap()
246 u32 e1c = nvkm_rd32(device, ustatus_addr + 0x14); in nv50_gr_prop_trap()
247 u32 e20 = nvkm_rd32(device, ustatus_addr + 0x18); in nv50_gr_prop_trap()
248 u32 e24 = nvkm_rd32(device, ustatus_addr + 0x1c); in nv50_gr_prop_trap()
283 struct nvkm_device *device = subdev->device; in nv50_gr_mp_trap() local
284 u32 units = nvkm_rd32(device, 0x1540); in nv50_gr_mp_trap()
292 if (device->chipset < 0xa0) in nv50_gr_mp_trap()
296 mp10 = nvkm_rd32(device, addr + 0x10); in nv50_gr_mp_trap()
297 status = nvkm_rd32(device, addr + 0x14); in nv50_gr_mp_trap()
301 nvkm_rd32(device, addr + 0x20); in nv50_gr_mp_trap()
302 pc = nvkm_rd32(device, addr + 0x24); in nv50_gr_mp_trap()
303 oplow = nvkm_rd32(device, addr + 0x70); in nv50_gr_mp_trap()
304 ophigh = nvkm_rd32(device, addr + 0x74); in nv50_gr_mp_trap()
313 nvkm_wr32(device, addr + 0x10, mp10); in nv50_gr_mp_trap()
314 nvkm_wr32(device, addr + 0x14, 0); in nv50_gr_mp_trap()
327 struct nvkm_device *device = subdev->device; in nv50_gr_tp_trap() local
328 u32 units = nvkm_rd32(device, 0x1540); in nv50_gr_tp_trap()
336 if (device->chipset < 0xa0) in nv50_gr_tp_trap()
340 ustatus = nvkm_rd32(device, ustatus_addr) & 0x7fffffff; in nv50_gr_tp_trap()
350 nvkm_rd32(device, r)); in nv50_gr_tp_trap()
385 nvkm_wr32(device, ustatus_addr, 0xc0000000); in nv50_gr_tp_trap()
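
Both nv50_gr_mp_trap() and nv50_gr_tp_trap() above read the TP enable mask from MMIO register 0x1540 and then address per-TP register blocks with a chipset-dependent stride, the same (i << 12) vs (i << 11) split that shows up again in the nv50_gr_init() lines further down. The following is a minimal standalone sketch of that addressing pattern only: rd32() and for_each_tp() are hypothetical stand-ins invented for the sketch (rd32() mimics nvkm_rd32()), and the 0x408900/0x408600 bases are simply the ones written in the nv50_gr_init() lines below.

/*
 * Illustrative sketch only (not nvkm code): walk the TP enable mask
 * from register 0x1540 and pick the chipset-dependent block stride.
 * rd32() is a hypothetical stand-in for nvkm_rd32(); the bases are the
 * ones used by the nv50_gr_init() lines in this listing.
 */
#include <stdint.h>
#include <stdio.h>

static uint32_t rd32(uint32_t addr)
{
	return addr == 0x1540 ? 0x00000003 : 0;	/* fake mask: TPs 0 and 1 */
}

static void for_each_tp(unsigned chipset, uint32_t base_old, uint32_t base_new)
{
	uint32_t units = rd32(0x1540);
	int i;

	for (i = 0; i < 16; i++) {
		if (!(units & (1u << i)))
			continue;		/* TP not populated */
		/* pre-NVA0 blocks are 0x1000 apart, NVA0+ blocks 0x800 apart */
		uint32_t addr = chipset < 0xa0 ? base_old + (i << 12)
					       : base_new + (i << 11);
		printf("TP %d: register block at 0x%06x\n", i, (unsigned)addr);
	}
}

int main(void)
{
	for_each_tp(0x50, 0x408900, 0x408600);	/* G80-era addressing */
	for_each_tp(0xa3, 0x408900, 0x408600);	/* GT21x-era addressing */
	return 0;
}
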
397 struct nvkm_device *device = subdev->device; in nv50_gr_trap_handler() local
398 u32 status = nvkm_rd32(device, 0x400108); in nv50_gr_trap_handler()
411 ustatus = nvkm_rd32(device, 0x400804) & 0x7fffffff; in nv50_gr_trap_handler()
416 nvkm_wr32(device, 0x400500, 0x00000000); in nv50_gr_trap_handler()
420 u32 addr = nvkm_rd32(device, 0x400808); in nv50_gr_trap_handler()
423 u32 datal = nvkm_rd32(device, 0x40080c); in nv50_gr_trap_handler()
424 u32 datah = nvkm_rd32(device, 0x400810); in nv50_gr_trap_handler()
425 u32 class = nvkm_rd32(device, 0x400814); in nv50_gr_trap_handler()
426 u32 r848 = nvkm_rd32(device, 0x400848); in nv50_gr_trap_handler()
441 nvkm_wr32(device, 0x400808, 0); in nv50_gr_trap_handler()
442 nvkm_wr32(device, 0x4008e8, nvkm_rd32(device, 0x4008e8) & 3); in nv50_gr_trap_handler()
443 nvkm_wr32(device, 0x400848, 0); in nv50_gr_trap_handler()
448 u32 addr = nvkm_rd32(device, 0x40084c); in nv50_gr_trap_handler()
451 u32 data = nvkm_rd32(device, 0x40085c); in nv50_gr_trap_handler()
452 u32 class = nvkm_rd32(device, 0x400814); in nv50_gr_trap_handler()
466 nvkm_wr32(device, 0x40084c, 0); in nv50_gr_trap_handler()
475 nvkm_wr32(device, 0x400804, 0xc0000000); in nv50_gr_trap_handler()
476 nvkm_wr32(device, 0x400108, 0x001); in nv50_gr_trap_handler()
484 u32 ustatus = nvkm_rd32(device, 0x406800) & 0x7fffffff; in nv50_gr_trap_handler()
491 nvkm_rd32(device, 0x406804), in nv50_gr_trap_handler()
492 nvkm_rd32(device, 0x406808), in nv50_gr_trap_handler()
493 nvkm_rd32(device, 0x40680c), in nv50_gr_trap_handler()
494 nvkm_rd32(device, 0x406810)); in nv50_gr_trap_handler()
498 nvkm_wr32(device, 0x400040, 2); in nv50_gr_trap_handler()
499 nvkm_wr32(device, 0x400040, 0); in nv50_gr_trap_handler()
500 nvkm_wr32(device, 0x406800, 0xc0000000); in nv50_gr_trap_handler()
501 nvkm_wr32(device, 0x400108, 0x002); in nv50_gr_trap_handler()
507 u32 ustatus = nvkm_rd32(device, 0x400c04) & 0x7fffffff; in nv50_gr_trap_handler()
514 nvkm_rd32(device, 0x400c00), in nv50_gr_trap_handler()
515 nvkm_rd32(device, 0x400c08), in nv50_gr_trap_handler()
516 nvkm_rd32(device, 0x400c0c), in nv50_gr_trap_handler()
517 nvkm_rd32(device, 0x400c10)); in nv50_gr_trap_handler()
520 nvkm_wr32(device, 0x400c04, 0xc0000000); in nv50_gr_trap_handler()
521 nvkm_wr32(device, 0x400108, 0x004); in nv50_gr_trap_handler()
527 ustatus = nvkm_rd32(device, 0x401800) & 0x7fffffff; in nv50_gr_trap_handler()
534 nvkm_rd32(device, 0x401804), in nv50_gr_trap_handler()
535 nvkm_rd32(device, 0x401808), in nv50_gr_trap_handler()
536 nvkm_rd32(device, 0x40180c), in nv50_gr_trap_handler()
537 nvkm_rd32(device, 0x401810)); in nv50_gr_trap_handler()
541 nvkm_wr32(device, 0x400040, 0x80); in nv50_gr_trap_handler()
542 nvkm_wr32(device, 0x400040, 0); in nv50_gr_trap_handler()
543 nvkm_wr32(device, 0x401800, 0xc0000000); in nv50_gr_trap_handler()
544 nvkm_wr32(device, 0x400108, 0x008); in nv50_gr_trap_handler()
550 ustatus = nvkm_rd32(device, 0x405018) & 0x7fffffff; in nv50_gr_trap_handler()
558 nvkm_rd32(device, 0x405000), in nv50_gr_trap_handler()
559 nvkm_rd32(device, 0x405004), in nv50_gr_trap_handler()
560 nvkm_rd32(device, 0x405008), in nv50_gr_trap_handler()
561 nvkm_rd32(device, 0x40500c), in nv50_gr_trap_handler()
562 nvkm_rd32(device, 0x405010), in nv50_gr_trap_handler()
563 nvkm_rd32(device, 0x405014), in nv50_gr_trap_handler()
564 nvkm_rd32(device, 0x40501c)); in nv50_gr_trap_handler()
567 nvkm_wr32(device, 0x405018, 0xc0000000); in nv50_gr_trap_handler()
568 nvkm_wr32(device, 0x400108, 0x010); in nv50_gr_trap_handler()
576 ustatus = nvkm_rd32(device, 0x402000) & 0x7fffffff; in nv50_gr_trap_handler()
579 nvkm_wr32(device, 0x402000, 0xc0000000); in nv50_gr_trap_handler()
587 nvkm_wr32(device, 0x400108, 0x040); in nv50_gr_trap_handler()
595 nvkm_wr32(device, 0x400108, 0x080); in nv50_gr_trap_handler()
604 nvkm_wr32(device, 0x400108, 0x100); in nv50_gr_trap_handler()
611 nvkm_wr32(device, 0x400108, status); in nv50_gr_trap_handler()
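
The nv50_gr_trap_handler() lines above repeat one pattern per trap source: read the unit's status register with the top bit masked off (& 0x7fffffff), dump a few context registers, reset the unit by writing 0xc0000000 back to it, then acknowledge that source's bit in 0x400108. The standalone sketch below reproduces just that loop. It lists only bit/register pairs that appear adjacent in the matched lines; rd32(), wr32() and handle_traps() are hypothetical stand-ins written for the sketch, not nvkm helpers.

/*
 * Standalone sketch of the per-source pattern in nv50_gr_trap_handler():
 * read <ustatus> & 0x7fffffff, report, write 0xc0000000 to reset the
 * unit, then ack the bit in 0x400108.  rd32()/wr32() stand in for
 * nvkm_rd32()/nvkm_wr32() over a fake register file.
 */
#include <stdint.h>
#include <stdio.h>

static uint32_t regs[0x1000000 >> 2];	/* fake register file for the demo */

static uint32_t rd32(uint32_t addr) { return regs[addr >> 2]; }
static void     wr32(uint32_t addr, uint32_t v) { regs[addr >> 2] = v; }

static const struct { uint32_t bit; uint32_t ustatus; } trap_srcs[] = {
	{ 0x001, 0x400804 },	/* pairs taken from the matched lines above */
	{ 0x002, 0x406800 },
	{ 0x004, 0x400c04 },
	{ 0x008, 0x401800 },
	{ 0x010, 0x405018 },
};

static void handle_traps(void)
{
	uint32_t status = rd32(0x400108);
	size_t i;

	for (i = 0; i < sizeof(trap_srcs) / sizeof(trap_srcs[0]); i++) {
		if (!(status & trap_srcs[i].bit))
			continue;
		uint32_t ustatus = rd32(trap_srcs[i].ustatus) & 0x7fffffff;
		printf("trap bit 0x%03x: ustatus %08x\n",
		       (unsigned)trap_srcs[i].bit, (unsigned)ustatus);
		wr32(trap_srcs[i].ustatus, 0xc0000000);	/* reset the unit */
		wr32(0x400108, trap_srcs[i].bit);	/* ack this source */
	}
}

int main(void)
{
	wr32(0x400108, 0x005);		/* pretend two sources trapped */
	wr32(0x400804, 0x80000001);
	wr32(0x400c04, 0x80000002);
	handle_traps();
	return 0;
}
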
622 struct nvkm_device *device = subdev->device; in nv50_gr_intr() local
624 u32 stat = nvkm_rd32(device, 0x400100); in nv50_gr_intr()
625 u32 inst = nvkm_rd32(device, 0x40032c) & 0x0fffffff; in nv50_gr_intr()
626 u32 addr = nvkm_rd32(device, 0x400704); in nv50_gr_intr()
629 u32 data = nvkm_rd32(device, 0x400708); in nv50_gr_intr()
630 u32 class = nvkm_rd32(device, 0x400814); in nv50_gr_intr()
638 chan = nvkm_fifo_chan_inst(device->fifo, (u64)inst << 12, &flags); in nv50_gr_intr()
645 u32 ecode = nvkm_rd32(device, 0x400110); in nv50_gr_intr()
658 nvkm_wr32(device, 0x400100, stat); in nv50_gr_intr()
659 nvkm_wr32(device, 0x400500, 0x00010001); in nv50_gr_intr()
670 if (nvkm_rd32(device, 0x400824) & (1 << 31)) in nv50_gr_intr()
671 nvkm_wr32(device, 0x400824, nvkm_rd32(device, 0x400824) & ~(1 << 31)); in nv50_gr_intr()
673 nvkm_fifo_chan_put(device->fifo, flags, &chan); in nv50_gr_intr()
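
The nv50_gr_intr() lines above read the interrupt status (0x400100), the current channel instance (0x40032c), and the faulting address/data/class registers, then look the channel up through nvkm_fifo_chan_inst() with the instance shifted left by 12. The sketch below shows that decode; the (u64)inst << 12 conversion matches the listing, but the subchannel/method bit positions are an assumption (the usual nv50 PGRAPH method-address layout) and decode_irq() is a hypothetical helper written only for this illustration.

/*
 * Sketch of the decode done around the nv50_gr_intr() reads above.
 * The subc/mthd bit positions are assumed, not proven by the matched
 * lines; the instance conversion follows the nvkm_fifo_chan_inst() call
 * in the listing.
 */
#include <stdint.h>
#include <stdio.h>

struct gr_irq_info {
	uint32_t subc;	/* subchannel (assumed bits 18:16 of 0x400704) */
	uint32_t mthd;	/* method offset (assumed bits 12:2 of 0x400704) */
	uint64_t inst;	/* channel instance address, 4 KiB aligned */
};

static struct gr_irq_info decode_irq(uint32_t addr_reg, uint32_t inst_reg)
{
	struct gr_irq_info info;

	info.subc = (addr_reg & 0x00070000) >> 16;
	info.mthd =  addr_reg & 0x00001ffc;
	info.inst = (uint64_t)(inst_reg & 0x0fffffff) << 12;
	return info;
}

int main(void)
{
	struct gr_irq_info info = decode_irq(0x00020100, 0x00001234);

	printf("subc %u mthd 0x%04x inst 0x%09llx\n",
	       (unsigned)info.subc, (unsigned)info.mthd,
	       (unsigned long long)info.inst);
	return 0;
}
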
680 struct nvkm_device *device = gr->base.engine.subdev.device; in nv50_gr_init() local
684 nvkm_wr32(device, 0x40008c, 0x00000004); in nv50_gr_init()
687 nvkm_wr32(device, 0x400804, 0xc0000000); in nv50_gr_init()
688 nvkm_wr32(device, 0x406800, 0xc0000000); in nv50_gr_init()
689 nvkm_wr32(device, 0x400c04, 0xc0000000); in nv50_gr_init()
690 nvkm_wr32(device, 0x401800, 0xc0000000); in nv50_gr_init()
691 nvkm_wr32(device, 0x405018, 0xc0000000); in nv50_gr_init()
692 nvkm_wr32(device, 0x402000, 0xc0000000); in nv50_gr_init()
694 units = nvkm_rd32(device, 0x001540); in nv50_gr_init()
699 if (device->chipset < 0xa0) { in nv50_gr_init()
700 nvkm_wr32(device, 0x408900 + (i << 12), 0xc0000000); in nv50_gr_init()
701 nvkm_wr32(device, 0x408e08 + (i << 12), 0xc0000000); in nv50_gr_init()
702 nvkm_wr32(device, 0x408314 + (i << 12), 0xc0000000); in nv50_gr_init()
704 nvkm_wr32(device, 0x408600 + (i << 11), 0xc0000000); in nv50_gr_init()
705 nvkm_wr32(device, 0x408708 + (i << 11), 0xc0000000); in nv50_gr_init()
706 nvkm_wr32(device, 0x40831c + (i << 11), 0xc0000000); in nv50_gr_init()
710 nvkm_wr32(device, 0x400108, 0xffffffff); in nv50_gr_init()
711 nvkm_wr32(device, 0x400138, 0xffffffff); in nv50_gr_init()
712 nvkm_wr32(device, 0x400100, 0xffffffff); in nv50_gr_init()
713 nvkm_wr32(device, 0x40013c, 0xffffffff); in nv50_gr_init()
714 nvkm_wr32(device, 0x400500, 0x00010001); in nv50_gr_init()
717 ret = nv50_grctx_init(device, &gr->size); in nv50_gr_init()
721 nvkm_wr32(device, 0x400824, 0x00000000); in nv50_gr_init()
722 nvkm_wr32(device, 0x400828, 0x00000000); in nv50_gr_init()
723 nvkm_wr32(device, 0x40082c, 0x00000000); in nv50_gr_init()
724 nvkm_wr32(device, 0x400830, 0x00000000); in nv50_gr_init()
725 nvkm_wr32(device, 0x40032c, 0x00000000); in nv50_gr_init()
726 nvkm_wr32(device, 0x400330, 0x00000000); in nv50_gr_init()
729 switch (device->chipset & 0xf0) { in nv50_gr_init()
733 nvkm_wr32(device, 0x402ca8, 0x00000800); in nv50_gr_init()
737 if (device->chipset == 0xa0 || in nv50_gr_init()
738 device->chipset == 0xaa || in nv50_gr_init()
739 device->chipset == 0xac) { in nv50_gr_init()
740 nvkm_wr32(device, 0x402ca8, 0x00000802); in nv50_gr_init()
742 nvkm_wr32(device, 0x402cc0, 0x00000000); in nv50_gr_init()
743 nvkm_wr32(device, 0x402ca8, 0x00000002); in nv50_gr_init()
751 nvkm_wr32(device, 0x402c20 + (i * 0x10), 0x00000000); in nv50_gr_init()
752 nvkm_wr32(device, 0x402c24 + (i * 0x10), 0x00000000); in nv50_gr_init()
753 nvkm_wr32(device, 0x402c28 + (i * 0x10), 0x00000000); in nv50_gr_init()
754 nvkm_wr32(device, 0x402c2c + (i * 0x10), 0x00000000); in nv50_gr_init()
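
The nv50_gr_init() lines above program 0x402ca8 with one of three values depending on chipset: 0x800 in the older branch of the switch on (chipset & 0xf0), 0x802 for chipsets 0xa0/0xaa/0xac, and 0x002 with 0x402cc0 cleared first otherwise. The sketch below reproduces only that decision. The written values come straight from the matched lines, but the 0x50/0x80/0x90 case labels are an assumption (the switch arms themselves do not reference "device" and so are not in this listing), and wr32()/setup_402ca8() are hypothetical names used only for the sketch.

/*
 * Sketch of the chipset-dependent 0x402ca8 setup visible in
 * nv50_gr_init().  wr32() stands in for nvkm_wr32(); case labels
 * assumed as noted above.
 */
#include <stdint.h>
#include <stdio.h>

static void wr32(uint32_t addr, uint32_t val)
{
	printf("wr32(0x%06x, 0x%08x)\n", (unsigned)addr, (unsigned)val);
}

static void setup_402ca8(unsigned chipset)
{
	switch (chipset & 0xf0) {
	case 0x50:			/* case labels assumed, see above */
	case 0x80:
	case 0x90:
		wr32(0x402ca8, 0x00000800);
		break;
	default:
		if (chipset == 0xa0 || chipset == 0xaa || chipset == 0xac) {
			wr32(0x402ca8, 0x00000802);
		} else {
			wr32(0x402cc0, 0x00000000);
			wr32(0x402ca8, 0x00000002);
		}
		break;
	}
}

int main(void)
{
	setup_402ca8(0x50);	/* G80 family */
	setup_402ca8(0xa0);	/* GT200 */
	setup_402ca8(0xa3);	/* GT215 */
	return 0;
}
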
761 nv50_gr_new_(const struct nvkm_gr_func *func, struct nvkm_device *device, in nv50_gr_new_() argument
771 return nvkm_gr_ctor(func, device, index, 0x00201000, true, &gr->base); in nv50_gr_new_()
791 nv50_gr_new(struct nvkm_device *device, int index, struct nvkm_gr **pgr) in nv50_gr_new() argument
793 return nv50_gr_new_(&nv50_gr, device, index, pgr); in nv50_gr_new()