Lines Matching refs:device in drivers/gpu/drm/nouveau/nvkm/engine/gr/nv20.c
33 struct nvkm_device *device = gr->base.engine.subdev.device; in nv20_gr_chan_fini() local
37 nvkm_mask(device, 0x400720, 0x00000001, 0x00000000); in nv20_gr_chan_fini()
38 if (nvkm_rd32(device, 0x400144) & 0x00010000) in nv20_gr_chan_fini()
39 chid = (nvkm_rd32(device, 0x400148) & 0x1f000000) >> 24; in nv20_gr_chan_fini()
41 nvkm_wr32(device, 0x400784, inst >> 4); in nv20_gr_chan_fini()
42 nvkm_wr32(device, 0x400788, 0x00000002); in nv20_gr_chan_fini()
43 nvkm_msec(device, 2000, in nv20_gr_chan_fini()
44 if (!nvkm_rd32(device, 0x400700)) in nv20_gr_chan_fini()
47 nvkm_wr32(device, 0x400144, 0x10000000); in nv20_gr_chan_fini()
48 nvkm_mask(device, 0x400148, 0xff000000, 0x1f000000); in nv20_gr_chan_fini()
50 nvkm_mask(device, 0x400720, 0x00000001, 0x00000001); in nv20_gr_chan_fini()
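The nv20_gr_chan_fini() matches above are the channel teardown sequence: bit 0 of 0x400720 is cleared to pause PGRAPH fifo access, the resident channel id is pulled out of 0x400148 when 0x400144 has bit 16 set, the switch registers 0x400784/0x400788 are pointed at the channel's instance, and nvkm_msec() polls 0x400700 with a 2000 ms bound before fifo access is restored. Lines that do not reference device (for example the comparison of the resident chid against the channel being shut down) are not shown. Below is a minimal user-space sketch of the same sequence; regs[], mmio_rd32(), mmio_wr32() and mmio_mask() are invented stand-ins for the nvkm MMIO helpers, the register interpretation in the comments is inferred from this listing, and the open-ended poll stands in for the nvkm_msec() timeout.

/* Minimal user-space sketch of the sequence above.  regs[] and the
 * mmio_*() helpers are invented stand-ins for nvkm_rd32()/nvkm_wr32()/
 * nvkm_mask(); the register interpretation in the comments is inferred
 * from this listing, and the open-ended poll stands in for nvkm_msec(). */
#include <stdint.h>
#include <stdio.h>

static uint32_t regs[0x800000 / 4];                 /* fake PGRAPH MMIO space   */

static uint32_t mmio_rd32(uint32_t a)             { return regs[a / 4]; }
static void     mmio_wr32(uint32_t a, uint32_t v) { regs[a / 4] = v; }
static void     mmio_mask(uint32_t a, uint32_t m, uint32_t v)
{
        mmio_wr32(a, (mmio_rd32(a) & ~m) | v);
}

static void chan_fini_sketch(uint32_t inst)
{
        int chid = -1;

        mmio_mask(0x400720, 0x00000001, 0x00000000); /* pause PGRAPH fifo access  */
        if (mmio_rd32(0x400144) & 0x00010000)        /* a context looks resident  */
                chid = (mmio_rd32(0x400148) & 0x1f000000) >> 24;
        if (chid >= 0) {                             /* real code also matches it */
                mmio_wr32(0x400784, inst >> 4);      /* against the dying channel */
                mmio_wr32(0x400788, 0x00000002);     /* trigger the ctx switch    */
                while (mmio_rd32(0x400700))          /* real code bounds this     */
                        ;                            /* with nvkm_msec(.., 2000)  */
                mmio_wr32(0x400144, 0x10000000);
                mmio_mask(0x400148, 0xff000000, 0x1f000000);
        }
        mmio_mask(0x400720, 0x00000001, 0x00000001); /* resume fifo access        */
}

int main(void)
{
        chan_fini_sketch(0x00012000);
        printf("0x400720 = 0x%08x\n", (unsigned)mmio_rd32(0x400720));
        return 0;
}

In the driver, the two clean-up writes (0x400144 = 0x10000000 and the 0x400148 mask) appear to run only when the resident chid matches the channel being shut down; the sketch folds that check into the chid guard.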
88 ret = nvkm_memory_new(gr->base.engine.subdev.device, in nv20_gr_chan_new()
151 struct nvkm_device *device = gr->base.engine.subdev.device; in nv20_gr_tile() local
152 struct nvkm_fifo *fifo = device->fifo; in nv20_gr_tile()
158 nvkm_wr32(device, NV20_PGRAPH_TLIMIT(i), tile->limit); in nv20_gr_tile()
159 nvkm_wr32(device, NV20_PGRAPH_TSIZE(i), tile->pitch); in nv20_gr_tile()
160 nvkm_wr32(device, NV20_PGRAPH_TILE(i), tile->addr); in nv20_gr_tile()
162 nvkm_wr32(device, NV10_PGRAPH_RDI_INDEX, 0x00EA0030 + 4 * i); in nv20_gr_tile()
163 nvkm_wr32(device, NV10_PGRAPH_RDI_DATA, tile->limit); in nv20_gr_tile()
164 nvkm_wr32(device, NV10_PGRAPH_RDI_INDEX, 0x00EA0050 + 4 * i); in nv20_gr_tile()
165 nvkm_wr32(device, NV10_PGRAPH_RDI_DATA, tile->pitch); in nv20_gr_tile()
166 nvkm_wr32(device, NV10_PGRAPH_RDI_INDEX, 0x00EA0010 + 4 * i); in nv20_gr_tile()
167 nvkm_wr32(device, NV10_PGRAPH_RDI_DATA, tile->addr); in nv20_gr_tile()
169 if (device->chipset != 0x34) { in nv20_gr_tile()
170 nvkm_wr32(device, NV20_PGRAPH_ZCOMP(i), tile->zcomp); in nv20_gr_tile()
171 nvkm_wr32(device, NV10_PGRAPH_RDI_INDEX, 0x00ea0090 + 4 * i); in nv20_gr_tile()
172 nvkm_wr32(device, NV10_PGRAPH_RDI_DATA, tile->zcomp); in nv20_gr_tile()
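nv20_gr_tile() writes each tile region twice: once to the directly mapped NV20_PGRAPH_TLIMIT/TSIZE/TILE registers (lines 158-160) and once through the indirect RDI window, by selecting an index in NV10_PGRAPH_RDI_INDEX and pushing the value through NV10_PGRAPH_RDI_DATA (lines 162-167), with the ZCOMP mirror skipped on chipset 0x34. The sketch below isolates that index-then-data idiom; mmio_wr32() is an invented stand-in and the two RDI offsets are illustrative placeholders, not values taken from the nouveau register headers.

/* Sketch of the RDI index/data idiom; not the nvkm API. */
#include <stdint.h>

#define RDI_INDEX 0x00400750u    /* placeholder for NV10_PGRAPH_RDI_INDEX */
#define RDI_DATA  0x00400754u    /* placeholder for NV10_PGRAPH_RDI_DATA  */

static uint32_t regs[0x800000 / 4];                 /* fake PGRAPH MMIO space */
static void mmio_wr32(uint32_t a, uint32_t v) { regs[a / 4] = v; }

/* Select an RDI index, then push one data word through the window. */
static void rdi_wr32(uint32_t index, uint32_t data)
{
        mmio_wr32(RDI_INDEX, index);
        mmio_wr32(RDI_DATA, data);
}

/* Mirror one tile region the way lines 162-167 do: limit, pitch, address. */
static void tile_mirror_sketch(int i, uint32_t limit, uint32_t pitch, uint32_t addr)
{
        rdi_wr32(0x00EA0030 + 4 * i, limit);
        rdi_wr32(0x00EA0050 + 4 * i, pitch);
        rdi_wr32(0x00EA0010 + 4 * i, addr);
}

int main(void)
{
        tile_mirror_sketch(0, 0x03ffffff, 0x00000100, 0x00000000);
        return 0;
}

The init matches further down (one RDI_INDEX write at line 239 or 247 followed by a run of RDI_DATA writes and an idle poll) suggest the data port advances through consecutive indices on its own, which is why a single index select is enough there.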
183 struct nvkm_device *device = subdev->device; in nv20_gr_intr() local
185 u32 stat = nvkm_rd32(device, NV03_PGRAPH_INTR); in nv20_gr_intr()
186 u32 nsource = nvkm_rd32(device, NV03_PGRAPH_NSOURCE); in nv20_gr_intr()
187 u32 nstatus = nvkm_rd32(device, NV03_PGRAPH_NSTATUS); in nv20_gr_intr()
188 u32 addr = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR); in nv20_gr_intr()
192 u32 data = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_DATA); in nv20_gr_intr()
193 u32 class = nvkm_rd32(device, 0x400160 + subc * 4) & 0xfff; in nv20_gr_intr()
198 chan = nvkm_fifo_chan_chid(device->fifo, chid, &flags); in nv20_gr_intr()
200 nvkm_wr32(device, NV03_PGRAPH_INTR, stat); in nv20_gr_intr()
201 nvkm_wr32(device, NV04_PGRAPH_FIFO, 0x00000001); in nv20_gr_intr()
215 nvkm_fifo_chan_put(device->fifo, flags, &chan); in nv20_gr_intr()
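nv20_gr_intr() latches the interrupt state (NV03_PGRAPH_INTR, NSOURCE, NSTATUS, the trapped address and data), reads the class bound to the trapping subchannel from 0x400160 + 4 * subc (line 193), resolves the owning channel with nvkm_fifo_chan_chid(), acknowledges the latched bits by writing them back to NV03_PGRAPH_INTR, restarts PGRAPH by writing 1 to NV04_PGRAPH_FIFO, and finally releases the channel with nvkm_fifo_chan_put(). The skeleton below only captures that ordering; struct gr_irq, ack_intr(), kick_fifo() and class_reg() are invented stand-ins, and the subchannel decode from the trapped address is left as a comment because the field layout is not part of this listing.

/* Structural skeleton of the handler; all names here are invented stand-ins,
 * and only the ordering plus the line-193 class lookup mirror the listing. */
#include <stdint.h>
#include <stdio.h>

struct gr_irq {                        /* values latched on entry       */
        uint32_t stat;                 /* NV03_PGRAPH_INTR              */
        uint32_t nsource;              /* NV03_PGRAPH_NSOURCE           */
        uint32_t nstatus;              /* NV03_PGRAPH_NSTATUS           */
        uint32_t trapped_addr;         /* NV04_PGRAPH_TRAPPED_ADDR      */
        uint32_t trapped_data;         /* NV04_PGRAPH_TRAPPED_DATA      */
};

/* Line 193: the class bound to subchannel 'subc' is read from
 * 0x400160 + 4 * subc and masked to its low 12 bits. */
static uint32_t class_reg(unsigned subc)
{
        return 0x400160 + 4 * subc;
}

/* Stand-ins for the two acknowledge writes at lines 200-201. */
static void ack_intr(uint32_t stat) { (void)stat; /* wr32(NV03_PGRAPH_INTR, stat) */ }
static void kick_fifo(void)         { /* wr32(NV04_PGRAPH_FIFO, 0x00000001) */ }

static void gr_intr_sketch(const struct gr_irq *r)
{
        /* 1. decode chid/subc/mthd from trapped_addr (field layout not shown
         *    in this listing) and pin the channel via nvkm_fifo_chan_chid()  */
        /* 2. acknowledge exactly the latched bits, then restart PGRAPH       */
        ack_intr(r->stat);
        kick_fifo();
        /* 3. report against the resolved channel, then release it with
         *    nvkm_fifo_chan_put() (line 215)                                 */
        printf("PGRAPH intr %08x nsource %08x nstatus %08x class@0x%08x\n",
               (unsigned)r->stat, (unsigned)r->nsource, (unsigned)r->nstatus,
               (unsigned)class_reg(0));
}

int main(void)
{
        struct gr_irq r = { .stat = 0x00100000, .trapped_addr = 0 };
        gr_intr_sketch(&r);
        return 0;
}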
222 return nvkm_memory_new(gr->base.engine.subdev.device, in nv20_gr_oneinit()
231 struct nvkm_device *device = gr->base.engine.subdev.device; in nv20_gr_init() local
235 nvkm_wr32(device, NV20_PGRAPH_CHANNEL_CTX_TABLE, in nv20_gr_init()
238 if (device->chipset == 0x20) { in nv20_gr_init()
239 nvkm_wr32(device, NV10_PGRAPH_RDI_INDEX, 0x003d0000); in nv20_gr_init()
241 nvkm_wr32(device, NV10_PGRAPH_RDI_DATA, 0x00000000); in nv20_gr_init()
242 nvkm_msec(device, 2000, in nv20_gr_init()
243 if (!nvkm_rd32(device, 0x400700)) in nv20_gr_init()
247 nvkm_wr32(device, NV10_PGRAPH_RDI_INDEX, 0x02c80000); in nv20_gr_init()
249 nvkm_wr32(device, NV10_PGRAPH_RDI_DATA, 0x00000000); in nv20_gr_init()
250 nvkm_msec(device, 2000, in nv20_gr_init()
251 if (!nvkm_rd32(device, 0x400700)) in nv20_gr_init()
256 nvkm_wr32(device, NV03_PGRAPH_INTR , 0xFFFFFFFF); in nv20_gr_init()
257 nvkm_wr32(device, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF); in nv20_gr_init()
259 nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0xFFFFFFFF); in nv20_gr_init()
260 nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0x00000000); in nv20_gr_init()
261 nvkm_wr32(device, NV04_PGRAPH_DEBUG_1, 0x00118700); in nv20_gr_init()
262 nvkm_wr32(device, NV04_PGRAPH_DEBUG_3, 0xF3CE0475); /* 0x4 = auto ctx switch */ in nv20_gr_init()
263 nvkm_wr32(device, NV10_PGRAPH_DEBUG_4, 0x00000000); in nv20_gr_init()
264 nvkm_wr32(device, 0x40009C , 0x00000040); in nv20_gr_init()
266 if (device->chipset >= 0x25) { in nv20_gr_init()
267 nvkm_wr32(device, 0x400890, 0x00a8cfff); in nv20_gr_init()
268 nvkm_wr32(device, 0x400610, 0x304B1FB6); in nv20_gr_init()
269 nvkm_wr32(device, 0x400B80, 0x1cbd3883); in nv20_gr_init()
270 nvkm_wr32(device, 0x400B84, 0x44000000); in nv20_gr_init()
271 nvkm_wr32(device, 0x400098, 0x40000080); in nv20_gr_init()
272 nvkm_wr32(device, 0x400B88, 0x000000ff); in nv20_gr_init()
275 nvkm_wr32(device, 0x400880, 0x0008c7df); in nv20_gr_init()
276 nvkm_wr32(device, 0x400094, 0x00000005); in nv20_gr_init()
277 nvkm_wr32(device, 0x400B80, 0x45eae20e); in nv20_gr_init()
278 nvkm_wr32(device, 0x400B84, 0x24000000); in nv20_gr_init()
279 nvkm_wr32(device, 0x400098, 0x00000040); in nv20_gr_init()
280 nvkm_wr32(device, NV10_PGRAPH_RDI_INDEX, 0x00E00038); in nv20_gr_init()
281 nvkm_wr32(device, NV10_PGRAPH_RDI_DATA , 0x00000030); in nv20_gr_init()
282 nvkm_wr32(device, NV10_PGRAPH_RDI_INDEX, 0x00E10038); in nv20_gr_init()
283 nvkm_wr32(device, NV10_PGRAPH_RDI_DATA , 0x00000030); in nv20_gr_init()
286 nvkm_wr32(device, 0x4009a0, nvkm_rd32(device, 0x100324)); in nv20_gr_init()
287 nvkm_wr32(device, NV10_PGRAPH_RDI_INDEX, 0x00EA000C); in nv20_gr_init()
288 nvkm_wr32(device, NV10_PGRAPH_RDI_DATA, nvkm_rd32(device, 0x100324)); in nv20_gr_init()
290 nvkm_wr32(device, NV10_PGRAPH_CTX_CONTROL, 0x10000100); in nv20_gr_init()
291 nvkm_wr32(device, NV10_PGRAPH_STATE , 0xFFFFFFFF); in nv20_gr_init()
293 tmp = nvkm_rd32(device, NV10_PGRAPH_SURFACE) & 0x0007ff00; in nv20_gr_init()
294 nvkm_wr32(device, NV10_PGRAPH_SURFACE, tmp); in nv20_gr_init()
295 tmp = nvkm_rd32(device, NV10_PGRAPH_SURFACE) | 0x00020100; in nv20_gr_init()
296 nvkm_wr32(device, NV10_PGRAPH_SURFACE, tmp); in nv20_gr_init()
299 vramsz = device->func->resource_size(device, 1) - 1; in nv20_gr_init()
300 nvkm_wr32(device, 0x4009A4, nvkm_rd32(device, 0x100200)); in nv20_gr_init()
301 nvkm_wr32(device, 0x4009A8, nvkm_rd32(device, 0x100204)); in nv20_gr_init()
302 nvkm_wr32(device, NV10_PGRAPH_RDI_INDEX, 0x00EA0000); in nv20_gr_init()
303 nvkm_wr32(device, NV10_PGRAPH_RDI_DATA , nvkm_rd32(device, 0x100200)); in nv20_gr_init()
304 nvkm_wr32(device, NV10_PGRAPH_RDI_INDEX, 0x00EA0004); in nv20_gr_init()
305 nvkm_wr32(device, NV10_PGRAPH_RDI_DATA , nvkm_rd32(device, 0x100204)); in nv20_gr_init()
306 nvkm_wr32(device, 0x400820, 0); in nv20_gr_init()
307 nvkm_wr32(device, 0x400824, 0); in nv20_gr_init()
308 nvkm_wr32(device, 0x400864, vramsz - 1); in nv20_gr_init()
309 nvkm_wr32(device, 0x400868, vramsz - 1); in nv20_gr_init()
312 nvkm_wr32(device, 0x400B20, 0x00000000); in nv20_gr_init()
313 nvkm_wr32(device, 0x400B04, 0xFFFFFFFF); in nv20_gr_init()
315 nvkm_wr32(device, NV03_PGRAPH_ABS_UCLIP_XMIN, 0); in nv20_gr_init()
316 nvkm_wr32(device, NV03_PGRAPH_ABS_UCLIP_YMIN, 0); in nv20_gr_init()
317 nvkm_wr32(device, NV03_PGRAPH_ABS_UCLIP_XMAX, 0x7fff); in nv20_gr_init()
318 nvkm_wr32(device, NV03_PGRAPH_ABS_UCLIP_YMAX, 0x7fff); in nv20_gr_init()
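The nv20_gr_init() matches fall into a few phases: point the hardware at the channel context table (line 235), clear state through the RDI window with an idle poll on 0x400700 and a chipset 0x20 / non-0x20 split (lines 238-251), acknowledge and enable every PGRAPH interrupt source (lines 256-257), program the DEBUG and limit registers with an extra block for chipset >= 0x25 (lines 259-288), set CTX_CONTROL/STATE and do a read-modify-write of NV10_PGRAPH_SURFACE (lines 290-296), then copy what appear to be framebuffer configuration registers (0x100200/0x100204) into PGRAPH and its RDI mirror and clamp 0x400864/0x400868 using the BAR1 size (lines 299-309). The recurring idiom is the bounded wait: nvkm_msec(device, 2000, ...) polls until 0x400700 clears or 2000 ms elapse. A plain-C equivalent of that bounded poll is sketched below; read_status(), now_ns() and wait_pgraph_idle() are assumptions, not the nvkm macro.

/* Bounded-poll sketch standing in for nvkm_msec(device, 2000, ...) as used
 * around lines 242-251.  Every helper here is an assumption, not the nvkm
 * macro, which lives in the nvkm headers. */
#define _POSIX_C_SOURCE 199309L
#include <stdbool.h>
#include <stdint.h>
#include <time.h>

static uint32_t read_status(void) { return 0; }  /* stand-in for rd32(0x400700) */

static uint64_t now_ns(void)
{
        struct timespec ts;
        clock_gettime(CLOCK_MONOTONIC, &ts);
        return (uint64_t)ts.tv_sec * 1000000000ull + ts.tv_nsec;
}

/* Returns true if PGRAPH went idle within timeout_ms, false on timeout. */
static bool wait_pgraph_idle(unsigned timeout_ms)
{
        uint64_t deadline = now_ns() + (uint64_t)timeout_ms * 1000000ull;

        do {
                if (!read_status())              /* 0x400700 == 0 means idle */
                        return true;
        } while (now_ns() < deadline);

        return false;
}

int main(void)
{
        return wait_pgraph_idle(2000) ? 0 : 1;
}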
331 nv20_gr_new_(const struct nvkm_gr_func *func, struct nvkm_device *device, in nv20_gr_new_() argument
340 return nvkm_gr_ctor(func, device, index, 0x00001000, true, &gr->base); in nv20_gr_new_()
372 nv20_gr_new(struct nvkm_device *device, int index, struct nvkm_gr **pgr) in nv20_gr_new() argument
374 return nv20_gr_new_(&nv20_gr, device, index, pgr); in nv20_gr_new()
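The last two groups are the usual nvkm two-step constructor: nv20_gr_new() forwards the chip's nvkm_gr_func table to nv20_gr_new_(), which allocates the nv20_gr wrapper (in lines not matching the search) and runs nvkm_gr_ctor() on the embedded base with the engine index, the 0x00001000 argument (presumably the PGRAPH bit of the PMC enable mask) and enable = true. A generic sketch of that embed-the-base pattern follows; every name in it is invented, only the shape mirrors the listing.

/* Generic sketch of the embed-the-base construction used by nv20_gr_new()
 * and nv20_gr_new_().  Every name here is invented; only the shape (per-chip
 * func table in, base pointer out, wrapper allocated around it) mirrors the
 * listing. */
#include <stdlib.h>

struct gr_funcs { int chip; };                 /* stands in for nvkm_gr_func   */

struct gr_base {                               /* stands in for struct nvkm_gr */
        const struct gr_funcs *func;
        int index;
};

struct nv20_like_gr {                          /* stands in for struct nv20_gr */
        struct gr_base base;                   /* base object embedded first   */
        void *ctxtab;                          /* chip-specific extra state    */
};

/* Stands in for nvkm_gr_ctor(): initialise the embedded base in place. */
static int gr_base_ctor(const struct gr_funcs *func, int index, struct gr_base *gr)
{
        gr->func = func;
        gr->index = index;
        return 0;
}

/* Mirrors the shape of nv20_gr_new_(): allocate the wrapper, publish the base
 * pointer, then run the base constructor on the embedded member. */
static int gr_new_(const struct gr_funcs *func, int index, struct gr_base **pgr)
{
        struct nv20_like_gr *gr;

        if (!(gr = calloc(1, sizeof(*gr))))
                return -1;                     /* -ENOMEM in the kernel        */
        *pgr = &gr->base;

        return gr_base_ctor(func, index, &gr->base);
}

static const struct gr_funcs nv20_like = { .chip = 0x20 };

int main(void)
{
        struct gr_base *gr = NULL;
        int ret = gr_new_(&nv20_like, 0, &gr);

        free(gr);                              /* base is the first member     */
        return ret;
}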