
Searched refs:nvkm_rd32 (Results 1 – 168 of 168) sorted by relevance

/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/gr/
Dnv50.c33 return nvkm_rd32(gr->engine.subdev.device, 0x1540); in nv50_gr_units()
242 u32 e0c = nvkm_rd32(device, ustatus_addr + 0x04); in nv50_gr_prop_trap()
243 u32 e10 = nvkm_rd32(device, ustatus_addr + 0x08); in nv50_gr_prop_trap()
244 u32 e14 = nvkm_rd32(device, ustatus_addr + 0x0c); in nv50_gr_prop_trap()
245 u32 e18 = nvkm_rd32(device, ustatus_addr + 0x10); in nv50_gr_prop_trap()
246 u32 e1c = nvkm_rd32(device, ustatus_addr + 0x14); in nv50_gr_prop_trap()
247 u32 e20 = nvkm_rd32(device, ustatus_addr + 0x18); in nv50_gr_prop_trap()
248 u32 e24 = nvkm_rd32(device, ustatus_addr + 0x1c); in nv50_gr_prop_trap()
284 u32 units = nvkm_rd32(device, 0x1540); in nv50_gr_mp_trap()
296 mp10 = nvkm_rd32(device, addr + 0x10); in nv50_gr_mp_trap()
[all …]
Dnv40.c36 return nvkm_rd32(gr->engine.subdev.device, 0x1540); in nv40_gr_units()
103 if (nvkm_rd32(device, 0x40032c) == inst) { in nv40_gr_chan_fini()
110 if (!(nvkm_rd32(device, 0x400300) & 0x00000001)) in nv40_gr_chan_fini()
113 u32 insn = nvkm_rd32(device, 0x400308); in nv40_gr_chan_fini()
122 if (nvkm_rd32(device, 0x400330) == inst) in nv40_gr_chan_fini()
238 u32 stat = nvkm_rd32(device, NV03_PGRAPH_INTR); in nv40_gr_intr()
239 u32 nsource = nvkm_rd32(device, NV03_PGRAPH_NSOURCE); in nv40_gr_intr()
240 u32 nstatus = nvkm_rd32(device, NV03_PGRAPH_NSTATUS); in nv40_gr_intr()
241 u32 inst = nvkm_rd32(device, 0x40032c) & 0x000fffff; in nv40_gr_intr()
242 u32 addr = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR); in nv40_gr_intr()
[all …]
Dg84.c132 for (tmp = nvkm_rd32(device, 0x400380); tmp && idle; tmp >>= 3) { in g84_gr_tlb_flush()
137 for (tmp = nvkm_rd32(device, 0x400384); tmp && idle; tmp >>= 3) { in g84_gr_tlb_flush()
142 for (tmp = nvkm_rd32(device, 0x400388); tmp && idle; tmp >>= 3) { in g84_gr_tlb_flush()
152 tmp = nvkm_rd32(device, 0x400700); in g84_gr_tlb_flush()
157 nvkm_rd32(device, 0x400380)); in g84_gr_tlb_flush()
159 nvkm_rd32(device, 0x400384)); in g84_gr_tlb_flush()
161 nvkm_rd32(device, 0x400388)); in g84_gr_tlb_flush()
167 if (!(nvkm_rd32(device, 0x100c80) & 0x00000001)) in g84_gr_tlb_flush()
Dgf100.c720 nvkm_rd32(device, 0x400700); in gf100_gr_wait_idle()
722 gr_enabled = nvkm_rd32(device, 0x200) & 0x1000; in gf100_gr_wait_idle()
723 ctxsw_active = nvkm_rd32(device, 0x2640) & 0x8000; in gf100_gr_wait_idle()
724 gr_busy = nvkm_rd32(device, 0x40060c) & 0x1; in gf100_gr_wait_idle()
781 if (!(nvkm_rd32(device, 0x400700) & 0x00000004)) in gf100_gr_icmd()
862 trap[0] = nvkm_rd32(device, GPC_UNIT(gpc, 0x0420)) & 0x3fffffff; in gf100_gr_trap_gpc_rop()
863 trap[1] = nvkm_rd32(device, GPC_UNIT(gpc, 0x0434)); in gf100_gr_trap_gpc_rop()
864 trap[2] = nvkm_rd32(device, GPC_UNIT(gpc, 0x0438)); in gf100_gr_trap_gpc_rop()
865 trap[3] = nvkm_rd32(device, GPC_UNIT(gpc, 0x043c)); in gf100_gr_trap_gpc_rop()
901 u32 werr = nvkm_rd32(device, TPC_UNIT(gpc, tpc, 0x648)); in gf100_gr_trap_mp()
[all …]
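
A note on the gf100.c matches above (gf100_gr_wait_idle): they read 0x400700, 0x200, 0x2640 and 0x40060c to decide whether the GR engine is idle. The sketch below shows how such a busy-wait around nvkm_rd32() is typically structured; only the register reads and masks come from the excerpt, while the jiffies-based timeout and return values are assumptions for illustration, and the usual nvkm/kernel headers are assumed to be in scope.

    static int
    gr_wait_idle_sketch(struct nvkm_device *device)
    {
            unsigned long timeout = jiffies + msecs_to_jiffies(2000);
            bool gr_enabled, ctxsw_active, gr_busy;

            do {
                    /* dummy read, so the status sampled below is up to date */
                    nvkm_rd32(device, 0x400700);
                    gr_enabled   = nvkm_rd32(device, 0x000200) & 0x1000;
                    ctxsw_active = nvkm_rd32(device, 0x002640) & 0x8000;
                    gr_busy      = nvkm_rd32(device, 0x40060c) & 0x1;
                    if (!gr_enabled || (!gr_busy && !ctxsw_active))
                            return 0;       /* engine idle (or disabled) */
            } while (time_before(jiffies, timeout));

            return -EAGAIN;                 /* still busy after the timeout */
    }
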
Dnv20.c38 if (nvkm_rd32(device, 0x400144) & 0x00010000) in nv20_gr_chan_fini()
39 chid = (nvkm_rd32(device, 0x400148) & 0x1f000000) >> 24; in nv20_gr_chan_fini()
44 if (!nvkm_rd32(device, 0x400700)) in nv20_gr_chan_fini()
185 u32 stat = nvkm_rd32(device, NV03_PGRAPH_INTR); in nv20_gr_intr()
186 u32 nsource = nvkm_rd32(device, NV03_PGRAPH_NSOURCE); in nv20_gr_intr()
187 u32 nstatus = nvkm_rd32(device, NV03_PGRAPH_NSTATUS); in nv20_gr_intr()
188 u32 addr = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR); in nv20_gr_intr()
192 u32 data = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_DATA); in nv20_gr_intr()
193 u32 class = nvkm_rd32(device, 0x400160 + subc * 4) & 0xfff; in nv20_gr_intr()
243 if (!nvkm_rd32(device, 0x400700)) in nv20_gr_init()
[all …]
Dgm20b.c41 val = nvkm_rd32(device, 0x100c80); in gm20b_gr_init_gpc_mmu()
47 nvkm_wr32(device, 0x4188b0, nvkm_rd32(device, 0x100cc4)); in gm20b_gr_init_gpc_mmu()
48 nvkm_wr32(device, 0x4188b4, nvkm_rd32(device, 0x100cc8)); in gm20b_gr_init_gpc_mmu()
49 nvkm_wr32(device, 0x4188b8, nvkm_rd32(device, 0x100ccc)); in gm20b_gr_init_gpc_mmu()
51 nvkm_wr32(device, 0x4188ac, nvkm_rd32(device, 0x100800)); in gm20b_gr_init_gpc_mmu()
Dnv10.c419 state[__i] = nvkm_rd32(device, NV10_PGRAPH_PIPE_DATA); \
454 xfmode0 = nvkm_rd32(device, NV10_PGRAPH_XFMODE0); in nv17_gr_mthd_lma_window()
455 xfmode1 = nvkm_rd32(device, NV10_PGRAPH_XFMODE1); in nv17_gr_mthd_lma_window()
551 if (nvkm_rd32(device, 0x400144) & 0x00010000) { in nv10_gr_channel()
552 int chid = nvkm_rd32(device, 0x400148) >> 24; in nv10_gr_channel()
589 xfmode0 = nvkm_rd32(device, NV10_PGRAPH_XFMODE0); in nv10_gr_load_pipe()
590 xfmode1 = nvkm_rd32(device, NV10_PGRAPH_XFMODE1); in nv10_gr_load_pipe()
827 int class = nvkm_rd32(device, NV10_PGRAPH_CTX_CACHE(i, 0)) & 0xfff; in nv10_gr_load_dma_vtxbuf()
839 ctx_user = nvkm_rd32(device, NV10_PGRAPH_CTX_USER); in nv10_gr_load_dma_vtxbuf()
841 ctx_switch[i] = nvkm_rd32(device, NV10_PGRAPH_CTX_SWITCH(i)); in nv10_gr_load_dma_vtxbuf()
[all …]
Dnv04.c447 int subc = (nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7; in nv04_gr_set_ctx1()
450 tmp = nvkm_rd32(device, 0x700000 + inst); in nv04_gr_set_ctx1()
465 ctx1 = nvkm_rd32(device, 0x700000 + inst); in nv04_gr_set_ctx_val()
469 tmp = nvkm_rd32(device, 0x70000c + inst); in nv04_gr_set_ctx_val()
510 u8 class = nvkm_rd32(device, 0x700000) & 0x000000ff; in nv04_gr_mthd_set_operation()
561 return nvkm_rd32(device, 0x700000 + (inst << 4)); in nv04_gr_mthd_bind_class()
1018 switch (nvkm_rd32(device, 0x700000 + inst) & 0x000000ff) { in nv04_gr_mthd()
1076 if (nvkm_rd32(device, NV04_PGRAPH_CTX_CONTROL) & 0x00010000) { in nv04_gr_channel()
1077 int chid = nvkm_rd32(device, NV04_PGRAPH_CTX_USER) >> 24; in nv04_gr_channel()
1106 chan->nv04[i] = nvkm_rd32(device, nv04_gr_ctx_regs[i]); in nv04_gr_unload_context()
[all …]
Dnv30.c152 nvkm_wr32(device, 0x4009A4, nvkm_rd32(device, 0x100200)); in nv30_gr_init()
153 nvkm_wr32(device, 0x4009A8, nvkm_rd32(device, 0x100204)); in nv30_gr_init()
156 nvkm_wr32(device, 0x400754, nvkm_rd32(device, 0x100200)); in nv30_gr_init()
158 nvkm_wr32(device, 0x400754, nvkm_rd32(device, 0x100204)); in nv30_gr_init()
Dctxgm20b.c50 idle_timeout_save = nvkm_rd32(device, 0x404154); in gm20b_grctx_generate_main()
67 nvkm_wr32(device, 0x408908, nvkm_rd32(device, 0x410108) | 0x80000000); in gm20b_grctx_generate_main()
Dgm204.c248 tmp = nvkm_rd32(device, 0x100c80); /*XXX: mask? */ in gm204_gr_init()
292 nvkm_wr32(device, GPC_BCAST(0x08ac), nvkm_rd32(device, 0x100800)); in gm204_gr_init()
293 nvkm_wr32(device, GPC_BCAST(0x033c), nvkm_rd32(device, 0x100804)); in gm204_gr_init()
Dctxgk20a.c39 idle_timeout_save = nvkm_rd32(device, 0x404154); in gk20a_grctx_generate_main()
Dctxgf100.c1318 if (nvkm_rd32(device, 0x100c80) & 0x00008000) in gf100_grctx_generate()
1335 if (nvkm_rd32(device, 0x409800) & 0x00000010) in gf100_grctx_generate()
1350 if (nvkm_rd32(device, 0x409800) & 0x80000000) in gf100_grctx_generate()
1363 if (!(nvkm_rd32(device, 0x409b00) & 0x80000000)) in gf100_grctx_generate()
Dgk20a.c156 if (!(nvkm_rd32(device, 0x40910c) & 0x00000006)) in gk20a_gr_wait_mem_scrubbing()
164 if (!(nvkm_rd32(device, 0x41a10c) & 0x00000006)) in gk20a_gr_wait_mem_scrubbing()
Dctxgm204.c943 const u32 fbp_count = nvkm_rd32(device, 0x12006c); in gm204_grctx_generate_rop_active_fbps()
Dgk104.c228 nvkm_wr32(device, GPC_BCAST(0x08ac), nvkm_rd32(device, 0x100800)); in gk104_gr_init()
Dctxgk104.c949 const u32 fbp_count = nvkm_rd32(device, 0x120074); in gk104_grctx_generate_rop_active_fbps()
Dgm107.c357 nvkm_wr32(device, GPC_BCAST(0x08ac), nvkm_rd32(device, 0x100800)); in gm107_gr_init()
Dctxnv50.c301 u32 units = nvkm_rd32(device, 0x1540); in nv50_gr_construct_mmio()
1194 u32 units = nvkm_rd32(device, 0x1540); in nv50_gr_construct_xfer1()
3277 u32 units = nvkm_rd32(device, 0x1540); in nv50_gr_construct_xfer2()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/
Dbase.c44 addr = nvkm_rd32(device, 0x10a4a0); in nvkm_pmu_send()
46 u32 tmp = nvkm_rd32(device, 0x10a4b0); in nvkm_pmu_send()
65 } while (nvkm_rd32(device, 0x10a580) != 0x00000001); in nvkm_pmu_send()
99 u32 addr = nvkm_rd32(device, 0x10a4cc); in nvkm_pmu_recv()
100 if (addr == nvkm_rd32(device, 0x10a4c8)) in nvkm_pmu_recv()
106 } while (nvkm_rd32(device, 0x10a580) != 0x00000002); in nvkm_pmu_recv()
111 process = nvkm_rd32(device, 0x10a1c4); in nvkm_pmu_recv()
112 message = nvkm_rd32(device, 0x10a1c4); in nvkm_pmu_recv()
113 data0 = nvkm_rd32(device, 0x10a1c4); in nvkm_pmu_recv()
114 data1 = nvkm_rd32(device, 0x10a1c4); in nvkm_pmu_recv()
[all …]
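
The base.c matches above (nvkm_pmu_recv) compare two queue pointers at 0x10a4cc/0x10a4c8 and then read 0x10a1c4 four times in a row. A minimal sketch of that receive pattern follows; the register roles (GET/PUT pointers, data port) are inferred from the excerpt, and the real function does additional setup that is omitted here.

    /* Sketch only: pop one PMU message if the queue is non-empty. */
    static bool
    pmu_recv_sketch(struct nvkm_device *device, u32 msg[4])
    {
            u32 get = nvkm_rd32(device, 0x10a4cc);      /* assumed GET pointer */
            if (get == nvkm_rd32(device, 0x10a4c8))     /* assumed PUT pointer */
                    return false;                       /* queue empty */

            msg[0] = nvkm_rd32(device, 0x10a1c4);       /* process */
            msg[1] = nvkm_rd32(device, 0x10a1c4);       /* message */
            msg[2] = nvkm_rd32(device, 0x10a1c4);       /* data0   */
            msg[3] = nvkm_rd32(device, 0x10a1c4);       /* data1   */
            return true;
    }
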
Dgk110.c59 nvkm_rd32(device, 0x000200); in gk110_pmu_pgob()
71 if (!(nvkm_rd32(device, magic[i].addr) & 0x80000000)) in gk110_pmu_pgob()
82 nvkm_rd32(device, 0x000200); in gk110_pmu_pgob()
Dgk104.c40 if (nvkm_rd32(device, 0x00c800) & 0x40000000) { in magic_()
65 nvkm_rd32(device, 0x000200); in gk104_pmu_pgob()
82 nvkm_rd32(device, 0x000200); in gk104_pmu_pgob()
Dmemx.c65 } while (nvkm_rd32(device, 0x10a580) != 0x00000003); in nvkm_memx_init()
83 finish = nvkm_rd32(device, 0x10a1c0) & 0x00ffffff; in nvkm_memx_fini()
132 heads = nvkm_rd32(device, 0x610050); in nvkm_memx_wait_vblank()
136 x = nvkm_rd32(device, 0x610b40 + (0x540 * i)); in nvkm_memx_wait_vblank()
185 res[i] = nvkm_rd32(device, 0x10a1c4); in nvkm_memx_train_result()
Dgk20a.c104 status->busy = nvkm_rd32(device, 0x10a508 + (BUSY_SLOT * 0x10)); in gk20a_pmu_dvfs_get_dev_status()
105 status->total= nvkm_rd32(device, 0x10a508 + (CLK_SLOT * 0x10)); in gk20a_pmu_dvfs_get_dev_status()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/ibus/
Dgf100.c30 u32 addr = nvkm_rd32(device, 0x122120 + (i * 0x0400)); in gf100_ibus_intr_hub()
31 u32 data = nvkm_rd32(device, 0x122124 + (i * 0x0400)); in gf100_ibus_intr_hub()
32 u32 stat = nvkm_rd32(device, 0x122128 + (i * 0x0400)); in gf100_ibus_intr_hub()
41 u32 addr = nvkm_rd32(device, 0x124120 + (i * 0x0400)); in gf100_ibus_intr_rop()
42 u32 data = nvkm_rd32(device, 0x124124 + (i * 0x0400)); in gf100_ibus_intr_rop()
43 u32 stat = nvkm_rd32(device, 0x124128 + (i * 0x0400)); in gf100_ibus_intr_rop()
52 u32 addr = nvkm_rd32(device, 0x128120 + (i * 0x0400)); in gf100_ibus_intr_gpc()
53 u32 data = nvkm_rd32(device, 0x128124 + (i * 0x0400)); in gf100_ibus_intr_gpc()
54 u32 stat = nvkm_rd32(device, 0x128128 + (i * 0x0400)); in gf100_ibus_intr_gpc()
63 u32 intr0 = nvkm_rd32(device, 0x121c58); in gf100_ibus_intr()
[all …]
Dgk104.c30 u32 addr = nvkm_rd32(device, 0x122120 + (i * 0x0800)); in gk104_ibus_intr_hub()
31 u32 data = nvkm_rd32(device, 0x122124 + (i * 0x0800)); in gk104_ibus_intr_hub()
32 u32 stat = nvkm_rd32(device, 0x122128 + (i * 0x0800)); in gk104_ibus_intr_hub()
41 u32 addr = nvkm_rd32(device, 0x124120 + (i * 0x0800)); in gk104_ibus_intr_rop()
42 u32 data = nvkm_rd32(device, 0x124124 + (i * 0x0800)); in gk104_ibus_intr_rop()
43 u32 stat = nvkm_rd32(device, 0x124128 + (i * 0x0800)); in gk104_ibus_intr_rop()
52 u32 addr = nvkm_rd32(device, 0x128120 + (i * 0x0800)); in gk104_ibus_intr_gpc()
53 u32 data = nvkm_rd32(device, 0x128124 + (i * 0x0800)); in gk104_ibus_intr_gpc()
54 u32 stat = nvkm_rd32(device, 0x128128 + (i * 0x0800)); in gk104_ibus_intr_gpc()
63 u32 intr0 = nvkm_rd32(device, 0x120058); in gk104_ibus_intr()
[all …]
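
The ibus matches above read an (addr, data, stat) triplet per HUB/ROP/GPC client, at a 0x0400 stride on gf100.c and a 0x0800 stride on gk104.c. A sketch of one such per-unit decode is shown below; the error report and the status-clearing write are assumptions based on how these handlers usually look, not copied from the files.

    static void
    ibus_intr_hub_sketch(struct nvkm_subdev *ibus, int i)
    {
            struct nvkm_device *device = ibus->device;
            u32 addr = nvkm_rd32(device, 0x122120 + (i * 0x0800));
            u32 data = nvkm_rd32(device, 0x122124 + (i * 0x0800));
            u32 stat = nvkm_rd32(device, 0x122128 + (i * 0x0800));

            nvkm_error(ibus, "HUB%d: %06x %08x (%08x)\n", i, addr, data, stat);
            /* assumed: acknowledge by clearing a status bit in the stat register */
            nvkm_mask(device, 0x122128 + (i * 0x0800), 0x00000200, 0x00000000);
    }
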
Dgk20a.c37 nvkm_rd32(device, 0x122204); in gk20a_ibus_init_ibus_ring()
52 u32 status0 = nvkm_rd32(device, 0x120058); in gk20a_ibus_intr()
62 if (!(nvkm_rd32(device, 0x12004c) & 0x0000003f)) in gk20a_ibus_intr()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
Dnv04.c71 u32 tmp = nvkm_rd32(device, NV04_PFIFO_CACHE1_PULL0); in nv04_fifo_pause()
76 if (nvkm_rd32(device, NV04_PFIFO_CACHE1_PULL0) & in nv04_fifo_pause()
114 u32 engine = nvkm_rd32(device, 0x003280); in nv04_fifo_swmthd()
121 data = nvkm_rd32(device, 0x003258) & 0x0000ffff; in nv04_fifo_swmthd()
141 u32 pull0 = nvkm_rd32(device, 0x003250); in nv04_fifo_cache_error()
153 mthd = nvkm_rd32(device, NV04_PFIFO_CACHE1_METHOD(ptr)); in nv04_fifo_cache_error()
154 data = nvkm_rd32(device, NV04_PFIFO_CACHE1_DATA(ptr)); in nv04_fifo_cache_error()
156 mthd = nvkm_rd32(device, NV40_PFIFO_CACHE1_METHOD(ptr)); in nv04_fifo_cache_error()
157 data = nvkm_rd32(device, NV40_PFIFO_CACHE1_DATA(ptr)); in nv04_fifo_cache_error()
174 nvkm_rd32(device, NV03_PFIFO_CACHE1_PUSH0) & ~1); in nv04_fifo_cache_error()
[all …]
Dgk104.c74 if (wait_event_timeout(engn->wait, !(nvkm_rd32(device, 0x002284 + in gk104_fifo_runlist_update()
158 u32 intr = nvkm_rd32(device, 0x00252c); in gk104_fifo_intr_bind()
183 u32 stat = nvkm_rd32(device, 0x002640 + (engn * 0x04)); in gk104_fifo_intr_sched_ctxsw()
213 u32 intr = nvkm_rd32(device, 0x00254c); in gk104_fifo_intr_sched()
234 u32 stat = nvkm_rd32(device, 0x00256c); in gk104_fifo_intr_chsw()
244 u32 stat = nvkm_rd32(device, 0x00259c); in gk104_fifo_intr_dropped_fault()
358 u32 inst = nvkm_rd32(device, 0x002800 + (unit * 0x10)); in gk104_fifo_intr_fault()
359 u32 valo = nvkm_rd32(device, 0x002804 + (unit * 0x10)); in gk104_fifo_intr_fault()
360 u32 vahi = nvkm_rd32(device, 0x002808 + (unit * 0x10)); in gk104_fifo_intr_fault()
361 u32 stat = nvkm_rd32(device, 0x00280c + (unit * 0x10)); in gk104_fifo_intr_fault()
[all …]
Dgf100.c74 !(nvkm_rd32(device, 0x00227c) & 0x00100000), in gf100_fifo_runlist_update()
184 u32 stat = nvkm_rd32(device, 0x002640 + (engn * 0x04)); in gf100_fifo_intr_sched_ctxsw()
212 u32 intr = nvkm_rd32(device, 0x00254c); in gf100_fifo_intr_sched()
293 u32 inst = nvkm_rd32(device, 0x002800 + (unit * 0x10)); in gf100_fifo_intr_fault()
294 u32 valo = nvkm_rd32(device, 0x002804 + (unit * 0x10)); in gf100_fifo_intr_fault()
295 u32 vahi = nvkm_rd32(device, 0x002808 + (unit * 0x10)); in gf100_fifo_intr_fault()
296 u32 stat = nvkm_rd32(device, 0x00280c + (unit * 0x10)); in gf100_fifo_intr_fault()
363 u32 stat = nvkm_rd32(device, 0x040108 + (unit * 0x2000)); in gf100_fifo_intr_pbdma()
364 u32 addr = nvkm_rd32(device, 0x0400c0 + (unit * 0x2000)); in gf100_fifo_intr_pbdma()
365 u32 data = nvkm_rd32(device, 0x0400c4 + (unit * 0x2000)); in gf100_fifo_intr_pbdma()
[all …]
Ddmanv40.c73 chid = nvkm_rd32(device, 0x003204) & (fifo->base.nr - 1); in nv40_fifo_dma_engine_fini()
104 chid = nvkm_rd32(device, 0x003204) & (fifo->base.nr - 1); in nv40_fifo_dma_engine_init()
Ddmanv04.c87 chid = nvkm_rd32(device, NV03_PFIFO_CACHE1_PUSH1) & mask; in nv04_fifo_dma_fini()
97 u32 rv = (nvkm_rd32(device, c->regp) & rm) >> c->regs; in nv04_fifo_dma_fini()
Dnv50.c41 if (nvkm_rd32(device, 0x002600 + (i * 4)) & 0x80000000) in nv50_fifo_runlist_update_locked()
Dchang84.c109 if (nvkm_rd32(device, 0x0032fc) != 0xffffffff) in g84_fifo_chan_engine_fini()
Dchannv50.c77 if (nvkm_rd32(device, 0x0032fc) != 0xffffffff) in nv50_fifo_chan_engine_fini()
Dgpfifogf100.c64 if (nvkm_rd32(device, 0x002634) == chan->base.chid) in gf100_fifo_gpfifo_engine_fini()
Dgpfifogk104.c45 if (!(nvkm_rd32(device, 0x002634) & 0x00100000)) in gk104_fifo_gpfifo_kick()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/disp/
Drootgf119.c38 const u32 total = nvkm_rd32(device, 0x640414 + (head * 0x300)); in gf119_disp_root_scanoutpos()
39 const u32 blanke = nvkm_rd32(device, 0x64041c + (head * 0x300)); in gf119_disp_root_scanoutpos()
40 const u32 blanks = nvkm_rd32(device, 0x640420 + (head * 0x300)); in gf119_disp_root_scanoutpos()
58 nvkm_rd32(device, 0x616340 + (head * 0x800)) & 0xffff; in gf119_disp_root_scanoutpos()
61 nvkm_rd32(device, 0x616344 + (head * 0x800)) & 0xffff; in gf119_disp_root_scanoutpos()
91 tmp = nvkm_rd32(device, 0x616104 + (i * 0x800)); in gf119_disp_root_init()
93 tmp = nvkm_rd32(device, 0x616108 + (i * 0x800)); in gf119_disp_root_init()
95 tmp = nvkm_rd32(device, 0x61610c + (i * 0x800)); in gf119_disp_root_init()
101 tmp = nvkm_rd32(device, 0x61a000 + (i * 0x800)); in gf119_disp_root_init()
107 tmp = nvkm_rd32(device, 0x61c000 + (i * 0x800)); in gf119_disp_root_init()
[all …]
Dpiocnv50.c39 if (!(nvkm_rd32(device, 0x610200 + (chid * 0x10)) & 0x00030000)) in nv50_disp_pioc_fini()
43 nvkm_rd32(device, 0x610200 + (chid * 0x10))); in nv50_disp_pioc_fini()
57 if (!(nvkm_rd32(device, 0x610200 + (chid * 0x10)) & 0x00030000)) in nv50_disp_pioc_init()
61 nvkm_rd32(device, 0x610200 + (chid * 0x10))); in nv50_disp_pioc_init()
67 u32 tmp = nvkm_rd32(device, 0x610200 + (chid * 0x10)); in nv50_disp_pioc_init()
72 nvkm_rd32(device, 0x610200 + (chid * 0x10))); in nv50_disp_pioc_init()
Dgf119.c107 ctrl = nvkm_rd32(device, 0x640180 + (or * 0x20)); in exec_script()
146 ctrl = nvkm_rd32(device, 0x660180 + (or * 0x20)); in exec_clkcmp()
230 u32 pclk = nvkm_rd32(device, 0x660450 + (head * 0x300)) / 1000; in gf119_disp_intr_unk2_1()
242 const u32 ctrl = nvkm_rd32(device, 0x660200 + (or * 0x020)); in gf119_disp_intr_unk2_2_tu()
243 const u32 conf = nvkm_rd32(device, 0x660404 + (head * 0x300)); in gf119_disp_intr_unk2_2_tu()
244 const s32 vactive = nvkm_rd32(device, 0x660414 + (head * 0x300)) & 0xffff; in gf119_disp_intr_unk2_2_tu()
245 const s32 vblanke = nvkm_rd32(device, 0x66041c + (head * 0x300)) & 0xffff; in gf119_disp_intr_unk2_2_tu()
246 const s32 vblanks = nvkm_rd32(device, 0x660420 + (head * 0x300)) & 0xffff; in gf119_disp_intr_unk2_2_tu()
247 const u32 pclk = nvkm_rd32(device, 0x660450 + (head * 0x300)) / 1000; in gf119_disp_intr_unk2_2_tu()
254 u32 dpctrl = nvkm_rd32(device, 0x61c10c + loff); in gf119_disp_intr_unk2_2_tu()
[all …]
Drootnv50.c38 const u32 blanke = nvkm_rd32(device, 0x610aec + (head * 0x540)); in nv50_disp_root_scanoutpos()
39 const u32 blanks = nvkm_rd32(device, 0x610af4 + (head * 0x540)); in nv50_disp_root_scanoutpos()
40 const u32 total = nvkm_rd32(device, 0x610afc + (head * 0x540)); in nv50_disp_root_scanoutpos()
58 nvkm_rd32(device, 0x616340 + (head * 0x800)) & 0xffff; in nv50_disp_root_scanoutpos()
61 nvkm_rd32(device, 0x616344 + (head * 0x800)) & 0xffff; in nv50_disp_root_scanoutpos()
317 tmp = nvkm_rd32(device, 0x614004); in nv50_disp_root_init()
322 tmp = nvkm_rd32(device, 0x616100 + (i * 0x800)); in nv50_disp_root_init()
324 tmp = nvkm_rd32(device, 0x616104 + (i * 0x800)); in nv50_disp_root_init()
326 tmp = nvkm_rd32(device, 0x616108 + (i * 0x800)); in nv50_disp_root_init()
328 tmp = nvkm_rd32(device, 0x61610c + (i * 0x800)); in nv50_disp_root_init()
[all …]
Dcorenv50.c178 if (!(nvkm_rd32(device, 0x610200) & 0x001e0000)) in nv50_disp_core_fini()
182 nvkm_rd32(device, 0x610200)); in nv50_disp_core_fini()
200 if ((nvkm_rd32(device, 0x610200) & 0x009f0000) == 0x00020000) in nv50_disp_core_init()
202 if ((nvkm_rd32(device, 0x610200) & 0x003f0000) == 0x00030000) in nv50_disp_core_init()
215 if (!(nvkm_rd32(device, 0x610200) & 0x80000000)) in nv50_disp_core_init()
219 nvkm_rd32(device, 0x610200)); in nv50_disp_core_init()
Dpiocgf119.c39 if (!(nvkm_rd32(device, 0x610490 + (chid * 0x10)) & 0x00030000)) in gf119_disp_pioc_fini()
43 nvkm_rd32(device, 0x610490 + (chid * 0x10))); in gf119_disp_pioc_fini()
65 u32 tmp = nvkm_rd32(device, 0x610490 + (chid * 0x10)); in gf119_disp_pioc_init()
70 nvkm_rd32(device, 0x610490 + (chid * 0x10))); in gf119_disp_pioc_init()
Dnv50.c197 u32 data = nvkm_rd32(device, 0x610084 + (chid * 0x08)); in nv50_disp_intr_error()
198 u32 addr = nvkm_rd32(device, 0x610080 + (chid * 0x08)); in nv50_disp_intr_error()
299 ctrl = nvkm_rd32(device, 0x610b5c + (i * 8)); in exec_script()
311 ctrl = nvkm_rd32(device, reg + (i * 8)); in exec_script()
318 ctrl = nvkm_rd32(device, 0x610b84 + (i * 8)); in exec_script()
359 ctrl = nvkm_rd32(device, 0x610b58 + (i * 8)); in exec_clkcmp()
371 ctrl = nvkm_rd32(device, reg + (i * 8)); in exec_clkcmp()
378 ctrl = nvkm_rd32(device, 0x610b80 + (i * 8)); in exec_clkcmp()
479 u32 pclk = nvkm_rd32(device, 0x610ad0 + (head * 0x540)) & 0x3fffff; in nv50_disp_intr_unk20_1()
494 const u32 ctrl = nvkm_rd32(device, 0x610794 + (or * 8)); in nv50_disp_intr_unk20_2_dp()
[all …]
Ddmacgf119.c51 if (!(nvkm_rd32(device, 0x610490 + (chid * 0x10)) & 0x001e0000)) in gf119_disp_dmac_fini()
55 nvkm_rd32(device, 0x610490 + (chid * 0x10))); in gf119_disp_dmac_fini()
84 if (!(nvkm_rd32(device, 0x610490 + (chid * 0x10)) & 0x80000000)) in gf119_disp_dmac_init()
88 nvkm_rd32(device, 0x610490 + (chid * 0x10))); in gf119_disp_dmac_init()
Drootnv04.c54 args->v0.vblanks = nvkm_rd32(device, 0x680800 + hoff) & 0xffff; in nv04_disp_scanoutpos()
55 args->v0.vtotal = nvkm_rd32(device, 0x680804 + hoff) & 0xffff; in nv04_disp_scanoutpos()
58 args->v0.hblanks = nvkm_rd32(device, 0x680820 + hoff) & 0xffff; in nv04_disp_scanoutpos()
59 args->v0.htotal = nvkm_rd32(device, 0x680824 + hoff) & 0xffff; in nv04_disp_scanoutpos()
71 line = nvkm_rd32(device, 0x600868 + hoff); in nv04_disp_scanoutpos()
Dcoregf119.c185 if (!(nvkm_rd32(device, 0x610490) & 0x001e0000)) in gf119_disp_core_fini()
189 nvkm_rd32(device, 0x610490)); in gf119_disp_core_fini()
217 if (!(nvkm_rd32(device, 0x610490) & 0x80000000)) in gf119_disp_core_init()
221 nvkm_rd32(device, 0x610490)); in gf119_disp_core_init()
Dsornv50.c54 if (!(nvkm_rd32(device, 0x61c004 + soff) & 0x80000000)) in nv50_sor_power()
59 if (!(nvkm_rd32(device, 0x61c004 + soff) & 0x80000000)) in nv50_sor_power()
63 if (!(nvkm_rd32(device, 0x61c030 + soff) & 0x10000000)) in nv50_sor_power()
Ddacnv50.c58 if (!(nvkm_rd32(device, 0x61a004 + doff) & 0x80000000)) in nv50_dac_power()
63 if (!(nvkm_rd32(device, 0x61a004 + doff) & 0x80000000)) in nv50_dac_power()
93 if (!(nvkm_rd32(device, 0x61a004 + doff) & 0x80000000)) in nv50_dac_sense()
104 if (!(nvkm_rd32(device, 0x61a004 + doff) & 0x80000000)) in nv50_dac_sense()
Dsorgm204.c86 if (!(nvkm_rd32(device, 0x61c034 + soff) & 0x80000000)) in gm204_sor_dp_lnk_pwr()
117 data[0] = nvkm_rd32(device, 0x61c118 + loff) & ~(0x000000ff << shift); in gm204_sor_dp_drv_ctl()
118 data[1] = nvkm_rd32(device, 0x61c120 + loff) & ~(0x000000ff << shift); in gm204_sor_dp_drv_ctl()
119 data[2] = nvkm_rd32(device, 0x61c130 + loff); in gm204_sor_dp_drv_ctl()
125 data[3] = nvkm_rd32(device, 0x61c13c + loff) & ~(0x000000ff << shift); in gm204_sor_dp_drv_ctl()
Dnv04.c51 u32 crtc0 = nvkm_rd32(device, 0x600100); in nv04_disp_intr()
52 u32 crtc1 = nvkm_rd32(device, 0x602100); in nv04_disp_intr()
66 pvideo = nvkm_rd32(device, 0x8100); in nv04_disp_intr()
Dsorgf119.c91 data[0] = nvkm_rd32(device, 0x61c118 + loff) & ~(0x000000ff << shift); in gf119_sor_dp_drv_ctl()
92 data[1] = nvkm_rd32(device, 0x61c120 + loff) & ~(0x000000ff << shift); in gf119_sor_dp_drv_ctl()
93 data[2] = nvkm_rd32(device, 0x61c130 + loff); in gf119_sor_dp_drv_ctl()
99 data[3] = nvkm_rd32(device, 0x61c13c + loff) & ~(0x000000ff << shift); in gf119_sor_dp_drv_ctl()
Dsorg94.c95 if (!(nvkm_rd32(device, 0x61c034 + soff) & 0x80000000)) in g94_sor_dp_lnk_pwr()
144 data[0] = nvkm_rd32(device, 0x61c118 + loff) & ~(0x000000ff << shift); in g94_sor_dp_drv_ctl()
145 data[1] = nvkm_rd32(device, 0x61c120 + loff) & ~(0x000000ff << shift); in g94_sor_dp_drv_ctl()
146 data[2] = nvkm_rd32(device, 0x61c130 + loff); in g94_sor_dp_drv_ctl()
Dhdagt215.c56 u32 tmp = nvkm_rd32(device, 0x61c1e0 + soff); in gt215_hda_eld()
70 u32 tmp = nvkm_rd32(device, 0x61c1e0 + soff); in gt215_hda_eld()
Dhdagf119.c59 u32 tmp = nvkm_rd32(device, 0x616618 + hoff); in gf119_hda_eld()
74 u32 tmp = nvkm_rd32(device, 0x616618 + hoff); in gf119_hda_eld()
Ddmacnv50.c199 if (!(nvkm_rd32(device, 0x610200 + (chid * 0x10)) & 0x001e0000)) in nv50_disp_dmac_fini()
203 nvkm_rd32(device, 0x610200 + (chid * 0x10))); in nv50_disp_dmac_fini()
231 if (!(nvkm_rd32(device, 0x610200 + (chid * 0x10)) & 0x80000000)) in nv50_disp_dmac_init()
235 nvkm_rd32(device, 0x610200 + (chid * 0x10))); in nv50_disp_dmac_init()
Dpiornv50.c57 if (!(nvkm_rd32(device, 0x61e004 + soff) & 0x80000000)) in nv50_pior_power()
62 if (!(nvkm_rd32(device, 0x61e004 + soff) & 0x80000000)) in nv50_pior_power()
Dchannv50.c45 u32 next = nvkm_rd32(device, list->data[i].addr + base + 0); in nv50_disp_mthd_list()
46 u32 prev = nvkm_rd32(device, list->data[i].addr + base + c); in nv50_disp_mthd_list()
162 *data = nvkm_rd32(device, 0x640000 + (chan->chid * 0x1000) + addr); in nv50_disp_chan_rd32()
Dvga.c137 if (!(nvkm_rd32(device, 0x001084) & 0x10000000)) in nvkm_lockvgac()
166 u32 tied = nvkm_rd32(device, 0x001084) & 0x10000000; in nvkm_rdvgaowner()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/pm/
Dgf100.c152 case 0: ctr->ctr = nvkm_rd32(device, dom->addr + 0x08c); break; in gf100_perfctr_read()
153 case 1: ctr->ctr = nvkm_rd32(device, dom->addr + 0x088); break; in gf100_perfctr_read()
154 case 2: ctr->ctr = nvkm_rd32(device, dom->addr + 0x080); break; in gf100_perfctr_read()
155 case 3: ctr->ctr = nvkm_rd32(device, dom->addr + 0x090); break; in gf100_perfctr_read()
157 dom->clk = nvkm_rd32(device, dom->addr + 0x070); in gf100_perfctr_read()
210 mask = (1 << nvkm_rd32(device, 0x022430)) - 1; in gf100_pm_new_()
211 mask &= ~nvkm_rd32(device, 0x022504); in gf100_pm_new_()
212 mask &= ~nvkm_rd32(device, 0x022584); in gf100_pm_new_()
220 mask = (1 << nvkm_rd32(device, 0x022438)) - 1; in gf100_pm_new_()
221 mask &= ~nvkm_rd32(device, 0x022548); in gf100_pm_new_()
[all …]
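
The gf100.c matches above (gf100_pm_new_) derive a usable-unit mask from three reads: a count register turned into a full bitmask, minus two registers of disabled/floorswept units. The arithmetic is isolated in the sketch below; the interpretation of the registers is inferred from the usage in the excerpt.

    static u32
    pm_unit_mask_sketch(struct nvkm_device *device)
    {
            u32 mask = (1 << nvkm_rd32(device, 0x022430)) - 1; /* units present  */
            mask &= ~nvkm_rd32(device, 0x022504);              /* minus disabled */
            mask &= ~nvkm_rd32(device, 0x022584);              /* minus disabled */
            return mask;
    }
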
Dnv40.c50 case 0: ctr->ctr = nvkm_rd32(device, 0x00a700 + dom->addr); break; in nv40_perfctr_read()
51 case 1: ctr->ctr = nvkm_rd32(device, 0x00a6c0 + dom->addr); break; in nv40_perfctr_read()
52 case 2: ctr->ctr = nvkm_rd32(device, 0x00a680 + dom->addr); break; in nv40_perfctr_read()
53 case 3: ctr->ctr = nvkm_rd32(device, 0x00a740 + dom->addr); break; in nv40_perfctr_read()
55 dom->clk = nvkm_rd32(device, 0x00a600 + dom->addr); in nv40_perfctr_read()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/bus/
Dnv31.c35 u32 stat = nvkm_rd32(device, 0x001100) & nvkm_rd32(device, 0x001140); in nv31_bus_intr()
36 u32 gpio = nvkm_rd32(device, 0x001104) & nvkm_rd32(device, 0x001144); in nv31_bus_intr()
45 u32 addr = nvkm_rd32(device, 0x009084); in nv31_bus_intr()
46 u32 data = nvkm_rd32(device, 0x009088); in nv31_bus_intr()
Dnv50.c44 if (!(nvkm_rd32(device, 0x001308) & 0x00000100)) in nv50_bus_hwsq_exec()
57 u32 stat = nvkm_rd32(device, 0x001100) & nvkm_rd32(device, 0x001140); in nv50_bus_intr()
60 u32 addr = nvkm_rd32(device, 0x009084); in nv50_bus_intr()
61 u32 data = nvkm_rd32(device, 0x009088); in nv50_bus_intr()
Dgf100.c32 u32 stat = nvkm_rd32(device, 0x001100) & nvkm_rd32(device, 0x001140); in gf100_bus_intr()
35 u32 addr = nvkm_rd32(device, 0x009084); in gf100_bus_intr()
36 u32 data = nvkm_rd32(device, 0x009088); in gf100_bus_intr()
Dnv04.c36 u32 stat = nvkm_rd32(device, 0x001100) & nvkm_rd32(device, 0x001140); in nv04_bus_intr()
Dhwsq.c142 heads = nvkm_rd32(device, 0x610050); in nvkm_hwsq_wait_vblank()
146 x = nvkm_rd32(device, 0x610b40 + (0x540 * i)); in nvkm_hwsq_wait_vblank()
Dg94.c44 if (!(nvkm_rd32(device, 0x001308) & 0x00000100)) in g94_bus_hwsq_exec()
Dhwsq.h89 reg->data = nvkm_rd32(device, reg->addr); in hwsq_rd32()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/gpio/
Dg94.c30 u32 intr0 = nvkm_rd32(device, 0x00e054); in g94_gpio_intr_stat()
31 u32 intr1 = nvkm_rd32(device, 0x00e074); in g94_gpio_intr_stat()
32 u32 stat0 = nvkm_rd32(device, 0x00e050) & intr0; in g94_gpio_intr_stat()
33 u32 stat1 = nvkm_rd32(device, 0x00e070) & intr1; in g94_gpio_intr_stat()
44 u32 inte0 = nvkm_rd32(device, 0x00e050); in g94_gpio_intr_mask()
45 u32 inte1 = nvkm_rd32(device, 0x00e070); in g94_gpio_intr_mask()
Dgk104.c30 u32 intr0 = nvkm_rd32(device, 0x00dc00); in gk104_gpio_intr_stat()
31 u32 intr1 = nvkm_rd32(device, 0x00dc80); in gk104_gpio_intr_stat()
32 u32 stat0 = nvkm_rd32(device, 0x00dc08) & intr0; in gk104_gpio_intr_stat()
33 u32 stat1 = nvkm_rd32(device, 0x00dc88) & intr1; in gk104_gpio_intr_stat()
44 u32 inte0 = nvkm_rd32(device, 0x00dc08); in gk104_gpio_intr_mask()
45 u32 inte1 = nvkm_rd32(device, 0x00dc88); in gk104_gpio_intr_mask()
Dnv10.c34 line = nvkm_rd32(device, 0x600818) >> line; in nv10_gpio_sense()
39 line = nvkm_rd32(device, 0x60081c) >> line; in nv10_gpio_sense()
44 line = nvkm_rd32(device, 0x600850) >> line; in nv10_gpio_sense()
86 u32 intr = nvkm_rd32(device, 0x001104); in nv10_gpio_intr_stat()
87 u32 stat = nvkm_rd32(device, 0x001144) & intr; in nv10_gpio_intr_stat()
97 u32 inte = nvkm_rd32(device, 0x001144); in nv10_gpio_intr_mask()
Dnv50.c92 return !!(nvkm_rd32(device, reg) & (4 << shift)); in nv50_gpio_sense()
99 u32 intr = nvkm_rd32(device, 0x00e054); in nv50_gpio_intr_stat()
100 u32 stat = nvkm_rd32(device, 0x00e050) & intr; in nv50_gpio_intr_stat()
110 u32 inte = nvkm_rd32(device, 0x00e050); in nv50_gpio_intr_mask()
Dgf119.c69 return !!(nvkm_rd32(device, 0x00d610 + (line * 4)) & 0x00004000); in gf119_gpio_sense()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/bios/
Dshadowramin.c37 *(u32 *)&bios->data[i] = nvkm_rd32(device, 0x700000 + i); in pramin_read()
68 addr = nvkm_rd32(device, 0x021c04); in pramin_init()
71 addr = nvkm_rd32(device, 0x022500); in pramin_init()
81 addr = nvkm_rd32(device, 0x619f04); in pramin_init()
94 addr = (u64)nvkm_rd32(device, 0x001700) << 16; in pramin_init()
105 priv->bar0 = nvkm_rd32(device, 0x001700); in pramin_init()
Dshadowrom.c34 *(u32 *)&bios->data[i] = nvkm_rd32(device, 0x300000 + i); in prom_read()
Dramcfg.c32 return (nvkm_rd32(subdev->device, 0x101000) & 0x0000003c) >> 2; in nvbios_ramcfg_strap()
Dpll.c368 u32 sel_clk = nvkm_rd32(device, 0x680524); in nvbios_pll_parse()
Dinit.c185 return nvkm_rd32(device, reg); in init_rd32()
204 u32 tmp = nvkm_rd32(device, reg); in init_mask()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/mpeg/
Dnv50.c64 u32 stat = nvkm_rd32(device, 0x00b100); in nv50_mpeg_intr()
65 u32 type = nvkm_rd32(device, 0x00b230); in nv50_mpeg_intr()
66 u32 mthd = nvkm_rd32(device, 0x00b234); in nv50_mpeg_intr()
67 u32 data = nvkm_rd32(device, 0x00b238); in nv50_mpeg_intr()
108 if (!(nvkm_rd32(device, 0x00b200) & 0x00000001)) in nv50_mpeg_init()
112 nvkm_rd32(device, 0x00b200)); in nv50_mpeg_init()
Dnv31.c128 u32 dma0 = nvkm_rd32(device, 0x700000 + inst); in nv31_mpeg_mthd_dma()
129 u32 dma1 = nvkm_rd32(device, 0x700004 + inst); in nv31_mpeg_mthd_dma()
130 u32 dma2 = nvkm_rd32(device, 0x700008 + inst); in nv31_mpeg_mthd_dma()
184 u32 stat = nvkm_rd32(device, 0x00b100); in nv31_mpeg_intr()
185 u32 type = nvkm_rd32(device, 0x00b230); in nv31_mpeg_intr()
186 u32 mthd = nvkm_rd32(device, 0x00b234); in nv31_mpeg_intr()
187 u32 data = nvkm_rd32(device, 0x00b238); in nv31_mpeg_intr()
240 if (!(nvkm_rd32(device, 0x00b200) & 0x00000001)) in nv31_mpeg_init()
244 nvkm_rd32(device, 0x00b200)); in nv31_mpeg_init()
Dnv44.c77 if (nvkm_rd32(device, 0x00b318) == inst) in nv44_mpeg_chan_fini()
150 u32 inst = nvkm_rd32(device, 0x00b318) & 0x000fffff; in nv44_mpeg_intr()
151 u32 stat = nvkm_rd32(device, 0x00b100); in nv44_mpeg_intr()
152 u32 type = nvkm_rd32(device, 0x00b230); in nv44_mpeg_intr()
153 u32 mthd = nvkm_rd32(device, 0x00b234); in nv44_mpeg_intr()
154 u32 data = nvkm_rd32(device, 0x00b238); in nv44_mpeg_intr()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/therm/
Dnv40.c63 return nvkm_rd32(device, 0x15b4) & 0x3fff; in nv40_sensor_setup()
67 return nvkm_rd32(device, 0x15b4) & 0xff; in nv40_sensor_setup()
82 core_temp = nvkm_rd32(device, 0x15b4) & 0x3fff; in nv40_temp_get()
85 core_temp = nvkm_rd32(device, 0x15b4) & 0xff; in nv40_temp_get()
126 u32 reg = nvkm_rd32(device, 0x0010f0); in nv40_fan_pwm_get()
134 u32 reg = nvkm_rd32(device, 0x0015f4); in nv40_fan_pwm_get()
136 *divs = nvkm_rd32(device, 0x0015f8); in nv40_fan_pwm_get()
172 uint32_t stat = nvkm_rd32(device, 0x1100); in nv40_therm_intr()
Dgf119.c31 u32 gpio = nvkm_rd32(device, 0x00d610 + (line * 0x04)); in pwm_info()
74 if (nvkm_rd32(device, 0x00d610 + (line * 0x04)) & 0x00000040) { in gf119_fan_pwm_get()
75 *divs = nvkm_rd32(device, 0x00e114 + (indx * 8)); in gf119_fan_pwm_get()
76 *duty = nvkm_rd32(device, 0x00e118 + (indx * 8)); in gf119_fan_pwm_get()
80 *divs = nvkm_rd32(device, 0x0200d8) & 0x1fff; in gf119_fan_pwm_get()
81 *duty = nvkm_rd32(device, 0x0200dc) & 0x1fff; in gf119_fan_pwm_get()
Dnv50.c73 if (nvkm_rd32(device, ctrl) & (1 << line)) { in nv50_fan_pwm_get()
74 *divs = nvkm_rd32(device, 0x00e114 + (id * 8)); in nv50_fan_pwm_get()
75 *duty = nvkm_rd32(device, 0x00e118 + (id * 8)); in nv50_fan_pwm_get()
103 u8 pwm_div = nvkm_rd32(device, 0x410c); in nv50_fan_pwm_clock()
104 if (nvkm_rd32(device, 0xc040) & 0x800000) { in nv50_fan_pwm_clock()
135 core_temp = nvkm_rd32(device, 0x20014) & 0x3fff; in nv50_temp_get()
Dgm107.c37 *divs = nvkm_rd32(device, 0x10eb20) & 0x1fff; in gm107_fan_pwm_get()
38 *duty = nvkm_rd32(device, 0x10eb24) & 0x1fff; in gm107_fan_pwm_get()
Dgt215.c32 u32 tach = nvkm_rd32(device, 0x00e728) & 0x0000ffff; in gt215_therm_fan_sense()
33 u32 ctrl = nvkm_rd32(device, 0x00e720); in gt215_therm_fan_sense()
Dg84.c35 return nvkm_rd32(device, 0x20400); in g84_temp_get()
106 temp = nvkm_rd32(device, thrs_reg); in g84_therm_threshold_hyst_emulation()
149 intr = nvkm_rd32(device, 0x20100) & 0x3ff; in g84_therm_intr()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/fb/
Dramnv40.c80 u32 vbl = nvkm_rd32(device, 0x600808 + (i * 0x2000)); in nv40_ram_prog()
83 if (vbl != nvkm_rd32(device, 0x600808 + (i * 0x2000))) { in nv40_ram_prog()
100 u32 tmp = nvkm_rd32(device, 0x600808 + (i * 0x2000)); in nv40_ram_prog()
106 u32 tmp = nvkm_rd32(device, 0x600808 + (i * 0x2000)); in nv40_ram_prog()
172 u32 tmp = nvkm_rd32(device, 0x600808 + (i * 0x2000)); in nv40_ram_prog()
211 u32 pbus1218 = nvkm_rd32(device, 0x001218); in nv40_ram_new()
212 u32 size = nvkm_rd32(device, 0x10020c) & 0xff000000; in nv40_ram_new()
213 u32 tags = nvkm_rd32(device, 0x100320); in nv40_ram_new()
228 (*pram)->parts = (nvkm_rd32(device, 0x100200) & 0x00000003) + 1; in nv40_ram_new()
Dramnv49.c30 u32 size = nvkm_rd32(device, 0x10020c) & 0xff000000; in nv49_ram_new()
31 u32 tags = nvkm_rd32(device, 0x100320); in nv49_ram_new()
32 u32 fb914 = nvkm_rd32(device, 0x100914); in nv49_ram_new()
47 (*pram)->parts = (nvkm_rd32(device, 0x100200) & 0x00000003) + 1; in nv49_ram_new()
Dramnv20.c30 u32 pbus1218 = nvkm_rd32(device, 0x001218); in nv20_ram_new()
31 u32 size = (nvkm_rd32(device, 0x10020c) & 0xff000000); in nv20_ram_new()
32 u32 tags = nvkm_rd32(device, 0x100320); in nv20_ram_new()
47 (*pram)->parts = (nvkm_rd32(device, 0x100200) & 0x00000003) + 1; in nv20_ram_new()
Dramnv41.c30 u32 size = nvkm_rd32(device, 0x10020c) & 0xff000000; in nv41_ram_new()
31 u32 tags = nvkm_rd32(device, 0x100320); in nv41_ram_new()
32 u32 fb474 = nvkm_rd32(device, 0x100474); in nv41_ram_new()
47 (*pram)->parts = (nvkm_rd32(device, 0x100200) & 0x00000003) + 1; in nv41_ram_new()
Dramnv10.c30 u32 size = nvkm_rd32(device, 0x10020c) & 0xff000000; in nv10_ram_new()
31 u32 cfg0 = nvkm_rd32(device, 0x100200); in nv10_ram_new()
Dramnv50.c81 cur2 = nvkm_rd32(device, 0x100228); in nv50_ram_timing_calc()
82 cur4 = nvkm_rd32(device, 0x100230); in nv50_ram_timing_calc()
83 cur7 = nvkm_rd32(device, 0x10023c); in nv50_ram_timing_calc()
84 cur8 = nvkm_rd32(device, 0x100240); in nv50_ram_timing_calc()
159 timing[i] = nvkm_rd32(device, 0x100220 + (i * 4)); in nv50_ram_timing_read()
607 r0 = nvkm_rd32(device, 0x100200); in nv50_fb_vram_rblock()
608 r4 = nvkm_rd32(device, 0x100204); in nv50_fb_vram_rblock()
609 rt = nvkm_rd32(device, 0x100250); in nv50_fb_vram_rblock()
611 r0, r4, rt, nvkm_rd32(device, 0x001540)); in nv50_fb_vram_rblock()
644 u64 size = nvkm_rd32(device, 0x10020c); in nv50_ram_ctor()
[all …]
Dramnv44.c30 u32 size = nvkm_rd32(device, 0x10020c) & 0xff000000; in nv44_ram_new()
31 u32 fb474 = nvkm_rd32(device, 0x100474); in nv44_ram_new()
Drammcp77.c66 u64 base = (u64)nvkm_rd32(device, 0x100e10) << 12; in mcp77_ram_new()
67 u64 size = (u64)nvkm_rd32(device, 0x100e14) << 12; in mcp77_ram_new()
Dramgf100.c113 u32 part = nvkm_rd32(device, 0x022438), i; in gf100_ram_train()
114 u32 mask = nvkm_rd32(device, 0x022554); in gf100_ram_train()
567 u32 parts = nvkm_rd32(device, 0x022438); in gf100_ram_ctor()
568 u32 pmask = nvkm_rd32(device, maskaddr); in gf100_ram_ctor()
569 u64 bsize = (u64)nvkm_rd32(device, 0x10f20c) << 20; in gf100_ram_ctor()
575 nvkm_debug(subdev, "100800: %08x\n", nvkm_rd32(device, 0x100800)); in gf100_ram_ctor()
583 psize = (u64)nvkm_rd32(device, 0x11020c + (i * 0x1000)) << 20; in gf100_ram_ctor()
624 ram->ranks = (nvkm_rd32(device, 0x10f200) & 0x00000004) ? 2 : 1; in gf100_ram_ctor()
Dnv30.c70 nvkm_rd32(device, 0x122c + 0x10 * k + 0x4 * j) >> in calc_bias()
103 int l = nvkm_rd32(device, 0x1003d0); in nv30_fb_init()
Dramnv4e.c30 u32 size = nvkm_rd32(device, 0x10020c) & 0xff000000; in nv4e_ram_new()
Dramnv04.c35 u32 boot0 = nvkm_rd32(device, NV04_PFB_BOOT_0); in nv04_ram_new()
Dramgt215.c322 r001700 = nvkm_rd32(device, 0x1700); in gt215_link_train_init()
330 train->r_100720 = nvkm_rd32(device, 0x100720); in gt215_link_train_init()
331 train->r_1111e0 = nvkm_rd32(device, 0x1111e0); in gt215_link_train_init()
332 train->r_111400 = nvkm_rd32(device, 0x111400); in gt215_link_train_init()
356 cur2 = nvkm_rd32(device, 0x100228); in gt215_ram_timing_calc()
357 cur3 = nvkm_rd32(device, 0x10022c); in gt215_ram_timing_calc()
358 cur7 = nvkm_rd32(device, 0x10023c); in gt215_ram_timing_calc()
359 cur8 = nvkm_rd32(device, 0x100240); in gt215_ram_timing_calc()
Dnv50.c166 idx = nvkm_rd32(device, 0x100c90); in nv50_fb_intr()
173 trap[i] = nvkm_rd32(device, 0x100c94); in nv50_fb_intr()
Dnv41.c36 nvkm_rd32(device, 0x100600 + (i * 0x10)); in nv41_fb_tile_prog()
Dnv44.c46 nvkm_rd32(device, 0x100600 + (i * 0x10)); in nv44_fb_tile_prog()
Dnv10.c54 nvkm_rd32(device, 0x100240 + (i * 0x10)); in nv10_fb_tile_prog()
Dgf100.c42 u32 intr = nvkm_rd32(device, 0x000100); in gf100_fb_intr()
Dnv20.c76 nvkm_rd32(device, 0x100240 + (i * 0x10)); in nv20_fb_tile_prog()
Dbase.c69 const u8 ramcfg = (nvkm_rd32(device, 0x101000) & 0x0000003c) >> 2; in nvkm_fb_bios_memtype()
Dramfuc.h85 reg->data = nvkm_rd32(device, reg->addr); in ramfuc_rd32()
Dramgk104.c242 u32 prev = nvkm_rd32(device, addr); in gk104_ram_nuts()
1417 save = nvkm_rd32(device, 0x10f65c) & 0x000000f0; in gk104_ram_init()
1558 ram->parts = nvkm_rd32(device, 0x022438); in gk104_ram_new()
1559 ram->pmask = nvkm_rd32(device, 0x022554); in gk104_ram_new()
1563 u32 cfg1 = nvkm_rd32(device, 0x110204 + (i * 0x1000)); in gk104_ram_new()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/clk/
Dnv50.c41 return nvkm_rd32(device, 0x004700); in read_div()
45 return nvkm_rd32(device, 0x004800); in read_div()
57 u32 rsel = nvkm_rd32(device, 0x00e18c); in read_pll_src()
73 coef = nvkm_rd32(device, 0x00e81c + (id * 0x0c)); in read_pll_src()
82 coef = nvkm_rd32(device, 0x00e81c); in read_pll_src()
90 rsel = nvkm_rd32(device, 0x00c050); in read_pll_src()
108 coef = nvkm_rd32(device, 0x00e81c + (id * 0x28)); in read_pll_src()
109 P = (nvkm_rd32(device, 0x00e824 + (id * 0x28)) >> 16) & 7; in read_pll_src()
129 u32 src, mast = nvkm_rd32(device, 0x00c040); in read_pll_ref()
161 u32 mast = nvkm_rd32(device, 0x00c040); in read_pll()
[all …]
Dgk20a.c130 val = nvkm_rd32(device, GPCPLL_COEFF); in gk20a_pllg_read_mnp()
276 val = nvkm_rd32(device, GPCPLL_COEFF); in gk20a_pllg_slide()
293 val = nvkm_rd32(device, GPCPLL_COEFF); in gk20a_pllg_slide()
300 val = nvkm_rd32(device, GPCPLL_NDIV_SLOWDOWN); in gk20a_pllg_slide()
307 val = nvkm_rd32(device, GPC_BCAST_NDIV_SLOWDOWN_DEBUG); in gk20a_pllg_slide()
316 nvkm_rd32(device, GPCPLL_NDIV_SLOWDOWN); in gk20a_pllg_slide()
331 nvkm_rd32(device, GPCPLL_CFG); in _gk20a_pllg_enable()
339 nvkm_rd32(device, GPCPLL_CFG); in _gk20a_pllg_disable()
351 val = nvkm_rd32(device, GPCPLL_COEFF); in _gk20a_pllg_program_mnp()
356 cfg = nvkm_rd32(device, GPCPLL_CFG); in _gk20a_pllg_program_mnp()
[all …]
Dgt215.c46 u32 sctl = nvkm_rd32(device, 0x4120 + (idx * 4)); in read_vco()
70 return nvkm_rd32(device, 0x00471c) * 1000; in read_clk()
76 sctl = nvkm_rd32(device, 0x4120 + (idx * 4)); in read_clk()
111 u32 ctrl = nvkm_rd32(device, pll + 0); in read_pll()
116 u32 coef = nvkm_rd32(device, pll + 4); in read_pll()
164 hsrc = (nvkm_rd32(device, 0xc040) & 0x30000000) >> 28; in gt215_clk_read()
314 if (!nvkm_rd32(device, 0x000100)) in gt215_clk_pre()
323 if (nvkm_rd32(device, 0x002504) & 0x00000010) in gt215_clk_pre()
329 u32 tmp = nvkm_rd32(device, 0x00251c) & 0x0000003f; in gt215_clk_pre()
372 bypass = nvkm_rd32(device, ctrl) & 0x00000008; in prog_pll()
[all …]
Dgk104.c53 u32 ssrc = nvkm_rd32(device, dsrc); in read_vco()
63 u32 ctrl = nvkm_rd32(device, pll + 0x00); in read_pll()
64 u32 coef = nvkm_rd32(device, pll + 0x04); in read_pll()
86 fN = nvkm_rd32(device, pll + 0x10) >> 16; in read_pll()
109 u32 ssrc = nvkm_rd32(device, dsrc + (doff * 4)); in read_div()
110 u32 sctl = nvkm_rd32(device, dctl + (doff * 4)); in read_div()
136 switch (nvkm_rd32(device, 0x1373f4) & 0x0000000f) { in read_mem()
148 u32 sctl = nvkm_rd32(device, 0x137250 + (idx * 4)); in read_clk()
152 u32 ssel = nvkm_rd32(device, 0x137100); in read_clk()
161 u32 ssrc = nvkm_rd32(device, 0x137160 + (idx * 0x04)); in read_clk()
[all …]
Dgf100.c52 u32 ssrc = nvkm_rd32(device, dsrc); in read_vco()
62 u32 ctrl = nvkm_rd32(device, pll + 0x00); in read_pll()
63 u32 coef = nvkm_rd32(device, pll + 0x04); in read_pll()
101 u32 ssrc = nvkm_rd32(device, dsrc + (doff * 4)); in read_div()
102 u32 sctl = nvkm_rd32(device, dctl + (doff * 4)); in read_div()
128 u32 sctl = nvkm_rd32(device, 0x137250 + (idx * 4)); in read_clk()
129 u32 ssel = nvkm_rd32(device, 0x137100); in read_clk()
175 if (nvkm_rd32(device, 0x1373f0) & 0x00000002) in gf100_clk_read()
352 if (!(nvkm_rd32(device, 0x137100) & (1 << idx))) in gf100_clk_prog_1()
370 if (nvkm_rd32(device, addr + 0x00) & 0x00020000) in gf100_clk_prog_2()
[all …]
Dmcp77.c45 return nvkm_rd32(device, 0x004600); in read_div()
52 u32 ctrl = nvkm_rd32(device, base + 0); in read_pll()
53 u32 coef = nvkm_rd32(device, base + 4); in read_pll()
61 post_div = 1 << ((nvkm_rd32(device, 0x4070) & 0x000f0000) >> 16); in read_pll()
64 post_div = (nvkm_rd32(device, 0x4040) & 0x000f0000) >> 16; in read_pll()
86 u32 mast = nvkm_rd32(device, 0x00c054); in mcp77_clk_read()
107 P = (nvkm_rd32(device, 0x004028) & 0x00070000) >> 16; in mcp77_clk_read()
130 P = (nvkm_rd32(device, 0x004020) & 0x00070000) >> 16; in mcp77_clk_read()
357 u32 tmp = nvkm_rd32(device, 0x004080) & pllmask; in mcp77_clk_prog()
Dnv40.c43 u32 ctrl = nvkm_rd32(device, reg + 0x00); in read_pll_1()
59 u32 ctrl = nvkm_rd32(device, reg + 0x00); in read_pll_2()
60 u32 coef = nvkm_rd32(device, reg + 0x04); in read_pll_2()
102 u32 mast = nvkm_rd32(device, 0x00c040); in nv40_clk_read()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/ltc/
Dgm107.c47 if (!nvkm_rd32(device, addr)) in gm107_ltc_cbc_wait()
79 u32 stat = nvkm_rd32(device, base + 0x00c); in gm107_ltc_lts_isr()
93 mask = nvkm_rd32(device, 0x00017c); in gm107_ltc_intr()
106 const u32 parts = nvkm_rd32(device, 0x022438); in gm107_ltc_oneinit()
107 const u32 mask = nvkm_rd32(device, 0x021c14); in gm107_ltc_oneinit()
108 const u32 slice = nvkm_rd32(device, 0x17e280) >> 28; in gm107_ltc_oneinit()
124 u32 lpg128 = !(nvkm_rd32(device, 0x100c80) & 0x00000001); in gm107_ltc_init()
Dgf100.c48 if (!nvkm_rd32(device, addr)) in gf100_ltc_cbc_wait()
98 u32 intr = nvkm_rd32(device, base + 0x020); in gf100_ltc_lts_intr()
116 mask = nvkm_rd32(device, 0x00017c); in gf100_ltc_intr()
212 const u32 parts = nvkm_rd32(device, 0x022438); in gf100_ltc_oneinit()
213 const u32 mask = nvkm_rd32(device, 0x022554); in gf100_ltc_oneinit()
214 const u32 slice = nvkm_rd32(device, 0x17e8dc) >> 28; in gf100_ltc_oneinit()
230 u32 lpg128 = !(nvkm_rd32(device, 0x100c80) & 0x00000001); in gf100_ltc_init()
Dgk104.c30 u32 lpg128 = !(nvkm_rd32(device, 0x100c80) & 0x00000001); in gk104_ltc_init()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/timer/
Dnv04.c49 hi = nvkm_rd32(device, NV04_PTIMER_TIME_1); in nv04_timer_read()
50 lo = nvkm_rd32(device, NV04_PTIMER_TIME_0); in nv04_timer_read()
51 } while (hi != nvkm_rd32(device, NV04_PTIMER_TIME_1)); in nv04_timer_read()
76 u32 stat = nvkm_rd32(device, NV04_PTIMER_INTR_0); in nv04_timer_intr()
103 n = nvkm_rd32(device, NV04_PTIMER_NUMERATOR); in nv04_timer_init()
104 d = nvkm_rd32(device, NV04_PTIMER_DENOMINATOR); in nv04_timer_init()
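
The nv04.c matches above (nv04_timer_read) show the classic tear-free way to read a 64-bit counter through two 32-bit registers: read high, read low, and retry if high changed in between. A sketch of the whole loop follows, assuming the NV04_PTIMER_* constants from the driver's register headers.

    static u64
    timer_read_sketch(struct nvkm_device *device)
    {
            u32 hi, lo;
            do {
                    hi = nvkm_rd32(device, NV04_PTIMER_TIME_1);
                    lo = nvkm_rd32(device, NV04_PTIMER_TIME_0);
            } while (hi != nvkm_rd32(device, NV04_PTIMER_TIME_1)); /* low word wrapped, retry */
            return ((u64)hi << 32) | lo;
    }
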
Dnv40.c40 n = nvkm_rd32(device, NV04_PTIMER_NUMERATOR); in nv40_timer_init()
41 d = nvkm_rd32(device, NV04_PTIMER_DENOMINATOR); in nv40_timer_init()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/devinit/
Dnv04.c148 uint32_t oldpll = nvkm_rd32(device, reg); in setPLL_single()
158 saved_powerctrl_1 = nvkm_rd32(device, 0x001584); in setPLL_single()
175 nvkm_rd32(device, reg); in setPLL_single()
205 uint32_t oldpll1 = nvkm_rd32(device, reg1); in setPLL_double_highregs()
206 uint32_t oldpll2 = !nv3035 ? nvkm_rd32(device, reg2) : 0; in setPLL_double_highregs()
221 oldramdac580 = nvkm_rd32(device, 0x680580); in setPLL_double_highregs()
237 saved_powerctrl_1 = nvkm_rd32(device, 0x001584); in setPLL_double_highregs()
257 savedc040 = nvkm_rd32(device, 0xc040); in setPLL_double_highregs()
289 uint32_t oldPval = nvkm_rd32(device, Preg); in setPLL_double_lowregs()
298 if (nvkm_rd32(device, NMNMreg) == NMNM && (oldPval & 0xc0070000) == Pval) in setPLL_double_lowregs()
[all …]
Dgm204.c67 nvkm_wr32(device, 0x10a1c0, nvkm_rd32(device, 0x10a1c4) + argi); in pmu_args()
68 return nvkm_rd32(device, 0x10a1c4); in pmu_args()
130 nvkm_rd32(device, 0x000200); in gm204_devinit_post()
131 while (nvkm_rd32(device, 0x10a10c) & 0x00000006) { in gm204_devinit_post()
159 while (!(nvkm_rd32(device, 0x10a040) & 0x00002000)) { in gm204_devinit_post()
Dgt215.c69 u32 r001540 = nvkm_rd32(device, 0x001540); in gt215_devinit_disable()
70 u32 r00154c = nvkm_rd32(device, 0x00154c); in gt215_devinit_disable()
127 init->r001540 = nvkm_rd32(device, 0x001540); in gt215_devinit_mmio()
Dnv05.c63 strap = (nvkm_rd32(device, 0x101000) & 0x0000003c) >> 2; in nv05_devinit_meminit()
75 if (nvkm_rd32(device, NV04_PFB_BOOT_0) & NV04_PFB_BOOT_0_UMA_ENABLE) in nv05_devinit_meminit()
106 v = nvkm_rd32(device, NV04_PFB_BOOT_0) & NV04_PFB_BOOT_0_RAM_AMOUNT; in nv05_devinit_meminit()
Dgm107.c33 u32 r021c00 = nvkm_rd32(device, 0x021c00); in gm107_devinit_disable()
34 u32 r021c04 = nvkm_rd32(device, 0x021c04); in gm107_devinit_disable()
Dg84.c33 u32 r001540 = nvkm_rd32(device, 0x001540); in g84_devinit_disable()
34 u32 r00154c = nvkm_rd32(device, 0x00154c); in g84_devinit_disable()
Dmcp89.c33 u32 r001540 = nvkm_rd32(device, 0x001540); in mcp89_devinit_disable()
34 u32 r00154c = nvkm_rd32(device, 0x00154c); in mcp89_devinit_disable()
Dg98.c33 u32 r001540 = nvkm_rd32(device, 0x001540); in g98_devinit_disable()
34 u32 r00154c = nvkm_rd32(device, 0x00154c); in g98_devinit_disable()
Dnv20.c53 amount = nvkm_rd32(device, 0x10020c); in nv20_devinit_meminit()
57 amount = nvkm_rd32(device, 0x10020c); in nv20_devinit_meminit()
Dnv10.c78 int off = nvkm_rd32(device, 0x10020c) - 0x100000; in nv10_devinit_meminit()
Dgf100.c70 u32 r022500 = nvkm_rd32(device, 0x022500); in gf100_devinit_disable()
Dnv50.c84 u32 r001540 = nvkm_rd32(device, 0x001540); in nv50_devinit_disable()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/i2c/
Dauxg94.c51 ctrl = nvkm_rd32(device, 0x00e4e4 + (aux->ch * 0x50)); in g94_i2c_aux_init()
63 ctrl = nvkm_rd32(device, 0x00e4e4 + (aux->ch * 0x50)); in g94_i2c_aux_init()
92 stat = nvkm_rd32(device, 0x00e4e8 + base); in g94_i2c_aux_xfer()
107 ctrl = nvkm_rd32(device, 0x00e4e4 + base); in g94_i2c_aux_xfer()
126 ctrl = nvkm_rd32(device, 0x00e4e4 + base); in g94_i2c_aux_xfer()
151 xbuf[i / 4] = nvkm_rd32(device, 0x00e4d0 + base + i); in g94_i2c_aux_xfer()
Dauxgm204.c51 ctrl = nvkm_rd32(device, 0x00d954 + (aux->ch * 0x50)); in gm204_i2c_aux_init()
63 ctrl = nvkm_rd32(device, 0x00d954 + (aux->ch * 0x50)); in gm204_i2c_aux_init()
92 stat = nvkm_rd32(device, 0x00d958 + base); in gm204_i2c_aux_xfer()
107 ctrl = nvkm_rd32(device, 0x00d954 + base); in gm204_i2c_aux_xfer()
126 ctrl = nvkm_rd32(device, 0x00d954 + base); in gm204_i2c_aux_xfer()
151 xbuf[i / 4] = nvkm_rd32(device, 0x00d940 + base + i); in gm204_i2c_aux_xfer()
Dgk104.c31 u32 intr = nvkm_rd32(device, 0x00dc60); in gk104_aux_stat()
32 u32 stat = nvkm_rd32(device, 0x00dc68) & intr, i; in gk104_aux_stat()
46 u32 temp = nvkm_rd32(device, 0x00dc68), i; in gk104_aux_mask()
Dg94.c31 u32 intr = nvkm_rd32(device, 0x00e06c); in g94_aux_stat()
32 u32 stat = nvkm_rd32(device, 0x00e068) & intr, i; in g94_aux_stat()
46 u32 temp = nvkm_rd32(device, 0x00e068), i; in g94_aux_mask()
Dbusnv4e.c53 return !!(nvkm_rd32(device, bus->addr) & 0x00040000); in nv4e_i2c_bus_sense_scl()
61 return !!(nvkm_rd32(device, bus->addr) & 0x00080000); in nv4e_i2c_bus_sense_sda()
Dbusgf119.c53 return !!(nvkm_rd32(device, bus->addr) & 0x00000010); in gf119_i2c_bus_sense_scl()
61 return !!(nvkm_rd32(device, bus->addr) & 0x00000020); in gf119_i2c_bus_sense_sda()
Dbusnv50.c60 return !!(nvkm_rd32(device, bus->addr) & 0x00000001); in nv50_i2c_bus_sense_scl()
68 return !!(nvkm_rd32(device, bus->addr) & 0x00000002); in nv50_i2c_bus_sense_sda()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/
Dxtensa.c63 u32 unk104 = nvkm_rd32(device, base + 0xd04); in nvkm_xtensa_intr()
64 u32 intr = nvkm_rd32(device, base + 0xc20); in nvkm_xtensa_intr()
65 u32 chan = nvkm_rd32(device, base + 0xc28); in nvkm_xtensa_intr()
66 u32 unk10c = nvkm_rd32(device, base + 0xd0c); in nvkm_xtensa_intr()
71 intr = nvkm_rd32(device, base + 0xc20); in nvkm_xtensa_intr()
151 tmp = nvkm_rd32(device, 0x0); in nvkm_xtensa_init()
Dfalcon.c64 u32 dest = nvkm_rd32(device, base + 0x01c); in nvkm_falcon_intr()
65 u32 intr = nvkm_rd32(device, base + 0x008) & dest & ~(dest >> 16); in nvkm_falcon_intr()
66 u32 inst = nvkm_rd32(device, base + 0x050) & 0x3fffffff; in nvkm_falcon_intr()
140 caps = nvkm_rd32(device, base + 0x12c); in nvkm_falcon_oneinit()
145 caps = nvkm_rd32(device, base + 0x108); in nvkm_falcon_oneinit()
171 if (nvkm_rd32(device, base + 0x008) & 0x00000010) in nvkm_falcon_init()
176 if (!(nvkm_rd32(device, base + 0x180) & 0x80000000)) in nvkm_falcon_init()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/cipher/
Dg84.c86 u32 stat = nvkm_rd32(device, 0x102130); in g84_cipher_intr()
87 u32 mthd = nvkm_rd32(device, 0x102190); in g84_cipher_intr()
88 u32 data = nvkm_rd32(device, 0x102194); in g84_cipher_intr()
89 u32 inst = nvkm_rd32(device, 0x102188) & 0x7fffffff; in g84_cipher_intr()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/sec/
Dg98.c47 u32 ssta = nvkm_rd32(device, 0x087040) & 0x0000ffff; in g98_sec_intr()
48 u32 addr = nvkm_rd32(device, 0x087040) >> 16; in g98_sec_intr()
51 u32 data = nvkm_rd32(device, 0x087044); in g98_sec_intr()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/ce/
Dgt215.c48 u32 ssta = nvkm_rd32(device, 0x104040 + base) & 0x0000ffff; in gt215_ce_intr()
49 u32 addr = nvkm_rd32(device, 0x104040 + base) >> 16; in gt215_ce_intr()
52 u32 data = nvkm_rd32(device, 0x104044 + base); in gt215_ce_intr()
Dgk104.c34 u32 stat = nvkm_rd32(device, 0x104908 + base); in gk104_ce_intr()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/mc/
Dgf100.c57 nvkm_rd32(device, 0x000140); in gf100_mc_intr_unarm()
72 u32 intr0 = nvkm_rd32(device, 0x000100); in gf100_mc_intr_mask()
73 u32 intr1 = nvkm_rd32(device, 0x000104); in gf100_mc_intr_mask()
Dnv04.c46 nvkm_rd32(device, 0x000140); in nv04_mc_intr_unarm()
59 return nvkm_rd32(mc->subdev.device, 0x000100); in nv04_mc_intr_mask()
Dnv44.c30 u32 tmp = nvkm_rd32(device, 0x10020c); in nv44_mc_init()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/volt/
Dgk104.c44 div = nvkm_rd32(device, 0x20340); in gk104_volt_get()
45 duty = nvkm_rd32(device, 0x20344); in gk104_volt_get()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/fuse/
Dgm107.c30 return nvkm_rd32(device, 0x021100 + addr); in gm107_fuse_read()
Dnv50.c36 val = nvkm_rd32(device, 0x021000 + addr); in nv50_fuse_read()
Dgf100.c37 val = nvkm_rd32(device, 0x021100 + addr); in gf100_fuse_read()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
Dgf100.c179 if (nvkm_rd32(device, 0x100c80) & 0x00ff0000) in gf100_vm_flush()
188 if (nvkm_rd32(device, 0x100c80) & 0x00008000) in gf100_vm_flush()
Dnv44.c151 if (nvkm_rd32(device, 0x100808) & 0x00000001) in nv44_vm_flush()
200 addr = nvkm_rd32(device, 0x10020c); in nv44_mmu_init()
Dnv41.c77 if (nvkm_rd32(device, 0x100810) & 0x00000020) in nv41_vm_flush()
Dnv50.c193 if (!(nvkm_rd32(device, 0x100c80) & 0x00000001)) in nv50_vm_flush()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/bar/
Dg84.c36 if (!(nvkm_rd32(device, 0x070000) & 0x00000002)) in g84_bar_flush()
Dnv50.c53 if (!(nvkm_rd32(device, 0x00330c) & 0x00000002)) in nv50_bar_flush()
158 if (!(nvkm_rd32(device, 0x100c80) & 0x00000001)) in nv50_bar_init()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/pci/
Dnv04.c30 return nvkm_rd32(device, 0x001800 + addr); in nv04_pci_rd32()
Dnv40.c30 return nvkm_rd32(device, 0x088000 + addr); in nv40_pci_rd32()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/
Dnv04.c82 return nvkm_rd32(device, 0x700000 + iobj->node->offset + offset); in nv04_instobj_rd32()
144 return nvkm_rd32(imem->subdev.device, 0x700000 + addr); in nv04_instmem_rd32()
Dnv40.c163 vs = hweight8((nvkm_rd32(device, 0x001540) & 0x0000ff00) >> 8); in nv40_instmem_oneinit()
Dnv50.c138 data = nvkm_rd32(device, 0x700000 + addr); in nv50_instobj_rd32()
/linux-4.4.14/drivers/gpu/drm/nouveau/include/nvkm/subdev/
Dtimer.h64 if ((nvkm_rd32(d, (addr)) & (mask)) == (data)) \
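
The timer.h match above is the core of the nvkm wait helpers: poll a register with nvkm_rd32() until (value & mask) == data or the timeout elapses. A typical caller is sketched below, assuming the nvkm_msec(device, timeout_ms, body) form used elsewhere in this tree; the register and bit (0x100c80 bit 15, seen in other matches) are chosen only for illustration.

    static int
    wait_bit_clear_sketch(struct nvkm_device *device)
    {
            if (nvkm_msec(device, 2000,
                    if (!(nvkm_rd32(device, 0x100c80) & 0x00008000))
                            break;
            ) < 0)
                    return -EBUSY;      /* bit never cleared within 2000 ms */
            return 0;
    }
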
/linux-4.4.14/drivers/gpu/drm/nouveau/include/nvkm/core/
Ddevice.h215 #define nvkm_rd32(d,a) ioread32_native((d)->pri + (a)) macro
221 u32 _addr = (a), _temp = nvkm_rd32(_device, _addr); \
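
device.h line 215 is the definition every match above resolves to: a 32-bit MMIO read at offset (a) from the device's PRI aperture. Line 221 is the start of the companion read-modify-write helper; expanded, it behaves roughly like the sketch below, where the write via nvkm_wr32() and the return of the old value are inferred from the fragment and from how callers in these results use it.

    static u32
    mask_sketch(struct nvkm_device *device, u32 addr, u32 mask, u32 data)
    {
            u32 temp = nvkm_rd32(device, addr);              /* read old value   */
            nvkm_wr32(device, addr, (temp & ~mask) | data);  /* modify and write */
            return temp;                                     /* return old value */
    }
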
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/core/
Dsubdev.c105 nvkm_rd32(device, 0x000200); in nvkm_subdev_fini()
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/device/
Duser.c174 *data = nvkm_rd32(udev->device, addr); in nvkm_udevice_rd32()