This source file includes following definitions.
- a2xx_me_init
- a2xx_hw_init
- a2xx_recover
- a2xx_destroy
- a2xx_idle
- a2xx_irq
- a2xx_dump
- a2xx_gpu_state_get
- a2xx_gpu_init
1
2
3
4 #include "a2xx_gpu.h"
5 #include "msm_gem.h"
6 #include "msm_mmu.h"
7
8 extern bool hang_debug;
9
10 static void a2xx_dump(struct msm_gpu *gpu);
11 static bool a2xx_idle(struct msm_gpu *gpu);
12
/*
 * Initialize the CP micro-engine by emitting a CP_ME_INIT packet on ring 0,
 * enable protected mode, then wait for the GPU to go idle.
 *
 * Returns true if the GPU reached idle after the init packet.
 */
static bool a2xx_me_init(struct msm_gpu *gpu)
{
	struct msm_ringbuffer *ring = gpu->rb[0];

	OUT_PKT3(ring, CP_ME_INIT, 18);

	/* All fields present (bits 9:0) */
	OUT_RING(ring, 0x000003ff);
	/* Disable/Enable Real-Time Stream processing (present but ignored) */
	OUT_RING(ring, 0x00000000);
	/* Enable (2D <-> 3D) implicit synchronization (present but ignored) */
	OUT_RING(ring, 0x00000000);

	/* Base offsets (register address - 0x2000) of the context register blocks */
	OUT_RING(ring, REG_A2XX_RB_SURFACE_INFO - 0x2000);
	OUT_RING(ring, REG_A2XX_PA_SC_WINDOW_OFFSET - 0x2000);
	OUT_RING(ring, REG_A2XX_VGT_MAX_VTX_INDX - 0x2000);
	OUT_RING(ring, REG_A2XX_SQ_PROGRAM_CNTL - 0x2000);
	OUT_RING(ring, REG_A2XX_RB_DEPTHCONTROL - 0x2000);
	OUT_RING(ring, REG_A2XX_PA_SU_POINT_SIZE - 0x2000);
	OUT_RING(ring, REG_A2XX_PA_SC_LINE_CNTL - 0x2000);
	OUT_RING(ring, REG_A2XX_PA_SU_POLY_OFFSET_FRONT_SCALE - 0x2000);

	/* Vertex and Pixel Shader Start Addresses in instructions
	 * (3 DWORDS per instruction)
	 */
	OUT_RING(ring, 0x80000180);
	/* Maximum Contexts */
	OUT_RING(ring, 0x00000001);
	/* Write Confirm Interval and the CP will wait
	 * wait_interval * 16 clocks between polling
	 */
	OUT_RING(ring, 0x00000000);
	/* NQ and External Memory Swap */
	OUT_RING(ring, 0x00000000);
	/* protected mode error checking (0x20 no reset_fifo, 0x1f2 CP_INTERRUPT) */
	OUT_RING(ring, 0x200001f2);
	/* Disable header dumping and Header dump address */
	OUT_RING(ring, 0x00000000);
	/* Header dump size */
	OUT_RING(ring, 0x00000000);

	/* enable protected mode */
	OUT_PKT3(ring, CP_SET_PROTECTED_MODE, 1);
	OUT_RING(ring, 1);

	gpu->funcs->flush(gpu, ring);
	return a2xx_idle(gpu);
}
59
/*
 * Bring the a2xx hardware up: soft-reset the core, program the MH MMU
 * (gpummu page tables), arbiter and interrupt registers, encode the GMEM
 * size, load the PM4 and PFP microcode, and finally start the CP
 * micro-engine via a2xx_me_init().
 *
 * Returns 0 on success or a negative errno.
 */
static int a2xx_hw_init(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
	dma_addr_t pt_base, tran_error;
	uint32_t *ptr, len;
	int i, ret;

	msm_gpummu_params(gpu->aspace->mmu, &pt_base, &tran_error);

	DBG("%s", gpu->name);

	/* halt the ME before touching ucode RAM */
	gpu_write(gpu, REG_AXXX_CP_ME_CNTL, AXXX_CP_ME_CNTL_HALT);

	/* override power management while initializing */
	gpu_write(gpu, REG_A2XX_RBBM_PM_OVERRIDE1, 0xfffffffe);
	gpu_write(gpu, REG_A2XX_RBBM_PM_OVERRIDE2, 0xffffffff);

	/* soft-reset the whole core and give it time to settle */
	gpu_write(gpu, REG_A2XX_RBBM_SOFT_RESET, 0xffffffff);
	msleep(30);
	gpu_write(gpu, REG_A2XX_RBBM_SOFT_RESET, 0x00000000);

	if (adreno_is_a225(adreno_gpu))
		gpu_write(gpu, REG_A2XX_SQ_FLOW_CONTROL, 0x18000000);

	/* RBBM control (NOTE(review): downstream kgsl uses other values on some chips) */
	gpu_write(gpu, REG_A2XX_RBBM_CNTL, 0x00004442);

	/* MPU: physical range */
	gpu_write(gpu, REG_A2XX_MH_MMU_MPU_BASE, 0x00000000);
	gpu_write(gpu, REG_A2XX_MH_MMU_MPU_END, 0xfffff000);

	/* enable the MMU with translate-within-range behavior for all clients */
	gpu_write(gpu, REG_A2XX_MH_MMU_CONFIG, A2XX_MH_MMU_CONFIG_MMU_ENABLE |
		A2XX_MH_MMU_CONFIG_RB_W_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_CP_W_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_CP_R0_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_CP_R1_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_CP_R2_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_CP_R3_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_CP_R4_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_VGT_R0_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_VGT_R1_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_TC_R_CLNT_BEHAVIOR(BEH_TRAN_RNG) |
		A2XX_MH_MMU_CONFIG_PA_W_CLNT_BEHAVIOR(BEH_TRAN_RNG));

	/* VA range must match the aspace parameters set up for the gpummu */
	gpu_write(gpu, REG_A2XX_MH_MMU_VA_RANGE, SZ_16M |
		A2XX_MH_MMU_VA_RANGE_NUM_64KB_REGIONS(0xfff));

	gpu_write(gpu, REG_A2XX_MH_MMU_PT_BASE, pt_base);
	gpu_write(gpu, REG_A2XX_MH_MMU_TRAN_ERROR, tran_error);

	/* flush any stale translations (TLB + texture cache) */
	gpu_write(gpu, REG_A2XX_MH_MMU_INVALIDATE,
		A2XX_MH_MMU_INVALIDATE_INVALIDATE_ALL |
		A2XX_MH_MMU_INVALIDATE_INVALIDATE_TC);

	gpu_write(gpu, REG_A2XX_MH_ARBITER_CONFIG,
		A2XX_MH_ARBITER_CONFIG_SAME_PAGE_LIMIT(16) |
		A2XX_MH_ARBITER_CONFIG_L1_ARB_ENABLE |
		A2XX_MH_ARBITER_CONFIG_L1_ARB_HOLD_ENABLE |
		A2XX_MH_ARBITER_CONFIG_PAGE_SIZE(1) |
		A2XX_MH_ARBITER_CONFIG_TC_REORDER_ENABLE |
		A2XX_MH_ARBITER_CONFIG_TC_ARB_HOLD_ENABLE |
		A2XX_MH_ARBITER_CONFIG_IN_FLIGHT_LIMIT_ENABLE |
		A2XX_MH_ARBITER_CONFIG_IN_FLIGHT_LIMIT(8) |
		A2XX_MH_ARBITER_CONFIG_CP_CLNT_ENABLE |
		A2XX_MH_ARBITER_CONFIG_VGT_CLNT_ENABLE |
		A2XX_MH_ARBITER_CONFIG_TC_CLNT_ENABLE |
		A2XX_MH_ARBITER_CONFIG_RB_CLNT_ENABLE |
		A2XX_MH_ARBITER_CONFIG_PA_CLNT_ENABLE);
	if (!adreno_is_a20x(adreno_gpu))
		gpu_write(gpu, REG_A2XX_MH_CLNT_INTF_CTRL_CONFIG1, 0x00032f07);

	/* clear shader program bases */
	gpu_write(gpu, REG_A2XX_SQ_VS_PROGRAM, 0x00000000);
	gpu_write(gpu, REG_A2XX_SQ_PS_PROGRAM, 0x00000000);

	/* drop the PM overrides now that the reset sequence is done */
	gpu_write(gpu, REG_A2XX_RBBM_PM_OVERRIDE1, 0);
	gpu_write(gpu, REG_A2XX_RBBM_PM_OVERRIDE2, 0);

	/* NOTE(review): downstream gsl reportedly does not set this */
	gpu_write(gpu, REG_A2XX_RBBM_DEBUG, 0x00080000);

	/* enable error interrupts from RBBM, CP and MH; mask SQ interrupts */
	gpu_write(gpu, REG_A2XX_RBBM_INT_CNTL,
		A2XX_RBBM_INT_CNTL_RDERR_INT_MASK);
	gpu_write(gpu, REG_AXXX_CP_INT_CNTL,
		AXXX_CP_INT_CNTL_T0_PACKET_IN_IB_MASK |
		AXXX_CP_INT_CNTL_OPCODE_ERROR_MASK |
		AXXX_CP_INT_CNTL_PROTECTED_MODE_ERROR_MASK |
		AXXX_CP_INT_CNTL_RESERVED_BIT_ERROR_MASK |
		AXXX_CP_INT_CNTL_IB_ERROR_MASK |
		AXXX_CP_INT_CNTL_IB1_INT_MASK |
		AXXX_CP_INT_CNTL_RB_INT_MASK);
	gpu_write(gpu, REG_A2XX_SQ_INT_CNTL, 0);
	gpu_write(gpu, REG_A2XX_MH_INTERRUPT_MASK,
		A2XX_MH_INTERRUPT_MASK_AXI_READ_ERROR |
		A2XX_MH_INTERRUPT_MASK_AXI_WRITE_ERROR |
		A2XX_MH_INTERRUPT_MASK_MMU_PAGE_FAULT);

	/* encode GMEM size: SZ_16K << i matches gmem for i = 3..5
	 * (128K/256K/512K); i ends up 6 if nothing matched */
	for (i = 3; i <= 5; i++)
		if ((SZ_16K << i) == adreno_gpu->gmem)
			break;
	gpu_write(gpu, REG_A2XX_RB_EDRAM_INFO, i);

	ret = adreno_hw_init(gpu);
	if (ret)
		return ret;

	/*
	 * NOTE: PM4/micro-engine firmware registers look to be the same
	 * for a2xx and a3xx.. we could possibly push that part down to
	 * adreno_gpu base class.  Or push both PM4 and PFP but
	 * parameterize the pfp ucode addr/data registers..
	 */

	/* Load PM4 microcode (word 0 is skipped; word 1 holds the version) */
	ptr = (uint32_t *)(adreno_gpu->fw[ADRENO_FW_PM4]->data);
	len = adreno_gpu->fw[ADRENO_FW_PM4]->size / 4;
	DBG("loading PM4 ucode version: %x", ptr[1]);

	gpu_write(gpu, REG_AXXX_CP_DEBUG,
		AXXX_CP_DEBUG_MIU_128BIT_WRITE_ENABLE);
	gpu_write(gpu, REG_AXXX_CP_ME_RAM_WADDR, 0);
	for (i = 1; i < len; i++)
		gpu_write(gpu, REG_AXXX_CP_ME_RAM_DATA, ptr[i]);

	/* Load PFP microcode (word 0 is skipped; word 5 holds the version) */
	ptr = (uint32_t *)(adreno_gpu->fw[ADRENO_FW_PFP]->data);
	len = adreno_gpu->fw[ADRENO_FW_PFP]->size / 4;
	DBG("loading PFP ucode version: %x", ptr[5]);

	gpu_write(gpu, REG_A2XX_CP_PFP_UCODE_ADDR, 0);
	for (i = 1; i < len; i++)
		gpu_write(gpu, REG_A2XX_CP_PFP_UCODE_DATA, ptr[i]);

	gpu_write(gpu, REG_AXXX_CP_QUEUE_THRESHOLDS, 0x000C0804);

	/* clear ME_HALT to start the micro engine */
	gpu_write(gpu, REG_AXXX_CP_ME_CNTL, 0);

	return a2xx_me_init(gpu) ? 0 : -EINVAL;
}
200
/*
 * Recover from a GPU hang: dump diagnostic state, pulse RBBM_SOFT_RESET,
 * then hand off to the common adreno recovery path.
 */
static void a2xx_recover(struct msm_gpu *gpu)
{
	int i;

	adreno_dump_info(gpu);

	/* dump CP scratch registers for post-mortem debugging */
	for (i = 0; i < 8; i++) {
		printk("CP_SCRATCH_REG%d: %u\n", i,
			gpu_read(gpu, REG_AXXX_CP_SCRATCH_REG0 + i));
	}

	/* dump registers before resetting gpu, if enabled: */
	if (hang_debug)
		a2xx_dump(gpu);

	gpu_write(gpu, REG_A2XX_RBBM_SOFT_RESET, 1);
	gpu_read(gpu, REG_A2XX_RBBM_SOFT_RESET); /* read back to post the write */
	gpu_write(gpu, REG_A2XX_RBBM_SOFT_RESET, 0);
	adreno_recover(gpu);
}
221
222 static void a2xx_destroy(struct msm_gpu *gpu)
223 {
224 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
225 struct a2xx_gpu *a2xx_gpu = to_a2xx_gpu(adreno_gpu);
226
227 DBG("%s", gpu->name);
228
229 adreno_gpu_cleanup(adreno_gpu);
230
231 kfree(a2xx_gpu);
232 }
233
/*
 * Wait for the GPU to become fully idle.
 *
 * Returns true when both the ringbuffer has drained and the RBBM reports
 * the GUI block inactive; false on timeout.
 */
static bool a2xx_idle(struct msm_gpu *gpu)
{
	/* wait for ringbuffer to drain: */
	if (!adreno_idle(gpu, gpu->rb[0]))
		return false;

	/* then wait for GPU to finish: */
	if (spin_until(!(gpu_read(gpu, REG_A2XX_RBBM_STATUS) &
			A2XX_RBBM_STATUS_GUI_ACTIVE))) {
		DRM_ERROR("%s: timeout waiting for GPU to idle!\n", gpu->name);

		/* TODO maybe we need to reset GPU here to recover from hang? */
		return false;
	}

	return true;
}
251
/*
 * Top-level IRQ handler: demultiplex the master interrupt status into the
 * MH (memory hub / MMU), CP (command processor) and RBBM sources, warn on
 * errors, ack each block, then retire completed submits.
 */
static irqreturn_t a2xx_irq(struct msm_gpu *gpu)
{
	uint32_t mstatus, status;

	mstatus = gpu_read(gpu, REG_A2XX_MASTER_INT_SIGNAL);

	if (mstatus & A2XX_MASTER_INT_SIGNAL_MH_INT_STAT) {
		status = gpu_read(gpu, REG_A2XX_MH_INTERRUPT_STATUS);

		dev_warn(gpu->dev->dev, "MH_INT: %08X\n", status);
		dev_warn(gpu->dev->dev, "MMU_PAGE_FAULT: %08X\n",
			gpu_read(gpu, REG_A2XX_MH_MMU_PAGE_FAULT));

		gpu_write(gpu, REG_A2XX_MH_INTERRUPT_CLEAR, status);
	}

	if (mstatus & A2XX_MASTER_INT_SIGNAL_CP_INT_STAT) {
		status = gpu_read(gpu, REG_AXXX_CP_INT_STATUS);

		/* only RB_INT is expected; anything else is an error */
		if (status & ~AXXX_CP_INT_CNTL_RB_INT_MASK)
			dev_warn(gpu->dev->dev, "CP_INT: %08X\n", status);

		gpu_write(gpu, REG_AXXX_CP_INT_ACK, status);
	}

	if (mstatus & A2XX_MASTER_INT_SIGNAL_RBBM_INT_STAT) {
		status = gpu_read(gpu, REG_A2XX_RBBM_INT_STATUS);

		dev_warn(gpu->dev->dev, "RBBM_INT: %08X\n", status);

		gpu_write(gpu, REG_A2XX_RBBM_INT_ACK, status);
	}

	msm_gpu_retire(gpu);

	return IRQ_HANDLED;
}
290
/* a200 register ranges, as inclusive (start, end) pairs, terminated by ~0;
 * presumably consumed by the common adreno register dump/state code via
 * adreno_gpu->registers — confirm against adreno_gpu.c. */
static const unsigned int a200_registers[] = {
	0x0000, 0x0002, 0x0004, 0x000B, 0x003B, 0x003D, 0x0040, 0x0044,
	0x0046, 0x0047, 0x01C0, 0x01C1, 0x01C3, 0x01C8, 0x01D5, 0x01D9,
	0x01DC, 0x01DD, 0x01EA, 0x01EA, 0x01EE, 0x01F3, 0x01F6, 0x01F7,
	0x01FC, 0x01FF, 0x0391, 0x0392, 0x039B, 0x039E, 0x03B2, 0x03B5,
	0x03B7, 0x03B7, 0x03F8, 0x03FB, 0x0440, 0x0440, 0x0443, 0x0444,
	0x044B, 0x044B, 0x044D, 0x044F, 0x0452, 0x0452, 0x0454, 0x045B,
	0x047F, 0x047F, 0x0578, 0x0587, 0x05C9, 0x05C9, 0x05D0, 0x05D0,
	0x0601, 0x0604, 0x0606, 0x0609, 0x060B, 0x060E, 0x0613, 0x0614,
	0x0A29, 0x0A2B, 0x0A2F, 0x0A31, 0x0A40, 0x0A43, 0x0A45, 0x0A45,
	0x0A4E, 0x0A4F, 0x0C2C, 0x0C2C, 0x0C30, 0x0C30, 0x0C38, 0x0C3C,
	0x0C40, 0x0C40, 0x0C44, 0x0C44, 0x0C80, 0x0C86, 0x0C88, 0x0C94,
	0x0C99, 0x0C9A, 0x0CA4, 0x0CA5, 0x0D00, 0x0D03, 0x0D06, 0x0D06,
	0x0D08, 0x0D0B, 0x0D34, 0x0D35, 0x0DAE, 0x0DC1, 0x0DC8, 0x0DD4,
	0x0DD8, 0x0DD9, 0x0E00, 0x0E00, 0x0E02, 0x0E04, 0x0E17, 0x0E1E,
	0x0EC0, 0x0EC9, 0x0ECB, 0x0ECC, 0x0ED0, 0x0ED0, 0x0ED4, 0x0ED7,
	0x0EE0, 0x0EE2, 0x0F01, 0x0F02, 0x0F0C, 0x0F0C, 0x0F0E, 0x0F12,
	0x0F26, 0x0F2A, 0x0F2C, 0x0F2C, 0x2000, 0x2002, 0x2006, 0x200F,
	0x2080, 0x2082, 0x2100, 0x2109, 0x210C, 0x2114, 0x2180, 0x2184,
	0x21F5, 0x21F7, 0x2200, 0x2208, 0x2280, 0x2283, 0x2293, 0x2294,
	0x2300, 0x2308, 0x2312, 0x2312, 0x2316, 0x231D, 0x2324, 0x2326,
	0x2380, 0x2383, 0x2400, 0x2402, 0x2406, 0x240F, 0x2480, 0x2482,
	0x2500, 0x2509, 0x250C, 0x2514, 0x2580, 0x2584, 0x25F5, 0x25F7,
	0x2600, 0x2608, 0x2680, 0x2683, 0x2693, 0x2694, 0x2700, 0x2708,
	0x2712, 0x2712, 0x2716, 0x271D, 0x2724, 0x2726, 0x2780, 0x2783,
	0x4000, 0x4003, 0x4800, 0x4805, 0x4900, 0x4900, 0x4908, 0x4908,
	~0 /* sentinel */
};
319
/* a220 register ranges, as inclusive (start, end) pairs, terminated by ~0 */
static const unsigned int a220_registers[] = {
	0x0000, 0x0002, 0x0004, 0x000B, 0x003B, 0x003D, 0x0040, 0x0044,
	0x0046, 0x0047, 0x01C0, 0x01C1, 0x01C3, 0x01C8, 0x01D5, 0x01D9,
	0x01DC, 0x01DD, 0x01EA, 0x01EA, 0x01EE, 0x01F3, 0x01F6, 0x01F7,
	0x01FC, 0x01FF, 0x0391, 0x0392, 0x039B, 0x039E, 0x03B2, 0x03B5,
	0x03B7, 0x03B7, 0x03F8, 0x03FB, 0x0440, 0x0440, 0x0443, 0x0444,
	0x044B, 0x044B, 0x044D, 0x044F, 0x0452, 0x0452, 0x0454, 0x045B,
	0x047F, 0x047F, 0x0578, 0x0587, 0x05C9, 0x05C9, 0x05D0, 0x05D0,
	0x0601, 0x0604, 0x0606, 0x0609, 0x060B, 0x060E, 0x0613, 0x0614,
	0x0A29, 0x0A2B, 0x0A2F, 0x0A31, 0x0A40, 0x0A40, 0x0A42, 0x0A43,
	0x0A45, 0x0A45, 0x0A4E, 0x0A4F, 0x0C30, 0x0C30, 0x0C38, 0x0C39,
	0x0C3C, 0x0C3C, 0x0C80, 0x0C81, 0x0C88, 0x0C93, 0x0D00, 0x0D03,
	0x0D05, 0x0D06, 0x0D08, 0x0D0B, 0x0D34, 0x0D35, 0x0DAE, 0x0DC1,
	0x0DC8, 0x0DD4, 0x0DD8, 0x0DD9, 0x0E00, 0x0E00, 0x0E02, 0x0E04,
	0x0E17, 0x0E1E, 0x0EC0, 0x0EC9, 0x0ECB, 0x0ECC, 0x0ED0, 0x0ED0,
	0x0ED4, 0x0ED7, 0x0EE0, 0x0EE2, 0x0F01, 0x0F02, 0x2000, 0x2002,
	0x2006, 0x200F, 0x2080, 0x2082, 0x2100, 0x2102, 0x2104, 0x2109,
	0x210C, 0x2114, 0x2180, 0x2184, 0x21F5, 0x21F7, 0x2200, 0x2202,
	0x2204, 0x2204, 0x2208, 0x2208, 0x2280, 0x2282, 0x2294, 0x2294,
	0x2300, 0x2308, 0x2309, 0x230A, 0x2312, 0x2312, 0x2316, 0x2316,
	0x2318, 0x231D, 0x2324, 0x2326, 0x2380, 0x2383, 0x2400, 0x2402,
	0x2406, 0x240F, 0x2480, 0x2482, 0x2500, 0x2502, 0x2504, 0x2509,
	0x250C, 0x2514, 0x2580, 0x2584, 0x25F5, 0x25F7, 0x2600, 0x2602,
	0x2604, 0x2606, 0x2608, 0x2608, 0x2680, 0x2682, 0x2694, 0x2694,
	0x2700, 0x2708, 0x2712, 0x2712, 0x2716, 0x2716, 0x2718, 0x271D,
	0x2724, 0x2726, 0x2780, 0x2783, 0x4000, 0x4003, 0x4800, 0x4805,
	0x4900, 0x4900, 0x4908, 0x4908,
	~0 /* sentinel */
};
349
/* a225 register ranges, as inclusive (start, end) pairs, terminated by ~0 */
static const unsigned int a225_registers[] = {
	0x0000, 0x0002, 0x0004, 0x000B, 0x003B, 0x003D, 0x0040, 0x0044,
	0x0046, 0x0047, 0x013C, 0x013C, 0x0140, 0x014F, 0x01C0, 0x01C1,
	0x01C3, 0x01C8, 0x01D5, 0x01D9, 0x01DC, 0x01DD, 0x01EA, 0x01EA,
	0x01EE, 0x01F3, 0x01F6, 0x01F7, 0x01FC, 0x01FF, 0x0391, 0x0392,
	0x039B, 0x039E, 0x03B2, 0x03B5, 0x03B7, 0x03B7, 0x03F8, 0x03FB,
	0x0440, 0x0440, 0x0443, 0x0444, 0x044B, 0x044B, 0x044D, 0x044F,
	0x0452, 0x0452, 0x0454, 0x045B, 0x047F, 0x047F, 0x0578, 0x0587,
	0x05C9, 0x05C9, 0x05D0, 0x05D0, 0x0601, 0x0604, 0x0606, 0x0609,
	0x060B, 0x060E, 0x0613, 0x0614, 0x0A29, 0x0A2B, 0x0A2F, 0x0A31,
	0x0A40, 0x0A40, 0x0A42, 0x0A43, 0x0A45, 0x0A45, 0x0A4E, 0x0A4F,
	0x0C01, 0x0C1D, 0x0C30, 0x0C30, 0x0C38, 0x0C39, 0x0C3C, 0x0C3C,
	0x0C80, 0x0C81, 0x0C88, 0x0C93, 0x0D00, 0x0D03, 0x0D05, 0x0D06,
	0x0D08, 0x0D0B, 0x0D34, 0x0D35, 0x0DAE, 0x0DC1, 0x0DC8, 0x0DD4,
	0x0DD8, 0x0DD9, 0x0E00, 0x0E00, 0x0E02, 0x0E04, 0x0E17, 0x0E1E,
	0x0EC0, 0x0EC9, 0x0ECB, 0x0ECC, 0x0ED0, 0x0ED0, 0x0ED4, 0x0ED7,
	0x0EE0, 0x0EE2, 0x0F01, 0x0F02, 0x2000, 0x200F, 0x2080, 0x2082,
	0x2100, 0x2109, 0x210C, 0x2114, 0x2180, 0x2184, 0x21F5, 0x21F7,
	0x2200, 0x2202, 0x2204, 0x2206, 0x2208, 0x2210, 0x2220, 0x2222,
	0x2280, 0x2282, 0x2294, 0x2294, 0x2297, 0x2297, 0x2300, 0x230A,
	0x2312, 0x2312, 0x2315, 0x2316, 0x2318, 0x231D, 0x2324, 0x2326,
	0x2340, 0x2357, 0x2360, 0x2360, 0x2380, 0x2383, 0x2400, 0x240F,
	0x2480, 0x2482, 0x2500, 0x2509, 0x250C, 0x2514, 0x2580, 0x2584,
	0x25F5, 0x25F7, 0x2600, 0x2602, 0x2604, 0x2606, 0x2608, 0x2610,
	0x2620, 0x2622, 0x2680, 0x2682, 0x2694, 0x2694, 0x2697, 0x2697,
	0x2700, 0x270A, 0x2712, 0x2712, 0x2715, 0x2716, 0x2718, 0x271D,
	0x2724, 0x2726, 0x2740, 0x2757, 0x2760, 0x2760, 0x2780, 0x2783,
	0x4000, 0x4003, 0x4800, 0x4806, 0x4808, 0x4808, 0x4900, 0x4900,
	0x4908, 0x4908,
	~0 /* sentinel */
};
381
382
383 static void a2xx_dump(struct msm_gpu *gpu)
384 {
385 printk("status: %08x\n",
386 gpu_read(gpu, REG_A2XX_RBBM_STATUS));
387 adreno_dump(gpu);
388 }
389
390 static struct msm_gpu_state *a2xx_gpu_state_get(struct msm_gpu *gpu)
391 {
392 struct msm_gpu_state *state = kzalloc(sizeof(*state), GFP_KERNEL);
393
394 if (!state)
395 return ERR_PTR(-ENOMEM);
396
397 adreno_gpu_state_get(gpu, state);
398
399 state->rbbm_status = gpu_read(gpu, REG_A2XX_RBBM_STATUS);
400
401 return state;
402 }
403
404
/* Map generic adreno register IDs to the AXXX CP register offsets; the
 * *_HI entries are skipped since a2xx ringbuffer addresses are 32-bit. */
static const unsigned int a2xx_register_offsets[REG_ADRENO_REGISTER_MAX] = {
	REG_ADRENO_DEFINE(REG_ADRENO_CP_RB_BASE, REG_AXXX_CP_RB_BASE),
	REG_ADRENO_SKIP(REG_ADRENO_CP_RB_BASE_HI),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_RB_RPTR_ADDR, REG_AXXX_CP_RB_RPTR_ADDR),
	REG_ADRENO_SKIP(REG_ADRENO_CP_RB_RPTR_ADDR_HI),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_RB_RPTR, REG_AXXX_CP_RB_RPTR),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_RB_WPTR, REG_AXXX_CP_RB_WPTR),
	REG_ADRENO_DEFINE(REG_ADRENO_CP_RB_CNTL, REG_AXXX_CP_RB_CNTL),
};
414
/* GPU function table: wires the a2xx-specific hooks into the common
 * adreno/msm framework, falling back to the shared adreno_* helpers
 * where no a2xx-specific behavior is needed. */
static const struct adreno_gpu_funcs funcs = {
	.base = {
		.get_param = adreno_get_param,
		.hw_init = a2xx_hw_init,
		.pm_suspend = msm_gpu_pm_suspend,
		.pm_resume = msm_gpu_pm_resume,
		.recover = a2xx_recover,
		.submit = adreno_submit,
		.flush = adreno_flush,
		.active_ring = adreno_active_ring,
		.irq = a2xx_irq,
		.destroy = a2xx_destroy,
#if defined(CONFIG_DEBUG_FS) || defined(CONFIG_DEV_COREDUMP)
		.show = adreno_show,
#endif
		.gpu_state_get = a2xx_gpu_state_get,
		.gpu_state_put = adreno_gpu_state_put,
	},
};
434
static const struct msm_gpu_perfcntr perfcntrs[] = {
	/* no perf counters exposed for a2xx yet */
};
438
439 struct msm_gpu *a2xx_gpu_init(struct drm_device *dev)
440 {
441 struct a2xx_gpu *a2xx_gpu = NULL;
442 struct adreno_gpu *adreno_gpu;
443 struct msm_gpu *gpu;
444 struct msm_drm_private *priv = dev->dev_private;
445 struct platform_device *pdev = priv->gpu_pdev;
446 int ret;
447
448 if (!pdev) {
449 dev_err(dev->dev, "no a2xx device\n");
450 ret = -ENXIO;
451 goto fail;
452 }
453
454 a2xx_gpu = kzalloc(sizeof(*a2xx_gpu), GFP_KERNEL);
455 if (!a2xx_gpu) {
456 ret = -ENOMEM;
457 goto fail;
458 }
459
460 adreno_gpu = &a2xx_gpu->base;
461 gpu = &adreno_gpu->base;
462
463 gpu->perfcntrs = perfcntrs;
464 gpu->num_perfcntrs = ARRAY_SIZE(perfcntrs);
465
466 if (adreno_is_a20x(adreno_gpu))
467 adreno_gpu->registers = a200_registers;
468 else if (adreno_is_a225(adreno_gpu))
469 adreno_gpu->registers = a225_registers;
470 else
471 adreno_gpu->registers = a220_registers;
472
473 adreno_gpu->reg_offsets = a2xx_register_offsets;
474
475 ret = adreno_gpu_init(dev, pdev, adreno_gpu, &funcs, 1);
476 if (ret)
477 goto fail;
478
479 if (!gpu->aspace) {
480 dev_err(dev->dev, "No memory protection without MMU\n");
481 ret = -ENXIO;
482 goto fail;
483 }
484
485 return gpu;
486
487 fail:
488 if (a2xx_gpu)
489 a2xx_destroy(&a2xx_gpu->base.base);
490
491 return ERR_PTR(ret);
492 }