This source file includes the following definitions:
- cfg_lf
- cfg_qp
- cfg_parts
- cfg_tap
- cfg_ref
- cfg_buffers
- rk3399_vpu_vp8_dec_run
#include <media/v4l2-mem2mem.h>
#include <media/vp8-ctrls.h>

#include "hantro_hw.h"
#include "hantro.h"
#include "hantro_g1_regs.h"

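/* VDPU register offsets and bit fields used by the VP8 decoder. */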
#define VDPU_REG_DEC_CTRL0			0x0c8
#define VDPU_REG_STREAM_LEN			0x0cc
#define VDPU_REG_DEC_FORMAT			0x0d4
#define VDPU_REG_DEC_CTRL0_DEC_MODE(x)		(((x) & 0xf) << 0)
#define VDPU_REG_DATA_ENDIAN			0x0d8
#define VDPU_REG_CONFIG_DEC_STRENDIAN_E		BIT(5)
#define VDPU_REG_CONFIG_DEC_STRSWAP32_E		BIT(4)
#define VDPU_REG_CONFIG_DEC_OUTSWAP32_E		BIT(3)
#define VDPU_REG_CONFIG_DEC_INSWAP32_E		BIT(2)
#define VDPU_REG_CONFIG_DEC_OUT_ENDIAN		BIT(1)
#define VDPU_REG_CONFIG_DEC_IN_ENDIAN		BIT(0)
#define VDPU_REG_AXI_CTRL			0x0e0
#define VDPU_REG_CONFIG_DEC_MAX_BURST(x)	(((x) & 0x1f) << 16)
#define VDPU_REG_EN_FLAGS			0x0e4
#define VDPU_REG_DEC_CTRL0_PIC_INTER_E		BIT(14)
#define VDPU_REG_CONFIG_DEC_TIMEOUT_E		BIT(5)
#define VDPU_REG_CONFIG_DEC_CLK_GATE_E		BIT(4)
#define VDPU_REG_PRED_FLT			0x0ec
#define VDPU_REG_ADDR_QTABLE			0x0f4
#define VDPU_REG_ADDR_DST			0x0fc
#define VDPU_REG_ADDR_STR			0x100
#define VDPU_REG_VP8_PIC_MB_SIZE		0x1e0
#define VDPU_REG_VP8_DCT_START_BIT		0x1e4
#define VDPU_REG_DEC_CTRL4_VC1_HEIGHT_EXT	BIT(13)
#define VDPU_REG_DEC_CTRL4_BILIN_MC_E		BIT(12)
#define VDPU_REG_VP8_CTRL0			0x1e8
#define VDPU_REG_VP8_DATA_VAL			0x1f0
#define VDPU_REG_PRED_FLT7			0x1f4
#define VDPU_REG_PRED_FLT8			0x1f8
#define VDPU_REG_PRED_FLT9			0x1fc
#define VDPU_REG_PRED_FLT10			0x200
#define VDPU_REG_FILTER_LEVEL			0x204
#define VDPU_REG_VP8_QUANTER0			0x208
#define VDPU_REG_VP8_ADDR_REF0			0x20c
#define VDPU_REG_FILTER_MB_ADJ			0x210
#define VDPU_REG_REF_PIC_FILT_TYPE_E		BIT(31)
#define VDPU_REG_REF_PIC_FILT_SHARPNESS(x)	(((x) & 0x7) << 28)
#define VDPU_REG_FILTER_REF_ADJ			0x214
#define VDPU_REG_VP8_ADDR_REF2_5(i)		(0x218 + ((i) * 0x4))
#define VDPU_REG_VP8_GREF_SIGN_BIAS		BIT(0)
#define VDPU_REG_VP8_AREF_SIGN_BIAS		BIT(0)
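/*
 * DCT partition base address registers. Index 5 (offset 0x244) is skipped
 * because that offset is VDPU_REG_VP8_ADDR_CTRL_PART, the control partition
 * address register.
 */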
#define VDPU_REG_VP8_DCT_BASE(i) \
		(0x230 + ((((i) < 5) ? (i) : ((i) + 1)) * 0x4))
#define VDPU_REG_VP8_ADDR_CTRL_PART		0x244
#define VDPU_REG_VP8_SEGMENT_VAL		0x254
#define VDPU_REG_FWD_PIC1_SEGMENT_BASE(x)	((x) << 0)
#define VDPU_REG_FWD_PIC1_SEGMENT_UPD_E		BIT(1)
#define VDPU_REG_FWD_PIC1_SEGMENT_E		BIT(0)
#define VDPU_REG_VP8_DCT_START_BIT2		0x258
#define VDPU_REG_VP8_QUANTER1			0x25c
#define VDPU_REG_VP8_QUANTER2			0x260
#define VDPU_REG_PRED_FLT1			0x264
#define VDPU_REG_PRED_FLT2			0x268
#define VDPU_REG_PRED_FLT3			0x26c
#define VDPU_REG_PRED_FLT4			0x270
#define VDPU_REG_PRED_FLT5			0x274
#define VDPU_REG_PRED_FLT6			0x278

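/*
 * Base-address register for each DCT (token) partition; partition 0 shares
 * the stream base register VDPU_REG_ADDR_STR.
 */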
static const struct hantro_reg vp8_dec_dct_base[8] = {
	{ VDPU_REG_ADDR_STR, 0, 0xffffffff },
	{ VDPU_REG_VP8_DCT_BASE(0), 0, 0xffffffff },
	{ VDPU_REG_VP8_DCT_BASE(1), 0, 0xffffffff },
	{ VDPU_REG_VP8_DCT_BASE(2), 0, 0xffffffff },
	{ VDPU_REG_VP8_DCT_BASE(3), 0, 0xffffffff },
	{ VDPU_REG_VP8_DCT_BASE(4), 0, 0xffffffff },
	{ VDPU_REG_VP8_DCT_BASE(5), 0, 0xffffffff },
	{ VDPU_REG_VP8_DCT_BASE(6), 0, 0xffffffff },
};

static const struct hantro_reg vp8_dec_lf_level[4] = {
	{ VDPU_REG_FILTER_LEVEL, 18, 0x3f },
	{ VDPU_REG_FILTER_LEVEL, 12, 0x3f },
	{ VDPU_REG_FILTER_LEVEL, 6, 0x3f },
	{ VDPU_REG_FILTER_LEVEL, 0, 0x3f },
};

static const struct hantro_reg vp8_dec_mb_adj[4] = {
	{ VDPU_REG_FILTER_MB_ADJ, 21, 0x7f },
	{ VDPU_REG_FILTER_MB_ADJ, 14, 0x7f },
	{ VDPU_REG_FILTER_MB_ADJ, 7, 0x7f },
	{ VDPU_REG_FILTER_MB_ADJ, 0, 0x7f },
};

static const struct hantro_reg vp8_dec_ref_adj[4] = {
	{ VDPU_REG_FILTER_REF_ADJ, 21, 0x7f },
	{ VDPU_REG_FILTER_REF_ADJ, 14, 0x7f },
	{ VDPU_REG_FILTER_REF_ADJ, 7, 0x7f },
	{ VDPU_REG_FILTER_REF_ADJ, 0, 0x7f },
};

static const struct hantro_reg vp8_dec_quant[4] = {
	{ VDPU_REG_VP8_QUANTER0, 11, 0x7ff },
	{ VDPU_REG_VP8_QUANTER0, 0, 0x7ff },
	{ VDPU_REG_VP8_QUANTER1, 11, 0x7ff },
	{ VDPU_REG_VP8_QUANTER1, 0, 0x7ff },
};

static const struct hantro_reg vp8_dec_quant_delta[5] = {
	{ VDPU_REG_VP8_QUANTER0, 27, 0x1f },
	{ VDPU_REG_VP8_QUANTER0, 22, 0x1f },
	{ VDPU_REG_VP8_QUANTER1, 27, 0x1f },
	{ VDPU_REG_VP8_QUANTER1, 22, 0x1f },
	{ VDPU_REG_VP8_QUANTER2, 27, 0x1f },
};

static const struct hantro_reg vp8_dec_dct_start_bits[8] = {
	{ VDPU_REG_VP8_CTRL0, 26, 0x3f },
	{ VDPU_REG_VP8_DCT_START_BIT, 26, 0x3f },
	{ VDPU_REG_VP8_DCT_START_BIT, 20, 0x3f },
	{ VDPU_REG_VP8_DCT_START_BIT2, 24, 0x3f },
	{ VDPU_REG_VP8_DCT_START_BIT2, 18, 0x3f },
	{ VDPU_REG_VP8_DCT_START_BIT2, 12, 0x3f },
	{ VDPU_REG_VP8_DCT_START_BIT2, 6, 0x3f },
	{ VDPU_REG_VP8_DCT_START_BIT2, 0, 0x3f },
};

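/*
 * Sub-pixel prediction filter tap register fields. Entries with a zero
 * register base have no corresponding register field and are skipped by
 * cfg_tap().
 */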
static const struct hantro_reg vp8_dec_pred_bc_tap[8][6] = {
	{
		{ 0, 0, 0},
		{ VDPU_REG_PRED_FLT, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT1, 22, 0x3ff },
		{ 0, 0, 0},
	}, {
		{ 0, 0, 0},
		{ VDPU_REG_PRED_FLT1, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT1, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT2, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT2, 12, 0x3ff },
		{ 0, 0, 0},
	}, {
		{ VDPU_REG_PRED_FLT10, 10, 0x3 },
		{ VDPU_REG_PRED_FLT2, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT3, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT3, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT3, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT10, 8, 0x3},
	}, {
		{ 0, 0, 0},
		{ VDPU_REG_PRED_FLT4, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT4, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT4, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT5, 22, 0x3ff },
		{ 0, 0, 0},
	}, {
		{ VDPU_REG_PRED_FLT10, 6, 0x3 },
		{ VDPU_REG_PRED_FLT5, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT5, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT6, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT6, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT10, 4, 0x3 },
	}, {
		{ 0, 0, 0},
		{ VDPU_REG_PRED_FLT6, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT7, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT7, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT7, 2, 0x3ff },
		{ 0, 0, 0},
	}, {
		{ VDPU_REG_PRED_FLT10, 2, 0x3 },
		{ VDPU_REG_PRED_FLT8, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT8, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT8, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT9, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT10, 0, 0x3 },
	}, {
		{ 0, 0, 0},
		{ VDPU_REG_PRED_FLT9, 12, 0x3ff },
		{ VDPU_REG_PRED_FLT9, 2, 0x3ff },
		{ VDPU_REG_PRED_FLT10, 22, 0x3ff },
		{ VDPU_REG_PRED_FLT10, 12, 0x3ff },
		{ 0, 0, 0},
	},
};

static const struct hantro_reg vp8_dec_mb_start_bit = {
	.base = VDPU_REG_VP8_CTRL0,
	.shift = 18,
	.mask = 0x3f
};

static const struct hantro_reg vp8_dec_mb_aligned_data_len = {
	.base = VDPU_REG_VP8_DATA_VAL,
	.shift = 0,
	.mask = 0x3fffff
};

static const struct hantro_reg vp8_dec_num_dct_partitions = {
	.base = VDPU_REG_VP8_DATA_VAL,
	.shift = 24,
	.mask = 0xf
};

static const struct hantro_reg vp8_dec_stream_len = {
	.base = VDPU_REG_STREAM_LEN,
	.shift = 0,
	.mask = 0xffffff
};

static const struct hantro_reg vp8_dec_mb_width = {
	.base = VDPU_REG_VP8_PIC_MB_SIZE,
	.shift = 23,
	.mask = 0x1ff
};

static const struct hantro_reg vp8_dec_mb_height = {
	.base = VDPU_REG_VP8_PIC_MB_SIZE,
	.shift = 11,
	.mask = 0xff
};

static const struct hantro_reg vp8_dec_mb_width_ext = {
	.base = VDPU_REG_VP8_PIC_MB_SIZE,
	.shift = 3,
	.mask = 0x7
};

static const struct hantro_reg vp8_dec_mb_height_ext = {
	.base = VDPU_REG_VP8_PIC_MB_SIZE,
	.shift = 0,
	.mask = 0x7
};

static const struct hantro_reg vp8_dec_bool_range = {
	.base = VDPU_REG_VP8_CTRL0,
	.shift = 0,
	.mask = 0xff
};

static const struct hantro_reg vp8_dec_bool_value = {
	.base = VDPU_REG_VP8_CTRL0,
	.shift = 8,
	.mask = 0xff
};

static const struct hantro_reg vp8_dec_filter_disable = {
	.base = VDPU_REG_DEC_CTRL0,
	.shift = 8,
	.mask = 1
};

static const struct hantro_reg vp8_dec_skip_mode = {
	.base = VDPU_REG_DEC_CTRL0,
	.shift = 9,
	.mask = 1
};

static const struct hantro_reg vp8_dec_start_dec = {
	.base = VDPU_REG_EN_FLAGS,
	.shift = 0,
	.mask = 1
};

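/*
 * Configure the loop filter: level (per segment when segmentation is
 * enabled), sharpness, and the macroblock-mode/reference-frame delta
 * adjustments.
 */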
static void cfg_lf(struct hantro_ctx *ctx,
		   const struct v4l2_ctrl_vp8_frame_header *hdr)
{
	const struct v4l2_vp8_segment_header *seg = &hdr->segment_header;
	const struct v4l2_vp8_loopfilter_header *lf = &hdr->lf_header;
	struct hantro_dev *vpu = ctx->dev;
	unsigned int i;
	u32 reg;

	if (!(seg->flags & V4L2_VP8_SEGMENT_HEADER_FLAG_ENABLED)) {
		hantro_reg_write(vpu, &vp8_dec_lf_level[0], lf->level);
	} else if (seg->flags & V4L2_VP8_SEGMENT_HEADER_FLAG_DELTA_VALUE_MODE) {
		for (i = 0; i < 4; i++) {
			u32 lf_level = clamp(lf->level + seg->lf_update[i],
					     0, 63);

			hantro_reg_write(vpu, &vp8_dec_lf_level[i], lf_level);
		}
	} else {
		for (i = 0; i < 4; i++)
			hantro_reg_write(vpu, &vp8_dec_lf_level[i],
					 seg->lf_update[i]);
	}

	reg = VDPU_REG_REF_PIC_FILT_SHARPNESS(lf->sharpness_level);
	if (lf->flags & V4L2_VP8_LF_FILTER_TYPE_SIMPLE)
		reg |= VDPU_REG_REF_PIC_FILT_TYPE_E;
	vdpu_write_relaxed(vpu, reg, VDPU_REG_FILTER_MB_ADJ);

	if (lf->flags & V4L2_VP8_LF_HEADER_ADJ_ENABLE) {
		for (i = 0; i < 4; i++) {
			hantro_reg_write(vpu, &vp8_dec_mb_adj[i],
					 lf->mb_mode_delta[i]);
			hantro_reg_write(vpu, &vp8_dec_ref_adj[i],
					 lf->ref_frm_delta[i]);
		}
	}
}

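/*
 * Configure the quantizer: the base Y AC index (per segment when
 * segmentation is enabled) and the DC/AC delta values.
 */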
static void cfg_qp(struct hantro_ctx *ctx,
		   const struct v4l2_ctrl_vp8_frame_header *hdr)
{
	const struct v4l2_vp8_quantization_header *q = &hdr->quant_header;
	const struct v4l2_vp8_segment_header *seg = &hdr->segment_header;
	struct hantro_dev *vpu = ctx->dev;
	unsigned int i;

	if (!(seg->flags & V4L2_VP8_SEGMENT_HEADER_FLAG_ENABLED)) {
		hantro_reg_write(vpu, &vp8_dec_quant[0], q->y_ac_qi);
	} else if (seg->flags & V4L2_VP8_SEGMENT_HEADER_FLAG_DELTA_VALUE_MODE) {
		for (i = 0; i < 4; i++) {
			u32 quant = clamp(q->y_ac_qi + seg->quant_update[i],
					  0, 127);

			hantro_reg_write(vpu, &vp8_dec_quant[i], quant);
		}
	} else {
		for (i = 0; i < 4; i++)
			hantro_reg_write(vpu, &vp8_dec_quant[i],
					 seg->quant_update[i]);
	}

	hantro_reg_write(vpu, &vp8_dec_quant_delta[0], q->y_dc_delta);
	hantro_reg_write(vpu, &vp8_dec_quant_delta[1], q->y2_dc_delta);
	hantro_reg_write(vpu, &vp8_dec_quant_delta[2], q->y2_ac_delta);
	hantro_reg_write(vpu, &vp8_dec_quant_delta[3], q->uv_dc_delta);
	hantro_reg_write(vpu, &vp8_dec_quant_delta[4], q->uv_ac_delta);
}

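/*
 * Program the location of the macroblock (control) partition and of each
 * DCT (token) partition inside the source bitstream buffer.
 */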
static void cfg_parts(struct hantro_ctx *ctx,
		      const struct v4l2_ctrl_vp8_frame_header *hdr)
{
	struct hantro_dev *vpu = ctx->dev;
	struct vb2_v4l2_buffer *vb2_src;
	u32 first_part_offset = VP8_FRAME_IS_KEY_FRAME(hdr) ? 10 : 3;
	u32 mb_size, mb_offset_bytes, mb_offset_bits, mb_start_bits;
	u32 dct_size_part_size, dct_part_offset;
	dma_addr_t src_dma;
	u32 dct_part_total_len = 0;
	u32 count = 0;
	unsigned int i;

	vb2_src = hantro_get_src_buf(ctx);
	src_dma = vb2_dma_contig_plane_dma_addr(&vb2_src->vb2_buf, 0);

	/*
	 * Calculate the byte and bit offsets of the macroblock (control)
	 * partition data within the source buffer, and the length of that
	 * data measured from its aligned base address.
	 */
	mb_offset_bits = first_part_offset * 8 +
			 hdr->first_part_header_bits + 8;
	mb_offset_bytes = mb_offset_bits / 8;
	mb_start_bits = mb_offset_bits -
			(mb_offset_bytes & (~DEC_8190_ALIGN_MASK)) * 8;
	mb_size = hdr->first_part_size -
		  (mb_offset_bytes - first_part_offset) +
		  (mb_offset_bytes & DEC_8190_ALIGN_MASK);

	/* Macroblock data aligned base address */
	vdpu_write_relaxed(vpu, (mb_offset_bytes & (~DEC_8190_ALIGN_MASK)) +
				src_dma, VDPU_REG_VP8_ADDR_CTRL_PART);
	hantro_reg_write(vpu, &vp8_dec_mb_start_bit, mb_start_bits);
	hantro_reg_write(vpu, &vp8_dec_mb_aligned_data_len, mb_size);

	/*
	 * Calculate the total length of the DCT partition data: the sizes of
	 * all DCT partitions, the 3-byte size field stored for every
	 * partition except the last, and the unaligned remainder of the
	 * partition offset.
	 */
	dct_size_part_size = (hdr->num_dct_parts - 1) * 3;
	dct_part_offset = first_part_offset + hdr->first_part_size;
	for (i = 0; i < hdr->num_dct_parts; i++)
		dct_part_total_len += hdr->dct_part_sizes[i];
	dct_part_total_len += dct_size_part_size;
	dct_part_total_len += (dct_part_offset & DEC_8190_ALIGN_MASK);

	/* Number of DCT partitions */
	hantro_reg_write(vpu, &vp8_dec_num_dct_partitions,
			 hdr->num_dct_parts - 1);

	/* Total length of the DCT partition data */
	hantro_reg_write(vpu, &vp8_dec_stream_len, dct_part_total_len);

	/* Base address and start bit of each DCT partition */
	for (i = 0; i < hdr->num_dct_parts; i++) {
		u32 byte_offset = dct_part_offset + dct_size_part_size + count;
		u32 base_addr = byte_offset + src_dma;

		hantro_reg_write(vpu, &vp8_dec_dct_base[i],
				 base_addr & (~DEC_8190_ALIGN_MASK));

		hantro_reg_write(vpu, &vp8_dec_dct_start_bits[i],
				 (byte_offset & DEC_8190_ALIGN_MASK) * 8);

		count += hdr->dct_part_sizes[i];
	}
}

/*
 * Set the prediction filter tap coefficients. They are only needed when the
 * full six-tap interpolation filter is in use ((version & 0x03) == 0);
 * otherwise the hardware uses bilinear filtering and cfg_tap() returns early.
 */
static void cfg_tap(struct hantro_ctx *ctx,
		    const struct v4l2_ctrl_vp8_frame_header *hdr)
{
	struct hantro_dev *vpu = ctx->dev;
	int i, j;

	if ((hdr->version & 0x03) != 0)
		return;

	for (i = 0; i < 8; i++) {
		for (j = 0; j < 6; j++) {
			if (vp8_dec_pred_bc_tap[i][j].base != 0)
				hantro_reg_write(vpu,
						 &vp8_dec_pred_bc_tap[i][j],
						 hantro_vp8_dec_mc_filter[i][j]);
		}
	}
}

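/*
 * Program the last, golden and altref reference frame addresses, falling
 * back to the destination buffer when a reference is missing, and apply the
 * sign-bias flags.
 */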
static void cfg_ref(struct hantro_ctx *ctx,
		    const struct v4l2_ctrl_vp8_frame_header *hdr)
{
	struct hantro_dev *vpu = ctx->dev;
	struct vb2_v4l2_buffer *vb2_dst;
	struct vb2_queue *cap_q;
	dma_addr_t ref;

	cap_q = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
	vb2_dst = hantro_get_dst_buf(ctx);

	ref = hantro_get_ref(cap_q, hdr->last_frame_ts);
	if (!ref)
		ref = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0);
	vdpu_write_relaxed(vpu, ref, VDPU_REG_VP8_ADDR_REF0);

	ref = hantro_get_ref(cap_q, hdr->golden_frame_ts);
	WARN_ON(!ref && hdr->golden_frame_ts);
	if (!ref)
		ref = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0);
	if (hdr->flags & V4L2_VP8_FRAME_HEADER_FLAG_SIGN_BIAS_GOLDEN)
		ref |= VDPU_REG_VP8_GREF_SIGN_BIAS;
	vdpu_write_relaxed(vpu, ref, VDPU_REG_VP8_ADDR_REF2_5(2));

	ref = hantro_get_ref(cap_q, hdr->alt_frame_ts);
	WARN_ON(!ref && hdr->alt_frame_ts);
	if (!ref)
		ref = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0);
	if (hdr->flags & V4L2_VP8_FRAME_HEADER_FLAG_SIGN_BIAS_ALT)
		ref |= VDPU_REG_VP8_AREF_SIGN_BIAS;
	vdpu_write_relaxed(vpu, ref, VDPU_REG_VP8_ADDR_REF2_5(3));
}

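/*
 * Program the auxiliary buffer addresses: probability table, segment map and
 * the destination frame buffer.
 */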
static void cfg_buffers(struct hantro_ctx *ctx,
			const struct v4l2_ctrl_vp8_frame_header *hdr)
{
	const struct v4l2_vp8_segment_header *seg = &hdr->segment_header;
	struct hantro_dev *vpu = ctx->dev;
	struct vb2_v4l2_buffer *vb2_dst;
	dma_addr_t dst_dma;
	u32 reg;

	vb2_dst = hantro_get_dst_buf(ctx);

	/* Set probability table buffer address */
	vdpu_write_relaxed(vpu, ctx->vp8_dec.prob_tbl.dma,
			   VDPU_REG_ADDR_QTABLE);

	/* Set segment map address */
	reg = VDPU_REG_FWD_PIC1_SEGMENT_BASE(ctx->vp8_dec.segment_map.dma);
	if (seg->flags & V4L2_VP8_SEGMENT_HEADER_FLAG_ENABLED) {
		reg |= VDPU_REG_FWD_PIC1_SEGMENT_E;
		if (seg->flags & V4L2_VP8_SEGMENT_HEADER_FLAG_UPDATE_MAP)
			reg |= VDPU_REG_FWD_PIC1_SEGMENT_UPD_E;
	}
	vdpu_write_relaxed(vpu, reg, VDPU_REG_VP8_SEGMENT_VAL);

	/* Set destination frame buffer address */
	dst_dma = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0);
	vdpu_write_relaxed(vpu, dst_dma, VDPU_REG_ADDR_DST);
}

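/*
 * .run codec-ops callback for VP8 decoding on the RK3399 VDPU: programs all
 * registers for one frame and kicks off the hardware.
 */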
void rk3399_vpu_vp8_dec_run(struct hantro_ctx *ctx)
{
	const struct v4l2_ctrl_vp8_frame_header *hdr;
	struct hantro_dev *vpu = ctx->dev;
	size_t height = ctx->dst_fmt.height;
	size_t width = ctx->dst_fmt.width;
	u32 mb_width, mb_height;
	u32 reg;

	hantro_prepare_run(ctx);

	hdr = hantro_get_ctrl(ctx, V4L2_CID_MPEG_VIDEO_VP8_FRAME_HEADER);
	if (WARN_ON(!hdr))
		return;

	/* Reset the segment map on keyframes */
	if (VP8_FRAME_IS_KEY_FRAME(hdr) && ctx->vp8_dec.segment_map.cpu)
		memset(ctx->vp8_dec.segment_map.cpu, 0,
		       ctx->vp8_dec.segment_map.size);

	hantro_vp8_prob_update(ctx, hdr);

	/*
	 * Soft-reset the decoder before programming the registers so that no
	 * state is carried over from a previous run.
	 */
	ctx->codec_ops->reset(ctx);

	reg = VDPU_REG_CONFIG_DEC_TIMEOUT_E
	      | VDPU_REG_CONFIG_DEC_CLK_GATE_E;
	if (!VP8_FRAME_IS_KEY_FRAME(hdr))
		reg |= VDPU_REG_DEC_CTRL0_PIC_INTER_E;
	vdpu_write_relaxed(vpu, reg, VDPU_REG_EN_FLAGS);

	reg = VDPU_REG_CONFIG_DEC_STRENDIAN_E
	      | VDPU_REG_CONFIG_DEC_INSWAP32_E
	      | VDPU_REG_CONFIG_DEC_STRSWAP32_E
	      | VDPU_REG_CONFIG_DEC_OUTSWAP32_E
	      | VDPU_REG_CONFIG_DEC_IN_ENDIAN
	      | VDPU_REG_CONFIG_DEC_OUT_ENDIAN;
	vdpu_write_relaxed(vpu, reg, VDPU_REG_DATA_ENDIAN);

	reg = VDPU_REG_CONFIG_DEC_MAX_BURST(16);
	vdpu_write_relaxed(vpu, reg, VDPU_REG_AXI_CTRL);

	/* DEC_MODE(10) selects VP8 decoding */
	reg = VDPU_REG_DEC_CTRL0_DEC_MODE(10);
	vdpu_write_relaxed(vpu, reg, VDPU_REG_DEC_FORMAT);

	if (!(hdr->flags & V4L2_VP8_FRAME_HEADER_FLAG_MB_NO_SKIP_COEFF))
		hantro_reg_write(vpu, &vp8_dec_skip_mode, 1);
	if (hdr->lf_header.level == 0)
		hantro_reg_write(vpu, &vp8_dec_filter_disable, 1);

	/* Frame dimensions in macroblocks */
	mb_width = VP8_MB_WIDTH(width);
	mb_height = VP8_MB_HEIGHT(height);

	hantro_reg_write(vpu, &vp8_dec_mb_width, mb_width);
	hantro_reg_write(vpu, &vp8_dec_mb_height, mb_height);
	hantro_reg_write(vpu, &vp8_dec_mb_width_ext, mb_width >> 9);
	hantro_reg_write(vpu, &vp8_dec_mb_height_ext, mb_height >> 8);

	/* Boolean decoder state */
	hantro_reg_write(vpu, &vp8_dec_bool_range, hdr->coder_state.range);
	hantro_reg_write(vpu, &vp8_dec_bool_value, hdr->coder_state.value);

	reg = vdpu_read(vpu, VDPU_REG_VP8_DCT_START_BIT);
	if (hdr->version != 3)
		reg |= VDPU_REG_DEC_CTRL4_VC1_HEIGHT_EXT;
	if (hdr->version & 0x3)
		reg |= VDPU_REG_DEC_CTRL4_BILIN_MC_E;
	vdpu_write_relaxed(vpu, reg, VDPU_REG_VP8_DCT_START_BIT);

	cfg_lf(ctx, hdr);
	cfg_qp(ctx, hdr);
	cfg_parts(ctx, hdr);
	cfg_tap(ctx, hdr);
	cfg_ref(ctx, hdr);
	cfg_buffers(ctx, hdr);

	hantro_finish_run(ctx);

	hantro_reg_write(vpu, &vp8_dec_start_dec, 1);
}