This source file includes the following definitions:
- cfg_lf
- cfg_qp
- cfg_parts
- cfg_tap
- cfg_ref
- cfg_buffers
- hantro_g1_vp8_dec_run
#include <media/v4l2-mem2mem.h>
#include <media/vp8-ctrls.h>

#include "hantro_hw.h"
#include "hantro.h"
#include "hantro_g1_regs.h"
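
/* DCT partition base address registers */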
static const struct hantro_reg vp8_dec_dct_base[8] = {
	{ G1_REG_ADDR_STR, 0, 0xffffffff },
	{ G1_REG_ADDR_REF(8), 0, 0xffffffff },
	{ G1_REG_ADDR_REF(9), 0, 0xffffffff },
	{ G1_REG_ADDR_REF(10), 0, 0xffffffff },
	{ G1_REG_ADDR_REF(11), 0, 0xffffffff },
	{ G1_REG_ADDR_REF(12), 0, 0xffffffff },
	{ G1_REG_ADDR_REF(14), 0, 0xffffffff },
	{ G1_REG_ADDR_REF(15), 0, 0xffffffff },
};
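
/* Loop filter level registers */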
static const struct hantro_reg vp8_dec_lf_level[4] = {
	{ G1_REG_REF_PIC(2), 18, 0x3f },
	{ G1_REG_REF_PIC(2), 12, 0x3f },
	{ G1_REG_REF_PIC(2), 6, 0x3f },
	{ G1_REG_REF_PIC(2), 0, 0x3f },
};
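
/* Macroblock loop filter level adjustment registers */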
static const struct hantro_reg vp8_dec_mb_adj[4] = {
	{ G1_REG_REF_PIC(0), 21, 0x7f },
	{ G1_REG_REF_PIC(0), 14, 0x7f },
	{ G1_REG_REF_PIC(0), 7, 0x7f },
	{ G1_REG_REF_PIC(0), 0, 0x7f },
};
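
/* Reference frame loop filter adjustment registers */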
static const struct hantro_reg vp8_dec_ref_adj[4] = {
	{ G1_REG_REF_PIC(1), 21, 0x7f },
	{ G1_REG_REF_PIC(1), 14, 0x7f },
	{ G1_REG_REF_PIC(1), 7, 0x7f },
	{ G1_REG_REF_PIC(1), 0, 0x7f },
};
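
/* Quantizer registers */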
static const struct hantro_reg vp8_dec_quant[4] = {
	{ G1_REG_REF_PIC(3), 11, 0x7ff },
	{ G1_REG_REF_PIC(3), 0, 0x7ff },
	{ G1_REG_BD_REF_PIC(4), 11, 0x7ff },
	{ G1_REG_BD_REF_PIC(4), 0, 0x7ff },
};
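
/* Quantizer delta registers */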
static const struct hantro_reg vp8_dec_quant_delta[5] = {
	{ G1_REG_REF_PIC(3), 27, 0x1f },
	{ G1_REG_REF_PIC(3), 22, 0x1f },
	{ G1_REG_BD_REF_PIC(4), 27, 0x1f },
	{ G1_REG_BD_REF_PIC(4), 22, 0x1f },
	{ G1_REG_BD_P_REF_PIC, 27, 0x1f },
};
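
/* DCT partition start bits registers */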
static const struct hantro_reg vp8_dec_dct_start_bits[8] = {
	{ G1_REG_DEC_CTRL2, 26, 0x3f }, { G1_REG_DEC_CTRL4, 26, 0x3f },
	{ G1_REG_DEC_CTRL4, 20, 0x3f }, { G1_REG_DEC_CTRL7, 24, 0x3f },
	{ G1_REG_DEC_CTRL7, 18, 0x3f }, { G1_REG_DEC_CTRL7, 12, 0x3f },
	{ G1_REG_DEC_CTRL7, 6, 0x3f }, { G1_REG_DEC_CTRL7, 0, 0x3f },
};
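
/* Prediction (motion compensation) filter tap registers */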
static const struct hantro_reg vp8_dec_pred_bc_tap[8][4] = {
	{
		{ G1_REG_PRED_FLT, 22, 0x3ff },
		{ G1_REG_PRED_FLT, 12, 0x3ff },
		{ G1_REG_PRED_FLT, 2, 0x3ff },
		{ G1_REG_REF_PIC(4), 22, 0x3ff },
	},
	{
		{ G1_REG_REF_PIC(4), 12, 0x3ff },
		{ G1_REG_REF_PIC(4), 2, 0x3ff },
		{ G1_REG_REF_PIC(5), 22, 0x3ff },
		{ G1_REG_REF_PIC(5), 12, 0x3ff },
	},
	{
		{ G1_REG_REF_PIC(5), 2, 0x3ff },
		{ G1_REG_REF_PIC(6), 22, 0x3ff },
		{ G1_REG_REF_PIC(6), 12, 0x3ff },
		{ G1_REG_REF_PIC(6), 2, 0x3ff },
	},
	{
		{ G1_REG_REF_PIC(7), 22, 0x3ff },
		{ G1_REG_REF_PIC(7), 12, 0x3ff },
		{ G1_REG_REF_PIC(7), 2, 0x3ff },
		{ G1_REG_LT_REF, 22, 0x3ff },
	},
	{
		{ G1_REG_LT_REF, 12, 0x3ff },
		{ G1_REG_LT_REF, 2, 0x3ff },
		{ G1_REG_VALID_REF, 22, 0x3ff },
		{ G1_REG_VALID_REF, 12, 0x3ff },
	},
	{
		{ G1_REG_VALID_REF, 2, 0x3ff },
		{ G1_REG_BD_REF_PIC(0), 22, 0x3ff },
		{ G1_REG_BD_REF_PIC(0), 12, 0x3ff },
		{ G1_REG_BD_REF_PIC(0), 2, 0x3ff },
	},
	{
		{ G1_REG_BD_REF_PIC(1), 22, 0x3ff },
		{ G1_REG_BD_REF_PIC(1), 12, 0x3ff },
		{ G1_REG_BD_REF_PIC(1), 2, 0x3ff },
		{ G1_REG_BD_REF_PIC(2), 22, 0x3ff },
	},
	{
		{ G1_REG_BD_REF_PIC(2), 12, 0x3ff },
		{ G1_REG_BD_REF_PIC(2), 2, 0x3ff },
		{ G1_REG_BD_REF_PIC(3), 22, 0x3ff },
		{ G1_REG_BD_REF_PIC(3), 12, 0x3ff },
	},
};
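
/*
 * Set loop filter levels, filter type/sharpness and the per-macroblock
 * and per-reference-frame adjustment deltas.
 */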
static void cfg_lf(struct hantro_ctx *ctx,
		   const struct v4l2_ctrl_vp8_frame_header *hdr)
{
	const struct v4l2_vp8_segment_header *seg = &hdr->segment_header;
	const struct v4l2_vp8_loopfilter_header *lf = &hdr->lf_header;
	struct hantro_dev *vpu = ctx->dev;
	unsigned int i;
	u32 reg;

	if (!(seg->flags & V4L2_VP8_SEGMENT_HEADER_FLAG_ENABLED)) {
		hantro_reg_write(vpu, &vp8_dec_lf_level[0], lf->level);
	} else if (seg->flags & V4L2_VP8_SEGMENT_HEADER_FLAG_DELTA_VALUE_MODE) {
		for (i = 0; i < 4; i++) {
			u32 lf_level = clamp(lf->level + seg->lf_update[i],
					     0, 63);

			hantro_reg_write(vpu, &vp8_dec_lf_level[i], lf_level);
		}
	} else {
		for (i = 0; i < 4; i++)
			hantro_reg_write(vpu, &vp8_dec_lf_level[i],
					 seg->lf_update[i]);
	}

	reg = G1_REG_REF_PIC_FILT_SHARPNESS(lf->sharpness_level);
	if (lf->flags & V4L2_VP8_LF_FILTER_TYPE_SIMPLE)
		reg |= G1_REG_REF_PIC_FILT_TYPE_E;
	vdpu_write_relaxed(vpu, reg, G1_REG_REF_PIC(0));

	if (lf->flags & V4L2_VP8_LF_HEADER_ADJ_ENABLE) {
		for (i = 0; i < 4; i++) {
			hantro_reg_write(vpu, &vp8_dec_mb_adj[i],
					 lf->mb_mode_delta[i]);
			hantro_reg_write(vpu, &vp8_dec_ref_adj[i],
					 lf->ref_frm_delta[i]);
		}
	}
}
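
/*
 * Set quantization parameters: per-segment Y AC quantizer indices and the
 * DC/AC quantizer deltas.
 */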
static void cfg_qp(struct hantro_ctx *ctx,
		   const struct v4l2_ctrl_vp8_frame_header *hdr)
{
	const struct v4l2_vp8_quantization_header *q = &hdr->quant_header;
	const struct v4l2_vp8_segment_header *seg = &hdr->segment_header;
	struct hantro_dev *vpu = ctx->dev;
	unsigned int i;

	if (!(seg->flags & V4L2_VP8_SEGMENT_HEADER_FLAG_ENABLED)) {
		hantro_reg_write(vpu, &vp8_dec_quant[0], q->y_ac_qi);
	} else if (seg->flags & V4L2_VP8_SEGMENT_HEADER_FLAG_DELTA_VALUE_MODE) {
		for (i = 0; i < 4; i++) {
			u32 quant = clamp(q->y_ac_qi + seg->quant_update[i],
					  0, 127);

			hantro_reg_write(vpu, &vp8_dec_quant[i], quant);
		}
	} else {
		for (i = 0; i < 4; i++)
			hantro_reg_write(vpu, &vp8_dec_quant[i],
					 seg->quant_update[i]);
	}

	hantro_reg_write(vpu, &vp8_dec_quant_delta[0], q->y_dc_delta);
	hantro_reg_write(vpu, &vp8_dec_quant_delta[1], q->y2_dc_delta);
	hantro_reg_write(vpu, &vp8_dec_quant_delta[2], q->y2_ac_delta);
	hantro_reg_write(vpu, &vp8_dec_quant_delta[3], q->uv_dc_delta);
	hantro_reg_write(vpu, &vp8_dec_quant_delta[4], q->uv_ac_delta);
}
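
/*
 * Set control registers for parsing the control (first) partition and the
 * DCT coefficient partitions: base addresses, bit offsets and lengths.
 */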
static void cfg_parts(struct hantro_ctx *ctx,
		      const struct v4l2_ctrl_vp8_frame_header *hdr)
{
	struct hantro_dev *vpu = ctx->dev;
	struct vb2_v4l2_buffer *vb2_src;
	u32 first_part_offset = VP8_FRAME_IS_KEY_FRAME(hdr) ? 10 : 3;
	u32 mb_size, mb_offset_bytes, mb_offset_bits, mb_start_bits;
	u32 dct_size_part_size, dct_part_offset;
	struct hantro_reg reg;
	dma_addr_t src_dma;
	u32 dct_part_total_len = 0;
	u32 count = 0;
	unsigned int i;

	vb2_src = hantro_get_src_buf(ctx);
	src_dma = vb2_dma_contig_plane_dma_addr(&vb2_src->vb2_buf, 0);

	/*
	 * Calculate the location of the macroblock data in the control
	 * (first) partition: bit/byte offsets from the src_dma base address,
	 * the bit offset from the preceding aligned address, and the length
	 * of the macroblock data including alignment padding.
	 */
	mb_offset_bits = first_part_offset * 8 +
			 hdr->first_part_header_bits + 8;
	mb_offset_bytes = mb_offset_bits / 8;
	mb_start_bits = mb_offset_bits -
			(mb_offset_bytes & (~DEC_8190_ALIGN_MASK)) * 8;
	mb_size = hdr->first_part_size -
		  (mb_offset_bytes - first_part_offset) +
		  (mb_offset_bytes & DEC_8190_ALIGN_MASK);

	/* Macroblock data aligned base address */
	vdpu_write_relaxed(vpu, (mb_offset_bytes & (~DEC_8190_ALIGN_MASK))
				+ src_dma, G1_REG_ADDR_REF(13));

	/* Macroblock data start bits */
	reg.base = G1_REG_DEC_CTRL2;
	reg.mask = 0x3f;
	reg.shift = 18;
	hantro_reg_write(vpu, &reg, mb_start_bits);

	/* Macroblock aligned data length */
	reg.base = G1_REG_DEC_CTRL6;
	reg.mask = 0x3fffff;
	reg.shift = 0;
	hantro_reg_write(vpu, &reg, mb_size + 1);

	/*
	 * Calculate DCT partition info: the sizes of all DCT partitions but
	 * the last are stored in 3-byte fields right after the first
	 * partition; dct_part_offset is the byte offset of that area from
	 * src_dma and dct_part_total_len the total length of the DCT
	 * partitions including the size fields and alignment padding.
	 */
	dct_size_part_size = (hdr->num_dct_parts - 1) * 3;
	dct_part_offset = first_part_offset + hdr->first_part_size;
	for (i = 0; i < hdr->num_dct_parts; i++)
		dct_part_total_len += hdr->dct_part_sizes[i];
	dct_part_total_len += dct_size_part_size;
	dct_part_total_len += (dct_part_offset & DEC_8190_ALIGN_MASK);

	/* Number of DCT partitions */
	reg.base = G1_REG_DEC_CTRL6;
	reg.mask = 0xf;
	reg.shift = 24;
	hantro_reg_write(vpu, &reg, hdr->num_dct_parts - 1);

	/* DCT partition length */
	vdpu_write_relaxed(vpu,
			   G1_REG_DEC_CTRL3_STREAM_LEN(dct_part_total_len),
			   G1_REG_DEC_CTRL3);

	/* DCT partition base addresses and start bits */
	for (i = 0; i < hdr->num_dct_parts; i++) {
		u32 byte_offset = dct_part_offset + dct_size_part_size + count;
		u32 base_addr = byte_offset + src_dma;

		hantro_reg_write(vpu, &vp8_dec_dct_base[i],
				 base_addr & (~DEC_8190_ALIGN_MASK));

		hantro_reg_write(vpu, &vp8_dec_dct_start_bits[i],
				 (byte_offset & DEC_8190_ALIGN_MASK) * 8);

		count += hdr->dct_part_sizes[i];
	}
}
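
/*
 * Program the 6-tap motion compensation (prediction) filter coefficients.
 * Not needed for VP8 profiles that use bilinear filtering.
 */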
static void cfg_tap(struct hantro_ctx *ctx,
		    const struct v4l2_ctrl_vp8_frame_header *hdr)
{
	struct hantro_dev *vpu = ctx->dev;
	struct hantro_reg reg;
	u32 val = 0;
	int i, j;

	reg.base = G1_REG_BD_REF_PIC(3);
	reg.mask = 0xf;

	if ((hdr->version & 0x03) != 0)
		return; /* Tap filter not used. */

	for (i = 0; i < 8; i++) {
		val = (hantro_vp8_dec_mc_filter[i][0] << 2) |
		      hantro_vp8_dec_mc_filter[i][5];

		for (j = 0; j < 4; j++)
			hantro_reg_write(vpu, &vp8_dec_pred_bc_tap[i][j],
					 hantro_vp8_dec_mc_filter[i][j + 1]);

		switch (i) {
		case 2:
			reg.shift = 8;
			break;
		case 4:
			reg.shift = 4;
			break;
		case 6:
			reg.shift = 0;
			break;
		default:
			continue;
		}

		hantro_reg_write(vpu, &reg, val);
	}
}
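
/*
 * Set last, golden and alternate reference frame addresses, falling back
 * to the destination buffer when a reference is not available.
 */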
static void cfg_ref(struct hantro_ctx *ctx,
		    const struct v4l2_ctrl_vp8_frame_header *hdr)
{
	struct vb2_queue *cap_q = &ctx->fh.m2m_ctx->cap_q_ctx.q;
	struct hantro_dev *vpu = ctx->dev;
	struct vb2_v4l2_buffer *vb2_dst;
	dma_addr_t ref;

	vb2_dst = hantro_get_dst_buf(ctx);

	ref = hantro_get_ref(cap_q, hdr->last_frame_ts);
	if (!ref)
		ref = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0);
	vdpu_write_relaxed(vpu, ref, G1_REG_ADDR_REF(0));

	ref = hantro_get_ref(cap_q, hdr->golden_frame_ts);
	WARN_ON(!ref && hdr->golden_frame_ts);
	if (!ref)
		ref = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0);
	if (hdr->flags & V4L2_VP8_FRAME_HEADER_FLAG_SIGN_BIAS_GOLDEN)
		ref |= G1_REG_ADDR_REF_TOPC_E;
	vdpu_write_relaxed(vpu, ref, G1_REG_ADDR_REF(4));

	ref = hantro_get_ref(cap_q, hdr->alt_frame_ts);
	WARN_ON(!ref && hdr->alt_frame_ts);
	if (!ref)
		ref = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0);
	if (hdr->flags & V4L2_VP8_FRAME_HEADER_FLAG_SIGN_BIAS_ALT)
		ref |= G1_REG_ADDR_REF_TOPC_E;
	vdpu_write_relaxed(vpu, ref, G1_REG_ADDR_REF(5));
}
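
/*
 * Set auxiliary buffer addresses (probability table, segment map) and the
 * decoded picture output address.
 */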
static void cfg_buffers(struct hantro_ctx *ctx,
			const struct v4l2_ctrl_vp8_frame_header *hdr)
{
	const struct v4l2_vp8_segment_header *seg = &hdr->segment_header;
	struct hantro_dev *vpu = ctx->dev;
	struct vb2_v4l2_buffer *vb2_dst;
	dma_addr_t dst_dma;
	u32 reg;

	vb2_dst = hantro_get_dst_buf(ctx);

	/* Set probability table buffer address */
	vdpu_write_relaxed(vpu, ctx->vp8_dec.prob_tbl.dma,
			   G1_REG_ADDR_QTABLE);

	/* Set segment map address */
	reg = G1_REG_FWD_PIC1_SEGMENT_BASE(ctx->vp8_dec.segment_map.dma);
	if (seg->flags & V4L2_VP8_SEGMENT_HEADER_FLAG_ENABLED) {
		reg |= G1_REG_FWD_PIC1_SEGMENT_E;
		if (seg->flags & V4L2_VP8_SEGMENT_HEADER_FLAG_UPDATE_MAP)
			reg |= G1_REG_FWD_PIC1_SEGMENT_UPD_E;
	}
	vdpu_write_relaxed(vpu, reg, G1_REG_FWD_PIC(0));

	dst_dma = vb2_dma_contig_plane_dma_addr(&vb2_dst->vb2_buf, 0);
	vdpu_write_relaxed(vpu, dst_dma, G1_REG_ADDR_DST);
}
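
/* Program the G1 core for one VP8 frame and kick off decoding. */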
void hantro_g1_vp8_dec_run(struct hantro_ctx *ctx)
{
	const struct v4l2_ctrl_vp8_frame_header *hdr;
	struct hantro_dev *vpu = ctx->dev;
	size_t height = ctx->dst_fmt.height;
	size_t width = ctx->dst_fmt.width;
	u32 mb_width, mb_height;
	u32 reg;

	hantro_prepare_run(ctx);

	hdr = hantro_get_ctrl(ctx, V4L2_CID_MPEG_VIDEO_VP8_FRAME_HEADER);
	if (WARN_ON(!hdr))
		return;

	/* Reset the segment map buffer on keyframes */
	if (VP8_FRAME_IS_KEY_FRAME(hdr) && ctx->vp8_dec.segment_map.cpu)
		memset(ctx->vp8_dec.segment_map.cpu, 0,
		       ctx->vp8_dec.segment_map.size);

	hantro_vp8_prob_update(ctx, hdr);

	reg = G1_REG_CONFIG_DEC_TIMEOUT_E |
	      G1_REG_CONFIG_DEC_STRENDIAN_E |
	      G1_REG_CONFIG_DEC_INSWAP32_E |
	      G1_REG_CONFIG_DEC_STRSWAP32_E |
	      G1_REG_CONFIG_DEC_OUTSWAP32_E |
	      G1_REG_CONFIG_DEC_CLK_GATE_E |
	      G1_REG_CONFIG_DEC_IN_ENDIAN |
	      G1_REG_CONFIG_DEC_OUT_ENDIAN |
	      G1_REG_CONFIG_DEC_MAX_BURST(16);
	vdpu_write_relaxed(vpu, reg, G1_REG_CONFIG);

	reg = G1_REG_DEC_CTRL0_DEC_MODE(10);
	if (!VP8_FRAME_IS_KEY_FRAME(hdr))
		reg |= G1_REG_DEC_CTRL0_PIC_INTER_E;
	if (!(hdr->flags & V4L2_VP8_FRAME_HEADER_FLAG_MB_NO_SKIP_COEFF))
		reg |= G1_REG_DEC_CTRL0_SKIP_MODE;
	if (hdr->lf_header.level == 0)
		reg |= G1_REG_DEC_CTRL0_FILTERING_DIS;
	vdpu_write_relaxed(vpu, reg, G1_REG_DEC_CTRL0);

	/* Frame dimensions in macroblocks */
	mb_width = VP8_MB_WIDTH(width);
	mb_height = VP8_MB_HEIGHT(height);
	reg = G1_REG_DEC_CTRL1_PIC_MB_WIDTH(mb_width) |
	      G1_REG_DEC_CTRL1_PIC_MB_HEIGHT_P(mb_height) |
	      G1_REG_DEC_CTRL1_PIC_MB_W_EXT(mb_width >> 9) |
	      G1_REG_DEC_CTRL1_PIC_MB_H_EXT(mb_height >> 8);
	vdpu_write_relaxed(vpu, reg, G1_REG_DEC_CTRL1);

	/* Boolean decoder state */
	reg = G1_REG_DEC_CTRL2_BOOLEAN_RANGE(hdr->coder_state.range)
	      | G1_REG_DEC_CTRL2_BOOLEAN_VALUE(hdr->coder_state.value);
	vdpu_write_relaxed(vpu, reg, G1_REG_DEC_CTRL2);

	reg = 0;
	if (hdr->version != 3)
		reg |= G1_REG_DEC_CTRL4_VC1_HEIGHT_EXT;
	if (hdr->version & 0x3)
		reg |= G1_REG_DEC_CTRL4_BILIN_MC_E;
	vdpu_write_relaxed(vpu, reg, G1_REG_DEC_CTRL4);

	cfg_lf(ctx, hdr);
	cfg_qp(ctx, hdr);
	cfg_parts(ctx, hdr);
	cfg_tap(ctx, hdr);
	cfg_ref(ctx, hdr);
	cfg_buffers(ctx, hdr);

	hantro_finish_run(ctx);

	vdpu_write(vpu, G1_REG_INTERRUPT_DEC_E, G1_REG_INTERRUPT);
}