This source file includes the following definitions:
- set_params
- set_ref
- set_buffers
- hantro_g1_h264_dec_run
#include <linux/types.h>
#include <linux/sort.h>

#include <media/v4l2-mem2mem.h>

#include "hantro_g1_regs.h"
#include "hantro_hw.h"
#include "hantro_v4l2.h"

static void set_params(struct hantro_ctx *ctx)
{
	const struct hantro_h264_dec_ctrls *ctrls = &ctx->h264_dec.ctrls;
	const struct v4l2_ctrl_h264_decode_params *dec_param = ctrls->decode;
	const struct v4l2_ctrl_h264_slice_params *slices = ctrls->slices;
	const struct v4l2_ctrl_h264_sps *sps = ctrls->sps;
	const struct v4l2_ctrl_h264_pps *pps = ctrls->pps;
	struct vb2_v4l2_buffer *src_buf = hantro_get_src_buf(ctx);
	struct hantro_dev *vpu = ctx->dev;
	u32 reg;

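	/* Decoder control register 0. */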
	reg = G1_REG_DEC_CTRL0_DEC_AXI_WR_ID(0x0);
	if (sps->flags & V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD)
		reg |= G1_REG_DEC_CTRL0_SEQ_MBAFF_E;
	if (sps->profile_idc > 66) {
		reg |= G1_REG_DEC_CTRL0_PICORD_COUNT_E;
		if (dec_param->nal_ref_idc)
			reg |= G1_REG_DEC_CTRL0_WRITE_MVS_E;
	}

	if (!(sps->flags & V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY) &&
	    (sps->flags & V4L2_H264_SPS_FLAG_MB_ADAPTIVE_FRAME_FIELD ||
	     slices[0].flags & V4L2_H264_SLICE_FLAG_FIELD_PIC))
		reg |= G1_REG_DEC_CTRL0_PIC_INTERLACE_E;
	if (slices[0].flags & V4L2_H264_SLICE_FLAG_FIELD_PIC)
		reg |= G1_REG_DEC_CTRL0_PIC_FIELDMODE_E;
	if (!(slices[0].flags & V4L2_H264_SLICE_FLAG_BOTTOM_FIELD))
		reg |= G1_REG_DEC_CTRL0_PIC_TOPFIELD_E;
	vdpu_write_relaxed(vpu, reg, G1_REG_DEC_CTRL0);

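	/* Decoder control register 1. */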
	reg = G1_REG_DEC_CTRL1_PIC_MB_WIDTH(sps->pic_width_in_mbs_minus1 + 1) |
	      G1_REG_DEC_CTRL1_PIC_MB_HEIGHT_P(sps->pic_height_in_map_units_minus1 + 1) |
	      G1_REG_DEC_CTRL1_REF_FRAMES(sps->max_num_ref_frames);
	vdpu_write_relaxed(vpu, reg, G1_REG_DEC_CTRL1);

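	/* Decoder control register 2. */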
	reg = G1_REG_DEC_CTRL2_CH_QP_OFFSET(pps->chroma_qp_index_offset) |
	      G1_REG_DEC_CTRL2_CH_QP_OFFSET2(pps->second_chroma_qp_index_offset);

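	/* Always use the scaling matrices provided by userspace. */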
	reg |= G1_REG_DEC_CTRL2_TYPE1_QUANT_E;

	if (!(sps->flags & V4L2_H264_SPS_FLAG_FRAME_MBS_ONLY))
		reg |= G1_REG_DEC_CTRL2_FIELDPIC_FLAG_E;
	vdpu_write_relaxed(vpu, reg, G1_REG_DEC_CTRL2);

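	/* Decoder control register 3. */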
	reg = G1_REG_DEC_CTRL3_START_CODE_E |
	      G1_REG_DEC_CTRL3_INIT_QP(pps->pic_init_qp_minus26 + 26) |
	      G1_REG_DEC_CTRL3_STREAM_LEN(vb2_get_plane_payload(&src_buf->vb2_buf, 0));
	vdpu_write_relaxed(vpu, reg, G1_REG_DEC_CTRL3);

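	/* Decoder control register 4. */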
	reg = G1_REG_DEC_CTRL4_FRAMENUM_LEN(sps->log2_max_frame_num_minus4 + 4) |
	      G1_REG_DEC_CTRL4_FRAMENUM(slices[0].frame_num) |
	      G1_REG_DEC_CTRL4_WEIGHT_BIPR_IDC(pps->weighted_bipred_idc);
	if (pps->flags & V4L2_H264_PPS_FLAG_ENTROPY_CODING_MODE)
		reg |= G1_REG_DEC_CTRL4_CABAC_E;
	if (sps->flags & V4L2_H264_SPS_FLAG_DIRECT_8X8_INFERENCE)
		reg |= G1_REG_DEC_CTRL4_DIR_8X8_INFER_E;
	if (sps->chroma_format_idc == 0)
		reg |= G1_REG_DEC_CTRL4_BLACKWHITE_E;
	if (pps->flags & V4L2_H264_PPS_FLAG_WEIGHTED_PRED)
		reg |= G1_REG_DEC_CTRL4_WEIGHT_PRED_E;
	vdpu_write_relaxed(vpu, reg, G1_REG_DEC_CTRL4);

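	/* Decoder control register 5. */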
	reg = G1_REG_DEC_CTRL5_REFPIC_MK_LEN(slices[0].dec_ref_pic_marking_bit_size) |
	      G1_REG_DEC_CTRL5_IDR_PIC_ID(slices[0].idr_pic_id);
	if (pps->flags & V4L2_H264_PPS_FLAG_CONSTRAINED_INTRA_PRED)
		reg |= G1_REG_DEC_CTRL5_CONST_INTRA_E;
	if (pps->flags & V4L2_H264_PPS_FLAG_DEBLOCKING_FILTER_CONTROL_PRESENT)
		reg |= G1_REG_DEC_CTRL5_FILT_CTRL_PRES;
	if (pps->flags & V4L2_H264_PPS_FLAG_REDUNDANT_PIC_CNT_PRESENT)
		reg |= G1_REG_DEC_CTRL5_RDPIC_CNT_PRES;
	if (pps->flags & V4L2_H264_PPS_FLAG_TRANSFORM_8X8_MODE)
		reg |= G1_REG_DEC_CTRL5_8X8TRANS_FLAG_E;
	if (dec_param->flags & V4L2_H264_DECODE_PARAM_FLAG_IDR_PIC)
		reg |= G1_REG_DEC_CTRL5_IDR_PIC_E;
	vdpu_write_relaxed(vpu, reg, G1_REG_DEC_CTRL5);

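	/* Decoder control register 6. */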
	reg = G1_REG_DEC_CTRL6_PPS_ID(slices[0].pic_parameter_set_id) |
	      G1_REG_DEC_CTRL6_REFIDX0_ACTIVE(pps->num_ref_idx_l0_default_active_minus1 + 1) |
	      G1_REG_DEC_CTRL6_REFIDX1_ACTIVE(pps->num_ref_idx_l1_default_active_minus1 + 1) |
	      G1_REG_DEC_CTRL6_POC_LENGTH(slices[0].pic_order_cnt_bit_size);
	vdpu_write_relaxed(vpu, reg, G1_REG_DEC_CTRL6);

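	/* Error concealment register. */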
	vdpu_write_relaxed(vpu, 0, G1_REG_ERR_CONC);

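	/* Prediction filter tap register. */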
	vdpu_write_relaxed(vpu,
			   G1_REG_PRED_FLT_PRED_BC_TAP_0_0(1) |
			   G1_REG_PRED_FLT_PRED_BC_TAP_0_1(-5 & 0x3ff) |
			   G1_REG_PRED_FLT_PRED_BC_TAP_0_2(20),
			   G1_REG_PRED_FLT);

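	/* Reference picture buffer control register. */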
	vdpu_write_relaxed(vpu, 0, G1_REG_REF_BUF_CTRL);

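	/* Reference picture buffer control register 2. */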
	vdpu_write_relaxed(vpu, G1_REG_REF_BUF_CTRL2_APF_THRESHOLD(8),
			   G1_REG_REF_BUF_CTRL2);
}

static void set_ref(struct hantro_ctx *ctx)
{
	struct v4l2_h264_dpb_entry *dpb = ctx->h264_dec.dpb;
	const u8 *b0_reflist, *b1_reflist, *p_reflist;
	struct hantro_dev *vpu = ctx->dev;
	u32 dpb_longterm = 0;
	u32 dpb_valid = 0;
	int reg_num;
	u32 reg;
	int i;

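	/*
	 * Set up bit maps of valid and long-term DPB entries.
	 */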
	for (i = 0; i < HANTRO_H264_DPB_SIZE; ++i) {
		if (dpb[i].flags & V4L2_H264_DPB_ENTRY_FLAG_ACTIVE)
			dpb_valid |= BIT(HANTRO_H264_DPB_SIZE - 1 - i);

		if (dpb[i].flags & V4L2_H264_DPB_ENTRY_FLAG_LONG_TERM)
			dpb_longterm |= BIT(HANTRO_H264_DPB_SIZE - 1 - i);
	}
	vdpu_write_relaxed(vpu, dpb_valid << 16, G1_REG_VALID_REF);
	vdpu_write_relaxed(vpu, dpb_longterm << 16, G1_REG_LT_REF);

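	/*
	 * Set up reference frame picture numbers.
	 *
	 * Each G1_REG_REF_PIC(x) register holds the numbers of two
	 * consecutive reference pictures.
	 */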
	for (i = 0; i < HANTRO_H264_DPB_SIZE; i += 2) {
		reg = 0;
		if (dpb[i].flags & V4L2_H264_DPB_ENTRY_FLAG_LONG_TERM)
			reg |= G1_REG_REF_PIC_REFER0_NBR(dpb[i].pic_num);
		else
			reg |= G1_REG_REF_PIC_REFER0_NBR(dpb[i].frame_num);

		if (dpb[i + 1].flags & V4L2_H264_DPB_ENTRY_FLAG_LONG_TERM)
			reg |= G1_REG_REF_PIC_REFER1_NBR(dpb[i + 1].pic_num);
		else
			reg |= G1_REG_REF_PIC_REFER1_NBR(dpb[i + 1].frame_num);

		vdpu_write_relaxed(vpu, reg, G1_REG_REF_PIC(i / 2));
	}

	b0_reflist = ctx->h264_dec.reflists.b0;
	b1_reflist = ctx->h264_dec.reflists.b1;
	p_reflist = ctx->h264_dec.reflists.p;

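	/*
	 * Each G1_REG_BD_REF_PIC(x) register holds three entries of
	 * both the forward (B0) and backward (B1) reference lists.
	 */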
	reg_num = 0;
	for (i = 0; i < 15; i += 3) {
		reg = G1_REG_BD_REF_PIC_BINIT_RLIST_F0(b0_reflist[i]) |
		      G1_REG_BD_REF_PIC_BINIT_RLIST_F1(b0_reflist[i + 1]) |
		      G1_REG_BD_REF_PIC_BINIT_RLIST_F2(b0_reflist[i + 2]) |
		      G1_REG_BD_REF_PIC_BINIT_RLIST_B0(b1_reflist[i]) |
		      G1_REG_BD_REF_PIC_BINIT_RLIST_B1(b1_reflist[i + 1]) |
		      G1_REG_BD_REF_PIC_BINIT_RLIST_B2(b1_reflist[i + 2]);
		vdpu_write_relaxed(vpu, reg, G1_REG_BD_REF_PIC(reg_num++));
	}

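	/*
	 * G1_REG_BD_P_REF_PIC holds the last entries of both B
	 * reference lists and the first four entries of the P
	 * reference list.
	 */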
	reg = G1_REG_BD_P_REF_PIC_BINIT_RLIST_F15(b0_reflist[15]) |
	      G1_REG_BD_P_REF_PIC_BINIT_RLIST_B15(b1_reflist[15]) |
	      G1_REG_BD_P_REF_PIC_PINIT_RLIST_F0(p_reflist[0]) |
	      G1_REG_BD_P_REF_PIC_PINIT_RLIST_F1(p_reflist[1]) |
	      G1_REG_BD_P_REF_PIC_PINIT_RLIST_F2(p_reflist[2]) |
	      G1_REG_BD_P_REF_PIC_PINIT_RLIST_F3(p_reflist[3]);
	vdpu_write_relaxed(vpu, reg, G1_REG_BD_P_REF_PIC);

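	/*
	 * Each G1_REG_FWD_PIC(x) register holds six consecutive
	 * entries of the P reference list, starting at index 4.
	 */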
	reg_num = 0;
	for (i = 4; i < HANTRO_H264_DPB_SIZE; i += 6) {
		reg = G1_REG_FWD_PIC_PINIT_RLIST_F0(p_reflist[i]) |
		      G1_REG_FWD_PIC_PINIT_RLIST_F1(p_reflist[i + 1]) |
		      G1_REG_FWD_PIC_PINIT_RLIST_F2(p_reflist[i + 2]) |
		      G1_REG_FWD_PIC_PINIT_RLIST_F3(p_reflist[i + 3]) |
		      G1_REG_FWD_PIC_PINIT_RLIST_F4(p_reflist[i + 4]) |
		      G1_REG_FWD_PIC_PINIT_RLIST_F5(p_reflist[i + 5]);
		vdpu_write_relaxed(vpu, reg, G1_REG_FWD_PIC(reg_num++));
	}

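	/* Set up addresses of the DPB reference buffers. */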
	for (i = 0; i < HANTRO_H264_DPB_SIZE; i++) {
		struct vb2_buffer *buf = hantro_h264_get_ref_buf(ctx, i);

		vdpu_write_relaxed(vpu, vb2_dma_contig_plane_dma_addr(buf, 0),
				   G1_REG_ADDR_REF(i));
	}
}

static void set_buffers(struct hantro_ctx *ctx)
{
	const struct hantro_h264_dec_ctrls *ctrls = &ctx->h264_dec.ctrls;
	struct vb2_v4l2_buffer *src_buf, *dst_buf;
	struct hantro_dev *vpu = ctx->dev;
	dma_addr_t src_dma, dst_dma;

	src_buf = hantro_get_src_buf(ctx);
	dst_buf = hantro_get_dst_buf(ctx);

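	/* Source (stream) buffer. */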
	src_dma = vb2_dma_contig_plane_dma_addr(&src_buf->vb2_buf, 0);
	vdpu_write_relaxed(vpu, src_dma, G1_REG_ADDR_STR);

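	/* Destination (decoded frame) buffer. */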
	dst_dma = vb2_dma_contig_plane_dma_addr(&dst_buf->vb2_buf, 0);
	vdpu_write_relaxed(vpu, dst_dma, G1_REG_ADDR_DST);

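	/* Higher profiles need a motion vector buffer appended to the frame. */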
	if (ctrls->sps->profile_idc > 66 && ctrls->decode->nal_ref_idc) {
		size_t pic_size = ctx->h264_dec.pic_size;
		size_t mv_offset = round_up(pic_size, 8);

		if (ctrls->slices[0].flags & V4L2_H264_SLICE_FLAG_BOTTOM_FIELD)
			mv_offset += 32 * H264_MB_WIDTH(ctx->dst_fmt.width);

		vdpu_write_relaxed(vpu, dst_dma + mv_offset,
				   G1_REG_ADDR_DIR_MV);
	}

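	/* Auxiliary buffer holding the scaling (quantization) matrices. */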
	vdpu_write_relaxed(vpu, ctx->h264_dec.priv.dma, G1_REG_ADDR_QTABLE);
}

void hantro_g1_h264_dec_run(struct hantro_ctx *ctx)
{
	struct hantro_dev *vpu = ctx->dev;

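	/* Prepare the H264 decoder context. */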
	if (hantro_h264_dec_prepare_run(ctx))
		return;

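	/* Configure hardware registers. */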
	set_params(ctx);
	set_ref(ctx);
	set_buffers(ctx);

	hantro_finish_run(ctx);

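	/* Configure the bus interface and start decoding. */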
	vdpu_write_relaxed(vpu,
			   G1_REG_CONFIG_DEC_AXI_RD_ID(0xffu) |
			   G1_REG_CONFIG_DEC_TIMEOUT_E |
			   G1_REG_CONFIG_DEC_OUT_ENDIAN |
			   G1_REG_CONFIG_DEC_STRENDIAN_E |
			   G1_REG_CONFIG_DEC_MAX_BURST(16) |
			   G1_REG_CONFIG_DEC_OUTSWAP32_E |
			   G1_REG_CONFIG_DEC_INSWAP32_E |
			   G1_REG_CONFIG_DEC_STRSWAP32_E |
			   G1_REG_CONFIG_DEC_CLK_GATE_E,
			   G1_REG_CONFIG);
	vdpu_write(vpu, G1_REG_INTERRUPT_DEC_E, G1_REG_INTERRUPT);
}