st/va: add support for RGBX and BGRX in VPP
[mesa.git] / src / gallium / state_trackers / va / picture.c
1 /**************************************************************************
2 *
3 * Copyright 2010 Thomas Balling Sørensen & Orasanu Lucian.
4 * Copyright 2014 Advanced Micro Devices, Inc.
5 * All Rights Reserved.
6 *
7 * Permission is hereby granted, free of charge, to any person obtaining a
8 * copy of this software and associated documentation files (the
9 * "Software"), to deal in the Software without restriction, including
10 * without limitation the rights to use, copy, modify, merge, publish,
11 * distribute, sub license, and/or sell copies of the Software, and to
12 * permit persons to whom the Software is furnished to do so, subject to
13 * the following conditions:
14 *
15 * The above copyright notice and this permission notice (including the
16 * next paragraph) shall be included in all copies or substantial portions
17 * of the Software.
18 *
19 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
20 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
21 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
22 * IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR
23 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
24 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
25 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
26 *
27 **************************************************************************/
28
29 #include "pipe/p_video_codec.h"
30
31 #include "util/u_handle_table.h"
32 #include "util/u_video.h"
33
34 #include "vl/vl_vlc.h"
35 #include "vl/vl_winsys.h"
36
37 #include "va_private.h"
38
39 VAStatus
40 vlVaBeginPicture(VADriverContextP ctx, VAContextID context_id, VASurfaceID render_target)
41 {
42 vlVaDriver *drv;
43 vlVaContext *context;
44 vlVaSurface *surf;
45
46 if (!ctx)
47 return VA_STATUS_ERROR_INVALID_CONTEXT;
48
49 drv = VL_VA_DRIVER(ctx);
50 if (!drv)
51 return VA_STATUS_ERROR_INVALID_CONTEXT;
52
53 context = handle_table_get(drv->htab, context_id);
54 if (!context)
55 return VA_STATUS_ERROR_INVALID_CONTEXT;
56
57 surf = handle_table_get(drv->htab, render_target);
58 if (!surf || !surf->buffer)
59 return VA_STATUS_ERROR_INVALID_SURFACE;
60
61 context->target = surf->buffer;
62 if (!context->decoder) {
63 /* VPP */
64 if ((context->target->buffer_format != PIPE_FORMAT_B8G8R8A8_UNORM &&
65 context->target->buffer_format != PIPE_FORMAT_R8G8B8A8_UNORM &&
66 context->target->buffer_format != PIPE_FORMAT_B8G8R8X8_UNORM &&
67 context->target->buffer_format != PIPE_FORMAT_R8G8B8X8_UNORM) ||
68 context->target->interlaced)
69 return VA_STATUS_ERROR_UNIMPLEMENTED;
70 return VA_STATUS_SUCCESS;
71 }
72
73 context->decoder->begin_frame(context->decoder, context->target, &context->desc.base);
74
75 return VA_STATUS_SUCCESS;
76 }
77
78 static void
79 getReferenceFrame(vlVaDriver *drv, VASurfaceID surface_id,
80 struct pipe_video_buffer **ref_frame)
81 {
82 vlVaSurface *surf = handle_table_get(drv->htab, surface_id);
83 if (surf)
84 *ref_frame = surf->buffer;
85 else
86 *ref_frame = NULL;
87 }
88
/* Translate the codec-specific VAPictureParameterBuffer* into the gallium
 * pipe picture description in context->desc.  Only the fields the gallium
 * decoders consume are copied; VA fields intentionally left untranslated
 * are kept as commented-out names in the code below.
 */
static void
handlePictureParameterBuffer(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf)
{
   VAPictureParameterBufferMPEG2 *mpeg2;
   VAPictureParameterBufferH264 *h264;
   VAPictureParameterBufferVC1 * vc1;
   VAPictureParameterBufferMPEG4 *mpeg4;
   VAPictureParameterBufferHEVC *hevc;
   vlVaSurface *surf_forward;
   vlVaSurface *surf_backward;
   unsigned int i;
   /* all-zero fallback matrices, see the MPEG4 case below */
   static const uint8_t default_intra_quant_matrix[64] = { 0 };
   static const uint8_t default_non_intra_quant_matrix[64] = { 0 };

   switch (u_reduce_video_profile(context->decoder->profile)) {
   case PIPE_VIDEO_FORMAT_MPEG12:
      assert(buf->size >= sizeof(VAPictureParameterBufferMPEG2) && buf->num_elements == 1);
      mpeg2 = buf->data;
      /*horizontal_size;*/
      /*vertical_size;*/
      getReferenceFrame(drv, mpeg2->forward_reference_picture, &context->desc.mpeg12.ref[0]);
      getReferenceFrame(drv, mpeg2->backward_reference_picture, &context->desc.mpeg12.ref[1]);
      context->desc.mpeg12.picture_coding_type = mpeg2->picture_coding_type;
      /* VA packs all four f_codes into one 16-bit word, 4 bits each;
       * each field is converted by subtracting one for the pipe desc. */
      context->desc.mpeg12.f_code[0][0] = ((mpeg2->f_code >> 12) & 0xf) - 1;
      context->desc.mpeg12.f_code[0][1] = ((mpeg2->f_code >> 8) & 0xf) - 1;
      context->desc.mpeg12.f_code[1][0] = ((mpeg2->f_code >> 4) & 0xf) - 1;
      context->desc.mpeg12.f_code[1][1] = (mpeg2->f_code & 0xf) - 1;
      context->desc.mpeg12.intra_dc_precision =
         mpeg2->picture_coding_extension.bits.intra_dc_precision;
      context->desc.mpeg12.picture_structure =
         mpeg2->picture_coding_extension.bits.picture_structure;
      context->desc.mpeg12.top_field_first =
         mpeg2->picture_coding_extension.bits.top_field_first;
      context->desc.mpeg12.frame_pred_frame_dct =
         mpeg2->picture_coding_extension.bits.frame_pred_frame_dct;
      context->desc.mpeg12.concealment_motion_vectors =
         mpeg2->picture_coding_extension.bits.concealment_motion_vectors;
      context->desc.mpeg12.q_scale_type =
         mpeg2->picture_coding_extension.bits.q_scale_type;
      context->desc.mpeg12.intra_vlc_format =
         mpeg2->picture_coding_extension.bits.intra_vlc_format;
      context->desc.mpeg12.alternate_scan =
         mpeg2->picture_coding_extension.bits.alternate_scan;
      /*repeat_first_field*/
      /*progressive_frame*/
      /*is_first_field*/
      break;

   case PIPE_VIDEO_FORMAT_MPEG4_AVC:
      assert(buf->size >= sizeof(VAPictureParameterBufferH264) && buf->num_elements == 1);
      h264 = buf->data;
      /*CurrPic*/
      context->desc.h264.field_order_cnt[0] = h264->CurrPic.TopFieldOrderCnt;
      context->desc.h264.field_order_cnt[1] = h264->CurrPic.BottomFieldOrderCnt;
      /*ReferenceFrames[16]*/
      /*picture_width_in_mbs_minus1*/
      /*picture_height_in_mbs_minus1*/
      /*bit_depth_luma_minus8*/
      /*bit_depth_chroma_minus8*/
      context->desc.h264.num_ref_frames = h264->num_ref_frames;
      /*chroma_format_idc*/
      /*residual_colour_transform_flag*/
      /*gaps_in_frame_num_value_allowed_flag*/
      /* SPS fields reconstructed from the VA seq_fields bitfield; the pps
       * and sps structures are assumed allocated by the context setup.
       * NOTE(review): no NULL check on pps/sps here -- presumably
       * guaranteed by context creation; confirm against vlVaCreateContext. */
      context->desc.h264.pps->sps->frame_mbs_only_flag =
         h264->seq_fields.bits.frame_mbs_only_flag;
      context->desc.h264.pps->sps->mb_adaptive_frame_field_flag =
         h264->seq_fields.bits.mb_adaptive_frame_field_flag;
      context->desc.h264.pps->sps->direct_8x8_inference_flag =
         h264->seq_fields.bits.direct_8x8_inference_flag;
      /*MinLumaBiPredSize8x8*/
      context->desc.h264.pps->sps->log2_max_frame_num_minus4 =
         h264->seq_fields.bits.log2_max_frame_num_minus4;
      context->desc.h264.pps->sps->pic_order_cnt_type =
         h264->seq_fields.bits.pic_order_cnt_type;
      context->desc.h264.pps->sps->log2_max_pic_order_cnt_lsb_minus4 =
         h264->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4;
      context->desc.h264.pps->sps->delta_pic_order_always_zero_flag =
         h264->seq_fields.bits.delta_pic_order_always_zero_flag;
      /*num_slice_groups_minus1*/
      /*slice_group_map_type*/
      /*slice_group_change_rate_minus1*/
      context->desc.h264.pps->pic_init_qp_minus26 =
         h264->pic_init_qp_minus26;
      /*pic_init_qs_minus26*/
      context->desc.h264.pps->chroma_qp_index_offset =
         h264->chroma_qp_index_offset;
      context->desc.h264.pps->second_chroma_qp_index_offset =
         h264->second_chroma_qp_index_offset;
      context->desc.h264.pps->entropy_coding_mode_flag =
         h264->pic_fields.bits.entropy_coding_mode_flag;
      context->desc.h264.pps->weighted_pred_flag =
         h264->pic_fields.bits.weighted_pred_flag;
      context->desc.h264.pps->weighted_bipred_idc =
         h264->pic_fields.bits.weighted_bipred_idc;
      context->desc.h264.pps->transform_8x8_mode_flag =
         h264->pic_fields.bits.transform_8x8_mode_flag;
      context->desc.h264.field_pic_flag =
         h264->pic_fields.bits.field_pic_flag;
      context->desc.h264.pps->constrained_intra_pred_flag =
         h264->pic_fields.bits.constrained_intra_pred_flag;
      context->desc.h264.pps->bottom_field_pic_order_in_frame_present_flag =
         h264->pic_fields.bits.pic_order_present_flag;
      context->desc.h264.pps->deblocking_filter_control_present_flag =
         h264->pic_fields.bits.deblocking_filter_control_present_flag;
      context->desc.h264.pps->redundant_pic_cnt_present_flag =
         h264->pic_fields.bits.redundant_pic_cnt_present_flag;
      /*reference_pic_flag*/
      context->desc.h264.frame_num = h264->frame_num;
      break;

   case PIPE_VIDEO_FORMAT_VC1:
      assert(buf->size >= sizeof(VAPictureParameterBufferVC1) && buf->num_elements == 1);
      vc1 = buf->data;
      getReferenceFrame(drv, vc1->forward_reference_picture, &context->desc.vc1.ref[0]);
      getReferenceFrame(drv, vc1->backward_reference_picture, &context->desc.vc1.ref[1]);
      context->desc.vc1.picture_type = vc1->picture_fields.bits.picture_type;
      context->desc.vc1.frame_coding_mode = vc1->picture_fields.bits.frame_coding_mode;
      /* boolean flags derived from the post-processing field */
      context->desc.vc1.postprocflag = vc1->post_processing != 0;
      context->desc.vc1.pulldown = vc1->sequence_fields.bits.pulldown;
      context->desc.vc1.interlace = vc1->sequence_fields.bits.interlace;
      context->desc.vc1.tfcntrflag = vc1->sequence_fields.bits.tfcntrflag;
      context->desc.vc1.finterpflag = vc1->sequence_fields.bits.finterpflag;
      context->desc.vc1.psf = vc1->sequence_fields.bits.psf;
      context->desc.vc1.dquant = vc1->pic_quantizer_fields.bits.dquant;
      context->desc.vc1.panscan_flag = vc1->entrypoint_fields.bits.panscan_flag;
      context->desc.vc1.refdist_flag =
         vc1->reference_fields.bits.reference_distance_flag;
      context->desc.vc1.quantizer = vc1->pic_quantizer_fields.bits.quantizer;
      context->desc.vc1.extended_mv = vc1->mv_fields.bits.extended_mv_flag;
      context->desc.vc1.extended_dmv = vc1->mv_fields.bits.extended_dmv_flag;
      context->desc.vc1.overlap = vc1->sequence_fields.bits.overlap;
      context->desc.vc1.vstransform =
         vc1->transform_fields.bits.variable_sized_transform_flag;
      context->desc.vc1.loopfilter = vc1->entrypoint_fields.bits.loopfilter;
      context->desc.vc1.fastuvmc = vc1->fast_uvmc_flag;
      context->desc.vc1.range_mapy_flag = vc1->range_mapping_fields.bits.luma_flag;
      context->desc.vc1.range_mapy = vc1->range_mapping_fields.bits.luma;
      context->desc.vc1.range_mapuv_flag = vc1->range_mapping_fields.bits.chroma_flag;
      context->desc.vc1.range_mapuv = vc1->range_mapping_fields.bits.chroma;
      context->desc.vc1.multires = vc1->sequence_fields.bits.multires;
      context->desc.vc1.syncmarker = vc1->sequence_fields.bits.syncmarker;
      context->desc.vc1.rangered = vc1->sequence_fields.bits.rangered;
      context->desc.vc1.maxbframes = vc1->sequence_fields.bits.max_b_frames;
      context->desc.vc1.deblockEnable = vc1->post_processing != 0;
      context->desc.vc1.pquant = vc1->pic_quantizer_fields.bits.pic_quantizer_scale;
      break;

   case PIPE_VIDEO_FORMAT_MPEG4:
      assert(buf->size >= sizeof(VAPictureParameterBufferMPEG4) && buf->num_elements == 1);
      mpeg4 = buf->data;

      /* Keep a full copy of the VA picture parameters; the MPEG-4 start
       * code fixup (vlVaDecoderFixMPEG4Startcode) reads from it later. */
      context->mpeg4.pps = *mpeg4;

      /* vop_width */
      /* vop_height */
      /* forward_reference_picture */
      /* backward_reference_picture */
      context->desc.mpeg4.short_video_header =
         mpeg4->vol_fields.bits.short_video_header;
      /* chroma_format */
      context->desc.mpeg4.interlaced = mpeg4->vol_fields.bits.interlaced;
      /* obmc_disable */
      /* sprite_enable */
      /* sprite_warping_accuracy */
      context->desc.mpeg4.quant_type = mpeg4->vol_fields.bits.quant_type;
      context->desc.mpeg4.quarter_sample = mpeg4->vol_fields.bits.quarter_sample;
      /* data_partitioned */
      /* reversible_vlc */
      context->desc.mpeg4.resync_marker_disable =
         mpeg4->vol_fields.bits.resync_marker_disable;
      /* no_of_sprite_warping_points */
      /* sprite_trajectory_du */
      /* sprite_trajectory_dv */
      /* quant_precision */
      context->desc.mpeg4.vop_coding_type = mpeg4->vop_fields.bits.vop_coding_type;
      /* backward_reference_vop_coding_type */
      /* vop_rounding_type */
      /* intra_dc_vlc_thr */
      context->desc.mpeg4.top_field_first =
         mpeg4->vop_fields.bits.top_field_first;
      context->desc.mpeg4.alternate_vertical_scan_flag =
         mpeg4->vop_fields.bits.alternate_vertical_scan_flag;
      context->desc.mpeg4.vop_fcode_forward = mpeg4->vop_fcode_forward;
      context->desc.mpeg4.vop_fcode_backward = mpeg4->vop_fcode_backward;
      context->desc.mpeg4.vop_time_increment_resolution =
         mpeg4->vop_time_increment_resolution;
      /* num_gobs_in_vop */
      /* num_macroblocks_in_gob */
      /* same TRB/TRD used for both fields */
      context->desc.mpeg4.trb[0] = mpeg4->TRB;
      context->desc.mpeg4.trb[1] = mpeg4->TRB;
      context->desc.mpeg4.trd[0] = mpeg4->TRD;
      context->desc.mpeg4.trd[1] = mpeg4->TRD;

      /* default [non-]intra quant matrix because mpv does not set these
         matrices */
      if (!context->desc.mpeg4.intra_matrix)
         context->desc.mpeg4.intra_matrix = default_intra_quant_matrix;
      if (!context->desc.mpeg4.non_intra_matrix)
         context->desc.mpeg4.non_intra_matrix = default_non_intra_quant_matrix;

      surf_forward = handle_table_get(drv->htab, mpeg4->forward_reference_picture);
      if (surf_forward)
         context->desc.mpeg4.ref[0] = surf_forward->buffer;
      surf_backward = handle_table_get(drv->htab, mpeg4->backward_reference_picture);
      if (surf_backward)
         context->desc.mpeg4.ref[1] = surf_backward->buffer;

      /* number of bits needed to encode vop_time_increment
       * (bit length of vop_time_increment_resolution) */
      context->mpeg4.vti_bits = 0;
      for (i = context->desc.mpeg4.vop_time_increment_resolution; i > 0; i /= 2)
         ++context->mpeg4.vti_bits;

      break;

   case PIPE_VIDEO_FORMAT_HEVC:
      assert(buf->size >= sizeof(VAPictureParameterBufferHEVC) && buf->num_elements == 1);
      hevc = buf->data;
      context->desc.h265.pps->sps->chroma_format_idc = hevc->pic_fields.bits.chroma_format_idc;
      context->desc.h265.pps->sps->separate_colour_plane_flag =
         hevc->pic_fields.bits.separate_colour_plane_flag;
      context->desc.h265.pps->sps->pic_width_in_luma_samples = hevc->pic_width_in_luma_samples;
      context->desc.h265.pps->sps->pic_height_in_luma_samples = hevc->pic_height_in_luma_samples;
      context->desc.h265.pps->sps->bit_depth_luma_minus8 = hevc->bit_depth_luma_minus8;
      context->desc.h265.pps->sps->bit_depth_chroma_minus8 = hevc->bit_depth_chroma_minus8;
      context->desc.h265.pps->sps->log2_max_pic_order_cnt_lsb_minus4 =
         hevc->log2_max_pic_order_cnt_lsb_minus4;
      context->desc.h265.pps->sps->sps_max_dec_pic_buffering_minus1 =
         hevc->sps_max_dec_pic_buffering_minus1;
      context->desc.h265.pps->sps->log2_min_luma_coding_block_size_minus3 =
         hevc->log2_min_luma_coding_block_size_minus3;
      context->desc.h265.pps->sps->log2_diff_max_min_luma_coding_block_size =
         hevc->log2_diff_max_min_luma_coding_block_size;
      context->desc.h265.pps->sps->log2_min_transform_block_size_minus2 =
         hevc->log2_min_transform_block_size_minus2;
      context->desc.h265.pps->sps->log2_diff_max_min_transform_block_size =
         hevc->log2_diff_max_min_transform_block_size;
      context->desc.h265.pps->sps->max_transform_hierarchy_depth_inter =
         hevc->max_transform_hierarchy_depth_inter;
      context->desc.h265.pps->sps->max_transform_hierarchy_depth_intra =
         hevc->max_transform_hierarchy_depth_intra;
      context->desc.h265.pps->sps->scaling_list_enabled_flag =
         hevc->pic_fields.bits.scaling_list_enabled_flag;
      context->desc.h265.pps->sps->amp_enabled_flag = hevc->pic_fields.bits.amp_enabled_flag;
      context->desc.h265.pps->sps->sample_adaptive_offset_enabled_flag =
         hevc->slice_parsing_fields.bits.sample_adaptive_offset_enabled_flag;
      context->desc.h265.pps->sps->pcm_enabled_flag = hevc->pic_fields.bits.pcm_enabled_flag;
      /* PCM parameters are only meaningful when PCM is enabled */
      if (hevc->pic_fields.bits.pcm_enabled_flag == 1) {
         context->desc.h265.pps->sps->pcm_sample_bit_depth_luma_minus1 =
            hevc->pcm_sample_bit_depth_luma_minus1;
         context->desc.h265.pps->sps->pcm_sample_bit_depth_chroma_minus1 =
            hevc->pcm_sample_bit_depth_chroma_minus1;
         context->desc.h265.pps->sps->log2_min_pcm_luma_coding_block_size_minus3 =
            hevc->log2_min_pcm_luma_coding_block_size_minus3;
         context->desc.h265.pps->sps->log2_diff_max_min_pcm_luma_coding_block_size =
            hevc->log2_diff_max_min_pcm_luma_coding_block_size;
         context->desc.h265.pps->sps->pcm_loop_filter_disabled_flag =
            hevc->pic_fields.bits.pcm_loop_filter_disabled_flag;
      }
      context->desc.h265.pps->sps->num_short_term_ref_pic_sets = hevc->num_short_term_ref_pic_sets;
      context->desc.h265.pps->sps->long_term_ref_pics_present_flag =
         hevc->slice_parsing_fields.bits.long_term_ref_pics_present_flag;
      context->desc.h265.pps->sps->num_long_term_ref_pics_sps = hevc->num_long_term_ref_pic_sps;
      context->desc.h265.pps->sps->sps_temporal_mvp_enabled_flag =
         hevc->slice_parsing_fields.bits.sps_temporal_mvp_enabled_flag;
      context->desc.h265.pps->sps->strong_intra_smoothing_enabled_flag =
         hevc->pic_fields.bits.strong_intra_smoothing_enabled_flag;

      context->desc.h265.pps->dependent_slice_segments_enabled_flag =
         hevc->slice_parsing_fields.bits.dependent_slice_segments_enabled_flag;
      context->desc.h265.pps->output_flag_present_flag =
         hevc->slice_parsing_fields.bits.output_flag_present_flag;
      context->desc.h265.pps->num_extra_slice_header_bits = hevc->num_extra_slice_header_bits;
      context->desc.h265.pps->sign_data_hiding_enabled_flag =
         hevc->pic_fields.bits.sign_data_hiding_enabled_flag;
      context->desc.h265.pps->cabac_init_present_flag =
         hevc->slice_parsing_fields.bits.cabac_init_present_flag;
      context->desc.h265.pps->num_ref_idx_l0_default_active_minus1 =
         hevc->num_ref_idx_l0_default_active_minus1;
      context->desc.h265.pps->num_ref_idx_l1_default_active_minus1 =
         hevc->num_ref_idx_l1_default_active_minus1;
      context->desc.h265.pps->init_qp_minus26 = hevc->init_qp_minus26;
      context->desc.h265.pps->constrained_intra_pred_flag =
         hevc->pic_fields.bits.constrained_intra_pred_flag;
      context->desc.h265.pps->transform_skip_enabled_flag =
         hevc->pic_fields.bits.transform_skip_enabled_flag;
      context->desc.h265.pps->cu_qp_delta_enabled_flag =
         hevc->pic_fields.bits.cu_qp_delta_enabled_flag;
      context->desc.h265.pps->diff_cu_qp_delta_depth = hevc->diff_cu_qp_delta_depth;
      context->desc.h265.pps->pps_cb_qp_offset = hevc->pps_cb_qp_offset;
      context->desc.h265.pps->pps_cr_qp_offset = hevc->pps_cr_qp_offset;
      context->desc.h265.pps->pps_slice_chroma_qp_offsets_present_flag =
         hevc->slice_parsing_fields.bits.pps_slice_chroma_qp_offsets_present_flag;
      context->desc.h265.pps->weighted_pred_flag = hevc->pic_fields.bits.weighted_pred_flag;
      context->desc.h265.pps->weighted_bipred_flag = hevc->pic_fields.bits.weighted_bipred_flag;
      context->desc.h265.pps->transquant_bypass_enabled_flag =
         hevc->pic_fields.bits.transquant_bypass_enabled_flag;
      context->desc.h265.pps->tiles_enabled_flag = hevc->pic_fields.bits.tiles_enabled_flag;
      context->desc.h265.pps->entropy_coding_sync_enabled_flag =
         hevc->pic_fields.bits.entropy_coding_sync_enabled_flag;
      if (hevc->pic_fields.bits.tiles_enabled_flag == 1) {
         context->desc.h265.pps->num_tile_columns_minus1 = hevc->num_tile_columns_minus1;
         context->desc.h265.pps->num_tile_rows_minus1 = hevc->num_tile_rows_minus1;
         /* 19 columns / 21 rows match the VA struct array sizes */
         for (i = 0 ; i < 19 ; i++)
            context->desc.h265.pps->column_width_minus1[i] = hevc->column_width_minus1[i];
         for (i = 0 ; i < 21 ; i++)
            context->desc.h265.pps->row_height_minus1[i] = hevc->row_height_minus1[i];
         context->desc.h265.pps->loop_filter_across_tiles_enabled_flag =
            hevc->pic_fields.bits.loop_filter_across_tiles_enabled_flag;
      }
      context->desc.h265.pps->pps_loop_filter_across_slices_enabled_flag =
         hevc->pic_fields.bits.pps_loop_filter_across_slices_enabled_flag;
      context->desc.h265.pps->deblocking_filter_override_enabled_flag =
         hevc->slice_parsing_fields.bits.deblocking_filter_override_enabled_flag;
      context->desc.h265.pps->pps_deblocking_filter_disabled_flag =
         hevc->slice_parsing_fields.bits.pps_disable_deblocking_filter_flag;
      context->desc.h265.pps->pps_beta_offset_div2 = hevc->pps_beta_offset_div2;
      context->desc.h265.pps->pps_tc_offset_div2 = hevc->pps_tc_offset_div2;
      context->desc.h265.pps->lists_modification_present_flag =
         hevc->slice_parsing_fields.bits.lists_modification_present_flag;
      context->desc.h265.pps->log2_parallel_merge_level_minus2 =
         hevc->log2_parallel_merge_level_minus2;
      context->desc.h265.pps->slice_segment_header_extension_present_flag =
         hevc->slice_parsing_fields.bits.slice_segment_header_extension_present_flag;

      context->desc.h265.IDRPicFlag = hevc->slice_parsing_fields.bits.IdrPicFlag;
      context->desc.h265.RAPPicFlag = hevc->slice_parsing_fields.bits.RapPicFlag;

      context->desc.h265.CurrPicOrderCntVal = hevc->CurrPic.pic_order_cnt;

      /* initialize the three reference picture sets with the 0xFF
       * "unused" sentinel before filling them from ReferenceFrames */
      for (i = 0 ; i < 8 ; i++) {
         context->desc.h265.RefPicSetStCurrBefore[i] = 0xFF;
         context->desc.h265.RefPicSetStCurrAfter[i] = 0xFF;
         context->desc.h265.RefPicSetLtCurr[i] = 0xFF;
      }
      context->desc.h265.NumPocStCurrBefore = 0;
      context->desc.h265.NumPocStCurrAfter = 0;
      context->desc.h265.NumPocLtCurr = 0;
      unsigned int iBefore = 0;
      unsigned int iAfter = 0;
      unsigned int iCurr = 0;
      for (i = 0 ; i < 15 ; i++) {
         context->desc.h265.PicOrderCntVal[i] = hevc->ReferenceFrames[i].pic_order_cnt;

         /* low 7 bits of the picture id; 0x7F marks an invalid entry
          * -- TODO confirm this masking convention against the callers */
         unsigned int index = hevc->ReferenceFrames[i].picture_id & 0x7F;

         if (index == 0x7F)
            continue;

         getReferenceFrame(drv, hevc->ReferenceFrames[i].picture_id, &context->desc.h265.ref[i]);

         /* classify the reference into before/after/long-term current
          * sets, each capped at 8 entries */
         if ((hevc->ReferenceFrames[i].flags & VA_PICTURE_HEVC_RPS_ST_CURR_BEFORE) && (iBefore < 8)) {
            context->desc.h265.RefPicSetStCurrBefore[iBefore++] = i;
            context->desc.h265.NumPocStCurrBefore++;
         }
         if ((hevc->ReferenceFrames[i].flags & VA_PICTURE_HEVC_RPS_ST_CURR_AFTER) && (iAfter < 8)) {
            context->desc.h265.RefPicSetStCurrAfter[iAfter++] = i;
            context->desc.h265.NumPocStCurrAfter++;
         }
         if ((hevc->ReferenceFrames[i].flags & VA_PICTURE_HEVC_RPS_LT_CURR) && (iCurr < 8)) {
            context->desc.h265.RefPicSetLtCurr[iCurr++] = i;
            context->desc.h265.NumPocLtCurr++;
         }
      }
      break;

   default:
      break;
   }
}
457
458 static void
459 handleIQMatrixBuffer(vlVaContext *context, vlVaBuffer *buf)
460 {
461 VAIQMatrixBufferMPEG2 *mpeg2;
462 VAIQMatrixBufferH264 *h264;
463 VAIQMatrixBufferMPEG4 *mpeg4;
464 VAIQMatrixBufferHEVC *h265;
465
466 switch (u_reduce_video_profile(context->decoder->profile)) {
467 case PIPE_VIDEO_FORMAT_MPEG12:
468 assert(buf->size >= sizeof(VAIQMatrixBufferMPEG2) && buf->num_elements == 1);
469 mpeg2 = buf->data;
470 if (mpeg2->load_intra_quantiser_matrix)
471 context->desc.mpeg12.intra_matrix = mpeg2->intra_quantiser_matrix;
472 else
473 context->desc.mpeg12.intra_matrix = NULL;
474
475 if (mpeg2->load_non_intra_quantiser_matrix)
476 context->desc.mpeg12.non_intra_matrix = mpeg2->non_intra_quantiser_matrix;
477 else
478 context->desc.mpeg12.non_intra_matrix = NULL;
479 break;
480
481 case PIPE_VIDEO_FORMAT_MPEG4_AVC:
482 assert(buf->size >= sizeof(VAIQMatrixBufferH264) && buf->num_elements == 1);
483 h264 = buf->data;
484 memcpy(&context->desc.h264.pps->ScalingList4x4, h264->ScalingList4x4, 6 * 16);
485 memcpy(&context->desc.h264.pps->ScalingList8x8, h264->ScalingList8x8, 2 * 64);
486 break;
487
488 case PIPE_VIDEO_FORMAT_HEVC:
489 assert(buf->size >= sizeof(VAIQMatrixBufferH264) && buf->num_elements == 1);
490 h265 = buf->data;
491 memcpy(&context->desc.h265.pps->sps->ScalingList4x4, h265->ScalingList4x4, 6 * 16);
492 memcpy(&context->desc.h265.pps->sps->ScalingList8x8, h265->ScalingList8x8, 6 * 64);
493 memcpy(&context->desc.h265.pps->sps->ScalingList16x16, h265->ScalingList16x16, 6 * 64);
494 memcpy(&context->desc.h265.pps->sps->ScalingList32x32, h265->ScalingList32x32, 2 * 64);
495 memcpy(&context->desc.h265.pps->sps->ScalingListDCCoeff16x16, h265->ScalingListDC16x16, 6);
496 memcpy(&context->desc.h265.pps->sps->ScalingListDCCoeff32x32, h265->ScalingListDC32x32, 2);
497 break;
498
499 case PIPE_VIDEO_FORMAT_MPEG4:
500 assert(buf->size >= sizeof(VAIQMatrixBufferMPEG4) && buf->num_elements == 1);
501 mpeg4 = buf->data;
502
503 if (mpeg4->load_intra_quant_mat)
504 context->desc.mpeg4.intra_matrix = mpeg4->intra_quant_mat;
505 else
506 context->desc.mpeg4.intra_matrix = NULL;
507
508 if (mpeg4->load_non_intra_quant_mat)
509 context->desc.mpeg4.non_intra_matrix = mpeg4->non_intra_quant_mat;
510 else
511 context->desc.mpeg4.non_intra_matrix = NULL;
512 break;
513
514 default:
515 break;
516 }
517 }
518
519 static void
520 handleSliceParameterBuffer(vlVaContext *context, vlVaBuffer *buf)
521 {
522 VASliceParameterBufferH264 *h264;
523 VASliceParameterBufferMPEG4 *mpeg4;
524 VASliceParameterBufferHEVC *h265;
525
526 switch (u_reduce_video_profile(context->decoder->profile)) {
527 case PIPE_VIDEO_FORMAT_MPEG4_AVC:
528 assert(buf->size >= sizeof(VASliceParameterBufferH264) && buf->num_elements == 1);
529 h264 = buf->data;
530 context->desc.h264.num_ref_idx_l0_active_minus1 =
531 h264->num_ref_idx_l0_active_minus1;
532 context->desc.h264.num_ref_idx_l1_active_minus1 =
533 h264->num_ref_idx_l1_active_minus1;
534 break;
535 case PIPE_VIDEO_FORMAT_MPEG4:
536 assert(buf->size >= sizeof(VASliceParameterBufferMPEG4) && buf->num_elements == 1);
537 mpeg4 = buf->data;
538
539 context->mpeg4.quant_scale = mpeg4->quant_scale;
540 break;
541 case PIPE_VIDEO_FORMAT_HEVC:
542 assert(buf->size >= sizeof(VASliceParameterBufferHEVC) && buf->num_elements == 1);
543 h265 = buf->data;
544 for (int i = 0 ; i < 2 ; i++) {
545 for (int j = 0 ; j < 15 ; j++)
546 context->desc.h265.RefPicList[i][j] = h265->RefPicList[i][j];
547 }
548 context->desc.h265.UseRefPicList = true;
549 break;
550 default:
551 break;
552 }
553 }
554
/* Minimal MSB-first bit writer used to synthesize MPEG-4 start codes
 * (see write_bit/write_bits and vlVaDecoderFixMPEG4Startcode). */
struct bit_stream
{
   uint8_t *data;       /* destination buffer, owned by the caller */
   unsigned int length; /* bits */
   unsigned int pos; /* bits */
};
561
562 static inline void
563 write_bit(struct bit_stream *writer, unsigned int bit)
564 {
565 assert(writer->length > (writer)->pos);
566 writer->data[writer->pos>>3] |= ((bit & 1)<<(7 - (writer->pos & 7)));
567 writer->pos++;
568 }
569
/* Append the low `len` bits of `bits` to the stream, most significant
 * of those bits first. */
static inline void
write_bits(struct bit_stream *writer, unsigned int bits, unsigned int len)
{
   assert(len <= sizeof(bits) * 8);
   while (len--)
      write_bit(writer, bits >> len);
}
578
/* Synthesize the MPEG-4 GOV/VOP headers that some players omit from the
 * slice data and stash them in context->mpeg4.start_code, so the decoder
 * receives a well-formed elementary stream.  For I-frames a
 * group_of_vop header (start code 0x000001b3) is emitted first; a
 * vop header (start code 0x000001b6) is always emitted.
 */
static void
vlVaDecoderFixMPEG4Startcode(vlVaContext *context)
{
   /* 0x000001b6 = VOP start code; bit writing begins after the 32-bit code */
   uint8_t vop[] = { 0x00, 0x00, 0x01, 0xb6, 0x00, 0x00, 0x00, 0x00, 0x00 };
   struct bit_stream bs_vop = {vop, sizeof(vop)*8, 32};
   unsigned int vop_time_inc;
   int mod_time;
   unsigned int vop_size;
   unsigned int vop_coding_type = context->desc.mpeg4.vop_coding_type;

   context->mpeg4.start_code_size = 0;
   memset(context->mpeg4.start_code, 0, sizeof(context->mpeg4.start_code));
   /* NOTE(review): vop_coding_type+1 maps the MPEG-4 coding type onto the
    * PIPE_MPEG12_PICTURE_CODING_TYPE_* enum values -- confirm the enum
    * offsets in p_video_codec.h */
   if (vop_coding_type+1 == PIPE_MPEG12_PICTURE_CODING_TYPE_I) {
      /* derive wall-clock GOV timestamp from the running frame counter */
      unsigned int vop_time = context->mpeg4.frame_num/
            context->desc.mpeg4.vop_time_increment_resolution;
      unsigned int vop_hour = vop_time / 3600;
      unsigned int vop_minute = (vop_time / 60) % 60;
      unsigned int vop_second = vop_time % 60;
      /* 0x000001b3 = group_of_vop start code */
      uint8_t group_of_vop[] = { 0x00, 0x00, 0x01, 0xb3, 0x00, 0x00, 0x00 };
      struct bit_stream bs_gvop = {group_of_vop, sizeof(group_of_vop)*8, 32};

      write_bits(&bs_gvop, vop_hour, 5);
      write_bits(&bs_gvop, vop_minute, 6);
      write_bit(&bs_gvop, 1); /* marker_bit */
      write_bits(&bs_gvop, vop_second, 6);
      write_bit(&bs_gvop, 0); /* closed_gov */ /* TODO replace magic */
      write_bit(&bs_gvop, 0); /* broken_link */
      write_bit(&bs_gvop, 0); /* padding */
      write_bits(&bs_gvop, 7, 3); /* padding */

      memcpy(context->mpeg4.start_code, group_of_vop, sizeof(group_of_vop));
      context->mpeg4.start_code_size += sizeof(group_of_vop);
   }

   write_bits(&bs_vop, vop_coding_type, 2);
   /* write one extra modulo_time_base '1' bit when a whole second has
    * elapsed on a non-I frame */
   mod_time = context->mpeg4.frame_num %
         context->desc.mpeg4.vop_time_increment_resolution == 0 &&
         vop_coding_type+1 != PIPE_MPEG12_PICTURE_CODING_TYPE_I;
   while (mod_time--)
      write_bit(&bs_vop, 1); /* modulo_time_base */
   write_bit(&bs_vop, 0); /* modulo_time_base */

   write_bit(&bs_vop, 1); /* marker_bit */
   vop_time_inc = context->mpeg4.frame_num %
         context->desc.mpeg4.vop_time_increment_resolution;
   /* vti_bits was computed in handlePictureParameterBuffer */
   write_bits(&bs_vop, vop_time_inc, context->mpeg4.vti_bits);
   write_bit(&bs_vop, 1); /* marker_bit */
   write_bit(&bs_vop, 1); /* vop_coded */
   if (vop_coding_type+1 == PIPE_MPEG12_PICTURE_CODING_TYPE_P)
      write_bit(&bs_vop, context->mpeg4.pps.vop_fields.bits.vop_rounding_type);
   write_bits(&bs_vop, context->mpeg4.pps.vop_fields.bits.intra_dc_vlc_thr, 3);
   if (context->mpeg4.pps.vol_fields.bits.interlaced) {
      write_bit(&bs_vop, context->mpeg4.pps.vop_fields.bits.top_field_first);
      write_bit(&bs_vop, context->mpeg4.pps.vop_fields.bits.alternate_vertical_scan_flag);
   }

   write_bits(&bs_vop, context->mpeg4.quant_scale, context->mpeg4.pps.quant_precision);
   if (vop_coding_type+1 != PIPE_MPEG12_PICTURE_CODING_TYPE_I)
      write_bits(&bs_vop, context->desc.mpeg4.vop_fcode_forward, 3);
   if (vop_coding_type+1 == PIPE_MPEG12_PICTURE_CODING_TYPE_B)
      write_bits(&bs_vop, context->desc.mpeg4.vop_fcode_backward, 3);

   /* append the (byte-truncated) VOP header after any GOV header */
   vop_size = bs_vop.pos/8;
   memcpy(context->mpeg4.start_code + context->mpeg4.start_code_size, vop, vop_size);
   context->mpeg4.start_code_size += vop_size;
}
645
646 static unsigned int
647 bufHasStartcode(vlVaBuffer *buf, unsigned int code, unsigned int bits)
648 {
649 struct vl_vlc vlc = {0};
650 int i;
651
652 /* search the first 64 bytes for a startcode */
653 vl_vlc_init(&vlc, 1, (const void * const*)&buf->data, &buf->size);
654 for (i = 0; i < 64 && vl_vlc_bits_left(&vlc) >= bits; ++i) {
655 if (vl_vlc_peekbits(&vlc, bits) == code)
656 return 1;
657 vl_vlc_eatbits(&vlc, 8);
658 vl_vlc_fillbits(&vlc);
659 }
660
661 return 0;
662 }
663
664 static void
665 handleVASliceDataBufferType(vlVaContext *context, vlVaBuffer *buf)
666 {
667 enum pipe_video_format format;
668 unsigned num_buffers = 0;
669 void * const *buffers[2];
670 unsigned sizes[2];
671 static const uint8_t start_code_h264[] = { 0x00, 0x00, 0x01 };
672 static const uint8_t start_code_h265[] = { 0x00, 0x00, 0x01 };
673 static const uint8_t start_code_vc1[] = { 0x00, 0x00, 0x01, 0x0d };
674
675 format = u_reduce_video_profile(context->decoder->profile);
676 switch (format) {
677 case PIPE_VIDEO_FORMAT_MPEG4_AVC:
678 if (bufHasStartcode(buf, 0x000001, 24))
679 break;
680
681 buffers[num_buffers] = (void *const)&start_code_h264;
682 sizes[num_buffers++] = sizeof(start_code_h264);
683 break;
684 case PIPE_VIDEO_FORMAT_HEVC:
685 if (bufHasStartcode(buf, 0x000001, 24))
686 break;
687
688 buffers[num_buffers] = (void *const)&start_code_h265;
689 sizes[num_buffers++] = sizeof(start_code_h265);
690 break;
691 case PIPE_VIDEO_FORMAT_VC1:
692 if (bufHasStartcode(buf, 0x0000010d, 32) ||
693 bufHasStartcode(buf, 0x0000010c, 32) ||
694 bufHasStartcode(buf, 0x0000010b, 32))
695 break;
696
697 buffers[num_buffers] = (void *const)&start_code_vc1;
698 sizes[num_buffers++] = sizeof(start_code_vc1);
699 break;
700 case PIPE_VIDEO_FORMAT_MPEG4:
701 if (bufHasStartcode(buf, 0x000001, 24))
702 break;
703
704 vlVaDecoderFixMPEG4Startcode(context);
705 buffers[num_buffers] = (void *)context->mpeg4.start_code;
706 sizes[num_buffers++] = context->mpeg4.start_code_size;
707 default:
708 break;
709 }
710
711 buffers[num_buffers] = buf->data;
712 sizes[num_buffers] = buf->size;
713 ++num_buffers;
714 context->decoder->decode_bitstream(context->decoder, context->target, &context->desc.base,
715 num_buffers, (const void * const*)buffers, sizes);
716 }
717
718 static VAStatus
719 handleVAProcPipelineParameterBufferType(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf)
720 {
721 struct u_rect src_rect;
722 struct u_rect dst_rect;
723 struct u_rect *dirty_area;
724 vlVaSurface *src_surface;
725 VAProcPipelineParameterBuffer *pipeline_param;
726 struct pipe_surface **surfaces;
727 struct pipe_screen *screen;
728 struct pipe_surface *psurf;
729
730 if (!drv || !context)
731 return VA_STATUS_ERROR_INVALID_CONTEXT;
732
733 if (!buf || !buf->data)
734 return VA_STATUS_ERROR_INVALID_BUFFER;
735
736 if (!context->target)
737 return VA_STATUS_ERROR_INVALID_SURFACE;
738
739 pipeline_param = (VAProcPipelineParameterBuffer *)buf->data;
740
741 src_surface = handle_table_get(drv->htab, pipeline_param->surface);
742 if (!src_surface || !src_surface->buffer)
743 return VA_STATUS_ERROR_INVALID_SURFACE;
744
745 surfaces = context->target->get_surfaces(context->target);
746
747 if (!surfaces || !surfaces[0])
748 return VA_STATUS_ERROR_INVALID_SURFACE;
749
750 screen = drv->pipe->screen;
751
752 psurf = surfaces[0];
753
754 src_rect.x0 = pipeline_param->surface_region->x;
755 src_rect.y0 = pipeline_param->surface_region->y;
756 src_rect.x1 = pipeline_param->surface_region->x + pipeline_param->surface_region->width;
757 src_rect.y1 = pipeline_param->surface_region->y + pipeline_param->surface_region->height;
758
759 dst_rect.x0 = pipeline_param->output_region->x;
760 dst_rect.y0 = pipeline_param->output_region->y;
761 dst_rect.x1 = pipeline_param->output_region->x + pipeline_param->output_region->width;
762 dst_rect.y1 = pipeline_param->output_region->y + pipeline_param->output_region->height;
763
764 dirty_area = vl_screen_get_dirty_area(drv->vscreen);
765
766 vl_compositor_clear_layers(&drv->cstate);
767 vl_compositor_set_buffer_layer(&drv->cstate, &drv->compositor, 0, src_surface->buffer, &src_rect, NULL, VL_COMPOSITOR_WEAVE);
768 vl_compositor_set_layer_dst_area(&drv->cstate, 0, &dst_rect);
769 vl_compositor_render(&drv->cstate, &drv->compositor, psurf, dirty_area, true);
770
771 screen->fence_reference(screen, &src_surface->fence, NULL);
772 drv->pipe->flush(drv->pipe, &src_surface->fence, 0);
773
774 return VA_STATUS_SUCCESS;
775 }
776
777 VAStatus
778 vlVaRenderPicture(VADriverContextP ctx, VAContextID context_id, VABufferID *buffers, int num_buffers)
779 {
780 vlVaDriver *drv;
781 vlVaContext *context;
782 VAStatus vaStatus = VA_STATUS_SUCCESS;
783
784 unsigned i;
785
786 if (!ctx)
787 return VA_STATUS_ERROR_INVALID_CONTEXT;
788
789 drv = VL_VA_DRIVER(ctx);
790 if (!drv)
791 return VA_STATUS_ERROR_INVALID_CONTEXT;
792
793 context = handle_table_get(drv->htab, context_id);
794 if (!context)
795 return VA_STATUS_ERROR_INVALID_CONTEXT;
796
797 for (i = 0; i < num_buffers; ++i) {
798 vlVaBuffer *buf = handle_table_get(drv->htab, buffers[i]);
799 if (!buf)
800 return VA_STATUS_ERROR_INVALID_BUFFER;
801
802 switch (buf->type) {
803 case VAPictureParameterBufferType:
804 handlePictureParameterBuffer(drv, context, buf);
805 break;
806
807 case VAIQMatrixBufferType:
808 handleIQMatrixBuffer(context, buf);
809 break;
810
811 case VASliceParameterBufferType:
812 handleSliceParameterBuffer(context, buf);
813 break;
814
815 case VASliceDataBufferType:
816 handleVASliceDataBufferType(context, buf);
817 break;
818 case VAProcPipelineParameterBufferType:
819 vaStatus = handleVAProcPipelineParameterBufferType(drv, context, buf);
820 break;
821
822 default:
823 break;
824 }
825 }
826
827 return vaStatus;
828 }
829
830 VAStatus
831 vlVaEndPicture(VADriverContextP ctx, VAContextID context_id)
832 {
833 vlVaDriver *drv;
834 vlVaContext *context;
835
836 if (!ctx)
837 return VA_STATUS_ERROR_INVALID_CONTEXT;
838
839 drv = VL_VA_DRIVER(ctx);
840 if (!drv)
841 return VA_STATUS_ERROR_INVALID_CONTEXT;
842
843 context = handle_table_get(drv->htab, context_id);
844 if (!context)
845 return VA_STATUS_ERROR_INVALID_CONTEXT;
846
847 if (!context->decoder) {
848 /* VPP */
849 return VA_STATUS_SUCCESS;
850 }
851
852 context->mpeg4.frame_num++;
853 context->decoder->end_frame(context->decoder, context->target, &context->desc.base);
854
855 return VA_STATUS_SUCCESS;
856 }