st/va: implement POC (pic_order_cnt) type
[mesa.git] / src / gallium / state_trackers / va / picture.c
1 /**************************************************************************
2 *
3 * Copyright 2010 Thomas Balling Sørensen & Orasanu Lucian.
4 * Copyright 2014 Advanced Micro Devices, Inc.
5 * All Rights Reserved.
6 *
7 * Permission is hereby granted, free of charge, to any person obtaining a
8 * copy of this software and associated documentation files (the
9 * "Software"), to deal in the Software without restriction, including
10 * without limitation the rights to use, copy, modify, merge, publish,
11 * distribute, sub license, and/or sell copies of the Software, and to
12 * permit persons to whom the Software is furnished to do so, subject to
13 * the following conditions:
14 *
15 * The above copyright notice and this permission notice (including the
16 * next paragraph) shall be included in all copies or substantial portions
17 * of the Software.
18 *
19 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
20 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
21 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
22 * IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR
23 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
24 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
25 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
26 *
27 **************************************************************************/
28
29 #include "pipe/p_video_codec.h"
30
31 #include "util/u_handle_table.h"
32 #include "util/u_video.h"
33
34 #include "vl/vl_vlc.h"
35 #include "vl/vl_winsys.h"
36
37 #include "va_private.h"
38
/**
 * vaBeginPicture entry point: start a new frame targeting the given
 * render surface.
 *
 * Looks up the context and surface handles under the driver mutex and
 * binds the surface's video buffer as the current target.  For a VPP
 * context (no decoder, profile unknown) only the target buffer format
 * is validated; for codec contexts a begin_frame is scheduled for the
 * first slice-data buffer (decode paths only).
 */
VAStatus
vlVaBeginPicture(VADriverContextP ctx, VAContextID context_id, VASurfaceID render_target)
{
   vlVaDriver *drv;
   vlVaContext *context;
   vlVaSurface *surf;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   if (!drv)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   mtx_lock(&drv->mutex);
   context = handle_table_get(drv->htab, context_id);
   if (!context) {
      mtx_unlock(&drv->mutex);
      return VA_STATUS_ERROR_INVALID_CONTEXT;
   }

   surf = handle_table_get(drv->htab, render_target);
   mtx_unlock(&drv->mutex);
   if (!surf || !surf->buffer)
      return VA_STATUS_ERROR_INVALID_SURFACE;

   context->target_id = render_target;
   surf->ctx = context_id;
   context->target = surf->buffer;
   /* Reset per-frame MJPEG chroma sampling; set again by the MJPEG
    * picture parameter handler. */
   context->mjpeg.sampling_factor = 0;

   if (!context->decoder) {

      /* VPP context: no codec was created, only check that the target
       * buffer format is one the post-processing path handles. */
      if (context->templat.profile == PIPE_VIDEO_PROFILE_UNKNOWN &&
          context->target->buffer_format != PIPE_FORMAT_B8G8R8A8_UNORM &&
          context->target->buffer_format != PIPE_FORMAT_R8G8B8A8_UNORM &&
          context->target->buffer_format != PIPE_FORMAT_B8G8R8X8_UNORM &&
          context->target->buffer_format != PIPE_FORMAT_R8G8B8X8_UNORM &&
          context->target->buffer_format != PIPE_FORMAT_NV12 &&
          context->target->buffer_format != PIPE_FORMAT_P016)
         return VA_STATUS_ERROR_UNIMPLEMENTED;

      return VA_STATUS_SUCCESS;
   }

   /* Decoders get a fresh begin_frame per picture; the encode path calls
    * begin_frame itself in vlVaEndPicture. */
   if (context->decoder->entrypoint != PIPE_VIDEO_ENTRYPOINT_ENCODE)
      context->needs_begin_frame = true;

   return VA_STATUS_SUCCESS;
}
90
91 void
92 vlVaGetReferenceFrame(vlVaDriver *drv, VASurfaceID surface_id,
93 struct pipe_video_buffer **ref_frame)
94 {
95 vlVaSurface *surf = handle_table_get(drv->htab, surface_id);
96 if (surf)
97 *ref_frame = surf->buffer;
98 else
99 *ref_frame = NULL;
100 }
101
/* Apply the hardcoded encoder presets (motion estimation, picture
 * control, rate control) and derive the per-picture bit budgets from
 * the frame rate before submitting an encode job. */
static void
getEncParamPreset(vlVaContext *context)
{
   //motion estimation preset
   context->desc.h264enc.motion_est.motion_est_quarter_pixel = 0x00000001;
   context->desc.h264enc.motion_est.lsmvert = 0x00000002;
   context->desc.h264enc.motion_est.enc_disable_sub_mode = 0x00000078;
   context->desc.h264enc.motion_est.enc_en_ime_overw_dis_subm = 0x00000001;
   context->desc.h264enc.motion_est.enc_ime_overw_dis_subm_no = 0x00000001;
   context->desc.h264enc.motion_est.enc_ime2_search_range_x = 0x00000004;
   context->desc.h264enc.motion_est.enc_ime2_search_range_y = 0x00000004;

   //pic control preset
   context->desc.h264enc.pic_ctrl.enc_cabac_enable = 0x00000001;
   context->desc.h264enc.pic_ctrl.enc_constraint_set_flags = 0x00000040;

   //rate control
   context->desc.h264enc.rate_ctrl.vbv_buffer_size = 20000000;
   context->desc.h264enc.rate_ctrl.vbv_buf_lv = 48;
   context->desc.h264enc.rate_ctrl.fill_data_enable = 1;
   context->desc.h264enc.rate_ctrl.enforce_hrd = 1;
   context->desc.h264enc.enable_vui = false;
   /* Fall back to 30/1 fps when the application never supplied a
    * frame rate (avoids dividing by zero below). */
   if (context->desc.h264enc.rate_ctrl.frame_rate_num == 0 ||
       context->desc.h264enc.rate_ctrl.frame_rate_den == 0) {
      context->desc.h264enc.rate_ctrl.frame_rate_num = 30;
      context->desc.h264enc.rate_ctrl.frame_rate_den = 1;
   }
   /* bits per picture = bitrate * (seconds per frame) */
   context->desc.h264enc.rate_ctrl.target_bits_picture =
      context->desc.h264enc.rate_ctrl.target_bitrate *
      ((float)context->desc.h264enc.rate_ctrl.frame_rate_den /
      context->desc.h264enc.rate_ctrl.frame_rate_num);
   context->desc.h264enc.rate_ctrl.peak_bits_picture_integer =
      context->desc.h264enc.rate_ctrl.peak_bitrate *
      ((float)context->desc.h264enc.rate_ctrl.frame_rate_den /
      context->desc.h264enc.rate_ctrl.frame_rate_num);

   context->desc.h264enc.rate_ctrl.peak_bits_picture_fraction = 0;
   context->desc.h264enc.ref_pic_mode = 0x00000201;
}
141
/* Dispatch a VAPictureParameterBufferType buffer to the codec specific
 * handler, then lazily create the decoder: the codec handlers fill in
 * context->templat (in particular max_references), which is needed
 * before create_video_codec can be called. */
static VAStatus
handlePictureParameterBuffer(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf)
{
   VAStatus vaStatus = VA_STATUS_SUCCESS;

   switch (u_reduce_video_profile(context->templat.profile)) {
   case PIPE_VIDEO_FORMAT_MPEG12:
      vlVaHandlePictureParameterBufferMPEG12(drv, context, buf);
      break;

   case PIPE_VIDEO_FORMAT_MPEG4_AVC:
      vlVaHandlePictureParameterBufferH264(drv, context, buf);
      break;

   case PIPE_VIDEO_FORMAT_VC1:
      vlVaHandlePictureParameterBufferVC1(drv, context, buf);
      break;

   case PIPE_VIDEO_FORMAT_MPEG4:
      vlVaHandlePictureParameterBufferMPEG4(drv, context, buf);
      break;

   case PIPE_VIDEO_FORMAT_HEVC:
      vlVaHandlePictureParameterBufferHEVC(drv, context, buf);
      break;

   case PIPE_VIDEO_FORMAT_JPEG:
      vlVaHandlePictureParameterBufferMJPEG(drv, context, buf);
      break;

   default:
      /* unknown codec formats are silently ignored */
      break;
   }

   /* Create the decoder once max_references is known. */
   if (!context->decoder) {
      enum pipe_video_format format =
         u_reduce_video_profile(context->templat.profile);

      if (!context->target)
         return VA_STATUS_ERROR_INVALID_CONTEXT;

      /* JPEG needs no reference frames; every other codec must have
       * max_references filled in by its handler above. */
      if (context->templat.max_references == 0 &&
          format != PIPE_VIDEO_FORMAT_JPEG)
         return VA_STATUS_ERROR_INVALID_BUFFER;

      /* Derive the H.264 level from the resolution; max_references is
       * passed by pointer so it may be adjusted as well. */
      if (format == PIPE_VIDEO_FORMAT_MPEG4_AVC)
         context->templat.level = u_get_h264_level(context->templat.width,
                            context->templat.height, &context->templat.max_references);

      context->decoder = drv->pipe->create_video_codec(drv->pipe,
         &context->templat);

      if (!context->decoder)
         return VA_STATUS_ERROR_ALLOCATION_FAILED;

      context->needs_begin_frame = true;
   }

   return vaStatus;
}
203
204 static void
205 handleIQMatrixBuffer(vlVaContext *context, vlVaBuffer *buf)
206 {
207 switch (u_reduce_video_profile(context->templat.profile)) {
208 case PIPE_VIDEO_FORMAT_MPEG12:
209 vlVaHandleIQMatrixBufferMPEG12(context, buf);
210 break;
211
212 case PIPE_VIDEO_FORMAT_MPEG4_AVC:
213 vlVaHandleIQMatrixBufferH264(context, buf);
214 break;
215
216 case PIPE_VIDEO_FORMAT_MPEG4:
217 vlVaHandleIQMatrixBufferMPEG4(context, buf);
218 break;
219
220 case PIPE_VIDEO_FORMAT_HEVC:
221 vlVaHandleIQMatrixBufferHEVC(context, buf);
222 break;
223
224 case PIPE_VIDEO_FORMAT_JPEG:
225 vlVaHandleIQMatrixBufferMJPEG(context, buf);
226 break;
227
228 default:
229 break;
230 }
231 }
232
233 static void
234 handleSliceParameterBuffer(vlVaContext *context, vlVaBuffer *buf)
235 {
236 switch (u_reduce_video_profile(context->templat.profile)) {
237 case PIPE_VIDEO_FORMAT_MPEG12:
238 vlVaHandleSliceParameterBufferMPEG12(context, buf);
239 break;
240
241 case PIPE_VIDEO_FORMAT_VC1:
242 vlVaHandleSliceParameterBufferVC1(context, buf);
243 break;
244
245 case PIPE_VIDEO_FORMAT_MPEG4_AVC:
246 vlVaHandleSliceParameterBufferH264(context, buf);
247 break;
248
249 case PIPE_VIDEO_FORMAT_MPEG4:
250 vlVaHandleSliceParameterBufferMPEG4(context, buf);
251 break;
252
253 case PIPE_VIDEO_FORMAT_HEVC:
254 vlVaHandleSliceParameterBufferHEVC(context, buf);
255 break;
256
257 case PIPE_VIDEO_FORMAT_JPEG:
258 vlVaHandleSliceParameterBufferMJPEG(context, buf);
259 break;
260
261 default:
262 break;
263 }
264 }
265
266 static unsigned int
267 bufHasStartcode(vlVaBuffer *buf, unsigned int code, unsigned int bits)
268 {
269 struct vl_vlc vlc = {0};
270 int i;
271
272 /* search the first 64 bytes for a startcode */
273 vl_vlc_init(&vlc, 1, (const void * const*)&buf->data, &buf->size);
274 for (i = 0; i < 64 && vl_vlc_bits_left(&vlc) >= bits; ++i) {
275 if (vl_vlc_peekbits(&vlc, bits) == code)
276 return 1;
277 vl_vlc_eatbits(&vlc, 8);
278 vl_vlc_fillbits(&vlc);
279 }
280
281 return 0;
282 }
283
284 static void
285 handleVASliceDataBufferType(vlVaContext *context, vlVaBuffer *buf)
286 {
287 enum pipe_video_format format;
288 unsigned num_buffers = 0;
289 void * const *buffers[2];
290 unsigned sizes[2];
291 static const uint8_t start_code_h264[] = { 0x00, 0x00, 0x01 };
292 static const uint8_t start_code_h265[] = { 0x00, 0x00, 0x01 };
293 static const uint8_t start_code_vc1[] = { 0x00, 0x00, 0x01, 0x0d };
294
295 format = u_reduce_video_profile(context->templat.profile);
296 switch (format) {
297 case PIPE_VIDEO_FORMAT_MPEG4_AVC:
298 if (bufHasStartcode(buf, 0x000001, 24))
299 break;
300
301 buffers[num_buffers] = (void *const)&start_code_h264;
302 sizes[num_buffers++] = sizeof(start_code_h264);
303 break;
304 case PIPE_VIDEO_FORMAT_HEVC:
305 if (bufHasStartcode(buf, 0x000001, 24))
306 break;
307
308 buffers[num_buffers] = (void *const)&start_code_h265;
309 sizes[num_buffers++] = sizeof(start_code_h265);
310 break;
311 case PIPE_VIDEO_FORMAT_VC1:
312 if (bufHasStartcode(buf, 0x0000010d, 32) ||
313 bufHasStartcode(buf, 0x0000010c, 32) ||
314 bufHasStartcode(buf, 0x0000010b, 32))
315 break;
316
317 if (context->decoder->profile == PIPE_VIDEO_PROFILE_VC1_ADVANCED) {
318 buffers[num_buffers] = (void *const)&start_code_vc1;
319 sizes[num_buffers++] = sizeof(start_code_vc1);
320 }
321 break;
322 case PIPE_VIDEO_FORMAT_MPEG4:
323 if (bufHasStartcode(buf, 0x000001, 24))
324 break;
325
326 vlVaDecoderFixMPEG4Startcode(context);
327 buffers[num_buffers] = (void *)context->mpeg4.start_code;
328 sizes[num_buffers++] = context->mpeg4.start_code_size;
329 case PIPE_VIDEO_FORMAT_JPEG:
330 /* TODO */
331 break;
332 default:
333 break;
334 }
335
336 buffers[num_buffers] = buf->data;
337 sizes[num_buffers] = buf->size;
338 ++num_buffers;
339
340 if (context->needs_begin_frame) {
341 context->decoder->begin_frame(context->decoder, context->target,
342 &context->desc.base);
343 context->needs_begin_frame = false;
344 }
345 context->decoder->decode_bitstream(context->decoder, context->target, &context->desc.base,
346 num_buffers, (const void * const*)buffers, sizes);
347 }
348
349 static VAStatus
350 handleVAEncMiscParameterTypeRateControl(vlVaContext *context, VAEncMiscParameterBuffer *misc)
351 {
352 VAEncMiscParameterRateControl *rc = (VAEncMiscParameterRateControl *)misc->data;
353 if (context->desc.h264enc.rate_ctrl.rate_ctrl_method ==
354 PIPE_H264_ENC_RATE_CONTROL_METHOD_CONSTANT)
355 context->desc.h264enc.rate_ctrl.target_bitrate = rc->bits_per_second;
356 else
357 context->desc.h264enc.rate_ctrl.target_bitrate = rc->bits_per_second * (rc->target_percentage / 100.0);
358 context->desc.h264enc.rate_ctrl.peak_bitrate = rc->bits_per_second;
359 if (context->desc.h264enc.rate_ctrl.target_bitrate < 2000000)
360 context->desc.h264enc.rate_ctrl.vbv_buffer_size = MIN2((context->desc.h264enc.rate_ctrl.target_bitrate * 2.75), 2000000);
361 else
362 context->desc.h264enc.rate_ctrl.vbv_buffer_size = context->desc.h264enc.rate_ctrl.target_bitrate;
363
364 return VA_STATUS_SUCCESS;
365 }
366
367 static VAStatus
368 handleVAEncMiscParameterTypeFrameRate(vlVaContext *context, VAEncMiscParameterBuffer *misc)
369 {
370 VAEncMiscParameterFrameRate *fr = (VAEncMiscParameterFrameRate *)misc->data;
371 if (fr->framerate & 0xffff0000) {
372 context->desc.h264enc.rate_ctrl.frame_rate_num = fr->framerate & 0xffff;
373 context->desc.h264enc.rate_ctrl.frame_rate_den = fr->framerate >> 16 & 0xffff;
374 } else {
375 context->desc.h264enc.rate_ctrl.frame_rate_num = fr->framerate;
376 context->desc.h264enc.rate_ctrl.frame_rate_den = 1;
377 }
378 return VA_STATUS_SUCCESS;
379 }
380
381 static VAStatus
382 handleVAEncSequenceParameterBufferType(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf)
383 {
384 VAEncSequenceParameterBufferH264 *h264 = (VAEncSequenceParameterBufferH264 *)buf->data;
385 if (!context->decoder) {
386 context->templat.max_references = h264->max_num_ref_frames;
387 context->templat.level = h264->level_idc;
388 context->decoder = drv->pipe->create_video_codec(drv->pipe, &context->templat);
389 if (!context->decoder)
390 return VA_STATUS_ERROR_ALLOCATION_FAILED;
391 }
392
393 context->gop_coeff = ((1024 + h264->intra_idr_period - 1) / h264->intra_idr_period + 1) / 2 * 2;
394 if (context->gop_coeff > VL_VA_ENC_GOP_COEFF)
395 context->gop_coeff = VL_VA_ENC_GOP_COEFF;
396 context->desc.h264enc.gop_size = h264->intra_idr_period * context->gop_coeff;
397 context->desc.h264enc.rate_ctrl.frame_rate_num = h264->time_scale / 2;
398 context->desc.h264enc.rate_ctrl.frame_rate_den = h264->num_units_in_tick;
399 context->desc.h264enc.pic_order_cnt_type = h264->seq_fields.bits.pic_order_cnt_type;
400 return VA_STATUS_SUCCESS;
401 }
402
403 static VAStatus
404 handleVAEncMiscParameterBufferType(vlVaContext *context, vlVaBuffer *buf)
405 {
406 VAStatus vaStatus = VA_STATUS_SUCCESS;
407 VAEncMiscParameterBuffer *misc;
408 misc = buf->data;
409
410 switch (misc->type) {
411 case VAEncMiscParameterTypeRateControl:
412 vaStatus = handleVAEncMiscParameterTypeRateControl(context, misc);
413 break;
414
415 case VAEncMiscParameterTypeFrameRate:
416 vaStatus = handleVAEncMiscParameterTypeFrameRate(context, misc);
417 break;
418
419 default:
420 break;
421 }
422
423 return vaStatus;
424 }
425
426 static VAStatus
427 handleVAEncPictureParameterBufferType(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf)
428 {
429 VAEncPictureParameterBufferH264 *h264;
430 vlVaBuffer *coded_buf;
431
432 h264 = buf->data;
433 context->desc.h264enc.frame_num = h264->frame_num;
434 context->desc.h264enc.not_referenced = false;
435 context->desc.h264enc.is_idr = (h264->pic_fields.bits.idr_pic_flag == 1);
436 context->desc.h264enc.pic_order_cnt = h264->CurrPic.TopFieldOrderCnt;
437 if (context->desc.h264enc.gop_cnt == 0)
438 context->desc.h264enc.i_remain = context->gop_coeff;
439 else if (context->desc.h264enc.frame_num == 1)
440 context->desc.h264enc.i_remain--;
441
442 context->desc.h264enc.p_remain = context->desc.h264enc.gop_size - context->desc.h264enc.gop_cnt - context->desc.h264enc.i_remain;
443
444 coded_buf = handle_table_get(drv->htab, h264->coded_buf);
445 if (!coded_buf->derived_surface.resource)
446 coded_buf->derived_surface.resource = pipe_buffer_create(drv->pipe->screen, PIPE_BIND_VERTEX_BUFFER,
447 PIPE_USAGE_STREAM, coded_buf->size);
448 context->coded_buf = coded_buf;
449
450 util_hash_table_set(context->desc.h264enc.frame_idx,
451 UINT_TO_PTR(h264->CurrPic.picture_id),
452 UINT_TO_PTR(h264->frame_num));
453
454 if (context->desc.h264enc.is_idr)
455 context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_IDR;
456 else
457 context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_P;
458
459 context->desc.h264enc.quant_i_frames = h264->pic_init_qp;
460 context->desc.h264enc.quant_b_frames = h264->pic_init_qp;
461 context->desc.h264enc.quant_p_frames = h264->pic_init_qp;
462 context->desc.h264enc.gop_cnt++;
463 if (context->desc.h264enc.gop_cnt == context->desc.h264enc.gop_size)
464 context->desc.h264enc.gop_cnt = 0;
465
466 return VA_STATUS_SUCCESS;
467 }
468
469 static VAStatus
470 handleVAEncSliceParameterBufferType(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf)
471 {
472 VAEncSliceParameterBufferH264 *h264;
473
474 h264 = buf->data;
475 context->desc.h264enc.ref_idx_l0 = VA_INVALID_ID;
476 context->desc.h264enc.ref_idx_l1 = VA_INVALID_ID;
477
478 for (int i = 0; i < 32; i++) {
479 if (h264->RefPicList0[i].picture_id != VA_INVALID_ID) {
480 if (context->desc.h264enc.ref_idx_l0 == VA_INVALID_ID)
481 context->desc.h264enc.ref_idx_l0 = PTR_TO_UINT(util_hash_table_get(context->desc.h264enc.frame_idx,
482 UINT_TO_PTR(h264->RefPicList0[i].picture_id)));
483 }
484 if (h264->RefPicList1[i].picture_id != VA_INVALID_ID && h264->slice_type == 1) {
485 if (context->desc.h264enc.ref_idx_l1 == VA_INVALID_ID)
486 context->desc.h264enc.ref_idx_l1 = PTR_TO_UINT(util_hash_table_get(context->desc.h264enc.frame_idx,
487 UINT_TO_PTR(h264->RefPicList1[i].picture_id)));
488 }
489 }
490
491 if (h264->slice_type == 1)
492 context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_B;
493 else if (h264->slice_type == 0)
494 context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_P;
495 else if (h264->slice_type == 2) {
496 if (context->desc.h264enc.is_idr){
497 context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_IDR;
498 context->desc.h264enc.idr_pic_id++;
499 } else
500 context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_I;
501 } else
502 context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_SKIP;
503
504 return VA_STATUS_SUCCESS;
505 }
506
/**
 * vaRenderPicture entry point: process a batch of parameter/data
 * buffers for the frame started by vlVaBeginPicture, dispatching each
 * buffer by its type.  The whole loop runs under the driver mutex;
 * the handlers may create the codec and mutate the context.
 */
VAStatus
vlVaRenderPicture(VADriverContextP ctx, VAContextID context_id, VABufferID *buffers, int num_buffers)
{
   vlVaDriver *drv;
   vlVaContext *context;
   VAStatus vaStatus = VA_STATUS_SUCCESS;

   unsigned i;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   if (!drv)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   mtx_lock(&drv->mutex);
   context = handle_table_get(drv->htab, context_id);
   if (!context) {
      mtx_unlock(&drv->mutex);
      return VA_STATUS_ERROR_INVALID_CONTEXT;
   }

   for (i = 0; i < num_buffers; ++i) {
      vlVaBuffer *buf = handle_table_get(drv->htab, buffers[i]);
      if (!buf) {
         mtx_unlock(&drv->mutex);
         return VA_STATUS_ERROR_INVALID_BUFFER;
      }

      switch (buf->type) {
      case VAPictureParameterBufferType:
         vaStatus = handlePictureParameterBuffer(drv, context, buf);
         break;

      case VAIQMatrixBufferType:
         handleIQMatrixBuffer(context, buf);
         break;

      case VASliceParameterBufferType:
         handleSliceParameterBuffer(context, buf);
         break;

      case VASliceDataBufferType:
         handleVASliceDataBufferType(context, buf);
         break;
      case VAProcPipelineParameterBufferType:
         vaStatus = vlVaHandleVAProcPipelineParameterBufferType(drv, context, buf);
         break;

      case VAEncSequenceParameterBufferType:
         vaStatus = handleVAEncSequenceParameterBufferType(drv, context, buf);
         break;

      case VAEncMiscParameterBufferType:
         vaStatus = handleVAEncMiscParameterBufferType(context, buf);
         break;

      case VAEncPictureParameterBufferType:
         vaStatus = handleVAEncPictureParameterBufferType(drv, context, buf);
         break;

      case VAEncSliceParameterBufferType:
         vaStatus = handleVAEncSliceParameterBufferType(drv, context, buf);
         break;

      case VAHuffmanTableBufferType:
         vlVaHandleHuffmanTableBufferType(context, buf);
         break;

      default:
         /* unsupported buffer types are silently ignored */
         break;
      }
   }
   mtx_unlock(&drv->mutex);

   return vaStatus;
}
585
586 VAStatus
587 vlVaEndPicture(VADriverContextP ctx, VAContextID context_id)
588 {
589 vlVaDriver *drv;
590 vlVaContext *context;
591 vlVaBuffer *coded_buf;
592 vlVaSurface *surf;
593 void *feedback;
594 struct pipe_screen *screen;
595 bool supported;
596 bool realloc = false;
597 enum pipe_format format;
598
599 if (!ctx)
600 return VA_STATUS_ERROR_INVALID_CONTEXT;
601
602 drv = VL_VA_DRIVER(ctx);
603 if (!drv)
604 return VA_STATUS_ERROR_INVALID_CONTEXT;
605
606 mtx_lock(&drv->mutex);
607 context = handle_table_get(drv->htab, context_id);
608 mtx_unlock(&drv->mutex);
609 if (!context)
610 return VA_STATUS_ERROR_INVALID_CONTEXT;
611
612 if (!context->decoder) {
613 if (context->templat.profile != PIPE_VIDEO_PROFILE_UNKNOWN)
614 return VA_STATUS_ERROR_INVALID_CONTEXT;
615
616 /* VPP */
617 return VA_STATUS_SUCCESS;
618 }
619
620 mtx_lock(&drv->mutex);
621 surf = handle_table_get(drv->htab, context->target_id);
622 context->mpeg4.frame_num++;
623
624 screen = context->decoder->context->screen;
625 supported = screen->get_video_param(screen, context->decoder->profile,
626 context->decoder->entrypoint,
627 surf->buffer->interlaced ?
628 PIPE_VIDEO_CAP_SUPPORTS_INTERLACED :
629 PIPE_VIDEO_CAP_SUPPORTS_PROGRESSIVE);
630
631 if (!supported) {
632 surf->templat.interlaced = screen->get_video_param(screen,
633 context->decoder->profile,
634 context->decoder->entrypoint,
635 PIPE_VIDEO_CAP_PREFERS_INTERLACED);
636 realloc = true;
637 }
638
639 format = screen->get_video_param(screen, context->decoder->profile,
640 PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
641 PIPE_VIDEO_CAP_PREFERED_FORMAT);
642
643 if (surf->buffer->buffer_format != format &&
644 surf->buffer->buffer_format == PIPE_FORMAT_NV12) {
645 /* check originally as NV12 only */
646 surf->templat.buffer_format = format;
647 realloc = true;
648 }
649
650 if (u_reduce_video_profile(context->templat.profile) == PIPE_VIDEO_FORMAT_JPEG &&
651 surf->buffer->buffer_format == PIPE_FORMAT_NV12) {
652 if (context->mjpeg.sampling_factor == 0x211111 ||
653 context->mjpeg.sampling_factor == 0x221212) {
654 surf->templat.buffer_format = PIPE_FORMAT_YUYV;
655 realloc = true;
656 } else if (context->mjpeg.sampling_factor != 0x221111) {
657 /* Not NV12 either */
658 mtx_unlock(&drv->mutex);
659 return VA_STATUS_ERROR_INVALID_SURFACE;
660 }
661 }
662
663 if (realloc) {
664 struct pipe_video_buffer *old_buf = surf->buffer;
665
666 if (vlVaHandleSurfaceAllocate(drv, surf, &surf->templat) != VA_STATUS_SUCCESS) {
667 mtx_unlock(&drv->mutex);
668 return VA_STATUS_ERROR_ALLOCATION_FAILED;
669 }
670
671 if (context->decoder->entrypoint == PIPE_VIDEO_ENTRYPOINT_ENCODE) {
672 if (old_buf->interlaced) {
673 struct u_rect src_rect, dst_rect;
674
675 dst_rect.x0 = src_rect.x0 = 0;
676 dst_rect.y0 = src_rect.y0 = 0;
677 dst_rect.x1 = src_rect.x1 = surf->templat.width;
678 dst_rect.y1 = src_rect.y1 = surf->templat.height;
679 vl_compositor_yuv_deint_full(&drv->cstate, &drv->compositor,
680 old_buf, surf->buffer,
681 &src_rect, &dst_rect, VL_COMPOSITOR_WEAVE);
682 } else
683 /* Can't convert from progressive to interlaced yet */
684 return VA_STATUS_ERROR_INVALID_SURFACE;
685 }
686
687 old_buf->destroy(old_buf);
688 context->target = surf->buffer;
689 }
690
691 if (context->decoder->entrypoint == PIPE_VIDEO_ENTRYPOINT_ENCODE) {
692 coded_buf = context->coded_buf;
693 getEncParamPreset(context);
694 context->desc.h264enc.frame_num_cnt++;
695 context->decoder->begin_frame(context->decoder, context->target, &context->desc.base);
696 context->decoder->encode_bitstream(context->decoder, context->target,
697 coded_buf->derived_surface.resource, &feedback);
698 surf->feedback = feedback;
699 surf->coded_buf = coded_buf;
700 }
701
702 context->decoder->end_frame(context->decoder, context->target, &context->desc.base);
703 if (context->decoder->entrypoint == PIPE_VIDEO_ENTRYPOINT_ENCODE) {
704 int idr_period = context->desc.h264enc.gop_size / context->gop_coeff;
705 int p_remain_in_idr = idr_period - context->desc.h264enc.frame_num;
706 surf->frame_num_cnt = context->desc.h264enc.frame_num_cnt;
707 surf->force_flushed = false;
708 if (context->first_single_submitted) {
709 context->decoder->flush(context->decoder);
710 context->first_single_submitted = false;
711 surf->force_flushed = true;
712 }
713 if (p_remain_in_idr == 1) {
714 if ((context->desc.h264enc.frame_num_cnt % 2) != 0) {
715 context->decoder->flush(context->decoder);
716 context->first_single_submitted = true;
717 }
718 else
719 context->first_single_submitted = false;
720 surf->force_flushed = true;
721 }
722 }
723 mtx_unlock(&drv->mutex);
724 return VA_STATUS_SUCCESS;
725 }