/**************************************************************************
 *
 * Copyright 2010 Thomas Balling Sørensen & Orasanu Lucian.
 * Copyright 2014 Advanced Micro Devices, Inc.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include "pipe/p_video_codec.h"

#include "util/u_handle_table.h"
#include "util/u_video.h"

#include "vl/vl_vlc.h"
#include "vl/vl_winsys.h"

#include "va_private.h"

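/* vlVaBeginPicture: get ready to decode/encode (or post-process) a picture
 * into the given render target surface. Looks up the context and surface
 * handles under the driver mutex, remembers the surface as the current
 * target, and for non-encode contexts flags that begin_frame() still needs
 * to be issued once the first picture parameters arrive. A VPP context
 * (PIPE_VIDEO_PROFILE_UNKNOWN) only accepts the listed target formats. */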
VAStatus
vlVaBeginPicture(VADriverContextP ctx, VAContextID context_id, VASurfaceID render_target)
{
   vlVaDriver *drv;
   vlVaContext *context;
   vlVaSurface *surf;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   if (!drv)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   mtx_lock(&drv->mutex);
   context = handle_table_get(drv->htab, context_id);
   if (!context) {
      mtx_unlock(&drv->mutex);
      return VA_STATUS_ERROR_INVALID_CONTEXT;
   }

   surf = handle_table_get(drv->htab, render_target);
   mtx_unlock(&drv->mutex);
   if (!surf || !surf->buffer)
      return VA_STATUS_ERROR_INVALID_SURFACE;

   context->target_id = render_target;
   surf->ctx = context_id;
   context->target = surf->buffer;

   if (!context->decoder) {

      /* VPP */
      if (context->templat.profile == PIPE_VIDEO_PROFILE_UNKNOWN &&
          context->target->buffer_format != PIPE_FORMAT_B8G8R8A8_UNORM &&
          context->target->buffer_format != PIPE_FORMAT_R8G8B8A8_UNORM &&
          context->target->buffer_format != PIPE_FORMAT_B8G8R8X8_UNORM &&
          context->target->buffer_format != PIPE_FORMAT_R8G8B8X8_UNORM &&
          context->target->buffer_format != PIPE_FORMAT_NV12 &&
          context->target->buffer_format != PIPE_FORMAT_P016)
         return VA_STATUS_ERROR_UNIMPLEMENTED;

      return VA_STATUS_SUCCESS;
   }

   if (context->decoder->entrypoint != PIPE_VIDEO_ENTRYPOINT_ENCODE)
      context->needs_begin_frame = true;

   return VA_STATUS_SUCCESS;
}

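/* Resolve a VA surface id to its underlying pipe_video_buffer so it can be
 * used as a reference frame by the per-codec picture parameter handlers;
 * *ref_frame is set to NULL when the handle is unknown. */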
void
vlVaGetReferenceFrame(vlVaDriver *drv, VASurfaceID surface_id,
                      struct pipe_video_buffer **ref_frame)
{
   vlVaSurface *surf = handle_table_get(drv->htab, surface_id);
   if (surf)
      *ref_frame = surf->buffer;
   else
      *ref_frame = NULL;
}

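/* Apply the driver's built-in H.264 encode presets (motion estimation,
 * picture control and rate control) that are not exposed through VA-API.
 * The fixed values are presumably tuned for the underlying hardware
 * encoder; the frame rate falls back to 30/1 when the application supplied
 * none, and per-picture bit budgets are derived from the target and peak
 * bitrates. */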
static void
getEncParamPreset(vlVaContext *context)
{
   //motion estimation preset
   context->desc.h264enc.motion_est.motion_est_quarter_pixel = 0x00000001;
   context->desc.h264enc.motion_est.lsmvert = 0x00000002;
   context->desc.h264enc.motion_est.enc_disable_sub_mode = 0x00000078;
   context->desc.h264enc.motion_est.enc_en_ime_overw_dis_subm = 0x00000001;
   context->desc.h264enc.motion_est.enc_ime_overw_dis_subm_no = 0x00000001;
   context->desc.h264enc.motion_est.enc_ime2_search_range_x = 0x00000004;
   context->desc.h264enc.motion_est.enc_ime2_search_range_y = 0x00000004;

   //pic control preset
   context->desc.h264enc.pic_ctrl.enc_cabac_enable = 0x00000001;
   context->desc.h264enc.pic_ctrl.enc_constraint_set_flags = 0x00000040;

   //rate control
   context->desc.h264enc.rate_ctrl.vbv_buffer_size = 20000000;
   context->desc.h264enc.rate_ctrl.vbv_buf_lv = 48;
   context->desc.h264enc.rate_ctrl.fill_data_enable = 1;
   context->desc.h264enc.rate_ctrl.enforce_hrd = 1;
   context->desc.h264enc.enable_vui = false;
   if (context->desc.h264enc.rate_ctrl.frame_rate_num == 0 ||
       context->desc.h264enc.rate_ctrl.frame_rate_den == 0) {
      context->desc.h264enc.rate_ctrl.frame_rate_num = 30;
      context->desc.h264enc.rate_ctrl.frame_rate_den = 1;
   }
   context->desc.h264enc.rate_ctrl.target_bits_picture =
      context->desc.h264enc.rate_ctrl.target_bitrate *
      ((float)context->desc.h264enc.rate_ctrl.frame_rate_den /
       context->desc.h264enc.rate_ctrl.frame_rate_num);
   context->desc.h264enc.rate_ctrl.peak_bits_picture_integer =
      context->desc.h264enc.rate_ctrl.peak_bitrate *
      ((float)context->desc.h264enc.rate_ctrl.frame_rate_den /
       context->desc.h264enc.rate_ctrl.frame_rate_num);

   context->desc.h264enc.rate_ctrl.peak_bits_picture_fraction = 0;
   context->desc.h264enc.ref_pic_mode = 0x00000201;
}

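/* Dispatch VAPictureParameterBufferType to the codec-specific handler and,
 * once max_references is known, create the actual pipe video decoder. For
 * H.264 the level and reference count are derived from the coded size via
 * u_get_h264_level(). */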
static VAStatus
handlePictureParameterBuffer(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf)
{
   VAStatus vaStatus = VA_STATUS_SUCCESS;

   switch (u_reduce_video_profile(context->templat.profile)) {
   case PIPE_VIDEO_FORMAT_MPEG12:
      vlVaHandlePictureParameterBufferMPEG12(drv, context, buf);
      break;

   case PIPE_VIDEO_FORMAT_MPEG4_AVC:
      vlVaHandlePictureParameterBufferH264(drv, context, buf);
      break;

   case PIPE_VIDEO_FORMAT_VC1:
      vlVaHandlePictureParameterBufferVC1(drv, context, buf);
      break;

   case PIPE_VIDEO_FORMAT_MPEG4:
      vlVaHandlePictureParameterBufferMPEG4(drv, context, buf);
      break;

   case PIPE_VIDEO_FORMAT_HEVC:
      vlVaHandlePictureParameterBufferHEVC(drv, context, buf);
      break;

   case PIPE_VIDEO_FORMAT_JPEG:
      vlVaHandlePictureParameterBufferMJPEG(drv, context, buf);
      break;

   default:
      break;
   }

   /* Create the decoder once max_references is known. */
   if (!context->decoder) {
      enum pipe_video_format format =
         u_reduce_video_profile(context->templat.profile);

      if (!context->target)
         return VA_STATUS_ERROR_INVALID_CONTEXT;

      if (context->templat.max_references == 0 &&
          format != PIPE_VIDEO_FORMAT_JPEG)
         return VA_STATUS_ERROR_INVALID_BUFFER;

      if (format == PIPE_VIDEO_FORMAT_MPEG4_AVC)
         context->templat.level = u_get_h264_level(context->templat.width,
            context->templat.height, &context->templat.max_references);

      context->decoder = drv->pipe->create_video_codec(drv->pipe,
         &context->templat);

      if (!context->decoder)
         return VA_STATUS_ERROR_ALLOCATION_FAILED;

      context->needs_begin_frame = true;
   }

   return vaStatus;
}

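/* Dispatch VAIQMatrixBufferType (quantization matrices) to the
 * codec-specific handler; codecs without IQ matrices are ignored. */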
static void
handleIQMatrixBuffer(vlVaContext *context, vlVaBuffer *buf)
{
   switch (u_reduce_video_profile(context->templat.profile)) {
   case PIPE_VIDEO_FORMAT_MPEG12:
      vlVaHandleIQMatrixBufferMPEG12(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_MPEG4_AVC:
      vlVaHandleIQMatrixBufferH264(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_MPEG4:
      vlVaHandleIQMatrixBufferMPEG4(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_HEVC:
      vlVaHandleIQMatrixBufferHEVC(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_JPEG:
      vlVaHandleIQMatrixBufferMJPEG(context, buf);
      break;

   default:
      break;
   }
}

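/* Dispatch VASliceParameterBufferType to the codec-specific handler. */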
static void
handleSliceParameterBuffer(vlVaContext *context, vlVaBuffer *buf)
{
   switch (u_reduce_video_profile(context->templat.profile)) {
   case PIPE_VIDEO_FORMAT_MPEG12:
      vlVaHandleSliceParameterBufferMPEG12(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_VC1:
      vlVaHandleSliceParameterBufferVC1(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_MPEG4_AVC:
      vlVaHandleSliceParameterBufferH264(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_MPEG4:
      vlVaHandleSliceParameterBufferMPEG4(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_HEVC:
      vlVaHandleSliceParameterBufferHEVC(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_JPEG:
      vlVaHandleSliceParameterBufferMJPEG(context, buf);
      break;

   default:
      break;
   }
}

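/* Scan the first 64 bytes of a bitstream buffer for the given start code;
 * returns 1 if found, 0 otherwise. Used to decide whether a start code has
 * to be prepended before handing the data to the decoder. */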
static unsigned int
bufHasStartcode(vlVaBuffer *buf, unsigned int code, unsigned int bits)
{
   struct vl_vlc vlc = {0};
   int i;

   /* search the first 64 bytes for a startcode */
   vl_vlc_init(&vlc, 1, (const void * const*)&buf->data, &buf->size);
   for (i = 0; i < 64 && vl_vlc_bits_left(&vlc) >= bits; ++i) {
      if (vl_vlc_peekbits(&vlc, bits) == code)
         return 1;
      vl_vlc_eatbits(&vlc, 8);
      vl_vlc_fillbits(&vlc);
   }

   return 0;
}

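/* Submit VASliceDataBufferType to the decoder. If the application stripped
 * the Annex-B/VC-1/MPEG-4 start codes, prepend them as an extra bitstream
 * buffer, issue begin_frame() if it is still pending, and then call
 * decode_bitstream() with the assembled buffer list. */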
static void
handleVASliceDataBufferType(vlVaContext *context, vlVaBuffer *buf)
{
   enum pipe_video_format format;
   unsigned num_buffers = 0;
   void * const *buffers[2];
   unsigned sizes[2];
   static const uint8_t start_code_h264[] = { 0x00, 0x00, 0x01 };
   static const uint8_t start_code_h265[] = { 0x00, 0x00, 0x01 };
   static const uint8_t start_code_vc1[] = { 0x00, 0x00, 0x01, 0x0d };

   format = u_reduce_video_profile(context->templat.profile);
   switch (format) {
   case PIPE_VIDEO_FORMAT_MPEG4_AVC:
      if (bufHasStartcode(buf, 0x000001, 24))
         break;

      buffers[num_buffers] = (void *const)&start_code_h264;
      sizes[num_buffers++] = sizeof(start_code_h264);
      break;
   case PIPE_VIDEO_FORMAT_HEVC:
      if (bufHasStartcode(buf, 0x000001, 24))
         break;

      buffers[num_buffers] = (void *const)&start_code_h265;
      sizes[num_buffers++] = sizeof(start_code_h265);
      break;
   case PIPE_VIDEO_FORMAT_VC1:
      if (bufHasStartcode(buf, 0x0000010d, 32) ||
          bufHasStartcode(buf, 0x0000010c, 32) ||
          bufHasStartcode(buf, 0x0000010b, 32))
         break;

      if (context->decoder->profile == PIPE_VIDEO_PROFILE_VC1_ADVANCED) {
         buffers[num_buffers] = (void *const)&start_code_vc1;
         sizes[num_buffers++] = sizeof(start_code_vc1);
      }
      break;
   case PIPE_VIDEO_FORMAT_MPEG4:
      if (bufHasStartcode(buf, 0x000001, 24))
         break;

      vlVaDecoderFixMPEG4Startcode(context);
      buffers[num_buffers] = (void *)context->mpeg4.start_code;
      sizes[num_buffers++] = context->mpeg4.start_code_size;
      break;
   case PIPE_VIDEO_FORMAT_JPEG:
      /* TODO */
      break;
   default:
      break;
   }

   buffers[num_buffers] = buf->data;
   sizes[num_buffers] = buf->size;
   ++num_buffers;

   if (context->needs_begin_frame) {
      context->decoder->begin_frame(context->decoder, context->target,
                                    &context->desc.base);
      context->needs_begin_frame = false;
   }
   context->decoder->decode_bitstream(context->decoder, context->target, &context->desc.base,
                                      num_buffers, (const void * const*)buffers, sizes);
}

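/* VAEncMiscParameterTypeRateControl: translate the requested bitrates into
 * the pipe rate control state. For non-constant rate control the target
 * bitrate is scaled by target_percentage, and the VBV buffer size is
 * clamped for low-bitrate streams. */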
static VAStatus
handleVAEncMiscParameterTypeRateControl(vlVaContext *context, VAEncMiscParameterBuffer *misc)
{
   VAEncMiscParameterRateControl *rc = (VAEncMiscParameterRateControl *)misc->data;
   if (context->desc.h264enc.rate_ctrl.rate_ctrl_method ==
       PIPE_H264_ENC_RATE_CONTROL_METHOD_CONSTANT)
      context->desc.h264enc.rate_ctrl.target_bitrate = rc->bits_per_second;
   else
      context->desc.h264enc.rate_ctrl.target_bitrate = rc->bits_per_second * (rc->target_percentage / 100.0);
   context->desc.h264enc.rate_ctrl.peak_bitrate = rc->bits_per_second;
   if (context->desc.h264enc.rate_ctrl.target_bitrate < 2000000)
      context->desc.h264enc.rate_ctrl.vbv_buffer_size = MIN2((context->desc.h264enc.rate_ctrl.target_bitrate * 2.75), 2000000);
   else
      context->desc.h264enc.rate_ctrl.vbv_buffer_size = context->desc.h264enc.rate_ctrl.target_bitrate;

   return VA_STATUS_SUCCESS;
}

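/* VAEncMiscParameterTypeFrameRate: the framerate field either packs a
 * numerator/denominator pair in its low/high 16 bits or holds a plain
 * frames-per-second value; store it as frame_rate_num/frame_rate_den. */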
static VAStatus
handleVAEncMiscParameterTypeFrameRate(vlVaContext *context, VAEncMiscParameterBuffer *misc)
{
   VAEncMiscParameterFrameRate *fr = (VAEncMiscParameterFrameRate *)misc->data;
   if (fr->framerate & 0xffff0000) {
      context->desc.h264enc.rate_ctrl.frame_rate_num = fr->framerate & 0xffff;
      context->desc.h264enc.rate_ctrl.frame_rate_den = fr->framerate >> 16 & 0xffff;
   } else {
      context->desc.h264enc.rate_ctrl.frame_rate_num = fr->framerate;
      context->desc.h264enc.rate_ctrl.frame_rate_den = 1;
   }
   return VA_STATUS_SUCCESS;
}

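/* VAEncSequenceParameterBufferType: create the encoder if it does not exist
 * yet (using the SPS reference count and level), and derive the GOP size and
 * frame rate from the IDR period and VUI timing information. */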
static VAStatus
handleVAEncSequenceParameterBufferType(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf)
{
   VAEncSequenceParameterBufferH264 *h264 = (VAEncSequenceParameterBufferH264 *)buf->data;
   if (!context->decoder) {
      context->templat.max_references = h264->max_num_ref_frames;
      context->templat.level = h264->level_idc;
      context->decoder = drv->pipe->create_video_codec(drv->pipe, &context->templat);
      if (!context->decoder)
         return VA_STATUS_ERROR_ALLOCATION_FAILED;
   }

   context->gop_coeff = ((1024 + h264->intra_idr_period - 1) / h264->intra_idr_period + 1) / 2 * 2;
   if (context->gop_coeff > VL_VA_ENC_GOP_COEFF)
      context->gop_coeff = VL_VA_ENC_GOP_COEFF;
   context->desc.h264enc.gop_size = h264->intra_idr_period * context->gop_coeff;
   context->desc.h264enc.rate_ctrl.frame_rate_num = h264->time_scale / 2;
   context->desc.h264enc.rate_ctrl.frame_rate_den = h264->num_units_in_tick;
   return VA_STATUS_SUCCESS;
}

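/* Dispatch VAEncMiscParameterBufferType to the rate control or frame rate
 * handler; other misc parameter types are currently ignored. */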
static VAStatus
handleVAEncMiscParameterBufferType(vlVaContext *context, vlVaBuffer *buf)
{
   VAStatus vaStatus = VA_STATUS_SUCCESS;
   VAEncMiscParameterBuffer *misc;
   misc = buf->data;

   switch (misc->type) {
   case VAEncMiscParameterTypeRateControl:
      vaStatus = handleVAEncMiscParameterTypeRateControl(context, misc);
      break;

   case VAEncMiscParameterTypeFrameRate:
      vaStatus = handleVAEncMiscParameterTypeFrameRate(context, misc);
      break;

   default:
      break;
   }

   return vaStatus;
}

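/* VAEncPictureParameterBufferType: capture per-picture encode state (frame
 * number, IDR flag, POC, QP), track the remaining I/P pictures in the GOP,
 * lazily allocate the coded buffer's backing resource and remember the
 * picture id to frame number mapping for later reference list lookups. */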
static VAStatus
handleVAEncPictureParameterBufferType(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf)
{
   VAEncPictureParameterBufferH264 *h264;
   vlVaBuffer *coded_buf;

   h264 = buf->data;
   context->desc.h264enc.frame_num = h264->frame_num;
   context->desc.h264enc.not_referenced = false;
   context->desc.h264enc.is_idr = (h264->pic_fields.bits.idr_pic_flag == 1);
   context->desc.h264enc.pic_order_cnt = h264->CurrPic.TopFieldOrderCnt;
   if (context->desc.h264enc.gop_cnt == 0)
      context->desc.h264enc.i_remain = context->gop_coeff;
   else if (context->desc.h264enc.frame_num == 1)
      context->desc.h264enc.i_remain--;

   context->desc.h264enc.p_remain = context->desc.h264enc.gop_size - context->desc.h264enc.gop_cnt - context->desc.h264enc.i_remain;

   coded_buf = handle_table_get(drv->htab, h264->coded_buf);
   if (!coded_buf->derived_surface.resource)
      coded_buf->derived_surface.resource = pipe_buffer_create(drv->pipe->screen, PIPE_BIND_VERTEX_BUFFER,
                                                               PIPE_USAGE_STREAM, coded_buf->size);
   context->coded_buf = coded_buf;

   util_hash_table_set(context->desc.h264enc.frame_idx,
                       UINT_TO_PTR(h264->CurrPic.picture_id),
                       UINT_TO_PTR(h264->frame_num));

   if (context->desc.h264enc.is_idr)
      context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_IDR;
   else
      context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_P;

   context->desc.h264enc.quant_i_frames = h264->pic_init_qp;
   context->desc.h264enc.quant_b_frames = h264->pic_init_qp;
   context->desc.h264enc.quant_p_frames = h264->pic_init_qp;
   context->desc.h264enc.gop_cnt++;
   if (context->desc.h264enc.gop_cnt == context->desc.h264enc.gop_size)
      context->desc.h264enc.gop_cnt = 0;

   return VA_STATUS_SUCCESS;
}

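/* VAEncSliceParameterBufferType: pick the first valid entry of RefPicList0/1
 * as the L0/L1 reference indices (via the frame_idx hash table) and map the
 * VA slice type onto the pipe H.264 picture type. */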
static VAStatus
handleVAEncSliceParameterBufferType(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf)
{
   VAEncSliceParameterBufferH264 *h264;

   h264 = buf->data;
   context->desc.h264enc.ref_idx_l0 = VA_INVALID_ID;
   context->desc.h264enc.ref_idx_l1 = VA_INVALID_ID;

   for (int i = 0; i < 32; i++) {
      if (h264->RefPicList0[i].picture_id != VA_INVALID_ID) {
         if (context->desc.h264enc.ref_idx_l0 == VA_INVALID_ID)
            context->desc.h264enc.ref_idx_l0 = PTR_TO_UINT(util_hash_table_get(context->desc.h264enc.frame_idx,
                                                           UINT_TO_PTR(h264->RefPicList0[i].picture_id)));
      }
      if (h264->RefPicList1[i].picture_id != VA_INVALID_ID && h264->slice_type == 1) {
         if (context->desc.h264enc.ref_idx_l1 == VA_INVALID_ID)
            context->desc.h264enc.ref_idx_l1 = PTR_TO_UINT(util_hash_table_get(context->desc.h264enc.frame_idx,
                                                           UINT_TO_PTR(h264->RefPicList1[i].picture_id)));
      }
   }

   if (h264->slice_type == 1)
      context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_B;
   else if (h264->slice_type == 0)
      context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_P;
   else if (h264->slice_type == 2) {
      if (context->desc.h264enc.is_idr) {
         context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_IDR;
         context->desc.h264enc.idr_pic_id++;
      } else
         context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_I;
   } else
      context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_SKIP;

   return VA_STATUS_SUCCESS;
}

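/* vlVaRenderPicture: dispatch every parameter/data buffer submitted between
 * BeginPicture and EndPicture to the matching decode, encode or video
 * processing handler. The returned status is that of the last handler that
 * set one, so a later success can mask an earlier error. */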
VAStatus
vlVaRenderPicture(VADriverContextP ctx, VAContextID context_id, VABufferID *buffers, int num_buffers)
{
   vlVaDriver *drv;
   vlVaContext *context;
   VAStatus vaStatus = VA_STATUS_SUCCESS;

   unsigned i;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   if (!drv)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   mtx_lock(&drv->mutex);
   context = handle_table_get(drv->htab, context_id);
   if (!context) {
      mtx_unlock(&drv->mutex);
      return VA_STATUS_ERROR_INVALID_CONTEXT;
   }

   for (i = 0; i < num_buffers; ++i) {
      vlVaBuffer *buf = handle_table_get(drv->htab, buffers[i]);
      if (!buf) {
         mtx_unlock(&drv->mutex);
         return VA_STATUS_ERROR_INVALID_BUFFER;
      }

      switch (buf->type) {
      case VAPictureParameterBufferType:
         vaStatus = handlePictureParameterBuffer(drv, context, buf);
         break;

      case VAIQMatrixBufferType:
         handleIQMatrixBuffer(context, buf);
         break;

      case VASliceParameterBufferType:
         handleSliceParameterBuffer(context, buf);
         break;

      case VASliceDataBufferType:
         handleVASliceDataBufferType(context, buf);
         break;

      case VAProcPipelineParameterBufferType:
         vaStatus = vlVaHandleVAProcPipelineParameterBufferType(drv, context, buf);
         break;

      case VAEncSequenceParameterBufferType:
         vaStatus = handleVAEncSequenceParameterBufferType(drv, context, buf);
         break;

      case VAEncMiscParameterBufferType:
         vaStatus = handleVAEncMiscParameterBufferType(context, buf);
         break;

      case VAEncPictureParameterBufferType:
         vaStatus = handleVAEncPictureParameterBufferType(drv, context, buf);
         break;

      case VAEncSliceParameterBufferType:
         vaStatus = handleVAEncSliceParameterBufferType(drv, context, buf);
         break;

      case VAHuffmanTableBufferType:
         vlVaHandleHuffmanTableBufferType(context, buf);
         break;

      default:
         break;
      }
   }
   mtx_unlock(&drv->mutex);

   return vaStatus;
}

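/* vlVaEndPicture: finish the current picture. VPP contexts return
 * immediately. For decode this just ends the frame; for encode it applies
 * the parameter presets, submits the bitstream into the coded buffer,
 * records the feedback handle on the surface and flushes the encoder around
 * IDR boundaries (the first_single_submitted bookkeeping appears to keep
 * submissions paired). */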
VAStatus
vlVaEndPicture(VADriverContextP ctx, VAContextID context_id)
{
   vlVaDriver *drv;
   vlVaContext *context;
   vlVaBuffer *coded_buf;
   vlVaSurface *surf;
   void *feedback;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   if (!drv)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   mtx_lock(&drv->mutex);
   context = handle_table_get(drv->htab, context_id);
   mtx_unlock(&drv->mutex);
   if (!context)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   if (!context->decoder) {
      if (context->templat.profile != PIPE_VIDEO_PROFILE_UNKNOWN)
         return VA_STATUS_ERROR_INVALID_CONTEXT;

      /* VPP */
      return VA_STATUS_SUCCESS;
   }

   mtx_lock(&drv->mutex);
   surf = handle_table_get(drv->htab, context->target_id);
   context->mpeg4.frame_num++;

   if (context->decoder->entrypoint == PIPE_VIDEO_ENTRYPOINT_ENCODE) {
      coded_buf = context->coded_buf;
      getEncParamPreset(context);
      context->desc.h264enc.frame_num_cnt++;
      context->decoder->begin_frame(context->decoder, context->target, &context->desc.base);
      context->decoder->encode_bitstream(context->decoder, context->target,
                                         coded_buf->derived_surface.resource, &feedback);
      surf->feedback = feedback;
      surf->coded_buf = coded_buf;
   }

   context->decoder->end_frame(context->decoder, context->target, &context->desc.base);
   if (context->decoder->entrypoint == PIPE_VIDEO_ENTRYPOINT_ENCODE) {
      int idr_period = context->desc.h264enc.gop_size / context->gop_coeff;
      int p_remain_in_idr = idr_period - context->desc.h264enc.frame_num;
      surf->frame_num_cnt = context->desc.h264enc.frame_num_cnt;
      surf->force_flushed = false;
      if (context->first_single_submitted) {
         context->decoder->flush(context->decoder);
         context->first_single_submitted = false;
         surf->force_flushed = true;
      }
      if (p_remain_in_idr == 1) {
         if ((context->desc.h264enc.frame_num_cnt % 2) != 0) {
            context->decoder->flush(context->decoder);
            context->first_single_submitted = true;
         }
         else
            context->first_single_submitted = false;
         surf->force_flushed = true;
      }
   }
   mtx_unlock(&drv->mutex);
   return VA_STATUS_SUCCESS;
}