*
**************************************************************************/
-#include <pipe/p_video_context.h>
-
-#include <util/u_memory.h>
-#include <util/u_math.h>
-#include <util/u_debug.h>
+#include "util/u_memory.h"
+#include "util/u_math.h"
+#include "util/u_debug.h"
+#include "util/u_video.h"
#include "vdpau_private.h"
+/**
+ * Create a VdpDecoder.
+ */
VdpStatus
vlVdpDecoderCreate(VdpDevice device,
VdpDecoderProfile profile,
VdpDecoder *decoder)
{
enum pipe_video_profile p_profile;
- struct pipe_video_context *vpipe;
+ struct pipe_context *pipe;
+ struct pipe_screen *screen;
vlVdpDevice *dev;
vlVdpDecoder *vldecoder;
VdpStatus ret;
- unsigned i;
+ bool supported;
VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Creating decoder\n");
if (!decoder)
return VDP_STATUS_INVALID_POINTER;
+ *decoder = 0;
if (!(width && height))
return VDP_STATUS_INVALID_VALUE;
if (!dev)
return VDP_STATUS_INVALID_HANDLE;
- vpipe = dev->context->vpipe;
+ pipe = dev->context->pipe;
+ screen = dev->vscreen->pscreen;
+ supported = screen->get_video_param
+ (
+ screen,
+ p_profile,
+ PIPE_VIDEO_CAP_SUPPORTED
+ );
+ if (!supported)
+ return VDP_STATUS_INVALID_DECODER_PROFILE;
vldecoder = CALLOC(1,sizeof(vlVdpDecoder));
if (!vldecoder)
vldecoder->device = dev;
- // TODO: Define max_references. Used mainly for H264
- vldecoder->decoder = vpipe->create_decoder
+ vldecoder->decoder = pipe->create_video_decoder
(
- vpipe, p_profile,
+ pipe, p_profile,
PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
PIPE_VIDEO_CHROMA_FORMAT_420,
- width, height
+ width, height, max_references,
+ false
);
+
if (!vldecoder->decoder) {
ret = VDP_STATUS_ERROR;
goto error_decoder;
}
- vldecoder->cur_buffer = 0;
-
- for (i = 0; i < VL_NUM_DECODE_BUFFERS; ++i) {
- vldecoder->buffer[i] = vldecoder->decoder->create_buffer(vldecoder->decoder);
- if (!vldecoder->buffer[i]) {
- ret = VDP_STATUS_ERROR;
- goto error_buffer;
- }
- }
-
*decoder = vlAddDataHTAB(vldecoder);
if (*decoder == 0) {
ret = VDP_STATUS_ERROR;
return VDP_STATUS_OK;
error_handle:
-error_buffer:
-
- for (i = 0; i < VL_NUM_DECODE_BUFFERS; ++i)
- if (vldecoder->buffer[i])
- vldecoder->buffer[i]->destroy(vldecoder->buffer[i]);
vldecoder->decoder->destroy(vldecoder->decoder);
return ret;
}
+/**
+ * Destroy a VdpDecoder.
+ */
VdpStatus
vlVdpDecoderDestroy(VdpDecoder decoder)
{
vlVdpDecoder *vldecoder;
- unsigned i;
VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Destroying decoder\n");
if (!vldecoder)
return VDP_STATUS_INVALID_HANDLE;
- for (i = 0; i < VL_NUM_DECODE_BUFFERS; ++i)
- if (vldecoder->buffer[i])
- vldecoder->buffer[i]->destroy(vldecoder->buffer[i]);
-
vldecoder->decoder->destroy(vldecoder->decoder);
FREE(vldecoder);
return VDP_STATUS_OK;
}
+/**
+ * Retrieve the parameters used to create a VdpDecoder.
+ */
VdpStatus
vlVdpDecoderGetParameters(VdpDecoder decoder,
VdpDecoderProfile *profile,
uint32_t *width,
uint32_t *height)
{
+ vlVdpDecoder *vldecoder;
+
+ VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Decoder get parameters called\n");
+
+ vldecoder = (vlVdpDecoder *)vlGetDataHTAB(decoder);
+ if (!vldecoder)
+ return VDP_STATUS_INVALID_HANDLE;
+
+ *profile = PipeToProfile(vldecoder->decoder->profile);
+ *width = vldecoder->decoder->width;
+ *height = vldecoder->decoder->height;
+
return VDP_STATUS_OK;
}
+/**
+ * Decode an MPEG-1/2 video.
+ */
static VdpStatus
-vlVdpDecoderRenderMpeg2(struct pipe_video_decoder *decoder,
- struct pipe_video_decode_buffer *buffer,
- struct pipe_video_buffer *target,
- VdpPictureInfoMPEG1Or2 *picture_info,
- uint32_t bitstream_buffer_count,
- VdpBitstreamBuffer const *bitstream_buffers)
+vlVdpDecoderRenderMpeg12(struct pipe_video_decoder *decoder,
+ VdpPictureInfoMPEG1Or2 *picture_info)
{
struct pipe_mpeg12_picture_desc picture;
+ struct pipe_mpeg12_quant_matrix quant;
struct pipe_video_buffer *ref_frames[2];
- unsigned num_ycbcr_blocks[3] = { 0, 0, 0 };
unsigned i;
- VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Decoding MPEG2\n");
+ VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Decoding MPEG12\n");
+
+ i = 0;
/* if surfaces equals VDP_STATUS_INVALID_HANDLE, they are not used */
- if (picture_info->forward_reference == VDP_INVALID_HANDLE)
- ref_frames[0] = NULL;
- else {
- ref_frames[0] = ((vlVdpSurface *)vlGetDataHTAB(picture_info->forward_reference))->video_buffer;
- if (!ref_frames[0])
+ if (picture_info->forward_reference != VDP_INVALID_HANDLE) {
+ ref_frames[i] = ((vlVdpSurface *)vlGetDataHTAB(picture_info->forward_reference))->video_buffer;
+ if (!ref_frames[i])
return VDP_STATUS_INVALID_HANDLE;
+ ++i;
}
- if (picture_info->backward_reference == VDP_INVALID_HANDLE)
- ref_frames[1] = NULL;
- else {
- ref_frames[1] = ((vlVdpSurface *)vlGetDataHTAB(picture_info->backward_reference))->video_buffer;
- if (!ref_frames[1])
+ if (picture_info->backward_reference != VDP_INVALID_HANDLE) {
+ ref_frames[i] = ((vlVdpSurface *)vlGetDataHTAB(picture_info->backward_reference))->video_buffer;
+ if (!ref_frames[i])
return VDP_STATUS_INVALID_HANDLE;
+ ++i;
}
+ decoder->set_reference_frames(decoder, ref_frames, i);
+
memset(&picture, 0, sizeof(picture));
+ picture.base.profile = decoder->profile;
picture.picture_coding_type = picture_info->picture_coding_type;
picture.picture_structure = picture_info->picture_structure;
picture.frame_pred_frame_dct = picture_info->frame_pred_frame_dct;
picture.q_scale_type = picture_info->q_scale_type;
picture.alternate_scan = picture_info->alternate_scan;
- picture.intra_dc_precision = picture_info->intra_dc_precision;
picture.intra_vlc_format = picture_info->intra_vlc_format;
picture.concealment_motion_vectors = picture_info->concealment_motion_vectors;
+ picture.intra_dc_precision = picture_info->intra_dc_precision;
picture.f_code[0][0] = picture_info->f_code[0][0] - 1;
picture.f_code[0][1] = picture_info->f_code[0][1] - 1;
picture.f_code[1][0] = picture_info->f_code[1][0] - 1;
picture.f_code[1][1] = picture_info->f_code[1][1] - 1;
+ picture.num_slices = picture_info->slice_count;
+ picture.top_field_first = picture_info->top_field_first;
+ picture.full_pel_forward_vector = picture_info->full_pel_forward_vector;
+ picture.full_pel_backward_vector = picture_info->full_pel_backward_vector;
+
+ decoder->set_picture_parameters(decoder, &picture.base);
+
+ memset(&quant, 0, sizeof(quant));
+ quant.base.codec = PIPE_VIDEO_CODEC_MPEG12;
+ quant.intra_matrix = picture_info->intra_quantizer_matrix;
+ quant.non_intra_matrix = picture_info->non_intra_quantizer_matrix;
+
+ decoder->set_quant_matrix(decoder, &quant.base);
+ return VDP_STATUS_OK;
+}
+
+/**
+ * Decode an MPEG-4 video.
+ */
+static VdpStatus
+vlVdpDecoderRenderMpeg4(struct pipe_video_decoder *decoder,
+ VdpPictureInfoMPEG4Part2 *picture_info)
+{
+ struct pipe_mpeg4_picture_desc picture;
+ struct pipe_mpeg4_quant_matrix quant;
+ struct pipe_video_buffer *ref_frames[2] = {};
+ unsigned i;
- picture.intra_quantizer_matrix = picture_info->intra_quantizer_matrix;
- picture.non_intra_quantizer_matrix = picture_info->non_intra_quantizer_matrix;
+ VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Decoding MPEG4\n");
- buffer->map(buffer);
+ /* if a surface equals VDP_INVALID_HANDLE, it is not used */
+ if (picture_info->forward_reference != VDP_INVALID_HANDLE) {
+ ref_frames[0] = ((vlVdpSurface *)vlGetDataHTAB(picture_info->forward_reference))->video_buffer;
+ if (!ref_frames[0])
+ return VDP_STATUS_INVALID_HANDLE;
+ }
- for (i = 0; i < bitstream_buffer_count; ++i)
- buffer->decode_bitstream(buffer, bitstream_buffers[i].bitstream_bytes,
- bitstream_buffers[i].bitstream, &picture, num_ycbcr_blocks);
+ if (picture_info->backward_reference != VDP_INVALID_HANDLE) {
+ ref_frames[1] = ((vlVdpSurface *)vlGetDataHTAB(picture_info->backward_reference))->video_buffer;
+ if (!ref_frames[1])
+ return VDP_STATUS_INVALID_HANDLE;
+ }
+ decoder->set_reference_frames(decoder, ref_frames, 2);
- buffer->unmap(buffer);
+ memset(&picture, 0, sizeof(picture));
+ picture.base.profile = decoder->profile;
+ for (i = 0; i < 2; ++i) {
+ picture.trd[i] = picture_info->trd[i];
+ picture.trb[i] = picture_info->trb[i];
+ }
+ picture.vop_time_increment_resolution = picture_info->vop_time_increment_resolution;
+ picture.vop_coding_type = picture_info->vop_coding_type;
+ picture.vop_fcode_forward = picture_info->vop_fcode_forward;
+ picture.vop_fcode_backward = picture_info->vop_fcode_backward;
+ picture.resync_marker_disable = picture_info->resync_marker_disable;
+ picture.interlaced = picture_info->interlaced;
+ picture.quant_type = picture_info->quant_type;
+ picture.quarter_sample = picture_info->quarter_sample;
+ picture.short_video_header = picture_info->short_video_header;
+ picture.rounding_control = picture_info->rounding_control;
+ picture.alternate_vertical_scan_flag = picture_info->alternate_vertical_scan_flag;
+ picture.top_field_first = picture_info->top_field_first;
+ decoder->set_picture_parameters(decoder, &picture.base);
+
+ memset(&quant, 0, sizeof(quant));
+ quant.base.codec = PIPE_VIDEO_CODEC_MPEG4;
+ quant.intra_matrix = picture_info->intra_quantizer_matrix;
+ quant.non_intra_matrix = picture_info->non_intra_quantizer_matrix;
+ decoder->set_quant_matrix(decoder, &quant.base);
+ return VDP_STATUS_OK;
+}
- decoder->flush_buffer(buffer, num_ycbcr_blocks, ref_frames, target);
+static VdpStatus
+vlVdpDecoderRenderVC1(struct pipe_video_decoder *decoder,
+ VdpPictureInfoVC1 *picture_info)
+{
+ struct pipe_vc1_picture_desc picture;
+ struct pipe_video_buffer *ref_frames[2] = {};
+ VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Decoding VC-1\n");
+
+ /* if a surface equals VDP_INVALID_HANDLE, it is not used */
+ if (picture_info->forward_reference != VDP_INVALID_HANDLE) {
+ ref_frames[0] = ((vlVdpSurface *)vlGetDataHTAB(picture_info->forward_reference))->video_buffer;
+ if (!ref_frames[0])
+ return VDP_STATUS_INVALID_HANDLE;
+ }
+
+ if (picture_info->backward_reference != VDP_INVALID_HANDLE) {
+ ref_frames[1] = ((vlVdpSurface *)vlGetDataHTAB(picture_info->backward_reference))->video_buffer;
+ if (!ref_frames[1])
+ return VDP_STATUS_INVALID_HANDLE;
+ }
+ decoder->set_reference_frames(decoder, ref_frames, 2);
+
+ memset(&picture, 0, sizeof(picture));
+ picture.base.profile = decoder->profile;
+ picture.slice_count = picture_info->slice_count;
+ picture.picture_type = picture_info->picture_type;
+ picture.frame_coding_mode = picture_info->frame_coding_mode;
+ picture.postprocflag = picture_info->postprocflag;
+ picture.pulldown = picture_info->pulldown;
+ picture.interlace = picture_info->interlace;
+ picture.tfcntrflag = picture_info->tfcntrflag;
+ picture.finterpflag = picture_info->finterpflag;
+ picture.psf = picture_info->psf;
+ picture.dquant = picture_info->dquant;
+ picture.panscan_flag = picture_info->panscan_flag;
+ picture.refdist_flag = picture_info->refdist_flag;
+ picture.quantizer = picture_info->quantizer;
+ picture.extended_mv = picture_info->extended_mv;
+ picture.extended_dmv = picture_info->extended_dmv;
+ picture.overlap = picture_info->overlap;
+ picture.vstransform = picture_info->vstransform;
+ picture.loopfilter = picture_info->loopfilter;
+ picture.fastuvmc = picture_info->fastuvmc;
+ picture.range_mapy_flag = picture_info->range_mapy_flag;
+ picture.range_mapy = picture_info->range_mapy;
+ picture.range_mapuv_flag = picture_info->range_mapuv_flag;
+ picture.range_mapuv = picture_info->range_mapuv;
+ picture.multires = picture_info->multires;
+ picture.syncmarker = picture_info->syncmarker;
+ picture.rangered = picture_info->rangered;
+ picture.maxbframes = picture_info->maxbframes;
+ picture.deblockEnable = picture_info->deblockEnable;
+ picture.pquant = picture_info->pquant;
+ decoder->set_picture_parameters(decoder, &picture.base);
return VDP_STATUS_OK;
}
+/**
+ * Decode a compressed field/frame and render the result into a VdpVideoSurface.
+ */
VdpStatus
vlVdpDecoderRender(VdpDecoder decoder,
VdpVideoSurface target,
uint32_t bitstream_buffer_count,
VdpBitstreamBuffer const *bitstream_buffers)
{
+ const void * buffers[bitstream_buffer_count];
+ unsigned sizes[bitstream_buffer_count];
vlVdpDecoder *vldecoder;
vlVdpSurface *vlsurf;
+ VdpStatus ret;
+ struct pipe_video_decoder *dec;
+ unsigned i;
VDPAU_MSG(VDPAU_TRACE, "[VDPAU] Decoding\n");
vldecoder = (vlVdpDecoder *)vlGetDataHTAB(decoder);
if (!vldecoder)
return VDP_STATUS_INVALID_HANDLE;
+ dec = vldecoder->decoder;
vlsurf = (vlVdpSurface *)vlGetDataHTAB(target);
if (!vlsurf)
if (vlsurf->device != vldecoder->device)
return VDP_STATUS_HANDLE_DEVICE_MISMATCH;
- if (vlsurf->video_buffer->chroma_format != vldecoder->decoder->chroma_format)
+ if (vlsurf->video_buffer->chroma_format != dec->chroma_format)
// TODO: Recreate decoder with correct chroma
return VDP_STATUS_INVALID_CHROMA_TYPE;
- // TODO: Right now only mpeg2 is supported.
- switch (vldecoder->decoder->profile) {
- case PIPE_VIDEO_PROFILE_MPEG2_SIMPLE:
- case PIPE_VIDEO_PROFILE_MPEG2_MAIN:
- ++vldecoder->cur_buffer;
- vldecoder->cur_buffer %= VL_NUM_DECODE_BUFFERS;
- return vlVdpDecoderRenderMpeg2(vldecoder->decoder,
- vldecoder->buffer[vldecoder->cur_buffer],
- vlsurf->video_buffer,
- (VdpPictureInfoMPEG1Or2 *)picture_info,
- bitstream_buffer_count,bitstream_buffers);
- break;
+ dec->set_decode_target(dec, vlsurf->video_buffer);
+ switch (u_reduce_video_profile(dec->profile)) {
+ case PIPE_VIDEO_CODEC_MPEG12:
+ ret = vlVdpDecoderRenderMpeg12(dec, (VdpPictureInfoMPEG1Or2 *)picture_info);
+ break;
+ case PIPE_VIDEO_CODEC_MPEG4:
+ ret = vlVdpDecoderRenderMpeg4(dec, (VdpPictureInfoMPEG4Part2 *)picture_info);
+ break;
+ case PIPE_VIDEO_CODEC_VC1:
+ ret = vlVdpDecoderRenderVC1(dec, (VdpPictureInfoVC1 *)picture_info);
+ break;
default:
return VDP_STATUS_INVALID_DECODER_PROFILE;
}
+ if (ret != VDP_STATUS_OK)
+ return ret;
+
+ dec->begin_frame(dec);
+ for (i = 0; i < bitstream_buffer_count; ++i) {
+ buffers[i] = bitstream_buffers[i].bitstream;
+ sizes[i] = bitstream_buffers[i].bitstream_bytes;
+ }
+ dec->decode_bitstream(dec, bitstream_buffer_count, buffers, sizes);
+ dec->end_frame(dec);
+ return ret;
}