/*
 * Copyright 2013 Ilia Mirkin
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <fcntl.h>

#include "util/u_format.h"
#include "util/u_sampler.h"
#include "vl/vl_zscan.h"

#include "nv50/nv84_video.h"

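/* Read exactly `len` bytes of a firmware image from `path` into the
 * caller-provided destination (a mapped BO). Returns 0 on success. */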
static int
nv84_copy_firmware(const char *path, void *dest, ssize_t len)
{
   int fd = open(path, O_RDONLY | O_CLOEXEC);
   ssize_t r;
   if (fd < 0) {
      fprintf(stderr, "opening firmware file %s failed: %m\n", path);
      return 1;
   }
   r = read(fd, dest, len);
   close(fd);

   if (r != len) {
      fprintf(stderr, "reading firmware file %s failed: %m\n", path);
      return 1;
   }

   return 0;
}

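/* Return the size of the file at `path`, or a negative value if stat fails. */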
static int
filesize(const char *path)
{
   int ret;
   struct stat statbuf;

   ret = stat(path, &statbuf);
   if (ret)
      return ret;
   return statbuf.st_size;
}

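/* Load one or two firmware images into a single VRAM BO. When a second image
 * is given, it is placed at a 0x100-aligned offset after the first one, and
 * that offset is remembered in dec->vp_fw2_offset. */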
static struct nouveau_bo *
nv84_load_firmwares(struct nouveau_device *dev, struct nv84_decoder *dec,
                    const char *fw1, const char *fw2)
{
   int ret, size1, size2 = 0;
   struct nouveau_bo *fw;

   size1 = filesize(fw1);
   if (fw2)
      size2 = filesize(fw2);
   if (size1 < 0 || size2 < 0)
      return NULL;

   dec->vp_fw2_offset = align(size1, 0x100);

   ret = nouveau_bo_new(dev, NOUVEAU_BO_VRAM, 0, dec->vp_fw2_offset + size2, NULL, &fw);
   if (ret)
      return NULL;
   ret = nouveau_bo_map(fw, NOUVEAU_BO_WR, dec->client);
   if (ret)
      goto error;

   ret = nv84_copy_firmware(fw1, fw->map, size1);
   if (fw2 && !ret)
      ret = nv84_copy_firmware(fw2, fw->map + dec->vp_fw2_offset, size2);
   munmap(fw->map, fw->size);
   fw->map = NULL;
   if (!ret)
      return fw;
error:
   nouveau_bo_ref(NULL, &fw);
   return NULL;
}

static struct nouveau_bo *
nv84_load_bsp_firmware(struct nouveau_device *dev, struct nv84_decoder *dec)
{
   return nv84_load_firmwares(
         dev, dec, "/lib/firmware/nouveau/nv84_bsp-h264", NULL);
}

static struct nouveau_bo *
nv84_load_vp_firmware(struct nouveau_device *dev, struct nv84_decoder *dec)
{
   return nv84_load_firmwares(
         dev, dec,
         "/lib/firmware/nouveau/nv84_vp-h264-1",
         "/lib/firmware/nouveau/nv84_vp-h264-2");
}

static struct nouveau_bo *
nv84_load_vp_firmware_mpeg(struct nouveau_device *dev, struct nv84_decoder *dec)
{
   return nv84_load_firmwares(
         dev, dec, "/lib/firmware/nouveau/nv84_vp-mpeg12", NULL);
}

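/* H.264 bitstream decoding: the BSP engine parses the bitstream first, then
 * the VP engine consumes its output to reconstruct the frame. */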
static void
nv84_decoder_decode_bitstream_h264(struct pipe_video_codec *decoder,
                                   struct pipe_video_buffer *video_target,
                                   struct pipe_picture_desc *picture,
                                   unsigned num_buffers,
                                   const void *const *data,
                                   const unsigned *num_bytes)
{
   struct nv84_decoder *dec = (struct nv84_decoder *)decoder;
   struct nv84_video_buffer *target = (struct nv84_video_buffer *)video_target;

   struct pipe_h264_picture_desc *desc = (struct pipe_h264_picture_desc *)picture;

   assert(target->base.buffer_format == PIPE_FORMAT_NV12);

   nv84_decoder_bsp(dec, desc, num_buffers, data, num_bytes, target);
   nv84_decoder_vp_h264(dec, desc, target);
}

static void
nv84_decoder_flush(struct pipe_video_codec *decoder)
{
}

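/* For H.264 all of the work happens in decode_bitstream, so the per-frame
 * begin/end hooks have nothing to do. */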
static void
nv84_decoder_begin_frame_h264(struct pipe_video_codec *decoder,
                              struct pipe_video_buffer *target,
                              struct pipe_picture_desc *picture)
{
}

static void
nv84_decoder_end_frame_h264(struct pipe_video_codec *decoder,
                            struct pipe_video_buffer *target,
                            struct pipe_picture_desc *picture)
{
}

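/* MPEG1/2 bitstream decoding goes through the common vl parser, which calls
 * back into decode_macroblock below for each parsed macroblock. */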
static void
nv84_decoder_decode_bitstream_mpeg12(struct pipe_video_codec *decoder,
                                     struct pipe_video_buffer *video_target,
                                     struct pipe_picture_desc *picture,
                                     unsigned num_buffers,
                                     const void *const *data,
                                     const unsigned *num_bytes)
{
   struct nv84_decoder *dec = (struct nv84_decoder *)decoder;

   assert(video_target->buffer_format == PIPE_FORMAT_NV12);

   vl_mpg12_bs_decode(dec->mpeg12_bs,
                      video_target,
                      (struct pipe_mpeg12_picture_desc *)picture,
                      num_buffers,
                      data,
                      num_bytes);
}

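/* Set up the per-frame pointers into the MPEG1/2 scratch BO (macroblock info
 * first, DCT data after it) and load the quant matrices in the selected
 * zig-zag scan order. */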
static void
nv84_decoder_begin_frame_mpeg12(struct pipe_video_codec *decoder,
                                struct pipe_video_buffer *target,
                                struct pipe_picture_desc *picture)
{
   struct nv84_decoder *dec = (struct nv84_decoder *)decoder;
   struct pipe_mpeg12_picture_desc *desc = (struct pipe_mpeg12_picture_desc *)picture;
   int i;

   nouveau_bo_wait(dec->mpeg12_bo, NOUVEAU_BO_RDWR, dec->client);
   dec->mpeg12_mb_info = dec->mpeg12_bo->map + 0x100;
   dec->mpeg12_data = dec->mpeg12_bo->map + 0x100 +
      align(0x20 * mb(dec->base.width) * mb(dec->base.height), 0x100);
   if (desc->intra_matrix) {
      dec->zscan = desc->alternate_scan ? vl_zscan_alternate : vl_zscan_normal;
      for (i = 0; i < 64; i++) {
         dec->mpeg12_intra_matrix[i] = desc->intra_matrix[dec->zscan[i]];
         dec->mpeg12_non_intra_matrix[i] = desc->non_intra_matrix[dec->zscan[i]];
      }
      dec->mpeg12_intra_matrix[0] = 1 << (7 - desc->intra_dc_precision);
   }
}

static void
nv84_decoder_end_frame_mpeg12(struct pipe_video_codec *decoder,
                              struct pipe_video_buffer *target,
                              struct pipe_picture_desc *picture)
{
   nv84_decoder_vp_mpeg12(
         (struct nv84_decoder *)decoder,
         (struct pipe_mpeg12_picture_desc *)picture,
         (struct nv84_video_buffer *)target);
}

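/* Called either directly by the state tracker (XvMC-style MC/IDCT
 * entrypoints) or by the vl bitstream parser with already-parsed
 * macroblocks. */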
static void
nv84_decoder_decode_macroblock(struct pipe_video_codec *decoder,
                               struct pipe_video_buffer *target,
                               struct pipe_picture_desc *picture,
                               const struct pipe_macroblock *macroblocks,
                               unsigned num_macroblocks)
{
   const struct pipe_mpeg12_macroblock *mb = (const struct pipe_mpeg12_macroblock *)macroblocks;
   for (int i = 0; i < num_macroblocks; i++, mb++) {
      nv84_decoder_vp_mpeg12_mb(
            (struct nv84_decoder *)decoder,
            (struct pipe_mpeg12_picture_desc *)picture,
            mb);
   }
}

static void
nv84_decoder_destroy(struct pipe_video_codec *decoder)
{
   struct nv84_decoder *dec = (struct nv84_decoder *)decoder;

   nouveau_bo_ref(NULL, &dec->bsp_fw);
   nouveau_bo_ref(NULL, &dec->bsp_data);
   nouveau_bo_ref(NULL, &dec->vp_fw);
   nouveau_bo_ref(NULL, &dec->vp_data);
   nouveau_bo_ref(NULL, &dec->mbring);
   nouveau_bo_ref(NULL, &dec->vpring);
   nouveau_bo_ref(NULL, &dec->bitstream);
   nouveau_bo_ref(NULL, &dec->vp_params);
   nouveau_bo_ref(NULL, &dec->fence);

   nouveau_object_del(&dec->bsp);
   nouveau_object_del(&dec->vp);

   nouveau_bufctx_del(&dec->bsp_bufctx);
   nouveau_pushbuf_del(&dec->bsp_pushbuf);
   nouveau_object_del(&dec->bsp_channel);

   nouveau_bufctx_del(&dec->vp_bufctx);
   nouveau_pushbuf_del(&dec->vp_pushbuf);
   nouveau_object_del(&dec->vp_channel);

   nouveau_client_del(&dec->client);

   if (dec->mpeg12_bs)
      FREE(dec->mpeg12_bs);
   FREE(dec);
}

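/* Create a decoder: one FIFO channel, firmware and scratch buffers for the VP
 * engine, plus a second channel and firmware for the BSP engine when doing
 * H.264 bitstream decoding. MPEG1/2 only needs VP. */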
struct pipe_video_codec *
nv84_create_decoder(struct pipe_context *context,
                    const struct pipe_video_codec *templ)
{
   struct nv50_context *nv50 = (struct nv50_context *)context;
   struct nouveau_screen *screen = &nv50->screen->base;
   struct nv84_decoder *dec;
   struct nouveau_pushbuf *bsp_push, *vp_push;
   struct nv50_surface surf;
   struct nv50_miptree mip;
   union pipe_color_union color;
   struct nv04_fifo nv04_data = { .vram = 0xbeef0201, .gart = 0xbeef0202 };
   int ret, i;
   int is_h264 = u_reduce_video_profile(templ->profile) == PIPE_VIDEO_FORMAT_MPEG4_AVC;
   int is_mpeg12 = u_reduce_video_profile(templ->profile) == PIPE_VIDEO_FORMAT_MPEG12;

   if (getenv("XVMC_VL"))
      return vl_create_decoder(context, templ);

   if ((is_h264 && templ->entrypoint != PIPE_VIDEO_ENTRYPOINT_BITSTREAM) ||
       (is_mpeg12 && templ->entrypoint > PIPE_VIDEO_ENTRYPOINT_IDCT)) {
      debug_printf("unsupported entrypoint: %x\n", templ->entrypoint);
      return NULL;
   }

   if (!is_h264 && !is_mpeg12) {
      debug_printf("invalid profile: %x\n", templ->profile);
      return NULL;
   }

   dec = CALLOC_STRUCT(nv84_decoder);
   if (!dec)
      return NULL;

   dec->base = *templ;
   dec->base.context = context;
   dec->base.destroy = nv84_decoder_destroy;
   dec->base.flush = nv84_decoder_flush;
   if (is_h264) {
      dec->base.decode_bitstream = nv84_decoder_decode_bitstream_h264;
      dec->base.begin_frame = nv84_decoder_begin_frame_h264;
      dec->base.end_frame = nv84_decoder_end_frame_h264;

      dec->frame_mbs = mb(dec->base.width) * mb_half(dec->base.height) * 2;
      dec->frame_size = dec->frame_mbs << 8;
      dec->vpring_deblock = align(0x30 * dec->frame_mbs, 0x100);
      dec->vpring_residual = 0x2000 + MAX2(0x32000, 0x600 * dec->frame_mbs);
      dec->vpring_ctrl = MAX2(0x10000, align(0x1080 + 0x144 * dec->frame_mbs, 0x100));
   } else if (is_mpeg12) {
      dec->base.decode_macroblock = nv84_decoder_decode_macroblock;
      dec->base.begin_frame = nv84_decoder_begin_frame_mpeg12;
      dec->base.end_frame = nv84_decoder_end_frame_mpeg12;

      if (templ->entrypoint == PIPE_VIDEO_ENTRYPOINT_BITSTREAM) {
         dec->mpeg12_bs = CALLOC_STRUCT(vl_mpg12_bs);
         if (!dec->mpeg12_bs)
            goto fail;
         vl_mpg12_bs_init(dec->mpeg12_bs, &dec->base);
         dec->base.decode_bitstream = nv84_decoder_decode_bitstream_mpeg12;
      }
   } else {
      goto fail;
   }

   ret = nouveau_client_new(screen->device, &dec->client);
   if (ret)
      goto fail;

   if (is_h264) {
      ret = nouveau_object_new(&screen->device->object, 0,
                               NOUVEAU_FIFO_CHANNEL_CLASS,
                               &nv04_data, sizeof(nv04_data), &dec->bsp_channel);
      if (ret)
         goto fail;

      ret = nouveau_pushbuf_new(dec->client, dec->bsp_channel, 4,
                                32 * 1024, true, &dec->bsp_pushbuf);
      if (ret)
         goto fail;

      ret = nouveau_bufctx_new(dec->client, 1, &dec->bsp_bufctx);
      if (ret)
         goto fail;
   }

   ret = nouveau_object_new(&screen->device->object, 0,
                            NOUVEAU_FIFO_CHANNEL_CLASS,
                            &nv04_data, sizeof(nv04_data), &dec->vp_channel);
   if (ret)
      goto fail;
   ret = nouveau_pushbuf_new(dec->client, dec->vp_channel, 4,
                             32 * 1024, true, &dec->vp_pushbuf);
   if (ret)
      goto fail;

   ret = nouveau_bufctx_new(dec->client, 1, &dec->vp_bufctx);
   if (ret)
      goto fail;

   bsp_push = dec->bsp_pushbuf;
   vp_push = dec->vp_pushbuf;

   if (is_h264) {
      dec->bsp_fw = nv84_load_bsp_firmware(screen->device, dec);
      dec->vp_fw = nv84_load_vp_firmware(screen->device, dec);
      if (!dec->bsp_fw || !dec->vp_fw)
         goto fail;
   }
   if (is_mpeg12) {
      dec->vp_fw = nv84_load_vp_firmware_mpeg(screen->device, dec);
      if (!dec->vp_fw)
         goto fail;
   }

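   /* Scratch allocations: a data segment for each engine, plus (for H.264)
    * the vpring/mbring work buffers and GART buffers for the bitstream and
    * VP parameters, or (for MPEG1/2) a single GART buffer holding macroblock
    * info followed by DCT coefficients. */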
   if (is_h264) {
      ret = nouveau_bo_new(screen->device, NOUVEAU_BO_VRAM | NOUVEAU_BO_NOSNOOP,
                           0, 0x40000, NULL, &dec->bsp_data);
      if (ret)
         goto fail;
   }
   ret = nouveau_bo_new(screen->device, NOUVEAU_BO_VRAM | NOUVEAU_BO_NOSNOOP,
                        0, 0x40000, NULL, &dec->vp_data);
   if (ret)
      goto fail;
   if (is_h264) {
      ret = nouveau_bo_new(screen->device, NOUVEAU_BO_VRAM | NOUVEAU_BO_NOSNOOP,
                           0,
                           2 * (dec->vpring_deblock +
                                dec->vpring_residual +
                                dec->vpring_ctrl +
                                0x1000),
                           NULL, &dec->vpring);
      if (ret)
         goto fail;
      ret = nouveau_bo_new(screen->device, NOUVEAU_BO_VRAM | NOUVEAU_BO_NOSNOOP,
                           0,
                           (templ->max_references + 1) * dec->frame_mbs * 0x40 +
                           dec->frame_size + 0x2000,
                           NULL, &dec->mbring);
      if (ret)
         goto fail;
      ret = nouveau_bo_new(screen->device, NOUVEAU_BO_GART,
                           0, 2 * (0x700 + MAX2(0x40000, 0x800 + 0x180 * dec->frame_mbs)),
                           NULL, &dec->bitstream);
      if (ret)
         goto fail;
      ret = nouveau_bo_map(dec->bitstream, NOUVEAU_BO_WR, dec->client);
      if (ret)
         goto fail;
      ret = nouveau_bo_new(screen->device, NOUVEAU_BO_GART,
                           0, 0x2000, NULL, &dec->vp_params);
      if (ret)
         goto fail;
      ret = nouveau_bo_map(dec->vp_params, NOUVEAU_BO_WR, dec->client);
      if (ret)
         goto fail;
   }
   if (is_mpeg12) {
      ret = nouveau_bo_new(screen->device, NOUVEAU_BO_GART,
                           0,
                           align(0x20 * mb(templ->width) * mb(templ->height), 0x100) +
                           (6 * 64 * 8) * mb(templ->width) * mb(templ->height) + 0x100,
                           NULL, &dec->mpeg12_bo);
      if (ret)
         goto fail;
      ret = nouveau_bo_map(dec->mpeg12_bo, NOUVEAU_BO_WR, dec->client);
      if (ret)
         goto fail;
   }

   ret = nouveau_bo_new(screen->device, NOUVEAU_BO_VRAM,
                        0, 0x1000, NULL, &dec->fence);
   if (ret)
      goto fail;
   ret = nouveau_bo_map(dec->fence, NOUVEAU_BO_WR, dec->client);
   if (ret)
      goto fail;
   *(uint32_t *)dec->fence->map = 0;

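   /* Add the firmware and data BOs to each channel's bufctx so they get
    * referenced on every pushbuf submission. */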
444 nouveau_pushbuf_bufctx(bsp_push, dec->bsp_bufctx);
445 nouveau_bufctx_refn(dec->bsp_bufctx, 0,
446 dec->bsp_fw, NOUVEAU_BO_VRAM | NOUVEAU_BO_RD);
447 nouveau_bufctx_refn(dec->bsp_bufctx, 0,
448 dec->bsp_data, NOUVEAU_BO_VRAM | NOUVEAU_BO_RDWR);
449 }
450
451 nouveau_pushbuf_bufctx(vp_push, dec->vp_bufctx);
452 nouveau_bufctx_refn(dec->vp_bufctx, 0, dec->vp_fw,
453 NOUVEAU_BO_VRAM | NOUVEAU_BO_RD);
454 nouveau_bufctx_refn(dec->vp_bufctx, 0, dec->vp_data,
455 NOUVEAU_BO_VRAM | NOUVEAU_BO_RDWR);
456
457 if (is_h264 && !ret)
458 ret = nouveau_object_new(dec->bsp_channel, 0xbeef74b0, 0x74b0,
459 NULL, 0, &dec->bsp);
460
461 if (!ret)
462 ret = nouveau_object_new(dec->vp_channel, 0xbeef7476, 0x7476,
463 NULL, 0, &dec->vp);
464
465 if (ret)
466 goto fail;
467
468
469 if (is_h264) {
      /* Zero out some parts of mbring/vpring. There's got to be a cleaner way
       * of doing this... perhaps it makes sense to just copy the relevant
       * logic here. */
      color.f[0] = color.f[1] = color.f[2] = color.f[3] = 0;
      surf.offset = dec->frame_size;
      surf.width = 64;
      surf.height = (templ->max_references + 1) * dec->frame_mbs / 4;
      surf.depth = 1;
      surf.base.format = PIPE_FORMAT_B8G8R8A8_UNORM;
      surf.base.u.tex.level = 0;
      surf.base.texture = &mip.base.base;
      mip.level[0].tile_mode = 0;
      mip.level[0].pitch = surf.width * 4;
      mip.base.domain = NOUVEAU_BO_VRAM;
      mip.base.bo = dec->mbring;
      context->clear_render_target(context, &surf.base, &color, 0, 0, 64, 4760);
      surf.offset = dec->vpring->size / 2 - 0x1000;
      surf.width = 1024;
      surf.height = 1;
      mip.level[0].pitch = surf.width * 4;
      mip.base.bo = dec->vpring;
      context->clear_render_target(context, &surf.base, &color, 0, 0, 1024, 1);
      surf.offset = dec->vpring->size - 0x1000;
      context->clear_render_target(context, &surf.base, &color, 0, 0, 1024, 1);

      PUSH_SPACE(screen->pushbuf, 5);
      PUSH_REFN(screen->pushbuf, dec->fence, NOUVEAU_BO_VRAM | NOUVEAU_BO_RDWR);
      /* The clear_render_target is done via 3D engine, so use it to write to a
       * semaphore to indicate that it's done.
       */
      BEGIN_NV04(screen->pushbuf, NV50_3D(QUERY_ADDRESS_HIGH), 4);
      PUSH_DATAh(screen->pushbuf, dec->fence->offset);
      PUSH_DATA (screen->pushbuf, dec->fence->offset);
      PUSH_DATA (screen->pushbuf, 1);
      PUSH_DATA (screen->pushbuf, 0xf010);
      PUSH_KICK (screen->pushbuf);

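      /* Bind the BSP object on its channel, program what are presumably its
       * DMA object bindings (methods 0x180..0x1b8), and point the engine at
       * its firmware and data segment. */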
      PUSH_SPACE(bsp_push, 2 + 12 + 2 + 4 + 3);

      BEGIN_NV04(bsp_push, SUBC_BSP(NV01_SUBCHAN_OBJECT), 1);
      PUSH_DATA (bsp_push, dec->bsp->handle);

      BEGIN_NV04(bsp_push, SUBC_BSP(0x180), 11);
      for (i = 0; i < 11; i++)
         PUSH_DATA(bsp_push, nv04_data.vram);
      BEGIN_NV04(bsp_push, SUBC_BSP(0x1b8), 1);
      PUSH_DATA (bsp_push, nv04_data.vram);

      BEGIN_NV04(bsp_push, SUBC_BSP(0x600), 3);
      PUSH_DATAh(bsp_push, dec->bsp_fw->offset);
      PUSH_DATA (bsp_push, dec->bsp_fw->offset);
      PUSH_DATA (bsp_push, dec->bsp_fw->size);

      BEGIN_NV04(bsp_push, SUBC_BSP(0x628), 2);
      PUSH_DATA (bsp_push, dec->bsp_data->offset >> 8);
      PUSH_DATA (bsp_push, dec->bsp_data->size);
      PUSH_KICK (bsp_push);
   }

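   /* Same initialization sequence for the VP engine. */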
   PUSH_SPACE(vp_push, 2 + 12 + 2 + 4 + 3);

   BEGIN_NV04(vp_push, SUBC_VP(NV01_SUBCHAN_OBJECT), 1);
   PUSH_DATA (vp_push, dec->vp->handle);

   BEGIN_NV04(vp_push, SUBC_VP(0x180), 11);
   for (i = 0; i < 11; i++)
      PUSH_DATA(vp_push, nv04_data.vram);

   BEGIN_NV04(vp_push, SUBC_VP(0x1b8), 1);
   PUSH_DATA (vp_push, nv04_data.vram);

   BEGIN_NV04(vp_push, SUBC_VP(0x600), 3);
   PUSH_DATAh(vp_push, dec->vp_fw->offset);
   PUSH_DATA (vp_push, dec->vp_fw->offset);
   PUSH_DATA (vp_push, dec->vp_fw->size);

   BEGIN_NV04(vp_push, SUBC_VP(0x628), 2);
   PUSH_DATA (vp_push, dec->vp_data->offset >> 8);
   PUSH_DATA (vp_push, dec->vp_data->size);
   PUSH_KICK (vp_push);

   return &dec->base;
fail:
   nv84_decoder_destroy(&dec->base);
   return NULL;
}

static struct pipe_sampler_view **
nv84_video_buffer_sampler_view_planes(struct pipe_video_buffer *buffer)
{
   struct nv84_video_buffer *buf = (struct nv84_video_buffer *)buffer;
   return buf->sampler_view_planes;
}

static struct pipe_sampler_view **
nv84_video_buffer_sampler_view_components(struct pipe_video_buffer *buffer)
{
   struct nv84_video_buffer *buf = (struct nv84_video_buffer *)buffer;
   return buf->sampler_view_components;
}

static struct pipe_surface **
nv84_video_buffer_surfaces(struct pipe_video_buffer *buffer)
{
   struct nv84_video_buffer *buf = (struct nv84_video_buffer *)buffer;
   return buf->surfaces;
}

static void
nv84_video_buffer_destroy(struct pipe_video_buffer *buffer)
{
   struct nv84_video_buffer *buf = (struct nv84_video_buffer *)buffer;
   unsigned i;

   assert(buf);

   for (i = 0; i < VL_NUM_COMPONENTS; ++i) {
      pipe_resource_reference(&buf->resources[i], NULL);
      pipe_sampler_view_reference(&buf->sampler_view_planes[i], NULL);
      pipe_sampler_view_reference(&buf->sampler_view_components[i], NULL);
      pipe_surface_reference(&buf->surfaces[i * 2], NULL);
      pipe_surface_reference(&buf->surfaces[i * 2 + 1], NULL);
   }

   nouveau_bo_ref(NULL, &buf->interlaced);
   nouveau_bo_ref(NULL, &buf->full);

   FREE(buffer);
}

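/* NV12 buffers are allocated as two 2D-array resources (Y as R8, UV as R8G8,
 * each with one array layer per field) that share a single tiled VRAM BO,
 * since VP expects the planes to be adjacent. A second "full" BO of the same
 * size is kept for reference-frame storage. */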
struct pipe_video_buffer *
nv84_video_buffer_create(struct pipe_context *pipe,
                         const struct pipe_video_buffer *template)
{
   struct nv84_video_buffer *buffer;
   struct pipe_resource templ;
   unsigned i, j, component;
   struct pipe_sampler_view sv_templ;
   struct pipe_surface surf_templ;
   struct nv50_miptree *mt0, *mt1;
   struct nouveau_bo *empty = NULL;
   struct nouveau_screen *screen = &((struct nv50_context *)pipe)->screen->base;
   union nouveau_bo_config cfg;
   unsigned bo_size;

   if (getenv("XVMC_VL") || template->buffer_format != PIPE_FORMAT_NV12)
      return vl_video_buffer_create(pipe, template);

   if (!template->interlaced) {
      debug_printf("Require interlaced video buffers\n");
      return NULL;
   }
   if (template->chroma_format != PIPE_VIDEO_CHROMA_FORMAT_420) {
      debug_printf("Must use 4:2:0 format\n");
      return NULL;
   }

   /*
    * Note that there are always going to be exactly two planes, one for Y,
    * and one for UV. These are also the resources. VP expects these to be
    * adjacent, so they need to belong to the same BO.
    */

   buffer = CALLOC_STRUCT(nv84_video_buffer);
   if (!buffer) return NULL;

   buffer->mvidx = -1;

   buffer->base.buffer_format = template->buffer_format;
   buffer->base.context = pipe;
   buffer->base.destroy = nv84_video_buffer_destroy;
   buffer->base.chroma_format = template->chroma_format;
   buffer->base.width = template->width;
   buffer->base.height = template->height;
   buffer->base.get_sampler_view_planes = nv84_video_buffer_sampler_view_planes;
   buffer->base.get_sampler_view_components = nv84_video_buffer_sampler_view_components;
   buffer->base.get_surfaces = nv84_video_buffer_surfaces;
   buffer->base.interlaced = true;

   memset(&templ, 0, sizeof(templ));
   templ.target = PIPE_TEXTURE_2D_ARRAY;
   templ.depth0 = 1;
   templ.bind = PIPE_BIND_SAMPLER_VIEW | PIPE_BIND_RENDER_TARGET;
   templ.format = PIPE_FORMAT_R8_UNORM;
   templ.width0 = align(template->width, 2);
   templ.height0 = align(template->height, 4) / 2;
   templ.flags = NV50_RESOURCE_FLAG_VIDEO | NV50_RESOURCE_FLAG_NOALLOC;
   templ.array_size = 2;

   cfg.nv50.tile_mode = 0x20;
   cfg.nv50.memtype = 0x70;

   buffer->resources[0] = pipe->screen->resource_create(pipe->screen, &templ);
   if (!buffer->resources[0])
      goto error;

   templ.format = PIPE_FORMAT_R8G8_UNORM;
   templ.width0 /= 2;
   templ.height0 /= 2;
   buffer->resources[1] = pipe->screen->resource_create(pipe->screen, &templ);
   if (!buffer->resources[1])
      goto error;

   mt0 = nv50_miptree(buffer->resources[0]);
   mt1 = nv50_miptree(buffer->resources[1]);

   bo_size = mt0->total_size + mt1->total_size;
   if (nouveau_bo_new(screen->device, NOUVEAU_BO_VRAM | NOUVEAU_BO_NOSNOOP, 0,
                      bo_size, &cfg, &buffer->interlaced))
      goto error;
   /* XXX Change reference frame management so that this is only allocated in
    * the decoder when necessary. */
   if (nouveau_bo_new(screen->device, NOUVEAU_BO_VRAM | NOUVEAU_BO_NOSNOOP, 0,
                      bo_size, &cfg, &buffer->full))
      goto error;

   mt0->base.bo = buffer->interlaced;
   mt0->base.domain = NOUVEAU_BO_VRAM;
   mt0->base.offset = 0;
   mt0->base.address = buffer->interlaced->offset + mt0->base.offset;
   nouveau_bo_ref(buffer->interlaced, &empty);

   mt1->base.bo = buffer->interlaced;
   mt1->base.domain = NOUVEAU_BO_VRAM;
   mt1->base.offset = mt0->layer_stride * 2;
   mt1->base.address = buffer->interlaced->offset + mt1->base.offset;
   nouveau_bo_ref(buffer->interlaced, &empty);

   memset(&sv_templ, 0, sizeof(sv_templ));
   for (component = 0, i = 0; i < 2; ++i) {
      struct pipe_resource *res = buffer->resources[i];
      unsigned nr_components = util_format_get_nr_components(res->format);

      u_sampler_view_default_template(&sv_templ, res, res->format);
      buffer->sampler_view_planes[i] =
         pipe->create_sampler_view(pipe, res, &sv_templ);
      if (!buffer->sampler_view_planes[i])
         goto error;

      for (j = 0; j < nr_components; ++j, ++component) {
         sv_templ.swizzle_r = sv_templ.swizzle_g = sv_templ.swizzle_b =
            PIPE_SWIZZLE_RED + j;
         sv_templ.swizzle_a = PIPE_SWIZZLE_ONE;

         buffer->sampler_view_components[component] =
            pipe->create_sampler_view(pipe, res, &sv_templ);
         if (!buffer->sampler_view_components[component])
            goto error;
      }
   }

   memset(&surf_templ, 0, sizeof(surf_templ));
   for (j = 0; j < 2; ++j) {
      surf_templ.format = buffer->resources[j]->format;
      surf_templ.u.tex.first_layer = surf_templ.u.tex.last_layer = 0;
      buffer->surfaces[j * 2] =
         pipe->create_surface(pipe, buffer->resources[j], &surf_templ);
      if (!buffer->surfaces[j * 2])
         goto error;

      surf_templ.u.tex.first_layer = surf_templ.u.tex.last_layer = 1;
      buffer->surfaces[j * 2 + 1] =
         pipe->create_surface(pipe, buffer->resources[j], &surf_templ);
      if (!buffer->surfaces[j * 2 + 1])
         goto error;
   }

   return &buffer->base;

error:
   nv84_video_buffer_destroy(&buffer->base);
   return NULL;
}

int
nv84_screen_get_video_param(struct pipe_screen *pscreen,
                            enum pipe_video_profile profile,
                            enum pipe_video_entrypoint entrypoint,
                            enum pipe_video_cap param)
{
   switch (param) {
   case PIPE_VIDEO_CAP_SUPPORTED:
      return u_reduce_video_profile(profile) == PIPE_VIDEO_FORMAT_MPEG4_AVC ||
         u_reduce_video_profile(profile) == PIPE_VIDEO_FORMAT_MPEG12;
   case PIPE_VIDEO_CAP_NPOT_TEXTURES:
      return 1;
   case PIPE_VIDEO_CAP_MAX_WIDTH:
   case PIPE_VIDEO_CAP_MAX_HEIGHT:
      return 2048;
   case PIPE_VIDEO_CAP_PREFERED_FORMAT:
      return PIPE_FORMAT_NV12;
   case PIPE_VIDEO_CAP_SUPPORTS_INTERLACED:
   case PIPE_VIDEO_CAP_PREFERS_INTERLACED:
      return true;
   case PIPE_VIDEO_CAP_SUPPORTS_PROGRESSIVE:
      return false;
   case PIPE_VIDEO_CAP_MAX_LEVEL:
      switch (profile) {
      case PIPE_VIDEO_PROFILE_MPEG1:
         return 0;
      case PIPE_VIDEO_PROFILE_MPEG2_SIMPLE:
      case PIPE_VIDEO_PROFILE_MPEG2_MAIN:
         return 3;
      case PIPE_VIDEO_PROFILE_MPEG4_AVC_BASELINE:
      case PIPE_VIDEO_PROFILE_MPEG4_AVC_MAIN:
      case PIPE_VIDEO_PROFILE_MPEG4_AVC_HIGH:
         return 41;
      default:
         debug_printf("unknown video profile: %d\n", profile);
         return 0;
      }
   default:
      debug_printf("unknown video param: %d\n", param);
      return 0;
   }
}

boolean
nv84_screen_video_supported(struct pipe_screen *screen,
                            enum pipe_format format,
                            enum pipe_video_profile profile,
                            enum pipe_video_entrypoint entrypoint)
{
   if (profile != PIPE_VIDEO_PROFILE_UNKNOWN)
      return format == PIPE_FORMAT_NV12;

   return vl_video_buffer_is_format_supported(screen, format, profile, entrypoint);
}