2 #include "util/u_format.h"
4 #include "nvc0/nvc0_context.h"
6 #include "nv50/nv50_defs.xml.h"
/* Per-map bookkeeping for miptree transfers.
 * NOTE(review): the struct header is not visible in this excerpt; these are
 * two of its members (nblocksx/nblocksy/nlayers are also referenced by the
 * transfer functions below). */
struct pipe_transfer base;       /* generic gallium transfer record */
struct nv50_m2mf_rect rect[2];   /* [0] = miptree region, [1] = staging bo */
/* Blit a rectangle of nblocksx x nblocksy blocks between two surfaces with
 * the Fermi M2MF engine, handling linear and tiled layouts on either side.
 * The copy is issued in slices of at most 2047 lines.
 * NOTE(review): this excerpt dropped several lines (return type, braces,
 * the slice loop header, else branches and tiled Y-position writes);
 * gap markers below show where text is missing — restore from VCS. */
nvc0_m2mf_transfer_rect(struct nvc0_context *nvc0,
                        const struct nv50_m2mf_rect *dst,
                        const struct nv50_m2mf_rect *src,
                        uint32_t nblocksx, uint32_t nblocksy)
/* [lines missing from this excerpt] */
struct nouveau_pushbuf *push = nvc0->base.pushbuf;
struct nouveau_bufctx *bctx = nvc0->bufctx;
const int cpp = dst->cpp;            /* bytes per block */
uint32_t src_ofst = src->base;       /* running byte offset into src->bo */
uint32_t dst_ofst = dst->base;       /* running byte offset into dst->bo */
uint32_t height = nblocksy;          /* lines still to copy */
/* [lines missing from this excerpt] */
uint32_t exec = (1 << 20);           /* EXEC word; LINEAR bits OR'd in below */

assert(dst->cpp == src->cpp);        /* both sides must share the block size */

/* Reference both bos for this submission and make them resident. */
nouveau_bufctx_refn(bctx, 0, src->bo, src->domain | NOUVEAU_BO_RD);
nouveau_bufctx_refn(bctx, 0, dst->bo, dst->domain | NOUVEAU_BO_WR);
nouveau_pushbuf_bufctx(push, bctx);
nouveau_pushbuf_validate(push);

if (nouveau_bo_memtype(src->bo)) {
   /* Tiled source: program its tiling parameters. */
   BEGIN_NVC0(push, NVC0_M2MF(TILING_MODE_IN), 5);
   PUSH_DATA (push, src->tile_mode);
   PUSH_DATA (push, src->width * cpp);
   PUSH_DATA (push, src->height);
   PUSH_DATA (push, src->depth);
   PUSH_DATA (push, src->z);
/* [presumably `} else {` here — linear source path; confirm upstream] */
   src_ofst += src->y * src->pitch + src->x * cpp;  /* fold x/y into offset */

   BEGIN_NVC0(push, NVC0_M2MF(PITCH_IN), 1);
   PUSH_DATA (push, src->width * cpp);

   exec |= NVC0_M2MF_EXEC_LINEAR_IN;
/* [lines missing from this excerpt] */

if (nouveau_bo_memtype(dst->bo)) {
   /* Tiled destination: program its tiling parameters. */
   BEGIN_NVC0(push, NVC0_M2MF(TILING_MODE_OUT), 5);
   PUSH_DATA (push, dst->tile_mode);
   PUSH_DATA (push, dst->width * cpp);
   PUSH_DATA (push, dst->height);
   PUSH_DATA (push, dst->depth);
   PUSH_DATA (push, dst->z);
/* [presumably `} else {` here — linear destination path; confirm upstream] */
   dst_ofst += dst->y * dst->pitch + dst->x * cpp;

   BEGIN_NVC0(push, NVC0_M2MF(PITCH_OUT), 1);
   PUSH_DATA (push, dst->width * cpp);

   exec |= NVC0_M2MF_EXEC_LINEAR_OUT;
/* [lines missing from this excerpt] */

/* [slice loop header (over remaining height) missing from this excerpt] */
   int line_count = height > 2047 ? 2047 : height;  /* clamp lines per slice */

   BEGIN_NVC0(push, NVC0_M2MF(OFFSET_IN_HIGH), 2);
   PUSH_DATAh(push, src->bo->offset + src_ofst);
   PUSH_DATA (push, src->bo->offset + src_ofst);

   BEGIN_NVC0(push, NVC0_M2MF(OFFSET_OUT_HIGH), 2);
   PUSH_DATAh(push, dst->bo->offset + dst_ofst);
   PUSH_DATA (push, dst->bo->offset + dst_ofst);

   if (!(exec & NVC0_M2MF_EXEC_LINEAR_IN)) {
      /* Tiled input: start position in bytes (Y write dropped by excerpt). */
      BEGIN_NVC0(push, NVC0_M2MF(TILING_POSITION_IN_X), 2);
      PUSH_DATA (push, src->x * cpp);
/* [lines missing from this excerpt] */
      src_ofst += line_count * src->pitch;  /* linear: advance by this slice */

   if (!(exec & NVC0_M2MF_EXEC_LINEAR_OUT)) {
      BEGIN_NVC0(push, NVC0_M2MF(TILING_POSITION_OUT_X), 2);
      PUSH_DATA (push, dst->x * cpp);
/* [lines missing from this excerpt] */
      dst_ofst += line_count * dst->pitch;

   BEGIN_NVC0(push, NVC0_M2MF(LINE_LENGTH_IN), 2);
   PUSH_DATA (push, nblocksx * cpp);       /* bytes per line */
   PUSH_DATA (push, line_count);           /* lines in this slice */
   BEGIN_NVC0(push, NVC0_M2MF(EXEC), 1);
   PUSH_DATA (push, exec);                 /* kick this slice */

   height -= line_count;
/* [loop tail missing from this excerpt] */

nouveau_bufctx_reset(bctx, 0);
/* Kepler (NVE4+) variant of the rectangle copy, using the dedicated COPY
 * engine (SUBC_COPY) instead of M2MF; issues the whole rect in one shot.
 * NOTE(review): excerpt is missing the return type line, braces and the
 * declaration of `exec`; gap markers show dropped text. */
nve4_m2mf_transfer_rect(struct nvc0_context *nvc0,
                        const struct nv50_m2mf_rect *dst,
                        const struct nv50_m2mf_rect *src,
                        uint32_t nblocksx, uint32_t nblocksy)
/* [lines missing from this excerpt] */
struct nouveau_pushbuf *push = nvc0->base.pushbuf;
struct nouveau_bufctx *bctx = nvc0->bufctx;
uint32_t src_base = src->base;       /* byte offset into src->bo */
uint32_t dst_base = dst->base;       /* byte offset into dst->bo */
const int cpp = dst->cpp;            /* bytes per block */

assert(dst->cpp == src->cpp);

/* Make both bos resident for this submission. */
nouveau_bufctx_refn(bctx, 0, dst->bo, dst->domain | NOUVEAU_BO_WR);
nouveau_bufctx_refn(bctx, 0, src->bo, src->domain | NOUVEAU_BO_RD);
nouveau_pushbuf_bufctx(push, bctx);
nouveau_pushbuf_validate(push);

exec = 0x200 /* 2D_ENABLE */ | 0x6 /* UNK */;

if (!nouveau_bo_memtype(dst->bo)) {
   /* Linear destination: fold x/y into the base offset. */
   dst_base += dst->y * dst->pitch + dst->x * cpp;
   exec |= 0x100; /* DST_MODE_2D_LINEAR */
/* [lines missing from this excerpt] */
if (!nouveau_bo_memtype(src->bo)) {
   /* Linear source: fold x/y into the base offset. */
   src_base += src->y * src->pitch + src->x * cpp;
   exec |= 0x080; /* SRC_MODE_2D_LINEAR */
/* [lines missing from this excerpt] */

/* Destination surface parameters: tile mode, pitch, extent, start pos. */
BEGIN_NVC0(push, SUBC_COPY(0x070c), 6);
PUSH_DATA (push, 0x1000 | dst->tile_mode);
PUSH_DATA (push, dst->pitch);
PUSH_DATA (push, dst->height);
PUSH_DATA (push, dst->depth);
PUSH_DATA (push, dst->z);
PUSH_DATA (push, (dst->y << 16) | (dst->x * cpp));

/* Source surface parameters. */
BEGIN_NVC0(push, SUBC_COPY(0x0728), 6);
PUSH_DATA (push, 0x1000 | src->tile_mode);
PUSH_DATA (push, src->pitch);
PUSH_DATA (push, src->height);
PUSH_DATA (push, src->depth);
PUSH_DATA (push, src->z);
PUSH_DATA (push, (src->y << 16) | (src->x * cpp));

/* Virtual addresses, pitches and extent of the copy. */
BEGIN_NVC0(push, SUBC_COPY(0x0400), 8);
PUSH_DATAh(push, src->bo->offset + src_base);
PUSH_DATA (push, src->bo->offset + src_base);
PUSH_DATAh(push, dst->bo->offset + dst_base);
PUSH_DATA (push, dst->bo->offset + dst_base);
PUSH_DATA (push, src->pitch);
PUSH_DATA (push, dst->pitch);
PUSH_DATA (push, nblocksx * cpp);    /* line length in bytes */
PUSH_DATA (push, nblocksy);          /* line count */

/* Kick the copy. */
BEGIN_NVC0(push, SUBC_COPY(0x0300), 1);
PUSH_DATA (push, exec);

nouveau_bufctx_reset(bctx, 0);
/* Upload `size` bytes from `data` to `dst` at `offset` by inlining the
 * payload into the pushbuf via the M2MF DATA method, in chunks of at most
 * NV04_PFIFO_MAX_PACKET_LEN dwords.
 * NOTE(review): excerpt is missing the chunk loop header, the chunk-advance
 * statements (src/count/offset/size updates) and braces. */
nvc0_m2mf_push_linear(struct nouveau_context *nv,
                      struct nouveau_bo *dst, unsigned offset, unsigned domain,
                      unsigned size, const void *data)
/* [lines missing from this excerpt] */
struct nvc0_context *nvc0 = nvc0_context(&nv->pipe);
struct nouveau_pushbuf *push = nv->pushbuf;
uint32_t *src = (uint32_t *)data;
unsigned count = (size + 3) / 4;     /* dwords to transfer, rounded up */

nouveau_bufctx_refn(nvc0->bufctx, 0, dst, domain | NOUVEAU_BO_WR);
nouveau_pushbuf_bufctx(push, nvc0->bufctx);
nouveau_pushbuf_validate(push);

/* [chunk loop header (over remaining dwords) missing from this excerpt] */
   unsigned nr = MIN2(count, NV04_PFIFO_MAX_PACKET_LEN);

   if (!PUSH_SPACE(push, nr + 9))
/* [lines missing from this excerpt] */

   BEGIN_NVC0(push, NVC0_M2MF(OFFSET_OUT_HIGH), 2);
   PUSH_DATAh(push, dst->offset + offset);
   PUSH_DATA (push, dst->offset + offset);
   BEGIN_NVC0(push, NVC0_M2MF(LINE_LENGTH_IN), 2);
   PUSH_DATA (push, MIN2(size, nr * 4));   /* bytes in this chunk */
/* [second LINE_LENGTH_IN data word missing from this excerpt] */
   BEGIN_NVC0(push, NVC0_M2MF(EXEC), 1);
   PUSH_DATA (push, 0x100111);

   /* must not be interrupted (trap on QUERY fence, 0x50 works however) */
   BEGIN_NIC0(push, NVC0_M2MF(DATA), nr);
   PUSH_DATAp(push, src, nr);              /* inline payload */
/* [chunk advance missing from this excerpt] */

nouveau_bufctx_reset(nvc0->bufctx, 0);
/* Kepler (NVE4+) variant of the inline linear upload, using the P2MF
 * UPLOAD methods; data follows the 0x1001 exec word in one packet.
 * NOTE(review): excerpt is missing the chunk loop header, the chunk-advance
 * statements and braces. */
nve4_p2mf_push_linear(struct nouveau_context *nv,
                      struct nouveau_bo *dst, unsigned offset, unsigned domain,
                      unsigned size, const void *data)
/* [lines missing from this excerpt] */
struct nvc0_context *nvc0 = nvc0_context(&nv->pipe);
struct nouveau_pushbuf *push = nv->pushbuf;
uint32_t *src = (uint32_t *)data;
unsigned count = (size + 3) / 4;     /* dwords to transfer, rounded up */

nouveau_bufctx_refn(nvc0->bufctx, 0, dst, domain | NOUVEAU_BO_WR);
nouveau_pushbuf_bufctx(push, nvc0->bufctx);
nouveau_pushbuf_validate(push);

/* [chunk loop header (over remaining dwords) missing from this excerpt] */
   /* -1: the exec word below shares the packet with the payload. */
   unsigned nr = MIN2(count, (NV04_PFIFO_MAX_PACKET_LEN - 1));

   if (!PUSH_SPACE(push, nr + 10))
/* [lines missing from this excerpt] */

   BEGIN_NVC0(push, NVE4_P2MF(UPLOAD_DST_ADDRESS_HIGH), 2);
   PUSH_DATAh(push, dst->offset + offset);
   PUSH_DATA (push, dst->offset + offset);
   BEGIN_NVC0(push, NVE4_P2MF(UPLOAD_LINE_LENGTH_IN), 2);
   PUSH_DATA (push, MIN2(size, nr * 4));   /* bytes in this chunk */
/* [second UPLOAD_LINE_LENGTH_IN data word missing from this excerpt] */

   /* must not be interrupted (trap on QUERY fence, 0x50 works however) */
   BEGIN_1IC0(push, NVE4_P2MF(UPLOAD_EXEC), nr + 1);
   PUSH_DATA (push, 0x1001);
   PUSH_DATAp(push, src, nr);              /* inline payload */
/* [chunk advance missing from this excerpt] */

nouveau_bufctx_reset(nvc0->bufctx, 0);
/* Linear buffer-to-buffer copy through M2MF, at most 1 << 17 bytes per pass.
 * NOTE(review): excerpt is missing the `unsigned size` parameter line, the
 * pass loop header and the per-pass offset/size updates. */
nvc0_m2mf_copy_linear(struct nouveau_context *nv,
                      struct nouveau_bo *dst, unsigned dstoff, unsigned dstdom,
                      struct nouveau_bo *src, unsigned srcoff, unsigned srcdom,
/* [`unsigned size)` parameter line missing from this excerpt] */
struct nouveau_pushbuf *push = nv->pushbuf;
struct nouveau_bufctx *bctx = nvc0_context(&nv->pipe)->bufctx;

/* Make both bos resident for this submission. */
nouveau_bufctx_refn(bctx, 0, src, srcdom | NOUVEAU_BO_RD);
nouveau_bufctx_refn(bctx, 0, dst, dstdom | NOUVEAU_BO_WR);
nouveau_pushbuf_bufctx(push, bctx);
nouveau_pushbuf_validate(push);

/* [pass loop header (over remaining size) missing from this excerpt] */
   unsigned bytes = MIN2(size, 1 << 17);   /* bytes in this pass */

   BEGIN_NVC0(push, NVC0_M2MF(OFFSET_OUT_HIGH), 2);
   PUSH_DATAh(push, dst->offset + dstoff);
   PUSH_DATA (push, dst->offset + dstoff);
   BEGIN_NVC0(push, NVC0_M2MF(OFFSET_IN_HIGH), 2);
   PUSH_DATAh(push, src->offset + srcoff);
   PUSH_DATA (push, src->offset + srcoff);
   BEGIN_NVC0(push, NVC0_M2MF(LINE_LENGTH_IN), 2);
   PUSH_DATA (push, bytes);
/* [second LINE_LENGTH_IN data word missing from this excerpt] */
   BEGIN_NVC0(push, NVC0_M2MF(EXEC), 1);
   PUSH_DATA (push, NVC0_M2MF_EXEC_QUERY_SHORT |
              NVC0_M2MF_EXEC_LINEAR_IN | NVC0_M2MF_EXEC_LINEAR_OUT);
/* [per-pass offset/size advance missing from this excerpt] */

nouveau_bufctx_reset(bctx, 0);
/* Kepler (NVE4+) linear buffer-to-buffer copy via the COPY engine,
 * issued in a single shot.
 * NOTE(review): excerpt is missing the `unsigned size` parameter line and
 * the function braces. */
nve4_m2mf_copy_linear(struct nouveau_context *nv,
                      struct nouveau_bo *dst, unsigned dstoff, unsigned dstdom,
                      struct nouveau_bo *src, unsigned srcoff, unsigned srcdom,
/* [`unsigned size)` parameter line missing from this excerpt] */
struct nouveau_pushbuf *push = nv->pushbuf;
struct nouveau_bufctx *bctx = nvc0_context(&nv->pipe)->bufctx;

/* Make both bos resident for this submission. */
nouveau_bufctx_refn(bctx, 0, src, srcdom | NOUVEAU_BO_RD);
nouveau_bufctx_refn(bctx, 0, dst, dstdom | NOUVEAU_BO_WR);
nouveau_pushbuf_bufctx(push, bctx);
nouveau_pushbuf_validate(push);

/* Source and destination virtual addresses. */
BEGIN_NVC0(push, SUBC_COPY(0x0400), 4);
PUSH_DATAh(push, src->offset + srcoff);
PUSH_DATA (push, src->offset + srcoff);
PUSH_DATAh(push, dst->offset + dstoff);
PUSH_DATA (push, dst->offset + dstoff);
/* Byte count. */
BEGIN_NVC0(push, SUBC_COPY(0x0418), 1);
PUSH_DATA (push, size);
/* Kick; 0x186 is an undocumented exec value (cf. nve4 rect variant). */
BEGIN_NVC0(push, SUBC_COPY(0x0300), 1);
PUSH_DATA (push, 0x186);

nouveau_bufctx_reset(bctx, 0);
/* Whether a miptree bo can be handed to the CPU directly: it must not live
 * in VRAM, must have staging usage, and must be linear (no memtype).
 * NOTE(review): excerpt is missing the return type, braces and the bodies
 * of both guards (presumably `return false;` each — confirm upstream). */
nvc0_mt_transfer_can_map_directly(struct nv50_miptree *mt)
/* [lines missing from this excerpt] */
if (mt->base.domain == NOUVEAU_BO_VRAM)
/* [guard body missing from this excerpt] */
if (mt->base.base.usage != PIPE_USAGE_STAGING)
/* [guard body missing from this excerpt] */
return !nouveau_bo_memtype(mt->base.bo);
/* Wait until the miptree's bo is safe to access for `usage`; the result is
 * negated because the wait helpers report 0 on success — TODO confirm.
 * Writers wait on any prior access (fence); readers only on prior writes
 * (fence_wr).
 * NOTE(review): excerpt is missing braces and the condition selecting
 * between the bo-wait path and the fence-wait path. */
nvc0_mt_sync(struct nvc0_context *nvc0, struct nv50_miptree *mt, unsigned usage)
/* [lines missing from this excerpt] */
uint32_t access = (usage & PIPE_TRANSFER_WRITE) ?
   NOUVEAU_BO_WR : NOUVEAU_BO_RD;
return !nouveau_bo_wait(mt->base.bo, access, nvc0->base.client);
/* [lines missing from this excerpt] */
if (usage & PIPE_TRANSFER_WRITE)
   return !mt->base.fence || nouveau_fence_wait(mt->base.fence);
return !mt->base.fence_wr || nouveau_fence_wait(mt->base.fence_wr);
/* Map a miptree for CPU access.  Eligible staging resources are mapped
 * directly; otherwise a CPU-mappable GART staging bo (rect[1]) is allocated
 * and, for reads, each requested layer is copied into it via
 * nvc0->m2mf_copy_rect.
 * NOTE(review): excerpt is missing the `level`/`usage` parameter lines, the
 * declarations of ret/size/flags/i, error checks and most braces. */
nvc0_miptree_transfer_map(struct pipe_context *pctx,
                          struct pipe_resource *res,
/* [level / usage parameter lines missing from this excerpt] */
                          const struct pipe_box *box,
                          struct pipe_transfer **ptransfer)
/* [lines missing from this excerpt] */
struct nvc0_context *nvc0 = nvc0_context(pctx);
struct nouveau_device *dev = nvc0->screen->base.device;
struct nv50_miptree *mt = nv50_miptree(res);
struct nvc0_transfer *tx;
/* [local declarations missing from this excerpt] */

if (nvc0_mt_transfer_can_map_directly(mt)) {
   /* Direct path: wait for pending GPU access, then CPU-map the bo. */
   ret = !nvc0_mt_sync(nvc0, mt, usage);
/* [lines missing from this excerpt] */
      ret = nouveau_bo_map(mt->base.bo, 0, NULL);
/* [lines missing from this excerpt] */
       (usage & PIPE_TRANSFER_MAP_DIRECTLY))
/* [lines missing from this excerpt] */
      usage |= PIPE_TRANSFER_MAP_DIRECTLY;
/* [lines missing from this excerpt] */
if (usage & PIPE_TRANSFER_MAP_DIRECTLY)
/* [lines missing from this excerpt] */

tx = CALLOC_STRUCT(nvc0_transfer);
/* [allocation-failure check missing from this excerpt] */

pipe_resource_reference(&tx->base.resource, res);

tx->base.level = level;
tx->base.usage = usage;
/* [lines missing from this excerpt] */

if (util_format_is_plain(res->format)) {
   /* Plain formats: scale the block counts by the MSAA sample layout. */
   tx->nblocksx = box->width << mt->ms_x;
   tx->nblocksy = box->height << mt->ms_y;
/* [presumably `} else {` — compressed-format path; confirm upstream] */
   tx->nblocksx = util_format_get_nblocksx(res->format, box->width);
   tx->nblocksy = util_format_get_nblocksy(res->format, box->height);
/* [lines missing from this excerpt] */
tx->nlayers = box->depth;

tx->base.stride = tx->nblocksx * util_format_get_blocksize(res->format);
tx->base.layer_stride = tx->nblocksy * tx->base.stride;

if (usage & PIPE_TRANSFER_MAP_DIRECTLY) {
   tx->base.stride = align(tx->base.stride, 128);
   *ptransfer = &tx->base;
   return mt->base.bo->map + mt->base.offset;
/* [lines missing from this excerpt] */

/* rect[0] describes the requested region inside the miptree. */
nv50_m2mf_rect_setup(&tx->rect[0], res, level, box->x, box->y, box->z);

size = tx->base.layer_stride;

/* rect[1] is a CPU-mappable GART staging bo sized for all layers. */
ret = nouveau_bo_new(dev, NOUVEAU_BO_GART | NOUVEAU_BO_MAP, 0,
                     size * tx->nlayers, NULL, &tx->rect[1].bo);
/* [error check missing from this excerpt] */
   pipe_resource_reference(&tx->base.resource, NULL);
/* [lines missing from this excerpt] */

tx->rect[1].cpp = tx->rect[0].cpp;
tx->rect[1].width = tx->nblocksx;
tx->rect[1].height = tx->nblocksy;
tx->rect[1].depth = 1;
tx->rect[1].pitch = tx->base.stride;
tx->rect[1].domain = NOUVEAU_BO_GART;

if (usage & PIPE_TRANSFER_READ) {
   /* Copy each layer from the miptree into the staging bo. */
   unsigned base = tx->rect[0].base;
   unsigned z = tx->rect[0].z;
/* [lines missing from this excerpt] */
   for (i = 0; i < tx->nlayers; ++i) {
      nvc0->m2mf_copy_rect(nvc0, &tx->rect[1], &tx->rect[0],
                           tx->nblocksx, tx->nblocksy);
/* [per-layer z/base advance partially missing from this excerpt] */
         tx->rect[0].base += mt->layer_stride;
      tx->rect[1].base += size;
/* [lines missing from this excerpt] */
   tx->rect[0].base = base;   /* restore for the unmap-time writeback */
   tx->rect[1].base = 0;
/* [lines missing from this excerpt] */

if (tx->rect[1].bo->map) {
   /* Staging bo is already mapped — return that pointer. */
   *ptransfer = &tx->base;
   return tx->rect[1].bo->map;
/* [lines missing from this excerpt] */

if (usage & PIPE_TRANSFER_READ)
   flags = NOUVEAU_BO_RD;
if (usage & PIPE_TRANSFER_WRITE)
   flags |= NOUVEAU_BO_WR;

ret = nouveau_bo_map(tx->rect[1].bo, flags, nvc0->screen->base.client);
/* [error check missing from this excerpt — cleanup follows] */
   pipe_resource_reference(&tx->base.resource, NULL);
   nouveau_bo_ref(NULL, &tx->rect[1].bo);
/* [lines missing from this excerpt] */

*ptransfer = &tx->base;
return tx->rect[1].bo->map;
/* Unmap a miptree transfer: for write maps, copy the staging bo back into
 * the miptree layer by layer, then release the staging bo and the transfer.
 * NOTE(review): excerpt is missing the declaration of `i`, early returns,
 * FREE(tx) and most braces. */
nvc0_miptree_transfer_unmap(struct pipe_context *pctx,
                            struct pipe_transfer *transfer)
/* [lines missing from this excerpt] */
struct nvc0_context *nvc0 = nvc0_context(pctx);
struct nvc0_transfer *tx = (struct nvc0_transfer *)transfer;
struct nv50_miptree *mt = nv50_miptree(tx->base.resource);
/* [lines missing from this excerpt] */

if (tx->base.usage & PIPE_TRANSFER_MAP_DIRECTLY) {
   /* Direct maps have no staging copy; just drop the reference. */
   pipe_resource_reference(&transfer->resource, NULL);
/* [early return missing from this excerpt] */

if (tx->base.usage & PIPE_TRANSFER_WRITE) {
   for (i = 0; i < tx->nlayers; ++i) {
      /* Write the staging data back: rect[1] -> rect[0]. */
      nvc0->m2mf_copy_rect(nvc0, &tx->rect[0], &tx->rect[1],
                           tx->nblocksx, tx->nblocksy);
/* [per-layer z/base advance partially missing from this excerpt] */
         tx->rect[0].base += mt->layer_stride;
      tx->rect[1].base += tx->nblocksy * tx->base.stride;
/* [lines missing from this excerpt] */
   NOUVEAU_DRV_STAT(&nvc0->screen->base, tex_transfers_wr, 1);

   /* Allow the copies above to finish executing before freeing the source */
   nouveau_fence_work(nvc0->screen->base.fence.current,
                      nouveau_fence_unref_bo, tx->rect[1].bo);
/* [presumably `} else {` — non-write maps drop the bo directly] */
   nouveau_bo_ref(NULL, &tx->rect[1].bo);
/* [lines missing from this excerpt] */

if (tx->base.usage & PIPE_TRANSFER_READ)
   NOUVEAU_DRV_STAT(&nvc0->screen->base, tex_transfers_rd, 1);

pipe_resource_reference(&transfer->resource, NULL);
/* [FREE(tx) missing from this excerpt] */
/* This happens rather often with D3D9/st. */
/* Update a range of a buffer currently bound as a constant buffer.  If the
 * range lies fully inside one bound constbuf window, update it through the
 * 3D engine's CB path (nvc0_cb_bo_push); otherwise fall back to the generic
 * push_data upload.
 * NOTE(review): excerpt is missing braces, the declaration of `s`, the
 * inner loop header over `bindings` and the tail of the final
 * nv->push_data(...) call. */
nvc0_cb_push(struct nouveau_context *nv,
             struct nv04_resource *res,
             unsigned offset, unsigned words, const uint32_t *data)
/* [lines missing from this excerpt] */
struct nvc0_context *nvc0 = nvc0_context(&nv->pipe);
struct nvc0_constbuf *cb = NULL;

/* Go through all the constbuf binding points of this buffer and try to
 * find one which contains the region to be updated. */
for (s = 0; s < 6 && !cb; s++) {
   uint16_t bindings = res->cb_bindings[s];   /* bitmask of bound slots */
/* [inner loop header over `bindings` missing from this excerpt] */
      int i = ffs(bindings) - 1;              /* lowest set slot index */
      uint32_t cb_offset = nvc0->constbuf[s][i].offset;

      bindings &= ~(1 << i);
      if (cb_offset <= offset &&
          cb_offset + nvc0->constbuf[s][i].size >= offset + words * 4) {
         cb = &nvc0->constbuf[s][i];          /* found an enclosing window */
/* [lines missing from this excerpt] */

   nvc0_cb_bo_push(nv, res->bo, res->domain,
                   res->offset + cb->offset, cb->size,
                   offset - cb->offset, words, data);
/* [presumably `} else {` — generic upload fallback] */
   nv->push_data(nv, res->bo, res->offset + offset, res->domain,
/* [remainder of the call and closing braces missing from this excerpt] */
/* Upload `words` dwords into a constant buffer through the 3D engine's
 * CB_SIZE/CB_ADDRESS/CB_POS methods, chunked to the pushbuf packet limit.
 * `base`/`size` describe the bound CB window; `offset` is relative to it.
 * NOTE(review): excerpt is missing braces and the chunk loop header /
 * chunk-advance statements. */
nvc0_cb_bo_push(struct nouveau_context *nv,
                struct nouveau_bo *bo, unsigned domain,
                unsigned base, unsigned size,
                unsigned offset, unsigned words, const uint32_t *data)
/* [lines missing from this excerpt] */
struct nouveau_pushbuf *push = nv->pushbuf;

NOUVEAU_DRV_STAT(nv->screen, constbuf_upload_count, 1);
NOUVEAU_DRV_STAT(nv->screen, constbuf_upload_bytes, words * 4);

assert(!(offset & 3));                  /* updates are dword-aligned */
size = align(size, 0x100);              /* round the window up to 0x100 */

assert(offset < size);
assert(offset + words * 4 <= size);     /* range must fit the window */

/* (Re)bind the CB window before poking data into it. */
BEGIN_NVC0(push, NVC0_3D(CB_SIZE), 3);
PUSH_DATA (push, size);
PUSH_DATAh(push, bo->offset + base);
PUSH_DATA (push, bo->offset + base);

/* [chunk loop header (over remaining words) missing from this excerpt] */
   /* -1: CB_POS takes one data slot of the packet. */
   unsigned nr = MIN2(words, NV04_PFIFO_MAX_PACKET_LEN - 1);

   PUSH_SPACE(push, nr + 2);
   PUSH_REFN (push, bo, NOUVEAU_BO_WR | domain);
   BEGIN_1IC0(push, NVC0_3D(CB_POS), nr + 1);
   PUSH_DATA (push, offset);            /* write position within the CB */
   PUSH_DATAp(push, data, nr);          /* inline payload */
/* [chunk advance (words/data/offset updates) missing from this excerpt] */
/* Hook up the transfer helpers: Kepler (NVE4+) uses the COPY/P2MF engines,
 * older (Fermi) chips use M2MF.
 * NOTE(review): excerpt is missing the function braces and the `} else {`
 * between the two assignment groups. */
nvc0_init_transfer_functions(struct nvc0_context *nvc0)
/* [lines missing from this excerpt] */
if (nvc0->screen->base.class_3d >= NVE4_3D_CLASS) {
   nvc0->m2mf_copy_rect = nve4_m2mf_transfer_rect;
   nvc0->base.copy_data = nve4_m2mf_copy_linear;
   nvc0->base.push_data = nve4_p2mf_push_linear;
/* [presumably `} else {` here] */
   nvc0->m2mf_copy_rect = nvc0_m2mf_transfer_rect;
   nvc0->base.copy_data = nvc0_m2mf_copy_linear;
   nvc0->base.push_data = nvc0_m2mf_push_linear;
/* [lines missing from this excerpt] */
nvc0->base.push_cb = nvc0_cb_push;