src/gallium/state_trackers/nine/surface9.c
/*
 * Copyright 2011 Joakim Sindholt <opensource@zhasha.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE. */

#include "iunknown.h"
#include "surface9.h"
#include "device9.h"

/* for marking dirty */
#include "basetexture9.h"
#include "texture9.h"
#include "cubetexture9.h"

#include "nine_helpers.h"
#include "nine_pipe.h"
#include "nine_dump.h"
#include "nine_state.h"

#include "pipe/p_context.h"
#include "pipe/p_screen.h"
#include "pipe/p_state.h"

#include "util/u_math.h"
#include "util/u_inlines.h"
#include "util/u_surface.h"

#define DBG_CHANNEL DBG_SURFACE
static void
NineSurface9_CreatePipeSurfaces( struct NineSurface9 *This );

HRESULT
NineSurface9_ctor( struct NineSurface9 *This,
                   struct NineUnknownParams *pParams,
                   struct NineUnknown *pContainer,
                   struct pipe_resource *pResource,
                   void *user_buffer,
                   uint8_t TextureType,
                   unsigned Level,
                   unsigned Layer,
                   D3DSURFACE_DESC *pDesc )
{
    HRESULT hr;
    bool allocate = !pContainer && pDesc->Format != D3DFMT_NULL;
    D3DMULTISAMPLE_TYPE multisample_type;

    DBG("This=%p pDevice=%p pResource=%p Level=%u Layer=%u pDesc=%p\n",
        This, pParams->device, pResource, Level, Layer, pDesc);

    /* Mark this as a special surface held by another internal resource. */
    pParams->container = pContainer;
    /* Make sure there's a Desc */
    assert(pDesc);

    assert(allocate || pResource || user_buffer ||
           pDesc->Format == D3DFMT_NULL);
    assert(!allocate || (!pResource && !user_buffer));
    assert(!pResource || !user_buffer);
    assert(!user_buffer || pDesc->Pool != D3DPOOL_DEFAULT);
    assert(!pResource || pDesc->Pool == D3DPOOL_DEFAULT);
    /* Allocation only from create_zs_or_rt_surface with params 0 0 0 */
    assert(!allocate || (Level == 0 && Layer == 0 && TextureType == 0));

    This->data = (uint8_t *)user_buffer;

    multisample_type = pDesc->MultiSampleType;

    /* Map MultiSampleQuality to MultiSampleType */
    hr = d3dmultisample_type_check(pParams->device->screen,
                                   pDesc->Format,
                                   &multisample_type,
                                   pDesc->MultiSampleQuality,
                                   NULL);
    if (FAILED(hr)) {
        return hr;
    }
    /* TODO: this duplicates the container info (except width and height)
     * in the pContainer case. Some refactoring is needed to avoid the
     * duplication. */
    This->base.info.screen = pParams->device->screen;
    This->base.info.target = PIPE_TEXTURE_2D;
    This->base.info.width0 = pDesc->Width;
    This->base.info.height0 = pDesc->Height;
    This->base.info.depth0 = 1;
    This->base.info.last_level = 0;
    This->base.info.array_size = 1;
    This->base.info.nr_samples = multisample_type;
    This->base.info.nr_storage_samples = multisample_type;
    This->base.info.usage = PIPE_USAGE_DEFAULT;
    This->base.info.bind = PIPE_BIND_SAMPLER_VIEW; /* StretchRect */

    if (pDesc->Usage & D3DUSAGE_RENDERTARGET) {
        This->base.info.bind |= PIPE_BIND_RENDER_TARGET;
    } else if (pDesc->Usage & D3DUSAGE_DEPTHSTENCIL) {
        if (!depth_stencil_format(pDesc->Format))
            return D3DERR_INVALIDCALL;
        This->base.info.bind = d3d9_get_pipe_depth_format_bindings(pDesc->Format);
        if (TextureType)
            This->base.info.bind |= PIPE_BIND_SAMPLER_VIEW;
    }

    This->base.info.flags = 0;
    This->base.info.format = d3d9_to_pipe_format_checked(This->base.info.screen,
                                                         pDesc->Format,
                                                         This->base.info.target,
                                                         This->base.info.nr_samples,
                                                         This->base.info.bind,
                                                         FALSE,
                                                         pDesc->Pool == D3DPOOL_SCRATCH);

    if (This->base.info.format == PIPE_FORMAT_NONE && pDesc->Format != D3DFMT_NULL)
        return D3DERR_INVALIDCALL;

    if (allocate && compressed_format(pDesc->Format)) {
        const unsigned w = util_format_get_blockwidth(This->base.info.format);
        const unsigned h = util_format_get_blockheight(This->base.info.format);

        /* Note: In the !allocate case, the test could fail (lower levels of a texture) */
        user_assert(!(pDesc->Width % w) && !(pDesc->Height % h), D3DERR_INVALIDCALL);
    }
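    /* When the advertised D3D9 format has to be emulated with a different
     * pipe format (or when the dynamic-texture workaround is active), the
     * surface keeps a shadow system-memory copy in format_internal. Locks
     * then return that copy, and UnlockRect converts or uploads it. */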
    /* Get true format */
    This->format_internal = d3d9_to_pipe_format_checked(This->base.info.screen,
                                                        pDesc->Format,
                                                        This->base.info.target,
                                                        This->base.info.nr_samples,
                                                        This->base.info.bind,
                                                        FALSE,
                                                        TRUE);
    if (This->base.info.format != This->format_internal ||
        /* DYNAMIC textures require the same stride as the RAM buffers.
         * Do not enable the workaround by default, as it eats more virtual
         * address space. */
        (pParams->device->workarounds.dynamic_texture_workaround &&
         pDesc->Pool == D3DPOOL_DEFAULT && pDesc->Usage & D3DUSAGE_DYNAMIC)) {
        This->data_internal = align_calloc(
            nine_format_get_level_alloc_size(This->format_internal,
                                             pDesc->Width,
                                             pDesc->Height,
                                             0), 32);
        if (!This->data_internal)
            return E_OUTOFMEMORY;
        This->stride_internal = nine_format_get_stride(This->format_internal,
                                                       pDesc->Width);
    }

    if ((allocate && pDesc->Pool != D3DPOOL_DEFAULT) || pDesc->Format == D3DFMT_NULL) {
        /* RAM buffer with no parent: the surface has to allocate the storage itself. */
        assert(!user_buffer);
        This->data = align_calloc(
            nine_format_get_level_alloc_size(This->base.info.format,
                                             pDesc->Width,
                                             pDesc->Height,
                                             0), 32);
        if (!This->data)
            return E_OUTOFMEMORY;
    }
    hr = NineResource9_ctor(&This->base, pParams, pResource,
                            allocate && (pDesc->Pool == D3DPOOL_DEFAULT),
                            D3DRTYPE_SURFACE, pDesc->Pool, pDesc->Usage);

    if (FAILED(hr))
        return hr;

    This->transfer = NULL;

    This->texture = TextureType;
    This->level = Level;
    This->level_actual = Level;
    This->layer = Layer;
    This->desc = *pDesc;

    This->stride = nine_format_get_stride(This->base.info.format, pDesc->Width);

    if (This->base.resource && (pDesc->Usage & D3DUSAGE_DYNAMIC))
        This->base.resource->flags |= NINE_RESOURCE_FLAG_LOCKABLE;

    if (This->base.resource && (pDesc->Usage & (D3DUSAGE_RENDERTARGET | D3DUSAGE_DEPTHSTENCIL)))
        NineSurface9_CreatePipeSurfaces(This);

    /* TODO: investigate what else exactly needs to be cleared */
    if (This->base.resource && (pDesc->Usage & D3DUSAGE_RENDERTARGET))
        nine_context_clear_render_target(pParams->device, This, 0, 0, 0, pDesc->Width, pDesc->Height);

    NineSurface9_Dump(This);

    return D3D_OK;
}

void
NineSurface9_dtor( struct NineSurface9 *This )
{
    DBG("This=%p\n", This);

    if (This->transfer) {
        struct pipe_context *pipe = nine_context_get_pipe_multithread(This->base.base.device);
        pipe->transfer_unmap(pipe, This->transfer);
        This->transfer = NULL;
    }

    /* Note: The following condition cannot happen currently, since we
     * refcount the surface in the functions that increase
     * pending_uploads_counter. */
    if (p_atomic_read(&This->pending_uploads_counter))
        nine_csmt_process(This->base.base.device);

    pipe_surface_reference(&This->surface[0], NULL);
    pipe_surface_reference(&This->surface[1], NULL);

    /* Release system memory when we have to manage it (no parent) */
    if (!This->base.base.container && This->data)
        align_free(This->data);
    if (This->data_internal)
        align_free(This->data_internal);
    NineResource9_dtor(&This->base);
}
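/* Create the pipe_surface views used for rendering: surface[0] uses the
 * resource format, surface[1] its sRGB variant (falling back to the plain
 * format when the screen does not support the sRGB one). */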
static void
NineSurface9_CreatePipeSurfaces( struct NineSurface9 *This )
{
    struct pipe_context *pipe;
    struct pipe_screen *screen = NineDevice9_GetScreen(This->base.base.device);
    struct pipe_resource *resource = This->base.resource;
    struct pipe_surface templ;
    enum pipe_format srgb_format;

    assert(This->desc.Pool == D3DPOOL_DEFAULT);
    assert(resource);

    srgb_format = util_format_srgb(resource->format);
    if (srgb_format == PIPE_FORMAT_NONE ||
        !screen->is_format_supported(screen, srgb_format,
                                     resource->target, 0, 0, resource->bind))
        srgb_format = resource->format;

    memset(&templ, 0, sizeof(templ));
    templ.format = resource->format;
    templ.u.tex.level = This->level;
    templ.u.tex.first_layer = This->layer;
    templ.u.tex.last_layer = This->layer;

    pipe = nine_context_get_pipe_acquire(This->base.base.device);

    This->surface[0] = pipe->create_surface(pipe, resource, &templ);

    memset(&templ, 0, sizeof(templ));
    templ.format = srgb_format;
    templ.u.tex.level = This->level;
    templ.u.tex.first_layer = This->layer;
    templ.u.tex.last_layer = This->layer;

    This->surface[1] = pipe->create_surface(pipe, resource, &templ);

    nine_context_get_pipe_release(This->base.base.device);

    assert(This->surface[0]); /* TODO: Handle failure */
    assert(This->surface[1]);
}

#if defined(DEBUG) || !defined(NDEBUG)
void
NineSurface9_Dump( struct NineSurface9 *This )
{
    struct NineBaseTexture9 *tex;
    GUID id = IID_IDirect3DBaseTexture9;
    REFIID ref = &id;

    DBG("\nNineSurface9(%p->%p/%p): Pool=%s Type=%s Usage=%s\n"
        "Dims=%ux%u Format=%s Stride=%u Lockable=%i\n"
        "Level=%u(%u), Layer=%u\n", This, This->base.resource, This->data,
        nine_D3DPOOL_to_str(This->desc.Pool),
        nine_D3DRTYPE_to_str(This->desc.Type),
        nine_D3DUSAGE_to_str(This->desc.Usage),
        This->desc.Width, This->desc.Height,
        d3dformat_to_string(This->desc.Format), This->stride,
        This->base.resource &&
        (This->base.resource->flags & NINE_RESOURCE_FLAG_LOCKABLE),
        This->level, This->level_actual, This->layer);

    if (!This->base.base.container)
        return;
    NineUnknown_QueryInterface(This->base.base.container, ref, (void **)&tex);
    if (tex) {
        NineBaseTexture9_Dump(tex);
        NineUnknown_Release(NineUnknown(tex));
    }
}
#endif /* DEBUG || !NDEBUG */
HRESULT NINE_WINAPI
NineSurface9_GetContainer( struct NineSurface9 *This,
                           REFIID riid,
                           void **ppContainer )
{
    HRESULT hr;
    char guid_str[64];

    DBG("This=%p riid=%p id=%s ppContainer=%p\n",
        This, riid, riid ? GUID_sprintf(guid_str, riid) : "", ppContainer);

    (void)guid_str;

    if (!ppContainer) return E_POINTER;

    /* Return device for OffscreenPlainSurface, DepthStencilSurface and RenderTarget */
    if (!NineUnknown(This)->container) {
        *ppContainer = NineUnknown(This)->device;
        NineUnknown_AddRef(NineUnknown(*ppContainer));

        return D3D_OK;
    }

    hr = NineUnknown_QueryInterface(NineUnknown(This)->container, riid, ppContainer);
    if (FAILED(hr))
        DBG("QueryInterface FAILED!\n");
    return hr;
}
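/* Propagate the modification to the parent texture: mark a MANAGED
 * texture dirty so it gets re-uploaded, or schedule mipmap regeneration
 * for AUTOGENMIPMAP textures. */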
void
NineSurface9_MarkContainerDirty( struct NineSurface9 *This )
{
    if (This->texture) {
        struct NineBaseTexture9 *tex =
            NineBaseTexture9(This->base.base.container);
        assert(tex);
        assert(This->texture == D3DRTYPE_TEXTURE ||
               This->texture == D3DRTYPE_CUBETEXTURE);
        if (This->base.pool == D3DPOOL_MANAGED)
            tex->managed.dirty = TRUE;
        else
        if (This->base.usage & D3DUSAGE_AUTOGENMIPMAP)
            tex->dirty_mip = TRUE;

        BASETEX_REGISTER_UPDATE(tex);
    }
}

HRESULT NINE_WINAPI
NineSurface9_GetDesc( struct NineSurface9 *This,
                      D3DSURFACE_DESC *pDesc )
{
    user_assert(pDesc != NULL, E_POINTER);
    *pDesc = This->desc;
    return D3D_OK;
}

/* Add the dirty rects to the source texture */
inline void
NineSurface9_AddDirtyRect( struct NineSurface9 *This,
                           const struct pipe_box *box )
{
    RECT dirty_rect;

    DBG("This=%p box=%p\n", This, box);

    assert (This->base.pool != D3DPOOL_MANAGED ||
            This->texture == D3DRTYPE_CUBETEXTURE ||
            This->texture == D3DRTYPE_TEXTURE);

    if (This->base.pool == D3DPOOL_DEFAULT)
        return;

    /* Add a dirty rect to level 0 of the parent texture */
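    /* (box is given in this level's coordinates; shifting by level_actual
     * scales it up to level 0 coordinates) */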
    dirty_rect.left = box->x << This->level_actual;
    dirty_rect.right = dirty_rect.left + (box->width << This->level_actual);
    dirty_rect.top = box->y << This->level_actual;
    dirty_rect.bottom = dirty_rect.top + (box->height << This->level_actual);

    if (This->texture == D3DRTYPE_TEXTURE) {
        struct NineTexture9 *tex =
            NineTexture9(This->base.base.container);

        NineTexture9_AddDirtyRect(tex, &dirty_rect);
    } else if (This->texture == D3DRTYPE_CUBETEXTURE) {
        struct NineCubeTexture9 *ctex =
            NineCubeTexture9(This->base.base.container);

        NineCubeTexture9_AddDirtyRect(ctex, This->layer, &dirty_rect);
    }
}
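/* Byte offset of pixel (x, y) in a linear system-memory layout with the
 * given row stride, computed in units of format blocks so it also works
 * for block-compressed formats. For example, with a DXT1-like 4x4 block
 * format (8 bytes per block) and a 512-byte stride, (x=8, y=4) maps to
 * 1 * 512 + 16 = 528 bytes. */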
static inline unsigned
NineSurface9_GetSystemMemOffset(enum pipe_format format, unsigned stride,
                                int x, int y)
{
    unsigned x_offset = util_format_get_stride(format, x);

    y = util_format_get_nblocksy(format, y);

    return y * stride + x_offset;
}
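/* Usage sketch (illustrative only, assuming a valid surface pointer surf):
 *
 *     D3DLOCKED_RECT lr;
 *     if (SUCCEEDED(NineSurface9_LockRect(surf, &lr, NULL, D3DLOCK_READONLY))) {
 *         // rows start every lr.Pitch bytes from lr.pBits
 *         NineSurface9_UnlockRect(surf);
 *     }
 */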
HRESULT NINE_WINAPI
NineSurface9_LockRect( struct NineSurface9 *This,
                       D3DLOCKED_RECT *pLockedRect,
                       const RECT *pRect,
                       DWORD Flags )
{
    struct pipe_resource *resource = This->base.resource;
    struct pipe_context *pipe;
    struct pipe_box box;
    unsigned usage;

    DBG("This=%p pLockedRect=%p pRect=%p[%u..%u,%u..%u] Flags=%s\n", This,
        pLockedRect, pRect,
        pRect ? pRect->left : 0, pRect ? pRect->right : 0,
        pRect ? pRect->top : 0, pRect ? pRect->bottom : 0,
        nine_D3DLOCK_to_str(Flags));
    NineSurface9_Dump(This);

    /* check if it's already locked */
    user_assert(This->lock_count == 0, D3DERR_INVALIDCALL);

    /* set pBits to NULL after lock_count check */
    user_assert(pLockedRect, E_POINTER);
    pLockedRect->pBits = NULL;

#ifdef NINE_STRICT
    user_assert(This->base.pool != D3DPOOL_DEFAULT ||
                (resource && (resource->flags & NINE_RESOURCE_FLAG_LOCKABLE)),
                D3DERR_INVALIDCALL);
#endif
    user_assert(!(Flags & ~(D3DLOCK_DISCARD |
                            D3DLOCK_DONOTWAIT |
                            D3DLOCK_NO_DIRTY_UPDATE |
                            D3DLOCK_NOOVERWRITE |
                            D3DLOCK_NOSYSLOCK | /* ignored */
                            D3DLOCK_READONLY)), D3DERR_INVALIDCALL);
    user_assert(!((Flags & D3DLOCK_DISCARD) && (Flags & D3DLOCK_READONLY)),
                D3DERR_INVALIDCALL);

    user_assert(This->desc.MultiSampleType == D3DMULTISAMPLE_NONE,
                D3DERR_INVALIDCALL);

    if (pRect && This->desc.Pool == D3DPOOL_DEFAULT &&
        util_format_is_compressed(This->base.info.format)) {
        const unsigned w = util_format_get_blockwidth(This->base.info.format);
        const unsigned h = util_format_get_blockheight(This->base.info.format);
        user_assert((pRect->left == 0 && pRect->right == This->desc.Width &&
                     pRect->top == 0 && pRect->bottom == This->desc.Height) ||
                    (!(pRect->left % w) && !(pRect->right % w) &&
                     !(pRect->top % h) && !(pRect->bottom % h)),
                    D3DERR_INVALIDCALL);
    }
    if (Flags & D3DLOCK_DISCARD) {
        usage = PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD_RANGE;
    } else {
        usage = (Flags & D3DLOCK_READONLY) ?
            PIPE_TRANSFER_READ : PIPE_TRANSFER_READ_WRITE;
    }
    if (Flags & D3DLOCK_DONOTWAIT)
        usage |= PIPE_TRANSFER_DONTBLOCK;

    if (pRect) {
        /* Windows XP accepts invalid locking rectangles, Windows 7 rejects
         * them. Use Windows XP behaviour for now. */
        rect_to_pipe_box(&box, pRect);
    } else {
        u_box_origin_2d(This->desc.Width, This->desc.Height, &box);
    }
    box.z = This->layer;

    user_warn(This->desc.Format == D3DFMT_NULL);

    if (p_atomic_read(&This->pending_uploads_counter))
        nine_csmt_process(This->base.base.device);

    if (This->data_internal || This->data) {
        enum pipe_format format = This->base.info.format;
        unsigned stride = This->stride;
        uint8_t *data = This->data;
        if (This->data_internal) {
            format = This->format_internal;
            stride = This->stride_internal;
            data = This->data_internal;
        }
        DBG("returning system memory\n");
        /* ATI1 and ATI2 need special handling, because of a d3d9 bug.
         * We must advertise them to the application as if they were
         * uncompressed and 8 bpp; applications carry a workaround to
         * cope with the fact that the data is actually compressed. */
        if (is_ATI1_ATI2(format)) {
            pLockedRect->Pitch = This->desc.Width;
            pLockedRect->pBits = data + box.y * This->desc.Width + box.x;
        } else {
            pLockedRect->Pitch = stride;
            pLockedRect->pBits = data +
                NineSurface9_GetSystemMemOffset(format,
                                                stride,
                                                box.x,
                                                box.y);
        }
    } else {
        bool no_refs = !p_atomic_read(&This->base.base.bind) &&
            !(This->base.base.container && p_atomic_read(&This->base.base.container->bind));
        DBG("mapping pipe_resource %p (level=%u usage=%x)\n",
            resource, This->level, usage);

        /* if the object is not bound internally, there can't be any pending
         * operation with the surface in the queue */
        if (no_refs)
            pipe = nine_context_get_pipe_acquire(This->base.base.device);
        else
            pipe = NineDevice9_GetPipe(This->base.base.device);
        pLockedRect->pBits = pipe->transfer_map(pipe, resource,
                                                This->level, usage, &box,
                                                &This->transfer);
        if (no_refs)
            nine_context_get_pipe_release(This->base.base.device);
        if (!This->transfer) {
            DBG("transfer_map failed\n");
            if (Flags & D3DLOCK_DONOTWAIT)
                return D3DERR_WASSTILLDRAWING;
            return D3DERR_INVALIDCALL;
        }
        pLockedRect->Pitch = This->transfer->stride;
    }

    if (!(Flags & (D3DLOCK_NO_DIRTY_UPDATE | D3DLOCK_READONLY))) {
        NineSurface9_MarkContainerDirty(This);
        NineSurface9_AddDirtyRect(This, &box);
    }

    ++This->lock_count;
    return D3D_OK;
}
HRESULT NINE_WINAPI
NineSurface9_UnlockRect( struct NineSurface9 *This )
{
    struct pipe_box dst_box, src_box;
    struct pipe_context *pipe;
    DBG("This=%p lock_count=%u\n", This, This->lock_count);
    user_assert(This->lock_count, D3DERR_INVALIDCALL);
    if (This->transfer) {
        pipe = nine_context_get_pipe_acquire(This->base.base.device);
        pipe->transfer_unmap(pipe, This->transfer);
        nine_context_get_pipe_release(This->base.base.device);
        This->transfer = NULL;
    }
    --This->lock_count;
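    /* If a conversion copy is in use, propagate the application's writes:
     * either translate into the plain system-memory buffer, or upload the
     * internal copy to the GPU resource when there is no such buffer. */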
    if (This->data_internal) {
        if (This->data) {
            (void) util_format_translate(This->base.info.format,
                                         This->data, This->stride,
                                         0, 0,
                                         This->format_internal,
                                         This->data_internal,
                                         This->stride_internal,
                                         0, 0,
                                         This->desc.Width, This->desc.Height);
        } else {
            u_box_2d_zslice(0, 0, This->layer,
                            This->desc.Width, This->desc.Height, &dst_box);
            u_box_2d_zslice(0, 0, 0,
                            This->desc.Width, This->desc.Height, &src_box);

            nine_context_box_upload(This->base.base.device,
                                    &This->pending_uploads_counter,
                                    (struct NineUnknown *)This,
                                    This->base.resource,
                                    This->level,
                                    &dst_box,
                                    This->format_internal,
                                    This->data_internal,
                                    This->stride_internal,
                                    0, /* depth = 1 */
                                    &src_box);
        }
    }
    return D3D_OK;
}
HRESULT NINE_WINAPI
NineSurface9_GetDC( struct NineSurface9 *This,
                    HDC *phdc )
{
    STUB(D3DERR_INVALIDCALL);
}

HRESULT NINE_WINAPI
NineSurface9_ReleaseDC( struct NineSurface9 *This,
                        HDC hdc )
{
    STUB(D3DERR_INVALIDCALL);
}

IDirect3DSurface9Vtbl NineSurface9_vtable = {
    (void *)NineUnknown_QueryInterface,
    (void *)NineUnknown_AddRef,
    (void *)NineUnknown_Release,
    (void *)NineUnknown_GetDevice, /* actually part of Resource9 iface */
    (void *)NineUnknown_SetPrivateData,
    (void *)NineUnknown_GetPrivateData,
    (void *)NineUnknown_FreePrivateData,
    (void *)NineResource9_SetPriority,
    (void *)NineResource9_GetPriority,
    (void *)NineResource9_PreLoad,
    (void *)NineResource9_GetType,
    (void *)NineSurface9_GetContainer,
    (void *)NineSurface9_GetDesc,
    (void *)NineSurface9_LockRect,
    (void *)NineSurface9_UnlockRect,
    (void *)NineSurface9_GetDC,
    (void *)NineSurface9_ReleaseDC
};
/* When this function is called, we have already checked that
 * the copy regions fit the surfaces. */
void
NineSurface9_CopyMemToDefault( struct NineSurface9 *This,
                               struct NineSurface9 *From,
                               const POINT *pDestPoint,
                               const RECT *pSourceRect )
{
    struct pipe_resource *r_dst = This->base.resource;
    struct pipe_box dst_box, src_box;
    int src_x, src_y, dst_x, dst_y, copy_width, copy_height;

    assert(This->base.pool == D3DPOOL_DEFAULT &&
           From->base.pool == D3DPOOL_SYSTEMMEM);

    if (pDestPoint) {
        dst_x = pDestPoint->x;
        dst_y = pDestPoint->y;
    } else {
        dst_x = 0;
        dst_y = 0;
    }

    if (pSourceRect) {
        src_x = pSourceRect->left;
        src_y = pSourceRect->top;
        copy_width = pSourceRect->right - pSourceRect->left;
        copy_height = pSourceRect->bottom - pSourceRect->top;
    } else {
        src_x = 0;
        src_y = 0;
        copy_width = From->desc.Width;
        copy_height = From->desc.Height;
    }

    u_box_2d_zslice(dst_x, dst_y, This->layer,
                    copy_width, copy_height, &dst_box);
    u_box_2d_zslice(src_x, src_y, 0,
                    copy_width, copy_height, &src_box);

    nine_context_box_upload(This->base.base.device,
                            &From->pending_uploads_counter,
                            (struct NineUnknown *)From,
                            r_dst,
                            This->level,
                            &dst_box,
                            From->base.info.format,
                            From->data, From->stride,
                            0, /* depth = 1 */
                            &src_box);
    if (From->texture == D3DRTYPE_TEXTURE) {
        struct NineTexture9 *tex =
            NineTexture9(From->base.base.container);
        /* D3DPOOL_SYSTEMMEM with buffer content passed
         * from the user: execute the upload right now.
         * It might be enough to delay the upload until
         * the surface refcount reaches 0, but the bind
         * refcount may not be 0, in which case the dtor
         * is not executed (and does not trigger the
         * pending_uploads_counter check). */
        if (!tex->managed_buffer)
            nine_csmt_process(This->base.base.device);
    }

    if (This->data_internal)
        (void) util_format_translate(This->format_internal,
                                     This->data_internal,
                                     This->stride_internal,
                                     dst_x, dst_y,
                                     From->base.info.format,
                                     From->data, From->stride,
                                     src_x, src_y,
                                     copy_width, copy_height);

    NineSurface9_MarkContainerDirty(This);
}
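/* Synchronous readback: copy the content of the DEFAULT pool surface (From)
 * into the system memory of this SYSTEMMEM surface. */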
void
NineSurface9_CopyDefaultToMem( struct NineSurface9 *This,
                               struct NineSurface9 *From )
{
    struct pipe_context *pipe;
    struct pipe_resource *r_src = From->base.resource;
    struct pipe_transfer *transfer;
    struct pipe_box src_box;
    uint8_t *p_dst;
    const uint8_t *p_src;

    assert(This->base.pool == D3DPOOL_SYSTEMMEM &&
           From->base.pool == D3DPOOL_DEFAULT);

    assert(This->desc.Width == From->desc.Width);
    assert(This->desc.Height == From->desc.Height);

    u_box_origin_2d(This->desc.Width, This->desc.Height, &src_box);
    src_box.z = From->layer;

    if (p_atomic_read(&This->pending_uploads_counter))
        nine_csmt_process(This->base.base.device);

    pipe = NineDevice9_GetPipe(This->base.base.device);
    p_src = pipe->transfer_map(pipe, r_src, From->level,
                               PIPE_TRANSFER_READ,
                               &src_box, &transfer);
    p_dst = This->data;

    assert (p_src && p_dst);

    util_copy_rect(p_dst, This->base.info.format,
                   This->stride, 0, 0,
                   This->desc.Width, This->desc.Height,
                   p_src,
                   transfer->stride, 0, 0);

    pipe->transfer_unmap(pipe, transfer);
}
/* Fortunately, rendering to a MANAGED surface is not permitted, so we will
 * never have to do the reverse, i.e. download the surface.
 */
HRESULT
NineSurface9_UploadSelf( struct NineSurface9 *This,
                         const struct pipe_box *damaged )
{
    struct pipe_resource *res = This->base.resource;
    struct pipe_box box;

    DBG("This=%p damaged=%p\n", This, damaged);

    assert(This->base.pool == D3DPOOL_MANAGED);

    if (damaged) {
        box = *damaged;
        box.z = This->layer;
        box.depth = 1;
    } else {
        box.x = 0;
        box.y = 0;
        box.z = This->layer;
        box.width = This->desc.Width;
        box.height = This->desc.Height;
        box.depth = 1;
    }

    nine_context_box_upload(This->base.base.device,
                            &This->pending_uploads_counter,
                            (struct NineUnknown *)This,
                            res,
                            This->level,
                            &box,
                            res->format,
                            This->data, This->stride,
                            0, /* depth = 1 */
                            &box);

    return D3D_OK;
}
/* Currently nine_context uses the NineSurface9
 * fields when it is a render target. Any modification requires that
 * pending commands using the surface be executed first. If the bind
 * count is 0, there are no pending commands. */
#define PROCESS_IF_BOUND(surf) \
    if (surf->base.base.bind) \
        nine_csmt_process(surf->base.base.device);
void
NineSurface9_SetResource( struct NineSurface9 *This,
                          struct pipe_resource *resource, unsigned level )
{
    /* No need to call PROCESS_IF_BOUND, because SetResource is used only
     * for MANAGED textures, and they are not render targets. */
    assert(This->base.pool == D3DPOOL_MANAGED);
    This->level = level;
    pipe_resource_reference(&This->base.resource, resource);
}

void
NineSurface9_SetMultiSampleType( struct NineSurface9 *This,
                                 D3DMULTISAMPLE_TYPE mst )
{
    PROCESS_IF_BOUND(This);
    This->desc.MultiSampleType = mst;
}
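/* Point the surface at a new backing resource (used e.g. when the swapchain
 * buffers are reallocated on resize) and refresh the cached size, sample
 * counts, stride and pipe surfaces accordingly. */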
void
NineSurface9_SetResourceResize( struct NineSurface9 *This,
                                struct pipe_resource *resource )
{
    assert(This->level == 0 && This->level_actual == 0);
    assert(!This->lock_count);
    assert(This->desc.Pool == D3DPOOL_DEFAULT);
    assert(!This->texture);

    PROCESS_IF_BOUND(This);
    pipe_resource_reference(&This->base.resource, resource);

    This->desc.Width = This->base.info.width0 = resource->width0;
    This->desc.Height = This->base.info.height0 = resource->height0;
    This->base.info.nr_samples = resource->nr_samples;
    This->base.info.nr_storage_samples = resource->nr_storage_samples;

    This->stride = nine_format_get_stride(This->base.info.format,
                                          This->desc.Width);

    pipe_surface_reference(&This->surface[0], NULL);
    pipe_surface_reference(&This->surface[1], NULL);
    if (resource)
        NineSurface9_CreatePipeSurfaces(This);
}


static const GUID *NineSurface9_IIDs[] = {
    &IID_IDirect3DSurface9,
    &IID_IDirect3DResource9,
    &IID_IUnknown,
    NULL
};

HRESULT
NineSurface9_new( struct NineDevice9 *pDevice,
                  struct NineUnknown *pContainer,
                  struct pipe_resource *pResource,
                  void *user_buffer,
                  uint8_t TextureType,
                  unsigned Level,
                  unsigned Layer,
                  D3DSURFACE_DESC *pDesc,
                  struct NineSurface9 **ppOut )
{
    NINE_DEVICE_CHILD_NEW(Surface9, ppOut, pDevice, /* args */
                          pContainer, pResource, user_buffer,
                          TextureType, Level, Layer, pDesc);
}