nv50,nvc0: update the format tables
src/gallium/drivers/nvc0/nvc0_surface.c
/*
 * Copyright 2008 Ben Skeggs
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
 * OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#include <stdint.h>

#include "pipe/p_defines.h"

#include "util/u_inlines.h"
#include "util/u_pack_color.h"
#include "util/u_format.h"

#include "nvc0_context.h"
#include "nvc0_resource.h"
#include "nvc0_transfer.h"

#include "nv50/nv50_defs.xml.h"

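/* Bitmask over the hardware color format range (0xc0..0xff): bit (id - 0xc0)
 * is set if render target format id is usable by the 2D engine, as tested by
 * nvc0_2d_format_faithful() below.
 */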
#define NVC0_ENG2D_SUPPORTED_FORMATS 0xff9ccfe1cce3ccc9ULL

/* return TRUE for formats that can be converted among each other by NVC0_2D */
static INLINE boolean
nvc0_2d_format_faithful(enum pipe_format format)
{
   uint8_t id = nvc0_format_table[format].rt;

   return (id >= 0xc0) && (NVC0_ENG2D_SUPPORTED_FORMATS & (1ULL << (id - 0xc0)));
}

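/* Return the hardware format the 2D engine should use for "format". If the
 * format cannot be handled directly, fall back to a supported format of the
 * same block size, so such copies are only bit-compatible, not converted.
 */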
static INLINE uint8_t
nvc0_2d_format(enum pipe_format format)
{
   uint8_t id = nvc0_format_table[format].rt;

   /* Hardware values for color formats range from 0xc0 to 0xff,
    * but the 2D engine doesn't support all of them.
    */
   if (nvc0_2d_format_faithful(format))
      return id;

   switch (util_format_get_blocksize(format)) {
   case 1:
      return NV50_SURFACE_FORMAT_R8_UNORM;
   case 2:
      return NV50_SURFACE_FORMAT_R16_UNORM;
   case 4:
      return NV50_SURFACE_FORMAT_A8R8G8B8_UNORM;
   case 8:
      return NV50_SURFACE_FORMAT_R16G16B16A16_UNORM;
   case 16:
      return NV50_SURFACE_FORMAT_R32G32B32A32_FLOAT;
   default:
      return 0;
   }
}

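/* Program the 2D engine's source (dst == 0) or destination (dst == 1) surface
 * state from the given miptree level and layer. Linear and tiled surfaces use
 * different method layouts. Returns non-zero if the format is unusable.
 */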
static int
nvc0_2d_texture_set(struct nouveau_channel *chan, int dst,
                    struct nvc0_miptree *mt, unsigned level, unsigned layer)
{
   struct nouveau_bo *bo = mt->base.bo;
   uint32_t width, height, depth;
   uint32_t format;
   uint32_t mthd = dst ? NVC0_2D_DST_FORMAT : NVC0_2D_SRC_FORMAT;
   uint32_t flags = mt->base.domain | (dst ? NOUVEAU_BO_WR : NOUVEAU_BO_RD);
   uint32_t offset = mt->level[level].offset;

   format = nvc0_2d_format(mt->base.base.format);
   if (!format) {
      NOUVEAU_ERR("invalid/unsupported surface format: %s\n",
                  util_format_name(mt->base.base.format));
      return 1;
   }

   width = u_minify(mt->base.base.width0, level);
   height = u_minify(mt->base.base.height0, level);
   depth = u_minify(mt->base.base.depth0, level);

   /* layer has to be < depth, and depth > tile depth / 2 */

   if (!mt->layout_3d) {
      offset += mt->layer_stride * layer;
      layer = 0;
      depth = 1;
   } else
   if (!dst) {
      offset += nvc0_miptree_zslice_offset(mt, level, layer);
      layer = 0;
   }

   if (!(bo->tile_flags & NOUVEAU_BO_TILE_LAYOUT_MASK)) {
      BEGIN_RING(chan, RING_2D_(mthd), 2);
      OUT_RING (chan, format);
      OUT_RING (chan, 1);
      BEGIN_RING(chan, RING_2D_(mthd + 0x14), 5);
      OUT_RING (chan, mt->level[level].pitch);
      OUT_RING (chan, width);
      OUT_RING (chan, height);
      OUT_RELOCh(chan, bo, offset, flags);
      OUT_RELOCl(chan, bo, offset, flags);
   } else {
      BEGIN_RING(chan, RING_2D_(mthd), 5);
      OUT_RING (chan, format);
      OUT_RING (chan, 0);
      OUT_RING (chan, mt->level[level].tile_mode);
      OUT_RING (chan, depth);
      OUT_RING (chan, layer);
      BEGIN_RING(chan, RING_2D_(mthd + 0x18), 4);
      OUT_RING (chan, width);
      OUT_RING (chan, height);
      OUT_RELOCh(chan, bo, offset, flags);
      OUT_RELOCl(chan, bo, offset, flags);
   }

#if 0
   if (dst) {
      BEGIN_RING(chan, RING_2D_(NVC0_2D_CLIP_X), 4);
      OUT_RING (chan, 0);
      OUT_RING (chan, 0);
      OUT_RING (chan, width);
      OUT_RING (chan, height);
   }
#endif
   return 0;
}

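/* Copy a w x h rectangle from (sx, sy, sz) of the source miptree level to
 * (dx, dy, dz) of the destination with an unscaled (1:1) 2D engine blit.
 * Returns non-zero if either surface could not be set up.
 */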
static int
nvc0_2d_texture_do_copy(struct nouveau_channel *chan,
                        struct nvc0_miptree *dst, unsigned dst_level,
                        unsigned dx, unsigned dy, unsigned dz,
                        struct nvc0_miptree *src, unsigned src_level,
                        unsigned sx, unsigned sy, unsigned sz,
                        unsigned w, unsigned h)
{
   int ret;

   ret = MARK_RING(chan, 2 * 16 + 32, 4);
   if (ret)
      return ret;

   ret = nvc0_2d_texture_set(chan, 1, dst, dst_level, dz);
   if (ret)
      return ret;

   ret = nvc0_2d_texture_set(chan, 0, src, src_level, sz);
   if (ret)
      return ret;

   /* 0/1 = CENTER/CORNER, 10/00 = POINT/BILINEAR */
   BEGIN_RING(chan, RING_2D(BLIT_CONTROL), 1);
   OUT_RING (chan, 0);
   BEGIN_RING(chan, RING_2D(BLIT_DST_X), 4);
   OUT_RING (chan, dx);
   OUT_RING (chan, dy);
   OUT_RING (chan, w);
   OUT_RING (chan, h);
   BEGIN_RING(chan, RING_2D(BLIT_DU_DX_FRACT), 4);
   OUT_RING (chan, 0);
   OUT_RING (chan, 1);
   OUT_RING (chan, 0);
   OUT_RING (chan, 1);
   BEGIN_RING(chan, RING_2D(BLIT_SRC_X_FRACT), 4);
   OUT_RING (chan, 0);
   OUT_RING (chan, sx);
   OUT_RING (chan, 0);
   OUT_RING (chan, sy);

   return 0;
}

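/* Fill in an M2MF transfer rectangle from a miptree level, converting x/y and
 * width/height to block units for non-plain (e.g. compressed) formats. For
 * non-3D layouts the layer offset is folded into the base address.
 */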
static void
nvc0_setup_m2mf_rect(struct nvc0_m2mf_rect *rect,
                     struct pipe_resource *restrict res, unsigned l,
                     unsigned x, unsigned y, unsigned z)
{
   struct nvc0_miptree *mt = nvc0_miptree(res);
   const unsigned w = u_minify(res->width0, l);
   const unsigned h = u_minify(res->height0, l);

   rect->bo = mt->base.bo;
   rect->domain = mt->base.domain;
   rect->base = mt->level[l].offset;
   rect->pitch = mt->level[l].pitch;
   if (util_format_is_plain(res->format)) {
      rect->width = w;
      rect->height = h;
      rect->x = x;
      rect->y = y;
   } else {
      rect->width = util_format_get_nblocksx(res->format, w);
      rect->height = util_format_get_nblocksy(res->format, h);
      rect->x = util_format_get_nblocksx(res->format, x);
      rect->y = util_format_get_nblocksy(res->format, y);
   }
   rect->tile_mode = mt->level[l].tile_mode;
   rect->cpp = util_format_get_blocksize(res->format);

   if (mt->layout_3d) {
      rect->z = z;
      rect->depth = u_minify(res->depth0, l);
   } else {
      rect->base += z * mt->layer_stride;
      rect->z = 0;
      rect->depth = 1;
   }
}

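/* pipe_context::resource_copy_region: same-format copies go through M2MF,
 * which operates on raw blocks; format-changing copies require both formats
 * to be handled natively by the 2D engine and are done one layer at a time.
 */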
static void
nvc0_resource_copy_region(struct pipe_context *pipe,
                          struct pipe_resource *dst, unsigned dst_level,
                          unsigned dstx, unsigned dsty, unsigned dstz,
                          struct pipe_resource *src, unsigned src_level,
                          const struct pipe_box *src_box)
{
   struct nvc0_screen *screen = nvc0_context(pipe)->screen;
   int ret;
   unsigned dst_layer = dstz, src_layer = src_box->z;

   nv04_resource(dst)->status |= NOUVEAU_BUFFER_STATUS_GPU_WRITING;

   if (src->format == dst->format) {
      struct nvc0_m2mf_rect drect, srect;
      unsigned i;
      unsigned nx = util_format_get_nblocksx(src->format, src_box->width);
      unsigned ny = util_format_get_nblocksy(src->format, src_box->height);

      nvc0_setup_m2mf_rect(&drect, dst, dst_level, dstx, dsty, dstz);
      nvc0_setup_m2mf_rect(&srect, src, src_level,
                           src_box->x, src_box->y, src_box->z);

      for (i = 0; i < src_box->depth; ++i) {
         nvc0_m2mf_transfer_rect(&screen->base.base, &drect, &srect, nx, ny);

         if (nvc0_miptree(dst)->layout_3d)
            drect.z++;
         else
            drect.base += nvc0_miptree(dst)->layer_stride;

         if (nvc0_miptree(src)->layout_3d)
            srect.z++;
         else
            srect.base += nvc0_miptree(src)->layer_stride;
      }
      return;
   }

   assert(nvc0_2d_format_faithful(src->format));
   assert(nvc0_2d_format_faithful(dst->format));

   for (; dst_layer < dstz + src_box->depth; ++dst_layer, ++src_layer) {
      ret = nvc0_2d_texture_do_copy(screen->base.channel,
                                    nvc0_miptree(dst), dst_level,
                                    dstx, dsty, dst_layer,
                                    nvc0_miptree(src), src_level,
                                    src_box->x, src_box->y, src_layer,
                                    src_box->width, src_box->height);
      if (ret)
         return;
   }
}

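/* pipe_context::clear_render_target: bind the single surface as RT 0 and use
 * the 3D engine's CLEAR_BUFFERS with a clip rectangle limiting the cleared
 * area, then flag the framebuffer state dirty so it gets rebound later.
 */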
static void
nvc0_clear_render_target(struct pipe_context *pipe,
                         struct pipe_surface *dst,
                         const float *rgba,
                         unsigned dstx, unsigned dsty,
                         unsigned width, unsigned height)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);
   struct nvc0_screen *screen = nvc0->screen;
   struct nouveau_channel *chan = screen->base.channel;
   struct nvc0_miptree *mt = nvc0_miptree(dst->texture);
   struct nvc0_surface *sf = nvc0_surface(dst);
   struct nouveau_bo *bo = mt->base.bo;

   BEGIN_RING(chan, RING_3D(CLEAR_COLOR(0)), 4);
   OUT_RINGf (chan, rgba[0]);
   OUT_RINGf (chan, rgba[1]);
   OUT_RINGf (chan, rgba[2]);
   OUT_RINGf (chan, rgba[3]);

   if (MARK_RING(chan, 18, 2))
      return;

   BEGIN_RING(chan, RING_3D(RT_CONTROL), 1);
   OUT_RING (chan, 1);
   BEGIN_RING(chan, RING_3D(RT_ADDRESS_HIGH(0)), 9);
   OUT_RELOCh(chan, bo, sf->offset, NOUVEAU_BO_VRAM | NOUVEAU_BO_WR);
   OUT_RELOCl(chan, bo, sf->offset, NOUVEAU_BO_VRAM | NOUVEAU_BO_WR);
   OUT_RING (chan, sf->width);
   OUT_RING (chan, sf->height);
   OUT_RING (chan, nvc0_format_table[dst->format].rt);
   OUT_RING (chan, (mt->layout_3d << 16) |
             mt->level[sf->base.u.tex.level].tile_mode);
   OUT_RING (chan, dst->u.tex.first_layer + sf->depth);
   OUT_RING (chan, mt->layer_stride >> 2);
   OUT_RING (chan, dst->u.tex.first_layer);

   BEGIN_RING(chan, RING_3D(CLIP_RECT_HORIZ(0)), 2);
   OUT_RING (chan, ((dstx + width) << 16) | dstx);
   OUT_RING (chan, ((dsty + height) << 16) | dsty);
   IMMED_RING(chan, RING_3D(CLIP_RECTS_EN), 1);

   BEGIN_RING(chan, RING_3D(CLEAR_BUFFERS), 1);
   OUT_RING (chan, 0x3c);

   IMMED_RING(chan, RING_3D(CLIP_RECTS_EN), 0);

   nvc0->dirty |= NVC0_NEW_FRAMEBUFFER;
}

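/* pipe_context::clear_depth_stencil: like the color variant, but binds the
 * surface as the ZETA buffer and builds the CLEAR_BUFFERS mode from the
 * requested depth/stencil flags.
 */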
static void
nvc0_clear_depth_stencil(struct pipe_context *pipe,
                         struct pipe_surface *dst,
                         unsigned clear_flags,
                         double depth,
                         unsigned stencil,
                         unsigned dstx, unsigned dsty,
                         unsigned width, unsigned height)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);
   struct nvc0_screen *screen = nvc0->screen;
   struct nouveau_channel *chan = screen->base.channel;
   struct nvc0_miptree *mt = nvc0_miptree(dst->texture);
   struct nvc0_surface *sf = nvc0_surface(dst);
   struct nouveau_bo *bo = mt->base.bo;
   uint32_t mode = 0;
   int unk = mt->base.base.target == PIPE_TEXTURE_2D;

   if (clear_flags & PIPE_CLEAR_DEPTH) {
      BEGIN_RING(chan, RING_3D(CLEAR_DEPTH), 1);
      OUT_RINGf (chan, depth);
      mode |= NVC0_3D_CLEAR_BUFFERS_Z;
   }

   if (clear_flags & PIPE_CLEAR_STENCIL) {
      BEGIN_RING(chan, RING_3D(CLEAR_STENCIL), 1);
      OUT_RING (chan, stencil & 0xff);
      mode |= NVC0_3D_CLEAR_BUFFERS_S;
   }

   if (MARK_RING(chan, 17, 2))
      return;

   BEGIN_RING(chan, RING_3D(ZETA_ADDRESS_HIGH), 5);
   OUT_RELOCh(chan, bo, sf->offset, NOUVEAU_BO_VRAM | NOUVEAU_BO_WR);
   OUT_RELOCl(chan, bo, sf->offset, NOUVEAU_BO_VRAM | NOUVEAU_BO_WR);
   OUT_RING (chan, nvc0_format_table[dst->format].rt);
   OUT_RING (chan, mt->level[sf->base.u.tex.level].tile_mode);
   OUT_RING (chan, mt->layer_stride >> 2);
   BEGIN_RING(chan, RING_3D(ZETA_ENABLE), 1);
   OUT_RING (chan, 1);
   BEGIN_RING(chan, RING_3D(ZETA_HORIZ), 3);
   OUT_RING (chan, sf->width);
   OUT_RING (chan, sf->height);
   OUT_RING (chan, (unk << 16) | (dst->u.tex.first_layer + sf->depth));
   BEGIN_RING(chan, RING_3D(ZETA_BASE_LAYER), 1);
   OUT_RING (chan, dst->u.tex.first_layer);

   BEGIN_RING(chan, RING_3D(CLIP_RECT_HORIZ(0)), 2);
   OUT_RING (chan, ((dstx + width) << 16) | dstx);
   OUT_RING (chan, ((dsty + height) << 16) | dsty);
   IMMED_RING(chan, RING_3D(CLIP_RECTS_EN), 1);

   BEGIN_RING(chan, RING_3D(CLEAR_BUFFERS), 1);
   OUT_RING (chan, mode);

   IMMED_RING(chan, RING_3D(CLIP_RECTS_EN), 0);

   nvc0->dirty |= NVC0_NEW_FRAMEBUFFER;
}

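/* Clear the currently bound framebuffer. Only framebuffer state needs to be
 * validated since CLEAR_BUFFERS ignores the color mask; color buffers beyond
 * the first are cleared with extra CLEAR_BUFFERS submissions that select the
 * RT index in bits 6 and up.
 */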
void
nvc0_clear(struct pipe_context *pipe, unsigned buffers,
           const float *rgba, double depth, unsigned stencil)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);
   struct nouveau_channel *chan = nvc0->screen->base.channel;
   struct pipe_framebuffer_state *fb = &nvc0->framebuffer;
   unsigned i;
   const unsigned dirty = nvc0->dirty;
   uint32_t mode = 0;

   /* don't need NEW_BLEND, COLOR_MASK doesn't affect CLEAR_BUFFERS */
   nvc0->dirty &= NVC0_NEW_FRAMEBUFFER;
   if (!nvc0_state_validate(nvc0))
      return;

   if (buffers & PIPE_CLEAR_COLOR && fb->nr_cbufs) {
      BEGIN_RING(chan, RING_3D(CLEAR_COLOR(0)), 4);
      OUT_RINGf (chan, rgba[0]);
      OUT_RINGf (chan, rgba[1]);
      OUT_RINGf (chan, rgba[2]);
      OUT_RINGf (chan, rgba[3]);
      mode =
         NVC0_3D_CLEAR_BUFFERS_R | NVC0_3D_CLEAR_BUFFERS_G |
         NVC0_3D_CLEAR_BUFFERS_B | NVC0_3D_CLEAR_BUFFERS_A;
   }

   if (buffers & PIPE_CLEAR_DEPTH) {
      BEGIN_RING(chan, RING_3D(CLEAR_DEPTH), 1);
      OUT_RING (chan, fui(depth));
      mode |= NVC0_3D_CLEAR_BUFFERS_Z;
   }

   if (buffers & PIPE_CLEAR_STENCIL) {
      BEGIN_RING(chan, RING_3D(CLEAR_STENCIL), 1);
      OUT_RING (chan, stencil & 0xff);
      mode |= NVC0_3D_CLEAR_BUFFERS_S;
   }

   BEGIN_RING(chan, RING_3D(CLEAR_BUFFERS), 1);
   OUT_RING (chan, mode);

   for (i = 1; i < fb->nr_cbufs; i++) {
      BEGIN_RING(chan, RING_3D(CLEAR_BUFFERS), 1);
      OUT_RING (chan, (i << 6) | 0x3c);
   }

   nvc0->dirty = dirty & ~NVC0_NEW_FRAMEBUFFER;
}

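/* Hook the surface-related entry points into the context's pipe vtable. */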
void
nvc0_init_surface_functions(struct nvc0_context *nvc0)
{
   struct pipe_context *pipe = &nvc0->base.pipe;

   pipe->resource_copy_region = nvc0_resource_copy_region;
   pipe->clear_render_target = nvc0_clear_render_target;
   pipe->clear_depth_stencil = nvc0_clear_depth_stencil;
}