gallium: separate indirect stuff from pipe_draw_info - 80 -> 56 bytes
[mesa.git] / src / gallium / drivers / nouveau / nvc0 / nvc0_context.c
1 /*
2 * Copyright 2010 Christoph Bumiller
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 */
22
23 #include "pipe/p_defines.h"
24 #include "util/u_framebuffer.h"
25 #include "util/u_upload_mgr.h"
26
27 #include "nvc0/nvc0_context.h"
28 #include "nvc0/nvc0_screen.h"
29 #include "nvc0/nvc0_resource.h"
30
31 static void
32 nvc0_flush(struct pipe_context *pipe,
33 struct pipe_fence_handle **fence,
34 unsigned flags)
35 {
36 struct nvc0_context *nvc0 = nvc0_context(pipe);
37 struct nouveau_screen *screen = &nvc0->screen->base;
38
39 if (fence)
40 nouveau_fence_ref(screen->fence.current, (struct nouveau_fence **)fence);
41
42 PUSH_KICK(nvc0->base.pushbuf); /* fencing handled in kick_notify */
43
44 nouveau_context_update_frame_stats(&nvc0->base);
45 }
46
47 static void
48 nvc0_texture_barrier(struct pipe_context *pipe, unsigned flags)
49 {
50 struct nouveau_pushbuf *push = nvc0_context(pipe)->base.pushbuf;
51
52 IMMED_NVC0(push, NVC0_3D(SERIALIZE), 0);
53 IMMED_NVC0(push, NVC0_3D(TEX_CACHE_CTL), 0);
54 }
55
/* pipe_context::memory_barrier: make prior writes visible to later GPU
 * commands, as selected by the PIPE_BARRIER_* bits in flags.
 */
static void
nvc0_memory_barrier(struct pipe_context *pipe, unsigned flags)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);
   struct nouveau_pushbuf *push = nvc0->base.pushbuf;
   int i, s;

   if (flags & PIPE_BARRIER_MAPPED_BUFFER) {
      /* CPU writes through persistent mappings: flag the affected bind
       * points dirty so they get revalidated before the next draw. */
      for (i = 0; i < nvc0->num_vtxbufs; ++i) {
         if (!nvc0->vtxbuf[i].buffer.resource && !nvc0->vtxbuf[i].is_user_buffer)
            continue;
         /* NOTE(review): buffer.resource is dereferenced even when
          * is_user_buffer is set (buffer is a union of resource/user) —
          * assumes user vertex buffers never reach this path with a
          * non-resource pointer; confirm. */
         if (nvc0->vtxbuf[i].buffer.resource->flags & PIPE_RESOURCE_FLAG_MAP_PERSISTENT)
            nvc0->base.vbo_dirty = true;
      }

      if (nvc0->idxbuf.buffer &&
          nvc0->idxbuf.buffer->flags & PIPE_RESOURCE_FLAG_MAP_PERSISTENT)
         nvc0->base.vbo_dirty = true;

      /* Scan the valid constbufs of the 5 graphics stages; stop as soon as
       * one persistently-mapped buffer marks cb_dirty. */
      for (s = 0; s < 5 && !nvc0->cb_dirty; ++s) {
         uint32_t valid = nvc0->constbuf_valid[s];

         while (valid && !nvc0->cb_dirty) {
            const unsigned i = ffs(valid) - 1;
            struct pipe_resource *res;

            valid &= ~(1 << i);
            /* user constbufs have no pipe_resource backing to check */
            if (nvc0->constbuf[s][i].user)
               continue;

            res = nvc0->constbuf[s][i].u.buf;
            if (!res)
               continue;

            if (res->flags & PIPE_RESOURCE_FLAG_MAP_PERSISTENT)
               nvc0->cb_dirty = true;
         }
      }
   } else {
      /* Pretty much any writing by shaders needs a serialize after
       * it. Especially when moving between 3d and compute pipelines, but even
       * without that.
       */
      IMMED_NVC0(push, NVC0_3D(SERIALIZE), 0);
   }

   /* If we're going to texture from a buffer/image written by a shader, we
    * must flush the texture cache.
    */
   if (flags & PIPE_BARRIER_TEXTURE)
      IMMED_NVC0(push, NVC0_3D(TEX_CACHE_CTL), 0);

   if (flags & PIPE_BARRIER_CONSTANT_BUFFER)
      nvc0->cb_dirty = true;
   if (flags & (PIPE_BARRIER_VERTEX_BUFFER | PIPE_BARRIER_INDEX_BUFFER))
      nvc0->base.vbo_dirty = true;
}
113
114 static void
115 nvc0_emit_string_marker(struct pipe_context *pipe, const char *str, int len)
116 {
117 struct nouveau_pushbuf *push = nvc0_context(pipe)->base.pushbuf;
118 int string_words = len / 4;
119 int data_words;
120
121 if (len <= 0)
122 return;
123 string_words = MIN2(string_words, NV04_PFIFO_MAX_PACKET_LEN);
124 if (string_words == NV04_PFIFO_MAX_PACKET_LEN)
125 data_words = string_words;
126 else
127 data_words = string_words + !!(len & 3);
128 BEGIN_NIC0(push, SUBC_3D(NV04_GRAPH_NOP), data_words);
129 if (string_words)
130 PUSH_DATAp(push, str, string_words);
131 if (string_words != data_words) {
132 int data = 0;
133 memcpy(&data, &str[string_words * 4], len & 3);
134 PUSH_DATA (push, data);
135 }
136 }
137
138 static void
139 nvc0_context_unreference_resources(struct nvc0_context *nvc0)
140 {
141 unsigned s, i;
142
143 nouveau_bufctx_del(&nvc0->bufctx_3d);
144 nouveau_bufctx_del(&nvc0->bufctx);
145 nouveau_bufctx_del(&nvc0->bufctx_cp);
146
147 util_unreference_framebuffer_state(&nvc0->framebuffer);
148
149 for (i = 0; i < nvc0->num_vtxbufs; ++i)
150 pipe_vertex_buffer_unreference(&nvc0->vtxbuf[i]);
151
152 pipe_resource_reference(&nvc0->idxbuf.buffer, NULL);
153
154 for (s = 0; s < 6; ++s) {
155 for (i = 0; i < nvc0->num_textures[s]; ++i)
156 pipe_sampler_view_reference(&nvc0->textures[s][i], NULL);
157
158 for (i = 0; i < NVC0_MAX_PIPE_CONSTBUFS; ++i)
159 if (!nvc0->constbuf[s][i].user)
160 pipe_resource_reference(&nvc0->constbuf[s][i].u.buf, NULL);
161
162 for (i = 0; i < NVC0_MAX_BUFFERS; ++i)
163 pipe_resource_reference(&nvc0->buffers[s][i].buffer, NULL);
164
165 for (i = 0; i < NVC0_MAX_IMAGES; ++i) {
166 pipe_resource_reference(&nvc0->images[s][i].resource, NULL);
167 if (nvc0->screen->base.class_3d >= GM107_3D_CLASS)
168 pipe_sampler_view_reference(&nvc0->images_tic[s][i], NULL);
169 }
170 }
171
172 for (s = 0; s < 2; ++s) {
173 for (i = 0; i < NVC0_MAX_SURFACE_SLOTS; ++i)
174 pipe_surface_reference(&nvc0->surfaces[s][i], NULL);
175 }
176
177 for (i = 0; i < nvc0->num_tfbbufs; ++i)
178 pipe_so_target_reference(&nvc0->tfbbuf[i], NULL);
179
180 for (i = 0; i < nvc0->global_residents.size / sizeof(struct pipe_resource *);
181 ++i) {
182 struct pipe_resource **res = util_dynarray_element(
183 &nvc0->global_residents, struct pipe_resource *, i);
184 pipe_resource_reference(res, NULL);
185 }
186 util_dynarray_fini(&nvc0->global_residents);
187
188 if (nvc0->tcp_empty)
189 nvc0->base.pipe.delete_tcs_state(&nvc0->base.pipe, nvc0->tcp_empty);
190 }
191
/* pipe_context::destroy: flush pending work and tear down the context.
 * The ordering below matters: the bufctx is detached before the final kick,
 * and resources are unreferenced only after that flush completed.
 */
static void
nvc0_destroy(struct pipe_context *pipe)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   /* If we are the screen's active context, detach and save our state
    * (minus the tfb pointer, which is cleared) for the next context. */
   if (nvc0->screen->cur_ctx == nvc0) {
      nvc0->screen->cur_ctx = NULL;
      nvc0->screen->save_state = nvc0->state;
      nvc0->screen->save_state.tfb = NULL;
   }

   if (nvc0->base.pipe.stream_uploader)
      u_upload_destroy(nvc0->base.pipe.stream_uploader);

   /* Unset bufctx, we don't want to revalidate any resources after the flush.
    * Other contexts will always set their bufctx again on action calls.
    */
   nouveau_pushbuf_bufctx(nvc0->base.pushbuf, NULL);
   /* Final flush before we drop our references below. */
   nouveau_pushbuf_kick(nvc0->base.pushbuf, nvc0->base.pushbuf->channel);

   nvc0_context_unreference_resources(nvc0);
   nvc0_blitctx_destroy(nvc0);

   nouveau_context_destroy(&nvc0->base);
}
217
218 void
219 nvc0_default_kick_notify(struct nouveau_pushbuf *push)
220 {
221 struct nvc0_screen *screen = push->user_priv;
222
223 if (screen) {
224 nouveau_fence_next(&screen->base);
225 nouveau_fence_update(&screen->base, true);
226 if (screen->cur_ctx)
227 screen->cur_ctx->state.flushed = true;
228 NOUVEAU_DRV_STAT(&screen->base, pushbuf_count, 1);
229 }
230 }
231
232 static int
233 nvc0_invalidate_resource_storage(struct nouveau_context *ctx,
234 struct pipe_resource *res,
235 int ref)
236 {
237 struct nvc0_context *nvc0 = nvc0_context(&ctx->pipe);
238 unsigned s, i;
239
240 if (res->bind & PIPE_BIND_RENDER_TARGET) {
241 for (i = 0; i < nvc0->framebuffer.nr_cbufs; ++i) {
242 if (nvc0->framebuffer.cbufs[i] &&
243 nvc0->framebuffer.cbufs[i]->texture == res) {
244 nvc0->dirty_3d |= NVC0_NEW_3D_FRAMEBUFFER;
245 nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_FB);
246 if (!--ref)
247 return ref;
248 }
249 }
250 }
251 if (res->bind & PIPE_BIND_DEPTH_STENCIL) {
252 if (nvc0->framebuffer.zsbuf &&
253 nvc0->framebuffer.zsbuf->texture == res) {
254 nvc0->dirty_3d |= NVC0_NEW_3D_FRAMEBUFFER;
255 nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_FB);
256 if (!--ref)
257 return ref;
258 }
259 }
260
261 if (res->target == PIPE_BUFFER) {
262 for (i = 0; i < nvc0->num_vtxbufs; ++i) {
263 if (nvc0->vtxbuf[i].buffer.resource == res) {
264 nvc0->dirty_3d |= NVC0_NEW_3D_ARRAYS;
265 nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_VTX);
266 if (!--ref)
267 return ref;
268 }
269 }
270
271 if (nvc0->idxbuf.buffer == res) {
272 nvc0->dirty_3d |= NVC0_NEW_3D_IDXBUF;
273 nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_IDX);
274 if (!--ref)
275 return ref;
276 }
277
278 for (s = 0; s < 6; ++s) {
279 for (i = 0; i < nvc0->num_textures[s]; ++i) {
280 if (nvc0->textures[s][i] &&
281 nvc0->textures[s][i]->texture == res) {
282 nvc0->textures_dirty[s] |= 1 << i;
283 if (unlikely(s == 5)) {
284 nvc0->dirty_cp |= NVC0_NEW_CP_TEXTURES;
285 nouveau_bufctx_reset(nvc0->bufctx_cp, NVC0_BIND_CP_TEX(i));
286 } else {
287 nvc0->dirty_3d |= NVC0_NEW_3D_TEXTURES;
288 nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_TEX(s, i));
289 }
290 if (!--ref)
291 return ref;
292 }
293 }
294 }
295
296 for (s = 0; s < 6; ++s) {
297 for (i = 0; i < NVC0_MAX_PIPE_CONSTBUFS; ++i) {
298 if (!(nvc0->constbuf_valid[s] & (1 << i)))
299 continue;
300 if (!nvc0->constbuf[s][i].user &&
301 nvc0->constbuf[s][i].u.buf == res) {
302 nvc0->constbuf_dirty[s] |= 1 << i;
303 if (unlikely(s == 5)) {
304 nvc0->dirty_cp |= NVC0_NEW_CP_CONSTBUF;
305 nouveau_bufctx_reset(nvc0->bufctx_cp, NVC0_BIND_CP_CB(i));
306 } else {
307 nvc0->dirty_3d |= NVC0_NEW_3D_CONSTBUF;
308 nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_CB(s, i));
309 }
310 if (!--ref)
311 return ref;
312 }
313 }
314 }
315
316 for (s = 0; s < 6; ++s) {
317 for (i = 0; i < NVC0_MAX_BUFFERS; ++i) {
318 if (nvc0->buffers[s][i].buffer == res) {
319 nvc0->buffers_dirty[s] |= 1 << i;
320 if (unlikely(s == 5)) {
321 nvc0->dirty_cp |= NVC0_NEW_CP_BUFFERS;
322 nouveau_bufctx_reset(nvc0->bufctx_cp, NVC0_BIND_CP_BUF);
323 } else {
324 nvc0->dirty_3d |= NVC0_NEW_3D_BUFFERS;
325 nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_BUF);
326 }
327 if (!--ref)
328 return ref;
329 }
330 }
331 }
332
333 for (s = 0; s < 6; ++s) {
334 for (i = 0; i < NVC0_MAX_IMAGES; ++i) {
335 if (nvc0->images[s][i].resource == res) {
336 nvc0->images_dirty[s] |= 1 << i;
337 if (unlikely(s == 5)) {
338 nvc0->dirty_cp |= NVC0_NEW_CP_SURFACES;
339 nouveau_bufctx_reset(nvc0->bufctx_cp, NVC0_BIND_CP_SUF);
340 } else {
341 nvc0->dirty_3d |= NVC0_NEW_3D_SURFACES;
342 nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_SUF);
343 }
344 }
345 if (!--ref)
346 return ref;
347 }
348 }
349 }
350
351 return ref;
352 }
353
354 static void
355 nvc0_context_get_sample_position(struct pipe_context *, unsigned, unsigned,
356 float *);
357
/* pipe_screen context-creation entry point: allocate and wire up a new
 * nvc0 rendering context on the given screen. Returns NULL on failure,
 * with all partially-created objects torn down via out_err.
 */
struct pipe_context *
nvc0_create(struct pipe_screen *pscreen, void *priv, unsigned ctxflags)
{
   struct nvc0_screen *screen = nvc0_screen(pscreen);
   struct nvc0_context *nvc0;
   struct pipe_context *pipe;
   int ret;
   uint32_t flags;

   nvc0 = CALLOC_STRUCT(nvc0_context);
   if (!nvc0)
      return NULL;
   pipe = &nvc0->base.pipe;

   if (!nvc0_blitctx_create(nvc0))
      goto out_err;

   /* Pushbuf and client are shared with the screen. */
   nvc0->base.pushbuf = screen->base.pushbuf;
   nvc0->base.client = screen->base.client;

   /* One small generic bufctx plus one bufctx with a bin per 3D and per
    * compute bind point. */
   ret = nouveau_bufctx_new(screen->base.client, 2, &nvc0->bufctx);
   if (!ret)
      ret = nouveau_bufctx_new(screen->base.client, NVC0_BIND_3D_COUNT,
                               &nvc0->bufctx_3d);
   if (!ret)
      ret = nouveau_bufctx_new(screen->base.client, NVC0_BIND_CP_COUNT,
                               &nvc0->bufctx_cp);
   if (ret)
      goto out_err;

   nvc0->screen = screen;
   nvc0->base.screen = &screen->base;

   pipe->screen = pscreen;
   pipe->priv = priv;
   pipe->stream_uploader = u_upload_create_default(pipe);
   if (!pipe->stream_uploader)
      goto out_err;
   /* Constant uploads share the stream uploader. */
   pipe->const_uploader = pipe->stream_uploader;

   pipe->destroy = nvc0_destroy;

   pipe->draw_vbo = nvc0_draw_vbo;
   pipe->clear = nvc0_clear;
   /* NVE4 (Kepler) and newer use the NVE4 compute dispatch path. */
   pipe->launch_grid = (nvc0->screen->base.class_3d >= NVE4_3D_CLASS) ?
      nve4_launch_grid : nvc0_launch_grid;

   pipe->flush = nvc0_flush;
   pipe->texture_barrier = nvc0_texture_barrier;
   pipe->memory_barrier = nvc0_memory_barrier;
   pipe->get_sample_position = nvc0_context_get_sample_position;
   pipe->emit_string_marker = nvc0_emit_string_marker;

   nouveau_context_init(&nvc0->base);
   nvc0_init_query_functions(nvc0);
   nvc0_init_surface_functions(nvc0);
   nvc0_init_state_functions(nvc0);
   nvc0_init_transfer_functions(nvc0);
   nvc0_init_resource_functions(pipe);

   nvc0->base.invalidate_resource_storage = nvc0_invalidate_resource_storage;

   pipe->create_video_codec = nvc0_create_decoder;
   pipe->create_video_buffer = nvc0_video_buffer_create;

   /* shader builtin library is per-screen, but we need a context for m2mf */
   nvc0_program_library_upload(nvc0);
   nvc0_program_init_tcp_empty(nvc0);
   if (!nvc0->tcp_empty)
      goto out_err;
   /* set the empty tctl prog on next draw in case one is never set */
   nvc0->dirty_3d |= NVC0_NEW_3D_TCTLPROG;

   /* Do not bind the COMPUTE driver constbuf at screen initialization because
    * CBs are aliased between 3D and COMPUTE, but make sure it will be bound if
    * a grid is launched later. */
   nvc0->dirty_cp |= NVC0_NEW_CP_DRIVERCONST;

   /* now that there are no more opportunities for errors, set the current
    * context if there isn't already one.
    */
   if (!screen->cur_ctx) {
      nvc0->state = screen->save_state;
      screen->cur_ctx = nvc0;
      nouveau_pushbuf_bufctx(screen->base.pushbuf, nvc0->bufctx);
   }
   screen->base.pushbuf->kick_notify = nvc0_default_kick_notify;

   /* add permanently resident buffers to bufctxts */

   flags = NV_VRAM_DOMAIN(&screen->base) | NOUVEAU_BO_RD;

   BCTX_REFN_bo(nvc0->bufctx_3d, 3D_TEXT, flags, screen->text);
   BCTX_REFN_bo(nvc0->bufctx_3d, 3D_SCREEN, flags, screen->uniform_bo);
   BCTX_REFN_bo(nvc0->bufctx_3d, 3D_SCREEN, flags, screen->txc);
   if (screen->compute) {
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_TEXT, flags, screen->text);
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->uniform_bo);
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->txc);
   }

   flags = NV_VRAM_DOMAIN(&screen->base) | NOUVEAU_BO_RDWR;

   if (screen->poly_cache)
      BCTX_REFN_bo(nvc0->bufctx_3d, 3D_SCREEN, flags, screen->poly_cache);
   if (screen->compute)
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->tls);

   flags = NOUVEAU_BO_GART | NOUVEAU_BO_WR;

   BCTX_REFN_bo(nvc0->bufctx_3d, 3D_SCREEN, flags, screen->fence.bo);
   BCTX_REFN_bo(nvc0->bufctx, FENCE, flags, screen->fence.bo);
   if (screen->compute)
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->fence.bo);

   /* 2 MiB of scratch space for this context. */
   nvc0->base.scratch.bo_size = 2 << 20;

   /* NOTE(review): tex_handles primed with ~0 — presumably the "no handle
    * bound" sentinel; confirm against the handle-validation code. */
   memset(nvc0->tex_handles, ~0, sizeof(nvc0->tex_handles));

   util_dynarray_init(&nvc0->global_residents);

   return pipe;

out_err:
   /* Tear down whatever was created before the failure point. */
   if (nvc0) {
      if (pipe->stream_uploader)
         u_upload_destroy(pipe->stream_uploader);
      if (nvc0->bufctx_3d)
         nouveau_bufctx_del(&nvc0->bufctx_3d);
      if (nvc0->bufctx_cp)
         nouveau_bufctx_del(&nvc0->bufctx_cp);
      if (nvc0->bufctx)
         nouveau_bufctx_del(&nvc0->bufctx);
      FREE(nvc0->blit);
      FREE(nvc0);
   }
   return NULL;
}
496
497 void
498 nvc0_bufctx_fence(struct nvc0_context *nvc0, struct nouveau_bufctx *bufctx,
499 bool on_flush)
500 {
501 struct nouveau_list *list = on_flush ? &bufctx->current : &bufctx->pending;
502 struct nouveau_list *it;
503 NOUVEAU_DRV_STAT_IFD(unsigned count = 0);
504
505 for (it = list->next; it != list; it = it->next) {
506 struct nouveau_bufref *ref = (struct nouveau_bufref *)it;
507 struct nv04_resource *res = ref->priv;
508 if (res)
509 nvc0_resource_validate(res, (unsigned)ref->priv_data);
510 NOUVEAU_DRV_STAT_IFD(count++);
511 }
512 NOUVEAU_DRV_STAT(&nvc0->screen->base, resource_validate_count, count);
513 }
514
/* Return the table of per-sample pixel locations for the given MSAA sample
 * count. Each entry is an {x, y} pair in 1/16th-of-a-pixel units. The
 * returned pointer references static storage and must not be freed;
 * NULL is returned for unsupported sample counts.
 */
const void *
nvc0_get_sample_locations(unsigned sample_count)
{
   static const uint8_t ms1[1][2] = { { 0x8, 0x8 } };
   static const uint8_t ms2[2][2] = {
      { 0x4, 0x4 }, { 0xc, 0xc } }; /* surface coords (0,0), (1,0) */
   static const uint8_t ms4[4][2] = {
      { 0x6, 0x2 }, { 0xe, 0x6 },   /* (0,0), (1,0) */
      { 0x2, 0xa }, { 0xa, 0xe } }; /* (0,1), (1,1) */
   static const uint8_t ms8[8][2] = {
      { 0x1, 0x7 }, { 0x5, 0x3 },   /* (0,0), (1,0) */
      { 0x3, 0xd }, { 0x7, 0xb },   /* (0,1), (1,1) */
      { 0x9, 0x5 }, { 0xf, 0x1 },   /* (2,0), (3,0) */
      { 0xb, 0xf }, { 0xd, 0x9 } }; /* (2,1), (3,1) */
#if 0
   /* NOTE: there are alternative modes for MS2 and MS8, currently not used */
   static const uint8_t ms8_alt[8][2] = {
      { 0x9, 0x5 }, { 0x7, 0xb },   /* (2,0), (1,1) */
      { 0xd, 0x9 }, { 0x5, 0x3 },   /* (3,1), (1,0) */
      { 0x3, 0xd }, { 0x1, 0x7 },   /* (0,1), (0,0) */
      { 0xb, 0xf }, { 0xf, 0x1 } }; /* (2,1), (3,0) */
#endif

   switch (sample_count) {
   case 0: /* non-MSAA uses the single-sample table */
   case 1:
      return ms1;
   case 2:
      return ms2;
   case 4:
      return ms4;
   case 8:
      return ms8;
   default:
      assert(0);
      return NULL; /* bad sample count -> undefined locations */
   }
}
552
553 static void
554 nvc0_context_get_sample_position(struct pipe_context *pipe,
555 unsigned sample_count, unsigned sample_index,
556 float *xy)
557 {
558 const uint8_t (*ptr)[2];
559
560 ptr = nvc0_get_sample_locations(sample_count);
561 if (!ptr)
562 return;
563
564 xy[0] = ptr[sample_index][0] * 0.0625f;
565 xy[1] = ptr[sample_index][1] * 0.0625f;
566 }