ddebug: handle some cases of non-TGSI shaders
[mesa.git] / src / gallium / drivers / ddebug / dd_context.c
/**************************************************************************
 *
 * Copyright 2015 Advanced Micro Devices, Inc.
 * Copyright 2008 VMware, Inc.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include "dd_pipe.h"
#include "tgsi/tgsi_parse.h"
#include "util/u_inlines.h"
#include "util/u_memory.h"


static void
safe_memcpy(void *dst, const void *src, size_t size)
{
   if (src)
      memcpy(dst, src, size);
   else
      memset(dst, 0, size);
}
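
/* A NULL 'src' in safe_memcpy() means the caller is unbinding state: the
 * shadow copy kept in dd_draw_state for later dumping is zeroed instead of
 * copied, so dumps do not read stale application pointers.
 */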


/********************************************************************
 * queries
 */

static struct dd_query *
dd_query(struct pipe_query *query)
{
   return (struct dd_query *)query;
}

static struct pipe_query *
dd_query_unwrap(struct pipe_query *query)
{
   if (query) {
      return dd_query(query)->query;
   } else {
      return NULL;
   }
}

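/* Each query is wrapped in a dd_query so the dumping code can report the
 * query type later; everything else simply unwraps and forwards to the
 * driver's query object.
 */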
static struct pipe_query *
dd_context_create_query(struct pipe_context *_pipe, unsigned query_type,
                        unsigned index)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;
   struct pipe_query *query;

   query = pipe->create_query(pipe, query_type, index);

   /* Wrap query object. */
   if (query) {
      struct dd_query *dd_query = CALLOC_STRUCT(dd_query);
      if (dd_query) {
         dd_query->type = query_type;
         dd_query->query = query;
         query = (struct pipe_query *)dd_query;
      } else {
         pipe->destroy_query(pipe, query);
         query = NULL;
      }
   }

   return query;
}

static struct pipe_query *
dd_context_create_batch_query(struct pipe_context *_pipe, unsigned num_queries,
                              unsigned *query_types)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;
   struct pipe_query *query;

   query = pipe->create_batch_query(pipe, num_queries, query_types);

   /* Wrap query object. */
   if (query) {
      struct dd_query *dd_query = CALLOC_STRUCT(dd_query);
      if (dd_query) {
         /* no special handling for batch queries yet */
         dd_query->type = query_types[0];
         dd_query->query = query;
         query = (struct pipe_query *)dd_query;
      } else {
         pipe->destroy_query(pipe, query);
         query = NULL;
      }
   }

   return query;
}

static void
dd_context_destroy_query(struct pipe_context *_pipe,
                         struct pipe_query *query)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->destroy_query(pipe, dd_query_unwrap(query));
   FREE(query);
}

static boolean
dd_context_begin_query(struct pipe_context *_pipe, struct pipe_query *query)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   return pipe->begin_query(pipe, dd_query_unwrap(query));
}

static bool
dd_context_end_query(struct pipe_context *_pipe, struct pipe_query *query)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   return pipe->end_query(pipe, dd_query_unwrap(query));
}

static boolean
dd_context_get_query_result(struct pipe_context *_pipe,
                            struct pipe_query *query, boolean wait,
                            union pipe_query_result *result)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   return pipe->get_query_result(pipe, dd_query_unwrap(query), wait, result);
}

static void
dd_context_get_query_result_resource(struct pipe_context *_pipe,
                                     struct pipe_query *query,
                                     boolean wait,
                                     enum pipe_query_value_type result_type,
                                     int index,
                                     struct pipe_resource *resource,
                                     unsigned offset)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->get_query_result_resource(pipe, dd_query_unwrap(query), wait,
                                   result_type, index, resource, offset);
}

static void
dd_context_set_active_query_state(struct pipe_context *_pipe, boolean enable)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->set_active_query_state(pipe, enable);
}

static void
dd_context_render_condition(struct pipe_context *_pipe,
                            struct pipe_query *query, boolean condition,
                            enum pipe_render_cond_flag mode)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;
   struct dd_draw_state *dstate = &dctx->draw_state;

   pipe->render_condition(pipe, dd_query_unwrap(query), condition, mode);
   dstate->render_cond.query = dd_query(query);
   dstate->render_cond.condition = condition;
   dstate->render_cond.mode = mode;
}


/********************************************************************
 * constant (immutable) non-shader states
 */

#define DD_CSO_CREATE(name, shortname) \
   static void * \
   dd_context_create_##name##_state(struct pipe_context *_pipe, \
                                    const struct pipe_##name##_state *state) \
   { \
      struct pipe_context *pipe = dd_context(_pipe)->pipe; \
      struct dd_state *hstate = CALLOC_STRUCT(dd_state); \
 \
      if (!hstate) \
         return NULL; \
      hstate->cso = pipe->create_##name##_state(pipe, state); \
      hstate->state.shortname = *state; \
      return hstate; \
   }

#define DD_CSO_BIND(name, shortname) \
   static void \
   dd_context_bind_##name##_state(struct pipe_context *_pipe, void *state) \
   { \
      struct dd_context *dctx = dd_context(_pipe); \
      struct pipe_context *pipe = dctx->pipe; \
      struct dd_state *hstate = state; \
 \
      dctx->draw_state.shortname = hstate; \
      pipe->bind_##name##_state(pipe, hstate ? hstate->cso : NULL); \
   }

#define DD_CSO_DELETE(name) \
   static void \
   dd_context_delete_##name##_state(struct pipe_context *_pipe, void *state) \
   { \
      struct dd_context *dctx = dd_context(_pipe); \
      struct pipe_context *pipe = dctx->pipe; \
      struct dd_state *hstate = state; \
 \
      pipe->delete_##name##_state(pipe, hstate->cso); \
      FREE(hstate); \
   }

#define DD_CSO_WHOLE(name, shortname) \
   DD_CSO_CREATE(name, shortname) \
   DD_CSO_BIND(name, shortname) \
   DD_CSO_DELETE(name)
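
/* For illustration: DD_CSO_WHOLE(blend, blend) below expands into
 * dd_context_create_blend_state(), dd_context_bind_blend_state() and
 * dd_context_delete_blend_state().  Each wrapper forwards to the underlying
 * context while keeping a copy of the state (and the bound CSO) in
 * dd_context::draw_state so it is available for later dumping.
 */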

DD_CSO_WHOLE(blend, blend)
DD_CSO_WHOLE(rasterizer, rs)
DD_CSO_WHOLE(depth_stencil_alpha, dsa)

DD_CSO_CREATE(sampler, sampler)
DD_CSO_DELETE(sampler)

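/* Sampler states are bound as an array, so binding cannot use DD_CSO_BIND:
 * each element must be unwrapped from its dd_state container before the
 * array is passed on to the driver.  A NULL array means "unbind".
 */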
static void
dd_context_bind_sampler_states(struct pipe_context *_pipe,
                               enum pipe_shader_type shader,
                               unsigned start, unsigned count, void **states)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   safe_memcpy(&dctx->draw_state.sampler_states[shader][start], states,
               sizeof(void*) * count);

   if (states) {
      void *samp[PIPE_MAX_SAMPLERS];
      int i;

      for (i = 0; i < count; i++) {
         struct dd_state *s = states[i];
         samp[i] = s ? s->cso : NULL;
      }

      pipe->bind_sampler_states(pipe, shader, start, count, samp);
   }
   else
      pipe->bind_sampler_states(pipe, shader, start, count, NULL);
}

static void *
dd_context_create_vertex_elements_state(struct pipe_context *_pipe,
                                        unsigned num_elems,
                                        const struct pipe_vertex_element *elems)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;
   struct dd_state *hstate = CALLOC_STRUCT(dd_state);

   if (!hstate)
      return NULL;
   hstate->cso = pipe->create_vertex_elements_state(pipe, num_elems, elems);
   memcpy(hstate->state.velems.velems, elems, sizeof(elems[0]) * num_elems);
   hstate->state.velems.count = num_elems;
   return hstate;
}

DD_CSO_BIND(vertex_elements, velems)
DD_CSO_DELETE(vertex_elements)


/********************************************************************
 * shaders
 */

#define DD_SHADER_NOCREATE(NAME, name) \
   static void \
   dd_context_bind_##name##_state(struct pipe_context *_pipe, void *state) \
   { \
      struct dd_context *dctx = dd_context(_pipe); \
      struct pipe_context *pipe = dctx->pipe; \
      struct dd_state *hstate = state; \
 \
      dctx->draw_state.shaders[PIPE_SHADER_##NAME] = hstate; \
      pipe->bind_##name##_state(pipe, hstate ? hstate->cso : NULL); \
   } \
 \
   static void \
   dd_context_delete_##name##_state(struct pipe_context *_pipe, void *state) \
   { \
      struct dd_context *dctx = dd_context(_pipe); \
      struct pipe_context *pipe = dctx->pipe; \
      struct dd_state *hstate = state; \
 \
      pipe->delete_##name##_state(pipe, hstate->cso); \
      if (hstate->state.shader.type == PIPE_SHADER_IR_TGSI) \
         tgsi_free_tokens(hstate->state.shader.tokens); \
      FREE(hstate); \
   }

#define DD_SHADER(NAME, name) \
   static void * \
   dd_context_create_##name##_state(struct pipe_context *_pipe, \
                                    const struct pipe_shader_state *state) \
   { \
      struct pipe_context *pipe = dd_context(_pipe)->pipe; \
      struct dd_state *hstate = CALLOC_STRUCT(dd_state); \
 \
      if (!hstate) \
         return NULL; \
      hstate->cso = pipe->create_##name##_state(pipe, state); \
      hstate->state.shader = *state; \
      if (hstate->state.shader.type == PIPE_SHADER_IR_TGSI) \
         hstate->state.shader.tokens = tgsi_dup_tokens(state->tokens); \
      return hstate; \
   } \
 \
   DD_SHADER_NOCREATE(NAME, name)

DD_SHADER(FRAGMENT, fs)
DD_SHADER(VERTEX, vs)
DD_SHADER(GEOMETRY, gs)
DD_SHADER(TESS_CTRL, tcs)
DD_SHADER(TESS_EVAL, tes)

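/* create_compute_state does not fit the DD_SHADER template: it takes a
 * pipe_compute_state whose IR may be something other than TGSI (e.g. a
 * native binary), so the tokens are only duplicated for dumping when the
 * IR type really is PIPE_SHADER_IR_TGSI.
 */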
static void *
dd_context_create_compute_state(struct pipe_context *_pipe,
                                const struct pipe_compute_state *state)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;
   struct dd_state *hstate = CALLOC_STRUCT(dd_state);

   if (!hstate)
      return NULL;
   hstate->cso = pipe->create_compute_state(pipe, state);

   hstate->state.shader.type = state->ir_type;

   if (state->ir_type == PIPE_SHADER_IR_TGSI)
      hstate->state.shader.tokens = tgsi_dup_tokens(state->prog);

   return hstate;
}

DD_SHADER_NOCREATE(COMPUTE, compute)

/********************************************************************
 * immediate states
 */

#define DD_IMM_STATE(name, type, deref, ref) \
   static void \
   dd_context_set_##name(struct pipe_context *_pipe, type deref) \
   { \
      struct dd_context *dctx = dd_context(_pipe); \
      struct pipe_context *pipe = dctx->pipe; \
 \
      dctx->draw_state.name = deref; \
      pipe->set_##name(pipe, ref); \
   }
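
/* For illustration: DD_IMM_STATE(blend_color, const struct pipe_blend_color,
 * *state, state) below expands into dd_context_set_blend_color(), which
 * stores a copy of the color in draw_state and then forwards the call, while
 * DD_IMM_STATE(sample_mask, unsigned, sample_mask, sample_mask) generates the
 * same pattern for a value passed by value rather than by pointer.
 */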

DD_IMM_STATE(blend_color, const struct pipe_blend_color, *state, state)
DD_IMM_STATE(stencil_ref, const struct pipe_stencil_ref, *state, state)
DD_IMM_STATE(clip_state, const struct pipe_clip_state, *state, state)
DD_IMM_STATE(sample_mask, unsigned, sample_mask, sample_mask)
DD_IMM_STATE(min_samples, unsigned, min_samples, min_samples)
DD_IMM_STATE(framebuffer_state, const struct pipe_framebuffer_state, *state, state)
DD_IMM_STATE(polygon_stipple, const struct pipe_poly_stipple, *state, state)

static void
dd_context_set_constant_buffer(struct pipe_context *_pipe,
                               enum pipe_shader_type shader, uint index,
                               const struct pipe_constant_buffer *constant_buffer)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   safe_memcpy(&dctx->draw_state.constant_buffers[shader][index],
               constant_buffer, sizeof(*constant_buffer));
   pipe->set_constant_buffer(pipe, shader, index, constant_buffer);
}

static void
dd_context_set_scissor_states(struct pipe_context *_pipe,
                              unsigned start_slot, unsigned num_scissors,
                              const struct pipe_scissor_state *states)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   safe_memcpy(&dctx->draw_state.scissors[start_slot], states,
               sizeof(*states) * num_scissors);
   pipe->set_scissor_states(pipe, start_slot, num_scissors, states);
}

static void
dd_context_set_viewport_states(struct pipe_context *_pipe,
                               unsigned start_slot, unsigned num_viewports,
                               const struct pipe_viewport_state *states)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   safe_memcpy(&dctx->draw_state.viewports[start_slot], states,
               sizeof(*states) * num_viewports);
   pipe->set_viewport_states(pipe, start_slot, num_viewports, states);
}

static void dd_context_set_tess_state(struct pipe_context *_pipe,
                                      const float default_outer_level[4],
                                      const float default_inner_level[2])
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   memcpy(dctx->draw_state.tess_default_levels, default_outer_level,
          sizeof(float) * 4);
   memcpy(dctx->draw_state.tess_default_levels+4, default_inner_level,
          sizeof(float) * 2);
   pipe->set_tess_state(pipe, default_outer_level, default_inner_level);
}


/********************************************************************
 * views
 */

static struct pipe_surface *
dd_context_create_surface(struct pipe_context *_pipe,
                          struct pipe_resource *resource,
                          const struct pipe_surface *surf_tmpl)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;
   struct pipe_surface *view =
      pipe->create_surface(pipe, resource, surf_tmpl);

   if (!view)
      return NULL;
   view->context = _pipe;
   return view;
}

static void
dd_context_surface_destroy(struct pipe_context *_pipe,
                           struct pipe_surface *surf)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->surface_destroy(pipe, surf);
}

static struct pipe_sampler_view *
dd_context_create_sampler_view(struct pipe_context *_pipe,
                               struct pipe_resource *resource,
                               const struct pipe_sampler_view *templ)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;
   struct pipe_sampler_view *view =
      pipe->create_sampler_view(pipe, resource, templ);

   if (!view)
      return NULL;
   view->context = _pipe;
   return view;
}

static void
dd_context_sampler_view_destroy(struct pipe_context *_pipe,
                                struct pipe_sampler_view *view)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->sampler_view_destroy(pipe, view);
}

static struct pipe_stream_output_target *
dd_context_create_stream_output_target(struct pipe_context *_pipe,
                                       struct pipe_resource *res,
                                       unsigned buffer_offset,
                                       unsigned buffer_size)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;
   struct pipe_stream_output_target *view =
      pipe->create_stream_output_target(pipe, res, buffer_offset,
                                        buffer_size);

   if (!view)
      return NULL;
   view->context = _pipe;
   return view;
}

static void
dd_context_stream_output_target_destroy(struct pipe_context *_pipe,
                                        struct pipe_stream_output_target *target)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->stream_output_target_destroy(pipe, target);
}


/********************************************************************
 * set states
 */

static void
dd_context_set_sampler_views(struct pipe_context *_pipe,
                             enum pipe_shader_type shader,
                             unsigned start, unsigned num,
                             struct pipe_sampler_view **views)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   safe_memcpy(&dctx->draw_state.sampler_views[shader][start], views,
               sizeof(views[0]) * num);
   pipe->set_sampler_views(pipe, shader, start, num, views);
}

static void
dd_context_set_shader_images(struct pipe_context *_pipe,
                             enum pipe_shader_type shader,
                             unsigned start, unsigned num,
                             const struct pipe_image_view *views)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   safe_memcpy(&dctx->draw_state.shader_images[shader][start], views,
               sizeof(views[0]) * num);
   pipe->set_shader_images(pipe, shader, start, num, views);
}

static void
dd_context_set_shader_buffers(struct pipe_context *_pipe, unsigned shader,
                              unsigned start, unsigned num_buffers,
                              const struct pipe_shader_buffer *buffers)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   safe_memcpy(&dctx->draw_state.shader_buffers[shader][start], buffers,
               sizeof(buffers[0]) * num_buffers);
   pipe->set_shader_buffers(pipe, shader, start, num_buffers, buffers);
}

static void
dd_context_set_vertex_buffers(struct pipe_context *_pipe,
                              unsigned start, unsigned num_buffers,
                              const struct pipe_vertex_buffer *buffers)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   safe_memcpy(&dctx->draw_state.vertex_buffers[start], buffers,
               sizeof(buffers[0]) * num_buffers);
   pipe->set_vertex_buffers(pipe, start, num_buffers, buffers);
}

static void
dd_context_set_stream_output_targets(struct pipe_context *_pipe,
                                     unsigned num_targets,
                                     struct pipe_stream_output_target **tgs,
                                     const unsigned *offsets)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;
   struct dd_draw_state *dstate = &dctx->draw_state;

   dstate->num_so_targets = num_targets;
   safe_memcpy(dstate->so_targets, tgs, sizeof(*tgs) * num_targets);
   safe_memcpy(dstate->so_offsets, offsets, sizeof(*offsets) * num_targets);
   pipe->set_stream_output_targets(pipe, num_targets, tgs, offsets);
}

static void
dd_context_destroy(struct pipe_context *_pipe)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   if (dctx->thread) {
      mtx_lock(&dctx->mutex);
      dctx->kill_thread = 1;
      mtx_unlock(&dctx->mutex);
      thrd_join(dctx->thread, NULL);
      mtx_destroy(&dctx->mutex);
      assert(!dctx->records);
   }

   if (dctx->fence) {
      pipe->transfer_unmap(pipe, dctx->fence_transfer);
      pipe_resource_reference(&dctx->fence, NULL);
   }
   pipe->destroy(pipe);
   FREE(dctx);
}


/********************************************************************
 * transfer
 */

static void *
dd_context_transfer_map(struct pipe_context *_pipe,
                        struct pipe_resource *resource, unsigned level,
                        unsigned usage, const struct pipe_box *box,
                        struct pipe_transfer **transfer)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   return pipe->transfer_map(pipe, resource, level, usage, box, transfer);
}

static void
dd_context_transfer_flush_region(struct pipe_context *_pipe,
                                 struct pipe_transfer *transfer,
                                 const struct pipe_box *box)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->transfer_flush_region(pipe, transfer, box);
}

static void
dd_context_transfer_unmap(struct pipe_context *_pipe,
                          struct pipe_transfer *transfer)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->transfer_unmap(pipe, transfer);
}

static void
dd_context_buffer_subdata(struct pipe_context *_pipe,
                          struct pipe_resource *resource,
                          unsigned usage, unsigned offset,
                          unsigned size, const void *data)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->buffer_subdata(pipe, resource, usage, offset, size, data);
}

static void
dd_context_texture_subdata(struct pipe_context *_pipe,
                           struct pipe_resource *resource,
                           unsigned level, unsigned usage,
                           const struct pipe_box *box,
                           const void *data, unsigned stride,
                           unsigned layer_stride)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->texture_subdata(pipe, resource, level, usage, box, data,
                         stride, layer_stride);
}


/********************************************************************
 * miscellaneous
 */

static void
dd_context_texture_barrier(struct pipe_context *_pipe, unsigned flags)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->texture_barrier(pipe, flags);
}

static void
dd_context_memory_barrier(struct pipe_context *_pipe, unsigned flags)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->memory_barrier(pipe, flags);
}

static bool
dd_context_resource_commit(struct pipe_context *_pipe,
                           struct pipe_resource *resource,
                           unsigned level, struct pipe_box *box, bool commit)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   return pipe->resource_commit(pipe, resource, level, box, commit);
}

static void
dd_context_get_sample_position(struct pipe_context *_pipe,
                               unsigned sample_count, unsigned sample_index,
                               float *out_value)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   return pipe->get_sample_position(pipe, sample_count, sample_index,
                                    out_value);
}

static void
dd_context_invalidate_resource(struct pipe_context *_pipe,
                               struct pipe_resource *resource)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->invalidate_resource(pipe, resource);
}

static enum pipe_reset_status
dd_context_get_device_reset_status(struct pipe_context *_pipe)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   return pipe->get_device_reset_status(pipe);
}

static void
dd_context_set_device_reset_callback(struct pipe_context *_pipe,
                                     const struct pipe_device_reset_callback *cb)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   return pipe->set_device_reset_callback(pipe, cb);
}

static void
dd_context_emit_string_marker(struct pipe_context *_pipe,
                              const char *string, int len)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   pipe->emit_string_marker(pipe, string, len);
   dd_parse_apitrace_marker(string, len, &dctx->draw_state.apitrace_call_number);
}

static void
dd_context_dump_debug_state(struct pipe_context *_pipe, FILE *stream,
                            unsigned flags)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   return pipe->dump_debug_state(pipe, stream, flags);
}

static uint64_t
dd_context_create_texture_handle(struct pipe_context *_pipe,
                                 struct pipe_sampler_view *view,
                                 const struct pipe_sampler_state *state)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   return pipe->create_texture_handle(pipe, view, state);
}

static void
dd_context_delete_texture_handle(struct pipe_context *_pipe, uint64_t handle)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->delete_texture_handle(pipe, handle);
}

static void
dd_context_make_texture_handle_resident(struct pipe_context *_pipe,
                                         uint64_t handle, bool resident)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->make_texture_handle_resident(pipe, handle, resident);
}

static uint64_t
dd_context_create_image_handle(struct pipe_context *_pipe,
                               const struct pipe_image_view *image)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   return pipe->create_image_handle(pipe, image);
}

static void
dd_context_delete_image_handle(struct pipe_context *_pipe, uint64_t handle)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->delete_image_handle(pipe, handle);
}

static void
dd_context_make_image_handle_resident(struct pipe_context *_pipe,
                                      uint64_t handle, unsigned access,
                                      bool resident)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->make_image_handle_resident(pipe, handle, access, resident);
}

struct pipe_context *
dd_context_create(struct dd_screen *dscreen, struct pipe_context *pipe)
{
   struct dd_context *dctx;

   if (!pipe)
      return NULL;

   dctx = CALLOC_STRUCT(dd_context);
   if (!dctx)
      goto fail;

   dctx->pipe = pipe;
   dctx->base.priv = pipe->priv; /* expose wrapped priv data */
   dctx->base.screen = &dscreen->base;
   dctx->base.stream_uploader = pipe->stream_uploader;
   dctx->base.const_uploader = pipe->const_uploader;

   dctx->base.destroy = dd_context_destroy;

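   /* CTX_INIT is defined in dd_pipe.h; it installs the dd_context_* wrapper
    * for each entry point, typically only when the wrapped context actually
    * implements it.  Hooks that ddebug does not wrap yet are listed below as
    * comments.
    */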
   CTX_INIT(render_condition);
   CTX_INIT(create_query);
   CTX_INIT(create_batch_query);
   CTX_INIT(destroy_query);
   CTX_INIT(begin_query);
   CTX_INIT(end_query);
   CTX_INIT(get_query_result);
   CTX_INIT(get_query_result_resource);
   CTX_INIT(set_active_query_state);
   CTX_INIT(create_blend_state);
   CTX_INIT(bind_blend_state);
   CTX_INIT(delete_blend_state);
   CTX_INIT(create_sampler_state);
   CTX_INIT(bind_sampler_states);
   CTX_INIT(delete_sampler_state);
   CTX_INIT(create_rasterizer_state);
   CTX_INIT(bind_rasterizer_state);
   CTX_INIT(delete_rasterizer_state);
   CTX_INIT(create_depth_stencil_alpha_state);
   CTX_INIT(bind_depth_stencil_alpha_state);
   CTX_INIT(delete_depth_stencil_alpha_state);
   CTX_INIT(create_fs_state);
   CTX_INIT(bind_fs_state);
   CTX_INIT(delete_fs_state);
   CTX_INIT(create_vs_state);
   CTX_INIT(bind_vs_state);
   CTX_INIT(delete_vs_state);
   CTX_INIT(create_gs_state);
   CTX_INIT(bind_gs_state);
   CTX_INIT(delete_gs_state);
   CTX_INIT(create_tcs_state);
   CTX_INIT(bind_tcs_state);
   CTX_INIT(delete_tcs_state);
   CTX_INIT(create_tes_state);
   CTX_INIT(bind_tes_state);
   CTX_INIT(delete_tes_state);
   CTX_INIT(create_compute_state);
   CTX_INIT(bind_compute_state);
   CTX_INIT(delete_compute_state);
   CTX_INIT(create_vertex_elements_state);
   CTX_INIT(bind_vertex_elements_state);
   CTX_INIT(delete_vertex_elements_state);
   CTX_INIT(set_blend_color);
   CTX_INIT(set_stencil_ref);
   CTX_INIT(set_sample_mask);
   CTX_INIT(set_min_samples);
   CTX_INIT(set_clip_state);
   CTX_INIT(set_constant_buffer);
   CTX_INIT(set_framebuffer_state);
   CTX_INIT(set_polygon_stipple);
   CTX_INIT(set_scissor_states);
   CTX_INIT(set_viewport_states);
   CTX_INIT(set_sampler_views);
   CTX_INIT(set_tess_state);
   CTX_INIT(set_shader_buffers);
   CTX_INIT(set_shader_images);
   CTX_INIT(set_vertex_buffers);
   CTX_INIT(create_stream_output_target);
   CTX_INIT(stream_output_target_destroy);
   CTX_INIT(set_stream_output_targets);
   CTX_INIT(create_sampler_view);
   CTX_INIT(sampler_view_destroy);
   CTX_INIT(create_surface);
   CTX_INIT(surface_destroy);
   CTX_INIT(transfer_map);
   CTX_INIT(transfer_flush_region);
   CTX_INIT(transfer_unmap);
   CTX_INIT(buffer_subdata);
   CTX_INIT(texture_subdata);
   CTX_INIT(texture_barrier);
   CTX_INIT(memory_barrier);
   CTX_INIT(resource_commit);
   /* create_video_codec */
   /* create_video_buffer */
   /* set_compute_resources */
   /* set_global_binding */
   CTX_INIT(get_sample_position);
   CTX_INIT(invalidate_resource);
   CTX_INIT(get_device_reset_status);
   CTX_INIT(set_device_reset_callback);
   CTX_INIT(dump_debug_state);
   CTX_INIT(emit_string_marker);
   CTX_INIT(create_texture_handle);
   CTX_INIT(delete_texture_handle);
   CTX_INIT(make_texture_handle_resident);
   CTX_INIT(create_image_handle);
   CTX_INIT(delete_image_handle);
   CTX_INIT(make_image_handle_resident);

   dd_init_draw_functions(dctx);

   dctx->draw_state.sample_mask = ~0;

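   /* The pipelined hang-detection mode allocates a small, persistently
    * mapped fence buffer and spawns dd_thread_pipelined_hang_detect() to
    * watch it; dd_context_destroy() is responsible for stopping the thread
    * and unmapping the fence again.
    */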
   if (dscreen->mode == DD_DETECT_HANGS_PIPELINED) {
      dctx->fence = pipe_buffer_create(dscreen->screen, PIPE_BIND_CUSTOM,
                                       PIPE_USAGE_STAGING, 4);
      if (!dctx->fence)
         goto fail;

      dctx->mapped_fence = pipe_buffer_map(pipe, dctx->fence,
                                           PIPE_TRANSFER_READ_WRITE |
                                           PIPE_TRANSFER_PERSISTENT |
                                           PIPE_TRANSFER_COHERENT,
                                           &dctx->fence_transfer);
      if (!dctx->mapped_fence)
         goto fail;

      *dctx->mapped_fence = 0;

      (void) mtx_init(&dctx->mutex, mtx_plain);
      dctx->thread = u_thread_create(dd_thread_pipelined_hang_detect, dctx);
      if (!dctx->thread) {
         mtx_destroy(&dctx->mutex);
         goto fail;
      }
   }

   return &dctx->base;

fail:
   if (dctx) {
      if (dctx->mapped_fence)
         pipe_transfer_unmap(pipe, dctx->fence_transfer);
      pipe_resource_reference(&dctx->fence, NULL);
      FREE(dctx);
   }
   pipe->destroy(pipe);
   return NULL;
}