gallium: remove pipe_index_buffer and set_index_buffer
[mesa.git] src/gallium/drivers/ddebug/dd_context.c
/**************************************************************************
 *
 * Copyright 2015 Advanced Micro Devices, Inc.
 * Copyright 2008 VMware, Inc.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include "dd_pipe.h"
#include "tgsi/tgsi_parse.h"
#include "util/u_inlines.h"
#include "util/u_memory.h"


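/* Copy "size" bytes from "src", or zero the destination when "src" is NULL.
 * The set_* wrappers below use this to mirror the driver's currently bound
 * state into dd_draw_state: passing NULL to a set_* hook means "unbind", so
 * the shadow copy is cleared rather than left stale.
 */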
static void
safe_memcpy(void *dst, const void *src, size_t size)
{
   if (src)
      memcpy(dst, src, size);
   else
      memset(dst, 0, size);
}


/********************************************************************
 * queries
 */

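/* Query objects handed back to the state tracker are wrapped in a dd_query
 * (declared in dd_pipe.h), which remembers the query type alongside the
 * driver's query object so the debug dump code can report it later.
 * dd_query() casts back to the wrapper; dd_query_unwrap() recovers the
 * driver's pipe_query before a call is forwarded.
 */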
static struct dd_query *
dd_query(struct pipe_query *query)
{
   return (struct dd_query *)query;
}

static struct pipe_query *
dd_query_unwrap(struct pipe_query *query)
{
   if (query) {
      return dd_query(query)->query;
   } else {
      return NULL;
   }
}

static struct pipe_query *
dd_context_create_query(struct pipe_context *_pipe, unsigned query_type,
                        unsigned index)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;
   struct pipe_query *query;

   query = pipe->create_query(pipe, query_type, index);

   /* Wrap query object. */
   if (query) {
      struct dd_query *dd_query = CALLOC_STRUCT(dd_query);
      if (dd_query) {
         dd_query->type = query_type;
         dd_query->query = query;
         query = (struct pipe_query *)dd_query;
      } else {
         pipe->destroy_query(pipe, query);
         query = NULL;
      }
   }

   return query;
}

static struct pipe_query *
dd_context_create_batch_query(struct pipe_context *_pipe, unsigned num_queries,
                              unsigned *query_types)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;
   struct pipe_query *query;

   query = pipe->create_batch_query(pipe, num_queries, query_types);

   /* Wrap query object. */
   if (query) {
      struct dd_query *dd_query = CALLOC_STRUCT(dd_query);
      if (dd_query) {
         /* no special handling for batch queries yet */
         dd_query->type = query_types[0];
         dd_query->query = query;
         query = (struct pipe_query *)dd_query;
      } else {
         pipe->destroy_query(pipe, query);
         query = NULL;
      }
   }

   return query;
}

static void
dd_context_destroy_query(struct pipe_context *_pipe,
                         struct pipe_query *query)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->destroy_query(pipe, dd_query_unwrap(query));
   FREE(query);
}

static boolean
dd_context_begin_query(struct pipe_context *_pipe, struct pipe_query *query)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   return pipe->begin_query(pipe, dd_query_unwrap(query));
}

static bool
dd_context_end_query(struct pipe_context *_pipe, struct pipe_query *query)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   return pipe->end_query(pipe, dd_query_unwrap(query));
}

static boolean
dd_context_get_query_result(struct pipe_context *_pipe,
                            struct pipe_query *query, boolean wait,
                            union pipe_query_result *result)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   return pipe->get_query_result(pipe, dd_query_unwrap(query), wait, result);
}

static void
dd_context_get_query_result_resource(struct pipe_context *_pipe,
                                     struct pipe_query *query,
                                     boolean wait,
                                     enum pipe_query_value_type result_type,
                                     int index,
                                     struct pipe_resource *resource,
                                     unsigned offset)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->get_query_result_resource(pipe, dd_query_unwrap(query), wait,
                                   result_type, index, resource, offset);
}

static void
dd_context_set_active_query_state(struct pipe_context *_pipe, boolean enable)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->set_active_query_state(pipe, enable);
}

static void
dd_context_render_condition(struct pipe_context *_pipe,
                            struct pipe_query *query, boolean condition,
                            enum pipe_render_cond_flag mode)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;
   struct dd_draw_state *dstate = &dctx->draw_state;

   pipe->render_condition(pipe, dd_query_unwrap(query), condition, mode);
   dstate->render_cond.query = dd_query(query);
   dstate->render_cond.condition = condition;
   dstate->render_cond.mode = mode;
}


/********************************************************************
 * constant (immutable) non-shader states
 */

#define DD_CSO_CREATE(name, shortname) \
   static void * \
   dd_context_create_##name##_state(struct pipe_context *_pipe, \
                                    const struct pipe_##name##_state *state) \
   { \
      struct pipe_context *pipe = dd_context(_pipe)->pipe; \
      struct dd_state *hstate = CALLOC_STRUCT(dd_state); \
 \
      if (!hstate) \
         return NULL; \
      hstate->cso = pipe->create_##name##_state(pipe, state); \
      hstate->state.shortname = *state; \
      return hstate; \
   }

#define DD_CSO_BIND(name, shortname) \
   static void \
   dd_context_bind_##name##_state(struct pipe_context *_pipe, void *state) \
   { \
      struct dd_context *dctx = dd_context(_pipe); \
      struct pipe_context *pipe = dctx->pipe; \
      struct dd_state *hstate = state; \
 \
      dctx->draw_state.shortname = hstate; \
      pipe->bind_##name##_state(pipe, hstate ? hstate->cso : NULL); \
   }

#define DD_CSO_DELETE(name) \
   static void \
   dd_context_delete_##name##_state(struct pipe_context *_pipe, void *state) \
   { \
      struct dd_context *dctx = dd_context(_pipe); \
      struct pipe_context *pipe = dctx->pipe; \
      struct dd_state *hstate = state; \
 \
      pipe->delete_##name##_state(pipe, hstate->cso); \
      FREE(hstate); \
   }

#define DD_CSO_WHOLE(name, shortname) \
   DD_CSO_CREATE(name, shortname) \
   DD_CSO_BIND(name, shortname) \
   DD_CSO_DELETE(name)

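/* For illustration only: DD_CSO_WHOLE(blend, blend) below expands to a
 * create/bind/delete trio along the lines of
 *
 *    static void *dd_context_create_blend_state(struct pipe_context *_pipe,
 *                                               const struct pipe_blend_state *state);
 *    static void dd_context_bind_blend_state(struct pipe_context *_pipe, void *state);
 *    static void dd_context_delete_blend_state(struct pipe_context *_pipe, void *state);
 *
 * where the create hook wraps the driver's CSO in a dd_state that also keeps
 * a copy of the template, and the bind hook records the wrapper in
 * dd_draw_state.blend (dd_draw_state.rs / .dsa for the other two) before
 * forwarding the driver's CSO.
 */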
DD_CSO_WHOLE(blend, blend)
DD_CSO_WHOLE(rasterizer, rs)
DD_CSO_WHOLE(depth_stencil_alpha, dsa)

DD_CSO_CREATE(sampler, sampler)
DD_CSO_DELETE(sampler)

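/* Sampler binding can't use DD_CSO_BIND: the hook takes an array of CSOs,
 * so each dd_state wrapper has to be unwrapped individually before the
 * driver sees the array.
 */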
static void
dd_context_bind_sampler_states(struct pipe_context *_pipe,
                               enum pipe_shader_type shader,
                               unsigned start, unsigned count, void **states)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   memcpy(&dctx->draw_state.sampler_states[shader][start], states,
          sizeof(void*) * count);

   if (states) {
      void *samp[PIPE_MAX_SAMPLERS];
      int i;

      for (i = 0; i < count; i++) {
         struct dd_state *s = states[i];
         samp[i] = s ? s->cso : NULL;
      }

      pipe->bind_sampler_states(pipe, shader, start, count, samp);
   }
   else
      pipe->bind_sampler_states(pipe, shader, start, count, NULL);
}

static void *
dd_context_create_vertex_elements_state(struct pipe_context *_pipe,
                                        unsigned num_elems,
                                        const struct pipe_vertex_element *elems)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;
   struct dd_state *hstate = CALLOC_STRUCT(dd_state);

   if (!hstate)
      return NULL;
   hstate->cso = pipe->create_vertex_elements_state(pipe, num_elems, elems);
   memcpy(hstate->state.velems.velems, elems, sizeof(elems[0]) * num_elems);
   hstate->state.velems.count = num_elems;
   return hstate;
}

DD_CSO_BIND(vertex_elements, velems)
DD_CSO_DELETE(vertex_elements)


/********************************************************************
 * shaders
 */

#define DD_SHADER_NOCREATE(NAME, name) \
   static void \
   dd_context_bind_##name##_state(struct pipe_context *_pipe, void *state) \
   { \
      struct dd_context *dctx = dd_context(_pipe); \
      struct pipe_context *pipe = dctx->pipe; \
      struct dd_state *hstate = state; \
 \
      dctx->draw_state.shaders[PIPE_SHADER_##NAME] = hstate; \
      pipe->bind_##name##_state(pipe, hstate ? hstate->cso : NULL); \
   } \
 \
   static void \
   dd_context_delete_##name##_state(struct pipe_context *_pipe, void *state) \
   { \
      struct dd_context *dctx = dd_context(_pipe); \
      struct pipe_context *pipe = dctx->pipe; \
      struct dd_state *hstate = state; \
 \
      pipe->delete_##name##_state(pipe, hstate->cso); \
      tgsi_free_tokens(hstate->state.shader.tokens); \
      FREE(hstate); \
   }

#define DD_SHADER(NAME, name) \
   static void * \
   dd_context_create_##name##_state(struct pipe_context *_pipe, \
                                    const struct pipe_shader_state *state) \
   { \
      struct pipe_context *pipe = dd_context(_pipe)->pipe; \
      struct dd_state *hstate = CALLOC_STRUCT(dd_state); \
 \
      if (!hstate) \
         return NULL; \
      hstate->cso = pipe->create_##name##_state(pipe, state); \
      hstate->state.shader = *state; \
      hstate->state.shader.tokens = tgsi_dup_tokens(state->tokens); \
      return hstate; \
   } \
 \
   DD_SHADER_NOCREATE(NAME, name)

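/* Instantiate create/bind/delete wrappers for every shader stage. The create
 * hook duplicates the TGSI tokens so the wrapper owns a copy it can dump
 * later, and the bind hook records the wrapper in dd_draw_state.shaders[]
 * indexed by PIPE_SHADER_<stage>.
 */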
DD_SHADER(FRAGMENT, fs)
DD_SHADER(VERTEX, vs)
DD_SHADER(GEOMETRY, gs)
DD_SHADER(TESS_CTRL, tcs)
DD_SHADER(TESS_EVAL, tes)

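/* Compute can't reuse DD_SHADER because it takes a pipe_compute_state
 * (ir_type + prog) rather than a pipe_shader_state, and the program is only
 * duplicated when it is actually TGSI.
 */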
static void *
dd_context_create_compute_state(struct pipe_context *_pipe,
                                const struct pipe_compute_state *state)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;
   struct dd_state *hstate = CALLOC_STRUCT(dd_state);

   if (!hstate)
      return NULL;
   hstate->cso = pipe->create_compute_state(pipe, state);

   hstate->state.shader.type = state->ir_type;

   if (state->ir_type == PIPE_SHADER_IR_TGSI)
      hstate->state.shader.tokens = tgsi_dup_tokens(state->prog);

   return hstate;
}

DD_SHADER_NOCREATE(COMPUTE, compute)

/********************************************************************
 * immediate states
 */

#define DD_IMM_STATE(name, type, deref, ref) \
   static void \
   dd_context_set_##name(struct pipe_context *_pipe, type deref) \
   { \
      struct dd_context *dctx = dd_context(_pipe); \
      struct pipe_context *pipe = dctx->pipe; \
 \
      dctx->draw_state.name = deref; \
      pipe->set_##name(pipe, ref); \
   }

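/* The deref/ref arguments let one macro cover both by-value and by-pointer
 * parameters. For illustration only, DD_IMM_STATE(sample_mask, unsigned,
 * sample_mask, sample_mask) expands to roughly
 *
 *    static void dd_context_set_sample_mask(struct pipe_context *_pipe,
 *                                           unsigned sample_mask)
 *    {
 *       ...
 *       dctx->draw_state.sample_mask = sample_mask;
 *       pipe->set_sample_mask(pipe, sample_mask);
 *    }
 *
 * while the pointer variants pass "*state" as deref (saving a copy) and
 * "state" as ref (forwarding the pointer).
 */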
DD_IMM_STATE(blend_color, const struct pipe_blend_color, *state, state)
DD_IMM_STATE(stencil_ref, const struct pipe_stencil_ref, *state, state)
DD_IMM_STATE(clip_state, const struct pipe_clip_state, *state, state)
DD_IMM_STATE(sample_mask, unsigned, sample_mask, sample_mask)
DD_IMM_STATE(min_samples, unsigned, min_samples, min_samples)
DD_IMM_STATE(framebuffer_state, const struct pipe_framebuffer_state, *state, state)
DD_IMM_STATE(polygon_stipple, const struct pipe_poly_stipple, *state, state)

static void
dd_context_set_constant_buffer(struct pipe_context *_pipe,
                               enum pipe_shader_type shader, uint index,
                               const struct pipe_constant_buffer *constant_buffer)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   safe_memcpy(&dctx->draw_state.constant_buffers[shader][index],
               constant_buffer, sizeof(*constant_buffer));
   pipe->set_constant_buffer(pipe, shader, index, constant_buffer);
}

static void
dd_context_set_scissor_states(struct pipe_context *_pipe,
                              unsigned start_slot, unsigned num_scissors,
                              const struct pipe_scissor_state *states)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   safe_memcpy(&dctx->draw_state.scissors[start_slot], states,
               sizeof(*states) * num_scissors);
   pipe->set_scissor_states(pipe, start_slot, num_scissors, states);
}

static void
dd_context_set_viewport_states(struct pipe_context *_pipe,
                               unsigned start_slot, unsigned num_viewports,
                               const struct pipe_viewport_state *states)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   safe_memcpy(&dctx->draw_state.viewports[start_slot], states,
               sizeof(*states) * num_viewports);
   pipe->set_viewport_states(pipe, start_slot, num_viewports, states);
}

static void dd_context_set_tess_state(struct pipe_context *_pipe,
                                      const float default_outer_level[4],
                                      const float default_inner_level[2])
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   memcpy(dctx->draw_state.tess_default_levels, default_outer_level,
          sizeof(float) * 4);
   memcpy(dctx->draw_state.tess_default_levels+4, default_inner_level,
          sizeof(float) * 2);
   pipe->set_tess_state(pipe, default_outer_level, default_inner_level);
}


/********************************************************************
 * views
 */

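/* Views (surfaces, sampler views, stream output targets) are not wrapped.
 * Only their context pointer is redirected to the ddebug context so that
 * the matching destroy call comes back through the wrapper and can be
 * forwarded to the driver that actually created the view.
 */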
static struct pipe_surface *
dd_context_create_surface(struct pipe_context *_pipe,
                          struct pipe_resource *resource,
                          const struct pipe_surface *surf_tmpl)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;
   struct pipe_surface *view =
      pipe->create_surface(pipe, resource, surf_tmpl);

   if (!view)
      return NULL;
   view->context = _pipe;
   return view;
}

static void
dd_context_surface_destroy(struct pipe_context *_pipe,
                           struct pipe_surface *surf)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->surface_destroy(pipe, surf);
}

static struct pipe_sampler_view *
dd_context_create_sampler_view(struct pipe_context *_pipe,
                               struct pipe_resource *resource,
                               const struct pipe_sampler_view *templ)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;
   struct pipe_sampler_view *view =
      pipe->create_sampler_view(pipe, resource, templ);

   if (!view)
      return NULL;
   view->context = _pipe;
   return view;
}

static void
dd_context_sampler_view_destroy(struct pipe_context *_pipe,
                                struct pipe_sampler_view *view)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->sampler_view_destroy(pipe, view);
}

static struct pipe_stream_output_target *
dd_context_create_stream_output_target(struct pipe_context *_pipe,
                                       struct pipe_resource *res,
                                       unsigned buffer_offset,
                                       unsigned buffer_size)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;
   struct pipe_stream_output_target *view =
      pipe->create_stream_output_target(pipe, res, buffer_offset,
                                        buffer_size);

   if (!view)
      return NULL;
   view->context = _pipe;
   return view;
}

static void
dd_context_stream_output_target_destroy(struct pipe_context *_pipe,
                                        struct pipe_stream_output_target *target)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->stream_output_target_destroy(pipe, target);
}


/********************************************************************
 * set states
 */

static void
dd_context_set_sampler_views(struct pipe_context *_pipe,
                             enum pipe_shader_type shader,
                             unsigned start, unsigned num,
                             struct pipe_sampler_view **views)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   safe_memcpy(&dctx->draw_state.sampler_views[shader][start], views,
               sizeof(views[0]) * num);
   pipe->set_sampler_views(pipe, shader, start, num, views);
}

static void
dd_context_set_shader_images(struct pipe_context *_pipe,
                             enum pipe_shader_type shader,
                             unsigned start, unsigned num,
                             const struct pipe_image_view *views)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   safe_memcpy(&dctx->draw_state.shader_images[shader][start], views,
               sizeof(views[0]) * num);
   pipe->set_shader_images(pipe, shader, start, num, views);
}

static void
dd_context_set_shader_buffers(struct pipe_context *_pipe, unsigned shader,
                              unsigned start, unsigned num_buffers,
                              const struct pipe_shader_buffer *buffers)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   safe_memcpy(&dctx->draw_state.shader_buffers[shader][start], buffers,
               sizeof(buffers[0]) * num_buffers);
   pipe->set_shader_buffers(pipe, shader, start, num_buffers, buffers);
}

static void
dd_context_set_vertex_buffers(struct pipe_context *_pipe,
                              unsigned start, unsigned num_buffers,
                              const struct pipe_vertex_buffer *buffers)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   safe_memcpy(&dctx->draw_state.vertex_buffers[start], buffers,
               sizeof(buffers[0]) * num_buffers);
   pipe->set_vertex_buffers(pipe, start, num_buffers, buffers);
}

static void
dd_context_set_stream_output_targets(struct pipe_context *_pipe,
                                     unsigned num_targets,
                                     struct pipe_stream_output_target **tgs,
                                     const unsigned *offsets)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;
   struct dd_draw_state *dstate = &dctx->draw_state;

   dstate->num_so_targets = num_targets;
   safe_memcpy(dstate->so_targets, tgs, sizeof(*tgs) * num_targets);
   safe_memcpy(dstate->so_offsets, offsets, sizeof(*offsets) * num_targets);
   pipe->set_stream_output_targets(pipe, num_targets, tgs, offsets);
}

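/* Teardown order matters here: the pipelined hang-detection thread (if any)
 * is told to exit and joined before the fence buffer used for hang detection
 * is unmapped and released, and the wrapped driver context is only destroyed
 * after both.
 */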
static void
dd_context_destroy(struct pipe_context *_pipe)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   if (dctx->thread) {
      mtx_lock(&dctx->mutex);
      dctx->kill_thread = 1;
      mtx_unlock(&dctx->mutex);
      thrd_join(dctx->thread, NULL);
      mtx_destroy(&dctx->mutex);
      assert(!dctx->records);
   }

   if (dctx->fence) {
      pipe->transfer_unmap(pipe, dctx->fence_transfer);
      pipe_resource_reference(&dctx->fence, NULL);
   }
   pipe->destroy(pipe);
   FREE(dctx);
}


/********************************************************************
 * transfer
 */

static void *
dd_context_transfer_map(struct pipe_context *_pipe,
                        struct pipe_resource *resource, unsigned level,
                        unsigned usage, const struct pipe_box *box,
                        struct pipe_transfer **transfer)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   return pipe->transfer_map(pipe, resource, level, usage, box, transfer);
}

static void
dd_context_transfer_flush_region(struct pipe_context *_pipe,
                                 struct pipe_transfer *transfer,
                                 const struct pipe_box *box)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->transfer_flush_region(pipe, transfer, box);
}

static void
dd_context_transfer_unmap(struct pipe_context *_pipe,
                          struct pipe_transfer *transfer)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->transfer_unmap(pipe, transfer);
}

static void
dd_context_buffer_subdata(struct pipe_context *_pipe,
                          struct pipe_resource *resource,
                          unsigned usage, unsigned offset,
                          unsigned size, const void *data)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->buffer_subdata(pipe, resource, usage, offset, size, data);
}

static void
dd_context_texture_subdata(struct pipe_context *_pipe,
                           struct pipe_resource *resource,
                           unsigned level, unsigned usage,
                           const struct pipe_box *box,
                           const void *data, unsigned stride,
                           unsigned layer_stride)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->texture_subdata(pipe, resource, level, usage, box, data,
                         stride, layer_stride);
}


/********************************************************************
 * miscellaneous
 */

static void
dd_context_texture_barrier(struct pipe_context *_pipe, unsigned flags)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->texture_barrier(pipe, flags);
}

static void
dd_context_memory_barrier(struct pipe_context *_pipe, unsigned flags)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->memory_barrier(pipe, flags);
}

static bool
dd_context_resource_commit(struct pipe_context *_pipe,
                           struct pipe_resource *resource,
                           unsigned level, struct pipe_box *box, bool commit)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   return pipe->resource_commit(pipe, resource, level, box, commit);
}

static void
dd_context_get_sample_position(struct pipe_context *_pipe,
                               unsigned sample_count, unsigned sample_index,
                               float *out_value)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   return pipe->get_sample_position(pipe, sample_count, sample_index,
                                    out_value);
}

static void
dd_context_invalidate_resource(struct pipe_context *_pipe,
                               struct pipe_resource *resource)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   pipe->invalidate_resource(pipe, resource);
}

static enum pipe_reset_status
dd_context_get_device_reset_status(struct pipe_context *_pipe)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   return pipe->get_device_reset_status(pipe);
}

static void
dd_context_set_device_reset_callback(struct pipe_context *_pipe,
                                     const struct pipe_device_reset_callback *cb)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   return pipe->set_device_reset_callback(pipe, cb);
}

static void
dd_context_emit_string_marker(struct pipe_context *_pipe,
                              const char *string, int len)
{
   struct dd_context *dctx = dd_context(_pipe);
   struct pipe_context *pipe = dctx->pipe;

   pipe->emit_string_marker(pipe, string, len);
   dd_parse_apitrace_marker(string, len, &dctx->draw_state.apitrace_call_number);
}

static void
dd_context_dump_debug_state(struct pipe_context *_pipe, FILE *stream,
                            unsigned flags)
{
   struct pipe_context *pipe = dd_context(_pipe)->pipe;

   return pipe->dump_debug_state(pipe, stream, flags);
}

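/* Wrap a driver context. The ddebug context re-exports the driver's priv
 * pointer and uploaders, installs the wrappers defined above, and in the
 * pipelined hang-detection mode starts a background thread that polls a
 * small persistently mapped fence buffer.
 */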
struct pipe_context *
dd_context_create(struct dd_screen *dscreen, struct pipe_context *pipe)
{
   struct dd_context *dctx;

   if (!pipe)
      return NULL;

   dctx = CALLOC_STRUCT(dd_context);
   if (!dctx)
      goto fail;

   dctx->pipe = pipe;
   dctx->base.priv = pipe->priv; /* expose wrapped priv data */
   dctx->base.screen = &dscreen->base;
   dctx->base.stream_uploader = pipe->stream_uploader;
   dctx->base.const_uploader = pipe->const_uploader;

   dctx->base.destroy = dd_context_destroy;

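   /* CTX_INIT comes from dd_pipe.h; it is assumed here to install
    * dd_context_<name> into dctx->base only when the wrapped driver
    * implements the corresponding hook, so optional interfaces stay NULL
    * for both contexts.
    */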
   CTX_INIT(render_condition);
   CTX_INIT(create_query);
   CTX_INIT(create_batch_query);
   CTX_INIT(destroy_query);
   CTX_INIT(begin_query);
   CTX_INIT(end_query);
   CTX_INIT(get_query_result);
   CTX_INIT(get_query_result_resource);
   CTX_INIT(set_active_query_state);
   CTX_INIT(create_blend_state);
   CTX_INIT(bind_blend_state);
   CTX_INIT(delete_blend_state);
   CTX_INIT(create_sampler_state);
   CTX_INIT(bind_sampler_states);
   CTX_INIT(delete_sampler_state);
   CTX_INIT(create_rasterizer_state);
   CTX_INIT(bind_rasterizer_state);
   CTX_INIT(delete_rasterizer_state);
   CTX_INIT(create_depth_stencil_alpha_state);
   CTX_INIT(bind_depth_stencil_alpha_state);
   CTX_INIT(delete_depth_stencil_alpha_state);
   CTX_INIT(create_fs_state);
   CTX_INIT(bind_fs_state);
   CTX_INIT(delete_fs_state);
   CTX_INIT(create_vs_state);
   CTX_INIT(bind_vs_state);
   CTX_INIT(delete_vs_state);
   CTX_INIT(create_gs_state);
   CTX_INIT(bind_gs_state);
   CTX_INIT(delete_gs_state);
   CTX_INIT(create_tcs_state);
   CTX_INIT(bind_tcs_state);
   CTX_INIT(delete_tcs_state);
   CTX_INIT(create_tes_state);
   CTX_INIT(bind_tes_state);
   CTX_INIT(delete_tes_state);
   CTX_INIT(create_compute_state);
   CTX_INIT(bind_compute_state);
   CTX_INIT(delete_compute_state);
   CTX_INIT(create_vertex_elements_state);
   CTX_INIT(bind_vertex_elements_state);
   CTX_INIT(delete_vertex_elements_state);
   CTX_INIT(set_blend_color);
   CTX_INIT(set_stencil_ref);
   CTX_INIT(set_sample_mask);
   CTX_INIT(set_min_samples);
   CTX_INIT(set_clip_state);
   CTX_INIT(set_constant_buffer);
   CTX_INIT(set_framebuffer_state);
   CTX_INIT(set_polygon_stipple);
   CTX_INIT(set_scissor_states);
   CTX_INIT(set_viewport_states);
   CTX_INIT(set_sampler_views);
   CTX_INIT(set_tess_state);
   CTX_INIT(set_shader_buffers);
   CTX_INIT(set_shader_images);
   CTX_INIT(set_vertex_buffers);
   CTX_INIT(create_stream_output_target);
   CTX_INIT(stream_output_target_destroy);
   CTX_INIT(set_stream_output_targets);
   CTX_INIT(create_sampler_view);
   CTX_INIT(sampler_view_destroy);
   CTX_INIT(create_surface);
   CTX_INIT(surface_destroy);
   CTX_INIT(transfer_map);
   CTX_INIT(transfer_flush_region);
   CTX_INIT(transfer_unmap);
   CTX_INIT(buffer_subdata);
   CTX_INIT(texture_subdata);
   CTX_INIT(texture_barrier);
   CTX_INIT(memory_barrier);
   CTX_INIT(resource_commit);
   /* create_video_codec */
   /* create_video_buffer */
   /* set_compute_resources */
   /* set_global_binding */
   CTX_INIT(get_sample_position);
   CTX_INIT(invalidate_resource);
   CTX_INIT(get_device_reset_status);
   CTX_INIT(set_device_reset_callback);
   CTX_INIT(dump_debug_state);
   CTX_INIT(emit_string_marker);

   dd_init_draw_functions(dctx);

   dctx->draw_state.sample_mask = ~0;

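   /* Pipelined hang detection: allocate a tiny 4-byte "fence" buffer, keep
    * it persistently and coherently mapped, and start a background thread
    * (dd_thread_pipelined_hang_detect, defined elsewhere in ddebug) that is
    * assumed to watch it to decide whether the GPU is still making progress.
    */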
   if (dscreen->mode == DD_DETECT_HANGS_PIPELINED) {
      dctx->fence = pipe_buffer_create(dscreen->screen, PIPE_BIND_CUSTOM,
                                       PIPE_USAGE_STAGING, 4);
      if (!dctx->fence)
         goto fail;

      dctx->mapped_fence = pipe_buffer_map(pipe, dctx->fence,
                                           PIPE_TRANSFER_READ_WRITE |
                                           PIPE_TRANSFER_PERSISTENT |
                                           PIPE_TRANSFER_COHERENT,
                                           &dctx->fence_transfer);
      if (!dctx->mapped_fence)
         goto fail;

      *dctx->mapped_fence = 0;

      (void) mtx_init(&dctx->mutex, mtx_plain);
      dctx->thread = u_thread_create(dd_thread_pipelined_hang_detect, dctx);
      if (!dctx->thread) {
         mtx_destroy(&dctx->mutex);
         goto fail;
      }
   }

   return &dctx->base;

fail:
   if (dctx) {
      if (dctx->mapped_fence)
         pipe_transfer_unmap(pipe, dctx->fence_transfer);
      pipe_resource_reference(&dctx->fence, NULL);
      FREE(dctx);
   }
   pipe->destroy(pipe);
   return NULL;
}