1 /**************************************************************************
3 * Copyright 2007 VMware, Inc.
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
26 **************************************************************************/
31 #include "pipe/p_context.h"
32 #include "pipe/p_defines.h"
33 #include "pipe/p_shader_tokens.h"
34 #include "pipe/p_state.h"
35 #include "pipe/p_screen.h"
36 #include "util/u_debug.h"
37 #include "util/u_debug_describe.h"
38 #include "util/u_debug_refcnt.h"
39 #include "util/u_atomic.h"
40 #include "util/u_box.h"
41 #include "util/u_math.h"
50 * Reference counting helper functions.
55 pipe_reference_init(struct pipe_reference
*dst
, unsigned count
)
57 p_atomic_set(&dst
->count
, count
);
61 pipe_is_referenced(struct pipe_reference
*src
)
63 return p_atomic_read(&src
->count
) != 0;
67 * Update reference counting.
68 * The old thing pointed to, if any, will be unreferenced.
69 * Both 'dst' and 'src' may be NULL.
70 * \return TRUE if the object's refcount hits zero and should be destroyed.
73 pipe_reference_described(struct pipe_reference
*dst
,
74 struct pipe_reference
*src
,
75 debug_reference_descriptor get_desc
)
78 /* bump the src.count first */
80 ASSERTED
int count
= p_atomic_inc_return(&src
->count
);
81 assert(count
!= 1); /* src had to be referenced */
82 debug_reference(src
, get_desc
, 1);
86 int count
= p_atomic_dec_return(&dst
->count
);
87 assert(count
!= -1); /* dst had to be referenced */
88 debug_reference(dst
, get_desc
, -1);
98 pipe_reference(struct pipe_reference
*dst
, struct pipe_reference
*src
)
100 return pipe_reference_described(dst
, src
,
101 (debug_reference_descriptor
)
102 debug_describe_reference
);
106 pipe_surface_reference(struct pipe_surface
**dst
, struct pipe_surface
*src
)
108 struct pipe_surface
*old_dst
= *dst
;
110 if (pipe_reference_described(old_dst
? &old_dst
->reference
: NULL
,
111 src
? &src
->reference
: NULL
,
112 (debug_reference_descriptor
)
113 debug_describe_surface
))
114 old_dst
->context
->surface_destroy(old_dst
->context
, old_dst
);
119 * Similar to pipe_surface_reference() but always set the pointer to NULL
120 * and pass in an explicit context. The explicit context avoids the problem
121 * of using a deleted context's surface_destroy() method when freeing a surface
122 * that's shared by multiple contexts.
125 pipe_surface_release(struct pipe_context
*pipe
, struct pipe_surface
**ptr
)
127 struct pipe_surface
*old
= *ptr
;
129 if (pipe_reference_described(&old
->reference
, NULL
,
130 (debug_reference_descriptor
)
131 debug_describe_surface
))
132 pipe
->surface_destroy(pipe
, old
);
138 pipe_resource_reference(struct pipe_resource
**dst
, struct pipe_resource
*src
)
140 struct pipe_resource
*old_dst
= *dst
;
142 if (pipe_reference_described(old_dst
? &old_dst
->reference
: NULL
,
143 src
? &src
->reference
: NULL
,
144 (debug_reference_descriptor
)
145 debug_describe_resource
)) {
146 /* Avoid recursion, which would prevent inlining this function */
148 struct pipe_resource
*next
= old_dst
->next
;
150 old_dst
->screen
->resource_destroy(old_dst
->screen
, old_dst
);
152 } while (pipe_reference_described(old_dst
? &old_dst
->reference
: NULL
,
154 (debug_reference_descriptor
)
155 debug_describe_resource
));
161 * Same as pipe_surface_release, but used when pipe_context doesn't exist
165 pipe_surface_release_no_context(struct pipe_surface
**ptr
)
167 struct pipe_surface
*surf
= *ptr
;
169 if (pipe_reference_described(&surf
->reference
, NULL
,
170 (debug_reference_descriptor
)
171 debug_describe_surface
)) {
172 /* trivially destroy pipe_surface */
173 pipe_resource_reference(&surf
->texture
, NULL
);
180 * Set *dst to \p src with proper reference counting.
182 * The caller must guarantee that \p src and *dst were created in
183 * the same context (if they exist), and that this must be the current context.
186 pipe_sampler_view_reference(struct pipe_sampler_view
**dst
,
187 struct pipe_sampler_view
*src
)
189 struct pipe_sampler_view
*old_dst
= *dst
;
191 if (pipe_reference_described(old_dst
? &old_dst
->reference
: NULL
,
192 src
? &src
->reference
: NULL
,
193 (debug_reference_descriptor
)
194 debug_describe_sampler_view
))
195 old_dst
->context
->sampler_view_destroy(old_dst
->context
, old_dst
);
200 pipe_so_target_reference(struct pipe_stream_output_target
**dst
,
201 struct pipe_stream_output_target
*src
)
203 struct pipe_stream_output_target
*old_dst
= *dst
;
205 if (pipe_reference_described(old_dst
? &old_dst
->reference
: NULL
,
206 src
? &src
->reference
: NULL
,
207 (debug_reference_descriptor
)debug_describe_so_target
))
208 old_dst
->context
->stream_output_target_destroy(old_dst
->context
, old_dst
);
213 pipe_vertex_buffer_unreference(struct pipe_vertex_buffer
*dst
)
215 if (dst
->is_user_buffer
)
216 dst
->buffer
.user
= NULL
;
218 pipe_resource_reference(&dst
->buffer
.resource
, NULL
);
222 pipe_vertex_buffer_reference(struct pipe_vertex_buffer
*dst
,
223 const struct pipe_vertex_buffer
*src
)
225 pipe_vertex_buffer_unreference(dst
);
226 if (!src
->is_user_buffer
)
227 pipe_resource_reference(&dst
->buffer
.resource
, src
->buffer
.resource
);
228 memcpy(dst
, src
, sizeof(*src
));
232 pipe_surface_reset(struct pipe_context
*ctx
, struct pipe_surface
* ps
,
233 struct pipe_resource
*pt
, unsigned level
, unsigned layer
)
235 pipe_resource_reference(&ps
->texture
, pt
);
236 ps
->format
= pt
->format
;
237 ps
->width
= u_minify(pt
->width0
, level
);
238 ps
->height
= u_minify(pt
->height0
, level
);
239 ps
->u
.tex
.level
= level
;
240 ps
->u
.tex
.first_layer
= ps
->u
.tex
.last_layer
= layer
;
245 pipe_surface_init(struct pipe_context
*ctx
, struct pipe_surface
* ps
,
246 struct pipe_resource
*pt
, unsigned level
, unsigned layer
)
249 pipe_reference_init(&ps
->reference
, 1);
250 pipe_surface_reset(ctx
, ps
, pt
, level
, layer
);
253 /* Return true if the surfaces are equal. */
254 static inline boolean
255 pipe_surface_equal(struct pipe_surface
*s1
, struct pipe_surface
*s2
)
257 return s1
->texture
== s2
->texture
&&
258 s1
->format
== s2
->format
&&
259 (s1
->texture
->target
!= PIPE_BUFFER
||
260 (s1
->u
.buf
.first_element
== s2
->u
.buf
.first_element
&&
261 s1
->u
.buf
.last_element
== s2
->u
.buf
.last_element
)) &&
262 (s1
->texture
->target
== PIPE_BUFFER
||
263 (s1
->u
.tex
.level
== s2
->u
.tex
.level
&&
264 s1
->u
.tex
.first_layer
== s2
->u
.tex
.first_layer
&&
265 s1
->u
.tex
.last_layer
== s2
->u
.tex
.last_layer
));
269 * Convenience wrappers for screen buffer functions.
274 * Create a new resource.
275 * \param bind bitmask of PIPE_BIND_x flags
276 * \param usage a PIPE_USAGE_x value
278 static inline struct pipe_resource
*
279 pipe_buffer_create(struct pipe_screen
*screen
,
281 enum pipe_resource_usage usage
,
284 struct pipe_resource buffer
;
285 memset(&buffer
, 0, sizeof buffer
);
286 buffer
.target
= PIPE_BUFFER
;
287 buffer
.format
= PIPE_FORMAT_R8_UNORM
; /* want TYPELESS or similar */
289 buffer
.usage
= usage
;
291 buffer
.width0
= size
;
294 buffer
.array_size
= 1;
295 return screen
->resource_create(screen
, &buffer
);
299 static inline struct pipe_resource
*
300 pipe_buffer_create_const0(struct pipe_screen
*screen
,
302 enum pipe_resource_usage usage
,
305 struct pipe_resource buffer
;
306 memset(&buffer
, 0, sizeof buffer
);
307 buffer
.target
= PIPE_BUFFER
;
308 buffer
.format
= PIPE_FORMAT_R8_UNORM
;
310 buffer
.usage
= usage
;
311 buffer
.flags
= screen
->get_param(screen
, PIPE_CAP_CONSTBUF0_FLAGS
);
312 buffer
.width0
= size
;
315 buffer
.array_size
= 1;
316 return screen
->resource_create(screen
, &buffer
);
321 * Map a range of a resource.
322 * \param offset start of region, in bytes
323 * \param length size of region, in bytes
324 * \param access bitmask of PIPE_TRANSFER_x flags
325 * \param transfer returns a transfer object
328 pipe_buffer_map_range(struct pipe_context
*pipe
,
329 struct pipe_resource
*buffer
,
333 struct pipe_transfer
**transfer
)
338 assert(offset
< buffer
->width0
);
339 assert(offset
+ length
<= buffer
->width0
);
342 u_box_1d(offset
, length
, &box
);
344 map
= pipe
->transfer_map(pipe
, buffer
, 0, access
, &box
, transfer
);
354 * Map whole resource.
355 * \param access bitmask of PIPE_TRANSFER_x flags
356 * \param transfer returns a transfer object
359 pipe_buffer_map(struct pipe_context
*pipe
,
360 struct pipe_resource
*buffer
,
362 struct pipe_transfer
**transfer
)
364 return pipe_buffer_map_range(pipe
, buffer
, 0, buffer
->width0
,
370 pipe_buffer_unmap(struct pipe_context
*pipe
,
371 struct pipe_transfer
*transfer
)
373 pipe
->transfer_unmap(pipe
, transfer
);
377 pipe_buffer_flush_mapped_range(struct pipe_context
*pipe
,
378 struct pipe_transfer
*transfer
,
386 assert(transfer
->box
.x
<= (int) offset
);
387 assert((int) (offset
+ length
) <= transfer
->box
.x
+ transfer
->box
.width
);
389 /* Match old screen->buffer_flush_mapped_range() behaviour, where
390 * offset parameter is relative to the start of the buffer, not the
393 transfer_offset
= offset
- transfer
->box
.x
;
395 u_box_1d(transfer_offset
, length
, &box
);
397 pipe
->transfer_flush_region(pipe
, transfer
, &box
);
401 pipe_buffer_write(struct pipe_context
*pipe
,
402 struct pipe_resource
*buf
,
407 /* Don't set any other usage bits. Drivers should derive them. */
408 pipe
->buffer_subdata(pipe
, buf
, PIPE_TRANSFER_WRITE
, offset
, size
, data
);
412 * Special case for writing non-overlapping ranges.
414 * We can avoid GPU/CPU synchronization when writing range that has never
415 * been written before.
418 pipe_buffer_write_nooverlap(struct pipe_context
*pipe
,
419 struct pipe_resource
*buf
,
420 unsigned offset
, unsigned size
,
423 pipe
->buffer_subdata(pipe
, buf
,
424 (PIPE_TRANSFER_WRITE
|
425 PIPE_TRANSFER_UNSYNCHRONIZED
),
431 * Create a new resource and immediately put data into it
432 * \param bind bitmask of PIPE_BIND_x flags
433 * \param usage bitmask of PIPE_USAGE_x flags
435 static inline struct pipe_resource
*
436 pipe_buffer_create_with_data(struct pipe_context
*pipe
,
438 enum pipe_resource_usage usage
,
442 struct pipe_resource
*res
= pipe_buffer_create(pipe
->screen
,
444 pipe_buffer_write_nooverlap(pipe
, res
, 0, size
, ptr
);
449 pipe_buffer_read(struct pipe_context
*pipe
,
450 struct pipe_resource
*buf
,
455 struct pipe_transfer
*src_transfer
;
458 map
= (ubyte
*) pipe_buffer_map_range(pipe
,
466 memcpy(data
, map
, size
);
467 pipe_buffer_unmap(pipe
, src_transfer
);
472 * Map a resource for reading/writing.
473 * \param access bitmask of PIPE_TRANSFER_x flags
476 pipe_transfer_map(struct pipe_context
*context
,
477 struct pipe_resource
*resource
,
478 unsigned level
, unsigned layer
,
480 unsigned x
, unsigned y
,
481 unsigned w
, unsigned h
,
482 struct pipe_transfer
**transfer
)
485 u_box_2d_zslice(x
, y
, layer
, w
, h
, &box
);
486 return context
->transfer_map(context
,
495 * Map a 3D (texture) resource for reading/writing.
496 * \param access bitmask of PIPE_TRANSFER_x flags
499 pipe_transfer_map_3d(struct pipe_context
*context
,
500 struct pipe_resource
*resource
,
503 unsigned x
, unsigned y
, unsigned z
,
504 unsigned w
, unsigned h
, unsigned d
,
505 struct pipe_transfer
**transfer
)
508 u_box_3d(x
, y
, z
, w
, h
, d
, &box
);
509 return context
->transfer_map(context
,
517 pipe_transfer_unmap(struct pipe_context
*context
,
518 struct pipe_transfer
*transfer
)
520 context
->transfer_unmap(context
, transfer
);
524 pipe_set_constant_buffer(struct pipe_context
*pipe
,
525 enum pipe_shader_type shader
, uint index
,
526 struct pipe_resource
*buf
)
529 struct pipe_constant_buffer cb
;
531 cb
.buffer_offset
= 0;
532 cb
.buffer_size
= buf
->width0
;
533 cb
.user_buffer
= NULL
;
534 pipe
->set_constant_buffer(pipe
, shader
, index
, &cb
);
536 pipe
->set_constant_buffer(pipe
, shader
, index
, NULL
);
542 * Get the polygon offset enable/disable flag for the given polygon fill mode.
543 * \param fill_mode one of PIPE_POLYGON_MODE_POINT/LINE/FILL
545 static inline boolean
546 util_get_offset(const struct pipe_rasterizer_state
*templ
,
550 case PIPE_POLYGON_MODE_POINT
:
551 return templ
->offset_point
;
552 case PIPE_POLYGON_MODE_LINE
:
553 return templ
->offset_line
;
554 case PIPE_POLYGON_MODE_FILL
:
555 return templ
->offset_tri
;
563 util_get_min_point_size(const struct pipe_rasterizer_state
*state
)
565 /* The point size should be clamped to this value at the rasterizer stage.
567 return !state
->point_quad_rasterization
&&
568 !state
->point_smooth
&&
569 !state
->multisample
? 1.0f
: 0.0f
;
573 util_query_clear_result(union pipe_query_result
*result
, unsigned type
)
576 case PIPE_QUERY_OCCLUSION_PREDICATE
:
577 case PIPE_QUERY_OCCLUSION_PREDICATE_CONSERVATIVE
:
578 case PIPE_QUERY_SO_OVERFLOW_PREDICATE
:
579 case PIPE_QUERY_SO_OVERFLOW_ANY_PREDICATE
:
580 case PIPE_QUERY_GPU_FINISHED
:
583 case PIPE_QUERY_OCCLUSION_COUNTER
:
584 case PIPE_QUERY_TIMESTAMP
:
585 case PIPE_QUERY_TIME_ELAPSED
:
586 case PIPE_QUERY_PRIMITIVES_GENERATED
:
587 case PIPE_QUERY_PRIMITIVES_EMITTED
:
590 case PIPE_QUERY_SO_STATISTICS
:
591 memset(&result
->so_statistics
, 0, sizeof(result
->so_statistics
));
593 case PIPE_QUERY_TIMESTAMP_DISJOINT
:
594 memset(&result
->timestamp_disjoint
, 0, sizeof(result
->timestamp_disjoint
));
596 case PIPE_QUERY_PIPELINE_STATISTICS
:
597 memset(&result
->pipeline_statistics
, 0, sizeof(result
->pipeline_statistics
));
600 memset(result
, 0, sizeof(*result
));
604 /** Convert PIPE_TEXTURE_x to TGSI_TEXTURE_x */
605 static inline enum tgsi_texture_type
606 util_pipe_tex_to_tgsi_tex(enum pipe_texture_target pipe_tex_target
,
609 switch (pipe_tex_target
) {
611 return TGSI_TEXTURE_BUFFER
;
613 case PIPE_TEXTURE_1D
:
614 assert(nr_samples
<= 1);
615 return TGSI_TEXTURE_1D
;
617 case PIPE_TEXTURE_2D
:
618 return nr_samples
> 1 ? TGSI_TEXTURE_2D_MSAA
: TGSI_TEXTURE_2D
;
620 case PIPE_TEXTURE_RECT
:
621 assert(nr_samples
<= 1);
622 return TGSI_TEXTURE_RECT
;
624 case PIPE_TEXTURE_3D
:
625 assert(nr_samples
<= 1);
626 return TGSI_TEXTURE_3D
;
628 case PIPE_TEXTURE_CUBE
:
629 assert(nr_samples
<= 1);
630 return TGSI_TEXTURE_CUBE
;
632 case PIPE_TEXTURE_1D_ARRAY
:
633 assert(nr_samples
<= 1);
634 return TGSI_TEXTURE_1D_ARRAY
;
636 case PIPE_TEXTURE_2D_ARRAY
:
637 return nr_samples
> 1 ? TGSI_TEXTURE_2D_ARRAY_MSAA
:
638 TGSI_TEXTURE_2D_ARRAY
;
640 case PIPE_TEXTURE_CUBE_ARRAY
:
641 return TGSI_TEXTURE_CUBE_ARRAY
;
644 assert(0 && "unexpected texture target");
645 return TGSI_TEXTURE_UNKNOWN
;
651 util_copy_constant_buffer(struct pipe_constant_buffer
*dst
,
652 const struct pipe_constant_buffer
*src
)
655 pipe_resource_reference(&dst
->buffer
, src
->buffer
);
656 dst
->buffer_offset
= src
->buffer_offset
;
657 dst
->buffer_size
= src
->buffer_size
;
658 dst
->user_buffer
= src
->user_buffer
;
661 pipe_resource_reference(&dst
->buffer
, NULL
);
662 dst
->buffer_offset
= 0;
663 dst
->buffer_size
= 0;
664 dst
->user_buffer
= NULL
;
669 util_copy_shader_buffer(struct pipe_shader_buffer
*dst
,
670 const struct pipe_shader_buffer
*src
)
673 pipe_resource_reference(&dst
->buffer
, src
->buffer
);
674 dst
->buffer_offset
= src
->buffer_offset
;
675 dst
->buffer_size
= src
->buffer_size
;
678 pipe_resource_reference(&dst
->buffer
, NULL
);
679 dst
->buffer_offset
= 0;
680 dst
->buffer_size
= 0;
685 util_copy_image_view(struct pipe_image_view
*dst
,
686 const struct pipe_image_view
*src
)
689 pipe_resource_reference(&dst
->resource
, src
->resource
);
690 dst
->format
= src
->format
;
691 dst
->access
= src
->access
;
692 dst
->shader_access
= src
->shader_access
;
695 pipe_resource_reference(&dst
->resource
, NULL
);
696 dst
->format
= PIPE_FORMAT_NONE
;
698 dst
->shader_access
= 0;
699 memset(&dst
->u
, 0, sizeof(dst
->u
));
703 static inline unsigned
704 util_max_layer(const struct pipe_resource
*r
, unsigned level
)
707 case PIPE_TEXTURE_3D
:
708 return u_minify(r
->depth0
, level
) - 1;
709 case PIPE_TEXTURE_CUBE
:
710 assert(r
->array_size
== 6);
712 case PIPE_TEXTURE_1D_ARRAY
:
713 case PIPE_TEXTURE_2D_ARRAY
:
714 case PIPE_TEXTURE_CUBE_ARRAY
:
715 return r
->array_size
- 1;
/** Number of layers (or depth slices) of the resource at the given level. */
static inline unsigned
util_num_layers(const struct pipe_resource *r, unsigned level)
{
   return util_max_layer(r, level) + 1;
}
728 util_texrange_covers_whole_level(const struct pipe_resource
*tex
,
729 unsigned level
, unsigned x
, unsigned y
,
730 unsigned z
, unsigned width
,
731 unsigned height
, unsigned depth
)
733 return x
== 0 && y
== 0 && z
== 0 &&
734 width
== u_minify(tex
->width0
, level
) &&
735 height
== u_minify(tex
->height0
, level
) &&
736 depth
== util_num_layers(tex
, level
);
740 util_logicop_reads_dest(enum pipe_logicop op
)
743 case PIPE_LOGICOP_NOR
:
744 case PIPE_LOGICOP_AND_INVERTED
:
745 case PIPE_LOGICOP_AND_REVERSE
:
746 case PIPE_LOGICOP_INVERT
:
747 case PIPE_LOGICOP_XOR
:
748 case PIPE_LOGICOP_NAND
:
749 case PIPE_LOGICOP_AND
:
750 case PIPE_LOGICOP_EQUIV
:
751 case PIPE_LOGICOP_NOOP
:
752 case PIPE_LOGICOP_OR_INVERTED
:
753 case PIPE_LOGICOP_OR_REVERSE
:
754 case PIPE_LOGICOP_OR
:
756 case PIPE_LOGICOP_CLEAR
:
757 case PIPE_LOGICOP_COPY_INVERTED
:
758 case PIPE_LOGICOP_COPY
:
759 case PIPE_LOGICOP_SET
:
762 unreachable("bad logicop");
765 static inline struct pipe_context
*
766 pipe_create_multimedia_context(struct pipe_screen
*screen
)
770 if (!screen
->get_param(screen
, PIPE_CAP_GRAPHICS
))
771 flags
|= PIPE_CONTEXT_COMPUTE_ONLY
;
773 return screen
->context_create(screen
, NULL
, flags
);
776 static inline unsigned util_res_sample_count(struct pipe_resource
*res
)
778 return res
->nr_samples
> 0 ? res
->nr_samples
: 1;
785 #endif /* U_INLINES_H */