1 /**************************************************************************
3 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
26 **************************************************************************/
31 #include "pipe/p_context.h"
32 #include "pipe/p_defines.h"
33 #include "pipe/p_screen.h"
42 * Reference counting helper functions.
47 pipe_reference_init(struct pipe_reference
*reference
, unsigned count
)
49 p_atomic_set(&reference
->count
, count
);
53 pipe_is_referenced(struct pipe_reference
*reference
)
55 return p_atomic_read(&reference
->count
) != 0;
59 * Update reference counting.
60 * The old thing pointed to, if any, will be unreferenced.
61 * Both 'ptr' and 'reference' may be NULL.
62 * \return TRUE if the object's refcount hits zero and should be destroyed.
65 pipe_reference(struct pipe_reference
*ptr
, struct pipe_reference
*reference
)
67 boolean destroy
= FALSE
;
69 if(ptr
!= reference
) {
70 /* bump the reference.count first */
72 assert(pipe_is_referenced(reference
));
73 p_atomic_inc(&reference
->count
);
77 assert(pipe_is_referenced(ptr
));
78 if (p_atomic_dec_zero(&ptr
->count
)) {
88 pipe_buffer_reference(struct pipe_buffer
**ptr
, struct pipe_buffer
*buf
)
90 struct pipe_buffer
*old_buf
= *ptr
;
92 if (pipe_reference(&(*ptr
)->reference
, &buf
->reference
))
93 old_buf
->screen
->buffer_destroy(old_buf
);
98 pipe_surface_reference(struct pipe_surface
**ptr
, struct pipe_surface
*surf
)
100 struct pipe_surface
*old_surf
= *ptr
;
102 if (pipe_reference(&(*ptr
)->reference
, &surf
->reference
))
103 old_surf
->texture
->screen
->tex_surface_destroy(old_surf
);
108 pipe_texture_reference(struct pipe_texture
**ptr
, struct pipe_texture
*tex
)
110 struct pipe_texture
*old_tex
= *ptr
;
112 if (pipe_reference(&(*ptr
)->reference
, &tex
->reference
))
113 old_tex
->screen
->texture_destroy(old_tex
);
119 * Convenience wrappers for screen buffer functions.
122 static INLINE
struct pipe_buffer
*
123 pipe_buffer_create( struct pipe_screen
*screen
,
124 unsigned alignment
, unsigned usage
, unsigned size
)
126 return screen
->buffer_create(screen
, alignment
, usage
, size
);
129 static INLINE
struct pipe_buffer
*
130 pipe_user_buffer_create( struct pipe_screen
*screen
, void *ptr
, unsigned size
)
132 return screen
->user_buffer_create(screen
, ptr
, size
);
136 pipe_buffer_map(struct pipe_screen
*screen
,
137 struct pipe_buffer
*buf
,
140 if(screen
->buffer_map_range
) {
142 unsigned length
= buf
->size
;
143 return screen
->buffer_map_range(screen
, buf
, offset
, length
, usage
);
146 return screen
->buffer_map(screen
, buf
, usage
);
150 pipe_buffer_unmap(struct pipe_screen
*screen
,
151 struct pipe_buffer
*buf
)
153 screen
->buffer_unmap(screen
, buf
);
157 pipe_buffer_map_range(struct pipe_screen
*screen
,
158 struct pipe_buffer
*buf
,
163 assert(offset
< buf
->size
);
164 assert(offset
+ length
<= buf
->size
);
166 if(screen
->buffer_map_range
)
167 return screen
->buffer_map_range(screen
, buf
, offset
, length
, usage
);
169 return screen
->buffer_map(screen
, buf
, usage
);
173 pipe_buffer_flush_mapped_range(struct pipe_screen
*screen
,
174 struct pipe_buffer
*buf
,
178 assert(offset
< buf
->size
);
179 assert(offset
+ length
<= buf
->size
);
181 if(screen
->buffer_flush_mapped_range
)
182 screen
->buffer_flush_mapped_range(screen
, buf
, offset
, length
);
186 pipe_buffer_write(struct pipe_screen
*screen
,
187 struct pipe_buffer
*buf
,
188 unsigned offset
, unsigned size
,
193 assert(offset
< buf
->size
);
194 assert(offset
+ size
<= buf
->size
);
197 map
= pipe_buffer_map_range(screen
, buf
, offset
, size
,
198 PIPE_BUFFER_USAGE_CPU_WRITE
|
199 PIPE_BUFFER_USAGE_FLUSH_EXPLICIT
|
200 PIPE_BUFFER_USAGE_DISCARD
);
203 memcpy((uint8_t *)map
+ offset
, data
, size
);
204 pipe_buffer_flush_mapped_range(screen
, buf
, offset
, size
);
205 pipe_buffer_unmap(screen
, buf
);
210 * Special case for writing non-overlapping ranges.
212 * We can avoid GPU/CPU synchronization when writing range that has never
213 * been written before.
216 pipe_buffer_write_nooverlap(struct pipe_screen
*screen
,
217 struct pipe_buffer
*buf
,
218 unsigned offset
, unsigned size
,
223 assert(offset
< buf
->size
);
224 assert(offset
+ size
<= buf
->size
);
227 map
= pipe_buffer_map_range(screen
, buf
, offset
, size
,
228 PIPE_BUFFER_USAGE_CPU_WRITE
|
229 PIPE_BUFFER_USAGE_FLUSH_EXPLICIT
|
230 PIPE_BUFFER_USAGE_DISCARD
|
231 PIPE_BUFFER_USAGE_UNSYNCHRONIZED
);
234 memcpy((uint8_t *)map
+ offset
, data
, size
);
235 pipe_buffer_flush_mapped_range(screen
, buf
, offset
, size
);
236 pipe_buffer_unmap(screen
, buf
);
241 pipe_buffer_read(struct pipe_screen
*screen
,
242 struct pipe_buffer
*buf
,
243 unsigned offset
, unsigned size
,
248 assert(offset
< buf
->size
);
249 assert(offset
+ size
<= buf
->size
);
252 map
= pipe_buffer_map_range(screen
, buf
, offset
, size
, PIPE_BUFFER_USAGE_CPU_READ
);
255 memcpy(data
, (const uint8_t *)map
+ offset
, size
);
256 pipe_buffer_unmap(screen
, buf
);
261 pipe_transfer_map( struct pipe_transfer
*transf
)
263 struct pipe_screen
*screen
= transf
->texture
->screen
;
264 return screen
->transfer_map(screen
, transf
);
268 pipe_transfer_unmap( struct pipe_transfer
*transf
)
270 struct pipe_screen
*screen
= transf
->texture
->screen
;
271 screen
->transfer_unmap(screen
, transf
);
275 pipe_transfer_destroy( struct pipe_transfer
*transf
)
277 struct pipe_screen
*screen
= transf
->texture
->screen
;
278 screen
->tex_transfer_destroy(transf
);
281 static INLINE
unsigned
282 pipe_transfer_buffer_flags( struct pipe_transfer
*transf
)
284 switch (transf
->usage
& PIPE_TRANSFER_READ_WRITE
) {
285 case PIPE_TRANSFER_READ_WRITE
:
286 return PIPE_BUFFER_USAGE_CPU_READ
| PIPE_BUFFER_USAGE_CPU_WRITE
;
287 case PIPE_TRANSFER_READ
:
288 return PIPE_BUFFER_USAGE_CPU_READ
;
289 case PIPE_TRANSFER_WRITE
:
290 return PIPE_BUFFER_USAGE_CPU_WRITE
;
301 #endif /* U_INLINES_H */