assert(buf);
if(!buf)
return NULL;
- assert(buf->base.reference.count > 0);
+ assert(p_atomic_read(&buf->base.reference.count) > 0);
return buf->vtbl->map(buf, flags);
}
assert(buf);
if(!buf)
return;
- assert(buf->base.reference.count > 0);
+ assert(p_atomic_read(&buf->base.reference.count) > 0);
buf->vtbl->unmap(buf);
}
offset = 0;
return;
}
- assert(buf->base.reference.count > 0);
+ assert(p_atomic_read(&buf->base.reference.count) > 0);
assert(buf->vtbl->get_base_buffer);
buf->vtbl->get_base_buffer(buf, base_buf, offset);
assert(*base_buf);
assert(buf);
if(!buf)
return;
- assert(buf->base.reference.count == 0);
+ assert(p_atomic_read(&buf->base.reference.count) == 0);
buf->vtbl->destroy(buf);
}
{
struct fenced_buffer_list *fenced_list = fenced_buf->list;
- assert(fenced_buf->base.base.reference.count);
+ assert(p_atomic_read(&fenced_buf->base.base.reference.count) != 0);
assert(fenced_buf->flags & PIPE_BUFFER_USAGE_GPU_READ_WRITE);
assert(fenced_buf->fence);
{
struct fenced_buffer_list *fenced_list = fenced_buf->list;
- assert(!fenced_buf->base.base.reference.count);
+ assert(p_atomic_read(&fenced_buf->base.base.reference.count) == 0);
assert(!fenced_buf->fence);
#ifdef DEBUG
assert(fenced_buf->head.prev);
++fenced_list->numUnfenced;
#endif
- if(!fenced_buf->base.base.reference.count)
+ /**
+  * FIXME: the reference-count test below and the destroy call are not
+  * performed atomically -- another thread could take a reference between
+  * the p_atomic_read and _fenced_buffer_destroy, leading to a
+  * use-after-free. Needs to hold the list mutex or use a fetch-and-dec.
+  */
+
+ if(!p_atomic_read(&fenced_buf->base.base.reference.count))
_fenced_buffer_destroy(fenced_buf);
}
struct fenced_buffer_list *fenced_list = fenced_buf->list;
pipe_mutex_lock(fenced_list->mutex);
- assert(fenced_buf->base.base.reference.count == 0);
+ assert(p_atomic_read(&fenced_buf->base.base.reference.count) == 0);
if (fenced_buf->fence) {
struct pb_fence_ops *ops = fenced_list->ops;
if(ops->fence_signalled(ops, fenced_buf->fence, 0) == 0) {
LIST_DEL(&buf->head);
assert(mgr->numDelayed);
--mgr->numDelayed;
- assert(!buf->base.base.reference.count);
+ assert(p_atomic_read(&buf->base.base.reference.count) == 0);
pb_reference(&buf->buffer, NULL);
FREE(buf);
}
struct pb_cache_manager *mgr = buf->mgr;
pipe_mutex_lock(mgr->mutex);
- assert(buf->base.base.reference.count == 0);
+ assert(p_atomic_read(&buf->base.base.reference.count) == 0);
_pb_cache_buffer_list_check_free(mgr);
return NULL;
}
- assert(buf->buffer->base.reference.count >= 1);
+ assert(p_atomic_read(&buf->buffer->base.reference.count) >= 1);
assert(pb_check_alignment(desc->alignment, buf->buffer->base.alignment));
assert(pb_check_usage(desc->usage, buf->buffer->base.usage));
assert(buf->buffer->base.size >= size);
{
struct pb_debug_buffer *buf = pb_debug_buffer(_buf);
- assert(!buf->base.base.reference.count);
+ assert(p_atomic_read(&buf->base.base.reference.count) == 0);
pb_debug_buffer_check(buf);
return NULL;
}
- assert(buf->buffer->base.reference.count >= 1);
+ assert(p_atomic_read(&buf->buffer->base.reference.count) >= 1);
assert(pb_check_alignment(real_desc.alignment, buf->buffer->base.alignment));
assert(pb_check_usage(real_desc.usage, buf->buffer->base.usage));
assert(buf->buffer->base.size >= real_size);
struct mm_buffer *mm_buf = mm_buffer(buf);
struct mm_pb_manager *mm = mm_buf->mgr;
- assert(mm_buf->base.base.reference.count == 0);
+ assert(p_atomic_read(&mm_buf->base.base.reference.count) == 0);
pipe_mutex_lock(mm->mutex);
u_mmFreeMem(mm_buf->block);
struct pool_buffer *pool_buf = pool_buffer(buf);
struct pool_pb_manager *pool = pool_buf->mgr;
- assert(pool_buf->base.base.reference.count == 0);
+ assert(p_atomic_read(&pool_buf->base.base.reference.count) == 0);
pipe_mutex_lock(pool->mutex);
LIST_ADD(&pool_buf->head, &pool->free);
pipe_mutex_unlock(pool->mutex);
pool_buf = LIST_ENTRY(struct pool_buffer, item, head);
- assert(pool_buf->base.base.reference.count == 0);
+ assert(p_atomic_read(&pool_buf->base.base.reference.count) == 0);
pipe_reference_init(&pool_buf->base.base.reference, 1);
pool_buf->base.base.alignment = desc->alignment;
pool_buf->base.base.usage = desc->usage;
pipe_mutex_lock(mgr->mutex);
- assert(buf->base.base.reference.count == 0);
+ assert(p_atomic_read(&buf->base.base.reference.count) == 0);
buf->mapCount = 0;
goto fail;
}
- assert(spt->base.reference.count == 1);
+ assert(p_atomic_read(&spt->base.reference.count) == 1);
return &spt->base;
fail:
static void trace_dump_reference(const struct pipe_reference *reference)
{
trace_dump_struct_begin("pipe_reference");
- trace_dump_member(uint, reference, count);
+ trace_dump_member_begin("count");
+ trace_dump_int(p_atomic_read(&reference->count));
+ trace_dump_member_end();
trace_dump_struct_end();
}
#include "p_defines.h"
+#include "p_atomic.h"
#ifdef __cplusplus
struct pipe_reference
{
- unsigned count;
+ struct pipe_atomic count;
};
static INLINE void
pipe_reference_init(struct pipe_reference *reference, unsigned count)
{
- reference->count = count;
+ p_atomic_set(&reference->count, count);
}
/* bump the reference.count first */
if (reference) {
- assert(reference->count);
- reference->count++;
+ assert(p_atomic_read(&reference->count) != 0);
+ p_atomic_inc(&reference->count);
}
if (*ptr) {
- assert((*ptr)->count);
- if (--(*ptr)->count == 0) {
+ assert(p_atomic_read(&(*ptr)->count) != 0);
+ if (p_atomic_dec_zero(&(*ptr)->count)) {
destroy = TRUE;
}
}
newtex = screen->texture_create(screen, &pt);
- assert(!newtex || newtex->reference.count == 1);
+ assert(!newtex || p_atomic_read(&newtex->reference.count) == 1);
return newtex;
}