/* If the slab becomes totally empty, free it */
if (slab->numFree == slab->numBuffers) {
list = &slab->head;
- LIST_DELINIT(list);
+ list_delinit(list);
pb_reference(&slab->bo, NULL);
FREE(slab->buffers);
FREE(slab);
/* If totally full remove from the partial slab list */
if (--slab->numFree == 0)
- LIST_DELINIT(list);
+ list_delinit(list);
list = slab->freeBuffers.next;
- LIST_DELINIT(list);
+ list_delinit(list);
mtx_unlock(&mgr->mutex);
buf = LIST_ENTRY(struct pb_slab_buffer, list, head);
util_dirty_surface_set_clean(struct util_dirty_surfaces *dss, struct util_dirty_surface *ds)
{
if(!LIST_IS_EMPTY(&ds->dirty_list))
- LIST_DELINIT(&ds->dirty_list);
+ list_delinit(&ds->dirty_list);
}
#endif
r600_query_hw_emit_stop(rctx, query);
if (!(query->flags & R600_QUERY_HW_FLAG_NO_START))
- LIST_DELINIT(&query->list);
+ list_delinit(&query->list);
if (!query->buffer.buf)
return false;
if (view->tex_resource->gpu_address &&
view->tex_resource->b.b.target == PIPE_BUFFER)
- LIST_DELINIT(&view->list);
+ list_delinit(&view->list);
pipe_resource_reference(&state->texture, NULL);
FREE(view);
si_query_hw_emit_stop(sctx, query);
if (!(query->flags & SI_QUERY_HW_FLAG_NO_START)) {
- LIST_DELINIT(&query->b.active_list);
+ list_delinit(&query->b.active_list);
sctx->num_cs_dw_queries_suspend -= query->b.num_cs_dw_suspend;
}
mtx_lock(&ops->mutex);
LIST_FOR_EACH_ENTRY_SAFE(fence, n, &ops->not_signaled, ops_list)
- LIST_DELINIT(&fence->ops_list);
+ list_delinit(&fence->ops_list);
mtx_unlock(&ops->mutex);
}
break;
p_atomic_set(&fence->signalled, 1);
- LIST_DELINIT(&fence->ops_list);
+ list_delinit(&fence->ops_list);
}
ops->last_signaled = signaled;
ops->last_emitted = emitted;
vmw_ioctl_fence_unref(vws, vfence->handle);
mtx_lock(&ops->mutex);
- LIST_DELINIT(&vfence->ops_list);
+ list_delinit(&vfence->ops_list);
mtx_unlock(&ops->mutex);
}
}
#define LIST_DEL(__item) list_del(__item)
-#define LIST_DELINIT(__item) list_delinit(__item)
#define LIST_ENTRY(__type, __item, __field) \
((__type *)(((char *)(__item)) - offsetof(__type, __field)))
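
For context on the conversion above: list_delinit() keeps the semantics of the LIST_DELINIT macro it replaces. It unlinks a node from its intrusive doubly linked list and then re-initializes the node to point back at itself, so the node afterwards reads as an empty list and can safely be tested or unlinked again (which is why call sites such as util_dirty_surface_set_clean guard on LIST_IS_EMPTY). The sketch below is an approximation of those semantics with a stand-alone list_head type, not the exact util/list.h implementation.

    #include <stdbool.h>

    struct list_head {
       struct list_head *prev;
       struct list_head *next;
    };

    /* Make the node a valid, empty list that points at itself. */
    static inline void list_inithead(struct list_head *item)
    {
       item->prev = item;
       item->next = item;
    }

    /* Unlink the node from whatever list it is on, then re-initialize
     * it, so a later list_is_empty(item) returns true and a repeated
     * list_delinit(item) is harmless. */
    static inline void list_delinit(struct list_head *item)
    {
       item->prev->next = item->next;
       item->next->prev = item->prev;
       item->next = item;
       item->prev = item;
    }

    /* An empty list is one whose node points back at itself. */
    static inline bool list_is_empty(const struct list_head *list)
    {
       return list->next == list;
    }

The LIST_ENTRY macro kept above is the usual container_of pattern: given a pointer to an embedded list_head field, it subtracts the field offset to recover the enclosing structure, as in the pb_slab_buffer recovered from slab->freeBuffers.next in the first hunk.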