FD_DIRTY_PROG = BIT(16),
FD_DIRTY_CONST = BIT(17),
FD_DIRTY_TEX = BIT(18),
+ FD_DIRTY_IMAGE = BIT(19),
+ FD_DIRTY_SSBO = BIT(20),
/* only used by a2xx.. possibly can be removed.. */
- FD_DIRTY_TEXSTATE = BIT(19),
+ FD_DIRTY_TEXSTATE = BIT(21),
};
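/*
 * Sketch, not part of this patch: one way a generation-specific emit
 * path could consume the new global bits.  The global FD_DIRTY_SSBO /
 * FD_DIRTY_IMAGE bits say "some stage's bindings changed"; the existing
 * per-stage FD_DIRTY_SHADER_SSBO / FD_DIRTY_SHADER_IMAGE bits (set in
 * the rebind code below) narrow it down to the affected stage.  The
 * emit_shader_ssbos()/emit_shader_images() helpers are hypothetical.
 */
static void
emit_dirty_ssbo_image_state(struct fd_context *ctx)
{
	if (!(ctx->dirty & (FD_DIRTY_SSBO | FD_DIRTY_IMAGE)))
		return;

	for (unsigned stage = 0; stage < PIPE_SHADER_TYPES; stage++) {
		if (ctx->dirty_shader[stage] & FD_DIRTY_SHADER_SSBO)
			emit_shader_ssbos(ctx, stage);    /* hypothetical helper */
		if (ctx->dirty_shader[stage] & FD_DIRTY_SHADER_IMAGE)
			emit_shader_images(ctx, stage);   /* hypothetical helper */
	}
}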
/* per shader-stage dirty state: */
/**
* Go through the entire state and see if the resource is bound
* anywhere. If it is, mark the relevant state as dirty. This is
- * called on realloc_bo to ensure the neccessary state is re-
+ * called on realloc_bo to ensure the necessary state is re-
* emitted so the GPU looks at the new backing bo.
*/
static void
for (unsigned i = 1; i < num_ubos; i++) {
if (ctx->dirty_shader[stage] & FD_DIRTY_SHADER_CONST)
break;
- if (ctx->constbuf[stage].cb[i].buffer == prsc)
+ if (ctx->constbuf[stage].cb[i].buffer == prsc) {
ctx->dirty_shader[stage] |= FD_DIRTY_SHADER_CONST;
+ ctx->dirty |= FD_DIRTY_CONST;
+ }
}
/* Textures */
for (unsigned i = 0; i < ctx->tex[stage].num_textures; i++) {
if (ctx->dirty_shader[stage] & FD_DIRTY_SHADER_TEX)
break;
- if (ctx->tex[stage].textures[i] && (ctx->tex[stage].textures[i]->texture == prsc))
+ if (ctx->tex[stage].textures[i] && (ctx->tex[stage].textures[i]->texture == prsc)) {
ctx->dirty_shader[stage] |= FD_DIRTY_SHADER_TEX;
+ ctx->dirty |= FD_DIRTY_TEX;
+ }
}
/* Images */
for (unsigned i = 0; i < num_images; i++) {
if (ctx->dirty_shader[stage] & FD_DIRTY_SHADER_IMAGE)
break;
- if (ctx->shaderimg[stage].si[i].resource == prsc)
+ if (ctx->shaderimg[stage].si[i].resource == prsc) {
ctx->dirty_shader[stage] |= FD_DIRTY_SHADER_IMAGE;
+ ctx->dirty |= FD_DIRTY_IMAGE;
+ }
}
/* SSBOs */
for (unsigned i = 0; i < num_ssbos; i++) {
if (ctx->dirty_shader[stage] & FD_DIRTY_SHADER_SSBO)
break;
- if (ctx->shaderbuf[stage].sb[i].buffer == prsc)
+ if (ctx->shaderbuf[stage].sb[i].buffer == prsc) {
ctx->dirty_shader[stage] |= FD_DIRTY_SHADER_SSBO;
+ ctx->dirty |= FD_DIRTY_SSBO;
+ }
}
}
}
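/*
 * Sketch, not part of this patch: the kind of call site the comment above
 * refers to (e.g. a transfer-map path that discards the whole resource).
 * The surrounding code is not shown in this excerpt, so the variable
 * names (usage, rsc) and the rebind helper's name are assumptions used
 * only for illustration.
 */
	if (usage & PIPE_TRANSFER_DISCARD_WHOLE_RESOURCE) {
		/* attach a fresh backing bo, then flag everything that still
		 * references this resource so it is re-emitted against the
		 * new bo on the next draw:
		 */
		realloc_bo(rsc, fd_bo_size(rsc->bo));
		rebind_resource(ctx, prsc);
	}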