min_layer, num_layers,
disable_aux);
- brw_render_cache_set_check_flush(brw, tex_obj->mt->bo);
+ brw_cache_flush_for_read(brw, tex_obj->mt->bo);
if (tex_obj->base.StencilSampling ||
tex_obj->mt->format == MESA_FORMAT_S_UINT8) {
intel_miptree_prepare_image(brw, tex_obj->mt);
- brw_render_cache_set_check_flush(brw, tex_obj->mt->bo);
+ brw_cache_flush_for_read(brw, tex_obj->mt->bo);
}
}
}
depth_written);
}
if (depth_written)
- brw_render_cache_set_add_bo(brw, depth_irb->mt->bo);
+ brw_depth_cache_add_bo(brw, depth_irb->mt->bo);
}
if (stencil_irb && brw->stencil_write_enabled)
- brw_render_cache_set_add_bo(brw, stencil_irb->mt->bo);
+ brw_depth_cache_add_bo(brw, stencil_irb->mt->bo);
for (unsigned i = 0; i < fb->_NumColorDrawBuffers; i++) {
struct intel_renderbuffer *irb =
intel_renderbuffer(fb->_ColorDrawBuffers[i]);
mesa_format mesa_format =
_mesa_get_render_format(ctx, intel_rb_format(irb));
enum isl_format isl_format = brw_isl_format_for_mesa_format(mesa_format);
- brw_render_cache_set_add_bo(brw, irb->mt->bo);
+ brw_render_cache_add_bo(brw, irb->mt->bo);
intel_miptree_finish_render(brw, irb->mt, irb->mt_level,
irb->mt_layer, irb->layer_count,
isl_format,
if (irb->align_wa_mt == NULL)
return;
- brw_render_cache_set_check_flush(brw, irb->align_wa_mt->bo);
+ brw_cache_flush_for_read(brw, irb->align_wa_mt->bo);
intel_miptree_copy_slice(brw, irb->align_wa_mt, 0, 0,
irb->mt,
}
if (depth_mt)
- brw_render_cache_set_check_flush(brw, depth_mt->bo);
+ brw_cache_flush_for_depth(brw, depth_mt->bo);
if (stencil_mt)
- brw_render_cache_set_check_flush(brw, stencil_mt->bo);
+ brw_cache_flush_for_depth(brw, stencil_mt->bo);
brw->vtbl.emit_depth_stencil_hiz(brw, depth_mt, depth_offset,
depthbuffer_format, depth_surface_type,
* data.
*/
if (params->src.enabled)
- brw_render_cache_set_check_flush(brw, params->src.addr.buffer);
+ brw_cache_flush_for_read(brw, params->src.addr.buffer);
- brw_render_cache_set_check_flush(brw, params->dst.addr.buffer);
+ brw_cache_flush_for_render(brw, params->dst.addr.buffer);
brw_select_pipeline(brw, BRW_RENDER_PIPELINE);
brw->ib.index_size = -1;
if (params->dst.enabled)
- brw_render_cache_set_add_bo(brw, params->dst.addr.buffer);
+ brw_render_cache_add_bo(brw, params->dst.addr.buffer);
if (params->depth.enabled)
- brw_render_cache_set_add_bo(brw, params->depth.addr.buffer);
+ brw_depth_cache_add_bo(brw, params->depth.addr.buffer);
if (params->stencil.enabled)
- brw_render_cache_set_add_bo(brw, params->stencil.addr.buffer);
+ brw_depth_cache_add_bo(brw, params->stencil.addr.buffer);
}
brw_render_cache_set_clear(brw);
}
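+/* Flush if @bo is tracked in the render cache set, i.e. it may have
+ * been written as a render target earlier in this batch and is about
+ * to be read back (e.g. sampled as a texture). For now this is just
+ * a thin wrapper over the coarse set-based tracking.
+ */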
+void
+brw_cache_flush_for_read(struct brw_context *brw, struct brw_bo *bo)
+{
+ brw_render_cache_set_check_flush(brw, bo);
+}
+
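+/* Flush if @bo may have stale data in the render cache before it is
+ * bound as a render target. Currently identical to the read variant,
+ * since a single cache set is used for all tracking.
+ */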
+void
+brw_cache_flush_for_render(struct brw_context *brw, struct brw_bo *bo)
+{
+ brw_render_cache_set_check_flush(brw, bo);
+}
+
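+/* Record that @bo has been rendered to in this batch. */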
+void
+brw_render_cache_add_bo(struct brw_context *brw, struct brw_bo *bo)
+{
+ brw_render_cache_set_add_bo(brw, bo);
+}
+
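+/* Flush before @bo is bound as a depth/stencil buffer. Currently
+ * identical to the read variant.
+ */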
+void
+brw_cache_flush_for_depth(struct brw_context *brw, struct brw_bo *bo)
+{
+ brw_render_cache_set_check_flush(brw, bo);
+}
+
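+/* Record that @bo has been written through the depth/stencil path;
+ * for now this shares the single render cache set with color writes.
+ */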
+void
+brw_depth_cache_add_bo(struct brw_context *brw, struct brw_bo *bo)
+{
+ brw_render_cache_set_add_bo(brw, bo);
+}
+
/**
* Do one-time context initializations related to GL_EXT_framebuffer_object.
* Hook in device driver functions.
void brw_render_cache_set_add_bo(struct brw_context *brw, struct brw_bo *bo);
void brw_render_cache_set_check_flush(struct brw_context *brw, struct brw_bo *bo);
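+
+/* Finer-grained variants of the two calls above; call sites state
+ * their intent (read vs. render vs. depth) even though, for now,
+ * they all map onto the single render cache set.
+ */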
+void brw_cache_flush_for_read(struct brw_context *brw, struct brw_bo *bo);
+void brw_cache_flush_for_render(struct brw_context *brw, struct brw_bo *bo);
+void brw_cache_flush_for_depth(struct brw_context *brw, struct brw_bo *bo);
+void brw_render_cache_add_bo(struct brw_context *brw, struct brw_bo *bo);
+void brw_depth_cache_add_bo(struct brw_context *brw, struct brw_bo *bo);
+
unsigned
intel_quantize_num_samples(struct intel_screen *intel, unsigned num_samples);
}
}
- brw_render_cache_set_check_flush(brw, dst->bo);
+ brw_cache_flush_for_read(brw, dst->bo);
src->r8stencil_needs_update = false;
}