unsigned int len;
if (depth_irb &&
- depth_irb->mt) {
- hiz_region = depth_irb->mt->hiz_region;
+ depth_irb->mt &&
+ depth_irb->mt->hiz_mt) {
+ hiz_region = depth_irb->mt->hiz_mt->region;
}
/* 3DSTATE_DEPTH_BUFFER, 3DSTATE_STENCIL_BUFFER are both
rb->mt->region->name == buffer->name) ||
(buffer->attachment == __DRI_BUFFER_HIZ &&
rb->mt &&
- rb->mt->hiz_region &&
- rb->mt->hiz_region->name == buffer->name)) {
+ rb->mt->hiz_mt &&
+ rb->mt->hiz_mt->region->name == buffer->name)) {
return;
}
}
/* Release the buffer storage now in case we have to return early
- * due to region allocation failure.
+ * due to failure to allocate new storage.
*/
if (buffer->attachment == __DRI_BUFFER_HIZ) {
- intel_region_release(&rb->mt->hiz_region);
+ intel_miptree_release(&rb->mt->hiz_mt);
} else {
intel_miptree_release(&rb->mt);
}
if (!region)
return;
+ struct intel_mipmap_tree *mt =
+ intel_miptree_create_for_region(intel,
+ GL_TEXTURE_2D,
+ rb->Base.Format,
+ region);
+ intel_region_release(&region);
+
/* Associate buffer with new storage. */
if (buffer->attachment == __DRI_BUFFER_HIZ) {
- rb->mt->hiz_region = region;
+ rb->mt->hiz_mt = mt;
} else {
- rb->mt = intel_miptree_create_for_region(intel,
- GL_TEXTURE_2D,
- rb->Base.Format,
- region);
- intel_region_release(&region);
+ rb->mt = mt;
}
}
struct intel_renderbuffer *rb = NULL;
if (fb)
rb = intel_get_renderbuffer(fb, BUFFER_DEPTH);
- return rb && rb->mt && rb->mt->hiz_region;
+ return rb && rb->mt && rb->mt->hiz_mt;
}
struct intel_region*
return false;
if (intel->vtbl.is_hiz_depth_format(intel, rb->Format)) {
- irb->mt->hiz_region = intel_region_alloc(intel->intelScreen,
- I915_TILING_Y,
- cpp,
- rb->Width,
- rb->Height,
- true);
- if (!irb->mt->hiz_region) {
+ bool ok = intel_miptree_alloc_hiz(intel, irb->mt);
+ if (!ok) {
intel_miptree_release(&irb->mt);
return false;
}
DBG("%s deleting %p\n", __FUNCTION__, *mt);
intel_region_release(&((*mt)->region));
- intel_region_release(&((*mt)->hiz_region));
intel_miptree_release(&(*mt)->stencil_mt);
+ intel_miptree_release(&(*mt)->hiz_mt);
for (i = 0; i < MAX_TEXTURE_LEVELS; i++) {
free((*mt)->level[i].slice);
{
intel_miptree_s8z24_scattergather(intel, mt, level, layer, false);
}
+
+bool
+intel_miptree_alloc_hiz(struct intel_context *intel,
+ struct intel_mipmap_tree *mt)
+{
+ assert(mt->hiz_mt == NULL);
+ mt->hiz_mt = intel_miptree_create(intel,
+ mt->target,
+ MESA_FORMAT_X8_Z24,
+ mt->first_level,
+ mt->last_level,
+ mt->width0,
+ mt->height0,
+ mt->depth0,
+ true);
+ return mt->hiz_mt != NULL;
+}
struct intel_region *region;
/**
- * This points to an auxillary hiz region if all of the following hold:
- * 1. The texture has been attached to an FBO as a depthbuffer.
- * 2. The texture format is hiz compatible.
- * 3. The intel context supports hiz.
+ * \brief HiZ miptree
*
- * When a texture is attached to multiple FBO's, a separate renderbuffer
- * wrapper is created for each attachment. This necessitates storing the
- * hiz region in the texture itself instead of the renderbuffer wrapper.
+ * This is non-null only if HiZ is enabled for this miptree.
*
- * \see intel_fbo.c:intel_wrap_texture()
+ * \see intel_miptree_alloc_hiz()
*/
- struct intel_region *hiz_region;
+ struct intel_mipmap_tree *hiz_mt;
/**
* \brief Stencil miptree for depthstencil textures.
uint32_t level,
uint32_t layer);
+/**
+ * \brief Allocate the miptree's embedded HiZ miptree.
+ * \see intel_mipmap_tree::hiz_mt
+ * \return false if allocation failed
+ */
+bool
+intel_miptree_alloc_hiz(struct intel_context *intel,
+ struct intel_mipmap_tree *mt);
+
/* i915_mipmap_tree.c:
*/
void i915_miptree_layout(struct intel_mipmap_tree *mt);
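For reference, a minimal caller sketch (not part of the patch) of the new intel_miptree_alloc_hiz() entry point, mirroring the renderbuffer path above. The helper name is illustrative only; `intel` and a depth-format miptree `mt` are assumed to already exist and to have passed the is_hiz_depth_format() check.

/* Hypothetical helper, not from the patch: enable HiZ on a depth miptree. */
static bool
example_enable_hiz(struct intel_context *intel, struct intel_mipmap_tree *mt)
{
   /* intel_miptree_alloc_hiz() creates the embedded HiZ miptree; on failure
    * mt->hiz_mt stays NULL and rendering simply proceeds without HiZ.
    */
   if (!intel_miptree_alloc_hiz(intel, mt))
      return false;

   /* mt->hiz_mt now holds the auxiliary HiZ miptree. */
   return true;
}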