}
}
+/* Tracks the maximum bindful UBO index accessed, so that we can reduce the
+ * number of UBO descriptors emitted in the fast path for GL.
+ */
static void
-lower_ubo_block_decrement(nir_intrinsic_instr *instr, nir_builder *b, int *num_ubos)
+track_ubo_use(nir_intrinsic_instr *instr, nir_builder *b, int *num_ubos)
{
- /* Skip shifting things for turnip's bindless resources. */
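+ /* Bindless UBOs (turnip) don't use the bindful descriptors being counted
+  * here, so skip them.
+  */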
if (ir3_bindless_resource(instr->src[0])) {
assert(!b->shader->info.first_ubo_is_default_ubo); /* only set for GL */
return;
}
- /* Shift all GL nir_intrinsic_load_ubo UBO indices down by 1, because we
- * have lowered block 0 off of load_ubo to constbuf and ir3_const only
- * uploads pointers for block 1-N. This is also where we update the NIR
- * num_ubos to reflect the UBOs that remain in use after others got
- * lowered to constbuf access.
- */
if (nir_src_is_const(instr->src[0])) {
- int block = nir_src_as_uint(instr->src[0]) - 1;
+ int block = nir_src_as_uint(instr->src[0]);
*num_ubos = MAX2(*num_ubos, block + 1);
} else {
- *num_ubos = b->shader->info.num_ubos - 1;
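+ /* Indirect UBO index: conservatively assume all of the shader's bindful
+  * UBOs remain in use.
+  */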
+ *num_ubos = b->shader->info.num_ubos;
}
-
- nir_ssa_def *old_idx = nir_ssa_for_src(b, instr->src[0], 1);
- nir_ssa_def *new_idx = nir_iadd_imm(b, old_idx, -1);
- nir_instr_rewrite_src(&instr->instr, &instr->src[0],
- nir_src_for_ssa(new_idx));
}
static void
*/
struct ir3_ubo_range *range = get_existing_range(instr, state, false);
if (!range) {
- lower_ubo_block_decrement(instr, b, num_ubos);
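+ /* No constbuf range was set up for this UBO, so the load remains a
+  * bindful UBO access; record its use.
+  */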
+ track_ubo_use(instr, b, num_ubos);
return;
}
* access, so for now just fall back to pulling.
*/
if (!nir_src_is_const(instr->src[1])) {
- lower_ubo_block_decrement(instr, b, num_ubos);
+ track_ubo_use(instr, b, num_ubos);
return;
}
*/
const struct ir3_ubo_range r = get_ubo_load_range(instr, alignment);
if (!(range->start <= r.start && r.end <= range->end)) {
- lower_ubo_block_decrement(instr, b, num_ubos);
+ track_ubo_use(instr, b, num_ubos);
return;
}
}
OUT_RING(ring, CP_LOAD_STATE6_2_EXT_SRC_ADDR_HI(0));
for (int i = 0; i < num_ubos; i++) {
- /* Note: gallium constbuf 0 was always lowered to hardware constbuf,
- * and UBO load indices decremented by one.
- */
- struct pipe_constant_buffer *cb = &constbuf->cb[i + 1];
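+ /* Now that load_ubo indices are no longer shifted down by one,
+  * constbuf->cb[i] corresponds directly to the shader's UBO index i.
+  */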
+ struct pipe_constant_buffer *cb = &constbuf->cb[i];
/* If we have user pointers (constbuf 0, aka GL uniforms), upload them
* to a buffer now, and save it in the constbuf so that we don't have
struct pipe_resource *prscs[params];
for (uint32_t i = 0; i < params; i++) {
- const uint32_t index = i + 1; /* UBOs start at index 1 */
- struct pipe_constant_buffer *cb = &constbuf->cb[index];
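+ /* cb[i] now maps directly to the shader's UBO index i. */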
+ struct pipe_constant_buffer *cb = &constbuf->cb[i];
/* If we have user pointers (constbuf 0, aka GL uniforms), upload
* them to a buffer now, and save it in the constbuf so that we
cb->user_buffer = NULL;
}
- if ((constbuf->enabled_mask & (1 << index)) && cb->buffer) {
+ if ((constbuf->enabled_mask & (1 << i)) && cb->buffer) {
offsets[i] = cb->buffer_offset;
prscs[i] = cb->buffer;
} else {