const struct pipe_draw_info *info,
unsigned index_offset)
{
- struct pipe_resource *idx_buffer = NULL;
- enum a4xx_index_size idx_type;
enum pc_di_src_sel src_sel;
- uint32_t idx_size, idx_offset;
if (info->index_size) {
assert(!info->has_user_indices);
- idx_buffer = info->index.resource;
- idx_type = fd4_size2indextype(info->index_size);
- idx_size = info->index_size * info->count;
- idx_offset = index_offset + info->start * info->index_size;
+ struct pipe_resource *idx_buffer = info->index.resource;
+ uint32_t idx_size = info->index_size * info->count;
+ uint32_t idx_offset = index_offset + info->start * info->index_size;
+ enum a4xx_index_size idx_type = fd4_size2indextype(info->index_size);
src_sel = DI_SRC_SEL_DMA;
-
- OUT_PKT7(ring, CP_DRAW_INDX_OFFSET, idx_buffer ? 7 : 3);
+ OUT_PKT7(ring, CP_DRAW_INDX_OFFSET, 7);
if (vismode == USE_VISIBILITY) {
/* leave vis mode blank for now, it will be patched up when
 * we know if we are binning or not
@@ ... @@
 OUT_RELOC(ring, fd_resource(idx_buffer)->bo, idx_offset, 0, 0);
OUT_RING (ring, idx_size);
} else {
- idx_buffer = NULL;
- idx_type = INDEX4_SIZE_32_BIT;
- idx_size = 0;
- idx_offset = 0;
src_sel = DI_SRC_SEL_AUTO_INDEX;
- OUT_PKT7(ring, CP_DRAW_INDX_OFFSET, idx_buffer ? 7 : 3);
+ OUT_PKT7(ring, CP_DRAW_INDX_OFFSET, 3);
if (vismode == USE_VISIBILITY) {
/* leave vis mode blank for now, it will be patched up when
* we know if we are binning or not
*/
- OUT_RINGP(ring, DRAW4(primtype, src_sel, idx_type, 0) | 0x2000,
+ OUT_RINGP(ring, DRAW4(primtype, src_sel, INDEX4_SIZE_32_BIT, 0) | 0x2000,
&batch->draw_patches);
} else {
- OUT_RING(ring, DRAW4(primtype, src_sel, idx_type, vismode) | 0x2000);
+ OUT_RING(ring, DRAW4(primtype, src_sel, INDEX4_SIZE_32_BIT, vismode) | 0x2000);
}
OUT_RING(ring, info->instance_count); /* NumInstances */
OUT_RING(ring, info->count); /* NumIndices */