/* Size of one component of an instruction's result, in half-reg units:
 * half-precision values occupy one slot, full-precision two.
 */
static unsigned
live_size(struct ir3_instruction *instr)
{
   if (is_half(instr))
      return 1;
   /* "high" regs don't count towards the register footprint: */
   return is_high(instr) ? 0 : 2;
}
+
+static unsigned
+name_size(struct ir3_ra_ctx *ctx, unsigned name)
+{
+ if (name_is_array(ctx, name)) {
+ struct ir3_array *arr = name_to_array(ctx, name);
+ return arr->half ? 1 : 2;
+ } else {
+ struct ir3_instruction *instr = name_to_instr(ctx, name);
+ /* in scalar pass, each name represents on scalar value,
+ * half or full precision
+ */
+ return live_size(instr);
+ }
+}
+
/* Compute the peak number of simultaneously-live register components
 * (in half-reg units, per name_size()/live_size()) within one block.
 *
 * Seeds the live set from the block's livein, then walks instructions
 * in order: values become live at their defining instruction and die
 * at their last use (tracked via ctx->def[] / ctx->use[] ip's).  The
 * running total is compared against the max after each instruction.
 */
static unsigned
ra_calc_block_live_values(struct ir3_ra_ctx *ctx, struct ir3_block *block)
{
   struct ir3_ra_block_data *bd = block->data;
   unsigned name;

   assert(ctx->name_to_instr);

   /* TODO this gets a bit more complicated in non-scalar pass.. but
    * possibly a lowball estimate is fine to start with if we do
    * round-robin in non-scalar pass?  Maybe we just want to handle
    * that in a different fxn?
    */
   assert(ctx->scalar_pass);

   /* Scratch bitset of currently-live names; ralloc'd off bd and
    * freed before return:
    */
   BITSET_WORD *live =
      rzalloc_array(bd, BITSET_WORD, BITSET_WORDS(ctx->alloc_count));

   /* Add the live input values: */
   unsigned livein = 0;
   BITSET_FOREACH_SET (name, bd->livein, ctx->alloc_count) {
      livein += name_size(ctx, name);
      BITSET_SET(live, name);
   }

   d("---------------------");
   d("block%u: LIVEIN: %u", block_id(block), livein);

   unsigned max = livein;
   int cur_live = max;   /* signed so the asserts below can catch underflow */

   /* Now that we know the live inputs to the block, iterate the
    * instructions adjusting the current # of live values as we
    * see their last use:
    */
   foreach_instr (instr, &block->instr_list) {
      if (RA_DEBUG)
         print_bitset("LIVE", live, ctx->alloc_count);
      di(instr, "CALC");

      unsigned new_live = 0;  /* newly live values */
      unsigned new_dead = 0;  /* newly no-longer live values */
      unsigned next_dead = 0; /* newly dead following this instr */

      foreach_def (name, ctx, instr) {
         /* NOTE: checking ctx->def filters out things like split/
          * collect which are just redefining existing live names
          * or array writes to already live array elements:
          */
         if (ctx->def[name] != instr->ip)
            continue;
         /* live_size(instr) rather than name_size(): in the scalar
          * pass the defining instr determines the value's precision:
          */
         new_live += live_size(instr);
         d("NEW_LIVE: %u (new_live=%u, use=%u)", name, new_live, ctx->use[name]);
         BITSET_SET(live, name);
         /* There can be cases where this is *also* the last use
          * of a value, for example instructions that write multiple
          * values, only some of which are used.  These values are
          * dead *after* (rather than during) this instruction.
          */
         if (ctx->use[name] != instr->ip)
            continue;
         next_dead += live_size(instr);
         d("NEXT_DEAD: %u (next_dead=%u)", name, next_dead);
         BITSET_CLEAR(live, name);
      }

      /* To be more resilient against special cases where liverange
       * is extended (like first_non_input), rather than using the
       * foreach_use() iterator, we iterate the current live values
       * instead:
       */
      BITSET_FOREACH_SET (name, live, ctx->alloc_count) {
         /* Is this the last use? */
         if (ctx->use[name] != instr->ip)
            continue;
         new_dead += name_size(ctx, name);
         d("NEW_DEAD: %u (new_dead=%u)", name, new_dead);
         BITSET_CLEAR(live, name);
      }

      cur_live += new_live;
      cur_live -= new_dead;

      assert(cur_live >= 0);
      d("CUR_LIVE: %u", cur_live);

      max = MAX2(max, cur_live);

      /* account for written values which are not used later,
       * but after updating max (since they are for one cycle
       * live)
       */
      cur_live -= next_dead;
      assert(cur_live >= 0);

      if (RA_DEBUG) {
         /* Cross-check the incremental count against a full recount
          * of the live bitset:
          */
         unsigned cnt = 0;
         BITSET_FOREACH_SET (name, live, ctx->alloc_count) {
            cnt += name_size(ctx, name);
         }
         assert(cur_live == cnt);
      }
   }

   d("block%u max=%u", block_id(block), max);

   /* the remaining live should match liveout (for extra sanity testing): */
   if (RA_DEBUG) {
      /* Retire values whose last use is the block's end ip: */
      unsigned new_dead = 0;
      BITSET_FOREACH_SET (name, live, ctx->alloc_count) {
         /* Is this the last use? */
         if (ctx->use[name] != block->end_ip)
            continue;
         new_dead += name_size(ctx, name);
         d("NEW_DEAD: %u (new_dead=%u)", name, new_dead);
         BITSET_CLEAR(live, name);
      }
      unsigned liveout = 0;
      BITSET_FOREACH_SET (name, bd->liveout, ctx->alloc_count) {
         liveout += name_size(ctx, name);
         BITSET_CLEAR(live, name);
      }

      if (cur_live != liveout) {
         /* anything still set in 'live' at this point escaped both
          * the last-use accounting and the liveout set:
          */
         print_bitset("LEAKED", live, ctx->alloc_count);
         /* TODO there are a few edge cases where live-range extension
          * tells us a value is livein.  But not used by the block or
          * liveout for the block.  Possibly a bug in the liverange
          * extension.  But for now leave the assert disabled:
         assert(cur_live == liveout);
          */
      }
   }

   ralloc_free(live);

   return max;
}
+
+static unsigned
+ra_calc_max_live_values(struct ir3_ra_ctx *ctx)
+{
+ unsigned max = 0;
+
+ foreach_block (block, &ctx->ir->block_list) {
+ unsigned block_live = ra_calc_block_live_values(ctx, block);
+ max = MAX2(max, block_live);
+ }
+
+ return max;
+}
+