vc4: Add a constant folding pass.
author Eric Anholt <eric@anholt.net>
Mon, 30 Mar 2015 17:38:21 +0000 (10:38 -0700)
committer Eric Anholt <eric@anholt.net>
Mon, 30 Mar 2015 19:57:45 +0000 (12:57 -0700)
This cleans up some pointless operations generated by the in-driver mul24
lowering (commonly triggered by computing a vec4 index for a matrix in a
uniform array).

I could fill in other operations, but pretty much anything else ought to
be getting handled at the NIR level, I think.

total uniforms in shared programs: 13423 -> 13421 (-0.01%)
uniforms in affected programs:     346 -> 344 (-0.58%)
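
The idea in isolation: when every source of an instruction is a compile-time
constant, evaluate the operation once and rewrite the instruction as a move
of the precomputed value.  A minimal standalone sketch of that pattern, using
toy structures rather than the driver's QIR types (all names below are
illustrative, not vc4 API):

/* Toy constant folder: illustrative types only, not the vc4 QIR. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

enum toy_op { TOY_MOV, TOY_SHR };

struct toy_inst {
        enum toy_op op;
        bool src_is_const[2];   /* is each source a known constant? */
        uint32_t src[2];        /* the constant value when it is */
};

static bool
toy_constant_fold(struct toy_inst *inst)
{
        /* Every source has to be a known constant for the fold to apply. */
        for (int i = 0; i < 2; i++) {
                if (!inst->src_is_const[i])
                        return false;
        }

        uint32_t result;
        switch (inst->op) {
        case TOY_SHR:
                result = inst->src[0] >> inst->src[1];
                break;
        default:
                /* Unhandled opcode: leave the instruction alone. */
                return false;
        }

        /* Rewrite the instruction as a move of the folded value. */
        inst->op = TOY_MOV;
        inst->src[0] = result;
        return true;
}

int
main(void)
{
        struct toy_inst inst = {
                .op = TOY_SHR,
                .src_is_const = { true, true },
                .src = { 0x1234, 4 },
        };

        if (toy_constant_fold(&inst))
                printf("folded to MOV 0x%x\n", (unsigned)inst.src[0]);
        return 0;
}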

src/gallium/drivers/vc4/Makefile.sources
src/gallium/drivers/vc4/vc4_opt_constant_folding.c [new file with mode: 0644]
src/gallium/drivers/vc4/vc4_qir.c
src/gallium/drivers/vc4/vc4_qir.h

diff --git a/src/gallium/drivers/vc4/Makefile.sources b/src/gallium/drivers/vc4/Makefile.sources
index c7254ea1473383bd377b21eff5604e8655355ad2..ec0f25ca34aaadde07f16fc15d7af6f082b6abaa 100644
@@ -12,6 +12,7 @@ C_SOURCES := \
        vc4_fence.c \
        vc4_formats.c \
        vc4_opt_algebraic.c \
+       vc4_opt_constant_folding.c \
        vc4_opt_copy_propagation.c \
        vc4_opt_cse.c \
        vc4_opt_dead_code.c \
diff --git a/src/gallium/drivers/vc4/vc4_opt_constant_folding.c b/src/gallium/drivers/vc4/vc4_opt_constant_folding.c
new file mode 100644
index 0000000..ac9be5c
--- /dev/null
@@ -0,0 +1,110 @@
+/*
+ * Copyright © 2015 Broadcom
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice (including the next
+ * paragraph) shall be included in all copies or substantial portions of the
+ * Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
+ * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+ * IN THE SOFTWARE.
+ */
+
+/**
+ * @file vc4_opt_constant_folding.c
+ *
+ * Simple constant folding pass to clean up operations on only constants,
+ * which we might have generated within vc4_program.c.
+ */
+
+#include "vc4_qir.h"
+#include "util/u_math.h"
+
+static bool debug;
+
+static void
+dump_from(struct vc4_compile *c, struct qinst *inst)
+{
+        if (!debug)
+                return;
+
+        fprintf(stderr, "optimizing: ");
+        qir_dump_inst(c, inst);
+        fprintf(stderr, "\n");
+}
+
+static void
+dump_to(struct vc4_compile *c, struct qinst *inst)
+{
+        if (!debug)
+                return;
+
+        fprintf(stderr, "to: ");
+        qir_dump_inst(c, inst);
+        fprintf(stderr, "\n");
+}
+
+static bool
+constant_fold(struct vc4_compile *c, struct qinst *inst)
+{
+        int nsrc = qir_get_op_nsrc(inst->op);
+        uint32_t ui[nsrc];
+
+        for (int i = 0; i < nsrc; i++) {
+                struct qreg reg = inst->src[i];
+                if (reg.file == QFILE_UNIF &&
+                    c->uniform_contents[reg.index] == QUNIFORM_CONSTANT) {
+                        ui[i] = c->uniform_data[reg.index];
+                } else if (reg.file == QFILE_SMALL_IMM) {
+                        ui[i] = reg.index;
+                } else {
+                        return false;
+                }
+        }
+
+        uint32_t result = 0;
+        switch (inst->op) {
+        case QOP_SHR:
+                result = ui[0] >> ui[1];
+                break;
+
+        default:
+                return false;
+        }
+
+        dump_from(c, inst);
+
+        inst->src[0] = qir_uniform_ui(c, result);
+        for (int i = 1; i < nsrc; i++)
+                inst->src[i] = c->undef;
+        inst->op = QOP_MOV;
+
+        dump_to(c, inst);
+        return true;
+}
+
+bool
+qir_opt_constant_folding(struct vc4_compile *c)
+{
+        bool progress = false;
+        struct simple_node *node;
+
+        foreach(node, &c->instructions) {
+                struct qinst *inst = (struct qinst *)node;
+                if (constant_fold(c, inst))
+                        progress = true;
+        }
+
+        return progress;
+}
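
The rewrite step depends on qir_uniform_ui() presumably handing back a
QFILE_UNIF register whose uniform slot holds the folded value, the same
representation the source loop reads constants out of (uniform_contents[] /
uniform_data[]).  A rough standalone sketch of that kind of constant pool,
with made-up names and no claim about how the real helper behaves:

/* Toy constant-uniform pool: push a 32-bit constant, get back the slot a
 * later instruction could reference.  Illustrative only; the real driver
 * keeps parallel uniform_contents[]/uniform_data[] arrays on vc4_compile. */
#include <stdint.h>
#include <stdio.h>

#define TOY_MAX_UNIFORMS 64

struct toy_pool {
        uint32_t data[TOY_MAX_UNIFORMS];
        int count;
};

static int
toy_pool_add_constant(struct toy_pool *pool, uint32_t value)
{
        /* The toy reuses an existing slot when the value repeats; whether
         * the real helper deduplicates is not shown here.  No overflow
         * handling either, to keep the sketch short. */
        for (int i = 0; i < pool->count; i++) {
                if (pool->data[i] == value)
                        return i;
        }
        pool->data[pool->count] = value;
        return pool->count++;
}

int
main(void)
{
        struct toy_pool pool = { .count = 0 };
        int slot = toy_pool_add_constant(&pool, 0x1234 >> 4);

        printf("folded constant in slot %d: 0x%x\n",
               slot, (unsigned)pool.data[slot]);
        return 0;
}
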
diff --git a/src/gallium/drivers/vc4/vc4_qir.c b/src/gallium/drivers/vc4/vc4_qir.c
index e453d848096963381adf174096b5f81cf1ee7502..93be98a5b6af21bbe231792c01be5ed8c31b0fd0 100644
@@ -512,6 +512,7 @@ qir_optimize(struct vc4_compile *c)
 
                 OPTPASS(qir_opt_algebraic);
                 OPTPASS(qir_opt_cse);
+                OPTPASS(qir_opt_constant_folding);
                 OPTPASS(qir_opt_copy_propagation);
                 OPTPASS(qir_opt_dead_code);
                 OPTPASS(qir_opt_small_immediates);
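
Like the existing passes, the new one returns a bool so the optimizer can
keep re-running the pass list while anything still reports progress.  A
generic sketch of that run-to-fixed-point pattern (illustrative only, not
the driver's actual OPTPASS machinery):

/* Generic "run optimization passes until nothing changes" loop.  The types
 * and pass list are illustrative; vc4's qir_optimize() wraps each call in
 * an OPTPASS macro instead. */
#include <stdbool.h>
#include <stddef.h>

struct toy_compile;

typedef bool (*toy_pass)(struct toy_compile *c);

static void
toy_optimize(struct toy_compile *c, const toy_pass *passes, int npasses)
{
        bool progress;

        do {
                progress = false;
                for (int i = 0; i < npasses; i++) {
                        /* A pass returns true when it changed something,
                         * which may open up new opportunities for the
                         * other passes on the next round. */
                        if (passes[i](c))
                                progress = true;
                }
        } while (progress);
}

static bool
toy_noop_pass(struct toy_compile *c)
{
        (void)c;
        return false;   /* made no progress */
}

int
main(void)
{
        const toy_pass passes[] = { toy_noop_pass };

        toy_optimize(NULL, passes, 1);
        return 0;
}
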
diff --git a/src/gallium/drivers/vc4/vc4_qir.h b/src/gallium/drivers/vc4/vc4_qir.h
index 4f910e3c3df9702a6ebd7b18deb9a95ca3b57e87..6cc8dbefea450ec45f01c2f9d741a8fba8c87d52 100644
@@ -396,6 +396,7 @@ const char *qir_get_stage_name(enum qstage stage);
 
 void qir_optimize(struct vc4_compile *c);
 bool qir_opt_algebraic(struct vc4_compile *c);
+bool qir_opt_constant_folding(struct vc4_compile *c);
 bool qir_opt_copy_propagation(struct vc4_compile *c);
 bool qir_opt_cse(struct vc4_compile *c);
 bool qir_opt_dead_code(struct vc4_compile *c);