+static inline struct qreg
+qir_LOAD_IMM(struct vc4_compile *c, uint32_t val)
+{
+ return qir_emit_def(c, qir_inst(QOP_LOAD_IMM, c->undef,
+ qir_reg(QFILE_LOAD_IMM, val), c->undef));
+}
+
+static inline struct qreg
+qir_LOAD_IMM_U2(struct vc4_compile *c, uint32_t val)
+{
+ return qir_emit_def(c, qir_inst(QOP_LOAD_IMM_U2, c->undef,
+ qir_reg(QFILE_LOAD_IMM, val),
+ c->undef));
+}
+
+static inline struct qreg
+qir_LOAD_IMM_I2(struct vc4_compile *c, uint32_t val)
+{
+ return qir_emit_def(c, qir_inst(QOP_LOAD_IMM_I2, c->undef,
+ qir_reg(QFILE_LOAD_IMM, val),
+ c->undef));
+}
+
+/** Shifts the multiply output to the right by rot channels */
+static inline struct qreg
+qir_ROT_MUL(struct vc4_compile *c, struct qreg val, uint32_t rot)
+{
+ return qir_emit_def(c, qir_inst(QOP_ROT_MUL, c->undef,
+ val,
+ qir_reg(QFILE_LOAD_IMM,
+ QPU_SMALL_IMM_MUL_ROT + rot)));
+}
+
+static inline struct qinst *
+qir_MOV_cond(struct vc4_compile *c, uint8_t cond,
+ struct qreg dest, struct qreg src)
+{
+ struct qinst *mov = qir_MOV_dest(c, dest, src);
+ mov->cond = cond;
+ return mov;
+}
+
+static inline struct qinst *
+qir_BRANCH(struct vc4_compile *c, uint8_t cond)
+{
+ struct qinst *inst = qir_inst(QOP_BRANCH, c->undef, c->undef, c->undef);
+ inst->cond = cond;
+ qir_emit_nondef(c, inst);
+ return inst;
+}
+
/* Iterates over each basic block of the program, in list order. */
#define qir_for_each_block(block, c) \
        list_for_each_entry(struct qblock, block, &c->blocks, link)

/* Same as qir_for_each_block(), but walks the block list backwards. */
#define qir_for_each_block_rev(block, c) \
        list_for_each_entry_rev(struct qblock, block, &c->blocks, link)
+
/* Loop over the non-NULL members of the successors array.
 *
 * Starts at successors[0]; the step expression advances to successors[1]
 * unless we are already on it (including the case where both entries are
 * the same block, which then iterates only once), in which case it yields
 * NULL and the loop terminates.
 *
 * NOTE(review): this assumes successors[1] is never set while
 * successors[0] is NULL — such an entry would be skipped.  Confirm that
 * invariant holds at the successor-assignment sites.
 */
#define qir_for_each_successor(succ, block) \
        for (struct qblock *succ = block->successors[0]; \
             succ != NULL; \
             succ = (succ == block->successors[1] ? NULL : \
                     block->successors[1]))
+
/* Iterates over each instruction of a block, in list order. */
#define qir_for_each_inst(inst, block) \
        list_for_each_entry(struct qinst, inst, &block->instructions, link)

/* Iterates over the instructions of a block in reverse order. */
#define qir_for_each_inst_rev(inst, block) \
        list_for_each_entry_rev(struct qinst, inst, &block->instructions, link)

/* Like qir_for_each_inst(), but uses the _safe list walk so the current
 * instruction may be removed from the list during iteration.
 */
#define qir_for_each_inst_safe(inst, block) \
        list_for_each_entry_safe(struct qinst, inst, &block->instructions, link)

/* Iterates over every instruction of the program, walking the blocks in
 * order.  Built on the _safe variant, so removing the current instruction
 * while iterating is allowed.
 */
#define qir_for_each_inst_inorder(inst, c) \
        qir_for_each_block(_block, c) \
                qir_for_each_inst_safe(inst, _block)
+