QPU_COND_CC,
};
+/* Values for the 4-bit condition field of branch instructions (see
+ * QPU_BRANCH_COND_SHIFT/MASK, bits 55:52).
+ *
+ * The name pattern is <ALL|ANY>_<flag><S|C>: presumably "all" or "any" of
+ * the QPU's SIMD channels have the Z/N/C condition flag set (S) or clear
+ * (C) -- NOTE(review): flag semantics inferred from naming; confirm
+ * against the VideoCore IV architecture reference.
+ */
+enum qpu_branch_cond {
+ QPU_COND_BRANCH_ALL_ZS,
+ QPU_COND_BRANCH_ALL_ZC,
+ QPU_COND_BRANCH_ANY_ZS,
+ QPU_COND_BRANCH_ANY_ZC,
+ QPU_COND_BRANCH_ALL_NS,
+ QPU_COND_BRANCH_ALL_NC,
+ QPU_COND_BRANCH_ANY_NS,
+ QPU_COND_BRANCH_ANY_NC,
+ QPU_COND_BRANCH_ALL_CS,
+ QPU_COND_BRANCH_ALL_CC,
+ QPU_COND_BRANCH_ANY_CS,
+ QPU_COND_BRANCH_ANY_CC,
+
+ /* Encodings 12-14 are not defined here; 15 branches unconditionally. */
+ QPU_COND_BRANCH_ALWAYS = 15
+};
+
enum qpu_pack_mul {
QPU_PACK_MUL_NOP,
QPU_PACK_MUL_8888 = 3, /* replicated to each 8 bits of the 32-bit dst. */
#define QPU_UNPACK_SHIFT 57
#define QPU_UNPACK_MASK QPU_MASK(59, 57)
+/* Load-immediate mode field.  It occupies the same bits (59:57) as the
+ * unpack field above, so the two encodings are mutually exclusive within
+ * one instruction.  U32 loads a single 32-bit value; I2/U2 presumably
+ * load per-channel signed/unsigned 2-bit values -- NOTE(review): confirm
+ * mode semantics against the VideoCore IV architecture reference.
+ */
+#define QPU_LOAD_IMM_MODE_SHIFT 57
+#define QPU_LOAD_IMM_MODE_MASK QPU_MASK(59, 57)
+# define QPU_LOAD_IMM_MODE_U32 0
+# define QPU_LOAD_IMM_MODE_I2 1
+# define QPU_LOAD_IMM_MODE_U2 3
+
/**
* If set, the pack field means PACK_MUL or R4 packing, instead of normal
* regfile a packing.
#define QPU_COND_MUL_SHIFT 46
#define QPU_COND_MUL_MASK QPU_MASK(48, 46)
+
+/* Branch-instruction fields.  The 4-bit condition selects one of
+ * enum qpu_branch_cond.
+ */
+#define QPU_BRANCH_COND_SHIFT 52
+#define QPU_BRANCH_COND_MASK QPU_MASK(55, 52)
+
+/* NOTE(review): REL presumably makes the target PC-relative and REG
+ * presumably adds the register read via the raddr_a field below --
+ * confirm against the VideoCore IV architecture reference.
+ */
+#define QPU_BRANCH_REL ((uint64_t)1 << 51)
+#define QPU_BRANCH_REG ((uint64_t)1 << 50)
+
+/* Register-file A read address for REG branches.  Bits 49:45 overlap
+ * QPU_SF (bit 45) of the ALU encoding, so the fields are distinguished
+ * by instruction type.
+ */
+#define QPU_BRANCH_RADDR_A_SHIFT 45
+#define QPU_BRANCH_RADDR_A_MASK QPU_MASK(49, 45)
+
#define QPU_SF ((uint64_t)1 << 45)
#define QPU_WADDR_ADD_SHIFT 38
#define QPU_RADDR_B_MASK QPU_MASK(17, 12)
#define QPU_SMALL_IMM_SHIFT 12
#define QPU_SMALL_IMM_MASK QPU_MASK(17, 12)
+/* Small immediate value for rotate-by-r5, and 49-63 are "rotate by n
+ * channels"
+ */
+#define QPU_SMALL_IMM_MUL_ROT 48
#define QPU_ADD_A_SHIFT 9
#define QPU_ADD_A_MASK QPU_MASK(11, 9)
#define QPU_OP_ADD_SHIFT 24
#define QPU_OP_ADD_MASK QPU_MASK(28, 24)
+/* 32-bit immediate payload of load-immediate instructions.  Bits 31:0
+ * overlap the ALU raddr/op/mux fields above, so this field only exists
+ * in the load-immediate instruction type.
+ */
+#define QPU_LOAD_IMM_SHIFT 0
+#define QPU_LOAD_IMM_MASK QPU_MASK(31, 0)
+
+/* 32-bit branch target, in the same low bits as the load immediate;
+ * interpretation depends on QPU_BRANCH_REL/QPU_BRANCH_REG.
+ */
+#define QPU_BRANCH_TARGET_SHIFT 0
+#define QPU_BRANCH_TARGET_MASK QPU_MASK(31, 0)
+
#endif /* VC4_QPU_DEFINES_H */