nir_variable *switch_fall_var, bool *has_switch_break,
vtn_instruction_handler handler)
{
-   list_for_each_entry(struct vtn_cf_node, node, cf_list, link) {
+   vtn_foreach_cf_node(node, cf_list) {
      switch (node->type) {
      case vtn_cf_node_type_block: {
-         struct vtn_block *block = (struct vtn_block *)node;
+         struct vtn_block *block = vtn_cf_node_as_block(node);
         const uint32_t *block_start = block->label;
         const uint32_t *block_end = block->merge ? block->merge :
      }
      case vtn_cf_node_type_if: {
-         struct vtn_if *vtn_if = (struct vtn_if *)node;
+         struct vtn_if *vtn_if = vtn_cf_node_as_if(node);
         bool sw_break = false;
         nir_if *nif =
      }
      case vtn_cf_node_type_loop: {
-         struct vtn_loop *vtn_loop = (struct vtn_loop *)node;
+         struct vtn_loop *vtn_loop = vtn_cf_node_as_loop(node);
         nir_loop *loop = nir_push_loop(&b->nb);
         loop->control = vtn_loop_control(b, vtn_loop);
      }
      case vtn_cf_node_type_switch: {
-         struct vtn_switch *vtn_switch = (struct vtn_switch *)node;
+         struct vtn_switch *vtn_switch = vtn_cf_node_as_switch(node);
         /* First, we create a variable to keep track of whether or not the
          * switch is still going at any given point. Any switch breaks
   SpvFunctionControlMask control;
};
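+/* Type-safe down-casts from the generic vtn_cf_node to each concrete CF
+ * node struct.  The assert catches a mismatched node->type in debug builds
+ * before the pointer is reinterpreted.
+ */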
+#define VTN_DECL_CF_NODE_CAST(_type)               \
+static inline struct vtn_##_type *                 \
+vtn_cf_node_as_##_type(struct vtn_cf_node *node)   \
+{                                                  \
+   assert(node->type == vtn_cf_node_type_##_type); \
+   return (struct vtn_##_type *)node;              \
+}
+
+VTN_DECL_CF_NODE_CAST(block)
+VTN_DECL_CF_NODE_CAST(loop)
+VTN_DECL_CF_NODE_CAST(if)
+VTN_DECL_CF_NODE_CAST(switch)
+
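+/* Iterate over every vtn_cf_node in a CF list (a list_head of vtn_cf_node
+ * entries linked through the 'link' member).
+ */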
+#define vtn_foreach_cf_node(node, cf_list) \
+   list_for_each_entry(struct vtn_cf_node, node, cf_list, link)
+
typedef bool (*vtn_instruction_handler)(struct vtn_builder *, SpvOp,
                                        const uint32_t *, unsigned);