nir: Add a deref instruction type
[mesa.git] / src / compiler / nir / nir_validate.c
1 /*
2 * Copyright © 2014 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 *
23 * Authors:
24 * Connor Abbott (cwabbott0@gmail.com)
25 *
26 */
27
28 #include "nir.h"
29 #include <assert.h>
30
31 /*
32 * This file checks for invalid IR indicating a bug somewhere in the compiler.
33 */
34
35 /* Since this file is just a pile of asserts, don't bother compiling it if
36 * we're not building a debug build.
37 */
38 #ifndef NDEBUG
39
40 /*
41 * Per-register validation state.
42 */
43
44 typedef struct {
45 /*
46 * equivalent to the uses and defs in nir_register, but built up by the
47 * validator. At the end, we verify that the sets have the same entries.
48 */
49 struct set *uses, *if_uses, *defs;
50 nir_function_impl *where_defined; /* NULL for global registers */
51 } reg_validate_state;
52
53 typedef struct {
54 /*
55 * equivalent to the uses in nir_ssa_def, but built up by the validator.
56 * At the end, we verify that the sets have the same entries.
57 */
58 struct set *uses, *if_uses;
59 nir_function_impl *where_defined;
60 } ssa_def_validate_state;
61
62 typedef struct {
63 /* map of register -> validation state (struct above) */
64 struct hash_table *regs;
65
66 /* the current shader being validated */
67 nir_shader *shader;
68
69 /* the current instruction being validated */
70 nir_instr *instr;
71
72 /* the current variable being validated */
73 nir_variable *var;
74
75 /* the current basic block being validated */
76 nir_block *block;
77
78 /* the current if statement being validated */
79 nir_if *if_stmt;
80
81 /* the current loop being visited */
82 nir_loop *loop;
83
84 /* the parent of the current cf node being visited */
85 nir_cf_node *parent_node;
86
87 /* the current function implementation being validated */
88 nir_function_impl *impl;
89
90 /* map of SSA value -> function implementation where it is defined */
91 struct hash_table *ssa_defs;
92
93 /* bitset of ssa definitions we have found; used to check uniqueness */
94 BITSET_WORD *ssa_defs_found;
95
96 /* bitset of registers we have currently found; used to check uniqueness */
97 BITSET_WORD *regs_found;
98
99 /* map of variable -> function implementation where it is defined or NULL
100 * if it is a global variable
101 */
102 struct hash_table *var_defs;
103
104 /* map of instruction/var/etc to failed assert string */
105 struct hash_table *errors;
106 } validate_state;
107
108 static void
109 log_error(validate_state *state, const char *cond, const char *file, int line)
110 {
111 const void *obj;
112
113 if (state->instr)
114 obj = state->instr;
115 else if (state->var)
116 obj = state->var;
117 else
118 obj = cond;
119
120 char *msg = ralloc_asprintf(state->errors, "error: %s (%s:%d)",
121 cond, file, line);
122
123 _mesa_hash_table_insert(state->errors, obj, msg);
124 }
125
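/* Unlike a plain assert(), validate_assert() records the failure (keyed to
 * the instruction or variable currently being validated) and keeps going, so
 * a single validation run can report many problems at once.  If anything was
 * logged, dump_errors() prints the annotated shader and aborts at the end of
 * nir_validate_shader().
 */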
126 #define validate_assert(state, cond) do { \
127 if (!(cond)) \
128 log_error(state, #cond, __FILE__, __LINE__); \
129 } while (0)
130
131 static void validate_src(nir_src *src, validate_state *state,
132 unsigned bit_size, unsigned num_components);
133
134 static void
135 validate_reg_src(nir_src *src, validate_state *state,
136 unsigned bit_size, unsigned num_components)
137 {
138 validate_assert(state, src->reg.reg != NULL);
139
140 struct hash_entry *entry;
141 entry = _mesa_hash_table_search(state->regs, src->reg.reg);
142 validate_assert(state, entry);
143
144 reg_validate_state *reg_state = (reg_validate_state *) entry->data;
145
146 if (state->instr) {
147 _mesa_set_add(reg_state->uses, src);
148 } else {
149 validate_assert(state, state->if_stmt);
150 _mesa_set_add(reg_state->if_uses, src);
151 }
152
153 if (!src->reg.reg->is_global) {
154 validate_assert(state, reg_state->where_defined == state->impl &&
155 "using a register declared in a different function");
156 }
157
158 if (!src->reg.reg->is_packed) {
159 if (bit_size)
160 validate_assert(state, src->reg.reg->bit_size == bit_size);
161 if (num_components)
162 validate_assert(state, src->reg.reg->num_components == num_components);
163 }
164
165 validate_assert(state, (src->reg.reg->num_array_elems == 0 ||
166 src->reg.base_offset < src->reg.reg->num_array_elems) &&
167 "definitely out-of-bounds array access");
168
169 if (src->reg.indirect) {
170 validate_assert(state, src->reg.reg->num_array_elems != 0);
171 validate_assert(state, (src->reg.indirect->is_ssa ||
172 src->reg.indirect->reg.indirect == NULL) &&
173 "only one level of indirection allowed");
174 validate_src(src->reg.indirect, state, 32, 1);
175 }
176 }
177
178 static void
179 validate_ssa_src(nir_src *src, validate_state *state,
180 unsigned bit_size, unsigned num_components)
181 {
182 validate_assert(state, src->ssa != NULL);
183
184 struct hash_entry *entry = _mesa_hash_table_search(state->ssa_defs, src->ssa);
185
186 validate_assert(state, entry);
187
188 if (!entry)
189 return;
190
191 ssa_def_validate_state *def_state = (ssa_def_validate_state *)entry->data;
192
193 validate_assert(state, def_state->where_defined == state->impl &&
194 "using an SSA value defined in a different function");
195
196 if (state->instr) {
197 _mesa_set_add(def_state->uses, src);
198 } else {
199 validate_assert(state, state->if_stmt);
200 _mesa_set_add(def_state->if_uses, src);
201 }
202
203 if (bit_size)
204 validate_assert(state, src->ssa->bit_size == bit_size);
205 if (num_components)
206 validate_assert(state, src->ssa->num_components == num_components);
207
208 /* TODO validate that the use is dominated by the definition */
209 }
210
211 static void
212 validate_src(nir_src *src, validate_state *state,
213 unsigned bit_size, unsigned num_components)
214 {
215 if (state->instr)
216 validate_assert(state, src->parent_instr == state->instr);
217 else
218 validate_assert(state, src->parent_if == state->if_stmt);
219
220 if (src->is_ssa)
221 validate_ssa_src(src, state, bit_size, num_components);
222 else
223 validate_reg_src(src, state, bit_size, num_components);
224 }
225
226 static void
227 validate_alu_src(nir_alu_instr *instr, unsigned index, validate_state *state)
228 {
229 nir_alu_src *src = &instr->src[index];
230
231 unsigned num_components = nir_src_num_components(src->src);
232 if (!src->src.is_ssa && src->src.reg.reg->is_packed)
233 num_components = 4; /* can't check anything */
234 for (unsigned i = 0; i < 4; i++) {
235 validate_assert(state, src->swizzle[i] < 4);
236
237 if (nir_alu_instr_channel_used(instr, index, i))
238 validate_assert(state, src->swizzle[i] < num_components);
239 }
240
241 validate_src(&src->src, state, 0, 0);
242 }
243
244 static void
245 validate_reg_dest(nir_reg_dest *dest, validate_state *state,
246 unsigned bit_size, unsigned num_components)
247 {
248 validate_assert(state, dest->reg != NULL);
249
250 validate_assert(state, dest->parent_instr == state->instr);
251
252 struct hash_entry *entry2;
253 entry2 = _mesa_hash_table_search(state->regs, dest->reg);
254
255 validate_assert(state, entry2);
256
257 reg_validate_state *reg_state = (reg_validate_state *) entry2->data;
258 _mesa_set_add(reg_state->defs, dest);
259
260 if (!dest->reg->is_global) {
261 validate_assert(state, reg_state->where_defined == state->impl &&
262 "writing to a register declared in a different function");
263 }
264
265 if (!dest->reg->is_packed) {
266 if (bit_size)
267 validate_assert(state, dest->reg->bit_size == bit_size);
268 if (num_components)
269 validate_assert(state, dest->reg->num_components == num_components);
270 }
271
272 validate_assert(state, (dest->reg->num_array_elems == 0 ||
273 dest->base_offset < dest->reg->num_array_elems) &&
274 "definitely out-of-bounds array access");
275
276 if (dest->indirect) {
277 validate_assert(state, dest->reg->num_array_elems != 0);
278 validate_assert(state, (dest->indirect->is_ssa || dest->indirect->reg.indirect == NULL) &&
279 "only one level of indirection allowed");
280 validate_src(dest->indirect, state, 32, 1);
281 }
282 }
283
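/* Registers a newly-seen SSA def: checks its index, uniqueness, and owning
 * instruction, and creates empty use/if_use sets in state->ssa_defs.  The
 * sets are filled in by validate_ssa_src() as uses are encountered and are
 * cross-checked against the def's real use lists in postvalidate_ssa_def().
 */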
284 static void
285 validate_ssa_def(nir_ssa_def *def, validate_state *state)
286 {
287 validate_assert(state, def->index < state->impl->ssa_alloc);
288 validate_assert(state, !BITSET_TEST(state->ssa_defs_found, def->index));
289 BITSET_SET(state->ssa_defs_found, def->index);
290
291 validate_assert(state, def->parent_instr == state->instr);
292
293 validate_assert(state, (def->num_components <= 4) ||
294 (def->num_components == 8) ||
295 (def->num_components == 16));
296
297 list_validate(&def->uses);
298 list_validate(&def->if_uses);
299
300 ssa_def_validate_state *def_state = ralloc(state->ssa_defs,
301 ssa_def_validate_state);
302 def_state->where_defined = state->impl;
303 def_state->uses = _mesa_set_create(def_state, _mesa_hash_pointer,
304 _mesa_key_pointer_equal);
305 def_state->if_uses = _mesa_set_create(def_state, _mesa_hash_pointer,
306 _mesa_key_pointer_equal);
307 _mesa_hash_table_insert(state->ssa_defs, def, def_state);
308 }
309
310 static void
311 validate_dest(nir_dest *dest, validate_state *state,
312 unsigned bit_size, unsigned num_components)
313 {
314 if (dest->is_ssa) {
315 if (bit_size)
316 validate_assert(state, dest->ssa.bit_size == bit_size);
317 if (num_components)
318 validate_assert(state, dest->ssa.num_components == num_components);
319 validate_ssa_def(&dest->ssa, state);
320 } else {
321 validate_reg_dest(&dest->reg, state, bit_size, num_components);
322 }
323 }
324
325 static void
326 validate_alu_dest(nir_alu_instr *instr, validate_state *state)
327 {
328 nir_alu_dest *dest = &instr->dest;
329
330 unsigned dest_size = nir_dest_num_components(dest->dest);
331 bool is_packed = !dest->dest.is_ssa && dest->dest.reg.reg->is_packed;
332 /*
333 * validate that the instruction doesn't write to components not in the
334 * register/SSA value
335 */
336 validate_assert(state, is_packed || !(dest->write_mask & ~((1 << dest_size) - 1)));
337
338 /* validate that saturate is only ever used on instructions with
339 * destinations of type float
340 */
341 nir_alu_instr *alu = nir_instr_as_alu(state->instr);
342 validate_assert(state,
343 (nir_alu_type_get_base_type(nir_op_infos[alu->op].output_type) ==
344 nir_type_float) ||
345 !dest->saturate);
346
347 validate_dest(&dest->dest, state, 0, 0);
348 }
349
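/* Bit-size rules for ALU instructions: a source or destination whose ALU
 * type is explicitly sized must match that size exactly, while all unsized
 * sources and an unsized destination must agree on a single bit size.
 * Float operands are additionally restricted to 16, 32, or 64 bits.
 */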
350 static void
351 validate_alu_instr(nir_alu_instr *instr, validate_state *state)
352 {
353 validate_assert(state, instr->op < nir_num_opcodes);
354
355 unsigned instr_bit_size = 0;
356 for (unsigned i = 0; i < nir_op_infos[instr->op].num_inputs; i++) {
357 nir_alu_type src_type = nir_op_infos[instr->op].input_types[i];
358 unsigned src_bit_size = nir_src_bit_size(instr->src[i].src);
359 if (nir_alu_type_get_type_size(src_type)) {
360 validate_assert(state, src_bit_size == nir_alu_type_get_type_size(src_type));
361 } else if (instr_bit_size) {
362 validate_assert(state, src_bit_size == instr_bit_size);
363 } else {
364 instr_bit_size = src_bit_size;
365 }
366
367 if (nir_alu_type_get_base_type(src_type) == nir_type_float) {
368 /* 8-bit float isn't a thing */
369 validate_assert(state, src_bit_size == 16 || src_bit_size == 32 ||
370 src_bit_size == 64);
371 }
372
373 validate_alu_src(instr, i, state);
374 }
375
376 nir_alu_type dest_type = nir_op_infos[instr->op].output_type;
377 unsigned dest_bit_size = nir_dest_bit_size(instr->dest.dest);
378 if (nir_alu_type_get_type_size(dest_type)) {
379 validate_assert(state, dest_bit_size == nir_alu_type_get_type_size(dest_type));
380 } else if (instr_bit_size) {
381 validate_assert(state, dest_bit_size == instr_bit_size);
382 } else {
383 /* The only unsized thing is the destination so it's vacuously valid */
384 }
385
386 if (nir_alu_type_get_base_type(dest_type) == nir_type_float) {
387 /* 8-bit float isn't a thing */
388 validate_assert(state, dest_bit_size == 16 || dest_bit_size == 32 ||
389 dest_bit_size == 64);
390 }
391
392 validate_alu_dest(instr, state);
393 }
394
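/* Validates a legacy nir_deref_var chain (still used by variable intrinsics,
 * texture instructions, and calls).  The new nir_deref_instr instructions
 * are validated separately in validate_deref_instr() below.
 */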
395 static void
396 validate_deref_chain(nir_deref *deref, nir_variable_mode mode,
397 validate_state *state)
398 {
399 validate_assert(state, deref->child == NULL || ralloc_parent(deref->child) == deref);
400
401 nir_deref *parent = NULL;
402 while (deref != NULL) {
403 switch (deref->deref_type) {
404 case nir_deref_type_array:
405 if (mode == nir_var_shared) {
406             /* Shared variables have somewhat more relaxed rules because we need
407 * to be able to handle array derefs on vectors. Fortunately,
408 * nir_lower_io handles these just fine.
409 */
410 validate_assert(state, glsl_type_is_array(parent->type) ||
411 glsl_type_is_matrix(parent->type) ||
412 glsl_type_is_vector(parent->type));
413 } else {
414 /* Most of NIR cannot handle array derefs on vectors */
415 validate_assert(state, glsl_type_is_array(parent->type) ||
416 glsl_type_is_matrix(parent->type));
417 }
418 validate_assert(state, deref->type == glsl_get_array_element(parent->type));
419 if (nir_deref_as_array(deref)->deref_array_type ==
420 nir_deref_array_type_indirect)
421 validate_src(&nir_deref_as_array(deref)->indirect, state, 32, 1);
422 break;
423
424 case nir_deref_type_struct:
425          assume(parent); /* cannot happen: deref chain starts w/ nir_deref_var */
426 validate_assert(state, deref->type ==
427 glsl_get_struct_field(parent->type,
428 nir_deref_as_struct(deref)->index));
429 break;
430
431 case nir_deref_type_var:
432 break;
433
434 default:
435 validate_assert(state, !"Invalid deref type");
436 break;
437 }
438
439 parent = deref;
440 deref = deref->child;
441 }
442 }
443
444 static void
445 validate_var_use(nir_variable *var, validate_state *state)
446 {
447 struct hash_entry *entry = _mesa_hash_table_search(state->var_defs, var);
448 validate_assert(state, entry);
449 if (var->data.mode == nir_var_local)
450 validate_assert(state, (nir_function_impl *) entry->data == state->impl);
451 }
452
453 static void
454 validate_deref_var(void *parent_mem_ctx, nir_deref_var *deref, validate_state *state)
455 {
456 validate_assert(state, deref != NULL);
457 validate_assert(state, ralloc_parent(deref) == parent_mem_ctx);
458 validate_assert(state, deref->deref.type == deref->var->type);
459
460 validate_var_use(deref->var, state);
461
462 validate_deref_chain(&deref->deref, deref->var->data.mode, state);
463 }
464
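/* Validation for the new deref instructions.  Variable derefs must match
 * their variable's mode and type.  Casts are only required to carry a mode
 * and a type; their parent value is taken on trust.  Every other deref must
 * have an SSA parent that is itself a deref instruction with the same mode,
 * and its type must follow from the parent's type (struct field for struct
 * derefs, array element for array and wildcard derefs).
 */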
465 static void
466 validate_deref_instr(nir_deref_instr *instr, validate_state *state)
467 {
468 if (instr->deref_type == nir_deref_type_var) {
469 /* Variable dereferences are stupid simple. */
470 validate_assert(state, instr->mode == instr->var->data.mode);
471 validate_assert(state, instr->type == instr->var->type);
472 validate_var_use(instr->var, state);
473 } else if (instr->deref_type == nir_deref_type_cast) {
474 /* For cast, we simply have to trust the instruction. It's up to
475 * lowering passes and front/back-ends to make them sane.
476 */
477 validate_src(&instr->parent, state, 0, 0);
478
479 /* We just validate that the type and mode are there */
480 validate_assert(state, instr->mode);
481 validate_assert(state, instr->type);
482 } else {
483 /* We require the parent to be SSA. This may be lifted in the future */
484 validate_assert(state, instr->parent.is_ssa);
485
486 /* The parent pointer value must have the same number of components
487 * as the destination.
488 */
489 validate_src(&instr->parent, state, nir_dest_bit_size(instr->dest),
490 nir_dest_num_components(instr->dest));
491
492 nir_instr *parent_instr = instr->parent.ssa->parent_instr;
493
494 /* The parent must come from another deref instruction */
495 validate_assert(state, parent_instr->type == nir_instr_type_deref);
496
497 nir_deref_instr *parent = nir_instr_as_deref(parent_instr);
498
499 validate_assert(state, instr->mode == parent->mode);
500
501 switch (instr->deref_type) {
502 case nir_deref_type_struct:
503 validate_assert(state, glsl_type_is_struct(parent->type));
504 validate_assert(state,
505 instr->strct.index < glsl_get_length(parent->type));
506 validate_assert(state, instr->type ==
507 glsl_get_struct_field(parent->type, instr->strct.index));
508 break;
509
510 case nir_deref_type_array:
511 case nir_deref_type_array_wildcard:
512 if (instr->mode == nir_var_shared) {
513             /* Shared variables have somewhat more relaxed rules because we need
514 * to be able to handle array derefs on vectors. Fortunately,
515 * nir_lower_io handles these just fine.
516 */
517 validate_assert(state, glsl_type_is_array(parent->type) ||
518 glsl_type_is_matrix(parent->type) ||
519 glsl_type_is_vector(parent->type));
520 } else {
521 /* Most of NIR cannot handle array derefs on vectors */
522 validate_assert(state, glsl_type_is_array(parent->type) ||
523 glsl_type_is_matrix(parent->type));
524 }
525 validate_assert(state,
526 instr->type == glsl_get_array_element(parent->type));
527
528 if (instr->deref_type == nir_deref_type_array)
529 validate_src(&instr->arr.index, state, 32, 1);
530 break;
531
532 default:
533 unreachable("Invalid deref instruction type");
534 }
535 }
536
537 /* We intentionally don't validate the size of the destination because we
538 * want to let other compiler components such as SPIR-V decide how big
539 * pointers should be.
540 */
541 validate_dest(&instr->dest, state, 0, 0);
542 }
543
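/* For the legacy variable intrinsics (load_var/store_var/copy_var), extra
 * checks are derived from the type at the tail of the deref chain (expected
 * bit sizes and component counts) and from the variable's mode (stores may
 * not target inputs, uniforms, or SSBO variables here).  For every
 * intrinsic, per-source and per-destination component counts come from the
 * nir_intrinsic_infos table.
 */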
544 static void
545 validate_intrinsic_instr(nir_intrinsic_instr *instr, validate_state *state)
546 {
547 unsigned dest_bit_size = 0;
548 unsigned src_bit_sizes[NIR_INTRINSIC_MAX_INPUTS] = { 0, };
549 switch (instr->intrinsic) {
550 case nir_intrinsic_load_var: {
551 const struct glsl_type *type =
552 nir_deref_tail(&instr->variables[0]->deref)->type;
553 validate_assert(state, glsl_type_is_vector_or_scalar(type) ||
554 (instr->variables[0]->var->data.mode == nir_var_uniform &&
555 glsl_get_base_type(type) == GLSL_TYPE_SUBROUTINE));
556 validate_assert(state, instr->num_components ==
557 glsl_get_vector_elements(type));
558 dest_bit_size = glsl_get_bit_size(type);
559 break;
560 }
561
562 case nir_intrinsic_store_var: {
563 const struct glsl_type *type =
564 nir_deref_tail(&instr->variables[0]->deref)->type;
565 validate_assert(state, glsl_type_is_vector_or_scalar(type) ||
566 (instr->variables[0]->var->data.mode == nir_var_uniform &&
567 glsl_get_base_type(type) == GLSL_TYPE_SUBROUTINE));
568 validate_assert(state, instr->num_components == glsl_get_vector_elements(type));
569 src_bit_sizes[0] = glsl_get_bit_size(type);
570 validate_assert(state, instr->variables[0]->var->data.mode != nir_var_shader_in &&
571 instr->variables[0]->var->data.mode != nir_var_uniform &&
572 instr->variables[0]->var->data.mode != nir_var_shader_storage);
573 validate_assert(state, (nir_intrinsic_write_mask(instr) & ~((1 << instr->num_components) - 1)) == 0);
574 break;
575 }
576
577 case nir_intrinsic_copy_var:
578 validate_assert(state, nir_deref_tail(&instr->variables[0]->deref)->type ==
579 nir_deref_tail(&instr->variables[1]->deref)->type);
580 validate_assert(state, instr->variables[0]->var->data.mode != nir_var_shader_in &&
581 instr->variables[0]->var->data.mode != nir_var_uniform &&
582 instr->variables[0]->var->data.mode != nir_var_shader_storage);
583 break;
584
585 default:
586 break;
587 }
588
589 unsigned num_srcs = nir_intrinsic_infos[instr->intrinsic].num_srcs;
590 for (unsigned i = 0; i < num_srcs; i++) {
591 unsigned components_read = nir_intrinsic_src_components(instr, i);
592
593 validate_assert(state, components_read > 0);
594
595 validate_src(&instr->src[i], state, src_bit_sizes[i], components_read);
596 }
597
598 unsigned num_vars = nir_intrinsic_infos[instr->intrinsic].num_variables;
599 for (unsigned i = 0; i < num_vars; i++) {
600 validate_deref_var(instr, instr->variables[i], state);
601 }
602
603 if (nir_intrinsic_infos[instr->intrinsic].has_dest) {
604 unsigned components_written = nir_intrinsic_dest_components(instr);
605
606 validate_assert(state, components_written > 0);
607
608 validate_dest(&instr->dest, state, dest_bit_size, components_written);
609 }
610 }
611
612 static void
613 validate_tex_instr(nir_tex_instr *instr, validate_state *state)
614 {
615 bool src_type_seen[nir_num_tex_src_types];
616 for (unsigned i = 0; i < nir_num_tex_src_types; i++)
617 src_type_seen[i] = false;
618
619 for (unsigned i = 0; i < instr->num_srcs; i++) {
620 validate_assert(state, !src_type_seen[instr->src[i].src_type]);
621 src_type_seen[instr->src[i].src_type] = true;
622 validate_src(&instr->src[i].src, state,
623 0, nir_tex_instr_src_size(instr, i));
624 }
625
626 if (instr->texture != NULL)
627 validate_deref_var(instr, instr->texture, state);
628
629 if (instr->sampler != NULL)
630 validate_deref_var(instr, instr->sampler, state);
631
632 validate_dest(&instr->dest, state, 0, nir_tex_instr_dest_size(instr));
633 }
634
635 static void
636 validate_call_instr(nir_call_instr *instr, validate_state *state)
637 {
638 if (instr->return_deref == NULL) {
639 validate_assert(state, glsl_type_is_void(instr->callee->return_type));
640 } else {
641 validate_assert(state, instr->return_deref->deref.type == instr->callee->return_type);
642 validate_deref_var(instr, instr->return_deref, state);
643 }
644
645 validate_assert(state, instr->num_params == instr->callee->num_params);
646
647 for (unsigned i = 0; i < instr->num_params; i++) {
648 validate_assert(state, instr->callee->params[i].type == instr->params[i]->deref.type);
649 validate_deref_var(instr, instr->params[i], state);
650 }
651 }
652
653 static void
654 validate_load_const_instr(nir_load_const_instr *instr, validate_state *state)
655 {
656 validate_ssa_def(&instr->def, state);
657 }
658
659 static void
660 validate_ssa_undef_instr(nir_ssa_undef_instr *instr, validate_state *state)
661 {
662 validate_ssa_def(&instr->def, state);
663 }
664
665 static void
666 validate_phi_instr(nir_phi_instr *instr, validate_state *state)
667 {
668 /*
669 * don't validate the sources until we get to them from their predecessor
670 * basic blocks, to avoid validating an SSA use before its definition.
671 */
672
673 validate_dest(&instr->dest, state, 0, 0);
674
675 exec_list_validate(&instr->srcs);
676 validate_assert(state, exec_list_length(&instr->srcs) ==
677 state->block->predecessors->entries);
678 }
679
680 static void
681 validate_instr(nir_instr *instr, validate_state *state)
682 {
683 validate_assert(state, instr->block == state->block);
684
685 state->instr = instr;
686
687 switch (instr->type) {
688 case nir_instr_type_alu:
689 validate_alu_instr(nir_instr_as_alu(instr), state);
690 break;
691
692 case nir_instr_type_deref:
693 validate_deref_instr(nir_instr_as_deref(instr), state);
694 break;
695
696 case nir_instr_type_call:
697 validate_call_instr(nir_instr_as_call(instr), state);
698 break;
699
700 case nir_instr_type_intrinsic:
701 validate_intrinsic_instr(nir_instr_as_intrinsic(instr), state);
702 break;
703
704 case nir_instr_type_tex:
705 validate_tex_instr(nir_instr_as_tex(instr), state);
706 break;
707
708 case nir_instr_type_load_const:
709 validate_load_const_instr(nir_instr_as_load_const(instr), state);
710 break;
711
712 case nir_instr_type_phi:
713 validate_phi_instr(nir_instr_as_phi(instr), state);
714 break;
715
716 case nir_instr_type_ssa_undef:
717 validate_ssa_undef_instr(nir_instr_as_ssa_undef(instr), state);
718 break;
719
720 case nir_instr_type_jump:
721 break;
722
723 default:
724       validate_assert(state, !"Invalid instruction type");
725 break;
726 }
727
728 state->instr = NULL;
729 }
730
731 static void
732 validate_phi_src(nir_phi_instr *instr, nir_block *pred, validate_state *state)
733 {
734 state->instr = &instr->instr;
735
736 validate_assert(state, instr->dest.is_ssa);
737
738 exec_list_validate(&instr->srcs);
739 nir_foreach_phi_src(src, instr) {
740 if (src->pred == pred) {
741 validate_assert(state, src->src.is_ssa);
742 validate_src(&src->src, state, instr->dest.ssa.bit_size,
743 instr->dest.ssa.num_components);
744 state->instr = NULL;
745 return;
746 }
747 }
748
749 abort();
750 }
751
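/* Called from validate_block() for each successor edge.  This performs the
 * phi-source validation that validate_phi_instr() deliberately defers: each
 * phi in the successor must have a source for this predecessor, and that
 * source must be an SSA value whose bit size and component count match the
 * phi's destination.
 */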
752 static void
753 validate_phi_srcs(nir_block *block, nir_block *succ, validate_state *state)
754 {
755 nir_foreach_instr(instr, succ) {
756 if (instr->type != nir_instr_type_phi)
757 break;
758
759 validate_phi_src(nir_instr_as_phi(instr), block, state);
760 }
761 }
762
763 static void validate_cf_node(nir_cf_node *node, validate_state *state);
764
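/* Per-block checks: phis may only appear at the top of a block and jumps
 * only at the end, the successor/predecessor sets of adjacent blocks must be
 * consistent, and the recorded successors must match what a trailing jump
 * (break/continue/return) or fall-through into the next CF node implies.
 */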
765 static void
766 validate_block(nir_block *block, validate_state *state)
767 {
768 validate_assert(state, block->cf_node.parent == state->parent_node);
769
770 state->block = block;
771
772 exec_list_validate(&block->instr_list);
773 nir_foreach_instr(instr, block) {
774 if (instr->type == nir_instr_type_phi) {
775 validate_assert(state, instr == nir_block_first_instr(block) ||
776 nir_instr_prev(instr)->type == nir_instr_type_phi);
777 }
778
779 if (instr->type == nir_instr_type_jump) {
780 validate_assert(state, instr == nir_block_last_instr(block));
781 }
782
783 validate_instr(instr, state);
784 }
785
786 validate_assert(state, block->successors[0] != NULL);
787 validate_assert(state, block->successors[0] != block->successors[1]);
788
789 for (unsigned i = 0; i < 2; i++) {
790 if (block->successors[i] != NULL) {
791 struct set_entry *entry =
792 _mesa_set_search(block->successors[i]->predecessors, block);
793 validate_assert(state, entry);
794
795 validate_phi_srcs(block, block->successors[i], state);
796 }
797 }
798
799 struct set_entry *entry;
800 set_foreach(block->predecessors, entry) {
801 const nir_block *pred = entry->key;
802 validate_assert(state, pred->successors[0] == block ||
803 pred->successors[1] == block);
804 }
805
806 if (!exec_list_is_empty(&block->instr_list) &&
807 nir_block_last_instr(block)->type == nir_instr_type_jump) {
808 validate_assert(state, block->successors[1] == NULL);
809 nir_jump_instr *jump = nir_instr_as_jump(nir_block_last_instr(block));
810 switch (jump->type) {
811 case nir_jump_break: {
812 nir_block *after =
813 nir_cf_node_as_block(nir_cf_node_next(&state->loop->cf_node));
814 validate_assert(state, block->successors[0] == after);
815 break;
816 }
817
818 case nir_jump_continue: {
819 nir_block *first = nir_loop_first_block(state->loop);
820 validate_assert(state, block->successors[0] == first);
821 break;
822 }
823
824 case nir_jump_return:
825 validate_assert(state, block->successors[0] == state->impl->end_block);
826 break;
827
828 default:
829 unreachable("bad jump type");
830 }
831 } else {
832 nir_cf_node *next = nir_cf_node_next(&block->cf_node);
833 if (next == NULL) {
834 switch (state->parent_node->type) {
835 case nir_cf_node_loop: {
836 nir_block *first = nir_loop_first_block(state->loop);
837 validate_assert(state, block->successors[0] == first);
838 /* due to the hack for infinite loops, block->successors[1] may
839 * point to the block after the loop.
840 */
841 break;
842 }
843
844 case nir_cf_node_if: {
845 nir_block *after =
846 nir_cf_node_as_block(nir_cf_node_next(state->parent_node));
847 validate_assert(state, block->successors[0] == after);
848 validate_assert(state, block->successors[1] == NULL);
849 break;
850 }
851
852 case nir_cf_node_function:
853 validate_assert(state, block->successors[0] == state->impl->end_block);
854 validate_assert(state, block->successors[1] == NULL);
855 break;
856
857 default:
858 unreachable("unknown control flow node type");
859 }
860 } else {
861 if (next->type == nir_cf_node_if) {
862 nir_if *if_stmt = nir_cf_node_as_if(next);
863 validate_assert(state, block->successors[0] ==
864 nir_if_first_then_block(if_stmt));
865 validate_assert(state, block->successors[1] ==
866 nir_if_first_else_block(if_stmt));
867 } else {
868 validate_assert(state, next->type == nir_cf_node_loop);
869 nir_loop *loop = nir_cf_node_as_loop(next);
870 validate_assert(state, block->successors[0] ==
871 nir_loop_first_block(loop));
872 validate_assert(state, block->successors[1] == NULL);
873 }
874 }
875 }
876 }
877
878 static void
879 validate_if(nir_if *if_stmt, validate_state *state)
880 {
881 state->if_stmt = if_stmt;
882
883 validate_assert(state, !exec_node_is_head_sentinel(if_stmt->cf_node.node.prev));
884 nir_cf_node *prev_node = nir_cf_node_prev(&if_stmt->cf_node);
885 validate_assert(state, prev_node->type == nir_cf_node_block);
886
887 validate_assert(state, !exec_node_is_tail_sentinel(if_stmt->cf_node.node.next));
888 nir_cf_node *next_node = nir_cf_node_next(&if_stmt->cf_node);
889 validate_assert(state, next_node->type == nir_cf_node_block);
890
891 validate_src(&if_stmt->condition, state, 32, 1);
892
893 validate_assert(state, !exec_list_is_empty(&if_stmt->then_list));
894 validate_assert(state, !exec_list_is_empty(&if_stmt->else_list));
895
896 nir_cf_node *old_parent = state->parent_node;
897 state->parent_node = &if_stmt->cf_node;
898
899 exec_list_validate(&if_stmt->then_list);
900 foreach_list_typed(nir_cf_node, cf_node, node, &if_stmt->then_list) {
901 validate_cf_node(cf_node, state);
902 }
903
904 exec_list_validate(&if_stmt->else_list);
905 foreach_list_typed(nir_cf_node, cf_node, node, &if_stmt->else_list) {
906 validate_cf_node(cf_node, state);
907 }
908
909 state->parent_node = old_parent;
910 state->if_stmt = NULL;
911 }
912
913 static void
914 validate_loop(nir_loop *loop, validate_state *state)
915 {
916 validate_assert(state, !exec_node_is_head_sentinel(loop->cf_node.node.prev));
917 nir_cf_node *prev_node = nir_cf_node_prev(&loop->cf_node);
918 validate_assert(state, prev_node->type == nir_cf_node_block);
919
920 validate_assert(state, !exec_node_is_tail_sentinel(loop->cf_node.node.next));
921 nir_cf_node *next_node = nir_cf_node_next(&loop->cf_node);
922 validate_assert(state, next_node->type == nir_cf_node_block);
923
924 validate_assert(state, !exec_list_is_empty(&loop->body));
925
926 nir_cf_node *old_parent = state->parent_node;
927 state->parent_node = &loop->cf_node;
928 nir_loop *old_loop = state->loop;
929 state->loop = loop;
930
931 exec_list_validate(&loop->body);
932 foreach_list_typed(nir_cf_node, cf_node, node, &loop->body) {
933 validate_cf_node(cf_node, state);
934 }
935
936 state->parent_node = old_parent;
937 state->loop = old_loop;
938 }
939
940 static void
941 validate_cf_node(nir_cf_node *node, validate_state *state)
942 {
943 validate_assert(state, node->parent == state->parent_node);
944
945 switch (node->type) {
946 case nir_cf_node_block:
947 validate_block(nir_cf_node_as_block(node), state);
948 break;
949
950 case nir_cf_node_if:
951 validate_if(nir_cf_node_as_if(node), state);
952 break;
953
954 case nir_cf_node_loop:
955 validate_loop(nir_cf_node_as_loop(node), state);
956 break;
957
958 default:
959 unreachable("Invalid CF node type");
960 }
961 }
962
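/* Registers are validated in two passes: prevalidate_reg_decl() records each
 * register with empty use/if_use/def sets before the body is walked,
 * validate_reg_src()/validate_reg_dest() add entries as they are seen, and
 * postvalidate_reg_decl() then requires those sets to match the register's
 * own use/def lists exactly.
 */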
963 static void
964 prevalidate_reg_decl(nir_register *reg, bool is_global, validate_state *state)
965 {
966 validate_assert(state, reg->is_global == is_global);
967
968 if (is_global)
969 validate_assert(state, reg->index < state->shader->reg_alloc);
970 else
971 validate_assert(state, reg->index < state->impl->reg_alloc);
972 validate_assert(state, !BITSET_TEST(state->regs_found, reg->index));
973 BITSET_SET(state->regs_found, reg->index);
974
975 list_validate(&reg->uses);
976 list_validate(&reg->defs);
977 list_validate(&reg->if_uses);
978
979 reg_validate_state *reg_state = ralloc(state->regs, reg_validate_state);
980 reg_state->uses = _mesa_set_create(reg_state, _mesa_hash_pointer,
981 _mesa_key_pointer_equal);
982 reg_state->if_uses = _mesa_set_create(reg_state, _mesa_hash_pointer,
983 _mesa_key_pointer_equal);
984 reg_state->defs = _mesa_set_create(reg_state, _mesa_hash_pointer,
985 _mesa_key_pointer_equal);
986
987 reg_state->where_defined = is_global ? NULL : state->impl;
988
989 _mesa_hash_table_insert(state->regs, reg, reg_state);
990 }
991
992 static void
993 postvalidate_reg_decl(nir_register *reg, validate_state *state)
994 {
995 struct hash_entry *entry = _mesa_hash_table_search(state->regs, reg);
996
997 assume(entry);
998 reg_validate_state *reg_state = (reg_validate_state *) entry->data;
999
1000 nir_foreach_use(src, reg) {
1001 struct set_entry *entry = _mesa_set_search(reg_state->uses, src);
1002 validate_assert(state, entry);
1003 _mesa_set_remove(reg_state->uses, entry);
1004 }
1005
1006 if (reg_state->uses->entries != 0) {
1007 printf("extra entries in register uses:\n");
1008 struct set_entry *entry;
1009 set_foreach(reg_state->uses, entry)
1010 printf("%p\n", entry->key);
1011
1012 abort();
1013 }
1014
1015 nir_foreach_if_use(src, reg) {
1016 struct set_entry *entry = _mesa_set_search(reg_state->if_uses, src);
1017 validate_assert(state, entry);
1018 _mesa_set_remove(reg_state->if_uses, entry);
1019 }
1020
1021 if (reg_state->if_uses->entries != 0) {
1022 printf("extra entries in register if_uses:\n");
1023 struct set_entry *entry;
1024 set_foreach(reg_state->if_uses, entry)
1025 printf("%p\n", entry->key);
1026
1027 abort();
1028 }
1029
1030 nir_foreach_def(src, reg) {
1031 struct set_entry *entry = _mesa_set_search(reg_state->defs, src);
1032 validate_assert(state, entry);
1033 _mesa_set_remove(reg_state->defs, entry);
1034 }
1035
1036 if (reg_state->defs->entries != 0) {
1037 printf("extra entries in register defs:\n");
1038 struct set_entry *entry;
1039 set_foreach(reg_state->defs, entry)
1040 printf("%p\n", entry->key);
1041
1042 abort();
1043 }
1044 }
1045
1046 static void
1047 validate_var_decl(nir_variable *var, bool is_global, validate_state *state)
1048 {
1049 state->var = var;
1050
1051 validate_assert(state, is_global == nir_variable_is_global(var));
1052
1053 /* Must have exactly one mode set */
1054 validate_assert(state, util_is_power_of_two_nonzero(var->data.mode));
1055
1056 if (var->data.compact) {
1057 /* The "compact" flag is only valid on arrays of scalars. */
1058 assert(glsl_type_is_array(var->type));
1059
1060 const struct glsl_type *type = glsl_get_array_element(var->type);
1061 if (nir_is_per_vertex_io(var, state->shader->info.stage)) {
1062 assert(glsl_type_is_array(type));
1063 assert(glsl_type_is_scalar(glsl_get_array_element(type)));
1064 } else {
1065 assert(glsl_type_is_scalar(type));
1066 }
1067 }
1068
1069 /*
1070 * TODO validate some things ir_validate.cpp does (requires more GLSL type
1071 * support)
1072 */
1073
1074 _mesa_hash_table_insert(state->var_defs, var,
1075 is_global ? NULL : state->impl);
1076
1077 state->var = NULL;
1078 }
1079
1080 static bool
1081 postvalidate_ssa_def(nir_ssa_def *def, void *void_state)
1082 {
1083 validate_state *state = void_state;
1084
1085 struct hash_entry *entry = _mesa_hash_table_search(state->ssa_defs, def);
1086
1087 assume(entry);
1088 ssa_def_validate_state *def_state = (ssa_def_validate_state *)entry->data;
1089
1090 nir_foreach_use(src, def) {
1091 struct set_entry *entry = _mesa_set_search(def_state->uses, src);
1092 validate_assert(state, entry);
1093 _mesa_set_remove(def_state->uses, entry);
1094 }
1095
1096 if (def_state->uses->entries != 0) {
1097 printf("extra entries in SSA def uses:\n");
1098 struct set_entry *entry;
1099 set_foreach(def_state->uses, entry)
1100 printf("%p\n", entry->key);
1101
1102 abort();
1103 }
1104
1105 nir_foreach_if_use(src, def) {
1106 struct set_entry *entry = _mesa_set_search(def_state->if_uses, src);
1107 validate_assert(state, entry);
1108 _mesa_set_remove(def_state->if_uses, entry);
1109 }
1110
1111 if (def_state->if_uses->entries != 0) {
1112       printf("extra entries in SSA def if_uses:\n");
1113 struct set_entry *entry;
1114 set_foreach(def_state->if_uses, entry)
1115 printf("%p\n", entry->key);
1116
1117 abort();
1118 }
1119
1120 return true;
1121 }
1122
1123 static void
1124 validate_function_impl(nir_function_impl *impl, validate_state *state)
1125 {
1126 validate_assert(state, impl->function->impl == impl);
1127 validate_assert(state, impl->cf_node.parent == NULL);
1128
1129 validate_assert(state, impl->num_params == impl->function->num_params);
1130 for (unsigned i = 0; i < impl->num_params; i++) {
1131 validate_assert(state, impl->params[i]->type == impl->function->params[i].type);
1132 validate_assert(state, impl->params[i]->data.mode == nir_var_param);
1133 validate_assert(state, impl->params[i]->data.location == i);
1134 validate_var_decl(impl->params[i], false, state);
1135 }
1136
1137 if (glsl_type_is_void(impl->function->return_type)) {
1138 validate_assert(state, impl->return_var == NULL);
1139 } else {
1140 validate_assert(state, impl->return_var->type == impl->function->return_type);
1141 validate_assert(state, impl->return_var->data.mode == nir_var_param);
1142 validate_assert(state, impl->return_var->data.location == -1);
1143 validate_var_decl(impl->return_var, false, state);
1144 }
1145
1146 validate_assert(state, exec_list_is_empty(&impl->end_block->instr_list));
1147 validate_assert(state, impl->end_block->successors[0] == NULL);
1148 validate_assert(state, impl->end_block->successors[1] == NULL);
1149
1150 state->impl = impl;
1151 state->parent_node = &impl->cf_node;
1152
1153 exec_list_validate(&impl->locals);
1154 nir_foreach_variable(var, &impl->locals) {
1155 validate_var_decl(var, false, state);
1156 }
1157
1158 state->regs_found = realloc(state->regs_found,
1159 BITSET_WORDS(impl->reg_alloc) *
1160 sizeof(BITSET_WORD));
1161 memset(state->regs_found, 0, BITSET_WORDS(impl->reg_alloc) *
1162 sizeof(BITSET_WORD));
1163 exec_list_validate(&impl->registers);
1164 foreach_list_typed(nir_register, reg, node, &impl->registers) {
1165 prevalidate_reg_decl(reg, false, state);
1166 }
1167
1168 state->ssa_defs_found = realloc(state->ssa_defs_found,
1169 BITSET_WORDS(impl->ssa_alloc) *
1170 sizeof(BITSET_WORD));
1171 memset(state->ssa_defs_found, 0, BITSET_WORDS(impl->ssa_alloc) *
1172 sizeof(BITSET_WORD));
1173 exec_list_validate(&impl->body);
1174 foreach_list_typed(nir_cf_node, node, node, &impl->body) {
1175 validate_cf_node(node, state);
1176 }
1177
1178 foreach_list_typed(nir_register, reg, node, &impl->registers) {
1179 postvalidate_reg_decl(reg, state);
1180 }
1181
1182 nir_foreach_block(block, impl) {
1183 nir_foreach_instr(instr, block)
1184 nir_foreach_ssa_def(instr, postvalidate_ssa_def, state);
1185 }
1186 }
1187
1188 static void
1189 validate_function(nir_function *func, validate_state *state)
1190 {
1191 if (func->impl != NULL) {
1192 validate_assert(state, func->impl->function == func);
1193 validate_function_impl(func->impl, state);
1194 }
1195 }
1196
1197 static void
1198 init_validate_state(validate_state *state)
1199 {
1200 state->regs = _mesa_hash_table_create(NULL, _mesa_hash_pointer,
1201 _mesa_key_pointer_equal);
1202 state->ssa_defs = _mesa_hash_table_create(NULL, _mesa_hash_pointer,
1203 _mesa_key_pointer_equal);
1204 state->ssa_defs_found = NULL;
1205 state->regs_found = NULL;
1206 state->var_defs = _mesa_hash_table_create(NULL, _mesa_hash_pointer,
1207 _mesa_key_pointer_equal);
1208 state->errors = _mesa_hash_table_create(NULL, _mesa_hash_pointer,
1209 _mesa_key_pointer_equal);
1210
1211 state->loop = NULL;
1212 state->instr = NULL;
1213 state->var = NULL;
1214 }
1215
1216 static void
1217 destroy_validate_state(validate_state *state)
1218 {
1219 _mesa_hash_table_destroy(state->regs, NULL);
1220 _mesa_hash_table_destroy(state->ssa_defs, NULL);
1221 free(state->ssa_defs_found);
1222 free(state->regs_found);
1223 _mesa_hash_table_destroy(state->var_defs, NULL);
1224 _mesa_hash_table_destroy(state->errors, NULL);
1225 }
1226
1227 static void
1228 dump_errors(validate_state *state)
1229 {
1230 struct hash_table *errors = state->errors;
1231
1232 fprintf(stderr, "%d errors:\n", _mesa_hash_table_num_entries(errors));
1233
1234 nir_print_shader_annotated(state->shader, stderr, errors);
1235
1236 if (_mesa_hash_table_num_entries(errors) > 0) {
1237 fprintf(stderr, "%d additional errors:\n",
1238 _mesa_hash_table_num_entries(errors));
1239 struct hash_entry *entry;
1240 hash_table_foreach(errors, entry) {
1241 fprintf(stderr, "%s\n", (char *)entry->data);
1242 }
1243 }
1244
1245 abort();
1246 }
1247
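/* Entry point.  Validates all variable lists, global registers, and every
 * function implementation, then aborts with an annotated shader dump if any
 * error was recorded.  Validation can be skipped at run time via the
 * NIR_VALIDATE environment variable (checked below).
 *
 * A minimal usage sketch (the lowering pass name is purely illustrative):
 *
 *    some_lowering_pass(shader);
 *    nir_validate_shader(shader);
 */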
1248 void
1249 nir_validate_shader(nir_shader *shader)
1250 {
1251 static int should_validate = -1;
1252 if (should_validate < 0)
1253 should_validate = env_var_as_boolean("NIR_VALIDATE", true);
1254 if (!should_validate)
1255 return;
1256
1257 validate_state state;
1258 init_validate_state(&state);
1259
1260 state.shader = shader;
1261
1262 exec_list_validate(&shader->uniforms);
1263 nir_foreach_variable(var, &shader->uniforms) {
1264 validate_var_decl(var, true, &state);
1265 }
1266
1267 exec_list_validate(&shader->inputs);
1268 nir_foreach_variable(var, &shader->inputs) {
1269 validate_var_decl(var, true, &state);
1270 }
1271
1272 exec_list_validate(&shader->outputs);
1273 nir_foreach_variable(var, &shader->outputs) {
1274 validate_var_decl(var, true, &state);
1275 }
1276
1277 exec_list_validate(&shader->shared);
1278 nir_foreach_variable(var, &shader->shared) {
1279 validate_var_decl(var, true, &state);
1280 }
1281
1282 exec_list_validate(&shader->globals);
1283 nir_foreach_variable(var, &shader->globals) {
1284 validate_var_decl(var, true, &state);
1285 }
1286
1287 exec_list_validate(&shader->system_values);
1288 nir_foreach_variable(var, &shader->system_values) {
1289 validate_var_decl(var, true, &state);
1290 }
1291
1292 state.regs_found = realloc(state.regs_found,
1293 BITSET_WORDS(shader->reg_alloc) *
1294 sizeof(BITSET_WORD));
1295 memset(state.regs_found, 0, BITSET_WORDS(shader->reg_alloc) *
1296 sizeof(BITSET_WORD));
1297 exec_list_validate(&shader->registers);
1298 foreach_list_typed(nir_register, reg, node, &shader->registers) {
1299 prevalidate_reg_decl(reg, true, &state);
1300 }
1301
1302 exec_list_validate(&shader->functions);
1303 foreach_list_typed(nir_function, func, node, &shader->functions) {
1304 validate_function(func, &state);
1305 }
1306
1307 foreach_list_typed(nir_register, reg, node, &shader->registers) {
1308 postvalidate_reg_decl(reg, &state);
1309 }
1310
1311 if (_mesa_hash_table_num_entries(state.errors) > 0)
1312 dump_errors(&state);
1313
1314 destroy_validate_state(&state);
1315 }
1316
1317 #endif /* NDEBUG */