/*
 * Copyright © 2018 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */
24 #include <gtest/gtest.h>
27 #include "nir_builder.h"
31 class nir_vars_test
: public ::testing::Test
{
36 nir_variable
*create_var(nir_variable_mode mode
, const glsl_type
*type
,
38 if (mode
== nir_var_function_temp
)
39 return nir_local_variable_create(b
->impl
, type
, name
);
41 return nir_variable_create(b
->shader
, mode
, type
, name
);
44 nir_variable
*create_int(nir_variable_mode mode
, const char *name
) {
45 return create_var(mode
, glsl_int_type(), name
);
48 nir_variable
*create_ivec2(nir_variable_mode mode
, const char *name
) {
49 return create_var(mode
, glsl_vector_type(GLSL_TYPE_INT
, 2), name
);
52 nir_variable
**create_many_int(nir_variable_mode mode
, const char *prefix
, unsigned count
) {
53 nir_variable
**result
= (nir_variable
**)linear_alloc_child(lin_ctx
, sizeof(nir_variable
*) * count
);
54 for (unsigned i
= 0; i
< count
; i
++)
55 result
[i
] = create_int(mode
, linear_asprintf(lin_ctx
, "%s%u", prefix
, i
));
59 nir_variable
**create_many_ivec2(nir_variable_mode mode
, const char *prefix
, unsigned count
) {
60 nir_variable
**result
= (nir_variable
**)linear_alloc_child(lin_ctx
, sizeof(nir_variable
*) * count
);
61 for (unsigned i
= 0; i
< count
; i
++)
62 result
[i
] = create_ivec2(mode
, linear_asprintf(lin_ctx
, "%s%u", prefix
, i
));
66 unsigned count_intrinsics(nir_intrinsic_op intrinsic
);
68 nir_intrinsic_instr
*get_intrinsic(nir_intrinsic_op intrinsic
,
77 nir_vars_test::nir_vars_test()
79 mem_ctx
= ralloc_context(NULL
);
80 lin_ctx
= linear_alloc_parent(mem_ctx
, 0);
81 static const nir_shader_compiler_options options
= { };
82 b
= rzalloc(mem_ctx
, nir_builder
);
83 nir_builder_init_simple_shader(b
, mem_ctx
, MESA_SHADER_FRAGMENT
, &options
);
86 nir_vars_test::~nir_vars_test()
89 printf("\nShader from the failed test:\n\n");
90 nir_print_shader(b
->shader
, stdout
);
97 nir_vars_test::count_intrinsics(nir_intrinsic_op intrinsic
)
100 nir_foreach_block(block
, b
->impl
) {
101 nir_foreach_instr(instr
, block
) {
102 if (instr
->type
!= nir_instr_type_intrinsic
)
104 nir_intrinsic_instr
*intrin
= nir_instr_as_intrinsic(instr
);
105 if (intrin
->intrinsic
== intrinsic
)
112 nir_intrinsic_instr
*
113 nir_vars_test::get_intrinsic(nir_intrinsic_op intrinsic
,
116 nir_foreach_block(block
, b
->impl
) {
117 nir_foreach_instr(instr
, block
) {
118 if (instr
->type
!= nir_instr_type_intrinsic
)
120 nir_intrinsic_instr
*intrin
= nir_instr_as_intrinsic(instr
);
121 if (intrin
->intrinsic
== intrinsic
) {
131 /* Allow grouping the tests while still sharing the helpers. */
132 class nir_redundant_load_vars_test
: public nir_vars_test
{};
133 class nir_copy_prop_vars_test
: public nir_vars_test
{};
134 class nir_dead_write_vars_test
: public nir_vars_test
{};
138 TEST_F(nir_redundant_load_vars_test
, duplicated_load
)
140 /* Load a variable twice in the same block. One should be removed. */
142 nir_variable
*in
= create_int(nir_var_shader_in
, "in");
143 nir_variable
**out
= create_many_int(nir_var_shader_out
, "out", 2);
145 nir_store_var(b
, out
[0], nir_load_var(b
, in
), 1);
146 nir_store_var(b
, out
[1], nir_load_var(b
, in
), 1);
148 nir_validate_shader(b
->shader
, NULL
);
150 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 2);
152 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
153 EXPECT_TRUE(progress
);
155 nir_validate_shader(b
->shader
, NULL
);
157 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 1);
160 TEST_F(nir_redundant_load_vars_test
, duplicated_load_in_two_blocks
)
162 /* Load a variable twice in different blocks. One should be removed. */
164 nir_variable
*in
= create_int(nir_var_shader_in
, "in");
165 nir_variable
**out
= create_many_int(nir_var_shader_out
, "out", 2);
167 nir_store_var(b
, out
[0], nir_load_var(b
, in
), 1);
169 /* Forces the stores to be in different blocks. */
170 nir_pop_if(b
, nir_push_if(b
, nir_imm_int(b
, 0)));
172 nir_store_var(b
, out
[1], nir_load_var(b
, in
), 1);
174 nir_validate_shader(b
->shader
, NULL
);
176 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 2);
178 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
179 EXPECT_TRUE(progress
);
181 nir_validate_shader(b
->shader
, NULL
);
183 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 1);
186 TEST_F(nir_redundant_load_vars_test
, invalidate_inside_if_block
)
188 /* Load variables, then write to some of then in different branches of the
189 * if statement. They should be invalidated accordingly.
192 nir_variable
**g
= create_many_int(nir_var_shader_temp
, "g", 3);
193 nir_variable
**out
= create_many_int(nir_var_shader_out
, "out", 3);
195 nir_load_var(b
, g
[0]);
196 nir_load_var(b
, g
[1]);
197 nir_load_var(b
, g
[2]);
199 nir_if
*if_stmt
= nir_push_if(b
, nir_imm_int(b
, 0));
200 nir_store_var(b
, g
[0], nir_imm_int(b
, 10), 1);
202 nir_push_else(b
, if_stmt
);
203 nir_store_var(b
, g
[1], nir_imm_int(b
, 20), 1);
205 nir_pop_if(b
, if_stmt
);
207 nir_store_var(b
, out
[0], nir_load_var(b
, g
[0]), 1);
208 nir_store_var(b
, out
[1], nir_load_var(b
, g
[1]), 1);
209 nir_store_var(b
, out
[2], nir_load_var(b
, g
[2]), 1);
211 nir_validate_shader(b
->shader
, NULL
);
213 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
214 EXPECT_TRUE(progress
);
216 /* There are 3 initial loads, plus 2 loads for the values invalidated
217 * inside the if statement.
219 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 5);
221 /* We only load g[2] once. */
222 unsigned g2_load_count
= 0;
223 for (int i
= 0; i
< 5; i
++) {
224 nir_intrinsic_instr
*load
= get_intrinsic(nir_intrinsic_load_deref
, i
);
225 if (nir_intrinsic_get_var(load
, 0) == g
[2])
228 EXPECT_EQ(g2_load_count
, 1);
231 TEST_F(nir_redundant_load_vars_test
, invalidate_live_load_in_the_end_of_loop
)
233 /* Invalidating a load in the end of loop body will apply to the whole loop
237 nir_variable
*v
= create_int(nir_var_mem_ssbo
, "v");
241 nir_loop
*loop
= nir_push_loop(b
);
243 nir_if
*if_stmt
= nir_push_if(b
, nir_imm_int(b
, 0));
244 nir_jump(b
, nir_jump_break
);
245 nir_pop_if(b
, if_stmt
);
248 nir_store_var(b
, v
, nir_imm_int(b
, 10), 1);
250 nir_pop_loop(b
, loop
);
252 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
253 ASSERT_FALSE(progress
);
256 TEST_F(nir_copy_prop_vars_test
, simple_copies
)
258 nir_variable
*in
= create_int(nir_var_shader_in
, "in");
259 nir_variable
*temp
= create_int(nir_var_function_temp
, "temp");
260 nir_variable
*out
= create_int(nir_var_shader_out
, "out");
262 nir_copy_var(b
, temp
, in
);
263 nir_copy_var(b
, out
, temp
);
265 nir_validate_shader(b
->shader
, NULL
);
267 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
268 EXPECT_TRUE(progress
);
270 nir_validate_shader(b
->shader
, NULL
);
272 ASSERT_EQ(count_intrinsics(nir_intrinsic_copy_deref
), 2);
274 nir_intrinsic_instr
*first_copy
= get_intrinsic(nir_intrinsic_copy_deref
, 0);
275 ASSERT_TRUE(first_copy
->src
[1].is_ssa
);
277 nir_intrinsic_instr
*second_copy
= get_intrinsic(nir_intrinsic_copy_deref
, 1);
278 ASSERT_TRUE(second_copy
->src
[1].is_ssa
);
280 EXPECT_EQ(first_copy
->src
[1].ssa
, second_copy
->src
[1].ssa
);
283 TEST_F(nir_copy_prop_vars_test
, simple_store_load
)
285 nir_variable
**v
= create_many_ivec2(nir_var_function_temp
, "v", 2);
286 unsigned mask
= 1 | 2;
288 nir_ssa_def
*stored_value
= nir_imm_ivec2(b
, 10, 20);
289 nir_store_var(b
, v
[0], stored_value
, mask
);
291 nir_ssa_def
*read_value
= nir_load_var(b
, v
[0]);
292 nir_store_var(b
, v
[1], read_value
, mask
);
294 nir_validate_shader(b
->shader
, NULL
);
296 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
297 EXPECT_TRUE(progress
);
299 nir_validate_shader(b
->shader
, NULL
);
301 ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref
), 2);
303 for (int i
= 0; i
< 2; i
++) {
304 nir_intrinsic_instr
*store
= get_intrinsic(nir_intrinsic_store_deref
, i
);
305 ASSERT_TRUE(store
->src
[1].is_ssa
);
306 EXPECT_EQ(store
->src
[1].ssa
, stored_value
);
310 TEST_F(nir_copy_prop_vars_test
, store_store_load
)
312 nir_variable
**v
= create_many_ivec2(nir_var_function_temp
, "v", 2);
313 unsigned mask
= 1 | 2;
315 nir_ssa_def
*first_value
= nir_imm_ivec2(b
, 10, 20);
316 nir_store_var(b
, v
[0], first_value
, mask
);
318 nir_ssa_def
*second_value
= nir_imm_ivec2(b
, 30, 40);
319 nir_store_var(b
, v
[0], second_value
, mask
);
321 nir_ssa_def
*read_value
= nir_load_var(b
, v
[0]);
322 nir_store_var(b
, v
[1], read_value
, mask
);
324 nir_validate_shader(b
->shader
, NULL
);
326 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
327 EXPECT_TRUE(progress
);
329 nir_validate_shader(b
->shader
, NULL
);
331 ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref
), 3);
333 /* Store to v[1] should use second_value directly. */
334 nir_intrinsic_instr
*store_to_v1
= get_intrinsic(nir_intrinsic_store_deref
, 2);
335 ASSERT_EQ(nir_intrinsic_get_var(store_to_v1
, 0), v
[1]);
336 ASSERT_TRUE(store_to_v1
->src
[1].is_ssa
);
337 EXPECT_EQ(store_to_v1
->src
[1].ssa
, second_value
);
340 TEST_F(nir_copy_prop_vars_test
, store_store_load_different_components
)
342 nir_variable
**v
= create_many_ivec2(nir_var_function_temp
, "v", 2);
344 nir_ssa_def
*first_value
= nir_imm_ivec2(b
, 10, 20);
345 nir_store_var(b
, v
[0], first_value
, 1 << 1);
347 nir_ssa_def
*second_value
= nir_imm_ivec2(b
, 30, 40);
348 nir_store_var(b
, v
[0], second_value
, 1 << 0);
350 nir_ssa_def
*read_value
= nir_load_var(b
, v
[0]);
351 nir_store_var(b
, v
[1], read_value
, 1 << 1);
353 nir_validate_shader(b
->shader
, NULL
);
355 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
356 EXPECT_TRUE(progress
);
358 nir_validate_shader(b
->shader
, NULL
);
360 nir_opt_constant_folding(b
->shader
);
361 nir_validate_shader(b
->shader
, NULL
);
363 ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref
), 3);
365 /* Store to v[1] should use first_value directly. The write of
366 * second_value did not overwrite the component it uses.
368 nir_intrinsic_instr
*store_to_v1
= get_intrinsic(nir_intrinsic_store_deref
, 2);
369 ASSERT_EQ(nir_intrinsic_get_var(store_to_v1
, 0), v
[1]);
370 ASSERT_EQ(nir_src_comp_as_uint(store_to_v1
->src
[1], 1), 20);
373 TEST_F(nir_copy_prop_vars_test
, store_store_load_different_components_in_many_blocks
)
375 nir_variable
**v
= create_many_ivec2(nir_var_function_temp
, "v", 2);
377 nir_ssa_def
*first_value
= nir_imm_ivec2(b
, 10, 20);
378 nir_store_var(b
, v
[0], first_value
, 1 << 1);
380 /* Adding an if statement will cause blocks to be created. */
381 nir_pop_if(b
, nir_push_if(b
, nir_imm_int(b
, 0)));
383 nir_ssa_def
*second_value
= nir_imm_ivec2(b
, 30, 40);
384 nir_store_var(b
, v
[0], second_value
, 1 << 0);
386 /* Adding an if statement will cause blocks to be created. */
387 nir_pop_if(b
, nir_push_if(b
, nir_imm_int(b
, 0)));
389 nir_ssa_def
*read_value
= nir_load_var(b
, v
[0]);
390 nir_store_var(b
, v
[1], read_value
, 1 << 1);
392 nir_validate_shader(b
->shader
, NULL
);
394 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
395 EXPECT_TRUE(progress
);
397 nir_validate_shader(b
->shader
, NULL
);
399 nir_opt_constant_folding(b
->shader
);
400 nir_validate_shader(b
->shader
, NULL
);
402 ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref
), 3);
404 /* Store to v[1] should use first_value directly. The write of
405 * second_value did not overwrite the component it uses.
407 nir_intrinsic_instr
*store_to_v1
= get_intrinsic(nir_intrinsic_store_deref
, 2);
408 ASSERT_EQ(nir_intrinsic_get_var(store_to_v1
, 0), v
[1]);
409 ASSERT_EQ(nir_src_comp_as_uint(store_to_v1
->src
[1], 1), 20);
412 TEST_F(nir_copy_prop_vars_test
, memory_barrier_in_two_blocks
)
414 nir_variable
**v
= create_many_int(nir_var_mem_ssbo
, "v", 4);
416 nir_store_var(b
, v
[0], nir_imm_int(b
, 1), 1);
417 nir_store_var(b
, v
[1], nir_imm_int(b
, 2), 1);
419 /* Split into many blocks. */
420 nir_pop_if(b
, nir_push_if(b
, nir_imm_int(b
, 0)));
422 nir_store_var(b
, v
[2], nir_load_var(b
, v
[0]), 1);
424 nir_builder_instr_insert(b
, &nir_intrinsic_instr_create(b
->shader
, nir_intrinsic_memory_barrier
)->instr
);
426 nir_store_var(b
, v
[3], nir_load_var(b
, v
[1]), 1);
428 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
429 ASSERT_TRUE(progress
);
431 /* Only the second load will remain after the optimization. */
432 ASSERT_EQ(1, count_intrinsics(nir_intrinsic_load_deref
));
433 nir_intrinsic_instr
*load
= get_intrinsic(nir_intrinsic_load_deref
, 0);
434 ASSERT_EQ(nir_intrinsic_get_var(load
, 0), v
[1]);
437 TEST_F(nir_copy_prop_vars_test
, simple_store_load_in_two_blocks
)
439 nir_variable
**v
= create_many_ivec2(nir_var_function_temp
, "v", 2);
440 unsigned mask
= 1 | 2;
442 nir_ssa_def
*stored_value
= nir_imm_ivec2(b
, 10, 20);
443 nir_store_var(b
, v
[0], stored_value
, mask
);
445 /* Adding an if statement will cause blocks to be created. */
446 nir_pop_if(b
, nir_push_if(b
, nir_imm_int(b
, 0)));
448 nir_ssa_def
*read_value
= nir_load_var(b
, v
[0]);
449 nir_store_var(b
, v
[1], read_value
, mask
);
451 nir_validate_shader(b
->shader
, NULL
);
453 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
454 EXPECT_TRUE(progress
);
456 nir_validate_shader(b
->shader
, NULL
);
458 ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref
), 2);
460 for (int i
= 0; i
< 2; i
++) {
461 nir_intrinsic_instr
*store
= get_intrinsic(nir_intrinsic_store_deref
, i
);
462 ASSERT_TRUE(store
->src
[1].is_ssa
);
463 EXPECT_EQ(store
->src
[1].ssa
, stored_value
);
467 TEST_F(nir_copy_prop_vars_test
, load_direct_array_deref_on_vector_reuses_previous_load
)
469 nir_variable
*in0
= create_ivec2(nir_var_mem_ssbo
, "in0");
470 nir_variable
*in1
= create_ivec2(nir_var_mem_ssbo
, "in1");
471 nir_variable
*vec
= create_ivec2(nir_var_mem_ssbo
, "vec");
472 nir_variable
*out
= create_int(nir_var_mem_ssbo
, "out");
474 nir_store_var(b
, vec
, nir_load_var(b
, in0
), 1 << 0);
475 nir_store_var(b
, vec
, nir_load_var(b
, in1
), 1 << 1);
477 /* This load will be dropped, as vec.y (or vec[1]) is already known. */
478 nir_deref_instr
*deref
=
479 nir_build_deref_array_imm(b
, nir_build_deref_var(b
, vec
), 1);
480 nir_ssa_def
*loaded_from_deref
= nir_load_deref(b
, deref
);
482 /* This store should use the value loaded from in1. */
483 nir_store_var(b
, out
, loaded_from_deref
, 1 << 0);
485 nir_validate_shader(b
->shader
, NULL
);
486 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 3);
487 ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref
), 3);
489 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
490 EXPECT_TRUE(progress
);
492 nir_validate_shader(b
->shader
, NULL
);
493 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 2);
494 ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref
), 3);
496 nir_intrinsic_instr
*store
= get_intrinsic(nir_intrinsic_store_deref
, 2);
497 ASSERT_TRUE(store
->src
[1].is_ssa
);
499 /* NOTE: The ALU instruction is how we get the vec.y. */
500 ASSERT_TRUE(nir_src_as_alu_instr(&store
->src
[1]));
503 TEST_F(nir_copy_prop_vars_test
, load_direct_array_deref_on_vector_reuses_previous_copy
)
505 nir_variable
*in0
= create_ivec2(nir_var_mem_ssbo
, "in0");
506 nir_variable
*vec
= create_ivec2(nir_var_mem_ssbo
, "vec");
508 nir_copy_var(b
, vec
, in0
);
510 /* This load will be replaced with one from in0. */
511 nir_deref_instr
*deref
=
512 nir_build_deref_array_imm(b
, nir_build_deref_var(b
, vec
), 1);
513 nir_load_deref(b
, deref
);
515 nir_validate_shader(b
->shader
, NULL
);
517 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
518 EXPECT_TRUE(progress
);
520 nir_validate_shader(b
->shader
, NULL
);
521 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 1);
523 nir_intrinsic_instr
*load
= get_intrinsic(nir_intrinsic_load_deref
, 0);
524 ASSERT_EQ(nir_intrinsic_get_var(load
, 0), in0
);
527 TEST_F(nir_copy_prop_vars_test
, load_direct_array_deref_on_vector_gets_reused
)
529 nir_variable
*in0
= create_ivec2(nir_var_mem_ssbo
, "in0");
530 nir_variable
*vec
= create_ivec2(nir_var_mem_ssbo
, "vec");
531 nir_variable
*out
= create_ivec2(nir_var_mem_ssbo
, "out");
533 /* Loading "vec[1]" deref will save the information about vec.y. */
534 nir_deref_instr
*deref
=
535 nir_build_deref_array_imm(b
, nir_build_deref_var(b
, vec
), 1);
536 nir_load_deref(b
, deref
);
538 /* Store to vec.x. */
539 nir_store_var(b
, vec
, nir_load_var(b
, in0
), 1 << 0);
541 /* This load will be dropped, since both vec.x and vec.y are known. */
542 nir_ssa_def
*loaded_from_vec
= nir_load_var(b
, vec
);
543 nir_store_var(b
, out
, loaded_from_vec
, 0x3);
545 nir_validate_shader(b
->shader
, NULL
);
546 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 3);
547 ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref
), 2);
549 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
550 EXPECT_TRUE(progress
);
552 nir_validate_shader(b
->shader
, NULL
);
553 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 2);
554 ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref
), 2);
556 nir_intrinsic_instr
*store
= get_intrinsic(nir_intrinsic_store_deref
, 1);
557 ASSERT_TRUE(store
->src
[1].is_ssa
);
558 ASSERT_TRUE(nir_src_as_alu_instr(&store
->src
[1]));
561 TEST_F(nir_copy_prop_vars_test
, store_load_direct_array_deref_on_vector
)
563 nir_variable
*vec
= create_ivec2(nir_var_mem_ssbo
, "vec");
564 nir_variable
*out0
= create_int(nir_var_mem_ssbo
, "out0");
565 nir_variable
*out1
= create_ivec2(nir_var_mem_ssbo
, "out1");
567 /* Store to "vec[1]" and "vec[0]". */
568 nir_deref_instr
*store_deref_y
=
569 nir_build_deref_array_imm(b
, nir_build_deref_var(b
, vec
), 1);
570 nir_store_deref(b
, store_deref_y
, nir_imm_int(b
, 20), 1);
572 nir_deref_instr
*store_deref_x
=
573 nir_build_deref_array_imm(b
, nir_build_deref_var(b
, vec
), 0);
574 nir_store_deref(b
, store_deref_x
, nir_imm_int(b
, 10), 1);
576 /* Both loads below will be dropped, because the values are already known. */
577 nir_deref_instr
*load_deref_y
=
578 nir_build_deref_array_imm(b
, nir_build_deref_var(b
, vec
), 1);
579 nir_store_var(b
, out0
, nir_load_deref(b
, load_deref_y
), 1);
581 nir_store_var(b
, out1
, nir_load_var(b
, vec
), 1);
583 nir_validate_shader(b
->shader
, NULL
);
584 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 2);
585 ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref
), 4);
587 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
588 EXPECT_TRUE(progress
);
590 nir_validate_shader(b
->shader
, NULL
);
591 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 0);
592 ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref
), 4);
594 /* Third store will just use the value from first store. */
595 nir_intrinsic_instr
*first_store
= get_intrinsic(nir_intrinsic_store_deref
, 0);
596 nir_intrinsic_instr
*third_store
= get_intrinsic(nir_intrinsic_store_deref
, 2);
597 ASSERT_TRUE(third_store
->src
[1].is_ssa
);
598 EXPECT_EQ(third_store
->src
[1].ssa
, first_store
->src
[1].ssa
);
600 /* Fourth store will compose first and second store values. */
601 nir_intrinsic_instr
*fourth_store
= get_intrinsic(nir_intrinsic_store_deref
, 3);
602 ASSERT_TRUE(fourth_store
->src
[1].is_ssa
);
603 EXPECT_TRUE(nir_src_as_alu_instr(&fourth_store
->src
[1]));
606 TEST_F(nir_copy_prop_vars_test
, store_load_indirect_array_deref_on_vector
)
608 nir_variable
*vec
= create_ivec2(nir_var_mem_ssbo
, "vec");
609 nir_variable
*idx
= create_int(nir_var_mem_ssbo
, "idx");
610 nir_variable
*out
= create_int(nir_var_mem_ssbo
, "out");
612 nir_ssa_def
*idx_ssa
= nir_load_var(b
, idx
);
614 /* Store to vec[idx]. */
615 nir_deref_instr
*store_deref
=
616 nir_build_deref_array(b
, nir_build_deref_var(b
, vec
), idx_ssa
);
617 nir_store_deref(b
, store_deref
, nir_imm_int(b
, 20), 1);
619 /* Load from vec[idx] to store in out. This load should be dropped. */
620 nir_deref_instr
*load_deref
=
621 nir_build_deref_array(b
, nir_build_deref_var(b
, vec
), idx_ssa
);
622 nir_store_var(b
, out
, nir_load_deref(b
, load_deref
), 1);
624 nir_validate_shader(b
->shader
, NULL
);
625 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 2);
626 ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref
), 2);
628 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
629 EXPECT_TRUE(progress
);
631 nir_validate_shader(b
->shader
, NULL
);
632 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 1);
633 ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref
), 2);
635 /* Store to vec[idx] propagated to out. */
636 nir_intrinsic_instr
*first
= get_intrinsic(nir_intrinsic_store_deref
, 0);
637 nir_intrinsic_instr
*second
= get_intrinsic(nir_intrinsic_store_deref
, 1);
638 ASSERT_TRUE(first
->src
[1].is_ssa
);
639 ASSERT_TRUE(second
->src
[1].is_ssa
);
640 EXPECT_EQ(first
->src
[1].ssa
, second
->src
[1].ssa
);
643 TEST_F(nir_copy_prop_vars_test
, store_load_direct_and_indirect_array_deref_on_vector
)
645 nir_variable
*vec
= create_ivec2(nir_var_mem_ssbo
, "vec");
646 nir_variable
*idx
= create_int(nir_var_mem_ssbo
, "idx");
647 nir_variable
**out
= create_many_int(nir_var_mem_ssbo
, "out", 2);
649 nir_ssa_def
*idx_ssa
= nir_load_var(b
, idx
);
652 nir_store_var(b
, vec
, nir_imm_ivec2(b
, 10, 10), 1 | 2);
654 /* Load from vec[idx]. This load is currently not dropped. */
655 nir_deref_instr
*indirect
=
656 nir_build_deref_array(b
, nir_build_deref_var(b
, vec
), idx_ssa
);
657 nir_store_var(b
, out
[0], nir_load_deref(b
, indirect
), 1);
659 /* Load from vec[idx] again. This load should be dropped. */
660 nir_store_var(b
, out
[1], nir_load_deref(b
, indirect
), 1);
662 nir_validate_shader(b
->shader
, NULL
);
663 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 3);
664 ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref
), 3);
666 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
667 EXPECT_TRUE(progress
);
669 nir_validate_shader(b
->shader
, NULL
);
670 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 2);
671 ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref
), 3);
673 /* Store to vec[idx] propagated to out. */
674 nir_intrinsic_instr
*second
= get_intrinsic(nir_intrinsic_store_deref
, 1);
675 nir_intrinsic_instr
*third
= get_intrinsic(nir_intrinsic_store_deref
, 2);
676 ASSERT_TRUE(second
->src
[1].is_ssa
);
677 ASSERT_TRUE(third
->src
[1].is_ssa
);
678 EXPECT_EQ(second
->src
[1].ssa
, third
->src
[1].ssa
);
681 TEST_F(nir_copy_prop_vars_test
, store_load_indirect_array_deref
)
683 nir_variable
*arr
= create_var(nir_var_mem_ssbo
,
684 glsl_array_type(glsl_int_type(), 10, 0),
686 nir_variable
*idx
= create_int(nir_var_mem_ssbo
, "idx");
687 nir_variable
*out
= create_int(nir_var_mem_ssbo
, "out");
689 nir_ssa_def
*idx_ssa
= nir_load_var(b
, idx
);
691 /* Store to arr[idx]. */
692 nir_deref_instr
*store_deref
=
693 nir_build_deref_array(b
, nir_build_deref_var(b
, arr
), idx_ssa
);
694 nir_store_deref(b
, store_deref
, nir_imm_int(b
, 20), 1);
696 /* Load from arr[idx] to store in out. This load should be dropped. */
697 nir_deref_instr
*load_deref
=
698 nir_build_deref_array(b
, nir_build_deref_var(b
, arr
), idx_ssa
);
699 nir_store_var(b
, out
, nir_load_deref(b
, load_deref
), 1);
701 nir_validate_shader(b
->shader
, NULL
);
702 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 2);
703 ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref
), 2);
705 bool progress
= nir_opt_copy_prop_vars(b
->shader
);
706 EXPECT_TRUE(progress
);
708 nir_validate_shader(b
->shader
, NULL
);
709 ASSERT_EQ(count_intrinsics(nir_intrinsic_load_deref
), 1);
710 ASSERT_EQ(count_intrinsics(nir_intrinsic_store_deref
), 2);
712 /* Store to arr[idx] propagated to out. */
713 nir_intrinsic_instr
*first
= get_intrinsic(nir_intrinsic_store_deref
, 0);
714 nir_intrinsic_instr
*second
= get_intrinsic(nir_intrinsic_store_deref
, 1);
715 ASSERT_TRUE(first
->src
[1].is_ssa
);
716 ASSERT_TRUE(second
->src
[1].is_ssa
);
717 EXPECT_EQ(first
->src
[1].ssa
, second
->src
[1].ssa
);
720 TEST_F(nir_dead_write_vars_test
, no_dead_writes_in_block
)
722 nir_variable
**v
= create_many_int(nir_var_mem_ssbo
, "v", 2);
724 nir_store_var(b
, v
[0], nir_load_var(b
, v
[1]), 1);
726 bool progress
= nir_opt_dead_write_vars(b
->shader
);
727 ASSERT_FALSE(progress
);
730 TEST_F(nir_dead_write_vars_test
, no_dead_writes_different_components_in_block
)
732 nir_variable
**v
= create_many_ivec2(nir_var_mem_ssbo
, "v", 3);
734 nir_store_var(b
, v
[0], nir_load_var(b
, v
[1]), 1 << 0);
735 nir_store_var(b
, v
[0], nir_load_var(b
, v
[2]), 1 << 1);
737 bool progress
= nir_opt_dead_write_vars(b
->shader
);
738 ASSERT_FALSE(progress
);
741 TEST_F(nir_dead_write_vars_test
, no_dead_writes_in_if_statement
)
743 nir_variable
**v
= create_many_int(nir_var_mem_ssbo
, "v", 6);
745 nir_store_var(b
, v
[2], nir_load_var(b
, v
[0]), 1);
746 nir_store_var(b
, v
[3], nir_load_var(b
, v
[1]), 1);
748 /* Each arm of the if statement will overwrite one store. */
749 nir_if
*if_stmt
= nir_push_if(b
, nir_imm_int(b
, 0));
750 nir_store_var(b
, v
[2], nir_load_var(b
, v
[4]), 1);
752 nir_push_else(b
, if_stmt
);
753 nir_store_var(b
, v
[3], nir_load_var(b
, v
[5]), 1);
755 nir_pop_if(b
, if_stmt
);
757 bool progress
= nir_opt_dead_write_vars(b
->shader
);
758 ASSERT_FALSE(progress
);
761 TEST_F(nir_dead_write_vars_test
, no_dead_writes_in_loop_statement
)
763 nir_variable
**v
= create_many_int(nir_var_mem_ssbo
, "v", 3);
765 nir_store_var(b
, v
[0], nir_load_var(b
, v
[1]), 1);
767 /* Loop will write other value. Since it might not be executed, it doesn't
768 * kill the first write.
770 nir_loop
*loop
= nir_push_loop(b
);
772 nir_if
*if_stmt
= nir_push_if(b
, nir_imm_int(b
, 0));
773 nir_jump(b
, nir_jump_break
);
774 nir_pop_if(b
, if_stmt
);
776 nir_store_var(b
, v
[0], nir_load_var(b
, v
[2]), 1);
777 nir_pop_loop(b
, loop
);
779 bool progress
= nir_opt_dead_write_vars(b
->shader
);
780 ASSERT_FALSE(progress
);
783 TEST_F(nir_dead_write_vars_test
, dead_write_in_block
)
785 nir_variable
**v
= create_many_int(nir_var_mem_ssbo
, "v", 3);
787 nir_store_var(b
, v
[0], nir_load_var(b
, v
[1]), 1);
788 nir_ssa_def
*load_v2
= nir_load_var(b
, v
[2]);
789 nir_store_var(b
, v
[0], load_v2
, 1);
791 bool progress
= nir_opt_dead_write_vars(b
->shader
);
792 ASSERT_TRUE(progress
);
794 EXPECT_EQ(1, count_intrinsics(nir_intrinsic_store_deref
));
796 nir_intrinsic_instr
*store
= get_intrinsic(nir_intrinsic_store_deref
, 0);
797 ASSERT_TRUE(store
->src
[1].is_ssa
);
798 EXPECT_EQ(store
->src
[1].ssa
, load_v2
);
801 TEST_F(nir_dead_write_vars_test
, dead_write_components_in_block
)
803 nir_variable
**v
= create_many_ivec2(nir_var_mem_ssbo
, "v", 3);
805 nir_store_var(b
, v
[0], nir_load_var(b
, v
[1]), 1 << 0);
806 nir_ssa_def
*load_v2
= nir_load_var(b
, v
[2]);
807 nir_store_var(b
, v
[0], load_v2
, 1 << 0);
809 bool progress
= nir_opt_dead_write_vars(b
->shader
);
810 ASSERT_TRUE(progress
);
812 EXPECT_EQ(1, count_intrinsics(nir_intrinsic_store_deref
));
814 nir_intrinsic_instr
*store
= get_intrinsic(nir_intrinsic_store_deref
, 0);
815 ASSERT_TRUE(store
->src
[1].is_ssa
);
816 EXPECT_EQ(store
->src
[1].ssa
, load_v2
);
/* TODO: The DISABLED tests below depend on the dead write removal be able to
 * identify dead writes between multiple blocks.  This is still not
 * implemented.
 */
825 TEST_F(nir_dead_write_vars_test
, DISABLED_dead_write_in_two_blocks
)
827 nir_variable
**v
= create_many_int(nir_var_mem_ssbo
, "v", 3);
829 nir_store_var(b
, v
[0], nir_load_var(b
, v
[1]), 1);
830 nir_ssa_def
*load_v2
= nir_load_var(b
, v
[2]);
832 /* Causes the stores to be in different blocks. */
833 nir_pop_if(b
, nir_push_if(b
, nir_imm_int(b
, 0)));
835 nir_store_var(b
, v
[0], load_v2
, 1);
837 bool progress
= nir_opt_dead_write_vars(b
->shader
);
838 ASSERT_TRUE(progress
);
840 EXPECT_EQ(1, count_intrinsics(nir_intrinsic_store_deref
));
842 nir_intrinsic_instr
*store
= get_intrinsic(nir_intrinsic_store_deref
, 0);
843 ASSERT_TRUE(store
->src
[1].is_ssa
);
844 EXPECT_EQ(store
->src
[1].ssa
, load_v2
);
847 TEST_F(nir_dead_write_vars_test
, DISABLED_dead_write_components_in_two_blocks
)
849 nir_variable
**v
= create_many_ivec2(nir_var_mem_ssbo
, "v", 3);
851 nir_store_var(b
, v
[0], nir_load_var(b
, v
[1]), 1 << 0);
853 /* Causes the stores to be in different blocks. */
854 nir_pop_if(b
, nir_push_if(b
, nir_imm_int(b
, 0)));
856 nir_ssa_def
*load_v2
= nir_load_var(b
, v
[2]);
857 nir_store_var(b
, v
[0], load_v2
, 1 << 0);
859 bool progress
= nir_opt_dead_write_vars(b
->shader
);
860 ASSERT_TRUE(progress
);
862 EXPECT_EQ(1, count_intrinsics(nir_intrinsic_store_deref
));
864 nir_intrinsic_instr
*store
= get_intrinsic(nir_intrinsic_store_deref
, 0);
865 ASSERT_TRUE(store
->src
[1].is_ssa
);
866 EXPECT_EQ(store
->src
[1].ssa
, load_v2
);
869 TEST_F(nir_dead_write_vars_test
, DISABLED_dead_writes_in_if_statement
)
871 nir_variable
**v
= create_many_int(nir_var_mem_ssbo
, "v", 4);
873 /* Both branches will overwrite, making the previous store dead. */
874 nir_store_var(b
, v
[0], nir_load_var(b
, v
[1]), 1);
876 nir_if
*if_stmt
= nir_push_if(b
, nir_imm_int(b
, 0));
877 nir_ssa_def
*load_v2
= nir_load_var(b
, v
[2]);
878 nir_store_var(b
, v
[0], load_v2
, 1);
880 nir_push_else(b
, if_stmt
);
881 nir_ssa_def
*load_v3
= nir_load_var(b
, v
[3]);
882 nir_store_var(b
, v
[0], load_v3
, 1);
884 nir_pop_if(b
, if_stmt
);
886 bool progress
= nir_opt_dead_write_vars(b
->shader
);
887 ASSERT_TRUE(progress
);
888 EXPECT_EQ(2, count_intrinsics(nir_intrinsic_store_deref
));
890 nir_intrinsic_instr
*first_store
= get_intrinsic(nir_intrinsic_store_deref
, 0);
891 ASSERT_TRUE(first_store
->src
[1].is_ssa
);
892 EXPECT_EQ(first_store
->src
[1].ssa
, load_v2
);
894 nir_intrinsic_instr
*second_store
= get_intrinsic(nir_intrinsic_store_deref
, 1);
895 ASSERT_TRUE(second_store
->src
[1].is_ssa
);
896 EXPECT_EQ(second_store
->src
[1].ssa
, load_v3
);
899 TEST_F(nir_dead_write_vars_test
, DISABLED_memory_barrier_in_two_blocks
)
901 nir_variable
**v
= create_many_int(nir_var_mem_ssbo
, "v", 2);
903 nir_store_var(b
, v
[0], nir_imm_int(b
, 1), 1);
904 nir_store_var(b
, v
[1], nir_imm_int(b
, 2), 1);
906 /* Split into many blocks. */
907 nir_pop_if(b
, nir_push_if(b
, nir_imm_int(b
, 0)));
909 /* Because it is before the barrier, this will kill the previous store to that target. */
910 nir_store_var(b
, v
[0], nir_imm_int(b
, 3), 1);
912 nir_builder_instr_insert(b
, &nir_intrinsic_instr_create(b
->shader
, nir_intrinsic_memory_barrier
)->instr
);
914 nir_store_var(b
, v
[1], nir_imm_int(b
, 4), 1);
916 bool progress
= nir_opt_dead_write_vars(b
->shader
);
917 ASSERT_TRUE(progress
);
919 EXPECT_EQ(3, count_intrinsics(nir_intrinsic_store_deref
));
922 TEST_F(nir_dead_write_vars_test
, DISABLED_unrelated_barrier_in_two_blocks
)
924 nir_variable
**v
= create_many_int(nir_var_mem_ssbo
, "v", 3);
925 nir_variable
*out
= create_int(nir_var_shader_out
, "out");
927 nir_store_var(b
, out
, nir_load_var(b
, v
[1]), 1);
928 nir_store_var(b
, v
[0], nir_load_var(b
, v
[1]), 1);
930 /* Split into many blocks. */
931 nir_pop_if(b
, nir_push_if(b
, nir_imm_int(b
, 0)));
933 /* Emit vertex will ensure writes to output variables are considered used,
934 * but should not affect other types of variables. */
936 nir_builder_instr_insert(b
, &nir_intrinsic_instr_create(b
->shader
, nir_intrinsic_emit_vertex
)->instr
);
938 nir_store_var(b
, out
, nir_load_var(b
, v
[2]), 1);
939 nir_store_var(b
, v
[0], nir_load_var(b
, v
[2]), 1);
941 bool progress
= nir_opt_dead_write_vars(b
->shader
);
942 ASSERT_TRUE(progress
);
944 /* Verify the first write to v[0] was removed. */
945 EXPECT_EQ(3, count_intrinsics(nir_intrinsic_store_deref
));
947 nir_intrinsic_instr
*first_store
= get_intrinsic(nir_intrinsic_store_deref
, 0);
948 EXPECT_EQ(nir_intrinsic_get_var(first_store
, 0), out
);
950 nir_intrinsic_instr
*second_store
= get_intrinsic(nir_intrinsic_store_deref
, 1);
951 EXPECT_EQ(nir_intrinsic_get_var(second_store
, 0), out
);
953 nir_intrinsic_instr
*third_store
= get_intrinsic(nir_intrinsic_store_deref
, 2);
954 EXPECT_EQ(nir_intrinsic_get_var(third_store
, 0), v
[0]);