/*
 * Copyright © 2014 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jason Ekstrand (jason@jlekstrand.net)
 */
28 #include "nir_search.h"
33 unsigned variables_seen
;
34 nir_alu_src variables
[NIR_SEARCH_MAX_VARIABLES
];
38 match_expression(const nir_search_expression
*expr
, nir_alu_instr
*instr
,
39 unsigned num_components
, const uint8_t *swizzle
,
40 struct match_state
*state
);
/* The do-nothing swizzle, used whenever a source's components must be
 * taken in order (explicitly sized sources, freshly constructed values).
 */
static const uint8_t identity_swizzle[] = { 0, 1, 2, 3 };
44 static bool alu_instr_is_bool(nir_alu_instr
*instr
);
47 src_is_bool(nir_src src
)
51 if (src
.ssa
->parent_instr
->type
!= nir_instr_type_alu
)
53 return alu_instr_is_bool(nir_instr_as_alu(src
.ssa
->parent_instr
));
57 alu_instr_is_bool(nir_alu_instr
*instr
)
63 return src_is_bool(instr
->src
[0].src
) && src_is_bool(instr
->src
[1].src
);
65 return src_is_bool(instr
->src
[0].src
);
67 return (nir_alu_type_get_base_type(nir_op_infos
[instr
->op
].output_type
)
73 match_value(const nir_search_value
*value
, nir_alu_instr
*instr
, unsigned src
,
74 unsigned num_components
, const uint8_t *swizzle
,
75 struct match_state
*state
)
77 uint8_t new_swizzle
[4];
79 /* If the source is an explicitly sized source, then we need to reset
80 * both the number of components and the swizzle.
82 if (nir_op_infos
[instr
->op
].input_sizes
[src
] != 0) {
83 num_components
= nir_op_infos
[instr
->op
].input_sizes
[src
];
84 swizzle
= identity_swizzle
;
87 for (unsigned i
= 0; i
< num_components
; ++i
)
88 new_swizzle
[i
] = instr
->src
[src
].swizzle
[swizzle
[i
]];
90 switch (value
->type
) {
91 case nir_search_value_expression
:
92 if (!instr
->src
[src
].src
.is_ssa
)
95 if (instr
->src
[src
].src
.ssa
->parent_instr
->type
!= nir_instr_type_alu
)
98 return match_expression(nir_search_value_as_expression(value
),
99 nir_instr_as_alu(instr
->src
[src
].src
.ssa
->parent_instr
),
100 num_components
, new_swizzle
, state
);
102 case nir_search_value_variable
: {
103 nir_search_variable
*var
= nir_search_value_as_variable(value
);
104 assert(var
->variable
< NIR_SEARCH_MAX_VARIABLES
);
106 if (state
->variables_seen
& (1 << var
->variable
)) {
107 if (!nir_srcs_equal(state
->variables
[var
->variable
].src
,
108 instr
->src
[src
].src
))
111 assert(!instr
->src
[src
].abs
&& !instr
->src
[src
].negate
);
113 for (unsigned i
= 0; i
< num_components
; ++i
) {
114 if (state
->variables
[var
->variable
].swizzle
[i
] != new_swizzle
[i
])
120 if (var
->is_constant
&&
121 instr
->src
[src
].src
.ssa
->parent_instr
->type
!= nir_instr_type_load_const
)
124 if (var
->type
!= nir_type_invalid
) {
125 if (instr
->src
[src
].src
.ssa
->parent_instr
->type
!= nir_instr_type_alu
)
128 nir_alu_instr
*src_alu
=
129 nir_instr_as_alu(instr
->src
[src
].src
.ssa
->parent_instr
);
131 if (nir_alu_type_get_base_type(nir_op_infos
[src_alu
->op
].output_type
) !=
133 !(nir_alu_type_get_base_type(var
->type
) == nir_type_bool
&&
134 alu_instr_is_bool(src_alu
)))
138 state
->variables_seen
|= (1 << var
->variable
);
139 state
->variables
[var
->variable
].src
= instr
->src
[src
].src
;
140 state
->variables
[var
->variable
].abs
= false;
141 state
->variables
[var
->variable
].negate
= false;
143 for (unsigned i
= 0; i
< 4; ++i
) {
144 if (i
< num_components
)
145 state
->variables
[var
->variable
].swizzle
[i
] = new_swizzle
[i
];
147 state
->variables
[var
->variable
].swizzle
[i
] = 0;
154 case nir_search_value_constant
: {
155 nir_search_constant
*const_val
= nir_search_value_as_constant(value
);
157 if (!instr
->src
[src
].src
.is_ssa
)
160 if (instr
->src
[src
].src
.ssa
->parent_instr
->type
!= nir_instr_type_load_const
)
163 nir_load_const_instr
*load
=
164 nir_instr_as_load_const(instr
->src
[src
].src
.ssa
->parent_instr
);
166 switch (const_val
->type
) {
168 for (unsigned i
= 0; i
< num_components
; ++i
) {
170 switch (load
->def
.bit_size
) {
172 val
= load
->value
.f32
[new_swizzle
[i
]];
175 val
= load
->value
.f64
[new_swizzle
[i
]];
178 unreachable("unknown bit size");
181 if (val
!= const_val
->data
.d
)
187 for (unsigned i
= 0; i
< num_components
; ++i
) {
189 switch (load
->def
.bit_size
) {
191 val
= load
->value
.i32
[new_swizzle
[i
]];
194 val
= load
->value
.i64
[new_swizzle
[i
]];
197 unreachable("unknown bit size");
200 if (val
!= const_val
->data
.i
)
206 case nir_type_bool32
:
207 for (unsigned i
= 0; i
< num_components
; ++i
) {
209 switch (load
->def
.bit_size
) {
211 val
= load
->value
.u32
[new_swizzle
[i
]];
214 val
= load
->value
.u64
[new_swizzle
[i
]];
217 unreachable("unknown bit size");
220 if (val
!= const_val
->data
.u
)
226 unreachable("Invalid alu source type");
231 unreachable("Invalid search value type");
236 match_expression(const nir_search_expression
*expr
, nir_alu_instr
*instr
,
237 unsigned num_components
, const uint8_t *swizzle
,
238 struct match_state
*state
)
240 if (instr
->op
!= expr
->opcode
)
243 assert(instr
->dest
.dest
.is_ssa
);
245 state
->inexact_match
= expr
->inexact
|| state
->inexact_match
;
246 state
->has_exact_alu
= instr
->exact
|| state
->has_exact_alu
;
247 if (state
->inexact_match
&& state
->has_exact_alu
)
250 assert(!instr
->dest
.saturate
);
251 assert(nir_op_infos
[instr
->op
].num_inputs
> 0);
253 /* If we have an explicitly sized destination, we can only handle the
254 * identity swizzle. While dot(vec3(a, b, c).zxy) is a valid
255 * expression, we don't have the information right now to propagate that
256 * swizzle through. We can only properly propagate swizzles if the
257 * instruction is vectorized.
259 if (nir_op_infos
[instr
->op
].output_size
!= 0) {
260 for (unsigned i
= 0; i
< num_components
; i
++) {
266 /* Stash off the current variables_seen bitmask. This way we can
267 * restore it prior to matching in the commutative case below.
269 unsigned variables_seen_stash
= state
->variables_seen
;
272 for (unsigned i
= 0; i
< nir_op_infos
[instr
->op
].num_inputs
; i
++) {
273 if (!match_value(expr
->srcs
[i
], instr
, i
, num_components
,
283 if (nir_op_infos
[instr
->op
].algebraic_properties
& NIR_OP_IS_COMMUTATIVE
) {
284 assert(nir_op_infos
[instr
->op
].num_inputs
== 2);
286 /* Restore the variables_seen bitmask. If we don't do this, then we
287 * could end up with an erroneous failure due to variables found in the
288 * first match attempt above not matching those in the second.
290 state
->variables_seen
= variables_seen_stash
;
292 if (!match_value(expr
->srcs
[0], instr
, 1, num_components
,
296 return match_value(expr
->srcs
[1], instr
, 0, num_components
,
/* Per-replacement-value record used to infer bit sizes for the values we
 * are about to construct.  Explicitly sized opcode inputs/outputs pin
 * src_size[]/dest_size directly; unsized ones share common_size, which is
 * propagated up and then down the tree by the filter passes below.
 */
typedef struct bitsize_tree {
   unsigned num_srcs;
   struct bitsize_tree *srcs[4];

   unsigned common_size;
   bool is_src_sized[4];
   bool is_dest_sized;

   unsigned dest_size;
   unsigned src_size[4];
} bitsize_tree;
315 static bitsize_tree
*
316 build_bitsize_tree(void *mem_ctx
, struct match_state
*state
,
317 const nir_search_value
*value
)
319 bitsize_tree
*tree
= ralloc(mem_ctx
, bitsize_tree
);
321 switch (value
->type
) {
322 case nir_search_value_expression
: {
323 nir_search_expression
*expr
= nir_search_value_as_expression(value
);
324 nir_op_info info
= nir_op_infos
[expr
->opcode
];
325 tree
->num_srcs
= info
.num_inputs
;
326 tree
->common_size
= 0;
327 for (unsigned i
= 0; i
< info
.num_inputs
; i
++) {
328 tree
->is_src_sized
[i
] = !!nir_alu_type_get_type_size(info
.input_types
[i
]);
329 if (tree
->is_src_sized
[i
])
330 tree
->src_size
[i
] = nir_alu_type_get_type_size(info
.input_types
[i
]);
331 tree
->srcs
[i
] = build_bitsize_tree(mem_ctx
, state
, expr
->srcs
[i
]);
333 tree
->is_dest_sized
= !!nir_alu_type_get_type_size(info
.output_type
);
334 if (tree
->is_dest_sized
)
335 tree
->dest_size
= nir_alu_type_get_type_size(info
.output_type
);
339 case nir_search_value_variable
: {
340 nir_search_variable
*var
= nir_search_value_as_variable(value
);
342 tree
->is_dest_sized
= true;
343 tree
->dest_size
= nir_src_bit_size(state
->variables
[var
->variable
].src
);
347 case nir_search_value_constant
: {
349 tree
->is_dest_sized
= false;
350 tree
->common_size
= 0;
359 bitsize_tree_filter_up(bitsize_tree
*tree
)
361 for (unsigned i
= 0; i
< tree
->num_srcs
; i
++) {
362 unsigned src_size
= bitsize_tree_filter_up(tree
->srcs
[i
]);
366 if (tree
->is_src_sized
[i
]) {
367 assert(src_size
== tree
->src_size
[i
]);
368 } else if (tree
->common_size
!= 0) {
369 assert(src_size
== tree
->common_size
);
370 tree
->src_size
[i
] = src_size
;
372 tree
->common_size
= src_size
;
373 tree
->src_size
[i
] = src_size
;
377 if (tree
->num_srcs
&& tree
->common_size
) {
378 if (tree
->dest_size
== 0)
379 tree
->dest_size
= tree
->common_size
;
380 else if (!tree
->is_dest_sized
)
381 assert(tree
->dest_size
== tree
->common_size
);
383 for (unsigned i
= 0; i
< tree
->num_srcs
; i
++) {
384 if (!tree
->src_size
[i
])
385 tree
->src_size
[i
] = tree
->common_size
;
389 return tree
->dest_size
;
393 bitsize_tree_filter_down(bitsize_tree
*tree
, unsigned size
)
396 assert(tree
->dest_size
== size
);
398 tree
->dest_size
= size
;
400 if (!tree
->is_dest_sized
) {
401 if (tree
->common_size
)
402 assert(tree
->common_size
== size
);
404 tree
->common_size
= size
;
407 for (unsigned i
= 0; i
< tree
->num_srcs
; i
++) {
408 if (!tree
->src_size
[i
]) {
409 assert(tree
->common_size
);
410 tree
->src_size
[i
] = tree
->common_size
;
412 bitsize_tree_filter_down(tree
->srcs
[i
], tree
->src_size
[i
]);
417 construct_value(const nir_search_value
*value
,
418 unsigned num_components
, bitsize_tree
*bitsize
,
419 struct match_state
*state
,
420 nir_instr
*instr
, void *mem_ctx
)
422 switch (value
->type
) {
423 case nir_search_value_expression
: {
424 const nir_search_expression
*expr
= nir_search_value_as_expression(value
);
426 if (nir_op_infos
[expr
->opcode
].output_size
!= 0)
427 num_components
= nir_op_infos
[expr
->opcode
].output_size
;
429 nir_alu_instr
*alu
= nir_alu_instr_create(mem_ctx
, expr
->opcode
);
430 nir_ssa_dest_init(&alu
->instr
, &alu
->dest
.dest
, num_components
,
431 bitsize
->dest_size
, NULL
);
432 alu
->dest
.write_mask
= (1 << num_components
) - 1;
433 alu
->dest
.saturate
= false;
435 /* We have no way of knowing what values in a given search expression
436 * map to a particular replacement value. Therefore, if the
437 * expression we are replacing has any exact values, the entire
438 * replacement should be exact.
440 alu
->exact
= state
->has_exact_alu
;
442 for (unsigned i
= 0; i
< nir_op_infos
[expr
->opcode
].num_inputs
; i
++) {
443 /* If the source is an explicitly sized source, then we need to reset
444 * the number of components to match.
446 if (nir_op_infos
[alu
->op
].input_sizes
[i
] != 0)
447 num_components
= nir_op_infos
[alu
->op
].input_sizes
[i
];
449 alu
->src
[i
] = construct_value(expr
->srcs
[i
],
450 num_components
, bitsize
->srcs
[i
],
451 state
, instr
, mem_ctx
);
454 nir_instr_insert_before(instr
, &alu
->instr
);
457 val
.src
= nir_src_for_ssa(&alu
->dest
.dest
.ssa
);
460 memcpy(val
.swizzle
, identity_swizzle
, sizeof val
.swizzle
);
465 case nir_search_value_variable
: {
466 const nir_search_variable
*var
= nir_search_value_as_variable(value
);
467 assert(state
->variables_seen
& (1 << var
->variable
));
469 nir_alu_src val
= { NIR_SRC_INIT
};
470 nir_alu_src_copy(&val
, &state
->variables
[var
->variable
], mem_ctx
);
472 assert(!var
->is_constant
);
477 case nir_search_value_constant
: {
478 const nir_search_constant
*c
= nir_search_value_as_constant(value
);
479 nir_load_const_instr
*load
= nir_load_const_instr_create(mem_ctx
, 1);
483 load
->def
.name
= ralloc_asprintf(load
, "%f", c
->data
.d
);
484 switch (bitsize
->dest_size
) {
486 load
->value
.f32
[0] = c
->data
.d
;
489 load
->value
.f64
[0] = c
->data
.d
;
492 unreachable("unknown bit size");
497 load
->def
.name
= ralloc_asprintf(load
, "%ld", c
->data
.i
);
498 switch (bitsize
->dest_size
) {
500 load
->value
.i32
[0] = c
->data
.i
;
503 load
->value
.i64
[0] = c
->data
.i
;
506 unreachable("unknown bit size");
511 load
->def
.name
= ralloc_asprintf(load
, "%lu", c
->data
.u
);
512 switch (bitsize
->dest_size
) {
514 load
->value
.u32
[0] = c
->data
.u
;
517 load
->value
.u64
[0] = c
->data
.u
;
520 unreachable("unknown bit size");
523 case nir_type_bool32
:
524 load
->value
.u32
[0] = c
->data
.u
;
527 unreachable("Invalid alu source type");
530 load
->def
.bit_size
= bitsize
->dest_size
;
532 nir_instr_insert_before(instr
, &load
->instr
);
535 val
.src
= nir_src_for_ssa(&load
->def
);
538 memset(val
.swizzle
, 0, sizeof val
.swizzle
);
544 unreachable("Invalid search value type");
549 nir_replace_instr(nir_alu_instr
*instr
, const nir_search_expression
*search
,
550 const nir_search_value
*replace
, void *mem_ctx
)
552 uint8_t swizzle
[4] = { 0, 0, 0, 0 };
554 for (unsigned i
= 0; i
< instr
->dest
.dest
.ssa
.num_components
; ++i
)
557 assert(instr
->dest
.dest
.is_ssa
);
559 struct match_state state
;
560 state
.inexact_match
= false;
561 state
.has_exact_alu
= false;
562 state
.variables_seen
= 0;
564 if (!match_expression(search
, instr
, instr
->dest
.dest
.ssa
.num_components
,
568 void *bitsize_ctx
= ralloc_context(NULL
);
569 bitsize_tree
*tree
= build_bitsize_tree(bitsize_ctx
, &state
, replace
);
570 bitsize_tree_filter_up(tree
);
571 bitsize_tree_filter_down(tree
, instr
->dest
.dest
.ssa
.bit_size
);
573 /* Inserting a mov may be unnecessary. However, it's much easier to
574 * simply let copy propagation clean this up than to try to go through
575 * and rewrite swizzles ourselves.
577 nir_alu_instr
*mov
= nir_alu_instr_create(mem_ctx
, nir_op_imov
);
578 mov
->dest
.write_mask
= instr
->dest
.write_mask
;
579 nir_ssa_dest_init(&mov
->instr
, &mov
->dest
.dest
,
580 instr
->dest
.dest
.ssa
.num_components
,
581 instr
->dest
.dest
.ssa
.bit_size
, NULL
);
583 mov
->src
[0] = construct_value(replace
,
584 instr
->dest
.dest
.ssa
.num_components
, tree
,
585 &state
, &instr
->instr
, mem_ctx
);
586 nir_instr_insert_before(&instr
->instr
, &mov
->instr
);
588 nir_ssa_def_rewrite_uses(&instr
->dest
.dest
.ssa
,
589 nir_src_for_ssa(&mov
->dest
.dest
.ssa
));
591 /* We know this one has no more uses because we just rewrote them all,
592 * so we can remove it. The rest of the matched expression, however, we
593 * don't know so much about. We'll just let dead code clean them up.
595 nir_instr_remove(&instr
->instr
);
597 ralloc_free(bitsize_ctx
);