[mesa.git] / src/compiler/nir/nir_search.c
/*
 * Copyright © 2014 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jason Ekstrand (jason@jlekstrand.net)
 *
 */

#include <inttypes.h>
#include "nir_search.h"

struct match_state {
   bool inexact_match;
   bool has_exact_alu;
   unsigned variables_seen;
   nir_alu_src variables[NIR_SEARCH_MAX_VARIABLES];
};

static bool
match_expression(const nir_search_expression *expr, nir_alu_instr *instr,
                 unsigned num_components, const uint8_t *swizzle,
                 struct match_state *state);

static const uint8_t identity_swizzle[] = { 0, 1, 2, 3 };

static bool alu_instr_is_bool(nir_alu_instr *instr);

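/* Returns true if we can tell that a value is boolean: either the
 * instruction producing it has a boolean output type, or it is one of the
 * bitwise ops (iand/ior/ixor/inot) whose operands are themselves boolean.
 */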
static bool
src_is_bool(nir_src src)
{
   if (!src.is_ssa)
      return false;
   if (src.ssa->parent_instr->type != nir_instr_type_alu)
      return false;
   return alu_instr_is_bool(nir_instr_as_alu(src.ssa->parent_instr));
}

static bool
alu_instr_is_bool(nir_alu_instr *instr)
{
   switch (instr->op) {
   case nir_op_iand:
   case nir_op_ior:
   case nir_op_ixor:
      return src_is_bool(instr->src[0].src) && src_is_bool(instr->src[1].src);
   case nir_op_inot:
      return src_is_bool(instr->src[0].src);
   default:
      return (nir_alu_type_get_base_type(nir_op_infos[instr->op].output_type)
              == nir_type_bool);
   }
}

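/* Attempts to match a single search value (expression, variable, or
 * constant) against source 'src' of the given ALU instruction, read through
 * the accumulated swizzle. Variable bindings and match flags are recorded
 * in 'state'. Returns true on a successful match.
 */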
static bool
match_value(const nir_search_value *value, nir_alu_instr *instr, unsigned src,
            unsigned num_components, const uint8_t *swizzle,
            struct match_state *state)
{
   uint8_t new_swizzle[4];

   /* If the source is an explicitly sized source, then we need to reset
    * both the number of components and the swizzle.
    */
   if (nir_op_infos[instr->op].input_sizes[src] != 0) {
      num_components = nir_op_infos[instr->op].input_sizes[src];
      swizzle = identity_swizzle;
   }

   for (unsigned i = 0; i < num_components; ++i)
      new_swizzle[i] = instr->src[src].swizzle[swizzle[i]];

   /* If the value has a specific bit size and it doesn't match, bail */
   if (value->bit_size &&
       nir_src_bit_size(instr->src[src].src) != value->bit_size)
      return false;

   switch (value->type) {
   case nir_search_value_expression:
      if (!instr->src[src].src.is_ssa)
         return false;

      if (instr->src[src].src.ssa->parent_instr->type != nir_instr_type_alu)
         return false;

      return match_expression(nir_search_value_as_expression(value),
                              nir_instr_as_alu(instr->src[src].src.ssa->parent_instr),
                              num_components, new_swizzle, state);

   case nir_search_value_variable: {
      nir_search_variable *var = nir_search_value_as_variable(value);
      assert(var->variable < NIR_SEARCH_MAX_VARIABLES);

      if (state->variables_seen & (1 << var->variable)) {
         if (!nir_srcs_equal(state->variables[var->variable].src,
                             instr->src[src].src))
            return false;

         assert(!instr->src[src].abs && !instr->src[src].negate);

         for (unsigned i = 0; i < num_components; ++i) {
            if (state->variables[var->variable].swizzle[i] != new_swizzle[i])
               return false;
         }

         return true;
      } else {
         if (var->is_constant &&
             instr->src[src].src.ssa->parent_instr->type != nir_instr_type_load_const)
            return false;

         if (var->cond && !var->cond(instr, src, num_components, new_swizzle))
            return false;

         if (var->type != nir_type_invalid) {
            if (instr->src[src].src.ssa->parent_instr->type != nir_instr_type_alu)
               return false;

            nir_alu_instr *src_alu =
               nir_instr_as_alu(instr->src[src].src.ssa->parent_instr);

            if (nir_alu_type_get_base_type(nir_op_infos[src_alu->op].output_type) !=
                var->type &&
                !(nir_alu_type_get_base_type(var->type) == nir_type_bool &&
                  alu_instr_is_bool(src_alu)))
               return false;
         }

         state->variables_seen |= (1 << var->variable);
         state->variables[var->variable].src = instr->src[src].src;
         state->variables[var->variable].abs = false;
         state->variables[var->variable].negate = false;

         for (unsigned i = 0; i < 4; ++i) {
            if (i < num_components)
               state->variables[var->variable].swizzle[i] = new_swizzle[i];
            else
               state->variables[var->variable].swizzle[i] = 0;
         }

         return true;
      }
   }

   case nir_search_value_constant: {
      nir_search_constant *const_val = nir_search_value_as_constant(value);

      if (!instr->src[src].src.is_ssa)
         return false;

      if (instr->src[src].src.ssa->parent_instr->type != nir_instr_type_load_const)
         return false;

      nir_load_const_instr *load =
         nir_instr_as_load_const(instr->src[src].src.ssa->parent_instr);

      switch (const_val->type) {
      case nir_type_float:
         for (unsigned i = 0; i < num_components; ++i) {
            double val;
            switch (load->def.bit_size) {
            case 32:
               val = load->value.f32[new_swizzle[i]];
               break;
            case 64:
               val = load->value.f64[new_swizzle[i]];
               break;
            default:
               unreachable("unknown bit size");
            }

            if (val != const_val->data.d)
               return false;
         }
         return true;

      case nir_type_int:
         for (unsigned i = 0; i < num_components; ++i) {
            int64_t val;
            switch (load->def.bit_size) {
            case 32:
               val = load->value.i32[new_swizzle[i]];
               break;
            case 64:
               val = load->value.i64[new_swizzle[i]];
               break;
            default:
               unreachable("unknown bit size");
            }

            if (val != const_val->data.i)
               return false;
         }
         return true;

      case nir_type_uint:
      case nir_type_bool32:
         for (unsigned i = 0; i < num_components; ++i) {
            uint64_t val;
            switch (load->def.bit_size) {
            case 32:
               val = load->value.u32[new_swizzle[i]];
               break;
            case 64:
               val = load->value.u64[new_swizzle[i]];
               break;
            default:
               unreachable("unknown bit size");
            }

            if (val != const_val->data.u)
               return false;
         }
         return true;

      default:
         unreachable("Invalid alu source type");
      }
   }

   default:
      unreachable("Invalid search value type");
   }
}

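/* Attempts to match a search expression against an ALU instruction. On
 * success, every variable in the expression has been bound in 'state'.
 * For commutative binary ops, the two sources are also tried in swapped
 * order before giving up.
 */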
static bool
match_expression(const nir_search_expression *expr, nir_alu_instr *instr,
                 unsigned num_components, const uint8_t *swizzle,
                 struct match_state *state)
{
   if (instr->op != expr->opcode)
      return false;

   assert(instr->dest.dest.is_ssa);

   if (expr->value.bit_size &&
       instr->dest.dest.ssa.bit_size != expr->value.bit_size)
      return false;

   state->inexact_match = expr->inexact || state->inexact_match;
   state->has_exact_alu = instr->exact || state->has_exact_alu;
   if (state->inexact_match && state->has_exact_alu)
      return false;

   assert(!instr->dest.saturate);
   assert(nir_op_infos[instr->op].num_inputs > 0);

   /* If we have an explicitly sized destination, we can only handle the
    * identity swizzle. While dot(vec3(a, b, c).zxy) is a valid
    * expression, we don't have the information right now to propagate that
    * swizzle through. We can only properly propagate swizzles if the
    * instruction is vectorized.
    */
   if (nir_op_infos[instr->op].output_size != 0) {
      for (unsigned i = 0; i < num_components; i++) {
         if (swizzle[i] != i)
            return false;
      }
   }

   /* Stash off the current variables_seen bitmask. This way we can
    * restore it prior to matching in the commutative case below.
    */
   unsigned variables_seen_stash = state->variables_seen;

   bool matched = true;
   for (unsigned i = 0; i < nir_op_infos[instr->op].num_inputs; i++) {
      if (!match_value(expr->srcs[i], instr, i, num_components,
                       swizzle, state)) {
         matched = false;
         break;
      }
   }

   if (matched)
      return true;

   if (nir_op_infos[instr->op].algebraic_properties & NIR_OP_IS_COMMUTATIVE) {
      assert(nir_op_infos[instr->op].num_inputs == 2);

      /* Restore the variables_seen bitmask. If we don't do this, then we
       * could end up with an erroneous failure due to variables found in the
       * first match attempt above not matching those in the second.
       */
      state->variables_seen = variables_seen_stash;

      if (!match_value(expr->srcs[0], instr, 1, num_components,
                       swizzle, state))
         return false;

      return match_value(expr->srcs[1], instr, 0, num_components,
                         swizzle, state);
   } else {
      return false;
   }
}

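/* A tree mirroring the replacement expression, used to work out the bit
 * size of every value in the replacement. Sizes flow up from explicitly
 * sized sources and already-matched variables, then flow back down from
 * the destination to anything still unsized.
 */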
typedef struct bitsize_tree {
   unsigned num_srcs;
   struct bitsize_tree *srcs[4];

   unsigned common_size;
   bool is_src_sized[4];
   bool is_dest_sized;

   unsigned dest_size;
   unsigned src_size[4];
} bitsize_tree;

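/* Builds a bitsize_tree node for a replacement value, recursing into
 * expression sources. Sizes that are known up front (explicitly sized
 * opcode types, matched variables, or an explicit bit_size on the search
 * value) are filled in immediately.
 */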
static bitsize_tree *
build_bitsize_tree(void *mem_ctx, struct match_state *state,
                   const nir_search_value *value)
{
   bitsize_tree *tree = ralloc(mem_ctx, bitsize_tree);

   switch (value->type) {
   case nir_search_value_expression: {
      nir_search_expression *expr = nir_search_value_as_expression(value);
      nir_op_info info = nir_op_infos[expr->opcode];
      tree->num_srcs = info.num_inputs;
      tree->common_size = 0;
      for (unsigned i = 0; i < info.num_inputs; i++) {
         tree->is_src_sized[i] = !!nir_alu_type_get_type_size(info.input_types[i]);
         if (tree->is_src_sized[i])
            tree->src_size[i] = nir_alu_type_get_type_size(info.input_types[i]);
         tree->srcs[i] = build_bitsize_tree(mem_ctx, state, expr->srcs[i]);
      }
      tree->is_dest_sized = !!nir_alu_type_get_type_size(info.output_type);
      if (tree->is_dest_sized)
         tree->dest_size = nir_alu_type_get_type_size(info.output_type);
      break;
   }

   case nir_search_value_variable: {
      nir_search_variable *var = nir_search_value_as_variable(value);
      tree->num_srcs = 0;
      tree->is_dest_sized = true;
      tree->dest_size = nir_src_bit_size(state->variables[var->variable].src);
      break;
   }

   case nir_search_value_constant: {
      tree->num_srcs = 0;
      tree->is_dest_sized = false;
      tree->common_size = 0;
      break;
   }
   }

   if (value->bit_size) {
      assert(!tree->is_dest_sized || tree->dest_size == value->bit_size);
      tree->common_size = value->bit_size;
   }

   return tree;
}

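/* Propagates known bit sizes upward from the leaves, filling in unsized
 * sources and the destination from whatever children already have a size.
 * Returns the destination size computed for this node.
 */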
static unsigned
bitsize_tree_filter_up(bitsize_tree *tree)
{
   for (unsigned i = 0; i < tree->num_srcs; i++) {
      unsigned src_size = bitsize_tree_filter_up(tree->srcs[i]);
      if (src_size == 0)
         continue;

      if (tree->is_src_sized[i]) {
         assert(src_size == tree->src_size[i]);
      } else if (tree->common_size != 0) {
         assert(src_size == tree->common_size);
         tree->src_size[i] = src_size;
      } else {
         tree->common_size = src_size;
         tree->src_size[i] = src_size;
      }
   }

   if (tree->num_srcs && tree->common_size) {
      if (tree->dest_size == 0)
         tree->dest_size = tree->common_size;
      else if (!tree->is_dest_sized)
         assert(tree->dest_size == tree->common_size);

      for (unsigned i = 0; i < tree->num_srcs; i++) {
         if (!tree->src_size[i])
            tree->src_size[i] = tree->common_size;
      }
   }

   return tree->dest_size;
}

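/* Pushes the bit size chosen for the root back down the tree so that every
 * node ends up with a concrete destination and source size.
 */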
static void
bitsize_tree_filter_down(bitsize_tree *tree, unsigned size)
{
   if (tree->dest_size)
      assert(tree->dest_size == size);
   else
      tree->dest_size = size;

   if (!tree->is_dest_sized) {
      if (tree->common_size)
         assert(tree->common_size == size);
      else
         tree->common_size = size;
   }

   for (unsigned i = 0; i < tree->num_srcs; i++) {
      if (!tree->src_size[i]) {
         assert(tree->common_size);
         tree->src_size[i] = tree->common_size;
      }
      bitsize_tree_filter_down(tree->srcs[i], tree->src_size[i]);
   }
}

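/* Builds the NIR that computes a replacement value, inserting any new
 * instructions immediately before 'instr'. Expressions become fresh ALU
 * instructions, variables reuse the sources captured during matching, and
 * constants become load_const instructions.
 */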
static nir_alu_src
construct_value(const nir_search_value *value,
                unsigned num_components, bitsize_tree *bitsize,
                struct match_state *state,
                nir_instr *instr, void *mem_ctx)
{
   switch (value->type) {
   case nir_search_value_expression: {
      const nir_search_expression *expr = nir_search_value_as_expression(value);

      if (nir_op_infos[expr->opcode].output_size != 0)
         num_components = nir_op_infos[expr->opcode].output_size;

      nir_alu_instr *alu = nir_alu_instr_create(mem_ctx, expr->opcode);
      nir_ssa_dest_init(&alu->instr, &alu->dest.dest, num_components,
                        bitsize->dest_size, NULL);
      alu->dest.write_mask = (1 << num_components) - 1;
      alu->dest.saturate = false;

      /* We have no way of knowing what values in a given search expression
       * map to a particular replacement value. Therefore, if the
       * expression we are replacing has any exact values, the entire
       * replacement should be exact.
       */
      alu->exact = state->has_exact_alu;

      for (unsigned i = 0; i < nir_op_infos[expr->opcode].num_inputs; i++) {
         /* If the source is an explicitly sized source, then we need to reset
          * the number of components to match.
          */
         if (nir_op_infos[alu->op].input_sizes[i] != 0)
            num_components = nir_op_infos[alu->op].input_sizes[i];

         alu->src[i] = construct_value(expr->srcs[i],
                                       num_components, bitsize->srcs[i],
                                       state, instr, mem_ctx);
      }

      nir_instr_insert_before(instr, &alu->instr);

      nir_alu_src val;
      val.src = nir_src_for_ssa(&alu->dest.dest.ssa);
      val.negate = false;
      val.abs = false;
      memcpy(val.swizzle, identity_swizzle, sizeof val.swizzle);

      return val;
   }

   case nir_search_value_variable: {
      const nir_search_variable *var = nir_search_value_as_variable(value);
      assert(state->variables_seen & (1 << var->variable));

      nir_alu_src val = { NIR_SRC_INIT };
      nir_alu_src_copy(&val, &state->variables[var->variable], mem_ctx);

      assert(!var->is_constant);

      return val;
   }

   case nir_search_value_constant: {
      const nir_search_constant *c = nir_search_value_as_constant(value);
      nir_load_const_instr *load =
         nir_load_const_instr_create(mem_ctx, 1, bitsize->dest_size);

      switch (c->type) {
      case nir_type_float:
         load->def.name = ralloc_asprintf(load, "%f", c->data.d);
         switch (bitsize->dest_size) {
         case 32:
            load->value.f32[0] = c->data.d;
            break;
         case 64:
            load->value.f64[0] = c->data.d;
            break;
         default:
            unreachable("unknown bit size");
         }
         break;

      case nir_type_int:
         load->def.name = ralloc_asprintf(load, "%" PRIi64, c->data.i);
         switch (bitsize->dest_size) {
         case 32:
            load->value.i32[0] = c->data.i;
            break;
         case 64:
            load->value.i64[0] = c->data.i;
            break;
         default:
            unreachable("unknown bit size");
         }
         break;

      case nir_type_uint:
         load->def.name = ralloc_asprintf(load, "%" PRIu64, c->data.u);
         switch (bitsize->dest_size) {
         case 32:
            load->value.u32[0] = c->data.u;
            break;
         case 64:
            load->value.u64[0] = c->data.u;
            break;
         default:
            unreachable("unknown bit size");
         }
         break;

      case nir_type_bool32:
         load->value.u32[0] = c->data.u;
         break;
      default:
         unreachable("Invalid alu source type");
      }

      nir_instr_insert_before(instr, &load->instr);

      nir_alu_src val;
      val.src = nir_src_for_ssa(&load->def);
      val.negate = false;
      val.abs = false;
      memset(val.swizzle, 0, sizeof val.swizzle);

      return val;
   }

   default:
      unreachable("Invalid search value type");
   }
}

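/* Tries to match 'search' against 'instr'. On success, the replacement
 * expression is constructed, its result is moved into place via an imov,
 * all uses of the old destination are rewritten, and the matched
 * instruction is removed. Returns the new imov, or NULL if the pattern
 * did not match.
 */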
nir_alu_instr *
nir_replace_instr(nir_alu_instr *instr, const nir_search_expression *search,
                  const nir_search_value *replace, void *mem_ctx)
{
   uint8_t swizzle[4] = { 0, 0, 0, 0 };

   for (unsigned i = 0; i < instr->dest.dest.ssa.num_components; ++i)
      swizzle[i] = i;

   assert(instr->dest.dest.is_ssa);

   struct match_state state;
   state.inexact_match = false;
   state.has_exact_alu = false;
   state.variables_seen = 0;

   if (!match_expression(search, instr, instr->dest.dest.ssa.num_components,
                         swizzle, &state))
      return NULL;

   void *bitsize_ctx = ralloc_context(NULL);
   bitsize_tree *tree = build_bitsize_tree(bitsize_ctx, &state, replace);
   bitsize_tree_filter_up(tree);
   bitsize_tree_filter_down(tree, instr->dest.dest.ssa.bit_size);

   /* Inserting a mov may be unnecessary. However, it's much easier to
    * simply let copy propagation clean this up than to try to go through
    * and rewrite swizzles ourselves.
    */
   nir_alu_instr *mov = nir_alu_instr_create(mem_ctx, nir_op_imov);
   mov->dest.write_mask = instr->dest.write_mask;
   nir_ssa_dest_init(&mov->instr, &mov->dest.dest,
                     instr->dest.dest.ssa.num_components,
                     instr->dest.dest.ssa.bit_size, NULL);

   mov->src[0] = construct_value(replace,
                                 instr->dest.dest.ssa.num_components, tree,
                                 &state, &instr->instr, mem_ctx);
   nir_instr_insert_before(&instr->instr, &mov->instr);

   nir_ssa_def_rewrite_uses(&instr->dest.dest.ssa,
                            nir_src_for_ssa(&mov->dest.dest.ssa));

   /* We know this one has no more uses because we just rewrote them all,
    * so we can remove it. The rest of the matched expression, however, we
    * don't know so much about. We'll just let dead code clean them up.
    */
   nir_instr_remove(&instr->instr);

   ralloc_free(bitsize_ctx);

   return mov;
}