Merge remote-tracking branch 'public/master' into vulkan
[mesa.git] / src / compiler / nir / nir_search.c
1 /*
2 * Copyright © 2014 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 *
23 * Authors:
24 * Jason Ekstrand (jason@jlekstrand.net)
25 *
26 */
27
#include <inttypes.h>

#include "nir_search.h"
29
/* Transient state accumulated while matching one search expression against
 * an instruction sub-tree.
 */
struct match_state {
   bool inexact_match;      /* true once an "inexact" search node is entered */
   bool has_exact_alu;      /* true once an instruction marked exact is seen */
   unsigned variables_seen; /* bitmask of variable slots already bound */
   nir_alu_src variables[NIR_SEARCH_MAX_VARIABLES]; /* bound variable sources */
};
36
/* Forward declaration: match_expression and match_value recurse into each
 * other while walking the search expression tree.
 */
static bool
match_expression(const nir_search_expression *expr, nir_alu_instr *instr,
                 unsigned num_components, const uint8_t *swizzle,
                 struct match_state *state);

/* The no-op swizzle, substituted when a source's incoming swizzle must be
 * discarded (explicitly sized sources).
 */
static const uint8_t identity_swizzle[] = { 0, 1, 2, 3 };

/* Forward declaration: alu_instr_is_bool and src_is_bool are mutually
 * recursive.
 */
static bool alu_instr_is_bool(nir_alu_instr *instr);
45
46 static bool
47 src_is_bool(nir_src src)
48 {
49 if (!src.is_ssa)
50 return false;
51 if (src.ssa->parent_instr->type != nir_instr_type_alu)
52 return false;
53 return alu_instr_is_bool(nir_instr_as_alu(src.ssa->parent_instr));
54 }
55
56 static bool
57 alu_instr_is_bool(nir_alu_instr *instr)
58 {
59 switch (instr->op) {
60 case nir_op_iand:
61 case nir_op_ior:
62 case nir_op_ixor:
63 return src_is_bool(instr->src[0].src) && src_is_bool(instr->src[1].src);
64 case nir_op_inot:
65 return src_is_bool(instr->src[0].src);
66 default:
67 return (nir_alu_type_get_base_type(nir_op_infos[instr->op].output_type)
68 == nir_type_bool);
69 }
70 }
71
/* Attempts to match a single search value (expression, variable, or
 * constant) against source "src" of the given ALU instruction, viewed
 * through "swizzle" with "num_components" components.
 *
 * Returns true on a match.  Variables are bound into state->variables as a
 * side effect; a previously-bound variable only matches an identical
 * source/swizzle.  On failure, state may be partially updated — callers
 * that retry (e.g. the commutative path in match_expression) must restore
 * variables_seen themselves.
 */
static bool
match_value(const nir_search_value *value, nir_alu_instr *instr, unsigned src,
            unsigned num_components, const uint8_t *swizzle,
            struct match_state *state)
{
   uint8_t new_swizzle[4];

   /* If the source is an explicitly sized source, then we need to reset
    * both the number of components and the swizzle.
    */
   if (nir_op_infos[instr->op].input_sizes[src] != 0) {
      num_components = nir_op_infos[instr->op].input_sizes[src];
      swizzle = identity_swizzle;
   }

   /* Compose the incoming swizzle with the source's own swizzle so that
    * comparisons below see the effective per-component selection.
    */
   for (unsigned i = 0; i < num_components; ++i)
      new_swizzle[i] = instr->src[src].swizzle[swizzle[i]];

   switch (value->type) {
   case nir_search_value_expression:
      /* Sub-expressions can only match SSA values produced by ALU
       * instructions.
       */
      if (!instr->src[src].src.is_ssa)
         return false;

      if (instr->src[src].src.ssa->parent_instr->type != nir_instr_type_alu)
         return false;

      return match_expression(nir_search_value_as_expression(value),
                              nir_instr_as_alu(instr->src[src].src.ssa->parent_instr),
                              num_components, new_swizzle, state);

   case nir_search_value_variable: {
      nir_search_variable *var = nir_search_value_as_variable(value);
      assert(var->variable < NIR_SEARCH_MAX_VARIABLES);

      if (state->variables_seen & (1 << var->variable)) {
         /* Already bound: only an identical source with an identical
          * swizzle matches again.
          */
         if (!nir_srcs_equal(state->variables[var->variable].src,
                             instr->src[src].src))
            return false;

         assert(!instr->src[src].abs && !instr->src[src].negate);

         for (unsigned i = 0; i < num_components; ++i) {
            if (state->variables[var->variable].swizzle[i] != new_swizzle[i])
               return false;
         }

         return true;
      } else {
         /* NOTE(review): unlike the expression and constant cases, this
          * branch dereferences .ssa without checking src.is_ssa first —
          * presumably variables are only matched against SSA-only NIR;
          * confirm against callers.
          */
         if (var->is_constant &&
             instr->src[src].src.ssa->parent_instr->type != nir_instr_type_load_const)
            return false;

         /* When the variable is type-constrained, the source must come
          * from an ALU instruction of that base type (or be a provable
          * boolean when a boolean is required).
          */
         if (var->type != nir_type_invalid) {
            if (instr->src[src].src.ssa->parent_instr->type != nir_instr_type_alu)
               return false;

            nir_alu_instr *src_alu =
               nir_instr_as_alu(instr->src[src].src.ssa->parent_instr);

            if (nir_alu_type_get_base_type(nir_op_infos[src_alu->op].output_type) !=
                var->type &&
                !(nir_alu_type_get_base_type(var->type) == nir_type_bool &&
                  alu_instr_is_bool(src_alu)))
               return false;
         }

         /* Bind the variable: record the source and the effective swizzle,
          * zero-padding unused swizzle slots.
          */
         state->variables_seen |= (1 << var->variable);
         state->variables[var->variable].src = instr->src[src].src;
         state->variables[var->variable].abs = false;
         state->variables[var->variable].negate = false;

         for (unsigned i = 0; i < 4; ++i) {
            if (i < num_components)
               state->variables[var->variable].swizzle[i] = new_swizzle[i];
            else
               state->variables[var->variable].swizzle[i] = 0;
         }

         return true;
      }
   }

   case nir_search_value_constant: {
      nir_search_constant *const_val = nir_search_value_as_constant(value);

      if (!instr->src[src].src.is_ssa)
         return false;

      if (instr->src[src].src.ssa->parent_instr->type != nir_instr_type_load_const)
         return false;

      nir_load_const_instr *load =
         nir_instr_as_load_const(instr->src[src].src.ssa->parent_instr);

      /* Every referenced component must equal the search constant, read at
       * the load's bit size.
       */
      switch (const_val->type) {
      case nir_type_float:
         for (unsigned i = 0; i < num_components; ++i) {
            double val;
            switch (load->def.bit_size) {
            case 32:
               val = load->value.f32[new_swizzle[i]];
               break;
            case 64:
               val = load->value.f64[new_swizzle[i]];
               break;
            default:
               unreachable("unknown bit size");
            }

            if (val != const_val->data.d)
               return false;
         }
         return true;

      case nir_type_int:
         for (unsigned i = 0; i < num_components; ++i) {
            int64_t val;
            switch (load->def.bit_size) {
            case 32:
               val = load->value.i32[new_swizzle[i]];
               break;
            case 64:
               val = load->value.i64[new_swizzle[i]];
               break;
            default:
               unreachable("unknown bit size");
            }

            if (val != const_val->data.i)
               return false;
         }
         return true;

      case nir_type_uint:
      case nir_type_bool32:
         for (unsigned i = 0; i < num_components; ++i) {
            uint64_t val;
            switch (load->def.bit_size) {
            case 32:
               val = load->value.u32[new_swizzle[i]];
               break;
            case 64:
               val = load->value.u64[new_swizzle[i]];
               break;
            default:
               unreachable("unknown bit size");
            }

            if (val != const_val->data.u)
               return false;
         }
         return true;

      default:
         unreachable("Invalid alu source type");
      }
   }

   default:
      unreachable("Invalid search value type");
   }
}
234
/* Attempts to match a search expression against an ALU instruction,
 * recursing through its sources via match_value.
 *
 * Rejects the match when an inexact search pattern would rewrite an
 * instruction tree containing an "exact" instruction.  For commutative
 * two-input opcodes, the sources are retried in swapped order after
 * restoring the variable bindings made by the failed first attempt.
 */
static bool
match_expression(const nir_search_expression *expr, nir_alu_instr *instr,
                 unsigned num_components, const uint8_t *swizzle,
                 struct match_state *state)
{
   if (instr->op != expr->opcode)
      return false;

   assert(instr->dest.dest.is_ssa);

   state->inexact_match = expr->inexact || state->inexact_match;
   state->has_exact_alu = instr->exact || state->has_exact_alu;
   if (state->inexact_match && state->has_exact_alu)
      return false;

   assert(!instr->dest.saturate);
   assert(nir_op_infos[instr->op].num_inputs > 0);

   /* If we have an explicitly sized destination, we can only handle the
    * identity swizzle.  While dot(vec3(a, b, c).zxy) is a valid
    * expression, we don't have the information right now to propagate that
    * swizzle through.  We can only properly propagate swizzles if the
    * instruction is vectorized.
    */
   if (nir_op_infos[instr->op].output_size != 0) {
      for (unsigned i = 0; i < num_components; i++) {
         if (swizzle[i] != i)
            return false;
      }
   }

   /* Stash off the current variables_seen bitmask.  This way we can
    * restore it prior to matching in the commutative case below.
    */
   unsigned variables_seen_stash = state->variables_seen;

   bool matched = true;
   for (unsigned i = 0; i < nir_op_infos[instr->op].num_inputs; i++) {
      if (!match_value(expr->srcs[i], instr, i, num_components,
                       swizzle, state)) {
         matched = false;
         break;
      }
   }

   if (matched)
      return true;

   if (nir_op_infos[instr->op].algebraic_properties & NIR_OP_IS_COMMUTATIVE) {
      assert(nir_op_infos[instr->op].num_inputs == 2);

      /* Restore the variables_seen bitmask.  If we don't do this, then we
       * could end up with an erroneous failure due to variables found in the
       * first match attempt above not matching those in the second.
       */
      state->variables_seen = variables_seen_stash;

      /* Retry with the operands swapped: pattern src 0 against instruction
       * src 1 and vice versa.
       */
      if (!match_value(expr->srcs[0], instr, 1, num_components,
                       swizzle, state))
         return false;

      return match_value(expr->srcs[1], instr, 0, num_components,
                         swizzle, state);
   } else {
      return false;
   }
}
302
/* A tree mirroring the shape of a replacement expression, used to resolve
 * the bit size of every value before any replacement instructions are built.
 * A size of 0 in any of the "size" fields means "not yet known".
 */
typedef struct bitsize_tree {
   unsigned num_srcs;            /* number of children (ALU inputs) */
   struct bitsize_tree *srcs[4]; /* one child per ALU source */

   unsigned common_size;   /* size shared by unsized srcs/dest of this node */
   bool is_src_sized[4];   /* true if the opcode fixes this source's size */
   bool is_dest_sized;     /* true if the opcode fixes the dest size */

   unsigned dest_size;     /* resolved destination bit size */
   unsigned src_size[4];   /* resolved source bit sizes */
} bitsize_tree;
314
315 static bitsize_tree *
316 build_bitsize_tree(void *mem_ctx, struct match_state *state,
317 const nir_search_value *value)
318 {
319 bitsize_tree *tree = ralloc(mem_ctx, bitsize_tree);
320
321 switch (value->type) {
322 case nir_search_value_expression: {
323 nir_search_expression *expr = nir_search_value_as_expression(value);
324 nir_op_info info = nir_op_infos[expr->opcode];
325 tree->num_srcs = info.num_inputs;
326 tree->common_size = 0;
327 for (unsigned i = 0; i < info.num_inputs; i++) {
328 tree->is_src_sized[i] = !!nir_alu_type_get_type_size(info.input_types[i]);
329 if (tree->is_src_sized[i])
330 tree->src_size[i] = nir_alu_type_get_type_size(info.input_types[i]);
331 tree->srcs[i] = build_bitsize_tree(mem_ctx, state, expr->srcs[i]);
332 }
333 tree->is_dest_sized = !!nir_alu_type_get_type_size(info.output_type);
334 if (tree->is_dest_sized)
335 tree->dest_size = nir_alu_type_get_type_size(info.output_type);
336 break;
337 }
338
339 case nir_search_value_variable: {
340 nir_search_variable *var = nir_search_value_as_variable(value);
341 tree->num_srcs = 0;
342 tree->is_dest_sized = true;
343 tree->dest_size = nir_src_bit_size(state->variables[var->variable].src);
344 break;
345 }
346
347 case nir_search_value_constant: {
348 tree->num_srcs = 0;
349 tree->is_dest_sized = false;
350 tree->common_size = 0;
351 break;
352 }
353 }
354
355 return tree;
356 }
357
/* Bottom-up pass over the bitsize tree: resolves each child's size,
 * checks it against this node's explicit per-source sizes, and records a
 * per-node "common" size shared by all unsized sources and (if unsized)
 * the destination.
 *
 * Returns this node's destination bit size, or 0 if still unknown (to be
 * forced later by bitsize_tree_filter_down).
 */
static unsigned
bitsize_tree_filter_up(bitsize_tree *tree)
{
   for (unsigned i = 0; i < tree->num_srcs; i++) {
      unsigned src_size = bitsize_tree_filter_up(tree->srcs[i]);
      if (src_size == 0)
         continue;

      if (tree->is_src_sized[i]) {
         /* Explicitly sized slot: the child must agree. */
         assert(src_size == tree->src_size[i]);
      } else if (tree->common_size != 0) {
         /* All unsized slots must share the same size. */
         assert(src_size == tree->common_size);
         tree->src_size[i] = src_size;
      } else {
         /* First unsized slot with a known size establishes the common
          * size for this node.
          */
         tree->common_size = src_size;
         tree->src_size[i] = src_size;
      }
   }

   if (tree->num_srcs && tree->common_size) {
      if (tree->dest_size == 0)
         tree->dest_size = tree->common_size;
      else if (!tree->is_dest_sized)
         assert(tree->dest_size == tree->common_size);

      /* Back-fill any unsized sources whose children returned 0. */
      for (unsigned i = 0; i < tree->num_srcs; i++) {
         if (!tree->src_size[i])
            tree->src_size[i] = tree->common_size;
      }
   }

   return tree->dest_size;
}
391
/* Top-down pass: forces "size" onto this node's destination (asserting
 * consistency if it was already resolved) and pushes sizes down into any
 * sources that remained unsized after bitsize_tree_filter_up.
 */
static void
bitsize_tree_filter_down(bitsize_tree *tree, unsigned size)
{
   if (tree->dest_size)
      assert(tree->dest_size == size);
   else
      tree->dest_size = size;

   /* An unsized destination shares the node's common size, so the forced
    * size must propagate there too.
    */
   if (!tree->is_dest_sized) {
      if (tree->common_size)
         assert(tree->common_size == size);
      else
         tree->common_size = size;
   }

   for (unsigned i = 0; i < tree->num_srcs; i++) {
      if (!tree->src_size[i]) {
         assert(tree->common_size);
         tree->src_size[i] = tree->common_size;
      }
      bitsize_tree_filter_down(tree->srcs[i], tree->src_size[i]);
   }
}
415
416 static nir_alu_src
417 construct_value(const nir_search_value *value,
418 unsigned num_components, bitsize_tree *bitsize,
419 struct match_state *state,
420 nir_instr *instr, void *mem_ctx)
421 {
422 switch (value->type) {
423 case nir_search_value_expression: {
424 const nir_search_expression *expr = nir_search_value_as_expression(value);
425
426 if (nir_op_infos[expr->opcode].output_size != 0)
427 num_components = nir_op_infos[expr->opcode].output_size;
428
429 nir_alu_instr *alu = nir_alu_instr_create(mem_ctx, expr->opcode);
430 nir_ssa_dest_init(&alu->instr, &alu->dest.dest, num_components,
431 bitsize->dest_size, NULL);
432 alu->dest.write_mask = (1 << num_components) - 1;
433 alu->dest.saturate = false;
434
435 /* We have no way of knowing what values in a given search expression
436 * map to a particular replacement value. Therefore, if the
437 * expression we are replacing has any exact values, the entire
438 * replacement should be exact.
439 */
440 alu->exact = state->has_exact_alu;
441
442 for (unsigned i = 0; i < nir_op_infos[expr->opcode].num_inputs; i++) {
443 /* If the source is an explicitly sized source, then we need to reset
444 * the number of components to match.
445 */
446 if (nir_op_infos[alu->op].input_sizes[i] != 0)
447 num_components = nir_op_infos[alu->op].input_sizes[i];
448
449 alu->src[i] = construct_value(expr->srcs[i],
450 num_components, bitsize->srcs[i],
451 state, instr, mem_ctx);
452 }
453
454 nir_instr_insert_before(instr, &alu->instr);
455
456 nir_alu_src val;
457 val.src = nir_src_for_ssa(&alu->dest.dest.ssa);
458 val.negate = false;
459 val.abs = false,
460 memcpy(val.swizzle, identity_swizzle, sizeof val.swizzle);
461
462 return val;
463 }
464
465 case nir_search_value_variable: {
466 const nir_search_variable *var = nir_search_value_as_variable(value);
467 assert(state->variables_seen & (1 << var->variable));
468
469 nir_alu_src val = { NIR_SRC_INIT };
470 nir_alu_src_copy(&val, &state->variables[var->variable], mem_ctx);
471
472 assert(!var->is_constant);
473
474 return val;
475 }
476
477 case nir_search_value_constant: {
478 const nir_search_constant *c = nir_search_value_as_constant(value);
479 nir_load_const_instr *load = nir_load_const_instr_create(mem_ctx, 1);
480
481 switch (c->type) {
482 case nir_type_float:
483 load->def.name = ralloc_asprintf(load, "%f", c->data.d);
484 switch (bitsize->dest_size) {
485 case 32:
486 load->value.f32[0] = c->data.d;
487 break;
488 case 64:
489 load->value.f64[0] = c->data.d;
490 break;
491 default:
492 unreachable("unknown bit size");
493 }
494 break;
495
496 case nir_type_int:
497 load->def.name = ralloc_asprintf(load, "%ld", c->data.i);
498 switch (bitsize->dest_size) {
499 case 32:
500 load->value.i32[0] = c->data.i;
501 break;
502 case 64:
503 load->value.i64[0] = c->data.i;
504 break;
505 default:
506 unreachable("unknown bit size");
507 }
508 break;
509
510 case nir_type_uint:
511 load->def.name = ralloc_asprintf(load, "%lu", c->data.u);
512 switch (bitsize->dest_size) {
513 case 32:
514 load->value.u32[0] = c->data.u;
515 break;
516 case 64:
517 load->value.u64[0] = c->data.u;
518 break;
519 default:
520 unreachable("unknown bit size");
521 }
522
523 case nir_type_bool32:
524 load->value.u32[0] = c->data.u;
525 break;
526 default:
527 unreachable("Invalid alu source type");
528 }
529
530 load->def.bit_size = bitsize->dest_size;
531
532 nir_instr_insert_before(instr, &load->instr);
533
534 nir_alu_src val;
535 val.src = nir_src_for_ssa(&load->def);
536 val.negate = false;
537 val.abs = false,
538 memset(val.swizzle, 0, sizeof val.swizzle);
539
540 return val;
541 }
542
543 default:
544 unreachable("Invalid search value type");
545 }
546 }
547
/* Attempts to match "search" against the given ALU instruction and, on
 * success, replaces it with the instructions for "replace".
 *
 * Returns the newly created move instruction whose destination now carries
 * all of the original instruction's uses, or NULL if the pattern did not
 * match.  The original instruction is removed; any other instructions in
 * the matched sub-tree are left for dead-code elimination.
 */
nir_alu_instr *
nir_replace_instr(nir_alu_instr *instr, const nir_search_expression *search,
                  const nir_search_value *replace, void *mem_ctx)
{
   uint8_t swizzle[4] = { 0, 0, 0, 0 };

   /* Start matching with the identity swizzle over the destination's
    * components.
    */
   for (unsigned i = 0; i < instr->dest.dest.ssa.num_components; ++i)
      swizzle[i] = i;

   assert(instr->dest.dest.is_ssa);

   struct match_state state;
   state.inexact_match = false;
   state.has_exact_alu = false;
   state.variables_seen = 0;

   if (!match_expression(search, instr, instr->dest.dest.ssa.num_components,
                         swizzle, &state))
      return NULL;

   /* Resolve the bit size of every replacement value before building any
    * instructions (up pass, then force the root size downward).
    */
   void *bitsize_ctx = ralloc_context(NULL);
   bitsize_tree *tree = build_bitsize_tree(bitsize_ctx, &state, replace);
   bitsize_tree_filter_up(tree);
   bitsize_tree_filter_down(tree, instr->dest.dest.ssa.bit_size);

   /* Inserting a mov may be unnecessary.  However, it's much easier to
    * simply let copy propagation clean this up than to try to go through
    * and rewrite swizzles ourselves.
    */
   nir_alu_instr *mov = nir_alu_instr_create(mem_ctx, nir_op_imov);
   mov->dest.write_mask = instr->dest.write_mask;
   nir_ssa_dest_init(&mov->instr, &mov->dest.dest,
                     instr->dest.dest.ssa.num_components,
                     instr->dest.dest.ssa.bit_size, NULL);

   mov->src[0] = construct_value(replace,
                                 instr->dest.dest.ssa.num_components, tree,
                                 &state, &instr->instr, mem_ctx);
   nir_instr_insert_before(&instr->instr, &mov->instr);

   nir_ssa_def_rewrite_uses(&instr->dest.dest.ssa,
                            nir_src_for_ssa(&mov->dest.dest.ssa));

   /* We know this one has no more uses because we just rewrote them all,
    * so we can remove it.  The rest of the matched expression, however, we
    * don't know so much about.  We'll just let dead code clean them up.
    */
   nir_instr_remove(&instr->instr);

   ralloc_free(bitsize_ctx);

   return mov;
}