/* Analysis Utilities for Loop Vectorization.
   Copyright (C) 2006, 2007 Free Software Foundation, Inc.
   Contributed by Dorit Nuzman <dorit@il.ibm.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"
#include "tree.h"

#include "target.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "timevar.h"
#include "cfgloop.h"
#include "expr.h"
#include "optabs.h"
#include "params.h"
#include "tree-data-ref.h"
#include "tree-vectorizer.h"
#include "recog.h"
#include "toplev.h"

/* Function prototypes */
static void vect_pattern_recog_1
  (tree (*) (tree, tree *, tree *), block_stmt_iterator);
static bool widened_name_p (tree, tree, tree *, tree *);

/* Pattern recognition functions  */
static tree vect_recog_widen_sum_pattern (tree, tree *, tree *);
static tree vect_recog_widen_mult_pattern (tree, tree *, tree *);
static tree vect_recog_dot_prod_pattern (tree, tree *, tree *);
static tree vect_recog_pow_pattern (tree, tree *, tree *);
static vect_recog_func_ptr vect_vect_recog_func_ptrs[NUM_PATTERNS] = {
  vect_recog_widen_mult_pattern,
  vect_recog_widen_sum_pattern,
  vect_recog_dot_prod_pattern,
  vect_recog_pow_pattern};
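
/* A recognizer is simply a function with the signature used in the table
   above.  As a sketch of the calling convention (the name
   vect_recog_example_pattern is hypothetical, and NUM_PATTERNS is assumed
   to be defined next to the vect_recog_func_ptr typedef in
   tree-vectorizer.h):

     static tree
     vect_recog_example_pattern (tree last_stmt, tree *type_in,
                                 tree *type_out)
     {
       if (TREE_CODE (last_stmt) != GIMPLE_MODIFY_STMT)
         return NULL;
       return NULL;
     }

   where the body, on success, sets *type_in and *type_out and returns the
   replacement expression; the function is then listed in
   vect_vect_recog_func_ptrs and NUM_PATTERNS is bumped accordingly.  */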


/* Function widened_name_p

   Check whether NAME, an ssa-name used in USE_STMT,
   is a result of a type-promotion, such that:
     DEF_STMT: NAME = NOP (name0)
   where the type of name0 (HALF_TYPE) is smaller than the type of NAME.
*/

static bool
widened_name_p (tree name, tree use_stmt, tree *half_type, tree *def_stmt)
{
  tree dummy;
  loop_vec_info loop_vinfo;
  stmt_vec_info stmt_vinfo;
  tree expr;
  tree type = TREE_TYPE (name);
  tree oprnd0;
  enum vect_def_type dt;
  tree def;

  stmt_vinfo = vinfo_for_stmt (use_stmt);
  loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);

  if (!vect_is_simple_use (name, loop_vinfo, def_stmt, &def, &dt))
    return false;

  if (dt != vect_loop_def
      && dt != vect_invariant_def && dt != vect_constant_def)
    return false;

  if (! *def_stmt)
    return false;

  if (TREE_CODE (*def_stmt) != GIMPLE_MODIFY_STMT)
    return false;

  expr = GIMPLE_STMT_OPERAND (*def_stmt, 1);
  if (TREE_CODE (expr) != NOP_EXPR)
    return false;

  oprnd0 = TREE_OPERAND (expr, 0);

  *half_type = TREE_TYPE (oprnd0);
  if (!INTEGRAL_TYPE_P (type) || !INTEGRAL_TYPE_P (*half_type)
      || (TYPE_UNSIGNED (type) != TYPE_UNSIGNED (*half_type))
      || (TYPE_PRECISION (type) < (TYPE_PRECISION (*half_type) * 2)))
    return false;

  if (!vect_is_simple_use (oprnd0, loop_vinfo, &dummy, &dummy, &dt))
    return false;

  return true;
}
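
/* For illustration, a sketch of hypothetical user code (not taken from any
   testcase) for which widened_name_p succeeds: the name must be produced by
   a conversion that at least doubles the precision and keeps the signedness.

     #include <stdint.h>

     int32_t
     widen (int16_t x)
     {
       return (int32_t) x;
     }

   Here the gimplified body contains NAME = (int32_t) x and HALF_TYPE is
   int16_t.  Conversions that change signedness, or that widen by less than
   a factor of two, are rejected.  */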


/* Function vect_recog_dot_prod_pattern

   Try to find the following pattern:

     type x_t, y_t;
     TYPE1 prod;
     TYPE2 sum = init;
   loop:
     sum_0 = phi <init, sum_1>
     S1  x_t = ...
     S2  y_t = ...
     S3  x_T = (TYPE1) x_t;
     S4  y_T = (TYPE1) y_t;
     S5  prod = x_T * y_T;
     [S6  prod = (TYPE2) prod;  #optional]
     S7  sum_1 = prod + sum_0;

   where 'TYPE1' is exactly double the size of type 'type', and 'TYPE2' is the
   same size as 'TYPE1' or bigger. This is a special case of a reduction
   computation.

   Input:

   * LAST_STMT: A stmt from which the pattern search begins. In the example,
   when this function is called with S7, the pattern {S3,S4,S5,S6,S7} will be
   detected.

   Output:

   * TYPE_IN: The type of the input arguments to the pattern.

   * TYPE_OUT: The type of the output of this pattern.

   * Return value: A new stmt that will be used to replace the sequence of
   stmts that constitute the pattern. In this case it will be:
        WIDEN_DOT_PRODUCT <x_t, y_t, sum_0>

   Note: The dot-prod idiom is a widening reduction pattern that is
         vectorized without preserving all the intermediate results. It
         produces only N/2 (widened) results (by summing up pairs of
         intermediate results) rather than all N results.  Therefore, we
         cannot allow this pattern when we want to get all the results and in
         the correct order (as is the case when this computation is in an
         inner-loop nested in an outer-loop that is being vectorized).  */
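
/* For illustration, a sketch of hypothetical user code that matches the
   pattern above, with 'type' being int16_t and TYPE1 == TYPE2 == int32_t:

     #include <stdint.h>

     int32_t
     dot_prod (const int16_t *x, const int16_t *y, int n)
     {
       int32_t sum = 0;
       int i;

       for (i = 0; i < n; i++)
         sum += (int32_t) x[i] * (int32_t) y[i];
       return sum;
     }

   The widening multiply and the accumulation are replaced by a single
   DOT_PROD_EXPR <x[i], y[i], sum>, which targets with a widening
   multiply-accumulate instruction can vectorize directly.  */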

static tree
vect_recog_dot_prod_pattern (tree last_stmt, tree *type_in, tree *type_out)
{
  tree stmt, expr;
  tree oprnd0, oprnd1;
  tree oprnd00, oprnd01;
  stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
  tree type, half_type;
  tree pattern_expr;
  tree prod_type;
  loop_vec_info loop_info = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
  struct loop *loop = LOOP_VINFO_LOOP (loop_info);

  if (TREE_CODE (last_stmt) != GIMPLE_MODIFY_STMT)
    return NULL;

  expr = GIMPLE_STMT_OPERAND (last_stmt, 1);
  type = TREE_TYPE (expr);

  /* Look for the following pattern
          DX = (TYPE1) X;
          DY = (TYPE1) Y;
          DPROD = DX * DY;
          DDPROD = (TYPE2) DPROD;
          sum_1 = DDPROD + sum_0;
     In which
     - DX is double the size of X
     - DY is double the size of Y
     - DX, DY, DPROD all have the same type
     - sum is the same size as DPROD or bigger
     - sum has been recognized as a reduction variable.

     This is equivalent to:
       DPROD = X w* Y;          #widen mult
       sum_1 = DPROD w+ sum_0;  #widen summation
     or
       DPROD = X w* Y;          #widen mult
       sum_1 = DPROD + sum_0;   #summation
   */

  /* Starting from LAST_STMT, follow the defs of its uses in search
     of the above pattern.  */

  if (TREE_CODE (expr) != PLUS_EXPR)
    return NULL;

  if (STMT_VINFO_IN_PATTERN_P (stmt_vinfo))
    {
      /* Has been detected as widening-summation?  */

      stmt = STMT_VINFO_RELATED_STMT (stmt_vinfo);
      expr = GIMPLE_STMT_OPERAND (stmt, 1);
      type = TREE_TYPE (expr);
      if (TREE_CODE (expr) != WIDEN_SUM_EXPR)
        return NULL;
      oprnd0 = TREE_OPERAND (expr, 0);
      oprnd1 = TREE_OPERAND (expr, 1);
      half_type = TREE_TYPE (oprnd0);
    }
  else
    {
      tree def_stmt;

      if (STMT_VINFO_DEF_TYPE (stmt_vinfo) != vect_reduction_def)
        return NULL;
      oprnd0 = TREE_OPERAND (expr, 0);
      oprnd1 = TREE_OPERAND (expr, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (oprnd0)) != TYPE_MAIN_VARIANT (type)
          || TYPE_MAIN_VARIANT (TREE_TYPE (oprnd1)) != TYPE_MAIN_VARIANT (type))
        return NULL;
      stmt = last_stmt;

      if (widened_name_p (oprnd0, stmt, &half_type, &def_stmt))
        {
          stmt = def_stmt;
          expr = GIMPLE_STMT_OPERAND (stmt, 1);
          oprnd0 = TREE_OPERAND (expr, 0);
        }
      else
        half_type = type;
    }

  /* So far so good. Since last_stmt was detected as a (summation) reduction,
     we know that oprnd1 is the reduction variable (defined by a loop-header
     phi), and oprnd0 is an ssa-name defined by a stmt in the loop body.
     Left to check that oprnd0 is defined by a (widen_)mult_expr  */

  prod_type = half_type;
  stmt = SSA_NAME_DEF_STMT (oprnd0);
  gcc_assert (stmt);
  stmt_vinfo = vinfo_for_stmt (stmt);
  gcc_assert (stmt_vinfo);
  if (STMT_VINFO_DEF_TYPE (stmt_vinfo) != vect_loop_def)
    return NULL;
  /* FORNOW.  Can continue analyzing the def-use chain when this stmt is a phi
     inside the loop (in case we are analyzing an outer-loop).  */
  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
    return NULL;
  expr = GIMPLE_STMT_OPERAND (stmt, 1);
  if (TREE_CODE (expr) != MULT_EXPR)
    return NULL;
  if (STMT_VINFO_IN_PATTERN_P (stmt_vinfo))
    {
      /* Has been detected as a widening multiplication?  */

      stmt = STMT_VINFO_RELATED_STMT (stmt_vinfo);
      expr = GIMPLE_STMT_OPERAND (stmt, 1);
      if (TREE_CODE (expr) != WIDEN_MULT_EXPR)
        return NULL;
      stmt_vinfo = vinfo_for_stmt (stmt);
      gcc_assert (stmt_vinfo);
      gcc_assert (STMT_VINFO_DEF_TYPE (stmt_vinfo) == vect_loop_def);
      oprnd00 = TREE_OPERAND (expr, 0);
      oprnd01 = TREE_OPERAND (expr, 1);
    }
  else
    {
      tree half_type0, half_type1;
      tree def_stmt;
      tree oprnd0, oprnd1;

      oprnd0 = TREE_OPERAND (expr, 0);
      oprnd1 = TREE_OPERAND (expr, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (oprnd0))
          != TYPE_MAIN_VARIANT (prod_type)
          || TYPE_MAIN_VARIANT (TREE_TYPE (oprnd1))
          != TYPE_MAIN_VARIANT (prod_type))
        return NULL;
      if (!widened_name_p (oprnd0, stmt, &half_type0, &def_stmt))
        return NULL;
      oprnd00 = TREE_OPERAND (GIMPLE_STMT_OPERAND (def_stmt, 1), 0);
      if (!widened_name_p (oprnd1, stmt, &half_type1, &def_stmt))
        return NULL;
      oprnd01 = TREE_OPERAND (GIMPLE_STMT_OPERAND (def_stmt, 1), 0);
      if (TYPE_MAIN_VARIANT (half_type0) != TYPE_MAIN_VARIANT (half_type1))
        return NULL;
      if (TYPE_PRECISION (prod_type) != TYPE_PRECISION (half_type0) * 2)
        return NULL;
    }

  half_type = TREE_TYPE (oprnd00);
  *type_in = half_type;
  *type_out = type;

  /* Pattern detected. Create a stmt to be used to replace the pattern: */
  pattern_expr = build3 (DOT_PROD_EXPR, type, oprnd00, oprnd01, oprnd1);
  if (vect_print_dump_info (REPORT_DETAILS))
    {
      fprintf (vect_dump, "vect_recog_dot_prod_pattern: detected: ");
      print_generic_expr (vect_dump, pattern_expr, TDF_SLIM);
    }

  /* We don't allow changing the order of the computation in the inner-loop
     when doing outer-loop vectorization.  */
  if (nested_in_vect_loop_p (loop, last_stmt))
    {
      if (vect_print_dump_info (REPORT_DETAILS))
        fprintf (vect_dump, "vect_recog_dot_prod_pattern: not allowed.");
      return NULL;
    }

  return pattern_expr;
}


/* Function vect_recog_widen_mult_pattern

   Try to find the following pattern:

     type a_t, b_t;
     TYPE a_T, b_T, prod_T;

     S1  a_t = ;
     S2  b_t = ;
     S3  a_T = (TYPE) a_t;
     S4  b_T = (TYPE) b_t;
     S5  prod_T = a_T * b_T;

   where type 'TYPE' is at least double the size of type 'type'.

   Input:

   * LAST_STMT: A stmt from which the pattern search begins. In the example,
   when this function is called with S5, the pattern {S3,S4,S5} is detected.

   Output:

   * TYPE_IN: The type of the input arguments to the pattern.

   * TYPE_OUT: The type of the output of this pattern.

   * Return value: A new stmt that will be used to replace the sequence of
   stmts that constitute the pattern. In this case it will be:
        WIDEN_MULT <a_t, b_t>
*/
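
/* For illustration, a sketch of hypothetical user code that matches the
   widen-mult pattern, with 'type' being int16_t and TYPE being int32_t:

     #include <stdint.h>

     void
     widen_mult (int32_t *out, const int16_t *a, const int16_t *b, int n)
     {
       int i;

       for (i = 0; i < n; i++)
         out[i] = (int32_t) a[i] * (int32_t) b[i];
     }

   The two casts and the multiply become a single WIDEN_MULT_EXPR on the
   narrow operands, provided supportable_widening_operation reports that the
   target can widen-multiply the chosen vector type.  */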

static tree
vect_recog_widen_mult_pattern (tree last_stmt,
                               tree *type_in,
                               tree *type_out)
{
  tree expr;
  tree def_stmt0, def_stmt1;
  tree oprnd0, oprnd1;
  tree type, half_type0, half_type1;
  tree pattern_expr;
  tree vectype;
  tree dummy;
  enum tree_code dummy_code;

  if (TREE_CODE (last_stmt) != GIMPLE_MODIFY_STMT)
    return NULL;

  expr = GIMPLE_STMT_OPERAND (last_stmt, 1);
  type = TREE_TYPE (expr);

  /* Starting from LAST_STMT, follow the defs of its uses in search
     of the above pattern.  */

  if (TREE_CODE (expr) != MULT_EXPR)
    return NULL;

  oprnd0 = TREE_OPERAND (expr, 0);
  oprnd1 = TREE_OPERAND (expr, 1);
  if (TYPE_MAIN_VARIANT (TREE_TYPE (oprnd0)) != TYPE_MAIN_VARIANT (type)
      || TYPE_MAIN_VARIANT (TREE_TYPE (oprnd1)) != TYPE_MAIN_VARIANT (type))
    return NULL;

  /* Check argument 0 */
  if (!widened_name_p (oprnd0, last_stmt, &half_type0, &def_stmt0))
    return NULL;
  oprnd0 = TREE_OPERAND (GIMPLE_STMT_OPERAND (def_stmt0, 1), 0);

  /* Check argument 1 */
  if (!widened_name_p (oprnd1, last_stmt, &half_type1, &def_stmt1))
    return NULL;
  oprnd1 = TREE_OPERAND (GIMPLE_STMT_OPERAND (def_stmt1, 1), 0);

  if (TYPE_MAIN_VARIANT (half_type0) != TYPE_MAIN_VARIANT (half_type1))
    return NULL;

  /* Pattern detected.  */
  if (vect_print_dump_info (REPORT_DETAILS))
    fprintf (vect_dump, "vect_recog_widen_mult_pattern: detected: ");

  /* Check target support  */
  vectype = get_vectype_for_scalar_type (half_type0);
  if (!vectype
      || !supportable_widening_operation (WIDEN_MULT_EXPR, last_stmt, vectype,
                                          &dummy, &dummy, &dummy_code,
                                          &dummy_code))
    return NULL;

  *type_in = vectype;
  *type_out = NULL_TREE;

  /* Pattern supported. Create a stmt to be used to replace the pattern: */
  pattern_expr = build2 (WIDEN_MULT_EXPR, type, oprnd0, oprnd1);
  if (vect_print_dump_info (REPORT_DETAILS))
    print_generic_expr (vect_dump, pattern_expr, TDF_SLIM);
  return pattern_expr;
}


/* Function vect_recog_pow_pattern

   Try to find the following pattern:

     x = POW (y, N);

   with POW being one of pow, powf, powi, powif and N being
   either 2 or 0.5.

   Input:

   * LAST_STMT: A stmt from which the pattern search begins.

   Output:

   * TYPE_IN: The type of the input arguments to the pattern.

   * TYPE_OUT: The type of the output of this pattern.

   * Return value: A new stmt that will be used to replace the sequence of
   stmts that constitute the pattern. In this case it will be:
        x * x
   or
        sqrt (x)
*/
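
/* For illustration, a sketch of hypothetical user code containing calls
   that this pattern rewrites, here for float:

     #include <math.h>

     float square (float y) { return powf (y, 2.0f); }
     float root   (float y) { return powf (y, 0.5f); }

   square is rewritten as y * y, which needs nothing beyond an ordinary
   vector multiply; root is rewritten as a call to the sqrt builtin for the
   base type (sqrtf here), and only if vectorizable_function reports that a
   vectorized sqrt is available.  */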

static tree
vect_recog_pow_pattern (tree last_stmt, tree *type_in, tree *type_out)
{
  tree expr;
  tree type;
  tree fn, base, exp;

  if (TREE_CODE (last_stmt) != GIMPLE_MODIFY_STMT)
    return NULL;

  expr = GIMPLE_STMT_OPERAND (last_stmt, 1);
  type = TREE_TYPE (expr);

  if (TREE_CODE (expr) != CALL_EXPR)
    return NULL_TREE;

  fn = get_callee_fndecl (expr);
  /* get_callee_fndecl can return NULL_TREE (e.g. for an indirect call);
     bail out before looking at DECL_FUNCTION_CODE in that case.  */
  if (!fn)
    return NULL_TREE;
  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_POWIF:
    case BUILT_IN_POWI:
    case BUILT_IN_POWF:
    case BUILT_IN_POW:
      base = CALL_EXPR_ARG (expr, 0);
      exp = CALL_EXPR_ARG (expr, 1);
      if (TREE_CODE (exp) != REAL_CST
          && TREE_CODE (exp) != INTEGER_CST)
        return NULL_TREE;
      break;

    default:
      return NULL_TREE;
    }

  /* We now have a pow or powi builtin function call with a constant
     exponent.  */

  *type_out = NULL_TREE;

  /* Catch squaring.  */
  if ((host_integerp (exp, 0)
       && tree_low_cst (exp, 0) == 2)
      || (TREE_CODE (exp) == REAL_CST
          && REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst2)))
    {
      *type_in = TREE_TYPE (base);
      return build2 (MULT_EXPR, TREE_TYPE (base), base, base);
    }

  /* Catch square root.  */
  if (TREE_CODE (exp) == REAL_CST
      && REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconsthalf))
    {
      tree newfn = mathfn_built_in (TREE_TYPE (base), BUILT_IN_SQRT);
      *type_in = get_vectype_for_scalar_type (TREE_TYPE (base));
      if (*type_in)
        {
          newfn = build_call_expr (newfn, 1, base);
          if (vectorizable_function (newfn, *type_in, *type_in) != NULL_TREE)
            return newfn;
        }
    }

  return NULL_TREE;
}


/* Function vect_recog_widen_sum_pattern

   Try to find the following pattern:

     type x_t;
     TYPE x_T, sum = init;
   loop:
     sum_0 = phi <init, sum_1>
     S1  x_t = *p;
     S2  x_T = (TYPE) x_t;
     S3  sum_1 = x_T + sum_0;

   where type 'TYPE' is at least double the size of type 'type', i.e., we're
   summing elements of type 'type' into an accumulator of type 'TYPE'. This is
   a special case of a reduction computation.

   Input:

   * LAST_STMT: A stmt from which the pattern search begins. In the example,
   when this function is called with S3, the pattern {S2,S3} will be detected.

   Output:

   * TYPE_IN: The type of the input arguments to the pattern.

   * TYPE_OUT: The type of the output of this pattern.

   * Return value: A new stmt that will be used to replace the sequence of
   stmts that constitute the pattern. In this case it will be:
        WIDEN_SUM <x_t, sum_0>

   Note: The widening-sum idiom is a widening reduction pattern that is
         vectorized without preserving all the intermediate results. It
         produces only N/2 (widened) results (by summing up pairs of
         intermediate results) rather than all N results.  Therefore, we
         cannot allow this pattern when we want to get all the results and in
         the correct order (as is the case when this computation is in an
         inner-loop nested in an outer-loop that is being vectorized).  */
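
/* For illustration, a sketch of hypothetical user code that matches the
   widen-sum pattern, with 'type' being int16_t and TYPE being int32_t:

     #include <stdint.h>

     int32_t
     widen_sum (const int16_t *p, int n)
     {
       int32_t sum = 0;
       int i;

       for (i = 0; i < n; i++)
         sum += p[i];
       return sum;
     }

   The promotion of p[i] and the addition become a single
   WIDEN_SUM_EXPR <p[i], sum_0>, producing a vector of widened partial sums
   that is reduced after the loop.  */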

static tree
vect_recog_widen_sum_pattern (tree last_stmt, tree *type_in, tree *type_out)
{
  tree stmt, expr;
  tree oprnd0, oprnd1;
  stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
  tree type, half_type;
  tree pattern_expr;
  loop_vec_info loop_info = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
  struct loop *loop = LOOP_VINFO_LOOP (loop_info);

  if (TREE_CODE (last_stmt) != GIMPLE_MODIFY_STMT)
    return NULL;

  expr = GIMPLE_STMT_OPERAND (last_stmt, 1);
  type = TREE_TYPE (expr);

  /* Look for the following pattern
          DX = (TYPE) X;
          sum_1 = DX + sum_0;
     In which DX is at least double the size of X, and sum_1 has been
     recognized as a reduction variable.
   */

  /* Starting from LAST_STMT, follow the defs of its uses in search
     of the above pattern.  */

  if (TREE_CODE (expr) != PLUS_EXPR)
    return NULL;

  if (STMT_VINFO_DEF_TYPE (stmt_vinfo) != vect_reduction_def)
    return NULL;

  oprnd0 = TREE_OPERAND (expr, 0);
  oprnd1 = TREE_OPERAND (expr, 1);
  if (TYPE_MAIN_VARIANT (TREE_TYPE (oprnd0)) != TYPE_MAIN_VARIANT (type)
      || TYPE_MAIN_VARIANT (TREE_TYPE (oprnd1)) != TYPE_MAIN_VARIANT (type))
    return NULL;

  /* So far so good. Since last_stmt was detected as a (summation) reduction,
     we know that oprnd1 is the reduction variable (defined by a loop-header
     phi), and oprnd0 is an ssa-name defined by a stmt in the loop body.
     Left to check that oprnd0 is defined by a cast from type 'type' to type
     'TYPE'.  */

  if (!widened_name_p (oprnd0, last_stmt, &half_type, &stmt))
    return NULL;

  oprnd0 = TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0);
  *type_in = half_type;
  *type_out = type;

  /* Pattern detected. Create a stmt to be used to replace the pattern: */
  pattern_expr = build2 (WIDEN_SUM_EXPR, type, oprnd0, oprnd1);
  if (vect_print_dump_info (REPORT_DETAILS))
    {
      fprintf (vect_dump, "vect_recog_widen_sum_pattern: detected: ");
      print_generic_expr (vect_dump, pattern_expr, TDF_SLIM);
    }

  /* We don't allow changing the order of the computation in the inner-loop
     when doing outer-loop vectorization.  */
  if (nested_in_vect_loop_p (loop, last_stmt))
    {
      if (vect_print_dump_info (REPORT_DETAILS))
        fprintf (vect_dump, "vect_recog_widen_sum_pattern: not allowed.");
      return NULL;
    }

  return pattern_expr;
}


/* Function vect_pattern_recog_1

   Input:
   PATTERN_RECOG_FUNC: A pointer to a function that detects a certain
        computation pattern.
   STMT: A stmt from which the pattern search should start.

   If PATTERN_RECOG_FUNC successfully detected the pattern, it creates an
   expression that computes the same functionality and can be used to
   replace the sequence of stmts that are involved in the pattern.

   Output:
   This function checks if the expression returned by PATTERN_RECOG_FUNC is
   supported in vector form by the target.  We use 'TYPE_IN' to obtain the
   relevant vector type. If 'TYPE_IN' is already a vector type, then this
   indicates that target support had already been checked by PATTERN_RECOG_FUNC.
   If 'TYPE_OUT' is also returned by PATTERN_RECOG_FUNC, we check that it fits
   the available target pattern.

   This function also does some bookkeeping, as explained in the documentation
   for vect_pattern_recog.  */

static void
vect_pattern_recog_1 (
        tree (* vect_recog_func) (tree, tree *, tree *),
        block_stmt_iterator si)
{
  tree stmt = bsi_stmt (si);
  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
  stmt_vec_info pattern_stmt_info;
  loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
  tree pattern_expr;
  tree pattern_vectype;
  tree type_in, type_out;
  tree pattern_type;
  enum tree_code code;
  tree var, var_name;
  stmt_ann_t ann;

  pattern_expr = (* vect_recog_func) (stmt, &type_in, &type_out);
  if (!pattern_expr)
    return;

  if (VECTOR_MODE_P (TYPE_MODE (type_in)))
    {
      /* No need to check target support (already checked by the pattern
         recognition function).  */
      pattern_vectype = type_in;
    }
  else
    {
      enum machine_mode vec_mode;
      enum insn_code icode;
      optab optab;

      /* Check target support  */
      pattern_vectype = get_vectype_for_scalar_type (type_in);
      if (!pattern_vectype)
        return;

      optab = optab_for_tree_code (TREE_CODE (pattern_expr), pattern_vectype);
      vec_mode = TYPE_MODE (pattern_vectype);
      if (!optab
          || (icode = optab_handler (optab, vec_mode)->insn_code) ==
              CODE_FOR_nothing
          || (type_out
              && (!get_vectype_for_scalar_type (type_out)
                  || (insn_data[icode].operand[0].mode !=
                      TYPE_MODE (get_vectype_for_scalar_type (type_out))))))
        return;
    }

  /* Found a vectorizable pattern.  */
  if (vect_print_dump_info (REPORT_DETAILS))
    {
      fprintf (vect_dump, "pattern recognized: ");
      print_generic_expr (vect_dump, pattern_expr, TDF_SLIM);
    }

  /* Mark the stmts that are involved in the pattern,
     create a new stmt to express the pattern and insert it.  */
  code = TREE_CODE (pattern_expr);
  pattern_type = TREE_TYPE (pattern_expr);
  var = create_tmp_var (pattern_type, "patt");
  add_referenced_var (var);
  var_name = make_ssa_name (var, NULL_TREE);
  pattern_expr = build_gimple_modify_stmt (var_name, pattern_expr);
  SSA_NAME_DEF_STMT (var_name) = pattern_expr;
  bsi_insert_before (&si, pattern_expr, BSI_SAME_STMT);
  ann = stmt_ann (pattern_expr);
  set_stmt_info (ann, new_stmt_vec_info (pattern_expr, loop_vinfo));
  pattern_stmt_info = vinfo_for_stmt (pattern_expr);

  STMT_VINFO_RELATED_STMT (pattern_stmt_info) = stmt;
  STMT_VINFO_DEF_TYPE (pattern_stmt_info) = STMT_VINFO_DEF_TYPE (stmt_info);
  STMT_VINFO_VECTYPE (pattern_stmt_info) = pattern_vectype;
  STMT_VINFO_IN_PATTERN_P (stmt_info) = true;
  STMT_VINFO_RELATED_STMT (stmt_info) = pattern_expr;

  return;
}


/* Function vect_pattern_recog

   Input:
   LOOP_VINFO - a loop_vec_info of a loop in which we want to look for
        computation idioms.

   Output - for each computation idiom that is detected we insert a new stmt
        that provides the same functionality and that can be vectorized. We
        also record some information in the stmt_vec_info of the relevant
        stmts, as explained below:

   At the entry to this function we have the following stmts, with the
   following initial value in the STMT_VINFO fields:

         stmt                     in_pattern_p  related_stmt    vec_stmt
         S1: a_i = ....                 -       -               -
         S2: a_2 = ..use(a_i)..         -       -               -
         S3: a_1 = ..use(a_2)..         -       -               -
         S4: a_0 = ..use(a_1)..         -       -               -
         S5: ... = ..use(a_0)..         -       -               -

   Say the sequence {S1,S2,S3,S4} was detected as a pattern that can be
   represented by a single stmt. We then:
   - create a new stmt S6 that will replace the pattern.
   - insert the new stmt S6 before the last stmt in the pattern
   - fill in the STMT_VINFO fields as follows:

                                  in_pattern_p  related_stmt    vec_stmt
         S1: a_i = ....                 -       -               -
         S2: a_2 = ..use(a_i)..         -       -               -
         S3: a_1 = ..use(a_2)..         -       -               -
       > S6: a_new = ....               -       S4              -
         S4: a_0 = ..use(a_1)..         true    S6              -
         S5: ... = ..use(a_0)..         -       -               -

   (the last stmt in the pattern (S4) and the new pattern stmt (S6) point
    to each other through the RELATED_STMT field).

   S6 will be marked as relevant in vect_mark_stmts_to_be_vectorized instead
   of S4 because it will replace all its uses.  Stmts {S1,S2,S3} will
   remain irrelevant unless used by stmts other than S4.

   If vectorization succeeds, vect_transform_stmt will skip over {S1,S2,S3}
   (because they are marked as irrelevant). It will vectorize S6, and record
   a pointer to the new vector stmt VS6 both from S6 (as usual), and also
   from S4. We do that so that when we get to vectorizing stmts that use the
   def of S4 (like S5 that uses a_0), we'll know where to take the relevant
   vector-def from. S4 will be skipped, and S5 will be vectorized as usual:

                                  in_pattern_p  related_stmt    vec_stmt
         S1: a_i = ....                 -       -               -
         S2: a_2 = ..use(a_i)..         -       -               -
         S3: a_1 = ..use(a_2)..         -       -               -
       > VS6: va_new = ....             -       -               -
         S6: a_new = ....               -       S4              VS6
         S4: a_0 = ..use(a_1)..         true    S6              VS6
       > VS5: ... = ..vuse(va_new)..    -       -               -
         S5: ... = ..use(a_0)..         -       -               -

   DCE could then get rid of {S1,S2,S3,S4,S5,S6} (if their defs are not used
   elsewhere), and we'll end up with:

        VS6: va_new = ....
        VS5: ... = ..vuse(va_new)..

   If vectorization does not succeed, DCE will clean S6 away (its def is
   not used), and we'll end up with the original sequence.
*/

void
vect_pattern_recog (loop_vec_info loop_vinfo)
{
  struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
  basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
  unsigned int nbbs = loop->num_nodes;
  block_stmt_iterator si;
  tree stmt;
  unsigned int i, j;
  tree (* vect_recog_func_ptr) (tree, tree *, tree *);

  if (vect_print_dump_info (REPORT_DETAILS))
    fprintf (vect_dump, "=== vect_pattern_recog ===");

  /* Scan through the loop stmts, applying the pattern recognition
     functions starting at each stmt visited:  */
  for (i = 0; i < nbbs; i++)
    {
      basic_block bb = bbs[i];
      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
        {
          stmt = bsi_stmt (si);

          /* Scan over all generic vect_recog_xxx_pattern functions.  */
          for (j = 0; j < NUM_PATTERNS; j++)
            {
              vect_recog_func_ptr = vect_vect_recog_func_ptrs[j];
              vect_pattern_recog_1 (vect_recog_func_ptr, si);
            }
        }
    }
}