re PR tree-optimization/66772 (ICE at -O2 and -O3 on x86_64-linux-gnu)
[gcc.git] / gcc / tree-ssa-ccp.c
1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000-2015 Free Software Foundation, Inc.
3 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
4 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by the
10 Free Software Foundation; either version 3, or (at your option) any
11 later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* Conditional constant propagation (CCP) is based on the SSA
23 propagation engine (tree-ssa-propagate.c). Constant assignments of
24 the form VAR = CST are propagated from the assignments into uses of
25 VAR, which in turn may generate new constants. The simulation uses
26 a four level lattice to keep track of constant values associated
27 with SSA names. Given an SSA name V_i, it may take one of the
28 following values:
29
30 UNINITIALIZED -> the initial state of the value. This value
31 is replaced with a correct initial value
32 the first time the value is used, so the
33 rest of the pass does not need to care about
34 it. Using this value simplifies initialization
35 of the pass, and prevents us from needlessly
36 scanning statements that are never reached.
37
38 UNDEFINED -> V_i is a local variable whose definition
39 has not been processed yet. Therefore we
40 don't yet know if its value is a constant
41 or not.
42
43 CONSTANT -> V_i has been found to hold a constant
44 value C.
45
46 VARYING -> V_i cannot take a constant value, or if it
47 does, it is not possible to determine it
48 at compile time.
49
50 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
51
52 1- In ccp_visit_stmt, we are interested in assignments whose RHS
53 evaluates into a constant and conditional jumps whose predicate
54 evaluates into a boolean true or false. When an assignment of
55 the form V_i = CONST is found, V_i's lattice value is set to
56 CONSTANT and CONST is associated with it. This causes the
57 	 propagation engine to add all the SSA edges coming out of the
58 assignment into the worklists, so that statements that use V_i
59 can be visited.
60
61 If the statement is a conditional with a constant predicate, we
62 mark the outgoing edges as executable or not executable
63 depending on the predicate's value. This is then used when
64 visiting PHI nodes to know when a PHI argument can be ignored.
65
66
67 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
68 same constant C, then the LHS of the PHI is set to C. This
69 evaluation is known as the "meet operation". Since one of the
70 goals of this evaluation is to optimistically return constant
71 values as often as possible, it uses two main short cuts:
72
73 - If an argument is flowing in through a non-executable edge, it
74 is ignored. This is useful in cases like this:
75
76 if (PRED)
77 a_9 = 3;
78 else
79 a_10 = 100;
80 a_11 = PHI (a_9, a_10)
81
82 If PRED is known to always evaluate to false, then we can
83 assume that a_11 will always take its value from a_10, meaning
84 	 that instead of considering it VARYING (a_9 and a_10 have
85 different values), we can consider it CONSTANT 100.
86
87 - If an argument has an UNDEFINED value, then it does not affect
88 the outcome of the meet operation. If a variable V_i has an
89 UNDEFINED value, it means that either its defining statement
90 hasn't been visited yet or V_i has no defining statement, in
91 which case the original symbol 'V' is being used
92 uninitialized. Since 'V' is a local variable, the compiler
93 may assume any initial value for it.
94
95
96 After propagation, every variable V_i that ends up with a lattice
97 value of CONSTANT will have the associated constant value in the
98 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
99 final substitution and folding.
100
101 This algorithm uses wide-ints at the max precision of the target.
102 This means that, with one uninteresting exception, variables with
103 UNSIGNED types never go to VARYING because the bits above the
104 precision of the type of the variable are always zero. The
105 uninteresting case is a variable of UNSIGNED type that has the
106 maximum precision of the target. Such variables can go to VARYING,
107 	 but this causes no loss of information since these variables will
108 never be extended.
109
110 References:
111
112 Constant propagation with conditional branches,
113 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
114
115 Building an Optimizing Compiler,
116 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
117
118 Advanced Compiler Design and Implementation,
119 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
120
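/* Editorial sketch (not part of the pass, compiled out below): a minimal
   model of the four-level lattice and its meet operation on plain
   integers.  The toy_* names are hypothetical and exist only for
   exposition; the real meet works on ccp_prop_value_t, also handles
   copies (SSA names) and the bit-level mask, and lives in
   ccp_lattice_meet further down in this file.  */
#if 0
enum toy_lattice { TOY_UNINITIALIZED, TOY_UNDEFINED, TOY_CONSTANT, TOY_VARYING };

struct toy_value
{
  enum toy_lattice kind;
  long cst;   /* Meaningful only when kind == TOY_CONSTANT.  */
};

/* any M UNDEFINED = any;  any M VARYING = VARYING;
   Ci M Cj = Ci if i == j, VARYING otherwise.  */
static struct toy_value
toy_meet (struct toy_value a, struct toy_value b)
{
  struct toy_value varying = { TOY_VARYING, 0 };
  if (a.kind == TOY_UNDEFINED)
    return b;
  if (b.kind == TOY_UNDEFINED)
    return a;
  if (a.kind == TOY_VARYING || b.kind == TOY_VARYING)
    return varying;
  return a.cst == b.cst ? a : varying;
}
#endif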
121 #include "config.h"
122 #include "system.h"
123 #include "coretypes.h"
124 #include "tm.h"
125 #include "alias.h"
126 #include "symtab.h"
127 #include "tree.h"
128 #include "fold-const.h"
129 #include "stor-layout.h"
130 #include "flags.h"
131 #include "tm_p.h"
132 #include "predict.h"
133 #include "hard-reg-set.h"
134 #include "function.h"
135 #include "dominance.h"
136 #include "cfg.h"
137 #include "basic-block.h"
138 #include "gimple-pretty-print.h"
139 #include "tree-ssa-alias.h"
140 #include "internal-fn.h"
141 #include "gimple-fold.h"
142 #include "tree-eh.h"
143 #include "gimple-expr.h"
144 #include "gimple.h"
145 #include "gimplify.h"
146 #include "gimple-iterator.h"
147 #include "gimple-ssa.h"
148 #include "tree-cfg.h"
149 #include "tree-phinodes.h"
150 #include "ssa-iterators.h"
151 #include "stringpool.h"
152 #include "tree-ssanames.h"
153 #include "tree-pass.h"
154 #include "tree-ssa-propagate.h"
155 #include "value-prof.h"
156 #include "langhooks.h"
157 #include "target.h"
158 #include "diagnostic-core.h"
159 #include "dbgcnt.h"
160 #include "params.h"
161 #include "wide-int-print.h"
162 #include "builtins.h"
163 #include "tree-chkp.h"
164
165
166 /* Possible lattice values. */
167 typedef enum
168 {
169 UNINITIALIZED,
170 UNDEFINED,
171 CONSTANT,
172 VARYING
173 } ccp_lattice_t;
174
175 struct ccp_prop_value_t {
176 /* Lattice value. */
177 ccp_lattice_t lattice_val;
178
179 /* Propagated value. */
180 tree value;
181
182 /* Mask that applies to the propagated value during CCP. For X
183 with a CONSTANT lattice value X & ~mask == value & ~mask. The
184 zero bits in the mask cover constant values. The ones mean no
185 information. */
186 widest_int mask;
187 };
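
/* Editorial worked example (illustrative only): with lattice_val == CONSTANT,
   value == 8 and mask == 3, the invariant X & ~mask == value & ~mask says
   that bits 1..0 carry no information while all higher bits are known, so X
   can be any of 8, 9, 10 or 11.  A mask of 0 means the value is fully known;
   a mask of all ones means nothing is known about the bits.  */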
188
189 /* Array of propagated constant values. After propagation,
190 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
191 the constant is held in an SSA name representing a memory store
192 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
193 memory reference used to store (i.e., the LHS of the assignment
194 doing the store). */
195 static ccp_prop_value_t *const_val;
196 static unsigned n_const_val;
197
198 static void canonicalize_value (ccp_prop_value_t *);
199 static bool ccp_fold_stmt (gimple_stmt_iterator *);
200 static void ccp_lattice_meet (ccp_prop_value_t *, ccp_prop_value_t *);
201
202 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
203
204 static void
205 dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
206 {
207 switch (val.lattice_val)
208 {
209 case UNINITIALIZED:
210 fprintf (outf, "%sUNINITIALIZED", prefix);
211 break;
212 case UNDEFINED:
213 fprintf (outf, "%sUNDEFINED", prefix);
214 break;
215 case VARYING:
216 fprintf (outf, "%sVARYING", prefix);
217 break;
218 case CONSTANT:
219 if (TREE_CODE (val.value) != INTEGER_CST
220 || val.mask == 0)
221 {
222 fprintf (outf, "%sCONSTANT ", prefix);
223 print_generic_expr (outf, val.value, dump_flags);
224 }
225 else
226 {
227 widest_int cval = wi::bit_and_not (wi::to_widest (val.value),
228 val.mask);
229 fprintf (outf, "%sCONSTANT ", prefix);
230 print_hex (cval, outf);
231 fprintf (outf, " (");
232 print_hex (val.mask, outf);
233 fprintf (outf, ")");
234 }
235 break;
236 default:
237 gcc_unreachable ();
238 }
239 }
240
241
242 /* Print lattice value VAL to stderr. */
243
244 void debug_lattice_value (ccp_prop_value_t val);
245
246 DEBUG_FUNCTION void
247 debug_lattice_value (ccp_prop_value_t val)
248 {
249 dump_lattice_value (stderr, "", val);
250 fprintf (stderr, "\n");
251 }
252
253 /* Extend NONZERO_BITS to a full mask, with the upper bits being set. */
254
255 static widest_int
256 extend_mask (const wide_int &nonzero_bits)
257 {
258 return (wi::mask <widest_int> (wi::get_precision (nonzero_bits), true)
259 | widest_int::from (nonzero_bits, UNSIGNED));
260 }
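
/* Editorial worked example (illustrative only): for a 16-bit type with
   NONZERO_BITS == 0x00ff, the returned mask has every bit at and above
   bit 16 set (nothing is known about them) and its low 16 bits equal to
   0x00ff, i.e. bits 8..15 are known to be zero while bits 0..7 may hold
   anything.  */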
261
262 /* Compute a default value for variable VAR and store it in the
263 CONST_VAL array. The following rules are used to get default
264 values:
265
266 1- Global and static variables that are declared constant are
267 considered CONSTANT.
268
269 2- Any other value is considered UNDEFINED. This is useful when
270 considering PHI nodes. PHI arguments that are undefined do not
271 change the constant value of the PHI node, which allows for more
272 constants to be propagated.
273
274 3- Variables defined by statements other than assignments and PHI
275 nodes are considered VARYING.
276
277 4- Initial values of variables that are not GIMPLE registers are
278 considered VARYING. */
279
280 static ccp_prop_value_t
281 get_default_value (tree var)
282 {
283 ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
284 gimple stmt;
285
286 stmt = SSA_NAME_DEF_STMT (var);
287
288 if (gimple_nop_p (stmt))
289 {
290 /* Variables defined by an empty statement are those used
291 before being initialized. If VAR is a local variable, we
292 can assume initially that it is UNDEFINED, otherwise we must
293 consider it VARYING. */
294 if (!virtual_operand_p (var)
295 && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
296 val.lattice_val = UNDEFINED;
297 else
298 {
299 val.lattice_val = VARYING;
300 val.mask = -1;
301 if (flag_tree_bit_ccp)
302 {
303 wide_int nonzero_bits = get_nonzero_bits (var);
304 if (nonzero_bits != -1)
305 {
306 val.lattice_val = CONSTANT;
307 val.value = build_zero_cst (TREE_TYPE (var));
308 val.mask = extend_mask (nonzero_bits);
309 }
310 }
311 }
312 }
313 else if (is_gimple_assign (stmt))
314 {
315 tree cst;
316 if (gimple_assign_single_p (stmt)
317 && DECL_P (gimple_assign_rhs1 (stmt))
318 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
319 {
320 val.lattice_val = CONSTANT;
321 val.value = cst;
322 }
323 else
324 {
325 /* Any other variable defined by an assignment is considered
326 UNDEFINED. */
327 val.lattice_val = UNDEFINED;
328 }
329 }
330 else if ((is_gimple_call (stmt)
331 && gimple_call_lhs (stmt) != NULL_TREE)
332 || gimple_code (stmt) == GIMPLE_PHI)
333 {
334 /* A variable defined by a call or a PHI node is considered
335 UNDEFINED. */
336 val.lattice_val = UNDEFINED;
337 }
338 else
339 {
340 /* Otherwise, VAR will never take on a constant value. */
341 val.lattice_val = VARYING;
342 val.mask = -1;
343 }
344
345 return val;
346 }
347
348
349 /* Get the constant value associated with variable VAR. */
350
351 static inline ccp_prop_value_t *
352 get_value (tree var)
353 {
354 ccp_prop_value_t *val;
355
356 if (const_val == NULL
357 || SSA_NAME_VERSION (var) >= n_const_val)
358 return NULL;
359
360 val = &const_val[SSA_NAME_VERSION (var)];
361 if (val->lattice_val == UNINITIALIZED)
362 *val = get_default_value (var);
363
364 canonicalize_value (val);
365
366 return val;
367 }
368
369 /* Return the constant tree value associated with VAR. */
370
371 static inline tree
372 get_constant_value (tree var)
373 {
374 ccp_prop_value_t *val;
375 if (TREE_CODE (var) != SSA_NAME)
376 {
377 if (is_gimple_min_invariant (var))
378 return var;
379 return NULL_TREE;
380 }
381 val = get_value (var);
382 if (val
383 && val->lattice_val == CONSTANT
384 && (TREE_CODE (val->value) != INTEGER_CST
385 || val->mask == 0))
386 return val->value;
387 return NULL_TREE;
388 }
389
390 /* Sets the value associated with VAR to VARYING. */
391
392 static inline void
393 set_value_varying (tree var)
394 {
395 ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
396
397 val->lattice_val = VARYING;
398 val->value = NULL_TREE;
399 val->mask = -1;
400 }
401
402 /* For integer constants, make sure to drop TREE_OVERFLOW. */
403
404 static void
405 canonicalize_value (ccp_prop_value_t *val)
406 {
407 if (val->lattice_val != CONSTANT)
408 return;
409
410 if (TREE_OVERFLOW_P (val->value))
411 val->value = drop_tree_overflow (val->value);
412 }
413
414 /* Return whether the lattice transition is valid. */
415
416 static bool
417 valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val)
418 {
419 /* Lattice transitions must always be monotonically increasing in
420 value. */
421 if (old_val.lattice_val < new_val.lattice_val)
422 return true;
423
424 if (old_val.lattice_val != new_val.lattice_val)
425 return false;
426
427 if (!old_val.value && !new_val.value)
428 return true;
429
430 /* Now both lattice values are CONSTANT. */
431
432 /* Allow arbitrary copy changes as we might look through PHI <a_1, ...>
433 when only a single copy edge is executable. */
434 if (TREE_CODE (old_val.value) == SSA_NAME
435 && TREE_CODE (new_val.value) == SSA_NAME)
436 return true;
437
438 /* Allow transitioning from a constant to a copy. */
439 if (is_gimple_min_invariant (old_val.value)
440 && TREE_CODE (new_val.value) == SSA_NAME)
441 return true;
442
443 /* Allow transitioning from PHI <&x, not executable> == &x
444 to PHI <&x, &y> == common alignment. */
445 if (TREE_CODE (old_val.value) != INTEGER_CST
446 && TREE_CODE (new_val.value) == INTEGER_CST)
447 return true;
448
449 /* Bit-lattices have to agree in the still valid bits. */
450 if (TREE_CODE (old_val.value) == INTEGER_CST
451 && TREE_CODE (new_val.value) == INTEGER_CST)
452 return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask)
453 == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask));
454
455 /* Otherwise constant values have to agree. */
456 if (operand_equal_p (old_val.value, new_val.value, 0))
457 return true;
458
459 /* At least the kinds and types should agree now. */
460 if (TREE_CODE (old_val.value) != TREE_CODE (new_val.value)
461 || !types_compatible_p (TREE_TYPE (old_val.value),
462 TREE_TYPE (new_val.value)))
463 return false;
464
465 /* For floats and !HONOR_NANS allow transitions from (partial) NaN
466 to non-NaN. */
467 tree type = TREE_TYPE (new_val.value);
468 if (SCALAR_FLOAT_TYPE_P (type)
469 && !HONOR_NANS (type))
470 {
471 if (REAL_VALUE_ISNAN (TREE_REAL_CST (old_val.value)))
472 return true;
473 }
474 else if (VECTOR_FLOAT_TYPE_P (type)
475 && !HONOR_NANS (type))
476 {
477 for (unsigned i = 0; i < VECTOR_CST_NELTS (old_val.value); ++i)
478 if (!REAL_VALUE_ISNAN
479 (TREE_REAL_CST (VECTOR_CST_ELT (old_val.value, i)))
480 && !operand_equal_p (VECTOR_CST_ELT (old_val.value, i),
481 VECTOR_CST_ELT (new_val.value, i), 0))
482 return false;
483 return true;
484 }
485 else if (COMPLEX_FLOAT_TYPE_P (type)
486 && !HONOR_NANS (type))
487 {
488 if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_REALPART (old_val.value)))
489 && !operand_equal_p (TREE_REALPART (old_val.value),
490 TREE_REALPART (new_val.value), 0))
491 return false;
492 if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_IMAGPART (old_val.value)))
493 && !operand_equal_p (TREE_IMAGPART (old_val.value),
494 TREE_IMAGPART (new_val.value), 0))
495 return false;
496 return true;
497 }
498 return false;
499 }
500
501 /* Set the value for variable VAR to NEW_VAL. Return true if the new
502 value is different from VAR's previous value. */
503
504 static bool
505 set_lattice_value (tree var, ccp_prop_value_t *new_val)
506 {
507 /* We can deal with old UNINITIALIZED values just fine here. */
508 ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
509
510 canonicalize_value (new_val);
511
512 /* We have to be careful to not go up the bitwise lattice
513 represented by the mask. Instead of dropping to VARYING
514 use the meet operator to retain a conservative value.
515 	 Missed optimizations like PR65851 make this necessary.
516 It also ensures we converge to a stable lattice solution. */
517 if (new_val->lattice_val == CONSTANT
518 && old_val->lattice_val == CONSTANT
519 && TREE_CODE (new_val->value) != SSA_NAME)
520 ccp_lattice_meet (new_val, old_val);
521
522 gcc_checking_assert (valid_lattice_transition (*old_val, *new_val));
523
524 /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
525 caller that this was a non-transition. */
526 if (old_val->lattice_val != new_val->lattice_val
527 || (new_val->lattice_val == CONSTANT
528 && (TREE_CODE (new_val->value) != TREE_CODE (old_val->value)
529 || (TREE_CODE (new_val->value) == INTEGER_CST
530 && (new_val->mask != old_val->mask
531 || (wi::bit_and_not (wi::to_widest (old_val->value),
532 new_val->mask)
533 != wi::bit_and_not (wi::to_widest (new_val->value),
534 new_val->mask))))
535 || (TREE_CODE (new_val->value) != INTEGER_CST
536 && !operand_equal_p (new_val->value, old_val->value, 0)))))
537 {
538 /* ??? We would like to delay creation of INTEGER_CSTs from
539 partially constants here. */
540
541 if (dump_file && (dump_flags & TDF_DETAILS))
542 {
543 dump_lattice_value (dump_file, "Lattice value changed to ", *new_val);
544 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
545 }
546
547 *old_val = *new_val;
548
549 gcc_assert (new_val->lattice_val != UNINITIALIZED);
550 return true;
551 }
552
553 return false;
554 }
555
556 static ccp_prop_value_t get_value_for_expr (tree, bool);
557 static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
558 static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *,
559 tree, const widest_int &, const widest_int &,
560 tree, const widest_int &, const widest_int &);
561
562 /* Return a widest_int that can be used for bitwise simplifications
563 from VAL. */
564
565 static widest_int
566 value_to_wide_int (ccp_prop_value_t val)
567 {
568 if (val.value
569 && TREE_CODE (val.value) == INTEGER_CST)
570 return wi::to_widest (val.value);
571
572 return 0;
573 }
574
575 /* Return the value for the address expression EXPR based on alignment
576 information. */
577
578 static ccp_prop_value_t
579 get_value_from_alignment (tree expr)
580 {
581 tree type = TREE_TYPE (expr);
582 ccp_prop_value_t val;
583 unsigned HOST_WIDE_INT bitpos;
584 unsigned int align;
585
586 gcc_assert (TREE_CODE (expr) == ADDR_EXPR);
587
588 get_pointer_alignment_1 (expr, &align, &bitpos);
589 val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
590 ? wi::mask <widest_int> (TYPE_PRECISION (type), false)
591 : -1).and_not (align / BITS_PER_UNIT - 1);
592 val.lattice_val
593 = wi::sext (val.mask, TYPE_PRECISION (type)) == -1 ? VARYING : CONSTANT;
594 if (val.lattice_val == CONSTANT)
595 val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT);
596 else
597 val.value = NULL_TREE;
598
599 return val;
600 }
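
/* Editorial worked example (illustrative only): for EXPR == &a where
   get_pointer_alignment_1 reports a 16-byte alignment (ALIGN == 128 bits)
   and BITPOS == 0, the mask computed above has its low 4 bits cleared, so
   the low 4 bits of the pointer are known, and the value 0 records a
   misalignment of 0; all higher bits remain unknown.  */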
601
602 /* Return the value for the tree operand EXPR. If FOR_BITS_P is true
603 return constant bits extracted from alignment information for
604 invariant addresses. */
605
606 static ccp_prop_value_t
607 get_value_for_expr (tree expr, bool for_bits_p)
608 {
609 ccp_prop_value_t val;
610
611 if (TREE_CODE (expr) == SSA_NAME)
612 {
613 val = *get_value (expr);
614 if (for_bits_p
615 && val.lattice_val == CONSTANT
616 && TREE_CODE (val.value) == ADDR_EXPR)
617 val = get_value_from_alignment (val.value);
618 /* Fall back to a copy value. */
619 if (!for_bits_p
620 && val.lattice_val == VARYING
621 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr))
622 {
623 val.lattice_val = CONSTANT;
624 val.value = expr;
625 val.mask = -1;
626 }
627 }
628 else if (is_gimple_min_invariant (expr)
629 && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
630 {
631 val.lattice_val = CONSTANT;
632 val.value = expr;
633 val.mask = 0;
634 canonicalize_value (&val);
635 }
636 else if (TREE_CODE (expr) == ADDR_EXPR)
637 val = get_value_from_alignment (expr);
638 else
639 {
640 val.lattice_val = VARYING;
641 val.mask = -1;
642 val.value = NULL_TREE;
643 }
644 return val;
645 }
646
647 /* Return the likely CCP lattice value for STMT.
648
649 If STMT has no operands, then return CONSTANT.
650
651 	 Else if undefinedness of operands of STMT causes its value to be
652 undefined, then return UNDEFINED.
653
654 Else if any operands of STMT are constants, then return CONSTANT.
655
656 Else return VARYING. */
657
658 static ccp_lattice_t
659 likely_value (gimple stmt)
660 {
661 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
662 bool has_nsa_operand;
663 tree use;
664 ssa_op_iter iter;
665 unsigned i;
666
667 enum gimple_code code = gimple_code (stmt);
668
669 /* This function appears to be called only for assignments, calls,
670 conditionals, and switches, due to the logic in visit_stmt. */
671 gcc_assert (code == GIMPLE_ASSIGN
672 || code == GIMPLE_CALL
673 || code == GIMPLE_COND
674 || code == GIMPLE_SWITCH);
675
676 /* If the statement has volatile operands, it won't fold to a
677 constant value. */
678 if (gimple_has_volatile_ops (stmt))
679 return VARYING;
680
681 /* Arrive here for more complex cases. */
682 has_constant_operand = false;
683 has_undefined_operand = false;
684 all_undefined_operands = true;
685 has_nsa_operand = false;
686 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
687 {
688 ccp_prop_value_t *val = get_value (use);
689
690 if (val->lattice_val == UNDEFINED)
691 has_undefined_operand = true;
692 else
693 all_undefined_operands = false;
694
695 if (val->lattice_val == CONSTANT)
696 has_constant_operand = true;
697
698 if (SSA_NAME_IS_DEFAULT_DEF (use)
699 || !prop_simulate_again_p (SSA_NAME_DEF_STMT (use)))
700 has_nsa_operand = true;
701 }
702
703 	 /* There may be constants in regular rhs operands.  For calls we
704 	    have to skip the lhs, fndecl and static chain; for other
705 	    statements only the lhs.  */
706 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
707 i < gimple_num_ops (stmt); ++i)
708 {
709 tree op = gimple_op (stmt, i);
710 if (!op || TREE_CODE (op) == SSA_NAME)
711 continue;
712 if (is_gimple_min_invariant (op))
713 has_constant_operand = true;
714 }
715
716 if (has_constant_operand)
717 all_undefined_operands = false;
718
719 if (has_undefined_operand
720 && code == GIMPLE_CALL
721 && gimple_call_internal_p (stmt))
722 switch (gimple_call_internal_fn (stmt))
723 {
724 /* These 3 builtins use the first argument just as a magic
725 	 way to find out a decl uid.  */
726 case IFN_GOMP_SIMD_LANE:
727 case IFN_GOMP_SIMD_VF:
728 case IFN_GOMP_SIMD_LAST_LANE:
729 has_undefined_operand = false;
730 break;
731 default:
732 break;
733 }
734
735 /* If the operation combines operands like COMPLEX_EXPR make sure to
736 not mark the result UNDEFINED if only one part of the result is
737 undefined. */
738 if (has_undefined_operand && all_undefined_operands)
739 return UNDEFINED;
740 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
741 {
742 switch (gimple_assign_rhs_code (stmt))
743 {
744 /* Unary operators are handled with all_undefined_operands. */
745 case PLUS_EXPR:
746 case MINUS_EXPR:
747 case POINTER_PLUS_EXPR:
748 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
749 Not bitwise operators, one VARYING operand may specify the
750 result completely. Not logical operators for the same reason.
751 Not COMPLEX_EXPR as one VARYING operand makes the result partly
752 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
753 the undefined operand may be promoted. */
754 return UNDEFINED;
755
756 case ADDR_EXPR:
757 /* If any part of an address is UNDEFINED, like the index
758 of an ARRAY_EXPR, then treat the result as UNDEFINED. */
759 return UNDEFINED;
760
761 default:
762 ;
763 }
764 }
765 	 /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
766 fall back to CONSTANT. During iteration UNDEFINED may still drop
767 to CONSTANT. */
768 if (has_undefined_operand)
769 return CONSTANT;
770
771 /* We do not consider virtual operands here -- load from read-only
772 memory may have only VARYING virtual operands, but still be
773 constant. Also we can combine the stmt with definitions from
774 operands whose definitions are not simulated again. */
775 if (has_constant_operand
776 || has_nsa_operand
777 || gimple_references_memory_p (stmt))
778 return CONSTANT;
779
780 return VARYING;
781 }
782
783 /* Returns true if STMT cannot be constant. */
784
785 static bool
786 surely_varying_stmt_p (gimple stmt)
787 {
788 /* If the statement has operands that we cannot handle, it cannot be
789 constant. */
790 if (gimple_has_volatile_ops (stmt))
791 return true;
792
793 	 /* If it is a call that does not return a value, or a direct call
794 	    to a non-builtin function whose type lacks the assume_aligned and
795 	    alloc_align attributes, it is varying.  */
796 if (is_gimple_call (stmt))
797 {
798 tree fndecl, fntype = gimple_call_fntype (stmt);
799 if (!gimple_call_lhs (stmt)
800 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
801 && !DECL_BUILT_IN (fndecl)
802 && !lookup_attribute ("assume_aligned",
803 TYPE_ATTRIBUTES (fntype))
804 && !lookup_attribute ("alloc_align",
805 TYPE_ATTRIBUTES (fntype))))
806 return true;
807 }
808
809 /* Any other store operation is not interesting. */
810 else if (gimple_vdef (stmt))
811 return true;
812
813 /* Anything other than assignments and conditional jumps are not
814 interesting for CCP. */
815 if (gimple_code (stmt) != GIMPLE_ASSIGN
816 && gimple_code (stmt) != GIMPLE_COND
817 && gimple_code (stmt) != GIMPLE_SWITCH
818 && gimple_code (stmt) != GIMPLE_CALL)
819 return true;
820
821 return false;
822 }
823
824 /* Initialize local data structures for CCP. */
825
826 static void
827 ccp_initialize (void)
828 {
829 basic_block bb;
830
831 n_const_val = num_ssa_names;
832 const_val = XCNEWVEC (ccp_prop_value_t, n_const_val);
833
834 /* Initialize simulation flags for PHI nodes and statements. */
835 FOR_EACH_BB_FN (bb, cfun)
836 {
837 gimple_stmt_iterator i;
838
839 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
840 {
841 gimple stmt = gsi_stmt (i);
842 bool is_varying;
843
844 	 /* If the statement is a control insn, we must simulate it at
845 	    least once; otherwise its outgoing edges will never get
846 	    added.  */
847 if (stmt_ends_bb_p (stmt))
848 is_varying = false;
849 else
850 is_varying = surely_varying_stmt_p (stmt);
851
852 if (is_varying)
853 {
854 tree def;
855 ssa_op_iter iter;
856
857 /* If the statement will not produce a constant, mark
858 all its outputs VARYING. */
859 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
860 set_value_varying (def);
861 }
862 prop_set_simulate_again (stmt, !is_varying);
863 }
864 }
865
866 /* Now process PHI nodes. We never clear the simulate_again flag on
867 phi nodes, since we do not know which edges are executable yet,
868 except for phi nodes for virtual operands when we do not do store ccp. */
869 FOR_EACH_BB_FN (bb, cfun)
870 {
871 gphi_iterator i;
872
873 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
874 {
875 gphi *phi = i.phi ();
876
877 if (virtual_operand_p (gimple_phi_result (phi)))
878 prop_set_simulate_again (phi, false);
879 else
880 prop_set_simulate_again (phi, true);
881 }
882 }
883 }
884
885 /* Debug count support.  Reset the values of SSA names to
886    VARYING when the total number of SSA names analyzed exceeds
887    the specified debug count.  */
888
889 static void
890 do_dbg_cnt (void)
891 {
892 unsigned i;
893 for (i = 0; i < num_ssa_names; i++)
894 {
895 if (!dbg_cnt (ccp))
896 {
897 const_val[i].lattice_val = VARYING;
898 const_val[i].mask = -1;
899 const_val[i].value = NULL_TREE;
900 }
901 }
902 }
903
904
905 /* Do final substitution of propagated values, clean up the flowgraph and
906 free allocated storage.
907
908 Return TRUE when something was optimized. */
909
910 static bool
911 ccp_finalize (void)
912 {
913 bool something_changed;
914 unsigned i;
915
916 do_dbg_cnt ();
917
918 /* Derive alignment and misalignment information from partially
919 constant pointers in the lattice or nonzero bits from partially
920 constant integers. */
921 for (i = 1; i < num_ssa_names; ++i)
922 {
923 tree name = ssa_name (i);
924 ccp_prop_value_t *val;
925 unsigned int tem, align;
926
927 if (!name
928 || (!POINTER_TYPE_P (TREE_TYPE (name))
929 && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
930 /* Don't record nonzero bits before IPA to avoid
931 using too much memory. */
932 || first_pass_instance)))
933 continue;
934
935 val = get_value (name);
936 if (val->lattice_val != CONSTANT
937 || TREE_CODE (val->value) != INTEGER_CST)
938 continue;
939
940 if (POINTER_TYPE_P (TREE_TYPE (name)))
941 {
942 /* Trailing mask bits specify the alignment, trailing value
943 bits the misalignment. */
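/* Editorial worked example (illustrative only): if the mask's low bits are
   ...11110000 and the value's low bits are 0101, then tem & -tem isolates
   the lowest unknown bit, 0x10, giving a 16-byte alignment, and
   value & (align - 1) == 5 is the misalignment within that alignment.  */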
944 tem = val->mask.to_uhwi ();
945 align = (tem & -tem);
946 if (align > 1)
947 set_ptr_info_alignment (get_ptr_info (name), align,
948 (TREE_INT_CST_LOW (val->value)
949 & (align - 1)));
950 }
951 else
952 {
953 unsigned int precision = TYPE_PRECISION (TREE_TYPE (val->value));
954 wide_int nonzero_bits = wide_int::from (val->mask, precision,
955 UNSIGNED) | val->value;
956 nonzero_bits &= get_nonzero_bits (name);
957 set_nonzero_bits (name, nonzero_bits);
958 }
959 }
960
961 /* Perform substitutions based on the known constant values. */
962 something_changed = substitute_and_fold (get_constant_value,
963 ccp_fold_stmt, true);
964
965 free (const_val);
966 const_val = NULL;
967 	 return something_changed;
968 }
969
970
971 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
972 in VAL1.
973
974 any M UNDEFINED = any
975 any M VARYING = VARYING
976 Ci M Cj = Ci if (i == j)
977 Ci M Cj = VARYING if (i != j)
978 */
979
980 static void
981 ccp_lattice_meet (ccp_prop_value_t *val1, ccp_prop_value_t *val2)
982 {
983 if (val1->lattice_val == UNDEFINED
984 	 /* For UNDEFINED M SSA we can't always use the SSA name because its
985 	    definition may not dominate the PHI node.  Doing optimistic copy propagation
986 also causes a lot of gcc.dg/uninit-pred*.c FAILs. */
987 && (val2->lattice_val != CONSTANT
988 || TREE_CODE (val2->value) != SSA_NAME))
989 {
990 /* UNDEFINED M any = any */
991 *val1 = *val2;
992 }
993 else if (val2->lattice_val == UNDEFINED
994 /* See above. */
995 && (val1->lattice_val != CONSTANT
996 || TREE_CODE (val1->value) != SSA_NAME))
997 {
998 /* any M UNDEFINED = any
999 Nothing to do. VAL1 already contains the value we want. */
1000 ;
1001 }
1002 else if (val1->lattice_val == VARYING
1003 || val2->lattice_val == VARYING)
1004 {
1005 /* any M VARYING = VARYING. */
1006 val1->lattice_val = VARYING;
1007 val1->mask = -1;
1008 val1->value = NULL_TREE;
1009 }
1010 else if (val1->lattice_val == CONSTANT
1011 && val2->lattice_val == CONSTANT
1012 && TREE_CODE (val1->value) == INTEGER_CST
1013 && TREE_CODE (val2->value) == INTEGER_CST)
1014 {
1015 /* Ci M Cj = Ci if (i == j)
1016 Ci M Cj = VARYING if (i != j)
1017
1018 For INTEGER_CSTs mask unequal bits. If no equal bits remain,
1019 drop to varying. */
1020 val1->mask = (val1->mask | val2->mask
1021 | (wi::to_widest (val1->value)
1022 ^ wi::to_widest (val2->value)));
1023 if (wi::sext (val1->mask, TYPE_PRECISION (TREE_TYPE (val1->value))) == -1)
1024 {
1025 val1->lattice_val = VARYING;
1026 val1->value = NULL_TREE;
1027 }
1028 }
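/* Editorial worked example (illustrative only): meeting value 10 (mask 0)
   with value 8 (mask 0) yields mask 0 | 0 | (10 ^ 8) == 2, i.e. only bit 1
   becomes unknown and the meet stays CONSTANT; it drops to VARYING only
   when the sign-extended mask covers every bit of the type.  */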
1029 else if (val1->lattice_val == CONSTANT
1030 && val2->lattice_val == CONSTANT
1031 && operand_equal_p (val1->value, val2->value, 0))
1032 {
1033 /* Ci M Cj = Ci if (i == j)
1034 Ci M Cj = VARYING if (i != j)
1035
1036 VAL1 already contains the value we want for equivalent values. */
1037 }
1038 else if (val1->lattice_val == CONSTANT
1039 && val2->lattice_val == CONSTANT
1040 && (TREE_CODE (val1->value) == ADDR_EXPR
1041 || TREE_CODE (val2->value) == ADDR_EXPR))
1042 {
1043 	 /* When unequal addresses are involved, try meeting for
1044 alignment. */
1045 ccp_prop_value_t tem = *val2;
1046 if (TREE_CODE (val1->value) == ADDR_EXPR)
1047 *val1 = get_value_for_expr (val1->value, true);
1048 if (TREE_CODE (val2->value) == ADDR_EXPR)
1049 tem = get_value_for_expr (val2->value, true);
1050 ccp_lattice_meet (val1, &tem);
1051 }
1052 else
1053 {
1054 /* Any other combination is VARYING. */
1055 val1->lattice_val = VARYING;
1056 val1->mask = -1;
1057 val1->value = NULL_TREE;
1058 }
1059 }
1060
1061
1062 /* Loop through the PHI_NODE's parameters and compare their
1063    lattice values to determine PHI_NODE's lattice value.  The value of a
1064    PHI node is determined by calling ccp_lattice_meet with all the arguments
1065    of the PHI node that are incoming via executable edges.  */
1066
1067 static enum ssa_prop_result
1068 ccp_visit_phi_node (gphi *phi)
1069 {
1070 unsigned i;
1071 ccp_prop_value_t new_val;
1072
1073 if (dump_file && (dump_flags & TDF_DETAILS))
1074 {
1075 fprintf (dump_file, "\nVisiting PHI node: ");
1076 print_gimple_stmt (dump_file, phi, 0, dump_flags);
1077 }
1078
1079 new_val.lattice_val = UNDEFINED;
1080 new_val.value = NULL_TREE;
1081 new_val.mask = 0;
1082
1083 bool first = true;
1084 bool non_exec_edge = false;
1085 for (i = 0; i < gimple_phi_num_args (phi); i++)
1086 {
1087 /* Compute the meet operator over all the PHI arguments flowing
1088 through executable edges. */
1089 edge e = gimple_phi_arg_edge (phi, i);
1090
1091 if (dump_file && (dump_flags & TDF_DETAILS))
1092 {
1093 fprintf (dump_file,
1094 "\n Argument #%d (%d -> %d %sexecutable)\n",
1095 i, e->src->index, e->dest->index,
1096 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
1097 }
1098
1099 	 /* If the incoming edge is executable, compute the meet operator for
1100 the existing value of the PHI node and the current PHI argument. */
1101 if (e->flags & EDGE_EXECUTABLE)
1102 {
1103 tree arg = gimple_phi_arg (phi, i)->def;
1104 ccp_prop_value_t arg_val = get_value_for_expr (arg, false);
1105
1106 if (first)
1107 {
1108 new_val = arg_val;
1109 first = false;
1110 }
1111 else
1112 ccp_lattice_meet (&new_val, &arg_val);
1113
1114 if (dump_file && (dump_flags & TDF_DETAILS))
1115 {
1116 fprintf (dump_file, "\t");
1117 print_generic_expr (dump_file, arg, dump_flags);
1118 dump_lattice_value (dump_file, "\tValue: ", arg_val);
1119 fprintf (dump_file, "\n");
1120 }
1121
1122 if (new_val.lattice_val == VARYING)
1123 break;
1124 }
1125 else
1126 non_exec_edge = true;
1127 }
1128
1129 	 /* In case there were non-executable edges and the value is a copy,
1130 make sure its definition dominates the PHI node. */
1131 if (non_exec_edge
1132 && new_val.lattice_val == CONSTANT
1133 && TREE_CODE (new_val.value) == SSA_NAME
1134 && ! SSA_NAME_IS_DEFAULT_DEF (new_val.value)
1135 && ! dominated_by_p (CDI_DOMINATORS, gimple_bb (phi),
1136 gimple_bb (SSA_NAME_DEF_STMT (new_val.value))))
1137 {
1138 new_val.lattice_val = VARYING;
1139 new_val.value = NULL_TREE;
1140 new_val.mask = -1;
1141 }
1142
1143 if (dump_file && (dump_flags & TDF_DETAILS))
1144 {
1145 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
1146 fprintf (dump_file, "\n\n");
1147 }
1148
1149 /* Make the transition to the new value. */
1150 if (set_lattice_value (gimple_phi_result (phi), &new_val))
1151 {
1152 if (new_val.lattice_val == VARYING)
1153 return SSA_PROP_VARYING;
1154 else
1155 return SSA_PROP_INTERESTING;
1156 }
1157 else
1158 return SSA_PROP_NOT_INTERESTING;
1159 }
1160
1161 /* Return the constant value for OP, or OP itself otherwise.  */
1162
1163 static tree
1164 valueize_op (tree op)
1165 {
1166 if (TREE_CODE (op) == SSA_NAME)
1167 {
1168 tree tem = get_constant_value (op);
1169 if (tem)
1170 return tem;
1171 }
1172 return op;
1173 }
1174
1175 /* Return the constant value for OP, but signal to not follow SSA
1176 edges if the definition may be simulated again. */
1177
1178 static tree
1179 valueize_op_1 (tree op)
1180 {
1181 if (TREE_CODE (op) == SSA_NAME)
1182 {
1183 /* If the definition may be simulated again we cannot follow
1184 this SSA edge as the SSA propagator does not necessarily
1185 re-visit the use. */
1186 gimple def_stmt = SSA_NAME_DEF_STMT (op);
1187 if (!gimple_nop_p (def_stmt)
1188 && prop_simulate_again_p (def_stmt))
1189 return NULL_TREE;
1190 tree tem = get_constant_value (op);
1191 if (tem)
1192 return tem;
1193 }
1194 return op;
1195 }
1196
1197 /* CCP specific front-end to the non-destructive constant folding
1198 routines.
1199
1200 Attempt to simplify the RHS of STMT knowing that one or more
1201 operands are constants.
1202
1203 If simplification is possible, return the simplified RHS,
1204 otherwise return the original RHS or NULL_TREE. */
1205
1206 static tree
1207 ccp_fold (gimple stmt)
1208 {
1209 location_t loc = gimple_location (stmt);
1210 switch (gimple_code (stmt))
1211 {
1212 case GIMPLE_COND:
1213 {
1214 /* Handle comparison operators that can appear in GIMPLE form. */
1215 tree op0 = valueize_op (gimple_cond_lhs (stmt));
1216 tree op1 = valueize_op (gimple_cond_rhs (stmt));
1217 enum tree_code code = gimple_cond_code (stmt);
1218 return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
1219 }
1220
1221 case GIMPLE_SWITCH:
1222 {
1223 /* Return the constant switch index. */
1224 return valueize_op (gimple_switch_index (as_a <gswitch *> (stmt)));
1225 }
1226
1227 case GIMPLE_ASSIGN:
1228 case GIMPLE_CALL:
1229 return gimple_fold_stmt_to_constant_1 (stmt,
1230 valueize_op, valueize_op_1);
1231
1232 default:
1233 gcc_unreachable ();
1234 }
1235 }
1236
1237 /* Apply the operation CODE in type TYPE to the value, mask pair
1238 RVAL and RMASK representing a value of type RTYPE and set
1239 the value, mask pair *VAL and *MASK to the result. */
1240
1241 static void
1242 bit_value_unop_1 (enum tree_code code, tree type,
1243 widest_int *val, widest_int *mask,
1244 tree rtype, const widest_int &rval, const widest_int &rmask)
1245 {
1246 switch (code)
1247 {
1248 case BIT_NOT_EXPR:
1249 *mask = rmask;
1250 *val = ~rval;
1251 break;
1252
1253 case NEGATE_EXPR:
1254 {
1255 widest_int temv, temm;
1256 /* Return ~rval + 1. */
1257 bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
1258 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1259 type, temv, temm, type, 1, 0);
1260 break;
1261 }
1262
1263 CASE_CONVERT:
1264 {
1265 signop sgn;
1266
1267 /* First extend mask and value according to the original type. */
1268 sgn = TYPE_SIGN (rtype);
1269 *mask = wi::ext (rmask, TYPE_PRECISION (rtype), sgn);
1270 *val = wi::ext (rval, TYPE_PRECISION (rtype), sgn);
1271
1272 /* Then extend mask and value according to the target type. */
1273 sgn = TYPE_SIGN (type);
1274 *mask = wi::ext (*mask, TYPE_PRECISION (type), sgn);
1275 *val = wi::ext (*val, TYPE_PRECISION (type), sgn);
1276 break;
1277 }
1278
1279 default:
1280 *mask = -1;
1281 break;
1282 }
1283 }
1284
1285 /* Apply the operation CODE in type TYPE to the value, mask pairs
1286    R1VAL, R1MASK and R2VAL, R2MASK representing values of types R1TYPE
1287 and R2TYPE and set the value, mask pair *VAL and *MASK to the result. */
1288
1289 static void
1290 bit_value_binop_1 (enum tree_code code, tree type,
1291 widest_int *val, widest_int *mask,
1292 tree r1type, const widest_int &r1val,
1293 const widest_int &r1mask, tree r2type,
1294 const widest_int &r2val, const widest_int &r2mask)
1295 {
1296 signop sgn = TYPE_SIGN (type);
1297 int width = TYPE_PRECISION (type);
1298 bool swap_p = false;
1299
1300 	 /* Assume we'll get a constant result.  Use an initial non-varying
1301 	    value; we fall back to varying in the end if necessary.  */
1302 *mask = -1;
1303
1304 switch (code)
1305 {
1306 case BIT_AND_EXPR:
1307 /* The mask is constant where there is a known not
1308 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
1309 *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
1310 *val = r1val & r2val;
1311 break;
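/* Editorial worked example (illustrative only): ANDing r1 = value 0b1100
   with mask 0b0011 (bits 1..0 unknown) and the constant r2 = 0b1010
   (mask 0) gives *val = 0b1000 and
   *mask = 0b0011 & 0b1111 & 0b1010 = 0b0010: bit 0 is known to be zero
   because r2 has it clear, so only bit 1 remains unknown.  */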
1312
1313 case BIT_IOR_EXPR:
1314 /* The mask is constant where there is a known
1315 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)). */
1316 *mask = (r1mask | r2mask)
1317 .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
1318 *val = r1val | r2val;
1319 break;
1320
1321 case BIT_XOR_EXPR:
1322 /* m1 | m2 */
1323 *mask = r1mask | r2mask;
1324 *val = r1val ^ r2val;
1325 break;
1326
1327 case LROTATE_EXPR:
1328 case RROTATE_EXPR:
1329 if (r2mask == 0)
1330 {
1331 widest_int shift = r2val;
1332 if (shift == 0)
1333 {
1334 *mask = r1mask;
1335 *val = r1val;
1336 }
1337 else
1338 {
1339 if (wi::neg_p (shift))
1340 {
1341 shift = -shift;
1342 if (code == RROTATE_EXPR)
1343 code = LROTATE_EXPR;
1344 else
1345 code = RROTATE_EXPR;
1346 }
1347 if (code == RROTATE_EXPR)
1348 {
1349 *mask = wi::rrotate (r1mask, shift, width);
1350 *val = wi::rrotate (r1val, shift, width);
1351 }
1352 else
1353 {
1354 *mask = wi::lrotate (r1mask, shift, width);
1355 *val = wi::lrotate (r1val, shift, width);
1356 }
1357 }
1358 }
1359 break;
1360
1361 case LSHIFT_EXPR:
1362 case RSHIFT_EXPR:
1363 /* ??? We can handle partially known shift counts if we know
1364 	 their sign.  That way we can tell that (x << (y | 8)) & 255
1365 is zero. */
1366 if (r2mask == 0)
1367 {
1368 widest_int shift = r2val;
1369 if (shift == 0)
1370 {
1371 *mask = r1mask;
1372 *val = r1val;
1373 }
1374 else
1375 {
1376 if (wi::neg_p (shift))
1377 {
1378 shift = -shift;
1379 if (code == RSHIFT_EXPR)
1380 code = LSHIFT_EXPR;
1381 else
1382 code = RSHIFT_EXPR;
1383 }
1384 if (code == RSHIFT_EXPR)
1385 {
1386 *mask = wi::rshift (wi::ext (r1mask, width, sgn), shift, sgn);
1387 *val = wi::rshift (wi::ext (r1val, width, sgn), shift, sgn);
1388 }
1389 else
1390 {
1391 *mask = wi::ext (wi::lshift (r1mask, shift), width, sgn);
1392 *val = wi::ext (wi::lshift (r1val, shift), width, sgn);
1393 }
1394 }
1395 }
1396 break;
1397
1398 case PLUS_EXPR:
1399 case POINTER_PLUS_EXPR:
1400 {
1401 /* Do the addition with unknown bits set to zero, to give carry-ins of
1402 zero wherever possible. */
1403 widest_int lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
1404 lo = wi::ext (lo, width, sgn);
1405 /* Do the addition with unknown bits set to one, to give carry-ins of
1406 one wherever possible. */
1407 widest_int hi = (r1val | r1mask) + (r2val | r2mask);
1408 hi = wi::ext (hi, width, sgn);
1409 /* Each bit in the result is known if (a) the corresponding bits in
1410 both inputs are known, and (b) the carry-in to that bit position
1411 is known. We can check condition (b) by seeing if we got the same
1412 result with minimised carries as with maximised carries. */
1413 *mask = r1mask | r2mask | (lo ^ hi);
1414 *mask = wi::ext (*mask, width, sgn);
1415 /* It shouldn't matter whether we choose lo or hi here. */
1416 *val = lo;
1417 break;
1418 }
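/* Editorial worked example (illustrative only): adding a value known to be
   0b01xx (val 0b0100, mask 0b0011, i.e. 4..7) to the constant 8 gives
   lo = 4 + 8 = 12 and hi = 7 + 8 = 15, so lo ^ hi = 0b0011 and
   *mask = 0b0011: no carry ever reaches bit 2, the high bits stay known
   and the result is known to lie in 12..15.  */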
1419
1420 case MINUS_EXPR:
1421 {
1422 widest_int temv, temm;
1423 bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
1424 r2type, r2val, r2mask);
1425 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1426 r1type, r1val, r1mask,
1427 r2type, temv, temm);
1428 break;
1429 }
1430
1431 case MULT_EXPR:
1432 {
1433 /* Just track trailing zeros in both operands and transfer
1434 them to the other. */
1435 int r1tz = wi::ctz (r1val | r1mask);
1436 int r2tz = wi::ctz (r2val | r2mask);
1437 if (r1tz + r2tz >= width)
1438 {
1439 *mask = 0;
1440 *val = 0;
1441 }
1442 else if (r1tz + r2tz > 0)
1443 {
1444 *mask = wi::ext (wi::mask <widest_int> (r1tz + r2tz, true),
1445 width, sgn);
1446 *val = 0;
1447 }
1448 break;
1449 }
1450
1451 case EQ_EXPR:
1452 case NE_EXPR:
1453 {
1454 widest_int m = r1mask | r2mask;
1455 if (r1val.and_not (m) != r2val.and_not (m))
1456 {
1457 *mask = 0;
1458 *val = ((code == EQ_EXPR) ? 0 : 1);
1459 }
1460 else
1461 {
1462 /* We know the result of a comparison is always one or zero. */
1463 *mask = 1;
1464 *val = 0;
1465 }
1466 break;
1467 }
1468
1469 case GE_EXPR:
1470 case GT_EXPR:
1471 swap_p = true;
1472 code = swap_tree_comparison (code);
1473 /* Fall through. */
1474 case LT_EXPR:
1475 case LE_EXPR:
1476 {
1477 int minmax, maxmin;
1478
1479 const widest_int &o1val = swap_p ? r2val : r1val;
1480 const widest_int &o1mask = swap_p ? r2mask : r1mask;
1481 const widest_int &o2val = swap_p ? r1val : r2val;
1482 const widest_int &o2mask = swap_p ? r1mask : r2mask;
1483
1484 /* If the most significant bits are not known we know nothing. */
1485 if (wi::neg_p (o1mask) || wi::neg_p (o2mask))
1486 break;
1487
1488 /* For comparisons the signedness is in the comparison operands. */
1489 sgn = TYPE_SIGN (r1type);
1490
1491 	 /* If we know the most significant bits we know the value
1492 	    ranges by means of treating varying bits as zero
1493 or one. Do a cross comparison of the max/min pairs. */
1494 maxmin = wi::cmp (o1val | o1mask, o2val.and_not (o2mask), sgn);
1495 minmax = wi::cmp (o1val.and_not (o1mask), o2val | o2mask, sgn);
1496 if (maxmin < 0) /* o1 is less than o2. */
1497 {
1498 *mask = 0;
1499 *val = 1;
1500 }
1501 else if (minmax > 0) /* o1 is not less or equal to o2. */
1502 {
1503 *mask = 0;
1504 *val = 0;
1505 }
1506 else if (maxmin == minmax) /* o1 and o2 are equal. */
1507 {
1508 /* This probably should never happen as we'd have
1509 folded the thing during fully constant value folding. */
1510 *mask = 0;
1511 *val = (code == LE_EXPR ? 1 : 0);
1512 }
1513 else
1514 {
1515 /* We know the result of a comparison is always one or zero. */
1516 *mask = 1;
1517 *val = 0;
1518 }
1519 break;
1520 }
1521
1522 default:;
1523 }
1524 }
1525
1526 /* Return the propagation value when applying the operation CODE to
1527 the value RHS yielding type TYPE. */
1528
1529 static ccp_prop_value_t
1530 bit_value_unop (enum tree_code code, tree type, tree rhs)
1531 {
1532 ccp_prop_value_t rval = get_value_for_expr (rhs, true);
1533 widest_int value, mask;
1534 ccp_prop_value_t val;
1535
1536 if (rval.lattice_val == UNDEFINED)
1537 return rval;
1538
1539 gcc_assert ((rval.lattice_val == CONSTANT
1540 && TREE_CODE (rval.value) == INTEGER_CST)
1541 || wi::sext (rval.mask, TYPE_PRECISION (TREE_TYPE (rhs))) == -1);
1542 bit_value_unop_1 (code, type, &value, &mask,
1543 TREE_TYPE (rhs), value_to_wide_int (rval), rval.mask);
1544 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
1545 {
1546 val.lattice_val = CONSTANT;
1547 val.mask = mask;
1548 /* ??? Delay building trees here. */
1549 val.value = wide_int_to_tree (type, value);
1550 }
1551 else
1552 {
1553 val.lattice_val = VARYING;
1554 val.value = NULL_TREE;
1555 val.mask = -1;
1556 }
1557 return val;
1558 }
1559
1560 /* Return the propagation value when applying the operation CODE to
1561 the values RHS1 and RHS2 yielding type TYPE. */
1562
1563 static ccp_prop_value_t
1564 bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
1565 {
1566 ccp_prop_value_t r1val = get_value_for_expr (rhs1, true);
1567 ccp_prop_value_t r2val = get_value_for_expr (rhs2, true);
1568 widest_int value, mask;
1569 ccp_prop_value_t val;
1570
1571 if (r1val.lattice_val == UNDEFINED
1572 || r2val.lattice_val == UNDEFINED)
1573 {
1574 val.lattice_val = VARYING;
1575 val.value = NULL_TREE;
1576 val.mask = -1;
1577 return val;
1578 }
1579
1580 gcc_assert ((r1val.lattice_val == CONSTANT
1581 && TREE_CODE (r1val.value) == INTEGER_CST)
1582 || wi::sext (r1val.mask,
1583 TYPE_PRECISION (TREE_TYPE (rhs1))) == -1);
1584 gcc_assert ((r2val.lattice_val == CONSTANT
1585 && TREE_CODE (r2val.value) == INTEGER_CST)
1586 || wi::sext (r2val.mask,
1587 TYPE_PRECISION (TREE_TYPE (rhs2))) == -1);
1588 bit_value_binop_1 (code, type, &value, &mask,
1589 TREE_TYPE (rhs1), value_to_wide_int (r1val), r1val.mask,
1590 TREE_TYPE (rhs2), value_to_wide_int (r2val), r2val.mask);
1591 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
1592 {
1593 val.lattice_val = CONSTANT;
1594 val.mask = mask;
1595 /* ??? Delay building trees here. */
1596 val.value = wide_int_to_tree (type, value);
1597 }
1598 else
1599 {
1600 val.lattice_val = VARYING;
1601 val.value = NULL_TREE;
1602 val.mask = -1;
1603 }
1604 return val;
1605 }
1606
1607 /* Return the propagation value for __builtin_assume_aligned
1608    and functions with the assume_aligned or alloc_align attribute.
1609 For __builtin_assume_aligned, ATTR is NULL_TREE,
1610 for assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
1611    is false, for the alloc_align attribute ATTR is non-NULL and
1612 ALLOC_ALIGNED is true. */
1613
1614 static ccp_prop_value_t
1615 bit_value_assume_aligned (gimple stmt, tree attr, ccp_prop_value_t ptrval,
1616 bool alloc_aligned)
1617 {
1618 tree align, misalign = NULL_TREE, type;
1619 unsigned HOST_WIDE_INT aligni, misaligni = 0;
1620 ccp_prop_value_t alignval;
1621 widest_int value, mask;
1622 ccp_prop_value_t val;
1623
1624 if (attr == NULL_TREE)
1625 {
1626 tree ptr = gimple_call_arg (stmt, 0);
1627 type = TREE_TYPE (ptr);
1628 ptrval = get_value_for_expr (ptr, true);
1629 }
1630 else
1631 {
1632 tree lhs = gimple_call_lhs (stmt);
1633 type = TREE_TYPE (lhs);
1634 }
1635
1636 if (ptrval.lattice_val == UNDEFINED)
1637 return ptrval;
1638 gcc_assert ((ptrval.lattice_val == CONSTANT
1639 && TREE_CODE (ptrval.value) == INTEGER_CST)
1640 || wi::sext (ptrval.mask, TYPE_PRECISION (type)) == -1);
1641 if (attr == NULL_TREE)
1642 {
1643 /* Get aligni and misaligni from __builtin_assume_aligned. */
1644 align = gimple_call_arg (stmt, 1);
1645 if (!tree_fits_uhwi_p (align))
1646 return ptrval;
1647 aligni = tree_to_uhwi (align);
1648 if (gimple_call_num_args (stmt) > 2)
1649 {
1650 misalign = gimple_call_arg (stmt, 2);
1651 if (!tree_fits_uhwi_p (misalign))
1652 return ptrval;
1653 misaligni = tree_to_uhwi (misalign);
1654 }
1655 }
1656 else
1657 {
1658 /* Get aligni and misaligni from assume_aligned or
1659 alloc_align attributes. */
1660 if (TREE_VALUE (attr) == NULL_TREE)
1661 return ptrval;
1662 attr = TREE_VALUE (attr);
1663 align = TREE_VALUE (attr);
1664 if (!tree_fits_uhwi_p (align))
1665 return ptrval;
1666 aligni = tree_to_uhwi (align);
1667 if (alloc_aligned)
1668 {
1669 if (aligni == 0 || aligni > gimple_call_num_args (stmt))
1670 return ptrval;
1671 align = gimple_call_arg (stmt, aligni - 1);
1672 if (!tree_fits_uhwi_p (align))
1673 return ptrval;
1674 aligni = tree_to_uhwi (align);
1675 }
1676 else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr)))
1677 {
1678 misalign = TREE_VALUE (TREE_CHAIN (attr));
1679 if (!tree_fits_uhwi_p (misalign))
1680 return ptrval;
1681 misaligni = tree_to_uhwi (misalign);
1682 }
1683 }
1684 if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni)
1685 return ptrval;
1686
1687 align = build_int_cst_type (type, -aligni);
1688 alignval = get_value_for_expr (align, true);
1689 bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
1690 type, value_to_wide_int (ptrval), ptrval.mask,
1691 type, value_to_wide_int (alignval), alignval.mask);
1692 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
1693 {
1694 val.lattice_val = CONSTANT;
1695 val.mask = mask;
1696 gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0);
1697 gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0);
1698 value |= misaligni;
1699 /* ??? Delay building trees here. */
1700 val.value = wide_int_to_tree (type, value);
1701 }
1702 else
1703 {
1704 val.lattice_val = VARYING;
1705 val.value = NULL_TREE;
1706 val.mask = -1;
1707 }
1708 return val;
1709 }
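
/* Editorial worked example (illustrative only): for
   __builtin_assume_aligned (p, 16, 4) the code above ANDs the known bits
   of P with -16, which forces the low 4 bits of both value and mask to
   zero, and then ORs in the misalignment 4, so the result is known to have
   its low 4 bits equal to 0b0100 regardless of what else is known about
   P.  */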
1710
1711 /* Evaluate statement STMT.
1712 Valid only for assignments, calls, conditionals, and switches. */
1713
1714 static ccp_prop_value_t
1715 evaluate_stmt (gimple stmt)
1716 {
1717 ccp_prop_value_t val;
1718 tree simplified = NULL_TREE;
1719 ccp_lattice_t likelyvalue = likely_value (stmt);
1720 bool is_constant = false;
1721 unsigned int align;
1722
1723 if (dump_file && (dump_flags & TDF_DETAILS))
1724 {
1725 fprintf (dump_file, "which is likely ");
1726 switch (likelyvalue)
1727 {
1728 case CONSTANT:
1729 fprintf (dump_file, "CONSTANT");
1730 break;
1731 case UNDEFINED:
1732 fprintf (dump_file, "UNDEFINED");
1733 break;
1734 case VARYING:
1735 fprintf (dump_file, "VARYING");
1736 break;
1737 default:;
1738 }
1739 fprintf (dump_file, "\n");
1740 }
1741
1742 /* If the statement is likely to have a CONSTANT result, then try
1743 to fold the statement to determine the constant value. */
1744 /* FIXME. This is the only place that we call ccp_fold.
1745 Since likely_value never returns CONSTANT for calls, we will
1746 not attempt to fold them, including builtins that may profit. */
1747 if (likelyvalue == CONSTANT)
1748 {
1749 fold_defer_overflow_warnings ();
1750 simplified = ccp_fold (stmt);
1751 if (simplified && TREE_CODE (simplified) == SSA_NAME)
1752 {
1753 val = *get_value (simplified);
1754 if (val.lattice_val != VARYING)
1755 {
1756 fold_undefer_overflow_warnings (true, stmt, 0);
1757 return val;
1758 }
1759 }
1760 is_constant = simplified && is_gimple_min_invariant (simplified);
1761 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1762 if (is_constant)
1763 {
1764 /* The statement produced a constant value. */
1765 val.lattice_val = CONSTANT;
1766 val.value = simplified;
1767 val.mask = 0;
1768 return val;
1769 }
1770 }
1771 /* If the statement is likely to have a VARYING result, then do not
1772 bother folding the statement. */
1773 else if (likelyvalue == VARYING)
1774 {
1775 enum gimple_code code = gimple_code (stmt);
1776 if (code == GIMPLE_ASSIGN)
1777 {
1778 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1779
1780 /* Other cases cannot satisfy is_gimple_min_invariant
1781 without folding. */
1782 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1783 simplified = gimple_assign_rhs1 (stmt);
1784 }
1785 else if (code == GIMPLE_SWITCH)
1786 simplified = gimple_switch_index (as_a <gswitch *> (stmt));
1787 else
1788 /* These cannot satisfy is_gimple_min_invariant without folding. */
1789 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1790 is_constant = simplified && is_gimple_min_invariant (simplified);
1791 if (is_constant)
1792 {
1793 /* The statement produced a constant value. */
1794 val.lattice_val = CONSTANT;
1795 val.value = simplified;
1796 val.mask = 0;
1797 }
1798 }
1799 /* If the statement result is likely UNDEFINED, make it so. */
1800 else if (likelyvalue == UNDEFINED)
1801 {
1802 val.lattice_val = UNDEFINED;
1803 val.value = NULL_TREE;
1804 val.mask = 0;
1805 return val;
1806 }
1807
1808 /* Resort to simplification for bitwise tracking. */
1809 if (flag_tree_bit_ccp
1810 && (likelyvalue == CONSTANT || is_gimple_call (stmt)
1811 || (gimple_assign_single_p (stmt)
1812 && gimple_assign_rhs_code (stmt) == ADDR_EXPR))
1813 && !is_constant)
1814 {
1815 enum gimple_code code = gimple_code (stmt);
1816 val.lattice_val = VARYING;
1817 val.value = NULL_TREE;
1818 val.mask = -1;
1819 if (code == GIMPLE_ASSIGN)
1820 {
1821 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1822 tree rhs1 = gimple_assign_rhs1 (stmt);
1823 tree lhs = gimple_assign_lhs (stmt);
1824 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
1825 || POINTER_TYPE_P (TREE_TYPE (lhs)))
1826 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1827 || POINTER_TYPE_P (TREE_TYPE (rhs1))))
1828 switch (get_gimple_rhs_class (subcode))
1829 {
1830 case GIMPLE_SINGLE_RHS:
1831 val = get_value_for_expr (rhs1, true);
1832 break;
1833
1834 case GIMPLE_UNARY_RHS:
1835 val = bit_value_unop (subcode, TREE_TYPE (lhs), rhs1);
1836 break;
1837
1838 case GIMPLE_BINARY_RHS:
1839 val = bit_value_binop (subcode, TREE_TYPE (lhs), rhs1,
1840 gimple_assign_rhs2 (stmt));
1841 break;
1842
1843 default:;
1844 }
1845 }
1846 else if (code == GIMPLE_COND)
1847 {
1848 enum tree_code code = gimple_cond_code (stmt);
1849 tree rhs1 = gimple_cond_lhs (stmt);
1850 tree rhs2 = gimple_cond_rhs (stmt);
1851 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1852 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1853 val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
1854 }
1855 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1856 {
1857 tree fndecl = gimple_call_fndecl (stmt);
1858 switch (DECL_FUNCTION_CODE (fndecl))
1859 {
1860 case BUILT_IN_MALLOC:
1861 case BUILT_IN_REALLOC:
1862 case BUILT_IN_CALLOC:
1863 case BUILT_IN_STRDUP:
1864 case BUILT_IN_STRNDUP:
1865 val.lattice_val = CONSTANT;
1866 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
1867 val.mask = ~((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT
1868 / BITS_PER_UNIT - 1);
1869 break;
1870
1871 case BUILT_IN_ALLOCA:
1872 case BUILT_IN_ALLOCA_WITH_ALIGN:
1873 align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
1874 ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
1875 : BIGGEST_ALIGNMENT);
1876 val.lattice_val = CONSTANT;
1877 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
1878 val.mask = ~((HOST_WIDE_INT) align / BITS_PER_UNIT - 1);
1879 break;
1880
1881 /* These builtins return their first argument, unmodified. */
1882 case BUILT_IN_MEMCPY:
1883 case BUILT_IN_MEMMOVE:
1884 case BUILT_IN_MEMSET:
1885 case BUILT_IN_STRCPY:
1886 case BUILT_IN_STRNCPY:
1887 case BUILT_IN_MEMCPY_CHK:
1888 case BUILT_IN_MEMMOVE_CHK:
1889 case BUILT_IN_MEMSET_CHK:
1890 case BUILT_IN_STRCPY_CHK:
1891 case BUILT_IN_STRNCPY_CHK:
1892 val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
1893 break;
1894
1895 case BUILT_IN_ASSUME_ALIGNED:
1896 val = bit_value_assume_aligned (stmt, NULL_TREE, val, false);
1897 break;
1898
1899 case BUILT_IN_ALIGNED_ALLOC:
1900 {
1901 tree align = get_constant_value (gimple_call_arg (stmt, 0));
1902 if (align
1903 && tree_fits_uhwi_p (align))
1904 {
1905 unsigned HOST_WIDE_INT aligni = tree_to_uhwi (align);
1906 if (aligni > 1
1907 		      /* align must be a power of two.  */
1908 && (aligni & (aligni - 1)) == 0)
1909 {
1910 val.lattice_val = CONSTANT;
1911 val.value = build_int_cst (ptr_type_node, 0);
1912 val.mask = -aligni;
1913 }
1914 }
1915 break;
1916 }
1917
1918 default:;
1919 }
1920 }
1921 if (is_gimple_call (stmt) && gimple_call_lhs (stmt))
1922 {
1923 tree fntype = gimple_call_fntype (stmt);
1924 if (fntype)
1925 {
1926 tree attrs = lookup_attribute ("assume_aligned",
1927 TYPE_ATTRIBUTES (fntype));
1928 if (attrs)
1929 val = bit_value_assume_aligned (stmt, attrs, val, false);
1930 attrs = lookup_attribute ("alloc_align",
1931 TYPE_ATTRIBUTES (fntype));
1932 if (attrs)
1933 val = bit_value_assume_aligned (stmt, attrs, val, true);
1934 }
1935 }
1936 is_constant = (val.lattice_val == CONSTANT);
1937 }
1938
1939 if (flag_tree_bit_ccp
1940 && ((is_constant && TREE_CODE (val.value) == INTEGER_CST)
1941 || !is_constant)
1942 && gimple_get_lhs (stmt)
1943 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME)
1944 {
1945 tree lhs = gimple_get_lhs (stmt);
1946 wide_int nonzero_bits = get_nonzero_bits (lhs);
1947 if (nonzero_bits != -1)
1948 {
1949 if (!is_constant)
1950 {
1951 val.lattice_val = CONSTANT;
1952 val.value = build_zero_cst (TREE_TYPE (lhs));
1953 val.mask = extend_mask (nonzero_bits);
1954 is_constant = true;
1955 }
1956 else
1957 {
1958 if (wi::bit_and_not (val.value, nonzero_bits) != 0)
1959 val.value = wide_int_to_tree (TREE_TYPE (lhs),
1960 nonzero_bits & val.value);
1961 if (nonzero_bits == 0)
1962 val.mask = 0;
1963 else
1964 val.mask = val.mask & extend_mask (nonzero_bits);
1965 }
1966 }
1967 }
1968
1969 /* The statement produced a nonconstant value. */
1970 if (!is_constant)
1971 {
1972 /* The statement produced a copy. */
1973 if (simplified && TREE_CODE (simplified) == SSA_NAME
1974 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (simplified))
1975 {
1976 val.lattice_val = CONSTANT;
1977 val.value = simplified;
1978 val.mask = -1;
1979 }
1980 /* The statement is VARYING. */
1981 else
1982 {
1983 val.lattice_val = VARYING;
1984 val.value = NULL_TREE;
1985 val.mask = -1;
1986 }
1987 }
1988
1989 return val;
1990 }
1991
1992 typedef hash_table<nofree_ptr_hash<gimple_statement_base> > gimple_htab;
1993
1994 /* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
1995 each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
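/* For illustration (a sketch; the SSA name and VAR are arbitrary), the
   intended effect in the GIMPLE dump is roughly:

     saved.1_5 = __builtin_stack_save ();
     ...
     var ={v} {CLOBBER};                      <-- clobber inserted here
     __builtin_stack_restore (saved.1_5);  */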
1996
1997 static void
1998 insert_clobber_before_stack_restore (tree saved_val, tree var,
1999 gimple_htab **visited)
2000 {
2001 gimple stmt;
2002 gassign *clobber_stmt;
2003 tree clobber;
2004 imm_use_iterator iter;
2005 gimple_stmt_iterator i;
2006 gimple *slot;
2007
2008 FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
2009 if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
2010 {
2011 clobber = build_constructor (TREE_TYPE (var),
2012 NULL);
2013 TREE_THIS_VOLATILE (clobber) = 1;
2014 clobber_stmt = gimple_build_assign (var, clobber);
2015
2016 i = gsi_for_stmt (stmt);
2017 gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
2018 }
2019 else if (gimple_code (stmt) == GIMPLE_PHI)
2020 {
2021 if (!*visited)
2022 *visited = new gimple_htab (10);
2023
2024 slot = (*visited)->find_slot (stmt, INSERT);
2025 if (*slot != NULL)
2026 continue;
2027
2028 *slot = stmt;
2029 insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
2030 visited);
2031 }
2032 else if (gimple_assign_ssa_name_copy_p (stmt))
2033 insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
2034 visited);
2035 else if (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
2036 continue;
2037 else
2038 gcc_assert (is_gimple_debug (stmt));
2039 }
2040
2041 /* Advance the iterator to the previous non-debug gimple statement in the same
2042 or dominating basic block. */
2043
2044 static inline void
2045 gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
2046 {
2047 basic_block dom;
2048
2049 gsi_prev_nondebug (i);
2050 while (gsi_end_p (*i))
2051 {
2052 dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
2053 if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2054 return;
2055
2056 *i = gsi_last_bb (dom);
2057 }
2058 }
2059
2060 /* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
2061 a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
2062
2063    It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when a
2064 previous pass (such as DOM) duplicated it along multiple paths to a BB. In
2065 that case the function gives up without inserting the clobbers. */
2066
2067 static void
2068 insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
2069 {
2070 gimple stmt;
2071 tree saved_val;
2072 gimple_htab *visited = NULL;
2073
2074 for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
2075 {
2076 stmt = gsi_stmt (i);
2077
2078 if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
2079 continue;
2080
2081 saved_val = gimple_call_lhs (stmt);
2082 if (saved_val == NULL_TREE)
2083 continue;
2084
2085 insert_clobber_before_stack_restore (saved_val, var, &visited);
2086 break;
2087 }
2088
2089 delete visited;
2090 }
2091
2092 /* Detects a __builtin_alloca_with_align with a constant size argument.  If
2093    one is found, declares a fixed-size array and returns its address;
2094    otherwise returns NULL_TREE. */
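/* For example (an illustrative sketch; the declaration name is made up),
   once the size argument is known to be constant,

     ptr_7 = __builtin_alloca_with_align (16, 64);

   can become the address of a local array with the requested alignment:

     unsigned char D.1234[16];   // DECL_ALIGN of 64 bits
     ptr_7 = &D.1234;  */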
2095
2096 static tree
2097 fold_builtin_alloca_with_align (gimple stmt)
2098 {
2099 unsigned HOST_WIDE_INT size, threshold, n_elem;
2100 tree lhs, arg, block, var, elem_type, array_type;
2101
2102 /* Get lhs. */
2103 lhs = gimple_call_lhs (stmt);
2104 if (lhs == NULL_TREE)
2105 return NULL_TREE;
2106
2107 /* Detect constant argument. */
2108 arg = get_constant_value (gimple_call_arg (stmt, 0));
2109 if (arg == NULL_TREE
2110 || TREE_CODE (arg) != INTEGER_CST
2111 || !tree_fits_uhwi_p (arg))
2112 return NULL_TREE;
2113
2114 size = tree_to_uhwi (arg);
2115
2116 /* Heuristic: don't fold large allocas. */
2117   threshold = (unsigned HOST_WIDE_INT) PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
2118 /* In case the alloca is located at function entry, it has the same lifetime
2119 as a declared array, so we allow a larger size. */
2120 block = gimple_block (stmt);
2121 if (!(cfun->after_inlining
2122 && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
2123 threshold /= 10;
2124 if (size > threshold)
2125 return NULL_TREE;
2126
2127 /* Declare array. */
2128 elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
2129 n_elem = size * 8 / BITS_PER_UNIT;
2130 array_type = build_array_type_nelts (elem_type, n_elem);
2131 var = create_tmp_var (array_type);
2132 DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
2133 {
2134 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
2135 if (pi != NULL && !pi->pt.anything)
2136 {
2137 bool singleton_p;
2138 unsigned uid;
2139 singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
2140 gcc_assert (singleton_p);
2141 SET_DECL_PT_UID (var, uid);
2142 }
2143 }
2144
2145 /* Fold alloca to the address of the array. */
2146 return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
2147 }
2148
2149 /* Fold the stmt at *GSI with CCP-specific information that propagation
2150    and regular folding do not catch. */
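/* As an illustrative sketch: if bit-CCP has determined that x_1 is
   { value = 0, mask = 0x4 }, i.e. x_1 is either 0 or 4, then the predicate in

     if (x_1 == 3)

   is known to be false even though x_1 itself is not a propagatable
   constant, and the condition is folded to be always false below.  */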
2151
2152 static bool
2153 ccp_fold_stmt (gimple_stmt_iterator *gsi)
2154 {
2155 gimple stmt = gsi_stmt (*gsi);
2156
2157 switch (gimple_code (stmt))
2158 {
2159 case GIMPLE_COND:
2160 {
2161 gcond *cond_stmt = as_a <gcond *> (stmt);
2162 ccp_prop_value_t val;
2163 /* Statement evaluation will handle type mismatches in constants
2164 more gracefully than the final propagation. This allows us to
2165 fold more conditionals here. */
2166 val = evaluate_stmt (stmt);
2167 if (val.lattice_val != CONSTANT
2168 || val.mask != 0)
2169 return false;
2170
2171 if (dump_file)
2172 {
2173 fprintf (dump_file, "Folding predicate ");
2174 print_gimple_expr (dump_file, stmt, 0, 0);
2175 fprintf (dump_file, " to ");
2176 print_generic_expr (dump_file, val.value, 0);
2177 fprintf (dump_file, "\n");
2178 }
2179
2180 if (integer_zerop (val.value))
2181 gimple_cond_make_false (cond_stmt);
2182 else
2183 gimple_cond_make_true (cond_stmt);
2184
2185 return true;
2186 }
2187
2188 case GIMPLE_CALL:
2189 {
2190 tree lhs = gimple_call_lhs (stmt);
2191 int flags = gimple_call_flags (stmt);
2192 tree val;
2193 tree argt;
2194 bool changed = false;
2195 unsigned i;
2196
2197 	/* If the call was folded into a constant, make sure it goes
2198 away even if we cannot propagate into all uses because of
2199 type issues. */
2200 if (lhs
2201 && TREE_CODE (lhs) == SSA_NAME
2202 && (val = get_constant_value (lhs))
2203 /* Don't optimize away calls that have side-effects. */
2204 && (flags & (ECF_CONST|ECF_PURE)) != 0
2205 && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
2206 {
2207 tree new_rhs = unshare_expr (val);
2208 bool res;
2209 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2210 TREE_TYPE (new_rhs)))
2211 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
2212 res = update_call_from_tree (gsi, new_rhs);
2213 gcc_assert (res);
2214 return true;
2215 }
2216
2217 /* Internal calls provide no argument types, so the extra laxity
2218 for normal calls does not apply. */
2219 if (gimple_call_internal_p (stmt))
2220 return false;
2221
2222 	/* The heuristic of fold_builtin_alloca_with_align differs before and
2223 	   after inlining, so we don't require the argument to have already been
2224 	   replaced with a constant; it just has to be constant.  */
2225 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
2226 {
2227 tree new_rhs = fold_builtin_alloca_with_align (stmt);
2228 if (new_rhs)
2229 {
2230 bool res = update_call_from_tree (gsi, new_rhs);
2231 	      tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 0);
2232 gcc_assert (res);
2233 insert_clobbers_for_var (*gsi, var);
2234 return true;
2235 }
2236 }
2237
2238 	/* Propagate into the call arguments.  Compared to replace_uses_in,
2239 	   this can use the argument slot types for type verification
2240 	   instead of the current argument type.  We can also safely
2241 	   drop qualifiers here, as we are dealing with constants anyway. */
2242 argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
2243 for (i = 0; i < gimple_call_num_args (stmt) && argt;
2244 ++i, argt = TREE_CHAIN (argt))
2245 {
2246 tree arg = gimple_call_arg (stmt, i);
2247 if (TREE_CODE (arg) == SSA_NAME
2248 && (val = get_constant_value (arg))
2249 && useless_type_conversion_p
2250 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
2251 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
2252 {
2253 gimple_call_set_arg (stmt, i, unshare_expr (val));
2254 changed = true;
2255 }
2256 }
2257
2258 return changed;
2259 }
2260
2261 case GIMPLE_ASSIGN:
2262 {
2263 tree lhs = gimple_assign_lhs (stmt);
2264 tree val;
2265
2266 	/* If we have a load that turned out to be constant, replace it,
2267 	   as we cannot propagate into all uses in all cases.  */
2268 if (gimple_assign_single_p (stmt)
2269 && TREE_CODE (lhs) == SSA_NAME
2270 && (val = get_constant_value (lhs)))
2271 {
2272 tree rhs = unshare_expr (val);
2273 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2274 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
2275 gimple_assign_set_rhs_from_tree (gsi, rhs);
2276 return true;
2277 }
2278
2279 return false;
2280 }
2281
2282 default:
2283 return false;
2284 }
2285 }
2286
2287 /* Visit the assignment statement STMT. Set the value of its LHS to the
2288 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
2289 creates virtual definitions, set the value of each new name to that
2290 of the RHS (if we can derive a constant out of the RHS).
2291 Value-returning call statements also perform an assignment, and
2292 are handled here. */
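/* For instance (a sketch with arbitrary SSA names), when simulation reaches

     x_2 = y_1 + 4;

   and y_1 is already known to be CONSTANT 3, the RHS evaluates to 7, x_2's
   lattice value is set to CONSTANT 7, and SSA_PROP_INTERESTING is returned
   so that uses of x_2 get revisited.  */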
2293
2294 static enum ssa_prop_result
2295 visit_assignment (gimple stmt, tree *output_p)
2296 {
2297 ccp_prop_value_t val;
2298 enum ssa_prop_result retval = SSA_PROP_NOT_INTERESTING;
2299
2300 tree lhs = gimple_get_lhs (stmt);
2301 if (TREE_CODE (lhs) == SSA_NAME)
2302 {
2303 /* Evaluate the statement, which could be
2304 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2305 val = evaluate_stmt (stmt);
2306
2307 /* If STMT is an assignment to an SSA_NAME, we only have one
2308 value to set. */
2309 if (set_lattice_value (lhs, &val))
2310 {
2311 *output_p = lhs;
2312 if (val.lattice_val == VARYING)
2313 retval = SSA_PROP_VARYING;
2314 else
2315 retval = SSA_PROP_INTERESTING;
2316 }
2317 }
2318
2319 return retval;
2320 }
2321
2322
2323 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
2324 if it can determine which edge will be taken. Otherwise, return
2325 SSA_PROP_VARYING. */
2326
2327 static enum ssa_prop_result
2328 visit_cond_stmt (gimple stmt, edge *taken_edge_p)
2329 {
2330 ccp_prop_value_t val;
2331 basic_block block;
2332
2333 block = gimple_bb (stmt);
2334 val = evaluate_stmt (stmt);
2335 if (val.lattice_val != CONSTANT
2336 || val.mask != 0)
2337 return SSA_PROP_VARYING;
2338
2339 /* Find which edge out of the conditional block will be taken and add it
2340 to the worklist. If no single edge can be determined statically,
2341 return SSA_PROP_VARYING to feed all the outgoing edges to the
2342 propagation engine. */
2343 *taken_edge_p = find_taken_edge (block, val.value);
2344 if (*taken_edge_p)
2345 return SSA_PROP_INTERESTING;
2346 else
2347 return SSA_PROP_VARYING;
2348 }
2349
2350
2351 /* Evaluate statement STMT. If the statement produces an output value and
2352 its evaluation changes the lattice value of its output, return
2353 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
2354 output value.
2355
2356 If STMT is a conditional branch and we can determine its truth
2357 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
2358 value, return SSA_PROP_VARYING. */
2359
2360 static enum ssa_prop_result
2361 ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
2362 {
2363 tree def;
2364 ssa_op_iter iter;
2365
2366 if (dump_file && (dump_flags & TDF_DETAILS))
2367 {
2368 fprintf (dump_file, "\nVisiting statement:\n");
2369 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2370 }
2371
2372 switch (gimple_code (stmt))
2373 {
2374 case GIMPLE_ASSIGN:
2375 /* If the statement is an assignment that produces a single
2376 output value, evaluate its RHS to see if the lattice value of
2377 its output has changed. */
2378 return visit_assignment (stmt, output_p);
2379
2380 case GIMPLE_CALL:
2381 /* A value-returning call also performs an assignment. */
2382 if (gimple_call_lhs (stmt) != NULL_TREE)
2383 return visit_assignment (stmt, output_p);
2384 break;
2385
2386 case GIMPLE_COND:
2387 case GIMPLE_SWITCH:
2388 /* If STMT is a conditional branch, see if we can determine
2389 which branch will be taken. */
2390 /* FIXME. It appears that we should be able to optimize
2391 computed GOTOs here as well. */
2392 return visit_cond_stmt (stmt, taken_edge_p);
2393
2394 default:
2395 break;
2396 }
2397
2398 /* Any other kind of statement is not interesting for constant
2399 propagation and, therefore, not worth simulating. */
2400 if (dump_file && (dump_flags & TDF_DETAILS))
2401 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
2402
2403 /* Definitions made by statements other than assignments to
2404 SSA_NAMEs represent unknown modifications to their outputs.
2405 Mark them VARYING. */
2406 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
2407 set_value_varying (def);
2408
2409 return SSA_PROP_VARYING;
2410 }
2411
2412
2413 /* Main entry point for SSA Conditional Constant Propagation. */
2414
2415 static unsigned int
2416 do_ssa_ccp (void)
2417 {
2418 unsigned int todo = 0;
2419 calculate_dominance_info (CDI_DOMINATORS);
2420 ccp_initialize ();
2421 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
2422 if (ccp_finalize ())
2423 todo = (TODO_cleanup_cfg | TODO_update_ssa);
2424 free_dominance_info (CDI_DOMINATORS);
2425 return todo;
2426 }
2427
2428
2429 namespace {
2430
2431 const pass_data pass_data_ccp =
2432 {
2433 GIMPLE_PASS, /* type */
2434 "ccp", /* name */
2435 OPTGROUP_NONE, /* optinfo_flags */
2436 TV_TREE_CCP, /* tv_id */
2437 ( PROP_cfg | PROP_ssa ), /* properties_required */
2438 0, /* properties_provided */
2439 0, /* properties_destroyed */
2440 0, /* todo_flags_start */
2441 TODO_update_address_taken, /* todo_flags_finish */
2442 };
2443
2444 class pass_ccp : public gimple_opt_pass
2445 {
2446 public:
2447 pass_ccp (gcc::context *ctxt)
2448 : gimple_opt_pass (pass_data_ccp, ctxt)
2449 {}
2450
2451 /* opt_pass methods: */
2452 opt_pass * clone () { return new pass_ccp (m_ctxt); }
2453 virtual bool gate (function *) { return flag_tree_ccp != 0; }
2454 virtual unsigned int execute (function *) { return do_ssa_ccp (); }
2455
2456 }; // class pass_ccp
2457
2458 } // anon namespace
2459
2460 gimple_opt_pass *
2461 make_pass_ccp (gcc::context *ctxt)
2462 {
2463 return new pass_ccp (ctxt);
2464 }
2465
2466
2467
2468 /* Try to optimize out __builtin_stack_restore. Optimize it out
2469 if there is another __builtin_stack_restore in the same basic
2470 block and no calls or ASM_EXPRs are in between, or if this block's
2471 only outgoing edge is to EXIT_BLOCK and there are no calls or
2472 ASM_EXPRs after this __builtin_stack_restore. */
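/* For example (a sketch; SSA names are arbitrary):

     saved.2_4 = __builtin_stack_save ();
     ...                                     <-- no calls or ASM_EXPRs here
     __builtin_stack_restore (saved.2_4);    <-- redundant, optimized out
     ...
     __builtin_stack_restore (saved.2_4);

   Only the last restore is needed; the earlier one has no observable
   effect.  */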
2473
2474 static tree
2475 optimize_stack_restore (gimple_stmt_iterator i)
2476 {
2477 tree callee;
2478 gimple stmt;
2479
2480 basic_block bb = gsi_bb (i);
2481 gimple call = gsi_stmt (i);
2482
2483 if (gimple_code (call) != GIMPLE_CALL
2484 || gimple_call_num_args (call) != 1
2485 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2486 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
2487 return NULL_TREE;
2488
2489 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
2490 {
2491 stmt = gsi_stmt (i);
2492 if (gimple_code (stmt) == GIMPLE_ASM)
2493 return NULL_TREE;
2494 if (gimple_code (stmt) != GIMPLE_CALL)
2495 continue;
2496
2497 callee = gimple_call_fndecl (stmt);
2498 if (!callee
2499 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2500 /* All regular builtins are ok, just obviously not alloca. */
2501 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
2502 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
2503 return NULL_TREE;
2504
2505 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
2506 goto second_stack_restore;
2507 }
2508
2509 if (!gsi_end_p (i))
2510 return NULL_TREE;
2511
2512   /* Allow either no successors, or a single successor to the exit block. */
2513 switch (EDGE_COUNT (bb->succs))
2514 {
2515 case 0:
2516 break;
2517 case 1:
2518 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2519 return NULL_TREE;
2520 break;
2521 default:
2522 return NULL_TREE;
2523 }
2524 second_stack_restore:
2525
2526 /* If there's exactly one use, then zap the call to __builtin_stack_save.
2527 If there are multiple uses, then the last one should remove the call.
2528 In any case, whether the call to __builtin_stack_save can be removed
2529 or not is irrelevant to removing the call to __builtin_stack_restore. */
2530 if (has_single_use (gimple_call_arg (call, 0)))
2531 {
2532 gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
2533 if (is_gimple_call (stack_save))
2534 {
2535 callee = gimple_call_fndecl (stack_save);
2536 if (callee
2537 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2538 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
2539 {
2540 gimple_stmt_iterator stack_save_gsi;
2541 tree rhs;
2542
2543 stack_save_gsi = gsi_for_stmt (stack_save);
2544 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2545 update_call_from_tree (&stack_save_gsi, rhs);
2546 }
2547 }
2548 }
2549
2550 /* No effect, so the statement will be deleted. */
2551 return integer_zero_node;
2552 }
2553
2554 /* If the va_list type is a simple pointer and nothing special is needed,
2555    optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
2556    optimize __builtin_va_end (&ap) out as a NOP, and turn __builtin_va_copy
2557    into a simple pointer assignment. */
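/* On such targets the transformations amount to (an illustrative sketch,
   assuming ap and ap2 are plain char * va_lists):

     __builtin_va_start (&ap, 0);   ->   ap = __builtin_next_arg (0);
     __builtin_va_copy (&ap, ap2);  ->   ap = ap2;
     __builtin_va_end (&ap);        ->   removed as a no-op  */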
2558
2559 static tree
2560 optimize_stdarg_builtin (gimple call)
2561 {
2562 tree callee, lhs, rhs, cfun_va_list;
2563 bool va_list_simple_ptr;
2564 location_t loc = gimple_location (call);
2565
2566 if (gimple_code (call) != GIMPLE_CALL)
2567 return NULL_TREE;
2568
2569 callee = gimple_call_fndecl (call);
2570
2571 cfun_va_list = targetm.fn_abi_va_list (callee);
2572 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
2573 && (TREE_TYPE (cfun_va_list) == void_type_node
2574 || TREE_TYPE (cfun_va_list) == char_type_node);
2575
2576 switch (DECL_FUNCTION_CODE (callee))
2577 {
2578 case BUILT_IN_VA_START:
2579 if (!va_list_simple_ptr
2580 || targetm.expand_builtin_va_start != NULL
2581 || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
2582 return NULL_TREE;
2583
2584 if (gimple_call_num_args (call) != 2)
2585 return NULL_TREE;
2586
2587 lhs = gimple_call_arg (call, 0);
2588 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2589 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2590 != TYPE_MAIN_VARIANT (cfun_va_list))
2591 return NULL_TREE;
2592
2593 lhs = build_fold_indirect_ref_loc (loc, lhs);
2594 rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
2595 1, integer_zero_node);
2596 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
2597 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2598
2599 case BUILT_IN_VA_COPY:
2600 if (!va_list_simple_ptr)
2601 return NULL_TREE;
2602
2603 if (gimple_call_num_args (call) != 2)
2604 return NULL_TREE;
2605
2606 lhs = gimple_call_arg (call, 0);
2607 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2608 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2609 != TYPE_MAIN_VARIANT (cfun_va_list))
2610 return NULL_TREE;
2611
2612 lhs = build_fold_indirect_ref_loc (loc, lhs);
2613 rhs = gimple_call_arg (call, 1);
2614 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
2615 != TYPE_MAIN_VARIANT (cfun_va_list))
2616 return NULL_TREE;
2617
2618 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
2619 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2620
2621 case BUILT_IN_VA_END:
2622 /* No effect, so the statement will be deleted. */
2623 return integer_zero_node;
2624
2625 default:
2626 gcc_unreachable ();
2627 }
2628 }
2629
2630 /* Attempt to make the block of __builtin_unreachable I unreachable by changing
2631 the incoming jumps. Return true if at least one jump was changed. */
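/* As an illustration (a sketch; block and SSA names are arbitrary), given

     <bb 7>:
     __builtin_unreachable ();

   with a predecessor ending in "if (a_1 > 10) goto <bb 7>; else goto <bb 8>;",
   the conditional is rewritten so that the edge into <bb 7> can never be
   taken, making the block unreachable.  */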
2632
2633 static bool
2634 optimize_unreachable (gimple_stmt_iterator i)
2635 {
2636 basic_block bb = gsi_bb (i);
2637 gimple_stmt_iterator gsi;
2638 gimple stmt;
2639 edge_iterator ei;
2640 edge e;
2641 bool ret;
2642
2643 if (flag_sanitize & SANITIZE_UNREACHABLE)
2644 return false;
2645
2646 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2647 {
2648 stmt = gsi_stmt (gsi);
2649
2650 if (is_gimple_debug (stmt))
2651 continue;
2652
2653 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2654 {
2655 /* Verify we do not need to preserve the label. */
2656 if (FORCED_LABEL (gimple_label_label (label_stmt)))
2657 return false;
2658
2659 continue;
2660 }
2661
2662       /* Only handle the case where __builtin_unreachable is the first statement
2663 in the block. We rely on DCE to remove stmts without side-effects
2664 before __builtin_unreachable. */
2665 if (gsi_stmt (gsi) != gsi_stmt (i))
2666 return false;
2667 }
2668
2669 ret = false;
2670 FOR_EACH_EDGE (e, ei, bb->preds)
2671 {
2672 gsi = gsi_last_bb (e->src);
2673 if (gsi_end_p (gsi))
2674 continue;
2675
2676 stmt = gsi_stmt (gsi);
2677 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
2678 {
2679 if (e->flags & EDGE_TRUE_VALUE)
2680 gimple_cond_make_false (cond_stmt);
2681 else if (e->flags & EDGE_FALSE_VALUE)
2682 gimple_cond_make_true (cond_stmt);
2683 else
2684 gcc_unreachable ();
2685 update_stmt (cond_stmt);
2686 }
2687 else
2688 {
2689 	  /* TODO: handle other cases, e.g. a switch statement. */
2690 continue;
2691 }
2692
2693 ret = true;
2694 }
2695
2696 return ret;
2697 }
2698
2699 /* A simple pass that attempts to fold all builtin functions. This pass
2700 is run after we've propagated as many constants as we can. */
2701
2702 namespace {
2703
2704 const pass_data pass_data_fold_builtins =
2705 {
2706 GIMPLE_PASS, /* type */
2707 "fab", /* name */
2708 OPTGROUP_NONE, /* optinfo_flags */
2709 TV_NONE, /* tv_id */
2710 ( PROP_cfg | PROP_ssa ), /* properties_required */
2711 0, /* properties_provided */
2712 0, /* properties_destroyed */
2713 0, /* todo_flags_start */
2714 TODO_update_ssa, /* todo_flags_finish */
2715 };
2716
2717 class pass_fold_builtins : public gimple_opt_pass
2718 {
2719 public:
2720 pass_fold_builtins (gcc::context *ctxt)
2721 : gimple_opt_pass (pass_data_fold_builtins, ctxt)
2722 {}
2723
2724 /* opt_pass methods: */
2725 opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
2726 virtual unsigned int execute (function *);
2727
2728 }; // class pass_fold_builtins
2729
2730 unsigned int
2731 pass_fold_builtins::execute (function *fun)
2732 {
2733 bool cfg_changed = false;
2734 basic_block bb;
2735 unsigned int todoflags = 0;
2736
2737 FOR_EACH_BB_FN (bb, fun)
2738 {
2739 gimple_stmt_iterator i;
2740 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
2741 {
2742 gimple stmt, old_stmt;
2743 tree callee;
2744 enum built_in_function fcode;
2745
2746 stmt = gsi_stmt (i);
2747
2748 if (gimple_code (stmt) != GIMPLE_CALL)
2749 {
2750 	      /* Remove all *ssaname_N ={v} {CLOBBER}; stmts;
2751 after the last GIMPLE DSE they aren't needed and might
2752 unnecessarily keep the SSA_NAMEs live. */
2753 if (gimple_clobber_p (stmt))
2754 {
2755 tree lhs = gimple_assign_lhs (stmt);
2756 if (TREE_CODE (lhs) == MEM_REF
2757 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
2758 {
2759 unlink_stmt_vdef (stmt);
2760 gsi_remove (&i, true);
2761 release_defs (stmt);
2762 continue;
2763 }
2764 }
2765 gsi_next (&i);
2766 continue;
2767 }
2768
2769 callee = gimple_call_fndecl (stmt);
2770 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2771 {
2772 gsi_next (&i);
2773 continue;
2774 }
2775
2776 fcode = DECL_FUNCTION_CODE (callee);
2777 if (fold_stmt (&i))
2778 ;
2779 else
2780 {
2781 tree result = NULL_TREE;
2782 switch (DECL_FUNCTION_CODE (callee))
2783 {
2784 case BUILT_IN_CONSTANT_P:
2785 /* Resolve __builtin_constant_p. If it hasn't been
2786 folded to integer_one_node by now, it's fairly
2787 certain that the value simply isn't constant. */
2788 result = integer_zero_node;
2789 break;
2790
2791 case BUILT_IN_ASSUME_ALIGNED:
2792 /* Remove __builtin_assume_aligned. */
2793 result = gimple_call_arg (stmt, 0);
2794 break;
2795
2796 case BUILT_IN_STACK_RESTORE:
2797 result = optimize_stack_restore (i);
2798 if (result)
2799 break;
2800 gsi_next (&i);
2801 continue;
2802
2803 case BUILT_IN_UNREACHABLE:
2804 if (optimize_unreachable (i))
2805 cfg_changed = true;
2806 break;
2807
2808 case BUILT_IN_VA_START:
2809 case BUILT_IN_VA_END:
2810 case BUILT_IN_VA_COPY:
2811 /* These shouldn't be folded before pass_stdarg. */
2812 result = optimize_stdarg_builtin (stmt);
2813 if (result)
2814 break;
2815 /* FALLTHRU */
2816
2817 default:;
2818 }
2819
2820 if (!result)
2821 {
2822 gsi_next (&i);
2823 continue;
2824 }
2825
2826 if (!update_call_from_tree (&i, result))
2827 gimplify_and_update_call_from_tree (&i, result);
2828 }
2829
2830 todoflags |= TODO_update_address_taken;
2831
2832 if (dump_file && (dump_flags & TDF_DETAILS))
2833 {
2834 fprintf (dump_file, "Simplified\n ");
2835 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2836 }
2837
2838 old_stmt = stmt;
2839 stmt = gsi_stmt (i);
2840 update_stmt (stmt);
2841
2842 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
2843 && gimple_purge_dead_eh_edges (bb))
2844 cfg_changed = true;
2845
2846 if (dump_file && (dump_flags & TDF_DETAILS))
2847 {
2848 fprintf (dump_file, "to\n ");
2849 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2850 fprintf (dump_file, "\n");
2851 }
2852
2853 /* Retry the same statement if it changed into another
2854 	     builtin; there might be new opportunities now.  */
2855 if (gimple_code (stmt) != GIMPLE_CALL)
2856 {
2857 gsi_next (&i);
2858 continue;
2859 }
2860 callee = gimple_call_fndecl (stmt);
2861 if (!callee
2862 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2863 || DECL_FUNCTION_CODE (callee) == fcode)
2864 gsi_next (&i);
2865 }
2866 }
2867
2868 /* Delete unreachable blocks. */
2869 if (cfg_changed)
2870 todoflags |= TODO_cleanup_cfg;
2871
2872 return todoflags;
2873 }
2874
2875 } // anon namespace
2876
2877 gimple_opt_pass *
2878 make_pass_fold_builtins (gcc::context *ctxt)
2879 {
2880 return new pass_fold_builtins (ctxt);
2881 }