re PR tree-optimization/68026 (Regression in GCC-6.0.0's optimizer)
[gcc.git] / gcc / tree-ssa-ccp.c
1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000-2015 Free Software Foundation, Inc.
3 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
4 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by the
10 Free Software Foundation; either version 3, or (at your option) any
11 later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* Conditional constant propagation (CCP) is based on the SSA
23 propagation engine (tree-ssa-propagate.c). Constant assignments of
24 the form VAR = CST are propagated from the assignments into uses of
25 VAR, which in turn may generate new constants. The simulation uses
26 a four level lattice to keep track of constant values associated
27 with SSA names. Given an SSA name V_i, it may take one of the
28 following values:
29
30 UNINITIALIZED -> the initial state of the value. This value
31 is replaced with a correct initial value
32 the first time the value is used, so the
33 rest of the pass does not need to care about
34 it. Using this value simplifies initialization
35 of the pass, and prevents us from needlessly
36 scanning statements that are never reached.
37
38 UNDEFINED -> V_i is a local variable whose definition
39 has not been processed yet. Therefore we
40 don't yet know if its value is a constant
41 or not.
42
43 CONSTANT -> V_i has been found to hold a constant
44 value C.
45
46 VARYING -> V_i cannot take a constant value, or if it
47 does, it is not possible to determine it
48 at compile time.
49
50 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
51
52 1- In ccp_visit_stmt, we are interested in assignments whose RHS
53 evaluates into a constant and conditional jumps whose predicate
54 evaluates into a boolean true or false. When an assignment of
55 the form V_i = CONST is found, V_i's lattice value is set to
56 CONSTANT and CONST is associated with it. This causes the
57 propagation engine to add all the SSA edges coming out of the
58 assignment into the worklists, so that statements that use V_i
59 can be visited.
60
61 If the statement is a conditional with a constant predicate, we
62 mark the outgoing edges as executable or not executable
63 depending on the predicate's value. This is then used when
64 visiting PHI nodes to know when a PHI argument can be ignored.
65
66
67 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
68 same constant C, then the LHS of the PHI is set to C. This
69 evaluation is known as the "meet operation". Since one of the
70 goals of this evaluation is to optimistically return constant
71 values as often as possible, it uses two main short cuts:
72
73 - If an argument is flowing in through a non-executable edge, it
74 is ignored. This is useful in cases like this:
75
76 if (PRED)
77 a_9 = 3;
78 else
79 a_10 = 100;
80 a_11 = PHI (a_9, a_10)
81
82 If PRED is known to always evaluate to false, then we can
83 assume that a_11 will always take its value from a_10, meaning
84 that instead of considering it VARYING (a_9 and a_10 have
85 different values), we can consider it CONSTANT 100.
86
87 - If an argument has an UNDEFINED value, then it does not affect
88 the outcome of the meet operation. If a variable V_i has an
89 UNDEFINED value, it means that either its defining statement
90 hasn't been visited yet or V_i has no defining statement, in
91 which case the original symbol 'V' is being used
92 uninitialized. Since 'V' is a local variable, the compiler
93 may assume any initial value for it.
94
95
96 After propagation, every variable V_i that ends up with a lattice
97 value of CONSTANT will have the associated constant value in the
98 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
99 final substitution and folding.
100
101 This algorithm uses wide-ints at the max precision of the target.
102 This means that, with one uninteresting exception, variables with
103 UNSIGNED types never go to VARYING because the bits above the
104 precision of the type of the variable are always zero. The
105 uninteresting case is a variable of UNSIGNED type that has the
106 maximum precision of the target. Such variables can go to VARYING,
107 but this causes no loss of information since these variables will
108 never be extended.
109
110 References:
111
112 Constant propagation with conditional branches,
113 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
114
115 Building an Optimizing Compiler,
116 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
117
118 Advanced Compiler Design and Implementation,
119 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
120
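/* A small worked example (illustrative only, not taken from the sources
   or testsuite).  Given

     int f (void)
     {
       int a = 3;
       int b;
       if (a > 10)
         b = a + 1;
       else
         b = 42;
       return b;
     }

   the assignment makes a_1 CONSTANT 3, the predicate 3 > 10 folds to
   false, so only the else edge is marked executable.  The PHI node for
   b therefore meets a single argument, becomes CONSTANT 42, and the
   final substitution rewrites the function to "return 42;".  */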
121 #include "config.h"
122 #include "system.h"
123 #include "coretypes.h"
124 #include "backend.h"
125 #include "tree.h"
126 #include "gimple.h"
127 #include "hard-reg-set.h"
128 #include "ssa.h"
129 #include "alias.h"
130 #include "fold-const.h"
131 #include "stor-layout.h"
132 #include "flags.h"
133 #include "tm_p.h"
134 #include "gimple-pretty-print.h"
135 #include "internal-fn.h"
136 #include "gimple-fold.h"
137 #include "tree-eh.h"
138 #include "gimplify.h"
139 #include "gimple-iterator.h"
140 #include "tree-cfg.h"
141 #include "tree-pass.h"
142 #include "tree-ssa-propagate.h"
143 #include "value-prof.h"
144 #include "langhooks.h"
145 #include "target.h"
146 #include "diagnostic-core.h"
147 #include "dbgcnt.h"
148 #include "params.h"
149 #include "wide-int-print.h"
150 #include "builtins.h"
151 #include "tree-chkp.h"
152
153
154 /* Possible lattice values. */
155 typedef enum
156 {
157 UNINITIALIZED,
158 UNDEFINED,
159 CONSTANT,
160 VARYING
161 } ccp_lattice_t;
162
163 struct ccp_prop_value_t {
164 /* Lattice value. */
165 ccp_lattice_t lattice_val;
166
167 /* Propagated value. */
168 tree value;
169
170 /* Mask that applies to the propagated value during CCP. For X
171 with a CONSTANT lattice value, X & ~mask == value & ~mask. The
172 zero bits in the mask cover constant values. The ones mean no
173 information. */
174 widest_int mask;
175 };
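/* Illustrative example (not part of the pass): value == 0x8 with
   mask == 0x7 means "bits 0-2 are unknown, bit 3 is known to be one,
   all higher bits are known to be zero", i.e. the run-time value lies
   in [0x8, 0xf].  A mask of zero means the value is fully known; a
   mask of all ones carries no bit information at all.  */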
176
177 /* Array of propagated constant values. After propagation,
178 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
179 the constant is held in an SSA name representing a memory store
180 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
181 memory reference used to store (i.e., the LHS of the assignment
182 doing the store). */
183 static ccp_prop_value_t *const_val;
184 static unsigned n_const_val;
185
186 static void canonicalize_value (ccp_prop_value_t *);
187 static bool ccp_fold_stmt (gimple_stmt_iterator *);
188 static void ccp_lattice_meet (ccp_prop_value_t *, ccp_prop_value_t *);
189
190 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
191
192 static void
193 dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
194 {
195 switch (val.lattice_val)
196 {
197 case UNINITIALIZED:
198 fprintf (outf, "%sUNINITIALIZED", prefix);
199 break;
200 case UNDEFINED:
201 fprintf (outf, "%sUNDEFINED", prefix);
202 break;
203 case VARYING:
204 fprintf (outf, "%sVARYING", prefix);
205 break;
206 case CONSTANT:
207 if (TREE_CODE (val.value) != INTEGER_CST
208 || val.mask == 0)
209 {
210 fprintf (outf, "%sCONSTANT ", prefix);
211 print_generic_expr (outf, val.value, dump_flags);
212 }
213 else
214 {
215 widest_int cval = wi::bit_and_not (wi::to_widest (val.value),
216 val.mask);
217 fprintf (outf, "%sCONSTANT ", prefix);
218 print_hex (cval, outf);
219 fprintf (outf, " (");
220 print_hex (val.mask, outf);
221 fprintf (outf, ")");
222 }
223 break;
224 default:
225 gcc_unreachable ();
226 }
227 }
228
229
230 /* Print lattice value VAL to stderr. */
231
232 void debug_lattice_value (ccp_prop_value_t val);
233
234 DEBUG_FUNCTION void
235 debug_lattice_value (ccp_prop_value_t val)
236 {
237 dump_lattice_value (stderr, "", val);
238 fprintf (stderr, "\n");
239 }
240
241 /* Extend NONZERO_BITS to a full mask, with the upper bits being set. */
242
243 static widest_int
244 extend_mask (const wide_int &nonzero_bits)
245 {
246 return (wi::mask <widest_int> (wi::get_precision (nonzero_bits), true)
247 | widest_int::from (nonzero_bits, UNSIGNED));
248 }
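/* For example (illustrative only): for a 16-bit unsigned variable whose
   recorded nonzero bits are 0x00ff, the extended mask has its low eight
   bits set (unknown), bits 8-15 clear (known zero) and every bit above
   the precision set, since nothing is known beyond the type.  */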
249
250 /* Compute a default value for variable VAR and store it in the
251 CONST_VAL array. The following rules are used to get default
252 values:
253
254 1- Global and static variables that are declared constant are
255 considered CONSTANT.
256
257 2- Any other value is considered UNDEFINED. This is useful when
258 considering PHI nodes. PHI arguments that are undefined do not
259 change the constant value of the PHI node, which allows for more
260 constants to be propagated.
261
262 3- Variables defined by statements other than assignments and PHI
263 nodes are considered VARYING.
264
265 4- Initial values of variables that are not GIMPLE registers are
266 considered VARYING. */
267
268 static ccp_prop_value_t
269 get_default_value (tree var)
270 {
271 ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
272 gimple *stmt;
273
274 stmt = SSA_NAME_DEF_STMT (var);
275
276 if (gimple_nop_p (stmt))
277 {
278 /* Variables defined by an empty statement are those used
279 before being initialized. If VAR is a local variable, we
280 can assume initially that it is UNDEFINED, otherwise we must
281 consider it VARYING. */
282 if (!virtual_operand_p (var)
283 && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
284 val.lattice_val = UNDEFINED;
285 else
286 {
287 val.lattice_val = VARYING;
288 val.mask = -1;
289 if (flag_tree_bit_ccp)
290 {
291 wide_int nonzero_bits = get_nonzero_bits (var);
292 if (nonzero_bits != -1)
293 {
294 val.lattice_val = CONSTANT;
295 val.value = build_zero_cst (TREE_TYPE (var));
296 val.mask = extend_mask (nonzero_bits);
297 }
298 }
299 }
300 }
301 else if (is_gimple_assign (stmt))
302 {
303 tree cst;
304 if (gimple_assign_single_p (stmt)
305 && DECL_P (gimple_assign_rhs1 (stmt))
306 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
307 {
308 val.lattice_val = CONSTANT;
309 val.value = cst;
310 }
311 else
312 {
313 /* Any other variable defined by an assignment is considered
314 UNDEFINED. */
315 val.lattice_val = UNDEFINED;
316 }
317 }
318 else if ((is_gimple_call (stmt)
319 && gimple_call_lhs (stmt) != NULL_TREE)
320 || gimple_code (stmt) == GIMPLE_PHI)
321 {
322 /* A variable defined by a call or a PHI node is considered
323 UNDEFINED. */
324 val.lattice_val = UNDEFINED;
325 }
326 else
327 {
328 /* Otherwise, VAR will never take on a constant value. */
329 val.lattice_val = VARYING;
330 val.mask = -1;
331 }
332
333 return val;
334 }
335
336
337 /* Get the constant value associated with variable VAR. */
338
339 static inline ccp_prop_value_t *
340 get_value (tree var)
341 {
342 ccp_prop_value_t *val;
343
344 if (const_val == NULL
345 || SSA_NAME_VERSION (var) >= n_const_val)
346 return NULL;
347
348 val = &const_val[SSA_NAME_VERSION (var)];
349 if (val->lattice_val == UNINITIALIZED)
350 *val = get_default_value (var);
351
352 canonicalize_value (val);
353
354 return val;
355 }
356
357 /* Return the constant tree value associated with VAR. */
358
359 static inline tree
360 get_constant_value (tree var)
361 {
362 ccp_prop_value_t *val;
363 if (TREE_CODE (var) != SSA_NAME)
364 {
365 if (is_gimple_min_invariant (var))
366 return var;
367 return NULL_TREE;
368 }
369 val = get_value (var);
370 if (val
371 && val->lattice_val == CONSTANT
372 && (TREE_CODE (val->value) != INTEGER_CST
373 || val->mask == 0))
374 return val->value;
375 return NULL_TREE;
376 }
377
378 /* Sets the value associated with VAR to VARYING. */
379
380 static inline void
381 set_value_varying (tree var)
382 {
383 ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
384
385 val->lattice_val = VARYING;
386 val->value = NULL_TREE;
387 val->mask = -1;
388 }
389
390 /* For integer constants, make sure to drop TREE_OVERFLOW. */
391
392 static void
393 canonicalize_value (ccp_prop_value_t *val)
394 {
395 if (val->lattice_val != CONSTANT)
396 return;
397
398 if (TREE_OVERFLOW_P (val->value))
399 val->value = drop_tree_overflow (val->value);
400 }
401
402 /* Return whether the lattice transition is valid. */
403
404 static bool
405 valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val)
406 {
407 /* Lattice transitions must always be monotonically increasing in
408 value. */
409 if (old_val.lattice_val < new_val.lattice_val)
410 return true;
411
412 if (old_val.lattice_val != new_val.lattice_val)
413 return false;
414
415 if (!old_val.value && !new_val.value)
416 return true;
417
418 /* Now both lattice values are CONSTANT. */
419
420 /* Allow arbitrary copy changes as we might look through PHI <a_1, ...>
421 when only a single copy edge is executable. */
422 if (TREE_CODE (old_val.value) == SSA_NAME
423 && TREE_CODE (new_val.value) == SSA_NAME)
424 return true;
425
426 /* Allow transitioning from a constant to a copy. */
427 if (is_gimple_min_invariant (old_val.value)
428 && TREE_CODE (new_val.value) == SSA_NAME)
429 return true;
430
431 /* Allow transitioning from PHI <&x, not executable> == &x
432 to PHI <&x, &y> == common alignment. */
433 if (TREE_CODE (old_val.value) != INTEGER_CST
434 && TREE_CODE (new_val.value) == INTEGER_CST)
435 return true;
436
437 /* Bit-lattices have to agree in the still valid bits. */
438 if (TREE_CODE (old_val.value) == INTEGER_CST
439 && TREE_CODE (new_val.value) == INTEGER_CST)
440 return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask)
441 == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask));
442
443 /* Otherwise constant values have to agree. */
444 if (operand_equal_p (old_val.value, new_val.value, 0))
445 return true;
446
447 /* At least the kinds and types should agree now. */
448 if (TREE_CODE (old_val.value) != TREE_CODE (new_val.value)
449 || !types_compatible_p (TREE_TYPE (old_val.value),
450 TREE_TYPE (new_val.value)))
451 return false;
452
453 /* For floats and !HONOR_NANS allow transitions from (partial) NaN
454 to non-NaN. */
455 tree type = TREE_TYPE (new_val.value);
456 if (SCALAR_FLOAT_TYPE_P (type)
457 && !HONOR_NANS (type))
458 {
459 if (REAL_VALUE_ISNAN (TREE_REAL_CST (old_val.value)))
460 return true;
461 }
462 else if (VECTOR_FLOAT_TYPE_P (type)
463 && !HONOR_NANS (type))
464 {
465 for (unsigned i = 0; i < VECTOR_CST_NELTS (old_val.value); ++i)
466 if (!REAL_VALUE_ISNAN
467 (TREE_REAL_CST (VECTOR_CST_ELT (old_val.value, i)))
468 && !operand_equal_p (VECTOR_CST_ELT (old_val.value, i),
469 VECTOR_CST_ELT (new_val.value, i), 0))
470 return false;
471 return true;
472 }
473 else if (COMPLEX_FLOAT_TYPE_P (type)
474 && !HONOR_NANS (type))
475 {
476 if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_REALPART (old_val.value)))
477 && !operand_equal_p (TREE_REALPART (old_val.value),
478 TREE_REALPART (new_val.value), 0))
479 return false;
480 if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_IMAGPART (old_val.value)))
481 && !operand_equal_p (TREE_IMAGPART (old_val.value),
482 TREE_IMAGPART (new_val.value), 0))
483 return false;
484 return true;
485 }
486 return false;
487 }
488
489 /* Set the value for variable VAR to NEW_VAL. Return true if the new
490 value is different from VAR's previous value. */
491
492 static bool
493 set_lattice_value (tree var, ccp_prop_value_t *new_val)
494 {
495 /* We can deal with old UNINITIALIZED values just fine here. */
496 ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
497
498 canonicalize_value (new_val);
499
500 /* We have to be careful to not go up the bitwise lattice
501 represented by the mask. Instead of dropping to VARYING
502 use the meet operator to retain a conservative value.
503 Missed optimizations like PR65851 make this necessary.
504 It also ensures we converge to a stable lattice solution. */
505 if (new_val->lattice_val == CONSTANT
506 && old_val->lattice_val == CONSTANT
507 && TREE_CODE (new_val->value) != SSA_NAME)
508 ccp_lattice_meet (new_val, old_val);
509
510 gcc_checking_assert (valid_lattice_transition (*old_val, *new_val));
511
512 /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
513 caller that this was a non-transition. */
514 if (old_val->lattice_val != new_val->lattice_val
515 || (new_val->lattice_val == CONSTANT
516 && (TREE_CODE (new_val->value) != TREE_CODE (old_val->value)
517 || (TREE_CODE (new_val->value) == INTEGER_CST
518 && (new_val->mask != old_val->mask
519 || (wi::bit_and_not (wi::to_widest (old_val->value),
520 new_val->mask)
521 != wi::bit_and_not (wi::to_widest (new_val->value),
522 new_val->mask))))
523 || (TREE_CODE (new_val->value) != INTEGER_CST
524 && !operand_equal_p (new_val->value, old_val->value, 0)))))
525 {
526 /* ??? We would like to delay creation of INTEGER_CSTs from
527 partially constants here. */
528
529 if (dump_file && (dump_flags & TDF_DETAILS))
530 {
531 dump_lattice_value (dump_file, "Lattice value changed to ", *new_val);
532 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
533 }
534
535 *old_val = *new_val;
536
537 gcc_assert (new_val->lattice_val != UNINITIALIZED);
538 return true;
539 }
540
541 return false;
542 }
543
544 static ccp_prop_value_t get_value_for_expr (tree, bool);
545 static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
546 static void bit_value_binop_1 (enum tree_code, tree, widest_int *, widest_int *,
547 tree, const widest_int &, const widest_int &,
548 tree, const widest_int &, const widest_int &);
549
550 /* Return a widest_int that can be used for bitwise simplifications
551 from VAL. */
552
553 static widest_int
554 value_to_wide_int (ccp_prop_value_t val)
555 {
556 if (val.value
557 && TREE_CODE (val.value) == INTEGER_CST)
558 return wi::to_widest (val.value);
559
560 return 0;
561 }
562
563 /* Return the value for the address expression EXPR based on alignment
564 information. */
565
566 static ccp_prop_value_t
567 get_value_from_alignment (tree expr)
568 {
569 tree type = TREE_TYPE (expr);
570 ccp_prop_value_t val;
571 unsigned HOST_WIDE_INT bitpos;
572 unsigned int align;
573
574 gcc_assert (TREE_CODE (expr) == ADDR_EXPR);
575
576 get_pointer_alignment_1 (expr, &align, &bitpos);
577 val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
578 ? wi::mask <widest_int> (TYPE_PRECISION (type), false)
579 : -1).and_not (align / BITS_PER_UNIT - 1);
580 val.lattice_val
581 = wi::sext (val.mask, TYPE_PRECISION (type)) == -1 ? VARYING : CONSTANT;
582 if (val.lattice_val == CONSTANT)
583 val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT);
584 else
585 val.value = NULL_TREE;
586
587 return val;
588 }
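/* Worked example (illustrative only): for &a[1] with a 16-byte aligned
   "int a[4]", get_pointer_alignment_1 reports an alignment of 128 bits
   and a bit position of 32, so the result is CONSTANT with value 4 and
   a mask whose low four bits are clear -- the address is known to be
   congruent to 4 modulo 16, all remaining bits are unknown.  */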
589
590 /* Return the value for the tree operand EXPR. If FOR_BITS_P is true
591 return constant bits extracted from alignment information for
592 invariant addresses. */
593
594 static ccp_prop_value_t
595 get_value_for_expr (tree expr, bool for_bits_p)
596 {
597 ccp_prop_value_t val;
598
599 if (TREE_CODE (expr) == SSA_NAME)
600 {
601 val = *get_value (expr);
602 if (for_bits_p
603 && val.lattice_val == CONSTANT
604 && TREE_CODE (val.value) == ADDR_EXPR)
605 val = get_value_from_alignment (val.value);
606 /* Fall back to a copy value. */
607 if (!for_bits_p
608 && val.lattice_val == VARYING
609 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr))
610 {
611 val.lattice_val = CONSTANT;
612 val.value = expr;
613 val.mask = -1;
614 }
615 }
616 else if (is_gimple_min_invariant (expr)
617 && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
618 {
619 val.lattice_val = CONSTANT;
620 val.value = expr;
621 val.mask = 0;
622 canonicalize_value (&val);
623 }
624 else if (TREE_CODE (expr) == ADDR_EXPR)
625 val = get_value_from_alignment (expr);
626 else
627 {
628 val.lattice_val = VARYING;
629 val.mask = -1;
630 val.value = NULL_TREE;
631 }
632
633 if (val.lattice_val == VARYING
634 && TYPE_UNSIGNED (TREE_TYPE (expr)))
635 val.mask = wi::zext (val.mask, TYPE_PRECISION (TREE_TYPE (expr)));
636
637 return val;
638 }
639
640 /* Return the likely CCP lattice value for STMT.
641
642 If STMT has no operands, then return CONSTANT.
643
644 Else if undefinedness of operands of STMT cause its value to be
645 undefined, then return UNDEFINED.
646
647 Else if any operands of STMT are constants, then return CONSTANT.
648
649 Else return VARYING. */
650
651 static ccp_lattice_t
652 likely_value (gimple *stmt)
653 {
654 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
655 bool has_nsa_operand;
656 tree use;
657 ssa_op_iter iter;
658 unsigned i;
659
660 enum gimple_code code = gimple_code (stmt);
661
662 /* This function appears to be called only for assignments, calls,
663 conditionals, and switches, due to the logic in visit_stmt. */
664 gcc_assert (code == GIMPLE_ASSIGN
665 || code == GIMPLE_CALL
666 || code == GIMPLE_COND
667 || code == GIMPLE_SWITCH);
668
669 /* If the statement has volatile operands, it won't fold to a
670 constant value. */
671 if (gimple_has_volatile_ops (stmt))
672 return VARYING;
673
674 /* Arrive here for more complex cases. */
675 has_constant_operand = false;
676 has_undefined_operand = false;
677 all_undefined_operands = true;
678 has_nsa_operand = false;
679 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
680 {
681 ccp_prop_value_t *val = get_value (use);
682
683 if (val->lattice_val == UNDEFINED)
684 has_undefined_operand = true;
685 else
686 all_undefined_operands = false;
687
688 if (val->lattice_val == CONSTANT)
689 has_constant_operand = true;
690
691 if (SSA_NAME_IS_DEFAULT_DEF (use)
692 || !prop_simulate_again_p (SSA_NAME_DEF_STMT (use)))
693 has_nsa_operand = true;
694 }
695
696 /* There may be constants in regular rhs operands. For calls we
697 have to ignore the lhs, fndecl and static chain; for other
698 statements, only the lhs. */
699 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
700 i < gimple_num_ops (stmt); ++i)
701 {
702 tree op = gimple_op (stmt, i);
703 if (!op || TREE_CODE (op) == SSA_NAME)
704 continue;
705 if (is_gimple_min_invariant (op))
706 has_constant_operand = true;
707 }
708
709 if (has_constant_operand)
710 all_undefined_operands = false;
711
712 if (has_undefined_operand
713 && code == GIMPLE_CALL
714 && gimple_call_internal_p (stmt))
715 switch (gimple_call_internal_fn (stmt))
716 {
717 /* These 3 builtins use the first argument just as a magic
718 way to find out a decl uid. */
719 case IFN_GOMP_SIMD_LANE:
720 case IFN_GOMP_SIMD_VF:
721 case IFN_GOMP_SIMD_LAST_LANE:
722 has_undefined_operand = false;
723 break;
724 default:
725 break;
726 }
727
728 /* If the operation combines operands like COMPLEX_EXPR make sure to
729 not mark the result UNDEFINED if only one part of the result is
730 undefined. */
731 if (has_undefined_operand && all_undefined_operands)
732 return UNDEFINED;
733 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
734 {
735 switch (gimple_assign_rhs_code (stmt))
736 {
737 /* Unary operators are handled with all_undefined_operands. */
738 case PLUS_EXPR:
739 case MINUS_EXPR:
740 case POINTER_PLUS_EXPR:
741 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
742 Not bitwise operators, one VARYING operand may specify the
743 result completely. Not logical operators for the same reason.
744 Not COMPLEX_EXPR as one VARYING operand makes the result partly
745 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
746 the undefined operand may be promoted. */
747 return UNDEFINED;
748
749 case ADDR_EXPR:
750 /* If any part of an address is UNDEFINED, like the index
751 of an ARRAY_EXPR, then treat the result as UNDEFINED. */
752 return UNDEFINED;
753
754 default:
755 ;
756 }
757 }
758 /* If there was an UNDEFINED operand but the result may be not UNDEFINED
759 fall back to CONSTANT. During iteration UNDEFINED may still drop
760 to CONSTANT. */
761 if (has_undefined_operand)
762 return CONSTANT;
763
764 /* We do not consider virtual operands here -- load from read-only
765 memory may have only VARYING virtual operands, but still be
766 constant. Also we can combine the stmt with definitions from
767 operands whose definitions are not simulated again. */
768 if (has_constant_operand
769 || has_nsa_operand
770 || gimple_references_memory_p (stmt))
771 return CONSTANT;
772
773 return VARYING;
774 }
775
776 /* Returns true if STMT cannot be constant. */
777
778 static bool
779 surely_varying_stmt_p (gimple *stmt)
780 {
781 /* If the statement has operands that we cannot handle, it cannot be
782 constant. */
783 if (gimple_has_volatile_ops (stmt))
784 return true;
785
786 /* If it is a call that does not return a value, or a direct call
787 to a function that is neither a builtin nor declared with the
788 assume_aligned/alloc_align attribute, it is varying. */
789 if (is_gimple_call (stmt))
790 {
791 tree fndecl, fntype = gimple_call_fntype (stmt);
792 if (!gimple_call_lhs (stmt)
793 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
794 && !DECL_BUILT_IN (fndecl)
795 && !lookup_attribute ("assume_aligned",
796 TYPE_ATTRIBUTES (fntype))
797 && !lookup_attribute ("alloc_align",
798 TYPE_ATTRIBUTES (fntype))))
799 return true;
800 }
801
802 /* Any other store operation is not interesting. */
803 else if (gimple_vdef (stmt))
804 return true;
805
806 /* Anything other than assignments and conditional jumps are not
807 interesting for CCP. */
808 if (gimple_code (stmt) != GIMPLE_ASSIGN
809 && gimple_code (stmt) != GIMPLE_COND
810 && gimple_code (stmt) != GIMPLE_SWITCH
811 && gimple_code (stmt) != GIMPLE_CALL)
812 return true;
813
814 return false;
815 }
816
817 /* Initialize local data structures for CCP. */
818
819 static void
820 ccp_initialize (void)
821 {
822 basic_block bb;
823
824 n_const_val = num_ssa_names;
825 const_val = XCNEWVEC (ccp_prop_value_t, n_const_val);
826
827 /* Initialize simulation flags for PHI nodes and statements. */
828 FOR_EACH_BB_FN (bb, cfun)
829 {
830 gimple_stmt_iterator i;
831
832 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
833 {
834 gimple *stmt = gsi_stmt (i);
835 bool is_varying;
836
837 /* If the statement is a control insn, we must simulate
838 it at least once. Failing to do so means that its
839 outgoing edges will never get added. */
840 if (stmt_ends_bb_p (stmt))
841 is_varying = false;
842 else
843 is_varying = surely_varying_stmt_p (stmt);
844
845 if (is_varying)
846 {
847 tree def;
848 ssa_op_iter iter;
849
850 /* If the statement will not produce a constant, mark
851 all its outputs VARYING. */
852 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
853 set_value_varying (def);
854 }
855 prop_set_simulate_again (stmt, !is_varying);
856 }
857 }
858
859 /* Now process PHI nodes. We never clear the simulate_again flag on
860 phi nodes, since we do not know which edges are executable yet,
861 except for phi nodes for virtual operands when we do not do store ccp. */
862 FOR_EACH_BB_FN (bb, cfun)
863 {
864 gphi_iterator i;
865
866 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
867 {
868 gphi *phi = i.phi ();
869
870 if (virtual_operand_p (gimple_phi_result (phi)))
871 prop_set_simulate_again (phi, false);
872 else
873 prop_set_simulate_again (phi, true);
874 }
875 }
876 }
877
878 /* Debug count support. Reset the values of ssa names to
879 VARYING when the total number of ssa names analyzed
880 exceeds the specified debug count. */
881
882 static void
883 do_dbg_cnt (void)
884 {
885 unsigned i;
886 for (i = 0; i < num_ssa_names; i++)
887 {
888 if (!dbg_cnt (ccp))
889 {
890 const_val[i].lattice_val = VARYING;
891 const_val[i].mask = -1;
892 const_val[i].value = NULL_TREE;
893 }
894 }
895 }
896
897
898 /* Do final substitution of propagated values, clean up the flowgraph and
899 free allocated storage.
900
901 Return TRUE when something was optimized. */
902
903 static bool
904 ccp_finalize (void)
905 {
906 bool something_changed;
907 unsigned i;
908
909 do_dbg_cnt ();
910
911 /* Derive alignment and misalignment information from partially
912 constant pointers in the lattice or nonzero bits from partially
913 constant integers. */
914 for (i = 1; i < num_ssa_names; ++i)
915 {
916 tree name = ssa_name (i);
917 ccp_prop_value_t *val;
918 unsigned int tem, align;
919
920 if (!name
921 || (!POINTER_TYPE_P (TREE_TYPE (name))
922 && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
923 /* Don't record nonzero bits before IPA to avoid
924 using too much memory. */
925 || first_pass_instance)))
926 continue;
927
928 val = get_value (name);
929 if (val->lattice_val != CONSTANT
930 || TREE_CODE (val->value) != INTEGER_CST)
931 continue;
932
933 if (POINTER_TYPE_P (TREE_TYPE (name)))
934 {
935 /* Trailing mask bits specify the alignment, trailing value
936 bits the misalignment. */
937 tem = val->mask.to_uhwi ();
938 align = (tem & -tem);
939 if (align > 1)
940 set_ptr_info_alignment (get_ptr_info (name), align,
941 (TREE_INT_CST_LOW (val->value)
942 & (align - 1)));
943 }
944 else
945 {
946 unsigned int precision = TYPE_PRECISION (TREE_TYPE (val->value));
947 wide_int nonzero_bits = wide_int::from (val->mask, precision,
948 UNSIGNED) | val->value;
949 nonzero_bits &= get_nonzero_bits (name);
950 set_nonzero_bits (name, nonzero_bits);
951 }
952 }
953
954 /* Perform substitutions based on the known constant values. */
955 something_changed = substitute_and_fold (get_constant_value,
956 ccp_fold_stmt, true);
957
958 free (const_val);
959 const_val = NULL;
960 return something_changed;
961 }
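/* Worked example for the pointer case above (illustrative only): a
   pointer that ends up CONSTANT 0x4 with mask ~0xf gives
   tem & -tem == 0x10, so ptr_info records an alignment of 16 bytes
   and a misalignment of 0x4 & 0xf == 4.  */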
962
963
964 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
965 in VAL1.
966
967 any M UNDEFINED = any
968 any M VARYING = VARYING
969 Ci M Cj = Ci if (i == j)
970 Ci M Cj = VARYING if (i != j)
971 */
972
973 static void
974 ccp_lattice_meet (ccp_prop_value_t *val1, ccp_prop_value_t *val2)
975 {
976 if (val1->lattice_val == UNDEFINED
977 /* For UNDEFINED M SSA we can't always use the SSA name, as its definition
978 may not dominate the PHI node. Doing optimistic copy propagation
979 also causes a lot of gcc.dg/uninit-pred*.c FAILs. */
980 && (val2->lattice_val != CONSTANT
981 || TREE_CODE (val2->value) != SSA_NAME))
982 {
983 /* UNDEFINED M any = any */
984 *val1 = *val2;
985 }
986 else if (val2->lattice_val == UNDEFINED
987 /* See above. */
988 && (val1->lattice_val != CONSTANT
989 || TREE_CODE (val1->value) != SSA_NAME))
990 {
991 /* any M UNDEFINED = any
992 Nothing to do. VAL1 already contains the value we want. */
993 ;
994 }
995 else if (val1->lattice_val == VARYING
996 || val2->lattice_val == VARYING)
997 {
998 /* any M VARYING = VARYING. */
999 val1->lattice_val = VARYING;
1000 val1->mask = -1;
1001 val1->value = NULL_TREE;
1002 }
1003 else if (val1->lattice_val == CONSTANT
1004 && val2->lattice_val == CONSTANT
1005 && TREE_CODE (val1->value) == INTEGER_CST
1006 && TREE_CODE (val2->value) == INTEGER_CST)
1007 {
1008 /* Ci M Cj = Ci if (i == j)
1009 Ci M Cj = VARYING if (i != j)
1010
1011 For INTEGER_CSTs mask unequal bits. If no equal bits remain,
1012 drop to varying. */
1013 val1->mask = (val1->mask | val2->mask
1014 | (wi::to_widest (val1->value)
1015 ^ wi::to_widest (val2->value)));
1016 if (wi::sext (val1->mask, TYPE_PRECISION (TREE_TYPE (val1->value))) == -1)
1017 {
1018 val1->lattice_val = VARYING;
1019 val1->value = NULL_TREE;
1020 }
1021 }
1022 else if (val1->lattice_val == CONSTANT
1023 && val2->lattice_val == CONSTANT
1024 && operand_equal_p (val1->value, val2->value, 0))
1025 {
1026 /* Ci M Cj = Ci if (i == j)
1027 Ci M Cj = VARYING if (i != j)
1028
1029 VAL1 already contains the value we want for equivalent values. */
1030 }
1031 else if (val1->lattice_val == CONSTANT
1032 && val2->lattice_val == CONSTANT
1033 && (TREE_CODE (val1->value) == ADDR_EXPR
1034 || TREE_CODE (val2->value) == ADDR_EXPR))
1035 {
1036 /* When not equal addresses are involved try meeting for
1037 alignment. */
1038 ccp_prop_value_t tem = *val2;
1039 if (TREE_CODE (val1->value) == ADDR_EXPR)
1040 *val1 = get_value_for_expr (val1->value, true);
1041 if (TREE_CODE (val2->value) == ADDR_EXPR)
1042 tem = get_value_for_expr (val2->value, true);
1043 ccp_lattice_meet (val1, &tem);
1044 }
1045 else
1046 {
1047 /* Any other combination is VARYING. */
1048 val1->lattice_val = VARYING;
1049 val1->mask = -1;
1050 val1->value = NULL_TREE;
1051 }
1052 }
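/* A small worked example (illustrative only): meeting CONSTANT 0x12
   (mask 0) with CONSTANT 0x16 (mask 0) takes the INTEGER_CST case
   above and yields mask 0 | 0 | (0x12 ^ 0x16) == 0x4, so the result is
   CONSTANT 0x12 with mask 0x4 -- every bit but bit 2 is still known.
   Meeting 0 with -1 instead sets every mask bit and drops to VARYING.  */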
1053
1054
1055 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
1056 lattice values to determine PHI_NODE's lattice value. The value of a
1057 PHI node is determined by calling ccp_lattice_meet with all the arguments
1058 of the PHI node that are incoming via executable edges. */
1059
1060 static enum ssa_prop_result
1061 ccp_visit_phi_node (gphi *phi)
1062 {
1063 unsigned i;
1064 ccp_prop_value_t new_val;
1065
1066 if (dump_file && (dump_flags & TDF_DETAILS))
1067 {
1068 fprintf (dump_file, "\nVisiting PHI node: ");
1069 print_gimple_stmt (dump_file, phi, 0, dump_flags);
1070 }
1071
1072 new_val.lattice_val = UNDEFINED;
1073 new_val.value = NULL_TREE;
1074 new_val.mask = 0;
1075
1076 bool first = true;
1077 bool non_exec_edge = false;
1078 for (i = 0; i < gimple_phi_num_args (phi); i++)
1079 {
1080 /* Compute the meet operator over all the PHI arguments flowing
1081 through executable edges. */
1082 edge e = gimple_phi_arg_edge (phi, i);
1083
1084 if (dump_file && (dump_flags & TDF_DETAILS))
1085 {
1086 fprintf (dump_file,
1087 "\n Argument #%d (%d -> %d %sexecutable)\n",
1088 i, e->src->index, e->dest->index,
1089 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
1090 }
1091
1092 /* If the incoming edge is executable, compute the meet operator for
1093 the existing value of the PHI node and the current PHI argument. */
1094 if (e->flags & EDGE_EXECUTABLE)
1095 {
1096 tree arg = gimple_phi_arg (phi, i)->def;
1097 ccp_prop_value_t arg_val = get_value_for_expr (arg, false);
1098
1099 if (first)
1100 {
1101 new_val = arg_val;
1102 first = false;
1103 }
1104 else
1105 ccp_lattice_meet (&new_val, &arg_val);
1106
1107 if (dump_file && (dump_flags & TDF_DETAILS))
1108 {
1109 fprintf (dump_file, "\t");
1110 print_generic_expr (dump_file, arg, dump_flags);
1111 dump_lattice_value (dump_file, "\tValue: ", arg_val);
1112 fprintf (dump_file, "\n");
1113 }
1114
1115 if (new_val.lattice_val == VARYING)
1116 break;
1117 }
1118 else
1119 non_exec_edge = true;
1120 }
1121
1122 /* In case there were non-executable edges and the value is a copy,
1123 make sure its definition dominates the PHI node. */
1124 if (non_exec_edge
1125 && new_val.lattice_val == CONSTANT
1126 && TREE_CODE (new_val.value) == SSA_NAME
1127 && ! SSA_NAME_IS_DEFAULT_DEF (new_val.value)
1128 && ! dominated_by_p (CDI_DOMINATORS, gimple_bb (phi),
1129 gimple_bb (SSA_NAME_DEF_STMT (new_val.value))))
1130 {
1131 new_val.lattice_val = VARYING;
1132 new_val.value = NULL_TREE;
1133 new_val.mask = -1;
1134 }
1135
1136 if (dump_file && (dump_flags & TDF_DETAILS))
1137 {
1138 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
1139 fprintf (dump_file, "\n\n");
1140 }
1141
1142 /* Make the transition to the new value. */
1143 if (set_lattice_value (gimple_phi_result (phi), &new_val))
1144 {
1145 if (new_val.lattice_val == VARYING)
1146 return SSA_PROP_VARYING;
1147 else
1148 return SSA_PROP_INTERESTING;
1149 }
1150 else
1151 return SSA_PROP_NOT_INTERESTING;
1152 }
1153
1154 /* Return the constant value for OP, or OP itself otherwise. */
1155
1156 static tree
1157 valueize_op (tree op)
1158 {
1159 if (TREE_CODE (op) == SSA_NAME)
1160 {
1161 tree tem = get_constant_value (op);
1162 if (tem)
1163 return tem;
1164 }
1165 return op;
1166 }
1167
1168 /* Return the constant value for OP, but signal to not follow SSA
1169 edges if the definition may be simulated again. */
1170
1171 static tree
1172 valueize_op_1 (tree op)
1173 {
1174 if (TREE_CODE (op) == SSA_NAME)
1175 {
1176 /* If the definition may be simulated again we cannot follow
1177 this SSA edge as the SSA propagator does not necessarily
1178 re-visit the use. */
1179 gimple *def_stmt = SSA_NAME_DEF_STMT (op);
1180 if (!gimple_nop_p (def_stmt)
1181 && prop_simulate_again_p (def_stmt))
1182 return NULL_TREE;
1183 tree tem = get_constant_value (op);
1184 if (tem)
1185 return tem;
1186 }
1187 return op;
1188 }
1189
1190 /* CCP specific front-end to the non-destructive constant folding
1191 routines.
1192
1193 Attempt to simplify the RHS of STMT knowing that one or more
1194 operands are constants.
1195
1196 If simplification is possible, return the simplified RHS,
1197 otherwise return the original RHS or NULL_TREE. */
1198
1199 static tree
1200 ccp_fold (gimple *stmt)
1201 {
1202 location_t loc = gimple_location (stmt);
1203 switch (gimple_code (stmt))
1204 {
1205 case GIMPLE_COND:
1206 {
1207 /* Handle comparison operators that can appear in GIMPLE form. */
1208 tree op0 = valueize_op (gimple_cond_lhs (stmt));
1209 tree op1 = valueize_op (gimple_cond_rhs (stmt));
1210 enum tree_code code = gimple_cond_code (stmt);
1211 return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
1212 }
1213
1214 case GIMPLE_SWITCH:
1215 {
1216 /* Return the constant switch index. */
1217 return valueize_op (gimple_switch_index (as_a <gswitch *> (stmt)));
1218 }
1219
1220 case GIMPLE_ASSIGN:
1221 case GIMPLE_CALL:
1222 return gimple_fold_stmt_to_constant_1 (stmt,
1223 valueize_op, valueize_op_1);
1224
1225 default:
1226 gcc_unreachable ();
1227 }
1228 }
1229
1230 /* Apply the operation CODE in type TYPE to the value, mask pair
1231 RVAL and RMASK representing a value of type RTYPE and set
1232 the value, mask pair *VAL and *MASK to the result. */
1233
1234 static void
1235 bit_value_unop_1 (enum tree_code code, tree type,
1236 widest_int *val, widest_int *mask,
1237 tree rtype, const widest_int &rval, const widest_int &rmask)
1238 {
1239 switch (code)
1240 {
1241 case BIT_NOT_EXPR:
1242 *mask = rmask;
1243 *val = ~rval;
1244 break;
1245
1246 case NEGATE_EXPR:
1247 {
1248 widest_int temv, temm;
1249 /* Return ~rval + 1. */
1250 bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
1251 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1252 type, temv, temm, type, 1, 0);
1253 break;
1254 }
1255
1256 CASE_CONVERT:
1257 {
1258 signop sgn;
1259
1260 /* First extend mask and value according to the original type. */
1261 sgn = TYPE_SIGN (rtype);
1262 *mask = wi::ext (rmask, TYPE_PRECISION (rtype), sgn);
1263 *val = wi::ext (rval, TYPE_PRECISION (rtype), sgn);
1264
1265 /* Then extend mask and value according to the target type. */
1266 sgn = TYPE_SIGN (type);
1267 *mask = wi::ext (*mask, TYPE_PRECISION (type), sgn);
1268 *val = wi::ext (*val, TYPE_PRECISION (type), sgn);
1269 break;
1270 }
1271
1272 default:
1273 *mask = -1;
1274 break;
1275 }
1276 }
1277
1278 /* Apply the operation CODE in type TYPE to the value, mask pairs
1279 R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
1280 and R2TYPE and set the value, mask pair *VAL and *MASK to the result. */
1281
1282 static void
1283 bit_value_binop_1 (enum tree_code code, tree type,
1284 widest_int *val, widest_int *mask,
1285 tree r1type, const widest_int &r1val,
1286 const widest_int &r1mask, tree r2type,
1287 const widest_int &r2val, const widest_int &r2mask)
1288 {
1289 signop sgn = TYPE_SIGN (type);
1290 int width = TYPE_PRECISION (type);
1291 bool swap_p = false;
1292
1293 /* Assume we'll get a constant result. Use an initial non varying
1294 value, we fall back to varying in the end if necessary. */
1295 *mask = -1;
1296
1297 switch (code)
1298 {
1299 case BIT_AND_EXPR:
1300 /* The mask is constant where there is a known not
1301 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
1302 *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
1303 *val = r1val & r2val;
1304 break;
1305
1306 case BIT_IOR_EXPR:
1307 /* The mask is constant where there is a known
1308 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)). */
1309 *mask = (r1mask | r2mask)
1310 .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
1311 *val = r1val | r2val;
1312 break;
1313
1314 case BIT_XOR_EXPR:
1315 /* m1 | m2 */
1316 *mask = r1mask | r2mask;
1317 *val = r1val ^ r2val;
1318 break;
1319
1320 case LROTATE_EXPR:
1321 case RROTATE_EXPR:
1322 if (r2mask == 0)
1323 {
1324 widest_int shift = r2val;
1325 if (shift == 0)
1326 {
1327 *mask = r1mask;
1328 *val = r1val;
1329 }
1330 else
1331 {
1332 if (wi::neg_p (shift))
1333 {
1334 shift = -shift;
1335 if (code == RROTATE_EXPR)
1336 code = LROTATE_EXPR;
1337 else
1338 code = RROTATE_EXPR;
1339 }
1340 if (code == RROTATE_EXPR)
1341 {
1342 *mask = wi::rrotate (r1mask, shift, width);
1343 *val = wi::rrotate (r1val, shift, width);
1344 }
1345 else
1346 {
1347 *mask = wi::lrotate (r1mask, shift, width);
1348 *val = wi::lrotate (r1val, shift, width);
1349 }
1350 }
1351 }
1352 break;
1353
1354 case LSHIFT_EXPR:
1355 case RSHIFT_EXPR:
1356 /* ??? We can handle partially known shift counts if we know
1357 their sign. That way we can tell that (x << (y | 8)) & 255
1358 is zero. */
1359 if (r2mask == 0)
1360 {
1361 widest_int shift = r2val;
1362 if (shift == 0)
1363 {
1364 *mask = r1mask;
1365 *val = r1val;
1366 }
1367 else
1368 {
1369 if (wi::neg_p (shift))
1370 {
1371 shift = -shift;
1372 if (code == RSHIFT_EXPR)
1373 code = LSHIFT_EXPR;
1374 else
1375 code = RSHIFT_EXPR;
1376 }
1377 if (code == RSHIFT_EXPR)
1378 {
1379 *mask = wi::rshift (wi::ext (r1mask, width, sgn), shift, sgn);
1380 *val = wi::rshift (wi::ext (r1val, width, sgn), shift, sgn);
1381 }
1382 else
1383 {
1384 *mask = wi::ext (wi::lshift (r1mask, shift), width, sgn);
1385 *val = wi::ext (wi::lshift (r1val, shift), width, sgn);
1386 }
1387 }
1388 }
1389 break;
1390
1391 case PLUS_EXPR:
1392 case POINTER_PLUS_EXPR:
1393 {
1394 /* Do the addition with unknown bits set to zero, to give carry-ins of
1395 zero wherever possible. */
1396 widest_int lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
1397 lo = wi::ext (lo, width, sgn);
1398 /* Do the addition with unknown bits set to one, to give carry-ins of
1399 one wherever possible. */
1400 widest_int hi = (r1val | r1mask) + (r2val | r2mask);
1401 hi = wi::ext (hi, width, sgn);
1402 /* Each bit in the result is known if (a) the corresponding bits in
1403 both inputs are known, and (b) the carry-in to that bit position
1404 is known. We can check condition (b) by seeing if we got the same
1405 result with minimised carries as with maximised carries. */
1406 *mask = r1mask | r2mask | (lo ^ hi);
1407 *mask = wi::ext (*mask, width, sgn);
1408 /* It shouldn't matter whether we choose lo or hi here. */
1409 *val = lo;
1410 break;
1411 }
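      /* Worked example (illustrative only): adding a value known to be
         0x10 or 0x11 (val 0x10, mask 0x1) to the constant 1 gives
         lo = 0x10 + 0x1 = 0x11 and hi = 0x11 + 0x1 = 0x12, so *mask
         becomes 0x1 | 0x0 | (0x11 ^ 0x12) = 0x3 and *val becomes 0x11:
         the two low bits are unknown, everything above them is known.  */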
1412
1413 case MINUS_EXPR:
1414 {
1415 widest_int temv, temm;
1416 bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
1417 r2type, r2val, r2mask);
1418 bit_value_binop_1 (PLUS_EXPR, type, val, mask,
1419 r1type, r1val, r1mask,
1420 r2type, temv, temm);
1421 break;
1422 }
1423
1424 case MULT_EXPR:
1425 {
1426 /* Just track trailing zeros in both operands and transfer
1427 them to the other. */
1428 int r1tz = wi::ctz (r1val | r1mask);
1429 int r2tz = wi::ctz (r2val | r2mask);
1430 if (r1tz + r2tz >= width)
1431 {
1432 *mask = 0;
1433 *val = 0;
1434 }
1435 else if (r1tz + r2tz > 0)
1436 {
1437 *mask = wi::ext (wi::mask <widest_int> (r1tz + r2tz, true),
1438 width, sgn);
1439 *val = 0;
1440 }
1441 break;
1442 }
1443
1444 case EQ_EXPR:
1445 case NE_EXPR:
1446 {
1447 widest_int m = r1mask | r2mask;
1448 if (r1val.and_not (m) != r2val.and_not (m))
1449 {
1450 *mask = 0;
1451 *val = ((code == EQ_EXPR) ? 0 : 1);
1452 }
1453 else
1454 {
1455 /* We know the result of a comparison is always one or zero. */
1456 *mask = 1;
1457 *val = 0;
1458 }
1459 break;
1460 }
1461
1462 case GE_EXPR:
1463 case GT_EXPR:
1464 swap_p = true;
1465 code = swap_tree_comparison (code);
1466 /* Fall through. */
1467 case LT_EXPR:
1468 case LE_EXPR:
1469 {
1470 int minmax, maxmin;
1471
1472 const widest_int &o1val = swap_p ? r2val : r1val;
1473 const widest_int &o1mask = swap_p ? r2mask : r1mask;
1474 const widest_int &o2val = swap_p ? r1val : r2val;
1475 const widest_int &o2mask = swap_p ? r1mask : r2mask;
1476
1477 /* If the most significant bits are not known we know nothing. */
1478 if (wi::neg_p (o1mask) || wi::neg_p (o2mask))
1479 break;
1480
1481 /* For comparisons the signedness is in the comparison operands. */
1482 sgn = TYPE_SIGN (r1type);
1483
1484 /* If we know the most significant bits we know the value
1485 ranges by means of treating varying bits as zero
1486 or one. Do a cross comparison of the max/min pairs. */
1487 maxmin = wi::cmp (o1val | o1mask, o2val.and_not (o2mask), sgn);
1488 minmax = wi::cmp (o1val.and_not (o1mask), o2val | o2mask, sgn);
1489 if (maxmin < 0) /* o1 is less than o2. */
1490 {
1491 *mask = 0;
1492 *val = 1;
1493 }
1494 else if (minmax > 0) /* o1 is not less or equal to o2. */
1495 {
1496 *mask = 0;
1497 *val = 0;
1498 }
1499 else if (maxmin == minmax) /* o1 and o2 are equal. */
1500 {
1501 /* This probably should never happen as we'd have
1502 folded the thing during fully constant value folding. */
1503 *mask = 0;
1504 *val = (code == LE_EXPR ? 1 : 0);
1505 }
1506 else
1507 {
1508 /* We know the result of a comparison is always one or zero. */
1509 *mask = 1;
1510 *val = 0;
1511 }
1512 break;
1513 }
1514
1515 default:;
1516 }
1517 }
1518
1519 /* Return the propagation value when applying the operation CODE to
1520 the value RHS yielding type TYPE. */
1521
1522 static ccp_prop_value_t
1523 bit_value_unop (enum tree_code code, tree type, tree rhs)
1524 {
1525 ccp_prop_value_t rval = get_value_for_expr (rhs, true);
1526 widest_int value, mask;
1527 ccp_prop_value_t val;
1528
1529 if (rval.lattice_val == UNDEFINED)
1530 return rval;
1531
1532 gcc_assert ((rval.lattice_val == CONSTANT
1533 && TREE_CODE (rval.value) == INTEGER_CST)
1534 || wi::sext (rval.mask, TYPE_PRECISION (TREE_TYPE (rhs))) == -1);
1535 bit_value_unop_1 (code, type, &value, &mask,
1536 TREE_TYPE (rhs), value_to_wide_int (rval), rval.mask);
1537 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
1538 {
1539 val.lattice_val = CONSTANT;
1540 val.mask = mask;
1541 /* ??? Delay building trees here. */
1542 val.value = wide_int_to_tree (type, value);
1543 }
1544 else
1545 {
1546 val.lattice_val = VARYING;
1547 val.value = NULL_TREE;
1548 val.mask = -1;
1549 }
1550 return val;
1551 }
1552
1553 /* Return the propagation value when applying the operation CODE to
1554 the values RHS1 and RHS2 yielding type TYPE. */
1555
1556 static ccp_prop_value_t
1557 bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
1558 {
1559 ccp_prop_value_t r1val = get_value_for_expr (rhs1, true);
1560 ccp_prop_value_t r2val = get_value_for_expr (rhs2, true);
1561 widest_int value, mask;
1562 ccp_prop_value_t val;
1563
1564 if (r1val.lattice_val == UNDEFINED
1565 || r2val.lattice_val == UNDEFINED)
1566 {
1567 val.lattice_val = VARYING;
1568 val.value = NULL_TREE;
1569 val.mask = -1;
1570 return val;
1571 }
1572
1573 gcc_assert ((r1val.lattice_val == CONSTANT
1574 && TREE_CODE (r1val.value) == INTEGER_CST)
1575 || wi::sext (r1val.mask,
1576 TYPE_PRECISION (TREE_TYPE (rhs1))) == -1);
1577 gcc_assert ((r2val.lattice_val == CONSTANT
1578 && TREE_CODE (r2val.value) == INTEGER_CST)
1579 || wi::sext (r2val.mask,
1580 TYPE_PRECISION (TREE_TYPE (rhs2))) == -1);
1581 bit_value_binop_1 (code, type, &value, &mask,
1582 TREE_TYPE (rhs1), value_to_wide_int (r1val), r1val.mask,
1583 TREE_TYPE (rhs2), value_to_wide_int (r2val), r2val.mask);
1584 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
1585 {
1586 val.lattice_val = CONSTANT;
1587 val.mask = mask;
1588 /* ??? Delay building trees here. */
1589 val.value = wide_int_to_tree (type, value);
1590 }
1591 else
1592 {
1593 val.lattice_val = VARYING;
1594 val.value = NULL_TREE;
1595 val.mask = -1;
1596 }
1597 return val;
1598 }
1599
1600 /* Return the propagation value for __builtin_assume_aligned
1601 and functions with the assume_aligned or alloc_align attribute.
1602 For __builtin_assume_aligned, ATTR is NULL_TREE,
1603 for the assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
1604 is false, for the alloc_align attribute ATTR is non-NULL and
1605 ALLOC_ALIGNED is true. */
1606
1607 static ccp_prop_value_t
1608 bit_value_assume_aligned (gimple *stmt, tree attr, ccp_prop_value_t ptrval,
1609 bool alloc_aligned)
1610 {
1611 tree align, misalign = NULL_TREE, type;
1612 unsigned HOST_WIDE_INT aligni, misaligni = 0;
1613 ccp_prop_value_t alignval;
1614 widest_int value, mask;
1615 ccp_prop_value_t val;
1616
1617 if (attr == NULL_TREE)
1618 {
1619 tree ptr = gimple_call_arg (stmt, 0);
1620 type = TREE_TYPE (ptr);
1621 ptrval = get_value_for_expr (ptr, true);
1622 }
1623 else
1624 {
1625 tree lhs = gimple_call_lhs (stmt);
1626 type = TREE_TYPE (lhs);
1627 }
1628
1629 if (ptrval.lattice_val == UNDEFINED)
1630 return ptrval;
1631 gcc_assert ((ptrval.lattice_val == CONSTANT
1632 && TREE_CODE (ptrval.value) == INTEGER_CST)
1633 || wi::sext (ptrval.mask, TYPE_PRECISION (type)) == -1);
1634 if (attr == NULL_TREE)
1635 {
1636 /* Get aligni and misaligni from __builtin_assume_aligned. */
1637 align = gimple_call_arg (stmt, 1);
1638 if (!tree_fits_uhwi_p (align))
1639 return ptrval;
1640 aligni = tree_to_uhwi (align);
1641 if (gimple_call_num_args (stmt) > 2)
1642 {
1643 misalign = gimple_call_arg (stmt, 2);
1644 if (!tree_fits_uhwi_p (misalign))
1645 return ptrval;
1646 misaligni = tree_to_uhwi (misalign);
1647 }
1648 }
1649 else
1650 {
1651 /* Get aligni and misaligni from assume_aligned or
1652 alloc_align attributes. */
1653 if (TREE_VALUE (attr) == NULL_TREE)
1654 return ptrval;
1655 attr = TREE_VALUE (attr);
1656 align = TREE_VALUE (attr);
1657 if (!tree_fits_uhwi_p (align))
1658 return ptrval;
1659 aligni = tree_to_uhwi (align);
1660 if (alloc_aligned)
1661 {
1662 if (aligni == 0 || aligni > gimple_call_num_args (stmt))
1663 return ptrval;
1664 align = gimple_call_arg (stmt, aligni - 1);
1665 if (!tree_fits_uhwi_p (align))
1666 return ptrval;
1667 aligni = tree_to_uhwi (align);
1668 }
1669 else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr)))
1670 {
1671 misalign = TREE_VALUE (TREE_CHAIN (attr));
1672 if (!tree_fits_uhwi_p (misalign))
1673 return ptrval;
1674 misaligni = tree_to_uhwi (misalign);
1675 }
1676 }
1677 if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni)
1678 return ptrval;
1679
1680 align = build_int_cst_type (type, -aligni);
1681 alignval = get_value_for_expr (align, true);
1682 bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
1683 type, value_to_wide_int (ptrval), ptrval.mask,
1684 type, value_to_wide_int (alignval), alignval.mask);
1685 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
1686 {
1687 val.lattice_val = CONSTANT;
1688 val.mask = mask;
1689 gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0);
1690 gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0);
1691 value |= misaligni;
1692 /* ??? Delay building trees here. */
1693 val.value = wide_int_to_tree (type, value);
1694 }
1695 else
1696 {
1697 val.lattice_val = VARYING;
1698 val.value = NULL_TREE;
1699 val.mask = -1;
1700 }
1701 return val;
1702 }
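/* For example (illustrative only): for __builtin_assume_aligned (p, 16, 4)
   with nothing known about P, ALIGNI is 16 and MISALIGNI is 4.  The
   BIT_AND_EXPR with -16 makes the low four bits known zero, and or-ing in
   the misalignment produces a CONSTANT lattice value of 4 with mask -16,
   i.e. P is known to be congruent to 4 modulo 16.  */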
1703
1704 /* Evaluate statement STMT.
1705 Valid only for assignments, calls, conditionals, and switches. */
1706
1707 static ccp_prop_value_t
1708 evaluate_stmt (gimple *stmt)
1709 {
1710 ccp_prop_value_t val;
1711 tree simplified = NULL_TREE;
1712 ccp_lattice_t likelyvalue = likely_value (stmt);
1713 bool is_constant = false;
1714 unsigned int align;
1715
1716 if (dump_file && (dump_flags & TDF_DETAILS))
1717 {
1718 fprintf (dump_file, "which is likely ");
1719 switch (likelyvalue)
1720 {
1721 case CONSTANT:
1722 fprintf (dump_file, "CONSTANT");
1723 break;
1724 case UNDEFINED:
1725 fprintf (dump_file, "UNDEFINED");
1726 break;
1727 case VARYING:
1728 fprintf (dump_file, "VARYING");
1729 break;
1730 default:;
1731 }
1732 fprintf (dump_file, "\n");
1733 }
1734
1735 /* If the statement is likely to have a CONSTANT result, then try
1736 to fold the statement to determine the constant value. */
1737 /* FIXME. This is the only place that we call ccp_fold.
1738 Since likely_value never returns CONSTANT for calls, we will
1739 not attempt to fold them, including builtins that may profit. */
1740 if (likelyvalue == CONSTANT)
1741 {
1742 fold_defer_overflow_warnings ();
1743 simplified = ccp_fold (stmt);
1744 if (simplified && TREE_CODE (simplified) == SSA_NAME)
1745 {
1746 val = *get_value (simplified);
1747 if (val.lattice_val != VARYING)
1748 {
1749 fold_undefer_overflow_warnings (true, stmt, 0);
1750 return val;
1751 }
1752 }
1753 is_constant = simplified && is_gimple_min_invariant (simplified);
1754 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1755 if (is_constant)
1756 {
1757 /* The statement produced a constant value. */
1758 val.lattice_val = CONSTANT;
1759 val.value = simplified;
1760 val.mask = 0;
1761 return val;
1762 }
1763 }
1764 /* If the statement is likely to have a VARYING result, then do not
1765 bother folding the statement. */
1766 else if (likelyvalue == VARYING)
1767 {
1768 enum gimple_code code = gimple_code (stmt);
1769 if (code == GIMPLE_ASSIGN)
1770 {
1771 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1772
1773 /* Other cases cannot satisfy is_gimple_min_invariant
1774 without folding. */
1775 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1776 simplified = gimple_assign_rhs1 (stmt);
1777 }
1778 else if (code == GIMPLE_SWITCH)
1779 simplified = gimple_switch_index (as_a <gswitch *> (stmt));
1780 else
1781 /* These cannot satisfy is_gimple_min_invariant without folding. */
1782 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1783 is_constant = simplified && is_gimple_min_invariant (simplified);
1784 if (is_constant)
1785 {
1786 /* The statement produced a constant value. */
1787 val.lattice_val = CONSTANT;
1788 val.value = simplified;
1789 val.mask = 0;
1790 }
1791 }
1792 /* If the statement result is likely UNDEFINED, make it so. */
1793 else if (likelyvalue == UNDEFINED)
1794 {
1795 val.lattice_val = UNDEFINED;
1796 val.value = NULL_TREE;
1797 val.mask = 0;
1798 return val;
1799 }
1800
1801 /* Resort to simplification for bitwise tracking. */
1802 if (flag_tree_bit_ccp
1803 && (likelyvalue == CONSTANT || is_gimple_call (stmt)
1804 || (gimple_assign_single_p (stmt)
1805 && gimple_assign_rhs_code (stmt) == ADDR_EXPR))
1806 && !is_constant)
1807 {
1808 enum gimple_code code = gimple_code (stmt);
1809 val.lattice_val = VARYING;
1810 val.value = NULL_TREE;
1811 val.mask = -1;
1812 if (code == GIMPLE_ASSIGN)
1813 {
1814 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1815 tree rhs1 = gimple_assign_rhs1 (stmt);
1816 tree lhs = gimple_assign_lhs (stmt);
1817 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
1818 || POINTER_TYPE_P (TREE_TYPE (lhs)))
1819 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1820 || POINTER_TYPE_P (TREE_TYPE (rhs1))))
1821 switch (get_gimple_rhs_class (subcode))
1822 {
1823 case GIMPLE_SINGLE_RHS:
1824 val = get_value_for_expr (rhs1, true);
1825 break;
1826
1827 case GIMPLE_UNARY_RHS:
1828 val = bit_value_unop (subcode, TREE_TYPE (lhs), rhs1);
1829 break;
1830
1831 case GIMPLE_BINARY_RHS:
1832 val = bit_value_binop (subcode, TREE_TYPE (lhs), rhs1,
1833 gimple_assign_rhs2 (stmt));
1834 break;
1835
1836 default:;
1837 }
1838 }
1839 else if (code == GIMPLE_COND)
1840 {
1841 enum tree_code code = gimple_cond_code (stmt);
1842 tree rhs1 = gimple_cond_lhs (stmt);
1843 tree rhs2 = gimple_cond_rhs (stmt);
1844 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1845 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1846 val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
1847 }
1848 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1849 {
1850 tree fndecl = gimple_call_fndecl (stmt);
1851 switch (DECL_FUNCTION_CODE (fndecl))
1852 {
1853 case BUILT_IN_MALLOC:
1854 case BUILT_IN_REALLOC:
1855 case BUILT_IN_CALLOC:
1856 case BUILT_IN_STRDUP:
1857 case BUILT_IN_STRNDUP:
1858 val.lattice_val = CONSTANT;
1859 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
1860 val.mask = ~((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT
1861 / BITS_PER_UNIT - 1);
1862 break;
1863
1864 case BUILT_IN_ALLOCA:
1865 case BUILT_IN_ALLOCA_WITH_ALIGN:
1866 align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
1867 ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
1868 : BIGGEST_ALIGNMENT);
1869 val.lattice_val = CONSTANT;
1870 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
1871 val.mask = ~((HOST_WIDE_INT) align / BITS_PER_UNIT - 1);
1872 break;
1873
1874 /* These builtins return their first argument, unmodified. */
1875 case BUILT_IN_MEMCPY:
1876 case BUILT_IN_MEMMOVE:
1877 case BUILT_IN_MEMSET:
1878 case BUILT_IN_STRCPY:
1879 case BUILT_IN_STRNCPY:
1880 case BUILT_IN_MEMCPY_CHK:
1881 case BUILT_IN_MEMMOVE_CHK:
1882 case BUILT_IN_MEMSET_CHK:
1883 case BUILT_IN_STRCPY_CHK:
1884 case BUILT_IN_STRNCPY_CHK:
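	    /* The result therefore gets exactly the lattice value already
	       computed for the first argument, including any known alignment
	       bits.  */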
1885 val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
1886 break;
1887
1888 case BUILT_IN_ASSUME_ALIGNED:
1889 val = bit_value_assume_aligned (stmt, NULL_TREE, val, false);
1890 break;
1891
1892 case BUILT_IN_ALIGNED_ALLOC:
1893 {
1894 tree align = get_constant_value (gimple_call_arg (stmt, 0));
1895 if (align
1896 && tree_fits_uhwi_p (align))
1897 {
1898 unsigned HOST_WIDE_INT aligni = tree_to_uhwi (align);
1899 if (aligni > 1
1900 /* align must be a power of two.  */
1901 && (aligni & (aligni - 1)) == 0)
1902 {
1903 val.lattice_val = CONSTANT;
1904 val.value = build_int_cst (ptr_type_node, 0);
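		      /* For a power of two, -ALIGNI equals ~(ALIGNI - 1), so only
			 the low log2(ALIGNI) bits are known (and zero); e.g. an
			 alignment of 64 yields a mask of ~63.  */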
1905 val.mask = -aligni;
1906 }
1907 }
1908 break;
1909 }
1910
1911 default:;
1912 }
1913 }
1914 if (is_gimple_call (stmt) && gimple_call_lhs (stmt))
1915 {
1916 tree fntype = gimple_call_fntype (stmt);
1917 if (fntype)
1918 {
1919 tree attrs = lookup_attribute ("assume_aligned",
1920 TYPE_ATTRIBUTES (fntype));
1921 if (attrs)
1922 val = bit_value_assume_aligned (stmt, attrs, val, false);
1923 attrs = lookup_attribute ("alloc_align",
1924 TYPE_ATTRIBUTES (fntype));
1925 if (attrs)
1926 val = bit_value_assume_aligned (stmt, attrs, val, true);
1927 }
1928 }
1929 is_constant = (val.lattice_val == CONSTANT);
1930 }
1931
1932 if (flag_tree_bit_ccp
1933 && ((is_constant && TREE_CODE (val.value) == INTEGER_CST)
1934 || !is_constant)
1935 && gimple_get_lhs (stmt)
1936 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME)
1937 {
1938 tree lhs = gimple_get_lhs (stmt);
1939 wide_int nonzero_bits = get_nonzero_bits (lhs);
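      /* Bits that are clear in NONZERO_BITS are known to be zero in LHS, so
	 they can be removed from the set of unknown bits in the mask and
	 forced to zero in the constant value itself.  */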
1940 if (nonzero_bits != -1)
1941 {
1942 if (!is_constant)
1943 {
1944 val.lattice_val = CONSTANT;
1945 val.value = build_zero_cst (TREE_TYPE (lhs));
1946 val.mask = extend_mask (nonzero_bits);
1947 is_constant = true;
1948 }
1949 else
1950 {
1951 if (wi::bit_and_not (val.value, nonzero_bits) != 0)
1952 val.value = wide_int_to_tree (TREE_TYPE (lhs),
1953 nonzero_bits & val.value);
1954 if (nonzero_bits == 0)
1955 val.mask = 0;
1956 else
1957 val.mask = val.mask & extend_mask (nonzero_bits);
1958 }
1959 }
1960 }
1961
1962 /* The statement produced a nonconstant value. */
1963 if (!is_constant)
1964 {
1965 /* The statement produced a copy. */
1966 if (simplified && TREE_CODE (simplified) == SSA_NAME
1967 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (simplified))
1968 {
1969 val.lattice_val = CONSTANT;
1970 val.value = simplified;
1971 val.mask = -1;
1972 }
1973 /* The statement is VARYING. */
1974 else
1975 {
1976 val.lattice_val = VARYING;
1977 val.value = NULL_TREE;
1978 val.mask = -1;
1979 }
1980 }
1981
1982 return val;
1983 }
1984
1985 typedef hash_table<nofree_ptr_hash<gimple> > gimple_htab;
1986
1987 /* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
1988 each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
1989
1990 static void
1991 insert_clobber_before_stack_restore (tree saved_val, tree var,
1992 gimple_htab **visited)
1993 {
1994 gimple *stmt;
1995 gassign *clobber_stmt;
1996 tree clobber;
1997 imm_use_iterator iter;
1998 gimple_stmt_iterator i;
1999 gimple **slot;
2000
2001 FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
2002 if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
2003 {
2004 clobber = build_constructor (TREE_TYPE (var),
2005 NULL);
2006 TREE_THIS_VOLATILE (clobber) = 1;
2007 clobber_stmt = gimple_build_assign (var, clobber);
2008
2009 i = gsi_for_stmt (stmt);
2010 gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
2011 }
2012 else if (gimple_code (stmt) == GIMPLE_PHI)
2013 {
2014 if (!*visited)
2015 *visited = new gimple_htab (10);
2016
2017 slot = (*visited)->find_slot (stmt, INSERT);
2018 if (*slot != NULL)
2019 continue;
2020
2021 *slot = stmt;
2022 insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
2023 visited);
2024 }
2025 else if (gimple_assign_ssa_name_copy_p (stmt))
2026 insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
2027 visited);
2028 else if (chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
2029 continue;
2030 else
2031 gcc_assert (is_gimple_debug (stmt));
2032 }
2033
2034 /* Advance the iterator to the previous non-debug gimple statement in the same
2035 or dominating basic block. */
2036
2037 static inline void
2038 gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
2039 {
2040 basic_block dom;
2041
2042 gsi_prev_nondebug (i);
2043 while (gsi_end_p (*i))
2044 {
2045 dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
2046 if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2047 return;
2048
2049 *i = gsi_last_bb (dom);
2050 }
2051 }
2052
2053 /* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
2054 a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
2055
2056 It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when a
2057 previous pass (such as DOM) duplicated it along multiple paths to a BB. In
2058 that case the function gives up without inserting the clobbers. */
2059
2060 static void
2061 insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
2062 {
2063 gimple *stmt;
2064 tree saved_val;
2065 gimple_htab *visited = NULL;
2066
2067 for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
2068 {
2069 stmt = gsi_stmt (i);
2070
2071 if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
2072 continue;
2073
2074 saved_val = gimple_call_lhs (stmt);
2075 if (saved_val == NULL_TREE)
2076 continue;
2077
2078 insert_clobber_before_stack_restore (saved_val, var, &visited);
2079 break;
2080 }
2081
2082 delete visited;
2083 }
2084
2085 /* Detects a __builtin_alloca_with_align with a constant size argument.  Declares
2086 a fixed-size array of that size and returns its address; if the call cannot
2087 be folded this way, returns NULL_TREE. */
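
/* As an illustration (the SSA and variable names are made up), a call such as

     ptr_1 = __builtin_alloca_with_align (16, 64);

   requests 16 bytes aligned to 64 bits.  On the usual 8-bit-byte targets it
   can be folded into the address of a 16-byte local array whose DECL_ALIGN
   is 64, i.e. effectively ptr_1 = &tmp_array.  */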
2088
2089 static tree
2090 fold_builtin_alloca_with_align (gimple *stmt)
2091 {
2092 unsigned HOST_WIDE_INT size, threshold, n_elem;
2093 tree lhs, arg, block, var, elem_type, array_type;
2094
2095 /* Get lhs. */
2096 lhs = gimple_call_lhs (stmt);
2097 if (lhs == NULL_TREE)
2098 return NULL_TREE;
2099
2100 /* Detect constant argument. */
2101 arg = get_constant_value (gimple_call_arg (stmt, 0));
2102 if (arg == NULL_TREE
2103 || TREE_CODE (arg) != INTEGER_CST
2104 || !tree_fits_uhwi_p (arg))
2105 return NULL_TREE;
2106
2107 size = tree_to_uhwi (arg);
2108
2109 /* Heuristic: don't fold large allocas. */
2110 threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
2111 /* In case the alloca is located at function entry, it has the same lifetime
2112 as a declared array, so we allow a larger size. */
2113 block = gimple_block (stmt);
2114 if (!(cfun->after_inlining
2115 && block
2116 && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
2117 threshold /= 10;
2118 if (size > threshold)
2119 return NULL_TREE;
2120
2121 /* Declare array. */
2122 elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
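  /* ELEM_TYPE is a BITS_PER_UNIT-wide integer, so on the common 8-bit-byte
     targets N_ELEM below is simply SIZE.  */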
2123 n_elem = size * 8 / BITS_PER_UNIT;
2124 array_type = build_array_type_nelts (elem_type, n_elem);
2125 var = create_tmp_var (array_type);
2126 DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
2127 {
2128 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
2129 if (pi != NULL && !pi->pt.anything)
2130 {
2131 bool singleton_p;
2132 unsigned uid;
2133 singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
2134 gcc_assert (singleton_p);
2135 SET_DECL_PT_UID (var, uid);
2136 }
2137 }
2138
2139 /* Fold alloca to the address of the array. */
2140 return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
2141 }
2142
2143 /* Fold the stmt at *GSI with CCP specific information that propagating
2144 and regular folding do not catch. */
2145
2146 static bool
2147 ccp_fold_stmt (gimple_stmt_iterator *gsi)
2148 {
2149 gimple *stmt = gsi_stmt (*gsi);
2150
2151 switch (gimple_code (stmt))
2152 {
2153 case GIMPLE_COND:
2154 {
2155 gcond *cond_stmt = as_a <gcond *> (stmt);
2156 ccp_prop_value_t val;
2157 /* Statement evaluation will handle type mismatches in constants
2158 more gracefully than the final propagation. This allows us to
2159 fold more conditionals here. */
2160 val = evaluate_stmt (stmt);
2161 if (val.lattice_val != CONSTANT
2162 || val.mask != 0)
2163 return false;
2164
2165 if (dump_file)
2166 {
2167 fprintf (dump_file, "Folding predicate ");
2168 print_gimple_expr (dump_file, stmt, 0, 0);
2169 fprintf (dump_file, " to ");
2170 print_generic_expr (dump_file, val.value, 0);
2171 fprintf (dump_file, "\n");
2172 }
2173
2174 if (integer_zerop (val.value))
2175 gimple_cond_make_false (cond_stmt);
2176 else
2177 gimple_cond_make_true (cond_stmt);
2178
2179 return true;
2180 }
2181
2182 case GIMPLE_CALL:
2183 {
2184 tree lhs = gimple_call_lhs (stmt);
2185 int flags = gimple_call_flags (stmt);
2186 tree val;
2187 tree argt;
2188 bool changed = false;
2189 unsigned i;
2190
2191 /* If the call was folded into a constant, make sure it goes
2192 away even if we cannot propagate into all uses because of
2193 type issues. */
2194 if (lhs
2195 && TREE_CODE (lhs) == SSA_NAME
2196 && (val = get_constant_value (lhs))
2197 /* Don't optimize away calls that have side-effects. */
2198 && (flags & (ECF_CONST|ECF_PURE)) != 0
2199 && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
2200 {
2201 tree new_rhs = unshare_expr (val);
2202 bool res;
2203 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2204 TREE_TYPE (new_rhs)))
2205 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
2206 res = update_call_from_tree (gsi, new_rhs);
2207 gcc_assert (res);
2208 return true;
2209 }
2210
2211 /* Internal calls provide no argument types, so the extra laxity
2212 for normal calls does not apply. */
2213 if (gimple_call_internal_p (stmt))
2214 return false;
2215
2216 /* The heuristic of fold_builtin_alloca_with_align differs before and
2217 after inlining, so we don't require the arg to have just been turned
2218 into a constant by this pass; it merely has to be constant. */
2219 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
2220 {
2221 tree new_rhs = fold_builtin_alloca_with_align (stmt);
2222 if (new_rhs)
2223 {
2224 bool res = update_call_from_tree (gsi, new_rhs);
2225 tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0),0);
2226 gcc_assert (res);
2227 insert_clobbers_for_var (*gsi, var);
2228 return true;
2229 }
2230 }
2231
2232 /* Propagate into the call arguments. Compared to replace_uses_in
2233 this can use the argument slot types for type verification
2234 instead of the current argument type. We also can safely
2235 drop qualifiers here as we are dealing with constants anyway. */
2236 argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
2237 for (i = 0; i < gimple_call_num_args (stmt) && argt;
2238 ++i, argt = TREE_CHAIN (argt))
2239 {
2240 tree arg = gimple_call_arg (stmt, i);
2241 if (TREE_CODE (arg) == SSA_NAME
2242 && (val = get_constant_value (arg))
2243 && useless_type_conversion_p
2244 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
2245 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
2246 {
2247 gimple_call_set_arg (stmt, i, unshare_expr (val));
2248 changed = true;
2249 }
2250 }
2251
2252 return changed;
2253 }
2254
2255 case GIMPLE_ASSIGN:
2256 {
2257 tree lhs = gimple_assign_lhs (stmt);
2258 tree val;
2259
2260 /* If we have a load that turned out to be constant, replace it,
2261 as we cannot propagate into all uses in all cases. */
2262 if (gimple_assign_single_p (stmt)
2263 && TREE_CODE (lhs) == SSA_NAME
2264 && (val = get_constant_value (lhs)))
2265 {
2266 tree rhs = unshare_expr (val);
2267 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2268 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
2269 gimple_assign_set_rhs_from_tree (gsi, rhs);
2270 return true;
2271 }
2272
2273 return false;
2274 }
2275
2276 default:
2277 return false;
2278 }
2279 }
2280
2281 /* Visit the assignment statement STMT. Set the value of its LHS to the
2282 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
2283 creates virtual definitions, set the value of each new name to that
2284 of the RHS (if we can derive a constant out of the RHS).
2285 Value-returning call statements also perform an assignment, and
2286 are handled here. */
2287
2288 static enum ssa_prop_result
2289 visit_assignment (gimple *stmt, tree *output_p)
2290 {
2291 ccp_prop_value_t val;
2292 enum ssa_prop_result retval = SSA_PROP_NOT_INTERESTING;
2293
2294 tree lhs = gimple_get_lhs (stmt);
2295 if (TREE_CODE (lhs) == SSA_NAME)
2296 {
2297 /* Evaluate the statement, which could be
2298 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2299 val = evaluate_stmt (stmt);
2300
2301 /* If STMT is an assignment to an SSA_NAME, we only have one
2302 value to set. */
2303 if (set_lattice_value (lhs, &val))
2304 {
2305 *output_p = lhs;
2306 if (val.lattice_val == VARYING)
2307 retval = SSA_PROP_VARYING;
2308 else
2309 retval = SSA_PROP_INTERESTING;
2310 }
2311 }
2312
2313 return retval;
2314 }
2315
2316
2317 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
2318 if it can determine which edge will be taken. Otherwise, return
2319 SSA_PROP_VARYING. */
2320
2321 static enum ssa_prop_result
2322 visit_cond_stmt (gimple *stmt, edge *taken_edge_p)
2323 {
2324 ccp_prop_value_t val;
2325 basic_block block;
2326
2327 block = gimple_bb (stmt);
2328 val = evaluate_stmt (stmt);
2329 if (val.lattice_val != CONSTANT
2330 || val.mask != 0)
2331 return SSA_PROP_VARYING;
2332
2333 /* Find which edge out of the conditional block will be taken and add it
2334 to the worklist. If no single edge can be determined statically,
2335 return SSA_PROP_VARYING to feed all the outgoing edges to the
2336 propagation engine. */
2337 *taken_edge_p = find_taken_edge (block, val.value);
2338 if (*taken_edge_p)
2339 return SSA_PROP_INTERESTING;
2340 else
2341 return SSA_PROP_VARYING;
2342 }
2343
2344
2345 /* Evaluate statement STMT. If the statement produces an output value and
2346 its evaluation changes the lattice value of its output, return
2347 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
2348 output value.
2349
2350 If STMT is a conditional branch and we can determine its truth
2351 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
2352 value, return SSA_PROP_VARYING. */
2353
2354 static enum ssa_prop_result
2355 ccp_visit_stmt (gimple *stmt, edge *taken_edge_p, tree *output_p)
2356 {
2357 tree def;
2358 ssa_op_iter iter;
2359
2360 if (dump_file && (dump_flags & TDF_DETAILS))
2361 {
2362 fprintf (dump_file, "\nVisiting statement:\n");
2363 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2364 }
2365
2366 switch (gimple_code (stmt))
2367 {
2368 case GIMPLE_ASSIGN:
2369 /* If the statement is an assignment that produces a single
2370 output value, evaluate its RHS to see if the lattice value of
2371 its output has changed. */
2372 return visit_assignment (stmt, output_p);
2373
2374 case GIMPLE_CALL:
2375 /* A value-returning call also performs an assignment. */
2376 if (gimple_call_lhs (stmt) != NULL_TREE)
2377 return visit_assignment (stmt, output_p);
2378 break;
2379
2380 case GIMPLE_COND:
2381 case GIMPLE_SWITCH:
2382 /* If STMT is a conditional branch, see if we can determine
2383 which branch will be taken. */
2384 /* FIXME. It appears that we should be able to optimize
2385 computed GOTOs here as well. */
2386 return visit_cond_stmt (stmt, taken_edge_p);
2387
2388 default:
2389 break;
2390 }
2391
2392 /* Any other kind of statement is not interesting for constant
2393 propagation and, therefore, not worth simulating. */
2394 if (dump_file && (dump_flags & TDF_DETAILS))
2395 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
2396
2397 /* Definitions made by statements other than assignments to
2398 SSA_NAMEs represent unknown modifications to their outputs.
2399 Mark them VARYING. */
2400 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
2401 set_value_varying (def);
2402
2403 return SSA_PROP_VARYING;
2404 }
2405
2406
2407 /* Main entry point for SSA Conditional Constant Propagation. */
2408
2409 static unsigned int
2410 do_ssa_ccp (void)
2411 {
2412 unsigned int todo = 0;
2413 calculate_dominance_info (CDI_DOMINATORS);
2414 ccp_initialize ();
2415 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
2416 if (ccp_finalize ())
2417 todo = (TODO_cleanup_cfg | TODO_update_ssa);
2418 free_dominance_info (CDI_DOMINATORS);
2419 return todo;
2420 }
2421
2422
2423 namespace {
2424
2425 const pass_data pass_data_ccp =
2426 {
2427 GIMPLE_PASS, /* type */
2428 "ccp", /* name */
2429 OPTGROUP_NONE, /* optinfo_flags */
2430 TV_TREE_CCP, /* tv_id */
2431 ( PROP_cfg | PROP_ssa ), /* properties_required */
2432 0, /* properties_provided */
2433 0, /* properties_destroyed */
2434 0, /* todo_flags_start */
2435 TODO_update_address_taken, /* todo_flags_finish */
2436 };
2437
2438 class pass_ccp : public gimple_opt_pass
2439 {
2440 public:
2441 pass_ccp (gcc::context *ctxt)
2442 : gimple_opt_pass (pass_data_ccp, ctxt)
2443 {}
2444
2445 /* opt_pass methods: */
2446 opt_pass * clone () { return new pass_ccp (m_ctxt); }
2447 virtual bool gate (function *) { return flag_tree_ccp != 0; }
2448 virtual unsigned int execute (function *) { return do_ssa_ccp (); }
2449
2450 }; // class pass_ccp
2451
2452 } // anon namespace
2453
2454 gimple_opt_pass *
2455 make_pass_ccp (gcc::context *ctxt)
2456 {
2457 return new pass_ccp (ctxt);
2458 }
2459
2460
2461
2462 /* Try to optimize out __builtin_stack_restore. Optimize it out
2463 if there is another __builtin_stack_restore in the same basic
2464 block and no calls or ASM_EXPRs are in between, or if this block's
2465 only outgoing edge is to EXIT_BLOCK and there are no calls or
2466 ASM_EXPRs after this __builtin_stack_restore. */
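
/* As an illustration (the SSA names are made up), given

     saved_1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_1);   <-- the call at I
     ...                                  <-- no calls or ASM_EXPRs
     __builtin_stack_restore (saved_1);

   the restore at I has no observable effect, so it is replaced by a constant
   and deleted; if SAVED_1 has a single use, the matching __builtin_stack_save
   is zapped as well.  */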
2467
2468 static tree
2469 optimize_stack_restore (gimple_stmt_iterator i)
2470 {
2471 tree callee;
2472 gimple *stmt;
2473
2474 basic_block bb = gsi_bb (i);
2475 gimple *call = gsi_stmt (i);
2476
2477 if (gimple_code (call) != GIMPLE_CALL
2478 || gimple_call_num_args (call) != 1
2479 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2480 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
2481 return NULL_TREE;
2482
2483 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
2484 {
2485 stmt = gsi_stmt (i);
2486 if (gimple_code (stmt) == GIMPLE_ASM)
2487 return NULL_TREE;
2488 if (gimple_code (stmt) != GIMPLE_CALL)
2489 continue;
2490
2491 callee = gimple_call_fndecl (stmt);
2492 if (!callee
2493 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2494 /* All regular builtins are ok, just obviously not alloca. */
2495 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
2496 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
2497 return NULL_TREE;
2498
2499 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
2500 goto second_stack_restore;
2501 }
2502
2503 if (!gsi_end_p (i))
2504 return NULL_TREE;
2505
2506 /* Allow zero successors, or a single successor that is the exit block. */
2507 switch (EDGE_COUNT (bb->succs))
2508 {
2509 case 0:
2510 break;
2511 case 1:
2512 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2513 return NULL_TREE;
2514 break;
2515 default:
2516 return NULL_TREE;
2517 }
2518 second_stack_restore:
2519
2520 /* If there's exactly one use, then zap the call to __builtin_stack_save.
2521 If there are multiple uses, then the last one should remove the call.
2522 In any case, whether the call to __builtin_stack_save can be removed
2523 or not is irrelevant to removing the call to __builtin_stack_restore. */
2524 if (has_single_use (gimple_call_arg (call, 0)))
2525 {
2526 gimple *stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
2527 if (is_gimple_call (stack_save))
2528 {
2529 callee = gimple_call_fndecl (stack_save);
2530 if (callee
2531 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
2532 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
2533 {
2534 gimple_stmt_iterator stack_save_gsi;
2535 tree rhs;
2536
2537 stack_save_gsi = gsi_for_stmt (stack_save);
2538 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2539 update_call_from_tree (&stack_save_gsi, rhs);
2540 }
2541 }
2542 }
2543
2544 /* No effect, so the statement will be deleted. */
2545 return integer_zero_node;
2546 }
2547
2548 /* If the va_list type is a simple pointer and nothing special is needed,
2549 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
2550 optimize __builtin_va_end (&ap) away as a no-op, and turn __builtin_va_copy
2551 into a simple pointer assignment. */
2552
2553 static tree
2554 optimize_stdarg_builtin (gimple *call)
2555 {
2556 tree callee, lhs, rhs, cfun_va_list;
2557 bool va_list_simple_ptr;
2558 location_t loc = gimple_location (call);
2559
2560 if (gimple_code (call) != GIMPLE_CALL)
2561 return NULL_TREE;
2562
2563 callee = gimple_call_fndecl (call);
2564
2565 cfun_va_list = targetm.fn_abi_va_list (callee);
2566 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
2567 && (TREE_TYPE (cfun_va_list) == void_type_node
2568 || TREE_TYPE (cfun_va_list) == char_type_node);
2569
2570 switch (DECL_FUNCTION_CODE (callee))
2571 {
2572 case BUILT_IN_VA_START:
2573 if (!va_list_simple_ptr
2574 || targetm.expand_builtin_va_start != NULL
2575 || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
2576 return NULL_TREE;
2577
2578 if (gimple_call_num_args (call) != 2)
2579 return NULL_TREE;
2580
2581 lhs = gimple_call_arg (call, 0);
2582 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2583 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2584 != TYPE_MAIN_VARIANT (cfun_va_list))
2585 return NULL_TREE;
2586
2587 lhs = build_fold_indirect_ref_loc (loc, lhs);
2588 rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
2589 1, integer_zero_node);
2590 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
2591 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2592
2593 case BUILT_IN_VA_COPY:
2594 if (!va_list_simple_ptr)
2595 return NULL_TREE;
2596
2597 if (gimple_call_num_args (call) != 2)
2598 return NULL_TREE;
2599
2600 lhs = gimple_call_arg (call, 0);
2601 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2602 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2603 != TYPE_MAIN_VARIANT (cfun_va_list))
2604 return NULL_TREE;
2605
2606 lhs = build_fold_indirect_ref_loc (loc, lhs);
2607 rhs = gimple_call_arg (call, 1);
2608 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
2609 != TYPE_MAIN_VARIANT (cfun_va_list))
2610 return NULL_TREE;
2611
2612 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
2613 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2614
2615 case BUILT_IN_VA_END:
2616 /* No effect, so the statement will be deleted. */
2617 return integer_zero_node;
2618
2619 default:
2620 gcc_unreachable ();
2621 }
2622 }
2623
2624 /* Attempt to make the block containing the __builtin_unreachable call at I
2625 unreachable by changing the incoming jumps. Return true if at least one jump was changed. */
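
/* For example (illustrative), when a predecessor block ends in

     if (x_1 > 10) goto <bb unreachable>; else goto <bb 4>;

   and the edge into the unreachable block is the true edge, the condition is
   rewritten to constant false (and symmetrically to constant true for a false
   edge), so that the edge can never be taken.  */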
2626
2627 static bool
2628 optimize_unreachable (gimple_stmt_iterator i)
2629 {
2630 basic_block bb = gsi_bb (i);
2631 gimple_stmt_iterator gsi;
2632 gimple *stmt;
2633 edge_iterator ei;
2634 edge e;
2635 bool ret;
2636
2637 if (flag_sanitize & SANITIZE_UNREACHABLE)
2638 return false;
2639
2640 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2641 {
2642 stmt = gsi_stmt (gsi);
2643
2644 if (is_gimple_debug (stmt))
2645 continue;
2646
2647 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2648 {
2649 /* Verify we do not need to preserve the label. */
2650 if (FORCED_LABEL (gimple_label_label (label_stmt)))
2651 return false;
2652
2653 continue;
2654 }
2655
2656 /* Only handle the case that __builtin_unreachable is the first statement
2657 in the block. We rely on DCE to remove stmts without side-effects
2658 before __builtin_unreachable. */
2659 if (gsi_stmt (gsi) != gsi_stmt (i))
2660 return false;
2661 }
2662
2663 ret = false;
2664 FOR_EACH_EDGE (e, ei, bb->preds)
2665 {
2666 gsi = gsi_last_bb (e->src);
2667 if (gsi_end_p (gsi))
2668 continue;
2669
2670 stmt = gsi_stmt (gsi);
2671 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
2672 {
2673 if (e->flags & EDGE_TRUE_VALUE)
2674 gimple_cond_make_false (cond_stmt);
2675 else if (e->flags & EDGE_FALSE_VALUE)
2676 gimple_cond_make_true (cond_stmt);
2677 else
2678 gcc_unreachable ();
2679 update_stmt (cond_stmt);
2680 }
2681 else
2682 {
2683 /* TODO: handle other cases, e.g. a switch statement. */
2684 continue;
2685 }
2686
2687 ret = true;
2688 }
2689
2690 return ret;
2691 }
2692
2693 /* A simple pass that attempts to fold all builtin functions. This pass
2694 is run after we've propagated as many constants as we can. */
2695
2696 namespace {
2697
2698 const pass_data pass_data_fold_builtins =
2699 {
2700 GIMPLE_PASS, /* type */
2701 "fab", /* name */
2702 OPTGROUP_NONE, /* optinfo_flags */
2703 TV_NONE, /* tv_id */
2704 ( PROP_cfg | PROP_ssa ), /* properties_required */
2705 0, /* properties_provided */
2706 0, /* properties_destroyed */
2707 0, /* todo_flags_start */
2708 TODO_update_ssa, /* todo_flags_finish */
2709 };
2710
2711 class pass_fold_builtins : public gimple_opt_pass
2712 {
2713 public:
2714 pass_fold_builtins (gcc::context *ctxt)
2715 : gimple_opt_pass (pass_data_fold_builtins, ctxt)
2716 {}
2717
2718 /* opt_pass methods: */
2719 opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
2720 virtual unsigned int execute (function *);
2721
2722 }; // class pass_fold_builtins
2723
2724 unsigned int
2725 pass_fold_builtins::execute (function *fun)
2726 {
2727 bool cfg_changed = false;
2728 basic_block bb;
2729 unsigned int todoflags = 0;
2730
2731 FOR_EACH_BB_FN (bb, fun)
2732 {
2733 gimple_stmt_iterator i;
2734 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
2735 {
2736 gimple *stmt, *old_stmt;
2737 tree callee;
2738 enum built_in_function fcode;
2739
2740 stmt = gsi_stmt (i);
2741
2742 if (gimple_code (stmt) != GIMPLE_CALL)
2743 {
2744 /* Remove all *ssaname_N ={v} {CLOBBER}; stmts;
2745 after the last GIMPLE DSE they aren't needed and might
2746 unnecessarily keep the SSA_NAMEs live. */
2747 if (gimple_clobber_p (stmt))
2748 {
2749 tree lhs = gimple_assign_lhs (stmt);
2750 if (TREE_CODE (lhs) == MEM_REF
2751 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
2752 {
2753 unlink_stmt_vdef (stmt);
2754 gsi_remove (&i, true);
2755 release_defs (stmt);
2756 continue;
2757 }
2758 }
2759 gsi_next (&i);
2760 continue;
2761 }
2762
2763 callee = gimple_call_fndecl (stmt);
2764 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2765 {
2766 gsi_next (&i);
2767 continue;
2768 }
2769
2770 fcode = DECL_FUNCTION_CODE (callee);
2771 if (fold_stmt (&i))
2772 ;
2773 else
2774 {
2775 tree result = NULL_TREE;
2776 switch (DECL_FUNCTION_CODE (callee))
2777 {
2778 case BUILT_IN_CONSTANT_P:
2779 /* Resolve __builtin_constant_p. If it hasn't been
2780 folded to integer_one_node by now, it's fairly
2781 certain that the value simply isn't constant. */
2782 result = integer_zero_node;
2783 break;
2784
2785 case BUILT_IN_ASSUME_ALIGNED:
2786 /* Remove __builtin_assume_aligned. */
2787 result = gimple_call_arg (stmt, 0);
2788 break;
2789
2790 case BUILT_IN_STACK_RESTORE:
2791 result = optimize_stack_restore (i);
2792 if (result)
2793 break;
2794 gsi_next (&i);
2795 continue;
2796
2797 case BUILT_IN_UNREACHABLE:
2798 if (optimize_unreachable (i))
2799 cfg_changed = true;
2800 break;
2801
2802 case BUILT_IN_VA_START:
2803 case BUILT_IN_VA_END:
2804 case BUILT_IN_VA_COPY:
2805 /* These shouldn't be folded before pass_stdarg. */
2806 result = optimize_stdarg_builtin (stmt);
2807 if (result)
2808 break;
2809 /* FALLTHRU */
2810
2811 default:;
2812 }
2813
2814 if (!result)
2815 {
2816 gsi_next (&i);
2817 continue;
2818 }
2819
2820 if (!update_call_from_tree (&i, result))
2821 gimplify_and_update_call_from_tree (&i, result);
2822 }
2823
2824 todoflags |= TODO_update_address_taken;
2825
2826 if (dump_file && (dump_flags & TDF_DETAILS))
2827 {
2828 fprintf (dump_file, "Simplified\n ");
2829 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2830 }
2831
2832 old_stmt = stmt;
2833 stmt = gsi_stmt (i);
2834 update_stmt (stmt);
2835
2836 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
2837 && gimple_purge_dead_eh_edges (bb))
2838 cfg_changed = true;
2839
2840 if (dump_file && (dump_flags & TDF_DETAILS))
2841 {
2842 fprintf (dump_file, "to\n ");
2843 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2844 fprintf (dump_file, "\n");
2845 }
2846
2847 /* Retry the same statement if it changed into another
2848 builtin; there might be new opportunities now. */
2849 if (gimple_code (stmt) != GIMPLE_CALL)
2850 {
2851 gsi_next (&i);
2852 continue;
2853 }
2854 callee = gimple_call_fndecl (stmt);
2855 if (!callee
2856 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2857 || DECL_FUNCTION_CODE (callee) == fcode)
2858 gsi_next (&i);
2859 }
2860 }
2861
2862 /* Delete unreachable blocks. */
2863 if (cfg_changed)
2864 todoflags |= TODO_cleanup_cfg;
2865
2866 return todoflags;
2867 }
2868
2869 } // anon namespace
2870
2871 gimple_opt_pass *
2872 make_pass_fold_builtins (gcc::context *ctxt)
2873 {
2874 return new pass_fold_builtins (ctxt);
2875 }