re PR c/79153 (-Wimplicit-fallthrough missed warning)
[gcc.git] / gcc / tree-dfa.c
1 /* Data flow functions for trees.
2 Copyright (C) 2001-2017 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-pretty-print.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "langhooks.h"
34 #include "gimple-iterator.h"
35 #include "gimple-walk.h"
36 #include "tree-dfa.h"
37
38 /* Build and maintain data flow information for trees. */
39
/* Counters used to display DFA and SSA statistics.  */
struct dfa_stats_d
{
  long num_defs;		/* Real DEF operands seen.  */
  long num_uses;		/* Real USE operands seen.  */
  long num_phis;		/* PHI nodes seen.  */
  long num_phi_args;		/* Total PHI arguments across all PHIs.  */
  size_t max_num_phi_args;	/* Largest argument count on any single PHI.  */
  long num_vdefs;		/* Virtual DEF operands seen.  */
  long num_vuses;		/* Virtual USE operands seen.  */
};
51
52
53 /* Local functions. */
54 static void collect_dfa_stats (struct dfa_stats_d *);
55
56
57 /*---------------------------------------------------------------------------
58 Dataflow analysis (DFA) routines
59 ---------------------------------------------------------------------------*/
60
61 /* Renumber all of the gimple stmt uids. */
62
63 void
64 renumber_gimple_stmt_uids (void)
65 {
66 basic_block bb;
67
68 set_gimple_stmt_max_uid (cfun, 0);
69 FOR_ALL_BB_FN (bb, cfun)
70 {
71 gimple_stmt_iterator bsi;
72 for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
73 {
74 gimple *stmt = gsi_stmt (bsi);
75 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
76 }
77 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
78 {
79 gimple *stmt = gsi_stmt (bsi);
80 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
81 }
82 }
83 }
84
85 /* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
86 in BLOCKS, of which there are N_BLOCKS. Also renumbers PHIs. */
87
88 void
89 renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)
90 {
91 int i;
92
93 set_gimple_stmt_max_uid (cfun, 0);
94 for (i = 0; i < n_blocks; i++)
95 {
96 basic_block bb = blocks[i];
97 gimple_stmt_iterator bsi;
98 for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
99 {
100 gimple *stmt = gsi_stmt (bsi);
101 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
102 }
103 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
104 {
105 gimple *stmt = gsi_stmt (bsi);
106 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
107 }
108 }
109 }
110
111
112
113 /*---------------------------------------------------------------------------
114 Debugging functions
115 ---------------------------------------------------------------------------*/
116
/* Dump variable VAR and its may-aliases to FILE.  */

void
dump_variable (FILE *file, tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      /* For pointer SSA names also dump the points-to information,
	 then fall through to dumping the underlying variable.  */
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	dump_points_to_info_for (file, var);
      var = SSA_NAME_VAR (var);
    }

  /* SSA_NAME_VAR may be NULL_TREE; print a placeholder and stop.  */
  if (var == NULL_TREE)
    {
      fprintf (file, "<nil>");
      return;
    }

  print_generic_expr (file, var, dump_flags);

  fprintf (file, ", UID D.%u", (unsigned) DECL_UID (var));
  /* Only mention the points-to UID when it differs from the DECL UID.  */
  if (DECL_PT_UID (var) != DECL_UID (var))
    fprintf (file, ", PT-UID D.%u", (unsigned) DECL_PT_UID (var));

  fprintf (file, ", ");
  print_generic_expr (file, TREE_TYPE (var), dump_flags);

  if (TREE_ADDRESSABLE (var))
    fprintf (file, ", is addressable");

  if (is_global_var (var))
    fprintf (file, ", is global");

  if (TREE_THIS_VOLATILE (var))
    fprintf (file, ", is volatile");

  /* cfun may be NULL, e.g. when dumping outside a function context.  */
  if (cfun && ssa_default_def (cfun, var))
    {
      fprintf (file, ", default def: ");
      print_generic_expr (file, ssa_default_def (cfun, var), dump_flags);
    }

  if (DECL_INITIAL (var))
    {
      fprintf (file, ", initial: ");
      print_generic_expr (file, DECL_INITIAL (var), dump_flags);
    }

  fprintf (file, "\n");
}
167
168
/* Dump variable VAR and its may-aliases to stderr.
   Convenience wrapper around dump_variable for use from a debugger.  */

DEBUG_FUNCTION void
debug_variable (tree var)
{
  dump_variable (stderr, var);
}
176
177
/* Dump various DFA statistics to FILE.
   Prints a table of per-operand-kind instance counts and estimated memory
   use, followed by a total and the average PHI argument count.  */

void
dump_dfa_stats (FILE *file)
{
  struct dfa_stats_d dfa_stats;

  unsigned long size, total = 0;
  /* Column layouts: header row, per-kind row, and total row.
     SCALE/LABEL (defined elsewhere) turn a byte count into a scaled
     number plus a unit character.  */
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13lu%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  collect_dfa_stats (&dfa_stats);

  fprintf (file, "\nDFA Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* Memory estimates below are counts multiplied by the size of the
     representative structure for each operand kind.  */
  size = dfa_stats.num_uses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_defs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "DEF operands", dfa_stats.num_defs,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_vuses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_vdefs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_phis * sizeof (struct gphi);
  total += size;
  fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
	   SCALE (size), LABEL (size));

  size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d);
  total += size;
  fprintf (file, fmt_str_1, "PHI arguments", dfa_stats.num_phi_args,
	   SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by DFA/SSA data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  /* Guard against division by zero when the function has no PHIs.  */
  if (dfa_stats.num_phis)
    fprintf (file, "Average number of arguments per PHI node: %.1f (max: %ld)\n",
	     (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
	     (long) dfa_stats.max_num_phi_args);

  fprintf (file, "\n");
}
244
245
/* Dump DFA statistics on stderr.
   Convenience wrapper around dump_dfa_stats for use from a debugger.  */

DEBUG_FUNCTION void
debug_dfa_stats (void)
{
  dump_dfa_stats (stderr);
}
253
254
255 /* Collect DFA statistics and store them in the structure pointed to by
256 DFA_STATS_P. */
257
258 static void
259 collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
260 {
261 basic_block bb;
262
263 gcc_assert (dfa_stats_p);
264
265 memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));
266
267 /* Walk all the statements in the function counting references. */
268 FOR_EACH_BB_FN (bb, cfun)
269 {
270 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
271 gsi_next (&si))
272 {
273 gphi *phi = si.phi ();
274 dfa_stats_p->num_phis++;
275 dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
276 if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
277 dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
278 }
279
280 for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
281 gsi_next (&si))
282 {
283 gimple *stmt = gsi_stmt (si);
284 dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
285 dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
286 dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0;
287 dfa_stats_p->num_vuses += gimple_vuse (stmt) ? 1 : 0;
288 }
289 }
290 }
291
292
293 /*---------------------------------------------------------------------------
294 Miscellaneous helpers
295 ---------------------------------------------------------------------------*/
296
/* Lookup VAR UID in the default_defs hashtable and return the associated
   variable.  Returns NULL_TREE if VAR has no default definition in FN.  */

tree
ssa_default_def (struct function *fn, tree var)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;
  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);

  /* Always NULL_TREE for rtl function dumps.  */
  if (!fn->gimple_df)
    return NULL_TREE;

  /* Build a stack-local dummy SSA name whose variable is a dummy decl
     carrying VAR's UID; the hashtable hashes/compares on that UID, so no
     real tree node needs to be allocated for the lookup.  */
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  return DEFAULT_DEFS (fn)->find_with_hash ((tree)&in, DECL_UID (var));
}
317
/* Insert the pair VAR's UID, DEF into the default_defs hashtable
   of function FN.  A NULL DEF removes VAR's entry instead, clearing
   the default-def flag on the previously recorded SSA name.  */

void
set_ssa_default_def (struct function *fn, tree var, tree def)
{
  /* Dummy key nodes on the stack; the table hashes on DECL_UID only
     (same trick as in ssa_default_def).  */
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;

  gcc_assert (VAR_P (var)
	      || TREE_CODE (var) == PARM_DECL
	      || TREE_CODE (var) == RESULT_DECL);
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  if (!def)
    {
      /* Removal: find the existing slot (if any), unflag the old
	 default def and clear the slot.  */
      tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
							  DECL_UID (var),
							  NO_INSERT);
      if (loc)
	{
	  SSA_NAME_IS_DEFAULT_DEF (*(tree *)loc) = false;
	  DEFAULT_DEFS (fn)->clear_slot (loc);
	}
      return;
    }
  gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
  tree *loc = DEFAULT_DEFS (fn)->find_slot_with_hash ((tree)&in,
						      DECL_UID (var), INSERT);

  /* Default definition might be changed by tail call optimization.  */
  if (*loc)
    SSA_NAME_IS_DEFAULT_DEF (*loc) = false;

  /* Mark DEF as the default definition for VAR.  */
  *loc = def;
  SSA_NAME_IS_DEFAULT_DEF (def) = true;
}
356
357 /* Retrieve or create a default definition for VAR. */
358
359 tree
360 get_or_create_ssa_default_def (struct function *fn, tree var)
361 {
362 tree ddef = ssa_default_def (fn, var);
363 if (ddef == NULL_TREE)
364 {
365 ddef = make_ssa_name_fn (fn, var, gimple_build_nop ());
366 set_ssa_default_def (fn, var, ddef);
367 }
368 return ddef;
369 }
370
371
/* If EXP is a handled component reference for a structure, return the
   base variable.  The access range is delimited by bit positions *POFFSET and
   *POFFSET + *PMAX_SIZE.  The access size is *PSIZE bits.  If either
   *PSIZE or *PMAX_SIZE is -1, they could not be determined.  If *PSIZE
   and *PMAX_SIZE are equal, the access is non-variable.  If *PREVERSE is
   true, the storage order of the reference is reversed.  */

tree
get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
			 HOST_WIDE_INT *psize,
			 HOST_WIDE_INT *pmax_size,
			 bool *preverse)
{
  offset_int bitsize = -1;	/* -1 signals "size unknown".  */
  offset_int maxsize;
  tree size_tree = NULL_TREE;
  offset_int bit_offset = 0;
  /* Set when a variable-index ARRAY_REF has been walked through; such
     a ref makes the overall extent potentially unbounded.  */
  bool seen_variable_array_ref = false;

  /* First get the final access size and the storage order from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    size_tree = TREE_OPERAND (exp, 1);
  else if (!VOID_TYPE_P (TREE_TYPE (exp)))
    {
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	bitsize = int (GET_MODE_BITSIZE (mode));
    }
  if (size_tree != NULL_TREE
      && TREE_CODE (size_tree) == INTEGER_CST)
    bitsize = wi::to_offset (size_tree);

  *preverse = reverse_storage_order_for_component_p (exp);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  maxsize = bitsize;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  /* Operand 2 is the bit position of the field.  */
	  bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    if (this_offset && TREE_CODE (this_offset) == INTEGER_CST)
	      {
		/* Constant field offset: fold byte offset plus the
		   field's bit offset into the running bit_offset.  */
		offset_int woffset = (wi::to_offset (this_offset)
				      << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		bit_offset += woffset;

		/* If we had seen a variable array ref already and we just
		   referenced the last field of a struct or a union member
		   then we have to adjust maxsize by the padding at the end
		   of our field.  */
		if (seen_variable_array_ref && maxsize != -1)
		  {
		    tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
		    tree next = DECL_CHAIN (field);
		    while (next && TREE_CODE (next) != FIELD_DECL)
		      next = DECL_CHAIN (next);
		    if (!next
			|| TREE_CODE (stype) != RECORD_TYPE)
		      {
			tree fsize = DECL_SIZE_UNIT (field);
			tree ssize = TYPE_SIZE_UNIT (stype);
			if (fsize == NULL
			    || TREE_CODE (fsize) != INTEGER_CST
			    || ssize == NULL
			    || TREE_CODE (ssize) != INTEGER_CST)
			  maxsize = -1;
			else
			  {
			    /* Padding past the field = container size
			       minus field size minus field offset.  */
			    offset_int tem = (wi::to_offset (ssize)
					      - wi::to_offset (fsize));
			    tem <<= LOG2_BITS_PER_UNIT;
			    tem -= woffset;
			    maxsize += tem;
			  }
		      }
		  }
	      }
	    else
	      {
		tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole structure bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us out of the structure otherwise.  */
		if (maxsize != -1
		    && csize
		    && TREE_CODE (csize) == INTEGER_CST)
		  maxsize = wi::to_offset (csize) - bit_offset;
		else
		  maxsize = -1;
	      }
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* If the resulting bit-offset is constant, track it.  */
	    if (TREE_CODE (index) == INTEGER_CST
		&& (low_bound = array_ref_low_bound (exp),
		    TREE_CODE (low_bound) == INTEGER_CST)
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		/* (index - low_bound) * unit_size, sign-extended at the
		   index type's precision, converted to bits.  */
		offset_int woffset
		  = wi::sext (wi::to_offset (index) - wi::to_offset (low_bound),
			      TYPE_PRECISION (TREE_TYPE (index)));
		woffset *= wi::to_offset (unit_size);
		woffset <<= LOG2_BITS_PER_UNIT;
		bit_offset += woffset;

		/* An array ref with a constant index up in the structure
		   hierarchy will constrain the size of any variable array ref
		   lower in the access hierarchy.  */
		seen_variable_array_ref = false;
	      }
	    else
	      {
		tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
		/* We need to adjust maxsize to the whole array bitsize.
		   But we can subtract any constant offset seen so far,
		   because that would get us outside of the array otherwise.  */
		if (maxsize != -1
		    && asize
		    && TREE_CODE (asize) == INTEGER_CST)
		  maxsize = wi::to_offset (asize) - bit_offset;
		else
		  maxsize = -1;

		/* Remember that we have seen an array ref with a variable
		   index.  */
		seen_variable_array_ref = true;
	      }
	  }
	  break;

	case REALPART_EXPR:
	  /* Real part starts at the complex object's own offset.  */
	  break;

	case IMAGPART_EXPR:
	  /* Imaginary part follows the real part, i.e. one element size
	     (== bitsize) further in.  */
	  bit_offset += bitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case TARGET_MEM_REF:
	  /* Via the variable index or index2 we can reach the
	     whole object.  Still hand back the decl here.  */
	  if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
	      && (TMR_INDEX (exp) || TMR_INDEX2 (exp)))
	    {
	      exp = TREE_OPERAND (TMR_BASE (exp), 0);
	      bit_offset = 0;
	      maxsize = -1;
	      goto done;
	    }
	  /* Fallthru.  */
	case MEM_REF:
	  /* We need to deal with variable arrays ending structures such as
	       struct { int length; int a[1]; } x;           x.a[d]
	       struct { struct { int a; int b; } a[1]; } x;  x.a[d].a
	       struct { struct { int a[1]; } a[1]; } x;      x.a[0][d], x.a[d][0]
	       struct { int len; union { int a[1]; struct X x; } u; } x; x.u.a[d]
	     where we do not know maxsize for variable index accesses to
	     the array.  The simplest way to conservatively deal with this
	     is to punt in the case that offset + maxsize reaches the
	     base type boundary.  This needs to include possible trailing
	     padding that is there for alignment purposes.  */
	  if (seen_variable_array_ref
	      && maxsize != -1
	      && (TYPE_SIZE (TREE_TYPE (exp)) == NULL_TREE
		  || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
		  || (bit_offset + maxsize
		      == wi::to_offset (TYPE_SIZE (TREE_TYPE (exp))))))
	    maxsize = -1;

	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      if (integer_zerop (TREE_OPERAND (exp, 1)))
		exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	      else
		{
		  /* Fold the MEM_REF's byte offset (in bits) into
		     bit_offset, but only if it stays representable.  */
		  offset_int off = mem_ref_offset (exp);
		  off <<= LOG2_BITS_PER_UNIT;
		  off += bit_offset;
		  if (wi::fits_shwi_p (off))
		    {
		      bit_offset = off;
		      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
		    }
		}
	    }
	  goto done;

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }

 done:
  /* An unknown or negative size cannot be reported; punt entirely.  */
  if (!wi::fits_shwi_p (bitsize) || wi::neg_p (bitsize))
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  *psize = bitsize.to_shwi ();

  if (!wi::fits_shwi_p (bit_offset))
    {
      *poffset = 0;
      *pmax_size = -1;

      return exp;
    }

  /* In case of a decl or constant base object we can do better.  */

  if (DECL_P (exp))
    {
      if (flag_unconstrained_commons && VAR_P (exp) && DECL_COMMON (exp))
	{
	  tree sz_tree = TYPE_SIZE (TREE_TYPE (exp));
	  /* If size is unknown, or we have read to the end, assume there
	     may be more to the structure than we are told.  */
	  if (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
	      || (seen_variable_array_ref
		  && (sz_tree == NULL_TREE
		      || TREE_CODE (sz_tree) != INTEGER_CST
		      || (bit_offset + maxsize == wi::to_offset (sz_tree)))))
	    maxsize = -1;
	}
      /* If maxsize is unknown adjust it according to the size of the
         base decl.  */
      else if (maxsize == -1
	       && DECL_SIZE (exp)
	       && TREE_CODE (DECL_SIZE (exp)) == INTEGER_CST)
	maxsize = wi::to_offset (DECL_SIZE (exp)) - bit_offset;
    }
  else if (CONSTANT_CLASS_P (exp))
    {
      /* If maxsize is unknown adjust it according to the size of the
         base type constant.  */
      if (maxsize == -1
	  && TYPE_SIZE (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
	maxsize = (wi::to_offset (TYPE_SIZE (TREE_TYPE (exp)))
		   - bit_offset);
    }

  /* ???  Due to negative offsets in ARRAY_REF we can end up with
     negative bit_offset here.  We might want to store a zero offset
     in this case.  */
  *poffset = bit_offset.to_shwi ();
  if (!wi::fits_shwi_p (maxsize) || wi::neg_p (maxsize))
    *pmax_size = -1;
  else
    {
      *pmax_size = maxsize.to_shwi ();
      /* Also punt if offset + max size would overflow HOST_WIDE_INT.  */
      if (*poffset > HOST_WIDE_INT_MAX - *pmax_size)
	*pmax_size = -1;
    }

  /* Punt if *POFFSET + *PSIZE overflows in HOST_WIDE_INT, the callers don't
     check for such overflows individually and assume it works.  */
  if (*psize != -1 && *poffset > HOST_WIDE_INT_MAX - *psize)
    {
      *poffset = 0;
      *psize = -1;
      *pmax_size = -1;

      return exp;
    }

  return exp;
}
676
/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.
   VALUEIZE if non-NULL is used to valueize SSA names.  It should return
   its argument or a constant if the argument is known to be constant.  */

tree
get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
				 tree (*valueize) (tree))
{
  HOST_WIDE_INT byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  {
	    /* Only byte-aligned bit-field positions can be expressed
	       as a unit offset; otherwise give up.  */
	    HOST_WIDE_INT this_off = TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
	    if (this_off % BITS_PER_UNIT)
	      return NULL_TREE;
	    byte_offset += this_off / BITS_PER_UNIT;
	  }
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);
	    HOST_WIDE_INT hthis_offset;

	    /* Require a constant, byte-aligned field position.  */
	    if (!this_offset
		|| TREE_CODE (this_offset) != INTEGER_CST
		|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
		    % BITS_PER_UNIT))
	      return NULL_TREE;

	    /* Byte offset of the field = its byte offset plus its
	       bit offset converted to bytes.  */
	    hthis_offset = TREE_INT_CST_LOW (this_offset);
	    hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
			     / BITS_PER_UNIT);
	    byte_offset += hthis_offset;
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound, unit_size;

	    /* Let the caller's valueizer turn an SSA index into a
	       constant if it can.  */
	    if (valueize
		&& TREE_CODE (index) == SSA_NAME)
	      index = (*valueize) (index);

	    /* If the resulting bit-offset is constant, track it.  */
	    if (TREE_CODE (index) == INTEGER_CST
		&& (low_bound = array_ref_low_bound (exp),
		    TREE_CODE (low_bound) == INTEGER_CST)
		&& (unit_size = array_ref_element_size (exp),
		    TREE_CODE (unit_size) == INTEGER_CST))
	      {
		offset_int woffset
		  = wi::sext (wi::to_offset (index) - wi::to_offset (low_bound),
			      TYPE_PRECISION (TREE_TYPE (index)));
		woffset *= wi::to_offset (unit_size);
		byte_offset += woffset.to_shwi ();
	      }
	    else
	      return NULL_TREE;
	  }
	  break;

	case REALPART_EXPR:
	  /* Real part sits at offset 0 of the complex object.  */
	  break;

	case IMAGPART_EXPR:
	  /* Imaginary part follows the real part, one element further.  */
	  byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		if (!integer_zerop (TREE_OPERAND (exp, 1)))
		  {
		    offset_int off = mem_ref_offset (exp);
		    byte_offset += off.to_short_addr ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	case TARGET_MEM_REF:
	  {
	    tree base = TREE_OPERAND (exp, 0);
	    if (valueize
		&& TREE_CODE (base) == SSA_NAME)
	      base = (*valueize) (base);

	    /* Hand back the decl for MEM[&decl, off].  */
	    if (TREE_CODE (base) == ADDR_EXPR)
	      {
		/* Variable index components make the offset non-constant.  */
		if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
		  return NULL_TREE;
		if (!integer_zerop (TMR_OFFSET (exp)))
		  {
		    offset_int off = mem_ref_offset (exp);
		    byte_offset += off.to_short_addr ();
		  }
		exp = TREE_OPERAND (base, 0);
	      }
	    goto done;
	  }

	default:
	  goto done;
	}

      exp = TREE_OPERAND (exp, 0);
    }
done:

  *poffset = byte_offset;
  return exp;
}
815
/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.
   Convenience wrapper that performs no SSA name valueization.  */

tree
get_addr_base_and_unit_offset (tree exp, HOST_WIDE_INT *poffset)
{
  return get_addr_base_and_unit_offset_1 (exp, poffset, NULL);
}
826
827 /* Returns true if STMT references an SSA_NAME that has
828 SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false. */
829
830 bool
831 stmt_references_abnormal_ssa_name (gimple *stmt)
832 {
833 ssa_op_iter oi;
834 use_operand_p use_p;
835
836 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
837 {
838 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
839 return true;
840 }
841
842 return false;
843 }
844
845 /* If STMT takes any abnormal PHI values as input, replace them with
846 local copies. */
847
848 void
849 replace_abnormal_ssa_names (gimple *stmt)
850 {
851 ssa_op_iter oi;
852 use_operand_p use_p;
853
854 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
855 {
856 tree op = USE_FROM_PTR (use_p);
857 if (TREE_CODE (op) == SSA_NAME && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op))
858 {
859 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
860 tree new_name = make_ssa_name (TREE_TYPE (op));
861 gassign *assign = gimple_build_assign (new_name, op);
862 gsi_insert_before (&gsi, assign, GSI_SAME_STMT);
863 SET_USE (use_p, new_name);
864 }
865 }
866 }
867
/* Pair of tree and a sorting index, for dump_enumerated_decls.  */
struct GTY(()) numbered_tree
{
  tree t;	/* The declaration.  */
  int num;	/* Sequence number: order in which it was first walked.  */
};
874
875
876 /* Compare two declarations references by their DECL_UID / sequence number.
877 Called via qsort. */
878
879 static int
880 compare_decls_by_uid (const void *pa, const void *pb)
881 {
882 const numbered_tree *nt_a = ((const numbered_tree *)pa);
883 const numbered_tree *nt_b = ((const numbered_tree *)pb);
884
885 if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t))
886 return DECL_UID (nt_a->t) - DECL_UID (nt_b->t);
887 return nt_a->num - nt_b->num;
888 }
889
890 /* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls. */
891 static tree
892 dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
893 {
894 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
895 vec<numbered_tree> *list = (vec<numbered_tree> *) wi->info;
896 numbered_tree nt;
897
898 if (!DECL_P (*tp))
899 return NULL_TREE;
900 nt.t = *tp;
901 nt.num = list->length ();
902 list->safe_push (nt);
903 *walk_subtrees = 0;
904 return NULL_TREE;
905 }
906
/* Find all the declarations used by the current function, sort them by uid,
   and emit the sorted list.  Each declaration is tagged with a sequence
   number indicating when it was found during statement / tree walking,
   so that TDF_NOUID comparisons of anonymous declarations are still
   meaningful.  Where a declaration was encountered more than once, we
   emit only the sequence number of the first encounter.
   FILE is the dump file where to output the list and FLAGS is as in
   print_generic_expr.  */
void
dump_enumerated_decls (FILE *file, dump_flags_t flags)
{
  basic_block bb;
  struct walk_stmt_info wi;
  auto_vec<numbered_tree, 40> decl_list;

  /* The walk callback reads the vector through wi.info.  */
  memset (&wi, '\0', sizeof (wi));
  wi.info = (void *) &decl_list;
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      /* Skip debug statements so they don't pollute the decl list.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	if (!is_gimple_debug (gsi_stmt (gsi)))
	  walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
    }
  /* Sort by UID (sequence number breaks ties), then print each decl
     once — duplicates are adjacent after sorting.  */
  decl_list.qsort (compare_decls_by_uid);
  if (decl_list.length ())
    {
      unsigned ix;
      numbered_tree *ntp;
      tree last = NULL_TREE;

      fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n",
	       current_function_name ());
      FOR_EACH_VEC_ELT (decl_list, ix, ntp)
	{
	  if (ntp->t == last)
	    continue;
	  fprintf (file, "%d: ", ntp->num);
	  print_generic_decl (file, ntp->t, flags);
	  fprintf (file, "\n");
	  last = ntp->t;
	}
    }
}