/* Data flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "hashtab.h"
#include "pointer-set.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "timevar.h"
#include "ggc.h"
#include "langhooks.h"
#include "flags.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "tree-dump.h"
#include "gimple.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "convert.h"
#include "params.h"
#include "cgraph.h"

/* Build and maintain data flow information for trees.  */

/* Counters used to display DFA and SSA statistics.  */
struct dfa_stats_d
{
  long num_var_anns;
  long num_defs;
  long num_uses;
  long num_phis;
  long num_phi_args;
  size_t max_num_phi_args;
  long num_vdefs;
  long num_vuses;
};


/* Local functions.  */
static void collect_dfa_stats (struct dfa_stats_d *);
static tree find_vars_r (tree *, int *, void *);


/*---------------------------------------------------------------------------
                        Dataflow analysis (DFA) routines
---------------------------------------------------------------------------*/
/* Find all the variables referenced in the function.  This function
   builds the global arrays REFERENCED_VARS and CALL_CLOBBERED_VARS.

   Note that this function does not look for statement operands; it simply
   determines what variables are referenced in the program and detects
   various attributes for each variable used by alias analysis and the
   optimizer.  */

static unsigned int
find_referenced_vars (void)
{
  basic_block bb;
  gimple_stmt_iterator si;

  FOR_EACH_BB (bb)
    {
      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
        {
          gimple stmt = gsi_stmt (si);
          if (is_gimple_debug (stmt))
            continue;
          find_referenced_vars_in (gsi_stmt (si));
        }

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
        find_referenced_vars_in (gsi_stmt (si));
    }

  return 0;
}

struct gimple_opt_pass pass_referenced_vars =
{
 {
  GIMPLE_PASS,
  "*referenced_vars",                   /* name */
  NULL,                                 /* gate */
  find_referenced_vars,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_FIND_REFERENCED_VARS,              /* tv_id */
  PROP_gimple_leh | PROP_cfg,           /* properties_required */
  PROP_referenced_vars,                 /* properties_provided */
  0,                                    /* properties_destroyed */
  TODO_dump_func,                       /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
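
/* Illustrative sketch (not part of the original sources): a later pass that
   requires PROP_referenced_vars typically walks the set built above with the
   iterator used elsewhere in this file, e.g.:

     tree var;
     referenced_var_iterator rvi;

     FOR_EACH_REFERENCED_VAR (var, rvi)
       if (!is_global_var (var))
         process_local (var);   // process_local is a made-up callback

   Which passes actually consume PROP_referenced_vars is decided by the pass
   manager, not by this file.  */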


/*---------------------------------------------------------------------------
                             Manage annotations
---------------------------------------------------------------------------*/
/* Create a new annotation for a _DECL node T.  */

var_ann_t
create_var_ann (tree t)
{
  var_ann_t ann;

  gcc_assert (t);
  gcc_assert (TREE_CODE (t) == VAR_DECL
              || TREE_CODE (t) == PARM_DECL
              || TREE_CODE (t) == RESULT_DECL);

  ann = ggc_alloc_cleared_var_ann_d ();
  *DECL_VAR_ANN_PTR (t) = ann;

  return ann;
}

/* Renumber all of the gimple stmt uids.  */

void
renumber_gimple_stmt_uids (void)
{
  basic_block bb;

  set_gimple_stmt_max_uid (cfun, 0);
  FOR_ALL_BB (bb)
    {
      gimple_stmt_iterator bsi;
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
          gimple stmt = gsi_stmt (bsi);
          gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
        }
    }
}
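
/* Illustrative sketch (not part of the original sources): after renumbering,
   gimple_uid can serve as a dense index, e.g. to attach per-statement data
   without a hash table:

     renumber_gimple_stmt_uids ();
     n = gimple_stmt_max_uid (cfun);   // assumes the accessor from gimple.h
     data = XCNEWVEC (int, n);
     ...
     data[gimple_uid (stmt)] = 1;

   The uids are only unique, not ordered, so they index arrays and bitmaps
   but do not encode statement order.  */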

/* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
   in BLOCKS, of which there are N_BLOCKS.  Also renumbers PHIs.  */

void
renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)
{
  int i;

  set_gimple_stmt_max_uid (cfun, 0);
  for (i = 0; i < n_blocks; i++)
    {
      basic_block bb = blocks[i];
      gimple_stmt_iterator bsi;
      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
          gimple stmt = gsi_stmt (bsi);
          gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
        }
      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
          gimple stmt = gsi_stmt (bsi);
          gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
        }
    }
}

/* Build a temporary.  Make sure it is registered to be renamed.  */

tree
make_rename_temp (tree type, const char *prefix)
{
  tree t = create_tmp_reg (type, prefix);

  if (gimple_referenced_vars (cfun))
    {
      add_referenced_var (t);
      mark_sym_for_renaming (t);
    }

  return t;
}
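
/* Illustrative sketch (not part of the original sources): a typical use of
   make_rename_temp is to materialize an intermediate value and let the SSA
   updater create the SSA names later:

     tree tmp = make_rename_temp (TREE_TYPE (val), "opt_tmp");
     gimple copy = gimple_build_assign (tmp, val);
     gsi_insert_before (&gsi, copy, GSI_SAME_STMT);

   Here "val", "gsi" and the "opt_tmp" prefix are made-up placeholders; the
   actual renaming happens when the caller runs update_ssa.  */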


/*---------------------------------------------------------------------------
                             Debugging functions
---------------------------------------------------------------------------*/
/* Dump the list of all the referenced variables in the current function to
   FILE.  */

void
dump_referenced_vars (FILE *file)
{
  tree var;
  referenced_var_iterator rvi;

  fprintf (file, "\nReferenced variables in %s: %u\n\n",
           get_name (current_function_decl), (unsigned) num_referenced_vars);

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      fprintf (file, "Variable: ");
      dump_variable (file, var);
    }

  fprintf (file, "\n");
}


/* Dump the list of all the referenced variables to stderr.  */

DEBUG_FUNCTION void
debug_referenced_vars (void)
{
  dump_referenced_vars (stderr);
}


/* Dump variable VAR and its may-aliases to FILE.  */

void
dump_variable (FILE *file, tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      if (POINTER_TYPE_P (TREE_TYPE (var)))
        dump_points_to_info_for (file, var);
      var = SSA_NAME_VAR (var);
    }

  if (var == NULL_TREE)
    {
      fprintf (file, "<nil>");
      return;
    }

  print_generic_expr (file, var, dump_flags);

  fprintf (file, ", UID D.%u", (unsigned) DECL_UID (var));
  if (DECL_PT_UID (var) != DECL_UID (var))
    fprintf (file, ", PT-UID D.%u", (unsigned) DECL_PT_UID (var));

  fprintf (file, ", ");
  print_generic_expr (file, TREE_TYPE (var), dump_flags);

  if (TREE_ADDRESSABLE (var))
    fprintf (file, ", is addressable");

  if (is_global_var (var))
    fprintf (file, ", is global");

  if (TREE_THIS_VOLATILE (var))
    fprintf (file, ", is volatile");

  if (cfun && gimple_default_def (cfun, var))
    {
      fprintf (file, ", default def: ");
      print_generic_expr (file, gimple_default_def (cfun, var), dump_flags);
    }

  if (DECL_INITIAL (var))
    {
      fprintf (file, ", initial: ");
      print_generic_expr (file, DECL_INITIAL (var), dump_flags);
    }

  fprintf (file, "\n");
}


/* Dump variable VAR and its may-aliases to stderr.  */

DEBUG_FUNCTION void
debug_variable (tree var)
{
  dump_variable (stderr, var);
}
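
/* Usage note (not part of the original sources): the DEBUG_FUNCTION entry
   points above are meant to be called by hand from a debugger, e.g.:

     (gdb) call debug_variable (var)
     (gdb) call debug_referenced_vars ()

   The FILE * variants are the ones used when dump files are written.  */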


/* Dump various DFA statistics to FILE.  */

void
dump_dfa_stats (FILE *file)
{
  struct dfa_stats_d dfa_stats;

  unsigned long size, total = 0;
  const char * const fmt_str   = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13lu%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  collect_dfa_stats (&dfa_stats);

  fprintf (file, "\nDFA Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", " Number of ", "Memory");
  fprintf (file, fmt_str, "", " instances ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = num_referenced_vars * sizeof (tree);
  total += size;
  fprintf (file, fmt_str_1, "Referenced variables", (unsigned long)num_referenced_vars,
           SCALE (size), LABEL (size));

  size = dfa_stats.num_var_anns * sizeof (struct var_ann_d);
  total += size;
  fprintf (file, fmt_str_1, "Variables annotated", dfa_stats.num_var_anns,
           SCALE (size), LABEL (size));

  size = dfa_stats.num_uses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "USE operands", dfa_stats.num_uses,
           SCALE (size), LABEL (size));

  size = dfa_stats.num_defs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "DEF operands", dfa_stats.num_defs,
           SCALE (size), LABEL (size));

  size = dfa_stats.num_vuses * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
           SCALE (size), LABEL (size));

  size = dfa_stats.num_vdefs * sizeof (tree *);
  total += size;
  fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
           SCALE (size), LABEL (size));

  size = dfa_stats.num_phis * sizeof (struct gimple_statement_phi);
  total += size;
  fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
           SCALE (size), LABEL (size));

  size = dfa_stats.num_phi_args * sizeof (struct phi_arg_d);
  total += size;
  fprintf (file, fmt_str_1, "PHI arguments", dfa_stats.num_phi_args,
           SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by DFA/SSA data", SCALE (total),
           LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (dfa_stats.num_phis)
    fprintf (file, "Average number of arguments per PHI node: %.1f (max: %ld)\n",
             (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
             (long) dfa_stats.max_num_phi_args);

  fprintf (file, "\n");
}


/* Dump DFA statistics on stderr.  */

DEBUG_FUNCTION void
debug_dfa_stats (void)
{
  dump_dfa_stats (stderr);
}


/* Collect DFA statistics and store them in the structure pointed to by
   DFA_STATS_P.  */

static void
collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
{
  basic_block bb;
  referenced_var_iterator vi;
  tree var;

  gcc_assert (dfa_stats_p);

  memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));

  /* Count all the variable annotations.  */
  FOR_EACH_REFERENCED_VAR (var, vi)
    if (var_ann (var))
      dfa_stats_p->num_var_anns++;

  /* Walk all the statements in the function counting references.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator si;

      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
        {
          gimple phi = gsi_stmt (si);
          dfa_stats_p->num_phis++;
          dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
          if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
            dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
        }

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
        {
          gimple stmt = gsi_stmt (si);
          dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
          dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
          dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0;
          dfa_stats_p->num_vuses += gimple_vuse (stmt) ? 1 : 0;
        }
    }
}


/*---------------------------------------------------------------------------
                            Miscellaneous helpers
---------------------------------------------------------------------------*/
/* Callback for walk_tree.  Used to collect variables referenced in
   the function.  */

static tree
find_vars_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  /* If we are reading the LTO info back in, we need to rescan the
     referenced vars.  */
  if (TREE_CODE (*tp) == SSA_NAME)
    add_referenced_var (SSA_NAME_VAR (*tp));

  /* If T is a regular variable that the optimizers are interested
     in, add it to the list of variables.  */
  else if (SSA_VAR_P (*tp))
    add_referenced_var (*tp);

  /* Type, _DECL and constant nodes have no interesting children.
     Ignore them.  */
  else if (IS_TYPE_OR_DECL_P (*tp) || CONSTANT_CLASS_P (*tp))
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Find referenced variables in STMT.  In contrast with
   find_new_referenced_vars, this function will not mark newly found
   variables for renaming.  */

void
find_referenced_vars_in (gimple stmt)
{
  size_t i;

  if (gimple_code (stmt) != GIMPLE_PHI)
    {
      for (i = 0; i < gimple_num_ops (stmt); i++)
        walk_tree (gimple_op_ptr (stmt, i), find_vars_r, NULL, NULL);
    }
  else
    {
      walk_tree (gimple_phi_result_ptr (stmt), find_vars_r, NULL, NULL);

      for (i = 0; i < gimple_phi_num_args (stmt); i++)
        {
          tree arg = gimple_phi_arg_def (stmt, i);
          walk_tree (&arg, find_vars_r, NULL, NULL);
        }
    }
}


/* Lookup UID in the referenced_vars hashtable and return the associated
   variable.  */

tree
referenced_var_lookup (unsigned int uid)
{
  tree h;
  struct tree_decl_minimal in;
  in.uid = uid;
  h = (tree) htab_find_with_hash (gimple_referenced_vars (cfun), &in, uid);
  return h;
}

/* Check if TO is in the referenced_vars hash table and insert it if not.
   Return true if it required insertion.  */

bool
referenced_var_check_and_insert (tree to)
{
  tree h, *loc;
  struct tree_decl_minimal in;
  unsigned int uid = DECL_UID (to);

  in.uid = uid;
  h = (tree) htab_find_with_hash (gimple_referenced_vars (cfun), &in, uid);
  if (h)
    {
      /* DECL_UID has already been entered in the table.  Verify that it is
         the same entry as TO.  See PR 27793.  */
      gcc_assert (h == to);
      return false;
    }

  loc = (tree *) htab_find_slot_with_hash (gimple_referenced_vars (cfun),
                                           &in, uid, INSERT);
  *loc = to;
  return true;
}
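
/* Illustrative sketch (not part of the original sources): the table is keyed
   by DECL_UID, so a decl recorded through add_referenced_var can later be
   mapped back from its UID alone:

     if (referenced_var_check_and_insert (decl))
       bitmap_set_bit (interesting, DECL_UID (decl));
     ...
     tree var = referenced_var_lookup (uid);   // uid taken from the bitmap

   "decl", "interesting" and "uid" are made-up placeholders.  */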

/* Lookup VAR's UID in the default_defs hashtable and return the associated
   default definition, if any.  */

tree
gimple_default_def (struct function *fn, tree var)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;
  gcc_assert (SSA_VAR_P (var));
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  return (tree) htab_find_with_hash (DEFAULT_DEFS (fn), &in, DECL_UID (var));
}

/* Insert the pair VAR's UID, DEF into the default_defs hashtable.  */

void
set_default_def (tree var, tree def)
{
  struct tree_decl_minimal ind;
  struct tree_ssa_name in;
  void **loc;

  gcc_assert (SSA_VAR_P (var));
  in.var = (tree)&ind;
  ind.uid = DECL_UID (var);
  if (!def)
    {
      loc = htab_find_slot_with_hash (DEFAULT_DEFS (cfun), &in,
                                      DECL_UID (var), INSERT);
      gcc_assert (*loc);
      htab_remove_elt (DEFAULT_DEFS (cfun), *loc);
      return;
    }
  gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
  loc = htab_find_slot_with_hash (DEFAULT_DEFS (cfun), &in,
                                  DECL_UID (var), INSERT);

  /* Default definition might be changed by tail call optimization.  */
  if (*loc)
    SSA_NAME_IS_DEFAULT_DEF (*(tree *) loc) = false;
  *(tree *) loc = def;

  /* Mark DEF as the default definition for VAR.  */
  SSA_NAME_IS_DEFAULT_DEF (def) = true;
}
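
/* Illustrative sketch (not part of the original sources): callers that need
   the SSA name for the incoming value of a symbol typically pair the two
   functions above, creating the default definition on demand:

     tree def = gimple_default_def (cfun, var);
     if (def == NULL_TREE)
       {
         def = make_ssa_name (var, gimple_build_nop ());
         set_default_def (var, def);
       }

   This assumes make_ssa_name and gimple_build_nop from the GIMPLE API; the
   empty statement marks DEF as not being defined by any real statement.  */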

/* Add VAR to the list of referenced variables if it isn't already there.  */

bool
add_referenced_var (tree var)
{
  get_var_ann (var);
  gcc_assert (DECL_P (var));

  /* Insert VAR into the referenced_vars hash table if it isn't present.  */
  if (referenced_var_check_and_insert (var))
    {
      /* Scan DECL_INITIAL for pointer variables as they may contain
         address arithmetic referencing the address of other
         variables.  As we are only interested in directly referenced
         globals or referenced locals restrict this to initializers
         that can refer to local variables.  */
      if (DECL_INITIAL (var)
          && DECL_CONTEXT (var) == current_function_decl)
        walk_tree (&DECL_INITIAL (var), find_vars_r, NULL, 0);

      return true;
    }

  return false;
}

/* Remove VAR from the list of referenced variables.  */

void
remove_referenced_var (tree var)
{
  var_ann_t v_ann;
  struct tree_decl_minimal in;
  void **loc;
  unsigned int uid = DECL_UID (var);

  /* Preserve var_anns of globals.  */
  if (!is_global_var (var)
      && (v_ann = var_ann (var)))
    {
      ggc_free (v_ann);
      *DECL_VAR_ANN_PTR (var) = NULL;
    }
  gcc_assert (DECL_P (var));
  in.uid = uid;
  loc = htab_find_slot_with_hash (gimple_referenced_vars (cfun), &in, uid,
                                  NO_INSERT);
  htab_clear_slot (gimple_referenced_vars (cfun), loc);
}


/* Return the virtual variable associated with the non-scalar variable VAR.  */

tree
get_virtual_var (tree var)
{
  STRIP_NOPS (var);

  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);

  while (TREE_CODE (var) == REALPART_EXPR || TREE_CODE (var) == IMAGPART_EXPR
         || handled_component_p (var))
    var = TREE_OPERAND (var, 0);

  /* Treating GIMPLE registers as virtual variables makes no sense.
     Also complain if we couldn't extract a _DECL out of the original
     expression.  */
  gcc_assert (SSA_VAR_P (var));
  gcc_assert (!is_gimple_reg (var));

  return var;
}

/* Mark all the naked symbols in STMT for SSA renaming.  */

void
mark_symbols_for_renaming (gimple stmt)
{
  tree op;
  ssa_op_iter iter;

  update_stmt (stmt);

  /* Mark all the operands for renaming.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_OPERANDS)
    if (DECL_P (op))
      mark_sym_for_renaming (op);
}


/* Find all variables within the gimplified statement that were not
   previously visible to the function and add them to the referenced
   variables list.  */

static tree
find_new_referenced_vars_1 (tree *tp, int *walk_subtrees,
                            void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (TREE_CODE (t) == VAR_DECL && !var_ann (t))
    {
      add_referenced_var (t);
      mark_sym_for_renaming (t);
    }

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}


/* Find any new referenced variables in STMT.  */

void
find_new_referenced_vars (gimple stmt)
{
  walk_gimple_op (stmt, find_new_referenced_vars_1, NULL);
}
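
/* Illustrative sketch (not part of the original sources): passes that build
   new statements out of existing trees usually call find_new_referenced_vars,
   which also marks the new symbols for renaming, before committing the
   statement:

     gimple new_stmt = gimple_build_assign (lhs, rhs);   // made-up lhs/rhs
     find_new_referenced_vars (new_stmt);
     gsi_replace (&gsi, new_stmt, true);
     update_stmt (new_stmt);

   find_referenced_vars_in, by contrast, only records the variables; it is
   what the initial *referenced_vars pass above relies on.  */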


/* If EXP is a handled component reference for a structure, return the
   base variable.  The access range is delimited by bit positions *POFFSET and
   *POFFSET + *PMAX_SIZE.  The access size is *PSIZE bits.  If either
   *PSIZE or *PMAX_SIZE is -1, they could not be determined.  If *PSIZE
   and *PMAX_SIZE are equal, the access is non-variable.  */

tree
get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
                         HOST_WIDE_INT *psize,
                         HOST_WIDE_INT *pmax_size)
{
  HOST_WIDE_INT bitsize = -1;
  HOST_WIDE_INT maxsize = -1;
  tree size_tree = NULL_TREE;
  HOST_WIDE_INT bit_offset = 0;
  bool seen_variable_array_ref = false;

  /* First get the final access size from just the outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    size_tree = TREE_OPERAND (exp, 1);
  else if (!VOID_TYPE_P (TREE_TYPE (exp)))
    {
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
        bitsize = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (! host_integerp (size_tree, 1))
        bitsize = -1;
      else
        bitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  maxsize = bitsize;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          bit_offset += TREE_INT_CST_LOW (TREE_OPERAND (exp, 2));
          break;

        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            tree this_offset = component_ref_field_offset (exp);

            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST
                && host_integerp (this_offset, 0))
              {
                HOST_WIDE_INT hthis_offset = TREE_INT_CST_LOW (this_offset);
                hthis_offset *= BITS_PER_UNIT;
                hthis_offset
                  += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
                bit_offset += hthis_offset;

                /* If we had seen a variable array ref already and we just
                   referenced the last field of a struct or a union member
                   then we have to adjust maxsize by the padding at the end
                   of our field.  */
                if (seen_variable_array_ref
                    && maxsize != -1)
                  {
                    tree stype = TREE_TYPE (TREE_OPERAND (exp, 0));
                    tree next = DECL_CHAIN (field);
                    while (next && TREE_CODE (next) != FIELD_DECL)
                      next = DECL_CHAIN (next);
                    if (!next
                        || TREE_CODE (stype) != RECORD_TYPE)
                      {
                        tree fsize = DECL_SIZE_UNIT (field);
                        tree ssize = TYPE_SIZE_UNIT (stype);
                        if (host_integerp (fsize, 0)
                            && host_integerp (ssize, 0))
                          maxsize += ((TREE_INT_CST_LOW (ssize)
                                       - TREE_INT_CST_LOW (fsize))
                                      * BITS_PER_UNIT - hthis_offset);
                        else
                          maxsize = -1;
                      }
                  }
              }
            else
              {
                tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
                /* We need to adjust maxsize to the whole structure bitsize.
                   But we can subtract any constant offset seen so far,
                   because that would get us out of the structure otherwise.  */
                if (maxsize != -1 && csize && host_integerp (csize, 1))
                  maxsize = TREE_INT_CST_LOW (csize) - bit_offset;
                else
                  maxsize = -1;
              }
          }
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree index = TREE_OPERAND (exp, 1);
            tree low_bound, unit_size;

            /* If the resulting bit-offset is constant, track it.  */
            if (TREE_CODE (index) == INTEGER_CST
                && host_integerp (index, 0)
                && (low_bound = array_ref_low_bound (exp),
                    host_integerp (low_bound, 0))
                && (unit_size = array_ref_element_size (exp),
                    host_integerp (unit_size, 1)))
              {
                HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);

                hindex -= TREE_INT_CST_LOW (low_bound);
                hindex *= TREE_INT_CST_LOW (unit_size);
                hindex *= BITS_PER_UNIT;
                bit_offset += hindex;

                /* An array ref with a constant index up in the structure
                   hierarchy will constrain the size of any variable array ref
                   lower in the access hierarchy.  */
                seen_variable_array_ref = false;
              }
            else
              {
                tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
                /* We need to adjust maxsize to the whole array bitsize.
                   But we can subtract any constant offset seen so far,
                   because that would get us outside of the array otherwise.  */
                if (maxsize != -1 && asize && host_integerp (asize, 1))
                  maxsize = TREE_INT_CST_LOW (asize) - bit_offset;
                else
                  maxsize = -1;

                /* Remember that we have seen an array ref with a variable
                   index.  */
                seen_variable_array_ref = true;
              }
          }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          bit_offset += bitsize;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case MEM_REF:
          /* Hand back the decl for MEM[&decl, off].  */
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
            {
              if (integer_zerop (TREE_OPERAND (exp, 1)))
                exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
              else
                {
                  double_int off = mem_ref_offset (exp);
                  off = double_int_lshift (off,
                                           BITS_PER_UNIT == 8
                                           ? 3 : exact_log2 (BITS_PER_UNIT),
                                           HOST_BITS_PER_DOUBLE_INT, true);
                  off = double_int_add (off, shwi_to_double_int (bit_offset));
                  if (double_int_fits_in_shwi_p (off))
                    {
                      bit_offset = double_int_to_shwi (off);
                      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
                    }
                }
            }
          goto done;

        case TARGET_MEM_REF:
          /* Hand back the decl for MEM[&decl, off].  */
          if (TMR_SYMBOL (exp))
            {
              /* Via the variable index or base we can reach the
                 whole object.  */
              if (TMR_INDEX (exp) || TMR_BASE (exp))
                {
                  exp = TREE_OPERAND (TMR_SYMBOL (exp), 0);
                  bit_offset = 0;
                  maxsize = -1;
                  goto done;
                }
              if (integer_zerop (TMR_OFFSET (exp)))
                exp = TREE_OPERAND (TMR_SYMBOL (exp), 0);
              else
                {
                  double_int off = mem_ref_offset (exp);
                  off = double_int_lshift (off,
                                           BITS_PER_UNIT == 8
                                           ? 3 : exact_log2 (BITS_PER_UNIT),
                                           HOST_BITS_PER_DOUBLE_INT, true);
                  off = double_int_add (off, shwi_to_double_int (bit_offset));
                  if (double_int_fits_in_shwi_p (off))
                    {
                      bit_offset = double_int_to_shwi (off);
                      exp = TREE_OPERAND (TMR_SYMBOL (exp), 0);
                    }
                }
            }
          goto done;

        default:
          goto done;
        }

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* We need to deal with variable arrays ending structures such as
       struct { int length; int a[1]; } x;           x.a[d]
       struct { struct { int a; int b; } a[1]; } x;  x.a[d].a
       struct { struct { int a[1]; } a[1]; } x;      x.a[0][d], x.a[d][0]
       struct { int len; union { int a[1]; struct X x; } u; } x;  x.u.a[d]
     where we do not know maxsize for variable index accesses to
     the array.  The simplest way to conservatively deal with this
     is to punt in the case that offset + maxsize reaches the
     base type boundary.  This needs to include possible trailing padding
     that is there for alignment purposes.

     That is of course only true if the base object is not a decl.  */

  if (DECL_P (exp))
    {
      /* If maxsize is unknown adjust it according to the size of the
         base decl.  */
      if (maxsize == -1
          && host_integerp (DECL_SIZE (exp), 1))
        maxsize = TREE_INT_CST_LOW (DECL_SIZE (exp)) - bit_offset;
    }
  else if (seen_variable_array_ref
           && maxsize != -1
           && (!host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
               || (bit_offset + maxsize
                   == (signed) TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))))))
    maxsize = -1;

  /* ??? Due to negative offsets in ARRAY_REF we can end up with
     negative bit_offset here.  We might want to store a zero offset
     in this case.  */
  *poffset = bit_offset;
  *psize = bitsize;
  *pmax_size = maxsize;

  return exp;
}
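
/* Illustrative sketch (not part of the original sources): a common use of
   get_ref_base_and_extent is a conservative overlap test between two memory
   references, in the spirit of what the alias machinery does:

     HOST_WIDE_INT off1, size1, max1, off2, size2, max2;
     tree base1 = get_ref_base_and_extent (ref1, &off1, &size1, &max1);
     tree base2 = get_ref_base_and_extent (ref2, &off2, &size2, &max2);

     if (base1 == base2
         && max1 != -1 && max2 != -1
         && (off1 + max1 <= off2 || off2 + max2 <= off1))
       ;  // the two accesses cannot overlap

   "ref1" and "ref2" are made-up inputs; real clients also have to handle
   bases that are distinct decls or MEM_REFs.  */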

/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
   denotes the starting address of the memory access EXP.
   Returns NULL_TREE if the offset is not constant or any component
   is not BITS_PER_UNIT-aligned.  */

tree
get_addr_base_and_unit_offset (tree exp, HOST_WIDE_INT *poffset)
{
  HOST_WIDE_INT byte_offset = 0;

  /* Compute cumulative byte-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          return NULL_TREE;

        case COMPONENT_REF:
          {
            tree field = TREE_OPERAND (exp, 1);
            tree this_offset = component_ref_field_offset (exp);
            HOST_WIDE_INT hthis_offset;

            if (!this_offset
                || TREE_CODE (this_offset) != INTEGER_CST
                || (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                    % BITS_PER_UNIT))
              return NULL_TREE;

            hthis_offset = TREE_INT_CST_LOW (this_offset);
            hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
                             / BITS_PER_UNIT);
            byte_offset += hthis_offset;
          }
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            tree index = TREE_OPERAND (exp, 1);
            tree low_bound, unit_size;

            /* If the resulting bit-offset is constant, track it.  */
            if (TREE_CODE (index) == INTEGER_CST
                && (low_bound = array_ref_low_bound (exp),
                    TREE_CODE (low_bound) == INTEGER_CST)
                && (unit_size = array_ref_element_size (exp),
                    TREE_CODE (unit_size) == INTEGER_CST))
              {
                HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);

                hindex -= TREE_INT_CST_LOW (low_bound);
                hindex *= TREE_INT_CST_LOW (unit_size);
                byte_offset += hindex;
              }
            else
              return NULL_TREE;
          }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case MEM_REF:
          /* Hand back the decl for MEM[&decl, off].  */
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
            {
              if (!integer_zerop (TREE_OPERAND (exp, 1)))
                {
                  double_int off = mem_ref_offset (exp);
                  gcc_assert (off.high == -1 || off.high == 0);
                  byte_offset += double_int_to_shwi (off);
                }
              exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
            }
          goto done;

        case TARGET_MEM_REF:
          /* Hand back the decl for MEM[&decl, off].  */
          if (TMR_SYMBOL (exp))
            {
              if (TMR_INDEX (exp) || TMR_BASE (exp))
                return NULL_TREE;
              if (!integer_zerop (TMR_OFFSET (exp)))
                {
                  double_int off = mem_ref_offset (exp);
                  gcc_assert (off.high == -1 || off.high == 0);
                  byte_offset += double_int_to_shwi (off);
                }
              exp = TREE_OPERAND (TMR_SYMBOL (exp), 0);
            }
          goto done;

        default:
          goto done;
        }

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  *poffset = byte_offset;
  return exp;
}
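
/* Illustrative sketch (not part of the original sources): unlike
   get_ref_base_and_extent, this variant works in bytes and gives up instead
   of producing a conservative range, which makes it suitable for address
   computations:

     HOST_WIDE_INT off;
     tree base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0), &off);
     if (base && DECL_P (base))
       ;  // the address is &base + off, with off in bytes

   Here "addr" stands for some ADDR_EXPR the caller is analyzing.  */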

/* Returns true if STMT references an SSA_NAME that has
   SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false.  */

bool
stmt_references_abnormal_ssa_name (gimple stmt)
{
  ssa_op_iter oi;
  use_operand_p use_p;

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
    {
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
        return true;
    }

  return false;
}
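
/* Illustrative sketch (not part of the original sources): transformations
   that replace or re-associate uses typically bail out on statements whose
   operands occur in abnormal PHIs, since such SSA names must not be copied
   around:

     if (stmt_references_abnormal_ssa_name (stmt))
       return false;   // leave the statement alone

   This is the kind of early-out guard that propagation-style cleanups
   apply before rewriting a statement.  */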