/* Miscellaneous SSA utility functions.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "langhooks.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "bitmap.h"
#include "pointer-set.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-inline.h"
#include "varray.h"
#include "timevar.h"
#include "hashtab.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"

/* Pointer map of variable mappings, keyed by edge.  */
static struct pointer_map_t *edge_var_maps;


/* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */

void
redirect_edge_var_map_add (edge e, tree result, tree def)
{
  void **slot;
  edge_var_map_vector old_head, head;
  edge_var_map new_node;

  if (edge_var_maps == NULL)
    edge_var_maps = pointer_map_create ();

  slot = pointer_map_insert (edge_var_maps, e);
  old_head = head = (edge_var_map_vector) *slot;
  if (!head)
    {
      head = VEC_alloc (edge_var_map, heap, 5);
      *slot = head;
    }
  new_node.def = def;
  new_node.result = result;

  VEC_safe_push (edge_var_map, heap, head, &new_node);
  if (old_head != head)
    {
      /* The push did some reallocation.  Update the pointer map.  */
      *slot = head;
    }
}
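
/* A minimal usage sketch (E, RES and DEF are assumed to be an existing
   edge and trees from the caller's context):

     redirect_edge_var_map_add (e, res, def);
     ...
     redirect_edge_var_map_clear (e);

   Mappings accumulate per edge until cleared, or until the whole table
   is torn down with redirect_edge_var_map_destroy below.  */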


/* Clear the var mappings in edge E.  */

void
redirect_edge_var_map_clear (edge e)
{
  void **slot;
  edge_var_map_vector head;

  if (!edge_var_maps)
    return;

  slot = pointer_map_contains (edge_var_maps, e);

  if (slot)
    {
      head = (edge_var_map_vector) *slot;
      VEC_free (edge_var_map, heap, head);
      *slot = NULL;
    }
}


/* Duplicate the redirected var mappings in OLDE in NEWE.

   Since we can't remove a mapping, let's just duplicate it.  This assumes a
   pointer_map can have multiple edges mapping to the same var_map (many to
   one mapping), since we don't remove the previous mappings.  */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  void **new_slot, **old_slot;
  edge_var_map_vector head;

  if (!edge_var_maps)
    return;

  new_slot = pointer_map_insert (edge_var_maps, newe);
  old_slot = pointer_map_contains (edge_var_maps, olde);
  if (!old_slot)
    return;
  head = (edge_var_map_vector) *old_slot;

  if (head)
    *new_slot = VEC_copy (edge_var_map, heap, head);
  else
    *new_slot = VEC_alloc (edge_var_map, heap, 5);
}


/* Return the variable mappings for a given edge.  If there is none, return
   NULL.  */

edge_var_map_vector
redirect_edge_var_map_vector (edge e)
{
  void **slot;

  /* Hey, what kind of idiot would... you'd be surprised.  */
  if (!edge_var_maps)
    return NULL;

  slot = pointer_map_contains (edge_var_maps, e);
  if (!slot)
    return NULL;

  return (edge_var_map_vector) *slot;
}
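
/* A hedged sketch of consuming the returned vector, mirroring the loop
   in flush_pending_stmts below (process_copy is a hypothetical caller
   function):

     edge_var_map_vector v = redirect_edge_var_map_vector (e);
     edge_var_map *vm;
     int i;

     for (i = 0; VEC_iterate (edge_var_map, v, i, vm); i++)
       process_copy (redirect_edge_var_map_result (vm),
                     redirect_edge_var_map_def (vm));

   Note the vector may be NULL when no mappings were recorded for E;
   VEC_iterate handles that safely.  */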


/* Clear the edge variable mappings.  */

void
redirect_edge_var_map_destroy (void)
{
  if (edge_var_maps)
    {
      pointer_map_destroy (edge_var_maps);
      edge_var_maps = NULL;
    }
}


/* Remove the corresponding arguments from the PHI nodes in E's
   destination block and redirect it to DEST.  Return redirected edge.
   The list of removed arguments is stored in a vector accessed
   through edge_var_maps.  */

edge
ssa_redirect_edge (edge e, basic_block dest)
{
  gimple_stmt_iterator gsi;
  gimple phi;

  redirect_edge_var_map_clear (e);

  /* Remove the appropriate PHI arguments in E's destination block.  */
  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def;

      phi = gsi_stmt (gsi);
      def = gimple_phi_arg_def (phi, e->dest_idx);

      if (def == NULL_TREE)
        continue;

      redirect_edge_var_map_add (e, gimple_phi_result (phi), def);
    }

  e = redirect_edge_succ_nodup (e, dest);

  return e;
}


/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
   E->dest.  */

void
flush_pending_stmts (edge e)
{
  gimple phi;
  edge_var_map_vector v;
  edge_var_map *vm;
  int i;
  gimple_stmt_iterator gsi;

  v = redirect_edge_var_map_vector (e);
  if (!v)
    return;

  for (gsi = gsi_start_phis (e->dest), i = 0;
       !gsi_end_p (gsi) && VEC_iterate (edge_var_map, v, i, vm);
       gsi_next (&gsi), i++)
    {
      tree def;

      phi = gsi_stmt (gsi);
      def = redirect_edge_var_map_def (vm);
      add_phi_arg (phi, def, e);
    }

  redirect_edge_var_map_clear (e);
}
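
/* The usual idiom pairs the two functions above; a sketch, assuming E
   is an edge being retargeted to basic block DEST:

     edge e2 = ssa_redirect_edge (e, dest);
     flush_pending_stmts (e2);

   ssa_redirect_edge records the PHI arguments it removes from the old
   destination, and flush_pending_stmts replays them as PHI arguments
   in the new one.  */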

/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
   operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  if (is_virtual && is_gimple_reg (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (!is_virtual && !is_gimple_reg (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}


/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
            gimple stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
             definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name, 0);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}


/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.
   Dominator information (CDI_DOMINATORS) must have been computed by
   the caller.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
   is flowing through an abnormal edge (only used when checking PHI
   arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
   that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
            gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
           && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
             def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
           && names_defined_in_bb != NULL
           && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      if (use_p->prev->use == NULL)
        listvar = use_p->prev->loc.ssa_name;
      else
        listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
        {
          error ("wrong immediate use list");
          err = true;
        }
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}


/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.  */

static bool
verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
        {
          error ("PHI argument is missing for edge %d->%d",
                 e->src->index,
                 e->dest->index);
          err = true;
          goto error;
        }

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
        {
          error ("PHI argument is not SSA_NAME, or invariant");
          err = true;
        }

      if (TREE_CODE (op) == SSA_NAME)
        {
          err = verify_ssa_name (op, !is_gimple_reg (gimple_phi_result (phi)));
          err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
                             op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
        }

      if (e->dest != bb)
        {
          error ("wrong edge %d->%d for PHI argument",
                 e->src->index, e->dest->index);
          err = true;
        }

      if (err)
        {
          fprintf (stderr, "PHI argument\n");
          print_generic_stmt (stderr, op, TDF_VOPS);
          goto error;
        }
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return err;
}


/* Verify flow-insensitive alias information: every member of a memory
   tag's may-alias set must be addressable.  */

static void
verify_flow_insensitive_alias_info (void)
{
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      unsigned int j;
      bitmap aliases;
      tree alias;
      bitmap_iterator bi;

      if (!MTAG_P (var) || !MTAG_ALIASES (var))
        continue;

      aliases = MTAG_ALIASES (var);

      EXECUTE_IF_SET_IN_BITMAP (aliases, 0, j, bi)
        {
          alias = referenced_var (j);

          if (TREE_CODE (alias) != MEMORY_PARTITION_TAG
              && !may_be_aliased (alias))
            {
              error ("non-addressable variable inside an alias set");
              debug_variable (alias);
              goto err;
            }
        }
    }

  return;

err:
  debug_variable (var);
  internal_error ("verify_flow_insensitive_alias_info failed");
}


/* Verify flow-sensitive alias information attached to SSA pointers.  */

static void
verify_flow_sensitive_alias_info (void)
{
  size_t i;
  tree ptr;

  for (i = 1; i < num_ssa_names; i++)
    {
      tree var;
      var_ann_t ann;
      struct ptr_info_def *pi;

      ptr = ssa_name (i);
      if (!ptr)
        continue;

      /* We only care for pointers that are actually referenced in the
         program.  */
      if (!POINTER_TYPE_P (TREE_TYPE (ptr)) || !TREE_VISITED (ptr))
        continue;

      /* RESULT_DECL is special.  If it's a GIMPLE register, then it
         is written to only once, in the return statement.
         Otherwise, aggregate RESULT_DECLs may be written to more than
         once in virtual operands.  */
      var = SSA_NAME_VAR (ptr);
      if (TREE_CODE (var) == RESULT_DECL
          && is_gimple_reg (ptr))
        continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi == NULL)
        continue;

      ann = var_ann (var);
      if (pi->memory_tag_needed && !pi->name_mem_tag && !ann->symbol_mem_tag)
        {
          error ("dereferenced pointers should have a name or a symbol tag");
          goto err;
        }

      if (pi->name_mem_tag
          && (pi->pt_vars == NULL || bitmap_empty_p (pi->pt_vars)))
        {
          error ("pointers with a memory tag should have points-to sets");
          goto err;
        }

      if (pi->value_escapes_p
          && pi->escape_mask & ~ESCAPE_TO_RETURN
          && pi->name_mem_tag)
        {
          tree t = memory_partition (pi->name_mem_tag);
          if (t == NULL_TREE)
            t = pi->name_mem_tag;

          if (!is_call_clobbered (t))
            {
              error ("pointer escapes but its name tag is not call-clobbered");
              goto err;
            }
        }
    }

  return;

err:
  debug_variable (ptr);
  internal_error ("verify_flow_sensitive_alias_info failed");
}


/* Verify the consistency of call clobbering information.  */

static void
verify_call_clobbering (void)
{
  unsigned int i;
  bitmap_iterator bi;
  tree var;
  referenced_var_iterator rvi;

  /* At all times, the call_clobbered flag on a variable should agree
     with the call_clobbered_vars bitmap.  Verify both that everything
     in call_clobbered_vars is marked call_clobbered, and that
     everything marked call_clobbered is in call_clobbered_vars.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
    {
      var = referenced_var (i);

      if (memory_partition (var))
        var = memory_partition (var);

      if (!MTAG_P (var) && !var_ann (var)->call_clobbered)
        {
          error ("variable in call_clobbered_vars but not marked "
                 "call_clobbered");
          debug_variable (var);
          goto err;
        }
    }

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (is_gimple_reg (var))
        continue;

      if (memory_partition (var))
        var = memory_partition (var);

      if (!MTAG_P (var)
          && var_ann (var)->call_clobbered
          && !bitmap_bit_p (gimple_call_clobbered_vars (cfun), DECL_UID (var)))
        {
          error ("variable marked call_clobbered but not in "
                 "call_clobbered_vars bitmap");
          debug_variable (var);
          goto err;
        }
    }

  return;

err:
  internal_error ("verify_call_clobbering failed");
}


/* Verify invariants in memory partitions.  */

static void
verify_memory_partitions (void)
{
  unsigned i;
  tree mpt;
  VEC(tree,heap) *mpt_table = gimple_ssa_operands (cfun)->mpt_table;
  struct pointer_set_t *partitioned_syms = pointer_set_create ();

  for (i = 0; VEC_iterate (tree, mpt_table, i, mpt); i++)
    {
      unsigned j;
      bitmap_iterator bj;

      if (MPT_SYMBOLS (mpt) == NULL)
        {
          error ("Memory partitions should have at least one symbol");
          debug_variable (mpt);
          goto err;
        }

      EXECUTE_IF_SET_IN_BITMAP (MPT_SYMBOLS (mpt), 0, j, bj)
        {
          tree var = referenced_var (j);
          if (pointer_set_insert (partitioned_syms, var))
            {
              error ("Partitioned symbols should belong to exactly one "
                     "partition");
              debug_variable (var);
              goto err;
            }
        }
    }

  pointer_set_destroy (partitioned_syms);

  return;

err:
  internal_error ("verify_memory_partitions failed");
}


/* Verify the consistency of aliasing information.  */

static void
verify_alias_info (void)
{
  verify_flow_sensitive_alias_info ();
  verify_call_clobbering ();
  verify_flow_insensitive_alias_info ();
  verify_memory_partitions ();
}


/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.  */

void
verify_ssa (bool check_modified_stmt)
{
  size_t i;
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);

  gcc_assert (!need_ssa_update_p ());

  verify_stmts ();

  timevar_push (TV_TREE_SSA_VERIFY);

  /* Keep track of SSA names present in the IL.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          gimple stmt;
          TREE_VISITED (name) = 0;

          stmt = SSA_NAME_DEF_STMT (name);
          if (!gimple_nop_p (stmt))
            {
              basic_block bb = gimple_bb (stmt);
              verify_def (bb, definition_block,
                          name, stmt, !is_gimple_reg (name));
            }
        }
    }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      gimple phi;
      edge_iterator ei;
      gimple_stmt_iterator gsi;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->aux)
            {
              error ("AUX pointer initialized for edge %d->%d", e->src->index,
                     e->dest->index);
              goto err;
            }
        }

      /* Verify the arguments for every PHI node in the block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          if (verify_phi_args (phi, bb, definition_block))
            goto err;

          bitmap_set_bit (names_defined_in_bb,
                          SSA_NAME_VERSION (gimple_phi_result (phi)));
        }

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          use_operand_p use_p;

          if (check_modified_stmt && gimple_modified_p (stmt))
            {
              error ("stmt (%p) marked modified after optimization pass: ",
                     (void *)stmt);
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              goto err;
            }

          if (is_gimple_assign (stmt)
              && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
            {
              tree lhs, base_address;

              lhs = gimple_assign_lhs (stmt);
              base_address = get_base_address (lhs);

              if (base_address
                  && gimple_aliases_computed_p (cfun)
                  && SSA_VAR_P (base_address)
                  && !gimple_has_volatile_ops (stmt)
                  && ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
                {
                  error ("statement makes a memory store, but has no VDEFS");
                  print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
                  goto err;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_VIRTUALS)
            {
              if (verify_ssa_name (op, true))
                {
                  error ("in statement");
                  print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
                  goto err;
                }
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE|SSA_OP_DEF)
            {
              if (verify_ssa_name (op, false))
                {
                  error ("in statement");
                  print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
                  goto err;
                }
            }

          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
            {
              op = USE_FROM_PTR (use_p);
              if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
                              use_p, stmt, false, names_defined_in_bb))
                goto err;
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
            bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
        }

      bitmap_clear (names_defined_in_bb);
    }

  /* Finally, verify alias information.  */
  if (gimple_aliases_computed_p (cfun))
    verify_alias_info ();

  free (definition_block);

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  BITMAP_FREE (names_defined_in_bb);
  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}
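
/* Callers typically run this verifier only in checking builds; a sketch
   of the usual pattern:

     #ifdef ENABLE_CHECKING
     verify_ssa (true);
     #endif

   passing false instead when statements may legitimately still be
   marked modified.  */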

/* Return true if the uids in both int_tree_maps are equal.  */

int
int_tree_map_eq (const void *va, const void *vb)
{
  const struct int_tree_map *a = (const struct int_tree_map *) va;
  const struct int_tree_map *b = (const struct int_tree_map *) vb;
  return (a->uid == b->uid);
}

/* Hash a UID in an int_tree_map.  */

unsigned int
int_tree_map_hash (const void *item)
{
  return ((const struct int_tree_map *)item)->uid;
}
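
/* A sketch of how these callbacks plug into a libiberty hash table
   (the initial size 10 is arbitrary):

     htab_t map = htab_create (10, int_tree_map_hash, int_tree_map_eq,
                               free);

   Entries are then hashed and compared by their UID field alone.  */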

/* Return true if the DECL_UIDs in both trees are equal.  */

int
uid_decl_map_eq (const void *va, const void *vb)
{
  const_tree a = (const_tree) va;
  const_tree b = (const_tree) vb;
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Hash a tree in a uid_decl_map.  */

unsigned int
uid_decl_map_hash (const void *item)
{
  return ((const_tree)item)->decl_minimal.uid;
}

/* Return true if the DECL_UIDs of the SSA_NAME variables in both trees
   are equal.  */

static int
uid_ssaname_map_eq (const void *va, const void *vb)
{
  const_tree a = (const_tree) va;
  const_tree b = (const_tree) vb;
  return (a->ssa_name.var->decl_minimal.uid == b->ssa_name.var->decl_minimal.uid);
}

/* Hash a tree in a uid_ssaname_map.  */

static unsigned int
uid_ssaname_map_hash (const void *item)
{
  return ((const_tree)item)->ssa_name.var->decl_minimal.uid;
}


/* Initialize global DFA and SSA structures.  */

void
init_tree_ssa (struct function *fn)
{
  fn->gimple_df = GGC_CNEW (struct gimple_df);
  fn->gimple_df->referenced_vars = htab_create_ggc (20, uid_decl_map_hash,
                                                    uid_decl_map_eq, NULL);
  fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
                                                 uid_ssaname_map_eq, NULL);
  fn->gimple_df->call_clobbered_vars = BITMAP_GGC_ALLOC ();
  fn->gimple_df->call_used_vars = BITMAP_GGC_ALLOC ();
  fn->gimple_df->addressable_vars = BITMAP_GGC_ALLOC ();
  init_ssanames (fn, 0);
  init_phinodes ();
}


/* Deallocate memory associated with SSA data structures for the
   current function.  */

void
delete_tree_ssa (void)
{
  size_t i;
  basic_block bb;
  gimple_stmt_iterator gsi;
  referenced_var_iterator rvi;
  tree var;

  /* Release any ssa_names still in use.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree var = ssa_name (i);
      if (var && TREE_CODE (var) == SSA_NAME)
        {
          SSA_NAME_IMM_USE_NODE (var).prev = &(SSA_NAME_IMM_USE_NODE (var));
          SSA_NAME_IMM_USE_NODE (var).next = &(SSA_NAME_IMM_USE_NODE (var));
        }
      release_ssa_name (var);
    }

  /* FIXME.  This may not be necessary.  We will release all this
     memory en masse in free_ssa_operands.  This clearing used to be
     necessary to avoid problems with the inliner, but it may not be
     needed anymore.  */
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);

          if (gimple_has_ops (stmt))
            {
              gimple_set_def_ops (stmt, NULL);
              gimple_set_use_ops (stmt, NULL);
              gimple_set_addresses_taken (stmt, NULL);
            }

          if (gimple_has_mem_ops (stmt))
            {
              gimple_set_vdef_ops (stmt, NULL);
              gimple_set_vuse_ops (stmt, NULL);
              BITMAP_FREE (stmt->gsmem.membase.stores);
              BITMAP_FREE (stmt->gsmem.membase.loads);
            }

          gimple_set_modified (stmt, true);
        }
      set_phi_nodes (bb, NULL);
    }

  /* Remove annotations from every referenced local variable.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (!MTAG_P (var)
          && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
        {
          var_ann (var)->mpt = NULL_TREE;
          var_ann (var)->symbol_mem_tag = NULL_TREE;
          continue;
        }
      if (var->base.ann)
        ggc_free (var->base.ann);
      var->base.ann = NULL;
    }
  htab_delete (gimple_referenced_vars (cfun));
  cfun->gimple_df->referenced_vars = NULL;

  fini_ssanames ();
  fini_phinodes ();

  /* We no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active ())
    fini_ssa_operands ();

  cfun->gimple_df->global_var = NULL_TREE;

  htab_delete (cfun->gimple_df->default_defs);
  cfun->gimple_df->default_defs = NULL;
  cfun->gimple_df->call_clobbered_vars = NULL;
  cfun->gimple_df->call_used_vars = NULL;
  cfun->gimple_df->addressable_vars = NULL;
  cfun->gimple_df->modified_noreturn_calls = NULL;
  if (gimple_aliases_computed_p (cfun))
    {
      delete_alias_heapvars ();
      gcc_assert (!need_ssa_update_p ());
    }
  cfun->gimple_df->aliases_computed_p = false;
  delete_mem_ref_stats (cfun);

  cfun->gimple_df = NULL;

  /* We no longer need the edge variable maps.  */
  redirect_edge_var_map_destroy ();
}

/* Helper function for useless_type_conversion_p.  */

static bool
useless_type_conversion_p_1 (tree outer_type, tree inner_type)
{
  /* Qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  if (inner_type == outer_type)
    return true;

  /* If we know the canonical types, compare them.  */
  if (TYPE_CANONICAL (inner_type)
      && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
    return true;

  /* Changes in machine mode are never useless conversions.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
          || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
        return false;

      /* Conversions from a non-base to a base type are not useless.
         This way we preserve the invariant to do arithmetic in
         base types only.  */
      if (TREE_TYPE (inner_type)
          && TREE_TYPE (inner_type) != inner_type
          && (TREE_TYPE (outer_type) == outer_type
              || TREE_TYPE (outer_type) == NULL_TREE))
        return false;

      /* We don't need to preserve changes in the type's minimum or
         maximum value in general, as these do not generate code
         unless the types' precisions differ.  */

      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
           && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
           && POINTER_TYPE_P (outer_type))
    {
      /* Don't lose casts between pointers to volatile and non-volatile
         qualified types.  Doing so would result in changing the semantics
         of later accesses.  */
      if ((TYPE_VOLATILE (TREE_TYPE (outer_type))
           != TYPE_VOLATILE (TREE_TYPE (inner_type)))
          && TYPE_VOLATILE (TREE_TYPE (outer_type)))
        return false;

      /* Do not lose casts between pointers with different
         TYPE_REF_CAN_ALIAS_ALL setting or alias sets.  */
      if ((TYPE_REF_CAN_ALIAS_ALL (inner_type)
           != TYPE_REF_CAN_ALIAS_ALL (outer_type))
          || (get_alias_set (TREE_TYPE (inner_type))
              != get_alias_set (TREE_TYPE (outer_type))))
        return false;

      /* We do not care for const qualification of the pointed-to types
         as const qualification has no semantic value to the middle-end.  */

      /* Do not lose casts to restrict qualified pointers.  */
      if ((TYPE_RESTRICT (outer_type)
           != TYPE_RESTRICT (inner_type))
          && TYPE_RESTRICT (outer_type))
        return false;

      /* Otherwise pointers/references are equivalent if their pointed
         to types are effectively the same.  We can strip qualifiers
         on pointed-to types for further comparison, which is done in
         the callee.  */
      return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
                                          TREE_TYPE (inner_type));
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
           && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
                                        TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
           && TREE_CODE (outer_type) == VECTOR_TYPE
           && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
    return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
                                        TREE_TYPE (inner_type));

  /* For aggregates we may need to fall back to structural equality
     checks.  */
  else if (AGGREGATE_TYPE_P (inner_type)
           && AGGREGATE_TYPE_P (outer_type))
    {
      /* Different types of aggregates are incompatible.  */
      if (TREE_CODE (inner_type) != TREE_CODE (outer_type))
        return false;

      /* ??? Add structural equivalence check.  */

      /* ??? This should eventually just return false.  */
      return lang_hooks.types_compatible_p (inner_type, outer_type);
    }

  return false;
}

/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
   useless type conversion, otherwise return false.

   This function implicitly defines the middle-end type system.  With
   the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
   holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
   the following invariants shall be fulfilled:

     1) useless_type_conversion_p is transitive.
        If a < b and b < c then a < c.

     2) useless_type_conversion_p is not symmetric.
        a < b does not imply a > b.

     3) Types define the available set of operations applicable to values.
        A type conversion is useless if the operations for the target type
        are a subset of the operations for the source type.  For example,
        casts to void* are useless, casts from void* are not (void* can't
        be dereferenced or offset, only copied, hence its set of operations
        is a strict subset of that of all other data pointer types).  Casts
        to const T* are useless (can't be written to), casts from const T*
        to T* are not.  */

bool
useless_type_conversion_p (tree outer_type, tree inner_type)
{
  /* If the outer type is (void *), then the conversion is not
     necessary.  We have to make sure to not apply this while
     recursing though.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type)
      && TREE_CODE (TREE_TYPE (outer_type)) == VOID_TYPE)
    return true;

  return useless_type_conversion_p_1 (outer_type, inner_type);
}
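
/* For instance, assuming the standard global type nodes, converting
   int* to void* is useless while the reverse is not:

     useless_type_conversion_p (ptr_type_node, integer_ptr_type_node)
       == true
     useless_type_conversion_p (integer_ptr_type_node, ptr_type_node)
       == false

   which illustrates invariant 2) above (a sketch, not exercised
   here).  */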

/* Return true if a conversion from either of TYPE1 or TYPE2 to the
   other is not required.  Otherwise return false.  */

bool
types_compatible_p (tree type1, tree type2)
{
  return (type1 == type2
          || (useless_type_conversion_p (type1, type2)
              && useless_type_conversion_p (type2, type1)));
}

/* Return true if EXPR is a useless type conversion, otherwise return
   false.  */

bool
tree_ssa_useless_type_conversion (tree expr)
{
  /* If we have an assignment that merely uses a NOP_EXPR to change
     the top of the RHS to the type of the LHS and the type conversion
     is "safe", then strip away the type conversion so that we can
     enter LHS = RHS into the const_and_copies table.  */
  if (CONVERT_EXPR_P (expr)
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
    return useless_type_conversion_p
      (TREE_TYPE (expr),
       TREE_TYPE (TREE_OPERAND (expr, 0)));

  return false;
}


/* Internal helper for walk_use_def_chains.  VAR, FN and DATA are as
   described in walk_use_def_chains.

   VISITED is a pointer set used to mark visited SSA_NAMEs to avoid
   infinite loops.  We used to have a bitmap for this to just mark
   SSA versions we had visited.  But non-sparse bitmaps are way too
   expensive, while sparse bitmaps may cause quadratic behavior.

   IS_DFS is true if the caller wants to perform a depth-first search
   when visiting PHI nodes.  A DFS will visit each PHI argument and
   call FN after each one.  Otherwise, all the arguments are
   visited first and then FN is called with each of the visited
   arguments in a separate pass.  */

static bool
walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
                       struct pointer_set_t *visited, bool is_dfs)
{
  gimple def_stmt;

  if (pointer_set_insert (visited, var))
    return false;

  def_stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_code (def_stmt) != GIMPLE_PHI)
    {
      /* If we reached the end of the use-def chain, call FN.  */
      return fn (var, def_stmt, data);
    }
  else
    {
      size_t i;

      /* When doing a breadth-first search, call FN before following the
         use-def links for each argument.  */
      if (!is_dfs)
        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
            return true;

      /* Follow use-def links out of each PHI argument.  */
      for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
        {
          tree arg = gimple_phi_arg_def (def_stmt, i);

          /* ARG may be NULL for newly introduced PHI nodes.  */
          if (arg
              && TREE_CODE (arg) == SSA_NAME
              && walk_use_def_chains_1 (arg, fn, data, visited, is_dfs))
            return true;
        }

      /* When doing a depth-first search, call FN after following the
         use-def links for each argument.  */
      if (is_dfs)
        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
            return true;
    }

  return false;
}


/* Walk use-def chains starting at the SSA variable VAR.  Call
   function FN at each reaching definition found.  FN takes three
   arguments: VAR, its defining statement (DEF_STMT) and a generic
   pointer to whatever state information that FN may want to maintain
   (DATA).  FN can stop the walk by returning true; to continue the
   walk, FN should return false.

   Note that if DEF_STMT is a PHI node, the semantics are slightly
   different.  The first argument to FN is no longer the original
   variable VAR, but the PHI argument currently being examined.  If FN
   wants to get at VAR, it should call PHI_RESULT (PHI).

   If IS_DFS is true, this function will:

     1- walk the use-def chains for all the PHI arguments, and,
     2- call (*FN) (ARG, PHI, DATA) on all the PHI arguments.

   If IS_DFS is false, the two steps above are done in reverse order
   (i.e., a breadth-first search).  */

void
walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
                     bool is_dfs)
{
  gimple def_stmt;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  def_stmt = SSA_NAME_DEF_STMT (var);

  /* We only need to recurse if the reaching definition comes from a PHI
     node.  */
  if (gimple_code (def_stmt) != GIMPLE_PHI)
    (*fn) (var, def_stmt, data);
  else
    {
      struct pointer_set_t *visited = pointer_set_create ();
      walk_use_def_chains_1 (var, fn, data, visited, is_dfs);
      pointer_set_destroy (visited);
    }
}
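
/* A hedged sketch of a walk_use_def_chains_fn callback that dumps each
   reaching definition (dump_reaching_def is a hypothetical name):

     static bool
     dump_reaching_def (tree var, gimple def_stmt,
                        void *data ATTRIBUTE_UNUSED)
     {
       print_generic_expr (stderr, var, 0);
       fprintf (stderr, " is defined in:\n");
       print_gimple_stmt (stderr, def_stmt, 2, 0);
       return false;  /* Continue the walk.  */
     }

   invoked as walk_use_def_chains (name, dump_reaching_def, NULL, true).  */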

/* Return true if T, an SSA_NAME, has an undefined value.  */

bool
ssa_undefined_value_p (tree t)
{
  tree var = SSA_NAME_VAR (t);

  /* Parameters get their initial value from the function entry.  */
  if (TREE_CODE (var) == PARM_DECL)
    return false;

  /* Hard register variables get their initial value from the ether.  */
  if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    return false;

  /* The value is undefined iff its definition statement is empty.  */
  return gimple_nop_p (SSA_NAME_DEF_STMT (t));
}

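/* For example, for C code like

     int x;
     return x;

   the SSA name for X reaching the return statement has a GIMPLE_NOP
   defining statement, so ssa_undefined_value_p returns true for it
   (a sketch; PARM_DECLs and hard-register variables are excepted as
   above).  */
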
/* Emit warnings for uninitialized variables.  This is done in two passes.

   The first pass notices real uses of SSA names with undefined values.
   Such uses are unconditionally uninitialized, and we can be certain that
   such a use is a mistake.  This pass is run before most optimizations,
   so that we catch as many as we can.

   The second pass follows PHI nodes to find uses that are potentially
   uninitialized.  In this case we can't necessarily prove that the use
   is really uninitialized.  This pass is run after most optimizations,
   so that we thread as many jumps as possible, and delete as much dead
   code as possible, in order to reduce false positives.  We also look
   again for plain uninitialized variables, since optimization may have
   changed conditionally uninitialized to unconditionally uninitialized.  */

/* Emit a warning for T, an SSA_NAME, being uninitialized.  The exact
   warning text is in GMSGID.  DATA is the statement giving the warning
   its context, if any, and may be null.  */

static void
warn_uninit (tree t, const char *gmsgid, void *data)
{
  tree var = SSA_NAME_VAR (t);
  gimple context = (gimple) data;
  location_t location;
  expanded_location xloc, floc;

  if (!ssa_undefined_value_p (t))
    return;

  /* TREE_NO_WARNING either means we already warned, or the front end
     wishes to suppress the warning.  */
  if (TREE_NO_WARNING (var))
    return;

  location = (context != NULL && gimple_has_location (context))
             ? gimple_location (context)
             : DECL_SOURCE_LOCATION (var);
  warning_at (location, OPT_Wuninitialized, gmsgid, var);
  xloc = expand_location (location);
  floc = expand_location (DECL_SOURCE_LOCATION (cfun->decl));
  if (xloc.file != floc.file
      || xloc.line < floc.line
      || xloc.line > LOCATION_LINE (cfun->function_end_locus))
    inform ("%J%qD was declared here", var, var);

  TREE_NO_WARNING (var) = 1;
}

struct walk_data {
  gimple stmt;
  bool always_executed;
};

/* Called via walk_tree, look for SSA_NAMEs that have empty definitions
   and warn about them.  */

static tree
warn_uninitialized_var (tree *tp, int *walk_subtrees, void *data_)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
  struct walk_data *data = (struct walk_data *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* We only do data flow with SSA_NAMEs, so that's all we
         can warn about.  */
      if (data->always_executed)
        warn_uninit (t, "%qD is used uninitialized in this function",
                     data->stmt);
      else
        warn_uninit (t, "%qD may be used uninitialized in this function",
                     data->stmt);
      *walk_subtrees = 0;
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* The total store transformation performed during gimplification
         creates uninitialized variable uses.  If all is well, these will
         be optimized away, so don't warn now.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
        *walk_subtrees = 0;
      break;

    default:
      if (IS_TYPE_OR_DECL_P (t))
        *walk_subtrees = 0;
      break;
    }

  return NULL_TREE;
}

/* Look for inputs to PHI that are SSA_NAMEs that have empty definitions
   and warn about them.  */

static void
warn_uninitialized_phi (gimple phi)
{
  size_t i, n = gimple_phi_num_args (phi);

  /* Don't look at memory tags.  */
  if (!is_gimple_reg (gimple_phi_result (phi)))
    return;

  for (i = 0; i < n; ++i)
    {
      tree op = gimple_phi_arg_def (phi, i);
      if (TREE_CODE (op) == SSA_NAME)
        warn_uninit (op, "%qD may be used uninitialized in this function",
                     NULL);
    }
}

static unsigned int
execute_early_warn_uninitialized (void)
{
  gimple_stmt_iterator gsi;
  basic_block bb;
  struct walk_data data;

  calculate_dominance_info (CDI_POST_DOMINATORS);

  FOR_EACH_BB (bb)
    {
      data.always_executed = dominated_by_p (CDI_POST_DOMINATORS,
                                             single_succ (ENTRY_BLOCK_PTR), bb);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          struct walk_stmt_info wi;
          data.stmt = gsi_stmt (gsi);
          memset (&wi, 0, sizeof (wi));
          wi.info = &data;
          walk_gimple_op (gsi_stmt (gsi), warn_uninitialized_var, &wi);
        }
    }
  return 0;
}

static unsigned int
execute_late_warn_uninitialized (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  /* Re-do the plain uninitialized variable check, as optimization may have
     straightened control flow.  Do this first so that we don't accidentally
     get a "may be" warning when we'd have seen an "is" warning later.  */
  execute_early_warn_uninitialized ();

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      warn_uninitialized_phi (gsi_stmt (gsi));

  return 0;
}

static bool
gate_warn_uninitialized (void)
{
  return warn_uninitialized != 0;
}

struct gimple_opt_pass pass_early_warn_uninitialized =
{
 {
  GIMPLE_PASS,
  NULL,                                 /* name */
  gate_warn_uninitialized,              /* gate */
  execute_early_warn_uninitialized,     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_ssa,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_late_warn_uninitialized =
{
 {
  GIMPLE_PASS,
  NULL,                                 /* name */
  gate_warn_uninitialized,              /* gate */
  execute_late_warn_uninitialized,      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_ssa,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

/* Compute TREE_ADDRESSABLE for local variables.  */

static unsigned int
execute_update_addresses_taken (void)
{
  tree var;
  referenced_var_iterator rvi;
  gimple_stmt_iterator gsi;
  basic_block bb;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  bitmap vars_updated = BITMAP_ALLOC (NULL);
  bool update_vops = false;

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          bitmap taken = gimple_addresses_taken (gsi_stmt (gsi));
          if (taken)
            bitmap_ior_into (addresses_taken, taken);
        }

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          size_t i;
          gimple phi = gsi_stmt (gsi);

          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree op = PHI_ARG_DEF (phi, i), var;
              if (TREE_CODE (op) == ADDR_EXPR
                  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
                  && DECL_P (var))
                bitmap_set_bit (addresses_taken, DECL_UID (var));
            }
        }
    }

  /* When possible, clear ADDRESSABLE bit and mark variable for conversion into
     SSA.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    if (!is_global_var (var)
        && TREE_CODE (var) != RESULT_DECL
        && TREE_ADDRESSABLE (var)
        && !bitmap_bit_p (addresses_taken, DECL_UID (var)))
      {
        TREE_ADDRESSABLE (var) = 0;
        if (is_gimple_reg (var))
          mark_sym_for_renaming (var);
        update_vops = true;
        bitmap_set_bit (vars_updated, DECL_UID (var));
        if (dump_file)
          {
            fprintf (dump_file, "No longer having address taken ");
            print_generic_expr (dump_file, var, 0);
            fprintf (dump_file, "\n");
          }
      }

  /* Operand caches need to be recomputed for operands referencing the
     updated variables.  */
  if (update_vops)
    FOR_EACH_BB (bb)
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);

          if ((gimple_loaded_syms (stmt)
               && bitmap_intersect_p (gimple_loaded_syms (stmt), vars_updated))
              || (gimple_stored_syms (stmt)
                  && bitmap_intersect_p (gimple_stored_syms (stmt),
                                         vars_updated)))
            update_stmt (stmt);
        }
  BITMAP_FREE (addresses_taken);
  BITMAP_FREE (vars_updated);
  return 0;
}

struct gimple_opt_pass pass_update_address_taken =
{
 {
  GIMPLE_PASS,
  "addressables",                       /* name */
  NULL,                                 /* gate */
  execute_update_addresses_taken,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_ssa,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_update_ssa                       /* todo_flags_finish */
 }
};