/* Miscellaneous SSA utility functions.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "target.h"
#include "ggc.h"
#include "langhooks.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "bitmap.h"
#include "pointer-set.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-inline.h"
#include "varray.h"
#include "timevar.h"
#include "hashtab.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"

/* Pointer map of variable mappings, keyed by edge.  */
static struct pointer_map_t *edge_var_maps;


/* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */

void
redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
{
  void **slot;
  edge_var_map_vector old_head, head;
  edge_var_map new_node;

  if (edge_var_maps == NULL)
    edge_var_maps = pointer_map_create ();

  slot = pointer_map_insert (edge_var_maps, e);
  old_head = head = (edge_var_map_vector) *slot;
  if (!head)
    {
      head = VEC_alloc (edge_var_map, heap, 5);
      *slot = head;
    }
  new_node.def = def;
  new_node.result = result;
  new_node.locus = locus;

  VEC_safe_push (edge_var_map, heap, head, &new_node);
  if (old_head != head)
    {
      /* The push did some reallocation.  Update the pointer map.  */
      *slot = head;
    }
}


/* Clear the var mappings in edge E.  */

void
redirect_edge_var_map_clear (edge e)
{
  void **slot;
  edge_var_map_vector head;

  if (!edge_var_maps)
    return;

  slot = pointer_map_contains (edge_var_maps, e);

  if (slot)
    {
      head = (edge_var_map_vector) *slot;
      VEC_free (edge_var_map, heap, head);
      *slot = NULL;
    }
}


/* Duplicate the redirected var mappings in OLDE in NEWE.

   Since we can't remove a mapping, let's just duplicate it.  This assumes a
   pointer_map can have multiple edges mapping to the same var_map (many to
   one mapping), since we don't remove the previous mappings.  */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  void **new_slot, **old_slot;
  edge_var_map_vector head;

  if (!edge_var_maps)
    return;

  new_slot = pointer_map_insert (edge_var_maps, newe);
  old_slot = pointer_map_contains (edge_var_maps, olde);
  if (!old_slot)
    return;
  head = (edge_var_map_vector) *old_slot;

  if (head)
    *new_slot = VEC_copy (edge_var_map, heap, head);
  else
    *new_slot = VEC_alloc (edge_var_map, heap, 5);
}


/* Return the variable mappings for a given edge.  If there is none, return
   NULL.  */

edge_var_map_vector
redirect_edge_var_map_vector (edge e)
{
  void **slot;

  /* Callers may legitimately ask for the mappings of an edge before any
     mapping has been recorded.  */
  if (!edge_var_maps)
    return NULL;

  slot = pointer_map_contains (edge_var_maps, e);
  if (!slot)
    return NULL;

  return (edge_var_map_vector) *slot;
}

/* Used by redirect_edge_var_map_destroy to free all memory.  */

static bool
free_var_map_entry (const void *key ATTRIBUTE_UNUSED,
                    void **value,
                    void *data ATTRIBUTE_UNUSED)
{
  edge_var_map_vector head = (edge_var_map_vector) *value;
  VEC_free (edge_var_map, heap, head);
  return true;
}

/* Clear the edge variable mappings.  */

void
redirect_edge_var_map_destroy (void)
{
  if (edge_var_maps)
    {
      pointer_map_traverse (edge_var_maps, free_var_map_entry, NULL);
      pointer_map_destroy (edge_var_maps);
      edge_var_maps = NULL;
    }
}


/* Remove the corresponding arguments from the PHI nodes in E's
   destination block and redirect it to DEST.  Return redirected edge.
   The list of removed arguments is stored in a vector accessed
   through edge_var_maps.  */

edge
ssa_redirect_edge (edge e, basic_block dest)
{
  gimple_stmt_iterator gsi;
  gimple phi;

  redirect_edge_var_map_clear (e);

  /* Remove the appropriate PHI arguments in E's destination block.  */
  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree def;
      source_location locus;

      phi = gsi_stmt (gsi);
      def = gimple_phi_arg_def (phi, e->dest_idx);
      locus = gimple_phi_arg_location (phi, e->dest_idx);

      if (def == NULL_TREE)
        continue;

      redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
    }

  e = redirect_edge_succ_nodup (e, dest);

  return e;
}


/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
   E->dest.  */

void
flush_pending_stmts (edge e)
{
  gimple phi;
  edge_var_map_vector v;
  edge_var_map *vm;
  int i;
  gimple_stmt_iterator gsi;

  v = redirect_edge_var_map_vector (e);
  if (!v)
    return;

  for (gsi = gsi_start_phis (e->dest), i = 0;
       !gsi_end_p (gsi) && VEC_iterate (edge_var_map, v, i, vm);
       gsi_next (&gsi), i++)
    {
      tree def;

      phi = gsi_stmt (gsi);
      def = redirect_edge_var_map_def (vm);
      add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
    }

  redirect_edge_var_map_clear (e);
}
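
/* Illustrative sketch (not part of GCC): a typical client of the two
   helpers above redirects an edge, letting edge_var_maps record the
   removed PHI arguments, and later replays them at the new destination.
   The names new_dest and e here are hypothetical.

     edge e = ...;
     e = ssa_redirect_edge (e, new_dest);  queues PHI args in edge_var_maps
     ... create the PHI nodes in new_dest ...
     flush_pending_stmts (e);              re-adds the queued arguments

   flush_pending_stmts pairs each queued mapping with the PHI nodes of
   E->dest in order, so the caller must create those PHIs in the same
   order in which the arguments were removed.  */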

/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
   operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  if (is_virtual && is_gimple_reg (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
    {
      error ("virtual SSA name for non-VOP decl");
      return true;
    }

  if (!is_virtual && !is_gimple_reg (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}


/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
            gimple stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
             definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name, 0);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}


/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
   is flowing through an abnormal edge (only used when checking PHI
   arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
   that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
            gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
           && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
             def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
           && names_defined_in_bb != NULL
           && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      if (use_p->prev->use == NULL)
        listvar = use_p->prev->loc.ssa_name;
      else
        listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
        {
          error ("wrong immediate use list");
          err = true;
        }
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}


/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.  */

static bool
verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
        {
          error ("PHI argument is missing for edge %d->%d",
                 e->src->index,
                 e->dest->index);
          err = true;
          goto error;
        }

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
        {
          error ("PHI argument is not SSA_NAME, or invariant");
          err = true;
        }

      if (TREE_CODE (op) == SSA_NAME)
        {
          err = verify_ssa_name (op, !is_gimple_reg (gimple_phi_result (phi)));
          err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
                             op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
        }

      if (TREE_CODE (op) == ADDR_EXPR)
        {
          tree base = TREE_OPERAND (op, 0);
          while (handled_component_p (base))
            base = TREE_OPERAND (base, 0);
          if ((TREE_CODE (base) == VAR_DECL
               || TREE_CODE (base) == PARM_DECL
               || TREE_CODE (base) == RESULT_DECL)
              && !TREE_ADDRESSABLE (base))
            {
              error ("address taken, but ADDRESSABLE bit not set");
              err = true;
            }
        }

      if (e->dest != bb)
        {
          error ("wrong edge %d->%d for PHI argument",
                 e->src->index, e->dest->index);
          err = true;
        }

      if (err)
        {
          fprintf (stderr, "PHI argument\n");
          print_generic_stmt (stderr, op, TDF_VOPS);
          goto error;
        }
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  return err;
}


/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.  */

void
verify_ssa (bool check_modified_stmt)
{
  size_t i;
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);

  gcc_assert (!need_ssa_update_p (cfun));

  verify_stmts ();

  timevar_push (TV_TREE_SSA_VERIFY);

  /* Keep track of SSA names present in the IL.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          gimple stmt;
          TREE_VISITED (name) = 0;

          stmt = SSA_NAME_DEF_STMT (name);
          if (!gimple_nop_p (stmt))
            {
              basic_block bb = gimple_bb (stmt);
              verify_def (bb, definition_block,
                          name, stmt, !is_gimple_reg (name));
            }
        }
    }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      gimple phi;
      edge_iterator ei;
      gimple_stmt_iterator gsi;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->aux)
            {
              error ("AUX pointer initialized for edge %d->%d", e->src->index,
                     e->dest->index);
              goto err;
            }
        }

      /* Verify the arguments for every PHI node in the block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          phi = gsi_stmt (gsi);
          if (verify_phi_args (phi, bb, definition_block))
            goto err;

          bitmap_set_bit (names_defined_in_bb,
                          SSA_NAME_VERSION (gimple_phi_result (phi)));
        }

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          use_operand_p use_p;
          bool has_err;

          if (check_modified_stmt && gimple_modified_p (stmt))
            {
              error ("stmt (%p) marked modified after optimization pass: ",
                     (void *)stmt);
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              goto err;
            }

          if (is_gimple_assign (stmt)
              && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
            {
              tree lhs, base_address;

              lhs = gimple_assign_lhs (stmt);
              base_address = get_base_address (lhs);

              if (base_address
                  && SSA_VAR_P (base_address)
                  && !gimple_vdef (stmt)
                  && optimize > 0)
                {
                  error ("statement makes a memory store, but has no VDEFS");
                  print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
                  goto err;
                }
            }

          /* Verify the single virtual operand and its constraints.  */
          has_err = false;
          if (gimple_vdef (stmt))
            {
              if (gimple_vdef_op (stmt) == NULL_DEF_OPERAND_P)
                {
                  error ("statement has VDEF operand not in defs list");
                  has_err = true;
                }
              if (!gimple_vuse (stmt))
                {
                  error ("statement has VDEF but no VUSE operand");
                  has_err = true;
                }
              else if (SSA_NAME_VAR (gimple_vdef (stmt))
                       != SSA_NAME_VAR (gimple_vuse (stmt)))
                {
                  error ("VDEF and VUSE do not use the same symbol");
                  has_err = true;
                }
              has_err |= verify_ssa_name (gimple_vdef (stmt), true);
            }
          if (gimple_vuse (stmt))
            {
              if (gimple_vuse_op (stmt) == NULL_USE_OPERAND_P)
                {
                  error ("statement has VUSE operand not in uses list");
                  has_err = true;
                }
              has_err |= verify_ssa_name (gimple_vuse (stmt), true);
            }
          if (has_err)
            {
              error ("in statement");
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
              goto err;
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE|SSA_OP_DEF)
            {
              if (verify_ssa_name (op, false))
                {
                  error ("in statement");
                  print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
                  goto err;
                }
            }

          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
            {
              op = USE_FROM_PTR (use_p);
              if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
                              use_p, stmt, false, names_defined_in_bb))
                goto err;
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
            {
              if (SSA_NAME_DEF_STMT (op) != stmt)
                {
                  error ("SSA_NAME_DEF_STMT is wrong");
                  fprintf (stderr, "Expected definition statement:\n");
                  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
                  fprintf (stderr, "\nActual definition statement:\n");
                  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
                                     4, TDF_VOPS);
                  goto err;
                }
              bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
            }
        }

      bitmap_clear (names_defined_in_bb);
    }

  free (definition_block);

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  BITMAP_FREE (names_defined_in_bb);
  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}

/* Return true if the UIDs in both int_tree_maps are equal.  */

int
int_tree_map_eq (const void *va, const void *vb)
{
  const struct int_tree_map *a = (const struct int_tree_map *) va;
  const struct int_tree_map *b = (const struct int_tree_map *) vb;
  return (a->uid == b->uid);
}

/* Hash a UID in an int_tree_map.  */

unsigned int
int_tree_map_hash (const void *item)
{
  return ((const struct int_tree_map *)item)->uid;
}

/* Return true if the DECL_UIDs in both trees are equal.  */

int
uid_decl_map_eq (const void *va, const void *vb)
{
  const_tree a = (const_tree) va;
  const_tree b = (const_tree) vb;
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Hash a tree in a uid_decl_map.  */

unsigned int
uid_decl_map_hash (const void *item)
{
  return ((const_tree)item)->decl_minimal.uid;
}

/* Return true if the DECL_UIDs of the variables underlying both
   SSA_NAMEs are equal.  */

static int
uid_ssaname_map_eq (const void *va, const void *vb)
{
  const_tree a = (const_tree) va;
  const_tree b = (const_tree) vb;
  return (a->ssa_name.var->decl_minimal.uid == b->ssa_name.var->decl_minimal.uid);
}

/* Hash a tree in a uid_ssaname_map.  */

static unsigned int
uid_ssaname_map_hash (const void *item)
{
  return ((const_tree)item)->ssa_name.var->decl_minimal.uid;
}


/* Initialize global DFA and SSA structures.  */

void
init_tree_ssa (struct function *fn)
{
  fn->gimple_df = GGC_CNEW (struct gimple_df);
  fn->gimple_df->referenced_vars = htab_create_ggc (20, uid_decl_map_hash,
                                                    uid_decl_map_eq, NULL);
  fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
                                                 uid_ssaname_map_eq, NULL);
  pt_solution_reset (&fn->gimple_df->escaped);
  pt_solution_reset (&fn->gimple_df->callused);
  init_ssanames (fn, 0);
  init_phinodes ();
}


/* Deallocate memory associated with SSA data structures for the
   current function CFUN.  */

void
delete_tree_ssa (void)
{
  referenced_var_iterator rvi;
  tree var;

  /* Remove annotations from every referenced local variable.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (is_global_var (var))
        continue;
      if (var->base.ann)
        ggc_free (var->base.ann);
      var->base.ann = NULL;
    }
  htab_delete (gimple_referenced_vars (cfun));
  cfun->gimple_df->referenced_vars = NULL;

  fini_ssanames ();
  fini_phinodes ();

  /* We no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active ())
    fini_ssa_operands ();

  delete_alias_heapvars ();

  htab_delete (cfun->gimple_df->default_defs);
  cfun->gimple_df->default_defs = NULL;
  pt_solution_reset (&cfun->gimple_df->escaped);
  pt_solution_reset (&cfun->gimple_df->callused);
  if (cfun->gimple_df->decls_to_pointers != NULL)
    pointer_map_destroy (cfun->gimple_df->decls_to_pointers);
  cfun->gimple_df->decls_to_pointers = NULL;
  cfun->gimple_df->modified_noreturn_calls = NULL;
  cfun->gimple_df = NULL;

  /* We no longer need the edge variable maps.  */
  redirect_edge_var_map_destroy ();
}

/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
   useless type conversion, otherwise return false.

   This function implicitly defines the middle-end type system.  With
   the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
   holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
   the following invariants shall be fulfilled:

     1) useless_type_conversion_p is transitive.
        If a < b and b < c then a < c.

     2) useless_type_conversion_p is not symmetric.
        From a < b does not follow a > b.

     3) Types define the available set of operations applicable to values.
        A type conversion is useless if the operations for the target type
        are a subset of the operations for the source type.  For example
        casts to void* are useless, casts from void* are not (void* can't
        be dereferenced or offsetted, but copied, hence its set of operations
        is a strict subset of that of all other data pointer types).  Casts
        to const T* are useless (can't be written to), casts from const T*
        to T* are not.  */

bool
useless_type_conversion_p (tree outer_type, tree inner_type)
{
  /* Do the following before stripping toplevel qualifiers.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type))
    {
      /* If the outer type is (void *) or a pointer to an incomplete
         record type or a pointer to an unprototyped function,
         then the conversion is not necessary.  */
      if (VOID_TYPE_P (TREE_TYPE (outer_type))
          || (AGGREGATE_TYPE_P (TREE_TYPE (outer_type))
              && TREE_CODE (TREE_TYPE (outer_type)) != ARRAY_TYPE
              && (TREE_CODE (TREE_TYPE (outer_type))
                  == TREE_CODE (TREE_TYPE (inner_type)))
              && !COMPLETE_TYPE_P (TREE_TYPE (outer_type)))
          || ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
               || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
              && (TREE_CODE (TREE_TYPE (outer_type))
                  == TREE_CODE (TREE_TYPE (inner_type)))
              && !TYPE_ARG_TYPES (TREE_TYPE (outer_type))
              && useless_type_conversion_p (TREE_TYPE (TREE_TYPE (outer_type)),
                                            TREE_TYPE (TREE_TYPE (inner_type)))))
        return true;

      /* Do not lose casts to restrict qualified pointers.  */
      if ((TYPE_RESTRICT (outer_type)
           != TYPE_RESTRICT (inner_type))
          && TYPE_RESTRICT (outer_type))
        return false;
    }

  /* From now on qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  if (inner_type == outer_type)
    return true;

  /* If we know the canonical types, compare them.  */
  if (TYPE_CANONICAL (inner_type)
      && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
    return true;

  /* Changes in machine mode are never useless conversions unless we
     deal with aggregate types in which case we defer to later checks.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type)
      && !AGGREGATE_TYPE_P (inner_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same signedness and
     precision.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
          || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
        return false;

      /* We don't need to preserve changes in the type's minimum or
         maximum value in general as these do not generate code
         unless the types' precisions are different.  */
      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
           && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* Fixed point types with the same mode are compatible.  */
  else if (FIXED_POINT_TYPE_P (inner_type)
           && FIXED_POINT_TYPE_P (outer_type))
    return true;

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
           && POINTER_TYPE_P (outer_type))
    {
      /* Don't lose casts between pointers to volatile and non-volatile
         qualified types.  Doing so would result in changing the semantics
         of later accesses.  For function types the volatile qualifier
         is used to indicate noreturn functions.  */
      if (TREE_CODE (TREE_TYPE (outer_type)) != FUNCTION_TYPE
          && TREE_CODE (TREE_TYPE (outer_type)) != METHOD_TYPE
          && TREE_CODE (TREE_TYPE (inner_type)) != FUNCTION_TYPE
          && TREE_CODE (TREE_TYPE (inner_type)) != METHOD_TYPE
          && (TYPE_VOLATILE (TREE_TYPE (outer_type))
              != TYPE_VOLATILE (TREE_TYPE (inner_type)))
          && TYPE_VOLATILE (TREE_TYPE (outer_type)))
        return false;

      /* We require explicit conversions from incomplete target types.  */
      if (!COMPLETE_TYPE_P (TREE_TYPE (inner_type))
          && COMPLETE_TYPE_P (TREE_TYPE (outer_type)))
        return false;

      /* Do not lose casts between pointers that when dereferenced access
         memory with different alias sets.  */
      if (get_deref_alias_set (inner_type) != get_deref_alias_set (outer_type))
        return false;

      /* We do not care for const qualification of the pointed-to types
         as const qualification has no semantic value to the middle-end.  */

      /* Otherwise pointers/references are equivalent if their pointed
         to types are effectively the same.  We can strip qualifiers
         on pointed-to types for further comparison, which is done in
         the callee.  Note we have to use true compatibility here
         because addresses are subject to propagation into dereferences
         and thus might get the original type exposed which is equivalent
         to a reverse conversion.  */
      return types_compatible_p (TREE_TYPE (outer_type),
                                 TREE_TYPE (inner_type));
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
           && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
           && TREE_CODE (outer_type) == VECTOR_TYPE
           && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
    return useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type));

  else if (TREE_CODE (inner_type) == ARRAY_TYPE
           && TREE_CODE (outer_type) == ARRAY_TYPE)
    {
      /* Preserve string attributes.  */
      if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
        return false;

      /* Conversions from array types with unknown extent to
         array types with known extent are not useless.  */
      if (!TYPE_DOMAIN (inner_type)
          && TYPE_DOMAIN (outer_type))
        return false;

      /* Nor are conversions from array types with non-constant size to
         array types with constant size or to different size.  */
      if (TYPE_SIZE (outer_type)
          && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
          && (!TYPE_SIZE (inner_type)
              || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
              || !tree_int_cst_equal (TYPE_SIZE (outer_type),
                                      TYPE_SIZE (inner_type))))
        return false;

      /* Check conversions between arrays with partially known extents.
         If the array min/max values are constant they have to match.
         Otherwise allow conversions to unknown and variable extents.
         In particular this declares conversions that may change the
         mode to BLKmode as useless.  */
      if (TYPE_DOMAIN (inner_type)
          && TYPE_DOMAIN (outer_type)
          && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
        {
          tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
          tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
          tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
          tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));

          /* After gimplification a variable min/max value carries no
             additional information compared to a NULL value.  All that
             matters has been lowered to be part of the IL.  */
          if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
            inner_min = NULL_TREE;
          if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
            outer_min = NULL_TREE;
          if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
            inner_max = NULL_TREE;
          if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
            outer_max = NULL_TREE;

          /* Conversions NULL / variable <- cst are useless, but not
             the other way around.  */
          if (outer_min
              && (!inner_min
                  || !tree_int_cst_equal (inner_min, outer_min)))
            return false;
          if (outer_max
              && (!inner_max
                  || !tree_int_cst_equal (inner_max, outer_max)))
            return false;
        }

      /* Recurse on the element check.  */
      return useless_type_conversion_p (TREE_TYPE (outer_type),
                                        TREE_TYPE (inner_type));
    }

  else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
            || TREE_CODE (inner_type) == METHOD_TYPE)
           && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    {
      tree outer_parm, inner_parm;

      /* If the return types are not compatible bail out.  */
      if (!useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type)))
        return false;

      /* Method types should belong to a compatible base class.  */
      if (TREE_CODE (inner_type) == METHOD_TYPE
          && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
                                         TYPE_METHOD_BASETYPE (inner_type)))
        return false;

      /* A conversion to an unprototyped argument list is ok.  */
      if (!TYPE_ARG_TYPES (outer_type))
        return true;

      /* If the argument types are compatible the conversion is useless.  */
      if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
        return true;

      for (outer_parm = TYPE_ARG_TYPES (outer_type),
           inner_parm = TYPE_ARG_TYPES (inner_type);
           outer_parm && inner_parm;
           outer_parm = TREE_CHAIN (outer_parm),
           inner_parm = TREE_CHAIN (inner_parm))
        if (!useless_type_conversion_p (TREE_VALUE (outer_parm),
                                        TREE_VALUE (inner_parm)))
          return false;

      /* If there is a mismatch in the number of arguments the functions
         are not compatible.  */
      if (outer_parm || inner_parm)
        return false;

      /* Defer to the target if necessary.  */
      if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
        return targetm.comp_type_attributes (outer_type, inner_type) != 0;

      return true;
    }

  /* For aggregates we rely on TYPE_CANONICAL exclusively and require
     explicit conversions between aggregate types that would otherwise
     have to be compared structurally.  */
  else if (AGGREGATE_TYPE_P (inner_type)
           && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    return false;

  return false;
}
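
/* Illustration (not part of GCC) of the subset-of-operations rule
   documented before useless_type_conversion_p, in C source terms:

     int *p = &i;
     void *q = p;          <- useless: a void* can only be copied
     int *r = (int *) q;   <- not useless: regains dereference/offset

   That is, useless_type_conversion_p (ptr_to_void, ptr_to_int) is true
   while useless_type_conversion_p (ptr_to_int, ptr_to_void) is false,
   matching invariant 2 above: the relation is deliberately asymmetric.  */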

/* Return true if a conversion from either type of TYPE1 and TYPE2
   to the other is not required.  Otherwise return false.  */

bool
types_compatible_p (tree type1, tree type2)
{
  return (type1 == type2
          || (useless_type_conversion_p (type1, type2)
              && useless_type_conversion_p (type2, type1)));
}

/* Return true if EXPR is a useless type conversion, otherwise return
   false.  */

bool
tree_ssa_useless_type_conversion (tree expr)
{
  /* If we have an assignment that merely uses a NOP_EXPR to change
     the top of the RHS to the type of the LHS and the type conversion
     is "safe", then strip away the type conversion so that we can
     enter LHS = RHS into the const_and_copies table.  */
  if (CONVERT_EXPR_P (expr)
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
    return useless_type_conversion_p
      (TREE_TYPE (expr),
       TREE_TYPE (TREE_OPERAND (expr, 0)));

  return false;
}

/* Strip conversions from EXP according to
   tree_ssa_useless_type_conversion and return the resulting
   expression.  */

tree
tree_ssa_strip_useless_type_conversions (tree exp)
{
  while (tree_ssa_useless_type_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}


/* Internal helper for walk_use_def_chains.  VAR, FN and DATA are as
   described in walk_use_def_chains.

   VISITED is a pointer set used to mark visited SSA_NAMEs to avoid
   infinite loops.  We used to have a bitmap for this to just mark
   SSA versions we had visited.  But non-sparse bitmaps are way too
   expensive, while sparse bitmaps may cause quadratic behavior.

   IS_DFS is true if the caller wants to perform a depth-first search
   when visiting PHI nodes.  A DFS will visit each PHI argument and
   call FN after each one.  Otherwise, all the arguments are
   visited first and then FN is called with each of the visited
   arguments in a separate pass.  */

static bool
walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
                       struct pointer_set_t *visited, bool is_dfs)
{
  gimple def_stmt;

  if (pointer_set_insert (visited, var))
    return false;

  def_stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_code (def_stmt) != GIMPLE_PHI)
    {
      /* If we reached the end of the use-def chain, call FN.  */
      return fn (var, def_stmt, data);
    }
  else
    {
      size_t i;

      /* When doing a breadth-first search, call FN before following the
         use-def links for each argument.  */
      if (!is_dfs)
        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
            return true;

      /* Follow use-def links out of each PHI argument.  */
      for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
        {
          tree arg = gimple_phi_arg_def (def_stmt, i);

          /* ARG may be NULL for newly introduced PHI nodes.  */
          if (arg
              && TREE_CODE (arg) == SSA_NAME
              && walk_use_def_chains_1 (arg, fn, data, visited, is_dfs))
            return true;
        }

      /* When doing a depth-first search, call FN after following the
         use-def links for each argument.  */
      if (is_dfs)
        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
            return true;
    }

  return false;
}



/* Walk use-def chains starting at the SSA variable VAR.  Call
   function FN at each reaching definition found.  FN takes three
   arguments: VAR, its defining statement (DEF_STMT) and a generic
   pointer to whatever state information that FN may want to maintain
   (DATA).  FN is able to stop the walk by returning true, otherwise
   in order to continue the walk, FN should return false.

   Note that if DEF_STMT is a PHI node, the semantics are slightly
   different.  The first argument to FN is no longer the original
   variable VAR, but the PHI argument currently being examined.  If FN
   wants to get at VAR, it should call PHI_RESULT (PHI).

   If IS_DFS is true, this function will:

     1- walk the use-def chains for all the PHI arguments, and,
     2- call (*FN) (ARG, PHI, DATA) on all the PHI arguments.

   If IS_DFS is false, the two steps above are done in reverse order
   (i.e., a breadth-first search).  */

void
walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
                     bool is_dfs)
{
  gimple def_stmt;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  def_stmt = SSA_NAME_DEF_STMT (var);

  /* We only need to recurse if the reaching definition comes from a PHI
     node.  */
  if (gimple_code (def_stmt) != GIMPLE_PHI)
    (*fn) (var, def_stmt, data);
  else
    {
      struct pointer_set_t *visited = pointer_set_create ();
      walk_use_def_chains_1 (var, fn, data, visited, is_dfs);
      pointer_set_destroy (visited);
    }
}
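
/* Hedged usage sketch (not part of GCC): a minimal walk_use_def_chains
   callback that stops the walk at the first default definition it
   reaches.  The callback and variable names are hypothetical.

     static bool
     stop_at_default_def (tree var, gimple def_stmt, void *data)
     {
       bool *found = (bool *) data;
       if (gimple_nop_p (def_stmt) && SSA_NAME_IS_DEFAULT_DEF (var))
         {
           *found = true;
           return true;    returning true terminates the walk
         }
       return false;       returning false keeps walking
     }

     bool found = false;
     walk_use_def_chains (name, stop_at_default_def, &found, false);

   With IS_DFS false, the callback runs on each PHI argument before its
   use-def links are followed, as described above.  */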

\f
/* Return true if T, an SSA_NAME, has an undefined value.  */

bool
ssa_undefined_value_p (tree t)
{
  tree var = SSA_NAME_VAR (t);

  /* Parameters get their initial value from the function entry.  */
  if (TREE_CODE (var) == PARM_DECL)
    return false;

  /* Hard register variables get their initial value from the ether.  */
  if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    return false;

  /* The value is undefined iff its definition statement is empty.  */
  return gimple_nop_p (SSA_NAME_DEF_STMT (t));
}

/* Emit warnings for uninitialized variables.  This is done in two passes.

   The first pass notices real uses of SSA names with undefined values.
   Such uses are unconditionally uninitialized, and we can be certain that
   such a use is a mistake.  This pass is run before most optimizations,
   so that we catch as many as we can.

   The second pass follows PHI nodes to find uses that are potentially
   uninitialized.  In this case we can't necessarily prove that the use
   is really uninitialized.  This pass is run after most optimizations,
   so that we thread as many jumps as possible, and delete as much dead
   code as possible, in order to reduce false positives.  We also look
   again for plain uninitialized variables, since optimization may have
   changed conditionally uninitialized to unconditionally uninitialized.  */

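/* Illustration (not part of GCC) of the two kinds of diagnostics, on a
   hypothetical input function:

     int f (int c)
     {
       int x, y;
       if (c)
         x = 1;
       return x + y;
     }

   The use of y is unconditional, so the early pass reports "y is used
   uninitialized in this function".  The use of x is only potentially
   uninitialized (its value flows through a PHI node), so the late pass
   reports "x may be used uninitialized in this function" once
   optimization has pruned provably dead paths.  */
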
/* Emit a warning for T, an SSA_NAME, being uninitialized.  The exact
   warning text is in GMSGID.  DATA is the statement that uses T; its
   location, if any, is used for the warning, and it may be NULL.  */

static void
warn_uninit (tree t, const char *gmsgid, void *data)
{
  tree var = SSA_NAME_VAR (t);
  gimple context = (gimple) data;
  location_t location;
  expanded_location xloc, floc;

  if (!ssa_undefined_value_p (t))
    return;

  /* TREE_NO_WARNING either means we already warned, or the front end
     wishes to suppress the warning.  */
  if (TREE_NO_WARNING (var))
    return;

  /* Do not warn if it can be initialized outside this module.  */
  if (is_global_var (var))
    return;

  location = (context != NULL && gimple_has_location (context))
             ? gimple_location (context)
             : DECL_SOURCE_LOCATION (var);
  xloc = expand_location (location);
  floc = expand_location (DECL_SOURCE_LOCATION (cfun->decl));
  if (warning_at (location, OPT_Wuninitialized, gmsgid, var))
    {
      TREE_NO_WARNING (var) = 1;

      if (xloc.file != floc.file
          || xloc.line < floc.line
          || xloc.line > LOCATION_LINE (cfun->function_end_locus))
        inform (DECL_SOURCE_LOCATION (var), "%qD was declared here", var);
    }
}

struct walk_data {
  gimple stmt;
  bool always_executed;
  bool warn_possibly_uninitialized;
};

/* Called via walk_tree, look for SSA_NAMEs that have empty definitions
   and warn about them.  */

static tree
warn_uninitialized_var (tree *tp, int *walk_subtrees, void *data_)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
  struct walk_data *data = (struct walk_data *) wi->info;
  tree t = *tp;

  /* We do not care about LHS.  */
  if (wi->is_lhs)
    {
      /* Except for operands of INDIRECT_REF.  */
      if (!INDIRECT_REF_P (t))
        return NULL_TREE;
      t = TREE_OPERAND (t, 0);
    }

  switch (TREE_CODE (t))
    {
    case ADDR_EXPR:
      /* Taking the address of an uninitialized variable does not
         count as using it.  */
      *walk_subtrees = 0;
      break;

    case VAR_DECL:
      {
        /* A VAR_DECL in the RHS of a gimple statement may mean that
           this variable is loaded from memory.  */
        use_operand_p vuse;
        tree op;

        /* If there is no gimple stmt, or alias information has not been
           computed, then we cannot check VUSE ops.  */
        if (data->stmt == NULL)
          return NULL_TREE;

        /* If the load happens as part of a call do not warn about it.  */
        if (is_gimple_call (data->stmt))
          return NULL_TREE;

        vuse = gimple_vuse_op (data->stmt);
        if (vuse == NULL_USE_OPERAND_P)
          return NULL_TREE;

        op = USE_FROM_PTR (vuse);
        if (t != SSA_NAME_VAR (op)
            || !SSA_NAME_IS_DEFAULT_DEF (op))
          return NULL_TREE;
        /* If this is a VUSE of t and it is the default definition,
           then warn about op.  */
        t = op;
        /* Fall through into SSA_NAME.  */
      }

    case SSA_NAME:
      /* We only do data flow with SSA_NAMEs, so that's all we
         can warn about.  */
      if (data->always_executed)
        warn_uninit (t, "%qD is used uninitialized in this function",
                     data->stmt);
      else if (data->warn_possibly_uninitialized)
        warn_uninit (t, "%qD may be used uninitialized in this function",
                     data->stmt);
      *walk_subtrees = 0;
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* The total store transformation performed during gimplification
         creates uninitialized variable uses.  If all is well, these will
         be optimized away, so don't warn now.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
        *walk_subtrees = 0;
      break;

    default:
      if (IS_TYPE_OR_DECL_P (t))
        *walk_subtrees = 0;
      break;
    }

  return NULL_TREE;
}

/* Look for inputs to PHI that are SSA_NAMEs that have empty definitions
   and warn about them.  */

static void
warn_uninitialized_phi (gimple phi)
{
  size_t i, n = gimple_phi_num_args (phi);

  /* Don't look at memory tags.  */
  if (!is_gimple_reg (gimple_phi_result (phi)))
    return;

  for (i = 0; i < n; ++i)
    {
      tree op = gimple_phi_arg_def (phi, i);
      if (TREE_CODE (op) == SSA_NAME)
        warn_uninit (op, "%qD may be used uninitialized in this function",
                     NULL);
    }
}

static unsigned int
warn_uninitialized_vars (bool warn_possibly_uninitialized)
{
  gimple_stmt_iterator gsi;
  basic_block bb;
  struct walk_data data;

  data.warn_possibly_uninitialized = warn_possibly_uninitialized;

  calculate_dominance_info (CDI_POST_DOMINATORS);

  FOR_EACH_BB (bb)
    {
      data.always_executed = dominated_by_p (CDI_POST_DOMINATORS,
                                             single_succ (ENTRY_BLOCK_PTR), bb);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          struct walk_stmt_info wi;
          data.stmt = gsi_stmt (gsi);
          memset (&wi, 0, sizeof (wi));
          wi.info = &data;
          walk_gimple_op (gsi_stmt (gsi), warn_uninitialized_var, &wi);
        }
    }

  /* Post-dominator information can not be reliably updated.  Free it
     after the use.  */

  free_dominance_info (CDI_POST_DOMINATORS);
  return 0;
}

static unsigned int
execute_early_warn_uninitialized (void)
{
  /* Currently, this pass always runs, but execute_late_warn_uninitialized
     runs only with optimization.  With optimization we want to warn about
     possible uninitialized uses as late as possible, thus don't do it
     here.  However, without optimization we need to warn here about
     "may be uninitialized".  */
  warn_uninitialized_vars (/*warn_possibly_uninitialized=*/!optimize);
  return 0;
}

static unsigned int
execute_late_warn_uninitialized (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  /* Re-do the plain uninitialized variable check, as optimization may have
     straightened control flow.  Do this first so that we don't accidentally
     get a "may be" warning when we'd have seen an "is" warning later.  */
  warn_uninitialized_vars (/*warn_possibly_uninitialized=*/1);

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      warn_uninitialized_phi (gsi_stmt (gsi));

  return 0;
}

static bool
gate_warn_uninitialized (void)
{
  return warn_uninitialized != 0;
}

struct gimple_opt_pass pass_early_warn_uninitialized =
{
 {
  GIMPLE_PASS,
  NULL,                                 /* name */
  gate_warn_uninitialized,              /* gate */
  execute_early_warn_uninitialized,     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_ssa,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_late_warn_uninitialized =
{
 {
  GIMPLE_PASS,
  NULL,                                 /* name */
  gate_warn_uninitialized,              /* gate */
  execute_late_warn_uninitialized,      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_ssa,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */

void
execute_update_addresses_taken (bool do_optimize)
{
  tree var;
  referenced_var_iterator rvi;
  gimple_stmt_iterator gsi;
  basic_block bb;
  bitmap addresses_taken = BITMAP_ALLOC (NULL);
  bitmap not_reg_needs = BITMAP_ALLOC (NULL);
  bool update_vops = false;

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB (bb)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          enum gimple_code code = gimple_code (stmt);

          /* Note all addresses taken by the stmt.  */
          gimple_ior_addresses_taken (addresses_taken, stmt);

          /* If we have a call or an assignment, see if the lhs contains
             a local decl that requires not to be a gimple register.  */
          if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
            {
              tree lhs = gimple_get_lhs (stmt);

              /* We may not rewrite TMR_SYMBOL to SSA.  */
              if (lhs && TREE_CODE (lhs) == TARGET_MEM_REF
                  && TMR_SYMBOL (lhs))
                bitmap_set_bit (not_reg_needs, DECL_UID (TMR_SYMBOL (lhs)));

              /* A plain decl does not need it set.  */
              else if (lhs && handled_component_p (lhs))
                {
                  var = get_base_address (lhs);
                  if (DECL_P (var))
                    bitmap_set_bit (not_reg_needs, DECL_UID (var));
                }
            }
        }

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          size_t i;
          gimple phi = gsi_stmt (gsi);

          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree op = PHI_ARG_DEF (phi, i), var;
              if (TREE_CODE (op) == ADDR_EXPR
                  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
                  && DECL_P (var))
                bitmap_set_bit (addresses_taken, DECL_UID (var));
            }
        }
    }

  /* When possible, clear ADDRESSABLE bit or set the REGISTER bit
     and mark variable for conversion into SSA.  */
  if (optimize && do_optimize)
    FOR_EACH_REFERENCED_VAR (var, rvi)
      {
        /* Global variables and result decls cannot be changed.  */
        if (is_global_var (var)
            || TREE_CODE (var) == RESULT_DECL
            || bitmap_bit_p (addresses_taken, DECL_UID (var)))
          continue;

        if (TREE_ADDRESSABLE (var)
            /* Do not change TREE_ADDRESSABLE if we need to preserve var as
               a non-register.  Otherwise we are confused and forget to
               add virtual operands for it.  */
            && (!is_gimple_reg_type (TREE_TYPE (var))
                || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
          {
            TREE_ADDRESSABLE (var) = 0;
            if (is_gimple_reg (var))
              mark_sym_for_renaming (var);
            update_vops = true;
            if (dump_file)
              {
                fprintf (dump_file, "No longer having address taken ");
                print_generic_expr (dump_file, var, 0);
                fprintf (dump_file, "\n");
              }
          }
        if (!DECL_GIMPLE_REG_P (var)
            && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
            && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
                || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
            && !TREE_THIS_VOLATILE (var)
            && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
          {
            DECL_GIMPLE_REG_P (var) = 1;
            mark_sym_for_renaming (var);
            update_vops = true;
            if (dump_file)
              {
                fprintf (dump_file, "Decl is now a gimple register ");
                print_generic_expr (dump_file, var, 0);
                fprintf (dump_file, "\n");
              }
          }
      }

  /* Operand caches need to be recomputed for operands referencing the
     updated variables.  */
  if (update_vops)
    {
      FOR_EACH_BB (bb)
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          {
            gimple stmt = gsi_stmt (gsi);

            if (gimple_references_memory_p (stmt))
              update_stmt (stmt);
          }

      /* Update SSA form here, we are called as non-pass as well.  */
      update_ssa (TODO_update_ssa);
    }

  BITMAP_FREE (not_reg_needs);
  BITMAP_FREE (addresses_taken);
}

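/* Illustration (not part of GCC) of what execute_update_addresses_taken
   does, on a hypothetical C fragment:

     int a = 1;
     int *p = &a;    a is TREE_ADDRESSABLE while this reference exists

   As long as the address of a is taken, a must live in memory and is
   accessed through virtual operands.  If optimization deletes every
   address-taking reference, the code above clears TREE_ADDRESSABLE and
   marks a for renaming, so it can become a GIMPLE register (an SSA
   name) again.  */
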
struct gimple_opt_pass pass_update_address_taken =
{
 {
  GIMPLE_PASS,
  "addressables",                       /* name */
  NULL,                                 /* gate */
  NULL,                                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_ssa,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_update_address_taken
  | TODO_dump_func                      /* todo_flags_finish */
 }
};