re PR c++/31863 (g++-4.1: out of memory with -O1/-O2)
[gcc.git] / gcc / tree-ssa.c
1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "flags.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "ggc.h"
29 #include "langhooks.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "output.h"
33 #include "expr.h"
34 #include "function.h"
35 #include "diagnostic.h"
36 #include "bitmap.h"
37 #include "pointer-set.h"
38 #include "tree-flow.h"
39 #include "tree-gimple.h"
40 #include "tree-inline.h"
41 #include "varray.h"
42 #include "timevar.h"
43 #include "hashtab.h"
44 #include "tree-dump.h"
45 #include "tree-pass.h"
46 #include "toplev.h"
47
48 /* Remove the corresponding arguments from the PHI nodes in E's
49 destination block and redirect E to DEST.  Return the redirected edge.
50 The list of removed arguments is stored in PENDING_STMT (e). */
51
52 edge
53 ssa_redirect_edge (edge e, basic_block dest)
54 {
55 tree phi;
56 tree list = NULL, *last = &list;
57 tree src, dst, node;
58
59 /* Remove the appropriate PHI arguments in E's destination block. */
60 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
61 {
62 if (PHI_ARG_DEF (phi, e->dest_idx) == NULL_TREE)
63 continue;
64
65 src = PHI_ARG_DEF (phi, e->dest_idx);
66 dst = PHI_RESULT (phi);
67 node = build_tree_list (dst, src);
68 *last = node;
69 last = &TREE_CHAIN (node);
70 }
71
72 e = redirect_edge_succ_nodup (e, dest);
73 PENDING_STMT (e) = list;
74
75 return e;
76 }
77
78 /* Add the PHI arguments queued in the PENDING_STMT list on edge E to
79 the PHI nodes in E's destination block. */
80
81 void
82 flush_pending_stmts (edge e)
83 {
84 tree phi, arg;
85
86 if (!PENDING_STMT (e))
87 return;
88
89 for (phi = phi_nodes (e->dest), arg = PENDING_STMT (e);
90 phi;
91 phi = PHI_CHAIN (phi), arg = TREE_CHAIN (arg))
92 {
93 tree def = TREE_VALUE (arg);
94 add_phi_arg (phi, def, e);
95 }
96
97 PENDING_STMT (e) = NULL;
98 }
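/* A minimal usage sketch (illustrative only; nothing in this file calls
   the functions this way, and the names E and NEW_DEST are hypothetical):
   the two functions above are meant to be used as a pair by passes that
   redirect an edge and later recreate the PHI arguments in the new
   destination block.

     edge e2 = ssa_redirect_edge (e, new_dest);
     flush_pending_stmts (e2);

   ssa_redirect_edge queues the removed PHI arguments in PENDING_STMT (e2);
   once the caller has set up matching PHI nodes in NEW_DEST (for instance
   with create_phi_node), flush_pending_stmts adds the queued values, in
   order, as E2's arguments on those PHI nodes.  */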
99
100 /* Return true if SSA_NAME is malformed and mark it visited.
101
102 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
103 operand. */
104
105 static bool
106 verify_ssa_name (tree ssa_name, bool is_virtual)
107 {
108 if (TREE_CODE (ssa_name) != SSA_NAME)
109 {
110 error ("expected an SSA_NAME object");
111 return true;
112 }
113
114 if (TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
115 {
116 error ("type mismatch between an SSA_NAME and its symbol");
117 return true;
118 }
119
120 if (SSA_NAME_IN_FREE_LIST (ssa_name))
121 {
122 error ("found an SSA_NAME that had been released into the free pool");
123 return true;
124 }
125
126 if (is_virtual && is_gimple_reg (ssa_name))
127 {
128 error ("found a virtual definition for a GIMPLE register");
129 return true;
130 }
131
132 if (!is_virtual && !is_gimple_reg (ssa_name))
133 {
134 error ("found a real definition for a non-register");
135 return true;
136 }
137
138 if (is_virtual && var_ann (SSA_NAME_VAR (ssa_name))
139 && get_subvars_for_var (SSA_NAME_VAR (ssa_name)) != NULL)
140 {
141 error ("found real variable when subvariables should have appeared");
142 return true;
143 }
144
145 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
146 && !IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name)))
147 {
148 error ("found a default name with a non-empty defining statement");
149 return true;
150 }
151
152 return false;
153 }
154
155
156 /* Return true if the definition of SSA_NAME at block BB is malformed.
157
158 STMT is the statement where SSA_NAME is created.
159
160 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
161 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
162 it means that the block in that array slot contains the
163 definition of SSA_NAME.
164
165 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
166
167 static bool
168 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
169 tree stmt, bool is_virtual)
170 {
171 if (verify_ssa_name (ssa_name, is_virtual))
172 goto err;
173
174 if (definition_block[SSA_NAME_VERSION (ssa_name)])
175 {
176 error ("SSA_NAME created in two different blocks %i and %i",
177 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
178 goto err;
179 }
180
181 definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
182
183 if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
184 {
185 error ("SSA_NAME_DEF_STMT is wrong");
186 fprintf (stderr, "Expected definition statement:\n");
187 print_generic_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), TDF_VOPS);
188 fprintf (stderr, "\nActual definition statement:\n");
189 print_generic_stmt (stderr, stmt, TDF_VOPS);
190 goto err;
191 }
192
193 return false;
194
195 err:
196 fprintf (stderr, "while verifying SSA_NAME ");
197 print_generic_expr (stderr, ssa_name, 0);
198 fprintf (stderr, " in statement\n");
199 print_generic_stmt (stderr, stmt, TDF_VOPS);
200
201 return true;
202 }
203
204
205 /* Return true if the use of SSA_NAME at statement STMT in block BB is
206 malformed.
207
208 DEF_BB is the block where SSA_NAME was found to be created.
209
210 IDOM contains immediate dominator information for the flowgraph.
211
212 CHECK_ABNORMAL is true if the caller wants to check whether this use
213 is flowing through an abnormal edge (only used when checking PHI
214 arguments).
215
216 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
217 that are defined before STMT in basic block BB. */
218
219 static bool
220 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
221 tree stmt, bool check_abnormal, bitmap names_defined_in_bb)
222 {
223 bool err = false;
224 tree ssa_name = USE_FROM_PTR (use_p);
225
226 if (!TREE_VISITED (ssa_name))
227 if (verify_imm_links (stderr, ssa_name))
228 err = true;
229
230 TREE_VISITED (ssa_name) = 1;
231
232 if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name))
233 && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
234 ; /* Default definitions have empty statements. Nothing to do. */
235 else if (!def_bb)
236 {
237 error ("missing definition");
238 err = true;
239 }
240 else if (bb != def_bb
241 && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
242 {
243 error ("definition in block %i does not dominate use in block %i",
244 def_bb->index, bb->index);
245 err = true;
246 }
247 else if (bb == def_bb
248 && names_defined_in_bb != NULL
249 && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
250 {
251 error ("definition in block %i follows the use", def_bb->index);
252 err = true;
253 }
254
255 if (check_abnormal
256 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
257 {
258 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
259 err = true;
260 }
261
262 /* Make sure the use is on an appropriate immediate-use list by checking
263 that the previous element refers to the same SSA name. */
264 if (use_p->prev == NULL)
265 {
266 error ("no immediate_use list");
267 err = true;
268 }
269 else
270 {
271 tree listvar;
272 if (use_p->prev->use == NULL)
273 listvar = use_p->prev->stmt;
274 else
275 listvar = USE_FROM_PTR (use_p->prev);
276 if (listvar != ssa_name)
277 {
278 error ("wrong immediate use list");
279 err = true;
280 }
281 }
282
283 if (err)
284 {
285 fprintf (stderr, "for SSA_NAME: ");
286 print_generic_expr (stderr, ssa_name, TDF_VOPS);
287 fprintf (stderr, " in statement:\n");
288 print_generic_stmt (stderr, stmt, TDF_VOPS);
289 }
290
291 return err;
292 }
293
294
295 /* Return true if any of the arguments for PHI node PHI at block BB is
296 malformed.
297
298 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
299 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
300 it means that the block in that array slot contains the
301 definition of SSA_NAME. */
302
303 static bool
304 verify_phi_args (tree phi, basic_block bb, basic_block *definition_block)
305 {
306 edge e;
307 bool err = false;
308 unsigned i, phi_num_args = PHI_NUM_ARGS (phi);
309
310 if (EDGE_COUNT (bb->preds) != phi_num_args)
311 {
312 error ("incoming edge count does not match number of PHI arguments");
313 err = true;
314 goto error;
315 }
316
317 for (i = 0; i < phi_num_args; i++)
318 {
319 use_operand_p op_p = PHI_ARG_DEF_PTR (phi, i);
320 tree op = USE_FROM_PTR (op_p);
321
322 e = EDGE_PRED (bb, i);
323
324 if (op == NULL_TREE)
325 {
326 error ("PHI argument is missing for edge %d->%d",
327 e->src->index,
328 e->dest->index);
329 err = true;
330 goto error;
331 }
332
333 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
334 {
335 error ("PHI argument is not SSA_NAME, or invariant");
336 err = true;
337 }
338
339 if (TREE_CODE (op) == SSA_NAME)
340 {
341 err = verify_ssa_name (op, !is_gimple_reg (PHI_RESULT (phi)));
342 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
343 op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
344 }
345
346 if (e->dest != bb)
347 {
348 error ("wrong edge %d->%d for PHI argument",
349 e->src->index, e->dest->index);
350 err = true;
351 }
352
353 if (err)
354 {
355 fprintf (stderr, "PHI argument\n");
356 print_generic_stmt (stderr, op, TDF_VOPS);
357 goto error;
358 }
359 }
360
361 error:
362 if (err)
363 {
364 fprintf (stderr, "for PHI node\n");
365 print_generic_stmt (stderr, phi, TDF_VOPS|TDF_MEMSYMS);
366 }
367
368
369 return err;
370 }
371
372
373 static void
374 verify_flow_insensitive_alias_info (void)
375 {
376 tree var;
377 referenced_var_iterator rvi;
378
379 FOR_EACH_REFERENCED_VAR (var, rvi)
380 {
381 unsigned int j;
382 bitmap aliases;
383 tree alias;
384 bitmap_iterator bi;
385
386 if (!MTAG_P (var) || !MTAG_ALIASES (var))
387 continue;
388
389 aliases = MTAG_ALIASES (var);
390
391 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, j, bi)
392 {
393 alias = referenced_var (j);
394
395 if (TREE_CODE (alias) != MEMORY_PARTITION_TAG
396 && !may_be_aliased (alias))
397 {
398 error ("non-addressable variable inside an alias set");
399 debug_variable (alias);
400 goto err;
401 }
402 }
403 }
404
405 return;
406
407 err:
408 debug_variable (var);
409 internal_error ("verify_flow_insensitive_alias_info failed");
410 }
411
412
413 static void
414 verify_flow_sensitive_alias_info (void)
415 {
416 size_t i;
417 tree ptr;
418
419 for (i = 1; i < num_ssa_names; i++)
420 {
421 tree var;
422 var_ann_t ann;
423 struct ptr_info_def *pi;
424
425
426 ptr = ssa_name (i);
427 if (!ptr)
428 continue;
429
430 /* We only care about pointers that are actually referenced in the
431 program. */
432 if (!POINTER_TYPE_P (TREE_TYPE (ptr)) || !TREE_VISITED (ptr))
433 continue;
434
435 /* RESULT_DECL is special. If it's a GIMPLE register, then it
436 is written to only once, in the return statement.
437 Otherwise, aggregate RESULT_DECLs may be written to more than
438 once through virtual operands. */
439 var = SSA_NAME_VAR (ptr);
440 if (TREE_CODE (var) == RESULT_DECL
441 && is_gimple_reg (ptr))
442 continue;
443
444 pi = SSA_NAME_PTR_INFO (ptr);
445 if (pi == NULL)
446 continue;
447
448 ann = var_ann (var);
449 if (pi->is_dereferenced && !pi->name_mem_tag && !ann->symbol_mem_tag)
450 {
451 error ("dereferenced pointers should have a name or a symbol tag");
452 goto err;
453 }
454
455 if (pi->name_mem_tag
456 && (pi->pt_vars == NULL || bitmap_empty_p (pi->pt_vars)))
457 {
458 error ("pointers with a memory tag, should have points-to sets");
459 goto err;
460 }
461
462 if (pi->value_escapes_p && pi->name_mem_tag)
463 {
464 tree t = memory_partition (pi->name_mem_tag);
465 if (t == NULL_TREE)
466 t = pi->name_mem_tag;
467
468 if (!is_call_clobbered (t))
469 {
470 error ("pointer escapes but its name tag is not call-clobbered");
471 goto err;
472 }
473 }
474 }
475
476 return;
477
478 err:
479 debug_variable (ptr);
480 internal_error ("verify_flow_sensitive_alias_info failed");
481 }
482
483
484 /* Verify the consistency of call clobbering information. */
485
486 static void
487 verify_call_clobbering (void)
488 {
489 unsigned int i;
490 bitmap_iterator bi;
491 tree var;
492 referenced_var_iterator rvi;
493
494 /* At all times, the call_clobbered flag on a variable should agree
495 with the call_clobbered_vars bitmap. Verify both that
496 everything in call_clobbered_vars is marked
497 call_clobbered, and that everything marked
498 call_clobbered is in call_clobbered_vars. */
499 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
500 {
501 var = referenced_var (i);
502
503 if (memory_partition (var))
504 var = memory_partition (var);
505
506 if (!MTAG_P (var) && !var_ann (var)->call_clobbered)
507 {
508 error ("variable in call_clobbered_vars but not marked "
509 "call_clobbered");
510 debug_variable (var);
511 goto err;
512 }
513 }
514
515 FOR_EACH_REFERENCED_VAR (var, rvi)
516 {
517 if (is_gimple_reg (var))
518 continue;
519
520 if (memory_partition (var))
521 var = memory_partition (var);
522
523 if (!MTAG_P (var)
524 && var_ann (var)->call_clobbered
525 && !bitmap_bit_p (gimple_call_clobbered_vars (cfun), DECL_UID (var)))
526 {
527 error ("variable marked call_clobbered but not in "
528 "call_clobbered_vars bitmap.");
529 debug_variable (var);
530 goto err;
531 }
532 }
533
534 return;
535
536 err:
537 internal_error ("verify_call_clobbering failed");
538 }
539
540
541 /* Verify invariants in memory partitions. */
542
543 static void
544 verify_memory_partitions (void)
545 {
546 unsigned i;
547 tree mpt;
548 VEC(tree,heap) *mpt_table = gimple_ssa_operands (cfun)->mpt_table;
549 struct pointer_set_t *partitioned_syms = pointer_set_create ();
550
551 for (i = 0; VEC_iterate (tree, mpt_table, i, mpt); i++)
552 {
553 unsigned j;
554 bitmap_iterator bj;
555
556 if (MPT_SYMBOLS (mpt) == NULL)
557 {
558 error ("Memory partitions should have at least one symbol");
559 debug_variable (mpt);
560 goto err;
561 }
562
563 EXECUTE_IF_SET_IN_BITMAP (MPT_SYMBOLS (mpt), 0, j, bj)
564 {
565 tree var = referenced_var (j);
566 if (pointer_set_insert (partitioned_syms, var))
567 {
568 error ("Partitioned symbols should belong to exactly one "
569 "partition");
570 debug_variable (var);
571 goto err;
572 }
573 }
574 }
575
576 pointer_set_destroy (partitioned_syms);
577
578 return;
579
580 err:
581 internal_error ("verify_memory_partitions failed");
582 }
583
584
585 /* Verify the consistency of aliasing information. */
586
587 static void
588 verify_alias_info (void)
589 {
590 verify_flow_sensitive_alias_info ();
591 verify_call_clobbering ();
592 verify_flow_insensitive_alias_info ();
593 verify_memory_partitions ();
594 }
595
596
597 /* Verify common invariants in the SSA web.
598 TODO: verify the variable annotations. */
599
600 void
601 verify_ssa (bool check_modified_stmt)
602 {
603 size_t i;
604 basic_block bb;
605 basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
606 ssa_op_iter iter;
607 tree op;
608 enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
609 bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
610
611 gcc_assert (!need_ssa_update_p ());
612
613 verify_stmts ();
614
615 timevar_push (TV_TREE_SSA_VERIFY);
616
617 /* Keep track of SSA names present in the IL. */
618 for (i = 1; i < num_ssa_names; i++)
619 {
620 tree name = ssa_name (i);
621 if (name)
622 {
623 tree stmt;
624 TREE_VISITED (name) = 0;
625
626 stmt = SSA_NAME_DEF_STMT (name);
627 if (!IS_EMPTY_STMT (stmt))
628 {
629 basic_block bb = bb_for_stmt (stmt);
630 verify_def (bb, definition_block,
631 name, stmt, !is_gimple_reg (name));
632
633 }
634 }
635 }
636
637 calculate_dominance_info (CDI_DOMINATORS);
638
639 /* Now verify all the uses and make sure they agree with the definitions
640 found in the previous pass. */
641 FOR_EACH_BB (bb)
642 {
643 edge e;
644 tree phi;
645 edge_iterator ei;
646 block_stmt_iterator bsi;
647
648 /* Make sure that all edges have a clear 'aux' field. */
649 FOR_EACH_EDGE (e, ei, bb->preds)
650 {
651 if (e->aux)
652 {
653 error ("AUX pointer initialized for edge %d->%d", e->src->index,
654 e->dest->index);
655 goto err;
656 }
657 }
658
659 /* Verify the arguments for every PHI node in the block. */
660 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
661 {
662 if (verify_phi_args (phi, bb, definition_block))
663 goto err;
664
665 bitmap_set_bit (names_defined_in_bb,
666 SSA_NAME_VERSION (PHI_RESULT (phi)));
667 }
668
669 /* Now verify all the uses and vuses in every statement of the block. */
670 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
671 {
672 tree stmt = bsi_stmt (bsi);
673 use_operand_p use_p;
674
675 if (check_modified_stmt && stmt_modified_p (stmt))
676 {
677 error ("stmt (%p) marked modified after optimization pass: ",
678 (void *)stmt);
679 print_generic_stmt (stderr, stmt, TDF_VOPS);
680 goto err;
681 }
682
683 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
684 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) != SSA_NAME)
685 {
686 tree lhs, base_address;
687
688 lhs = GIMPLE_STMT_OPERAND (stmt, 0);
689 base_address = get_base_address (lhs);
690
691 if (base_address
692 && gimple_aliases_computed_p (cfun)
693 && SSA_VAR_P (base_address)
694 && !stmt_ann (stmt)->has_volatile_ops
695 && ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
696 {
697 error ("statement makes a memory store, but has no VDEFS");
698 print_generic_stmt (stderr, stmt, TDF_VOPS);
699 goto err;
700 }
701 }
702
703 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_VIRTUALS)
704 {
705 if (verify_ssa_name (op, true))
706 {
707 error ("in statement");
708 print_generic_stmt (stderr, stmt, TDF_VOPS|TDF_MEMSYMS);
709 goto err;
710 }
711 }
712
713 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE|SSA_OP_DEF)
714 {
715 if (verify_ssa_name (op, false))
716 {
717 error ("in statement");
718 print_generic_stmt (stderr, stmt, TDF_VOPS|TDF_MEMSYMS);
719 goto err;
720 }
721 }
722
723 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
724 {
725 op = USE_FROM_PTR (use_p);
726 if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
727 use_p, stmt, false, names_defined_in_bb))
728 goto err;
729 }
730
731 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
732 bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
733 }
734
735 bitmap_clear (names_defined_in_bb);
736 }
737
738 /* Finally, verify alias information. */
739 if (gimple_aliases_computed_p (cfun))
740 verify_alias_info ();
741
742 free (definition_block);
743
744 /* Restore the dominance information to its prior known state, so
745 that we do not perturb the compiler's subsequent behavior. */
746 if (orig_dom_state == DOM_NONE)
747 free_dominance_info (CDI_DOMINATORS);
748 else
749 set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
750
751 BITMAP_FREE (names_defined_in_bb);
752 timevar_pop (TV_TREE_SSA_VERIFY);
753 return;
754
755 err:
756 internal_error ("verify_ssa failed");
757 }
758
759 /* Return true if the UIDs of the two int_tree_maps VA and VB are equal. */
760
761 int
762 int_tree_map_eq (const void *va, const void *vb)
763 {
764 const struct int_tree_map *a = (const struct int_tree_map *) va;
765 const struct int_tree_map *b = (const struct int_tree_map *) vb;
766 return (a->uid == b->uid);
767 }
768
769 /* Hash the UID of an int_tree_map. */
770
771 unsigned int
772 int_tree_map_hash (const void *item)
773 {
774 return ((const struct int_tree_map *)item)->uid;
775 }
776
777 /* Return true if the UID in the static_var_ann_d VA equals DECL_UID of the tree VB. */
778
779 static int
780 var_ann_eq (const void *va, const void *vb)
781 {
782 const struct static_var_ann_d *a = (const struct static_var_ann_d *) va;
783 const_tree const b = (const_tree) vb;
784 return (a->uid == DECL_UID (b));
785 }
786
787 /* Hash the UID of a static_var_ann_d. */
788
789 static unsigned int
790 var_ann_hash (const void *item)
791 {
792 return ((const struct static_var_ann_d *)item)->uid;
793 }
794
795
796 /* Initialize global DFA and SSA structures. */
797
798 void
799 init_tree_ssa (void)
800 {
801 cfun->gimple_df = GGC_CNEW (struct gimple_df);
802 cfun->gimple_df->referenced_vars = htab_create_ggc (20, int_tree_map_hash,
803 int_tree_map_eq, NULL);
804 cfun->gimple_df->default_defs = htab_create_ggc (20, int_tree_map_hash,
805 int_tree_map_eq, NULL);
806 cfun->gimple_df->var_anns = htab_create_ggc (20, var_ann_hash,
807 var_ann_eq, NULL);
808 cfun->gimple_df->call_clobbered_vars = BITMAP_GGC_ALLOC ();
809 cfun->gimple_df->addressable_vars = BITMAP_GGC_ALLOC ();
810 init_ssanames ();
811 init_phinodes ();
812 }
813
814
815 /* Deallocate memory associated with SSA data structures for the current function. */
816
817 void
818 delete_tree_ssa (void)
819 {
820 size_t i;
821 basic_block bb;
822 block_stmt_iterator bsi;
823 referenced_var_iterator rvi;
824 tree var;
825
826 /* Release any ssa_names still in use. */
827 for (i = 0; i < num_ssa_names; i++)
828 {
829 tree var = ssa_name (i);
830 if (var && TREE_CODE (var) == SSA_NAME)
831 {
832 SSA_NAME_IMM_USE_NODE (var).prev = &(SSA_NAME_IMM_USE_NODE (var));
833 SSA_NAME_IMM_USE_NODE (var).next = &(SSA_NAME_IMM_USE_NODE (var));
834 }
835 release_ssa_name (var);
836 }
837
838 /* Remove annotations from every tree in the function. */
839 FOR_EACH_BB (bb)
840 {
841 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
842 {
843 tree stmt = bsi_stmt (bsi);
844 stmt_ann_t ann = get_stmt_ann (stmt);
845
846 free_ssa_operands (&ann->operands);
847 ann->addresses_taken = 0;
848 mark_stmt_modified (stmt);
849 }
850 set_phi_nodes (bb, NULL);
851 }
852
853 /* Remove annotations from every referenced variable. */
854 FOR_EACH_REFERENCED_VAR (var, rvi)
855 {
856 if (var->base.ann)
857 ggc_free (var->base.ann);
858 var->base.ann = NULL;
859 }
860 htab_delete (gimple_referenced_vars (cfun));
861 cfun->gimple_df->referenced_vars = NULL;
862
863 fini_ssanames ();
864 fini_phinodes ();
865 /* We no longer maintain the SSA operand cache at this point. */
866 fini_ssa_operands ();
867
868 cfun->gimple_df->global_var = NULL_TREE;
869
870 htab_delete (cfun->gimple_df->default_defs);
871 cfun->gimple_df->default_defs = NULL;
872 htab_delete (cfun->gimple_df->var_anns);
873 cfun->gimple_df->var_anns = NULL;
874 cfun->gimple_df->call_clobbered_vars = NULL;
875 cfun->gimple_df->addressable_vars = NULL;
876 cfun->gimple_df->modified_noreturn_calls = NULL;
877 if (gimple_aliases_computed_p (cfun))
878 {
879 delete_alias_heapvars ();
880 gcc_assert (!need_ssa_update_p ());
881 }
882 cfun->gimple_df->aliases_computed_p = false;
883 delete_mem_ref_stats (cfun);
884
885 cfun->gimple_df = NULL;
886 }
887
888 /* Helper function for useless_type_conversion_p. */
889
890 static bool
891 useless_type_conversion_p_1 (tree outer_type, tree inner_type)
892 {
893 /* Qualifiers on value types do not matter. */
894 inner_type = TYPE_MAIN_VARIANT (inner_type);
895 outer_type = TYPE_MAIN_VARIANT (outer_type);
896
897 if (inner_type == outer_type)
898 return true;
899
900 /* If we know the canonical types, compare them. */
901 if (TYPE_CANONICAL (inner_type)
902 && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
903 return true;
904
905 /* Changes in machine mode are never useless conversions. */
906 if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type))
907 return false;
908
909 /* If both the inner and outer types are integral types, then the
910 conversion is not necessary if they have the same mode,
911 signedness and precision. */
912 if (INTEGRAL_TYPE_P (inner_type)
913 && INTEGRAL_TYPE_P (outer_type))
914 {
915 /* Preserve changes in signedness or precision. */
916 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
917 || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
918 return false;
919
920 /* Conversions from a non-base to a base type are not useless.
921 This way we preserve the invariant that arithmetic is done
922 in base types only. */
923 if (TREE_TYPE (inner_type)
924 && TREE_TYPE (inner_type) != inner_type
925 && (TREE_TYPE (outer_type) == outer_type
926 || TREE_TYPE (outer_type) == NULL_TREE))
927 return false;
928
929 /* We don't need to preserve changes in the types' minimum or
930 maximum values in general, as these do not generate code
931 unless the types' precisions are different. */
932
933 return true;
934 }
935
936 /* Scalar floating point types with the same mode are compatible. */
937 else if (SCALAR_FLOAT_TYPE_P (inner_type)
938 && SCALAR_FLOAT_TYPE_P (outer_type))
939 return true;
940
941 /* We need to take special care recursing to pointed-to types. */
942 else if (POINTER_TYPE_P (inner_type)
943 && POINTER_TYPE_P (outer_type))
944 {
945 /* Don't lose casts between pointers to volatile and non-volatile
946 qualified types. Doing so would result in changing the semantics
947 of later accesses. */
948 if ((TYPE_VOLATILE (TREE_TYPE (outer_type))
949 != TYPE_VOLATILE (TREE_TYPE (inner_type)))
950 && TYPE_VOLATILE (TREE_TYPE (outer_type)))
951 return false;
952
953 /* Do not lose casts between pointers with different
954 TYPE_REF_CAN_ALIAS_ALL setting or alias sets. */
955 if ((TYPE_REF_CAN_ALIAS_ALL (inner_type)
956 != TYPE_REF_CAN_ALIAS_ALL (outer_type))
957 || (get_alias_set (TREE_TYPE (inner_type))
958 != get_alias_set (TREE_TYPE (outer_type))))
959 return false;
960
961 /* Do not lose casts from const qualified to non-const
962 qualified. */
963 if ((TYPE_READONLY (TREE_TYPE (outer_type))
964 != TYPE_READONLY (TREE_TYPE (inner_type)))
965 && TYPE_READONLY (TREE_TYPE (inner_type)))
966 return false;
967
968 /* Do not lose casts to restrict qualified pointers. */
969 if ((TYPE_RESTRICT (outer_type)
970 != TYPE_RESTRICT (inner_type))
971 && TYPE_RESTRICT (outer_type))
972 return false;
973
974 /* Otherwise pointers/references are equivalent if their pointed
975 to types are effectively the same. We can strip qualifiers
976 on pointed-to types for further comparison, which is done in
977 the callee. */
978 return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
979 TREE_TYPE (inner_type));
980 }
981
982 /* Recurse for complex types. */
983 else if (TREE_CODE (inner_type) == COMPLEX_TYPE
984 && TREE_CODE (outer_type) == COMPLEX_TYPE)
985 return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
986 TREE_TYPE (inner_type));
987
988 /* Recurse for vector types with the same number of subparts. */
989 else if (TREE_CODE (inner_type) == VECTOR_TYPE
990 && TREE_CODE (outer_type) == VECTOR_TYPE
991 && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
992 return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
993 TREE_TYPE (inner_type));
994
995 /* For aggregates we may need to fall back to structural equality
996 checks. */
997 else if (AGGREGATE_TYPE_P (inner_type)
998 && AGGREGATE_TYPE_P (outer_type))
999 {
1000 /* Different types of aggregates are incompatible. */
1001 if (TREE_CODE (inner_type) != TREE_CODE (outer_type))
1002 return false;
1003
1004 /* ??? Add structural equivalence check. */
1005
1006 /* ??? This should eventually just return false. */
1007 return lang_hooks.types_compatible_p (inner_type, outer_type);
1008 }
1009
1010 return false;
1011 }
1012
1013 /* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
1014 useless type conversion, otherwise return false.
1015
1016 This function implicitly defines the middle-end type system. With
1017 the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
1018 holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
1019 the following invariants shall be fulfilled:
1020
1021 1) useless_type_conversion_p is transitive.
1022 If a < b and b < c then a < c.
1023
1024 2) useless_type_conversion_p is not symmetric.
1025 From a < b does not follow a > b.
1026
1027 3) Types define the available set of operations applicable to values.
1028 A type conversion is useless if the operations for the target type
1029 is a subset of the operations for the source type. For example
1030 casts to void* are useless, casts from void* are not (void* can't
1031 be dereferenced or offsetted, but copied, hence its set of operations
1032 is a strict subset of that of all other data pointer types). Casts
1033 to const T* are useless (can't be written to), casts from const T*
1034 to T* are not. */
1035
1036 bool
1037 useless_type_conversion_p (tree outer_type, tree inner_type)
1038 {
1039 /* If the outer type is (void *), then the conversion is not
1040 necessary. We have to make sure to not apply this while
1041 recursing though. */
1042 if (POINTER_TYPE_P (inner_type)
1043 && POINTER_TYPE_P (outer_type)
1044 && TREE_CODE (TREE_TYPE (outer_type)) == VOID_TYPE)
1045 return true;
1046
1047 return useless_type_conversion_p_1 (outer_type, inner_type);
1048 }
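/* Worked example (an illustrative sketch, not used by the compiler):
   by the rules above, converting an "int *" value to "void *" is a
   useless conversion because void * supports only a subset of the
   operations of int * (it cannot be dereferenced or offsetted, only
   copied), so

     useless_type_conversion_p (ptr_type_node,
                                build_pointer_type (integer_type_node))

   returns true via the special case above, whereas the reverse
   direction

     useless_type_conversion_p (build_pointer_type (integer_type_node),
                                ptr_type_node)

   falls through to useless_type_conversion_p_1 and returns false,
   since a void pointer may not simply be treated as an int pointer.
   ptr_type_node and integer_type_node are the global trees for
   "void *" and "int".  */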
1049
1050 /* Return true if a conversion from either of TYPE1 and TYPE2 to
1051 the other is not required. Otherwise return false. */
1052
1053 bool
1054 types_compatible_p (tree type1, tree type2)
1055 {
1056 return (type1 == type2
1057 || (useless_type_conversion_p (type1, type2)
1058 && useless_type_conversion_p (type2, type1)));
1059 }
1060
1061 /* Return true if EXPR is a useless type conversion, otherwise return
1062 false. */
1063
1064 bool
1065 tree_ssa_useless_type_conversion (tree expr)
1066 {
1067 /* If we have an assignment that merely uses a NOP_EXPR to change
1068 the top of the RHS to the type of the LHS and the type conversion
1069 is "safe", then strip away the type conversion so that we can
1070 enter LHS = RHS into the const_and_copies table. */
1071 if (TREE_CODE (expr) == NOP_EXPR || TREE_CODE (expr) == CONVERT_EXPR
1072 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1073 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1074 /* FIXME: Use of GENERIC_TREE_TYPE here is a temporary measure to work
1075 around known bugs with GIMPLE_MODIFY_STMTs appearing in places
1076 they shouldn't. See PR 30391. */
1077 return useless_type_conversion_p
1078 (TREE_TYPE (expr),
1079 GENERIC_TREE_TYPE (TREE_OPERAND (expr, 0)));
1080
1081 return false;
1082 }
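/* Callers typically use the predicate above to strip such a conversion
   in place, along the lines of (an illustrative sketch only):

     if (tree_ssa_useless_type_conversion (expr))
       expr = TREE_OPERAND (expr, 0);

   which replaces the NOP_EXPR/CONVERT_EXPR node with its operand when
   the conversion does not change the value.  */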
1083
1084
1085 /* Internal helper for walk_use_def_chains. VAR, FN and DATA are as
1086 described in walk_use_def_chains.
1087
1088 VISITED is a pointer set used to mark visited SSA_NAMEs to avoid
1089 infinite loops. We used to have a bitmap for this to just mark
1090 SSA versions we had visited. But non-sparse bitmaps are way too
1091 expensive, while sparse bitmaps may cause quadratic behavior.
1092
1093 IS_DFS is true if the caller wants to perform a depth-first search
1094 when visiting PHI nodes. A DFS will visit each PHI argument and
1095 call FN after each one. Otherwise, all the arguments are
1096 visited first and then FN is called with each of the visited
1097 arguments in a separate pass. */
1098
1099 static bool
1100 walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
1101 struct pointer_set_t *visited, bool is_dfs)
1102 {
1103 tree def_stmt;
1104
1105 if (pointer_set_insert (visited, var))
1106 return false;
1107
1108 def_stmt = SSA_NAME_DEF_STMT (var);
1109
1110 if (TREE_CODE (def_stmt) != PHI_NODE)
1111 {
1112 /* If we reached the end of the use-def chain, call FN. */
1113 return fn (var, def_stmt, data);
1114 }
1115 else
1116 {
1117 int i;
1118
1119 /* When doing a breadth-first search, call FN before following the
1120 use-def links for each argument. */
1121 if (!is_dfs)
1122 for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
1123 if (fn (PHI_ARG_DEF (def_stmt, i), def_stmt, data))
1124 return true;
1125
1126 /* Follow use-def links out of each PHI argument. */
1127 for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
1128 {
1129 tree arg = PHI_ARG_DEF (def_stmt, i);
1130
1131 /* ARG may be NULL for newly introduced PHI nodes. */
1132 if (arg
1133 && TREE_CODE (arg) == SSA_NAME
1134 && walk_use_def_chains_1 (arg, fn, data, visited, is_dfs))
1135 return true;
1136 }
1137
1138 /* When doing a depth-first search, call FN after following the
1139 use-def links for each argument. */
1140 if (is_dfs)
1141 for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
1142 if (fn (PHI_ARG_DEF (def_stmt, i), def_stmt, data))
1143 return true;
1144 }
1145
1146 return false;
1147 }
1148
1149
1150
1151 /* Walk use-def chains starting at the SSA variable VAR. Call
1152 function FN at each reaching definition found. FN takes three
1153 arguments: VAR, its defining statement (DEF_STMT) and a generic
1154 pointer to whatever state information that FN may want to maintain
1155 (DATA). FN can stop the walk by returning true; to continue
1156 the walk, FN should return false.
1157
1158 Note, that if DEF_STMT is a PHI node, the semantics are slightly
1159 different. The first argument to FN is no longer the original
1160 variable VAR, but the PHI argument currently being examined. If FN
1161 wants to get at VAR, it should call PHI_RESULT (PHI).
1162
1163 If IS_DFS is true, this function will:
1164
1165 1- walk the use-def chains for all the PHI arguments, and,
1166 2- call (*FN) (ARG, PHI, DATA) on all the PHI arguments.
1167
1168 If IS_DFS is false, the two steps above are done in reverse order
1169 (i.e., a breadth-first search). */
1170
1171 void
1172 walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
1173 bool is_dfs)
1174 {
1175 tree def_stmt;
1176
1177 gcc_assert (TREE_CODE (var) == SSA_NAME);
1178
1179 def_stmt = SSA_NAME_DEF_STMT (var);
1180
1181 /* We only need to recurse if the reaching definition comes from a PHI
1182 node. */
1183 if (TREE_CODE (def_stmt) != PHI_NODE)
1184 (*fn) (var, def_stmt, data);
1185 else
1186 {
1187 struct pointer_set_t *visited = pointer_set_create ();
1188 walk_use_def_chains_1 (var, fn, data, visited, is_dfs);
1189 pointer_set_destroy (visited);
1190 }
1191 }
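/* Example (an illustrative sketch; the callback name is hypothetical
   and nothing in this file uses it): a caller that wants to dump every
   reaching definition of an SSA name NAME could write

     static bool
     dump_reaching_def (tree var ATTRIBUTE_UNUSED, tree def_stmt,
                        void *data ATTRIBUTE_UNUSED)
     {
       print_generic_stmt (stderr, def_stmt, TDF_VOPS);
       return false;
     }

     walk_use_def_chains (name, dump_reaching_def, NULL, false);

   Returning false from the callback continues the walk; returning true
   stops it.  */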
1192
1193 \f
1194 /* Emit warnings for uninitialized variables. This is done in two passes.
1195
1196 The first pass notices real uses of SSA names with default definitions.
1197 Such uses are unconditionally uninitialized, and we can be certain that
1198 such a use is a mistake. This pass is run before most optimizations,
1199 so that we catch as many as we can.
1200
1201 The second pass follows PHI nodes to find uses that are potentially
1202 uninitialized. In this case we can't necessarily prove that the use
1203 is really uninitialized. This pass is run after most optimizations,
1204 so that we thread as many jumps as possible, and delete as much dead
1205 code as possible, in order to reduce false positives. We also look
1206 again for plain uninitialized variables, since optimization may have
1207 changed conditionally uninitialized to unconditionally uninitialized. */
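/* For instance (a user-level sketch, not part of the compiler), given

     int
     f (int flag)
     {
       int x;
       if (flag)
         x = 1;
       return x;
     }

   the first pass does not warn, because the use of x in the return
   statement is the result of a PHI node rather than a default
   definition; the second pass inspects the PHI arguments and emits
   "x may be used uninitialized in this function".  A plain
   "int y; return y;" is caught by the first pass with
   "y is used uninitialized in this function".  */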
1208
1209 /* Emit a warning for T, an SSA_NAME, being uninitialized. The exact
1210 warning text is in GMSGID and DATA, if non-null, is the expression providing the location. */
1211
1212 static void
1213 warn_uninit (tree t, const char *gmsgid, void *data)
1214 {
1215 tree var = SSA_NAME_VAR (t);
1216 tree def = SSA_NAME_DEF_STMT (t);
1217 tree context = (tree) data;
1218 location_t *locus;
1219 expanded_location xloc, floc;
1220
1221 /* Default uses (indicated by an empty definition statement),
1222 are uninitialized. */
1223 if (!IS_EMPTY_STMT (def))
1224 return;
1225
1226 /* Except for PARMs of course, which are always initialized. */
1227 if (TREE_CODE (var) == PARM_DECL)
1228 return;
1229
1230 /* Hard register variables get their initial value from the ether. */
1231 if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1232 return;
1233
1234 /* TREE_NO_WARNING either means we already warned, or the front end
1235 wishes to suppress the warning. */
1236 if (TREE_NO_WARNING (var))
1237 return;
1238
1239 locus = (context != NULL && EXPR_HAS_LOCATION (context)
1240 ? EXPR_LOCUS (context)
1241 : &DECL_SOURCE_LOCATION (var));
1242 warning (OPT_Wuninitialized, gmsgid, locus, var);
1243 xloc = expand_location (*locus);
1244 floc = expand_location (DECL_SOURCE_LOCATION (cfun->decl));
1245 if (xloc.file != floc.file
1246 || xloc.line < floc.line
1247 || xloc.line > LOCATION_LINE (cfun->function_end_locus))
1248 inform ("%J%qD was declared here", var, var);
1249
1250 TREE_NO_WARNING (var) = 1;
1251 }
1252
1253 /* Called via walk_tree, look for SSA_NAMEs that have empty definitions
1254 and warn about them. */
1255
1256 static tree
1257 warn_uninitialized_var (tree *tp, int *walk_subtrees, void *data)
1258 {
1259 tree t = *tp;
1260
1261 switch (TREE_CODE (t))
1262 {
1263 case SSA_NAME:
1264 /* We only do data flow with SSA_NAMEs, so that's all we
1265 can warn about. */
1266 warn_uninit (t, "%H%qD is used uninitialized in this function", data);
1267 *walk_subtrees = 0;
1268 break;
1269
1270 case REALPART_EXPR:
1271 case IMAGPART_EXPR:
1272 /* The total store transformation performed during gimplification
1273 creates uninitialized variable uses. If all is well, these will
1274 be optimized away, so don't warn now. */
1275 if (TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
1276 *walk_subtrees = 0;
1277 break;
1278
1279 default:
1280 if (IS_TYPE_OR_DECL_P (t))
1281 *walk_subtrees = 0;
1282 break;
1283 }
1284
1285 return NULL_TREE;
1286 }
1287
1288 /* Look for inputs to PHI that are SSA_NAMEs that have empty definitions
1289 and warn about them. */
1290
1291 static void
1292 warn_uninitialized_phi (tree phi)
1293 {
1294 int i, n = PHI_NUM_ARGS (phi);
1295
1296 /* Don't look at memory tags. */
1297 if (!is_gimple_reg (PHI_RESULT (phi)))
1298 return;
1299
1300 for (i = 0; i < n; ++i)
1301 {
1302 tree op = PHI_ARG_DEF (phi, i);
1303 if (TREE_CODE (op) == SSA_NAME)
1304 warn_uninit (op, "%H%qD may be used uninitialized in this function",
1305 NULL);
1306 }
1307 }
1308
1309 static unsigned int
1310 execute_early_warn_uninitialized (void)
1311 {
1312 block_stmt_iterator bsi;
1313 basic_block bb;
1314
1315 FOR_EACH_BB (bb)
1316 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1317 {
1318 tree context = bsi_stmt (bsi);
1319 walk_tree (bsi_stmt_ptr (bsi), warn_uninitialized_var,
1320 context, NULL);
1321 }
1322 return 0;
1323 }
1324
1325 static unsigned int
1326 execute_late_warn_uninitialized (void)
1327 {
1328 basic_block bb;
1329 tree phi;
1330
1331 /* Re-do the plain uninitialized variable check, as optimization may have
1332 straightened control flow. Do this first so that we don't accidentally
1333 get a "may be" warning when we'd have seen an "is" warning later. */
1334 execute_early_warn_uninitialized ();
1335
1336 FOR_EACH_BB (bb)
1337 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
1338 warn_uninitialized_phi (phi);
1339 return 0;
1340 }
1341
1342 static bool
1343 gate_warn_uninitialized (void)
1344 {
1345 return warn_uninitialized != 0;
1346 }
1347
1348 struct tree_opt_pass pass_early_warn_uninitialized =
1349 {
1350 NULL, /* name */
1351 gate_warn_uninitialized, /* gate */
1352 execute_early_warn_uninitialized, /* execute */
1353 NULL, /* sub */
1354 NULL, /* next */
1355 0, /* static_pass_number */
1356 0, /* tv_id */
1357 PROP_ssa, /* properties_required */
1358 0, /* properties_provided */
1359 0, /* properties_destroyed */
1360 0, /* todo_flags_start */
1361 0, /* todo_flags_finish */
1362 0 /* letter */
1363 };
1364
1365 struct tree_opt_pass pass_late_warn_uninitialized =
1366 {
1367 NULL, /* name */
1368 gate_warn_uninitialized, /* gate */
1369 execute_late_warn_uninitialized, /* execute */
1370 NULL, /* sub */
1371 NULL, /* next */
1372 0, /* static_pass_number */
1373 0, /* tv_id */
1374 PROP_ssa, /* properties_required */
1375 0, /* properties_provided */
1376 0, /* properties_destroyed */
1377 0, /* todo_flags_start */
1378 0, /* todo_flags_finish */
1379 0 /* letter */
1380 };
1381
1382 /* Compute TREE_ADDRESSABLE for local variables. */
1383
1384 static unsigned int
1385 execute_update_addresses_taken (void)
1386 {
1387 tree var;
1388 referenced_var_iterator rvi;
1389 block_stmt_iterator bsi;
1390 basic_block bb;
1391 bitmap addresses_taken = BITMAP_ALLOC (NULL);
1392 bitmap vars_updated = BITMAP_ALLOC (NULL);
1393 bool update_vops = false;
1394 tree phi;
1395
1396 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1397 the function body. */
1398 FOR_EACH_BB (bb)
1399 {
1400 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1401 {
1402 stmt_ann_t s_ann = stmt_ann (bsi_stmt (bsi));
1403
1404 if (s_ann->addresses_taken)
1405 bitmap_ior_into (addresses_taken, s_ann->addresses_taken);
1406 }
1407 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
1408 {
1409 unsigned i, phi_num_args = PHI_NUM_ARGS (phi);
1410 for (i = 0; i < phi_num_args; i++)
1411 {
1412 tree op = PHI_ARG_DEF (phi, i), var;
1413 if (TREE_CODE (op) == ADDR_EXPR
1414 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL_TREE
1415 && DECL_P (var))
1416 bitmap_set_bit (addresses_taken, DECL_UID (var));
1417 }
1418 }
1419 }
1420
1421 /* When possible, clear ADDRESSABLE bit and mark variable for conversion into
1422 SSA. */
1423 FOR_EACH_REFERENCED_VAR (var, rvi)
1424 if (!is_global_var (var)
1425 && TREE_CODE (var) != RESULT_DECL
1426 && TREE_ADDRESSABLE (var)
1427 && !bitmap_bit_p (addresses_taken, DECL_UID (var)))
1428 {
1429 TREE_ADDRESSABLE (var) = 0;
1430 if (is_gimple_reg (var))
1431 mark_sym_for_renaming (var);
1432 update_vops = true;
1433 bitmap_set_bit (vars_updated, DECL_UID (var));
1434 if (dump_file)
1435 {
1436 fprintf (dump_file, "No longer having address taken ");
1437 print_generic_expr (dump_file, var, 0);
1438 fprintf (dump_file, "\n");
1439 }
1440 }
1441
1442 /* Operand caches need to be recomputed for operands referencing the updated
1443 variables. */
1444 if (update_vops)
1445 FOR_EACH_BB (bb)
1446 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1447 {
1448 tree stmt = bsi_stmt (bsi);
1449
1450 if ((LOADED_SYMS (stmt)
1451 && bitmap_intersect_p (LOADED_SYMS (stmt), vars_updated))
1452 || (STORED_SYMS (stmt)
1453 && bitmap_intersect_p (STORED_SYMS (stmt), vars_updated)))
1454 update_stmt (stmt);
1455 }
1456 BITMAP_FREE (addresses_taken);
1457 BITMAP_FREE (vars_updated);
1458 return 0;
1459 }
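/* Informal example (hypothetical, not part of the pass): if the only
   statement that took the address of a local "int x" has been folded
   away by earlier passes, e.g.

     int x;
     x = 0;             was "*p = 0" with "int *p = &x" before folding
     return x;

   then no ADDR_EXPR of x remains, ADDRESSES_TAKEN does not contain
   DECL_UID (x), and the loop above clears TREE_ADDRESSABLE (x) and
   marks x for renaming, so the TODO_update_ssa at the end of this pass
   can rewrite x into SSA form.  */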
1460
1461 struct tree_opt_pass pass_update_address_taken =
1462 {
1463 "addressables", /* name */
1464 NULL, /* gate */
1465 execute_update_addresses_taken, /* execute */
1466 NULL, /* sub */
1467 NULL, /* next */
1468 0, /* static_pass_number */
1469 0, /* tv_id */
1470 PROP_ssa, /* properties_required */
1471 0, /* properties_provided */
1472 0, /* properties_destroyed */
1473 0, /* todo_flags_start */
1474 TODO_update_ssa, /* todo_flags_finish */
1475 0 /* letter */
1476 };