tree-data-ref.c (subscript_dependence_tester_1): Call free_conflict_function.
[gcc.git] / gcc / tree-ssa.c
1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "flags.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "ggc.h"
29 #include "langhooks.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "output.h"
33 #include "expr.h"
34 #include "function.h"
35 #include "diagnostic.h"
36 #include "bitmap.h"
37 #include "pointer-set.h"
38 #include "tree-flow.h"
39 #include "tree-gimple.h"
40 #include "tree-inline.h"
41 #include "varray.h"
42 #include "timevar.h"
43 #include "hashtab.h"
44 #include "tree-dump.h"
45 #include "tree-pass.h"
46 #include "toplev.h"
47
48 /* Remove the corresponding arguments from the PHI nodes in E's
49 destination block and redirect it to DEST. Return redirected edge.
50 The list of removed arguments is stored in PENDING_STMT (e). */
51
52 edge
53 ssa_redirect_edge (edge e, basic_block dest)
54 {
55 tree phi;
56 tree list = NULL, *last = &list;
57 tree src, dst, node;
58
59 /* Remove the appropriate PHI arguments in E's destination block. */
60 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
61 {
62 if (PHI_ARG_DEF (phi, e->dest_idx) == NULL_TREE)
63 continue;
64
65 src = PHI_ARG_DEF (phi, e->dest_idx);
66 dst = PHI_RESULT (phi);
67 node = build_tree_list (dst, src);
68 *last = node;
69 last = &TREE_CHAIN (node);
70 }
71
72 e = redirect_edge_succ_nodup (e, dest);
73 PENDING_STMT (e) = list;
74
75 return e;
76 }
77
78 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
79 E->dest. */
80
81 void
82 flush_pending_stmts (edge e)
83 {
84 tree phi, arg;
85
86 if (!PENDING_STMT (e))
87 return;
88
89 for (phi = phi_nodes (e->dest), arg = PENDING_STMT (e);
90 phi;
91 phi = PHI_CHAIN (phi), arg = TREE_CHAIN (arg))
92 {
93 tree def = TREE_VALUE (arg);
94 add_phi_arg (phi, def, e);
95 }
96
97 PENDING_STMT (e) = NULL;
98 }
99
/* Return true if SSA_NAME is malformed, emitting a diagnostic for the
   first violated invariant.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
   operand (VDEF/VUSE), false if it came from a real operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  /* Only SSA_NAME nodes may appear where an SSA name is expected.  */
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  /* An SSA name must have exactly the type of the symbol it renames.  */
  if (TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  /* Names released to the free pool must never appear in the IL.  */
  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  /* GIMPLE registers are renamed by real operands, memory symbols by
     virtual operands; the two kinds must never mix.  */
  if (is_virtual && is_gimple_reg (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (!is_virtual && !is_gimple_reg (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  /* If the symbol has subvariables, the subvariables -- not the parent
     variable itself -- should appear in virtual operands.  */
  if (is_virtual && var_ann (SSA_NAME_VAR (ssa_name))
      && get_subvars_for_var (SSA_NAME_VAR (ssa_name)) != NULL)
    {
      error ("found real variable when subvariables should have appeared");
      return true;
    }

  /* Default definitions are created out of thin air, without a
     defining statement.  */
  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}
154
155
/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
	    tree stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  /* Single assignment: each version may be defined only once.  */
  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
	     definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  /* Record BB as the defining block so verify_use can later check
     dominance of uses by this definition.  */
  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  /* The name's back pointer to its defining statement must match the
     statement it was actually found on.  */
  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_generic_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_generic_stmt (stderr, stmt, TDF_VOPS);
      goto err;
    }

  return false;

err:
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name, 0);
  fprintf (stderr, " in statement\n");
  print_generic_stmt (stderr, stmt, TDF_VOPS);

  return true;
}
203
204
/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
   is flowing through an abnormal edge (only used when checking PHI
   arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
   that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
	    tree stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  /* Verify the immediate-use list once per name; TREE_VISITED marks
     names whose list has already been checked (cleared by verify_ssa
     before the walk starts).  */
  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
	   && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      /* A definition must dominate every use reached by it.  */
      error ("definition in block %i does not dominate use in block %i",
	     def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
	   && names_defined_in_bb != NULL
	   && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      /* Within a single block, the definition must come first.  */
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      /* A list-head node has a NULL use pointer and carries the owning
	 statement; ordinary nodes carry the used SSA name.  */
      if (use_p->prev->use == NULL)
	listvar = use_p->prev->stmt;
      else
	listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
	{
	  error ("wrong immediate use list");
	  err = true;
	}
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_generic_stmt (stderr, stmt, TDF_VOPS);
    }

  return err;
}
293
294
/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.  */

static bool
verify_phi_args (tree phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  unsigned i, phi_num_args = PHI_NUM_ARGS (phi);

  /* A PHI must have exactly one argument per incoming edge.  */
  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = PHI_ARG_DEF_PTR (phi, i);
      tree op = USE_FROM_PTR (op_p);

      /* Argument I corresponds to the I-th predecessor edge.  */
      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
	{
	  error ("PHI argument is missing for edge %d->%d",
	         e->src->index,
		 e->dest->index);
	  err = true;
	  goto error;
	}

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
	{
	  error ("PHI argument is not SSA_NAME, or invariant");
	  err = true;
	}

      if (TREE_CODE (op) == SSA_NAME)
	{
	  /* The argument is virtual iff the PHI result renames memory.  */
	  err = verify_ssa_name (op, !is_gimple_reg (PHI_RESULT (phi)));
	  /* The use conceptually occurs at the end of the edge's source
	     block, hence E->src below.  */
	  err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
			     op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
	}

      if (e->dest != bb)
	{
	  error ("wrong edge %d->%d for PHI argument",
	         e->src->index, e->dest->index);
	  err = true;
	}

      if (err)
	{
	  fprintf (stderr, "PHI argument\n");
	  print_generic_stmt (stderr, op, TDF_VOPS);
	  goto error;
	}
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_generic_stmt (stderr, phi, TDF_VOPS|TDF_MEMSYMS);
    }


  return err;
}
371
372
/* Verify flow-insensitive aliasing information: every variable in a
   memory tag's may-alias set must either be a memory partition tag or
   be addressable.  Calls internal_error on failure.  */

static void
verify_flow_insensitive_alias_info (void)
{
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      unsigned int j;
      bitmap aliases;
      tree alias;
      bitmap_iterator bi;

      /* Only memory tags carry may-alias sets.  */
      if (!MTAG_P (var) || !MTAG_ALIASES (var))
	continue;

      aliases = MTAG_ALIASES (var);

      /* The alias bitmap is indexed by DECL_UID.  */
      EXECUTE_IF_SET_IN_BITMAP (aliases, 0, j, bi)
	{
	  alias = referenced_var (j);

	  if (TREE_CODE (alias) != MEMORY_PARTITION_TAG
	      && !may_be_aliased (alias))
	    {
	      error ("non-addressable variable inside an alias set");
	      debug_variable (alias);
	      goto err;
	    }
	}
    }

  return;

err:
  debug_variable (var);
  internal_error ("verify_flow_insensitive_alias_info failed");
}
411
412
/* Verify flow-sensitive (per-SSA-name) aliasing information: memory
   tags, points-to sets and call-clobbering of escaped pointers.
   Calls internal_error on failure.  */

static void
verify_flow_sensitive_alias_info (void)
{
  size_t i;
  tree ptr;

  for (i = 1; i < num_ssa_names; i++)
    {
      tree var;
      var_ann_t ann;
      struct ptr_info_def *pi;


      ptr = ssa_name (i);
      if (!ptr)
	continue;

      /* We only care for pointers that are actually referenced in the
	 program.  */
      if (!POINTER_TYPE_P (TREE_TYPE (ptr)) || !TREE_VISITED (ptr))
	continue;

      /* RESULT_DECL is special.  If it's a GIMPLE register, then it
	 is only written-to only once in the return statement.
	 Otherwise, aggregate RESULT_DECLs may be written-to more than
	 once in virtual operands.  */
      var = SSA_NAME_VAR (ptr);
      if (TREE_CODE (var) == RESULT_DECL
	  && is_gimple_reg (ptr))
	continue;

      /* Names without pointer info have nothing to verify.  */
      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi == NULL)
	continue;

      ann = var_ann (var);
      if (pi->is_dereferenced && !pi->name_mem_tag && !ann->symbol_mem_tag)
	{
	  error ("dereferenced pointers should have a name or a symbol tag");
	  goto err;
	}

      if (pi->name_mem_tag
	  && (pi->pt_vars == NULL || bitmap_empty_p (pi->pt_vars)))
	{
	  error ("pointers with a memory tag, should have points-to sets");
	  goto err;
	}

      if (pi->value_escapes_p && pi->name_mem_tag)
	{
	  /* If the tag was partitioned, the call-clobber flag lives on
	     the partition instead.  */
	  tree t = memory_partition (pi->name_mem_tag);
	  if (t == NULL_TREE)
	    t = pi->name_mem_tag;

	  if (!is_call_clobbered (t))
	    {
	      error ("pointer escapes but its name tag is not call-clobbered");
	      goto err;
	    }
	}
    }

  return;

err:
  debug_variable (ptr);
  internal_error ("verify_flow_sensitive_alias_info failed");
}
482
483
484 /* Verify the consistency of call clobbering information. */
485
486 static void
487 verify_call_clobbering (void)
488 {
489 unsigned int i;
490 bitmap_iterator bi;
491 tree var;
492 referenced_var_iterator rvi;
493
494 /* At all times, the result of the call_clobbered flag should
495 match the result of the call_clobbered_vars bitmap. Verify both
496 that everything in call_clobbered_vars is marked
497 call_clobbered, and that everything marked
498 call_clobbered is in call_clobbered_vars. */
499 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
500 {
501 var = referenced_var (i);
502
503 if (memory_partition (var))
504 var = memory_partition (var);
505
506 if (!MTAG_P (var) && !var_ann (var)->call_clobbered)
507 {
508 error ("variable in call_clobbered_vars but not marked "
509 "call_clobbered");
510 debug_variable (var);
511 goto err;
512 }
513 }
514
515 FOR_EACH_REFERENCED_VAR (var, rvi)
516 {
517 if (is_gimple_reg (var))
518 continue;
519
520 if (memory_partition (var))
521 var = memory_partition (var);
522
523 if (!MTAG_P (var)
524 && var_ann (var)->call_clobbered
525 && !bitmap_bit_p (gimple_call_clobbered_vars (cfun), DECL_UID (var)))
526 {
527 error ("variable marked call_clobbered but not in "
528 "call_clobbered_vars bitmap.");
529 debug_variable (var);
530 goto err;
531 }
532 }
533
534 return;
535
536 err:
537 internal_error ("verify_call_clobbering failed");
538 }
539
540
/* Verify invariants in memory partitions: every partition must contain
   at least one symbol, and no symbol may belong to more than one
   partition.  Calls internal_error on failure.  */

static void
verify_memory_partitions (void)
{
  unsigned i;
  tree mpt;
  VEC(tree,heap) *mpt_table = gimple_ssa_operands (cfun)->mpt_table;
  struct pointer_set_t *partitioned_syms = pointer_set_create ();

  for (i = 0; VEC_iterate (tree, mpt_table, i, mpt); i++)
    {
      unsigned j;
      bitmap_iterator bj;

      if (MPT_SYMBOLS (mpt) == NULL)
	{
	  error ("Memory partitions should have at least one symbol");
	  debug_variable (mpt);
	  goto err;
	}

      /* Track symbols across all partitions; a repeated insertion
	 means a symbol appears in two partitions.  */
      EXECUTE_IF_SET_IN_BITMAP (MPT_SYMBOLS (mpt), 0, j, bj)
	{
	  tree var = referenced_var (j);
	  if (pointer_set_insert (partitioned_syms, var))
	    {
	      error ("Partitioned symbols should belong to exactly one "
		     "partition");
	      debug_variable (var);
	      goto err;
	    }
	}
    }

  pointer_set_destroy (partitioned_syms);

  return;

err:
  /* NOTE(review): internal_error does not return, so PARTITIONED_SYMS
     is deliberately not destroyed on this path.  */
  internal_error ("verify_memory_partitions failed");
}
583
584
/* Verify the consistency of aliasing information.  Dispatches to the
   individual verifiers; each one calls internal_error on failure, so
   the order below only affects which failure is reported first.  */

static void
verify_alias_info (void)
{
  verify_flow_sensitive_alias_info ();
  verify_call_clobbering ();
  verify_flow_insensitive_alias_info ();
  verify_memory_partitions ();
}
595
596
/* Verify common invariants in the SSA web: definitions, uses, PHI
   arguments and (when computed) alias information.  When
   CHECK_MODIFIED_STMT is true, also complain about statements still
   marked modified after an optimization pass.  Calls internal_error
   on the first failure.
   TODO: verify the variable annotations.  */

void
verify_ssa (bool check_modified_stmt)
{
  size_t i;
  basic_block bb;
  /* Maps SSA version number -> defining block; filled by verify_def.  */
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  /* Remember the dominance info state so it can be restored below.  */
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);

  gcc_assert (!need_ssa_update_p ());

  verify_stmts ();

  timevar_push (TV_TREE_SSA_VERIFY);

  /* Keep track of SSA names present in the IL.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  tree stmt;
	  /* Cleared here so verify_use checks each name's
	     immediate-use list exactly once.  */
	  TREE_VISITED (name) = 0;

	  stmt = SSA_NAME_DEF_STMT (name);
	  if (!IS_EMPTY_STMT (stmt))
	    {
	      basic_block bb = bb_for_stmt (stmt);
	      verify_def (bb, definition_block,
			  name, stmt, !is_gimple_reg (name));

	    }
	}
    }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB (bb)
    {
      edge e;
      tree phi;
      edge_iterator ei;
      block_stmt_iterator bsi;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->aux)
	    {
	      error ("AUX pointer initialized for edge %d->%d", e->src->index,
		      e->dest->index);
	      goto err;
	    }
	}

      /* Verify the arguments for every PHI node in the block.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  if (verify_phi_args (phi, bb, definition_block))
	    goto err;

	  bitmap_set_bit (names_defined_in_bb,
			  SSA_NAME_VERSION (PHI_RESULT (phi)));
	}

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);
	  use_operand_p use_p;

	  if (check_modified_stmt && stmt_modified_p (stmt))
	    {
	      error ("stmt (%p) marked modified after optimization pass: ",
		     (void *)stmt);
	      print_generic_stmt (stderr, stmt, TDF_VOPS);
	      goto err;
	    }

	  /* A store through memory (non-SSA LHS) must produce virtual
	     definitions once aliasing has been computed.  */
	  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	      && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) != SSA_NAME)
	    {
	      tree lhs, base_address;

	      lhs = GIMPLE_STMT_OPERAND (stmt, 0);
	      base_address = get_base_address (lhs);

	      if (base_address
		  && gimple_aliases_computed_p (cfun)
		  && SSA_VAR_P (base_address)
		  && !stmt_ann (stmt)->has_volatile_ops
		  && ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
		{
		  error ("statement makes a memory store, but has no VDEFS");
		  print_generic_stmt (stderr, stmt, TDF_VOPS);
		  goto err;
		}
	    }

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_VIRTUALS)
	    {
	      if (verify_ssa_name (op, true))
		{
		  error ("in statement");
		  print_generic_stmt (stderr, stmt, TDF_VOPS|TDF_MEMSYMS);
		  goto err;
		}
	    }

	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE|SSA_OP_DEF)
	    {
	      if (verify_ssa_name (op, false))
		{
		  error ("in statement");
		  print_generic_stmt (stderr, stmt, TDF_VOPS|TDF_MEMSYMS);
		  goto err;
		}
	    }

	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
	    {
	      op = USE_FROM_PTR (use_p);
	      if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
			      use_p, stmt, false, names_defined_in_bb))
		goto err;
	    }

	  /* Defs become visible to later statements in this block.  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
	    bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
	}

      /* NAMES_DEFINED_IN_BB is per-block; reset it for the next one.  */
      bitmap_clear (names_defined_in_bb);
    }

  /* Finally, verify alias information.  */
  if (gimple_aliases_computed_p (cfun))
    verify_alias_info ();

  free (definition_block);

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  BITMAP_FREE (names_defined_in_bb);
  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}
758
759 /* Return true if the uid in both int tree maps are equal. */
760
761 int
762 int_tree_map_eq (const void *va, const void *vb)
763 {
764 const struct int_tree_map *a = (const struct int_tree_map *) va;
765 const struct int_tree_map *b = (const struct int_tree_map *) vb;
766 return (a->uid == b->uid);
767 }
768
769 /* Hash a UID in a int_tree_map. */
770
771 unsigned int
772 int_tree_map_hash (const void *item)
773 {
774 return ((const struct int_tree_map *)item)->uid;
775 }
776
777 /* Return true if the DECL_UID in both trees are equal. */
778
779 int
780 uid_decl_map_eq (const void *va, const void *vb)
781 {
782 const_tree a = (const_tree) va;
783 const_tree b = (const_tree) vb;
784 return (a->decl_minimal.uid == b->decl_minimal.uid);
785 }
786
787 /* Hash a tree in a uid_decl_map. */
788
789 unsigned int
790 uid_decl_map_hash (const void *item)
791 {
792 return ((const_tree)item)->decl_minimal.uid;
793 }
794
795 /* Return true if the uid in both int tree maps are equal. */
796
797 static int
798 var_ann_eq (const void *va, const void *vb)
799 {
800 const struct static_var_ann_d *a = (const struct static_var_ann_d *) va;
801 const_tree const b = (const_tree) vb;
802 return (a->uid == DECL_UID (b));
803 }
804
805 /* Hash a UID in a int_tree_map. */
806
807 static unsigned int
808 var_ann_hash (const void *item)
809 {
810 return ((const struct static_var_ann_d *)item)->uid;
811 }
812
813 /* Return true if the DECL_UID in both trees are equal. */
814
815 static int
816 uid_ssaname_map_eq (const void *va, const void *vb)
817 {
818 const_tree a = (const_tree) va;
819 const_tree b = (const_tree) vb;
820 return (a->ssa_name.var->decl_minimal.uid == b->ssa_name.var->decl_minimal.uid);
821 }
822
823 /* Hash a tree in a uid_decl_map. */
824
825 static unsigned int
826 uid_ssaname_map_hash (const void *item)
827 {
828 return ((const_tree)item)->ssa_name.var->decl_minimal.uid;
829 }
830
831
/* Initialize global DFA and SSA structures.  All per-function SSA/DFA
   state hangs off cfun->gimple_df, allocated in GC-managed memory.  */

void
init_tree_ssa (void)
{
  cfun->gimple_df = GGC_CNEW (struct gimple_df);
  /* Referenced variables, keyed by DECL_UID.  */
  cfun->gimple_df->referenced_vars = htab_create_ggc (20, uid_decl_map_hash,
				     		      uid_decl_map_eq, NULL);
  /* Default definitions, keyed by the DECL_UID of the underlying symbol.  */
  cfun->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
				     		   uid_ssaname_map_eq, NULL);
  /* Out-of-band variable annotations, keyed by DECL_UID.  */
  cfun->gimple_df->var_anns = htab_create_ggc (20, var_ann_hash,
					       var_ann_eq, NULL);
  cfun->gimple_df->call_clobbered_vars = BITMAP_GGC_ALLOC ();
  cfun->gimple_df->addressable_vars = BITMAP_GGC_ALLOC ();
  init_ssanames ();
  init_phinodes ();
}
849
850
/* Deallocate memory associated with SSA data structures for FNDECL.
   After this returns, cfun->gimple_df is NULL and all SSA names,
   annotations and operand caches for the function are released.  */

void
delete_tree_ssa (void)
{
  size_t i;
  basic_block bb;
  block_stmt_iterator bsi;
  referenced_var_iterator rvi;
  tree var;

  /* Release any ssa_names still in use.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      /* NOTE: shadows the outer VAR, which is reused further below.  */
      tree var = ssa_name (i);
      if (var && TREE_CODE (var) == SSA_NAME)
        {
	  /* Reset the immediate-use list to a self-linked empty list
	     before releasing the name.  */
	  SSA_NAME_IMM_USE_NODE (var).prev = &(SSA_NAME_IMM_USE_NODE (var));
	  SSA_NAME_IMM_USE_NODE (var).next = &(SSA_NAME_IMM_USE_NODE (var));
	}
      release_ssa_name (var);
    }

  /* Remove annotations from every tree in the function.  */
  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);
	  stmt_ann_t ann = get_stmt_ann (stmt);

	  free_ssa_operands (&ann->operands);
	  ann->addresses_taken = 0;
	  mark_stmt_modified (stmt);
	}
      set_phi_nodes (bb, NULL);
    }

  /* Remove annotations from every referenced variable.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (var->base.ann)
        ggc_free (var->base.ann);
      var->base.ann = NULL;
    }
  htab_delete (gimple_referenced_vars (cfun));
  cfun->gimple_df->referenced_vars = NULL;

  fini_ssanames ();
  fini_phinodes ();
  /* we no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active ())
    fini_ssa_operands ();

  cfun->gimple_df->global_var = NULL_TREE;

  htab_delete (cfun->gimple_df->default_defs);
  cfun->gimple_df->default_defs = NULL;
  htab_delete (cfun->gimple_df->var_anns);
  cfun->gimple_df->var_anns = NULL;
  /* These bitmaps are GC-allocated (see init_tree_ssa); dropping the
     pointers is sufficient.  */
  cfun->gimple_df->call_clobbered_vars = NULL;
  cfun->gimple_df->addressable_vars = NULL;
  cfun->gimple_df->modified_noreturn_calls = NULL;
  if (gimple_aliases_computed_p (cfun))
    {
      delete_alias_heapvars ();
      gcc_assert (!need_ssa_update_p ());
    }
  cfun->gimple_df->aliases_computed_p = false;
  delete_mem_ref_stats (cfun);

  cfun->gimple_df = NULL;
}
924
/* Helper for useless_type_conversion_p.  Return true if a conversion
   from INNER_TYPE to OUTER_TYPE is useless.  Unlike the entry point,
   this does not treat conversions to (void *) specially, so it is safe
   to recurse through pointed-to types.  */

static bool
useless_type_conversion_p_1 (tree outer_type, tree inner_type)
{
  /* Qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  if (inner_type == outer_type)
    return true;

  /* If we know the canonical types, compare them.  */
  if (TYPE_CANONICAL (inner_type)
      && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
    return true;

  /* Changes in machine mode are never useless conversions.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
	  || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
	return false;

      /* Conversions from a non-base to a base type are not useless.
	 This way we preserve the invariant to do arithmetic in
	 base types only.  */
      if (TREE_TYPE (inner_type)
	  && TREE_TYPE (inner_type) != inner_type
	  && (TREE_TYPE (outer_type) == outer_type
	      || TREE_TYPE (outer_type) == NULL_TREE))
	return false;

      /* We don't need to preserve changes in the types minimum or
	 maximum value in general as these do not generate code
	 unless the types precisions are different.  */

      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
	   && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
	   && POINTER_TYPE_P (outer_type))
    {
      /* Don't lose casts between pointers to volatile and non-volatile
	 qualified types.  Doing so would result in changing the semantics
	 of later accesses.  */
      if ((TYPE_VOLATILE (TREE_TYPE (outer_type))
	   != TYPE_VOLATILE (TREE_TYPE (inner_type)))
	  && TYPE_VOLATILE (TREE_TYPE (outer_type)))
	return false;

      /* Do not lose casts between pointers with different
	 TYPE_REF_CAN_ALIAS_ALL setting or alias sets.  */
      if ((TYPE_REF_CAN_ALIAS_ALL (inner_type)
	   != TYPE_REF_CAN_ALIAS_ALL (outer_type))
	  || (get_alias_set (TREE_TYPE (inner_type))
	      != get_alias_set (TREE_TYPE (outer_type))))
	return false;

      /* Do not lose casts from const qualified to non-const
	 qualified.  */
      if ((TYPE_READONLY (TREE_TYPE (outer_type))
	   != TYPE_READONLY (TREE_TYPE (inner_type)))
	  && TYPE_READONLY (TREE_TYPE (inner_type)))
	return false;

      /* Do not lose casts to restrict qualified pointers.  */
      if ((TYPE_RESTRICT (outer_type)
	   != TYPE_RESTRICT (inner_type))
	  && TYPE_RESTRICT (outer_type))
	return false;

      /* Otherwise pointers/references are equivalent if their pointed
	 to types are effectively the same.  We can strip qualifiers
	 on pointed-to types for further comparison, which is done in
	 the callee.  */
      return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
					  TREE_TYPE (inner_type));
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
	   && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
					TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.
     NOTE(review): the code compares TYPE_PRECISION rather than
     TYPE_VECTOR_SUBPARTS as the comment suggests -- confirm this is
     intended.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
	   && TREE_CODE (outer_type) == VECTOR_TYPE
	   && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
    return useless_type_conversion_p_1 (TREE_TYPE (outer_type),
					TREE_TYPE (inner_type));

  /* For aggregates we may need to fall back to structural equality
     checks.  */
  else if (AGGREGATE_TYPE_P (inner_type)
	   && AGGREGATE_TYPE_P (outer_type))
    {
      /* Different types of aggregates are incompatible.  */
      if (TREE_CODE (inner_type) != TREE_CODE (outer_type))
	return false;

      /* ???  Add structural equivalence check.  */

      /* ???  This should eventually just return false.  */
      return lang_hooks.types_compatible_p (inner_type, outer_type);
    }

  return false;
}
1049
1050 /* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
1051 useless type conversion, otherwise return false.
1052
1053 This function implicitly defines the middle-end type system. With
1054 the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
1055 holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
1056 the following invariants shall be fulfilled:
1057
1058 1) useless_type_conversion_p is transitive.
1059 If a < b and b < c then a < c.
1060
1061 2) useless_type_conversion_p is not symmetric.
1062 From a < b does not follow a > b.
1063
1064 3) Types define the available set of operations applicable to values.
1065 A type conversion is useless if the operations for the target type
1066 is a subset of the operations for the source type. For example
1067 casts to void* are useless, casts from void* are not (void* can't
1068 be dereferenced or offsetted, but copied, hence its set of operations
1069 is a strict subset of that of all other data pointer types). Casts
1070 to const T* are useless (can't be written to), casts from const T*
1071 to T* are not. */
1072
1073 bool
1074 useless_type_conversion_p (tree outer_type, tree inner_type)
1075 {
1076 /* If the outer type is (void *), then the conversion is not
1077 necessary. We have to make sure to not apply this while
1078 recursing though. */
1079 if (POINTER_TYPE_P (inner_type)
1080 && POINTER_TYPE_P (outer_type)
1081 && TREE_CODE (TREE_TYPE (outer_type)) == VOID_TYPE)
1082 return true;
1083
1084 return useless_type_conversion_p_1 (outer_type, inner_type);
1085 }
1086
1087 /* Return true if a conversion from either type of TYPE1 and TYPE2
1088 to the other is not required. Otherwise return false. */
1089
1090 bool
1091 types_compatible_p (tree type1, tree type2)
1092 {
1093 return (type1 == type2
1094 || (useless_type_conversion_p (type1, type2)
1095 && useless_type_conversion_p (type2, type1)));
1096 }
1097
1098 /* Return true if EXPR is a useless type conversion, otherwise return
1099 false. */
1100
1101 bool
1102 tree_ssa_useless_type_conversion (tree expr)
1103 {
1104 /* If we have an assignment that merely uses a NOP_EXPR to change
1105 the top of the RHS to the type of the LHS and the type conversion
1106 is "safe", then strip away the type conversion so that we can
1107 enter LHS = RHS into the const_and_copies table. */
1108 if (TREE_CODE (expr) == NOP_EXPR || TREE_CODE (expr) == CONVERT_EXPR
1109 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1110 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1111 /* FIXME: Use of GENERIC_TREE_TYPE here is a temporary measure to work
1112 around known bugs with GIMPLE_MODIFY_STMTs appearing in places
1113 they shouldn't. See PR 30391. */
1114 return useless_type_conversion_p
1115 (TREE_TYPE (expr),
1116 GENERIC_TREE_TYPE (TREE_OPERAND (expr, 0)));
1117
1118 return false;
1119 }
1120
1121
/* Internal helper for walk_use_def_chains.  VAR, FN and DATA are as
   described in walk_use_def_chains.

   VISITED is a pointer set used to mark visited SSA_NAMEs to avoid
   infinite loops.  We used to have a bitmap for this to just mark
   SSA versions we had visited.  But non-sparse bitmaps are way too
   expensive, while sparse bitmaps may cause quadratic behavior.

   IS_DFS is true if the caller wants to perform a depth-first search
   when visiting PHI nodes.  A DFS will visit each PHI argument and
   call FN after each one.  Otherwise, all the arguments are
   visited first and then FN is called with each of the visited
   arguments in a separate pass.

   Returns true if FN asked to stop the walk, false otherwise.  */

static bool
walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
		       struct pointer_set_t *visited, bool is_dfs)
{
  tree def_stmt;

  /* Stop on names already seen (PHI cycles are common in loops).  */
  if (pointer_set_insert (visited, var))
    return false;

  def_stmt = SSA_NAME_DEF_STMT (var);

  if (TREE_CODE (def_stmt) != PHI_NODE)
    {
      /* If we reached the end of the use-def chain, call FN.  */
      return fn (var, def_stmt, data);
    }
  else
    {
      int i;

      /* When doing a breadth-first search, call FN before following the
	 use-def links for each argument.  */
      if (!is_dfs)
	for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
	  if (fn (PHI_ARG_DEF (def_stmt, i), def_stmt, data))
	    return true;

      /* Follow use-def links out of each PHI argument.  */
      for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
	{
	  tree arg = PHI_ARG_DEF (def_stmt, i);

	  /* ARG may be NULL for newly introduced PHI nodes.  */
	  if (arg
	      && TREE_CODE (arg) == SSA_NAME
	      && walk_use_def_chains_1 (arg, fn, data, visited, is_dfs))
	    return true;
	}

      /* When doing a depth-first search, call FN after following the
	 use-def links for each argument.  */
      if (is_dfs)
	for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
	  if (fn (PHI_ARG_DEF (def_stmt, i), def_stmt, data))
	    return true;
    }

  return false;
}
1185
1186
1187
1188 /* Walk use-def chains starting at the SSA variable VAR. Call
1189 function FN at each reaching definition found. FN takes three
1190 arguments: VAR, its defining statement (DEF_STMT) and a generic
1191 pointer to whatever state information that FN may want to maintain
1192 (DATA). FN is able to stop the walk by returning true, otherwise
1193 in order to continue the walk, FN should return false.
1194
1195 Note, that if DEF_STMT is a PHI node, the semantics are slightly
1196 different. The first argument to FN is no longer the original
1197 variable VAR, but the PHI argument currently being examined. If FN
1198 wants to get at VAR, it should call PHI_RESULT (PHI).
1199
1200 If IS_DFS is true, this function will:
1201
1202 1- walk the use-def chains for all the PHI arguments, and,
1203 2- call (*FN) (ARG, PHI, DATA) on all the PHI arguments.
1204
1205 If IS_DFS is false, the two steps above are done in reverse order
1206 (i.e., a breadth-first search). */
1207
1208 void
1209 walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
1210 bool is_dfs)
1211 {
1212 tree def_stmt;
1213
1214 gcc_assert (TREE_CODE (var) == SSA_NAME);
1215
1216 def_stmt = SSA_NAME_DEF_STMT (var);
1217
1218 /* We only need to recurse if the reaching definition comes from a PHI
1219 node. */
1220 if (TREE_CODE (def_stmt) != PHI_NODE)
1221 (*fn) (var, def_stmt, data);
1222 else
1223 {
1224 struct pointer_set_t *visited = pointer_set_create ();
1225 walk_use_def_chains_1 (var, fn, data, visited, is_dfs);
1226 pointer_set_destroy (visited);
1227 }
1228 }
1229
1230 \f
1231 /* Return true if T, an SSA_NAME, has an undefined value. */
1232
1233 bool
1234 ssa_undefined_value_p (tree t)
1235 {
1236 tree var = SSA_NAME_VAR (t);
1237
1238 /* Parameters get their initial value from the function entry. */
1239 if (TREE_CODE (var) == PARM_DECL)
1240 return false;
1241
1242 /* Hard register variables get their initial value from the ether. */
1243 if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1244 return false;
1245
1246 /* The value is undefined iff its definition statement is empty. */
1247 return IS_EMPTY_STMT (SSA_NAME_DEF_STMT (t));
1248 }
1249
1250 /* Emit warnings for uninitialized variables. This is done in two passes.
1251
1252 The first pass notices real uses of SSA names with undefined values.
1253 Such uses are unconditionally uninitialized, and we can be certain that
1254 such a use is a mistake. This pass is run before most optimizations,
1255 so that we catch as many as we can.
1256
1257 The second pass follows PHI nodes to find uses that are potentially
1258 uninitialized. In this case we can't necessarily prove that the use
1259 is really uninitialized. This pass is run after most optimizations,
   so that we thread as many jumps as possible, and delete as much dead
1261 code as possible, in order to reduce false positives. We also look
1262 again for plain uninitialized variables, since optimization may have
1263 changed conditionally uninitialized to unconditionally uninitialized. */
1264
1265 /* Emit a warning for T, an SSA_NAME, being uninitialized. The exact
1266 warning text is in MSGID and LOCUS may contain a location or be null. */
1267
1268 static void
1269 warn_uninit (tree t, const char *gmsgid, void *data)
1270 {
1271 tree var = SSA_NAME_VAR (t);
1272 tree context = (tree) data;
1273 location_t *locus;
1274 expanded_location xloc, floc;
1275
1276 if (!ssa_undefined_value_p (t))
1277 return;
1278
1279 /* TREE_NO_WARNING either means we already warned, or the front end
1280 wishes to suppress the warning. */
1281 if (TREE_NO_WARNING (var))
1282 return;
1283
1284 locus = (context != NULL && EXPR_HAS_LOCATION (context)
1285 ? EXPR_LOCUS (context)
1286 : &DECL_SOURCE_LOCATION (var));
1287 warning (OPT_Wuninitialized, gmsgid, locus, var);
1288 xloc = expand_location (*locus);
1289 floc = expand_location (DECL_SOURCE_LOCATION (cfun->decl));
1290 if (xloc.file != floc.file
1291 || xloc.line < floc.line
1292 || xloc.line > LOCATION_LINE (cfun->function_end_locus))
1293 inform ("%J%qD was declared here", var, var);
1294
1295 TREE_NO_WARNING (var) = 1;
1296 }
1297
1298 /* Called via walk_tree, look for SSA_NAMEs that have empty definitions
1299 and warn about them. */
1300
1301 static tree
1302 warn_uninitialized_var (tree *tp, int *walk_subtrees, void *data)
1303 {
1304 tree t = *tp;
1305
1306 switch (TREE_CODE (t))
1307 {
1308 case SSA_NAME:
1309 /* We only do data flow with SSA_NAMEs, so that's all we
1310 can warn about. */
1311 warn_uninit (t, "%H%qD is used uninitialized in this function", data);
1312 *walk_subtrees = 0;
1313 break;
1314
1315 case REALPART_EXPR:
1316 case IMAGPART_EXPR:
1317 /* The total store transformation performed during gimplification
1318 creates uninitialized variable uses. If all is well, these will
1319 be optimized away, so don't warn now. */
1320 if (TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
1321 *walk_subtrees = 0;
1322 break;
1323
1324 default:
1325 if (IS_TYPE_OR_DECL_P (t))
1326 *walk_subtrees = 0;
1327 break;
1328 }
1329
1330 return NULL_TREE;
1331 }
1332
1333 /* Look for inputs to PHI that are SSA_NAMEs that have empty definitions
1334 and warn about them. */
1335
1336 static void
1337 warn_uninitialized_phi (tree phi)
1338 {
1339 int i, n = PHI_NUM_ARGS (phi);
1340
1341 /* Don't look at memory tags. */
1342 if (!is_gimple_reg (PHI_RESULT (phi)))
1343 return;
1344
1345 for (i = 0; i < n; ++i)
1346 {
1347 tree op = PHI_ARG_DEF (phi, i);
1348 if (TREE_CODE (op) == SSA_NAME)
1349 warn_uninit (op, "%H%qD may be used uninitialized in this function",
1350 NULL);
1351 }
1352 }
1353
1354 static unsigned int
1355 execute_early_warn_uninitialized (void)
1356 {
1357 block_stmt_iterator bsi;
1358 basic_block bb;
1359
1360 FOR_EACH_BB (bb)
1361 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1362 {
1363 tree context = bsi_stmt (bsi);
1364 walk_tree (bsi_stmt_ptr (bsi), warn_uninitialized_var,
1365 context, NULL);
1366 }
1367 return 0;
1368 }
1369
1370 static unsigned int
1371 execute_late_warn_uninitialized (void)
1372 {
1373 basic_block bb;
1374 tree phi;
1375
1376 /* Re-do the plain uninitialized variable check, as optimization may have
1377 straightened control flow. Do this first so that we don't accidentally
1378 get a "may be" warning when we'd have seen an "is" warning later. */
1379 execute_early_warn_uninitialized ();
1380
1381 FOR_EACH_BB (bb)
1382 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
1383 warn_uninitialized_phi (phi);
1384 return 0;
1385 }
1386
/* Gate for both warn-uninitialized passes: run only when the user
   requested -Wuninitialized.  */

static bool
gate_warn_uninitialized (void)
{
  return warn_uninitialized != 0;
}
1392
/* Pass descriptor for the early uninitialized-use check; runs
   execute_early_warn_uninitialized under the -Wuninitialized gate and
   requires SSA form.  */

struct tree_opt_pass pass_early_warn_uninitialized =
{
  NULL,					/* name */
  gate_warn_uninitialized,		/* gate */
  execute_early_warn_uninitialized,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0				        /* letter */
};
1409
/* Pass descriptor for the late check; runs
   execute_late_warn_uninitialized (which also repeats the plain
   check) under the same -Wuninitialized gate, and requires SSA
   form.  */

struct tree_opt_pass pass_late_warn_uninitialized =
{
  NULL,					/* name */
  gate_warn_uninitialized,		/* gate */
  execute_late_warn_uninitialized,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0				        /* letter */
};
1426
1427 /* Compute TREE_ADDRESSABLE for local variables. */
1428
1429 static unsigned int
1430 execute_update_addresses_taken (void)
1431 {
1432 tree var;
1433 referenced_var_iterator rvi;
1434 block_stmt_iterator bsi;
1435 basic_block bb;
1436 bitmap addresses_taken = BITMAP_ALLOC (NULL);
1437 bitmap vars_updated = BITMAP_ALLOC (NULL);
1438 bool update_vops = false;
1439 tree phi;
1440
1441 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1442 the function body. */
1443 FOR_EACH_BB (bb)
1444 {
1445 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1446 {
1447 stmt_ann_t s_ann = stmt_ann (bsi_stmt (bsi));
1448
1449 if (s_ann->addresses_taken)
1450 bitmap_ior_into (addresses_taken, s_ann->addresses_taken);
1451 }
1452 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
1453 {
1454 unsigned i, phi_num_args = PHI_NUM_ARGS (phi);
1455 for (i = 0; i < phi_num_args; i++)
1456 {
1457 tree op = PHI_ARG_DEF (phi, i), var;
1458 if (TREE_CODE (op) == ADDR_EXPR
1459 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL_TREE
1460 && DECL_P (var))
1461 bitmap_set_bit (addresses_taken, DECL_UID (var));
1462 }
1463 }
1464 }
1465
1466 /* When possible, clear ADDRESSABLE bit and mark variable for conversion into
1467 SSA. */
1468 FOR_EACH_REFERENCED_VAR (var, rvi)
1469 if (!is_global_var (var)
1470 && TREE_CODE (var) != RESULT_DECL
1471 && TREE_ADDRESSABLE (var)
1472 && !bitmap_bit_p (addresses_taken, DECL_UID (var)))
1473 {
1474 TREE_ADDRESSABLE (var) = 0;
1475 if (is_gimple_reg (var))
1476 mark_sym_for_renaming (var);
1477 update_vops = true;
1478 bitmap_set_bit (vars_updated, DECL_UID (var));
1479 if (dump_file)
1480 {
1481 fprintf (dump_file, "No longer having address taken ");
1482 print_generic_expr (dump_file, var, 0);
1483 fprintf (dump_file, "\n");
1484 }
1485 }
1486
1487 /* Operand caches needs to be recomputed for operands referencing the updated
1488 variables. */
1489 if (update_vops)
1490 FOR_EACH_BB (bb)
1491 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1492 {
1493 tree stmt = bsi_stmt (bsi);
1494
1495 if ((LOADED_SYMS (stmt)
1496 && bitmap_intersect_p (LOADED_SYMS (stmt), vars_updated))
1497 || (STORED_SYMS (stmt)
1498 && bitmap_intersect_p (STORED_SYMS (stmt), vars_updated)))
1499 update_stmt (stmt);
1500 }
1501 BITMAP_FREE (addresses_taken);
1502 BITMAP_FREE (vars_updated);
1503 return 0;
1504 }
1505
/* Pass descriptor for the "addressables" pass; always runs (no gate),
   requires SSA form, and asks for an SSA update afterwards to reflect
   the cleared TREE_ADDRESSABLE bits.  */

struct tree_opt_pass pass_update_address_taken =
{
  "addressables",			/* name */
  NULL,					/* gate */
  execute_update_addresses_taken,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_update_ssa,			/* todo_flags_finish */
  0				        /* letter */
};