1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "tm_p.h"
28 #include "target.h"
29 #include "ggc.h"
30 #include "langhooks.h"
31 #include "basic-block.h"
32 #include "function.h"
33 #include "gimple-pretty-print.h"
34 #include "bitmap.h"
35 #include "pointer-set.h"
36 #include "tree-flow.h"
37 #include "gimple.h"
38 #include "tree-inline.h"
39 #include "hashtab.h"
40 #include "tree-pass.h"
41 #include "diagnostic-core.h"
42 #include "cfgloop.h"
43
44 /* Pointer map of variable mappings, keyed by edge. */
45 static struct pointer_map_t *edge_var_maps;
46
47
48 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
49
50 void
51 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
52 {
53 void **slot;
54 edge_var_map_vector old_head, head;
55 edge_var_map new_node;
56
57 if (edge_var_maps == NULL)
58 edge_var_maps = pointer_map_create ();
59
60 slot = pointer_map_insert (edge_var_maps, e);
61 old_head = head = (edge_var_map_vector) *slot;
62 if (!head)
63 {
64 head = VEC_alloc (edge_var_map, heap, 5);
65 *slot = head;
66 }
67 new_node.def = def;
68 new_node.result = result;
69 new_node.locus = locus;
70
71 VEC_safe_push (edge_var_map, heap, head, &new_node);
72 if (old_head != head)
73 {
74 /* The push did some reallocation. Update the pointer map. */
75 *slot = head;
76 }
77 }
78
79
80 /* Clear the var mappings in edge E. */
81
82 void
83 redirect_edge_var_map_clear (edge e)
84 {
85 void **slot;
86 edge_var_map_vector head;
87
88 if (!edge_var_maps)
89 return;
90
91 slot = pointer_map_contains (edge_var_maps, e);
92
93 if (slot)
94 {
95 head = (edge_var_map_vector) *slot;
96 VEC_free (edge_var_map, heap, head);
97 *slot = NULL;
98 }
99 }
100
101
102 /* Duplicate the redirected var mappings in OLDE onto NEWE.
103
104 Since we can't remove a mapping, let's just duplicate it. This assumes a
105 pointer_map can have multiple edges mapping to the same var_map (many to
106 one mapping), since we don't remove the previous mappings. */
107
108 void
109 redirect_edge_var_map_dup (edge newe, edge olde)
110 {
111 void **new_slot, **old_slot;
112 edge_var_map_vector head;
113
114 if (!edge_var_maps)
115 return;
116
117 new_slot = pointer_map_insert (edge_var_maps, newe);
118 old_slot = pointer_map_contains (edge_var_maps, olde);
119 if (!old_slot)
120 return;
121 head = (edge_var_map_vector) *old_slot;
122
123 if (head)
124 *new_slot = VEC_copy (edge_var_map, heap, head);
125 else
126 *new_slot = VEC_alloc (edge_var_map, heap, 5);
127 }
128
129
130 /* Return the variable mappings for a given edge. If there is none, return
131 NULL. */
132
133 edge_var_map_vector
134 redirect_edge_var_map_vector (edge e)
135 {
136 void **slot;
137
138 /* Hey, what kind of idiot would... you'd be surprised. */
139 if (!edge_var_maps)
140 return NULL;
141
142 slot = pointer_map_contains (edge_var_maps, e);
143 if (!slot)
144 return NULL;
145
146 return (edge_var_map_vector) *slot;
147 }
148
149 /* Used by redirect_edge_var_map_destroy to free all memory. */
150
151 static bool
152 free_var_map_entry (const void *key ATTRIBUTE_UNUSED,
153 void **value,
154 void *data ATTRIBUTE_UNUSED)
155 {
156 edge_var_map_vector head = (edge_var_map_vector) *value;
157 VEC_free (edge_var_map, heap, head);
158 return true;
159 }
160
161 /* Clear the edge variable mappings. */
162
163 void
164 redirect_edge_var_map_destroy (void)
165 {
166 if (edge_var_maps)
167 {
168 pointer_map_traverse (edge_var_maps, free_var_map_entry, NULL);
169 pointer_map_destroy (edge_var_maps);
170 edge_var_maps = NULL;
171 }
172 }
173
174
175 /* Remove the corresponding arguments from the PHI nodes in E's
176 destination block and redirect it to DEST. Return redirected edge.
177 The list of removed arguments is stored in a vector accessed
178 through edge_var_maps. */
179
180 edge
181 ssa_redirect_edge (edge e, basic_block dest)
182 {
183 gimple_stmt_iterator gsi;
184 gimple phi;
185
186 redirect_edge_var_map_clear (e);
187
188 /* Remove the appropriate PHI arguments in E's destination block. */
189 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
190 {
191 tree def;
192 source_location locus;
193
194 phi = gsi_stmt (gsi);
195 def = gimple_phi_arg_def (phi, e->dest_idx);
196 locus = gimple_phi_arg_location (phi, e->dest_idx);
197
198 if (def == NULL_TREE)
199 continue;
200
201 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
202 }
203
204 e = redirect_edge_succ_nodup (e, dest);
205
206 return e;
207 }
208
209
210 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
211 E->dest. */
212
213 void
214 flush_pending_stmts (edge e)
215 {
216 gimple phi;
217 edge_var_map_vector v;
218 edge_var_map *vm;
219 int i;
220 gimple_stmt_iterator gsi;
221
222 v = redirect_edge_var_map_vector (e);
223 if (!v)
224 return;
225
226 for (gsi = gsi_start_phis (e->dest), i = 0;
227 !gsi_end_p (gsi) && VEC_iterate (edge_var_map, v, i, vm);
228 gsi_next (&gsi), i++)
229 {
230 tree def;
231
232 phi = gsi_stmt (gsi);
233 def = redirect_edge_var_map_def (vm);
234 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
235 }
236
237 redirect_edge_var_map_clear (e);
238 }
239
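/* Illustrative usage sketch (caller names hypothetical): the two
   routines above are meant to be used as a pair when redirecting an
   edge in SSA form:

     e = ssa_redirect_edge (e, new_dest);
     ... create the PHI nodes expected in NEW_DEST ...
     flush_pending_stmts (e);

   ssa_redirect_edge stashes the removed PHI arguments and their
   locations in the edge var map, and flush_pending_stmts re-adds
   them, in order, to the PHI nodes of the new destination.  */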
240 /* Given a tree for an expression for which we might want to emit
241 locations or values in debug information (generally a variable, but
242 we might deal with other kinds of trees in the future), return the
243 tree that should be used as the variable of a DEBUG_BIND STMT or
244 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
245
246 tree
247 target_for_debug_bind (tree var)
248 {
249 if (!MAY_HAVE_DEBUG_STMTS)
250 return NULL_TREE;
251
252 if (TREE_CODE (var) != VAR_DECL
253 && TREE_CODE (var) != PARM_DECL)
254 return NULL_TREE;
255
256 if (DECL_HAS_VALUE_EXPR_P (var))
257 return target_for_debug_bind (DECL_VALUE_EXPR (var));
258
259 if (DECL_IGNORED_P (var))
260 return NULL_TREE;
261
262 if (!is_gimple_reg (var))
263 {
264 if (is_gimple_reg_type (TREE_TYPE (var))
265 && referenced_var_lookup (cfun, DECL_UID (var)) == NULL_TREE)
266 return var;
267 return NULL_TREE;
268 }
269
270 return var;
271 }
272
273 /* Called via walk_tree, look for SSA_NAMEs that have already been
274 released. */
275
276 static tree
277 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
278 {
279 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
280
281 if (wi && wi->is_lhs)
282 return NULL_TREE;
283
284 if (TREE_CODE (*tp) == SSA_NAME)
285 {
286 if (SSA_NAME_IN_FREE_LIST (*tp))
287 return *tp;
288
289 *walk_subtrees = 0;
290 }
291 else if (IS_TYPE_OR_DECL_P (*tp))
292 *walk_subtrees = 0;
293
294 return NULL_TREE;
295 }
296
297 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
298 by other DEBUG stmts, and replace uses of the DEF with the
299 newly-created debug temp. */
300
301 void
302 insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
303 {
304 imm_use_iterator imm_iter;
305 use_operand_p use_p;
306 gimple stmt;
307 gimple def_stmt = NULL;
308 int usecount = 0;
309 tree value = NULL;
310
311 if (!MAY_HAVE_DEBUG_STMTS)
312 return;
313
314 /* If this name has already been registered for replacement, do nothing
315 as anything that uses this name isn't in SSA form. */
316 if (name_registered_for_update_p (var))
317 return;
318
319 /* Check whether there are debug stmts that reference this variable and,
320 if there are, decide whether we should use a debug temp. */
321 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
322 {
323 stmt = USE_STMT (use_p);
324
325 if (!gimple_debug_bind_p (stmt))
326 continue;
327
328 if (usecount++)
329 break;
330
331 if (gimple_debug_bind_get_value (stmt) != var)
332 {
333 /* Count this as an additional use, so as to make sure we
334 use a temp unless VAR's definition has a SINGLE_RHS that
335 can be shared. */
336 usecount++;
337 break;
338 }
339 }
340
341 if (!usecount)
342 return;
343
344 if (gsi)
345 def_stmt = gsi_stmt (*gsi);
346 else
347 def_stmt = SSA_NAME_DEF_STMT (var);
348
349 /* If we didn't get an insertion point, and the stmt has already
350 been removed, we won't be able to insert the debug bind stmt, so
351 we'll have to drop debug information. */
352 if (gimple_code (def_stmt) == GIMPLE_PHI)
353 {
354 value = degenerate_phi_result (def_stmt);
355 if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
356 value = NULL;
357 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
358 to. */
359 else if (value == error_mark_node)
360 value = NULL;
361 }
362 else if (is_gimple_assign (def_stmt))
363 {
364 bool no_value = false;
365
366 if (!dom_info_available_p (CDI_DOMINATORS))
367 {
368 struct walk_stmt_info wi;
369
370 memset (&wi, 0, sizeof (wi));
371
372 /* When removing blocks without following reverse dominance
373 order, we may sometimes encounter SSA_NAMEs that have
374 already been released, referenced in other SSA_DEFs that
375 we're about to release. Consider:
376
377 <bb X>:
378 v_1 = foo;
379
380 <bb Y>:
381 w_2 = v_1 + bar;
382 # DEBUG w => w_2
383
384 If we deleted BB X first, propagating the value of w_2
385 won't do us any good. It's too late to recover the
386 original definition of v_1: when it was deleted, it was
387 only referenced in other DEFs, it couldn't possibly know
388 it should have been retained, and propagating every
389 single DEF just in case it might have to be propagated
390 into a DEBUG STMT would probably be too wasteful.
391
392 When dominator information is not readily available, we
393 check for and accept some loss of debug information. But
394 if it is available, there's no excuse for us to remove
395 blocks in the wrong order, so we don't even check for
396 dead SSA NAMEs. SSA verification shall catch any
397 errors. */
398 if ((!gsi && !gimple_bb (def_stmt))
399 || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
400 no_value = true;
401 }
402
403 if (!no_value)
404 value = gimple_assign_rhs_to_tree (def_stmt);
405 }
406
407 if (value)
408 {
409 /* If there's a single use of VAR, and VAR is the entire debug
410 expression (usecount would have been incremented again
411 otherwise), and the definition involves only constants and
412 SSA names, then we can propagate VALUE into this single use,
413 avoiding the temp.
414
415 We can also avoid using a temp if VALUE can be shared and
416 propagated into all uses, without generating expressions that
417 wouldn't be valid gimple RHSs.
418
419 Other cases that would require unsharing or non-gimple RHSs
420 are deferred to a debug temp, although we could avoid temps
421 at the expense of duplication of expressions. */
422
423 if (CONSTANT_CLASS_P (value)
424 || gimple_code (def_stmt) == GIMPLE_PHI
425 || (usecount == 1
426 && (!gimple_assign_single_p (def_stmt)
427 || is_gimple_min_invariant (value)))
428 || is_gimple_reg (value))
429 value = unshare_expr (value);
430 else
431 {
432 gimple def_temp;
433 tree vexpr = make_node (DEBUG_EXPR_DECL);
434
435 def_temp = gimple_build_debug_bind (vexpr,
436 unshare_expr (value),
437 def_stmt);
438
439 DECL_ARTIFICIAL (vexpr) = 1;
440 TREE_TYPE (vexpr) = TREE_TYPE (value);
441 if (DECL_P (value))
442 DECL_MODE (vexpr) = DECL_MODE (value);
443 else
444 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));
445
446 if (gsi)
447 gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
448 else
449 {
450 gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
451 gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
452 }
453
454 value = vexpr;
455 }
456 }
457
458 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
459 {
460 if (!gimple_debug_bind_p (stmt))
461 continue;
462
463 if (value)
464 {
465 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
466 /* unshare_expr is not needed here. vexpr is either a
467 SINGLE_RHS, that can be safely shared, some other RHS
468 that was unshared when we found it had a single debug
469 use, or a DEBUG_EXPR_DECL, that can be safely
470 shared. */
471 SET_USE (use_p, value);
472 /* If we didn't replace uses with a debug decl fold the
473 resulting expression. Otherwise we end up with invalid IL. */
474 if (TREE_CODE (value) != DEBUG_EXPR_DECL)
475 {
476 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
477 fold_stmt_inplace (&gsi);
478 }
479 }
480 else
481 gimple_debug_bind_reset_value (stmt);
482
483 update_stmt (stmt);
484 }
485 }
486
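/* An illustrative sketch, assuming the definition of x_3 below is
   about to be deleted and "d" is a user variable:

     x_3 = a_1 + b_2;
     # DEBUG d => x_3

   is rewritten, when x_3 has more than one debug use, as

     # DEBUG D#1 => a_1 + b_2
     # DEBUG d => D#1

   with D#1 the newly created DEBUG_EXPR_DECL.  With a single debug
   use the RHS is propagated into it directly instead.  */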
487
488 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
489 other DEBUG stmts, and replace uses of the DEF with the
490 newly-created debug temp. */
491
492 void
493 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
494 {
495 gimple stmt;
496 ssa_op_iter op_iter;
497 def_operand_p def_p;
498
499 if (!MAY_HAVE_DEBUG_STMTS)
500 return;
501
502 stmt = gsi_stmt (*gsi);
503
504 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
505 {
506 tree var = DEF_FROM_PTR (def_p);
507
508 if (TREE_CODE (var) != SSA_NAME)
509 continue;
510
511 insert_debug_temp_for_var_def (gsi, var);
512 }
513 }
514
515 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
516
517 void
518 reset_debug_uses (gimple stmt)
519 {
520 ssa_op_iter op_iter;
521 def_operand_p def_p;
522 imm_use_iterator imm_iter;
523 gimple use_stmt;
524
525 if (!MAY_HAVE_DEBUG_STMTS)
526 return;
527
528 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
529 {
530 tree var = DEF_FROM_PTR (def_p);
531
532 if (TREE_CODE (var) != SSA_NAME)
533 continue;
534
535 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
536 {
537 if (!gimple_debug_bind_p (use_stmt))
538 continue;
539
540 gimple_debug_bind_reset_value (use_stmt);
541 update_stmt (use_stmt);
542 }
543 }
544 }
545
546 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
547 dominated stmts before their dominators, so that release_ssa_defs
548 stands a chance of propagating DEFs into debug bind stmts. */
549
550 void
551 release_defs_bitset (bitmap toremove)
552 {
553 unsigned j;
554 bitmap_iterator bi;
555
556 /* Performing a topological sort is probably overkill; this will
557 most likely run in slightly superlinear time, rather than the
558 pathological quadratic worst case. */
559 while (!bitmap_empty_p (toremove))
560 EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
561 {
562 bool remove_now = true;
563 tree var = ssa_name (j);
564 gimple stmt;
565 imm_use_iterator uit;
566
567 FOR_EACH_IMM_USE_STMT (stmt, uit, var)
568 {
569 ssa_op_iter dit;
570 def_operand_p def_p;
571
572 /* We can't propagate PHI nodes into debug stmts. */
573 if (gimple_code (stmt) == GIMPLE_PHI
574 || is_gimple_debug (stmt))
575 continue;
576
577 /* If we find another definition to remove that uses
578 the one we're looking at, defer the removal of this
579 one, so that it can be propagated into debug stmts
580 after the other is. */
581 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
582 {
583 tree odef = DEF_FROM_PTR (def_p);
584
585 if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
586 {
587 remove_now = false;
588 break;
589 }
590 }
591
592 if (!remove_now)
593 BREAK_FROM_IMM_USE_STMT (uit);
594 }
595
596 if (remove_now)
597 {
598 gimple def = SSA_NAME_DEF_STMT (var);
599 gimple_stmt_iterator gsi = gsi_for_stmt (def);
600
601 if (gimple_code (def) == GIMPLE_PHI)
602 remove_phi_node (&gsi, true);
603 else
604 {
605 gsi_remove (&gsi, true);
606 release_defs (def);
607 }
608
609 bitmap_clear_bit (toremove, j);
610 }
611 }
612 }
613
614 /* Return true if SSA_NAME is malformed.
615
616 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
617 operand. */
618
619 static bool
620 verify_ssa_name (tree ssa_name, bool is_virtual)
621 {
622 if (TREE_CODE (ssa_name) != SSA_NAME)
623 {
624 error ("expected an SSA_NAME object");
625 return true;
626 }
627
628 if (TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
629 {
630 error ("type mismatch between an SSA_NAME and its symbol");
631 return true;
632 }
633
634 if (SSA_NAME_IN_FREE_LIST (ssa_name))
635 {
636 error ("found an SSA_NAME that had been released into the free pool");
637 return true;
638 }
639
640 if (is_virtual && is_gimple_reg (ssa_name))
641 {
642 error ("found a virtual definition for a GIMPLE register");
643 return true;
644 }
645
646 if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
647 {
648 error ("virtual SSA name for non-VOP decl");
649 return true;
650 }
651
652 if (!is_virtual && !is_gimple_reg (ssa_name))
653 {
654 error ("found a real definition for a non-register");
655 return true;
656 }
657
658 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
659 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
660 {
661 error ("found a default name with a non-empty defining statement");
662 return true;
663 }
664
665 return false;
666 }
667
668
669 /* Return true if the definition of SSA_NAME at block BB is malformed.
670
671 STMT is the statement where SSA_NAME is created.
672
673 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
674 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
675 it means that the block in that array slot contains the
676 definition of SSA_NAME.
677
678 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
679
680 static bool
681 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
682 gimple stmt, bool is_virtual)
683 {
684 if (verify_ssa_name (ssa_name, is_virtual))
685 goto err;
686
687 if (TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
688 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
689 {
690 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
691 goto err;
692 }
693
694 if (definition_block[SSA_NAME_VERSION (ssa_name)])
695 {
696 error ("SSA_NAME created in two different blocks %i and %i",
697 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
698 goto err;
699 }
700
701 definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
702
703 if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
704 {
705 error ("SSA_NAME_DEF_STMT is wrong");
706 fprintf (stderr, "Expected definition statement:\n");
707 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
708 fprintf (stderr, "\nActual definition statement:\n");
709 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
710 goto err;
711 }
712
713 return false;
714
715 err:
716 fprintf (stderr, "while verifying SSA_NAME ");
717 print_generic_expr (stderr, ssa_name, 0);
718 fprintf (stderr, " in statement\n");
719 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
720
721 return true;
722 }
723
724
725 /* Return true if the use of SSA_NAME at statement STMT in block BB is
726 malformed.
727
728 DEF_BB is the block where SSA_NAME was found to be created.
729
730 IDOM contains immediate dominator information for the flowgraph.
731
732 CHECK_ABNORMAL is true if the caller wants to check whether this use
733 is flowing through an abnormal edge (only used when checking PHI
734 arguments).
735
736 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
737 that are defined before STMT in basic block BB. */
738
739 static bool
740 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
741 gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
742 {
743 bool err = false;
744 tree ssa_name = USE_FROM_PTR (use_p);
745
746 if (!TREE_VISITED (ssa_name))
747 if (verify_imm_links (stderr, ssa_name))
748 err = true;
749
750 TREE_VISITED (ssa_name) = 1;
751
752 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
753 && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
754 ; /* Default definitions have empty statements. Nothing to do. */
755 else if (!def_bb)
756 {
757 error ("missing definition");
758 err = true;
759 }
760 else if (bb != def_bb
761 && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
762 {
763 error ("definition in block %i does not dominate use in block %i",
764 def_bb->index, bb->index);
765 err = true;
766 }
767 else if (bb == def_bb
768 && names_defined_in_bb != NULL
769 && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
770 {
771 error ("definition in block %i follows the use", def_bb->index);
772 err = true;
773 }
774
775 if (check_abnormal
776 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
777 {
778 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
779 err = true;
780 }
781
782 /* Make sure the use is on the appropriate list by checking that the
783 previous element refers to the same SSA name. */
784 if (use_p->prev == NULL)
785 {
786 error ("no immediate_use list");
787 err = true;
788 }
789 else
790 {
791 tree listvar;
792 if (use_p->prev->use == NULL)
793 listvar = use_p->prev->loc.ssa_name;
794 else
795 listvar = USE_FROM_PTR (use_p->prev);
796 if (listvar != ssa_name)
797 {
798 error ("wrong immediate use list");
799 err = true;
800 }
801 }
802
803 if (err)
804 {
805 fprintf (stderr, "for SSA_NAME: ");
806 print_generic_expr (stderr, ssa_name, TDF_VOPS);
807 fprintf (stderr, " in statement:\n");
808 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
809 }
810
811 return err;
812 }
813
814
815 /* Return true if any of the arguments for PHI node PHI at block BB is
816 malformed.
817
818 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
819 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
820 it means that the block in that array slot contains the
821 definition of SSA_NAME. */
822
823 static bool
824 verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block)
825 {
826 edge e;
827 bool err = false;
828 size_t i, phi_num_args = gimple_phi_num_args (phi);
829
830 if (EDGE_COUNT (bb->preds) != phi_num_args)
831 {
832 error ("incoming edge count does not match number of PHI arguments");
833 err = true;
834 goto error;
835 }
836
837 for (i = 0; i < phi_num_args; i++)
838 {
839 use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
840 tree op = USE_FROM_PTR (op_p);
841
842 e = EDGE_PRED (bb, i);
843
844 if (op == NULL_TREE)
845 {
846 error ("PHI argument is missing for edge %d->%d",
847 e->src->index,
848 e->dest->index);
849 err = true;
850 goto error;
851 }
852
853 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
854 {
855 error ("PHI argument is not SSA_NAME, or invariant");
856 err = true;
857 }
858
859 if (TREE_CODE (op) == SSA_NAME)
860 {
861 err = verify_ssa_name (op, !is_gimple_reg (gimple_phi_result (phi)));
862 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
863 op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
864 }
865
866 if (TREE_CODE (op) == ADDR_EXPR)
867 {
868 tree base = TREE_OPERAND (op, 0);
869 while (handled_component_p (base))
870 base = TREE_OPERAND (base, 0);
871 if ((TREE_CODE (base) == VAR_DECL
872 || TREE_CODE (base) == PARM_DECL
873 || TREE_CODE (base) == RESULT_DECL)
874 && !TREE_ADDRESSABLE (base))
875 {
876 error ("address taken, but ADDRESSABLE bit not set");
877 err = true;
878 }
879 }
880
881 if (e->dest != bb)
882 {
883 error ("wrong edge %d->%d for PHI argument",
884 e->src->index, e->dest->index);
885 err = true;
886 }
887
888 if (err)
889 {
890 fprintf (stderr, "PHI argument\n");
891 print_generic_stmt (stderr, op, TDF_VOPS);
892 goto error;
893 }
894 }
895
896 error:
897 if (err)
898 {
899 fprintf (stderr, "for PHI node\n");
900 print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
901 }
902
903
904 return err;
905 }
906
907
908 /* Verify common invariants in the SSA web.
909 TODO: verify the variable annotations. */
910
911 DEBUG_FUNCTION void
912 verify_ssa (bool check_modified_stmt)
913 {
914 size_t i;
915 basic_block bb;
916 basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
917 ssa_op_iter iter;
918 tree op;
919 enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
920 bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
921
922 gcc_assert (!need_ssa_update_p (cfun));
923
924 timevar_push (TV_TREE_SSA_VERIFY);
925
926 /* Keep track of SSA names present in the IL. */
927 for (i = 1; i < num_ssa_names; i++)
928 {
929 tree name = ssa_name (i);
930 if (name)
931 {
932 gimple stmt;
933 TREE_VISITED (name) = 0;
934
935 verify_ssa_name (name, !is_gimple_reg (name));
936
937 stmt = SSA_NAME_DEF_STMT (name);
938 if (!gimple_nop_p (stmt))
939 {
940 basic_block bb = gimple_bb (stmt);
941 verify_def (bb, definition_block,
942 name, stmt, !is_gimple_reg (name));
943
944 }
945 }
946 }
947
948 calculate_dominance_info (CDI_DOMINATORS);
949
950 /* Now verify all the uses and make sure they agree with the definitions
951 found in the previous pass. */
952 FOR_EACH_BB (bb)
953 {
954 edge e;
955 gimple phi;
956 edge_iterator ei;
957 gimple_stmt_iterator gsi;
958
959 /* Make sure that all edges have a clear 'aux' field. */
960 FOR_EACH_EDGE (e, ei, bb->preds)
961 {
962 if (e->aux)
963 {
964 error ("AUX pointer initialized for edge %d->%d", e->src->index,
965 e->dest->index);
966 goto err;
967 }
968 }
969
970 /* Verify the arguments for every PHI node in the block. */
971 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
972 {
973 phi = gsi_stmt (gsi);
974 if (verify_phi_args (phi, bb, definition_block))
975 goto err;
976
977 bitmap_set_bit (names_defined_in_bb,
978 SSA_NAME_VERSION (gimple_phi_result (phi)));
979 }
980
981 /* Now verify all the uses and vuses in every statement of the block. */
982 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
983 {
984 gimple stmt = gsi_stmt (gsi);
985 use_operand_p use_p;
986
987 if (check_modified_stmt && gimple_modified_p (stmt))
988 {
989 error ("stmt (%p) marked modified after optimization pass: ",
990 (void *)stmt);
991 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
992 goto err;
993 }
994
995 if (verify_ssa_operands (stmt))
996 {
997 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
998 goto err;
999 }
1000
1001 if (gimple_debug_bind_p (stmt)
1002 && !gimple_debug_bind_has_value_p (stmt))
1003 continue;
1004
1005 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
1006 {
1007 op = USE_FROM_PTR (use_p);
1008 if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
1009 use_p, stmt, false, names_defined_in_bb))
1010 goto err;
1011 }
1012
1013 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
1014 {
1015 if (SSA_NAME_DEF_STMT (op) != stmt)
1016 {
1017 error ("SSA_NAME_DEF_STMT is wrong");
1018 fprintf (stderr, "Expected definition statement:\n");
1019 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1020 fprintf (stderr, "\nActual definition statement:\n");
1021 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1022 4, TDF_VOPS);
1023 goto err;
1024 }
1025 bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1026 }
1027 }
1028
1029 bitmap_clear (names_defined_in_bb);
1030 }
1031
1032 free (definition_block);
1033
1034 /* Restore the dominance information to its prior known state, so
1035 that we do not perturb the compiler's subsequent behavior. */
1036 if (orig_dom_state == DOM_NONE)
1037 free_dominance_info (CDI_DOMINATORS);
1038 else
1039 set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
1040
1041 BITMAP_FREE (names_defined_in_bb);
1042 timevar_pop (TV_TREE_SSA_VERIFY);
1043 return;
1044
1045 err:
1046 internal_error ("verify_ssa failed");
1047 }
1048
1049 /* Return true if the uids in both int tree maps are equal. */
1050
1051 int
1052 int_tree_map_eq (const void *va, const void *vb)
1053 {
1054 const struct int_tree_map *a = (const struct int_tree_map *) va;
1055 const struct int_tree_map *b = (const struct int_tree_map *) vb;
1056 return (a->uid == b->uid);
1057 }
1058
1059 /* Hash a UID in a int_tree_map. */
1060
1061 unsigned int
1062 int_tree_map_hash (const void *item)
1063 {
1064 return ((const struct int_tree_map *)item)->uid;
1065 }
1066
1067 /* Return true if the DECL_UIDs in both trees are equal. */
1068
1069 int
1070 uid_decl_map_eq (const void *va, const void *vb)
1071 {
1072 const_tree a = (const_tree) va;
1073 const_tree b = (const_tree) vb;
1074 return (a->decl_minimal.uid == b->decl_minimal.uid);
1075 }
1076
1077 /* Hash a tree in a uid_decl_map. */
1078
1079 unsigned int
1080 uid_decl_map_hash (const void *item)
1081 {
1082 return ((const_tree)item)->decl_minimal.uid;
1083 }
1084
1085 /* Return true if the DECL_UIDs in both trees are equal. */
1086
1087 static int
1088 uid_ssaname_map_eq (const void *va, const void *vb)
1089 {
1090 const_tree a = (const_tree) va;
1091 const_tree b = (const_tree) vb;
1092 return (a->ssa_name.var->decl_minimal.uid == b->ssa_name.var->decl_minimal.uid);
1093 }
1094
1095 /* Hash a tree in a uid_ssaname_map. */
1096
1097 static unsigned int
1098 uid_ssaname_map_hash (const void *item)
1099 {
1100 return ((const_tree)item)->ssa_name.var->decl_minimal.uid;
1101 }
1102
1103
1104 /* Initialize global DFA and SSA structures. */
1105
1106 void
1107 init_tree_ssa (struct function *fn)
1108 {
1109 fn->gimple_df = ggc_alloc_cleared_gimple_df ();
1110 fn->gimple_df->referenced_vars = htab_create_ggc (20, uid_decl_map_hash,
1111 uid_decl_map_eq, NULL);
1112 fn->gimple_df->default_defs = htab_create_ggc (20, uid_ssaname_map_hash,
1113 uid_ssaname_map_eq, NULL);
1114 pt_solution_reset (&fn->gimple_df->escaped);
1115 init_ssanames (fn, 0);
1116 }
1117
1118 /* Do the actions required to initialize internal data structures used
1119 in tree-ssa optimization passes. */
1120
1121 static unsigned int
1122 execute_init_datastructures (void)
1123 {
1124 /* Allocate hash tables, arrays and other structures. */
1125 init_tree_ssa (cfun);
1126 return 0;
1127 }
1128
1129 struct gimple_opt_pass pass_init_datastructures =
1130 {
1131 {
1132 GIMPLE_PASS,
1133 "*init_datastructures", /* name */
1134 NULL, /* gate */
1135 execute_init_datastructures, /* execute */
1136 NULL, /* sub */
1137 NULL, /* next */
1138 0, /* static_pass_number */
1139 TV_NONE, /* tv_id */
1140 PROP_cfg, /* properties_required */
1141 0, /* properties_provided */
1142 0, /* properties_destroyed */
1143 0, /* todo_flags_start */
1144 0 /* todo_flags_finish */
1145 }
1146 };
1147
1148 /* Deallocate memory associated with SSA data structures for the current function. */
1149
1150 void
1151 delete_tree_ssa (void)
1152 {
1153 referenced_var_iterator rvi;
1154 tree var;
1155
1156 /* Remove annotations from every referenced local variable. */
1157 FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
1158 {
1159 if (is_global_var (var))
1160 continue;
1161 if (var_ann (var))
1162 {
1163 ggc_free (var_ann (var));
1164 *DECL_VAR_ANN_PTR (var) = NULL;
1165 }
1166 }
1167 htab_delete (gimple_referenced_vars (cfun));
1168 cfun->gimple_df->referenced_vars = NULL;
1169
1170 fini_ssanames ();
1171
1172 /* We no longer maintain the SSA operand cache at this point. */
1173 if (ssa_operands_active ())
1174 fini_ssa_operands ();
1175
1176 htab_delete (cfun->gimple_df->default_defs);
1177 cfun->gimple_df->default_defs = NULL;
1178 pt_solution_reset (&cfun->gimple_df->escaped);
1179 if (cfun->gimple_df->decls_to_pointers != NULL)
1180 pointer_map_destroy (cfun->gimple_df->decls_to_pointers);
1181 cfun->gimple_df->decls_to_pointers = NULL;
1182 cfun->gimple_df->modified_noreturn_calls = NULL;
1183 cfun->gimple_df = NULL;
1184
1185 /* We no longer need the edge variable maps. */
1186 redirect_edge_var_map_destroy ();
1187 }
1188
1189 /* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
1190 useless type conversion, otherwise return false.
1191
1192 This function implicitly defines the middle-end type system. With
1193 the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
1194 holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
1195 the following invariants shall be fulfilled:
1196
1197 1) useless_type_conversion_p is transitive.
1198 If a < b and b < c then a < c.
1199
1200 2) useless_type_conversion_p is not symmetric.
1201 From a < b does not follow a > b.
1202
1203 3) Types define the available set of operations applicable to values.
1204 A type conversion is useless if the operations for the target type
1205 are a subset of the operations for the source type. For example
1206 casts to void* are useless, casts from void* are not (void* can't
1207 be dereferenced or offsetted, but copied, hence its set of operations
1208 is a strict subset of that of all other data pointer types). Casts
1209 to const T* are useless (can't be written to), casts from const T*
1210 to T* are not. */
1211
1212 bool
1213 useless_type_conversion_p (tree outer_type, tree inner_type)
1214 {
1215 /* Do the following before stripping toplevel qualifiers. */
1216 if (POINTER_TYPE_P (inner_type)
1217 && POINTER_TYPE_P (outer_type))
1218 {
1219 /* Do not lose casts between pointers to different address spaces. */
1220 if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
1221 != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
1222 return false;
1223 }
1224
1225 /* From now on qualifiers on value types do not matter. */
1226 inner_type = TYPE_MAIN_VARIANT (inner_type);
1227 outer_type = TYPE_MAIN_VARIANT (outer_type);
1228
1229 if (inner_type == outer_type)
1230 return true;
1231
1232 /* If we know the canonical types, compare them. */
1233 if (TYPE_CANONICAL (inner_type)
1234 && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type))
1235 return true;
1236
1237 /* Changes in machine mode are never useless conversions unless we
1238 deal with aggregate types in which case we defer to later checks. */
1239 if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type)
1240 && !AGGREGATE_TYPE_P (inner_type))
1241 return false;
1242
1243 /* If both the inner and outer types are integral types, then the
1244 conversion is not necessary if they have the same mode and
1245 signedness and precision, and both or neither are boolean. */
1246 if (INTEGRAL_TYPE_P (inner_type)
1247 && INTEGRAL_TYPE_P (outer_type))
1248 {
1249 /* Preserve changes in signedness or precision. */
1250 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
1251 || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
1252 return false;
1253
1254 /* Preserve conversions to/from BOOLEAN_TYPE if types are not
1255 of precision one. */
1256 if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
1257 != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
1258 && TYPE_PRECISION (outer_type) != 1)
1259 return false;
1260
1261 /* We don't need to preserve changes in the types' minimum or
1262 maximum value in general as these do not generate code
1263 unless the types' precisions are different. */
1264 return true;
1265 }
1266
1267 /* Scalar floating point types with the same mode are compatible. */
1268 else if (SCALAR_FLOAT_TYPE_P (inner_type)
1269 && SCALAR_FLOAT_TYPE_P (outer_type))
1270 return true;
1271
1272 /* Fixed point types with the same mode are compatible. */
1273 else if (FIXED_POINT_TYPE_P (inner_type)
1274 && FIXED_POINT_TYPE_P (outer_type))
1275 return true;
1276
1277 /* We need to take special care recursing to pointed-to types. */
1278 else if (POINTER_TYPE_P (inner_type)
1279 && POINTER_TYPE_P (outer_type))
1280 {
1281 /* Do not lose casts to function pointer types. */
1282 if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
1283 || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
1284 && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
1285 || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
1286 return false;
1287
1288 /* We do not care for const qualification of the pointed-to types
1289 as const qualification has no semantic value to the middle-end. */
1290
1291 /* Otherwise pointers/references are equivalent. */
1292 return true;
1293 }
1294
1295 /* Recurse for complex types. */
1296 else if (TREE_CODE (inner_type) == COMPLEX_TYPE
1297 && TREE_CODE (outer_type) == COMPLEX_TYPE)
1298 return useless_type_conversion_p (TREE_TYPE (outer_type),
1299 TREE_TYPE (inner_type));
1300
1301 /* Recurse for vector types with the same number of subparts. */
1302 else if (TREE_CODE (inner_type) == VECTOR_TYPE
1303 && TREE_CODE (outer_type) == VECTOR_TYPE
1304 && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
1305 return useless_type_conversion_p (TREE_TYPE (outer_type),
1306 TREE_TYPE (inner_type));
1307
1308 else if (TREE_CODE (inner_type) == ARRAY_TYPE
1309 && TREE_CODE (outer_type) == ARRAY_TYPE)
1310 {
1311 /* Preserve string attributes. */
1312 if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
1313 return false;
1314
1315 /* Conversions from array types with unknown extent to
1316 array types with known extent are not useless. */
1317 if (!TYPE_DOMAIN (inner_type)
1318 && TYPE_DOMAIN (outer_type))
1319 return false;
1320
1321 /* Nor are conversions from array types with non-constant size to
1322 array types with constant size or to different size. */
1323 if (TYPE_SIZE (outer_type)
1324 && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
1325 && (!TYPE_SIZE (inner_type)
1326 || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
1327 || !tree_int_cst_equal (TYPE_SIZE (outer_type),
1328 TYPE_SIZE (inner_type))))
1329 return false;
1330
1331 /* Check conversions between arrays with partially known extents.
1332 If the array min/max values are constant they have to match.
1333 Otherwise allow conversions to unknown and variable extents.
1334 In particular this declares conversions that may change the
1335 mode to BLKmode as useless. */
1336 if (TYPE_DOMAIN (inner_type)
1337 && TYPE_DOMAIN (outer_type)
1338 && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
1339 {
1340 tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
1341 tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
1342 tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
1343 tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));
1344
1345 /* After gimplification a variable min/max value carries no
1346 additional information compared to a NULL value. All that
1347 matters has been lowered to be part of the IL. */
1348 if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
1349 inner_min = NULL_TREE;
1350 if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
1351 outer_min = NULL_TREE;
1352 if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
1353 inner_max = NULL_TREE;
1354 if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
1355 outer_max = NULL_TREE;
1356
1357 /* Conversions NULL / variable <- cst are useless, but not
1358 the other way around. */
1359 if (outer_min
1360 && (!inner_min
1361 || !tree_int_cst_equal (inner_min, outer_min)))
1362 return false;
1363 if (outer_max
1364 && (!inner_max
1365 || !tree_int_cst_equal (inner_max, outer_max)))
1366 return false;
1367 }
1368
1369 /* Recurse on the element check. */
1370 return useless_type_conversion_p (TREE_TYPE (outer_type),
1371 TREE_TYPE (inner_type));
1372 }
1373
1374 else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
1375 || TREE_CODE (inner_type) == METHOD_TYPE)
1376 && TREE_CODE (inner_type) == TREE_CODE (outer_type))
1377 {
1378 tree outer_parm, inner_parm;
1379
1380 /* If the return types are not compatible bail out. */
1381 if (!useless_type_conversion_p (TREE_TYPE (outer_type),
1382 TREE_TYPE (inner_type)))
1383 return false;
1384
1385 /* Method types should belong to a compatible base class. */
1386 if (TREE_CODE (inner_type) == METHOD_TYPE
1387 && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
1388 TYPE_METHOD_BASETYPE (inner_type)))
1389 return false;
1390
1391 /* A conversion to an unprototyped argument list is ok. */
1392 if (!prototype_p (outer_type))
1393 return true;
1394
1395 /* If the unqualified argument types are compatible the conversion
1396 is useless. */
1397 if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
1398 return true;
1399
1400 for (outer_parm = TYPE_ARG_TYPES (outer_type),
1401 inner_parm = TYPE_ARG_TYPES (inner_type);
1402 outer_parm && inner_parm;
1403 outer_parm = TREE_CHAIN (outer_parm),
1404 inner_parm = TREE_CHAIN (inner_parm))
1405 if (!useless_type_conversion_p
1406 (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
1407 TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
1408 return false;
1409
1410 /* If there is a mismatch in the number of arguments the functions
1411 are not compatible. */
1412 if (outer_parm || inner_parm)
1413 return false;
1414
1415 /* Defer to the target if necessary. */
1416 if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
1417 return comp_type_attributes (outer_type, inner_type) != 0;
1418
1419 return true;
1420 }
1421
1422 /* For aggregates we rely on TYPE_CANONICAL exclusively and require
1423 explicit conversions between types that would have to be
1424 compared structurally. */
1425 else if (AGGREGATE_TYPE_P (inner_type)
1426 && TREE_CODE (inner_type) == TREE_CODE (outer_type))
1427 return false;
1428
1429 return false;
1430 }
1431
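/* A few illustrative outcomes of the above (a sketch; the exact
   answers depend on the target's modes):

     T1 * -> T2 *           useless: object pointers are equivalent
                            here, modulo address spaces
     T * -> void (*) ()     not useless: function-pointer casts must
                            be preserved
     int -> unsigned int    not useless: signedness changes
     int -> enum E          useless when E shares int's mode,
                            precision and signedness  */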
1432 /* Return true if a conversion from either of TYPE1 and TYPE2 to
1433 the other is not required. Otherwise return false. */
1434
1435 bool
1436 types_compatible_p (tree type1, tree type2)
1437 {
1438 return (type1 == type2
1439 || (useless_type_conversion_p (type1, type2)
1440 && useless_type_conversion_p (type2, type1)));
1441 }
1442
1443 /* Return true if EXPR is a useless type conversion, otherwise return
1444 false. */
1445
1446 bool
1447 tree_ssa_useless_type_conversion (tree expr)
1448 {
1449 /* If we have an assignment that merely uses a NOP_EXPR to change
1450 the top of the RHS to the type of the LHS and the type conversion
1451 is "safe", then strip away the type conversion so that we can
1452 enter LHS = RHS into the const_and_copies table. */
1453 if (CONVERT_EXPR_P (expr)
1454 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1455 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1456 return useless_type_conversion_p
1457 (TREE_TYPE (expr),
1458 TREE_TYPE (TREE_OPERAND (expr, 0)));
1459
1460 return false;
1461 }
1462
1463 /* Strip conversions from EXP according to
1464 tree_ssa_useless_type_conversion and return the resulting
1465 expression. */
1466
1467 tree
1468 tree_ssa_strip_useless_type_conversions (tree exp)
1469 {
1470 while (tree_ssa_useless_type_conversion (exp))
1471 exp = TREE_OPERAND (exp, 0);
1472 return exp;
1473 }
1474
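/* E.g. (illustrative): for (int) (long) i_1 on a target where int
   and long share a mode, both conversions are useless and this
   returns i_1 itself.  */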
1475
1476 /* Internal helper for walk_use_def_chains. VAR, FN and DATA are as
1477 described in walk_use_def_chains.
1478
1479 VISITED is a pointer set used to mark visited SSA_NAMEs to avoid
1480 infinite loops. We used to have a bitmap for this to just mark
1481 SSA versions we had visited. But non-sparse bitmaps are way too
1482 expensive, while sparse bitmaps may cause quadratic behavior.
1483
1484 IS_DFS is true if the caller wants to perform a depth-first search
1485 when visiting PHI nodes. A DFS will visit each PHI argument and
1486 call FN after each one. Otherwise, all the arguments are
1487 visited first and then FN is called with each of the visited
1488 arguments in a separate pass. */
1489
1490 static bool
1491 walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
1492 struct pointer_set_t *visited, bool is_dfs)
1493 {
1494 gimple def_stmt;
1495
1496 if (pointer_set_insert (visited, var))
1497 return false;
1498
1499 def_stmt = SSA_NAME_DEF_STMT (var);
1500
1501 if (gimple_code (def_stmt) != GIMPLE_PHI)
1502 {
1503 /* If we reached the end of the use-def chain, call FN. */
1504 return fn (var, def_stmt, data);
1505 }
1506 else
1507 {
1508 size_t i;
1509
1510 /* When doing a breadth-first search, call FN before following the
1511 use-def links for each argument. */
1512 if (!is_dfs)
1513 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1514 if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
1515 return true;
1516
1517 /* Follow use-def links out of each PHI argument. */
1518 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1519 {
1520 tree arg = gimple_phi_arg_def (def_stmt, i);
1521
1522 /* ARG may be NULL for newly introduced PHI nodes. */
1523 if (arg
1524 && TREE_CODE (arg) == SSA_NAME
1525 && walk_use_def_chains_1 (arg, fn, data, visited, is_dfs))
1526 return true;
1527 }
1528
1529 /* When doing a depth-first search, call FN after following the
1530 use-def links for each argument. */
1531 if (is_dfs)
1532 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1533 if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
1534 return true;
1535 }
1536
1537 return false;
1538 }
1539
1540
1541
1542 /* Walk use-def chains starting at the SSA variable VAR. Call
1543 function FN at each reaching definition found. FN takes three
1544 arguments: VAR, its defining statement (DEF_STMT) and a generic
1545 pointer to whatever state information that FN may want to maintain
1546 (DATA). FN is able to stop the walk by returning true, otherwise
1547 in order to continue the walk, FN should return false.
1548
1549 Note, that if DEF_STMT is a PHI node, the semantics are slightly
1550 different. The first argument to FN is no longer the original
1551 variable VAR, but the PHI argument currently being examined. If FN
1552 wants to get at VAR, it should call PHI_RESULT (PHI).
1553
1554 If IS_DFS is true, this function will:
1555
1556 1- walk the use-def chains for all the PHI arguments, and,
1557 2- call (*FN) (ARG, PHI, DATA) on all the PHI arguments.
1558
1559 If IS_DFS is false, the two steps above are done in reverse order
1560 (i.e., a breadth-first search). */
1561
1562 void
1563 walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
1564 bool is_dfs)
1565 {
1566 gimple def_stmt;
1567
1568 gcc_assert (TREE_CODE (var) == SSA_NAME);
1569
1570 def_stmt = SSA_NAME_DEF_STMT (var);
1571
1572 /* We only need to recurse if the reaching definition comes from a PHI
1573 node. */
1574 if (gimple_code (def_stmt) != GIMPLE_PHI)
1575 (*fn) (var, def_stmt, data);
1576 else
1577 {
1578 struct pointer_set_t *visited = pointer_set_create ();
1579 walk_use_def_chains_1 (var, fn, data, visited, is_dfs);
1580 pointer_set_destroy (visited);
1581 }
1582 }
1583
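/* Illustrative usage (a sketch; collect_def is hypothetical):

     static bool
     collect_def (tree var, gimple def_stmt, void *data)
     {
       ... record DEF_STMT in *DATA ...
       return false;
     }

     walk_use_def_chains (name, collect_def, &state, false);

   visits every reaching definition of NAME, breadth-first through
   PHI nodes; returning true from the callback stops the walk.  */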
1584 \f
1585 /* Emit warnings for uninitialized variables. This is done in two passes.
1586
1587 The first pass notices real uses of SSA names with undefined values.
1588 Such uses are unconditionally uninitialized, and we can be certain that
1589 such a use is a mistake. This pass is run before most optimizations,
1590 so that we catch as many as we can.
1591
1592 The second pass follows PHI nodes to find uses that are potentially
1593 uninitialized. In this case we can't necessarily prove that the use
1594 is really uninitialized. This pass is run after most optimizations,
1595 so that we thread as many jumps and possible, and delete as much dead
1596 code as possible, in order to reduce false positives. We also look
1597 again for plain uninitialized variables, since optimization may have
1598 changed conditionally uninitialized to unconditionally uninitialized. */
1599
1600 /* Emit a warning for EXPR based on variable VAR at the point in the
1601 program where T, an SSA_NAME, is used uninitialized. The warning
1602 text is in GMSGID; DATA, if non-null, is the GIMPLE statement whose
1603 location the warning should use. WC is the warning code. */
1604
1605 void
1606 warn_uninit (enum opt_code wc, tree t,
1607 tree expr, tree var, const char *gmsgid, void *data)
1608 {
1609 gimple context = (gimple) data;
1610 location_t location, cfun_loc;
1611 expanded_location xloc, floc;
1612
1613 if (!ssa_undefined_value_p (t))
1614 return;
1615
1616 /* TREE_NO_WARNING either means we already warned, or the front end
1617 wishes to suppress the warning. */
1618 if ((context
1619 && (gimple_no_warning_p (context)
1620 || (gimple_assign_single_p (context)
1621 && TREE_NO_WARNING (gimple_assign_rhs1 (context)))))
1622 || TREE_NO_WARNING (expr))
1623 return;
1624
1625 location = (context != NULL && gimple_has_location (context))
1626 ? gimple_location (context)
1627 : DECL_SOURCE_LOCATION (var);
1628 location = linemap_resolve_location (line_table, location,
1629 LRK_SPELLING_LOCATION,
1630 NULL);
1631 cfun_loc = DECL_SOURCE_LOCATION (cfun->decl);
1632 xloc = expand_location (location);
1633 floc = expand_location (cfun_loc);
1634 if (warning_at (location, wc, gmsgid, expr))
1635 {
1636 TREE_NO_WARNING (expr) = 1;
1637
1638 if (location == DECL_SOURCE_LOCATION (var))
1639 return;
1640 if (xloc.file != floc.file
1641 || linemap_location_before_p (line_table,
1642 location, cfun_loc)
1643 || linemap_location_before_p (line_table,
1644 cfun->function_end_locus,
1645 location))
1646 inform (DECL_SOURCE_LOCATION (var), "%qD was declared here", var);
1647 }
1648 }
1649
1650 unsigned int
1651 warn_uninitialized_vars (bool warn_possibly_uninitialized)
1652 {
1653 gimple_stmt_iterator gsi;
1654 basic_block bb;
1655
1656 FOR_EACH_BB (bb)
1657 {
1658 bool always_executed = dominated_by_p (CDI_POST_DOMINATORS,
1659 single_succ (ENTRY_BLOCK_PTR), bb);
1660 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1661 {
1662 gimple stmt = gsi_stmt (gsi);
1663 use_operand_p use_p;
1664 ssa_op_iter op_iter;
1665 tree use;
1666
1667 if (is_gimple_debug (stmt))
1668 continue;
1669
1670 /* We only do data flow with SSA_NAMEs, so that's all we
1671 can warn about. */
1672 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, op_iter, SSA_OP_USE)
1673 {
1674 use = USE_FROM_PTR (use_p);
1675 if (always_executed)
1676 warn_uninit (OPT_Wuninitialized, use,
1677 SSA_NAME_VAR (use), SSA_NAME_VAR (use),
1678 "%qD is used uninitialized in this function",
1679 stmt);
1680 else if (warn_possibly_uninitialized)
1681 warn_uninit (OPT_Wuninitialized, use,
1682 SSA_NAME_VAR (use), SSA_NAME_VAR (use),
1683 "%qD may be used uninitialized in this function",
1684 stmt);
1685 }
1686
1687 /* For memory the only cheap thing we can do is see if we
1688 have a use of the default def of the virtual operand.
1689 ??? Note that at -O0 we do not have virtual operands.
1690 ??? Not so cheap would be to use the alias oracle via
1691 walk_aliased_vdefs, if we don't find any aliasing vdef
1692 warn as is-used-uninitialized, if we don't find an aliasing
1693 vdef that kills our use (stmt_kills_ref_p), warn as
1694 may-be-used-uninitialized. But this walk is quadratic and
1695 so must be limited which means we would miss warning
1696 opportunities. */
1697 use = gimple_vuse (stmt);
1698 if (use
1699 && gimple_assign_single_p (stmt)
1700 && !gimple_vdef (stmt)
1701 && SSA_NAME_IS_DEFAULT_DEF (use))
1702 {
1703 tree rhs = gimple_assign_rhs1 (stmt);
1704 tree base = get_base_address (rhs);
1705
1706 /* Do not warn if it can be initialized outside this function. */
1707 if (TREE_CODE (base) != VAR_DECL
1708 || DECL_HARD_REGISTER (base)
1709 || is_global_var (base))
1710 continue;
1711
1712 if (always_executed)
1713 warn_uninit (OPT_Wuninitialized, use, gimple_assign_rhs1 (stmt),
1714 base,
1715 "%qE is used uninitialized in this function",
1716 stmt);
1717 else if (warn_possibly_uninitialized)
1718 warn_uninit (OPT_Wuninitialized, use, gimple_assign_rhs1 (stmt),
1719 base,
1720 "%qE may be used uninitialized in this function",
1721 stmt);
1722 }
1723 }
1724 }
1725
1726 return 0;
1727 }
1728
1729 static unsigned int
1730 execute_early_warn_uninitialized (void)
1731 {
1732 /* Currently, this pass always runs, but
1733 execute_late_warn_uninitialized only runs with optimization. With
1734 optimization we want to warn about possible uninitialized as late
1735 as possible, thus don't do it here. However, without
1736 optimization we need to warn here about "may be uninitialized".
1737 */
1738 calculate_dominance_info (CDI_POST_DOMINATORS);
1739
1740 warn_uninitialized_vars (/*warn_possibly_uninitialized=*/!optimize);
1741
1742 /* Post-dominator information cannot be reliably updated. Free it
1743 after the use. */
1744
1745 free_dominance_info (CDI_POST_DOMINATORS);
1746 return 0;
1747 }
1748
1749 static bool
1750 gate_warn_uninitialized (void)
1751 {
1752 return warn_uninitialized != 0;
1753 }
1754
1755 struct gimple_opt_pass pass_early_warn_uninitialized =
1756 {
1757 {
1758 GIMPLE_PASS,
1759 "*early_warn_uninitialized", /* name */
1760 gate_warn_uninitialized, /* gate */
1761 execute_early_warn_uninitialized, /* execute */
1762 NULL, /* sub */
1763 NULL, /* next */
1764 0, /* static_pass_number */
1765 TV_TREE_UNINIT, /* tv_id */
1766 PROP_ssa, /* properties_required */
1767 0, /* properties_provided */
1768 0, /* properties_destroyed */
1769 0, /* todo_flags_start */
1770 0 /* todo_flags_finish */
1771 }
1772 };
1773
1774
1775 /* If necessary, rewrite the base of the reference tree *TP from
1776 a MEM_REF to a plain or converted symbol. */
1777
1778 static void
1779 maybe_rewrite_mem_ref_base (tree *tp)
1780 {
1781 tree sym;
1782
1783 while (handled_component_p (*tp))
1784 tp = &TREE_OPERAND (*tp, 0);
1785 if (TREE_CODE (*tp) == MEM_REF
1786 && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
1787 && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
1788 && DECL_P (sym)
1789 && !TREE_ADDRESSABLE (sym)
1790 && symbol_marked_for_renaming (sym))
1791 {
1792 if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
1793 && useless_type_conversion_p (TREE_TYPE (*tp),
1794 TREE_TYPE (TREE_TYPE (sym)))
1795 && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
1796 TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
1797 {
1798 *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1799 TYPE_SIZE (TREE_TYPE (*tp)),
1800 int_const_binop (MULT_EXPR,
1801 bitsize_int (BITS_PER_UNIT),
1802 TREE_OPERAND (*tp, 1)));
1803 }
1804 else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
1805 && useless_type_conversion_p (TREE_TYPE (*tp),
1806 TREE_TYPE (TREE_TYPE (sym))))
1807 {
1808 *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
1809 ? REALPART_EXPR : IMAGPART_EXPR,
1810 TREE_TYPE (*tp), sym);
1811 }
1812 else if (integer_zerop (TREE_OPERAND (*tp, 1)))
1813 {
1814 if (!useless_type_conversion_p (TREE_TYPE (*tp),
1815 TREE_TYPE (sym)))
1816 *tp = build1 (VIEW_CONVERT_EXPR,
1817 TREE_TYPE (*tp), sym);
1818 else
1819 *tp = sym;
1820 }
1821 }
1822 }
1823
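/* Illustrative transforms (a sketch, assuming 32-bit floats and that
   v, c and s are non-addressable locals marked for renaming):

     MEM[(float *)&v, 4]  becomes  BIT_FIELD_REF <v, 32, 32>  (v a vector)
     MEM[(float *)&c, 0]  becomes  REALPART_EXPR <c>  (c a complex)
     MEM[(T *)&s, 0]      becomes  s, or VIEW_CONVERT_EXPR <T> (s)
                          when the types do not agree  */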
1824 /* For a tree REF return its base if it is the base of a MEM_REF
1825 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1826
1827 static tree
1828 non_rewritable_mem_ref_base (tree ref)
1829 {
1830 tree base = ref;
1831
1832 /* A plain decl does not need it set. */
1833 if (DECL_P (ref))
1834 return NULL_TREE;
1835
1836 while (handled_component_p (base))
1837 base = TREE_OPERAND (base, 0);
1838
1839 /* But watch out for MEM_REFs we cannot lower to a
1840 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
1841 if (TREE_CODE (base) == MEM_REF
1842 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1843 {
1844 tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1845 if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
1846 || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
1847 && useless_type_conversion_p (TREE_TYPE (base),
1848 TREE_TYPE (TREE_TYPE (decl)))
1849 && double_int_fits_in_uhwi_p (mem_ref_offset (base))
1850 && double_int_ucmp
1851 (tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1852 mem_ref_offset (base)) == 1
1853 && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
1854 TYPE_SIZE_UNIT (TREE_TYPE (base))))
1855 return NULL_TREE;
1856 if (DECL_P (decl)
1857 && (!integer_zerop (TREE_OPERAND (base, 1))
1858 || (DECL_SIZE (decl)
1859 != TYPE_SIZE (TREE_TYPE (base)))
1860 || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
1861 return decl;
1862 }
1863
1864 return NULL_TREE;
1865 }
1866
1867 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1868 Otherwise return false. */
1869
1870 static bool
1871 non_rewritable_lvalue_p (tree lhs)
1872 {
1873 /* A plain decl is always rewritable. */
1874 if (DECL_P (lhs))
1875 return false;
1876
1877 /* A decl that is wrapped inside a MEM-REF that covers
1878 it in full is also rewritable.
1879 ??? The following could be relaxed allowing component
1880 references that do not change the access size. */
1881 if (TREE_CODE (lhs) == MEM_REF
1882 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1883 && integer_zerop (TREE_OPERAND (lhs, 1)))
1884 {
1885 tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
1886 if (DECL_P (decl)
1887 && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
1888 && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
1889 return false;
1890 }
1891
1892 return true;
1893 }
1894
1895 /* When possible, clear the TREE_ADDRESSABLE bit or set the DECL_GIMPLE_REG_P
1896 bit and mark the variable VAR for conversion into SSA. Return true when
1897 updating stmts is required. */
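/* This is useful after earlier passes have removed address-taking
statements: a variable whose address is no longer taken can drop
TREE_ADDRESSABLE and become a candidate for SSA renaming. */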
1898
1899 static bool
1900 maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs)
1901 {
1902 bool update_vops = false;
1903
1904 /* Global variables and result decls cannot be changed. */
1905 if (is_global_var (var)
1906 || TREE_CODE (var) == RESULT_DECL
1907 || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1908 return false;
1909
1910 /* If the variable is not in the list of referenced vars then we
1911 do not need to touch it nor can we rename it. */
1912 if (!referenced_var_lookup (cfun, DECL_UID (var)))
1913 return false;
1914
1915 if (TREE_ADDRESSABLE (var)
1916 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1917 a non-register. Otherwise we would get confused and forget to
1918 add virtual operands for it. */
1919 && (!is_gimple_reg_type (TREE_TYPE (var))
1920 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1921 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1922 || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1923 {
1924 TREE_ADDRESSABLE (var) = 0;
1925 if (is_gimple_reg (var))
1926 mark_sym_for_renaming (var);
1927 update_vops = true;
1928 if (dump_file)
1929 {
1930 fprintf (dump_file, "No longer having address taken: ");
1931 print_generic_expr (dump_file, var, 0);
1932 fprintf (dump_file, "\n");
1933 }
1934 }
1935
1936 if (!DECL_GIMPLE_REG_P (var)
1937 && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1938 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1939 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1940 && !TREE_THIS_VOLATILE (var)
1941 && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
1942 {
1943 DECL_GIMPLE_REG_P (var) = 1;
1944 mark_sym_for_renaming (var);
1945 update_vops = true;
1946 if (dump_file)
1947 {
1948 fprintf (dump_file, "Now a gimple register: ");
1949 print_generic_expr (dump_file, var, 0);
1950 fprintf (dump_file, "\n");
1951 }
1952 }
1953
1954 return update_vops;
1955 }
1956
1957 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
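/* This works in two phases: all statements and PHI nodes are first
scanned to fill ADDRESSES_TAKEN and NOT_REG_NEEDS, then each local
variable is optimized where possible and statements referencing
rewritten variables are updated. */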
1958
1959 void
1960 execute_update_addresses_taken (void)
1961 {
1962 gimple_stmt_iterator gsi;
1963 basic_block bb;
1964 bitmap addresses_taken = BITMAP_ALLOC (NULL);
1965 bitmap not_reg_needs = BITMAP_ALLOC (NULL);
1966 bool update_vops = false;
1967 tree var;
1968 unsigned i;
1969
1970 timevar_push (TV_ADDRESS_TAKEN);
1971
1972 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1973 the function body. */
1974 FOR_EACH_BB (bb)
1975 {
1976 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1977 {
1978 gimple stmt = gsi_stmt (gsi);
1979 enum gimple_code code = gimple_code (stmt);
1980 tree decl;
1981
1982 /* Note all addresses taken by the stmt. */
1983 gimple_ior_addresses_taken (addresses_taken, stmt);
1984
1985 /* If we have a call or an assignment, see if the lhs contains
1986 a local decl that must not become a gimple register. */
1987 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1988 {
1989 tree lhs = gimple_get_lhs (stmt);
1990 if (lhs
1991 && TREE_CODE (lhs) != SSA_NAME
1992 && non_rewritable_lvalue_p (lhs))
1993 {
1994 decl = get_base_address (lhs);
1995 if (DECL_P (decl))
1996 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1997 }
1998 }
1999
2000 if (gimple_assign_single_p (stmt))
2001 {
2002 tree rhs = gimple_assign_rhs1 (stmt);
2003 if ((decl = non_rewritable_mem_ref_base (rhs)))
2004 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
2005 }
2006
2007 else if (code == GIMPLE_CALL)
2008 {
2009 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2010 {
2011 tree arg = gimple_call_arg (stmt, i);
2012 if ((decl = non_rewritable_mem_ref_base (arg)))
2013 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
2014 }
2015 }
2016
2017 else if (code == GIMPLE_ASM)
2018 {
2019 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
2020 {
2021 tree link = gimple_asm_output_op (stmt, i);
2022 tree lhs = TREE_VALUE (link);
2023 if (TREE_CODE (lhs) != SSA_NAME)
2024 {
2025 decl = get_base_address (lhs);
2026 if (DECL_P (decl)
2027 && (non_rewritable_lvalue_p (lhs)
2028 /* We cannot move required conversions from
2029 the lhs to the rhs in asm statements, so
2030 require that none are needed. */
2031 || !useless_type_conversion_p
2032 (TREE_TYPE (lhs), TREE_TYPE (decl))))
2033 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
2034 }
2035 }
2036 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
2037 {
2038 tree link = gimple_asm_input_op (stmt, i);
2039 if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
2040 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
2041 }
2042 }
2043 }
2044
2045 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2046 {
2047 size_t i;
2048 gimple phi = gsi_stmt (gsi);
2049
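/* PHI arguments can take addresses as well, e.g. the &a in
# p_1 = PHI <&a(2), &b(3)>; the statement walk above does not
visit PHI nodes, so note those addresses here. */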
2050 for (i = 0; i < gimple_phi_num_args (phi); i++)
2051 {
2052 tree op = PHI_ARG_DEF (phi, i), var;
2053 if (TREE_CODE (op) == ADDR_EXPR
2054 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
2055 && DECL_P (var))
2056 bitmap_set_bit (addresses_taken, DECL_UID (var));
2057 }
2058 }
2059 }
2060
2061 /* We cannot iterate over all referenced vars because that list can contain
2062 unused vars from BLOCK trees, which would cause code generation differences
2063 for -g vs. -g0. */
2064 for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
2065 update_vops |= maybe_optimize_var (var, addresses_taken, not_reg_needs);
2066
2067 FOR_EACH_VEC_ELT (tree, cfun->local_decls, i, var)
2068 update_vops |= maybe_optimize_var (var, addresses_taken, not_reg_needs);
2069
2070 /* Operand caches need to be recomputed for operands referencing the updated
2071 variables. */
2072 if (update_vops)
2073 {
2074 FOR_EACH_BB (bb)
2075 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2076 {
2077 gimple stmt = gsi_stmt (gsi);
2078
2079 /* Re-write MEM_REFs of symbols we want to
2080 rewrite into SSA form. */
2081 if (gimple_assign_single_p (stmt))
2082 {
2083 tree lhs = gimple_assign_lhs (stmt);
2084 tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
2085 tree sym;
2086
2087 /* We shouldn't have any fancy wrapping of
2088 component-refs on the LHS, but look through
2089 VIEW_CONVERT_EXPRs as that is easy. */
2090 while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
2091 lhs = TREE_OPERAND (lhs, 0);
2092 if (TREE_CODE (lhs) == MEM_REF
2093 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
2094 && integer_zerop (TREE_OPERAND (lhs, 1))
2095 && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
2096 && DECL_P (sym)
2097 && !TREE_ADDRESSABLE (sym)
2098 && symbol_marked_for_renaming (sym))
2099 lhs = sym;
2100 else
2101 lhs = gimple_assign_lhs (stmt);
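/* E.g. for "MEM[(int *)&a] = x;" with a marked for renaming the
store becomes "a = x;" here; a's SSA names appear once the SSA
form is updated below. */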
2102
2103 /* Rewrite the RHS and make sure the resulting assignment
2104 is validly typed. */
2105 maybe_rewrite_mem_ref_base (rhsp);
2106 rhs = gimple_assign_rhs1 (stmt);
2107 if (gimple_assign_lhs (stmt) != lhs
2108 && !useless_type_conversion_p (TREE_TYPE (lhs),
2109 TREE_TYPE (rhs)))
2110 rhs = fold_build1 (VIEW_CONVERT_EXPR,
2111 TREE_TYPE (lhs), rhs);
2112
2113 if (gimple_assign_lhs (stmt) != lhs)
2114 gimple_assign_set_lhs (stmt, lhs);
2115
2116 /* For var ={v} {CLOBBER}; where var lost
2117 TREE_ADDRESSABLE, just remove the stmt. */
2118 if (DECL_P (lhs)
2119 && TREE_CLOBBER_P (rhs)
2120 && symbol_marked_for_renaming (lhs))
2121 {
2122 unlink_stmt_vdef (stmt);
2123 gsi_remove (&gsi, true);
2124 release_defs (stmt);
2125 continue;
2126 }
2127
2128 if (gimple_assign_rhs1 (stmt) != rhs)
2129 {
2130 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2131 gimple_assign_set_rhs_from_tree (&gsi, rhs);
2132 }
2133 }
2134
2135 else if (gimple_code (stmt) == GIMPLE_CALL)
2136 {
2137 unsigned i;
2138 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2139 {
2140 tree *argp = gimple_call_arg_ptr (stmt, i);
2141 maybe_rewrite_mem_ref_base (argp);
2142 }
2143 }
2144
2145 else if (gimple_code (stmt) == GIMPLE_ASM)
2146 {
2147 unsigned i;
2148 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
2149 {
2150 tree link = gimple_asm_output_op (stmt, i);
2151 maybe_rewrite_mem_ref_base (&TREE_VALUE (link));
2152 }
2153 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
2154 {
2155 tree link = gimple_asm_input_op (stmt, i);
2156 maybe_rewrite_mem_ref_base (&TREE_VALUE (link));
2157 }
2158 }
2159
2160 else if (gimple_debug_bind_p (stmt)
2161 && gimple_debug_bind_has_value_p (stmt))
2162 {
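/* If the bound value still contains a MEM_REF of a symbol that is
being rewritten into SSA form, the reference becomes invalid; drop
the value rather than emit wrong debug info. */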
2163 tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
2164 tree decl;
2165 maybe_rewrite_mem_ref_base (valuep);
2166 decl = non_rewritable_mem_ref_base (*valuep);
2167 if (decl && symbol_marked_for_renaming (decl))
2168 gimple_debug_bind_reset_value (stmt);
2169 }
2170
2171 if (gimple_references_memory_p (stmt)
2172 || is_gimple_debug (stmt))
2173 update_stmt (stmt);
2174
2175 gsi_next (&gsi);
2176 }
2177
2178 /* Update SSA form here; we may also be called as a non-pass. */
2179 if (number_of_loops () > 1 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
2180 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
2181 else
2182 update_ssa (TODO_update_ssa);
2183 }
2184
2185 BITMAP_FREE (not_reg_needs);
2186 BITMAP_FREE (addresses_taken);
2187 timevar_pop (TV_ADDRESS_TAKEN);
2188 }
2189
2190 struct gimple_opt_pass pass_update_address_taken =
2191 {
2192 {
2193 GIMPLE_PASS,
2194 "addressables", /* name */
2195 NULL, /* gate */
2196 NULL, /* execute */
2197 NULL, /* sub */
2198 NULL, /* next */
2199 0, /* static_pass_number */
2200 TV_ADDRESS_TAKEN, /* tv_id */
2201 PROP_ssa, /* properties_required */
2202 0, /* properties_provided */
2203 0, /* properties_destroyed */
2204 0, /* todo_flags_start */
2205 TODO_update_address_taken /* todo_flags_finish */
2206 }
2207 };