1 /* Miscellaneous SSA utility functions.
2 Copyright (C) 2001-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "cfghooks.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "diagnostic-core.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "gimple-fold.h"
34 #include "gimplify.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "tree-ssa-loop-manip.h"
38 #include "tree-into-ssa.h"
39 #include "tree-ssa.h"
40 #include "cfgloop.h"
41 #include "cfgexpand.h"
42
 43 /* Hash map of variable mappings, keyed by edge. */
44 static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
45
46
47 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E. */
48
49 void
50 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
51 {
52 edge_var_map new_node;
53
54 if (edge_var_maps == NULL)
55 edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
56
57 auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
58 new_node.def = def;
59 new_node.result = result;
60 new_node.locus = locus;
61
62 slot.safe_push (new_node);
63 }
64
65
66 /* Clear the var mappings in edge E. */
67
68 void
69 redirect_edge_var_map_clear (edge e)
70 {
71 if (!edge_var_maps)
72 return;
73
74 auto_vec<edge_var_map> *head = edge_var_maps->get (e);
75
76 if (head)
77 head->release ();
78 }
79
80
81 /* Duplicate the redirected var mappings in OLDE in NEWE.
82
83 This assumes a hash_map can have multiple edges mapping to the same
84 var_map (many to one mapping), since we don't remove the previous mappings.
85 */
86
87 void
88 redirect_edge_var_map_dup (edge newe, edge olde)
89 {
90 if (!edge_var_maps)
91 return;
92
93 auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
94 auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
95 if (!old_head)
96 return;
97
98 new_head->safe_splice (*old_head);
99 }
100
101
102 /* Return the variable mappings for a given edge. If there is none, return
103 NULL. */
104
105 vec<edge_var_map> *
106 redirect_edge_var_map_vector (edge e)
107 {
 108 /* Callers may legitimately query before any mapping has ever been added. */
109 if (!edge_var_maps)
110 return NULL;
111
112 auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
113 if (!slot)
114 return NULL;
115
116 return slot;
117 }
118
119 /* Clear the edge variable mappings. */
120
121 void
122 redirect_edge_var_map_destroy (void)
123 {
124 delete edge_var_maps;
125 edge_var_maps = NULL;
126 }
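
/* Editor's sketch (not part of the original source): how a caller typically
   walks the mappings recorded by the functions above.  E and process () are
   hypothetical; redirect_edge_var_map_vector returns NULL when nothing was
   ever recorded for E.

     edge_var_map *vm;
     unsigned ix;
     vec<edge_var_map> *head = redirect_edge_var_map_vector (e);
     if (head)
       FOR_EACH_VEC_ELT (*head, ix, vm)
         process (redirect_edge_var_map_result (vm),
                  redirect_edge_var_map_def (vm),
                  redirect_edge_var_map_location (vm));  */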
127
128
129 /* Remove the corresponding arguments from the PHI nodes in E's
130 destination block and redirect it to DEST. Return redirected edge.
131 The list of removed arguments is stored in a vector accessed
132 through edge_var_maps. */
133
134 edge
135 ssa_redirect_edge (edge e, basic_block dest)
136 {
137 gphi_iterator gsi;
138 gphi *phi;
139
140 redirect_edge_var_map_clear (e);
141
142 /* Remove the appropriate PHI arguments in E's destination block. */
143 for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
144 {
145 tree def;
 146 source_location locus;
147
148 phi = gsi.phi ();
149 def = gimple_phi_arg_def (phi, e->dest_idx);
150 locus = gimple_phi_arg_location (phi, e->dest_idx);
151
152 if (def == NULL_TREE)
153 continue;
154
155 redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
156 }
157
158 e = redirect_edge_succ_nodup (e, dest);
159
160 return e;
161 }
162
163
164 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
165 E->dest. */
166
167 void
168 flush_pending_stmts (edge e)
169 {
170 gphi *phi;
171 edge_var_map *vm;
172 int i;
173 gphi_iterator gsi;
174
175 vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
176 if (!v)
177 return;
178
179 for (gsi = gsi_start_phis (e->dest), i = 0;
180 !gsi_end_p (gsi) && v->iterate (i, &vm);
181 gsi_next (&gsi), i++)
182 {
183 tree def;
184
185 phi = gsi.phi ();
186 def = redirect_edge_var_map_def (vm);
187 add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
188 }
189
190 redirect_edge_var_map_clear (e);
191 }
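
/* Editor's sketch (not part of the original source): the usual pairing of
   ssa_redirect_edge and flush_pending_stmts when a pass retargets an edge.
   E and DEST are hypothetical.

     edge e2 = ssa_redirect_edge (e, dest);
     ... create the PHI nodes needed in DEST ...
     flush_pending_stmts (e2);

   flush_pending_stmts matches the queued arguments to DEST's PHI nodes
   positionally, so the caller must create those PHIs in the same order as
   in the old destination block.  */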
192
193 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
194 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
195 expression with a different value.
196
197 This will update any annotations (say debug bind stmts) referring
198 to the original LHS, so that they use the RHS instead. This is
199 done even if NLHS and LHS are the same, for it is understood that
200 the RHS will be modified afterwards, and NLHS will not be assigned
201 an equivalent value.
202
203 Adjusting any non-annotation uses of the LHS, if needed, is a
204 responsibility of the caller.
205
206 The effect of this call should be pretty much the same as that of
207 inserting a copy of STMT before STMT, and then removing the
 208 original stmt, at which time gsi_remove() would have updated the
 209 annotations; using this function saves all the inserting,
 210 copying and removing. */
211
212 void
213 gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
214 {
215 if (MAY_HAVE_DEBUG_STMTS)
216 {
217 tree lhs = gimple_get_lhs (stmt);
218
219 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
220
221 insert_debug_temp_for_var_def (NULL, lhs);
222 }
223
224 gimple_set_lhs (stmt, nlhs);
225 }
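
/* Editor's sketch (assumption, not from the original source): a caller that
   rewrites STMT to compute a different value first detaches the old LHS so
   debug information keeps referring to the old value, then installs the new
   operands.  NEW_LHS and NEW_RHS are hypothetical.

     gimple_replace_ssa_lhs (stmt, new_lhs);
     gimple_assign_set_rhs1 (stmt, new_rhs);
     update_stmt (stmt);  */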
226
227
228 /* Given a tree for an expression for which we might want to emit
229 locations or values in debug information (generally a variable, but
230 we might deal with other kinds of trees in the future), return the
231 tree that should be used as the variable of a DEBUG_BIND STMT or
232 VAR_LOCATION INSN or NOTE. Return NULL if VAR is not to be tracked. */
233
234 tree
235 target_for_debug_bind (tree var)
236 {
237 if (!MAY_HAVE_DEBUG_STMTS)
238 return NULL_TREE;
239
240 if (TREE_CODE (var) == SSA_NAME)
241 {
242 var = SSA_NAME_VAR (var);
243 if (var == NULL_TREE)
244 return NULL_TREE;
245 }
246
247 if ((TREE_CODE (var) != VAR_DECL
248 || VAR_DECL_IS_VIRTUAL_OPERAND (var))
249 && TREE_CODE (var) != PARM_DECL)
250 return NULL_TREE;
251
252 if (DECL_HAS_VALUE_EXPR_P (var))
253 return target_for_debug_bind (DECL_VALUE_EXPR (var));
254
255 if (DECL_IGNORED_P (var))
256 return NULL_TREE;
257
258 /* var-tracking only tracks registers. */
259 if (!is_gimple_reg_type (TREE_TYPE (var)))
260 return NULL_TREE;
261
262 return var;
263 }
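
/* Editor's sketch (hypothetical caller, not in the original source): guard
   the emission of a debug bind with target_for_debug_bind so that ignored or
   untracked variables never receive one.  VAR, VALUE, STMT and GSI are
   placeholders.

     tree dvar = target_for_debug_bind (var);
     if (dvar)
       {
         gdebug *bind = gimple_build_debug_bind (dvar, value, stmt);
         gsi_insert_before (&gsi, bind, GSI_SAME_STMT);
       }  */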
264
265 /* Called via walk_tree, look for SSA_NAMEs that have already been
266 released. */
267
268 static tree
269 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
270 {
271 struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
272
273 if (wi && wi->is_lhs)
274 return NULL_TREE;
275
276 if (TREE_CODE (*tp) == SSA_NAME)
277 {
278 if (SSA_NAME_IN_FREE_LIST (*tp))
279 return *tp;
280
281 *walk_subtrees = 0;
282 }
283 else if (IS_TYPE_OR_DECL_P (*tp))
284 *walk_subtrees = 0;
285
286 return NULL_TREE;
287 }
288
289 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
290 by other DEBUG stmts, and replace uses of the DEF with the
291 newly-created debug temp. */
292
293 void
294 insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
295 {
296 imm_use_iterator imm_iter;
297 use_operand_p use_p;
298 gimple *stmt;
299 gimple *def_stmt = NULL;
300 int usecount = 0;
301 tree value = NULL;
302
303 if (!MAY_HAVE_DEBUG_STMTS)
304 return;
305
306 /* If this name has already been registered for replacement, do nothing
307 as anything that uses this name isn't in SSA form. */
308 if (name_registered_for_update_p (var))
309 return;
310
311 /* Check whether there are debug stmts that reference this variable and,
312 if there are, decide whether we should use a debug temp. */
313 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
314 {
315 stmt = USE_STMT (use_p);
316
317 if (!gimple_debug_bind_p (stmt))
318 continue;
319
320 if (usecount++)
321 break;
322
323 if (gimple_debug_bind_get_value (stmt) != var)
324 {
325 /* Count this as an additional use, so as to make sure we
326 use a temp unless VAR's definition has a SINGLE_RHS that
327 can be shared. */
328 usecount++;
329 break;
330 }
331 }
332
333 if (!usecount)
334 return;
335
336 if (gsi)
337 def_stmt = gsi_stmt (*gsi);
338 else
339 def_stmt = SSA_NAME_DEF_STMT (var);
340
341 /* If we didn't get an insertion point, and the stmt has already
342 been removed, we won't be able to insert the debug bind stmt, so
343 we'll have to drop debug information. */
344 if (gimple_code (def_stmt) == GIMPLE_PHI)
345 {
346 value = degenerate_phi_result (as_a <gphi *> (def_stmt));
347 if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
348 value = NULL;
349 /* error_mark_node is what fixup_noreturn_call changes PHI arguments
350 to. */
351 else if (value == error_mark_node)
352 value = NULL;
353 }
354 else if (is_gimple_assign (def_stmt))
355 {
356 bool no_value = false;
357
358 if (!dom_info_available_p (CDI_DOMINATORS))
359 {
360 struct walk_stmt_info wi;
361
362 memset (&wi, 0, sizeof (wi));
363
364 /* When removing blocks without following reverse dominance
365 order, we may sometimes encounter SSA_NAMEs that have
366 already been released, referenced in other SSA_DEFs that
367 we're about to release. Consider:
368
369 <bb X>:
370 v_1 = foo;
371
372 <bb Y>:
373 w_2 = v_1 + bar;
374 # DEBUG w => w_2
375
376 If we deleted BB X first, propagating the value of w_2
 377 won't do us any good. It's too late to recover the
 378 original definition of v_1: when it was deleted, it was
379 only referenced in other DEFs, it couldn't possibly know
380 it should have been retained, and propagating every
381 single DEF just in case it might have to be propagated
382 into a DEBUG STMT would probably be too wasteful.
383
384 When dominator information is not readily available, we
385 check for and accept some loss of debug information. But
386 if it is available, there's no excuse for us to remove
387 blocks in the wrong order, so we don't even check for
388 dead SSA NAMEs. SSA verification shall catch any
389 errors. */
390 if ((!gsi && !gimple_bb (def_stmt))
391 || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
392 no_value = true;
393 }
394
395 if (!no_value)
396 value = gimple_assign_rhs_to_tree (def_stmt);
397 }
398
399 if (value)
400 {
401 /* If there's a single use of VAR, and VAR is the entire debug
402 expression (usecount would have been incremented again
403 otherwise), and the definition involves only constants and
404 SSA names, then we can propagate VALUE into this single use,
405 avoiding the temp.
406
407 We can also avoid using a temp if VALUE can be shared and
408 propagated into all uses, without generating expressions that
409 wouldn't be valid gimple RHSs.
410
411 Other cases that would require unsharing or non-gimple RHSs
412 are deferred to a debug temp, although we could avoid temps
413 at the expense of duplication of expressions. */
414
415 if (CONSTANT_CLASS_P (value)
416 || gimple_code (def_stmt) == GIMPLE_PHI
417 || (usecount == 1
418 && (!gimple_assign_single_p (def_stmt)
419 || is_gimple_min_invariant (value)))
420 || is_gimple_reg (value))
421 ;
422 else
423 {
424 gdebug *def_temp;
425 tree vexpr = make_node (DEBUG_EXPR_DECL);
426
427 def_temp = gimple_build_debug_bind (vexpr,
428 unshare_expr (value),
429 def_stmt);
430
431 DECL_ARTIFICIAL (vexpr) = 1;
432 TREE_TYPE (vexpr) = TREE_TYPE (value);
433 if (DECL_P (value))
434 DECL_MODE (vexpr) = DECL_MODE (value);
435 else
436 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (value));
437
438 if (gsi)
439 gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
440 else
441 {
442 gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
443 gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
444 }
445
446 value = vexpr;
447 }
448 }
449
450 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
451 {
452 if (!gimple_debug_bind_p (stmt))
453 continue;
454
455 if (value)
456 {
457 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
 458 /* At this point value is either a SINGLE_RHS that could be
 459 safely shared, some other RHS that was unshared when we
 460 found it had a single debug use, or a DEBUG_EXPR_DECL;
 461 unshare it again here so every debug use gets its own
 462 copy. */
463 SET_USE (use_p, unshare_expr (value));
464 /* If we didn't replace uses with a debug decl fold the
465 resulting expression. Otherwise we end up with invalid IL. */
466 if (TREE_CODE (value) != DEBUG_EXPR_DECL)
467 {
468 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
469 fold_stmt_inplace (&gsi);
470 }
471 }
472 else
473 gimple_debug_bind_reset_value (stmt);
474
475 update_stmt (stmt);
476 }
477 }
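
/* Editor's sketch of the effect above (GIMPLE notation, not from the
   original source).  Removing the definition of x_3 while a debug bind
   still references it:

     before:   x_3 = a_1 + b_2;
               # DEBUG x => x_3

     after:    # DEBUG D#1 => a_1 + b_2
               x_3 = a_1 + b_2;
               # DEBUG x => D#1

   The definition itself is then free to be deleted by the caller; with a
   single debug use and a shareable RHS the value is propagated directly
   and no debug temp D#1 is created.  */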
478
479
480 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
481 other DEBUG stmts, and replace uses of the DEF with the
482 newly-created debug temp. */
483
484 void
485 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
486 {
487 gimple *stmt;
488 ssa_op_iter op_iter;
489 def_operand_p def_p;
490
491 if (!MAY_HAVE_DEBUG_STMTS)
492 return;
493
494 stmt = gsi_stmt (*gsi);
495
496 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
497 {
498 tree var = DEF_FROM_PTR (def_p);
499
500 if (TREE_CODE (var) != SSA_NAME)
501 continue;
502
503 insert_debug_temp_for_var_def (gsi, var);
504 }
505 }
506
507 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT. */
508
509 void
510 reset_debug_uses (gimple *stmt)
511 {
512 ssa_op_iter op_iter;
513 def_operand_p def_p;
514 imm_use_iterator imm_iter;
515 gimple *use_stmt;
516
517 if (!MAY_HAVE_DEBUG_STMTS)
518 return;
519
520 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
521 {
522 tree var = DEF_FROM_PTR (def_p);
523
524 if (TREE_CODE (var) != SSA_NAME)
525 continue;
526
527 FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
528 {
529 if (!gimple_debug_bind_p (use_stmt))
530 continue;
531
532 gimple_debug_bind_reset_value (use_stmt);
533 update_stmt (use_stmt);
534 }
535 }
536 }
537
538 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
539 dominated stmts before their dominators, so that release_ssa_defs
540 stands a chance of propagating DEFs into debug bind stmts. */
541
542 void
543 release_defs_bitset (bitmap toremove)
544 {
545 unsigned j;
546 bitmap_iterator bi;
547
 548 /* Performing a topological sort is probably overkill; this will
549 most likely run in slightly superlinear time, rather than the
550 pathological quadratic worst case. */
551 while (!bitmap_empty_p (toremove))
552 EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
553 {
554 bool remove_now = true;
555 tree var = ssa_name (j);
556 gimple *stmt;
557 imm_use_iterator uit;
558
559 FOR_EACH_IMM_USE_STMT (stmt, uit, var)
560 {
561 ssa_op_iter dit;
562 def_operand_p def_p;
563
564 /* We can't propagate PHI nodes into debug stmts. */
565 if (gimple_code (stmt) == GIMPLE_PHI
566 || is_gimple_debug (stmt))
567 continue;
568
569 /* If we find another definition to remove that uses
570 the one we're looking at, defer the removal of this
571 one, so that it can be propagated into debug stmts
572 after the other is. */
573 FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
574 {
575 tree odef = DEF_FROM_PTR (def_p);
576
577 if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
578 {
579 remove_now = false;
580 break;
581 }
582 }
583
584 if (!remove_now)
585 BREAK_FROM_IMM_USE_STMT (uit);
586 }
587
588 if (remove_now)
589 {
590 gimple *def = SSA_NAME_DEF_STMT (var);
591 gimple_stmt_iterator gsi = gsi_for_stmt (def);
592
593 if (gimple_code (def) == GIMPLE_PHI)
594 remove_phi_node (&gsi, true);
595 else
596 {
597 gsi_remove (&gsi, true);
598 release_defs (def);
599 }
600
601 bitmap_clear_bit (toremove, j);
602 }
603 }
604 }
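
/* Editor's sketch (hypothetical caller, not in the original source): collect
   the SSA versions of the names to delete and let release_defs_bitset order
   the removals.  NAME1 and NAME2 are placeholders.

     bitmap toremove = BITMAP_ALLOC (NULL);
     bitmap_set_bit (toremove, SSA_NAME_VERSION (name1));
     bitmap_set_bit (toremove, SSA_NAME_VERSION (name2));
     release_defs_bitset (toremove);
     BITMAP_FREE (toremove);  */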
605
606 /* Return true if SSA_NAME is malformed and mark it visited.
607
608 IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
609 operand. */
610
611 static bool
612 verify_ssa_name (tree ssa_name, bool is_virtual)
613 {
614 if (TREE_CODE (ssa_name) != SSA_NAME)
615 {
616 error ("expected an SSA_NAME object");
617 return true;
618 }
619
620 if (SSA_NAME_IN_FREE_LIST (ssa_name))
621 {
622 error ("found an SSA_NAME that had been released into the free pool");
623 return true;
624 }
625
626 if (SSA_NAME_VAR (ssa_name) != NULL_TREE
627 && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
628 {
629 error ("type mismatch between an SSA_NAME and its symbol");
630 return true;
631 }
632
633 if (is_virtual && !virtual_operand_p (ssa_name))
634 {
635 error ("found a virtual definition for a GIMPLE register");
636 return true;
637 }
638
639 if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
640 {
641 error ("virtual SSA name for non-VOP decl");
642 return true;
643 }
644
645 if (!is_virtual && virtual_operand_p (ssa_name))
646 {
647 error ("found a real definition for a non-register");
648 return true;
649 }
650
651 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
652 && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
653 {
654 error ("found a default name with a non-empty defining statement");
655 return true;
656 }
657
658 return false;
659 }
660
661
662 /* Return true if the definition of SSA_NAME at block BB is malformed.
663
664 STMT is the statement where SSA_NAME is created.
665
666 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
667 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
668 it means that the block in that array slot contains the
669 definition of SSA_NAME.
670
671 IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
672
673 static bool
674 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
675 gimple *stmt, bool is_virtual)
676 {
677 if (verify_ssa_name (ssa_name, is_virtual))
678 goto err;
679
680 if (SSA_NAME_VAR (ssa_name)
681 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
682 && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
683 {
684 error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
685 goto err;
686 }
687
688 if (definition_block[SSA_NAME_VERSION (ssa_name)])
689 {
690 error ("SSA_NAME created in two different blocks %i and %i",
691 definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
692 goto err;
693 }
694
695 definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
696
697 if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
698 {
699 error ("SSA_NAME_DEF_STMT is wrong");
700 fprintf (stderr, "Expected definition statement:\n");
701 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
702 fprintf (stderr, "\nActual definition statement:\n");
703 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
704 goto err;
705 }
706
707 return false;
708
709 err:
710 fprintf (stderr, "while verifying SSA_NAME ");
711 print_generic_expr (stderr, ssa_name, 0);
712 fprintf (stderr, " in statement\n");
713 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
714
715 return true;
716 }
717
718
719 /* Return true if the use of SSA_NAME at statement STMT in block BB is
720 malformed.
721
722 DEF_BB is the block where SSA_NAME was found to be created.
723
724 IDOM contains immediate dominator information for the flowgraph.
725
726 CHECK_ABNORMAL is true if the caller wants to check whether this use
727 is flowing through an abnormal edge (only used when checking PHI
728 arguments).
729
730 If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
731 that are defined before STMT in basic block BB. */
732
733 static bool
734 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
735 gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
736 {
737 bool err = false;
738 tree ssa_name = USE_FROM_PTR (use_p);
739
740 if (!TREE_VISITED (ssa_name))
741 if (verify_imm_links (stderr, ssa_name))
742 err = true;
743
744 TREE_VISITED (ssa_name) = 1;
745
746 if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
747 && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
748 ; /* Default definitions have empty statements. Nothing to do. */
749 else if (!def_bb)
750 {
751 error ("missing definition");
752 err = true;
753 }
754 else if (bb != def_bb
755 && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
756 {
757 error ("definition in block %i does not dominate use in block %i",
758 def_bb->index, bb->index);
759 err = true;
760 }
761 else if (bb == def_bb
762 && names_defined_in_bb != NULL
763 && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
764 {
765 error ("definition in block %i follows the use", def_bb->index);
766 err = true;
767 }
768
769 if (check_abnormal
770 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
771 {
772 error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
773 err = true;
774 }
775
 776 /* Make sure the use is on the appropriate immediate-use list by checking
 777 that the previous element refers to the same SSA name. */
778 if (use_p->prev == NULL)
779 {
780 error ("no immediate_use list");
781 err = true;
782 }
783 else
784 {
785 tree listvar;
786 if (use_p->prev->use == NULL)
787 listvar = use_p->prev->loc.ssa_name;
788 else
789 listvar = USE_FROM_PTR (use_p->prev);
790 if (listvar != ssa_name)
791 {
792 error ("wrong immediate use list");
793 err = true;
794 }
795 }
796
797 if (err)
798 {
799 fprintf (stderr, "for SSA_NAME: ");
800 print_generic_expr (stderr, ssa_name, TDF_VOPS);
801 fprintf (stderr, " in statement:\n");
802 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
803 }
804
805 return err;
806 }
807
808
809 /* Return true if any of the arguments for PHI node PHI at block BB is
810 malformed.
811
812 DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
813 version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
814 it means that the block in that array slot contains the
815 definition of SSA_NAME. */
816
817 static bool
818 verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
819 {
820 edge e;
821 bool err = false;
822 size_t i, phi_num_args = gimple_phi_num_args (phi);
823
824 if (EDGE_COUNT (bb->preds) != phi_num_args)
825 {
826 error ("incoming edge count does not match number of PHI arguments");
827 err = true;
828 goto error;
829 }
830
831 for (i = 0; i < phi_num_args; i++)
832 {
833 use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
834 tree op = USE_FROM_PTR (op_p);
835
836 e = EDGE_PRED (bb, i);
837
838 if (op == NULL_TREE)
839 {
840 error ("PHI argument is missing for edge %d->%d",
841 e->src->index,
842 e->dest->index);
843 err = true;
844 goto error;
845 }
846
847 if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
848 {
849 error ("PHI argument is not SSA_NAME, or invariant");
850 err = true;
851 }
852
853 if (TREE_CODE (op) == SSA_NAME)
854 {
855 err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
856 err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
857 op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
858 }
859
860 if (TREE_CODE (op) == ADDR_EXPR)
861 {
862 tree base = TREE_OPERAND (op, 0);
863 while (handled_component_p (base))
864 base = TREE_OPERAND (base, 0);
865 if ((TREE_CODE (base) == VAR_DECL
866 || TREE_CODE (base) == PARM_DECL
867 || TREE_CODE (base) == RESULT_DECL)
868 && !TREE_ADDRESSABLE (base))
869 {
870 error ("address taken, but ADDRESSABLE bit not set");
871 err = true;
872 }
873 }
874
875 if (e->dest != bb)
876 {
877 error ("wrong edge %d->%d for PHI argument",
878 e->src->index, e->dest->index);
879 err = true;
880 }
881
882 if (err)
883 {
884 fprintf (stderr, "PHI argument\n");
885 print_generic_stmt (stderr, op, TDF_VOPS);
886 goto error;
887 }
888 }
889
890 error:
891 if (err)
892 {
893 fprintf (stderr, "for PHI node\n");
894 print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
895 }
896
897
898 return err;
899 }
900
901
902 /* Verify common invariants in the SSA web.
903 TODO: verify the variable annotations. */
904
905 DEBUG_FUNCTION void
906 verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
907 {
908 size_t i;
909 basic_block bb;
910 basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
911 ssa_op_iter iter;
912 tree op;
913 enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
914 bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);
915
916 gcc_assert (!need_ssa_update_p (cfun));
917
918 timevar_push (TV_TREE_SSA_VERIFY);
919
920 /* Keep track of SSA names present in the IL. */
921 for (i = 1; i < num_ssa_names; i++)
922 {
923 tree name = ssa_name (i);
924 if (name)
925 {
926 gimple *stmt;
927 TREE_VISITED (name) = 0;
928
929 verify_ssa_name (name, virtual_operand_p (name));
930
931 stmt = SSA_NAME_DEF_STMT (name);
932 if (!gimple_nop_p (stmt))
933 {
934 basic_block bb = gimple_bb (stmt);
935 if (verify_def (bb, definition_block,
936 name, stmt, virtual_operand_p (name)))
937 goto err;
938 }
939 }
940 }
941
942 calculate_dominance_info (CDI_DOMINATORS);
943
944 /* Now verify all the uses and make sure they agree with the definitions
945 found in the previous pass. */
946 FOR_EACH_BB_FN (bb, cfun)
947 {
948 edge e;
949 edge_iterator ei;
950
951 /* Make sure that all edges have a clear 'aux' field. */
952 FOR_EACH_EDGE (e, ei, bb->preds)
953 {
954 if (e->aux)
955 {
956 error ("AUX pointer initialized for edge %d->%d", e->src->index,
957 e->dest->index);
958 goto err;
959 }
960 }
961
962 /* Verify the arguments for every PHI node in the block. */
963 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
964 {
965 gphi *phi = gsi.phi ();
966 if (verify_phi_args (phi, bb, definition_block))
967 goto err;
968
969 bitmap_set_bit (names_defined_in_bb,
970 SSA_NAME_VERSION (gimple_phi_result (phi)));
971 }
972
973 /* Now verify all the uses and vuses in every statement of the block. */
974 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
975 gsi_next (&gsi))
976 {
977 gimple *stmt = gsi_stmt (gsi);
978 use_operand_p use_p;
979
980 if (check_modified_stmt && gimple_modified_p (stmt))
981 {
982 error ("stmt (%p) marked modified after optimization pass: ",
983 (void *)stmt);
984 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
985 goto err;
986 }
987
988 if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
989 {
990 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
991 goto err;
992 }
993
994 if (gimple_debug_bind_p (stmt)
995 && !gimple_debug_bind_has_value_p (stmt))
996 continue;
997
998 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
999 {
1000 op = USE_FROM_PTR (use_p);
1001 if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
1002 use_p, stmt, false, names_defined_in_bb))
1003 goto err;
1004 }
1005
1006 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
1007 {
1008 if (SSA_NAME_DEF_STMT (op) != stmt)
1009 {
1010 error ("SSA_NAME_DEF_STMT is wrong");
1011 fprintf (stderr, "Expected definition statement:\n");
1012 print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1013 fprintf (stderr, "\nActual definition statement:\n");
1014 print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1015 4, TDF_VOPS);
1016 goto err;
1017 }
1018 bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1019 }
1020 }
1021
1022 bitmap_clear (names_defined_in_bb);
1023 }
1024
1025 free (definition_block);
1026
1027 /* Restore the dominance information to its prior known state, so
1028 that we do not perturb the compiler's subsequent behavior. */
1029 if (orig_dom_state == DOM_NONE)
1030 free_dominance_info (CDI_DOMINATORS);
1031 else
1032 set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
1033
1034 BITMAP_FREE (names_defined_in_bb);
1035 timevar_pop (TV_TREE_SSA_VERIFY);
1036 return;
1037
1038 err:
1039 internal_error ("verify_ssa failed");
1040 }
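
/* Editor's note (assumption, not from the original source): this verifier is
   normally invoked only in checking builds, e.g.

     verify_ssa (true, true);

   guarded by the checking machinery of the revision in use (ENABLE_CHECKING
   or flag_checking); a failure aborts compilation via internal_error as
   above.  */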
1041
1042
1043 /* Initialize global DFA and SSA structures. */
1044
1045 void
1046 init_tree_ssa (struct function *fn)
1047 {
1048 fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
1049 fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
1050 pt_solution_reset (&fn->gimple_df->escaped);
1051 init_ssanames (fn, 0);
1052 }
1053
1054 /* Do the actions required to initialize internal data structures used
1055 in tree-ssa optimization passes. */
1056
1057 static unsigned int
1058 execute_init_datastructures (void)
1059 {
1060 /* Allocate hash tables, arrays and other structures. */
1061 gcc_assert (!cfun->gimple_df);
1062 init_tree_ssa (cfun);
1063 return 0;
1064 }
1065
1066 namespace {
1067
1068 const pass_data pass_data_init_datastructures =
1069 {
1070 GIMPLE_PASS, /* type */
1071 "*init_datastructures", /* name */
1072 OPTGROUP_NONE, /* optinfo_flags */
1073 TV_NONE, /* tv_id */
1074 PROP_cfg, /* properties_required */
1075 0, /* properties_provided */
1076 0, /* properties_destroyed */
1077 0, /* todo_flags_start */
1078 0, /* todo_flags_finish */
1079 };
1080
1081 class pass_init_datastructures : public gimple_opt_pass
1082 {
1083 public:
1084 pass_init_datastructures (gcc::context *ctxt)
1085 : gimple_opt_pass (pass_data_init_datastructures, ctxt)
1086 {}
1087
1088 /* opt_pass methods: */
1089 virtual bool gate (function *fun)
1090 {
 1091 /* Do nothing for functions that were already produced in SSA form. */
1092 return !(fun->curr_properties & PROP_ssa);
1093 }
1094
1095 virtual unsigned int execute (function *)
1096 {
1097 return execute_init_datastructures ();
1098 }
1099
1100 }; // class pass_init_datastructures
1101
1102 } // anon namespace
1103
1104 gimple_opt_pass *
1105 make_pass_init_datastructures (gcc::context *ctxt)
1106 {
1107 return new pass_init_datastructures (ctxt);
1108 }
1109
1110 /* Deallocate memory associated with SSA data structures for FNDECL. */
1111
1112 void
1113 delete_tree_ssa (struct function *fn)
1114 {
1115 fini_ssanames (fn);
1116
1117 /* We no longer maintain the SSA operand cache at this point. */
1118 if (ssa_operands_active (fn))
1119 fini_ssa_operands (fn);
1120
1121 fn->gimple_df->default_defs->empty ();
1122 fn->gimple_df->default_defs = NULL;
1123 pt_solution_reset (&fn->gimple_df->escaped);
1124 if (fn->gimple_df->decls_to_pointers != NULL)
1125 delete fn->gimple_df->decls_to_pointers;
1126 fn->gimple_df->decls_to_pointers = NULL;
1127 fn->gimple_df->modified_noreturn_calls = NULL;
1128 fn->gimple_df = NULL;
1129 }
1130
1131 /* Return true if EXPR is a useless type conversion, otherwise return
1132 false. */
1133
1134 bool
1135 tree_ssa_useless_type_conversion (tree expr)
1136 {
1137 /* If we have an assignment that merely uses a NOP_EXPR to change
1138 the top of the RHS to the type of the LHS and the type conversion
1139 is "safe", then strip away the type conversion so that we can
1140 enter LHS = RHS into the const_and_copies table. */
1141 if (CONVERT_EXPR_P (expr)
1142 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1143 || TREE_CODE (expr) == NON_LVALUE_EXPR)
1144 return useless_type_conversion_p
1145 (TREE_TYPE (expr),
1146 TREE_TYPE (TREE_OPERAND (expr, 0)));
1147
1148 return false;
1149 }
1150
1151 /* Strip conversions from EXP according to
1152 tree_ssa_useless_type_conversion and return the resulting
1153 expression. */
1154
1155 tree
1156 tree_ssa_strip_useless_type_conversions (tree exp)
1157 {
1158 while (tree_ssa_useless_type_conversion (exp))
1159 exp = TREE_OPERAND (exp, 0);
1160 return exp;
1161 }
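
/* Editor's sketch (not from the original source): for an int-typed SSA name
   i_1, a wrapper such as (int) i_1 is a useless conversion, so

     tree stripped = tree_ssa_strip_useless_type_conversions (expr);

   hands back i_1 itself, while a narrowing conversion like (short) i_1 is
   left untouched.  */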
1162
1163
1164 /* Return true if T, an SSA_NAME, has an undefined value. PARTIAL is what
1165 should be returned if the value is only partially undefined. */
1166
1167 bool
1168 ssa_undefined_value_p (tree t, bool partial)
1169 {
1170 gimple *def_stmt;
1171 tree var = SSA_NAME_VAR (t);
1172
1173 if (!var)
1174 ;
1175 /* Parameters get their initial value from the function entry. */
1176 else if (TREE_CODE (var) == PARM_DECL)
1177 return false;
 1178 /* When returning by reference, the address of the return value is
 1179 actually passed as a hidden parameter. */
1180 else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1181 return false;
1182 /* Hard register variables get their initial value from the ether. */
1183 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1184 return false;
1185
1186 /* The value is undefined iff its definition statement is empty. */
1187 def_stmt = SSA_NAME_DEF_STMT (t);
1188 if (gimple_nop_p (def_stmt))
1189 return true;
1190
 1191 /* Check whether the complex value is only partially defined. */
1192 if (partial && is_gimple_assign (def_stmt)
1193 && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
1194 {
1195 tree rhs1, rhs2;
1196
1197 rhs1 = gimple_assign_rhs1 (def_stmt);
1198 rhs2 = gimple_assign_rhs2 (def_stmt);
1199 return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
1200 || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
1201 }
1202 return false;
1203 }
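
/* Editor's sketch (assumption, not from the original source): given

     c_2 = COMPLEX_EXPR <r_1, i_3(D)>;

   where i_3(D) is a default definition with no initializing statement,
   ssa_undefined_value_p (c_2, true) returns true (the value is partially
   undefined) while ssa_undefined_value_p (c_2, false) returns false.  */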
1204
1205
1206 /* Return TRUE iff STMT, a gimple statement, references an undefined
1207 SSA name. */
1208
1209 bool
1210 gimple_uses_undefined_value_p (gimple *stmt)
1211 {
1212 ssa_op_iter iter;
1213 tree op;
1214
1215 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
1216 if (ssa_undefined_value_p (op))
1217 return true;
1218
1219 return false;
1220 }
1221
1222
1223
1224 /* If necessary, rewrite the base of the reference tree *TP from
1225 a MEM_REF to a plain or converted symbol. */
1226
1227 static void
1228 maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
1229 {
1230 tree sym;
1231
1232 while (handled_component_p (*tp))
1233 tp = &TREE_OPERAND (*tp, 0);
1234 if (TREE_CODE (*tp) == MEM_REF
1235 && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
1236 && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
1237 && DECL_P (sym)
1238 && !TREE_ADDRESSABLE (sym)
1239 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1240 {
1241 if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
1242 && useless_type_conversion_p (TREE_TYPE (*tp),
1243 TREE_TYPE (TREE_TYPE (sym)))
1244 && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
1245 TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
1246 {
1247 *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1248 TYPE_SIZE (TREE_TYPE (*tp)),
1249 int_const_binop (MULT_EXPR,
1250 bitsize_int (BITS_PER_UNIT),
1251 TREE_OPERAND (*tp, 1)));
1252 }
1253 else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
1254 && useless_type_conversion_p (TREE_TYPE (*tp),
1255 TREE_TYPE (TREE_TYPE (sym))))
1256 {
1257 *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
1258 ? REALPART_EXPR : IMAGPART_EXPR,
1259 TREE_TYPE (*tp), sym);
1260 }
1261 else if (integer_zerop (TREE_OPERAND (*tp, 1)))
1262 {
1263 if (!useless_type_conversion_p (TREE_TYPE (*tp),
1264 TREE_TYPE (sym)))
1265 *tp = build1 (VIEW_CONVERT_EXPR,
1266 TREE_TYPE (*tp), sym);
1267 else
1268 *tp = sym;
1269 }
1270 }
1271 }
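
/* Editor's sketch of the rewrites performed above (GIMPLE notation, not from
   the original source), for a non-addressable vector 'v' with float elements
   and a non-addressable '_Complex double c':

     MEM[(float *)&v + 4B]    ->  BIT_FIELD_REF <v, 32, 32>
     MEM[(double *)&c + 0B]   ->  REALPART_EXPR <c>
     MEM[(double *)&c + 8B]   ->  IMAGPART_EXPR <c>
     MEM[(T *)&x + 0B]        ->  VIEW_CONVERT_EXPR<T>(x), or plain x when
                                  the types already agree  */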
1272
1273 /* For a tree REF return its base if it is the base of a MEM_REF
1274 that cannot be rewritten into SSA form. Otherwise return NULL_TREE. */
1275
1276 static tree
1277 non_rewritable_mem_ref_base (tree ref)
1278 {
1279 tree base = ref;
1280
 1281 /* A plain decl is never the base of a non-rewritable MEM_REF. */
1282 if (DECL_P (ref))
1283 return NULL_TREE;
1284
1285 while (handled_component_p (base))
1286 base = TREE_OPERAND (base, 0);
1287
1288 /* But watch out for MEM_REFs we cannot lower to a
1289 VIEW_CONVERT_EXPR or a BIT_FIELD_REF. */
1290 if (TREE_CODE (base) == MEM_REF
1291 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1292 {
1293 tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1294 if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
1295 || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
1296 && useless_type_conversion_p (TREE_TYPE (base),
1297 TREE_TYPE (TREE_TYPE (decl)))
1298 && wi::fits_uhwi_p (mem_ref_offset (base))
1299 && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1300 mem_ref_offset (base))
1301 && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
1302 TYPE_SIZE_UNIT (TREE_TYPE (base))))
1303 return NULL_TREE;
1304 if (DECL_P (decl)
1305 && (!integer_zerop (TREE_OPERAND (base, 1))
1306 || (DECL_SIZE (decl)
1307 != TYPE_SIZE (TREE_TYPE (base)))
1308 || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base)))
1309 return decl;
1310 }
1311
1312 return NULL_TREE;
1313 }
1314
1315 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
 1316 Otherwise return false. */
1317
1318 static bool
1319 non_rewritable_lvalue_p (tree lhs)
1320 {
1321 /* A plain decl is always rewritable. */
1322 if (DECL_P (lhs))
1323 return false;
1324
1325 /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
1326 a reasonably efficient manner... */
1327 if ((TREE_CODE (lhs) == REALPART_EXPR
1328 || TREE_CODE (lhs) == IMAGPART_EXPR)
1329 && DECL_P (TREE_OPERAND (lhs, 0)))
1330 return false;
1331
1332 /* A decl that is wrapped inside a MEM-REF that covers
 1333 it in full is also rewritable.
1334 ??? The following could be relaxed allowing component
1335 references that do not change the access size. */
1336 if (TREE_CODE (lhs) == MEM_REF
1337 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1338 && integer_zerop (TREE_OPERAND (lhs, 1)))
1339 {
1340 tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
1341 if (DECL_P (decl)
1342 && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
1343 && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
1344 return false;
1345 }
1346
1347 return true;
1348 }
1349
1350 /* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit and
 1351 mark the variable VAR for conversion into SSA. Variables that become
 1352 suitable are recorded in SUITABLE_FOR_RENAMING. */
1353
1354 static void
1355 maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
1356 bitmap suitable_for_renaming)
1357 {
 1358 /* Global variables and result decls cannot be changed. */
1359 if (is_global_var (var)
1360 || TREE_CODE (var) == RESULT_DECL
1361 || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1362 return;
1363
1364 if (TREE_ADDRESSABLE (var)
1365 /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1366 a non-register. Otherwise we are confused and forget to
1367 add virtual operands for it. */
1368 && (!is_gimple_reg_type (TREE_TYPE (var))
1369 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1370 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1371 || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1372 {
1373 TREE_ADDRESSABLE (var) = 0;
1374 if (is_gimple_reg (var))
1375 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1376 if (dump_file)
1377 {
1378 fprintf (dump_file, "No longer having address taken: ");
1379 print_generic_expr (dump_file, var, 0);
1380 fprintf (dump_file, "\n");
1381 }
1382 }
1383
1384 if (!DECL_GIMPLE_REG_P (var)
1385 && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1386 && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1387 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1388 && !TREE_THIS_VOLATILE (var)
1389 && (TREE_CODE (var) != VAR_DECL || !DECL_HARD_REGISTER (var)))
1390 {
1391 DECL_GIMPLE_REG_P (var) = 1;
1392 bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1393 if (dump_file)
1394 {
1395 fprintf (dump_file, "Now a gimple register: ");
1396 print_generic_expr (dump_file, var, 0);
1397 fprintf (dump_file, "\n");
1398 }
1399 }
1400 }
1401
1402 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables. */
1403
1404 void
1405 execute_update_addresses_taken (void)
1406 {
1407 basic_block bb;
1408 bitmap addresses_taken = BITMAP_ALLOC (NULL);
1409 bitmap not_reg_needs = BITMAP_ALLOC (NULL);
1410 bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
1411 tree var;
1412 unsigned i;
1413
1414 timevar_push (TV_ADDRESS_TAKEN);
1415
1416 /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1417 the function body. */
1418 FOR_EACH_BB_FN (bb, cfun)
1419 {
1420 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1421 gsi_next (&gsi))
1422 {
1423 gimple *stmt = gsi_stmt (gsi);
1424 enum gimple_code code = gimple_code (stmt);
1425 tree decl;
1426
1427 /* Note all addresses taken by the stmt. */
1428 gimple_ior_addresses_taken (addresses_taken, stmt);
1429
1430 /* If we have a call or an assignment, see if the lhs contains
1431 a local decl that requires not to be a gimple register. */
1432 if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1433 {
1434 tree lhs = gimple_get_lhs (stmt);
1435 if (lhs
1436 && TREE_CODE (lhs) != SSA_NAME
1437 && non_rewritable_lvalue_p (lhs))
1438 {
1439 decl = get_base_address (lhs);
1440 if (DECL_P (decl))
1441 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1442 }
1443 }
1444
1445 if (gimple_assign_single_p (stmt))
1446 {
1447 tree rhs = gimple_assign_rhs1 (stmt);
1448 if ((decl = non_rewritable_mem_ref_base (rhs)))
1449 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1450 }
1451
1452 else if (code == GIMPLE_CALL)
1453 {
1454 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1455 {
1456 tree arg = gimple_call_arg (stmt, i);
1457 if ((decl = non_rewritable_mem_ref_base (arg)))
1458 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1459 }
1460 }
1461
1462 else if (code == GIMPLE_ASM)
1463 {
1464 gasm *asm_stmt = as_a <gasm *> (stmt);
1465 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1466 {
1467 tree link = gimple_asm_output_op (asm_stmt, i);
1468 tree lhs = TREE_VALUE (link);
1469 if (TREE_CODE (lhs) != SSA_NAME)
1470 {
1471 decl = get_base_address (lhs);
1472 if (DECL_P (decl)
1473 && (non_rewritable_lvalue_p (lhs)
1474 /* We cannot move required conversions from
1475 the lhs to the rhs in asm statements, so
1476 require we do not need any. */
1477 || !useless_type_conversion_p
1478 (TREE_TYPE (lhs), TREE_TYPE (decl))))
1479 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1480 }
1481 }
1482 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1483 {
1484 tree link = gimple_asm_input_op (asm_stmt, i);
1485 if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
1486 bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1487 }
1488 }
1489 }
1490
1491 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1492 gsi_next (&gsi))
1493 {
1494 size_t i;
1495 gphi *phi = gsi.phi ();
1496
1497 for (i = 0; i < gimple_phi_num_args (phi); i++)
1498 {
1499 tree op = PHI_ARG_DEF (phi, i), var;
1500 if (TREE_CODE (op) == ADDR_EXPR
1501 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1502 && DECL_P (var))
1503 bitmap_set_bit (addresses_taken, DECL_UID (var));
1504 }
1505 }
1506 }
1507
1508 /* We cannot iterate over all referenced vars because that can contain
1509 unused vars from BLOCK trees, which causes code generation differences
1510 for -g vs. -g0. */
1511 for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1512 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1513 suitable_for_renaming);
1514
1515 FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1516 maybe_optimize_var (var, addresses_taken, not_reg_needs,
1517 suitable_for_renaming);
1518
1519 /* Operand caches need to be recomputed for operands referencing the updated
1520 variables and operands need to be rewritten to expose bare symbols. */
1521 if (!bitmap_empty_p (suitable_for_renaming))
1522 {
1523 FOR_EACH_BB_FN (bb, cfun)
1524 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1525 {
1526 gimple *stmt = gsi_stmt (gsi);
1527
1528 /* Re-write TARGET_MEM_REFs of symbols we want to
1529 rewrite into SSA form. */
1530 if (gimple_assign_single_p (stmt))
1531 {
1532 tree lhs = gimple_assign_lhs (stmt);
1533 tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
1534 tree sym;
1535
1536 /* Rewrite LHS IMAG/REALPART_EXPR similar to
1537 gimplify_modify_expr_complex_part. */
1538 if ((TREE_CODE (lhs) == IMAGPART_EXPR
1539 || TREE_CODE (lhs) == REALPART_EXPR)
1540 && DECL_P (TREE_OPERAND (lhs, 0))
1541 && bitmap_bit_p (suitable_for_renaming,
1542 DECL_UID (TREE_OPERAND (lhs, 0))))
1543 {
1544 tree other = make_ssa_name (TREE_TYPE (lhs));
1545 tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
1546 ? REALPART_EXPR : IMAGPART_EXPR,
1547 TREE_TYPE (other),
1548 TREE_OPERAND (lhs, 0));
1549 gimple *load = gimple_build_assign (other, lrhs);
1550 location_t loc = gimple_location (stmt);
1551 gimple_set_location (load, loc);
1552 gimple_set_vuse (load, gimple_vuse (stmt));
1553 gsi_insert_before (&gsi, load, GSI_SAME_STMT);
1554 gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
1555 gimple_assign_set_rhs_with_ops
1556 (&gsi, COMPLEX_EXPR,
1557 TREE_CODE (lhs) == IMAGPART_EXPR
1558 ? other : gimple_assign_rhs1 (stmt),
1559 TREE_CODE (lhs) == IMAGPART_EXPR
1560 ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
1561 stmt = gsi_stmt (gsi);
1562 unlink_stmt_vdef (stmt);
1563 update_stmt (stmt);
1564 continue;
1565 }
1566
1567 /* We shouldn't have any fancy wrapping of
1568 component-refs on the LHS, but look through
1569 VIEW_CONVERT_EXPRs as that is easy. */
1570 while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1571 lhs = TREE_OPERAND (lhs, 0);
1572 if (TREE_CODE (lhs) == MEM_REF
1573 && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1574 && integer_zerop (TREE_OPERAND (lhs, 1))
1575 && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1576 && DECL_P (sym)
1577 && !TREE_ADDRESSABLE (sym)
1578 && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1579 lhs = sym;
1580 else
1581 lhs = gimple_assign_lhs (stmt);
1582
1583 /* Rewrite the RHS and make sure the resulting assignment
1584 is validly typed. */
1585 maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
1586 rhs = gimple_assign_rhs1 (stmt);
1587 if (gimple_assign_lhs (stmt) != lhs
1588 && !useless_type_conversion_p (TREE_TYPE (lhs),
1589 TREE_TYPE (rhs)))
1590 rhs = fold_build1 (VIEW_CONVERT_EXPR,
1591 TREE_TYPE (lhs), rhs);
1592
1593 if (gimple_assign_lhs (stmt) != lhs)
1594 gimple_assign_set_lhs (stmt, lhs);
1595
1596 if (gimple_assign_rhs1 (stmt) != rhs)
1597 {
1598 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1599 gimple_assign_set_rhs_from_tree (&gsi, rhs);
1600 }
1601 }
1602
1603 else if (gimple_code (stmt) == GIMPLE_CALL)
1604 {
1605 unsigned i;
1606 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1607 {
1608 tree *argp = gimple_call_arg_ptr (stmt, i);
1609 maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
1610 }
1611 }
1612
1613 else if (gimple_code (stmt) == GIMPLE_ASM)
1614 {
1615 gasm *asm_stmt = as_a <gasm *> (stmt);
1616 unsigned i;
1617 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1618 {
1619 tree link = gimple_asm_output_op (asm_stmt, i);
1620 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1621 suitable_for_renaming);
1622 }
1623 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1624 {
1625 tree link = gimple_asm_input_op (asm_stmt, i);
1626 maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1627 suitable_for_renaming);
1628 }
1629 }
1630
1631 else if (gimple_debug_bind_p (stmt)
1632 && gimple_debug_bind_has_value_p (stmt))
1633 {
1634 tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
1635 tree decl;
1636 maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
1637 decl = non_rewritable_mem_ref_base (*valuep);
1638 if (decl
1639 && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
1640 gimple_debug_bind_reset_value (stmt);
1641 }
1642
1643 if (gimple_references_memory_p (stmt)
1644 || is_gimple_debug (stmt))
1645 update_stmt (stmt);
1646
1647 gsi_next (&gsi);
1648 }
1649
 1650 /* Update SSA form here; we may be called outside of a pass as well. */
1651 if (number_of_loops (cfun) > 1
1652 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
1653 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
1654 else
1655 update_ssa (TODO_update_ssa);
1656 }
1657
1658 BITMAP_FREE (not_reg_needs);
1659 BITMAP_FREE (addresses_taken);
1660 BITMAP_FREE (suitable_for_renaming);
1661 timevar_pop (TV_ADDRESS_TAKEN);
1662 }
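
/* Editor's note (assumption, not from the original source): the pass defined
   below merely schedules this function through TODO_update_address_taken in
   its todo_flags_finish; any other pass can request the same recomputation,
   e.g. by returning

     TODO_update_address_taken

   from its execute hook.  */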
1663
1664 namespace {
1665
1666 const pass_data pass_data_update_address_taken =
1667 {
1668 GIMPLE_PASS, /* type */
1669 "addressables", /* name */
1670 OPTGROUP_NONE, /* optinfo_flags */
1671 TV_ADDRESS_TAKEN, /* tv_id */
1672 PROP_ssa, /* properties_required */
1673 0, /* properties_provided */
1674 0, /* properties_destroyed */
1675 0, /* todo_flags_start */
1676 TODO_update_address_taken, /* todo_flags_finish */
1677 };
1678
1679 class pass_update_address_taken : public gimple_opt_pass
1680 {
1681 public:
1682 pass_update_address_taken (gcc::context *ctxt)
1683 : gimple_opt_pass (pass_data_update_address_taken, ctxt)
1684 {}
1685
1686 /* opt_pass methods: */
1687
1688 }; // class pass_update_address_taken
1689
1690 } // anon namespace
1691
1692 gimple_opt_pass *
1693 make_pass_update_address_taken (gcc::context *ctxt)
1694 {
1695 return new pass_update_address_taken (ctxt);
1696 }