/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
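
/* For illustration, a hypothetical sketch of the reuse described above
   (the names 'a', 'a_5', 'x_1' and 'b_3' are invented for the example):

	# VUSE <a_5>
	x_1 = a + b_3;

   If this statement is re-parsed, build_vuses will contain the base
   variable 'a'.  Because the previous cache held the SSA_NAME 'a_5'
   for that same base variable, the finalize routines below splice the
   old node back in, so the rebuilt cache still reads 'a_5'.  */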
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_none	0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def 	(1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def 	(1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 2)

/* Operand is a "non-specific" kill for call-clobbers and such.  This
   is used to distinguish "reset the world" events from explicit
   GIMPLE_MODIFY_STMTs.  */
#define opf_non_specific  (1 << 3)
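
/* As a hypothetical illustration of how these flags combine: scanning
   the LHS of a plain assignment starts with 'opf_is_def | opf_kill_def'
   (see get_modify_stmt_operands below), and opf_kill_def is stripped
   along the way when the store turns out to be preserving; the
   call-clobber code instead uses 'opf_is_def | opf_non_specific',
   which allows the def to be demoted to a VUSE for unmodifiable
   variables.  */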
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Array for building all the V_MAY_DEF operands.  */
static VEC(tree,heap) *build_v_may_defs;

/* Array for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Array for building all the V_MUST_DEF operands.  */
static VEC(tree,heap) *build_v_must_defs;
static void get_expr_operands (tree, tree *, int);
/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;
/* Allocates operand OP of given TYPE from the appropriate free list,
   or of the new value if the list is empty.  */

#define ALLOC_OPTYPE(OP, TYPE)				\
  do							\
    {							\
      TYPE##_optype_p ret				\
	= gimple_ssa_operands (cfun)->free_##TYPE##s;	\
      if (ret)						\
	gimple_ssa_operands (cfun)->free_##TYPE##s	\
	  = ret->next;					\
      else						\
	ret = ssa_operand_alloc (sizeof (*ret));	\
      (OP) = ret;					\
    } while (0)
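
/* Typical use, as in the add_*_op routines below:

       def_optype_p new;
       ALLOC_OPTYPE (new, def);

   pops a node off the free_defs list when one is available, and
   otherwise carves a fresh node out of the operand memory pool via
   ssa_operand_alloc.  */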
/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}
/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *) p);
  tree e2 = *((const tree *) q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}
/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
	  > get_name_decl (VEC_index (tree, list, 1)))
	{
	  /* Swap elements if in the wrong order.  */
	  tree tmp = VEC_index (tree, list, 0);
	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
	  VEC_replace (tree, list, 1, tmp);
	}
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
	 VEC_length (tree, list),
	 sizeof (tree),
	 operand_build_cmp);
}
/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}
/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobber_ops.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (V_MAY_DEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_ops.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid by using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuses = VEC_alloc (tree, heap, 25);
      build_v_may_defs = VEC_alloc (tree, heap, 25);
      build_v_must_defs = VEC_alloc (tree, heap, 25);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      VEC_free (tree, heap, build_v_must_defs);
      VEC_free (tree, heap, build_v_may_defs);
      VEC_free (tree, heap, build_vuses);
    }
  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;
  gimple_ssa_operands (cfun)->free_vuses = NULL;
  gimple_ssa_operands (cfun)->free_maydefs = NULL;
  gimple_ssa_operands (cfun)->free_mustdefs = NULL;
  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
	= gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars:%d\n",
	       clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided:%d\n",
	       clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided:%d\n",
	       clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
	       clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers:%d\n",
	       clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
	       clobber_stats.static_readonly_clobbers_avoided);
    }
}
/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;
  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= SSA_OPERAND_MEMORY_SIZE)
    {
      struct ssa_operand_memory_d *ptr;
      ptr = GGC_NEW (struct ssa_operand_memory_d);
      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }
  ptr = &(gimple_ssa_operands (cfun)->operand_memory
	  ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}
/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}
/* Appends ELT after TO, and moves the TO pointer to ELT.  */

#define APPEND_OP_AFTER(ELT, TO)	\
  do					\
    {					\
      (TO)->next = (ELT);		\
      (TO) = (ELT);			\
    } while (0)

/* Appends head of list FROM after TO, and moves both pointers
   to their successors.  */

#define MOVE_HEAD_AFTER(FROM, TO)	\
  do					\
    {					\
      APPEND_OP_AFTER (FROM, TO);	\
      (FROM) = (FROM)->next;		\
    } while (0)

/* Moves OP to appropriate freelist.  OP is set to its successor.  */

#define MOVE_HEAD_TO_FREELIST(OP, TYPE)			\
  do							\
    {							\
      TYPE##_optype_p next = (OP)->next;		\
      (OP)->next					\
	= gimple_ssa_operands (cfun)->free_##TYPE##s;	\
      gimple_ssa_operands (cfun)->free_##TYPE##s = (OP);\
      (OP) = next;					\
    } while (0)

/* Initializes immediate use at USE_PTR to value VAL, and links it to the list
   of immediate uses.  STMT is the current statement.  */

#define INITIALIZE_USE(USE_PTR, VAL, STMT)		\
  do							\
    {							\
      (USE_PTR)->use = (VAL);				\
      link_imm_use_stmt ((USE_PTR), *(VAL), (STMT));	\
    } while (0)
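
/* For illustration, the splice performed with these macros: the
   finalize_* routines walk an old operand list and a sorted build
   vector in lockstep, MOVE_HEAD_AFTER-ing a matching old node onto the
   new list (reusing its memory and immediate-use links), sending stale
   nodes to the freelist with MOVE_HEAD_TO_FREELIST, and allocating
   brand new nodes only for operands with no old counterpart.  */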
/* Adds OP to the list of defs after LAST, and moves
   LAST to the new element.  */

static inline void
add_def_op (tree *op, def_optype_p *last)
{
  def_optype_p new;

  ALLOC_OPTYPE (new, def);
  DEF_OP_PTR (new) = op;
  APPEND_OP_AFTER (new, *last);
}
/* Adds OP to the list of uses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_use_op (tree stmt, tree *op, use_optype_p *last)
{
  use_optype_p new;

  ALLOC_OPTYPE (new, use);
  INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
  APPEND_OP_AFTER (new, *last);
}
/* Adds OP to the list of vuses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_vuse_op (tree stmt, tree op, vuse_optype_p *last)
{
  vuse_optype_p new;

  ALLOC_OPTYPE (new, vuse);
  VUSE_OP (new) = op;
  INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
  APPEND_OP_AFTER (new, *last);
}
/* Adds OP to the list of maydefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_maydef_op (tree stmt, tree op, maydef_optype_p *last)
{
  maydef_optype_p new;

  ALLOC_OPTYPE (new, maydef);
  MAYDEF_RESULT (new) = op;
  MAYDEF_OP (new) = op;
  INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
  APPEND_OP_AFTER (new, *last);
}
/* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline void
add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
{
  mustdef_optype_p new;

  ALLOC_OPTYPE (new, mustdef);
  MUSTDEF_RESULT (new) = op;
  MUSTDEF_KILL (new) = op;
  INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
  APPEND_OP_AFTER (new, *last);
}
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Given that def operands list is not necessarily sorted, merging
	   the operands this way does not make much sense.
	-- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_def_ops (tree stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  tree *old_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = DEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_defs))
    {
      tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
      old_base = DEF_OP_PTR (old_ops);

      if (old_base == new_base)
	{
	  /* if variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* if old is less than new, old goes to the free list.  */
	  MOVE_HEAD_TO_FREELIST (old_ops, def);
	}
      else
	{
	  /* This is a new operand.  */
	  add_def_op (new_base, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_defs); new_i++)
    add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* Now set the stmt's operands.  */
  DEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    def_optype_p ptr;
    unsigned x = 0;
    for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_defs));
  }
#endif
}
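
/* A worked example of the merge above, with hypothetical operand
   addresses: if the old def list holds pointers { 0x10, 0x20 } and
   build_defs holds { 0x10, 0x18 }, the first iteration reuses the old
   node for 0x10, the second emits a fresh node for 0x18 (since
   0x20 > 0x18), and the leftover node for 0x20 is pushed onto
   free_defs after the loop.  */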
/* This routine will create stmt operands for STMT from the def build list.  */

static void
finalize_ssa_defs (tree stmt)
{
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);

  /* If there is an old list, often the new list is identical, or close, so
     find the elements at the beginning that are the same as the vector.  */
  finalize_ssa_def_ops (stmt);
  VEC_truncate (tree, build_defs, 0);
}
/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_use_ops (tree stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  new_list.next = NULL;
  last = &new_list;

  old_ops = USE_OPS (stmt);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);

  last->next = NULL;

  /* Now set the stmt's operands.  */
  USE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_uses));
  }
#endif
}
/* Return a new use operand vector for STMT, comparing to OLD_OPS_P.  */

static void
finalize_ssa_uses (tree stmt)
{
#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to update_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *) VEC_index (tree, build_uses, x)) != stmt);
  }
#endif
  finalize_ssa_use_ops (stmt);
  VEC_truncate (tree, build_uses, 0);
}
/* Takes elements from build_v_may_defs and turns them into maydef operands of
   STMT.  */

static inline void
finalize_ssa_v_may_def_ops (tree stmt)
{
  unsigned new_i;
  struct maydef_optype_d new_list;
  maydef_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = MAYDEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
    {
      act = VEC_index (tree, build_v_may_defs, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (MAYDEF_OP (old_ops));

      if (old_base == new_base)
	{
	  /* if variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* if old is less than new, old goes to the free list.  */
	  delink_imm_use (MAYDEF_OP_PTR (old_ops));
	  MOVE_HEAD_TO_FREELIST (old_ops, maydef);
	}
      else
	{
	  /* This is a new operand.  */
	  add_maydef_op (stmt, act, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_v_may_defs list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
    add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (MAYDEF_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_maydefs;
      gimple_ssa_operands (cfun)->free_maydefs = old_ops;
    }

  /* Now set the stmt's operands.  */
  MAYDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_v_may_defs));
  }
#endif
}
static void
finalize_ssa_v_may_defs (tree stmt)
{
  finalize_ssa_v_may_def_ops (stmt);
}
/* Clear the in_list bits and empty the build array for V_MAY_DEFs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = VEC_length (tree, build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }
  VEC_truncate (tree, build_v_may_defs, 0);
}
/* Takes elements from build_vuses and turns them into vuse operands of
   STMT.  */

static inline void
finalize_ssa_vuse_ops (tree stmt)
{
  unsigned new_i;
  struct vuse_optype_d new_list;
  vuse_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = VUSE_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_vuses))
    {
      act = VEC_index (tree, build_vuses, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (VUSE_OP (old_ops));

      if (old_base == new_base)
	{
	  /* if variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  set_virtual_use_link (VUSE_OP_PTR (last), stmt);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* if old is less than new, old goes to the free list.  */
	  delink_imm_use (USE_OP_PTR (old_ops));
	  MOVE_HEAD_TO_FREELIST (old_ops, vuse);
	}
      else
	{
	  /* This is a new operand.  */
	  add_vuse_op (stmt, act, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_vuses list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
    add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (VUSE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_vuses;
      gimple_ssa_operands (cfun)->free_vuses = old_ops;
    }

  /* Now set the stmt's operands.  */
  VUSE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_vuses));
  }
#endif
}
/* Return a new VUSE operand vector, comparing to OLD_OPS_P.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_v_may_defs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
     not needed because V_MAY_DEFs imply a VUSE of the variable.  For
     instance, suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the
     V_MAY_DEF operation.  */
  num = VEC_length (tree, build_vuses);
  num_v_may_defs = VEC_length (tree, build_v_may_defs);

  if (num > 0 && num_v_may_defs > 0)
    {
      for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
	{
	  tree vuse;
	  vuse = VEC_index (tree, build_vuses, vuse_index);
	  if (TREE_CODE (vuse) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (vuse);
	      ann->in_vuse_list = 0;
	      if (ann->in_v_may_def_list)
		{
		  VEC_ordered_remove (tree, build_vuses, vuse_index);
		  continue;
		}
	    }
	  vuse_index++;
	}
    }
  else
    {
      /* Clear out the in_list bits.  */
      for (vuse_index = 0;
	   vuse_index < VEC_length (tree, build_vuses);
	   vuse_index++)
	{
	  tree t = VEC_index (tree, build_vuses, vuse_index);
	  if (TREE_CODE (t) != SSA_NAME)
	    {
	      var_ann_t ann = var_ann (t);
	      ann->in_vuse_list = 0;
	    }
	}
    }

  finalize_ssa_vuse_ops (stmt);

  /* The V_MAY_DEF build vector wasn't cleaned up because we needed it.  */
  cleanup_v_may_defs ();

  /* Free the VUSEs build vector.  */
  VEC_truncate (tree, build_vuses, 0);
}
/* Takes elements from build_v_must_defs and turns them into mustdef operands of
   STMT.  */

static inline void
finalize_ssa_v_must_def_ops (tree stmt)
{
  unsigned new_i;
  struct mustdef_optype_d new_list;
  mustdef_optype_p old_ops, ptr, last;
  tree act;
  unsigned old_base, new_base;

  new_list.next = NULL;
  last = &new_list;

  old_ops = MUSTDEF_OPS (stmt);

  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
    {
      act = VEC_index (tree, build_v_must_defs, new_i);
      new_base = get_name_decl (act);
      old_base = get_name_decl (MUSTDEF_KILL (old_ops));

      if (old_base == new_base)
	{
	  /* If variables are the same, reuse this node.  */
	  MOVE_HEAD_AFTER (old_ops, last);
	  set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
	  new_i++;
	}
      else if (old_base < new_base)
	{
	  /* If old is less than new, old goes to the free list.  */
	  delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
	  MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
	}
      else
	{
	  /* This is a new operand.  */
	  add_mustdef_op (stmt, act, &last);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_v_must_defs list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
    add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);

  last->next = NULL;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (MUSTDEF_KILL_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_mustdefs;
      gimple_ssa_operands (cfun)->free_mustdefs = old_ops;
    }

  /* Now set the stmt's operands.  */
  MUSTDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_v_must_defs));
  }
#endif
}
static void
finalize_ssa_v_must_defs (tree stmt)
{
  /* In the presence of subvars, there may be more than one V_MUST_DEF
     per statement (one for each subvar).  It is a bit expensive to
     verify that all must-defs in a statement belong to subvars if
     there is more than one must-def, so we don't do it.  Suffice to
     say, if you reach here without having subvars, and have num >1,
     you have hit a bug.  */
  finalize_ssa_v_must_def_ops (stmt);
  VEC_truncate (tree, build_v_must_defs, 0);
}
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);
}
/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
  gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}
/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}
/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      /* Don't allow duplicate entries.  */
      if (ann->in_v_may_def_list)
	return;
      ann->in_v_may_def_list = 1;
    }

  VEC_safe_push (tree, heap, build_v_may_defs, (tree) var);
}
/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
	return;
      ann->in_vuse_list = 1;
    }

  VEC_safe_push (tree, heap, build_vuses, (tree) var);
}
/* Add VAR to the list of virtual must definitions for INFO.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
  for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
    if (var == VEC_index (tree, build_v_must_defs, i))
      return;

  VEC_safe_push (tree, heap, build_v_must_defs, (tree) var);
}
/* REF is a tree that contains the entire pointer dereference
   expression, if available, or NULL otherwise.  ALIAS is the variable
   we are asking if REF can access.  OFFSET and SIZE come from the
   memory access expression that generated this virtual operand.  */

static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
			   HOST_WIDE_INT size)
{
  bool offsetgtz = offset > 0;
  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
  tree base = ref ? get_base_address (ref) : NULL;

  /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
     using a call-clobbered memory tag.  By definition, call-clobbered
     memory tags can always touch .GLOBAL_VAR.  */
  if (alias == gimple_global_var (cfun))
    return true;

  /* If ALIAS is an SFT, it can't be touched if the offset
     and size of the access is not overlapping with the SFT offset and
     size.  This is only true if we are accessing through a pointer
     to a type that is the same as SFT_PARENT_VAR.  Otherwise, we may
     be accessing through a pointer to some substruct of the
     structure, and if we try to prune there, we will have the wrong
     offset, and get the wrong answer.
     i.e., we can't prune without more work if we have something like

     struct gcc_target
     {
       struct asm_out
       {
	 const char *byte_op;
	 struct asm_int_op
	 {
	   const char *hi;
	 } aligned_op;
       } asm_out;
     } targetm;

     foo = &targetm.asm_out.aligned_op;
     return foo->hi;

     SFT.1, which represents hi, will have SFT_OFFSET=32 because in
     terms of SFT_PARENT_VAR, that is where it is.
     However, the access through the foo pointer will be at offset 0.  */
  if (size != -1
      && TREE_CODE (alias) == STRUCT_FIELD_TAG
      && base
      && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
      && !overlap_subvar (offset, size, alias, NULL))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* Without strict aliasing, it is impossible for a component access
     through a pointer to touch a random variable, unless that
     variable *is* a structure or a pointer.

     That is, given p->c, and some random global variable b,
     there is no legal way that p->c could be an access to b.

     Without strict aliasing on, we consider it legal to do something
     like:

     struct foos { int l; };
     int foo;
     static struct foos *getfoo(void);
     int main (void)
     {
       struct foos *f = getfoo();
       f->l = 1;
       foo = 2;
       if (f->l == 1)
	 abort();
       exit(0);
     }
     static struct foos *getfoo(void)
     { return (struct foos *)&foo; }

     (taken from 20000623-1.c)

     The docs also say/imply that access through union pointers
     is legal (but *not* if you take the address of the union member,
     i.e. the inverse), such that you can do

     typedef union {
       int d;
     } U;

     int rv;
     void breakme()
     {
       U *rv0;
       U *pretmp = (U*)&rv;
       rv0 = pretmp;
       rv0->d = 42;
     }

     To implement this, we just punt on accesses through union
     pointers entirely.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && (TREE_CODE (base) != INDIRECT_REF
	       || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
	   && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
	   && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
	   && !var_ann (alias)->is_heapvar
	   /* When the struct has may_alias attached to it, we need not to
	      return true.  */
	   && get_alias_set (base))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* If the offset of the access is greater than the size of one of
     the possible aliases, it can't be touching that alias, because it
     would be past the end of the structure.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && !POINTER_TYPE_P (TREE_TYPE (alias))
	   && offsetgtz
	   && DECL_SIZE (alias)
	   && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
	   && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  return true;
}
/* Add VAR to the virtual operands array.  FLAGS is as in
   get_expr_operands.  FULL_REF is a tree that contains the entire
   pointer dereference expression, if available, or NULL otherwise.
   OFFSET and SIZE come from the memory access expression that
   generated this virtual operand.  FOR_CLOBBER is true if this is
   adding a virtual operand for a call clobber.  */

static void
add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
		     tree full_ref, HOST_WIDE_INT offset,
		     HOST_WIDE_INT size, bool for_clobber)
{
  VEC(tree,gc) *aliases;
  tree sym;
  var_ann_t v_ann;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a V_MAY_DEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     check that this only happens on non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     gimple_modify_stmt, then we can't suppress the V_MAY_DEF, lest we run
     into validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_non_specific) && unmodifiable_var_p (var))
    flags &= ~(opf_is_def | opf_kill_def);

  /* The variable is not a GIMPLE register.  Add it (or its aliases) to
     virtual operands, unless the caller has specifically requested
     not to add virtual operands (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  aliases = v_ann->may_aliases;
  if (aliases == NULL)
    {
      /* The variable is not aliased or it is an alias tag.  */
      if (flags & opf_is_def)
	{
	  if (flags & opf_kill_def)
	    {
	      /* V_MUST_DEF for non-aliased, non-GIMPLE register
		 variable definitions.  */
	      gcc_assert (!MTAG_P (var)
			  || TREE_CODE (var) == STRUCT_FIELD_TAG);
	      append_v_must_def (var);
	    }
	  else
	    {
	      /* Add a V_MAY_DEF for call-clobbered variables and
		 memory tags.  */
	      append_v_may_def (var);
	    }
	}
      else
	append_vuse (var);
    }
  else
    {
      unsigned i;
      tree al;

      /* The variable is aliased.  Add its aliases to the virtual
	 operands.  */
      gcc_assert (VEC_length (tree, aliases) != 0);

      if (flags & opf_is_def)
	{
	  bool none_added = true;

	  for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
	    {
	      if (!access_can_touch_variable (full_ref, al, offset, size))
		continue;

	      none_added = false;
	      append_v_may_def (al);
	    }

	  /* If the variable is also an alias tag, add a virtual
	     operand for it, otherwise we will miss representing
	     references to the members of the variable's alias set.
	     This fixes the bug in gcc.c-torture/execute/20020503-1.c.

	     It is also necessary to add bare defs on clobbers for
	     SMT's, so that bare SMT uses caused by pruning all the
	     aliases will link up properly with calls.   In order to
	     keep the number of these bare defs we add down to the
	     minimum necessary, we keep track of which SMT's were used
	     alone in statement vdefs or VUSEs.  */
	  if (v_ann->is_aliased
	      || none_added
	      || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
		  && for_clobber
		  && SMT_USED_ALONE (var)))
	    {
	      /* Every bare SMT def we add should have SMT_USED_ALONE
		 set on it, or else we will get the wrong answer on
		 clobbers.  */
	      if (none_added
		  && !updating_used_alone && gimple_aliases_computed_p (cfun)
		  && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
		gcc_assert (SMT_USED_ALONE (var));

	      append_v_may_def (var);
	    }
	}
      else
	{
	  bool none_added = true;
	  for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
	    {
	      if (!access_can_touch_variable (full_ref, al, offset, size))
		continue;
	      none_added = false;
	      append_vuse (al);
	    }

	  /* Similarly, append a virtual use for VAR itself, when
	     it is an alias tag.  */
	  if (v_ann->is_aliased || none_added)
	    append_vuse (var);
	}
    }
}
/* Add *VAR_P to the appropriate operand array for S_ANN.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  bool is_real_op;
  tree var, sym;
  var_ann_t v_ann;

  var = *var_p;
  gcc_assert (SSA_VAR_P (var));

  is_real_op = is_gimple_reg (var);

  /* If this is a real operand, the operand is either an SSA name or a
     decl.  Virtual operands may only be decls.  */
  gcc_assert (is_real_op || DECL_P (var));

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
}
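
/* For illustration (a hypothetical statement, not code from this
   file): in 'x_2 = *p_1', the pointer p_1 is a GIMPLE register and is
   appended as a real USE by the code above, while the memory read
   through '*p_1' is represented by the pointer's memory tag and is
   routed through add_virtual_operand instead.  */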
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
      that got us here.

   FLAGS is as in get_expr_operands.

   FULL_REF contains the full pointer dereference expression, if we
      have it, or NULL otherwise.

   OFFSET and SIZE are the location of the access inside the
      dereferenced pointer, if known.

   RECURSE_ON_BASE should be set to true if we want to continue
      calling get_expr_operands on the base pointer, and false if
      something else will do it for us.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags, tree full_ref,
			   HOST_WIDE_INT offset, HOST_WIDE_INT size,
			   bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_virtual_operand (pi->name_mem_tag, s_ann, flags,
			       full_ref, offset, size, false);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its symbol memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);

	  if (v_ann->symbol_mem_tag)
	    add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
				 full_ref, offset, size, false);
	}
    }
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      /* If a constant is used as a pointer, we can't generate a real
	 operand for it but we mark the statement volatile to prevent
	 optimizations from messing things up.  */
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }
  else
    {
      /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
      gcc_unreachable ();
    }

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr, opf_none);
}
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag = TMR_TAG (expr), ref;
  HOST_WIDE_INT offset, size, maxsize;
  subvar_t svars, sv;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);

  /* MEM_REFs should never be killing.  */
  flags &= ~opf_kill_def;

  if (TMR_SYMBOL (expr))
    {
      stmt_ann_t ann = stmt_ann (stmt);
      add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
    }

  if (!tag)
    {
      /* Something weird, so ensure that we will be careful.  */
      stmt_ann (stmt)->has_volatile_ops = true;
      return;
    }

  if (DECL_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
  gcc_assert (ref != NULL_TREE);
  svars = get_subvars_for_var (ref);
  for (sv = svars; sv; sv = sv->next)
    {
      bool exact;
      if (overlap_subvar (offset, maxsize, sv->var, &exact))
	{
	  int subvar_flags = flags;
	  if (!exact || size != maxsize)
	    subvar_flags &= ~opf_kill_def;
	  add_stmt_operand (&sv->var, s_ann, subvar_flags);
	}
    }
}
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b, not_written_b;

  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_stmt_operand (&var, s_ann, opf_is_def);
      return;
    }

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */
  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
  /* Add a V_MAY_DEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var_lookup (u);
      unsigned int escape_mask = var_ann (var)->escape_mask;
      tree real_var = var;
      bool not_read;
      bool not_written;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b,
					    DECL_UID (real_var)) : false;
      not_written = not_written_b ? bitmap_bit_p (not_written_b,
						  DECL_UID (real_var)) : false;
      gcc_assert (!unmodifiable_var_p (var));

      clobber_stats.clobbered_vars++;

      /* See if this variable is really clobbered by this function.  */

      /* Trivial case: Things escaping only to pure/const are not
	 clobbered by non-pure-const, and only read by pure/const.  */
      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
	{
	  tree call = get_call_expr_in (stmt);
	  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
	    {
	      add_stmt_operand (&var, s_ann, opf_none);
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	  else
	    {
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	}

      if (not_written)
	{
	  clobber_stats.static_write_clobbers_avoided++;
	  if (!not_read)
	    add_stmt_operand (&var, s_ann, opf_none);
	  else
	    clobber_stats.static_read_clobbers_avoided++;
	}
      else
	add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
    }
}
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_stmt_operand (&var, s_ann, opf_none);
      return;
    }

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var (u);
      tree real_var = var;
      bool not_read;

      clobber_stats.readonly_clobbers++;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
			    : false;

      if (not_read)
	{
	  clobber_stats.static_readonly_clobbers_avoided++;
	  continue;
	}

      add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
    }
}
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
  if (gimple_aliases_computed_p (cfun)
      && !bitmap_empty_p (gimple_call_clobbered_vars (cfun))
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt, get_callee_fndecl (expr));
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	/* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
	   decided to group them).  */
	if (gimple_global_var (cfun))
	  {
	    tree var = gimple_global_var (cfun);
	    add_stmt_operand (&var, s_ann, opf_is_def);
	  }
	else
	  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
	    {
	      tree var = referenced_var (i);
	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	    }

	/* Now clobber all addressables.  */
	EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
	    {
	      tree var = referenced_var (i);

	      /* Subvars are explicitly represented in this list, so
		 we don't need the original to be added to the clobber
		 ops, but the original *will* be in this list because
		 we keep the addressability of the original
		 variable up-to-date so we don't screw up the rest of
		 the backend.  */
	      if (var_can_have_subvars (var)
		  && get_subvars_for_var (var) != NULL)
		continue;

	      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
	    }

	break;
      }
}
/* Scan operands for the assignment expression EXPR in statement STMT.  */

static void
get_modify_stmt_operands (tree stmt, tree expr)
{
  /* First get operands from the RHS.  */
  get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_none);

  /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
     registers.  If the LHS is a store to memory, we will either need
     a preserving definition (V_MAY_DEF) or a killing definition
     (V_MUST_DEF).

     Preserving definitions are those that modify a part of an
     aggregate object for which no subvars have been computed (or the
     reference does not correspond exactly to one of them).  Stores
     through a pointer are also represented with V_MAY_DEF operators.

     The determination of whether to use a preserving or a killing
     definition is done while scanning the LHS of the assignment.  By
     default, assume that we will emit a V_MUST_DEF.  */
  get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0),
		     opf_is_def|opf_kill_def);
}
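
/* Hypothetical examples of the LHS classification above: 'a = b' with
   'a' a non-aliased local keeps the opf_kill_def default and yields a
   V_MUST_DEF, while '*p = b', or 'a.f = b' with no exact subvar match,
   loses opf_kill_def during the LHS scan and is recorded as a
   preserving V_MAY_DEF.  */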
/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);

      /* If the address is invariant, there may be no interesting
	 variable references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case STRUCT_FIELD_TAG:
    case SYMBOL_MEMORY_TAG:
    case NAME_MEMORY_TAG:
      add_stmt_operand (expr_p, s_ann, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      {
	subvar_t svars;

	/* Add the subvars for a variable, if it has subvars, to DEFS
	   or USES.  Otherwise, add the variable itself.  Whether it
	   goes to USES or DEFS depends on the operand flags.  */
	if (var_can_have_subvars (expr)
	    && (svars = get_subvars_for_var (expr)))
	  {
	    subvar_t sv;
	    for (sv = svars; sv; sv = sv->next)
	      add_stmt_operand (&sv->var, s_ann, flags);
	  }
	else
	  add_stmt_operand (expr_p, s_ann, flags);

	return;
      }

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree ref;
	HOST_WIDE_INT offset, size, maxsize;
	bool none = true;

	/* This component reference becomes an access to all of the
	   subvariables it can touch, if we can determine that, but
	   *NOT* the real one.  If we can't determine which fields we
	   could touch, the recursion will eventually get to a
	   variable and add *all* of its subvars, or whatever is the
	   minimum correct subset.  */
	ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
	if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
	  {
	    subvar_t sv;
	    subvar_t svars = get_subvars_for_var (ref);

	    for (sv = svars; sv; sv = sv->next)
	      {
		bool exact;

		if (overlap_subvar (offset, maxsize, sv->var, &exact))
		  {
		    int subvar_flags = flags;
		    none = false;
		    if (!exact || size != maxsize)
		      subvar_flags &= ~opf_kill_def;
		    add_stmt_operand (&sv->var, s_ann, subvar_flags);
		  }
	      }

	    if (!none)
	      flags |= opf_no_vops;
	  }
	else if (TREE_CODE (ref) == INDIRECT_REF)
	  {
	    get_indirect_ref_operands (stmt, ref, flags, expr, offset,
				       maxsize, false);
	    flags |= opf_no_vops;
	  }

	/* Even if we found subvars above we need to ensure to see
	   immediate uses for d in s.a[d].  In case of s.a having
	   a subvar we would miss it otherwise.  */
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			   flags & ~opf_kill_def);

	if (code == COMPONENT_REF)
	  {
	    if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      s_ann->has_volatile_ops = true;
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case GIMPLE_MODIFY_STMT:
      get_modify_stmt_operands (stmt, expr);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	for (idx = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, opf_none);

	return;
      }

    case BIT_FIELD_REF:
      /* Stores using BIT_FIELD_REF are always preserving definitions.  */
      flags &= ~opf_kill_def;

      /* Fallthru  */

    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
    case CONST_DECL:
    case OMP_PARALLEL:
    case OMP_SECTIONS:
    case OMP_FOR:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_RETURN:
    case OMP_CONTINUE:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
	goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
	goto do_binary;
      if (class == tcc_constant || class == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case GIMPLE_MODIFY_STMT:
      get_modify_stmt_operands (stmt, stmt);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      /* These nodes contain no variable references.  */
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the
	 operand pointer (which may only happen for USE operands), we
	 will fail in add_stmt_operand.  This default will handle
	 statements like empty statements, or CALL_EXPRs that may
	 appear on the RHS of a statement or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }
}
/* Create an operands cache for STMT.  */

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands.  */
  if (ann)
    ann->has_volatile_ops = false;

  start_ssa_stmt_operands ();

  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_v_may_defs);
  operand_build_sort_virtual (build_v_must_defs);

  finalize_ssa_stmt_operands (stmt);
}
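
/* The rebuild pipeline in summary: start_ssa_stmt_operands checks that
   the build vectors are empty, parse_ssa_operands fills them, the
   virtual vectors are sorted by DECL_UID so the finalize routines can
   merge them against the old (equally sorted) operand lists, and
   finalize_ssa_stmt_operands commits everything back to STMT.  */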
/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->maydef_ops = NULL;
  ops->mustdef_ops = NULL;
  ops->vuse_ops = NULL;
}
/* Get the operands of statement STMT.  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  gcc_assert (ann->modified);

  timevar_push (TV_TREE_OPS);

  build_ssa_operands (stmt);

  /* Clear the modified bit for STMT.  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
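
/* Illustrative sketch, not part of the original file: a pass that rewrites
   a statement in place is expected to mark it modified and then trigger an
   operand rebuild, typically via the update_stmt wrapper from
   tree-flow-inline.h.  The helper example_replace_rhs is hypothetical.  */
#if 0
static void
example_replace_rhs (tree stmt, tree new_rhs)
{
  /* Overwrite the RHS of a GIMPLE_MODIFY_STMT directly in the tree.  */
  GIMPLE_STMT_OPERAND (stmt, 1) = new_rhs;

  /* update_stmt marks STMT modified and calls update_stmt_operands,
     which rebuilds the operand cache shown above.  */
  update_stmt (stmt);
}
#endif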

/* Copies virtual operands from SRC to DEST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  tree t;
  ssa_op_iter iter, old_iter;
  use_operand_p use_p, u2;
  def_operand_p def_p, d2;

  build_ssa_operands (dest);

  /* Copy all the virtual fields.  */
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
    append_vuse (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
    append_v_may_def (t);
  FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
    append_v_must_def (t);

  if (VEC_length (tree, build_vuses) == 0
      && VEC_length (tree, build_v_may_defs) == 0
      && VEC_length (tree, build_v_must_defs) == 0)
    return;

  /* Now commit the virtual operands to this stmt.  */
  finalize_ssa_v_must_defs (dest);
  finalize_ssa_v_may_defs (dest);
  finalize_ssa_vuses (dest);

  /* Finally, set the fields to the same values as the originals.  */
  t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
  FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, t);
      t = op_iter_next_tree (&old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_maydef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));

  op_iter_init_mustdef (&old_iter, src, &u2, &d2);
  FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
    {
      gcc_assert (!op_iter_done (&old_iter));
      SET_USE (use_p, USE_FROM_PTR (u2));
      SET_DEF (def_p, DEF_FROM_PTR (d2));
      op_iter_next_maymustdef (&u2, &d2, &old_iter);
    }
  gcc_assert (op_iter_done (&old_iter));
}
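
/* Illustrative sketch, not part of the original file: a pass that replaces
   a statement with an equivalent one touching the same memory can
   transplant the virtual operands instead of recomputing alias
   information.  The helper example_replace_stmt is hypothetical.  */
#if 0
static void
example_replace_stmt (block_stmt_iterator *bsi, tree old_stmt, tree new_stmt)
{
  /* Give NEW_STMT the same VUSEs and V_MAY_DEF/V_MUST_DEFs as OLD_STMT.  */
  copy_virtual_operands (new_stmt, old_stmt);

  /* Then swap the statement into the IL in place of the old one.  */
  bsi_replace (bsi, new_stmt, false);
}
#endif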

/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this
   can be used to eliminate redundant loads.  OLD_STMT is the store stmt,
   and NEW_STMT is the new load which represents a load of the values
   stored.  */

void
create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
  stmt_ann_t ann;
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned x;

  ann = get_stmt_ann (new_stmt);

  /* Process the stmt looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (x = 0; x < VEC_length (tree, build_vuses); x++)
    {
      tree t = VEC_index (tree, build_vuses, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_vuse_list = 0;
	}
    }

  for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
    {
      tree t = VEC_index (tree, build_v_may_defs, x);
      if (TREE_CODE (t) != SSA_NAME)
	{
	  var_ann_t ann = var_ann (t);
	  ann->in_v_may_def_list = 0;
	}
    }

  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_v_may_defs, 0);
  VEC_truncate (tree, build_v_must_defs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
			     (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
    append_vuse (op);

  /* Now build the operands for this new stmt.  */
  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}
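
/* Illustrative sketch of the DOM usage described above: given the store
   'lhs = rhs', build the mirror-image load 'rhs = lhs' whose VUSEs track
   the store's VDEFs, so a later identical load can be proven redundant.
   This condenses what tree-ssa-dom.c does when recording equivalences
   for a store; the helper example_fake_load_for_store is hypothetical.  */
#if 0
static tree
example_fake_load_for_store (tree store)
{
  tree lhs = GIMPLE_STMT_OPERAND (store, 0);
  tree rhs = GIMPLE_STMT_OPERAND (store, 1);

  /* Build 'rhs = lhs', the load that mirrors the store.  */
  tree new_stmt = build_gimple_modify_stmt (rhs, lhs);
  create_ssa_artficial_load_stmt (new_stmt, store);
  return new_stmt;
}
#endif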

/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to check the validity of the swap operation.  */

void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* If both uses don't have operand entries, there isn't much we can do
	 at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
	{
	  tree *tmp = USE_OP_PTR (use1)->use;
	  USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
	  USE_OP_PTR (use0)->use = tmp;
	}
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
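
/* Illustrative sketch, not part of the original file: canonicalization
   code swaps the two operands of a commutative RHS through
   swap_tree_operands so that the operand cache and immediate-use lists
   stay coherent.  The helper example_canonicalize_operand_order is
   hypothetical; RHS is assumed to be the binary RHS of STMT.  */
#if 0
static void
example_canonicalize_operand_order (tree stmt, tree rhs)
{
  if (commutative_tree_code (TREE_CODE (rhs))
      && tree_swap_operands_p (TREE_OPERAND (rhs, 0),
			       TREE_OPERAND (rhs, 1), false))
    swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
			&TREE_OPERAND (rhs, 1));
}
#endif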

/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */

void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
	*addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
	  && (svars = get_subvars_for_var (var)))
	{
	  subvar_t sv;
	  for (sv = svars; sv; sv = sv->next)
	    {
	      bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
	      TREE_ADDRESSABLE (sv->var) = 1;
	    }
	}
      else
	{
	  bitmap_set_bit (*addresses_taken, DECL_UID (var));
	  TREE_ADDRESSABLE (var) = 1;
	}
    }
}
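
/* Illustrative sketch, not part of the original file: a caller scanning an
   expression and finding '&obj' would record the base object in a
   pass-local set.  ADDRESSES_TAKEN may start out NULL and is allocated on
   first use.  The helper example_note_taken_address is hypothetical.  */
#if 0
static void
example_note_taken_address (tree expr, bitmap *addresses_taken)
{
  /* Record the base of the object whose address is taken, never just
     the field, per the COMPONENT_REF caveat above.  */
  if (TREE_CODE (expr) == ADDR_EXPR)
    add_to_addressable_set (TREE_OPERAND (expr, 0), addresses_taken);
}
#endif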

/* Scan the immediate_use list for VAR, making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error;	/* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
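
/* Illustrative sketch, not part of the original file: a checker can walk
   every SSA name and verify its immediate-use list, in the spirit of
   verify_ssa.  The helper example_verify_all_imm_links is hypothetical.  */
#if 0
static void
example_verify_all_imm_links (void)
{
  unsigned int i;

  /* SSA name 0 is never used; NULL slots mark released names.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name && verify_imm_links (stderr, name))
	internal_error ("broken immediate-use list");
    }
}
#endif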

/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->stmt == NULL && use_p->use == NULL)
	fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
      else
	print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }
  fprintf (file, "\n");
}

/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}

/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}
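
/* The debug_* entry points above are meant to be called from the
   debugger, e.g.:

     (gdb) call debug_immediate_uses ()
     (gdb) call debug_immediate_uses_for (name)

   where NAME is an SSA_NAME of interest.  */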