1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
23 #include "coretypes.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
43 The operand type are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may, or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
50 The routines in this file are concerned with creating this operand cache
53 The operand tree is the parsed by the various get_* routines which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands. Defs, Uses, Virtual Uses, Virtual May Defs, and
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had SSA_NAME version of a
72 variable, and that same variable occurs in the same operands cache, then
73 the new cache vector will also get the same SSA_NAME.
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'. */
/* NOTE(review): this SOURCE is a corrupted listing -- the file's own line
   numbers are fused into each line and many lines were dropped.  The flag
   definitions below appear intact.  */
79 /* Flags to describe operand properties in helpers. */
81 /* By default, operands are loaded. */
/* NOTE(review): the `#define opf_none 0` line that presumably followed the
   comment above is missing from this extraction -- TODO confirm upstream.  */
84 /* Operand is the target of an assignment expression or a
85 call-clobbered variable */
86 #define opf_is_def (1 << 0)
88 /* Operand is the target of an assignment expression. */
89 #define opf_kill_def (1 << 1)
91 /* No virtual operands should be created in the expression. This is used
92 when traversing ADDR_EXPR nodes which have different semantics than
93 other expressions. Inside an ADDR_EXPR node, the only operands that we
94 need to consider are indices into arrays. For instance, &a.b[i] should
95 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
97 #define opf_no_vops (1 << 2)
99 /* Operand is a "non-specific" kill for call-clobbers and such. This is used
100 to distinguish "reset the world" events from explicit MODIFY_EXPRs. */
101 #define opf_non_specific (1 << 3)
/* File-scope state for the operand cache: one extendable build vector per
   operand kind (defs, uses, v_may_defs, vuses, v_must_defs), plus free lists
   of recycled operand nodes and the GC-managed chunk allocator state.
   NOTE(review): each logical declaration is split across several physical
   lines by the extraction; the tokens themselves appear intact.  */
103 /* Array for building all the def operands. */
104 static VEC(tree
,heap
) *build_defs
;
106 /* Array for building all the use operands. */
107 static VEC(tree
,heap
) *build_uses
;
109 /* Array for building all the v_may_def operands. */
110 static VEC(tree
,heap
) *build_v_may_defs
;
112 /* Array for building all the vuse operands. */
113 static VEC(tree
,heap
) *build_vuses
;
115 /* Array for building all the v_must_def operands. */
116 static VEC(tree
,heap
) *build_v_must_defs
;
118 /* These arrays are the cached operand vectors for call clobbered calls. */
119 static bool ops_active
= false;
121 static GTY (()) struct ssa_operand_memory_d
*operand_memory
= NULL
;
122 static unsigned operand_memory_index
;
124 static void get_expr_operands (tree
, tree
*, int);
126 static def_optype_p free_defs
= NULL
;
127 static use_optype_p free_uses
= NULL
;
128 static vuse_optype_p free_vuses
= NULL
;
129 static maydef_optype_p free_maydefs
= NULL
;
130 static mustdef_optype_p free_mustdefs
= NULL
;
133 /* Return the DECL_UID of the base variable of T. */
/* Maps an operand tree to the DECL_UID of its base variable, used as the
   sort key for virtual operands.  NOTE(review): the extraction dropped the
   function braces and the non-SSA_NAME return path (original lines 137,
   139-140) -- recover from upstream before compiling.  */
135 static inline unsigned
136 get_name_decl (tree t
)
138 if (TREE_CODE (t
) != SSA_NAME
)
141 return DECL_UID (SSA_NAME_VAR (t
));
145 /* Comparison function for qsort used in operand_build_sort_virtual. */
/* qsort comparator ordering two tree operands by ascending base DECL_UID.
   NOTE(review): the return type line, the declarations of u1/u2, and the
   #endif matching the #ifdef ENABLE_CHECKING below are missing from this
   extraction.  */
148 operand_build_cmp (const void *p
, const void *q
)
150 tree e1
= *((const tree
*)p
);
151 tree e2
= *((const tree
*)q
);
154 u1
= get_name_decl (e1
);
155 u2
= get_name_decl (e2
);
157 /* We want to sort in ascending order. They can never be equal. */
158 #ifdef ENABLE_CHECKING
159 gcc_assert (u1
!= u2
);
161 return (u1
> u2
? 1 : -1);
165 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
/* Sorts a virtual-operand build vector by DECL_UID: a hand-rolled swap for
   the two-element case, qsort (with operand_build_cmp, presumably) for three
   or more.  NOTE(review): the early-out for num < 2, several braces, and the
   tail of the qsort call (element size and comparator arguments) are missing
   from this extraction.  */
168 operand_build_sort_virtual (VEC(tree
,heap
) *list
)
170 int num
= VEC_length (tree
, list
);
175 if (get_name_decl (VEC_index (tree
, list
, 0))
176 > get_name_decl (VEC_index (tree
, list
, 1)))
178 /* Swap elements if in the wrong order. */
179 tree tmp
= VEC_index (tree
, list
, 0);
180 VEC_replace (tree
, list
, 0, VEC_index (tree
, list
, 1));
181 VEC_replace (tree
, list
, 1, tmp
);
185 /* There are 3 or more elements, call qsort. */
186 qsort (VEC_address (tree
, list
),
187 VEC_length (tree
, list
),
193 /* Return true if the ssa operands cache is active. */
196 ssa_operands_active (void)
/* NOTE(review): the return type and body (presumably `return ops_active;`,
   given the flag declared above) are missing from this extraction.  */
202 /* Structure storing statistics on how many call clobbers we have, and
203 how many where avoided. */
/* NOTE(review): the `static struct {` opener, closing `} clobber_stats;`,
   and parts of two field comments are missing from this extraction; the
   fields are the counters printed by fini_ssa_operands below.  */
207 /* Number of call-clobbered ops we attempt to add to calls in
208 add_call_clobber_ops. */
209 unsigned int clobbered_vars
;
211 /* Number of write-clobbers (v_may_defs) avoided by using
212 not_written information. */
213 unsigned int static_write_clobbers_avoided
;
215 /* Number of reads (vuses) avoided by using not_read
217 unsigned int static_read_clobbers_avoided
;
219 /* Number of write-clobbers avoided because the variable can't escape to
221 unsigned int unescapable_clobbers_avoided
;
223 /* Number of readonly uses we attempt to add to calls in
224 add_call_read_ops. */
225 unsigned int readonly_clobbers
;
227 /* Number of readonly uses we avoid using not_read information. */
228 unsigned int static_readonly_clobbers_avoided
;
232 /* Initialize the operand cache routines. */
/* Allocates the five build vectors, resets the chunk allocator so the first
   ssa_operand_alloc call grabs a fresh chunk, and zeroes the statistics.
   NOTE(review): the return type, braces, and the `ops_active = true;` one
   would expect (paired with ssa_operands_active) are missing from this
   extraction.  */
235 init_ssa_operands (void)
237 build_defs
= VEC_alloc (tree
, heap
, 5);
238 build_uses
= VEC_alloc (tree
, heap
, 10);
239 build_vuses
= VEC_alloc (tree
, heap
, 25);
240 build_v_may_defs
= VEC_alloc (tree
, heap
, 25);
241 build_v_must_defs
= VEC_alloc (tree
, heap
, 25);
243 gcc_assert (operand_memory
== NULL
);
244 operand_memory_index
= SSA_OPERAND_MEMORY_SIZE
;
246 memset (&clobber_stats
, 0, sizeof (clobber_stats
));
251 /* Dispose of anything required by the operand routines. */
/* Frees the build vectors, resets the free lists, walks and releases the
   operand-memory chunk list, and dumps clobber statistics when TDF_STATS
   dumping is on.  NOTE(review): the resets of free_defs/free_uses/
   free_vuses/free_maydefs (only free_mustdefs survives the extraction),
   the ggc_free inside the while loop, and several braces are missing.  */
254 fini_ssa_operands (void)
256 struct ssa_operand_memory_d
*ptr
;
257 VEC_free (tree
, heap
, build_defs
);
258 VEC_free (tree
, heap
, build_uses
);
259 VEC_free (tree
, heap
, build_v_must_defs
);
260 VEC_free (tree
, heap
, build_v_may_defs
);
261 VEC_free (tree
, heap
, build_vuses
);
266 free_mustdefs
= NULL
;
267 while ((ptr
= operand_memory
) != NULL
)
269 operand_memory
= operand_memory
->next
;
275 if (dump_file
&& (dump_flags
& TDF_STATS
))
277 fprintf (dump_file
, "Original clobbered vars:%d\n",
278 clobber_stats
.clobbered_vars
);
279 fprintf (dump_file
, "Static write clobbers avoided:%d\n",
280 clobber_stats
.static_write_clobbers_avoided
);
281 fprintf (dump_file
, "Static read clobbers avoided:%d\n",
282 clobber_stats
.static_read_clobbers_avoided
);
283 fprintf (dump_file
, "Unescapable clobbers avoided:%d\n",
284 clobber_stats
.unescapable_clobbers_avoided
);
285 fprintf (dump_file
, "Original readonly clobbers:%d\n",
286 clobber_stats
.readonly_clobbers
);
287 fprintf (dump_file
, "Static readonly clobbers avoided:%d\n",
288 clobber_stats
.static_readonly_clobbers_avoided
);
293 /* Return memory for operands of SIZE chunks. */
/* Bump allocator over GC-managed ssa_operand_memory_d chunks: when the
   current chunk cannot hold SIZE more bytes, a new chunk is pushed on the
   operand_memory list.  NOTE(review): the return type, the declaration of
   the returned pointer, and the final `return ptr;` are missing from this
   extraction.  */
296 ssa_operand_alloc (unsigned size
)
299 if (operand_memory_index
+ size
>= SSA_OPERAND_MEMORY_SIZE
)
301 struct ssa_operand_memory_d
*ptr
;
302 ptr
= GGC_NEW (struct ssa_operand_memory_d
);
303 ptr
->next
= operand_memory
;
304 operand_memory
= ptr
;
305 operand_memory_index
= 0;
307 ptr
= &(operand_memory
->mem
[operand_memory_index
]);
308 operand_memory_index
+= size
;
313 /* Make sure PTR is in the correct immediate use list. Since uses are simply
314 pointers into the stmt TREE, there is no way of telling if anyone has
315 changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
316 The contents are different, but the pointer is still the same. This
317 routine will check to make sure PTR is in the correct list, and if it isn't
318 put it in the correct list. We cannot simply check the previous node
319 because all nodes in the same stmt might have be changed. */
/* NOTE(review): heavily damaged by the extraction -- the declarations of
   `prev` and `root`, the loop body advancing `prev`, the extraction of the
   list root, and early returns are all missing.  What survives shows the
   intended shape: walk back to the list root, compare it against *ptr->use,
   and if they differ relink PTR via delink_imm_use/link_imm_use.  */
322 correct_use_link (use_operand_p ptr
, tree stmt
)
327 /* Fold_stmt () may have changed the stmt pointers. */
328 if (ptr
->stmt
!= stmt
)
334 /* Find the root element, making sure we skip any safe iterators. */
335 while (prev
->use
!= NULL
|| prev
->stmt
== NULL
)
338 /* Get the ssa_name of the list the node is in. */
340 /* If it's the right list, simply return. */
341 if (root
== *(ptr
->use
))
345 /* It is in the wrong list if we reach here. */
346 delink_imm_use (ptr
);
347 link_imm_use (ptr
, *(ptr
->use
));
351 /* This routine makes sure that PTR is in an immediate use list, and makes
352 sure the stmt pointer is set to the current stmt. Virtual uses do not need
353 the overhead of correct_use_link since they cannot be directly manipulated
354 like a real use can be. (They don't exist in the TREE_OPERAND nodes.) */
/* NOTE(review): the assignment `ptr->stmt = stmt;`, the ptr->prev test
   guarding the relink, and braces are missing from this extraction.  */
357 set_virtual_use_link (use_operand_p ptr
, tree stmt
)
359 /* Fold_stmt () may have changed the stmt pointers. */
360 if (ptr
->stmt
!= stmt
)
363 /* If this use isn't in a list, add it to the correct list. */
365 link_imm_use (ptr
, *(ptr
->use
))
;
/* Template parameters for the DEF operand finalizer: tree-ssa-opfinalize.h
   expands these macros into finalize_ssa_def_ops and then #undefs them.
   NOTE(review): the backslash-continued defines at original lines 370/372
   lost their continuation lines in this extraction.  */
369 #define FINALIZE_OPBUILD build_defs
370 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
372 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
374 #define FINALIZE_FUNC finalize_ssa_def_ops
375 #define FINALIZE_ALLOC alloc_def
376 #define FINALIZE_FREE free_defs
377 #define FINALIZE_TYPE struct def_optype_d
378 #define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
379 #define FINALIZE_OPS DEF_OPS
380 #define FINALIZE_BASE(VAR) VAR
381 #define FINALIZE_BASE_TYPE tree *
382 #define FINALIZE_BASE_ZERO NULL
383 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
384 #include "tree-ssa-opfinalize.h"
387 /* This routine will create stmt operands for STMT from the def build list. */
/* Asserts the single-real-def invariant for non-MODIFY_EXPR statements,
   hands off to the template-generated finalize_ssa_def_ops, then empties the
   build vector.  NOTE(review): the return type and braces are missing from
   this extraction.  */
390 finalize_ssa_defs (tree stmt
)
392 unsigned int num
= VEC_length (tree
, build_defs
);
394 /* There should only be a single real definition per assignment. */
395 gcc_assert ((stmt
&& TREE_CODE (stmt
) != MODIFY_EXPR
) || num
<= 1);
397 /* If there is an old list, often the new list is identical, or close, so
398 find the elements at the beginning that are the same as the vector. */
399 finalize_ssa_def_ops (stmt
);
400 VEC_truncate (tree
, build_defs
, 0);
/* Template parameters for the USE operand finalizer (expands to
   finalize_ssa_use_ops).  Uses link into immediate-use lists, hence the
   FINALIZE_USE_PTR/FINALIZE_CORRECT_USE hooks.  NOTE(review): continuation
   lines of the backslash-continued defines (404/406/419-421) are missing.  */
403 #define FINALIZE_OPBUILD build_uses
404 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
406 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
408 #define FINALIZE_FUNC finalize_ssa_use_ops
409 #define FINALIZE_ALLOC alloc_use
410 #define FINALIZE_FREE free_uses
411 #define FINALIZE_TYPE struct use_optype_d
412 #define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
413 #define FINALIZE_OPS USE_OPS
414 #define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
415 #define FINALIZE_CORRECT_USE correct_use_link
416 #define FINALIZE_BASE(VAR) VAR
417 #define FINALIZE_BASE_TYPE tree *
418 #define FINALIZE_BASE_ZERO NULL
419 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
420 (PTR)->use_ptr.use = (VAL); \
421 link_imm_use_stmt (&((PTR)->use_ptr), \
423 #include "tree-ssa-opfinalize.h"
425 /* Return a new use operand vector for STMT, comparing to OLD_OPS_P. */
/* Under ENABLE_CHECKING, verifies no build_uses entry points at the stmt
   itself (which would mean a pointer to a stack local was collected), then
   finalizes and empties the build vector.  NOTE(review): the return type,
   the declaration of `x`, the #endif, and braces are missing from this
   extraction.  */
428 finalize_ssa_uses (tree stmt
)
430 #ifdef ENABLE_CHECKING
433 unsigned num
= VEC_length (tree
, build_uses
);
435 /* If the pointer to the operand is the statement itself, something is
436 wrong. It means that we are pointing to a local variable (the
437 initial call to get_stmt_operands does not pass a pointer to a
439 for (x
= 0; x
< num
; x
++)
440 gcc_assert (*((tree
*)VEC_index (tree
, build_uses
, x
)) != stmt
);
443 finalize_ssa_use_ops (stmt
);
444 VEC_truncate (tree
, build_uses
, 0);
448 /* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P. */
/* Template parameters for the V_MAY_DEF finalizer (finalize_ssa_v_may_def_ops).
   Virtual operands key on the base variable's DECL_UID (get_name_decl), and
   their uses are linked with the cheap set_virtual_use_link.  NOTE(review):
   continuation lines of the multi-line FINALIZE_INITIALIZE are missing.  */
449 #define FINALIZE_OPBUILD build_v_may_defs
450 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_may_defs, (I))
451 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
452 build_v_may_defs, (I)))
453 #define FINALIZE_FUNC finalize_ssa_v_may_def_ops
454 #define FINALIZE_ALLOC alloc_maydef
455 #define FINALIZE_FREE free_maydefs
456 #define FINALIZE_TYPE struct maydef_optype_d
457 #define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
458 #define FINALIZE_OPS MAYDEF_OPS
459 #define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
460 #define FINALIZE_CORRECT_USE set_virtual_use_link
461 #define FINALIZE_BASE_ZERO 0
462 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
463 #define FINALIZE_BASE_TYPE unsigned
464 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
465 (PTR)->def_var = (VAL); \
466 (PTR)->use_var = (VAL); \
467 (PTR)->use_ptr.use = &((PTR)->use_var); \
468 link_imm_use_stmt (&((PTR)->use_ptr), \
470 #include "tree-ssa-opfinalize.h"
/* Thin wrapper over the template-generated finalizer.  The build vector is
   deliberately NOT truncated here -- finalize_ssa_vuses still needs it (see
   the cleanup_v_may_defs call there).  NOTE(review): return type and braces
   missing from this extraction.  */
474 finalize_ssa_v_may_defs (tree stmt
)
476 finalize_ssa_v_may_def_ops (stmt
);
480 /* Clear the in_list bits and empty the build array for v_may_defs. */
/* Resets each non-SSA_NAME entry's in_v_may_def_list flag in its var
   annotation (the flag used by append_v_may_def/append_vuse for duplicate
   suppression), then truncates the vector.  NOTE(review): return type,
   declarations of num/x, and braces are missing from this extraction.  */
483 cleanup_v_may_defs (void)
486 num
= VEC_length (tree
, build_v_may_defs
);
488 for (x
= 0; x
< num
; x
++)
490 tree t
= VEC_index (tree
, build_v_may_defs
, x
);
491 if (TREE_CODE (t
) != SSA_NAME
)
493 var_ann_t ann
= var_ann (t
);
494 ann
->in_v_may_def_list
= 0;
497 VEC_truncate (tree
, build_v_may_defs
, 0);
/* Template parameters for the VUSE finalizer (finalize_ssa_vuse_ops), keyed
   on DECL_UID like the other virtual operands.  NOTE(review): continuation
   lines of the backslash-continued defines (504, 520) are missing from this
   extraction.  */
501 #define FINALIZE_OPBUILD build_vuses
502 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_vuses, (I))
503 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
505 #define FINALIZE_FUNC finalize_ssa_vuse_ops
506 #define FINALIZE_ALLOC alloc_vuse
507 #define FINALIZE_FREE free_vuses
508 #define FINALIZE_TYPE struct vuse_optype_d
509 #define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
510 #define FINALIZE_OPS VUSE_OPS
511 #define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
512 #define FINALIZE_CORRECT_USE set_virtual_use_link
513 #define FINALIZE_BASE_ZERO 0
514 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
515 #define FINALIZE_BASE_TYPE unsigned
516 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
517 (PTR)->use_var = (VAL); \
518 (PTR)->use_ptr.use = &((PTR)->use_var); \
519 link_imm_use_stmt (&((PTR)->use_ptr), \
521 #include "tree-ssa-opfinalize.h"
524 /* Return a new vuse operand vector, comparing to OLD_OPS_P. */
/* Prunes VUSEs that are made redundant by a V_MAY_DEF of the same variable
   (detected via the in_v_may_def_list annotation bit), clears the remaining
   in_vuse_list bits, finalizes, and only then lets cleanup_v_may_defs reset
   the may-def vector it depended on.  NOTE(review): the return type, the
   declarations of vuse_index/vuse, the increments in both loops, the
   SSA_NAME skip paths, and several braces are missing from this
   extraction.  */
527 finalize_ssa_vuses (tree stmt
)
529 unsigned num
, num_v_may_defs
;
532 /* Remove superfluous VUSE operands. If the statement already has a
533 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
534 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
535 suppose that variable 'a' is aliased:
538 # a_3 = V_MAY_DEF <a_2>
541 The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
544 num
= VEC_length (tree
, build_vuses
);
545 num_v_may_defs
= VEC_length (tree
, build_v_may_defs
);
547 if (num
> 0 && num_v_may_defs
> 0)
549 for (vuse_index
= 0; vuse_index
< VEC_length (tree
, build_vuses
); )
552 vuse
= VEC_index (tree
, build_vuses
, vuse_index
);
553 if (TREE_CODE (vuse
) != SSA_NAME
)
555 var_ann_t ann
= var_ann (vuse
);
556 ann
->in_vuse_list
= 0;
557 if (ann
->in_v_may_def_list
)
559 VEC_ordered_remove (tree
, build_vuses
, vuse_index
);
567 /* Clear out the in_list bits. */
569 vuse_index
< VEC_length (tree
, build_vuses
);
572 tree t
= VEC_index (tree
, build_vuses
, vuse_index
);
573 if (TREE_CODE (t
) != SSA_NAME
)
575 var_ann_t ann
= var_ann (t
);
576 ann
->in_vuse_list
= 0;
580 finalize_ssa_vuse_ops (stmt
);
581 /* The v_may_def build vector wasn't cleaned up because we needed it. */
582 cleanup_v_may_defs ();
584 /* Free the vuses build vector. */
585 VEC_truncate (tree
, build_vuses
, 0);
589 /* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P. */
/* Template parameters for the V_MUST_DEF finalizer
   (finalize_ssa_v_must_def_ops).  NOTE(review): the continuation line after
   the link_imm_use_stmt call (original line 611) is missing from this
   extraction.  */
591 #define FINALIZE_OPBUILD build_v_must_defs
592 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_must_defs, (I))
593 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
594 build_v_must_defs, (I)))
595 #define FINALIZE_FUNC finalize_ssa_v_must_def_ops
596 #define FINALIZE_ALLOC alloc_mustdef
597 #define FINALIZE_FREE free_mustdefs
598 #define FINALIZE_TYPE struct mustdef_optype_d
599 #define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
600 #define FINALIZE_OPS MUSTDEF_OPS
601 #define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
602 #define FINALIZE_CORRECT_USE set_virtual_use_link
603 #define FINALIZE_BASE_ZERO 0
604 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
605 #define FINALIZE_BASE_TYPE unsigned
606 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
607 (PTR)->def_var = (VAL); \
608 (PTR)->kill_var = (VAL); \
609 (PTR)->use_ptr.use = &((PTR)->kill_var);\
610 link_imm_use_stmt (&((PTR)->use_ptr), \
612 #include "tree-ssa-opfinalize.h"
/* Finalizes the v_must_def build vector for STMT and empties it.
   NOTE(review): the return type and braces are missing from this
   extraction.  */
616 finalize_ssa_v_must_defs (tree stmt
)
618 /* In the presence of subvars, there may be more than one V_MUST_DEF per
619 statement (one for each subvar). It is a bit expensive to verify that
620 all must-defs in a statement belong to subvars if there is more than one
621 MUST-def, so we don't do it. Suffice to say, if you reach here without
622 having subvars, and have num >1, you have hit a bug. */
623 finalize_ssa_v_must_def_ops (stmt
);
624 VEC_truncate (tree
, build_v_must_defs
, 0);
628 /* Finalize all the build vectors, fill the new ones into INFO. */
/* Drives the five per-kind finalizers.  Order matters at the end:
   v_may_defs before vuses, because finalize_ssa_vuses consults the may-def
   build vector for duplicate pruning.  NOTE(review): return type and braces
   are missing from this extraction.  */
631 finalize_ssa_stmt_operands (tree stmt
)
633 finalize_ssa_defs (stmt
);
634 finalize_ssa_uses (stmt
);
635 finalize_ssa_v_must_defs (stmt
);
636 finalize_ssa_v_may_defs (stmt
);
637 finalize_ssa_vuses (stmt
);
641 /* Start the process of building up operands vectors in INFO. */
/* Sanity check: all five build vectors must be empty before a new stmt is
   parsed, i.e. the previous finalize pass truncated them all.
   NOTE(review): return type and braces are missing from this extraction.  */
644 start_ssa_stmt_operands (void)
646 gcc_assert (VEC_length (tree
, build_defs
) == 0);
647 gcc_assert (VEC_length (tree
, build_uses
) == 0);
648 gcc_assert (VEC_length (tree
, build_vuses
) == 0);
649 gcc_assert (VEC_length (tree
, build_v_may_defs
) == 0);
650 gcc_assert (VEC_length (tree
, build_v_must_defs
) == 0);
654 /* Add DEF_P to the list of pointers to operands. */
/* Note DEF_P is a `tree *` pushed cast to `tree`; the def vector stores
   pointers into the stmt so real defs reflect back into the tree.  */
657 append_def (tree
*def_p
)
659 VEC_safe_push (tree
, heap
, build_defs
, (tree
)def_p
);
663 /* Add USE_P to the list of pointers to operands. */
/* As with append_def, a `tree *` is stored cast to `tree`.  */
666 append_use (tree
*use_p
)
668 VEC_safe_push (tree
, heap
, build_uses
, (tree
)use_p
);
672 /* Add a new virtual may def for variable VAR to the build array. */
/* For plain decls, uses the in_v_may_def_list annotation bit for O(1)
   duplicate suppression (cleared later by cleanup_v_may_defs).
   NOTE(review): the early `return` on a duplicate and several braces are
   missing from this extraction; duplicate SSA_NAME handling is not
   visible here.  */
675 append_v_may_def (tree var
)
677 if (TREE_CODE (var
) != SSA_NAME
)
679 var_ann_t ann
= get_var_ann (var
);
681 /* Don't allow duplicate entries. */
682 if (ann
->in_v_may_def_list
)
684 ann
->in_v_may_def_list
= 1;
687 VEC_safe_push (tree
, heap
, build_v_may_defs
, (tree
)var
);
691 /* Add VAR to the list of virtual uses. */
/* Suppresses duplicates against BOTH the vuse list and the v_may_def list
   (a V_MAY_DEF already implies a VUSE of the variable), via the annotation
   bits.  NOTE(review): the early `return` on a duplicate and braces are
   missing from this extraction.  */
694 append_vuse (tree var
)
697 /* Don't allow duplicate entries. */
698 if (TREE_CODE (var
) != SSA_NAME
)
700 var_ann_t ann
= get_var_ann (var
);
702 if (ann
->in_vuse_list
|| ann
->in_v_may_def_list
)
704 ann
->in_vuse_list
= 1;
707 VEC_safe_push (tree
, heap
, build_vuses
, (tree
)var
);
711 /* Add VAR to the list of virtual must definitions for INFO. */
/* Unlike the may-def/vuse paths there is no annotation bit here; duplicate
   suppression is a linear scan of the (normally tiny) build vector.
   NOTE(review): the declaration of `i` and the `return` taken when a
   duplicate is found are missing from this extraction.  */
714 append_v_must_def (tree var
)
718 /* Don't allow duplicate entries. */
719 for (i
= 0; i
< VEC_length (tree
, build_v_must_defs
); i
++)
720 if (var
== VEC_index (tree
, build_v_must_defs
, i
))
723 VEC_safe_push (tree
, heap
, build_v_must_defs
, (tree
)var
);
727 /* REF is a tree that contains the entire pointer dereference
728 expression, if available, or NULL otherwise. ALIAS is the variable
729 we are asking if REF can access. OFFSET and SIZE come from the
730 memory access expression that generated this virtual operand.
731 FOR_CLOBBER is true is this is adding a virtual operand for a call
/* Alias-pruning predicate with three visible "cannot touch" filters:
   (1) SFT offset/size disjointness when accessing through a pointer of the
   parent variable's own type, (2) under strict aliasing, a component access
   cannot touch a non-aggregate, non-complex, non-pointer variable, and
   (3) the access offset lies past the end of ALIAS's DECL_SIZE.
   NOTE(review): severely damaged extraction -- the return type line, parts
   of each `if` condition (the lines starting each conjunction), the
   `return false;` after each debug dump, the final `return true;`, the
   `#endif`s for ACCESS_DEBUGGING, and the FOR_CLOBBER parameter line are
   all missing.  Recover from upstream before relying on the logic.  */
735 access_can_touch_variable (tree ref
, tree alias
, HOST_WIDE_INT offset
,
738 bool offsetgtz
= offset
> 0;
739 unsigned HOST_WIDE_INT uoffset
= (unsigned HOST_WIDE_INT
) offset
;
740 tree base
= ref
? get_base_address (ref
) : NULL
;
742 /* If ALIAS is an SFT, it can't be touched if the offset
743 and size of the access is not overlapping with the SFT offset and
744 size. This is only true if we are accessing through a pointer
745 to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
746 be accessing through a pointer to some substruct of the
747 structure, and if we try to prune there, we will have the wrong
748 offset, and get the wrong answer.
749 i.e., we can't prune without more work if we have something like
763 foo = &targetm.asm_out.aligned_op;
766 SFT.1, which represents hi, will have SFT_OFFSET=32 because in
767 terms of SFT_PARENT_VAR, that is where it is.
768 However, the access through the foo pointer will be at offset 0. */
770 && TREE_CODE (alias
) == STRUCT_FIELD_TAG
772 && TREE_TYPE (base
) == TREE_TYPE (SFT_PARENT_VAR (alias
))
773 && !overlap_subvar (offset
, size
, alias
, NULL
))
775 #ifdef ACCESS_DEBUGGING
776 fprintf (stderr
, "Access to ");
777 print_generic_expr (stderr
, ref
, 0);
778 fprintf (stderr
, " may not touch ");
779 print_generic_expr (stderr
, alias
, 0);
780 fprintf (stderr
, " in function %s\n", get_name (current_function_decl
));
785 /* Without strict aliasing, it is impossible for a component access
786 through a pointer to touch a random variable, unless that
787 variable *is* a structure or a pointer.
789 That is, given p->c, and some random global variable b,
790 there is no legal way that p->c could be an access to b.
792 Without strict aliasing on, we consider it legal to do something
795 struct foos { int l; };
797 static struct foos *getfoo(void);
800 struct foos *f = getfoo();
807 static struct foos *getfoo(void)
808 { return (struct foos *)&foo; }
810 (taken from 20000623-1.c)
813 && flag_strict_aliasing
814 && TREE_CODE (ref
) != INDIRECT_REF
816 && !AGGREGATE_TYPE_P (TREE_TYPE (alias
))
817 && TREE_CODE (TREE_TYPE (alias
)) != COMPLEX_TYPE
818 && !POINTER_TYPE_P (TREE_TYPE (alias
)))
820 #ifdef ACCESS_DEBUGGING
821 fprintf (stderr
, "Access to ");
822 print_generic_expr (stderr
, ref
, 0);
823 fprintf (stderr
, " may not touch ");
824 print_generic_expr (stderr
, alias
, 0);
825 fprintf (stderr
, " in function %s\n", get_name (current_function_decl
));
830 /* If the offset of the access is greater than the size of one of
831 the possible aliases, it can't be touching that alias, because it
832 would be past the end of the structure. */
834 && flag_strict_aliasing
835 && TREE_CODE (ref
) != INDIRECT_REF
837 && !POINTER_TYPE_P (TREE_TYPE (alias
))
840 && TREE_CODE (DECL_SIZE (alias
)) == INTEGER_CST
841 && uoffset
> TREE_INT_CST_LOW (DECL_SIZE (alias
)))
843 #ifdef ACCESS_DEBUGGING
844 fprintf (stderr
, "Access to ");
845 print_generic_expr (stderr
, ref
, 0);
846 fprintf (stderr
, " may not touch ");
847 print_generic_expr (stderr
, alias
, 0);
848 fprintf (stderr
, " in function %s\n", get_name (current_function_decl
));
857 /* Add VAR to the virtual operands array. FLAGS is as in
858 get_expr_operands. FULL_REF is a tree that contains the entire
859 pointer dereference expression, if available, or NULL otherwise.
860 OFFSET and SIZE come from the memory access expression that
861 generated this virtual operand. FOR_CLOBBER is true is this is
862 adding a virtual operand for a call clobber. */
/* Core virtual-operand dispatcher: marks volatile stmts, downgrades
   non-specific defs of unmodifiable vars to uses, bails under opf_no_vops,
   then splits on whether VAR is aliased.  Unaliased: V_MUST_DEF /
   V_MAY_DEF / (vuse path not visible).  Aliased: iterates the may_aliases
   vector, pruning with access_can_touch_variable, with extra handling so
   alias tags and stand-alone TMTs still get operands.
   NOTE(review): heavily damaged extraction -- the return type, the
   declarations of sym/v_ann/i/al/none_added in places, the `return` for
   opf_no_vops, the append_vuse calls on the use paths, the none_added
   updates, and many braces are missing.  Do not trust control flow as
   shown; recover from upstream.  */
865 add_virtual_operand (tree var
, stmt_ann_t s_ann
, int flags
,
866 tree full_ref
, HOST_WIDE_INT offset
,
867 HOST_WIDE_INT size
, bool for_clobber
)
869 VEC(tree
,gc
) *aliases
;
873 sym
= (TREE_CODE (var
) == SSA_NAME
? SSA_NAME_VAR (var
) : var
);
874 v_ann
= var_ann (sym
);
876 /* Mark statements with volatile operands. Optimizers should back
877 off from statements having volatile operands. */
878 if (TREE_THIS_VOLATILE (sym
) && s_ann
)
879 s_ann
->has_volatile_ops
= true;
881 /* If the variable cannot be modified and this is a V_MAY_DEF change
882 it into a VUSE. This happens when read-only variables are marked
883 call-clobbered and/or aliased to writable variables. So we only
884 check that this only happens on non-specific stores.
886 Note that if this is a specific store, i.e. associated with a
887 modify_expr, then we can't suppress the V_MAY_DEF, lest we run
888 into validation problems.
890 This can happen when programs cast away const, leaving us with a
891 store to read-only memory. If the statement is actually executed
892 at runtime, then the program is ill formed. If the statement is
893 not executed then all is well. At the very least, we cannot ICE. */
894 if ((flags
& opf_non_specific
) && unmodifiable_var_p (var
))
895 flags
&= ~(opf_is_def
| opf_kill_def
);
897 /* The variable is not a GIMPLE register. Add it (or its aliases) to
898 virtual operands, unless the caller has specifically requested
899 not to add virtual operands (used when adding operands inside an
900 ADDR_EXPR expression). */
901 if (flags
& opf_no_vops
)
904 aliases
= v_ann
->may_aliases
;
907 /* The variable is not aliased or it is an alias tag. */
908 if (flags
& opf_is_def
)
910 if (flags
& opf_kill_def
)
912 /* V_MUST_DEF for non-aliased, non-GIMPLE register
913 variable definitions. */
914 gcc_assert (!MTAG_P (var
)
915 || TREE_CODE (var
) == STRUCT_FIELD_TAG
);
916 append_v_must_def (var
);
920 /* Add a V_MAY_DEF for call-clobbered variables and
922 append_v_may_def (var
);
933 /* The variable is aliased. Add its aliases to the virtual
935 gcc_assert (VEC_length (tree
, aliases
) != 0);
937 if (flags
& opf_is_def
)
940 bool none_added
= true;
942 for (i
= 0; VEC_iterate (tree
, aliases
, i
, al
); i
++)
944 if (!access_can_touch_variable (full_ref
, al
, offset
, size
))
948 append_v_may_def (al
);
951 /* If the variable is also an alias tag, add a virtual
952 operand for it, otherwise we will miss representing
953 references to the members of the variable's alias set.
954 This fixes the bug in gcc.c-torture/execute/20020503-1.c.
956 It is also necessary to add bare defs on clobbers for
957 TMT's, so that bare TMT uses caused by pruning all the
958 aliases will link up properly with calls. In order to
959 keep the number of these bare defs we add down to the
960 minimum necessary, we keep track of which TMT's were used
961 alone in statement defs or vuses. */
962 if (v_ann
->is_aliased
964 || (TREE_CODE (var
) == TYPE_MEMORY_TAG
&& for_clobber
965 && TMT_USED_ALONE (var
)))
967 /* Every bare tmt def we add should have TMT_USED_ALONE
968 set on it, or else we will get the wrong answer on
971 if (none_added
&& !updating_used_alone
&& aliases_computed_p
972 && TREE_CODE (var
) == TYPE_MEMORY_TAG
)
973 gcc_assert (TMT_USED_ALONE (var
));
975 append_v_may_def (var
);
980 bool none_added
= true;
981 for (i
= 0; VEC_iterate (tree
, aliases
, i
, al
); i
++)
983 if (!access_can_touch_variable (full_ref
, al
, offset
, size
))
989 /* Similarly, append a virtual uses for VAR itself, when
990 it is an alias tag. */
991 if (v_ann
->is_aliased
|| none_added
)
998 /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
999 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1000 the statement's real operands, otherwise it is added to virtual
/* Splits a bare variable reference into real (GIMPLE register) vs virtual
   operand handling; the real path appends to defs/uses, the virtual path
   defers to add_virtual_operand with no ref/offset/size context.
   NOTE(review): the declarations of var/sym/v_ann/is_real_op, the
   `var = *var_p;` assignment, the append_def/append_use calls on the real
   path, and the if/else joining the two paths are missing from this
   extraction.  */
1004 add_stmt_operand (tree
*var_p
, stmt_ann_t s_ann
, int flags
)
1011 gcc_assert (SSA_VAR_P (var
));
1013 is_real_op
= is_gimple_reg (var
);
1015 /* If this is a real operand, the operand is either an SSA name or a
1016 decl. Virtual operands may only be decls. */
1017 gcc_assert (is_real_op
|| DECL_P (var
));
1019 sym
= (TREE_CODE (var
) == SSA_NAME
? SSA_NAME_VAR (var
) : var
);
1020 v_ann
= var_ann (sym
);
1022 /* Mark statements with volatile operands. Optimizers should back
1023 off from statements having volatile operands. */
1024 if (TREE_THIS_VOLATILE (sym
) && s_ann
)
1025 s_ann
->has_volatile_ops
= true;
1029 /* The variable is a GIMPLE register. Add it to real operands. */
1030 if (flags
& opf_is_def
)
1036 add_virtual_operand (var
, s_ann
, flags
, NULL_TREE
, 0, -1, false);
1040 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1041 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1043 STMT is the statement being processed, EXPR is the INDIRECT_REF
1046 FLAGS is as in get_expr_operands.
1048 FULL_REF contains the full pointer dereference expression, if we
1049 have it, or NULL otherwise.
1051 OFFSET and SIZE are the location of the access inside the
1052 dereferenced pointer, if known.
1054 RECURSE_ON_BASE should be set to true if we want to continue
1055 calling get_expr_operands on the base pointer, and false if
1056 something else will do it for us. */
/* Adds virtual operands for a pointer dereference: prefer the SSA name's
   flow-sensitive name_mem_tag, fall back to the base variable's
   type_mem_tag, mark the stmt volatile for constant pointers, and then
   (optionally) recurse on the pointer itself as a real USE.
   NOTE(review): damaged extraction -- the `tree ptr = *pptr;` line, the
   v_ann declaration, the dump_file guards around the missing-alias-info
   warning, the gcc_unreachable on the broken-invariant path, and various
   braces/else lines are missing.  */
1059 get_indirect_ref_operands (tree stmt
, tree expr
, int flags
,
1061 HOST_WIDE_INT offset
, HOST_WIDE_INT size
,
1062 bool recurse_on_base
)
1064 tree
*pptr
= &TREE_OPERAND (expr
, 0);
1066 stmt_ann_t s_ann
= stmt_ann (stmt
);
1068 /* Stores into INDIRECT_REF operands are never killing definitions. */
1069 flags
&= ~opf_kill_def
;
1071 if (SSA_VAR_P (ptr
))
1073 struct ptr_info_def
*pi
= NULL
;
1075 /* If PTR has flow-sensitive points-to information, use it. */
1076 if (TREE_CODE (ptr
) == SSA_NAME
1077 && (pi
= SSA_NAME_PTR_INFO (ptr
)) != NULL
1078 && pi
->name_mem_tag
)
1080 /* PTR has its own memory tag. Use it. */
1081 add_virtual_operand (pi
->name_mem_tag
, s_ann
, flags
,
1082 full_ref
, offset
, size
, false);
1086 /* If PTR is not an SSA_NAME or it doesn't have a name
1087 tag, use its type memory tag. */
1090 /* If we are emitting debugging dumps, display a warning if
1091 PTR is an SSA_NAME with no flow-sensitive alias
1092 information. That means that we may need to compute
1095 && TREE_CODE (ptr
) == SSA_NAME
1099 "NOTE: no flow-sensitive alias info for ");
1100 print_generic_expr (dump_file
, ptr
, dump_flags
);
1101 fprintf (dump_file
, " in ");
1102 print_generic_stmt (dump_file
, stmt
, dump_flags
);
1105 if (TREE_CODE (ptr
) == SSA_NAME
)
1106 ptr
= SSA_NAME_VAR (ptr
);
1107 v_ann
= var_ann (ptr
);
1109 if (v_ann
->type_mem_tag
)
1110 add_virtual_operand (v_ann
->type_mem_tag
, s_ann
, flags
,
1111 full_ref
, offset
, size
, false);
1114 else if (TREE_CODE (ptr
) == INTEGER_CST
)
1116 /* If a constant is used as a pointer, we can't generate a real
1117 operand for it but we mark the statement volatile to prevent
1118 optimizations from messing things up. */
1120 s_ann
->has_volatile_ops
= true;
1125 /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
1129 /* If requested, add a USE operand for the base pointer. */
1130 if (recurse_on_base
)
1131 get_expr_operands (stmt
, pptr
, opf_none
);
1135 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
/* Collects real operands from the TMR base and index, records the symbol in
   the stmt's addresses-taken set, then uses the TMR tag (an SFT parent per
   the get_ref_base_and_extent call) to add per-subvar virtual operands,
   demoting kills when the overlap is inexact or partial.
   NOTE(review): damaged extraction -- the return type, the early-return
   branches around the missing-tag and non-MTAG_P tag cases, the svars/sv/
   exact declarations, and several braces are missing.  The control flow
   between the `if (TMR_SYMBOL ...)` block and the subvar loop cannot be
   reconstructed from what is visible; recover from upstream.  */
1138 get_tmr_operands (tree stmt
, tree expr
, int flags
)
1140 tree tag
= TMR_TAG (expr
), ref
;
1141 HOST_WIDE_INT offset
, size
, maxsize
;
1143 stmt_ann_t s_ann
= stmt_ann (stmt
);
1145 /* First record the real operands. */
1146 get_expr_operands (stmt
, &TMR_BASE (expr
), opf_none
);
1147 get_expr_operands (stmt
, &TMR_INDEX (expr
), opf_none
);
1149 /* MEM_REFs should never be killing. */
1150 flags
&= ~opf_kill_def
;
1152 if (TMR_SYMBOL (expr
))
1154 stmt_ann_t ann
= stmt_ann (stmt
);
1155 add_to_addressable_set (TMR_SYMBOL (expr
), &ann
->addresses_taken
);
1160 /* Something weird, so ensure that we will be careful. */
1161 stmt_ann (stmt
)->has_volatile_ops
= true;
1167 get_expr_operands (stmt
, &tag
, flags
);
1171 ref
= get_ref_base_and_extent (tag
, &offset
, &size
, &maxsize
);
1172 gcc_assert (ref
!= NULL_TREE
);
1173 svars
= get_subvars_for_var (ref
);
1174 for (sv
= svars
; sv
; sv
= sv
->next
)
1177 if (overlap_subvar (offset
, maxsize
, sv
->var
, &exact
))
1179 int subvar_flags
= flags
;
1180 if (!exact
|| size
!= maxsize
)
1181 subvar_flags
&= ~opf_kill_def
;
1182 add_stmt_operand (&sv
->var
, s_ann
, subvar_flags
);
1188 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1189 clobbered variables in the function.
   CALLEE, when non-NULL, lets IPA reference analysis prune variables
   the callee provably does not read or write; pruned writes become
   plain uses (or nothing), and statistics are kept in
   clobber_stats.  */
1192 add_call_clobber_ops (tree stmt
, tree callee
)
1196 stmt_ann_t s_ann
= stmt_ann (stmt
);
1197 bitmap not_read_b
, not_written_b
;
1199 /* Functions that are not const, pure or never return may clobber
1200 call-clobbered variables. */
1202 s_ann
->makes_clobbering_call
= true;
1204 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1205 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1208 add_stmt_operand (&global_var
, s_ann
, opf_is_def
);
1212 /* Get info for local and module level statics. There is a bit
1213 set for each static if the call being processed does not read
1214 or write that variable. */
1215 not_read_b
= callee
? ipa_reference_get_not_read_global (callee
) : NULL
;
1216 not_written_b
= callee
? ipa_reference_get_not_written_global (callee
) : NULL
;
1217 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1218 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars
, 0, u
, bi
)
1220 tree var
= referenced_var_lookup (u
);
1221 unsigned int escape_mask
= var_ann (var
)->escape_mask
;
1222 tree real_var
= var
;
1226 /* Not read and not written are computed on regular vars, not
1227 subvars, so look at the parent var if this is an SFT. */
1228 if (TREE_CODE (var
) == STRUCT_FIELD_TAG
)
1229 real_var
= SFT_PARENT_VAR (var
);
1231 not_read
= not_read_b
? bitmap_bit_p (not_read_b
,
1232 DECL_UID (real_var
)) : false;
1233 not_written
= not_written_b
? bitmap_bit_p (not_written_b
,
1234 DECL_UID (real_var
)) : false;
1235 gcc_assert (!unmodifiable_var_p (var
));
1237 clobber_stats
.clobbered_vars
++;
1239 /* See if this variable is really clobbered by this function. */
1241 /* Trivial case: Things escaping only to pure/const are not
1242 clobbered by non-pure-const, and only read by pure/const. */
1243 if ((escape_mask
& ~(ESCAPE_TO_PURE_CONST
)) == 0)
1245 tree call
= get_call_expr_in (stmt
);
1246 if (call_expr_flags (call
) & (ECF_CONST
| ECF_PURE
))
1248 add_stmt_operand (&var
, s_ann
, opf_none
);
1249 clobber_stats
.unescapable_clobbers_avoided
++;
1254 clobber_stats
.unescapable_clobbers_avoided
++;
/* IPA says the callee does not write this static: downgrade the
   clobber to a read (use), or drop it entirely if also not read.  */
1261 clobber_stats
.static_write_clobbers_avoided
++;
1263 add_stmt_operand (&var
, s_ann
, opf_none
);
1265 clobber_stats
.static_read_clobbers_avoided
++;
1268 add_virtual_operand (var
, s_ann
, opf_is_def
,
1275 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  CALLEE, when non-NULL, lets IPA reference analysis skip
   statics the callee provably does not read.  */
1279 add_call_read_ops (tree stmt
, tree callee
)
1283 stmt_ann_t s_ann
= stmt_ann (stmt
);
1286 /* if the function is not pure, it may reference memory. Add
1287 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1288 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1291 add_stmt_operand (&global_var
, s_ann
, opf_none
);
1295 not_read_b
= callee
? ipa_reference_get_not_read_global (callee
) : NULL
;
1297 /* Add a VUSE for each call-clobbered variable. */
1298 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars
, 0, u
, bi
)
1300 tree var
= referenced_var (u
);
1301 tree real_var
= var
;
1304 clobber_stats
.readonly_clobbers
++;
1306 /* Not read and not written are computed on regular vars, not
1307 subvars, so look at the parent var if this is an SFT. */
1309 if (TREE_CODE (var
) == STRUCT_FIELD_TAG
)
1310 real_var
= SFT_PARENT_VAR (var
);
1312 not_read
= not_read_b
? bitmap_bit_p (not_read_b
,
1313 DECL_UID (real_var
)) : false;
1317 clobber_stats
.static_readonly_clobbers_avoided
++;
1321 add_stmt_operand (&var
, s_ann
, opf_none
| opf_non_specific
);
1326 /* A subroutine of get_expr_operands to handle CALL_EXPR.
   Adds virtual clobber/read operands according to the call's ECF
   flags (only once aliasing has been computed), then scans the
   callee expression and every argument for real operands.  */
1329 get_call_expr_operands (tree stmt
, tree expr
)
1332 int call_flags
= call_expr_flags (expr
);
1334 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1335 operands for all the symbols that have been found to be
1338 Note that if aliases have not been computed, the global effects
1339 of calls will not be included in the SSA web. This is fine
1340 because no optimizer should run before aliases have been
1341 computed. By not bothering with virtual operands for CALL_EXPRs
1342 we avoid adding superfluous virtual operands, which can be a
1343 significant compile time sink (See PR 15855). */
1344 if (aliases_computed_p
1345 && !bitmap_empty_p (call_clobbered_vars
)
1346 && !(call_flags
& ECF_NOVOPS
))
1348 /* A 'pure' or a 'const' function never call-clobbers anything.
1349 A 'noreturn' function might, but since we don't return anyway
1350 there is no point in recording that. */
1351 if (TREE_SIDE_EFFECTS (expr
)
1352 && !(call_flags
& (ECF_PURE
| ECF_CONST
| ECF_NORETURN
)))
1353 add_call_clobber_ops (stmt
, get_callee_fndecl (expr
));
1354 else if (!(call_flags
& ECF_CONST
))
1355 add_call_read_ops (stmt
, get_callee_fndecl (expr
));
1358 /* Find uses in the called function. */
1359 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), opf_none
);
/* Scan every argument in the TREE_LIST of actual parameters.  */
1361 for (op
= TREE_OPERAND (expr
, 1); op
; op
= TREE_CHAIN (op
))
1362 get_expr_operands (stmt
, &TREE_VALUE (op
), opf_none
);
1364 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_none
);
1368 /* Scan operands in the ASM_EXPR stmt referred to in INFO.
   Outputs become defs, inputs become uses; memory-constrained
   operands are recorded as address-taken.  A "memory" clobber makes
   the asm clobber every call-clobbered and addressable variable
   (or .GLOBAL_VAR when variables were grouped).  */
1371 get_asm_expr_operands (tree stmt
)
1373 stmt_ann_t s_ann
= stmt_ann (stmt
);
1374 int noutputs
= list_length (ASM_OUTPUTS (stmt
));
1375 const char **oconstraints
1376 = (const char **) alloca ((noutputs
) * sizeof (const char *));
1379 const char *constraint
;
1380 bool allows_mem
, allows_reg
, is_inout
;
/* First pass: outputs.  Each is a def; collect its constraint for
   later matching against inputs.  */
1382 for (i
=0, link
= ASM_OUTPUTS (stmt
); link
; ++i
, link
= TREE_CHAIN (link
))
1384 oconstraints
[i
] = constraint
1385 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1386 parse_output_constraint (&constraint
, i
, 0, 0,
1387 &allows_mem
, &allows_reg
, &is_inout
);
1389 /* This should have been split in gimplify_asm_expr. */
1390 gcc_assert (!allows_reg
|| !is_inout
);
1392 /* Memory operands are addressable. Note that STMT needs the
1393 address of this operand. */
1394 if (!allows_reg
&& allows_mem
)
1396 tree t
= get_base_address (TREE_VALUE (link
));
1397 if (t
&& DECL_P (t
) && s_ann
)
1398 add_to_addressable_set (t
, &s_ann
->addresses_taken
);
1401 get_expr_operands (stmt
, &TREE_VALUE (link
), opf_is_def
);
/* Second pass: inputs.  Each is a use.  */
1404 for (link
= ASM_INPUTS (stmt
); link
; link
= TREE_CHAIN (link
))
1406 constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1407 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
1408 oconstraints
, &allows_mem
, &allows_reg
);
1410 /* Memory operands are addressable. Note that STMT needs the
1411 address of this operand. */
1412 if (!allows_reg
&& allows_mem
)
1414 tree t
= get_base_address (TREE_VALUE (link
));
1415 if (t
&& DECL_P (t
) && s_ann
)
1416 add_to_addressable_set (t
, &s_ann
->addresses_taken
);
1419 get_expr_operands (stmt
, &TREE_VALUE (link
), 0);
1423 /* Clobber memory for asm ("" : : : "memory"); */
1424 for (link
= ASM_CLOBBERS (stmt
); link
; link
= TREE_CHAIN (link
))
1425 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link
)), "memory") == 0)
1430 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1431 decided to group them). */
1433 add_stmt_operand (&global_var
, s_ann
, opf_is_def
);
1435 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars
, 0, i
, bi
)
1437 tree var
= referenced_var (i
);
1438 add_stmt_operand (&var
, s_ann
, opf_is_def
| opf_non_specific
);
1441 /* Now clobber all addressables. */
1442 EXECUTE_IF_SET_IN_BITMAP (addressable_vars
, 0, i
, bi
)
1444 tree var
= referenced_var (i
);
1446 /* Subvars are explicitly represented in this list, so
1447 we don't need the original to be added to the clobber
1448 ops, but the original *will* be in this list because
1449 we keep the addressability of the original
1450 variable up-to-date so we don't screw up the rest of
1452 if (var_can_have_subvars (var
)
1453 && get_subvars_for_var (var
) != NULL
)
1456 add_stmt_operand (&var
, s_ann
, opf_is_def
| opf_non_specific
);
1464 /* Recursively scan the expression pointed to by EXPR_P in statement
1465 referred to by INFO. FLAGS is one of the OPF_* constants modifying
1466 how to interpret the operands found.
   This is the central dispatcher over TREE_CODE: each case records
   real/virtual operands via add_stmt_operand, add_virtual_operand,
   or a specialized helper (get_indirect_ref_operands,
   get_tmr_operands, get_call_expr_operands).
   NOTE(review): the visible text is a line-mangled extraction —
   many case labels, braces, and return statements were dropped;
   tokens below are preserved verbatim.  */
1469 get_expr_operands (tree stmt
, tree
*expr_p
, int flags
)
1471 enum tree_code code
;
1472 enum tree_code_class
class;
1473 tree expr
= *expr_p
;
1474 stmt_ann_t s_ann
= stmt_ann (stmt
);
1479 code
= TREE_CODE (expr
);
1480 class = TREE_CODE_CLASS (code
);
/* ADDR_EXPR handling (case label missing in extraction).  */
1485 /* Taking the address of a variable does not represent a
1486 reference to it, but the fact that the statement takes its
1487 address will be of interest to some passes (e.g. alias
1489 add_to_addressable_set (TREE_OPERAND (expr
, 0), &s_ann
->addresses_taken
);
1491 /* If the address is invariant, there may be no interesting
1492 variable references inside. */
1493 if (is_gimple_min_invariant (expr
))
1496 /* Otherwise, there may be variables referenced inside but there
1497 should be no VUSEs created, since the referenced objects are
1498 not really accessed. The only operands that we should find
1499 here are ARRAY_REF indices which will always be real operands
1500 (GIMPLE does not allow non-registers as array indices). */
1501 flags
|= opf_no_vops
;
1502 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
1506 case STRUCT_FIELD_TAG
:
1507 case TYPE_MEMORY_TAG
:
1508 case NAME_MEMORY_TAG
:
1509 add_stmt_operand (expr_p
, s_ann
, flags
);
/* VAR_DECL / PARM_DECL / RESULT_DECL (labels missing in extraction).  */
1518 /* Add the subvars for a variable if it has subvars, to DEFS
1519 or USES. Otherwise, add the variable itself. Whether it
1520 goes to USES or DEFS depends on the operand flags. */
1521 if (var_can_have_subvars (expr
)
1522 && (svars
= get_subvars_for_var (expr
)))
1525 for (sv
= svars
; sv
; sv
= sv
->next
)
1526 add_stmt_operand (&sv
->var
, s_ann
, flags
);
1529 add_stmt_operand (expr_p
, s_ann
, flags
);
1534 case MISALIGNED_INDIRECT_REF
:
1535 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), flags
);
1538 case ALIGN_INDIRECT_REF
:
1540 get_indirect_ref_operands (stmt
, expr
, flags
, NULL_TREE
,
1544 case TARGET_MEM_REF
:
1545 get_tmr_operands (stmt
, expr
, flags
);
1548 case ARRAY_RANGE_REF
:
1549 /* Treat array references as references to the virtual variable
1550 representing the array. The virtual variable for an ARRAY_REF
1551 is the VAR_DECL for the array. */
1552 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
1553 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), opf_none
);
1554 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_none
);
1555 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 3), opf_none
);
/* COMPONENT_REF / ARRAY_REF / BIT_FIELD_REF handling (labels missing
   in extraction).  */
1564 HOST_WIDE_INT offset
, size
, maxsize
;
1567 /* This component reference becomes an access to all of the
1568 subvariables it can touch, if we can determine that, but
1569 *NOT* the real one. If we can't determine which fields we
1570 could touch, the recursion will eventually get to a
1571 variable and add *all* of its subvars, or whatever is the
1572 minimum correct subset. */
1573 ref
= get_ref_base_and_extent (expr
, &offset
, &size
, &maxsize
);
1574 if (SSA_VAR_P (ref
) && get_subvars_for_var (ref
))
1577 subvar_t svars
= get_subvars_for_var (ref
);
1579 for (sv
= svars
; sv
; sv
= sv
->next
)
1583 if (overlap_subvar (offset
, maxsize
, sv
->var
, &exact
))
1585 int subvar_flags
= flags
;
1587 if (!exact
|| size
!= maxsize
)
1588 subvar_flags
&= ~opf_kill_def
;
1589 add_stmt_operand (&sv
->var
, s_ann
, subvar_flags
);
1594 flags
|= opf_no_vops
;
1596 else if (TREE_CODE (ref
) == INDIRECT_REF
)
1598 get_indirect_ref_operands (stmt
, ref
, flags
, expr
,
1599 offset
, maxsize
, false);
1600 flags
|= opf_no_vops
;
1603 /* Even if we found subvars above we need to ensure to see
1604 immediate uses for d in s.a[d]. In case of s.a having
1605 a subvar we'd miss it otherwise. */
1606 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0),
1607 flags
& ~opf_kill_def
);
1609 if (code
== COMPONENT_REF
)
1611 if (s_ann
&& TREE_THIS_VOLATILE (TREE_OPERAND (expr
, 1)))
1612 s_ann
->has_volatile_ops
= true;
1613 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_none
);
1615 else if (code
== ARRAY_REF
)
1617 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), opf_none
);
1618 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_none
);
1619 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 3), opf_none
);
1625 case WITH_SIZE_EXPR
:
1626 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1627 and an rvalue reference to its second argument. */
1628 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), opf_none
);
1629 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
/* CALL_EXPR (label missing in extraction).  */
1633 get_call_expr_operands (stmt
, expr
);
/* COND_EXPR / VEC_COND_EXPR operands (label missing in extraction).  */
1638 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), opf_none
);
1639 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), opf_none
);
1640 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_none
);
/* MODIFY_EXPR: RHS first, then decide kill-def semantics of LHS
   (label missing in extraction).  */
1648 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), opf_none
);
1650 op
= TREE_OPERAND (expr
, 0);
1651 if (TREE_CODE (op
) == WITH_SIZE_EXPR
)
1652 op
= TREE_OPERAND (expr
, 0);
1653 if (TREE_CODE (op
) == ARRAY_RANGE_REF
1654 || TREE_CODE (op
) == REALPART_EXPR
1655 || TREE_CODE (op
) == IMAGPART_EXPR
)
1656 subflags
= opf_is_def
;
1658 subflags
= opf_is_def
| opf_kill_def
;
1660 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), subflags
);
1666 /* General aggregate CONSTRUCTORs have been decomposed, but they
1667 are still in use as the COMPLEX_EXPR equivalent for vectors. */
1668 constructor_elt
*ce
;
1669 unsigned HOST_WIDE_INT idx
;
1672 VEC_iterate (constructor_elt
, CONSTRUCTOR_ELTS (expr
), idx
, ce
);
1674 get_expr_operands (stmt
, &ce
->value
, opf_none
);
1679 case TRUTH_NOT_EXPR
:
1681 case VIEW_CONVERT_EXPR
:
1683 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
1686 case TRUTH_AND_EXPR
:
1688 case TRUTH_XOR_EXPR
:
1694 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
1695 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), flags
);
1700 case REALIGN_LOAD_EXPR
:
1702 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
1703 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), flags
);
1704 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), flags
);
1717 case OMP_RETURN_EXPR
:
1722 /* Expressions that make no memory references. */
1726 if (class == tcc_unary
)
1728 if (class == tcc_binary
|| class == tcc_comparison
)
1730 if (class == tcc_constant
|| class == tcc_type
)
1734 /* If we get here, something has gone wrong. */
1735 #ifdef ENABLE_CHECKING
1736 fprintf (stderr
, "unhandled expression in get_expr_operands():\n");
1738 fputs ("\n", stderr
);
1744 /* Parse STMT looking for operands. OLD_OPS is the original stmt operand
1745 cache for STMT, if it existed before. When finished, the various build_*
1746 operand vectors will have potential operands. in them.
   Dispatches on the statement code: MODIFY_EXPR, COND_EXPR,
   SWITCH_EXPR, ASM_EXPR, etc.; each arm feeds get_expr_operands
   with the appropriate flags.  */
1749 parse_ssa_operands (tree stmt
)
1751 enum tree_code code
;
1753 code
= TREE_CODE (stmt
);
/* MODIFY_EXPR arm (case label missing in extraction).  */
1757 /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
1758 either only part of LHS is modified or if the RHS might throw,
1759 otherwise, use V_MUST_DEF.
1761 ??? If it might throw, we should represent somehow that it is killed
1762 on the fallthrough path. */
1764 tree lhs
= TREE_OPERAND (stmt
, 0);
1765 int lhs_flags
= opf_is_def
;
1767 get_expr_operands (stmt
, &TREE_OPERAND (stmt
, 1), opf_none
);
1769 /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
1770 or not the entire LHS is modified; that depends on what's
1771 inside the VIEW_CONVERT_EXPR. */
1772 if (TREE_CODE (lhs
) == VIEW_CONVERT_EXPR
)
1773 lhs
= TREE_OPERAND (lhs
, 0);
1775 if (TREE_CODE (lhs
) != ARRAY_RANGE_REF
1776 && TREE_CODE (lhs
) != BIT_FIELD_REF
)
1777 lhs_flags
|= opf_kill_def
;
1779 get_expr_operands (stmt
, &TREE_OPERAND (stmt
, 0), lhs_flags
);
1784 get_expr_operands (stmt
, &COND_EXPR_COND (stmt
), opf_none
);
1788 get_expr_operands (stmt
, &SWITCH_COND (stmt
), opf_none
);
1792 get_asm_expr_operands (stmt
);
1796 get_expr_operands (stmt
, &TREE_OPERAND (stmt
, 0), opf_none
);
1800 get_expr_operands (stmt
, &GOTO_DESTINATION (stmt
), opf_none
);
1804 get_expr_operands (stmt
, &LABEL_EXPR_LABEL (stmt
), opf_none
);
1807 /* These nodes contain no variable references. */
1809 case CASE_LABEL_EXPR
:
1810 case TRY_CATCH_EXPR
:
1811 case TRY_FINALLY_EXPR
:
1812 case EH_FILTER_EXPR
:
1818 /* Notice that if get_expr_operands tries to use &STMT as the operand
1819 pointer (which may only happen for USE operands), we will fail in
1820 append_use. This default will handle statements like empty
1821 statements, or CALL_EXPRs that may appear on the RHS of a statement
1822 or as statements themselves. */
1823 get_expr_operands (stmt
, &stmt
, opf_none
);
1829 /* Create an operands cache for STMT.
   Resets the volatile flag, parses the statement into the build_*
   vectors, sorts the virtual operand vectors, and commits them to
   the statement's operand cache.  */
1832 build_ssa_operands (tree stmt
)
1834 stmt_ann_t ann
= get_stmt_ann (stmt
);
1836 /* Initially assume that the statement has no volatile operands. */
1838 ann
->has_volatile_ops
= false;
1840 start_ssa_stmt_operands ();
1842 parse_ssa_operands (stmt
);
1843 operand_build_sort_virtual (build_vuses
);
1844 operand_build_sort_virtual (build_v_may_defs
);
1845 operand_build_sort_virtual (build_v_must_defs
);
1847 finalize_ssa_stmt_operands (stmt
);
1851 /* Free any operands vectors in OPS.
   Only NULLs the list heads here; the nodes themselves are managed
   by the operand allocator (recycled elsewhere).  */
1853 free_ssa_operands (stmt_operands_p ops
)
1855 ops
->def_ops
= NULL
;
1856 ops
->use_ops
= NULL
;
1857 ops
->maydef_ops
= NULL
;
1858 ops
->mustdef_ops
= NULL
;
1859 ops
->vuse_ops
= NULL
;
1863 /* Get the operands of statement STMT. Note that repeated calls to
1864 get_stmt_operands for the same statement will do nothing until the
1865 statement is marked modified by a call to mark_stmt_modified().
   No-op unless SSA operands are active; asserts the statement is
   marked modified before rebuilding, and times the rebuild under
   TV_TREE_OPS.  */
1868 update_stmt_operands (tree stmt
)
1870 stmt_ann_t ann
= get_stmt_ann (stmt
);
1872 /* If get_stmt_operands is called before SSA is initialized, dont
1874 if (!ssa_operands_active ())
1877 /* The optimizers cannot handle statements that are nothing but a
1878 _DECL. This indicates a bug in the gimplifier. */
1879 gcc_assert (!SSA_VAR_P (stmt
));
1881 gcc_assert (ann
->modified
);
1883 timevar_push (TV_TREE_OPS
);
1885 build_ssa_operands (stmt
);
1887 /* Clear the modified bit for STMT. Subsequent calls to
1888 get_stmt_operands for this statement will do nothing until the
1889 statement is marked modified by a call to mark_stmt_modified(). */
1892 timevar_pop (TV_TREE_OPS
);
1896 /* Copies virtual operands from SRC to DST.
   Rebuilds DEST's operands, appends SRC's VUSE/V_MAY_DEF/V_MUST_DEF
   trees into the build vectors, commits them, then walks both
   statements' operand lists in lockstep to copy the exact SSA_NAME
   values — asserting the two lists have equal length.  */
1899 copy_virtual_operands (tree dest
, tree src
)
1902 ssa_op_iter iter
, old_iter
;
1903 use_operand_p use_p
, u2
;
1904 def_operand_p def_p
, d2
;
1906 build_ssa_operands (dest
);
1908 /* Copy all the virtual fields. */
1909 FOR_EACH_SSA_TREE_OPERAND (t
, src
, iter
, SSA_OP_VUSE
)
1911 FOR_EACH_SSA_TREE_OPERAND (t
, src
, iter
, SSA_OP_VMAYDEF
)
1912 append_v_may_def (t
);
1913 FOR_EACH_SSA_TREE_OPERAND (t
, src
, iter
, SSA_OP_VMUSTDEF
)
1914 append_v_must_def (t
);
/* Nothing virtual to copy: all three build vectors are empty.  */
1916 if (VEC_length (tree
, build_vuses
) == 0
1917 && VEC_length (tree
, build_v_may_defs
) == 0
1918 && VEC_length (tree
, build_v_must_defs
) == 0)
1921 /* Now commit the virtual operands to this stmt. */
1922 finalize_ssa_v_must_defs (dest
);
1923 finalize_ssa_v_may_defs (dest
);
1924 finalize_ssa_vuses (dest
);
1926 /* Finally, set the field to the same values as then originals. */
1929 t
= op_iter_init_tree (&old_iter
, src
, SSA_OP_VUSE
);
1930 FOR_EACH_SSA_USE_OPERAND (use_p
, dest
, iter
, SSA_OP_VUSE
)
1932 gcc_assert (!op_iter_done (&old_iter
));
1934 t
= op_iter_next_tree (&old_iter
);
1936 gcc_assert (op_iter_done (&old_iter
));
1938 op_iter_init_maydef (&old_iter
, src
, &u2
, &d2
);
1939 FOR_EACH_SSA_MAYDEF_OPERAND (def_p
, use_p
, dest
, iter
)
1941 gcc_assert (!op_iter_done (&old_iter
));
1942 SET_USE (use_p
, USE_FROM_PTR (u2
));
1943 SET_DEF (def_p
, DEF_FROM_PTR (d2
));
1944 op_iter_next_maymustdef (&u2
, &d2
, &old_iter
);
1946 gcc_assert (op_iter_done (&old_iter
));
1948 op_iter_init_mustdef (&old_iter
, src
, &u2
, &d2
);
1949 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p
, use_p
, dest
, iter
)
1951 gcc_assert (!op_iter_done (&old_iter
));
1952 SET_USE (use_p
, USE_FROM_PTR (u2
));
1953 SET_DEF (def_p
, DEF_FROM_PTR (d2
));
1954 op_iter_next_maymustdef (&u2
, &d2
, &old_iter
);
1956 gcc_assert (op_iter_done (&old_iter
));
1961 /* Specifically for use in DOM's expression analysis. Given a store, we
1962 create an artificial stmt which looks like a load from the store, this can
1963 be used to eliminate redundant loads. OLD_OPS are the operands from the
1964 store stmt, and NEW_STMT is the new load which represents a load of the
   stored value.  The fake statement's virtual uses are rebuilt from
   OLD_STMT's virtual defs, and its use operands are delinked from the
   immediate-use web since the statement is not in the IL.  */
1968 create_ssa_artficial_load_stmt (tree new_stmt
, tree old_stmt
)
1973 use_operand_p use_p
;
1976 ann
= get_stmt_ann (new_stmt
);
1978 /* process the stmt looking for operands. */
1979 start_ssa_stmt_operands ();
1980 parse_ssa_operands (new_stmt
);
/* Clear the in-list markers left by parsing so the vectors can be
   discarded without confusing later parses.  */
1982 for (x
= 0; x
< VEC_length (tree
, build_vuses
); x
++)
1984 tree t
= VEC_index (tree
, build_vuses
, x
);
1985 if (TREE_CODE (t
) != SSA_NAME
)
1987 var_ann_t ann
= var_ann (t
);
1988 ann
->in_vuse_list
= 0;
1992 for (x
= 0; x
< VEC_length (tree
, build_v_may_defs
); x
++)
1994 tree t
= VEC_index (tree
, build_v_may_defs
, x
);
1995 if (TREE_CODE (t
) != SSA_NAME
)
1997 var_ann_t ann
= var_ann (t
);
1998 ann
->in_v_may_def_list
= 0;
2002 /* Remove any virtual operands that were found. */
2003 VEC_truncate (tree
, build_v_may_defs
, 0);
2004 VEC_truncate (tree
, build_v_must_defs
, 0);
2005 VEC_truncate (tree
, build_vuses
, 0);
2007 /* For each VDEF on the original statement, we want to create a
2008 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
2010 FOR_EACH_SSA_TREE_OPERAND (op
, old_stmt
, iter
,
2011 (SSA_OP_VMAYDEF
| SSA_OP_VMUSTDEF
))
2014 /* Now build the operands for this new stmt. */
2015 finalize_ssa_stmt_operands (new_stmt
);
2017 /* All uses in this fake stmt must not be in the immediate use lists. */
2018 FOR_EACH_SSA_USE_OPERAND (use_p
, new_stmt
, iter
, SSA_OP_ALL_USES
)
2019 delink_imm_use (use_p
);
2023 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is done
2024 to test the validity of the swap operation.
   When the operand cache is live, the two operands' use-pointer
   entries are also swapped so each keeps its position in its
   immediate-use list.  */
2027 swap_tree_operands (tree stmt
, tree
*exp0
, tree
*exp1
)
2033 /* If the operand cache is active, attempt to preserve the relative positions
2034 of these two operands in their respective immediate use lists. */
2035 if (ssa_operands_active () && op0
!= op1
)
2037 use_optype_p use0
, use1
, ptr
;
2040 /* Find the 2 operands in the cache, if they are there. */
2041 for (ptr
= USE_OPS (stmt
); ptr
; ptr
= ptr
->next
)
2042 if (USE_OP_PTR (ptr
)->use
== exp0
)
2048 for (ptr
= USE_OPS (stmt
); ptr
; ptr
= ptr
->next
)
2049 if (USE_OP_PTR (ptr
)->use
== exp1
)
2055 /* If both uses don't have operand entries, there isn't much we can do
2056 at this point. Presumably we dont need to worry about it. */
2059 tree
*tmp
= USE_OP_PTR (use1
)->use
;
2060 USE_OP_PTR (use1
)->use
= USE_OP_PTR (use0
)->use
;
2061 USE_OP_PTR (use0
)->use
= tmp
;
2065 /* Now swap the data. */
2071 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
2072 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
2073 a single variable whose address has been taken or any other valid
2074 GIMPLE memory reference (structure reference, array, etc). If the
2075 base address of REF is a decl that has sub-variables, also add all
2076 of its sub-variables.
   Also sets TREE_ADDRESSABLE on the variable(s) added.  */
2079 add_to_addressable_set (tree ref
, bitmap
*addresses_taken
)
2084 gcc_assert (addresses_taken
);
2086 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2087 as the only thing we take the address of. If VAR is a structure,
2088 taking the address of a field means that the whole structure may
2089 be referenced using pointer arithmetic. See PR 21407 and the
2090 ensuing mailing list discussion. */
2091 var
= get_base_address (ref
);
2092 if (var
&& SSA_VAR_P (var
))
2094 if (*addresses_taken
== NULL
)
2095 *addresses_taken
= BITMAP_GGC_ALLOC ();
2097 if (var_can_have_subvars (var
)
2098 && (svars
= get_subvars_for_var (var
)))
2101 for (sv
= svars
; sv
; sv
= sv
->next
)
2103 bitmap_set_bit (*addresses_taken
, DECL_UID (sv
->var
));
2104 TREE_ADDRESSABLE (sv
->var
) = 1;
2109 bitmap_set_bit (*addresses_taken
, DECL_UID (var
));
2110 TREE_ADDRESSABLE (var
) = 1;
2116 /* Scan the immediate_use list for VAR making sure its linked properly.
2117 Return TRUE if there is a problem.  (Checks the doubly-linked
   immediate-use ring in both directions, verifies every use points
   back at VAR, and bounds the walk to catch cycles.)  */
2120 verify_imm_links (FILE *f
, tree var
)
2122 use_operand_p ptr
, prev
, list
;
2125 gcc_assert (TREE_CODE (var
) == SSA_NAME
);
2127 list
= &(SSA_NAME_IMM_USE_NODE (var
));
2128 gcc_assert (list
->use
== NULL
);
/* An unlinked root node must have both links NULL.  */
2130 if (list
->prev
== NULL
)
2132 gcc_assert (list
->next
== NULL
);
/* Forward walk: every node's prev must point at its predecessor.  */
2138 for (ptr
= list
->next
; ptr
!= list
; )
2140 if (prev
!= ptr
->prev
)
2143 if (ptr
->use
== NULL
)
2144 goto error
; /* 2 roots, or SAFE guard node. */
2145 else if (*(ptr
->use
) != var
)
2151 /* Avoid infinite loops. 50,000,000 uses probably indicates a
2153 if (count
++ > 50000000)
2157 /* Verify list in the other direction. */
2159 for (ptr
= list
->prev
; ptr
!= list
; )
2161 if (prev
!= ptr
->next
)
/* Error reporting path: dump the offending use and its statement.  */
2175 if (ptr
->stmt
&& stmt_modified_p (ptr
->stmt
))
2177 fprintf (f
, " STMT MODIFIED. - <%p> ", (void *)ptr
->stmt
);
2178 print_generic_stmt (f
, ptr
->stmt
, TDF_SLIM
);
2180 fprintf (f
, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr
,
2182 print_generic_expr (f
, USE_FROM_PTR (ptr
), TDF_SLIM
);
2188 /* Dump all the immediate uses of SSA_NAME VAR to FILE: a summary
   line (no uses / single use / N uses) followed by each use site,
   with virtual operands shown for non-register uses.  */
2191 dump_immediate_uses_for (FILE *file
, tree var
)
2193 imm_use_iterator iter
;
2194 use_operand_p use_p
;
2196 gcc_assert (var
&& TREE_CODE (var
) == SSA_NAME
);
2198 print_generic_expr (file
, var
, TDF_SLIM
);
2199 fprintf (file
, " : -->");
2200 if (has_zero_uses (var
))
2201 fprintf (file
, " no uses.\n");
2203 if (has_single_use (var
))
2204 fprintf (file
, " single use.\n");
2206 fprintf (file
, "%d uses.\n", num_imm_uses (var
));
2208 FOR_EACH_IMM_USE_FAST (use_p
, iter
, var
)
2210 if (!is_gimple_reg (USE_FROM_PTR (use_p
)))
2211 print_generic_stmt (file
, USE_STMT (use_p
), TDF_VOPS
);
2213 print_generic_stmt (file
, USE_STMT (use_p
), TDF_SLIM
);
2215 fprintf(file
, "\n");
2219 /* Dump all the immediate uses to FILE, iterating over every
   SSA_NAME in the function (index 0 is unused, hence x = 1).  */
2222 dump_immediate_uses (FILE *file
)
2227 fprintf (file
, "Immediate_uses: \n\n");
2228 for (x
= 1; x
< num_ssa_names
; x
++)
2233 dump_immediate_uses_for (file
, var
);
2238 /* Dump def-use edges on stderr.  (Debugger convenience wrapper.)  */
2241 debug_immediate_uses (void)
2243 dump_immediate_uses (stderr
);
2246 /* Dump def-use edges for VAR on stderr.  (Debugger convenience
   wrapper around dump_immediate_uses_for.)  */
2249 debug_immediate_uses_for (tree var
)
2251 dump_immediate_uses_for (stderr
, var
);
2254 #include "gt-tree-ssa-operands.h"