1 /* SSA operands management for trees.
2 Copyright (C) 2003 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
23 #include "coretypes.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
35 /* Flags to describe operand properties in get_stmt_operands and helpers. */
37 /* By default, operands are loaded. */
40 /* Operand is the target of an assignment expression or a
41 call-clobbered variable */
42 #define opf_is_def (1 << 0)
44 /* Operand is the target of an assignment expression. */
45 #define opf_kill_def (1 << 2)
47 /* No virtual operands should be created in the expression. This is used
48 when traversing ADDR_EXPR nodes which have different semantics than
49 other expressions. Inside an ADDR_EXPR node, the only operands that we
50 need to consider are indices into arrays. For instance, &a.b[i] should
51 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
53 #define opf_no_vops (1 << 1)
55 /* Array for building all the def operands. */
56 static GTY (()) varray_type build_defs
;
58 /* Array for building all the use operands. */
59 static GTY (()) varray_type build_uses
;
61 /* Array for building all the v_may_def operands. */
62 static GTY (()) varray_type build_v_may_defs
;
64 /* Array for building all the vuse operands. */
65 static GTY (()) varray_type build_vuses
;
67 /* Array for building all the v_must_def operands. */
68 static GTY (()) varray_type build_v_must_defs
;
70 #ifdef ENABLE_CHECKING
71 tree check_build_stmt
;
74 typedef struct voperands_d
76 v_may_def_optype v_may_def_ops
;
78 v_must_def_optype v_must_def_ops
;
81 static void note_addressable (tree
, stmt_ann_t
);
82 static void get_expr_operands (tree
, tree
*, int, voperands_t
);
83 static void get_asm_expr_operands (tree
, voperands_t
);
84 static inline void append_def (tree
*, tree
);
85 static inline void append_use (tree
*, tree
);
86 static void append_v_may_def (tree
, tree
, voperands_t
);
87 static void append_v_must_def (tree
, tree
, voperands_t
);
88 static void add_call_clobber_ops (tree
, voperands_t
);
89 static void add_call_read_ops (tree
, voperands_t
);
90 static void add_stmt_operand (tree
*, tree
, int, voperands_t
);
92 /* Return a vector of contiguous memory of a specified size. */
94 static inline def_optype
95 allocate_def_optype (unsigned num
)
99 size
= sizeof (struct def_optype_d
) + sizeof (tree
*) * (num
- 1);
100 def_ops
= ggc_alloc (size
);
101 def_ops
->num_defs
= num
;
105 static inline use_optype
106 allocate_use_optype (unsigned num
)
110 size
= sizeof (struct use_optype_d
) + sizeof (tree
*) * (num
- 1);
111 use_ops
= ggc_alloc (size
);
112 use_ops
->num_uses
= num
;
116 static inline v_may_def_optype
117 allocate_v_may_def_optype (unsigned num
)
119 v_may_def_optype v_may_def_ops
;
121 size
= sizeof (struct v_may_def_optype_d
) + sizeof (tree
) * ((num
* 2) - 1);
122 v_may_def_ops
= ggc_alloc (size
);
123 v_may_def_ops
->num_v_may_defs
= num
;
124 return v_may_def_ops
;
127 static inline vuse_optype
128 allocate_vuse_optype (unsigned num
)
130 vuse_optype vuse_ops
;
132 size
= sizeof (struct vuse_optype_d
) + sizeof (tree
) * (num
- 1);
133 vuse_ops
= ggc_alloc (size
);
134 vuse_ops
->num_vuses
= num
;
138 static inline v_must_def_optype
139 allocate_v_must_def_optype (unsigned num
)
141 v_must_def_optype v_must_def_ops
;
143 size
= sizeof (struct v_must_def_optype_d
) + sizeof (tree
*) * (num
- 1);
144 v_must_def_ops
= ggc_alloc (size
);
145 v_must_def_ops
->num_v_must_defs
= num
;
146 return v_must_def_ops
;
150 free_uses (use_optype
*uses
, bool dealloc
)
161 free_defs (def_optype
*defs
, bool dealloc
)
172 free_vuses (vuse_optype
*vuses
, bool dealloc
)
183 free_v_may_defs (v_may_def_optype
*v_may_defs
, bool dealloc
)
188 ggc_free (*v_may_defs
);
194 free_v_must_defs (v_must_def_optype
*v_must_defs
, bool dealloc
)
199 ggc_free (*v_must_defs
);
205 remove_vuses (tree stmt
)
209 ann
= stmt_ann (stmt
);
211 free_vuses (&(ann
->vuse_ops
), true);
215 remove_v_may_defs (tree stmt
)
219 ann
= stmt_ann (stmt
);
221 free_v_may_defs (&(ann
->v_may_def_ops
), true);
225 remove_v_must_defs (tree stmt
)
229 ann
= stmt_ann (stmt
);
231 free_v_must_defs (&(ann
->v_must_def_ops
), true);
235 init_ssa_operands (void)
237 VARRAY_TREE_PTR_INIT (build_defs
, 5, "build defs");
238 VARRAY_TREE_PTR_INIT (build_uses
, 10, "build uses");
239 VARRAY_TREE_INIT (build_v_may_defs
, 10, "build v_may_defs");
240 VARRAY_TREE_INIT (build_vuses
, 10, "build vuses");
241 VARRAY_TREE_INIT (build_v_must_defs
, 10, "build v_must_defs");
245 fini_ssa_operands (void)
250 finalize_ssa_defs (tree stmt
)
256 num
= VARRAY_ACTIVE_SIZE (build_defs
);
260 #ifdef ENABLE_CHECKING
261 /* There should only be a single real definition per assignment. */
262 if (TREE_CODE (stmt
) == MODIFY_EXPR
&& num
> 1)
266 def_ops
= allocate_def_optype (num
);
267 for (x
= 0; x
< num
; x
++)
268 def_ops
->defs
[x
].def
= VARRAY_TREE_PTR (build_defs
, x
);
269 VARRAY_POP_ALL (build_defs
);
271 ann
= stmt_ann (stmt
);
272 ann
->def_ops
= def_ops
;
276 finalize_ssa_uses (tree stmt
)
282 num
= VARRAY_ACTIVE_SIZE (build_uses
);
286 #ifdef ENABLE_CHECKING
289 /* If the pointer to the operand is the statement itself, something is
290 wrong. It means that we are pointing to a local variable (the
291 initial call to get_stmt_operands does not pass a pointer to a
293 for (x
= 0; x
< num
; x
++)
294 if (*(VARRAY_TREE_PTR (build_uses
, x
)) == stmt
)
299 use_ops
= allocate_use_optype (num
);
300 for (x
= 0; x
< num
; x
++)
301 use_ops
->uses
[x
].use
= VARRAY_TREE_PTR (build_uses
, x
);
302 VARRAY_POP_ALL (build_uses
);
304 ann
= stmt_ann (stmt
);
305 ann
->use_ops
= use_ops
;
309 finalize_ssa_v_may_defs (tree stmt
)
312 v_may_def_optype v_may_def_ops
;
315 num
= VARRAY_ACTIVE_SIZE (build_v_may_defs
);
319 #ifdef ENABLE_CHECKING
320 /* V_MAY_DEFs must be entered in pairs of result/uses. */
325 v_may_def_ops
= allocate_v_may_def_optype (num
/ 2);
326 for (x
= 0; x
< num
; x
++)
327 v_may_def_ops
->v_may_defs
[x
] = VARRAY_TREE (build_v_may_defs
, x
);
328 VARRAY_CLEAR (build_v_may_defs
);
330 ann
= stmt_ann (stmt
);
331 ann
->v_may_def_ops
= v_may_def_ops
;
335 finalize_ssa_vuses (tree stmt
)
339 vuse_optype vuse_ops
;
340 v_may_def_optype v_may_defs
;
342 #ifdef ENABLE_CHECKING
343 if (VARRAY_ACTIVE_SIZE (build_v_may_defs
) > 0)
345 fprintf (stderr
, "Please finalize V_MAY_DEFs before finalize VUSES.\n");
350 num
= VARRAY_ACTIVE_SIZE (build_vuses
);
354 /* Remove superfluous VUSE operands. If the statement already has a
355 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
356 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
357 suppose that variable 'a' is aliased:
360 # a_3 = V_MAY_DEF <a_2>
363 The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
366 ann
= stmt_ann (stmt
);
367 v_may_defs
= V_MAY_DEF_OPS (ann
);
368 if (NUM_V_MAY_DEFS (v_may_defs
) > 0)
371 for (i
= 0; i
< VARRAY_ACTIVE_SIZE (build_vuses
); i
++)
374 for (j
= 0; j
< NUM_V_MAY_DEFS (v_may_defs
); j
++)
376 tree vuse_var
, v_may_def_var
;
377 tree vuse
= VARRAY_TREE (build_vuses
, i
);
378 tree v_may_def
= V_MAY_DEF_OP (v_may_defs
, j
);
380 if (TREE_CODE (vuse
) == SSA_NAME
)
381 vuse_var
= SSA_NAME_VAR (vuse
);
385 if (TREE_CODE (v_may_def
) == SSA_NAME
)
386 v_may_def_var
= SSA_NAME_VAR (v_may_def
);
388 v_may_def_var
= v_may_def
;
390 if (vuse_var
== v_may_def_var
)
397 /* If we found a useless VUSE operand, remove it from the
398 operand array by replacing it with the last active element
399 in the operand array (unless the useless VUSE was the
400 last operand, in which case we simply remove it. */
403 if (i
!= VARRAY_ACTIVE_SIZE (build_vuses
) - 1)
405 VARRAY_TREE (build_vuses
, i
)
406 = VARRAY_TREE (build_vuses
,
407 VARRAY_ACTIVE_SIZE (build_vuses
) - 1);
409 VARRAY_POP (build_vuses
);
411 /* We want to rescan the element at this index, unless
412 this was the last element, in which case the loop
419 num
= VARRAY_ACTIVE_SIZE (build_vuses
);
420 /* We could have reduced the size to zero now, however. */
424 vuse_ops
= allocate_vuse_optype (num
);
425 for (x
= 0; x
< num
; x
++)
426 vuse_ops
->vuses
[x
] = VARRAY_TREE (build_vuses
, x
);
427 VARRAY_CLEAR (build_vuses
);
428 ann
->vuse_ops
= vuse_ops
;
432 finalize_ssa_v_must_defs (tree stmt
)
436 v_must_def_optype v_must_def_ops
;
438 num
= VARRAY_ACTIVE_SIZE (build_v_must_defs
);
442 #ifdef ENABLE_CHECKING
443 /* There should only be a single V_MUST_DEF per assignment. */
444 if (TREE_CODE (stmt
) == MODIFY_EXPR
&& num
> 1)
448 v_must_def_ops
= allocate_v_must_def_optype (num
);
449 for (x
= 0; x
< num
; x
++)
450 v_must_def_ops
->v_must_defs
[x
] = VARRAY_TREE (build_v_must_defs
, x
);
451 VARRAY_POP_ALL (build_v_must_defs
);
453 ann
= stmt_ann (stmt
);
454 ann
->v_must_def_ops
= v_must_def_ops
;
458 finalize_ssa_stmt_operands (tree stmt
)
460 #ifdef ENABLE_CHECKING
461 if (check_build_stmt
== NULL
)
465 finalize_ssa_defs (stmt
);
466 finalize_ssa_uses (stmt
);
467 finalize_ssa_v_must_defs (stmt
);
468 finalize_ssa_v_may_defs (stmt
);
469 finalize_ssa_vuses (stmt
);
471 #ifdef ENABLE_CHECKING
472 check_build_stmt
= NULL
;
478 verify_start_operands (tree stmt ATTRIBUTE_UNUSED
)
480 #ifdef ENABLE_CHECKING
481 if (VARRAY_ACTIVE_SIZE (build_defs
) > 0
482 || VARRAY_ACTIVE_SIZE (build_uses
) > 0
483 || VARRAY_ACTIVE_SIZE (build_vuses
) > 0
484 || VARRAY_ACTIVE_SIZE (build_v_may_defs
) > 0
485 || VARRAY_ACTIVE_SIZE (build_v_must_defs
) > 0)
487 if (check_build_stmt
!= NULL
)
489 check_build_stmt
= stmt
;
494 /* Add DEF_P to the list of pointers to operands defined by STMT. */
497 append_def (tree
*def_p
, tree stmt ATTRIBUTE_UNUSED
)
499 #ifdef ENABLE_CHECKING
500 if (check_build_stmt
!= stmt
)
503 VARRAY_PUSH_TREE_PTR (build_defs
, def_p
);
507 /* Add USE_P to the list of pointers to operands used by STMT. */
510 append_use (tree
*use_p
, tree stmt ATTRIBUTE_UNUSED
)
512 #ifdef ENABLE_CHECKING
513 if (check_build_stmt
!= stmt
)
516 VARRAY_PUSH_TREE_PTR (build_uses
, use_p
);
520 /* Add a new virtual def for variable VAR to statement STMT. If PREV_VOPS
521 is not NULL, the existing entries are preserved and no new entries are
522 added here. This is done to preserve the SSA numbering of virtual
526 append_v_may_def (tree var
, tree stmt
, voperands_t prev_vops
)
532 #ifdef ENABLE_CHECKING
533 if (check_build_stmt
!= stmt
)
537 ann
= stmt_ann (stmt
);
539 /* Don't allow duplicate entries. */
541 for (i
= 0; i
< VARRAY_ACTIVE_SIZE (build_v_may_defs
); i
+= 2)
543 tree result
= VARRAY_TREE (build_v_may_defs
, i
);
545 || (TREE_CODE (result
) == SSA_NAME
546 && var
== SSA_NAME_VAR (result
)))
550 /* If the statement already had virtual definitions, see if any of the
551 existing V_MAY_DEFs matches VAR. If so, re-use it, otherwise add a new
552 V_MAY_DEF for VAR. */
556 for (i
= 0; i
< NUM_V_MAY_DEFS (prev_vops
->v_may_def_ops
); i
++)
558 result
= V_MAY_DEF_RESULT (prev_vops
->v_may_def_ops
, i
);
560 || (TREE_CODE (result
) == SSA_NAME
561 && SSA_NAME_VAR (result
) == var
))
563 source
= V_MAY_DEF_OP (prev_vops
->v_may_def_ops
, i
);
568 /* If no previous V_MAY_DEF operand was found for VAR, create one now. */
569 if (source
== NULL_TREE
)
575 VARRAY_PUSH_TREE (build_v_may_defs
, result
);
576 VARRAY_PUSH_TREE (build_v_may_defs
, source
);
580 /* Add VAR to the list of virtual uses for STMT. If PREV_VOPS
581 is not NULL, the existing entries are preserved and no new entries are
582 added here. This is done to preserve the SSA numbering of virtual
586 append_vuse (tree var
, tree stmt
, voperands_t prev_vops
)
593 #ifdef ENABLE_CHECKING
594 if (check_build_stmt
!= stmt
)
598 ann
= stmt_ann (stmt
);
600 /* Don't allow duplicate entries. */
601 for (i
= 0; i
< VARRAY_ACTIVE_SIZE (build_vuses
); i
++)
603 tree vuse_var
= VARRAY_TREE (build_vuses
, i
);
605 || (TREE_CODE (vuse_var
) == SSA_NAME
606 && var
== SSA_NAME_VAR (vuse_var
)))
610 /* If the statement already had virtual uses, see if any of the
611 existing VUSEs matches VAR. If so, re-use it, otherwise add a new
616 for (i
= 0; i
< NUM_VUSES (prev_vops
->vuse_ops
); i
++)
618 vuse
= VUSE_OP (prev_vops
->vuse_ops
, i
);
620 || (TREE_CODE (vuse
) == SSA_NAME
621 && SSA_NAME_VAR (vuse
) == var
))
628 /* If VAR existed already in PREV_VOPS, re-use it. */
632 VARRAY_PUSH_TREE (build_vuses
, var
);
635 /* Add VAR to the list of virtual must definitions for STMT. If PREV_VOPS
636 is not NULL, the existing entries are preserved and no new entries are
637 added here. This is done to preserve the SSA numbering of virtual
641 append_v_must_def (tree var
, tree stmt
, voperands_t prev_vops
)
648 #ifdef ENABLE_CHECKING
649 if (check_build_stmt
!= stmt
)
653 ann
= stmt_ann (stmt
);
655 /* Don't allow duplicate entries. */
656 for (i
= 0; i
< VARRAY_ACTIVE_SIZE (build_v_must_defs
); i
++)
658 tree v_must_def_var
= VARRAY_TREE (build_v_must_defs
, i
);
659 if (var
== v_must_def_var
660 || (TREE_CODE (v_must_def_var
) == SSA_NAME
661 && var
== SSA_NAME_VAR (v_must_def_var
)))
665 /* If the statement already had virtual must defs, see if any of the
666 existing V_MUST_DEFs matches VAR. If so, re-use it, otherwise add a new
667 V_MUST_DEF for VAR. */
669 v_must_def
= NULL_TREE
;
671 for (i
= 0; i
< NUM_V_MUST_DEFS (prev_vops
->v_must_def_ops
); i
++)
673 v_must_def
= V_MUST_DEF_OP (prev_vops
->v_must_def_ops
, i
);
674 if (v_must_def
== var
675 || (TREE_CODE (v_must_def
) == SSA_NAME
676 && SSA_NAME_VAR (v_must_def
) == var
))
683 /* If VAR existed already in PREV_VOPS, re-use it. */
687 VARRAY_PUSH_TREE (build_v_must_defs
, var
);
691 /* External entry point which by-passes the previous vops mechanism. */
693 add_vuse (tree var
, tree stmt
)
695 append_vuse (var
, stmt
, NULL
);
699 /* Get the operands of statement STMT. Note that repeated calls to
700 get_stmt_operands for the same statement will do nothing until the
701 statement is marked modified by a call to modify_stmt(). */
704 get_stmt_operands (tree stmt
)
708 struct voperands_d prev_vops
;
710 #if defined ENABLE_CHECKING
711 /* The optimizers cannot handle statements that are nothing but a
712 _DECL. This indicates a bug in the gimplifier. */
713 if (SSA_VAR_P (stmt
))
717 /* Ignore error statements. */
718 if (TREE_CODE (stmt
) == ERROR_MARK
)
721 ann
= get_stmt_ann (stmt
);
723 /* If the statement has not been modified, the operands are still valid. */
727 timevar_push (TV_TREE_OPS
);
729 /* Initially assume that the statement has no volatile operands, nor
730 makes aliased loads or stores. */
731 ann
->has_volatile_ops
= false;
732 ann
->makes_aliased_stores
= false;
733 ann
->makes_aliased_loads
= false;
735 /* Remove any existing operands as they will be scanned again. */
736 free_defs (&(ann
->def_ops
), true);
737 free_uses (&(ann
->use_ops
), true);
739 /* Before removing existing virtual operands, save them in PREV_VOPS so
740 that we can re-use their SSA versions. */
741 prev_vops
.v_may_def_ops
= V_MAY_DEF_OPS (ann
);
742 prev_vops
.vuse_ops
= VUSE_OPS (ann
);
743 prev_vops
.v_must_def_ops
= V_MUST_DEF_OPS (ann
);
745 /* Don't free the previous values to memory since we're still using them. */
746 free_v_may_defs (&(ann
->v_may_def_ops
), false);
747 free_vuses (&(ann
->vuse_ops
), false);
748 free_v_must_defs (&(ann
->v_must_def_ops
), false);
750 start_ssa_stmt_operands (stmt
);
752 code
= TREE_CODE (stmt
);
756 get_expr_operands (stmt
, &TREE_OPERAND (stmt
, 1), opf_none
, &prev_vops
);
757 if (TREE_CODE (TREE_OPERAND (stmt
, 0)) == ARRAY_REF
758 || TREE_CODE (TREE_OPERAND (stmt
, 0)) == COMPONENT_REF
759 || TREE_CODE (TREE_OPERAND (stmt
, 0)) == REALPART_EXPR
760 || TREE_CODE (TREE_OPERAND (stmt
, 0)) == IMAGPART_EXPR
761 /* Use a V_MAY_DEF if the RHS might throw, as the LHS won't be
762 modified in that case. FIXME we should represent somehow
763 that it is killed on the fallthrough path. */
764 || tree_could_throw_p (TREE_OPERAND (stmt
, 1)))
765 get_expr_operands (stmt
, &TREE_OPERAND (stmt
, 0), opf_is_def
,
768 get_expr_operands (stmt
, &TREE_OPERAND (stmt
, 0),
769 opf_is_def
| opf_kill_def
, &prev_vops
);
773 get_expr_operands (stmt
, &COND_EXPR_COND (stmt
), opf_none
, &prev_vops
);
777 get_expr_operands (stmt
, &SWITCH_COND (stmt
), opf_none
, &prev_vops
);
781 get_asm_expr_operands (stmt
, &prev_vops
);
785 get_expr_operands (stmt
, &TREE_OPERAND (stmt
, 0), opf_none
, &prev_vops
);
789 get_expr_operands (stmt
, &GOTO_DESTINATION (stmt
), opf_none
, &prev_vops
);
793 get_expr_operands (stmt
, &LABEL_EXPR_LABEL (stmt
), opf_none
, &prev_vops
);
796 /* These nodes contain no variable references. */
798 case CASE_LABEL_EXPR
:
800 case TRY_FINALLY_EXPR
:
807 /* Notice that if get_expr_operands tries to use &STMT as the operand
808 pointer (which may only happen for USE operands), we will abort in
809 append_use. This default will handle statements like empty statements,
810 CALL_EXPRs or VA_ARG_EXPRs that may appear on the RHS of a statement
811 or as statements themselves. */
812 get_expr_operands (stmt
, &stmt
, opf_none
, &prev_vops
);
816 finalize_ssa_stmt_operands (stmt
);
818 /* Now free the previous virtual ops to memory. */
819 free_v_may_defs (&(prev_vops
.v_may_def_ops
), true);
820 free_vuses (&(prev_vops
.vuse_ops
), true);
821 free_v_must_defs (&(prev_vops
.v_must_def_ops
), true);
823 /* Clear the modified bit for STMT. Subsequent calls to
824 get_stmt_operands for this statement will do nothing until the
825 statement is marked modified by a call to modify_stmt(). */
828 timevar_pop (TV_TREE_OPS
);
832 /* Recursively scan the expression pointed by EXPR_P in statement STMT.
833 FLAGS is one of the OPF_* constants modifying how to interpret the
834 operands found. PREV_VOPS is as in append_v_may_def and append_vuse. */
837 get_expr_operands (tree stmt
, tree
*expr_p
, int flags
, voperands_t prev_vops
)
843 if (expr
== NULL
|| expr
== error_mark_node
)
846 code
= TREE_CODE (expr
);
847 class = TREE_CODE_CLASS (code
);
849 /* We could have the address of a component, array member, etc which
850 has interesting variable references. */
851 if (code
== ADDR_EXPR
)
853 /* Taking the address of a variable does not represent a
854 reference to it, but the fact that STMT takes its address will be
855 of interest to some passes (e.g. alias resolution). */
856 add_stmt_operand (expr_p
, stmt
, 0, NULL
);
858 /* If the address is constant (invariant is not sufficient), there will
859 be no interesting variable references inside. */
860 if (TREE_CONSTANT (expr
))
863 /* There should be no VUSEs created, since the referenced objects are
864 not really accessed. The only operands that we should find here
865 are ARRAY_REF indices which will always be real operands (GIMPLE
866 does not allow non-registers as array indices). */
867 flags
|= opf_no_vops
;
869 /* Avoid recursion. */
870 expr_p
= &TREE_OPERAND (expr
, 0);
872 code
= TREE_CODE (expr
);
873 class = TREE_CODE_CLASS (code
);
876 /* Expressions that make no memory references. */
880 || code
== FUNCTION_DECL
881 || code
== EXC_PTR_EXPR
882 || code
== FILTER_EXPR
883 || code
== LABEL_DECL
)
886 /* If we found a variable, add it to DEFS or USES depending on the
888 if (SSA_VAR_P (expr
))
890 add_stmt_operand (expr_p
, stmt
, flags
, prev_vops
);
894 /* Pointer dereferences always represent a use of the base pointer. */
895 if (code
== INDIRECT_REF
)
897 tree
*pptr
= &TREE_OPERAND (expr
, 0);
902 if (!aliases_computed_p
)
904 /* If the pointer does not have a memory tag and aliases have not
905 been computed yet, mark the statement as having volatile
906 operands to prevent DOM from entering it in equivalence tables
907 and DCE from killing it. */
908 stmt_ann (stmt
)->has_volatile_ops
= true;
912 struct ptr_info_def
*pi
= NULL
;
914 /* If we have computed aliasing already, check if PTR has
915 flow-sensitive points-to information. */
916 if (TREE_CODE (ptr
) == SSA_NAME
917 && (pi
= SSA_NAME_PTR_INFO (ptr
)) != NULL
920 /* PTR has its own memory tag. Use it. */
921 add_stmt_operand (&pi
->name_mem_tag
, stmt
, flags
,
926 /* If PTR is not an SSA_NAME or it doesn't have a name
927 tag, use its type memory tag. */
930 /* If we are emitting debugging dumps, display a warning if
931 PTR is an SSA_NAME with no flow-sensitive alias
932 information. That means that we may need to compute
935 && TREE_CODE (ptr
) == SSA_NAME
939 "NOTE: no flow-sensitive alias info for ");
940 print_generic_expr (dump_file
, ptr
, dump_flags
);
941 fprintf (dump_file
, " in ");
942 print_generic_stmt (dump_file
, stmt
, dump_flags
);
945 if (TREE_CODE (ptr
) == SSA_NAME
)
946 ptr
= SSA_NAME_VAR (ptr
);
948 add_stmt_operand (&ann
->type_mem_tag
, stmt
, flags
, prev_vops
);
953 /* If a constant is used as a pointer, we can't generate a real
954 operand for it but we mark the statement volatile to prevent
955 optimizations from messing things up. */
956 else if (TREE_CODE (ptr
) == INTEGER_CST
)
958 stmt_ann (stmt
)->has_volatile_ops
= true;
962 /* Everything else *should* have been folded elsewhere, but users
963 are smarter than we in finding ways to write invalid code. We
964 cannot just abort here. If we were absolutely certain that we
965 do handle all valid cases, then we could just do nothing here.
966 That seems optimistic, so attempt to do something logical... */
967 else if ((TREE_CODE (ptr
) == PLUS_EXPR
|| TREE_CODE (ptr
) == MINUS_EXPR
)
968 && TREE_CODE (TREE_OPERAND (ptr
, 0)) == ADDR_EXPR
969 && TREE_CODE (TREE_OPERAND (ptr
, 1)) == INTEGER_CST
)
971 /* Make sure we know the object is addressable. */
972 pptr
= &TREE_OPERAND (ptr
, 0);
973 add_stmt_operand (pptr
, stmt
, 0, NULL
);
975 /* Mark the object itself with a VUSE. */
976 pptr
= &TREE_OPERAND (*pptr
, 0);
977 get_expr_operands (stmt
, pptr
, flags
, prev_vops
);
981 /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
985 /* Add a USE operand for the base pointer. */
986 get_expr_operands (stmt
, pptr
, opf_none
, prev_vops
);
990 /* Treat array references as references to the virtual variable
991 representing the array. The virtual variable for an ARRAY_REF
992 is the VAR_DECL for the array. */
993 if (code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
995 /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
996 according to the value of IS_DEF. Recurse if the LHS of the
997 ARRAY_REF node is not a regular variable. */
998 if (SSA_VAR_P (TREE_OPERAND (expr
, 0)))
999 add_stmt_operand (expr_p
, stmt
, flags
, prev_vops
);
1001 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
, prev_vops
);
1003 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), opf_none
, prev_vops
);
1004 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_none
, prev_vops
);
1005 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 3), opf_none
, prev_vops
);
1009 /* Similarly to arrays, references to compound variables (complex types
1010 and structures/unions) are globbed.
1012 FIXME: This means that
1018 will not be constant propagated because the two partial
1019 definitions to 'a' will kill each other. Note that SRA may be
1020 able to fix this problem if 'a' can be scalarized. */
1021 if (code
== IMAGPART_EXPR
|| code
== REALPART_EXPR
|| code
== COMPONENT_REF
)
1023 /* If the LHS of the compound reference is not a regular variable,
1024 recurse to keep looking for more operands in the subexpression. */
1025 if (SSA_VAR_P (TREE_OPERAND (expr
, 0)))
1026 add_stmt_operand (expr_p
, stmt
, flags
, prev_vops
);
1028 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
, prev_vops
);
1030 if (code
== COMPONENT_REF
)
1031 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_none
, prev_vops
);
1035 /* Function calls. Add every argument to USES. If the callee is
1036 neither pure nor const, create a VDEF reference for GLOBAL_VAR
1037 (See find_vars_r). */
1038 if (code
== CALL_EXPR
)
1041 int call_flags
= call_expr_flags (expr
);
1043 /* Find uses in the called function. */
1044 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), opf_none
, prev_vops
);
1046 for (op
= TREE_OPERAND (expr
, 1); op
; op
= TREE_CHAIN (op
))
1047 get_expr_operands (stmt
, &TREE_VALUE (op
), opf_none
, prev_vops
);
1049 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 2), opf_none
, prev_vops
);
1051 if (bitmap_first_set_bit (call_clobbered_vars
) >= 0)
1053 /* A 'pure' or a 'const' functions never call clobber anything.
1054 A 'noreturn' function might, but since we don't return anyway
1055 there is no point in recording that. */
1057 & (ECF_PURE
| ECF_CONST
| ECF_NORETURN
)))
1058 add_call_clobber_ops (stmt
, prev_vops
);
1059 else if (!(call_flags
& (ECF_CONST
| ECF_NORETURN
)))
1060 add_call_read_ops (stmt
, prev_vops
);
1062 else if (!aliases_computed_p
)
1063 stmt_ann (stmt
)->has_volatile_ops
= true;
1069 if (code
== TREE_LIST
)
1073 for (op
= expr
; op
; op
= TREE_CHAIN (op
))
1074 get_expr_operands (stmt
, &TREE_VALUE (op
), flags
, prev_vops
);
1080 if (code
== MODIFY_EXPR
)
1082 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), opf_none
, prev_vops
);
1083 if (TREE_CODE (TREE_OPERAND (expr
, 0)) == ARRAY_REF
1084 || TREE_CODE (TREE_OPERAND (expr
, 0)) == COMPONENT_REF
1085 || TREE_CODE (TREE_OPERAND (expr
, 0)) == REALPART_EXPR
1086 || TREE_CODE (TREE_OPERAND (expr
, 0)) == IMAGPART_EXPR
)
1087 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), opf_is_def
,
1090 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0),
1091 opf_is_def
| opf_kill_def
, prev_vops
);
1096 /* Mark VA_ARG_EXPR nodes as making volatile references. FIXME,
1097 this is needed because we currently do not gimplify VA_ARG_EXPR
1099 if (code
== VA_ARG_EXPR
)
1101 stmt_ann (stmt
)->has_volatile_ops
= true;
1105 /* Unary expressions. */
1107 || code
== TRUTH_NOT_EXPR
1108 || code
== BIT_FIELD_REF
1109 || code
== CONSTRUCTOR
)
1111 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
, prev_vops
);
1115 /* Binary expressions. */
1118 || code
== TRUTH_AND_EXPR
1119 || code
== TRUTH_OR_EXPR
1120 || code
== TRUTH_XOR_EXPR
1121 || code
== COMPOUND_EXPR
1122 || code
== OBJ_TYPE_REF
)
1124 tree op0
= TREE_OPERAND (expr
, 0);
1125 tree op1
= TREE_OPERAND (expr
, 1);
1127 /* If it would be profitable to swap the operands, then do so to
1128 canonicalize the statement, enabling better optimization.
1130 By placing canonicalization of such expressions here we
1131 transparently keep statements in canonical form, even
1132 when the statement is modified. */
1133 if (tree_swap_operands_p (op0
, op1
, false))
1135 /* For relationals we need to swap the operands and change
1142 TREE_SET_CODE (expr
, swap_tree_comparison (code
));
1143 TREE_OPERAND (expr
, 0) = op1
;
1144 TREE_OPERAND (expr
, 1) = op0
;
1147 /* For a commutative operator we can just swap the operands. */
1148 if (commutative_tree_code (code
))
1150 TREE_OPERAND (expr
, 0) = op1
;
1151 TREE_OPERAND (expr
, 1) = op0
;
1155 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
, prev_vops
);
1156 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), flags
, prev_vops
);
1160 /* If we get here, something has gone wrong. */
1161 fprintf (stderr
, "unhandled expression in get_expr_operands():\n");
1163 fputs ("\n", stderr
);
1168 /* Scan operands in ASM_EXPR STMT. PREV_VOPS is as in append_v_may_def and
1172 get_asm_expr_operands (tree stmt
, voperands_t prev_vops
)
1174 int noutputs
= list_length (ASM_OUTPUTS (stmt
));
1175 const char **oconstraints
1176 = (const char **) alloca ((noutputs
) * sizeof (const char *));
1179 const char *constraint
;
1180 bool allows_mem
, allows_reg
, is_inout
;
1181 stmt_ann_t s_ann
= stmt_ann (stmt
);
1183 for (i
=0, link
= ASM_OUTPUTS (stmt
); link
; ++i
, link
= TREE_CHAIN (link
))
1185 oconstraints
[i
] = constraint
1186 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1187 parse_output_constraint (&constraint
, i
, 0, 0,
1188 &allows_mem
, &allows_reg
, &is_inout
);
1190 #if defined ENABLE_CHECKING
1191 /* This should have been split in gimplify_asm_expr. */
1192 if (allows_reg
&& is_inout
)
1196 /* Memory operands are addressable. Note that STMT needs the
1197 address of this operand. */
1198 if (!allows_reg
&& allows_mem
)
1200 tree t
= get_base_address (TREE_VALUE (link
));
1201 if (t
&& DECL_P (t
))
1202 note_addressable (t
, s_ann
);
1205 get_expr_operands (stmt
, &TREE_VALUE (link
), opf_is_def
, prev_vops
);
1208 for (link
= ASM_INPUTS (stmt
); link
; link
= TREE_CHAIN (link
))
1211 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
1212 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
1213 oconstraints
, &allows_mem
, &allows_reg
);
1215 /* Memory operands are addressable. Note that STMT needs the
1216 address of this operand. */
1217 if (!allows_reg
&& allows_mem
)
1219 tree t
= get_base_address (TREE_VALUE (link
));
1220 if (t
&& DECL_P (t
))
1221 note_addressable (t
, s_ann
);
1224 get_expr_operands (stmt
, &TREE_VALUE (link
), 0, prev_vops
);
1228 /* Clobber memory for asm ("" : : : "memory"); */
1229 for (link
= ASM_CLOBBERS (stmt
); link
; link
= TREE_CHAIN (link
))
1230 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link
)), "memory") == 0)
1234 /* If we still have not computed aliasing information, we
1235 won't know what variables are call-clobbered and/or
1236 addressable. Just mark the statement as having volatile
1237 operands for now. */
1238 if (!aliases_computed_p
)
1240 stmt_ann (stmt
)->has_volatile_ops
= true;
1244 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1245 decided to group them). */
1247 add_stmt_operand (&global_var
, stmt
, opf_is_def
, prev_vops
);
1249 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars
, 0, i
,
1251 tree var
= referenced_var (i
);
1252 add_stmt_operand (&var
, stmt
, opf_is_def
, prev_vops
);
1255 /* Now clobber all addressables. */
1256 EXECUTE_IF_SET_IN_BITMAP (addressable_vars
, 0, i
,
1258 tree var
= referenced_var (i
);
1259 add_stmt_operand (&var
, stmt
, opf_is_def
, prev_vops
);
1267 /* Add *VAR_P to the appropriate operand array of STMT. FLAGS is as in
1268 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1269 the statement's real operands, otherwise it is added to virtual
1272 PREV_VOPS is used when adding virtual operands to statements that
1273 already had them (See append_v_may_def and append_vuse). */
1276 add_stmt_operand (tree
*var_p
, tree stmt
, int flags
, voperands_t prev_vops
)
1286 s_ann
= stmt_ann (stmt
);
1288 /* If the operand is an ADDR_EXPR, add its operand to the list of
1289 variables that have had their address taken in this statement. */
1290 if (TREE_CODE (var
) == ADDR_EXPR
)
1292 note_addressable (TREE_OPERAND (var
, 0), s_ann
);
1296 /* If the original variable is not a scalar, it will be added to the list
1297 of virtual operands. In that case, use its base symbol as the virtual
1298 variable representing it. */
1299 is_real_op
= is_gimple_reg (var
);
1300 if (!is_real_op
&& !DECL_P (var
))
1301 var
= get_virtual_var (var
);
1303 /* If VAR is not a variable that we care to optimize, do nothing. */
1304 if (var
== NULL_TREE
|| !SSA_VAR_P (var
))
1307 sym
= (TREE_CODE (var
) == SSA_NAME
? SSA_NAME_VAR (var
) : var
);
1308 v_ann
= var_ann (sym
);
1310 /* FIXME: We currently refuse to optimize variables that have hidden uses
1311 (variables used in VLA declarations, MD builtin calls and variables
1312 from the parent function in nested functions). This is because not
1313 all uses of these variables are exposed in the IL or the statements
1314 that reference them are not in GIMPLE form. If that's the case, mark
1315 the statement as having volatile operands and return. */
1316 if (v_ann
->has_hidden_use
)
1318 s_ann
->has_volatile_ops
= true;
1322 /* Don't expose volatile variables to the optimizers. */
1323 if (TREE_THIS_VOLATILE (sym
))
1325 s_ann
->has_volatile_ops
= true;
1331 /* The variable is a GIMPLE register. Add it to real operands. */
1332 if (flags
& opf_is_def
)
1333 append_def (var_p
, stmt
);
1335 append_use (var_p
, stmt
);
1339 varray_type aliases
;
1341 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1342 virtual operands, unless the caller has specifically requested
1343 not to add virtual operands (used when adding operands inside an
1344 ADDR_EXPR expression). */
1345 if (flags
& opf_no_vops
)
1348 aliases
= v_ann
->may_aliases
;
1350 /* If alias information hasn't been computed yet, then
1351 addressable variables will not be an alias tag nor will they
1352 have aliases. In this case, mark the statement as having
1353 volatile operands. */
1354 if (!aliases_computed_p
&& may_be_aliased (var
))
1355 s_ann
->has_volatile_ops
= true;
1357 if (aliases
== NULL
)
1359 /* The variable is not aliased or it is an alias tag. */
1360 if (flags
& opf_is_def
)
1362 if (v_ann
->is_alias_tag
)
1364 /* Alias tagged vars get regular V_MAY_DEF */
1365 s_ann
->makes_aliased_stores
= 1;
1366 append_v_may_def (var
, stmt
, prev_vops
);
1368 else if ((flags
& opf_kill_def
)
1369 && v_ann
->mem_tag_kind
== NOT_A_TAG
)
1370 /* V_MUST_DEF for non-aliased non-GIMPLE register
1371 variable definitions. Avoid memory tags. */
1372 append_v_must_def (var
, stmt
, prev_vops
);
1374 /* Call-clobbered variables & memory tags get
1376 append_v_may_def (var
, stmt
, prev_vops
);
1380 append_vuse (var
, stmt
, prev_vops
);
1381 if (v_ann
->is_alias_tag
)
1382 s_ann
->makes_aliased_loads
= 1;
1389 /* The variable is aliased. Add its aliases to the virtual
1391 if (VARRAY_ACTIVE_SIZE (aliases
) == 0)
1394 if (flags
& opf_is_def
)
1396 /* If the variable is also an alias tag, add a virtual
1397 operand for it, otherwise we will miss representing
1398 references to the members of the variable's alias set.
1399 This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
1400 if (v_ann
->is_alias_tag
)
1401 append_v_may_def (var
, stmt
, prev_vops
);
1403 for (i
= 0; i
< VARRAY_ACTIVE_SIZE (aliases
); i
++)
1404 append_v_may_def (VARRAY_TREE (aliases
, i
), stmt
, prev_vops
);
1406 s_ann
->makes_aliased_stores
= 1;
1410 if (v_ann
->is_alias_tag
)
1411 append_vuse (var
, stmt
, prev_vops
);
1413 for (i
= 0; i
< VARRAY_ACTIVE_SIZE (aliases
); i
++)
1414 append_vuse (VARRAY_TREE (aliases
, i
), stmt
, prev_vops
);
1416 s_ann
->makes_aliased_loads
= 1;
1422 /* Record that VAR had its address taken in the statement with annotations
1426 note_addressable (tree var
, stmt_ann_t s_ann
)
1428 var
= get_base_address (var
);
1429 if (var
&& SSA_VAR_P (var
))
1431 if (s_ann
->addresses_taken
== NULL
)
1432 s_ann
->addresses_taken
= BITMAP_GGC_ALLOC ();
1433 bitmap_set_bit (s_ann
->addresses_taken
, var_ann (var
)->uid
);
1438 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1439 clobbered variables in the function. */
1442 add_call_clobber_ops (tree stmt
, voperands_t prev_vops
)
1444 /* Functions that are not const, pure or never return may clobber
1445 call-clobbered variables. */
1446 stmt_ann (stmt
)->makes_clobbering_call
= true;
1448 /* If we had created .GLOBAL_VAR earlier, use it. Otherwise, add
1449 a V_MAY_DEF operand for every call clobbered variable. See
1450 compute_may_aliases for the heuristic used to decide whether
1451 to create .GLOBAL_VAR or not. */
1453 add_stmt_operand (&global_var
, stmt
, opf_is_def
, prev_vops
);
1458 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars
, 0, i
,
1460 tree var
= referenced_var (i
);
1462 /* If VAR is read-only, don't add a V_MAY_DEF, just a
1464 if (!TREE_READONLY (var
))
1465 add_stmt_operand (&var
, stmt
, opf_is_def
, prev_vops
);
1467 add_stmt_operand (&var
, stmt
, opf_none
, prev_vops
);
1473 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1477 add_call_read_ops (tree stmt
, voperands_t prev_vops
)
1479 /* Otherwise, if the function is not pure, it may reference memory. Add
1480 a VUSE for .GLOBAL_VAR if it has been created. Otherwise, add a VUSE
1481 for each call-clobbered variable. See add_referenced_var for the
1482 heuristic used to decide whether to create .GLOBAL_VAR. */
1484 add_stmt_operand (&global_var
, stmt
, opf_none
, prev_vops
);
1489 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars
, 0, i
,
1491 tree var
= referenced_var (i
);
1492 add_stmt_operand (&var
, stmt
, opf_none
, prev_vops
);
1497 /* Copies virtual operands from SRC to DST. */
1500 copy_virtual_operands (tree dst
, tree src
)
1502 vuse_optype vuses
= STMT_VUSE_OPS (src
);
1503 v_may_def_optype v_may_defs
= STMT_V_MAY_DEF_OPS (src
);
1504 v_must_def_optype v_must_defs
= STMT_V_MUST_DEF_OPS (src
);
1505 vuse_optype
*vuses_new
= &stmt_ann (dst
)->vuse_ops
;
1506 v_may_def_optype
*v_may_defs_new
= &stmt_ann (dst
)->v_may_def_ops
;
1507 v_must_def_optype
*v_must_defs_new
= &stmt_ann (dst
)->v_must_def_ops
;
1512 *vuses_new
= allocate_vuse_optype (NUM_VUSES (vuses
));
1513 for (i
= 0; i
< NUM_VUSES (vuses
); i
++)
1514 SET_VUSE_OP (*vuses_new
, i
, VUSE_OP (vuses
, i
));
1519 *v_may_defs_new
= allocate_v_may_def_optype (NUM_V_MAY_DEFS (v_may_defs
));
1520 for (i
= 0; i
< NUM_V_MAY_DEFS (v_may_defs
); i
++)
1522 SET_V_MAY_DEF_OP (*v_may_defs_new
, i
, V_MAY_DEF_OP (v_may_defs
, i
));
1523 SET_V_MAY_DEF_RESULT (*v_may_defs_new
, i
,
1524 V_MAY_DEF_RESULT (v_may_defs
, i
));
1530 *v_must_defs_new
= allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs
));
1531 for (i
= 0; i
< NUM_V_MUST_DEFS (v_must_defs
); i
++)
1532 SET_V_MUST_DEF_OP (*v_must_defs_new
, i
, V_MUST_DEF_OP (v_must_defs
, i
));
1536 #include "gt-tree-ssa-operands.h"