/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */
#include "coretypes.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "ipa-reference.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
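
/* An illustrative sketch (not part of the original comments): for a
   statement such as

	# x_4 = VDEF <x_3>
	*p_1 = a_2 + 1;

   'p_1' and 'a_2' are real operands (USE pointers into the stmt tree),
   while the store through 'p_1' is modeled purely in the cache by the
   virtual operand 'x_4 = VDEF <x_3>' for the pointed-to variable 'x'.  */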
/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def		(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of GIMPLE_MODIFY_STMT from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit	(1 << 2)
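
/* Illustrative only: the LHS of an assignment is scanned with opf_def
   and its RHS with opf_use; the operands of an ADDR_EXPR are scanned
   with 'flags | opf_no_vops'; call and asm clobbers are added with
   'opf_def | opf_implicit' (see the helpers below).  */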
/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Set for building all the VDEF operands.  */
static VEC(tree,heap) *build_vdefs;

/* Set for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;
/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

/* Set for building all the loaded symbols.  */
static bitmap build_loads;

/* Set for building all the stored symbols.  */
static bitmap build_stores;
static void get_expr_operands (tree, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;
/* Statement change buffer.  Data structure used to record state
   information for statements.  This is used to determine what needs
   to be done in order to update the SSA web after a statement is
   modified by a pass.  If STMT is a statement that has just been
   created, or needs to be folded via fold_stmt, or anything else that
   changes its physical structure, then the pass should:

   1- Call push_stmt_changes (&stmt) to record the current state of
      STMT before any modifications are made.

   2- Make all appropriate modifications to the statement.

   3- Call pop_stmt_changes (&stmt) to find new symbols that
      need to be put in SSA form, SSA name mappings for names that
      have disappeared, recompute invariantness for address
      expressions, cleanup EH information, etc.

   If it is possible to determine that the statement was not modified,
   instead of calling pop_stmt_changes it is quicker to call
   discard_stmt_changes to avoid the expensive and unnecessary operand
   re-scan and change comparison.  */
struct scb_d
{
  /* Pointer to the statement being modified.  */
  tree *stmt_p;

  /* If the statement references memory these are the sets of symbols
     loaded and stored by the statement.  */
  bitmap loads;
  bitmap stores;
};

typedef struct scb_d *scb_t;
DEF_VEC_P(scb_t);
DEF_VEC_ALLOC_P(scb_t,heap);
/* Stack of statement change buffers (SCB).  Every call to
   push_stmt_changes pushes a new buffer onto the stack.  Calls to
   pop_stmt_changes pop a buffer off of the stack and compute the set
   of changes for the popped statement.  */
static VEC(scb_t,heap) *scb_stack;
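
/* A minimal usage sketch of the protocol above (hedged; assumes a pass
   that has STMT in hand and can tell whether it modified it):

     push_stmt_changes (&stmt);
     ... transform STMT ...
     if (changed)
       pop_stmt_changes (&stmt);
     else
       discard_stmt_changes (&stmt);  */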
/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}
/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *) p);
  tree e2 = *((const tree *) q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}
/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
	  > get_name_decl (VEC_index (tree, list, 1)))
	{
	  /* Swap elements if in the wrong order.  */
	  tree tmp = VEC_index (tree, list, 0);
	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
	  VEC_replace (tree, list, 1, tmp);
	}
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
	 VEC_length (tree, list),
	 sizeof (tree),
	 operand_build_cmp);
}
/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}
/* VOPs are variable sized, so the free list maps "free buckets" to the
   sizes they are able to hold.  Any VOPs larger than this are simply
   added to the largest bucket when they are freed.  */
/* Return the number of operands used in bucket BUCKET.  */

static inline int
vop_free_bucket_size (int bucket)
{
#ifdef ENABLE_CHECKING
  gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS);
#endif
  if (bucket < 16)
    return bucket + 1;
  return (bucket - 13) * 8;
}
/* For a vop of NUM operands, return the bucket NUM belongs to.  If NUM is
   beyond the end of the bucket table, return -1.  */

static inline int
vop_free_bucket_index (int num)
{
  gcc_assert (num > 0 && NUM_VOP_FREE_BUCKETS > 16);

  /* Sizes 1 through 16 use buckets 0-15.  */
  if (num <= 16)
    return num - 1;
  /* Buckets 16 - NUM_VOP_FREE_BUCKETS represent 8 unit chunks.  */
  num = 14 + (num - 1) / 8;
  if (num >= NUM_VOP_FREE_BUCKETS)
    return -1;
  else
    return num;
}
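
/* Worked example (an illustration, not from the original sources):
   NUM 1-16 land in buckets 0-15, so bucket N holds vops of exactly
   N+1 operands; NUM 17-24 map to bucket 16, which vop_free_bucket_size
   reports as 24 slots; NUM 25-32 map to bucket 17 (32 slots); and so
   on in 8-operand steps.  */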
/* Initialize the VOP free buckets.  */

static inline void
init_vop_buckets (void)
{
  int x;

  for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++)
    gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL;
}
/* Add PTR to the appropriate VOP bucket.  */

static inline void
add_vop_to_freelist (voptype_p ptr)
{
  int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev));

  /* Too large, use the largest bucket so it's not a complete throwaway.  */
  if (bucket == -1)
    bucket = NUM_VOP_FREE_BUCKETS - 1;

  ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
  gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr;
}
/* These are the sizes of the operand memory buffer which gets allocated each
   time more operands space is required.  The final value is the amount that is
   allocated every time after that.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_2	110
#define OP_SIZE_3	511
/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuses = VEC_alloc (tree, heap, 25);
      build_vdefs = VEC_alloc (tree, heap, 25);
      bitmap_obstack_initialize (&operands_bitmap_obstack);
      build_loads = BITMAP_ALLOC (&operands_bitmap_obstack);
      build_stores = BITMAP_ALLOC (&operands_bitmap_obstack);
      scb_stack = VEC_alloc (scb_t, heap, 20);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index
    = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  init_vop_buckets ();
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
}
/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  unsigned ix;
  tree mpt;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      VEC_free (tree, heap, build_vdefs);
      VEC_free (tree, heap, build_vuses);
      BITMAP_FREE (build_loads);
      BITMAP_FREE (build_stores);

      /* The change buffer stack had better be empty.  */
      gcc_assert (VEC_length (scb_t, scb_stack) == 0);
      VEC_free (scb_t, heap, scb_stack);
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
	= gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  for (ix = 0;
       VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
       ix++)
    {
      if (mpt)
	BITMAP_FREE (MPT_SYMBOLS (mpt));
    }
  VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);
  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars: %d\n",
	       clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided: %d\n",
	       clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided: %d\n",
	       clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
	       clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers: %d\n",
	       clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
	       clobber_stats.static_readonly_clobbers_avoided);
    }
}
/* Return memory for operands of SIZE chunks.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      if (gimple_ssa_operands (cfun)->ssa_operand_mem_size == OP_SIZE_INIT)
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_1 * sizeof (struct voptype_d);
      else if (gimple_ssa_operands (cfun)->ssa_operand_mem_size
	       == OP_SIZE_1 * sizeof (struct voptype_d))
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_2 * sizeof (struct voptype_d);
      else
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_3 * sizeof (struct voptype_d);

      /* Go right to the maximum size if the request is too large.  */
      if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size)
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_3 * sizeof (struct voptype_d);

      /* Fail if there is not enough space.  If there are this many operands
	 required, first make sure there isn't a different problem causing this
	 many operands.  If the decision is that this is OK, then we can
	 specially allocate a buffer just for this request.  */
      gcc_assert (size <= gimple_ssa_operands (cfun)->ssa_operand_mem_size);

      ptr = (struct ssa_operand_memory_d *)
	ggc_alloc (sizeof (struct ssa_operand_memory_d)
		   + gimple_ssa_operands (cfun)->ssa_operand_mem_size - 1);
      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
	  ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}
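
/* Growth policy illustrated (hedged sketch): a fresh function starts
   with ssa_operand_mem_size == OP_SIZE_INIT, so the first request
   allocates an OP_SIZE_1 chunk; once that chunk fills, an OP_SIZE_2
   chunk follows, and every later chunk holds OP_SIZE_3 vops, unless a
   single oversized request jumps straight to OP_SIZE_3.  */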
/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
	= gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
	  ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}
/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
	= gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
	  ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}
/* Allocate a vop with NUM elements.  */

static inline struct voptype_d *
alloc_vop (int num)
{
  struct voptype_d *ret = NULL;
  int alloc_size = 0;

  int bucket = vop_free_bucket_index (num);
  if (bucket != -1)
    {
      /* If there is a free operand, use it.  */
      if (gimple_ssa_operands (cfun)->vop_free_buckets[bucket] != NULL)
	{
	  ret = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
	  gimple_ssa_operands (cfun)->vop_free_buckets[bucket] =
	    gimple_ssa_operands (cfun)->vop_free_buckets[bucket]->next;
	}
      else
	alloc_size = vop_free_bucket_size (bucket);
    }
  else
    alloc_size = num;

  if (alloc_size > 0)
    ret = (struct voptype_d *) ssa_operand_alloc (
	sizeof (struct voptype_d) + (alloc_size - 1) * sizeof (vuse_element_t));

  VUSE_VECT_NUM_ELEM (ret->usev) = num;
  return ret;
}
/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}
/* Adds OP to the list of defs after LAST.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new = alloc_def ();

  DEF_OP_PTR (new) = op;
  last->next = new;
  new->next = NULL;
  return new;
}
/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (tree stmt, tree *op, use_optype_p last)
{
  use_optype_p new = alloc_use ();

  USE_OP_PTR (new)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new), *op, stmt);
  last->next = new;
  new->next = NULL;
  return new;
}
/* Return a virtual op pointer with NUM elements which are all initialized to OP
   and are linked into the immediate uses for STMT.  The new vop is appended
   after PREV.  */

static inline voptype_p
add_vop (tree stmt, tree op, int num, voptype_p prev)
{
  voptype_p new;
  int x;

  new = alloc_vop (num);
  for (x = 0; x < num; x++)
    {
      VUSE_OP_PTR (new, x)->prev = NULL;
      SET_VUSE_OP (new, x, op);
      VUSE_OP_PTR (new, x)->use = &new->usev.uses[x].use_var;
      link_imm_use_stmt (VUSE_OP_PTR (new, x), new->usev.uses[x].use_var, stmt);
    }

  if (prev)
    prev->next = new;
  new->next = NULL;
  return new;
}
/* Adds OP to the list of vuses of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline voptype_p
add_vuse_op (tree stmt, tree op, int num, voptype_p last)
{
  voptype_p new = add_vop (stmt, op, num, last);
  VDEF_RESULT (new) = NULL_TREE;
  return new;
}
/* Adds OP to the list of vdefs of statement STMT after LAST, and moves
   LAST to the new element.  */

static inline voptype_p
add_vdef_op (tree stmt, tree op, int num, voptype_p last)
{
  voptype_p new = add_vop (stmt, op, num, last);
  VDEF_RESULT (new) = op;
  return new;
}
/* Reallocate the virtual operand PTR so that it has NUM_ELEM use slots.  ROOT
   is the head of the operand list it belongs to.  */

static inline struct voptype_d *
realloc_vop (struct voptype_d *ptr, unsigned int num_elem,
	     struct voptype_d **root)
{
  unsigned int x, lim;
  tree stmt, val;
  struct voptype_d *ret, *tmp;

  if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
    return ptr;

  val = VUSE_OP (ptr, 0);
  if (TREE_CODE (val) == SSA_NAME)
    val = SSA_NAME_VAR (val);

  stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));

  /* Delink all the existing uses.  */
  for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
    {
      use_operand_p use_p = VUSE_OP_PTR (ptr, x);
      delink_imm_use (use_p);
    }

  /* If we want less space, simply use this one, and shrink the size.  */
  if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
    {
      VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
      return ptr;
    }

  /* It is growing.  Allocate a new one and replace the old one.  */
  ret = add_vuse_op (stmt, val, num_elem, ptr);

  /* Clear PTR and add its memory to the free list.  */
  lim = VUSE_VECT_NUM_ELEM (ptr->usev);
  memset (ptr, 0,
	  sizeof (struct voptype_d) + sizeof (vuse_element_t) * (lim - 1));
  add_vop_to_freelist (ptr);

  /* Now simply remove the old one.  */
  if (*root == ptr)
    {
      *root = ret;
      return ret;
    }

  for (tmp = *root;
       tmp != NULL && tmp->next != ptr;
       tmp = tmp->next)
    ;
  if (tmp)
    {
      tmp->next = ret;
      return ret;
    }

  /* The pointer passed in isn't in STMT's VDEF lists.  */
  gcc_unreachable ();
}
/* Reallocate the PTR vdef so that it has NUM_ELEM use slots.  */

struct voptype_d *
realloc_vdef (struct voptype_d *ptr, unsigned int num_elem)
{
  tree val, stmt;
  struct voptype_d *ret;

  val = VDEF_RESULT (ptr);
  stmt = USE_STMT (VDEF_OP_PTR (ptr, 0));
  ret = realloc_vop (ptr, num_elem, &(VDEF_OPS (stmt)));
  VDEF_RESULT (ret) = val;
  return ret;
}
/* Reallocate the PTR vuse so that it has NUM_ELEM use slots.  */

struct voptype_d *
realloc_vuse (struct voptype_d *ptr, unsigned int num_elem)
{
  tree stmt;
  struct voptype_d *ret;

  stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
  ret = realloc_vop (ptr, num_elem, &(VUSE_OPS (stmt)));
  return ret;
}
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (tree stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);

  new_list.next = NULL;
  last = &new_list;

  old_ops = DEF_OPS (stmt);

  new_i = 0;

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  DEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    def_optype_p ptr;
    unsigned x = 0;
    for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == num);
  }
#endif
}
/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (tree stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to update_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *) VEC_index (tree, build_uses, x)) != stmt);
  }
#endif

  new_list.next = NULL;
  last = &new_list;

  old_ops = USE_OPS (stmt);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
		       (tree *) VEC_index (tree, build_uses, new_i),
		       last);

  /* Now set the stmt's operands.  */
  USE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_uses));
  }
#endif
}
/* Takes elements from BUILD_VDEFS and turns them into vdef operands of
   STMT.  FIXME, for now VDEF operators should have a single operand
   in their RHS.  */

static inline void
finalize_ssa_vdefs (tree stmt)
{
  unsigned new_i;
  struct voptype_d new_list;
  voptype_p old_ops, ptr, last;
  stmt_ann_t ann = stmt_ann (stmt);

  /* Set the symbols referenced by STMT.  */
  if (!bitmap_empty_p (build_stores))
    {
      if (ann->operands.stores == NULL)
	ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);

      bitmap_copy (ann->operands.stores, build_stores);
    }
  else
    BITMAP_FREE (ann->operands.stores);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  new_list.next = NULL;
  last = &new_list;

  old_ops = VDEF_OPS (stmt);
  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_vdefs))
    {
      tree op = VEC_index (tree, build_vdefs, new_i);
      unsigned new_uid = get_name_decl (op);
      unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops));

      /* FIXME, for now each VDEF operator should have at most one
	 operand in their RHS.  */
      gcc_assert (VDEF_NUM (old_ops) == 1);

      if (old_uid == new_uid)
	{
	  /* If the symbols are the same, reuse the existing operand.  */
	  last->next = old_ops;
	  last = old_ops;
	  old_ops = old_ops->next;
	  last->next = NULL;
	  set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
	  new_i++;
	}
      else if (old_uid < new_uid)
	{
	  /* If old is less than new, old goes to the free list.  */
	  voptype_p next;
	  delink_imm_use (VDEF_OP_PTR (old_ops, 0));
	  next = old_ops->next;
	  add_vop_to_freelist (old_ops);
	  old_ops = next;
	}
      else
	{
	  /* This is a new operand.  */
	  last = add_vdef_op (stmt, op, 1, last);
	  new_i++;
	}
    }

  /* If there is anything remaining in BUILD_VDEFS, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
    last = add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, last);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = last)
	{
	  last = ptr->next;
	  delink_imm_use (VDEF_OP_PTR (ptr, 0));
	  add_vop_to_freelist (ptr);
	}
    }

  /* Now set STMT's operands.  */
  VDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_vdefs));
  }
#endif
}
/* Takes elements from BUILD_VUSES and turns them into VUSE operands of
   STMT.  */

static inline void
finalize_ssa_vuse_ops (tree stmt)
{
  unsigned new_i, old_i;
  voptype_p old_ops, last;
  VEC(tree,heap) *new_ops;
  stmt_ann_t ann;

  /* Set the symbols referenced by STMT.  */
  ann = stmt_ann (stmt);
  if (!bitmap_empty_p (build_loads))
    {
      if (ann->operands.loads == NULL)
	ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);

      bitmap_copy (ann->operands.loads, build_loads);
    }
  else
    BITMAP_FREE (ann->operands.loads);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  /* STMT should have at most one VUSE operator.  */
  old_ops = VUSE_OPS (stmt);
  gcc_assert (old_ops == NULL || old_ops->next == NULL);

  new_ops = NULL;
  new_i = old_i = 0;
  while (old_ops
	 && old_i < VUSE_NUM (old_ops)
	 && new_i < VEC_length (tree, build_vuses))
    {
      tree new_op = VEC_index (tree, build_vuses, new_i);
      tree old_op = VUSE_OP (old_ops, old_i);
      unsigned new_uid = get_name_decl (new_op);
      unsigned old_uid = get_name_decl (old_op);

      if (old_uid == new_uid)
	{
	  /* If the symbols are the same, reuse the existing operand.  */
	  VEC_safe_push (tree, heap, new_ops, old_op);
	  new_i++;
	  old_i++;
	}
      else if (old_uid < new_uid)
	{
	  /* If OLD_UID is less than NEW_UID, the old operand has
	     disappeared, skip to the next old operand.  */
	  old_i++;
	}
      else
	{
	  /* This is a new operand.  */
	  VEC_safe_push (tree, heap, new_ops, new_op);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_vuses list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
    VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i));

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
	delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
      add_vop_to_freelist (old_ops);
      VUSE_OPS (stmt) = NULL;
    }

  /* If there are any operands, instantiate a VUSE operator for STMT.  */
  if (new_ops)
    {
      tree op;
      unsigned i;

      last = add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), NULL);

      for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
	SET_USE (VUSE_OP_PTR (last, (int) i), op);

      VUSE_OPS (stmt) = last;
      VEC_free (tree, heap, new_ops);
    }

#ifdef ENABLE_CHECKING
  {
    unsigned x;

    if (VUSE_OPS (stmt))
      {
	gcc_assert (VUSE_OPS (stmt)->next == NULL);
	x = VUSE_NUM (VUSE_OPS (stmt));
      }
    else
      x = 0;

    gcc_assert (x == VEC_length (tree, build_vuses));
  }
#endif
}
/* Return a new VUSE operand vector for STMT.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_vdefs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     VDEF operator for a variable 'a', then a VUSE for 'a' is not
     needed because VDEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is pointed-to by p and q:

	      # VUSE <a_2>
	      # a_3 = VDEF <a_2>
	      *p = *q;

     The VUSE <a_2> is superfluous because it is implied by the
     VDEF operator.  */
  num = VEC_length (tree, build_vuses);
  num_vdefs = VEC_length (tree, build_vdefs);

  if (num > 0 && num_vdefs > 0)
    for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
      {
	tree vuse;
	vuse = VEC_index (tree, build_vuses, vuse_index);
	if (TREE_CODE (vuse) != SSA_NAME)
	  {
	    var_ann_t ann = var_ann (vuse);
	    ann->in_vuse_list = 0;
	    if (ann->in_vdef_list)
	      {
		VEC_ordered_remove (tree, build_vuses, vuse_index);
		continue;
	      }
	  }
	vuse_index++;
      }

  finalize_ssa_vuse_ops (stmt);
}
/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  unsigned i;
  tree t;

  for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vdef_list = false;

  for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vuse_list = false;

  VEC_truncate (tree, build_vdefs, 0);
  VEC_truncate (tree, build_vuses, 0);
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
  bitmap_clear (build_loads);
  bitmap_clear (build_stores);
}
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_vdefs (stmt);
  finalize_ssa_vuses (stmt);
  cleanup_build_arrays ();
}
/* Start the process of building up operands vectors in INFO.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_vdefs) == 0);
  gcc_assert (bitmap_empty_p (build_loads));
  gcc_assert (bitmap_empty_p (build_stores));
}
/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}
/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}
/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
	var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vdef_list)
	return;

      ann->in_vdef_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vdefs, var);
  bitmap_set_bit (build_stores, DECL_UID (sym));
}
/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
	var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vuse_list || ann->in_vdef_list)
	return;

      ann->in_vuse_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vuses, var);
  bitmap_set_bit (build_loads, DECL_UID (sym));
}
/* REF is a tree that contains the entire pointer dereference
   expression, if available, or NULL otherwise.  ALIAS is the variable
   we are asking if REF can access.  OFFSET and SIZE come from the
   memory access expression that generated this virtual operand.  */

static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
			   HOST_WIDE_INT size)
{
  bool offsetgtz = offset > 0;
  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
  tree base = ref ? get_base_address (ref) : NULL;

  /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
     using a call-clobbered memory tag.  By definition, call-clobbered
     memory tags can always touch .GLOBAL_VAR.  */
  if (alias == gimple_global_var (cfun))
    return true;
  /* If ALIAS is an SFT, it can't be touched if the offset
     and size of the access is not overlapping with the SFT offset and
     size.  This is only true if we are accessing through a pointer
     to a type that is the same as SFT_PARENT_VAR.  Otherwise, we may
     be accessing through a pointer to some substruct of the
     structure, and if we try to prune there, we will have the wrong
     offset, and get the wrong answer.
     i.e., we can't prune without more work if we have something like

     struct gcc_target
     {
       struct asm_out
       {
	 const char *byte_op;
	 struct asm_int_op
	 {
	   const char *hi;
	 } aligned_op;
       } asm_out;
     } targetm;

     foo = &targetm.asm_out.aligned_op;
     return foo->hi;

     SFT.1, which represents hi, will have SFT_OFFSET=32 because in
     terms of SFT_PARENT_VAR, that is where it is.
     However, the access through the foo pointer will be at offset 0.  */
  if (ref
      && TREE_CODE (alias) == STRUCT_FIELD_TAG
      && base
      && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
      && !overlap_subvar (offset, size, alias, NULL))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }
  /* Without strict aliasing, it is impossible for a component access
     through a pointer to touch a random variable, unless that
     variable *is* a structure or a pointer.

     That is, given p->c, and some random global variable b,
     there is no legal way that p->c could be an access to b.

     Without strict aliasing on, we consider it legal to do something
     like:

     struct foos { int l; };
     int foo;
     static struct foos *getfoo (void);
     int main (void)
     {
       struct foos *f = getfoo ();
       f->l = 1;
       foo = 2;
       if (f->l == 1)
	 abort ();
       exit (0);
     }
     static struct foos *getfoo (void)
     { return (struct foos *)&foo; }

     (taken from 20000623-1.c)

     The docs also say/imply that access through union pointers
     is legal (but *not* if you take the address of the union member,
     i.e. the inverse), such that you can do

     typedef union { int d; } U;
     int rv;
     void breakme (void)
     {
       rv = 0;
       U *pretmp = (U *)&rv;
       pretmp->d = 42;
     }

     To implement this, we just punt on accesses through union
     pointers entirely.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && base
	   && (TREE_CODE (base) != INDIRECT_REF
	       || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
	   && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
	   && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
	   && !var_ann (alias)->is_heapvar
	   /* When the struct has may_alias attached to it, we must not
	      prune the access away.  */
	   && get_alias_set (base))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }
  /* If the offset of the access is greater than the size of one of
     the possible aliases, it can't be touching that alias, because it
     would be past the end of the structure.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && !POINTER_TYPE_P (TREE_TYPE (alias))
	   && offsetgtz
	   && DECL_SIZE (alias)
	   && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
	   && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  return true;
}
/* Add VAR to the virtual operands array.  FLAGS is as in
   get_expr_operands.  FULL_REF is a tree that contains the entire
   pointer dereference expression, if available, or NULL otherwise.
   OFFSET and SIZE come from the memory access expression that
   generated this virtual operand.  IS_CALL_SITE is true if the
   affected statement is a call site.  */

static void
add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
		     tree full_ref, HOST_WIDE_INT offset,
		     HOST_WIDE_INT size, bool is_call_site)
{
  bitmap aliases = NULL;
  tree sym;
  var_ann_t v_ann;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark the statement as having memory operands.  */
  s_ann->references_memory = true;

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a VDEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  So we only
     check that this happens on non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run
     into validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_implicit) && unmodifiable_var_p (var))
    flags &= ~opf_def;

  /* The variable is not a GIMPLE register.  Add it (or its aliases) to
     virtual operands, unless the caller has specifically requested
     not to add virtual operands (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  if (MTAG_P (var))
    aliases = MTAG_ALIASES (var);

  if (aliases == NULL)
    {
      if (s_ann && !gimple_aliases_computed_p (cfun))
	s_ann->has_volatile_ops = true;

      /* The variable is not aliased or it is an alias tag.  */
      if (flags & opf_def)
	append_vdef (var);
      else
	append_vuse (var);
    }
  else
    {
      bitmap_iterator bi;
      unsigned int i;
      tree al;

      /* The variable is aliased.  Add its aliases to the virtual
	 operands.  */
      gcc_assert (!bitmap_empty_p (aliases));

      if (flags & opf_def)
	{
	  bool none_added = true;

	  EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
	    {
	      al = referenced_var (i);
	      if (!access_can_touch_variable (full_ref, al, offset, size))
		continue;

	      /* Call-clobbered tags may have non-call-clobbered
		 symbols in their alias sets.  Ignore them if we are
		 adding VOPs for a call site.  */
	      if (is_call_site && !is_call_clobbered (al))
		continue;

	      none_added = false;
	      append_vdef (al);
	    }

	  /* Even if no aliases have been added, we still need to
	     establish def-use and use-def chains, lest
	     transformations think that this is not a memory
	     reference.  For an example of this scenario, see
	     testsuite/g++.dg/opt/cleanup1.C.  */
	  if (none_added)
	    append_vdef (var);
	}
      else
	{
	  bool none_added = true;

	  EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
	    {
	      al = referenced_var (i);
	      if (!access_can_touch_variable (full_ref, al, offset, size))
		continue;

	      /* Call-clobbered tags may have non-call-clobbered
		 symbols in their alias sets.  Ignore them if we are
		 adding VOPs for a call site.  */
	      if (is_call_site && !is_call_clobbered (al))
		continue;

	      none_added = false;
	      append_vuse (al);
	    }

	  /* Even if no aliases have been added, we still need to
	     establish def-use and use-def chains, lest
	     transformations think that this is not a memory
	     reference.  For an example of this scenario, see
	     testsuite/g++.dg/opt/cleanup1.C.  */
	  if (none_added)
	    append_vuse (var);
	}
    }
}
/* Add *VAR_P to the appropriate operand array for S_ANN.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  tree var, sym;
  var_ann_t v_ann;

  gcc_assert (SSA_VAR_P (*var_p) && s_ann);

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  */
  if (TREE_THIS_VOLATILE (sym))
    s_ann->has_volatile_ops = true;

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
}
/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
      that got us here.

   FLAGS is as in get_expr_operands.

   FULL_REF contains the full pointer dereference expression, if we
      have it, or NULL otherwise.

   OFFSET and SIZE are the location of the access inside the
      dereferenced pointer, if known.

   RECURSE_ON_BASE should be set to true if we want to continue
      calling get_expr_operands on the base pointer, and false if
      something else will do it for us.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags, tree full_ref,
			   HOST_WIDE_INT offset, HOST_WIDE_INT size,
			   bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  s_ann->references_memory = true;
  if (s_ann && TREE_THIS_VOLATILE (expr))
    s_ann->has_volatile_ops = true;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_virtual_operand (pi->name_mem_tag, s_ann, flags,
			       full_ref, offset, size, false);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its symbol memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);

	  if (v_ann->symbol_mem_tag)
	    add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
				 full_ref, offset, size, false);
	  /* Aliasing information is missing; mark statement as
	     volatile so we won't optimize it out too actively.  */
	  else if (s_ann
		   && !gimple_aliases_computed_p (cfun)
		   && (flags & opf_def))
	    s_ann->has_volatile_ops = true;
	}
    }
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      /* If a constant is used as a pointer, we can't generate a real
	 operand for it but we mark the statement volatile to prevent
	 optimizations from messing things up.  */
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }
  else
    {
      /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
      gcc_unreachable ();
    }

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr, opf_use);
}
/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag, ref;
  HOST_WIDE_INT offset, size, maxsize;
  subvar_t svars, sv;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* This statement references memory.  */
  s_ann->references_memory = 1;

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);

  if (TMR_SYMBOL (expr))
    add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);

  tag = TMR_TAG (expr);
  if (!tag)
    {
      /* Something weird, so ensure that we will be careful.  */
      s_ann->has_volatile_ops = true;
      return;
    }

  if (!MTAG_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
  gcc_assert (ref != NULL_TREE);
  svars = get_subvars_for_var (ref);
  for (sv = svars; sv; sv = sv->next)
    {
      bool exact;

      if (overlap_subvar (offset, maxsize, sv->var, &exact))
	add_stmt_operand (&sv->var, s_ann, flags);
    }
}
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b, not_written_b;

  /* Functions that are not const, pure or never return may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
      return;
    }

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */
  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* Add a VDEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var_lookup (u);
      unsigned int escape_mask = var_ann (var)->escape_mask;
      tree real_var = var;
      bool not_read;
      bool not_written;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b
		 ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
		 : false;

      not_written = not_written_b
		    ? bitmap_bit_p (not_written_b, DECL_UID (real_var))
		    : false;
      gcc_assert (!unmodifiable_var_p (var));

      clobber_stats.clobbered_vars++;

      /* See if this variable is really clobbered by this function.  */

      /* Trivial case: Things escaping only to pure/const are not
	 clobbered by non-pure-const, and only read by pure/const.  */
      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
	{
	  tree call = get_call_expr_in (stmt);
	  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
	    {
	      add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	  else
	    {
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	}

      if (not_written)
	{
	  clobber_stats.static_write_clobbers_avoided++;
	  if (!not_read)
	    add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
	  else
	    clobber_stats.static_read_clobbers_avoided++;
	}
      else
	add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
    }
}
/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b;

  /* If the function is not pure, it may reference memory.  Add
     a VUSE for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
      return;
    }

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var (u);
      tree real_var = var;
      bool not_read;

      clobber_stats.readonly_clobbers++;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
			    : false;

      if (not_read)
	{
	  clobber_stats.static_readonly_clobbers_avoided++;
	  continue;
	}

      add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
    }
}
/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  int call_flags = call_expr_flags (expr);
  int i, nargs;
  stmt_ann_t ann = stmt_ann (stmt);

  ann->references_memory = true;

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (gimple_aliases_computed_p (cfun)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
	  && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_call_clobber_ops (stmt, get_callee_fndecl (expr));
      else if (!(call_flags & ECF_CONST))
	add_call_read_ops (stmt, get_callee_fndecl (expr));
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &CALL_EXPR_FN (expr), opf_use);
  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    get_expr_operands (stmt, &CALL_EXPR_ARG (expr, i), opf_use);

  get_expr_operands (stmt, &CALL_EXPR_STATIC_CHAIN (expr), opf_use);
}
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann;
  int i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  tree link;

  s_ann = stmt_ann (stmt);
  noutputs = list_length (ASM_OUTPUTS (stmt));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
    }

  /* Gather all input operands.  */
  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	{
	  tree t = get_base_address (TREE_VALUE (link));
	  if (t && DECL_P (t) && s_ann)
	    add_to_addressable_set (t, &s_ann->addresses_taken);
	}

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
	unsigned i;
	bitmap_iterator bi;

	s_ann->references_memory = true;

	EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
	  {
	    tree var = referenced_var (i);
	    add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
	  }

	EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
	  {
	    tree var = referenced_var (i);

	    /* Subvars are explicitly represented in this list, so we
	       don't need the original to be added to the clobber ops,
	       but the original *will* be in this list because we keep
	       the addressability of the original variable up-to-date
	       to avoid confusing the back-end.  */
	    if (var_can_have_subvars (var)
		&& get_subvars_for_var (var) != NULL)
	      continue;

	    add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
	  }
	break;
      }
}
/* Scan operands for the assignment expression EXPR in statement STMT.  */

static void
get_modify_stmt_operands (tree stmt, tree expr)
{
  /* First get operands from the RHS.  */
  get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use);

  /* For the LHS, use a regular definition (opf_def) for GIMPLE
     registers.  If the LHS is a store to memory, we will need
     a preserving definition (VDEF).

     Preserving definitions are those that modify a part of an
     aggregate object for which no subvars have been computed (or the
     reference does not correspond exactly to one of them).  Stores
     through a pointer are also represented with VDEF operators.

     We used to distinguish between preserving and killing definitions.
     We always emit preserving definitions now.  */
  get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def);
}
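
/* Illustrative sketch (not from the original comments): for 'x_1 = y_2'
   both sides are GIMPLE registers, so the scan yields a real DEF of
   'x_1' and a real USE of 'y_2'; for '*p_1 = y_2' the LHS instead
   produces a preserving VDEF for whatever 'p_1' may point to.  */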
2043 /* Recursively scan the expression pointed to by EXPR_P in statement
2044 STMT. FLAGS is one of the OPF_* constants modifying how to
2045 interpret the operands found. */
2048 get_expr_operands (tree stmt
, tree
*expr_p
, int flags
)
2050 enum tree_code code
;
2051 enum tree_code_class
class;
2052 tree expr
= *expr_p
;
2053 stmt_ann_t s_ann
= stmt_ann (stmt
);
2058 code
= TREE_CODE (expr
);
2059 class = TREE_CODE_CLASS (code
);
2064 /* Taking the address of a variable does not represent a
2065 reference to it, but the fact that the statement takes its
2066 address will be of interest to some passes (e.g. alias
2068 add_to_addressable_set (TREE_OPERAND (expr
, 0), &s_ann
->addresses_taken
);
2070 /* If the address is invariant, there may be no interesting
2071 variable references inside. */
2072 if (is_gimple_min_invariant (expr
))
2075 /* Otherwise, there may be variables referenced inside but there
2076 should be no VUSEs created, since the referenced objects are
2077 not really accessed. The only operands that we should find
2078 here are ARRAY_REF indices which will always be real operands
2079 (GIMPLE does not allow non-registers as array indices). */
2080 flags
|= opf_no_vops
;
2081 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 0), flags
);
2085 case STRUCT_FIELD_TAG
:
2086 case SYMBOL_MEMORY_TAG
:
2087 case NAME_MEMORY_TAG
:
2088 add_stmt_operand (expr_p
, s_ann
, flags
);
2097 /* Add the subvars for a variable, if it has subvars, to DEFS
2098 or USES. Otherwise, add the variable itself. Whether it
2099 goes to USES or DEFS depends on the operand flags. */
2100 if (var_can_have_subvars (expr
)
2101 && (svars
= get_subvars_for_var (expr
)))
2104 for (sv
= svars
; sv
; sv
= sv
->next
)
2105 add_stmt_operand (&sv
->var
, s_ann
, flags
);
2108 add_stmt_operand (expr_p
, s_ann
, flags
);
2113 case MISALIGNED_INDIRECT_REF
:
2114 get_expr_operands (stmt
, &TREE_OPERAND (expr
, 1), flags
);
2117 case ALIGN_INDIRECT_REF
:
2119 get_indirect_ref_operands (stmt
, expr
, flags
, NULL_TREE
, 0, -1, true);
2122 case TARGET_MEM_REF
:
2123 get_tmr_operands (stmt
, expr
, flags
);
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree ref;
        HOST_WIDE_INT offset, size, maxsize;
        bool none = true;

        /* This component reference becomes an access to all of the
           subvariables it can touch, if we can determine that, but
           *NOT* the real one.  If we can't determine which fields we
           could touch, the recursion will eventually get to a
           variable and add *all* of its subvars, or whatever is the
           minimum correct subset.  */
        ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
        if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
          {
            subvar_t sv;
            subvar_t svars = get_subvars_for_var (ref);

            for (sv = svars; sv; sv = sv->next)
              {
                bool exact;

                if (overlap_subvar (offset, maxsize, sv->var, &exact))
                  {
                    int subvar_flags = flags;
                    none = false;
                    add_stmt_operand (&sv->var, s_ann, subvar_flags);
                  }
              }

            if (!none)
              flags |= opf_no_vops;
          }
        else if (TREE_CODE (ref) == INDIRECT_REF)
          {
            get_indirect_ref_operands (stmt, ref, flags, expr, offset,
                                       maxsize, false);
            flags |= opf_no_vops;
          }

        /* Even if we found subvars above we need to ensure to see
           immediate uses for d in s.a[d].  In case s.a has a subvar
           we would miss it otherwise.  */
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

        if (code == COMPONENT_REF)
          {
            if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
              s_ann->has_volatile_ops = true;
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
          }
        else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
          {
            get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
          }

        return;
      }
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
         and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
      return;

    case GIMPLE_MODIFY_STMT:
      get_modify_stmt_operands (stmt, expr);
      return;
    case CONSTRUCTOR:
      {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
           are still in use as the COMPLEX_EXPR equivalent for vectors.  */
        constructor_elt *ce;
        unsigned HOST_WIDE_INT idx;

        for (idx = 0;
             VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
             idx++)
          get_expr_operands (stmt, &ce->value, opf_use);

        return;
      }
    case BIT_FIELD_REF:
    case TRUTH_NOT_EXPR:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      return;

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
      return;
    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
    case CONST_DECL:
    case OMP_PARALLEL:
    case OMP_SECTIONS:
    case OMP_FOR:
    case OMP_SINGLE:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_RETURN:
    case OMP_CONTINUE:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
        goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
        goto do_binary;
      if (class == tcc_constant || class == tcc_type)
        return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}
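/* For illustration, consider how the recursion above decomposes a
   reference like 's.a[d_3]' parsed with opf_use: the ARRAY_REF case
   adds the index d_3 as a real USE operand, and the recursion into
   the COMPONENT_REF base either adds the overlapping subvars of 's'
   or falls back to 's' itself as a virtual operand.  This is only a
   sketch of the usual flow; the exact operands produced depend on
   the aliasing and subvariable information available.  */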
/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (tree stmt)
{
  enum tree_code code;

  code = TREE_CODE (stmt);
  switch (code)
    {
    case GIMPLE_MODIFY_STMT:
      get_modify_stmt_operands (stmt, stmt);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use);
      break;

    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      /* These nodes contain no variable references.  */
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the
         operand pointer (which may only happen for USE operands), we
         will fail in add_stmt_operand.  This default will handle
         statements like empty statements, or CALL_EXPRs that may
         appear on the RHS of a statement or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_use);
      break;
    }
}
/* Create an operands cache for STMT.  */

static void
build_ssa_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* Initially assume that the statement has no volatile operands and
     makes no memory references.  */
  ann->has_volatile_ops = false;
  ann->references_memory = false;

  /* Just clear the bitmap so we don't end up reallocating it over and over.  */
  if (ann->addresses_taken)
    bitmap_clear (ann->addresses_taken);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  operand_build_sort_virtual (build_vuses);
  operand_build_sort_virtual (build_vdefs);
  finalize_ssa_stmt_operands (stmt);

  if (ann->addresses_taken && bitmap_empty_p (ann->addresses_taken))
    ann->addresses_taken = NULL;

  /* For added safety, assume that statements with volatile operands
     also reference memory.  */
  if (ann->has_volatile_ops)
    ann->references_memory = true;
}
/* Free any operands vectors in OPS.  */

void
free_ssa_operands (stmt_operands_p ops)
{
  ops->def_ops = NULL;
  ops->use_ops = NULL;
  ops->vdef_ops = NULL;
  ops->vuse_ops = NULL;
  BITMAP_FREE (ops->loads);
  BITMAP_FREE (ops->stores);
}
/* Get the operands of statement STMT.  */

void
update_stmt_operands (tree stmt)
{
  stmt_ann_t ann = get_stmt_ann (stmt);

  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  timevar_push (TV_TREE_OPS);

  gcc_assert (ann->modified);
  build_ssa_operands (stmt);
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
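/* A typical sequence in a pass that rewrites a statement in place
   looks like this (a sketch; NEW_RHS is a placeholder for whatever
   the pass computed):

     GIMPLE_STMT_OPERAND (stmt, 1) = new_rhs;
     mark_stmt_modified (stmt);
     update_stmt_operands (stmt);

   After this, the operand iterators see the rebuilt cache.  Most
   passes simply call update_stmt, which wraps the same mark-and-
   rebuild sequence.  */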
/* Copies virtual operands from SRC to DST.  */

void
copy_virtual_operands (tree dest, tree src)
{
  unsigned int i, n;
  voptype_p src_vuses, dest_vuses;
  voptype_p src_vdefs, dest_vdefs;
  struct voptype_d vuse;
  struct voptype_d vdef;
  stmt_ann_t dest_ann;

  VDEF_OPS (dest) = NULL;
  VUSE_OPS (dest) = NULL;

  dest_ann = get_stmt_ann (dest);
  BITMAP_FREE (dest_ann->operands.loads);
  BITMAP_FREE (dest_ann->operands.stores);

  if (LOADED_SYMS (src))
    {
      dest_ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
      bitmap_copy (dest_ann->operands.loads, LOADED_SYMS (src));
    }

  if (STORED_SYMS (src))
    {
      dest_ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
      bitmap_copy (dest_ann->operands.stores, STORED_SYMS (src));
    }

  /* Copy all the VUSE operators and corresponding operands.  The
     stack-allocated VUSE node acts as a sentinel list head, so the
     first call to add_vuse_op has something to link from.  */
  dest_vuses = &vuse;
  for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next)
    {
      n = VUSE_NUM (src_vuses);
      dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses);
      for (i = 0; i < n; i++)
        SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));

      if (VUSE_OPS (dest) == NULL)
        VUSE_OPS (dest) = vuse.next;
    }

  /* Copy all the VDEF operators and corresponding operands.  */
  dest_vdefs = &vdef;
  for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next)
    {
      n = VUSE_NUM (src_vdefs);
      dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs);
      VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
      for (i = 0; i < n; i++)
        SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));

      if (VDEF_OPS (dest) == NULL)
        VDEF_OPS (dest) = vdef.next;
    }
}
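/* Sketch of a typical use: when a pass duplicates a statement, the
   real operands are copied along with the tree itself, but the
   virtual operands live only in the annotation and must be copied
   explicitly:

     tree copy = unshare_expr (stmt);
     ...
     copy_virtual_operands (copy, stmt);

   This assumes the copy will live in a context where SRC's virtual
   operands are still correct for it.  */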
/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial statement that looks like a load from the store;
   this can be used to eliminate redundant loads.  OLD_STMT is the store
   statement, and NEW_STMT is the new load which represents a load of the
   values stored.  */

void
create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt)
{
  tree op;
  ssa_op_iter iter;
  use_operand_p use_p;
  unsigned i;

  get_stmt_ann (new_stmt);

  /* Process NEW_STMT looking for operands.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (new_stmt);

  for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
    if (TREE_CODE (op) != SSA_NAME)
      var_ann (op)->in_vuse_list = false;

  for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
    if (TREE_CODE (op) != SSA_NAME)
      var_ann (op)->in_vdef_list = false;

  /* Remove any virtual operands that were found.  */
  VEC_truncate (tree, build_vdefs, 0);
  VEC_truncate (tree, build_vuses, 0);

  /* For each VDEF on the original statement, we want to create a
     VUSE of the VDEF result operand on the new statement.  */
  FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
    append_vuse (op);

  finalize_ssa_stmt_operands (new_stmt);

  /* All uses in this fake stmt must not be in the immediate use lists.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
    delink_imm_use (use_p);
}
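/* For example (hypothetical SSA names), given the store

     # a_5 = VDEF <a_4>
     a = x_2;

   DOM can build a mirror statement 'x_2 = a', run
   create_ssa_artificial_load_stmt on it with the store as OLD_STMT,
   and obtain a statement with a VUSE of a_5 suitable for its
   available expression tables.  The artificial statement must never
   be linked into the IL, which is why its uses are delinked from the
   immediate use lists above.  */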
/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp0)
          {
            use0 = ptr;
            break;
          }

      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp1)
          {
            use1 = ptr;
            break;
          }

      /* If both uses don't have operand entries, there isn't much we can do
         at this point.  Presumably we don't need to worry about it.  */
      if (use0 && use1)
        {
          tree *tmp = USE_OP_PTR (use1)->use;
          USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
          USE_OP_PTR (use0)->use = tmp;
        }
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
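/* For example, a pass canonicalizing a commutative RHS might do the
   following (a sketch, assuming the RHS is a binary expression whose
   operands are in the operand cache):

     tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
     if (tree_swap_operands_p (TREE_OPERAND (rhs, 0),
                               TREE_OPERAND (rhs, 1), false))
       swap_tree_operands (stmt,
                           &TREE_OPERAND (rhs, 0),
                           &TREE_OPERAND (rhs, 1));

   Using swap_tree_operands instead of swapping the trees directly
   keeps the operand cache and immediate use lists consistent.  */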
/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
   a single variable whose address has been taken or any other valid
   GIMPLE memory reference (structure reference, array, etc).  If the
   base address of REF is a decl that has sub-variables, also add all
   of its sub-variables.  */

void
add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
  tree var;
  subvar_t svars;

  gcc_assert (addresses_taken);

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var && SSA_VAR_P (var))
    {
      if (*addresses_taken == NULL)
        *addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
          && (svars = get_subvars_for_var (var)))
        {
          subvar_t sv;
          for (sv = svars; sv; sv = sv->next)
            {
              bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
              TREE_ADDRESSABLE (sv->var) = 1;
            }
        }
      else
        {
          bitmap_set_bit (*addresses_taken, DECL_UID (var));
          TREE_ADDRESSABLE (var) = 1;
        }
    }
}
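/* For example, for the reference '&s.f' the base address is 's', so
   the whole of 's' (or all of its subvariables) is added to the set
   and marked addressable, not just the field 'f'.  */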
/* Scan the immediate_use list for VAR, making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
        goto error;

      if (ptr->use == NULL)
        goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
        goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
         problem.  */
      if (count++ > 50000000)
        goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
        goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
        goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->stmt && stmt_modified_p (ptr->stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
           (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->stmt == NULL && use_p->use == NULL)
        fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
        print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS|TDF_MEMSYMS);
      else
        print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
    }

  fprintf (file, "\n");
}
/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
        continue;
      dump_immediate_uses_for (file, var);
    }
}
/* Dump def-use edges on stderr.  */

void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}
/* Dump def-use edges on stderr.  */

void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}
/* Create a new change buffer for the statement pointed to by STMT_P and
   push the buffer into SCB_STACK.  Each change buffer records state
   information needed to determine what changed in the statement.
   Mainly, this keeps track of symbols that may need to be put into SSA
   form, SSA name replacements and other information needed to keep the
   SSA form up to date.  */

void
push_stmt_changes (tree *stmt_p)
{
  tree stmt;
  scb_t buf;

  stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = xmalloc (sizeof *buf);
  memset (buf, 0, sizeof *buf);

  buf->stmt_p = stmt_p;

  if (stmt_references_memory_p (stmt))
    {
      tree op;
      ssa_op_iter i;

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
        {
          tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
          if (buf->loads == NULL)
            buf->loads = BITMAP_ALLOC (NULL);
          bitmap_set_bit (buf->loads, DECL_UID (sym));
        }

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
        {
          tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
          if (buf->stores == NULL)
            buf->stores = BITMAP_ALLOC (NULL);
          bitmap_set_bit (buf->stores, DECL_UID (sym));
        }
    }

  VEC_safe_push (scb_t, heap, scb_stack, buf);
}
/* Given two sets S1 and S2, mark the symbols that differ in S1 and S2
   for renaming.  The set to mark for renaming is (S1 & ~S2) | (S2 & ~S1).  */

static void
mark_difference_for_renaming (bitmap s1, bitmap s2)
{
  if (s1 == NULL && s2 == NULL)
    return;

  if (s1 && s2 == NULL)
    mark_set_for_renaming (s1);
  else if (s1 == NULL && s2)
    mark_set_for_renaming (s2);
  else if (!bitmap_equal_p (s1, s2))
    {
      bitmap t1 = BITMAP_ALLOC (NULL);
      bitmap t2 = BITMAP_ALLOC (NULL);

      bitmap_and_compl (t1, s1, s2);
      bitmap_and_compl (t2, s2, s1);
      bitmap_ior_into (t1, t2);
      mark_set_for_renaming (t1);

      BITMAP_FREE (t1);
      BITMAP_FREE (t2);
    }
}
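/* For instance, if the loads set before a statement was modified was
   { a, b } and the set afterwards is { b, c }, the symmetric
   difference is { a, c }: 'a' is no longer loaded and 'c' is newly
   loaded, so both must be renamed, while 'b' is unaffected.  */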
/* Pop the top SCB from SCB_STACK and act on the differences between
   what was recorded by push_stmt_changes and the current state of
   the statement.  */

void
pop_stmt_changes (tree *stmt_p)
{
  tree op, stmt;
  ssa_op_iter iter;
  bitmap loads, stores;
  scb_t buf;

  stmt = *stmt_p;

  /* It makes no sense to keep track of PHI nodes.  */
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = VEC_pop (scb_t, scb_stack);
  gcc_assert (stmt_p == buf->stmt_p);

  /* Force an operand re-scan on the statement and mark any newly
     exposed variables.  */
  update_stmt (stmt);

  /* Determine whether any memory symbols need to be renamed.  If the
     sets of loads and stores are different after the statement is
     modified, then the affected symbols need to be renamed.

     Note that it may be possible for the statement to not reference
     memory anymore, but we still need to act on the differences in
     the sets of symbols.  */
  loads = stores = NULL;
  if (stmt_references_memory_p (stmt))
    {
      tree op;
      ssa_op_iter i;

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
        {
          tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
          if (loads == NULL)
            loads = BITMAP_ALLOC (NULL);
          bitmap_set_bit (loads, DECL_UID (sym));
        }

      FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
        {
          tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
          if (stores == NULL)
            stores = BITMAP_ALLOC (NULL);
          bitmap_set_bit (stores, DECL_UID (sym));
        }
    }

  /* If LOADS is different from BUF->LOADS, the affected
     symbols need to be marked for renaming.  */
  mark_difference_for_renaming (loads, buf->loads);

  /* Similarly for STORES and BUF->STORES.  */
  mark_difference_for_renaming (stores, buf->stores);

  /* Mark all the naked GIMPLE register operands for renaming.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
    if (DECL_P (op))
      mark_sym_for_renaming (op);

  /* FIXME, need to add more finalizers here.  Cleanup EH info,
     recompute invariants for address expressions, add
     SSA replacement mappings, etc.  For instance, given
     testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of
     the form

        # SMT.4_20 = VDEF <SMT.4_16>
        D.1576_11 = 1.0e+0;

     So, the VDEF will disappear, but instead of marking SMT.4 for
     renaming it would be far more efficient to establish a
     replacement mapping that would replace every reference of
     SMT.4_20 with SMT.4_16.  */

  /* Free memory used by the buffer.  */
  BITMAP_FREE (buf->loads);
  BITMAP_FREE (buf->stores);
  BITMAP_FREE (loads);
  BITMAP_FREE (stores);
  buf->stmt_p = NULL;
  free (buf);
}
/* Discard the topmost change buffer from SCB_STACK.  This is useful
   when the caller realizes that it did not actually modify the
   statement.  It avoids the expensive operand re-scan.  */

void
discard_stmt_changes (tree *stmt_p)
{
  scb_t buf;
  tree stmt;

  /* It makes no sense to keep track of PHI nodes.  */
  stmt = *stmt_p;
  if (TREE_CODE (stmt) == PHI_NODE)
    return;

  buf = VEC_pop (scb_t, scb_stack);
  gcc_assert (stmt_p == buf->stmt_p);

  /* Free memory used by the buffer.  */
  BITMAP_FREE (buf->loads);
  BITMAP_FREE (buf->stores);
  buf->stmt_p = NULL;
  free (buf);
}
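/* The three entry points above bracket in-place statement
   modifications.  A pass would typically use them as follows
   (a sketch; fold_stmt stands for any in-place transformation):

     push_stmt_changes (&stmt);
     if (fold_stmt (&stmt))
       pop_stmt_changes (&stmt);
     else
       discard_stmt_changes (&stmt);

   pop_stmt_changes re-scans the operands and marks symbols for
   renaming; discard_stmt_changes skips that work when nothing
   changed.  */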
/* Returns true if statement STMT may access memory.  */

bool
stmt_references_memory_p (tree stmt)
{
  if (!gimple_ssa_operands (cfun)->ops_active || TREE_CODE (stmt) == PHI_NODE)
    return false;

  return stmt_ann (stmt)->references_memory;
}
/* Return the memory partition tag (MPT) associated with memory
   symbol SYM.  From a correctness standpoint, memory partitions can
   be assigned in any arbitrary fashion as long as this rule is
   observed: Given two memory partitions MPT.i and MPT.j, they must
   not contain symbols in common.

   Memory partitions are used when putting the program into Memory-SSA
   form.  In particular, in Memory-SSA PHI nodes are not computed for
   individual memory symbols.  They are computed for memory
   partitions.  This reduces the amount of PHI nodes in the SSA graph
   at the expense of precision (i.e., it makes unrelated stores affect
   each other).

   However, it is possible to increase precision by changing this
   partitioning scheme.  For instance, if the partitioning scheme is
   such that get_mpt_for is the identity function (that is,
   get_mpt_for (s) = s), this will result in ultimate precision at the
   expense of huge SSA webs.

   At the other extreme, a partitioning scheme that groups all the
   symbols in the same set results in minimal SSA webs and almost
   total loss of precision.  */

tree
get_mpt_for (tree sym)
{
  tree mpt;

  /* Don't create a new tag unnecessarily.  */
  mpt = memory_partition (sym);
  if (mpt == NULL_TREE)
    {
      mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
      TREE_ADDRESSABLE (mpt) = 0;
      MTAG_GLOBAL (mpt) = 1;
      add_referenced_var (mpt);
      VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
      MPT_SYMBOLS (mpt) = BITMAP_ALLOC (&operands_bitmap_obstack);
      set_memory_partition (sym, mpt);
    }

  return mpt;
}
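/* As an illustration of the correctness rule above: for symbols
   { a, b, c }, the partitions MPT.1 = { a, b } and MPT.2 = { c } are
   valid, but MPT.1 = { a, b } and MPT.2 = { b, c } are not, because
   'b' would belong to two partitions and its stores could be
   factored through either one inconsistently.  */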
/* Dump memory partition information to FILE.  */

static void
dump_memory_partitions (FILE *file)
{
  unsigned i, npart;
  unsigned long nsyms;
  tree mpt;

  fprintf (file, "\nMemory partitions\n\n");
  for (i = 0, npart = 0, nsyms = 0;
       VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
       i++)
    {
      if (mpt)
        {
          bitmap syms = MPT_SYMBOLS (mpt);
          unsigned long n = bitmap_count_bits (syms);

          fprintf (file, "#%u: ", i);
          print_generic_expr (file, mpt, 0);
          fprintf (file, ": %lu elements: ", n);
          dump_decl_set (file, syms);
          npart++;
          nsyms += n;
        }
    }

  fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
}
/* Dump memory partition information to stderr.  */

void
debug_memory_partitions (void)
{
  dump_memory_partitions (stderr);
}