1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53 The operand tree is then parsed by the various get_* routines, which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
59
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
62
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
66
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had an SSA_NAME version of a
72 variable, and that same variable occurs in the new operand cache, then
73 the new cache vector will also get the same SSA_NAME.
74
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'. */
78
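/* Editorial illustration (not part of the original sources).  For a
   statement such as

       # a_6 = V_MAY_DEF <a_5>
       a = b_2 + 1;

   the cache described above holds a real USE of 'b_2' and, because 'a'
   is not a GIMPLE register here, a virtual may-definition a_5 -> a_6.
   Passes normally consume the cache through the iterator macros from
   tree-ssa-operands.h; a minimal sketch, assuming the usual interface
   of this era:

       tree op;
       ssa_op_iter iter;

       FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_USES)
         print_generic_expr (dump_file, op, 0);

   After editing a statement, a pass calls update_stmt so that the
   routines in this file rebuild the cache.  */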
79 /* Flags to describe operand properties in helpers. */
80
81 /* By default, operands are loaded. */
82 #define opf_none 0
83
84 /* Operand is the target of an assignment expression or a
85 call-clobbered variable. */
86 #define opf_is_def (1 << 0)
87
88 /* Operand is the target of an assignment expression. */
89 #define opf_kill_def (1 << 1)
90
91 /* No virtual operands should be created in the expression. This is used
92 when traversing ADDR_EXPR nodes which have different semantics than
93 other expressions. Inside an ADDR_EXPR node, the only operands that we
94 need to consider are indices into arrays. For instance, &a.b[i] should
95 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
96 VUSE for 'b'. */
97 #define opf_no_vops (1 << 2)
98
99 /* Operand is a "non-specific" kill for call-clobbers and such. This
100 is used to distinguish "reset the world" events from explicit
101 MODIFY_EXPRs. */
102 #define opf_non_specific (1 << 3)
103
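/* Editorial note (not part of the original sources): these flags are
   OR'ed together by the scanning routines below.  For instance, the LHS
   of an assignment is scanned with

       get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
                          opf_is_def | opf_kill_def);

   stores through pointers clear opf_kill_def again (they only may-define
   their target), subexpressions of an ADDR_EXPR add opf_no_vops, and the
   call clobber/read handlers pass opf_non_specific.  See
   get_modify_expr_operands, get_indirect_ref_operands and
   add_call_read_ops below.  */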
104 /* Array for building all the def operands. */
105 static VEC(tree,heap) *build_defs;
106
107 /* Array for building all the use operands. */
108 static VEC(tree,heap) *build_uses;
109
110 /* Array for building all the V_MAY_DEF operands. */
111 static VEC(tree,heap) *build_v_may_defs;
112
113 /* Array for building all the VUSE operands. */
114 static VEC(tree,heap) *build_vuses;
115
116 /* Array for building all the V_MUST_DEF operands. */
117 static VEC(tree,heap) *build_v_must_defs;
118
119 static void get_expr_operands (tree, tree *, int);
120
121 /* Number of functions with initialized ssa_operands. */
122 static int n_initialized = 0;
123
124 /* Allocates operand OP of the given TYPE from the appropriate free list,
125 or from newly allocated memory if the free list is empty. */
126
127 #define ALLOC_OPTYPE(OP, TYPE) \
128 do \
129 { \
130 TYPE##_optype_p ret \
131 = gimple_ssa_operands (cfun)->free_##TYPE##s; \
132 if (ret) \
133 gimple_ssa_operands (cfun)->free_##TYPE##s \
134 = ret->next; \
135 else \
136 ret = ssa_operand_alloc (sizeof (*ret)); \
137 (OP) = ret; \
138 } while (0)
139
140 /* Return the DECL_UID of the base variable of T. */
141
142 static inline unsigned
143 get_name_decl (tree t)
144 {
145 if (TREE_CODE (t) != SSA_NAME)
146 return DECL_UID (t);
147 else
148 return DECL_UID (SSA_NAME_VAR (t));
149 }
150
151
152 /* Comparison function for qsort used in operand_build_sort_virtual. */
153
154 static int
155 operand_build_cmp (const void *p, const void *q)
156 {
157 tree e1 = *((const tree *)p);
158 tree e2 = *((const tree *)q);
159 unsigned int u1,u2;
160
161 u1 = get_name_decl (e1);
162 u2 = get_name_decl (e2);
163
164 /* We want to sort in ascending order. They can never be equal. */
165 #ifdef ENABLE_CHECKING
166 gcc_assert (u1 != u2);
167 #endif
168 return (u1 > u2 ? 1 : -1);
169 }
170
171
172 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
173
174 static inline void
175 operand_build_sort_virtual (VEC(tree,heap) *list)
176 {
177 int num = VEC_length (tree, list);
178
179 if (num < 2)
180 return;
181
182 if (num == 2)
183 {
184 if (get_name_decl (VEC_index (tree, list, 0))
185 > get_name_decl (VEC_index (tree, list, 1)))
186 {
187 /* Swap elements if in the wrong order. */
188 tree tmp = VEC_index (tree, list, 0);
189 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
190 VEC_replace (tree, list, 1, tmp);
191 }
192 return;
193 }
194
195 /* There are 3 or more elements, call qsort. */
196 qsort (VEC_address (tree, list),
197 VEC_length (tree, list),
198 sizeof (tree),
199 operand_build_cmp);
200 }
201
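/* Editorial illustration (not part of the original sources): keeping the
   virtual build vectors sorted by DECL_UID lets the finalize_* routines
   below merge a new vector against the previous (already sorted) operand
   list in a single linear pass, much like merging two sorted lists:

       old list:    a (uid 3) -> c (uid 7)
       new vector:  [ a (3), b (5), c (7) ]

   The merge reuses the nodes for 'a' and 'c' and only allocates a new
   node for 'b'; see finalize_ssa_vuse_ops and friends.  */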
202
203 /* Return true if the SSA operands cache is active. */
204
205 bool
206 ssa_operands_active (void)
207 {
208 return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
209 }
210
211
212 /* Structure storing statistics on how many call clobbers we have, and
213 how many were avoided. */
214
215 static struct
216 {
217 /* Number of call-clobbered ops we attempt to add to calls in
218 add_call_clobber_ops. */
219 unsigned int clobbered_vars;
220
221 /* Number of write-clobbers (V_MAY_DEFs) avoided by using
222 not_written information. */
223 unsigned int static_write_clobbers_avoided;
224
225 /* Number of reads (VUSEs) avoided by using not_read information. */
226 unsigned int static_read_clobbers_avoided;
227
228 /* Number of write-clobbers avoided because the variable can't escape to
229 this call. */
230 unsigned int unescapable_clobbers_avoided;
231
232 /* Number of read-only uses we attempt to add to calls in
233 add_call_read_ops. */
234 unsigned int readonly_clobbers;
235
236 /* Number of read-only uses we avoid using not_read information. */
237 unsigned int static_readonly_clobbers_avoided;
238 } clobber_stats;
239
240
241 /* Initialize the operand cache routines. */
242
243 void
244 init_ssa_operands (void)
245 {
246 if (!n_initialized++)
247 {
248 build_defs = VEC_alloc (tree, heap, 5);
249 build_uses = VEC_alloc (tree, heap, 10);
250 build_vuses = VEC_alloc (tree, heap, 25);
251 build_v_may_defs = VEC_alloc (tree, heap, 25);
252 build_v_must_defs = VEC_alloc (tree, heap, 25);
253 }
254
255 gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
256 gimple_ssa_operands (cfun)->operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
257 gimple_ssa_operands (cfun)->ops_active = true;
258 memset (&clobber_stats, 0, sizeof (clobber_stats));
259 }
260
261
262 /* Dispose of anything required by the operand routines. */
263
264 void
265 fini_ssa_operands (void)
266 {
267 struct ssa_operand_memory_d *ptr;
268 if (!--n_initialized)
269 {
270 VEC_free (tree, heap, build_defs);
271 VEC_free (tree, heap, build_uses);
272 VEC_free (tree, heap, build_v_must_defs);
273 VEC_free (tree, heap, build_v_may_defs);
274 VEC_free (tree, heap, build_vuses);
275 }
276 gimple_ssa_operands (cfun)->free_defs = NULL;
277 gimple_ssa_operands (cfun)->free_uses = NULL;
278 gimple_ssa_operands (cfun)->free_vuses = NULL;
279 gimple_ssa_operands (cfun)->free_maydefs = NULL;
280 gimple_ssa_operands (cfun)->free_mustdefs = NULL;
281 while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
282 {
283 gimple_ssa_operands (cfun)->operand_memory
284 = gimple_ssa_operands (cfun)->operand_memory->next;
285 ggc_free (ptr);
286 }
287
288 gimple_ssa_operands (cfun)->ops_active = false;
289
290 if (dump_file && (dump_flags & TDF_STATS))
291 {
292 fprintf (dump_file, "Original clobbered vars:%d\n",
293 clobber_stats.clobbered_vars);
294 fprintf (dump_file, "Static write clobbers avoided:%d\n",
295 clobber_stats.static_write_clobbers_avoided);
296 fprintf (dump_file, "Static read clobbers avoided:%d\n",
297 clobber_stats.static_read_clobbers_avoided);
298 fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
299 clobber_stats.unescapable_clobbers_avoided);
300 fprintf (dump_file, "Original read-only clobbers:%d\n",
301 clobber_stats.readonly_clobbers);
302 fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
303 clobber_stats.static_readonly_clobbers_avoided);
304 }
305 }
306
307
308 /* Return SIZE bytes of memory for operands, carved from the operand memory chunks. */
309
310 static inline void *
311 ssa_operand_alloc (unsigned size)
312 {
313 char *ptr;
314 if (gimple_ssa_operands (cfun)->operand_memory_index + size
315 >= SSA_OPERAND_MEMORY_SIZE)
316 {
317 struct ssa_operand_memory_d *ptr;
318 ptr = GGC_NEW (struct ssa_operand_memory_d);
319 ptr->next = gimple_ssa_operands (cfun)->operand_memory;
320 gimple_ssa_operands (cfun)->operand_memory = ptr;
321 gimple_ssa_operands (cfun)->operand_memory_index = 0;
322 }
323 ptr = &(gimple_ssa_operands (cfun)->operand_memory
324 ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
325 gimple_ssa_operands (cfun)->operand_memory_index += size;
326 return ptr;
327 }
328
329
330
331 /* This routine makes sure that PTR is in an immediate use list, and makes
332 sure the stmt pointer is set to the current stmt. */
333
334 static inline void
335 set_virtual_use_link (use_operand_p ptr, tree stmt)
336 {
337 /* fold_stmt may have changed the stmt pointers. */
338 if (ptr->stmt != stmt)
339 ptr->stmt = stmt;
340
341 /* If this use isn't in a list, add it to the correct list. */
342 if (!ptr->prev)
343 link_imm_use (ptr, *(ptr->use));
344 }
345
346 /* Appends ELT after TO, and moves the TO pointer to ELT. */
347
348 #define APPEND_OP_AFTER(ELT, TO) \
349 do \
350 { \
351 (TO)->next = (ELT); \
352 (TO) = (ELT); \
353 } while (0)
354
355 /* Appends head of list FROM after TO, and move both pointers
356 to their successors. */
357
358 #define MOVE_HEAD_AFTER(FROM, TO) \
359 do \
360 { \
361 APPEND_OP_AFTER (FROM, TO); \
362 (FROM) = (FROM)->next; \
363 } while (0)
364
365 /* Moves OP to appropriate freelist. OP is set to its successor. */
366
367 #define MOVE_HEAD_TO_FREELIST(OP, TYPE) \
368 do \
369 { \
370 TYPE##_optype_p next = (OP)->next; \
371 (OP)->next \
372 = gimple_ssa_operands (cfun)->free_##TYPE##s; \
373 gimple_ssa_operands (cfun)->free_##TYPE##s = (OP);\
374 (OP) = next; \
375 } while (0)
376
377 /* Initializes immediate use at USE_PTR to value VAL, and links it to the list
378 of immediate uses. STMT is the current statement. */
379
380 #define INITIALIZE_USE(USE_PTR, VAL, STMT) \
381 do \
382 { \
383 (USE_PTR)->use = (VAL); \
384 link_imm_use_stmt ((USE_PTR), *(VAL), (STMT)); \
385 } while (0)
386
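/* Editorial sketch (not part of the original sources): once a use has
   been linked with INITIALIZE_USE / link_imm_use_stmt, all uses of an
   SSA name can be walked through the immediate-use iterators.  Assuming
   the interface declared in tree-ssa-operands.h of this era:

       use_operand_p use_p;
       imm_use_iterator imm_iter;

       FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ssa_name)
         print_generic_stmt (dump_file, USE_STMT (use_p), 0);

   set_virtual_use_link above re-links virtual uses whose statement
   pointers fold_stmt may have changed.  */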
387 /* Adds OP to the list of defs after LAST, and moves
388 LAST to the new element. */
389
390 static inline void
391 add_def_op (tree *op, def_optype_p *last)
392 {
393 def_optype_p new;
394
395 ALLOC_OPTYPE (new, def);
396 DEF_OP_PTR (new) = op;
397 APPEND_OP_AFTER (new, *last);
398 }
399
400 /* Adds OP to the list of uses of statement STMT after LAST, and moves
401 LAST to the new element. */
402
403 static inline void
404 add_use_op (tree stmt, tree *op, use_optype_p *last)
405 {
406 use_optype_p new;
407
408 ALLOC_OPTYPE (new, use);
409 INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
410 APPEND_OP_AFTER (new, *last);
411 }
412
413 /* Adds OP to the list of vuses of statement STMT after LAST, and moves
414 LAST to the new element. */
415
416 static inline void
417 add_vuse_op (tree stmt, tree op, vuse_optype_p *last)
418 {
419 vuse_optype_p new;
420
421 ALLOC_OPTYPE (new, vuse);
422 VUSE_OP (new) = op;
423 INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
424 APPEND_OP_AFTER (new, *last);
425 }
426
427 /* Adds OP to the list of maydefs of statement STMT after LAST, and moves
428 LAST to the new element. */
429
430 static inline void
431 add_maydef_op (tree stmt, tree op, maydef_optype_p *last)
432 {
433 maydef_optype_p new;
434
435 ALLOC_OPTYPE (new, maydef);
436 MAYDEF_RESULT (new) = op;
437 MAYDEF_OP (new) = op;
438 INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
439 APPEND_OP_AFTER (new, *last);
440 }
441
442 /* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
443 LAST to the new element. */
444
445 static inline void
446 add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
447 {
448 mustdef_optype_p new;
449
450 ALLOC_OPTYPE (new, mustdef);
451 MUSTDEF_RESULT (new) = op;
452 MUSTDEF_KILL (new) = op;
453 INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
454 APPEND_OP_AFTER (new, *last);
455 }
456
457 /* Takes elements from build_defs and turns them into def operands of STMT.
458 TODO -- Given that def operands list is not necessarily sorted, merging
459 the operands this way does not make much sense.
460 -- Make build_defs VEC of tree *. */
461
462 static inline void
463 finalize_ssa_def_ops (tree stmt)
464 {
465 unsigned new_i;
466 struct def_optype_d new_list;
467 def_optype_p old_ops, last;
468 tree *old_base;
469
470 new_list.next = NULL;
471 last = &new_list;
472
473 old_ops = DEF_OPS (stmt);
474
475 new_i = 0;
476 while (old_ops && new_i < VEC_length (tree, build_defs))
477 {
478 tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
479 old_base = DEF_OP_PTR (old_ops);
480
481 if (old_base == new_base)
482 {
483 /* if variables are the same, reuse this node. */
484 MOVE_HEAD_AFTER (old_ops, last);
485 new_i++;
486 }
487 else if (old_base < new_base)
488 {
489 /* if old is less than new, old goes to the free list. */
490 MOVE_HEAD_TO_FREELIST (old_ops, def);
491 }
492 else
493 {
494 /* This is a new operand. */
495 add_def_op (new_base, &last);
496 new_i++;
497 }
498 }
499
500 /* If there is anything remaining in the build_defs list, simply emit it. */
501 for ( ; new_i < VEC_length (tree, build_defs); new_i++)
502 add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);
503
504 last->next = NULL;
505
506 /* If there is anything in the old list, free it. */
507 if (old_ops)
508 {
509 old_ops->next = gimple_ssa_operands (cfun)->free_defs;
510 gimple_ssa_operands (cfun)->free_defs = old_ops;
511 }
512
513 /* Now set the stmt's operands. */
514 DEF_OPS (stmt) = new_list.next;
515
516 #ifdef ENABLE_CHECKING
517 {
518 def_optype_p ptr;
519 unsigned x = 0;
520 for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
521 x++;
522
523 gcc_assert (x == VEC_length (tree, build_defs));
524 }
525 #endif
526 }
527
528 /* This routine will create stmt operands for STMT from the def build list. */
529
530 static void
531 finalize_ssa_defs (tree stmt)
532 {
533 unsigned int num = VEC_length (tree, build_defs);
534
535 /* There should only be a single real definition per assignment. */
536 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
537
538 /* If there is an old list, often the new list is identical, or close, so
539 find the elements at the beginning that are the same as the vector. */
540 finalize_ssa_def_ops (stmt);
541 VEC_truncate (tree, build_defs, 0);
542 }
543
544 /* Takes elements from build_uses and turns them into use operands of STMT.
545 TODO -- Make build_uses VEC of tree *. */
546
547 static inline void
548 finalize_ssa_use_ops (tree stmt)
549 {
550 unsigned new_i;
551 struct use_optype_d new_list;
552 use_optype_p old_ops, ptr, last;
553
554 new_list.next = NULL;
555 last = &new_list;
556
557 old_ops = USE_OPS (stmt);
558
559 /* If there is anything in the old list, free it. */
560 if (old_ops)
561 {
562 for (ptr = old_ops; ptr; ptr = ptr->next)
563 delink_imm_use (USE_OP_PTR (ptr));
564 old_ops->next = gimple_ssa_operands (cfun)->free_uses;
565 gimple_ssa_operands (cfun)->free_uses = old_ops;
566 }
567
568 /* Now create nodes for all the new nodes. */
569 for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
570 add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);
571
572 last->next = NULL;
573
574 /* Now set the stmt's operands. */
575 USE_OPS (stmt) = new_list.next;
576
577 #ifdef ENABLE_CHECKING
578 {
579 unsigned x = 0;
580 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
581 x++;
582
583 gcc_assert (x == VEC_length (tree, build_uses));
584 }
585 #endif
586 }
587
588 /* Build the use operand list for STMT from the build_uses vector. */
589
590 static void
591 finalize_ssa_uses (tree stmt)
592 {
593 #ifdef ENABLE_CHECKING
594 {
595 unsigned x;
596 unsigned num = VEC_length (tree, build_uses);
597
598 /* If the pointer to the operand is the statement itself, something is
599 wrong. It means that we are pointing to a local variable (the
600 initial call to update_stmt_operands does not pass a pointer to a
601 statement). */
602 for (x = 0; x < num; x++)
603 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
604 }
605 #endif
606 finalize_ssa_use_ops (stmt);
607 VEC_truncate (tree, build_uses, 0);
608 }
609
610
611 /* Takes elements from build_v_may_defs and turns them into maydef operands of
612 STMT. */
613
614 static inline void
615 finalize_ssa_v_may_def_ops (tree stmt)
616 {
617 unsigned new_i;
618 struct maydef_optype_d new_list;
619 maydef_optype_p old_ops, ptr, last;
620 tree act;
621 unsigned old_base, new_base;
622
623 new_list.next = NULL;
624 last = &new_list;
625
626 old_ops = MAYDEF_OPS (stmt);
627
628 new_i = 0;
629 while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
630 {
631 act = VEC_index (tree, build_v_may_defs, new_i);
632 new_base = get_name_decl (act);
633 old_base = get_name_decl (MAYDEF_OP (old_ops));
634
635 if (old_base == new_base)
636 {
637 /* if variables are the same, reuse this node. */
638 MOVE_HEAD_AFTER (old_ops, last);
639 set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
640 new_i++;
641 }
642 else if (old_base < new_base)
643 {
644 /* if old is less than new, old goes to the free list. */
645 delink_imm_use (MAYDEF_OP_PTR (old_ops));
646 MOVE_HEAD_TO_FREELIST (old_ops, maydef);
647 }
648 else
649 {
650 /* This is a new operand. */
651 add_maydef_op (stmt, act, &last);
652 new_i++;
653 }
654 }
655
656 /* If there is anything remaining in the build_v_may_defs list, simply emit it. */
657 for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
658 add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);
659
660 last->next = NULL;
661
662 /* If there is anything in the old list, free it. */
663 if (old_ops)
664 {
665 for (ptr = old_ops; ptr; ptr = ptr->next)
666 delink_imm_use (MAYDEF_OP_PTR (ptr));
667 old_ops->next = gimple_ssa_operands (cfun)->free_maydefs;
668 gimple_ssa_operands (cfun)->free_maydefs = old_ops;
669 }
670
671 /* Now set the stmt's operands. */
672 MAYDEF_OPS (stmt) = new_list.next;
673
674 #ifdef ENABLE_CHECKING
675 {
676 unsigned x = 0;
677 for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
678 x++;
679
680 gcc_assert (x == VEC_length (tree, build_v_may_defs));
681 }
682 #endif
683 }
684
685 static void
686 finalize_ssa_v_may_defs (tree stmt)
687 {
688 finalize_ssa_v_may_def_ops (stmt);
689 }
690
691
692 /* Clear the in_list bits and empty the build array for V_MAY_DEFs. */
693
694 static inline void
695 cleanup_v_may_defs (void)
696 {
697 unsigned x, num;
698 num = VEC_length (tree, build_v_may_defs);
699
700 for (x = 0; x < num; x++)
701 {
702 tree t = VEC_index (tree, build_v_may_defs, x);
703 if (TREE_CODE (t) != SSA_NAME)
704 {
705 var_ann_t ann = var_ann (t);
706 ann->in_v_may_def_list = 0;
707 }
708 }
709 VEC_truncate (tree, build_v_may_defs, 0);
710 }
711
712
713 /* Takes elements from build_vuses and turns them into vuse operands of
714 STMT. */
715
716 static inline void
717 finalize_ssa_vuse_ops (tree stmt)
718 {
719 unsigned new_i;
720 struct vuse_optype_d new_list;
721 vuse_optype_p old_ops, ptr, last;
722 tree act;
723 unsigned old_base, new_base;
724
725 new_list.next = NULL;
726 last = &new_list;
727
728 old_ops = VUSE_OPS (stmt);
729
730 new_i = 0;
731 while (old_ops && new_i < VEC_length (tree, build_vuses))
732 {
733 act = VEC_index (tree, build_vuses, new_i);
734 new_base = get_name_decl (act);
735 old_base = get_name_decl (VUSE_OP (old_ops));
736
737 if (old_base == new_base)
738 {
739 /* if variables are the same, reuse this node. */
740 MOVE_HEAD_AFTER (old_ops, last);
741 set_virtual_use_link (VUSE_OP_PTR (last), stmt);
742 new_i++;
743 }
744 else if (old_base < new_base)
745 {
746 /* if old is less than new, old goes to the free list. */
747 delink_imm_use (USE_OP_PTR (old_ops));
748 MOVE_HEAD_TO_FREELIST (old_ops, vuse);
749 }
750 else
751 {
752 /* This is a new operand. */
753 add_vuse_op (stmt, act, &last);
754 new_i++;
755 }
756 }
757
758 /* If there is anything remaining in the build_vuses list, simply emit it. */
759 for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
760 add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);
761
762 last->next = NULL;
763
764 /* If there is anything in the old list, free it. */
765 if (old_ops)
766 {
767 for (ptr = old_ops; ptr; ptr = ptr->next)
768 delink_imm_use (VUSE_OP_PTR (ptr));
769 old_ops->next = gimple_ssa_operands (cfun)->free_vuses;
770 gimple_ssa_operands (cfun)->free_vuses = old_ops;
771 }
772
773 /* Now set the stmt's operands. */
774 VUSE_OPS (stmt) = new_list.next;
775
776 #ifdef ENABLE_CHECKING
777 {
778 unsigned x = 0;
779 for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
780 x++;
781
782 gcc_assert (x == VEC_length (tree, build_vuses));
783 }
784 #endif
785 }
786
787 /* Build the VUSE operand list for STMT from the build_vuses vector. */
788
789 static void
790 finalize_ssa_vuses (tree stmt)
791 {
792 unsigned num, num_v_may_defs;
793 unsigned vuse_index;
794
795 /* Remove superfluous VUSE operands. If the statement already has a
796 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
797 not needed because V_MAY_DEFs imply a VUSE of the variable. For
798 instance, suppose that variable 'a' is aliased:
799
800 # VUSE <a_2>
801 # a_3 = V_MAY_DEF <a_2>
802 a = a + 1;
803
804 The VUSE <a_2> is superfluous because it is implied by the
805 V_MAY_DEF operation. */
806 num = VEC_length (tree, build_vuses);
807 num_v_may_defs = VEC_length (tree, build_v_may_defs);
808
809 if (num > 0 && num_v_may_defs > 0)
810 {
811 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
812 {
813 tree vuse;
814 vuse = VEC_index (tree, build_vuses, vuse_index);
815 if (TREE_CODE (vuse) != SSA_NAME)
816 {
817 var_ann_t ann = var_ann (vuse);
818 ann->in_vuse_list = 0;
819 if (ann->in_v_may_def_list)
820 {
821 VEC_ordered_remove (tree, build_vuses, vuse_index);
822 continue;
823 }
824 }
825 vuse_index++;
826 }
827 }
828 else
829 {
830 /* Clear out the in_list bits. */
831 for (vuse_index = 0;
832 vuse_index < VEC_length (tree, build_vuses);
833 vuse_index++)
834 {
835 tree t = VEC_index (tree, build_vuses, vuse_index);
836 if (TREE_CODE (t) != SSA_NAME)
837 {
838 var_ann_t ann = var_ann (t);
839 ann->in_vuse_list = 0;
840 }
841 }
842 }
843
844 finalize_ssa_vuse_ops (stmt);
845
846 /* The V_MAY_DEF build vector wasn't cleaned up because we needed it. */
847 cleanup_v_may_defs ();
848
849 /* Free the VUSEs build vector. */
850 VEC_truncate (tree, build_vuses, 0);
851
852 }
853
854 /* Takes elements from build_v_must_defs and turns them into mustdef operands of
855 STMT. */
856
857 static inline void
858 finalize_ssa_v_must_def_ops (tree stmt)
859 {
860 unsigned new_i;
861 struct mustdef_optype_d new_list;
862 mustdef_optype_p old_ops, ptr, last;
863 tree act;
864 unsigned old_base, new_base;
865
866 new_list.next = NULL;
867 last = &new_list;
868
869 old_ops = MUSTDEF_OPS (stmt);
870
871 new_i = 0;
872 while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
873 {
874 act = VEC_index (tree, build_v_must_defs, new_i);
875 new_base = get_name_decl (act);
876 old_base = get_name_decl (MUSTDEF_KILL (old_ops));
877
878 if (old_base == new_base)
879 {
880 /* If variables are the same, reuse this node. */
881 MOVE_HEAD_AFTER (old_ops, last);
882 set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
883 new_i++;
884 }
885 else if (old_base < new_base)
886 {
887 /* If old is less than new, old goes to the free list. */
888 delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
889 MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
890 }
891 else
892 {
893 /* This is a new operand. */
894 add_mustdef_op (stmt, act, &last);
895 new_i++;
896 }
897 }
898
899 /* If there is anything remaining in the build_v_must_defs list, simply emit it. */
900 for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
901 add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);
902
903 last->next = NULL;
904
905 /* If there is anything in the old list, free it. */
906 if (old_ops)
907 {
908 for (ptr = old_ops; ptr; ptr = ptr->next)
909 delink_imm_use (MUSTDEF_KILL_PTR (ptr));
910 old_ops->next = gimple_ssa_operands (cfun)->free_mustdefs;
911 gimple_ssa_operands (cfun)->free_mustdefs = old_ops;
912 }
913
914 /* Now set the stmt's operands. */
915 MUSTDEF_OPS (stmt) = new_list.next;
916
917 #ifdef ENABLE_CHECKING
918 {
919 unsigned x = 0;
920 for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
921 x++;
922
923 gcc_assert (x == VEC_length (tree, build_v_must_defs));
924 }
925 #endif
926 }
927
928 static void
929 finalize_ssa_v_must_defs (tree stmt)
930 {
931 /* In the presence of subvars, there may be more than one V_MUST_DEF
932 per statement (one for each subvar). It is a bit expensive to
933 verify that all must-defs in a statement belong to subvars if
934 there is more than one must-def, so we don't do it. Suffice to
935 say, if you reach here without having subvars, and have more than
936 one V_MUST_DEF, you have hit a bug. */
937 finalize_ssa_v_must_def_ops (stmt);
938 VEC_truncate (tree, build_v_must_defs, 0);
939 }
940
941
942 /* Finalize all the build vectors and attach the resulting operand lists to STMT. */
943
944 static inline void
945 finalize_ssa_stmt_operands (tree stmt)
946 {
947 finalize_ssa_defs (stmt);
948 finalize_ssa_uses (stmt);
949 finalize_ssa_v_must_defs (stmt);
950 finalize_ssa_v_may_defs (stmt);
951 finalize_ssa_vuses (stmt);
952 }
953
954
955 /* Start building the operand vectors; verify that the build vectors are empty. */
956
957 static inline void
958 start_ssa_stmt_operands (void)
959 {
960 gcc_assert (VEC_length (tree, build_defs) == 0);
961 gcc_assert (VEC_length (tree, build_uses) == 0);
962 gcc_assert (VEC_length (tree, build_vuses) == 0);
963 gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
964 gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
965 }
966
967
968 /* Add DEF_P to the list of pointers to operands. */
969
970 static inline void
971 append_def (tree *def_p)
972 {
973 VEC_safe_push (tree, heap, build_defs, (tree)def_p);
974 }
975
976
977 /* Add USE_P to the list of pointers to operands. */
978
979 static inline void
980 append_use (tree *use_p)
981 {
982 VEC_safe_push (tree, heap, build_uses, (tree)use_p);
983 }
984
985
986 /* Add a new virtual may def for variable VAR to the build array. */
987
988 static inline void
989 append_v_may_def (tree var)
990 {
991 if (TREE_CODE (var) != SSA_NAME)
992 {
993 var_ann_t ann = get_var_ann (var);
994
995 /* Don't allow duplicate entries. */
996 if (ann->in_v_may_def_list)
997 return;
998 ann->in_v_may_def_list = 1;
999 }
1000
1001 VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
1002 }
1003
1004
1005 /* Add VAR to the list of virtual uses. */
1006
1007 static inline void
1008 append_vuse (tree var)
1009 {
1010 /* Don't allow duplicate entries. */
1011 if (TREE_CODE (var) != SSA_NAME)
1012 {
1013 var_ann_t ann = get_var_ann (var);
1014
1015 if (ann->in_vuse_list || ann->in_v_may_def_list)
1016 return;
1017 ann->in_vuse_list = 1;
1018 }
1019
1020 VEC_safe_push (tree, heap, build_vuses, (tree)var);
1021 }
1022
1023
1024 /* Add VAR to the list of virtual must definitions. */
1025
1026 static inline void
1027 append_v_must_def (tree var)
1028 {
1029 unsigned i;
1030
1031 /* Don't allow duplicate entries. */
1032 for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
1033 if (var == VEC_index (tree, build_v_must_defs, i))
1034 return;
1035
1036 VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
1037 }
1038
1039
1040 /* REF is a tree that contains the entire pointer dereference
1041 expression, if available, or NULL otherwise. ALIAS is the variable
1042 we are asking if REF can access. OFFSET and SIZE come from the
1043 memory access expression that generated this virtual operand. */
1044
1045 static bool
1046 access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
1047 HOST_WIDE_INT size)
1048 {
1049 bool offsetgtz = offset > 0;
1050 unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
1051 tree base = ref ? get_base_address (ref) : NULL;
1052
1053 /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
1054 using a call-clobbered memory tag. By definition, call-clobbered
1055 memory tags can always touch .GLOBAL_VAR. */
1056 if (alias == gimple_global_var (cfun))
1057 return true;
1058
1059 /* If ALIAS is an SFT, it can't be touched if the offset
1060 and size of the access is not overlapping with the SFT offset and
1061 size. This is only true if we are accessing through a pointer
1062 to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
1063 be accessing through a pointer to some substruct of the
1064 structure, and if we try to prune there, we will have the wrong
1065 offset, and get the wrong answer.
1066 i.e., we can't prune without more work if we have something like
1067
1068 struct gcc_target
1069 {
1070 struct asm_out
1071 {
1072 const char *byte_op;
1073 struct asm_int_op
1074 {
1075 const char *hi;
1076 } aligned_op;
1077 } asm_out;
1078 } targetm;
1079
1080 foo = &targetm.asm_out.aligned_op;
1081 return foo->hi;
1082
1083 SFT.1, which represents hi, will have SFT_OFFSET=32 because in
1084 terms of SFT_PARENT_VAR, that is where it is.
1085 However, the access through the foo pointer will be at offset 0. */
1086 if (size != -1
1087 && TREE_CODE (alias) == STRUCT_FIELD_TAG
1088 && base
1089 && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
1090 && !overlap_subvar (offset, size, alias, NULL))
1091 {
1092 #ifdef ACCESS_DEBUGGING
1093 fprintf (stderr, "Access to ");
1094 print_generic_expr (stderr, ref, 0);
1095 fprintf (stderr, " may not touch ");
1096 print_generic_expr (stderr, alias, 0);
1097 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1098 #endif
1099 return false;
1100 }
1101
1102 /* With strict aliasing, it is impossible for a component access
1103 through a pointer to touch a random variable, unless that
1104 variable *is* a structure or a pointer.
1105
1106 That is, given p->c, and some random global variable b,
1107 there is no legal way that p->c could be an access to b.
1108
1109 Without strict aliasing on, we consider it legal to do something
1110 like:
1111
1112 struct foos { int l; };
1113 int foo;
1114 static struct foos *getfoo(void);
1115 int main (void)
1116 {
1117 struct foos *f = getfoo();
1118 f->l = 1;
1119 foo = 2;
1120 if (f->l == 1)
1121 abort();
1122 exit(0);
1123 }
1124 static struct foos *getfoo(void)
1125 { return (struct foos *)&foo; }
1126
1127 (taken from 20000623-1.c)
1128
1129 The docs also say/imply that access through union pointers
1130 is legal (but *not* if you take the address of the union member,
1131 i.e. the inverse), such that you can do
1132
1133 typedef union {
1134 int d;
1135 } U;
1136
1137 int rv;
1138 void breakme()
1139 {
1140 U *rv0;
1141 U *pretmp = (U*)&rv;
1142 rv0 = pretmp;
1143 rv0->d = 42;
1144 }
1145 To implement this, we just punt on accesses through union
1146 pointers entirely.
1147 */
1148 else if (ref
1149 && flag_strict_aliasing
1150 && TREE_CODE (ref) != INDIRECT_REF
1151 && !MTAG_P (alias)
1152 && (TREE_CODE (base) != INDIRECT_REF
1153 || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
1154 && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
1155 && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
1156 #if 0
1157 /* FIXME: PR tree-optimization/29680. */
1158 && !var_ann (alias)->is_heapvar
1159 #else
1160 && !POINTER_TYPE_P (TREE_TYPE (alias))
1161 #endif
1162 /* When the struct has the may_alias attribute attached to it, its
1163 alias set is zero and we need to return true, so do not prune. */
1164 && get_alias_set (base))
1165 {
1166 #ifdef ACCESS_DEBUGGING
1167 fprintf (stderr, "Access to ");
1168 print_generic_expr (stderr, ref, 0);
1169 fprintf (stderr, " may not touch ");
1170 print_generic_expr (stderr, alias, 0);
1171 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1172 #endif
1173 return false;
1174 }
1175
1176 /* If the offset of the access is greater than the size of one of
1177 the possible aliases, it can't be touching that alias, because it
1178 would be past the end of the structure. */
1179 else if (ref
1180 && flag_strict_aliasing
1181 && TREE_CODE (ref) != INDIRECT_REF
1182 && !MTAG_P (alias)
1183 && !POINTER_TYPE_P (TREE_TYPE (alias))
1184 && offsetgtz
1185 && DECL_SIZE (alias)
1186 && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
1187 && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
1188 {
1189 #ifdef ACCESS_DEBUGGING
1190 fprintf (stderr, "Access to ");
1191 print_generic_expr (stderr, ref, 0);
1192 fprintf (stderr, " may not touch ");
1193 print_generic_expr (stderr, alias, 0);
1194 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1195 #endif
1196 return false;
1197 }
1198
1199 return true;
1200 }
1201
1202
1203 /* Add VAR to the virtual operands array. FLAGS is as in
1204 get_expr_operands. FULL_REF is a tree that contains the entire
1205 pointer dereference expression, if available, or NULL otherwise.
1206 OFFSET and SIZE come from the memory access expression that
1207 generated this virtual operand. FOR_CLOBBER is true if this is
1208 adding a virtual operand for a call clobber. */
1209
1210 static void
1211 add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
1212 tree full_ref, HOST_WIDE_INT offset,
1213 HOST_WIDE_INT size, bool for_clobber)
1214 {
1215 VEC(tree,gc) *aliases;
1216 tree sym;
1217 var_ann_t v_ann;
1218
1219 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1220 v_ann = var_ann (sym);
1221
1222 /* Mark statements with volatile operands. Optimizers should back
1223 off from statements having volatile operands. */
1224 if (TREE_THIS_VOLATILE (sym) && s_ann)
1225 s_ann->has_volatile_ops = true;
1226
1227 /* If the variable cannot be modified and this is a V_MAY_DEF, change
1228 it into a VUSE. This happens when read-only variables are marked
1229 call-clobbered and/or aliased to writable variables. We only do
1230 this for non-specific stores.
1231
1232 Note that if this is a specific store, i.e. associated with a
1233 modify_expr, then we can't suppress the V_MAY_DEF, lest we run
1234 into validation problems.
1235
1236 This can happen when programs cast away const, leaving us with a
1237 store to read-only memory. If the statement is actually executed
1238 at runtime, then the program is ill formed. If the statement is
1239 not executed then all is well. At the very least, we cannot ICE. */
1240 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1241 flags &= ~(opf_is_def | opf_kill_def);
1242
1243 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1244 virtual operands, unless the caller has specifically requested
1245 not to add virtual operands (used when adding operands inside an
1246 ADDR_EXPR expression). */
1247 if (flags & opf_no_vops)
1248 return;
1249
1250 aliases = v_ann->may_aliases;
1251 if (aliases == NULL)
1252 {
1253 /* The variable is not aliased or it is an alias tag. */
1254 if (flags & opf_is_def)
1255 {
1256 if (flags & opf_kill_def)
1257 {
1258 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1259 variable definitions. */
1260 gcc_assert (!MTAG_P (var)
1261 || TREE_CODE (var) == STRUCT_FIELD_TAG);
1262 append_v_must_def (var);
1263 }
1264 else
1265 {
1266 /* Add a V_MAY_DEF for call-clobbered variables and
1267 memory tags. */
1268 append_v_may_def (var);
1269 }
1270 }
1271 else
1272 append_vuse (var);
1273 }
1274 else
1275 {
1276 unsigned i;
1277 tree al;
1278
1279 /* The variable is aliased. Add its aliases to the virtual
1280 operands. */
1281 gcc_assert (VEC_length (tree, aliases) != 0);
1282
1283 if (flags & opf_is_def)
1284 {
1285
1286 bool none_added = true;
1287
1288 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1289 {
1290 if (!access_can_touch_variable (full_ref, al, offset, size))
1291 continue;
1292
1293 none_added = false;
1294 append_v_may_def (al);
1295 }
1296
1297 /* If the variable is also an alias tag, add a virtual
1298 operand for it, otherwise we will miss representing
1299 references to the members of the variable's alias set.
1300 This fixes the bug in gcc.c-torture/execute/20020503-1.c.
1301
1302 It is also necessary to add bare defs on clobbers for
1303 SMT's, so that bare SMT uses caused by pruning all the
1304 aliases will link up properly with calls. In order to
1305 keep the number of these bare defs we add down to the
1306 minimum necessary, we keep track of which SMT's were used
1307 alone in statement vdefs or VUSEs. */
1308 if (v_ann->is_aliased
1309 || none_added
1310 || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
1311 && for_clobber
1312 && SMT_USED_ALONE (var)))
1313 {
1314 /* Every bare SMT def we add should have SMT_USED_ALONE
1315 set on it, or else we will get the wrong answer on
1316 clobbers. */
1317 if (none_added
1318 && !updating_used_alone && gimple_aliases_computed_p (cfun)
1319 && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
1320 gcc_assert (SMT_USED_ALONE (var));
1321
1322 append_v_may_def (var);
1323 }
1324 }
1325 else
1326 {
1327 bool none_added = true;
1328 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1329 {
1330 if (!access_can_touch_variable (full_ref, al, offset, size))
1331 continue;
1332 none_added = false;
1333 append_vuse (al);
1334 }
1335
1336 /* Similarly, append a virtual use for VAR itself, when
1337 it is an alias tag. */
1338 if (v_ann->is_aliased || none_added)
1339 append_vuse (var);
1340 }
1341 }
1342 }
1343
1344
1345 /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
1346 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1347 the statement's real operands, otherwise it is added to virtual
1348 operands. */
1349
1350 static void
1351 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1352 {
1353 bool is_real_op;
1354 tree var, sym;
1355 var_ann_t v_ann;
1356
1357 var = *var_p;
1358 gcc_assert (SSA_VAR_P (var));
1359
1360 is_real_op = is_gimple_reg (var);
1361
1362 /* If this is a real operand, the operand is either an SSA name or a
1363 decl. Virtual operands may only be decls. */
1364 gcc_assert (is_real_op || DECL_P (var));
1365
1366 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1367 v_ann = var_ann (sym);
1368
1369 /* Mark statements with volatile operands. Optimizers should back
1370 off from statements having volatile operands. */
1371 if (TREE_THIS_VOLATILE (sym) && s_ann)
1372 s_ann->has_volatile_ops = true;
1373
1374 if (is_real_op)
1375 {
1376 /* The variable is a GIMPLE register. Add it to real operands. */
1377 if (flags & opf_is_def)
1378 append_def (var_p);
1379 else
1380 append_use (var_p);
1381 }
1382 else
1383 add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
1384 }
1385
1386
1387 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1388 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1389
1390 STMT is the statement being processed, EXPR is the INDIRECT_REF
1391 that got us here.
1392
1393 FLAGS is as in get_expr_operands.
1394
1395 FULL_REF contains the full pointer dereference expression, if we
1396 have it, or NULL otherwise.
1397
1398 OFFSET and SIZE are the location of the access inside the
1399 dereferenced pointer, if known.
1400
1401 RECURSE_ON_BASE should be set to true if we want to continue
1402 calling get_expr_operands on the base pointer, and false if
1403 something else will do it for us. */
1404
1405 static void
1406 get_indirect_ref_operands (tree stmt, tree expr, int flags,
1407 tree full_ref,
1408 HOST_WIDE_INT offset, HOST_WIDE_INT size,
1409 bool recurse_on_base)
1410 {
1411 tree *pptr = &TREE_OPERAND (expr, 0);
1412 tree ptr = *pptr;
1413 stmt_ann_t s_ann = stmt_ann (stmt);
1414
1415 /* Stores into INDIRECT_REF operands are never killing definitions. */
1416 flags &= ~opf_kill_def;
1417
1418 if (SSA_VAR_P (ptr))
1419 {
1420 struct ptr_info_def *pi = NULL;
1421
1422 /* If PTR has flow-sensitive points-to information, use it. */
1423 if (TREE_CODE (ptr) == SSA_NAME
1424 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1425 && pi->name_mem_tag)
1426 {
1427 /* PTR has its own memory tag. Use it. */
1428 add_virtual_operand (pi->name_mem_tag, s_ann, flags,
1429 full_ref, offset, size, false);
1430 }
1431 else
1432 {
1433 /* If PTR is not an SSA_NAME or it doesn't have a name
1434 tag, use its symbol memory tag. */
1435 var_ann_t v_ann;
1436
1437 /* If we are emitting debugging dumps, display a warning if
1438 PTR is an SSA_NAME with no flow-sensitive alias
1439 information. That means that we may need to compute
1440 aliasing again. */
1441 if (dump_file
1442 && TREE_CODE (ptr) == SSA_NAME
1443 && pi == NULL)
1444 {
1445 fprintf (dump_file,
1446 "NOTE: no flow-sensitive alias info for ");
1447 print_generic_expr (dump_file, ptr, dump_flags);
1448 fprintf (dump_file, " in ");
1449 print_generic_stmt (dump_file, stmt, dump_flags);
1450 }
1451
1452 if (TREE_CODE (ptr) == SSA_NAME)
1453 ptr = SSA_NAME_VAR (ptr);
1454 v_ann = var_ann (ptr);
1455
1456 if (v_ann->symbol_mem_tag)
1457 add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
1458 full_ref, offset, size, false);
1459 }
1460 }
1461 else if (TREE_CODE (ptr) == INTEGER_CST)
1462 {
1463 /* If a constant is used as a pointer, we can't generate a real
1464 operand for it but we mark the statement volatile to prevent
1465 optimizations from messing things up. */
1466 if (s_ann)
1467 s_ann->has_volatile_ops = true;
1468 return;
1469 }
1470 else
1471 {
1472 /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
1473 gcc_unreachable ();
1474 }
1475
1476 /* If requested, add a USE operand for the base pointer. */
1477 if (recurse_on_base)
1478 get_expr_operands (stmt, pptr, opf_none);
1479 }
1480
1481
1482 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1483
1484 static void
1485 get_tmr_operands (tree stmt, tree expr, int flags)
1486 {
1487 tree tag = TMR_TAG (expr), ref;
1488 HOST_WIDE_INT offset, size, maxsize;
1489 subvar_t svars, sv;
1490 stmt_ann_t s_ann = stmt_ann (stmt);
1491
1492 /* First record the real operands. */
1493 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1494 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1495
1496 /* MEM_REFs should never be killing. */
1497 flags &= ~opf_kill_def;
1498
1499 if (TMR_SYMBOL (expr))
1500 {
1501 stmt_ann_t ann = stmt_ann (stmt);
1502 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1503 }
1504
1505 if (!tag)
1506 {
1507 /* Something weird, so ensure that we will be careful. */
1508 stmt_ann (stmt)->has_volatile_ops = true;
1509 return;
1510 }
1511
1512 if (DECL_P (tag))
1513 {
1514 get_expr_operands (stmt, &tag, flags);
1515 return;
1516 }
1517
1518 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1519 gcc_assert (ref != NULL_TREE);
1520 svars = get_subvars_for_var (ref);
1521 for (sv = svars; sv; sv = sv->next)
1522 {
1523 bool exact;
1524 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1525 {
1526 int subvar_flags = flags;
1527 if (!exact || size != maxsize)
1528 subvar_flags &= ~opf_kill_def;
1529 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1530 }
1531 }
1532 }
1533
1534
1535 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1536 clobbered variables in the function. */
1537
1538 static void
1539 add_call_clobber_ops (tree stmt, tree callee)
1540 {
1541 unsigned u;
1542 bitmap_iterator bi;
1543 stmt_ann_t s_ann = stmt_ann (stmt);
1544 bitmap not_read_b, not_written_b;
1545
1546 /* Functions that are not const, pure, or noreturn may clobber
1547 call-clobbered variables. */
1548 if (s_ann)
1549 s_ann->makes_clobbering_call = true;
1550
1551 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1552 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1553 if (gimple_global_var (cfun))
1554 {
1555 tree var = gimple_global_var (cfun);
1556 add_stmt_operand (&var, s_ann, opf_is_def);
1557 return;
1558 }
1559
1560 /* Get info for local and module level statics. There is a bit
1561 set for each static if the call being processed does not read
1562 or write that variable. */
1563 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1564 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1565 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1566 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
1567 {
1568 tree var = referenced_var_lookup (u);
1569 unsigned int escape_mask = var_ann (var)->escape_mask;
1570 tree real_var = var;
1571 bool not_read;
1572 bool not_written;
1573
1574 /* Not read and not written are computed on regular vars, not
1575 subvars, so look at the parent var if this is an SFT. */
1576 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1577 real_var = SFT_PARENT_VAR (var);
1578
1579 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1580 DECL_UID (real_var)) : false;
1581 not_written = not_written_b ? bitmap_bit_p (not_written_b,
1582 DECL_UID (real_var)) : false;
1583 gcc_assert (!unmodifiable_var_p (var));
1584
1585 clobber_stats.clobbered_vars++;
1586
1587 /* See if this variable is really clobbered by this function. */
1588
1589 /* Trivial case: Things escaping only to pure/const are not
1590 clobbered by non-pure-const, and only read by pure/const. */
1591 if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
1592 {
1593 tree call = get_call_expr_in (stmt);
1594 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1595 {
1596 add_stmt_operand (&var, s_ann, opf_none);
1597 clobber_stats.unescapable_clobbers_avoided++;
1598 continue;
1599 }
1600 else
1601 {
1602 clobber_stats.unescapable_clobbers_avoided++;
1603 continue;
1604 }
1605 }
1606
1607 if (not_written)
1608 {
1609 clobber_stats.static_write_clobbers_avoided++;
1610 if (!not_read)
1611 add_stmt_operand (&var, s_ann, opf_none);
1612 else
1613 clobber_stats.static_read_clobbers_avoided++;
1614 }
1615 else
1616 add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
1617 }
1618 }
1619
1620
1621 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1622 function. */
1623
1624 static void
1625 add_call_read_ops (tree stmt, tree callee)
1626 {
1627 unsigned u;
1628 bitmap_iterator bi;
1629 stmt_ann_t s_ann = stmt_ann (stmt);
1630 bitmap not_read_b;
1631
1632 /* If the function is not pure, it may reference memory. Add
1633 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1634 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1635 if (gimple_global_var (cfun))
1636 {
1637 tree var = gimple_global_var (cfun);
1638 add_stmt_operand (&var, s_ann, opf_none);
1639 return;
1640 }
1641
1642 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1643
1644 /* Add a VUSE for each call-clobbered variable. */
1645 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
1646 {
1647 tree var = referenced_var (u);
1648 tree real_var = var;
1649 bool not_read;
1650
1651 clobber_stats.readonly_clobbers++;
1652
1653 /* Not read and not written are computed on regular vars, not
1654 subvars, so look at the parent var if this is an SFT. */
1655
1656 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1657 real_var = SFT_PARENT_VAR (var);
1658
1659 not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
1660 : false;
1661
1662 if (not_read)
1663 {
1664 clobber_stats.static_readonly_clobbers_avoided++;
1665 continue;
1666 }
1667
1668 add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
1669 }
1670 }
1671
1672
1673 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1674
1675 static void
1676 get_call_expr_operands (tree stmt, tree expr)
1677 {
1678 tree op;
1679 int call_flags = call_expr_flags (expr);
1680
1681 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1682 operands for all the symbols that have been found to be
1683 call-clobbered.
1684
1685 Note that if aliases have not been computed, the global effects
1686 of calls will not be included in the SSA web. This is fine
1687 because no optimizer should run before aliases have been
1688 computed. By not bothering with virtual operands for CALL_EXPRs
1689 we avoid adding superfluous virtual operands, which can be a
1690 significant compile time sink (See PR 15855). */
1691 if (gimple_aliases_computed_p (cfun)
1692 && !bitmap_empty_p (gimple_call_clobbered_vars (cfun))
1693 && !(call_flags & ECF_NOVOPS))
1694 {
1695 /* A 'pure' or a 'const' function never call-clobbers anything.
1696 A 'noreturn' function might, but since we don't return anyway
1697 there is no point in recording that. */
1698 if (TREE_SIDE_EFFECTS (expr)
1699 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1700 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1701 else if (!(call_flags & ECF_CONST))
1702 add_call_read_ops (stmt, get_callee_fndecl (expr));
1703 }
1704
1705 /* Find uses in the called function. */
1706 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1707
1708 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1709 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1710
1711 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1712 }
1713
1714
1715 /* Scan the operands of the ASM_EXPR statement STMT. */
1716
1717 static void
1718 get_asm_expr_operands (tree stmt)
1719 {
1720 stmt_ann_t s_ann = stmt_ann (stmt);
1721 int noutputs = list_length (ASM_OUTPUTS (stmt));
1722 const char **oconstraints
1723 = (const char **) alloca ((noutputs) * sizeof (const char *));
1724 int i;
1725 tree link;
1726 const char *constraint;
1727 bool allows_mem, allows_reg, is_inout;
1728
1729 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1730 {
1731 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1732 oconstraints[i] = constraint;
1733 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
1734 &allows_reg, &is_inout);
1735
1736 /* This should have been split in gimplify_asm_expr. */
1737 gcc_assert (!allows_reg || !is_inout);
1738
1739 /* Memory operands are addressable. Note that STMT needs the
1740 address of this operand. */
1741 if (!allows_reg && allows_mem)
1742 {
1743 tree t = get_base_address (TREE_VALUE (link));
1744 if (t && DECL_P (t) && s_ann)
1745 add_to_addressable_set (t, &s_ann->addresses_taken);
1746 }
1747
1748 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1749 }
1750
1751 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1752 {
1753 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1754 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1755 oconstraints, &allows_mem, &allows_reg);
1756
1757 /* Memory operands are addressable. Note that STMT needs the
1758 address of this operand. */
1759 if (!allows_reg && allows_mem)
1760 {
1761 tree t = get_base_address (TREE_VALUE (link));
1762 if (t && DECL_P (t) && s_ann)
1763 add_to_addressable_set (t, &s_ann->addresses_taken);
1764 }
1765
1766 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1767 }
1768
1769
1770 /* Clobber memory for asm ("" : : : "memory"); */
1771 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1772 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1773 {
1774 unsigned i;
1775 bitmap_iterator bi;
1776
1777 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1778 decided to group them). */
1779 if (gimple_global_var (cfun))
1780 {
1781 tree var = gimple_global_var (cfun);
1782 add_stmt_operand (&var, s_ann, opf_is_def);
1783 }
1784 else
1785 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
1786 {
1787 tree var = referenced_var (i);
1788 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1789 }
1790
1791 /* Now clobber all addressables. */
1792 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
1793 {
1794 tree var = referenced_var (i);
1795
1796 /* Subvars are explicitly represented in this list, so
1797 we don't need the original to be added to the clobber
1798 ops, but the original *will* be in this list because
1799 we keep the addressability of the original
1800 variable up-to-date so we don't screw up the rest of
1801 the backend. */
1802 if (var_can_have_subvars (var)
1803 && get_subvars_for_var (var) != NULL)
1804 continue;
1805
1806 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1807 }
1808
1809 break;
1810 }
1811 }
1812
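/* Editorial illustration (not part of the original sources) for the
   ASM_EXPR scanner above: given a statement such as

       __asm__ __volatile__ ("" : "=m" (x) : "r" (y_1) : "memory");

   the "=m" output makes 'x' addressable and scans it as a definition,
   the "r" input adds a use of 'y_1', and the "memory" clobber adds
   non-specific definitions of every call-clobbered and addressable
   variable (or of .GLOBAL_VAR when the variables have been grouped).  */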
1813
1814 /* Scan operands for the assignment expression EXPR in statement STMT. */
1815
1816 static void
1817 get_modify_expr_operands (tree stmt, tree expr)
1818 {
1819 /* First get operands from the RHS. */
1820 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1821
1822 /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
1823 registers. If the LHS is a store to memory, we will either need
1824 a preserving definition (V_MAY_DEF) or a killing definition
1825 (V_MUST_DEF).
1826
1827 Preserving definitions are those that modify a part of an
1828 aggregate object for which no subvars have been computed (or the
1829 reference does not correspond exactly to one of them). Stores
1830 through a pointer are also represented with V_MAY_DEF operators.
1831
1832 The determination of whether to use a preserving or a killing
1833 definition is done while scanning the LHS of the assignment. By
1834 default, assume that we will emit a V_MUST_DEF. */
1835 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def|opf_kill_def);
1836 }
1837
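/* Editorial illustration (not part of the original sources) for the
   routine above, showing which kind of definition the LHS receives
   under the usual GIMPLE forms:

       x_1 = y_2 + z_3;   x is a GIMPLE register       -> real DEF of x_1
       a = y_2;           non-aliased, no subvars       -> V_MUST_DEF <a>
       a.f = y_2;         no subvars computed for 'a'   -> V_MAY_DEF <a>
       *p_1 = y_2;        store through a pointer       -> V_MAY_DEF on the
                                                           memory tag of p_1

   The opf_kill_def bit requested by default here is dropped again for
   stores through INDIRECT_REFs and for inexact subvar overlaps.  */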
1838
1839 /* Recursively scan the expression pointed to by EXPR_P in statement
1840 STMT. FLAGS is one of the OPF_* constants modifying how to
1841 interpret the operands found. */
1842
1843 static void
1844 get_expr_operands (tree stmt, tree *expr_p, int flags)
1845 {
1846 enum tree_code code;
1847 enum tree_code_class class;
1848 tree expr = *expr_p;
1849 stmt_ann_t s_ann = stmt_ann (stmt);
1850
1851 if (expr == NULL)
1852 return;
1853
1854 code = TREE_CODE (expr);
1855 class = TREE_CODE_CLASS (code);
1856
1857 switch (code)
1858 {
1859 case ADDR_EXPR:
1860 /* Taking the address of a variable does not represent a
1861 reference to it, but the fact that the statement takes its
1862 address will be of interest to some passes (e.g. alias
1863 resolution). */
1864 add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
1865
1866 /* If the address is invariant, there may be no interesting
1867 variable references inside. */
1868 if (is_gimple_min_invariant (expr))
1869 return;
1870
1871 /* Otherwise, there may be variables referenced inside but there
1872 should be no VUSEs created, since the referenced objects are
1873 not really accessed. The only operands that we should find
1874 here are ARRAY_REF indices which will always be real operands
1875 (GIMPLE does not allow non-registers as array indices). */
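/* E.g. (hypothetical): in 'p_1 = &a[i_2]' only i_2 becomes a real
   USE operand; no VUSE of 'a' is added, since 'a' itself is neither
   read nor written by the statement.  */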
1876 flags |= opf_no_vops;
1877 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1878 return;
1879
1880 case SSA_NAME:
1881 case STRUCT_FIELD_TAG:
1882 case SYMBOL_MEMORY_TAG:
1883 case NAME_MEMORY_TAG:
1884 add_stmt_operand (expr_p, s_ann, flags);
1885 return;
1886
1887 case VAR_DECL:
1888 case PARM_DECL:
1889 case RESULT_DECL:
1890 {
1891 subvar_t svars;
1892
1893 /* Add the subvars for a variable, if it has subvars, to DEFS
1894 or USES. Otherwise, add the variable itself. Whether it
1895 goes to USES or DEFS depends on the operand flags. */
1896 if (var_can_have_subvars (expr)
1897 && (svars = get_subvars_for_var (expr)))
1898 {
1899 subvar_t sv;
1900 for (sv = svars; sv; sv = sv->next)
1901 add_stmt_operand (&sv->var, s_ann, flags);
1902 }
1903 else
1904 add_stmt_operand (expr_p, s_ann, flags);
1905
1906 return;
1907 }
1908
1909 case MISALIGNED_INDIRECT_REF:
1910 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1911 /* fall through */
1912
1913 case ALIGN_INDIRECT_REF:
1914 case INDIRECT_REF:
1915 get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
1916 return;
1917
1918 case TARGET_MEM_REF:
1919 get_tmr_operands (stmt, expr, flags);
1920 return;
1921
1922 case ARRAY_REF:
1923 case ARRAY_RANGE_REF:
1924 case COMPONENT_REF:
1925 case REALPART_EXPR:
1926 case IMAGPART_EXPR:
1927 {
1928 tree ref;
1929 HOST_WIDE_INT offset, size, maxsize;
1930 bool none = true;
1931
1932 /* This component reference becomes an access to all of the
1933 subvariables it can touch, if we can determine that, but
1934 *NOT* the real one. If we can't determine which fields we
1935 could touch, the recursion will eventually get to a
1936 variable and add *all* of its subvars, or whatever is the
1937 minimum correct subset. */
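/* Sketch (hypothetical names): if 's' has subvars SFT.1 (field 'a')
   and SFT.2 (field 'b'), the reference 's.a' adds an operand only
   for SFT.1; the base variable 's' gets no virtual operand here.  */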
1938 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
1939 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
1940 {
1941 subvar_t sv;
1942 subvar_t svars = get_subvars_for_var (ref);
1943
1944 for (sv = svars; sv; sv = sv->next)
1945 {
1946 bool exact;
1947
1948 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1949 {
1950 int subvar_flags = flags;
1951 none = false;
1952 if (!exact || size != maxsize)
1953 subvar_flags &= ~opf_kill_def;
1954 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1955 }
1956 }
1957
1958 if (!none)
1959 flags |= opf_no_vops;
1960 }
1961 else if (TREE_CODE (ref) == INDIRECT_REF)
1962 {
1963 get_indirect_ref_operands (stmt, ref, flags, expr, offset,
1964 maxsize, false);
1965 flags |= opf_no_vops;
1966 }
1967
1968 /* Even if we found subvars above, we still need to make sure we see
1969 immediate uses for d in s.a[d], in case s.a has a subvar;
1970 otherwise we would miss them. */
1971 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1972 flags & ~opf_kill_def);
1973
1974 if (code == COMPONENT_REF)
1975 {
1976 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1977 s_ann->has_volatile_ops = true;
1978 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1979 }
1980 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
1981 {
1982 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1983 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1984 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1985 }
1986
1987 return;
1988 }
1989
1990 case WITH_SIZE_EXPR:
1991 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1992 and an rvalue reference to its second argument. */
1993 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1994 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1995 return;
1996
1997 case CALL_EXPR:
1998 get_call_expr_operands (stmt, expr);
1999 return;
2000
2001 case COND_EXPR:
2002 case VEC_COND_EXPR:
2003 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
2004 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
2005 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
2006 return;
2007
2008 case MODIFY_EXPR:
2009 get_modify_expr_operands (stmt, expr);
2010 return;
2011
2012 case CONSTRUCTOR:
2013 {
2014 /* General aggregate CONSTRUCTORs have been decomposed, but they
2015 are still in use as the COMPLEX_EXPR equivalent for vectors. */
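/* E.g. (hypothetical): for a vector assignment 'v_1 = {a_2, b_3}',
   each element a_2 and b_3 below is scanned as a plain rvalue USE.  */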
2016 constructor_elt *ce;
2017 unsigned HOST_WIDE_INT idx;
2018
2019 for (idx = 0;
2020 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
2021 idx++)
2022 get_expr_operands (stmt, &ce->value, opf_none);
2023
2024 return;
2025 }
2026
2027 case BIT_FIELD_REF:
2028 /* Stores using BIT_FIELD_REF are always preserving definitions. */
2029 flags &= ~opf_kill_def;
2030
2031 /* Fallthru */
2032
2033 case TRUTH_NOT_EXPR:
2034 case VIEW_CONVERT_EXPR:
2035 do_unary:
2036 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2037 return;
2038
2039 case TRUTH_AND_EXPR:
2040 case TRUTH_OR_EXPR:
2041 case TRUTH_XOR_EXPR:
2042 case COMPOUND_EXPR:
2043 case OBJ_TYPE_REF:
2044 case ASSERT_EXPR:
2045 do_binary:
2046 {
2047 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2048 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2049 return;
2050 }
2051
2052 case DOT_PROD_EXPR:
2053 case REALIGN_LOAD_EXPR:
2054 {
2055 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2056 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2057 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
2058 return;
2059 }
2060
2061 case BLOCK:
2062 case FUNCTION_DECL:
2063 case EXC_PTR_EXPR:
2064 case FILTER_EXPR:
2065 case LABEL_DECL:
2066 case CONST_DECL:
2067 case OMP_PARALLEL:
2068 case OMP_SECTIONS:
2069 case OMP_FOR:
2070 case OMP_SINGLE:
2071 case OMP_MASTER:
2072 case OMP_ORDERED:
2073 case OMP_CRITICAL:
2074 case OMP_RETURN:
2075 case OMP_CONTINUE:
2076 /* Expressions that make no memory references. */
2077 return;
2078
2079 default:
2080 if (class == tcc_unary)
2081 goto do_unary;
2082 if (class == tcc_binary || class == tcc_comparison)
2083 goto do_binary;
2084 if (class == tcc_constant || class == tcc_type)
2085 return;
2086 }
2087
2088 /* If we get here, something has gone wrong. */
2089 #ifdef ENABLE_CHECKING
2090 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
2091 debug_tree (expr);
2092 fputs ("\n", stderr);
2093 #endif
2094 gcc_unreachable ();
2095 }
2096
2097
2098 /* Parse STMT looking for operands. When finished, the various
2099 build_* operand vectors will have potential operands in them. */
2100
2101 static void
2102 parse_ssa_operands (tree stmt)
2103 {
2104 enum tree_code code;
2105
2106 code = TREE_CODE (stmt);
2107 switch (code)
2108 {
2109 case MODIFY_EXPR:
2110 get_modify_expr_operands (stmt, stmt);
2111 break;
2112
2113 case COND_EXPR:
2114 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
2115 break;
2116
2117 case SWITCH_EXPR:
2118 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
2119 break;
2120
2121 case ASM_EXPR:
2122 get_asm_expr_operands (stmt);
2123 break;
2124
2125 case RETURN_EXPR:
2126 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
2127 break;
2128
2129 case GOTO_EXPR:
2130 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
2131 break;
2132
2133 case LABEL_EXPR:
2134 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
2135 break;
2136
2137 case BIND_EXPR:
2138 case CASE_LABEL_EXPR:
2139 case TRY_CATCH_EXPR:
2140 case TRY_FINALLY_EXPR:
2141 case EH_FILTER_EXPR:
2142 case CATCH_EXPR:
2143 case RESX_EXPR:
2144 /* These nodes contain no variable references. */
2145 break;
2146
2147 default:
2148 /* Notice that if get_expr_operands tries to use &STMT as the
2149 operand pointer (which may only happen for USE operands), we
2150 will fail in add_stmt_operand. This default will handle
2151 statements like empty statements, or CALL_EXPRs that may
2152 appear on the RHS of a statement or as statements themselves. */
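/* E.g. (hypothetical): a call used only for its side effects, such
   as 'foo (x_1);' appearing as a whole statement, is handled by
   this default case.  */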
2153 get_expr_operands (stmt, &stmt, opf_none);
2154 break;
2155 }
2156 }
2157
2158
2159 /* Create an operands cache for STMT. */
2160
2161 static void
2162 build_ssa_operands (tree stmt)
2163 {
2164 stmt_ann_t ann = get_stmt_ann (stmt);
2165
2166 /* Initially assume that the statement has no volatile operands. */
2167 if (ann)
2168 ann->has_volatile_ops = false;
2169
2170 start_ssa_stmt_operands ();
2171
2172 parse_ssa_operands (stmt);
2173 operand_build_sort_virtual (build_vuses);
2174 operand_build_sort_virtual (build_v_may_defs);
2175 operand_build_sort_virtual (build_v_must_defs);
2176
2177 finalize_ssa_stmt_operands (stmt);
2178 }
2179
2180
2181 /* Free any operands vectors in OPS. */
2182
2183 void
2184 free_ssa_operands (stmt_operands_p ops)
2185 {
2186 ops->def_ops = NULL;
2187 ops->use_ops = NULL;
2188 ops->maydef_ops = NULL;
2189 ops->mustdef_ops = NULL;
2190 ops->vuse_ops = NULL;
2191 }
2192
2193
2194 /* Update (recompute) the operand cache for statement STMT. */
2195
2196 void
2197 update_stmt_operands (tree stmt)
2198 {
2199 stmt_ann_t ann = get_stmt_ann (stmt);
2200
2201 /* If update_stmt_operands is called before SSA is initialized, do
2202 nothing. */
2203 if (!ssa_operands_active ())
2204 return;
2205
2206 /* The optimizers cannot handle statements that are nothing but a
2207 _DECL. This indicates a bug in the gimplifier. */
2208 gcc_assert (!SSA_VAR_P (stmt));
2209
2210 gcc_assert (ann->modified);
2211
2212 timevar_push (TV_TREE_OPS);
2213
2214 build_ssa_operands (stmt);
2215
2216 /* Clear the modified bit for STMT. */
2217 ann->modified = 0;
2218
2219 timevar_pop (TV_TREE_OPS);
2220 }
2221
2222
2223 /* Copy the virtual operands from SRC to DEST. */
2224
2225 void
2226 copy_virtual_operands (tree dest, tree src)
2227 {
2228 tree t;
2229 ssa_op_iter iter, old_iter;
2230 use_operand_p use_p, u2;
2231 def_operand_p def_p, d2;
2232
2233 build_ssa_operands (dest);
2234
2235 /* Copy all the virtual fields. */
2236 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
2237 append_vuse (t);
2238 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
2239 append_v_may_def (t);
2240 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
2241 append_v_must_def (t);
2242
2243 if (VEC_length (tree, build_vuses) == 0
2244 && VEC_length (tree, build_v_may_defs) == 0
2245 && VEC_length (tree, build_v_must_defs) == 0)
2246 return;
2247
2248 /* Now commit the virtual operands to this stmt. */
2249 finalize_ssa_v_must_defs (dest);
2250 finalize_ssa_v_may_defs (dest);
2251 finalize_ssa_vuses (dest);
2252
2253 /* Finally, set the fields to the same values as the originals. */
2254 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
2255 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
2256 {
2257 gcc_assert (!op_iter_done (&old_iter));
2258 SET_USE (use_p, t);
2259 t = op_iter_next_tree (&old_iter);
2260 }
2261 gcc_assert (op_iter_done (&old_iter));
2262
2263 op_iter_init_maydef (&old_iter, src, &u2, &d2);
2264 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
2265 {
2266 gcc_assert (!op_iter_done (&old_iter));
2267 SET_USE (use_p, USE_FROM_PTR (u2));
2268 SET_DEF (def_p, DEF_FROM_PTR (d2));
2269 op_iter_next_maymustdef (&u2, &d2, &old_iter);
2270 }
2271 gcc_assert (op_iter_done (&old_iter));
2272
2273 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
2274 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
2275 {
2276 gcc_assert (!op_iter_done (&old_iter));
2277 SET_USE (use_p, USE_FROM_PTR (u2));
2278 SET_DEF (def_p, DEF_FROM_PTR (d2));
2279 op_iter_next_maymustdef (&u2, &d2, &old_iter);
2280 }
2281 gcc_assert (op_iter_done (&old_iter));
2282
2283 }
2284
2285
2286 /* Specifically for use in DOM's expression analysis. Given a store, we
2287 create an artificial stmt which looks like a load from the store; this can
2288 be used to eliminate redundant loads. OLD_STMT is the statement that
2289 performs the store, and NEW_STMT is the new artificial load which
2290 represents a load of the values stored. */
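/* Sketch of the intended use (not verbatim from DOM): given the
   store 'a.b = x_3' carrying a V_MAY_DEF of 'a', DOM can build the
   artificial statement 'x_3 = a.b', have this function give it a
   VUSE of that V_MAY_DEF's result, and then use it to recognize a
   later identical load as redundant.  */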
2291
2292 void
2293 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
2294 {
2295 stmt_ann_t ann;
2296 tree op;
2297 ssa_op_iter iter;
2298 use_operand_p use_p;
2299 unsigned x;
2300
2301 ann = get_stmt_ann (new_stmt);
2302
2303 /* Process the stmt looking for operands. */
2304 start_ssa_stmt_operands ();
2305 parse_ssa_operands (new_stmt);
2306
2307 for (x = 0; x < VEC_length (tree, build_vuses); x++)
2308 {
2309 tree t = VEC_index (tree, build_vuses, x);
2310 if (TREE_CODE (t) != SSA_NAME)
2311 {
2312 var_ann_t ann = var_ann (t);
2313 ann->in_vuse_list = 0;
2314 }
2315 }
2316
2317 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
2318 {
2319 tree t = VEC_index (tree, build_v_may_defs, x);
2320 if (TREE_CODE (t) != SSA_NAME)
2321 {
2322 var_ann_t ann = var_ann (t);
2323 ann->in_v_may_def_list = 0;
2324 }
2325 }
2326
2327 /* Remove any virtual operands that were found. */
2328 VEC_truncate (tree, build_v_may_defs, 0);
2329 VEC_truncate (tree, build_v_must_defs, 0);
2330 VEC_truncate (tree, build_vuses, 0);
2331
2332 /* For each VDEF on the original statement, we want to create a
2333 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
2334 statement. */
2335 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
2336 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
2337 append_vuse (op);
2338
2339 /* Now build the operands for this new stmt. */
2340 finalize_ssa_stmt_operands (new_stmt);
2341
2342 /* All uses in this fake stmt must not be in the immediate use lists. */
2343 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2344 delink_imm_use (use_p);
2345 }
2346
2347
2348 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made
2349 to test the validity of the swap operation. */
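/* Possible use (illustrative only; RHS is assumed to be a binary
   expression within STMT):
   swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
   &TREE_OPERAND (rhs, 1));
   The operands are exchanged while their immediate-use links are
   kept consistent, so no full operand rescan is required.  */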
2350
2351 void
2352 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2353 {
2354 tree op0, op1;
2355 op0 = *exp0;
2356 op1 = *exp1;
2357
2358 /* If the operand cache is active, attempt to preserve the relative
2359 positions of these two operands in their respective immediate use
2360 lists. */
2361 if (ssa_operands_active () && op0 != op1)
2362 {
2363 use_optype_p use0, use1, ptr;
2364 use0 = use1 = NULL;
2365
2366 /* Find the 2 operands in the cache, if they are there. */
2367 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2368 if (USE_OP_PTR (ptr)->use == exp0)
2369 {
2370 use0 = ptr;
2371 break;
2372 }
2373
2374 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2375 if (USE_OP_PTR (ptr)->use == exp1)
2376 {
2377 use1 = ptr;
2378 break;
2379 }
2380
2381 /* Unless both uses have operand entries in the cache, there isn't much
2382 we can do at this point. Presumably we don't need to worry about it. */
2383 if (use0 && use1)
2384 {
2385 tree *tmp = USE_OP_PTR (use1)->use;
2386 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2387 USE_OP_PTR (use0)->use = tmp;
2388 }
2389 }
2390
2391 /* Now swap the data. */
2392 *exp0 = op1;
2393 *exp1 = op0;
2394 }
2395
2396
2397 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
2398 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
2399 a single variable whose address has been taken or any other valid
2400 GIMPLE memory reference (structure reference, array, etc). If the
2401 base address of REF is a decl that has sub-variables, also add all
2402 of its sub-variables. */
2403
2404 void
2405 add_to_addressable_set (tree ref, bitmap *addresses_taken)
2406 {
2407 tree var;
2408 subvar_t svars;
2409
2410 gcc_assert (addresses_taken);
2411
2412 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2413 as the only thing we take the address of. If VAR is a structure,
2414 taking the address of a field means that the whole structure may
2415 be referenced using pointer arithmetic. See PR 21407 and the
2416 ensuing mailing list discussion. */
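/* E.g. (hypothetical): for 'p_1 = &s.f', the whole of 's' (and all
   of its sub-variables, if it has any) is marked addressable, not
   just the sub-variable corresponding to field 'f'.  */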
2417 var = get_base_address (ref);
2418 if (var && SSA_VAR_P (var))
2419 {
2420 if (*addresses_taken == NULL)
2421 *addresses_taken = BITMAP_GGC_ALLOC ();
2422
2423 if (var_can_have_subvars (var)
2424 && (svars = get_subvars_for_var (var)))
2425 {
2426 subvar_t sv;
2427 for (sv = svars; sv; sv = sv->next)
2428 {
2429 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
2430 TREE_ADDRESSABLE (sv->var) = 1;
2431 }
2432 }
2433 else
2434 {
2435 bitmap_set_bit (*addresses_taken, DECL_UID (var));
2436 TREE_ADDRESSABLE (var) = 1;
2437 }
2438 }
2439 }
2440
2441
2442 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2443 Return TRUE if there is a problem, and emit an error message to F. */
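/* Note: the immediate-use list being verified is a circular,
   doubly-linked list whose sentinel is the node embedded in the
   SSA_NAME itself; the sentinel's 'use' pointer is NULL, and every
   other node must point back at VAR.  */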
2444
2445 bool
2446 verify_imm_links (FILE *f, tree var)
2447 {
2448 use_operand_p ptr, prev, list;
2449 int count;
2450
2451 gcc_assert (TREE_CODE (var) == SSA_NAME);
2452
2453 list = &(SSA_NAME_IMM_USE_NODE (var));
2454 gcc_assert (list->use == NULL);
2455
2456 if (list->prev == NULL)
2457 {
2458 gcc_assert (list->next == NULL);
2459 return false;
2460 }
2461
2462 prev = list;
2463 count = 0;
2464 for (ptr = list->next; ptr != list; )
2465 {
2466 if (prev != ptr->prev)
2467 goto error;
2468
2469 if (ptr->use == NULL)
2470 goto error; /* 2 roots, or SAFE guard node. */
2471 else if (*(ptr->use) != var)
2472 goto error;
2473
2474 prev = ptr;
2475 ptr = ptr->next;
2476
2477 /* Avoid infinite loops. 50,000,000 uses probably indicates a
2478 problem. */
2479 if (count++ > 50000000)
2480 goto error;
2481 }
2482
2483 /* Verify list in the other direction. */
2484 prev = list;
2485 for (ptr = list->prev; ptr != list; )
2486 {
2487 if (prev != ptr->next)
2488 goto error;
2489 prev = ptr;
2490 ptr = ptr->prev;
2491 if (count-- < 0)
2492 goto error;
2493 }
2494
2495 if (count != 0)
2496 goto error;
2497
2498 return false;
2499
2500 error:
2501 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2502 {
2503 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2504 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2505 }
2506 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2507 (void *)ptr->use);
2508 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2509 fprintf (f, "\n");
2510 return true;
2511 }
2512
2513
2514 /* Dump all the immediate uses of VAR to FILE. */
2515
2516 void
2517 dump_immediate_uses_for (FILE *file, tree var)
2518 {
2519 imm_use_iterator iter;
2520 use_operand_p use_p;
2521
2522 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2523
2524 print_generic_expr (file, var, TDF_SLIM);
2525 fprintf (file, " : -->");
2526 if (has_zero_uses (var))
2527 fprintf (file, " no uses.\n");
2528 else
2529 if (has_single_use (var))
2530 fprintf (file, " single use.\n");
2531 else
2532 fprintf (file, "%d uses.\n", num_imm_uses (var));
2533
2534 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2535 {
2536 if (use_p->stmt == NULL && use_p->use == NULL)
2537 fprintf (file, "***end of stmt iterator marker***\n");
2538 else
2539 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2540 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2541 else
2542 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2543 }
2544 fprintf (file, "\n");
2545 }
2546
2547
2548 /* Dump all the immediate uses to FILE. */
2549
2550 void
2551 dump_immediate_uses (FILE *file)
2552 {
2553 tree var;
2554 unsigned int x;
2555
2556 fprintf (file, "Immediate_uses: \n\n");
2557 for (x = 1; x < num_ssa_names; x++)
2558 {
2559 var = ssa_name (x);
2560 if (!var)
2561 continue;
2562 dump_immediate_uses_for (file, var);
2563 }
2564 }
2565
2566
2567 /* Dump def-use edges on stderr. */
2568
2569 void
2570 debug_immediate_uses (void)
2571 {
2572 dump_immediate_uses (stderr);
2573 }
2574
2575
2576 /* Dump def-use edges for VAR on stderr. */
2577
2578 void
2579 debug_immediate_uses_for (tree var)
2580 {
2581 dump_immediate_uses_for (stderr, var);
2582 }