1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53 The operand tree is then parsed by the various get_* routines which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
59
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
62
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
66
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had an SSA_NAME version of a
72 variable, and that same variable occurs in the same operands cache, then
73 the new cache vector will also get the same SSA_NAME.
74
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'. */
78
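/* Illustrative sketch (not part of the original sources): for a single
   statement the cache is rebuilt in three phases, all implemented in
   this file:

      start_ssa_stmt_operands ();          checks that the build_* vectors
                                           are empty
      get_expr_operands (stmt, ..., ...);  walks the stmt tree; the
                                           append_* routines fill the
                                           build_* vectors
      finalize_ssa_stmt_operands (stmt);   merges the vectors with any
                                           previous cache and attaches the
                                           new operand lists to STMT

   The driver that sequences these calls for each statement kind appears
   further down in this file.  */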
79 /* Flags to describe operand properties in helpers. */
80
81 /* By default, operands are loaded. */
82 #define opf_none 0
83
84 /* Operand is the target of an assignment expression or a
85 call-clobbered variable. */
86 #define opf_is_def (1 << 0)
87
88 /* Operand is a killing definition: the assignment completely replaces the previous value. */
89 #define opf_kill_def (1 << 1)
90
91 /* No virtual operands should be created in the expression. This is used
92 when traversing ADDR_EXPR nodes which have different semantics than
93 other expressions. Inside an ADDR_EXPR node, the only operands that we
94 need to consider are indices into arrays. For instance, &a.b[i] should
95 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
96 VUSE for 'b'. */
97 #define opf_no_vops (1 << 2)
98
99 /* Operand is a "non-specific" kill for call-clobbers and such. This
100 is used to distinguish "reset the world" events from explicit
101 MODIFY_EXPRs. */
102 #define opf_non_specific (1 << 3)
103
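/* Illustrative summary (not part of the original sources) of how the
   flags above are combined by callers later in this file:

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
                         opf_is_def | opf_kill_def);
          -- the LHS of a MODIFY_EXPR (see get_modify_expr_operands)

      flags |= opf_no_vops;
          -- operands found underneath an ADDR_EXPR, e.g. the index in
             &a.b[i] (see the ADDR_EXPR case of get_expr_operands)

      add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
          -- "reset the world" clobbers for asm ("" : : : "memory")
             statements (see get_asm_expr_operands)  */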
104 /* Array for building all the def operands. */
105 static VEC(tree,heap) *build_defs;
106
107 /* Array for building all the use operands. */
108 static VEC(tree,heap) *build_uses;
109
110 /* Array for building all the V_MAY_DEF operands. */
111 static VEC(tree,heap) *build_v_may_defs;
112
113 /* Array for building all the VUSE operands. */
114 static VEC(tree,heap) *build_vuses;
115
116 /* Array for building all the V_MUST_DEF operands. */
117 static VEC(tree,heap) *build_v_must_defs;
118
119 /* True if the SSA operands cache is active. */
120 static bool ops_active = false;
121
122 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
123 static unsigned operand_memory_index;
124
125 static void get_expr_operands (tree, tree *, int);
126
127 static def_optype_p free_defs = NULL;
128 static use_optype_p free_uses = NULL;
129 static vuse_optype_p free_vuses = NULL;
130 static maydef_optype_p free_maydefs = NULL;
131 static mustdef_optype_p free_mustdefs = NULL;
132
133 /* Allocates operand OP of the given TYPE from the appropriate free list,
134 or from newly allocated memory if the list is empty. */
135
136 #define ALLOC_OPTYPE(OP, TYPE) \
137 do \
138 { \
139 TYPE##_optype_p ret = free_##TYPE##s; \
140 if (ret) \
141 free_##TYPE##s = ret->next; \
142 else \
143 ret = ssa_operand_alloc (sizeof (*ret)); \
144 (OP) = ret; \
145 } while (0)
146
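/* As an illustration (sketch, not part of the original sources),
   ALLOC_OPTYPE (new, def), as used by add_def_op below, expands to
   roughly:

      def_optype_p ret = free_defs;
      if (ret)
        free_defs = ret->next;
      else
        ret = ssa_operand_alloc (sizeof (*ret));
      new = ret;

   i.e. operand nodes are recycled from the free_defs list before any new
   memory is carved out by ssa_operand_alloc.  */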
147 /* Return the DECL_UID of the base variable of T. */
148
149 static inline unsigned
150 get_name_decl (tree t)
151 {
152 if (TREE_CODE (t) != SSA_NAME)
153 return DECL_UID (t);
154 else
155 return DECL_UID (SSA_NAME_VAR (t));
156 }
157
158
159 /* Comparison function for qsort used in operand_build_sort_virtual. */
160
161 static int
162 operand_build_cmp (const void *p, const void *q)
163 {
164 tree e1 = *((const tree *)p);
165 tree e2 = *((const tree *)q);
166 unsigned int u1, u2;
167
168 u1 = get_name_decl (e1);
169 u2 = get_name_decl (e2);
170
171 /* We want to sort in ascending order. They can never be equal. */
172 #ifdef ENABLE_CHECKING
173 gcc_assert (u1 != u2);
174 #endif
175 return (u1 > u2 ? 1 : -1);
176 }
177
178
179 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
180
181 static inline void
182 operand_build_sort_virtual (VEC(tree,heap) *list)
183 {
184 int num = VEC_length (tree, list);
185
186 if (num < 2)
187 return;
188
189 if (num == 2)
190 {
191 if (get_name_decl (VEC_index (tree, list, 0))
192 > get_name_decl (VEC_index (tree, list, 1)))
193 {
194 /* Swap elements if in the wrong order. */
195 tree tmp = VEC_index (tree, list, 0);
196 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
197 VEC_replace (tree, list, 1, tmp);
198 }
199 return;
200 }
201
202 /* There are 3 or more elements, call qsort. */
203 qsort (VEC_address (tree, list),
204 VEC_length (tree, list),
205 sizeof (tree),
206 operand_build_cmp);
207 }
208
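/* Illustrative note (not from the original sources): the build vectors
   for virtual operands are sorted by the DECL_UID of their base variable
   before the finalize_* routines run.  For example, if build_vuses holds
   { b_7, a, c_3 } and (made-up values) DECL_UID (a) = 10,
   DECL_UID (b) = 12, DECL_UID (c) = 27, operand_build_sort_virtual
   reorders it to { a, b_7, c_3 }.  Both the old operand list and the new
   vector are then in the same order, which lets the finalizers merge them
   in a single linear pass.  */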
209
210 /* Return true if the SSA operands cache is active. */
211
212 bool
213 ssa_operands_active (void)
214 {
215 return ops_active;
216 }
217
218
219 /* Structure storing statistics on how many call clobbers we have, and
220 how many were avoided. */
221
222 static struct
223 {
224 /* Number of call-clobbered ops we attempt to add to calls in
225 add_call_clobber_ops. */
226 unsigned int clobbered_vars;
227
228 /* Number of write-clobbers (V_MAY_DEFs) avoided by using
229 not_written information. */
230 unsigned int static_write_clobbers_avoided;
231
232 /* Number of reads (VUSEs) avoided by using not_read information. */
233 unsigned int static_read_clobbers_avoided;
234
235 /* Number of write-clobbers avoided because the variable can't escape to
236 this call. */
237 unsigned int unescapable_clobbers_avoided;
238
239 /* Number of read-only uses we attempt to add to calls in
240 add_call_read_ops. */
241 unsigned int readonly_clobbers;
242
243 /* Number of read-only uses we avoid using not_read information. */
244 unsigned int static_readonly_clobbers_avoided;
245 } clobber_stats;
246
247
248 /* Initialize the operand cache routines. */
249
250 void
251 init_ssa_operands (void)
252 {
253 build_defs = VEC_alloc (tree, heap, 5);
254 build_uses = VEC_alloc (tree, heap, 10);
255 build_vuses = VEC_alloc (tree, heap, 25);
256 build_v_may_defs = VEC_alloc (tree, heap, 25);
257 build_v_must_defs = VEC_alloc (tree, heap, 25);
258
259 gcc_assert (operand_memory == NULL);
260 operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
261 ops_active = true;
262 memset (&clobber_stats, 0, sizeof (clobber_stats));
263 }
264
265
266 /* Dispose of anything allocated by the operand routines. */
267
268 void
269 fini_ssa_operands (void)
270 {
271 struct ssa_operand_memory_d *ptr;
272 VEC_free (tree, heap, build_defs);
273 VEC_free (tree, heap, build_uses);
274 VEC_free (tree, heap, build_v_must_defs);
275 VEC_free (tree, heap, build_v_may_defs);
276 VEC_free (tree, heap, build_vuses);
277 free_defs = NULL;
278 free_uses = NULL;
279 free_vuses = NULL;
280 free_maydefs = NULL;
281 free_mustdefs = NULL;
282 while ((ptr = operand_memory) != NULL)
283 {
284 operand_memory = operand_memory->next;
285 ggc_free (ptr);
286 }
287
288 ops_active = false;
289
290 if (dump_file && (dump_flags & TDF_STATS))
291 {
292 fprintf (dump_file, "Original clobbered vars:%d\n",
293 clobber_stats.clobbered_vars);
294 fprintf (dump_file, "Static write clobbers avoided:%d\n",
295 clobber_stats.static_write_clobbers_avoided);
296 fprintf (dump_file, "Static read clobbers avoided:%d\n",
297 clobber_stats.static_read_clobbers_avoided);
298 fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
299 clobber_stats.unescapable_clobbers_avoided);
300 fprintf (dump_file, "Original read-only clobbers:%d\n",
301 clobber_stats.readonly_clobbers);
302 fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
303 clobber_stats.static_readonly_clobbers_avoided);
304 }
305 }
306
307
308 /* Return a pointer to SIZE bytes of operand memory, allocating a new chunk if the current one is full. */
309
310 static inline void *
311 ssa_operand_alloc (unsigned size)
312 {
313 char *ptr;
314 if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
315 {
316 struct ssa_operand_memory_d *ptr;
317 ptr = GGC_NEW (struct ssa_operand_memory_d);
318 ptr->next = operand_memory;
319 operand_memory = ptr;
320 operand_memory_index = 0;
321 }
322 ptr = &(operand_memory->mem[operand_memory_index]);
323 operand_memory_index += size;
324 return ptr;
325 }
326
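/* Illustrative note (not part of the original sources): operand nodes are
   never freed individually.  They are carved out of GC-allocated
   ssa_operand_memory_d chunks of SSA_OPERAND_MEMORY_SIZE bytes by the bump
   allocator above, recycled through the free_defs, free_uses, free_vuses,
   free_maydefs and free_mustdefs lists while the cache is active, and only
   released wholesale by the ggc_free loop in fini_ssa_operands.  */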
327
328
329 /* This routine makes sure that PTR is in an immediate use list, and makes
330 sure the stmt pointer is set to the current stmt. */
331
332 static inline void
333 set_virtual_use_link (use_operand_p ptr, tree stmt)
334 {
335 /* fold_stmt may have changed the stmt pointers. */
336 if (ptr->stmt != stmt)
337 ptr->stmt = stmt;
338
339 /* If this use isn't in a list, add it to the correct list. */
340 if (!ptr->prev)
341 link_imm_use (ptr, *(ptr->use));
342 }
343
344 /* Appends ELT after TO, and moves the TO pointer to ELT. */
345
346 #define APPEND_OP_AFTER(ELT, TO) \
347 do \
348 { \
349 (TO)->next = (ELT); \
350 (TO) = (ELT); \
351 } while (0)
352
353 /* Appends the head of list FROM after TO, and moves both pointers
354 to their successors. */
355
356 #define MOVE_HEAD_AFTER(FROM, TO) \
357 do \
358 { \
359 APPEND_OP_AFTER (FROM, TO); \
360 (FROM) = (FROM)->next; \
361 } while (0)
362
363 /* Moves OP to the appropriate freelist. OP is set to its successor. */
364
365 #define MOVE_HEAD_TO_FREELIST(OP, TYPE) \
366 do \
367 { \
368 TYPE##_optype_p next = (OP)->next; \
369 (OP)->next = free_##TYPE##s; \
370 free_##TYPE##s = (OP); \
371 (OP) = next; \
372 } while (0)
373
374 /* Initializes immediate use at USE_PTR to value VAL, and links it to the list
375 of immediate uses. STMT is the current statement. */
376
377 #define INITIALIZE_USE(USE_PTR, VAL, STMT) \
378 do \
379 { \
380 (USE_PTR)->use = (VAL); \
381 link_imm_use_stmt ((USE_PTR), *(VAL), (STMT)); \
382 } while (0)
383
384 /* Adds OP to the list of defs after LAST, and moves
385 LAST to the new element. */
386
387 static inline void
388 add_def_op (tree *op, def_optype_p *last)
389 {
390 def_optype_p new;
391
392 ALLOC_OPTYPE (new, def);
393 DEF_OP_PTR (new) = op;
394 APPEND_OP_AFTER (new, *last);
395 }
396
397 /* Adds OP to the list of uses of statement STMT after LAST, and moves
398 LAST to the new element. */
399
400 static inline void
401 add_use_op (tree stmt, tree *op, use_optype_p *last)
402 {
403 use_optype_p new;
404
405 ALLOC_OPTYPE (new, use);
406 INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
407 APPEND_OP_AFTER (new, *last);
408 }
409
410 /* Adds OP to the list of vuses of statement STMT after LAST, and moves
411 LAST to the new element. */
412
413 static inline void
414 add_vuse_op (tree stmt, tree op, vuse_optype_p *last)
415 {
416 vuse_optype_p new;
417
418 ALLOC_OPTYPE (new, vuse);
419 VUSE_OP (new) = op;
420 INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
421 APPEND_OP_AFTER (new, *last);
422 }
423
424 /* Adds OP to the list of maydefs of statement STMT after LAST, and moves
425 LAST to the new element. */
426
427 static inline void
428 add_maydef_op (tree stmt, tree op, maydef_optype_p *last)
429 {
430 maydef_optype_p new;
431
432 ALLOC_OPTYPE (new, maydef);
433 MAYDEF_RESULT (new) = op;
434 MAYDEF_OP (new) = op;
435 INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
436 APPEND_OP_AFTER (new, *last);
437 }
438
439 /* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
440 LAST to the new element. */
441
442 static inline void
443 add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
444 {
445 mustdef_optype_p new;
446
447 ALLOC_OPTYPE (new, mustdef);
448 MUSTDEF_RESULT (new) = op;
449 MUSTDEF_KILL (new) = op;
450 INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
451 APPEND_OP_AFTER (new, *last);
452 }
453
454 /* Takes elements from build_defs and turns them into def operands of STMT.
455 TODO -- Given that the def operands list is not necessarily sorted, merging
456 the operands this way does not make much sense.
457 -- Make build_defs VEC of tree *. */
458
459 static inline void
460 finalize_ssa_def_ops (tree stmt)
461 {
462 unsigned new_i;
463 struct def_optype_d new_list;
464 def_optype_p old_ops, last;
465 tree *old_base;
466
467 new_list.next = NULL;
468 last = &new_list;
469
470 old_ops = DEF_OPS (stmt);
471
472 new_i = 0;
473 while (old_ops && new_i < VEC_length (tree, build_defs))
474 {
475 tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
476 old_base = DEF_OP_PTR (old_ops);
477
478 if (old_base == new_base)
479 {
480 /* if variables are the same, reuse this node. */
481 MOVE_HEAD_AFTER (old_ops, last);
482 new_i++;
483 }
484 else if (old_base < new_base)
485 {
486 /* if old is less than new, old goes to the free list. */
487 MOVE_HEAD_TO_FREELIST (old_ops, def);
488 }
489 else
490 {
491 /* This is a new operand. */
492 add_def_op (new_base, &last);
493 new_i++;
494 }
495 }
496
497 /* If there is anything remaining in the build_defs list, simply emit it. */
498 for ( ; new_i < VEC_length (tree, build_defs); new_i++)
499 add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);
500
501 last->next = NULL;
502
503 /* If there is anything in the old list, free it. */
504 if (old_ops)
505 {
506 old_ops->next = free_defs;
507 free_defs = old_ops;
508 }
509
510 /* Now set the stmt's operands. */
511 DEF_OPS (stmt) = new_list.next;
512
513 #ifdef ENABLE_CHECKING
514 {
515 def_optype_p ptr;
516 unsigned x = 0;
517 for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
518 x++;
519
520 gcc_assert (x == VEC_length (tree, build_defs));
521 }
522 #endif
523 }
524
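/* Worked example (illustrative, with made-up operands, assuming the
   operand slots happen to be ordered &x < &y < &z in memory): if the old
   DEF list for STMT points at { &x, &z } and build_defs now holds
   { &x, &y, &z }, the loop above proceeds as follows:

      &x vs &x : equal, so the old node is reused (MOVE_HEAD_AFTER);
      &z vs &y : old > new, so a fresh node is created for &y (add_def_op);
      &z vs &z : equal, so the old node is reused.

   Old nodes that compare lower than the current new operand go to
   free_defs via MOVE_HEAD_TO_FREELIST, and anything still left in
   build_defs is emitted by the loop that follows.  As the TODO above
   notes, real DEF operands are compared by the address of the operand
   slot and build_defs is not necessarily sorted, so this merge mainly
   pays off when the statement structure is unchanged.  */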
525 /* This routine will create stmt operands for STMT from the def build list. */
526
527 static void
528 finalize_ssa_defs (tree stmt)
529 {
530 unsigned int num = VEC_length (tree, build_defs);
531
532 /* There should only be a single real definition per assignment. */
533 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
534
535 /* If there is an old list, often the new list is identical, or close, so
536 find the elements at the beginning that are the same as the vector. */
537 finalize_ssa_def_ops (stmt);
538 VEC_truncate (tree, build_defs, 0);
539 }
540
541 /* Takes elements from build_uses and turns them into use operands of STMT.
542 TODO -- Make build_uses VEC of tree *. */
543
544 static inline void
545 finalize_ssa_use_ops (tree stmt)
546 {
547 unsigned new_i;
548 struct use_optype_d new_list;
549 use_optype_p old_ops, ptr, last;
550
551 new_list.next = NULL;
552 last = &new_list;
553
554 old_ops = USE_OPS (stmt);
555
556 /* If there is anything in the old list, free it. */
557 if (old_ops)
558 {
559 for (ptr = old_ops; ptr; ptr = ptr->next)
560 delink_imm_use (USE_OP_PTR (ptr));
561 old_ops->next = free_uses;
562 free_uses = old_ops;
563 }
564
565 /* Now create nodes for all the new uses. */
566 for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
567 add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);
568
569 last->next = NULL;
570
571 /* Now set the stmt's operands. */
572 USE_OPS (stmt) = new_list.next;
573
574 #ifdef ENABLE_CHECKING
575 {
576 unsigned x = 0;
577 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
578 x++;
579
580 gcc_assert (x == VEC_length (tree, build_uses));
581 }
582 #endif
583 }
584
585 /* Finalize the new use operands for STMT from the build_uses vector. */
586
587 static void
588 finalize_ssa_uses (tree stmt)
589 {
590 #ifdef ENABLE_CHECKING
591 {
592 unsigned x;
593 unsigned num = VEC_length (tree, build_uses);
594
595 /* If the pointer to the operand is the statement itself, something is
596 wrong. It means that we are pointing to a local variable (the
597 initial call to update_stmt_operands does not pass a pointer to a
598 statement). */
599 for (x = 0; x < num; x++)
600 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
601 }
602 #endif
603 finalize_ssa_use_ops (stmt);
604 VEC_truncate (tree, build_uses, 0);
605 }
606
607
608 /* Takes elements from build_v_may_defs and turns them into maydef operands of
609 STMT. */
610
611 static inline void
612 finalize_ssa_v_may_def_ops (tree stmt)
613 {
614 unsigned new_i;
615 struct maydef_optype_d new_list;
616 maydef_optype_p old_ops, ptr, last;
617 tree act;
618 unsigned old_base, new_base;
619
620 new_list.next = NULL;
621 last = &new_list;
622
623 old_ops = MAYDEF_OPS (stmt);
624
625 new_i = 0;
626 while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
627 {
628 act = VEC_index (tree, build_v_may_defs, new_i);
629 new_base = get_name_decl (act);
630 old_base = get_name_decl (MAYDEF_OP (old_ops));
631
632 if (old_base == new_base)
633 {
634 /* if variables are the same, reuse this node. */
635 MOVE_HEAD_AFTER (old_ops, last);
636 set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
637 new_i++;
638 }
639 else if (old_base < new_base)
640 {
641 /* if old is less than new, old goes to the free list. */
642 delink_imm_use (MAYDEF_OP_PTR (old_ops));
643 MOVE_HEAD_TO_FREELIST (old_ops, maydef);
644 }
645 else
646 {
647 /* This is a new operand. */
648 add_maydef_op (stmt, act, &last);
649 new_i++;
650 }
651 }
652
653 /* If there is anything remaining in the build_v_may_defs list, simply emit it. */
654 for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
655 add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);
656
657 last->next = NULL;
658
659 /* If there is anything in the old list, free it. */
660 if (old_ops)
661 {
662 for (ptr = old_ops; ptr; ptr = ptr->next)
663 delink_imm_use (MAYDEF_OP_PTR (ptr));
664 old_ops->next = free_maydefs;
665 free_maydefs = old_ops;
666 }
667
668 /* Now set the stmt's operands. */
669 MAYDEF_OPS (stmt) = new_list.next;
670
671 #ifdef ENABLE_CHECKING
672 {
673 unsigned x = 0;
674 for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
675 x++;
676
677 gcc_assert (x == VEC_length (tree, build_v_may_defs));
678 }
679 #endif
680 }
681
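/* Finalize the V_MAY_DEF operands for STMT.  Note that build_v_may_defs
   is not emptied here; it is still needed by finalize_ssa_vuses and is
   cleared later by cleanup_v_may_defs.  */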
682 static void
683 finalize_ssa_v_may_defs (tree stmt)
684 {
685 finalize_ssa_v_may_def_ops (stmt);
686 }
687
688
689 /* Clear the in_list bits and empty the build array for V_MAY_DEFs. */
690
691 static inline void
692 cleanup_v_may_defs (void)
693 {
694 unsigned x, num;
695 num = VEC_length (tree, build_v_may_defs);
696
697 for (x = 0; x < num; x++)
698 {
699 tree t = VEC_index (tree, build_v_may_defs, x);
700 if (TREE_CODE (t) != SSA_NAME)
701 {
702 var_ann_t ann = var_ann (t);
703 ann->in_v_may_def_list = 0;
704 }
705 }
706 VEC_truncate (tree, build_v_may_defs, 0);
707 }
708
709
710 /* Takes elements from build_vuses and turns them into vuse operands of
711 STMT. */
712
713 static inline void
714 finalize_ssa_vuse_ops (tree stmt)
715 {
716 unsigned new_i;
717 struct vuse_optype_d new_list;
718 vuse_optype_p old_ops, ptr, last;
719 tree act;
720 unsigned old_base, new_base;
721
722 new_list.next = NULL;
723 last = &new_list;
724
725 old_ops = VUSE_OPS (stmt);
726
727 new_i = 0;
728 while (old_ops && new_i < VEC_length (tree, build_vuses))
729 {
730 act = VEC_index (tree, build_vuses, new_i);
731 new_base = get_name_decl (act);
732 old_base = get_name_decl (VUSE_OP (old_ops));
733
734 if (old_base == new_base)
735 {
736 /* if variables are the same, reuse this node. */
737 MOVE_HEAD_AFTER (old_ops, last);
738 set_virtual_use_link (VUSE_OP_PTR (last), stmt);
739 new_i++;
740 }
741 else if (old_base < new_base)
742 {
743 /* if old is less than new, old goes to the free list. */
744 delink_imm_use (VUSE_OP_PTR (old_ops));
745 MOVE_HEAD_TO_FREELIST (old_ops, vuse);
746 }
747 else
748 {
749 /* This is a new operand. */
750 add_vuse_op (stmt, act, &last);
751 new_i++;
752 }
753 }
754
755 /* If there is anything remaining in the build_vuses list, simply emit it. */
756 for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
757 add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);
758
759 last->next = NULL;
760
761 /* If there is anything in the old list, free it. */
762 if (old_ops)
763 {
764 for (ptr = old_ops; ptr; ptr = ptr->next)
765 delink_imm_use (VUSE_OP_PTR (ptr));
766 old_ops->next = free_vuses;
767 free_vuses = old_ops;
768 }
769
770 /* Now set the stmt's operands. */
771 VUSE_OPS (stmt) = new_list.next;
772
773 #ifdef ENABLE_CHECKING
774 {
775 unsigned x = 0;
776 for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
777 x++;
778
779 gcc_assert (x == VEC_length (tree, build_vuses));
780 }
781 #endif
782 }
783
784 /* Finalize the VUSE operands for STMT, pruning VUSEs that are implied by V_MAY_DEF operands. */
785
786 static void
787 finalize_ssa_vuses (tree stmt)
788 {
789 unsigned num, num_v_may_defs;
790 unsigned vuse_index;
791
792 /* Remove superfluous VUSE operands. If the statement already has a
793 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
794 not needed because V_MAY_DEFs imply a VUSE of the variable. For
795 instance, suppose that variable 'a' is aliased:
796
797 # VUSE <a_2>
798 # a_3 = V_MAY_DEF <a_2>
799 a = a + 1;
800
801 The VUSE <a_2> is superfluous because it is implied by the
802 V_MAY_DEF operation. */
803 num = VEC_length (tree, build_vuses);
804 num_v_may_defs = VEC_length (tree, build_v_may_defs);
805
806 if (num > 0 && num_v_may_defs > 0)
807 {
808 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
809 {
810 tree vuse;
811 vuse = VEC_index (tree, build_vuses, vuse_index);
812 if (TREE_CODE (vuse) != SSA_NAME)
813 {
814 var_ann_t ann = var_ann (vuse);
815 ann->in_vuse_list = 0;
816 if (ann->in_v_may_def_list)
817 {
818 VEC_ordered_remove (tree, build_vuses, vuse_index);
819 continue;
820 }
821 }
822 vuse_index++;
823 }
824 }
825 else
826 {
827 /* Clear out the in_list bits. */
828 for (vuse_index = 0;
829 vuse_index < VEC_length (tree, build_vuses);
830 vuse_index++)
831 {
832 tree t = VEC_index (tree, build_vuses, vuse_index);
833 if (TREE_CODE (t) != SSA_NAME)
834 {
835 var_ann_t ann = var_ann (t);
836 ann->in_vuse_list = 0;
837 }
838 }
839 }
840
841 finalize_ssa_vuse_ops (stmt);
842
843 /* The V_MAY_DEF build vector wasn't cleaned up because we needed it. */
844 cleanup_v_may_defs ();
845
846 /* Free the VUSEs build vector. */
847 VEC_truncate (tree, build_vuses, 0);
848
849 }
850
851 /* Takes elements from build_v_must_defs and turns them into mustdef operands of
852 STMT. */
853
854 static inline void
855 finalize_ssa_v_must_def_ops (tree stmt)
856 {
857 unsigned new_i;
858 struct mustdef_optype_d new_list;
859 mustdef_optype_p old_ops, ptr, last;
860 tree act;
861 unsigned old_base, new_base;
862
863 new_list.next = NULL;
864 last = &new_list;
865
866 old_ops = MUSTDEF_OPS (stmt);
867
868 new_i = 0;
869 while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
870 {
871 act = VEC_index (tree, build_v_must_defs, new_i);
872 new_base = get_name_decl (act);
873 old_base = get_name_decl (MUSTDEF_KILL (old_ops));
874
875 if (old_base == new_base)
876 {
877 /* If variables are the same, reuse this node. */
878 MOVE_HEAD_AFTER (old_ops, last);
879 set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
880 new_i++;
881 }
882 else if (old_base < new_base)
883 {
884 /* If old is less than new, old goes to the free list. */
885 delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
886 MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
887 }
888 else
889 {
890 /* This is a new operand. */
891 add_mustdef_op (stmt, act, &last);
892 new_i++;
893 }
894 }
895
896 /* If there is anything remaining in the build_v_must_defs list, simply emit it. */
897 for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
898 add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);
899
900 last->next = NULL;
901
902 /* If there is anything in the old list, free it. */
903 if (old_ops)
904 {
905 for (ptr = old_ops; ptr; ptr = ptr->next)
906 delink_imm_use (MUSTDEF_KILL_PTR (ptr));
907 old_ops->next = free_mustdefs;
908 free_mustdefs = old_ops;
909 }
910
911 /* Now set the stmt's operands. */
912 MUSTDEF_OPS (stmt) = new_list.next;
913
914 #ifdef ENABLE_CHECKING
915 {
916 unsigned x = 0;
917 for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
918 x++;
919
920 gcc_assert (x == VEC_length (tree, build_v_must_defs));
921 }
922 #endif
923 }
924
925 static void
926 finalize_ssa_v_must_defs (tree stmt)
927 {
928 /* In the presence of subvars, there may be more than one V_MUST_DEF
929 per statement (one for each subvar). It is a bit expensive to
930 verify that all must-defs in a statement belong to subvars if
931 there is more than one must-def, so we don't do it. Suffice to
932 say, if you reach here without having subvars, and with more than
933 one V_MUST_DEF, you have hit a bug. */
934 finalize_ssa_v_must_def_ops (stmt);
935 VEC_truncate (tree, build_v_must_defs, 0);
936 }
937
938
939 /* Finalize all the build vectors and attach the new operand lists to STMT. */
940
941 static inline void
942 finalize_ssa_stmt_operands (tree stmt)
943 {
944 finalize_ssa_defs (stmt);
945 finalize_ssa_uses (stmt);
946 finalize_ssa_v_must_defs (stmt);
947 finalize_ssa_v_may_defs (stmt);
948 finalize_ssa_vuses (stmt);
949 }
950
951
952 /* Start the process of building up the operand vectors for a statement. */
953
954 static inline void
955 start_ssa_stmt_operands (void)
956 {
957 gcc_assert (VEC_length (tree, build_defs) == 0);
958 gcc_assert (VEC_length (tree, build_uses) == 0);
959 gcc_assert (VEC_length (tree, build_vuses) == 0);
960 gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
961 gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
962 }
963
964
965 /* Add DEF_P to the list of pointers to operands. */
966
967 static inline void
968 append_def (tree *def_p)
969 {
970 VEC_safe_push (tree, heap, build_defs, (tree)def_p);
971 }
972
973
974 /* Add USE_P to the list of pointers to operands. */
975
976 static inline void
977 append_use (tree *use_p)
978 {
979 VEC_safe_push (tree, heap, build_uses, (tree)use_p);
980 }
981
982
983 /* Add a new virtual may def for variable VAR to the build array. */
984
985 static inline void
986 append_v_may_def (tree var)
987 {
988 if (TREE_CODE (var) != SSA_NAME)
989 {
990 var_ann_t ann = get_var_ann (var);
991
992 /* Don't allow duplicate entries. */
993 if (ann->in_v_may_def_list)
994 return;
995 ann->in_v_may_def_list = 1;
996 }
997
998 VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
999 }
1000
1001
1002 /* Add VAR to the list of virtual uses. */
1003
1004 static inline void
1005 append_vuse (tree var)
1006 {
1007 /* Don't allow duplicate entries. */
1008 if (TREE_CODE (var) != SSA_NAME)
1009 {
1010 var_ann_t ann = get_var_ann (var);
1011
1012 if (ann->in_vuse_list || ann->in_v_may_def_list)
1013 return;
1014 ann->in_vuse_list = 1;
1015 }
1016
1017 VEC_safe_push (tree, heap, build_vuses, (tree)var);
1018 }
1019
1020
1021 /* Add VAR to the list of virtual must definitions. */
1022
1023 static inline void
1024 append_v_must_def (tree var)
1025 {
1026 unsigned i;
1027
1028 /* Don't allow duplicate entries. */
1029 for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
1030 if (var == VEC_index (tree, build_v_must_defs, i))
1031 return;
1032
1033 VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
1034 }
1035
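/* Illustrative note (not part of the original sources): the append_*
   routines above avoid duplicates in two different ways.  append_v_may_def
   and append_vuse set the in_v_may_def_list / in_vuse_list bits in the
   variable annotation, making the duplicate check O(1) per append (the
   bits are cleared again in cleanup_v_may_defs and finalize_ssa_vuses),
   while append_v_must_def simply scans build_v_must_defs, which stays
   small because a statement only has multiple V_MUST_DEFs when subvars
   are involved.  */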
1036
1037 /* Return true if the access to ALIAS represented by REF can actually
1038 touch ALIAS. REF is a tree that contains the entire pointer
1039 dereference expression, if available, or NULL otherwise. ALIAS is
1040 the variable we are asking if REF can access. OFFSET and SIZE come
1041 from the memory access expression that generated this virtual
1042 operand. */
1043
1044 static bool
1045 access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
1046 HOST_WIDE_INT size)
1047 {
1048 bool offsetgtz = offset > 0;
1049 unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
1050 tree base = ref ? get_base_address (ref) : NULL;
1051
1052 /* If ALIAS is an SFT, it can't be touched if the offset
1053 and size of the access is not overlapping with the SFT offset and
1054 size. This is only true if we are accessing through a pointer
1055 to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
1056 be accessing through a pointer to some substruct of the
1057 structure, and if we try to prune there, we will have the wrong
1058 offset, and get the wrong answer.
1059 i.e., we can't prune without more work if we have something like
1060
1061 struct gcc_target
1062 {
1063 struct asm_out
1064 {
1065 const char *byte_op;
1066 struct asm_int_op
1067 {
1068 const char *hi;
1069 } aligned_op;
1070 } asm_out;
1071 } targetm;
1072
1073 foo = &targetm.asm_out.aligned_op;
1074 return foo->hi;
1075
1076 SFT.1, which represents hi, will have SFT_OFFSET=32 because in
1077 terms of SFT_PARENT_VAR, that is where it is.
1078 However, the access through the foo pointer will be at offset 0. */
1079 if (size != -1
1080 && TREE_CODE (alias) == STRUCT_FIELD_TAG
1081 && base
1082 && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
1083 && !overlap_subvar (offset, size, alias, NULL))
1084 {
1085 #ifdef ACCESS_DEBUGGING
1086 fprintf (stderr, "Access to ");
1087 print_generic_expr (stderr, ref, 0);
1088 fprintf (stderr, " may not touch ");
1089 print_generic_expr (stderr, alias, 0);
1090 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1091 #endif
1092 return false;
1093 }
1094
1095 /* With strict aliasing, it is impossible for a component access
1096 through a pointer to touch a random variable, unless that
1097 variable *is* a structure or a pointer.
1098
1099 That is, given p->c, and some random global variable b,
1100 there is no legal way that p->c could be an access to b.
1101
1102 Without strict aliasing on, we consider it legal to do something
1103 like:
1104
1105 struct foos { int l; };
1106 int foo;
1107 static struct foos *getfoo(void);
1108 int main (void)
1109 {
1110 struct foos *f = getfoo();
1111 f->l = 1;
1112 foo = 2;
1113 if (f->l == 1)
1114 abort();
1115 exit(0);
1116 }
1117 static struct foos *getfoo(void)
1118 { return (struct foos *)&foo; }
1119
1120 (taken from 20000623-1.c)
1121 */
1122 else if (ref
1123 && flag_strict_aliasing
1124 && TREE_CODE (ref) != INDIRECT_REF
1125 && !MTAG_P (alias)
1126 && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
1127 && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
1128 && !POINTER_TYPE_P (TREE_TYPE (alias)))
1129 {
1130 #ifdef ACCESS_DEBUGGING
1131 fprintf (stderr, "Access to ");
1132 print_generic_expr (stderr, ref, 0);
1133 fprintf (stderr, " may not touch ");
1134 print_generic_expr (stderr, alias, 0);
1135 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1136 #endif
1137 return false;
1138 }
1139
1140 /* If the offset of the access is greater than the size of one of
1141 the possible aliases, it can't be touching that alias, because it
1142 would be past the end of the structure. */
1143 else if (ref
1144 && flag_strict_aliasing
1145 && TREE_CODE (ref) != INDIRECT_REF
1146 && !MTAG_P (alias)
1147 && !POINTER_TYPE_P (TREE_TYPE (alias))
1148 && offsetgtz
1149 && DECL_SIZE (alias)
1150 && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
1151 && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
1152 {
1153 #ifdef ACCESS_DEBUGGING
1154 fprintf (stderr, "Access to ");
1155 print_generic_expr (stderr, ref, 0);
1156 fprintf (stderr, " may not touch ");
1157 print_generic_expr (stderr, alias, 0);
1158 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1159 #endif
1160 return false;
1161 }
1162
1163 return true;
1164 }
1165
1166
1167 /* Add VAR to the virtual operands array. FLAGS is as in
1168 get_expr_operands. FULL_REF is a tree that contains the entire
1169 pointer dereference expression, if available, or NULL otherwise.
1170 OFFSET and SIZE come from the memory access expression that
1171 generated this virtual operand. FOR_CLOBBER is true if this is
1172 adding a virtual operand for a call clobber. */
1173
1174 static void
1175 add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
1176 tree full_ref, HOST_WIDE_INT offset,
1177 HOST_WIDE_INT size, bool for_clobber)
1178 {
1179 VEC(tree,gc) *aliases;
1180 tree sym;
1181 var_ann_t v_ann;
1182
1183 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1184 v_ann = var_ann (sym);
1185
1186 /* Mark statements with volatile operands. Optimizers should back
1187 off from statements having volatile operands. */
1188 if (TREE_THIS_VOLATILE (sym) && s_ann)
1189 s_ann->has_volatile_ops = true;
1190
1191 /* If the variable cannot be modified and this is a V_MAY_DEF change
1192 it into a VUSE. This happens when read-only variables are marked
1193 call-clobbered and/or aliased to writable variables. So we only
1194 do this for non-specific stores.
1195
1196 Note that if this is a specific store, i.e. associated with a
1197 modify_expr, then we can't suppress the V_MAY_DEF, lest we run
1198 into validation problems.
1199
1200 This can happen when programs cast away const, leaving us with a
1201 store to read-only memory. If the statement is actually executed
1202 at runtime, then the program is ill formed. If the statement is
1203 not executed then all is well. At the very least, we cannot ICE. */
1204 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1205 flags &= ~(opf_is_def | opf_kill_def);
1206
1207 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1208 virtual operands, unless the caller has specifically requested
1209 not to add virtual operands (used when adding operands inside an
1210 ADDR_EXPR expression). */
1211 if (flags & opf_no_vops)
1212 return;
1213
1214 aliases = v_ann->may_aliases;
1215 if (aliases == NULL)
1216 {
1217 /* The variable is not aliased or it is an alias tag. */
1218 if (flags & opf_is_def)
1219 {
1220 if (flags & opf_kill_def)
1221 {
1222 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1223 variable definitions. */
1224 gcc_assert (!MTAG_P (var)
1225 || TREE_CODE (var) == STRUCT_FIELD_TAG);
1226 append_v_must_def (var);
1227 }
1228 else
1229 {
1230 /* Add a V_MAY_DEF for call-clobbered variables and
1231 memory tags. */
1232 append_v_may_def (var);
1233 }
1234 }
1235 else
1236 append_vuse (var);
1237 }
1238 else
1239 {
1240 unsigned i;
1241 tree al;
1242
1243 /* The variable is aliased. Add its aliases to the virtual
1244 operands. */
1245 gcc_assert (VEC_length (tree, aliases) != 0);
1246
1247 if (flags & opf_is_def)
1248 {
1249
1250 bool none_added = true;
1251
1252 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1253 {
1254 if (!access_can_touch_variable (full_ref, al, offset, size))
1255 continue;
1256
1257 none_added = false;
1258 append_v_may_def (al);
1259 }
1260
1261 /* If the variable is also an alias tag, add a virtual
1262 operand for it, otherwise we will miss representing
1263 references to the members of the variable's alias set.
1264 This fixes the bug in gcc.c-torture/execute/20020503-1.c.
1265
1266 It is also necessary to add bare defs on clobbers for
1267 SMT's, so that bare SMT uses caused by pruning all the
1268 aliases will link up properly with calls. In order to
1269 keep the number of these bare defs we add down to the
1270 minimum necessary, we keep track of which SMT's were used
1271 alone in statement vdefs or VUSEs. */
1272 if (v_ann->is_aliased
1273 || none_added
1274 || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
1275 && for_clobber
1276 && SMT_USED_ALONE (var)))
1277 {
1278 /* Every bare SMT def we add should have SMT_USED_ALONE
1279 set on it, or else we will get the wrong answer on
1280 clobbers. */
1281 if (none_added
1282 && !updating_used_alone && aliases_computed_p
1283 && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
1284 gcc_assert (SMT_USED_ALONE (var));
1285
1286 append_v_may_def (var);
1287 }
1288 }
1289 else
1290 {
1291 bool none_added = true;
1292 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1293 {
1294 if (!access_can_touch_variable (full_ref, al, offset, size))
1295 continue;
1296 none_added = false;
1297 append_vuse (al);
1298 }
1299
1300 /* Similarly, append a virtual use for VAR itself, when
1301 it is an alias tag. */
1302 if (v_ann->is_aliased || none_added)
1303 append_vuse (var);
1304 }
1305 }
1306 }
1307
1308
1309 /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
1310 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1311 the statement's real operands, otherwise it is added to virtual
1312 operands. */
1313
1314 static void
1315 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1316 {
1317 bool is_real_op;
1318 tree var, sym;
1319 var_ann_t v_ann;
1320
1321 var = *var_p;
1322 gcc_assert (SSA_VAR_P (var));
1323
1324 is_real_op = is_gimple_reg (var);
1325
1326 /* If this is a real operand, the operand is either an SSA name or a
1327 decl. Virtual operands may only be decls. */
1328 gcc_assert (is_real_op || DECL_P (var));
1329
1330 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1331 v_ann = var_ann (sym);
1332
1333 /* Mark statements with volatile operands. Optimizers should back
1334 off from statements having volatile operands. */
1335 if (TREE_THIS_VOLATILE (sym) && s_ann)
1336 s_ann->has_volatile_ops = true;
1337
1338 if (is_real_op)
1339 {
1340 /* The variable is a GIMPLE register. Add it to real operands. */
1341 if (flags & opf_is_def)
1342 append_def (var_p);
1343 else
1344 append_use (var_p);
1345 }
1346 else
1347 add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
1348 }
1349
1350
1351 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1352 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1353
1354 STMT is the statement being processed, EXPR is the INDIRECT_REF
1355 that got us here.
1356
1357 FLAGS is as in get_expr_operands.
1358
1359 FULL_REF contains the full pointer dereference expression, if we
1360 have it, or NULL otherwise.
1361
1362 OFFSET and SIZE are the location of the access inside the
1363 dereferenced pointer, if known.
1364
1365 RECURSE_ON_BASE should be set to true if we want to continue
1366 calling get_expr_operands on the base pointer, and false if
1367 something else will do it for us. */
1368
1369 static void
1370 get_indirect_ref_operands (tree stmt, tree expr, int flags,
1371 tree full_ref,
1372 HOST_WIDE_INT offset, HOST_WIDE_INT size,
1373 bool recurse_on_base)
1374 {
1375 tree *pptr = &TREE_OPERAND (expr, 0);
1376 tree ptr = *pptr;
1377 stmt_ann_t s_ann = stmt_ann (stmt);
1378
1379 /* Stores into INDIRECT_REF operands are never killing definitions. */
1380 flags &= ~opf_kill_def;
1381
1382 if (SSA_VAR_P (ptr))
1383 {
1384 struct ptr_info_def *pi = NULL;
1385
1386 /* If PTR has flow-sensitive points-to information, use it. */
1387 if (TREE_CODE (ptr) == SSA_NAME
1388 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1389 && pi->name_mem_tag)
1390 {
1391 /* PTR has its own memory tag. Use it. */
1392 add_virtual_operand (pi->name_mem_tag, s_ann, flags,
1393 full_ref, offset, size, false);
1394 }
1395 else
1396 {
1397 /* If PTR is not an SSA_NAME or it doesn't have a name
1398 tag, use its symbol memory tag. */
1399 var_ann_t v_ann;
1400
1401 /* If we are emitting debugging dumps, display a warning if
1402 PTR is an SSA_NAME with no flow-sensitive alias
1403 information. That means that we may need to compute
1404 aliasing again. */
1405 if (dump_file
1406 && TREE_CODE (ptr) == SSA_NAME
1407 && pi == NULL)
1408 {
1409 fprintf (dump_file,
1410 "NOTE: no flow-sensitive alias info for ");
1411 print_generic_expr (dump_file, ptr, dump_flags);
1412 fprintf (dump_file, " in ");
1413 print_generic_stmt (dump_file, stmt, dump_flags);
1414 }
1415
1416 if (TREE_CODE (ptr) == SSA_NAME)
1417 ptr = SSA_NAME_VAR (ptr);
1418 v_ann = var_ann (ptr);
1419
1420 if (v_ann->symbol_mem_tag)
1421 add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
1422 full_ref, offset, size, false);
1423 }
1424 }
1425 else if (TREE_CODE (ptr) == INTEGER_CST)
1426 {
1427 /* If a constant is used as a pointer, we can't generate a real
1428 operand for it but we mark the statement volatile to prevent
1429 optimizations from messing things up. */
1430 if (s_ann)
1431 s_ann->has_volatile_ops = true;
1432 return;
1433 }
1434 else
1435 {
1436 /* Ok, this isn't even is_gimple_min_invariant. Something's broken. */
1437 gcc_unreachable ();
1438 }
1439
1440 /* If requested, add a USE operand for the base pointer. */
1441 if (recurse_on_base)
1442 get_expr_operands (stmt, pptr, opf_none);
1443 }
1444
1445
1446 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1447
1448 static void
1449 get_tmr_operands (tree stmt, tree expr, int flags)
1450 {
1451 tree tag = TMR_TAG (expr), ref;
1452 HOST_WIDE_INT offset, size, maxsize;
1453 subvar_t svars, sv;
1454 stmt_ann_t s_ann = stmt_ann (stmt);
1455
1456 /* First record the real operands. */
1457 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1458 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1459
1460 /* MEM_REFs should never be killing. */
1461 flags &= ~opf_kill_def;
1462
1463 if (TMR_SYMBOL (expr))
1464 {
1465 stmt_ann_t ann = stmt_ann (stmt);
1466 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1467 }
1468
1469 if (!tag)
1470 {
1471 /* Something weird, so ensure that we will be careful. */
1472 stmt_ann (stmt)->has_volatile_ops = true;
1473 return;
1474 }
1475
1476 if (DECL_P (tag))
1477 {
1478 get_expr_operands (stmt, &tag, flags);
1479 return;
1480 }
1481
1482 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1483 gcc_assert (ref != NULL_TREE);
1484 svars = get_subvars_for_var (ref);
1485 for (sv = svars; sv; sv = sv->next)
1486 {
1487 bool exact;
1488 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1489 {
1490 int subvar_flags = flags;
1491 if (!exact || size != maxsize)
1492 subvar_flags &= ~opf_kill_def;
1493 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1494 }
1495 }
1496 }
1497
1498
1499 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1500 clobbered variables in the function. */
1501
1502 static void
1503 add_call_clobber_ops (tree stmt, tree callee)
1504 {
1505 unsigned u;
1506 bitmap_iterator bi;
1507 stmt_ann_t s_ann = stmt_ann (stmt);
1508 bitmap not_read_b, not_written_b;
1509
1510 /* Functions that are not const, pure or noreturn may clobber
1511 call-clobbered variables. */
1512 if (s_ann)
1513 s_ann->makes_clobbering_call = true;
1514
1515 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1516 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1517 if (global_var)
1518 {
1519 add_stmt_operand (&global_var, s_ann, opf_is_def);
1520 return;
1521 }
1522
1523 /* Get info for local and module level statics. There is a bit
1524 set for each static if the call being processed does not read
1525 or write that variable. */
1526 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1527 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1528 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1529 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1530 {
1531 tree var = referenced_var_lookup (u);
1532 unsigned int escape_mask = var_ann (var)->escape_mask;
1533 tree real_var = var;
1534 bool not_read;
1535 bool not_written;
1536
1537 /* Not read and not written are computed on regular vars, not
1538 subvars, so look at the parent var if this is an SFT. */
1539 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1540 real_var = SFT_PARENT_VAR (var);
1541
1542 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1543 DECL_UID (real_var)) : false;
1544 not_written = not_written_b ? bitmap_bit_p (not_written_b,
1545 DECL_UID (real_var)) : false;
1546 gcc_assert (!unmodifiable_var_p (var));
1547
1548 clobber_stats.clobbered_vars++;
1549
1550 /* See if this variable is really clobbered by this function. */
1551
1552 /* Trivial case: Things escaping only to pure/const are not
1553 clobbered by non-pure-const, and only read by pure/const. */
1554 if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
1555 {
1556 tree call = get_call_expr_in (stmt);
1557 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1558 {
1559 add_stmt_operand (&var, s_ann, opf_none);
1560 clobber_stats.unescapable_clobbers_avoided++;
1561 continue;
1562 }
1563 else
1564 {
1565 clobber_stats.unescapable_clobbers_avoided++;
1566 continue;
1567 }
1568 }
1569
1570 if (not_written)
1571 {
1572 clobber_stats.static_write_clobbers_avoided++;
1573 if (!not_read)
1574 add_stmt_operand (&var, s_ann, opf_none);
1575 else
1576 clobber_stats.static_read_clobbers_avoided++;
1577 }
1578 else
1579 add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
1580 }
1581 }
1582
1583
1584 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1585 function. */
1586
1587 static void
1588 add_call_read_ops (tree stmt, tree callee)
1589 {
1590 unsigned u;
1591 bitmap_iterator bi;
1592 stmt_ann_t s_ann = stmt_ann (stmt);
1593 bitmap not_read_b;
1594
1595 /* If the function is not pure, it may reference memory. Add
1596 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1597 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1598 if (global_var)
1599 {
1600 add_stmt_operand (&global_var, s_ann, opf_none);
1601 return;
1602 }
1603
1604 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1605
1606 /* Add a VUSE for each call-clobbered variable. */
1607 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1608 {
1609 tree var = referenced_var (u);
1610 tree real_var = var;
1611 bool not_read;
1612
1613 clobber_stats.readonly_clobbers++;
1614
1615 /* Not read and not written are computed on regular vars, not
1616 subvars, so look at the parent var if this is an SFT. */
1617
1618 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1619 real_var = SFT_PARENT_VAR (var);
1620
1621 not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
1622 : false;
1623
1624 if (not_read)
1625 {
1626 clobber_stats.static_readonly_clobbers_avoided++;
1627 continue;
1628 }
1629
1630 add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
1631 }
1632 }
1633
1634
1635 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1636
1637 static void
1638 get_call_expr_operands (tree stmt, tree expr)
1639 {
1640 tree op;
1641 int call_flags = call_expr_flags (expr);
1642
1643 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1644 operands for all the symbols that have been found to be
1645 call-clobbered.
1646
1647 Note that if aliases have not been computed, the global effects
1648 of calls will not be included in the SSA web. This is fine
1649 because no optimizer should run before aliases have been
1650 computed. By not bothering with virtual operands for CALL_EXPRs
1651 we avoid adding superfluous virtual operands, which can be a
1652 significant compile time sink (See PR 15855). */
1653 if (aliases_computed_p
1654 && !bitmap_empty_p (call_clobbered_vars)
1655 && !(call_flags & ECF_NOVOPS))
1656 {
1657 /* A 'pure' or a 'const' function never call-clobbers anything.
1658 A 'noreturn' function might, but since we don't return anyway
1659 there is no point in recording that. */
1660 if (TREE_SIDE_EFFECTS (expr)
1661 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1662 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1663 else if (!(call_flags & ECF_CONST))
1664 add_call_read_ops (stmt, get_callee_fndecl (expr));
1665 }
1666
1667 /* Find uses in the called function. */
1668 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1669
1670 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1671 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1672
1673 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1674 }
1675
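/* Illustrative example (made-up IL, following the dump format used in
   the comments earlier in this file): for a statement "x = foo ();"
   where the global 'g' is call-clobbered and foo is neither const nor
   pure, add_call_clobber_ops gives the statement a preserving
   definition of 'g':

      # g_4 = V_MAY_DEF <g_3>
      x_5 = foo ();

   If foo were pure, add_call_read_ops would add only a use instead:

      # VUSE <g_3>
      x_5 = foo ();  */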
1676
1677 /* Scan operands in the ASM_EXPR statement STMT. */
1678
1679 static void
1680 get_asm_expr_operands (tree stmt)
1681 {
1682 stmt_ann_t s_ann = stmt_ann (stmt);
1683 int noutputs = list_length (ASM_OUTPUTS (stmt));
1684 const char **oconstraints
1685 = (const char **) alloca ((noutputs) * sizeof (const char *));
1686 int i;
1687 tree link;
1688 const char *constraint;
1689 bool allows_mem, allows_reg, is_inout;
1690
1691 for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1692 {
1693 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1694 oconstraints[i] = constraint;
1695 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
1696 &allows_reg, &is_inout);
1697
1698 /* This should have been split in gimplify_asm_expr. */
1699 gcc_assert (!allows_reg || !is_inout);
1700
1701 /* Memory operands are addressable. Note that STMT needs the
1702 address of this operand. */
1703 if (!allows_reg && allows_mem)
1704 {
1705 tree t = get_base_address (TREE_VALUE (link));
1706 if (t && DECL_P (t) && s_ann)
1707 add_to_addressable_set (t, &s_ann->addresses_taken);
1708 }
1709
1710 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1711 }
1712
1713 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1714 {
1715 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1716 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1717 oconstraints, &allows_mem, &allows_reg);
1718
1719 /* Memory operands are addressable. Note that STMT needs the
1720 address of this operand. */
1721 if (!allows_reg && allows_mem)
1722 {
1723 tree t = get_base_address (TREE_VALUE (link));
1724 if (t && DECL_P (t) && s_ann)
1725 add_to_addressable_set (t, &s_ann->addresses_taken);
1726 }
1727
1728 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1729 }
1730
1731
1732 /* Clobber memory for asm ("" : : : "memory"); */
1733 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1734 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1735 {
1736 unsigned i;
1737 bitmap_iterator bi;
1738
1739 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1740 decided to group them). */
1741 if (global_var)
1742 add_stmt_operand (&global_var, s_ann, opf_is_def);
1743 else
1744 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1745 {
1746 tree var = referenced_var (i);
1747 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1748 }
1749
1750 /* Now clobber all addressables. */
1751 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1752 {
1753 tree var = referenced_var (i);
1754
1755 /* Subvars are explicitly represented in this list, so
1756 we don't need the original to be added to the clobber
1757 ops, but the original *will* be in this list because
1758 we keep the addressability of the original
1759 variable up-to-date so we don't screw up the rest of
1760 the backend. */
1761 if (var_can_have_subvars (var)
1762 && get_subvars_for_var (var) != NULL)
1763 continue;
1764
1765 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1766 }
1767
1768 break;
1769 }
1770 }
1771
1772
1773 /* Scan operands for the assignment expression EXPR in statement STMT. */
1774
1775 static void
1776 get_modify_expr_operands (tree stmt, tree expr)
1777 {
1778 /* First get operands from the RHS. */
1779 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1780
1781 /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
1782 registers. If the LHS is a store to memory, we will either need
1783 a preserving definition (V_MAY_DEF) or a killing definition
1784 (V_MUST_DEF).
1785
1786 Preserving definitions are those that modify a part of an
1787 aggregate object for which no subvars have been computed (or the
1788 reference does not correspond exactly to one of them). Stores
1789 through a pointer are also represented with V_MAY_DEF operators.
1790
1791 The determination of whether to use a preserving or a killing
1792 definition is done while scanning the LHS of the assignment. By
1793 default, assume that we will emit a V_MUST_DEF. */
1794 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def|opf_kill_def);
1795 }
1796
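/* Illustrative examples (made-up IL, not from the original sources) of
   the LHS handling described above:

      a = b + 1;     real DEF of 'a' when 'a' is a GIMPLE register,
                     plus a real USE of 'b'
      s.f = 1;       V_MUST_DEF of the subvar for s.f when the reference
                     matches that subvar exactly; otherwise the
                     opf_kill_def flag is dropped and a V_MAY_DEF is used
      *p_1 = 1;      V_MAY_DEF of p_1's memory tag; stores through
                     pointers are never killing definitions
                     (see get_indirect_ref_operands)  */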
1797
1798 /* Recursively scan the expression pointed to by EXPR_P in statement
1799 STMT. FLAGS is one of the OPF_* constants modifying how to
1800 interpret the operands found. */
1801
1802 static void
1803 get_expr_operands (tree stmt, tree *expr_p, int flags)
1804 {
1805 enum tree_code code;
1806 enum tree_code_class class;
1807 tree expr = *expr_p;
1808 stmt_ann_t s_ann = stmt_ann (stmt);
1809
1810 if (expr == NULL)
1811 return;
1812
1813 code = TREE_CODE (expr);
1814 class = TREE_CODE_CLASS (code);
1815
1816 switch (code)
1817 {
1818 case ADDR_EXPR:
1819 /* Taking the address of a variable does not represent a
1820 reference to it, but the fact that the statement takes its
1821 address will be of interest to some passes (e.g. alias
1822 resolution). */
1823 add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
1824
1825 /* If the address is invariant, there may be no interesting
1826 variable references inside. */
1827 if (is_gimple_min_invariant (expr))
1828 return;
1829
1830 /* Otherwise, there may be variables referenced inside but there
1831 should be no VUSEs created, since the referenced objects are
1832 not really accessed. The only operands that we should find
1833 here are ARRAY_REF indices which will always be real operands
1834 (GIMPLE does not allow non-registers as array indices). */
1835 flags |= opf_no_vops;
1836 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1837 return;
1838
1839 case SSA_NAME:
1840 case STRUCT_FIELD_TAG:
1841 case SYMBOL_MEMORY_TAG:
1842 case NAME_MEMORY_TAG:
1843 add_stmt_operand (expr_p, s_ann, flags);
1844 return;
1845
1846 case VAR_DECL:
1847 case PARM_DECL:
1848 case RESULT_DECL:
1849 {
1850 subvar_t svars;
1851
1852 /* Add the subvars for a variable, if it has subvars, to DEFS
1853 or USES. Otherwise, add the variable itself. Whether it
1854 goes to USES or DEFS depends on the operand flags. */
1855 if (var_can_have_subvars (expr)
1856 && (svars = get_subvars_for_var (expr)))
1857 {
1858 subvar_t sv;
1859 for (sv = svars; sv; sv = sv->next)
1860 add_stmt_operand (&sv->var, s_ann, flags);
1861 }
1862 else
1863 add_stmt_operand (expr_p, s_ann, flags);
1864
1865 return;
1866 }
1867
1868 case MISALIGNED_INDIRECT_REF:
1869 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1870 /* fall through */
1871
1872 case ALIGN_INDIRECT_REF:
1873 case INDIRECT_REF:
1874 get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
1875 return;
1876
1877 case TARGET_MEM_REF:
1878 get_tmr_operands (stmt, expr, flags);
1879 return;
1880
1881 case ARRAY_REF:
1882 case ARRAY_RANGE_REF:
1883 case COMPONENT_REF:
1884 case REALPART_EXPR:
1885 case IMAGPART_EXPR:
1886 {
1887 tree ref;
1888 HOST_WIDE_INT offset, size, maxsize;
1889 bool none = true;
1890
1891 /* This component reference becomes an access to all of the
1892 subvariables it can touch, if we can determine that, but
1893 *NOT* the real one. If we can't determine which fields we
1894 could touch, the recursion will eventually get to a
1895 variable and add *all* of its subvars, or whatever is the
1896 minimum correct subset. */
1897 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
1898 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
1899 {
1900 subvar_t sv;
1901 subvar_t svars = get_subvars_for_var (ref);
1902
1903 for (sv = svars; sv; sv = sv->next)
1904 {
1905 bool exact;
1906
1907 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1908 {
1909 int subvar_flags = flags;
1910 none = false;
1911 if (!exact || size != maxsize)
1912 subvar_flags &= ~opf_kill_def;
1913 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1914 }
1915 }
1916
1917 if (!none)
1918 flags |= opf_no_vops;
1919 }
1920 else if (TREE_CODE (ref) == INDIRECT_REF)
1921 {
1922 get_indirect_ref_operands (stmt, ref, flags, expr, offset,
1923 maxsize, false);
1924 flags |= opf_no_vops;
1925 }
1926
1927 /* Even if we found subvars above, we still need to scan the base
1928 expression so that immediate uses like D in S.A[D] are seen;
1929 if S.A has a subvar we would miss D otherwise. */
1930 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1931 flags & ~opf_kill_def);
1932
1933 if (code == COMPONENT_REF)
1934 {
1935 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1936 s_ann->has_volatile_ops = true;
1937 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1938 }
1939 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
1940 {
1941 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1942 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1943 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1944 }
1945
1946 return;
1947 }
1948
1949 case WITH_SIZE_EXPR:
1950 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1951 and an rvalue reference to its second argument. */
1952 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1953 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1954 return;
1955
1956 case CALL_EXPR:
1957 get_call_expr_operands (stmt, expr);
1958 return;
1959
1960 case COND_EXPR:
1961 case VEC_COND_EXPR:
1962 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1963 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1964 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1965 return;
1966
1967 case MODIFY_EXPR:
1968 get_modify_expr_operands (stmt, expr);
1969 return;
1970
1971 case CONSTRUCTOR:
1972 {
1973 /* General aggregate CONSTRUCTORs have been decomposed, but they
1974 are still in use as the COMPLEX_EXPR equivalent for vectors. */
1975 constructor_elt *ce;
1976 unsigned HOST_WIDE_INT idx;
1977
1978 for (idx = 0;
1979 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
1980 idx++)
1981 get_expr_operands (stmt, &ce->value, opf_none);
1982
1983 return;
1984 }
1985
1986 case BIT_FIELD_REF:
1987 /* Stores using BIT_FIELD_REF are always preserving definitions. */
1988 flags &= ~opf_kill_def;
1989
1990 /* Fallthru */
1991
1992 case TRUTH_NOT_EXPR:
1993 case VIEW_CONVERT_EXPR:
1994 do_unary:
1995 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1996 return;
1997
1998 case TRUTH_AND_EXPR:
1999 case TRUTH_OR_EXPR:
2000 case TRUTH_XOR_EXPR:
2001 case COMPOUND_EXPR:
2002 case OBJ_TYPE_REF:
2003 case ASSERT_EXPR:
2004 do_binary:
2005 {
2006 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2007 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2008 return;
2009 }
2010
2011 case DOT_PROD_EXPR:
2012 case REALIGN_LOAD_EXPR:
2013 {
2014 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2015 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2016 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
2017 return;
2018 }
2019
2020 case BLOCK:
2021 case FUNCTION_DECL:
2022 case EXC_PTR_EXPR:
2023 case FILTER_EXPR:
2024 case LABEL_DECL:
2025 case CONST_DECL:
2026 case OMP_PARALLEL:
2027 case OMP_SECTIONS:
2028 case OMP_FOR:
2029 case OMP_SINGLE:
2030 case OMP_MASTER:
2031 case OMP_ORDERED:
2032 case OMP_CRITICAL:
2033 case OMP_RETURN:
2034 case OMP_CONTINUE:
2035 /* Expressions that make no memory references. */
2036 return;
2037
2038 default:
2039 if (class == tcc_unary)
2040 goto do_unary;
2041 if (class == tcc_binary || class == tcc_comparison)
2042 goto do_binary;
2043 if (class == tcc_constant || class == tcc_type)
2044 return;
2045 }
2046
2047 /* If we get here, something has gone wrong. */
2048 #ifdef ENABLE_CHECKING
2049 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
2050 debug_tree (expr);
2051 fputs ("\n", stderr);
2052 #endif
2053 gcc_unreachable ();
2054 }
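
/* Passes normally consume the cache built here through the SSA operand
   iterator macros rather than by walking the statement tree again.  A
   minimal sketch (STMT is some statement; process_use is hypothetical):

     tree op;
     ssa_op_iter iter;

     FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
       process_use (op);
*/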
2055
2056
2057 /* Parse STMT looking for operands. When finished, the various
2058 build_* operand vectors will have potential operands in them. */
2059
2060 static void
2061 parse_ssa_operands (tree stmt)
2062 {
2063 enum tree_code code;
2064
2065 code = TREE_CODE (stmt);
2066 switch (code)
2067 {
2068 case MODIFY_EXPR:
2069 get_modify_expr_operands (stmt, stmt);
2070 break;
2071
2072 case COND_EXPR:
2073 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
2074 break;
2075
2076 case SWITCH_EXPR:
2077 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
2078 break;
2079
2080 case ASM_EXPR:
2081 get_asm_expr_operands (stmt);
2082 break;
2083
2084 case RETURN_EXPR:
2085 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
2086 break;
2087
2088 case GOTO_EXPR:
2089 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
2090 break;
2091
2092 case LABEL_EXPR:
2093 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
2094 break;
2095
2096 case BIND_EXPR:
2097 case CASE_LABEL_EXPR:
2098 case TRY_CATCH_EXPR:
2099 case TRY_FINALLY_EXPR:
2100 case EH_FILTER_EXPR:
2101 case CATCH_EXPR:
2102 case RESX_EXPR:
2103 /* These nodes contain no variable references. */
2104 break;
2105
2106 default:
2107 /* Notice that if get_expr_operands tries to use &STMT as the
2108 operand pointer (which may only happen for USE operands), we
2109 will fail in add_stmt_operand. This default will handle
2110 statements like empty statements, or CALL_EXPRs that may
2111 appear on the RHS of a statement or as statements themselves. */
2112 get_expr_operands (stmt, &stmt, opf_none);
2113 break;
2114 }
2115 }
2116
2117
2118 /* Create an operands cache for STMT. */
2119
2120 static void
2121 build_ssa_operands (tree stmt)
2122 {
2123 stmt_ann_t ann = get_stmt_ann (stmt);
2124
2125 /* Initially assume that the statement has no volatile operands. */
2126 if (ann)
2127 ann->has_volatile_ops = false;
2128
2129 start_ssa_stmt_operands ();
2130
2131 parse_ssa_operands (stmt);
2132 operand_build_sort_virtual (build_vuses);
2133 operand_build_sort_virtual (build_v_may_defs);
2134 operand_build_sort_virtual (build_v_must_defs);
2135
2136 finalize_ssa_stmt_operands (stmt);
2137 }
2138
2139
2140 /* Free the operand vectors in OPS. */
2141
2142 void
2143 free_ssa_operands (stmt_operands_p ops)
2144 {
2145 ops->def_ops = NULL;
2146 ops->use_ops = NULL;
2147 ops->maydef_ops = NULL;
2148 ops->mustdef_ops = NULL;
2149 ops->vuse_ops = NULL;
2150 }
2151
2152
2153 /* Update the operand cache of statement STMT. */
2154
2155 void
2156 update_stmt_operands (tree stmt)
2157 {
2158 stmt_ann_t ann = get_stmt_ann (stmt);
2159
2160 /* If update_stmt_operands is called before SSA is initialized, do
2161 nothing. */
2162 if (!ssa_operands_active ())
2163 return;
2164
2165 /* The optimizers cannot handle statements that are nothing but a
2166 _DECL. This indicates a bug in the gimplifier. */
2167 gcc_assert (!SSA_VAR_P (stmt));
2168
2169 gcc_assert (ann->modified);
2170
2171 timevar_push (TV_TREE_OPS);
2172
2173 build_ssa_operands (stmt);
2174
2175 /* Clear the modified bit for STMT. */
2176 ann->modified = 0;
2177
2178 timevar_pop (TV_TREE_OPS);
2179 }
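
/* A pass that rewrites an operand in place is expected to flag the
   statement so that the cache is rebuilt by this function.  A minimal
   sketch, assuming the usual update helper (NEW_RHS is hypothetical):

     TREE_OPERAND (stmt, 1) = new_rhs;
     update_stmt (stmt);      <-- marks STMT modified and triggers
                                  the rebuild performed above.  */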
2180
2181
2182 /* Copy the virtual operands from SRC to DEST. */
2183
2184 void
2185 copy_virtual_operands (tree dest, tree src)
2186 {
2187 tree t;
2188 ssa_op_iter iter, old_iter;
2189 use_operand_p use_p, u2;
2190 def_operand_p def_p, d2;
2191
2192 build_ssa_operands (dest);
2193
2194 /* Copy all the virtual fields. */
2195 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
2196 append_vuse (t);
2197 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
2198 append_v_may_def (t);
2199 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
2200 append_v_must_def (t);
2201
2202 if (VEC_length (tree, build_vuses) == 0
2203 && VEC_length (tree, build_v_may_defs) == 0
2204 && VEC_length (tree, build_v_must_defs) == 0)
2205 return;
2206
2207 /* Now commit the virtual operands to this stmt. */
2208 finalize_ssa_v_must_defs (dest);
2209 finalize_ssa_v_may_defs (dest);
2210 finalize_ssa_vuses (dest);
2211
2212 /* Finally, set the fields to the same values as the originals. */
2213 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
2214 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
2215 {
2216 gcc_assert (!op_iter_done (&old_iter));
2217 SET_USE (use_p, t);
2218 t = op_iter_next_tree (&old_iter);
2219 }
2220 gcc_assert (op_iter_done (&old_iter));
2221
2222 op_iter_init_maydef (&old_iter, src, &u2, &d2);
2223 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
2224 {
2225 gcc_assert (!op_iter_done (&old_iter));
2226 SET_USE (use_p, USE_FROM_PTR (u2));
2227 SET_DEF (def_p, DEF_FROM_PTR (d2));
2228 op_iter_next_maymustdef (&u2, &d2, &old_iter);
2229 }
2230 gcc_assert (op_iter_done (&old_iter));
2231
2232 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
2233 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
2234 {
2235 gcc_assert (!op_iter_done (&old_iter));
2236 SET_USE (use_p, USE_FROM_PTR (u2));
2237 SET_DEF (def_p, DEF_FROM_PTR (d2));
2238 op_iter_next_maymustdef (&u2, &d2, &old_iter);
2239 }
2240 gcc_assert (op_iter_done (&old_iter));
2241
2242 }
2243
2244
2245 /* Specifically for use in DOM's expression analysis. Given a store, we
2246 create an artificial stmt which looks like a load from the store; this
2247 can be used to eliminate redundant loads. OLD_STMT is the store
2248 statement, and NEW_STMT is the new load, which represents a load of the
2249 values stored. */
2250
2251 void
2252 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
2253 {
2254 stmt_ann_t ann;
2255 tree op;
2256 ssa_op_iter iter;
2257 use_operand_p use_p;
2258 unsigned x;
2259
2260 ann = get_stmt_ann (new_stmt);
2261
2262 /* Process the stmt looking for operands. */
2263 start_ssa_stmt_operands ();
2264 parse_ssa_operands (new_stmt);
2265
2266 for (x = 0; x < VEC_length (tree, build_vuses); x++)
2267 {
2268 tree t = VEC_index (tree, build_vuses, x);
2269 if (TREE_CODE (t) != SSA_NAME)
2270 {
2271 var_ann_t ann = var_ann (t);
2272 ann->in_vuse_list = 0;
2273 }
2274 }
2275
2276 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
2277 {
2278 tree t = VEC_index (tree, build_v_may_defs, x);
2279 if (TREE_CODE (t) != SSA_NAME)
2280 {
2281 var_ann_t ann = var_ann (t);
2282 ann->in_v_may_def_list = 0;
2283 }
2284 }
2285
2286 /* Remove any virtual operands that were found. */
2287 VEC_truncate (tree, build_v_may_defs, 0);
2288 VEC_truncate (tree, build_v_must_defs, 0);
2289 VEC_truncate (tree, build_vuses, 0);
2290
2291 /* For each VDEF on the original statement, we want to create a
2292 VUSE of the V_MAY_DEF result or V_MUST_DEF result on the new
2293 statement. */
2294 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
2295 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
2296 append_vuse (op);
2297
2298 /* Now build the operands for this new stmt. */
2299 finalize_ssa_stmt_operands (new_stmt);
2300
2301 /* None of the uses in this fake stmt may be linked into the immediate use lists. */
2302 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2303 delink_imm_use (use_p);
2304 }
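
/* For instance, given a store and its virtual operand (all names
   hypothetical):

     # a_7 = V_MAY_DEF <a_6>
     *p_3 = x_5;

   DOM can build the artificial load 'tmp_8 = *p_3', pass both
   statements here, and the fake load will carry 'VUSE <a_7>', so a
   later real load from *p_3 can be detected as redundant.  */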
2305
2306
2307 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made
2308 to check the validity of the swap operation. */
2309
2310 void
2311 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2312 {
2313 tree op0, op1;
2314 op0 = *exp0;
2315 op1 = *exp1;
2316
2317 /* If the operand cache is active, attempt to preserve the relative
2318 positions of these two operands in their respective immediate use
2319 lists. */
2320 if (ssa_operands_active () && op0 != op1)
2321 {
2322 use_optype_p use0, use1, ptr;
2323 use0 = use1 = NULL;
2324
2325 /* Find the 2 operands in the cache, if they are there. */
2326 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2327 if (USE_OP_PTR (ptr)->use == exp0)
2328 {
2329 use0 = ptr;
2330 break;
2331 }
2332
2333 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2334 if (USE_OP_PTR (ptr)->use == exp1)
2335 {
2336 use1 = ptr;
2337 break;
2338 }
2339
2340 /* Unless we found operand entries for both uses, there isn't much we
2341 can do at this point. Presumably we don't need to worry about it. */
2342 if (use0 && use1)
2343 {
2344 tree *tmp = USE_OP_PTR (use1)->use;
2345 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2346 USE_OP_PTR (use0)->use = tmp;
2347 }
2348 }
2349
2350 /* Now swap the data. */
2351 *exp0 = op1;
2352 *exp1 = op0;
2353 }
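
/* A typical use is canonicalizing a commutative expression on the RHS
   of an assignment, e.g. (RHS is a hypothetical binary expression
   contained in STMT):

     swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
                         &TREE_OPERAND (rhs, 1));

   Both pointers must point into STMT's own operand tree; otherwise the
   use operand entries searched for above cannot be found.  */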
2354
2355
2356 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
2357 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
2358 a single variable whose address has been taken or any other valid
2359 GIMPLE memory reference (structure reference, array, etc). If the
2360 base address of REF is a decl that has sub-variables, also add all
2361 of its sub-variables. */
2362
2363 void
2364 add_to_addressable_set (tree ref, bitmap *addresses_taken)
2365 {
2366 tree var;
2367 subvar_t svars;
2368
2369 gcc_assert (addresses_taken);
2370
2371 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2372 as the only thing we take the address of. If VAR is a structure,
2373 taking the address of a field means that the whole structure may
2374 be referenced using pointer arithmetic. See PR 21407 and the
2375 ensuing mailing list discussion. */
2376 var = get_base_address (ref);
2377 if (var && SSA_VAR_P (var))
2378 {
2379 if (*addresses_taken == NULL)
2380 *addresses_taken = BITMAP_GGC_ALLOC ();
2381
2382 if (var_can_have_subvars (var)
2383 && (svars = get_subvars_for_var (var)))
2384 {
2385 subvar_t sv;
2386 for (sv = svars; sv; sv = sv->next)
2387 {
2388 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
2389 TREE_ADDRESSABLE (sv->var) = 1;
2390 }
2391 }
2392 else
2393 {
2394 bitmap_set_bit (*addresses_taken, DECL_UID (var));
2395 TREE_ADDRESSABLE (var) = 1;
2396 }
2397 }
2398 }
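
/* For example, when the ADDR_EXPR case of get_expr_operands sees the
   address computation '&s.f' (names hypothetical), it passes 's.f'
   here; the base 's' is marked addressable, and if 's' has subvars
   every one of them is added to the set, for the PR 21407 reasons
   described above.  */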
2399
2400
2401 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2402 Return TRUE if there is a problem and emit an error message to F. */
2403
2404 bool
2405 verify_imm_links (FILE *f, tree var)
2406 {
2407 use_operand_p ptr, prev, list;
2408 int count;
2409
2410 gcc_assert (TREE_CODE (var) == SSA_NAME);
2411
2412 list = &(SSA_NAME_IMM_USE_NODE (var));
2413 gcc_assert (list->use == NULL);
2414
2415 if (list->prev == NULL)
2416 {
2417 gcc_assert (list->next == NULL);
2418 return false;
2419 }
2420
2421 prev = list;
2422 count = 0;
2423 for (ptr = list->next; ptr != list; )
2424 {
2425 if (prev != ptr->prev)
2426 goto error;
2427
2428 if (ptr->use == NULL)
2429 goto error; /* Two list roots, or an iterator's safe guard node. */
2430 else if (*(ptr->use) != var)
2431 goto error;
2432
2433 prev = ptr;
2434 ptr = ptr->next;
2435
2436 /* Avoid infinite loops. 50,000,000 uses probably indicates a
2437 problem. */
2438 if (count++ > 50000000)
2439 goto error;
2440 }
2441
2442 /* Verify list in the other direction. */
2443 prev = list;
2444 for (ptr = list->prev; ptr != list; )
2445 {
2446 if (prev != ptr->next)
2447 goto error;
2448 prev = ptr;
2449 ptr = ptr->prev;
2450 if (count-- < 0)
2451 goto error;
2452 }
2453
2454 if (count != 0)
2455 goto error;
2456
2457 return false;
2458
2459 error:
2460 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2461 {
2462 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2463 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2464 }
2465 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2466 (void *)ptr->use);
2467 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2468 fprintf (f, "\n");
2469 return true;
2470 }
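
/* The list being checked is a circular doubly-linked list rooted at
   the node embedded in VAR itself (SSA_NAME_IMM_USE_NODE).  The root
   is the only node whose USE pointer is NULL; every other node's USE
   must point at an operand whose current value is VAR:

     root <-> use_1 <-> use_2 <-> ... <-> use_n <-> root  */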
2471
2472
2473 /* Dump all the immediate uses of VAR to FILE. */
2474
2475 void
2476 dump_immediate_uses_for (FILE *file, tree var)
2477 {
2478 imm_use_iterator iter;
2479 use_operand_p use_p;
2480
2481 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2482
2483 print_generic_expr (file, var, TDF_SLIM);
2484 fprintf (file, " : -->");
2485 if (has_zero_uses (var))
2486 fprintf (file, " no uses.\n");
2487 else
2488 if (has_single_use (var))
2489 fprintf (file, " single use.\n");
2490 else
2491 fprintf (file, "%d uses.\n", num_imm_uses (var));
2492
2493 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2494 {
2495 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2496 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2497 else
2498 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2499 }
2500 fprintf (file, "\n");
2501 }
2502
2503
2504 /* Dump all the immediate uses to FILE. */
2505
2506 void
2507 dump_immediate_uses (FILE *file)
2508 {
2509 tree var;
2510 unsigned int x;
2511
2512 fprintf (file, "Immediate_uses: \n\n");
2513 for (x = 1; x < num_ssa_names; x++)
2514 {
2515 var = ssa_name (x);
2516 if (!var)
2517 continue;
2518 dump_immediate_uses_for (file, var);
2519 }
2520 }
2521
2522
2523 /* Dump def-use edges on stderr. */
2524
2525 void
2526 debug_immediate_uses (void)
2527 {
2528 dump_immediate_uses (stderr);
2529 }
2530
2531
2532 /* Dump def-use edges for VAR on stderr. */
2533
2534 void
2535 debug_immediate_uses_for (tree var)
2536 {
2537 dump_immediate_uses_for (stderr, var);
2538 }
2539
2540 #include "gt-tree-ssa-operands.h"