1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43      The operand types are broken up into REAL and VIRTUAL operands.  The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47      variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53      The operand tree is then parsed by the various get_* routines, which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57      5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
59
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
62
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
66
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71      virtual operands, if the previous cache had an SSA_NAME version of a
72      variable, and that same variable occurs in the new operands cache, then
73 the new cache vector will also get the same SSA_NAME.
74
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'. */
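
/* For illustration, passes normally consume this cache through the SSA
   operand iterator macros declared in tree-ssa-operands.h rather than by
   touching the vectors directly.  A minimal sketch (process_use stands in
   for whatever the pass does with each use):

	use_operand_p use_p;
	ssa_op_iter iter;
	FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
	  process_use (USE_FROM_PTR (use_p));

   After a pass modifies a statement, update_stmt is used to rebuild the
   cache described below.  */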
78
79 /* Flags to describe operand properties in helpers. */
80
81 /* By default, operands are loaded. */
82 #define opf_none 0
83
84 /* Operand is the target of an assignment expression or a
85 call-clobbered variable. */
86 #define opf_is_def (1 << 0)
87
88 /* Operand is the target of an assignment expression. */
89 #define opf_kill_def (1 << 1)
90
91 /* No virtual operands should be created in the expression. This is used
92 when traversing ADDR_EXPR nodes which have different semantics than
93 other expressions. Inside an ADDR_EXPR node, the only operands that we
94 need to consider are indices into arrays. For instance, &a.b[i] should
95 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
96 VUSE for 'b'. */
97 #define opf_no_vops (1 << 2)
98
99 /* Operand is a "non-specific" kill for call-clobbers and such. This
100 is used to distinguish "reset the world" events from explicit
101 MODIFY_EXPRs. */
102 #define opf_non_specific (1 << 3)
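
/* For example, when scanning the LHS of an assignment,
   get_modify_expr_operands below passes opf_is_def | opf_kill_def, while
   operands found inside an ADDR_EXPR are scanned with opf_no_vops added
   to the flags.  */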
103
104 /* Array for building all the def operands. */
105 static VEC(tree,heap) *build_defs;
106
107 /* Array for building all the use operands. */
108 static VEC(tree,heap) *build_uses;
109
110 /* Array for building all the V_MAY_DEF operands. */
111 static VEC(tree,heap) *build_v_may_defs;
112
113 /* Array for building all the VUSE operands. */
114 static VEC(tree,heap) *build_vuses;
115
116 /* Array for building all the V_MUST_DEF operands. */
117 static VEC(tree,heap) *build_v_must_defs;
118
 119 /* True if the SSA operand cache is active.  */
120 static bool ops_active = false;
121
122 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
123 static unsigned operand_memory_index;
124
125 static void get_expr_operands (tree, tree *, int);
126
127 static def_optype_p free_defs = NULL;
128 static use_optype_p free_uses = NULL;
129 static vuse_optype_p free_vuses = NULL;
130 static maydef_optype_p free_maydefs = NULL;
131 static mustdef_optype_p free_mustdefs = NULL;
132
 133 /* Allocates operand OP of the given TYPE from the appropriate free list,
 134    or from newly allocated operand memory if the list is empty.  */
135
136 #define ALLOC_OPTYPE(OP, TYPE) \
137 do \
138 { \
139 TYPE##_optype_p ret = free_##TYPE##s; \
140 if (ret) \
141 free_##TYPE##s = ret->next; \
142 else \
143 ret = ssa_operand_alloc (sizeof (*ret)); \
144 (OP) = ret; \
145 } while (0)
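
/* For example, add_def_op below uses "ALLOC_OPTYPE (new, def)", which
   either pops a node off the free_defs list or carves a fresh
   def_optype_d out of the memory returned by ssa_operand_alloc.  */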
146
147 /* Return the DECL_UID of the base variable of T. */
148
149 static inline unsigned
150 get_name_decl (tree t)
151 {
152 if (TREE_CODE (t) != SSA_NAME)
153 return DECL_UID (t);
154 else
155 return DECL_UID (SSA_NAME_VAR (t));
156 }
157
158
159 /* Comparison function for qsort used in operand_build_sort_virtual. */
160
161 static int
162 operand_build_cmp (const void *p, const void *q)
163 {
164 tree e1 = *((const tree *)p);
165 tree e2 = *((const tree *)q);
 166   unsigned int u1, u2;
167
168 u1 = get_name_decl (e1);
169 u2 = get_name_decl (e2);
170
171 /* We want to sort in ascending order. They can never be equal. */
172 #ifdef ENABLE_CHECKING
173 gcc_assert (u1 != u2);
174 #endif
175 return (u1 > u2 ? 1 : -1);
176 }
177
178
179 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
180
181 static inline void
182 operand_build_sort_virtual (VEC(tree,heap) *list)
183 {
184 int num = VEC_length (tree, list);
185
186 if (num < 2)
187 return;
188
189 if (num == 2)
190 {
191 if (get_name_decl (VEC_index (tree, list, 0))
192 > get_name_decl (VEC_index (tree, list, 1)))
193 {
194 /* Swap elements if in the wrong order. */
195 tree tmp = VEC_index (tree, list, 0);
196 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
197 VEC_replace (tree, list, 1, tmp);
198 }
199 return;
200 }
201
202 /* There are 3 or more elements, call qsort. */
203 qsort (VEC_address (tree, list),
204 VEC_length (tree, list),
205 sizeof (tree),
206 operand_build_cmp);
207 }
208
209
210 /* Return true if the SSA operands cache is active. */
211
212 bool
213 ssa_operands_active (void)
214 {
215 return ops_active;
216 }
217
218
219 /* Structure storing statistics on how many call clobbers we have, and
 220    how many were avoided.  */
221
222 static struct
223 {
224 /* Number of call-clobbered ops we attempt to add to calls in
225 add_call_clobber_ops. */
226 unsigned int clobbered_vars;
227
228 /* Number of write-clobbers (V_MAY_DEFs) avoided by using
229 not_written information. */
230 unsigned int static_write_clobbers_avoided;
231
232 /* Number of reads (VUSEs) avoided by using not_read information. */
233 unsigned int static_read_clobbers_avoided;
234
235 /* Number of write-clobbers avoided because the variable can't escape to
236 this call. */
237 unsigned int unescapable_clobbers_avoided;
238
239 /* Number of read-only uses we attempt to add to calls in
240 add_call_read_ops. */
241 unsigned int readonly_clobbers;
242
243 /* Number of read-only uses we avoid using not_read information. */
244 unsigned int static_readonly_clobbers_avoided;
245 } clobber_stats;
246
247
248 /* Initialize the operand cache routines. */
249
250 void
251 init_ssa_operands (void)
252 {
253 build_defs = VEC_alloc (tree, heap, 5);
254 build_uses = VEC_alloc (tree, heap, 10);
255 build_vuses = VEC_alloc (tree, heap, 25);
256 build_v_may_defs = VEC_alloc (tree, heap, 25);
257 build_v_must_defs = VEC_alloc (tree, heap, 25);
258
259 gcc_assert (operand_memory == NULL);
260 operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
261 ops_active = true;
262 memset (&clobber_stats, 0, sizeof (clobber_stats));
263 }
264
265
266 /* Dispose of anything required by the operand routines. */
267
268 void
269 fini_ssa_operands (void)
270 {
271 struct ssa_operand_memory_d *ptr;
272 VEC_free (tree, heap, build_defs);
273 VEC_free (tree, heap, build_uses);
274 VEC_free (tree, heap, build_v_must_defs);
275 VEC_free (tree, heap, build_v_may_defs);
276 VEC_free (tree, heap, build_vuses);
277 free_defs = NULL;
278 free_uses = NULL;
279 free_vuses = NULL;
280 free_maydefs = NULL;
281 free_mustdefs = NULL;
282 while ((ptr = operand_memory) != NULL)
283 {
284 operand_memory = operand_memory->next;
285 ggc_free (ptr);
286 }
287
288 ops_active = false;
289
290 if (dump_file && (dump_flags & TDF_STATS))
291 {
292 fprintf (dump_file, "Original clobbered vars:%d\n",
293 clobber_stats.clobbered_vars);
294 fprintf (dump_file, "Static write clobbers avoided:%d\n",
295 clobber_stats.static_write_clobbers_avoided);
296 fprintf (dump_file, "Static read clobbers avoided:%d\n",
297 clobber_stats.static_read_clobbers_avoided);
298 fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
299 clobber_stats.unescapable_clobbers_avoided);
300 fprintf (dump_file, "Original read-only clobbers:%d\n",
301 clobber_stats.readonly_clobbers);
302 fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
303 clobber_stats.static_readonly_clobbers_avoided);
304 }
305 }
306
307
 308 /* Return a pointer to SIZE bytes of operand memory.  */
309
310 static inline void *
311 ssa_operand_alloc (unsigned size)
312 {
313 char *ptr;
314 if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
315 {
316 struct ssa_operand_memory_d *ptr;
317 ptr = GGC_NEW (struct ssa_operand_memory_d);
318 ptr->next = operand_memory;
319 operand_memory = ptr;
320 operand_memory_index = 0;
321 }
322 ptr = &(operand_memory->mem[operand_memory_index]);
323 operand_memory_index += size;
324 return ptr;
325 }
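
/* Note that operand memory is only handed out from these
   SSA_OPERAND_MEMORY_SIZE chunks and is never returned to the allocator
   individually; operand nodes are recycled through the free_* lists
   above, and the chunks themselves are released all at once by
   fini_ssa_operands.  */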
326
327
328 /* Make sure PTR is in the correct immediate use list. Since uses are simply
329 pointers into the stmt TREE, there is no way of telling if anyone has
 330    changed what this pointer points to via TREE_OPERAND (exp, 0) = <...>.
331 The contents are different, but the pointer is still the same. This
332 routine will check to make sure PTR is in the correct list, and if it isn't
333 put it in the correct list. We cannot simply check the previous node
 334    because all nodes in the same stmt might have been changed.  */
335
336 static inline void
337 correct_use_link (use_operand_p ptr, tree stmt)
338 {
339 use_operand_p prev;
340 tree root;
341
342 /* fold_stmt may have changed the stmt pointers. */
343 if (ptr->stmt != stmt)
344 ptr->stmt = stmt;
345
346 prev = ptr->prev;
347 if (prev)
348 {
349 /* Find the root element, making sure we skip any safe iterators. */
350 while (prev->use != NULL || prev->stmt == NULL)
351 prev = prev->prev;
352
353 /* Get the SSA_NAME of the list the node is in. */
354 root = prev->stmt;
355
356 /* If it's the right list, simply return. */
357 if (root == *(ptr->use))
358 return;
359 }
360
361 /* It is in the wrong list if we reach here. */
362 delink_imm_use (ptr);
363 link_imm_use (ptr, *(ptr->use));
364 }
365
366
367 /* This routine makes sure that PTR is in an immediate use list, and makes
368 sure the stmt pointer is set to the current stmt. Virtual uses do not need
369 the overhead of correct_use_link since they cannot be directly manipulated
370 like a real use can be. (They don't exist in the TREE_OPERAND nodes.) */
371
372 static inline void
373 set_virtual_use_link (use_operand_p ptr, tree stmt)
374 {
375 /* fold_stmt may have changed the stmt pointers. */
376 if (ptr->stmt != stmt)
377 ptr->stmt = stmt;
378
379 /* If this use isn't in a list, add it to the correct list. */
380 if (!ptr->prev)
381 link_imm_use (ptr, *(ptr->use));
382 }
383
384 /* Appends ELT after TO, and moves the TO pointer to ELT. */
385
386 #define APPEND_OP_AFTER(ELT, TO) \
387 do \
388 { \
389 (TO)->next = (ELT); \
390 (TO) = (ELT); \
391 } while (0)
392
 393 /* Appends the head of list FROM after TO, and moves both pointers
394 to their successors. */
395
396 #define MOVE_HEAD_AFTER(FROM, TO) \
397 do \
398 { \
399 APPEND_OP_AFTER (FROM, TO); \
400 (FROM) = (FROM)->next; \
401 } while (0)
402
 403 /* Moves OP to the appropriate freelist.  OP is set to its successor.  */
404
405 #define MOVE_HEAD_TO_FREELIST(OP, TYPE) \
406 do \
407 { \
408 TYPE##_optype_p next = (OP)->next; \
409 (OP)->next = free_##TYPE##s; \
410 free_##TYPE##s = (OP); \
411 (OP) = next; \
412 } while (0)
413
414 /* Initializes immediate use at USE_PTR to value VAL, and links it to the list
 415    of immediate uses.  STMT is the current statement.  */
416
417 #define INITIALIZE_USE(USE_PTR, VAL, STMT) \
418 do \
419 { \
420 (USE_PTR)->use = (VAL); \
421 link_imm_use_stmt ((USE_PTR), *(VAL), (STMT)); \
422 } while (0)
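
/* The finalize_ssa_*_ops routines below combine these macros: when an
   entry in a statement's old operand list matches the next entry in the
   corresponding build vector, MOVE_HEAD_AFTER splices the old node into
   the new list unchanged; when an old entry no longer appears,
   MOVE_HEAD_TO_FREELIST recycles its node for later reuse by
   ALLOC_OPTYPE.  */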
423
424 /* Adds OP to the list of defs after LAST, and moves
425 LAST to the new element. */
426
427 static inline void
428 add_def_op (tree *op, def_optype_p *last)
429 {
430 def_optype_p new;
431
432 ALLOC_OPTYPE (new, def);
433 DEF_OP_PTR (new) = op;
434 APPEND_OP_AFTER (new, *last);
435 }
436
437 /* Adds OP to the list of uses of statement STMT after LAST, and moves
438 LAST to the new element. */
439
440 static inline void
441 add_use_op (tree stmt, tree *op, use_optype_p *last)
442 {
443 use_optype_p new;
444
445 ALLOC_OPTYPE (new, use);
446 INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
447 APPEND_OP_AFTER (new, *last);
448 }
449
450 /* Adds OP to the list of vuses of statement STMT after LAST, and moves
451 LAST to the new element. */
452
453 static inline void
454 add_vuse_op (tree stmt, tree op, vuse_optype_p *last)
455 {
456 vuse_optype_p new;
457
458 ALLOC_OPTYPE (new, vuse);
459 VUSE_OP (new) = op;
460 INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
461 APPEND_OP_AFTER (new, *last);
462 }
463
464 /* Adds OP to the list of maydefs of statement STMT after LAST, and moves
465 LAST to the new element. */
466
467 static inline void
468 add_maydef_op (tree stmt, tree op, maydef_optype_p *last)
469 {
470 maydef_optype_p new;
471
472 ALLOC_OPTYPE (new, maydef);
473 MAYDEF_RESULT (new) = op;
474 MAYDEF_OP (new) = op;
475 INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
476 APPEND_OP_AFTER (new, *last);
477 }
478
479 /* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
480 LAST to the new element. */
481
482 static inline void
483 add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
484 {
485 mustdef_optype_p new;
486
487 ALLOC_OPTYPE (new, mustdef);
488 MUSTDEF_RESULT (new) = op;
489 MUSTDEF_KILL (new) = op;
490 INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
491 APPEND_OP_AFTER (new, *last);
492 }
493
494 /* Takes elements from build_defs and turns them into def operands of STMT.
 495    TODO -- Given that the def operands list is not necessarily sorted, merging
496 the operands this way does not make much sense.
497 -- Make build_defs VEC of tree *. */
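
/* A worked example of the merge (assuming the operand pointers happen to
   compare in ascending order, which is what the walk relies on): if the
   old DEF list holds {&a, &c} and build_defs holds {&a, &b, &c}, the node
   for 'a' is reused, a new node is allocated for 'b', the node for 'c' is
   reused, and any old nodes left over at the end go back on the free
   list.  */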
498
499 static inline void
500 finalize_ssa_def_ops (tree stmt)
501 {
502 unsigned new_i;
503 struct def_optype_d new_list;
504 def_optype_p old_ops, ptr, last;
505 tree *old_base;
506
507 new_list.next = NULL;
508 last = &new_list;
509
510 old_ops = DEF_OPS (stmt);
511
512 new_i = 0;
513 while (old_ops && new_i < VEC_length (tree, build_defs))
514 {
515 tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
516 old_base = DEF_OP_PTR (old_ops);
517
518 if (old_base == new_base)
519 {
520 /* if variables are the same, reuse this node. */
521 MOVE_HEAD_AFTER (old_ops, last);
522 new_i++;
523 }
524 else if (old_base < new_base)
525 {
526 /* if old is less than new, old goes to the free list. */
527 MOVE_HEAD_TO_FREELIST (old_ops, def);
528 }
529 else
530 {
531 /* This is a new operand. */
532 add_def_op (new_base, &last);
533 new_i++;
534 }
535 }
536
537 /* If there is anything remaining in the build_defs list, simply emit it. */
538 for ( ; new_i < VEC_length (tree, build_defs); new_i++)
539 add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);
540
541 last->next = NULL;
542
543 /* If there is anything in the old list, free it. */
544 if (old_ops)
545 {
546 old_ops->next = free_defs;
547 free_defs = old_ops;
548 }
549
550 /* Now set the stmt's operands. */
551 DEF_OPS (stmt) = new_list.next;
552
553 #ifdef ENABLE_CHECKING
554 {
555 unsigned x = 0;
556 for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
557 x++;
558
559 gcc_assert (x == VEC_length (tree, build_defs));
560 }
561 #endif
562 }
563
564 /* This routine will create stmt operands for STMT from the def build list. */
565
566 static void
567 finalize_ssa_defs (tree stmt)
568 {
569 unsigned int num = VEC_length (tree, build_defs);
570
571 /* There should only be a single real definition per assignment. */
572 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
573
574 /* If there is an old list, often the new list is identical, or close, so
575 find the elements at the beginning that are the same as the vector. */
576 finalize_ssa_def_ops (stmt);
577 VEC_truncate (tree, build_defs, 0);
578 }
579
580 /* Takes elements from build_uses and turns them into use operands of STMT.
 581    TODO -- Given that the use operands list is not necessarily sorted, merging
582 the operands this way does not make much sense.
583 -- Make build_uses VEC of tree *. */
584
585 static inline void
586 finalize_ssa_use_ops (tree stmt)
587 {
588 unsigned new_i;
589 struct use_optype_d new_list;
590 use_optype_p old_ops, ptr, last;
591 tree *old_base, *new_base;
592
593 new_list.next = NULL;
594 last = &new_list;
595
596 old_ops = USE_OPS (stmt);
597
598 new_i = 0;
599 while (old_ops && new_i < VEC_length (tree, build_uses))
600 {
601 new_base = (tree *) VEC_index (tree, build_uses, new_i);
602 old_base = USE_OP_PTR (old_ops)->use;
603
604 if (old_base == new_base)
605 {
606 /* if variables are the same, reuse this node. */
607 MOVE_HEAD_AFTER (old_ops, last);
608 correct_use_link (USE_OP_PTR (last), stmt);
609 new_i++;
610 }
611 else if (old_base < new_base)
612 {
613 /* if old is less than new, old goes to the free list. */
614 delink_imm_use (USE_OP_PTR (old_ops));
615 MOVE_HEAD_TO_FREELIST (old_ops, use);
616 }
617 else
618 {
619 /* This is a new operand. */
620 add_use_op (stmt, new_base, &last);
621 new_i++;
622 }
623 }
624
625 /* If there is anything remaining in the build_uses list, simply emit it. */
626 for ( ; new_i < VEC_length (tree, build_uses); new_i++)
627 add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);
628
629 last->next = NULL;
630
631 /* If there is anything in the old list, free it. */
632 if (old_ops)
633 {
634 for (ptr = old_ops; ptr; ptr = ptr->next)
635 delink_imm_use (USE_OP_PTR (ptr));
636 old_ops->next = free_uses;
637 free_uses = old_ops;
638 }
639
640 /* Now set the stmt's operands. */
641 USE_OPS (stmt) = new_list.next;
642
643 #ifdef ENABLE_CHECKING
644 {
645 unsigned x = 0;
646 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
647 x++;
648
649 gcc_assert (x == VEC_length (tree, build_uses));
650 }
651 #endif
652 }
653
 654 /* Build the new USE operand vector for STMT from the build_uses list.  */
655
656 static void
657 finalize_ssa_uses (tree stmt)
658 {
659 #ifdef ENABLE_CHECKING
660 {
661 unsigned x;
662 unsigned num = VEC_length (tree, build_uses);
663
664 /* If the pointer to the operand is the statement itself, something is
665 wrong. It means that we are pointing to a local variable (the
666 initial call to update_stmt_operands does not pass a pointer to a
667 statement). */
668 for (x = 0; x < num; x++)
669 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
670 }
671 #endif
672 finalize_ssa_use_ops (stmt);
673 VEC_truncate (tree, build_uses, 0);
674 }
675
676
677 /* Takes elements from build_v_may_defs and turns them into maydef operands of
678 STMT. */
679
680 static inline void
681 finalize_ssa_v_may_def_ops (tree stmt)
682 {
683 unsigned new_i;
684 struct maydef_optype_d new_list;
685 maydef_optype_p old_ops, ptr, last;
686 tree act;
687 unsigned old_base, new_base;
688
689 new_list.next = NULL;
690 last = &new_list;
691
692 old_ops = MAYDEF_OPS (stmt);
693
694 new_i = 0;
695 while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
696 {
697 act = VEC_index (tree, build_v_may_defs, new_i);
698 new_base = get_name_decl (act);
699 old_base = get_name_decl (MAYDEF_OP (old_ops));
700
701 if (old_base == new_base)
702 {
703 /* if variables are the same, reuse this node. */
704 MOVE_HEAD_AFTER (old_ops, last);
705 set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
706 new_i++;
707 }
708 else if (old_base < new_base)
709 {
710 /* if old is less than new, old goes to the free list. */
711 delink_imm_use (MAYDEF_OP_PTR (old_ops));
712 MOVE_HEAD_TO_FREELIST (old_ops, maydef);
713 }
714 else
715 {
716 /* This is a new operand. */
717 add_maydef_op (stmt, act, &last);
718 new_i++;
719 }
720 }
721
722 /* If there is anything remaining in the build_v_may_defs list, simply emit it. */
723 for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
724 add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);
725
726 last->next = NULL;
727
728 /* If there is anything in the old list, free it. */
729 if (old_ops)
730 {
731 for (ptr = old_ops; ptr; ptr = ptr->next)
732 delink_imm_use (MAYDEF_OP_PTR (ptr));
733 old_ops->next = free_maydefs;
734 free_maydefs = old_ops;
735 }
736
737 /* Now set the stmt's operands. */
738 MAYDEF_OPS (stmt) = new_list.next;
739
740 #ifdef ENABLE_CHECKING
741 {
742 unsigned x = 0;
743 for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
744 x++;
745
746 gcc_assert (x == VEC_length (tree, build_v_may_defs));
747 }
748 #endif
749 }
750
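/* Create V_MAY_DEF operands for STMT from the build_v_may_defs vector.
   Unlike the other finalizers, this does not truncate the build vector;
   it is cleared later by cleanup_v_may_defs.  */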
751 static void
752 finalize_ssa_v_may_defs (tree stmt)
753 {
754 finalize_ssa_v_may_def_ops (stmt);
755 }
756
757
758 /* Clear the in_list bits and empty the build array for V_MAY_DEFs. */
759
760 static inline void
761 cleanup_v_may_defs (void)
762 {
763 unsigned x, num;
764 num = VEC_length (tree, build_v_may_defs);
765
766 for (x = 0; x < num; x++)
767 {
768 tree t = VEC_index (tree, build_v_may_defs, x);
769 if (TREE_CODE (t) != SSA_NAME)
770 {
771 var_ann_t ann = var_ann (t);
772 ann->in_v_may_def_list = 0;
773 }
774 }
775 VEC_truncate (tree, build_v_may_defs, 0);
776 }
777
778
779 /* Takes elements from build_vuses and turns them into vuse operands of
780 STMT. */
781
782 static inline void
783 finalize_ssa_vuse_ops (tree stmt)
784 {
785 unsigned new_i;
786 struct vuse_optype_d new_list;
787 vuse_optype_p old_ops, ptr, last;
788 tree act;
789 unsigned old_base, new_base;
790
791 new_list.next = NULL;
792 last = &new_list;
793
794 old_ops = VUSE_OPS (stmt);
795
796 new_i = 0;
797 while (old_ops && new_i < VEC_length (tree, build_vuses))
798 {
799 act = VEC_index (tree, build_vuses, new_i);
800 new_base = get_name_decl (act);
801 old_base = get_name_decl (VUSE_OP (old_ops));
802
803 if (old_base == new_base)
804 {
805 /* if variables are the same, reuse this node. */
806 MOVE_HEAD_AFTER (old_ops, last);
807 set_virtual_use_link (VUSE_OP_PTR (last), stmt);
808 new_i++;
809 }
810 else if (old_base < new_base)
811 {
812 /* if old is less than new, old goes to the free list. */
 813 	  delink_imm_use (VUSE_OP_PTR (old_ops));
814 MOVE_HEAD_TO_FREELIST (old_ops, vuse);
815 }
816 else
817 {
818 /* This is a new operand. */
819 add_vuse_op (stmt, act, &last);
820 new_i++;
821 }
822 }
823
824 /* If there is anything remaining in the build_vuses list, simply emit it. */
825 for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
826 add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);
827
828 last->next = NULL;
829
830 /* If there is anything in the old list, free it. */
831 if (old_ops)
832 {
833 for (ptr = old_ops; ptr; ptr = ptr->next)
834 delink_imm_use (VUSE_OP_PTR (ptr));
835 old_ops->next = free_vuses;
836 free_vuses = old_ops;
837 }
838
839 /* Now set the stmt's operands. */
840 VUSE_OPS (stmt) = new_list.next;
841
842 #ifdef ENABLE_CHECKING
843 {
844 unsigned x = 0;
845 for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
846 x++;
847
848 gcc_assert (x == VEC_length (tree, build_vuses));
849 }
850 #endif
851 }
852
 853 /* Build the new VUSE operand vector for STMT from the build_vuses list.  */
854
855 static void
856 finalize_ssa_vuses (tree stmt)
857 {
858 unsigned num, num_v_may_defs;
859 unsigned vuse_index;
860
861 /* Remove superfluous VUSE operands. If the statement already has a
862 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
863 not needed because V_MAY_DEFs imply a VUSE of the variable. For
864 instance, suppose that variable 'a' is aliased:
865
866 # VUSE <a_2>
867 # a_3 = V_MAY_DEF <a_2>
868 a = a + 1;
869
870 The VUSE <a_2> is superfluous because it is implied by the
871 V_MAY_DEF operation. */
872 num = VEC_length (tree, build_vuses);
873 num_v_may_defs = VEC_length (tree, build_v_may_defs);
874
875 if (num > 0 && num_v_may_defs > 0)
876 {
877 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
878 {
879 tree vuse;
880 vuse = VEC_index (tree, build_vuses, vuse_index);
881 if (TREE_CODE (vuse) != SSA_NAME)
882 {
883 var_ann_t ann = var_ann (vuse);
884 ann->in_vuse_list = 0;
885 if (ann->in_v_may_def_list)
886 {
887 VEC_ordered_remove (tree, build_vuses, vuse_index);
888 continue;
889 }
890 }
891 vuse_index++;
892 }
893 }
894 else
895 {
896 /* Clear out the in_list bits. */
897 for (vuse_index = 0;
898 vuse_index < VEC_length (tree, build_vuses);
899 vuse_index++)
900 {
901 tree t = VEC_index (tree, build_vuses, vuse_index);
902 if (TREE_CODE (t) != SSA_NAME)
903 {
904 var_ann_t ann = var_ann (t);
905 ann->in_vuse_list = 0;
906 }
907 }
908 }
909
910 finalize_ssa_vuse_ops (stmt);
911
912 /* The V_MAY_DEF build vector wasn't cleaned up because we needed it. */
913 cleanup_v_may_defs ();
914
915 /* Free the VUSEs build vector. */
916 VEC_truncate (tree, build_vuses, 0);
917
918 }
919
920 /* Takes elements from build_v_must_defs and turns them into mustdef operands of
921 STMT. */
922
923 static inline void
924 finalize_ssa_v_must_def_ops (tree stmt)
925 {
926 unsigned new_i;
927 struct mustdef_optype_d new_list;
928 mustdef_optype_p old_ops, ptr, last;
929 tree act;
930 unsigned old_base, new_base;
931
932 new_list.next = NULL;
933 last = &new_list;
934
935 old_ops = MUSTDEF_OPS (stmt);
936
937 new_i = 0;
938 while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
939 {
940 act = VEC_index (tree, build_v_must_defs, new_i);
941 new_base = get_name_decl (act);
942 old_base = get_name_decl (MUSTDEF_KILL (old_ops));
943
944 if (old_base == new_base)
945 {
946 /* If variables are the same, reuse this node. */
947 MOVE_HEAD_AFTER (old_ops, last);
948 set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
949 new_i++;
950 }
951 else if (old_base < new_base)
952 {
953 /* If old is less than new, old goes to the free list. */
954 delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
955 MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
956 }
957 else
958 {
959 /* This is a new operand. */
960 add_mustdef_op (stmt, act, &last);
961 new_i++;
962 }
963 }
964
965 /* If there is anything remaining in the build_v_must_defs list, simply emit it. */
966 for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
967 add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);
968
969 last->next = NULL;
970
971 /* If there is anything in the old list, free it. */
972 if (old_ops)
973 {
974 for (ptr = old_ops; ptr; ptr = ptr->next)
975 delink_imm_use (MUSTDEF_KILL_PTR (ptr));
976 old_ops->next = free_mustdefs;
977 free_mustdefs = old_ops;
978 }
979
980 /* Now set the stmt's operands. */
981 MUSTDEF_OPS (stmt) = new_list.next;
982
983 #ifdef ENABLE_CHECKING
984 {
985 unsigned x = 0;
986 for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
987 x++;
988
989 gcc_assert (x == VEC_length (tree, build_v_must_defs));
990 }
991 #endif
992 }
993
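/* Create V_MUST_DEF operands for STMT from the build_v_must_defs vector.  */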
994 static void
995 finalize_ssa_v_must_defs (tree stmt)
996 {
997 /* In the presence of subvars, there may be more than one V_MUST_DEF
998 per statement (one for each subvar). It is a bit expensive to
999 verify that all must-defs in a statement belong to subvars if
1000 there is more than one must-def, so we don't do it. Suffice to
1001    say, if you reach here without having subvars and with more than
1002    one V_MUST_DEF, you have hit a bug.  */
1003 finalize_ssa_v_must_def_ops (stmt);
1004 VEC_truncate (tree, build_v_must_defs, 0);
1005 }
1006
1007
1008 /* Finalize all the build vectors and fill the new operands into STMT.  */
1009
1010 static inline void
1011 finalize_ssa_stmt_operands (tree stmt)
1012 {
1013 finalize_ssa_defs (stmt);
1014 finalize_ssa_uses (stmt);
1015 finalize_ssa_v_must_defs (stmt);
1016 finalize_ssa_v_may_defs (stmt);
1017 finalize_ssa_vuses (stmt);
1018 }
1019
1020
1021 /* Start the process of building up the statement's operand vectors.  */
1022
1023 static inline void
1024 start_ssa_stmt_operands (void)
1025 {
1026 gcc_assert (VEC_length (tree, build_defs) == 0);
1027 gcc_assert (VEC_length (tree, build_uses) == 0);
1028 gcc_assert (VEC_length (tree, build_vuses) == 0);
1029 gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
1030 gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
1031 }
1032
1033
1034 /* Add DEF_P to the list of pointers to operands. */
1035
1036 static inline void
1037 append_def (tree *def_p)
1038 {
1039 VEC_safe_push (tree, heap, build_defs, (tree)def_p);
1040 }
1041
1042
1043 /* Add USE_P to the list of pointers to operands. */
1044
1045 static inline void
1046 append_use (tree *use_p)
1047 {
1048 VEC_safe_push (tree, heap, build_uses, (tree)use_p);
1049 }
1050
1051
1052 /* Add a new virtual may def for variable VAR to the build array. */
1053
1054 static inline void
1055 append_v_may_def (tree var)
1056 {
1057 if (TREE_CODE (var) != SSA_NAME)
1058 {
1059 var_ann_t ann = get_var_ann (var);
1060
1061 /* Don't allow duplicate entries. */
1062 if (ann->in_v_may_def_list)
1063 return;
1064 ann->in_v_may_def_list = 1;
1065 }
1066
1067 VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
1068 }
1069
1070
1071 /* Add VAR to the list of virtual uses. */
1072
1073 static inline void
1074 append_vuse (tree var)
1075 {
1076 /* Don't allow duplicate entries. */
1077 if (TREE_CODE (var) != SSA_NAME)
1078 {
1079 var_ann_t ann = get_var_ann (var);
1080
1081 if (ann->in_vuse_list || ann->in_v_may_def_list)
1082 return;
1083 ann->in_vuse_list = 1;
1084 }
1085
1086 VEC_safe_push (tree, heap, build_vuses, (tree)var);
1087 }
1088
1089
1090 /* Add VAR to the list of virtual must definitions.  */
1091
1092 static inline void
1093 append_v_must_def (tree var)
1094 {
1095 unsigned i;
1096
1097 /* Don't allow duplicate entries. */
1098 for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
1099 if (var == VEC_index (tree, build_v_must_defs, i))
1100 return;
1101
1102 VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
1103 }
1104
1105
1106 /* REF is a tree that contains the entire pointer dereference
1107 expression, if available, or NULL otherwise. ALIAS is the variable
1108 we are asking if REF can access. OFFSET and SIZE come from the
1109 memory access expression that generated this virtual operand.
1110    Return true if the access described by REF, OFFSET and SIZE may
1111    touch the variable ALIAS.  */
1112
1113 static bool
1114 access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
1115 HOST_WIDE_INT size)
1116 {
1117 bool offsetgtz = offset > 0;
1118 unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
1119 tree base = ref ? get_base_address (ref) : NULL;
1120
1121 /* If ALIAS is an SFT, it can't be touched if the offset
1122    and size of the access do not overlap with the SFT offset and
1123 size. This is only true if we are accessing through a pointer
1124 to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
1125 be accessing through a pointer to some substruct of the
1126 structure, and if we try to prune there, we will have the wrong
1127 offset, and get the wrong answer.
1128 i.e., we can't prune without more work if we have something like
1129
1130 struct gcc_target
1131 {
1132 struct asm_out
1133 {
1134 const char *byte_op;
1135 struct asm_int_op
1136 {
1137 const char *hi;
1138 } aligned_op;
1139 } asm_out;
1140 } targetm;
1141
1142 foo = &targetm.asm_out.aligned_op;
1143 return foo->hi;
1144
1145 SFT.1, which represents hi, will have SFT_OFFSET=32 because in
1146 terms of SFT_PARENT_VAR, that is where it is.
1147 However, the access through the foo pointer will be at offset 0. */
1148 if (size != -1
1149 && TREE_CODE (alias) == STRUCT_FIELD_TAG
1150 && base
1151 && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
1152 && !overlap_subvar (offset, size, alias, NULL))
1153 {
1154 #ifdef ACCESS_DEBUGGING
1155 fprintf (stderr, "Access to ");
1156 print_generic_expr (stderr, ref, 0);
1157 fprintf (stderr, " may not touch ");
1158 print_generic_expr (stderr, alias, 0);
1159 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1160 #endif
1161 return false;
1162 }
1163
1164   /* With strict aliasing, it is impossible for a component access
1165 through a pointer to touch a random variable, unless that
1166 variable *is* a structure or a pointer.
1167
1168 That is, given p->c, and some random global variable b,
1169 there is no legal way that p->c could be an access to b.
1170
1171 Without strict aliasing on, we consider it legal to do something
1172 like:
1173
1174 struct foos { int l; };
1175 int foo;
1176 static struct foos *getfoo(void);
1177 int main (void)
1178 {
1179 struct foos *f = getfoo();
1180 f->l = 1;
1181 foo = 2;
1182 if (f->l == 1)
1183 abort();
1184 exit(0);
1185 }
1186 static struct foos *getfoo(void)
1187 { return (struct foos *)&foo; }
1188
1189 (taken from 20000623-1.c)
1190 */
1191 else if (ref
1192 && flag_strict_aliasing
1193 && TREE_CODE (ref) != INDIRECT_REF
1194 && !MTAG_P (alias)
1195 && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
1196 && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
1197 && !POINTER_TYPE_P (TREE_TYPE (alias)))
1198 {
1199 #ifdef ACCESS_DEBUGGING
1200 fprintf (stderr, "Access to ");
1201 print_generic_expr (stderr, ref, 0);
1202 fprintf (stderr, " may not touch ");
1203 print_generic_expr (stderr, alias, 0);
1204 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1205 #endif
1206 return false;
1207 }
1208
1209 /* If the offset of the access is greater than the size of one of
1210 the possible aliases, it can't be touching that alias, because it
1211 would be past the end of the structure. */
1212 else if (ref
1213 && flag_strict_aliasing
1214 && TREE_CODE (ref) != INDIRECT_REF
1215 && !MTAG_P (alias)
1216 && !POINTER_TYPE_P (TREE_TYPE (alias))
1217 && offsetgtz
1218 && DECL_SIZE (alias)
1219 && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
1220 && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
1221 {
1222 #ifdef ACCESS_DEBUGGING
1223 fprintf (stderr, "Access to ");
1224 print_generic_expr (stderr, ref, 0);
1225 fprintf (stderr, " may not touch ");
1226 print_generic_expr (stderr, alias, 0);
1227 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1228 #endif
1229 return false;
1230 }
1231
1232 return true;
1233 }
1234
1235
1236 /* Add VAR to the virtual operands array. FLAGS is as in
1237 get_expr_operands. FULL_REF is a tree that contains the entire
1238 pointer dereference expression, if available, or NULL otherwise.
1239 OFFSET and SIZE come from the memory access expression that
1240    generated this virtual operand.  FOR_CLOBBER is true if this is
1241 adding a virtual operand for a call clobber. */
1242
1243 static void
1244 add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
1245 tree full_ref, HOST_WIDE_INT offset,
1246 HOST_WIDE_INT size, bool for_clobber)
1247 {
1248 VEC(tree,gc) *aliases;
1249 tree sym;
1250 var_ann_t v_ann;
1251
1252 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1253 v_ann = var_ann (sym);
1254
1255 /* Mark statements with volatile operands. Optimizers should back
1256 off from statements having volatile operands. */
1257 if (TREE_THIS_VOLATILE (sym) && s_ann)
1258 s_ann->has_volatile_ops = true;
1259
1260 /* If the variable cannot be modified and this is a V_MAY_DEF change
1261 it into a VUSE. This happens when read-only variables are marked
1262 call-clobbered and/or aliased to writable variables. So we only
1263      do this for non-specific stores.
1264
1265 Note that if this is a specific store, i.e. associated with a
1266 modify_expr, then we can't suppress the V_MAY_DEF, lest we run
1267 into validation problems.
1268
1269 This can happen when programs cast away const, leaving us with a
1270 store to read-only memory. If the statement is actually executed
1271 at runtime, then the program is ill formed. If the statement is
1272 not executed then all is well. At the very least, we cannot ICE. */
1273 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1274 flags &= ~(opf_is_def | opf_kill_def);
1275
1276 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1277 virtual operands, unless the caller has specifically requested
1278 not to add virtual operands (used when adding operands inside an
1279 ADDR_EXPR expression). */
1280 if (flags & opf_no_vops)
1281 return;
1282
1283 aliases = v_ann->may_aliases;
1284 if (aliases == NULL)
1285 {
1286 /* The variable is not aliased or it is an alias tag. */
1287 if (flags & opf_is_def)
1288 {
1289 if (flags & opf_kill_def)
1290 {
1291 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1292 variable definitions. */
1293 gcc_assert (!MTAG_P (var)
1294 || TREE_CODE (var) == STRUCT_FIELD_TAG);
1295 append_v_must_def (var);
1296 }
1297 else
1298 {
1299 /* Add a V_MAY_DEF for call-clobbered variables and
1300 memory tags. */
1301 append_v_may_def (var);
1302 }
1303 }
1304 else
1305 append_vuse (var);
1306 }
1307 else
1308 {
1309 unsigned i;
1310 tree al;
1311
1312 /* The variable is aliased. Add its aliases to the virtual
1313 operands. */
1314 gcc_assert (VEC_length (tree, aliases) != 0);
1315
1316 if (flags & opf_is_def)
1317 {
1318
1319 bool none_added = true;
1320
1321 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1322 {
1323 if (!access_can_touch_variable (full_ref, al, offset, size))
1324 continue;
1325
1326 none_added = false;
1327 append_v_may_def (al);
1328 }
1329
1330 /* If the variable is also an alias tag, add a virtual
1331 operand for it, otherwise we will miss representing
1332 references to the members of the variable's alias set.
1333 This fixes the bug in gcc.c-torture/execute/20020503-1.c.
1334
1335 It is also necessary to add bare defs on clobbers for
1336 SMT's, so that bare SMT uses caused by pruning all the
1337 aliases will link up properly with calls. In order to
1338 keep the number of these bare defs we add down to the
1339 minimum necessary, we keep track of which SMT's were used
1340 alone in statement vdefs or VUSEs. */
1341 if (v_ann->is_aliased
1342 || none_added
1343 || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
1344 && for_clobber
1345 && SMT_USED_ALONE (var)))
1346 {
1347 /* Every bare SMT def we add should have SMT_USED_ALONE
1348 set on it, or else we will get the wrong answer on
1349 clobbers. */
1350 if (none_added
1351 && !updating_used_alone && aliases_computed_p
1352 && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
1353 gcc_assert (SMT_USED_ALONE (var));
1354
1355 append_v_may_def (var);
1356 }
1357 }
1358 else
1359 {
1360 bool none_added = true;
1361 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1362 {
1363 if (!access_can_touch_variable (full_ref, al, offset, size))
1364 continue;
1365 none_added = false;
1366 append_vuse (al);
1367 }
1368
1369       /* Similarly, append a virtual use for VAR itself, when
1370 it is an alias tag. */
1371 if (v_ann->is_aliased || none_added)
1372 append_vuse (var);
1373 }
1374 }
1375 }
1376
1377
1378 /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
1379 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1380 the statement's real operands, otherwise it is added to virtual
1381 operands. */
1382
1383 static void
1384 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1385 {
1386 bool is_real_op;
1387 tree var, sym;
1388 var_ann_t v_ann;
1389
1390 var = *var_p;
1391 gcc_assert (SSA_VAR_P (var));
1392
1393 is_real_op = is_gimple_reg (var);
1394
1395 /* If this is a real operand, the operand is either an SSA name or a
1396 decl. Virtual operands may only be decls. */
1397 gcc_assert (is_real_op || DECL_P (var));
1398
1399 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1400 v_ann = var_ann (sym);
1401
1402 /* Mark statements with volatile operands. Optimizers should back
1403 off from statements having volatile operands. */
1404 if (TREE_THIS_VOLATILE (sym) && s_ann)
1405 s_ann->has_volatile_ops = true;
1406
1407 if (is_real_op)
1408 {
1409 /* The variable is a GIMPLE register. Add it to real operands. */
1410 if (flags & opf_is_def)
1411 append_def (var_p);
1412 else
1413 append_use (var_p);
1414 }
1415 else
1416 add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
1417 }
1418
1419
1420 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1421 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1422
1423 STMT is the statement being processed, EXPR is the INDIRECT_REF
1424 that got us here.
1425
1426 FLAGS is as in get_expr_operands.
1427
1428 FULL_REF contains the full pointer dereference expression, if we
1429 have it, or NULL otherwise.
1430
1431 OFFSET and SIZE are the location of the access inside the
1432 dereferenced pointer, if known.
1433
1434 RECURSE_ON_BASE should be set to true if we want to continue
1435 calling get_expr_operands on the base pointer, and false if
1436 something else will do it for us. */
1437
1438 static void
1439 get_indirect_ref_operands (tree stmt, tree expr, int flags,
1440 tree full_ref,
1441 HOST_WIDE_INT offset, HOST_WIDE_INT size,
1442 bool recurse_on_base)
1443 {
1444 tree *pptr = &TREE_OPERAND (expr, 0);
1445 tree ptr = *pptr;
1446 stmt_ann_t s_ann = stmt_ann (stmt);
1447
1448 /* Stores into INDIRECT_REF operands are never killing definitions. */
1449 flags &= ~opf_kill_def;
1450
1451 if (SSA_VAR_P (ptr))
1452 {
1453 struct ptr_info_def *pi = NULL;
1454
1455 /* If PTR has flow-sensitive points-to information, use it. */
1456 if (TREE_CODE (ptr) == SSA_NAME
1457 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1458 && pi->name_mem_tag)
1459 {
1460 /* PTR has its own memory tag. Use it. */
1461 add_virtual_operand (pi->name_mem_tag, s_ann, flags,
1462 full_ref, offset, size, false);
1463 }
1464 else
1465 {
1466 /* If PTR is not an SSA_NAME or it doesn't have a name
1467 tag, use its symbol memory tag. */
1468 var_ann_t v_ann;
1469
1470 /* If we are emitting debugging dumps, display a warning if
1471 PTR is an SSA_NAME with no flow-sensitive alias
1472 information. That means that we may need to compute
1473 aliasing again. */
1474 if (dump_file
1475 && TREE_CODE (ptr) == SSA_NAME
1476 && pi == NULL)
1477 {
1478 fprintf (dump_file,
1479 "NOTE: no flow-sensitive alias info for ");
1480 print_generic_expr (dump_file, ptr, dump_flags);
1481 fprintf (dump_file, " in ");
1482 print_generic_stmt (dump_file, stmt, dump_flags);
1483 }
1484
1485 if (TREE_CODE (ptr) == SSA_NAME)
1486 ptr = SSA_NAME_VAR (ptr);
1487 v_ann = var_ann (ptr);
1488
1489 if (v_ann->symbol_mem_tag)
1490 add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
1491 full_ref, offset, size, false);
1492 }
1493 }
1494 else if (TREE_CODE (ptr) == INTEGER_CST)
1495 {
1496 /* If a constant is used as a pointer, we can't generate a real
1497 operand for it but we mark the statement volatile to prevent
1498 optimizations from messing things up. */
1499 if (s_ann)
1500 s_ann->has_volatile_ops = true;
1501 return;
1502 }
1503 else
1504 {
1505       /* Ok, this isn't even is_gimple_min_invariant.  Something's broken.  */
1506 gcc_unreachable ();
1507 }
1508
1509 /* If requested, add a USE operand for the base pointer. */
1510 if (recurse_on_base)
1511 get_expr_operands (stmt, pptr, opf_none);
1512 }
1513
1514
1515 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1516
1517 static void
1518 get_tmr_operands (tree stmt, tree expr, int flags)
1519 {
1520 tree tag = TMR_TAG (expr), ref;
1521 HOST_WIDE_INT offset, size, maxsize;
1522 subvar_t svars, sv;
1523 stmt_ann_t s_ann = stmt_ann (stmt);
1524
1525 /* First record the real operands. */
1526 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1527 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1528
1529 /* MEM_REFs should never be killing. */
1530 flags &= ~opf_kill_def;
1531
1532 if (TMR_SYMBOL (expr))
1533 {
1534 stmt_ann_t ann = stmt_ann (stmt);
1535 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1536 }
1537
1538 if (!tag)
1539 {
1540 /* Something weird, so ensure that we will be careful. */
1541 stmt_ann (stmt)->has_volatile_ops = true;
1542 return;
1543 }
1544
1545 if (DECL_P (tag))
1546 {
1547 get_expr_operands (stmt, &tag, flags);
1548 return;
1549 }
1550
1551 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1552 gcc_assert (ref != NULL_TREE);
1553 svars = get_subvars_for_var (ref);
1554 for (sv = svars; sv; sv = sv->next)
1555 {
1556 bool exact;
1557 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1558 {
1559 int subvar_flags = flags;
1560 if (!exact || size != maxsize)
1561 subvar_flags &= ~opf_kill_def;
1562 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1563 }
1564 }
1565 }
1566
1567
1568 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1569 clobbered variables in the function. */
1570
1571 static void
1572 add_call_clobber_ops (tree stmt, tree callee)
1573 {
1574 unsigned u;
1575 bitmap_iterator bi;
1576 stmt_ann_t s_ann = stmt_ann (stmt);
1577 bitmap not_read_b, not_written_b;
1578
1579   /* Functions that are not const, pure, or noreturn may clobber
1580 call-clobbered variables. */
1581 if (s_ann)
1582 s_ann->makes_clobbering_call = true;
1583
1584 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1585 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1586 if (global_var)
1587 {
1588 add_stmt_operand (&global_var, s_ann, opf_is_def);
1589 return;
1590 }
1591
1592 /* Get info for local and module level statics. There is a bit
1593 set for each static if the call being processed does not read
1594 or write that variable. */
1595 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1596 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1597 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1598 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1599 {
1600 tree var = referenced_var_lookup (u);
1601 unsigned int escape_mask = var_ann (var)->escape_mask;
1602 tree real_var = var;
1603 bool not_read;
1604 bool not_written;
1605
1606 /* Not read and not written are computed on regular vars, not
1607 subvars, so look at the parent var if this is an SFT. */
1608 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1609 real_var = SFT_PARENT_VAR (var);
1610
1611 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1612 DECL_UID (real_var)) : false;
1613 not_written = not_written_b ? bitmap_bit_p (not_written_b,
1614 DECL_UID (real_var)) : false;
1615 gcc_assert (!unmodifiable_var_p (var));
1616
1617 clobber_stats.clobbered_vars++;
1618
1619 /* See if this variable is really clobbered by this function. */
1620
1621 /* Trivial case: Things escaping only to pure/const are not
1622 clobbered by non-pure-const, and only read by pure/const. */
1623 if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
1624 {
1625 tree call = get_call_expr_in (stmt);
1626 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1627 {
1628 add_stmt_operand (&var, s_ann, opf_none);
1629 clobber_stats.unescapable_clobbers_avoided++;
1630 continue;
1631 }
1632 else
1633 {
1634 clobber_stats.unescapable_clobbers_avoided++;
1635 continue;
1636 }
1637 }
1638
1639 if (not_written)
1640 {
1641 clobber_stats.static_write_clobbers_avoided++;
1642 if (!not_read)
1643 add_stmt_operand (&var, s_ann, opf_none);
1644 else
1645 clobber_stats.static_read_clobbers_avoided++;
1646 }
1647 else
1648 add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
1649 }
1650 }
1651
1652
1653 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1654 function. */
1655
1656 static void
1657 add_call_read_ops (tree stmt, tree callee)
1658 {
1659 unsigned u;
1660 bitmap_iterator bi;
1661 stmt_ann_t s_ann = stmt_ann (stmt);
1662 bitmap not_read_b;
1663
1664   /* If the function is not pure, it may reference memory.  Add
1665      a VUSE for .GLOBAL_VAR if it has been created.  See compute_may_aliases
1666 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1667 if (global_var)
1668 {
1669 add_stmt_operand (&global_var, s_ann, opf_none);
1670 return;
1671 }
1672
1673 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1674
1675 /* Add a VUSE for each call-clobbered variable. */
1676 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1677 {
1678 tree var = referenced_var (u);
1679 tree real_var = var;
1680 bool not_read;
1681
1682 clobber_stats.readonly_clobbers++;
1683
1684 /* Not read and not written are computed on regular vars, not
1685 subvars, so look at the parent var if this is an SFT. */
1686
1687 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1688 real_var = SFT_PARENT_VAR (var);
1689
1690 not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
1691 : false;
1692
1693 if (not_read)
1694 {
1695 clobber_stats.static_readonly_clobbers_avoided++;
1696 continue;
1697 }
1698
1699 add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
1700 }
1701 }
1702
1703
1704 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1705
1706 static void
1707 get_call_expr_operands (tree stmt, tree expr)
1708 {
1709 tree op;
1710 int call_flags = call_expr_flags (expr);
1711
1712 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1713 operands for all the symbols that have been found to be
1714 call-clobbered.
1715
1716 Note that if aliases have not been computed, the global effects
1717 of calls will not be included in the SSA web. This is fine
1718 because no optimizer should run before aliases have been
1719 computed. By not bothering with virtual operands for CALL_EXPRs
1720 we avoid adding superfluous virtual operands, which can be a
1721 significant compile time sink (See PR 15855). */
1722 if (aliases_computed_p
1723 && !bitmap_empty_p (call_clobbered_vars)
1724 && !(call_flags & ECF_NOVOPS))
1725 {
1726 /* A 'pure' or a 'const' function never call-clobbers anything.
1727 A 'noreturn' function might, but since we don't return anyway
1728 there is no point in recording that. */
1729 if (TREE_SIDE_EFFECTS (expr)
1730 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1731 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1732 else if (!(call_flags & ECF_CONST))
1733 add_call_read_ops (stmt, get_callee_fndecl (expr));
1734 }
1735
1736 /* Find uses in the called function. */
1737 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1738
1739 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1740 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1741
1742 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1743 }
1744
1745
1746 /* Scan operands in the ASM_EXPR statement STMT.  */
1747
1748 static void
1749 get_asm_expr_operands (tree stmt)
1750 {
1751 stmt_ann_t s_ann = stmt_ann (stmt);
1752 int noutputs = list_length (ASM_OUTPUTS (stmt));
1753 const char **oconstraints
1754 = (const char **) alloca ((noutputs) * sizeof (const char *));
1755 int i;
1756 tree link;
1757 const char *constraint;
1758 bool allows_mem, allows_reg, is_inout;
1759
1760   for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1761 {
1762 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1763 oconstraints[i] = constraint;
1764 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
1765 &allows_reg, &is_inout);
1766
1767 /* This should have been split in gimplify_asm_expr. */
1768 gcc_assert (!allows_reg || !is_inout);
1769
1770 /* Memory operands are addressable. Note that STMT needs the
1771 address of this operand. */
1772 if (!allows_reg && allows_mem)
1773 {
1774 tree t = get_base_address (TREE_VALUE (link));
1775 if (t && DECL_P (t) && s_ann)
1776 add_to_addressable_set (t, &s_ann->addresses_taken);
1777 }
1778
1779 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1780 }
1781
1782 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1783 {
1784 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1785 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1786 oconstraints, &allows_mem, &allows_reg);
1787
1788 /* Memory operands are addressable. Note that STMT needs the
1789 address of this operand. */
1790 if (!allows_reg && allows_mem)
1791 {
1792 tree t = get_base_address (TREE_VALUE (link));
1793 if (t && DECL_P (t) && s_ann)
1794 add_to_addressable_set (t, &s_ann->addresses_taken);
1795 }
1796
1797       get_expr_operands (stmt, &TREE_VALUE (link), opf_none);
1798 }
1799
1800
1801 /* Clobber memory for asm ("" : : : "memory"); */
1802 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1803 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1804 {
1805 unsigned i;
1806 bitmap_iterator bi;
1807
1808 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1809 decided to group them). */
1810 if (global_var)
1811 add_stmt_operand (&global_var, s_ann, opf_is_def);
1812 else
1813 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1814 {
1815 tree var = referenced_var (i);
1816 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1817 }
1818
1819 /* Now clobber all addressables. */
1820 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1821 {
1822 tree var = referenced_var (i);
1823
1824 /* Subvars are explicitly represented in this list, so
1825 we don't need the original to be added to the clobber
1826 ops, but the original *will* be in this list because
1827 we keep the addressability of the original
1828 variable up-to-date so we don't screw up the rest of
1829 the backend. */
1830 if (var_can_have_subvars (var)
1831 && get_subvars_for_var (var) != NULL)
1832 continue;
1833
1834 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1835 }
1836
1837 break;
1838 }
1839 }
1840
1841
1842 /* Scan operands for the assignment expression EXPR in statement STMT. */
1843
1844 static void
1845 get_modify_expr_operands (tree stmt, tree expr)
1846 {
1847 /* First get operands from the RHS. */
1848 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1849
1850 /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
1851 registers. If the LHS is a store to memory, we will either need
1852 a preserving definition (V_MAY_DEF) or a killing definition
1853 (V_MUST_DEF).
1854
1855 Preserving definitions are those that modify a part of an
1856 aggregate object for which no subvars have been computed (or the
1857 reference does not correspond exactly to one of them). Stores
1858 through a pointer are also represented with V_MAY_DEF operators.
1859
1860 The determination of whether to use a preserving or a killing
1861 definition is done while scanning the LHS of the assignment. By
1862 default, assume that we will emit a V_MUST_DEF. */
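
  /* As an illustrative sketch (the variable names here are made up): for
     "x = 1" where 'x' is a GIMPLE register, the LHS becomes a real DEF;
     for "s.f = 1" where the store exactly matches a computed subvar of an
     unaliased 's', a V_MUST_DEF of that subvar is emitted; and for
     "*p = 1", opf_kill_def is cleared in get_indirect_ref_operands and a
     V_MAY_DEF results.  */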
1863 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def|opf_kill_def);
1864 }
1865
1866
1867 /* Recursively scan the expression pointed to by EXPR_P in statement
1868 STMT. FLAGS is one of the OPF_* constants modifying how to
1869 interpret the operands found. */
1870
1871 static void
1872 get_expr_operands (tree stmt, tree *expr_p, int flags)
1873 {
1874 enum tree_code code;
1875 enum tree_code_class class;
1876 tree expr = *expr_p;
1877 stmt_ann_t s_ann = stmt_ann (stmt);
1878
1879 if (expr == NULL)
1880 return;
1881
1882 code = TREE_CODE (expr);
1883 class = TREE_CODE_CLASS (code);
1884
1885 switch (code)
1886 {
1887 case ADDR_EXPR:
1888 /* Taking the address of a variable does not represent a
1889 reference to it, but the fact that the statement takes its
1890 address will be of interest to some passes (e.g. alias
1891 resolution). */
1892 add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
1893
1894 /* If the address is invariant, there may be no interesting
1895 variable references inside. */
1896 if (is_gimple_min_invariant (expr))
1897 return;
1898
1899 /* Otherwise, there may be variables referenced inside but there
1900 should be no VUSEs created, since the referenced objects are
1901 not really accessed. The only operands that we should find
1902 here are ARRAY_REF indices which will always be real operands
1903 (GIMPLE does not allow non-registers as array indices). */
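      /* For instance (a sketch), in t_1 = &a[i_2] no VUSE of 'a' is
	 created since only its address is needed, but the index i_2 is
	 still found as a real USE by the recursion below.  */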
1904 flags |= opf_no_vops;
1905 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1906 return;
1907
1908 case SSA_NAME:
1909 case STRUCT_FIELD_TAG:
1910 case SYMBOL_MEMORY_TAG:
1911 case NAME_MEMORY_TAG:
1912 add_stmt_operand (expr_p, s_ann, flags);
1913 return;
1914
1915 case VAR_DECL:
1916 case PARM_DECL:
1917 case RESULT_DECL:
1918 {
1919 subvar_t svars;
1920
1921 /* Add the subvars for a variable, if it has subvars, to DEFS
1922 or USES. Otherwise, add the variable itself. Whether it
1923 goes to USES or DEFS depends on the operand flags. */
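	/* For instance, if 'x' has subvars for its fields, a reference
	   to 'x' adds an operand for each subvar rather than for 'x'
	   itself (a sketch of the common case).  */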
1924 if (var_can_have_subvars (expr)
1925 && (svars = get_subvars_for_var (expr)))
1926 {
1927 subvar_t sv;
1928 for (sv = svars; sv; sv = sv->next)
1929 add_stmt_operand (&sv->var, s_ann, flags);
1930 }
1931 else
1932 add_stmt_operand (expr_p, s_ann, flags);
1933
1934 return;
1935 }
1936
1937 case MISALIGNED_INDIRECT_REF:
1938 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1939 /* fall through */
1940
1941 case ALIGN_INDIRECT_REF:
1942 case INDIRECT_REF:
1943 get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
1944 return;
1945
1946 case TARGET_MEM_REF:
1947 get_tmr_operands (stmt, expr, flags);
1948 return;
1949
1950 case ARRAY_REF:
1951 case ARRAY_RANGE_REF:
1952 case COMPONENT_REF:
1953 case REALPART_EXPR:
1954 case IMAGPART_EXPR:
1955 {
1956 tree ref;
1957 HOST_WIDE_INT offset, size, maxsize;
1958 bool none = true;
1959
1960 /* This component reference becomes an access to all of the
1961 subvariables it can touch, if we can determine that, but
1962 *NOT* the real one. If we can't determine which fields we
1963 could touch, the recursion will eventually get to a
1964 variable and add *all* of its subvars, or whatever is the
1965 minimum correct subset. */
1966 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
1967 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
1968 {
1969 subvar_t sv;
1970 subvar_t svars = get_subvars_for_var (ref);
1971
1972 for (sv = svars; sv; sv = sv->next)
1973 {
1974 bool exact;
1975
1976 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1977 {
1978 int subvar_flags = flags;
1979 none = false;
1980 if (!exact || size != maxsize)
1981 subvar_flags &= ~opf_kill_def;
1982 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1983 }
1984 }
1985
1986 if (!none)
1987 flags |= opf_no_vops;
1988 }
1989 else if (TREE_CODE (ref) == INDIRECT_REF)
1990 {
1991 get_indirect_ref_operands (stmt, ref, flags, expr, offset,
1992 maxsize, false);
1993 flags |= opf_no_vops;
1994 }
1995
1996 	  /* Even if we found subvars above, we still need to recurse here
1997 	     so that immediate uses such as d in s.a[d] are seen; if s.a
1998 	     has subvars we would miss them otherwise.  */
1999 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
2000 flags & ~opf_kill_def);
2001
2002 if (code == COMPONENT_REF)
2003 {
2004 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
2005 s_ann->has_volatile_ops = true;
2006 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
2007 }
2008 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
2009 {
2010 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
2011 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
2012 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
2013 }
2014
2015 return;
2016 }
2017
2018 case WITH_SIZE_EXPR:
2019 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
2020 and an rvalue reference to its second argument. */
2021 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
2022 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2023 return;
2024
2025 case CALL_EXPR:
2026 get_call_expr_operands (stmt, expr);
2027 return;
2028
2029 case COND_EXPR:
2030 case VEC_COND_EXPR:
2031 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
2032 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
2033 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
2034 return;
2035
2036 case MODIFY_EXPR:
2037 get_modify_expr_operands (stmt, expr);
2038 return;
2039
2040 case CONSTRUCTOR:
2041 {
2042 /* General aggregate CONSTRUCTORs have been decomposed, but they
2043 are still in use as the COMPLEX_EXPR equivalent for vectors. */
2044 constructor_elt *ce;
2045 unsigned HOST_WIDE_INT idx;
2046
2047 for (idx = 0;
2048 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
2049 idx++)
2050 get_expr_operands (stmt, &ce->value, opf_none);
2051
2052 return;
2053 }
2054
2055 case BIT_FIELD_REF:
2056 /* Stores using BIT_FIELD_REF are always preserving definitions. */
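      /* For instance, a store such as BIT_FIELD_REF <x, 8, 0> = y_1 only
	 overwrites part of the underlying object, so it must not be
	 treated as a killing definition.  */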
2057 flags &= ~opf_kill_def;
2058
2059 /* Fallthru */
2060
2061 case TRUTH_NOT_EXPR:
2062 case VIEW_CONVERT_EXPR:
2063 do_unary:
2064 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2065 return;
2066
2067 case TRUTH_AND_EXPR:
2068 case TRUTH_OR_EXPR:
2069 case TRUTH_XOR_EXPR:
2070 case COMPOUND_EXPR:
2071 case OBJ_TYPE_REF:
2072 case ASSERT_EXPR:
2073 do_binary:
2074 {
2075 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2076 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2077 return;
2078 }
2079
2080 case DOT_PROD_EXPR:
2081 case REALIGN_LOAD_EXPR:
2082 {
2083 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2084 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2085 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
2086 return;
2087 }
2088
2089 case BLOCK:
2090 case FUNCTION_DECL:
2091 case EXC_PTR_EXPR:
2092 case FILTER_EXPR:
2093 case LABEL_DECL:
2094 case CONST_DECL:
2095 case OMP_PARALLEL:
2096 case OMP_SECTIONS:
2097 case OMP_FOR:
2098 case OMP_RETURN_EXPR:
2099 case OMP_SINGLE:
2100 case OMP_MASTER:
2101 case OMP_ORDERED:
2102 case OMP_CRITICAL:
2103 /* Expressions that make no memory references. */
2104 return;
2105
2106 default:
2107 if (class == tcc_unary)
2108 goto do_unary;
2109 if (class == tcc_binary || class == tcc_comparison)
2110 goto do_binary;
2111 if (class == tcc_constant || class == tcc_type)
2112 return;
2113 }
2114
2115 /* If we get here, something has gone wrong. */
2116 #ifdef ENABLE_CHECKING
2117 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
2118 debug_tree (expr);
2119 fputs ("\n", stderr);
2120 #endif
2121 gcc_unreachable ();
2122 }
2123
2124
2125 /* Parse STMT looking for operands. When finished, the various
2126 build_* operand vectors will have potential operands in them. */
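/* For instance (a sketch), parsing a_1 = b_2 + c_3 leaves a_1 in the
   build vector for DEFs and b_2 and c_3 in the one for USEs, assuming
   the statement needs no virtual operands.  */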
2127
2128 static void
2129 parse_ssa_operands (tree stmt)
2130 {
2131 enum tree_code code;
2132
2133 code = TREE_CODE (stmt);
2134 switch (code)
2135 {
2136 case MODIFY_EXPR:
2137 get_modify_expr_operands (stmt, stmt);
2138 break;
2139
2140 case COND_EXPR:
2141 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
2142 break;
2143
2144 case SWITCH_EXPR:
2145 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
2146 break;
2147
2148 case ASM_EXPR:
2149 get_asm_expr_operands (stmt);
2150 break;
2151
2152 case RETURN_EXPR:
2153 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
2154 break;
2155
2156 case GOTO_EXPR:
2157 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
2158 break;
2159
2160 case LABEL_EXPR:
2161 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
2162 break;
2163
2164 case BIND_EXPR:
2165 case CASE_LABEL_EXPR:
2166 case TRY_CATCH_EXPR:
2167 case TRY_FINALLY_EXPR:
2168 case EH_FILTER_EXPR:
2169 case CATCH_EXPR:
2170 case RESX_EXPR:
2171 /* These nodes contain no variable references. */
2172 break;
2173
2174 default:
2175 /* Notice that if get_expr_operands tries to use &STMT as the
2176 operand pointer (which may only happen for USE operands), we
2177 will fail in add_stmt_operand. This default will handle
2178 statements like empty statements, or CALL_EXPRs that may
2179 appear on the RHS of a statement or as statements themselves. */
2180 get_expr_operands (stmt, &stmt, opf_none);
2181 break;
2182 }
2183 }
2184
2185
2186 /* Create an operands cache for STMT. */
2187
2188 static void
2189 build_ssa_operands (tree stmt)
2190 {
2191 stmt_ann_t ann = get_stmt_ann (stmt);
2192
2193 /* Initially assume that the statement has no volatile operands. */
2194 if (ann)
2195 ann->has_volatile_ops = false;
2196
2197 start_ssa_stmt_operands ();
2198
2199 parse_ssa_operands (stmt);
2200 operand_build_sort_virtual (build_vuses);
2201 operand_build_sort_virtual (build_v_may_defs);
2202 operand_build_sort_virtual (build_v_must_defs);
2203
2204 finalize_ssa_stmt_operands (stmt);
2205 }
2206
2207
2208 /* Free any operand vectors in OPS.  */
2209
2210 void
2211 free_ssa_operands (stmt_operands_p ops)
2212 {
2213 ops->def_ops = NULL;
2214 ops->use_ops = NULL;
2215 ops->maydef_ops = NULL;
2216 ops->mustdef_ops = NULL;
2217 ops->vuse_ops = NULL;
2218 }
2219
2220
2221 /* Get the operands of statement STMT. */
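/* A minimal usage sketch (illustration only, not part of the interface):
   once STMT's operands are current, a pass can walk the cache with the
   operand iterators, e.g.

     tree op;
     ssa_op_iter iter;
     FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_USES)
       print_generic_expr (stderr, op, TDF_SLIM);  */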
2222
2223 void
2224 update_stmt_operands (tree stmt)
2225 {
2226 stmt_ann_t ann = get_stmt_ann (stmt);
2227
2228 /* If update_stmt_operands is called before SSA is initialized, do
2229 nothing. */
2230 if (!ssa_operands_active ())
2231 return;
2232
2233 /* The optimizers cannot handle statements that are nothing but a
2234 _DECL. This indicates a bug in the gimplifier. */
2235 gcc_assert (!SSA_VAR_P (stmt));
2236
2237 gcc_assert (ann->modified);
2238
2239 timevar_push (TV_TREE_OPS);
2240
2241 build_ssa_operands (stmt);
2242
2243 /* Clear the modified bit for STMT. */
2244 ann->modified = 0;
2245
2246 timevar_pop (TV_TREE_OPS);
2247 }
2248
2249
2250 /* Copies virtual operands from SRC to DEST.  */
2251
2252 void
2253 copy_virtual_operands (tree dest, tree src)
2254 {
2255 tree t;
2256 ssa_op_iter iter, old_iter;
2257 use_operand_p use_p, u2;
2258 def_operand_p def_p, d2;
2259
2260 build_ssa_operands (dest);
2261
2262 /* Copy all the virtual fields. */
2263 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
2264 append_vuse (t);
2265 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
2266 append_v_may_def (t);
2267 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
2268 append_v_must_def (t);
2269
2270 if (VEC_length (tree, build_vuses) == 0
2271 && VEC_length (tree, build_v_may_defs) == 0
2272 && VEC_length (tree, build_v_must_defs) == 0)
2273 return;
2274
2275 /* Now commit the virtual operands to this stmt. */
2276 finalize_ssa_v_must_defs (dest);
2277 finalize_ssa_v_may_defs (dest);
2278 finalize_ssa_vuses (dest);
2279
2280   /* Finally, set the fields to the same values as the originals.  */
2281 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
2282 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
2283 {
2284 gcc_assert (!op_iter_done (&old_iter));
2285 SET_USE (use_p, t);
2286 t = op_iter_next_tree (&old_iter);
2287 }
2288 gcc_assert (op_iter_done (&old_iter));
2289
2290 op_iter_init_maydef (&old_iter, src, &u2, &d2);
2291 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
2292 {
2293 gcc_assert (!op_iter_done (&old_iter));
2294 SET_USE (use_p, USE_FROM_PTR (u2));
2295 SET_DEF (def_p, DEF_FROM_PTR (d2));
2296 op_iter_next_maymustdef (&u2, &d2, &old_iter);
2297 }
2298 gcc_assert (op_iter_done (&old_iter));
2299
2300 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
2301 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
2302 {
2303 gcc_assert (!op_iter_done (&old_iter));
2304 SET_USE (use_p, USE_FROM_PTR (u2));
2305 SET_DEF (def_p, DEF_FROM_PTR (d2));
2306 op_iter_next_maymustdef (&u2, &d2, &old_iter);
2307 }
2308 gcc_assert (op_iter_done (&old_iter));
2309
2310 }
2311
2312
2313 /* Specifically for use in DOM's expression analysis.  Given a store, we
2314    create an artificial statement which looks like a load from the store;
2315    this can be used to eliminate redundant loads.  OLD_STMT is the store
2316    statement, and NEW_STMT is the new load which represents a load of the
2317    values stored.  */
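/* For example (a sketch): if OLD_STMT stores through a pointer and
   carries a V_MAY_DEF, the artificial NEW_STMT built here carries a
   VUSE of that V_MAY_DEF's result, modelling a load of the value just
   stored.  */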
2318
2319 void
2320 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
2321 {
2322 stmt_ann_t ann;
2323 tree op;
2324 ssa_op_iter iter;
2325 use_operand_p use_p;
2326 unsigned x;
2327
2328 ann = get_stmt_ann (new_stmt);
2329
2330 /* Process the stmt looking for operands. */
2331 start_ssa_stmt_operands ();
2332 parse_ssa_operands (new_stmt);
2333
2334 for (x = 0; x < VEC_length (tree, build_vuses); x++)
2335 {
2336 tree t = VEC_index (tree, build_vuses, x);
2337 if (TREE_CODE (t) != SSA_NAME)
2338 {
2339 var_ann_t ann = var_ann (t);
2340 ann->in_vuse_list = 0;
2341 }
2342 }
2343
2344 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
2345 {
2346 tree t = VEC_index (tree, build_v_may_defs, x);
2347 if (TREE_CODE (t) != SSA_NAME)
2348 {
2349 var_ann_t ann = var_ann (t);
2350 ann->in_v_may_def_list = 0;
2351 }
2352 }
2353
2354 /* Remove any virtual operands that were found. */
2355 VEC_truncate (tree, build_v_may_defs, 0);
2356 VEC_truncate (tree, build_v_must_defs, 0);
2357 VEC_truncate (tree, build_vuses, 0);
2358
2359 /* For each VDEF on the original statement, we want to create a
2360 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
2361 statement. */
2362 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
2363 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
2364 append_vuse (op);
2365
2366 /* Now build the operands for this new stmt. */
2367 finalize_ssa_stmt_operands (new_stmt);
2368
2369 /* All uses in this fake stmt must not be in the immediate use lists. */
2370 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2371 delink_imm_use (use_p);
2372 }
2373
2374
2375 /* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
2376    to check the validity of the swap operation.  */
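/* For instance (an illustrative use, not a requirement of the
   interface), a pass canonicalizing a commutative expression may call
   swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
		       &TREE_OPERAND (rhs, 1))
   on the RHS of a MODIFY_EXPR.  */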
2377
2378 void
2379 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2380 {
2381 tree op0, op1;
2382 op0 = *exp0;
2383 op1 = *exp1;
2384
2385 /* If the operand cache is active, attempt to preserve the relative
2386 positions of these two operands in their respective immediate use
2387 lists. */
2388 if (ssa_operands_active () && op0 != op1)
2389 {
2390 use_optype_p use0, use1, ptr;
2391 use0 = use1 = NULL;
2392
2393 /* Find the 2 operands in the cache, if they are there. */
2394 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2395 if (USE_OP_PTR (ptr)->use == exp0)
2396 {
2397 use0 = ptr;
2398 break;
2399 }
2400
2401 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2402 if (USE_OP_PTR (ptr)->use == exp1)
2403 {
2404 use1 = ptr;
2405 break;
2406 }
2407
2408       /* If we could not find operand entries for both uses, there isn't
2409 	  much we can do at this point.  Presumably we need not worry about it.  */
2410 if (use0 && use1)
2411 {
2412 tree *tmp = USE_OP_PTR (use1)->use;
2413 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2414 USE_OP_PTR (use0)->use = tmp;
2415 }
2416 }
2417
2418 /* Now swap the data. */
2419 *exp0 = op1;
2420 *exp1 = op0;
2421 }
2422
2423
2424 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
2425 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
2426 a single variable whose address has been taken or any other valid
2427 GIMPLE memory reference (structure reference, array, etc). If the
2428 base address of REF is a decl that has sub-variables, also add all
2429 of its sub-variables. */
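/* For example (a sketch), for REF = a.b.c the base 'a' is marked
   addressable and added to *ADDRESSES_TAKEN; if 'a' has subvars, each
   subvar is added and marked addressable instead.  */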
2430
2431 void
2432 add_to_addressable_set (tree ref, bitmap *addresses_taken)
2433 {
2434 tree var;
2435 subvar_t svars;
2436
2437 gcc_assert (addresses_taken);
2438
2439 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2440 as the only thing we take the address of. If VAR is a structure,
2441 taking the address of a field means that the whole structure may
2442 be referenced using pointer arithmetic. See PR 21407 and the
2443 ensuing mailing list discussion. */
2444 var = get_base_address (ref);
2445 if (var && SSA_VAR_P (var))
2446 {
2447 if (*addresses_taken == NULL)
2448 *addresses_taken = BITMAP_GGC_ALLOC ();
2449
2450 if (var_can_have_subvars (var)
2451 && (svars = get_subvars_for_var (var)))
2452 {
2453 subvar_t sv;
2454 for (sv = svars; sv; sv = sv->next)
2455 {
2456 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
2457 TREE_ADDRESSABLE (sv->var) = 1;
2458 }
2459 }
2460 else
2461 {
2462 bitmap_set_bit (*addresses_taken, DECL_UID (var));
2463 TREE_ADDRESSABLE (var) = 1;
2464 }
2465 }
2466 }
2467
2468
2469 /* Scan the immediate_use list for VAR making sure it is linked properly.
2470 Return TRUE if there is a problem and emit an error message to F. */
2471
2472 bool
2473 verify_imm_links (FILE *f, tree var)
2474 {
2475 use_operand_p ptr, prev, list;
2476 int count;
2477
2478 gcc_assert (TREE_CODE (var) == SSA_NAME);
2479
2480 list = &(SSA_NAME_IMM_USE_NODE (var));
2481 gcc_assert (list->use == NULL);
2482
2483 if (list->prev == NULL)
2484 {
2485 gcc_assert (list->next == NULL);
2486 return false;
2487 }
2488
2489 prev = list;
2490 count = 0;
2491 for (ptr = list->next; ptr != list; )
2492 {
2493 if (prev != ptr->prev)
2494 goto error;
2495
2496 if (ptr->use == NULL)
2497 goto error; /* 2 roots, or SAFE guard node. */
2498 else if (*(ptr->use) != var)
2499 goto error;
2500
2501 prev = ptr;
2502 ptr = ptr->next;
2503
2504 /* Avoid infinite loops. 50,000,000 uses probably indicates a
2505 problem. */
2506 if (count++ > 50000000)
2507 goto error;
2508 }
2509
2510 /* Verify list in the other direction. */
2511 prev = list;
2512 for (ptr = list->prev; ptr != list; )
2513 {
2514 if (prev != ptr->next)
2515 goto error;
2516 prev = ptr;
2517 ptr = ptr->prev;
2518 if (count-- < 0)
2519 goto error;
2520 }
2521
2522 if (count != 0)
2523 goto error;
2524
2525 return false;
2526
2527 error:
2528 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2529 {
2530 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2531 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2532 }
2533 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2534 (void *)ptr->use);
2535 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2536 fprintf(f, "\n");
2537 return true;
2538 }
2539
2540
2541 /* Dump all the immediate uses of VAR to FILE.  */
2542
2543 void
2544 dump_immediate_uses_for (FILE *file, tree var)
2545 {
2546 imm_use_iterator iter;
2547 use_operand_p use_p;
2548
2549 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2550
2551 print_generic_expr (file, var, TDF_SLIM);
2552 fprintf (file, " : -->");
2553 if (has_zero_uses (var))
2554 fprintf (file, " no uses.\n");
2555 else
2556 if (has_single_use (var))
2557 fprintf (file, " single use.\n");
2558 else
2559 fprintf (file, "%d uses.\n", num_imm_uses (var));
2560
2561 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2562 {
2563 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2564 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2565 else
2566 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2567 }
2568 fprintf(file, "\n");
2569 }
2570
2571
2572 /* Dump all the immediate uses to FILE. */
2573
2574 void
2575 dump_immediate_uses (FILE *file)
2576 {
2577 tree var;
2578 unsigned int x;
2579
2580 fprintf (file, "Immediate_uses: \n\n");
2581 for (x = 1; x < num_ssa_names; x++)
2582 {
2583 var = ssa_name(x);
2584 if (!var)
2585 continue;
2586 dump_immediate_uses_for (file, var);
2587 }
2588 }
2589
2590
2591 /* Dump def-use edges on stderr. */
2592
2593 void
2594 debug_immediate_uses (void)
2595 {
2596 dump_immediate_uses (stderr);
2597 }
2598
2599
2600 /* Dump def-use edges of VAR on stderr.  */
2601
2602 void
2603 debug_immediate_uses_for (tree var)
2604 {
2605 dump_immediate_uses_for (stderr, var);
2606 }
2607
2608 #include "gt-tree-ssa-operands.h"