[gcc.git] / gcc / tree-ssa-operands.c
1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53 The operand tree is then parsed by the various get_* routines, which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
59
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
62
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
66
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had SSA_NAME version of a
72 variable, and that same variable occurs in the same operands cache, then
73 the new cache vector will also get the same SSA_NAME.
74
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'. */
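
/* As an illustrative sketch (assuming the usual FOR_EACH_SSA_* iterator
   API declared in tree-ssa-operands.h), passes normally walk the cached
   operands through the iterators rather than touching the build vectors
   in this file directly:

	use_operand_p use_p;
	ssa_op_iter iter;

	FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
	  {
	    tree var = USE_FROM_PTR (use_p);
	    ...
	  }
*/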
78
79 /* Flags to describe operand properties in helpers. */
80
81 /* By default, operands are loaded. */
82 #define opf_none 0
83
84 /* Operand is the target of an assignment expression or a
85 call-clobbered variable. */
86 #define opf_is_def (1 << 0)
87
88 /* Operand is the target of an assignment expression. */
89 #define opf_kill_def (1 << 1)
90
91 /* No virtual operands should be created in the expression. This is used
92 when traversing ADDR_EXPR nodes which have different semantics than
93 other expressions. Inside an ADDR_EXPR node, the only operands that we
94 need to consider are indices into arrays. For instance, &a.b[i] should
95 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
96 VUSE for 'b'. */
97 #define opf_no_vops (1 << 2)
98
99 /* Operand is a "non-specific" kill for call-clobbers and such. This
100 is used to distinguish "reset the world" events from explicit
101 MODIFY_EXPRs. */
102 #define opf_non_specific (1 << 3)
103
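/* For instance (taken from the code below): get_modify_expr_operands
   scans the LHS of an assignment with opf_is_def | opf_kill_def,
   get_indirect_ref_operands clears opf_kill_def again for stores
   through pointers, and the asm "memory" clobber handling uses
   opf_is_def | opf_non_specific.  */
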
104 /* Array for building all the def operands. */
105 static VEC(tree,heap) *build_defs;
106
107 /* Array for building all the use operands. */
108 static VEC(tree,heap) *build_uses;
109
110 /* Array for building all the V_MAY_DEF operands. */
111 static VEC(tree,heap) *build_v_may_defs;
112
113 /* Array for building all the VUSE operands. */
114 static VEC(tree,heap) *build_vuses;
115
116 /* Array for building all the V_MUST_DEF operands. */
117 static VEC(tree,heap) *build_v_must_defs;
118
119 /* Flag indicating whether the SSA operand cache machinery is active. */
120 static bool ops_active = false;
121
122 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
123 static unsigned operand_memory_index;
124
125 static void get_expr_operands (tree, tree *, int);
126
127 static def_optype_p free_defs = NULL;
128 static use_optype_p free_uses = NULL;
129 static vuse_optype_p free_vuses = NULL;
130 static maydef_optype_p free_maydefs = NULL;
131 static mustdef_optype_p free_mustdefs = NULL;
132
133 /* Allocates operand OP of given TYPE from the appropriate free list,
134 or from newly allocated memory if the list is empty. */
135
136 #define ALLOC_OPTYPE(OP, TYPE) \
137 do \
138 { \
139 TYPE##_optype_p ret = free_##TYPE##s; \
140 if (ret) \
141 free_##TYPE##s = ret->next; \
142 else \
143 ret = ssa_operand_alloc (sizeof (*ret)); \
144 (OP) = ret; \
145 } while (0)
146
147 /* Return the DECL_UID of the base variable of T. */
148
149 static inline unsigned
150 get_name_decl (tree t)
151 {
152 if (TREE_CODE (t) != SSA_NAME)
153 return DECL_UID (t);
154 else
155 return DECL_UID (SSA_NAME_VAR (t));
156 }
157
158
159 /* Comparison function for qsort used in operand_build_sort_virtual. */
160
161 static int
162 operand_build_cmp (const void *p, const void *q)
163 {
164 tree e1 = *((const tree *)p);
165 tree e2 = *((const tree *)q);
166 unsigned int u1,u2;
167
168 u1 = get_name_decl (e1);
169 u2 = get_name_decl (e2);
170
171 /* We want to sort in ascending order. They can never be equal. */
172 #ifdef ENABLE_CHECKING
173 gcc_assert (u1 != u2);
174 #endif
175 return (u1 > u2 ? 1 : -1);
176 }
177
178
179 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
180
181 static inline void
182 operand_build_sort_virtual (VEC(tree,heap) *list)
183 {
184 int num = VEC_length (tree, list);
185
186 if (num < 2)
187 return;
188
189 if (num == 2)
190 {
191 if (get_name_decl (VEC_index (tree, list, 0))
192 > get_name_decl (VEC_index (tree, list, 1)))
193 {
194 /* Swap elements if in the wrong order. */
195 tree tmp = VEC_index (tree, list, 0);
196 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
197 VEC_replace (tree, list, 1, tmp);
198 }
199 return;
200 }
201
202 /* There are 3 or more elements, call qsort. */
203 qsort (VEC_address (tree, list),
204 VEC_length (tree, list),
205 sizeof (tree),
206 operand_build_cmp);
207 }
208
209
210 /* Return true if the SSA operands cache is active. */
211
212 bool
213 ssa_operands_active (void)
214 {
215 return ops_active;
216 }
217
218
219 /* Structure storing statistics on how many call clobbers we have, and
220 how many were avoided. */
221
222 static struct
223 {
224 /* Number of call-clobbered ops we attempt to add to calls in
225 add_call_clobber_ops. */
226 unsigned int clobbered_vars;
227
228 /* Number of write-clobbers (V_MAY_DEFs) avoided by using
229 not_written information. */
230 unsigned int static_write_clobbers_avoided;
231
232 /* Number of reads (VUSEs) avoided by using not_read information. */
233 unsigned int static_read_clobbers_avoided;
234
235 /* Number of write-clobbers avoided because the variable can't escape to
236 this call. */
237 unsigned int unescapable_clobbers_avoided;
238
239 /* Number of read-only uses we attempt to add to calls in
240 add_call_read_ops. */
241 unsigned int readonly_clobbers;
242
243 /* Number of read-only uses we avoid using not_read information. */
244 unsigned int static_readonly_clobbers_avoided;
245 } clobber_stats;
246
247
248 /* Initialize the operand cache routines. */
249
250 void
251 init_ssa_operands (void)
252 {
253 build_defs = VEC_alloc (tree, heap, 5);
254 build_uses = VEC_alloc (tree, heap, 10);
255 build_vuses = VEC_alloc (tree, heap, 25);
256 build_v_may_defs = VEC_alloc (tree, heap, 25);
257 build_v_must_defs = VEC_alloc (tree, heap, 25);
258
259 gcc_assert (operand_memory == NULL);
260 operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
261 ops_active = true;
262 memset (&clobber_stats, 0, sizeof (clobber_stats));
263 }
264
265
266 /* Dispose of anything required by the operand routines. */
267
268 void
269 fini_ssa_operands (void)
270 {
271 struct ssa_operand_memory_d *ptr;
272 VEC_free (tree, heap, build_defs);
273 VEC_free (tree, heap, build_uses);
274 VEC_free (tree, heap, build_v_must_defs);
275 VEC_free (tree, heap, build_v_may_defs);
276 VEC_free (tree, heap, build_vuses);
277 free_defs = NULL;
278 free_uses = NULL;
279 free_vuses = NULL;
280 free_maydefs = NULL;
281 free_mustdefs = NULL;
282 while ((ptr = operand_memory) != NULL)
283 {
284 operand_memory = operand_memory->next;
285 ggc_free (ptr);
286 }
287
288 ops_active = false;
289
290 if (dump_file && (dump_flags & TDF_STATS))
291 {
292 fprintf (dump_file, "Original clobbered vars:%d\n",
293 clobber_stats.clobbered_vars);
294 fprintf (dump_file, "Static write clobbers avoided:%d\n",
295 clobber_stats.static_write_clobbers_avoided);
296 fprintf (dump_file, "Static read clobbers avoided:%d\n",
297 clobber_stats.static_read_clobbers_avoided);
298 fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
299 clobber_stats.unescapable_clobbers_avoided);
300 fprintf (dump_file, "Original read-only clobbers:%d\n",
301 clobber_stats.readonly_clobbers);
302 fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
303 clobber_stats.static_readonly_clobbers_avoided);
304 }
305 }
306
307
308 /* Return a pointer to SIZE bytes of memory for operands. */
309
310 static inline void *
311 ssa_operand_alloc (unsigned size)
312 {
313 char *ptr;
314 if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
315 {
316 struct ssa_operand_memory_d *ptr;
317 ptr = GGC_NEW (struct ssa_operand_memory_d);
318 ptr->next = operand_memory;
319 operand_memory = ptr;
320 operand_memory_index = 0;
321 }
322 ptr = &(operand_memory->mem[operand_memory_index]);
323 operand_memory_index += size;
324 return ptr;
325 }
326
327
328 /* Make sure PTR is in the correct immediate use list. Since uses are simply
329 pointers into the stmt TREE, there is no way of telling if anyone has
330 changed what this pointer points to via TREE_OPERAND (exp, 0) = <...>.
331 The contents are different, but the pointer is still the same. This
332 routine will check to make sure PTR is in the correct list, and if it isn't,
333 put it in the correct list. We cannot simply check the previous node
334 because all nodes in the same stmt might have been changed. */
335
336 static inline void
337 correct_use_link (use_operand_p ptr, tree stmt)
338 {
339 use_operand_p prev;
340 tree root;
341
342 /* fold_stmt may have changed the stmt pointers. */
343 if (ptr->stmt != stmt)
344 ptr->stmt = stmt;
345
346 prev = ptr->prev;
347 if (prev)
348 {
349 /* Find the root element, making sure we skip any safe iterators. */
350 while (prev->use != NULL || prev->stmt == NULL)
351 prev = prev->prev;
352
353 /* Get the SSA_NAME of the list the node is in. */
354 root = prev->stmt;
355
356 /* If it's the right list, simply return. */
357 if (root == *(ptr->use))
358 return;
359 }
360
361 /* It is in the wrong list if we reach here. */
362 delink_imm_use (ptr);
363 link_imm_use (ptr, *(ptr->use));
364 }
365
366
367 /* This routine makes sure that PTR is in an immediate use list, and makes
368 sure the stmt pointer is set to the current stmt. Virtual uses do not need
369 the overhead of correct_use_link since they cannot be directly manipulated
370 like a real use can be. (They don't exist in the TREE_OPERAND nodes.) */
371
372 static inline void
373 set_virtual_use_link (use_operand_p ptr, tree stmt)
374 {
375 /* fold_stmt may have changed the stmt pointers. */
376 if (ptr->stmt != stmt)
377 ptr->stmt = stmt;
378
379 /* If this use isn't in a list, add it to the correct list. */
380 if (!ptr->prev)
381 link_imm_use (ptr, *(ptr->use));
382 }
383
384 /* Appends ELT after TO, and moves the TO pointer to ELT. */
385
386 #define APPEND_OP_AFTER(ELT, TO) \
387 do \
388 { \
389 (TO)->next = (ELT); \
390 (TO) = (ELT); \
391 } while (0)
392
393 /* Appends the head of list FROM after TO, and moves both pointers
394 to their successors. */
395
396 #define MOVE_HEAD_AFTER(FROM, TO) \
397 do \
398 { \
399 APPEND_OP_AFTER (FROM, TO); \
400 (FROM) = (FROM)->next; \
401 } while (0)
402
403 /* Moves OP to the appropriate freelist. OP is set to its successor. */
404
405 #define MOVE_HEAD_TO_FREELIST(OP, TYPE) \
406 do \
407 { \
408 TYPE##_optype_p next = (OP)->next; \
409 (OP)->next = free_##TYPE##s; \
410 free_##TYPE##s = (OP); \
411 (OP) = next; \
412 } while (0)
413
414 /* Initializes immediate use at USE_PTR to value VAL, and links it to the list
415 of immediate uses. STMT is the current statement. */
416
417 #define INITIALIZE_USE(USE_PTR, VAL, STMT) \
418 do \
419 { \
420 (USE_PTR)->use = (VAL); \
421 link_imm_use_stmt ((USE_PTR), *(VAL), (STMT)); \
422 } while (0)
423
424 /* Adds OP to the list of defs after LAST, and moves
425 LAST to the new element. */
426
427 static inline void
428 add_def_op (tree *op, def_optype_p *last)
429 {
430 def_optype_p new;
431
432 ALLOC_OPTYPE (new, def);
433 DEF_OP_PTR (new) = op;
434 APPEND_OP_AFTER (new, *last);
435 }
436
437 /* Adds OP to the list of uses of statement STMT after LAST, and moves
438 LAST to the new element. */
439
440 static inline void
441 add_use_op (tree stmt, tree *op, use_optype_p *last)
442 {
443 use_optype_p new;
444
445 ALLOC_OPTYPE (new, use);
446 INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
447 APPEND_OP_AFTER (new, *last);
448 }
449
450 /* Adds OP to the list of vuses of statement STMT after LAST, and moves
451 LAST to the new element. */
452
453 static inline void
454 add_vuse_op (tree stmt, tree op, vuse_optype_p *last)
455 {
456 vuse_optype_p new;
457
458 ALLOC_OPTYPE (new, vuse);
459 VUSE_OP (new) = op;
460 INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
461 APPEND_OP_AFTER (new, *last);
462 }
463
464 /* Adds OP to the list of maydefs of statement STMT after LAST, and moves
465 LAST to the new element. */
466
467 static inline void
468 add_maydef_op (tree stmt, tree op, maydef_optype_p *last)
469 {
470 maydef_optype_p new;
471
472 ALLOC_OPTYPE (new, maydef);
473 MAYDEF_RESULT (new) = op;
474 MAYDEF_OP (new) = op;
475 INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
476 APPEND_OP_AFTER (new, *last);
477 }
478
479 /* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
480 LAST to the new element. */
481
482 static inline void
483 add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
484 {
485 mustdef_optype_p new;
486
487 ALLOC_OPTYPE (new, mustdef);
488 MUSTDEF_RESULT (new) = op;
489 MUSTDEF_KILL (new) = op;
490 INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
491 APPEND_OP_AFTER (new, *last);
492 }
493
494 /* Takes elements from build_defs and turns them into def operands of STMT.
495 TODO -- Given that the def operands list is not necessarily sorted, merging
496 the operands this way does not make much sense.
497 -- Make build_defs VEC of tree *. */
498
499 static inline void
500 finalize_ssa_def_ops (tree stmt)
501 {
502 unsigned new_i;
503 struct def_optype_d new_list;
504 def_optype_p old_ops, last;
505 tree *old_base;
506
507 new_list.next = NULL;
508 last = &new_list;
509
510 old_ops = DEF_OPS (stmt);
511
512 new_i = 0;
513 while (old_ops && new_i < VEC_length (tree, build_defs))
514 {
515 tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
516 old_base = DEF_OP_PTR (old_ops);
517
518 if (old_base == new_base)
519 {
520 /* if variables are the same, reuse this node. */
521 MOVE_HEAD_AFTER (old_ops, last);
522 new_i++;
523 }
524 else if (old_base < new_base)
525 {
526 /* if old is less than new, old goes to the free list. */
527 MOVE_HEAD_TO_FREELIST (old_ops, def);
528 }
529 else
530 {
531 /* This is a new operand. */
532 add_def_op (new_base, &last);
533 new_i++;
534 }
535 }
536
537 /* If there is anything remaining in the build_defs list, simply emit it. */
538 for ( ; new_i < VEC_length (tree, build_defs); new_i++)
539 add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);
540
541 last->next = NULL;
542
543 /* If there is anything in the old list, free it. */
544 if (old_ops)
545 {
546 old_ops->next = free_defs;
547 free_defs = old_ops;
548 }
549
550 /* Now set the stmt's operands. */
551 DEF_OPS (stmt) = new_list.next;
552
553 #ifdef ENABLE_CHECKING
554 {
555 def_optype_p ptr;
556 unsigned x = 0;
557 for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
558 x++;
559
560 gcc_assert (x == VEC_length (tree, build_defs));
561 }
562 #endif
563 }
564
565 /* This routine will create stmt operands for STMT from the def build list. */
566
567 static void
568 finalize_ssa_defs (tree stmt)
569 {
570 unsigned int num = VEC_length (tree, build_defs);
571
572 /* There should only be a single real definition per assignment. */
573 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
574
575 /* If there is an old list, often the new list is identical, or close, so
576 find the elements at the beginning that are the same as the vector. */
577 finalize_ssa_def_ops (stmt);
578 VEC_truncate (tree, build_defs, 0);
579 }
580
581 /* Takes elements from build_uses and turns them into use operands of STMT.
582 TODO -- Given that the use operands list is not necessarily sorted, merging
583 the operands this way does not make much sense.
584 -- Make build_uses VEC of tree *. */
585
586 static inline void
587 finalize_ssa_use_ops (tree stmt)
588 {
589 unsigned new_i;
590 struct use_optype_d new_list;
591 use_optype_p old_ops, ptr, last;
592 tree *old_base, *new_base;
593
594 new_list.next = NULL;
595 last = &new_list;
596
597 old_ops = USE_OPS (stmt);
598
599 new_i = 0;
600 while (old_ops && new_i < VEC_length (tree, build_uses))
601 {
602 new_base = (tree *) VEC_index (tree, build_uses, new_i);
603 old_base = USE_OP_PTR (old_ops)->use;
604
605 if (old_base == new_base)
606 {
607 /* if variables are the same, reuse this node. */
608 MOVE_HEAD_AFTER (old_ops, last);
609 correct_use_link (USE_OP_PTR (last), stmt);
610 new_i++;
611 }
612 else if (old_base < new_base)
613 {
614 /* if old is less than new, old goes to the free list. */
615 delink_imm_use (USE_OP_PTR (old_ops));
616 MOVE_HEAD_TO_FREELIST (old_ops, use);
617 }
618 else
619 {
620 /* This is a new operand. */
621 add_use_op (stmt, new_base, &last);
622 new_i++;
623 }
624 }
625
626 /* If there is anything remaining in the build_uses list, simply emit it. */
627 for ( ; new_i < VEC_length (tree, build_uses); new_i++)
628 add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);
629
630 last->next = NULL;
631
632 /* If there is anything in the old list, free it. */
633 if (old_ops)
634 {
635 for (ptr = old_ops; ptr; ptr = ptr->next)
636 delink_imm_use (USE_OP_PTR (ptr));
637 old_ops->next = free_uses;
638 free_uses = old_ops;
639 }
640
641 /* Now set the stmt's operands. */
642 USE_OPS (stmt) = new_list.next;
643
644 #ifdef ENABLE_CHECKING
645 {
646 unsigned x = 0;
647 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
648 x++;
649
650 gcc_assert (x == VEC_length (tree, build_uses));
651 }
652 #endif
653 }
654
655 /* Finalize the new use operands for STMT. */
656
657 static void
658 finalize_ssa_uses (tree stmt)
659 {
660 #ifdef ENABLE_CHECKING
661 {
662 unsigned x;
663 unsigned num = VEC_length (tree, build_uses);
664
665 /* If the pointer to the operand is the statement itself, something is
666 wrong. It means that we are pointing to a local variable (the
667 initial call to update_stmt_operands does not pass a pointer to a
668 statement). */
669 for (x = 0; x < num; x++)
670 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
671 }
672 #endif
673 finalize_ssa_use_ops (stmt);
674 VEC_truncate (tree, build_uses, 0);
675 }
676
677
678 /* Takes elements from build_v_may_defs and turns them into maydef operands of
679 STMT. */
680
681 static inline void
682 finalize_ssa_v_may_def_ops (tree stmt)
683 {
684 unsigned new_i;
685 struct maydef_optype_d new_list;
686 maydef_optype_p old_ops, ptr, last;
687 tree act;
688 unsigned old_base, new_base;
689
690 new_list.next = NULL;
691 last = &new_list;
692
693 old_ops = MAYDEF_OPS (stmt);
694
695 new_i = 0;
696 while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
697 {
698 act = VEC_index (tree, build_v_may_defs, new_i);
699 new_base = get_name_decl (act);
700 old_base = get_name_decl (MAYDEF_OP (old_ops));
701
702 if (old_base == new_base)
703 {
704 /* if variables are the same, reuse this node. */
705 MOVE_HEAD_AFTER (old_ops, last);
706 set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
707 new_i++;
708 }
709 else if (old_base < new_base)
710 {
711 /* if old is less than new, old goes to the free list. */
712 delink_imm_use (MAYDEF_OP_PTR (old_ops));
713 MOVE_HEAD_TO_FREELIST (old_ops, maydef);
714 }
715 else
716 {
717 /* This is a new operand. */
718 add_maydef_op (stmt, act, &last);
719 new_i++;
720 }
721 }
722
723 /* If there is anything remaining in the build_v_may_defs list, simply emit it. */
724 for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
725 add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);
726
727 last->next = NULL;
728
729 /* If there is anything in the old list, free it. */
730 if (old_ops)
731 {
732 for (ptr = old_ops; ptr; ptr = ptr->next)
733 delink_imm_use (MAYDEF_OP_PTR (ptr));
734 old_ops->next = free_maydefs;
735 free_maydefs = old_ops;
736 }
737
738 /* Now set the stmt's operands. */
739 MAYDEF_OPS (stmt) = new_list.next;
740
741 #ifdef ENABLE_CHECKING
742 {
743 unsigned x = 0;
744 for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
745 x++;
746
747 gcc_assert (x == VEC_length (tree, build_v_may_defs));
748 }
749 #endif
750 }
751
752 static void
753 finalize_ssa_v_may_defs (tree stmt)
754 {
755 finalize_ssa_v_may_def_ops (stmt);
756 }
757
758
759 /* Clear the in_list bits and empty the build array for V_MAY_DEFs. */
760
761 static inline void
762 cleanup_v_may_defs (void)
763 {
764 unsigned x, num;
765 num = VEC_length (tree, build_v_may_defs);
766
767 for (x = 0; x < num; x++)
768 {
769 tree t = VEC_index (tree, build_v_may_defs, x);
770 if (TREE_CODE (t) != SSA_NAME)
771 {
772 var_ann_t ann = var_ann (t);
773 ann->in_v_may_def_list = 0;
774 }
775 }
776 VEC_truncate (tree, build_v_may_defs, 0);
777 }
778
779
780 /* Takes elements from build_vuses and turns them into vuse operands of
781 STMT. */
782
783 static inline void
784 finalize_ssa_vuse_ops (tree stmt)
785 {
786 unsigned new_i;
787 struct vuse_optype_d new_list;
788 vuse_optype_p old_ops, ptr, last;
789 tree act;
790 unsigned old_base, new_base;
791
792 new_list.next = NULL;
793 last = &new_list;
794
795 old_ops = VUSE_OPS (stmt);
796
797 new_i = 0;
798 while (old_ops && new_i < VEC_length (tree, build_vuses))
799 {
800 act = VEC_index (tree, build_vuses, new_i);
801 new_base = get_name_decl (act);
802 old_base = get_name_decl (VUSE_OP (old_ops));
803
804 if (old_base == new_base)
805 {
806 /* if variables are the same, reuse this node. */
807 MOVE_HEAD_AFTER (old_ops, last);
808 set_virtual_use_link (VUSE_OP_PTR (last), stmt);
809 new_i++;
810 }
811 else if (old_base < new_base)
812 {
813 /* if old is less than new, old goes to the free list. */
814 delink_imm_use (USE_OP_PTR (old_ops));
815 MOVE_HEAD_TO_FREELIST (old_ops, vuse);
816 }
817 else
818 {
819 /* This is a new operand. */
820 add_vuse_op (stmt, act, &last);
821 new_i++;
822 }
823 }
824
825 /* If there is anything remaining in the build_vuses list, simply emit it. */
826 for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
827 add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);
828
829 last->next = NULL;
830
831 /* If there is anything in the old list, free it. */
832 if (old_ops)
833 {
834 for (ptr = old_ops; ptr; ptr = ptr->next)
835 delink_imm_use (VUSE_OP_PTR (ptr));
836 old_ops->next = free_vuses;
837 free_vuses = old_ops;
838 }
839
840 /* Now set the stmt's operands. */
841 VUSE_OPS (stmt) = new_list.next;
842
843 #ifdef ENABLE_CHECKING
844 {
845 unsigned x = 0;
846 for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
847 x++;
848
849 gcc_assert (x == VEC_length (tree, build_vuses));
850 }
851 #endif
852 }
853
854 /* Finalize the new VUSE operands for STMT. */
855
856 static void
857 finalize_ssa_vuses (tree stmt)
858 {
859 unsigned num, num_v_may_defs;
860 unsigned vuse_index;
861
862 /* Remove superfluous VUSE operands. If the statement already has a
863 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
864 not needed because V_MAY_DEFs imply a VUSE of the variable. For
865 instance, suppose that variable 'a' is aliased:
866
867 # VUSE <a_2>
868 # a_3 = V_MAY_DEF <a_2>
869 a = a + 1;
870
871 The VUSE <a_2> is superfluous because it is implied by the
872 V_MAY_DEF operation. */
873 num = VEC_length (tree, build_vuses);
874 num_v_may_defs = VEC_length (tree, build_v_may_defs);
875
876 if (num > 0 && num_v_may_defs > 0)
877 {
878 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
879 {
880 tree vuse;
881 vuse = VEC_index (tree, build_vuses, vuse_index);
882 if (TREE_CODE (vuse) != SSA_NAME)
883 {
884 var_ann_t ann = var_ann (vuse);
885 ann->in_vuse_list = 0;
886 if (ann->in_v_may_def_list)
887 {
888 VEC_ordered_remove (tree, build_vuses, vuse_index);
889 continue;
890 }
891 }
892 vuse_index++;
893 }
894 }
895 else
896 {
897 /* Clear out the in_list bits. */
898 for (vuse_index = 0;
899 vuse_index < VEC_length (tree, build_vuses);
900 vuse_index++)
901 {
902 tree t = VEC_index (tree, build_vuses, vuse_index);
903 if (TREE_CODE (t) != SSA_NAME)
904 {
905 var_ann_t ann = var_ann (t);
906 ann->in_vuse_list = 0;
907 }
908 }
909 }
910
911 finalize_ssa_vuse_ops (stmt);
912
913 /* The V_MAY_DEF build vector wasn't cleaned up because we needed it. */
914 cleanup_v_may_defs ();
915
916 /* Free the VUSEs build vector. */
917 VEC_truncate (tree, build_vuses, 0);
918
919 }
920
921 /* Takes elements from build_v_must_defs and turns them into mustdef operands of
922 STMT. */
923
924 static inline void
925 finalize_ssa_v_must_def_ops (tree stmt)
926 {
927 unsigned new_i;
928 struct mustdef_optype_d new_list;
929 mustdef_optype_p old_ops, ptr, last;
930 tree act;
931 unsigned old_base, new_base;
932
933 new_list.next = NULL;
934 last = &new_list;
935
936 old_ops = MUSTDEF_OPS (stmt);
937
938 new_i = 0;
939 while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
940 {
941 act = VEC_index (tree, build_v_must_defs, new_i);
942 new_base = get_name_decl (act);
943 old_base = get_name_decl (MUSTDEF_KILL (old_ops));
944
945 if (old_base == new_base)
946 {
947 /* If variables are the same, reuse this node. */
948 MOVE_HEAD_AFTER (old_ops, last);
949 set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
950 new_i++;
951 }
952 else if (old_base < new_base)
953 {
954 /* If old is less than new, old goes to the free list. */
955 delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
956 MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
957 }
958 else
959 {
960 /* This is a new operand. */
961 add_mustdef_op (stmt, act, &last);
962 new_i++;
963 }
964 }
965
966 /* If there is anything remaining in the build_v_must_defs list, simply emit it. */
967 for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
968 add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);
969
970 last->next = NULL;
971
972 /* If there is anything in the old list, free it. */
973 if (old_ops)
974 {
975 for (ptr = old_ops; ptr; ptr = ptr->next)
976 delink_imm_use (MUSTDEF_KILL_PTR (ptr));
977 old_ops->next = free_mustdefs;
978 free_mustdefs = old_ops;
979 }
980
981 /* Now set the stmt's operands. */
982 MUSTDEF_OPS (stmt) = new_list.next;
983
984 #ifdef ENABLE_CHECKING
985 {
986 unsigned x = 0;
987 for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
988 x++;
989
990 gcc_assert (x == VEC_length (tree, build_v_must_defs));
991 }
992 #endif
993 }
994
995 static void
996 finalize_ssa_v_must_defs (tree stmt)
997 {
998 /* In the presence of subvars, there may be more than one V_MUST_DEF
999 per statement (one for each subvar). It is a bit expensive to
1000 verify that all must-defs in a statement belong to subvars if
1001 there is more than one must-def, so we don't do it. Suffice it to
1002 say, if you reach here without having subvars and have more than one
1003 must-def, you have hit a bug. */
1004 finalize_ssa_v_must_def_ops (stmt);
1005 VEC_truncate (tree, build_v_must_defs, 0);
1006 }
1007
1008
1009 /* Finalize all the build vectors and attach the new operands to STMT. */
1010
1011 static inline void
1012 finalize_ssa_stmt_operands (tree stmt)
1013 {
1014 finalize_ssa_defs (stmt);
1015 finalize_ssa_uses (stmt);
1016 finalize_ssa_v_must_defs (stmt);
1017 finalize_ssa_v_may_defs (stmt);
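/* Note: finalize_ssa_vuses relies on the build_v_may_defs vector still
being populated at this point; it prunes VUSEs that are implied by a
V_MAY_DEF of the same variable and only then clears the vector via
cleanup_v_may_defs. */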
1018 finalize_ssa_vuses (stmt);
1019 }
1020
1021
1022 /* Start the process of building up the statement operand vectors. */
1023
1024 static inline void
1025 start_ssa_stmt_operands (void)
1026 {
1027 gcc_assert (VEC_length (tree, build_defs) == 0);
1028 gcc_assert (VEC_length (tree, build_uses) == 0);
1029 gcc_assert (VEC_length (tree, build_vuses) == 0);
1030 gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
1031 gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
1032 }
1033
1034
1035 /* Add DEF_P to the list of pointers to operands. */
1036
1037 static inline void
1038 append_def (tree *def_p)
1039 {
1040 VEC_safe_push (tree, heap, build_defs, (tree)def_p);
1041 }
1042
1043
1044 /* Add USE_P to the list of pointers to operands. */
1045
1046 static inline void
1047 append_use (tree *use_p)
1048 {
1049 VEC_safe_push (tree, heap, build_uses, (tree)use_p);
1050 }
1051
1052
1053 /* Add a new virtual may def for variable VAR to the build array. */
1054
1055 static inline void
1056 append_v_may_def (tree var)
1057 {
1058 if (TREE_CODE (var) != SSA_NAME)
1059 {
1060 var_ann_t ann = get_var_ann (var);
1061
1062 /* Don't allow duplicate entries. */
1063 if (ann->in_v_may_def_list)
1064 return;
1065 ann->in_v_may_def_list = 1;
1066 }
1067
1068 VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
1069 }
1070
1071
1072 /* Add VAR to the list of virtual uses. */
1073
1074 static inline void
1075 append_vuse (tree var)
1076 {
1077 /* Don't allow duplicate entries. */
1078 if (TREE_CODE (var) != SSA_NAME)
1079 {
1080 var_ann_t ann = get_var_ann (var);
1081
1082 if (ann->in_vuse_list || ann->in_v_may_def_list)
1083 return;
1084 ann->in_vuse_list = 1;
1085 }
1086
1087 VEC_safe_push (tree, heap, build_vuses, (tree)var);
1088 }
1089
1090
1091 /* Add VAR to the build list of virtual must definitions. */
1092
1093 static inline void
1094 append_v_must_def (tree var)
1095 {
1096 unsigned i;
1097
1098 /* Don't allow duplicate entries. */
1099 for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
1100 if (var == VEC_index (tree, build_v_must_defs, i))
1101 return;
1102
1103 VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
1104 }
1105
1106
1107 /* REF is a tree that contains the entire pointer dereference
1108 expression, if available, or NULL otherwise. ALIAS is the variable
1109 we are asking if REF can access. OFFSET and SIZE come from the
1110 memory access expression that generated this virtual operand.
1111 Return true if the access described by REF, OFFSET and SIZE may
1112 touch ALIAS, and false if it provably cannot. */
1113
1114 static bool
1115 access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
1116 HOST_WIDE_INT size)
1117 {
1118 bool offsetgtz = offset > 0;
1119 unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
1120 tree base = ref ? get_base_address (ref) : NULL;
1121
1122 /* If ALIAS is an SFT, it can't be touched if the offset
1123 and size of the access is not overlapping with the SFT offset and
1124 size. This is only true if we are accessing through a pointer
1125 to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
1126 be accessing through a pointer to some substruct of the
1127 structure, and if we try to prune there, we will have the wrong
1128 offset, and get the wrong answer.
1129 i.e., we can't prune without more work if we have something like
1130
1131 struct gcc_target
1132 {
1133 struct asm_out
1134 {
1135 const char *byte_op;
1136 struct asm_int_op
1137 {
1138 const char *hi;
1139 } aligned_op;
1140 } asm_out;
1141 } targetm;
1142
1143 foo = &targetm.asm_out.aligned_op;
1144 return foo->hi;
1145
1146 SFT.1, which represents hi, will have SFT_OFFSET=32 because in
1147 terms of SFT_PARENT_VAR, that is where it is.
1148 However, the access through the foo pointer will be at offset 0. */
1149 if (size != -1
1150 && TREE_CODE (alias) == STRUCT_FIELD_TAG
1151 && base
1152 && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
1153 && !overlap_subvar (offset, size, alias, NULL))
1154 {
1155 #ifdef ACCESS_DEBUGGING
1156 fprintf (stderr, "Access to ");
1157 print_generic_expr (stderr, ref, 0);
1158 fprintf (stderr, " may not touch ");
1159 print_generic_expr (stderr, alias, 0);
1160 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1161 #endif
1162 return false;
1163 }
1164
1165 /* With strict aliasing, it is impossible for a component access
1166 through a pointer to touch a random variable, unless that
1167 variable *is* a structure or a pointer.
1168
1169 That is, given p->c, and some random global variable b,
1170 there is no legal way that p->c could be an access to b.
1171
1172 Without strict aliasing on, we consider it legal to do something
1173 like:
1174
1175 struct foos { int l; };
1176 int foo;
1177 static struct foos *getfoo(void);
1178 int main (void)
1179 {
1180 struct foos *f = getfoo();
1181 f->l = 1;
1182 foo = 2;
1183 if (f->l == 1)
1184 abort();
1185 exit(0);
1186 }
1187 static struct foos *getfoo(void)
1188 { return (struct foos *)&foo; }
1189
1190 (taken from 20000623-1.c)
1191 */
1192 else if (ref
1193 && flag_strict_aliasing
1194 && TREE_CODE (ref) != INDIRECT_REF
1195 && !MTAG_P (alias)
1196 && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
1197 && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
1198 && !POINTER_TYPE_P (TREE_TYPE (alias)))
1199 {
1200 #ifdef ACCESS_DEBUGGING
1201 fprintf (stderr, "Access to ");
1202 print_generic_expr (stderr, ref, 0);
1203 fprintf (stderr, " may not touch ");
1204 print_generic_expr (stderr, alias, 0);
1205 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1206 #endif
1207 return false;
1208 }
1209
1210 /* If the offset of the access is greater than the size of one of
1211 the possible aliases, it can't be touching that alias, because it
1212 would be past the end of the structure. */
1213 else if (ref
1214 && flag_strict_aliasing
1215 && TREE_CODE (ref) != INDIRECT_REF
1216 && !MTAG_P (alias)
1217 && !POINTER_TYPE_P (TREE_TYPE (alias))
1218 && offsetgtz
1219 && DECL_SIZE (alias)
1220 && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
1221 && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
1222 {
1223 #ifdef ACCESS_DEBUGGING
1224 fprintf (stderr, "Access to ");
1225 print_generic_expr (stderr, ref, 0);
1226 fprintf (stderr, " may not touch ");
1227 print_generic_expr (stderr, alias, 0);
1228 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1229 #endif
1230 return false;
1231 }
1232
1233 return true;
1234 }
1235
1236
1237 /* Add VAR to the virtual operands array. FLAGS is as in
1238 get_expr_operands. FULL_REF is a tree that contains the entire
1239 pointer dereference expression, if available, or NULL otherwise.
1240 OFFSET and SIZE come from the memory access expression that
1241 generated this virtual operand. FOR_CLOBBER is true if this is
1242 adding a virtual operand for a call clobber. */
1243
1244 static void
1245 add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
1246 tree full_ref, HOST_WIDE_INT offset,
1247 HOST_WIDE_INT size, bool for_clobber)
1248 {
1249 VEC(tree,gc) *aliases;
1250 tree sym;
1251 var_ann_t v_ann;
1252
1253 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1254 v_ann = var_ann (sym);
1255
1256 /* Mark statements with volatile operands. Optimizers should back
1257 off from statements having volatile operands. */
1258 if (TREE_THIS_VOLATILE (sym) && s_ann)
1259 s_ann->has_volatile_ops = true;
1260
1261 /* If the variable cannot be modified and this is a V_MAY_DEF, change
1262 it into a VUSE. This happens when read-only variables are marked
1263 call-clobbered and/or aliased to writable variables. We only do
1264 this for non-specific stores.
1265
1266 Note that if this is a specific store, i.e. associated with a
1267 modify_expr, then we can't suppress the V_MAY_DEF, lest we run
1268 into validation problems.
1269
1270 This can happen when programs cast away const, leaving us with a
1271 store to read-only memory. If the statement is actually executed
1272 at runtime, then the program is ill formed. If the statement is
1273 not executed then all is well. At the very least, we cannot ICE. */
1274 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1275 flags &= ~(opf_is_def | opf_kill_def);
1276
1277 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1278 virtual operands, unless the caller has specifically requested
1279 not to add virtual operands (used when adding operands inside an
1280 ADDR_EXPR expression). */
1281 if (flags & opf_no_vops)
1282 return;
1283
1284 aliases = v_ann->may_aliases;
1285 if (aliases == NULL)
1286 {
1287 /* The variable is not aliased or it is an alias tag. */
1288 if (flags & opf_is_def)
1289 {
1290 if (flags & opf_kill_def)
1291 {
1292 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1293 variable definitions. */
1294 gcc_assert (!MTAG_P (var)
1295 || TREE_CODE (var) == STRUCT_FIELD_TAG);
1296 append_v_must_def (var);
1297 }
1298 else
1299 {
1300 /* Add a V_MAY_DEF for call-clobbered variables and
1301 memory tags. */
1302 append_v_may_def (var);
1303 }
1304 }
1305 else
1306 append_vuse (var);
1307 }
1308 else
1309 {
1310 unsigned i;
1311 tree al;
1312
1313 /* The variable is aliased. Add its aliases to the virtual
1314 operands. */
1315 gcc_assert (VEC_length (tree, aliases) != 0);
1316
1317 if (flags & opf_is_def)
1318 {
1319
1320 bool none_added = true;
1321
1322 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1323 {
1324 if (!access_can_touch_variable (full_ref, al, offset, size))
1325 continue;
1326
1327 none_added = false;
1328 append_v_may_def (al);
1329 }
1330
1331 /* If the variable is also an alias tag, add a virtual
1332 operand for it, otherwise we will miss representing
1333 references to the members of the variable's alias set.
1334 This fixes the bug in gcc.c-torture/execute/20020503-1.c.
1335
1336 It is also necessary to add bare defs on clobbers for
1337 SMT's, so that bare SMT uses caused by pruning all the
1338 aliases will link up properly with calls. In order to
1339 keep the number of these bare defs we add down to the
1340 minimum necessary, we keep track of which SMT's were used
1341 alone in statement vdefs or VUSEs. */
1342 if (v_ann->is_aliased
1343 || none_added
1344 || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
1345 && for_clobber
1346 && SMT_USED_ALONE (var)))
1347 {
1348 /* Every bare SMT def we add should have SMT_USED_ALONE
1349 set on it, or else we will get the wrong answer on
1350 clobbers. */
1351 if (none_added
1352 && !updating_used_alone && aliases_computed_p
1353 && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
1354 gcc_assert (SMT_USED_ALONE (var));
1355
1356 append_v_may_def (var);
1357 }
1358 }
1359 else
1360 {
1361 bool none_added = true;
1362 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1363 {
1364 if (!access_can_touch_variable (full_ref, al, offset, size))
1365 continue;
1366 none_added = false;
1367 append_vuse (al);
1368 }
1369
1370 /* Similarly, append a virtual use for VAR itself, when
1371 it is an alias tag. */
1372 if (v_ann->is_aliased || none_added)
1373 append_vuse (var);
1374 }
1375 }
1376 }
1377
1378
1379 /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
1380 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1381 the statement's real operands, otherwise it is added to virtual
1382 operands. */
1383
1384 static void
1385 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1386 {
1387 bool is_real_op;
1388 tree var, sym;
1389 var_ann_t v_ann;
1390
1391 var = *var_p;
1392 gcc_assert (SSA_VAR_P (var));
1393
1394 is_real_op = is_gimple_reg (var);
1395
1396 /* If this is a real operand, the operand is either an SSA name or a
1397 decl. Virtual operands may only be decls. */
1398 gcc_assert (is_real_op || DECL_P (var));
1399
1400 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1401 v_ann = var_ann (sym);
1402
1403 /* Mark statements with volatile operands. Optimizers should back
1404 off from statements having volatile operands. */
1405 if (TREE_THIS_VOLATILE (sym) && s_ann)
1406 s_ann->has_volatile_ops = true;
1407
1408 if (is_real_op)
1409 {
1410 /* The variable is a GIMPLE register. Add it to real operands. */
1411 if (flags & opf_is_def)
1412 append_def (var_p);
1413 else
1414 append_use (var_p);
1415 }
1416 else
1417 add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
1418 }
1419
1420
1421 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1422 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1423
1424 STMT is the statement being processed, EXPR is the INDIRECT_REF
1425 that got us here.
1426
1427 FLAGS is as in get_expr_operands.
1428
1429 FULL_REF contains the full pointer dereference expression, if we
1430 have it, or NULL otherwise.
1431
1432 OFFSET and SIZE are the location of the access inside the
1433 dereferenced pointer, if known.
1434
1435 RECURSE_ON_BASE should be set to true if we want to continue
1436 calling get_expr_operands on the base pointer, and false if
1437 something else will do it for us. */
1438
1439 static void
1440 get_indirect_ref_operands (tree stmt, tree expr, int flags,
1441 tree full_ref,
1442 HOST_WIDE_INT offset, HOST_WIDE_INT size,
1443 bool recurse_on_base)
1444 {
1445 tree *pptr = &TREE_OPERAND (expr, 0);
1446 tree ptr = *pptr;
1447 stmt_ann_t s_ann = stmt_ann (stmt);
1448
1449 /* Stores into INDIRECT_REF operands are never killing definitions. */
1450 flags &= ~opf_kill_def;
1451
1452 if (SSA_VAR_P (ptr))
1453 {
1454 struct ptr_info_def *pi = NULL;
1455
1456 /* If PTR has flow-sensitive points-to information, use it. */
1457 if (TREE_CODE (ptr) == SSA_NAME
1458 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1459 && pi->name_mem_tag)
1460 {
1461 /* PTR has its own memory tag. Use it. */
1462 add_virtual_operand (pi->name_mem_tag, s_ann, flags,
1463 full_ref, offset, size, false);
1464 }
1465 else
1466 {
1467 /* If PTR is not an SSA_NAME or it doesn't have a name
1468 tag, use its symbol memory tag. */
1469 var_ann_t v_ann;
1470
1471 /* If we are emitting debugging dumps, display a warning if
1472 PTR is an SSA_NAME with no flow-sensitive alias
1473 information. That means that we may need to compute
1474 aliasing again. */
1475 if (dump_file
1476 && TREE_CODE (ptr) == SSA_NAME
1477 && pi == NULL)
1478 {
1479 fprintf (dump_file,
1480 "NOTE: no flow-sensitive alias info for ");
1481 print_generic_expr (dump_file, ptr, dump_flags);
1482 fprintf (dump_file, " in ");
1483 print_generic_stmt (dump_file, stmt, dump_flags);
1484 }
1485
1486 if (TREE_CODE (ptr) == SSA_NAME)
1487 ptr = SSA_NAME_VAR (ptr);
1488 v_ann = var_ann (ptr);
1489
1490 if (v_ann->symbol_mem_tag)
1491 add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
1492 full_ref, offset, size, false);
1493 }
1494 }
1495 else if (TREE_CODE (ptr) == INTEGER_CST)
1496 {
1497 /* If a constant is used as a pointer, we can't generate a real
1498 operand for it but we mark the statement volatile to prevent
1499 optimizations from messing things up. */
1500 if (s_ann)
1501 s_ann->has_volatile_ops = true;
1502 return;
1503 }
1504 else
1505 {
1506 /* Ok, this isn't even is_gimple_min_invariant. Something's broken. */
1507 gcc_unreachable ();
1508 }
1509
1510 /* If requested, add a USE operand for the base pointer. */
1511 if (recurse_on_base)
1512 get_expr_operands (stmt, pptr, opf_none);
1513 }
1514
1515
1516 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1517
1518 static void
1519 get_tmr_operands (tree stmt, tree expr, int flags)
1520 {
1521 tree tag = TMR_TAG (expr), ref;
1522 HOST_WIDE_INT offset, size, maxsize;
1523 subvar_t svars, sv;
1524 stmt_ann_t s_ann = stmt_ann (stmt);
1525
1526 /* First record the real operands. */
1527 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1528 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1529
1530 /* MEM_REFs should never be killing. */
1531 flags &= ~opf_kill_def;
1532
1533 if (TMR_SYMBOL (expr))
1534 {
1535 stmt_ann_t ann = stmt_ann (stmt);
1536 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1537 }
1538
1539 if (!tag)
1540 {
1541 /* Something weird, so ensure that we will be careful. */
1542 stmt_ann (stmt)->has_volatile_ops = true;
1543 return;
1544 }
1545
1546 if (DECL_P (tag))
1547 {
1548 get_expr_operands (stmt, &tag, flags);
1549 return;
1550 }
1551
1552 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1553 gcc_assert (ref != NULL_TREE);
1554 svars = get_subvars_for_var (ref);
1555 for (sv = svars; sv; sv = sv->next)
1556 {
1557 bool exact;
1558 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1559 {
1560 int subvar_flags = flags;
1561 if (!exact || size != maxsize)
1562 subvar_flags &= ~opf_kill_def;
1563 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1564 }
1565 }
1566 }
1567
1568
1569 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1570 clobbered variables in the function. */
1571
1572 static void
1573 add_call_clobber_ops (tree stmt, tree callee)
1574 {
1575 unsigned u;
1576 bitmap_iterator bi;
1577 stmt_ann_t s_ann = stmt_ann (stmt);
1578 bitmap not_read_b, not_written_b;
1579
1580 /* Functions that are not const, pure, or noreturn may clobber
1581 call-clobbered variables. */
1582 if (s_ann)
1583 s_ann->makes_clobbering_call = true;
1584
1585 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1586 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1587 if (global_var)
1588 {
1589 add_stmt_operand (&global_var, s_ann, opf_is_def);
1590 return;
1591 }
1592
1593 /* Get info for local and module level statics. There is a bit
1594 set for each static if the call being processed does not read
1595 or write that variable. */
1596 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1597 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1598 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1599 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1600 {
1601 tree var = referenced_var_lookup (u);
1602 unsigned int escape_mask = var_ann (var)->escape_mask;
1603 tree real_var = var;
1604 bool not_read;
1605 bool not_written;
1606
1607 /* Not read and not written are computed on regular vars, not
1608 subvars, so look at the parent var if this is an SFT. */
1609 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1610 real_var = SFT_PARENT_VAR (var);
1611
1612 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1613 DECL_UID (real_var)) : false;
1614 not_written = not_written_b ? bitmap_bit_p (not_written_b,
1615 DECL_UID (real_var)) : false;
1616 gcc_assert (!unmodifiable_var_p (var));
1617
1618 clobber_stats.clobbered_vars++;
1619
1620 /* See if this variable is really clobbered by this function. */
1621
1622 /* Trivial case: Things escaping only to pure/const are not
1623 clobbered by non-pure-const, and only read by pure/const. */
1624 if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
1625 {
1626 tree call = get_call_expr_in (stmt);
1627 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1628 {
1629 add_stmt_operand (&var, s_ann, opf_none);
1630 clobber_stats.unescapable_clobbers_avoided++;
1631 continue;
1632 }
1633 else
1634 {
1635 clobber_stats.unescapable_clobbers_avoided++;
1636 continue;
1637 }
1638 }
1639
1640 if (not_written)
1641 {
1642 clobber_stats.static_write_clobbers_avoided++;
1643 if (!not_read)
1644 add_stmt_operand (&var, s_ann, opf_none);
1645 else
1646 clobber_stats.static_read_clobbers_avoided++;
1647 }
1648 else
1649 add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
1650 }
1651 }
1652
1653
1654 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1655 function. */
1656
1657 static void
1658 add_call_read_ops (tree stmt, tree callee)
1659 {
1660 unsigned u;
1661 bitmap_iterator bi;
1662 stmt_ann_t s_ann = stmt_ann (stmt);
1663 bitmap not_read_b;
1664
1665 /* If the function is not pure, it may reference memory. Add
1666 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1667 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1668 if (global_var)
1669 {
1670 add_stmt_operand (&global_var, s_ann, opf_none);
1671 return;
1672 }
1673
1674 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1675
1676 /* Add a VUSE for each call-clobbered variable. */
1677 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1678 {
1679 tree var = referenced_var (u);
1680 tree real_var = var;
1681 bool not_read;
1682
1683 clobber_stats.readonly_clobbers++;
1684
1685 /* Not read and not written are computed on regular vars, not
1686 subvars, so look at the parent var if this is an SFT. */
1687
1688 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1689 real_var = SFT_PARENT_VAR (var);
1690
1691 not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
1692 : false;
1693
1694 if (not_read)
1695 {
1696 clobber_stats.static_readonly_clobbers_avoided++;
1697 continue;
1698 }
1699
1700 add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
1701 }
1702 }
1703
1704
1705 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1706
1707 static void
1708 get_call_expr_operands (tree stmt, tree expr)
1709 {
1710 tree op;
1711 int call_flags = call_expr_flags (expr);
1712
1713 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1714 operands for all the symbols that have been found to be
1715 call-clobbered.
1716
1717 Note that if aliases have not been computed, the global effects
1718 of calls will not be included in the SSA web. This is fine
1719 because no optimizer should run before aliases have been
1720 computed. By not bothering with virtual operands for CALL_EXPRs
1721 we avoid adding superfluous virtual operands, which can be a
1722 significant compile time sink (See PR 15855). */
1723 if (aliases_computed_p
1724 && !bitmap_empty_p (call_clobbered_vars)
1725 && !(call_flags & ECF_NOVOPS))
1726 {
1727 /* A 'pure' or a 'const' function never call-clobbers anything.
1728 A 'noreturn' function might, but since we don't return anyway
1729 there is no point in recording that. */
1730 if (TREE_SIDE_EFFECTS (expr)
1731 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1732 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1733 else if (!(call_flags & ECF_CONST))
1734 add_call_read_ops (stmt, get_callee_fndecl (expr));
1735 }
1736
1737 /* Find uses in the called function. */
1738 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1739
1740 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1741 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1742
1743 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1744 }
1745
1746
1747 /* Scan operands for the ASM_EXPR statement STMT. */
1748
1749 static void
1750 get_asm_expr_operands (tree stmt)
1751 {
1752 stmt_ann_t s_ann = stmt_ann (stmt);
1753 int noutputs = list_length (ASM_OUTPUTS (stmt));
1754 const char **oconstraints
1755 = (const char **) alloca ((noutputs) * sizeof (const char *));
1756 int i;
1757 tree link;
1758 const char *constraint;
1759 bool allows_mem, allows_reg, is_inout;
1760
1761 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1762 {
1763 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1764 oconstraints[i] = constraint;
1765 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
1766 &allows_reg, &is_inout);
1767
1768 /* This should have been split in gimplify_asm_expr. */
1769 gcc_assert (!allows_reg || !is_inout);
1770
1771 /* Memory operands are addressable. Note that STMT needs the
1772 address of this operand. */
1773 if (!allows_reg && allows_mem)
1774 {
1775 tree t = get_base_address (TREE_VALUE (link));
1776 if (t && DECL_P (t) && s_ann)
1777 add_to_addressable_set (t, &s_ann->addresses_taken);
1778 }
1779
1780 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1781 }
1782
1783 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1784 {
1785 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1786 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1787 oconstraints, &allows_mem, &allows_reg);
1788
1789 /* Memory operands are addressable. Note that STMT needs the
1790 address of this operand. */
1791 if (!allows_reg && allows_mem)
1792 {
1793 tree t = get_base_address (TREE_VALUE (link));
1794 if (t && DECL_P (t) && s_ann)
1795 add_to_addressable_set (t, &s_ann->addresses_taken);
1796 }
1797
1798 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1799 }
1800
1801
1802 /* Clobber memory for asm ("" : : : "memory"); */
1803 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1804 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1805 {
1806 unsigned i;
1807 bitmap_iterator bi;
1808
1809 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1810 decided to group them). */
1811 if (global_var)
1812 add_stmt_operand (&global_var, s_ann, opf_is_def);
1813 else
1814 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1815 {
1816 tree var = referenced_var (i);
1817 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1818 }
1819
1820 /* Now clobber all addressables. */
1821 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1822 {
1823 tree var = referenced_var (i);
1824
1825 /* Subvars are explicitly represented in this list, so
1826 we don't need the original to be added to the clobber
1827 ops, but the original *will* be in this list because
1828 we keep the addressability of the original
1829 variable up-to-date so we don't screw up the rest of
1830 the backend. */
1831 if (var_can_have_subvars (var)
1832 && get_subvars_for_var (var) != NULL)
1833 continue;
1834
1835 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1836 }
1837
1838 break;
1839 }
1840 }
1841
1842
1843 /* Scan operands for the assignment expression EXPR in statement STMT. */
1844
1845 static void
1846 get_modify_expr_operands (tree stmt, tree expr)
1847 {
1848 /* First get operands from the RHS. */
1849 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1850
1851 /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
1852 registers. If the LHS is a store to memory, we will either need
1853 a preserving definition (V_MAY_DEF) or a killing definition
1854 (V_MUST_DEF).
1855
1856 Preserving definitions are those that modify a part of an
1857 aggregate object for which no subvars have been computed (or the
1858 reference does not correspond exactly to one of them). Stores
1859 through a pointer are also represented with V_MAY_DEF operators.
1860
1861 The determination of whether to use a preserving or a killing
1862 definition is done while scanning the LHS of the assignment. By
1863 default, assume that we will emit a V_MUST_DEF. */
1864 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def|opf_kill_def);
1865 }
1866
1867
1868 /* Recursively scan the expression pointed to by EXPR_P in statement
1869 STMT. FLAGS is one of the OPF_* constants modifying how to
1870 interpret the operands found. */
1871
1872 static void
1873 get_expr_operands (tree stmt, tree *expr_p, int flags)
1874 {
1875 enum tree_code code;
1876 enum tree_code_class class;
1877 tree expr = *expr_p;
1878 stmt_ann_t s_ann = stmt_ann (stmt);
1879
1880 if (expr == NULL)
1881 return;
1882
1883 code = TREE_CODE (expr);
1884 class = TREE_CODE_CLASS (code);
1885
1886 switch (code)
1887 {
1888 case ADDR_EXPR:
1889 /* Taking the address of a variable does not represent a
1890 reference to it, but the fact that the statement takes its
1891 address will be of interest to some passes (e.g. alias
1892 resolution). */
1893 add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
1894
1895 /* If the address is invariant, there may be no interesting
1896 variable references inside. */
1897 if (is_gimple_min_invariant (expr))
1898 return;
1899
1900 /* Otherwise, there may be variables referenced inside but there
1901 should be no VUSEs created, since the referenced objects are
1902 not really accessed. The only operands that we should find
1903 here are ARRAY_REF indices which will always be real operands
1904 (GIMPLE does not allow non-registers as array indices). */
1905 flags |= opf_no_vops;
1906 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1907 return;
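/* Illustrative example (not part of the original sources): for
   'p_1 = &a.x[i_2]' the recursive call above adds the ARRAY_REF index
   i_2 as a real USE, but creates no VUSE for 'a'; only the fact that
   the statement takes the address of 'a' is recorded in
   ADDRESSES_TAKEN.  */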
1908
1909 case SSA_NAME:
1910 case STRUCT_FIELD_TAG:
1911 case SYMBOL_MEMORY_TAG:
1912 case NAME_MEMORY_TAG:
1913 add_stmt_operand (expr_p, s_ann, flags);
1914 return;
1915
1916 case VAR_DECL:
1917 case PARM_DECL:
1918 case RESULT_DECL:
1919 {
1920 subvar_t svars;
1921
1922 /* Add the subvars for a variable, if it has subvars, to DEFS
1923 or USES. Otherwise, add the variable itself. Whether it
1924 goes to USES or DEFS depends on the operand flags. */
1925 if (var_can_have_subvars (expr)
1926 && (svars = get_subvars_for_var (expr)))
1927 {
1928 subvar_t sv;
1929 for (sv = svars; sv; sv = sv->next)
1930 add_stmt_operand (&sv->var, s_ann, flags);
1931 }
1932 else
1933 add_stmt_operand (expr_p, s_ann, flags);
1934
1935 return;
1936 }
1937
1938 case MISALIGNED_INDIRECT_REF:
1939 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1940 /* fall through */
1941
1942 case ALIGN_INDIRECT_REF:
1943 case INDIRECT_REF:
1944 get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
1945 return;
1946
1947 case TARGET_MEM_REF:
1948 get_tmr_operands (stmt, expr, flags);
1949 return;
1950
1951 case ARRAY_REF:
1952 case ARRAY_RANGE_REF:
1953 case COMPONENT_REF:
1954 case REALPART_EXPR:
1955 case IMAGPART_EXPR:
1956 {
1957 tree ref;
1958 HOST_WIDE_INT offset, size, maxsize;
1959 bool none = true;
1960
1961 /* If we can determine which subvariables this component
1962 reference can touch, it becomes an access to all of them,
1963 but *NOT* to the underlying variable itself. If we cannot
1964 determine which fields may be touched, the recursion will
1965 eventually reach a variable and add *all* of its subvars,
1966 or whatever is the minimum correct subset. */
1967 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
1968 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
1969 {
1970 subvar_t sv;
1971 subvar_t svars = get_subvars_for_var (ref);
1972
1973 for (sv = svars; sv; sv = sv->next)
1974 {
1975 bool exact;
1976
1977 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1978 {
1979 int subvar_flags = flags;
1980 none = false;
1981 if (!exact || size != maxsize)
1982 subvar_flags &= ~opf_kill_def;
1983 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1984 }
1985 }
1986
1987 if (!none)
1988 flags |= opf_no_vops;
1989 }
1990 else if (TREE_CODE (ref) == INDIRECT_REF)
1991 {
1992 get_indirect_ref_operands (stmt, ref, flags, expr, offset,
1993 maxsize, false);
1994 flags |= opf_no_vops;
1995 }
1996
1997 /* Even if we found subvars above, we still need to scan the
1998 reference itself so that we see immediate uses such as d in
1999 s.a[d]; if s.a has a subvar we would miss d otherwise. */
2000 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
2001 flags & ~opf_kill_def);
2002
2003 if (code == COMPONENT_REF)
2004 {
2005 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
2006 s_ann->has_volatile_ops = true;
2007 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
2008 }
2009 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
2010 {
2011 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
2012 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
2013 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
2014 }
2015
2016 return;
2017 }
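/* Illustrative example (not part of the original sources): if 's' has
   been split into subvars SFT.1 (for s.a) and SFT.2 (for s.b), then a
   reference to 's.b' adds an operand for SFT.2 only, while the index
   'd' in 's.a[d]' is still scanned as a real USE by the recursive
   call above.  */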
2018
2019 case WITH_SIZE_EXPR:
2020 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
2021 and an rvalue reference to its second argument. */
2022 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
2023 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2024 return;
2025
2026 case CALL_EXPR:
2027 get_call_expr_operands (stmt, expr);
2028 return;
2029
2030 case COND_EXPR:
2031 case VEC_COND_EXPR:
2032 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
2033 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
2034 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
2035 return;
2036
2037 case MODIFY_EXPR:
2038 get_modify_expr_operands (stmt, expr);
2039 return;
2040
2041 case CONSTRUCTOR:
2042 {
2043 /* General aggregate CONSTRUCTORs have been decomposed, but they
2044 are still in use as the COMPLEX_EXPR equivalent for vectors. */
2045 constructor_elt *ce;
2046 unsigned HOST_WIDE_INT idx;
2047
2048 for (idx = 0;
2049 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
2050 idx++)
2051 get_expr_operands (stmt, &ce->value, opf_none);
2052
2053 return;
2054 }
2055
2056 case BIT_FIELD_REF:
2057 /* Stores using BIT_FIELD_REF are always preserving definitions. */
2058 flags &= ~opf_kill_def;
2059
2060 /* Fallthru */
2061
2062 case TRUTH_NOT_EXPR:
2063 case VIEW_CONVERT_EXPR:
2064 do_unary:
2065 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2066 return;
2067
2068 case TRUTH_AND_EXPR:
2069 case TRUTH_OR_EXPR:
2070 case TRUTH_XOR_EXPR:
2071 case COMPOUND_EXPR:
2072 case OBJ_TYPE_REF:
2073 case ASSERT_EXPR:
2074 do_binary:
2075 {
2076 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2077 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2078 return;
2079 }
2080
2081 case DOT_PROD_EXPR:
2082 case REALIGN_LOAD_EXPR:
2083 {
2084 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2085 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2086 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
2087 return;
2088 }
2089
2090 case BLOCK:
2091 case FUNCTION_DECL:
2092 case EXC_PTR_EXPR:
2093 case FILTER_EXPR:
2094 case LABEL_DECL:
2095 case CONST_DECL:
2096 case OMP_PARALLEL:
2097 case OMP_SECTIONS:
2098 case OMP_FOR:
2099 case OMP_RETURN_EXPR:
2100 case OMP_SINGLE:
2101 case OMP_MASTER:
2102 case OMP_ORDERED:
2103 case OMP_CRITICAL:
2104 /* Expressions that make no memory references. */
2105 return;
2106
2107 default:
2108 if (class == tcc_unary)
2109 goto do_unary;
2110 if (class == tcc_binary || class == tcc_comparison)
2111 goto do_binary;
2112 if (class == tcc_constant || class == tcc_type)
2113 return;
2114 }
2115
2116 /* If we get here, something has gone wrong. */
2117 #ifdef ENABLE_CHECKING
2118 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
2119 debug_tree (expr);
2120 fputs ("\n", stderr);
2121 #endif
2122 gcc_unreachable ();
2123 }
2124
2125
2126 /* Parse STMT looking for operands. When finished, the various
2127 build_* operand vectors will have potential operands in them. */
2128
2129 static void
2130 parse_ssa_operands (tree stmt)
2131 {
2132 enum tree_code code;
2133
2134 code = TREE_CODE (stmt);
2135 switch (code)
2136 {
2137 case MODIFY_EXPR:
2138 get_modify_expr_operands (stmt, stmt);
2139 break;
2140
2141 case COND_EXPR:
2142 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
2143 break;
2144
2145 case SWITCH_EXPR:
2146 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
2147 break;
2148
2149 case ASM_EXPR:
2150 get_asm_expr_operands (stmt);
2151 break;
2152
2153 case RETURN_EXPR:
2154 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
2155 break;
2156
2157 case GOTO_EXPR:
2158 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
2159 break;
2160
2161 case LABEL_EXPR:
2162 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
2163 break;
2164
2165 case BIND_EXPR:
2166 case CASE_LABEL_EXPR:
2167 case TRY_CATCH_EXPR:
2168 case TRY_FINALLY_EXPR:
2169 case EH_FILTER_EXPR:
2170 case CATCH_EXPR:
2171 case RESX_EXPR:
2172 /* These nodes contain no variable references. */
2173 break;
2174
2175 default:
2176 /* Notice that if get_expr_operands tries to use &STMT as the
2177 operand pointer (which may only happen for USE operands), we
2178 will fail in add_stmt_operand. This default will handle
2179 statements like empty statements, or CALL_EXPRs that may
2180 appear on the RHS of a statement or as statements themselves. */
2181 get_expr_operands (stmt, &stmt, opf_none);
2182 break;
2183 }
2184 }
2185
2186
2187 /* Create an operands cache for STMT. */
2188
2189 static void
2190 build_ssa_operands (tree stmt)
2191 {
2192 stmt_ann_t ann = get_stmt_ann (stmt);
2193
2194 /* Initially assume that the statement has no volatile operands. */
2195 if (ann)
2196 ann->has_volatile_ops = false;
2197
2198 start_ssa_stmt_operands ();
2199
2200 parse_ssa_operands (stmt);
2201 operand_build_sort_virtual (build_vuses);
2202 operand_build_sort_virtual (build_v_may_defs);
2203 operand_build_sort_virtual (build_v_must_defs);
2204
2205 finalize_ssa_stmt_operands (stmt);
2206 }
2207
2208
2209 /* Free any operand vectors in OPS. */
2210
2211 void
2212 free_ssa_operands (stmt_operands_p ops)
2213 {
2214 ops->def_ops = NULL;
2215 ops->use_ops = NULL;
2216 ops->maydef_ops = NULL;
2217 ops->mustdef_ops = NULL;
2218 ops->vuse_ops = NULL;
2219 }
2220
2221
2222 /* Update the operand cache of statement STMT. */
2223
2224 void
2225 update_stmt_operands (tree stmt)
2226 {
2227 stmt_ann_t ann = get_stmt_ann (stmt);
2228
2229 /* If update_stmt_operands is called before SSA is initialized, do
2230 nothing. */
2231 if (!ssa_operands_active ())
2232 return;
2233
2234 /* The optimizers cannot handle statements that are nothing but a
2235 _DECL. This indicates a bug in the gimplifier. */
2236 gcc_assert (!SSA_VAR_P (stmt));
2237
2238 gcc_assert (ann->modified);
2239
2240 timevar_push (TV_TREE_OPS);
2241
2242 build_ssa_operands (stmt);
2243
2244 /* Clear the modified bit for STMT. */
2245 ann->modified = 0;
2246
2247 timevar_pop (TV_TREE_OPS);
2248 }
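/* Typical usage sketch (not part of the original sources; assumes the
   update_stmt () wrapper from tree-flow-inline.h):

       TREE_OPERAND (stmt, 1) = new_rhs;  <-- modify the statement in place
       update_stmt (stmt);                <-- set the modified bit and
                                              rebuild the operand cache

   Passes normally go through update_stmt () rather than calling this
   function directly; the wrapper sets the modified bit that is
   asserted above.  */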
2249
2250
2251 /* Copy the virtual operands from SRC to DEST. */
2252
2253 void
2254 copy_virtual_operands (tree dest, tree src)
2255 {
2256 tree t;
2257 ssa_op_iter iter, old_iter;
2258 use_operand_p use_p, u2;
2259 def_operand_p def_p, d2;
2260
2261 build_ssa_operands (dest);
2262
2263 /* Copy all the virtual fields. */
2264 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
2265 append_vuse (t);
2266 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
2267 append_v_may_def (t);
2268 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
2269 append_v_must_def (t);
2270
2271 if (VEC_length (tree, build_vuses) == 0
2272 && VEC_length (tree, build_v_may_defs) == 0
2273 && VEC_length (tree, build_v_must_defs) == 0)
2274 return;
2275
2276 /* Now commit the virtual operands to this stmt. */
2277 finalize_ssa_v_must_defs (dest);
2278 finalize_ssa_v_may_defs (dest);
2279 finalize_ssa_vuses (dest);
2280
2281 /* Finally, set the fields to the same values as the originals. */
2282 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
2283 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
2284 {
2285 gcc_assert (!op_iter_done (&old_iter));
2286 SET_USE (use_p, t);
2287 t = op_iter_next_tree (&old_iter);
2288 }
2289 gcc_assert (op_iter_done (&old_iter));
2290
2291 op_iter_init_maydef (&old_iter, src, &u2, &d2);
2292 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
2293 {
2294 gcc_assert (!op_iter_done (&old_iter));
2295 SET_USE (use_p, USE_FROM_PTR (u2));
2296 SET_DEF (def_p, DEF_FROM_PTR (d2));
2297 op_iter_next_maymustdef (&u2, &d2, &old_iter);
2298 }
2299 gcc_assert (op_iter_done (&old_iter));
2300
2301 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
2302 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
2303 {
2304 gcc_assert (!op_iter_done (&old_iter));
2305 SET_USE (use_p, USE_FROM_PTR (u2));
2306 SET_DEF (def_p, DEF_FROM_PTR (d2));
2307 op_iter_next_maymustdef (&u2, &d2, &old_iter);
2308 }
2309 gcc_assert (op_iter_done (&old_iter));
2310
2311 }
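/* Usage sketch (not part of the original sources): after a pass makes
   a structural copy of a statement, e.g.

       tree new_stmt = ...;   <-- structural copy of OLD_STMT
       copy_virtual_operands (new_stmt, old_stmt);

   the copy receives the same VUSE, V_MAY_DEF and V_MUST_DEF SSA names
   as the original rather than the unrenamed base variables.  */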
2312
2313
2314 /* Specifically for use in DOM's expression analysis. Given a store, we
2315 create an artificial statement that looks like a load from the store;
2316 this can be used to eliminate redundant loads. OLD_STMT is the store
2317 statement, and NEW_STMT is the new load, which represents a load of
2318 the values stored. */
2319
2320 void
2321 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
2322 {
2323 stmt_ann_t ann;
2324 tree op;
2325 ssa_op_iter iter;
2326 use_operand_p use_p;
2327 unsigned x;
2328
2329 ann = get_stmt_ann (new_stmt);
2330
2331 /* Process the stmt looking for operands. */
2332 start_ssa_stmt_operands ();
2333 parse_ssa_operands (new_stmt);
2334
2335 for (x = 0; x < VEC_length (tree, build_vuses); x++)
2336 {
2337 tree t = VEC_index (tree, build_vuses, x);
2338 if (TREE_CODE (t) != SSA_NAME)
2339 {
2340 var_ann_t ann = var_ann (t);
2341 ann->in_vuse_list = 0;
2342 }
2343 }
2344
2345 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
2346 {
2347 tree t = VEC_index (tree, build_v_may_defs, x);
2348 if (TREE_CODE (t) != SSA_NAME)
2349 {
2350 var_ann_t ann = var_ann (t);
2351 ann->in_v_may_def_list = 0;
2352 }
2353 }
2354
2355 /* Remove any virtual operands that were found. */
2356 VEC_truncate (tree, build_v_may_defs, 0);
2357 VEC_truncate (tree, build_v_must_defs, 0);
2358 VEC_truncate (tree, build_vuses, 0);
2359
2360 /* For each VDEF on the original statement, we want to create a
2361 VUSE of the corresponding V_MAY_DEF result or V_MUST_DEF result
2362 on the new statement. */
2363 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
2364 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
2365 append_vuse (op);
2366
2367 /* Now build the operands for this new stmt. */
2368 finalize_ssa_stmt_operands (new_stmt);
2369
2370 /* None of the uses in this fake stmt may remain linked into the immediate use lists. */
2371 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2372 delink_imm_use (use_p);
2373 }
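/* Usage sketch (not part of the original sources): given a store such
   as '*p_1 = x_2', DOM builds a fake statement of the form
   'x_2 = *p_1', calls this function with the fake statement and the
   original store, and records the fake load in its expression tables
   so that a later redundant load from '*p_1' can be replaced by
   x_2.  */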
2374
2375
2376 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made
2377 to test the validity of the swap operation. */
2378
2379 void
2380 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2381 {
2382 tree op0, op1;
2383 op0 = *exp0;
2384 op1 = *exp1;
2385
2386 /* If the operand cache is active, attempt to preserve the relative
2387 positions of these two operands in their respective immediate use
2388 lists. */
2389 if (ssa_operands_active () && op0 != op1)
2390 {
2391 use_optype_p use0, use1, ptr;
2392 use0 = use1 = NULL;
2393
2394 /* Find the 2 operands in the cache, if they are there. */
2395 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2396 if (USE_OP_PTR (ptr)->use == exp0)
2397 {
2398 use0 = ptr;
2399 break;
2400 }
2401
2402 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2403 if (USE_OP_PTR (ptr)->use == exp1)
2404 {
2405 use1 = ptr;
2406 break;
2407 }
2408
2409 /* If either use lacks an operand entry, there isn't much we can do
2410 at this point; presumably we don't need to worry about it. */
2411 if (use0 && use1)
2412 {
2413 tree *tmp = USE_OP_PTR (use1)->use;
2414 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2415 USE_OP_PTR (use0)->use = tmp;
2416 }
2417 }
2418
2419 /* Now swap the data. */
2420 *exp0 = op1;
2421 *exp1 = op0;
2422 }
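/* Usage sketch (not part of the original sources): to canonicalize a
   commutative RHS such as 'x_1 = b_2 + a_3' into 'x_1 = a_3 + b_2'
   without rebuilding the operand cache, a pass can do

       tree rhs = TREE_OPERAND (stmt, 1);
       swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
                           &TREE_OPERAND (rhs, 1));

   which also keeps each use in the same position of its SSA_NAME's
   immediate-use list.  */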
2423
2424
2425 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
2426 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
2427 a single variable whose address has been taken or any other valid
2428 GIMPLE memory reference (structure reference, array, etc). If the
2429 base address of REF is a decl that has sub-variables, also add all
2430 of its sub-variables. */
2431
2432 void
2433 add_to_addressable_set (tree ref, bitmap *addresses_taken)
2434 {
2435 tree var;
2436 subvar_t svars;
2437
2438 gcc_assert (addresses_taken);
2439
2440 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2441 as the only thing we take the address of. If VAR is a structure,
2442 taking the address of a field means that the whole structure may
2443 be referenced using pointer arithmetic. See PR 21407 and the
2444 ensuing mailing list discussion. */
2445 var = get_base_address (ref);
2446 if (var && SSA_VAR_P (var))
2447 {
2448 if (*addresses_taken == NULL)
2449 *addresses_taken = BITMAP_GGC_ALLOC ();
2450
2451 if (var_can_have_subvars (var)
2452 && (svars = get_subvars_for_var (var)))
2453 {
2454 subvar_t sv;
2455 for (sv = svars; sv; sv = sv->next)
2456 {
2457 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
2458 TREE_ADDRESSABLE (sv->var) = 1;
2459 }
2460 }
2461 else
2462 {
2463 bitmap_set_bit (*addresses_taken, DECL_UID (var));
2464 TREE_ADDRESSABLE (var) = 1;
2465 }
2466 }
2467 }
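/* Illustrative example (not part of the original sources): for
   'p_1 = &s.f' the base address is the whole structure 's', so 's'
   (or, if it has been split into subvars, each of its subvars) is
   added to *ADDRESSES_TAKEN and marked TREE_ADDRESSABLE; the field
   's.f' is never entered on its own.  */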
2468
2469
2470 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2471 Return TRUE if there is a problem, and emit an error message to F. */
2472
2473 bool
2474 verify_imm_links (FILE *f, tree var)
2475 {
2476 use_operand_p ptr, prev, list;
2477 int count;
2478
2479 gcc_assert (TREE_CODE (var) == SSA_NAME);
2480
2481 list = &(SSA_NAME_IMM_USE_NODE (var));
2482 gcc_assert (list->use == NULL);
2483
2484 if (list->prev == NULL)
2485 {
2486 gcc_assert (list->next == NULL);
2487 return false;
2488 }
2489
2490 prev = list;
2491 count = 0;
2492 for (ptr = list->next; ptr != list; )
2493 {
2494 if (prev != ptr->prev)
2495 goto error;
2496
2497 if (ptr->use == NULL)
2498 goto error; /* 2 roots, or SAFE guard node. */
2499 else if (*(ptr->use) != var)
2500 goto error;
2501
2502 prev = ptr;
2503 ptr = ptr->next;
2504
2505 /* Avoid infinite loops. 50,000,000 uses probably indicates a
2506 problem. */
2507 if (count++ > 50000000)
2508 goto error;
2509 }
2510
2511 /* Verify list in the other direction. */
2512 prev = list;
2513 for (ptr = list->prev; ptr != list; )
2514 {
2515 if (prev != ptr->next)
2516 goto error;
2517 prev = ptr;
2518 ptr = ptr->prev;
2519 if (count-- < 0)
2520 goto error;
2521 }
2522
2523 if (count != 0)
2524 goto error;
2525
2526 return false;
2527
2528 error:
2529 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2530 {
2531 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2532 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2533 }
2534 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2535 (void *)ptr->use);
2536 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2537 fprintf (f, "\n");
2538 return true;
2539 }
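/* Shape of the list being checked (sketch, not part of the original
   sources): the immediate uses of an SSA_NAME form a circular,
   doubly-linked list whose root node is embedded in the SSA_NAME
   itself and is recognized by its NULL 'use' pointer:

       root <-> use_1 <-> use_2 <-> ... <-> use_n <-> root

   The forward walk above counts the nodes, and the backward walk
   verifies that the same count is reached from the other
   direction.  */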
2540
2541
2542 /* Dump all the immediate uses of VAR to FILE. */
2543
2544 void
2545 dump_immediate_uses_for (FILE *file, tree var)
2546 {
2547 imm_use_iterator iter;
2548 use_operand_p use_p;
2549
2550 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2551
2552 print_generic_expr (file, var, TDF_SLIM);
2553 fprintf (file, " : -->");
2554 if (has_zero_uses (var))
2555 fprintf (file, " no uses.\n");
2556 else
2557 if (has_single_use (var))
2558 fprintf (file, " single use.\n");
2559 else
2560 fprintf (file, "%d uses.\n", num_imm_uses (var));
2561
2562 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2563 {
2564 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2565 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2566 else
2567 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2568 }
2569 fprintf (file, "\n");
2570 }
2571
2572
2573 /* Dump all the immediate uses to FILE. */
2574
2575 void
2576 dump_immediate_uses (FILE *file)
2577 {
2578 tree var;
2579 unsigned int x;
2580
2581 fprintf (file, "Immediate_uses: \n\n");
2582 for (x = 1; x < num_ssa_names; x++)
2583 {
2584 var = ssa_name (x);
2585 if (!var)
2586 continue;
2587 dump_immediate_uses_for (file, var);
2588 }
2589 }
2590
2591
2592 /* Dump def-use edges on stderr. */
2593
2594 void
2595 debug_immediate_uses (void)
2596 {
2597 dump_immediate_uses (stderr);
2598 }
2599
2600
2601 /* Dump the def-use edges for VAR on stderr. */
2602
2603 void
2604 debug_immediate_uses_for (tree var)
2605 {
2606 dump_immediate_uses_for (stderr, var);
2607 }
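/* Debugging sketch (not part of the original sources): the stderr
   wrappers above are the convenient entry points from a debugger,
   e.g.

       debug_immediate_uses_for (var);

   prints every statement that uses the SSA_NAME VAR, while
   debug_immediate_uses () dumps the def-use edges of every SSA name
   in the current function.  */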
2608
2609 #include "gt-tree-ssa-operands.h"