1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53 The operand tree is then parsed by the various get_* routines, which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
59
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
62
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
66
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had SSA_NAME version of a
72 variable, and that same variable occurs in the same operands cache, then
73 the new cache vector will also get the same SSA_NAME.
74
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'. */
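/* As a purely illustrative sketch (variable names and SSA version
   numbers are invented), consider a store through a pointer 'p' that
   may point to an aliased variable 'a':

	# a_3 = V_MAY_DEF <a_2>
	*p_1 = b_4 + 1;

   Here 'p_1' and 'b_4' become real USE operands (pointers into the
   stmt tree), while the V_MAY_DEF of 'a' exists only in the operand
   cache, since 'a' does not appear in the statement itself.  */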
78
79 /* Flags to describe operand properties in helpers. */
80
81 /* By default, operands are loaded. */
82 #define opf_none 0
83
84 /* Operand is the target of an assignment expression or a
85 call-clobbered variable. */
86 #define opf_is_def (1 << 0)
87
88 /* Operand is the target of an assignment expression. */
89 #define opf_kill_def (1 << 1)
90
91 /* No virtual operands should be created in the expression. This is used
92 when traversing ADDR_EXPR nodes which have different semantics than
93 other expressions. Inside an ADDR_EXPR node, the only operands that we
94 need to consider are indices into arrays. For instance, &a.b[i] should
95 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
96 VUSE for 'b'. */
97 #define opf_no_vops (1 << 2)
98
99 /* Operand is a "non-specific" kill for call-clobbers and such. This
100 is used to distinguish "reset the world" events from explicit
101 GIMPLE_MODIFY_STMTs. */
102 #define opf_non_specific (1 << 3)
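/* For concreteness: get_modify_stmt_operands below scans the LHS of an
   assignment with (opf_is_def | opf_kill_def) and the RHS with opf_none;
   get_expr_operands ORs opf_no_vops into the flags when it descends into
   an ADDR_EXPR; and get_indirect_ref_operands clears opf_kill_def, since
   stores through an INDIRECT_REF are never killing definitions.  */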
103
104 /* Array for building all the def operands. */
105 static VEC(tree,heap) *build_defs;
106
107 /* Array for building all the use operands. */
108 static VEC(tree,heap) *build_uses;
109
110 /* Array for building all the V_MAY_DEF operands. */
111 static VEC(tree,heap) *build_v_may_defs;
112
113 /* Array for building all the VUSE operands. */
114 static VEC(tree,heap) *build_vuses;
115
116 /* Array for building all the V_MUST_DEF operands. */
117 static VEC(tree,heap) *build_v_must_defs;
118
119 static void get_expr_operands (tree, tree *, int);
120
121 /* Number of functions with initialized ssa_operands. */
122 static int n_initialized = 0;
123
124 /* Allocates operand OP of given TYPE from the appropriate free list,
125 or allocates a new one if the list is empty. */
126
127 #define ALLOC_OPTYPE(OP, TYPE) \
128 do \
129 { \
130 TYPE##_optype_p ret \
131 = gimple_ssa_operands (cfun)->free_##TYPE##s; \
132 if (ret) \
133 gimple_ssa_operands (cfun)->free_##TYPE##s \
134 = ret->next; \
135 else \
136 ret = ssa_operand_alloc (sizeof (*ret)); \
137 (OP) = ret; \
138 } while (0)
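/* For example, ALLOC_OPTYPE (new, def) expands (ignoring the do/while
   wrapper) to roughly:

	def_optype_p ret = gimple_ssa_operands (cfun)->free_defs;
	if (ret)
	  gimple_ssa_operands (cfun)->free_defs = ret->next;
	else
	  ret = ssa_operand_alloc (sizeof (*ret));
	(new) = ret;

   i.e. a node is popped from the per-function freelist when one is
   available, and fresh memory is taken from the operand arena otherwise.  */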
139
140 /* Return the DECL_UID of the base variable of T. */
141
142 static inline unsigned
143 get_name_decl (tree t)
144 {
145 if (TREE_CODE (t) != SSA_NAME)
146 return DECL_UID (t);
147 else
148 return DECL_UID (SSA_NAME_VAR (t));
149 }
150
151
152 /* Comparison function for qsort used in operand_build_sort_virtual. */
153
154 static int
155 operand_build_cmp (const void *p, const void *q)
156 {
157 tree e1 = *((const tree *)p);
158 tree e2 = *((const tree *)q);
159 unsigned int u1,u2;
160
161 u1 = get_name_decl (e1);
162 u2 = get_name_decl (e2);
163
164 /* We want to sort in ascending order. They can never be equal. */
165 #ifdef ENABLE_CHECKING
166 gcc_assert (u1 != u2);
167 #endif
168 return (u1 > u2 ? 1 : -1);
169 }
170
171
172 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
173
174 static inline void
175 operand_build_sort_virtual (VEC(tree,heap) *list)
176 {
177 int num = VEC_length (tree, list);
178
179 if (num < 2)
180 return;
181
182 if (num == 2)
183 {
184 if (get_name_decl (VEC_index (tree, list, 0))
185 > get_name_decl (VEC_index (tree, list, 1)))
186 {
187 /* Swap elements if in the wrong order. */
188 tree tmp = VEC_index (tree, list, 0);
189 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
190 VEC_replace (tree, list, 1, tmp);
191 }
192 return;
193 }
194
195 /* There are 3 or more elements, call qsort. */
196 qsort (VEC_address (tree, list),
197 VEC_length (tree, list),
198 sizeof (tree),
199 operand_build_cmp);
200 }
201
202
203 /* Return true if the SSA operands cache is active. */
204
205 bool
206 ssa_operands_active (void)
207 {
208 return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
209 }
210
211
212 /* Structure storing statistics on how many call clobbers we have, and
213 how many were avoided. */
214
215 static struct
216 {
217 /* Number of call-clobbered ops we attempt to add to calls in
218 add_call_clobber_ops. */
219 unsigned int clobbered_vars;
220
221 /* Number of write-clobbers (V_MAY_DEFs) avoided by using
222 not_written information. */
223 unsigned int static_write_clobbers_avoided;
224
225 /* Number of reads (VUSEs) avoided by using not_read information. */
226 unsigned int static_read_clobbers_avoided;
227
228 /* Number of write-clobbers avoided because the variable can't escape to
229 this call. */
230 unsigned int unescapable_clobbers_avoided;
231
232 /* Number of read-only uses we attempt to add to calls in
233 add_call_read_ops. */
234 unsigned int readonly_clobbers;
235
236 /* Number of read-only uses we avoid using not_read information. */
237 unsigned int static_readonly_clobbers_avoided;
238 } clobber_stats;
239
240
241 /* Initialize the operand cache routines. */
242
243 void
244 init_ssa_operands (void)
245 {
246 if (!n_initialized++)
247 {
248 build_defs = VEC_alloc (tree, heap, 5);
249 build_uses = VEC_alloc (tree, heap, 10);
250 build_vuses = VEC_alloc (tree, heap, 25);
251 build_v_may_defs = VEC_alloc (tree, heap, 25);
252 build_v_must_defs = VEC_alloc (tree, heap, 25);
253 }
254
255 gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
256 gimple_ssa_operands (cfun)->operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
257 gimple_ssa_operands (cfun)->ops_active = true;
258 memset (&clobber_stats, 0, sizeof (clobber_stats));
259 }
260
261
262 /* Dispose of anything required by the operand routines. */
263
264 void
265 fini_ssa_operands (void)
266 {
267 struct ssa_operand_memory_d *ptr;
268 if (!--n_initialized)
269 {
270 VEC_free (tree, heap, build_defs);
271 VEC_free (tree, heap, build_uses);
272 VEC_free (tree, heap, build_v_must_defs);
273 VEC_free (tree, heap, build_v_may_defs);
274 VEC_free (tree, heap, build_vuses);
275 }
276 gimple_ssa_operands (cfun)->free_defs = NULL;
277 gimple_ssa_operands (cfun)->free_uses = NULL;
278 gimple_ssa_operands (cfun)->free_vuses = NULL;
279 gimple_ssa_operands (cfun)->free_maydefs = NULL;
280 gimple_ssa_operands (cfun)->free_mustdefs = NULL;
281 while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
282 {
283 gimple_ssa_operands (cfun)->operand_memory
284 = gimple_ssa_operands (cfun)->operand_memory->next;
285 ggc_free (ptr);
286 }
287
288 gimple_ssa_operands (cfun)->ops_active = false;
289
290 if (dump_file && (dump_flags & TDF_STATS))
291 {
292 fprintf (dump_file, "Original clobbered vars:%d\n",
293 clobber_stats.clobbered_vars);
294 fprintf (dump_file, "Static write clobbers avoided:%d\n",
295 clobber_stats.static_write_clobbers_avoided);
296 fprintf (dump_file, "Static read clobbers avoided:%d\n",
297 clobber_stats.static_read_clobbers_avoided);
298 fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
299 clobber_stats.unescapable_clobbers_avoided);
300 fprintf (dump_file, "Original read-only clobbers:%d\n",
301 clobber_stats.readonly_clobbers);
302 fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
303 clobber_stats.static_readonly_clobbers_avoided);
304 }
305 }
306
307
308 /* Return a chunk of memory of SIZE bytes for operand structures. */
309
310 static inline void *
311 ssa_operand_alloc (unsigned size)
312 {
313 char *ptr;
314 if (gimple_ssa_operands (cfun)->operand_memory_index + size
315 >= SSA_OPERAND_MEMORY_SIZE)
316 {
317 struct ssa_operand_memory_d *ptr;
318 ptr = GGC_NEW (struct ssa_operand_memory_d);
319 ptr->next = gimple_ssa_operands (cfun)->operand_memory;
320 gimple_ssa_operands (cfun)->operand_memory = ptr;
321 gimple_ssa_operands (cfun)->operand_memory_index = 0;
322 }
323 ptr = &(gimple_ssa_operands (cfun)->operand_memory
324 ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
325 gimple_ssa_operands (cfun)->operand_memory_index += size;
326 return ptr;
327 }
328
329
330
331 /* This routine makes sure that PTR is in an immediate use list, and makes
332 sure the stmt pointer is set to the current stmt. */
333
334 static inline void
335 set_virtual_use_link (use_operand_p ptr, tree stmt)
336 {
337 /* fold_stmt may have changed the stmt pointers. */
338 if (ptr->stmt != stmt)
339 ptr->stmt = stmt;
340
341 /* If this use isn't in a list, add it to the correct list. */
342 if (!ptr->prev)
343 link_imm_use (ptr, *(ptr->use));
344 }
345
346 /* Appends ELT after TO, and moves the TO pointer to ELT. */
347
348 #define APPEND_OP_AFTER(ELT, TO) \
349 do \
350 { \
351 (TO)->next = (ELT); \
352 (TO) = (ELT); \
353 } while (0)
354
355 /* Appends head of list FROM after TO, and moves both pointers
356 to their successors. */
357
358 #define MOVE_HEAD_AFTER(FROM, TO) \
359 do \
360 { \
361 APPEND_OP_AFTER (FROM, TO); \
362 (FROM) = (FROM)->next; \
363 } while (0)
364
365 /* Moves OP to appropriate freelist. OP is set to its successor. */
366
367 #define MOVE_HEAD_TO_FREELIST(OP, TYPE) \
368 do \
369 { \
370 TYPE##_optype_p next = (OP)->next; \
371 (OP)->next \
372 = gimple_ssa_operands (cfun)->free_##TYPE##s; \
373 gimple_ssa_operands (cfun)->free_##TYPE##s = (OP);\
374 (OP) = next; \
375 } while (0)
376
377 /* Initializes immediate use at USE_PTR to value VAL, and links it to the list
378 of immediate uses. STMT is the current statement. */
379
380 #define INITIALIZE_USE(USE_PTR, VAL, STMT) \
381 do \
382 { \
383 (USE_PTR)->use = (VAL); \
384 link_imm_use_stmt ((USE_PTR), *(VAL), (STMT)); \
385 } while (0)
386
387 /* Adds OP to the list of defs after LAST, and moves
388 LAST to the new element. */
389
390 static inline void
391 add_def_op (tree *op, def_optype_p *last)
392 {
393 def_optype_p new;
394
395 ALLOC_OPTYPE (new, def);
396 DEF_OP_PTR (new) = op;
397 APPEND_OP_AFTER (new, *last);
398 }
399
400 /* Adds OP to the list of uses of statement STMT after LAST, and moves
401 LAST to the new element. */
402
403 static inline void
404 add_use_op (tree stmt, tree *op, use_optype_p *last)
405 {
406 use_optype_p new;
407
408 ALLOC_OPTYPE (new, use);
409 INITIALIZE_USE (USE_OP_PTR (new), op, stmt);
410 APPEND_OP_AFTER (new, *last);
411 }
412
413 /* Adds OP to the list of vuses of statement STMT after LAST, and moves
414 LAST to the new element. */
415
416 static inline void
417 add_vuse_op (tree stmt, tree op, vuse_optype_p *last)
418 {
419 vuse_optype_p new;
420
421 ALLOC_OPTYPE (new, vuse);
422 VUSE_OP (new) = op;
423 INITIALIZE_USE (VUSE_OP_PTR (new), &VUSE_OP (new), stmt);
424 APPEND_OP_AFTER (new, *last);
425 }
426
427 /* Adds OP to the list of maydefs of statement STMT after LAST, and moves
428 LAST to the new element. */
429
430 static inline void
431 add_maydef_op (tree stmt, tree op, maydef_optype_p *last)
432 {
433 maydef_optype_p new;
434
435 ALLOC_OPTYPE (new, maydef);
436 MAYDEF_RESULT (new) = op;
437 MAYDEF_OP (new) = op;
438 INITIALIZE_USE (MAYDEF_OP_PTR (new), &MAYDEF_OP (new), stmt);
439 APPEND_OP_AFTER (new, *last);
440 }
441
442 /* Adds OP to the list of mustdefs of statement STMT after LAST, and moves
443 LAST to the new element. */
444
445 static inline void
446 add_mustdef_op (tree stmt, tree op, mustdef_optype_p *last)
447 {
448 mustdef_optype_p new;
449
450 ALLOC_OPTYPE (new, mustdef);
451 MUSTDEF_RESULT (new) = op;
452 MUSTDEF_KILL (new) = op;
453 INITIALIZE_USE (MUSTDEF_KILL_PTR (new), &MUSTDEF_KILL (new), stmt);
454 APPEND_OP_AFTER (new, *last);
455 }
456
457 /* Takes elements from build_defs and turns them into def operands of STMT.
458 TODO -- Given that the def operands list is not necessarily sorted, merging
459 the operands this way does not make much sense.
460 -- Make build_defs VEC of tree *. */
461
462 static inline void
463 finalize_ssa_def_ops (tree stmt)
464 {
465 unsigned new_i;
466 struct def_optype_d new_list;
467 def_optype_p old_ops, last;
468 tree *old_base;
469
470 new_list.next = NULL;
471 last = &new_list;
472
473 old_ops = DEF_OPS (stmt);
474
475 new_i = 0;
476 while (old_ops && new_i < VEC_length (tree, build_defs))
477 {
478 tree *new_base = (tree *) VEC_index (tree, build_defs, new_i);
479 old_base = DEF_OP_PTR (old_ops);
480
481 if (old_base == new_base)
482 {
483 /* if variables are the same, reuse this node. */
484 MOVE_HEAD_AFTER (old_ops, last);
485 new_i++;
486 }
487 else if (old_base < new_base)
488 {
489 /* if old is less than new, old goes to the free list. */
490 MOVE_HEAD_TO_FREELIST (old_ops, def);
491 }
492 else
493 {
494 /* This is a new operand. */
495 add_def_op (new_base, &last);
496 new_i++;
497 }
498 }
499
500 /* If there is anything remaining in the build_defs list, simply emit it. */
501 for ( ; new_i < VEC_length (tree, build_defs); new_i++)
502 add_def_op ((tree *) VEC_index (tree, build_defs, new_i), &last);
503
504 last->next = NULL;
505
506 /* If there is anything in the old list, free it. */
507 if (old_ops)
508 {
509 old_ops->next = gimple_ssa_operands (cfun)->free_defs;
510 gimple_ssa_operands (cfun)->free_defs = old_ops;
511 }
512
513 /* Now set the stmt's operands. */
514 DEF_OPS (stmt) = new_list.next;
515
516 #ifdef ENABLE_CHECKING
517 {
518 def_optype_p ptr;
519 unsigned x = 0;
520 for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
521 x++;
522
523 gcc_assert (x == VEC_length (tree, build_defs));
524 }
525 #endif
526 }
527
528 /* This routine will create stmt operands for STMT from the def build list. */
529
530 static void
531 finalize_ssa_defs (tree stmt)
532 {
533 unsigned int num = VEC_length (tree, build_defs);
534
535 /* There should only be a single real definition per assignment. */
536 gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);
537
538 /* If there is an old list, often the new list is identical, or close, so
539 find the elements at the beginning that are the same as the vector. */
540 finalize_ssa_def_ops (stmt);
541 VEC_truncate (tree, build_defs, 0);
542 }
543
544 /* Takes elements from build_uses and turns them into use operands of STMT.
545 TODO -- Make build_uses VEC of tree *. */
546
547 static inline void
548 finalize_ssa_use_ops (tree stmt)
549 {
550 unsigned new_i;
551 struct use_optype_d new_list;
552 use_optype_p old_ops, ptr, last;
553
554 new_list.next = NULL;
555 last = &new_list;
556
557 old_ops = USE_OPS (stmt);
558
559 /* If there is anything in the old list, free it. */
560 if (old_ops)
561 {
562 for (ptr = old_ops; ptr; ptr = ptr->next)
563 delink_imm_use (USE_OP_PTR (ptr));
564 old_ops->next = gimple_ssa_operands (cfun)->free_uses;
565 gimple_ssa_operands (cfun)->free_uses = old_ops;
566 }
567
568 /* Now create nodes for all the new nodes. */
569 for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
570 add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);
571
572 last->next = NULL;
573
574 /* Now set the stmt's operands. */
575 USE_OPS (stmt) = new_list.next;
576
577 #ifdef ENABLE_CHECKING
578 {
579 unsigned x = 0;
580 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
581 x++;
582
583 gcc_assert (x == VEC_length (tree, build_uses));
584 }
585 #endif
586 }
587
588 /* This routine will create stmt operands for STMT from the use build list. */
589
590 static void
591 finalize_ssa_uses (tree stmt)
592 {
593 #ifdef ENABLE_CHECKING
594 {
595 unsigned x;
596 unsigned num = VEC_length (tree, build_uses);
597
598 /* If the pointer to the operand is the statement itself, something is
599 wrong. It means that we are pointing to a local variable (the
600 initial call to update_stmt_operands does not pass a pointer to a
601 statement). */
602 for (x = 0; x < num; x++)
603 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
604 }
605 #endif
606 finalize_ssa_use_ops (stmt);
607 VEC_truncate (tree, build_uses, 0);
608 }
609
610
611 /* Takes elements from build_v_may_defs and turns them into maydef operands of
612 STMT. */
613
614 static inline void
615 finalize_ssa_v_may_def_ops (tree stmt)
616 {
617 unsigned new_i;
618 struct maydef_optype_d new_list;
619 maydef_optype_p old_ops, ptr, last;
620 tree act;
621 unsigned old_base, new_base;
622
623 new_list.next = NULL;
624 last = &new_list;
625
626 old_ops = MAYDEF_OPS (stmt);
627
628 new_i = 0;
629 while (old_ops && new_i < VEC_length (tree, build_v_may_defs))
630 {
631 act = VEC_index (tree, build_v_may_defs, new_i);
632 new_base = get_name_decl (act);
633 old_base = get_name_decl (MAYDEF_OP (old_ops));
634
635 if (old_base == new_base)
636 {
637 /* if variables are the same, reuse this node. */
638 MOVE_HEAD_AFTER (old_ops, last);
639 set_virtual_use_link (MAYDEF_OP_PTR (last), stmt);
640 new_i++;
641 }
642 else if (old_base < new_base)
643 {
644 /* if old is less than new, old goes to the free list. */
645 delink_imm_use (MAYDEF_OP_PTR (old_ops));
646 MOVE_HEAD_TO_FREELIST (old_ops, maydef);
647 }
648 else
649 {
650 /* This is a new operand. */
651 add_maydef_op (stmt, act, &last);
652 new_i++;
653 }
654 }
655
656 /* If there is anything remaining in the build_v_may_defs list, simply emit it. */
657 for ( ; new_i < VEC_length (tree, build_v_may_defs); new_i++)
658 add_maydef_op (stmt, VEC_index (tree, build_v_may_defs, new_i), &last);
659
660 last->next = NULL;
661
662 /* If there is anything in the old list, free it. */
663 if (old_ops)
664 {
665 for (ptr = old_ops; ptr; ptr = ptr->next)
666 delink_imm_use (MAYDEF_OP_PTR (ptr));
667 old_ops->next = gimple_ssa_operands (cfun)->free_maydefs;
668 gimple_ssa_operands (cfun)->free_maydefs = old_ops;
669 }
670
671 /* Now set the stmt's operands. */
672 MAYDEF_OPS (stmt) = new_list.next;
673
674 #ifdef ENABLE_CHECKING
675 {
676 unsigned x = 0;
677 for (ptr = MAYDEF_OPS (stmt); ptr; ptr = ptr->next)
678 x++;
679
680 gcc_assert (x == VEC_length (tree, build_v_may_defs));
681 }
682 #endif
683 }
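/* A small worked example of the merge in finalize_ssa_v_may_def_ops,
   with invented DECL_UIDs: if the old MAYDEF list on the stmt is
   a (uid 5) -> c (uid 9) and the sorted build_v_may_defs vector holds
   { a, b (uid 7) }, then 'a' is reused from the old list, 'b' gets a
   newly allocated node, and 'c' is unlinked and returned to the
   maydef freelist.  */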
684
685 static void
686 finalize_ssa_v_may_defs (tree stmt)
687 {
688 finalize_ssa_v_may_def_ops (stmt);
689 }
690
691
692 /* Clear the in_list bits and empty the build array for V_MAY_DEFs. */
693
694 static inline void
695 cleanup_v_may_defs (void)
696 {
697 unsigned x, num;
698 num = VEC_length (tree, build_v_may_defs);
699
700 for (x = 0; x < num; x++)
701 {
702 tree t = VEC_index (tree, build_v_may_defs, x);
703 if (TREE_CODE (t) != SSA_NAME)
704 {
705 var_ann_t ann = var_ann (t);
706 ann->in_v_may_def_list = 0;
707 }
708 }
709 VEC_truncate (tree, build_v_may_defs, 0);
710 }
711
712
713 /* Takes elements from build_vuses and turns them into vuse operands of
714 STMT. */
715
716 static inline void
717 finalize_ssa_vuse_ops (tree stmt)
718 {
719 unsigned new_i;
720 struct vuse_optype_d new_list;
721 vuse_optype_p old_ops, ptr, last;
722 tree act;
723 unsigned old_base, new_base;
724
725 new_list.next = NULL;
726 last = &new_list;
727
728 old_ops = VUSE_OPS (stmt);
729
730 new_i = 0;
731 while (old_ops && new_i < VEC_length (tree, build_vuses))
732 {
733 act = VEC_index (tree, build_vuses, new_i);
734 new_base = get_name_decl (act);
735 old_base = get_name_decl (VUSE_OP (old_ops));
736
737 if (old_base == new_base)
738 {
739 /* if variables are the same, reuse this node. */
740 MOVE_HEAD_AFTER (old_ops, last);
741 set_virtual_use_link (VUSE_OP_PTR (last), stmt);
742 new_i++;
743 }
744 else if (old_base < new_base)
745 {
746 /* if old is less than new, old goes to the free list. */
747 delink_imm_use (USE_OP_PTR (old_ops));
748 MOVE_HEAD_TO_FREELIST (old_ops, vuse);
749 }
750 else
751 {
752 /* This is a new operand. */
753 add_vuse_op (stmt, act, &last);
754 new_i++;
755 }
756 }
757
758 /* If there is anything remaining in the build_vuses list, simply emit it. */
759 for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
760 add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), &last);
761
762 last->next = NULL;
763
764 /* If there is anything in the old list, free it. */
765 if (old_ops)
766 {
767 for (ptr = old_ops; ptr; ptr = ptr->next)
768 delink_imm_use (VUSE_OP_PTR (ptr));
769 old_ops->next = gimple_ssa_operands (cfun)->free_vuses;
770 gimple_ssa_operands (cfun)->free_vuses = old_ops;
771 }
772
773 /* Now set the stmt's operands. */
774 VUSE_OPS (stmt) = new_list.next;
775
776 #ifdef ENABLE_CHECKING
777 {
778 unsigned x = 0;
779 for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
780 x++;
781
782 gcc_assert (x == VEC_length (tree, build_vuses));
783 }
784 #endif
785 }
786
787 /* This routine will create stmt operands for STMT from the VUSE build list. */
788
789 static void
790 finalize_ssa_vuses (tree stmt)
791 {
792 unsigned num, num_v_may_defs;
793 unsigned vuse_index;
794
795 /* Remove superfluous VUSE operands. If the statement already has a
796 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is
797 not needed because V_MAY_DEFs imply a VUSE of the variable. For
798 instance, suppose that variable 'a' is aliased:
799
800 # VUSE <a_2>
801 # a_3 = V_MAY_DEF <a_2>
802 a = a + 1;
803
804 The VUSE <a_2> is superfluous because it is implied by the
805 V_MAY_DEF operation. */
806 num = VEC_length (tree, build_vuses);
807 num_v_may_defs = VEC_length (tree, build_v_may_defs);
808
809 if (num > 0 && num_v_may_defs > 0)
810 {
811 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
812 {
813 tree vuse;
814 vuse = VEC_index (tree, build_vuses, vuse_index);
815 if (TREE_CODE (vuse) != SSA_NAME)
816 {
817 var_ann_t ann = var_ann (vuse);
818 ann->in_vuse_list = 0;
819 if (ann->in_v_may_def_list)
820 {
821 VEC_ordered_remove (tree, build_vuses, vuse_index);
822 continue;
823 }
824 }
825 vuse_index++;
826 }
827 }
828 else
829 {
830 /* Clear out the in_list bits. */
831 for (vuse_index = 0;
832 vuse_index < VEC_length (tree, build_vuses);
833 vuse_index++)
834 {
835 tree t = VEC_index (tree, build_vuses, vuse_index);
836 if (TREE_CODE (t) != SSA_NAME)
837 {
838 var_ann_t ann = var_ann (t);
839 ann->in_vuse_list = 0;
840 }
841 }
842 }
843
844 finalize_ssa_vuse_ops (stmt);
845
846 /* The V_MAY_DEF build vector wasn't cleaned up because we needed it. */
847 cleanup_v_may_defs ();
848
849 /* Free the VUSEs build vector. */
850 VEC_truncate (tree, build_vuses, 0);
851
852 }
853
854 /* Takes elements from build_v_must_defs and turns them into mustdef operands of
855 STMT. */
856
857 static inline void
858 finalize_ssa_v_must_def_ops (tree stmt)
859 {
860 unsigned new_i;
861 struct mustdef_optype_d new_list;
862 mustdef_optype_p old_ops, ptr, last;
863 tree act;
864 unsigned old_base, new_base;
865
866 new_list.next = NULL;
867 last = &new_list;
868
869 old_ops = MUSTDEF_OPS (stmt);
870
871 new_i = 0;
872 while (old_ops && new_i < VEC_length (tree, build_v_must_defs))
873 {
874 act = VEC_index (tree, build_v_must_defs, new_i);
875 new_base = get_name_decl (act);
876 old_base = get_name_decl (MUSTDEF_KILL (old_ops));
877
878 if (old_base == new_base)
879 {
880 /* If variables are the same, reuse this node. */
881 MOVE_HEAD_AFTER (old_ops, last);
882 set_virtual_use_link (MUSTDEF_KILL_PTR (last), stmt);
883 new_i++;
884 }
885 else if (old_base < new_base)
886 {
887 /* If old is less than new, old goes to the free list. */
888 delink_imm_use (MUSTDEF_KILL_PTR (old_ops));
889 MOVE_HEAD_TO_FREELIST (old_ops, mustdef);
890 }
891 else
892 {
893 /* This is a new operand. */
894 add_mustdef_op (stmt, act, &last);
895 new_i++;
896 }
897 }
898
899 /* If there is anything remaining in the build_v_must_defs list, simply emit it. */
900 for ( ; new_i < VEC_length (tree, build_v_must_defs); new_i++)
901 add_mustdef_op (stmt, VEC_index (tree, build_v_must_defs, new_i), &last);
902
903 last->next = NULL;
904
905 /* If there is anything in the old list, free it. */
906 if (old_ops)
907 {
908 for (ptr = old_ops; ptr; ptr = ptr->next)
909 delink_imm_use (MUSTDEF_KILL_PTR (ptr));
910 old_ops->next = gimple_ssa_operands (cfun)->free_mustdefs;
911 gimple_ssa_operands (cfun)->free_mustdefs = old_ops;
912 }
913
914 /* Now set the stmt's operands. */
915 MUSTDEF_OPS (stmt) = new_list.next;
916
917 #ifdef ENABLE_CHECKING
918 {
919 unsigned x = 0;
920 for (ptr = MUSTDEF_OPS (stmt); ptr; ptr = ptr->next)
921 x++;
922
923 gcc_assert (x == VEC_length (tree, build_v_must_defs));
924 }
925 #endif
926 }
927
928 static void
929 finalize_ssa_v_must_defs (tree stmt)
930 {
931 /* In the presence of subvars, there may be more than one V_MUST_DEF
932 per statement (one for each subvar). It is a bit expensive to
933 verify that all must-defs in a statement belong to subvars if
934 there is more than one must-def, so we don't do it. Suffice it to
935 say, if you reach here without having subvars and with more than one
936 V_MUST_DEF, you have hit a bug. */
937 finalize_ssa_v_must_def_ops (stmt);
938 VEC_truncate (tree, build_v_must_defs, 0);
939 }
940
941
942 /* Finalize all the build vectors, filling the new operands into STMT. */
943
944 static inline void
945 finalize_ssa_stmt_operands (tree stmt)
946 {
947 finalize_ssa_defs (stmt);
948 finalize_ssa_uses (stmt);
949 finalize_ssa_v_must_defs (stmt);
950 finalize_ssa_v_may_defs (stmt);
951 finalize_ssa_vuses (stmt);
952 }
953
954
955 /* Start the process of building up operand vectors by verifying that all the build vectors are empty. */
956
957 static inline void
958 start_ssa_stmt_operands (void)
959 {
960 gcc_assert (VEC_length (tree, build_defs) == 0);
961 gcc_assert (VEC_length (tree, build_uses) == 0);
962 gcc_assert (VEC_length (tree, build_vuses) == 0);
963 gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
964 gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
965 }
966
967
968 /* Add DEF_P to the list of pointers to operands. */
969
970 static inline void
971 append_def (tree *def_p)
972 {
973 VEC_safe_push (tree, heap, build_defs, (tree)def_p);
974 }
975
976
977 /* Add USE_P to the list of pointers to operands. */
978
979 static inline void
980 append_use (tree *use_p)
981 {
982 VEC_safe_push (tree, heap, build_uses, (tree)use_p);
983 }
984
985
986 /* Add a new virtual may def for variable VAR to the build array. */
987
988 static inline void
989 append_v_may_def (tree var)
990 {
991 if (TREE_CODE (var) != SSA_NAME)
992 {
993 var_ann_t ann = get_var_ann (var);
994
995 /* Don't allow duplicate entries. */
996 if (ann->in_v_may_def_list)
997 return;
998 ann->in_v_may_def_list = 1;
999 }
1000
1001 VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
1002 }
1003
1004
1005 /* Add VAR to the list of virtual uses. */
1006
1007 static inline void
1008 append_vuse (tree var)
1009 {
1010 /* Don't allow duplicate entries. */
1011 if (TREE_CODE (var) != SSA_NAME)
1012 {
1013 var_ann_t ann = get_var_ann (var);
1014
1015 if (ann->in_vuse_list || ann->in_v_may_def_list)
1016 return;
1017 ann->in_vuse_list = 1;
1018 }
1019
1020 VEC_safe_push (tree, heap, build_vuses, (tree)var);
1021 }
1022
1023
1024 /* Add VAR to the build list of virtual must definitions. */
1025
1026 static inline void
1027 append_v_must_def (tree var)
1028 {
1029 unsigned i;
1030
1031 /* Don't allow duplicate entries. */
1032 for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
1033 if (var == VEC_index (tree, build_v_must_defs, i))
1034 return;
1035
1036 VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
1037 }
1038
1039
1040 /* REF is a tree that contains the entire pointer dereference
1041 expression, if available, or NULL otherwise. ALIAS is the variable
1042 we are asking if REF can access. OFFSET and SIZE come from the
1043 memory access expression that generated this virtual operand. */
1044
1045 static bool
1046 access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
1047 HOST_WIDE_INT size)
1048 {
1049 bool offsetgtz = offset > 0;
1050 unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
1051 tree base = ref ? get_base_address (ref) : NULL;
1052
1053 /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
1054 using a call-clobbered memory tag. By definition, call-clobbered
1055 memory tags can always touch .GLOBAL_VAR. */
1056 if (alias == gimple_global_var (cfun))
1057 return true;
1058
1059 /* If ALIAS is an SFT, it can't be touched if the offset
1060 and size of the access is not overlapping with the SFT offset and
1061 size. This is only true if we are accessing through a pointer
1062 to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
1063 be accessing through a pointer to some substruct of the
1064 structure, and if we try to prune there, we will have the wrong
1065 offset, and get the wrong answer.
1066 i.e., we can't prune without more work if we have something like
1067
1068 struct gcc_target
1069 {
1070 struct asm_out
1071 {
1072 const char *byte_op;
1073 struct asm_int_op
1074 {
1075 const char *hi;
1076 } aligned_op;
1077 } asm_out;
1078 } targetm;
1079
1080 foo = &targetm.asm_out.aligned_op;
1081 return foo->hi;
1082
1083 SFT.1, which represents hi, will have SFT_OFFSET=32 because in
1084 terms of SFT_PARENT_VAR, that is where it is.
1085 However, the access through the foo pointer will be at offset 0. */
1086 if (size != -1
1087 && TREE_CODE (alias) == STRUCT_FIELD_TAG
1088 && base
1089 && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
1090 && !overlap_subvar (offset, size, alias, NULL))
1091 {
1092 #ifdef ACCESS_DEBUGGING
1093 fprintf (stderr, "Access to ");
1094 print_generic_expr (stderr, ref, 0);
1095 fprintf (stderr, " may not touch ");
1096 print_generic_expr (stderr, alias, 0);
1097 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1098 #endif
1099 return false;
1100 }
1101
1102 /* With strict aliasing, it is impossible for a component access
1103 through a pointer to touch a random variable, unless that
1104 variable *is* a structure or a pointer.
1105
1106 That is, given p->c, and some random global variable b,
1107 there is no legal way that p->c could be an access to b.
1108
1109 Without strict aliasing on, we consider it legal to do something
1110 like:
1111
1112 struct foos { int l; };
1113 int foo;
1114 static struct foos *getfoo(void);
1115 int main (void)
1116 {
1117 struct foos *f = getfoo();
1118 f->l = 1;
1119 foo = 2;
1120 if (f->l == 1)
1121 abort();
1122 exit(0);
1123 }
1124 static struct foos *getfoo(void)
1125 { return (struct foos *)&foo; }
1126
1127 (taken from 20000623-1.c)
1128
1129 The docs also say/imply that access through union pointers
1130 is legal (but *not* if you take the address of the union member,
1131 i.e. the inverse), such that you can do
1132
1133 typedef union {
1134 int d;
1135 } U;
1136
1137 int rv;
1138 void breakme()
1139 {
1140 U *rv0;
1141 U *pretmp = (U*)&rv;
1142 rv0 = pretmp;
1143 rv0->d = 42;
1144 }
1145 To implement this, we just punt on accesses through union
1146 pointers entirely.
1147 */
1148 else if (ref
1149 && flag_strict_aliasing
1150 && TREE_CODE (ref) != INDIRECT_REF
1151 && !MTAG_P (alias)
1152 && (TREE_CODE (base) != INDIRECT_REF
1153 || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
1154 && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
1155 && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
1156 && !var_ann (alias)->is_heapvar
1157 /* When the struct has the may_alias attribute, its alias set is
1158 zero, so we must not prune the access here. */
1159 && get_alias_set (base))
1160 {
1161 #ifdef ACCESS_DEBUGGING
1162 fprintf (stderr, "Access to ");
1163 print_generic_expr (stderr, ref, 0);
1164 fprintf (stderr, " may not touch ");
1165 print_generic_expr (stderr, alias, 0);
1166 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1167 #endif
1168 return false;
1169 }
1170
1171 /* If the offset of the access is greater than the size of one of
1172 the possible aliases, it can't be touching that alias, because it
1173 would be past the end of the structure. */
1174 else if (ref
1175 && flag_strict_aliasing
1176 && TREE_CODE (ref) != INDIRECT_REF
1177 && !MTAG_P (alias)
1178 && !POINTER_TYPE_P (TREE_TYPE (alias))
1179 && offsetgtz
1180 && DECL_SIZE (alias)
1181 && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
1182 && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
1183 {
1184 #ifdef ACCESS_DEBUGGING
1185 fprintf (stderr, "Access to ");
1186 print_generic_expr (stderr, ref, 0);
1187 fprintf (stderr, " may not touch ");
1188 print_generic_expr (stderr, alias, 0);
1189 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1190 #endif
1191 return false;
1192 }
1193
1194 return true;
1195 }
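/* An invented instance of the last pruning rule above: with strict
   aliasing enabled, if ALIAS is a scalar 'int x' whose DECL_SIZE is
   32 bits and REF accesses offset 64 within the dereferenced object,
   the access is past the end of 'x' and therefore cannot touch it,
   so false is returned.  */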
1196
1197
1198 /* Add VAR to the virtual operands array. FLAGS is as in
1199 get_expr_operands. FULL_REF is a tree that contains the entire
1200 pointer dereference expression, if available, or NULL otherwise.
1201 OFFSET and SIZE come from the memory access expression that
1202 generated this virtual operand. FOR_CLOBBER is true if this is
1203 adding a virtual operand for a call clobber. */
1204
1205 static void
1206 add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
1207 tree full_ref, HOST_WIDE_INT offset,
1208 HOST_WIDE_INT size, bool for_clobber)
1209 {
1210 VEC(tree,gc) *aliases;
1211 tree sym;
1212 var_ann_t v_ann;
1213
1214 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1215 v_ann = var_ann (sym);
1216
1217 /* Mark statements with volatile operands. Optimizers should back
1218 off from statements having volatile operands. */
1219 if (TREE_THIS_VOLATILE (sym) && s_ann)
1220 s_ann->has_volatile_ops = true;
1221
1222 /* If the variable cannot be modified and this is a V_MAY_DEF change
1223 it into a VUSE. This happens when read-only variables are marked
1224 call-clobbered and/or aliased to writable variables. So we only
1225 check that this only happens on non-specific stores.
1226
1227 Note that if this is a specific store, i.e. associated with a
1228 gimple_modify_stmt, then we can't suppress the V_MAY_DEF, lest we run
1229 into validation problems.
1230
1231 This can happen when programs cast away const, leaving us with a
1232 store to read-only memory. If the statement is actually executed
1233 at runtime, then the program is ill formed. If the statement is
1234 not executed then all is well. At the very least, we cannot ICE. */
1235 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1236 flags &= ~(opf_is_def | opf_kill_def);
1237
1238 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1239 virtual operands, unless the caller has specifically requested
1240 not to add virtual operands (used when adding operands inside an
1241 ADDR_EXPR expression). */
1242 if (flags & opf_no_vops)
1243 return;
1244
1245 aliases = v_ann->may_aliases;
1246 if (aliases == NULL)
1247 {
1248 /* The variable is not aliased or it is an alias tag. */
1249 if (flags & opf_is_def)
1250 {
1251 if (flags & opf_kill_def)
1252 {
1253 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1254 variable definitions. */
1255 gcc_assert (!MTAG_P (var)
1256 || TREE_CODE (var) == STRUCT_FIELD_TAG);
1257 append_v_must_def (var);
1258 }
1259 else
1260 {
1261 /* Add a V_MAY_DEF for call-clobbered variables and
1262 memory tags. */
1263 append_v_may_def (var);
1264 }
1265 }
1266 else
1267 append_vuse (var);
1268 }
1269 else
1270 {
1271 unsigned i;
1272 tree al;
1273
1274 /* The variable is aliased. Add its aliases to the virtual
1275 operands. */
1276 gcc_assert (VEC_length (tree, aliases) != 0);
1277
1278 if (flags & opf_is_def)
1279 {
1280
1281 bool none_added = true;
1282
1283 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1284 {
1285 if (!access_can_touch_variable (full_ref, al, offset, size))
1286 continue;
1287
1288 none_added = false;
1289 append_v_may_def (al);
1290 }
1291
1292 /* If the variable is also an alias tag, add a virtual
1293 operand for it, otherwise we will miss representing
1294 references to the members of the variable's alias set.
1295 This fixes the bug in gcc.c-torture/execute/20020503-1.c.
1296
1297 It is also necessary to add bare defs on clobbers for
1298 SMT's, so that bare SMT uses caused by pruning all the
1299 aliases will link up properly with calls. In order to
1300 keep the number of these bare defs we add down to the
1301 minimum necessary, we keep track of which SMT's were used
1302 alone in statement vdefs or VUSEs. */
1303 if (v_ann->is_aliased
1304 || none_added
1305 || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
1306 && for_clobber
1307 && SMT_USED_ALONE (var)))
1308 {
1309 /* Every bare SMT def we add should have SMT_USED_ALONE
1310 set on it, or else we will get the wrong answer on
1311 clobbers. */
1312 if (none_added
1313 && !updating_used_alone && gimple_aliases_computed_p (cfun)
1314 && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
1315 gcc_assert (SMT_USED_ALONE (var));
1316
1317 append_v_may_def (var);
1318 }
1319 }
1320 else
1321 {
1322 bool none_added = true;
1323 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1324 {
1325 if (!access_can_touch_variable (full_ref, al, offset, size))
1326 continue;
1327 none_added = false;
1328 append_vuse (al);
1329 }
1330
1331 /* Similarly, append a virtual use for VAR itself, when
1332 it is an alias tag. */
1333 if (v_ann->is_aliased || none_added)
1334 append_vuse (var);
1335 }
1336 }
1337 }
1338
1339
1340 /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
1341 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1342 the statement's real operands, otherwise it is added to virtual
1343 operands. */
1344
1345 static void
1346 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1347 {
1348 bool is_real_op;
1349 tree var, sym;
1350 var_ann_t v_ann;
1351
1352 var = *var_p;
1353 gcc_assert (SSA_VAR_P (var));
1354
1355 is_real_op = is_gimple_reg (var);
1356
1357 /* If this is a real operand, the operand is either an SSA name or a
1358 decl. Virtual operands may only be decls. */
1359 gcc_assert (is_real_op || DECL_P (var));
1360
1361 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1362 v_ann = var_ann (sym);
1363
1364 /* Mark statements with volatile operands. Optimizers should back
1365 off from statements having volatile operands. */
1366 if (TREE_THIS_VOLATILE (sym) && s_ann)
1367 s_ann->has_volatile_ops = true;
1368
1369 if (is_real_op)
1370 {
1371 /* The variable is a GIMPLE register. Add it to real operands. */
1372 if (flags & opf_is_def)
1373 append_def (var_p);
1374 else
1375 append_use (var_p);
1376 }
1377 else
1378 add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
1379 }
1380
1381
1382 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1383 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1384
1385 STMT is the statement being processed, EXPR is the INDIRECT_REF
1386 that got us here.
1387
1388 FLAGS is as in get_expr_operands.
1389
1390 FULL_REF contains the full pointer dereference expression, if we
1391 have it, or NULL otherwise.
1392
1393 OFFSET and SIZE are the location of the access inside the
1394 dereferenced pointer, if known.
1395
1396 RECURSE_ON_BASE should be set to true if we want to continue
1397 calling get_expr_operands on the base pointer, and false if
1398 something else will do it for us. */
1399
1400 static void
1401 get_indirect_ref_operands (tree stmt, tree expr, int flags,
1402 tree full_ref,
1403 HOST_WIDE_INT offset, HOST_WIDE_INT size,
1404 bool recurse_on_base)
1405 {
1406 tree *pptr = &TREE_OPERAND (expr, 0);
1407 tree ptr = *pptr;
1408 stmt_ann_t s_ann = stmt_ann (stmt);
1409
1410 /* Stores into INDIRECT_REF operands are never killing definitions. */
1411 flags &= ~opf_kill_def;
1412
1413 if (SSA_VAR_P (ptr))
1414 {
1415 struct ptr_info_def *pi = NULL;
1416
1417 /* If PTR has flow-sensitive points-to information, use it. */
1418 if (TREE_CODE (ptr) == SSA_NAME
1419 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1420 && pi->name_mem_tag)
1421 {
1422 /* PTR has its own memory tag. Use it. */
1423 add_virtual_operand (pi->name_mem_tag, s_ann, flags,
1424 full_ref, offset, size, false);
1425 }
1426 else
1427 {
1428 /* If PTR is not an SSA_NAME or it doesn't have a name
1429 tag, use its symbol memory tag. */
1430 var_ann_t v_ann;
1431
1432 /* If we are emitting debugging dumps, display a warning if
1433 PTR is an SSA_NAME with no flow-sensitive alias
1434 information. That means that we may need to compute
1435 aliasing again. */
1436 if (dump_file
1437 && TREE_CODE (ptr) == SSA_NAME
1438 && pi == NULL)
1439 {
1440 fprintf (dump_file,
1441 "NOTE: no flow-sensitive alias info for ");
1442 print_generic_expr (dump_file, ptr, dump_flags);
1443 fprintf (dump_file, " in ");
1444 print_generic_stmt (dump_file, stmt, dump_flags);
1445 }
1446
1447 if (TREE_CODE (ptr) == SSA_NAME)
1448 ptr = SSA_NAME_VAR (ptr);
1449 v_ann = var_ann (ptr);
1450
1451 if (v_ann->symbol_mem_tag)
1452 add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
1453 full_ref, offset, size, false);
1454 }
1455 }
1456 else if (TREE_CODE (ptr) == INTEGER_CST)
1457 {
1458 /* If a constant is used as a pointer, we can't generate a real
1459 operand for it but we mark the statement volatile to prevent
1460 optimizations from messing things up. */
1461 if (s_ann)
1462 s_ann->has_volatile_ops = true;
1463 return;
1464 }
1465 else
1466 {
1467 /* Ok, this isn't even is_gimple_min_invariant. Something is broken. */
1468 gcc_unreachable ();
1469 }
1470
1471 /* If requested, add a USE operand for the base pointer. */
1472 if (recurse_on_base)
1473 get_expr_operands (stmt, pptr, opf_none);
1474 }
1475
1476
1477 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1478
1479 static void
1480 get_tmr_operands (tree stmt, tree expr, int flags)
1481 {
1482 tree tag = TMR_TAG (expr), ref;
1483 HOST_WIDE_INT offset, size, maxsize;
1484 subvar_t svars, sv;
1485 stmt_ann_t s_ann = stmt_ann (stmt);
1486
1487 /* First record the real operands. */
1488 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1489 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1490
1491 /* MEM_REFs should never be killing. */
1492 flags &= ~opf_kill_def;
1493
1494 if (TMR_SYMBOL (expr))
1495 {
1496 stmt_ann_t ann = stmt_ann (stmt);
1497 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1498 }
1499
1500 if (!tag)
1501 {
1502 /* Something weird, so ensure that we will be careful. */
1503 stmt_ann (stmt)->has_volatile_ops = true;
1504 return;
1505 }
1506
1507 if (DECL_P (tag))
1508 {
1509 get_expr_operands (stmt, &tag, flags);
1510 return;
1511 }
1512
1513 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1514 gcc_assert (ref != NULL_TREE);
1515 svars = get_subvars_for_var (ref);
1516 for (sv = svars; sv; sv = sv->next)
1517 {
1518 bool exact;
1519 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1520 {
1521 int subvar_flags = flags;
1522 if (!exact || size != maxsize)
1523 subvar_flags &= ~opf_kill_def;
1524 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1525 }
1526 }
1527 }
1528
1529
1530 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1531 clobbered variables in the function. */
1532
1533 static void
1534 add_call_clobber_ops (tree stmt, tree callee)
1535 {
1536 unsigned u;
1537 bitmap_iterator bi;
1538 stmt_ann_t s_ann = stmt_ann (stmt);
1539 bitmap not_read_b, not_written_b;
1540
1541 /* Functions that are not const, pure, or noreturn may clobber
1542 call-clobbered variables. */
1543 if (s_ann)
1544 s_ann->makes_clobbering_call = true;
1545
1546 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1547 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1548 if (gimple_global_var (cfun))
1549 {
1550 tree var = gimple_global_var (cfun);
1551 add_stmt_operand (&var, s_ann, opf_is_def);
1552 return;
1553 }
1554
1555 /* Get info for local and module level statics. There is a bit
1556 set for each static if the call being processed does not read
1557 or write that variable. */
1558 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1559 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1560 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1561 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
1562 {
1563 tree var = referenced_var_lookup (u);
1564 unsigned int escape_mask = var_ann (var)->escape_mask;
1565 tree real_var = var;
1566 bool not_read;
1567 bool not_written;
1568
1569 /* Not read and not written are computed on regular vars, not
1570 subvars, so look at the parent var if this is an SFT. */
1571 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1572 real_var = SFT_PARENT_VAR (var);
1573
1574 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1575 DECL_UID (real_var)) : false;
1576 not_written = not_written_b ? bitmap_bit_p (not_written_b,
1577 DECL_UID (real_var)) : false;
1578 gcc_assert (!unmodifiable_var_p (var));
1579
1580 clobber_stats.clobbered_vars++;
1581
1582 /* See if this variable is really clobbered by this function. */
1583
1584 /* Trivial case: Things escaping only to pure/const are not
1585 clobbered by non-pure-const, and only read by pure/const. */
1586 if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
1587 {
1588 tree call = get_call_expr_in (stmt);
1589 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1590 {
1591 add_stmt_operand (&var, s_ann, opf_none);
1592 clobber_stats.unescapable_clobbers_avoided++;
1593 continue;
1594 }
1595 else
1596 {
1597 clobber_stats.unescapable_clobbers_avoided++;
1598 continue;
1599 }
1600 }
1601
1602 if (not_written)
1603 {
1604 clobber_stats.static_write_clobbers_avoided++;
1605 if (!not_read)
1606 add_stmt_operand (&var, s_ann, opf_none);
1607 else
1608 clobber_stats.static_read_clobbers_avoided++;
1609 }
1610 else
1611 add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
1612 }
1613 }
1614
1615
1616 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1617 function. */
1618
1619 static void
1620 add_call_read_ops (tree stmt, tree callee)
1621 {
1622 unsigned u;
1623 bitmap_iterator bi;
1624 stmt_ann_t s_ann = stmt_ann (stmt);
1625 bitmap not_read_b;
1626
1627 /* If the function is not pure, it may reference memory. Add
1628 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1629 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1630 if (gimple_global_var (cfun))
1631 {
1632 tree var = gimple_global_var (cfun);
1633 add_stmt_operand (&var, s_ann, opf_none);
1634 return;
1635 }
1636
1637 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1638
1639 /* Add a VUSE for each call-clobbered variable. */
1640 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
1641 {
1642 tree var = referenced_var (u);
1643 tree real_var = var;
1644 bool not_read;
1645
1646 clobber_stats.readonly_clobbers++;
1647
1648 /* Not read and not written are computed on regular vars, not
1649 subvars, so look at the parent var if this is an SFT. */
1650
1651 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1652 real_var = SFT_PARENT_VAR (var);
1653
1654 not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
1655 : false;
1656
1657 if (not_read)
1658 {
1659 clobber_stats.static_readonly_clobbers_avoided++;
1660 continue;
1661 }
1662
1663 add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
1664 }
1665 }
1666
1667
1668 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1669
1670 static void
1671 get_call_expr_operands (tree stmt, tree expr)
1672 {
1673 tree op;
1674 int call_flags = call_expr_flags (expr);
1675
1676 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1677 operands for all the symbols that have been found to be
1678 call-clobbered.
1679
1680 Note that if aliases have not been computed, the global effects
1681 of calls will not be included in the SSA web. This is fine
1682 because no optimizer should run before aliases have been
1683 computed. By not bothering with virtual operands for CALL_EXPRs
1684 we avoid adding superfluous virtual operands, which can be a
1685 significant compile time sink (See PR 15855). */
1686 if (gimple_aliases_computed_p (cfun)
1687 && !bitmap_empty_p (gimple_call_clobbered_vars (cfun))
1688 && !(call_flags & ECF_NOVOPS))
1689 {
1690 /* A 'pure' or a 'const' function never call-clobbers anything.
1691 A 'noreturn' function might, but since we don't return anyway
1692 there is no point in recording that. */
1693 if (TREE_SIDE_EFFECTS (expr)
1694 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1695 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1696 else if (!(call_flags & ECF_CONST))
1697 add_call_read_ops (stmt, get_callee_fndecl (expr));
1698 }
1699
1700 /* Find uses in the called function. */
1701 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1702
1703 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1704 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1705
1706 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1707 }
1708
1709
1710 /* Scan operands for the ASM_EXPR statement STMT. */
1711
1712 static void
1713 get_asm_expr_operands (tree stmt)
1714 {
1715 stmt_ann_t s_ann = stmt_ann (stmt);
1716 int noutputs = list_length (ASM_OUTPUTS (stmt));
1717 const char **oconstraints
1718 = (const char **) alloca ((noutputs) * sizeof (const char *));
1719 int i;
1720 tree link;
1721 const char *constraint;
1722 bool allows_mem, allows_reg, is_inout;
1723
1724 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1725 {
1726 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1727 oconstraints[i] = constraint;
1728 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
1729 &allows_reg, &is_inout);
1730
1731 /* This should have been split in gimplify_asm_expr. */
1732 gcc_assert (!allows_reg || !is_inout);
1733
1734 /* Memory operands are addressable. Note that STMT needs the
1735 address of this operand. */
1736 if (!allows_reg && allows_mem)
1737 {
1738 tree t = get_base_address (TREE_VALUE (link));
1739 if (t && DECL_P (t) && s_ann)
1740 add_to_addressable_set (t, &s_ann->addresses_taken);
1741 }
1742
1743 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1744 }
1745
1746 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1747 {
1748 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1749 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1750 oconstraints, &allows_mem, &allows_reg);
1751
1752 /* Memory operands are addressable. Note that STMT needs the
1753 address of this operand. */
1754 if (!allows_reg && allows_mem)
1755 {
1756 tree t = get_base_address (TREE_VALUE (link));
1757 if (t && DECL_P (t) && s_ann)
1758 add_to_addressable_set (t, &s_ann->addresses_taken);
1759 }
1760
1761 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1762 }
1763
1764
1765 /* Clobber memory for asm ("" : : : "memory"); */
1766 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1767 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1768 {
1769 unsigned i;
1770 bitmap_iterator bi;
1771
1772 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1773 decided to group them). */
1774 if (gimple_global_var (cfun))
1775 {
1776 tree var = gimple_global_var (cfun);
1777 add_stmt_operand (&var, s_ann, opf_is_def);
1778 }
1779 else
1780 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
1781 {
1782 tree var = referenced_var (i);
1783 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1784 }
1785
1786 /* Now clobber all addressables. */
1787 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
1788 {
1789 tree var = referenced_var (i);
1790
1791 /* Subvars are explicitly represented in this list, so
1792 we don't need the original to be added to the clobber
1793 ops, but the original *will* be in this list because
1794 we keep the addressability of the original
1795 variable up-to-date so we don't screw up the rest of
1796 the backend. */
1797 if (var_can_have_subvars (var)
1798 && get_subvars_for_var (var) != NULL)
1799 continue;
1800
1801 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1802 }
1803
1804 break;
1805 }
1806 }
1807
1808
1809 /* Scan operands for the assignment expression EXPR in statement STMT. */
1810
1811 static void
1812 get_modify_stmt_operands (tree stmt, tree expr)
1813 {
1814 /* First get operands from the RHS. */
1815 get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_none);
1816
1817 /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
1818 registers. If the LHS is a store to memory, we will either need
1819 a preserving definition (V_MAY_DEF) or a killing definition
1820 (V_MUST_DEF).
1821
1822 Preserving definitions are those that modify a part of an
1823 aggregate object for which no subvars have been computed (or the
1824 reference does not correspond exactly to one of them). Stores
1825 through a pointer are also represented with V_MAY_DEF operators.
1826
1827 The determination of whether to use a preserving or a killing
1828 definition is done while scanning the LHS of the assignment. By
1829 default, assume that we will emit a V_MUST_DEF. */
1830 get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0),
1831 opf_is_def|opf_kill_def);
1832 }
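/* Two illustrative cases of the distinction above (names and SSA
   versions invented): an assignment to a non-aliased aggregate that is
   not a GIMPLE register gets a killing definition, while a store
   through a pointer gets a preserving one:

	# s_2 = V_MUST_DEF <s_1>
	s = t;

	# a_4 = V_MAY_DEF <a_3>
	*p_1 = 0;

   assuming 'p' may point to the aliased variable 'a'.  */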
1833
1834
1835 /* Recursively scan the expression pointed to by EXPR_P in statement
1836 STMT. FLAGS is one of the OPF_* constants modifying how to
1837 interpret the operands found. */
1838
1839 static void
1840 get_expr_operands (tree stmt, tree *expr_p, int flags)
1841 {
1842 enum tree_code code;
1843 enum tree_code_class class;
1844 tree expr = *expr_p;
1845 stmt_ann_t s_ann = stmt_ann (stmt);
1846
1847 if (expr == NULL)
1848 return;
1849
1850 code = TREE_CODE (expr);
1851 class = TREE_CODE_CLASS (code);
1852
1853 switch (code)
1854 {
1855 case ADDR_EXPR:
1856 /* Taking the address of a variable does not represent a
1857 reference to it, but the fact that the statement takes its
1858 address will be of interest to some passes (e.g. alias
1859 resolution). */
1860 add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
1861
1862 /* If the address is invariant, there may be no interesting
1863 variable references inside. */
1864 if (is_gimple_min_invariant (expr))
1865 return;
1866
1867 /* Otherwise, there may be variables referenced inside but there
1868 should be no VUSEs created, since the referenced objects are
1869 not really accessed. The only operands that we should find
1870 here are ARRAY_REF indices which will always be real operands
1871 (GIMPLE does not allow non-registers as array indices). */
1872 flags |= opf_no_vops;
1873 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1874 return;
1875
1876 case SSA_NAME:
1877 case STRUCT_FIELD_TAG:
1878 case SYMBOL_MEMORY_TAG:
1879 case NAME_MEMORY_TAG:
1880 add_stmt_operand (expr_p, s_ann, flags);
1881 return;
1882
1883 case VAR_DECL:
1884 case PARM_DECL:
1885 case RESULT_DECL:
1886 {
1887 subvar_t svars;
1888
1889 /* Add the subvars for a variable, if it has subvars, to DEFS
1890 or USES. Otherwise, add the variable itself. Whether it
1891 goes to USES or DEFS depends on the operand flags. */
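/* E.g., for a structure 'x' with subvars x.a and x.b, a bare
reference to 'x' adds operands for both subvars rather than an
operand for 'x' itself.  */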
1892 if (var_can_have_subvars (expr)
1893 && (svars = get_subvars_for_var (expr)))
1894 {
1895 subvar_t sv;
1896 for (sv = svars; sv; sv = sv->next)
1897 add_stmt_operand (&sv->var, s_ann, flags);
1898 }
1899 else
1900 add_stmt_operand (expr_p, s_ann, flags);
1901
1902 return;
1903 }
1904
1905 case MISALIGNED_INDIRECT_REF:
1906 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1907 /* fall through */
1908
1909 case ALIGN_INDIRECT_REF:
1910 case INDIRECT_REF:
1911 get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
1912 return;
1913
1914 case TARGET_MEM_REF:
1915 get_tmr_operands (stmt, expr, flags);
1916 return;
1917
1918 case ARRAY_REF:
1919 case ARRAY_RANGE_REF:
1920 case COMPONENT_REF:
1921 case REALPART_EXPR:
1922 case IMAGPART_EXPR:
1923 {
1924 tree ref;
1925 HOST_WIDE_INT offset, size, maxsize;
1926 bool none = true;
1927
1928 /* This component reference becomes an access to all of the
1929 subvariables it can touch, if we can determine that, but
1930 *NOT* to the parent variable itself. If we can't determine
1931 which fields we could touch, the recursion will eventually
1932 get to a variable and add *all* of its subvars, or whatever
1933 is the minimum correct subset. */
1934 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
1935 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
1936 {
1937 subvar_t sv;
1938 subvar_t svars = get_subvars_for_var (ref);
1939
1940 for (sv = svars; sv; sv = sv->next)
1941 {
1942 bool exact;
1943
1944 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1945 {
1946 int subvar_flags = flags;
1947 none = false;
1948 if (!exact || size != maxsize)
1949 subvar_flags &= ~opf_kill_def;
1950 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1951 }
1952 }
1953
1954 if (!none)
1955 flags |= opf_no_vops;
1956 }
1957 else if (TREE_CODE (ref) == INDIRECT_REF)
1958 {
1959 get_indirect_ref_operands (stmt, ref, flags, expr, offset,
1960 maxsize, false);
1961 flags |= opf_no_vops;
1962 }
1963
1964 /* Even if we found subvars above, we still need to scan the
1965 operand so that we see immediate uses of d in s.a[d]; if s.a
1966 has subvars we would otherwise miss them. */
1967 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1968 flags & ~opf_kill_def);
1969
1970 if (code == COMPONENT_REF)
1971 {
1972 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1973 s_ann->has_volatile_ops = true;
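/* Operand 2 of a COMPONENT_REF, if present, is a variable field offset.  */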
1974 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1975 }
1976 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
1977 {
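/* Operand 1 is the array index; operands 2 and 3, if present, are
the lower bound and element size, which may also contain variable
references.  */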
1978 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1979 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1980 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1981 }
1982
1983 return;
1984 }
1985
1986 case WITH_SIZE_EXPR:
1987 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1988 and an rvalue reference to its second argument. */
1989 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1990 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1991 return;
1992
1993 case CALL_EXPR:
1994 get_call_expr_operands (stmt, expr);
1995 return;
1996
1997 case COND_EXPR:
1998 case VEC_COND_EXPR:
1999 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
2000 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
2001 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
2002 return;
2003
2004 case GIMPLE_MODIFY_STMT:
2005 get_modify_stmt_operands (stmt, expr);
2006 return;
2007
2008 case CONSTRUCTOR:
2009 {
2010 /* General aggregate CONSTRUCTORs have been decomposed, but they
2011 are still in use as the COMPLEX_EXPR equivalent for vectors. */
2012 constructor_elt *ce;
2013 unsigned HOST_WIDE_INT idx;
2014
2015 for (idx = 0;
2016 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
2017 idx++)
2018 get_expr_operands (stmt, &ce->value, opf_none);
2019
2020 return;
2021 }
2022
2023 case BIT_FIELD_REF:
2024 /* Stores using BIT_FIELD_REF are always preserving definitions. */
2025 flags &= ~opf_kill_def;
2026
2027 /* Fallthru */
2028
2029 case TRUTH_NOT_EXPR:
2030 case VIEW_CONVERT_EXPR:
2031 do_unary:
2032 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2033 return;
2034
2035 case TRUTH_AND_EXPR:
2036 case TRUTH_OR_EXPR:
2037 case TRUTH_XOR_EXPR:
2038 case COMPOUND_EXPR:
2039 case OBJ_TYPE_REF:
2040 case ASSERT_EXPR:
2041 do_binary:
2042 {
2043 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2044 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2045 return;
2046 }
2047
2048 case DOT_PROD_EXPR:
2049 case REALIGN_LOAD_EXPR:
2050 {
2051 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2052 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2053 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
2054 return;
2055 }
2056
2057 case BLOCK:
2058 case FUNCTION_DECL:
2059 case EXC_PTR_EXPR:
2060 case FILTER_EXPR:
2061 case LABEL_DECL:
2062 case CONST_DECL:
2063 case OMP_PARALLEL:
2064 case OMP_SECTIONS:
2065 case OMP_FOR:
2066 case OMP_SINGLE:
2067 case OMP_MASTER:
2068 case OMP_ORDERED:
2069 case OMP_CRITICAL:
2070 case OMP_RETURN:
2071 case OMP_CONTINUE:
2072 /* Expressions that make no memory references. */
2073 return;
2074
2075 default:
2076 if (class == tcc_unary)
2077 goto do_unary;
2078 if (class == tcc_binary || class == tcc_comparison)
2079 goto do_binary;
2080 if (class == tcc_constant || class == tcc_type)
2081 return;
2082 }
2083
2084 /* If we get here, something has gone wrong. */
2085 #ifdef ENABLE_CHECKING
2086 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
2087 debug_tree (expr);
2088 fputs ("\n", stderr);
2089 #endif
2090 gcc_unreachable ();
2091 }
2092
2093
2094 /* Parse STMT looking for operands. When finished, the various
2095 build_* operand vectors will have potential operands in them. */
2096
2097 static void
2098 parse_ssa_operands (tree stmt)
2099 {
2100 enum tree_code code;
2101
2102 code = TREE_CODE (stmt);
2103 switch (code)
2104 {
2105 case GIMPLE_MODIFY_STMT:
2106 get_modify_stmt_operands (stmt, stmt);
2107 break;
2108
2109 case COND_EXPR:
2110 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
2111 break;
2112
2113 case SWITCH_EXPR:
2114 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
2115 break;
2116
2117 case ASM_EXPR:
2118 get_asm_expr_operands (stmt);
2119 break;
2120
2121 case RETURN_EXPR:
2122 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
2123 break;
2124
2125 case GOTO_EXPR:
2126 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
2127 break;
2128
2129 case LABEL_EXPR:
2130 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
2131 break;
2132
2133 case BIND_EXPR:
2134 case CASE_LABEL_EXPR:
2135 case TRY_CATCH_EXPR:
2136 case TRY_FINALLY_EXPR:
2137 case EH_FILTER_EXPR:
2138 case CATCH_EXPR:
2139 case RESX_EXPR:
2140 /* These nodes contain no variable references. */
2141 break;
2142
2143 default:
2144 /* Notice that if get_expr_operands tries to use &STMT as the
2145 operand pointer (which may only happen for USE operands), we
2146 will fail in add_stmt_operand. This default will handle
2147 statements like empty statements, or CALL_EXPRs that may
2148 appear on the RHS of a statement or as statements themselves. */
2149 get_expr_operands (stmt, &stmt, opf_none);
2150 break;
2151 }
2152 }
2153
2154
2155 /* Create an operands cache for STMT. */
2156
2157 static void
2158 build_ssa_operands (tree stmt)
2159 {
2160 stmt_ann_t ann = get_stmt_ann (stmt);
2161
2162 /* Initially assume that the statement has no volatile operands. */
2163 if (ann)
2164 ann->has_volatile_ops = false;
2165
2166 start_ssa_stmt_operands ();
2167
2168 parse_ssa_operands (stmt);
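/* Sort the virtual operands into their canonical order so that the
finalizer can compare them against, and reuse, any existing operand
vectors.  */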
2169 operand_build_sort_virtual (build_vuses);
2170 operand_build_sort_virtual (build_v_may_defs);
2171 operand_build_sort_virtual (build_v_must_defs);
2172
2173 finalize_ssa_stmt_operands (stmt);
2174 }
2175
2176
2177 /* Free any operand vectors in OPS. */
2178
2179 void
2180 free_ssa_operands (stmt_operands_p ops)
2181 {
2182 ops->def_ops = NULL;
2183 ops->use_ops = NULL;
2184 ops->maydef_ops = NULL;
2185 ops->mustdef_ops = NULL;
2186 ops->vuse_ops = NULL;
2187 }
2188
2189
2190 /* Recompute the operand cache for statement STMT. */
2191
2192 void
2193 update_stmt_operands (tree stmt)
2194 {
2195 stmt_ann_t ann = get_stmt_ann (stmt);
2196
2197 /* If update_stmt_operands is called before SSA is initialized, do
2198 nothing. */
2199 if (!ssa_operands_active ())
2200 return;
2201
2202 /* The optimizers cannot handle statements that are nothing but a
2203 _DECL. This indicates a bug in the gimplifier. */
2204 gcc_assert (!SSA_VAR_P (stmt));
2205
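/* Callers must have marked the statement modified before asking for
its operands to be rebuilt.  */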
2206 gcc_assert (ann->modified);
2207
2208 timevar_push (TV_TREE_OPS);
2209
2210 build_ssa_operands (stmt);
2211
2212 /* Clear the modified bit for STMT. */
2213 ann->modified = 0;
2214
2215 timevar_pop (TV_TREE_OPS);
2216 }
2217
2218
2219 /* Copy the virtual operands from SRC to DEST. */
2220
2221 void
2222 copy_virtual_operands (tree dest, tree src)
2223 {
2224 tree t;
2225 ssa_op_iter iter, old_iter;
2226 use_operand_p use_p, u2;
2227 def_operand_p def_p, d2;
2228
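/* Rebuild DEST's operands first; this refreshes its real operands and
leaves the build_* vectors empty for the virtual operands appended
below.  */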
2229 build_ssa_operands (dest);
2230
2231 /* Copy all the virtual fields. */
2232 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
2233 append_vuse (t);
2234 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
2235 append_v_may_def (t);
2236 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
2237 append_v_must_def (t);
2238
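/* If SRC had no virtual operands, there is nothing to commit.  */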
2239 if (VEC_length (tree, build_vuses) == 0
2240 && VEC_length (tree, build_v_may_defs) == 0
2241 && VEC_length (tree, build_v_must_defs) == 0)
2242 return;
2243
2244 /* Now commit the virtual operands to this stmt. */
2245 finalize_ssa_v_must_defs (dest);
2246 finalize_ssa_v_may_defs (dest);
2247 finalize_ssa_vuses (dest);
2248
2249 /* Finally, set the fields to the same values as the originals. */
2250 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
2251 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
2252 {
2253 gcc_assert (!op_iter_done (&old_iter));
2254 SET_USE (use_p, t);
2255 t = op_iter_next_tree (&old_iter);
2256 }
2257 gcc_assert (op_iter_done (&old_iter));
2258
2259 op_iter_init_maydef (&old_iter, src, &u2, &d2);
2260 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
2261 {
2262 gcc_assert (!op_iter_done (&old_iter));
2263 SET_USE (use_p, USE_FROM_PTR (u2));
2264 SET_DEF (def_p, DEF_FROM_PTR (d2));
2265 op_iter_next_maymustdef (&u2, &d2, &old_iter);
2266 }
2267 gcc_assert (op_iter_done (&old_iter));
2268
2269 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
2270 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
2271 {
2272 gcc_assert (!op_iter_done (&old_iter));
2273 SET_USE (use_p, USE_FROM_PTR (u2));
2274 SET_DEF (def_p, DEF_FROM_PTR (d2));
2275 op_iter_next_maymustdef (&u2, &d2, &old_iter);
2276 }
2277 gcc_assert (op_iter_done (&old_iter));
2278
2279 }
2280
2281
2282 /* Specifically for use in DOM's expression analysis. Given a store, we
2283 create an artificial stmt which looks like a load from the store; this
2284 can be used to eliminate redundant loads. OLD_STMT is the store stmt,
2285 and NEW_STMT is the new load, which represents a load of the values
2286 stored. */
2287
2288 void
2289 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
2290 {
2291 stmt_ann_t ann;
2292 tree op;
2293 ssa_op_iter iter;
2294 use_operand_p use_p;
2295 unsigned x;
2296
2297 ann = get_stmt_ann (new_stmt);
2298
2299 /* Process the stmt looking for operands. */
2300 start_ssa_stmt_operands ();
2301 parse_ssa_operands (new_stmt);
2302
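/* Clear the in_vuse_list and in_v_may_def_list markers that parsing
set on the base variables; these vectors are discarded below.  */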
2303 for (x = 0; x < VEC_length (tree, build_vuses); x++)
2304 {
2305 tree t = VEC_index (tree, build_vuses, x);
2306 if (TREE_CODE (t) != SSA_NAME)
2307 {
2308 var_ann_t ann = var_ann (t);
2309 ann->in_vuse_list = 0;
2310 }
2311 }
2312
2313 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
2314 {
2315 tree t = VEC_index (tree, build_v_may_defs, x);
2316 if (TREE_CODE (t) != SSA_NAME)
2317 {
2318 var_ann_t ann = var_ann (t);
2319 ann->in_v_may_def_list = 0;
2320 }
2321 }
2322
2323 /* Remove any virtual operands that were found. */
2324 VEC_truncate (tree, build_v_may_defs, 0);
2325 VEC_truncate (tree, build_v_must_defs, 0);
2326 VEC_truncate (tree, build_vuses, 0);
2327
2328 /* For each VDEF on the original statement, we want to create a
2329 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
2330 statement. */
2331 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
2332 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
2333 append_vuse (op);
2334
2335 /* Now build the operands for this new stmt. */
2336 finalize_ssa_stmt_operands (new_stmt);
2337
2338 /* The uses in this fake stmt must not be linked into the immediate use lists; delink them all. */
2339 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2340 delink_imm_use (use_p);
2341 }
2342
2343
2344 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made
2345 to check the validity of the swap operation. */
2346
2347 void
2348 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2349 {
2350 tree op0, op1;
2351 op0 = *exp0;
2352 op1 = *exp1;
2353
2354 /* If the operand cache is active, attempt to preserve the relative
2355 positions of these two operands in their respective immediate use
2356 lists. */
2357 if (ssa_operands_active () && op0 != op1)
2358 {
2359 use_optype_p use0, use1, ptr;
2360 use0 = use1 = NULL;
2361
2362 /* Find the 2 operands in the cache, if they are there. */
2363 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2364 if (USE_OP_PTR (ptr)->use == exp0)
2365 {
2366 use0 = ptr;
2367 break;
2368 }
2369
2370 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2371 if (USE_OP_PTR (ptr)->use == exp1)
2372 {
2373 use1 = ptr;
2374 break;
2375 }
2376
2377 /* Unless both uses have operand entries in the cache, there isn't much
2378 we can do at this point; presumably we don't need to worry about it. */
2379 if (use0 && use1)
2380 {
2381 tree *tmp = USE_OP_PTR (use1)->use;
2382 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2383 USE_OP_PTR (use0)->use = tmp;
2384 }
2385 }
2386
2387 /* Now swap the data. */
2388 *exp0 = op1;
2389 *exp1 = op0;
2390 }
2391
2392
2393 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
2394 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
2395 a single variable whose address has been taken or any other valid
2396 GIMPLE memory reference (structure reference, array, etc). If the
2397 base address of REF is a decl that has sub-variables, also add all
2398 of its sub-variables. */
2399
2400 void
2401 add_to_addressable_set (tree ref, bitmap *addresses_taken)
2402 {
2403 tree var;
2404 subvar_t svars;
2405
2406 gcc_assert (addresses_taken);
2407
2408 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2409 as the only thing we take the address of. If VAR is a structure,
2410 taking the address of a field means that the whole structure may
2411 be referenced using pointer arithmetic. See PR 21407 and the
2412 ensuing mailing list discussion. */
2413 var = get_base_address (ref);
2414 if (var && SSA_VAR_P (var))
2415 {
2416 if (*addresses_taken == NULL)
2417 *addresses_taken = BITMAP_GGC_ALLOC ();
2418
2419 if (var_can_have_subvars (var)
2420 && (svars = get_subvars_for_var (var)))
2421 {
2422 subvar_t sv;
2423 for (sv = svars; sv; sv = sv->next)
2424 {
2425 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
2426 TREE_ADDRESSABLE (sv->var) = 1;
2427 }
2428 }
2429 else
2430 {
2431 bitmap_set_bit (*addresses_taken, DECL_UID (var));
2432 TREE_ADDRESSABLE (var) = 1;
2433 }
2434 }
2435 }
2436
2437
2438 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2439 If there is a problem, emit an error message to F and return TRUE. */
2440
2441 bool
2442 verify_imm_links (FILE *f, tree var)
2443 {
2444 use_operand_p ptr, prev, list;
2445 int count;
2446
2447 gcc_assert (TREE_CODE (var) == SSA_NAME);
2448
2449 list = &(SSA_NAME_IMM_USE_NODE (var));
2450 gcc_assert (list->use == NULL);
2451
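/* A root node with NULL links represents an empty list; there is
nothing to verify.  */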
2452 if (list->prev == NULL)
2453 {
2454 gcc_assert (list->next == NULL);
2455 return false;
2456 }
2457
2458 prev = list;
2459 count = 0;
2460 for (ptr = list->next; ptr != list; )
2461 {
2462 if (prev != ptr->prev)
2463 goto error;
2464
2465 if (ptr->use == NULL)
2466 goto error; /* A second list root, or a safe iterator guard node, found in the list. */
2467 else if (*(ptr->use) != var)
2468 goto error;
2469
2470 prev = ptr;
2471 ptr = ptr->next;
2472
2473 /* Avoid infinite loops. 50,000,000 uses probably indicates a
2474 problem. */
2475 if (count++ > 50000000)
2476 goto error;
2477 }
2478
2479 /* Verify list in the other direction. */
2480 prev = list;
2481 for (ptr = list->prev; ptr != list; )
2482 {
2483 if (prev != ptr->next)
2484 goto error;
2485 prev = ptr;
2486 ptr = ptr->prev;
2487 if (count-- < 0)
2488 goto error;
2489 }
2490
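/* Both traversals must have visited the same number of nodes.  */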
2491 if (count != 0)
2492 goto error;
2493
2494 return false;
2495
2496 error:
2497 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2498 {
2499 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2500 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2501 }
2502 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2503 (void *)ptr->use);
2504 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2505 fprintf (f, "\n");
2506 return true;
2507 }
2508
2509
2510 /* Dump all the immediate uses of VAR to FILE. */
2511
2512 void
2513 dump_immediate_uses_for (FILE *file, tree var)
2514 {
2515 imm_use_iterator iter;
2516 use_operand_p use_p;
2517
2518 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2519
2520 print_generic_expr (file, var, TDF_SLIM);
2521 fprintf (file, " : -->");
2522 if (has_zero_uses (var))
2523 fprintf (file, " no uses.\n");
2524 else
2525 if (has_single_use (var))
2526 fprintf (file, " single use.\n");
2527 else
2528 fprintf (file, "%d uses.\n", num_imm_uses (var));
2529
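/* Print each use's statement, showing virtual operands for uses that
are not GIMPLE registers.  */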
2530 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2531 {
2532 if (use_p->stmt == NULL && use_p->use == NULL)
2533 fprintf (file, "***end of stmt iterator marker***\n");
2534 else
2535 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2536 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2537 else
2538 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2539 }
2540 fprintf (file, "\n");
2541 }
2542
2543
2544 /* Dump all the immediate uses to FILE. */
2545
2546 void
2547 dump_immediate_uses (FILE *file)
2548 {
2549 tree var;
2550 unsigned int x;
2551
2552 fprintf (file, "Immediate_uses: \n\n");
2553 for (x = 1; x < num_ssa_names; x++)
2554 {
2555 var = ssa_name(x);
2556 if (!var)
2557 continue;
2558 dump_immediate_uses_for (file, var);
2559 }
2560 }
2561
2562
2563 /* Dump def-use edges on stderr. */
2564
2565 void
2566 debug_immediate_uses (void)
2567 {
2568 dump_immediate_uses (stderr);
2569 }
2570
2571
2572 /* Dump the def-use edges for VAR on stderr. */
2573
2574 void
2575 debug_immediate_uses_for (tree var)
2576 {
2577 dump_immediate_uses_for (stderr, var);
2578 }