tree-ssa-operands.c (clobbered_v_may_defs, [...]): Use VEC instead of VARRAY.
1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "errors.h"
30 #include "tree-flow.h"
31 #include "tree-inline.h"
32 #include "tree-pass.h"
33 #include "ggc.h"
34 #include "timevar.h"
35
36 #include "langhooks.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53 The operand tree is then parsed by the various get_* routines which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands. Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
59
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
62
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
66
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had SSA_NAME version of a
72 variable, and that same variable occurs in the same operands cache, then
73 the new cache vector will also get the same SSA_NAME.
74
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'.
78
79 */
80
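/* Editor's note: the function below is an illustrative sketch, not part of
   the original file, and is kept under "#if 0" so it is never compiled.
   It shows the typical way a pass consumes this cache: the statement is
   marked modified, the cache is rebuilt, and the operands are then walked
   with the iterator macros used elsewhere in this file.  The function name
   and the dump output are hypothetical.  */

#if 0
static void
example_dump_stmt_uses (tree stmt)
{
  tree var;
  ssa_op_iter iter;

  /* A pass that has changed STMT's tree must mark it modified before the
     operand cache will be rebuilt.  */
  mark_stmt_modified (stmt);
  update_stmt_operands (stmt);

  /* Walk the real and virtual uses recorded in the operand cache.  */
  if (dump_file)
    FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
      print_generic_expr (dump_file, var, dump_flags);
}
#endif
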
81
82 /* Flags to describe operand properties in helpers. */
83
84 /* By default, operands are loaded. */
85 #define opf_none 0
86
87 /* Operand is the target of an assignment expression or a
88 call-clobbered variable */
89 #define opf_is_def (1 << 0)
90
91 /* Operand is the target of an assignment expression. */
92 #define opf_kill_def (1 << 1)
93
94 /* No virtual operands should be created in the expression. This is used
95 when traversing ADDR_EXPR nodes which have different semantics than
96 other expressions. Inside an ADDR_EXPR node, the only operands that we
97 need to consider are indices into arrays. For instance, &a.b[i] should
98 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
99 VUSE for 'b'. */
100 #define opf_no_vops (1 << 2)
101
102 /* This structure maintains a sorted list of operands which is created by
103 parse_ssa_operands. */
104 struct opbuild_list_d GTY (())
105 {
106 varray_type vars; /* The VAR_DECLS tree. */
107 varray_type uid; /* The sort value for virtual symbols. */
108 varray_type next; /* The next index in the sorted list. */
109 int first; /* First element in list. */
110 unsigned num; /* Number of elements. */
111 };
112
113 #define OPBUILD_LAST -1
114
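/* Editor's note (worked example, not in the original source): the VARS, UID
   and NEXT varrays above encode a singly linked list kept sorted by UID
   (see opbuild_append_virtual below).  For instance, appending virtual
   operands whose base DECL_UIDs arrive in the order 9, 4, 7 leaves the
   arrays as

       index :   0    1    2
       uid   :   9    4    7
       next  :  -1    2    0        (-1 is OPBUILD_LAST)

   with first == 1, so following first -> next -> next visits the elements
   in sorted UID order 4, 7, 9.  */
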
115
116 /* Array for building all the def operands. */
117 static GTY (()) struct opbuild_list_d build_defs;
118
119 /* Array for building all the use operands. */
120 static GTY (()) struct opbuild_list_d build_uses;
121
122 /* Array for building all the v_may_def operands. */
123 static GTY (()) struct opbuild_list_d build_v_may_defs;
124
125 /* Array for building all the vuse operands. */
126 static GTY (()) struct opbuild_list_d build_vuses;
127
128 /* Array for building all the v_must_def operands. */
129 static GTY (()) struct opbuild_list_d build_v_must_defs;
130
131 /* True if the operands for call clobbered vars are cached and valid. */
132 bool ssa_call_clobbered_cache_valid;
133 bool ssa_ro_call_cache_valid;
134
135 /* These arrays are the cached operand vectors for call clobbered calls. */
136 static VEC(tree,heap) *clobbered_v_may_defs;
137 static VEC(tree,heap) *clobbered_vuses;
138 static VEC(tree,heap) *ro_call_vuses;
139 static bool clobbered_aliased_loads;
140 static bool clobbered_aliased_stores;
141 static bool ro_call_aliased_loads;
142 static bool ops_active = false;
143
144 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
145 static unsigned operand_memory_index;
146
147 static void note_addressable (tree, stmt_ann_t);
148 static void get_expr_operands (tree, tree *, int);
149 static void get_asm_expr_operands (tree);
150 static void get_indirect_ref_operands (tree, tree, int);
151 static void get_call_expr_operands (tree, tree);
152 static inline void append_def (tree *);
153 static inline void append_use (tree *);
154 static void append_v_may_def (tree);
155 static void append_v_must_def (tree);
156 static void add_call_clobber_ops (tree);
157 static void add_call_read_ops (tree);
158 static void add_stmt_operand (tree *, stmt_ann_t, int);
159 static void build_ssa_operands (tree stmt);
160
161 static def_optype_p free_defs = NULL;
162 static use_optype_p free_uses = NULL;
163 static vuse_optype_p free_vuses = NULL;
164 static maydef_optype_p free_maydefs = NULL;
165 static mustdef_optype_p free_mustdefs = NULL;
166
167 /* Initialize a virtual operand build LIST called NAME with NUM elements. */
168
169 static inline void
170 opbuild_initialize_virtual (struct opbuild_list_d *list, int num,
171 const char *name)
172 {
173 list->first = OPBUILD_LAST;
174 list->num = 0;
175 VARRAY_TREE_INIT (list->vars, num, name);
176 VARRAY_UINT_INIT (list->uid, num, "List UID");
177 VARRAY_INT_INIT (list->next, num, "List NEXT");
178 }
179
180
181 /* Initialize a real operand build LIST called NAME with NUM elements. */
182
183 static inline void
184 opbuild_initialize_real (struct opbuild_list_d *list, int num, const char *name)
185 {
186 list->first = OPBUILD_LAST;
187 list->num = 0;
188 VARRAY_TREE_PTR_INIT (list->vars, num, name);
189 VARRAY_INT_INIT (list->next, num, "List NEXT");
190 /* The UID field is not needed since we sort based on the pointer value. */
191 list->uid = NULL;
192 }
193
194
195 /* Free memory used in virtual operand build object LIST. */
196
197 static inline void
198 opbuild_free (struct opbuild_list_d *list)
199 {
200 list->vars = NULL;
201 list->uid = NULL;
202 list->next = NULL;
203 }
204
205
206 /* Number of elements in an opbuild list. */
207
208 static inline unsigned
209 opbuild_num_elems (struct opbuild_list_d *list)
210 {
211 return list->num;
212 }
213
214
215 /* Add VAR to the real operand list LIST, keeping it sorted and avoiding
216 duplicates. The actual sort value is the tree pointer value. */
217
218 static inline void
219 opbuild_append_real (struct opbuild_list_d *list, tree *var)
220 {
221 int index;
222
223 #ifdef ENABLE_CHECKING
224 /* Ensure the real operand doesn't exist already. */
225 for (index = list->first;
226 index != OPBUILD_LAST;
227 index = VARRAY_INT (list->next, index))
228 gcc_assert (VARRAY_TREE_PTR (list->vars, index) != var);
229 #endif
230
231 /* First item in the list. */
232 index = VARRAY_ACTIVE_SIZE (list->vars);
233 if (index == 0)
234 list->first = index;
235 else
236 VARRAY_INT (list->next, index - 1) = index;
237 VARRAY_PUSH_INT (list->next, OPBUILD_LAST);
238 VARRAY_PUSH_TREE_PTR (list->vars, var);
239 list->num++;
240 }
241
242
243 /* Add VAR to the virtual operand list LIST, keeping it sorted and avoiding
244 duplicates. The actual sort value is the DECL UID of the base variable. */
245
246 static inline void
247 opbuild_append_virtual (struct opbuild_list_d *list, tree var)
248 {
249 int index, curr, last;
250 unsigned int var_uid;
251
252 if (TREE_CODE (var) != SSA_NAME)
253 var_uid = DECL_UID (var);
254 else
255 var_uid = DECL_UID (SSA_NAME_VAR (var));
256
257 index = VARRAY_ACTIVE_SIZE (list->vars);
258
259 if (index == 0)
260 {
261 VARRAY_PUSH_TREE (list->vars, var);
262 VARRAY_PUSH_UINT (list->uid, var_uid);
263 VARRAY_PUSH_INT (list->next, OPBUILD_LAST);
264 list->first = 0;
265 list->num = 1;
266 return;
267 }
268
269 last = OPBUILD_LAST;
270 /* Find the correct spot in the sorted list. */
271 for (curr = list->first;
272 curr != OPBUILD_LAST;
273 last = curr, curr = VARRAY_INT (list->next, curr))
274 {
275 if (VARRAY_UINT (list->uid, curr) > var_uid)
276 break;
277 }
278
279 if (last == OPBUILD_LAST)
280 {
281 /* First item in the list. */
282 VARRAY_PUSH_INT (list->next, list->first);
283 list->first = index;
284 }
285 else
286 {
287 /* Don't enter duplicates at all. */
288 if (VARRAY_UINT (list->uid, last) == var_uid)
289 return;
290
291 VARRAY_PUSH_INT (list->next, VARRAY_INT (list->next, last));
292 VARRAY_INT (list->next, last) = index;
293 }
294 VARRAY_PUSH_TREE (list->vars, var);
295 VARRAY_PUSH_UINT (list->uid, var_uid);
296 list->num++;
297 }
298
299
300 /* Return the first element index in LIST. OPBUILD_LAST means there are no
301 more elements. */
302
303 static inline int
304 opbuild_first (struct opbuild_list_d *list)
305 {
306 if (list->num > 0)
307 return list->first;
308 else
309 return OPBUILD_LAST;
310 }
311
312
313 /* Return the next element after PREV in LIST. */
314
315 static inline int
316 opbuild_next (struct opbuild_list_d *list, int prev)
317 {
318 return VARRAY_INT (list->next, prev);
319 }
320
321
322 /* Return the real element at index ELEM in LIST. */
323
324 static inline tree *
325 opbuild_elem_real (struct opbuild_list_d *list, int elem)
326 {
327 return VARRAY_TREE_PTR (list->vars, elem);
328 }
329
330
331 /* Return the virtual element at index ELEM in LIST. */
332
333 static inline tree
334 opbuild_elem_virtual (struct opbuild_list_d *list, int elem)
335 {
336 return VARRAY_TREE (list->vars, elem);
337 }
338
339
340 /* Return the virtual element uid at index ELEM in LIST. */
341 static inline unsigned int
342 opbuild_elem_uid (struct opbuild_list_d *list, int elem)
343 {
344 return VARRAY_UINT (list->uid, elem);
345 }
346
347
348 /* Reset an operand build list. */
349
350 static inline void
351 opbuild_clear (struct opbuild_list_d *list)
352 {
353 list->first = OPBUILD_LAST;
354 VARRAY_POP_ALL (list->vars);
355 VARRAY_POP_ALL (list->next);
356 if (list->uid)
357 VARRAY_POP_ALL (list->uid);
358 list->num = 0;
359 }
360
361
362 /* Remove ELEM from LIST where PREV is the previous element. Return the next
363 element. */
364
365 static inline int
366 opbuild_remove_elem (struct opbuild_list_d *list, int elem, int prev)
367 {
368 int ret;
369 if (prev != OPBUILD_LAST)
370 {
371 gcc_assert (VARRAY_INT (list->next, prev) == elem);
372 ret = VARRAY_INT (list->next, prev) = VARRAY_INT (list->next, elem);
373 }
374 else
375 {
376 gcc_assert (list->first == elem);
377 ret = list->first = VARRAY_INT (list->next, elem);
378 }
379 list->num--;
380 return ret;
381 }
382
383
384 /* Return true if the ssa operands cache is active. */
385
386 bool
387 ssa_operands_active (void)
388 {
389 return ops_active;
390 }
391
392
393 /* Initialize the operand cache routines. */
394
395 void
396 init_ssa_operands (void)
397 {
398 opbuild_initialize_real (&build_defs, 5, "build defs");
399 opbuild_initialize_real (&build_uses, 10, "build uses");
400 opbuild_initialize_virtual (&build_vuses, 25, "build_vuses");
401 opbuild_initialize_virtual (&build_v_may_defs, 25, "build_v_may_defs");
402 opbuild_initialize_virtual (&build_v_must_defs, 25, "build_v_must_defs");
403 gcc_assert (operand_memory == NULL);
404 operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
405 ops_active = true;
406 }
407
408
409 /* Dispose of anything required by the operand routines. */
410
411 void
412 fini_ssa_operands (void)
413 {
414 struct ssa_operand_memory_d *ptr;
415 opbuild_free (&build_defs);
416 opbuild_free (&build_uses);
417 opbuild_free (&build_v_must_defs);
418 opbuild_free (&build_v_may_defs);
419 opbuild_free (&build_vuses);
420 free_defs = NULL;
421 free_uses = NULL;
422 free_vuses = NULL;
423 free_maydefs = NULL;
424 free_mustdefs = NULL;
425 while ((ptr = operand_memory) != NULL)
426 {
427 operand_memory = operand_memory->next;
428 ggc_free (ptr);
429 }
430
431 VEC_free (tree, heap, clobbered_v_may_defs);
432 VEC_free (tree, heap, clobbered_vuses);
433 VEC_free (tree, heap, ro_call_vuses);
434 ops_active = false;
435 }
436
437
438 /* Return a pointer to SIZE bytes of operand memory, carved from larger chunks. */
439
440 static inline void *
441 ssa_operand_alloc (unsigned size)
442 {
443 char *ptr;
444 if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
445 {
446 struct ssa_operand_memory_d *ptr;
447 ptr = ggc_alloc (sizeof (struct ssa_operand_memory_d));
448 ptr->next = operand_memory;
449 operand_memory = ptr;
450 operand_memory_index = 0;
451 }
452 ptr = &(operand_memory->mem[operand_memory_index]);
453 operand_memory_index += size;
454 return ptr;
455 }
456
457
458 /* Make sure PTR is in the correct immediate use list. Since uses are simply
459 pointers into the stmt TREE, there is no way of telling if anyone has
460 changed what this pointer points to via TREE_OPERAND (exp, 0) = <...>.
461 The contents are different, but the pointer is still the same. This
462 routine will check to make sure PTR is in the correct list, and if it
463 isn't, put it in the correct list. We cannot simply check the previous
464 node because all nodes in the same stmt might have been changed. */
465
466 static inline void
467 correct_use_link (use_operand_p ptr, tree stmt)
468 {
469 use_operand_p prev;
470 tree root;
471
472 /* Fold_stmt () may have changed the stmt pointers. */
473 if (ptr->stmt != stmt)
474 ptr->stmt = stmt;
475
476 prev = ptr->prev;
477 if (prev)
478 {
479 bool stmt_mod = true;
480 /* Find the first element which isn't a SAFE iterator, is in a different
481 stmt, and is not a modified stmt. That node is in the correct list;
482 see if we are too. */
483
484 while (stmt_mod)
485 {
486 while (prev->stmt == stmt || prev->stmt == NULL)
487 prev = prev->prev;
488 if (prev->use == NULL)
489 stmt_mod = false;
490 else
491 if ((stmt_mod = stmt_modified_p (prev->stmt)))
492 prev = prev->prev;
493 }
494
495 /* Get the ssa_name of the list the node is in. */
496 if (prev->use == NULL)
497 root = prev->stmt;
498 else
499 root = *(prev->use);
500 /* If it's the right list, simply return. */
501 if (root == *(ptr->use))
502 return;
503 }
504 /* It's in the wrong list if we reach here. */
505 delink_imm_use (ptr);
506 link_imm_use (ptr, *(ptr->use));
507 }
508
509
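/* Editor's note: the FINALIZE_* macros below (and the four similar blocks
   that follow) parameterize the generic finalization template in
   tree-ssa-opfinalize.h, which is #included once for each of the five
   operand types.  Each inclusion expands into the routine named by
   FINALIZE_FUNC (here, finalize_ssa_def_ops), which the corresponding
   finalize_ssa_* wrapper then calls.  */
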
510 #define FINALIZE_OPBUILD build_defs
511 #define FINALIZE_OPBUILD_BASE(I) opbuild_elem_real (&build_defs, (I))
512 #define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_real (&build_defs, (I))
513 #define FINALIZE_FUNC finalize_ssa_def_ops
514 #define FINALIZE_ALLOC alloc_def
515 #define FINALIZE_FREE free_defs
516 #define FINALIZE_TYPE struct def_optype_d
517 #define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
518 #define FINALIZE_OPS DEF_OPS
519 #define FINALIZE_BASE(VAR) VAR
520 #define FINALIZE_BASE_TYPE tree *
521 #define FINALIZE_BASE_ZERO NULL
522 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
523 #include "tree-ssa-opfinalize.h"
524
525
526 /* This routine will create stmt operands for STMT from the def build list. */
527
528 static void
529 finalize_ssa_defs (tree stmt)
530 {
531 unsigned int num = opbuild_num_elems (&build_defs);
532 /* There should only be a single real definition per assignment. */
533 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
534
535 /* If there is an old list, often the new list is identical, or close, so
536 find the elements at the beginning that are the same as the vector. */
537
538 finalize_ssa_def_ops (stmt);
539 opbuild_clear (&build_defs);
540 }
541
542 #define FINALIZE_OPBUILD build_uses
543 #define FINALIZE_OPBUILD_BASE(I) opbuild_elem_real (&build_uses, (I))
544 #define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_real (&build_uses, (I))
545 #define FINALIZE_FUNC finalize_ssa_use_ops
546 #define FINALIZE_ALLOC alloc_use
547 #define FINALIZE_FREE free_uses
548 #define FINALIZE_TYPE struct use_optype_d
549 #define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
550 #define FINALIZE_OPS USE_OPS
551 #define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
552 #define FINALIZE_BASE(VAR) VAR
553 #define FINALIZE_BASE_TYPE tree *
554 #define FINALIZE_BASE_ZERO NULL
555 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
556 (PTR)->use_ptr.use = (VAL); \
557 link_imm_use_stmt (&((PTR)->use_ptr), \
558 *(VAL), (STMT))
559 #include "tree-ssa-opfinalize.h"
560
561 /* Create the use operands for STMT from the build_uses vector. */
562
563 static void
564 finalize_ssa_uses (tree stmt)
565 {
566 #ifdef ENABLE_CHECKING
567 {
568 unsigned x;
569 unsigned num = opbuild_num_elems (&build_uses);
570
571 /* If the pointer to the operand is the statement itself, something is
572 wrong. It means that we are pointing to a local variable (the
573 initial call to get_stmt_operands does not pass a pointer to a
574 statement). */
575 for (x = 0; x < num; x++)
576 gcc_assert (*(opbuild_elem_real (&build_uses, x)) != stmt);
577 }
578 #endif
579 finalize_ssa_use_ops (stmt);
580 opbuild_clear (&build_uses);
581 }
582
583
584 /* Create the v_may_def operands for STMT from the build_v_may_defs vector. */
585 #define FINALIZE_OPBUILD build_v_may_defs
586 #define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_virtual (&build_v_may_defs, (I))
587 #define FINALIZE_OPBUILD_BASE(I) opbuild_elem_uid (&build_v_may_defs, (I))
588 #define FINALIZE_FUNC finalize_ssa_v_may_def_ops
589 #define FINALIZE_ALLOC alloc_maydef
590 #define FINALIZE_FREE free_maydefs
591 #define FINALIZE_TYPE struct maydef_optype_d
592 #define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
593 #define FINALIZE_OPS MAYDEF_OPS
594 #define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
595 #define FINALIZE_BASE_ZERO 0
596 #define FINALIZE_BASE(VAR) ((TREE_CODE (VAR) == SSA_NAME) \
597 ? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
598 #define FINALIZE_BASE_TYPE unsigned
599 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
600 (PTR)->def_var = (VAL); \
601 (PTR)->use_var = (VAL); \
602 (PTR)->use_ptr.use = &((PTR)->use_var); \
603 link_imm_use_stmt (&((PTR)->use_ptr), \
604 (VAL), (STMT))
605 #include "tree-ssa-opfinalize.h"
606
607
608 static void
609 finalize_ssa_v_may_defs (tree stmt)
610 {
611 finalize_ssa_v_may_def_ops (stmt);
612 }
613
614
615 /* Clear the in_list bits and empty the build array for v_may_defs. */
616
617 static inline void
618 cleanup_v_may_defs (void)
619 {
620 unsigned x, num;
621 num = opbuild_num_elems (&build_v_may_defs);
622
623 for (x = 0; x < num; x++)
624 {
625 tree t = opbuild_elem_virtual (&build_v_may_defs, x);
626 if (TREE_CODE (t) != SSA_NAME)
627 {
628 var_ann_t ann = var_ann (t);
629 ann->in_v_may_def_list = 0;
630 }
631 }
632 opbuild_clear (&build_v_may_defs);
633 }
634
635
636 #define FINALIZE_OPBUILD build_vuses
637 #define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_virtual (&build_vuses, (I))
638 #define FINALIZE_OPBUILD_BASE(I) opbuild_elem_uid (&build_vuses, (I))
639 #define FINALIZE_FUNC finalize_ssa_vuse_ops
640 #define FINALIZE_ALLOC alloc_vuse
641 #define FINALIZE_FREE free_vuses
642 #define FINALIZE_TYPE struct vuse_optype_d
643 #define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
644 #define FINALIZE_OPS VUSE_OPS
645 #define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
646 #define FINALIZE_BASE_ZERO 0
647 #define FINALIZE_BASE(VAR) ((TREE_CODE (VAR) == SSA_NAME) \
648 ? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
649 #define FINALIZE_BASE_TYPE unsigned
650 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
651 (PTR)->use_var = (VAL); \
652 (PTR)->use_ptr.use = &((PTR)->use_var); \
653 link_imm_use_stmt (&((PTR)->use_ptr), \
654 (VAL), (STMT))
655 #include "tree-ssa-opfinalize.h"
656
657
658 /* Create the VUSE operands for STMT from the build_vuses vector. */
659
660 static void
661 finalize_ssa_vuses (tree stmt)
662 {
663 unsigned num, num_v_may_defs;
664 int vuse_index;
665
666 /* Remove superfluous VUSE operands. If the statement already has a
667 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
668 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
669 suppose that variable 'a' is aliased:
670
671 # VUSE <a_2>
672 # a_3 = V_MAY_DEF <a_2>
673 a = a + 1;
674
675 The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
676 operation. */
677
678 num = opbuild_num_elems (&build_vuses);
679 num_v_may_defs = opbuild_num_elems (&build_v_may_defs);
680
681 if (num > 0 && num_v_may_defs > 0)
682 {
683 int last = OPBUILD_LAST;
684 vuse_index = opbuild_first (&build_vuses);
685 for ( ; vuse_index != OPBUILD_LAST; )
686 {
687 tree vuse;
688 vuse = opbuild_elem_virtual (&build_vuses, vuse_index);
689 if (TREE_CODE (vuse) != SSA_NAME)
690 {
691 var_ann_t ann = var_ann (vuse);
692 ann->in_vuse_list = 0;
693 if (ann->in_v_may_def_list)
694 {
695 vuse_index = opbuild_remove_elem (&build_vuses, vuse_index,
696 last);
697 continue;
698 }
699 }
700 last = vuse_index;
701 vuse_index = opbuild_next (&build_vuses, vuse_index);
702 }
703 }
704 else
705 /* Clear out the in_list bits. */
706 for (vuse_index = opbuild_first (&build_vuses);
707 vuse_index != OPBUILD_LAST;
708 vuse_index = opbuild_next (&build_vuses, vuse_index))
709 {
710 tree t = opbuild_elem_virtual (&build_vuses, vuse_index);
711 if (TREE_CODE (t) != SSA_NAME)
712 {
713 var_ann_t ann = var_ann (t);
714 ann->in_vuse_list = 0;
715 }
716 }
717
718 finalize_ssa_vuse_ops (stmt);
719 /* The v_may_def build vector wasn't cleaned up because we needed it. */
720 cleanup_v_may_defs ();
721
722 /* Free the vuses build vector. */
723 opbuild_clear (&build_vuses);
724
725 }
726
727 /* Create the v_must_def operands for STMT from the build_v_must_defs vector. */
728
729 #define FINALIZE_OPBUILD build_v_must_defs
730 #define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_virtual (&build_v_must_defs, (I))
731 #define FINALIZE_OPBUILD_BASE(I) opbuild_elem_uid (&build_v_must_defs, (I))
732 #define FINALIZE_FUNC finalize_ssa_v_must_def_ops
733 #define FINALIZE_ALLOC alloc_mustdef
734 #define FINALIZE_FREE free_mustdefs
735 #define FINALIZE_TYPE struct mustdef_optype_d
736 #define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
737 #define FINALIZE_OPS MUSTDEF_OPS
738 #define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
739 #define FINALIZE_BASE_ZERO 0
740 #define FINALIZE_BASE(VAR) ((TREE_CODE (VAR) == SSA_NAME) \
741 ? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
742 #define FINALIZE_BASE_TYPE unsigned
743 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
744 (PTR)->def_var = (VAL); \
745 (PTR)->kill_var = (VAL); \
746 (PTR)->use_ptr.use = &((PTR)->kill_var);\
747 link_imm_use_stmt (&((PTR)->use_ptr), \
748 (VAL), (STMT))
749 #include "tree-ssa-opfinalize.h"
750
751
752 static void
753 finalize_ssa_v_must_defs (tree stmt)
754 {
755 /* In the presence of subvars, there may be more than one V_MUST_DEF per
756 statement (one for each subvar). It is a bit expensive to verify that
757 all must-defs in a statement belong to subvars if there is more than one
758 MUST-def, so we don't do it. Suffice to say, if you reach here without
759 having subvars, and have num >1, you have hit a bug. */
760
761 finalize_ssa_v_must_def_ops (stmt);
762 opbuild_clear (&build_v_must_defs);
763 }
764
765
766 /* Finalize all the build vectors and attach the results to STMT. */
767
768 static inline void
769 finalize_ssa_stmt_operands (tree stmt)
770 {
771 finalize_ssa_defs (stmt);
772 finalize_ssa_uses (stmt);
773 finalize_ssa_v_must_defs (stmt);
774 finalize_ssa_v_may_defs (stmt);
775 finalize_ssa_vuses (stmt);
776 }
777
778
779 /* Start the process of building up the operand vectors. */
780
781 static inline void
782 start_ssa_stmt_operands (void)
783 {
784 gcc_assert (opbuild_num_elems (&build_defs) == 0);
785 gcc_assert (opbuild_num_elems (&build_uses) == 0);
786 gcc_assert (opbuild_num_elems (&build_vuses) == 0);
787 gcc_assert (opbuild_num_elems (&build_v_may_defs) == 0);
788 gcc_assert (opbuild_num_elems (&build_v_must_defs) == 0);
789 }
790
791
792 /* Add DEF_P to the list of pointers to operands. */
793
794 static inline void
795 append_def (tree *def_p)
796 {
797 opbuild_append_real (&build_defs, def_p);
798 }
799
800
801 /* Add USE_P to the list of pointers to operands. */
802
803 static inline void
804 append_use (tree *use_p)
805 {
806 opbuild_append_real (&build_uses, use_p);
807 }
808
809
810 /* Add a new virtual may def for variable VAR to the build array. */
811
812 static inline void
813 append_v_may_def (tree var)
814 {
815 if (TREE_CODE (var) != SSA_NAME)
816 {
817 var_ann_t ann = get_var_ann (var);
818
819 /* Don't allow duplicate entries. */
820 if (ann->in_v_may_def_list)
821 return;
822 ann->in_v_may_def_list = 1;
823 }
824
825 opbuild_append_virtual (&build_v_may_defs, var);
826 }
827
828
829 /* Add VAR to the list of virtual uses. */
830
831 static inline void
832 append_vuse (tree var)
833 {
834
835 /* Don't allow duplicate entries. */
836 if (TREE_CODE (var) != SSA_NAME)
837 {
838 var_ann_t ann = get_var_ann (var);
839
840 if (ann->in_vuse_list || ann->in_v_may_def_list)
841 return;
842 ann->in_vuse_list = 1;
843 }
844
845 opbuild_append_virtual (&build_vuses, var);
846 }
847
848
849 /* Add VAR to the list of virtual must definitions. */
850
851 static inline void
852 append_v_must_def (tree var)
853 {
854 unsigned i;
855
856 /* Don't allow duplicate entries. */
857 for (i = 0; i < opbuild_num_elems (&build_v_must_defs); i++)
858 if (var == opbuild_elem_virtual (&build_v_must_defs, i))
859 return;
860
861 opbuild_append_virtual (&build_v_must_defs, var);
862 }
863
864
865 /* Parse STMT looking for operands. OLD_OPS is the original stmt operand
866 cache for STMT, if it existed before. When finished, the various build_*
867 operand vectors will have potential operands in them. */
868
869 static void
870 parse_ssa_operands (tree stmt)
871 {
872 enum tree_code code;
873
874 code = TREE_CODE (stmt);
875 switch (code)
876 {
877 case MODIFY_EXPR:
878 /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
879 either only part of LHS is modified or if the RHS might throw,
880 otherwise, use V_MUST_DEF.
881
882 ??? If it might throw, we should represent somehow that it is killed
883 on the fallthrough path. */
884 {
885 tree lhs = TREE_OPERAND (stmt, 0);
886 int lhs_flags = opf_is_def;
887
888 get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
889
890 /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
891 or not the entire LHS is modified; that depends on what's
892 inside the VIEW_CONVERT_EXPR. */
893 if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
894 lhs = TREE_OPERAND (lhs, 0);
895
896 if (TREE_CODE (lhs) != ARRAY_REF && TREE_CODE (lhs) != ARRAY_RANGE_REF
897 && TREE_CODE (lhs) != BIT_FIELD_REF
898 && TREE_CODE (lhs) != REALPART_EXPR
899 && TREE_CODE (lhs) != IMAGPART_EXPR)
900 lhs_flags |= opf_kill_def;
901
902 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
903 }
904 break;
905
906 case COND_EXPR:
907 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
908 break;
909
910 case SWITCH_EXPR:
911 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
912 break;
913
914 case ASM_EXPR:
915 get_asm_expr_operands (stmt);
916 break;
917
918 case RETURN_EXPR:
919 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
920 break;
921
922 case GOTO_EXPR:
923 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
924 break;
925
926 case LABEL_EXPR:
927 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
928 break;
929
930 /* These nodes contain no variable references. */
931 case BIND_EXPR:
932 case CASE_LABEL_EXPR:
933 case TRY_CATCH_EXPR:
934 case TRY_FINALLY_EXPR:
935 case EH_FILTER_EXPR:
936 case CATCH_EXPR:
937 case RESX_EXPR:
938 break;
939
940 default:
941 /* Notice that if get_expr_operands tries to use &STMT as the operand
942 pointer (which may only happen for USE operands), we will fail in
943 append_use. This default will handle statements like empty
944 statements, or CALL_EXPRs that may appear on the RHS of a statement
945 or as statements themselves. */
946 get_expr_operands (stmt, &stmt, opf_none);
947 break;
948 }
949 }
950
951 /* Create an operands cache for STMT, returning it in NEW_OPS. OLD_OPS are the
952 original operands, and if ANN is non-null, appropriate stmt flags are set
953 in the stmt's annotation. If ANN is NULL, this is not considered a "real"
954 stmt, and none of the operands will be entered into their respective
955 immediate uses tables. This is to allow stmts to be processed when they
956 are not actually in the CFG.
957
958 Note that some fields in old_ops may change to NULL, although none of the
959 memory they originally pointed to will be destroyed. It is appropriate
960 to call free_stmt_operands() on the value returned in old_ops.
961
962 The rationale for this: Certain optimizations wish to examine the difference
963 between new_ops and old_ops after processing. If a set of operands don't
964 change, new_ops will simply assume the pointer in old_ops, and the old_ops
965 pointer will be set to NULL, indicating no memory needs to be cleared.
966 Usage might appear something like:
967
968 old_ops_copy = old_ops = stmt_ann(stmt)->operands;
969 build_ssa_operands (stmt, NULL, &old_ops, &new_ops);
970 <* compare old_ops_copy and new_ops *>
971 free_ssa_operands (old_ops); */
972
973 static void
974 build_ssa_operands (tree stmt)
975 {
976 stmt_ann_t ann = get_stmt_ann (stmt);
977
978 /* Initially assume that the statement has no volatile operands, nor
979 makes aliased loads or stores. */
980 if (ann)
981 {
982 ann->has_volatile_ops = false;
983 ann->makes_aliased_stores = false;
984 ann->makes_aliased_loads = false;
985 }
986
987 start_ssa_stmt_operands ();
988
989 parse_ssa_operands (stmt);
990
991 finalize_ssa_stmt_operands (stmt);
992 }
993
994
995 /* Free any operands vectors in OPS. */
996 #if 0
997 static void
998 free_ssa_operands (stmt_operands_p ops)
999 {
1000 ops->def_ops = NULL;
1001 ops->use_ops = NULL;
1002 ops->maydef_ops = NULL;
1003 ops->mustdef_ops = NULL;
1004 ops->vuse_ops = NULL;
1005 while (ops->memory.next != NULL)
1006 {
1007 operand_memory_p tmp = ops->memory.next;
1008 ops->memory.next = tmp->next;
1009 ggc_free (tmp);
1010 }
1011 }
1012 #endif
1013
1014
1015 /* Get the operands of statement STMT. Note that repeated calls to
1016 get_stmt_operands for the same statement will do nothing until the
1017 statement is marked modified by a call to mark_stmt_modified(). */
1018
1019 void
1020 update_stmt_operands (tree stmt)
1021 {
1022 stmt_ann_t ann = get_stmt_ann (stmt);
1023 /* If get_stmt_operands is called before SSA is initialized, don't
1024 do anything. */
1025 if (!ssa_operands_active ())
1026 return;
1027 /* The optimizers cannot handle statements that are nothing but a
1028 _DECL. This indicates a bug in the gimplifier. */
1029 gcc_assert (!SSA_VAR_P (stmt));
1030
1031 gcc_assert (ann->modified);
1032
1033 timevar_push (TV_TREE_OPS);
1034
1035 build_ssa_operands (stmt);
1036
1037 /* Clear the modified bit for STMT. Subsequent calls to
1038 get_stmt_operands for this statement will do nothing until the
1039 statement is marked modified by a call to mark_stmt_modified(). */
1040 ann->modified = 0;
1041
1042 timevar_pop (TV_TREE_OPS);
1043 }
1044
1045
1046 /* Copies virtual operands from SRC to DEST. */
1047
1048 void
1049 copy_virtual_operands (tree dest, tree src)
1050 {
1051 tree t;
1052 ssa_op_iter iter, old_iter;
1053 use_operand_p use_p, u2;
1054 def_operand_p def_p, d2;
1055
1056 build_ssa_operands (dest);
1057
1058 /* Copy all the virtual fields. */
1059 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
1060 append_vuse (t);
1061 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
1062 append_v_may_def (t);
1063 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
1064 append_v_must_def (t);
1065
1066 if (opbuild_num_elems (&build_vuses) == 0
1067 && opbuild_num_elems (&build_v_may_defs) == 0
1068 && opbuild_num_elems (&build_v_must_defs) == 0)
1069 return;
1070
1071 /* Now commit the virtual operands to this stmt. */
1072 finalize_ssa_v_must_defs (dest);
1073 finalize_ssa_v_may_defs (dest);
1074 finalize_ssa_vuses (dest);
1075
1076 /* Finally, set the fields to the same values as the originals. */
1077
1078
1079 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
1080 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
1081 {
1082 gcc_assert (!op_iter_done (&old_iter));
1083 SET_USE (use_p, t);
1084 t = op_iter_next_tree (&old_iter);
1085 }
1086 gcc_assert (op_iter_done (&old_iter));
1087
1088 op_iter_init_maydef (&old_iter, src, &u2, &d2);
1089 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
1090 {
1091 gcc_assert (!op_iter_done (&old_iter));
1092 SET_USE (use_p, USE_FROM_PTR (u2));
1093 SET_DEF (def_p, DEF_FROM_PTR (d2));
1094 op_iter_next_maymustdef (&u2, &d2, &old_iter);
1095 }
1096 gcc_assert (op_iter_done (&old_iter));
1097
1098 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
1099 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
1100 {
1101 gcc_assert (!op_iter_done (&old_iter));
1102 SET_USE (use_p, USE_FROM_PTR (u2));
1103 SET_DEF (def_p, DEF_FROM_PTR (d2));
1104 op_iter_next_maymustdef (&u2, &d2, &old_iter);
1105 }
1106 gcc_assert (op_iter_done (&old_iter));
1107
1108 }
1109
1110
1111 /* Specifically for use in DOM's expression analysis. Given a store, we
1112 create an artificial stmt which looks like a load from the store; this can
1113 be used to eliminate redundant loads. OLD_OPS are the operands from the
1114 store stmt, and NEW_STMT is the new load which represents a load of the
1115 values stored. */
1116
1117 void
1118 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
1119 {
1120 stmt_ann_t ann;
1121 tree op;
1122 ssa_op_iter iter;
1123 use_operand_p use_p;
1124 unsigned x;
1125
1126 ann = get_stmt_ann (new_stmt);
1127
1128 /* Process the stmt looking for operands. */
1129 start_ssa_stmt_operands ();
1130 parse_ssa_operands (new_stmt);
1131
1132 for (x = 0; x < opbuild_num_elems (&build_vuses); x++)
1133 {
1134 tree t = opbuild_elem_virtual (&build_vuses, x);
1135 if (TREE_CODE (t) != SSA_NAME)
1136 {
1137 var_ann_t ann = var_ann (t);
1138 ann->in_vuse_list = 0;
1139 }
1140 }
1141
1142 for (x = 0; x < opbuild_num_elems (&build_v_may_defs); x++)
1143 {
1144 tree t = opbuild_elem_virtual (&build_v_may_defs, x);
1145 if (TREE_CODE (t) != SSA_NAME)
1146 {
1147 var_ann_t ann = var_ann (t);
1148 ann->in_v_may_def_list = 0;
1149 }
1150 }
1151 /* Remove any virtual operands that were found. */
1152 opbuild_clear (&build_v_may_defs);
1153 opbuild_clear (&build_v_must_defs);
1154 opbuild_clear (&build_vuses);
1155
1156 /* For each VDEF on the original statement, we want to create a
1157 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
1158 statement. */
1159 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
1160 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
1161 append_vuse (op);
1162
1163 /* Now build the operands for this new stmt. */
1164 finalize_ssa_stmt_operands (new_stmt);
1165
1166 /* All uses in this fake stmt must not be in the immediate use lists. */
1167 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
1168 delink_imm_use (use_p);
1169 }
1170
1171 static void
1172 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
1173 {
1174 tree op0, op1;
1175 op0 = *exp0;
1176 op1 = *exp1;
1177
1178 /* If the operand cache is active, attempt to preserve the relative positions
1179 of these two operands in their respective immediate use lists. */
1180 if (ssa_operands_active () && op0 != op1)
1181 {
1182 use_optype_p use0, use1, ptr;
1183 use0 = use1 = NULL;
1184 /* Find the 2 operands in the cache, if they are there. */
1185 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1186 if (USE_OP_PTR (ptr)->use == exp0)
1187 {
1188 use0 = ptr;
1189 break;
1190 }
1191 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1192 if (USE_OP_PTR (ptr)->use == exp1)
1193 {
1194 use1 = ptr;
1195 break;
1196 }
1197 /* If we can't find operand entries for both uses, there isn't much we
1198 can do at this point. Presumably we don't need to worry about it. */
1199 if (use0 && use1)
1200 {
1201 tree *tmp = USE_OP_PTR (use1)->use;
1202 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
1203 USE_OP_PTR (use0)->use = tmp;
1204 }
1205 }
1206
1207 /* Now swap the data. */
1208 *exp0 = op1;
1209 *exp1 = op0;
1210 }
1211
1212
1213 /* Recursively scan the expression pointed to by EXPR_P in statement STMT.
1214 FLAGS is one of the OPF_* constants modifying how to interpret the
1215 operands found. */
1216
1217 static void
1218 get_expr_operands (tree stmt, tree *expr_p, int flags)
1219 {
1220 enum tree_code code;
1221 enum tree_code_class class;
1222 tree expr = *expr_p;
1223 stmt_ann_t s_ann = stmt_ann (stmt);
1224
1225 if (expr == NULL)
1226 return;
1227
1228 code = TREE_CODE (expr);
1229 class = TREE_CODE_CLASS (code);
1230
1231 switch (code)
1232 {
1233 case ADDR_EXPR:
1234 /* We could have the address of a component, array member,
1235 etc which has interesting variable references. */
1236 /* Taking the address of a variable does not represent a
1237 reference to it, but the fact that the stmt takes its address will be
1238 of interest to some passes (e.g. alias resolution). */
1239 add_stmt_operand (expr_p, s_ann, 0);
1240
1241 /* If the address is invariant, there may be no interesting variable
1242 references inside. */
1243 if (is_gimple_min_invariant (expr))
1244 return;
1245
1246 /* There should be no VUSEs created, since the referenced objects are
1247 not really accessed. The only operands that we should find here
1248 are ARRAY_REF indices which will always be real operands (GIMPLE
1249 does not allow non-registers as array indices). */
1250 flags |= opf_no_vops;
1251
1252 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1253 return;
1254
1255 case SSA_NAME:
1256 case VAR_DECL:
1257 case PARM_DECL:
1258 case RESULT_DECL:
1259 case CONST_DECL:
1260 {
1261 subvar_t svars;
1262
1263 /* Add the subvars for a variable if it has subvars, to DEFS or USES.
1264 Otherwise, add the variable itself.
1265 Whether it goes to USES or DEFS depends on the operand flags. */
1266 if (var_can_have_subvars (expr)
1267 && (svars = get_subvars_for_var (expr)))
1268 {
1269 subvar_t sv;
1270 for (sv = svars; sv; sv = sv->next)
1271 add_stmt_operand (&sv->var, s_ann, flags);
1272 }
1273 else
1274 {
1275 add_stmt_operand (expr_p, s_ann, flags);
1276 }
1277 return;
1278 }
1279 case MISALIGNED_INDIRECT_REF:
1280 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1281 /* fall through */
1282
1283 case ALIGN_INDIRECT_REF:
1284 case INDIRECT_REF:
1285 get_indirect_ref_operands (stmt, expr, flags);
1286 return;
1287
1288 case ARRAY_REF:
1289 case ARRAY_RANGE_REF:
1290 /* Treat array references as references to the virtual variable
1291 representing the array. The virtual variable for an ARRAY_REF
1292 is the VAR_DECL for the array. */
1293
1294 /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
1295 according to the value of IS_DEF. Recurse if the LHS of the
1296 ARRAY_REF node is not a regular variable. */
1297 if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
1298 add_stmt_operand (expr_p, s_ann, flags);
1299 else
1300 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1301
1302 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1303 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1304 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1305 return;
1306
1307 case COMPONENT_REF:
1308 case REALPART_EXPR:
1309 case IMAGPART_EXPR:
1310 {
1311 tree ref;
1312 HOST_WIDE_INT offset, size;
1313 /* This component ref becomes an access to all of the subvariables
1314 it can touch, if we can determine that, but *NOT* the real one.
1315 If we can't determine which fields we could touch, the recursion
1316 will eventually get to a variable and add *all* of its subvars, or
1317 whatever is the minimum correct subset. */
1318
1319 ref = okay_component_ref_for_subvars (expr, &offset, &size);
1320 if (ref)
1321 {
1322 subvar_t svars = get_subvars_for_var (ref);
1323 subvar_t sv;
1324 for (sv = svars; sv; sv = sv->next)
1325 {
1326 bool exact;
1327 if (overlap_subvar (offset, size, sv, &exact))
1328 {
1329 if (exact)
1330 flags &= ~opf_kill_def;
1331 add_stmt_operand (&sv->var, s_ann, flags);
1332 }
1333 }
1334 }
1335 else
1336 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1337 flags & ~opf_kill_def);
1338
1339 if (code == COMPONENT_REF)
1340 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1341 return;
1342 }
1343 case WITH_SIZE_EXPR:
1344 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1345 and an rvalue reference to its second argument. */
1346 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1347 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1348 return;
1349
1350 case CALL_EXPR:
1351 get_call_expr_operands (stmt, expr);
1352 return;
1353
1354 case COND_EXPR:
1355 case VEC_COND_EXPR:
1356 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1357 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1358 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1359 return;
1360
1361 case MODIFY_EXPR:
1362 {
1363 int subflags;
1364 tree op;
1365
1366 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1367
1368 op = TREE_OPERAND (expr, 0);
1369 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1370 op = TREE_OPERAND (op, 0);
1371 if (TREE_CODE (op) == ARRAY_REF
1372 || TREE_CODE (op) == ARRAY_RANGE_REF
1373 || TREE_CODE (op) == REALPART_EXPR
1374 || TREE_CODE (op) == IMAGPART_EXPR)
1375 subflags = opf_is_def;
1376 else
1377 subflags = opf_is_def | opf_kill_def;
1378
1379 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
1380 return;
1381 }
1382
1383 case CONSTRUCTOR:
1384 {
1385 /* General aggregate CONSTRUCTORs have been decomposed, but they
1386 are still in use as the COMPLEX_EXPR equivalent for vectors. */
1387
1388 tree t;
1389 for (t = TREE_OPERAND (expr, 0); t ; t = TREE_CHAIN (t))
1390 get_expr_operands (stmt, &TREE_VALUE (t), opf_none);
1391
1392 return;
1393 }
1394
1395 case TRUTH_NOT_EXPR:
1396 case BIT_FIELD_REF:
1397 case VIEW_CONVERT_EXPR:
1398 do_unary:
1399 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1400 return;
1401
1402 case TRUTH_AND_EXPR:
1403 case TRUTH_OR_EXPR:
1404 case TRUTH_XOR_EXPR:
1405 case COMPOUND_EXPR:
1406 case OBJ_TYPE_REF:
1407 case ASSERT_EXPR:
1408 do_binary:
1409 {
1410 tree op0 = TREE_OPERAND (expr, 0);
1411 tree op1 = TREE_OPERAND (expr, 1);
1412
1413 /* If it would be profitable to swap the operands, then do so to
1414 canonicalize the statement, enabling better optimization.
1415
1416 By placing canonicalization of such expressions here we
1417 transparently keep statements in canonical form, even
1418 when the statement is modified. */
1419 if (tree_swap_operands_p (op0, op1, false))
1420 {
1421 /* For relationals we need to swap the operands
1422 and change the code. */
1423 if (code == LT_EXPR
1424 || code == GT_EXPR
1425 || code == LE_EXPR
1426 || code == GE_EXPR)
1427 {
1428 TREE_SET_CODE (expr, swap_tree_comparison (code));
1429 swap_tree_operands (stmt,
1430 &TREE_OPERAND (expr, 0),
1431 &TREE_OPERAND (expr, 1));
1432 }
1433
1434 /* For a commutative operator we can just swap the operands. */
1435 else if (commutative_tree_code (code))
1436 {
1437 swap_tree_operands (stmt,
1438 &TREE_OPERAND (expr, 0),
1439 &TREE_OPERAND (expr, 1));
1440 }
1441 }
1442
1443 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1444 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1445 return;
1446 }
1447
1448 case REALIGN_LOAD_EXPR:
1449 {
1450 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1451 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1452 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
1453 return;
1454 }
1455
1456 case BLOCK:
1457 case FUNCTION_DECL:
1458 case EXC_PTR_EXPR:
1459 case FILTER_EXPR:
1460 case LABEL_DECL:
1461 /* Expressions that make no memory references. */
1462 return;
1463
1464 default:
1465 if (class == tcc_unary)
1466 goto do_unary;
1467 if (class == tcc_binary || class == tcc_comparison)
1468 goto do_binary;
1469 if (class == tcc_constant || class == tcc_type)
1470 return;
1471 }
1472
1473 /* If we get here, something has gone wrong. */
1474 #ifdef ENABLE_CHECKING
1475 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
1476 debug_tree (expr);
1477 fputs ("\n", stderr);
1478 internal_error ("internal error");
1479 #endif
1480 gcc_unreachable ();
1481 }
1482
1483
1484 /* Scan the operands of the ASM_EXPR statement STMT. */
1485
1486 static void
1487 get_asm_expr_operands (tree stmt)
1488 {
1489 stmt_ann_t s_ann = stmt_ann (stmt);
1490 int noutputs = list_length (ASM_OUTPUTS (stmt));
1491 const char **oconstraints
1492 = (const char **) alloca ((noutputs) * sizeof (const char *));
1493 int i;
1494 tree link;
1495 const char *constraint;
1496 bool allows_mem, allows_reg, is_inout;
1497
1498 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1499 {
1500 oconstraints[i] = constraint
1501 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1502 parse_output_constraint (&constraint, i, 0, 0,
1503 &allows_mem, &allows_reg, &is_inout);
1504
1505 /* This should have been split in gimplify_asm_expr. */
1506 gcc_assert (!allows_reg || !is_inout);
1507
1508 /* Memory operands are addressable. Note that STMT needs the
1509 address of this operand. */
1510 if (!allows_reg && allows_mem)
1511 {
1512 tree t = get_base_address (TREE_VALUE (link));
1513 if (t && DECL_P (t))
1514 note_addressable (t, s_ann);
1515 }
1516
1517 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1518 }
1519
1520 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1521 {
1522 constraint
1523 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1524 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1525 oconstraints, &allows_mem, &allows_reg);
1526
1527 /* Memory operands are addressable. Note that STMT needs the
1528 address of this operand. */
1529 if (!allows_reg && allows_mem)
1530 {
1531 tree t = get_base_address (TREE_VALUE (link));
1532 if (t && DECL_P (t))
1533 note_addressable (t, s_ann);
1534 }
1535
1536 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1537 }
1538
1539
1540 /* Clobber memory for asm ("" : : : "memory"); */
1541 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1542 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1543 {
1544 unsigned i;
1545 bitmap_iterator bi;
1546
1547 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1548 decided to group them). */
1549 if (global_var)
1550 add_stmt_operand (&global_var, s_ann, opf_is_def);
1551 else
1552 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1553 {
1554 tree var = referenced_var (i);
1555 add_stmt_operand (&var, s_ann, opf_is_def);
1556 }
1557
1558 /* Now clobber all addressables. */
1559 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1560 {
1561 tree var = referenced_var (i);
1562
1563 /* Subvars are explicitly represented in this list, so
1564 we don't need the original to be added to the clobber
1565 ops, but the original *will* be in this list because
1566 we keep the addressability of the original
1567 variable up-to-date so we don't screw up the rest of
1568 the backend. */
1569 if (var_can_have_subvars (var)
1570 && get_subvars_for_var (var) != NULL)
1571 continue;
1572
1573 add_stmt_operand (&var, s_ann, opf_is_def);
1574 }
1575
1576 break;
1577 }
1578 }
1579
1580 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1581 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. */
1582
1583 static void
1584 get_indirect_ref_operands (tree stmt, tree expr, int flags)
1585 {
1586 tree *pptr = &TREE_OPERAND (expr, 0);
1587 tree ptr = *pptr;
1588 stmt_ann_t s_ann = stmt_ann (stmt);
1589
1590 /* Stores into INDIRECT_REF operands are never killing definitions. */
1591 flags &= ~opf_kill_def;
1592
1593 if (SSA_VAR_P (ptr))
1594 {
1595 struct ptr_info_def *pi = NULL;
1596
1597 /* If PTR has flow-sensitive points-to information, use it. */
1598 if (TREE_CODE (ptr) == SSA_NAME
1599 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1600 && pi->name_mem_tag)
1601 {
1602 /* PTR has its own memory tag. Use it. */
1603 add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
1604 }
1605 else
1606 {
1607 /* If PTR is not an SSA_NAME or it doesn't have a name
1608 tag, use its type memory tag. */
1609 var_ann_t v_ann;
1610
1611 /* If we are emitting debugging dumps, display a warning if
1612 PTR is an SSA_NAME with no flow-sensitive alias
1613 information. That means that we may need to compute
1614 aliasing again. */
1615 if (dump_file
1616 && TREE_CODE (ptr) == SSA_NAME
1617 && pi == NULL)
1618 {
1619 fprintf (dump_file,
1620 "NOTE: no flow-sensitive alias info for ");
1621 print_generic_expr (dump_file, ptr, dump_flags);
1622 fprintf (dump_file, " in ");
1623 print_generic_stmt (dump_file, stmt, dump_flags);
1624 }
1625
1626 if (TREE_CODE (ptr) == SSA_NAME)
1627 ptr = SSA_NAME_VAR (ptr);
1628 v_ann = var_ann (ptr);
1629 if (v_ann->type_mem_tag)
1630 add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
1631 }
1632 }
1633
1634 /* If a constant is used as a pointer, we can't generate a real
1635 operand for it but we mark the statement volatile to prevent
1636 optimizations from messing things up. */
1637 else if (TREE_CODE (ptr) == INTEGER_CST)
1638 {
1639 if (s_ann)
1640 s_ann->has_volatile_ops = true;
1641 return;
1642 }
1643
1644 /* Everything else *should* have been folded elsewhere, but users
1645 are smarter than we are in finding ways to write invalid code. We
1646 cannot just assert here. If we were absolutely certain that we
1647 do handle all valid cases, then we could just do nothing here.
1648 That seems optimistic, so attempt to do something logical... */
1649 else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
1650 && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
1651 && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
1652 {
1653 /* Make sure we know the object is addressable. */
1654 pptr = &TREE_OPERAND (ptr, 0);
1655 add_stmt_operand (pptr, s_ann, 0);
1656
1657 /* Mark the object itself with a VUSE. */
1658 pptr = &TREE_OPERAND (*pptr, 0);
1659 get_expr_operands (stmt, pptr, flags);
1660 return;
1661 }
1662
1663 /* OK, this isn't even is_gimple_min_invariant. Something's broken. */
1664 else
1665 gcc_unreachable ();
1666
1667 /* Add a USE operand for the base pointer. */
1668 get_expr_operands (stmt, pptr, opf_none);
1669 }
1670
1671 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1672
1673 static void
1674 get_call_expr_operands (tree stmt, tree expr)
1675 {
1676 tree op;
1677 int call_flags = call_expr_flags (expr);
1678
1679 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1680 operands for all the symbols that have been found to be
1681 call-clobbered.
1682
1683 Note that if aliases have not been computed, the global effects
1684 of calls will not be included in the SSA web. This is fine
1685 because no optimizer should run before aliases have been
1686 computed. By not bothering with virtual operands for CALL_EXPRs
1687 we avoid adding superfluous virtual operands, which can be a
1688 significant compile time sink (See PR 15855). */
1689 if (aliases_computed_p
1690 && !bitmap_empty_p (call_clobbered_vars)
1691 && !(call_flags & ECF_NOVOPS))
1692 {
1693 /* A 'pure' or a 'const' function never call-clobbers anything.
1694 A 'noreturn' function might, but since we don't return anyway
1695 there is no point in recording that. */
1696 if (TREE_SIDE_EFFECTS (expr)
1697 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1698 add_call_clobber_ops (stmt);
1699 else if (!(call_flags & ECF_CONST))
1700 add_call_read_ops (stmt);
1701 }
1702
1703 /* Find uses in the called function. */
1704 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1705
1706 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1707 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1708
1709 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1710
1711 }
1712
1713
1714 /* Add *VAR_P to the appropriate operand array of S_ANN's stmt. FLAGS is as in
1715 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1716 the statement's real operands, otherwise it is added to virtual
1717 operands. */
1718
1719 static void
1720 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1721 {
1722 bool is_real_op;
1723 tree var, sym;
1724 var_ann_t v_ann;
1725
1726 var = *var_p;
1727 STRIP_NOPS (var);
1728
1729 /* If the operand is an ADDR_EXPR, add its operand to the list of
1730 variables that have had their address taken in this statement. */
1731 if (TREE_CODE (var) == ADDR_EXPR)
1732 {
1733 note_addressable (TREE_OPERAND (var, 0), s_ann);
1734 return;
1735 }
1736
1737 /* If the original variable is not a scalar, it will be added to the list
1738 of virtual operands. In that case, use its base symbol as the virtual
1739 variable representing it. */
1740 is_real_op = is_gimple_reg (var);
1741 if (!is_real_op && !DECL_P (var))
1742 var = get_virtual_var (var);
1743
1744 /* If VAR is not a variable that we care to optimize, do nothing. */
1745 if (var == NULL_TREE || !SSA_VAR_P (var))
1746 return;
1747
1748 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1749 v_ann = var_ann (sym);
1750
1751 /* Mark statements with volatile operands. Optimizers should back
1752 off from statements having volatile operands. */
1753 if (TREE_THIS_VOLATILE (sym) && s_ann)
1754 s_ann->has_volatile_ops = true;
1755
1756 /* If the variable cannot be modified and this is a V_MAY_DEF change
1757 it into a VUSE. This happens when read-only variables are marked
1758 call-clobbered and/or aliased to writeable variables. So we check
1759 that this only happens on stores, and not on writes to GIMPLE
1760 registers.
1761
1762 FIXME: The C++ FE is emitting assignments in the IL stream for
1763 read-only globals. This is wrong, but for the time being disable
1764 this transformation on V_MUST_DEF operands (otherwise, we
1765 mis-optimize SPEC2000's eon). */
1766 if ((flags & opf_is_def)
1767 && !(flags & opf_kill_def)
1768 && unmodifiable_var_p (var))
1769 {
1770 gcc_assert (!is_real_op);
1771 flags &= ~opf_is_def;
1772 }
1773
1774 if (is_real_op)
1775 {
1776 /* The variable is a GIMPLE register. Add it to real operands. */
1777 if (flags & opf_is_def)
1778 append_def (var_p);
1779 else
1780 append_use (var_p);
1781 }
1782 else
1783 {
1784 varray_type aliases;
1785
1786 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1787 virtual operands, unless the caller has specifically requested
1788 not to add virtual operands (used when adding operands inside an
1789 ADDR_EXPR expression). */
1790 if (flags & opf_no_vops)
1791 return;
1792
1793 aliases = v_ann->may_aliases;
1794
1795 if (aliases == NULL)
1796 {
1797 /* The variable is not aliased or it is an alias tag. */
1798 if (flags & opf_is_def)
1799 {
1800 if (flags & opf_kill_def)
1801 {
1802 /* Only regular variables or struct fields may get a
1803 V_MUST_DEF operand. */
1804 gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG
1805 || v_ann->mem_tag_kind == STRUCT_FIELD);
1806 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1807 variable definitions. */
1808 append_v_must_def (var);
1809 }
1810 else
1811 {
1812 /* Add a V_MAY_DEF for call-clobbered variables and
1813 memory tags. */
1814 append_v_may_def (var);
1815 }
1816 }
1817 else
1818 {
1819 append_vuse (var);
1820 if (s_ann && v_ann->is_alias_tag)
1821 s_ann->makes_aliased_loads = 1;
1822 }
1823 }
1824 else
1825 {
1826 size_t i;
1827
1828 /* The variable is aliased. Add its aliases to the virtual
1829 operands. */
1830 gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);
1831
1832 if (flags & opf_is_def)
1833 {
1834 bool added_may_defs_p = false;
1835
1836 /* If the variable is also an alias tag, add a virtual
1837 operand for it, otherwise we will miss representing
1838 references to the members of the variable's alias set.
1839 This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
1840 if (v_ann->is_alias_tag)
1841 {
1842 added_may_defs_p = true;
1843 append_v_may_def (var);
1844 }
1845
1846 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1847 {
1848 /* While VAR may be modifiable, some of its aliases
1849 may not be. If that's the case, we don't really
1850 need to add a V_MAY_DEF for them. */
1851 tree alias = VARRAY_TREE (aliases, i);
1852
1853 if (unmodifiable_var_p (alias))
1854 append_vuse (alias);
1855 else
1856 {
1857 append_v_may_def (alias);
1858 added_may_defs_p = true;
1859 }
1860 }
1861
1862 if (s_ann && added_may_defs_p)
1863 s_ann->makes_aliased_stores = 1;
1864 }
1865 else
1866 {
1867 /* Similarly, append a virtual use for VAR itself, when
1868 it is an alias tag. */
1869 if (v_ann->is_alias_tag)
1870 append_vuse (var);
1871
1872 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1873 append_vuse (VARRAY_TREE (aliases, i));
1874
1875 if (s_ann)
1876 s_ann->makes_aliased_loads = 1;
1877 }
1878 }
1879 }
1880 }
1881
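/* A hedged example of the dispatch above.  Assuming 'i' is a GIMPLE
   register and '*p' is represented by a memory tag whose may-alias
   set is { a, b }:

       i_3 = i_2 + 1;    real operands only: DEF (i), USE (i)
       *p = i_3;         besides USE (i), virtual V_MAY_DEF (a) and V_MAY_DEF (b)

   Stores to a non-aliased, non-register variable get a V_MUST_DEF
   instead, and aliases that are unmodifiable receive a VUSE rather
   than a V_MAY_DEF.  */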
1882
1883 /* Record that VAR had its address taken in the statement with annotations
1884 S_ANN. */
1885
1886 static void
1887 note_addressable (tree var, stmt_ann_t s_ann)
1888 {
1889 tree ref;
1890 subvar_t svars;
1891 HOST_WIDE_INT offset;
1892 HOST_WIDE_INT size;
1893
1894 if (!s_ann)
1895 return;
1896
1897 /* If this is a COMPONENT_REF, and we know exactly what it touches, we only
1898 take the address of the subvariables it will touch.
1899 Otherwise, we take the address of all its subvariables, or of the
1900 variable itself if it has none. */
1901
1902 if (var && TREE_CODE (var) == COMPONENT_REF
1903 && (ref = okay_component_ref_for_subvars (var, &offset, &size)))
1904 {
1905 subvar_t sv;
1906 svars = get_subvars_for_var (ref);
1907
1908 if (s_ann->addresses_taken == NULL)
1909 s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
1910
1911 for (sv = svars; sv; sv = sv->next)
1912 {
1913 if (overlap_subvar (offset, size, sv, NULL))
1914 bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid);
1915 }
1916 return;
1917 }
1918
1919 var = get_base_address (var);
1920 if (var && SSA_VAR_P (var))
1921 {
1922 if (s_ann->addresses_taken == NULL)
1923 s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
1924
1925
1926 if (var_can_have_subvars (var)
1927 && (svars = get_subvars_for_var (var)))
1928 {
1929 subvar_t sv;
1930 for (sv = svars; sv; sv = sv->next)
1931 bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid);
1932 }
1933 else
1934 bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
1935 }
1936 }
1937
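/* For illustration (assuming a structure 's' whose fields 'a' and 'b'
   have been given subvariables by the structure-field analysis):
   taking '&s.a' sets the addresses_taken bit only for the subvariable
   overlapping field 'a', while taking '&s' marks every subvariable of
   's'.  A plain variable without subvariables simply has its own uid
   marked.  */
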
1938 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1939 clobbered variables in the function. */
1940
1941 static void
1942 add_call_clobber_ops (tree stmt)
1943 {
1944 int i;
1945 unsigned u;
1946 tree t;
1947 bitmap_iterator bi;
1948 stmt_ann_t s_ann = stmt_ann (stmt);
1949 struct stmt_ann_d empty_ann;
1950
1951 /* Functions that are neither const, pure nor noreturn may clobber
1952 call-clobbered variables. */
1953 if (s_ann)
1954 s_ann->makes_clobbering_call = true;
1955
1956 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1957 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1958 if (global_var)
1959 {
1960 add_stmt_operand (&global_var, s_ann, opf_is_def);
1961 return;
1962 }
1963
1964 /* If the cache is valid, copy the elements into the build vectors. */
1965 if (ssa_call_clobbered_cache_valid)
1966 {
1967 /* Process the caches in reverse order so we are always inserting at
1968 the head of the list. */
1969 for (i = VEC_length (tree, clobbered_vuses) - 1; i >= 0; i--)
1970 {
1971 t = VEC_index (tree, clobbered_vuses, i);
1972 gcc_assert (TREE_CODE (t) != SSA_NAME);
1973 var_ann (t)->in_vuse_list = 1;
1974 opbuild_append_virtual (&build_vuses, t);
1975 }
1976 for (i = VEC_length (tree, clobbered_v_may_defs) - 1; i >= 0; i--)
1977 {
1978 t = VEC_index (tree, clobbered_v_may_defs, i);
1979 gcc_assert (TREE_CODE (t) != SSA_NAME);
1980 var_ann (t)->in_v_may_def_list = 1;
1981 opbuild_append_virtual (&build_v_may_defs, t);
1982 }
1983 if (s_ann)
1984 {
1985 s_ann->makes_aliased_loads = clobbered_aliased_loads;
1986 s_ann->makes_aliased_stores = clobbered_aliased_stores;
1987 }
1988 return;
1989 }
1990
1991 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
1992
1993 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1994 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1995 {
1996 tree var = referenced_var (u);
1997 if (unmodifiable_var_p (var))
1998 add_stmt_operand (&var, &empty_ann, opf_none);
1999 else
2000 add_stmt_operand (&var, &empty_ann, opf_is_def);
2001 }
2002
2003 clobbered_aliased_loads = empty_ann.makes_aliased_loads;
2004 clobbered_aliased_stores = empty_ann.makes_aliased_stores;
2005
2006 /* Set the flags for a stmt's annotation. */
2007 if (s_ann)
2008 {
2009 s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
2010 s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
2011 }
2012
2013 /* Prepare empty cache vectors. */
2014 VEC_truncate (tree, clobbered_vuses, 0);
2015 VEC_truncate (tree, clobbered_v_may_defs, 0);
2016
2017 /* Now fill the clobbered cache with the values that have been found. */
2018 for (i = opbuild_first (&build_vuses);
2019 i != OPBUILD_LAST;
2020 i = opbuild_next (&build_vuses, i))
2021 VEC_safe_push (tree, heap, clobbered_vuses,
2022 opbuild_elem_virtual (&build_vuses, i));
2023
2024 gcc_assert (opbuild_num_elems (&build_vuses)
2025 == VEC_length (tree, clobbered_vuses));
2026
2027 for (i = opbuild_first (&build_v_may_defs);
2028 i != OPBUILD_LAST;
2029 i = opbuild_next (&build_v_may_defs, i))
2030 VEC_safe_push (tree, heap, clobbered_v_may_defs,
2031 opbuild_elem_virtual (&build_v_may_defs, i));
2032
2033 gcc_assert (opbuild_num_elems (&build_v_may_defs)
2034 == VEC_length (tree, clobbered_v_may_defs));
2035
2036 ssa_call_clobbered_cache_valid = true;
2037 }
2038
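/* Sketch of the caching done above, for a hypothetical function whose
   call-clobbered variables are { x, y }: the first call statement
   scanned computes V_MAY_DEF (x) and V_MAY_DEF (y), copies them into
   clobbered_v_may_defs (and any VUSEs into clobbered_vuses) and sets
   ssa_call_clobbered_cache_valid.  Every later call statement then
   copies the cached trees straight into the build vectors instead of
   walking call_clobbered_vars again, until the cache is invalidated
   elsewhere.  */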
2039
2040 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
2041 function. */
2042
2043 static void
2044 add_call_read_ops (tree stmt)
2045 {
2046 int i;
2047 unsigned u;
2048 tree t;
2049 bitmap_iterator bi;
2050 stmt_ann_t s_ann = stmt_ann (stmt);
2051 struct stmt_ann_d empty_ann;
2052
2053 /* If the function is not pure, it may reference memory. Add
2054 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
2055 for the heuristic used to decide whether to create .GLOBAL_VAR. */
2056 if (global_var)
2057 {
2058 add_stmt_operand (&global_var, s_ann, opf_none);
2059 return;
2060 }
2061
2062 /* If the cache is valid, copy the elements into the build vector. */
2063 if (ssa_ro_call_cache_valid)
2064 {
2065 /* Process the cache in reverse order so we are always inserting at
2066 the head of the list. */
2067 for (i = VEC_length (tree, ro_call_vuses) - 1; i >= 0; i--)
2068 {
2069 t = VEC_index (tree, ro_call_vuses, i);
2070 gcc_assert (TREE_CODE (t) != SSA_NAME);
2071 var_ann (t)->in_vuse_list = 1;
2072 opbuild_append_virtual (&build_vuses, t);
2073 }
2074 if (s_ann)
2075 s_ann->makes_aliased_loads = ro_call_aliased_loads;
2076 return;
2077 }
2078
2079 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
2080
2081 /* Add a VUSE for each call-clobbered variable. */
2082 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
2083 {
2084 tree var = referenced_var (u);
2085 add_stmt_operand (&var, &empty_ann, opf_none);
2086 }
2087
2088 ro_call_aliased_loads = empty_ann.makes_aliased_loads;
2089 if (s_ann)
2090 s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
2091
2092 /* Prepare empty cache vectors. */
2093 VEC_truncate (tree, ro_call_vuses, 0);
2094
2095 /* Now fill the read-only call cache with the values that have been found. */
2096 for (i = opbuild_first (&build_vuses);
2097 i != OPBUILD_LAST;
2098 i = opbuild_next (&build_vuses, i))
2099 VEC_safe_push (tree, heap, ro_call_vuses,
2100 opbuild_elem_virtual (&build_vuses, i));
2101
2102 gcc_assert (opbuild_num_elems (&build_vuses)
2103 == VEC_length (tree, ro_call_vuses));
2104
2105 ssa_ro_call_cache_valid = true;
2106 }
2107
2108
2109 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2110 Return TRUE if there is a problem. */
2111
2112 bool
2113 verify_imm_links (FILE *f, tree var)
2114 {
2115 use_operand_p ptr, prev, list;
2116 int count;
2117
2118 gcc_assert (TREE_CODE (var) == SSA_NAME);
2119
2120 list = &(SSA_NAME_IMM_USE_NODE (var));
2121 gcc_assert (list->use == NULL);
2122
2123 if (list->prev == NULL)
2124 {
2125 gcc_assert (list->next == NULL);
2126 return false;
2127 }
2128
2129 prev = list;
2130 count = 0;
2131 for (ptr = list->next; ptr != list; )
2132 {
2133 if (prev != ptr->prev)
2134 goto error;
2135
2136 if (ptr->use == NULL)
2137 goto error; /* 2 roots, or SAFE guard node. */
2138 else if (*(ptr->use) != var)
2139 goto error;
2140
2141 prev = ptr;
2142 ptr = ptr->next;
2143 /* Avoid infinite loops. */
2144 if (count++ > 30000)
2145 goto error;
2146 }
2147
2148 /* Verify list in the other direction. */
2149 prev = list;
2150 for (ptr = list->prev; ptr != list; )
2151 {
2152 if (prev != ptr->next)
2153 goto error;
2154 prev = ptr;
2155 ptr = ptr->prev;
2156 if (count-- < 0)
2157 goto error;
2158 }
2159
2160 if (count != 0)
2161 goto error;
2162
2163 return false;
2164
2165 error:
2166 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2167 {
2168 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2169 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2170 }
2171 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2172 (void *)ptr->use);
2173 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2174 fprintf (f, "\n");
2175 return true;
2176 }
2177
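/* The structure being verified is a circular, doubly linked list
   rooted at SSA_NAME_IMM_USE_NODE (VAR); the root is the only node
   whose 'use' field is NULL, and an empty list has NULL next/prev
   pointers.  Schematically, for an SSA name with two uses:

       root <-> use1 <-> use2 <-> root

   where each non-root node's 'use' field points at the operand slot
   in its statement that holds VAR.  */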
2178
2179 /* Dump all the immediate uses of VAR to FILE. */
2180
2181 void
2182 dump_immediate_uses_for (FILE *file, tree var)
2183 {
2184 imm_use_iterator iter;
2185 use_operand_p use_p;
2186
2187 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2188
2189 print_generic_expr (file, var, TDF_SLIM);
2190 fprintf (file, " : -->");
2191 if (has_zero_uses (var))
2192 fprintf (file, " no uses.\n");
2193 else
2194 if (has_single_use (var))
2195 fprintf (file, " single use.\n");
2196 else
2197 fprintf (file, "%d uses.\n", num_imm_uses (var));
2198
2199 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2200 {
2201 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2202 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2203 else
2204 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2205 }
2206 fprintf (file, "\n");
2207 }
2208
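/* A minimal usage sketch for the immediate-use iterator employed
   above (the name 'process_stmt' below is hypothetical):

       imm_use_iterator iter;
       use_operand_p use_p;

       FOR_EACH_IMM_USE_FAST (use_p, iter, name)
         process_stmt (USE_STMT (use_p));

   visits every statement containing a use of the SSA name NAME.  The
   "fast" form is intended for walks that do not modify the use list
   while iterating.  */
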
2209 /* Dump all the immediate uses to FILE. */
2210
2211 void
2212 dump_immediate_uses (FILE *file)
2213 {
2214 tree var;
2215 unsigned int x;
2216
2217 fprintf (file, "Immediate_uses: \n\n");
2218 for (x = 1; x < num_ssa_names; x++)
2219 {
2220 var = ssa_name(x);
2221 if (!var)
2222 continue;
2223 dump_immediate_uses_for (file, var);
2224 }
2225 }
2226
2227
2228 /* Dump def-use edges on stderr. */
2229
2230 void
2231 debug_immediate_uses (void)
2232 {
2233 dump_immediate_uses (stderr);
2234 }
2235
2236 /* Dump def-use edges of VAR on stderr. */
2237
2238 void
2239 debug_immediate_uses_for (tree var)
2240 {
2241 dump_immediate_uses_for (stderr, var);
2242 }
2243 #include "gt-tree-ssa-operands.h"