1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35
36 #include "langhooks.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may, or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53 The operand tree is then parsed by the various get_* routines, which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
59
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
62
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
66
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had an SSA_NAME version of a
72 variable, and that same variable occurs in the new operands cache, then
73 the new cache vector will also get the same SSA_NAME.
74
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'.
78
79 */
80
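/* A rough usage sketch (illustrative only; the iterator macros and
   mark_stmt_modified are declared in the SSA headers, not in this file):
   a pass that has just rewritten a statement would typically refresh the
   cache and then walk it along these lines:

	tree var;
	ssa_op_iter iter;

	mark_stmt_modified (stmt);
	update_stmt_operands (stmt);
	FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_VUSE)
	  process_vuse (var);

   where process_vuse stands for whatever the pass does with each virtual
   use. */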
81
82 /* Flags to describe operand properties in helpers. */
83
84 /* By default, operands are loaded. */
85 #define opf_none 0
86
87 /* Operand is the target of an assignment expression or a
88 call-clobbered variable */
89 #define opf_is_def (1 << 0)
90
91 /* Operand is the target of an assignment expression. */
92 #define opf_kill_def (1 << 1)
93
94 /* No virtual operands should be created in the expression. This is used
95 when traversing ADDR_EXPR nodes which have different semantics than
96 other expressions. Inside an ADDR_EXPR node, the only operands that we
97 need to consider are indices into arrays. For instance, &a.b[i] should
98 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
99 VUSE for 'b'. */
100 #define opf_no_vops (1 << 2)
101
102 /* Operand is a "non-specific" kill for call-clobbers and such. This is used
103 to distinguish "reset the world" events from explicit MODIFY_EXPRs. */
104 #define opf_non_specific (1 << 3)
105
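/* For example (see parse_ssa_operands below), the LHS of a plain
   'a = b' assignment is scanned with 'opf_is_def | opf_kill_def', while
   the LHS of 'a[i] = b' is scanned with only 'opf_is_def', since just
   part of 'a' is modified. */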
106 /* This structure maintains a sorted list of operands which is created by
107 parse_ssa_operands. */
108 struct opbuild_list_d GTY (())
109 {
110 varray_type vars; /* The VAR_DECLS tree. */
111 varray_type uid; /* The sort value for virtual symbols. */
112 varray_type next; /* The next index in the sorted list. */
113 int first; /* First element in list. */
114 unsigned num; /* Number of elements. */
115 };
116
117 #define OPBUILD_LAST -1
118
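/* An illustrative example of the threaded layout (the UIDs are made up):
   appending virtual operands whose base DECL_UIDs arrive in the order
   7, 3, 9 leaves the varrays as

	vars: [ v7, v3, v9 ]	uid: [ 7, 3, 9 ]
	next: [ 2, 0, OPBUILD_LAST ]	first = 1

   so a walk starts at index 1 (uid 3), follows next[1] == 0 (uid 7), then
   next[0] == 2 (uid 9), and stops at OPBUILD_LAST. */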
119
120 /* Array for building all the def operands. */
121 static GTY (()) struct opbuild_list_d build_defs;
122
123 /* Array for building all the use operands. */
124 static GTY (()) struct opbuild_list_d build_uses;
125
126 /* Array for building all the v_may_def operands. */
127 static GTY (()) struct opbuild_list_d build_v_may_defs;
128
129 /* Array for building all the vuse operands. */
130 static GTY (()) struct opbuild_list_d build_vuses;
131
132 /* Array for building all the v_must_def operands. */
133 static GTY (()) struct opbuild_list_d build_v_must_defs;
134
135 /* True if the operands for call clobbered vars are cached and valid. */
136 bool ssa_call_clobbered_cache_valid;
137 bool ssa_ro_call_cache_valid;
138
139 /* These arrays are the cached operand vectors for call clobbered calls. */
140 static VEC(tree,heap) *clobbered_v_may_defs;
141 static VEC(tree,heap) *clobbered_vuses;
142 static VEC(tree,heap) *ro_call_vuses;
143 static bool clobbered_aliased_loads;
144 static bool clobbered_aliased_stores;
145 static bool ro_call_aliased_loads;
146 static bool ops_active = false;
147
148 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
149 static unsigned operand_memory_index;
150
151 static void note_addressable (tree, stmt_ann_t);
152 static void get_expr_operands (tree, tree *, int);
153 static void get_asm_expr_operands (tree);
154 static void get_indirect_ref_operands (tree, tree, int);
155 static void get_tmr_operands (tree, tree, int);
156 static void get_call_expr_operands (tree, tree);
157 static inline void append_def (tree *);
158 static inline void append_use (tree *);
159 static void append_v_may_def (tree);
160 static void append_v_must_def (tree);
161 static void add_call_clobber_ops (tree);
162 static void add_call_read_ops (tree);
163 static void add_stmt_operand (tree *, stmt_ann_t, int);
164 static void build_ssa_operands (tree stmt);
165
166 static def_optype_p free_defs = NULL;
167 static use_optype_p free_uses = NULL;
168 static vuse_optype_p free_vuses = NULL;
169 static maydef_optype_p free_maydefs = NULL;
170 static mustdef_optype_p free_mustdefs = NULL;
171
172 /* Initialize a virtual operand build LIST called NAME with NUM elements. */
173
174 static inline void
175 opbuild_initialize_virtual (struct opbuild_list_d *list, int num,
176 const char *name)
177 {
178 list->first = OPBUILD_LAST;
179 list->num = 0;
180 VARRAY_TREE_INIT (list->vars, num, name);
181 VARRAY_UINT_INIT (list->uid, num, "List UID");
182 VARRAY_INT_INIT (list->next, num, "List NEXT");
183 }
184
185
186 /* Initialize a real operand build LIST called NAME with NUM elements. */
187
188 static inline void
189 opbuild_initialize_real (struct opbuild_list_d *list, int num, const char *name)
190 {
191 list->first = OPBUILD_LAST;
192 list->num = 0;
193 VARRAY_TREE_PTR_INIT (list->vars, num, name);
194 VARRAY_INT_INIT (list->next, num, "List NEXT");
195 /* The UID field is not needed since we sort based on the pointer value. */
196 list->uid = NULL;
197 }
198
199
200 /* Free memory used in virtual operand build object LIST. */
201
202 static inline void
203 opbuild_free (struct opbuild_list_d *list)
204 {
205 list->vars = NULL;
206 list->uid = NULL;
207 list->next = NULL;
208 }
209
210
211 /* Number of elements in an opbuild list. */
212
213 static inline unsigned
214 opbuild_num_elems (struct opbuild_list_d *list)
215 {
216 return list->num;
217 }
218
219
220 /* Add VAR to the real operand list LIST, avoiding duplicates. Real operands
221 are simply appended in the order they are found; no sorting is done. */
222
223 static inline void
224 opbuild_append_real (struct opbuild_list_d *list, tree *var)
225 {
226 int index;
227
228 #ifdef ENABLE_CHECKING
229 /* Ensure the real operand doesn't exist already. */
230 for (index = list->first;
231 index != OPBUILD_LAST;
232 index = VARRAY_INT (list->next, index))
233 gcc_assert (VARRAY_TREE_PTR (list->vars, index) != var);
234 #endif
235
236 /* First item in the list. */
237 index = VARRAY_ACTIVE_SIZE (list->vars);
238 if (index == 0)
239 list->first = index;
240 else
241 VARRAY_INT (list->next, index - 1) = index;
242 VARRAY_PUSH_INT (list->next, OPBUILD_LAST);
243 VARRAY_PUSH_TREE_PTR (list->vars, var);
244 list->num++;
245 }
246
247
248 /* Add VAR to the virtual operand list LIST, keeping it sorted and avoiding
249 duplicates. The actual sort value is the DECL UID of the base variable. */
250
251 static inline void
252 opbuild_append_virtual (struct opbuild_list_d *list, tree var)
253 {
254 int index, curr, last;
255 unsigned int var_uid;
256
257 if (TREE_CODE (var) != SSA_NAME)
258 var_uid = DECL_UID (var);
259 else
260 var_uid = DECL_UID (SSA_NAME_VAR (var));
261
262 index = VARRAY_ACTIVE_SIZE (list->vars);
263
264 if (index == 0)
265 {
266 VARRAY_PUSH_TREE (list->vars, var);
267 VARRAY_PUSH_UINT (list->uid, var_uid);
268 VARRAY_PUSH_INT (list->next, OPBUILD_LAST);
269 list->first = 0;
270 list->num = 1;
271 return;
272 }
273
274 last = OPBUILD_LAST;
275 /* Find the correct spot in the sorted list. */
276 for (curr = list->first;
277 curr != OPBUILD_LAST;
278 last = curr, curr = VARRAY_INT (list->next, curr))
279 {
280 if (VARRAY_UINT (list->uid, curr) > var_uid)
281 break;
282 }
283
284 if (last == OPBUILD_LAST)
285 {
286 /* First item in the list. */
287 VARRAY_PUSH_INT (list->next, list->first);
288 list->first = index;
289 }
290 else
291 {
292 /* Don't enter duplicates at all. */
293 if (VARRAY_UINT (list->uid, last) == var_uid)
294 return;
295
296 VARRAY_PUSH_INT (list->next, VARRAY_INT (list->next, last));
297 VARRAY_INT (list->next, last) = index;
298 }
299 VARRAY_PUSH_TREE (list->vars, var);
300 VARRAY_PUSH_UINT (list->uid, var_uid);
301 list->num++;
302 }
303
304
305 /* Return the first element index in LIST. OPBUILD_LAST means there are no
306 more elements. */
307
308 static inline int
309 opbuild_first (struct opbuild_list_d *list)
310 {
311 if (list->num > 0)
312 return list->first;
313 else
314 return OPBUILD_LAST;
315 }
316
317
318 /* Return the next element after PREV in LIST. */
319
320 static inline int
321 opbuild_next (struct opbuild_list_d *list, int prev)
322 {
323 return VARRAY_INT (list->next, prev);
324 }
325
326
327 /* Return the real element at index ELEM in LIST. */
328
329 static inline tree *
330 opbuild_elem_real (struct opbuild_list_d *list, int elem)
331 {
332 return VARRAY_TREE_PTR (list->vars, elem);
333 }
334
335
336 /* Return the virtual element at index ELEM in LIST. */
337
338 static inline tree
339 opbuild_elem_virtual (struct opbuild_list_d *list, int elem)
340 {
341 return VARRAY_TREE (list->vars, elem);
342 }
343
344
345 /* Return the virtual element uid at index ELEM in LIST. */
346 static inline unsigned int
347 opbuild_elem_uid (struct opbuild_list_d *list, int elem)
348 {
349 return VARRAY_UINT (list->uid, elem);
350 }
351
352
353 /* Reset an operand build list. */
354
355 static inline void
356 opbuild_clear (struct opbuild_list_d *list)
357 {
358 list->first = OPBUILD_LAST;
359 VARRAY_POP_ALL (list->vars);
360 VARRAY_POP_ALL (list->next);
361 if (list->uid)
362 VARRAY_POP_ALL (list->uid);
363 list->num = 0;
364 }
365
366
367 /* Remove ELEM from LIST where PREV is the previous element. Return the next
368 element. */
369
370 static inline int
371 opbuild_remove_elem (struct opbuild_list_d *list, int elem, int prev)
372 {
373 int ret;
374 if (prev != OPBUILD_LAST)
375 {
376 gcc_assert (VARRAY_INT (list->next, prev) == elem);
377 ret = VARRAY_INT (list->next, prev) = VARRAY_INT (list->next, elem);
378 }
379 else
380 {
381 gcc_assert (list->first == elem);
382 ret = list->first = VARRAY_INT (list->next, elem);
383 }
384 list->num--;
385 return ret;
386 }
387
388
389 /* Return true if the ssa operands cache is active. */
390
391 bool
392 ssa_operands_active (void)
393 {
394 return ops_active;
395 }
396
397
398 /* Initialize the operand cache routines. */
399
400 void
401 init_ssa_operands (void)
402 {
403 opbuild_initialize_real (&build_defs, 5, "build defs");
404 opbuild_initialize_real (&build_uses, 10, "build uses");
405 opbuild_initialize_virtual (&build_vuses, 25, "build_vuses");
406 opbuild_initialize_virtual (&build_v_may_defs, 25, "build_v_may_defs");
407 opbuild_initialize_virtual (&build_v_must_defs, 25, "build_v_must_defs");
408 gcc_assert (operand_memory == NULL);
409 operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
410 ops_active = true;
411 }
412
413
414 /* Dispose of anything required by the operand routines. */
415
416 void
417 fini_ssa_operands (void)
418 {
419 struct ssa_operand_memory_d *ptr;
420 opbuild_free (&build_defs);
421 opbuild_free (&build_uses);
422 opbuild_free (&build_v_must_defs);
423 opbuild_free (&build_v_may_defs);
424 opbuild_free (&build_vuses);
425 free_defs = NULL;
426 free_uses = NULL;
427 free_vuses = NULL;
428 free_maydefs = NULL;
429 free_mustdefs = NULL;
430 while ((ptr = operand_memory) != NULL)
431 {
432 operand_memory = operand_memory->next;
433 ggc_free (ptr);
434 }
435
436 VEC_free (tree, heap, clobbered_v_may_defs);
437 VEC_free (tree, heap, clobbered_vuses);
438 VEC_free (tree, heap, ro_call_vuses);
439 ops_active = false;
440 }
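
/* A minimal lifecycle sketch (the callers named here live outside this
   file, so this is an assumption about usage rather than a contract):
   the SSA builder calls init_ssa_operands () once before any statement
   is scanned, passes guard operand queries with ssa_operands_active (),
   and fini_ssa_operands () releases all operand memory when the SSA
   form is torn down. */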
441
442
443 /* Return a pointer to SIZE bytes of memory from the operand memory pool. */
444
445 static inline void *
446 ssa_operand_alloc (unsigned size)
447 {
448 char *ptr;
449 if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
450 {
451 struct ssa_operand_memory_d *ptr;
452 ptr = ggc_alloc (sizeof (struct ssa_operand_memory_d));
453 ptr->next = operand_memory;
454 operand_memory = ptr;
455 operand_memory_index = 0;
456 }
457 ptr = &(operand_memory->mem[operand_memory_index]);
458 operand_memory_index += size;
459 return ptr;
460 }
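
/* For instance (a sketch only; the real allocation calls are produced by
   the template in tree-ssa-opfinalize.h), carving out one def node would
   look like:

	struct def_optype_d *ptr
	  = ssa_operand_alloc (sizeof (struct def_optype_d));

   Nodes are never freed individually; recycled nodes go on the free_*
   lists above, and whole segments are returned to the GC only by
   fini_ssa_operands (). */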
461
462
463 /* Make sure PTR is in the correct immediate use list. Since uses are simply
464 pointers into the stmt TREE, there is no way of telling if anyone has
465 changed what this pointer points to via TREE_OPERAND (exp, 0) = <...>.
466 The contents are different, but the pointer is still the same. This
467 routine will check to make sure PTR is in the correct list, and if it isn't,
468 put it in the correct list. We cannot simply check the previous node
469 because all nodes in the same stmt might have been changed. */
470
471 static inline void
472 correct_use_link (use_operand_p ptr, tree stmt)
473 {
474 use_operand_p prev;
475 tree root;
476
477 /* fold_stmt () may have changed the stmt pointers. */
478 if (ptr->stmt != stmt)
479 ptr->stmt = stmt;
480
481 prev = ptr->prev;
482 if (prev)
483 {
484 bool stmt_mod = true;
485 /* Find the first element which isn't a SAFE iterator, is in a different
486 stmt, and is not a modified stmt. That node is in the correct list;
487 see if we are too. */
488
489 while (stmt_mod)
490 {
491 while (prev->stmt == stmt || prev->stmt == NULL)
492 prev = prev->prev;
493 if (prev->use == NULL)
494 stmt_mod = false;
495 else
496 if ((stmt_mod = stmt_modified_p (prev->stmt)))
497 prev = prev->prev;
498 }
499
500 /* Get the ssa_name of the list the node is in. */
501 if (prev->use == NULL)
502 root = prev->stmt;
503 else
504 root = *(prev->use);
505 /* If it's the right list, simply return. */
506 if (root == *(ptr->use))
507 return;
508 }
509 /* It's in the wrong list if we reach here. */
510 delink_imm_use (ptr);
511 link_imm_use (ptr, *(ptr->use));
512 }
513
514
515 #define FINALIZE_OPBUILD build_defs
516 #define FINALIZE_OPBUILD_BASE(I) opbuild_elem_real (&build_defs, (I))
517 #define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_real (&build_defs, (I))
518 #define FINALIZE_FUNC finalize_ssa_def_ops
519 #define FINALIZE_ALLOC alloc_def
520 #define FINALIZE_FREE free_defs
521 #define FINALIZE_TYPE struct def_optype_d
522 #define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
523 #define FINALIZE_OPS DEF_OPS
524 #define FINALIZE_BASE(VAR) VAR
525 #define FINALIZE_BASE_TYPE tree *
526 #define FINALIZE_BASE_ZERO NULL
527 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
528 #include "tree-ssa-opfinalize.h"
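
/* The FINALIZE_* macros above parameterize the template in
   tree-ssa-opfinalize.h; including it expands into the finalize_ssa_def_ops
   routine called below. The same pattern is repeated for uses, v_may_defs,
   vuses and v_must_defs. */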
529
530
531 /* This routine will create stmt operands for STMT from the def build list. */
532
533 static void
534 finalize_ssa_defs (tree stmt)
535 {
536 unsigned int num = opbuild_num_elems (&build_defs);
537 /* There should only be a single real definition per assignment. */
538 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
539
540 /* If there is an old list, often the new list is identical, or close, so
541 find the elements at the beginning that are the same as the vector. */
542
543 finalize_ssa_def_ops (stmt);
544 opbuild_clear (&build_defs);
545 }
546
547 #define FINALIZE_OPBUILD build_uses
548 #define FINALIZE_OPBUILD_BASE(I) opbuild_elem_real (&build_uses, (I))
549 #define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_real (&build_uses, (I))
550 #define FINALIZE_FUNC finalize_ssa_use_ops
551 #define FINALIZE_ALLOC alloc_use
552 #define FINALIZE_FREE free_uses
553 #define FINALIZE_TYPE struct use_optype_d
554 #define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
555 #define FINALIZE_OPS USE_OPS
556 #define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
557 #define FINALIZE_BASE(VAR) VAR
558 #define FINALIZE_BASE_TYPE tree *
559 #define FINALIZE_BASE_ZERO NULL
560 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
561 (PTR)->use_ptr.use = (VAL); \
562 link_imm_use_stmt (&((PTR)->use_ptr), \
563 *(VAL), (STMT))
564 #include "tree-ssa-opfinalize.h"
565
566 /* Create the use operand vector for STMT, reusing the previous vector when the operands are unchanged. */
567
568 static void
569 finalize_ssa_uses (tree stmt)
570 {
571 #ifdef ENABLE_CHECKING
572 {
573 unsigned x;
574 unsigned num = opbuild_num_elems (&build_uses);
575
576 /* If the pointer to the operand is the statement itself, something is
577 wrong. It means that we are pointing to a local variable (the
578 initial call to get_stmt_operands does not pass a pointer to a
579 statement). */
580 for (x = 0; x < num; x++)
581 gcc_assert (*(opbuild_elem_real (&build_uses, x)) != stmt);
582 }
583 #endif
584 finalize_ssa_use_ops (stmt);
585 opbuild_clear (&build_uses);
586 }
587
588
589 /* Create the v_may_def operand vector for STMT, reusing the previous vector when the operands are unchanged. */
590 #define FINALIZE_OPBUILD build_v_may_defs
591 #define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_virtual (&build_v_may_defs, (I))
592 #define FINALIZE_OPBUILD_BASE(I) opbuild_elem_uid (&build_v_may_defs, (I))
593 #define FINALIZE_FUNC finalize_ssa_v_may_def_ops
594 #define FINALIZE_ALLOC alloc_maydef
595 #define FINALIZE_FREE free_maydefs
596 #define FINALIZE_TYPE struct maydef_optype_d
597 #define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
598 #define FINALIZE_OPS MAYDEF_OPS
599 #define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
600 #define FINALIZE_BASE_ZERO 0
601 #define FINALIZE_BASE(VAR) ((TREE_CODE (VAR) == SSA_NAME) \
602 ? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
603 #define FINALIZE_BASE_TYPE unsigned
604 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
605 (PTR)->def_var = (VAL); \
606 (PTR)->use_var = (VAL); \
607 (PTR)->use_ptr.use = &((PTR)->use_var); \
608 link_imm_use_stmt (&((PTR)->use_ptr), \
609 (VAL), (STMT))
610 #include "tree-ssa-opfinalize.h"
611
612
613 static void
614 finalize_ssa_v_may_defs (tree stmt)
615 {
616 finalize_ssa_v_may_def_ops (stmt);
617 }
618
619
620 /* Clear the in_list bits and empty the build array for v_may_defs. */
621
622 static inline void
623 cleanup_v_may_defs (void)
624 {
625 unsigned x, num;
626 num = opbuild_num_elems (&build_v_may_defs);
627
628 for (x = 0; x < num; x++)
629 {
630 tree t = opbuild_elem_virtual (&build_v_may_defs, x);
631 if (TREE_CODE (t) != SSA_NAME)
632 {
633 var_ann_t ann = var_ann (t);
634 ann->in_v_may_def_list = 0;
635 }
636 }
637 opbuild_clear (&build_v_may_defs);
638 }
639
640
641 #define FINALIZE_OPBUILD build_vuses
642 #define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_virtual (&build_vuses, (I))
643 #define FINALIZE_OPBUILD_BASE(I) opbuild_elem_uid (&build_vuses, (I))
644 #define FINALIZE_FUNC finalize_ssa_vuse_ops
645 #define FINALIZE_ALLOC alloc_vuse
646 #define FINALIZE_FREE free_vuses
647 #define FINALIZE_TYPE struct vuse_optype_d
648 #define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
649 #define FINALIZE_OPS VUSE_OPS
650 #define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
651 #define FINALIZE_BASE_ZERO 0
652 #define FINALIZE_BASE(VAR) ((TREE_CODE (VAR) == SSA_NAME) \
653 ? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
654 #define FINALIZE_BASE_TYPE unsigned
655 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
656 (PTR)->use_var = (VAL); \
657 (PTR)->use_ptr.use = &((PTR)->use_var); \
658 link_imm_use_stmt (&((PTR)->use_ptr), \
659 (VAL), (STMT))
660 #include "tree-ssa-opfinalize.h"
661
662
663 /* Create the vuse operand vector for STMT, reusing the previous vector when the operands are unchanged. */
664
665 static void
666 finalize_ssa_vuses (tree stmt)
667 {
668 unsigned num, num_v_may_defs;
669 int vuse_index;
670
671 /* Remove superfluous VUSE operands. If the statement already has a
672 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
673 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
674 suppose that variable 'a' is aliased:
675
676 # VUSE <a_2>
677 # a_3 = V_MAY_DEF <a_2>
678 a = a + 1;
679
680 The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
681 operation. */
682
683 num = opbuild_num_elems (&build_vuses);
684 num_v_may_defs = opbuild_num_elems (&build_v_may_defs);
685
686 if (num > 0 && num_v_may_defs > 0)
687 {
688 int last = OPBUILD_LAST;
689 vuse_index = opbuild_first (&build_vuses);
690 for ( ; vuse_index != OPBUILD_LAST; )
691 {
692 tree vuse;
693 vuse = opbuild_elem_virtual (&build_vuses, vuse_index);
694 if (TREE_CODE (vuse) != SSA_NAME)
695 {
696 var_ann_t ann = var_ann (vuse);
697 ann->in_vuse_list = 0;
698 if (ann->in_v_may_def_list)
699 {
700 vuse_index = opbuild_remove_elem (&build_vuses, vuse_index,
701 last);
702 continue;
703 }
704 }
705 last = vuse_index;
706 vuse_index = opbuild_next (&build_vuses, vuse_index);
707 }
708 }
709 else
710 /* Clear out the in_list bits. */
711 for (vuse_index = opbuild_first (&build_vuses);
712 vuse_index != OPBUILD_LAST;
713 vuse_index = opbuild_next (&build_vuses, vuse_index))
714 {
715 tree t = opbuild_elem_virtual (&build_vuses, vuse_index);
716 if (TREE_CODE (t) != SSA_NAME)
717 {
718 var_ann_t ann = var_ann (t);
719 ann->in_vuse_list = 0;
720 }
721 }
722
723 finalize_ssa_vuse_ops (stmt);
724 /* The v_may_def build vector wasn't cleaned up because we needed it. */
725 cleanup_v_may_defs ();
726
727 /* Free the vuses build vector. */
728 opbuild_clear (&build_vuses);
729
730 }
731
732 /* Create the v_must_def operand vector for STMT, reusing the previous vector when the operands are unchanged. */
733
734 #define FINALIZE_OPBUILD build_v_must_defs
735 #define FINALIZE_OPBUILD_ELEM(I) opbuild_elem_virtual (&build_v_must_defs, (I))
736 #define FINALIZE_OPBUILD_BASE(I) opbuild_elem_uid (&build_v_must_defs, (I))
737 #define FINALIZE_FUNC finalize_ssa_v_must_def_ops
738 #define FINALIZE_ALLOC alloc_mustdef
739 #define FINALIZE_FREE free_mustdefs
740 #define FINALIZE_TYPE struct mustdef_optype_d
741 #define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
742 #define FINALIZE_OPS MUSTDEF_OPS
743 #define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
744 #define FINALIZE_BASE_ZERO 0
745 #define FINALIZE_BASE(VAR) ((TREE_CODE (VAR) == SSA_NAME) \
746 ? DECL_UID (SSA_NAME_VAR (VAR)) : DECL_UID ((VAR)))
747 #define FINALIZE_BASE_TYPE unsigned
748 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
749 (PTR)->def_var = (VAL); \
750 (PTR)->kill_var = (VAL); \
751 (PTR)->use_ptr.use = &((PTR)->kill_var);\
752 link_imm_use_stmt (&((PTR)->use_ptr), \
753 (VAL), (STMT))
754 #include "tree-ssa-opfinalize.h"
755
756
757 static void
758 finalize_ssa_v_must_defs (tree stmt)
759 {
760 /* In the presence of subvars, there may be more than one V_MUST_DEF per
761 statement (one for each subvar). It is a bit expensive to verify that
762 all must-defs in a statement belong to subvars if there is more than one
763 MUST-def, so we don't do it. Suffice to say, if you reach here without
764 having subvars, and have num >1, you have hit a bug. */
765
766 finalize_ssa_v_must_def_ops (stmt);
767 opbuild_clear (&build_v_must_defs);
768 }
769
770
771 /* Finalize all the build vectors and attach the resulting operands to STMT. */
772
773 static inline void
774 finalize_ssa_stmt_operands (tree stmt)
775 {
776 finalize_ssa_defs (stmt);
777 finalize_ssa_uses (stmt);
778 finalize_ssa_v_must_defs (stmt);
779 finalize_ssa_v_may_defs (stmt);
780 finalize_ssa_vuses (stmt);
781 }
782
783
784 /* Start the process of building operand vectors; all build vectors must be empty at this point. */
785
786 static inline void
787 start_ssa_stmt_operands (void)
788 {
789 gcc_assert (opbuild_num_elems (&build_defs) == 0);
790 gcc_assert (opbuild_num_elems (&build_uses) == 0);
791 gcc_assert (opbuild_num_elems (&build_vuses) == 0);
792 gcc_assert (opbuild_num_elems (&build_v_may_defs) == 0);
793 gcc_assert (opbuild_num_elems (&build_v_must_defs) == 0);
794 }
795
796
797 /* Add DEF_P to the list of pointers to operands. */
798
799 static inline void
800 append_def (tree *def_p)
801 {
802 opbuild_append_real (&build_defs, def_p);
803 }
804
805
806 /* Add USE_P to the list of pointers to operands. */
807
808 static inline void
809 append_use (tree *use_p)
810 {
811 opbuild_append_real (&build_uses, use_p);
812 }
813
814
815 /* Add a new virtual may def for variable VAR to the build array. */
816
817 static inline void
818 append_v_may_def (tree var)
819 {
820 if (TREE_CODE (var) != SSA_NAME)
821 {
822 var_ann_t ann = get_var_ann (var);
823
824 /* Don't allow duplicate entries. */
825 if (ann->in_v_may_def_list)
826 return;
827 ann->in_v_may_def_list = 1;
828 }
829
830 opbuild_append_virtual (&build_v_may_defs, var);
831 }
832
833
834 /* Add VAR to the list of virtual uses. */
835
836 static inline void
837 append_vuse (tree var)
838 {
839
840 /* Don't allow duplicate entries. */
841 if (TREE_CODE (var) != SSA_NAME)
842 {
843 var_ann_t ann = get_var_ann (var);
844
845 if (ann->in_vuse_list || ann->in_v_may_def_list)
846 return;
847 ann->in_vuse_list = 1;
848 }
849
850 opbuild_append_virtual (&build_vuses, var);
851 }
852
853
854 /* Add VAR to the list of virtual must definitions. */
855
856 static inline void
857 append_v_must_def (tree var)
858 {
859 unsigned i;
860
861 /* Don't allow duplicate entries. */
862 for (i = 0; i < opbuild_num_elems (&build_v_must_defs); i++)
863 if (var == opbuild_elem_virtual (&build_v_must_defs, i))
864 return;
865
866 opbuild_append_virtual (&build_v_must_defs, var);
867 }
868
869
870 /* Parse STMT looking for operands. When finished, the various build_*
871 operand vectors will have potential operands in them. Any previous
872 operand cache is reconciled with the new operands during finalization. */
873
874 static void
875 parse_ssa_operands (tree stmt)
876 {
877 enum tree_code code;
878
879 code = TREE_CODE (stmt);
880 switch (code)
881 {
882 case MODIFY_EXPR:
883 /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
884 either only part of LHS is modified or if the RHS might throw,
885 otherwise, use V_MUST_DEF.
886
887 ??? If it might throw, we should represent somehow that it is killed
888 on the fallthrough path. */
889 {
890 tree lhs = TREE_OPERAND (stmt, 0);
891 int lhs_flags = opf_is_def;
892
893 get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
894
895 /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
896 or not the entire LHS is modified; that depends on what's
897 inside the VIEW_CONVERT_EXPR. */
898 if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
899 lhs = TREE_OPERAND (lhs, 0);
900
901 if (TREE_CODE (lhs) != ARRAY_REF && TREE_CODE (lhs) != ARRAY_RANGE_REF
902 && TREE_CODE (lhs) != BIT_FIELD_REF
903 && TREE_CODE (lhs) != REALPART_EXPR
904 && TREE_CODE (lhs) != IMAGPART_EXPR)
905 lhs_flags |= opf_kill_def;
906
907 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
908 }
909 break;
910
911 case COND_EXPR:
912 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
913 break;
914
915 case SWITCH_EXPR:
916 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
917 break;
918
919 case ASM_EXPR:
920 get_asm_expr_operands (stmt);
921 break;
922
923 case RETURN_EXPR:
924 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
925 break;
926
927 case GOTO_EXPR:
928 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
929 break;
930
931 case LABEL_EXPR:
932 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
933 break;
934
935 /* These nodes contain no variable references. */
936 case BIND_EXPR:
937 case CASE_LABEL_EXPR:
938 case TRY_CATCH_EXPR:
939 case TRY_FINALLY_EXPR:
940 case EH_FILTER_EXPR:
941 case CATCH_EXPR:
942 case RESX_EXPR:
943 break;
944
945 default:
946 /* Notice that if get_expr_operands tries to use &STMT as the operand
947 pointer (which may only happen for USE operands), we will fail in
948 append_use. This default will handle statements like empty
949 statements, or CALL_EXPRs that may appear on the RHS of a statement
950 or as statements themselves. */
951 get_expr_operands (stmt, &stmt, opf_none);
952 break;
953 }
954 }
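
/* As a concrete illustration (the statements are hypothetical, not taken
   from this file): for 'a = b + c' the MODIFY_EXPR case above produces a
   DEF of 'a' (with opf_kill_def, since the whole LHS is overwritten) and
   USEs of 'b' and 'c'; if 'a' is aliased, add_stmt_operand records a
   V_MAY_DEF instead of a real DEF. For 'a[i] = b' the LHS is an
   ARRAY_REF, so opf_kill_def is not set and the store is not treated as
   a killing definition. */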
955
956 /* Create an operands cache for STMT. The statement's annotation flags
957 (volatile operands, aliased loads and stores) are reset, the statement is
958 parsed for operands, and the build vectors are then finalized into the
959 statement's operand cache. If the statement had a previous operand cache,
960 the finalization routines reuse any operand vectors that have not changed
961 (see the comment at the top of this file). */
977
978 static void
979 build_ssa_operands (tree stmt)
980 {
981 stmt_ann_t ann = get_stmt_ann (stmt);
982
983 /* Initially assume that the statement has no volatile operands, nor
984 makes aliased loads or stores. */
985 if (ann)
986 {
987 ann->has_volatile_ops = false;
988 ann->makes_aliased_stores = false;
989 ann->makes_aliased_loads = false;
990 }
991
992 start_ssa_stmt_operands ();
993
994 parse_ssa_operands (stmt);
995
996 finalize_ssa_stmt_operands (stmt);
997 }
998
999
1000 /* Free any operands vectors in OPS. */
1001 #if 0
1002 static void
1003 free_ssa_operands (stmt_operands_p ops)
1004 {
1005 ops->def_ops = NULL;
1006 ops->use_ops = NULL;
1007 ops->maydef_ops = NULL;
1008 ops->mustdef_ops = NULL;
1009 ops->vuse_ops = NULL;
1010 while (ops->memory.next != NULL)
1011 {
1012 operand_memory_p tmp = ops->memory.next;
1013 ops->memory.next = tmp->next;
1014 ggc_free (tmp);
1015 }
1016 }
1017 #endif
1018
1019
1020 /* Get the operands of statement STMT. Note that repeated calls to
1021 get_stmt_operands for the same statement will do nothing until the
1022 statement is marked modified by a call to mark_stmt_modified(). */
1023
1024 void
1025 update_stmt_operands (tree stmt)
1026 {
1027 stmt_ann_t ann = get_stmt_ann (stmt);
1028 /* If get_stmt_operands is called before SSA is initialized, don't
1029 do anything. */
1030 if (!ssa_operands_active ())
1031 return;
1032 /* The optimizers cannot handle statements that are nothing but a
1033 _DECL. This indicates a bug in the gimplifier. */
1034 gcc_assert (!SSA_VAR_P (stmt));
1035
1036 gcc_assert (ann->modified);
1037
1038 timevar_push (TV_TREE_OPS);
1039
1040 build_ssa_operands (stmt);
1041
1042 /* Clear the modified bit for STMT. Subsequent calls to
1043 get_stmt_operands for this statement will do nothing until the
1044 statement is marked modified by a call to mark_stmt_modified(). */
1045 ann->modified = 0;
1046
1047 timevar_pop (TV_TREE_OPS);
1048 }
1049
1050
1051 /* Copy virtual operands from SRC to DEST. */
1052
1053 void
1054 copy_virtual_operands (tree dest, tree src)
1055 {
1056 tree t;
1057 ssa_op_iter iter, old_iter;
1058 use_operand_p use_p, u2;
1059 def_operand_p def_p, d2;
1060
1061 build_ssa_operands (dest);
1062
1063 /* Copy all the virtual fields. */
1064 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
1065 append_vuse (t);
1066 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
1067 append_v_may_def (t);
1068 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
1069 append_v_must_def (t);
1070
1071 if (opbuild_num_elems (&build_vuses) == 0
1072 && opbuild_num_elems (&build_v_may_defs) == 0
1073 && opbuild_num_elems (&build_v_must_defs) == 0)
1074 return;
1075
1076 /* Now commit the virtual operands to this stmt. */
1077 finalize_ssa_v_must_defs (dest);
1078 finalize_ssa_v_may_defs (dest);
1079 finalize_ssa_vuses (dest);
1080
1081 /* Finally, set the fields to the same values as the originals. */
1082
1083
1084 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
1085 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
1086 {
1087 gcc_assert (!op_iter_done (&old_iter));
1088 SET_USE (use_p, t);
1089 t = op_iter_next_tree (&old_iter);
1090 }
1091 gcc_assert (op_iter_done (&old_iter));
1092
1093 op_iter_init_maydef (&old_iter, src, &u2, &d2);
1094 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
1095 {
1096 gcc_assert (!op_iter_done (&old_iter));
1097 SET_USE (use_p, USE_FROM_PTR (u2));
1098 SET_DEF (def_p, DEF_FROM_PTR (d2));
1099 op_iter_next_maymustdef (&u2, &d2, &old_iter);
1100 }
1101 gcc_assert (op_iter_done (&old_iter));
1102
1103 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
1104 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
1105 {
1106 gcc_assert (!op_iter_done (&old_iter));
1107 SET_USE (use_p, USE_FROM_PTR (u2));
1108 SET_DEF (def_p, DEF_FROM_PTR (d2));
1109 op_iter_next_maymustdef (&u2, &d2, &old_iter);
1110 }
1111 gcc_assert (op_iter_done (&old_iter));
1112
1113 }
1114
1115
1116 /* Specifically for use in DOM's expression analysis. Given a store, we
1117 create an artificial stmt which looks like a load from the store; this can
1118 be used to eliminate redundant loads. OLD_STMT is the store stmt, and
1119 NEW_STMT is the new load which represents a load of the
1120 values stored. */
1121
1122 void
1123 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
1124 {
1125 stmt_ann_t ann;
1126 tree op;
1127 ssa_op_iter iter;
1128 use_operand_p use_p;
1129 unsigned x;
1130
1131 ann = get_stmt_ann (new_stmt);
1132
1133 /* process the stmt looking for operands. */
1134 start_ssa_stmt_operands ();
1135 parse_ssa_operands (new_stmt);
1136
1137 for (x = 0; x < opbuild_num_elems (&build_vuses); x++)
1138 {
1139 tree t = opbuild_elem_virtual (&build_vuses, x);
1140 if (TREE_CODE (t) != SSA_NAME)
1141 {
1142 var_ann_t ann = var_ann (t);
1143 ann->in_vuse_list = 0;
1144 }
1145 }
1146
1147 for (x = 0; x < opbuild_num_elems (&build_v_may_defs); x++)
1148 {
1149 tree t = opbuild_elem_virtual (&build_v_may_defs, x);
1150 if (TREE_CODE (t) != SSA_NAME)
1151 {
1152 var_ann_t ann = var_ann (t);
1153 ann->in_v_may_def_list = 0;
1154 }
1155 }
1156 /* Remove any virtual operands that were found. */
1157 opbuild_clear (&build_v_may_defs);
1158 opbuild_clear (&build_v_must_defs);
1159 opbuild_clear (&build_vuses);
1160
1161 /* For each VDEF on the original statement, we want to create a
1162 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
1163 statement. */
1164 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
1165 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
1166 append_vuse (op);
1167
1168 /* Now build the operands for this new stmt. */
1169 finalize_ssa_stmt_operands (new_stmt);
1170
1171 /* All uses in this fake stmt must not be in the immediate use lists. */
1172 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
1173 delink_imm_use (use_p);
1174 }
1175
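/* Swap the operands pointed to by EXP0 and EXP1 inside STMT. If both
   operands are in the operand cache, their use entries are swapped as
   well so that each use pointer keeps its position in its SSA_NAME's
   immediate use list. */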
1176 static void
1177 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
1178 {
1179 tree op0, op1;
1180 op0 = *exp0;
1181 op1 = *exp1;
1182
1183 /* If the operand cache is active, attempt to preserve the relative positions
1184 of these two operands in their respective immediate use lists. */
1185 if (ssa_operands_active () && op0 != op1)
1186 {
1187 use_optype_p use0, use1, ptr;
1188 use0 = use1 = NULL;
1189 /* Find the 2 operands in the cache, if they are there. */
1190 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1191 if (USE_OP_PTR (ptr)->use == exp0)
1192 {
1193 use0 = ptr;
1194 break;
1195 }
1196 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1197 if (USE_OP_PTR (ptr)->use == exp1)
1198 {
1199 use1 = ptr;
1200 break;
1201 }
1202 /* If both uses don't have operand entries, there isn't much we can do
1203 at this point. Presumably we don't need to worry about it. */
1204 if (use0 && use1)
1205 {
1206 tree *tmp = USE_OP_PTR (use1)->use;
1207 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
1208 USE_OP_PTR (use0)->use = tmp;
1209 }
1210 }
1211
1212 /* Now swap the data. */
1213 *exp0 = op1;
1214 *exp1 = op0;
1215 }
1216
1217
1218 /* Recursively scan the expression pointed to by EXPR_P in statement STMT.
1219 FLAGS is one of the OPF_* constants modifying how to interpret the
1220 operands found. */
1221
1222 static void
1223 get_expr_operands (tree stmt, tree *expr_p, int flags)
1224 {
1225 enum tree_code code;
1226 enum tree_code_class class;
1227 tree expr = *expr_p;
1228 stmt_ann_t s_ann = stmt_ann (stmt);
1229
1230 if (expr == NULL)
1231 return;
1232
1233 code = TREE_CODE (expr);
1234 class = TREE_CODE_CLASS (code);
1235
1236 switch (code)
1237 {
1238 case ADDR_EXPR:
1239 /* We could have the address of a component, array member,
1240 etc which has interesting variable references. */
1241 /* Taking the address of a variable does not represent a
1242 reference to it, but the fact that the stmt takes its address will be
1243 of interest to some passes (e.g. alias resolution). */
1244 add_stmt_operand (expr_p, s_ann, 0);
1245
1246 /* If the address is invariant, there may be no interesting variable
1247 references inside. */
1248 if (is_gimple_min_invariant (expr))
1249 return;
1250
1251 /* There should be no VUSEs created, since the referenced objects are
1252 not really accessed. The only operands that we should find here
1253 are ARRAY_REF indices which will always be real operands (GIMPLE
1254 does not allow non-registers as array indices). */
1255 flags |= opf_no_vops;
1256
1257 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1258 return;
1259
1260 case SSA_NAME:
1261 case VAR_DECL:
1262 case PARM_DECL:
1263 case RESULT_DECL:
1264 case CONST_DECL:
1265 {
1266 subvar_t svars;
1267
1268 /* Add the subvars for a variable if it has subvars, to DEFS or USES.
1269 Otherwise, add the variable itself.
1270 Whether it goes to USES or DEFS depends on the operand flags. */
1271 if (var_can_have_subvars (expr)
1272 && (svars = get_subvars_for_var (expr)))
1273 {
1274 subvar_t sv;
1275 for (sv = svars; sv; sv = sv->next)
1276 add_stmt_operand (&sv->var, s_ann, flags);
1277 }
1278 else
1279 {
1280 add_stmt_operand (expr_p, s_ann, flags);
1281 }
1282 return;
1283 }
1284 case MISALIGNED_INDIRECT_REF:
1285 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1286 /* fall through */
1287
1288 case ALIGN_INDIRECT_REF:
1289 case INDIRECT_REF:
1290 get_indirect_ref_operands (stmt, expr, flags);
1291 return;
1292
1293 case TARGET_MEM_REF:
1294 get_tmr_operands (stmt, expr, flags);
1295 return;
1296
1297 case ARRAY_REF:
1298 case ARRAY_RANGE_REF:
1299 /* Treat array references as references to the virtual variable
1300 representing the array. The virtual variable for an ARRAY_REF
1301 is the VAR_DECL for the array. */
1302
1303 /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
1304 according to FLAGS. Recurse if the LHS of the
1305 ARRAY_REF node is not a regular variable. */
1306 if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
1307 add_stmt_operand (expr_p, s_ann, flags);
1308 else
1309 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1310
1311 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1312 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1313 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1314 return;
1315
1316 case COMPONENT_REF:
1317 case REALPART_EXPR:
1318 case IMAGPART_EXPR:
1319 {
1320 tree ref;
1321 HOST_WIDE_INT offset, size;
1322 /* This component ref becomes an access to all of the subvariables
1323 it can touch, if we can determine that, but *NOT* the real one.
1324 If we can't determine which fields we could touch, the recursion
1325 will eventually get to a variable and add *all* of its subvars, or
1326 whatever is the minimum correct subset. */
1327
1328 ref = okay_component_ref_for_subvars (expr, &offset, &size);
1329 if (ref)
1330 {
1331 subvar_t svars = get_subvars_for_var (ref);
1332 subvar_t sv;
1333 for (sv = svars; sv; sv = sv->next)
1334 {
1335 bool exact;
1336 if (overlap_subvar (offset, size, sv, &exact))
1337 {
1338 if (!exact)
1339 flags &= ~opf_kill_def;
1340 add_stmt_operand (&sv->var, s_ann, flags);
1341 }
1342 }
1343 }
1344 else
1345 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1346 flags & ~opf_kill_def);
1347
1348 if (code == COMPONENT_REF)
1349 {
1350 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1351 s_ann->has_volatile_ops = true;
1352 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1353 }
1354 return;
1355 }
1356 case WITH_SIZE_EXPR:
1357 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1358 and an rvalue reference to its second argument. */
1359 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1360 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1361 return;
1362
1363 case CALL_EXPR:
1364 get_call_expr_operands (stmt, expr);
1365 return;
1366
1367 case COND_EXPR:
1368 case VEC_COND_EXPR:
1369 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1370 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1371 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1372 return;
1373
1374 case MODIFY_EXPR:
1375 {
1376 int subflags;
1377 tree op;
1378
1379 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1380
1381 op = TREE_OPERAND (expr, 0);
1382 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1383 op = TREE_OPERAND (op, 0);
1384 if (TREE_CODE (op) == ARRAY_REF
1385 || TREE_CODE (op) == ARRAY_RANGE_REF
1386 || TREE_CODE (op) == REALPART_EXPR
1387 || TREE_CODE (op) == IMAGPART_EXPR)
1388 subflags = opf_is_def;
1389 else
1390 subflags = opf_is_def | opf_kill_def;
1391
1392 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
1393 return;
1394 }
1395
1396 case CONSTRUCTOR:
1397 {
1398 /* General aggregate CONSTRUCTORs have been decomposed, but they
1399 are still in use as the COMPLEX_EXPR equivalent for vectors. */
1400
1401 tree t;
1402 for (t = TREE_OPERAND (expr, 0); t ; t = TREE_CHAIN (t))
1403 get_expr_operands (stmt, &TREE_VALUE (t), opf_none);
1404
1405 return;
1406 }
1407
1408 case TRUTH_NOT_EXPR:
1409 case BIT_FIELD_REF:
1410 case VIEW_CONVERT_EXPR:
1411 do_unary:
1412 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1413 return;
1414
1415 case TRUTH_AND_EXPR:
1416 case TRUTH_OR_EXPR:
1417 case TRUTH_XOR_EXPR:
1418 case COMPOUND_EXPR:
1419 case OBJ_TYPE_REF:
1420 case ASSERT_EXPR:
1421 do_binary:
1422 {
1423 tree op0 = TREE_OPERAND (expr, 0);
1424 tree op1 = TREE_OPERAND (expr, 1);
1425
1426 /* If it would be profitable to swap the operands, then do so to
1427 canonicalize the statement, enabling better optimization.
1428
1429 By placing canonicalization of such expressions here we
1430 transparently keep statements in canonical form, even
1431 when the statement is modified. */
1432 if (tree_swap_operands_p (op0, op1, false))
1433 {
1434 /* For relationals we need to swap the operands
1435 and change the code. */
1436 if (code == LT_EXPR
1437 || code == GT_EXPR
1438 || code == LE_EXPR
1439 || code == GE_EXPR)
1440 {
1441 TREE_SET_CODE (expr, swap_tree_comparison (code));
1442 swap_tree_operands (stmt,
1443 &TREE_OPERAND (expr, 0),
1444 &TREE_OPERAND (expr, 1));
1445 }
1446
1447 /* For a commutative operator we can just swap the operands. */
1448 else if (commutative_tree_code (code))
1449 {
1450 swap_tree_operands (stmt,
1451 &TREE_OPERAND (expr, 0),
1452 &TREE_OPERAND (expr, 1));
1453 }
1454 }
1455
1456 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1457 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1458 return;
1459 }
1460
1461 case REALIGN_LOAD_EXPR:
1462 {
1463 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1464 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1465 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
1466 return;
1467 }
1468
1469 case BLOCK:
1470 case FUNCTION_DECL:
1471 case EXC_PTR_EXPR:
1472 case FILTER_EXPR:
1473 case LABEL_DECL:
1474 /* Expressions that make no memory references. */
1475 return;
1476
1477 default:
1478 if (class == tcc_unary)
1479 goto do_unary;
1480 if (class == tcc_binary || class == tcc_comparison)
1481 goto do_binary;
1482 if (class == tcc_constant || class == tcc_type)
1483 return;
1484 }
1485
1486 /* If we get here, something has gone wrong. */
1487 #ifdef ENABLE_CHECKING
1488 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
1489 debug_tree (expr);
1490 fputs ("\n", stderr);
1491 internal_error ("internal error");
1492 #endif
1493 gcc_unreachable ();
1494 }
1495
1496
1497 /* Scan the operands in the ASM_EXPR statement STMT. */
1498
1499 static void
1500 get_asm_expr_operands (tree stmt)
1501 {
1502 stmt_ann_t s_ann = stmt_ann (stmt);
1503 int noutputs = list_length (ASM_OUTPUTS (stmt));
1504 const char **oconstraints
1505 = (const char **) alloca ((noutputs) * sizeof (const char *));
1506 int i;
1507 tree link;
1508 const char *constraint;
1509 bool allows_mem, allows_reg, is_inout;
1510
1511 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1512 {
1513 oconstraints[i] = constraint
1514 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1515 parse_output_constraint (&constraint, i, 0, 0,
1516 &allows_mem, &allows_reg, &is_inout);
1517
1518 /* This should have been split in gimplify_asm_expr. */
1519 gcc_assert (!allows_reg || !is_inout);
1520
1521 /* Memory operands are addressable. Note that STMT needs the
1522 address of this operand. */
1523 if (!allows_reg && allows_mem)
1524 {
1525 tree t = get_base_address (TREE_VALUE (link));
1526 if (t && DECL_P (t))
1527 note_addressable (t, s_ann);
1528 }
1529
1530 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1531 }
1532
1533 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1534 {
1535 constraint
1536 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1537 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1538 oconstraints, &allows_mem, &allows_reg);
1539
1540 /* Memory operands are addressable. Note that STMT needs the
1541 address of this operand. */
1542 if (!allows_reg && allows_mem)
1543 {
1544 tree t = get_base_address (TREE_VALUE (link));
1545 if (t && DECL_P (t))
1546 note_addressable (t, s_ann);
1547 }
1548
1549 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1550 }
1551
1552
1553 /* Clobber memory for asm ("" : : : "memory"); */
1554 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1555 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1556 {
1557 unsigned i;
1558 bitmap_iterator bi;
1559
1560 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1561 decided to group them). */
1562 if (global_var)
1563 add_stmt_operand (&global_var, s_ann, opf_is_def);
1564 else
1565 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1566 {
1567 tree var = referenced_var (i);
1568 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1569 }
1570
1571 /* Now clobber all addressables. */
1572 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1573 {
1574 tree var = referenced_var (i);
1575
1576 /* Subvars are explicitly represented in this list, so
1577 we don't need the original to be added to the clobber
1578 ops, but the original *will* be in this list because
1579 we keep the addressability of the original
1580 variable up-to-date so we don't screw up the rest of
1581 the backend. */
1582 if (var_can_have_subvars (var)
1583 && get_subvars_for_var (var) != NULL)
1584 continue;
1585
1586 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1587 }
1588
1589 break;
1590 }
1591 }
1592
1593 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1594 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. */
1595
1596 static void
1597 get_indirect_ref_operands (tree stmt, tree expr, int flags)
1598 {
1599 tree *pptr = &TREE_OPERAND (expr, 0);
1600 tree ptr = *pptr;
1601 stmt_ann_t s_ann = stmt_ann (stmt);
1602
1603 /* Stores into INDIRECT_REF operands are never killing definitions. */
1604 flags &= ~opf_kill_def;
1605
1606 if (SSA_VAR_P (ptr))
1607 {
1608 struct ptr_info_def *pi = NULL;
1609
1610 /* If PTR has flow-sensitive points-to information, use it. */
1611 if (TREE_CODE (ptr) == SSA_NAME
1612 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1613 && pi->name_mem_tag)
1614 {
1615 /* PTR has its own memory tag. Use it. */
1616 add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
1617 }
1618 else
1619 {
1620 /* If PTR is not an SSA_NAME or it doesn't have a name
1621 tag, use its type memory tag. */
1622 var_ann_t v_ann;
1623
1624 /* If we are emitting debugging dumps, display a warning if
1625 PTR is an SSA_NAME with no flow-sensitive alias
1626 information. That means that we may need to compute
1627 aliasing again. */
1628 if (dump_file
1629 && TREE_CODE (ptr) == SSA_NAME
1630 && pi == NULL)
1631 {
1632 fprintf (dump_file,
1633 "NOTE: no flow-sensitive alias info for ");
1634 print_generic_expr (dump_file, ptr, dump_flags);
1635 fprintf (dump_file, " in ");
1636 print_generic_stmt (dump_file, stmt, dump_flags);
1637 }
1638
1639 if (TREE_CODE (ptr) == SSA_NAME)
1640 ptr = SSA_NAME_VAR (ptr);
1641 v_ann = var_ann (ptr);
1642 if (v_ann->type_mem_tag)
1643 add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
1644 }
1645 }
1646
1647 /* If a constant is used as a pointer, we can't generate a real
1648 operand for it but we mark the statement volatile to prevent
1649 optimizations from messing things up. */
1650 else if (TREE_CODE (ptr) == INTEGER_CST)
1651 {
1652 if (s_ann)
1653 s_ann->has_volatile_ops = true;
1654 return;
1655 }
1656
1657 /* Everything else *should* have been folded elsewhere, but users
1658 are smarter than we are at finding ways to write invalid code. We
1659 cannot just assert here. If we were absolutely certain that we
1660 do handle all valid cases, then we could just do nothing here.
1661 That seems optimistic, so attempt to do something logical... */
1662 else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
1663 && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
1664 && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
1665 {
1666 /* Make sure we know the object is addressable. */
1667 pptr = &TREE_OPERAND (ptr, 0);
1668 add_stmt_operand (pptr, s_ann, 0);
1669
1670 /* Mark the object itself with a VUSE. */
1671 pptr = &TREE_OPERAND (*pptr, 0);
1672 get_expr_operands (stmt, pptr, flags);
1673 return;
1674 }
1675
1676 /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
1677 else
1678 gcc_unreachable ();
1679
1680 /* Add a USE operand for the base pointer. */
1681 get_expr_operands (stmt, pptr, opf_none);
1682 }
1683
1684 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1685
1686 static void
1687 get_tmr_operands (tree stmt, tree expr, int flags)
1688 {
1689 tree tag = TMR_TAG (expr);
1690
1691 /* First record the real operands. */
1692 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1693 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1694
1695 /* MEM_REFs should never be killing. */
1696 flags &= ~opf_kill_def;
1697
1698 if (TMR_SYMBOL (expr))
1699 note_addressable (TMR_SYMBOL (expr), stmt_ann (stmt));
1700
1701 if (tag)
1702 add_stmt_operand (&tag, stmt_ann (stmt), flags);
1703 else
1704 /* Something weird, so ensure that we will be careful. */
1705 stmt_ann (stmt)->has_volatile_ops = true;
1706 }
1707
1708 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1709
1710 static void
1711 get_call_expr_operands (tree stmt, tree expr)
1712 {
1713 tree op;
1714 int call_flags = call_expr_flags (expr);
1715
1716 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1717 operands for all the symbols that have been found to be
1718 call-clobbered.
1719
1720 Note that if aliases have not been computed, the global effects
1721 of calls will not be included in the SSA web. This is fine
1722 because no optimizer should run before aliases have been
1723 computed. By not bothering with virtual operands for CALL_EXPRs
1724 we avoid adding superfluous virtual operands, which can be a
1725 significant compile time sink (See PR 15855). */
1726 if (aliases_computed_p
1727 && !bitmap_empty_p (call_clobbered_vars)
1728 && !(call_flags & ECF_NOVOPS))
1729 {
1730 /* A 'pure' or a 'const' function never call-clobbers anything.
1731 A 'noreturn' function might, but since we don't return anyway
1732 there is no point in recording that. */
1733 if (TREE_SIDE_EFFECTS (expr)
1734 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1735 add_call_clobber_ops (stmt);
1736 else if (!(call_flags & ECF_CONST))
1737 add_call_read_ops (stmt);
1738 }
1739
1740 /* Find uses in the called function. */
1741 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1742
1743 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1744 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1745
1746 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1747
1748 }
1749
1750
1751 /* Add *VAR_P to the appropriate operand array of the statement with
1752 annotation S_ANN. FLAGS is as in get_expr_operands. If *VAR_P is a
1753 GIMPLE register, it will be added to the statement's real operands,
1754 otherwise it is added to virtual operands. */
1755
1756 static void
1757 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1758 {
1759 bool is_real_op;
1760 tree var, sym;
1761 var_ann_t v_ann;
1762
1763 var = *var_p;
1764 STRIP_NOPS (var);
1765
1766 /* If the operand is an ADDR_EXPR, add its operand to the list of
1767 variables that have had their address taken in this statement. */
1768 if (TREE_CODE (var) == ADDR_EXPR)
1769 {
1770 note_addressable (TREE_OPERAND (var, 0), s_ann);
1771 return;
1772 }
1773
1774 /* If the original variable is not a scalar, it will be added to the list
1775 of virtual operands. In that case, use its base symbol as the virtual
1776 variable representing it. */
1777 is_real_op = is_gimple_reg (var);
1778 if (!is_real_op && !DECL_P (var))
1779 var = get_virtual_var (var);
1780
1781 /* If VAR is not a variable that we care to optimize, do nothing. */
1782 if (var == NULL_TREE || !SSA_VAR_P (var))
1783 return;
1784
1785 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1786 v_ann = var_ann (sym);
1787
1788 /* Mark statements with volatile operands. Optimizers should back
1789 off from statements having volatile operands. */
1790 if (TREE_THIS_VOLATILE (sym) && s_ann)
1791 s_ann->has_volatile_ops = true;
1792
1793 /* If the variable cannot be modified and this is a V_MAY_DEF, change
1794 it into a VUSE. This happens when read-only variables are marked
1795 call-clobbered and/or aliased to writable variables. So we only
1796 perform this check on non-specific stores.
1797
1798 Note that if this is a specific store, i.e. associated with a
1799 modify_expr, then we can't suppress the V_DEF, lest we run into
1800 validation problems.
1801
1802 This can happen when programs cast away const, leaving us with a
1803 store to read-only memory. If the statement is actually executed
1804 at runtime, then the program is ill formed. If the statement is
1805 not executed then all is well. At the very least, we cannot ICE. */
1806 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1807 {
1808 gcc_assert (!is_real_op);
1809 flags &= ~(opf_is_def | opf_kill_def);
1810 }
1811
1812 if (is_real_op)
1813 {
1814 /* The variable is a GIMPLE register. Add it to real operands. */
1815 if (flags & opf_is_def)
1816 append_def (var_p);
1817 else
1818 append_use (var_p);
1819 }
1820 else
1821 {
1822 varray_type aliases;
1823
1824 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1825 virtual operands, unless the caller has specifically requested
1826 not to add virtual operands (used when adding operands inside an
1827 ADDR_EXPR expression). */
1828 if (flags & opf_no_vops)
1829 return;
1830
1831 aliases = v_ann->may_aliases;
1832
1833 if (aliases == NULL)
1834 {
1835 /* The variable is not aliased or it is an alias tag. */
1836 if (flags & opf_is_def)
1837 {
1838 if (flags & opf_kill_def)
1839 {
1840 /* Only regular variables or struct fields may get a
1841 V_MUST_DEF operand. */
1842 gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG
1843 || v_ann->mem_tag_kind == STRUCT_FIELD);
1844 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1845 variable definitions. */
1846 append_v_must_def (var);
1847 }
1848 else
1849 {
1850 /* Add a V_MAY_DEF for call-clobbered variables and
1851 memory tags. */
1852 append_v_may_def (var);
1853 }
1854 }
1855 else
1856 {
1857 append_vuse (var);
1858 if (s_ann && v_ann->is_alias_tag)
1859 s_ann->makes_aliased_loads = 1;
1860 }
1861 }
1862 else
1863 {
1864 size_t i;
1865
1866 /* The variable is aliased. Add its aliases to the virtual
1867 operands. */
1868 gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);
1869
1870 if (flags & opf_is_def)
1871 {
1872 bool added_may_defs_p = false;
1873
1874 /* If the variable is also an alias tag, add a virtual
1875 operand for it, otherwise we will miss representing
1876 references to the members of the variable's alias set.
1877 This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
1878 if (v_ann->is_alias_tag)
1879 {
1880 added_may_defs_p = true;
1881 append_v_may_def (var);
1882 }
1883
1884 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1885 {
1886 /* While VAR may be modifiable, some of its aliases
1887 may not be. If that's the case, we don't really
1888 need to add a V_MAY_DEF for them. */
1889 tree alias = VARRAY_TREE (aliases, i);
1890
1891 if (unmodifiable_var_p (alias))
1892 append_vuse (alias);
1893 else
1894 {
1895 append_v_may_def (alias);
1896 added_may_defs_p = true;
1897 }
1898 }
1899
1900 if (s_ann && added_may_defs_p)
1901 s_ann->makes_aliased_stores = 1;
1902 }
1903 else
1904 {
1905 /* Similarly, append a virtual use for VAR itself, when
1906 it is an alias tag. */
1907 if (v_ann->is_alias_tag)
1908 append_vuse (var);
1909
1910 for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
1911 append_vuse (VARRAY_TREE (aliases, i));
1912
1913 if (s_ann)
1914 s_ann->makes_aliased_loads = 1;
1915 }
1916 }
1917 }
1918 }
1919
1920
1921 /* Record that VAR had its address taken in the statement with annotations
1922 S_ANN. */
1923
1924 static void
1925 note_addressable (tree var, stmt_ann_t s_ann)
1926 {
1927 subvar_t svars;
1928
1929 if (!s_ann)
1930 return;
1931
1932 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
1933 as the only thing we take the address of.
1934 See PR 21407 and the ensuing mailing list discussion. */
1935
1936 var = get_base_address (var);
1937 if (var && SSA_VAR_P (var))
1938 {
1939 if (s_ann->addresses_taken == NULL)
1940 s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
1941
1942
1943 if (var_can_have_subvars (var)
1944 && (svars = get_subvars_for_var (var)))
1945 {
1946 subvar_t sv;
1947 for (sv = svars; sv; sv = sv->next)
1948 bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid);
1949 }
1950 else
1951 bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
1952 }
1953 }
1954
1955 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1956 clobbered variables in the function. */
1957
1958 static void
1959 add_call_clobber_ops (tree stmt)
1960 {
1961 int i;
1962 unsigned u;
1963 tree t;
1964 bitmap_iterator bi;
1965 stmt_ann_t s_ann = stmt_ann (stmt);
1966 struct stmt_ann_d empty_ann;
1967
1968 /* A function that is neither 'const', 'pure' nor 'noreturn' may clobber
1969 call-clobbered variables. */
1970 if (s_ann)
1971 s_ann->makes_clobbering_call = true;
1972
1973 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1974 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1975 if (global_var)
1976 {
1977 add_stmt_operand (&global_var, s_ann, opf_is_def);
1978 return;
1979 }
1980
1981 /* If cache is valid, copy the elements into the build vectors. */
1982 if (ssa_call_clobbered_cache_valid)
1983 {
1984 /* Process the caches in reverse order so we are always inserting at
1985 the head of the list. */
1986 for (i = VEC_length (tree, clobbered_vuses) - 1; i >=0; i--)
1987 {
1988 t = VEC_index (tree, clobbered_vuses, i);
1989 gcc_assert (TREE_CODE (t) != SSA_NAME);
1990 var_ann (t)->in_vuse_list = 1;
1991 opbuild_append_virtual (&build_vuses, t);
1992 }
1993 for (i = VEC_length (tree, clobbered_v_may_defs) - 1; i >= 0; i--)
1994 {
1995 t = VEC_index (tree, clobbered_v_may_defs, i);
1996 gcc_assert (TREE_CODE (t) != SSA_NAME);
1997 var_ann (t)->in_v_may_def_list = 1;
1998 opbuild_append_virtual (&build_v_may_defs, t);
1999 }
2000 if (s_ann)
2001 {
2002 s_ann->makes_aliased_loads = clobbered_aliased_loads;
2003 s_ann->makes_aliased_stores = clobbered_aliased_stores;
2004 }
2005 return;
2006 }
2007
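/* Collect the aliased load/store flags in a scratch annotation; they are copied both to S_ANN and into the clobbered cache below.  */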
2008 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
2009
2010 /* Add a V_MAY_DEF operand for every call-clobbered variable; unmodifiable variables get only a VUSE. */
2011 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
2012 {
2013 tree var = referenced_var (u);
2014 if (unmodifiable_var_p (var))
2015 add_stmt_operand (&var, &empty_ann, opf_none);
2016 else
2017 add_stmt_operand (&var, &empty_ann, opf_is_def | opf_non_specific);
2018 }
2019
2020 clobbered_aliased_loads = empty_ann.makes_aliased_loads;
2021 clobbered_aliased_stores = empty_ann.makes_aliased_stores;
2022
2023 /* Set the flags for a stmt's annotation. */
2024 if (s_ann)
2025 {
2026 s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
2027 s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
2028 }
2029
2030 /* Prepare empty cache vectors. */
2031 VEC_truncate (tree, clobbered_vuses, 0);
2032 VEC_truncate (tree, clobbered_v_may_defs, 0);
2033
2034 /* Now fill the clobbered cache with the values that have been found. */
2035 for (i = opbuild_first (&build_vuses);
2036 i != OPBUILD_LAST;
2037 i = opbuild_next (&build_vuses, i))
2038 VEC_safe_push (tree, heap, clobbered_vuses,
2039 opbuild_elem_virtual (&build_vuses, i));
2040
2041 gcc_assert (opbuild_num_elems (&build_vuses)
2042 == VEC_length (tree, clobbered_vuses));
2043
2044 for (i = opbuild_first (&build_v_may_defs);
2045 i != OPBUILD_LAST;
2046 i = opbuild_next (&build_v_may_defs, i))
2047 VEC_safe_push (tree, heap, clobbered_v_may_defs,
2048 opbuild_elem_virtual (&build_v_may_defs, i));
2049
2050 gcc_assert (opbuild_num_elems (&build_v_may_defs)
2051 == VEC_length (tree, clobbered_v_may_defs));
2052
2053 ssa_call_clobbered_cache_valid = true;
2054 }
2055
2056
2057 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
2058 function. */
2059
2060 static void
2061 add_call_read_ops (tree stmt)
2062 {
2063 int i;
2064 unsigned u;
2065 tree t;
2066 bitmap_iterator bi;
2067 stmt_ann_t s_ann = stmt_ann (stmt);
2068 struct stmt_ann_d empty_ann;
2069
2070 /* If the function is not 'const', it may read memory. Add
2071 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
2072 for the heuristic used to decide whether to create .GLOBAL_VAR. */
2073 if (global_var)
2074 {
2075 add_stmt_operand (&global_var, s_ann, opf_none);
2076 return;
2077 }
2078
2079 /* If cache is valid, copy the elements into the build vector. */
2080 if (ssa_ro_call_cache_valid)
2081 {
2082 for (i = VEC_length (tree, ro_call_vuses) - 1; i >=0 ; i--)
2083 {
2084 /* Process the caches in reverse order so we are always inserting at
2085 the head of the list. */
2086 t = VEC_index (tree, ro_call_vuses, i);
2087 gcc_assert (TREE_CODE (t) != SSA_NAME);
2088 var_ann (t)->in_vuse_list = 1;
2089 opbuild_append_virtual (&build_vuses, t);
2090 }
2091 if (s_ann)
2092 s_ann->makes_aliased_loads = ro_call_aliased_loads;
2093 return;
2094 }
2095
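/* Collect the aliased-load flag in a scratch annotation; it is copied to S_ANN and into the read-only call cache below.  */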
2096 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
2097
2098 /* Add a VUSE for each call-clobbered variable. */
2099 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
2100 {
2101 tree var = referenced_var (u);
2102 add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
2103 }
2104
2105 ro_call_aliased_loads = empty_ann.makes_aliased_loads;
2106 if (s_ann)
2107 s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
2108
2109 /* Prepare empty cache vectors. */
2110 VEC_truncate (tree, ro_call_vuses, 0);
2111
2112 /* Now fill the read-only call cache with the values that have been found. */
2113 for (i = opbuild_first (&build_vuses);
2114 i != OPBUILD_LAST;
2115 i = opbuild_next (&build_vuses, i))
2116 VEC_safe_push (tree, heap, ro_call_vuses,
2117 opbuild_elem_virtual (&build_vuses, i));
2118
2119 gcc_assert (opbuild_num_elems (&build_vuses)
2120 == VEC_length (tree, ro_call_vuses));
2121
2122 ssa_ro_call_cache_valid = true;
2123 }
2124
2125
2126 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2127 Return TRUE if there is a problem. */
2128
2129 bool
2130 verify_imm_links (FILE *f, tree var)
2131 {
2132 use_operand_p ptr, prev, list;
2133 int count;
2134
2135 gcc_assert (TREE_CODE (var) == SSA_NAME);
2136
2137 list = &(SSA_NAME_IMM_USE_NODE (var));
2138 gcc_assert (list->use == NULL);
2139
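/* An unlinked root node (no uses recorded) is trivially valid.  */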
2140 if (list->prev == NULL)
2141 {
2142 gcc_assert (list->next == NULL);
2143 return false;
2144 }
2145
2146 prev = list;
2147 count = 0;
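/* Walk the list forward, verifying each node's PREV pointer and that every use actually refers to VAR.  */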
2148 for (ptr = list->next; ptr != list; )
2149 {
2150 if (prev != ptr->prev)
2151 goto error;
2152
2153 if (ptr->use == NULL)
2154 goto error; /* Two roots, or a stray safe-guard node. */
2155 else if (*(ptr->use) != var)
2156 goto error;
2157
2158 prev = ptr;
2159 ptr = ptr->next;
2160 /* Avoid infinite loops. */
2161 if (count++ > 30000)
2162 goto error;
2163 }
2164
2165 /* Verify list in the other direction. */
2166 prev = list;
2167 for (ptr = list->prev; ptr != list; )
2168 {
2169 if (prev != ptr->next)
2170 goto error;
2171 prev = ptr;
2172 ptr = ptr->prev;
2173 if (count-- < 0)
2174 goto error;
2175 }
2176
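/* The forward and backward walks must visit the same number of nodes.  */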
2177 if (count != 0)
2178 goto error;
2179
2180 return false;
2181
2182 error:
2183 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2184 {
2185 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2186 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2187 }
2188 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2189 (void *)ptr->use);
2190 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2191 fprintf(f, "\n");
2192 return true;
2193 }
2194
2195
2196 /* Dump all the immediate uses of VAR to FILE. */
2197
2198 void
2199 dump_immediate_uses_for (FILE *file, tree var)
2200 {
2201 imm_use_iterator iter;
2202 use_operand_p use_p;
2203
2204 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2205
2206 print_generic_expr (file, var, TDF_SLIM);
2207 fprintf (file, " : -->");
2208 if (has_zero_uses (var))
2209 fprintf (file, " no uses.\n");
2210 else
2211 if (has_single_use (var))
2212 fprintf (file, " single use.\n");
2213 else
2214 fprintf (file, "%d uses.\n", num_imm_uses (var));
2215
2216 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2217 {
2218 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2219 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2220 else
2221 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2222 }
2223 fprintf(file, "\n");
2224 }
2225
2226 /* Dump all the immediate uses to FILE. */
2227
2228 void
2229 dump_immediate_uses (FILE *file)
2230 {
2231 tree var;
2232 unsigned int x;
2233
2234 fprintf (file, "Immediate_uses: \n\n");
2235 for (x = 1; x < num_ssa_names; x++)
2236 {
2237 var = ssa_name(x);
2238 if (!var)
2239 continue;
2240 dump_immediate_uses_for (file, var);
2241 }
2242 }
2243
2244
2245 /* Dump def-use edges on stderr. */
2246
2247 void
2248 debug_immediate_uses (void)
2249 {
2250 dump_immediate_uses (stderr);
2251 }
2252
2253 /* Dump def-use edges for VAR on stderr. */
2254
2255 void
2256 debug_immediate_uses_for (tree var)
2257 {
2258 dump_immediate_uses_for (stderr, var);
2259 }
2260 #include "gt-tree-ssa-operands.h"