1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53 The operand tree is then parsed by the various get_* routines, which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
59
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
62
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
66
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had an SSA_NAME version of a
72 variable, and that same variable occurs in the new operands cache, then
73 the new cache vector will also get the same SSA_NAME.
74
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'. */
78
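/* As an illustrative sketch (not part of this file's logic), a pass that
   consumes the operand cache built here typically walks it with the
   iterator macros from tree-ssa-operands.h, along the lines of:

	tree op;
	ssa_op_iter iter;

	FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VUSE)
	  process_vuse (op);

   where 'process_vuse' stands in for whatever the pass does with each
   virtual use.  The iterator simply reads the vectors that
   finalize_ssa_stmt_operands() fills in below.  */
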
79 /* Flags to describe operand properties in helpers. */
80
81 /* By default, operands are loaded. */
82 #define opf_none 0
83
84 /* Operand is the target of an assignment expression or a
85 call-clobbered variable. */
86 #define opf_is_def (1 << 0)
87
88 /* Operand is the target of an assignment expression. */
89 #define opf_kill_def (1 << 1)
90
91 /* No virtual operands should be created in the expression. This is used
92 when traversing ADDR_EXPR nodes which have different semantics than
93 other expressions. Inside an ADDR_EXPR node, the only operands that we
94 need to consider are indices into arrays. For instance, &a.b[i] should
95 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
96 VUSE for 'b'. */
97 #define opf_no_vops (1 << 2)
98
99 /* Operand is a "non-specific" kill for call-clobbers and such. This is used
100 to distinguish "reset the world" events from explicit MODIFY_EXPRs. */
101 #define opf_non_specific (1 << 3)
102
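/* Illustrative sketch of how the flags above are typically combined by
   the routines in this file (see parse_ssa_operands and
   get_asm_expr_operands below); the statements are assumed examples:

	x = y + 1;	LHS scanned with (opf_is_def | opf_kill_def),
			RHS scanned with opf_none.
	Stores through ARRAY_RANGE_REF or BIT_FIELD_REF keep opf_is_def
	but drop opf_kill_def, since only part of the object is written.
	An asm "memory" clobber adds the clobbered variables with
	(opf_is_def | opf_non_specific).  */
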
103 /* Array for building all the def operands. */
104 static VEC(tree,heap) *build_defs;
105
106 /* Array for building all the use operands. */
107 static VEC(tree,heap) *build_uses;
108
109 /* Array for building all the v_may_def operands. */
110 static VEC(tree,heap) *build_v_may_defs;
111
112 /* Array for building all the vuse operands. */
113 static VEC(tree,heap) *build_vuses;
114
115 /* Array for building all the v_must_def operands. */
116 static VEC(tree,heap) *build_v_must_defs;
117
118 /* True if the SSA operands cache is active. */
119 static bool ops_active = false;
120
121 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
122 static unsigned operand_memory_index;
123
124 static void get_expr_operands (tree, tree *, int);
125
126 static def_optype_p free_defs = NULL;
127 static use_optype_p free_uses = NULL;
128 static vuse_optype_p free_vuses = NULL;
129 static maydef_optype_p free_maydefs = NULL;
130 static mustdef_optype_p free_mustdefs = NULL;
131
132
133 /* Return the DECL_UID of the base variable of T. */
134
135 static inline unsigned
136 get_name_decl (tree t)
137 {
138 if (TREE_CODE (t) != SSA_NAME)
139 return DECL_UID (t);
140 else
141 return DECL_UID (SSA_NAME_VAR (t));
142 }
143
144
145 /* Comparison function for qsort used in operand_build_sort_virtual. */
146
147 static int
148 operand_build_cmp (const void *p, const void *q)
149 {
150 tree e1 = *((const tree *)p);
151 tree e2 = *((const tree *)q);
152 unsigned int u1,u2;
153
154 u1 = get_name_decl (e1);
155 u2 = get_name_decl (e2);
156
157 /* We want to sort in ascending order. They can never be equal. */
158 #ifdef ENABLE_CHECKING
159 gcc_assert (u1 != u2);
160 #endif
161 return (u1 > u2 ? 1 : -1);
162 }
163
164
165 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
166
167 static inline void
168 operand_build_sort_virtual (VEC(tree,heap) *list)
169 {
170 int num = VEC_length (tree, list);
171 if (num < 2)
172 return;
173 if (num == 2)
174 {
175 if (get_name_decl (VEC_index (tree, list, 0))
176 > get_name_decl (VEC_index (tree, list, 1)))
177 {
178 /* Swap elements if in the wrong order. */
179 tree tmp = VEC_index (tree, list, 0);
180 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
181 VEC_replace (tree, list, 1, tmp);
182 }
183 return;
184 }
185 /* There are 3 or more elements, call qsort. */
186 qsort (VEC_address (tree, list),
187 VEC_length (tree, list),
188 sizeof (tree),
189 operand_build_cmp);
190 }
191
192
193 /* Return true if the ssa operands cache is active. */
194
195 bool
196 ssa_operands_active (void)
197 {
198 return ops_active;
199 }
200
201
202 /* Structure storing statistics on how many call clobbers we have, and
203 how many were avoided. */
204
205 static struct
206 {
207 /* Number of call-clobbered ops we attempt to add to calls in
208 add_call_clobber_ops. */
209 unsigned int clobbered_vars;
210
211 /* Number of write-clobbers (v_may_defs) avoided by using
212 not_written information. */
213 unsigned int static_write_clobbers_avoided;
214
215 /* Number of reads (vuses) avoided by using not_read
216 information. */
217 unsigned int static_read_clobbers_avoided;
218
219 /* Number of write-clobbers avoided because the variable can't escape to
220 this call. */
221 unsigned int unescapable_clobbers_avoided;
222
223 /* Number of readonly uses we attempt to add to calls in
224 add_call_read_ops. */
225 unsigned int readonly_clobbers;
226
227 /* Number of readonly uses avoided by using not_read information. */
228 unsigned int static_readonly_clobbers_avoided;
229 } clobber_stats;
230
231
232 /* Initialize the operand cache routines. */
233
234 void
235 init_ssa_operands (void)
236 {
237 build_defs = VEC_alloc (tree, heap, 5);
238 build_uses = VEC_alloc (tree, heap, 10);
239 build_vuses = VEC_alloc (tree, heap, 25);
240 build_v_may_defs = VEC_alloc (tree, heap, 25);
241 build_v_must_defs = VEC_alloc (tree, heap, 25);
242
243 gcc_assert (operand_memory == NULL);
244 operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
245 ops_active = true;
246 memset (&clobber_stats, 0, sizeof (clobber_stats));
247
248 }
249
250
251 /* Dispose of anything allocated by the operand routines. */
252
253 void
254 fini_ssa_operands (void)
255 {
256 struct ssa_operand_memory_d *ptr;
257 VEC_free (tree, heap, build_defs);
258 VEC_free (tree, heap, build_uses);
259 VEC_free (tree, heap, build_v_must_defs);
260 VEC_free (tree, heap, build_v_may_defs);
261 VEC_free (tree, heap, build_vuses);
262 free_defs = NULL;
263 free_uses = NULL;
264 free_vuses = NULL;
265 free_maydefs = NULL;
266 free_mustdefs = NULL;
267 while ((ptr = operand_memory) != NULL)
268 {
269 operand_memory = operand_memory->next;
270 ggc_free (ptr);
271 }
272
273 ops_active = false;
274
275 if (dump_file && (dump_flags & TDF_STATS))
276 {
277 fprintf (dump_file, "Original clobbered vars:%d\n",
278 clobber_stats.clobbered_vars);
279 fprintf (dump_file, "Static write clobbers avoided:%d\n",
280 clobber_stats.static_write_clobbers_avoided);
281 fprintf (dump_file, "Static read clobbers avoided:%d\n",
282 clobber_stats.static_read_clobbers_avoided);
283 fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
284 clobber_stats.unescapable_clobbers_avoided);
285 fprintf (dump_file, "Original readonly clobbers:%d\n",
286 clobber_stats.readonly_clobbers);
287 fprintf (dump_file, "Static readonly clobbers avoided:%d\n",
288 clobber_stats.static_readonly_clobbers_avoided);
289 }
290 }
291
292
293 /* Return a chunk of memory of SIZE bytes from the operand memory pool. */
294
295 static inline void *
296 ssa_operand_alloc (unsigned size)
297 {
298 char *ptr;
299 if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
300 {
301 struct ssa_operand_memory_d *ptr;
302 ptr = GGC_NEW (struct ssa_operand_memory_d);
303 ptr->next = operand_memory;
304 operand_memory = ptr;
305 operand_memory_index = 0;
306 }
307 ptr = &(operand_memory->mem[operand_memory_index]);
308 operand_memory_index += size;
309 return ptr;
310 }
311
312
313 /* Make sure PTR is in the correct immediate use list. Since uses are simply
314 pointers into the stmt TREE, there is no way of telling if anyone has
315 changed what this pointer points to via TREE_OPERAND (exp, 0) = <...>.
316 The contents are different, but the pointer is still the same. This
317 routine checks to make sure PTR is in the correct list, and if it isn't,
318 puts it in the correct list. We cannot simply check the previous node
319 because all nodes in the same stmt might have been changed. */
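/* For instance (an assumed example, not taken from this file), if a
   pass rewrites a statement in place with

	TREE_OPERAND (stmt, 1) = other_name;

   the use_operand_p still points at the same slot in the statement,
   but that slot now holds a different SSA_NAME, so the node must be
   moved from the old name's immediate use list to the new one's.  */
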
320
321 static inline void
322 correct_use_link (use_operand_p ptr, tree stmt)
323 {
324 use_operand_p prev;
325 tree root;
326
327 /* Fold_stmt () may have changed the stmt pointers. */
328 if (ptr->stmt != stmt)
329 ptr->stmt = stmt;
330
331 prev = ptr->prev;
332 if (prev)
333 {
334 /* Find the root element, making sure we skip any safe iterators. */
335 while (prev->use != NULL || prev->stmt == NULL)
336 prev = prev->prev;
337
338 /* Get the ssa_name of the list the node is in. */
339 root = prev->stmt;
340 /* If it's the right list, simply return. */
341 if (root == *(ptr->use))
342 return;
343 }
344
345 /* It is in the wrong list if we reach here. */
346 delink_imm_use (ptr);
347 link_imm_use (ptr, *(ptr->use));
348 }
349
350
351 /* This routine makes sure that PTR is in an immediate use list, and makes
352 sure the stmt pointer is set to the current stmt. Virtual uses do not need
353 the overhead of correct_use_link since they cannot be directly manipulated
354 like a real use can be. (They don't exist in the TREE_OPERAND nodes.) */
355
356 static inline void
357 set_virtual_use_link (use_operand_p ptr, tree stmt)
358 {
359 /* Fold_stmt () may have changed the stmt pointers. */
360 if (ptr->stmt != stmt)
361 ptr->stmt = stmt;
362
363 /* If this use isn't in a list, add it to the correct list. */
364 if (!ptr->prev)
365 link_imm_use (ptr, *(ptr->use));
366 }
367
368
369 #define FINALIZE_OPBUILD build_defs
370 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
371 build_defs, (I))
372 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
373 build_defs, (I))
374 #define FINALIZE_FUNC finalize_ssa_def_ops
375 #define FINALIZE_ALLOC alloc_def
376 #define FINALIZE_FREE free_defs
377 #define FINALIZE_TYPE struct def_optype_d
378 #define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
379 #define FINALIZE_OPS DEF_OPS
380 #define FINALIZE_BASE(VAR) VAR
381 #define FINALIZE_BASE_TYPE tree *
382 #define FINALIZE_BASE_ZERO NULL
383 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
384 #include "tree-ssa-opfinalize.h"
385
386
387 /* This routine will create stmt operands for STMT from the def build list. */
388
389 static void
390 finalize_ssa_defs (tree stmt)
391 {
392 unsigned int num = VEC_length (tree, build_defs);
393
394 /* There should only be a single real definition per assignment. */
395 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
396
397 /* If there is an old list, the new list is often identical, or close, so
398 reuse the matching elements at the beginning of the old vector. */
399 finalize_ssa_def_ops (stmt);
400 VEC_truncate (tree, build_defs, 0);
401 }
402
403 #define FINALIZE_OPBUILD build_uses
404 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
405 build_uses, (I))
406 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
407 build_uses, (I))
408 #define FINALIZE_FUNC finalize_ssa_use_ops
409 #define FINALIZE_ALLOC alloc_use
410 #define FINALIZE_FREE free_uses
411 #define FINALIZE_TYPE struct use_optype_d
412 #define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
413 #define FINALIZE_OPS USE_OPS
414 #define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
415 #define FINALIZE_CORRECT_USE correct_use_link
416 #define FINALIZE_BASE(VAR) VAR
417 #define FINALIZE_BASE_TYPE tree *
418 #define FINALIZE_BASE_ZERO NULL
419 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
420 (PTR)->use_ptr.use = (VAL); \
421 link_imm_use_stmt (&((PTR)->use_ptr), \
422 *(VAL), (STMT))
423 #include "tree-ssa-opfinalize.h"
424
425 /* Create the use operands for STMT from the use build list. */
426
427 static void
428 finalize_ssa_uses (tree stmt)
429 {
430 #ifdef ENABLE_CHECKING
431 {
432 unsigned x;
433 unsigned num = VEC_length (tree, build_uses);
434
435 /* If the pointer to the operand is the statement itself, something is
436 wrong. It means that we are pointing to a local variable (the
437 initial call to get_stmt_operands does not pass a pointer to a
438 statement). */
439 for (x = 0; x < num; x++)
440 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
441 }
442 #endif
443 finalize_ssa_use_ops (stmt);
444 VEC_truncate (tree, build_uses, 0);
445 }
446
447
448 /* Create the V_MAY_DEF operands for STMT from the v_may_def build list. */
449 #define FINALIZE_OPBUILD build_v_may_defs
450 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_may_defs, (I))
451 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
452 build_v_may_defs, (I)))
453 #define FINALIZE_FUNC finalize_ssa_v_may_def_ops
454 #define FINALIZE_ALLOC alloc_maydef
455 #define FINALIZE_FREE free_maydefs
456 #define FINALIZE_TYPE struct maydef_optype_d
457 #define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
458 #define FINALIZE_OPS MAYDEF_OPS
459 #define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
460 #define FINALIZE_CORRECT_USE set_virtual_use_link
461 #define FINALIZE_BASE_ZERO 0
462 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
463 #define FINALIZE_BASE_TYPE unsigned
464 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
465 (PTR)->def_var = (VAL); \
466 (PTR)->use_var = (VAL); \
467 (PTR)->use_ptr.use = &((PTR)->use_var); \
468 link_imm_use_stmt (&((PTR)->use_ptr), \
469 (VAL), (STMT))
470 #include "tree-ssa-opfinalize.h"
471
472
473 static void
474 finalize_ssa_v_may_defs (tree stmt)
475 {
476 finalize_ssa_v_may_def_ops (stmt);
477 }
478
479
480 /* Clear the in_list bits and empty the build array for v_may_defs. */
481
482 static inline void
483 cleanup_v_may_defs (void)
484 {
485 unsigned x, num;
486 num = VEC_length (tree, build_v_may_defs);
487
488 for (x = 0; x < num; x++)
489 {
490 tree t = VEC_index (tree, build_v_may_defs, x);
491 if (TREE_CODE (t) != SSA_NAME)
492 {
493 var_ann_t ann = var_ann (t);
494 ann->in_v_may_def_list = 0;
495 }
496 }
497 VEC_truncate (tree, build_v_may_defs, 0);
498 }
499
500
501 #define FINALIZE_OPBUILD build_vuses
502 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_vuses, (I))
503 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
504 build_vuses, (I)))
505 #define FINALIZE_FUNC finalize_ssa_vuse_ops
506 #define FINALIZE_ALLOC alloc_vuse
507 #define FINALIZE_FREE free_vuses
508 #define FINALIZE_TYPE struct vuse_optype_d
509 #define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
510 #define FINALIZE_OPS VUSE_OPS
511 #define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
512 #define FINALIZE_CORRECT_USE set_virtual_use_link
513 #define FINALIZE_BASE_ZERO 0
514 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
515 #define FINALIZE_BASE_TYPE unsigned
516 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
517 (PTR)->use_var = (VAL); \
518 (PTR)->use_ptr.use = &((PTR)->use_var); \
519 link_imm_use_stmt (&((PTR)->use_ptr), \
520 (VAL), (STMT))
521 #include "tree-ssa-opfinalize.h"
522
523
524 /* Create the VUSE operands for STMT from the vuse build list. */
525
526 static void
527 finalize_ssa_vuses (tree stmt)
528 {
529 unsigned num, num_v_may_defs;
530 unsigned vuse_index;
531
532 /* Remove superfluous VUSE operands. If the statement already has a
533 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
534 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
535 suppose that variable 'a' is aliased:
536
537 # VUSE <a_2>
538 # a_3 = V_MAY_DEF <a_2>
539 a = a + 1;
540
541 The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
542 operation. */
543
544 num = VEC_length (tree, build_vuses);
545 num_v_may_defs = VEC_length (tree, build_v_may_defs);
546
547 if (num > 0 && num_v_may_defs > 0)
548 {
549 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
550 {
551 tree vuse;
552 vuse = VEC_index (tree, build_vuses, vuse_index);
553 if (TREE_CODE (vuse) != SSA_NAME)
554 {
555 var_ann_t ann = var_ann (vuse);
556 ann->in_vuse_list = 0;
557 if (ann->in_v_may_def_list)
558 {
559 VEC_ordered_remove (tree, build_vuses, vuse_index);
560 continue;
561 }
562 }
563 vuse_index++;
564 }
565 }
566 else
567 /* Clear out the in_list bits. */
568 for (vuse_index = 0;
569 vuse_index < VEC_length (tree, build_vuses);
570 vuse_index++)
571 {
572 tree t = VEC_index (tree, build_vuses, vuse_index);
573 if (TREE_CODE (t) != SSA_NAME)
574 {
575 var_ann_t ann = var_ann (t);
576 ann->in_vuse_list = 0;
577 }
578 }
579
580 finalize_ssa_vuse_ops (stmt);
581 /* The v_may_def build vector wasn't cleaned up because we needed it. */
582 cleanup_v_may_defs ();
583
584 /* Free the vuses build vector. */
585 VEC_truncate (tree, build_vuses, 0);
586
587 }
588
589 /* Create the V_MUST_DEF operands for STMT from the v_must_def build list. */
590
591 #define FINALIZE_OPBUILD build_v_must_defs
592 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_must_defs, (I))
593 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
594 build_v_must_defs, (I)))
595 #define FINALIZE_FUNC finalize_ssa_v_must_def_ops
596 #define FINALIZE_ALLOC alloc_mustdef
597 #define FINALIZE_FREE free_mustdefs
598 #define FINALIZE_TYPE struct mustdef_optype_d
599 #define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
600 #define FINALIZE_OPS MUSTDEF_OPS
601 #define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
602 #define FINALIZE_CORRECT_USE set_virtual_use_link
603 #define FINALIZE_BASE_ZERO 0
604 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
605 #define FINALIZE_BASE_TYPE unsigned
606 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
607 (PTR)->def_var = (VAL); \
608 (PTR)->kill_var = (VAL); \
609 (PTR)->use_ptr.use = &((PTR)->kill_var);\
610 link_imm_use_stmt (&((PTR)->use_ptr), \
611 (VAL), (STMT))
612 #include "tree-ssa-opfinalize.h"
613
614
615 static void
616 finalize_ssa_v_must_defs (tree stmt)
617 {
618 /* In the presence of subvars, there may be more than one V_MUST_DEF per
619 statement (one for each subvar). It is a bit expensive to verify that
620 all must-defs in a statement belong to subvars if there is more than one
621 MUST-def, so we don't do it. Suffice it to say, if you reach here without
622 having subvars and have num > 1, you have hit a bug. */
623 finalize_ssa_v_must_def_ops (stmt);
624 VEC_truncate (tree, build_v_must_defs, 0);
625 }
626
627
628 /* Finalize all the build vectors and fill the new operand vectors into STMT. */
629
630 static inline void
631 finalize_ssa_stmt_operands (tree stmt)
632 {
633 finalize_ssa_defs (stmt);
634 finalize_ssa_uses (stmt);
635 finalize_ssa_v_must_defs (stmt);
636 finalize_ssa_v_may_defs (stmt);
637 finalize_ssa_vuses (stmt);
638 }
639
640
641 /* Start the process of building up the operand vectors. */
642
643 static inline void
644 start_ssa_stmt_operands (void)
645 {
646 gcc_assert (VEC_length (tree, build_defs) == 0);
647 gcc_assert (VEC_length (tree, build_uses) == 0);
648 gcc_assert (VEC_length (tree, build_vuses) == 0);
649 gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
650 gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
651 }
652
653
654 /* Add DEF_P to the list of pointers to operands. */
655
656 static inline void
657 append_def (tree *def_p)
658 {
659 VEC_safe_push (tree, heap, build_defs, (tree)def_p);
660 }
661
662
663 /* Add USE_P to the list of pointers to operands. */
664
665 static inline void
666 append_use (tree *use_p)
667 {
668 VEC_safe_push (tree, heap, build_uses, (tree)use_p);
669 }
670
671
672 /* Add a new virtual may def for variable VAR to the build array. */
673
674 static inline void
675 append_v_may_def (tree var)
676 {
677 if (TREE_CODE (var) != SSA_NAME)
678 {
679 var_ann_t ann = get_var_ann (var);
680
681 /* Don't allow duplicate entries. */
682 if (ann->in_v_may_def_list)
683 return;
684 ann->in_v_may_def_list = 1;
685 }
686
687 VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
688 }
689
690
691 /* Add VAR to the list of virtual uses. */
692
693 static inline void
694 append_vuse (tree var)
695 {
696
697 /* Don't allow duplicate entries. */
698 if (TREE_CODE (var) != SSA_NAME)
699 {
700 var_ann_t ann = get_var_ann (var);
701
702 if (ann->in_vuse_list || ann->in_v_may_def_list)
703 return;
704 ann->in_vuse_list = 1;
705 }
706
707 VEC_safe_push (tree, heap, build_vuses, (tree)var);
708 }
709
710
711 /* Add VAR to the list of virtual must definitions. */
712
713 static inline void
714 append_v_must_def (tree var)
715 {
716 unsigned i;
717
718 /* Don't allow duplicate entries. */
719 for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
720 if (var == VEC_index (tree, build_v_must_defs, i))
721 return;
722
723 VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
724 }
725
726
727 /* Return true if the memory access REF may touch the variable ALIAS.
728 REF is a tree that contains the entire pointer dereference
729 expression, if available, or NULL otherwise. OFFSET and SIZE come
730 from the memory access expression that generated this virtual
731 operand. */
733
734 static bool
735 access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
736 HOST_WIDE_INT size)
737 {
738 bool offsetgtz = offset > 0;
739 unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
740 tree base = ref ? get_base_address (ref) : NULL;
741
742 /* If ALIAS is an SFT, it can't be touched if the offset
743 and size of the access is not overlapping with the SFT offset and
744 size. This is only true if we are accessing through a pointer
745 to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
746 be accessing through a pointer to some substruct of the
747 structure, and if we try to prune there, we will have the wrong
748 offset, and get the wrong answer.
749 i.e., we can't prune without more work if we have something like
750
751 struct gcc_target
752 {
753 struct asm_out
754 {
755 const char *byte_op;
756 struct asm_int_op
757 {
758 const char *hi;
759 } aligned_op;
760 } asm_out;
761 } targetm;
762
763 foo = &targetm.asm_out.aligned_op;
764 return foo->hi;
765
766 SFT.1, which represents hi, will have SFT_OFFSET=32 because in
767 terms of SFT_PARENT_VAR, that is where it is.
768 However, the access through the foo pointer will be at offset 0. */
769 if (size != -1
770 && TREE_CODE (alias) == STRUCT_FIELD_TAG
771 && base
772 && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
773 && !overlap_subvar (offset, size, alias, NULL))
774 {
775 #ifdef ACCESS_DEBUGGING
776 fprintf (stderr, "Access to ");
777 print_generic_expr (stderr, ref, 0);
778 fprintf (stderr, " may not touch ");
779 print_generic_expr (stderr, alias, 0);
780 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
781 #endif
782 return false;
783 }
784
785 /* With strict aliasing, it is impossible for a component access
786 through a pointer to touch a random variable, unless that
787 variable *is* a structure or a pointer.
788
789 That is, given p->c, and some random global variable b,
790 there is no legal way that p->c could be an access to b.
791
792 Without strict aliasing on, we consider it legal to do something
793 like:
794
795 struct foos { int l; };
796 int foo;
797 static struct foos *getfoo(void);
798 int main (void)
799 {
800 struct foos *f = getfoo();
801 f->l = 1;
802 foo = 2;
803 if (f->l == 1)
804 abort();
805 exit(0);
806 }
807 static struct foos *getfoo(void)
808 { return (struct foos *)&foo; }
809
810 (taken from 20000623-1.c)
811 */
812 else if (ref
813 && flag_strict_aliasing
814 && TREE_CODE (ref) != INDIRECT_REF
815 && !MTAG_P (alias)
816 && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
817 && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
818 && !POINTER_TYPE_P (TREE_TYPE (alias)))
819 {
820 #ifdef ACCESS_DEBUGGING
821 fprintf (stderr, "Access to ");
822 print_generic_expr (stderr, ref, 0);
823 fprintf (stderr, " may not touch ");
824 print_generic_expr (stderr, alias, 0);
825 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
826 #endif
827 return false;
828 }
829
830 /* If the offset of the access is greater than the size of one of
831 the possible aliases, it can't be touching that alias, because it
832 would be past the end of the structure. */
833 else if (ref
834 && flag_strict_aliasing
835 && TREE_CODE (ref) != INDIRECT_REF
836 && !MTAG_P (alias)
837 && !POINTER_TYPE_P (TREE_TYPE (alias))
838 && offsetgtz
839 && DECL_SIZE (alias)
840 && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
841 && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
842 {
843 #ifdef ACCESS_DEBUGGING
844 fprintf (stderr, "Access to ");
845 print_generic_expr (stderr, ref, 0);
846 fprintf (stderr, " may not touch ");
847 print_generic_expr (stderr, alias, 0);
848 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
849 #endif
850 return false;
851 }
852
853 return true;
854 }
855
856
857 /* Add VAR to the virtual operands array. FLAGS is as in
858 get_expr_operands. FULL_REF is a tree that contains the entire
859 pointer dereference expression, if available, or NULL otherwise.
860 OFFSET and SIZE come from the memory access expression that
861 generated this virtual operand. FOR_CLOBBER is true if this is
862 adding a virtual operand for a call clobber. */
863
864 static void
865 add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
866 tree full_ref, HOST_WIDE_INT offset,
867 HOST_WIDE_INT size, bool for_clobber)
868 {
869 VEC(tree,gc) *aliases;
870 tree sym;
871 var_ann_t v_ann;
872
873 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
874 v_ann = var_ann (sym);
875
876 /* Mark statements with volatile operands. Optimizers should back
877 off from statements having volatile operands. */
878 if (TREE_THIS_VOLATILE (sym) && s_ann)
879 s_ann->has_volatile_ops = true;
880
881 /* If the variable cannot be modified and this is a V_MAY_DEF, change
882 it into a VUSE. This happens when read-only variables are marked
883 call-clobbered and/or aliased to writable variables. We only do
884 this for non-specific stores.
885
886 Note that if this is a specific store, i.e. associated with a
887 modify_expr, then we can't suppress the V_MAY_DEF, lest we run
888 into validation problems.
889
890 This can happen when programs cast away const, leaving us with a
891 store to read-only memory. If the statement is actually executed
892 at runtime, then the program is ill formed. If the statement is
893 not executed then all is well. At the very least, we cannot ICE. */
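/* An assumed example of the cast-away-const case described above:

	const int x = 0;
	...
	*(int *) &x = 1;

   The explicit store keeps its V_MAY_DEF (it is a specific store), but
   any non-specific clobber of 'x', e.g. from an asm "memory" clobber
   over the addressable variables, is downgraded to a VUSE here because
   'x' is unmodifiable.  */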
894 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
895 flags &= ~(opf_is_def | opf_kill_def);
896
897 /* The variable is not a GIMPLE register. Add it (or its aliases) to
898 virtual operands, unless the caller has specifically requested
899 not to add virtual operands (used when adding operands inside an
900 ADDR_EXPR expression). */
901 if (flags & opf_no_vops)
902 return;
903
904 aliases = v_ann->may_aliases;
905 if (aliases == NULL)
906 {
907 /* The variable is not aliased or it is an alias tag. */
908 if (flags & opf_is_def)
909 {
910 if (flags & opf_kill_def)
911 {
912 /* V_MUST_DEF for non-aliased, non-GIMPLE register
913 variable definitions. */
914 gcc_assert (!MTAG_P (var)
915 || TREE_CODE (var) == STRUCT_FIELD_TAG);
916 append_v_must_def (var);
917 }
918 else
919 {
920 /* Add a V_MAY_DEF for call-clobbered variables and
921 memory tags. */
922 append_v_may_def (var);
923 }
924 }
925 else
926 append_vuse (var);
927 }
928 else
929 {
930 unsigned i;
931 tree al;
932
933 /* The variable is aliased. Add its aliases to the virtual
934 operands. */
935 gcc_assert (VEC_length (tree, aliases) != 0);
936
937 if (flags & opf_is_def)
938 {
939
940 bool none_added = true;
941
942 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
943 {
944 if (!access_can_touch_variable (full_ref, al, offset, size))
945 continue;
946
947 none_added = false;
948 append_v_may_def (al);
949 }
950
951 /* If the variable is also an alias tag, add a virtual
952 operand for it, otherwise we will miss representing
953 references to the members of the variable's alias set.
954 This fixes the bug in gcc.c-torture/execute/20020503-1.c.
955
956 It is also necessary to add bare defs on clobbers for
957 SMT's, so that bare SMT uses caused by pruning all the
958 aliases will link up properly with calls. In order to
959 keep the number of these bare defs we add down to the
960 minimum necessary, we keep track of which SMT's were used
961 alone in statement vdefs or vuses. */
962 if (v_ann->is_aliased
963 || none_added
964 || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
965 && for_clobber
966 && SMT_USED_ALONE (var)))
967 {
968 /* Every bare SMT def we add should have SMT_USED_ALONE
969 set on it, or else we will get the wrong answer on
970 clobbers. */
971 if (none_added
972 && !updating_used_alone && aliases_computed_p
973 && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
974 gcc_assert (SMT_USED_ALONE (var));
975
976 append_v_may_def (var);
977 }
978 }
979 else
980 {
981 bool none_added = true;
982 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
983 {
984 if (!access_can_touch_variable (full_ref, al, offset, size))
985 continue;
986 none_added = false;
987 append_vuse (al);
988 }
989
990 /* Similarly, append a virtual use for VAR itself, when
991 it is an alias tag. */
992 if (v_ann->is_aliased || none_added)
993 append_vuse (var);
994 }
995 }
996 }
997
998
999 /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
1000 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1001 the statement's real operands, otherwise it is added to virtual
1002 operands. */
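/* For illustration (an assumed example): in

	x_1 = y_2 + g;

   'x_1' and 'y_2' are GIMPLE registers and become a real DEF and a real
   USE, while a global 'g' that is not a GIMPLE register (for instance
   because it is aliased or has its address taken) is routed through
   add_virtual_operand and becomes a virtual operand.  */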
1003
1004 static void
1005 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1006 {
1007 bool is_real_op;
1008 tree var, sym;
1009 var_ann_t v_ann;
1010
1011 var = *var_p;
1012 gcc_assert (SSA_VAR_P (var));
1013
1014 is_real_op = is_gimple_reg (var);
1015
1016 /* If this is a real operand, the operand is either an SSA name or a
1017 decl. Virtual operands may only be decls. */
1018 gcc_assert (is_real_op || DECL_P (var));
1019
1020 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1021 v_ann = var_ann (sym);
1022
1023 /* Mark statements with volatile operands. Optimizers should back
1024 off from statements having volatile operands. */
1025 if (TREE_THIS_VOLATILE (sym) && s_ann)
1026 s_ann->has_volatile_ops = true;
1027
1028 if (is_real_op)
1029 {
1030 /* The variable is a GIMPLE register. Add it to real operands. */
1031 if (flags & opf_is_def)
1032 append_def (var_p);
1033 else
1034 append_use (var_p);
1035 }
1036 else
1037 add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
1038 }
1039
1040
1041 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1042 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1043
1044 STMT is the statement being processed, EXPR is the INDIRECT_REF
1045 that got us here.
1046
1047 FLAGS is as in get_expr_operands.
1048
1049 FULL_REF contains the full pointer dereference expression, if we
1050 have it, or NULL otherwise.
1051
1052 OFFSET and SIZE are the location of the access inside the
1053 dereferenced pointer, if known.
1054
1055 RECURSE_ON_BASE should be set to true if we want to continue
1056 calling get_expr_operands on the base pointer, and false if
1057 something else will do it for us. */
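/* For illustration (an assumed example): for a store

	*p_1 = x;

   if p_1 carries flow-sensitive points-to information with a name
   memory tag, the virtual definition is added for that tag; otherwise
   the symbol memory tag of the base pointer 'p' is used, as the code
   below does.  */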
1058
1059 static void
1060 get_indirect_ref_operands (tree stmt, tree expr, int flags,
1061 tree full_ref,
1062 HOST_WIDE_INT offset, HOST_WIDE_INT size,
1063 bool recurse_on_base)
1064 {
1065 tree *pptr = &TREE_OPERAND (expr, 0);
1066 tree ptr = *pptr;
1067 stmt_ann_t s_ann = stmt_ann (stmt);
1068
1069 /* Stores into INDIRECT_REF operands are never killing definitions. */
1070 flags &= ~opf_kill_def;
1071
1072 if (SSA_VAR_P (ptr))
1073 {
1074 struct ptr_info_def *pi = NULL;
1075
1076 /* If PTR has flow-sensitive points-to information, use it. */
1077 if (TREE_CODE (ptr) == SSA_NAME
1078 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1079 && pi->name_mem_tag)
1080 {
1081 /* PTR has its own memory tag. Use it. */
1082 add_virtual_operand (pi->name_mem_tag, s_ann, flags,
1083 full_ref, offset, size, false);
1084 }
1085 else
1086 {
1087 /* If PTR is not an SSA_NAME or it doesn't have a name
1088 tag, use its symbol memory tag. */
1089 var_ann_t v_ann;
1090
1091 /* If we are emitting debugging dumps, display a warning if
1092 PTR is an SSA_NAME with no flow-sensitive alias
1093 information. That means that we may need to compute
1094 aliasing again. */
1095 if (dump_file
1096 && TREE_CODE (ptr) == SSA_NAME
1097 && pi == NULL)
1098 {
1099 fprintf (dump_file,
1100 "NOTE: no flow-sensitive alias info for ");
1101 print_generic_expr (dump_file, ptr, dump_flags);
1102 fprintf (dump_file, " in ");
1103 print_generic_stmt (dump_file, stmt, dump_flags);
1104 }
1105
1106 if (TREE_CODE (ptr) == SSA_NAME)
1107 ptr = SSA_NAME_VAR (ptr);
1108 v_ann = var_ann (ptr);
1109
1110 if (v_ann->symbol_mem_tag)
1111 add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
1112 full_ref, offset, size, false);
1113 }
1114 }
1115 else if (TREE_CODE (ptr) == INTEGER_CST)
1116 {
1117 /* If a constant is used as a pointer, we can't generate a real
1118 operand for it but we mark the statement volatile to prevent
1119 optimizations from messing things up. */
1120 if (s_ann)
1121 s_ann->has_volatile_ops = true;
1122 return;
1123 }
1124 else
1125 {
1126 /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
1127 gcc_unreachable ();
1128 }
1129
1130 /* If requested, add a USE operand for the base pointer. */
1131 if (recurse_on_base)
1132 get_expr_operands (stmt, pptr, opf_none);
1133 }
1134
1135
1136 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1137
1138 static void
1139 get_tmr_operands (tree stmt, tree expr, int flags)
1140 {
1141 tree tag = TMR_TAG (expr), ref;
1142 HOST_WIDE_INT offset, size, maxsize;
1143 subvar_t svars, sv;
1144 stmt_ann_t s_ann = stmt_ann (stmt);
1145
1146 /* First record the real operands. */
1147 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1148 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1149
1150 /* MEM_REFs should never be killing. */
1151 flags &= ~opf_kill_def;
1152
1153 if (TMR_SYMBOL (expr))
1154 {
1155 stmt_ann_t ann = stmt_ann (stmt);
1156 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1157 }
1158
1159 if (!tag)
1160 {
1161 /* Something weird, so ensure that we will be careful. */
1162 stmt_ann (stmt)->has_volatile_ops = true;
1163 return;
1164 }
1165
1166 if (DECL_P (tag))
1167 {
1168 get_expr_operands (stmt, &tag, flags);
1169 return;
1170 }
1171
1172 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1173 gcc_assert (ref != NULL_TREE);
1174 svars = get_subvars_for_var (ref);
1175 for (sv = svars; sv; sv = sv->next)
1176 {
1177 bool exact;
1178 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1179 {
1180 int subvar_flags = flags;
1181 if (!exact || size != maxsize)
1182 subvar_flags &= ~opf_kill_def;
1183 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1184 }
1185 }
1186 }
1187
1188
1189 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1190 clobbered variables in the function. */
1191
1192 static void
1193 add_call_clobber_ops (tree stmt, tree callee)
1194 {
1195 unsigned u;
1196 bitmap_iterator bi;
1197 stmt_ann_t s_ann = stmt_ann (stmt);
1198 bitmap not_read_b, not_written_b;
1199
1200 /* Functions that are not const, pure, or noreturn may clobber
1201 call-clobbered variables. */
1202 if (s_ann)
1203 s_ann->makes_clobbering_call = true;
1204
1205 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1206 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1207 if (global_var)
1208 {
1209 add_stmt_operand (&global_var, s_ann, opf_is_def);
1210 return;
1211 }
1212
1213 /* Get info for local and module level statics. There is a bit
1214 set for each static if the call being processed does not read
1215 or write that variable. */
1216 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1217 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1218 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1219 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1220 {
1221 tree var = referenced_var_lookup (u);
1222 unsigned int escape_mask = var_ann (var)->escape_mask;
1223 tree real_var = var;
1224 bool not_read;
1225 bool not_written;
1226
1227 /* Not read and not written are computed on regular vars, not
1228 subvars, so look at the parent var if this is an SFT. */
1229 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1230 real_var = SFT_PARENT_VAR (var);
1231
1232 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1233 DECL_UID (real_var)) : false;
1234 not_written = not_written_b ? bitmap_bit_p (not_written_b,
1235 DECL_UID (real_var)) : false;
1236 gcc_assert (!unmodifiable_var_p (var));
1237
1238 clobber_stats.clobbered_vars++;
1239
1240 /* See if this variable is really clobbered by this function. */
1241
1242 /* Trivial case: Things escaping only to pure/const are not
1243 clobbered by non-pure-const, and only read by pure/const. */
1244 if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
1245 {
1246 tree call = get_call_expr_in (stmt);
1247 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1248 {
1249 add_stmt_operand (&var, s_ann, opf_none);
1250 clobber_stats.unescapable_clobbers_avoided++;
1251 continue;
1252 }
1253 else
1254 {
1255 clobber_stats.unescapable_clobbers_avoided++;
1256 continue;
1257 }
1258 }
1259
1260 if (not_written)
1261 {
1262 clobber_stats.static_write_clobbers_avoided++;
1263 if (!not_read)
1264 add_stmt_operand (&var, s_ann, opf_none);
1265 else
1266 clobber_stats.static_read_clobbers_avoided++;
1267 }
1268 else
1269 add_virtual_operand (var, s_ann, opf_is_def,
1270 NULL, 0, -1, true);
1271 }
1272
1273 }
1274
1275
1276 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1277 function. */
1278
1279 static void
1280 add_call_read_ops (tree stmt, tree callee)
1281 {
1282 unsigned u;
1283 bitmap_iterator bi;
1284 stmt_ann_t s_ann = stmt_ann (stmt);
1285 bitmap not_read_b;
1286
1287 /* If the function is not pure, it may reference memory. Add
1288 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1289 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1290 if (global_var)
1291 {
1292 add_stmt_operand (&global_var, s_ann, opf_none);
1293 return;
1294 }
1295
1296 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1297
1298 /* Add a VUSE for each call-clobbered variable. */
1299 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1300 {
1301 tree var = referenced_var (u);
1302 tree real_var = var;
1303 bool not_read;
1304
1305 clobber_stats.readonly_clobbers++;
1306
1307 /* Not read and not written are computed on regular vars, not
1308 subvars, so look at the parent var if this is an SFT. */
1309
1310 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1311 real_var = SFT_PARENT_VAR (var);
1312
1313 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1314 DECL_UID (real_var)) : false;
1315
1316 if (not_read)
1317 {
1318 clobber_stats.static_readonly_clobbers_avoided++;
1319 continue;
1320 }
1321
1322 add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
1323 }
1324 }
1325
1326
1327 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1328
1329 static void
1330 get_call_expr_operands (tree stmt, tree expr)
1331 {
1332 tree op;
1333 int call_flags = call_expr_flags (expr);
1334
1335 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1336 operands for all the symbols that have been found to be
1337 call-clobbered.
1338
1339 Note that if aliases have not been computed, the global effects
1340 of calls will not be included in the SSA web. This is fine
1341 because no optimizer should run before aliases have been
1342 computed. By not bothering with virtual operands for CALL_EXPRs
1343 we avoid adding superfluous virtual operands, which can be a
1344 significant compile time sink (See PR 15855). */
1345 if (aliases_computed_p
1346 && !bitmap_empty_p (call_clobbered_vars)
1347 && !(call_flags & ECF_NOVOPS))
1348 {
1349 /* A 'pure' or a 'const' function never call-clobbers anything.
1350 A 'noreturn' function might, but since we don't return anyway
1351 there is no point in recording that. */
1352 if (TREE_SIDE_EFFECTS (expr)
1353 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1354 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1355 else if (!(call_flags & ECF_CONST))
1356 add_call_read_ops (stmt, get_callee_fndecl (expr));
1357 }
1358
1359 /* Find uses in the called function. */
1360 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1361
1362 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1363 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1364
1365 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1366 }
1367
1368
1369 /* Scan operands in the ASM_EXPR stmt STMT. */
1370
1371 static void
1372 get_asm_expr_operands (tree stmt)
1373 {
1374 stmt_ann_t s_ann = stmt_ann (stmt);
1375 int noutputs = list_length (ASM_OUTPUTS (stmt));
1376 const char **oconstraints
1377 = (const char **) alloca ((noutputs) * sizeof (const char *));
1378 int i;
1379 tree link;
1380 const char *constraint;
1381 bool allows_mem, allows_reg, is_inout;
1382
1383 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1384 {
1385 oconstraints[i] = constraint
1386 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1387 parse_output_constraint (&constraint, i, 0, 0,
1388 &allows_mem, &allows_reg, &is_inout);
1389
1390 /* This should have been split in gimplify_asm_expr. */
1391 gcc_assert (!allows_reg || !is_inout);
1392
1393 /* Memory operands are addressable. Note that STMT needs the
1394 address of this operand. */
1395 if (!allows_reg && allows_mem)
1396 {
1397 tree t = get_base_address (TREE_VALUE (link));
1398 if (t && DECL_P (t) && s_ann)
1399 add_to_addressable_set (t, &s_ann->addresses_taken);
1400 }
1401
1402 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1403 }
1404
1405 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1406 {
1407 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1408 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1409 oconstraints, &allows_mem, &allows_reg);
1410
1411 /* Memory operands are addressable. Note that STMT needs the
1412 address of this operand. */
1413 if (!allows_reg && allows_mem)
1414 {
1415 tree t = get_base_address (TREE_VALUE (link));
1416 if (t && DECL_P (t) && s_ann)
1417 add_to_addressable_set (t, &s_ann->addresses_taken);
1418 }
1419
1420 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1421 }
1422
1423
1424 /* Clobber memory for asm ("" : : : "memory"); */
1425 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1426 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1427 {
1428 unsigned i;
1429 bitmap_iterator bi;
1430
1431 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1432 decided to group them). */
1433 if (global_var)
1434 add_stmt_operand (&global_var, s_ann, opf_is_def);
1435 else
1436 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1437 {
1438 tree var = referenced_var (i);
1439 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1440 }
1441
1442 /* Now clobber all addressables. */
1443 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1444 {
1445 tree var = referenced_var (i);
1446
1447 /* Subvars are explicitly represented in this list, so
1448 we don't need the original to be added to the clobber
1449 ops, but the original *will* be in this list because
1450 we keep the addressability of the original
1451 variable up-to-date so we don't screw up the rest of
1452 the backend. */
1453 if (var_can_have_subvars (var)
1454 && get_subvars_for_var (var) != NULL)
1455 continue;
1456
1457 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1458 }
1459
1460 break;
1461 }
1462 }
1463
1464
1465 /* Recursively scan the expression pointed to by EXPR_P in statement
1466 STMT. FLAGS is a bitmask of the OPF_* constants modifying
1467 how to interpret the operands found. */
1468
1469 static void
1470 get_expr_operands (tree stmt, tree *expr_p, int flags)
1471 {
1472 enum tree_code code;
1473 enum tree_code_class class;
1474 tree expr = *expr_p;
1475 stmt_ann_t s_ann = stmt_ann (stmt);
1476
1477 if (expr == NULL)
1478 return;
1479
1480 code = TREE_CODE (expr);
1481 class = TREE_CODE_CLASS (code);
1482
1483 switch (code)
1484 {
1485 case ADDR_EXPR:
1486 /* Taking the address of a variable does not represent a
1487 reference to it, but the fact that the statement takes its
1488 address will be of interest to some passes (e.g. alias
1489 resolution). */
1490 add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
1491
1492 /* If the address is invariant, there may be no interesting
1493 variable references inside. */
1494 if (is_gimple_min_invariant (expr))
1495 return;
1496
1497 /* Otherwise, there may be variables referenced inside but there
1498 should be no VUSEs created, since the referenced objects are
1499 not really accessed. The only operands that we should find
1500 here are ARRAY_REF indices which will always be real operands
1501 (GIMPLE does not allow non-registers as array indices). */
1502 flags |= opf_no_vops;
1503 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1504 return;
1505
1506 case SSA_NAME:
1507 case STRUCT_FIELD_TAG:
1508 case SYMBOL_MEMORY_TAG:
1509 case NAME_MEMORY_TAG:
1510 add_stmt_operand (expr_p, s_ann, flags);
1511 return;
1512
1513 case VAR_DECL:
1514 case PARM_DECL:
1515 case RESULT_DECL:
1516 {
1517 subvar_t svars;
1518
1519 /* Add the subvars for a variable if it has subvars, to DEFS
1520 or USES. Otherwise, add the variable itself. Whether it
1521 goes to USES or DEFS depends on the operand flags. */
1522 if (var_can_have_subvars (expr)
1523 && (svars = get_subvars_for_var (expr)))
1524 {
1525 subvar_t sv;
1526 for (sv = svars; sv; sv = sv->next)
1527 add_stmt_operand (&sv->var, s_ann, flags);
1528 }
1529 else
1530 add_stmt_operand (expr_p, s_ann, flags);
1531
1532 return;
1533 }
1534
1535 case MISALIGNED_INDIRECT_REF:
1536 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1537 /* fall through */
1538
1539 case ALIGN_INDIRECT_REF:
1540 case INDIRECT_REF:
1541 get_indirect_ref_operands (stmt, expr, flags, NULL_TREE,
1542 0, -1, true);
1543 return;
1544
1545 case TARGET_MEM_REF:
1546 get_tmr_operands (stmt, expr, flags);
1547 return;
1548
1549 case ARRAY_RANGE_REF:
1550 /* Treat array references as references to the virtual variable
1551 representing the array. The virtual variable for an ARRAY_REF
1552 is the VAR_DECL for the array. */
1553 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1554 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1555 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1556 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1557 return;
1558
1559 case ARRAY_REF:
1560 case COMPONENT_REF:
1561 case REALPART_EXPR:
1562 case IMAGPART_EXPR:
1563 {
1564 tree ref;
1565 HOST_WIDE_INT offset, size, maxsize;
1566 bool none = true;
1567
1568 /* This component reference becomes an access to all of the
1569 subvariables it can touch, if we can determine that, but
1570 *NOT* the real one. If we can't determine which fields we
1571 could touch, the recursion will eventually get to a
1572 variable and add *all* of its subvars, or whatever is the
1573 minimum correct subset. */
1574 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
1575 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
1576 {
1577 subvar_t sv;
1578 subvar_t svars = get_subvars_for_var (ref);
1579
1580 for (sv = svars; sv; sv = sv->next)
1581 {
1582 bool exact;
1583
1584 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1585 {
1586 int subvar_flags = flags;
1587 none = false;
1588 if (!exact || size != maxsize)
1589 subvar_flags &= ~opf_kill_def;
1590 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1591 }
1592 }
1593
1594 if (!none)
1595 flags |= opf_no_vops;
1596 }
1597 else if (TREE_CODE (ref) == INDIRECT_REF)
1598 {
1599 get_indirect_ref_operands (stmt, ref, flags, expr,
1600 offset, maxsize, false);
1601 flags |= opf_no_vops;
1602 }
1603
1604 /* Even if we found subvars above, we still need to record
1605 immediate uses for d in s.a[d]; if s.a has a subvar we would
1606 miss the use of d otherwise. */
1607 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1608 flags & ~opf_kill_def);
1609
1610 if (code == COMPONENT_REF)
1611 {
1612 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1613 s_ann->has_volatile_ops = true;
1614 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1615 }
1616 else if (code == ARRAY_REF)
1617 {
1618 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1619 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1620 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1621 }
1622
1623 return;
1624 }
1625
1626 case WITH_SIZE_EXPR:
1627 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1628 and an rvalue reference to its second argument. */
1629 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1630 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1631 return;
1632
1633 case CALL_EXPR:
1634 get_call_expr_operands (stmt, expr);
1635 return;
1636
1637 case COND_EXPR:
1638 case VEC_COND_EXPR:
1639 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1640 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1641 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1642 return;
1643
1644 case MODIFY_EXPR:
1645 {
1646 int subflags;
1647 tree op;
1648
1649 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1650
1651 op = TREE_OPERAND (expr, 0);
1652 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1653 op = TREE_OPERAND (op, 0);
1654 if (TREE_CODE (op) == ARRAY_RANGE_REF
1655 || TREE_CODE (op) == REALPART_EXPR
1656 || TREE_CODE (op) == IMAGPART_EXPR)
1657 subflags = opf_is_def;
1658 else
1659 subflags = opf_is_def | opf_kill_def;
1660
1661 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
1662 return;
1663 }
1664
1665 case CONSTRUCTOR:
1666 {
1667 /* General aggregate CONSTRUCTORs have been decomposed, but they
1668 are still in use as the COMPLEX_EXPR equivalent for vectors. */
1669 constructor_elt *ce;
1670 unsigned HOST_WIDE_INT idx;
1671
1672 for (idx = 0;
1673 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
1674 idx++)
1675 get_expr_operands (stmt, &ce->value, opf_none);
1676
1677 return;
1678 }
1679
1680 case TRUTH_NOT_EXPR:
1681 case BIT_FIELD_REF:
1682 case VIEW_CONVERT_EXPR:
1683 do_unary:
1684 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1685 return;
1686
1687 case TRUTH_AND_EXPR:
1688 case TRUTH_OR_EXPR:
1689 case TRUTH_XOR_EXPR:
1690 case COMPOUND_EXPR:
1691 case OBJ_TYPE_REF:
1692 case ASSERT_EXPR:
1693 do_binary:
1694 {
1695 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1696 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1697 return;
1698 }
1699
1700 case DOT_PROD_EXPR:
1701 case REALIGN_LOAD_EXPR:
1702 {
1703 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1704 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1705 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
1706 return;
1707 }
1708
1709 case BLOCK:
1710 case FUNCTION_DECL:
1711 case EXC_PTR_EXPR:
1712 case FILTER_EXPR:
1713 case LABEL_DECL:
1714 case CONST_DECL:
1715 case OMP_PARALLEL:
1716 case OMP_SECTIONS:
1717 case OMP_FOR:
1718 case OMP_RETURN_EXPR:
1719 case OMP_SINGLE:
1720 case OMP_MASTER:
1721 case OMP_ORDERED:
1722 case OMP_CRITICAL:
1723 /* Expressions that make no memory references. */
1724 return;
1725
1726 default:
1727 if (class == tcc_unary)
1728 goto do_unary;
1729 if (class == tcc_binary || class == tcc_comparison)
1730 goto do_binary;
1731 if (class == tcc_constant || class == tcc_type)
1732 return;
1733 }
1734
1735 /* If we get here, something has gone wrong. */
1736 #ifdef ENABLE_CHECKING
1737 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
1738 debug_tree (expr);
1739 fputs ("\n", stderr);
1740 #endif
1741 gcc_unreachable ();
1742 }
1743
1744
1745 /* Parse STMT looking for operands. When finished, the various build_*
1746 operand vectors will have potential operands in them. */
1748
1749 static void
1750 parse_ssa_operands (tree stmt)
1751 {
1752 enum tree_code code;
1753
1754 code = TREE_CODE (stmt);
1755 switch (code)
1756 {
1757 case MODIFY_EXPR:
1758 /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
1759 either only part of LHS is modified or if the RHS might throw,
1760 otherwise, use V_MUST_DEF.
1761
1762 ??? If it might throw, we should represent somehow that it is killed
1763 on the fallthrough path. */
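/* Assumed examples of the LHS handling described above:

	a = x;		the whole of 'a' is stored, so the LHS is
			scanned with opf_is_def | opf_kill_def and a
			non-register 'a' gets a V_MUST_DEF;
	an ARRAY_RANGE_REF or BIT_FIELD_REF store writes only part of
	the object, so opf_kill_def is dropped and a V_MAY_DEF is used
	instead.  */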
1764 {
1765 tree lhs = TREE_OPERAND (stmt, 0);
1766 int lhs_flags = opf_is_def;
1767
1768 get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
1769
1770 /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
1771 or not the entire LHS is modified; that depends on what's
1772 inside the VIEW_CONVERT_EXPR. */
1773 if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1774 lhs = TREE_OPERAND (lhs, 0);
1775
1776 if (TREE_CODE (lhs) != ARRAY_RANGE_REF
1777 && TREE_CODE (lhs) != BIT_FIELD_REF)
1778 lhs_flags |= opf_kill_def;
1779
1780 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
1781 }
1782 break;
1783
1784 case COND_EXPR:
1785 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
1786 break;
1787
1788 case SWITCH_EXPR:
1789 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
1790 break;
1791
1792 case ASM_EXPR:
1793 get_asm_expr_operands (stmt);
1794 break;
1795
1796 case RETURN_EXPR:
1797 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
1798 break;
1799
1800 case GOTO_EXPR:
1801 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
1802 break;
1803
1804 case LABEL_EXPR:
1805 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
1806 break;
1807
1808 /* These nodes contain no variable references. */
1809 case BIND_EXPR:
1810 case CASE_LABEL_EXPR:
1811 case TRY_CATCH_EXPR:
1812 case TRY_FINALLY_EXPR:
1813 case EH_FILTER_EXPR:
1814 case CATCH_EXPR:
1815 case RESX_EXPR:
1816 break;
1817
1818 default:
1819 /* Notice that if get_expr_operands tries to use &STMT as the operand
1820 pointer (which may only happen for USE operands), we will fail in
1821 append_use. This default will handle statements like empty
1822 statements, or CALL_EXPRs that may appear on the RHS of a statement
1823 or as statements themselves. */
1824 get_expr_operands (stmt, &stmt, opf_none);
1825 break;
1826 }
1827 }
1828
1829
1830 /* Create an operands cache for STMT. */
1831
1832 static void
1833 build_ssa_operands (tree stmt)
1834 {
1835 stmt_ann_t ann = get_stmt_ann (stmt);
1836
1837 /* Initially assume that the statement has no volatile operands. */
1838 if (ann)
1839 ann->has_volatile_ops = false;
1840
1841 start_ssa_stmt_operands ();
1842
1843 parse_ssa_operands (stmt);
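 /* The finalize routines below match the freshly built virtual operands
    against any operands already cached on STMT; sorting the build vectors
    first keeps that comparison a simple linear walk.  */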
1844 operand_build_sort_virtual (build_vuses);
1845 operand_build_sort_virtual (build_v_may_defs);
1846 operand_build_sort_virtual (build_v_must_defs);
1847
1848 finalize_ssa_stmt_operands (stmt);
1849 }
1850
1851
1852 /* Free any operands vectors in OPS. */
1853 void
1854 free_ssa_operands (stmt_operands_p ops)
1855 {
1856 ops->def_ops = NULL;
1857 ops->use_ops = NULL;
1858 ops->maydef_ops = NULL;
1859 ops->mustdef_ops = NULL;
1860 ops->vuse_ops = NULL;
1861 }
1862
1863
1864 /* Get the operands of statement STMT. Note that repeated calls to
1865 get_stmt_operands for the same statement will do nothing until the
1866 statement is marked modified by a call to mark_stmt_modified(). */
1867
1868 void
1869 update_stmt_operands (tree stmt)
1870 {
1871 stmt_ann_t ann = get_stmt_ann (stmt);
1872
1873 /* If get_stmt_operands is called before SSA is initialized, don't
1874    do anything.  */
1875 if (!ssa_operands_active ())
1876 return;
1877
1878 /* The optimizers cannot handle statements that are nothing but a
1879 _DECL. This indicates a bug in the gimplifier. */
1880 gcc_assert (!SSA_VAR_P (stmt));
1881
1882 gcc_assert (ann->modified);
1883
1884 timevar_push (TV_TREE_OPS);
1885
1886 build_ssa_operands (stmt);
1887
1888 /* Clear the modified bit for STMT. Subsequent calls to
1889 get_stmt_operands for this statement will do nothing until the
1890 statement is marked modified by a call to mark_stmt_modified(). */
1891 ann->modified = 0;
1892
1893 timevar_pop (TV_TREE_OPS);
1894 }
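
 /* A typical caller-side sequence (a sketch, not code from this file):

      TREE_OPERAND (stmt, 1) = new_rhs;     change the statement in place
      mark_stmt_modified (stmt);            flag the operand cache as stale
      update_stmt_operands (stmt);          rebuild the cache

    Most callers go through a wrapper that pairs the last two steps.  */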
1895
1896
1897 /* Copies virtual operands from SRC to DST. */
1898
1899 void
1900 copy_virtual_operands (tree dest, tree src)
1901 {
1902 tree t;
1903 ssa_op_iter iter, old_iter;
1904 use_operand_p use_p, u2;
1905 def_operand_p def_p, d2;
1906
1907 build_ssa_operands (dest);
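 /* Building DEST's operand cache first leaves the build_* vectors empty;
    SRC's virtual operands are appended to them below and then committed
    to DEST, replacing whatever virtual operands DEST had.  */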
1908
1909 /* Copy all the virtual fields. */
1910 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
1911 append_vuse (t);
1912 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
1913 append_v_may_def (t);
1914 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
1915 append_v_must_def (t);
1916
1917 if (VEC_length (tree, build_vuses) == 0
1918 && VEC_length (tree, build_v_may_defs) == 0
1919 && VEC_length (tree, build_v_must_defs) == 0)
1920 return;
1921
1922 /* Now commit the virtual operands to this stmt. */
1923 finalize_ssa_v_must_defs (dest);
1924 finalize_ssa_v_may_defs (dest);
1925 finalize_ssa_vuses (dest);
1926
1927 /* Finally, set the fields to the same values as the originals.  */
1928
1929
1930 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
1931 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
1932 {
1933 gcc_assert (!op_iter_done (&old_iter));
1934 SET_USE (use_p, t);
1935 t = op_iter_next_tree (&old_iter);
1936 }
1937 gcc_assert (op_iter_done (&old_iter));
1938
1939 op_iter_init_maydef (&old_iter, src, &u2, &d2);
1940 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
1941 {
1942 gcc_assert (!op_iter_done (&old_iter));
1943 SET_USE (use_p, USE_FROM_PTR (u2));
1944 SET_DEF (def_p, DEF_FROM_PTR (d2));
1945 op_iter_next_maymustdef (&u2, &d2, &old_iter);
1946 }
1947 gcc_assert (op_iter_done (&old_iter));
1948
1949 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
1950 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
1951 {
1952 gcc_assert (!op_iter_done (&old_iter));
1953 SET_USE (use_p, USE_FROM_PTR (u2));
1954 SET_DEF (def_p, DEF_FROM_PTR (d2));
1955 op_iter_next_maymustdef (&u2, &d2, &old_iter);
1956 }
1957 gcc_assert (op_iter_done (&old_iter));
1958
1959 }
1960
1961
1962 /* Specifically for use in DOM's expression analysis.  Given a store, we
1963    create an artificial stmt which looks like a load from the store; this
1964    can be used to eliminate redundant loads.  OLD_STMT is the store stmt,
1965    and NEW_STMT is the new load which represents a load of the values
1966    stored.  */
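
/* For example (a sketch): if OLD_STMT is the store

     # a_4 = V_MAY_DEF <a_3>
     *p_1 = x_2;

   the artificial NEW_STMT is given VUSE <a_4>, i.e. it reads exactly the
   memory state produced by the store.  */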
1967
1968 void
1969 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
1970 {
1971 stmt_ann_t ann;
1972 tree op;
1973 ssa_op_iter iter;
1974 use_operand_p use_p;
1975 unsigned x;
1976
1977 ann = get_stmt_ann (new_stmt);
1978
1979 /* Process the statement looking for operands.  */
1980 start_ssa_stmt_operands ();
1981 parse_ssa_operands (new_stmt);
1982
1983 for (x = 0; x < VEC_length (tree, build_vuses); x++)
1984 {
1985 tree t = VEC_index (tree, build_vuses, x);
1986 if (TREE_CODE (t) != SSA_NAME)
1987 {
1988 var_ann_t ann = var_ann (t);
1989 ann->in_vuse_list = 0;
1990 }
1991 }
1992
1993 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
1994 {
1995 tree t = VEC_index (tree, build_v_may_defs, x);
1996 if (TREE_CODE (t) != SSA_NAME)
1997 {
1998 var_ann_t ann = var_ann (t);
1999 ann->in_v_may_def_list = 0;
2000 }
2001 }
2002
2003 /* Remove any virtual operands that were found. */
2004 VEC_truncate (tree, build_v_may_defs, 0);
2005 VEC_truncate (tree, build_v_must_defs, 0);
2006 VEC_truncate (tree, build_vuses, 0);
2007
2008 /* For each VDEF on the original statement, we want to create a
2009 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
2010 statement. */
2011 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
2012 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
2013 append_vuse (op);
2014
2015 /* Now build the operands for this new stmt. */
2016 finalize_ssa_stmt_operands (new_stmt);
2017
2018 /* All uses in this fake stmt must not be in the immediate use lists. */
2019 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2020 delink_imm_use (use_p);
2021 }
2022
2023
2024 /* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
2025    to check the validity of the swap operation.  */
2026
2027 void
2028 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2029 {
2030 tree op0, op1;
2031 op0 = *exp0;
2032 op1 = *exp1;
2033
2034 /* If the operand cache is active, attempt to preserve the relative positions
2035 of these two operands in their respective immediate use lists. */
2036 if (ssa_operands_active () && op0 != op1)
2037 {
2038 use_optype_p use0, use1, ptr;
2039 use0 = use1 = NULL;
2040
2041 /* Find the 2 operands in the cache, if they are there. */
2042 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2043 if (USE_OP_PTR (ptr)->use == exp0)
2044 {
2045 use0 = ptr;
2046 break;
2047 }
2048
2049 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2050 if (USE_OP_PTR (ptr)->use == exp1)
2051 {
2052 use1 = ptr;
2053 break;
2054 }
2055
2056 /* Unless both uses have operand entries in the cache, there isn't much
2057    we can do at this point.  Presumably we don't need to worry about it.  */
2058 if (use0 && use1)
2059 {
2060 tree *tmp = USE_OP_PTR (use1)->use;
2061 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2062 USE_OP_PTR (use0)->use = tmp;
2063 }
2064 }
2065
2066 /* Now swap the data. */
2067 *exp0 = op1;
2068 *exp1 = op0;
2069 }
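
/* Example use (a sketch, not code from this file): swapping the operands of
   a commutative RHS without rebuilding the operand cache:

     tree rhs = TREE_OPERAND (stmt, 1);
     swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0), &TREE_OPERAND (rhs, 1));

   Both operands keep their positions in their immediate use lists.  */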
2070
2071
2072 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
2073 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
2074 a single variable whose address has been taken or any other valid
2075 GIMPLE memory reference (structure reference, array, etc). If the
2076 base address of REF is a decl that has sub-variables, also add all
2077 of its sub-variables. */
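
/* For example (a sketch): given '&s.f' where 's' is a structure with
   sub-variables, the base address is 's', so every sub-variable of 's' is
   added to *ADDRESSES_TAKEN and marked TREE_ADDRESSABLE; see the PR 21407
   note below for why the whole structure must be treated this way.  */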
2078
2079 void
2080 add_to_addressable_set (tree ref, bitmap *addresses_taken)
2081 {
2082 tree var;
2083 subvar_t svars;
2084
2085 gcc_assert (addresses_taken);
2086
2087 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2088 as the only thing we take the address of. If VAR is a structure,
2089 taking the address of a field means that the whole structure may
2090 be referenced using pointer arithmetic. See PR 21407 and the
2091 ensuing mailing list discussion. */
2092 var = get_base_address (ref);
2093 if (var && SSA_VAR_P (var))
2094 {
2095 if (*addresses_taken == NULL)
2096 *addresses_taken = BITMAP_GGC_ALLOC ();
2097
2098 if (var_can_have_subvars (var)
2099 && (svars = get_subvars_for_var (var)))
2100 {
2101 subvar_t sv;
2102 for (sv = svars; sv; sv = sv->next)
2103 {
2104 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
2105 TREE_ADDRESSABLE (sv->var) = 1;
2106 }
2107 }
2108 else
2109 {
2110 bitmap_set_bit (*addresses_taken, DECL_UID (var));
2111 TREE_ADDRESSABLE (var) = 1;
2112 }
2113 }
2114 }
2115
2116
2117 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2118    Return TRUE if there is a problem.  */
2119
2120 bool
2121 verify_imm_links (FILE *f, tree var)
2122 {
2123 use_operand_p ptr, prev, list;
2124 int count;
2125
2126 gcc_assert (TREE_CODE (var) == SSA_NAME);
2127
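 /* The immediate use list is a circular, doubly linked list whose head node
    is embedded in the SSA_NAME itself; the head is a sentinel and never
    carries a use pointer.  */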
2128 list = &(SSA_NAME_IMM_USE_NODE (var));
2129 gcc_assert (list->use == NULL);
2130
2131 if (list->prev == NULL)
2132 {
2133 gcc_assert (list->next == NULL);
2134 return false;
2135 }
2136
2137 prev = list;
2138 count = 0;
2139 for (ptr = list->next; ptr != list; )
2140 {
2141 if (prev != ptr->prev)
2142 goto error;
2143
2144 if (ptr->use == NULL)
2145 goto error; /* 2 roots, or SAFE guard node. */
2146 else if (*(ptr->use) != var)
2147 goto error;
2148
2149 prev = ptr;
2150 ptr = ptr->next;
2151
2152 /* Avoid infinite loops. 50,000,000 uses probably indicates a
2153 problem. */
2154 if (count++ > 50000000)
2155 goto error;
2156 }
2157
2158 /* Verify list in the other direction. */
2159 prev = list;
2160 for (ptr = list->prev; ptr != list; )
2161 {
2162 if (prev != ptr->next)
2163 goto error;
2164 prev = ptr;
2165 ptr = ptr->prev;
2166 if (count-- < 0)
2167 goto error;
2168 }
2169
2170 if (count != 0)
2171 goto error;
2172
2173 return false;
2174
2175 error:
2176 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2177 {
2178 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2179 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2180 }
2181 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2182 (void *)ptr->use);
2183 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2184 fprintf (f, "\n");
2185 return true;
2186 }
2187
2188
2189 /* Dump all the immediate uses of VAR to FILE.  */
2190
2191 void
2192 dump_immediate_uses_for (FILE *file, tree var)
2193 {
2194 imm_use_iterator iter;
2195 use_operand_p use_p;
2196
2197 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2198
2199 print_generic_expr (file, var, TDF_SLIM);
2200 fprintf (file, " : -->");
2201 if (has_zero_uses (var))
2202 fprintf (file, " no uses.\n");
2203 else
2204 if (has_single_use (var))
2205 fprintf (file, " single use.\n");
2206 else
2207 fprintf (file, "%d uses.\n", num_imm_uses (var));
2208
2209 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2210 {
2211 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2212 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2213 else
2214 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2215 }
2216 fprintf (file, "\n");
2217 }
2218
2219
2220 /* Dump all the immediate uses to FILE. */
2221
2222 void
2223 dump_immediate_uses (FILE *file)
2224 {
2225 tree var;
2226 unsigned int x;
2227
2228 fprintf (file, "Immediate_uses: \n\n");
2229 for (x = 1; x < num_ssa_names; x++)
2230 {
2231 var = ssa_name (x);
2232 if (!var)
2233 continue;
2234 dump_immediate_uses_for (file, var);
2235 }
2236 }
2237
2238
2239 /* Dump def-use edges on stderr. */
2240
2241 void
2242 debug_immediate_uses (void)
2243 {
2244 dump_immediate_uses (stderr);
2245 }
2246
2247 /* Dump def-use edges for VAR on stderr.  */
2248
2249 void
2250 debug_immediate_uses_for (tree var)
2251 {
2252 dump_immediate_uses_for (stderr, var);
2253 }
2254
2255 #include "gt-tree-ssa-operands.h"