tree-ssa-operands.c: Cleanup whitespace.
[gcc.git] / gcc / tree-ssa-operands.c
1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43 The operand type are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may, or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53 The operand tree is the parsed by the various get_* routines which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands. Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
59
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
62
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
66
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had SSA_NAME version of a
72 variable, and that same variable occurs in the same operands cache, then
73 the new cache vector will also get the same SSA_NAME.
74
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'. */
78
79 /* Flags to describe operand properties in helpers. */
80
81 /* By default, operands are loaded. */
82 #define opf_none 0
83
84 /* Operand is the target of an assignment expression or a
85 call-clobbered variable */
86 #define opf_is_def (1 << 0)
87
88 /* Operand is the target of an assignment expression. */
89 #define opf_kill_def (1 << 1)
90
91 /* No virtual operands should be created in the expression. This is used
92 when traversing ADDR_EXPR nodes which have different semantics than
93 other expressions. Inside an ADDR_EXPR node, the only operands that we
94 need to consider are indices into arrays. For instance, &a.b[i] should
95 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
96 VUSE for 'b'. */
97 #define opf_no_vops (1 << 2)
98
99 /* Operand is a "non-specific" kill for call-clobbers and such. This is used
100 to distinguish "reset the world" events from explicit MODIFY_EXPRs. */
101 #define opf_non_specific (1 << 3)
102
103 /* Array for building all the def operands. */
104 static VEC(tree,heap) *build_defs;
105
106 /* Array for building all the use operands. */
107 static VEC(tree,heap) *build_uses;
108
109 /* Array for building all the v_may_def operands. */
110 static VEC(tree,heap) *build_v_may_defs;
111
112 /* Array for building all the vuse operands. */
113 static VEC(tree,heap) *build_vuses;
114
115 /* Array for building all the v_must_def operands. */
116 static VEC(tree,heap) *build_v_must_defs;
117
118 /* These arrays are the cached operand vectors for call clobbered calls. */
119 static bool ops_active = false;
120
121 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
122 static unsigned operand_memory_index;
123
124 static void get_expr_operands (tree, tree *, int);
125
126 static def_optype_p free_defs = NULL;
127 static use_optype_p free_uses = NULL;
128 static vuse_optype_p free_vuses = NULL;
129 static maydef_optype_p free_maydefs = NULL;
130 static mustdef_optype_p free_mustdefs = NULL;
131
132
133 /* Return the DECL_UID of the base variable of T. */
134
135 static inline unsigned
136 get_name_decl (tree t)
137 {
138 if (TREE_CODE (t) != SSA_NAME)
139 return DECL_UID (t);
140 else
141 return DECL_UID (SSA_NAME_VAR (t));
142 }
143
144
145 /* Comparison function for qsort used in operand_build_sort_virtual. */
146
147 static int
148 operand_build_cmp (const void *p, const void *q)
149 {
150 tree e1 = *((const tree *)p);
151 tree e2 = *((const tree *)q);
152 unsigned int u1,u2;
153
154 u1 = get_name_decl (e1);
155 u2 = get_name_decl (e2);
156
157 /* We want to sort in ascending order. They can never be equal. */
158 #ifdef ENABLE_CHECKING
159 gcc_assert (u1 != u2);
160 #endif
161 return (u1 > u2 ? 1 : -1);
162 }
163
164
165 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
166
167 static inline void
168 operand_build_sort_virtual (VEC(tree,heap) *list)
169 {
170 int num = VEC_length (tree, list);
171 if (num < 2)
172 return;
173 if (num == 2)
174 {
175 if (get_name_decl (VEC_index (tree, list, 0))
176 > get_name_decl (VEC_index (tree, list, 1)))
177 {
178 /* Swap elements if in the wrong order. */
179 tree tmp = VEC_index (tree, list, 0);
180 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
181 VEC_replace (tree, list, 1, tmp);
182 }
183 return;
184 }
185 /* There are 3 or more elements, call qsort. */
186 qsort (VEC_address (tree, list),
187 VEC_length (tree, list),
188 sizeof (tree),
189 operand_build_cmp);
190 }
191
192
193 /* Return true if the ssa operands cache is active. */
194
195 bool
196 ssa_operands_active (void)
197 {
198 return ops_active;
199 }
200
201
202 /* Structure storing statistics on how many call clobbers we have, and
203 how many where avoided. */
204
205 static struct
206 {
207 /* Number of call-clobbered ops we attempt to add to calls in
208 add_call_clobber_ops. */
209 unsigned int clobbered_vars;
210
211 /* Number of write-clobbers (v_may_defs) avoided by using
212 not_written information. */
213 unsigned int static_write_clobbers_avoided;
214
215 /* Number of reads (vuses) avoided by using not_read
216 information. */
217 unsigned int static_read_clobbers_avoided;
218
219 /* Number of write-clobbers avoided because the variable can't escape to
220 this call. */
221 unsigned int unescapable_clobbers_avoided;
222
223 /* Number of readonly uses we attempt to add to calls in
224 add_call_read_ops. */
225 unsigned int readonly_clobbers;
226
227 /* Number of readonly uses we avoid using not_read information. */
228 unsigned int static_readonly_clobbers_avoided;
229 } clobber_stats;
230
231
232 /* Initialize the operand cache routines. */
233
234 void
235 init_ssa_operands (void)
236 {
237 build_defs = VEC_alloc (tree, heap, 5);
238 build_uses = VEC_alloc (tree, heap, 10);
239 build_vuses = VEC_alloc (tree, heap, 25);
240 build_v_may_defs = VEC_alloc (tree, heap, 25);
241 build_v_must_defs = VEC_alloc (tree, heap, 25);
242
243 gcc_assert (operand_memory == NULL);
244 operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
245 ops_active = true;
246 memset (&clobber_stats, 0, sizeof (clobber_stats));
247
248 }
249
250
251 /* Dispose of anything required by the operand routines. */
252
253 void
254 fini_ssa_operands (void)
255 {
256 struct ssa_operand_memory_d *ptr;
257 VEC_free (tree, heap, build_defs);
258 VEC_free (tree, heap, build_uses);
259 VEC_free (tree, heap, build_v_must_defs);
260 VEC_free (tree, heap, build_v_may_defs);
261 VEC_free (tree, heap, build_vuses);
262 free_defs = NULL;
263 free_uses = NULL;
264 free_vuses = NULL;
265 free_maydefs = NULL;
266 free_mustdefs = NULL;
267 while ((ptr = operand_memory) != NULL)
268 {
269 operand_memory = operand_memory->next;
270 ggc_free (ptr);
271 }
272
273 ops_active = false;
274
275 if (dump_file && (dump_flags & TDF_STATS))
276 {
277 fprintf (dump_file, "Original clobbered vars:%d\n",
278 clobber_stats.clobbered_vars);
279 fprintf (dump_file, "Static write clobbers avoided:%d\n",
280 clobber_stats.static_write_clobbers_avoided);
281 fprintf (dump_file, "Static read clobbers avoided:%d\n",
282 clobber_stats.static_read_clobbers_avoided);
283 fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
284 clobber_stats.unescapable_clobbers_avoided);
285 fprintf (dump_file, "Original readonly clobbers:%d\n",
286 clobber_stats.readonly_clobbers);
287 fprintf (dump_file, "Static readonly clobbers avoided:%d\n",
288 clobber_stats.static_readonly_clobbers_avoided);
289 }
290 }
291
292
293 /* Return memory for operands of SIZE chunks. */
294
295 static inline void *
296 ssa_operand_alloc (unsigned size)
297 {
298 char *ptr;
299 if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
300 {
301 struct ssa_operand_memory_d *ptr;
302 ptr = GGC_NEW (struct ssa_operand_memory_d);
303 ptr->next = operand_memory;
304 operand_memory = ptr;
305 operand_memory_index = 0;
306 }
307 ptr = &(operand_memory->mem[operand_memory_index]);
308 operand_memory_index += size;
309 return ptr;
310 }
311
312
313 /* Make sure PTR is in the correct immediate use list. Since uses are simply
314 pointers into the stmt TREE, there is no way of telling if anyone has
315 changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
316 The contents are different, but the pointer is still the same. This
317 routine will check to make sure PTR is in the correct list, and if it isn't
318 put it in the correct list. We cannot simply check the previous node
319 because all nodes in the same stmt might have be changed. */
320
321 static inline void
322 correct_use_link (use_operand_p ptr, tree stmt)
323 {
324 use_operand_p prev;
325 tree root;
326
327 /* Fold_stmt () may have changed the stmt pointers. */
328 if (ptr->stmt != stmt)
329 ptr->stmt = stmt;
330
331 prev = ptr->prev;
332 if (prev)
333 {
334 /* Find the root element, making sure we skip any safe iterators. */
335 while (prev->use != NULL || prev->stmt == NULL)
336 prev = prev->prev;
337
338 /* Get the ssa_name of the list the node is in. */
339 root = prev->stmt;
340 /* If it's the right list, simply return. */
341 if (root == *(ptr->use))
342 return;
343 }
344
345 /* It is in the wrong list if we reach here. */
346 delink_imm_use (ptr);
347 link_imm_use (ptr, *(ptr->use));
348 }
349
350
351 /* This routine makes sure that PTR is in an immediate use list, and makes
352 sure the stmt pointer is set to the current stmt. Virtual uses do not need
353 the overhead of correct_use_link since they cannot be directly manipulated
354 like a real use can be. (They don't exist in the TREE_OPERAND nodes.) */
355
356 static inline void
357 set_virtual_use_link (use_operand_p ptr, tree stmt)
358 {
359 /* Fold_stmt () may have changed the stmt pointers. */
360 if (ptr->stmt != stmt)
361 ptr->stmt = stmt;
362
363 /* If this use isn't in a list, add it to the correct list. */
364 if (!ptr->prev)
365 link_imm_use (ptr, *(ptr->use));
366 }
367
368
369 #define FINALIZE_OPBUILD build_defs
370 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
371 build_defs, (I))
372 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
373 build_defs, (I))
374 #define FINALIZE_FUNC finalize_ssa_def_ops
375 #define FINALIZE_ALLOC alloc_def
376 #define FINALIZE_FREE free_defs
377 #define FINALIZE_TYPE struct def_optype_d
378 #define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
379 #define FINALIZE_OPS DEF_OPS
380 #define FINALIZE_BASE(VAR) VAR
381 #define FINALIZE_BASE_TYPE tree *
382 #define FINALIZE_BASE_ZERO NULL
383 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
384 #include "tree-ssa-opfinalize.h"
385
386
387 /* This routine will create stmt operands for STMT from the def build list. */
388
389 static void
390 finalize_ssa_defs (tree stmt)
391 {
392 unsigned int num = VEC_length (tree, build_defs);
393
394 /* There should only be a single real definition per assignment. */
395 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
396
397 /* If there is an old list, often the new list is identical, or close, so
398 find the elements at the beginning that are the same as the vector. */
399 finalize_ssa_def_ops (stmt);
400 VEC_truncate (tree, build_defs, 0);
401 }
402
403 #define FINALIZE_OPBUILD build_uses
404 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
405 build_uses, (I))
406 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
407 build_uses, (I))
408 #define FINALIZE_FUNC finalize_ssa_use_ops
409 #define FINALIZE_ALLOC alloc_use
410 #define FINALIZE_FREE free_uses
411 #define FINALIZE_TYPE struct use_optype_d
412 #define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
413 #define FINALIZE_OPS USE_OPS
414 #define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
415 #define FINALIZE_CORRECT_USE correct_use_link
416 #define FINALIZE_BASE(VAR) VAR
417 #define FINALIZE_BASE_TYPE tree *
418 #define FINALIZE_BASE_ZERO NULL
419 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
420 (PTR)->use_ptr.use = (VAL); \
421 link_imm_use_stmt (&((PTR)->use_ptr), \
422 *(VAL), (STMT))
423 #include "tree-ssa-opfinalize.h"
424
425 /* Return a new use operand vector for STMT, comparing to OLD_OPS_P. */
426
427 static void
428 finalize_ssa_uses (tree stmt)
429 {
430 #ifdef ENABLE_CHECKING
431 {
432 unsigned x;
433 unsigned num = VEC_length (tree, build_uses);
434
435 /* If the pointer to the operand is the statement itself, something is
436 wrong. It means that we are pointing to a local variable (the
437 initial call to get_stmt_operands does not pass a pointer to a
438 statement). */
439 for (x = 0; x < num; x++)
440 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
441 }
442 #endif
443 finalize_ssa_use_ops (stmt);
444 VEC_truncate (tree, build_uses, 0);
445 }
446
447
448 /* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P. */
449 #define FINALIZE_OPBUILD build_v_may_defs
450 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_may_defs, (I))
451 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
452 build_v_may_defs, (I)))
453 #define FINALIZE_FUNC finalize_ssa_v_may_def_ops
454 #define FINALIZE_ALLOC alloc_maydef
455 #define FINALIZE_FREE free_maydefs
456 #define FINALIZE_TYPE struct maydef_optype_d
457 #define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
458 #define FINALIZE_OPS MAYDEF_OPS
459 #define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
460 #define FINALIZE_CORRECT_USE set_virtual_use_link
461 #define FINALIZE_BASE_ZERO 0
462 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
463 #define FINALIZE_BASE_TYPE unsigned
464 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
465 (PTR)->def_var = (VAL); \
466 (PTR)->use_var = (VAL); \
467 (PTR)->use_ptr.use = &((PTR)->use_var); \
468 link_imm_use_stmt (&((PTR)->use_ptr), \
469 (VAL), (STMT))
470 #include "tree-ssa-opfinalize.h"
471
472
473 static void
474 finalize_ssa_v_may_defs (tree stmt)
475 {
476 finalize_ssa_v_may_def_ops (stmt);
477 }
478
479
480 /* Clear the in_list bits and empty the build array for v_may_defs. */
481
482 static inline void
483 cleanup_v_may_defs (void)
484 {
485 unsigned x, num;
486 num = VEC_length (tree, build_v_may_defs);
487
488 for (x = 0; x < num; x++)
489 {
490 tree t = VEC_index (tree, build_v_may_defs, x);
491 if (TREE_CODE (t) != SSA_NAME)
492 {
493 var_ann_t ann = var_ann (t);
494 ann->in_v_may_def_list = 0;
495 }
496 }
497 VEC_truncate (tree, build_v_may_defs, 0);
498 }
499
500
501 #define FINALIZE_OPBUILD build_vuses
502 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_vuses, (I))
503 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
504 build_vuses, (I)))
505 #define FINALIZE_FUNC finalize_ssa_vuse_ops
506 #define FINALIZE_ALLOC alloc_vuse
507 #define FINALIZE_FREE free_vuses
508 #define FINALIZE_TYPE struct vuse_optype_d
509 #define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
510 #define FINALIZE_OPS VUSE_OPS
511 #define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
512 #define FINALIZE_CORRECT_USE set_virtual_use_link
513 #define FINALIZE_BASE_ZERO 0
514 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
515 #define FINALIZE_BASE_TYPE unsigned
516 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
517 (PTR)->use_var = (VAL); \
518 (PTR)->use_ptr.use = &((PTR)->use_var); \
519 link_imm_use_stmt (&((PTR)->use_ptr), \
520 (VAL), (STMT))
521 #include "tree-ssa-opfinalize.h"
522
523
524 /* Return a new vuse operand vector, comparing to OLD_OPS_P. */
525
526 static void
527 finalize_ssa_vuses (tree stmt)
528 {
529 unsigned num, num_v_may_defs;
530 unsigned vuse_index;
531
532 /* Remove superfluous VUSE operands. If the statement already has a
533 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
534 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
535 suppose that variable 'a' is aliased:
536
537 # VUSE <a_2>
538 # a_3 = V_MAY_DEF <a_2>
539 a = a + 1;
540
541 The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
542 operation. */
543
544 num = VEC_length (tree, build_vuses);
545 num_v_may_defs = VEC_length (tree, build_v_may_defs);
546
547 if (num > 0 && num_v_may_defs > 0)
548 {
549 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
550 {
551 tree vuse;
552 vuse = VEC_index (tree, build_vuses, vuse_index);
553 if (TREE_CODE (vuse) != SSA_NAME)
554 {
555 var_ann_t ann = var_ann (vuse);
556 ann->in_vuse_list = 0;
557 if (ann->in_v_may_def_list)
558 {
559 VEC_ordered_remove (tree, build_vuses, vuse_index);
560 continue;
561 }
562 }
563 vuse_index++;
564 }
565 }
566 else
567 /* Clear out the in_list bits. */
568 for (vuse_index = 0;
569 vuse_index < VEC_length (tree, build_vuses);
570 vuse_index++)
571 {
572 tree t = VEC_index (tree, build_vuses, vuse_index);
573 if (TREE_CODE (t) != SSA_NAME)
574 {
575 var_ann_t ann = var_ann (t);
576 ann->in_vuse_list = 0;
577 }
578 }
579
580 finalize_ssa_vuse_ops (stmt);
581 /* The v_may_def build vector wasn't cleaned up because we needed it. */
582 cleanup_v_may_defs ();
583
584 /* Free the vuses build vector. */
585 VEC_truncate (tree, build_vuses, 0);
586
587 }
588
589 /* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P. */
590
591 #define FINALIZE_OPBUILD build_v_must_defs
592 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_must_defs, (I))
593 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
594 build_v_must_defs, (I)))
595 #define FINALIZE_FUNC finalize_ssa_v_must_def_ops
596 #define FINALIZE_ALLOC alloc_mustdef
597 #define FINALIZE_FREE free_mustdefs
598 #define FINALIZE_TYPE struct mustdef_optype_d
599 #define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
600 #define FINALIZE_OPS MUSTDEF_OPS
601 #define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
602 #define FINALIZE_CORRECT_USE set_virtual_use_link
603 #define FINALIZE_BASE_ZERO 0
604 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
605 #define FINALIZE_BASE_TYPE unsigned
606 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
607 (PTR)->def_var = (VAL); \
608 (PTR)->kill_var = (VAL); \
609 (PTR)->use_ptr.use = &((PTR)->kill_var);\
610 link_imm_use_stmt (&((PTR)->use_ptr), \
611 (VAL), (STMT))
612 #include "tree-ssa-opfinalize.h"
613
614
615 static void
616 finalize_ssa_v_must_defs (tree stmt)
617 {
618 /* In the presence of subvars, there may be more than one V_MUST_DEF per
619 statement (one for each subvar). It is a bit expensive to verify that
620 all must-defs in a statement belong to subvars if there is more than one
621 MUST-def, so we don't do it. Suffice to say, if you reach here without
622 having subvars, and have num >1, you have hit a bug. */
623 finalize_ssa_v_must_def_ops (stmt);
624 VEC_truncate (tree, build_v_must_defs, 0);
625 }
626
627
628 /* Finalize all the build vectors, fill the new ones into INFO. */
629
630 static inline void
631 finalize_ssa_stmt_operands (tree stmt)
632 {
633 finalize_ssa_defs (stmt);
634 finalize_ssa_uses (stmt);
635 finalize_ssa_v_must_defs (stmt);
636 finalize_ssa_v_may_defs (stmt);
637 finalize_ssa_vuses (stmt);
638 }
639
640
641 /* Start the process of building up operands vectors in INFO. */
642
643 static inline void
644 start_ssa_stmt_operands (void)
645 {
646 gcc_assert (VEC_length (tree, build_defs) == 0);
647 gcc_assert (VEC_length (tree, build_uses) == 0);
648 gcc_assert (VEC_length (tree, build_vuses) == 0);
649 gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
650 gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
651 }
652
653
654 /* Add DEF_P to the list of pointers to operands. */
655
656 static inline void
657 append_def (tree *def_p)
658 {
659 VEC_safe_push (tree, heap, build_defs, (tree)def_p);
660 }
661
662
663 /* Add USE_P to the list of pointers to operands. */
664
665 static inline void
666 append_use (tree *use_p)
667 {
668 VEC_safe_push (tree, heap, build_uses, (tree)use_p);
669 }
670
671
672 /* Add a new virtual may def for variable VAR to the build array. */
673
674 static inline void
675 append_v_may_def (tree var)
676 {
677 if (TREE_CODE (var) != SSA_NAME)
678 {
679 var_ann_t ann = get_var_ann (var);
680
681 /* Don't allow duplicate entries. */
682 if (ann->in_v_may_def_list)
683 return;
684 ann->in_v_may_def_list = 1;
685 }
686
687 VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
688 }
689
690
691 /* Add VAR to the list of virtual uses. */
692
693 static inline void
694 append_vuse (tree var)
695 {
696
697 /* Don't allow duplicate entries. */
698 if (TREE_CODE (var) != SSA_NAME)
699 {
700 var_ann_t ann = get_var_ann (var);
701
702 if (ann->in_vuse_list || ann->in_v_may_def_list)
703 return;
704 ann->in_vuse_list = 1;
705 }
706
707 VEC_safe_push (tree, heap, build_vuses, (tree)var);
708 }
709
710
711 /* Add VAR to the list of virtual must definitions for INFO. */
712
713 static inline void
714 append_v_must_def (tree var)
715 {
716 unsigned i;
717
718 /* Don't allow duplicate entries. */
719 for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
720 if (var == VEC_index (tree, build_v_must_defs, i))
721 return;
722
723 VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
724 }
725
726
727 /* REF is a tree that contains the entire pointer dereference
728 expression, if available, or NULL otherwise. ALIAS is the variable
729 we are asking if REF can access. OFFSET and SIZE come from the
730 memory access expression that generated this virtual operand.
731 FOR_CLOBBER is true is this is adding a virtual operand for a call
732 clobber. */
733
734 static bool
735 access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
736 HOST_WIDE_INT size)
737 {
738 bool offsetgtz = offset > 0;
739 unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
740 tree base = ref ? get_base_address (ref) : NULL;
741
742 /* If ALIAS is an SFT, it can't be touched if the offset
743 and size of the access is not overlapping with the SFT offset and
744 size. This is only true if we are accessing through a pointer
745 to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
746 be accessing through a pointer to some substruct of the
747 structure, and if we try to prune there, we will have the wrong
748 offset, and get the wrong answer.
749 i.e., we can't prune without more work if we have something like
750
751 struct gcc_target
752 {
753 struct asm_out
754 {
755 const char *byte_op;
756 struct asm_int_op
757 {
758 const char *hi;
759 } aligned_op;
760 } asm_out;
761 } targetm;
762
763 foo = &targetm.asm_out.aligned_op;
764 return foo->hi;
765
766 SFT.1, which represents hi, will have SFT_OFFSET=32 because in
767 terms of SFT_PARENT_VAR, that is where it is.
768 However, the access through the foo pointer will be at offset 0. */
769 if (size != -1
770 && TREE_CODE (alias) == STRUCT_FIELD_TAG
771 && base
772 && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
773 && !overlap_subvar (offset, size, alias, NULL))
774 {
775 #ifdef ACCESS_DEBUGGING
776 fprintf (stderr, "Access to ");
777 print_generic_expr (stderr, ref, 0);
778 fprintf (stderr, " may not touch ");
779 print_generic_expr (stderr, alias, 0);
780 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
781 #endif
782 return false;
783 }
784
785 /* Without strict aliasing, it is impossible for a component access
786 through a pointer to touch a random variable, unless that
787 variable *is* a structure or a pointer.
788
789 That is, given p->c, and some random global variable b,
790 there is no legal way that p->c could be an access to b.
791
792 Without strict aliasing on, we consider it legal to do something
793 like:
794
795 struct foos { int l; };
796 int foo;
797 static struct foos *getfoo(void);
798 int main (void)
799 {
800 struct foos *f = getfoo();
801 f->l = 1;
802 foo = 2;
803 if (f->l == 1)
804 abort();
805 exit(0);
806 }
807 static struct foos *getfoo(void)
808 { return (struct foos *)&foo; }
809
810 (taken from 20000623-1.c)
811 */
812 else if (ref
813 && flag_strict_aliasing
814 && TREE_CODE (ref) != INDIRECT_REF
815 && !MTAG_P (alias)
816 && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
817 && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
818 && !POINTER_TYPE_P (TREE_TYPE (alias)))
819 {
820 #ifdef ACCESS_DEBUGGING
821 fprintf (stderr, "Access to ");
822 print_generic_expr (stderr, ref, 0);
823 fprintf (stderr, " may not touch ");
824 print_generic_expr (stderr, alias, 0);
825 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
826 #endif
827 return false;
828 }
829
830 /* If the offset of the access is greater than the size of one of
831 the possible aliases, it can't be touching that alias, because it
832 would be past the end of the structure. */
833 else if (ref
834 && flag_strict_aliasing
835 && TREE_CODE (ref) != INDIRECT_REF
836 && !MTAG_P (alias)
837 && !POINTER_TYPE_P (TREE_TYPE (alias))
838 && offsetgtz
839 && DECL_SIZE (alias)
840 && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
841 && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
842 {
843 #ifdef ACCESS_DEBUGGING
844 fprintf (stderr, "Access to ");
845 print_generic_expr (stderr, ref, 0);
846 fprintf (stderr, " may not touch ");
847 print_generic_expr (stderr, alias, 0);
848 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
849 #endif
850 return false;
851 }
852
853 return true;
854 }
855
856
857 /* Add VAR to the virtual operands array. FLAGS is as in
858 get_expr_operands. FULL_REF is a tree that contains the entire
859 pointer dereference expression, if available, or NULL otherwise.
860 OFFSET and SIZE come from the memory access expression that
861 generated this virtual operand. FOR_CLOBBER is true is this is
862 adding a virtual operand for a call clobber. */
863
864 static void
865 add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
866 tree full_ref, HOST_WIDE_INT offset,
867 HOST_WIDE_INT size, bool for_clobber)
868 {
869 VEC(tree,gc) *aliases;
870 tree sym;
871 var_ann_t v_ann;
872
873 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
874 v_ann = var_ann (sym);
875
876 /* Mark statements with volatile operands. Optimizers should back
877 off from statements having volatile operands. */
878 if (TREE_THIS_VOLATILE (sym) && s_ann)
879 s_ann->has_volatile_ops = true;
880
881 /* If the variable cannot be modified and this is a V_MAY_DEF change
882 it into a VUSE. This happens when read-only variables are marked
883 call-clobbered and/or aliased to writable variables. So we only
884 check that this only happens on non-specific stores.
885
886 Note that if this is a specific store, i.e. associated with a
887 modify_expr, then we can't suppress the V_MAY_DEF, lest we run
888 into validation problems.
889
890 This can happen when programs cast away const, leaving us with a
891 store to read-only memory. If the statement is actually executed
892 at runtime, then the program is ill formed. If the statement is
893 not executed then all is well. At the very least, we cannot ICE. */
894 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
895 flags &= ~(opf_is_def | opf_kill_def);
896
897 /* The variable is not a GIMPLE register. Add it (or its aliases) to
898 virtual operands, unless the caller has specifically requested
899 not to add virtual operands (used when adding operands inside an
900 ADDR_EXPR expression). */
901 if (flags & opf_no_vops)
902 return;
903
904 aliases = v_ann->may_aliases;
905 if (aliases == NULL)
906 {
907 /* The variable is not aliased or it is an alias tag. */
908 if (flags & opf_is_def)
909 {
910 if (flags & opf_kill_def)
911 {
912 /* V_MUST_DEF for non-aliased, non-GIMPLE register
913 variable definitions. */
914 gcc_assert (!MTAG_P (var)
915 || TREE_CODE (var) == STRUCT_FIELD_TAG);
916 append_v_must_def (var);
917 }
918 else
919 {
920 /* Add a V_MAY_DEF for call-clobbered variables and
921 memory tags. */
922 append_v_may_def (var);
923 }
924 }
925 else
926 append_vuse (var);
927 }
928 else
929 {
930 unsigned i;
931 tree al;
932
933 /* The variable is aliased. Add its aliases to the virtual
934 operands. */
935 gcc_assert (VEC_length (tree, aliases) != 0);
936
937 if (flags & opf_is_def)
938 {
939
940 bool none_added = true;
941
942 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
943 {
944 if (!access_can_touch_variable (full_ref, al, offset, size))
945 continue;
946
947 none_added = false;
948 append_v_may_def (al);
949 }
950
951 /* If the variable is also an alias tag, add a virtual
952 operand for it, otherwise we will miss representing
953 references to the members of the variable's alias set.
954 This fixes the bug in gcc.c-torture/execute/20020503-1.c.
955
956 It is also necessary to add bare defs on clobbers for
957 TMT's, so that bare TMT uses caused by pruning all the
958 aliases will link up properly with calls. In order to
959 keep the number of these bare defs we add down to the
960 minimum necessary, we keep track of which TMT's were used
961 alone in statement defs or vuses. */
962 if (v_ann->is_aliased
963 || none_added
964 || (TREE_CODE (var) == TYPE_MEMORY_TAG && for_clobber
965 && TMT_USED_ALONE (var)))
966 {
967 /* Every bare tmt def we add should have TMT_USED_ALONE
968 set on it, or else we will get the wrong answer on
969 clobbers. */
970
971 if (none_added && !updating_used_alone && aliases_computed_p
972 && TREE_CODE (var) == TYPE_MEMORY_TAG)
973 gcc_assert (TMT_USED_ALONE (var));
974
975 append_v_may_def (var);
976 }
977 }
978 else
979 {
980 bool none_added = true;
981 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
982 {
983 if (!access_can_touch_variable (full_ref, al, offset, size))
984 continue;
985 none_added = false;
986 append_vuse (al);
987 }
988
989 /* Similarly, append a virtual uses for VAR itself, when
990 it is an alias tag. */
991 if (v_ann->is_aliased || none_added)
992 append_vuse (var);
993 }
994 }
995 }
996
997
998 /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
999 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1000 the statement's real operands, otherwise it is added to virtual
1001 operands. */
1002
1003 static void
1004 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1005 {
1006 bool is_real_op;
1007 tree var, sym;
1008 var_ann_t v_ann;
1009
1010 var = *var_p;
1011 gcc_assert (SSA_VAR_P (var));
1012
1013 is_real_op = is_gimple_reg (var);
1014
1015 /* If this is a real operand, the operand is either an SSA name or a
1016 decl. Virtual operands may only be decls. */
1017 gcc_assert (is_real_op || DECL_P (var));
1018
1019 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1020 v_ann = var_ann (sym);
1021
1022 /* Mark statements with volatile operands. Optimizers should back
1023 off from statements having volatile operands. */
1024 if (TREE_THIS_VOLATILE (sym) && s_ann)
1025 s_ann->has_volatile_ops = true;
1026
1027 if (is_real_op)
1028 {
1029 /* The variable is a GIMPLE register. Add it to real operands. */
1030 if (flags & opf_is_def)
1031 append_def (var_p);
1032 else
1033 append_use (var_p);
1034 }
1035 else
1036 add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
1037 }
1038
1039
1040 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1041 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1042
1043 STMT is the statement being processed, EXPR is the INDIRECT_REF
1044 that got us here.
1045
1046 FLAGS is as in get_expr_operands.
1047
1048 FULL_REF contains the full pointer dereference expression, if we
1049 have it, or NULL otherwise.
1050
1051 OFFSET and SIZE are the location of the access inside the
1052 dereferenced pointer, if known.
1053
1054 RECURSE_ON_BASE should be set to true if we want to continue
1055 calling get_expr_operands on the base pointer, and false if
1056 something else will do it for us. */
1057
1058 static void
1059 get_indirect_ref_operands (tree stmt, tree expr, int flags,
1060 tree full_ref,
1061 HOST_WIDE_INT offset, HOST_WIDE_INT size,
1062 bool recurse_on_base)
1063 {
1064 tree *pptr = &TREE_OPERAND (expr, 0);
1065 tree ptr = *pptr;
1066 stmt_ann_t s_ann = stmt_ann (stmt);
1067
1068 /* Stores into INDIRECT_REF operands are never killing definitions. */
1069 flags &= ~opf_kill_def;
1070
1071 if (SSA_VAR_P (ptr))
1072 {
1073 struct ptr_info_def *pi = NULL;
1074
1075 /* If PTR has flow-sensitive points-to information, use it. */
1076 if (TREE_CODE (ptr) == SSA_NAME
1077 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1078 && pi->name_mem_tag)
1079 {
1080 /* PTR has its own memory tag. Use it. */
1081 add_virtual_operand (pi->name_mem_tag, s_ann, flags,
1082 full_ref, offset, size, false);
1083 }
1084 else
1085 {
1086 /* If PTR is not an SSA_NAME or it doesn't have a name
1087 tag, use its type memory tag. */
1088 var_ann_t v_ann;
1089
1090 /* If we are emitting debugging dumps, display a warning if
1091 PTR is an SSA_NAME with no flow-sensitive alias
1092 information. That means that we may need to compute
1093 aliasing again. */
1094 if (dump_file
1095 && TREE_CODE (ptr) == SSA_NAME
1096 && pi == NULL)
1097 {
1098 fprintf (dump_file,
1099 "NOTE: no flow-sensitive alias info for ");
1100 print_generic_expr (dump_file, ptr, dump_flags);
1101 fprintf (dump_file, " in ");
1102 print_generic_stmt (dump_file, stmt, dump_flags);
1103 }
1104
1105 if (TREE_CODE (ptr) == SSA_NAME)
1106 ptr = SSA_NAME_VAR (ptr);
1107 v_ann = var_ann (ptr);
1108
1109 if (v_ann->type_mem_tag)
1110 add_virtual_operand (v_ann->type_mem_tag, s_ann, flags,
1111 full_ref, offset, size, false);
1112 }
1113 }
1114 else if (TREE_CODE (ptr) == INTEGER_CST)
1115 {
1116 /* If a constant is used as a pointer, we can't generate a real
1117 operand for it but we mark the statement volatile to prevent
1118 optimizations from messing things up. */
1119 if (s_ann)
1120 s_ann->has_volatile_ops = true;
1121 return;
1122 }
1123 else
1124 {
1125 /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
1126 gcc_unreachable ();
1127 }
1128
1129 /* If requested, add a USE operand for the base pointer. */
1130 if (recurse_on_base)
1131 get_expr_operands (stmt, pptr, opf_none);
1132 }
1133
1134
1135 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1136
1137 static void
1138 get_tmr_operands (tree stmt, tree expr, int flags)
1139 {
1140 tree tag = TMR_TAG (expr), ref;
1141 HOST_WIDE_INT offset, size, maxsize;
1142 subvar_t svars, sv;
1143 stmt_ann_t s_ann = stmt_ann (stmt);
1144
1145 /* First record the real operands. */
1146 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1147 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1148
1149 /* MEM_REFs should never be killing. */
1150 flags &= ~opf_kill_def;
1151
1152 if (TMR_SYMBOL (expr))
1153 {
1154 stmt_ann_t ann = stmt_ann (stmt);
1155 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1156 }
1157
1158 if (!tag)
1159 {
1160 /* Something weird, so ensure that we will be careful. */
1161 stmt_ann (stmt)->has_volatile_ops = true;
1162 return;
1163 }
1164
1165 if (DECL_P (tag))
1166 {
1167 get_expr_operands (stmt, &tag, flags);
1168 return;
1169 }
1170
1171 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1172 gcc_assert (ref != NULL_TREE);
1173 svars = get_subvars_for_var (ref);
1174 for (sv = svars; sv; sv = sv->next)
1175 {
1176 bool exact;
1177 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1178 {
1179 int subvar_flags = flags;
1180 if (!exact || size != maxsize)
1181 subvar_flags &= ~opf_kill_def;
1182 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1183 }
1184 }
1185 }
1186
1187
1188 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1189 clobbered variables in the function. */
1190
1191 static void
1192 add_call_clobber_ops (tree stmt, tree callee)
1193 {
1194 unsigned u;
1195 bitmap_iterator bi;
1196 stmt_ann_t s_ann = stmt_ann (stmt);
1197 bitmap not_read_b, not_written_b;
1198
1199 /* Functions that are not const, pure or never return may clobber
1200 call-clobbered variables. */
1201 if (s_ann)
1202 s_ann->makes_clobbering_call = true;
1203
1204 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1205 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1206 if (global_var)
1207 {
1208 add_stmt_operand (&global_var, s_ann, opf_is_def);
1209 return;
1210 }
1211
1212 /* Get info for local and module level statics. There is a bit
1213 set for each static if the call being processed does not read
1214 or write that variable. */
1215 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1216 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1217 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1218 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1219 {
1220 tree var = referenced_var_lookup (u);
1221 unsigned int escape_mask = var_ann (var)->escape_mask;
1222 tree real_var = var;
1223 bool not_read;
1224 bool not_written;
1225
1226 /* Not read and not written are computed on regular vars, not
1227 subvars, so look at the parent var if this is an SFT. */
1228 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1229 real_var = SFT_PARENT_VAR (var);
1230
1231 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1232 DECL_UID (real_var)) : false;
1233 not_written = not_written_b ? bitmap_bit_p (not_written_b,
1234 DECL_UID (real_var)) : false;
1235 gcc_assert (!unmodifiable_var_p (var));
1236
1237 clobber_stats.clobbered_vars++;
1238
1239 /* See if this variable is really clobbered by this function. */
1240
1241 /* Trivial case: Things escaping only to pure/const are not
1242 clobbered by non-pure-const, and only read by pure/const. */
1243 if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
1244 {
1245 tree call = get_call_expr_in (stmt);
1246 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1247 {
1248 add_stmt_operand (&var, s_ann, opf_none);
1249 clobber_stats.unescapable_clobbers_avoided++;
1250 continue;
1251 }
1252 else
1253 {
1254 clobber_stats.unescapable_clobbers_avoided++;
1255 continue;
1256 }
1257 }
1258
1259 if (not_written)
1260 {
1261 clobber_stats.static_write_clobbers_avoided++;
1262 if (!not_read)
1263 add_stmt_operand (&var, s_ann, opf_none);
1264 else
1265 clobber_stats.static_read_clobbers_avoided++;
1266 }
1267 else
1268 add_virtual_operand (var, s_ann, opf_is_def,
1269 NULL, 0, -1, true);
1270 }
1271
1272 }
1273
1274
1275 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1276 function. */
1277
1278 static void
1279 add_call_read_ops (tree stmt, tree callee)
1280 {
1281 unsigned u;
1282 bitmap_iterator bi;
1283 stmt_ann_t s_ann = stmt_ann (stmt);
1284 bitmap not_read_b;
1285
1286 /* if the function is not pure, it may reference memory. Add
1287 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1288 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1289 if (global_var)
1290 {
1291 add_stmt_operand (&global_var, s_ann, opf_none);
1292 return;
1293 }
1294
1295 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1296
1297 /* Add a VUSE for each call-clobbered variable. */
1298 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1299 {
1300 tree var = referenced_var (u);
1301 tree real_var = var;
1302 bool not_read;
1303
1304 clobber_stats.readonly_clobbers++;
1305
1306 /* Not read and not written are computed on regular vars, not
1307 subvars, so look at the parent var if this is an SFT. */
1308
1309 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1310 real_var = SFT_PARENT_VAR (var);
1311
1312 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1313 DECL_UID (real_var)) : false;
1314
1315 if (not_read)
1316 {
1317 clobber_stats.static_readonly_clobbers_avoided++;
1318 continue;
1319 }
1320
1321 add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
1322 }
1323 }
1324
1325
1326 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1327
1328 static void
1329 get_call_expr_operands (tree stmt, tree expr)
1330 {
1331 tree op;
1332 int call_flags = call_expr_flags (expr);
1333
1334 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1335 operands for all the symbols that have been found to be
1336 call-clobbered.
1337
1338 Note that if aliases have not been computed, the global effects
1339 of calls will not be included in the SSA web. This is fine
1340 because no optimizer should run before aliases have been
1341 computed. By not bothering with virtual operands for CALL_EXPRs
1342 we avoid adding superfluous virtual operands, which can be a
1343 significant compile time sink (See PR 15855). */
1344 if (aliases_computed_p
1345 && !bitmap_empty_p (call_clobbered_vars)
1346 && !(call_flags & ECF_NOVOPS))
1347 {
1348 /* A 'pure' or a 'const' function never call-clobbers anything.
1349 A 'noreturn' function might, but since we don't return anyway
1350 there is no point in recording that. */
1351 if (TREE_SIDE_EFFECTS (expr)
1352 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1353 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1354 else if (!(call_flags & ECF_CONST))
1355 add_call_read_ops (stmt, get_callee_fndecl (expr));
1356 }
1357
1358 /* Find uses in the called function. */
1359 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1360
1361 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1362 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1363
1364 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1365 }
1366
1367
1368 /* Scan operands in the ASM_EXPR stmt referred to in INFO. */
1369
1370 static void
1371 get_asm_expr_operands (tree stmt)
1372 {
1373 stmt_ann_t s_ann = stmt_ann (stmt);
1374 int noutputs = list_length (ASM_OUTPUTS (stmt));
1375 const char **oconstraints
1376 = (const char **) alloca ((noutputs) * sizeof (const char *));
1377 int i;
1378 tree link;
1379 const char *constraint;
1380 bool allows_mem, allows_reg, is_inout;
1381
1382 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1383 {
1384 oconstraints[i] = constraint
1385 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1386 parse_output_constraint (&constraint, i, 0, 0,
1387 &allows_mem, &allows_reg, &is_inout);
1388
1389 /* This should have been split in gimplify_asm_expr. */
1390 gcc_assert (!allows_reg || !is_inout);
1391
1392 /* Memory operands are addressable. Note that STMT needs the
1393 address of this operand. */
1394 if (!allows_reg && allows_mem)
1395 {
1396 tree t = get_base_address (TREE_VALUE (link));
1397 if (t && DECL_P (t) && s_ann)
1398 add_to_addressable_set (t, &s_ann->addresses_taken);
1399 }
1400
1401 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1402 }
1403
1404 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1405 {
1406 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1407 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1408 oconstraints, &allows_mem, &allows_reg);
1409
1410 /* Memory operands are addressable. Note that STMT needs the
1411 address of this operand. */
1412 if (!allows_reg && allows_mem)
1413 {
1414 tree t = get_base_address (TREE_VALUE (link));
1415 if (t && DECL_P (t) && s_ann)
1416 add_to_addressable_set (t, &s_ann->addresses_taken);
1417 }
1418
1419 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1420 }
1421
1422
1423 /* Clobber memory for asm ("" : : : "memory"); */
1424 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1425 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1426 {
1427 unsigned i;
1428 bitmap_iterator bi;
1429
1430 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1431 decided to group them). */
1432 if (global_var)
1433 add_stmt_operand (&global_var, s_ann, opf_is_def);
1434 else
1435 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1436 {
1437 tree var = referenced_var (i);
1438 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1439 }
1440
1441 /* Now clobber all addressables. */
1442 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1443 {
1444 tree var = referenced_var (i);
1445
1446 /* Subvars are explicitly represented in this list, so
1447 we don't need the original to be added to the clobber
1448 ops, but the original *will* be in this list because
1449 we keep the addressability of the original
1450 variable up-to-date so we don't screw up the rest of
1451 the backend. */
1452 if (var_can_have_subvars (var)
1453 && get_subvars_for_var (var) != NULL)
1454 continue;
1455
1456 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1457 }
1458
1459 break;
1460 }
1461 }
1462
1463
1464 /* Recursively scan the expression pointed to by EXPR_P in statement
1465 referred to by INFO. FLAGS is one of the OPF_* constants modifying
1466 how to interpret the operands found. */
1467
1468 static void
1469 get_expr_operands (tree stmt, tree *expr_p, int flags)
1470 {
1471 enum tree_code code;
1472 enum tree_code_class class;
1473 tree expr = *expr_p;
1474 stmt_ann_t s_ann = stmt_ann (stmt);
1475
1476 if (expr == NULL)
1477 return;
1478
1479 code = TREE_CODE (expr);
1480 class = TREE_CODE_CLASS (code);
1481
1482 switch (code)
1483 {
1484 case ADDR_EXPR:
1485 /* Taking the address of a variable does not represent a
1486 reference to it, but the fact that the statement takes its
1487 address will be of interest to some passes (e.g. alias
1488 resolution). */
1489 add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
1490
1491 /* If the address is invariant, there may be no interesting
1492 variable references inside. */
1493 if (is_gimple_min_invariant (expr))
1494 return;
1495
1496 /* Otherwise, there may be variables referenced inside but there
1497 should be no VUSEs created, since the referenced objects are
1498 not really accessed. The only operands that we should find
1499 here are ARRAY_REF indices which will always be real operands
1500 (GIMPLE does not allow non-registers as array indices). */
1501 flags |= opf_no_vops;
1502 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1503 return;
1504
1505 case SSA_NAME:
1506 case STRUCT_FIELD_TAG:
1507 case TYPE_MEMORY_TAG:
1508 case NAME_MEMORY_TAG:
1509 add_stmt_operand (expr_p, s_ann, flags);
1510 return;
1511
1512 case VAR_DECL:
1513 case PARM_DECL:
1514 case RESULT_DECL:
1515 {
1516 subvar_t svars;
1517
1518 /* Add the subvars for a variable if it has subvars, to DEFS
1519 or USES. Otherwise, add the variable itself. Whether it
1520 goes to USES or DEFS depends on the operand flags. */
1521 if (var_can_have_subvars (expr)
1522 && (svars = get_subvars_for_var (expr)))
1523 {
1524 subvar_t sv;
1525 for (sv = svars; sv; sv = sv->next)
1526 add_stmt_operand (&sv->var, s_ann, flags);
1527 }
1528 else
1529 add_stmt_operand (expr_p, s_ann, flags);
1530
1531 return;
1532 }
1533
1534 case MISALIGNED_INDIRECT_REF:
1535 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1536 /* fall through */
1537
1538 case ALIGN_INDIRECT_REF:
1539 case INDIRECT_REF:
1540 get_indirect_ref_operands (stmt, expr, flags, NULL_TREE,
1541 0, -1, true);
1542 return;
1543
1544 case TARGET_MEM_REF:
1545 get_tmr_operands (stmt, expr, flags);
1546 return;
1547
1548 case ARRAY_RANGE_REF:
1549 /* Treat array references as references to the virtual variable
1550 representing the array. The virtual variable for an ARRAY_REF
1551 is the VAR_DECL for the array. */
1552 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1553 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1554 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1555 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1556 return;
1557
1558 case ARRAY_REF:
1559 case COMPONENT_REF:
1560 case REALPART_EXPR:
1561 case IMAGPART_EXPR:
1562 {
1563 tree ref;
1564 HOST_WIDE_INT offset, size, maxsize;
1565 bool none = true;
1566
1567 /* This component reference becomes an access to all of the
1568 subvariables it can touch, if we can determine that, but
1569 *NOT* the real one. If we can't determine which fields we
1570 could touch, the recursion will eventually get to a
1571 variable and add *all* of its subvars, or whatever is the
1572 minimum correct subset. */
1573 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
1574 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
1575 {
1576 subvar_t sv;
1577 subvar_t svars = get_subvars_for_var (ref);
1578
1579 for (sv = svars; sv; sv = sv->next)
1580 {
1581 bool exact;
1582
1583 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1584 {
1585 int subvar_flags = flags;
1586 none = false;
1587 if (!exact || size != maxsize)
1588 subvar_flags &= ~opf_kill_def;
1589 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1590 }
1591 }
1592
1593 if (!none)
1594 flags |= opf_no_vops;
1595 }
1596 else if (TREE_CODE (ref) == INDIRECT_REF)
1597 {
1598 get_indirect_ref_operands (stmt, ref, flags, expr,
1599 offset, maxsize, false);
1600 flags |= opf_no_vops;
1601 }
1602
1603 /* Even if we found subvars above we need to ensure to see
1604 immediate uses for d in s.a[d]. In case of s.a having
1605 a subvar we'd miss it otherwise. */
1606 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1607 flags & ~opf_kill_def);
1608
1609 if (code == COMPONENT_REF)
1610 {
1611 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1612 s_ann->has_volatile_ops = true;
1613 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1614 }
1615 else if (code == ARRAY_REF)
1616 {
1617 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1618 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1619 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1620 }
1621
1622 return;
1623 }
1624
1625 case WITH_SIZE_EXPR:
1626 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1627 and an rvalue reference to its second argument. */
1628 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1629 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1630 return;
1631
1632 case CALL_EXPR:
1633 get_call_expr_operands (stmt, expr);
1634 return;
1635
1636 case COND_EXPR:
1637 case VEC_COND_EXPR:
1638 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1639 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1640 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1641 return;
1642
1643 case MODIFY_EXPR:
1644 {
1645 int subflags;
1646 tree op;
1647
1648 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1649
1650 op = TREE_OPERAND (expr, 0);
1651 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1652 op = TREE_OPERAND (expr, 0);
1653 if (TREE_CODE (op) == ARRAY_RANGE_REF
1654 || TREE_CODE (op) == REALPART_EXPR
1655 || TREE_CODE (op) == IMAGPART_EXPR)
1656 subflags = opf_is_def;
1657 else
1658 subflags = opf_is_def | opf_kill_def;
1659
1660 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
1661 return;
1662 }
1663
1664 case CONSTRUCTOR:
1665 {
1666 /* General aggregate CONSTRUCTORs have been decomposed, but they
1667 are still in use as the COMPLEX_EXPR equivalent for vectors. */
1668 constructor_elt *ce;
1669 unsigned HOST_WIDE_INT idx;
1670
1671 for (idx = 0;
1672 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
1673 idx++)
1674 get_expr_operands (stmt, &ce->value, opf_none);
1675
1676 return;
1677 }
1678
1679 case TRUTH_NOT_EXPR:
1680 case BIT_FIELD_REF:
1681 case VIEW_CONVERT_EXPR:
1682 do_unary:
1683 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1684 return;
1685
1686 case TRUTH_AND_EXPR:
1687 case TRUTH_OR_EXPR:
1688 case TRUTH_XOR_EXPR:
1689 case COMPOUND_EXPR:
1690 case OBJ_TYPE_REF:
1691 case ASSERT_EXPR:
1692 do_binary:
1693 {
1694 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1695 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1696 return;
1697 }
1698
1699 case DOT_PROD_EXPR:
1700 case REALIGN_LOAD_EXPR:
1701 {
1702 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1703 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1704 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
1705 return;
1706 }
1707
1708 case BLOCK:
1709 case FUNCTION_DECL:
1710 case EXC_PTR_EXPR:
1711 case FILTER_EXPR:
1712 case LABEL_DECL:
1713 case CONST_DECL:
1714 case OMP_PARALLEL:
1715 case OMP_SECTIONS:
1716 case OMP_FOR:
1717 case OMP_RETURN_EXPR:
1718 case OMP_SINGLE:
1719 case OMP_MASTER:
1720 case OMP_ORDERED:
1721 case OMP_CRITICAL:
1722 /* Expressions that make no memory references. */
1723 return;
1724
1725 default:
1726 if (class == tcc_unary)
1727 goto do_unary;
1728 if (class == tcc_binary || class == tcc_comparison)
1729 goto do_binary;
1730 if (class == tcc_constant || class == tcc_type)
1731 return;
1732 }
1733
1734 /* If we get here, something has gone wrong. */
1735 #ifdef ENABLE_CHECKING
1736 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
1737 debug_tree (expr);
1738 fputs ("\n", stderr);
1739 #endif
1740 gcc_unreachable ();
1741 }
1742
1743
1744 /* Parse STMT looking for operands. OLD_OPS is the original stmt operand
1745 cache for STMT, if it existed before. When finished, the various build_*
1746 operand vectors will have potential operands. in them. */
1747
1748 static void
1749 parse_ssa_operands (tree stmt)
1750 {
1751 enum tree_code code;
1752
1753 code = TREE_CODE (stmt);
1754 switch (code)
1755 {
1756 case MODIFY_EXPR:
1757 /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
1758 either only part of LHS is modified or if the RHS might throw,
1759 otherwise, use V_MUST_DEF.
1760
1761 ??? If it might throw, we should represent somehow that it is killed
1762 on the fallthrough path. */
1763 {
1764 tree lhs = TREE_OPERAND (stmt, 0);
1765 int lhs_flags = opf_is_def;
1766
1767 get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
1768
1769 /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
1770 or not the entire LHS is modified; that depends on what's
1771 inside the VIEW_CONVERT_EXPR. */
1772 if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1773 lhs = TREE_OPERAND (lhs, 0);
1774
1775 if (TREE_CODE (lhs) != ARRAY_RANGE_REF
1776 && TREE_CODE (lhs) != BIT_FIELD_REF)
1777 lhs_flags |= opf_kill_def;
1778
1779 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
1780 }
1781 break;
1782
1783 case COND_EXPR:
1784 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
1785 break;
1786
1787 case SWITCH_EXPR:
1788 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
1789 break;
1790
1791 case ASM_EXPR:
1792 get_asm_expr_operands (stmt);
1793 break;
1794
1795 case RETURN_EXPR:
1796 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
1797 break;
1798
1799 case GOTO_EXPR:
1800 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
1801 break;
1802
1803 case LABEL_EXPR:
1804 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
1805 break;
1806
1807 /* These nodes contain no variable references. */
1808 case BIND_EXPR:
1809 case CASE_LABEL_EXPR:
1810 case TRY_CATCH_EXPR:
1811 case TRY_FINALLY_EXPR:
1812 case EH_FILTER_EXPR:
1813 case CATCH_EXPR:
1814 case RESX_EXPR:
1815 break;
1816
1817 default:
1818 /* Notice that if get_expr_operands tries to use &STMT as the operand
1819 pointer (which may only happen for USE operands), we will fail in
1820 append_use. This default will handle statements like empty
1821 statements, or CALL_EXPRs that may appear on the RHS of a statement
1822 or as statements themselves. */
1823 get_expr_operands (stmt, &stmt, opf_none);
1824 break;
1825 }
1826 }
1827
1828
1829 /* Create an operands cache for STMT. */
1830
1831 static void
1832 build_ssa_operands (tree stmt)
1833 {
1834 stmt_ann_t ann = get_stmt_ann (stmt);
1835
1836 /* Initially assume that the statement has no volatile operands. */
1837 if (ann)
1838 ann->has_volatile_ops = false;
1839
1840 start_ssa_stmt_operands ();
1841
1842 parse_ssa_operands (stmt);
1843 operand_build_sort_virtual (build_vuses);
1844 operand_build_sort_virtual (build_v_may_defs);
1845 operand_build_sort_virtual (build_v_must_defs);
1846
1847 finalize_ssa_stmt_operands (stmt);
1848 }
1849
1850
1851 /* Free any operands vectors in OPS. */
1852 void
1853 free_ssa_operands (stmt_operands_p ops)
1854 {
1855 ops->def_ops = NULL;
1856 ops->use_ops = NULL;
1857 ops->maydef_ops = NULL;
1858 ops->mustdef_ops = NULL;
1859 ops->vuse_ops = NULL;
1860 }
1861
1862
1863 /* Get the operands of statement STMT. Note that repeated calls to
1864 get_stmt_operands for the same statement will do nothing until the
1865 statement is marked modified by a call to mark_stmt_modified(). */
1866
1867 void
1868 update_stmt_operands (tree stmt)
1869 {
1870 stmt_ann_t ann = get_stmt_ann (stmt);
1871
1872 /* If get_stmt_operands is called before SSA is initialized, dont
1873 do anything. */
1874 if (!ssa_operands_active ())
1875 return;
1876
1877 /* The optimizers cannot handle statements that are nothing but a
1878 _DECL. This indicates a bug in the gimplifier. */
1879 gcc_assert (!SSA_VAR_P (stmt));
1880
1881 gcc_assert (ann->modified);
1882
1883 timevar_push (TV_TREE_OPS);
1884
1885 build_ssa_operands (stmt);
1886
1887 /* Clear the modified bit for STMT. Subsequent calls to
1888 get_stmt_operands for this statement will do nothing until the
1889 statement is marked modified by a call to mark_stmt_modified(). */
1890 ann->modified = 0;
1891
1892 timevar_pop (TV_TREE_OPS);
1893 }
1894
1895
1896 /* Copies virtual operands from SRC to DST. */
1897
1898 void
1899 copy_virtual_operands (tree dest, tree src)
1900 {
1901 tree t;
1902 ssa_op_iter iter, old_iter;
1903 use_operand_p use_p, u2;
1904 def_operand_p def_p, d2;
1905
1906 build_ssa_operands (dest);
1907
1908 /* Copy all the virtual fields. */
1909 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
1910 append_vuse (t);
1911 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
1912 append_v_may_def (t);
1913 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
1914 append_v_must_def (t);
1915
1916 if (VEC_length (tree, build_vuses) == 0
1917 && VEC_length (tree, build_v_may_defs) == 0
1918 && VEC_length (tree, build_v_must_defs) == 0)
1919 return;
1920
1921 /* Now commit the virtual operands to this stmt. */
1922 finalize_ssa_v_must_defs (dest);
1923 finalize_ssa_v_may_defs (dest);
1924 finalize_ssa_vuses (dest);
1925
1926 /* Finally, set the field to the same values as then originals. */
1927
1928
1929 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
1930 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
1931 {
1932 gcc_assert (!op_iter_done (&old_iter));
1933 SET_USE (use_p, t);
1934 t = op_iter_next_tree (&old_iter);
1935 }
1936 gcc_assert (op_iter_done (&old_iter));
1937
1938 op_iter_init_maydef (&old_iter, src, &u2, &d2);
1939 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
1940 {
1941 gcc_assert (!op_iter_done (&old_iter));
1942 SET_USE (use_p, USE_FROM_PTR (u2));
1943 SET_DEF (def_p, DEF_FROM_PTR (d2));
1944 op_iter_next_maymustdef (&u2, &d2, &old_iter);
1945 }
1946 gcc_assert (op_iter_done (&old_iter));
1947
1948 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
1949 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
1950 {
1951 gcc_assert (!op_iter_done (&old_iter));
1952 SET_USE (use_p, USE_FROM_PTR (u2));
1953 SET_DEF (def_p, DEF_FROM_PTR (d2));
1954 op_iter_next_maymustdef (&u2, &d2, &old_iter);
1955 }
1956 gcc_assert (op_iter_done (&old_iter));
1957
1958 }
1959
1960
1961 /* Specifically for use in DOM's expression analysis. Given a store, we
1962 create an artificial stmt which looks like a load from the store, this can
1963 be used to eliminate redundant loads. OLD_OPS are the operands from the
1964 store stmt, and NEW_STMT is the new load which represents a load of the
1965 values stored. */
1966
1967 void
1968 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
1969 {
1970 stmt_ann_t ann;
1971 tree op;
1972 ssa_op_iter iter;
1973 use_operand_p use_p;
1974 unsigned x;
1975
1976 ann = get_stmt_ann (new_stmt);
1977
1978 /* process the stmt looking for operands. */
1979 start_ssa_stmt_operands ();
1980 parse_ssa_operands (new_stmt);
1981
1982 for (x = 0; x < VEC_length (tree, build_vuses); x++)
1983 {
1984 tree t = VEC_index (tree, build_vuses, x);
1985 if (TREE_CODE (t) != SSA_NAME)
1986 {
1987 var_ann_t ann = var_ann (t);
1988 ann->in_vuse_list = 0;
1989 }
1990 }
1991
1992 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
1993 {
1994 tree t = VEC_index (tree, build_v_may_defs, x);
1995 if (TREE_CODE (t) != SSA_NAME)
1996 {
1997 var_ann_t ann = var_ann (t);
1998 ann->in_v_may_def_list = 0;
1999 }
2000 }
2001
2002 /* Remove any virtual operands that were found. */
2003 VEC_truncate (tree, build_v_may_defs, 0);
2004 VEC_truncate (tree, build_v_must_defs, 0);
2005 VEC_truncate (tree, build_vuses, 0);
2006
2007 /* For each VDEF on the original statement, we want to create a
2008 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
2009 statement. */
2010 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
2011 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
2012 append_vuse (op);
2013
2014 /* Now build the operands for this new stmt. */
2015 finalize_ssa_stmt_operands (new_stmt);
2016
2017 /* All uses in this fake stmt must not be in the immediate use lists. */
2018 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2019 delink_imm_use (use_p);
2020 }
2021
2022
2023 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is done
2024 to test the validity of the swap operation. */
2025
2026 void
2027 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2028 {
2029 tree op0, op1;
2030 op0 = *exp0;
2031 op1 = *exp1;
2032
2033 /* If the operand cache is active, attempt to preserve the relative positions
2034 of these two operands in their respective immediate use lists. */
2035 if (ssa_operands_active () && op0 != op1)
2036 {
2037 use_optype_p use0, use1, ptr;
2038 use0 = use1 = NULL;
2039
2040 /* Find the 2 operands in the cache, if they are there. */
2041 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2042 if (USE_OP_PTR (ptr)->use == exp0)
2043 {
2044 use0 = ptr;
2045 break;
2046 }
2047
2048 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2049 if (USE_OP_PTR (ptr)->use == exp1)
2050 {
2051 use1 = ptr;
2052 break;
2053 }
2054
2055 /* If both uses don't have operand entries, there isn't much we can do
2056 at this point. Presumably we dont need to worry about it. */
2057 if (use0 && use1)
2058 {
2059 tree *tmp = USE_OP_PTR (use1)->use;
2060 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2061 USE_OP_PTR (use0)->use = tmp;
2062 }
2063 }
2064
2065 /* Now swap the data. */
2066 *exp0 = op1;
2067 *exp1 = op0;
2068 }
2069
2070
2071 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
2072 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
2073 a single variable whose address has been taken or any other valid
2074 GIMPLE memory reference (structure reference, array, etc). If the
2075 base address of REF is a decl that has sub-variables, also add all
2076 of its sub-variables. */
2077
2078 void
2079 add_to_addressable_set (tree ref, bitmap *addresses_taken)
2080 {
2081 tree var;
2082 subvar_t svars;
2083
2084 gcc_assert (addresses_taken);
2085
2086 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2087 as the only thing we take the address of. If VAR is a structure,
2088 taking the address of a field means that the whole structure may
2089 be referenced using pointer arithmetic. See PR 21407 and the
2090 ensuing mailing list discussion. */
2091 var = get_base_address (ref);
2092 if (var && SSA_VAR_P (var))
2093 {
2094 if (*addresses_taken == NULL)
2095 *addresses_taken = BITMAP_GGC_ALLOC ();
2096
2097 if (var_can_have_subvars (var)
2098 && (svars = get_subvars_for_var (var)))
2099 {
2100 subvar_t sv;
2101 for (sv = svars; sv; sv = sv->next)
2102 {
2103 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
2104 TREE_ADDRESSABLE (sv->var) = 1;
2105 }
2106 }
2107 else
2108 {
2109 bitmap_set_bit (*addresses_taken, DECL_UID (var));
2110 TREE_ADDRESSABLE (var) = 1;
2111 }
2112 }
2113 }
2114
2115
2116 /* Scan the immediate_use list for VAR making sure its linked properly.
2117 return RTUE iof there is a problem. */
2118
2119 bool
2120 verify_imm_links (FILE *f, tree var)
2121 {
2122 use_operand_p ptr, prev, list;
2123 int count;
2124
2125 gcc_assert (TREE_CODE (var) == SSA_NAME);
2126
2127 list = &(SSA_NAME_IMM_USE_NODE (var));
2128 gcc_assert (list->use == NULL);
2129
2130 if (list->prev == NULL)
2131 {
2132 gcc_assert (list->next == NULL);
2133 return false;
2134 }
2135
2136 prev = list;
2137 count = 0;
2138 for (ptr = list->next; ptr != list; )
2139 {
2140 if (prev != ptr->prev)
2141 goto error;
2142
2143 if (ptr->use == NULL)
2144 goto error; /* 2 roots, or SAFE guard node. */
2145 else if (*(ptr->use) != var)
2146 goto error;
2147
2148 prev = ptr;
2149 ptr = ptr->next;
2150
2151 /* Avoid infinite loops. 50,000,000 uses probably indicates a
2152 problem. */
2153 if (count++ > 50000000)
2154 goto error;
2155 }
2156
2157 /* Verify list in the other direction. */
2158 prev = list;
2159 for (ptr = list->prev; ptr != list; )
2160 {
2161 if (prev != ptr->next)
2162 goto error;
2163 prev = ptr;
2164 ptr = ptr->prev;
2165 if (count-- < 0)
2166 goto error;
2167 }
2168
2169 if (count != 0)
2170 goto error;
2171
2172 return false;
2173
2174 error:
2175 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2176 {
2177 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2178 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2179 }
2180 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2181 (void *)ptr->use);
2182 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2183 fprintf(f, "\n");
2184 return true;
2185 }
2186
2187
2188 /* Dump all the immediate uses to FILE. */
2189
2190 void
2191 dump_immediate_uses_for (FILE *file, tree var)
2192 {
2193 imm_use_iterator iter;
2194 use_operand_p use_p;
2195
2196 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2197
2198 print_generic_expr (file, var, TDF_SLIM);
2199 fprintf (file, " : -->");
2200 if (has_zero_uses (var))
2201 fprintf (file, " no uses.\n");
2202 else
2203 if (has_single_use (var))
2204 fprintf (file, " single use.\n");
2205 else
2206 fprintf (file, "%d uses.\n", num_imm_uses (var));
2207
2208 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2209 {
2210 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2211 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2212 else
2213 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2214 }
2215 fprintf(file, "\n");
2216 }
2217
2218
2219 /* Dump all the immediate uses to FILE. */
2220
2221 void
2222 dump_immediate_uses (FILE *file)
2223 {
2224 tree var;
2225 unsigned int x;
2226
2227 fprintf (file, "Immediate_uses: \n\n");
2228 for (x = 1; x < num_ssa_names; x++)
2229 {
2230 var = ssa_name(x);
2231 if (!var)
2232 continue;
2233 dump_immediate_uses_for (file, var);
2234 }
2235 }
2236
2237
2238 /* Dump def-use edges on stderr. */
2239
2240 void
2241 debug_immediate_uses (void)
2242 {
2243 dump_immediate_uses (stderr);
2244 }
2245
2246 /* Dump def-use edges on stderr. */
2247
2248 void
2249 debug_immediate_uses_for (tree var)
2250 {
2251 dump_immediate_uses_for (stderr, var);
2252 }
2253
2254 #include "gt-tree-ssa-operands.h"