/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "errors.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"

#include "langhooks.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   get_stmt_operands() is the primary entry point.

   The operand tree is then parsed by the various get_* routines, which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 5 of these routines, each representing one of the
   5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
   Virtual Must Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 5 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a
   perfect match, the old vector is simply reused.  If it isn't a perfect
   match, then a new vector is created and the new operands are placed
   there.  For virtual operands, if the previous cache had an SSA_NAME
   version of a variable, and that same variable occurs in the new operand
   vector, then the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */

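/* A minimal usage sketch (hypothetical pass code; the iteration variables
   are assumptions, the accessor macros come from tree-ssa-operands.h):

       get_stmt_operands (stmt);
       vuses = STMT_VUSE_OPS (stmt);
       for (i = 0; i < NUM_VUSES (vuses); i++)
         print_generic_expr (dump_file, VUSE_OP (vuses, i), dump_flags);  */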

/* Flags to describe operand properties in get_stmt_operands and helpers.  */

/* By default, operands are loaded.  */
#define opf_none 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def (1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def (1 << 1)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 2)
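
/* For instance, the MODIFY_EXPR handling later in this file scans the
   left-hand side of an assignment that overwrites the entire object with
   both definition flags set:

       get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
                          opf_is_def | opf_kill_def);  */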

/* Array for building all the def operands.  */
static GTY (()) varray_type build_defs;

/* Array for building all the use operands.  */
static GTY (()) varray_type build_uses;

/* Array for building all the v_may_def operands.  */
static GTY (()) varray_type build_v_may_defs;

/* Array for building all the vuse operands.  */
static GTY (()) varray_type build_vuses;

/* Array for building all the v_must_def operands.  */
static GTY (()) varray_type build_v_must_defs;

/* True if the operands for call clobbered vars are cached and valid.  */
bool ssa_call_clobbered_cache_valid;
bool ssa_ro_call_cache_valid;

/* These arrays are the cached operand vectors for call clobbered calls.  */
static GTY (()) varray_type clobbered_v_may_defs;
static GTY (()) varray_type clobbered_vuses;
static GTY (()) varray_type ro_call_vuses;
static bool clobbered_aliased_loads;
static bool clobbered_aliased_stores;
static bool ro_call_aliased_loads;

def_operand_p NULL_DEF_OPERAND_P = { NULL };
use_operand_p NULL_USE_OPERAND_P = { NULL };

static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
static void append_v_may_def (tree);
static void append_v_must_def (tree);
static void add_call_clobber_ops (tree);
static void add_call_read_ops (tree);
static void add_stmt_operand (tree *, stmt_ann_t, int);

/* Return a vector of contiguous memory for NUM def operands.  */
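/* Note: each optype vector below is over-allocated using the trailing-array
   ("struct hack") idiom, so the struct's one-element tail array actually
   holds NUM entries.  */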

static inline def_optype
allocate_def_optype (unsigned num)
{
  def_optype def_ops;
  unsigned size;
  size = sizeof (struct def_optype_d) + sizeof (tree *) * (num - 1);
  def_ops = ggc_alloc (size);
  def_ops->num_defs = num;
  return def_ops;
}


/* Return a vector of contiguous memory for NUM use operands.  */

static inline use_optype
allocate_use_optype (unsigned num)
{
  use_optype use_ops;
  unsigned size;
  size = sizeof (struct use_optype_d) + sizeof (tree *) * (num - 1);
  use_ops = ggc_alloc (size);
  use_ops->num_uses = num;
  return use_ops;
}


/* Return a vector of contiguous memory for NUM v_may_def operands.  */

static inline v_may_def_optype
allocate_v_may_def_optype (unsigned num)
{
  v_may_def_optype v_may_def_ops;
  unsigned size;
  size = sizeof (struct v_may_def_optype_d)
         + sizeof (v_def_use_operand_type_t) * (num - 1);
  v_may_def_ops = ggc_alloc (size);
  v_may_def_ops->num_v_may_defs = num;
  return v_may_def_ops;
}


/* Return a vector of contiguous memory for NUM v_use operands.  */

static inline vuse_optype
allocate_vuse_optype (unsigned num)
{
  vuse_optype vuse_ops;
  unsigned size;
  size = sizeof (struct vuse_optype_d) + sizeof (tree) * (num - 1);
  vuse_ops = ggc_alloc (size);
  vuse_ops->num_vuses = num;
  return vuse_ops;
}


/* Return a vector of contiguous memory for NUM v_must_def operands.  */

static inline v_must_def_optype
allocate_v_must_def_optype (unsigned num)
{
  v_must_def_optype v_must_def_ops;
  unsigned size;
  size = sizeof (struct v_must_def_optype_d)
         + sizeof (v_def_use_operand_type_t) * (num - 1);
  v_must_def_ops = ggc_alloc (size);
  v_must_def_ops->num_v_must_defs = num;
  return v_must_def_ops;
}


/* Free memory for USES.  */

static inline void
free_uses (use_optype *uses)
{
  if (*uses)
    {
      ggc_free (*uses);
      *uses = NULL;
    }
}


/* Free memory for DEFS.  */

static inline void
free_defs (def_optype *defs)
{
  if (*defs)
    {
      ggc_free (*defs);
      *defs = NULL;
    }
}


/* Free memory for VUSES.  */

static inline void
free_vuses (vuse_optype *vuses)
{
  if (*vuses)
    {
      ggc_free (*vuses);
      *vuses = NULL;
    }
}


/* Free memory for V_MAY_DEFS.  */

static inline void
free_v_may_defs (v_may_def_optype *v_may_defs)
{
  if (*v_may_defs)
    {
      ggc_free (*v_may_defs);
      *v_may_defs = NULL;
    }
}


/* Free memory for V_MUST_DEFS.  */

static inline void
free_v_must_defs (v_must_def_optype *v_must_defs)
{
  if (*v_must_defs)
    {
      ggc_free (*v_must_defs);
      *v_must_defs = NULL;
    }
}


/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  VARRAY_TREE_PTR_INIT (build_defs, 5, "build defs");
  VARRAY_TREE_PTR_INIT (build_uses, 10, "build uses");
  VARRAY_TREE_INIT (build_v_may_defs, 10, "build v_may_defs");
  VARRAY_TREE_INIT (build_vuses, 10, "build vuses");
  VARRAY_TREE_INIT (build_v_must_defs, 10, "build v_must_defs");
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  ggc_free (build_defs);
  ggc_free (build_uses);
  ggc_free (build_v_may_defs);
  ggc_free (build_vuses);
  ggc_free (build_v_must_defs);
  build_defs = NULL;
  build_uses = NULL;
  build_v_may_defs = NULL;
  build_vuses = NULL;
  build_v_must_defs = NULL;
  if (clobbered_v_may_defs)
    {
      ggc_free (clobbered_v_may_defs);
      ggc_free (clobbered_vuses);
      clobbered_v_may_defs = NULL;
      clobbered_vuses = NULL;
    }
  if (ro_call_vuses)
    {
      ggc_free (ro_call_vuses);
      ro_call_vuses = NULL;
    }
}


/* All the finalize_ssa_* routines do the work required to turn the build_
   VARRAY into an operand_vector of the appropriate type.  The original
   vector, if any, is passed in for comparison and virtual SSA_NAME reuse.
   If the old vector is reused, the pointer passed in is set to NULL so that
   the memory is not freed when the old operands are freed.  */

/* Return a new def operand vector for STMT, comparing to OLD_OPS_P.  */

static def_optype
finalize_ssa_defs (def_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x;
  def_optype def_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_defs);
  if (num == 0)
    return NULL;

  /* There should only be a single real definition per assignment.  */
  gcc_assert (TREE_CODE (stmt) != MODIFY_EXPR || num <= 1);

  old_ops = *old_ops_p;

  /* Compare old vector and new array.  */
  build_diff = true;
  if (old_ops && old_ops->num_defs == num)
    {
      build_diff = false;
      for (x = 0; x < num; x++)
        if (old_ops->defs[x].def != VARRAY_TREE_PTR (build_defs, x))
          {
            build_diff = true;
            break;
          }
    }

  if (!build_diff)
    {
      def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      def_ops = allocate_def_optype (num);
      for (x = 0; x < num; x++)
        def_ops->defs[x].def = VARRAY_TREE_PTR (build_defs, x);
    }

  VARRAY_POP_ALL (build_defs);

  return def_ops;
}


/* Return a new use operand vector for STMT, comparing to OLD_OPS_P.  */

static use_optype
finalize_ssa_uses (use_optype *old_ops_p, tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x;
  use_optype use_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_uses);
  if (num == 0)
    return NULL;

#ifdef ENABLE_CHECKING
  {
    unsigned x;
    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to get_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*(VARRAY_TREE_PTR (build_uses, x)) != stmt);
  }
#endif
  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (old_ops && old_ops->num_uses == num)
    {
      build_diff = false;
      for (x = 0; x < num; x++)
        if (old_ops->uses[x].use != VARRAY_TREE_PTR (build_uses, x))
          {
            build_diff = true;
            break;
          }
    }

  if (!build_diff)
    {
      use_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      use_ops = allocate_use_optype (num);
      for (x = 0; x < num; x++)
        use_ops->uses[x].use = VARRAY_TREE_PTR (build_uses, x);
    }
  VARRAY_POP_ALL (build_uses);

  return use_ops;
}


/* Return a new v_may_def operand vector, comparing to OLD_OPS_P.  */

static v_may_def_optype
finalize_ssa_v_may_defs (v_may_def_optype *old_ops_p)
{
  unsigned num, x, i, old_num;
  v_may_def_optype v_may_def_ops, old_ops;
  tree result, var;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
  if (num == 0)
    return NULL;

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (old_ops && old_ops->num_v_may_defs == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num; x++)
        {
          var = old_ops->v_may_defs[x].def;
          if (TREE_CODE (var) == SSA_NAME)
            var = SSA_NAME_VAR (var);
          if (var != VARRAY_TREE (build_v_may_defs, x))
            {
              build_diff = true;
              break;
            }
        }
    }
  else
    old_num = (old_ops ? old_ops->num_v_may_defs : 0);

  if (!build_diff)
    {
      v_may_def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      v_may_def_ops = allocate_v_may_def_optype (num);
      for (x = 0; x < num; x++)
        {
          var = VARRAY_TREE (build_v_may_defs, x);
          /* Look for VAR in the old operands vector.  */
          for (i = 0; i < old_num; i++)
            {
              result = old_ops->v_may_defs[i].def;
              if (TREE_CODE (result) == SSA_NAME)
                result = SSA_NAME_VAR (result);
              if (result == var)
                {
                  v_may_def_ops->v_may_defs[x] = old_ops->v_may_defs[i];
                  break;
                }
            }
          if (i == old_num)
            {
              v_may_def_ops->v_may_defs[x].def = var;
              v_may_def_ops->v_may_defs[x].use = var;
            }
        }
    }

  /* Note: the V_MAY_DEF build vector is not emptied here; it is still needed
     by finalize_ssa_vuses, which clears it via cleanup_v_may_defs once the
     VUSEs have been processed.  */

  return v_may_def_ops;
}


/* Clear the in_list bits and empty the build array for v_may_defs.  */

static inline void
cleanup_v_may_defs (void)
{
  unsigned x, num;
  num = VARRAY_ACTIVE_SIZE (build_v_may_defs);

  for (x = 0; x < num; x++)
    {
      tree t = VARRAY_TREE (build_v_may_defs, x);
      var_ann_t ann = var_ann (t);
      ann->in_v_may_def_list = 0;
    }
  VARRAY_POP_ALL (build_v_may_defs);
}

/* Return a new vuse operand vector, comparing to OLD_OPS_P.  */

static vuse_optype
finalize_ssa_vuses (vuse_optype *old_ops_p)
{
  unsigned num, x, i, num_v_may_defs, old_num;
  vuse_optype vuse_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  if (num == 0)
    {
      cleanup_v_may_defs ();
      return NULL;
    }

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

              # VUSE <a_2>
              # a_3 = V_MAY_DEF <a_2>
              a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  num_v_may_defs = VARRAY_ACTIVE_SIZE (build_v_may_defs);

  if (num_v_may_defs > 0)
    {
      size_t i;
      tree vuse;
      for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
        {
          vuse = VARRAY_TREE (build_vuses, i);
          if (TREE_CODE (vuse) != SSA_NAME)
            {
              var_ann_t ann = var_ann (vuse);
              ann->in_vuse_list = 0;
              if (ann->in_v_may_def_list)
                {
                  /* If we found a useless VUSE operand, remove it from the
                     operand array by replacing it with the last active
                     element in the operand array (unless the useless VUSE
                     was the last operand, in which case we simply remove
                     it).  */
                  if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
                    {
                      VARRAY_TREE (build_vuses, i)
                        = VARRAY_TREE (build_vuses,
                                       VARRAY_ACTIVE_SIZE (build_vuses) - 1);
                    }
                  VARRAY_POP (build_vuses);

                  /* We want to rescan the element at this index, unless
                     this was the last element, in which case the loop
                     terminates.  */
                  i--;
                }
            }
        }
    }
  else
    /* Clear out the in_list bits.  */
    for (x = 0; x < num; x++)
      {
        tree t = VARRAY_TREE (build_vuses, x);
        if (TREE_CODE (t) != SSA_NAME)
          {
            var_ann_t ann = var_ann (t);
            ann->in_vuse_list = 0;
          }
      }

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  /* Removing superfluous VUSEs may have reduced the count to zero.  */
  if (num == 0)
    {
      cleanup_v_may_defs ();
      return NULL;
    }

  old_ops = *old_ops_p;

  /* Determine whether vuses is the same as the old vector.  */
  build_diff = true;
  if (old_ops && old_ops->num_vuses == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num; x++)
        {
          tree v;
          v = old_ops->vuses[x];
          if (TREE_CODE (v) == SSA_NAME)
            v = SSA_NAME_VAR (v);
          if (v != VARRAY_TREE (build_vuses, x))
            {
              build_diff = true;
              break;
            }
        }
    }
  else
    old_num = (old_ops ? old_ops->num_vuses : 0);

  if (!build_diff)
    {
      vuse_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      vuse_ops = allocate_vuse_optype (num);
      for (x = 0; x < num; x++)
        {
          tree result, var = VARRAY_TREE (build_vuses, x);
          /* Look for VAR in the old vector, and use that SSA_NAME.  */
          for (i = 0; i < old_num; i++)
            {
              result = old_ops->vuses[i];
              if (TREE_CODE (result) == SSA_NAME)
                result = SSA_NAME_VAR (result);
              if (result == var)
                {
                  vuse_ops->vuses[x] = old_ops->vuses[i];
                  break;
                }
            }
          if (i == old_num)
            vuse_ops->vuses[x] = var;
        }
    }

  /* The v_may_def build vector wasn't freed because we needed it here.
     Free it now with the vuses build vector.  */
  VARRAY_POP_ALL (build_vuses);
  cleanup_v_may_defs ();

  return vuse_ops;
}

/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P.  */

static v_must_def_optype
finalize_ssa_v_must_defs (v_must_def_optype *old_ops_p,
                          tree stmt ATTRIBUTE_UNUSED)
{
  unsigned num, x, i, old_num = 0;
  v_must_def_optype v_must_def_ops, old_ops;
  bool build_diff;

  num = VARRAY_ACTIVE_SIZE (build_v_must_defs);
  if (num == 0)
    return NULL;

  /* In the presence of subvars, there may be more than one V_MUST_DEF per
     statement (one for each subvar).  It is a bit expensive to verify that
     all must-defs in a statement belong to subvars if there is more than one
     must-def, so we don't do it.  Suffice to say, if you reach here without
     having subvars, and have num > 1, you have hit a bug.  */

  old_ops = *old_ops_p;

  /* Check if the old vector and the new array are the same.  */
  build_diff = true;
  if (old_ops && old_ops->num_v_must_defs == num)
    {
      old_num = num;
      build_diff = false;
      for (x = 0; x < num; x++)
        {
          tree var = old_ops->v_must_defs[x].def;
          if (TREE_CODE (var) == SSA_NAME)
            var = SSA_NAME_VAR (var);
          if (var != VARRAY_TREE (build_v_must_defs, x))
            {
              build_diff = true;
              break;
            }
        }
    }
  else
    old_num = (old_ops ? old_ops->num_v_must_defs : 0);

  if (!build_diff)
    {
      v_must_def_ops = old_ops;
      *old_ops_p = NULL;
    }
  else
    {
      v_must_def_ops = allocate_v_must_def_optype (num);
      for (x = 0; x < num; x++)
        {
          tree result, var = VARRAY_TREE (build_v_must_defs, x);
          /* Look for VAR in the original vector.  */
          for (i = 0; i < old_num; i++)
            {
              result = old_ops->v_must_defs[i].def;
              if (TREE_CODE (result) == SSA_NAME)
                result = SSA_NAME_VAR (result);
              if (result == var)
                {
                  v_must_def_ops->v_must_defs[x].def
                    = old_ops->v_must_defs[i].def;
                  v_must_def_ops->v_must_defs[x].use
                    = old_ops->v_must_defs[i].use;
                  break;
                }
            }
          if (i == old_num)
            {
              v_must_def_ops->v_must_defs[x].def = var;
              v_must_def_ops->v_must_defs[x].use = var;
            }
        }
    }
  VARRAY_POP_ALL (build_v_must_defs);

  return v_must_def_ops;
}


/* Finalize all the build vectors, fill the new ones into NEW_OPS.  */

static inline void
finalize_ssa_stmt_operands (tree stmt, stmt_operands_p old_ops,
                            stmt_operands_p new_ops)
{
  new_ops->def_ops = finalize_ssa_defs (&(old_ops->def_ops), stmt);
  new_ops->use_ops = finalize_ssa_uses (&(old_ops->use_ops), stmt);
  new_ops->v_must_def_ops
    = finalize_ssa_v_must_defs (&(old_ops->v_must_def_ops), stmt);
  new_ops->v_may_def_ops = finalize_ssa_v_may_defs (&(old_ops->v_may_def_ops));
  new_ops->vuse_ops = finalize_ssa_vuses (&(old_ops->vuse_ops));
}


/* Start the process of building up operands vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VARRAY_ACTIVE_SIZE (build_defs) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_uses) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_vuses) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_v_may_defs) == 0);
  gcc_assert (VARRAY_ACTIVE_SIZE (build_v_must_defs) == 0);
}


/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VARRAY_PUSH_TREE_PTR (build_defs, def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VARRAY_PUSH_TREE_PTR (build_uses, use_p);
}


/* Add a new virtual may def for variable VAR to the build array.  */

static inline void
append_v_may_def (tree var)
{
  var_ann_t ann = get_var_ann (var);

  /* Don't allow duplicate entries.  */
  if (ann->in_v_may_def_list)
    return;
  ann->in_v_may_def_list = 1;

  VARRAY_PUSH_TREE (build_v_may_defs, var);
}


/* Add VAR to the list of virtual uses.  */

static inline void
append_vuse (tree var)
{
  /* Don't allow duplicate entries.  */
  if (TREE_CODE (var) != SSA_NAME)
    {
      var_ann_t ann = get_var_ann (var);

      if (ann->in_vuse_list || ann->in_v_may_def_list)
        return;
      ann->in_vuse_list = 1;
    }

  VARRAY_PUSH_TREE (build_vuses, var);
}


/* Add VAR to the list of virtual must definitions.  */

static inline void
append_v_must_def (tree var)
{
  unsigned i;

  /* Don't allow duplicate entries.  */
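  /* Unlike the V_MAY_DEF and VUSE lists, which use per-variable annotation
     bits, this list is expected to stay very short (normally one element,
     or one per subvar), so a linear duplicate scan suffices.  */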
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_must_defs); i++)
    if (var == VARRAY_TREE (build_v_must_defs, i))
      return;

  VARRAY_PUSH_TREE (build_v_must_defs, var);
}

/* Create an operands cache for STMT, returning it in NEW_OPS.  OLD_OPS are
   the original operands, and if ANN is non-null, appropriate stmt flags are
   set in the stmt's annotation.  Note that some fields in OLD_OPS may change
   to NULL, although none of the memory they originally pointed to will be
   destroyed.  It is appropriate to call free_ssa_operands() on the value
   returned in OLD_OPS.

   The rationale for this: Certain optimizations wish to examine the
   difference between NEW_OPS and OLD_OPS after processing.  If a set of
   operands doesn't change, NEW_OPS will simply assume the pointer in
   OLD_OPS, and the OLD_OPS pointer will be set to NULL, indicating no
   memory needs to be cleared.  Usage might appear something like:

       old_ops_copy = old_ops = stmt_ann(stmt)->operands;
       build_ssa_operands (stmt, NULL, &old_ops, &new_ops);
          <* compare old_ops_copy and new_ops *>
       free_ssa_operands (old_ops);  */

static void
build_ssa_operands (tree stmt, stmt_ann_t ann, stmt_operands_p old_ops,
                    stmt_operands_p new_ops)
{
  enum tree_code code;
  tree_ann_t saved_ann = stmt->common.ann;

  /* Replace stmt's annotation with the one passed in for the duration
     of the operand building process.  This allows "fake" stmts to be built
     and not be included in other data structures which can be built here.  */
  stmt->common.ann = (tree_ann_t) ann;

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  if (ann)
    {
      ann->has_volatile_ops = false;
      ann->makes_aliased_stores = false;
      ann->makes_aliased_loads = false;
    }

  start_ssa_stmt_operands ();

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      /* First get operands from the RHS.  For the LHS, we use a V_MAY_DEF if
         either only part of LHS is modified or if the RHS might throw,
         otherwise, use V_MUST_DEF.

         ??? If it might throw, we should represent somehow that it is killed
         on the fallthrough path.  */
      {
        tree lhs = TREE_OPERAND (stmt, 0);
        int lhs_flags = opf_is_def;

        get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);

        /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
           or not the entire LHS is modified; that depends on what's
           inside the VIEW_CONVERT_EXPR.  */
        if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
          lhs = TREE_OPERAND (lhs, 0);

        if (TREE_CODE (lhs) != ARRAY_REF && TREE_CODE (lhs) != ARRAY_RANGE_REF
            && TREE_CODE (lhs) != BIT_FIELD_REF
            && TREE_CODE (lhs) != REALPART_EXPR
            && TREE_CODE (lhs) != IMAGPART_EXPR)
          lhs_flags |= opf_kill_def;

        get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
      }
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
      break;

      /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
         pointer (which may only happen for USE operands), we will fail the
         assertion in finalize_ssa_uses.  This default will handle statements
         like empty statements, or CALL_EXPRs that may appear on the RHS of
         a statement or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none);
      break;
    }

  finalize_ssa_stmt_operands (stmt, old_ops, new_ops);
  stmt->common.ann = saved_ann;
}


/* Free any operands vectors in OPS.  */

static void
free_ssa_operands (stmt_operands_p ops)
{
  if (ops->def_ops)
    free_defs (&(ops->def_ops));
  if (ops->use_ops)
    free_uses (&(ops->use_ops));
  if (ops->vuse_ops)
    free_vuses (&(ops->vuse_ops));
  if (ops->v_may_def_ops)
    free_v_may_defs (&(ops->v_may_def_ops));
  if (ops->v_must_def_ops)
    free_v_must_defs (&(ops->v_must_def_ops));
}


/* Get the operands of statement STMT.  Note that repeated calls to
   get_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to modify_stmt().  */

void
get_stmt_operands (tree stmt)
{
  stmt_ann_t ann;
  stmt_operands_t old_operands;

  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  gcc_assert (!SSA_VAR_P (stmt));

  ann = get_stmt_ann (stmt);

  /* If the statement has not been modified, the operands are still valid.  */
  if (!ann->modified)
    return;

  timevar_push (TV_TREE_OPS);

  old_operands = ann->operands;
  memset (&(ann->operands), 0, sizeof (stmt_operands_t));

  build_ssa_operands (stmt, ann, &old_operands, &(ann->operands));
  free_ssa_operands (&old_operands);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to modify_stmt().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}


/* Return true if OFFSET and SIZE define a range that overlaps with some
   portion of the range of SV, a subvar.  If there was an exact overlap,
   *EXACT will be set to true upon return.  */

static bool
overlap_subvar (HOST_WIDE_INT offset, HOST_WIDE_INT size,
                subvar_t sv, bool *exact)
{
  /* There are three possible cases of overlap.

     1. We can have an exact overlap, like so:
        |offset, offset + size             |
        |sv->offset, sv->offset + sv->size |

     2. We can have offset starting after sv->offset, like so:
                  |offset, offset + size |
        |sv->offset, sv->offset + sv->size |

     3. We can have offset starting before sv->offset, like so:
        |offset, offset + size |
            |sv->offset, sv->offset + sv->size|  */
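
  /* For instance (a hypothetical layout): offset == 32 and size == 32
     overlap a subvar with sv->offset == 0 and sv->size == 64 via case 2,
     but not exactly, so *EXACT would remain false.  */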

  if (exact)
    *exact = false;
  if (offset == sv->offset && size == sv->size)
    {
      if (exact)
        *exact = true;
      return true;
    }
  else if (offset >= sv->offset && offset < (sv->offset + sv->size))
    return true;
  else if (offset < sv->offset && (offset + size > sv->offset))
    return true;

  return false;
}

/* Recursively scan the expression pointed to by EXPR_P in statement STMT.
   FLAGS is one of the OPF_* constants modifying how to interpret the
   operands found.  */
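
/* For instance, given "a = b + 1" where 'a' and 'b' are GIMPLE registers,
   scanning appends a DEF for 'a' and a USE for 'b'; if 'a' were an aliased
   memory variable instead, it would receive a V_MAY_DEF rather than a
   real DEF.  */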

static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class class;
  tree expr = *expr_p;
  stmt_ann_t s_ann = stmt_ann (stmt);

  if (expr == NULL)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* We could have the address of a component, array member,
         etc which has interesting variable references.  */
      /* Taking the address of a variable does not represent a
         reference to it, but the fact that the stmt takes its address will
         be of interest to some passes (e.g. alias resolution).  */
      add_stmt_operand (expr_p, s_ann, 0);

      /* If the address is invariant, there may be no interesting variable
         references inside.  */
      if (is_gimple_min_invariant (expr))
        return;

      /* There should be no VUSEs created, since the referenced objects are
         not really accessed.  The only operands that we should find here
         are ARRAY_REF indices which will always be real operands (GIMPLE
         does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case CONST_DECL:
      {
        subvar_t svars;

        /* Add the subvars for a variable if it has subvars, to DEFS or
           USES.  Otherwise, add the variable itself.  Whether it goes to
           USES or DEFS depends on the operand flags.  */
        if (var_can_have_subvars (expr)
            && (svars = get_subvars_for_var (expr)))
          {
            subvar_t sv;
            for (sv = svars; sv; sv = sv->next)
              add_stmt_operand (&sv->var, s_ann, flags);
          }
        else
          add_stmt_operand (expr_p, s_ann, flags);

        return;
      }

    case MISALIGNED_INDIRECT_REF:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
      /* fall through */

    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Treat array references as references to the virtual variable
         representing the array.  The virtual variable for an ARRAY_REF
         is the VAR_DECL for the array.  */

      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
         according to the value of IS_DEF.  Recurse if the LHS of the
         ARRAY_REF node is not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
        add_stmt_operand (expr_p, s_ann, flags);
      else
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
      return;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree ref;
        HOST_WIDE_INT offset, size;
        /* This component ref becomes an access to all of the subvariables
           it can touch, if we can determine that, but *NOT* the real one.
           If we can't determine which fields we could touch, the recursion
           will eventually get to a variable and add *all* of its subvars,
           or whatever is the minimum correct subset.  */

        ref = okay_component_ref_for_subvars (expr, &offset, &size);
        if (ref)
          {
            subvar_t svars = get_subvars_for_var (ref);
            subvar_t sv;
            for (sv = svars; sv; sv = sv->next)
              {
                bool exact;
                if (overlap_subvar (offset, size, sv, &exact))
                  {
                    if (exact)
                      flags &= ~opf_kill_def;
                    add_stmt_operand (&sv->var, s_ann, flags);
                  }
              }
          }
        else
          get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
                             flags & ~opf_kill_def);

        if (code == COMPONENT_REF)
          get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
        return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
         and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case CALL_EXPR:
      get_call_expr_operands (stmt, expr);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
      return;

    case MODIFY_EXPR:
      {
        int subflags;
        tree op;

        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);

        op = TREE_OPERAND (expr, 0);
        /* Look through a WITH_SIZE_EXPR wrapper to classify the real
           destination.  */
        if (TREE_CODE (op) == WITH_SIZE_EXPR)
          op = TREE_OPERAND (op, 0);
        if (TREE_CODE (op) == ARRAY_REF
            || TREE_CODE (op) == ARRAY_RANGE_REF
            || TREE_CODE (op) == REALPART_EXPR
            || TREE_CODE (op) == IMAGPART_EXPR)
          subflags = opf_is_def;
        else
          subflags = opf_is_def | opf_kill_def;

        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
        return;
      }

    case CONSTRUCTOR:
      {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
           are still in use as the COMPLEX_EXPR equivalent for vectors.  */

        tree t;
        for (t = TREE_OPERAND (expr, 0); t ; t = TREE_CHAIN (t))
          get_expr_operands (stmt, &TREE_VALUE (t), opf_none);

        return;
      }

    case TRUTH_NOT_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    do_binary:
      {
        tree op0 = TREE_OPERAND (expr, 0);
        tree op1 = TREE_OPERAND (expr, 1);

        /* If it would be profitable to swap the operands, then do so to
           canonicalize the statement, enabling better optimization.

           By placing canonicalization of such expressions here we
           transparently keep statements in canonical form, even
           when the statement is modified.  */
        if (tree_swap_operands_p (op0, op1, false))
          {
            /* For relationals we need to swap the operands
               and change the code.  */
            if (code == LT_EXPR
                || code == GT_EXPR
                || code == LE_EXPR
                || code == GE_EXPR)
              {
                TREE_SET_CODE (expr, swap_tree_comparison (code));
                TREE_OPERAND (expr, 0) = op1;
                TREE_OPERAND (expr, 1) = op0;
              }

            /* For a commutative operator we can just swap the operands.  */
            else if (commutative_tree_code (code))
              {
                TREE_OPERAND (expr, 0) = op1;
                TREE_OPERAND (expr, 1) = op0;
              }
          }

        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        return;
      }

    case REALIGN_LOAD_EXPR:
      {
        get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
        return;
      }

    case BLOCK:
    case FUNCTION_DECL:
    case EXC_PTR_EXPR:
    case FILTER_EXPR:
    case LABEL_DECL:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (class == tcc_unary)
        goto do_unary;
      if (class == tcc_binary || class == tcc_comparison)
        goto do_binary;
      if (class == tcc_constant || class == tcc_type)
        return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  internal_error ("internal error");
#endif
  gcc_unreachable ();
}


/* Scan operands in ASM_EXPR statement STMT.  */

static void
get_asm_expr_operands (tree stmt)
{
  stmt_ann_t s_ann = stmt_ann (stmt);
  int noutputs = list_length (ASM_OUTPUTS (stmt));
  const char **oconstraints
    = (const char **) alloca ((noutputs) * sizeof (const char *));
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
    {
      oconstraints[i] = constraint
        = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_output_constraint (&constraint, i, 0, 0,
                               &allows_mem, &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t))
            note_addressable (t, s_ann);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
    }

  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
    {
      constraint
        = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
         address of this operand.  */
      if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
          if (t && DECL_P (t))
            note_addressable (t, s_ann);
        }

      get_expr_operands (stmt, &TREE_VALUE (link), 0);
    }

  /* Clobber memory for asm ("" : : : "memory");  */
  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
      {
        unsigned i;
        bitmap_iterator bi;

        /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
           decided to group them).  */
        if (global_var)
          add_stmt_operand (&global_var, s_ann, opf_is_def);
        else
          EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
            {
              tree var = referenced_var (i);
              add_stmt_operand (&var, s_ann, opf_is_def);
            }

        /* Now clobber all addressables.  */
        EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
          {
            tree var = referenced_var (i);
            add_stmt_operand (&var, s_ann, opf_is_def);
          }

        break;
      }
}

/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.  */
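
/* For illustration: in a load such as "x = *p_1", if p_1 carries a name
   memory tag (say "NMT.a", a hypothetical tag), the dereference adds a
   VUSE for the tag rather than for any particular pointed-to variable;
   a store through the pointer would add a V_MAY_DEF of the tag instead.  */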

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* Stores into INDIRECT_REF operands are never killing definitions.  */
  flags &= ~opf_kill_def;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
          && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
          && pi->name_mem_tag)
        {
          /* PTR has its own memory tag.  Use it.  */
          add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
        }
      else
        {
          /* If PTR is not an SSA_NAME or it doesn't have a name
             tag, use its type memory tag.  */
          var_ann_t v_ann;

          /* If we are emitting debugging dumps, display a warning if
             PTR is an SSA_NAME with no flow-sensitive alias
             information.  That means that we may need to compute
             aliasing again.  */
          if (dump_file
              && TREE_CODE (ptr) == SSA_NAME
              && pi == NULL)
            {
              fprintf (dump_file,
                       "NOTE: no flow-sensitive alias info for ");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, " in ");
              print_generic_stmt (dump_file, stmt, dump_flags);
            }

          if (TREE_CODE (ptr) == SSA_NAME)
            ptr = SSA_NAME_VAR (ptr);
          v_ann = var_ann (ptr);
          if (v_ann->type_mem_tag)
            add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
        }
    }

  /* If a constant is used as a pointer, we can't generate a real
     operand for it but we mark the statement volatile to prevent
     optimizations from messing things up.  */
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      if (s_ann)
        s_ann->has_volatile_ops = true;
      return;
    }

  /* Everything else *should* have been folded elsewhere, but users
     are smarter than we in finding ways to write invalid code.  We
     cannot just abort here.  If we were absolutely certain that we
     do handle all valid cases, then we could just do nothing here.
     That seems optimistic, so attempt to do something logical... */
  else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
           && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
           && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
    {
      /* Make sure we know the object is addressable.  */
      pptr = &TREE_OPERAND (ptr, 0);
      add_stmt_operand (pptr, s_ann, 0);

      /* Mark the object itself with a VUSE.  */
      pptr = &TREE_OPERAND (*pptr, 0);
      get_expr_operands (stmt, pptr, flags);
      return;
    }

  /* Ok, this isn't even is_gimple_min_invariant.  Something's broken.  */
  else
    gcc_unreachable ();

  /* Add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr, opf_none);
}

/* A subroutine of get_expr_operands to handle CALL_EXPR.  */

static void
get_call_expr_operands (tree stmt, tree expr)
{
  tree op;
  int call_flags = call_expr_flags (expr);

  /* If aliases have been computed already, add V_MAY_DEF or V_USE
     operands for all the symbols that have been found to be
     call-clobbered.

     Note that if aliases have not been computed, the global effects
     of calls will not be included in the SSA web.  This is fine
     because no optimizer should run before aliases have been
     computed.  By not bothering with virtual operands for CALL_EXPRs
     we avoid adding superfluous virtual operands, which can be a
     significant compile time sink (See PR 15855).  */
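
  /* For illustration: once aliasing is known, a statement "foo ();" where
     global 'g' is call-clobbered receives "g_2 = V_MAY_DEF <g_1>" (the SSA
     version numbers here are hypothetical), while a call to a pure function
     only receives "VUSE <g_1>".  */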
  if (aliases_computed_p
      && !bitmap_empty_p (call_clobbered_vars)
      && !(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
         A 'noreturn' function might, but since we don't return anyway
         there is no point in recording that.  */
      if (TREE_SIDE_EFFECTS (expr)
          && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
        add_call_clobber_ops (stmt);
      else if (!(call_flags & ECF_CONST))
        add_call_read_ops (stmt);
    }

  /* Find uses in the called function.  */
  get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);

  for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
    get_expr_operands (stmt, &TREE_VALUE (op), opf_none);

  get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
}


/* Add *VAR_P to the appropriate operand array of the statement with
   annotation S_ANN.  FLAGS is as in get_expr_operands.  If *VAR_P is a
   GIMPLE register, it will be added to the statement's real operands,
   otherwise it is added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  bool is_real_op;
  tree var, sym;
  var_ann_t v_ann;

  var = *var_p;
  STRIP_NOPS (var);

  /* If the operand is an ADDR_EXPR, add its operand to the list of
     variables that have had their address taken in this statement.  */
  if (TREE_CODE (var) == ADDR_EXPR)
    {
      note_addressable (TREE_OPERAND (var, 0), s_ann);
      return;
    }

  /* If the original variable is not a scalar, it will be added to the list
     of virtual operands.  In that case, use its base symbol as the virtual
     variable representing it.  */
  is_real_op = is_gimple_reg (var);
  if (!is_real_op && !DECL_P (var))
    var = get_virtual_var (var);

  /* If VAR is not a variable that we care to optimize, do nothing.  */
  if (var == NULL_TREE || !SSA_VAR_P (var))
    return;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
        append_def (var_p);
      else
        append_use (var_p);
    }
  else
    {
      varray_type aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases) to
         virtual operands, unless the caller has specifically requested
         not to add virtual operands (used when adding operands inside an
         ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
        return;

      aliases = v_ann->may_aliases;

      if (aliases == NULL)
        {
          /* The variable is not aliased or it is an alias tag.  */
          if (flags & opf_is_def)
            {
              if (flags & opf_kill_def)
                {
                  /* Only regular variables or struct fields may get a
                     V_MUST_DEF operand.  */
                  gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG
                              || v_ann->mem_tag_kind == STRUCT_FIELD);
                  /* V_MUST_DEF for non-aliased, non-GIMPLE register
                     variable definitions.  */
                  append_v_must_def (var);
                }
              else
                {
                  /* Add a V_MAY_DEF for call-clobbered variables and
                     memory tags.  */
                  append_v_may_def (var);
                }
            }
          else
            {
              append_vuse (var);
              if (s_ann && v_ann->is_alias_tag)
                s_ann->makes_aliased_loads = 1;
            }
        }
      else
        {
          size_t i;

          /* The variable is aliased.  Add its aliases to the virtual
             operands.  */
          gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);

          if (flags & opf_is_def)
            {
              /* If the variable is also an alias tag, add a virtual
                 operand for it, otherwise we will miss representing
                 references to the members of the variable's alias set.
                 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
              if (v_ann->is_alias_tag)
                append_v_may_def (var);

              for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
                append_v_may_def (VARRAY_TREE (aliases, i));

              if (s_ann)
                s_ann->makes_aliased_stores = 1;
            }
          else
            {
              /* Similarly, append a virtual use for VAR itself, when
                 it is an alias tag.  */
              if (v_ann->is_alias_tag)
                append_vuse (var);

              for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
                append_vuse (VARRAY_TREE (aliases, i));

              if (s_ann)
                s_ann->makes_aliased_loads = 1;
            }
        }
    }
}


/* Record that VAR had its address taken in the statement with annotation
   S_ANN.  */

static void
note_addressable (tree var, stmt_ann_t s_ann)
{
  tree ref;
  subvar_t svars;
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;

  if (!s_ann)
    return;

  /* If this is a COMPONENT_REF, and we know exactly what it touches, we only
     take the address of the subvariables it will touch.
     Otherwise, we take the address of all the subvariables, plus the real
     ones.  */

  if (var && TREE_CODE (var) == COMPONENT_REF
      && (ref = okay_component_ref_for_subvars (var, &offset, &size)))
    {
      subvar_t sv;
      svars = get_subvars_for_var (ref);

      if (s_ann->addresses_taken == NULL)
        s_ann->addresses_taken = BITMAP_GGC_ALLOC ();

      for (sv = svars; sv; sv = sv->next)
        {
          if (overlap_subvar (offset, size, sv, NULL))
            bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid);
        }
      return;
    }

  var = get_base_address (var);
  if (var && SSA_VAR_P (var))
    {
      if (s_ann->addresses_taken == NULL)
        s_ann->addresses_taken = BITMAP_GGC_ALLOC ();

      if (var_can_have_subvars (var)
          && (svars = get_subvars_for_var (var)))
        {
          subvar_t sv;
          for (sv = svars; sv; sv = sv->next)
            bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid);
        }
      else
        bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
    }
}

/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt)
{
  unsigned i;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;

  /* Functions that are not const, pure or noreturn may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_is_def);
      return;
    }

  /* If cache is valid, copy the elements into the build vectors.  */
  if (ssa_call_clobbered_cache_valid)
    {
      for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_vuses); i++)
        {
          t = VARRAY_TREE (clobbered_vuses, i);
          gcc_assert (TREE_CODE (t) != SSA_NAME);
          var_ann (t)->in_vuse_list = 1;
          VARRAY_PUSH_TREE (build_vuses, t);
        }
      for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_v_may_defs); i++)
        {
          t = VARRAY_TREE (clobbered_v_may_defs, i);
          gcc_assert (TREE_CODE (t) != SSA_NAME);
          var_ann (t)->in_v_may_def_list = 1;
          VARRAY_PUSH_TREE (build_v_may_defs, t);
        }
      if (s_ann)
        {
          s_ann->makes_aliased_loads = clobbered_aliased_loads;
          s_ann->makes_aliased_stores = clobbered_aliased_stores;
        }
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a V_MAY_DEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
    {
      tree var = referenced_var (i);
      if (TREE_READONLY (var)
          && (TREE_STATIC (var) || DECL_EXTERNAL (var)))
        add_stmt_operand (&var, &empty_ann, opf_none);
      else
        add_stmt_operand (&var, &empty_ann, opf_is_def);
    }

  clobbered_aliased_loads = empty_ann.makes_aliased_loads;
  clobbered_aliased_stores = empty_ann.makes_aliased_stores;

  /* Set the flags for a stmt's annotation.  */
  if (s_ann)
    {
      s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
      s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
    }

  /* Prepare empty cache vectors.  */
  if (clobbered_v_may_defs)
    {
      VARRAY_POP_ALL (clobbered_vuses);
      VARRAY_POP_ALL (clobbered_v_may_defs);
    }
  else
    {
      VARRAY_TREE_INIT (clobbered_v_may_defs, 10, "clobbered_v_may_defs");
      VARRAY_TREE_INIT (clobbered_vuses, 10, "clobbered_vuses");
    }

  /* Now fill the clobbered cache with the values that have been found.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
    VARRAY_PUSH_TREE (clobbered_vuses, VARRAY_TREE (build_vuses, i));
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
    VARRAY_PUSH_TREE (clobbered_v_may_defs, VARRAY_TREE (build_v_may_defs, i));

  ssa_call_clobbered_cache_valid = true;
}


/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt)
{
  unsigned i;
  tree t;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  struct stmt_ann_d empty_ann;

  /* If the function is not pure, it may reference memory.  Add a VUSE for
     .GLOBAL_VAR if it has been created.  See add_referenced_var for the
     heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (global_var)
    {
      add_stmt_operand (&global_var, s_ann, opf_none);
      return;
    }

  /* If cache is valid, copy the elements into the build vector.  */
  if (ssa_ro_call_cache_valid)
    {
      for (i = 0; i < VARRAY_ACTIVE_SIZE (ro_call_vuses); i++)
        {
          t = VARRAY_TREE (ro_call_vuses, i);
          gcc_assert (TREE_CODE (t) != SSA_NAME);
          var_ann (t)->in_vuse_list = 1;
          VARRAY_PUSH_TREE (build_vuses, t);
        }
      if (s_ann)
        s_ann->makes_aliased_loads = ro_call_aliased_loads;
      return;
    }

  memset (&empty_ann, 0, sizeof (struct stmt_ann_d));

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
    {
      tree var = referenced_var (i);
      add_stmt_operand (&var, &empty_ann, opf_none);
    }

  ro_call_aliased_loads = empty_ann.makes_aliased_loads;
  if (s_ann)
    s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;

  /* Prepare empty cache vectors.  */
  if (ro_call_vuses)
    VARRAY_POP_ALL (ro_call_vuses);
  else
    VARRAY_TREE_INIT (ro_call_vuses, 10, "ro_call_vuses");

  /* Now fill the read-only call cache with the values that have been
     found.  */
  for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
    VARRAY_PUSH_TREE (ro_call_vuses, VARRAY_TREE (build_vuses, i));

  ssa_ro_call_cache_valid = true;
}

/* Copies virtual operands from SRC to DST.  */

void
copy_virtual_operands (tree dst, tree src)
{
  unsigned i;
  vuse_optype vuses = STMT_VUSE_OPS (src);
  v_may_def_optype v_may_defs = STMT_V_MAY_DEF_OPS (src);
  v_must_def_optype v_must_defs = STMT_V_MUST_DEF_OPS (src);
  vuse_optype *vuses_new = &stmt_ann (dst)->operands.vuse_ops;
  v_may_def_optype *v_may_defs_new = &stmt_ann (dst)->operands.v_may_def_ops;
  v_must_def_optype *v_must_defs_new
    = &stmt_ann (dst)->operands.v_must_def_ops;

  if (vuses)
    {
      *vuses_new = allocate_vuse_optype (NUM_VUSES (vuses));
      for (i = 0; i < NUM_VUSES (vuses); i++)
        SET_VUSE_OP (*vuses_new, i, VUSE_OP (vuses, i));
    }

  if (v_may_defs)
    {
      *v_may_defs_new = allocate_v_may_def_optype (NUM_V_MAY_DEFS (v_may_defs));
      for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
        {
          SET_V_MAY_DEF_OP (*v_may_defs_new, i, V_MAY_DEF_OP (v_may_defs, i));
          SET_V_MAY_DEF_RESULT (*v_may_defs_new, i,
                                V_MAY_DEF_RESULT (v_may_defs, i));
        }
    }

  if (v_must_defs)
    {
      *v_must_defs_new
        = allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
      for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
        {
          SET_V_MUST_DEF_RESULT (*v_must_defs_new, i,
                                 V_MUST_DEF_RESULT (v_must_defs, i));
          SET_V_MUST_DEF_KILL (*v_must_defs_new, i,
                               V_MUST_DEF_KILL (v_must_defs, i));
        }
    }
}

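/* A minimal usage sketch (hypothetical caller code): after creating NEW_STMT
   as a copy of OLD_STMT, the copy can inherit the virtual operands directly
   instead of being rescanned:

       copy_virtual_operands (new_stmt, old_stmt);  */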

/* Specifically for use in DOM's expression analysis.  Given a store, we
   create an artificial stmt which looks like a load from the store; this
   can be used to eliminate redundant loads.  OLD_OPS are the operands from
   the store stmt, and NEW_STMT is the new load which represents a load of
   the values stored.  */

void
create_ssa_artficial_load_stmt (stmt_operands_p old_ops, tree new_stmt)
{
  stmt_ann_t ann;
  tree op;
  stmt_operands_t tmp;
  unsigned j;

  memset (&tmp, 0, sizeof (stmt_operands_t));
  ann = get_stmt_ann (new_stmt);

  /* Free operands just in case it was an existing stmt.  */
  free_ssa_operands (&(ann->operands));

  build_ssa_operands (new_stmt, NULL, &tmp, &(ann->operands));
  free_vuses (&(ann->operands.vuse_ops));
  free_v_may_defs (&(ann->operands.v_may_def_ops));
  free_v_must_defs (&(ann->operands.v_must_def_ops));

  /* For each VDEF on the original statement, we want to create a
     VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
     statement.  */
  for (j = 0; j < NUM_V_MAY_DEFS (old_ops->v_may_def_ops); j++)
    {
      op = V_MAY_DEF_RESULT (old_ops->v_may_def_ops, j);
      append_vuse (op);
    }

  for (j = 0; j < NUM_V_MUST_DEFS (old_ops->v_must_def_ops); j++)
    {
      op = V_MUST_DEF_RESULT (old_ops->v_must_def_ops, j);
      append_vuse (op);
    }

  /* Now set the vuses for this new stmt.  */
  ann->operands.vuse_ops = finalize_ssa_vuses (&(tmp.vuse_ops));
}

#include "gt-tree-ssa-operands.h"