/* re PR target/13926 (GCC generates jumps that are too large to fit in word displacemen...
   [gcc.git] / gcc / tree-ssa-operands.c  */
1 /* SSA operands management for trees.
2 Copyright (C) 2003 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34
/* Flags to describe operand properties in get_stmt_operands and helpers.
   Note that the bit values are not in declaration order: opf_no_vops is
   bit 1 while opf_kill_def is bit 2.  */

/* By default, operands are loaded.  */
#define opf_none 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_is_def (1 << 0)

/* Operand is the target of an assignment expression.  */
#define opf_kill_def (1 << 2)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 1)

/* Scratch arrays used to assemble the operand vectors for one statement
   at a time; they are emptied by the finalize_ssa_* routines.  All are
   GTY-marked so the garbage collector scans them.  */

/* Array for building all the def operands.  */
static GTY (()) varray_type build_defs;

/* Array for building all the use operands.  */
static GTY (()) varray_type build_uses;

/* Array for building all the v_may_def operands.  */
static GTY (()) varray_type build_v_may_defs;

/* Array for building all the vuse operands.  */
static GTY (()) varray_type build_vuses;

/* Array for building all the v_must_def operands.  */
static GTY (()) varray_type build_v_must_defs;

/* Statement currently being scanned.  Set by verify_start_operands,
   cleared by finalize_ssa_stmt_operands; the append_* routines compare
   against it to catch operands being added to the wrong statement.  */
#ifdef ENABLE_CHECKING
tree check_build_stmt;
#endif

/* Snapshot of the virtual operands a statement had before a re-scan.
   get_stmt_operands saves the old vectors here so that append_v_may_def,
   append_vuse and append_v_must_def can re-use the existing SSA versions
   instead of creating new ones.  */
typedef struct voperands_d
{
  v_may_def_optype v_may_def_ops;
  vuse_optype vuse_ops;
  v_must_def_optype v_must_def_ops;
} *voperands_t;

static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int, voperands_t);
static void get_asm_expr_operands (tree, voperands_t);
static inline void append_def (tree *, tree);
static inline void append_use (tree *, tree);
static void append_v_may_def (tree, tree, voperands_t);
static void append_v_must_def (tree, tree, voperands_t);
static void add_call_clobber_ops (tree, voperands_t);
static void add_call_read_ops (tree, voperands_t);
static void add_stmt_operand (tree *, tree, int, voperands_t);
91
92 /* Return a vector of contiguous memory of a specified size. */
93
94 static inline def_optype
95 allocate_def_optype (unsigned num)
96 {
97 def_optype def_ops;
98 unsigned size;
99 size = sizeof (struct def_optype_d) + sizeof (tree *) * (num - 1);
100 def_ops = ggc_alloc (size);
101 def_ops->num_defs = num;
102 return def_ops;
103 }
104
105 static inline use_optype
106 allocate_use_optype (unsigned num)
107 {
108 use_optype use_ops;
109 unsigned size;
110 size = sizeof (struct use_optype_d) + sizeof (tree *) * (num - 1);
111 use_ops = ggc_alloc (size);
112 use_ops->num_uses = num;
113 return use_ops;
114 }
115
116 static inline v_may_def_optype
117 allocate_v_may_def_optype (unsigned num)
118 {
119 v_may_def_optype v_may_def_ops;
120 unsigned size;
121 size = sizeof (struct v_may_def_optype_d) + sizeof (tree) * ((num * 2) - 1);
122 v_may_def_ops = ggc_alloc (size);
123 v_may_def_ops->num_v_may_defs = num;
124 return v_may_def_ops;
125 }
126
127 static inline vuse_optype
128 allocate_vuse_optype (unsigned num)
129 {
130 vuse_optype vuse_ops;
131 unsigned size;
132 size = sizeof (struct vuse_optype_d) + sizeof (tree) * (num - 1);
133 vuse_ops = ggc_alloc (size);
134 vuse_ops->num_vuses = num;
135 return vuse_ops;
136 }
137
138 static inline v_must_def_optype
139 allocate_v_must_def_optype (unsigned num)
140 {
141 v_must_def_optype v_must_def_ops;
142 unsigned size;
143 size = sizeof (struct v_must_def_optype_d) + sizeof (tree *) * (num - 1);
144 v_must_def_ops = ggc_alloc (size);
145 v_must_def_ops->num_v_must_defs = num;
146 return v_must_def_ops;
147 }
148
149 static inline void
150 free_uses (use_optype *uses, bool dealloc)
151 {
152 if (*uses)
153 {
154 if (dealloc)
155 ggc_free (*uses);
156 *uses = NULL;
157 }
158 }
159
160 static inline void
161 free_defs (def_optype *defs, bool dealloc)
162 {
163 if (*defs)
164 {
165 if (dealloc)
166 ggc_free (*defs);
167 *defs = NULL;
168 }
169 }
170
171 static inline void
172 free_vuses (vuse_optype *vuses, bool dealloc)
173 {
174 if (*vuses)
175 {
176 if (dealloc)
177 ggc_free (*vuses);
178 *vuses = NULL;
179 }
180 }
181
182 static inline void
183 free_v_may_defs (v_may_def_optype *v_may_defs, bool dealloc)
184 {
185 if (*v_may_defs)
186 {
187 if (dealloc)
188 ggc_free (*v_may_defs);
189 *v_may_defs = NULL;
190 }
191 }
192
193 static inline void
194 free_v_must_defs (v_must_def_optype *v_must_defs, bool dealloc)
195 {
196 if (*v_must_defs)
197 {
198 if (dealloc)
199 ggc_free (*v_must_defs);
200 *v_must_defs = NULL;
201 }
202 }
203
204 void
205 remove_vuses (tree stmt)
206 {
207 stmt_ann_t ann;
208
209 ann = stmt_ann (stmt);
210 if (ann)
211 free_vuses (&(ann->vuse_ops), true);
212 }
213
214 void
215 remove_v_may_defs (tree stmt)
216 {
217 stmt_ann_t ann;
218
219 ann = stmt_ann (stmt);
220 if (ann)
221 free_v_may_defs (&(ann->v_may_def_ops), true);
222 }
223
224 void
225 remove_v_must_defs (tree stmt)
226 {
227 stmt_ann_t ann;
228
229 ann = stmt_ann (stmt);
230 if (ann)
231 free_v_must_defs (&(ann->v_must_def_ops), true);
232 }
233
/* Initialize the scratch varrays used to assemble operand vectors.
   DEFs and USEs hold pointers into statements (tree *), hence TREE_PTR
   varrays; the virtual operand arrays hold trees.  The initial sizes
   are only hints.  */

void
init_ssa_operands (void)
{
  VARRAY_TREE_PTR_INIT (build_defs, 5, "build defs");
  VARRAY_TREE_PTR_INIT (build_uses, 10, "build uses");
  VARRAY_TREE_INIT (build_v_may_defs, 10, "build v_may_defs");
  VARRAY_TREE_INIT (build_vuses, 10, "build vuses");
  VARRAY_TREE_INIT (build_v_must_defs, 10, "build v_must_defs");
}

/* Teardown hook paired with init_ssa_operands.  Intentionally empty:
   the build arrays are GTY-marked, so the garbage collector reclaims
   them.  */

void
fini_ssa_operands (void)
{
}
248
249 static void
250 finalize_ssa_defs (tree stmt)
251 {
252 unsigned num, x;
253 stmt_ann_t ann;
254 def_optype def_ops;
255
256 num = VARRAY_ACTIVE_SIZE (build_defs);
257 if (num == 0)
258 return;
259
260 #ifdef ENABLE_CHECKING
261 /* There should only be a single real definition per assignment. */
262 if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
263 abort ();
264 #endif
265
266 def_ops = allocate_def_optype (num);
267 for (x = 0; x < num ; x++)
268 def_ops->defs[x].def = VARRAY_TREE_PTR (build_defs, x);
269 VARRAY_POP_ALL (build_defs);
270
271 ann = stmt_ann (stmt);
272 ann->def_ops = def_ops;
273 }
274
275 static void
276 finalize_ssa_uses (tree stmt)
277 {
278 unsigned num, x;
279 use_optype use_ops;
280 stmt_ann_t ann;
281
282 num = VARRAY_ACTIVE_SIZE (build_uses);
283 if (num == 0)
284 return;
285
286 #ifdef ENABLE_CHECKING
287 {
288 unsigned x;
289 /* If the pointer to the operand is the statement itself, something is
290 wrong. It means that we are pointing to a local variable (the
291 initial call to get_stmt_operands does not pass a pointer to a
292 statement). */
293 for (x = 0; x < num; x++)
294 if (*(VARRAY_TREE_PTR (build_uses, x)) == stmt)
295 abort ();
296 }
297 #endif
298
299 use_ops = allocate_use_optype (num);
300 for (x = 0; x < num ; x++)
301 use_ops->uses[x].use = VARRAY_TREE_PTR (build_uses, x);
302 VARRAY_POP_ALL (build_uses);
303
304 ann = stmt_ann (stmt);
305 ann->use_ops = use_ops;
306 }
307
308 static void
309 finalize_ssa_v_may_defs (tree stmt)
310 {
311 unsigned num, x;
312 v_may_def_optype v_may_def_ops;
313 stmt_ann_t ann;
314
315 num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
316 if (num == 0)
317 return;
318
319 #ifdef ENABLE_CHECKING
320 /* V_MAY_DEFs must be entered in pairs of result/uses. */
321 if (num % 2 != 0)
322 abort();
323 #endif
324
325 v_may_def_ops = allocate_v_may_def_optype (num / 2);
326 for (x = 0; x < num; x++)
327 v_may_def_ops->v_may_defs[x] = VARRAY_TREE (build_v_may_defs, x);
328 VARRAY_CLEAR (build_v_may_defs);
329
330 ann = stmt_ann (stmt);
331 ann->v_may_def_ops = v_may_def_ops;
332 }
333
/* Move the VUSE operands collected in BUILD_VUSES into a permanent
   vector on STMT's annotation, after pruning VUSEs that are already
   implied by a V_MAY_DEF of the same variable.  Must run after
   finalize_ssa_v_may_defs, since it inspects the finalized V_MAY_DEF
   vector (enforced below when checking is enabled).  */

static inline void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, x;
  stmt_ann_t ann;
  vuse_optype vuse_ops;
  v_may_def_optype v_may_defs;

#ifdef ENABLE_CHECKING
  if (VARRAY_ACTIVE_SIZE (build_v_may_defs) > 0)
    {
      fprintf (stderr, "Please finalize V_MAY_DEFs before finalize VUSES.\n");
      abort ();
    }
#endif

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  if (num == 0)
    return;

  /* Remove superfluous VUSE operands.  If the statement already has a
     V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
     needed because V_MAY_DEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is aliased:

	      # VUSE <a_2>
	      # a_3 = V_MAY_DEF <a_2>
	      a = a + 1;

     The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
     operation.  */

  ann = stmt_ann (stmt);
  v_may_defs = V_MAY_DEF_OPS (ann);
  if (NUM_V_MAY_DEFS (v_may_defs) > 0)
    {
      size_t i, j;
      for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
	{
	  bool found = false;
	  for (j = 0; j < NUM_V_MAY_DEFS (v_may_defs); j++)
	    {
	      tree vuse_var, v_may_def_var;
	      tree vuse = VARRAY_TREE (build_vuses, i);
	      tree v_may_def = V_MAY_DEF_OP (v_may_defs, j);

	      /* Compare underlying variables, stripping SSA_NAME
		 wrappers on both sides.  */
	      if (TREE_CODE (vuse) == SSA_NAME)
		vuse_var = SSA_NAME_VAR (vuse);
	      else
		vuse_var = vuse;

	      if (TREE_CODE (v_may_def) == SSA_NAME)
		v_may_def_var = SSA_NAME_VAR (v_may_def);
	      else
		v_may_def_var = v_may_def;

	      if (vuse_var == v_may_def_var)
		{
		  found = true;
		  break;
		}
	    }

	  /* If we found a useless VUSE operand, remove it from the
	     operand array by replacing it with the last active element
	     in the operand array (unless the useless VUSE was the
	     last operand, in which case we simply remove it).  */
	  if (found)
	    {
	      if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
		{
		  VARRAY_TREE (build_vuses, i)
		    = VARRAY_TREE (build_vuses,
				   VARRAY_ACTIVE_SIZE (build_vuses) - 1);
		}
	      VARRAY_POP (build_vuses);

	      /* We want to rescan the element at this index, unless
		 this was the last element, in which case the loop
		 terminates.  Note that I is a size_t: when I is 0 this
		 decrement wraps around, and the loop increment brings
		 it back to 0, which is the intended rescan.  */
	      i--;
	    }
	}
    }

  num = VARRAY_ACTIVE_SIZE (build_vuses);
  /* We could have reduced the size to zero now, however.  */
  if (num == 0)
    return;

  vuse_ops = allocate_vuse_optype (num);
  for (x = 0; x < num; x++)
    vuse_ops->vuses[x] = VARRAY_TREE (build_vuses, x);
  VARRAY_CLEAR (build_vuses);
  ann->vuse_ops = vuse_ops;
}
430
431 static void
432 finalize_ssa_v_must_defs (tree stmt)
433 {
434 unsigned num, x;
435 stmt_ann_t ann;
436 v_must_def_optype v_must_def_ops;
437
438 num = VARRAY_ACTIVE_SIZE (build_v_must_defs);
439 if (num == 0)
440 return;
441
442 #ifdef ENABLE_CHECKING
443 /* There should only be a single V_MUST_DEF per assignment. */
444 if (TREE_CODE (stmt) == MODIFY_EXPR && num > 1)
445 abort ();
446 #endif
447
448 v_must_def_ops = allocate_v_must_def_optype (num);
449 for (x = 0; x < num ; x++)
450 v_must_def_ops->v_must_defs[x] = VARRAY_TREE (build_v_must_defs, x);
451 VARRAY_POP_ALL (build_v_must_defs);
452
453 ann = stmt_ann (stmt);
454 ann->v_must_def_ops = v_must_def_ops;
455 }
456
/* Finalize all the operand vectors collected for STMT and attach them
   to its annotation.  The order below matters: V_MAY_DEFs must be
   finalized before VUSEs because finalize_ssa_vuses prunes VUSEs that
   are implied by a V_MAY_DEF of the same variable.  Also clears the
   checking-mode CHECK_BUILD_STMT marker set by verify_start_operands.  */

extern void
finalize_ssa_stmt_operands (tree stmt)
{
#ifdef ENABLE_CHECKING
  if (check_build_stmt == NULL)
    abort();
#endif

  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_v_must_defs (stmt);
  finalize_ssa_v_may_defs (stmt);
  finalize_ssa_vuses (stmt);

#ifdef ENABLE_CHECKING
  check_build_stmt = NULL;
#endif
}
475
476
/* Checking-only sanity hook for the start of a statement scan: all
   build arrays must be empty and no other statement may be mid-scan.
   Records STMT in CHECK_BUILD_STMT so the append_* routines can verify
   operands are being added to the statement currently being scanned.
   Compiles to nothing when ENABLE_CHECKING is off.  */

extern void
verify_start_operands (tree stmt ATTRIBUTE_UNUSED)
{
#ifdef ENABLE_CHECKING
  if (VARRAY_ACTIVE_SIZE (build_defs) > 0
      || VARRAY_ACTIVE_SIZE (build_uses) > 0
      || VARRAY_ACTIVE_SIZE (build_vuses) > 0
      || VARRAY_ACTIVE_SIZE (build_v_may_defs) > 0
      || VARRAY_ACTIVE_SIZE (build_v_must_defs) > 0)
    abort ();
  if (check_build_stmt != NULL)
    abort();
  check_build_stmt = stmt;
#endif
}
492
493
494 /* Add DEF_P to the list of pointers to operands defined by STMT. */
495
496 static inline void
497 append_def (tree *def_p, tree stmt ATTRIBUTE_UNUSED)
498 {
499 #ifdef ENABLE_CHECKING
500 if (check_build_stmt != stmt)
501 abort();
502 #endif
503 VARRAY_PUSH_TREE_PTR (build_defs, def_p);
504 }
505
506
507 /* Add USE_P to the list of pointers to operands used by STMT. */
508
509 static inline void
510 append_use (tree *use_p, tree stmt ATTRIBUTE_UNUSED)
511 {
512 #ifdef ENABLE_CHECKING
513 if (check_build_stmt != stmt)
514 abort();
515 #endif
516 VARRAY_PUSH_TREE_PTR (build_uses, use_p);
517 }
518
519
520 /* Add a new virtual def for variable VAR to statement STMT. If PREV_VOPS
521 is not NULL, the existing entries are preserved and no new entries are
522 added here. This is done to preserve the SSA numbering of virtual
523 operands. */
524
525 static void
526 append_v_may_def (tree var, tree stmt, voperands_t prev_vops)
527 {
528 stmt_ann_t ann;
529 size_t i;
530 tree result, source;
531
532 #ifdef ENABLE_CHECKING
533 if (check_build_stmt != stmt)
534 abort();
535 #endif
536
537 ann = stmt_ann (stmt);
538
539 /* Don't allow duplicate entries. */
540
541 for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i += 2)
542 {
543 tree result = VARRAY_TREE (build_v_may_defs, i);
544 if (var == result
545 || (TREE_CODE (result) == SSA_NAME
546 && var == SSA_NAME_VAR (result)))
547 return;
548 }
549
550 /* If the statement already had virtual definitions, see if any of the
551 existing V_MAY_DEFs matches VAR. If so, re-use it, otherwise add a new
552 V_MAY_DEF for VAR. */
553 result = NULL_TREE;
554 source = NULL_TREE;
555 if (prev_vops)
556 for (i = 0; i < NUM_V_MAY_DEFS (prev_vops->v_may_def_ops); i++)
557 {
558 result = V_MAY_DEF_RESULT (prev_vops->v_may_def_ops, i);
559 if (result == var
560 || (TREE_CODE (result) == SSA_NAME
561 && SSA_NAME_VAR (result) == var))
562 {
563 source = V_MAY_DEF_OP (prev_vops->v_may_def_ops, i);
564 break;
565 }
566 }
567
568 /* If no previous V_MAY_DEF operand was found for VAR, create one now. */
569 if (source == NULL_TREE)
570 {
571 result = var;
572 source = var;
573 }
574
575 VARRAY_PUSH_TREE (build_v_may_defs, result);
576 VARRAY_PUSH_TREE (build_v_may_defs, source);
577 }
578
579
580 /* Add VAR to the list of virtual uses for STMT. If PREV_VOPS
581 is not NULL, the existing entries are preserved and no new entries are
582 added here. This is done to preserve the SSA numbering of virtual
583 operands. */
584
585 static void
586 append_vuse (tree var, tree stmt, voperands_t prev_vops)
587 {
588 stmt_ann_t ann;
589 size_t i;
590 bool found;
591 tree vuse;
592
593 #ifdef ENABLE_CHECKING
594 if (check_build_stmt != stmt)
595 abort();
596 #endif
597
598 ann = stmt_ann (stmt);
599
600 /* Don't allow duplicate entries. */
601 for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
602 {
603 tree vuse_var = VARRAY_TREE (build_vuses, i);
604 if (var == vuse_var
605 || (TREE_CODE (vuse_var) == SSA_NAME
606 && var == SSA_NAME_VAR (vuse_var)))
607 return;
608 }
609
610 /* If the statement already had virtual uses, see if any of the
611 existing VUSEs matches VAR. If so, re-use it, otherwise add a new
612 VUSE for VAR. */
613 found = false;
614 vuse = NULL_TREE;
615 if (prev_vops)
616 for (i = 0; i < NUM_VUSES (prev_vops->vuse_ops); i++)
617 {
618 vuse = VUSE_OP (prev_vops->vuse_ops, i);
619 if (vuse == var
620 || (TREE_CODE (vuse) == SSA_NAME
621 && SSA_NAME_VAR (vuse) == var))
622 {
623 found = true;
624 break;
625 }
626 }
627
628 /* If VAR existed already in PREV_VOPS, re-use it. */
629 if (found)
630 var = vuse;
631
632 VARRAY_PUSH_TREE (build_vuses, var);
633 }
634
635 /* Add VAR to the list of virtual must definitions for STMT. If PREV_VOPS
636 is not NULL, the existing entries are preserved and no new entries are
637 added here. This is done to preserve the SSA numbering of virtual
638 operands. */
639
640 static void
641 append_v_must_def (tree var, tree stmt, voperands_t prev_vops)
642 {
643 stmt_ann_t ann;
644 size_t i;
645 bool found;
646 tree v_must_def;
647
648 #ifdef ENABLE_CHECKING
649 if (check_build_stmt != stmt)
650 abort();
651 #endif
652
653 ann = stmt_ann (stmt);
654
655 /* Don't allow duplicate entries. */
656 for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_must_defs); i++)
657 {
658 tree v_must_def_var = VARRAY_TREE (build_v_must_defs, i);
659 if (var == v_must_def_var
660 || (TREE_CODE (v_must_def_var) == SSA_NAME
661 && var == SSA_NAME_VAR (v_must_def_var)))
662 return;
663 }
664
665 /* If the statement already had virtual must defs, see if any of the
666 existing V_MUST_DEFs matches VAR. If so, re-use it, otherwise add a new
667 V_MUST_DEF for VAR. */
668 found = false;
669 v_must_def = NULL_TREE;
670 if (prev_vops)
671 for (i = 0; i < NUM_V_MUST_DEFS (prev_vops->v_must_def_ops); i++)
672 {
673 v_must_def = V_MUST_DEF_OP (prev_vops->v_must_def_ops, i);
674 if (v_must_def == var
675 || (TREE_CODE (v_must_def) == SSA_NAME
676 && SSA_NAME_VAR (v_must_def) == var))
677 {
678 found = true;
679 break;
680 }
681 }
682
683 /* If VAR existed already in PREV_VOPS, re-use it. */
684 if (found)
685 var = v_must_def;
686
687 VARRAY_PUSH_TREE (build_v_must_defs, var);
688 }
689
690
691 /* External entry point which by-passes the previous vops mechanism. */
692 void
693 add_vuse (tree var, tree stmt)
694 {
695 append_vuse (var, stmt, NULL);
696 }
697
698
/* Get the operands of statement STMT.  Note that repeated calls to
   get_stmt_operands for the same statement will do nothing until the
   statement is marked modified by a call to modify_stmt().

   Sequence: reset the annotation flags, free the old real operands,
   snapshot the old virtual operands into PREV_VOPS (so their SSA
   versions can be re-used by the append_* routines), scan the
   statement, finalize the new vectors, then free the snapshot.  */

void
get_stmt_operands (tree stmt)
{
  enum tree_code code;
  stmt_ann_t ann;
  struct voperands_d prev_vops;

#if defined ENABLE_CHECKING
  /* The optimizers cannot handle statements that are nothing but a
     _DECL.  This indicates a bug in the gimplifier.  */
  if (SSA_VAR_P (stmt))
    abort ();
#endif

  /* Ignore error statements.  */
  if (TREE_CODE (stmt) == ERROR_MARK)
    return;

  /* get_stmt_ann rather than stmt_ann: presumably creates the
     annotation when it doesn't exist yet — confirm against
     tree-flow.h.  */
  ann = get_stmt_ann (stmt);

  /* If the statement has not been modified, the operands are still valid.  */
  if (!ann->modified)
    return;

  timevar_push (TV_TREE_OPS);

  /* Initially assume that the statement has no volatile operands, nor
     makes aliased loads or stores.  */
  ann->has_volatile_ops = false;
  ann->makes_aliased_stores = false;
  ann->makes_aliased_loads = false;

  /* Remove any existing operands as they will be scanned again.  */
  free_defs (&(ann->def_ops), true);
  free_uses (&(ann->use_ops), true);

  /* Before removing existing virtual operands, save them in PREV_VOPS so
     that we can re-use their SSA versions.  */
  prev_vops.v_may_def_ops = V_MAY_DEF_OPS (ann);
  prev_vops.vuse_ops = VUSE_OPS (ann);
  prev_vops.v_must_def_ops = V_MUST_DEF_OPS (ann);

  /* Don't free the previous values to memory since we're still using them.  */
  free_v_may_defs (&(ann->v_may_def_ops), false);
  free_vuses (&(ann->vuse_ops), false);
  free_v_must_defs (&(ann->v_must_def_ops), false);

  start_ssa_stmt_operands (stmt);

  code = TREE_CODE (stmt);
  switch (code)
    {
    case MODIFY_EXPR:
      /* Scan the RHS for uses first, then classify the LHS store.  */
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none, &prev_vops);
      /* A store through an aggregate/array component only may-defines
	 the underlying variable, so omit opf_kill_def for those.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == ARRAY_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == REALPART_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 0)) == IMAGPART_EXPR
	  /* Use a V_MAY_DEF if the RHS might throw, as the LHS won't be
	     modified in that case.  FIXME we should represent somehow
	     that it is killed on the fallthrough path.  */
	  || tree_could_throw_p (TREE_OPERAND (stmt, 1)))
	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_is_def,
			   &prev_vops);
      else
	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
			   opf_is_def | opf_kill_def, &prev_vops);
      break;

    case COND_EXPR:
      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none, &prev_vops);
      break;

    case SWITCH_EXPR:
      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none, &prev_vops);
      break;

    case ASM_EXPR:
      get_asm_expr_operands (stmt, &prev_vops);
      break;

    case RETURN_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none, &prev_vops);
      break;

    case GOTO_EXPR:
      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none, &prev_vops);
      break;

    case LABEL_EXPR:
      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none, &prev_vops);
      break;

      /* These nodes contain no variable references.  */
    case BIND_EXPR:
    case CASE_LABEL_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case EH_FILTER_EXPR:
    case CATCH_EXPR:
    case RESX_EXPR:
      break;

    default:
      /* Notice that if get_expr_operands tries to use &STMT as the operand
	 pointer (which may only happen for USE operands), we will abort in
	 append_use.  This default will handle statements like empty statements,
	 CALL_EXPRs or VA_ARG_EXPRs that may appear on the RHS of a statement
	 or as statements themselves.  */
      get_expr_operands (stmt, &stmt, opf_none, &prev_vops);
      break;
    }

  finalize_ssa_stmt_operands (stmt);

  /* Now free the previous virtual ops to memory.  */
  free_v_may_defs (&(prev_vops.v_may_def_ops), true);
  free_vuses (&(prev_vops.vuse_ops), true);
  free_v_must_defs (&(prev_vops.v_must_def_ops), true);

  /* Clear the modified bit for STMT.  Subsequent calls to
     get_stmt_operands for this statement will do nothing until the
     statement is marked modified by a call to modify_stmt().  */
  ann->modified = 0;

  timevar_pop (TV_TREE_OPS);
}
830
831
/* Recursively scan the expression pointed by EXPR_P in statement STMT.
   FLAGS is one of the OPF_* constants modifying how to interpret the
   operands found.  PREV_VOPS is as in append_v_may_def and append_vuse.
   This is the central dispatcher: each recognized tree code either adds
   operands via add_stmt_operand or recurses into sub-expressions.  */

static void
get_expr_operands (tree stmt, tree *expr_p, int flags, voperands_t prev_vops)
{
  enum tree_code code;
  char class;
  tree expr = *expr_p;

  if (expr == NULL || expr == error_mark_node)
    return;

  code = TREE_CODE (expr);
  class = TREE_CODE_CLASS (code);

  /* We could have the address of a component, array member, etc which
     has interesting variable references.  */
  if (code == ADDR_EXPR)
    {
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that STMT takes its address will be
	 of interest to some passes (e.g. alias resolution).
	 Note the NULL prev_vops: no virtual operand is created here.  */
      add_stmt_operand (expr_p, stmt, 0, NULL);

      /* If the address is constant (invariant is not sufficient), there will
	 be no interesting variable references inside.  */
      if (TREE_CONSTANT (expr))
	return;

      /* There should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find here
	 are ARRAY_REF indices which will always be real operands (GIMPLE
	 does not allow non-registers as array indices).  */
      flags |= opf_no_vops;

      /* Avoid recursion.  */
      expr_p = &TREE_OPERAND (expr, 0);
      expr = *expr_p;
      code = TREE_CODE (expr);
      class = TREE_CODE_CLASS (code);
    }

  /* Expressions that make no memory references.  */
  if (class == 'c'
      || class == 't'
      || code == BLOCK
      || code == FUNCTION_DECL
      || code == EXC_PTR_EXPR
      || code == FILTER_EXPR
      || code == LABEL_DECL)
    return;

  /* If we found a variable, add it to DEFS or USES depending on the
     operand flags.  */
  if (SSA_VAR_P (expr))
    {
      add_stmt_operand (expr_p, stmt, flags, prev_vops);
      return;
    }

  /* Pointer dereferences always represent a use of the base pointer.  */
  if (code == INDIRECT_REF)
    {
      tree *pptr = &TREE_OPERAND (expr, 0);
      tree ptr = *pptr;

      if (SSA_VAR_P (ptr))
	{
	  if (!aliases_computed_p)
	    {
	      /* If the pointer does not have a memory tag and aliases have not
		 been computed yet, mark the statement as having volatile
		 operands to prevent DOM from entering it in equivalence tables
		 and DCE from killing it.  */
	      stmt_ann (stmt)->has_volatile_ops = true;
	    }
	  else
	    {
	      struct ptr_info_def *pi = NULL;

	      /* If we have computed aliasing already, check if PTR has
		 flow-sensitive points-to information.  */
	      if (TREE_CODE (ptr) == SSA_NAME
		  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
		  && pi->name_mem_tag)
		{
		  /* PTR has its own memory tag.  Use it.  */
		  add_stmt_operand (&pi->name_mem_tag, stmt, flags,
				    prev_vops);
		}
	      else
		{
		  /* If PTR is not an SSA_NAME or it doesn't have a name
		     tag, use its type memory tag.  */
		  var_ann_t ann;

		  /* If we are emitting debugging dumps, display a warning if
		     PTR is an SSA_NAME with no flow-sensitive alias
		     information.  That means that we may need to compute
		     aliasing again.  */
		  if (dump_file
		      && TREE_CODE (ptr) == SSA_NAME
		      && pi == NULL)
		    {
		      fprintf (dump_file,
			       "NOTE: no flow-sensitive alias info for ");
		      print_generic_expr (dump_file, ptr, dump_flags);
		      fprintf (dump_file, " in ");
		      print_generic_stmt (dump_file, stmt, dump_flags);
		    }

		  if (TREE_CODE (ptr) == SSA_NAME)
		    ptr = SSA_NAME_VAR (ptr);
		  ann = var_ann (ptr);
		  add_stmt_operand (&ann->type_mem_tag, stmt, flags, prev_vops);
		}
	    }
	}

      /* If a constant is used as a pointer, we can't generate a real
	 operand for it but we mark the statement volatile to prevent
	 optimizations from messing things up.  */
      else if (TREE_CODE (ptr) == INTEGER_CST)
	{
	  stmt_ann (stmt)->has_volatile_ops = true;
	  return;
	}

      /* Everything else *should* have been folded elsewhere, but users
	 are smarter than we in finding ways to write invalid code.  We
	 cannot just abort here.  If we were absolutely certain that we
	 do handle all valid cases, then we could just do nothing here.
	 That seems optimistic, so attempt to do something logical... */
      else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
	       && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
	{
	  /* Make sure we know the object is addressable.  */
	  pptr = &TREE_OPERAND (ptr, 0);
	  add_stmt_operand (pptr, stmt, 0, NULL);

	  /* Mark the object itself with a VUSE.  */
	  pptr = &TREE_OPERAND (*pptr, 0);
	  get_expr_operands (stmt, pptr, flags, prev_vops);
	  return;
	}

      /* Ok, this isn't even is_gimple_min_invariant.  Something's broke.  */
      else
	abort ();

      /* Add a USE operand for the base pointer.  Only reached in the
	 SSA_VAR_P case above; the other arms return or abort.  */
      get_expr_operands (stmt, pptr, opf_none, prev_vops);
      return;
    }

  /* Treat array references as references to the virtual variable
     representing the array.  The virtual variable for an ARRAY_REF
     is the VAR_DECL for the array.  */
  if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
    {
      /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
	 according to the value of IS_DEF.  Recurse if the LHS of the
	 ARRAY_REF node is not a regular variable.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, stmt, flags, prev_vops);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);

      /* The remaining operands (index and the two optional extras) are
	 scanned as plain loads, not as part of the def.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none, prev_vops);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none, prev_vops);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none, prev_vops);
      return;
    }

  /* Similarly to arrays, references to compound variables (complex types
     and structures/unions) are globbed.

     FIXME: This means that

     	a.x = 6;
	a.y = 7;
	foo (a.x, a.y);

     will not be constant propagated because the two partial
     definitions to 'a' will kill each other.  Note that SRA may be
     able to fix this problem if 'a' can be scalarized.  */
  if (code == IMAGPART_EXPR || code == REALPART_EXPR || code == COMPONENT_REF)
    {
      /* If the LHS of the compound reference is not a regular variable,
	 recurse to keep looking for more operands in the subexpression.  */
      if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
	add_stmt_operand (expr_p, stmt, flags, prev_vops);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);

      if (code == COMPONENT_REF)
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none, prev_vops);
      return;
    }

  /* Function calls.  Add every argument to USES.  If the callee is
     neither pure nor const, create a VDEF reference for GLOBAL_VAR
     (See find_vars_r).  */
  if (code == CALL_EXPR)
    {
      tree op;
      int call_flags = call_expr_flags (expr);

      /* Find uses in the called function.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none, prev_vops);

      for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
	get_expr_operands (stmt, &TREE_VALUE (op), opf_none, prev_vops);

      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none, prev_vops);

      /* Only add clobber/read virtual operands if at least one
	 call-clobbered variable exists.  */
      if (bitmap_first_set_bit (call_clobbered_vars) >= 0)
	{
	  /* A 'pure' or a 'const' functions never call clobber anything.
	     A 'noreturn' function might, but since we don't return anyway
	     there is no point in recording that.  */
	  if (!(call_flags
		& (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	    add_call_clobber_ops (stmt, prev_vops);
	  else if (!(call_flags & (ECF_CONST | ECF_NORETURN)))
	    add_call_read_ops (stmt, prev_vops);
	}
      else if (!aliases_computed_p)
	stmt_ann (stmt)->has_volatile_ops = true;

      return;
    }

  /* Lists.  */
  if (code == TREE_LIST)
    {
      tree op;

      for (op = expr; op; op = TREE_CHAIN (op))
	get_expr_operands (stmt, &TREE_VALUE (op), flags, prev_vops);

      return;
    }

  /* Assignments.  Mirrors the MODIFY_EXPR case in get_stmt_operands,
     except there is no tree_could_throw_p check here.  */
  if (code == MODIFY_EXPR)
    {
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none, prev_vops);
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ARRAY_REF
	  || TREE_CODE (TREE_OPERAND (expr, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (expr, 0)) == REALPART_EXPR
	  || TREE_CODE (TREE_OPERAND (expr, 0)) == IMAGPART_EXPR)
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def,
			   prev_vops);
      else
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			   opf_is_def | opf_kill_def, prev_vops);
      return;
    }

  /* Mark VA_ARG_EXPR nodes as making volatile references.  FIXME,
     this is needed because we currently do not gimplify VA_ARG_EXPR
     properly.  */
  if (code == VA_ARG_EXPR)
    {
      stmt_ann (stmt)->has_volatile_ops = true;
      return;
    }

  /* Unary expressions.  */
  if (class == '1'
      || code == TRUTH_NOT_EXPR
      || code == BIT_FIELD_REF
      || code == CONSTRUCTOR)
    {
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);
      return;
    }

  /* Binary expressions.  */
  if (class == '2'
      || class == '<'
      || code == TRUTH_AND_EXPR
      || code == TRUTH_OR_EXPR
      || code == TRUTH_XOR_EXPR
      || code == COMPOUND_EXPR
      || code == OBJ_TYPE_REF)
    {
      tree op0 = TREE_OPERAND (expr, 0);
      tree op1 = TREE_OPERAND (expr, 1);

      /* If it would be profitable to swap the operands, then do so to
	 canonicalize the statement, enabling better optimization.

	 By placing canonicalization of such expressions here we
	 transparently keep statements in canonical form, even
	 when the statement is modified.  Note this mutates EXPR in
	 place during operand scanning.  */
      if (tree_swap_operands_p (op0, op1, false))
	{
	  /* For relationals we need to swap the operands and change
	     the code.  */
	  if (code == LT_EXPR
	      || code == GT_EXPR
	      || code == LE_EXPR
	      || code == GE_EXPR)
	    {
	      TREE_SET_CODE (expr, swap_tree_comparison (code));
	      TREE_OPERAND (expr, 0) = op1;
	      TREE_OPERAND (expr, 1) = op0;
	    }

	  /* For a commutative operator we can just swap the operands.  */
	  if (commutative_tree_code (code))
	    {
	      TREE_OPERAND (expr, 0) = op1;
	      TREE_OPERAND (expr, 1) = op0;
	    }
	}

      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags, prev_vops);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags, prev_vops);
      return;
    }

  /* If we get here, something has gone wrong.  */
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
  abort ();
}
1166
1167
1168 /* Scan operands in ASM_EXPR STMT. PREV_VOPS is as in append_v_may_def and
1169 append_vuse. */
1170
1171 static void
1172 get_asm_expr_operands (tree stmt, voperands_t prev_vops)
1173 {
1174 int noutputs = list_length (ASM_OUTPUTS (stmt));
1175 const char **oconstraints
1176 = (const char **) alloca ((noutputs) * sizeof (const char *));
1177 int i;
1178 tree link;
1179 const char *constraint;
1180 bool allows_mem, allows_reg, is_inout;
1181 stmt_ann_t s_ann = stmt_ann (stmt);
1182
1183 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1184 {
1185 oconstraints[i] = constraint
1186 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1187 parse_output_constraint (&constraint, i, 0, 0,
1188 &allows_mem, &allows_reg, &is_inout);
1189
1190 #if defined ENABLE_CHECKING
1191 /* This should have been split in gimplify_asm_expr. */
1192 if (allows_reg && is_inout)
1193 abort ();
1194 #endif
1195
1196 /* Memory operands are addressable. Note that STMT needs the
1197 address of this operand. */
1198 if (!allows_reg && allows_mem)
1199 {
1200 tree t = get_base_address (TREE_VALUE (link));
1201 if (t && DECL_P (t))
1202 note_addressable (t, s_ann);
1203 }
1204
1205 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def, prev_vops);
1206 }
1207
1208 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1209 {
1210 constraint
1211 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1212 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1213 oconstraints, &allows_mem, &allows_reg);
1214
1215 /* Memory operands are addressable. Note that STMT needs the
1216 address of this operand. */
1217 if (!allows_reg && allows_mem)
1218 {
1219 tree t = get_base_address (TREE_VALUE (link));
1220 if (t && DECL_P (t))
1221 note_addressable (t, s_ann);
1222 }
1223
1224 get_expr_operands (stmt, &TREE_VALUE (link), 0, prev_vops);
1225 }
1226
1227
1228 /* Clobber memory for asm ("" : : : "memory"); */
1229 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1230 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1231 {
1232 size_t i;
1233
1234 /* If we still have not computed aliasing information, we
1235 won't know what variables are call-clobbered and/or
1236 addressable. Just mark the statement as having volatile
1237 operands for now. */
1238 if (!aliases_computed_p)
1239 {
1240 stmt_ann (stmt)->has_volatile_ops = true;
1241 break;
1242 }
1243
1244 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1245 decided to group them). */
1246 if (global_var)
1247 add_stmt_operand (&global_var, stmt, opf_is_def, prev_vops);
1248 else
1249 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
1250 {
1251 tree var = referenced_var (i);
1252 add_stmt_operand (&var, stmt, opf_is_def, prev_vops);
1253 });
1254
1255 /* Now clobber all addressables. */
1256 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i,
1257 {
1258 tree var = referenced_var (i);
1259 add_stmt_operand (&var, stmt, opf_is_def, prev_vops);
1260 });
1261
1262 break;
1263 }
1264 }
1265
1266
1267 /* Add *VAR_P to the appropriate operand array of STMT. FLAGS is as in
1268 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1269 the statement's real operands, otherwise it is added to virtual
1270 operands.
1271
1272 PREV_VOPS is used when adding virtual operands to statements that
1273 already had them (See append_v_may_def and append_vuse). */
1274
static void
add_stmt_operand (tree *var_p, tree stmt, int flags, voperands_t prev_vops)
{
  bool is_real_op;
  tree var, sym;
  stmt_ann_t s_ann;
  var_ann_t v_ann;

  /* Look through no-op conversions to find the underlying operand.  */
  var = *var_p;
  STRIP_NOPS (var);

  s_ann = stmt_ann (stmt);

  /* If the operand is an ADDR_EXPR, add its operand to the list of
     variables that have had their address taken in this statement.  */
  if (TREE_CODE (var) == ADDR_EXPR)
    {
      note_addressable (TREE_OPERAND (var, 0), s_ann);
      return;
    }

  /* If the original variable is not a scalar, it will be added to the list
     of virtual operands.  In that case, use its base symbol as the virtual
     variable representing it.  */
  is_real_op = is_gimple_reg (var);
  if (!is_real_op && !DECL_P (var))
    var = get_virtual_var (var);

  /* If VAR is not a variable that we care to optimize, do nothing.  */
  if (var == NULL_TREE || !SSA_VAR_P (var))
    return;

  /* Annotations live on the underlying symbol, not on SSA names.  */
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* FIXME: We currently refuse to optimize variables that have hidden uses
     (variables used in VLA declarations, MD builtin calls and variables
     from the parent function in nested functions).  This is because not
     all uses of these variables are exposed in the IL or the statements
     that reference them are not in GIMPLE form.  If that's the case, mark
     the statement as having volatile operands and return.  */
  if (v_ann->has_hidden_use)
    {
      s_ann->has_volatile_ops = true;
      return;
    }

  /* Don't expose volatile variables to the optimizers.  */
  if (TREE_THIS_VOLATILE (sym))
    {
      s_ann->has_volatile_ops = true;
      return;
    }

  if (is_real_op)
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_is_def)
	append_def (var_p, stmt);
      else
	append_use (var_p, stmt);
    }
  else
    {
      varray_type aliases;

      /* The variable is not a GIMPLE register.  Add it (or its aliases) to
	 virtual operands, unless the caller has specifically requested
	 not to add virtual operands (used when adding operands inside an
	 ADDR_EXPR expression).  */
      if (flags & opf_no_vops)
	return;

      aliases = v_ann->may_aliases;

      /* If alias information hasn't been computed yet, then
	 addressable variables will not be an alias tag nor will they
	 have aliases.  In this case, mark the statement as having
	 volatile operands.  */
      if (!aliases_computed_p && may_be_aliased (var))
	s_ann->has_volatile_ops = true;

      if (aliases == NULL)
	{
	  /* The variable is not aliased or it is an alias tag.  */
	  if (flags & opf_is_def)
	    {
	      if (v_ann->is_alias_tag)
		{
		  /* Alias tagged vars get regular V_MAY_DEF  */
		  s_ann->makes_aliased_stores = 1;
		  append_v_may_def (var, stmt, prev_vops);
		}
	      else if ((flags & opf_kill_def)
		        && v_ann->mem_tag_kind == NOT_A_TAG)
		/* V_MUST_DEF for non-aliased non-GIMPLE register
		   variable definitions.  Avoid memory tags.  */
		append_v_must_def (var, stmt, prev_vops);
	      else
		/* Call-clobbered variables & memory tags get
		   V_MAY_DEF  */
		append_v_may_def (var, stmt, prev_vops);
	    }
	  else
	    {
	      /* Plain virtual use of an unaliased variable.  */
	      append_vuse (var, stmt, prev_vops);
	      if (v_ann->is_alias_tag)
		s_ann->makes_aliased_loads = 1;
	    }
	}
      else
	{
	  size_t i;

	  /* The variable is aliased.  Add its aliases to the virtual
	     operands.  */
	  if (VARRAY_ACTIVE_SIZE (aliases) == 0)
	    abort ();

	  if (flags & opf_is_def)
	    {
	      /* If the variable is also an alias tag, add a virtual
		 operand for it, otherwise we will miss representing
		 references to the members of the variable's alias set.
		 This fixes the bug in gcc.c-torture/execute/20020503-1.c.  */
	      if (v_ann->is_alias_tag)
		append_v_may_def (var, stmt, prev_vops);

	      /* Every member of the alias set may be modified by the
		 store.  */
	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_v_may_def (VARRAY_TREE (aliases, i), stmt, prev_vops);

	      s_ann->makes_aliased_stores = 1;
	    }
	  else
	    {
	      /* Same reasoning as above, but for loads: the tag itself
		 and every member of its alias set may be read.  */
	      if (v_ann->is_alias_tag)
		append_vuse (var, stmt, prev_vops);

	      for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
		append_vuse (VARRAY_TREE (aliases, i), stmt, prev_vops);

	      s_ann->makes_aliased_loads = 1;
	    }
	}
    }
}
1421
1422 /* Record that VAR had its address taken in the statement with annotations
1423 S_ANN. */
1424
1425 static void
1426 note_addressable (tree var, stmt_ann_t s_ann)
1427 {
1428 var = get_base_address (var);
1429 if (var && SSA_VAR_P (var))
1430 {
1431 if (s_ann->addresses_taken == NULL)
1432 s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
1433 bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
1434 }
1435 }
1436
1437
1438 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1439 clobbered variables in the function. */
1440
1441 static void
1442 add_call_clobber_ops (tree stmt, voperands_t prev_vops)
1443 {
1444 /* Functions that are not const, pure or never return may clobber
1445 call-clobbered variables. */
1446 stmt_ann (stmt)->makes_clobbering_call = true;
1447
1448 /* If we had created .GLOBAL_VAR earlier, use it. Otherwise, add
1449 a V_MAY_DEF operand for every call clobbered variable. See
1450 compute_may_aliases for the heuristic used to decide whether
1451 to create .GLOBAL_VAR or not. */
1452 if (global_var)
1453 add_stmt_operand (&global_var, stmt, opf_is_def, prev_vops);
1454 else
1455 {
1456 size_t i;
1457
1458 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
1459 {
1460 tree var = referenced_var (i);
1461
1462 /* If VAR is read-only, don't add a V_MAY_DEF, just a
1463 VUSE operand. */
1464 if (!TREE_READONLY (var))
1465 add_stmt_operand (&var, stmt, opf_is_def, prev_vops);
1466 else
1467 add_stmt_operand (&var, stmt, opf_none, prev_vops);
1468 });
1469 }
1470 }
1471
1472
1473 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1474 function. */
1475
1476 static void
1477 add_call_read_ops (tree stmt, voperands_t prev_vops)
1478 {
1479 /* Otherwise, if the function is not pure, it may reference memory. Add
1480 a VUSE for .GLOBAL_VAR if it has been created. Otherwise, add a VUSE
1481 for each call-clobbered variable. See add_referenced_var for the
1482 heuristic used to decide whether to create .GLOBAL_VAR. */
1483 if (global_var)
1484 add_stmt_operand (&global_var, stmt, opf_none, prev_vops);
1485 else
1486 {
1487 size_t i;
1488
1489 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i,
1490 {
1491 tree var = referenced_var (i);
1492 add_stmt_operand (&var, stmt, opf_none, prev_vops);
1493 });
1494 }
1495 }
1496
1497 /* Copies virtual operands from SRC to DST. */
1498
1499 void
1500 copy_virtual_operands (tree dst, tree src)
1501 {
1502 vuse_optype vuses = STMT_VUSE_OPS (src);
1503 v_may_def_optype v_may_defs = STMT_V_MAY_DEF_OPS (src);
1504 v_must_def_optype v_must_defs = STMT_V_MUST_DEF_OPS (src);
1505 vuse_optype *vuses_new = &stmt_ann (dst)->vuse_ops;
1506 v_may_def_optype *v_may_defs_new = &stmt_ann (dst)->v_may_def_ops;
1507 v_must_def_optype *v_must_defs_new = &stmt_ann (dst)->v_must_def_ops;
1508 unsigned i;
1509
1510 if (vuses)
1511 {
1512 *vuses_new = allocate_vuse_optype (NUM_VUSES (vuses));
1513 for (i = 0; i < NUM_VUSES (vuses); i++)
1514 SET_VUSE_OP (*vuses_new, i, VUSE_OP (vuses, i));
1515 }
1516
1517 if (v_may_defs)
1518 {
1519 *v_may_defs_new = allocate_v_may_def_optype (NUM_V_MAY_DEFS (v_may_defs));
1520 for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
1521 {
1522 SET_V_MAY_DEF_OP (*v_may_defs_new, i, V_MAY_DEF_OP (v_may_defs, i));
1523 SET_V_MAY_DEF_RESULT (*v_may_defs_new, i,
1524 V_MAY_DEF_RESULT (v_may_defs, i));
1525 }
1526 }
1527
1528 if (v_must_defs)
1529 {
1530 *v_must_defs_new = allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
1531 for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
1532 SET_V_MUST_DEF_OP (*v_must_defs_new, i, V_MUST_DEF_OP (v_must_defs, i));
1533 }
1534 }
1535
1536 #include "gt-tree-ssa-operands.h"