1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43     The operand types are broken up into REAL and VIRTUAL operands.  The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47     variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53     The operand tree is then parsed by the various get_* routines, which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57     5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
59
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
62
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
66
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71     virtual operands, if the previous cache had an SSA_NAME version of a
72     variable, and that same variable occurs in the new operands cache, then
73 the new cache vector will also get the same SSA_NAME.
74
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'.
78
79 */
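/* Illustrative only (not part of the cache machinery): once built, the
   cache is normally walked with the iterators from tree-ssa-operands.h.
   A pass printing every virtual use of a statement might look like:

	tree op;
	ssa_op_iter iter;

	FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VUSE)
	  print_generic_expr (stderr, op, 0);

   Other SSA_OP_* filters (SSA_OP_VMAYDEF, SSA_OP_ALL_USES, ...) select
   the other operand types; several are used later in this file.  */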
80
81
82 /* Flags to describe operand properties in helpers. */
83
84 /* By default, operands are loaded. */
85 #define opf_none 0
86
87 /* Operand is the target of an assignment expression or a
88     call-clobbered variable.  */
89 #define opf_is_def (1 << 0)
90
91 /* Operand is the target of an assignment expression. */
92 #define opf_kill_def (1 << 1)
93
94 /* No virtual operands should be created in the expression. This is used
95 when traversing ADDR_EXPR nodes which have different semantics than
96 other expressions. Inside an ADDR_EXPR node, the only operands that we
97 need to consider are indices into arrays. For instance, &a.b[i] should
98 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
99 VUSE for 'b'. */
100 #define opf_no_vops (1 << 2)
101
102 /* Operand is a "non-specific" kill for call-clobbers and such. This is used
103 to distinguish "reset the world" events from explicit MODIFY_EXPRs. */
104 #define opf_non_specific (1 << 3)
105
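/* A small worked example of how these flags combine (illustrative):
   for a killing assignment the LHS is walked with

	get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
			   opf_is_def | opf_kill_def);

   while the RHS is walked with opf_none, and the innards of an
   ADDR_EXPR such as &a.b[i] are walked with opf_no_vops so that only
   the index 'i' yields an operand.  */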
106
107 /* Array for building all the def operands. */
108 static VEC(tree,heap) *build_defs;
109
110 /* Array for building all the use operands. */
111 static VEC(tree,heap) *build_uses;
112
113 /* Array for building all the v_may_def operands. */
114 static VEC(tree,heap) *build_v_may_defs;
115
116 /* Array for building all the vuse operands. */
117 static VEC(tree,heap) *build_vuses;
118
119 /* Array for building all the v_must_def operands. */
120 static VEC(tree,heap) *build_v_must_defs;
121
122
123 /* True if the SSA operand cache is active (initialized).  */
124 static bool ops_active = false;
125
126 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
127 static unsigned operand_memory_index;
128
129 static void get_expr_operands (tree, tree *, int);
130 static void get_asm_expr_operands (tree);
131 static void get_indirect_ref_operands (tree, tree, int, tree, HOST_WIDE_INT,
132 HOST_WIDE_INT, bool);
133 static void get_tmr_operands (tree, tree, int);
134 static void get_call_expr_operands (tree, tree);
135 static inline void append_def (tree *);
136 static inline void append_use (tree *);
137 static void append_v_may_def (tree);
138 static void append_v_must_def (tree);
139 static void add_call_clobber_ops (tree, tree);
140 static void add_call_read_ops (tree, tree);
141 static void add_stmt_operand (tree *, stmt_ann_t, int);
142 static void add_virtual_operand (tree, stmt_ann_t, int, tree,
143 HOST_WIDE_INT, HOST_WIDE_INT,
144 bool);
145 static void build_ssa_operands (tree stmt);
146
147 static def_optype_p free_defs = NULL;
148 static use_optype_p free_uses = NULL;
149 static vuse_optype_p free_vuses = NULL;
150 static maydef_optype_p free_maydefs = NULL;
151 static mustdef_optype_p free_mustdefs = NULL;
152
153
154 /* Return the DECL_UID of the base variable of T. */
155
156 static inline unsigned
157 get_name_decl (tree t)
158 {
159 if (TREE_CODE (t) != SSA_NAME)
160 return DECL_UID (t);
161 else
162 return DECL_UID (SSA_NAME_VAR (t));
163 }
164
165 /* Comparison function for qsort used in operand_build_sort_virtual. */
166
167 static int
168 operand_build_cmp (const void *p, const void *q)
169 {
170 tree e1 = *((const tree *)p);
171 tree e2 = *((const tree *)q);
172   unsigned int u1, u2;
173
174 u1 = get_name_decl (e1);
175 u2 = get_name_decl (e2);
176
177 /* We want to sort in ascending order. They can never be equal. */
178 #ifdef ENABLE_CHECKING
179 gcc_assert (u1 != u2);
180 #endif
181 return (u1 > u2 ? 1 : -1);
182 }
183
184 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
185
186 static inline void
187 operand_build_sort_virtual (VEC(tree,heap) *list)
188 {
189 int num = VEC_length (tree, list);
190 if (num < 2)
191 return;
192 if (num == 2)
193 {
194 if (get_name_decl (VEC_index (tree, list, 0))
195 > get_name_decl (VEC_index (tree, list, 1)))
196 {
197 /* Swap elements if in the wrong order. */
198 tree tmp = VEC_index (tree, list, 0);
199 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
200 VEC_replace (tree, list, 1, tmp);
201 }
202 return;
203 }
204 /* There are 3 or more elements, call qsort. */
205 qsort (VEC_address (tree, list),
206 VEC_length (tree, list),
207 sizeof (tree),
208 operand_build_cmp);
209 }
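/* Example (illustrative): if build_vuses holds SSA names b_9, a_5 and
   c_2 whose base variables have DECL_UIDs 14, 3 and 27, sorting yields
   a_5, b_9, c_2.  Keeping virtual operand vectors in DECL_UID order
   gives them a canonical shape, which lets the finalize routines match
   a new vector against a statement's old one with a single linear
   walk.  */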
210
211
212
213 /* Return true if the ssa operands cache is active. */
214
215 bool
216 ssa_operands_active (void)
217 {
218 return ops_active;
219 }
220
221 /* Structure storing statistics on how many call clobbers we have, and
222    how many were avoided.  */
223 static struct
224 {
225 /* Number of call-clobbered ops we attempt to add to calls in
226 add_call_clobber_ops. */
227 unsigned int clobbered_vars;
228
229 /* Number of write-clobbers (v_may_defs) avoided by using
230 not_written information. */
231 unsigned int static_write_clobbers_avoided;
232
233 /* Number of reads (vuses) avoided by using not_read
234 information. */
235 unsigned int static_read_clobbers_avoided;
236
237 /* Number of write-clobbers avoided because the variable can't escape to
238 this call. */
239 unsigned int unescapable_clobbers_avoided;
240
241 /* Number of readonly uses we attempt to add to calls in
242 add_call_read_ops. */
243 unsigned int readonly_clobbers;
244
245 /* Number of readonly uses we avoid using not_read information. */
246 unsigned int static_readonly_clobbers_avoided;
247 } clobber_stats;
248
249 /* Initialize the operand cache routines. */
250
251 void
252 init_ssa_operands (void)
253 {
254 build_defs = VEC_alloc (tree, heap, 5);
255 build_uses = VEC_alloc (tree, heap, 10);
256 build_vuses = VEC_alloc (tree, heap, 25);
257 build_v_may_defs = VEC_alloc (tree, heap, 25);
258 build_v_must_defs = VEC_alloc (tree, heap, 25);
259
260 gcc_assert (operand_memory == NULL);
261 operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
262 ops_active = true;
263 memset (&clobber_stats, 0, sizeof (clobber_stats));
264
265 }
266
267
268 /* Dispose of anything allocated by the operand routines.  */
269
270 void
271 fini_ssa_operands (void)
272 {
273 struct ssa_operand_memory_d *ptr;
274 VEC_free (tree, heap, build_defs);
275 VEC_free (tree, heap, build_uses);
276 VEC_free (tree, heap, build_v_must_defs);
277 VEC_free (tree, heap, build_v_may_defs);
278 VEC_free (tree, heap, build_vuses);
279 free_defs = NULL;
280 free_uses = NULL;
281 free_vuses = NULL;
282 free_maydefs = NULL;
283 free_mustdefs = NULL;
284 while ((ptr = operand_memory) != NULL)
285 {
286 operand_memory = operand_memory->next;
287 ggc_free (ptr);
288 }
289
290 ops_active = false;
291
292 if (dump_file && (dump_flags & TDF_STATS))
293 {
294 fprintf (dump_file, "Original clobbered vars:%d\n", clobber_stats.clobbered_vars);
295 fprintf (dump_file, "Static write clobbers avoided:%d\n", clobber_stats.static_write_clobbers_avoided);
296 fprintf (dump_file, "Static read clobbers avoided:%d\n", clobber_stats.static_read_clobbers_avoided);
297 fprintf (dump_file, "Unescapable clobbers avoided:%d\n", clobber_stats.unescapable_clobbers_avoided);
298 fprintf (dump_file, "Original readonly clobbers:%d\n", clobber_stats.readonly_clobbers);
299 fprintf (dump_file, "Static readonly clobbers avoided:%d\n", clobber_stats.static_readonly_clobbers_avoided);
300 }
301 }
302
303
304 /* Return a chunk of memory of SIZE bytes for operand nodes.  */
305
306 static inline void *
307 ssa_operand_alloc (unsigned size)
308 {
309 char *ptr;
310 if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
311 {
312 struct ssa_operand_memory_d *ptr;
313 ptr = GGC_NEW (struct ssa_operand_memory_d);
314 ptr->next = operand_memory;
315 operand_memory = ptr;
316 operand_memory_index = 0;
317 }
318 ptr = &(operand_memory->mem[operand_memory_index]);
319 operand_memory_index += size;
320 return ptr;
321 }
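/* Illustrative arithmetic for the allocator above: assuming
   SSA_OPERAND_MEMORY_SIZE were 2048, a request for 24 bytes arriving
   when operand_memory_index is 2040 would not fit, so a fresh
   ssa_operand_memory_d chunk is pushed onto the operand_memory list
   and the pointer is carved from offset 0 of the new chunk.  Memory is
   never returned to this allocator; recycled operand nodes go on the
   free_* lists instead, and whole chunks are released only by
   fini_ssa_operands.  */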
322
323
324 /* Make sure PTR is in the correct immediate use list. Since uses are simply
325 pointers into the stmt TREE, there is no way of telling if anyone has
326    changed what this pointer points to via TREE_OPERAND (exp, 0) = <...>.
327 The contents are different, but the pointer is still the same. This
328 routine will check to make sure PTR is in the correct list, and if it isn't
329 put it in the correct list. We cannot simply check the previous node
330    because all nodes in the same stmt might have been changed.  */
331
332 static inline void
333 correct_use_link (use_operand_p ptr, tree stmt)
334 {
335 use_operand_p prev;
336 tree root;
337
338   /* fold_stmt () may have changed the stmt pointers.  */
339 if (ptr->stmt != stmt)
340 ptr->stmt = stmt;
341
342 prev = ptr->prev;
343 if (prev)
344 {
345 /* Find the root element, making sure we skip any safe iterators. */
346 while (prev->use != NULL || prev->stmt == NULL)
347 prev = prev->prev;
348
349 /* Get the ssa_name of the list the node is in. */
350 root = prev->stmt;
351 /* If it's the right list, simply return. */
352 if (root == *(ptr->use))
353 return;
354 }
355   /* It's in the wrong list if we reach here.  */
356 delink_imm_use (ptr);
357 link_imm_use (ptr, *(ptr->use));
358 }
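/* Example of the situation handled above (illustrative): if a pass
   performs TREE_OPERAND (rhs, 0) = c_4 on an operand that used to be
   b_7, the use_operand_p still sits on b_7's immediate use list even
   though *(ptr->use) is now c_4.  Walking the prev pointers back to
   the list root recovers the SSA_NAME that owns the list; if it does
   not match *(ptr->use), the node is delinked and relinked onto the
   correct list.  */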
359
360
361 /* This routine makes sure that PTR is in an immediate use list, and makes
362 sure the stmt pointer is set to the current stmt. Virtual uses do not need
363 the overhead of correct_use_link since they cannot be directly manipulated
364 like a real use can be. (They don't exist in the TREE_OPERAND nodes.) */
365 static inline void
366 set_virtual_use_link (use_operand_p ptr, tree stmt)
367 {
368   /* fold_stmt () may have changed the stmt pointers.  */
369 if (ptr->stmt != stmt)
370 ptr->stmt = stmt;
371
372 /* If this use isn't in a list, add it to the correct list. */
373 if (!ptr->prev)
374 link_imm_use (ptr, *(ptr->use));
375 }
376
377
378
379 #define FINALIZE_OPBUILD build_defs
380 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
381 build_defs, (I))
382 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
383 build_defs, (I))
384 #define FINALIZE_FUNC finalize_ssa_def_ops
385 #define FINALIZE_ALLOC alloc_def
386 #define FINALIZE_FREE free_defs
387 #define FINALIZE_TYPE struct def_optype_d
388 #define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
389 #define FINALIZE_OPS DEF_OPS
390 #define FINALIZE_BASE(VAR) VAR
391 #define FINALIZE_BASE_TYPE tree *
392 #define FINALIZE_BASE_ZERO NULL
393 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
394 #include "tree-ssa-opfinalize.h"
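/* The FINALIZE_* macros above parameterize the template in
   tree-ssa-opfinalize.h; each #include of that header stamps out one
   specialized finalize routine.  The generated function has roughly
   this shape (a sketch of the expansion, not the literal code):

	static void
	finalize_ssa_def_ops (tree stmt)
	{
	  compare the build_defs vector against DEF_OPS (stmt),
	  reuse matching def_optype_d nodes from the old list or
	  take fresh ones from free_defs / ssa_operand_alloc, and
	  initialize each node with FINALIZE_INITIALIZE;
	}
*/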
395
396
397 /* This routine will create stmt operands for STMT from the def build list. */
398
399 static void
400 finalize_ssa_defs (tree stmt)
401 {
402 unsigned int num = VEC_length (tree, build_defs);
403 /* There should only be a single real definition per assignment. */
404 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
405
406   /* If there is an old list, often the new list is identical, or close, so
407      find the elements at the beginning that are the same as the old vector.  */
408
409 finalize_ssa_def_ops (stmt);
410 VEC_truncate (tree, build_defs, 0);
411 }
412
413 #define FINALIZE_OPBUILD build_uses
414 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
415 build_uses, (I))
416 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
417 build_uses, (I))
418 #define FINALIZE_FUNC finalize_ssa_use_ops
419 #define FINALIZE_ALLOC alloc_use
420 #define FINALIZE_FREE free_uses
421 #define FINALIZE_TYPE struct use_optype_d
422 #define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
423 #define FINALIZE_OPS USE_OPS
424 #define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
425 #define FINALIZE_CORRECT_USE correct_use_link
426 #define FINALIZE_BASE(VAR) VAR
427 #define FINALIZE_BASE_TYPE tree *
428 #define FINALIZE_BASE_ZERO NULL
429 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
430 (PTR)->use_ptr.use = (VAL); \
431 link_imm_use_stmt (&((PTR)->use_ptr), \
432 *(VAL), (STMT))
433 #include "tree-ssa-opfinalize.h"
434
435 /* Return a new use operand vector for STMT, reusing the old vector where possible.  */
436
437 static void
438 finalize_ssa_uses (tree stmt)
439 {
440 #ifdef ENABLE_CHECKING
441 {
442 unsigned x;
443 unsigned num = VEC_length (tree, build_uses);
444
445 /* If the pointer to the operand is the statement itself, something is
446 wrong. It means that we are pointing to a local variable (the
447 initial call to get_stmt_operands does not pass a pointer to a
448 statement). */
449 for (x = 0; x < num; x++)
450 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
451 }
452 #endif
453 finalize_ssa_use_ops (stmt);
454 VEC_truncate (tree, build_uses, 0);
455 }
456
457
458 /* Return a new v_may_def operand vector for STMT, reusing the old vector where possible.  */
459 #define FINALIZE_OPBUILD build_v_may_defs
460 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_may_defs, (I))
461 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
462 build_v_may_defs, (I)))
463 #define FINALIZE_FUNC finalize_ssa_v_may_def_ops
464 #define FINALIZE_ALLOC alloc_maydef
465 #define FINALIZE_FREE free_maydefs
466 #define FINALIZE_TYPE struct maydef_optype_d
467 #define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
468 #define FINALIZE_OPS MAYDEF_OPS
469 #define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
470 #define FINALIZE_CORRECT_USE set_virtual_use_link
471 #define FINALIZE_BASE_ZERO 0
472 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
473 #define FINALIZE_BASE_TYPE unsigned
474 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
475 (PTR)->def_var = (VAL); \
476 (PTR)->use_var = (VAL); \
477 (PTR)->use_ptr.use = &((PTR)->use_var); \
478 link_imm_use_stmt (&((PTR)->use_ptr), \
479 (VAL), (STMT))
480 #include "tree-ssa-opfinalize.h"
481
482
483 static void
484 finalize_ssa_v_may_defs (tree stmt)
485 {
486 finalize_ssa_v_may_def_ops (stmt);
487 }
488
489
490 /* Clear the in_list bits and empty the build array for v_may_defs. */
491
492 static inline void
493 cleanup_v_may_defs (void)
494 {
495 unsigned x, num;
496 num = VEC_length (tree, build_v_may_defs);
497
498 for (x = 0; x < num; x++)
499 {
500 tree t = VEC_index (tree, build_v_may_defs, x);
501 if (TREE_CODE (t) != SSA_NAME)
502 {
503 var_ann_t ann = var_ann (t);
504 ann->in_v_may_def_list = 0;
505 }
506 }
507 VEC_truncate (tree, build_v_may_defs, 0);
508 }
509
510
511 #define FINALIZE_OPBUILD build_vuses
512 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_vuses, (I))
513 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
514 build_vuses, (I)))
515 #define FINALIZE_FUNC finalize_ssa_vuse_ops
516 #define FINALIZE_ALLOC alloc_vuse
517 #define FINALIZE_FREE free_vuses
518 #define FINALIZE_TYPE struct vuse_optype_d
519 #define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
520 #define FINALIZE_OPS VUSE_OPS
521 #define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
522 #define FINALIZE_CORRECT_USE set_virtual_use_link
523 #define FINALIZE_BASE_ZERO 0
524 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
525 #define FINALIZE_BASE_TYPE unsigned
526 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
527 (PTR)->use_var = (VAL); \
528 (PTR)->use_ptr.use = &((PTR)->use_var); \
529 link_imm_use_stmt (&((PTR)->use_ptr), \
530 (VAL), (STMT))
531 #include "tree-ssa-opfinalize.h"
532
533
534 /* Return a new vuse operand vector, reusing the old vector where possible.  */
535
536 static void
537 finalize_ssa_vuses (tree stmt)
538 {
539 unsigned num, num_v_may_defs;
540 unsigned vuse_index;
541
542 /* Remove superfluous VUSE operands. If the statement already has a
543 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
544 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
545 suppose that variable 'a' is aliased:
546
547 # VUSE <a_2>
548 # a_3 = V_MAY_DEF <a_2>
549 a = a + 1;
550
551 The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
552 operation. */
553
554 num = VEC_length (tree, build_vuses);
555 num_v_may_defs = VEC_length (tree, build_v_may_defs);
556
557 if (num > 0 && num_v_may_defs > 0)
558 {
559 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
560 {
561 tree vuse;
562 vuse = VEC_index (tree, build_vuses, vuse_index);
563 if (TREE_CODE (vuse) != SSA_NAME)
564 {
565 var_ann_t ann = var_ann (vuse);
566 ann->in_vuse_list = 0;
567 if (ann->in_v_may_def_list)
568 {
569 VEC_ordered_remove (tree, build_vuses, vuse_index);
570 continue;
571 }
572 }
573 vuse_index++;
574 }
575 }
576 else
577 /* Clear out the in_list bits. */
578 for (vuse_index = 0;
579 vuse_index < VEC_length (tree, build_vuses);
580 vuse_index++)
581 {
582 tree t = VEC_index (tree, build_vuses, vuse_index);
583 if (TREE_CODE (t) != SSA_NAME)
584 {
585 var_ann_t ann = var_ann (t);
586 ann->in_vuse_list = 0;
587 }
588 }
589
590 finalize_ssa_vuse_ops (stmt);
591 /* The v_may_def build vector wasn't cleaned up because we needed it. */
592 cleanup_v_may_defs ();
593
594 /* Free the vuses build vector. */
595 VEC_truncate (tree, build_vuses, 0);
596
597 }
598
599 /* Return a new v_must_def operand vector for STMT, reusing the old vector where possible.  */
600
601 #define FINALIZE_OPBUILD build_v_must_defs
602 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_must_defs, (I))
603 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
604 build_v_must_defs, (I)))
605 #define FINALIZE_FUNC finalize_ssa_v_must_def_ops
606 #define FINALIZE_ALLOC alloc_mustdef
607 #define FINALIZE_FREE free_mustdefs
608 #define FINALIZE_TYPE struct mustdef_optype_d
609 #define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
610 #define FINALIZE_OPS MUSTDEF_OPS
611 #define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
612 #define FINALIZE_CORRECT_USE set_virtual_use_link
613 #define FINALIZE_BASE_ZERO 0
614 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
615 #define FINALIZE_BASE_TYPE unsigned
616 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
617 (PTR)->def_var = (VAL); \
618 (PTR)->kill_var = (VAL); \
619 (PTR)->use_ptr.use = &((PTR)->kill_var);\
620 link_imm_use_stmt (&((PTR)->use_ptr), \
621 (VAL), (STMT))
622 #include "tree-ssa-opfinalize.h"
623
624
625 static void
626 finalize_ssa_v_must_defs (tree stmt)
627 {
628 /* In the presence of subvars, there may be more than one V_MUST_DEF per
629 statement (one for each subvar). It is a bit expensive to verify that
630 all must-defs in a statement belong to subvars if there is more than one
631 MUST-def, so we don't do it. Suffice to say, if you reach here without
632 having subvars, and have num >1, you have hit a bug. */
633
634 finalize_ssa_v_must_def_ops (stmt);
635 VEC_truncate (tree, build_v_must_defs, 0);
636 }
637
638
639 /* Finalize all the build vectors and attach the operand lists to STMT.  */
640
641 static inline void
642 finalize_ssa_stmt_operands (tree stmt)
643 {
644 finalize_ssa_defs (stmt);
645 finalize_ssa_uses (stmt);
646 finalize_ssa_v_must_defs (stmt);
647 finalize_ssa_v_may_defs (stmt);
648 finalize_ssa_vuses (stmt);
649 }
650
651
652 /* Start the process of building up the statement's operand vectors.  */
653
654 static inline void
655 start_ssa_stmt_operands (void)
656 {
657 gcc_assert (VEC_length (tree, build_defs) == 0);
658 gcc_assert (VEC_length (tree, build_uses) == 0);
659 gcc_assert (VEC_length (tree, build_vuses) == 0);
660 gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
661 gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
662 }
663
664
665 /* Add DEF_P to the list of pointers to operands. */
666
667 static inline void
668 append_def (tree *def_p)
669 {
670 VEC_safe_push (tree, heap, build_defs, (tree)def_p);
671 }
672
673
674 /* Add USE_P to the list of pointers to operands. */
675
676 static inline void
677 append_use (tree *use_p)
678 {
679 VEC_safe_push (tree, heap, build_uses, (tree)use_p);
680 }
681
682
683 /* Add a new virtual may def for variable VAR to the build array. */
684
685 static inline void
686 append_v_may_def (tree var)
687 {
688 if (TREE_CODE (var) != SSA_NAME)
689 {
690 var_ann_t ann = get_var_ann (var);
691
692 /* Don't allow duplicate entries. */
693 if (ann->in_v_may_def_list)
694 return;
695 ann->in_v_may_def_list = 1;
696 }
697
698 VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
699 }
700
701
702 /* Add VAR to the list of virtual uses. */
703
704 static inline void
705 append_vuse (tree var)
706 {
707
708 /* Don't allow duplicate entries. */
709 if (TREE_CODE (var) != SSA_NAME)
710 {
711 var_ann_t ann = get_var_ann (var);
712
713 if (ann->in_vuse_list || ann->in_v_may_def_list)
714 return;
715 ann->in_vuse_list = 1;
716 }
717
718 VEC_safe_push (tree, heap, build_vuses, (tree)var);
719 }
720
721
722 /* Add VAR to the list of virtual must definitions being built.  */
723
724 static inline void
725 append_v_must_def (tree var)
726 {
727 unsigned i;
728
729 /* Don't allow duplicate entries. */
730 for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
731 if (var == VEC_index (tree, build_v_must_defs, i))
732 return;
733
734 VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
735 }
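/* Note the asymmetry in duplicate suppression among the append_*
   routines: append_v_may_def and append_vuse use the
   in_v_may_def_list/in_vuse_list bits in the variable annotation to
   reject duplicates in constant time, while append_v_must_def does a
   linear scan of build_v_must_defs.  The scan is cheap because a
   statement rarely has more than a handful of V_MUST_DEFs (at most
   one per subvar; see finalize_ssa_v_must_defs).  */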
736
737
738 /* Parse STMT looking for operands.  When finished, the various build_*
739    operand vectors will have potential operands in them.  Any previous
740    operand cache for STMT is matched against these during finalization.  */
741
742 static void
743 parse_ssa_operands (tree stmt)
744 {
745 enum tree_code code;
746
747 code = TREE_CODE (stmt);
748 switch (code)
749 {
750 case MODIFY_EXPR:
751 /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
752 either only part of LHS is modified or if the RHS might throw,
753 otherwise, use V_MUST_DEF.
754
755 ??? If it might throw, we should represent somehow that it is killed
756 on the fallthrough path. */
757 {
758 tree lhs = TREE_OPERAND (stmt, 0);
759 int lhs_flags = opf_is_def;
760
761 get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
762
763 /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
764 or not the entire LHS is modified; that depends on what's
765 inside the VIEW_CONVERT_EXPR. */
766 if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
767 lhs = TREE_OPERAND (lhs, 0);
768
769 if (TREE_CODE (lhs) != ARRAY_RANGE_REF
770 && TREE_CODE (lhs) != BIT_FIELD_REF)
771 lhs_flags |= opf_kill_def;
772
773 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
774 }
775 break;
776
777 case COND_EXPR:
778 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
779 break;
780
781 case SWITCH_EXPR:
782 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
783 break;
784
785 case ASM_EXPR:
786 get_asm_expr_operands (stmt);
787 break;
788
789 case RETURN_EXPR:
790 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
791 break;
792
793 case GOTO_EXPR:
794 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
795 break;
796
797 case LABEL_EXPR:
798 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
799 break;
800
801 /* These nodes contain no variable references. */
802 case BIND_EXPR:
803 case CASE_LABEL_EXPR:
804 case TRY_CATCH_EXPR:
805 case TRY_FINALLY_EXPR:
806 case EH_FILTER_EXPR:
807 case CATCH_EXPR:
808 case RESX_EXPR:
809 break;
810
811 default:
812 /* Notice that if get_expr_operands tries to use &STMT as the operand
813 pointer (which may only happen for USE operands), we will fail in
814 append_use. This default will handle statements like empty
815 statements, or CALL_EXPRs that may appear on the RHS of a statement
816 or as statements themselves. */
817 get_expr_operands (stmt, &stmt, opf_none);
818 break;
819 }
820 }
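/* Worked example for the MODIFY_EXPR case above (illustrative): for a
   statement

	x = y + z;

   where x, y and z are un-aliased GIMPLE registers, the RHS walk
   appends uses of 'y' and 'z', and the LHS walk runs with
   opf_is_def | opf_kill_def and appends a real definition of 'x'.
   If 'x' were aliased, add_stmt_operand would instead produce virtual
   definitions as described in add_virtual_operand below.  */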
821
822 /* Create an operands cache for STMT. */
823
824 static void
825 build_ssa_operands (tree stmt)
826 {
827 stmt_ann_t ann = get_stmt_ann (stmt);
828
829 /* Initially assume that the statement has no volatile operands. */
830 if (ann)
831 ann->has_volatile_ops = false;
832
833 start_ssa_stmt_operands ();
834
835 parse_ssa_operands (stmt);
836 operand_build_sort_virtual (build_vuses);
837 operand_build_sort_virtual (build_v_may_defs);
838 operand_build_sort_virtual (build_v_must_defs);
839
840 finalize_ssa_stmt_operands (stmt);
841 }
842
843
844 /* Free any operand vectors in OPS.  */
845 void
846 free_ssa_operands (stmt_operands_p ops)
847 {
848 ops->def_ops = NULL;
849 ops->use_ops = NULL;
850 ops->maydef_ops = NULL;
851 ops->mustdef_ops = NULL;
852 ops->vuse_ops = NULL;
853 }
854
855
856 /* Get the operands of statement STMT. Note that repeated calls to
857 get_stmt_operands for the same statement will do nothing until the
858 statement is marked modified by a call to mark_stmt_modified(). */
859
860 void
861 update_stmt_operands (tree stmt)
862 {
863 stmt_ann_t ann = get_stmt_ann (stmt);
864   /* If get_stmt_operands is called before SSA is initialized, don't
865 do anything. */
866 if (!ssa_operands_active ())
867 return;
868 /* The optimizers cannot handle statements that are nothing but a
869 _DECL. This indicates a bug in the gimplifier. */
870 gcc_assert (!SSA_VAR_P (stmt));
871
872 gcc_assert (ann->modified);
873
874 timevar_push (TV_TREE_OPS);
875
876 build_ssa_operands (stmt);
877
878 /* Clear the modified bit for STMT. Subsequent calls to
879 get_stmt_operands for this statement will do nothing until the
880 statement is marked modified by a call to mark_stmt_modified(). */
881 ann->modified = 0;
882
883 timevar_pop (TV_TREE_OPS);
884 }
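/* Typical usage from a pass (a sketch; update_stmt is the inline
   wrapper in the tree-flow headers that marks the statement modified
   and then calls back in here):

	TREE_OPERAND (stmt, 1) = new_rhs;
	update_stmt (stmt);

   Calling update_stmt_operands directly requires the modified bit to
   be set first, per the assertion above.  */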
885
886
887 /* Copies virtual operands from SRC to DEST.  */
888
889 void
890 copy_virtual_operands (tree dest, tree src)
891 {
892 tree t;
893 ssa_op_iter iter, old_iter;
894 use_operand_p use_p, u2;
895 def_operand_p def_p, d2;
896
897 build_ssa_operands (dest);
898
899 /* Copy all the virtual fields. */
900 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
901 append_vuse (t);
902 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
903 append_v_may_def (t);
904 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
905 append_v_must_def (t);
906
907 if (VEC_length (tree, build_vuses) == 0
908 && VEC_length (tree, build_v_may_defs) == 0
909 && VEC_length (tree, build_v_must_defs) == 0)
910 return;
911
912 /* Now commit the virtual operands to this stmt. */
913 finalize_ssa_v_must_defs (dest);
914 finalize_ssa_v_may_defs (dest);
915 finalize_ssa_vuses (dest);
916
917   /* Finally, set the fields to the same values as the originals.  */
918
919
920 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
921 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
922 {
923 gcc_assert (!op_iter_done (&old_iter));
924 SET_USE (use_p, t);
925 t = op_iter_next_tree (&old_iter);
926 }
927 gcc_assert (op_iter_done (&old_iter));
928
929 op_iter_init_maydef (&old_iter, src, &u2, &d2);
930 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
931 {
932 gcc_assert (!op_iter_done (&old_iter));
933 SET_USE (use_p, USE_FROM_PTR (u2));
934 SET_DEF (def_p, DEF_FROM_PTR (d2));
935 op_iter_next_maymustdef (&u2, &d2, &old_iter);
936 }
937 gcc_assert (op_iter_done (&old_iter));
938
939 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
940 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
941 {
942 gcc_assert (!op_iter_done (&old_iter));
943 SET_USE (use_p, USE_FROM_PTR (u2));
944 SET_DEF (def_p, DEF_FROM_PTR (d2));
945 op_iter_next_maymustdef (&u2, &d2, &old_iter);
946 }
947 gcc_assert (op_iter_done (&old_iter));
948
949 }
950
951
952 /* Specifically for use in DOM's expression analysis. Given a store, we
953    create an artificial stmt which looks like a load from the store; this can
954    be used to eliminate redundant loads.  OLD_STMT is the store stmt, and
955    NEW_STMT is the new load which represents a load of the values stored.  */
957
958 void
959 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
960 {
961 stmt_ann_t ann;
962 tree op;
963 ssa_op_iter iter;
964 use_operand_p use_p;
965 unsigned x;
966
967 ann = get_stmt_ann (new_stmt);
968
969   /* Process the stmt looking for operands.  */
970 start_ssa_stmt_operands ();
971 parse_ssa_operands (new_stmt);
972
973 for (x = 0; x < VEC_length (tree, build_vuses); x++)
974 {
975 tree t = VEC_index (tree, build_vuses, x);
976 if (TREE_CODE (t) != SSA_NAME)
977 {
978 var_ann_t ann = var_ann (t);
979 ann->in_vuse_list = 0;
980 }
981 }
982
983 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
984 {
985 tree t = VEC_index (tree, build_v_may_defs, x);
986 if (TREE_CODE (t) != SSA_NAME)
987 {
988 var_ann_t ann = var_ann (t);
989 ann->in_v_may_def_list = 0;
990 }
991 }
992 /* Remove any virtual operands that were found. */
993 VEC_truncate (tree, build_v_may_defs, 0);
994 VEC_truncate (tree, build_v_must_defs, 0);
995 VEC_truncate (tree, build_vuses, 0);
996
997 /* For each VDEF on the original statement, we want to create a
998 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
999 statement. */
1000 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
1001 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
1002 append_vuse (op);
1003
1004 /* Now build the operands for this new stmt. */
1005 finalize_ssa_stmt_operands (new_stmt);
1006
1007 /* All uses in this fake stmt must not be in the immediate use lists. */
1008 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
1009 delink_imm_use (use_p);
1010 }
1011
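/* Swap operands EXP0 and EXP1 in statement STMT.  */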
1012 void
1013 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
1014 {
1015 tree op0, op1;
1016 op0 = *exp0;
1017 op1 = *exp1;
1018
1019 /* If the operand cache is active, attempt to preserve the relative positions
1020 of these two operands in their respective immediate use lists. */
1021 if (ssa_operands_active () && op0 != op1)
1022 {
1023 use_optype_p use0, use1, ptr;
1024 use0 = use1 = NULL;
1025 /* Find the 2 operands in the cache, if they are there. */
1026 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1027 if (USE_OP_PTR (ptr)->use == exp0)
1028 {
1029 use0 = ptr;
1030 break;
1031 }
1032 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1033 if (USE_OP_PTR (ptr)->use == exp1)
1034 {
1035 use1 = ptr;
1036 break;
1037 }
1038      /* If we didn't find operand entries for both uses, there isn't much
1039         we can do at this point.  Presumably we don't need to worry about it.  */
1040 if (use0 && use1)
1041 {
1042 tree *tmp = USE_OP_PTR (use1)->use;
1043 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
1044 USE_OP_PTR (use0)->use = tmp;
1045 }
1046 }
1047
1048 /* Now swap the data. */
1049 *exp0 = op1;
1050 *exp1 = op0;
1051 }
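/* Example use (illustrative): canonicalizing a commutative RHS
   without invalidating the operand cache:

	swap_tree_operands (stmt,
			    &TREE_OPERAND (rhs, 0),
			    &TREE_OPERAND (rhs, 1));

   Because the cached use pointers are exchanged along with the data,
   each use entry still refers to the same SSA name afterward, so the
   immediate use lists stay consistent without a delink/relink.  */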
1052
1053
1054 /* Recursively scan the expression pointed to by EXPR_P in statement
1055    STMT.  FLAGS is a combination of the opf_* constants modifying
1056 how to interpret the operands found. */
1057
1058 static void
1059 get_expr_operands (tree stmt, tree *expr_p, int flags)
1060 {
1061 enum tree_code code;
1062 enum tree_code_class class;
1063 tree expr = *expr_p;
1064 stmt_ann_t s_ann = stmt_ann (stmt);
1065
1066 if (expr == NULL)
1067 return;
1068
1069 code = TREE_CODE (expr);
1070 class = TREE_CODE_CLASS (code);
1071
1072 switch (code)
1073 {
1074 case ADDR_EXPR:
1075 /* Taking the address of a variable does not represent a
1076 reference to it, but the fact that the statement takes its
1077 address will be of interest to some passes (e.g. alias
1078 resolution). */
1079 add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
1080
1081 /* If the address is invariant, there may be no interesting
1082 variable references inside. */
1083 if (is_gimple_min_invariant (expr))
1084 return;
1085
1086 /* Otherwise, there may be variables referenced inside but there
1087 should be no VUSEs created, since the referenced objects are
1088 not really accessed. The only operands that we should find
1089 here are ARRAY_REF indices which will always be real operands
1090 (GIMPLE does not allow non-registers as array indices). */
1091 flags |= opf_no_vops;
1092 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1093 return;
1094
1095 case SSA_NAME:
1096 case STRUCT_FIELD_TAG:
1097 case TYPE_MEMORY_TAG:
1098 case NAME_MEMORY_TAG:
1099 add_stmt_operand (expr_p, s_ann, flags);
1100 return;
1101
1102 case VAR_DECL:
1103 case PARM_DECL:
1104 case RESULT_DECL:
1105 {
1106 subvar_t svars;
1107
1108 /* Add the subvars for a variable if it has subvars, to DEFS
1109 or USES. Otherwise, add the variable itself. Whether it
1110 goes to USES or DEFS depends on the operand flags. */
1111 if (var_can_have_subvars (expr)
1112 && (svars = get_subvars_for_var (expr)))
1113 {
1114 subvar_t sv;
1115 for (sv = svars; sv; sv = sv->next)
1116 add_stmt_operand (&sv->var, s_ann, flags);
1117 }
1118 else
1119 add_stmt_operand (expr_p, s_ann, flags);
1120
1121 return;
1122 }
1123
1124 case MISALIGNED_INDIRECT_REF:
1125 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1126 /* fall through */
1127
1128 case ALIGN_INDIRECT_REF:
1129 case INDIRECT_REF:
1130 get_indirect_ref_operands (stmt, expr, flags, NULL_TREE,
1131 0, -1, true);
1132 return;
1133
1134 case TARGET_MEM_REF:
1135 get_tmr_operands (stmt, expr, flags);
1136 return;
1137
1138 case ARRAY_RANGE_REF:
1139 /* Treat array references as references to the virtual variable
1140 representing the array. The virtual variable for an ARRAY_REF
1141 is the VAR_DECL for the array. */
1142 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1143 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1144 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1145 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1146 return;
1147
1148 case ARRAY_REF:
1149 case COMPONENT_REF:
1150 case REALPART_EXPR:
1151 case IMAGPART_EXPR:
1152 {
1153 tree ref;
1154 HOST_WIDE_INT offset, size, maxsize;
1155 bool none = true;
1156
1157 /* This component reference becomes an access to all of the
1158 subvariables it can touch, if we can determine that, but
1159 *NOT* the real one. If we can't determine which fields we
1160 could touch, the recursion will eventually get to a
1161 variable and add *all* of its subvars, or whatever is the
1162 minimum correct subset. */
1163 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
1164 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
1165 {
1166 subvar_t sv;
1167 subvar_t svars = get_subvars_for_var (ref);
1168
1169 for (sv = svars; sv; sv = sv->next)
1170 {
1171 bool exact;
1172
1173 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1174 {
1175 int subvar_flags = flags;
1176 none = false;
1177 if (!exact || size != maxsize)
1178 subvar_flags &= ~opf_kill_def;
1179 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1180 }
1181 }
1182
1183 if (!none)
1184 flags |= opf_no_vops;
1185 }
1186 else if (TREE_CODE (ref) == INDIRECT_REF)
1187 {
1188 get_indirect_ref_operands (stmt, ref, flags, expr,
1189 offset, maxsize, false);
1190 flags |= opf_no_vops;
1191 }
1192
1193        /* Even if we found subvars above, we need to make sure we see
1194           immediate uses for d in s.a[d].  If s.a has a subvar, we would
1195           miss it otherwise.  */
1196 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1197 flags & ~opf_kill_def);
1198
1199 if (code == COMPONENT_REF)
1200 {
1201 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1202 s_ann->has_volatile_ops = true;
1203 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1204 }
1205 else if (code == ARRAY_REF)
1206 {
1207 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1208 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1209 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1210 }
1211
1212 return;
1213 }
1214
1215 case WITH_SIZE_EXPR:
1216 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1217 and an rvalue reference to its second argument. */
1218 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1219 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1220 return;
1221
1222 case CALL_EXPR:
1223 get_call_expr_operands (stmt, expr);
1224 return;
1225
1226 case COND_EXPR:
1227 case VEC_COND_EXPR:
1228 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1229 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1230 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1231 return;
1232
1233 case MODIFY_EXPR:
1234 {
1235 int subflags;
1236 tree op;
1237
1238 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1239
1240 op = TREE_OPERAND (expr, 0);
1241 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1242           op = TREE_OPERAND (op, 0);
1243 if (TREE_CODE (op) == ARRAY_RANGE_REF
1244 || TREE_CODE (op) == REALPART_EXPR
1245 || TREE_CODE (op) == IMAGPART_EXPR)
1246 subflags = opf_is_def;
1247 else
1248 subflags = opf_is_def | opf_kill_def;
1249
1250 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
1251 return;
1252 }
1253
1254 case CONSTRUCTOR:
1255 {
1256 /* General aggregate CONSTRUCTORs have been decomposed, but they
1257 are still in use as the COMPLEX_EXPR equivalent for vectors. */
1258 constructor_elt *ce;
1259 unsigned HOST_WIDE_INT idx;
1260
1261 for (idx = 0;
1262 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
1263 idx++)
1264 get_expr_operands (stmt, &ce->value, opf_none);
1265
1266 return;
1267 }
1268
1269 case TRUTH_NOT_EXPR:
1270 case BIT_FIELD_REF:
1271 case VIEW_CONVERT_EXPR:
1272 do_unary:
1273 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1274 return;
1275
1276 case TRUTH_AND_EXPR:
1277 case TRUTH_OR_EXPR:
1278 case TRUTH_XOR_EXPR:
1279 case COMPOUND_EXPR:
1280 case OBJ_TYPE_REF:
1281 case ASSERT_EXPR:
1282 do_binary:
1283 {
1284 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1285 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1286 return;
1287 }
1288
1289 case DOT_PROD_EXPR:
1290 case REALIGN_LOAD_EXPR:
1291 {
1292 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1293 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1294 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
1295 return;
1296 }
1297
1298 case BLOCK:
1299 case FUNCTION_DECL:
1300 case EXC_PTR_EXPR:
1301 case FILTER_EXPR:
1302 case LABEL_DECL:
1303 case CONST_DECL:
1304 case OMP_PARALLEL:
1305 case OMP_SECTIONS:
1306 case OMP_FOR:
1307 case OMP_RETURN_EXPR:
1308 case OMP_SINGLE:
1309 case OMP_MASTER:
1310 case OMP_ORDERED:
1311 case OMP_CRITICAL:
1312 /* Expressions that make no memory references. */
1313 return;
1314
1315 default:
1316 if (class == tcc_unary)
1317 goto do_unary;
1318 if (class == tcc_binary || class == tcc_comparison)
1319 goto do_binary;
1320 if (class == tcc_constant || class == tcc_type)
1321 return;
1322 }
1323
1324 /* If we get here, something has gone wrong. */
1325 #ifdef ENABLE_CHECKING
1326 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
1327 debug_tree (expr);
1328 fputs ("\n", stderr);
1329 internal_error ("internal error");
1330 #endif
1331 gcc_unreachable ();
1332 }
1333
1334
1335 /* Scan operands in the ASM_EXPR stmt referred to in INFO. */
1336
1337 static void
1338 get_asm_expr_operands (tree stmt)
1339 {
1340 stmt_ann_t s_ann = stmt_ann (stmt);
1341 int noutputs = list_length (ASM_OUTPUTS (stmt));
1342 const char **oconstraints
1343 = (const char **) alloca ((noutputs) * sizeof (const char *));
1344 int i;
1345 tree link;
1346 const char *constraint;
1347 bool allows_mem, allows_reg, is_inout;
1348
1349   for (i = 0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1350 {
1351 oconstraints[i] = constraint
1352 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1353 parse_output_constraint (&constraint, i, 0, 0,
1354 &allows_mem, &allows_reg, &is_inout);
1355
1356 /* This should have been split in gimplify_asm_expr. */
1357 gcc_assert (!allows_reg || !is_inout);
1358
1359 /* Memory operands are addressable. Note that STMT needs the
1360 address of this operand. */
1361 if (!allows_reg && allows_mem)
1362 {
1363 tree t = get_base_address (TREE_VALUE (link));
1364 if (t && DECL_P (t) && s_ann)
1365 add_to_addressable_set (t, &s_ann->addresses_taken);
1366 }
1367
1368 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1369 }
1370
1371 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1372 {
1373 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1374 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1375 oconstraints, &allows_mem, &allows_reg);
1376
1377 /* Memory operands are addressable. Note that STMT needs the
1378 address of this operand. */
1379 if (!allows_reg && allows_mem)
1380 {
1381 tree t = get_base_address (TREE_VALUE (link));
1382 if (t && DECL_P (t) && s_ann)
1383 add_to_addressable_set (t, &s_ann->addresses_taken);
1384 }
1385
1386 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1387 }
1388
1389
1390 /* Clobber memory for asm ("" : : : "memory"); */
1391 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1392 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1393 {
1394 unsigned i;
1395 bitmap_iterator bi;
1396
1397 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1398 decided to group them). */
1399 if (global_var)
1400 add_stmt_operand (&global_var, s_ann, opf_is_def);
1401 else
1402 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1403 {
1404 tree var = referenced_var (i);
1405 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1406 }
1407
1408 /* Now clobber all addressables. */
1409 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1410 {
1411 tree var = referenced_var (i);
1412
1413 /* Subvars are explicitly represented in this list, so
1414 we don't need the original to be added to the clobber
1415 ops, but the original *will* be in this list because
1416 we keep the addressability of the original
1417 variable up-to-date so we don't screw up the rest of
1418 the backend. */
1419 if (var_can_have_subvars (var)
1420 && get_subvars_for_var (var) != NULL)
1421 continue;
1422
1423 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1424 }
1425
1426 break;
1427 }
1428 }
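/* Example (illustrative): for

	__asm__ __volatile__ ("" : : : "memory");

   the clobber loop above finds "memory" and adds a V_MAY_DEF for every
   call-clobbered variable (or for .GLOBAL_VAR if variables were
   grouped), plus one for each addressable variable, which makes the
   asm act as a barrier to memory optimizations.  */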
1429
1430 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1431 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1432
1433 STMT is the statement being processed, EXPR is the INDIRECT_REF
1434 that got us here.
1435
1436 FLAGS is as in get_expr_operands.
1437
1438 FULL_REF contains the full pointer dereference expression, if we
1439 have it, or NULL otherwise.
1440
1441 OFFSET and SIZE are the location of the access inside the
1442 dereferenced pointer, if known.
1443
1444 RECURSE_ON_BASE should be set to true if we want to continue
1445 calling get_expr_operands on the base pointer, and false if
1446 something else will do it for us. */
1447
1448 static void
1449 get_indirect_ref_operands (tree stmt, tree expr, int flags,
1450 tree full_ref,
1451 HOST_WIDE_INT offset, HOST_WIDE_INT size,
1452 bool recurse_on_base)
1453 {
1454 tree *pptr = &TREE_OPERAND (expr, 0);
1455 tree ptr = *pptr;
1456 stmt_ann_t s_ann = stmt_ann (stmt);
1457
1458 /* Stores into INDIRECT_REF operands are never killing definitions. */
1459 flags &= ~opf_kill_def;
1460
1461 if (SSA_VAR_P (ptr))
1462 {
1463 struct ptr_info_def *pi = NULL;
1464
1465 /* If PTR has flow-sensitive points-to information, use it. */
1466 if (TREE_CODE (ptr) == SSA_NAME
1467 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1468 && pi->name_mem_tag)
1469 {
1470 /* PTR has its own memory tag. Use it. */
1471 add_virtual_operand (pi->name_mem_tag, s_ann, flags,
1472 full_ref, offset, size, false);
1473 }
1474 else
1475 {
1476 /* If PTR is not an SSA_NAME or it doesn't have a name
1477 tag, use its type memory tag. */
1478 var_ann_t v_ann;
1479
1480 /* If we are emitting debugging dumps, display a warning if
1481 PTR is an SSA_NAME with no flow-sensitive alias
1482 information. That means that we may need to compute
1483 aliasing again. */
1484 if (dump_file
1485 && TREE_CODE (ptr) == SSA_NAME
1486 && pi == NULL)
1487 {
1488 fprintf (dump_file,
1489 "NOTE: no flow-sensitive alias info for ");
1490 print_generic_expr (dump_file, ptr, dump_flags);
1491 fprintf (dump_file, " in ");
1492 print_generic_stmt (dump_file, stmt, dump_flags);
1493 }
1494
1495 if (TREE_CODE (ptr) == SSA_NAME)
1496 ptr = SSA_NAME_VAR (ptr);
1497 v_ann = var_ann (ptr);
1498
1499 if (v_ann->type_mem_tag)
1500 add_virtual_operand (v_ann->type_mem_tag, s_ann, flags,
1501 full_ref, offset, size, false);
1502 }
1503 }
1504 else if (TREE_CODE (ptr) == INTEGER_CST)
1505 {
1506 /* If a constant is used as a pointer, we can't generate a real
1507 operand for it but we mark the statement volatile to prevent
1508 optimizations from messing things up. */
1509 if (s_ann)
1510 s_ann->has_volatile_ops = true;
1511 return;
1512 }
1513 else
1514 {
1515       /* Ok, this isn't even is_gimple_min_invariant.  Something's broken.  */
1516 gcc_unreachable ();
1517 }
1518
1519 /* If requested, add a USE operand for the base pointer. */
1520 if (recurse_on_base)
1521 get_expr_operands (stmt, pptr, opf_none);
1522 }
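/* Example (illustrative): for a store through a pointer with
   flow-sensitive points-to information, say

	*p_3 = 1;

   where p_3 has a name memory tag NMT.4, the code above adds a
   V_MAY_DEF of NMT.4 (never a V_MUST_DEF, because of the opf_kill_def
   masking at the top), and the recursive call on the base adds a real
   USE of p_3 itself.  */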
1523
1524
1525 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1526
1527 static void
1528 get_tmr_operands (tree stmt, tree expr, int flags)
1529 {
1530 tree tag = TMR_TAG (expr), ref;
1531 HOST_WIDE_INT offset, size, maxsize;
1532 subvar_t svars, sv;
1533 stmt_ann_t s_ann = stmt_ann (stmt);
1534
1535 /* First record the real operands. */
1536 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1537 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1538
1539 /* MEM_REFs should never be killing. */
1540 flags &= ~opf_kill_def;
1541
1542 if (TMR_SYMBOL (expr))
1543 {
1544 stmt_ann_t ann = stmt_ann (stmt);
1545 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1546 }
1547
1548 if (!tag)
1549 {
1550 /* Something weird, so ensure that we will be careful. */
1551 stmt_ann (stmt)->has_volatile_ops = true;
1552 return;
1553 }
1554
1555 if (DECL_P (tag))
1556 {
1557 get_expr_operands (stmt, &tag, flags);
1558 return;
1559 }
1560
1561 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1562 gcc_assert (ref != NULL_TREE);
1563 svars = get_subvars_for_var (ref);
1564 for (sv = svars; sv; sv = sv->next)
1565 {
1566 bool exact;
1567 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1568 {
1569 int subvar_flags = flags;
1570 if (!exact || size != maxsize)
1571 subvar_flags &= ~opf_kill_def;
1572 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1573 }
1574 }
1575 }
1576
1577
1578 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1579
1580 static void
1581 get_call_expr_operands (tree stmt, tree expr)
1582 {
1583 tree op;
1584 int call_flags = call_expr_flags (expr);
1585
1586 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1587 operands for all the symbols that have been found to be
1588 call-clobbered.
1589
1590 Note that if aliases have not been computed, the global effects
1591 of calls will not be included in the SSA web. This is fine
1592 because no optimizer should run before aliases have been
1593 computed. By not bothering with virtual operands for CALL_EXPRs
1594 we avoid adding superfluous virtual operands, which can be a
1595 significant compile time sink (See PR 15855). */
1596 if (aliases_computed_p
1597 && !bitmap_empty_p (call_clobbered_vars)
1598 && !(call_flags & ECF_NOVOPS))
1599 {
1600 /* A 'pure' or a 'const' function never call-clobbers anything.
1601 A 'noreturn' function might, but since we don't return anyway
1602 there is no point in recording that. */
1603 if (TREE_SIDE_EFFECTS (expr)
1604 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1605 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1606 else if (!(call_flags & ECF_CONST))
1607 add_call_read_ops (stmt, get_callee_fndecl (expr));
1608 }
1609
1610 /* Find uses in the called function. */
1611 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1612
1613 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1614 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1615
1616 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1617
1618 }
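/* Example (illustrative): for a call

	foo (a, b);

   where foo is neither pure nor const and aliases have been computed,
   add_call_clobber_ops adds V_MAY_DEFs for the call-clobbered symbols;
   a call to a pure function gets only VUSEs via add_call_read_ops; and
   an ECF_NOVOPS call gets no virtual operands at all.  The walk over
   the argument list then adds real uses of 'a' and 'b'.  */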
1619
1620 /* REF is a tree that contains the entire pointer dereference
1621 expression, if available, or NULL otherwise. ALIAS is the variable
1622    we are asking if REF can access.  OFFSET and SIZE come from the
1623    memory access expression that generated this virtual operand.
1624    Return true if REF may touch ALIAS.  */
1626
1627 static bool
1628 access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
1629 HOST_WIDE_INT size)
1630 {
1631 bool offsetgtz = offset > 0;
1632 unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
1633 tree base = ref ? get_base_address (ref) : NULL;
1634
1635 /* If ALIAS is an SFT, it can't be touched if the offset
1636 and size of the access is not overlapping with the SFT offset and
1637 size. This is only true if we are accessing through a pointer
1638 to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
1639 be accessing through a pointer to some substruct of the
1640 structure, and if we try to prune there, we will have the wrong
1641 offset, and get the wrong answer.
1642 i.e., we can't prune without more work if we have something like
1643
1644 struct gcc_target
1645 {
1646 struct asm_out
1647 {
1648 const char *byte_op;
1649 struct asm_int_op
1650 {
1651 const char *hi;
1652 } aligned_op;
1653 } asm_out;
1654 } targetm;
1655
1656 foo = &targetm.asm_out.aligned_op;
1657 return foo->hi;
1658
1659 SFT.1, which represents hi, will have SFT_OFFSET=32 because in
1660 terms of SFT_PARENT_VAR, that is where it is.
1661 However, the access through the foo pointer will be at offset 0. */
1662 if (size != -1
1663 && TREE_CODE (alias) == STRUCT_FIELD_TAG
1664 && base
1665 && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
1666 && !overlap_subvar (offset, size, alias, NULL))
1667 {
1668 #ifdef ACCESS_DEBUGGING
1669 fprintf (stderr, "Access to ");
1670 print_generic_expr (stderr, ref, 0);
1671 fprintf (stderr, " may not touch ");
1672 print_generic_expr (stderr, alias, 0);
1673 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1674 #endif
1675 return false;
1676 }
1677
1678 /* Without strict aliasing, it is impossible for a component access
1679 through a pointer to touch a random variable, unless that
1680 variable *is* a structure or a pointer.
1681
1682 That is, given p->c, and some random global variable b,
1683 there is no legal way that p->c could be an access to b.
1684
1685 Without strict aliasing on, we consider it legal to do something
1686 like:
1687
1688 struct foos { int l; };
1689 int foo;
1690 static struct foos *getfoo(void);
1691 int main (void)
1692 {
1693 struct foos *f = getfoo();
1694 f->l = 1;
1695 foo = 2;
1696 if (f->l == 1)
1697 abort();
1698 exit(0);
1699 }
1700 static struct foos *getfoo(void)
1701 { return (struct foos *)&foo; }
1702
1703 (taken from 20000623-1.c)
1704 */
1705 else if (ref
1706 && flag_strict_aliasing
1707 && TREE_CODE (ref) != INDIRECT_REF
1708 && !MTAG_P (alias)
1709 && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
1710 && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
1711 && !POINTER_TYPE_P (TREE_TYPE (alias)))
1712 {
1713 #ifdef ACCESS_DEBUGGING
1714 fprintf (stderr, "Access to ");
1715 print_generic_expr (stderr, ref, 0);
1716 fprintf (stderr, " may not touch ");
1717 print_generic_expr (stderr, alias, 0);
1718 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1719 #endif
1720 return false;
1721 }
1722
1723 /* If the offset of the access is greater than the size of one of
1724 the possible aliases, it can't be touching that alias, because it
1725 would be past the end of the structure. */
1726 else if (ref
1727 && flag_strict_aliasing
1728 && TREE_CODE (ref) != INDIRECT_REF
1729 && !MTAG_P (alias)
1730 && !POINTER_TYPE_P (TREE_TYPE (alias))
1731 && offsetgtz
1732 && DECL_SIZE (alias)
1733 && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
1734 && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
1735 {
1736 #ifdef ACCESS_DEBUGGING
1737 fprintf (stderr, "Access to ");
1738 print_generic_expr (stderr, ref, 0);
1739 fprintf (stderr, " may not touch ");
1740 print_generic_expr (stderr, alias, 0);
1741 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1742 #endif
1743 return false;
1744 }
1745
1746 return true;
1747 }
1748
1749
1750 /* Add VAR to the virtual operands array. FLAGS is as in
1751 get_expr_operands. FULL_REF is a tree that contains the entire
1752 pointer dereference expression, if available, or NULL otherwise.
1753 OFFSET and SIZE come from the memory access expression that
1754    generated this virtual operand.  FOR_CLOBBER is true if this is
1755 adding a virtual operand for a call clobber. */
1756
1757 static void
1758 add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
1759 tree full_ref, HOST_WIDE_INT offset,
1760 HOST_WIDE_INT size, bool for_clobber)
1761 {
1762 VEC(tree,gc) *aliases;
1763 tree sym;
1764 var_ann_t v_ann;
1765
1766 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1767 v_ann = var_ann (sym);
1768
1769 /* Mark statements with volatile operands. Optimizers should back
1770 off from statements having volatile operands. */
1771 if (TREE_THIS_VOLATILE (sym) && s_ann)
1772 s_ann->has_volatile_ops = true;
1773
1774 /* If the variable cannot be modified and this is a V_MAY_DEF change
1775 it into a VUSE. This happens when read-only variables are marked
1776      call-clobbered and/or aliased to writable variables.  So we only
1777      make the change for non-specific stores.
1778
1779 Note that if this is a specific store, i.e. associated with a
1780 modify_expr, then we can't suppress the V_MAY_DEF, lest we run
1781 into validation problems.
1782
1783 This can happen when programs cast away const, leaving us with a
1784 store to read-only memory. If the statement is actually executed
1785      at runtime, then the program is ill-formed.  If the statement is
1786      not executed, then all is well.  At the very least, we cannot ICE.  */
1787 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1788 flags &= ~(opf_is_def | opf_kill_def);
1789
1790 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1791 virtual operands, unless the caller has specifically requested
1792 not to add virtual operands (used when adding operands inside an
1793 ADDR_EXPR expression). */
1794 if (flags & opf_no_vops)
1795 return;
1796
1797 aliases = v_ann->may_aliases;
1798 if (aliases == NULL)
1799 {
1800 /* The variable is not aliased or it is an alias tag. */
1801 if (flags & opf_is_def)
1802 {
1803 if (flags & opf_kill_def)
1804 {
1805 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1806 variable definitions. */
1807 gcc_assert (!MTAG_P (var)
1808 || TREE_CODE (var) == STRUCT_FIELD_TAG);
1809 append_v_must_def (var);
1810 }
1811 else
1812 {
1813 /* Add a V_MAY_DEF for call-clobbered variables and
1814 memory tags. */
1815 append_v_may_def (var);
1816 }
1817 }
1818 else
1819 append_vuse (var);
1820 }
1821 else
1822 {
1823 unsigned i;
1824 tree al;
1825
1826 /* The variable is aliased. Add its aliases to the virtual
1827 operands. */
1828 gcc_assert (VEC_length (tree, aliases) != 0);
1829
1830 if (flags & opf_is_def)
1831 {
1833 bool none_added = true;
1834
1835 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1836 {
1837 if (!access_can_touch_variable (full_ref, al, offset, size))
1838 continue;
1839
1840 none_added = false;
1841 append_v_may_def (al);
1842 }
1843
1844 /* If the variable is also an alias tag, add a virtual
1845 operand for it, otherwise we will miss representing
1846 references to the members of the variable's alias set.
1847 This fixes the bug in gcc.c-torture/execute/20020503-1.c.
1848
1849              It is also necessary to add bare defs on clobbers for
1850              TMTs, so that bare TMT uses caused by pruning all the
1851              aliases will link up properly with calls.  To keep the
1852              number of these bare defs to the minimum necessary, we
1853              track which TMTs were used alone in statement defs or
1854              vuses.  */
1855
1856 if (v_ann->is_aliased
1857 || none_added
1858 || (TREE_CODE (var) == TYPE_MEMORY_TAG && for_clobber
1859 && TMT_USED_ALONE (var)))
1860 {
1861              /* Every bare TMT def we add should have TMT_USED_ALONE
1862 set on it, or else we will get the wrong answer on
1863 clobbers. */
1864
1865 if (none_added && !updating_used_alone && aliases_computed_p
1866 && TREE_CODE (var) == TYPE_MEMORY_TAG)
1867 gcc_assert (TMT_USED_ALONE (var));
1868
1869 append_v_may_def (var);
1870 }
1871 }
1872 else
1873 {
1874 bool none_added = true;
1875 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1876 {
1877 if (!access_can_touch_variable (full_ref, al, offset, size))
1878 continue;
1879 none_added = false;
1880 append_vuse (al);
1881 }
1882
1883       /* Similarly, append a virtual use for VAR itself when it is an
1884          alias tag, or when none of its aliases could be touched.  */
1885 if (v_ann->is_aliased || none_added)
1886 append_vuse (var);
1887 }
1888 }
1889 }
1890
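/* A small sketch of the effect of this function (SSA version numbers
   and dump syntax are illustrative):

     int g;

     void
     f (int *p)
     {
       *p = 5;
     }

   If the memory tag for 'p' has 'g' in its may-alias set, the store
   through 'p' receives a may-definition of 'g' instead of a real
   operand, which shows up in the virtual operand dumps roughly as:

     #   g_3 = V_MAY_DEF <g_2>;
     *p_1 = 5;

   A load through 'p' would get 'VUSE <g_2>' instead.  */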
1891
1892 /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
1893 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1894 the statement's real operands, otherwise it is added to virtual
1895 operands. */
1896
1897 static void
1898 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1899 {
1900 bool is_real_op;
1901 tree var, sym;
1902 var_ann_t v_ann;
1903
1904 var = *var_p;
1905 gcc_assert (SSA_VAR_P (var));
1906
1907 is_real_op = is_gimple_reg (var);
1908
1909 /* If this is a real operand, the operand is either an SSA name or a
1910 decl. Virtual operands may only be decls. */
1911 gcc_assert (is_real_op || DECL_P (var));
1912
1913 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1914 v_ann = var_ann (sym);
1915
1916 /* Mark statements with volatile operands. Optimizers should back
1917 off from statements having volatile operands. */
1918 if (TREE_THIS_VOLATILE (sym) && s_ann)
1919 s_ann->has_volatile_ops = true;
1920
1921 if (is_real_op)
1922 {
1923 /* The variable is a GIMPLE register. Add it to real operands. */
1924 if (flags & opf_is_def)
1925 append_def (var_p);
1926 else
1927 append_use (var_p);
1928 }
1929 else
1930 add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
1931 }
1932
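/* For instance (a sketch; see is_gimple_reg for the exact criteria):

     int a, b;        /* GIMPLE registers.  */
     a = b + 1;       /* Real operands: DEF of 'a' and USE of 'b',
                         both pointing into the statement tree.  */

   If 'b' had its address taken, it would no longer be a GIMPLE
   register, and the reference to it would instead be recorded as a
   virtual operand (a VUSE) in the operand cache.  */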
1933
1934 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
1935 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
1936 a single variable whose address has been taken or any other valid
1937 GIMPLE memory reference (structure reference, array, etc). If the
1938 base address of REF is a decl that has sub-variables, also add all
1939 of its sub-variables. */
1940
1941 void
1942 add_to_addressable_set (tree ref, bitmap *addresses_taken)
1943 {
1944 tree var;
1945 subvar_t svars;
1946
1947 gcc_assert (addresses_taken);
1948
1949 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
1950 as the only thing we take the address of. If VAR is a structure,
1951 taking the address of a field means that the whole structure may
1952 be referenced using pointer arithmetic. See PR 21407 and the
1953 ensuing mailing list discussion. */
1954 var = get_base_address (ref);
1955 if (var && SSA_VAR_P (var))
1956 {
1957 if (*addresses_taken == NULL)
1958 *addresses_taken = BITMAP_GGC_ALLOC ();
1959
1960 if (var_can_have_subvars (var)
1961 && (svars = get_subvars_for_var (var)))
1962 {
1963 subvar_t sv;
1964 for (sv = svars; sv; sv = sv->next)
1965 {
1966 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
1967 TREE_ADDRESSABLE (sv->var) = 1;
1968 }
1969 }
1970 else
1971 {
1972 bitmap_set_bit (*addresses_taken, DECL_UID (var));
1973 TREE_ADDRESSABLE (var) = 1;
1974 }
1975 }
1976 }
1977
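/* The need to mark the whole base variable can be seen in a sketch
   like the following (hypothetical code, in the spirit of the
   PR 21407 discussion):

     struct pair { int a; int b; } s;
     int *p = &s.b;
     ...
     p[-1] = 0;       /* Reaches s.a through pointer arithmetic.  */

   Taking the address of the single field s.b exposes all of 's' to
   indirect accesses, so every sub-variable of 's' must be marked
   addressable, not just the one whose address was taken.  */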
1978
1979 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1980 clobbered variables in the function. */
1981
1982 static void
1983 add_call_clobber_ops (tree stmt, tree callee)
1984 {
1985 unsigned u;
1986 bitmap_iterator bi;
1987 stmt_ann_t s_ann = stmt_ann (stmt);
1988 bitmap not_read_b, not_written_b;
1989
1990   /* Functions that are not const, pure, or noreturn may clobber
1991      call-clobbered variables.  */
1992 if (s_ann)
1993 s_ann->makes_clobbering_call = true;
1994
1995 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1996 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1997 if (global_var)
1998 {
1999 add_stmt_operand (&global_var, s_ann, opf_is_def);
2000 return;
2001 }
2002
2003 /* Get info for local and module level statics. There is a bit
2004 set for each static if the call being processed does not read
2005 or write that variable. */
2006 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
2007 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
2008 /* Add a V_MAY_DEF operand for every call clobbered variable. */
2009 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
2010 {
2011 tree var = referenced_var_lookup (u);
2012 unsigned int escape_mask = var_ann (var)->escape_mask;
2013 tree real_var = var;
2014 bool not_read;
2015 bool not_written;
2016
2017 /* Not read and not written are computed on regular vars, not
2018 subvars, so look at the parent var if this is an SFT. */
2019 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
2020 real_var = SFT_PARENT_VAR (var);
2021
2022 not_read = not_read_b ? bitmap_bit_p (not_read_b,
2023 DECL_UID (real_var)) : false;
2024 not_written = not_written_b ? bitmap_bit_p (not_written_b,
2025 DECL_UID (real_var)) : false;
2026 gcc_assert (!unmodifiable_var_p (var));
2027
2028 clobber_stats.clobbered_vars++;
2029
2030 /* See if this variable is really clobbered by this function. */
2031
2032       /* Trivial case: a variable that escapes only to pure/const calls
2033          cannot be clobbered by other calls, and pure/const calls only read it.  */
2034 if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
2035 {
2036 tree call = get_call_expr_in (stmt);
2037 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
2038 {
2039 add_stmt_operand (&var, s_ann, opf_none);
2040 clobber_stats.unescapable_clobbers_avoided++;
2041 continue;
2042 }
2043 else
2044 {
2045 clobber_stats.unescapable_clobbers_avoided++;
2046 continue;
2047 }
2048 }
2049
2050 if (not_written)
2051 {
2052 clobber_stats.static_write_clobbers_avoided++;
2053 if (!not_read)
2054 add_stmt_operand (&var, s_ann, opf_none);
2055 else
2056 clobber_stats.static_read_clobbers_avoided++;
2057 }
2058 else
2059 add_virtual_operand (var, s_ann, opf_is_def,
2060 NULL, 0, -1, true);
2061 }
2063 }
2064
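/* Sketch of the effect (names and dump syntax are illustrative):

     int g;                     /* Call-clobbered global.  */
     void foo (void);

     void
     f (void)
     {
       g = 1;
       foo ();                  /* Gets '# g_3 = V_MAY_DEF <g_2>'.  */
     }

   If the IPA reference information proves that 'foo' never writes
   'g', the V_MAY_DEF is downgraded to a VUSE, and dropped entirely
   when 'foo' does not read 'g' either.  */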
2065
2066 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
2067 function. */
2068
2069 static void
2070 add_call_read_ops (tree stmt, tree callee)
2071 {
2072 unsigned u;
2073 bitmap_iterator bi;
2074 stmt_ann_t s_ann = stmt_ann (stmt);
2075 bitmap not_read_b;
2076
2077   /* If the function is not pure, it may reference memory.  Add
2078 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
2079 for the heuristic used to decide whether to create .GLOBAL_VAR. */
2080 if (global_var)
2081 {
2082 add_stmt_operand (&global_var, s_ann, opf_none);
2083 return;
2084 }
2085
2086 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
2087
2088 /* Add a VUSE for each call-clobbered variable. */
2089 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
2090 {
2091 tree var = referenced_var (u);
2092 tree real_var = var;
2093 bool not_read;
2094
2095 clobber_stats.readonly_clobbers++;
2096
2097 /* Not read and not written are computed on regular vars, not
2098 subvars, so look at the parent var if this is an SFT. */
2099
2100 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
2101 real_var = SFT_PARENT_VAR (var);
2102
2103 not_read = not_read_b ? bitmap_bit_p (not_read_b,
2104 DECL_UID (real_var)) : false;
2105
2106 if (not_read)
2107 {
2108 clobber_stats.static_readonly_clobbers_avoided++;
2109 continue;
2110 }
2111
2112 add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
2113 }
2114 }
2115
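/* For example, a call to a pure (but not const) function is handled
   here rather than in add_call_clobber_ops: a call-clobbered global
   'g' gets only 'VUSE <g_2>' (version numbers illustrative), so the
   call may read 'g' but is known not to modify it.  */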
2116
2117 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2118    Return TRUE if there is a problem.  */
2119
2120 bool
2121 verify_imm_links (FILE *f, tree var)
2122 {
2123 use_operand_p ptr, prev, list;
2124 int count;
2125
2126 gcc_assert (TREE_CODE (var) == SSA_NAME);
2127
2128 list = &(SSA_NAME_IMM_USE_NODE (var));
2129 gcc_assert (list->use == NULL);
2130
2131 if (list->prev == NULL)
2132 {
2133 gcc_assert (list->next == NULL);
2134 return false;
2135 }
2136
2137 prev = list;
2138 count = 0;
2139 for (ptr = list->next; ptr != list; )
2140 {
2141 if (prev != ptr->prev)
2142 goto error;
2143
2144 if (ptr->use == NULL)
2145         goto error; /* Two list roots, or a stray guard node.  */
2146 else if (*(ptr->use) != var)
2147 goto error;
2148
2149 prev = ptr;
2150 ptr = ptr->next;
2151
2152 /* Avoid infinite loops. 50,000,000 uses probably indicates a
2153 problem. */
2154 if (count++ > 50000000)
2155 goto error;
2156 }
2157
2158 /* Verify list in the other direction. */
2159 prev = list;
2160 for (ptr = list->prev; ptr != list; )
2161 {
2162 if (prev != ptr->next)
2163 goto error;
2164 prev = ptr;
2165 ptr = ptr->prev;
2166 if (count-- < 0)
2167 goto error;
2168 }
2169
2170 if (count != 0)
2171 goto error;
2172
2173 return false;
2174
2175 error:
2176 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2177 {
2178 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2179 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2180 }
2181 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2182 (void *)ptr->use);
2183 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2184   fprintf (f, "\n");
2185 return true;
2186 }
2187
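/* A minimal sketch of how this checker is meant to be used (the
   surrounding error handling here is hypothetical):

     if (verify_imm_links (stderr, ssa_name (i)))
       internal_error ("broken immediate-use list");

   The SSA verifier performs a similar walk over every SSA name when
   internal checking is enabled.  */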
2188
2189 /* Dump all the immediate uses of VAR to FILE.  */
2190
2191 void
2192 dump_immediate_uses_for (FILE *file, tree var)
2193 {
2194 imm_use_iterator iter;
2195 use_operand_p use_p;
2196
2197 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2198
2199 print_generic_expr (file, var, TDF_SLIM);
2200 fprintf (file, " : -->");
2201 if (has_zero_uses (var))
2202 fprintf (file, " no uses.\n");
2203   else if (has_single_use (var))
2204     fprintf (file, " single use.\n");
2205   else
2206     fprintf (file, "%d uses.\n", num_imm_uses (var));
2208
2209 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2210 {
2211 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2212 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2213 else
2214 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2215 }
2216   fprintf (file, "\n");
2217 }
2218
2219
2220 /* Dump all the immediate uses to FILE. */
2221
2222 void
2223 dump_immediate_uses (FILE *file)
2224 {
2225 tree var;
2226 unsigned int x;
2227
2228   fprintf (file, "Immediate_uses:\n\n");
2229 for (x = 1; x < num_ssa_names; x++)
2230 {
2231       var = ssa_name (x);
2232 if (!var)
2233 continue;
2234 dump_immediate_uses_for (file, var);
2235 }
2236 }
2237
2238
2239 /* Dump def-use edges on stderr. */
2240
2241 void
2242 debug_immediate_uses (void)
2243 {
2244 dump_immediate_uses (stderr);
2245 }
2246
2247 /* Dump def-use edges on stderr. */
2248
2249 void
2250 debug_immediate_uses_for (tree var)
2251 {
2252 dump_immediate_uses_for (stderr, var);
2253 }
2254
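/* These debug_* wrappers exist mainly so that the immediate-use
   lists can be inspected from a debugger session, for example
   (the output shown is illustrative):

     (gdb) call debug_immediate_uses_for (ssa_name (5))
     a_5 : --> 2 uses.
     b_6 = a_5 + 1;
     return a_5;

   Both functions simply forward to the dump_* routines on stderr.  */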
2255 #include "gt-tree-ssa-operands.h"