1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53 The operand tree is then parsed by the various get_* routines, which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
59
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
62
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
66
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had an SSA_NAME version of a
72 variable, and that same variable occurs in the same operands cache, then
73 the new cache vector will also get the same SSA_NAME.
74
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'.
78
79 */
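
/* As an illustrative sketch (not part of this file), a consumer of the
   cache typically walks a statement's operands with the iterators from
   tree-ssa-operands.h. Assuming a GIMPLE statement STMT whose operand
   cache is up to date:

     tree var;
     ssa_op_iter iter;

     FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_USES)
       print_generic_expr (stderr, var, 0);    -- each real/virtual use

     FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_VIRTUAL_DEFS)
       print_generic_expr (stderr, var, 0);    -- each V_MAY_DEF/V_MUST_DEF

   SSA_OP_VIRTUAL_DEFS is the mask combining SSA_OP_VMAYDEF and
   SSA_OP_VMUSTDEF. */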
80
81
82 /* Flags to describe operand properties in helpers. */
83
84 /* By default, operands are loaded. */
85 #define opf_none 0
86
87 /* Operand is the target of an assignment expression or a
88 call-clobbered variable. */
89 #define opf_is_def (1 << 0)
90
91 /* Operand is the target of an assignment expression. */
92 #define opf_kill_def (1 << 1)
93
94 /* No virtual operands should be created in the expression. This is used
95 when traversing ADDR_EXPR nodes which have different semantics than
96 other expressions. Inside an ADDR_EXPR node, the only operands that we
97 need to consider are indices into arrays. For instance, &a.b[i] should
98 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
99 VUSE for 'b'. */
100 #define opf_no_vops (1 << 2)
101
102 /* Operand is a "non-specific" kill for call-clobbers and such. This is used
103 to distinguish "reset the world" events from explicit MODIFY_EXPRs. */
104 #define opf_non_specific (1 << 3)
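
/* As a sketch of how these flags combine (the call below mirrors what
   parse_ssa_operands does for a MODIFY_EXPR whose LHS is known to be
   wholly overwritten; it is illustrative, not new behavior):

     get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
                        opf_is_def | opf_kill_def);

   By contrast, the asm "memory" clobber handling below passes
   opf_is_def | opf_non_specific, so add_stmt_operand can distinguish a
   "reset the world" store from an explicit one. */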
105
106
107 /* Array for building all the def operands. */
108 static VEC(tree,heap) *build_defs;
109
110 /* Array for building all the use operands. */
111 static VEC(tree,heap) *build_uses;
112
113 /* Array for building all the v_may_def operands. */
114 static VEC(tree,heap) *build_v_may_defs;
115
116 /* Array for building all the vuse operands. */
117 static VEC(tree,heap) *build_vuses;
118
119 /* Array for building all the v_must_def operands. */
120 static VEC(tree,heap) *build_v_must_defs;
121
122
123 /* True if the SSA operand cache machinery is currently active. */
124 static bool ops_active = false;
125
126 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
127 static unsigned operand_memory_index;
128
129 static void get_expr_operands (tree, tree *, int);
130 static void get_asm_expr_operands (tree);
131 static void get_indirect_ref_operands (tree, tree, int);
132 static void get_tmr_operands (tree, tree, int);
133 static void get_call_expr_operands (tree, tree);
134 static inline void append_def (tree *);
135 static inline void append_use (tree *);
136 static void append_v_may_def (tree);
137 static void append_v_must_def (tree);
138 static void add_call_clobber_ops (tree, tree);
139 static void add_call_read_ops (tree, tree);
140 static void add_stmt_operand (tree *, stmt_ann_t, int);
141 static void build_ssa_operands (tree stmt);
142
143 static def_optype_p free_defs = NULL;
144 static use_optype_p free_uses = NULL;
145 static vuse_optype_p free_vuses = NULL;
146 static maydef_optype_p free_maydefs = NULL;
147 static mustdef_optype_p free_mustdefs = NULL;
148
149
150 /* Return the DECL_UID of the base variable of T. */
151
152 static inline unsigned
153 get_name_decl (tree t)
154 {
155 if (TREE_CODE (t) != SSA_NAME)
156 return DECL_UID (t);
157 else
158 return DECL_UID (SSA_NAME_VAR (t));
159 }
160
161 /* Comparison function for qsort used in operand_build_sort_virtual. */
162
163 static int
164 operand_build_cmp (const void *p, const void *q)
165 {
166 tree e1 = *((const tree *)p);
167 tree e2 = *((const tree *)q);
168 unsigned int u1,u2;
169
170 u1 = get_name_decl (e1);
171 u2 = get_name_decl (e2);
172
173 /* We want to sort in ascending order. They can never be equal. */
174 #ifdef ENABLE_CHECKING
175 gcc_assert (u1 != u2);
176 #endif
177 return (u1 > u2 ? 1 : -1);
178 }
179
180 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
181
182 static inline void
183 operand_build_sort_virtual (VEC(tree,heap) *list)
184 {
185 int num = VEC_length (tree, list);
186 if (num < 2)
187 return;
188 if (num == 2)
189 {
190 if (get_name_decl (VEC_index (tree, list, 0))
191 > get_name_decl (VEC_index (tree, list, 1)))
192 {
193 /* Swap elements if in the wrong order. */
194 tree tmp = VEC_index (tree, list, 0);
195 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
196 VEC_replace (tree, list, 1, tmp);
197 }
198 return;
199 }
200 /* There are 3 or more elements, call qsort. */
201 qsort (VEC_address (tree, list),
202 VEC_length (tree, list),
203 sizeof (tree),
204 operand_build_cmp);
205 }
206
207
208
209 /* Return true if the ssa operands cache is active. */
210
211 bool
212 ssa_operands_active (void)
213 {
214 return ops_active;
215 }
216
217 /* Structure storing statistics on how many call clobbers we have, and
218 how many were avoided. */
219 static struct
220 {
221 /* Number of call-clobbered ops we attempt to add to calls in
222 add_call_clobber_ops. */
223 unsigned int clobbered_vars;
224
225 /* Number of write-clobbers (v_may_defs) avoided by using
226 not_written information. */
227 unsigned int static_write_clobbers_avoided;
228
229 /* Number of reads (vuses) avoided by using not_read
230 information. */
231 unsigned int static_read_clobbers_avoided;
232
233 /* Number of write-clobbers avoided because the variable can't escape to
234 this call. */
235 unsigned int unescapable_clobbers_avoided;
236
237 /* Number of readonly uses we attempt to add to calls in
238 add_call_read_ops. */
239 unsigned int readonly_clobbers;
240
241 /* Number of readonly uses we avoid using not_read information. */
242 unsigned int static_readonly_clobbers_avoided;
243 } clobber_stats;
244
245 /* Initialize the operand cache routines. */
246
247 void
248 init_ssa_operands (void)
249 {
250 build_defs = VEC_alloc (tree, heap, 5);
251 build_uses = VEC_alloc (tree, heap, 10);
252 build_vuses = VEC_alloc (tree, heap, 25);
253 build_v_may_defs = VEC_alloc (tree, heap, 25);
254 build_v_must_defs = VEC_alloc (tree, heap, 25);
255
256 gcc_assert (operand_memory == NULL);
257 operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
258 ops_active = true;
259 memset (&clobber_stats, 0, sizeof (clobber_stats));
260
261 }
262
263
264 /* Dispose of anything required by the operand routines. */
265
266 void
267 fini_ssa_operands (void)
268 {
269 struct ssa_operand_memory_d *ptr;
270 VEC_free (tree, heap, build_defs);
271 VEC_free (tree, heap, build_uses);
272 VEC_free (tree, heap, build_v_must_defs);
273 VEC_free (tree, heap, build_v_may_defs);
274 VEC_free (tree, heap, build_vuses);
275 free_defs = NULL;
276 free_uses = NULL;
277 free_vuses = NULL;
278 free_maydefs = NULL;
279 free_mustdefs = NULL;
280 while ((ptr = operand_memory) != NULL)
281 {
282 operand_memory = operand_memory->next;
283 ggc_free (ptr);
284 }
285
286 ops_active = false;
287
288 if (dump_file && (dump_flags & TDF_STATS))
289 {
290 fprintf (dump_file, "Original clobbered vars:%d\n", clobber_stats.clobbered_vars);
291 fprintf (dump_file, "Static write clobbers avoided:%d\n", clobber_stats.static_write_clobbers_avoided);
292 fprintf (dump_file, "Static read clobbers avoided:%d\n", clobber_stats.static_read_clobbers_avoided);
293 fprintf (dump_file, "Unescapable clobbers avoided:%d\n", clobber_stats.unescapable_clobbers_avoided);
294 fprintf (dump_file, "Original readonly clobbers:%d\n", clobber_stats.readonly_clobbers);
295 fprintf (dump_file, "Static readonly clobbers avoided:%d\n", clobber_stats.static_readonly_clobbers_avoided);
296 }
297 }
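
/* The expected lifecycle, as a sketch (init_ssa_operands,
   fini_ssa_operands and update_stmt are real; the surrounding loop is
   hypothetical and assumes the usual CFG iterators):

     block_stmt_iterator bsi;
     basic_block bb;

     init_ssa_operands ();              -- once, on entry to SSA form

     FOR_EACH_BB (bb)
       for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
         update_stmt (bsi_stmt (bsi));  -- (re)build each stmt's operands

     fini_ssa_operands ();              -- once, on exit from SSA form  */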
298
299
300 /* Return a chunk of SIZE bytes of memory for operand nodes. */
301
302 static inline void *
303 ssa_operand_alloc (unsigned size)
304 {
305 char *ptr;
306 if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
307 {
308 struct ssa_operand_memory_d *ptr;
309 ptr = GGC_NEW (struct ssa_operand_memory_d);
310 ptr->next = operand_memory;
311 operand_memory = ptr;
312 operand_memory_index = 0;
313 }
314 ptr = &(operand_memory->mem[operand_memory_index]);
315 operand_memory_index += size;
316 return ptr;
317 }
318
319
320 /* Make sure PTR is in the correct immediate use list. Since uses are simply
321 pointers into the stmt TREE, there is no way of telling if anyone has
322 changed what this pointer points to via TREE_OPERAND (exp, 0) = <...>.
323 The contents are different, but the pointer is still the same. This
324 routine will check to make sure PTR is in the correct list, and if it isn't
325 put it in the correct list. We cannot simply check the previous node
326 because all nodes in the same stmt might have been changed. */
327
328 static inline void
329 correct_use_link (use_operand_p ptr, tree stmt)
330 {
331 use_operand_p prev;
332 tree root;
333
334 /* Fold_stmt () may have changed the stmt pointers. */
335 if (ptr->stmt != stmt)
336 ptr->stmt = stmt;
337
338 prev = ptr->prev;
339 if (prev)
340 {
341 /* Find the root element, making sure we skip any safe iterators. */
342 while (prev->use != NULL || prev->stmt == NULL)
343 prev = prev->prev;
344
345 /* Get the ssa_name of the list the node is in. */
346 root = prev->stmt;
347 /* If it's the right list, simply return. */
348 if (root == *(ptr->use))
349 return;
350 }
351 /* It's in the wrong list if we reach here. */
352 delink_imm_use (ptr);
353 link_imm_use (ptr, *(ptr->use));
354 }
355
356
357 /* This routine makes sure that PTR is in an immediate use list, and makes
358 sure the stmt pointer is set to the current stmt. Virtual uses do not need
359 the overhead of correct_use_link since they cannot be directly manipulated
360 like a real use can be. (They don't exist in the TREE_OPERAND nodes.) */
361 static inline void
362 set_virtual_use_link (use_operand_p ptr, tree stmt)
363 {
364 /* Fold_stmt () may have changed the stmt pointers. */
365 if (ptr->stmt != stmt)
366 ptr->stmt = stmt;
367
368 /* If this use isn't in a list, add it to the correct list. */
369 if (!ptr->prev)
370 link_imm_use (ptr, *(ptr->use));
371 }
372
373
374
375 #define FINALIZE_OPBUILD build_defs
376 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
377 build_defs, (I))
378 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
379 build_defs, (I))
380 #define FINALIZE_FUNC finalize_ssa_def_ops
381 #define FINALIZE_ALLOC alloc_def
382 #define FINALIZE_FREE free_defs
383 #define FINALIZE_TYPE struct def_optype_d
384 #define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
385 #define FINALIZE_OPS DEF_OPS
386 #define FINALIZE_BASE(VAR) VAR
387 #define FINALIZE_BASE_TYPE tree *
388 #define FINALIZE_BASE_ZERO NULL
389 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
390 #include "tree-ssa-opfinalize.h"
391
392
393 /* This routine will create stmt operands for STMT from the def build list. */
394
395 static void
396 finalize_ssa_defs (tree stmt)
397 {
398 unsigned int num = VEC_length (tree, build_defs);
399 /* There should only be a single real definition per assignment. */
400 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
401
402 /* If there is an old list, often the new list is identical, or close, so
403 find the elements at the beginning that are the same as the vector. */
404
405 finalize_ssa_def_ops (stmt);
406 VEC_truncate (tree, build_defs, 0);
407 }
408
409 #define FINALIZE_OPBUILD build_uses
410 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
411 build_uses, (I))
412 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
413 build_uses, (I))
414 #define FINALIZE_FUNC finalize_ssa_use_ops
415 #define FINALIZE_ALLOC alloc_use
416 #define FINALIZE_FREE free_uses
417 #define FINALIZE_TYPE struct use_optype_d
418 #define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
419 #define FINALIZE_OPS USE_OPS
420 #define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
421 #define FINALIZE_CORRECT_USE correct_use_link
422 #define FINALIZE_BASE(VAR) VAR
423 #define FINALIZE_BASE_TYPE tree *
424 #define FINALIZE_BASE_ZERO NULL
425 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
426 (PTR)->use_ptr.use = (VAL); \
427 link_imm_use_stmt (&((PTR)->use_ptr), \
428 *(VAL), (STMT))
429 #include "tree-ssa-opfinalize.h"
430
431 /* Finalize the new use operand vector for STMT. */
432
433 static void
434 finalize_ssa_uses (tree stmt)
435 {
436 #ifdef ENABLE_CHECKING
437 {
438 unsigned x;
439 unsigned num = VEC_length (tree, build_uses);
440
441 /* If the pointer to the operand is the statement itself, something is
442 wrong. It means that we are pointing to a local variable (the
443 initial call to get_stmt_operands does not pass a pointer to a
444 statement). */
445 for (x = 0; x < num; x++)
446 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
447 }
448 #endif
449 finalize_ssa_use_ops (stmt);
450 VEC_truncate (tree, build_uses, 0);
451 }
452
453
454 /* Finalize the new v_may_def operand vector for STMT. */
455 #define FINALIZE_OPBUILD build_v_may_defs
456 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_may_defs, (I))
457 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
458 build_v_may_defs, (I)))
459 #define FINALIZE_FUNC finalize_ssa_v_may_def_ops
460 #define FINALIZE_ALLOC alloc_maydef
461 #define FINALIZE_FREE free_maydefs
462 #define FINALIZE_TYPE struct maydef_optype_d
463 #define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
464 #define FINALIZE_OPS MAYDEF_OPS
465 #define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
466 #define FINALIZE_CORRECT_USE set_virtual_use_link
467 #define FINALIZE_BASE_ZERO 0
468 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
469 #define FINALIZE_BASE_TYPE unsigned
470 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
471 (PTR)->def_var = (VAL); \
472 (PTR)->use_var = (VAL); \
473 (PTR)->use_ptr.use = &((PTR)->use_var); \
474 link_imm_use_stmt (&((PTR)->use_ptr), \
475 (VAL), (STMT))
476 #include "tree-ssa-opfinalize.h"
477
478
479 static void
480 finalize_ssa_v_may_defs (tree stmt)
481 {
482 finalize_ssa_v_may_def_ops (stmt);
483 }
484
485
486 /* Clear the in_list bits and empty the build array for v_may_defs. */
487
488 static inline void
489 cleanup_v_may_defs (void)
490 {
491 unsigned x, num;
492 num = VEC_length (tree, build_v_may_defs);
493
494 for (x = 0; x < num; x++)
495 {
496 tree t = VEC_index (tree, build_v_may_defs, x);
497 if (TREE_CODE (t) != SSA_NAME)
498 {
499 var_ann_t ann = var_ann (t);
500 ann->in_v_may_def_list = 0;
501 }
502 }
503 VEC_truncate (tree, build_v_may_defs, 0);
504 }
505
506
507 #define FINALIZE_OPBUILD build_vuses
508 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_vuses, (I))
509 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
510 build_vuses, (I)))
511 #define FINALIZE_FUNC finalize_ssa_vuse_ops
512 #define FINALIZE_ALLOC alloc_vuse
513 #define FINALIZE_FREE free_vuses
514 #define FINALIZE_TYPE struct vuse_optype_d
515 #define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
516 #define FINALIZE_OPS VUSE_OPS
517 #define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
518 #define FINALIZE_CORRECT_USE set_virtual_use_link
519 #define FINALIZE_BASE_ZERO 0
520 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
521 #define FINALIZE_BASE_TYPE unsigned
522 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
523 (PTR)->use_var = (VAL); \
524 (PTR)->use_ptr.use = &((PTR)->use_var); \
525 link_imm_use_stmt (&((PTR)->use_ptr), \
526 (VAL), (STMT))
527 #include "tree-ssa-opfinalize.h"
528
529
530 /* Finalize the new vuse operand vector for STMT. */
531
532 static void
533 finalize_ssa_vuses (tree stmt)
534 {
535 unsigned num, num_v_may_defs;
536 unsigned vuse_index;
537
538 /* Remove superfluous VUSE operands. If the statement already has a
539 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
540 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
541 suppose that variable 'a' is aliased:
542
543 # VUSE <a_2>
544 # a_3 = V_MAY_DEF <a_2>
545 a = a + 1;
546
547 The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
548 operation. */
549
550 num = VEC_length (tree, build_vuses);
551 num_v_may_defs = VEC_length (tree, build_v_may_defs);
552
553 if (num > 0 && num_v_may_defs > 0)
554 {
555 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
556 {
557 tree vuse;
558 vuse = VEC_index (tree, build_vuses, vuse_index);
559 if (TREE_CODE (vuse) != SSA_NAME)
560 {
561 var_ann_t ann = var_ann (vuse);
562 ann->in_vuse_list = 0;
563 if (ann->in_v_may_def_list)
564 {
565 VEC_ordered_remove (tree, build_vuses, vuse_index);
566 continue;
567 }
568 }
569 vuse_index++;
570 }
571 }
572 else
573 /* Clear out the in_list bits. */
574 for (vuse_index = 0;
575 vuse_index < VEC_length (tree, build_vuses);
576 vuse_index++)
577 {
578 tree t = VEC_index (tree, build_vuses, vuse_index);
579 if (TREE_CODE (t) != SSA_NAME)
580 {
581 var_ann_t ann = var_ann (t);
582 ann->in_vuse_list = 0;
583 }
584 }
585
586 finalize_ssa_vuse_ops (stmt);
587 /* The v_may_def build vector wasn't cleaned up because we needed it. */
588 cleanup_v_may_defs ();
589
590 /* Clear the vuses build vector. */
591 VEC_truncate (tree, build_vuses, 0);
592
593 }
594
595 /* Finalize the new v_must_def operand vector for STMT. */
596
597 #define FINALIZE_OPBUILD build_v_must_defs
598 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_must_defs, (I))
599 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
600 build_v_must_defs, (I)))
601 #define FINALIZE_FUNC finalize_ssa_v_must_def_ops
602 #define FINALIZE_ALLOC alloc_mustdef
603 #define FINALIZE_FREE free_mustdefs
604 #define FINALIZE_TYPE struct mustdef_optype_d
605 #define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
606 #define FINALIZE_OPS MUSTDEF_OPS
607 #define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
608 #define FINALIZE_CORRECT_USE set_virtual_use_link
609 #define FINALIZE_BASE_ZERO 0
610 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
611 #define FINALIZE_BASE_TYPE unsigned
612 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
613 (PTR)->def_var = (VAL); \
614 (PTR)->kill_var = (VAL); \
615 (PTR)->use_ptr.use = &((PTR)->kill_var);\
616 link_imm_use_stmt (&((PTR)->use_ptr), \
617 (VAL), (STMT))
618 #include "tree-ssa-opfinalize.h"
619
620
621 static void
622 finalize_ssa_v_must_defs (tree stmt)
623 {
624 /* In the presence of subvars, there may be more than one V_MUST_DEF per
625 statement (one for each subvar). It is a bit expensive to verify that
626 all must-defs in a statement belong to subvars if there is more than one
627 MUST-def, so we don't do it. Suffice it to say, if you reach here
628 without having subvars and have num > 1, you have hit a bug. */
629
630 finalize_ssa_v_must_def_ops (stmt);
631 VEC_truncate (tree, build_v_must_defs, 0);
632 }
633
634
635 /* Finalize all the build vectors and fill them into STMT's operand cache. */
636
637 static inline void
638 finalize_ssa_stmt_operands (tree stmt)
639 {
640 finalize_ssa_defs (stmt);
641 finalize_ssa_uses (stmt);
642 finalize_ssa_v_must_defs (stmt);
643 finalize_ssa_v_may_defs (stmt);
644 finalize_ssa_vuses (stmt);
645 }
646
647
648 /* Start the process of building up the operand vectors. */
649
650 static inline void
651 start_ssa_stmt_operands (void)
652 {
653 gcc_assert (VEC_length (tree, build_defs) == 0);
654 gcc_assert (VEC_length (tree, build_uses) == 0);
655 gcc_assert (VEC_length (tree, build_vuses) == 0);
656 gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
657 gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
658 }
659
660
661 /* Add DEF_P to the list of pointers to operands. */
662
663 static inline void
664 append_def (tree *def_p)
665 {
666 VEC_safe_push (tree, heap, build_defs, (tree)def_p);
667 }
668
669
670 /* Add USE_P to the list of pointers to operands. */
671
672 static inline void
673 append_use (tree *use_p)
674 {
675 VEC_safe_push (tree, heap, build_uses, (tree)use_p);
676 }
677
678
679 /* Add a new virtual may def for variable VAR to the build array. */
680
681 static inline void
682 append_v_may_def (tree var)
683 {
684 if (TREE_CODE (var) != SSA_NAME)
685 {
686 var_ann_t ann = get_var_ann (var);
687
688 /* Don't allow duplicate entries. */
689 if (ann->in_v_may_def_list)
690 return;
691 ann->in_v_may_def_list = 1;
692 }
693
694 VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
695 }
696
697
698 /* Add VAR to the list of virtual uses. */
699
700 static inline void
701 append_vuse (tree var)
702 {
703
704 /* Don't allow duplicate entries. */
705 if (TREE_CODE (var) != SSA_NAME)
706 {
707 var_ann_t ann = get_var_ann (var);
708
709 if (ann->in_vuse_list || ann->in_v_may_def_list)
710 return;
711 ann->in_vuse_list = 1;
712 }
713
714 VEC_safe_push (tree, heap, build_vuses, (tree)var);
715 }
716
717
718 /* Add VAR to the list of virtual must definitions being built. */
719
720 static inline void
721 append_v_must_def (tree var)
722 {
723 unsigned i;
724
725 /* Don't allow duplicate entries. */
726 for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
727 if (var == VEC_index (tree, build_v_must_defs, i))
728 return;
729
730 VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
731 }
732
733
734 /* Parse STMT looking for operands. When finished, the various build_*
735 operand vectors will have potential operands in them. */
737
738 static void
739 parse_ssa_operands (tree stmt)
740 {
741 enum tree_code code;
742
743 code = TREE_CODE (stmt);
744 switch (code)
745 {
746 case MODIFY_EXPR:
747 /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
748 either only part of LHS is modified or if the RHS might throw,
749 otherwise, use V_MUST_DEF.
750
751 ??? If it might throw, we should represent somehow that it is killed
752 on the fallthrough path. */
753 {
754 tree lhs = TREE_OPERAND (stmt, 0);
755 int lhs_flags = opf_is_def;
756
757 get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
758
759 /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
760 or not the entire LHS is modified; that depends on what's
761 inside the VIEW_CONVERT_EXPR. */
762 if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
763 lhs = TREE_OPERAND (lhs, 0);
764
765 if (TREE_CODE (lhs) != ARRAY_RANGE_REF
766 && TREE_CODE (lhs) != BIT_FIELD_REF)
767 lhs_flags |= opf_kill_def;
768
769 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
770 }
771 break;
772
773 case COND_EXPR:
774 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
775 break;
776
777 case SWITCH_EXPR:
778 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
779 break;
780
781 case ASM_EXPR:
782 get_asm_expr_operands (stmt);
783 break;
784
785 case RETURN_EXPR:
786 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
787 break;
788
789 case GOTO_EXPR:
790 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
791 break;
792
793 case LABEL_EXPR:
794 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
795 break;
796
797 /* These nodes contain no variable references. */
798 case BIND_EXPR:
799 case CASE_LABEL_EXPR:
800 case TRY_CATCH_EXPR:
801 case TRY_FINALLY_EXPR:
802 case EH_FILTER_EXPR:
803 case CATCH_EXPR:
804 case RESX_EXPR:
805 break;
806
807 default:
808 /* Notice that if get_expr_operands tries to use &STMT as the operand
809 pointer (which may only happen for USE operands), we will fail in
810 append_use. This default will handle statements like empty
811 statements, or CALL_EXPRs that may appear on the RHS of a statement
812 or as statements themselves. */
813 get_expr_operands (stmt, &stmt, opf_none);
814 break;
815 }
816 }
817
818 /* Create an operands cache for STMT. */
819
820 static void
821 build_ssa_operands (tree stmt)
822 {
823 stmt_ann_t ann = get_stmt_ann (stmt);
824
825 /* Initially assume that the statement has no volatile operands. */
826 if (ann)
827 ann->has_volatile_ops = false;
828
829 start_ssa_stmt_operands ();
830
831 parse_ssa_operands (stmt);
832 operand_build_sort_virtual (build_vuses);
833 operand_build_sort_virtual (build_v_may_defs);
834 operand_build_sort_virtual (build_v_must_defs);
835
836 finalize_ssa_stmt_operands (stmt);
837 }
838
839
840 /* Free any operands vectors in OPS. */
841 void
842 free_ssa_operands (stmt_operands_p ops)
843 {
844 ops->def_ops = NULL;
845 ops->use_ops = NULL;
846 ops->maydef_ops = NULL;
847 ops->mustdef_ops = NULL;
848 ops->vuse_ops = NULL;
849 }
850
851
852 /* Get the operands of statement STMT. Note that repeated calls to
853 get_stmt_operands for the same statement will do nothing until the
854 statement is marked modified by a call to mark_stmt_modified(). */
855
856 void
857 update_stmt_operands (tree stmt)
858 {
859 stmt_ann_t ann = get_stmt_ann (stmt);
860 /* If get_stmt_operands is called before SSA is initialized, don't
861 do anything. */
862 if (!ssa_operands_active ())
863 return;
864 /* The optimizers cannot handle statements that are nothing but a
865 _DECL. This indicates a bug in the gimplifier. */
866 gcc_assert (!SSA_VAR_P (stmt));
867
868 gcc_assert (ann->modified);
869
870 timevar_push (TV_TREE_OPS);
871
872 build_ssa_operands (stmt);
873
874 /* Clear the modified bit for STMT. Subsequent calls to
875 get_stmt_operands for this statement will do nothing until the
876 statement is marked modified by a call to mark_stmt_modified(). */
877 ann->modified = 0;
878
879 timevar_pop (TV_TREE_OPS);
880 }
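
/* A pass that rewrites a statement in place is expected to follow this
   protocol (a sketch; NEW_RHS is a hypothetical replacement operand):

     TREE_OPERAND (stmt, 1) = new_rhs;   -- mutate the tree
     mark_stmt_modified (stmt);          -- flag the operand cache stale
     update_stmt_operands (stmt);        -- rebuild defs/uses/vops

   or, equivalently, call update_stmt (stmt), which performs the last
   two steps itself. */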
881
882
883 /* Copies virtual operands from SRC to DST. */
884
885 void
886 copy_virtual_operands (tree dest, tree src)
887 {
888 tree t;
889 ssa_op_iter iter, old_iter;
890 use_operand_p use_p, u2;
891 def_operand_p def_p, d2;
892
893 build_ssa_operands (dest);
894
895 /* Copy all the virtual fields. */
896 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
897 append_vuse (t);
898 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
899 append_v_may_def (t);
900 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
901 append_v_must_def (t);
902
903 if (VEC_length (tree, build_vuses) == 0
904 && VEC_length (tree, build_v_may_defs) == 0
905 && VEC_length (tree, build_v_must_defs) == 0)
906 return;
907
908 /* Now commit the virtual operands to this stmt. */
909 finalize_ssa_v_must_defs (dest);
910 finalize_ssa_v_may_defs (dest);
911 finalize_ssa_vuses (dest);
912
913 /* Finally, set the fields to the same values as the originals. */
914
915
916 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
917 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
918 {
919 gcc_assert (!op_iter_done (&old_iter));
920 SET_USE (use_p, t);
921 t = op_iter_next_tree (&old_iter);
922 }
923 gcc_assert (op_iter_done (&old_iter));
924
925 op_iter_init_maydef (&old_iter, src, &u2, &d2);
926 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
927 {
928 gcc_assert (!op_iter_done (&old_iter));
929 SET_USE (use_p, USE_FROM_PTR (u2));
930 SET_DEF (def_p, DEF_FROM_PTR (d2));
931 op_iter_next_maymustdef (&u2, &d2, &old_iter);
932 }
933 gcc_assert (op_iter_done (&old_iter));
934
935 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
936 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
937 {
938 gcc_assert (!op_iter_done (&old_iter));
939 SET_USE (use_p, USE_FROM_PTR (u2));
940 SET_DEF (def_p, DEF_FROM_PTR (d2));
941 op_iter_next_maymustdef (&u2, &d2, &old_iter);
942 }
943 gcc_assert (op_iter_done (&old_iter));
944
945 }
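
/* A typical use, sketched: a pass that replaces OLD_STMT with a
   semantically equivalent NEW_STMT can carry the virtual web across
   without rerunning the SSA renamer (LHS and FOLDED are hypothetical):

     tree new_stmt = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, folded);
     copy_virtual_operands (new_stmt, old_stmt);

   Afterwards NEW_STMT's VUSE/V_MAY_DEF/V_MUST_DEF operands refer to the
   same SSA names that OLD_STMT's did. */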
946
947
948 /* Specifically for use in DOM's expression analysis. Given a store, we
949 create an artificial stmt which looks like a load from the store; this can
950 be used to eliminate redundant loads. OLD_STMT is the store stmt, and
951 NEW_STMT is the new load which represents a load of the values stored. */
953
954 void
955 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
956 {
957 stmt_ann_t ann;
958 tree op;
959 ssa_op_iter iter;
960 use_operand_p use_p;
961 unsigned x;
962
963 ann = get_stmt_ann (new_stmt);
964
965 /* Process the stmt looking for operands. */
966 start_ssa_stmt_operands ();
967 parse_ssa_operands (new_stmt);
968
969 for (x = 0; x < VEC_length (tree, build_vuses); x++)
970 {
971 tree t = VEC_index (tree, build_vuses, x);
972 if (TREE_CODE (t) != SSA_NAME)
973 {
974 var_ann_t ann = var_ann (t);
975 ann->in_vuse_list = 0;
976 }
977 }
978
979 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
980 {
981 tree t = VEC_index (tree, build_v_may_defs, x);
982 if (TREE_CODE (t) != SSA_NAME)
983 {
984 var_ann_t ann = var_ann (t);
985 ann->in_v_may_def_list = 0;
986 }
987 }
988 /* Remove any virtual operands that were found. */
989 VEC_truncate (tree, build_v_may_defs, 0);
990 VEC_truncate (tree, build_v_must_defs, 0);
991 VEC_truncate (tree, build_vuses, 0);
992
993 /* For each VDEF on the original statement, we want to create a
994 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
995 statement. */
996 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
997 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
998 append_vuse (op);
999
1000 /* Now build the operands for this new stmt. */
1001 finalize_ssa_stmt_operands (new_stmt);
1002
1003 /* All uses in this fake stmt must not be in the immediate use lists. */
1004 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
1005 delink_imm_use (use_p);
1006 }
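
/* Sketch of the intended use from a DOM-style pass: given the store
   'a.x = b_3', build a fake load of the same memory, e.g. 'tmp = a.x'
   (STORE and TMP are hypothetical locals):

     tree mem = unshare_expr (TREE_OPERAND (store, 0));
     tree fake = build2 (MODIFY_EXPR, TREE_TYPE (mem), tmp, mem);
     create_ssa_artficial_load_stmt (fake, store);

   FAKE then carries VUSEs matching the store's virtual definitions, so
   it hashes equal to any later real load of that memory. */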
1007
1008 void
1009 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
1010 {
1011 tree op0, op1;
1012 op0 = *exp0;
1013 op1 = *exp1;
1014
1015 /* If the operand cache is active, attempt to preserve the relative positions
1016 of these two operands in their respective immediate use lists. */
1017 if (ssa_operands_active () && op0 != op1)
1018 {
1019 use_optype_p use0, use1, ptr;
1020 use0 = use1 = NULL;
1021 /* Find the 2 operands in the cache, if they are there. */
1022 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1023 if (USE_OP_PTR (ptr)->use == exp0)
1024 {
1025 use0 = ptr;
1026 break;
1027 }
1028 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1029 if (USE_OP_PTR (ptr)->use == exp1)
1030 {
1031 use1 = ptr;
1032 break;
1033 }
1034 /* If both uses don't have operand entries, there isn't much we can do
1035 at this point. Presumably we don't need to worry about it. */
1036 if (use0 && use1)
1037 {
1038 tree *tmp = USE_OP_PTR (use1)->use;
1039 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
1040 USE_OP_PTR (use0)->use = tmp;
1041 }
1042 }
1043
1044 /* Now swap the data. */
1045 *exp0 = op1;
1046 *exp1 = op0;
1047 }
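
/* Sketch of a caller: canonicalizing a commutative expression on the
   RHS of STMT while keeping the immediate-use lists intact:

     tree rhs = TREE_OPERAND (stmt, 1);
     if (commutative_tree_code (TREE_CODE (rhs)))
       swap_tree_operands (stmt,
                           &TREE_OPERAND (rhs, 0),
                           &TREE_OPERAND (rhs, 1));

   A plain temporary-variable swap of the trees would leave the operand
   cache pointing at the wrong use slots. */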
1048
1049 /* Recursively scan the expression pointed to by EXPR_P in statement STMT.
1050 FLAGS is one of the OPF_* constants modifying how to interpret the
1051 operands found. */
1052
1053 static void
1054 get_expr_operands (tree stmt, tree *expr_p, int flags)
1055 {
1056 enum tree_code code;
1057 enum tree_code_class class;
1058 tree expr = *expr_p;
1059 stmt_ann_t s_ann = stmt_ann (stmt);
1060
1061 if (expr == NULL)
1062 return;
1063
1064 code = TREE_CODE (expr);
1065 class = TREE_CODE_CLASS (code);
1066
1067 switch (code)
1068 {
1069 case ADDR_EXPR:
1070 /* Taking the address of a variable does not represent a
1071 reference to it, but the fact that the stmt takes its address will be
1072 of interest to some passes (e.g. alias resolution). */
1073 add_to_addressable_set (TREE_OPERAND (expr, 0),
1074 &s_ann->addresses_taken);
1075
1076 /* If the address is invariant, there may be no interesting variable
1077 references inside. */
1078 if (is_gimple_min_invariant (expr))
1079 return;
1080
1081 /* There should be no VUSEs created, since the referenced objects are
1082 not really accessed. The only operands that we should find here
1083 are ARRAY_REF indices which will always be real operands (GIMPLE
1084 does not allow non-registers as array indices). */
1085 flags |= opf_no_vops;
1086
1087 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1088 return;
1089
1090 case SSA_NAME:
1091 case STRUCT_FIELD_TAG:
1092 case TYPE_MEMORY_TAG:
1093 case NAME_MEMORY_TAG:
1094
1095 add_stmt_operand (expr_p, s_ann, flags);
1096 return;
1097
1098 case VAR_DECL:
1099 case PARM_DECL:
1100 case RESULT_DECL:
1101 {
1102 subvar_t svars;
1103
1104 /* Add the subvars for a variable if it has subvars, to DEFS or USES.
1105 Otherwise, add the variable itself.
1106 Whether it goes to USES or DEFS depends on the operand flags. */
1107 if (var_can_have_subvars (expr)
1108 && (svars = get_subvars_for_var (expr)))
1109 {
1110 subvar_t sv;
1111 for (sv = svars; sv; sv = sv->next)
1112 add_stmt_operand (&sv->var, s_ann, flags);
1113 }
1114 else
1115 {
1116 add_stmt_operand (expr_p, s_ann, flags);
1117 }
1118 return;
1119 }
1120 case MISALIGNED_INDIRECT_REF:
1121 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1122 /* fall through */
1123
1124 case ALIGN_INDIRECT_REF:
1125 case INDIRECT_REF:
1126 get_indirect_ref_operands (stmt, expr, flags);
1127 return;
1128
1129 case TARGET_MEM_REF:
1130 get_tmr_operands (stmt, expr, flags);
1131 return;
1132
1133 case ARRAY_RANGE_REF:
1134 /* Treat array references as references to the virtual variable
1135 representing the array. The virtual variable for an ARRAY_REF
1136 is the VAR_DECL for the array. */
1137
1138 /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
1139 according to FLAGS. */
1140 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1141 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1142 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1143 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1144 return;
1145
1146 case ARRAY_REF:
1147 case COMPONENT_REF:
1148 case REALPART_EXPR:
1149 case IMAGPART_EXPR:
1150 {
1151 tree ref;
1152 HOST_WIDE_INT offset, size, maxsize;
1153 bool none = true;
1154 /* This component ref becomes an access to all of the subvariables
1155 it can touch, if we can determine that, but *NOT* the real one.
1156 If we can't determine which fields we could touch, the recursion
1157 will eventually get to a variable and add *all* of its subvars, or
1158 whatever is the minimum correct subset. */
1159
1160 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
1161 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
1162 {
1163 subvar_t svars = get_subvars_for_var (ref);
1164 subvar_t sv;
1165 for (sv = svars; sv; sv = sv->next)
1166 {
1167 bool exact;
1168 if (overlap_subvar (offset, maxsize, sv, &exact))
1169 {
1170 int subvar_flags = flags;
1171 none = false;
1172 if (!exact
1173 || size != maxsize)
1174 subvar_flags &= ~opf_kill_def;
1175 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1176 }
1177 }
1178 if (!none)
1179 flags |= opf_no_vops;
1180 }
1181
1182 /* Even if we found subvars above we need to ensure we see the
1183 immediate uses for d in s.a[d]. If s.a has a subvar we would
1184 miss it otherwise. */
1185 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1186 flags & ~opf_kill_def);
1187
1188 if (code == COMPONENT_REF)
1189 {
1190 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1191 s_ann->has_volatile_ops = true;
1192 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1193 }
1194 else if (code == ARRAY_REF)
1195 {
1196 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1197 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1198 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1199 }
1200 return;
1201 }
1202 case WITH_SIZE_EXPR:
1203 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1204 and an rvalue reference to its second argument. */
1205 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1206 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1207 return;
1208
1209 case CALL_EXPR:
1210 get_call_expr_operands (stmt, expr);
1211 return;
1212
1213 case COND_EXPR:
1214 case VEC_COND_EXPR:
1215 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1216 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1217 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1218 return;
1219
1220 case MODIFY_EXPR:
1221 {
1222 int subflags;
1223 tree op;
1224
1225 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1226
1227 op = TREE_OPERAND (expr, 0);
1228 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1229 op = TREE_OPERAND (op, 0);
1230 if (TREE_CODE (op) == ARRAY_RANGE_REF
1231 || TREE_CODE (op) == REALPART_EXPR
1232 || TREE_CODE (op) == IMAGPART_EXPR)
1233 subflags = opf_is_def;
1234 else
1235 subflags = opf_is_def | opf_kill_def;
1236
1237 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
1238 return;
1239 }
1240
1241 case CONSTRUCTOR:
1242 {
1243 /* General aggregate CONSTRUCTORs have been decomposed, but they
1244 are still in use as the COMPLEX_EXPR equivalent for vectors. */
1245 constructor_elt *ce;
1246 unsigned HOST_WIDE_INT idx;
1247
1248 for (idx = 0;
1249 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
1250 idx++)
1251 get_expr_operands (stmt, &ce->value, opf_none);
1252
1253 return;
1254 }
1255
1256 case TRUTH_NOT_EXPR:
1257 case BIT_FIELD_REF:
1258 case VIEW_CONVERT_EXPR:
1259 do_unary:
1260 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1261 return;
1262
1263 case TRUTH_AND_EXPR:
1264 case TRUTH_OR_EXPR:
1265 case TRUTH_XOR_EXPR:
1266 case COMPOUND_EXPR:
1267 case OBJ_TYPE_REF:
1268 case ASSERT_EXPR:
1269 do_binary:
1270 {
1271 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1272 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1273 return;
1274 }
1275
1276 case REALIGN_LOAD_EXPR:
1277 {
1278 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1279 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1280 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
1281 return;
1282 }
1283
1284 case BLOCK:
1285 case FUNCTION_DECL:
1286 case EXC_PTR_EXPR:
1287 case FILTER_EXPR:
1288 case LABEL_DECL:
1289 case CONST_DECL:
1290 /* Expressions that make no memory references. */
1291 return;
1292
1293 default:
1294 if (class == tcc_unary)
1295 goto do_unary;
1296 if (class == tcc_binary || class == tcc_comparison)
1297 goto do_binary;
1298 if (class == tcc_constant || class == tcc_type)
1299 return;
1300 }
1301
1302 /* If we get here, something has gone wrong. */
1303 #ifdef ENABLE_CHECKING
1304 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
1305 debug_tree (expr);
1306 fputs ("\n", stderr);
1307 internal_error ("internal error");
1308 #endif
1309 gcc_unreachable ();
1310 }
1311
1312
1313 /* Scan operands in the ASM_EXPR stmt STMT. */
1314
1315 static void
1316 get_asm_expr_operands (tree stmt)
1317 {
1318 stmt_ann_t s_ann = stmt_ann (stmt);
1319 int noutputs = list_length (ASM_OUTPUTS (stmt));
1320 const char **oconstraints
1321 = (const char **) alloca ((noutputs) * sizeof (const char *));
1322 int i;
1323 tree link;
1324 const char *constraint;
1325 bool allows_mem, allows_reg, is_inout;
1326
1327 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1328 {
1329 oconstraints[i] = constraint
1330 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1331 parse_output_constraint (&constraint, i, 0, 0,
1332 &allows_mem, &allows_reg, &is_inout);
1333
1334 /* This should have been split in gimplify_asm_expr. */
1335 gcc_assert (!allows_reg || !is_inout);
1336
1337 /* Memory operands are addressable. Note that STMT needs the
1338 address of this operand. */
1339 if (!allows_reg && allows_mem)
1340 {
1341 tree t = get_base_address (TREE_VALUE (link));
1342 if (t && DECL_P (t) && s_ann)
1343 add_to_addressable_set (t, &s_ann->addresses_taken);
1344 }
1345
1346 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1347 }
1348
1349 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1350 {
1351 constraint
1352 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1353 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1354 oconstraints, &allows_mem, &allows_reg);
1355
1356 /* Memory operands are addressable. Note that STMT needs the
1357 address of this operand. */
1358 if (!allows_reg && allows_mem)
1359 {
1360 tree t = get_base_address (TREE_VALUE (link));
1361 if (t && DECL_P (t) && s_ann)
1362 add_to_addressable_set (t, &s_ann->addresses_taken);
1363 }
1364
1365 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1366 }
1367
1368
1369 /* Clobber memory for asm ("" : : : "memory"); */
1370 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1371 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1372 {
1373 unsigned i;
1374 bitmap_iterator bi;
1375
1376 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1377 decided to group them). */
1378 if (global_var)
1379 add_stmt_operand (&global_var, s_ann, opf_is_def);
1380 else
1381 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1382 {
1383 tree var = referenced_var (i);
1384 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1385 }
1386
1387 /* Now clobber all addressables. */
1388 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1389 {
1390 tree var = referenced_var (i);
1391
1392 /* Subvars are explicitly represented in this list, so
1393 we don't need the original to be added to the clobber
1394 ops, but the original *will* be in this list because
1395 we keep the addressability of the original
1396 variable up-to-date so we don't screw up the rest of
1397 the backend. */
1398 if (var_can_have_subvars (var)
1399 && get_subvars_for_var (var) != NULL)
1400 continue;
1401
1402 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1403 }
1404
1405 break;
1406 }
1407 }
1408
1409 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1410 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. */
1411
1412 static void
1413 get_indirect_ref_operands (tree stmt, tree expr, int flags)
1414 {
1415 tree *pptr = &TREE_OPERAND (expr, 0);
1416 tree ptr = *pptr;
1417 stmt_ann_t s_ann = stmt_ann (stmt);
1418
1419 /* Stores into INDIRECT_REF operands are never killing definitions. */
1420 flags &= ~opf_kill_def;
1421
1422 if (SSA_VAR_P (ptr))
1423 {
1424 struct ptr_info_def *pi = NULL;
1425
1426 /* If PTR has flow-sensitive points-to information, use it. */
1427 if (TREE_CODE (ptr) == SSA_NAME
1428 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1429 && pi->name_mem_tag)
1430 {
1431 /* PTR has its own memory tag. Use it. */
1432 add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
1433 }
1434 else
1435 {
1436 /* If PTR is not an SSA_NAME or it doesn't have a name
1437 tag, use its type memory tag. */
1438 var_ann_t v_ann;
1439
1440 /* If we are emitting debugging dumps, display a warning if
1441 PTR is an SSA_NAME with no flow-sensitive alias
1442 information. That means that we may need to compute
1443 aliasing again. */
1444 if (dump_file
1445 && TREE_CODE (ptr) == SSA_NAME
1446 && pi == NULL)
1447 {
1448 fprintf (dump_file,
1449 "NOTE: no flow-sensitive alias info for ");
1450 print_generic_expr (dump_file, ptr, dump_flags);
1451 fprintf (dump_file, " in ");
1452 print_generic_stmt (dump_file, stmt, dump_flags);
1453 }
1454
1455 if (TREE_CODE (ptr) == SSA_NAME)
1456 ptr = SSA_NAME_VAR (ptr);
1457 v_ann = var_ann (ptr);
1458 if (v_ann->type_mem_tag)
1459 add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
1460 }
1461 }
1462
1463 /* If a constant is used as a pointer, we can't generate a real
1464 operand for it but we mark the statement volatile to prevent
1465 optimizations from messing things up. */
1466 else if (TREE_CODE (ptr) == INTEGER_CST)
1467 {
1468 if (s_ann)
1469 s_ann->has_volatile_ops = true;
1470 return;
1471 }
1472 /* OK, this isn't even is_gimple_min_invariant. Something's broken. */
1473 else
1474 gcc_unreachable ();
1475
1476 /* Add a USE operand for the base pointer. */
1477 get_expr_operands (stmt, pptr, opf_none);
1478 }
1479
1480 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1481
1482 static void
1483 get_tmr_operands (tree stmt, tree expr, int flags)
1484 {
1485 tree tag = TMR_TAG (expr), ref;
1486 HOST_WIDE_INT offset, size, maxsize;
1487 subvar_t svars, sv;
1488 stmt_ann_t s_ann = stmt_ann (stmt);
1489
1490 /* First record the real operands. */
1491 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1492 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1493
1494 /* MEM_REFs should never be killing. */
1495 flags &= ~opf_kill_def;
1496
1497 if (TMR_SYMBOL (expr))
1498 {
1499 stmt_ann_t ann = stmt_ann (stmt);
1500 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1501 }
1502
1503 if (!tag)
1504 {
1505 /* Something weird, so ensure that we will be careful. */
1506 stmt_ann (stmt)->has_volatile_ops = true;
1507 return;
1508 }
1509
1510 if (DECL_P (tag))
1511 {
1512 get_expr_operands (stmt, &tag, flags);
1513 return;
1514 }
1515
1516 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1517 gcc_assert (ref != NULL_TREE);
1518 svars = get_subvars_for_var (ref);
1519 for (sv = svars; sv; sv = sv->next)
1520 {
1521 bool exact;
1522 if (overlap_subvar (offset, maxsize, sv, &exact))
1523 {
1524 int subvar_flags = flags;
1525 if (!exact || size != maxsize)
1526 subvar_flags &= ~opf_kill_def;
1527 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1528 }
1529 }
1530 }
1531
1532 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1533
1534 static void
1535 get_call_expr_operands (tree stmt, tree expr)
1536 {
1537 tree op;
1538 int call_flags = call_expr_flags (expr);
1539
1540 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1541 operands for all the symbols that have been found to be
1542 call-clobbered.
1543
1544 Note that if aliases have not been computed, the global effects
1545 of calls will not be included in the SSA web. This is fine
1546 because no optimizer should run before aliases have been
1547 computed. By not bothering with virtual operands for CALL_EXPRs
1548 we avoid adding superfluous virtual operands, which can be a
1549 significant compile time sink (See PR 15855). */
1550 if (aliases_computed_p
1551 && !bitmap_empty_p (call_clobbered_vars)
1552 && !(call_flags & ECF_NOVOPS))
1553 {
1554 /* A 'pure' or a 'const' function never call-clobbers anything.
1555 A 'noreturn' function might, but since we don't return anyway
1556 there is no point in recording that. */
1557 if (TREE_SIDE_EFFECTS (expr)
1558 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1559 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1560 else if (!(call_flags & ECF_CONST))
1561 add_call_read_ops (stmt, get_callee_fndecl (expr));
1562 }
1563
1564 /* Find uses in the called function. */
1565 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1566
1567 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1568 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1569
1570 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1571
1572 }
1573
1574
1575 /* Add *VAR_P to the appropriate operand array for S_ANN's stmt. FLAGS is as in
1576 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1577 the statement's real operands, otherwise it is added to virtual
1578 operands. */
1579
1580 static void
1581 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1582 {
1583 bool is_real_op;
1584 tree var, sym;
1585 var_ann_t v_ann;
1586
1587 var = *var_p;
1588 gcc_assert (SSA_VAR_P (var));
1589
1590 is_real_op = is_gimple_reg (var);
1591 /* If this is a real operand, the operand is either an SSA_NAME or a decl.
1592 Virtual operands may only be decls. */
1593 gcc_assert (is_real_op || DECL_P (var));
1594
1595 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1596 v_ann = var_ann (sym);
1597
1598 /* Mark statements with volatile operands. Optimizers should back
1599 off from statements having volatile operands. */
1600 if (TREE_THIS_VOLATILE (sym) && s_ann)
1601 s_ann->has_volatile_ops = true;
1602
1603 /* If the variable cannot be modified and this is a V_MAY_DEF change
1604 it into a VUSE. This happens when read-only variables are marked
1605 call-clobbered and/or aliased to writable variables. So we only
1606 make this change for non-specific stores.
1607
1608 Note that if this is a specific store, i.e. associated with a
1609 modify_expr, then we can't suppress the V_DEF, lest we run into
1610 validation problems.
1611
1612 This can happen when programs cast away const, leaving us with a
1613 store to read-only memory. If the statement is actually executed
1614 at runtime, then the program is ill formed. If the statement is
1615 not executed then all is well. At the very least, we cannot ICE. */
1616 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1617 {
1618 gcc_assert (!is_real_op);
1619 flags &= ~(opf_is_def | opf_kill_def);
1620 }
1621
1622 if (is_real_op)
1623 {
1624 /* The variable is a GIMPLE register. Add it to real operands. */
1625 if (flags & opf_is_def)
1626 append_def (var_p);
1627 else
1628 append_use (var_p);
1629 }
1630 else
1631 {
1632 VEC(tree,gc) *aliases;
1633
1634 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1635 virtual operands, unless the caller has specifically requested
1636 not to add virtual operands (used when adding operands inside an
1637 ADDR_EXPR expression). */
1638 if (flags & opf_no_vops)
1639 return;
1640
1641 aliases = v_ann->may_aliases;
1642
1643 if (aliases == NULL)
1644 {
1645 /* The variable is not aliased or it is an alias tag. */
1646 if (flags & opf_is_def)
1647 {
1648 if (flags & opf_kill_def)
1649 {
1650 /* Only regular variables or struct fields may get a
1651 V_MUST_DEF operand. */
1652 gcc_assert (!MTAG_P (var)
1653 || TREE_CODE (var) == STRUCT_FIELD_TAG);
1654 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1655 variable definitions. */
1656 append_v_must_def (var);
1657 }
1658 else
1659 {
1660 /* Add a V_MAY_DEF for call-clobbered variables and
1661 memory tags. */
1662 append_v_may_def (var);
1663 }
1664 }
1665 else
1666 append_vuse (var);
1667 }
1668 else
1669 {
1670 unsigned i;
1671 tree al;
1672
1673 /* The variable is aliased. Add its aliases to the virtual
1674 operands. */
1675 gcc_assert (VEC_length (tree, aliases) != 0);
1676
1677 if (flags & opf_is_def)
1678 {
1679 /* If the variable is also an alias tag, add a virtual
1680 operand for it, otherwise we will miss representing
1681 references to the members of the variable's alias set.
1682 This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
1683 if (v_ann->is_alias_tag)
1684 append_v_may_def (var);
1685
1686 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1687 append_v_may_def (al);
1688 }
1689 else
1690 {
1691 /* Similarly, append a virtual use for VAR itself, when
1692 it is an alias tag. */
1693 if (v_ann->is_alias_tag)
1694 append_vuse (var);
1695
1696 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1697 append_vuse (al);
1698 }
1699 }
1700 }
1701 }
1702
1703
1704 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
1705 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
1706 a single variable whose address has been taken or any other valid
1707 GIMPLE memory reference (structure reference, array, etc). If the
1708 base address of REF is a decl that has sub-variables, also add all
1709 of its sub-variables. */
1710
1711 void
1712 add_to_addressable_set (tree ref, bitmap *addresses_taken)
1713 {
1714 tree var;
1715 subvar_t svars;
1716
1717 gcc_assert (addresses_taken);
1718
1719 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
1720 as the only thing we take the address of. If VAR is a structure,
1721 taking the address of a field means that the whole structure may
1722 be referenced using pointer arithmetic. See PR 21407 and the
1723 ensuing mailing list discussion. */
1724 var = get_base_address (ref);
1725 if (var && SSA_VAR_P (var))
1726 {
1727 if (*addresses_taken == NULL)
1728 *addresses_taken = BITMAP_GGC_ALLOC ();
1729
1730 if (var_can_have_subvars (var)
1731 && (svars = get_subvars_for_var (var)))
1732 {
1733 subvar_t sv;
1734 for (sv = svars; sv; sv = sv->next)
1735 {
1736 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
1737 TREE_ADDRESSABLE (sv->var) = 1;
1738 }
1739 }
1740 else
1741 {
1742 bitmap_set_bit (*addresses_taken, DECL_UID (var));
1743 TREE_ADDRESSABLE (var) = 1;
1744 }
1745 }
1746 }
1747
1748 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1749 clobbered variables in the function. */
1750
1751 static void
1752 add_call_clobber_ops (tree stmt, tree callee)
1753 {
1754 unsigned u;
1755 bitmap_iterator bi;
1756 stmt_ann_t s_ann = stmt_ann (stmt);
1757 bitmap not_read_b, not_written_b;
1758
1759 /* Functions that are neither const, pure nor noreturn may clobber
1760 call-clobbered variables. */
1761 if (s_ann)
1762 s_ann->makes_clobbering_call = true;
1763
1764 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1765 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1766 if (global_var)
1767 {
1768 add_stmt_operand (&global_var, s_ann, opf_is_def);
1769 return;
1770 }
1771
1772 /* Get info for local and module level statics. There is a bit
1773 set for each static if the call being processed does not read
1774 or write that variable. */
1775
1776 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1777 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1778 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1779 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1780 {
1781 tree var = referenced_var_lookup (u);
1782 unsigned int escape_mask = var_ann (var)->escape_mask;
1783 tree real_var = var;
1784 bool not_read;
1785 bool not_written;
1786
1787 /* Not read and not written are computed on regular vars, not
1788 subvars, so look at the parent var if this is an SFT. */
1789
1790 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1791 real_var = SFT_PARENT_VAR (var);
1792
1793 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1794 DECL_UID (real_var)) : false;
1795 not_written = not_written_b ? bitmap_bit_p (not_written_b,
1796 DECL_UID (real_var)) : false;
1797 gcc_assert (!unmodifiable_var_p (var));
1798
1799 clobber_stats.clobbered_vars++;
1800
1801 /* See if this variable is really clobbered by this function. */
1802
1803 /* Trivial case: Things escaping only to pure/const are not
1804 clobbered by non-pure-const, and only read by pure/const. */
1805 if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
1806 {
1807 tree call = get_call_expr_in (stmt);
1808 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1809 {
1810 add_stmt_operand (&var, s_ann, opf_none);
1811 clobber_stats.unescapable_clobbers_avoided++;
1812 continue;
1813 }
1814 else
1815 {
1816 clobber_stats.unescapable_clobbers_avoided++;
1817 continue;
1818 }
1819 }
1820
1821 if (not_written)
1822 {
1823 clobber_stats.static_write_clobbers_avoided++;
1824 if (!not_read)
1825 add_stmt_operand (&var, s_ann, opf_none);
1826 else
1827 clobber_stats.static_read_clobbers_avoided++;
1828 }
1829 else
1830 add_stmt_operand (&var, s_ann, opf_is_def);
1831 }
1832
1833 }
1834
1835
1836 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1837 function. */
1838
1839 static void
1840 add_call_read_ops (tree stmt, tree callee)
1841 {
1842 unsigned u;
1843 bitmap_iterator bi;
1844 stmt_ann_t s_ann = stmt_ann (stmt);
1845 bitmap not_read_b;
1846
1847 /* If the function is not pure, it may reference memory. Add
1848 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1849 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1850 if (global_var)
1851 {
1852 add_stmt_operand (&global_var, s_ann, opf_none);
1853 return;
1854 }
1855
1856 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1857
1858 /* Add a VUSE for each call-clobbered variable. */
1859 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1860 {
1861 tree var = referenced_var (u);
1862 tree real_var = var;
1863 bool not_read;
1864
1865 clobber_stats.readonly_clobbers++;
1866
1867 /* Not read and not written are computed on regular vars, not
1868 subvars, so look at the parent var if this is an SFT. */
1869
1870 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1871 real_var = SFT_PARENT_VAR (var);
1872
1873 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1874 DECL_UID (real_var)) : false;
1875
1876 if (not_read)
1877 {
1878 clobber_stats.static_readonly_clobbers_avoided++;
1879 continue;
1880 }
1881
1882 add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
1883 }
1884 }
1885
1886
1887 /* Scan the immediate_use list for VAR, making sure it is linked properly.
1888 Return TRUE if there is a problem. */
1889
1890 bool
1891 verify_imm_links (FILE *f, tree var)
1892 {
1893 use_operand_p ptr, prev, list;
1894 int count;
1895
1896 gcc_assert (TREE_CODE (var) == SSA_NAME);
1897
1898 list = &(SSA_NAME_IMM_USE_NODE (var));
1899 gcc_assert (list->use == NULL);
1900
1901 if (list->prev == NULL)
1902 {
1903 gcc_assert (list->next == NULL);
1904 return false;
1905 }
1906
1907 prev = list;
1908 count = 0;
1909 for (ptr = list->next; ptr != list; )
1910 {
1911 if (prev != ptr->prev)
1912 goto error;
1913
1914 if (ptr->use == NULL)
1915 goto error; /* 2 roots, or SAFE guard node. */
1916 else if (*(ptr->use) != var)
1917 goto error;
1918
1919 prev = ptr;
1920 ptr = ptr->next;
1921 /* Avoid infinite loops. 50,000,000 uses probably indicates a problem. */
1922 if (count++ > 50000000)
1923 goto error;
1924 }
1925
1926 /* Verify list in the other direction. */
1927 prev = list;
1928 for (ptr = list->prev; ptr != list; )
1929 {
1930 if (prev != ptr->next)
1931 goto error;
1932 prev = ptr;
1933 ptr = ptr->prev;
1934 if (count-- < 0)
1935 goto error;
1936 }
1937
1938 if (count != 0)
1939 goto error;
1940
1941 return false;
1942
1943 error:
1944 if (ptr->stmt && stmt_modified_p (ptr->stmt))
1945 {
1946 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
1947 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
1948 }
1949 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
1950 (void *)ptr->use);
1951 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
1952 fprintf(f, "\n");
1953 return true;
1954 }
1955
1956
1957 /* Dump all the immediate uses of VAR to FILE. */
1958
1959 void
1960 dump_immediate_uses_for (FILE *file, tree var)
1961 {
1962 imm_use_iterator iter;
1963 use_operand_p use_p;
1964
1965 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
1966
1967 print_generic_expr (file, var, TDF_SLIM);
1968 fprintf (file, " : -->");
1969 if (has_zero_uses (var))
1970 fprintf (file, " no uses.\n");
1971 else
1972 if (has_single_use (var))
1973 fprintf (file, " single use.\n");
1974 else
1975 fprintf (file, "%d uses.\n", num_imm_uses (var));
1976
1977 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
1978 {
1979 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
1980 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
1981 else
1982 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
1983 }
1984 fprintf(file, "\n");
1985 }
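
/* The same iteration serves any consumer of def-use edges. As an
   illustrative sketch, counting the uses of VAR that appear in a given
   basic block BB (both variables hypothetical):

     unsigned count = 0;
     imm_use_iterator iter;
     use_operand_p use_p;

     FOR_EACH_IMM_USE_FAST (use_p, iter, var)
       if (bb_for_stmt (USE_STMT (use_p)) == bb)
         count++;  */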
1986
1987 /* Dump the immediate uses of every SSA name to FILE. */
1988
1989 void
1990 dump_immediate_uses (FILE *file)
1991 {
1992 tree var;
1993 unsigned int x;
1994
1995 fprintf (file, "Immediate_uses: \n\n");
1996 for (x = 1; x < num_ssa_names; x++)
1997 {
1998 var = ssa_name(x);
1999 if (!var)
2000 continue;
2001 dump_immediate_uses_for (file, var);
2002 }
2003 }
2004
2005
2006 /* Dump def-use edges on stderr. */
2007
2008 void
2009 debug_immediate_uses (void)
2010 {
2011 dump_immediate_uses (stderr);
2012 }
2013
2014 /* Dump def-use edges for VAR on stderr. */
2015
2016 void
2017 debug_immediate_uses_for (tree var)
2018 {
2019 dump_immediate_uses_for (stderr, var);
2020 }
2021 #include "gt-tree-ssa-operands.h"