tree-ssa-operands.c (add_call_clobber_ops): Use SFT_PARENT_VAR to try to avoid clobbe...
1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53 The operand tree is then parsed by the various get_* routines, which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
59
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
62
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
66
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had SSA_NAME version of a
72 variable, and that same variable occurs in the same operands cache, then
73 the new cache vector will also get the same SSA_NAME.
74
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'.
78
79 */
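/* A minimal sketch of how a pass consumes this cache.  The function below
   is purely illustrative (nothing in this file calls it); it relies only on
   the iterator macros from tree-ssa-operands.h and the dump routines that
   are already used elsewhere in this file.  */

static void
example_dump_stmt_operands (FILE *file, tree stmt)
{
  tree var;
  ssa_op_iter iter;

  /* Real uses are pointers into the statement tree, so the returned trees
     are SSA_NAMEs or DECLs that literally occur in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
    {
      print_generic_expr (file, var, TDF_SLIM);
      fprintf (file, " ");
    }

  /* Virtual uses live only in the operand cache; their base variable need
     not appear in the statement tree at all.  */
  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_VUSE)
    {
      print_generic_expr (file, var, TDF_SLIM);
      fprintf (file, " ");
    }

  fprintf (file, "\n");
}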
80
81
82 /* Flags to describe operand properties in helpers. */
83
84 /* By default, operands are loaded. */
85 #define opf_none 0
86
87 /* Operand is the target of an assignment expression or a
88 call-clobbered variable */
89 #define opf_is_def (1 << 0)
90
91 /* Operand is the target of an assignment expression. */
92 #define opf_kill_def (1 << 1)
93
94 /* No virtual operands should be created in the expression. This is used
95 when traversing ADDR_EXPR nodes which have different semantics than
96 other expressions. Inside an ADDR_EXPR node, the only operands that we
97 need to consider are indices into arrays. For instance, &a.b[i] should
98 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
99 VUSE for 'b'. */
100 #define opf_no_vops (1 << 2)
101
102 /* Operand is a "non-specific" kill for call-clobbers and such. This is used
103 to distinguish "reset the world" events from explicit MODIFY_EXPRs. */
104 #define opf_non_specific (1 << 3)
105
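/* For illustration, the calls below mirror how the rest of this file
   combines these flags (see parse_ssa_operands and get_expr_operands);
   they are not additional definitions:

     get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
                        opf_is_def | opf_kill_def);   full store: killing def
     get_expr_operands (stmt, &TREE_OPERAND (stmt, 0),
                        opf_is_def);                   partial store: may-def
     get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
                        flags | opf_no_vops);          inside an ADDR_EXPR  */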
106
107 /* Array for building all the def operands. */
108 static VEC(tree,heap) *build_defs;
109
110 /* Array for building all the use operands. */
111 static VEC(tree,heap) *build_uses;
112
113 /* Array for building all the v_may_def operands. */
114 static VEC(tree,heap) *build_v_may_defs;
115
116 /* Array for building all the vuse operands. */
117 static VEC(tree,heap) *build_vuses;
118
119 /* Array for building all the v_must_def operands. */
120 static VEC(tree,heap) *build_v_must_defs;
121
122 /* True if the operands for call clobbered vars are cached and valid. */
123 bool ssa_call_clobbered_cache_valid;
124 bool ssa_ro_call_cache_valid;
125
126 /* These arrays are the cached operand vectors for call clobbered calls. */
127 static VEC(tree,heap) *clobbered_v_may_defs;
128 static VEC(tree,heap) *clobbered_vuses;
129 static VEC(tree,heap) *ro_call_vuses;
130 static bool ops_active = false;
131
132 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
133 static unsigned operand_memory_index;
134
135 static void get_expr_operands (tree, tree *, int);
136 static void get_asm_expr_operands (tree);
137 static void get_indirect_ref_operands (tree, tree, int);
138 static void get_tmr_operands (tree, tree, int);
139 static void get_call_expr_operands (tree, tree);
140 static inline void append_def (tree *);
141 static inline void append_use (tree *);
142 static void append_v_may_def (tree);
143 static void append_v_must_def (tree);
144 static void add_call_clobber_ops (tree, tree);
145 static void add_call_read_ops (tree);
146 static void add_stmt_operand (tree *, stmt_ann_t, int);
147 static void build_ssa_operands (tree stmt);
148
149 static def_optype_p free_defs = NULL;
150 static use_optype_p free_uses = NULL;
151 static vuse_optype_p free_vuses = NULL;
152 static maydef_optype_p free_maydefs = NULL;
153 static mustdef_optype_p free_mustdefs = NULL;
154
155
156 /* Return the DECL_UID of the base variable of T. */
157
158 static inline unsigned
159 get_name_decl (tree t)
160 {
161 if (TREE_CODE (t) != SSA_NAME)
162 return DECL_UID (t);
163 else
164 return DECL_UID (SSA_NAME_VAR (t));
165 }
166
167 /* Comparison function for qsort used in operand_build_sort_virtual. */
168
169 static int
170 operand_build_cmp (const void *p, const void *q)
171 {
172 tree e1 = *((const tree *)p);
173 tree e2 = *((const tree *)q);
174 unsigned int u1,u2;
175
176 u1 = get_name_decl (e1);
177 u2 = get_name_decl (e2);
178
179 /* We want to sort in ascending order. They can never be equal. */
180 #ifdef ENABLE_CHECKING
181 gcc_assert (u1 != u2);
182 #endif
183 return (u1 > u2 ? 1 : -1);
184 }
185
186 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
187
188 static inline void
189 operand_build_sort_virtual (VEC(tree,heap) *list)
190 {
191 int num = VEC_length (tree, list);
192 if (num < 2)
193 return;
194 if (num == 2)
195 {
196 if (get_name_decl (VEC_index (tree, list, 0))
197 > get_name_decl (VEC_index (tree, list, 1)))
198 {
199 /* Swap elements if in the wrong order. */
200 tree tmp = VEC_index (tree, list, 0);
201 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
202 VEC_replace (tree, list, 1, tmp);
203 }
204 return;
205 }
206 /* There are 3 or more elements, call qsort. */
207 qsort (VEC_address (tree, list),
208 VEC_length (tree, list),
209 sizeof (tree),
210 operand_build_cmp);
211 }
212
213
214
215 /* Return true if the ssa operands cache is active. */
216
217 bool
218 ssa_operands_active (void)
219 {
220 return ops_active;
221 }
222
223
224 /* Initialize the operand cache routines. */
225
226 void
227 init_ssa_operands (void)
228 {
229 build_defs = VEC_alloc (tree, heap, 5);
230 build_uses = VEC_alloc (tree, heap, 10);
231 build_vuses = VEC_alloc (tree, heap, 25);
232 build_v_may_defs = VEC_alloc (tree, heap, 25);
233 build_v_must_defs = VEC_alloc (tree, heap, 25);
234
235 gcc_assert (operand_memory == NULL);
236 operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
237 ops_active = true;
238 }
239
240
241 /* Dispose of anything required by the operand routines. */
242
243 void
244 fini_ssa_operands (void)
245 {
246 struct ssa_operand_memory_d *ptr;
247 VEC_free (tree, heap, build_defs);
248 VEC_free (tree, heap, build_uses);
249 VEC_free (tree, heap, build_v_must_defs);
250 VEC_free (tree, heap, build_v_may_defs);
251 VEC_free (tree, heap, build_vuses);
252 free_defs = NULL;
253 free_uses = NULL;
254 free_vuses = NULL;
255 free_maydefs = NULL;
256 free_mustdefs = NULL;
257 while ((ptr = operand_memory) != NULL)
258 {
259 operand_memory = operand_memory->next;
260 ggc_free (ptr);
261 }
262
263 VEC_free (tree, heap, clobbered_v_may_defs);
264 VEC_free (tree, heap, clobbered_vuses);
265 VEC_free (tree, heap, ro_call_vuses);
266 ops_active = false;
267 }
268
269
270 /* Return a pointer to SIZE bytes of memory for operands. */
271
272 static inline void *
273 ssa_operand_alloc (unsigned size)
274 {
275 char *ptr;
276 if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
277 {
278 struct ssa_operand_memory_d *ptr;
279 ptr = GGC_NEW (struct ssa_operand_memory_d);
280 ptr->next = operand_memory;
281 operand_memory = ptr;
282 operand_memory_index = 0;
283 }
284 ptr = &(operand_memory->mem[operand_memory_index]);
285 operand_memory_index += size;
286 return ptr;
287 }
288
289
290 /* Make sure PTR is in the correct immediate use list. Since uses are simply
291 pointers into the stmt TREE, there is no way of telling if anyone has
292 changed what this pointer points to via TREE_OPERAND (exp, 0) = <...>.
293 The contents are different, but the pointer is still the same. This
294 routine will check to make sure PTR is in the correct list, and if it isn't,
295 put it in the correct list. We cannot simply check the previous node
296 because all nodes in the same stmt might have been changed.
297
298 static inline void
299 correct_use_link (use_operand_p ptr, tree stmt)
300 {
301 use_operand_p prev;
302 tree root;
303
304 /* Fold_stmt () may have changed the stmt pointers. */
305 if (ptr->stmt != stmt)
306 ptr->stmt = stmt;
307
308 prev = ptr->prev;
309 if (prev)
310 {
311 /* Find the root element, making sure we skip any safe iterators. */
312 while (prev->use != NULL || prev->stmt == NULL)
313 prev = prev->prev;
314
315 /* Get the ssa_name of the list the node is in. */
316 root = prev->stmt;
317 /* If it's the right list, simply return. */
318 if (root == *(ptr->use))
319 return;
320 }
321 /* It's in the wrong list if we reach here. */
322 delink_imm_use (ptr);
323 link_imm_use (ptr, *(ptr->use));
324 }
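/* To illustrate the situation handled above: code that overwrites a use
   directly through its cached pointer, e.g.

     *(use_p->use) = new_name;

   leaves USE_P linked on the immediate-use list of the old SSA_NAME even
   though it now refers to NEW_NAME.  correct_use_link detects this and
   relinks the node onto the list of *(use_p->use).  Passes that go through
   SET_USE instead get the relinking done immediately.  */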
325
326
327 /* This routine makes sure that PTR is in an immediate use list, and makes
328 sure the stmt pointer is set to the current stmt. Virtual uses do not need
329 the overhead of correct_use_link since they cannot be directly manipulated
330 like a real use can be. (They don't exist in the TREE_OPERAND nodes.) */
331 static inline void
332 set_virtual_use_link (use_operand_p ptr, tree stmt)
333 {
334 /* Fold_stmt () may have changed the stmt pointers. */
335 if (ptr->stmt != stmt)
336 ptr->stmt = stmt;
337
338 /* If this use isn't in a list, add it to the correct list. */
339 if (!ptr->prev)
340 link_imm_use (ptr, *(ptr->use));
341 }
342
343
344
345 #define FINALIZE_OPBUILD build_defs
346 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
347 build_defs, (I))
348 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
349 build_defs, (I))
350 #define FINALIZE_FUNC finalize_ssa_def_ops
351 #define FINALIZE_ALLOC alloc_def
352 #define FINALIZE_FREE free_defs
353 #define FINALIZE_TYPE struct def_optype_d
354 #define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
355 #define FINALIZE_OPS DEF_OPS
356 #define FINALIZE_BASE(VAR) VAR
357 #define FINALIZE_BASE_TYPE tree *
358 #define FINALIZE_BASE_ZERO NULL
359 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
360 #include "tree-ssa-opfinalize.h"
361
362
363 /* This routine will create stmt operands for STMT from the def build list. */
364
365 static void
366 finalize_ssa_defs (tree stmt)
367 {
368 unsigned int num = VEC_length (tree, build_defs);
369 /* There should only be a single real definition per assignment. */
370 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
371
372 /* If there is an old list, often the new list is identical, or close, so
373 find the elements at the beginning that are the same as the vector. */
374
375 finalize_ssa_def_ops (stmt);
376 VEC_truncate (tree, build_defs, 0);
377 }
378
379 #define FINALIZE_OPBUILD build_uses
380 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
381 build_uses, (I))
382 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
383 build_uses, (I))
384 #define FINALIZE_FUNC finalize_ssa_use_ops
385 #define FINALIZE_ALLOC alloc_use
386 #define FINALIZE_FREE free_uses
387 #define FINALIZE_TYPE struct use_optype_d
388 #define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
389 #define FINALIZE_OPS USE_OPS
390 #define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
391 #define FINALIZE_CORRECT_USE correct_use_link
392 #define FINALIZE_BASE(VAR) VAR
393 #define FINALIZE_BASE_TYPE tree *
394 #define FINALIZE_BASE_ZERO NULL
395 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
396 (PTR)->use_ptr.use = (VAL); \
397 link_imm_use_stmt (&((PTR)->use_ptr), \
398 *(VAL), (STMT))
399 #include "tree-ssa-opfinalize.h"
400
401 /* Build the USE operands for STMT from the build_uses vector. */
402
403 static void
404 finalize_ssa_uses (tree stmt)
405 {
406 #ifdef ENABLE_CHECKING
407 {
408 unsigned x;
409 unsigned num = VEC_length (tree, build_uses);
410
411 /* If the pointer to the operand is the statement itself, something is
412 wrong. It means that we are pointing to a local variable (the
413 initial call to get_stmt_operands does not pass a pointer to a
414 statement). */
415 for (x = 0; x < num; x++)
416 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
417 }
418 #endif
419 finalize_ssa_use_ops (stmt);
420 VEC_truncate (tree, build_uses, 0);
421 }
422
423
424 /* Build the V_MAY_DEF operands for STMT from the build_v_may_defs vector. */
425 #define FINALIZE_OPBUILD build_v_may_defs
426 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_may_defs, (I))
427 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
428 build_v_may_defs, (I)))
429 #define FINALIZE_FUNC finalize_ssa_v_may_def_ops
430 #define FINALIZE_ALLOC alloc_maydef
431 #define FINALIZE_FREE free_maydefs
432 #define FINALIZE_TYPE struct maydef_optype_d
433 #define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
434 #define FINALIZE_OPS MAYDEF_OPS
435 #define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
436 #define FINALIZE_CORRECT_USE set_virtual_use_link
437 #define FINALIZE_BASE_ZERO 0
438 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
439 #define FINALIZE_BASE_TYPE unsigned
440 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
441 (PTR)->def_var = (VAL); \
442 (PTR)->use_var = (VAL); \
443 (PTR)->use_ptr.use = &((PTR)->use_var); \
444 link_imm_use_stmt (&((PTR)->use_ptr), \
445 (VAL), (STMT))
446 #include "tree-ssa-opfinalize.h"
447
448
449 static void
450 finalize_ssa_v_may_defs (tree stmt)
451 {
452 finalize_ssa_v_may_def_ops (stmt);
453 }
454
455
456 /* Clear the in_list bits and empty the build array for v_may_defs. */
457
458 static inline void
459 cleanup_v_may_defs (void)
460 {
461 unsigned x, num;
462 num = VEC_length (tree, build_v_may_defs);
463
464 for (x = 0; x < num; x++)
465 {
466 tree t = VEC_index (tree, build_v_may_defs, x);
467 if (TREE_CODE (t) != SSA_NAME)
468 {
469 var_ann_t ann = var_ann (t);
470 ann->in_v_may_def_list = 0;
471 }
472 }
473 VEC_truncate (tree, build_v_may_defs, 0);
474 }
475
476
477 #define FINALIZE_OPBUILD build_vuses
478 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_vuses, (I))
479 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
480 build_vuses, (I)))
481 #define FINALIZE_FUNC finalize_ssa_vuse_ops
482 #define FINALIZE_ALLOC alloc_vuse
483 #define FINALIZE_FREE free_vuses
484 #define FINALIZE_TYPE struct vuse_optype_d
485 #define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
486 #define FINALIZE_OPS VUSE_OPS
487 #define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
488 #define FINALIZE_CORRECT_USE set_virtual_use_link
489 #define FINALIZE_BASE_ZERO 0
490 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
491 #define FINALIZE_BASE_TYPE unsigned
492 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
493 (PTR)->use_var = (VAL); \
494 (PTR)->use_ptr.use = &((PTR)->use_var); \
495 link_imm_use_stmt (&((PTR)->use_ptr), \
496 (VAL), (STMT))
497 #include "tree-ssa-opfinalize.h"
498
499
500 /* Build the VUSE operands for STMT from the build_vuses vector. */
501
502 static void
503 finalize_ssa_vuses (tree stmt)
504 {
505 unsigned num, num_v_may_defs;
506 unsigned vuse_index;
507
508 /* Remove superfluous VUSE operands. If the statement already has a
509 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
510 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
511 suppose that variable 'a' is aliased:
512
513 # VUSE <a_2>
514 # a_3 = V_MAY_DEF <a_2>
515 a = a + 1;
516
517 The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
518 operation. */
519
520 num = VEC_length (tree, build_vuses);
521 num_v_may_defs = VEC_length (tree, build_v_may_defs);
522
523 if (num > 0 && num_v_may_defs > 0)
524 {
525 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
526 {
527 tree vuse;
528 vuse = VEC_index (tree, build_vuses, vuse_index);
529 if (TREE_CODE (vuse) != SSA_NAME)
530 {
531 var_ann_t ann = var_ann (vuse);
532 ann->in_vuse_list = 0;
533 if (ann->in_v_may_def_list)
534 {
535 VEC_ordered_remove (tree, build_vuses, vuse_index);
536 continue;
537 }
538 }
539 vuse_index++;
540 }
541 }
542 else
543 /* Clear out the in_list bits. */
544 for (vuse_index = 0;
545 vuse_index < VEC_length (tree, build_vuses);
546 vuse_index++)
547 {
548 tree t = VEC_index (tree, build_vuses, vuse_index);
549 if (TREE_CODE (t) != SSA_NAME)
550 {
551 var_ann_t ann = var_ann (t);
552 ann->in_vuse_list = 0;
553 }
554 }
555
556 finalize_ssa_vuse_ops (stmt);
557 /* The v_may_def build vector wasn't cleaned up because we needed it. */
558 cleanup_v_may_defs ();
559
560 /* Free the vuses build vector. */
561 VEC_truncate (tree, build_vuses, 0);
562
563 }
564
565 /* Build the V_MUST_DEF operands for STMT from the build_v_must_defs vector. */
566
567 #define FINALIZE_OPBUILD build_v_must_defs
568 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_must_defs, (I))
569 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
570 build_v_must_defs, (I)))
571 #define FINALIZE_FUNC finalize_ssa_v_must_def_ops
572 #define FINALIZE_ALLOC alloc_mustdef
573 #define FINALIZE_FREE free_mustdefs
574 #define FINALIZE_TYPE struct mustdef_optype_d
575 #define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
576 #define FINALIZE_OPS MUSTDEF_OPS
577 #define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
578 #define FINALIZE_CORRECT_USE set_virtual_use_link
579 #define FINALIZE_BASE_ZERO 0
580 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
581 #define FINALIZE_BASE_TYPE unsigned
582 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
583 (PTR)->def_var = (VAL); \
584 (PTR)->kill_var = (VAL); \
585 (PTR)->use_ptr.use = &((PTR)->kill_var);\
586 link_imm_use_stmt (&((PTR)->use_ptr), \
587 (VAL), (STMT))
588 #include "tree-ssa-opfinalize.h"
589
590
591 static void
592 finalize_ssa_v_must_defs (tree stmt)
593 {
594 /* In the presence of subvars, there may be more than one V_MUST_DEF per
595 statement (one for each subvar). It is a bit expensive to verify that
596 all must-defs in a statement belong to subvars if there is more than one
597 MUST-def, so we don't do it. Suffice it to say, if you reach here
598 without having subvars, and num > 1, you have hit a bug. */
599
600 finalize_ssa_v_must_def_ops (stmt);
601 VEC_truncate (tree, build_v_must_defs, 0);
602 }
603
604
605 /* Finalize all the build vectors and fill the new operands into STMT. */
606
607 static inline void
608 finalize_ssa_stmt_operands (tree stmt)
609 {
610 finalize_ssa_defs (stmt);
611 finalize_ssa_uses (stmt);
612 finalize_ssa_v_must_defs (stmt);
613 finalize_ssa_v_may_defs (stmt);
614 finalize_ssa_vuses (stmt);
615 }
616
617
618 /* Start the process of building up operand vectors. */
619
620 static inline void
621 start_ssa_stmt_operands (void)
622 {
623 gcc_assert (VEC_length (tree, build_defs) == 0);
624 gcc_assert (VEC_length (tree, build_uses) == 0);
625 gcc_assert (VEC_length (tree, build_vuses) == 0);
626 gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
627 gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
628 }
629
630
631 /* Add DEF_P to the list of pointers to operands. */
632
633 static inline void
634 append_def (tree *def_p)
635 {
636 VEC_safe_push (tree, heap, build_defs, (tree)def_p);
637 }
638
639
640 /* Add USE_P to the list of pointers to operands. */
641
642 static inline void
643 append_use (tree *use_p)
644 {
645 VEC_safe_push (tree, heap, build_uses, (tree)use_p);
646 }
647
648
649 /* Add a new virtual may def for variable VAR to the build array. */
650
651 static inline void
652 append_v_may_def (tree var)
653 {
654 if (TREE_CODE (var) != SSA_NAME)
655 {
656 var_ann_t ann = get_var_ann (var);
657
658 /* Don't allow duplicate entries. */
659 if (ann->in_v_may_def_list)
660 return;
661 ann->in_v_may_def_list = 1;
662 }
663
664 VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
665 }
666
667
668 /* Add VAR to the list of virtual uses. */
669
670 static inline void
671 append_vuse (tree var)
672 {
673
674 /* Don't allow duplicate entries. */
675 if (TREE_CODE (var) != SSA_NAME)
676 {
677 var_ann_t ann = get_var_ann (var);
678
679 if (ann->in_vuse_list || ann->in_v_may_def_list)
680 return;
681 ann->in_vuse_list = 1;
682 }
683
684 VEC_safe_push (tree, heap, build_vuses, (tree)var);
685 }
686
687
688 /* Add VAR to the list of virtual must definitions. */
689
690 static inline void
691 append_v_must_def (tree var)
692 {
693 unsigned i;
694
695 /* Don't allow duplicate entries. */
696 for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
697 if (var == VEC_index (tree, build_v_must_defs, i))
698 return;
699
700 VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
701 }
702
703
704 /* Parse STMT looking for operands. When finished, the various build_*
705 operand vectors will have potential operands in them. */
707
708 static void
709 parse_ssa_operands (tree stmt)
710 {
711 enum tree_code code;
712
713 code = TREE_CODE (stmt);
714 switch (code)
715 {
716 case MODIFY_EXPR:
717 /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
718 either only part of LHS is modified or if the RHS might throw,
719 otherwise, use V_MUST_DEF.
720
721 ??? If it might throw, we should represent somehow that it is killed
722 on the fallthrough path. */
723 {
724 tree lhs = TREE_OPERAND (stmt, 0);
725 int lhs_flags = opf_is_def;
726
727 get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
728
729 /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
730 or not the entire LHS is modified; that depends on what's
731 inside the VIEW_CONVERT_EXPR. */
732 if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
733 lhs = TREE_OPERAND (lhs, 0);
734
735 if (TREE_CODE (lhs) != ARRAY_REF
736 && TREE_CODE (lhs) != ARRAY_RANGE_REF
737 && TREE_CODE (lhs) != BIT_FIELD_REF)
738 lhs_flags |= opf_kill_def;
739
740 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
741 }
742 break;
743
744 case COND_EXPR:
745 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
746 break;
747
748 case SWITCH_EXPR:
749 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
750 break;
751
752 case ASM_EXPR:
753 get_asm_expr_operands (stmt);
754 break;
755
756 case RETURN_EXPR:
757 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
758 break;
759
760 case GOTO_EXPR:
761 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
762 break;
763
764 case LABEL_EXPR:
765 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
766 break;
767
768 /* These nodes contain no variable references. */
769 case BIND_EXPR:
770 case CASE_LABEL_EXPR:
771 case TRY_CATCH_EXPR:
772 case TRY_FINALLY_EXPR:
773 case EH_FILTER_EXPR:
774 case CATCH_EXPR:
775 case RESX_EXPR:
776 break;
777
778 default:
779 /* Notice that if get_expr_operands tries to use &STMT as the operand
780 pointer (which may only happen for USE operands), we will fail in
781 append_use. This default will handle statements like empty
782 statements, or CALL_EXPRs that may appear on the RHS of a statement
783 or as statements themselves. */
784 get_expr_operands (stmt, &stmt, opf_none);
785 break;
786 }
787 }
788
789 /* Create an operands cache for STMT. */
790
791 static void
792 build_ssa_operands (tree stmt)
793 {
794 stmt_ann_t ann = get_stmt_ann (stmt);
795
796 /* Initially assume that the statement has no volatile operands. */
797 if (ann)
798 ann->has_volatile_ops = false;
799
800 start_ssa_stmt_operands ();
801
802 parse_ssa_operands (stmt);
803 operand_build_sort_virtual (build_vuses);
804 operand_build_sort_virtual (build_v_may_defs);
805 operand_build_sort_virtual (build_v_must_defs);
806
807 finalize_ssa_stmt_operands (stmt);
808 }
809
810
811 /* Free any operands vectors in OPS. */
812 void
813 free_ssa_operands (stmt_operands_p ops)
814 {
815 ops->def_ops = NULL;
816 ops->use_ops = NULL;
817 ops->maydef_ops = NULL;
818 ops->mustdef_ops = NULL;
819 ops->vuse_ops = NULL;
820 }
821
822
823 /* Get the operands of statement STMT. Note that repeated calls to
824 get_stmt_operands for the same statement will do nothing until the
825 statement is marked modified by a call to mark_stmt_modified(). */
826
827 void
828 update_stmt_operands (tree stmt)
829 {
830 stmt_ann_t ann = get_stmt_ann (stmt);
831 /* If get_stmt_operands is called before SSA is initialized, don't
832 do anything. */
833 if (!ssa_operands_active ())
834 return;
835 /* The optimizers cannot handle statements that are nothing but a
836 _DECL. This indicates a bug in the gimplifier. */
837 gcc_assert (!SSA_VAR_P (stmt));
838
839 gcc_assert (ann->modified);
840
841 timevar_push (TV_TREE_OPS);
842
843 build_ssa_operands (stmt);
844
845 /* Clear the modified bit for STMT. Subsequent calls to
846 get_stmt_operands for this statement will do nothing until the
847 statement is marked modified by a call to mark_stmt_modified(). */
848 ann->modified = 0;
849
850 timevar_pop (TV_TREE_OPS);
851 }
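/* A minimal sketch of the update cycle described above, assuming the
   mark_stmt_modified () and update_stmt () helpers that accompany this API
   in tree-flow-inline.h (NEW_RHS stands for whatever replacement expression
   a pass has built):

     TREE_OPERAND (stmt, 1) = new_rhs;
     mark_stmt_modified (stmt);
     update_stmt_operands (stmt);

   update_stmt () is the usual wrapper that performs the last two steps.  */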
852
853
854 /* Copies virtual operands from SRC to DEST. */
855
856 void
857 copy_virtual_operands (tree dest, tree src)
858 {
859 tree t;
860 ssa_op_iter iter, old_iter;
861 use_operand_p use_p, u2;
862 def_operand_p def_p, d2;
863
864 build_ssa_operands (dest);
865
866 /* Copy all the virtual fields. */
867 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
868 append_vuse (t);
869 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
870 append_v_may_def (t);
871 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
872 append_v_must_def (t);
873
874 if (VEC_length (tree, build_vuses) == 0
875 && VEC_length (tree, build_v_may_defs) == 0
876 && VEC_length (tree, build_v_must_defs) == 0)
877 return;
878
879 /* Now commit the virtual operands to this stmt. */
880 finalize_ssa_v_must_defs (dest);
881 finalize_ssa_v_may_defs (dest);
882 finalize_ssa_vuses (dest);
883
884 /* Finally, set the fields to the same values as the originals. */
885
886
887 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
888 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
889 {
890 gcc_assert (!op_iter_done (&old_iter));
891 SET_USE (use_p, t);
892 t = op_iter_next_tree (&old_iter);
893 }
894 gcc_assert (op_iter_done (&old_iter));
895
896 op_iter_init_maydef (&old_iter, src, &u2, &d2);
897 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
898 {
899 gcc_assert (!op_iter_done (&old_iter));
900 SET_USE (use_p, USE_FROM_PTR (u2));
901 SET_DEF (def_p, DEF_FROM_PTR (d2));
902 op_iter_next_maymustdef (&u2, &d2, &old_iter);
903 }
904 gcc_assert (op_iter_done (&old_iter));
905
906 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
907 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
908 {
909 gcc_assert (!op_iter_done (&old_iter));
910 SET_USE (use_p, USE_FROM_PTR (u2));
911 SET_DEF (def_p, DEF_FROM_PTR (d2));
912 op_iter_next_maymustdef (&u2, &d2, &old_iter);
913 }
914 gcc_assert (op_iter_done (&old_iter));
915
916 }
917
918
919 /* Specifically for use in DOM's expression analysis. Given a store, we
920 create an artificial stmt which looks like a load from the store; this can
921 be used to eliminate redundant loads. OLD_STMT is the store statement,
922 and NEW_STMT is the new load, which represents a load of the values
923 stored. */
924
925 void
926 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
927 {
928 stmt_ann_t ann;
929 tree op;
930 ssa_op_iter iter;
931 use_operand_p use_p;
932 unsigned x;
933
934 ann = get_stmt_ann (new_stmt);
935
936 /* Process the stmt looking for operands. */
937 start_ssa_stmt_operands ();
938 parse_ssa_operands (new_stmt);
939
940 for (x = 0; x < VEC_length (tree, build_vuses); x++)
941 {
942 tree t = VEC_index (tree, build_vuses, x);
943 if (TREE_CODE (t) != SSA_NAME)
944 {
945 var_ann_t ann = var_ann (t);
946 ann->in_vuse_list = 0;
947 }
948 }
949
950 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
951 {
952 tree t = VEC_index (tree, build_v_may_defs, x);
953 if (TREE_CODE (t) != SSA_NAME)
954 {
955 var_ann_t ann = var_ann (t);
956 ann->in_v_may_def_list = 0;
957 }
958 }
959 /* Remove any virtual operands that were found. */
960 VEC_truncate (tree, build_v_may_defs, 0);
961 VEC_truncate (tree, build_v_must_defs, 0);
962 VEC_truncate (tree, build_vuses, 0);
963
964 /* For each VDEF on the original statement, we want to create a
965 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
966 statement. */
967 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
968 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
969 append_vuse (op);
970
971 /* Now build the operands for this new stmt. */
972 finalize_ssa_stmt_operands (new_stmt);
973
974 /* All uses in this fake stmt must not be in the immediate use lists. */
975 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
976 delink_imm_use (use_p);
977 }
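/* A rough sketch of the intended use, loosely modeled on DOM's available
   expression handling.  record_redundancy () is a hypothetical placeholder
   for whatever table the pass maintains; build2 () and unshare_expr () are
   the ordinary tree constructors:

     tree lhs = TREE_OPERAND (store_stmt, 0);
     tree rhs = TREE_OPERAND (store_stmt, 1);
     tree fake_load = build2 (MODIFY_EXPR, TREE_TYPE (rhs),
                              rhs, unshare_expr (lhs));
     create_ssa_artficial_load_stmt (fake_load, store_stmt);
     record_redundancy (fake_load);

   The fake statement "rhs = lhs" reads the stored location, so it can be
   matched against later loads from the same location.  */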
978
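/* Swap operands EXP0 and EXP1 in statement STMT.  If the SSA operand cache
   is active, the immediate-use records of the two operands are also
   exchanged so that each record keeps referring to the value it referred
   to before the swap.  */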
979 void
980 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
981 {
982 tree op0, op1;
983 op0 = *exp0;
984 op1 = *exp1;
985
986 /* If the operand cache is active, attempt to preserve the relative positions
987 of these two operands in their respective immediate use lists. */
988 if (ssa_operands_active () && op0 != op1)
989 {
990 use_optype_p use0, use1, ptr;
991 use0 = use1 = NULL;
992 /* Find the 2 operands in the cache, if they are there. */
993 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
994 if (USE_OP_PTR (ptr)->use == exp0)
995 {
996 use0 = ptr;
997 break;
998 }
999 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1000 if (USE_OP_PTR (ptr)->use == exp1)
1001 {
1002 use1 = ptr;
1003 break;
1004 }
1005 /* If the two uses don't both have operand entries, there isn't much we can
1006 do at this point. Presumably we don't need to worry about it. */
1007 if (use0 && use1)
1008 {
1009 tree *tmp = USE_OP_PTR (use1)->use;
1010 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
1011 USE_OP_PTR (use0)->use = tmp;
1012 }
1013 }
1014
1015 /* Now swap the data. */
1016 *exp0 = op1;
1017 *exp1 = op0;
1018 }
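/* A short usage sketch, assuming tree_swap_operands_p () from fold-const.c:
   canonicalize a commutative RHS without breaking the immediate-use links
   of its two operands.

     tree rhs = TREE_OPERAND (stmt, 1);
     if (tree_swap_operands_p (TREE_OPERAND (rhs, 0),
                               TREE_OPERAND (rhs, 1), true))
       swap_tree_operands (stmt,
                           &TREE_OPERAND (rhs, 0),
                           &TREE_OPERAND (rhs, 1));  */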
1019
1020 /* Recursively scan the expression pointed to by EXPR_P in statement STMT.
1021 FLAGS is a bitmask of OPF_* constants modifying how to interpret the
1022 operands found. */
1023
1024 static void
1025 get_expr_operands (tree stmt, tree *expr_p, int flags)
1026 {
1027 enum tree_code code;
1028 enum tree_code_class class;
1029 tree expr = *expr_p;
1030 stmt_ann_t s_ann = stmt_ann (stmt);
1031
1032 if (expr == NULL)
1033 return;
1034
1035 code = TREE_CODE (expr);
1036 class = TREE_CODE_CLASS (code);
1037
1038 switch (code)
1039 {
1040 case ADDR_EXPR:
1041 /* Taking the address of a variable does not represent a
1042 reference to it, but the fact that the stmt takes its address will be
1043 of interest to some passes (e.g. alias resolution). */
1044 add_to_addressable_set (TREE_OPERAND (expr, 0),
1045 &s_ann->addresses_taken);
1046
1047 /* If the address is invariant, there may be no interesting variable
1048 references inside. */
1049 if (is_gimple_min_invariant (expr))
1050 return;
1051
1052 /* There should be no VUSEs created, since the referenced objects are
1053 not really accessed. The only operands that we should find here
1054 are ARRAY_REF indices which will always be real operands (GIMPLE
1055 does not allow non-registers as array indices). */
1056 flags |= opf_no_vops;
1057
1058 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1059 return;
1060
1061 case SSA_NAME:
1062 case STRUCT_FIELD_TAG:
1063 case TYPE_MEMORY_TAG:
1064 case NAME_MEMORY_TAG:
1065
1066 add_stmt_operand (expr_p, s_ann, flags);
1067 return;
1068
1069 case VAR_DECL:
1070 case PARM_DECL:
1071 case RESULT_DECL:
1072 {
1073 subvar_t svars;
1074
1075 /* Add the subvars for a variable if it has subvars, to DEFS or USES.
1076 Otherwise, add the variable itself.
1077 Whether it goes to USES or DEFS depends on the operand flags. */
1078 if (var_can_have_subvars (expr)
1079 && (svars = get_subvars_for_var (expr)))
1080 {
1081 subvar_t sv;
1082 for (sv = svars; sv; sv = sv->next)
1083 add_stmt_operand (&sv->var, s_ann, flags);
1084 }
1085 else
1086 {
1087 add_stmt_operand (expr_p, s_ann, flags);
1088 }
1089 return;
1090 }
1091 case MISALIGNED_INDIRECT_REF:
1092 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1093 /* fall through */
1094
1095 case ALIGN_INDIRECT_REF:
1096 case INDIRECT_REF:
1097 get_indirect_ref_operands (stmt, expr, flags);
1098 return;
1099
1100 case TARGET_MEM_REF:
1101 get_tmr_operands (stmt, expr, flags);
1102 return;
1103
1104 case ARRAY_REF:
1105 case ARRAY_RANGE_REF:
1106 /* Treat array references as references to the virtual variable
1107 representing the array. The virtual variable for an ARRAY_REF
1108 is the VAR_DECL for the array. */
1109
1110 /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
1111 according to FLAGS. */
1112 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1113 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1114 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1115 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1116 return;
1117
1118 case COMPONENT_REF:
1119 case REALPART_EXPR:
1120 case IMAGPART_EXPR:
1121 {
1122 tree ref;
1123 HOST_WIDE_INT offset, size, maxsize;
1124 bool none = true;
1125 /* This component ref becomes an access to all of the subvariables
1126 it can touch, if we can determine that, but *NOT* the real one.
1127 If we can't determine which fields we could touch, the recursion
1128 will eventually get to a variable and add *all* of its subvars, or
1129 whatever is the minimum correct subset. */
1130
1131 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
1132 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
1133 {
1134 subvar_t svars = get_subvars_for_var (ref);
1135 subvar_t sv;
1136 for (sv = svars; sv; sv = sv->next)
1137 {
1138 bool exact;
1139 if (overlap_subvar (offset, maxsize, sv, &exact))
1140 {
1141 int subvar_flags = flags;
1142 none = false;
1143 if (!exact
1144 || size != maxsize)
1145 subvar_flags &= ~opf_kill_def;
1146 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1147 }
1148 }
1149 if (!none)
1150 flags |= opf_no_vops;
1151 }
1152
1153 /* Even if we found subvars above we need to make sure we see the
1154 immediate uses for d in s.a[d]. If s.a has a subvar we would
1155 miss it otherwise. */
1156 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1157 flags & ~opf_kill_def);
1158
1159 if (code == COMPONENT_REF)
1160 {
1161 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1162 s_ann->has_volatile_ops = true;
1163 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1164 }
1165 return;
1166 }
1167 case WITH_SIZE_EXPR:
1168 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1169 and an rvalue reference to its second argument. */
1170 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1171 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1172 return;
1173
1174 case CALL_EXPR:
1175 get_call_expr_operands (stmt, expr);
1176 return;
1177
1178 case COND_EXPR:
1179 case VEC_COND_EXPR:
1180 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1181 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1182 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1183 return;
1184
1185 case MODIFY_EXPR:
1186 {
1187 int subflags;
1188 tree op;
1189
1190 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1191
1192 op = TREE_OPERAND (expr, 0);
1193 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1194 op = TREE_OPERAND (op, 0);
1195 if (TREE_CODE (op) == ARRAY_REF
1196 || TREE_CODE (op) == ARRAY_RANGE_REF
1197 || TREE_CODE (op) == REALPART_EXPR
1198 || TREE_CODE (op) == IMAGPART_EXPR)
1199 subflags = opf_is_def;
1200 else
1201 subflags = opf_is_def | opf_kill_def;
1202
1203 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
1204 return;
1205 }
1206
1207 case CONSTRUCTOR:
1208 {
1209 /* General aggregate CONSTRUCTORs have been decomposed, but they
1210 are still in use as the COMPLEX_EXPR equivalent for vectors. */
1211 constructor_elt *ce;
1212 unsigned HOST_WIDE_INT idx;
1213
1214 for (idx = 0;
1215 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
1216 idx++)
1217 get_expr_operands (stmt, &ce->value, opf_none);
1218
1219 return;
1220 }
1221
1222 case TRUTH_NOT_EXPR:
1223 case BIT_FIELD_REF:
1224 case VIEW_CONVERT_EXPR:
1225 do_unary:
1226 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1227 return;
1228
1229 case TRUTH_AND_EXPR:
1230 case TRUTH_OR_EXPR:
1231 case TRUTH_XOR_EXPR:
1232 case COMPOUND_EXPR:
1233 case OBJ_TYPE_REF:
1234 case ASSERT_EXPR:
1235 do_binary:
1236 {
1237 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1238 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1239 return;
1240 }
1241
1242 case REALIGN_LOAD_EXPR:
1243 {
1244 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1245 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1246 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
1247 return;
1248 }
1249
1250 case BLOCK:
1251 case FUNCTION_DECL:
1252 case EXC_PTR_EXPR:
1253 case FILTER_EXPR:
1254 case LABEL_DECL:
1255 case CONST_DECL:
1256 /* Expressions that make no memory references. */
1257 return;
1258
1259 default:
1260 if (class == tcc_unary)
1261 goto do_unary;
1262 if (class == tcc_binary || class == tcc_comparison)
1263 goto do_binary;
1264 if (class == tcc_constant || class == tcc_type)
1265 return;
1266 }
1267
1268 /* If we get here, something has gone wrong. */
1269 #ifdef ENABLE_CHECKING
1270 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
1271 debug_tree (expr);
1272 fputs ("\n", stderr);
1273 internal_error ("internal error");
1274 #endif
1275 gcc_unreachable ();
1276 }
1277
1278
1279 /* Scan operands in the ASM_EXPR statement STMT. */
1280
1281 static void
1282 get_asm_expr_operands (tree stmt)
1283 {
1284 stmt_ann_t s_ann = stmt_ann (stmt);
1285 int noutputs = list_length (ASM_OUTPUTS (stmt));
1286 const char **oconstraints
1287 = (const char **) alloca ((noutputs) * sizeof (const char *));
1288 int i;
1289 tree link;
1290 const char *constraint;
1291 bool allows_mem, allows_reg, is_inout;
1292
1293 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1294 {
1295 oconstraints[i] = constraint
1296 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1297 parse_output_constraint (&constraint, i, 0, 0,
1298 &allows_mem, &allows_reg, &is_inout);
1299
1300 /* This should have been split in gimplify_asm_expr. */
1301 gcc_assert (!allows_reg || !is_inout);
1302
1303 /* Memory operands are addressable. Note that STMT needs the
1304 address of this operand. */
1305 if (!allows_reg && allows_mem)
1306 {
1307 tree t = get_base_address (TREE_VALUE (link));
1308 if (t && DECL_P (t) && s_ann)
1309 add_to_addressable_set (t, &s_ann->addresses_taken);
1310 }
1311
1312 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1313 }
1314
1315 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1316 {
1317 constraint
1318 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1319 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1320 oconstraints, &allows_mem, &allows_reg);
1321
1322 /* Memory operands are addressable. Note that STMT needs the
1323 address of this operand. */
1324 if (!allows_reg && allows_mem)
1325 {
1326 tree t = get_base_address (TREE_VALUE (link));
1327 if (t && DECL_P (t) && s_ann)
1328 add_to_addressable_set (t, &s_ann->addresses_taken);
1329 }
1330
1331 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1332 }
1333
1334
1335 /* Clobber memory for asm ("" : : : "memory"); */
1336 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1337 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1338 {
1339 unsigned i;
1340 bitmap_iterator bi;
1341
1342 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1343 decided to group them). */
1344 if (global_var)
1345 add_stmt_operand (&global_var, s_ann, opf_is_def);
1346 else
1347 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1348 {
1349 tree var = referenced_var (i);
1350 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1351 }
1352
1353 /* Now clobber all addressables. */
1354 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1355 {
1356 tree var = referenced_var (i);
1357
1358 /* Subvars are explicitly represented in this list, so
1359 we don't need the original to be added to the clobber
1360 ops, but the original *will* be in this list because
1361 we keep the addressability of the original
1362 variable up-to-date so we don't screw up the rest of
1363 the backend. */
1364 if (var_can_have_subvars (var)
1365 && get_subvars_for_var (var) != NULL)
1366 continue;
1367
1368 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1369 }
1370
1371 break;
1372 }
1373 }
1374
1375 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1376 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. */
1377
1378 static void
1379 get_indirect_ref_operands (tree stmt, tree expr, int flags)
1380 {
1381 tree *pptr = &TREE_OPERAND (expr, 0);
1382 tree ptr = *pptr;
1383 stmt_ann_t s_ann = stmt_ann (stmt);
1384
1385 /* Stores into INDIRECT_REF operands are never killing definitions. */
1386 flags &= ~opf_kill_def;
1387
1388 if (SSA_VAR_P (ptr))
1389 {
1390 struct ptr_info_def *pi = NULL;
1391
1392 /* If PTR has flow-sensitive points-to information, use it. */
1393 if (TREE_CODE (ptr) == SSA_NAME
1394 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1395 && pi->name_mem_tag)
1396 {
1397 /* PTR has its own memory tag. Use it. */
1398 add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
1399 }
1400 else
1401 {
1402 /* If PTR is not an SSA_NAME or it doesn't have a name
1403 tag, use its type memory tag. */
1404 var_ann_t v_ann;
1405
1406 /* If we are emitting debugging dumps, display a warning if
1407 PTR is an SSA_NAME with no flow-sensitive alias
1408 information. That means that we may need to compute
1409 aliasing again. */
1410 if (dump_file
1411 && TREE_CODE (ptr) == SSA_NAME
1412 && pi == NULL)
1413 {
1414 fprintf (dump_file,
1415 "NOTE: no flow-sensitive alias info for ");
1416 print_generic_expr (dump_file, ptr, dump_flags);
1417 fprintf (dump_file, " in ");
1418 print_generic_stmt (dump_file, stmt, dump_flags);
1419 }
1420
1421 if (TREE_CODE (ptr) == SSA_NAME)
1422 ptr = SSA_NAME_VAR (ptr);
1423 v_ann = var_ann (ptr);
1424 if (v_ann->type_mem_tag)
1425 add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
1426 }
1427 }
1428
1429 /* If a constant is used as a pointer, we can't generate a real
1430 operand for it but we mark the statement volatile to prevent
1431 optimizations from messing things up. */
1432 else if (TREE_CODE (ptr) == INTEGER_CST)
1433 {
1434 if (s_ann)
1435 s_ann->has_volatile_ops = true;
1436 return;
1437 }
1438 /* OK, this isn't even is_gimple_min_invariant. Something's broken. */
1439 else
1440 gcc_unreachable ();
1441
1442 /* Add a USE operand for the base pointer. */
1443 get_expr_operands (stmt, pptr, opf_none);
1444 }
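/* To illustrate the cases above: a store through a pointer with
   flow-sensitive alias information produces a V_MAY_DEF of the pointer's
   name memory tag plus a real USE of the pointer itself, roughly

       # NMT.5_3 = V_MAY_DEF <NMT.5_2>
       *p_1 = 0;

   If the pointer has no name tag, the type memory tag of its base variable
   is used instead, which is usually less precise.  */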
1445
1446 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1447
1448 static void
1449 get_tmr_operands (tree stmt, tree expr, int flags)
1450 {
1451 tree tag = TMR_TAG (expr), ref;
1452 HOST_WIDE_INT offset, size, maxsize;
1453 subvar_t svars, sv;
1454 stmt_ann_t s_ann = stmt_ann (stmt);
1455
1456 /* First record the real operands. */
1457 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1458 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1459
1460 /* MEM_REFs should never be killing. */
1461 flags &= ~opf_kill_def;
1462
1463 if (TMR_SYMBOL (expr))
1464 {
1465 stmt_ann_t ann = stmt_ann (stmt);
1466 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1467 }
1468
1469 if (!tag)
1470 {
1471 /* Something weird, so ensure that we will be careful. */
1472 stmt_ann (stmt)->has_volatile_ops = true;
1473 return;
1474 }
1475
1476 if (DECL_P (tag))
1477 {
1478 get_expr_operands (stmt, &tag, flags);
1479 return;
1480 }
1481
1482 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1483 gcc_assert (ref != NULL_TREE);
1484 svars = get_subvars_for_var (ref);
1485 for (sv = svars; sv; sv = sv->next)
1486 {
1487 bool exact;
1488 if (overlap_subvar (offset, maxsize, sv, &exact))
1489 {
1490 int subvar_flags = flags;
1491 if (!exact || size != maxsize)
1492 subvar_flags &= ~opf_kill_def;
1493 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1494 }
1495 }
1496 }
1497
1498 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1499
1500 static void
1501 get_call_expr_operands (tree stmt, tree expr)
1502 {
1503 tree op;
1504 int call_flags = call_expr_flags (expr);
1505
1506 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1507 operands for all the symbols that have been found to be
1508 call-clobbered.
1509
1510 Note that if aliases have not been computed, the global effects
1511 of calls will not be included in the SSA web. This is fine
1512 because no optimizer should run before aliases have been
1513 computed. By not bothering with virtual operands for CALL_EXPRs
1514 we avoid adding superfluous virtual operands, which can be a
1515 significant compile time sink (See PR 15855). */
1516 if (aliases_computed_p
1517 && !bitmap_empty_p (call_clobbered_vars)
1518 && !(call_flags & ECF_NOVOPS))
1519 {
1520 /* A 'pure' or a 'const' function never call-clobbers anything.
1521 A 'noreturn' function might, but since we don't return anyway
1522 there is no point in recording that. */
1523 if (TREE_SIDE_EFFECTS (expr)
1524 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1525 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1526 else if (!(call_flags & ECF_CONST))
1527 add_call_read_ops (stmt);
1528 }
1529
1530 /* Find uses in the called function. */
1531 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1532
1533 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1534 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1535
1536 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1537
1538 }
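/* To illustrate the virtual operands added here: once aliasing has been
   computed, a call that may write a call-clobbered global 'g' receives a
   V_MAY_DEF for it, while a 'pure' call only receives a VUSE, roughly

       # g_4 = V_MAY_DEF <g_3>
       foo (x_1);

       # VUSE <g_3>
       t_5 = bar (x_1);  */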
1539
1540
1541 /* Add *VAR_P to the appropriate operand array of the statement whose
1542 annotation is S_ANN. FLAGS is as in get_expr_operands. If *VAR_P is a
1543 GIMPLE register, it will be added to the statement's real operands,
1544 otherwise it is added to the virtual operands. */
1545
1546 static void
1547 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1548 {
1549 bool is_real_op;
1550 tree var, sym;
1551 var_ann_t v_ann;
1552
1553 var = *var_p;
1554 gcc_assert (SSA_VAR_P (var));
1555
1556 is_real_op = is_gimple_reg (var);
1557 /* If this is a real operand, the operand is either ssa name or decl.
1558 Virtual operands may only be decls. */
1559 gcc_assert (is_real_op || DECL_P (var));
1560
1561 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1562 v_ann = var_ann (sym);
1563
1564 /* Mark statements with volatile operands. Optimizers should back
1565 off from statements having volatile operands. */
1566 if (TREE_THIS_VOLATILE (sym) && s_ann)
1567 s_ann->has_volatile_ops = true;
1568
1569 /* If the variable cannot be modified and this is a V_MAY_DEF, change
1570 it into a VUSE. This happens when read-only variables are marked
1571 call-clobbered and/or aliased to writable variables. So we only
1572 check for this on non-specific stores.
1573
1574 Note that if this is a specific store, i.e. associated with a
1575 modify_expr, then we can't suppress the V_DEF, lest we run into
1576 validation problems.
1577
1578 This can happen when programs cast away const, leaving us with a
1579 store to read-only memory. If the statement is actually executed
1580 at runtime, then the program is ill formed. If the statement is
1581 not executed then all is well. At the very least, we cannot ICE. */
1582 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1583 {
1584 gcc_assert (!is_real_op);
1585 flags &= ~(opf_is_def | opf_kill_def);
1586 }
1587
1588 if (is_real_op)
1589 {
1590 /* The variable is a GIMPLE register. Add it to real operands. */
1591 if (flags & opf_is_def)
1592 append_def (var_p);
1593 else
1594 append_use (var_p);
1595 }
1596 else
1597 {
1598 VEC(tree,gc) *aliases;
1599
1600 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1601 virtual operands, unless the caller has specifically requested
1602 not to add virtual operands (used when adding operands inside an
1603 ADDR_EXPR expression). */
1604 if (flags & opf_no_vops)
1605 return;
1606
1607 aliases = v_ann->may_aliases;
1608
1609 if (aliases == NULL)
1610 {
1611 /* The variable is not aliased or it is an alias tag. */
1612 if (flags & opf_is_def)
1613 {
1614 if (flags & opf_kill_def)
1615 {
1616 /* Only regular variables or struct fields may get a
1617 V_MUST_DEF operand. */
1618 gcc_assert (!MTAG_P (var)
1619 || TREE_CODE (var) == STRUCT_FIELD_TAG);
1620 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1621 variable definitions. */
1622 append_v_must_def (var);
1623 }
1624 else
1625 {
1626 /* Add a V_MAY_DEF for call-clobbered variables and
1627 memory tags. */
1628 append_v_may_def (var);
1629 }
1630 }
1631 else
1632 append_vuse (var);
1633 }
1634 else
1635 {
1636 unsigned i;
1637 tree al;
1638
1639 /* The variable is aliased. Add its aliases to the virtual
1640 operands. */
1641 gcc_assert (VEC_length (tree, aliases) != 0);
1642
1643 if (flags & opf_is_def)
1644 {
1645 /* If the variable is also an alias tag, add a virtual
1646 operand for it, otherwise we will miss representing
1647 references to the members of the variable's alias set.
1648 This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
1649 if (v_ann->is_alias_tag)
1650 append_v_may_def (var);
1651
1652 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1653 append_v_may_def (al);
1654 }
1655 else
1656 {
1657 /* Similarly, append a virtual use for VAR itself, when
1658 it is an alias tag. */
1659 if (v_ann->is_alias_tag)
1660 append_vuse (var);
1661
1662 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1663 append_vuse (al);
1664 }
1665 }
1666 }
1667 }
1668
1669
1670 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
1671 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
1672 a single variable whose address has been taken or any other valid
1673 GIMPLE memory reference (structure reference, array, etc). If the
1674 base address of REF is a decl that has sub-variables, also add all
1675 of its sub-variables. */
1676
1677 void
1678 add_to_addressable_set (tree ref, bitmap *addresses_taken)
1679 {
1680 tree var;
1681 subvar_t svars;
1682
1683 gcc_assert (addresses_taken);
1684
1685 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
1686 as the only thing we take the address of. If VAR is a structure,
1687 taking the address of a field means that the whole structure may
1688 be referenced using pointer arithmetic. See PR 21407 and the
1689 ensuing mailing list discussion. */
1690 var = get_base_address (ref);
1691 if (var && SSA_VAR_P (var))
1692 {
1693 if (*addresses_taken == NULL)
1694 *addresses_taken = BITMAP_GGC_ALLOC ();
1695
1696 if (var_can_have_subvars (var)
1697 && (svars = get_subvars_for_var (var)))
1698 {
1699 subvar_t sv;
1700 for (sv = svars; sv; sv = sv->next)
1701 {
1702 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
1703 TREE_ADDRESSABLE (sv->var) = 1;
1704 }
1705 }
1706 else
1707 {
1708 bitmap_set_bit (*addresses_taken, DECL_UID (var));
1709 TREE_ADDRESSABLE (var) = 1;
1710 }
1711 }
1712 }
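/* For illustration: given

     p = &s.f;

   the base of the reference is 's', so 's' (or, if it has them, each of its
   sub-variables) is added to *ADDRESSES_TAKEN and marked TREE_ADDRESSABLE.
   Recording only the field would lose the fact that all of 's' may now be
   reached through 'p' (see the PR 21407 note above).  */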
1713
1714
1715 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1716 clobbered variables in the function. */
1717
1718 static void
1719 add_call_clobber_ops (tree stmt, tree callee)
1720 {
1721 unsigned u;
1722 tree t;
1723 bitmap_iterator bi;
1724 stmt_ann_t s_ann = stmt_ann (stmt);
1725 struct stmt_ann_d empty_ann;
1726 bitmap not_read_b, not_written_b;
1727
1728 /* Functions that are not const, pure or noreturn may clobber
1729 call-clobbered variables. */
1730 if (s_ann)
1731 s_ann->makes_clobbering_call = true;
1732
1733 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1734 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1735 if (global_var)
1736 {
1737 add_stmt_operand (&global_var, s_ann, opf_is_def);
1738 return;
1739 }
1740
1741 /* FIXME - if we have better information from the static vars
1742 analysis, we need to make the cache call site specific. This way
1743 we can have the performance benefits even if we are doing good
1744 optimization. */
1745
1746 /* Get info for local and module level statics. There is a bit
1747 set for each static if the call being processed does not read
1748 or write that variable. */
1749
1750 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1751 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1752
1753 /* If cache is valid, copy the elements into the build vectors. */
1754 if (ssa_call_clobbered_cache_valid
1755 && (!not_read_b || bitmap_empty_p (not_read_b))
1756 && (!not_written_b || bitmap_empty_p (not_written_b)))
1757 {
1758 for (u = 0 ; u < VEC_length (tree, clobbered_vuses); u++)
1759 {
1760 t = VEC_index (tree, clobbered_vuses, u);
1761 gcc_assert (TREE_CODE (t) != SSA_NAME);
1762 var_ann (t)->in_vuse_list = 1;
1763 VEC_safe_push (tree, heap, build_vuses, (tree)t);
1764 }
1765 for (u = 0; u < VEC_length (tree, clobbered_v_may_defs); u++)
1766 {
1767 t = VEC_index (tree, clobbered_v_may_defs, u);
1768 gcc_assert (TREE_CODE (t) != SSA_NAME);
1769 var_ann (t)->in_v_may_def_list = 1;
1770 VEC_safe_push (tree, heap, build_v_may_defs, (tree)t);
1771 }
1772 return;
1773 }
1774
1775 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
1776
1777 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1778 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1779 {
1780 tree var = referenced_var (u);
1781 unsigned int uid = u;
1782
1783 if (unmodifiable_var_p (var))
1784 add_stmt_operand (&var, &empty_ann, opf_none);
1785 else
1786 {
1787 bool not_read;
1788 bool not_written;
1789
1790 /* Not read and not written are computed on regular vars, not
1791 subvars, so look at the parent var if this is an SFT. */
1792
1793 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1794 uid = DECL_UID (SFT_PARENT_VAR (var));
1795
1796 not_read =
1797 not_read_b ? bitmap_bit_p (not_read_b, uid) : false;
1798 not_written =
1799 not_written_b ? bitmap_bit_p (not_written_b, uid) : false;
1800
1801 if (not_written)
1802 {
1803 if (!not_read)
1804 add_stmt_operand (&var, &empty_ann, opf_none);
1805 }
1806 else
1807 add_stmt_operand (&var, &empty_ann, opf_is_def);
1808 }
1809 }
1810
1811 if ((!not_read_b || bitmap_empty_p (not_read_b))
1812 && (!not_written_b || bitmap_empty_p (not_written_b)))
1813 {
1814 /* Prepare empty cache vectors. */
1815 VEC_truncate (tree, clobbered_vuses, 0);
1816 VEC_truncate (tree, clobbered_v_may_defs, 0);
1817
1818 /* Now fill the clobbered cache with the values that have been found. */
1819 for (u = 0; u < VEC_length (tree, build_vuses); u++)
1820 VEC_safe_push (tree, heap, clobbered_vuses,
1821 VEC_index (tree, build_vuses, u));
1822
1823 gcc_assert (VEC_length (tree, build_vuses)
1824 == VEC_length (tree, clobbered_vuses));
1825
1826 for (u = 0; u < VEC_length (tree, build_v_may_defs); u++)
1827 VEC_safe_push (tree, heap, clobbered_v_may_defs,
1828 VEC_index (tree, build_v_may_defs, u));
1829
1830 gcc_assert (VEC_length (tree, build_v_may_defs)
1831 == VEC_length (tree, clobbered_v_may_defs));
1832
1833 ssa_call_clobbered_cache_valid = true;
1834 }
1835 }
1836
1837
1838 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1839 function. */
1840
1841 static void
1842 add_call_read_ops (tree stmt)
1843 {
1844 unsigned u;
1845 tree t;
1846 bitmap_iterator bi;
1847 stmt_ann_t s_ann = stmt_ann (stmt);
1848 struct stmt_ann_d empty_ann;
1849
1850 /* If the function is not pure, it may reference memory. Add
1851 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1852 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1853 if (global_var)
1854 {
1855 add_stmt_operand (&global_var, s_ann, opf_none);
1856 return;
1857 }
1858
1859 /* If cache is valid, copy the elements into the build vector. */
1860 if (ssa_ro_call_cache_valid)
1861 {
1862 for (u = 0; u < VEC_length (tree, ro_call_vuses); u++)
1863 {
1864 t = VEC_index (tree, ro_call_vuses, u);
1865 gcc_assert (TREE_CODE (t) != SSA_NAME);
1866 var_ann (t)->in_vuse_list = 1;
1867 VEC_safe_push (tree, heap, build_vuses, (tree)t);
1868 }
1869 return;
1870 }
1871
1872 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
1873
1874 /* Add a VUSE for each call-clobbered variable. */
1875 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1876 {
1877 tree var = referenced_var (u);
1878 add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
1879 }
1880
1881 /* Prepare empty cache vectors. */
1882 VEC_truncate (tree, ro_call_vuses, 0);
1883
1884 /* Now fill the read-only call cache with the values that have been found. */
1885 for (u = 0; u < VEC_length (tree, build_vuses); u++)
1886 VEC_safe_push (tree, heap, ro_call_vuses,
1887 VEC_index (tree, build_vuses, u));
1888
1889 gcc_assert (VEC_length (tree, build_vuses)
1890 == VEC_length (tree, ro_call_vuses));
1891
1892 ssa_ro_call_cache_valid = true;
1893 }
1894
1895
1896 /* Scan the immediate_use list for VAR, making sure it is linked properly.
1897 Return TRUE if there is a problem. */
1898
1899 bool
1900 verify_imm_links (FILE *f, tree var)
1901 {
1902 use_operand_p ptr, prev, list;
1903 int count;
1904
1905 gcc_assert (TREE_CODE (var) == SSA_NAME);
1906
1907 list = &(SSA_NAME_IMM_USE_NODE (var));
1908 gcc_assert (list->use == NULL);
1909
1910 if (list->prev == NULL)
1911 {
1912 gcc_assert (list->next == NULL);
1913 return false;
1914 }
1915
1916 prev = list;
1917 count = 0;
1918 for (ptr = list->next; ptr != list; )
1919 {
1920 if (prev != ptr->prev)
1921 goto error;
1922
1923 if (ptr->use == NULL)
1924 goto error; /* 2 roots, or SAFE guard node. */
1925 else if (*(ptr->use) != var)
1926 goto error;
1927
1928 prev = ptr;
1929 ptr = ptr->next;
1930 /* Avoid infinite loops. 50,000,000 uses probably indicates a problem. */
1931 if (count++ > 50000000)
1932 goto error;
1933 }
1934
1935 /* Verify list in the other direction. */
1936 prev = list;
1937 for (ptr = list->prev; ptr != list; )
1938 {
1939 if (prev != ptr->next)
1940 goto error;
1941 prev = ptr;
1942 ptr = ptr->prev;
1943 if (count-- < 0)
1944 goto error;
1945 }
1946
1947 if (count != 0)
1948 goto error;
1949
1950 return false;
1951
1952 error:
1953 if (ptr->stmt && stmt_modified_p (ptr->stmt))
1954 {
1955 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
1956 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
1957 }
1958 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
1959 (void *)ptr->use);
1960 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
1961 fprintf(f, "\n");
1962 return true;
1963 }
1964
1965
1966 /* Dump all the immediate uses to FILE. */
1967
1968 void
1969 dump_immediate_uses_for (FILE *file, tree var)
1970 {
1971 imm_use_iterator iter;
1972 use_operand_p use_p;
1973
1974 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
1975
1976 print_generic_expr (file, var, TDF_SLIM);
1977 fprintf (file, " : -->");
1978 if (has_zero_uses (var))
1979 fprintf (file, " no uses.\n");
1980 else
1981 if (has_single_use (var))
1982 fprintf (file, " single use.\n");
1983 else
1984 fprintf (file, "%d uses.\n", num_imm_uses (var));
1985
1986 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
1987 {
1988 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
1989 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
1990 else
1991 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
1992 }
1993 fprintf(file, "\n");
1994 }
1995
1996 /* Dump all the immediate uses to FILE. */
1997
1998 void
1999 dump_immediate_uses (FILE *file)
2000 {
2001 tree var;
2002 unsigned int x;
2003
2004 fprintf (file, "Immediate_uses: \n\n");
2005 for (x = 1; x < num_ssa_names; x++)
2006 {
2007 var = ssa_name(x);
2008 if (!var)
2009 continue;
2010 dump_immediate_uses_for (file, var);
2011 }
2012 }
2013
2014
2015 /* Dump def-use edges on stderr. */
2016
2017 void
2018 debug_immediate_uses (void)
2019 {
2020 dump_immediate_uses (stderr);
2021 }
2022
2023 /* Dump def-use edges on stderr. */
2024
2025 void
2026 debug_immediate_uses_for (tree var)
2027 {
2028 dump_immediate_uses_for (stderr, var);
2029 }
2030 #include "gt-tree-ssa-operands.h"