1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53 The operand tree is then parsed by the various get_* routines, which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 5 of these routines, each representing one of the
57 5 types of operands: Defs, Uses, Virtual Uses, Virtual May Defs, and
58 Virtual Must Defs.
59
60 The append_* routines check for duplication, and simply keep a list of
61 unique objects for each operand type in the build_* extendable vectors.
62
63 Once the stmt tree is completely parsed, the finalize_ssa_operands()
64 routine is called, which proceeds to perform the finalization routine
65 on each of the 5 operand vectors which have been built up.
66
67 If the stmt had a previous operand cache, the finalization routines
68 attempt to match up the new operands with the old ones. If it's a perfect
69 match, the old vector is simply reused. If it isn't a perfect match, then
70 a new vector is created and the new operands are placed there. For
71 virtual operands, if the previous cache had an SSA_NAME version of a
72 variable, and that same variable occurs in the new operands cache, then
73 the new cache vector will also get the same SSA_NAME.
74
75 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
76 vector for VUSE, then the new vector will also be modified such that
77 it contains 'a_5' rather than 'a'.
78
79 */
80
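/* As an illustrative sketch only (this is not part of the interface
   defined in this file), a pass normally consumes the cache built here
   through the operand iterator macros (FOR_EACH_SSA_TREE_OPERAND and the
   SSA_OP_* filter flags, declared alongside the operand types).  Assuming
   STMT is a statement whose operands are up to date:

	tree var;
	ssa_op_iter iter;

	FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
	  examine_use (var);

	FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_VIRTUALS)
	  examine_virtual_operand (var);

   Here examine_use and examine_virtual_operand are hypothetical helpers
   standing in for whatever the pass does with each real use and each
   virtual operand it visits.  */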
81
82 /* Flags to describe operand properties in helpers. */
83
84 /* By default, operands are loaded. */
85 #define opf_none 0
86
87 /* Operand is the target of an assignment expression or a
88 call-clobbered variable. */
89 #define opf_is_def (1 << 0)
90
91 /* Operand is the target of an assignment expression. */
92 #define opf_kill_def (1 << 1)
93
94 /* No virtual operands should be created in the expression. This is used
95 when traversing ADDR_EXPR nodes which have different semantics than
96 other expressions. Inside an ADDR_EXPR node, the only operands that we
97 need to consider are indices into arrays. For instance, &a.b[i] should
98 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
99 VUSE for 'b'. */
100 #define opf_no_vops (1 << 2)
101
102 /* Operand is a "non-specific" kill for call-clobbers and such. This is used
103 to distinguish "reset the world" events from explicit MODIFY_EXPRs. */
104 #define opf_non_specific (1 << 3)
105
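/* Purely as a sketch of how the scanning routines below combine these
   flags: a store that overwrites an entire non-aliased object is scanned
   with (opf_is_def | opf_kill_def); a partial store, such as one to an
   array element or bit-field, with opf_is_def alone; and subexpressions
   inside an ADDR_EXPR with the caller's flags plus opf_no_vops, so that
   no virtual operands are created for the referenced object.  */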
106
107 /* Array for building all the def operands. */
108 static VEC(tree,heap) *build_defs;
109
110 /* Array for building all the use operands. */
111 static VEC(tree,heap) *build_uses;
112
113 /* Array for building all the v_may_def operands. */
114 static VEC(tree,heap) *build_v_may_defs;
115
116 /* Array for building all the vuse operands. */
117 static VEC(tree,heap) *build_vuses;
118
119 /* Array for building all the v_must_def operands. */
120 static VEC(tree,heap) *build_v_must_defs;
121
122 /* True if the operands for call clobbered vars are cached and valid. */
123 bool ssa_call_clobbered_cache_valid;
124 bool ssa_ro_call_cache_valid;
125
126 /* These arrays are the cached operand vectors for call clobbered calls. */
127 static VEC(tree,heap) *clobbered_v_may_defs;
128 static VEC(tree,heap) *clobbered_vuses;
129 static VEC(tree,heap) *ro_call_vuses;
130 static bool ops_active = false;
131
132 static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
133 static unsigned operand_memory_index;
134
135 static void get_expr_operands (tree, tree *, int);
136 static void get_asm_expr_operands (tree);
137 static void get_indirect_ref_operands (tree, tree, int);
138 static void get_tmr_operands (tree, tree, int);
139 static void get_call_expr_operands (tree, tree);
140 static inline void append_def (tree *);
141 static inline void append_use (tree *);
142 static void append_v_may_def (tree);
143 static void append_v_must_def (tree);
144 static void add_call_clobber_ops (tree, tree);
145 static void add_call_read_ops (tree);
146 static void add_stmt_operand (tree *, stmt_ann_t, int);
147 static void build_ssa_operands (tree stmt);
148
149 static def_optype_p free_defs = NULL;
150 static use_optype_p free_uses = NULL;
151 static vuse_optype_p free_vuses = NULL;
152 static maydef_optype_p free_maydefs = NULL;
153 static mustdef_optype_p free_mustdefs = NULL;
154
155
156 /* Return the DECL_UID of the base variable of T. */
157
158 static inline unsigned
159 get_name_decl (tree t)
160 {
161 if (TREE_CODE (t) != SSA_NAME)
162 return DECL_UID (t);
163 else
164 return DECL_UID (SSA_NAME_VAR (t));
165 }
166
167 /* Comparison function for qsort used in operand_build_sort_virtual. */
168
169 static int
170 operand_build_cmp (const void *p, const void *q)
171 {
172 tree e1 = *((const tree *)p);
173 tree e2 = *((const tree *)q);
174 unsigned int u1,u2;
175
176 u1 = get_name_decl (e1);
177 u2 = get_name_decl (e2);
178
179 /* We want to sort in ascending order. They can never be equal. */
180 #ifdef ENABLE_CHECKING
181 gcc_assert (u1 != u2);
182 #endif
183 return (u1 > u2 ? 1 : -1);
184 }
185
186 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
187
188 static inline void
189 operand_build_sort_virtual (VEC(tree,heap) *list)
190 {
191 int num = VEC_length (tree, list);
192 if (num < 2)
193 return;
194 if (num == 2)
195 {
196 if (get_name_decl (VEC_index (tree, list, 0))
197 > get_name_decl (VEC_index (tree, list, 1)))
198 {
199 /* Swap elements if in the wrong order. */
200 tree tmp = VEC_index (tree, list, 0);
201 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
202 VEC_replace (tree, list, 1, tmp);
203 }
204 return;
205 }
206 /* There are 3 or more elements, call qsort. */
207 qsort (VEC_address (tree, list),
208 VEC_length (tree, list),
209 sizeof (tree),
210 operand_build_cmp);
211 }
212
213
214
215 /* Return true if the ssa operands cache is active. */
216
217 bool
218 ssa_operands_active (void)
219 {
220 return ops_active;
221 }
222
223
224 /* Initialize the operand cache routines. */
225
226 void
227 init_ssa_operands (void)
228 {
229 build_defs = VEC_alloc (tree, heap, 5);
230 build_uses = VEC_alloc (tree, heap, 10);
231 build_vuses = VEC_alloc (tree, heap, 25);
232 build_v_may_defs = VEC_alloc (tree, heap, 25);
233 build_v_must_defs = VEC_alloc (tree, heap, 25);
234
235 gcc_assert (operand_memory == NULL);
236 operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
237 ops_active = true;
238 }
239
240
241 /* Dispose of anything required by the operand routines. */
242
243 void
244 fini_ssa_operands (void)
245 {
246 struct ssa_operand_memory_d *ptr;
247 VEC_free (tree, heap, build_defs);
248 VEC_free (tree, heap, build_uses);
249 VEC_free (tree, heap, build_v_must_defs);
250 VEC_free (tree, heap, build_v_may_defs);
251 VEC_free (tree, heap, build_vuses);
252 free_defs = NULL;
253 free_uses = NULL;
254 free_vuses = NULL;
255 free_maydefs = NULL;
256 free_mustdefs = NULL;
257 while ((ptr = operand_memory) != NULL)
258 {
259 operand_memory = operand_memory->next;
260 ggc_free (ptr);
261 }
262
263 VEC_free (tree, heap, clobbered_v_may_defs);
264 VEC_free (tree, heap, clobbered_vuses);
265 VEC_free (tree, heap, ro_call_vuses);
266 ops_active = false;
267 }
268
269
270 /* Return a pointer to SIZE bytes of operand memory. */
271
272 static inline void *
273 ssa_operand_alloc (unsigned size)
274 {
275 char *ptr;
276 if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
277 {
278 struct ssa_operand_memory_d *ptr;
279 ptr = GGC_NEW (struct ssa_operand_memory_d);
280 ptr->next = operand_memory;
281 operand_memory = ptr;
282 operand_memory_index = 0;
283 }
284 ptr = &(operand_memory->mem[operand_memory_index]);
285 operand_memory_index += size;
286 return ptr;
287 }
288
289
290 /* Make sure PTR is in the correct immediate use list. Since uses are simply
291 pointers into the stmt TREE, there is no way of telling if anyone has
292 changed what this pointer points to via TREE_OPERAND (exp, 0) = <...>.
293 The contents are different, but the pointer is still the same. This
294 routine will check to make sure PTR is in the correct list, and if it isn't,
295 put it in the correct list. We cannot simply check the previous node
296 because all nodes in the same stmt might have been changed. */
297
298 static inline void
299 correct_use_link (use_operand_p ptr, tree stmt)
300 {
301 use_operand_p prev;
302 tree root;
303
304 /* Fold_stmt () may have changed the stmt pointers. */
305 if (ptr->stmt != stmt)
306 ptr->stmt = stmt;
307
308 prev = ptr->prev;
309 if (prev)
310 {
311 /* Find the root element, making sure we skip any safe iterators. */
312 while (prev->use != NULL || prev->stmt == NULL)
313 prev = prev->prev;
314
315 /* Get the ssa_name of the list the node is in. */
316 root = prev->stmt;
317 /* If it's the right list, simply return. */
318 if (root == *(ptr->use))
319 return;
320 }
321 /* It's in the wrong list if we reach here. */
322 delink_imm_use (ptr);
323 link_imm_use (ptr, *(ptr->use));
324 }
325
326
327 /* This routine makes sure that PTR is in an immediate use list, and makes
328 sure the stmt pointer is set to the current stmt. Virtual uses do not need
329 the overhead of correct_use_link since they cannot be directly manipulated
330 like a real use can be. (They don't exist in the TREE_OPERAND nodes.) */
331 static inline void
332 set_virtual_use_link (use_operand_p ptr, tree stmt)
333 {
334 /* Fold_stmt () may have changed the stmt pointers. */
335 if (ptr->stmt != stmt)
336 ptr->stmt = stmt;
337
338 /* If this use isn't in a list, add it to the correct list. */
339 if (!ptr->prev)
340 link_imm_use (ptr, *(ptr->use));
341 }
342
343
344
345 #define FINALIZE_OPBUILD build_defs
346 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
347 build_defs, (I))
348 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
349 build_defs, (I))
350 #define FINALIZE_FUNC finalize_ssa_def_ops
351 #define FINALIZE_ALLOC alloc_def
352 #define FINALIZE_FREE free_defs
353 #define FINALIZE_TYPE struct def_optype_d
354 #define FINALIZE_ELEM(PTR) ((PTR)->def_ptr)
355 #define FINALIZE_OPS DEF_OPS
356 #define FINALIZE_BASE(VAR) VAR
357 #define FINALIZE_BASE_TYPE tree *
358 #define FINALIZE_BASE_ZERO NULL
359 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) FINALIZE_ELEM (PTR) = (VAL)
360 #include "tree-ssa-opfinalize.h"
361
362
363 /* This routine will create stmt operands for STMT from the def build list. */
364
365 static void
366 finalize_ssa_defs (tree stmt)
367 {
368 unsigned int num = VEC_length (tree, build_defs);
369 /* There should only be a single real definition per assignment. */
370 gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
371
372 /* If there is an old list, often the new list is identical, or close, so
373 find the elements at the beginning that are the same as the vector. */
374
375 finalize_ssa_def_ops (stmt);
376 VEC_truncate (tree, build_defs, 0);
377 }
378
379 #define FINALIZE_OPBUILD build_uses
380 #define FINALIZE_OPBUILD_BASE(I) (tree *)VEC_index (tree, \
381 build_uses, (I))
382 #define FINALIZE_OPBUILD_ELEM(I) (tree *)VEC_index (tree, \
383 build_uses, (I))
384 #define FINALIZE_FUNC finalize_ssa_use_ops
385 #define FINALIZE_ALLOC alloc_use
386 #define FINALIZE_FREE free_uses
387 #define FINALIZE_TYPE struct use_optype_d
388 #define FINALIZE_ELEM(PTR) ((PTR)->use_ptr.use)
389 #define FINALIZE_OPS USE_OPS
390 #define FINALIZE_USE_PTR(PTR) USE_OP_PTR (PTR)
391 #define FINALIZE_CORRECT_USE correct_use_link
392 #define FINALIZE_BASE(VAR) VAR
393 #define FINALIZE_BASE_TYPE tree *
394 #define FINALIZE_BASE_ZERO NULL
395 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
396 (PTR)->use_ptr.use = (VAL); \
397 link_imm_use_stmt (&((PTR)->use_ptr), \
398 *(VAL), (STMT))
399 #include "tree-ssa-opfinalize.h"
400
401 /* Create the use operands for STMT from the use build list. */
402
403 static void
404 finalize_ssa_uses (tree stmt)
405 {
406 #ifdef ENABLE_CHECKING
407 {
408 unsigned x;
409 unsigned num = VEC_length (tree, build_uses);
410
411 /* If the pointer to the operand is the statement itself, something is
412 wrong. It means that we are pointing to a local variable (the
413 initial call to get_stmt_operands does not pass a pointer to a
414 statement). */
415 for (x = 0; x < num; x++)
416 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
417 }
418 #endif
419 finalize_ssa_use_ops (stmt);
420 VEC_truncate (tree, build_uses, 0);
421 }
422
423
424 /* Create the v_may_def operands for STMT from the v_may_def build list. */
425 #define FINALIZE_OPBUILD build_v_may_defs
426 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_may_defs, (I))
427 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
428 build_v_may_defs, (I)))
429 #define FINALIZE_FUNC finalize_ssa_v_may_def_ops
430 #define FINALIZE_ALLOC alloc_maydef
431 #define FINALIZE_FREE free_maydefs
432 #define FINALIZE_TYPE struct maydef_optype_d
433 #define FINALIZE_ELEM(PTR) MAYDEF_RESULT (PTR)
434 #define FINALIZE_OPS MAYDEF_OPS
435 #define FINALIZE_USE_PTR(PTR) MAYDEF_OP_PTR (PTR)
436 #define FINALIZE_CORRECT_USE set_virtual_use_link
437 #define FINALIZE_BASE_ZERO 0
438 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
439 #define FINALIZE_BASE_TYPE unsigned
440 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
441 (PTR)->def_var = (VAL); \
442 (PTR)->use_var = (VAL); \
443 (PTR)->use_ptr.use = &((PTR)->use_var); \
444 link_imm_use_stmt (&((PTR)->use_ptr), \
445 (VAL), (STMT))
446 #include "tree-ssa-opfinalize.h"
447
448
449 static void
450 finalize_ssa_v_may_defs (tree stmt)
451 {
452 finalize_ssa_v_may_def_ops (stmt);
453 }
454
455
456 /* Clear the in_list bits and empty the build array for v_may_defs. */
457
458 static inline void
459 cleanup_v_may_defs (void)
460 {
461 unsigned x, num;
462 num = VEC_length (tree, build_v_may_defs);
463
464 for (x = 0; x < num; x++)
465 {
466 tree t = VEC_index (tree, build_v_may_defs, x);
467 if (TREE_CODE (t) != SSA_NAME)
468 {
469 var_ann_t ann = var_ann (t);
470 ann->in_v_may_def_list = 0;
471 }
472 }
473 VEC_truncate (tree, build_v_may_defs, 0);
474 }
475
476
477 #define FINALIZE_OPBUILD build_vuses
478 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_vuses, (I))
479 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
480 build_vuses, (I)))
481 #define FINALIZE_FUNC finalize_ssa_vuse_ops
482 #define FINALIZE_ALLOC alloc_vuse
483 #define FINALIZE_FREE free_vuses
484 #define FINALIZE_TYPE struct vuse_optype_d
485 #define FINALIZE_ELEM(PTR) VUSE_OP (PTR)
486 #define FINALIZE_OPS VUSE_OPS
487 #define FINALIZE_USE_PTR(PTR) VUSE_OP_PTR (PTR)
488 #define FINALIZE_CORRECT_USE set_virtual_use_link
489 #define FINALIZE_BASE_ZERO 0
490 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
491 #define FINALIZE_BASE_TYPE unsigned
492 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
493 (PTR)->use_var = (VAL); \
494 (PTR)->use_ptr.use = &((PTR)->use_var); \
495 link_imm_use_stmt (&((PTR)->use_ptr), \
496 (VAL), (STMT))
497 #include "tree-ssa-opfinalize.h"
498
499
500 /* Create the VUSE operands for STMT from the vuse build list. */
501
502 static void
503 finalize_ssa_vuses (tree stmt)
504 {
505 unsigned num, num_v_may_defs;
506 unsigned vuse_index;
507
508 /* Remove superfluous VUSE operands. If the statement already has a
509 V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
510 needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
511 suppose that variable 'a' is aliased:
512
513 # VUSE <a_2>
514 # a_3 = V_MAY_DEF <a_2>
515 a = a + 1;
516
517 The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
518 operation. */
519
520 num = VEC_length (tree, build_vuses);
521 num_v_may_defs = VEC_length (tree, build_v_may_defs);
522
523 if (num > 0 && num_v_may_defs > 0)
524 {
525 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
526 {
527 tree vuse;
528 vuse = VEC_index (tree, build_vuses, vuse_index);
529 if (TREE_CODE (vuse) != SSA_NAME)
530 {
531 var_ann_t ann = var_ann (vuse);
532 ann->in_vuse_list = 0;
533 if (ann->in_v_may_def_list)
534 {
535 VEC_ordered_remove (tree, build_vuses, vuse_index);
536 continue;
537 }
538 }
539 vuse_index++;
540 }
541 }
542 else
543 /* Clear out the in_list bits. */
544 for (vuse_index = 0;
545 vuse_index < VEC_length (tree, build_vuses);
546 vuse_index++)
547 {
548 tree t = VEC_index (tree, build_vuses, vuse_index);
549 if (TREE_CODE (t) != SSA_NAME)
550 {
551 var_ann_t ann = var_ann (t);
552 ann->in_vuse_list = 0;
553 }
554 }
555
556 finalize_ssa_vuse_ops (stmt);
557 /* The v_may_def build vector wasn't cleaned up because we needed it. */
558 cleanup_v_may_defs ();
559
560 /* Free the vuses build vector. */
561 VEC_truncate (tree, build_vuses, 0);
562
563 }
564
565 /* Create the v_must_def operands for STMT from the v_must_def build list. */
566
567 #define FINALIZE_OPBUILD build_v_must_defs
568 #define FINALIZE_OPBUILD_ELEM(I) VEC_index (tree, build_v_must_defs, (I))
569 #define FINALIZE_OPBUILD_BASE(I) get_name_decl (VEC_index (tree, \
570 build_v_must_defs, (I)))
571 #define FINALIZE_FUNC finalize_ssa_v_must_def_ops
572 #define FINALIZE_ALLOC alloc_mustdef
573 #define FINALIZE_FREE free_mustdefs
574 #define FINALIZE_TYPE struct mustdef_optype_d
575 #define FINALIZE_ELEM(PTR) MUSTDEF_RESULT (PTR)
576 #define FINALIZE_OPS MUSTDEF_OPS
577 #define FINALIZE_USE_PTR(PTR) MUSTDEF_KILL_PTR (PTR)
578 #define FINALIZE_CORRECT_USE set_virtual_use_link
579 #define FINALIZE_BASE_ZERO 0
580 #define FINALIZE_BASE(VAR) get_name_decl (VAR)
581 #define FINALIZE_BASE_TYPE unsigned
582 #define FINALIZE_INITIALIZE(PTR, VAL, STMT) \
583 (PTR)->def_var = (VAL); \
584 (PTR)->kill_var = (VAL); \
585 (PTR)->use_ptr.use = &((PTR)->kill_var);\
586 link_imm_use_stmt (&((PTR)->use_ptr), \
587 (VAL), (STMT))
588 #include "tree-ssa-opfinalize.h"
589
590
591 static void
592 finalize_ssa_v_must_defs (tree stmt)
593 {
594 /* In the presence of subvars, there may be more than one V_MUST_DEF per
595 statement (one for each subvar). It is a bit expensive to verify that
596 all must-defs in a statement belong to subvars if there is more than one
597 must-def, so we don't do it. Suffice it to say, if you reach here without
598 having subvars and have num > 1, you have hit a bug. */
599
600 finalize_ssa_v_must_def_ops (stmt);
601 VEC_truncate (tree, build_v_must_defs, 0);
602 }
603
604
605 /* Finalize all the build vectors and attach the new operands to STMT. */
606
607 static inline void
608 finalize_ssa_stmt_operands (tree stmt)
609 {
610 finalize_ssa_defs (stmt);
611 finalize_ssa_uses (stmt);
612 finalize_ssa_v_must_defs (stmt);
613 finalize_ssa_v_may_defs (stmt);
614 finalize_ssa_vuses (stmt);
615 }
616
617
618 /* Start the process of building up the operand vectors. */
619
620 static inline void
621 start_ssa_stmt_operands (void)
622 {
623 gcc_assert (VEC_length (tree, build_defs) == 0);
624 gcc_assert (VEC_length (tree, build_uses) == 0);
625 gcc_assert (VEC_length (tree, build_vuses) == 0);
626 gcc_assert (VEC_length (tree, build_v_may_defs) == 0);
627 gcc_assert (VEC_length (tree, build_v_must_defs) == 0);
628 }
629
630
631 /* Add DEF_P to the list of pointers to operands. */
632
633 static inline void
634 append_def (tree *def_p)
635 {
636 VEC_safe_push (tree, heap, build_defs, (tree)def_p);
637 }
638
639
640 /* Add USE_P to the list of pointers to operands. */
641
642 static inline void
643 append_use (tree *use_p)
644 {
645 VEC_safe_push (tree, heap, build_uses, (tree)use_p);
646 }
647
648
649 /* Add a new virtual may def for variable VAR to the build array. */
650
651 static inline void
652 append_v_may_def (tree var)
653 {
654 if (TREE_CODE (var) != SSA_NAME)
655 {
656 var_ann_t ann = get_var_ann (var);
657
658 /* Don't allow duplicate entries. */
659 if (ann->in_v_may_def_list)
660 return;
661 ann->in_v_may_def_list = 1;
662 }
663
664 VEC_safe_push (tree, heap, build_v_may_defs, (tree)var);
665 }
666
667
668 /* Add VAR to the list of virtual uses. */
669
670 static inline void
671 append_vuse (tree var)
672 {
673
674 /* Don't allow duplicate entries. */
675 if (TREE_CODE (var) != SSA_NAME)
676 {
677 var_ann_t ann = get_var_ann (var);
678
679 if (ann->in_vuse_list || ann->in_v_may_def_list)
680 return;
681 ann->in_vuse_list = 1;
682 }
683
684 VEC_safe_push (tree, heap, build_vuses, (tree)var);
685 }
686
687
688 /* Add VAR to the list of virtual must definitions. */
689
690 static inline void
691 append_v_must_def (tree var)
692 {
693 unsigned i;
694
695 /* Don't allow duplicate entries. */
696 for (i = 0; i < VEC_length (tree, build_v_must_defs); i++)
697 if (var == VEC_index (tree, build_v_must_defs, i))
698 return;
699
700 VEC_safe_push (tree, heap, build_v_must_defs, (tree)var);
701 }
702
703
704 /* Parse STMT looking for operands. When finished, the various build_*
705 operand vectors will have potential operands in them; they are sorted
706 and finalized by the caller (see build_ssa_operands). */
707
708 static void
709 parse_ssa_operands (tree stmt)
710 {
711 enum tree_code code;
712
713 code = TREE_CODE (stmt);
714 switch (code)
715 {
716 case MODIFY_EXPR:
717 /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
718 either only part of LHS is modified or if the RHS might throw,
719 otherwise, use V_MUST_DEF.
720
721 ??? If it might throw, we should represent somehow that it is killed
722 on the fallthrough path. */
723 {
724 tree lhs = TREE_OPERAND (stmt, 0);
725 int lhs_flags = opf_is_def;
726
727 get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
728
729 /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
730 or not the entire LHS is modified; that depends on what's
731 inside the VIEW_CONVERT_EXPR. */
732 if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
733 lhs = TREE_OPERAND (lhs, 0);
734
735 if (TREE_CODE (lhs) != ARRAY_REF
736 && TREE_CODE (lhs) != ARRAY_RANGE_REF
737 && TREE_CODE (lhs) != BIT_FIELD_REF
738 && TREE_CODE (lhs) != REALPART_EXPR
739 && TREE_CODE (lhs) != IMAGPART_EXPR)
740 lhs_flags |= opf_kill_def;
741
742 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
743 }
744 break;
745
746 case COND_EXPR:
747 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
748 break;
749
750 case SWITCH_EXPR:
751 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
752 break;
753
754 case ASM_EXPR:
755 get_asm_expr_operands (stmt);
756 break;
757
758 case RETURN_EXPR:
759 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
760 break;
761
762 case GOTO_EXPR:
763 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
764 break;
765
766 case LABEL_EXPR:
767 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
768 break;
769
770 /* These nodes contain no variable references. */
771 case BIND_EXPR:
772 case CASE_LABEL_EXPR:
773 case TRY_CATCH_EXPR:
774 case TRY_FINALLY_EXPR:
775 case EH_FILTER_EXPR:
776 case CATCH_EXPR:
777 case RESX_EXPR:
778 break;
779
780 default:
781 /* Notice that if get_expr_operands tries to use &STMT as the operand
782 pointer (which may only happen for USE operands), we will fail in
783 append_use. This default will handle statements like empty
784 statements, or CALL_EXPRs that may appear on the RHS of a statement
785 or as statements themselves. */
786 get_expr_operands (stmt, &stmt, opf_none);
787 break;
788 }
789 }
790
791 /* Create an operands cache for STMT. */
792
793 static void
794 build_ssa_operands (tree stmt)
795 {
796 stmt_ann_t ann = get_stmt_ann (stmt);
797
798 /* Initially assume that the statement has no volatile operands. */
799 if (ann)
800 ann->has_volatile_ops = false;
801
802 start_ssa_stmt_operands ();
803
804 parse_ssa_operands (stmt);
805 operand_build_sort_virtual (build_vuses);
806 operand_build_sort_virtual (build_v_may_defs);
807 operand_build_sort_virtual (build_v_must_defs);
808
809 finalize_ssa_stmt_operands (stmt);
810 }
811
812
813 /* Free any operands vectors in OPS. */
814 void
815 free_ssa_operands (stmt_operands_p ops)
816 {
817 ops->def_ops = NULL;
818 ops->use_ops = NULL;
819 ops->maydef_ops = NULL;
820 ops->mustdef_ops = NULL;
821 ops->vuse_ops = NULL;
822 }
823
824
825 /* Get the operands of statement STMT. Note that repeated calls to
826 get_stmt_operands for the same statement will do nothing until the
827 statement is marked modified by a call to mark_stmt_modified(). */
828
829 void
830 update_stmt_operands (tree stmt)
831 {
832 stmt_ann_t ann = get_stmt_ann (stmt);
833 /* If get_stmt_operands is called before SSA is initialized, don't
834 do anything. */
835 if (!ssa_operands_active ())
836 return;
837 /* The optimizers cannot handle statements that are nothing but a
838 _DECL. This indicates a bug in the gimplifier. */
839 gcc_assert (!SSA_VAR_P (stmt));
840
841 gcc_assert (ann->modified);
842
843 timevar_push (TV_TREE_OPS);
844
845 build_ssa_operands (stmt);
846
847 /* Clear the modified bit for STMT. Subsequent calls to
848 get_stmt_operands for this statement will do nothing until the
849 statement is marked modified by a call to mark_stmt_modified(). */
850 ann->modified = 0;
851
852 timevar_pop (TV_TREE_OPS);
853 }
854
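/* A hedged sketch of the expected calling pattern (RHS and NEW_VAL are
   hypothetical names; mark_stmt_modified is the routine referred to
   above): a pass that rewrites part of a statement in place marks it
   modified and has the cache rebuilt before the operands are read again:

	TREE_OPERAND (rhs, 1) = new_val;
	mark_stmt_modified (stmt);
	update_stmt_operands (stmt);

   The invariant relied on here is simply that the statement's modified
   bit is set before this function is called, which the assertion in
   update_stmt_operands enforces.  */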
855
856 /* Copies virtual operands from SRC to DST. */
857
858 void
859 copy_virtual_operands (tree dest, tree src)
860 {
861 tree t;
862 ssa_op_iter iter, old_iter;
863 use_operand_p use_p, u2;
864 def_operand_p def_p, d2;
865
866 build_ssa_operands (dest);
867
868 /* Copy all the virtual fields. */
869 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
870 append_vuse (t);
871 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYDEF)
872 append_v_may_def (t);
873 FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMUSTDEF)
874 append_v_must_def (t);
875
876 if (VEC_length (tree, build_vuses) == 0
877 && VEC_length (tree, build_v_may_defs) == 0
878 && VEC_length (tree, build_v_must_defs) == 0)
879 return;
880
881 /* Now commit the virtual operands to this stmt. */
882 finalize_ssa_v_must_defs (dest);
883 finalize_ssa_v_may_defs (dest);
884 finalize_ssa_vuses (dest);
885
886 /* Finally, set the fields to the same values as the originals. */
887
888
889 t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
890 FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
891 {
892 gcc_assert (!op_iter_done (&old_iter));
893 SET_USE (use_p, t);
894 t = op_iter_next_tree (&old_iter);
895 }
896 gcc_assert (op_iter_done (&old_iter));
897
898 op_iter_init_maydef (&old_iter, src, &u2, &d2);
899 FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, dest, iter)
900 {
901 gcc_assert (!op_iter_done (&old_iter));
902 SET_USE (use_p, USE_FROM_PTR (u2));
903 SET_DEF (def_p, DEF_FROM_PTR (d2));
904 op_iter_next_maymustdef (&u2, &d2, &old_iter);
905 }
906 gcc_assert (op_iter_done (&old_iter));
907
908 op_iter_init_mustdef (&old_iter, src, &u2, &d2);
909 FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, use_p, dest, iter)
910 {
911 gcc_assert (!op_iter_done (&old_iter));
912 SET_USE (use_p, USE_FROM_PTR (u2));
913 SET_DEF (def_p, DEF_FROM_PTR (d2));
914 op_iter_next_maymustdef (&u2, &d2, &old_iter);
915 }
916 gcc_assert (op_iter_done (&old_iter));
917
918 }
919
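/* Illustrative use only (NEW_STMT and OLD_STMT are hypothetical names in
   a caller): a pass that builds a replacement statement and wants it to
   carry the same virtual web as the statement it replaces would call

	copy_virtual_operands (new_stmt, old_stmt);

   so that the virtual uses and defs of OLD_STMT reappear, with the same
   SSA names, on NEW_STMT.  */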
920
921 /* Specifically for use in DOM's expression analysis. Given a store, we
922 create an artificial stmt which looks like a load from the store; this can
923 be used to eliminate redundant loads. OLD_STMT is the store stmt, and
924 NEW_STMT is the new load which represents a load of the
925 values stored. */
926
927 void
928 create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
929 {
930 stmt_ann_t ann;
931 tree op;
932 ssa_op_iter iter;
933 use_operand_p use_p;
934 unsigned x;
935
936 ann = get_stmt_ann (new_stmt);
937
938 /* Process the stmt looking for operands. */
939 start_ssa_stmt_operands ();
940 parse_ssa_operands (new_stmt);
941
942 for (x = 0; x < VEC_length (tree, build_vuses); x++)
943 {
944 tree t = VEC_index (tree, build_vuses, x);
945 if (TREE_CODE (t) != SSA_NAME)
946 {
947 var_ann_t ann = var_ann (t);
948 ann->in_vuse_list = 0;
949 }
950 }
951
952 for (x = 0; x < VEC_length (tree, build_v_may_defs); x++)
953 {
954 tree t = VEC_index (tree, build_v_may_defs, x);
955 if (TREE_CODE (t) != SSA_NAME)
956 {
957 var_ann_t ann = var_ann (t);
958 ann->in_v_may_def_list = 0;
959 }
960 }
961 /* Remove any virtual operands that were found. */
962 VEC_truncate (tree, build_v_may_defs, 0);
963 VEC_truncate (tree, build_v_must_defs, 0);
964 VEC_truncate (tree, build_vuses, 0);
965
966 /* For each VDEF on the original statement, we want to create a
967 VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
968 statement. */
969 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter,
970 (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))
971 append_vuse (op);
972
973 /* Now build the operands for this new stmt. */
974 finalize_ssa_stmt_operands (new_stmt);
975
976 /* All uses in this fake stmt must not be in the immediate use lists. */
977 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
978 delink_imm_use (use_p);
979 }
980
981 void
982 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
983 {
984 tree op0, op1;
985 op0 = *exp0;
986 op1 = *exp1;
987
988 /* If the operand cache is active, attempt to preserve the relative positions
989 of these two operands in their respective immediate use lists. */
990 if (ssa_operands_active () && op0 != op1)
991 {
992 use_optype_p use0, use1, ptr;
993 use0 = use1 = NULL;
994 /* Find the 2 operands in the cache, if they are there. */
995 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
996 if (USE_OP_PTR (ptr)->use == exp0)
997 {
998 use0 = ptr;
999 break;
1000 }
1001 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
1002 if (USE_OP_PTR (ptr)->use == exp1)
1003 {
1004 use1 = ptr;
1005 break;
1006 }
1007 /* If both uses don't have operand entries, there isn't much we can do
1008 at this point. Presumably we don't need to worry about it. */
1009 if (use0 && use1)
1010 {
1011 tree *tmp = USE_OP_PTR (use1)->use;
1012 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
1013 USE_OP_PTR (use0)->use = tmp;
1014 }
1015 }
1016
1017 /* Now swap the data. */
1018 *exp0 = op1;
1019 *exp1 = op0;
1020 }
1021
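/* For illustration only (RHS is a hypothetical name for the right-hand
   side of a MODIFY_EXPR): a caller canonicalizing a commutative
   expression swaps the two operands through this routine rather than
   assigning to the TREE_OPERANDs directly, so that any immediate use
   links on the operands stay consistent:

	swap_tree_operands (stmt,
			    &TREE_OPERAND (rhs, 0),
			    &TREE_OPERAND (rhs, 1));  */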
1022 /* Recursively scan the expression pointed to by EXPR_P in statement STMT.
1023 FLAGS is a combination of the OPF_* constants modifying how to interpret
1024 the operands found. */
1025
1026 static void
1027 get_expr_operands (tree stmt, tree *expr_p, int flags)
1028 {
1029 enum tree_code code;
1030 enum tree_code_class class;
1031 tree expr = *expr_p;
1032 stmt_ann_t s_ann = stmt_ann (stmt);
1033
1034 if (expr == NULL)
1035 return;
1036
1037 code = TREE_CODE (expr);
1038 class = TREE_CODE_CLASS (code);
1039
1040 switch (code)
1041 {
1042 case ADDR_EXPR:
1043 /* We could have the address of a component, array member,
1044 etc which has interesting variable references. */
1045 /* Taking the address of a variable does not represent a
1046 reference to it, but the fact that the stmt takes its address will be
1047 of interest to some passes (e.g. alias resolution). */
1048 add_stmt_operand (expr_p, s_ann, 0);
1049
1050 /* If the address is invariant, there may be no interesting variable
1051 references inside. */
1052 if (is_gimple_min_invariant (expr))
1053 return;
1054
1055 /* There should be no VUSEs created, since the referenced objects are
1056 not really accessed. The only operands that we should find here
1057 are ARRAY_REF indices which will always be real operands (GIMPLE
1058 does not allow non-registers as array indices). */
1059 flags |= opf_no_vops;
1060
1061 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1062 return;
1063
1064 case SSA_NAME:
1065 case STRUCT_FIELD_TAG:
1066 case TYPE_MEMORY_TAG:
1067 case NAME_MEMORY_TAG:
1068 case VAR_DECL:
1069 case PARM_DECL:
1070 case RESULT_DECL:
1071 case CONST_DECL:
1072 {
1073 subvar_t svars;
1074
1075 /* Add the subvars for a variable if it has subvars, to DEFS or USES.
1076 Otherwise, add the variable itself.
1077 Whether it goes to USES or DEFS depends on the operand flags. */
1078 if (var_can_have_subvars (expr)
1079 && (svars = get_subvars_for_var (expr)))
1080 {
1081 subvar_t sv;
1082 for (sv = svars; sv; sv = sv->next)
1083 add_stmt_operand (&sv->var, s_ann, flags);
1084 }
1085 else
1086 {
1087 add_stmt_operand (expr_p, s_ann, flags);
1088 }
1089 return;
1090 }
1091 case MISALIGNED_INDIRECT_REF:
1092 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1093 /* fall through */
1094
1095 case ALIGN_INDIRECT_REF:
1096 case INDIRECT_REF:
1097 get_indirect_ref_operands (stmt, expr, flags);
1098 return;
1099
1100 case TARGET_MEM_REF:
1101 get_tmr_operands (stmt, expr, flags);
1102 return;
1103
1104 case ARRAY_REF:
1105 case ARRAY_RANGE_REF:
1106 /* Treat array references as references to the virtual variable
1107 representing the array. The virtual variable for an ARRAY_REF
1108 is the VAR_DECL for the array. */
1109
1110 /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
1111 according to the value of IS_DEF. Recurse if the LHS of the
1112 ARRAY_REF node is not a regular variable. */
1113 if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
1114 add_stmt_operand (expr_p, s_ann, flags);
1115 else
1116 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1117
1118 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1119 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1120 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
1121 return;
1122
1123 case COMPONENT_REF:
1124 case REALPART_EXPR:
1125 case IMAGPART_EXPR:
1126 {
1127 tree ref;
1128 HOST_WIDE_INT offset, size, maxsize;
1129 /* This component ref becomes an access to all of the subvariables
1130 it can touch, if we can determine that, but *NOT* the real one.
1131 If we can't determine which fields we could touch, the recursion
1132 will eventually get to a variable and add *all* of its subvars, or
1133 whatever is the minimum correct subset. */
1134
1135 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
1136 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
1137 {
1138 subvar_t svars = get_subvars_for_var (ref);
1139 subvar_t sv;
1140 for (sv = svars; sv; sv = sv->next)
1141 {
1142 bool exact;
1143 if (overlap_subvar (offset, maxsize, sv, &exact))
1144 {
1145 int subvar_flags = flags;
1146 if (!exact
1147 || size != maxsize)
1148 subvar_flags &= ~opf_kill_def;
1149 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1150 }
1151 }
1152 }
1153 else
1154 get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
1155 flags & ~opf_kill_def);
1156
1157 if (code == COMPONENT_REF)
1158 {
1159 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
1160 s_ann->has_volatile_ops = true;
1161 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1162 }
1163 return;
1164 }
1165 case WITH_SIZE_EXPR:
1166 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
1167 and an rvalue reference to its second argument. */
1168 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1169 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1170 return;
1171
1172 case CALL_EXPR:
1173 get_call_expr_operands (stmt, expr);
1174 return;
1175
1176 case COND_EXPR:
1177 case VEC_COND_EXPR:
1178 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1179 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1180 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1181 return;
1182
1183 case MODIFY_EXPR:
1184 {
1185 int subflags;
1186 tree op;
1187
1188 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
1189
1190 op = TREE_OPERAND (expr, 0);
1191 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1192 op = TREE_OPERAND (op, 0);
1193 if (TREE_CODE (op) == ARRAY_REF
1194 || TREE_CODE (op) == ARRAY_RANGE_REF
1195 || TREE_CODE (op) == REALPART_EXPR
1196 || TREE_CODE (op) == IMAGPART_EXPR)
1197 subflags = opf_is_def;
1198 else
1199 subflags = opf_is_def | opf_kill_def;
1200
1201 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
1202 return;
1203 }
1204
1205 case CONSTRUCTOR:
1206 {
1207 /* General aggregate CONSTRUCTORs have been decomposed, but they
1208 are still in use as the COMPLEX_EXPR equivalent for vectors. */
1209 constructor_elt *ce;
1210 unsigned HOST_WIDE_INT idx;
1211
1212 for (idx = 0;
1213 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
1214 idx++)
1215 get_expr_operands (stmt, &ce->value, opf_none);
1216
1217 return;
1218 }
1219
1220 case TRUTH_NOT_EXPR:
1221 case BIT_FIELD_REF:
1222 case VIEW_CONVERT_EXPR:
1223 do_unary:
1224 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1225 return;
1226
1227 case TRUTH_AND_EXPR:
1228 case TRUTH_OR_EXPR:
1229 case TRUTH_XOR_EXPR:
1230 case COMPOUND_EXPR:
1231 case OBJ_TYPE_REF:
1232 case ASSERT_EXPR:
1233 do_binary:
1234 {
1235 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1236 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1237 return;
1238 }
1239
1240 case REALIGN_LOAD_EXPR:
1241 {
1242 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
1243 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
1244 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
1245 return;
1246 }
1247
1248 case BLOCK:
1249 case FUNCTION_DECL:
1250 case EXC_PTR_EXPR:
1251 case FILTER_EXPR:
1252 case LABEL_DECL:
1253 /* Expressions that make no memory references. */
1254 return;
1255
1256 default:
1257 if (class == tcc_unary)
1258 goto do_unary;
1259 if (class == tcc_binary || class == tcc_comparison)
1260 goto do_binary;
1261 if (class == tcc_constant || class == tcc_type)
1262 return;
1263 }
1264
1265 /* If we get here, something has gone wrong. */
1266 #ifdef ENABLE_CHECKING
1267 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
1268 debug_tree (expr);
1269 fputs ("\n", stderr);
1270 internal_error ("internal error");
1271 #endif
1272 gcc_unreachable ();
1273 }
1274
1275
1276 /* Scan operands in ASM_EXPR statement STMT. */
1277
1278 static void
1279 get_asm_expr_operands (tree stmt)
1280 {
1281 stmt_ann_t s_ann = stmt_ann (stmt);
1282 int noutputs = list_length (ASM_OUTPUTS (stmt));
1283 const char **oconstraints
1284 = (const char **) alloca ((noutputs) * sizeof (const char *));
1285 int i;
1286 tree link;
1287 const char *constraint;
1288 bool allows_mem, allows_reg, is_inout;
1289
1290 for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
1291 {
1292 oconstraints[i] = constraint
1293 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1294 parse_output_constraint (&constraint, i, 0, 0,
1295 &allows_mem, &allows_reg, &is_inout);
1296
1297 /* This should have been split in gimplify_asm_expr. */
1298 gcc_assert (!allows_reg || !is_inout);
1299
1300 /* Memory operands are addressable. Note that STMT needs the
1301 address of this operand. */
1302 if (!allows_reg && allows_mem)
1303 {
1304 tree t = get_base_address (TREE_VALUE (link));
1305 if (t && DECL_P (t) && s_ann)
1306 add_to_addressable_set (t, &s_ann->addresses_taken);
1307 }
1308
1309 get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
1310 }
1311
1312 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1313 {
1314 constraint
1315 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1316 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1317 oconstraints, &allows_mem, &allows_reg);
1318
1319 /* Memory operands are addressable. Note that STMT needs the
1320 address of this operand. */
1321 if (!allows_reg && allows_mem)
1322 {
1323 tree t = get_base_address (TREE_VALUE (link));
1324 if (t && DECL_P (t) && s_ann)
1325 add_to_addressable_set (t, &s_ann->addresses_taken);
1326 }
1327
1328 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1329 }
1330
1331
1332 /* Clobber memory for asm ("" : : : "memory"); */
1333 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1334 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1335 {
1336 unsigned i;
1337 bitmap_iterator bi;
1338
1339 /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
1340 decided to group them). */
1341 if (global_var)
1342 add_stmt_operand (&global_var, s_ann, opf_is_def);
1343 else
1344 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
1345 {
1346 tree var = referenced_var (i);
1347 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1348 }
1349
1350 /* Now clobber all addressables. */
1351 EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
1352 {
1353 tree var = referenced_var (i);
1354
1355 /* Subvars are explicitly represented in this list, so
1356 we don't need the original to be added to the clobber
1357 ops, but the original *will* be in this list because
1358 we keep the addressability of the original
1359 variable up-to-date so we don't screw up the rest of
1360 the backend. */
1361 if (var_can_have_subvars (var)
1362 && get_subvars_for_var (var) != NULL)
1363 continue;
1364
1365 add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
1366 }
1367
1368 break;
1369 }
1370 }
1371
1372 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1373 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. */
1374
1375 static void
1376 get_indirect_ref_operands (tree stmt, tree expr, int flags)
1377 {
1378 tree *pptr = &TREE_OPERAND (expr, 0);
1379 tree ptr = *pptr;
1380 stmt_ann_t s_ann = stmt_ann (stmt);
1381
1382 /* Stores into INDIRECT_REF operands are never killing definitions. */
1383 flags &= ~opf_kill_def;
1384
1385 if (SSA_VAR_P (ptr))
1386 {
1387 struct ptr_info_def *pi = NULL;
1388
1389 /* If PTR has flow-sensitive points-to information, use it. */
1390 if (TREE_CODE (ptr) == SSA_NAME
1391 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1392 && pi->name_mem_tag)
1393 {
1394 /* PTR has its own memory tag. Use it. */
1395 add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
1396 }
1397 else
1398 {
1399 /* If PTR is not an SSA_NAME or it doesn't have a name
1400 tag, use its type memory tag. */
1401 var_ann_t v_ann;
1402
1403 /* If we are emitting debugging dumps, display a warning if
1404 PTR is an SSA_NAME with no flow-sensitive alias
1405 information. That means that we may need to compute
1406 aliasing again. */
1407 if (dump_file
1408 && TREE_CODE (ptr) == SSA_NAME
1409 && pi == NULL)
1410 {
1411 fprintf (dump_file,
1412 "NOTE: no flow-sensitive alias info for ");
1413 print_generic_expr (dump_file, ptr, dump_flags);
1414 fprintf (dump_file, " in ");
1415 print_generic_stmt (dump_file, stmt, dump_flags);
1416 }
1417
1418 if (TREE_CODE (ptr) == SSA_NAME)
1419 ptr = SSA_NAME_VAR (ptr);
1420 v_ann = var_ann (ptr);
1421 if (v_ann->type_mem_tag)
1422 add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
1423 }
1424 }
1425
1426 /* If a constant is used as a pointer, we can't generate a real
1427 operand for it but we mark the statement volatile to prevent
1428 optimizations from messing things up. */
1429 else if (TREE_CODE (ptr) == INTEGER_CST)
1430 {
1431 if (s_ann)
1432 s_ann->has_volatile_ops = true;
1433 return;
1434 }
1435
1436 /* Everything else *should* have been folded elsewhere, but users
1437 are smarter than we are at finding ways to write invalid code. We
1438 cannot just assert here. If we were absolutely certain that we
1439 do handle all valid cases, then we could just do nothing here.
1440 That seems optimistic, so attempt to do something logical... */
1441 else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
1442 && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
1443 && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
1444 {
1445 /* Make sure we know the object is addressable. */
1446 pptr = &TREE_OPERAND (ptr, 0);
1447 add_stmt_operand (pptr, s_ann, 0);
1448
1449 /* Mark the object itself with a VUSE. */
1450 pptr = &TREE_OPERAND (*pptr, 0);
1451 get_expr_operands (stmt, pptr, flags);
1452 return;
1453 }
1454
1455 /* OK, this isn't even is_gimple_min_invariant. Something's broken. */
1456 else
1457 gcc_unreachable ();
1458
1459 /* Add a USE operand for the base pointer. */
1460 get_expr_operands (stmt, pptr, opf_none);
1461 }
1462
1463 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1464
1465 static void
1466 get_tmr_operands (tree stmt, tree expr, int flags)
1467 {
1468 tree tag = TMR_TAG (expr), ref;
1469 HOST_WIDE_INT offset, size, maxsize;
1470 subvar_t svars, sv;
1471 stmt_ann_t s_ann = stmt_ann (stmt);
1472
1473 /* First record the real operands. */
1474 get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
1475 get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
1476
1477 /* MEM_REFs should never be killing. */
1478 flags &= ~opf_kill_def;
1479
1480 if (TMR_SYMBOL (expr))
1481 {
1482 stmt_ann_t ann = stmt_ann (stmt);
1483 add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
1484 }
1485
1486 if (!tag)
1487 {
1488 /* Something weird, so ensure that we will be careful. */
1489 stmt_ann (stmt)->has_volatile_ops = true;
1490 return;
1491 }
1492
1493 if (DECL_P (tag))
1494 {
1495 get_expr_operands (stmt, &tag, flags);
1496 return;
1497 }
1498
1499 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1500 gcc_assert (ref != NULL_TREE);
1501 svars = get_subvars_for_var (ref);
1502 for (sv = svars; sv; sv = sv->next)
1503 {
1504 bool exact;
1505 if (overlap_subvar (offset, maxsize, sv, &exact))
1506 {
1507 int subvar_flags = flags;
1508 if (!exact || size != maxsize)
1509 subvar_flags &= ~opf_kill_def;
1510 add_stmt_operand (&sv->var, s_ann, subvar_flags);
1511 }
1512 }
1513 }
1514
1515 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1516
1517 static void
1518 get_call_expr_operands (tree stmt, tree expr)
1519 {
1520 tree op;
1521 int call_flags = call_expr_flags (expr);
1522
1523 /* If aliases have been computed already, add V_MAY_DEF or V_USE
1524 operands for all the symbols that have been found to be
1525 call-clobbered.
1526
1527 Note that if aliases have not been computed, the global effects
1528 of calls will not be included in the SSA web. This is fine
1529 because no optimizer should run before aliases have been
1530 computed. By not bothering with virtual operands for CALL_EXPRs
1531 we avoid adding superfluous virtual operands, which can be a
1532 significant compile time sink (See PR 15855). */
1533 if (aliases_computed_p
1534 && !bitmap_empty_p (call_clobbered_vars)
1535 && !(call_flags & ECF_NOVOPS))
1536 {
1537 /* A 'pure' or a 'const' function never call-clobbers anything.
1538 A 'noreturn' function might, but since we don't return anyway
1539 there is no point in recording that. */
1540 if (TREE_SIDE_EFFECTS (expr)
1541 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1542 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1543 else if (!(call_flags & ECF_CONST))
1544 add_call_read_ops (stmt);
1545 }
1546
1547 /* Find uses in the called function. */
1548 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
1549
1550 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1551 get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
1552
1553 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
1554
1555 }
1556
1557
1558 /* Add *VAR_P to the appropriate operand array of the statement with
1559 annotation S_ANN. FLAGS is as in get_expr_operands. If *VAR_P is a
1560 GIMPLE register, it will be added to the statement's real operands,
1561 otherwise it is added to the virtual operands. */
1562
1563 static void
1564 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1565 {
1566 bool is_real_op;
1567 tree var, sym;
1568 var_ann_t v_ann;
1569
1570 var = *var_p;
1571 STRIP_NOPS (var);
1572
1573 /* If the operand is an ADDR_EXPR, add its operand to the list of
1574 variables that have had their address taken in this statement. */
1575 if (TREE_CODE (var) == ADDR_EXPR && s_ann)
1576 {
1577 add_to_addressable_set (TREE_OPERAND (var, 0), &s_ann->addresses_taken);
1578 return;
1579 }
1580
1581 /* If the original variable is not a scalar, it will be added to the list
1582 of virtual operands. In that case, use its base symbol as the virtual
1583 variable representing it. */
1584 is_real_op = is_gimple_reg (var);
1585 if (!is_real_op && !DECL_P (var))
1586 var = get_virtual_var (var);
1587
1588 /* If VAR is not a variable that we care to optimize, do nothing. */
1589 if (var == NULL_TREE || !SSA_VAR_P (var))
1590 return;
1591
1592 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1593 v_ann = var_ann (sym);
1594
1595 /* Mark statements with volatile operands. Optimizers should back
1596 off from statements having volatile operands. */
1597 if (TREE_THIS_VOLATILE (sym) && s_ann)
1598 s_ann->has_volatile_ops = true;
1599
1600 /* If the variable cannot be modified and this is a V_MAY_DEF change
1601 it into a VUSE. This happens when read-only variables are marked
1602 call-clobbered and/or aliased to writable variables. So we only
1603 check that this only happens on non-specific stores.
1604
1605 Note that if this is a specific store, i.e. associated with a
1606 modify_expr, then we can't suppress the V_DEF, lest we run into
1607 validation problems.
1608
1609 This can happen when programs cast away const, leaving us with a
1610 store to read-only memory. If the statement is actually executed
1611 at runtime, then the program is ill formed. If the statement is
1612 not executed then all is well. At the very least, we cannot ICE. */
1613 if ((flags & opf_non_specific) && unmodifiable_var_p (var))
1614 {
1615 gcc_assert (!is_real_op);
1616 flags &= ~(opf_is_def | opf_kill_def);
1617 }
1618
1619 if (is_real_op)
1620 {
1621 /* The variable is a GIMPLE register. Add it to real operands. */
1622 if (flags & opf_is_def)
1623 append_def (var_p);
1624 else
1625 append_use (var_p);
1626 }
1627 else
1628 {
1629 VEC(tree,gc) *aliases;
1630
1631 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1632 virtual operands, unless the caller has specifically requested
1633 not to add virtual operands (used when adding operands inside an
1634 ADDR_EXPR expression). */
1635 if (flags & opf_no_vops)
1636 return;
1637
1638 aliases = v_ann->may_aliases;
1639
1640 if (aliases == NULL)
1641 {
1642 /* The variable is not aliased or it is an alias tag. */
1643 if (flags & opf_is_def)
1644 {
1645 if (flags & opf_kill_def)
1646 {
1647 /* Only regular variables or struct fields may get a
1648 V_MUST_DEF operand. */
1649 gcc_assert (!MTAG_P (var)
1650 || TREE_CODE (var) == STRUCT_FIELD_TAG);
1651 /* V_MUST_DEF for non-aliased, non-GIMPLE register
1652 variable definitions. */
1653 append_v_must_def (var);
1654 }
1655 else
1656 {
1657 /* Add a V_MAY_DEF for call-clobbered variables and
1658 memory tags. */
1659 append_v_may_def (var);
1660 }
1661 }
1662 else
1663 append_vuse (var);
1664 }
1665 else
1666 {
1667 unsigned i;
1668 tree al;
1669
1670 /* The variable is aliased. Add its aliases to the virtual
1671 operands. */
1672 gcc_assert (VEC_length (tree, aliases) != 0);
1673
1674 if (flags & opf_is_def)
1675 {
1676 /* If the variable is also an alias tag, add a virtual
1677 operand for it, otherwise we will miss representing
1678 references to the members of the variable's alias set.
1679 This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
1680 if (v_ann->is_alias_tag)
1681 append_v_may_def (var);
1682
1683 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1684 append_v_may_def (al);
1685 }
1686 else
1687 {
1688 /* Similarly, append a virtual use for VAR itself, when
1689 it is an alias tag. */
1690 if (v_ann->is_alias_tag)
1691 append_vuse (var);
1692
1693 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1694 append_vuse (al);
1695 }
1696 }
1697 }
1698 }
1699
1700
1701 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
1702 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
1703 a single variable whose address has been taken or any other valid
1704 GIMPLE memory reference (structure reference, array, etc). If the
1705 base address of REF is a decl that has sub-variables, also add all
1706 of its sub-variables. */
1707
1708 void
1709 add_to_addressable_set (tree ref, bitmap *addresses_taken)
1710 {
1711 tree var;
1712 subvar_t svars;
1713
1714 gcc_assert (addresses_taken);
1715
1716 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
1717 as the only thing we take the address of. If VAR is a structure,
1718 taking the address of a field means that the whole structure may
1719 be referenced using pointer arithmetic. See PR 21407 and the
1720 ensuing mailing list discussion. */
1721 var = get_base_address (ref);
1722 if (var && SSA_VAR_P (var))
1723 {
1724 if (*addresses_taken == NULL)
1725 *addresses_taken = BITMAP_GGC_ALLOC ();
1726
1727 if (var_can_have_subvars (var)
1728 && (svars = get_subvars_for_var (var)))
1729 {
1730 subvar_t sv;
1731 for (sv = svars; sv; sv = sv->next)
1732 {
1733 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
1734 TREE_ADDRESSABLE (sv->var) = 1;
1735 }
1736 }
1737 else
1738 {
1739 bitmap_set_bit (*addresses_taken, DECL_UID (var));
1740 TREE_ADDRESSABLE (var) = 1;
1741 }
1742 }
1743 }
1744
1745
1746 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1747 clobbered variables in the function. */
1748
1749 static void
1750 add_call_clobber_ops (tree stmt, tree callee)
1751 {
1752 unsigned u;
1753 tree t;
1754 bitmap_iterator bi;
1755 stmt_ann_t s_ann = stmt_ann (stmt);
1756 struct stmt_ann_d empty_ann;
1757 bitmap not_read_b, not_written_b;
1758
1759 /* Functions that are not const, pure or never return may clobber
1760 call-clobbered variables. */
1761 if (s_ann)
1762 s_ann->makes_clobbering_call = true;
1763
1764 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1765 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1766 if (global_var)
1767 {
1768 add_stmt_operand (&global_var, s_ann, opf_is_def);
1769 return;
1770 }
1771
1772 /* FIXME - if we have better information from the static vars
1773 analysis, we need to make the cache call site specific. This way
1774 we can have the performance benefits even if we are doing good
1775 optimization. */
1776
1777 /* Get info for local and module level statics. There is a bit
1778 set for each static if the call being processed does not read
1779 or write that variable. */
1780
1781 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1782 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1783
1784 /* If cache is valid, copy the elements into the build vectors. */
1785 if (ssa_call_clobbered_cache_valid
1786 && (!not_read_b || bitmap_empty_p (not_read_b))
1787 && (!not_written_b || bitmap_empty_p (not_written_b)))
1788 {
1789 for (u = 0 ; u < VEC_length (tree, clobbered_vuses); u++)
1790 {
1791 t = VEC_index (tree, clobbered_vuses, u);
1792 gcc_assert (TREE_CODE (t) != SSA_NAME);
1793 var_ann (t)->in_vuse_list = 1;
1794 VEC_safe_push (tree, heap, build_vuses, (tree)t);
1795 }
1796 for (u = 0; u < VEC_length (tree, clobbered_v_may_defs); u++)
1797 {
1798 t = VEC_index (tree, clobbered_v_may_defs, u);
1799 gcc_assert (TREE_CODE (t) != SSA_NAME);
1800 var_ann (t)->in_v_may_def_list = 1;
1801 VEC_safe_push (tree, heap, build_v_may_defs, (tree)t);
1802 }
1803 return;
1804 }
1805
1806 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
1807
1808 /* Add a V_MAY_DEF operand for every call clobbered variable. */
1809 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1810 {
1811 tree var = referenced_var (u);
1812 if (unmodifiable_var_p (var))
1813 add_stmt_operand (&var, &empty_ann, opf_none);
1814 else
1815 {
1816 bool not_read
1817 = not_read_b ? bitmap_bit_p (not_read_b, u) : false;
1818 bool not_written
1819 = not_written_b ? bitmap_bit_p (not_written_b, u) : false;
1820
1821 if (not_written)
1822 {
1823 if (!not_read)
1824 add_stmt_operand (&var, &empty_ann, opf_none);
1825 }
1826 else
1827 add_stmt_operand (&var, &empty_ann, opf_is_def);
1828 }
1829 }
1830
1831 if ((!not_read_b || bitmap_empty_p (not_read_b))
1832 && (!not_written_b || bitmap_empty_p (not_written_b)))
1833 {
1834 /* Prepare empty cache vectors. */
1835 VEC_truncate (tree, clobbered_vuses, 0);
1836 VEC_truncate (tree, clobbered_v_may_defs, 0);
1837
1838 /* Now fill the clobbered cache with the values that have been found. */
1839 for (u = 0; u < VEC_length (tree, build_vuses); u++)
1840 VEC_safe_push (tree, heap, clobbered_vuses,
1841 VEC_index (tree, build_vuses, u));
1842
1843 gcc_assert (VEC_length (tree, build_vuses)
1844 == VEC_length (tree, clobbered_vuses));
1845
1846 for (u = 0; u < VEC_length (tree, build_v_may_defs); u++)
1847 VEC_safe_push (tree, heap, clobbered_v_may_defs,
1848 VEC_index (tree, build_v_may_defs, u));
1849
1850 gcc_assert (VEC_length (tree, build_v_may_defs)
1851 == VEC_length (tree, clobbered_v_may_defs));
1852
1853 ssa_call_clobbered_cache_valid = true;
1854 }
1855 }
1856
1857
1858 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1859 function. */
1860
1861 static void
1862 add_call_read_ops (tree stmt)
1863 {
1864 unsigned u;
1865 tree t;
1866 bitmap_iterator bi;
1867 stmt_ann_t s_ann = stmt_ann (stmt);
1868 struct stmt_ann_d empty_ann;
1869
1870 /* If the function is not pure, it may reference memory. Add
1871 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1872 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1873 if (global_var)
1874 {
1875 add_stmt_operand (&global_var, s_ann, opf_none);
1876 return;
1877 }
1878
1879 /* If cache is valid, copy the elements into the build vector. */
1880 if (ssa_ro_call_cache_valid)
1881 {
1882 for (u = 0; u < VEC_length (tree, ro_call_vuses); u++)
1883 {
1884 t = VEC_index (tree, ro_call_vuses, u);
1885 gcc_assert (TREE_CODE (t) != SSA_NAME);
1886 var_ann (t)->in_vuse_list = 1;
1887 VEC_safe_push (tree, heap, build_vuses, (tree)t);
1888 }
1889 return;
1890 }
1891
1892 memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
1893
1894 /* Add a VUSE for each call-clobbered variable. */
1895 EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
1896 {
1897 tree var = referenced_var (u);
1898 add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
1899 }
1900
1901 /* Prepare empty cache vectors. */
1902 VEC_truncate (tree, ro_call_vuses, 0);
1903
1904 /* Now fill the read-only call cache with the values that have been found. */
1905 for (u = 0; u < VEC_length (tree, build_vuses); u++)
1906 VEC_safe_push (tree, heap, ro_call_vuses,
1907 VEC_index (tree, build_vuses, u));
1908
1909 gcc_assert (VEC_length (tree, build_vuses)
1910 == VEC_length (tree, ro_call_vuses));
1911
1912 ssa_ro_call_cache_valid = true;
1913 }
1914
1915
1916 /* Scan the immediate_use list for VAR, making sure it is linked properly.
1917 Return TRUE if there is a problem. */
1918
1919 bool
1920 verify_imm_links (FILE *f, tree var)
1921 {
1922 use_operand_p ptr, prev, list;
1923 int count;
1924
1925 gcc_assert (TREE_CODE (var) == SSA_NAME);
1926
1927 list = &(SSA_NAME_IMM_USE_NODE (var));
1928 gcc_assert (list->use == NULL);
1929
1930 if (list->prev == NULL)
1931 {
1932 gcc_assert (list->next == NULL);
1933 return false;
1934 }
1935
1936 prev = list;
1937 count = 0;
1938 for (ptr = list->next; ptr != list; )
1939 {
1940 if (prev != ptr->prev)
1941 goto error;
1942
1943 if (ptr->use == NULL)
1944 goto error; /* 2 roots, or SAFE guard node. */
1945 else if (*(ptr->use) != var)
1946 goto error;
1947
1948 prev = ptr;
1949 ptr = ptr->next;
1950 /* Avoid infinite loops. 50,000,000 uses probably indicates a problem. */
1951 if (count++ > 50000000)
1952 goto error;
1953 }
1954
1955 /* Verify list in the other direction. */
1956 prev = list;
1957 for (ptr = list->prev; ptr != list; )
1958 {
1959 if (prev != ptr->next)
1960 goto error;
1961 prev = ptr;
1962 ptr = ptr->prev;
1963 if (count-- < 0)
1964 goto error;
1965 }
1966
1967 if (count != 0)
1968 goto error;
1969
1970 return false;
1971
1972 error:
1973 if (ptr->stmt && stmt_modified_p (ptr->stmt))
1974 {
1975 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
1976 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
1977 }
1978 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
1979 (void *)ptr->use);
1980 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
1981 fprintf(f, "\n");
1982 return true;
1983 }
1984
1985
1986 /* Dump all the immediate uses of VAR to FILE. */
1987
1988 void
1989 dump_immediate_uses_for (FILE *file, tree var)
1990 {
1991 imm_use_iterator iter;
1992 use_operand_p use_p;
1993
1994 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
1995
1996 print_generic_expr (file, var, TDF_SLIM);
1997 fprintf (file, " : -->");
1998 if (has_zero_uses (var))
1999 fprintf (file, " no uses.\n");
2000 else
2001 if (has_single_use (var))
2002 fprintf (file, " single use.\n");
2003 else
2004 fprintf (file, "%d uses.\n", num_imm_uses (var));
2005
2006 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2007 {
2008 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2009 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS);
2010 else
2011 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2012 }
2013 fprintf(file, "\n");
2014 }
2015
2016 /* Dump all the immediate uses to FILE. */
2017
2018 void
2019 dump_immediate_uses (FILE *file)
2020 {
2021 tree var;
2022 unsigned int x;
2023
2024 fprintf (file, "Immediate_uses: \n\n");
2025 for (x = 1; x < num_ssa_names; x++)
2026 {
2027 var = ssa_name(x);
2028 if (!var)
2029 continue;
2030 dump_immediate_uses_for (file, var);
2031 }
2032 }
2033
2034
2035 /* Dump def-use edges on stderr. */
2036
2037 void
2038 debug_immediate_uses (void)
2039 {
2040 dump_immediate_uses (stderr);
2041 }
2042
2043 /* Dump def-use edges for VAR on stderr. */
2044
2045 void
2046 debug_immediate_uses_for (tree var)
2047 {
2048 dump_immediate_uses_for (stderr, var);
2049 }
2050 #include "gt-tree-ssa-operands.h"