/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "langhooks.h"
#include "diagnostic-core.h"


/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines, which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */

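/* As an illustration (a sketch, not part of the original sources), a
   pass would typically consume the finalized cache through the
   iterators declared in tree-ssa-operands.h rather than by walking
   the stmt tree itself.  The function name below is hypothetical.  */
#if 0
static void
sketch_dump_operands (gimple stmt)
{
  ssa_op_iter iter;
  tree op;

  /* Real operands are pointers into the stmt, so writing through the
     corresponding use/def slots modifies the stmt in place.  */
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
    print_generic_expr (stderr, op, TDF_SLIM);
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
    print_generic_expr (stderr, op, TDF_SLIM);

  /* Virtual operands: at most one VUSE and one VDEF per stmt.  */
  if (gimple_vuse (stmt))
    print_generic_expr (stderr, gimple_vuse (stmt), TDF_SLIM);
  if (gimple_vdef (stmt))
    print_generic_expr (stderr, gimple_vdef (stmt), TDF_SLIM);
}
#endif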

/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def (1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit (1 << 2)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our data structures, which needs to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (const_tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  /* This function may be invoked from contexts where CFUN is NULL
     (IPA passes); return false for now.  FIXME: operands may be
     active in each individual function, maybe this function should
     take CFUN as a parameter.  */
  if (cfun == NULL)
    return false;

  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}


/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operands' FUD (factored
   use-def) chain.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
                           get_identifier (".MEM"),
                           void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  create_var_ann (global_var);
  add_referenced_var_1 (global_var, fn);
  fn->gimple_df->vop = global_var;
}
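
/* In the gimple dumps, the single .MEM variable created here carries
   the virtual use-def chain; e.g. a store followed by a load appears
   (with virtual operands displayed) roughly as:

     # .MEM_4 = VDEF <.MEM_3>
     a = 1;
     # VUSE <.MEM_4>
     tmp_5 = a;

   (The SSA version numbers above are, of course, just an example.)  */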

/* These are the sizes, in bytes, of the operand memory buffer that is
   allocated each time more operand space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT 0
#define OP_SIZE_1 (1024 - sizeof (void *))
#define OP_SIZE_2 (1024 * 4 - sizeof (void *))
#define OP_SIZE_3 (1024 * 16 - sizeof (void *))

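/* A quick sanity check of those numbers on a 64-bit host (purely
   illustrative; exact struct sizes depend on the target): struct
   use_optype_d is 5 pointers (a next link plus the 4-pointer
   ssa_use_operand_t), i.e. 40 bytes, and struct def_optype_d is
   2 pointers, i.e. 16 bytes.  OP_SIZE_1 leaves 1024 - 8 = 1016 bytes
   of payload, so 1016 / 40 = 25 uses or 1016 / 16 = 63 defs.  */
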
/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  gimple_ssa_operands (fn)->operand_memory_index
    = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
        = gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  cfun->gimple_df->vop = NULL_TREE;
}


/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d)
	      || size == sizeof (struct def_optype_d));

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}


      ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *)
                        + gimple_ssa_operands (cfun)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
	  ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}


/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
	= gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
	  ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
	= gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
	  ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}


/* Adds OP to the list of defs after LAST.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new_def;

  new_def = alloc_def ();
  DEF_OP_PTR (new_def) = op;
  last->next = new_def;
  new_def->next = NULL;
  return new_def;
}


/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use ();
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}
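
/* The link_imm_use_stmt call above is what threads a new use onto the
   immediate-use list of its SSA name.  A sketch (illustrative only;
   the function name is hypothetical) of walking that list later:  */
#if 0
static void
sketch_walk_imm_uses (tree name)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  /* Visit every use operand currently linked to NAME; the list is
     maintained by add_use_op/delink_imm_use as stmts are rebuilt.  */
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    print_gimple_stmt (stderr, USE_STMT (use_p), 0, TDF_SLIM);
}
#endif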



/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);

  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
      VEC_safe_insert (tree, heap, build_defs, 0, (tree)gimple_vdef_ptr (stmt));
      ++num;
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_def_ops (stmt);

  new_i = 0;

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    mark_sym_for_renaming (gimple_vdef (stmt));

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  gimple_set_def_ops (stmt, new_list.next);
}


/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      mark_sym_for_renaming (gimple_vop (cfun));
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
		       (tree *) VEC_index (tree, build_uses, new_i),
		       last);

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}


/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
}


/* Finalize all the build vectors, fill the new ones into STMT.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}


/* Start the process of building up operands vectors for a stmt.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}


/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  if (!optimize)
    return;

  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  if (!optimize)
    return;

  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}

/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (cfun));
  else
    append_vuse (gimple_vop (cfun));
}


/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var, sym;

  gcc_assert (SSA_VAR_P (*var_p));

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);

  /* Mark statements with volatile operands.  */
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (sym))
    gimple_set_has_volatile_ops (stmt, true);

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    add_virtual_operand (stmt, flags);
}
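
/* For example (purely illustrative): given the assignment 'a = b + 1'
   where 'a' and 'b' are GIMPLE registers, this yields a real DEF of
   'a' and a real USE of 'b'; if 'a' were a global (not a register),
   the DEF would instead become a VDEF of the single .MEM variable via
   add_virtual_operand.  */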

/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
	TREE_ADDRESSABLE (var) = 1;
      else if (TREE_CODE (var) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
	TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}


/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
   that got us here.

   FLAGS is as in get_expr_operands.

   RECURSE_ON_BASE should be set to true if we want to continue
   calling get_expr_operands on the base pointer, and false if
   something else will do it for us.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags,
			   bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr,
		       opf_non_addressable | opf_use
		       | (flags & (opf_no_vops|opf_not_non_addressable)));
}
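
/* So for a store through a pointer, say '*p_1 = x_2', this produces a
   VDEF of .MEM (opf_def having been passed down from the LHS walk)
   plus a real USE of 'p_1' for the base pointer; a load 'x_3 = *p_1'
   produces a VUSE and the same base USE.  (Illustrative example.)  */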


/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  add_virtual_operand (stmt, flags);
}


/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_virtual_operand (stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (stmt, opf_use);
    }
}
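
/* In other words (an illustrative summary): an ordinary call gets both
   a VDEF and a VUSE of .MEM (append_vdef sets both); a 'pure' call
   only reads memory, so it gets just the VUSE; a 'const' call, and an
   ECF_NOVOPS call, touches no memory visible here and gets no virtual
   operands at all.  */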


/* Scan operands in the ASM_EXPR stmt STMT.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (stmt, opf_def);
}


/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
	 variable references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable);
      return;

    case SSA_NAME:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_indirect_ref_operands (stmt, expr, flags, true);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (!(flags & opf_no_vops)
		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P, transfer
	   the volatility to the statement, don't use TREE_CLOBBER_P for
	   mirroring the other uses of THIS_VOLATILE in this file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case FMA_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}

/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_expr_operands (stmt);
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      append_vuse (gimple_vop (cfun));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (stmt);
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}


/* Create an operands cache for STMT.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  finalize_ssa_stmt_operands (stmt);
}

/* Verifies SSA statement operands.  */

DEBUG_FUNCTION bool
verify_ssa_operands (gimple stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree use, def;
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      FOR_EACH_VEC_ELT (tree, build_uses, i, use)
	{
	  if (use_p->use == (tree *)use)
	    {
	      VEC_replace (tree, build_uses, i, NULL_TREE);
	      break;
	    }
	}
      if (i == VEC_length (tree, build_uses))
	{
	  error ("excess use operand for stmt");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }
  FOR_EACH_VEC_ELT (tree, build_uses, i, use)
    if (use != NULL_TREE)
      {
	error ("use operand missing for stmt");
	debug_generic_expr (*(tree *)use);
	return true;
      }

  FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
    {
      FOR_EACH_VEC_ELT (tree, build_defs, i, def)
	{
	  if (def_p == (tree *)def)
	    {
	      VEC_replace (tree, build_defs, i, NULL_TREE);
	      break;
	    }
	}
      if (i == VEC_length (tree, build_defs))
	{
	  error ("excess def operand for stmt");
	  debug_generic_expr (DEF_FROM_PTR (def_p));
	  return true;
	}
    }
  FOR_EACH_VEC_ELT (tree, build_defs, i, def)
    if (def != NULL_TREE)
      {
	error ("def operand missing for stmt");
	debug_generic_expr (*(tree *)def);
	return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}


/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (gimple stmt)
{
  def_optype_p defs = gimple_def_ops (stmt), last_def;
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (defs)
    {
      for (last_def = defs; last_def->next; last_def = last_def->next)
	continue;
      last_def->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = defs;
      gimple_set_def_ops (stmt, NULL);
    }

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
	delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}


/* Update the operands of statement STMT.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active ())
    return;

  timevar_push (TV_TREE_OPS);

  /* If the stmt is a noreturn call, queue it to be processed by
     split_bbs_on_noreturn_calls during cfg cleanup.  */
  if (is_gimple_call (stmt)
      && gimple_call_noreturn_p (stmt))
    VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), stmt);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
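
/* Passes do not normally call update_stmt_operands directly; the
   usual pattern (a sketch with a hypothetical function name) is to
   edit a statement and then call update_stmt, which marks it modified
   and ends up here:  */
#if 0
static void
sketch_replace_rhs (gimple stmt, tree new_rhs)
{
  /* Rewrite the RHS of an assignment, then rebuild its operand cache
     (and the immediate-use links) through update_stmt.  */
  gimple_assign_set_rhs1 (stmt, new_rhs);
  update_stmt (stmt);
}
#endif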


/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  /* If the operand cache is active, attempt to preserve the relative
     positions of these two operands in their respective immediate use
     lists by adjusting their use pointer to point to the new
     operand position.  */
  if (ssa_operands_active () && op0 != op1)
    {
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* And adjust their location to point to the new position of the
	 operand.  */
      if (use0)
	USE_OP_PTR (use0)->use = exp1;
      if (use1)
	USE_OP_PTR (use1)->use = exp0;
    }

  /* Now swap the data.  */
  *exp0 = op1;
  *exp1 = op0;
}
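
/* A typical caller (a sketch only; the function name is hypothetical)
   is operand canonicalization on a commutative operation, e.g. making
   sure the "heavier" operand ends up first:  */
#if 0
static void
sketch_canonicalize (gimple stmt)
{
  if (is_gimple_assign (stmt)
      && commutative_tree_code (gimple_assign_rhs_code (stmt))
      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
			       gimple_assign_rhs2 (stmt), true))
    swap_tree_operands (stmt,
			gimple_assign_rhs1_ptr (stmt),
			gimple_assign_rhs2_ptr (stmt));
}
#endif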


/* Scan the immediate_use list for VAR, making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}


/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else
    if (has_single_use (var))
      fprintf (file, " single use.\n");
    else
      fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
	fprintf (file, "***end of stmt iterator marker***\n");
      else
	if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
	else
	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}


/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}


/* Unlink STMT's virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, vuse);
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
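
/* The usual caller pattern (a sketch; GSI is a hypothetical iterator
   positioned at a dead store being deleted) is to bypass the stmt in
   the virtual use-def chain before removing it:  */
#if 0
static void
sketch_remove_dead_store (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Rewrite all uses of the stmt's VDEF to its VUSE, then delete the
     stmt and release its SSA definitions.  */
  unlink_stmt_vdef (stmt);
  gsi_remove (gsi, true);
  release_defs (stmt);
}
#endif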