/* SSA operands management for trees.
   Copyright (C) 2003-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "gimple-pretty-print.h"
#include "tree-ssa.h"
#include "tree-inline.h"
#include "timevar.h"
#include "dumpfile.h"
#include "ggc.h"
#include "langhooks.h"
#include "diagnostic-core.h"

/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may or may not occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt
   tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   I.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
   operand vector for VUSE, then the new vector will also be modified
   such that it contains 'a_5' rather than 'a'.  */
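
/* Illustrative only (not part of this file's logic): a pass that consumes
   the cache built below typically walks it with the SSA operand iterators.
   A minimal sketch, assuming OLD_NAME and NEW_NAME are SSA_NAMEs the pass
   already holds:

     use_operand_p use_p;
     ssa_op_iter iter;

     FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
       if (USE_FROM_PTR (use_p) == old_name)
         SET_USE (use_p, new_name);
     update_stmt (stmt);

   SET_USE works precisely because real operands are pointers into the
   stmt, as described above.  */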

/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use 0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def (1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops (1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of MODIFY_EXPR from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit (1 << 2)

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Array for building all the use operands.  */
static vec<tree> build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

static void get_expr_operands (gimple, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Accessor to tree-ssa-operands.c caches.  */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
{
  return &fun->gimple_df->ssa_operands;
}


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (struct function *fun)
{
  if (fun == NULL)
    return false;

  return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
}


/* Create the VOP variable, an artificial global variable to act as a
   representative of all of the virtual operand FUD chain.  */

static void
create_vop_var (struct function *fn)
{
  tree global_var;

  gcc_assert (fn->gimple_df->vop == NULL_TREE);

  global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
			   get_identifier (".MEM"),
			   void_type_node);
  DECL_ARTIFICIAL (global_var) = 1;
  TREE_READONLY (global_var) = 0;
  DECL_EXTERNAL (global_var) = 1;
  TREE_STATIC (global_var) = 1;
  TREE_USED (global_var) = 1;
  DECL_CONTEXT (global_var) = NULL_TREE;
  TREE_THIS_VOLATILE (global_var) = 0;
  TREE_ADDRESSABLE (global_var) = 0;
  VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;

  fn->gimple_df->vop = global_var;
}

/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operand space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_1	(1024 - sizeof (void *))
#define OP_SIZE_2	(1024 * 4 - sizeof (void *))
#define OP_SIZE_3	(1024 * 16 - sizeof (void *))
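
/* A worked check of the comment above, assuming a 64-bit host and the
   usual struct layouts: a use_optype_d is five pointers (a next link
   plus the four pointer-sized fields of its embedded ssa_use_operand_t),
   i.e. 40 bytes, so OP_SIZE_1 holds (1024 - 8) / 40 = 25 use operands;
   the historic two-pointer def operand record gave (1024 - 8) / 16 = 63.
   The sizeof (void *) subtraction leaves room for the 'next' pointer
   heading each ssa_operand_memory_d chunk, keeping each allocation at
   exactly 1, 4 or 16 kB.  */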

/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  if (!n_initialized++)
    {
      build_uses.create (10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  gimple_ssa_operands (fn)->operand_memory_index
    = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;

  if (!--n_initialized)
    {
      build_uses.release ();
      build_vdef = NULL_TREE;
      build_vuse = NULL_TREE;
    }

  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
	= gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  cfun->gimple_df->vop = NULL_TREE;
}

/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  gcc_assert (size == sizeof (struct use_optype_d));

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      switch (gimple_ssa_operands (cfun)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}

      ptr = ggc_alloc_ssa_operand_memory_d (sizeof (void *)
			+ gimple_ssa_operands (cfun)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }

  ptr = &(gimple_ssa_operands (cfun)->operand_memory
	  ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
	= gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
	  ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}

/* Adds OP to the list of uses of statement STMT after LAST.  */

static inline use_optype_p
add_use_op (gimple stmt, tree *op, use_optype_p last)
{
  use_optype_p new_use;

  new_use = alloc_use ();
  USE_OP_PTR (new_use)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
  last->next = new_use;
  new_use->next = NULL;
  return new_use;
}

/* Take the virtual definition built in build_vdef and turn it into the
   VDEF operand of STMT.  (Real defs are not cached as operands; see
   add_stmt_operand.)  */

static inline void
finalize_ssa_defs (gimple stmt)
{
  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      cfun->gimple_df->rename_vops = 1;
      cfun->gimple_df->ssa_renaming_needed = 1;
    }
}
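
/* For illustration (hypothetical GIMPLE dump, not from this file): if
   STMT was the store "*p_1 = x_2;" and its old cache read

     # .MEM_5 = VDEF <.MEM_4>
     *p_1 = x_2;

   then re-finalizing leaves .MEM_5 in place, because the rebuilt VDEF
   has the same base variable .MEM; the SSA version is preserved just as
   the file header describes for 'a_5' vs 'a'.  */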

/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses vec of tree *.  */

static inline void
finalize_ssa_uses (gimple stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      build_uses.safe_insert (0, (tree)gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (cfun));
      cfun->gimple_df->rename_vops = 1;
      cfun->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = (tree *) build_uses[new_i];
      last = add_use_op (stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}

/* Clear the VDEF/VUSE build state and empty the build_uses array.  */

static inline void
cleanup_build_arrays (void)
{
  build_vdef = NULL_TREE;
  build_vuse = NULL_TREE;
  build_uses.truncate (0);
}


/* Finalize all the build vectors and install the results on STMT.  */

static inline void
finalize_ssa_stmt_operands (gimple stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  cleanup_build_arrays ();
}


/* Start the process of building up operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (build_uses.length () == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}

/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  build_uses.safe_push ((tree) use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  if (!optimize)
    return;

  gcc_assert ((build_vdef == NULL_TREE
	       || build_vdef == var)
	      && (build_vuse == NULL_TREE
		  || build_vuse == var));

  build_vdef = var;
  build_vuse = var;
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  if (!optimize)
    return;

  gcc_assert (build_vuse == NULL_TREE
	      || build_vuse == var);

  build_vuse = var;
}

/* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */

static void
add_virtual_operand (gimple stmt ATTRIBUTE_UNUSED, int flags)
{
  /* Add virtual operands to the stmt, unless the caller has specifically
     requested not to do that (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  gcc_assert (!is_gimple_debug (stmt));

  if (flags & opf_def)
    append_vdef (gimple_vop (cfun));
  else
    append_vuse (gimple_vop (cfun));
}

/* Add *VAR_P to the appropriate operand array for statement STMT.
   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
   it will be added to the statement's real operands, otherwise it is
   added to virtual operands.  */

static void
add_stmt_operand (tree *var_p, gimple stmt, int flags)
{
  tree var = *var_p;

  gcc_assert (SSA_VAR_P (*var_p));

  if (is_gimple_reg (var))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.
	 Note that real defs are not cached as operands, only uses
	 are; defs are found directly in the stmt.  */
      if (flags & opf_def)
	;
      else
	append_use (var_p);
      if (DECL_P (*var_p))
	cfun->gimple_df->ssa_renaming_needed = 1;
    }
  else
    {
      /* Mark statements with volatile operands.  */
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (var))
	gimple_set_has_volatile_ops (stmt, true);

      /* The variable is a memory access.  Add virtual operands.  */
      add_virtual_operand (stmt, flags);
    }
}

/* Mark the base address of REF as having its address taken.
   REF may be a single variable whose address has been taken or any
   other valid GIMPLE memory reference (structure reference, array,
   etc).  */

static void
mark_address_taken (tree ref)
{
  tree var;

  /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
     as the only thing we take the address of.  If VAR is a structure,
     taking the address of a field means that the whole structure may
     be referenced using pointer arithmetic.  See PR 21407 and the
     ensuing mailing list discussion.  */
  var = get_base_address (ref);
  if (var)
    {
      if (DECL_P (var))
	TREE_ADDRESSABLE (var) = 1;
      else if (TREE_CODE (var) == MEM_REF
	       && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
	TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
    }
}
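
/* To make the PR 21407 point concrete (illustrative source, not from
   this file): given

     struct S { int a; int b; } s;
     int *p = &s.b;

   the whole of 's' must become TREE_ADDRESSABLE, not just the field
   's.b', because the middle end has to assume pointer arithmetic
   starting from &s.b may reach other parts of 's'.  */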

/* A subroutine of get_expr_operands to handle MEM_REF.

   STMT is the statement being processed, EXPR is the MEM_REF
   that got us here.

   FLAGS is as in get_expr_operands.  */

static void
get_indirect_ref_operands (gimple stmt, tree expr, int flags)
{
  tree *pptr = &TREE_OPERAND (expr, 0);

  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* Add the VOP.  */
  add_virtual_operand (stmt, flags);

  /* If requested, add a USE operand for the base pointer.  */
  get_expr_operands (stmt, pptr,
		     opf_non_addressable | opf_use
		     | (flags & (opf_no_vops|opf_not_non_addressable)));
}

/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (gimple stmt, tree expr, int flags)
{
  if (!(flags & opf_no_vops)
      && TREE_THIS_VOLATILE (expr))
    gimple_set_has_volatile_ops (stmt, true);

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
  get_expr_operands (stmt, &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));

  add_virtual_operand (stmt, flags);
}

/* If STMT is a call that may clobber globals and other symbols that
   escape, add them to the VDEF/VUSE lists for it.  */

static void
maybe_add_call_vops (gimple stmt)
{
  int call_flags = gimple_call_flags (stmt);

  /* If aliases have been computed already, add VDEF or VUSE
     operands for all the symbols that have been found to be
     call-clobbered.  */
  if (!(call_flags & ECF_NOVOPS))
    {
      /* A 'pure' or a 'const' function never call-clobbers anything.
	 A 'noreturn' function might, but since we don't return anyway
	 there is no point in recording that.  */
      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
	add_virtual_operand (stmt, opf_def);
      else if (!(call_flags & ECF_CONST))
	add_virtual_operand (stmt, opf_use);
    }
}
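
/* Illustrative examples of the classification above (hypothetical
   callees, for exposition only): for "x = pure_fn (y);" only a VUSE of
   .MEM is added; for "x = const_fn (y);" neither a VUSE nor a VDEF is
   added; an ordinary call such as "other_fn (y);" gets a VDEF (and,
   via append_vdef, a VUSE as well) because it may both read and
   clobber memory.  */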

/* Scan operands in the GIMPLE_ASM statement STMT.  */

static void
get_asm_expr_operands (gimple stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
			       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link),
			 opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
			      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (stmt, opf_def);
}

/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (gimple stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* If the address is invariant, there may be no interesting
	 variable references inside.  */
      if (is_gimple_min_invariant (expr))
	return;

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      add_stmt_operand (expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_indirect_ref_operands (stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (!(flags & opf_no_vops)
		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P; transfer
	   the volatility to the statement.  Don't use TREE_CLOBBER_P
	   here, mirroring the other uses of TREE_THIS_VOLATILE in this
	   file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
	     idx++)
	  get_expr_operands (stmt, &ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case FMA_EXPR:
      {
	get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
#ifdef ENABLE_CHECKING
  fprintf (stderr, "unhandled expression in get_expr_operands():\n");
  debug_tree (expr);
  fputs ("\n", stderr);
#endif
  gcc_unreachable ();
}

/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (gimple stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_expr_operands (stmt);
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      append_vuse (gimple_vop (cfun));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (stmt);
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}

/* Create an operands cache for STMT.  */

static void
build_ssa_operands (gimple stmt)
{
  /* Initially assume that the statement has no volatile operands.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);
  finalize_ssa_stmt_operands (stmt);
}

/* Verifies SSA statement operands.  */

DEBUG_FUNCTION bool
verify_ssa_operands (gimple stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree use, def;
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (stmt);

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      FOR_EACH_VEC_ELT (build_uses, i, use)
	{
	  if (use_p->use == (tree *)use)
	    {
	      build_uses[i] = NULL_TREE;
	      break;
	    }
	}
      if (i == build_uses.length ())
	{
	  error ("excess use operand for stmt");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }
  FOR_EACH_VEC_ELT (build_uses, i, use)
    if (use != NULL_TREE)
      {
	error ("use operand missing for stmt");
	debug_generic_expr (*(tree *)use);
	return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}

/* Releases the operands of STMT back to their freelists, and clears
   the stmt operand lists.  */

void
free_stmt_operands (gimple stmt)
{
  use_optype_p uses = gimple_use_ops (stmt), last_use;

  if (uses)
    {
      for (last_use = uses; last_use->next; last_use = last_use->next)
	delink_imm_use (USE_OP_PTR (last_use));
      delink_imm_use (USE_OP_PTR (last_use));
      last_use->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = uses;
      gimple_set_use_ops (stmt, NULL);
    }

  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (stmt, NULL_TREE);
      gimple_set_vdef (stmt, NULL_TREE);
    }
}

/* Get the operands of statement STMT.  */

void
update_stmt_operands (gimple stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (cfun))
    return;

  timevar_push (TV_TREE_OPS);

  /* If the stmt is a noreturn call queue it to be processed by
     split_bbs_on_noreturn_calls during cfg cleanup.  */
  if (is_gimple_call (stmt)
      && gimple_call_noreturn_p (stmt))
    vec_safe_push (MODIFIED_NORETURN_CALLS (cfun), stmt);

  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
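
/* Typical caller pattern (an illustrative sketch, not from this file):
   a pass that edits a statement in place re-syncs this cache through
   the update_stmt wrapper, which marks the statement modified and then
   calls update_stmt_operands:

     gimple_assign_set_rhs1 (stmt, new_rhs);
     update_stmt (stmt);

   NEW_RHS here is a hypothetical replacement operand.  */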

/* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
   to test the validity of the swap operation.  */

void
swap_ssa_operands (gimple stmt, tree *exp0, tree *exp1)
{
  tree op0, op1;
  op0 = *exp0;
  op1 = *exp1;

  gcc_checking_assert (ssa_operands_active (cfun));

  if (op0 != op1)
    {
      /* Attempt to preserve the relative positions of these two operands in
	 their respective immediate use lists by adjusting their use pointer
	 to point to the new operand position.  */
      use_optype_p use0, use1, ptr;
      use0 = use1 = NULL;

      /* Find the 2 operands in the cache, if they are there.  */
      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp0)
	  {
	    use0 = ptr;
	    break;
	  }

      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
	if (USE_OP_PTR (ptr)->use == exp1)
	  {
	    use1 = ptr;
	    break;
	  }

      /* And adjust their location to point to the new position of the
	 operand.  */
      if (use0)
	USE_OP_PTR (use0)->use = exp1;
      if (use1)
	USE_OP_PTR (use1)->use = exp0;

      /* Now swap the data.  */
      *exp0 = op1;
      *exp1 = op0;
    }
}
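
/* For example (illustrative), canonicalizing a commutative operation
   might swap the two RHS operands of an assignment like so, using the
   operand-pointer accessors from gimple.h:

     swap_ssa_operands (stmt,
			gimple_assign_rhs1_ptr (stmt),
			gimple_assign_rhs2_ptr (stmt));

   Because the use pointers are fixed up above, the immediate-use lists
   stay valid without a full update_stmt.  */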

/* Scan the immediate_use list for VAR making sure it is linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      gcc_assert (list->next == NULL);
      return false;
    }

  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	goto error;

      if (ptr->use == NULL)
	goto error; /* 2 roots, or SAFE guard node.  */
      else if (*(ptr->use) != var)
	goto error;

      prev = ptr;
      ptr = ptr->next;

      /* Avoid infinite loops.  50,000,000 uses probably indicates a
	 problem.  */
      if (count++ > 50000000)
	goto error;
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	goto error;
      prev = ptr;
      ptr = ptr->prev;
      if (count-- < 0)
	goto error;
    }

  if (count != 0)
    goto error;

  return false;

 error:
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}

/* Dump all the immediate uses of VAR to FILE.  */

void
dump_immediate_uses_for (FILE *file, tree var)
{
  imm_use_iterator iter;
  use_operand_p use_p;

  gcc_assert (var && TREE_CODE (var) == SSA_NAME);

  print_generic_expr (file, var, TDF_SLIM);
  fprintf (file, " : -->");
  if (has_zero_uses (var))
    fprintf (file, " no uses.\n");
  else if (has_single_use (var))
    fprintf (file, " single use.\n");
  else
    fprintf (file, "%d uses.\n", num_imm_uses (var));

  FOR_EACH_IMM_USE_FAST (use_p, iter, var)
    {
      if (use_p->loc.stmt == NULL && use_p->use == NULL)
	fprintf (file, "***end of stmt iterator marker***\n");
      else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
	print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
      else
	print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
    }
  fprintf (file, "\n");
}

/* Dump all the immediate uses to FILE.  */

void
dump_immediate_uses (FILE *file)
{
  tree var;
  unsigned int x;

  fprintf (file, "Immediate_uses: \n\n");
  for (x = 1; x < num_ssa_names; x++)
    {
      var = ssa_name (x);
      if (!var)
	continue;
      dump_immediate_uses_for (file, var);
    }
}


/* Dump def-use edges on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}


/* Dump def-use edges for VAR on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}

/* Return true if OP, an SSA name or a DECL, is a virtual operand.  */

bool
virtual_operand_p (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      op = SSA_NAME_VAR (op);
      if (!op)
	return false;
    }

  if (TREE_CODE (op) == VAR_DECL)
    return VAR_DECL_IS_VIRTUAL_OPERAND (op);

  return false;
}

/* Unlink STMT's virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, vuse);
    }

  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
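
/* Typical caller pattern when deleting a dead store (illustrative
   sketch, assuming GSI is a gimple_stmt_iterator at STMT):

     unlink_stmt_vdef (stmt);
     gsi_remove (&gsi, true);
     release_defs (stmt);

   Propagating the VUSE over the removed VDEF first keeps the virtual
   FUD chain intact for the remaining statements.  */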

/* Return true if the var whose chain of uses starts at HEAD has no
   nondebug uses.  */
bool
has_zero_uses_1 (const ssa_use_operand_t *head)
{
  const ssa_use_operand_t *ptr;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      return false;

  return true;
}


/* Return true if the var whose chain of uses starts at HEAD has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
		  use_operand_p *use_p, gimple *stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      {
	if (single_use)
	  {
	    single_use = NULL;
	    break;
	  }
	single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return single_use;
}