1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53 The operand tree is then parsed by the various get_* routines, which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 4 of these routines, each representing one of the
57 4 types of operands: Defs, Uses, Virtual Uses, and Virtual May Defs.
58
59 The append_* routines check for duplication, and simply keep a list of
60 unique objects for each operand type in the build_* extendable vectors.
61
62 Once the stmt tree is completely parsed, the finalize_ssa_operands()
63 routine is called, which proceeds to perform the finalization routine
64 on each of the 4 operand vectors which have been built up.
65
66 If the stmt had a previous operand cache, the finalization routines
67 attempt to match up the new operands with the old ones. If it's a perfect
68 match, the old vector is simply reused. If it isn't a perfect match, then
69 a new vector is created and the new operands are placed there. For
70 virtual operands, if the previous cache had an SSA_NAME version of a
71 variable, and that same variable occurs in the same operands cache, then
72 the new cache vector will also get the same SSA_NAME.
73
74 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
75 vector for VUSE, then the new vector will also be modified such that
76 it contains 'a_5' rather than 'a'. */
77
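/* For illustration, consider a simple assignment like

       a = b + c;

   get_expr_operands walks the statement tree, calling append_def for
   the store to 'a' and append_use for the loads of 'b' and 'c'.  Those
   pointers accumulate in the build_defs and build_uses vectors, and
   finalize_ssa_stmt_operands then turns them into the DEF and USE
   operand lists attached to the statement.  This is only a sketch of
   the common case; virtual operands follow the same append/finalize
   pattern via append_vdef and append_vuse.  */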
78
79 /* Structure storing statistics on how many call clobbers we have, and
80 how many were avoided. */
81
82 static struct
83 {
84 /* Number of call-clobbered ops we attempt to add to calls in
85 add_call_clobbered_mem_symbols. */
86 unsigned int clobbered_vars;
87
88 /* Number of write-clobbers (VDEFs) avoided by using
89 not_written information. */
90 unsigned int static_write_clobbers_avoided;
91
92 /* Number of reads (VUSEs) avoided by using not_read information. */
93 unsigned int static_read_clobbers_avoided;
94
95 /* Number of write-clobbers avoided because the variable can't escape to
96 this call. */
97 unsigned int unescapable_clobbers_avoided;
98
99 /* Number of read-only uses we attempt to add to calls in
100 add_call_read_mem_symbols. */
101 unsigned int readonly_clobbers;
102
103 /* Number of read-only uses we avoid using not_read information. */
104 unsigned int static_readonly_clobbers_avoided;
105 } clobber_stats;
106
107
108 /* Flags to describe operand properties in helpers. */
109
110 /* By default, operands are loaded. */
111 #define opf_use 0
112
113 /* Operand is the target of an assignment expression or a
114 call-clobbered variable. */
115 #define opf_def (1 << 0)
116
117 /* No virtual operands should be created in the expression. This is used
118 when traversing ADDR_EXPR nodes which have different semantics than
119 other expressions. Inside an ADDR_EXPR node, the only operands that we
120 need to consider are indices into arrays. For instance, &a.b[i] should
121 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
122 VUSE for 'b'. */
123 #define opf_no_vops (1 << 1)
124
125 /* Operand is an implicit reference. This is used to distinguish
126 explicit assignments in the form of GIMPLE_MODIFY_STMT from
127 clobbering sites like function calls or ASM_EXPRs. */
128 #define opf_implicit (1 << 2)
129
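/* As a rough sketch of how these flags are used: the left-hand side of
   a GIMPLE_MODIFY_STMT is parsed with opf_def, its right-hand side with
   opf_use, and subexpressions found while walking an ADDR_EXPR have
   opf_no_vops ORed in so that no virtual operands are created for the
   object whose address is taken.  */
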
130 /* Array for building all the def operands. */
131 static VEC(tree,heap) *build_defs;
132
133 /* Array for building all the use operands. */
134 static VEC(tree,heap) *build_uses;
135
136 /* Set for building all the VDEF operands. */
137 static VEC(tree,heap) *build_vdefs;
138
139 /* Set for building all the VUSE operands. */
140 static VEC(tree,heap) *build_vuses;
141
142 Bitmap obstack for our data structures that need to survive across
143 compilations of multiple functions. */
144 static bitmap_obstack operands_bitmap_obstack;
145
146 /* Set for building all the loaded symbols. */
147 static bitmap build_loads;
148
149 /* Set for building all the stored symbols. */
150 static bitmap build_stores;
151
152 static void get_expr_operands (tree, tree *, int);
153
154 /* Number of functions with initialized ssa_operands. */
155 static int n_initialized = 0;
156
157 /* Statement change buffer. Data structure used to record state
158 information for statements. This is used to determine what needs
159 to be done in order to update the SSA web after a statement is
160 modified by a pass. If STMT is a statement that has just been
161 created, or needs to be folded via fold_stmt, or anything that
162 changes its physical structure, then the pass should:
163
164 1- Call push_stmt_changes (&stmt) to record the current state of
165 STMT before any modifications are made.
166
167 2- Make all appropriate modifications to the statement.
168
169 3- Call pop_stmt_changes (&stmt) to find new symbols that
170 need to be put in SSA form, SSA name mappings for names that
171 have disappeared, recompute invariantness for address
172 expressions, cleanup EH information, etc.
173
174 If it is possible to determine that the statement was not modified,
175 instead of calling pop_stmt_changes it is quicker to call
176 discard_stmt_changes to avoid the expensive and unnecessary operand
177 re-scan and change comparison. */
178
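/* In a pass, the protocol described above looks roughly like:

       push_stmt_changes (&stmt);
       ... modify the statement ...
       if (stmt_was_really_modified)
         pop_stmt_changes (&stmt);
       else
         discard_stmt_changes (&stmt);

   This is only an illustrative sketch (stmt_was_really_modified stands
   for whatever test the pass uses); see the callers of these functions
   for the exact idiom.  */
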
179 struct scb_d
180 {
181 /* Pointer to the statement being modified. */
182 tree *stmt_p;
183
184 /* If the statement references memory these are the sets of symbols
185 loaded and stored by the statement. */
186 bitmap loads;
187 bitmap stores;
188 };
189
190 typedef struct scb_d *scb_t;
191 DEF_VEC_P(scb_t);
192 DEF_VEC_ALLOC_P(scb_t,heap);
193
194 /* Stack of statement change buffers (SCB). Every call to
195 push_stmt_changes pushes a new buffer onto the stack. Calls to
196 pop_stmt_changes pop a buffer off of the stack and compute the set
197 of changes for the popped statement. */
198 static VEC(scb_t,heap) *scb_stack;
199
200 /* Return the DECL_UID of the base variable of T. */
201
202 static inline unsigned
203 get_name_decl (tree t)
204 {
205 if (TREE_CODE (t) != SSA_NAME)
206 return DECL_UID (t);
207 else
208 return DECL_UID (SSA_NAME_VAR (t));
209 }
210
211
212 /* Comparison function for qsort used in operand_build_sort_virtual. */
213
214 static int
215 operand_build_cmp (const void *p, const void *q)
216 {
217 tree e1 = *((const tree *)p);
218 tree e2 = *((const tree *)q);
219 unsigned int u1,u2;
220
221 u1 = get_name_decl (e1);
222 u2 = get_name_decl (e2);
223
224 /* We want to sort in ascending order. They can never be equal. */
225 #ifdef ENABLE_CHECKING
226 gcc_assert (u1 != u2);
227 #endif
228 return (u1 > u2 ? 1 : -1);
229 }
230
231
232 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
233
234 static inline void
235 operand_build_sort_virtual (VEC(tree,heap) *list)
236 {
237 int num = VEC_length (tree, list);
238
239 if (num < 2)
240 return;
241
242 if (num == 2)
243 {
244 if (get_name_decl (VEC_index (tree, list, 0))
245 > get_name_decl (VEC_index (tree, list, 1)))
246 {
247 /* Swap elements if in the wrong order. */
248 tree tmp = VEC_index (tree, list, 0);
249 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
250 VEC_replace (tree, list, 1, tmp);
251 }
252 return;
253 }
254
255 /* There are 3 or more elements, call qsort. */
256 qsort (VEC_address (tree, list),
257 VEC_length (tree, list),
258 sizeof (tree),
259 operand_build_cmp);
260 }
261
262
263 /* Return true if the SSA operands cache is active. */
264
265 bool
266 ssa_operands_active (void)
267 {
268 return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
269 }
270
271
272 /* VOPs are of variable size, so the free list maps "free buckets" to the
273 following table:
274 bucket # operands
275 ------ ----------
276 0 1
277 1 2
278 ...
279 15 16
280 16 17-24
281 17 25-32
282 18 33-40
283 ...
284 29 121-128
285 Any VOPs larger than this are simply added to the largest bucket when they
286 are freed. */
287
288
289 /* Return the number of operands used in bucket BUCKET. */
290
291 static inline int
292 vop_free_bucket_size (int bucket)
293 {
294 #ifdef ENABLE_CHECKING
295 gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS);
296 #endif
297 if (bucket < 16)
298 return bucket + 1;
299 return (bucket - 13) * 8;
300 }
301
302
303 /* For a vop of NUM operands, return the bucket NUM belongs to. If NUM is
304 beyond the end of the bucket table, return -1. */
305
306 static inline int
307 vop_free_bucket_index (int num)
308 {
309 gcc_assert (num > 0 && NUM_VOP_FREE_BUCKETS > 16);
310
311 /* Sizes 1 through 16 use buckets 0-15. */
312 if (num <= 16)
313 return num - 1;
314 /* Buckets 16 - NUM_VOP_FREE_BUCKETS represent 8 unit chunks. */
315 num = 14 + (num - 1) / 8;
316 if (num >= NUM_VOP_FREE_BUCKETS)
317 return -1;
318 else
319 return num;
320 }
321
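/* For example, following the table above, a VOP with 20 operands maps
   to bucket vop_free_bucket_index (20) == 16, and a VOP taken from that
   bucket provides vop_free_bucket_size (16) == 24 operand slots, i.e.
   the 17-24 range.  */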
322
323 /* Initialize the VOP free buckets. */
324
325 static inline void
326 init_vop_buckets (void)
327 {
328 int x;
329
330 for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++)
331 gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL;
332 }
333
334
335 /* Add PTR to the appropriate VOP bucket. */
336
337 static inline void
338 add_vop_to_freelist (voptype_p ptr)
339 {
340 int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev));
341
342 /* Too large, use the largest bucket so it's not a complete throwaway. */
343 if (bucket == -1)
344 bucket = NUM_VOP_FREE_BUCKETS - 1;
345
346 ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
347 gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr;
348 }
349
350
351 /* These are the sizes of the operand memory buffer which gets allocated each
352 time more operand space is required. The final value is the amount that is
353 allocated every time after that. */
354
355 #define OP_SIZE_INIT 0
356 #define OP_SIZE_1 30
357 #define OP_SIZE_2 110
358 #define OP_SIZE_3 511
359
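/* In other words (see ssa_operand_alloc below), the first chunk that is
   allocated holds OP_SIZE_1 voptype_d-sized slots, the second chunk
   OP_SIZE_2, and every chunk after that OP_SIZE_3.  */
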
360 /* Initialize the operand cache routines. */
361
362 void
363 init_ssa_operands (void)
364 {
365 if (!n_initialized++)
366 {
367 build_defs = VEC_alloc (tree, heap, 5);
368 build_uses = VEC_alloc (tree, heap, 10);
369 build_vuses = VEC_alloc (tree, heap, 25);
370 build_vdefs = VEC_alloc (tree, heap, 25);
371 bitmap_obstack_initialize (&operands_bitmap_obstack);
372 build_loads = BITMAP_ALLOC (&operands_bitmap_obstack);
373 build_stores = BITMAP_ALLOC (&operands_bitmap_obstack);
374 scb_stack = VEC_alloc (scb_t, heap, 20);
375 }
376
377 gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
378 gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
379 gimple_ssa_operands (cfun)->operand_memory_index
380 = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
381 gimple_ssa_operands (cfun)->ops_active = true;
382 memset (&clobber_stats, 0, sizeof (clobber_stats));
383 init_vop_buckets ();
384 gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
385 }
386
387
388 /* Dispose of anything required by the operand routines. */
389
390 void
391 fini_ssa_operands (void)
392 {
393 struct ssa_operand_memory_d *ptr;
394 unsigned ix;
395 tree mpt;
396
397 if (!--n_initialized)
398 {
399 VEC_free (tree, heap, build_defs);
400 VEC_free (tree, heap, build_uses);
401 VEC_free (tree, heap, build_vdefs);
402 VEC_free (tree, heap, build_vuses);
403 BITMAP_FREE (build_loads);
404 BITMAP_FREE (build_stores);
405
406 /* The change buffer stack had better be empty. */
407 gcc_assert (VEC_length (scb_t, scb_stack) == 0);
408 VEC_free (scb_t, heap, scb_stack);
409 scb_stack = NULL;
410 }
411
412 gimple_ssa_operands (cfun)->free_defs = NULL;
413 gimple_ssa_operands (cfun)->free_uses = NULL;
414
415 while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
416 {
417 gimple_ssa_operands (cfun)->operand_memory
418 = gimple_ssa_operands (cfun)->operand_memory->next;
419 ggc_free (ptr);
420 }
421
422 for (ix = 0;
423 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
424 ix++)
425 {
426 if (mpt)
427 BITMAP_FREE (MPT_SYMBOLS (mpt));
428 }
429
430 VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);
431
432 gimple_ssa_operands (cfun)->ops_active = false;
433
434 if (!n_initialized)
435 bitmap_obstack_release (&operands_bitmap_obstack);
436 if (dump_file && (dump_flags & TDF_STATS))
437 {
438 fprintf (dump_file, "Original clobbered vars: %d\n",
439 clobber_stats.clobbered_vars);
440 fprintf (dump_file, "Static write clobbers avoided: %d\n",
441 clobber_stats.static_write_clobbers_avoided);
442 fprintf (dump_file, "Static read clobbers avoided: %d\n",
443 clobber_stats.static_read_clobbers_avoided);
444 fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
445 clobber_stats.unescapable_clobbers_avoided);
446 fprintf (dump_file, "Original read-only clobbers: %d\n",
447 clobber_stats.readonly_clobbers);
448 fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
449 clobber_stats.static_readonly_clobbers_avoided);
450 }
451 }
452
453
454 /* Return a pointer to SIZE bytes of memory from the operand memory pool. */
455
456 static inline void *
457 ssa_operand_alloc (unsigned size)
458 {
459 char *ptr;
460
461 if (gimple_ssa_operands (cfun)->operand_memory_index + size
462 >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
463 {
464 struct ssa_operand_memory_d *ptr;
465
466 if (gimple_ssa_operands (cfun)->ssa_operand_mem_size == OP_SIZE_INIT)
467 gimple_ssa_operands (cfun)->ssa_operand_mem_size
468 = OP_SIZE_1 * sizeof (struct voptype_d);
469 else
470 if (gimple_ssa_operands (cfun)->ssa_operand_mem_size
471 == OP_SIZE_1 * sizeof (struct voptype_d))
472 gimple_ssa_operands (cfun)->ssa_operand_mem_size
473 = OP_SIZE_2 * sizeof (struct voptype_d);
474 else
475 gimple_ssa_operands (cfun)->ssa_operand_mem_size
476 = OP_SIZE_3 * sizeof (struct voptype_d);
477
478 /* Go right to the maximum size if the request is too large. */
479 if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size)
480 gimple_ssa_operands (cfun)->ssa_operand_mem_size
481 = OP_SIZE_3 * sizeof (struct voptype_d);
482
483 /* Fail if there is not enough space. If there are this many operands
484 required, first make sure there isn't a different problem causing this
485 many operands. If the decision is that this is OK, then we can
486 specially allocate a buffer just for this request. */
487 gcc_assert (size <= gimple_ssa_operands (cfun)->ssa_operand_mem_size);
488
489 ptr = (struct ssa_operand_memory_d *)
490 ggc_alloc (sizeof (struct ssa_operand_memory_d)
491 + gimple_ssa_operands (cfun)->ssa_operand_mem_size - 1);
492 ptr->next = gimple_ssa_operands (cfun)->operand_memory;
493 gimple_ssa_operands (cfun)->operand_memory = ptr;
494 gimple_ssa_operands (cfun)->operand_memory_index = 0;
495 }
496 ptr = &(gimple_ssa_operands (cfun)->operand_memory
497 ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
498 gimple_ssa_operands (cfun)->operand_memory_index += size;
499 return ptr;
500 }
501
502
503 /* Allocate a DEF operand. */
504
505 static inline struct def_optype_d *
506 alloc_def (void)
507 {
508 struct def_optype_d *ret;
509 if (gimple_ssa_operands (cfun)->free_defs)
510 {
511 ret = gimple_ssa_operands (cfun)->free_defs;
512 gimple_ssa_operands (cfun)->free_defs
513 = gimple_ssa_operands (cfun)->free_defs->next;
514 }
515 else
516 ret = (struct def_optype_d *)
517 ssa_operand_alloc (sizeof (struct def_optype_d));
518 return ret;
519 }
520
521
522 /* Allocate a USE operand. */
523
524 static inline struct use_optype_d *
525 alloc_use (void)
526 {
527 struct use_optype_d *ret;
528 if (gimple_ssa_operands (cfun)->free_uses)
529 {
530 ret = gimple_ssa_operands (cfun)->free_uses;
531 gimple_ssa_operands (cfun)->free_uses
532 = gimple_ssa_operands (cfun)->free_uses->next;
533 }
534 else
535 ret = (struct use_optype_d *)
536 ssa_operand_alloc (sizeof (struct use_optype_d));
537 return ret;
538 }
539
540
541 /* Allocate a vop with NUM elements. */
542
543 static inline struct voptype_d *
544 alloc_vop (int num)
545 {
546 struct voptype_d *ret = NULL;
547 int alloc_size = 0;
548
549 int bucket = vop_free_bucket_index (num);
550 if (bucket != -1)
551 {
552 /* If there is a free operand, use it. */
553 if (gimple_ssa_operands (cfun)->vop_free_buckets[bucket] != NULL)
554 {
555 ret = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
556 gimple_ssa_operands (cfun)->vop_free_buckets[bucket] =
557 gimple_ssa_operands (cfun)->vop_free_buckets[bucket]->next;
558 }
559 else
560 alloc_size = vop_free_bucket_size(bucket);
561 }
562 else
563 alloc_size = num;
564
565 if (alloc_size > 0)
566 ret = (struct voptype_d *)ssa_operand_alloc (
567 sizeof (struct voptype_d) + (alloc_size - 1) * sizeof (vuse_element_t));
568
569 VUSE_VECT_NUM_ELEM (ret->usev) = num;
570 return ret;
571 }
572
573
574 /* This routine makes sure that PTR is in an immediate use list, and makes
575 sure the stmt pointer is set to the current stmt. */
576
577 static inline void
578 set_virtual_use_link (use_operand_p ptr, tree stmt)
579 {
580 /* fold_stmt may have changed the stmt pointers. */
581 if (ptr->stmt != stmt)
582 ptr->stmt = stmt;
583
584 /* If this use isn't in a list, add it to the correct list. */
585 if (!ptr->prev)
586 link_imm_use (ptr, *(ptr->use));
587 }
588
589
590 /* Adds OP to the list of defs after LAST. */
591
592 static inline def_optype_p
593 add_def_op (tree *op, def_optype_p last)
594 {
595 def_optype_p new;
596
597 new = alloc_def ();
598 DEF_OP_PTR (new) = op;
599 last->next = new;
600 new->next = NULL;
601 return new;
602 }
603
604
605 /* Adds OP to the list of uses of statement STMT after LAST. */
606
607 static inline use_optype_p
608 add_use_op (tree stmt, tree *op, use_optype_p last)
609 {
610 use_optype_p new;
611
612 new = alloc_use ();
613 USE_OP_PTR (new)->use = op;
614 link_imm_use_stmt (USE_OP_PTR (new), *op, stmt);
615 last->next = new;
616 new->next = NULL;
617 return new;
618 }
619
620
621 /* Return a virtual op pointer with NUM elements which are all initialized to OP
622 and are linked into the immediate uses for STMT. The new vop is appended
623 after PREV. */
624
625 static inline voptype_p
626 add_vop (tree stmt, tree op, int num, voptype_p prev)
627 {
628 voptype_p new;
629 int x;
630
631 new = alloc_vop (num);
632 for (x = 0; x < num; x++)
633 {
634 VUSE_OP_PTR (new, x)->prev = NULL;
635 SET_VUSE_OP (new, x, op);
636 VUSE_OP_PTR (new, x)->use = &new->usev.uses[x].use_var;
637 link_imm_use_stmt (VUSE_OP_PTR (new, x), new->usev.uses[x].use_var, stmt);
638 }
639
640 if (prev)
641 prev->next = new;
642 new->next = NULL;
643 return new;
644 }
645
646
647 /* Adds OP to the list of vuses of statement STMT after LAST, and moves
648 LAST to the new element. */
649
650 static inline voptype_p
651 add_vuse_op (tree stmt, tree op, int num, voptype_p last)
652 {
653 voptype_p new = add_vop (stmt, op, num, last);
654 VDEF_RESULT (new) = NULL_TREE;
655 return new;
656 }
657
658
659 /* Adds OP to the list of vdefs of statement STMT after LAST, and moves
660 LAST to the new element. */
661
662 static inline voptype_p
663 add_vdef_op (tree stmt, tree op, int num, voptype_p last)
664 {
665 voptype_p new = add_vop (stmt, op, num, last);
666 VDEF_RESULT (new) = op;
667 return new;
668 }
669
670
671 /* Reallocate the virtual operand PTR so that it has NUM_ELEM use slots. ROOT
672 is the head of the operand list it belongs to. */
673
674 static inline struct voptype_d *
675 realloc_vop (struct voptype_d *ptr, unsigned int num_elem,
676 struct voptype_d **root)
677 {
678 unsigned int x, lim;
679 tree stmt, val;
680 struct voptype_d *ret, *tmp;
681
682 if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
683 return ptr;
684
685 val = VUSE_OP (ptr, 0);
686 if (TREE_CODE (val) == SSA_NAME)
687 val = SSA_NAME_VAR (val);
688
689 stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
690
691 /* Delink all the existing uses. */
692 for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
693 {
694 use_operand_p use_p = VUSE_OP_PTR (ptr, x);
695 delink_imm_use (use_p);
696 }
697
698 /* If we want less space, simply use this one, and shrink the size. */
699 if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
700 {
701 VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
702 return ptr;
703 }
704
705 /* It is growing. Allocate a new one and replace the old one. */
706 ret = add_vuse_op (stmt, val, num_elem, ptr);
707
708 /* Clear PTR and add its memory to the free list. */
709 lim = VUSE_VECT_NUM_ELEM (ptr->usev);
710 memset (ptr, 0,
711 sizeof (struct voptype_d) + sizeof (vuse_element_t) * (lim - 1));
712 add_vop_to_freelist (ptr);
713
714 /* Now simply remove the old one. */
715 if (*root == ptr)
716 {
717 *root = ret;
718 return ret;
719 }
720 else
721 for (tmp = *root; tmp != NULL; tmp = tmp->next)
722 if (tmp->next == ptr)
723 {
724 /* Relink RET in place of the old operator. */
725 tmp->next = ret;
726 return ret;
727 }
728
729 /* The pointer passed in isn't in STMT's VDEF lists. */
730 gcc_unreachable ();
731 }
732
733
734 /* Reallocate the PTR vdef so that it has NUM_ELEM use slots. */
735
736 struct voptype_d *
737 realloc_vdef (struct voptype_d *ptr, unsigned int num_elem)
738 {
739 tree val, stmt;
740 struct voptype_d *ret;
741
742 val = VDEF_RESULT (ptr);
743 stmt = USE_STMT (VDEF_OP_PTR (ptr, 0));
744 ret = realloc_vop (ptr, num_elem, &(VDEF_OPS (stmt)));
745 VDEF_RESULT (ret) = val;
746 return ret;
747 }
748
749
750 /* Reallocate the PTR vuse so that it has NUM_ELEM use slots. */
751
752 struct voptype_d *
753 realloc_vuse (struct voptype_d *ptr, unsigned int num_elem)
754 {
755 tree stmt;
756 struct voptype_d *ret;
757
758 stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
759 ret = realloc_vop (ptr, num_elem, &(VUSE_OPS (stmt)));
760 return ret;
761 }
762
763
764 /* Takes elements from build_defs and turns them into def operands of STMT.
765 TODO -- Make build_defs VEC of tree *. */
766
767 static inline void
768 finalize_ssa_defs (tree stmt)
769 {
770 unsigned new_i;
771 struct def_optype_d new_list;
772 def_optype_p old_ops, last;
773 unsigned int num = VEC_length (tree, build_defs);
774
775 /* There should only be a single real definition per assignment. */
776 gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);
777
778 new_list.next = NULL;
779 last = &new_list;
780
781 old_ops = DEF_OPS (stmt);
782
783 new_i = 0;
784
785 /* Check for the common case of 1 def that hasn't changed. */
786 if (old_ops && old_ops->next == NULL && num == 1
787 && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
788 return;
789
790 /* If there is anything in the old list, free it. */
791 if (old_ops)
792 {
793 old_ops->next = gimple_ssa_operands (cfun)->free_defs;
794 gimple_ssa_operands (cfun)->free_defs = old_ops;
795 }
796
797 /* If there is anything remaining in the build_defs list, simply emit it. */
798 for ( ; new_i < num; new_i++)
799 last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);
800
801 /* Now set the stmt's operands. */
802 DEF_OPS (stmt) = new_list.next;
803
804 #ifdef ENABLE_CHECKING
805 {
806 def_optype_p ptr;
807 unsigned x = 0;
808 for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
809 x++;
810
811 gcc_assert (x == num);
812 }
813 #endif
814 }
815
816
817 /* Takes elements from build_uses and turns them into use operands of STMT.
818 TODO -- Make build_uses VEC of tree *. */
819
820 static inline void
821 finalize_ssa_uses (tree stmt)
822 {
823 unsigned new_i;
824 struct use_optype_d new_list;
825 use_optype_p old_ops, ptr, last;
826
827 #ifdef ENABLE_CHECKING
828 {
829 unsigned x;
830 unsigned num = VEC_length (tree, build_uses);
831
832 /* If the pointer to the operand is the statement itself, something is
833 wrong. It means that we are pointing to a local variable (the
834 initial call to update_stmt_operands does not pass a pointer to a
835 statement). */
836 for (x = 0; x < num; x++)
837 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
838 }
839 #endif
840
841 new_list.next = NULL;
842 last = &new_list;
843
844 old_ops = USE_OPS (stmt);
845
846 /* If there is anything in the old list, free it. */
847 if (old_ops)
848 {
849 for (ptr = old_ops; ptr; ptr = ptr->next)
850 delink_imm_use (USE_OP_PTR (ptr));
851 old_ops->next = gimple_ssa_operands (cfun)->free_uses;
852 gimple_ssa_operands (cfun)->free_uses = old_ops;
853 }
854
855 /* Now create use nodes for all the new uses. */
856 for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
857 last = add_use_op (stmt,
858 (tree *) VEC_index (tree, build_uses, new_i),
859 last);
860
861 /* Now set the stmt's operands. */
862 USE_OPS (stmt) = new_list.next;
863
864 #ifdef ENABLE_CHECKING
865 {
866 unsigned x = 0;
867 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
868 x++;
869
870 gcc_assert (x == VEC_length (tree, build_uses));
871 }
872 #endif
873 }
874
875
876 /* Takes elements from BUILD_VDEFS and turns them into vdef operands of
877 STMT. FIXME, for now VDEF operators should have a single operand
878 in their RHS. */
879
880 static inline void
881 finalize_ssa_vdefs (tree stmt)
882 {
883 unsigned new_i;
884 struct voptype_d new_list;
885 voptype_p old_ops, ptr, last;
886 stmt_ann_t ann = stmt_ann (stmt);
887
888 /* Set the symbols referenced by STMT. */
889 if (!bitmap_empty_p (build_stores))
890 {
891 if (ann->operands.stores == NULL)
892 ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
893
894 bitmap_copy (ann->operands.stores, build_stores);
895 }
896 else
897 BITMAP_FREE (ann->operands.stores);
898
899 /* If aliases have not been computed, do not instantiate a virtual
900 operator on STMT. Initially, we only compute the SSA form on
901 GIMPLE registers. The virtual SSA form is only computed after
902 alias analysis, so virtual operators will remain unrenamed and
903 the verifier will complain. However, alias analysis needs to
904 access symbol load/store information, so we need to compute
905 those. */
906 if (!gimple_aliases_computed_p (cfun))
907 return;
908
909 new_list.next = NULL;
910 last = &new_list;
911
912 old_ops = VDEF_OPS (stmt);
913 new_i = 0;
914 while (old_ops && new_i < VEC_length (tree, build_vdefs))
915 {
916 tree op = VEC_index (tree, build_vdefs, new_i);
917 unsigned new_uid = get_name_decl (op);
918 unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops));
919
920 /* FIXME, for now each VDEF operator should have at most one
921 operand in its RHS. */
922 gcc_assert (VDEF_NUM (old_ops) == 1);
923
924 if (old_uid == new_uid)
925 {
926 /* If the symbols are the same, reuse the existing operand. */
927 last->next = old_ops;
928 last = old_ops;
929 old_ops = old_ops->next;
930 last->next = NULL;
931 set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
932 new_i++;
933 }
934 else if (old_uid < new_uid)
935 {
936 /* If old is less than new, old goes to the free list. */
937 voptype_p next;
938 delink_imm_use (VDEF_OP_PTR (old_ops, 0));
939 next = old_ops->next;
940 add_vop_to_freelist (old_ops);
941 old_ops = next;
942 }
943 else
944 {
945 /* This is a new operand. */
946 last = add_vdef_op (stmt, op, 1, last);
947 new_i++;
948 }
949 }
950
951 /* If there is anything remaining in BUILD_VDEFS, simply emit it. */
952 for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
953 last = add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, last);
954
955 /* If there is anything in the old list, free it. */
956 if (old_ops)
957 {
958 for (ptr = old_ops; ptr; ptr = last)
959 {
960 last = ptr->next;
961 delink_imm_use (VDEF_OP_PTR (ptr, 0));
962 add_vop_to_freelist (ptr);
963 }
964 }
965
966 /* Now set STMT's operands. */
967 VDEF_OPS (stmt) = new_list.next;
968
969 #ifdef ENABLE_CHECKING
970 {
971 unsigned x = 0;
972 for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next)
973 x++;
974
975 gcc_assert (x == VEC_length (tree, build_vdefs));
976 }
977 #endif
978 }
979
980
981 /* Takes elements from BUILD_VUSES and turns them into VUSE operands of
982 STMT. */
983
984 static inline void
985 finalize_ssa_vuse_ops (tree stmt)
986 {
987 unsigned new_i, old_i;
988 voptype_p old_ops, last;
989 VEC(tree,heap) *new_ops;
990 stmt_ann_t ann;
991
992 /* Set the symbols referenced by STMT. */
993 ann = stmt_ann (stmt);
994 if (!bitmap_empty_p (build_loads))
995 {
996 if (ann->operands.loads == NULL)
997 ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
998
999 bitmap_copy (ann->operands.loads, build_loads);
1000 }
1001 else
1002 BITMAP_FREE (ann->operands.loads);
1003
1004 /* If aliases have not been computed, do not instantiate a virtual
1005 operator on STMT. Initially, we only compute the SSA form on
1006 GIMPLE registers. The virtual SSA form is only computed after
1007 alias analysis, so virtual operators will remain unrenamed and
1008 the verifier will complain. However, alias analysis needs to
1009 access symbol load/store information, so we need to compute
1010 those. */
1011 if (!gimple_aliases_computed_p (cfun))
1012 return;
1013
1014 /* STMT should have at most one VUSE operator. */
1015 old_ops = VUSE_OPS (stmt);
1016 gcc_assert (old_ops == NULL || old_ops->next == NULL);
1017
1018 new_ops = NULL;
1019 new_i = old_i = 0;
1020 while (old_ops
1021 && old_i < VUSE_NUM (old_ops)
1022 && new_i < VEC_length (tree, build_vuses))
1023 {
1024 tree new_op = VEC_index (tree, build_vuses, new_i);
1025 tree old_op = VUSE_OP (old_ops, old_i);
1026 unsigned new_uid = get_name_decl (new_op);
1027 unsigned old_uid = get_name_decl (old_op);
1028
1029 if (old_uid == new_uid)
1030 {
1031 /* If the symbols are the same, reuse the existing operand. */
1032 VEC_safe_push (tree, heap, new_ops, old_op);
1033 new_i++;
1034 old_i++;
1035 }
1036 else if (old_uid < new_uid)
1037 {
1038 /* If OLD_UID is less than NEW_UID, the old operand has
1039 disappeared, skip to the next old operand. */
1040 old_i++;
1041 }
1042 else
1043 {
1044 /* This is a new operand. */
1045 VEC_safe_push (tree, heap, new_ops, new_op);
1046 new_i++;
1047 }
1048 }
1049
1050 /* If there is anything remaining in the build_vuses list, simply emit it. */
1051 for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
1052 VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i));
1053
1054 /* If there is anything in the old list, free it. */
1055 if (old_ops)
1056 {
1057 for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
1058 delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
1059 add_vop_to_freelist (old_ops);
1060 VUSE_OPS (stmt) = NULL;
1061 }
1062
1063 /* If there are any operands, instantiate a VUSE operator for STMT. */
1064 if (new_ops)
1065 {
1066 tree op;
1067 unsigned i;
1068
1069 last = add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), NULL);
1070
1071 for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
1072 SET_USE (VUSE_OP_PTR (last, (int) i), op);
1073
1074 VUSE_OPS (stmt) = last;
1075 VEC_free (tree, heap, new_ops);
1076 }
1077
1078 #ifdef ENABLE_CHECKING
1079 {
1080 unsigned x;
1081
1082 if (VUSE_OPS (stmt))
1083 {
1084 gcc_assert (VUSE_OPS (stmt)->next == NULL);
1085 x = VUSE_NUM (VUSE_OPS (stmt));
1086 }
1087 else
1088 x = 0;
1089
1090 gcc_assert (x == VEC_length (tree, build_vuses));
1091 }
1092 #endif
1093 }
1094
1095 /* Return a new VUSE operand vector for STMT. */
1096
1097 static void
1098 finalize_ssa_vuses (tree stmt)
1099 {
1100 unsigned num, num_vdefs;
1101 unsigned vuse_index;
1102
1103 /* Remove superfluous VUSE operands. If the statement already has a
1104 VDEF operator for a variable 'a', then a VUSE for 'a' is not
1105 needed because VDEFs imply a VUSE of the variable. For instance,
1106 suppose that variable 'a' is pointed-to by p and q:
1107
1108 # VUSE <a_2>
1109 # a_3 = VDEF <a_2>
1110 *p = *q;
1111
1112 The VUSE <a_2> is superfluous because it is implied by the
1113 VDEF operator. */
1114 num = VEC_length (tree, build_vuses);
1115 num_vdefs = VEC_length (tree, build_vdefs);
1116
1117 if (num > 0 && num_vdefs > 0)
1118 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
1119 {
1120 tree vuse;
1121 vuse = VEC_index (tree, build_vuses, vuse_index);
1122 if (TREE_CODE (vuse) != SSA_NAME)
1123 {
1124 var_ann_t ann = var_ann (vuse);
1125 ann->in_vuse_list = 0;
1126 if (ann->in_vdef_list)
1127 {
1128 VEC_ordered_remove (tree, build_vuses, vuse_index);
1129 continue;
1130 }
1131 }
1132 vuse_index++;
1133 }
1134
1135 finalize_ssa_vuse_ops (stmt);
1136 }
1137
1138
1139 /* Clear the in_list bits and empty all of the build arrays and
1140 bitmaps. */
1141
1142 static inline void
1143 cleanup_build_arrays (void)
1144 {
1145 unsigned i;
1146 tree t;
1147
1148 for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++)
1149 if (TREE_CODE (t) != SSA_NAME)
1150 var_ann (t)->in_vdef_list = false;
1151
1152 for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++)
1153 if (TREE_CODE (t) != SSA_NAME)
1154 var_ann (t)->in_vuse_list = false;
1155
1156 VEC_truncate (tree, build_vdefs, 0);
1157 VEC_truncate (tree, build_vuses, 0);
1158 VEC_truncate (tree, build_defs, 0);
1159 VEC_truncate (tree, build_uses, 0);
1160 bitmap_clear (build_loads);
1161 bitmap_clear (build_stores);
1162 }
1163
1164
1165 /* Finalize all the build vectors and attach the new operands to STMT. */
1166
1167 static inline void
1168 finalize_ssa_stmt_operands (tree stmt)
1169 {
1170 finalize_ssa_defs (stmt);
1171 finalize_ssa_uses (stmt);
1172 finalize_ssa_vdefs (stmt);
1173 finalize_ssa_vuses (stmt);
1174 cleanup_build_arrays ();
1175 }
1176
1177
1178 /* Start the process of building up the statement's operand vectors. */
1179
1180 static inline void
1181 start_ssa_stmt_operands (void)
1182 {
1183 gcc_assert (VEC_length (tree, build_defs) == 0);
1184 gcc_assert (VEC_length (tree, build_uses) == 0);
1185 gcc_assert (VEC_length (tree, build_vuses) == 0);
1186 gcc_assert (VEC_length (tree, build_vdefs) == 0);
1187 gcc_assert (bitmap_empty_p (build_loads));
1188 gcc_assert (bitmap_empty_p (build_stores));
1189 }
1190
1191
1192 /* Add DEF_P to the list of pointers to operands. */
1193
1194 static inline void
1195 append_def (tree *def_p)
1196 {
1197 VEC_safe_push (tree, heap, build_defs, (tree) def_p);
1198 }
1199
1200
1201 /* Add USE_P to the list of pointers to operands. */
1202
1203 static inline void
1204 append_use (tree *use_p)
1205 {
1206 VEC_safe_push (tree, heap, build_uses, (tree) use_p);
1207 }
1208
1209
1210 /* Add VAR to the set of variables that require a VDEF operator. */
1211
1212 static inline void
1213 append_vdef (tree var)
1214 {
1215 tree sym;
1216
1217 if (TREE_CODE (var) != SSA_NAME)
1218 {
1219 tree mpt;
1220 var_ann_t ann;
1221
1222 /* If VAR belongs to a memory partition, use it instead of VAR. */
1223 mpt = memory_partition (var);
1224 if (mpt)
1225 var = mpt;
1226
1227 /* Don't allow duplicate entries. */
1228 ann = get_var_ann (var);
1229 if (ann->in_vdef_list)
1230 return;
1231
1232 ann->in_vdef_list = true;
1233 sym = var;
1234 }
1235 else
1236 sym = SSA_NAME_VAR (var);
1237
1238 VEC_safe_push (tree, heap, build_vdefs, var);
1239 bitmap_set_bit (build_stores, DECL_UID (sym));
1240 }
1241
1242
1243 /* Add VAR to the set of variables that require a VUSE operator. */
1244
1245 static inline void
1246 append_vuse (tree var)
1247 {
1248 tree sym;
1249
1250 if (TREE_CODE (var) != SSA_NAME)
1251 {
1252 tree mpt;
1253 var_ann_t ann;
1254
1255 /* If VAR belongs to a memory partition, use it instead of VAR. */
1256 mpt = memory_partition (var);
1257 if (mpt)
1258 var = mpt;
1259
1260 /* Don't allow duplicate entries. */
1261 ann = get_var_ann (var);
1262 if (ann->in_vuse_list || ann->in_vdef_list)
1263 return;
1264
1265 ann->in_vuse_list = true;
1266 sym = var;
1267 }
1268 else
1269 sym = SSA_NAME_VAR (var);
1270
1271 VEC_safe_push (tree, heap, build_vuses, var);
1272 bitmap_set_bit (build_loads, DECL_UID (sym));
1273 }
1274
1275
1276 /* REF is a tree that contains the entire pointer dereference
1277 expression, if available, or NULL otherwise. ALIAS is the variable
1278 we are asking if REF can access. OFFSET and SIZE come from the
1279 memory access expression that generated this virtual operand. */
1280
1281 static bool
1282 access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
1283 HOST_WIDE_INT size)
1284 {
1285 bool offsetgtz = offset > 0;
1286 unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
1287 tree base = ref ? get_base_address (ref) : NULL;
1288
1289 /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
1290 using a call-clobbered memory tag. By definition, call-clobbered
1291 memory tags can always touch .GLOBAL_VAR. */
1292 if (alias == gimple_global_var (cfun))
1293 return true;
1294
1295 /* If ALIAS is an SFT, it can't be touched if the offset
1296 and size of the access is not overlapping with the SFT offset and
1297 size. This is only true if we are accessing through a pointer
1298 to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
1299 be accessing through a pointer to some substruct of the
1300 structure, and if we try to prune there, we will have the wrong
1301 offset, and get the wrong answer.
1302 i.e., we can't prune without more work if we have something like
1303
1304 struct gcc_target
1305 {
1306 struct asm_out
1307 {
1308 const char *byte_op;
1309 struct asm_int_op
1310 {
1311 const char *hi;
1312 } aligned_op;
1313 } asm_out;
1314 } targetm;
1315
1316 foo = &targetm.asm_out.aligned_op;
1317 return foo->hi;
1318
1319 SFT.1, which represents hi, will have SFT_OFFSET=32 because in
1320 terms of SFT_PARENT_VAR, that is where it is.
1321 However, the access through the foo pointer will be at offset 0. */
1322 if (size != -1
1323 && TREE_CODE (alias) == STRUCT_FIELD_TAG
1324 && base
1325 && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
1326 && !overlap_subvar (offset, size, alias, NULL))
1327 {
1328 #ifdef ACCESS_DEBUGGING
1329 fprintf (stderr, "Access to ");
1330 print_generic_expr (stderr, ref, 0);
1331 fprintf (stderr, " may not touch ");
1332 print_generic_expr (stderr, alias, 0);
1333 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1334 #endif
1335 return false;
1336 }
1337
1338 /* With strict aliasing, it is impossible for a component access
1339 through a pointer to touch a random variable, unless that
1340 variable *is* a structure or a pointer.
1341
1342 That is, given p->c, and some random global variable b,
1343 there is no legal way that p->c could be an access to b.
1344
1345 Even with strict aliasing on, we consider it legal to do something
1346 like:
1347
1348 struct foos { int l; };
1349 int foo;
1350 static struct foos *getfoo(void);
1351 int main (void)
1352 {
1353 struct foos *f = getfoo();
1354 f->l = 1;
1355 foo = 2;
1356 if (f->l == 1)
1357 abort();
1358 exit(0);
1359 }
1360 static struct foos *getfoo(void)
1361 { return (struct foos *)&foo; }
1362
1363 (taken from 20000623-1.c)
1364
1365 The docs also say/imply that access through union pointers
1366 is legal (but *not* if you take the address of the union member,
1367 i.e. the inverse), such that you can do
1368
1369 typedef union {
1370 int d;
1371 } U;
1372
1373 int rv;
1374 void breakme()
1375 {
1376 U *rv0;
1377 U *pretmp = (U*)&rv;
1378 rv0 = pretmp;
1379 rv0->d = 42;
1380 }
1381 To implement this, we just punt on accesses through union
1382 pointers entirely.
1383 */
1384 else if (ref
1385 && flag_strict_aliasing
1386 && TREE_CODE (ref) != INDIRECT_REF
1387 && !MTAG_P (alias)
1388 && (TREE_CODE (base) != INDIRECT_REF
1389 || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
1390 && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
1391 && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
1392 && !var_ann (alias)->is_heapvar
1393 /* When the struct has attribute may_alias attached to it, its
1394 alias set is zero and we must not prune this access. */
1395 && get_alias_set (base))
1396 {
1397 #ifdef ACCESS_DEBUGGING
1398 fprintf (stderr, "Access to ");
1399 print_generic_expr (stderr, ref, 0);
1400 fprintf (stderr, " may not touch ");
1401 print_generic_expr (stderr, alias, 0);
1402 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1403 #endif
1404 return false;
1405 }
1406
1407 /* If the offset of the access is greater than the size of one of
1408 the possible aliases, it can't be touching that alias, because it
1409 would be past the end of the structure. */
1410 else if (ref
1411 && flag_strict_aliasing
1412 && TREE_CODE (ref) != INDIRECT_REF
1413 && !MTAG_P (alias)
1414 && !POINTER_TYPE_P (TREE_TYPE (alias))
1415 && offsetgtz
1416 && DECL_SIZE (alias)
1417 && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
1418 && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
1419 {
1420 #ifdef ACCESS_DEBUGGING
1421 fprintf (stderr, "Access to ");
1422 print_generic_expr (stderr, ref, 0);
1423 fprintf (stderr, " may not touch ");
1424 print_generic_expr (stderr, alias, 0);
1425 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1426 #endif
1427 return false;
1428 }
1429
1430 return true;
1431 }
1432
1433
1434 /* Add VAR to the virtual operands array. FLAGS is as in
1435 get_expr_operands. FULL_REF is a tree that contains the entire
1436 pointer dereference expression, if available, or NULL otherwise.
1437 OFFSET and SIZE come from the memory access expression that
1438 generated this virtual operand. IS_CALL_SITE is true if the
1439 affected statement is a call site. */
1440
1441 static void
1442 add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
1443 tree full_ref, HOST_WIDE_INT offset,
1444 HOST_WIDE_INT size, bool is_call_site)
1445 {
1446 bitmap aliases = NULL;
1447 tree sym;
1448 var_ann_t v_ann;
1449
1450 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1451 v_ann = var_ann (sym);
1452
1453 /* Mark the statement as having memory operands. */
1454 s_ann->references_memory = true;
1455
1456 /* Mark statements with volatile operands. Optimizers should back
1457 off from statements having volatile operands. */
1458 if (TREE_THIS_VOLATILE (sym) && s_ann)
1459 s_ann->has_volatile_ops = true;
1460
1461 /* If the variable cannot be modified and this is a VDEF, change
1462 it into a VUSE. This happens when read-only variables are marked
1463 call-clobbered and/or aliased to writable variables. We only do
1464 this for non-specific (implicit) stores.
1465
1466 Note that if this is a specific store, i.e. associated with a
1467 GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run
1468 into validation problems.
1469
1470 This can happen when programs cast away const, leaving us with a
1471 store to read-only memory. If the statement is actually executed
1472 at runtime, then the program is ill formed. If the statement is
1473 not executed then all is well. At the very least, we cannot ICE. */
1474 if ((flags & opf_implicit) && unmodifiable_var_p (var))
1475 flags &= ~opf_def;
1476
1477 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1478 virtual operands, unless the caller has specifically requested
1479 not to add virtual operands (used when adding operands inside an
1480 ADDR_EXPR expression). */
1481 if (flags & opf_no_vops)
1482 return;
1483
1484 if (MTAG_P (var))
1485 aliases = MTAG_ALIASES (var);
1486
1487 if (aliases == NULL)
1488 {
1489 if (s_ann && !gimple_aliases_computed_p (cfun))
1490 s_ann->has_volatile_ops = true;
1491
1492 /* The variable is not aliased or it is an alias tag. */
1493 if (flags & opf_def)
1494 append_vdef (var);
1495 else
1496 append_vuse (var);
1497 }
1498 else
1499 {
1500 bitmap_iterator bi;
1501 unsigned int i;
1502 tree al;
1503
1504 /* The variable is aliased. Add its aliases to the virtual
1505 operands. */
1506 gcc_assert (!bitmap_empty_p (aliases));
1507
1508 if (flags & opf_def)
1509 {
1510 bool none_added = true;
1511 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
1512 {
1513 al = referenced_var (i);
1514 if (!access_can_touch_variable (full_ref, al, offset, size))
1515 continue;
1516
1517 /* Call-clobbered tags may have non-call-clobbered
1518 symbols in their alias sets. Ignore them if we are
1519 adding VOPs for a call site. */
1520 if (is_call_site && !is_call_clobbered (al))
1521 continue;
1522
1523 none_added = false;
1524 append_vdef (al);
1525 }
1526
1527 /* If the variable is also an alias tag, add a virtual
1528 operand for it, otherwise we will miss representing
1529 references to the members of the variable's alias set.
1530 This fixes the bug in gcc.c-torture/execute/20020503-1.c.
1531
1532 It is also necessary to add bare defs on clobbers for
1533 SMT's, so that bare SMT uses caused by pruning all the
1534 aliases will link up properly with calls. In order to
1535 keep the number of these bare defs we add down to the
1536 minimum necessary, we keep track of which SMT's were used
1537 alone in statement vdefs or VUSEs. */
1538 if (none_added
1539 || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
1540 && is_call_site))
1541 {
1542 append_vdef (var);
1543 }
1544 }
1545 else
1546 {
1547 bool none_added = true;
1548 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
1549 {
1550 al = referenced_var (i);
1551 if (!access_can_touch_variable (full_ref, al, offset, size))
1552 continue;
1553
1554 /* Call-clobbered tags may have non-call-clobbered
1555 symbols in their alias sets. Ignore them if we are
1556 adding VOPs for a call site. */
1557 if (is_call_site && !is_call_clobbered (al))
1558 continue;
1559
1560 none_added = false;
1561 append_vuse (al);
1562 }
1563
1564 /* Even if no aliases have been added, we still need to
1565 establish def-use and use-def chains, lest
1566 transformations think that this is not a memory
1567 reference. For an example of this scenario, see
1568 testsuite/g++.dg/opt/cleanup1.C. */
1569 if (none_added)
1570 append_vuse (var);
1571 }
1572 }
1573 }
1574
1575
1576 /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
1577 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1578 the statement's real operands, otherwise it is added to virtual
1579 operands. */
1580
1581 static void
1582 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1583 {
1584 tree var, sym;
1585 var_ann_t v_ann;
1586
1587 gcc_assert (SSA_VAR_P (*var_p) && s_ann);
1588
1589 var = *var_p;
1590 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1591 v_ann = var_ann (sym);
1592
1593 /* Mark statements with volatile operands. */
1594 if (TREE_THIS_VOLATILE (sym))
1595 s_ann->has_volatile_ops = true;
1596
1597 if (is_gimple_reg (sym))
1598 {
1599 /* The variable is a GIMPLE register. Add it to real operands. */
1600 if (flags & opf_def)
1601 append_def (var_p);
1602 else
1603 append_use (var_p);
1604 }
1605 else
1606 add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
1607 }
1608
1609
1610 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1611 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1612
1613 STMT is the statement being processed, EXPR is the INDIRECT_REF
1614 that got us here.
1615
1616 FLAGS is as in get_expr_operands.
1617
1618 FULL_REF contains the full pointer dereference expression, if we
1619 have it, or NULL otherwise.
1620
1621 OFFSET and SIZE are the location of the access inside the
1622 dereferenced pointer, if known.
1623
1624 RECURSE_ON_BASE should be set to true if we want to continue
1625 calling get_expr_operands on the base pointer, and false if
1626 something else will do it for us. */
1627
1628 static void
1629 get_indirect_ref_operands (tree stmt, tree expr, int flags,
1630 tree full_ref,
1631 HOST_WIDE_INT offset, HOST_WIDE_INT size,
1632 bool recurse_on_base)
1633 {
1634 tree *pptr = &TREE_OPERAND (expr, 0);
1635 tree ptr = *pptr;
1636 stmt_ann_t s_ann = stmt_ann (stmt);
1637
1638 s_ann->references_memory = true;
1639 if (s_ann && TREE_THIS_VOLATILE (expr))
1640 s_ann->has_volatile_ops = true;
1641
1642 if (SSA_VAR_P (ptr))
1643 {
1644 struct ptr_info_def *pi = NULL;
1645
1646 /* If PTR has flow-sensitive points-to information, use it. */
1647 if (TREE_CODE (ptr) == SSA_NAME
1648 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1649 && pi->name_mem_tag)
1650 {
1651 /* PTR has its own memory tag. Use it. */
1652 add_virtual_operand (pi->name_mem_tag, s_ann, flags,
1653 full_ref, offset, size, false);
1654 }
1655 else
1656 {
1657 /* If PTR is not an SSA_NAME or it doesn't have a name
1658 tag, use its symbol memory tag. */
1659 var_ann_t v_ann;
1660
1661 /* If we are emitting debugging dumps, display a warning if
1662 PTR is an SSA_NAME with no flow-sensitive alias
1663 information. That means that we may need to compute
1664 aliasing again. */
1665 if (dump_file
1666 && TREE_CODE (ptr) == SSA_NAME
1667 && pi == NULL)
1668 {
1669 fprintf (dump_file,
1670 "NOTE: no flow-sensitive alias info for ");
1671 print_generic_expr (dump_file, ptr, dump_flags);
1672 fprintf (dump_file, " in ");
1673 print_generic_stmt (dump_file, stmt, dump_flags);
1674 }
1675
1676 if (TREE_CODE (ptr) == SSA_NAME)
1677 ptr = SSA_NAME_VAR (ptr);
1678 v_ann = var_ann (ptr);
1679
1680 if (v_ann->symbol_mem_tag)
1681 add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
1682 full_ref, offset, size, false);
1683
1684 /* Aliasing information is missing; mark statement as
1685 volatile so we won't optimize it out too actively. */
1686 else if (s_ann
1687 && !gimple_aliases_computed_p (cfun)
1688 && (flags & opf_def))
1689 s_ann->has_volatile_ops = true;
1690 }
1691 }
1692 else if (TREE_CODE (ptr) == INTEGER_CST)
1693 {
1694 /* If a constant is used as a pointer, we can't generate a real
1695 operand for it but we mark the statement volatile to prevent
1696 optimizations from messing things up. */
1697 if (s_ann)
1698 s_ann->has_volatile_ops = true;
1699 return;
1700 }
1701 else
1702 {
1703 /* Ok, this isn't even is_gimple_min_invariant. Something's broken. */
1704 gcc_unreachable ();
1705 }
1706
1707 /* If requested, add a USE operand for the base pointer. */
1708 if (recurse_on_base)
1709 get_expr_operands (stmt, pptr, opf_use);
1710 }
1711
1712
1713 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1714
1715 static void
1716 get_tmr_operands (tree stmt, tree expr, int flags)
1717 {
1718 tree tag, ref;
1719 HOST_WIDE_INT offset, size, maxsize;
1720 subvar_t svars, sv;
1721 stmt_ann_t s_ann = stmt_ann (stmt);
1722
1723 /* This statement references memory. */
1724 s_ann->references_memory = 1;
1725
1726 /* First record the real operands. */
1727 get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
1728 get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);
1729
1730 if (TMR_SYMBOL (expr))
1731 add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);
1732
1733 tag = TMR_TAG (expr);
1734 if (!tag)
1735 {
1736 /* Something weird, so ensure that we will be careful. */
1737 s_ann->has_volatile_ops = true;
1738 return;
1739 }
1740
1741 if (DECL_P (tag))
1742 {
1743 get_expr_operands (stmt, &tag, flags);
1744 return;
1745 }
1746
1747 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1748 gcc_assert (ref != NULL_TREE);
1749 svars = get_subvars_for_var (ref);
1750 for (sv = svars; sv; sv = sv->next)
1751 {
1752 bool exact;
1753
1754 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1755 add_stmt_operand (&sv->var, s_ann, flags);
1756 }
1757 }
1758
1759
1760 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1761 clobbered variables in the function. */
1762
1763 static void
1764 add_call_clobber_ops (tree stmt, tree callee)
1765 {
1766 unsigned u;
1767 bitmap_iterator bi;
1768 stmt_ann_t s_ann = stmt_ann (stmt);
1769 bitmap not_read_b, not_written_b;
1770
1771 /* Functions that are not const, pure, or noreturn may clobber
1772 call-clobbered variables. */
1773 if (s_ann)
1774 s_ann->makes_clobbering_call = true;
1775
1776 /* If we created .GLOBAL_VAR earlier, just use it. */
1777 if (gimple_global_var (cfun))
1778 {
1779 tree var = gimple_global_var (cfun);
1780 add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
1781 return;
1782 }
1783
1784 /* Get info for local and module level statics. There is a bit
1785 set for each static if the call being processed does not read
1786 or write that variable. */
1787 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1788 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1789
1790 /* Add a VDEF operand for every call clobbered variable. */
1791 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
1792 {
1793 tree var = referenced_var_lookup (u);
1794 unsigned int escape_mask = var_ann (var)->escape_mask;
1795 tree real_var = var;
1796 bool not_read;
1797 bool not_written;
1798
1799 /* Not read and not written are computed on regular vars, not
1800 subvars, so look at the parent var if this is an SFT. */
1801 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1802 real_var = SFT_PARENT_VAR (var);
1803
1804 not_read = not_read_b
1805 ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
1806 : false;
1807
1808 not_written = not_written_b
1809 ? bitmap_bit_p (not_written_b, DECL_UID (real_var))
1810 : false;
1811 gcc_assert (!unmodifiable_var_p (var));
1812
1813 clobber_stats.clobbered_vars++;
1814
1815 /* See if this variable is really clobbered by this function. */
1816
1817 /* Trivial case: Things escaping only to pure/const are not
1818 clobbered by non-pure-const, and only read by pure/const. */
1819 if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
1820 {
1821 tree call = get_call_expr_in (stmt);
1822 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1823 {
1824 add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
1825 clobber_stats.unescapable_clobbers_avoided++;
1826 continue;
1827 }
1828 else
1829 {
1830 clobber_stats.unescapable_clobbers_avoided++;
1831 continue;
1832 }
1833 }
1834
1835 if (not_written)
1836 {
1837 clobber_stats.static_write_clobbers_avoided++;
1838 if (!not_read)
1839 add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
1840 else
1841 clobber_stats.static_read_clobbers_avoided++;
1842 }
1843 else
1844 add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
1845 }
1846 }
1847
1848
1849 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1850 function. */
1851
1852 static void
1853 add_call_read_ops (tree stmt, tree callee)
1854 {
1855 unsigned u;
1856 bitmap_iterator bi;
1857 stmt_ann_t s_ann = stmt_ann (stmt);
1858 bitmap not_read_b;
1859
1860 /* If the function is not pure, it may reference memory. Add
1861 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1862 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1863 if (gimple_global_var (cfun))
1864 {
1865 tree var = gimple_global_var (cfun);
1866 add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
1867 return;
1868 }
1869
1870 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1871
1872 /* Add a VUSE for each call-clobbered variable. */
1873 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
1874 {
1875 tree var = referenced_var (u);
1876 tree real_var = var;
1877 bool not_read;
1878
1879 clobber_stats.readonly_clobbers++;
1880
1881 /* Not read and not written are computed on regular vars, not
1882 subvars, so look at the parent var if this is an SFT. */
1883
1884 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1885 real_var = SFT_PARENT_VAR (var);
1886
1887 not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
1888 : false;
1889
1890 if (not_read)
1891 {
1892 clobber_stats.static_readonly_clobbers_avoided++;
1893 continue;
1894 }
1895
1896 add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
1897 }
1898 }
1899
1900
1901 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1902
1903 static void
1904 get_call_expr_operands (tree stmt, tree expr)
1905 {
1906 int call_flags = call_expr_flags (expr);
1907 int i, nargs;
1908 stmt_ann_t ann = stmt_ann (stmt);
1909
1910 ann->references_memory = true;
1911
1912 /* If aliases have been computed already, add VDEF or VUSE
1913 operands for all the symbols that have been found to be
1914 call-clobbered. */
1915 if (gimple_aliases_computed_p (cfun)
1916 && !(call_flags & ECF_NOVOPS))
1917 {
1918 /* A 'pure' or a 'const' function never call-clobbers anything.
1919 A 'noreturn' function might, but since we don't return anyway
1920 there is no point in recording that. */
1921 if (TREE_SIDE_EFFECTS (expr)
1922 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1923 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1924 else if (!(call_flags & ECF_CONST))
1925 add_call_read_ops (stmt, get_callee_fndecl (expr));
1926 }
1927
1928 /* Find uses in the called function. */
1929 get_expr_operands (stmt, &CALL_EXPR_FN (expr), opf_use);
1930 nargs = call_expr_nargs (expr);
1931 for (i = 0; i < nargs; i++)
1932 get_expr_operands (stmt, &CALL_EXPR_ARG (expr, i), opf_use);
1933
1934 get_expr_operands (stmt, &CALL_EXPR_STATIC_CHAIN (expr), opf_use);
1935 }
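
/* As an illustration only (symbol and SSA version names below are made
up), a call to a function that may write a call-clobbered global G is
annotated with a clobbering definition of G:

# G_5 = VDEF <G_4>
foo (x_2);

whereas a call to a pure function at most reads memory and only
receives VUSEs:

# VUSE <G_4>
t_6 = bar (x_2);

and a const function gets no virtual operands at all. */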
1936
1937
1938 /* Scan operands in the ASM_EXPR stmt referred to in INFO. */
1939
1940 static void
1941 get_asm_expr_operands (tree stmt)
1942 {
1943 stmt_ann_t s_ann;
1944 int i, noutputs;
1945 const char **oconstraints;
1946 const char *constraint;
1947 bool allows_mem, allows_reg, is_inout;
1948 tree link;
1949
1950 s_ann = stmt_ann (stmt);
1951 noutputs = list_length (ASM_OUTPUTS (stmt));
1952 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
1953
1954 /* Gather all output operands. */
1955 for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
1956 {
1957 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1958 oconstraints[i] = constraint;
1959 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
1960 &allows_reg, &is_inout);
1961
1962 /* This should have been split in gimplify_asm_expr. */
1963 gcc_assert (!allows_reg || !is_inout);
1964
1965 /* Memory operands are addressable. Note that STMT needs the
1966 address of this operand. */
1967 if (!allows_reg && allows_mem)
1968 {
1969 tree t = get_base_address (TREE_VALUE (link));
1970 if (t && DECL_P (t) && s_ann)
1971 add_to_addressable_set (t, &s_ann->addresses_taken);
1972 }
1973
1974 get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
1975 }
1976
1977 /* Gather all input operands. */
1978 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1979 {
1980 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1981 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
1982 &allows_mem, &allows_reg);
1983
1984 /* Memory operands are addressable. Note that STMT needs the
1985 address of this operand. */
1986 if (!allows_reg && allows_mem)
1987 {
1988 tree t = get_base_address (TREE_VALUE (link));
1989 if (t && DECL_P (t) && s_ann)
1990 add_to_addressable_set (t, &s_ann->addresses_taken);
1991 }
1992
1993 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1994 }
1995
1996 /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
1997 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1998 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1999 {
2000 unsigned i;
2001 bitmap_iterator bi;
2002
2003 s_ann->references_memory = true;
2004
2005 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
2006 {
2007 tree var = referenced_var (i);
2008 add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
2009 }
2010
2011 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
2012 {
2013 tree var = referenced_var (i);
2014
2015 /* Subvars are explicitly represented in this list, so we
2016 don't need the original to be added to the clobber ops,
2017 but the original *will* be in this list because we keep
2018 the addressability of the original variable up-to-date
2019 to avoid confusing the back-end. */
2020 if (var_can_have_subvars (var)
2021 && get_subvars_for_var (var) != NULL)
2022 continue;
2023
2024 add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
2025 }
2026 break;
2027 }
2028 }
2029
2030
2031 /* Scan operands for the assignment expression EXPR in statement STMT. */
2032
2033 static void
2034 get_modify_stmt_operands (tree stmt, tree expr)
2035 {
2036 /* First get operands from the RHS. */
2037 get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use);
2038
2039 /* For the LHS, use a regular definition (opf_def) for GIMPLE
2040 registers. If the LHS is a store to memory, we will need
2041 a preserving definition (VDEF).
2042
2043 Preserving definitions are those that modify a part of an
2044 aggregate object for which no subvars have been computed (or the
2045 reference does not correspond exactly to one of them). Stores
2046 through a pointer are also represented with VDEF operators.
2047
2048 We used to distinguish between preserving and killing definitions.
2049 We always emit preserving definitions now. */
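/* For instance (a sketch; symbol names are illustrative), a store
through a pointer such as *p_1 = x_2 is annotated with a preserving
definition of the pointed-to memory:

# SMT.5_4 = VDEF <SMT.5_3>
*p_1 = x_2;

while an assignment to a GIMPLE register produces a real definition
directly in the DEF operand vector. */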
2050 get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def);
2051 }
2052
2053
2054 /* Recursively scan the expression pointed to by EXPR_P in statement
2055 STMT. FLAGS is one of the OPF_* constants modifying how to
2056 interpret the operands found. */
2057
2058 static void
2059 get_expr_operands (tree stmt, tree *expr_p, int flags)
2060 {
2061 enum tree_code code;
2062 enum tree_code_class class;
2063 tree expr = *expr_p;
2064 stmt_ann_t s_ann = stmt_ann (stmt);
2065
2066 if (expr == NULL)
2067 return;
2068
2069 code = TREE_CODE (expr);
2070 class = TREE_CODE_CLASS (code);
2071
2072 switch (code)
2073 {
2074 case ADDR_EXPR:
2075 /* Taking the address of a variable does not represent a
2076 reference to it, but the fact that the statement takes its
2077 address will be of interest to some passes (e.g. alias
2078 resolution). */
2079 add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
2080
2081 /* If the address is invariant, there may be no interesting
2082 variable references inside. */
2083 if (is_gimple_min_invariant (expr))
2084 return;
2085
2086 /* Otherwise, there may be variables referenced inside but there
2087 should be no VUSEs created, since the referenced objects are
2088 not really accessed. The only operands that we should find
2089 here are ARRAY_REF indices which will always be real operands
2090 (GIMPLE does not allow non-registers as array indices). */
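/* For example (names are illustrative), in p_1 = &a.b[i_2] only i_2
becomes a real USE operand; 'a' is not read by the address
computation, it is merely recorded in ADDRESSES_TAKEN above. */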
2091 flags |= opf_no_vops;
2092 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2093 return;
2094
2095 case SSA_NAME:
2096 case STRUCT_FIELD_TAG:
2097 case SYMBOL_MEMORY_TAG:
2098 case NAME_MEMORY_TAG:
2099 add_stmt_operand (expr_p, s_ann, flags);
2100 return;
2101
2102 case VAR_DECL:
2103 case PARM_DECL:
2104 case RESULT_DECL:
2105 {
2106 subvar_t svars;
2107
2108 /* Add the subvars for a variable, if it has subvars, to DEFS
2109 or USES. Otherwise, add the variable itself. Whether it
2110 goes to USES or DEFS depends on the operand flags. */
2111 if (var_can_have_subvars (expr)
2112 && (svars = get_subvars_for_var (expr)))
2113 {
2114 subvar_t sv;
2115 for (sv = svars; sv; sv = sv->next)
2116 add_stmt_operand (&sv->var, s_ann, flags);
2117 }
2118 else
2119 add_stmt_operand (expr_p, s_ann, flags);
2120
2121 return;
2122 }
2123
2124 case MISALIGNED_INDIRECT_REF:
2125 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2126 /* fall through */
2127
2128 case ALIGN_INDIRECT_REF:
2129 case INDIRECT_REF:
2130 get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
2131 return;
2132
2133 case TARGET_MEM_REF:
2134 get_tmr_operands (stmt, expr, flags);
2135 return;
2136
2137 case ARRAY_REF:
2138 case ARRAY_RANGE_REF:
2139 case COMPONENT_REF:
2140 case REALPART_EXPR:
2141 case IMAGPART_EXPR:
2142 {
2143 tree ref;
2144 HOST_WIDE_INT offset, size, maxsize;
2145 bool none = true;
2146
2147 /* This component reference becomes an access to all of the
2148 subvariables it can touch, if we can determine that, but
2149 *NOT* the real one. If we can't determine which fields we
2150 could touch, the recursion will eventually get to a
2151 variable and add *all* of its subvars, or whatever is the
2152 minimum correct subset. */
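/* For instance (a sketch, assuming structure field tags have been
computed), if s has subvars SFT.1 for s.a and SFT.2 for s.b, then a
reference to s.a adds an operand only for SFT.1; s itself and SFT.2
are not added. */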
2153 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
2154 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
2155 {
2156 subvar_t sv;
2157 subvar_t svars = get_subvars_for_var (ref);
2158
2159 for (sv = svars; sv; sv = sv->next)
2160 {
2161 bool exact;
2162
2163 if (overlap_subvar (offset, maxsize, sv->var, &exact))
2164 {
2165 int subvar_flags = flags;
2166 none = false;
2167 add_stmt_operand (&sv->var, s_ann, subvar_flags);
2168 }
2169 }
2170
2171 if (!none)
2172 flags |= opf_no_vops;
2173 }
2174 else if (TREE_CODE (ref) == INDIRECT_REF)
2175 {
2176 get_indirect_ref_operands (stmt, ref, flags, expr, offset,
2177 maxsize, false);
2178 flags |= opf_no_vops;
2179 }
2180
2181 /* Even if we found subvars above, we still need to scan the base
2182 expression so that immediate uses such as d in s.a[d] are seen;
2183 if s.a has a subvar we would otherwise miss that use. */
2184 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2185
2186 if (code == COMPONENT_REF)
2187 {
2188 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
2189 s_ann->has_volatile_ops = true;
2190 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2191 }
2192 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
2193 {
2194 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2195 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2196 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
2197 }
2198
2199 return;
2200 }
2201
2202 case WITH_SIZE_EXPR:
2203 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
2204 and an rvalue reference to its second argument. */
2205 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2206 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2207 return;
2208
2209 case CALL_EXPR:
2210 get_call_expr_operands (stmt, expr);
2211 return;
2212
2213 case COND_EXPR:
2214 case VEC_COND_EXPR:
2215 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
2216 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2217 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2218 return;
2219
2220 case GIMPLE_MODIFY_STMT:
2221 get_modify_stmt_operands (stmt, expr);
2222 return;
2223
2224 case CONSTRUCTOR:
2225 {
2226 /* General aggregate CONSTRUCTORs have been decomposed, but they
2227 are still in use as the COMPLEX_EXPR equivalent for vectors. */
2228 constructor_elt *ce;
2229 unsigned HOST_WIDE_INT idx;
2230
2231 for (idx = 0;
2232 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
2233 idx++)
2234 get_expr_operands (stmt, &ce->value, opf_use);
2235
2236 return;
2237 }
2238
2239 case BIT_FIELD_REF:
2240 case TRUTH_NOT_EXPR:
2241 case VIEW_CONVERT_EXPR:
2242 do_unary:
2243 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2244 return;
2245
2246 case TRUTH_AND_EXPR:
2247 case TRUTH_OR_EXPR:
2248 case TRUTH_XOR_EXPR:
2249 case COMPOUND_EXPR:
2250 case OBJ_TYPE_REF:
2251 case ASSERT_EXPR:
2252 do_binary:
2253 {
2254 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2255 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2256 return;
2257 }
2258
2259 case DOT_PROD_EXPR:
2260 case REALIGN_LOAD_EXPR:
2261 {
2262 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2263 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2264 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
2265 return;
2266 }
2267
2268 case BLOCK:
2269 case FUNCTION_DECL:
2270 case EXC_PTR_EXPR:
2271 case FILTER_EXPR:
2272 case LABEL_DECL:
2273 case CONST_DECL:
2274 case OMP_PARALLEL:
2275 case OMP_SECTIONS:
2276 case OMP_FOR:
2277 case OMP_SINGLE:
2278 case OMP_MASTER:
2279 case OMP_ORDERED:
2280 case OMP_CRITICAL:
2281 case OMP_RETURN:
2282 case OMP_CONTINUE:
2283 /* Expressions that make no memory references. */
2284 return;
2285
2286 default:
2287 if (class == tcc_unary)
2288 goto do_unary;
2289 if (class == tcc_binary || class == tcc_comparison)
2290 goto do_binary;
2291 if (class == tcc_constant || class == tcc_type)
2292 return;
2293 }
2294
2295 /* If we get here, something has gone wrong. */
2296 #ifdef ENABLE_CHECKING
2297 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
2298 debug_tree (expr);
2299 fputs ("\n", stderr);
2300 #endif
2301 gcc_unreachable ();
2302 }
2303
2304
2305 /* Parse STMT looking for operands. When finished, the various
2306 build_* operand vectors will have potential operands in them. */
2307
2308 static void
2309 parse_ssa_operands (tree stmt)
2310 {
2311 enum tree_code code;
2312
2313 code = TREE_CODE (stmt);
2314 switch (code)
2315 {
2316 case GIMPLE_MODIFY_STMT:
2317 get_modify_stmt_operands (stmt, stmt);
2318 break;
2319
2320 case COND_EXPR:
2321 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use);
2322 break;
2323
2324 case SWITCH_EXPR:
2325 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use);
2326 break;
2327
2328 case ASM_EXPR:
2329 get_asm_expr_operands (stmt);
2330 break;
2331
2332 case RETURN_EXPR:
2333 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use);
2334 break;
2335
2336 case GOTO_EXPR:
2337 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use);
2338 break;
2339
2340 case LABEL_EXPR:
2341 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use);
2342 break;
2343
2344 case BIND_EXPR:
2345 case CASE_LABEL_EXPR:
2346 case TRY_CATCH_EXPR:
2347 case TRY_FINALLY_EXPR:
2348 case EH_FILTER_EXPR:
2349 case CATCH_EXPR:
2350 case RESX_EXPR:
2351 /* These nodes contain no variable references. */
2352 break;
2353
2354 default:
2355 /* Notice that if get_expr_operands tries to use &STMT as the
2356 operand pointer (which may only happen for USE operands), we
2357 will fail in add_stmt_operand. This default will handle
2358 statements like empty statements, or CALL_EXPRs that may
2359 appear on the RHS of a statement or as statements themselves. */
2360 get_expr_operands (stmt, &stmt, opf_use);
2361 break;
2362 }
2363 }
2364
2365
2366 /* Create an operands cache for STMT. */
2367
2368 static void
2369 build_ssa_operands (tree stmt)
2370 {
2371 stmt_ann_t ann = get_stmt_ann (stmt);
2372
2373 /* Initially assume that the statement has no volatile operands and
2374 makes no memory references. */
2375 ann->has_volatile_ops = false;
2376 ann->references_memory = false;
2377 /* Just clear the bitmap so we don't end up reallocating it over and over. */
2378 if (ann->addresses_taken)
2379 bitmap_clear (ann->addresses_taken);
2380
2381 start_ssa_stmt_operands ();
2382 parse_ssa_operands (stmt);
2383 operand_build_sort_virtual (build_vuses);
2384 operand_build_sort_virtual (build_vdefs);
2385 finalize_ssa_stmt_operands (stmt);
2386
2387 if (ann->addresses_taken && bitmap_empty_p (ann->addresses_taken))
2388 ann->addresses_taken = NULL;
2389 /* For added safety, assume that statements with volatile operands
2390 also reference memory. */
2391 if (ann->has_volatile_ops)
2392 ann->references_memory = true;
2393 }
2394
2395
2396 /* Free any operands vectors in OPS. */
2397
2398 void
2399 free_ssa_operands (stmt_operands_p ops)
2400 {
2401 ops->def_ops = NULL;
2402 ops->use_ops = NULL;
2403 ops->vdef_ops = NULL;
2404 ops->vuse_ops = NULL;
2405 BITMAP_FREE (ops->loads);
2406 BITMAP_FREE (ops->stores);
2407 }
2408
2409
2410 /* Get the operands of statement STMT. */
2411
2412 void
2413 update_stmt_operands (tree stmt)
2414 {
2415 stmt_ann_t ann = get_stmt_ann (stmt);
2416
2417 /* If update_stmt_operands is called before SSA is initialized, do
2418 nothing. */
2419 if (!ssa_operands_active ())
2420 return;
2421
2422 /* The optimizers cannot handle statements that are nothing but a
2423 _DECL. This indicates a bug in the gimplifier. */
2424 gcc_assert (!SSA_VAR_P (stmt));
2425
2426 timevar_push (TV_TREE_OPS);
2427
2428 gcc_assert (ann->modified);
2429 build_ssa_operands (stmt);
2430 ann->modified = 0;
2431
2432 timevar_pop (TV_TREE_OPS);
2433 }
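
/* A pass that has modified a statement in place normally reaches this
function through update_stmt.  A minimal sketch of the expected usage
(assuming a block_stmt_iterator BSI; the details are illustrative):

tree stmt = bsi_stmt (bsi);
... rewrite the operands of STMT ...
update_stmt (stmt);

update_stmt marks the statement modified, which satisfies the
assertion above, and the operand cache is then rebuilt here. */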
2434
2435
2436 /* Copies virtual operands from SRC to DST. */
2437
2438 void
2439 copy_virtual_operands (tree dest, tree src)
2440 {
2441 unsigned int i, n;
2442 voptype_p src_vuses, dest_vuses;
2443 voptype_p src_vdefs, dest_vdefs;
2444 struct voptype_d vuse;
2445 struct voptype_d vdef;
2446 stmt_ann_t dest_ann;
2447
2448 VDEF_OPS (dest) = NULL;
2449 VUSE_OPS (dest) = NULL;
2450
2451 dest_ann = get_stmt_ann (dest);
2452 BITMAP_FREE (dest_ann->operands.loads);
2453 BITMAP_FREE (dest_ann->operands.stores);
2454
2455 if (LOADED_SYMS (src))
2456 {
2457 dest_ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
2458 bitmap_copy (dest_ann->operands.loads, LOADED_SYMS (src));
2459 }
2460
2461 if (STORED_SYMS (src))
2462 {
2463 dest_ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
2464 bitmap_copy (dest_ann->operands.stores, STORED_SYMS (src));
2465 }
2466
2467 /* Copy all the VUSE operators and corresponding operands. */
2468 dest_vuses = &vuse;
2469 for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next)
2470 {
2471 n = VUSE_NUM (src_vuses);
2472 dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses);
2473 for (i = 0; i < n; i++)
2474 SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));
2475
2476 if (VUSE_OPS (dest) == NULL)
2477 VUSE_OPS (dest) = vuse.next;
2478 }
2479
2480 /* Copy all the VDEF operators and corresponding operands. */
2481 dest_vdefs = &vdef;
2482 for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next)
2483 {
2484 n = VUSE_NUM (src_vdefs);
2485 dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs);
2486 VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
2487 for (i = 0; i < n; i++)
2488 SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));
2489
2490 if (VDEF_OPS (dest) == NULL)
2491 VDEF_OPS (dest) = vdef.next;
2492 }
2493 }
2494
2495
2496 /* Specifically for use in DOM's expression analysis.  Given a store, we
2497 create an artificial stmt which looks like a load from the store; this can
2498 be used to eliminate redundant loads.  OLD_STMT is the statement containing
2499 the store, and NEW_STMT is the new load which represents a load of the
2500 values stored. */
2501
2502 void
2503 create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt)
2504 {
2505 tree op;
2506 ssa_op_iter iter;
2507 use_operand_p use_p;
2508 unsigned i;
2509
2510 get_stmt_ann (new_stmt);
2511
2512 /* Process NEW_STMT looking for operands. */
2513 start_ssa_stmt_operands ();
2514 parse_ssa_operands (new_stmt);
2515
2516 for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
2517 if (TREE_CODE (op) != SSA_NAME)
2518 var_ann (op)->in_vuse_list = false;
2519
2520 for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
2521 if (TREE_CODE (op) != SSA_NAME)
2522 var_ann (op)->in_vdef_list = false;
2523
2524 /* Remove any virtual operands that were found. */
2525 VEC_truncate (tree, build_vdefs, 0);
2526 VEC_truncate (tree, build_vuses, 0);
2527
2528 /* For each VDEF on the original statement, we want to create a
2529 VUSE of the VDEF result operand on the new statement. */
2530 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
2531 append_vuse (op);
2532
2533 finalize_ssa_stmt_operands (new_stmt);
2534
2535 /* All uses in this fake stmt must not be in the immediate use lists. */
2536 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2537 delink_imm_use (use_p);
2538 }
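
/* For example (SSA names are illustrative), given the store

# SMT.5_4 = VDEF <SMT.5_3>
*p_1 = x_2;

the artificial load statement built above is given

# VUSE <SMT.5_4>

i.e. it uses the result of each VDEF on the store, so that a later
load of the same location can be recognized as redundant. */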
2539
2540
2541 /* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
2542 to check the validity of the swap operation. */
2543
2544 void
2545 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2546 {
2547 tree op0, op1;
2548 op0 = *exp0;
2549 op1 = *exp1;
2550
2551 /* If the operand cache is active, attempt to preserve the relative
2552 positions of these two operands in their respective immediate use
2553 lists. */
2554 if (ssa_operands_active () && op0 != op1)
2555 {
2556 use_optype_p use0, use1, ptr;
2557 use0 = use1 = NULL;
2558
2559 /* Find the 2 operands in the cache, if they are there. */
2560 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2561 if (USE_OP_PTR (ptr)->use == exp0)
2562 {
2563 use0 = ptr;
2564 break;
2565 }
2566
2567 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2568 if (USE_OP_PTR (ptr)->use == exp1)
2569 {
2570 use1 = ptr;
2571 break;
2572 }
2573
2574 /* If we could not find operand entries for both uses, there isn't much
2575 we can do at this point.  Presumably we don't need to worry about it. */
2576 if (use0 && use1)
2577 {
2578 tree *tmp = USE_OP_PTR (use1)->use;
2579 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2580 USE_OP_PTR (use0)->use = tmp;
2581 }
2582 }
2583
2584 /* Now swap the data. */
2585 *exp0 = op1;
2586 *exp1 = op0;
2587 }
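
/* A caller canonicalizing a commutative expression might use this as
follows (a sketch; the statement layout is assumed to be a
GIMPLE_MODIFY_STMT with a binary RHS):

tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0), &TREE_OPERAND (rhs, 1));

Passing pointers into the statement itself is what allows the operand
cache entries to be fixed up above. */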
2588
2589
2590 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
2591 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
2592 a single variable whose address has been taken or any other valid
2593 GIMPLE memory reference (structure reference, array, etc). If the
2594 base address of REF is a decl that has sub-variables, also add all
2595 of its sub-variables. */
2596
2597 void
2598 add_to_addressable_set (tree ref, bitmap *addresses_taken)
2599 {
2600 tree var;
2601 subvar_t svars;
2602
2603 gcc_assert (addresses_taken);
2604
2605 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2606 as the only thing we take the address of. If VAR is a structure,
2607 taking the address of a field means that the whole structure may
2608 be referenced using pointer arithmetic. See PR 21407 and the
2609 ensuing mailing list discussion. */
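/* For example (names are illustrative), for p_1 = &s.f we record the
base 's' (or all of its sub-variables, if it has any) in
*ADDRESSES_TAKEN and mark it TREE_ADDRESSABLE, rather than just the
field 'f', because the rest of 's' may now be reached through pointer
arithmetic on p_1. */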
2610 var = get_base_address (ref);
2611 if (var && SSA_VAR_P (var))
2612 {
2613 if (*addresses_taken == NULL)
2614 *addresses_taken = BITMAP_GGC_ALLOC ();
2615
2616 if (var_can_have_subvars (var)
2617 && (svars = get_subvars_for_var (var)))
2618 {
2619 subvar_t sv;
2620 for (sv = svars; sv; sv = sv->next)
2621 {
2622 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
2623 TREE_ADDRESSABLE (sv->var) = 1;
2624 }
2625 }
2626 else
2627 {
2628 bitmap_set_bit (*addresses_taken, DECL_UID (var));
2629 TREE_ADDRESSABLE (var) = 1;
2630 }
2631 }
2632 }
2633
2634
2635 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2636 Return TRUE if there is a problem and emit an error message to F. */
2637
2638 bool
2639 verify_imm_links (FILE *f, tree var)
2640 {
2641 use_operand_p ptr, prev, list;
2642 int count;
2643
2644 gcc_assert (TREE_CODE (var) == SSA_NAME);
2645
2646 list = &(SSA_NAME_IMM_USE_NODE (var));
2647 gcc_assert (list->use == NULL);
2648
2649 if (list->prev == NULL)
2650 {
2651 gcc_assert (list->next == NULL);
2652 return false;
2653 }
2654
2655 prev = list;
2656 count = 0;
2657 for (ptr = list->next; ptr != list; )
2658 {
2659 if (prev != ptr->prev)
2660 goto error;
2661
2662 if (ptr->use == NULL)
2663 goto error; /* 2 roots, or SAFE guard node. */
2664 else if (*(ptr->use) != var)
2665 goto error;
2666
2667 prev = ptr;
2668 ptr = ptr->next;
2669
2670 /* Avoid infinite loops. 50,000,000 uses probably indicates a
2671 problem. */
2672 if (count++ > 50000000)
2673 goto error;
2674 }
2675
2676 /* Verify list in the other direction. */
2677 prev = list;
2678 for (ptr = list->prev; ptr != list; )
2679 {
2680 if (prev != ptr->next)
2681 goto error;
2682 prev = ptr;
2683 ptr = ptr->prev;
2684 if (count-- < 0)
2685 goto error;
2686 }
2687
2688 if (count != 0)
2689 goto error;
2690
2691 return false;
2692
2693 error:
2694 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2695 {
2696 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2697 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2698 }
2699 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2700 (void *)ptr->use);
2701 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2702 fprintf (f, "\n");
2703 return true;
2704 }
2705
2706
2707 /* Dump all the immediate uses of VAR to FILE. */
2708
2709 void
2710 dump_immediate_uses_for (FILE *file, tree var)
2711 {
2712 imm_use_iterator iter;
2713 use_operand_p use_p;
2714
2715 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2716
2717 print_generic_expr (file, var, TDF_SLIM);
2718 fprintf (file, " : -->");
2719 if (has_zero_uses (var))
2720 fprintf (file, " no uses.\n");
2721 else
2722 if (has_single_use (var))
2723 fprintf (file, " single use.\n");
2724 else
2725 fprintf (file, "%d uses.\n", num_imm_uses (var));
2726
2727 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2728 {
2729 if (use_p->stmt == NULL && use_p->use == NULL)
2730 fprintf (file, "***end of stmt iterator marker***\n");
2731 else
2732 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2733 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS|TDF_MEMSYMS);
2734 else
2735 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2736 }
2737 fprintf (file, "\n");
2738 }
2739
2740
2741 /* Dump all the immediate uses to FILE. */
2742
2743 void
2744 dump_immediate_uses (FILE *file)
2745 {
2746 tree var;
2747 unsigned int x;
2748
2749 fprintf (file, "Immediate_uses: \n\n");
2750 for (x = 1; x < num_ssa_names; x++)
2751 {
2752 var = ssa_name (x);
2753 if (!var)
2754 continue;
2755 dump_immediate_uses_for (file, var);
2756 }
2757 }
2758
2759
2760 /* Dump def-use edges on stderr. */
2761
2762 void
2763 debug_immediate_uses (void)
2764 {
2765 dump_immediate_uses (stderr);
2766 }
2767
2768
2769 /* Dump def-use edges on stderr for variable VAR. */
2770
2771 void
2772 debug_immediate_uses_for (tree var)
2773 {
2774 dump_immediate_uses_for (stderr, var);
2775 }
2776
2777
2778 /* Create a new change buffer for the statement pointed by STMT_P and
2779 push the buffer into SCB_STACK. Each change buffer
2780 records state information needed to determine what changed in the
2781 statement. Mainly, this keeps track of symbols that may need to be
2782 put into SSA form, SSA name replacements and other information
2783 needed to keep the SSA form up to date. */
2784
2785 void
2786 push_stmt_changes (tree *stmt_p)
2787 {
2788 tree stmt;
2789 scb_t buf;
2790
2791 stmt = *stmt_p;
2792
2793 /* It makes no sense to keep track of PHI nodes. */
2794 if (TREE_CODE (stmt) == PHI_NODE)
2795 return;
2796
2797 buf = xmalloc (sizeof *buf);
2798 memset (buf, 0, sizeof *buf);
2799
2800 buf->stmt_p = stmt_p;
2801
2802 if (stmt_references_memory_p (stmt))
2803 {
2804 tree op;
2805 ssa_op_iter i;
2806
2807 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
2808 {
2809 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2810 if (buf->loads == NULL)
2811 buf->loads = BITMAP_ALLOC (NULL);
2812 bitmap_set_bit (buf->loads, DECL_UID (sym));
2813 }
2814
2815 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
2816 {
2817 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2818 if (buf->stores == NULL)
2819 buf->stores = BITMAP_ALLOC (NULL);
2820 bitmap_set_bit (buf->stores, DECL_UID (sym));
2821 }
2822 }
2823
2824 VEC_safe_push (scb_t, heap, scb_stack, buf);
2825 }
2826
2827
2828 /* Given two sets S1 and S2, mark the symbols that differ in S1 and S2
2829 for renaming. The set to mark for renaming is (S1 & ~S2) | (S2 & ~S1). */
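
/* For example, if S1 = {a, b} and S2 = {b, c}, then {a, c} is marked
for renaming: 'b' appears in both sets, so its SSA web does not need
to be updated. */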
2830
2831 static void
2832 mark_difference_for_renaming (bitmap s1, bitmap s2)
2833 {
2834 if (s1 == NULL && s2 == NULL)
2835 return;
2836
2837 if (s1 && s2 == NULL)
2838 mark_set_for_renaming (s1);
2839 else if (s1 == NULL && s2)
2840 mark_set_for_renaming (s2);
2841 else if (!bitmap_equal_p (s1, s2))
2842 {
2843 bitmap t1 = BITMAP_ALLOC (NULL);
2844 bitmap t2 = BITMAP_ALLOC (NULL);
2845
2846 bitmap_and_compl (t1, s1, s2);
2847 bitmap_and_compl (t2, s2, s1);
2848 bitmap_ior_into (t1, t2);
2849 mark_set_for_renaming (t1);
2850
2851 BITMAP_FREE (t1);
2852 BITMAP_FREE (t2);
2853 }
2854 }
2855
2856
2857 /* Pop the top SCB from SCB_STACK and act on the differences between
2858 what was recorded by push_stmt_changes and the current state of
2859 the statement. */
2860
2861 void
2862 pop_stmt_changes (tree *stmt_p)
2863 {
2864 tree op, stmt;
2865 ssa_op_iter iter;
2866 bitmap loads, stores;
2867 scb_t buf;
2868
2869 stmt = *stmt_p;
2870
2871 /* It makes no sense to keep track of PHI nodes. */
2872 if (TREE_CODE (stmt) == PHI_NODE)
2873 return;
2874
2875 buf = VEC_pop (scb_t, scb_stack);
2876 gcc_assert (stmt_p == buf->stmt_p);
2877
2878 /* Force an operand re-scan on the statement and mark any newly
2879 exposed variables. */
2880 update_stmt (stmt);
2881
2882 /* Determine whether any memory symbols need to be renamed. If the
2883 sets of loads and stores are different after the statement is
2884 modified, then the affected symbols need to be renamed.
2885
2886 Note that it may be possible for the statement to not reference
2887 memory anymore, but we still need to act on the differences in
2888 the sets of symbols. */
2889 loads = stores = NULL;
2890 if (stmt_references_memory_p (stmt))
2891 {
2892 tree op;
2893 ssa_op_iter i;
2894
2895 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
2896 {
2897 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2898 if (loads == NULL)
2899 loads = BITMAP_ALLOC (NULL);
2900 bitmap_set_bit (loads, DECL_UID (sym));
2901 }
2902
2903 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
2904 {
2905 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2906 if (stores == NULL)
2907 stores = BITMAP_ALLOC (NULL);
2908 bitmap_set_bit (stores, DECL_UID (sym));
2909 }
2910 }
2911
2912 /* If LOADS is different from BUF->LOADS, the affected
2913 symbols need to be marked for renaming. */
2914 mark_difference_for_renaming (loads, buf->loads);
2915
2916 /* Similarly for STORES and BUF->STORES. */
2917 mark_difference_for_renaming (stores, buf->stores);
2918
2919 /* Mark all the naked GIMPLE register operands for renaming. */
2920 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
2921 if (DECL_P (op))
2922 mark_sym_for_renaming (op);
2923
2924 /* FIXME, need to add more finalizers here. Cleanup EH info,
2925 recompute invariants for address expressions, add
2926 SSA replacement mappings, etc. For instance, given
2927 testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of
2928 the form:
2929
2930 # SMT.4_20 = VDEF <SMT.4_16>
2931 D.1576_11 = 1.0e+0;
2932
2933 So, the VDEF will disappear, but instead of marking SMT.4 for
2934 renaming it would be far more efficient to establish a
2935 replacement mapping that would replace every reference of
2936 SMT.4_20 with SMT.4_16. */
2937
2938 /* Free memory used by the buffer. */
2939 BITMAP_FREE (buf->loads);
2940 BITMAP_FREE (buf->stores);
2941 BITMAP_FREE (loads);
2942 BITMAP_FREE (stores);
2943 buf->stmt_p = NULL;
2944 free (buf);
2945 }
2946
2947
2948 /* Discard the topmost change buffer from SCB_STACK.  This is useful
2949 when the caller realizes that it did not actually modify the
2950 statement.  It avoids the expensive operand re-scan. */
2951
2952 void
2953 discard_stmt_changes (tree *stmt_p)
2954 {
2955 scb_t buf;
2956 tree stmt;
2957
2958 /* It makes no sense to keep track of PHI nodes. */
2959 stmt = *stmt_p;
2960 if (TREE_CODE (stmt) == PHI_NODE)
2961 return;
2962
2963 buf = VEC_pop (scb_t, scb_stack);
2964 gcc_assert (stmt_p == buf->stmt_p);
2965
2966 /* Free memory used by the buffer. */
2967 BITMAP_FREE (buf->loads);
2968 BITMAP_FREE (buf->stores);
2969 buf->stmt_p = NULL;
2970 free (buf);
2971 }
2972
2973
2974 /* Returns true if statement STMT may access memory. */
2975
2976 bool
2977 stmt_references_memory_p (tree stmt)
2978 {
2979 if (!gimple_ssa_operands (cfun)->ops_active || TREE_CODE (stmt) == PHI_NODE)
2980 return false;
2981
2982 return stmt_ann (stmt)->references_memory;
2983 }