gcc/tree-ssa-operands.c
1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43 The operand types are broken up into REAL and VIRTUAL operands. The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53 The operand tree is then parsed by the various get_* routines, which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56 found. There are 4 of these routines, each representing one of the
57 4 types of operands: Defs, Uses, Virtual Uses (VUSEs), and Virtual Defs (VDEFs).
58
59 The append_* routines check for duplication, and simply keep a list of
60 unique objects for each operand type in the build_* extendable vectors.
61
62 Once the stmt tree is completely parsed, the finalize_ssa_stmt_operands()
63 routine is called, which proceeds to perform the finalization routine
64 on each of the 4 operand vectors which have been built up.
65
66 If the stmt had a previous operand cache, the finalization routines
67 attempt to match up the new operands with the old ones. If it's a perfect
68 match, the old vector is simply reused. If it isn't a perfect match, then
69 a new vector is created and the new operands are placed there. For
70 virtual operands, if the previous cache had an SSA_NAME version of a
71 variable, and that same variable occurs in the new operands cache, then
72 the new cache vector will also get the same SSA_NAME.
73
74 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
75 vector for VUSE, then the new vector will also be modified such that
76 it contains 'a_5' rather than 'a'. */
77
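/* As a rough illustration (hypothetical statements, not taken from any
   particular test case): for a statement

	a = b + c;

   where 'a', 'b' and 'c' are GIMPLE registers, scanning pushes a pointer
   to the 'a' operand slot onto build_defs and pointers to the 'b' and 'c'
   slots onto build_uses; the finalize routines then turn these vectors
   into the DEF and USE operand lists attached to the statement.  For a
   statement such as

	*p = x;

   where 'p' may point to a non-register variable 'a', 'a' is instead
   pushed onto build_vdefs and later appears as the virtual operand
   '# a_3 = VDEF <a_2>' once alias information is available.  */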
78
79 /* Structure storing statistics on how many call clobbers we have, and
80 how many were avoided. */
81
82 static struct
83 {
84 /* Number of call-clobbered ops we attempt to add to calls in
85 add_call_clobber_ops. */
86 unsigned int clobbered_vars;
87
88 /* Number of write-clobbers (VDEFs) avoided by using
89 not_written information. */
90 unsigned int static_write_clobbers_avoided;
91
92 /* Number of reads (VUSEs) avoided by using not_read information. */
93 unsigned int static_read_clobbers_avoided;
94
95 /* Number of write-clobbers avoided because the variable can't escape to
96 this call. */
97 unsigned int unescapable_clobbers_avoided;
98
99 /* Number of read-only uses we attempt to add to calls in
100 add_call_read_ops. */
101 unsigned int readonly_clobbers;
102
103 /* Number of read-only uses we avoid using not_read information. */
104 unsigned int static_readonly_clobbers_avoided;
105 } clobber_stats;
106
107
108 /* Flags to describe operand properties in helpers. */
109
110 /* By default, operands are loaded. */
111 #define opf_use 0
112
113 /* Operand is the target of an assignment expression or a
114 call-clobbered variable. */
115 #define opf_def (1 << 0)
116
117 /* No virtual operands should be created in the expression. This is used
118 when traversing ADDR_EXPR nodes which have different semantics than
119 other expressions. Inside an ADDR_EXPR node, the only operands that we
120 need to consider are indices into arrays. For instance, &a.b[i] should
121 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
122 VUSE for 'b'. */
123 #define opf_no_vops (1 << 1)
124
125 /* Operand is an implicit reference. This is used to distinguish
126 explicit assignments in the form of GIMPLE_MODIFY_STMT from
127 clobbering sites like function calls or ASM_EXPRs. */
128 #define opf_implicit (1 << 2)
129
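/* A rough usage sketch (assumed typical calls; the scanning routines
   themselves appear further down in this file): the LHS of a
   GIMPLE_MODIFY_STMT is walked with

	get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (stmt, 0), opf_def);

   the RHS is walked with opf_use, and the operands hanging off an
   ADDR_EXPR have opf_no_vops added so that no virtual operands are
   created for the address-taken object itself.  */
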
130 /* Array for building all the def operands. */
131 static VEC(tree,heap) *build_defs;
132
133 /* Array for building all the use operands. */
134 static VEC(tree,heap) *build_uses;
135
136 /* Set for building all the VDEF operands. */
137 static VEC(tree,heap) *build_vdefs;
138
139 /* Set for building all the VUSE operands. */
140 static VEC(tree,heap) *build_vuses;
141
142 Bitmap obstack for our data structures. It needs to survive across
143 compilations of multiple functions. */
144 static bitmap_obstack operands_bitmap_obstack;
145 /* Set for building all the loaded symbols. */
146 static bitmap build_loads;
147
148 /* Set for building all the stored symbols. */
149 static bitmap build_stores;
150
151 static void get_expr_operands (tree, tree *, int);
152
153 /* Number of functions with initialized ssa_operands. */
154 static int n_initialized = 0;
155
156 /* Statement change buffer. Data structure used to record state
157 information for statements. This is used to determine what needs
158 to be done in order to update the SSA web after a statement is
159 modified by a pass. If STMT is a statement that has just been
160 created, needs to be folded via fold_stmt, or is about to be modified
161 in some other way that changes its physical structure, then the pass should:
162
163 1- Call push_stmt_changes (&stmt) to record the current state of
164 STMT before any modifications are made.
165
166 2- Make all appropriate modifications to the statement.
167
168 3- Call pop_stmt_changes (&stmt) to find new symbols that
169 need to be put in SSA form, SSA name mappings for names that
170 have disappeared, recompute invariantness for address
171 expressions, cleanup EH information, etc.
172
173 If it is possible to determine that the statement was not modified,
174 instead of calling pop_stmt_changes it is quicker to call
175 discard_stmt_changes to avoid the expensive and unnecessary operand
176 re-scan and change comparison. */
177
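/* A minimal usage sketch for a pass (assuming the fold_stmt interface
   that takes a 'tree *' and returns true when the statement changed):

	push_stmt_changes (&stmt);
	if (fold_stmt (&stmt))
	  pop_stmt_changes (&stmt);
	else
	  discard_stmt_changes (&stmt);  */
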
178 struct scb_d
179 {
180 /* Pointer to the statement being modified. */
181 tree *stmt_p;
182
183 /* If the statement references memory these are the sets of symbols
184 loaded and stored by the statement. */
185 bitmap loads;
186 bitmap stores;
187 };
188
189 typedef struct scb_d *scb_t;
190 DEF_VEC_P(scb_t);
191 DEF_VEC_ALLOC_P(scb_t,heap);
192
193 /* Stack of statement change buffers (SCB). Every call to
194 push_stmt_changes pushes a new buffer onto the stack. Calls to
195 pop_stmt_changes pop a buffer off of the stack and compute the set
196 of changes for the popped statement. */
197 static VEC(scb_t,heap) *scb_stack;
198
199 /* Return the DECL_UID of the base variable of T. */
200
201 static inline unsigned
202 get_name_decl (tree t)
203 {
204 if (TREE_CODE (t) != SSA_NAME)
205 return DECL_UID (t);
206 else
207 return DECL_UID (SSA_NAME_VAR (t));
208 }
209
210
211 /* Comparison function for qsort used in operand_build_sort_virtual. */
212
213 static int
214 operand_build_cmp (const void *p, const void *q)
215 {
216 tree e1 = *((const tree *)p);
217 tree e2 = *((const tree *)q);
218 unsigned int u1,u2;
219
220 u1 = get_name_decl (e1);
221 u2 = get_name_decl (e2);
222
223 /* We want to sort in ascending order. They can never be equal. */
224 #ifdef ENABLE_CHECKING
225 gcc_assert (u1 != u2);
226 #endif
227 return (u1 > u2 ? 1 : -1);
228 }
229
230
231 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
232
233 static inline void
234 operand_build_sort_virtual (VEC(tree,heap) *list)
235 {
236 int num = VEC_length (tree, list);
237
238 if (num < 2)
239 return;
240
241 if (num == 2)
242 {
243 if (get_name_decl (VEC_index (tree, list, 0))
244 > get_name_decl (VEC_index (tree, list, 1)))
245 {
246 /* Swap elements if in the wrong order. */
247 tree tmp = VEC_index (tree, list, 0);
248 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
249 VEC_replace (tree, list, 1, tmp);
250 }
251 return;
252 }
253
254 /* There are 3 or more elements, call qsort. */
255 qsort (VEC_address (tree, list),
256 VEC_length (tree, list),
257 sizeof (tree),
258 operand_build_cmp);
259 }
260
261
262 /* Return true if the SSA operands cache is active. */
263
264 bool
265 ssa_operands_active (void)
266 {
267 return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
268 }
269
270
271 /* VOPs are of variable size, so the free list maps "free buckets" to the
272 following table:
273 bucket # operands
274 ------ ----------
275 0 1
276 1 2
277 ...
278 15 16
279 16 17-24
280 17 25-32
281 18 33-40
282 ...
283 29 121-128
284 Any VOPs larger than this are simply added to the largest bucket when they
285 are freed. */
286
287
288 /* Return the number of operands used in bucket BUCKET. */
289
290 static inline int
291 vop_free_bucket_size (int bucket)
292 {
293 #ifdef ENABLE_CHECKING
294 gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS);
295 #endif
296 if (bucket < 16)
297 return bucket + 1;
298 return (bucket - 13) * 8;
299 }
300
301
302 /* For a vop of NUM operands, return the bucket NUM belongs to. If NUM is
303 beyond the end of the bucket table, return -1. */
304
305 static inline int
306 vop_free_bucket_index (int num)
307 {
308 gcc_assert (num > 0 && NUM_VOP_FREE_BUCKETS > 16);
309
310 /* Sizes 1 through 16 use buckets 0-15. */
311 if (num <= 16)
312 return num - 1;
313 /* Buckets 16 - NUM_VOP_FREE_BUCKETS represent 8 unit chunks. */
314 num = 14 + (num - 1) / 8;
315 if (num >= NUM_VOP_FREE_BUCKETS)
316 return -1;
317 else
318 return num;
319 }
320
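/* Worked examples of the mapping above (assuming NUM_VOP_FREE_BUCKETS is
   30, as the table suggests): a VOP with 5 operands maps to bucket 4, one
   with 17 operands to bucket 14 + (17 - 1) / 8 = 16, one with 33 operands
   to bucket 18, and anything larger than 128 operands falls off the table
   and yields -1.  */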
321
322 /* Initialize the VOP free buckets. */
323
324 static inline void
325 init_vop_buckets (void)
326 {
327 int x;
328
329 for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++)
330 gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL;
331 }
332
333
334 /* Add PTR to the appropriate VOP bucket. */
335
336 static inline void
337 add_vop_to_freelist (voptype_p ptr)
338 {
339 int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev));
340
341 /* Too large, use the largest bucket so it's not a complete throwaway. */
342 if (bucket == -1)
343 bucket = NUM_VOP_FREE_BUCKETS - 1;
344
345 ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
346 gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr;
347 }
348
349
350 /* These are the sizes of the operand memory buffer which gets allocated each
351 time more operand space is required. The final value is the amount that is
352 allocated every time after that. */
353
354 #define OP_SIZE_INIT 0
355 #define OP_SIZE_1 30
356 #define OP_SIZE_2 110
357 #define OP_SIZE_3 511
358
359 /* Initialize the operand cache routines. */
360
361 void
362 init_ssa_operands (void)
363 {
364 if (!n_initialized++)
365 {
366 build_defs = VEC_alloc (tree, heap, 5);
367 build_uses = VEC_alloc (tree, heap, 10);
368 build_vuses = VEC_alloc (tree, heap, 25);
369 build_vdefs = VEC_alloc (tree, heap, 25);
370 bitmap_obstack_initialize (&operands_bitmap_obstack);
371 build_loads = BITMAP_ALLOC (&operands_bitmap_obstack);
372 build_stores = BITMAP_ALLOC (&operands_bitmap_obstack);
373 scb_stack = VEC_alloc (scb_t, heap, 20);
374 }
375
376 gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
377 gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
378 gimple_ssa_operands (cfun)->operand_memory_index
379 = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
380 gimple_ssa_operands (cfun)->ops_active = true;
381 memset (&clobber_stats, 0, sizeof (clobber_stats));
382 init_vop_buckets ();
383 gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
384 }
385
386
387 /* Dispose of anything required by the operand routines. */
388
389 void
390 fini_ssa_operands (void)
391 {
392 struct ssa_operand_memory_d *ptr;
393 unsigned ix;
394 tree mpt;
395
396 if (!--n_initialized)
397 {
398 VEC_free (tree, heap, build_defs);
399 VEC_free (tree, heap, build_uses);
400 VEC_free (tree, heap, build_vdefs);
401 VEC_free (tree, heap, build_vuses);
402 BITMAP_FREE (build_loads);
403 BITMAP_FREE (build_stores);
404
405 /* The change buffer stack had better be empty. */
406 gcc_assert (VEC_length (scb_t, scb_stack) == 0);
407 VEC_free (scb_t, heap, scb_stack);
408 scb_stack = NULL;
409 }
410
411 gimple_ssa_operands (cfun)->free_defs = NULL;
412 gimple_ssa_operands (cfun)->free_uses = NULL;
413
414 while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
415 {
416 gimple_ssa_operands (cfun)->operand_memory
417 = gimple_ssa_operands (cfun)->operand_memory->next;
418 ggc_free (ptr);
419 }
420
421 for (ix = 0;
422 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
423 ix++)
424 {
425 if (mpt)
426 BITMAP_FREE (MPT_SYMBOLS (mpt));
427 }
428
429 VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);
430
431 gimple_ssa_operands (cfun)->ops_active = false;
432
433 if (!n_initialized)
434 bitmap_obstack_release (&operands_bitmap_obstack);
435 if (dump_file && (dump_flags & TDF_STATS))
436 {
437 fprintf (dump_file, "Original clobbered vars: %d\n",
438 clobber_stats.clobbered_vars);
439 fprintf (dump_file, "Static write clobbers avoided: %d\n",
440 clobber_stats.static_write_clobbers_avoided);
441 fprintf (dump_file, "Static read clobbers avoided: %d\n",
442 clobber_stats.static_read_clobbers_avoided);
443 fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
444 clobber_stats.unescapable_clobbers_avoided);
445 fprintf (dump_file, "Original read-only clobbers: %d\n",
446 clobber_stats.readonly_clobbers);
447 fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
448 clobber_stats.static_readonly_clobbers_avoided);
449 }
450 }
451
452
453 /* Return memory for operands of SIZE chunks. */
454
455 static inline void *
456 ssa_operand_alloc (unsigned size)
457 {
458 char *ptr;
459
460 if (gimple_ssa_operands (cfun)->operand_memory_index + size
461 >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
462 {
463 struct ssa_operand_memory_d *ptr;
464
465 if (gimple_ssa_operands (cfun)->ssa_operand_mem_size == OP_SIZE_INIT)
466 gimple_ssa_operands (cfun)->ssa_operand_mem_size
467 = OP_SIZE_1 * sizeof (struct voptype_d);
468 else
469 if (gimple_ssa_operands (cfun)->ssa_operand_mem_size
470 == OP_SIZE_1 * sizeof (struct voptype_d))
471 gimple_ssa_operands (cfun)->ssa_operand_mem_size
472 = OP_SIZE_2 * sizeof (struct voptype_d);
473 else
474 gimple_ssa_operands (cfun)->ssa_operand_mem_size
475 = OP_SIZE_3 * sizeof (struct voptype_d);
476
477 /* Go right to the maximum size if the request is too large. */
478 if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size)
479 gimple_ssa_operands (cfun)->ssa_operand_mem_size
480 = OP_SIZE_3 * sizeof (struct voptype_d);
481
482 /* Fail if there is not enough space. If this many operands are really
483 required, first make sure there isn't a different problem causing the
484 excessive count. If the request turns out to be legitimate, a buffer
485 can be specially allocated just for it. */
486 gcc_assert (size <= gimple_ssa_operands (cfun)->ssa_operand_mem_size);
487
488 ptr = (struct ssa_operand_memory_d *)
489 ggc_alloc (sizeof (struct ssa_operand_memory_d)
490 + gimple_ssa_operands (cfun)->ssa_operand_mem_size - 1);
491 ptr->next = gimple_ssa_operands (cfun)->operand_memory;
492 gimple_ssa_operands (cfun)->operand_memory = ptr;
493 gimple_ssa_operands (cfun)->operand_memory_index = 0;
494 }
495 ptr = &(gimple_ssa_operands (cfun)->operand_memory
496 ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
497 gimple_ssa_operands (cfun)->operand_memory_index += size;
498 return ptr;
499 }
500
501
502 /* Allocate a DEF operand. */
503
504 static inline struct def_optype_d *
505 alloc_def (void)
506 {
507 struct def_optype_d *ret;
508 if (gimple_ssa_operands (cfun)->free_defs)
509 {
510 ret = gimple_ssa_operands (cfun)->free_defs;
511 gimple_ssa_operands (cfun)->free_defs
512 = gimple_ssa_operands (cfun)->free_defs->next;
513 }
514 else
515 ret = (struct def_optype_d *)
516 ssa_operand_alloc (sizeof (struct def_optype_d));
517 return ret;
518 }
519
520
521 /* Allocate a USE operand. */
522
523 static inline struct use_optype_d *
524 alloc_use (void)
525 {
526 struct use_optype_d *ret;
527 if (gimple_ssa_operands (cfun)->free_uses)
528 {
529 ret = gimple_ssa_operands (cfun)->free_uses;
530 gimple_ssa_operands (cfun)->free_uses
531 = gimple_ssa_operands (cfun)->free_uses->next;
532 }
533 else
534 ret = (struct use_optype_d *)
535 ssa_operand_alloc (sizeof (struct use_optype_d));
536 return ret;
537 }
538
539
540 /* Allocate a vop with NUM elements. */
541
542 static inline struct voptype_d *
543 alloc_vop (int num)
544 {
545 struct voptype_d *ret = NULL;
546 int alloc_size = 0;
547
548 int bucket = vop_free_bucket_index (num);
549 if (bucket != -1)
550 {
551 /* If there is a free operand, use it. */
552 if (gimple_ssa_operands (cfun)->vop_free_buckets[bucket] != NULL)
553 {
554 ret = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
555 gimple_ssa_operands (cfun)->vop_free_buckets[bucket] =
556 gimple_ssa_operands (cfun)->vop_free_buckets[bucket]->next;
557 }
558 else
559 alloc_size = vop_free_bucket_size(bucket);
560 }
561 else
562 alloc_size = num;
563
564 if (alloc_size > 0)
565 ret = (struct voptype_d *)ssa_operand_alloc (
566 sizeof (struct voptype_d) + (alloc_size - 1) * sizeof (vuse_element_t));
567
568 VUSE_VECT_NUM_ELEM (ret->usev) = num;
569 return ret;
570 }
571
572
573 /* This routine makes sure that PTR is in an immediate use list, and makes
574 sure the stmt pointer is set to the current stmt. */
575
576 static inline void
577 set_virtual_use_link (use_operand_p ptr, tree stmt)
578 {
579 /* fold_stmt may have changed the stmt pointers. */
580 if (ptr->stmt != stmt)
581 ptr->stmt = stmt;
582
583 /* If this use isn't in a list, add it to the correct list. */
584 if (!ptr->prev)
585 link_imm_use (ptr, *(ptr->use));
586 }
587
588
589 /* Adds OP to the list of defs after LAST. */
590
591 static inline def_optype_p
592 add_def_op (tree *op, def_optype_p last)
593 {
594 def_optype_p new;
595
596 new = alloc_def ();
597 DEF_OP_PTR (new) = op;
598 last->next = new;
599 new->next = NULL;
600 return new;
601 }
602
603
604 /* Adds OP to the list of uses of statement STMT after LAST. */
605
606 static inline use_optype_p
607 add_use_op (tree stmt, tree *op, use_optype_p last)
608 {
609 use_optype_p new;
610
611 new = alloc_use ();
612 USE_OP_PTR (new)->use = op;
613 link_imm_use_stmt (USE_OP_PTR (new), *op, stmt);
614 last->next = new;
615 new->next = NULL;
616 return new;
617 }
618
619
620 /* Return a virtual op pointer with NUM elements which are all initialized to OP
621 and are linked into the immediate uses for STMT. The new vop is appended
622 after PREV. */
623
624 static inline voptype_p
625 add_vop (tree stmt, tree op, int num, voptype_p prev)
626 {
627 voptype_p new;
628 int x;
629
630 new = alloc_vop (num);
631 for (x = 0; x < num; x++)
632 {
633 VUSE_OP_PTR (new, x)->prev = NULL;
634 SET_VUSE_OP (new, x, op);
635 VUSE_OP_PTR (new, x)->use = &new->usev.uses[x].use_var;
636 link_imm_use_stmt (VUSE_OP_PTR (new, x), new->usev.uses[x].use_var, stmt);
637 }
638
639 if (prev)
640 prev->next = new;
641 new->next = NULL;
642 return new;
643 }
644
645
646 /* Adds OP to the list of vuses of statement STMT after LAST, and moves
647 LAST to the new element. */
648
649 static inline voptype_p
650 add_vuse_op (tree stmt, tree op, int num, voptype_p last)
651 {
652 voptype_p new = add_vop (stmt, op, num, last);
653 VDEF_RESULT (new) = NULL_TREE;
654 return new;
655 }
656
657
658 /* Adds OP to the list of vdefs of statement STMT after LAST, and moves
659 LAST to the new element. */
660
661 static inline voptype_p
662 add_vdef_op (tree stmt, tree op, int num, voptype_p last)
663 {
664 voptype_p new = add_vop (stmt, op, num, last);
665 VDEF_RESULT (new) = op;
666 return new;
667 }
668
669
670 /* Reallocate the virtual operand PTR so that it has NUM_ELEM use slots. ROOT
671 is the head of the operand list it belongs to. */
672
673 static inline struct voptype_d *
674 realloc_vop (struct voptype_d *ptr, unsigned int num_elem,
675 struct voptype_d **root)
676 {
677 unsigned int x, lim;
678 tree stmt, val;
679 struct voptype_d *ret, *tmp;
680
681 if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
682 return ptr;
683
684 val = VUSE_OP (ptr, 0);
685 if (TREE_CODE (val) == SSA_NAME)
686 val = SSA_NAME_VAR (val);
687
688 stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
689
690 /* Delink all the existing uses. */
691 for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
692 {
693 use_operand_p use_p = VUSE_OP_PTR (ptr, x);
694 delink_imm_use (use_p);
695 }
696
697 /* If we want less space, simply use this one, and shrink the size. */
698 if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
699 {
700 VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
701 return ptr;
702 }
703
704 /* It is growing. Allocate a new one and replace the old one. */
705 ret = add_vuse_op (stmt, val, num_elem, ptr);
706
707 /* Clear PTR and add its memory to the free list. */
708 lim = VUSE_VECT_NUM_ELEM (ptr->usev);
709 memset (ptr, 0,
710 sizeof (struct voptype_d) + sizeof (vuse_element_t) * (lim - 1));
711 add_vop_to_freelist (ptr);
712
713 /* Now simply remove the old one. */
714 if (*root == ptr)
715 {
716 *root = ret;
717 return ret;
718 }
719 else
720 for (tmp = *root;
721 tmp != NULL && tmp->next != ptr;
722 tmp = tmp->next)
723 {
724 tmp->next = ret;
725 return ret;
726 }
727
728 /* The pointer passed in isn't in STMT's VDEF lists. */
729 gcc_unreachable ();
730 }
731
732
733 /* Reallocate the PTR vdef so that it has NUM_ELEM use slots. */
734
735 struct voptype_d *
736 realloc_vdef (struct voptype_d *ptr, unsigned int num_elem)
737 {
738 tree val, stmt;
739 struct voptype_d *ret;
740
741 val = VDEF_RESULT (ptr);
742 stmt = USE_STMT (VDEF_OP_PTR (ptr, 0));
743 ret = realloc_vop (ptr, num_elem, &(VDEF_OPS (stmt)));
744 VDEF_RESULT (ret) = val;
745 return ret;
746 }
747
748
749 /* Reallocate the PTR vuse so that it has NUM_ELEM use slots. */
750
751 struct voptype_d *
752 realloc_vuse (struct voptype_d *ptr, unsigned int num_elem)
753 {
754 tree stmt;
755 struct voptype_d *ret;
756
757 stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
758 ret = realloc_vop (ptr, num_elem, &(VUSE_OPS (stmt)));
759 return ret;
760 }
761
762
763 /* Takes elements from build_defs and turns them into def operands of STMT.
764 TODO -- Make build_defs VEC of tree *. */
765
766 static inline void
767 finalize_ssa_defs (tree stmt)
768 {
769 unsigned new_i;
770 struct def_optype_d new_list;
771 def_optype_p old_ops, last;
772 unsigned int num = VEC_length (tree, build_defs);
773
774 /* There should only be a single real definition per assignment. */
775 gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);
776
777 new_list.next = NULL;
778 last = &new_list;
779
780 old_ops = DEF_OPS (stmt);
781
782 new_i = 0;
783
784 /* Check for the common case of 1 def that hasn't changed. */
785 if (old_ops && old_ops->next == NULL && num == 1
786 && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
787 return;
788
789 /* If there is anything in the old list, free it. */
790 if (old_ops)
791 {
792 old_ops->next = gimple_ssa_operands (cfun)->free_defs;
793 gimple_ssa_operands (cfun)->free_defs = old_ops;
794 }
795
796 /* If there is anything remaining in the build_defs list, simply emit it. */
797 for ( ; new_i < num; new_i++)
798 last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);
799
800 /* Now set the stmt's operands. */
801 DEF_OPS (stmt) = new_list.next;
802
803 #ifdef ENABLE_CHECKING
804 {
805 def_optype_p ptr;
806 unsigned x = 0;
807 for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
808 x++;
809
810 gcc_assert (x == num);
811 }
812 #endif
813 }
814
815
816 /* Takes elements from build_uses and turns them into use operands of STMT.
817 TODO -- Make build_uses VEC of tree *. */
818
819 static inline void
820 finalize_ssa_uses (tree stmt)
821 {
822 unsigned new_i;
823 struct use_optype_d new_list;
824 use_optype_p old_ops, ptr, last;
825
826 #ifdef ENABLE_CHECKING
827 {
828 unsigned x;
829 unsigned num = VEC_length (tree, build_uses);
830
831 /* If the pointer to the operand is the statement itself, something is
832 wrong. It means that we are pointing to a local variable (the
833 initial call to update_stmt_operands does not pass a pointer to a
834 statement). */
835 for (x = 0; x < num; x++)
836 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
837 }
838 #endif
839
840 new_list.next = NULL;
841 last = &new_list;
842
843 old_ops = USE_OPS (stmt);
844
845 /* If there is anything in the old list, free it. */
846 if (old_ops)
847 {
848 for (ptr = old_ops; ptr; ptr = ptr->next)
849 delink_imm_use (USE_OP_PTR (ptr));
850 old_ops->next = gimple_ssa_operands (cfun)->free_uses;
851 gimple_ssa_operands (cfun)->free_uses = old_ops;
852 }
853
854 /* Now create nodes for all the new uses. */
855 for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
856 last = add_use_op (stmt,
857 (tree *) VEC_index (tree, build_uses, new_i),
858 last);
859
860 /* Now set the stmt's operands. */
861 USE_OPS (stmt) = new_list.next;
862
863 #ifdef ENABLE_CHECKING
864 {
865 unsigned x = 0;
866 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
867 x++;
868
869 gcc_assert (x == VEC_length (tree, build_uses));
870 }
871 #endif
872 }
873
874
875 /* Takes elements from BUILD_VDEFS and turns them into vdef operands of
876 STMT. FIXME, for now VDEF operators should have a single operand
877 in their RHS. */
878
879 static inline void
880 finalize_ssa_vdefs (tree stmt)
881 {
882 unsigned new_i;
883 struct voptype_d new_list;
884 voptype_p old_ops, ptr, last;
885 stmt_ann_t ann = stmt_ann (stmt);
886
887 /* Set the symbols referenced by STMT. */
888 if (!bitmap_empty_p (build_stores))
889 {
890 if (ann->operands.stores == NULL)
891 ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
892
893 bitmap_copy (ann->operands.stores, build_stores);
894 }
895 else
896 BITMAP_FREE (ann->operands.stores);
897
898 /* If aliases have not been computed, do not instantiate a virtual
899 operator on STMT. Initially, we only compute the SSA form on
900 GIMPLE registers. The virtual SSA form is only computed after
901 alias analysis, so virtual operators will remain unrenamed and
902 the verifier will complain. However, alias analysis needs to
903 access symbol load/store information, so we need to compute
904 those. */
905 if (!gimple_aliases_computed_p (cfun))
906 return;
907
908 new_list.next = NULL;
909 last = &new_list;
910
911 old_ops = VDEF_OPS (stmt);
912 new_i = 0;
913 while (old_ops && new_i < VEC_length (tree, build_vdefs))
914 {
915 tree op = VEC_index (tree, build_vdefs, new_i);
916 unsigned new_uid = get_name_decl (op);
917 unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops));
918
919 /* FIXME, for now each VDEF operator should have at most one
920 operand in its RHS. */
921 gcc_assert (VDEF_NUM (old_ops) == 1);
922
923 if (old_uid == new_uid)
924 {
925 /* If the symbols are the same, reuse the existing operand. */
926 last->next = old_ops;
927 last = old_ops;
928 old_ops = old_ops->next;
929 last->next = NULL;
930 set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
931 new_i++;
932 }
933 else if (old_uid < new_uid)
934 {
935 /* If old is less than new, old goes to the free list. */
936 voptype_p next;
937 delink_imm_use (VDEF_OP_PTR (old_ops, 0));
938 next = old_ops->next;
939 add_vop_to_freelist (old_ops);
940 old_ops = next;
941 }
942 else
943 {
944 /* This is a new operand. */
945 last = add_vdef_op (stmt, op, 1, last);
946 new_i++;
947 }
948 }
949
950 /* If there is anything remaining in BUILD_VDEFS, simply emit it. */
951 for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
952 last = add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, last);
953
954 /* If there is anything in the old list, free it. */
955 if (old_ops)
956 {
957 for (ptr = old_ops; ptr; ptr = last)
958 {
959 last = ptr->next;
960 delink_imm_use (VDEF_OP_PTR (ptr, 0));
961 add_vop_to_freelist (ptr);
962 }
963 }
964
965 /* Now set STMT's operands. */
966 VDEF_OPS (stmt) = new_list.next;
967
968 #ifdef ENABLE_CHECKING
969 {
970 unsigned x = 0;
971 for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next)
972 x++;
973
974 gcc_assert (x == VEC_length (tree, build_vdefs));
975 }
976 #endif
977 }
978
979
980 /* Takes elements from BUILD_VUSES and turns them into VUSE operands of
981 STMT. */
982
983 static inline void
984 finalize_ssa_vuse_ops (tree stmt)
985 {
986 unsigned new_i, old_i;
987 voptype_p old_ops, last;
988 VEC(tree,heap) *new_ops;
989 stmt_ann_t ann;
990
991 /* Set the symbols referenced by STMT. */
992 ann = stmt_ann (stmt);
993 if (!bitmap_empty_p (build_loads))
994 {
995 if (ann->operands.loads == NULL)
996 ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
997
998 bitmap_copy (ann->operands.loads, build_loads);
999 }
1000 else
1001 BITMAP_FREE (ann->operands.loads);
1002
1003 /* If aliases have not been computed, do not instantiate a virtual
1004 operator on STMT. Initially, we only compute the SSA form on
1005 GIMPLE registers. The virtual SSA form is only computed after
1006 alias analysis, so virtual operators will remain unrenamed and
1007 the verifier will complain. However, alias analysis needs to
1008 access symbol load/store information, so we need to compute
1009 those. */
1010 if (!gimple_aliases_computed_p (cfun))
1011 return;
1012
1013 /* STMT should have at most one VUSE operator. */
1014 old_ops = VUSE_OPS (stmt);
1015 gcc_assert (old_ops == NULL || old_ops->next == NULL);
1016
1017 new_ops = NULL;
1018 new_i = old_i = 0;
1019 while (old_ops
1020 && old_i < VUSE_NUM (old_ops)
1021 && new_i < VEC_length (tree, build_vuses))
1022 {
1023 tree new_op = VEC_index (tree, build_vuses, new_i);
1024 tree old_op = VUSE_OP (old_ops, old_i);
1025 unsigned new_uid = get_name_decl (new_op);
1026 unsigned old_uid = get_name_decl (old_op);
1027
1028 if (old_uid == new_uid)
1029 {
1030 /* If the symbols are the same, reuse the existing operand. */
1031 VEC_safe_push (tree, heap, new_ops, old_op);
1032 new_i++;
1033 old_i++;
1034 }
1035 else if (old_uid < new_uid)
1036 {
1037 /* If OLD_UID is less than NEW_UID, the old operand has
1038 disappeared, skip to the next old operand. */
1039 old_i++;
1040 }
1041 else
1042 {
1043 /* This is a new operand. */
1044 VEC_safe_push (tree, heap, new_ops, new_op);
1045 new_i++;
1046 }
1047 }
1048
1049 /* If there is anything remaining in the build_vuses list, simply emit it. */
1050 for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
1051 VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i));
1052
1053 /* If there is anything in the old list, free it. */
1054 if (old_ops)
1055 {
1056 for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
1057 delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
1058 add_vop_to_freelist (old_ops);
1059 VUSE_OPS (stmt) = NULL;
1060 }
1061
1062 /* If there are any operands, instantiate a VUSE operator for STMT. */
1063 if (new_ops)
1064 {
1065 tree op;
1066 unsigned i;
1067
1068 last = add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), NULL);
1069
1070 for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
1071 SET_USE (VUSE_OP_PTR (last, (int) i), op);
1072
1073 VUSE_OPS (stmt) = last;
1074 }
1075
1076 #ifdef ENABLE_CHECKING
1077 {
1078 unsigned x;
1079
1080 if (VUSE_OPS (stmt))
1081 {
1082 gcc_assert (VUSE_OPS (stmt)->next == NULL);
1083 x = VUSE_NUM (VUSE_OPS (stmt));
1084 }
1085 else
1086 x = 0;
1087
1088 gcc_assert (x == VEC_length (tree, build_vuses));
1089 }
1090 #endif
1091 }
1092
1093 /* Return a new VUSE operand vector for STMT. */
1094
1095 static void
1096 finalize_ssa_vuses (tree stmt)
1097 {
1098 unsigned num, num_vdefs;
1099 unsigned vuse_index;
1100
1101 /* Remove superfluous VUSE operands. If the statement already has a
1102 VDEF operator for a variable 'a', then a VUSE for 'a' is not
1103 needed because VDEFs imply a VUSE of the variable. For instance,
1104 suppose that variable 'a' is pointed-to by p and q:
1105
1106 # VUSE <a_2>
1107 # a_3 = VDEF <a_2>
1108 *p = *q;
1109
1110 The VUSE <a_2> is superfluous because it is implied by the
1111 VDEF operator. */
1112 num = VEC_length (tree, build_vuses);
1113 num_vdefs = VEC_length (tree, build_vdefs);
1114
1115 if (num > 0 && num_vdefs > 0)
1116 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
1117 {
1118 tree vuse;
1119 vuse = VEC_index (tree, build_vuses, vuse_index);
1120 if (TREE_CODE (vuse) != SSA_NAME)
1121 {
1122 var_ann_t ann = var_ann (vuse);
1123 ann->in_vuse_list = 0;
1124 if (ann->in_vdef_list)
1125 {
1126 VEC_ordered_remove (tree, build_vuses, vuse_index);
1127 continue;
1128 }
1129 }
1130 vuse_index++;
1131 }
1132
1133 finalize_ssa_vuse_ops (stmt);
1134 }
1135
1136
1137 /* Clear the in_list bits and empty the build array for VDEFs and
1138 VUSEs. */
1139
1140 static inline void
1141 cleanup_build_arrays (void)
1142 {
1143 unsigned i;
1144 tree t;
1145
1146 for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++)
1147 if (TREE_CODE (t) != SSA_NAME)
1148 var_ann (t)->in_vdef_list = false;
1149
1150 for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++)
1151 if (TREE_CODE (t) != SSA_NAME)
1152 var_ann (t)->in_vuse_list = false;
1153
1154 VEC_truncate (tree, build_vdefs, 0);
1155 VEC_truncate (tree, build_vuses, 0);
1156 VEC_truncate (tree, build_defs, 0);
1157 VEC_truncate (tree, build_uses, 0);
1158 bitmap_clear (build_loads);
1159 bitmap_clear (build_stores);
1160 }
1161
1162
1163 /* Finalize all the build vectors, fill the new ones into INFO. */
1164
1165 static inline void
1166 finalize_ssa_stmt_operands (tree stmt)
1167 {
1168 finalize_ssa_defs (stmt);
1169 finalize_ssa_uses (stmt);
1170 finalize_ssa_vdefs (stmt);
1171 finalize_ssa_vuses (stmt);
1172 cleanup_build_arrays ();
1173 }
1174
1175
1176 /* Start the process of building up operands vectors in INFO. */
1177
1178 static inline void
1179 start_ssa_stmt_operands (void)
1180 {
1181 gcc_assert (VEC_length (tree, build_defs) == 0);
1182 gcc_assert (VEC_length (tree, build_uses) == 0);
1183 gcc_assert (VEC_length (tree, build_vuses) == 0);
1184 gcc_assert (VEC_length (tree, build_vdefs) == 0);
1185 gcc_assert (bitmap_empty_p (build_loads));
1186 gcc_assert (bitmap_empty_p (build_stores));
1187 }
1188
1189
1190 /* Add DEF_P to the list of pointers to operands. */
1191
1192 static inline void
1193 append_def (tree *def_p)
1194 {
1195 VEC_safe_push (tree, heap, build_defs, (tree) def_p);
1196 }
1197
1198
1199 /* Add USE_P to the list of pointers to operands. */
1200
1201 static inline void
1202 append_use (tree *use_p)
1203 {
1204 VEC_safe_push (tree, heap, build_uses, (tree) use_p);
1205 }
1206
1207
1208 /* Add VAR to the set of variables that require a VDEF operator. */
1209
1210 static inline void
1211 append_vdef (tree var)
1212 {
1213 tree sym;
1214
1215 if (TREE_CODE (var) != SSA_NAME)
1216 {
1217 tree mpt;
1218 var_ann_t ann;
1219
1220 /* If VAR belongs to a memory partition, use it instead of VAR. */
1221 mpt = memory_partition (var);
1222 if (mpt)
1223 var = mpt;
1224
1225 /* Don't allow duplicate entries. */
1226 ann = get_var_ann (var);
1227 if (ann->in_vdef_list)
1228 return;
1229
1230 ann->in_vdef_list = true;
1231 sym = var;
1232 }
1233 else
1234 sym = SSA_NAME_VAR (var);
1235
1236 VEC_safe_push (tree, heap, build_vdefs, var);
1237 bitmap_set_bit (build_stores, DECL_UID (sym));
1238 }
1239
1240
1241 /* Add VAR to the set of variables that require a VUSE operator. */
1242
1243 static inline void
1244 append_vuse (tree var)
1245 {
1246 tree sym;
1247
1248 if (TREE_CODE (var) != SSA_NAME)
1249 {
1250 tree mpt;
1251 var_ann_t ann;
1252
1253 /* If VAR belongs to a memory partition, use it instead of VAR. */
1254 mpt = memory_partition (var);
1255 if (mpt)
1256 var = mpt;
1257
1258 /* Don't allow duplicate entries. */
1259 ann = get_var_ann (var);
1260 if (ann->in_vuse_list || ann->in_vdef_list)
1261 return;
1262
1263 ann->in_vuse_list = true;
1264 sym = var;
1265 }
1266 else
1267 sym = SSA_NAME_VAR (var);
1268
1269 VEC_safe_push (tree, heap, build_vuses, var);
1270 bitmap_set_bit (build_loads, DECL_UID (sym));
1271 }
1272
1273
1274 /* REF is a tree that contains the entire pointer dereference
1275 expression, if available, or NULL otherwise. ALIAS is the variable
1276 we are asking if REF can access. OFFSET and SIZE come from the
1277 memory access expression that generated this virtual operand. */
1278
1279 static bool
1280 access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
1281 HOST_WIDE_INT size)
1282 {
1283 bool offsetgtz = offset > 0;
1284 unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
1285 tree base = ref ? get_base_address (ref) : NULL;
1286
1287 /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
1288 using a call-clobbered memory tag. By definition, call-clobbered
1289 memory tags can always touch .GLOBAL_VAR. */
1290 if (alias == gimple_global_var (cfun))
1291 return true;
1292
1293 /* If ALIAS is an SFT, it can't be touched if the offset
1294 and size of the access is not overlapping with the SFT offset and
1295 size. This is only true if we are accessing through a pointer
1296 to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
1297 be accessing through a pointer to some substruct of the
1298 structure, and if we try to prune there, we will have the wrong
1299 offset, and get the wrong answer.
1300 i.e., we can't prune without more work if we have something like
1301
1302 struct gcc_target
1303 {
1304 struct asm_out
1305 {
1306 const char *byte_op;
1307 struct asm_int_op
1308 {
1309 const char *hi;
1310 } aligned_op;
1311 } asm_out;
1312 } targetm;
1313
1314 foo = &targetm.asm_out.aligned_op;
1315 return foo->hi;
1316
1317 SFT.1, which represents hi, will have SFT_OFFSET=32 because in
1318 terms of SFT_PARENT_VAR, that is where it is.
1319 However, the access through the foo pointer will be at offset 0. */
1320 if (size != -1
1321 && TREE_CODE (alias) == STRUCT_FIELD_TAG
1322 && base
1323 && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
1324 && !overlap_subvar (offset, size, alias, NULL))
1325 {
1326 #ifdef ACCESS_DEBUGGING
1327 fprintf (stderr, "Access to ");
1328 print_generic_expr (stderr, ref, 0);
1329 fprintf (stderr, " may not touch ");
1330 print_generic_expr (stderr, alias, 0);
1331 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1332 #endif
1333 return false;
1334 }
1335
1336 /* With strict aliasing, it is impossible for a component access
1337 through a pointer to touch a random variable, unless that
1338 variable *is* a structure or a pointer.
1339
1340 That is, given p->c, and some random global variable b,
1341 there is no legal way that p->c could be an access to b.
1342
1343 With strict aliasing off, we consider it legal to do something
1344 like:
1345
1346 struct foos { int l; };
1347 int foo;
1348 static struct foos *getfoo(void);
1349 int main (void)
1350 {
1351 struct foos *f = getfoo();
1352 f->l = 1;
1353 foo = 2;
1354 if (f->l == 1)
1355 abort();
1356 exit(0);
1357 }
1358 static struct foos *getfoo(void)
1359 { return (struct foos *)&foo; }
1360
1361 (taken from 20000623-1.c)
1362
1363 The docs also say/imply that access through union pointers
1364 is legal (but *not* if you take the address of the union member,
1365 i.e. the inverse), such that you can do
1366
1367 typedef union {
1368 int d;
1369 } U;
1370
1371 int rv;
1372 void breakme()
1373 {
1374 U *rv0;
1375 U *pretmp = (U*)&rv;
1376 rv0 = pretmp;
1377 rv0->d = 42;
1378 }
1379 To implement this, we just punt on accesses through union
1380 pointers entirely.
1381 */
1382 else if (ref
1383 && flag_strict_aliasing
1384 && TREE_CODE (ref) != INDIRECT_REF
1385 && !MTAG_P (alias)
1386 && (TREE_CODE (base) != INDIRECT_REF
1387 || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
1388 && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
1389 && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
1390 && !var_ann (alias)->is_heapvar
1391 /* When the struct has the may_alias attribute, its alias set is zero,
1392 so we must not prune here and have to return true. */
1393 && get_alias_set (base))
1394 {
1395 #ifdef ACCESS_DEBUGGING
1396 fprintf (stderr, "Access to ");
1397 print_generic_expr (stderr, ref, 0);
1398 fprintf (stderr, " may not touch ");
1399 print_generic_expr (stderr, alias, 0);
1400 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1401 #endif
1402 return false;
1403 }
1404
1405 /* If the offset of the access is greater than the size of one of
1406 the possible aliases, it can't be touching that alias, because it
1407 would be past the end of the structure. */
1408 else if (ref
1409 && flag_strict_aliasing
1410 && TREE_CODE (ref) != INDIRECT_REF
1411 && !MTAG_P (alias)
1412 && !POINTER_TYPE_P (TREE_TYPE (alias))
1413 && offsetgtz
1414 && DECL_SIZE (alias)
1415 && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
1416 && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
1417 {
1418 #ifdef ACCESS_DEBUGGING
1419 fprintf (stderr, "Access to ");
1420 print_generic_expr (stderr, ref, 0);
1421 fprintf (stderr, " may not touch ");
1422 print_generic_expr (stderr, alias, 0);
1423 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1424 #endif
1425 return false;
1426 }
1427
1428 return true;
1429 }
1430
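/* A small worked example of the last pruning rule above (hypothetical
   declarations): given

	struct S { int i; } s;

   DECL_SIZE (s) is 32 bits, so a component access whose bit offset from
   the dereferenced pointer is 64 can never overlap 's', and no virtual
   operand for 's' needs to be added.  */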
1431
1432 /* Add VAR to the virtual operands array. FLAGS is as in
1433 get_expr_operands. FULL_REF is a tree that contains the entire
1434 pointer dereference expression, if available, or NULL otherwise.
1435 OFFSET and SIZE come from the memory access expression that
1436 generated this virtual operand. FOR_CLOBBER is true if this is
1437 adding a virtual operand for a call clobber. */
1438
1439 static void
1440 add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
1441 tree full_ref, HOST_WIDE_INT offset,
1442 HOST_WIDE_INT size, bool for_clobber)
1443 {
1444 VEC(tree,gc) *aliases;
1445 tree sym;
1446 var_ann_t v_ann;
1447
1448 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1449 v_ann = var_ann (sym);
1450
1451 /* Mark the statement as having memory operands. */
1452 s_ann->references_memory = true;
1453
1454 /* Mark statements with volatile operands. Optimizers should back
1455 off from statements having volatile operands. */
1456 if (TREE_THIS_VOLATILE (sym) && s_ann)
1457 s_ann->has_volatile_ops = true;
1458
1459 /* If the variable cannot be modified and this is a VDEF, change
1460 it into a VUSE. This happens when read-only variables are marked
1461 call-clobbered and/or aliased to writable variables. We only make
1462 this change for non-specific stores.
1463
1464 Note that if this is a specific store, i.e. associated with a
1465 GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run
1466 into validation problems.
1467
1468 This can happen when programs cast away const, leaving us with a
1469 store to read-only memory. If the statement is actually executed
1470 at runtime, then the program is ill formed. If the statement is
1471 not executed then all is well. At the very least, we cannot ICE. */
1472 if ((flags & opf_implicit) && unmodifiable_var_p (var))
1473 flags &= ~opf_def;
1474
1475 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1476 virtual operands, unless the caller has specifically requested
1477 not to add virtual operands (used when adding operands inside an
1478 ADDR_EXPR expression). */
1479 if (flags & opf_no_vops)
1480 return;
1481
1482 aliases = v_ann->may_aliases;
1483 if (aliases == NULL)
1484 {
1485 if (s_ann && !gimple_aliases_computed_p (cfun))
1486 s_ann->has_volatile_ops = true;
1487 /* The variable is not aliased or it is an alias tag. */
1488 if (flags & opf_def)
1489 append_vdef (var);
1490 else
1491 append_vuse (var);
1492 }
1493 else
1494 {
1495 unsigned i;
1496 tree al;
1497
1498 /* The variable is aliased. Add its aliases to the virtual
1499 operands. */
1500 gcc_assert (VEC_length (tree, aliases) != 0);
1501
1502 if (flags & opf_def)
1503 {
1504 bool none_added = true;
1505
1506 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1507 {
1508 if (!access_can_touch_variable (full_ref, al, offset, size))
1509 continue;
1510
1511 none_added = false;
1512 append_vdef (al);
1513 }
1514
1515 /* If the variable is also an alias tag, add a virtual
1516 operand for it, otherwise we will miss representing
1517 references to the members of the variable's alias set.
1518 This fixes the bug in gcc.c-torture/execute/20020503-1.c.
1519
1520 It is also necessary to add bare defs on clobbers for
1521 SMT's, so that bare SMT uses caused by pruning all the
1522 aliases will link up properly with calls. In order to
1523 keep the number of these bare defs we add down to the
1524 minimum necessary, we keep track of which SMT's were used
1525 alone in statement vdefs or VUSEs. */
1526 if (v_ann->is_aliased
1527 || none_added
1528 || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
1529 && for_clobber))
1530 {
1531 append_vdef (var);
1532 }
1533 }
1534 else
1535 {
1536 bool none_added = true;
1537 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1538 {
1539 if (!access_can_touch_variable (full_ref, al, offset, size))
1540 continue;
1541 none_added = false;
1542 append_vuse (al);
1543 }
1544
1545 /* Similarly, append a virtual use for VAR itself, when
1546 it is an alias tag. */
1547 if (v_ann->is_aliased || none_added)
1548 append_vuse (var);
1549 }
1550 }
1551 }
1552
1553
1554 /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
1555 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1556 the statement's real operands, otherwise it is added to virtual
1557 operands. */
1558
1559 static void
1560 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1561 {
1562 tree var, sym;
1563 var_ann_t v_ann;
1564
1565 gcc_assert (SSA_VAR_P (*var_p) && s_ann);
1566
1567 var = *var_p;
1568 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1569 v_ann = var_ann (sym);
1570
1571 /* Mark statements with volatile operands. */
1572 if (TREE_THIS_VOLATILE (sym))
1573 s_ann->has_volatile_ops = true;
1574
1575 if (is_gimple_reg (sym))
1576 {
1577 /* The variable is a GIMPLE register. Add it to real operands. */
1578 if (flags & opf_def)
1579 append_def (var_p);
1580 else
1581 append_use (var_p);
1582 }
1583 else
1584 add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
1585 }
1586
1587
1588 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1589 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1590
1591 STMT is the statement being processed, EXPR is the INDIRECT_REF
1592 that got us here.
1593
1594 FLAGS is as in get_expr_operands.
1595
1596 FULL_REF contains the full pointer dereference expression, if we
1597 have it, or NULL otherwise.
1598
1599 OFFSET and SIZE are the location of the access inside the
1600 dereferenced pointer, if known.
1601
1602 RECURSE_ON_BASE should be set to true if we want to continue
1603 calling get_expr_operands on the base pointer, and false if
1604 something else will do it for us. */
1605
1606 static void
1607 get_indirect_ref_operands (tree stmt, tree expr, int flags,
1608 tree full_ref,
1609 HOST_WIDE_INT offset, HOST_WIDE_INT size,
1610 bool recurse_on_base)
1611 {
1612 tree *pptr = &TREE_OPERAND (expr, 0);
1613 tree ptr = *pptr;
1614 stmt_ann_t s_ann = stmt_ann (stmt);
1615
1616 s_ann->references_memory = true;
1617 if (s_ann && TREE_THIS_VOLATILE (expr))
1618 s_ann->has_volatile_ops = true;
1619
1620 if (SSA_VAR_P (ptr))
1621 {
1622 struct ptr_info_def *pi = NULL;
1623
1624 /* If PTR has flow-sensitive points-to information, use it. */
1625 if (TREE_CODE (ptr) == SSA_NAME
1626 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1627 && pi->name_mem_tag)
1628 {
1629 /* PTR has its own memory tag. Use it. */
1630 add_virtual_operand (pi->name_mem_tag, s_ann, flags,
1631 full_ref, offset, size, false);
1632 }
1633 else
1634 {
1635 /* If PTR is not an SSA_NAME or it doesn't have a name
1636 tag, use its symbol memory tag. */
1637 var_ann_t v_ann;
1638
1639 /* If we are emitting debugging dumps, display a warning if
1640 PTR is an SSA_NAME with no flow-sensitive alias
1641 information. That means that we may need to compute
1642 aliasing again. */
1643 if (dump_file
1644 && TREE_CODE (ptr) == SSA_NAME
1645 && pi == NULL)
1646 {
1647 fprintf (dump_file,
1648 "NOTE: no flow-sensitive alias info for ");
1649 print_generic_expr (dump_file, ptr, dump_flags);
1650 fprintf (dump_file, " in ");
1651 print_generic_stmt (dump_file, stmt, dump_flags);
1652 }
1653
1654 if (TREE_CODE (ptr) == SSA_NAME)
1655 ptr = SSA_NAME_VAR (ptr);
1656 v_ann = var_ann (ptr);
1657
1658 if (v_ann->symbol_mem_tag)
1659 add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
1660 full_ref, offset, size, false);
1661 /* Aliasing information is missing; mark the statement as volatile so
1662 we do not optimize it too aggressively. */
1663 else if (s_ann && !gimple_aliases_computed_p (cfun)
1664 && (flags & opf_def))
1665 s_ann->has_volatile_ops = true;
1666 }
1667 }
1668 else if (TREE_CODE (ptr) == INTEGER_CST)
1669 {
1670 /* If a constant is used as a pointer, we can't generate a real
1671 operand for it but we mark the statement volatile to prevent
1672 optimizations from messing things up. */
1673 if (s_ann)
1674 s_ann->has_volatile_ops = true;
1675 return;
1676 }
1677 else
1678 {
1679 /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
1680 gcc_unreachable ();
1681 }
1682
1683 /* If requested, add a USE operand for the base pointer. */
1684 if (recurse_on_base)
1685 get_expr_operands (stmt, pptr, opf_use);
1686 }
1687
1688
1689 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1690
1691 static void
1692 get_tmr_operands (tree stmt, tree expr, int flags)
1693 {
1694 tree tag, ref;
1695 HOST_WIDE_INT offset, size, maxsize;
1696 subvar_t svars, sv;
1697 stmt_ann_t s_ann = stmt_ann (stmt);
1698
1699 /* This statement references memory. */
1700 s_ann->references_memory = 1;
1701
1702 /* First record the real operands. */
1703 get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
1704 get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);
1705
1706 if (TMR_SYMBOL (expr))
1707 add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);
1708
1709 tag = TMR_TAG (expr);
1710 if (!tag)
1711 {
1712 /* Something weird, so ensure that we will be careful. */
1713 s_ann->has_volatile_ops = true;
1714 return;
1715 }
1716
1717 if (DECL_P (tag))
1718 {
1719 get_expr_operands (stmt, &tag, flags);
1720 return;
1721 }
1722
1723 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1724 gcc_assert (ref != NULL_TREE);
1725 svars = get_subvars_for_var (ref);
1726 for (sv = svars; sv; sv = sv->next)
1727 {
1728 bool exact;
1729
1730 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1731 add_stmt_operand (&sv->var, s_ann, flags);
1732 }
1733 }
1734
1735
1736 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1737 clobbered variables in the function. */
1738
1739 static void
1740 add_call_clobber_ops (tree stmt, tree callee)
1741 {
1742 unsigned u;
1743 bitmap_iterator bi;
1744 stmt_ann_t s_ann = stmt_ann (stmt);
1745 bitmap not_read_b, not_written_b;
1746
1747 /* Functions that are not const, pure, or noreturn may clobber
1748 call-clobbered variables. */
1749 if (s_ann)
1750 s_ann->makes_clobbering_call = true;
1751
1752 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1753 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1754 if (gimple_global_var (cfun))
1755 {
1756 tree var = gimple_global_var (cfun);
1757 add_stmt_operand (&var, s_ann, opf_def);
1758 return;
1759 }
1760
1761 /* Get info for local and module level statics. There is a bit
1762 set for each static if the call being processed does not read
1763 or write that variable. */
1764 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1765 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1766
1767 /* Add a VDEF operand for every call clobbered variable. */
1768 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
1769 {
1770 tree var = referenced_var_lookup (u);
1771 unsigned int escape_mask = var_ann (var)->escape_mask;
1772 tree real_var = var;
1773 bool not_read;
1774 bool not_written;
1775
1776 /* Not read and not written are computed on regular vars, not
1777 subvars, so look at the parent var if this is an SFT. */
1778 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1779 real_var = SFT_PARENT_VAR (var);
1780
1781 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1782 DECL_UID (real_var)) : false;
1783 not_written = not_written_b ? bitmap_bit_p (not_written_b,
1784 DECL_UID (real_var)) : false;
1785 gcc_assert (!unmodifiable_var_p (var));
1786
1787 clobber_stats.clobbered_vars++;
1788
1789 /* See if this variable is really clobbered by this function. */
1790
1791 /* Trivial case: Things escaping only to pure/const are not
1792 clobbered by non-pure-const, and only read by pure/const. */
1793 if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
1794 {
1795 tree call = get_call_expr_in (stmt);
1796 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1797 {
1798 add_stmt_operand (&var, s_ann, opf_use);
1799 clobber_stats.unescapable_clobbers_avoided++;
1800 continue;
1801 }
1802 else
1803 {
1804 clobber_stats.unescapable_clobbers_avoided++;
1805 continue;
1806 }
1807 }
1808
1809 if (not_written)
1810 {
1811 clobber_stats.static_write_clobbers_avoided++;
1812 if (!not_read)
1813 add_stmt_operand (&var, s_ann, opf_use);
1814 else
1815 clobber_stats.static_read_clobbers_avoided++;
1816 }
1817 else
1818 add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
1819 }
1820 }
1821
1822
1823 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1824 function. */
1825
1826 static void
1827 add_call_read_ops (tree stmt, tree callee)
1828 {
1829 unsigned u;
1830 bitmap_iterator bi;
1831 stmt_ann_t s_ann = stmt_ann (stmt);
1832 bitmap not_read_b;
1833
1834 /* If the function is not pure, it may reference memory. Add
1835 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1836 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1837 if (gimple_global_var (cfun))
1838 {
1839 tree var = gimple_global_var (cfun);
1840 add_stmt_operand (&var, s_ann, opf_use);
1841 return;
1842 }
1843
1844 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1845
1846 /* Add a VUSE for each call-clobbered variable. */
1847 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
1848 {
1849 tree var = referenced_var (u);
1850 tree real_var = var;
1851 bool not_read;
1852
1853 clobber_stats.readonly_clobbers++;
1854
1855 /* Not read and not written are computed on regular vars, not
1856 subvars, so look at the parent var if this is an SFT. */
1857
1858 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1859 real_var = SFT_PARENT_VAR (var);
1860
1861 not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
1862 : false;
1863
1864 if (not_read)
1865 {
1866 clobber_stats.static_readonly_clobbers_avoided++;
1867 continue;
1868 }
1869
1870 add_stmt_operand (&var, s_ann, opf_use | opf_implicit);
1871 }
1872 }
1873
1874
1875 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1876
1877 static void
1878 get_call_expr_operands (tree stmt, tree expr)
1879 {
1880 tree op;
1881 int call_flags = call_expr_flags (expr);
1882 stmt_ann_t ann = stmt_ann (stmt);
1883
1884 ann->references_memory = true;
1885
1886 /* If aliases have been computed already, add VDEF or VUSE
1887 operands for all the symbols that have been found to be
1888 call-clobbered. */
1889 if (gimple_aliases_computed_p (cfun)
1890 && !(call_flags & ECF_NOVOPS))
1891 {
1892 /* A 'pure' or a 'const' function never call-clobbers anything.
1893 A 'noreturn' function might, but since we don't return anyway
1894 there is no point in recording that. */
1895 if (TREE_SIDE_EFFECTS (expr)
1896 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1897 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1898 else if (!(call_flags & ECF_CONST))
1899 add_call_read_ops (stmt, get_callee_fndecl (expr));
1900 }
1901
1902 /* Add USE operands for the callee, the arguments and the static chain. */
1903 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
1904
1905 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1906 get_expr_operands (stmt, &TREE_VALUE (op), opf_use);
1907
1908 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
1909 }
1910
1911
1912 /* Scan all the operands of the ASM_EXPR statement STMT. */
1913
1914 static void
1915 get_asm_expr_operands (tree stmt)
1916 {
1917 stmt_ann_t s_ann;
1918 int i, noutputs;
1919 const char **oconstraints;
1920 const char *constraint;
1921 bool allows_mem, allows_reg, is_inout;
1922 tree link;
1923
1924 s_ann = stmt_ann (stmt);
1925 noutputs = list_length (ASM_OUTPUTS (stmt));
1926 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
1927
1928 /* Gather all output operands. */
1929 for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
1930 {
1931 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1932 oconstraints[i] = constraint;
1933 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
1934 &allows_reg, &is_inout);
1935
1936 /* This should have been split in gimplify_asm_expr. */
1937 gcc_assert (!allows_reg || !is_inout);
1938
1939 /* Memory operands are addressable. Note that STMT needs the
1940 address of this operand. */
1941 if (!allows_reg && allows_mem)
1942 {
1943 tree t = get_base_address (TREE_VALUE (link));
1944 if (t && DECL_P (t) && s_ann)
1945 add_to_addressable_set (t, &s_ann->addresses_taken);
1946 }
1947
1948 get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
1949 }
1950
1951 /* Gather all input operands. */
1952 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1953 {
1954 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1955 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
1956 &allows_mem, &allows_reg);
1957
1958 /* Memory operands are addressable. Note that STMT needs the
1959 address of this operand. */
1960 if (!allows_reg && allows_mem)
1961 {
1962 tree t = get_base_address (TREE_VALUE (link));
1963 if (t && DECL_P (t) && s_ann)
1964 add_to_addressable_set (t, &s_ann->addresses_taken);
1965 }
1966
1967 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1968 }
1969
1970 /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
1971 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1972 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1973 {
1974 unsigned i;
1975 bitmap_iterator bi;
1976
1977 s_ann->references_memory = true;
1978
1979 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
1980 {
1981 tree var = referenced_var (i);
1982 add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
1983 }
1984
1985 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
1986 {
1987 tree var = referenced_var (i);
1988
1989 /* Subvars are explicitly represented in this list, so we
1990 don't need the original to be added to the clobber ops,
1991 but the original *will* be in this list because we keep
1992 the addressability of the original variable up-to-date
1993 to avoid confusing the back-end. */
1994 if (var_can_have_subvars (var)
1995 && get_subvars_for_var (var) != NULL)
1996 continue;
1997
1998 add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
1999 }
2000 break;
2001 }
2002 }
2003
2004
2005 /* Scan operands for the assignment expression EXPR in statement STMT. */
2006
2007 static void
2008 get_modify_stmt_operands (tree stmt, tree expr)
2009 {
2010 /* First get operands from the RHS. */
2011 get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use);
2012
2013 /* For the LHS, use a regular definition (opf_def) for GIMPLE
2014 registers. If the LHS is a store to memory, we will need
2015 a preserving definition (VDEF).
2016
2017 Preserving definitions are those that modify a part of an
2018 aggregate object for which no subvars have been computed (or the
2019 reference does not correspond exactly to one of them). Stores
2020 through a pointer are also represented with VDEF operators.
2021
2022 We used to distinguish between preserving and killing definitions.
2023 We always emit preserving definitions now. */
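  /* For illustration only (hypothetical SSA names, not generated
     code): given the assignment

       *p_1 = x_2;

     the RHS scan above adds a USE of x_2, and the LHS scan below adds
     a preserving definition of the memory tag associated with p_1,
     e.g.

       # SMT.3_5 = VDEF <SMT.3_4>
       *p_1 = x_2;

     while an assignment between GIMPLE registers, y_3 = x_2, only
     produces a real DEF of y_3 and a USE of x_2.  */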
2024 get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def);
2025 }
2026
2027
2028 /* Recursively scan the expression pointed to by EXPR_P in statement
2029 STMT. FLAGS is a bitmask of OPF_* constants modifying how to
2030 interpret the operands found. */
2031
2032 static void
2033 get_expr_operands (tree stmt, tree *expr_p, int flags)
2034 {
2035 enum tree_code code;
2036 enum tree_code_class class;
2037 tree expr = *expr_p;
2038 stmt_ann_t s_ann = stmt_ann (stmt);
2039
2040 if (expr == NULL)
2041 return;
2042
2043 code = TREE_CODE (expr);
2044 class = TREE_CODE_CLASS (code);
2045
2046 switch (code)
2047 {
2048 case ADDR_EXPR:
2049 /* Taking the address of a variable does not represent a
2050 reference to it, but the fact that the statement takes its
2051 address will be of interest to some passes (e.g. alias
2052 resolution). */
2053 add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
2054
2055 /* If the address is invariant, there may be no interesting
2056 variable references inside. */
2057 if (is_gimple_min_invariant (expr))
2058 return;
2059
2060 /* Otherwise, there may be variables referenced inside but there
2061 should be no VUSEs created, since the referenced objects are
2062 not really accessed. The only operands that we should find
2063 here are ARRAY_REF indices which will always be real operands
2064 (GIMPLE does not allow non-registers as array indices). */
2065 flags |= opf_no_vops;
2066 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2067 return;
2068
2069 case SSA_NAME:
2070 case STRUCT_FIELD_TAG:
2071 case SYMBOL_MEMORY_TAG:
2072 case NAME_MEMORY_TAG:
2073 add_stmt_operand (expr_p, s_ann, flags);
2074 return;
2075
2076 case VAR_DECL:
2077 case PARM_DECL:
2078 case RESULT_DECL:
2079 {
2080 subvar_t svars;
2081
2082 /* Add the subvars for a variable, if it has subvars, to DEFS
2083 or USES. Otherwise, add the variable itself. Whether it
2084 goes to USES or DEFS depends on the operand flags. */
2085 if (var_can_have_subvars (expr)
2086 && (svars = get_subvars_for_var (expr)))
2087 {
2088 subvar_t sv;
2089 for (sv = svars; sv; sv = sv->next)
2090 add_stmt_operand (&sv->var, s_ann, flags);
2091 }
2092 else
2093 add_stmt_operand (expr_p, s_ann, flags);
2094
2095 return;
2096 }
2097
2098 case MISALIGNED_INDIRECT_REF:
2099 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2100 /* fall through */
2101
2102 case ALIGN_INDIRECT_REF:
2103 case INDIRECT_REF:
2104 get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
2105 return;
2106
2107 case TARGET_MEM_REF:
2108 get_tmr_operands (stmt, expr, flags);
2109 return;
2110
2111 case ARRAY_REF:
2112 case ARRAY_RANGE_REF:
2113 case COMPONENT_REF:
2114 case REALPART_EXPR:
2115 case IMAGPART_EXPR:
2116 {
2117 tree ref;
2118 HOST_WIDE_INT offset, size, maxsize;
2119 bool none = true;
2120
2121 /* This component reference becomes an access to all of the
2122 subvariables it can touch, if we can determine that, but
2123 *NOT* to the parent variable itself. If we cannot determine
2124 which fields could be touched, the recursion will eventually
2125 get to a variable and add *all* of its subvars, or whatever
2126 is the minimum correct subset. */
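        /* For example (hypothetical sub-variables): if 's' has the
           sub-variables SFT.1 for s.a and SFT.2 for s.b, a load from
           s.a adds an operand for SFT.1 only, while an access whose
           extent we cannot determine adds operands for both SFT.1
           and SFT.2.  */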
2127 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
2128 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
2129 {
2130 subvar_t sv;
2131 subvar_t svars = get_subvars_for_var (ref);
2132
2133 for (sv = svars; sv; sv = sv->next)
2134 {
2135 bool exact;
2136
2137 if (overlap_subvar (offset, maxsize, sv->var, &exact))
2138 {
2139 int subvar_flags = flags;
2140 none = false;
2141 add_stmt_operand (&sv->var, s_ann, subvar_flags);
2142 }
2143 }
2144
2145 if (!none)
2146 flags |= opf_no_vops;
2147 }
2148 else if (TREE_CODE (ref) == INDIRECT_REF)
2149 {
2150 get_indirect_ref_operands (stmt, ref, flags, expr, offset,
2151 maxsize, false);
2152 flags |= opf_no_vops;
2153 }
2154
2155 /* Even if we found subvars above, we still need to scan the base
2156 expression so that we see immediate uses such as D in S.A[D];
2157 if S.A has a subvar we would otherwise miss the use of D. */
2158 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2159
2160 if (code == COMPONENT_REF)
2161 {
2162 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
2163 s_ann->has_volatile_ops = true;
2164 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2165 }
2166 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
2167 {
2168 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2169 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2170 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
2171 }
2172
2173 return;
2174 }
2175
2176 case WITH_SIZE_EXPR:
2177 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
2178 and an rvalue reference to its second argument. */
2179 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2180 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2181 return;
2182
2183 case CALL_EXPR:
2184 get_call_expr_operands (stmt, expr);
2185 return;
2186
2187 case COND_EXPR:
2188 case VEC_COND_EXPR:
2189 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
2190 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2191 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2192 return;
2193
2194 case GIMPLE_MODIFY_STMT:
2195 get_modify_stmt_operands (stmt, expr);
2196 return;
2197
2198 case CONSTRUCTOR:
2199 {
2200 /* General aggregate CONSTRUCTORs have been decomposed, but they
2201 are still in use as the COMPLEX_EXPR equivalent for vectors. */
2202 constructor_elt *ce;
2203 unsigned HOST_WIDE_INT idx;
2204
2205 for (idx = 0;
2206 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
2207 idx++)
2208 get_expr_operands (stmt, &ce->value, opf_use);
2209
2210 return;
2211 }
2212
2213 case BIT_FIELD_REF:
2214 case TRUTH_NOT_EXPR:
2215 case VIEW_CONVERT_EXPR:
2216 do_unary:
2217 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2218 return;
2219
2220 case TRUTH_AND_EXPR:
2221 case TRUTH_OR_EXPR:
2222 case TRUTH_XOR_EXPR:
2223 case COMPOUND_EXPR:
2224 case OBJ_TYPE_REF:
2225 case ASSERT_EXPR:
2226 do_binary:
2227 {
2228 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2229 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2230 return;
2231 }
2232
2233 case DOT_PROD_EXPR:
2234 case REALIGN_LOAD_EXPR:
2235 {
2236 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2237 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2238 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
2239 return;
2240 }
2241
2242 case BLOCK:
2243 case FUNCTION_DECL:
2244 case EXC_PTR_EXPR:
2245 case FILTER_EXPR:
2246 case LABEL_DECL:
2247 case CONST_DECL:
2248 case OMP_PARALLEL:
2249 case OMP_SECTIONS:
2250 case OMP_FOR:
2251 case OMP_SINGLE:
2252 case OMP_MASTER:
2253 case OMP_ORDERED:
2254 case OMP_CRITICAL:
2255 case OMP_RETURN:
2256 case OMP_CONTINUE:
2257 /* Expressions that make no memory references. */
2258 return;
2259
2260 default:
2261 if (class == tcc_unary)
2262 goto do_unary;
2263 if (class == tcc_binary || class == tcc_comparison)
2264 goto do_binary;
2265 if (class == tcc_constant || class == tcc_type)
2266 return;
2267 }
2268
2269 /* If we get here, something has gone wrong. */
2270 #ifdef ENABLE_CHECKING
2271 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
2272 debug_tree (expr);
2273 fputs ("\n", stderr);
2274 #endif
2275 gcc_unreachable ();
2276 }
2277
2278
2279 /* Parse STMT looking for operands. When finished, the various
2280 build_* operand vectors will have potential operands in them. */
2281
2282 static void
2283 parse_ssa_operands (tree stmt)
2284 {
2285 enum tree_code code;
2286
2287 code = TREE_CODE (stmt);
2288 switch (code)
2289 {
2290 case GIMPLE_MODIFY_STMT:
2291 get_modify_stmt_operands (stmt, stmt);
2292 break;
2293
2294 case COND_EXPR:
2295 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use);
2296 break;
2297
2298 case SWITCH_EXPR:
2299 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use);
2300 break;
2301
2302 case ASM_EXPR:
2303 get_asm_expr_operands (stmt);
2304 break;
2305
2306 case RETURN_EXPR:
2307 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use);
2308 break;
2309
2310 case GOTO_EXPR:
2311 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use);
2312 break;
2313
2314 case LABEL_EXPR:
2315 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use);
2316 break;
2317
2318 case BIND_EXPR:
2319 case CASE_LABEL_EXPR:
2320 case TRY_CATCH_EXPR:
2321 case TRY_FINALLY_EXPR:
2322 case EH_FILTER_EXPR:
2323 case CATCH_EXPR:
2324 case RESX_EXPR:
2325 /* These nodes contain no variable references. */
2326 break;
2327
2328 default:
2329 /* Notice that if get_expr_operands tries to use &STMT as the
2330 operand pointer (which may only happen for USE operands), we
2331 will fail in add_stmt_operand. This default case handles
2332 empty statements and CALL_EXPRs that appear either on the RHS
2333 of a statement or as statements themselves. */
2334 get_expr_operands (stmt, &stmt, opf_use);
2335 break;
2336 }
2337 }
2338
2339
2340 /* Create an operands cache for STMT. */
2341
2342 static void
2343 build_ssa_operands (tree stmt)
2344 {
2345 stmt_ann_t ann = get_stmt_ann (stmt);
2346
2347 /* Initially assume that the statement has no volatile operands and
2348 makes no memory references. */
2349 ann->has_volatile_ops = false;
2350 ann->references_memory = false;
2351 /* Just clear the bitmap so we don't end up reallocating it over and over. */
2352 if (ann->addresses_taken)
2353 bitmap_clear (ann->addresses_taken);
2354
2355 start_ssa_stmt_operands ();
2356 parse_ssa_operands (stmt);
2357 operand_build_sort_virtual (build_vuses);
2358 operand_build_sort_virtual (build_vdefs);
2359 finalize_ssa_stmt_operands (stmt);
2360
2361 if (ann->addresses_taken && bitmap_empty_p (ann->addresses_taken))
2362 ann->addresses_taken = NULL;
2363 /* For added safety, assume that statements with volatile operands
2364 also reference memory. */
2365 if (ann->has_volatile_ops)
2366 ann->references_memory = true;
2367 }
2368
2369
2370 /* Free any operands vectors in OPS. */
2371
2372 void
2373 free_ssa_operands (stmt_operands_p ops)
2374 {
2375 ops->def_ops = NULL;
2376 ops->use_ops = NULL;
2377 ops->vdef_ops = NULL;
2378 ops->vuse_ops = NULL;
2379 BITMAP_FREE (ops->loads);
2380 BITMAP_FREE (ops->stores);
2381 }
2382
2383
2384 /* Update the operand cache of statement STMT. */
2385
2386 void
2387 update_stmt_operands (tree stmt)
2388 {
2389 stmt_ann_t ann = get_stmt_ann (stmt);
2390
2391 /* If update_stmt_operands is called before SSA is initialized, do
2392 nothing. */
2393 if (!ssa_operands_active ())
2394 return;
2395
2396 /* The optimizers cannot handle statements that are nothing but a
2397 _DECL. This indicates a bug in the gimplifier. */
2398 gcc_assert (!SSA_VAR_P (stmt));
2399
2400 timevar_push (TV_TREE_OPS);
2401
2402 gcc_assert (ann->modified);
2403 build_ssa_operands (stmt);
2404 ann->modified = 0;
2405
2406 timevar_pop (TV_TREE_OPS);
2407 }
2408
2409
2410 /* Copy the virtual operands from SRC to DEST. */
2411
2412 void
2413 copy_virtual_operands (tree dest, tree src)
2414 {
2415 unsigned int i, n;
2416 voptype_p src_vuses, dest_vuses;
2417 voptype_p src_vdefs, dest_vdefs;
2418 struct voptype_d vuse;
2419 struct voptype_d vdef;
2420 stmt_ann_t dest_ann;
2421
2422 VDEF_OPS (dest) = NULL;
2423 VUSE_OPS (dest) = NULL;
2424
2425 dest_ann = get_stmt_ann (dest);
2426 BITMAP_FREE (dest_ann->operands.loads);
2427 BITMAP_FREE (dest_ann->operands.stores);
2428
2429 if (LOADED_SYMS (src))
2430 {
2431 dest_ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
2432 bitmap_copy (dest_ann->operands.loads, LOADED_SYMS (src));
2433 }
2434
2435 if (STORED_SYMS (src))
2436 {
2437 dest_ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
2438 bitmap_copy (dest_ann->operands.stores, STORED_SYMS (src));
2439 }
2440
2441 /* Copy all the VUSE operators and corresponding operands. */
2442 dest_vuses = &vuse;
2443 for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next)
2444 {
2445 n = VUSE_NUM (src_vuses);
2446 dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses);
2447 for (i = 0; i < n; i++)
2448 SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));
2449
2450 if (VUSE_OPS (dest) == NULL)
2451 VUSE_OPS (dest) = vuse.next;
2452 }
2453
2454 /* Copy all the VDEF operators and corresponding operands. */
2455 dest_vdefs = &vdef;
2456 for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next)
2457 {
2458 n = VUSE_NUM (src_vdefs);
2459 dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs);
2460 VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
2461 for (i = 0; i < n; i++)
2462 SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));
2463
2464 if (VDEF_OPS (dest) == NULL)
2465 VDEF_OPS (dest) = vdef.next;
2466 }
2467 }
2468
2469
2470 /* Specifically for use in DOM's expression analysis. Given a store, we
2471 create an artificial statement that looks like a load from the store;
2472 this can be used to eliminate redundant loads. OLD_STMT is the store
2473 statement and NEW_STMT is the new load, which represents a load of the
2474 values stored. */
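
/* Illustrative sketch only (hypothetical statements, not generated
   code): given the store

     # a_3 = VDEF <a_2>
     a = x_1;

   DOM can build an artificial load "tmp = a" and call this function
   with the two statements; the artificial load then carries
   "VUSE <a_3>", so later loads of 'a' dominated by the store can be
   proved redundant.  */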
2475
2476 void
2477 create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt)
2478 {
2479 tree op;
2480 ssa_op_iter iter;
2481 use_operand_p use_p;
2482 unsigned i;
2483
2484 get_stmt_ann (new_stmt);
2485
2486 /* Process NEW_STMT looking for operands. */
2487 start_ssa_stmt_operands ();
2488 parse_ssa_operands (new_stmt);
2489
2490 for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
2491 if (TREE_CODE (op) != SSA_NAME)
2492 var_ann (op)->in_vuse_list = false;
2493
2494 for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
2495 if (TREE_CODE (op) != SSA_NAME)
2496 var_ann (op)->in_vdef_list = false;
2497
2498 /* Remove any virtual operands that were found. */
2499 VEC_truncate (tree, build_vdefs, 0);
2500 VEC_truncate (tree, build_vuses, 0);
2501
2502 /* For each VDEF on the original statement, we want to create a
2503 VUSE of the VDEF result operand on the new statement. */
2504 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
2505 append_vuse (op);
2506
2507 finalize_ssa_stmt_operands (new_stmt);
2508
2509 /* All uses in this fake stmt must not be in the immediate use lists. */
2510 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2511 delink_imm_use (use_p);
2512 }
2513
2514
2515 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made
2516 to check the validity of the swap operation. */
2517
2518 void
2519 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2520 {
2521 tree op0, op1;
2522 op0 = *exp0;
2523 op1 = *exp1;
2524
2525 /* If the operand cache is active, attempt to preserve the relative
2526 positions of these two operands in their respective immediate use
2527 lists. */
2528 if (ssa_operands_active () && op0 != op1)
2529 {
2530 use_optype_p use0, use1, ptr;
2531 use0 = use1 = NULL;
2532
2533 /* Find the 2 operands in the cache, if they are there. */
2534 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2535 if (USE_OP_PTR (ptr)->use == exp0)
2536 {
2537 use0 = ptr;
2538 break;
2539 }
2540
2541 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2542 if (USE_OP_PTR (ptr)->use == exp1)
2543 {
2544 use1 = ptr;
2545 break;
2546 }
2547
2548 /* If we did not find operand entries for both uses, there is nothing
2549 to update in the cache; presumably we do not need to worry about it. */
2550 if (use0 && use1)
2551 {
2552 tree *tmp = USE_OP_PTR (use1)->use;
2553 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2554 USE_OP_PTR (use0)->use = tmp;
2555 }
2556 }
2557
2558 /* Now swap the data. */
2559 *exp0 = op1;
2560 *exp1 = op0;
2561 }
2562
2563
2564 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
2565 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
2566 a single variable whose address has been taken or any other valid
2567 GIMPLE memory reference (structure reference, array, etc). If the
2568 base address of REF is a decl that has sub-variables, also add all
2569 of its sub-variables. */
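
/* For example (hypothetical GIMPLE, assuming 's' has the computed
   sub-variables SFT.1 and SFT.2): for the statement

     p_1 = &s.f;

   the base address is 's', so the DECL_UIDs of both SFT.1 and SFT.2
   are added to *ADDRESSES_TAKEN and both sub-variables are marked
   TREE_ADDRESSABLE, not just the field being referenced.  */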
2570
2571 void
2572 add_to_addressable_set (tree ref, bitmap *addresses_taken)
2573 {
2574 tree var;
2575 subvar_t svars;
2576
2577 gcc_assert (addresses_taken);
2578
2579 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2580 as the only thing we take the address of. If VAR is a structure,
2581 taking the address of a field means that the whole structure may
2582 be referenced using pointer arithmetic. See PR 21407 and the
2583 ensuing mailing list discussion. */
2584 var = get_base_address (ref);
2585 if (var && SSA_VAR_P (var))
2586 {
2587 if (*addresses_taken == NULL)
2588 *addresses_taken = BITMAP_GGC_ALLOC ();
2589
2590 if (var_can_have_subvars (var)
2591 && (svars = get_subvars_for_var (var)))
2592 {
2593 subvar_t sv;
2594 for (sv = svars; sv; sv = sv->next)
2595 {
2596 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
2597 TREE_ADDRESSABLE (sv->var) = 1;
2598 }
2599 }
2600 else
2601 {
2602 bitmap_set_bit (*addresses_taken, DECL_UID (var));
2603 TREE_ADDRESSABLE (var) = 1;
2604 }
2605 }
2606 }
2607
2608
2609 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2610 Return TRUE if there is a problem and emit an error message to F. */
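
/* The immediate use list is a circular, doubly-linked list rooted at
   SSA_NAME_IMM_USE_NODE (VAR); schematically:

     root <-> use_1 <-> use_2 <-> ... <-> use_N <-> root

   Only the root node has a NULL 'use' pointer.  The checks below walk
   the list in both directions, verifying the prev/next links and that
   every non-root node really refers to VAR.  */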
2611
2612 bool
2613 verify_imm_links (FILE *f, tree var)
2614 {
2615 use_operand_p ptr, prev, list;
2616 int count;
2617
2618 gcc_assert (TREE_CODE (var) == SSA_NAME);
2619
2620 list = &(SSA_NAME_IMM_USE_NODE (var));
2621 gcc_assert (list->use == NULL);
2622
2623 if (list->prev == NULL)
2624 {
2625 gcc_assert (list->next == NULL);
2626 return false;
2627 }
2628
2629 prev = list;
2630 count = 0;
2631 for (ptr = list->next; ptr != list; )
2632 {
2633 if (prev != ptr->prev)
2634 goto error;
2635
2636 if (ptr->use == NULL)
2637 goto error; /* 2 roots, or SAFE guard node. */
2638 else if (*(ptr->use) != var)
2639 goto error;
2640
2641 prev = ptr;
2642 ptr = ptr->next;
2643
2644 /* Avoid infinite loops. 50,000,000 uses probably indicates a
2645 problem. */
2646 if (count++ > 50000000)
2647 goto error;
2648 }
2649
2650 /* Verify list in the other direction. */
2651 prev = list;
2652 for (ptr = list->prev; ptr != list; )
2653 {
2654 if (prev != ptr->next)
2655 goto error;
2656 prev = ptr;
2657 ptr = ptr->prev;
2658 if (count-- < 0)
2659 goto error;
2660 }
2661
2662 if (count != 0)
2663 goto error;
2664
2665 return false;
2666
2667 error:
2668 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2669 {
2670 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2671 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2672 }
2673 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2674 (void *)ptr->use);
2675 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2676 fprintf (f, "\n");
2677 return true;
2678 }
2679
2680
2681 /* Dump all the immediate uses of VAR to FILE. */
2682
2683 void
2684 dump_immediate_uses_for (FILE *file, tree var)
2685 {
2686 imm_use_iterator iter;
2687 use_operand_p use_p;
2688
2689 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2690
2691 print_generic_expr (file, var, TDF_SLIM);
2692 fprintf (file, " : -->");
2693 if (has_zero_uses (var))
2694 fprintf (file, " no uses.\n");
2695 else
2696 if (has_single_use (var))
2697 fprintf (file, " single use.\n");
2698 else
2699 fprintf (file, "%d uses.\n", num_imm_uses (var));
2700
2701 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2702 {
2703 if (use_p->stmt == NULL && use_p->use == NULL)
2704 fprintf (file, "***end of stmt iterator marker***\n");
2705 else
2706 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2707 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS|TDF_MEMSYMS);
2708 else
2709 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2710 }
2711 fprintf (file, "\n");
2712 }
2713
2714
2715 /* Dump all the immediate uses to FILE. */
2716
2717 void
2718 dump_immediate_uses (FILE *file)
2719 {
2720 tree var;
2721 unsigned int x;
2722
2723 fprintf (file, "Immediate_uses: \n\n");
2724 for (x = 1; x < num_ssa_names; x++)
2725 {
2726 var = ssa_name (x);
2727 if (!var)
2728 continue;
2729 dump_immediate_uses_for (file, var);
2730 }
2731 }
2732
2733
2734 /* Dump def-use edges on stderr. */
2735
2736 void
2737 debug_immediate_uses (void)
2738 {
2739 dump_immediate_uses (stderr);
2740 }
2741
2742
2743 /* Dump def-use edges of VAR on stderr. */
2744
2745 void
2746 debug_immediate_uses_for (tree var)
2747 {
2748 dump_immediate_uses_for (stderr, var);
2749 }
2750
2751
2752 /* Create a new change buffer for the statement pointed by STMT_P and
2753 push the buffer into SCB_STACK. Each change buffer
2754 records state information needed to determine what changed in the
2755 statement. Mainly, this keeps track of symbols that may need to be
2756 put into SSA form, SSA name replacements and other information
2757 needed to keep the SSA form up to date. */
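
/* A typical caller uses the change buffers roughly as follows (a
   sketch only, not taken from any particular pass):

     push_stmt_changes (&stmt);
     ... fold or otherwise rewrite *stmt ...
     if (changed)
       pop_stmt_changes (&stmt);
     else
       discard_stmt_changes (&stmt);

   pop_stmt_changes re-scans the operands and marks for renaming any
   symbols whose load/store sets changed; discard_stmt_changes skips
   the re-scan when nothing was touched.  */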
2758
2759 void
2760 push_stmt_changes (tree *stmt_p)
2761 {
2762 tree stmt;
2763 scb_t buf;
2764
2765 stmt = *stmt_p;
2766
2767 /* It makes no sense to keep track of PHI nodes. */
2768 if (TREE_CODE (stmt) == PHI_NODE)
2769 return;
2770
2771 buf = xmalloc (sizeof *buf);
2772 memset (buf, 0, sizeof *buf);
2773
2774 buf->stmt_p = stmt_p;
2775
2776 if (stmt_references_memory_p (stmt))
2777 {
2778 tree op;
2779 ssa_op_iter i;
2780
2781 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
2782 {
2783 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2784 if (buf->loads == NULL)
2785 buf->loads = BITMAP_ALLOC (NULL);
2786 bitmap_set_bit (buf->loads, DECL_UID (sym));
2787 }
2788
2789 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
2790 {
2791 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2792 if (buf->stores == NULL)
2793 buf->stores = BITMAP_ALLOC (NULL);
2794 bitmap_set_bit (buf->stores, DECL_UID (sym));
2795 }
2796 }
2797
2798 VEC_safe_push (scb_t, heap, scb_stack, buf);
2799 }
2800
2801
2802 /* Given two sets S1 and S2, mark the symbols that differ in S1 and S2
2803 for renaming. The set to mark for renaming is (S1 & ~S2) | (S2 & ~S1). */
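
/* For instance, with S1 = { a, b, c } and S2 = { b, c, d } the set
   marked for renaming is { a, d }, the symmetric difference; symbols
   present in both sets are left alone.  */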
2804
2805 static void
2806 mark_difference_for_renaming (bitmap s1, bitmap s2)
2807 {
2808 if (s1 == NULL && s2 == NULL)
2809 return;
2810
2811 if (s1 && s2 == NULL)
2812 mark_set_for_renaming (s1);
2813 else if (s1 == NULL && s2)
2814 mark_set_for_renaming (s2);
2815 else if (!bitmap_equal_p (s1, s2))
2816 {
2817 bitmap t1 = BITMAP_ALLOC (NULL);
2818 bitmap t2 = BITMAP_ALLOC (NULL);
2819
2820 bitmap_and_compl (t1, s1, s2);
2821 bitmap_and_compl (t2, s2, s1);
2822 bitmap_ior_into (t1, t2);
2823 mark_set_for_renaming (t1);
2824
2825 BITMAP_FREE (t1);
2826 BITMAP_FREE (t2);
2827 }
2828 }
2829
2830
2831 /* Pop the top SCB from SCB_STACK and act on the differences between
2832 what was recorded by push_stmt_changes and the current state of
2833 the statement. */
2834
2835 void
2836 pop_stmt_changes (tree *stmt_p)
2837 {
2838 tree op, stmt;
2839 ssa_op_iter iter;
2840 bitmap loads, stores;
2841 scb_t buf;
2842
2843 stmt = *stmt_p;
2844
2845 /* It makes no sense to keep track of PHI nodes. */
2846 if (TREE_CODE (stmt) == PHI_NODE)
2847 return;
2848
2849 buf = VEC_pop (scb_t, scb_stack);
2850 gcc_assert (stmt_p == buf->stmt_p);
2851
2852 /* Force an operand re-scan on the statement and mark any newly
2853 exposed variables. */
2854 update_stmt (stmt);
2855
2856 /* Determine whether any memory symbols need to be renamed. If the
2857 sets of loads and stores are different after the statement is
2858 modified, then the affected symbols need to be renamed.
2859
2860 Note that it may be possible for the statement to not reference
2861 memory anymore, but we still need to act on the differences in
2862 the sets of symbols. */
2863 loads = stores = NULL;
2864 if (stmt_references_memory_p (stmt))
2865 {
2866 tree op;
2867 ssa_op_iter i;
2868
2869 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
2870 {
2871 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2872 if (loads == NULL)
2873 loads = BITMAP_ALLOC (NULL);
2874 bitmap_set_bit (loads, DECL_UID (sym));
2875 }
2876
2877 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
2878 {
2879 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2880 if (stores == NULL)
2881 stores = BITMAP_ALLOC (NULL);
2882 bitmap_set_bit (stores, DECL_UID (sym));
2883 }
2884 }
2885
2886 /* If LOADS is different from BUF->LOADS, the affected
2887 symbols need to be marked for renaming. */
2888 mark_difference_for_renaming (loads, buf->loads);
2889
2890 /* Similarly for STORES and BUF->STORES. */
2891 mark_difference_for_renaming (stores, buf->stores);
2892
2893 /* Mark all the naked GIMPLE register operands for renaming. */
2894 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
2895 if (DECL_P (op))
2896 mark_sym_for_renaming (op);
2897
2898 /* FIXME, need to add more finalizers here. Cleanup EH info,
2899 recompute invariants for address expressions, add
2900 SSA replacement mappings, etc. For instance, given
2901 testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of
2902 the form:
2903
2904 # SMT.4_20 = VDEF <SMT.4_16>
2905 D.1576_11 = 1.0e+0;
2906
2907 So, the VDEF will disappear, but instead of marking SMT.4 for
2908 renaming it would be far more efficient to establish a
2909 replacement mapping that would replace every reference of
2910 SMT.4_20 with SMT.4_16. */
2911
2912 /* Free memory used by the buffer. */
2913 BITMAP_FREE (buf->loads);
2914 BITMAP_FREE (buf->stores);
2915 BITMAP_FREE (loads);
2916 BITMAP_FREE (stores);
2917 buf->stmt_p = NULL;
2918 free (buf);
2919 }
2920
2921
2922 /* Discard the topmost change buffer from SCB_STACK. This is useful
2923 when the caller realizes that it did not actually modify the
2924 statement. It avoids the expensive operand re-scan. */
2925
2926 void
2927 discard_stmt_changes (tree *stmt_p)
2928 {
2929 scb_t buf;
2930 tree stmt;
2931
2932 /* It makes no sense to keep track of PHI nodes. */
2933 stmt = *stmt_p;
2934 if (TREE_CODE (stmt) == PHI_NODE)
2935 return;
2936
2937 buf = VEC_pop (scb_t, scb_stack);
2938 gcc_assert (stmt_p == buf->stmt_p);
2939
2940 /* Free memory used by the buffer. */
2941 BITMAP_FREE (buf->loads);
2942 BITMAP_FREE (buf->stores);
2943 buf->stmt_p = NULL;
2944 free (buf);
2945 }
2946
2947
2948 /* Returns true if statement STMT may access memory. */
2949
2950 bool
2951 stmt_references_memory_p (tree stmt)
2952 {
2953 if (!gimple_ssa_operands (cfun)->ops_active || TREE_CODE (stmt) == PHI_NODE)
2954 return false;
2955
2956 return stmt_ann (stmt)->references_memory;
2957 }
2958
2959
2960 /* Return the memory partition tag (MPT) associated with memory
2961 symbol SYM. From a correctness standpoint, memory partitions can
2962 be assigned arbitrarily as long as this rule is
2963 observed: Given two memory partitions MPT.i and MPT.j, they must
2964 not contain symbols in common.
2965
2966 Memory partitions are used when putting the program into Memory-SSA
2967 form. In particular, in Memory-SSA PHI nodes are not computed for
2968 individual memory symbols. They are computed for memory
2969 partitions. This reduces the number of PHI nodes in the SSA graph
2970 at the expense of precision (i.e., it makes unrelated stores affect
2971 each other).
2972
2973 However, it is possible to increase precision by changing this
2974 partitioning scheme. For instance, if the partitioning scheme is
2975 such that get_mpt_for is the identity function (that is,
2976 get_mpt_for (s) = s), this will result in ultimate precision at the
2977 expense of huge SSA webs.
2978
2979 At the other extreme, a partitioning scheme that groups all the
2980 symbols in the same set results in minimal SSA webs and almost
2981 total loss of precision. */
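
/* For illustration (hypothetical symbols): if MPT.7 is the partition
   containing { a, b }, a store to either 'a' or 'b' appears in the IL
   as

     # MPT.7_4 = VDEF <MPT.7_3>

   so 'a' and 'b' share a single SSA web instead of having one
   each.  */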
2982
2983 tree
2984 get_mpt_for (tree sym)
2985 {
2986 tree mpt;
2987
2988 /* Don't create a new tag unnecessarily. */
2989 mpt = memory_partition (sym);
2990 if (mpt == NULL_TREE)
2991 {
2992 mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
2993 TREE_ADDRESSABLE (mpt) = 0;
2994 MTAG_GLOBAL (mpt) = 1;
2995 add_referenced_var (mpt);
2996 VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
2997 MPT_SYMBOLS (mpt) = BITMAP_ALLOC (&operands_bitmap_obstack);
2998 set_memory_partition (sym, mpt);
2999 }
3000
3001 return mpt;
3002 }
3003
3004
3005 /* Dump memory partition information to FILE. */
3006
3007 void
3008 dump_memory_partitions (FILE *file)
3009 {
3010 unsigned i, npart;
3011 unsigned long nsyms;
3012 tree mpt;
3013
3014 fprintf (file, "\nMemory partitions\n\n");
3015 for (i = 0, npart = 0, nsyms = 0;
3016 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
3017 i++)
3018 {
3019 if (mpt)
3020 {
3021 bitmap syms = MPT_SYMBOLS (mpt);
3022 unsigned long n = bitmap_count_bits (syms);
3023
3024 fprintf (file, "#%u: ", i);
3025 print_generic_expr (file, mpt, 0);
3026 fprintf (file, ": %lu elements: ", n);
3027 dump_decl_set (file, syms);
3028 npart++;
3029 nsyms += n;
3030 }
3031 }
3032
3033 fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
3034 }
3035
3036
3037 /* Dump memory partition information to stderr. */
3038
3039 void
3040 debug_memory_partitions (void)
3041 {
3042 dump_memory_partitions (stderr);
3043 }