/* SSA operands management for trees.
   Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
#include "toplev.h"
#include "langhooks.h"
#include "ipa-reference.h"
/* This file contains the code required to manage the operands cache of the
   SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
   annotation.  This cache contains operands that will be of interest to
   optimizers and other passes wishing to manipulate the IL.

   The operand types are broken up into REAL and VIRTUAL operands.  The real
   operands are represented as pointers into the stmt's operand tree.  Thus
   any manipulation of the real operands will be reflected in the actual tree.
   Virtual operands are represented solely in the cache, although the base
   variable for the SSA_NAME may, or may not, occur in the stmt's tree.
   Manipulation of the virtual operands will not be reflected in the stmt tree.

   The routines in this file are concerned with creating this operand cache
   from a stmt tree.

   The operand tree is then parsed by the various get_* routines which look
   through the stmt tree for the occurrence of operands which may be of
   interest, and calls are made to the append_* routines whenever one is
   found.  There are 4 of these routines, each representing one of the
   4 types of operands: Defs, Uses, Virtual Uses, and Virtual Defs.

   The append_* routines check for duplication, and simply keep a list of
   unique objects for each operand type in the build_* extendable vectors.

   Once the stmt tree is completely parsed, the finalize_ssa_operands()
   routine is called, which proceeds to perform the finalization routine
   on each of the 4 operand vectors which have been built up.

   If the stmt had a previous operand cache, the finalization routines
   attempt to match up the new operands with the old ones.  If it's a perfect
   match, the old vector is simply reused.  If it isn't a perfect match, then
   a new vector is created and the new operands are placed there.  For
   virtual operands, if the previous cache had an SSA_NAME version of a
   variable, and that same variable occurs in the same operands cache, then
   the new cache vector will also get the same SSA_NAME.

   i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
   vector for VUSE, then the new vector will also be modified such that
   it contains 'a_5' rather than 'a'.  */
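
/* As an illustrative sketch (the SSA version numbers and points-to sets
   here are hypothetical, not taken from any particular dump): for a
   statement such as

     a_1 = b_2 + *p_3;

   where alias analysis has determined that 'p_3' may point to 'x' or 'y',
   the operand cache built by this file would contain roughly:

     DEF  : a_1		(real operand, a pointer into the stmt tree)
     USEs : b_2, p_3	(real operands)
     VUSEs: x_4, y_5	(virtual operands, kept only in the cache)

   Before renaming, the virtual operands would simply be the base
   symbols 'x' and 'y'.  */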


/* Structure storing statistics on how many call clobbers we have, and
   how many were avoided.  */

static struct
{
  /* Number of call-clobbered ops we attempt to add to calls in
     add_call_clobbered_mem_symbols.  */
  unsigned int clobbered_vars;

  /* Number of write-clobbers (VDEFs) avoided by using
     not_written information.  */
  unsigned int static_write_clobbers_avoided;

  /* Number of reads (VUSEs) avoided by using not_read information.  */
  unsigned int static_read_clobbers_avoided;

  /* Number of write-clobbers avoided because the variable can't escape to
     this call.  */
  unsigned int unescapable_clobbers_avoided;

  /* Number of read-only uses we attempt to add to calls in
     add_call_read_mem_symbols.  */
  unsigned int readonly_clobbers;

  /* Number of read-only uses we avoid using not_read information.  */
  unsigned int static_readonly_clobbers_avoided;
} clobber_stats;

/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def		(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops	(1 << 1)

/* Operand is an implicit reference.  This is used to distinguish
   explicit assignments in the form of GIMPLE_MODIFY_STMT from
   clobbering sites like function calls or ASM_EXPRs.  */
#define opf_implicit	(1 << 2)
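
/* A hypothetical sketch of how these flags combine in practice: when
   scanning the assignment 'a = b', the LHS is scanned with opf_def and
   the RHS with opf_use; a call-clobbered symbol at a call site is added
   with opf_def | opf_implicit, since no explicit store to it appears in
   the statement; and the index 'i' inside '&a.b[i]' is scanned with
   opf_use | opf_no_vops, so a USE of 'i' is created but no VUSEs.  */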

/* Array for building all the def operands.  */
static VEC(tree,heap) *build_defs;

/* Array for building all the use operands.  */
static VEC(tree,heap) *build_uses;

/* Set for building all the VDEF operands.  */
static VEC(tree,heap) *build_vdefs;

/* Set for building all the VUSE operands.  */
static VEC(tree,heap) *build_vuses;

/* Bitmap obstack for our data structures that need to survive across
   compilation of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

/* Set for building all the loaded symbols.  */
static bitmap build_loads;

/* Set for building all the stored symbols.  */
static bitmap build_stores;

static void get_expr_operands (tree, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;

/* Statement change buffer.  Data structure used to record state
   information for statements.  This is used to determine what needs
   to be done in order to update the SSA web after a statement is
   modified by a pass.  If STMT is a statement that has just been
   created, or needs to be folded via fold_stmt, or anything else that
   changes its physical structure, then the pass should:

   1- Call push_stmt_changes (&stmt) to record the current state of
      STMT before any modifications are made.

   2- Make all appropriate modifications to the statement.

   3- Call pop_stmt_changes (&stmt) to find new symbols that
      need to be put in SSA form, SSA name mappings for names that
      have disappeared, recompute invariantness for address
      expressions, cleanup EH information, etc.

   If it is possible to determine that the statement was not modified,
   instead of calling pop_stmt_changes it is quicker to call
   discard_stmt_changes to avoid the expensive and unnecessary operand
   re-scan and change comparison.  */
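
/* For example, a pass that rewrites the RHS of an assignment might use
   the protocol like this (a minimal sketch; 'stmt' and 'new_rhs' are
   hypothetical placeholders):

     push_stmt_changes (&stmt);
     GIMPLE_STMT_OPERAND (stmt, 1) = new_rhs;
     pop_stmt_changes (&stmt);

   If the pass instead discovers that no change was actually needed, it
   would call discard_stmt_changes (&stmt) rather than pop_stmt_changes,
   skipping the operand re-scan entirely.  */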

struct scb_d
{
  /* Pointer to the statement being modified.  */
  tree *stmt_p;

  /* If the statement references memory these are the sets of symbols
     loaded and stored by the statement.  */
  bitmap loads;
  bitmap stores;
};

typedef struct scb_d *scb_t;
DEF_VEC_P(scb_t);
DEF_VEC_ALLOC_P(scb_t,heap);

/* Stack of statement change buffers (SCB).  Every call to
   push_stmt_changes pushes a new buffer onto the stack.  Calls to
   pop_stmt_changes pop a buffer off of the stack and compute the set
   of changes for the popped statement.  */
static VEC(scb_t,heap) *scb_stack;

/* Return the DECL_UID of the base variable of T.  */

static inline unsigned
get_name_decl (tree t)
{
  if (TREE_CODE (t) != SSA_NAME)
    return DECL_UID (t);
  else
    return DECL_UID (SSA_NAME_VAR (t));
}


/* Comparison function for qsort used in operand_build_sort_virtual.  */

static int
operand_build_cmp (const void *p, const void *q)
{
  tree e1 = *((const tree *) p);
  tree e2 = *((const tree *) q);
  unsigned int u1, u2;

  u1 = get_name_decl (e1);
  u2 = get_name_decl (e2);

  /* We want to sort in ascending order.  They can never be equal.  */
#ifdef ENABLE_CHECKING
  gcc_assert (u1 != u2);
#endif
  return (u1 > u2 ? 1 : -1);
}


/* Sort the virtual operands in LIST from lowest DECL_UID to highest.  */

static inline void
operand_build_sort_virtual (VEC(tree,heap) *list)
{
  int num = VEC_length (tree, list);

  if (num < 2)
    return;

  if (num == 2)
    {
      if (get_name_decl (VEC_index (tree, list, 0))
	  > get_name_decl (VEC_index (tree, list, 1)))
	{
	  /* Swap elements if in the wrong order.  */
	  tree tmp = VEC_index (tree, list, 0);
	  VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
	  VEC_replace (tree, list, 1, tmp);
	}
      return;
    }

  /* There are 3 or more elements, call qsort.  */
  qsort (VEC_address (tree, list),
	 VEC_length (tree, list),
	 sizeof (tree),
	 operand_build_cmp);
}


/* Return true if the SSA operands cache is active.  */

bool
ssa_operands_active (void)
{
  return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}


/* VOPs are of variable size, so the free list maps "free buckets" to the
   following table:

   bucket   # operands
   ------   ----------
	0	1
	1	2
	...
	15	16
	16	17-24
	17	25-32
	18	33-40
	...
	29	121-128

   Any VOPs larger than this are simply added to the largest bucket when they
   are freed.  */
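
/* To make the mapping concrete, a few hypothetical requests worked
   through vop_free_bucket_index and vop_free_bucket_size below:

     num = 5    -> bucket 4 (sizes 1 through 16 map directly, off by one)
     num = 20   -> bucket 16, which holds vops of up to 24 operands, so
		   a 20-operand request is satisfied by a 24-slot vop
     num = 300  -> -1; alloc_vop then allocates exactly 300 slots, and
		   when freed the vop is filed under the largest bucket
		   by add_vop_to_freelist.  */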


/* Return the number of operands used in bucket BUCKET.  */

static inline int
vop_free_bucket_size (int bucket)
{
#ifdef ENABLE_CHECKING
  gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS);
#endif
  if (bucket < 16)
    return bucket + 1;
  return (bucket - 13) * 8;
}


/* For a vop of NUM operands, return the bucket NUM belongs to.  If NUM is
   beyond the end of the bucket table, return -1.  */

static inline int
vop_free_bucket_index (int num)
{
  gcc_assert (num > 0);

  /* Sizes 1 through 16 use buckets 0-15.  */
  if (num <= 16)
    return num - 1;
  /* Buckets 16 - 45 represent 17 through 255 in 8 unit chunks.  */
  if (num < 256)
    return 14 + (num - 1) / 8;
  return -1;
}


/* Initialize the VOP free buckets.  */

static inline void
init_vop_buckets (void)
{
  int x;

  for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++)
    gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL;
}


/* Add PTR to the appropriate VOP bucket.  */

static inline void
add_vop_to_freelist (voptype_p ptr)
{
  int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev));

  /* Too large, use the largest bucket so it's not a complete throwaway.  */
  if (bucket == -1)
    bucket = NUM_VOP_FREE_BUCKETS - 1;

  ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
  gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr;
}


/* These are the sizes (in numbers of struct voptype_d) of the operand
   memory buffer allocated each time more operand memory is required.
   The final value is the amount that is allocated every time after that.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_1	30
#define OP_SIZE_2	110
#define OP_SIZE_3	511
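
/* A rough sketch of the growth schedule implemented by ssa_operand_alloc
   below: the first buffer requested for a function is
   OP_SIZE_1 * sizeof (struct voptype_d) bytes, the second is
   OP_SIZE_2 * sizeof (struct voptype_d), and every buffer after that is
   OP_SIZE_3 * sizeof (struct voptype_d).  The allocator jumps straight
   to the OP_SIZE_3 buffer when a single request is larger than the next
   scheduled size.  */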

/* Initialize the operand cache routines.  */

void
init_ssa_operands (void)
{
  if (!n_initialized++)
    {
      build_defs = VEC_alloc (tree, heap, 5);
      build_uses = VEC_alloc (tree, heap, 10);
      build_vuses = VEC_alloc (tree, heap, 25);
      build_vdefs = VEC_alloc (tree, heap, 25);
      bitmap_obstack_initialize (&operands_bitmap_obstack);
      build_loads = BITMAP_ALLOC (&operands_bitmap_obstack);
      build_stores = BITMAP_ALLOC (&operands_bitmap_obstack);
      scb_stack = VEC_alloc (scb_t, heap, 20);
    }

  gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
  gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
  gimple_ssa_operands (cfun)->operand_memory_index
     = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
  gimple_ssa_operands (cfun)->ops_active = true;
  memset (&clobber_stats, 0, sizeof (clobber_stats));
  init_vop_buckets ();
  gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
}


/* Dispose of anything required by the operand routines.  */

void
fini_ssa_operands (void)
{
  struct ssa_operand_memory_d *ptr;
  unsigned ix;
  tree mpt;

  if (!--n_initialized)
    {
      VEC_free (tree, heap, build_defs);
      VEC_free (tree, heap, build_uses);
      VEC_free (tree, heap, build_vdefs);
      VEC_free (tree, heap, build_vuses);
      BITMAP_FREE (build_loads);
      BITMAP_FREE (build_stores);

      /* The change buffer stack had better be empty.  */
      gcc_assert (VEC_length (scb_t, scb_stack) == 0);
      VEC_free (scb_t, heap, scb_stack);
      scb_stack = NULL;
    }

  gimple_ssa_operands (cfun)->free_defs = NULL;
  gimple_ssa_operands (cfun)->free_uses = NULL;

  while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
    {
      gimple_ssa_operands (cfun)->operand_memory
	= gimple_ssa_operands (cfun)->operand_memory->next;
      ggc_free (ptr);
    }

  for (ix = 0;
       VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
       ix++)
    {
      if (mpt)
	BITMAP_FREE (MPT_SYMBOLS (mpt));
    }

  VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);

  gimple_ssa_operands (cfun)->ops_active = false;

  if (!n_initialized)
    bitmap_obstack_release (&operands_bitmap_obstack);

  if (dump_file && (dump_flags & TDF_STATS))
    {
      fprintf (dump_file, "Original clobbered vars: %d\n",
	       clobber_stats.clobbered_vars);
      fprintf (dump_file, "Static write clobbers avoided: %d\n",
	       clobber_stats.static_write_clobbers_avoided);
      fprintf (dump_file, "Static read clobbers avoided: %d\n",
	       clobber_stats.static_read_clobbers_avoided);
      fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
	       clobber_stats.unescapable_clobbers_avoided);
      fprintf (dump_file, "Original read-only clobbers: %d\n",
	       clobber_stats.readonly_clobbers);
      fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
	       clobber_stats.static_readonly_clobbers_avoided);
    }
}


/* Return a pointer to a chunk of memory of SIZE bytes for operand storage.  */

static inline void *
ssa_operand_alloc (unsigned size)
{
  char *ptr;

  if (gimple_ssa_operands (cfun)->operand_memory_index + size
      >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      if (gimple_ssa_operands (cfun)->ssa_operand_mem_size == OP_SIZE_INIT)
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_1 * sizeof (struct voptype_d);
      else if (gimple_ssa_operands (cfun)->ssa_operand_mem_size
	       == OP_SIZE_1 * sizeof (struct voptype_d))
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_2 * sizeof (struct voptype_d);
      else
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_3 * sizeof (struct voptype_d);

      /* Go right to the maximum size if the request is too large.  */
      if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size)
	gimple_ssa_operands (cfun)->ssa_operand_mem_size
	  = OP_SIZE_3 * sizeof (struct voptype_d);

      /* Fail if there is still not enough space.  If a request really
	 needs this many operands, first make sure there isn't a different
	 problem causing such a large request.  If the size is genuinely
	 needed, the allocator can be taught to specially allocate a buffer
	 just for this request.  */
      gcc_assert (size <= gimple_ssa_operands (cfun)->ssa_operand_mem_size);

      ptr = (struct ssa_operand_memory_d *)
	ggc_alloc (sizeof (struct ssa_operand_memory_d)
		   + gimple_ssa_operands (cfun)->ssa_operand_mem_size - 1);
      ptr->next = gimple_ssa_operands (cfun)->operand_memory;
      gimple_ssa_operands (cfun)->operand_memory = ptr;
      gimple_ssa_operands (cfun)->operand_memory_index = 0;
    }
  ptr = &(gimple_ssa_operands (cfun)->operand_memory
	  ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
  gimple_ssa_operands (cfun)->operand_memory_index += size;
  return ptr;
}


/* Allocate a DEF operand.  */

static inline struct def_optype_d *
alloc_def (void)
{
  struct def_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_defs)
    {
      ret = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs
	= gimple_ssa_operands (cfun)->free_defs->next;
    }
  else
    ret = (struct def_optype_d *)
      ssa_operand_alloc (sizeof (struct def_optype_d));
  return ret;
}


/* Allocate a USE operand.  */

static inline struct use_optype_d *
alloc_use (void)
{
  struct use_optype_d *ret;
  if (gimple_ssa_operands (cfun)->free_uses)
    {
      ret = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses
	= gimple_ssa_operands (cfun)->free_uses->next;
    }
  else
    ret = (struct use_optype_d *)
      ssa_operand_alloc (sizeof (struct use_optype_d));
  return ret;
}


/* Allocate a vop with NUM elements.  */

static inline struct voptype_d *
alloc_vop (int num)
{
  struct voptype_d *ret = NULL;
  int alloc_size = 0;

  int bucket = vop_free_bucket_index (num);
  if (bucket != -1)
    {
      /* If there is a free operand, use it.  */
      if (gimple_ssa_operands (cfun)->vop_free_buckets[bucket] != NULL)
	{
	  ret = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
	  gimple_ssa_operands (cfun)->vop_free_buckets[bucket] =
	    gimple_ssa_operands (cfun)->vop_free_buckets[bucket]->next;
	}
      else
	alloc_size = vop_free_bucket_size (bucket);
    }
  else
    alloc_size = num;

  if (alloc_size > 0)
    ret = (struct voptype_d *) ssa_operand_alloc (
	sizeof (struct voptype_d) + (alloc_size - 1) * sizeof (vuse_element_t));

  VUSE_VECT_NUM_ELEM (ret->usev) = num;
  return ret;
}


/* This routine makes sure that PTR is in an immediate use list, and makes
   sure the stmt pointer is set to the current stmt.  */

static inline void
set_virtual_use_link (use_operand_p ptr, tree stmt)
{
  /* fold_stmt may have changed the stmt pointers.  */
  if (ptr->stmt != stmt)
    ptr->stmt = stmt;

  /* If this use isn't in a list, add it to the correct list.  */
  if (!ptr->prev)
    link_imm_use (ptr, *(ptr->use));
}


/* Adds OP to the list of defs after LAST, and returns the new element.  */

static inline def_optype_p
add_def_op (tree *op, def_optype_p last)
{
  def_optype_p new;

  new = alloc_def ();
  DEF_OP_PTR (new) = op;
  last->next = new;
  new->next = NULL;
  return new;
}


/* Adds OP to the list of uses of statement STMT after LAST, and returns
   the new element.  */

static inline use_optype_p
add_use_op (tree stmt, tree *op, use_optype_p last)
{
  use_optype_p new;

  new = alloc_use ();
  USE_OP_PTR (new)->use = op;
  link_imm_use_stmt (USE_OP_PTR (new), *op, stmt);
  last->next = new;
  new->next = NULL;
  return new;
}


/* Return a virtual op pointer with NUM elements which are all initialized to OP
   and are linked into the immediate uses for STMT.  The new vop is appended
   after PREV.  */

static inline voptype_p
add_vop (tree stmt, tree op, int num, voptype_p prev)
{
  voptype_p new;
  int x;

  new = alloc_vop (num);
  for (x = 0; x < num; x++)
    {
      VUSE_OP_PTR (new, x)->prev = NULL;
      SET_VUSE_OP (new, x, op);
      VUSE_OP_PTR (new, x)->use = &new->usev.uses[x].use_var;
      link_imm_use_stmt (VUSE_OP_PTR (new, x), new->usev.uses[x].use_var, stmt);
    }

  if (prev)
    prev->next = new;
  new->next = NULL;
  return new;
}


/* Adds OP to the list of VUSEs of statement STMT after LAST, and returns
   the new element so the caller can advance LAST.  */

static inline voptype_p
add_vuse_op (tree stmt, tree op, int num, voptype_p last)
{
  voptype_p new = add_vop (stmt, op, num, last);
  VDEF_RESULT (new) = NULL_TREE;
  return new;
}


/* Adds OP to the list of VDEFs of statement STMT after LAST, and returns
   the new element so the caller can advance LAST.  */

static inline voptype_p
add_vdef_op (tree stmt, tree op, int num, voptype_p last)
{
  voptype_p new = add_vop (stmt, op, num, last);
  VDEF_RESULT (new) = op;
  return new;
}


/* Reallocate the virtual operand PTR so that it has NUM_ELEM use slots.  ROOT
   is the head of the operand list it belongs to.  */

static inline struct voptype_d *
realloc_vop (struct voptype_d *ptr, int num_elem, struct voptype_d **root)
{
  int x, lim;
  tree stmt, val;
  struct voptype_d *ret, *tmp;

  if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
    return ptr;

  val = VUSE_OP (ptr, 0);
  if (TREE_CODE (val) == SSA_NAME)
    val = SSA_NAME_VAR (val);

  stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));

  /* Delink all the existing uses.  */
  for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
    {
      use_operand_p use_p = VUSE_OP_PTR (ptr, x);
      delink_imm_use (use_p);
    }

  /* If we want less space, simply use this one, and shrink the size.  */
  if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
    {
      VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
      return ptr;
    }

  /* It is growing.  Allocate a new one and replace the old one.  */
  ret = add_vuse_op (stmt, val, num_elem, ptr);

  /* Clear PTR and add its memory to the free list.  */
  lim = VUSE_VECT_NUM_ELEM (ptr->usev);
  memset (ptr, 0,
	  sizeof (struct voptype_d) + sizeof (vuse_element_t) * (lim - 1));
  add_vop_to_freelist (ptr);

  /* Now simply remove the old one: find the list node whose next pointer
     is PTR and splice RET in its place.  */
  if (*root == ptr)
    {
      *root = ret;
      return ret;
    }
  else
    for (tmp = *root; tmp != NULL; tmp = tmp->next)
      if (tmp->next == ptr)
	{
	  tmp->next = ret;
	  return ret;
	}

  /* The pointer passed in isn't in STMT's VDEF lists.  */
  gcc_unreachable ();
}


/* Reallocate the PTR vdef so that it has NUM_ELEM use slots.  */

struct voptype_d *
realloc_vdef (struct voptype_d *ptr, int num_elem)
{
  tree val, stmt;
  struct voptype_d *ret;

  val = VDEF_RESULT (ptr);
  stmt = USE_STMT (VDEF_OP_PTR (ptr, 0));
  ret = realloc_vop (ptr, num_elem, &(VDEF_OPS (stmt)));
  VDEF_RESULT (ret) = val;
  return ret;
}


/* Reallocate the PTR vuse so that it has NUM_ELEM use slots.  */

struct voptype_d *
realloc_vuse (struct voptype_d *ptr, int num_elem)
{
  tree stmt;
  struct voptype_d *ret;

  stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
  ret = realloc_vop (ptr, num_elem, &(VUSE_OPS (stmt)));
  return ret;
}


/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs VEC of tree *.  */

static inline void
finalize_ssa_defs (tree stmt)
{
  unsigned new_i;
  struct def_optype_d new_list;
  def_optype_p old_ops, last;
  unsigned int num = VEC_length (tree, build_defs);

  /* There should only be a single real definition per assignment.  */
  gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);

  new_list.next = NULL;
  last = &new_list;

  old_ops = DEF_OPS (stmt);

  new_i = 0;

  /* Check for the common case of 1 def that hasn't changed.  */
  if (old_ops && old_ops->next == NULL && num == 1
      && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
    return;

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      old_ops->next = gimple_ssa_operands (cfun)->free_defs;
      gimple_ssa_operands (cfun)->free_defs = old_ops;
    }

  /* If there is anything remaining in the build_defs list, simply emit it.  */
  for ( ; new_i < num; new_i++)
    last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);

  /* Now set the stmt's operands.  */
  DEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    def_optype_p ptr;
    unsigned x = 0;
    for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == num);
  }
#endif
}


/* Takes elements from build_uses and turns them into use operands of STMT.
   TODO -- Make build_uses VEC of tree *.  */

static inline void
finalize_ssa_uses (tree stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

#ifdef ENABLE_CHECKING
  {
    unsigned x;
    unsigned num = VEC_length (tree, build_uses);

    /* If the pointer to the operand is the statement itself, something is
       wrong.  It means that we are pointing to a local variable (the
       initial call to update_stmt_operands does not pass a pointer to a
       statement).  */
    for (x = 0; x < num; x++)
      gcc_assert (*((tree *) VEC_index (tree, build_uses, x)) != stmt);
  }
#endif

  new_list.next = NULL;
  last = &new_list;

  old_ops = USE_OPS (stmt);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      old_ops->next = gimple_ssa_operands (cfun)->free_uses;
      gimple_ssa_operands (cfun)->free_uses = old_ops;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
    last = add_use_op (stmt,
		       (tree *) VEC_index (tree, build_uses, new_i),
		       last);

  /* Now set the stmt's operands.  */
  USE_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_uses));
  }
#endif
}


/* Takes elements from BUILD_VDEFS and turns them into vdef operands of
   STMT.  FIXME, for now each VDEF operator should have a single operand
   in its RHS.  */

static inline void
finalize_ssa_vdefs (tree stmt)
{
  unsigned new_i;
  struct voptype_d new_list;
  voptype_p old_ops, ptr, last;
  stmt_ann_t ann = stmt_ann (stmt);

  /* Set the symbols referenced by STMT.  */
  if (!bitmap_empty_p (build_stores))
    {
      if (ann->operands.stores == NULL)
	ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);

      bitmap_copy (ann->operands.stores, build_stores);
    }
  else
    BITMAP_FREE (ann->operands.stores);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  new_list.next = NULL;
  last = &new_list;

  old_ops = VDEF_OPS (stmt);
  new_i = 0;
  while (old_ops && new_i < VEC_length (tree, build_vdefs))
    {
      tree op = VEC_index (tree, build_vdefs, new_i);
      unsigned new_uid = get_name_decl (op);
      unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops));

      /* FIXME, for now each VDEF operator should have at most one
	 operand in its RHS.  */
      gcc_assert (VDEF_NUM (old_ops) == 1);

      if (old_uid == new_uid)
	{
	  /* If the symbols are the same, reuse the existing operand.  */
	  last->next = old_ops;
	  last = old_ops;
	  old_ops = old_ops->next;
	  last->next = NULL;
	  set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
	  new_i++;
	}
      else if (old_uid < new_uid)
	{
	  /* If old is less than new, old goes to the free list.  */
	  voptype_p next;
	  delink_imm_use (VDEF_OP_PTR (old_ops, 0));
	  next = old_ops->next;
	  add_vop_to_freelist (old_ops);
	  old_ops = next;
	}
      else
	{
	  /* This is a new operand.  */
	  last = add_vdef_op (stmt, op, 1, last);
	  new_i++;
	}
    }

  /* If there is anything remaining in BUILD_VDEFS, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
    last = add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, last);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (ptr = old_ops; ptr; ptr = last)
	{
	  last = ptr->next;
	  delink_imm_use (VDEF_OP_PTR (ptr, 0));
	  add_vop_to_freelist (ptr);
	}
    }

  /* Now set STMT's operands.  */
  VDEF_OPS (stmt) = new_list.next;

#ifdef ENABLE_CHECKING
  {
    unsigned x = 0;
    for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next)
      x++;

    gcc_assert (x == VEC_length (tree, build_vdefs));
  }
#endif
}


/* Takes elements from BUILD_VUSES and turns them into VUSE operands of
   STMT.  */

static inline void
finalize_ssa_vuse_ops (tree stmt)
{
  unsigned new_i;
  int old_i;
  voptype_p old_ops, last;
  VEC(tree,heap) *new_ops;
  stmt_ann_t ann;

  /* Set the symbols referenced by STMT.  */
  ann = stmt_ann (stmt);
  if (!bitmap_empty_p (build_loads))
    {
      if (ann->operands.loads == NULL)
	ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);

      bitmap_copy (ann->operands.loads, build_loads);
    }
  else
    BITMAP_FREE (ann->operands.loads);

  /* If aliases have not been computed, do not instantiate a virtual
     operator on STMT.  Initially, we only compute the SSA form on
     GIMPLE registers.  The virtual SSA form is only computed after
     alias analysis, so virtual operators will remain unrenamed and
     the verifier will complain.  However, alias analysis needs to
     access symbol load/store information, so we need to compute
     those.  */
  if (!gimple_aliases_computed_p (cfun))
    return;

  /* STMT should have at most one VUSE operator.  */
  old_ops = VUSE_OPS (stmt);
  gcc_assert (old_ops == NULL || old_ops->next == NULL);

  new_ops = NULL;
  new_i = old_i = 0;
  while (old_ops
	 && old_i < VUSE_NUM (old_ops)
	 && new_i < VEC_length (tree, build_vuses))
    {
      tree new_op = VEC_index (tree, build_vuses, new_i);
      tree old_op = VUSE_OP (old_ops, old_i);
      unsigned new_uid = get_name_decl (new_op);
      unsigned old_uid = get_name_decl (old_op);

      if (old_uid == new_uid)
	{
	  /* If the symbols are the same, reuse the existing operand.  */
	  VEC_safe_push (tree, heap, new_ops, old_op);
	  new_i++;
	  old_i++;
	}
      else if (old_uid < new_uid)
	{
	  /* If OLD_UID is less than NEW_UID, the old operand has
	     disappeared, skip to the next old operand.  */
	  old_i++;
	}
      else
	{
	  /* This is a new operand.  */
	  VEC_safe_push (tree, heap, new_ops, new_op);
	  new_i++;
	}
    }

  /* If there is anything remaining in the build_vuses list, simply emit it.  */
  for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
    VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i));

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
	delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
      add_vop_to_freelist (old_ops);
      VUSE_OPS (stmt) = NULL;
    }

  /* If there are any operands, instantiate a VUSE operator for STMT.  */
  if (new_ops)
    {
      tree op;
      unsigned i;

      last = add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), NULL);

      for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
	SET_USE (VUSE_OP_PTR (last, (int) i), op);

      VUSE_OPS (stmt) = last;
    }

#ifdef ENABLE_CHECKING
  {
    unsigned x;

    if (VUSE_OPS (stmt))
      {
	gcc_assert (VUSE_OPS (stmt)->next == NULL);
	x = VUSE_NUM (VUSE_OPS (stmt));
      }
    else
      x = 0;

    gcc_assert (x == VEC_length (tree, build_vuses));
  }
#endif
}

/* Construct the VUSE operand vector for STMT, removing superfluous VUSEs
   first.  */

static void
finalize_ssa_vuses (tree stmt)
{
  unsigned num, num_vdefs;
  unsigned vuse_index;

  /* Remove superfluous VUSE operands.  If the statement already has a
     VDEF operator for a variable 'a', then a VUSE for 'a' is not
     needed because VDEFs imply a VUSE of the variable.  For instance,
     suppose that variable 'a' is pointed-to by p and q:

	      # VUSE <a_2>
	      # a_3 = VDEF <a_2>
	      *p = *q;

     The VUSE <a_2> is superfluous because it is implied by the
     VDEF operator.  */
  num = VEC_length (tree, build_vuses);
  num_vdefs = VEC_length (tree, build_vdefs);

  if (num > 0 && num_vdefs > 0)
    for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
      {
	tree vuse;
	vuse = VEC_index (tree, build_vuses, vuse_index);
	if (TREE_CODE (vuse) != SSA_NAME)
	  {
	    var_ann_t ann = var_ann (vuse);
	    ann->in_vuse_list = 0;
	    if (ann->in_vdef_list)
	      {
		VEC_ordered_remove (tree, build_vuses, vuse_index);
		continue;
	      }
	  }
	vuse_index++;
      }

  finalize_ssa_vuse_ops (stmt);
}


/* Clear the in_list bits and empty the build array for VDEFs and
   VUSEs.  */

static inline void
cleanup_build_arrays (void)
{
  unsigned i;
  tree t;

  for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vdef_list = false;

  for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++)
    if (TREE_CODE (t) != SSA_NAME)
      var_ann (t)->in_vuse_list = false;

  VEC_truncate (tree, build_vdefs, 0);
  VEC_truncate (tree, build_vuses, 0);
  VEC_truncate (tree, build_defs, 0);
  VEC_truncate (tree, build_uses, 0);
  bitmap_clear (build_loads);
  bitmap_clear (build_stores);
}


/* Finalize all the build vectors and attach the new operands to STMT.  */

static inline void
finalize_ssa_stmt_operands (tree stmt)
{
  finalize_ssa_defs (stmt);
  finalize_ssa_uses (stmt);
  finalize_ssa_vdefs (stmt);
  finalize_ssa_vuses (stmt);
  cleanup_build_arrays ();
}


/* Start the process of building up operand vectors.  */

static inline void
start_ssa_stmt_operands (void)
{
  gcc_assert (VEC_length (tree, build_defs) == 0);
  gcc_assert (VEC_length (tree, build_uses) == 0);
  gcc_assert (VEC_length (tree, build_vuses) == 0);
  gcc_assert (VEC_length (tree, build_vdefs) == 0);
  gcc_assert (bitmap_empty_p (build_loads));
  gcc_assert (bitmap_empty_p (build_stores));
}


/* Add DEF_P to the list of pointers to operands.  */

static inline void
append_def (tree *def_p)
{
  VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}


/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}


/* Add VAR to the set of variables that require a VDEF operator.  */

static inline void
append_vdef (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
	var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vdef_list)
	return;

      ann->in_vdef_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vdefs, var);
  bitmap_set_bit (build_stores, DECL_UID (sym));
}


/* Add VAR to the set of variables that require a VUSE operator.  */

static inline void
append_vuse (tree var)
{
  tree sym;

  if (TREE_CODE (var) != SSA_NAME)
    {
      tree mpt;
      var_ann_t ann;

      /* If VAR belongs to a memory partition, use it instead of VAR.  */
      mpt = memory_partition (var);
      if (mpt)
	var = mpt;

      /* Don't allow duplicate entries.  */
      ann = get_var_ann (var);
      if (ann->in_vuse_list || ann->in_vdef_list)
	return;

      ann->in_vuse_list = true;
      sym = var;
    }
  else
    sym = SSA_NAME_VAR (var);

  VEC_safe_push (tree, heap, build_vuses, var);
  bitmap_set_bit (build_loads, DECL_UID (sym));
}


/* REF is a tree that contains the entire pointer dereference
   expression, if available, or NULL otherwise.  ALIAS is the variable
   we are asking if REF can access.  OFFSET and SIZE come from the
   memory access expression that generated this virtual operand.  */

static bool
access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
			   HOST_WIDE_INT size)
{
  bool offsetgtz = offset > 0;
  unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
  tree base = ref ? get_base_address (ref) : NULL;

  /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
     using a call-clobbered memory tag.  By definition, call-clobbered
     memory tags can always touch .GLOBAL_VAR.  */
  if (alias == gimple_global_var (cfun))
    return true;

  /* If ALIAS is an SFT, it can't be touched if the offset
     and size of the access are not overlapping with the SFT offset and
     size.  This is only true if we are accessing through a pointer
     to a type that is the same as SFT_PARENT_VAR.  Otherwise, we may
     be accessing through a pointer to some substruct of the
     structure, and if we try to prune there, we will have the wrong
     offset, and get the wrong answer.
     i.e., we can't prune without more work if we have something like

     struct gcc_target
     {
       struct asm_out
       {
	 const char *byte_op;
	 struct asm_int_op
	 {
	   const char *hi;
	 } aligned_op;
       } asm_out;
     } targetm;

     foo = &targetm.asm_out.aligned_op;
     return foo->hi;

     SFT.1, which represents hi, will have SFT_OFFSET=32 because in
     terms of SFT_PARENT_VAR, that is where it is.
     However, the access through the foo pointer will be at offset 0.  */
  if (size != -1
      && TREE_CODE (alias) == STRUCT_FIELD_TAG
      && base
      && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
      && !overlap_subvar (offset, size, alias, NULL))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* With strict aliasing, it is impossible for a component access
     through a pointer to touch a random variable, unless that
     variable *is* a structure or a pointer.

     That is, given p->c, and some random global variable b,
     there is no legal way that p->c could be an access to b.

     Without strict aliasing on, we consider it legal to do something
     like:

     struct foos { int l; };
     int foo;
     static struct foos *getfoo(void);
     int main (void)
     {
       struct foos *f = getfoo();
       f->l = 1;
       foo = 2;
       if (f->l == 1)
	 abort();
       exit(0);
     }
     static struct foos *getfoo(void)
     { return (struct foos *)&foo; }

     (taken from 20000623-1.c)

     The docs also say/imply that access through union pointers
     is legal (but *not* if you take the address of the union member,
     i.e. the inverse), such that you can do

     typedef union {
       int d;
     } U;

     int rv;
     void breakme()
     {
       U *rv0;
       U *pretmp = (U*)&rv;
       rv0 = pretmp;
       rv0->d = 42;
     }

     To implement this, we just punt on accesses through union
     pointers entirely.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && (TREE_CODE (base) != INDIRECT_REF
	       || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
	   && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
	   && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
	   && !var_ann (alias)->is_heapvar
	   /* When the struct has the may_alias attribute, its alias set
	      is zero and we must not prune accesses to it.  */
	   && get_alias_set (base))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  /* If the offset of the access is greater than the size of one of
     the possible aliases, it can't be touching that alias, because it
     would be past the end of the structure.  */
  else if (ref
	   && flag_strict_aliasing
	   && TREE_CODE (ref) != INDIRECT_REF
	   && !MTAG_P (alias)
	   && !POINTER_TYPE_P (TREE_TYPE (alias))
	   && offsetgtz
	   && DECL_SIZE (alias)
	   && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
	   && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
    {
#ifdef ACCESS_DEBUGGING
      fprintf (stderr, "Access to ");
      print_generic_expr (stderr, ref, 0);
      fprintf (stderr, " may not touch ");
      print_generic_expr (stderr, alias, 0);
      fprintf (stderr, " in function %s\n", get_name (current_function_decl));
#endif
      return false;
    }

  return true;
}
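
/* For instance (a hypothetical case), given 'char buf[4]', an access
   known to start at offset 64 bits past its base cannot touch 'buf',
   because DECL_SIZE (buf) is only 32 bits; the last test above prunes
   such aliases.  Note that OFFSET and DECL_SIZE are both in bits.  */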


/* Add VAR to the virtual operands array.  FLAGS is as in
   get_expr_operands.  FULL_REF is a tree that contains the entire
   pointer dereference expression, if available, or NULL otherwise.
   OFFSET and SIZE come from the memory access expression that
   generated this virtual operand.  FOR_CLOBBER is true if this is
   adding a virtual operand for a call clobber.  */

static void
add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
		     tree full_ref, HOST_WIDE_INT offset,
		     HOST_WIDE_INT size, bool for_clobber)
{
  VEC(tree,gc) *aliases;
  tree sym;
  var_ann_t v_ann;

  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark the statement as having memory operands.  */
  s_ann->references_memory = true;

  /* Mark statements with volatile operands.  Optimizers should back
     off from statements having volatile operands.  */
  if (TREE_THIS_VOLATILE (sym) && s_ann)
    s_ann->has_volatile_ops = true;

  /* If the variable cannot be modified and this is a VDEF change
     it into a VUSE.  This happens when read-only variables are marked
     call-clobbered and/or aliased to writable variables.  We only do
     this for non-specific stores.

     Note that if this is a specific store, i.e. associated with a
     GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run
     into validation problems.

     This can happen when programs cast away const, leaving us with a
     store to read-only memory.  If the statement is actually executed
     at runtime, then the program is ill formed.  If the statement is
     not executed then all is well.  At the very least, we cannot ICE.  */
  if ((flags & opf_implicit) && unmodifiable_var_p (var))
    flags &= ~opf_def;

  /* The variable is not a GIMPLE register.  Add it (or its aliases) to
     virtual operands, unless the caller has specifically requested
     not to add virtual operands (used when adding operands inside an
     ADDR_EXPR expression).  */
  if (flags & opf_no_vops)
    return;

  aliases = v_ann->may_aliases;
  if (aliases == NULL)
    {
      /* The variable is not aliased or it is an alias tag.  */
      if (flags & opf_def)
	append_vdef (var);
      else
	append_vuse (var);
    }
  else
    {
      unsigned i;
      tree al;

      /* The variable is aliased.  Add its aliases to the virtual
	 operands.  */
      gcc_assert (VEC_length (tree, aliases) != 0);

      if (flags & opf_def)
	{
	  bool none_added = true;

	  for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
	    {
	      if (!access_can_touch_variable (full_ref, al, offset, size))
		continue;

	      none_added = false;
	      append_vdef (al);
	    }

	  /* If the variable is also an alias tag, add a virtual
	     operand for it, otherwise we will miss representing
	     references to the members of the variable's alias set.
	     This fixes the bug in gcc.c-torture/execute/20020503-1.c.

	     It is also necessary to add bare defs on clobbers for
	     SMT's, so that bare SMT uses caused by pruning all the
	     aliases will link up properly with calls.  In order to
	     keep the number of these bare defs we add down to the
	     minimum necessary, we keep track of which SMT's were used
	     alone in statement vdefs or VUSEs.  */
	  if (v_ann->is_aliased
	      || none_added
	      || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
		  && for_clobber))
	    {
	      append_vdef (var);
	    }
	}
      else
	{
	  bool none_added = true;
	  for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
	    {
	      if (!access_can_touch_variable (full_ref, al, offset, size))
		continue;
	      none_added = false;
	      append_vuse (al);
	    }

	  /* Similarly, append a virtual use for VAR itself, when
	     it is an alias tag.  */
	  if (v_ann->is_aliased || none_added)
	    append_vuse (var);
	}
    }
}
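
/* A small worked example of the above (the tag name SMT.4 is
   hypothetical): assume SMT.4 is a symbol memory tag whose may-alias
   set is { a, b }.  A store through SMT.4 appends VDEF <a> and
   VDEF <b>, each subject to access_can_touch_variable pruning, plus a
   VDEF <SMT.4> itself if the tag is marked is_aliased, if every alias
   was pruned, or if this is a call clobber.  */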


/* Add *VAR_P to the appropriate operand array for S_ANN.  FLAGS is as in
   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
   the statement's real operands, otherwise it is added to virtual
   operands.  */

static void
add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
  tree var, sym;
  var_ann_t v_ann;

  gcc_assert (SSA_VAR_P (*var_p) && s_ann);

  var = *var_p;
  sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
  v_ann = var_ann (sym);

  /* Mark statements with volatile operands.  */
  if (TREE_THIS_VOLATILE (sym))
    s_ann->has_volatile_ops = true;

  if (is_gimple_reg (sym))
    {
      /* The variable is a GIMPLE register.  Add it to real operands.  */
      if (flags & opf_def)
	append_def (var_p);
      else
	append_use (var_p);
    }
  else
    add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
}


/* A subroutine of get_expr_operands to handle INDIRECT_REF,
   ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.

   STMT is the statement being processed, EXPR is the INDIRECT_REF
   that got us here.

   FLAGS is as in get_expr_operands.

   FULL_REF contains the full pointer dereference expression, if we
   have it, or NULL otherwise.

   OFFSET and SIZE are the location of the access inside the
   dereferenced pointer, if known.

   RECURSE_ON_BASE should be set to true if we want to continue
   calling get_expr_operands on the base pointer, and false if
   something else will do it for us.  */

static void
get_indirect_ref_operands (tree stmt, tree expr, int flags,
			   tree full_ref,
			   HOST_WIDE_INT offset, HOST_WIDE_INT size,
			   bool recurse_on_base)
{
  tree *pptr = &TREE_OPERAND (expr, 0);
  tree ptr = *pptr;
  stmt_ann_t s_ann = stmt_ann (stmt);

  s_ann->references_memory = true;

  if (SSA_VAR_P (ptr))
    {
      struct ptr_info_def *pi = NULL;

      /* If PTR has flow-sensitive points-to information, use it.  */
      if (TREE_CODE (ptr) == SSA_NAME
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->name_mem_tag)
	{
	  /* PTR has its own memory tag.  Use it.  */
	  add_virtual_operand (pi->name_mem_tag, s_ann, flags,
			       full_ref, offset, size, false);
	}
      else
	{
	  /* If PTR is not an SSA_NAME or it doesn't have a name
	     tag, use its symbol memory tag.  */
	  var_ann_t v_ann;

	  /* If we are emitting debugging dumps, display a warning if
	     PTR is an SSA_NAME with no flow-sensitive alias
	     information.  That means that we may need to compute
	     aliasing again.  */
	  if (dump_file
	      && TREE_CODE (ptr) == SSA_NAME
	      && pi == NULL)
	    {
	      fprintf (dump_file,
		       "NOTE: no flow-sensitive alias info for ");
	      print_generic_expr (dump_file, ptr, dump_flags);
	      fprintf (dump_file, " in ");
	      print_generic_stmt (dump_file, stmt, dump_flags);
	    }

	  if (TREE_CODE (ptr) == SSA_NAME)
	    ptr = SSA_NAME_VAR (ptr);
	  v_ann = var_ann (ptr);

	  if (v_ann->symbol_mem_tag)
	    add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
				 full_ref, offset, size, false);
	}
    }
  else if (TREE_CODE (ptr) == INTEGER_CST)
    {
      /* If a constant is used as a pointer, we can't generate a real
	 operand for it but we mark the statement volatile to prevent
	 optimizations from messing things up.  */
      if (s_ann)
	s_ann->has_volatile_ops = true;
      return;
    }
  else
    {
      /* Ok, this isn't even is_gimple_min_invariant.  Something's broken.  */
      gcc_unreachable ();
    }

  /* If requested, add a USE operand for the base pointer.  */
  if (recurse_on_base)
    get_expr_operands (stmt, pptr, opf_use);
}
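
/* As an illustrative sketch: for a store '*p_1 = x_2' where p_1 carries
   flow-sensitive points-to information naming a tag NMT.8, the code
   above produces a USE of p_1 (via recurse_on_base) and hands NMT.8 to
   add_virtual_operand, yielding a VDEF of NMT.8 or of the members of
   its alias set.  The names p_1, x_2 and NMT.8 are hypothetical.  */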


/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */

static void
get_tmr_operands (tree stmt, tree expr, int flags)
{
  tree tag, ref;
  HOST_WIDE_INT offset, size, maxsize;
  subvar_t svars, sv;
  stmt_ann_t s_ann = stmt_ann (stmt);

  /* This statement references memory.  */
  s_ann->references_memory = 1;

  /* First record the real operands.  */
  get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
  get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);

  if (TMR_SYMBOL (expr))
    add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);

  tag = TMR_TAG (expr);
  if (!tag)
    {
      /* Something weird, so ensure that we will be careful.  */
      s_ann->has_volatile_ops = true;
      return;
    }

  if (DECL_P (tag))
    {
      get_expr_operands (stmt, &tag, flags);
      return;
    }

  ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
  gcc_assert (ref != NULL_TREE);
  svars = get_subvars_for_var (ref);
  for (sv = svars; sv; sv = sv->next)
    {
      bool exact;

      if (overlap_subvar (offset, maxsize, sv->var, &exact))
	add_stmt_operand (&sv->var, s_ann, flags);
    }
}


/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
   clobbered variables in the function.  */

static void
add_call_clobber_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b, not_written_b;

  /* Functions that are not const, pure or noreturn may clobber
     call-clobbered variables.  */
  if (s_ann)
    s_ann->makes_clobbering_call = true;

  /* If we created .GLOBAL_VAR earlier, just use it.  See compute_may_aliases
     for the heuristic used to decide whether to create .GLOBAL_VAR or not.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_stmt_operand (&var, s_ann, opf_def);
      return;
    }

  /* Get info for local and module level statics.  There is a bit
     set for each static if the call being processed does not read
     or write that variable.  */
  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;

  /* Add a VDEF operand for every call clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var_lookup (u);
      unsigned int escape_mask = var_ann (var)->escape_mask;
      tree real_var = var;
      bool not_read;
      bool not_written;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b,
					    DECL_UID (real_var)) : false;
      not_written = not_written_b ? bitmap_bit_p (not_written_b,
						  DECL_UID (real_var)) : false;
      gcc_assert (!unmodifiable_var_p (var));

      clobber_stats.clobbered_vars++;

      /* See if this variable is really clobbered by this function.  */

      /* Trivial case: things escaping only to pure/const calls are
	 not clobbered by non-pure-const calls, and only read by
	 pure/const calls.  */
      if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
	{
	  tree call = get_call_expr_in (stmt);
	  if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
	    {
	      add_stmt_operand (&var, s_ann, opf_use);
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	  else
	    {
	      clobber_stats.unescapable_clobbers_avoided++;
	      continue;
	    }
	}

      if (not_written)
	{
	  clobber_stats.static_write_clobbers_avoided++;
	  if (!not_read)
	    add_stmt_operand (&var, s_ann, opf_use);
	  else
	    clobber_stats.static_read_clobbers_avoided++;
	}
      else
	add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
    }
}


/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
   function.  */

static void
add_call_read_ops (tree stmt, tree callee)
{
  unsigned u;
  bitmap_iterator bi;
  stmt_ann_t s_ann = stmt_ann (stmt);
  bitmap not_read_b;

  /* If the function is not const, it may read memory.  Add a VUSE
     for .GLOBAL_VAR if it has been created.  See add_referenced_var
     for the heuristic used to decide whether to create .GLOBAL_VAR.  */
  if (gimple_global_var (cfun))
    {
      tree var = gimple_global_var (cfun);
      add_stmt_operand (&var, s_ann, opf_use);
      return;
    }

  not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;

  /* Add a VUSE for each call-clobbered variable.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
    {
      tree var = referenced_var (u);
      tree real_var = var;
      bool not_read;

      clobber_stats.readonly_clobbers++;

      /* Not read and not written are computed on regular vars, not
	 subvars, so look at the parent var if this is an SFT.  */
      if (TREE_CODE (var) == STRUCT_FIELD_TAG)
	real_var = SFT_PARENT_VAR (var);

      not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
			    : false;

      if (not_read)
	{
	  clobber_stats.static_readonly_clobbers_avoided++;
	  continue;
	}

      add_stmt_operand (&var, s_ann, opf_use | opf_implicit);
    }
}
1862
1863
1864 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1865
1866 static void
1867 get_call_expr_operands (tree stmt, tree expr)
1868 {
1869 tree op;
1870 int call_flags = call_expr_flags (expr);
1871 stmt_ann_t ann = stmt_ann (stmt);
1872
1873 ann->references_memory = true;
1874
1875 /* If aliases have been computed already, add VDEF or VUSE
1876 operands for all the symbols that have been found to be
1877 call-clobbered. */
1878 if (gimple_aliases_computed_p (cfun)
1879 && !(call_flags & ECF_NOVOPS))
1880 {
1881 /* A 'pure' or a 'const' function never call-clobbers anything.
1882 A 'noreturn' function might, but since we don't return anyway
1883 there is no point in recording that. */
1884 if (TREE_SIDE_EFFECTS (expr)
1885 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1886 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1887 else if (!(call_flags & ECF_CONST))
1888 add_call_read_ops (stmt, get_callee_fndecl (expr));
1889 }
1890
1891 /* Scan the call address, the argument list and the static chain for uses. */
1892 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
1893
1894 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1895 get_expr_operands (stmt, &TREE_VALUE (op), opf_use);
1896
1897 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
1898 }
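/* A hypothetical example for get_call_expr_operands (names invented
for illustration): for a call 'foo (a_1, b_2)' where the global 'g'
is call-clobbered, a_1 and b_2 become real USE operands, while 'g'
receives a VDEF if foo may write to memory, a VUSE if foo is pure,
and no virtual operand at all if foo is const or ECF_NOVOPS. */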
1899
1900
1901 /* Scan operands in the ASM_EXPR stmt referred to in INFO. */
1902
1903 static void
1904 get_asm_expr_operands (tree stmt)
1905 {
1906 stmt_ann_t s_ann;
1907 int i, noutputs;
1908 const char **oconstraints;
1909 const char *constraint;
1910 bool allows_mem, allows_reg, is_inout;
1911 tree link;
1912
1913 s_ann = stmt_ann (stmt);
1914 noutputs = list_length (ASM_OUTPUTS (stmt));
1915 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
1916
1917 /* Gather all output operands. */
1918 for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
1919 {
1920 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1921 oconstraints[i] = constraint;
1922 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
1923 &allows_reg, &is_inout);
1924
1925 /* This should have been split in gimplify_asm_expr. */
1926 gcc_assert (!allows_reg || !is_inout);
1927
1928 /* Memory operands are addressable. Note that STMT needs the
1929 address of this operand. */
1930 if (!allows_reg && allows_mem)
1931 {
1932 tree t = get_base_address (TREE_VALUE (link));
1933 if (t && DECL_P (t) && s_ann)
1934 add_to_addressable_set (t, &s_ann->addresses_taken);
1935 }
1936
1937 get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
1938 }
1939
1940 /* Gather all input operands. */
1941 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1942 {
1943 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1944 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
1945 &allows_mem, &allows_reg);
1946
1947 /* Memory operands are addressable. Note that STMT needs the
1948 address of this operand. */
1949 if (!allows_reg && allows_mem)
1950 {
1951 tree t = get_base_address (TREE_VALUE (link));
1952 if (t && DECL_P (t) && s_ann)
1953 add_to_addressable_set (t, &s_ann->addresses_taken);
1954 }
1955
1956 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1957 }
1958
1959 /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
1960 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1961 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1962 {
1963 unsigned i;
1964 bitmap_iterator bi;
1965
1966 s_ann->references_memory = true;
1967
1968 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
1969 {
1970 tree var = referenced_var (i);
1971 add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
1972 }
1973
1974 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
1975 {
1976 tree var = referenced_var (i);
1977
1978 /* Subvars are explicitly represented in this list, so we
1979 don't need the original to be added to the clobber ops,
1980 but the original *will* be in this list because we keep
1981 the addressability of the original variable up-to-date
1982 to avoid confusing the back-end. */
1983 if (var_can_have_subvars (var)
1984 && get_subvars_for_var (var) != NULL)
1985 continue;
1986
1987 add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
1988 }
1989 break;
1990 }
1991 }
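/* A hypothetical example for get_asm_expr_operands: given

asm volatile ("" : "=m" (x) : "m" (y) : "memory");

'x' receives a DEF and 'y' a USE, the addresses of both are recorded
in ADDRESSES_TAKEN, and the "memory" clobber adds an implicit DEF
for every call-clobbered and addressable variable. */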
1992
1993
1994 /* Scan operands for the assignment expression EXPR in statement STMT. */
1995
1996 static void
1997 get_modify_stmt_operands (tree stmt, tree expr)
1998 {
1999 /* First get operands from the RHS. */
2000 get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use);
2001
2002 /* For the LHS, use a regular definition (opf_def) for GIMPLE
2003 registers. If the LHS is a store to memory, we will need
2004 a preserving definition (VDEF).
2005
2006 Preserving definitions are those that modify a part of an
2007 aggregate object for which no subvars have been computed (or the
2008 reference does not correspond exactly to one of them). Stores
2009 through a pointer are also represented with VDEF operators.
2010
2011 We used to distinguish between preserving and killing definitions.
2012 We always emit preserving definitions now. */
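/* A hypothetical example, in the notation used by the operand
dumps: for a store through a pointer,

# SMT.1_3 = VDEF <SMT.1_2>
*p_4 = x_5;

the VDEF preserves whatever parts of SMT.1_2 the store leaves
untouched. */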
2013 get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def);
2014 }
2015
2016
2017 /* Recursively scan the expression pointed to by EXPR_P in statement
2018 STMT. FLAGS is a combination of OPF_* constants modifying how to
2019 interpret the operands found. */
2020
2021 static void
2022 get_expr_operands (tree stmt, tree *expr_p, int flags)
2023 {
2024 enum tree_code code;
2025 enum tree_code_class class;
2026 tree expr = *expr_p;
2027 stmt_ann_t s_ann = stmt_ann (stmt);
2028
2029 if (expr == NULL)
2030 return;
2031
2032 code = TREE_CODE (expr);
2033 class = TREE_CODE_CLASS (code);
2034
2035 switch (code)
2036 {
2037 case ADDR_EXPR:
2038 /* Taking the address of a variable does not represent a
2039 reference to it, but the fact that the statement takes its
2040 address will be of interest to some passes (e.g. alias
2041 resolution). */
2042 add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
2043
2044 /* If the address is invariant, there may be no interesting
2045 variable references inside. */
2046 if (is_gimple_min_invariant (expr))
2047 return;
2048
2049 /* Otherwise, there may be variables referenced inside but there
2050 should be no VUSEs created, since the referenced objects are
2051 not really accessed. The only operands that we should find
2052 here are ARRAY_REF indices which will always be real operands
2053 (GIMPLE does not allow non-registers as array indices). */
2054 flags |= opf_no_vops;
2055 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2056 return;
2057
2058 case SSA_NAME:
2059 case STRUCT_FIELD_TAG:
2060 case SYMBOL_MEMORY_TAG:
2061 case NAME_MEMORY_TAG:
2062 add_stmt_operand (expr_p, s_ann, flags);
2063 return;
2064
2065 case VAR_DECL:
2066 case PARM_DECL:
2067 case RESULT_DECL:
2068 {
2069 subvar_t svars;
2070
2071 /* Add the subvars for a variable, if it has subvars, to DEFS
2072 or USES. Otherwise, add the variable itself. Whether it
2073 goes to USES or DEFS depends on the operand flags. */
2074 if (var_can_have_subvars (expr)
2075 && (svars = get_subvars_for_var (expr)))
2076 {
2077 subvar_t sv;
2078 for (sv = svars; sv; sv = sv->next)
2079 add_stmt_operand (&sv->var, s_ann, flags);
2080 }
2081 else
2082 add_stmt_operand (expr_p, s_ann, flags);
2083
2084 return;
2085 }
2086
2087 case MISALIGNED_INDIRECT_REF:
2088 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2089 /* fall through */
2090
2091 case ALIGN_INDIRECT_REF:
2092 case INDIRECT_REF:
2093 get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
2094 return;
2095
2096 case TARGET_MEM_REF:
2097 get_tmr_operands (stmt, expr, flags);
2098 return;
2099
2100 case ARRAY_REF:
2101 case ARRAY_RANGE_REF:
2102 case COMPONENT_REF:
2103 case REALPART_EXPR:
2104 case IMAGPART_EXPR:
2105 {
2106 tree ref;
2107 HOST_WIDE_INT offset, size, maxsize;
2108 bool none = true;
2109
2110 /* This component reference becomes an access to all of the
2111 subvariables it can touch, if we can determine that, but
2112 *NOT* to the real variable itself. If we can't determine which
2113 fields we could touch, the recursion will eventually get to a
2114 variable and add *all* of its subvars, or whatever is the
2115 minimum correct subset. */
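/* A hypothetical example: if 's' has subvars SFT.1 (for s.a) and
SFT.2 (for s.b), a reference to s.a adds an operand for SFT.1 only,
while a reference whose offset cannot be determined adds operands
for both SFT.1 and SFT.2. */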
2116 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
2117 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
2118 {
2119 subvar_t sv;
2120 subvar_t svars = get_subvars_for_var (ref);
2121
2122 for (sv = svars; sv; sv = sv->next)
2123 {
2124 bool exact;
2125
2126 if (overlap_subvar (offset, maxsize, sv->var, &exact))
2127 {
2128 none = false;
2129 add_stmt_operand (&sv->var, s_ann, flags);
2131 }
2132 }
2133
2134 if (!none)
2135 flags |= opf_no_vops;
2136 }
2137 else if (TREE_CODE (ref) == INDIRECT_REF)
2138 {
2139 get_indirect_ref_operands (stmt, ref, flags, expr, offset,
2140 maxsize, false);
2141 flags |= opf_no_vops;
2142 }
2143
2144 /* Even if we found subvars above, we still need to visit the
2145 immediate uses for d in s.a[d]; if s.a has a subvar, we would
2146 otherwise miss the use of d. */
2147 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2148
2149 if (code == COMPONENT_REF)
2150 {
2151 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
2152 s_ann->has_volatile_ops = true;
2153 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2154 }
2155 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
2156 {
2157 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2158 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2159 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
2160 }
2161
2162 return;
2163 }
2164
2165 case WITH_SIZE_EXPR:
2166 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
2167 and an rvalue reference to its second argument. */
2168 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2169 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2170 return;
2171
2172 case CALL_EXPR:
2173 get_call_expr_operands (stmt, expr);
2174 return;
2175
2176 case COND_EXPR:
2177 case VEC_COND_EXPR:
2178 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
2179 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2180 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2181 return;
2182
2183 case GIMPLE_MODIFY_STMT:
2184 get_modify_stmt_operands (stmt, expr);
2185 return;
2186
2187 case CONSTRUCTOR:
2188 {
2189 /* General aggregate CONSTRUCTORs have been decomposed, but they
2190 are still in use as the COMPLEX_EXPR equivalent for vectors. */
2191 constructor_elt *ce;
2192 unsigned HOST_WIDE_INT idx;
2193
2194 for (idx = 0;
2195 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
2196 idx++)
2197 get_expr_operands (stmt, &ce->value, opf_use);
2198
2199 return;
2200 }
2201
2202 case BIT_FIELD_REF:
2203 case TRUTH_NOT_EXPR:
2204 case VIEW_CONVERT_EXPR:
2205 do_unary:
2206 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2207 return;
2208
2209 case TRUTH_AND_EXPR:
2210 case TRUTH_OR_EXPR:
2211 case TRUTH_XOR_EXPR:
2212 case COMPOUND_EXPR:
2213 case OBJ_TYPE_REF:
2214 case ASSERT_EXPR:
2215 do_binary:
2216 {
2217 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2218 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2219 return;
2220 }
2221
2222 case DOT_PROD_EXPR:
2223 case REALIGN_LOAD_EXPR:
2224 {
2225 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2226 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2227 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
2228 return;
2229 }
2230
2231 case BLOCK:
2232 case FUNCTION_DECL:
2233 case EXC_PTR_EXPR:
2234 case FILTER_EXPR:
2235 case LABEL_DECL:
2236 case CONST_DECL:
2237 case OMP_PARALLEL:
2238 case OMP_SECTIONS:
2239 case OMP_FOR:
2240 case OMP_SINGLE:
2241 case OMP_MASTER:
2242 case OMP_ORDERED:
2243 case OMP_CRITICAL:
2244 case OMP_RETURN:
2245 case OMP_CONTINUE:
2246 /* Expressions that make no memory references. */
2247 return;
2248
2249 default:
2250 if (class == tcc_unary)
2251 goto do_unary;
2252 if (class == tcc_binary || class == tcc_comparison)
2253 goto do_binary;
2254 if (class == tcc_constant || class == tcc_type)
2255 return;
2256 }
2257
2258 /* If we get here, something has gone wrong. */
2259 #ifdef ENABLE_CHECKING
2260 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
2261 debug_tree (expr);
2262 fputs ("\n", stderr);
2263 #endif
2264 gcc_unreachable ();
2265 }
2266
2267
2268 /* Parse STMT looking for operands. When finished, the various
2269 build_* operand vectors will have potential operands in them. */
2270
2271 static void
2272 parse_ssa_operands (tree stmt)
2273 {
2274 enum tree_code code;
2275
2276 code = TREE_CODE (stmt);
2277 switch (code)
2278 {
2279 case GIMPLE_MODIFY_STMT:
2280 get_modify_stmt_operands (stmt, stmt);
2281 break;
2282
2283 case COND_EXPR:
2284 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use);
2285 break;
2286
2287 case SWITCH_EXPR:
2288 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use);
2289 break;
2290
2291 case ASM_EXPR:
2292 get_asm_expr_operands (stmt);
2293 break;
2294
2295 case RETURN_EXPR:
2296 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use);
2297 break;
2298
2299 case GOTO_EXPR:
2300 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use);
2301 break;
2302
2303 case LABEL_EXPR:
2304 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use);
2305 break;
2306
2307 case BIND_EXPR:
2308 case CASE_LABEL_EXPR:
2309 case TRY_CATCH_EXPR:
2310 case TRY_FINALLY_EXPR:
2311 case EH_FILTER_EXPR:
2312 case CATCH_EXPR:
2313 case RESX_EXPR:
2314 /* These nodes contain no variable references. */
2315 break;
2316
2317 default:
2318 /* Notice that if get_expr_operands tries to use &STMT as the
2319 operand pointer (which may only happen for USE operands), we
2320 will fail in add_stmt_operand. This default will handle
2321 statements like empty statements, or CALL_EXPRs that may
2322 appear on the RHS of a statement or as statements themselves. */
2323 get_expr_operands (stmt, &stmt, opf_use);
2324 break;
2325 }
2326 }
2327
2328
2329 /* Create an operands cache for STMT. */
2330
2331 static void
2332 build_ssa_operands (tree stmt)
2333 {
2334 stmt_ann_t ann = get_stmt_ann (stmt);
2335
2336 /* Initially assume that the statement has no volatile operands and
2337 makes no memory references. */
2338 ann->has_volatile_ops = false;
2339 ann->references_memory = false;
2340
2341 start_ssa_stmt_operands ();
2342 parse_ssa_operands (stmt);
2343 operand_build_sort_virtual (build_vuses);
2344 operand_build_sort_virtual (build_vdefs);
2345 finalize_ssa_stmt_operands (stmt);
2346
2347 /* For added safety, assume that statements with volatile operands
2348 also reference memory. */
2349 if (ann->has_volatile_ops)
2350 ann->references_memory = true;
2351 }
2352
2353
2354 /* Free any operand vectors in OPS. */
2355
2356 void
2357 free_ssa_operands (stmt_operands_p ops)
2358 {
2359 ops->def_ops = NULL;
2360 ops->use_ops = NULL;
2361 ops->vdef_ops = NULL;
2362 ops->vuse_ops = NULL;
2363 BITMAP_FREE (ops->loads);
2364 BITMAP_FREE (ops->stores);
2365 }
2366
2367
2368 /* Update the operand cache of statement STMT. */
2369
2370 void
2371 update_stmt_operands (tree stmt)
2372 {
2373 stmt_ann_t ann = get_stmt_ann (stmt);
2374
2375 /* If update_stmt_operands is called before SSA is initialized, do
2376 nothing. */
2377 if (!ssa_operands_active ())
2378 return;
2379
2380 /* The optimizers cannot handle statements that are nothing but a
2381 _DECL. This indicates a bug in the gimplifier. */
2382 gcc_assert (!SSA_VAR_P (stmt));
2383
2384 timevar_push (TV_TREE_OPS);
2385
2386 gcc_assert (ann->modified);
2387 build_ssa_operands (stmt);
2388 ann->modified = 0;
2389
2390 timevar_pop (TV_TREE_OPS);
2391 }
2392
2393
2394 /* Copy virtual operands from SRC to DEST. */
2395
2396 void
2397 copy_virtual_operands (tree dest, tree src)
2398 {
2399 int i, n;
2400 voptype_p src_vuses, dest_vuses;
2401 voptype_p src_vdefs, dest_vdefs;
2402 struct voptype_d vuse;
2403 struct voptype_d vdef;
2404 stmt_ann_t dest_ann;
2405
2406 VDEF_OPS (dest) = NULL;
2407 VUSE_OPS (dest) = NULL;
2408
2409 dest_ann = get_stmt_ann (dest);
2410 BITMAP_FREE (dest_ann->operands.loads);
2411 BITMAP_FREE (dest_ann->operands.stores);
2412
2413 if (LOADED_SYMS (src))
2414 {
2415 dest_ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
2416 bitmap_copy (dest_ann->operands.loads, LOADED_SYMS (src));
2417 }
2418
2419 if (STORED_SYMS (src))
2420 {
2421 dest_ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
2422 bitmap_copy (dest_ann->operands.stores, STORED_SYMS (src));
2423 }
2424
2425 /* Copy all the VUSE operators and corresponding operands. */
2426 dest_vuses = &vuse;
2427 for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next)
2428 {
2429 n = VUSE_NUM (src_vuses);
2430 dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses);
2431 for (i = 0; i < n; i++)
2432 SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));
2433
2434 if (VUSE_OPS (dest) == NULL)
2435 VUSE_OPS (dest) = vuse.next;
2436 }
2437
2438 /* Copy all the VDEF operators and corresponding operands. */
2439 dest_vdefs = &vdef;
2440 for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next)
2441 {
2442 n = VUSE_NUM (src_vdefs);
2443 dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs);
2444 VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
2445 for (i = 0; i < n; i++)
2446 SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));
2447
2448 if (VDEF_OPS (dest) == NULL)
2449 VDEF_OPS (dest) = vdef.next;
2450 }
2451 }
2452
2453
2454 /* Specifically for use in DOM's expression analysis. Given a store, we
2455 create an artificial stmt which looks like a load from the store; this
2456 can be used to eliminate redundant loads. OLD_STMT is the store
2457 stmt, and NEW_STMT is the new load, which represents a load of the
2458 values stored. */
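/* A hypothetical example: for the store

# a_6 = VDEF <a_5>
a.b = x_1;

the artificial load stmt is given a VUSE of a_6, the VDEF result,
so DOM can recognize later loads of the stored values as redundant. */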
2459
2460 void
2461 create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt)
2462 {
2463 tree op;
2464 ssa_op_iter iter;
2465 use_operand_p use_p;
2466 unsigned i;
2467
2468 get_stmt_ann (new_stmt);
2469
2470 /* Process NEW_STMT looking for operands. */
2471 start_ssa_stmt_operands ();
2472 parse_ssa_operands (new_stmt);
2473
2474 for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
2475 if (TREE_CODE (op) != SSA_NAME)
2476 var_ann (op)->in_vuse_list = false;
2477
2478 for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
2479 if (TREE_CODE (op) != SSA_NAME)
2480 var_ann (op)->in_vdef_list = false;
2481
2482 /* Remove any virtual operands that were found. */
2483 VEC_truncate (tree, build_vdefs, 0);
2484 VEC_truncate (tree, build_vuses, 0);
2485
2486 /* For each VDEF on the original statement, we want to create a
2487 VUSE of the VDEF result operand on the new statement. */
2488 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
2489 append_vuse (op);
2490
2491 finalize_ssa_stmt_operands (new_stmt);
2492
2493 /* All uses in this fake stmt must not be in the immediate use lists. */
2494 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2495 delink_imm_use (use_p);
2496 }
2497
2498
2499 /* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made
2500 to check the validity of the swap operation. */
2501
2502 void
2503 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2504 {
2505 tree op0, op1;
2506 op0 = *exp0;
2507 op1 = *exp1;
2508
2509 /* If the operand cache is active, attempt to preserve the relative
2510 positions of these two operands in their respective immediate use
2511 lists. */
2512 if (ssa_operands_active () && op0 != op1)
2513 {
2514 use_optype_p use0, use1, ptr;
2515 use0 = use1 = NULL;
2516
2517 /* Find the 2 operands in the cache, if they are there. */
2518 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2519 if (USE_OP_PTR (ptr)->use == exp0)
2520 {
2521 use0 = ptr;
2522 break;
2523 }
2524
2525 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2526 if (USE_OP_PTR (ptr)->use == exp1)
2527 {
2528 use1 = ptr;
2529 break;
2530 }
2531
2532 /* Unless both uses have operand entries in the cache, there isn't
2533 much we can do at this point; presumably we don't need to worry. */
2534 if (use0 && use1)
2535 {
2536 tree *tmp = USE_OP_PTR (use1)->use;
2537 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2538 USE_OP_PTR (use0)->use = tmp;
2539 }
2540 }
2541
2542 /* Now swap the data. */
2543 *exp0 = op1;
2544 *exp1 = op0;
2545 }
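/* A hypothetical use: a pass canonicalizing a commutative RHS might
call

swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
&TREE_OPERAND (rhs, 1));

and both operands keep their positions in the immediate use lists of
their respective SSA names. */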
2546
2547
2548 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
2549 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
2550 a single variable whose address has been taken or any other valid
2551 GIMPLE memory reference (structure reference, array, etc.). If the
2552 base address of REF is a decl that has sub-variables, also add all
2553 of its sub-variables. */
2554
2555 void
2556 add_to_addressable_set (tree ref, bitmap *addresses_taken)
2557 {
2558 tree var;
2559 subvar_t svars;
2560
2561 gcc_assert (addresses_taken);
2562
2563 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2564 as the only thing we take the address of. If VAR is a structure,
2565 taking the address of a field means that the whole structure may
2566 be referenced using pointer arithmetic. See PR 21407 and the
2567 ensuing mailing list discussion. */
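/* E.g. (a hypothetical statement): for 'p_1 = &s.f', REF is 's.f',
but it is the base 's', together with all of its sub-variables, that
is marked addressable below. */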
2568 var = get_base_address (ref);
2569 if (var && SSA_VAR_P (var))
2570 {
2571 if (*addresses_taken == NULL)
2572 *addresses_taken = BITMAP_GGC_ALLOC ();
2573
2574 if (var_can_have_subvars (var)
2575 && (svars = get_subvars_for_var (var)))
2576 {
2577 subvar_t sv;
2578 for (sv = svars; sv; sv = sv->next)
2579 {
2580 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
2581 TREE_ADDRESSABLE (sv->var) = 1;
2582 }
2583 }
2584 else
2585 {
2586 bitmap_set_bit (*addresses_taken, DECL_UID (var));
2587 TREE_ADDRESSABLE (var) = 1;
2588 }
2589 }
2590 }
2591
2592
2593 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2594 Return TRUE if there is a problem and emit an error message to F. */
2595
2596 bool
2597 verify_imm_links (FILE *f, tree var)
2598 {
2599 use_operand_p ptr, prev, list;
2600 int count;
2601
2602 gcc_assert (TREE_CODE (var) == SSA_NAME);
2603
2604 list = &(SSA_NAME_IMM_USE_NODE (var));
2605 gcc_assert (list->use == NULL);
2606
2607 if (list->prev == NULL)
2608 {
2609 gcc_assert (list->next == NULL);
2610 return false;
2611 }
2612
2613 prev = list;
2614 count = 0;
2615 for (ptr = list->next; ptr != list; )
2616 {
2617 if (prev != ptr->prev)
2618 goto error;
2619
2620 if (ptr->use == NULL)
2621 goto error; /* Either a second list root or an iterator's SAFE guard node. */
2622 else if (*(ptr->use) != var)
2623 goto error;
2624
2625 prev = ptr;
2626 ptr = ptr->next;
2627
2628 /* Avoid infinite loops. 50,000,000 uses probably indicates a
2629 problem. */
2630 if (count++ > 50000000)
2631 goto error;
2632 }
2633
2634 /* Verify list in the other direction. */
2635 prev = list;
2636 for (ptr = list->prev; ptr != list; )
2637 {
2638 if (prev != ptr->next)
2639 goto error;
2640 prev = ptr;
2641 ptr = ptr->prev;
2642 if (count-- < 0)
2643 goto error;
2644 }
2645
2646 if (count != 0)
2647 goto error;
2648
2649 return false;
2650
2651 error:
2652 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2653 {
2654 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2655 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2656 }
2657 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2658 (void *)ptr->use);
2659 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2660 fprintf (f, "\n");
2661 return true;
2662 }
2663
2664
2665 /* Dump all the immediate uses of VAR to FILE. */
2666
2667 void
2668 dump_immediate_uses_for (FILE *file, tree var)
2669 {
2670 imm_use_iterator iter;
2671 use_operand_p use_p;
2672
2673 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2674
2675 print_generic_expr (file, var, TDF_SLIM);
2676 fprintf (file, " : -->");
2677 if (has_zero_uses (var))
2678 fprintf (file, " no uses.\n");
2679 else
2680 if (has_single_use (var))
2681 fprintf (file, " single use.\n");
2682 else
2683 fprintf (file, "%d uses.\n", num_imm_uses (var));
2684
2685 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2686 {
2687 if (use_p->stmt == NULL && use_p->use == NULL)
2688 fprintf (file, "***end of stmt iterator marker***\n");
2689 else
2690 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2691 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS|TDF_MEMSYMS);
2692 else
2693 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2694 }
2695 fprintf (file, "\n");
2696 }
2697
2698
2699 /* Dump all the immediate uses to FILE. */
2700
2701 void
2702 dump_immediate_uses (FILE *file)
2703 {
2704 tree var;
2705 unsigned int x;
2706
2707 fprintf (file, "Immediate_uses: \n\n");
2708 for (x = 1; x < num_ssa_names; x++)
2709 {
2710 var = ssa_name (x);
2711 if (!var)
2712 continue;
2713 dump_immediate_uses_for (file, var);
2714 }
2715 }
2716
2717
2718 /* Dump def-use edges on stderr. */
2719
2720 void
2721 debug_immediate_uses (void)
2722 {
2723 dump_immediate_uses (stderr);
2724 }
2725
2726
2727 /* Dump def-use edges for VAR on stderr. */
2728
2729 void
2730 debug_immediate_uses_for (tree var)
2731 {
2732 dump_immediate_uses_for (stderr, var);
2733 }
2734
2735
2736 /* Create a new change buffer for the statement pointed to by STMT_P and
2737 push the buffer into SCB_STACK. Each change buffer
2738 records state information needed to determine what changed in the
2739 statement. Mainly, this keeps track of symbols that may need to be
2740 put into SSA form, SSA name replacements and other information
2741 needed to keep the SSA form up to date. */
2742
2743 void
2744 push_stmt_changes (tree *stmt_p)
2745 {
2746 tree stmt;
2747 scb_t buf;
2748
2749 stmt = *stmt_p;
2750
2751 /* It makes no sense to keep track of PHI nodes. */
2752 if (TREE_CODE (stmt) == PHI_NODE)
2753 return;
2754
2755 buf = xmalloc (sizeof *buf);
2756 memset (buf, 0, sizeof *buf);
2757
2758 buf->stmt_p = stmt_p;
2759
2760 if (stmt_references_memory_p (stmt))
2761 {
2762 tree op;
2763 ssa_op_iter i;
2764
2765 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
2766 {
2767 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2768 if (buf->loads == NULL)
2769 buf->loads = BITMAP_ALLOC (NULL);
2770 bitmap_set_bit (buf->loads, DECL_UID (sym));
2771 }
2772
2773 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
2774 {
2775 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2776 if (buf->stores == NULL)
2777 buf->stores = BITMAP_ALLOC (NULL);
2778 bitmap_set_bit (buf->stores, DECL_UID (sym));
2779 }
2780 }
2781
2782 VEC_safe_push (scb_t, heap, scb_stack, buf);
2783 }
2784
2785
2786 /* Given two sets S1 and S2, mark for renaming the symbols that differ
2787 between them, i.e., the symmetric difference (S1 & ~S2) | (S2 & ~S1). */
2788
2789 static void
2790 mark_difference_for_renaming (bitmap s1, bitmap s2)
2791 {
2792 if (s1 == NULL && s2 == NULL)
2793 return;
2794
2795 if (s1 && s2 == NULL)
2796 mark_set_for_renaming (s1);
2797 else if (s1 == NULL && s2)
2798 mark_set_for_renaming (s2);
2799 else if (!bitmap_equal_p (s1, s2))
2800 {
2801 bitmap t1 = BITMAP_ALLOC (NULL);
2802 bitmap t2 = BITMAP_ALLOC (NULL);
2803
2804 bitmap_and_compl (t1, s1, s2);
2805 bitmap_and_compl (t2, s2, s1);
2806 bitmap_ior_into (t1, t2);
2807 mark_set_for_renaming (t1);
2808
2809 BITMAP_FREE (t1);
2810 BITMAP_FREE (t2);
2811 }
2812 }
2813
2814
2815 /* Pop the top SCB from SCB_STACK and act on the differences between
2816 what was recorded by push_stmt_changes and the current state of
2817 the statement. */
2818
2819 void
2820 pop_stmt_changes (tree *stmt_p)
2821 {
2822 tree op, stmt;
2823 ssa_op_iter iter;
2824 bitmap loads, stores;
2825 scb_t buf;
2826
2827 stmt = *stmt_p;
2828
2829 /* It makes no sense to keep track of PHI nodes. */
2830 if (TREE_CODE (stmt) == PHI_NODE)
2831 return;
2832
2833 buf = VEC_pop (scb_t, scb_stack);
2834 gcc_assert (stmt_p == buf->stmt_p);
2835
2836 /* Force an operand re-scan on the statement and mark any newly
2837 exposed variables. */
2838 update_stmt (stmt);
2839
2840 /* Determine whether any memory symbols need to be renamed. If the
2841 sets of loads and stores are different after the statement is
2842 modified, then the affected symbols need to be renamed.
2843
2844 Note that it may be possible for the statement to not reference
2845 memory anymore, but we still need to act on the differences in
2846 the sets of symbols. */
2847 loads = stores = NULL;
2848 if (stmt_references_memory_p (stmt))
2849 {
2850 tree op;
2851 ssa_op_iter i;
2852
2853 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
2854 {
2855 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2856 if (loads == NULL)
2857 loads = BITMAP_ALLOC (NULL);
2858 bitmap_set_bit (loads, DECL_UID (sym));
2859 }
2860
2861 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
2862 {
2863 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2864 if (stores == NULL)
2865 stores = BITMAP_ALLOC (NULL);
2866 bitmap_set_bit (stores, DECL_UID (sym));
2867 }
2868 }
2869
2870 /* If LOADS is different from BUF->LOADS, the affected
2871 symbols need to be marked for renaming. */
2872 mark_difference_for_renaming (loads, buf->loads);
2873
2874 /* Similarly for STORES and BUF->STORES. */
2875 mark_difference_for_renaming (stores, buf->stores);
2876
2877 /* Mark all the naked GIMPLE register operands for renaming. */
2878 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
2879 if (DECL_P (op))
2880 mark_sym_for_renaming (op);
2881
2882 /* FIXME, need to add more finalizers here. Cleanup EH info,
2883 recompute invariants for address expressions, add
2884 SSA replacement mappings, etc. For instance, given
2885 testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of
2886 the form:
2887
2888 # SMT.4_20 = VDEF <SMT.4_16>
2889 D.1576_11 = 1.0e+0;
2890
2891 So, the VDEF will disappear, but instead of marking SMT.4 for
2892 renaming it would be far more efficient to establish a
2893 replacement mapping that would replace every reference of
2894 SMT.4_20 with SMT.4_16. */
2895
2896 /* Free memory used by the buffer. */
2897 BITMAP_FREE (buf->loads);
2898 BITMAP_FREE (buf->stores);
2899 BITMAP_FREE (loads);
2900 BITMAP_FREE (stores);
2901 buf->stmt_p = NULL;
2902 free (buf);
2903 }
2904
2905
2906 /* Discard the topmost change buffer from SCB_STACK. This is useful
2907 when the caller realizes that it did not actually modify the
2908 statement, as it avoids an expensive operand re-scan. */
2909
2910 void
2911 discard_stmt_changes (tree *stmt_p)
2912 {
2913 scb_t buf;
2914 tree stmt;
2915
2916 /* It makes no sense to keep track of PHI nodes. */
2917 stmt = *stmt_p;
2918 if (TREE_CODE (stmt) == PHI_NODE)
2919 return;
2920
2921 buf = VEC_pop (scb_t, scb_stack);
2922 gcc_assert (stmt_p == buf->stmt_p);
2923
2924 /* Free memory used by the buffer. */
2925 BITMAP_FREE (buf->loads);
2926 BITMAP_FREE (buf->stores);
2927 buf->stmt_p = NULL;
2928 free (buf);
2929 }
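/* A hypothetical sketch of the intended usage of the change buffer
API:

push_stmt_changes (&stmt);
... transform *stmt ...
if (something_changed)
pop_stmt_changes (&stmt);
else
discard_stmt_changes (&stmt);

where 'something_changed' stands for whatever test the caller uses
to detect that a modification took place. */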
2930
2931
2932 /* Returns true if statement STMT may access memory. */
2933
2934 bool
2935 stmt_references_memory_p (tree stmt)
2936 {
2937 if (!gimple_ssa_operands (cfun)->ops_active || TREE_CODE (stmt) == PHI_NODE)
2938 return false;
2939
2940 return stmt_ann (stmt)->references_memory;
2941 }
2942
2943
2944 /* Return the memory partition tag (MPT) associated with memory
2945 symbol SYM. From a correctness standpoint, memory partitions can
2946 be assigned in any arbitrary fashion as long as this rule is
2947 observed: Given two memory partitions MPT.i and MPT.j, they must
2948 not contain symbols in common.
2949
2950 Memory partitions are used when putting the program into Memory-SSA
2951 form. In particular, in Memory-SSA PHI nodes are not computed for
2952 individual memory symbols. They are computed for memory
2953 partitions. This reduces the amount of PHI nodes in the SSA graph
2954 at the expense of precision (i.e., it makes unrelated stores affect
2955 each other).
2956
2957 However, it is possible to increase precision by changing this
2958 partitioning scheme. For instance, if the partitioning scheme is
2959 such that get_mpt_for is the identity function (that is,
2960 get_mpt_for (s) = s), this will result in ultimate precision at the
2961 expense of huge SSA webs.
2962
2963 At the other extreme, a partitioning scheme that groups all the
2964 symbols in the same set results in minimal SSA webs and almost
2965 total loss of precision. */
2966
2967 tree
2968 get_mpt_for (tree sym)
2969 {
2970 tree mpt;
2971
2972 /* Don't create a new tag unnecessarily. */
2973 mpt = memory_partition (sym);
2974 if (mpt == NULL_TREE)
2975 {
2976 mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
2977 TREE_ADDRESSABLE (mpt) = 0;
2978 MTAG_GLOBAL (mpt) = 1;
2979 add_referenced_var (mpt);
2980 VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
2981 MPT_SYMBOLS (mpt) = BITMAP_ALLOC (&operands_bitmap_obstack);
2982 set_memory_partition (sym, mpt);
2983 }
2984
2985 return mpt;
2986 }
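/* A hypothetical example: with partitions MPT.1 = { a, b } and
MPT.2 = { c }, stores to 'a' and 'b' both define MPT.1 and thus
interfere with each other in the SSA web, while stores to 'c' remain
independent of them. */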
2987
2988
2989 /* Dump memory partition information to FILE. */
2990
2991 void
2992 dump_memory_partitions (FILE *file)
2993 {
2994 unsigned i, npart;
2995 unsigned long nsyms;
2996 tree mpt;
2997
2998 fprintf (file, "\nMemory partitions\n\n");
2999 for (i = 0, npart = 0, nsyms = 0;
3000 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
3001 i++)
3002 {
3003 if (mpt)
3004 {
3005 bitmap syms = MPT_SYMBOLS (mpt);
3006 unsigned long n = bitmap_count_bits (syms);
3007
3008 fprintf (file, "#%u: ", i);
3009 print_generic_expr (file, mpt, 0);
3010 fprintf (file, ": %lu elements: ", n);
3011 dump_decl_set (file, syms);
3012 npart++;
3013 nsyms += n;
3014 }
3015 }
3016
3017 fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
3018 }
3019
3020
3021 /* Dump memory partition information to stderr. */
3022
3023 void
3024 debug_memory_partitions (void)
3025 {
3026 dump_memory_partitions (stderr);
3027 }