1 /* SSA operands management for trees.
2 Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "diagnostic.h"
29 #include "tree-flow.h"
30 #include "tree-inline.h"
31 #include "tree-pass.h"
32 #include "ggc.h"
33 #include "timevar.h"
34 #include "toplev.h"
35 #include "langhooks.h"
36 #include "ipa-reference.h"
37
38 /* This file contains the code required to manage the operands cache of the
39 SSA optimizer. For every stmt, we maintain an operand cache in the stmt
40 annotation. This cache contains operands that will be of interest to
41 optimizers and other passes wishing to manipulate the IL.
42
43     The operand types are broken up into REAL and VIRTUAL operands.  The real
44 operands are represented as pointers into the stmt's operand tree. Thus
45 any manipulation of the real operands will be reflected in the actual tree.
46 Virtual operands are represented solely in the cache, although the base
47 variable for the SSA_NAME may, or may not occur in the stmt's tree.
48 Manipulation of the virtual operands will not be reflected in the stmt tree.
49
50 The routines in this file are concerned with creating this operand cache
51 from a stmt tree.
52
53     The operand tree is then parsed by the various get_* routines which look
54 through the stmt tree for the occurrence of operands which may be of
55 interest, and calls are made to the append_* routines whenever one is
56     found.  There are 4 of these routines, each representing one of the
57     4 types of operands: Defs, Uses, Virtual Uses, and Virtual Defs (VDEFs).
58
59 The append_* routines check for duplication, and simply keep a list of
60 unique objects for each operand type in the build_* extendable vectors.
61
62 Once the stmt tree is completely parsed, the finalize_ssa_operands()
63 routine is called, which proceeds to perform the finalization routine
64 on each of the 4 operand vectors which have been built up.
65
66 If the stmt had a previous operand cache, the finalization routines
67 attempt to match up the new operands with the old ones. If it's a perfect
68 match, the old vector is simply reused. If it isn't a perfect match, then
69 a new vector is created and the new operands are placed there. For
70     virtual operands, if the previous cache had an SSA_NAME version of a
71     variable, and that same variable occurs in the new operand cache, then
72     the new cache vector will also get the same SSA_NAME.
73
74 i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
75 vector for VUSE, then the new vector will also be modified such that
76 it contains 'a_5' rather than 'a'. */
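
/* As a usage sketch (the iterator macros come from tree-ssa-operands.h),
   a pass that wants to walk the operands cached here for a statement
   would typically do something like:

     ssa_op_iter iter;
     tree op;

     FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_USES)
       process_use (op);

   where process_use stands for a hypothetical pass-local routine.  */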
77
78
79 /* Structure storing statistics on how many call clobbers we have, and
80     how many were avoided.  */
81
82 static struct
83 {
84 /* Number of call-clobbered ops we attempt to add to calls in
85      add_call_clobber_ops.  */
86 unsigned int clobbered_vars;
87
88 /* Number of write-clobbers (VDEFs) avoided by using
89 not_written information. */
90 unsigned int static_write_clobbers_avoided;
91
92 /* Number of reads (VUSEs) avoided by using not_read information. */
93 unsigned int static_read_clobbers_avoided;
94
95 /* Number of write-clobbers avoided because the variable can't escape to
96 this call. */
97 unsigned int unescapable_clobbers_avoided;
98
99 /* Number of read-only uses we attempt to add to calls in
100      add_call_read_ops.  */
101 unsigned int readonly_clobbers;
102
103 /* Number of read-only uses we avoid using not_read information. */
104 unsigned int static_readonly_clobbers_avoided;
105 } clobber_stats;
106
107
108 /* Flags to describe operand properties in helpers. */
109
110 /* By default, operands are loaded. */
111 #define opf_use 0
112
113 /* Operand is the target of an assignment expression or a
114 call-clobbered variable. */
115 #define opf_def (1 << 0)
116
117 /* No virtual operands should be created in the expression. This is used
118 when traversing ADDR_EXPR nodes which have different semantics than
119 other expressions. Inside an ADDR_EXPR node, the only operands that we
120 need to consider are indices into arrays. For instance, &a.b[i] should
121 generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
122 VUSE for 'b'. */
123 #define opf_no_vops (1 << 1)
124
125 /* Operand is an implicit reference. This is used to distinguish
126 explicit assignments in the form of GIMPLE_MODIFY_STMT from
127 clobbering sites like function calls or ASM_EXPRs. */
128 #define opf_implicit (1 << 2)
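
/* For instance, when scanning the GIMPLE_MODIFY_STMT "a = b + c", the LHS
   is visited with opf_def and the RHS with opf_use, while the implicit
   clobbers added for calls and "memory" asms use opf_def | opf_implicit
   (see get_modify_stmt_operands and get_asm_expr_operands below).  */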
129
130 /* Array for building all the def operands. */
131 static VEC(tree,heap) *build_defs;
132
133 /* Array for building all the use operands. */
134 static VEC(tree,heap) *build_uses;
135
136 /* Set for building all the VDEF operands. */
137 static VEC(tree,heap) *build_vdefs;
138
139 /* Set for building all the VUSE operands. */
140 static VEC(tree,heap) *build_vuses;
141
142 /* Set for building all the loaded symbols. */
143 static bitmap build_loads;
144
145 /* Set for building all the stored symbols. */
146 static bitmap build_stores;
147
148 static void get_expr_operands (tree, tree *, int);
149
150 /* Number of functions with initialized ssa_operands. */
151 static int n_initialized = 0;
152
153 /* Statement change buffer. Data structure used to record state
154 information for statements. This is used to determine what needs
155 to be done in order to update the SSA web after a statement is
156 modified by a pass. If STMT is a statement that has just been
157 created, or needs to be folded via fold_stmt, or anything that
158 changes its physical structure then the pass should:
159
160 1- Call push_stmt_changes (&stmt) to record the current state of
161 STMT before any modifications are made.
162
163 2- Make all appropriate modifications to the statement.
164
165 3- Call pop_stmt_changes (&stmt) to find new symbols that
166 need to be put in SSA form, SSA name mappings for names that
167 have disappeared, recompute invariantness for address
168 expressions, cleanup EH information, etc.
169
170 If it is possible to determine that the statement was not modified,
171 instead of calling pop_stmt_changes it is quicker to call
172 discard_stmt_changes to avoid the expensive and unnecessary operand
173 re-scan and change comparison. */
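
/* A minimal usage sketch for a pass rewriting the RHS of a
   GIMPLE_MODIFY_STMT (new_rhs stands for whatever expression the pass
   wants to install):

     push_stmt_changes (&stmt);
     GIMPLE_STMT_OPERAND (stmt, 1) = new_rhs;
     pop_stmt_changes (&stmt);

   or, if the pass decides it did not modify STMT after all, it calls
   discard_stmt_changes (&stmt) instead of pop_stmt_changes.  */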
174
175 struct scb_d
176 {
177 /* Pointer to the statement being modified. */
178 tree *stmt_p;
179
180 /* If the statement references memory these are the sets of symbols
181 loaded and stored by the statement. */
182 bitmap loads;
183 bitmap stores;
184 };
185
186 typedef struct scb_d *scb_t;
187 DEF_VEC_P(scb_t);
188 DEF_VEC_ALLOC_P(scb_t,heap);
189
190 /* Stack of statement change buffers (SCB). Every call to
191 push_stmt_changes pushes a new buffer onto the stack. Calls to
192 pop_stmt_changes pop a buffer off of the stack and compute the set
193 of changes for the popped statement. */
194 static VEC(scb_t,heap) *scb_stack;
195
196 /* Return the DECL_UID of the base variable of T. */
197
198 static inline unsigned
199 get_name_decl (tree t)
200 {
201 if (TREE_CODE (t) != SSA_NAME)
202 return DECL_UID (t);
203 else
204 return DECL_UID (SSA_NAME_VAR (t));
205 }
206
207
208 /* Comparison function for qsort used in operand_build_sort_virtual. */
209
210 static int
211 operand_build_cmp (const void *p, const void *q)
212 {
213 tree e1 = *((const tree *)p);
214 tree e2 = *((const tree *)q);
215 unsigned int u1,u2;
216
217 u1 = get_name_decl (e1);
218 u2 = get_name_decl (e2);
219
220 /* We want to sort in ascending order. They can never be equal. */
221 #ifdef ENABLE_CHECKING
222 gcc_assert (u1 != u2);
223 #endif
224 return (u1 > u2 ? 1 : -1);
225 }
226
227
228 /* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
229
230 static inline void
231 operand_build_sort_virtual (VEC(tree,heap) *list)
232 {
233 int num = VEC_length (tree, list);
234
235 if (num < 2)
236 return;
237
238 if (num == 2)
239 {
240 if (get_name_decl (VEC_index (tree, list, 0))
241 > get_name_decl (VEC_index (tree, list, 1)))
242 {
243 /* Swap elements if in the wrong order. */
244 tree tmp = VEC_index (tree, list, 0);
245 VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
246 VEC_replace (tree, list, 1, tmp);
247 }
248 return;
249 }
250
251 /* There are 3 or more elements, call qsort. */
252 qsort (VEC_address (tree, list),
253 VEC_length (tree, list),
254 sizeof (tree),
255 operand_build_cmp);
256 }
257
258
259 /* Return true if the SSA operands cache is active. */
260
261 bool
262 ssa_operands_active (void)
263 {
264 return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
265 }
266
267
268 /* VOPs are of variable size, so the free list maps "free buckets" to the
269 following table:
270 bucket # operands
271 ------ ----------
272 0 1
273 1 2
274 ...
275 15 16
276 16 17-24
277 17 25-32
278 18 33-40
279 ...
280 29 121-128
281 Any VOPs larger than this are simply added to the largest bucket when they
282 are freed. */
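
/* For example, a virtual operator with 20 operand slots maps to bucket
   14 + (20 - 1) / 8 == 16 in vop_free_bucket_index, and
   vop_free_bucket_size (16) == (16 - 13) * 8 == 24, so a request for 20
   slots is carved from (and later freed into) the 24-slot bucket.  */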
283
284
285 /* Return the number of operands used in bucket BUCKET. */
286
287 static inline int
288 vop_free_bucket_size (int bucket)
289 {
290 #ifdef ENABLE_CHECKING
291 gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS);
292 #endif
293 if (bucket < 16)
294 return bucket + 1;
295 return (bucket - 13) * 8;
296 }
297
298
299 /* For a vop of NUM operands, return the bucket NUM belongs to. If NUM is
300 beyond the end of the bucket table, return -1. */
301
302 static inline int
303 vop_free_bucket_index (int num)
304 {
305 gcc_assert (num > 0);
306
307 /* Sizes 1 through 16 use buckets 0-15. */
308 if (num <= 16)
309 return num - 1;
310 /* Buckets 16 - 45 represent 17 through 256 in 8 unit chunks. */
311 if (num < 256)
312 return 14 + (num - 1) / 8;
313 return -1;
314 }
315
316
317 /* Initialize the VOP free buckets. */
318
319 static inline void
320 init_vop_buckets (void)
321 {
322 int x;
323
324 for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++)
325 gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL;
326 }
327
328
329 /* Add PTR to the appropriate VOP bucket. */
330
331 static inline void
332 add_vop_to_freelist (voptype_p ptr)
333 {
334 int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev));
335
336   /* Too large, use the largest bucket so it's not a complete throwaway.  */
337 if (bucket == -1)
338 bucket = NUM_VOP_FREE_BUCKETS - 1;
339
340 ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
341 gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr;
342 }
343
344
345 /* These are the sizes of the operand memory buffer which gets allocated each
346    time more operand space is required.  The final value is the amount that is
347 allocated every time after that. */
348
349 #define OP_SIZE_INIT 0
350 #define OP_SIZE_1 30
351 #define OP_SIZE_2 110
352 #define OP_SIZE_3 511
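
/* In other words, the first chunk requested from the allocator holds
   OP_SIZE_1 * sizeof (struct voptype_d) bytes, the next OP_SIZE_2, and
   every chunk after that OP_SIZE_3 (a single oversized request jumps
   straight to the OP_SIZE_3 size); see ssa_operand_alloc below.  */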
353
354 /* Current size of the operand memory buffer. */
355 static unsigned int ssa_operand_mem_size;
356
357 /* Initialize the operand cache routines. */
358
359 void
360 init_ssa_operands (void)
361 {
362 if (!n_initialized++)
363 {
364 build_defs = VEC_alloc (tree, heap, 5);
365 build_uses = VEC_alloc (tree, heap, 10);
366 build_vuses = VEC_alloc (tree, heap, 25);
367 build_vdefs = VEC_alloc (tree, heap, 25);
368 build_loads = BITMAP_ALLOC (NULL);
369 build_stores = BITMAP_ALLOC (NULL);
370 scb_stack = VEC_alloc (scb_t, heap, 20);
371 }
372
373 gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
374 gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
375 gimple_ssa_operands (cfun)->operand_memory_index = ssa_operand_mem_size;
376 gimple_ssa_operands (cfun)->ops_active = true;
377 memset (&clobber_stats, 0, sizeof (clobber_stats));
378 init_vop_buckets ();
379 ssa_operand_mem_size = OP_SIZE_INIT;
380 }
381
382
383 /* Dispose of anything required by the operand routines. */
384
385 void
386 fini_ssa_operands (void)
387 {
388 struct ssa_operand_memory_d *ptr;
389 unsigned ix;
390 tree mpt;
391
392 if (!--n_initialized)
393 {
394 VEC_free (tree, heap, build_defs);
395 VEC_free (tree, heap, build_uses);
396 VEC_free (tree, heap, build_vdefs);
397 VEC_free (tree, heap, build_vuses);
398 BITMAP_FREE (build_loads);
399 BITMAP_FREE (build_stores);
400
401 /* The change buffer stack had better be empty. */
402 gcc_assert (VEC_length (scb_t, scb_stack) == 0);
403 VEC_free (scb_t, heap, scb_stack);
404 scb_stack = NULL;
405 }
406
407 gimple_ssa_operands (cfun)->free_defs = NULL;
408 gimple_ssa_operands (cfun)->free_uses = NULL;
409
410 while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
411 {
412 gimple_ssa_operands (cfun)->operand_memory
413 = gimple_ssa_operands (cfun)->operand_memory->next;
414 ggc_free (ptr);
415 }
416
417 for (ix = 0;
418 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
419 ix++)
420 {
421 if (mpt)
422 BITMAP_FREE (MPT_SYMBOLS (mpt));
423 }
424
425 VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);
426
427 gimple_ssa_operands (cfun)->ops_active = false;
428
429 if (dump_file && (dump_flags & TDF_STATS))
430 {
431 fprintf (dump_file, "Original clobbered vars: %d\n",
432 clobber_stats.clobbered_vars);
433 fprintf (dump_file, "Static write clobbers avoided: %d\n",
434 clobber_stats.static_write_clobbers_avoided);
435 fprintf (dump_file, "Static read clobbers avoided: %d\n",
436 clobber_stats.static_read_clobbers_avoided);
437 fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
438 clobber_stats.unescapable_clobbers_avoided);
439 fprintf (dump_file, "Original read-only clobbers: %d\n",
440 clobber_stats.readonly_clobbers);
441 fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
442 clobber_stats.static_readonly_clobbers_avoided);
443 }
444 }
445
446
447 /* Return a pointer to SIZE bytes of memory from the operand memory pool.  */
448
449 static inline void *
450 ssa_operand_alloc (unsigned size)
451 {
452 char *ptr;
453
454 if (gimple_ssa_operands (cfun)->operand_memory_index + size
455 >= ssa_operand_mem_size)
456 {
457 struct ssa_operand_memory_d *ptr;
458
459 if (ssa_operand_mem_size == OP_SIZE_INIT)
460 ssa_operand_mem_size = OP_SIZE_1 * sizeof (struct voptype_d);
461 else
462 if (ssa_operand_mem_size == OP_SIZE_1 * sizeof (struct voptype_d))
463 ssa_operand_mem_size = OP_SIZE_2 * sizeof (struct voptype_d);
464 else
465 ssa_operand_mem_size = OP_SIZE_3 * sizeof (struct voptype_d);
466
467 /* Go right to the maximum size if the request is too large. */
468 if (size > ssa_operand_mem_size)
469 ssa_operand_mem_size = OP_SIZE_3 * sizeof (struct voptype_d);
470
471       /* Fail if there is still not enough space.  If this many operands are
472          really required, first make sure a different problem isn't causing
473          such a large request.  If the request turns out to be legitimate,
474          a buffer can be allocated specially for it.  */
475 gcc_assert (size <= ssa_operand_mem_size);
476
477 ptr = (struct ssa_operand_memory_d *)
478 ggc_alloc (sizeof (struct ssa_operand_memory_d)
479 + ssa_operand_mem_size - 1);
480 ptr->next = gimple_ssa_operands (cfun)->operand_memory;
481 gimple_ssa_operands (cfun)->operand_memory = ptr;
482 gimple_ssa_operands (cfun)->operand_memory_index = 0;
483 }
484 ptr = &(gimple_ssa_operands (cfun)->operand_memory
485 ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
486 gimple_ssa_operands (cfun)->operand_memory_index += size;
487 return ptr;
488 }
489
490
491 /* Allocate a DEF operand. */
492
493 static inline struct def_optype_d *
494 alloc_def (void)
495 {
496 struct def_optype_d *ret;
497 if (gimple_ssa_operands (cfun)->free_defs)
498 {
499 ret = gimple_ssa_operands (cfun)->free_defs;
500 gimple_ssa_operands (cfun)->free_defs
501 = gimple_ssa_operands (cfun)->free_defs->next;
502 }
503 else
504 ret = (struct def_optype_d *)
505 ssa_operand_alloc (sizeof (struct def_optype_d));
506 return ret;
507 }
508
509
510 /* Allocate a USE operand. */
511
512 static inline struct use_optype_d *
513 alloc_use (void)
514 {
515 struct use_optype_d *ret;
516 if (gimple_ssa_operands (cfun)->free_uses)
517 {
518 ret = gimple_ssa_operands (cfun)->free_uses;
519 gimple_ssa_operands (cfun)->free_uses
520 = gimple_ssa_operands (cfun)->free_uses->next;
521 }
522 else
523 ret = (struct use_optype_d *)
524 ssa_operand_alloc (sizeof (struct use_optype_d));
525 return ret;
526 }
527
528
529 /* Allocate a vop with NUM elements. */
530
531 static inline struct voptype_d *
532 alloc_vop (int num)
533 {
534 struct voptype_d *ret = NULL;
535 int alloc_size = 0;
536
537 int bucket = vop_free_bucket_index (num);
538 if (bucket != -1)
539 {
540 /* If there is a free operand, use it. */
541 if (gimple_ssa_operands (cfun)->vop_free_buckets[bucket] != NULL)
542 {
543 ret = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
544 gimple_ssa_operands (cfun)->vop_free_buckets[bucket] =
545 gimple_ssa_operands (cfun)->vop_free_buckets[bucket]->next;
546 }
547 else
548 alloc_size = vop_free_bucket_size(bucket);
549 }
550 else
551 alloc_size = num;
552
553 if (alloc_size > 0)
554 ret = (struct voptype_d *)ssa_operand_alloc (
555 sizeof (struct voptype_d) + (alloc_size - 1) * sizeof (vuse_element_t));
556
557 VUSE_VECT_NUM_ELEM (ret->usev) = num;
558 return ret;
559 }
560
561
562 /* This routine makes sure that PTR is in an immediate use list, and makes
563 sure the stmt pointer is set to the current stmt. */
564
565 static inline void
566 set_virtual_use_link (use_operand_p ptr, tree stmt)
567 {
568 /* fold_stmt may have changed the stmt pointers. */
569 if (ptr->stmt != stmt)
570 ptr->stmt = stmt;
571
572 /* If this use isn't in a list, add it to the correct list. */
573 if (!ptr->prev)
574 link_imm_use (ptr, *(ptr->use));
575 }
576
577
578 /* Adds OP to the list of defs after LAST. */
579
580 static inline def_optype_p
581 add_def_op (tree *op, def_optype_p last)
582 {
583 def_optype_p new;
584
585 new = alloc_def ();
586 DEF_OP_PTR (new) = op;
587 last->next = new;
588 new->next = NULL;
589 return new;
590 }
591
592
593 /* Adds OP to the list of uses of statement STMT after LAST. */
594
595 static inline use_optype_p
596 add_use_op (tree stmt, tree *op, use_optype_p last)
597 {
598 use_optype_p new;
599
600 new = alloc_use ();
601 USE_OP_PTR (new)->use = op;
602 link_imm_use_stmt (USE_OP_PTR (new), *op, stmt);
603 last->next = new;
604 new->next = NULL;
605 return new;
606 }
607
608
609 /* Return a virtual op pointer with NUM elements which are all initialized to OP
610 and are linked into the immediate uses for STMT. The new vop is appended
611 after PREV. */
612
613 static inline voptype_p
614 add_vop (tree stmt, tree op, int num, voptype_p prev)
615 {
616 voptype_p new;
617 int x;
618
619 new = alloc_vop (num);
620 for (x = 0; x < num; x++)
621 {
622 VUSE_OP_PTR (new, x)->prev = NULL;
623 SET_VUSE_OP (new, x, op);
624 VUSE_OP_PTR (new, x)->use = &new->usev.uses[x].use_var;
625 link_imm_use_stmt (VUSE_OP_PTR (new, x), new->usev.uses[x].use_var, stmt);
626 }
627
628 if (prev)
629 prev->next = new;
630 new->next = NULL;
631 return new;
632 }
633
634
635 /* Adds OP to the list of vuses of statement STMT after LAST, and moves
636 LAST to the new element. */
637
638 static inline voptype_p
639 add_vuse_op (tree stmt, tree op, int num, voptype_p last)
640 {
641 voptype_p new = add_vop (stmt, op, num, last);
642 VDEF_RESULT (new) = NULL_TREE;
643 return new;
644 }
645
646
647 /* Adds OP to the list of vdefs of statement STMT after LAST, and moves
648 LAST to the new element. */
649
650 static inline voptype_p
651 add_vdef_op (tree stmt, tree op, int num, voptype_p last)
652 {
653 voptype_p new = add_vop (stmt, op, num, last);
654 VDEF_RESULT (new) = op;
655 return new;
656 }
657
658
659 /* Reallocate the virtual operand PTR so that it has NUM_ELEM use slots. ROOT
660 is the head of the operand list it belongs to. */
661
662 static inline struct voptype_d *
663 realloc_vop (struct voptype_d *ptr, int num_elem, struct voptype_d **root)
664 {
665 int x, lim;
666 tree stmt, val;
667 struct voptype_d *ret, *tmp;
668
669 if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
670 return ptr;
671
672 val = VUSE_OP (ptr, 0);
673 if (TREE_CODE (val) == SSA_NAME)
674 val = SSA_NAME_VAR (val);
675
676 stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
677
678 /* Delink all the existing uses. */
679 for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
680 {
681 use_operand_p use_p = VUSE_OP_PTR (ptr, x);
682 delink_imm_use (use_p);
683 }
684
685 /* If we want less space, simply use this one, and shrink the size. */
686 if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
687 {
688 VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
689 return ptr;
690 }
691
692 /* It is growing. Allocate a new one and replace the old one. */
693 ret = add_vuse_op (stmt, val, num_elem, ptr);
694
695 /* Clear PTR and add its memory to the free list. */
696 lim = VUSE_VECT_NUM_ELEM (ptr->usev);
697 memset (ptr, 0,
698 sizeof (struct voptype_d) + sizeof (vuse_element_t) * (lim- 1));
699 add_vop_to_freelist (ptr);
700
701 /* Now simply remove the old one. */
702 if (*root == ptr)
703 {
704 *root = ret;
705 return ret;
706 }
707   else
708     /* Walk the list looking for PTR's predecessor and relink it to RET.  */
709     for (tmp = *root; tmp != NULL; tmp = tmp->next)
710       if (tmp->next == ptr)
711         {
712           tmp->next = ret;
713           return ret;
714         }
715
716 /* The pointer passed in isn't in STMT's VDEF lists. */
717 gcc_unreachable ();
718 }
719
720
721 /* Reallocate the PTR vdef so that it has NUM_ELEM use slots. */
722
723 struct voptype_d *
724 realloc_vdef (struct voptype_d *ptr, int num_elem)
725 {
726 tree val, stmt;
727 struct voptype_d *ret;
728
729 val = VDEF_RESULT (ptr);
730 stmt = USE_STMT (VDEF_OP_PTR (ptr, 0));
731 ret = realloc_vop (ptr, num_elem, &(VDEF_OPS (stmt)));
732 VDEF_RESULT (ret) = val;
733 return ret;
734 }
735
736
737 /* Reallocate the PTR vuse so that it has NUM_ELEM use slots. */
738
739 struct voptype_d *
740 realloc_vuse (struct voptype_d *ptr, int num_elem)
741 {
742 tree stmt;
743 struct voptype_d *ret;
744
745 stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
746 ret = realloc_vop (ptr, num_elem, &(VUSE_OPS (stmt)));
747 return ret;
748 }
749
750
751 /* Takes elements from build_defs and turns them into def operands of STMT.
752 TODO -- Make build_defs VEC of tree *. */
753
754 static inline void
755 finalize_ssa_defs (tree stmt)
756 {
757 unsigned new_i;
758 struct def_optype_d new_list;
759 def_optype_p old_ops, last;
760 unsigned int num = VEC_length (tree, build_defs);
761
762 /* There should only be a single real definition per assignment. */
763 gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);
764
765 new_list.next = NULL;
766 last = &new_list;
767
768 old_ops = DEF_OPS (stmt);
769
770 new_i = 0;
771
772 /* Check for the common case of 1 def that hasn't changed. */
773 if (old_ops && old_ops->next == NULL && num == 1
774 && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
775 return;
776
777 /* If there is anything in the old list, free it. */
778 if (old_ops)
779 {
780 old_ops->next = gimple_ssa_operands (cfun)->free_defs;
781 gimple_ssa_operands (cfun)->free_defs = old_ops;
782 }
783
784 /* If there is anything remaining in the build_defs list, simply emit it. */
785 for ( ; new_i < num; new_i++)
786 last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);
787
788 /* Now set the stmt's operands. */
789 DEF_OPS (stmt) = new_list.next;
790
791 #ifdef ENABLE_CHECKING
792 {
793 def_optype_p ptr;
794 unsigned x = 0;
795 for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
796 x++;
797
798 gcc_assert (x == num);
799 }
800 #endif
801 }
802
803
804 /* Takes elements from build_uses and turns them into use operands of STMT.
805 TODO -- Make build_uses VEC of tree *. */
806
807 static inline void
808 finalize_ssa_uses (tree stmt)
809 {
810 unsigned new_i;
811 struct use_optype_d new_list;
812 use_optype_p old_ops, ptr, last;
813
814 #ifdef ENABLE_CHECKING
815 {
816 unsigned x;
817 unsigned num = VEC_length (tree, build_uses);
818
819 /* If the pointer to the operand is the statement itself, something is
820 wrong. It means that we are pointing to a local variable (the
821 initial call to update_stmt_operands does not pass a pointer to a
822 statement). */
823 for (x = 0; x < num; x++)
824 gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
825 }
826 #endif
827
828 new_list.next = NULL;
829 last = &new_list;
830
831 old_ops = USE_OPS (stmt);
832
833 /* If there is anything in the old list, free it. */
834 if (old_ops)
835 {
836 for (ptr = old_ops; ptr; ptr = ptr->next)
837 delink_imm_use (USE_OP_PTR (ptr));
838 old_ops->next = gimple_ssa_operands (cfun)->free_uses;
839 gimple_ssa_operands (cfun)->free_uses = old_ops;
840 }
841
842   /* Now create nodes for all the new uses.  */
843 for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
844 last = add_use_op (stmt,
845 (tree *) VEC_index (tree, build_uses, new_i),
846 last);
847
848 /* Now set the stmt's operands. */
849 USE_OPS (stmt) = new_list.next;
850
851 #ifdef ENABLE_CHECKING
852 {
853 unsigned x = 0;
854 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
855 x++;
856
857 gcc_assert (x == VEC_length (tree, build_uses));
858 }
859 #endif
860 }
861
862
863 /* Takes elements from BUILD_VDEFS and turns them into vdef operands of
864 STMT. FIXME, for now VDEF operators should have a single operand
865 in their RHS. */
866
867 static inline void
868 finalize_ssa_vdefs (tree stmt)
869 {
870 unsigned new_i;
871 struct voptype_d new_list;
872 voptype_p old_ops, ptr, last;
873 stmt_ann_t ann = stmt_ann (stmt);
874
875 /* Set the symbols referenced by STMT. */
876 if (!bitmap_empty_p (build_stores))
877 {
878 if (ann->operands.stores == NULL)
879 ann->operands.stores = BITMAP_ALLOC (NULL);
880
881 bitmap_copy (ann->operands.stores, build_stores);
882 }
883 else
884 BITMAP_FREE (ann->operands.stores);
885
886 /* If aliases have not been computed, do not instantiate a virtual
887 operator on STMT. Initially, we only compute the SSA form on
888 GIMPLE registers. The virtual SSA form is only computed after
889 alias analysis, so virtual operators will remain unrenamed and
890 the verifier will complain. However, alias analysis needs to
891 access symbol load/store information, so we need to compute
892 those. */
893 if (!gimple_aliases_computed_p (cfun))
894 return;
895
896 new_list.next = NULL;
897 last = &new_list;
898
899 old_ops = VDEF_OPS (stmt);
900 new_i = 0;
901 while (old_ops && new_i < VEC_length (tree, build_vdefs))
902 {
903 tree op = VEC_index (tree, build_vdefs, new_i);
904 unsigned new_uid = get_name_decl (op);
905 unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops));
906
907 /* FIXME, for now each VDEF operator should have at most one
908 operand in their RHS. */
909 gcc_assert (VDEF_NUM (old_ops) == 1);
910
911 if (old_uid == new_uid)
912 {
913 /* If the symbols are the same, reuse the existing operand. */
914 last->next = old_ops;
915 last = old_ops;
916 old_ops = old_ops->next;
917 last->next = NULL;
918 set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
919 new_i++;
920 }
921 else if (old_uid < new_uid)
922 {
923 /* If old is less than new, old goes to the free list. */
924 voptype_p next;
925 delink_imm_use (VDEF_OP_PTR (old_ops, 0));
926 next = old_ops->next;
927 add_vop_to_freelist (old_ops);
928 old_ops = next;
929 }
930 else
931 {
932 /* This is a new operand. */
933 last = add_vdef_op (stmt, op, 1, last);
934 new_i++;
935 }
936 }
937
938 /* If there is anything remaining in BUILD_VDEFS, simply emit it. */
939 for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
940 last = add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, last);
941
942 /* If there is anything in the old list, free it. */
943 if (old_ops)
944 {
945 for (ptr = old_ops; ptr; ptr = last)
946 {
947 last = ptr->next;
948 delink_imm_use (VDEF_OP_PTR (ptr, 0));
949 add_vop_to_freelist (ptr);
950 }
951 }
952
953 /* Now set STMT's operands. */
954 VDEF_OPS (stmt) = new_list.next;
955
956 #ifdef ENABLE_CHECKING
957 {
958 unsigned x = 0;
959 for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next)
960 x++;
961
962 gcc_assert (x == VEC_length (tree, build_vdefs));
963 }
964 #endif
965 }
966
967
968 /* Takes elements from BUILD_VUSES and turns them into VUSE operands of
969 STMT. */
970
971 static inline void
972 finalize_ssa_vuse_ops (tree stmt)
973 {
974 unsigned new_i;
975 int old_i;
976 voptype_p old_ops, last;
977 VEC(tree,heap) *new_ops;
978 stmt_ann_t ann;
979
980 /* Set the symbols referenced by STMT. */
981 ann = stmt_ann (stmt);
982 if (!bitmap_empty_p (build_loads))
983 {
984 if (ann->operands.loads == NULL)
985 ann->operands.loads = BITMAP_ALLOC (NULL);
986
987 bitmap_copy (ann->operands.loads, build_loads);
988 }
989 else
990 BITMAP_FREE (ann->operands.loads);
991
992 /* If aliases have not been computed, do not instantiate a virtual
993 operator on STMT. Initially, we only compute the SSA form on
994 GIMPLE registers. The virtual SSA form is only computed after
995 alias analysis, so virtual operators will remain unrenamed and
996 the verifier will complain. However, alias analysis needs to
997 access symbol load/store information, so we need to compute
998 those. */
999 if (!gimple_aliases_computed_p (cfun))
1000 return;
1001
1002 /* STMT should have at most one VUSE operator. */
1003 old_ops = VUSE_OPS (stmt);
1004 gcc_assert (old_ops == NULL || old_ops->next == NULL);
1005
1006 new_ops = NULL;
1007 new_i = old_i = 0;
1008 while (old_ops
1009 && old_i < VUSE_NUM (old_ops)
1010 && new_i < VEC_length (tree, build_vuses))
1011 {
1012 tree new_op = VEC_index (tree, build_vuses, new_i);
1013 tree old_op = VUSE_OP (old_ops, old_i);
1014 unsigned new_uid = get_name_decl (new_op);
1015 unsigned old_uid = get_name_decl (old_op);
1016
1017 if (old_uid == new_uid)
1018 {
1019 /* If the symbols are the same, reuse the existing operand. */
1020 VEC_safe_push (tree, heap, new_ops, old_op);
1021 new_i++;
1022 old_i++;
1023 }
1024 else if (old_uid < new_uid)
1025 {
1026 /* If OLD_UID is less than NEW_UID, the old operand has
1027 disappeared, skip to the next old operand. */
1028 old_i++;
1029 }
1030 else
1031 {
1032 /* This is a new operand. */
1033 VEC_safe_push (tree, heap, new_ops, new_op);
1034 new_i++;
1035 }
1036 }
1037
1038 /* If there is anything remaining in the build_vuses list, simply emit it. */
1039 for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
1040 VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i));
1041
1042 /* If there is anything in the old list, free it. */
1043 if (old_ops)
1044 {
1045 for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
1046 delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
1047 add_vop_to_freelist (old_ops);
1048 VUSE_OPS (stmt) = NULL;
1049 }
1050
1051 /* If there are any operands, instantiate a VUSE operator for STMT. */
1052 if (new_ops)
1053 {
1054 tree op;
1055 unsigned i;
1056
1057 last = add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), NULL);
1058
1059 for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
1060 SET_USE (VUSE_OP_PTR (last, (int) i), op);
1061
1062 VUSE_OPS (stmt) = last;
1063 }
1064
1065 #ifdef ENABLE_CHECKING
1066 {
1067 unsigned x;
1068
1069 if (VUSE_OPS (stmt))
1070 {
1071 gcc_assert (VUSE_OPS (stmt)->next == NULL);
1072 x = VUSE_NUM (VUSE_OPS (stmt));
1073 }
1074 else
1075 x = 0;
1076
1077 gcc_assert (x == VEC_length (tree, build_vuses));
1078 }
1079 #endif
1080 }
1081
1082 /* Return a new VUSE operand vector for STMT. */
1083
1084 static void
1085 finalize_ssa_vuses (tree stmt)
1086 {
1087 unsigned num, num_vdefs;
1088 unsigned vuse_index;
1089
1090 /* Remove superfluous VUSE operands. If the statement already has a
1091 VDEF operator for a variable 'a', then a VUSE for 'a' is not
1092 needed because VDEFs imply a VUSE of the variable. For instance,
1093 suppose that variable 'a' is pointed-to by p and q:
1094
1095 # VUSE <a_2>
1096 # a_3 = VDEF <a_2>
1097 *p = *q;
1098
1099 The VUSE <a_2> is superfluous because it is implied by the
1100 VDEF operator. */
1101 num = VEC_length (tree, build_vuses);
1102 num_vdefs = VEC_length (tree, build_vdefs);
1103
1104 if (num > 0 && num_vdefs > 0)
1105 for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
1106 {
1107 tree vuse;
1108 vuse = VEC_index (tree, build_vuses, vuse_index);
1109 if (TREE_CODE (vuse) != SSA_NAME)
1110 {
1111 var_ann_t ann = var_ann (vuse);
1112 ann->in_vuse_list = 0;
1113 if (ann->in_vdef_list)
1114 {
1115 VEC_ordered_remove (tree, build_vuses, vuse_index);
1116 continue;
1117 }
1118 }
1119 vuse_index++;
1120 }
1121
1122 finalize_ssa_vuse_ops (stmt);
1123 }
1124
1125
1126 /* Clear the in_list bits and empty the build array for VDEFs and
1127 VUSEs. */
1128
1129 static inline void
1130 cleanup_build_arrays (void)
1131 {
1132 unsigned i;
1133 tree t;
1134
1135 for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++)
1136 if (TREE_CODE (t) != SSA_NAME)
1137 var_ann (t)->in_vdef_list = false;
1138
1139 for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++)
1140 if (TREE_CODE (t) != SSA_NAME)
1141 var_ann (t)->in_vuse_list = false;
1142
1143 VEC_truncate (tree, build_vdefs, 0);
1144 VEC_truncate (tree, build_vuses, 0);
1145 VEC_truncate (tree, build_defs, 0);
1146 VEC_truncate (tree, build_uses, 0);
1147 bitmap_clear (build_loads);
1148 bitmap_clear (build_stores);
1149 }
1150
1151
1152 /* Finalize all the build vectors and fill the new operands into STMT's
      operand cache.  */
1153
1154 static inline void
1155 finalize_ssa_stmt_operands (tree stmt)
1156 {
1157 finalize_ssa_defs (stmt);
1158 finalize_ssa_uses (stmt);
1159 finalize_ssa_vdefs (stmt);
1160 finalize_ssa_vuses (stmt);
1161 cleanup_build_arrays ();
1162 }
1163
1164
1165 /* Start the process of building up a new set of operand vectors.  */
1166
1167 static inline void
1168 start_ssa_stmt_operands (void)
1169 {
1170 gcc_assert (VEC_length (tree, build_defs) == 0);
1171 gcc_assert (VEC_length (tree, build_uses) == 0);
1172 gcc_assert (VEC_length (tree, build_vuses) == 0);
1173 gcc_assert (VEC_length (tree, build_vdefs) == 0);
1174 gcc_assert (bitmap_empty_p (build_loads));
1175 gcc_assert (bitmap_empty_p (build_stores));
1176 }
1177
1178
1179 /* Add DEF_P to the list of pointers to operands. */
1180
1181 static inline void
1182 append_def (tree *def_p)
1183 {
1184 VEC_safe_push (tree, heap, build_defs, (tree) def_p);
1185 }
1186
1187
1188 /* Add USE_P to the list of pointers to operands. */
1189
1190 static inline void
1191 append_use (tree *use_p)
1192 {
1193 VEC_safe_push (tree, heap, build_uses, (tree) use_p);
1194 }
1195
1196
1197 /* Add VAR to the set of variables that require a VDEF operator. */
1198
1199 static inline void
1200 append_vdef (tree var)
1201 {
1202 tree sym;
1203
1204 if (TREE_CODE (var) != SSA_NAME)
1205 {
1206 tree mpt;
1207 var_ann_t ann;
1208
1209 /* If VAR belongs to a memory partition, use it instead of VAR. */
1210 mpt = memory_partition (var);
1211 if (mpt)
1212 var = mpt;
1213
1214 /* Don't allow duplicate entries. */
1215 ann = get_var_ann (var);
1216 if (ann->in_vdef_list)
1217 return;
1218
1219 ann->in_vdef_list = true;
1220 sym = var;
1221 }
1222 else
1223 sym = SSA_NAME_VAR (var);
1224
1225 VEC_safe_push (tree, heap, build_vdefs, var);
1226 bitmap_set_bit (build_stores, DECL_UID (sym));
1227 }
1228
1229
1230 /* Add VAR to the set of variables that require a VUSE operator. */
1231
1232 static inline void
1233 append_vuse (tree var)
1234 {
1235 tree sym;
1236
1237 if (TREE_CODE (var) != SSA_NAME)
1238 {
1239 tree mpt;
1240 var_ann_t ann;
1241
1242 /* If VAR belongs to a memory partition, use it instead of VAR. */
1243 mpt = memory_partition (var);
1244 if (mpt)
1245 var = mpt;
1246
1247 /* Don't allow duplicate entries. */
1248 ann = get_var_ann (var);
1249 if (ann->in_vuse_list || ann->in_vdef_list)
1250 return;
1251
1252 ann->in_vuse_list = true;
1253 sym = var;
1254 }
1255 else
1256 sym = SSA_NAME_VAR (var);
1257
1258 VEC_safe_push (tree, heap, build_vuses, var);
1259 bitmap_set_bit (build_loads, DECL_UID (sym));
1260 }
1261
1262
1263 /* REF is a tree that contains the entire pointer dereference
1264 expression, if available, or NULL otherwise. ALIAS is the variable
1265 we are asking if REF can access. OFFSET and SIZE come from the
1266 memory access expression that generated this virtual operand. */
1267
1268 static bool
1269 access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
1270 HOST_WIDE_INT size)
1271 {
1272 bool offsetgtz = offset > 0;
1273 unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
1274 tree base = ref ? get_base_address (ref) : NULL;
1275
1276 /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
1277 using a call-clobbered memory tag. By definition, call-clobbered
1278 memory tags can always touch .GLOBAL_VAR. */
1279 if (alias == gimple_global_var (cfun))
1280 return true;
1281
1282 /* If ALIAS is an SFT, it can't be touched if the offset
1283 and size of the access is not overlapping with the SFT offset and
1284 size. This is only true if we are accessing through a pointer
1285 to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
1286 be accessing through a pointer to some substruct of the
1287 structure, and if we try to prune there, we will have the wrong
1288 offset, and get the wrong answer.
1289 i.e., we can't prune without more work if we have something like
1290
1291 struct gcc_target
1292 {
1293 struct asm_out
1294 {
1295 const char *byte_op;
1296 struct asm_int_op
1297 {
1298 const char *hi;
1299 } aligned_op;
1300 } asm_out;
1301 } targetm;
1302
1303 foo = &targetm.asm_out.aligned_op;
1304 return foo->hi;
1305
1306 SFT.1, which represents hi, will have SFT_OFFSET=32 because in
1307 terms of SFT_PARENT_VAR, that is where it is.
1308 However, the access through the foo pointer will be at offset 0. */
1309 if (size != -1
1310 && TREE_CODE (alias) == STRUCT_FIELD_TAG
1311 && base
1312 && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
1313 && !overlap_subvar (offset, size, alias, NULL))
1314 {
1315 #ifdef ACCESS_DEBUGGING
1316 fprintf (stderr, "Access to ");
1317 print_generic_expr (stderr, ref, 0);
1318 fprintf (stderr, " may not touch ");
1319 print_generic_expr (stderr, alias, 0);
1320 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1321 #endif
1322 return false;
1323 }
1324
1325   /* With strict aliasing, it is impossible for a component access
1326 through a pointer to touch a random variable, unless that
1327 variable *is* a structure or a pointer.
1328
1329 That is, given p->c, and some random global variable b,
1330 there is no legal way that p->c could be an access to b.
1331
1332 Without strict aliasing on, we consider it legal to do something
1333 like:
1334
1335 struct foos { int l; };
1336 int foo;
1337 static struct foos *getfoo(void);
1338 int main (void)
1339 {
1340 struct foos *f = getfoo();
1341 f->l = 1;
1342 foo = 2;
1343 if (f->l == 1)
1344 abort();
1345 exit(0);
1346 }
1347 static struct foos *getfoo(void)
1348 { return (struct foos *)&foo; }
1349
1350 (taken from 20000623-1.c)
1351
1352 The docs also say/imply that access through union pointers
1353 is legal (but *not* if you take the address of the union member,
1354 i.e. the inverse), such that you can do
1355
1356 typedef union {
1357 int d;
1358 } U;
1359
1360 int rv;
1361 void breakme()
1362 {
1363 U *rv0;
1364 U *pretmp = (U*)&rv;
1365 rv0 = pretmp;
1366 rv0->d = 42;
1367 }
1368 To implement this, we just punt on accesses through union
1369 pointers entirely.
1370 */
1371 else if (ref
1372 && flag_strict_aliasing
1373 && TREE_CODE (ref) != INDIRECT_REF
1374 && !MTAG_P (alias)
1375 && (TREE_CODE (base) != INDIRECT_REF
1376 || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
1377 && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
1378 && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
1379 && !var_ann (alias)->is_heapvar
1380            /* When the struct has the may_alias attribute, its alias set is
1381               zero, so we do not prune here and end up returning true.  */
1382 && get_alias_set (base))
1383 {
1384 #ifdef ACCESS_DEBUGGING
1385 fprintf (stderr, "Access to ");
1386 print_generic_expr (stderr, ref, 0);
1387 fprintf (stderr, " may not touch ");
1388 print_generic_expr (stderr, alias, 0);
1389 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1390 #endif
1391 return false;
1392 }
1393
1394 /* If the offset of the access is greater than the size of one of
1395 the possible aliases, it can't be touching that alias, because it
1396 would be past the end of the structure. */
1397 else if (ref
1398 && flag_strict_aliasing
1399 && TREE_CODE (ref) != INDIRECT_REF
1400 && !MTAG_P (alias)
1401 && !POINTER_TYPE_P (TREE_TYPE (alias))
1402 && offsetgtz
1403 && DECL_SIZE (alias)
1404 && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
1405 && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
1406 {
1407 #ifdef ACCESS_DEBUGGING
1408 fprintf (stderr, "Access to ");
1409 print_generic_expr (stderr, ref, 0);
1410 fprintf (stderr, " may not touch ");
1411 print_generic_expr (stderr, alias, 0);
1412 fprintf (stderr, " in function %s\n", get_name (current_function_decl));
1413 #endif
1414 return false;
1415 }
1416
1417 return true;
1418 }
1419
1420
1421 /* Add VAR to the virtual operands array. FLAGS is as in
1422 get_expr_operands. FULL_REF is a tree that contains the entire
1423 pointer dereference expression, if available, or NULL otherwise.
1424 OFFSET and SIZE come from the memory access expression that
1425    generated this virtual operand.  FOR_CLOBBER is true if this is
1426 adding a virtual operand for a call clobber. */
1427
1428 static void
1429 add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
1430 tree full_ref, HOST_WIDE_INT offset,
1431 HOST_WIDE_INT size, bool for_clobber)
1432 {
1433 VEC(tree,gc) *aliases;
1434 tree sym;
1435 var_ann_t v_ann;
1436
1437 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1438 v_ann = var_ann (sym);
1439
1440 /* Mark the statement as having memory operands. */
1441 s_ann->references_memory = true;
1442
1443 /* Mark statements with volatile operands. Optimizers should back
1444 off from statements having volatile operands. */
1445 if (TREE_THIS_VOLATILE (sym) && s_ann)
1446 s_ann->has_volatile_ops = true;
1447
1448   /* If the variable cannot be modified and this is a VDEF, change
1449      it into a VUSE.  This happens when read-only variables are marked
1450      call-clobbered and/or aliased to writable variables.  We only do
1451      this for non-specific (implicit) stores.
1452
1453 Note that if this is a specific store, i.e. associated with a
1454 GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run
1455 into validation problems.
1456
1457 This can happen when programs cast away const, leaving us with a
1458 store to read-only memory. If the statement is actually executed
1459 at runtime, then the program is ill formed. If the statement is
1460 not executed then all is well. At the very least, we cannot ICE. */
1461 if ((flags & opf_implicit) && unmodifiable_var_p (var))
1462 flags &= ~opf_def;
1463
1464 /* The variable is not a GIMPLE register. Add it (or its aliases) to
1465 virtual operands, unless the caller has specifically requested
1466 not to add virtual operands (used when adding operands inside an
1467 ADDR_EXPR expression). */
1468 if (flags & opf_no_vops)
1469 return;
1470
1471 aliases = v_ann->may_aliases;
1472 if (aliases == NULL)
1473 {
1474 /* The variable is not aliased or it is an alias tag. */
1475 if (flags & opf_def)
1476 append_vdef (var);
1477 else
1478 append_vuse (var);
1479 }
1480 else
1481 {
1482 unsigned i;
1483 tree al;
1484
1485 /* The variable is aliased. Add its aliases to the virtual
1486 operands. */
1487 gcc_assert (VEC_length (tree, aliases) != 0);
1488
1489 if (flags & opf_def)
1490 {
1491 bool none_added = true;
1492
1493 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1494 {
1495 if (!access_can_touch_variable (full_ref, al, offset, size))
1496 continue;
1497
1498 none_added = false;
1499 append_vdef (al);
1500 }
1501
1502 /* If the variable is also an alias tag, add a virtual
1503 operand for it, otherwise we will miss representing
1504 references to the members of the variable's alias set.
1505 This fixes the bug in gcc.c-torture/execute/20020503-1.c.
1506
1507 It is also necessary to add bare defs on clobbers for
1508 SMT's, so that bare SMT uses caused by pruning all the
1509 aliases will link up properly with calls. In order to
1510 keep the number of these bare defs we add down to the
1511 minimum necessary, we keep track of which SMT's were used
1512 alone in statement vdefs or VUSEs. */
1513 if (v_ann->is_aliased
1514 || none_added
1515 || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
1516 && for_clobber))
1517 {
1518 append_vdef (var);
1519 }
1520 }
1521 else
1522 {
1523 bool none_added = true;
1524 for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
1525 {
1526 if (!access_can_touch_variable (full_ref, al, offset, size))
1527 continue;
1528 none_added = false;
1529 append_vuse (al);
1530 }
1531
1532       /* Similarly, append a virtual use for VAR itself, when
1533 it is an alias tag. */
1534 if (v_ann->is_aliased || none_added)
1535 append_vuse (var);
1536 }
1537 }
1538 }
1539
1540
1541 /* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
1542 get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
1543 the statement's real operands, otherwise it is added to virtual
1544 operands. */
1545
1546 static void
1547 add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
1548 {
1549 tree var, sym;
1550 var_ann_t v_ann;
1551
1552 gcc_assert (SSA_VAR_P (*var_p) && s_ann);
1553
1554 var = *var_p;
1555 sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
1556 v_ann = var_ann (sym);
1557
1558 /* Mark statements with volatile operands. */
1559 if (TREE_THIS_VOLATILE (sym))
1560 s_ann->has_volatile_ops = true;
1561
1562 if (is_gimple_reg (sym))
1563 {
1564 /* The variable is a GIMPLE register. Add it to real operands. */
1565 if (flags & opf_def)
1566 append_def (var_p);
1567 else
1568 append_use (var_p);
1569 }
1570 else
1571 add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
1572 }
1573
1574
1575 /* A subroutine of get_expr_operands to handle INDIRECT_REF,
1576 ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
1577
1578 STMT is the statement being processed, EXPR is the INDIRECT_REF
1579 that got us here.
1580
1581 FLAGS is as in get_expr_operands.
1582
1583 FULL_REF contains the full pointer dereference expression, if we
1584 have it, or NULL otherwise.
1585
1586 OFFSET and SIZE are the location of the access inside the
1587 dereferenced pointer, if known.
1588
1589 RECURSE_ON_BASE should be set to true if we want to continue
1590 calling get_expr_operands on the base pointer, and false if
1591 something else will do it for us. */
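
/* For example, for the store "*p_1 = x", p_1's pointed-to memory is
   represented here: if p_1 has flow-sensitive points-to information we
   add a VDEF of its name memory tag, otherwise a VDEF of the symbol
   memory tag of p, and in either case a USE of p_1 itself (a sketch;
   the exact tags depend on what alias analysis computed).  */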
1592
1593 static void
1594 get_indirect_ref_operands (tree stmt, tree expr, int flags,
1595 tree full_ref,
1596 HOST_WIDE_INT offset, HOST_WIDE_INT size,
1597 bool recurse_on_base)
1598 {
1599 tree *pptr = &TREE_OPERAND (expr, 0);
1600 tree ptr = *pptr;
1601 stmt_ann_t s_ann = stmt_ann (stmt);
1602
1603 s_ann->references_memory = true;
1604
1605 if (SSA_VAR_P (ptr))
1606 {
1607 struct ptr_info_def *pi = NULL;
1608
1609 /* If PTR has flow-sensitive points-to information, use it. */
1610 if (TREE_CODE (ptr) == SSA_NAME
1611 && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
1612 && pi->name_mem_tag)
1613 {
1614 /* PTR has its own memory tag. Use it. */
1615 add_virtual_operand (pi->name_mem_tag, s_ann, flags,
1616 full_ref, offset, size, false);
1617 }
1618 else
1619 {
1620 /* If PTR is not an SSA_NAME or it doesn't have a name
1621 tag, use its symbol memory tag. */
1622 var_ann_t v_ann;
1623
1624 /* If we are emitting debugging dumps, display a warning if
1625 PTR is an SSA_NAME with no flow-sensitive alias
1626 information. That means that we may need to compute
1627 aliasing again. */
1628 if (dump_file
1629 && TREE_CODE (ptr) == SSA_NAME
1630 && pi == NULL)
1631 {
1632 fprintf (dump_file,
1633 "NOTE: no flow-sensitive alias info for ");
1634 print_generic_expr (dump_file, ptr, dump_flags);
1635 fprintf (dump_file, " in ");
1636 print_generic_stmt (dump_file, stmt, dump_flags);
1637 }
1638
1639 if (TREE_CODE (ptr) == SSA_NAME)
1640 ptr = SSA_NAME_VAR (ptr);
1641 v_ann = var_ann (ptr);
1642
1643 if (v_ann->symbol_mem_tag)
1644 add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
1645 full_ref, offset, size, false);
1646 }
1647 }
1648 else if (TREE_CODE (ptr) == INTEGER_CST)
1649 {
1650 /* If a constant is used as a pointer, we can't generate a real
1651 operand for it but we mark the statement volatile to prevent
1652 optimizations from messing things up. */
1653 if (s_ann)
1654 s_ann->has_volatile_ops = true;
1655 return;
1656 }
1657 else
1658 {
1659 /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
1660 gcc_unreachable ();
1661 }
1662
1663 /* If requested, add a USE operand for the base pointer. */
1664 if (recurse_on_base)
1665 get_expr_operands (stmt, pptr, opf_use);
1666 }
1667
1668
1669 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
1670
1671 static void
1672 get_tmr_operands (tree stmt, tree expr, int flags)
1673 {
1674 tree tag, ref;
1675 HOST_WIDE_INT offset, size, maxsize;
1676 subvar_t svars, sv;
1677 stmt_ann_t s_ann = stmt_ann (stmt);
1678
1679 /* This statement references memory. */
1680 s_ann->references_memory = 1;
1681
1682 /* First record the real operands. */
1683 get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
1684 get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);
1685
1686 if (TMR_SYMBOL (expr))
1687 add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);
1688
1689 tag = TMR_TAG (expr);
1690 if (!tag)
1691 {
1692 /* Something weird, so ensure that we will be careful. */
1693 s_ann->has_volatile_ops = true;
1694 return;
1695 }
1696
1697 if (DECL_P (tag))
1698 {
1699 get_expr_operands (stmt, &tag, flags);
1700 return;
1701 }
1702
1703 ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
1704 gcc_assert (ref != NULL_TREE);
1705 svars = get_subvars_for_var (ref);
1706 for (sv = svars; sv; sv = sv->next)
1707 {
1708 bool exact;
1709
1710 if (overlap_subvar (offset, maxsize, sv->var, &exact))
1711 add_stmt_operand (&sv->var, s_ann, flags);
1712 }
1713 }
1714
1715
1716 /* Add clobbering definitions for .GLOBAL_VAR or for each of the call
1717 clobbered variables in the function. */
1718
1719 static void
1720 add_call_clobber_ops (tree stmt, tree callee)
1721 {
1722 unsigned u;
1723 bitmap_iterator bi;
1724 stmt_ann_t s_ann = stmt_ann (stmt);
1725 bitmap not_read_b, not_written_b;
1726
1727   /* Functions that are neither const, pure, nor noreturn may clobber
1728      call-clobbered variables.  */
1729 if (s_ann)
1730 s_ann->makes_clobbering_call = true;
1731
1732 /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
1733 for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
1734 if (gimple_global_var (cfun))
1735 {
1736 tree var = gimple_global_var (cfun);
1737 add_stmt_operand (&var, s_ann, opf_def);
1738 return;
1739 }
1740
1741 /* Get info for local and module level statics. There is a bit
1742 set for each static if the call being processed does not read
1743 or write that variable. */
1744 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1745 not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
1746
1747 /* Add a VDEF operand for every call clobbered variable. */
1748 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
1749 {
1750 tree var = referenced_var_lookup (u);
1751 unsigned int escape_mask = var_ann (var)->escape_mask;
1752 tree real_var = var;
1753 bool not_read;
1754 bool not_written;
1755
1756 /* Not read and not written are computed on regular vars, not
1757 subvars, so look at the parent var if this is an SFT. */
1758 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1759 real_var = SFT_PARENT_VAR (var);
1760
1761 not_read = not_read_b ? bitmap_bit_p (not_read_b,
1762 DECL_UID (real_var)) : false;
1763 not_written = not_written_b ? bitmap_bit_p (not_written_b,
1764 DECL_UID (real_var)) : false;
1765 gcc_assert (!unmodifiable_var_p (var));
1766
1767 clobber_stats.clobbered_vars++;
1768
1769 /* See if this variable is really clobbered by this function. */
1770
1771 /* Trivial case: Things escaping only to pure/const are not
1772 clobbered by non-pure-const, and only read by pure/const. */
1773 if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
1774 {
1775 tree call = get_call_expr_in (stmt);
1776 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1777 {
1778 add_stmt_operand (&var, s_ann, opf_use);
1779 clobber_stats.unescapable_clobbers_avoided++;
1780 continue;
1781 }
1782 else
1783 {
1784 clobber_stats.unescapable_clobbers_avoided++;
1785 continue;
1786 }
1787 }
1788
1789 if (not_written)
1790 {
1791 clobber_stats.static_write_clobbers_avoided++;
1792 if (!not_read)
1793 add_stmt_operand (&var, s_ann, opf_use);
1794 else
1795 clobber_stats.static_read_clobbers_avoided++;
1796 }
1797 else
1798 add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
1799 }
1800 }
1801
1802
1803 /* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
1804 function. */
1805
1806 static void
1807 add_call_read_ops (tree stmt, tree callee)
1808 {
1809 unsigned u;
1810 bitmap_iterator bi;
1811 stmt_ann_t s_ann = stmt_ann (stmt);
1812 bitmap not_read_b;
1813
1814   /* If the function is not pure, it may reference memory.  Add
1815 a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
1816 for the heuristic used to decide whether to create .GLOBAL_VAR. */
1817 if (gimple_global_var (cfun))
1818 {
1819 tree var = gimple_global_var (cfun);
1820 add_stmt_operand (&var, s_ann, opf_use);
1821 return;
1822 }
1823
1824 not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
1825
1826 /* Add a VUSE for each call-clobbered variable. */
1827 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
1828 {
1829 tree var = referenced_var (u);
1830 tree real_var = var;
1831 bool not_read;
1832
1833 clobber_stats.readonly_clobbers++;
1834
1835 /* Not read and not written are computed on regular vars, not
1836 subvars, so look at the parent var if this is an SFT. */
1837
1838 if (TREE_CODE (var) == STRUCT_FIELD_TAG)
1839 real_var = SFT_PARENT_VAR (var);
1840
1841 not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
1842 : false;
1843
1844 if (not_read)
1845 {
1846 clobber_stats.static_readonly_clobbers_avoided++;
1847 continue;
1848 }
1849
1850 add_stmt_operand (&var, s_ann, opf_use | opf_implicit);
1851 }
1852 }
1853
1854
1855 /* A subroutine of get_expr_operands to handle CALL_EXPR. */
1856
1857 static void
1858 get_call_expr_operands (tree stmt, tree expr)
1859 {
1860 tree op;
1861 int call_flags = call_expr_flags (expr);
1862 stmt_ann_t ann = stmt_ann (stmt);
1863
1864 ann->references_memory = true;
1865
1866 /* If aliases have been computed already, add VDEF or VUSE
1867 operands for all the symbols that have been found to be
1868 call-clobbered. */
1869 if (gimple_aliases_computed_p (cfun)
1870 && !(call_flags & ECF_NOVOPS))
1871 {
1872 /* A 'pure' or a 'const' function never call-clobbers anything.
1873 A 'noreturn' function might, but since we don't return anyway
1874 there is no point in recording that. */
1875 if (TREE_SIDE_EFFECTS (expr)
1876 && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
1877 add_call_clobber_ops (stmt, get_callee_fndecl (expr));
1878 else if (!(call_flags & ECF_CONST))
1879 add_call_read_ops (stmt, get_callee_fndecl (expr));
1880 }
1881
1882   /* Add USE operands for the callee address, the call arguments and the
        static chain.  */
1883 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
1884
1885 for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
1886 get_expr_operands (stmt, &TREE_VALUE (op), opf_use);
1887
1888 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
1889 }
1890
1891
1892 /* Scan operands in the ASM_EXPR statement STMT.  */
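
/* For instance, given

     __asm__ __volatile__ ("" : "=m" (x) : "m" (y) : "memory");

   the output x is scanned as a definition (opf_def), the input y as a
   use, and the "memory" clobber adds implicit definitions
   (opf_def | opf_implicit) of every call-clobbered and addressable
   variable.  Here x and y are just placeholder variables for the
   sketch.  */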
1893
1894 static void
1895 get_asm_expr_operands (tree stmt)
1896 {
1897 stmt_ann_t s_ann;
1898 int i, noutputs;
1899 const char **oconstraints;
1900 const char *constraint;
1901 bool allows_mem, allows_reg, is_inout;
1902 tree link;
1903
1904 s_ann = stmt_ann (stmt);
1905 noutputs = list_length (ASM_OUTPUTS (stmt));
1906 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
1907
1908 /* Gather all output operands. */
1909 for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
1910 {
1911 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1912 oconstraints[i] = constraint;
1913 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
1914 &allows_reg, &is_inout);
1915
1916 /* This should have been split in gimplify_asm_expr. */
1917 gcc_assert (!allows_reg || !is_inout);
1918
1919 /* Memory operands are addressable. Note that STMT needs the
1920 address of this operand. */
1921 if (!allows_reg && allows_mem)
1922 {
1923 tree t = get_base_address (TREE_VALUE (link));
1924 if (t && DECL_P (t) && s_ann)
1925 add_to_addressable_set (t, &s_ann->addresses_taken);
1926 }
1927
1928 get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
1929 }
1930
1931 /* Gather all input operands. */
1932 for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
1933 {
1934 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1935 parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
1936 &allows_mem, &allows_reg);
1937
1938 /* Memory operands are addressable. Note that STMT needs the
1939 address of this operand. */
1940 if (!allows_reg && allows_mem)
1941 {
1942 tree t = get_base_address (TREE_VALUE (link));
1943 if (t && DECL_P (t) && s_ann)
1944 add_to_addressable_set (t, &s_ann->addresses_taken);
1945 }
1946
1947 get_expr_operands (stmt, &TREE_VALUE (link), 0);
1948 }
1949
1950 /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
1951 for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
1952 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
1953 {
1954 unsigned i;
1955 bitmap_iterator bi;
1956
1957 s_ann->references_memory = true;
1958
1959 EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
1960 {
1961 tree var = referenced_var (i);
1962 add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
1963 }
1964
1965 EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
1966 {
1967 tree var = referenced_var (i);
1968
1969 /* Subvars are explicitly represented in this list, so we
1970 don't need the original to be added to the clobber ops,
1971 but the original *will* be in this list because we keep
1972 the addressability of the original variable up-to-date
1973 to avoid confusing the back-end. */
1974 if (var_can_have_subvars (var)
1975 && get_subvars_for_var (var) != NULL)
1976 continue;
1977
1978 add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
1979 }
1980 break;
1981 }
1982 }
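/* Illustration (hypothetical GIMPLE dump, not from the original sources):
   with call-clobbered globals 'a' and 'b', the "memory" clobber handling
   above gives the asm a VDEF for each of them, e.g.

     # a_3 = VDEF <a_2>
     # b_6 = VDEF <b_5>
     __asm__ __volatile__("" :  :  : "memory");

   which prevents memory accesses from being moved across the asm.  */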
1983
1984
1985 /* Scan operands for the assignment expression EXPR in statement STMT. */
1986
1987 static void
1988 get_modify_stmt_operands (tree stmt, tree expr)
1989 {
1990 /* First get operands from the RHS. */
1991 get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use);
1992
1993 /* For the LHS, use a regular definition (opf_def) for GIMPLE
1994 registers. If the LHS is a store to memory, we will need
1995 a preserving definition (VDEF).
1996
1997 Preserving definitions are those that modify a part of an
1998 aggregate object for which no subvars have been computed (or the
1999 reference does not correspond exactly to one of them). Stores
2000 through a pointer are also represented with VDEF operators.
2001
2002 We used to distinguish between preserving and killing definitions.
2003 We always emit preserving definitions now. */
2004 get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def);
2005 }
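/* Illustration (hypothetical GIMPLE, not from the original sources):

     y_1 = x_2 + 1;    LHS is a GIMPLE register -> real definition of y_1
     *p_3 = x_2;       LHS is a store to memory -> preserving definition,
                       e.g.  # SMT.7_5 = VDEF <SMT.7_4>

   The tag name SMT.7 is made up for the example.  */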
2006
2007
2008 /* Recursively scan the expression pointed to by EXPR_P in statement
2009 STMT. FLAGS is one of the OPF_* constants modifying how to
2010 interpret the operands found. */
2011
2012 static void
2013 get_expr_operands (tree stmt, tree *expr_p, int flags)
2014 {
2015 enum tree_code code;
2016 enum tree_code_class class;
2017 tree expr = *expr_p;
2018 stmt_ann_t s_ann = stmt_ann (stmt);
2019
2020 if (expr == NULL)
2021 return;
2022
2023 code = TREE_CODE (expr);
2024 class = TREE_CODE_CLASS (code);
2025
2026 switch (code)
2027 {
2028 case ADDR_EXPR:
2029 /* Taking the address of a variable does not represent a
2030 reference to it, but the fact that the statement takes its
2031 address will be of interest to some passes (e.g. alias
2032 resolution). */
2033 add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
2034
2035 /* If the address is invariant, there may be no interesting
2036 variable references inside. */
2037 if (is_gimple_min_invariant (expr))
2038 return;
2039
2040 /* Otherwise, there may be variables referenced inside but there
2041 should be no VUSEs created, since the referenced objects are
2042 not really accessed. The only operands that we should find
2043 here are ARRAY_REF indices which will always be real operands
2044 (GIMPLE does not allow non-registers as array indices). */
2045 flags |= opf_no_vops;
2046 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2047 return;
2048
2049 case SSA_NAME:
2050 case STRUCT_FIELD_TAG:
2051 case SYMBOL_MEMORY_TAG:
2052 case NAME_MEMORY_TAG:
2053 add_stmt_operand (expr_p, s_ann, flags);
2054 return;
2055
2056 case VAR_DECL:
2057 case PARM_DECL:
2058 case RESULT_DECL:
2059 {
2060 subvar_t svars;
2061
2062 /* Add the subvars for a variable, if it has subvars, to DEFS
2063 or USES. Otherwise, add the variable itself. Whether it
2064 goes to USES or DEFS depends on the operand flags. */
2065 if (var_can_have_subvars (expr)
2066 && (svars = get_subvars_for_var (expr)))
2067 {
2068 subvar_t sv;
2069 for (sv = svars; sv; sv = sv->next)
2070 add_stmt_operand (&sv->var, s_ann, flags);
2071 }
2072 else
2073 add_stmt_operand (expr_p, s_ann, flags);
2074
2075 return;
2076 }
2077
2078 case MISALIGNED_INDIRECT_REF:
2079 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2080 /* fall through */
2081
2082 case ALIGN_INDIRECT_REF:
2083 case INDIRECT_REF:
2084 get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
2085 return;
2086
2087 case TARGET_MEM_REF:
2088 get_tmr_operands (stmt, expr, flags);
2089 return;
2090
2091 case ARRAY_REF:
2092 case ARRAY_RANGE_REF:
2093 case COMPONENT_REF:
2094 case REALPART_EXPR:
2095 case IMAGPART_EXPR:
2096 {
2097 tree ref;
2098 HOST_WIDE_INT offset, size, maxsize;
2099 bool none = true;
2100
2101 /* This component reference becomes an access to all of the
2102 subvariables it can touch, if we can determine that, but
2103 *NOT* the real one. If we can't determine which fields we
2104 could touch, the recursion will eventually get to a
2105 variable and add *all* of its subvars, or whatever is the
2106 minimum correct subset. */
2107 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
2108 if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
2109 {
2110 subvar_t sv;
2111 subvar_t svars = get_subvars_for_var (ref);
2112
2113 for (sv = svars; sv; sv = sv->next)
2114 {
2115 bool exact;
2116
2117 if (overlap_subvar (offset, maxsize, sv->var, &exact))
2118 {
2119 int subvar_flags = flags;
2120 none = false;
2121 add_stmt_operand (&sv->var, s_ann, subvar_flags);
2122 }
2123 }
2124
2125 if (!none)
2126 flags |= opf_no_vops;
2127 }
2128 else if (TREE_CODE (ref) == INDIRECT_REF)
2129 {
2130 get_indirect_ref_operands (stmt, ref, flags, expr, offset,
2131 maxsize, false);
2132 flags |= opf_no_vops;
2133 }
2134
2135 /* Even if we found subvars above, we still need to recurse so that we
2136 see the immediate uses of d in s.a[d]; if s.a has a subvar, we would
2137 otherwise miss them.  */
2138 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2139
2140 if (code == COMPONENT_REF)
2141 {
2142 if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
2143 s_ann->has_volatile_ops = true;
2144 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2145 }
2146 else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
2147 {
2148 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2149 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2150 get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
2151 }
2152
2153 return;
2154 }
2155
2156 case WITH_SIZE_EXPR:
2157 /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
2158 and an rvalue reference to its second argument. */
2159 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2160 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2161 return;
2162
2163 case CALL_EXPR:
2164 get_call_expr_operands (stmt, expr);
2165 return;
2166
2167 case COND_EXPR:
2168 case VEC_COND_EXPR:
2169 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
2170 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
2171 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
2172 return;
2173
2174 case GIMPLE_MODIFY_STMT:
2175 get_modify_stmt_operands (stmt, expr);
2176 return;
2177
2178 case CONSTRUCTOR:
2179 {
2180 /* General aggregate CONSTRUCTORs have been decomposed, but they
2181 are still in use as the COMPLEX_EXPR equivalent for vectors. */
2182 constructor_elt *ce;
2183 unsigned HOST_WIDE_INT idx;
2184
2185 for (idx = 0;
2186 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
2187 idx++)
2188 get_expr_operands (stmt, &ce->value, opf_use);
2189
2190 return;
2191 }
2192
2193 case BIT_FIELD_REF:
2194 case TRUTH_NOT_EXPR:
2195 case VIEW_CONVERT_EXPR:
2196 do_unary:
2197 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2198 return;
2199
2200 case TRUTH_AND_EXPR:
2201 case TRUTH_OR_EXPR:
2202 case TRUTH_XOR_EXPR:
2203 case COMPOUND_EXPR:
2204 case OBJ_TYPE_REF:
2205 case ASSERT_EXPR:
2206 do_binary:
2207 {
2208 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2209 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2210 return;
2211 }
2212
2213 case DOT_PROD_EXPR:
2214 case REALIGN_LOAD_EXPR:
2215 {
2216 get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
2217 get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
2218 get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
2219 return;
2220 }
2221
2222 case BLOCK:
2223 case FUNCTION_DECL:
2224 case EXC_PTR_EXPR:
2225 case FILTER_EXPR:
2226 case LABEL_DECL:
2227 case CONST_DECL:
2228 case OMP_PARALLEL:
2229 case OMP_SECTIONS:
2230 case OMP_FOR:
2231 case OMP_SINGLE:
2232 case OMP_MASTER:
2233 case OMP_ORDERED:
2234 case OMP_CRITICAL:
2235 case OMP_RETURN:
2236 case OMP_CONTINUE:
2237 /* Expressions that make no memory references. */
2238 return;
2239
2240 default:
2241 if (class == tcc_unary)
2242 goto do_unary;
2243 if (class == tcc_binary || class == tcc_comparison)
2244 goto do_binary;
2245 if (class == tcc_constant || class == tcc_type)
2246 return;
2247 }
2248
2249 /* If we get here, something has gone wrong. */
2250 #ifdef ENABLE_CHECKING
2251 fprintf (stderr, "unhandled expression in get_expr_operands():\n");
2252 debug_tree (expr);
2253 fputs ("\n", stderr);
2254 #endif
2255 gcc_unreachable ();
2256 }
2257
2258
2259 /* Parse STMT looking for operands. When finished, the various
2260 build_* operand vectors will have potential operands in them. */
2261
2262 static void
2263 parse_ssa_operands (tree stmt)
2264 {
2265 enum tree_code code;
2266
2267 code = TREE_CODE (stmt);
2268 switch (code)
2269 {
2270 case GIMPLE_MODIFY_STMT:
2271 get_modify_stmt_operands (stmt, stmt);
2272 break;
2273
2274 case COND_EXPR:
2275 get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use);
2276 break;
2277
2278 case SWITCH_EXPR:
2279 get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use);
2280 break;
2281
2282 case ASM_EXPR:
2283 get_asm_expr_operands (stmt);
2284 break;
2285
2286 case RETURN_EXPR:
2287 get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use);
2288 break;
2289
2290 case GOTO_EXPR:
2291 get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use);
2292 break;
2293
2294 case LABEL_EXPR:
2295 get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use);
2296 break;
2297
2298 case BIND_EXPR:
2299 case CASE_LABEL_EXPR:
2300 case TRY_CATCH_EXPR:
2301 case TRY_FINALLY_EXPR:
2302 case EH_FILTER_EXPR:
2303 case CATCH_EXPR:
2304 case RESX_EXPR:
2305 /* These nodes contain no variable references. */
2306 break;
2307
2308 default:
2309 /* Notice that if get_expr_operands tries to use &STMT as the
2310 operand pointer (which may only happen for USE operands), we
2311 will fail in add_stmt_operand. This default will handle
2312 statements like empty statements, or CALL_EXPRs that may
2313 appear on the RHS of a statement or as statements themselves. */
2314 get_expr_operands (stmt, &stmt, opf_use);
2315 break;
2316 }
2317 }
2318
2319
2320 /* Create an operands cache for STMT. */
2321
2322 static void
2323 build_ssa_operands (tree stmt)
2324 {
2325 stmt_ann_t ann = get_stmt_ann (stmt);
2326
2327 /* Initially assume that the statement has no volatile operands and
2328 makes no memory references. */
2329 ann->has_volatile_ops = false;
2330 ann->references_memory = false;
2331
2332 start_ssa_stmt_operands ();
2333 parse_ssa_operands (stmt);
2334 operand_build_sort_virtual (build_vuses);
2335 operand_build_sort_virtual (build_vdefs);
2336 finalize_ssa_stmt_operands (stmt);
2337
2338 /* For added safety, assume that statements with volatile operands
2339 also reference memory. */
2340 if (ann->has_volatile_ops)
2341 ann->references_memory = true;
2342 }
2343
2344
2345 /* Free any operands vectors in OPS. */
2346
2347 void
2348 free_ssa_operands (stmt_operands_p ops)
2349 {
2350 ops->def_ops = NULL;
2351 ops->use_ops = NULL;
2352 ops->vdef_ops = NULL;
2353 ops->vuse_ops = NULL;
2354 BITMAP_FREE (ops->loads);
2355 BITMAP_FREE (ops->stores);
2356 }
2357
2358
2359 /* Update the operand cache of statement STMT.  */
2360
2361 void
2362 update_stmt_operands (tree stmt)
2363 {
2364 stmt_ann_t ann = get_stmt_ann (stmt);
2365
2366 /* If update_stmt_operands is called before SSA is initialized, do
2367 nothing. */
2368 if (!ssa_operands_active ())
2369 return;
2370
2371 /* The optimizers cannot handle statements that are nothing but a
2372 _DECL. This indicates a bug in the gimplifier. */
2373 gcc_assert (!SSA_VAR_P (stmt));
2374
2375 timevar_push (TV_TREE_OPS);
2376
2377 gcc_assert (ann->modified);
2378 build_ssa_operands (stmt);
2379 ann->modified = 0;
2380
2381 timevar_pop (TV_TREE_OPS);
2382 }
2383
2384
2385 /* Copies virtual operands from SRC to DST. */
2386
2387 void
2388 copy_virtual_operands (tree dest, tree src)
2389 {
2390 int i, n;
2391 voptype_p src_vuses, dest_vuses;
2392 voptype_p src_vdefs, dest_vdefs;
2393 struct voptype_d vuse;
2394 struct voptype_d vdef;
2395 stmt_ann_t dest_ann;
2396
2397 VDEF_OPS (dest) = NULL;
2398 VUSE_OPS (dest) = NULL;
2399
2400 dest_ann = get_stmt_ann (dest);
2401 BITMAP_FREE (dest_ann->operands.loads);
2402 BITMAP_FREE (dest_ann->operands.stores);
2403
2404 if (LOADED_SYMS (src))
2405 {
2406 dest_ann->operands.loads = BITMAP_ALLOC (NULL);
2407 bitmap_copy (dest_ann->operands.loads, LOADED_SYMS (src));
2408 }
2409
2410 if (STORED_SYMS (src))
2411 {
2412 dest_ann->operands.stores = BITMAP_ALLOC (NULL);
2413 bitmap_copy (dest_ann->operands.stores, STORED_SYMS (src));
2414 }
2415
2416 /* Copy all the VUSE operators and corresponding operands. */
2417 dest_vuses = &vuse;
2418 for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next)
2419 {
2420 n = VUSE_NUM (src_vuses);
2421 dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses);
2422 for (i = 0; i < n; i++)
2423 SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));
2424
2425 if (VUSE_OPS (dest) == NULL)
2426 VUSE_OPS (dest) = vuse.next;
2427 }
2428
2429 /* Copy all the VDEF operators and corresponding operands. */
2430 dest_vdefs = &vdef;
2431 for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next)
2432 {
2433 n = VUSE_NUM (src_vdefs);
2434 dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs);
2435 VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
2436 for (i = 0; i < n; i++)
2437 SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));
2438
2439 if (VDEF_OPS (dest) == NULL)
2440 VDEF_OPS (dest) = vdef.next;
2441 }
2442 }
2443
2444
2445 /* Specifically for use in DOM's expression analysis.  Given a store, we
2446 create an artificial stmt which looks like a load from the store; this
2447 can be used to eliminate redundant loads.  OLD_STMT is the store stmt,
2448 and NEW_STMT is the new load, which represents a load of the values
2449 stored.  */
2450
2451 void
2452 create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt)
2453 {
2454 tree op;
2455 ssa_op_iter iter;
2456 use_operand_p use_p;
2457 unsigned i;
2458
2459 get_stmt_ann (new_stmt);
2460
2461 /* Process NEW_STMT looking for operands. */
2462 start_ssa_stmt_operands ();
2463 parse_ssa_operands (new_stmt);
2464
2465 for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
2466 if (TREE_CODE (op) != SSA_NAME)
2467 var_ann (op)->in_vuse_list = false;
2468
2469 for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
2470 if (TREE_CODE (op) != SSA_NAME)
2471 var_ann (op)->in_vdef_list = false;
2472
2473 /* Remove any virtual operands that were found. */
2474 VEC_truncate (tree, build_vdefs, 0);
2475 VEC_truncate (tree, build_vuses, 0);
2476
2477 /* For each VDEF on the original statement, we want to create a
2478 VUSE of the VDEF result operand on the new statement. */
2479 FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
2480 append_vuse (op);
2481
2482 finalize_ssa_stmt_operands (new_stmt);
2483
2484 /* All uses in this fake stmt must not be in the immediate use lists. */
2485 FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
2486 delink_imm_use (use_p);
2487 }
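/* Illustration (not from the original sources): for a store such as

     # SMT.7_5 = VDEF <SMT.7_4>
     *p_1 = x_2;

   the artificial load  x_2 = *p_1  built by the caller is given a VUSE of
   the VDEF result SMT.7_5 by the code above, so DOM can match subsequent
   loads of *p_1 against it.  All names here are hypothetical.  */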
2488
2489
2490 /* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is made
2491 to test the validity of the swap operation.  */
2492
2493 void
2494 swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
2495 {
2496 tree op0, op1;
2497 op0 = *exp0;
2498 op1 = *exp1;
2499
2500 /* If the operand cache is active, attempt to preserve the relative
2501 positions of these two operands in their respective immediate use
2502 lists. */
2503 if (ssa_operands_active () && op0 != op1)
2504 {
2505 use_optype_p use0, use1, ptr;
2506 use0 = use1 = NULL;
2507
2508 /* Find the 2 operands in the cache, if they are there. */
2509 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2510 if (USE_OP_PTR (ptr)->use == exp0)
2511 {
2512 use0 = ptr;
2513 break;
2514 }
2515
2516 for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
2517 if (USE_OP_PTR (ptr)->use == exp1)
2518 {
2519 use1 = ptr;
2520 break;
2521 }
2522
2523 /* If one (or both) of the uses has no operand entry, there isn't much
2524 we can do at this point.  Presumably we don't need to worry about it.  */
2525 if (use0 && use1)
2526 {
2527 tree *tmp = USE_OP_PTR (use1)->use;
2528 USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
2529 USE_OP_PTR (use0)->use = tmp;
2530 }
2531 }
2532
2533 /* Now swap the data. */
2534 *exp0 = op1;
2535 *exp1 = op0;
2536 }
2537
2538
2539 /* Add the base address of REF to the set *ADDRESSES_TAKEN. If
2540 *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
2541 a single variable whose address has been taken or any other valid
2542 GIMPLE memory reference (structure reference, array, etc). If the
2543 base address of REF is a decl that has sub-variables, also add all
2544 of its sub-variables. */
2545
2546 void
2547 add_to_addressable_set (tree ref, bitmap *addresses_taken)
2548 {
2549 tree var;
2550 subvar_t svars;
2551
2552 gcc_assert (addresses_taken);
2553
2554 /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
2555 as the only thing we take the address of. If VAR is a structure,
2556 taking the address of a field means that the whole structure may
2557 be referenced using pointer arithmetic. See PR 21407 and the
2558 ensuing mailing list discussion. */
2559 var = get_base_address (ref);
2560 if (var && SSA_VAR_P (var))
2561 {
2562 if (*addresses_taken == NULL)
2563 *addresses_taken = BITMAP_GGC_ALLOC ();
2564
2565 if (var_can_have_subvars (var)
2566 && (svars = get_subvars_for_var (var)))
2567 {
2568 subvar_t sv;
2569 for (sv = svars; sv; sv = sv->next)
2570 {
2571 bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
2572 TREE_ADDRESSABLE (sv->var) = 1;
2573 }
2574 }
2575 else
2576 {
2577 bitmap_set_bit (*addresses_taken, DECL_UID (var));
2578 TREE_ADDRESSABLE (var) = 1;
2579 }
2580 }
2581 }
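/* Illustration (not from the original sources): for a statement such as
   p_1 = &s.f, get_base_address returns 's', so 's' -- or every one of its
   sub-variables, if it has any -- is added to the set and marked
   TREE_ADDRESSABLE, rather than only the sub-variable for 's.f'.  */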
2582
2583
2584 /* Scan the immediate_use list for VAR, making sure it is linked properly.
2585 Return TRUE if there is a problem and emit an error message to F. */
2586
2587 bool
2588 verify_imm_links (FILE *f, tree var)
2589 {
2590 use_operand_p ptr, prev, list;
2591 int count;
2592
2593 gcc_assert (TREE_CODE (var) == SSA_NAME);
2594
2595 list = &(SSA_NAME_IMM_USE_NODE (var));
2596 gcc_assert (list->use == NULL);
2597
2598 if (list->prev == NULL)
2599 {
2600 gcc_assert (list->next == NULL);
2601 return false;
2602 }
2603
2604 prev = list;
2605 count = 0;
2606 for (ptr = list->next; ptr != list; )
2607 {
2608 if (prev != ptr->prev)
2609 goto error;
2610
2611 if (ptr->use == NULL)
2612 goto error; /* 2 roots, or SAFE guard node. */
2613 else if (*(ptr->use) != var)
2614 goto error;
2615
2616 prev = ptr;
2617 ptr = ptr->next;
2618
2619 /* Avoid infinite loops. 50,000,000 uses probably indicates a
2620 problem. */
2621 if (count++ > 50000000)
2622 goto error;
2623 }
2624
2625 /* Verify list in the other direction. */
2626 prev = list;
2627 for (ptr = list->prev; ptr != list; )
2628 {
2629 if (prev != ptr->next)
2630 goto error;
2631 prev = ptr;
2632 ptr = ptr->prev;
2633 if (count-- < 0)
2634 goto error;
2635 }
2636
2637 if (count != 0)
2638 goto error;
2639
2640 return false;
2641
2642 error:
2643 if (ptr->stmt && stmt_modified_p (ptr->stmt))
2644 {
2645 fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
2646 print_generic_stmt (f, ptr->stmt, TDF_SLIM);
2647 }
2648 fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
2649 (void *)ptr->use);
2650 print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
2651 fprintf(f, "\n");
2652 return true;
2653 }
2654
2655
2656 /* Dump all the immediate uses of VAR to FILE.  */
2657
2658 void
2659 dump_immediate_uses_for (FILE *file, tree var)
2660 {
2661 imm_use_iterator iter;
2662 use_operand_p use_p;
2663
2664 gcc_assert (var && TREE_CODE (var) == SSA_NAME);
2665
2666 print_generic_expr (file, var, TDF_SLIM);
2667 fprintf (file, " : -->");
2668 if (has_zero_uses (var))
2669 fprintf (file, " no uses.\n");
2670 else
2671 if (has_single_use (var))
2672 fprintf (file, " single use.\n");
2673 else
2674 fprintf (file, "%d uses.\n", num_imm_uses (var));
2675
2676 FOR_EACH_IMM_USE_FAST (use_p, iter, var)
2677 {
2678 if (use_p->stmt == NULL && use_p->use == NULL)
2679 fprintf (file, "***end of stmt iterator marker***\n");
2680 else
2681 if (!is_gimple_reg (USE_FROM_PTR (use_p)))
2682 print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS|TDF_MEMSYMS);
2683 else
2684 print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
2685 }
2686 fprintf(file, "\n");
2687 }
2688
2689
2690 /* Dump all the immediate uses to FILE. */
2691
2692 void
2693 dump_immediate_uses (FILE *file)
2694 {
2695 tree var;
2696 unsigned int x;
2697
2698 fprintf (file, "Immediate_uses: \n\n");
2699 for (x = 1; x < num_ssa_names; x++)
2700 {
2701 var = ssa_name(x);
2702 if (!var)
2703 continue;
2704 dump_immediate_uses_for (file, var);
2705 }
2706 }
2707
2708
2709 /* Dump def-use edges on stderr. */
2710
2711 void
2712 debug_immediate_uses (void)
2713 {
2714 dump_immediate_uses (stderr);
2715 }
2716
2717
2718 /* Dump def-use edges of VAR on stderr.  */
2719
2720 void
2721 debug_immediate_uses_for (tree var)
2722 {
2723 dump_immediate_uses_for (stderr, var);
2724 }
2725
2726
2727 /* Create a new change buffer for the statement pointed to by STMT_P and
2728 push the buffer into SCB_STACK. Each change buffer
2729 records state information needed to determine what changed in the
2730 statement. Mainly, this keeps track of symbols that may need to be
2731 put into SSA form, SSA name replacements and other information
2732 needed to keep the SSA form up to date. */
2733
2734 void
2735 push_stmt_changes (tree *stmt_p)
2736 {
2737 tree stmt;
2738 scb_t buf;
2739
2740 stmt = *stmt_p;
2741
2742 /* It makes no sense to keep track of PHI nodes. */
2743 if (TREE_CODE (stmt) == PHI_NODE)
2744 return;
2745
2746 buf = xmalloc (sizeof *buf);
2747 memset (buf, 0, sizeof *buf);
2748
2749 buf->stmt_p = stmt_p;
2750
2751 if (stmt_references_memory_p (stmt))
2752 {
2753 tree op;
2754 ssa_op_iter i;
2755
2756 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
2757 {
2758 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2759 if (buf->loads == NULL)
2760 buf->loads = BITMAP_ALLOC (NULL);
2761 bitmap_set_bit (buf->loads, DECL_UID (sym));
2762 }
2763
2764 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
2765 {
2766 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2767 if (buf->stores == NULL)
2768 buf->stores = BITMAP_ALLOC (NULL);
2769 bitmap_set_bit (buf->stores, DECL_UID (sym));
2770 }
2771 }
2772
2773 VEC_safe_push (scb_t, heap, scb_stack, buf);
2774 }
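/* Illustration (a typical caller pattern, not taken from this file; the
   folding helper is hypothetical):

     push_stmt_changes (&stmt);
     changed = try_to_fold_or_propagate_into (&stmt);
     if (changed)
       pop_stmt_changes (&stmt);
     else
       discard_stmt_changes (&stmt);

   pop_stmt_changes re-scans the operands and marks symbols whose load or
   store sets changed for renaming; discard_stmt_changes throws the buffer
   away and avoids the re-scan.  */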
2775
2776
2777 /* Given two sets S1 and S2, mark the symbols that differ in S1 and S2
2778 for renaming. The set to mark for renaming is (S1 & ~S2) | (S2 & ~S1). */
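/* For instance (illustration only), with S1 = { a, b } and S2 = { b, c },
   the set marked for renaming is { a, c }; 'b' belongs to both sets and
   needs no renaming.  */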
2779
2780 static void
2781 mark_difference_for_renaming (bitmap s1, bitmap s2)
2782 {
2783 if (s1 == NULL && s2 == NULL)
2784 return;
2785
2786 if (s1 && s2 == NULL)
2787 mark_set_for_renaming (s1);
2788 else if (s1 == NULL && s2)
2789 mark_set_for_renaming (s2);
2790 else if (!bitmap_equal_p (s1, s2))
2791 {
2792 bitmap t1 = BITMAP_ALLOC (NULL);
2793 bitmap t2 = BITMAP_ALLOC (NULL);
2794
2795 bitmap_and_compl (t1, s1, s2);
2796 bitmap_and_compl (t2, s2, s1);
2797 bitmap_ior_into (t1, t2);
2798 mark_set_for_renaming (t1);
2799
2800 BITMAP_FREE (t1);
2801 BITMAP_FREE (t2);
2802 }
2803 }
2804
2805
2806 /* Pop the top SCB from SCB_STACK and act on the differences between
2807 what was recorded by push_stmt_changes and the current state of
2808 the statement. */
2809
2810 void
2811 pop_stmt_changes (tree *stmt_p)
2812 {
2813 tree op, stmt;
2814 ssa_op_iter iter;
2815 bitmap loads, stores;
2816 scb_t buf;
2817
2818 stmt = *stmt_p;
2819
2820 /* It makes no sense to keep track of PHI nodes. */
2821 if (TREE_CODE (stmt) == PHI_NODE)
2822 return;
2823
2824 buf = VEC_pop (scb_t, scb_stack);
2825 gcc_assert (stmt_p == buf->stmt_p);
2826
2827 /* Force an operand re-scan on the statement and mark any newly
2828 exposed variables. */
2829 update_stmt (stmt);
2830
2831 /* Determine whether any memory symbols need to be renamed. If the
2832 sets of loads and stores are different after the statement is
2833 modified, then the affected symbols need to be renamed.
2834
2835 Note that it may be possible for the statement to not reference
2836 memory anymore, but we still need to act on the differences in
2837 the sets of symbols. */
2838 loads = stores = NULL;
2839 if (stmt_references_memory_p (stmt))
2840 {
2841 tree op;
2842 ssa_op_iter i;
2843
2844 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
2845 {
2846 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2847 if (loads == NULL)
2848 loads = BITMAP_ALLOC (NULL);
2849 bitmap_set_bit (loads, DECL_UID (sym));
2850 }
2851
2852 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
2853 {
2854 tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
2855 if (stores == NULL)
2856 stores = BITMAP_ALLOC (NULL);
2857 bitmap_set_bit (stores, DECL_UID (sym));
2858 }
2859 }
2860
2861 /* If LOADS is different from BUF->LOADS, the affected
2862 symbols need to be marked for renaming. */
2863 mark_difference_for_renaming (loads, buf->loads);
2864
2865 /* Similarly for STORES and BUF->STORES. */
2866 mark_difference_for_renaming (stores, buf->stores);
2867
2868 /* Mark all the naked GIMPLE register operands for renaming. */
2869 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
2870 if (DECL_P (op))
2871 mark_sym_for_renaming (op);
2872
2873 /* FIXME, need to add more finalizers here. Cleanup EH info,
2874 recompute invariants for address expressions, add
2875 SSA replacement mappings, etc. For instance, given
2876 testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of
2877 the form:
2878
2879 # SMT.4_20 = VDEF <SMT.4_16>
2880 D.1576_11 = 1.0e+0;
2881
2882 So, the VDEF will disappear, but instead of marking SMT.4 for
2883 renaming it would be far more efficient to establish a
2884 replacement mapping that would replace every reference of
2885 SMT.4_20 with SMT.4_16. */
2886
2887 /* Free memory used by the buffer. */
2888 BITMAP_FREE (buf->loads);
2889 BITMAP_FREE (buf->stores);
2890 BITMAP_FREE (loads);
2891 BITMAP_FREE (stores);
2892 buf->stmt_p = NULL;
2893 free (buf);
2894 }
2895
2896
2897 /* Discard the topmost change buffer from SCB_STACK. This is useful
2898 when the caller realizes that it did not actually modify the
2899 statement. It avoids the expensive operand re-scan. */
2900
2901 void
2902 discard_stmt_changes (tree *stmt_p)
2903 {
2904 scb_t buf;
2905 tree stmt;
2906
2907 /* It makes no sense to keep track of PHI nodes. */
2908 stmt = *stmt_p;
2909 if (TREE_CODE (stmt) == PHI_NODE)
2910 return;
2911
2912 buf = VEC_pop (scb_t, scb_stack);
2913 gcc_assert (stmt_p == buf->stmt_p);
2914
2915 /* Free memory used by the buffer. */
2916 BITMAP_FREE (buf->loads);
2917 BITMAP_FREE (buf->stores);
2918 buf->stmt_p = NULL;
2919 free (buf);
2920 }
2921
2922
2923 /* Returns true if statement STMT may access memory. */
2924
2925 bool
2926 stmt_references_memory_p (tree stmt)
2927 {
2928 if (!gimple_ssa_operands (cfun)->ops_active || TREE_CODE (stmt) == PHI_NODE)
2929 return false;
2930
2931 return stmt_ann (stmt)->references_memory;
2932 }
2933
2934
2935 /* Return the memory partition tag (MPT) associated with memory
2936 symbol SYM. From a correctness standpoint, memory partitions can
2937 be assigned in any arbitrary fashion as long as this rule is
2938 observed: Given two memory partitions MPT.i and MPT.j, they must
2939 not contain symbols in common.
2940
2941 Memory partitions are used when putting the program into Memory-SSA
2942 form. In particular, in Memory-SSA PHI nodes are not computed for
2943 individual memory symbols. They are computed for memory
2944 partitions. This reduces the amount of PHI nodes in the SSA graph
2945 at the expense of precision (i.e., it makes unrelated stores affect
2946 each other).
2947
2948 However, it is possible to increase precision by changing this
2949 partitioning scheme. For instance, if the partitioning scheme is
2950 such that get_mpt_for is the identity function (that is,
2951 get_mpt_for (s) = s), this will result in ultimate precision at the
2952 expense of huge SSA webs.
2953
2954 At the other extreme, a partitioning scheme that groups all the
2955 symbols in the same set results in minimal SSA webs and almost
2956 total loss of precision. */
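/* Illustration (hypothetical GIMPLE dump, not from the original sources):
   if 'a' and 'b' were both assigned to partition MPT.8, a store to 'a'
   would appear as

     # MPT.8_4 = VDEF <MPT.8_3>
     a = ...;

   and would therefore appear to conflict with loads and stores of 'b' as
   well.  The tag name MPT.8 is made up for the example.  */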
2957
2958 tree
2959 get_mpt_for (tree sym)
2960 {
2961 tree mpt;
2962
2963 /* Don't create a new tag unnecessarily. */
2964 mpt = memory_partition (sym);
2965 if (mpt == NULL_TREE)
2966 {
2967 mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
2968 TREE_ADDRESSABLE (mpt) = 0;
2969 MTAG_GLOBAL (mpt) = 1;
2970 add_referenced_var (mpt);
2971 VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
2972 MPT_SYMBOLS (mpt) = BITMAP_ALLOC (NULL);
2973 set_memory_partition (sym, mpt);
2974 }
2975
2976 return mpt;
2977 }
2978
2979
2980 /* Dump memory partition information to FILE. */
2981
2982 void
2983 dump_memory_partitions (FILE *file)
2984 {
2985 unsigned i, npart;
2986 unsigned long nsyms;
2987 tree mpt;
2988
2989 fprintf (file, "\nMemory partitions\n\n");
2990 for (i = 0, npart = 0, nsyms = 0;
2991 VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
2992 i++)
2993 {
2994 if (mpt)
2995 {
2996 bitmap syms = MPT_SYMBOLS (mpt);
2997 unsigned long n = bitmap_count_bits (syms);
2998
2999 fprintf (file, "#%u: ", i);
3000 print_generic_expr (file, mpt, 0);
3001 fprintf (file, ": %lu elements: ", n);
3002 dump_decl_set (file, syms);
3003 npart++;
3004 nsyms += n;
3005 }
3006 }
3007
3008 fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
3009 }
3010
3011
3012 /* Dump memory partition information to stderr. */
3013
3014 void
3015 debug_memory_partitions (void)
3016 {
3017 dump_memory_partitions (stderr);
3018 }