/* Global constant/copy propagation for RTL.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
   2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "params.h"
#include "cselib.h"
#include "intl.h"
#include "obstack.h"
#include "timevar.h"
#include "tree-pass.h"
#include "hashtab.h"
#include "df.h"
#include "dbgcnt.h"
#include "target.h"
#include "cfgloop.h"

\f
/* An obstack for our working variables.  */
static struct obstack cprop_obstack;

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once,
   the last appearance is used.  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
};

typedef struct occr *occr_t;
DEF_VEC_P (occr_t);
DEF_VEC_ALLOC_P (occr_t, heap);

/* Hash table entry for assignment expressions.  */

struct expr
{
  /* The expression (DEST := SRC).  */
  rtx dest;
  rtx src;

  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and whose operands are not modified by following statements
     in the basic block [including this insn].  */
  struct occr *avail_occr;
};

/* Hash table for copy propagation expressions.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table_d
{
  /* The table itself.
     This is an array of `set_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;
};

/* Copy propagation hash table.  */
static struct hash_table_d set_hash_table;

/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Array of indexes of expressions for implicit set patterns indexed by basic
   block index.  In other words, implicit_set_indexes[i] is the bitmap_index
   of the expression whose RTX is implicit_sets[i].  */
static int *implicit_set_indexes;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start or end of the basic block while traversing that block.  */
static regset reg_set_bitmap;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* Number of local constants propagated.  */
static int local_const_prop_count;
/* Number of local copies propagated.  */
static int local_copy_prop_count;
/* Number of global constants propagated.  */
static int global_const_prop_count;
/* Number of global copies propagated.  */
static int global_copy_prop_count;

#define GOBNEW(T)       ((T *) cprop_alloc (sizeof (T)))
#define GOBNEWVAR(T, S) ((T *) cprop_alloc ((S)))

/* Cover function to obstack_alloc.  */

static void *
cprop_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&cprop_obstack, size);
}
\f
/* Return nonzero if register X is unchanged from INSN to the end
   of INSN's basic block.  */

static int
reg_available_p (const_rtx x, const_rtx insn ATTRIBUTE_UNUSED)
{
  return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
}

/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (int regno, int hash_table_size)
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}
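
/* For illustration (not part of the algorithm, register numbers are
   hypothetical): because sets are hashed purely on the destination
   register number, the two assignments

     (set (reg 42) (const_int 7))
     (set (reg 42) (reg 17))

   always land in the same bucket, 42 % hash_table_size, so lookup_set
   below only has to walk a single chain to find every recorded set of
   reg 42.  */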

/* Insert assignment DEST:=SRC from INSN in the hash table.
   DEST is a register and SRC is a register or a suitable constant.
   If the assignment is already present in the table, record it as
   the last occurrence in INSN's basic block.
   IMPLICIT is true if it's an implicit set, false otherwise.  */

static void
insert_set_in_table (rtx dest, rtx src, rtx insn, struct hash_table_d *table,
                     bool implicit)
{
  bool found = false;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr;

  hash = hash_set (REGNO (dest), table->size);

  for (cur_expr = table->table[hash]; cur_expr;
       cur_expr = cur_expr->next_same_hash)
    {
      if (dest == cur_expr->dest
          && src == cur_expr->src)
        {
          found = true;
          break;
        }
      last_expr = cur_expr;
    }

  if (! found)
    {
      cur_expr = GOBNEW (struct expr);
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        table->table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
         We must copy X because it can be modified when copy propagation is
         performed on its operands.  */
      cur_expr->dest = copy_rtx (dest);
      cur_expr->src = copy_rtx (src);
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  if (cur_occr
      && BLOCK_FOR_INSN (cur_occr->insn) == BLOCK_FOR_INSN (insn))
    {
      /* Found another instance of the expression in the same basic block.
         Prefer this occurrence to the currently recorded one.  We want
         the last one in the block and the block is scanned from start
         to end.  */
      cur_occr->insn = insn;
    }
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = GOBNEW (struct occr);
      bytes_used += sizeof (struct occr);
      cur_occr->insn = insn;
      cur_occr->next = cur_expr->avail_occr;
      cur_expr->avail_occr = cur_occr;
    }

  /* Record bitmap_index of the implicit set in implicit_set_indexes.  */
  if (implicit)
    implicit_set_indexes[BLOCK_FOR_INSN (insn)->index]
      = cur_expr->bitmap_index;
}

/* Determine whether the rtx X should be treated as a constant for CPROP.
   Since X might be inserted more than once we have to take care that it
   is sharable.  */

static bool
cprop_constant_p (const_rtx x)
{
  return CONSTANT_P (x) && (GET_CODE (x) != CONST || shared_const_p (x));
}
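
/* For illustration (not part of the algorithm): plain constants such as
   CONST_INT, CONST_DOUBLE, SYMBOL_REF and LABEL_REF all satisfy
   cprop_constant_p.  A wrapped expression such as

     (const (plus (symbol_ref "x") (const_int 4)))

   qualifies only when shared_const_p says it may be used in more than
   one place without unsharing.  */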

/* Scan SET present in INSN and add an entry to the hash TABLE.
   IMPLICIT is true if it's an implicit set, false otherwise.  */

static void
hash_scan_set (rtx set, rtx insn, struct hash_table_d *table, bool implicit)
{
  rtx src = SET_SRC (set);
  rtx dest = SET_DEST (set);

  if (REG_P (dest)
      && ! HARD_REGISTER_P (dest)
      && reg_available_p (dest, insn)
      && can_copy_p (GET_MODE (dest)))
    {
      /* See if a REG_EQUAL note shows this to be equivalent to a simpler
         expression.

         This allows us to do a single CPROP pass and still eliminate
         redundant constants, addresses or other expressions that are
         constructed with multiple instructions.

         However, keep the original SRC if INSN is a simple reg-reg move.
         In this case, there will almost always be a REG_EQUAL note on the
         insn that sets SRC.  By recording the REG_EQUAL value here as SRC
         for INSN, we miss copy propagation opportunities.

         Note that this does not impede profitable constant propagations.
         We "look through" reg-reg sets in lookup_set.  */
      rtx note = find_reg_equal_equiv_note (insn);
      if (note != 0
          && REG_NOTE_KIND (note) == REG_EQUAL
          && !REG_P (src)
          && cprop_constant_p (XEXP (note, 0)))
        src = XEXP (note, 0), set = gen_rtx_SET (VOIDmode, dest, src);

      /* Record sets for constant/copy propagation.  */
      if ((REG_P (src)
           && src != dest
           && ! HARD_REGISTER_P (src)
           && reg_available_p (src, insn))
          || cprop_constant_p (src))
        insert_set_in_table (dest, src, insn, table, implicit);
    }
}
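
/* For illustration (a hypothetical example; the insns and note are made
   up): a constant built in two steps,

     (set (reg 100) (const_int 65536))
     (set (reg 100) (ior:SI (reg 100) (const_int 1)))   ;; REG_EQUAL 65537

   is recorded as "reg 100 := (const_int 65537)" thanks to the REG_EQUAL
   note on the second insn, so a single CPROP pass can propagate the full
   constant even though no one insn computes it directly.  */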

/* Process INSN and add hash table entries as appropriate.  */

static void
hash_scan_insn (rtx insn, struct hash_table_d *table)
{
  rtx pat = PATTERN (insn);
  int i;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET)
    hash_scan_set (pat, insn, table, false);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx x = XVECEXP (pat, 0, i);

        if (GET_CODE (x) == SET)
          hash_scan_set (x, insn, table, false);
      }
}

/* Dump the hash table TABLE to file FILE under the name NAME.  */

static void
dump_hash_table (FILE *file, const char *name, struct hash_table_d *table)
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;
  struct expr *expr;

  flat_table = XCNEWVEC (struct expr *, table->n_elems);
  hash_val = XNEWVEC (unsigned int, table->n_elems);

  for (i = 0; i < (int) table->size; i++)
    for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
      {
        flat_table[expr->bitmap_index] = expr;
        hash_val[expr->bitmap_index] = i;
      }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
           name, table->size, table->n_elems);

  for (i = 0; i < (int) table->n_elems; i++)
    if (flat_table[i] != 0)
      {
        expr = flat_table[i];
        fprintf (file, "Index %d (hash value %d)\n  ",
                 expr->bitmap_index, hash_val[i]);
        print_rtl (file, expr->dest);
        fprintf (file, " := ");
        print_rtl (file, expr->src);
        fprintf (file, "\n");
      }

  fprintf (file, "\n");

  free (flat_table);
  free (hash_val);
}

/* Record as unavailable all registers that are DEF operands of INSN.  */

static void
make_set_regs_unavailable (rtx insn)
{
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
  df_ref *def_rec;

  for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
    SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (*def_rec));
}

/* Top level function to create an assignment hash table.

   Assignment entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform const/copy propagation on,
   - none of the operands or target are subsequently modified in the block.

   Currently src must be a pseudo-reg or a const_int.

   TABLE is the table computed.  */

static void
compute_hash_table_work (struct hash_table_d *table)
{
  basic_block bb;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = ALLOC_REG_SET (NULL);

  FOR_EACH_BB (bb)
    {
      rtx insn;

      /* Reset tables used to keep track of what's not yet invalid [since
         the end of the block].  */
      CLEAR_REG_SET (reg_set_bitmap);

      /* Go over all insns from the last to the first.  This is convenient
         for tracking available registers, i.e. not set between INSN and
         the end of the basic block BB.  */
      FOR_BB_INSNS_REVERSE (bb, insn)
        {
          /* Only real insns are interesting.  */
          if (!NONDEBUG_INSN_P (insn))
            continue;

          /* Record interesting sets from INSN in the hash table.  */
          hash_scan_insn (insn, table);

          /* Any registers set in INSN will make SETs above it not AVAIL.  */
          make_set_regs_unavailable (insn);
        }

      /* Insert implicit sets in the hash table, pretending they appear as
         insns at the head of the basic block.  */
      if (implicit_sets[bb->index] != NULL_RTX)
        hash_scan_set (implicit_sets[bb->index], BB_HEAD (bb), table, true);
    }

  FREE_REG_SET (reg_set_bitmap);
}

/* Allocate space for the set/expr hash TABLE.
   The function's maximum insn count is used to determine the number of
   buckets to use.  */

static void
alloc_hash_table (struct hash_table_d *table)
{
  int n;

  n = get_max_insn_count ();

  table->size = n / 4;
  if (table->size < 11)
    table->size = 11;

  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  table->size |= 1;
  n = table->size * sizeof (struct expr *);
  table->table = XNEWVAR (struct expr *, n);
}

/* Free things allocated by alloc_hash_table.  */

static void
free_hash_table (struct hash_table_d *table)
{
  free (table->table);
}

/* Compute the hash TABLE for doing copy/const propagation.  */

static void
compute_hash_table (struct hash_table_d *table)
{
  /* Initialize count of number of entries in hash table.  */
  table->n_elems = 0;
  memset (table->table, 0, table->size * sizeof (struct expr *));

  compute_hash_table_work (table);
}
\f
/* Expression tracking support.  */

/* Lookup REGNO in the set TABLE.  The result is a pointer to the
   table entry, or NULL if not found.  */

static struct expr *
lookup_set (unsigned int regno, struct hash_table_d *table)
{
  unsigned int hash = hash_set (regno, table->size);
  struct expr *expr;

  expr = table->table[hash];

  while (expr && REGNO (expr->dest) != regno)
    expr = expr->next_same_hash;

  return expr;
}

/* Return the next entry for REGNO in list EXPR.  */

static struct expr *
next_set (unsigned int regno, struct expr *expr)
{
  do
    expr = expr->next_same_hash;
  while (expr && REGNO (expr->dest) != regno);

  return expr;
}
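
/* For illustration (a sketch of a hypothetical caller): the idiom for
   walking every recorded set of a given register is

     for (set = lookup_set (regno, &set_hash_table);
          set != NULL;
          set = next_set (regno, set))
       ...

   which is how find_avail_set and find_bypass_set below scan the chain
   for an entry whose bit is live in the relevant availability bitmap.  */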

/* Reset tables used to keep track of what's still available [since the
   start of the block].  */

static void
reset_opr_set_tables (void)
{
  /* Maintain a bitmap of which regs have been set since beginning of
     the block.  */
  CLEAR_REG_SET (reg_set_bitmap);
}

/* Return nonzero if the register X has not been set yet [since the
   start of the basic block containing INSN].  */

static int
reg_not_set_p (const_rtx x, const_rtx insn ATTRIBUTE_UNUSED)
{
  return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
}

/* Record things set by INSN.
   This data is used by reg_not_set_p.  */

static void
mark_oprs_set (rtx insn)
{
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
  df_ref *def_rec;

  for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
    SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (*def_rec));
}
\f
/* Compute copy/constant propagation working variables.  */

/* Local properties of assignments.  */
static sbitmap *cprop_avloc;
static sbitmap *cprop_kill;

/* Global properties of assignments (computed from the local properties).  */
static sbitmap *cprop_avin;
static sbitmap *cprop_avout;

/* Allocate vars used for copy/const propagation.  N_BLOCKS is the number of
   basic blocks.  N_SETS is the number of sets.  */

static void
alloc_cprop_mem (int n_blocks, int n_sets)
{
  cprop_avloc = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_kill = sbitmap_vector_alloc (n_blocks, n_sets);

  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
}

/* Free vars used by copy/const propagation.  */

static void
free_cprop_mem (void)
{
  sbitmap_vector_free (cprop_avloc);
  sbitmap_vector_free (cprop_kill);
  sbitmap_vector_free (cprop_avin);
  sbitmap_vector_free (cprop_avout);
}

/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is killed in a block if its operands, either DEST or SRC, are
   modified in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   KILL and COMP are destination sbitmaps for recording local properties.  */

static void
compute_local_properties (sbitmap *kill, sbitmap *comp,
                          struct hash_table_d *table)
{
  unsigned int i;

  /* Initialize the bitmaps that were passed in.  */
  sbitmap_vector_zero (kill, last_basic_block);
  sbitmap_vector_zero (comp, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          int indx = expr->bitmap_index;
          df_ref def;
          struct occr *occr;

          /* For each definition of the destination pseudo-reg, the expression
             is killed in the block where the definition is.  */
          for (def = DF_REG_DEF_CHAIN (REGNO (expr->dest));
               def; def = DF_REF_NEXT_REG (def))
            SET_BIT (kill[DF_REF_BB (def)->index], indx);

          /* If the source is a pseudo-reg, for each definition of the source,
             the expression is killed in the block where the definition is.  */
          if (REG_P (expr->src))
            for (def = DF_REG_DEF_CHAIN (REGNO (expr->src));
                 def; def = DF_REF_NEXT_REG (def))
              SET_BIT (kill[DF_REF_BB (def)->index], indx);

          /* The occurrences recorded in avail_occr are exactly those that
             are locally available in the block where they are.  */
          for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
            SET_BIT (comp[BLOCK_FOR_INSN (occr->insn)->index], indx);
        }
    }
}
\f
/* Hash table support.  */

/* Top level routine to do the dataflow analysis needed by copy/const
   propagation.  */

static void
compute_cprop_data (void)
{
  basic_block bb;

  compute_local_properties (cprop_kill, cprop_avloc, &set_hash_table);
  compute_available (cprop_avloc, cprop_kill, cprop_avout, cprop_avin);

  /* Merge implicit sets into CPROP_AVIN.  They are always available at the
     entry of their basic block.  We need to do this because 1) implicit sets
     aren't recorded for the local pass so they cannot be propagated within
     their basic block by this pass and 2) the global pass would otherwise
     propagate them only in the successors of their basic block.  */
  FOR_EACH_BB (bb)
    {
      int index = implicit_set_indexes[bb->index];
      if (index != -1)
        SET_BIT (cprop_avin[bb->index], index);
    }
}
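
/* For reference (a restatement of the standard availability dataflow
   problem that compute_available solves, not new functionality):

     AVOUT(b) = (AVIN(b) - KILL(b)) | AVLOC(b)
     AVIN(b)  = intersection of AVOUT(p) over all predecessors p of b,
                with AVIN(entry) = {}

   iterated to a fixed point.  A recorded set can be propagated into
   block B exactly when its bit is on in cprop_avin[B] (including the
   implicit-set bits merged in above).  */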
\f
/* Copy/constant propagation.  */

/* Maximum number of register uses in an insn that we handle.  */
#define MAX_USES 8

/* Table of uses (registers, both hard and pseudo) found in an insn.
   Allocated statically to avoid alloc/free complexity and overhead.  */
static rtx reg_use_table[MAX_USES];

/* Index into `reg_use_table' while building it.  */
static unsigned reg_use_count;

/* Set up a list of register numbers used in INSN.  The found uses are stored
   in `reg_use_table'.  `reg_use_count' is initialized to zero before entry,
   and contains the number of uses in the table upon exit.

   ??? If a register appears multiple times we will record it multiple times.
   This doesn't hurt anything but it will slow things down.  */

static void
find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  rtx x = *xptr;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);
  if (REG_P (x))
    {
      if (reg_use_count == MAX_USES)
        return;

      reg_use_table[reg_use_count] = x;
      reg_use_count++;
    }

  /* Recursively scan the operands of this expression.  */

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }

          find_used_regs (&XEXP (x, i), data);
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          find_used_regs (&XVECEXP (x, i, j), data);
    }
}

/* Try to replace all uses of FROM in INSN with TO.
   Return nonzero if successful.  */

static int
try_replace_reg (rtx from, rtx to, rtx insn)
{
  rtx note = find_reg_equal_equiv_note (insn);
  rtx src = 0;
  int success = 0;
  rtx set = single_set (insn);

  /* Usually we substitute easy stuff, so we won't copy everything.
     We however need to take care to not duplicate non-trivial CONST
     expressions.  */
  to = copy_rtx (to);

  validate_replace_src_group (from, to, insn);
  if (num_changes_pending () && apply_change_group ())
    success = 1;

  /* Try to simplify SET_SRC if we have substituted a constant.  */
  if (success && set && CONSTANT_P (to))
    {
      src = simplify_rtx (SET_SRC (set));

      if (src)
        validate_change (insn, &SET_SRC (set), src, 0);
    }

  /* If there is already a REG_EQUAL note, update the expression in it
     with our replacement.  */
  if (note != 0 && REG_NOTE_KIND (note) == REG_EQUAL)
    set_unique_reg_note (insn, REG_EQUAL,
                         simplify_replace_rtx (XEXP (note, 0), from, to));
  if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
    {
      /* If above failed and this is a single set, try to simplify the source
         of the set given our substitution.  We could perhaps try this for
         multiple SETs, but it probably won't buy us anything.  */
      src = simplify_replace_rtx (SET_SRC (set), from, to);

      if (!rtx_equal_p (src, SET_SRC (set))
          && validate_change (insn, &SET_SRC (set), src, 0))
        success = 1;

      /* If we've failed to perform the replacement, have a single SET to
         a REG destination and don't yet have a note, add a REG_EQUAL note
         to not lose information.  */
      if (!success && note == 0 && set != 0 && REG_P (SET_DEST (set)))
        note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
    }

  if (set && MEM_P (SET_DEST (set)) && reg_mentioned_p (from, SET_DEST (set)))
    {
      /* Registers can also appear as uses in SET_DEST if it is a MEM.
         We could perhaps try this for multiple SETs, but it probably
         won't buy us anything.  */
      rtx dest = simplify_replace_rtx (SET_DEST (set), from, to);

      if (!rtx_equal_p (dest, SET_DEST (set))
          && validate_change (insn, &SET_DEST (set), dest, 0))
        success = 1;
    }

  /* REG_EQUAL may get simplified into a register.  We don't allow that,
     so remove the note.  This case ought not to arise, because earlier
     code ought to have synthesized a reg-reg move; but be on the safe
     side.  */
  if (note && REG_NOTE_KIND (note) == REG_EQUAL && REG_P (XEXP (note, 0)))
    remove_note (insn, note);

  return success;
}

/* Find a set of REGNO that is available on entry to INSN's block.  Return
   NULL if no such set is found.  */

static struct expr *
find_avail_set (int regno, rtx insn)
{
  /* SET1 contains the last set found that can be returned to the caller for
     use in a substitution.  */
  struct expr *set1 = 0;

  /* Loops are not possible here.  To get a loop we would need two sets
     available at the start of the block containing INSN.  i.e. we would
     need two sets like this available at the start of the block:

       (set (reg X) (reg Y))
       (set (reg Y) (reg X))

     This cannot happen since the set of (reg Y) would have killed the
     set of (reg X) making it unavailable at the start of this block.  */
  while (1)
    {
      rtx src;
      struct expr *set = lookup_set (regno, &set_hash_table);

      /* Find a set that is available at the start of the block
         which contains INSN.  */
      while (set)
        {
          if (TEST_BIT (cprop_avin[BLOCK_FOR_INSN (insn)->index],
                        set->bitmap_index))
            break;
          set = next_set (regno, set);
        }

      /* If no available set was found we've reached the end of the
         (possibly empty) copy chain.  */
      if (set == 0)
        break;

      src = set->src;

      /* We know the set is available.
         Now check that SRC is locally anticipatable (i.e. none of the
         source operands have changed since the start of the block).

         If the source operand changed, we may still use it for the next
         iteration of this loop, but we may not use it for substitutions.  */

      if (cprop_constant_p (src) || reg_not_set_p (src, insn))
        set1 = set;

      /* If the source of the set is anything except a register, then
         we have reached the end of the copy chain.  */
      if (! REG_P (src))
        break;

      /* Follow the copy chain, i.e. start another iteration of the loop
         and see if we have an available copy into SRC.  */
      regno = REGNO (src);
    }

  /* SET1 holds the last set that was available and anticipatable at
     INSN.  */
  return set1;
}
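
/* For illustration (a hypothetical chain; register numbers are made up):
   given the available sets

     (set (reg 3) (reg 2))
     (set (reg 2) (reg 1))

   a query for reg 3 follows the chain through reg 2 to reg 1, so a use
   of reg 3 can be replaced directly by reg 1, provided each link was
   still available and anticipatable along the way.  */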

/* Subroutine of cprop_insn that tries to propagate constants into
   JUMP_INSNS.  JUMP must be a conditional jump.  If SETCC is non-NULL
   it is the instruction that immediately precedes JUMP, and must be a
   single SET of a register.  FROM is what we will try to replace,
   SRC is the constant we will try to substitute for it.  Return nonzero
   if a change was made.  */

static int
cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
{
  rtx new_rtx, set_src, note_src;
  rtx set = pc_set (jump);
  rtx note = find_reg_equal_equiv_note (jump);

  if (note)
    {
      note_src = XEXP (note, 0);
      if (GET_CODE (note_src) == EXPR_LIST)
        note_src = NULL_RTX;
    }
  else note_src = NULL_RTX;

  /* Prefer REG_EQUAL notes except those containing EXPR_LISTs.  */
  set_src = note_src ? note_src : SET_SRC (set);

  /* First substitute the SETCC condition into the JUMP instruction,
     then substitute the given values into this expanded JUMP.  */
  if (setcc != NULL_RTX
      && !modified_between_p (from, setcc, jump)
      && !modified_between_p (src, setcc, jump))
    {
      rtx setcc_src;
      rtx setcc_set = single_set (setcc);
      rtx setcc_note = find_reg_equal_equiv_note (setcc);
      setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
                  ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
      set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
                                      setcc_src);
    }
  else
    setcc = NULL_RTX;

  new_rtx = simplify_replace_rtx (set_src, from, src);

  /* If no simplification can be made, then try the next register.  */
  if (rtx_equal_p (new_rtx, SET_SRC (set)))
    return 0;

  /* If this is now a no-op delete it, otherwise this must be a valid insn.  */
  if (new_rtx == pc_rtx)
    delete_insn (jump);
  else
    {
      /* Ensure the value computed inside the jump insn is equivalent
         to the one computed by setcc.  */
      if (setcc && modified_in_p (new_rtx, setcc))
        return 0;
      if (! validate_unshare_change (jump, &SET_SRC (set), new_rtx, 0))
        {
          /* When (some) constants are not valid in a comparison, and there
             are two registers to be replaced by constants before the entire
             comparison can be folded into a constant, we need to keep
             intermediate information in REG_EQUAL notes.  For targets with
             separate compare insns, such notes are added by try_replace_reg.
             When we have a combined compare-and-branch instruction, however,
             we need to attach a note to the branch itself to make this
             optimization work.  */

          if (!rtx_equal_p (new_rtx, note_src))
            set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new_rtx));
          return 0;
        }

      /* Remove REG_EQUAL note after simplification.  */
      if (note_src)
        remove_note (jump, note);
    }

#ifdef HAVE_cc0
  /* Delete the cc0 setter.  */
  if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
    delete_insn (setcc);
#endif

  global_const_prop_count++;
  if (dump_file != NULL)
    {
      fprintf (dump_file,
               "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with "
               "constant ", REGNO (from), INSN_UID (jump));
      print_rtl (dump_file, src);
      fprintf (dump_file, "\n");
    }
  purge_dead_edges (bb);

  /* If a conditional jump has been changed into an unconditional jump, remove
     the jump and make the edge fallthru - this is always called in
     cfglayout mode.  */
  if (new_rtx != pc_rtx && simplejump_p (jump))
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->dest != EXIT_BLOCK_PTR
            && BB_HEAD (e->dest) == JUMP_LABEL (jump))
          {
            e->flags |= EDGE_FALLTHRU;
            break;
          }
      delete_insn (jump);
    }

  return 1;
}

/* Subroutine of cprop_insn that tries to propagate constants.  FROM is what
   we will try to replace, SRC is the constant we will try to substitute for
   it and INSN is the instruction where this will be happening.  */

static int
constprop_register (rtx from, rtx src, rtx insn)
{
  rtx sset;

  /* Check for reg or cc0 setting instructions followed by
     conditional branch instructions first.  */
  if ((sset = single_set (insn)) != NULL
      && NEXT_INSN (insn)
      && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
    {
      rtx dest = SET_DEST (sset);
      if ((REG_P (dest) || CC0_P (dest))
          && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn),
                         from, src))
        return 1;
    }

  /* Handle normal insns next.  */
  if (NONJUMP_INSN_P (insn) && try_replace_reg (from, src, insn))
    return 1;

  /* Try to propagate a CONST_INT into a conditional jump.
     We're pretty specific about what we will handle in this
     code, we can extend this as necessary over time.

     Right now the insn in question must look like
       (set (pc) (if_then_else ...))  */
  else if (any_condjump_p (insn) && onlyjump_p (insn))
    return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, src);
  return 0;
}
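
/* For illustration (a hypothetical pre-jump pair; the insns are made up):
   on a target with a separate comparison insn, constprop_register sees

     (set (reg 58) (eq:SI (reg 60) (const_int 0)))          ;; SETCC
     (set (pc) (if_then_else (ne (reg 58) (const_int 0))
                             (label_ref 23) (pc)))          ;; JUMP

   and, when reg 60 is known to be 0, lets cprop_jump substitute and fold
   the whole condition to a constant, turning the conditional jump into
   either an unconditional jump or a no-op that is deleted.  */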

/* Perform constant and copy propagation on INSN.
   Return nonzero if a change was made.  */

static int
cprop_insn (rtx insn)
{
  unsigned i;
  int changed = 0, changed_this_round;
  rtx note;

 retry:
  changed_this_round = 0;
  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);

  /* We may win even when propagating constants into notes.  */
  note = find_reg_equal_equiv_note (insn);
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  for (i = 0; i < reg_use_count; i++)
    {
      rtx reg_used = reg_use_table[i];
      unsigned int regno = REGNO (reg_used);
      rtx src;
      struct expr *set;

      /* If the register has already been set in this block, there's
         nothing we can do.  */
      if (! reg_not_set_p (reg_used, insn))
        continue;

      /* Find an assignment that sets reg_used and is available
         at the start of the block.  */
      set = find_avail_set (regno, insn);
      if (! set)
        continue;

      src = set->src;

      /* Constant propagation.  */
      if (cprop_constant_p (src))
        {
          if (constprop_register (reg_used, src, insn))
            {
              changed_this_round = changed = 1;
              global_const_prop_count++;
              if (dump_file != NULL)
                {
                  fprintf (dump_file,
                           "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
                  fprintf (dump_file, "insn %d with constant ",
                           INSN_UID (insn));
                  print_rtl (dump_file, src);
                  fprintf (dump_file, "\n");
                }
              if (INSN_DELETED_P (insn))
                return 1;
            }
        }
      else if (REG_P (src)
               && REGNO (src) >= FIRST_PSEUDO_REGISTER
               && REGNO (src) != regno)
        {
          if (try_replace_reg (reg_used, src, insn))
            {
              changed_this_round = changed = 1;
              global_copy_prop_count++;
              if (dump_file != NULL)
                {
                  fprintf (dump_file,
                           "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
                           regno, INSN_UID (insn));
                  fprintf (dump_file, " with reg %d\n", REGNO (src));
                }

              /* The original insn setting reg_used may or may not now be
                 deletable.  We leave the deletion to DCE.  */
              /* FIXME: If it turns out that the insn isn't deletable,
                 then we may have unnecessarily extended register lifetimes
                 and made things worse.  */
            }
        }

      /* If try_replace_reg simplified the insn, the regs found
         by find_used_regs may not be valid anymore.  Start over.  */
      if (changed_this_round)
        goto retry;
    }

  if (changed && DEBUG_INSN_P (insn))
    return 0;

  return changed;
}

/* Like find_used_regs, but avoid recording uses that appear in
   input-output contexts such as zero_extract or pre_dec.  This
   restricts the cases we consider to those for which local cprop
   can legitimately make replacements.  */

static void
local_cprop_find_used_regs (rtx *xptr, void *data)
{
  rtx x = *xptr;

  if (x == 0)
    return;

  switch (GET_CODE (x))
    {
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case STRICT_LOW_PART:
      return;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Can only legitimately appear this early in the context of
         stack pushes for function arguments, but handle all of the
         codes nonetheless.  */
      return;

    case SUBREG:
      /* Setting a subreg of a register larger than word_mode leaves
         the non-written words unchanged.  */
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
        return;
      break;

    default:
      break;
    }

  find_used_regs (xptr, data);
}

/* Try to perform local const/copy propagation on X in INSN.  */

static bool
do_local_cprop (rtx x, rtx insn)
{
  rtx newreg = NULL, newcnst = NULL;

  /* Rule out USE instructions and ASM statements as we don't want to
     change the hard registers mentioned.  */
  if (REG_P (x)
      && (REGNO (x) >= FIRST_PSEUDO_REGISTER
          || (GET_CODE (PATTERN (insn)) != USE
              && asm_noperands (PATTERN (insn)) < 0)))
    {
      cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
      struct elt_loc_list *l;

      if (!val)
        return false;
      for (l = val->locs; l; l = l->next)
        {
          rtx this_rtx = l->loc;
          rtx note;

          if (cprop_constant_p (this_rtx))
            newcnst = this_rtx;
          if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
              /* Don't copy propagate if it has an attached REG_EQUIV note.
                 At this point only function parameters should have
                 REG_EQUIV notes, and if the argument slot is used somewhere
                 explicitly, the address of the parameter has been taken,
                 so we should not extend the lifetime of the pseudo.  */
              && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
                  || ! MEM_P (XEXP (note, 0))))
            newreg = this_rtx;
        }
      if (newcnst && constprop_register (x, newcnst, insn))
        {
          if (dump_file != NULL)
            {
              fprintf (dump_file, "LOCAL CONST-PROP: Replacing reg %d in ",
                       REGNO (x));
              fprintf (dump_file, "insn %d with constant ",
                       INSN_UID (insn));
              print_rtl (dump_file, newcnst);
              fprintf (dump_file, "\n");
            }
          local_const_prop_count++;
          return true;
        }
      else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
        {
          if (dump_file != NULL)
            {
              fprintf (dump_file,
                       "LOCAL COPY-PROP: Replacing reg %d in insn %d",
                       REGNO (x), INSN_UID (insn));
              fprintf (dump_file, " with reg %d\n", REGNO (newreg));
            }
          local_copy_prop_count++;
          return true;
        }
    }
  return false;
}

/* Do local const/copy propagation (i.e. within each basic block).  */

static int
local_cprop_pass (void)
{
  basic_block bb;
  rtx insn;
  bool changed = false;
  unsigned i;

  cselib_init (0);
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              rtx note = find_reg_equal_equiv_note (insn);
              do
                {
                  reg_use_count = 0;
                  note_uses (&PATTERN (insn), local_cprop_find_used_regs,
                             NULL);
                  if (note)
                    local_cprop_find_used_regs (&XEXP (note, 0), NULL);

                  for (i = 0; i < reg_use_count; i++)
                    {
                      if (do_local_cprop (reg_use_table[i], insn))
                        {
                          if (!DEBUG_INSN_P (insn))
                            changed = true;
                          break;
                        }
                    }
                  if (INSN_DELETED_P (insn))
                    break;
                }
              while (i < reg_use_count);
            }
          cselib_process_insn (insn);
        }

      /* Forget everything at the end of a basic block.  */
      cselib_clear_table ();
    }

  cselib_finish ();

  return changed;
}

/* Similar to get_condition, only the resulting condition must be
   valid at JUMP, instead of at EARLIEST.

   This differs from noce_get_condition in ifcvt.c in that we prefer not to
   settle for the condition variable in the jump instruction being integral.
   We prefer to be able to record the value of a user variable, rather than
   the value of a temporary used in a condition.  This could be solved by
   recording the value of *every* register scanned by canonicalize_condition,
   but this would require some code reorganization.  */

rtx
fis_get_condition (rtx jump)
{
  return get_condition (jump, NULL, false, true);
}

/* Check the comparison COND to see if we can safely form an implicit
   set from it.  */

static bool
implicit_set_cond_p (const_rtx cond)
{
  enum machine_mode mode;
  rtx cst;

  /* COND must be either an EQ or NE comparison.  */
  if (GET_CODE (cond) != EQ && GET_CODE (cond) != NE)
    return false;

  /* The first operand of COND must be a pseudo-reg.  */
  if (! REG_P (XEXP (cond, 0))
      || HARD_REGISTER_P (XEXP (cond, 0)))
    return false;

  /* The second operand of COND must be a suitable constant.  */
  mode = GET_MODE (XEXP (cond, 0));
  cst = XEXP (cond, 1);

  /* We can't perform this optimization if either operand might be or might
     contain a signed zero.  */
  if (HONOR_SIGNED_ZEROS (mode))
    {
      /* It is sufficient to check if CST is or contains a zero.  We must
         handle float, complex, and vector.  If any subpart is a zero, then
         the optimization can't be performed.  */
      /* ??? The complex and vector checks are not implemented yet.  We just
         always return zero for them.  */
      if (GET_CODE (cst) == CONST_DOUBLE)
        {
          REAL_VALUE_TYPE d;
          REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
          if (REAL_VALUES_EQUAL (d, dconst0))
            return 0;
        }
      else
        return 0;
    }

  return cprop_constant_p (cst);
}
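
/* For illustration (a worked example of the signed-zero restriction
   above): "if (x == 0.0)" is also true when x is -0.0, so the implied
   set "x = 0.0" in the taken branch could silently flip the sign of a
   zero.  When !HONOR_SIGNED_ZEROS (mode), e.g. under -ffast-math, the
   implicit set is allowed.  */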

/* Find the implicit sets of a function.  An "implicit set" is a constraint
   on the value of a variable, implied by a conditional jump.  For example,
   following "if (x == 2)", the then branch may be optimized as though the
   conditional performed an "explicit set", in this example, "x = 2".  This
   function records the set patterns that are implicit at the start of each
   basic block.

   If an implicit set is found but the set is implicit on a critical edge,
   this critical edge is split.

   Return true if the CFG was modified, false otherwise.  */

static bool
find_implicit_sets (void)
{
  basic_block bb, dest;
  rtx cond, new_rtx;
  unsigned int count = 0;
  bool edges_split = false;
  size_t implicit_sets_size = last_basic_block + 10;

  implicit_sets = XCNEWVEC (rtx, implicit_sets_size);

  FOR_EACH_BB (bb)
    {
      /* Check for more than one successor.  */
      if (EDGE_COUNT (bb->succs) <= 1)
        continue;

      cond = fis_get_condition (BB_END (bb));

      /* If no condition is found or if it isn't of a suitable form,
         ignore it.  */
      if (! cond || ! implicit_set_cond_p (cond))
        continue;

      dest = GET_CODE (cond) == EQ
             ? BRANCH_EDGE (bb)->dest : FALLTHRU_EDGE (bb)->dest;

      /* If DEST doesn't go anywhere, ignore it.  */
      if (! dest || dest == EXIT_BLOCK_PTR)
        continue;

      /* We have found a suitable implicit set.  Try to record it now as
         a SET in DEST.  If DEST has more than one predecessor, the edge
         between BB and DEST is a critical edge and we must split it,
         because we can only record one implicit set per DEST basic block.  */
      if (! single_pred_p (dest))
        {
          dest = split_edge (find_edge (bb, dest));
          edges_split = true;
        }

      if (implicit_sets_size <= (size_t) dest->index)
        {
          size_t old_implicit_sets_size = implicit_sets_size;
          implicit_sets_size *= 2;
          implicit_sets = XRESIZEVEC (rtx, implicit_sets, implicit_sets_size);
          memset (implicit_sets + old_implicit_sets_size, 0,
                  (implicit_sets_size - old_implicit_sets_size)
                  * sizeof (rtx));
        }

      new_rtx = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
                             XEXP (cond, 1));
      implicit_sets[dest->index] = new_rtx;
      if (dump_file)
        {
          fprintf (dump_file, "Implicit set of reg %d in ",
                   REGNO (XEXP (cond, 0)));
          fprintf (dump_file, "basic block %d\n", dest->index);
        }
      count++;
    }

  if (dump_file)
    fprintf (dump_file, "Found %d implicit sets\n", count);

  /* Confess our sins.  */
  return edges_split;
}

/* Bypass conditional jumps.  */

/* The value of last_basic_block at the beginning of the jump_bypass
   pass.  The use of redirect_edge_and_branch_force may introduce new
   basic blocks, but the data flow analysis is only valid for basic
   block indices less than bypass_last_basic_block.  */

static int bypass_last_basic_block;

/* Find a set of REGNO to a constant that is available at the end of basic
   block BB.  Return NULL if no such set is found.  Based heavily upon
   find_avail_set.  */

static struct expr *
find_bypass_set (int regno, int bb)
{
  struct expr *result = 0;

  for (;;)
    {
      rtx src;
      struct expr *set = lookup_set (regno, &set_hash_table);

      while (set)
        {
          if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
            break;
          set = next_set (regno, set);
        }

      if (set == 0)
        break;

      src = set->src;
      if (cprop_constant_p (src))
        result = set;

      if (! REG_P (src))
        break;

      regno = REGNO (src);
    }
  return result;
}

/* Subroutine of bypass_block that checks whether a pseudo is killed by
   any of the instructions inserted on an edge.  Jump bypassing places
   condition code setters on CFG edges using insert_insn_on_edge.  This
   function is required to check that our data flow analysis is still
   valid prior to commit_edge_insertions.  */

static bool
reg_killed_on_edge (const_rtx reg, const_edge e)
{
  rtx insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return true;

  return false;
}

/* Subroutine of bypass_conditional_jumps that attempts to bypass the given
   basic block BB which has more than one predecessor.  If not NULL, SETCC
   is the first instruction of BB, which is immediately followed by JUMP_INSN
   JUMP.  Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
   Returns nonzero if a change was made.

   During the jump bypassing pass, we may place copies of SETCC instructions
   on CFG edges.  The following routine must be careful to pay attention to
   these inserted insns when performing its transformations.  */

static int
bypass_block (basic_block bb, rtx setcc, rtx jump)
{
  rtx insn, note;
  edge e, edest;
  int change;
  int may_be_loop_header;
  unsigned removed_p;
  unsigned i;
  edge_iterator ei;

  insn = (setcc != NULL) ? setcc : jump;

  /* Determine set of register uses in INSN.  */
  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);
  note = find_reg_equal_equiv_note (insn);
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  may_be_loop_header = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (e->flags & EDGE_DFS_BACK)
      {
        may_be_loop_header = true;
        break;
      }

  change = 0;
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      removed_p = 0;

      if (e->flags & EDGE_COMPLEX)
        {
          ei_next (&ei);
          continue;
        }

      /* We can't redirect edges from new basic blocks.  */
      if (e->src->index >= bypass_last_basic_block)
        {
          ei_next (&ei);
          continue;
        }

      /* The irreducible loops created by redirecting edges entering the
         loop from outside would decrease the effectiveness of some of the
         following optimizations, so prevent this.  */
      if (may_be_loop_header
          && !(e->flags & EDGE_DFS_BACK))
        {
          ei_next (&ei);
          continue;
        }

      for (i = 0; i < reg_use_count; i++)
        {
          rtx reg_used = reg_use_table[i];
          unsigned int regno = REGNO (reg_used);
          basic_block dest, old_dest;
          struct expr *set;
          rtx src, new_rtx;

          set = find_bypass_set (regno, e->src->index);

          if (! set)
            continue;

          /* Check the data flow is valid after edge insertions.  */
          if (e->insns.r && reg_killed_on_edge (reg_used, e))
            continue;

          src = SET_SRC (pc_set (jump));

          if (setcc != NULL)
            src = simplify_replace_rtx (src,
                                        SET_DEST (PATTERN (setcc)),
                                        SET_SRC (PATTERN (setcc)));

          new_rtx = simplify_replace_rtx (src, reg_used, set->src);

          /* Jump bypassing may have already placed instructions on
             edges of the CFG.  We can't bypass an outgoing edge that
             has instructions associated with it, as these insns won't
             get executed if the incoming edge is redirected.  */
          if (new_rtx == pc_rtx)
            {
              edest = FALLTHRU_EDGE (bb);
              dest = edest->insns.r ? NULL : edest->dest;
            }
          else if (GET_CODE (new_rtx) == LABEL_REF)
            {
              dest = BLOCK_FOR_INSN (XEXP (new_rtx, 0));
              /* Don't bypass edges containing instructions.  */
              edest = find_edge (bb, dest);
              if (edest && edest->insns.r)
                dest = NULL;
            }
          else
            dest = NULL;

          /* Avoid unification of the edge with other edges from original
             branch.  We would end up emitting the instruction on "both"
             edges.  */
          if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc)))
              && find_edge (e->src, dest))
            dest = NULL;

          old_dest = e->dest;
          if (dest != NULL
              && dest != old_dest
              && dest != EXIT_BLOCK_PTR)
            {
              if (current_loops != NULL
                  && e->src->loop_father->latch == e->src)
                {
                  /* ??? Now we are creating (or may create) a loop
                     with multiple entries.  Simply mark it for
                     removal.  Alternatively we could not do this
                     threading.  */
                  e->src->loop_father->header = NULL;
                  e->src->loop_father->latch = NULL;
                }

              redirect_edge_and_branch_force (e, dest);

              /* Copy the register setter to the redirected edge.
                 Don't copy CC0 setters, as CC0 is dead after jump.  */
              if (setcc)
                {
                  rtx pat = PATTERN (setcc);
                  if (!CC0_P (SET_DEST (pat)))
                    insert_insn_on_edge (copy_insn (pat), e);
                }

              if (dump_file != NULL)
                {
                  fprintf (dump_file, "JUMP-BYPASS: Proved reg %d "
                           "in jump_insn %d equals constant ",
                           regno, INSN_UID (jump));
                  print_rtl (dump_file, set->src);
                  fprintf (dump_file, "\nBypass edge from %d->%d to %d\n",
                           e->src->index, old_dest->index, dest->index);
                }
              change = 1;
              removed_p = 1;
              break;
            }
        }
      if (!removed_p)
        ei_next (&ei);
    }
  return change;
}

/* Find basic blocks with more than one predecessor that only contain a
   single conditional jump.  If the result of the comparison is known at
   compile-time from any incoming edge, redirect that edge to the
   appropriate target.  Return nonzero if a change was made.

   This function is now mis-named, because we also handle indirect jumps.  */

static int
bypass_conditional_jumps (void)
{
  basic_block bb;
  int changed;
  rtx setcc;
  rtx insn;
  rtx dest;

  /* Note we start at block 1.  */
  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return 0;

  bypass_last_basic_block = last_basic_block;
  mark_dfs_back_edges ();

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
                  EXIT_BLOCK_PTR, next_bb)
    {
      /* Check for more than one predecessor.  */
      if (!single_pred_p (bb))
        {
          setcc = NULL_RTX;
          FOR_BB_INSNS (bb, insn)
            if (DEBUG_INSN_P (insn))
              continue;
            else if (NONJUMP_INSN_P (insn))
              {
                if (setcc)
                  break;
                if (GET_CODE (PATTERN (insn)) != SET)
                  break;

                dest = SET_DEST (PATTERN (insn));
                if (REG_P (dest) || CC0_P (dest))
                  setcc = insn;
                else
                  break;
              }
            else if (JUMP_P (insn))
              {
                if ((any_condjump_p (insn) || computed_jump_p (insn))
                    && onlyjump_p (insn))
                  changed |= bypass_block (bb, setcc, insn);
                break;
              }
            else if (INSN_P (insn))
              break;
        }
    }

  /* If we bypassed any register setting insns, we inserted a
     copy on the redirected edge.  These need to be committed.  */
  if (changed)
    commit_edge_insertions ();

  return changed;
}
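
/* For illustration (a hypothetical CFG; block names are made up): suppose
   block B2 contains only "if (x != 0) goto L" and has two predecessors,
   B0 where "x = 0" is available on exit and B1 where nothing is known.
   Bypassing redirects the edge B0->B2 straight to B2's fallthru
   successor, since the condition is known false along that edge; the
   edge from B1 is left alone.  */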
\f
/* Return true if the graph is too expensive to optimize.  PASS is the
   optimization about to be performed.  */

static bool
is_too_expensive (const char *pass)
{
  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many
     edges as blocks.  But we do not want to punish small functions
     which have a couple switch statements.  Rather than simply
     threshold the number of blocks, we use something with a more
     graceful degradation.  */
  if (n_edges > 20000 + n_basic_blocks * 4)
    {
      warning (OPT_Wdisabled_optimization,
               "%s: %d basic blocks and %d edges/basic block",
               pass, n_basic_blocks, n_edges / n_basic_blocks);

      return true;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_reg_num ())
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      warning (OPT_Wdisabled_optimization,
               "%s: %d basic blocks and %d registers",
               pass, n_basic_blocks, max_reg_num ());

      return true;
    }

  return false;
}
\f
/* Main function for the CPROP pass.  */

static int
one_cprop_pass (void)
{
  int i;
  int changed = 0;

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
      || is_too_expensive (_("const/copy propagation disabled")))
    return 0;

  global_const_prop_count = local_const_prop_count = 0;
  global_copy_prop_count = local_copy_prop_count = 0;

  bytes_used = 0;
  gcc_obstack_init (&cprop_obstack);

  /* Do a local const/copy propagation pass first.  The global pass
     only handles global opportunities.
     If the local pass changes something, remove any unreachable blocks
     because the CPROP global dataflow analysis may get into infinite
     loops for CFGs with unreachable blocks.

     FIXME: This local pass should not be necessary after CSE (but for
            some reason it still is).  It is also (proven) not necessary
            to run the local pass right after FWPROP.

     FIXME: The global analysis would not get into infinite loops if it
            would use the DF solver (via df_simple_dataflow) instead of
            the solver implemented in this file.  */
  changed |= local_cprop_pass ();
  if (changed)
    delete_unreachable_blocks ();

  /* Determine implicit sets.  This may change the CFG (split critical
     edges if that exposes an implicit set).
     Note that find_implicit_sets() does not rely on up-to-date DF caches
     so that we do not have to re-run df_analyze() even if local CPROP
     changed something.
     ??? This could run earlier so that any uncovered implicit sets
     could be exploited in local_cprop_pass() also.  Later.  */
  changed |= find_implicit_sets ();

  /* If local_cprop_pass() or find_implicit_sets() changed something,
     run df_analyze() to bring all insn caches up-to-date, and to take
     new basic blocks from edge splitting on the DF radar.
     NB: This also runs the fast DCE pass, because execute_rtl_cprop
     sets DF_LR_RUN_DCE.  */
  if (changed)
    df_analyze ();

  /* Initialize implicit_set_indexes array.  */
  implicit_set_indexes = XNEWVEC (int, last_basic_block);
  for (i = 0; i < last_basic_block; i++)
    implicit_set_indexes[i] = -1;

  alloc_hash_table (&set_hash_table);
  compute_hash_table (&set_hash_table);

  /* Free implicit_sets before peak usage.  */
  free (implicit_sets);
  implicit_sets = NULL;

  if (dump_file)
    dump_hash_table (dump_file, "SET", &set_hash_table);
  if (set_hash_table.n_elems > 0)
    {
      basic_block bb;
      rtx insn;

      alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
      compute_cprop_data ();

      free (implicit_set_indexes);
      implicit_set_indexes = NULL;

      /* Allocate vars to track sets of regs.  */
      reg_set_bitmap = ALLOC_REG_SET (NULL);

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR,
                      next_bb)
        {
          /* Reset tables used to keep track of what's still valid [since
             the start of the block].  */
          reset_opr_set_tables ();

          FOR_BB_INSNS (bb, insn)
            if (INSN_P (insn))
              {
                changed |= cprop_insn (insn);

                /* Keep track of everything modified by this insn.  */
                /* ??? Need to be careful w.r.t. mods done to INSN.
                   Don't call mark_oprs_set if we turned the
                   insn into a NOTE, or deleted the insn.  */
                if (! NOTE_P (insn) && ! INSN_DELETED_P (insn))
                  mark_oprs_set (insn);
              }
        }

      changed |= bypass_conditional_jumps ();

      FREE_REG_SET (reg_set_bitmap);
      free_cprop_mem ();
    }
  else
    {
      free (implicit_set_indexes);
      implicit_set_indexes = NULL;
    }

  free_hash_table (&set_hash_table);
  obstack_free (&cprop_obstack, NULL);

  if (dump_file)
    {
      fprintf (dump_file, "CPROP of %s, %d basic blocks, %d bytes needed, ",
               current_function_name (), n_basic_blocks, bytes_used);
      fprintf (dump_file, "%d local const props, %d local copy props, ",
               local_const_prop_count, local_copy_prop_count);
      fprintf (dump_file, "%d global const props, %d global copy props\n\n",
               global_const_prop_count, global_copy_prop_count);
    }

  return changed;
}
\f
/* All the passes implemented in this file.  Each pass has its
   own gate and execute function, and at the end of the file a
   pass definition for passes.c.

   We do not construct an accurate cfg in functions which call
   setjmp, so none of these passes runs if the function calls
   setjmp.
   FIXME: Should just handle setjmp via REG_SETJMP notes.  */

static bool
gate_rtl_cprop (void)
{
  return optimize > 0 && flag_gcse
         && !cfun->calls_setjmp
         && dbg_cnt (cprop);
}

static unsigned int
execute_rtl_cprop (void)
{
  int changed;
  delete_unreachable_blocks ();
  df_set_flags (DF_LR_RUN_DCE);
  df_analyze ();
  changed = one_cprop_pass ();
  flag_rerun_cse_after_global_opts |= changed;
  if (changed)
    cleanup_cfg (CLEANUP_CFG_CHANGED);
  return 0;
}

struct rtl_opt_pass pass_rtl_cprop =
{
 {
  RTL_PASS,
  "cprop",                              /* name */
  gate_rtl_cprop,                       /* gate */
  execute_rtl_cprop,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CPROP,                             /* tv_id */
  PROP_cfglayout,                       /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_verify_flow | TODO_ggc_collect   /* todo_flags_finish */
 }
};