1 /* Post-reload compare elimination.
2 Copyright (C) 2010-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* There is a set of targets whose general-purpose move or addition
21 instructions clobber the flags. These targets cannot split their
22 CBRANCH/CSTORE etc. patterns before reload is complete, lest reload
23 itself insert these instructions in between the flags setter and user.
24 Because these targets cannot split the compare from the use, they
25 cannot make use of the comparison elimination offered by the combine pass.
26
27 This is a small pass intended to provide comparison elimination similar to
28 what is available via NOTICE_UPDATE_CC for cc0 targets. This should help
29 encourage cc0 targets to convert to an explicit post-reload representation
30 of the flags.
31
32 This pass assumes:
33
34 (0) CBRANCH/CSTORE etc. have been split in pass_split_after_reload.
35
36 (1) All comparison patterns are represented as
37
38 [(set (reg:CC) (compare:CC (reg) (reg_or_immediate)))]
39
40 (2) All insn patterns that modify the flags are represented as
41
42 [(set (reg) (operation))
43 (clobber (reg:CC))]
44
45 (3) If an insn of form (2) can usefully set the flags, there is
46 another pattern of the form
47
48 [(set (reg:CCM) (compare:CCM (operation) (immediate)))
49 (set (reg) (operation))]
50
51 The mode CCM will be chosen as if by SELECT_CC_MODE.
52
53 Note that unlike NOTICE_UPDATE_CC, we do not handle memory operands.
54 This could be handled as a future enhancement.
55 */
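/* As an illustration only (not taken from any particular backend), a target
   satisfying assumptions (2) and (3) might pair a flags-clobbering addition

       [(set (match_operand:SI 0 "register_operand")
             (plus:SI (match_operand:SI 1 "register_operand")
                      (match_operand:SI 2 "register_operand")))
        (clobber (reg:CC FLAGS_REGNUM))]

   with a flags-setting variant of the same operation

       [(set (reg:CCZ FLAGS_REGNUM)
             (compare:CCZ (plus:SI (match_dup 1) (match_dup 2))
                          (const_int 0)))
        (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))]

   where FLAGS_REGNUM stands for whatever register number the backend
   reports through targetm.flags_regnum, and CCZ for a mode its
   SELECT_CC_MODE would pick for an equality test.  */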
56
57 #include "config.h"
58 #include "system.h"
59 #include "coretypes.h"
60 #include "backend.h"
61 #include "target.h"
62 #include "rtl.h"
63 #include "df.h"
64 #include "memmodel.h"
65 #include "tm_p.h"
66 #include "insn-config.h"
67 #include "recog.h"
68 #include "cfgrtl.h"
69 #include "tree-pass.h"
70 #include "domwalk.h"
71
72 \f
73 /* These structures describe a comparison and how it is used. */
74
75 /* The choice of maximum 3 uses comes from wanting to eliminate the two
76 duplicate compares from a three-way branch on the sign of a value.
77 This is also sufficient to eliminate the duplicate compare against the
78 high-part of a double-word comparison. */
79 #define MAX_CMP_USE 3
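/* Illustration: a sign dispatch such as

       if (x < 0) ... else if (x == 0) ... else ...

   tends to reach this pass as repeated compares of X against zero, one per
   conditional branch.  Once the duplicates are deleted, the single surviving
   compare is read by each remaining branch, so three use slots cover this
   case as well as the duplicated high-part compare of a double-word
   comparison mentioned above.  */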
80
81 struct comparison_use
82 {
83 /* The instruction in which the result of the compare is used. */
84 rtx_insn *insn;
85 /* The location of the flags register within the use. */
86 rtx *loc;
87 /* The comparison code applied against the flags register. */
88 enum rtx_code code;
89 };
90
91 struct comparison
92 {
93 /* The comparison instruction. */
94 rtx_insn *insn;
95
96 /* The insn prior to the comparison insn that clobbers the flags. */
97 rtx_insn *prev_clobber;
98
99 /* The two values being compared. These will be either REGs or
100 constants. */
101 rtx in_a, in_b;
102
103 /* The REG_EH_REGION of the comparison. */
104 rtx eh_note;
105
106 /* Information about how this comparison is used. */
107 struct comparison_use uses[MAX_CMP_USE];
108
109 /* The original CC_MODE for this comparison. */
110 machine_mode orig_mode;
111
112 /* The number of uses identified for this comparison. */
113 unsigned short n_uses;
114
115 /* True if not all uses of this comparison have been identified.
116 This can happen either for overflowing the array above, or if
117 the flags register is used in some unusual context. */
118 bool missing_uses;
119
120 /* True if its inputs are still valid at the end of the block. */
121 bool inputs_valid;
122 };
123
124 static vec<comparison *> all_compares;
125
126 /* Look for a "conforming" comparison, as defined above. If valid, return
127 the rtx for the COMPARE itself. */
128
129 static rtx
130 conforming_compare (rtx_insn *insn)
131 {
132 rtx set, src, dest;
133
134 set = single_set (insn);
135 if (set == NULL)
136 return NULL;
137
138 src = SET_SRC (set);
139 if (GET_CODE (src) != COMPARE)
140 return NULL;
141
142 dest = SET_DEST (set);
143 if (!REG_P (dest) || REGNO (dest) != targetm.flags_regnum)
144 return NULL;
145
146 if (!REG_P (XEXP (src, 0)))
147 return NULL;
148
149 if (CONSTANT_P (XEXP (src, 1)) || REG_P (XEXP (src, 1)))
150 return src;
151
152 if (GET_CODE (XEXP (src, 1)) == UNSPEC)
153 {
154 for (int i = 0; i < XVECLEN (XEXP (src, 1), 0); i++)
155 if (!REG_P (XVECEXP (XEXP (src, 1), 0, i)))
156 return NULL;
157 return src;
158 }
159
160 return NULL;
161 }
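/* By way of illustration, a conforming compare's single_set looks like

       (set (reg:CC) (compare:CC (reg:SI) (const_int 7)))

   with the second operand a constant, a register, or an UNSPEC wrapping
   only registers, and the COMPARE rtx itself is what gets returned.  A
   compare whose first operand is memory, e.g.

       (set (reg:CC) (compare:CC (mem:SI (reg:SI)) (const_int 0)))

   is rejected, matching the note above that memory operands are not
   handled.  */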
162
163 /* Look for a pattern of the "correct" form for an insn with a flags clobber
164 for which we may be able to eliminate a compare later. We're not looking
164 to validate any inputs at this time, merely to check that the basic shape is
166 correct. The term "arithmetic" may be somewhat misleading... */
167
168 static bool
169 arithmetic_flags_clobber_p (rtx_insn *insn)
170 {
171 rtx pat, x;
172
173 if (!NONJUMP_INSN_P (insn))
174 return false;
175 pat = PATTERN (insn);
176 if (asm_noperands (pat) >= 0)
177 return false;
178
179 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) == 2)
180 {
181 x = XVECEXP (pat, 0, 0);
182 if (GET_CODE (x) != SET)
183 return false;
184 x = SET_DEST (x);
185 if (!REG_P (x))
186 return false;
187
188 x = XVECEXP (pat, 0, 1);
189 if (GET_CODE (x) == CLOBBER)
190 {
191 x = XEXP (x, 0);
192 if (REG_P (x) && REGNO (x) == targetm.flags_regnum)
193 return true;
194 }
195 }
196
197 return false;
198 }
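/* Illustration of the accepted shape: a two-element PARALLEL such as

       (parallel [(set (reg:SI) (plus:SI (reg:SI) (reg:SI)))
                  (clobber (reg:CC))])

   where the clobbered register is the one named by targetm.flags_regnum.
   An asm, a bare SET, or a PARALLEL of any other shape is rejected.  */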
199
200 /* Look for uses of FLAGS in INSN. If we find one we can analyze, record
201 it in CMP; otherwise indicate that we've missed a use. */
202
203 static void
204 find_flags_uses_in_insn (struct comparison *cmp, rtx_insn *insn)
205 {
206 df_ref use;
207
208 /* If we've already lost track of uses, don't bother collecting more. */
209 if (cmp->missing_uses)
210 return;
211
212 /* Find a USE of the flags register. */
213 FOR_EACH_INSN_USE (use, insn)
214 if (DF_REF_REGNO (use) == targetm.flags_regnum)
215 {
216 rtx x, *loc;
217
218 /* If this is an unusual use, quit. */
219 if (DF_REF_TYPE (use) != DF_REF_REG_USE)
220 goto fail;
221
222 /* If we've run out of slots to record uses, quit. */
223 if (cmp->n_uses == MAX_CMP_USE)
224 goto fail;
225
226 /* Unfortunately the location of the flags register, while present
227 in the reference structure, doesn't help. We need to find the
228 comparison code that is outer to the actual flags use. */
229 loc = DF_REF_LOC (use);
230 x = PATTERN (insn);
231 if (GET_CODE (x) == PARALLEL)
232 x = XVECEXP (x, 0, 0);
233 x = SET_SRC (x);
234 if (GET_CODE (x) == IF_THEN_ELSE)
235 x = XEXP (x, 0);
236 if (COMPARISON_P (x)
237 && loc == &XEXP (x, 0)
238 && XEXP (x, 1) == const0_rtx)
239 {
240 /* We've found a use of the flags that we understand. */
241 struct comparison_use *cuse = &cmp->uses[cmp->n_uses++];
242 cuse->insn = insn;
243 cuse->loc = loc;
244 cuse->code = GET_CODE (x);
245 }
246 else
247 goto fail;
248 }
249 return;
250
251 fail:
252 /* We failed to recognize this use of the flags register. */
253 cmp->missing_uses = true;
254 }
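/* Schematically, the flags uses understood here are comparisons of the
   flags register against zero, found either directly in a SET_SRC or as
   the condition of an IF_THEN_ELSE, e.g.

       (set (pc) (if_then_else (ge (reg:CC) (const_int 0)) ... ...))
       (set (reg:SI) (ne:SI (reg:CC) (const_int 0)))

   Any other appearance of the flags register sets missing_uses instead.  */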
255
256 class find_comparison_dom_walker : public dom_walker
257 {
258 public:
259 find_comparison_dom_walker (cdi_direction direction)
260 : dom_walker (direction) {}
261
262 virtual edge before_dom_children (basic_block);
263 };
264
265 /* Return true if conforming COMPARE with EH_NOTE is redundant with comparison
266 CMP and can thus be eliminated. */
267
268 static bool
269 can_eliminate_compare (rtx compare, rtx eh_note, struct comparison *cmp)
270 {
271 /* Take care that it's in the same EH region. */
272 if (cfun->can_throw_non_call_exceptions
273 && !rtx_equal_p (eh_note, cmp->eh_note))
274 return false;
275
276 /* Make sure the compare is redundant with the previous. */
277 if (!rtx_equal_p (XEXP (compare, 0), cmp->in_a)
278 || !rtx_equal_p (XEXP (compare, 1), cmp->in_b))
279 return false;
280
281 /* New mode must be compatible with the previous compare mode. */
282 machine_mode new_mode
283 = targetm.cc_modes_compatible (GET_MODE (compare), cmp->orig_mode);
284
285 if (new_mode == VOIDmode)
286 return false;
287
288 if (cmp->orig_mode != new_mode)
289 {
290 /* Generate new comparison for substitution. */
291 rtx flags = gen_rtx_REG (new_mode, targetm.flags_regnum);
292 rtx x = gen_rtx_COMPARE (new_mode, cmp->in_a, cmp->in_b);
293 x = gen_rtx_SET (flags, x);
294
295 if (!validate_change (cmp->insn, &PATTERN (cmp->insn), x, false))
296 return false;
297
298 cmp->orig_mode = new_mode;
299 }
300
301 return true;
302 }
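/* For instance, if CMP was recorded in CCmode and an identical compare of
   the same operands now arrives in CCZmode, targetm.cc_modes_compatible
   chooses a mode valid for both: returning CMP's original mode means the
   new compare is simply redundant, while any other non-VOIDmode answer
   first causes CMP's own insn to be rewritten into that mode via
   validate_change before the caller deletes the duplicate.  */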
303
304 /* Identify comparison instructions within BB.  If the flags set by the last
305    compare in the BB are live at the end of the block, install the compare
306 in BB->AUX. Called via dom_walker.walk (). */
307
308 edge
309 find_comparison_dom_walker::before_dom_children (basic_block bb)
310 {
311 struct comparison *last_cmp;
312 rtx_insn *insn, *next, *last_clobber;
313 bool last_cmp_valid;
314 bool need_purge = false;
315 bitmap killed;
316
317 killed = BITMAP_ALLOC (NULL);
318
319 /* The last comparison that was made. Will be reset to NULL
320 once the flags are clobbered. */
321 last_cmp = NULL;
322
323 /* True iff the last comparison has not been clobbered, nor
324 have its inputs. Used to eliminate duplicate compares. */
325 last_cmp_valid = false;
326
327 /* The last insn that clobbered the flags, if that insn is of
328 a form that may be valid for eliminating a following compare.
329 To be reset to NULL once the flags are set otherwise. */
330 last_clobber = NULL;
331
332 /* Propagate the last live comparison throughout the extended basic block. */
333 if (single_pred_p (bb))
334 {
335 last_cmp = (struct comparison *) single_pred (bb)->aux;
336 if (last_cmp)
337 last_cmp_valid = last_cmp->inputs_valid;
338 }
339
340 for (insn = BB_HEAD (bb); insn; insn = next)
341 {
342 rtx src;
343
344 next = (insn == BB_END (bb) ? NULL : NEXT_INSN (insn));
345 if (!NONDEBUG_INSN_P (insn))
346 continue;
347
348 /* Compute the set of registers modified by this instruction. */
349 bitmap_clear (killed);
350 df_simulate_find_defs (insn, killed);
351
352 src = conforming_compare (insn);
353 if (src)
354 {
355 rtx eh_note = NULL;
356
357 if (cfun->can_throw_non_call_exceptions)
358 eh_note = find_reg_note (insn, REG_EH_REGION, NULL);
359
360 if (last_cmp_valid && can_eliminate_compare (src, eh_note, last_cmp))
361 {
362 if (eh_note)
363 need_purge = true;
364 delete_insn (insn);
365 continue;
366 }
367
368 last_cmp = XCNEW (struct comparison);
369 last_cmp->insn = insn;
370 last_cmp->prev_clobber = last_clobber;
371 last_cmp->in_a = XEXP (src, 0);
372 last_cmp->in_b = XEXP (src, 1);
373 last_cmp->eh_note = eh_note;
374 last_cmp->orig_mode = GET_MODE (src);
375 all_compares.safe_push (last_cmp);
376
377 /* It's unusual, but be prepared for comparison patterns that
378 also clobber an input, or perhaps a scratch. */
379 last_clobber = NULL;
380 last_cmp_valid = true;
381 }
382
383 else
384 {
385 /* Notice if this instruction uses the flags register. */
386 if (last_cmp)
387 find_flags_uses_in_insn (last_cmp, insn);
388
389 /* Notice if this instruction kills the flags register. */
390 if (bitmap_bit_p (killed, targetm.flags_regnum))
391 {
392 /* See if this insn could be the "clobber" that eliminates
393 a future comparison. */
394 last_clobber = (arithmetic_flags_clobber_p (insn) ? insn : NULL);
395
396 /* In either case, the previous compare is no longer valid. */
397 last_cmp = NULL;
398 last_cmp_valid = false;
399 }
400 }
401
402 /* Notice if any of the inputs to the comparison have changed. */
403 if (last_cmp_valid
404 && (bitmap_bit_p (killed, REGNO (last_cmp->in_a))
405 || (REG_P (last_cmp->in_b)
406 && bitmap_bit_p (killed, REGNO (last_cmp->in_b)))))
407 last_cmp_valid = false;
408 }
409
410 BITMAP_FREE (killed);
411
412 /* Remember the live comparison for subsequent members of
413 the extended basic block. */
414 if (last_cmp)
415 {
416 bb->aux = last_cmp;
417 last_cmp->inputs_valid = last_cmp_valid;
418
419 /* Look to see if the flags register is live outgoing here, and
420 incoming to any successor not part of the extended basic block. */
421 if (bitmap_bit_p (df_get_live_out (bb), targetm.flags_regnum))
422 {
423 edge e;
424 edge_iterator ei;
425
426 FOR_EACH_EDGE (e, ei, bb->succs)
427 {
428 basic_block dest = e->dest;
429 if (bitmap_bit_p (df_get_live_in (bb), targetm.flags_regnum)
430 && !single_pred_p (dest))
431 {
432 last_cmp->missing_uses = true;
433 break;
434 }
435 }
436 }
437 }
438
439 /* If we deleted a compare with a REG_EH_REGION note, we may need to
440 remove EH edges. */
441 if (need_purge)
442 purge_dead_edges (bb);
443
444 return NULL;
445 }
446
447 /* Find all comparisons in the function. */
448
449 static void
450 find_comparisons (void)
451 {
452 calculate_dominance_info (CDI_DOMINATORS);
453
454 find_comparison_dom_walker (CDI_DOMINATORS)
455 .walk (cfun->cfg->x_entry_block_ptr);
456
457 clear_aux_for_blocks ();
458 free_dominance_info (CDI_DOMINATORS);
459 }
460
461 /* Select an alternate CC_MODE for a comparison insn comparing A and B.
462 Note that the inputs are almost certainly different from the IN_A and IN_B
463 stored in CMP -- we're called while attempting to eliminate the compare
464 after all. Return the new FLAGS rtx if successful, else return NULL.
465 Note that this function may start a change group. */
466
467 static rtx
468 maybe_select_cc_mode (struct comparison *cmp, rtx a ATTRIBUTE_UNUSED,
469 rtx b ATTRIBUTE_UNUSED)
470 {
471 machine_mode sel_mode;
472 const int n = cmp->n_uses;
473 rtx flags = NULL;
474
475 #ifndef SELECT_CC_MODE
476 /* Minimize code differences when this target macro is undefined. */
477 return NULL;
478 #define SELECT_CC_MODE(A,B,C) (gcc_unreachable (), VOIDmode)
479 #endif
480
481 /* If we don't have access to all of the uses, we can't validate. */
482 if (cmp->missing_uses || n == 0)
483 return NULL;
484
485 /* Find a new mode that works for all of the uses. Special case the
486 common case of exactly one use. */
487 if (n == 1)
488 {
489 sel_mode = SELECT_CC_MODE (cmp->uses[0].code, a, b);
490 if (sel_mode != cmp->orig_mode)
491 {
492 flags = gen_rtx_REG (sel_mode, targetm.flags_regnum);
493 validate_change (cmp->uses[0].insn, cmp->uses[0].loc, flags, true);
494 }
495 }
496 else
497 {
498 int i;
499
500 sel_mode = SELECT_CC_MODE (cmp->uses[0].code, a, b);
501 for (i = 1; i < n; ++i)
502 {
503 machine_mode new_mode = SELECT_CC_MODE (cmp->uses[i].code, a, b);
504 if (new_mode != sel_mode)
505 {
506 sel_mode = targetm.cc_modes_compatible (sel_mode, new_mode);
507 if (sel_mode == VOIDmode)
508 return NULL;
509 }
510 }
511
512 if (sel_mode != cmp->orig_mode)
513 {
514 flags = gen_rtx_REG (sel_mode, targetm.flags_regnum);
515 for (i = 0; i < n; ++i)
516 validate_change (cmp->uses[i].insn, cmp->uses[i].loc, flags, true);
517 }
518 }
519
520 return flags;
521 }
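/* Worked example: with two recorded uses, say GE and EQ, this computes
   SELECT_CC_MODE (GE, a, b) and SELECT_CC_MODE (EQ, a, b); if the answers
   differ they are merged through targetm.cc_modes_compatible, and only
   when the resulting mode differs from CMP's original mode are the flags
   uses queued (via validate_change) to be rewritten in the new mode.  */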
522
523 /* Return a register RTX holding the same value at START as REG at END, or
524 NULL_RTX if there is none. */
525
526 static rtx
527 equivalent_reg_at_start (rtx reg, rtx_insn *end, rtx_insn *start)
528 {
529 machine_mode orig_mode = GET_MODE (reg);
530 rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (end));
531
532 for (rtx_insn *insn = PREV_INSN (end);
533 insn != start;
534 insn = PREV_INSN (insn))
535 {
536 const int abnormal_flags
537 = (DF_REF_CONDITIONAL | DF_REF_PARTIAL | DF_REF_MAY_CLOBBER
538 | DF_REF_MUST_CLOBBER | DF_REF_SIGN_EXTRACT
539 | DF_REF_ZERO_EXTRACT | DF_REF_STRICT_LOW_PART
540 | DF_REF_PRE_POST_MODIFY);
541 df_ref def;
542
543 /* Note that the BB_HEAD is always either a note or a label, but in
544 any case it means that REG is defined outside the block. */
545 if (insn == bb_head)
546 return NULL_RTX;
547 if (NOTE_P (insn) || DEBUG_INSN_P (insn))
548 continue;
549
550 /* Find a possible def of REG in INSN. */
551 FOR_EACH_INSN_DEF (def, insn)
552 if (DF_REF_REGNO (def) == REGNO (reg))
553 break;
554
555 /* No definitions of REG; continue searching. */
556 if (def == NULL)
557 continue;
558
559 /* Bail if this is not a totally normal set of REG. */
560 if (DF_REF_IS_ARTIFICIAL (def))
561 return NULL_RTX;
562 if (DF_REF_FLAGS (def) & abnormal_flags)
563 return NULL_RTX;
564
565 /* We've found an insn between the compare and the clobber that sets
566 REG. Given that pass_cprop_hardreg has not yet run, we still find
567 situations in which we can usefully look through a copy insn. */
568 rtx x = single_set (insn);
569 if (x == NULL_RTX)
570 return NULL_RTX;
571 reg = SET_SRC (x);
572 if (!REG_P (reg))
573 return NULL_RTX;
574 }
575
576 if (GET_MODE (reg) != orig_mode)
577 return NULL_RTX;
578
579 return reg;
580 }
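/* A small worked example, with hypothetical register numbers: given

       (set (reg:SI 3) (reg:SI 7))

   between START and END, a query for (reg:SI 3) at END walks backwards,
   looks through the copy, and continues searching for an earlier definition
   of (reg:SI 7); if none is found before START, (reg:SI 7) is returned,
   provided its mode matches the original register's mode.  */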
581
582 /* Attempt to replace a comparison with a prior arithmetic insn that can
583 compute the same flags value as the comparison itself. Return true if
584 successful, having made all rtl modifications necessary. */
585
586 static bool
587 try_eliminate_compare (struct comparison *cmp)
588 {
589 rtx flags, in_a, in_b, cmp_src;
590
591 /* We must have found an interesting "clobber" preceding the compare. */
592 if (cmp->prev_clobber == NULL)
593 return false;
594
595 /* Verify that IN_A is not clobbered in between CMP and PREV_CLOBBER.
596 Given that this target requires this pass, we can assume that most
597 insns do clobber the flags, and so the distance between the compare
598 and the clobber is likely to be small. */
599 /* ??? This is one point at which one could argue that DF_REF_CHAIN would
600 be useful, but it is thought to be too heavy-weight a solution here. */
601 in_a = equivalent_reg_at_start (cmp->in_a, cmp->insn, cmp->prev_clobber);
602 if (!in_a)
603 return false;
604
605 /* Likewise for IN_B if need be. */
606 if (CONSTANT_P (cmp->in_b))
607 in_b = cmp->in_b;
608 else if (REG_P (cmp->in_b))
609 {
610 in_b = equivalent_reg_at_start (cmp->in_b, cmp->insn, cmp->prev_clobber);
611 if (!in_b)
612 return false;
613 }
614 else if (GET_CODE (cmp->in_b) == UNSPEC)
615 {
616 const int len = XVECLEN (cmp->in_b, 0);
617 rtvec v = rtvec_alloc (len);
618 for (int i = 0; i < len; i++)
619 {
620 rtx r = equivalent_reg_at_start (XVECEXP (cmp->in_b, 0, i),
621 cmp->insn, cmp->prev_clobber);
622 if (!r)
623 return false;
624 RTVEC_ELT (v, i) = r;
625 }
626 in_b = gen_rtx_UNSPEC (GET_MODE (cmp->in_b), v, XINT (cmp->in_b, 1));
627 }
628 else
629 gcc_unreachable ();
630
631 /* We've reached PREV_CLOBBER without finding a modification of IN_A.
632 Validate that PREV_CLOBBER itself does in fact refer to IN_A. Do
633 recall that we've already validated the shape of PREV_CLOBBER. */
634 rtx_insn *insn = cmp->prev_clobber;
635
636 rtx x = XVECEXP (PATTERN (insn), 0, 0);
637 if (rtx_equal_p (SET_DEST (x), in_a))
638 cmp_src = SET_SRC (x);
639
640 /* Also check operations with implicit extensions, e.g.:
641 [(set (reg:DI)
642 (zero_extend:DI (plus:SI (reg:SI) (reg:SI))))
643 (set (reg:CCZ flags)
644 (compare:CCZ (plus:SI (reg:SI) (reg:SI))
645 (const_int 0)))] */
646 else if (REG_P (SET_DEST (x))
647 && REG_P (in_a)
648 && REGNO (SET_DEST (x)) == REGNO (in_a)
649 && (GET_CODE (SET_SRC (x)) == ZERO_EXTEND
650 || GET_CODE (SET_SRC (x)) == SIGN_EXTEND)
651 && GET_MODE (XEXP (SET_SRC (x), 0)) == GET_MODE (in_a))
652 cmp_src = XEXP (SET_SRC (x), 0);
653
654 /* Also check fully redundant comparisons, e.g.:
655 [(set (reg:SI)
656 (minus:SI (reg:SI) (reg:SI)))
657 (set (reg:CC flags)
658 (compare:CC (reg:SI) (reg:SI)))] */
659 else if (REG_P (in_b)
660 && GET_CODE (SET_SRC (x)) == MINUS
661 && rtx_equal_p (XEXP (SET_SRC (x), 0), in_a)
662 && rtx_equal_p (XEXP (SET_SRC (x), 1), in_b))
663 cmp_src = in_a;
664
665 else
666 return false;
667
668 /* Determine if we ought to use a different CC_MODE here. */
669 flags = maybe_select_cc_mode (cmp, cmp_src, in_b);
670 if (flags == NULL)
671 flags = gen_rtx_REG (cmp->orig_mode, targetm.flags_regnum);
672
673 /* Generate a new comparison for installation in the setter. */
674 rtx y = copy_rtx (cmp_src);
675 y = gen_rtx_COMPARE (GET_MODE (flags), y, in_b);
676 y = gen_rtx_SET (flags, y);
677
678 /* Canonicalize instruction to:
679 [(set (reg:CCM) (compare:CCM (operation) (immediate)))
680 (set (reg) (operation))] */
681
682 rtvec v = rtvec_alloc (2);
683 RTVEC_ELT (v, 0) = y;
684 RTVEC_ELT (v, 1) = x;
685
686 rtx pat = gen_rtx_PARALLEL (VOIDmode, v);
687
688 /* Succeed if the new instruction is valid. Note that we may have started
689 a change group within maybe_select_cc_mode, and so we must continue it. */
690 validate_change (insn, &PATTERN (insn), pat, true);
691
692 if (!apply_change_group ())
693 return false;
694
695 /* Success. Delete the compare insn... */
696 delete_insn (cmp->insn);
697
698 /* ... and any notes that are now invalid due to multiple sets. */
699 x = find_regno_note (insn, REG_UNUSED, targetm.flags_regnum);
700 if (x)
701 remove_note (insn, x);
702 x = find_reg_note (insn, REG_EQUAL, NULL);
703 if (x)
704 remove_note (insn, x);
705 x = find_reg_note (insn, REG_EQUIV, NULL);
706 if (x)
707 remove_note (insn, x);
708
709 return true;
710 }
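/* Putting the pieces together, and with purely illustrative register
   numbers, the transformation implemented above looks like

       before:  (parallel [(set (reg:SI 1) (plus:SI (reg:SI 1) (reg:SI 2)))
                           (clobber (reg:CC))])
                ...
                (set (reg:CCZ) (compare:CCZ (reg:SI 1) (const_int 0)))

       after:   (parallel [(set (reg:CCZ)
                                (compare:CCZ (plus:SI (reg:SI 1) (reg:SI 2))
                                             (const_int 0)))
                           (set (reg:SI 1) (plus:SI (reg:SI 1) (reg:SI 2)))])

   assuming the flags users only test for equality so that a CCZ-style mode
   is selected; the separate compare insn is then deleted and any stale
   REG_UNUSED, REG_EQUAL or REG_EQUIV notes on the arithmetic insn are
   removed.  */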
711
712 /* Main entry point to the pass. */
713
714 static unsigned int
715 execute_compare_elim_after_reload (void)
716 {
717 df_analyze ();
718
719 gcc_checking_assert (!all_compares.exists ());
720
721 /* Locate all comparisons and their uses, and eliminate duplicates. */
722 find_comparisons ();
723 if (all_compares.exists ())
724 {
725 struct comparison *cmp;
726 size_t i;
727
728 /* Eliminate comparisons that are redundant with flags computation. */
729 FOR_EACH_VEC_ELT (all_compares, i, cmp)
730 {
731 try_eliminate_compare (cmp);
732 XDELETE (cmp);
733 }
734
735 all_compares.release ();
736 }
737
738 return 0;
739 }
740
741 namespace {
742
743 const pass_data pass_data_compare_elim_after_reload =
744 {
745 RTL_PASS, /* type */
746 "cmpelim", /* name */
747 OPTGROUP_NONE, /* optinfo_flags */
748 TV_NONE, /* tv_id */
749 0, /* properties_required */
750 0, /* properties_provided */
751 0, /* properties_destroyed */
752 0, /* todo_flags_start */
753 ( TODO_df_finish | TODO_df_verify ), /* todo_flags_finish */
754 };
755
756 class pass_compare_elim_after_reload : public rtl_opt_pass
757 {
758 public:
759 pass_compare_elim_after_reload (gcc::context *ctxt)
760 : rtl_opt_pass (pass_data_compare_elim_after_reload, ctxt)
761 {}
762
763 /* opt_pass methods: */
764 virtual bool gate (function *)
765 {
766 /* Setting the flags_regnum target hook is how a backend requests this pass. */
767 if (targetm.flags_regnum == INVALID_REGNUM)
768 return false;
769 return flag_compare_elim_after_reload;
770 }
771
772 virtual unsigned int execute (function *)
773 {
774 return execute_compare_elim_after_reload ();
775 }
776
777 }; // class pass_compare_elim_after_reload
778
779 } // anon namespace
780
781 rtl_opt_pass *
782 make_pass_compare_elim_after_reload (gcc::context *ctxt)
783 {
784 return new pass_compare_elim_after_reload (ctxt);
785 }