decl.c (value_annotation_hasher::handle_cache_entry): Delete.
[gcc.git] / gcc / dojump.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "alias.h"
26 #include "symtab.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "stor-layout.h"
30 #include "flags.h"
31 #include "hard-reg-set.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
36 #include "expmed.h"
37 #include "dojump.h"
38 #include "explow.h"
39 #include "calls.h"
40 #include "emit-rtl.h"
41 #include "varasm.h"
42 #include "stmt.h"
43 #include "expr.h"
44 #include "insn-codes.h"
45 #include "optabs.h"
46 #include "langhooks.h"
47 #include "predict.h"
48 #include "basic-block.h"
49 #include "tm_p.h"
50
51 static bool prefer_and_bit_test (machine_mode, int);
52 static void do_jump_by_parts_greater (tree, tree, int,
53 rtx_code_label *, rtx_code_label *, int);
54 static void do_jump_by_parts_equality (tree, tree, rtx_code_label *,
55 rtx_code_label *, int);
56 static void do_compare_and_jump (tree, tree, enum rtx_code, enum rtx_code,
57 rtx_code_label *, rtx_code_label *, int);
58
59 /* Invert probability if there is any. -1 stands for unknown. */
60
61 static inline int
62 inv (int prob)
63 {
64 return prob == -1 ? -1 : REG_BR_PROB_BASE - prob;
65 }
66
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  /* pending_stack_adjust is global expansion state (declared elsewhere);
     zero means no argument bytes are awaiting a stack pop.  */
  pending_stack_adjust = 0;
}
75
/* Discard any pending stack adjustment.  This avoid relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */
void
discard_pending_stack_adjust (void)
{
  /* Keep stack_pointer_delta consistent: the adjustment will never be
     emitted, so back it out of the running delta before clearing.  */
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}
85
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  /* Only safe when optimizing, when a frame pointer is guaranteed to
     exist (either not omitted, or forced by alloca), and when the target
     says the exit code ignores the stack pointer (EXIT_IGNORE_STACK).  */
  if (optimize > 0
      && (! flag_omit_frame_pointer || cfun->calls_alloca)
      && EXIT_IGNORE_STACK)
    discard_pending_stack_adjust ();
}
100
101 /* Pop any previously-pushed arguments that have not been popped yet. */
102
103 void
104 do_pending_stack_adjust (void)
105 {
106 if (inhibit_defer_pop == 0)
107 {
108 if (pending_stack_adjust != 0)
109 adjust_stack (GEN_INT (pending_stack_adjust));
110 pending_stack_adjust = 0;
111 }
112 }
113
/* Remember pending_stack_adjust/stack_pointer_delta.
   To be used around code that may call do_pending_stack_adjust (),
   but the generated code could be discarded e.g. using delete_insns_since.
   SAVE receives a snapshot of both globals; restore with
   restore_pending_stack_adjust.  */

void
save_pending_stack_adjust (saved_pending_stack_adjust *save)
{
  save->x_pending_stack_adjust = pending_stack_adjust;
  save->x_stack_pointer_delta = stack_pointer_delta;
}
124
/* Restore the saved pending_stack_adjust/stack_pointer_delta from SAVE.
   Only done when popping is not inhibited; under NO_DEFER_POP the
   current values are intentionally kept.  */

void
restore_pending_stack_adjust (saved_pending_stack_adjust *save)
{
  if (inhibit_defer_pop == 0)
    {
      pending_stack_adjust = save->x_pending_stack_adjust;
      stack_pointer_delta = save->x_stack_pointer_delta;
    }
}
136 \f
137 /* Expand conditional expressions. */
138
/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   PROB is the probability (out of REG_BR_PROB_BASE) that EXP is nonzero,
   or -1 if unknown; it is inverted because the jump here is taken when
   EXP is false.  */

void
jumpifnot (tree exp, rtx_code_label *label, int prob)
{
  do_jump (exp, label, NULL, inv (prob));
}
146
/* Like jumpifnot, but for an exploded comparison OP0 CODE OP1.
   Jump to LABEL when the comparison is false; PROB is the probability
   that it is true, or -1 if unknown (hence the inversion).  */

void
jumpifnot_1 (enum tree_code code, tree op0, tree op1, rtx_code_label *label,
	     int prob)
{
  do_jump_1 (code, op0, op1, label, NULL, inv (prob));
}
153
/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.
   PROB is the probability of the jump being taken, or -1 if unknown.  */

void
jumpif (tree exp, rtx_code_label *label, int prob)
{
  do_jump (exp, NULL, label, prob);
}
161
/* Like jumpif, but for an exploded comparison OP0 CODE OP1.
   Jump to LABEL when the comparison is true; PROB as in jumpif.  */

void
jumpif_1 (enum tree_code code, tree op0, tree op1,
	  rtx_code_label *label, int prob)
{
  do_jump_1 (code, op0, op1, NULL, label, prob);
}
168
/* Used internally by prefer_and_bit_test.  These are lazily-created,
   GC-rooted scratch rtxes that are reused (re-moded and re-filled) on
   every call to avoid reallocating them for each cost query.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;
174
/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (machine_mode mode, int bitnum)
{
  bool speed_p;
  /* Mask with only bit BITNUM set, in the precision of MODE.  */
  wide_int mask = wi::set_bit_in_zero (bitnum, GET_MODE_PRECISION (mode));

  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
	 for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, LAST_VIRTUAL_REGISTER + 1);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
				const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers: the AND mask for the first form, the shift
     count for the second.  */
  XEXP (and_test, 1) = immed_wide_int_const (mask, mode);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  /* Ask the target's cost model which form is cheaper in the context
     of a conditional branch (IF_THEN_ELSE).  Ties favor the AND form.  */
  speed_p = optimize_insn_for_speed_p ();
  return (rtx_cost (and_test, IF_THEN_ELSE, 0, speed_p)
	  <= rtx_cost (shift_test, IF_THEN_ELSE, 0, speed_p));
}
211
/* Subroutine of do_jump, dealing with exploded comparisons of the type
   OP0 CODE OP1 .  IF_FALSE_LABEL and IF_TRUE_LABEL like in do_jump.
   PROB is probability of jump to if_true_label, or -1 if unknown.

   For comparisons whose sense is opposite the available primitive
   (e.g. EQ via do_jump on the operand, LE via a swapped greater-than
   by parts), the labels and PROB are inverted together so the final
   branch probabilities stay correct.  */

void
do_jump_1 (enum tree_code code, tree op0, tree op1,
	   rtx_code_label *if_false_label, rtx_code_label *if_true_label,
	   int prob)
{
  machine_mode mode;
  rtx_code_label *drop_through_label = 0;

  switch (code)
    {
    case EQ_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);

	/* Complex equality must have been lowered earlier; only scalar
	   modes are handled here.  */
	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_FLOAT);
	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_INT);

	/* OP0 == 0 is just the logical negation of OP0, so jump on the
	   operand itself with the labels (and PROB) swapped.  */
	if (integer_zerop (op1))
	  do_jump (op0, if_true_label, if_false_label, inv (prob));
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
	  /* Integer mode too wide for a single compare insn: compare
	     word by word.  */
	  do_jump_by_parts_equality (op0, op1, if_false_label, if_true_label,
				     prob);
	else
	  do_compare_and_jump (op0, op1, EQ, EQ, if_false_label, if_true_label,
			       prob);
	break;
      }

    case NE_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);

	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_FLOAT);
	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_INT);

	/* OP0 != 0 is just OP0 viewed as a truth value.  */
	if (integer_zerop (op1))
	  do_jump (op0, if_false_label, if_true_label, prob);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
	  /* By-parts routine tests equality, so swap labels and invert
	     the probability to get inequality.  */
	  do_jump_by_parts_equality (op0, op1, if_true_label, if_false_label,
				     inv (prob));
	else
	  do_compare_and_jump (op0, op1, NE, NE, if_false_label, if_true_label,
			       prob);
	break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LT, mode, ccp_jump))
	/* LT == swapped GT, hence SWAP=1.  */
	do_jump_by_parts_greater (op0, op1, 1, if_false_label, if_true_label,
				  prob);
      else
	do_compare_and_jump (op0, op1, LT, LTU, if_false_label, if_true_label,
			     prob);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LE, mode, ccp_jump))
	/* LE == !GT: use GT with swapped labels and inverted PROB.  */
	do_jump_by_parts_greater (op0, op1, 0, if_true_label, if_false_label,
				  inv (prob));
      else
	do_compare_and_jump (op0, op1, LE, LEU, if_false_label, if_true_label,
			     prob);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GT, mode, ccp_jump))
	do_jump_by_parts_greater (op0, op1, 0, if_false_label, if_true_label,
				  prob);
      else
	do_compare_and_jump (op0, op1, GT, GTU, if_false_label, if_true_label,
			     prob);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	/* GE == !LT == !(swapped GT): swap operands and labels, invert
	   PROB.  */
	do_jump_by_parts_greater (op0, op1, 1, if_true_label, if_false_label,
				  inv (prob));
      else
	do_compare_and_jump (op0, op1, GE, GEU, if_false_label, if_true_label,
			     prob);
      break;

    case ORDERED_EXPR:
      do_compare_and_jump (op0, op1, ORDERED, ORDERED,
			   if_false_label, if_true_label, prob);
      break;

    case UNORDERED_EXPR:
      do_compare_and_jump (op0, op1, UNORDERED, UNORDERED,
			   if_false_label, if_true_label, prob);
      break;

    case UNLT_EXPR:
      do_compare_and_jump (op0, op1, UNLT, UNLT, if_false_label, if_true_label,
			   prob);
      break;

    case UNLE_EXPR:
      do_compare_and_jump (op0, op1, UNLE, UNLE, if_false_label, if_true_label,
			   prob);
      break;

    case UNGT_EXPR:
      do_compare_and_jump (op0, op1, UNGT, UNGT, if_false_label, if_true_label,
			   prob);
      break;

    case UNGE_EXPR:
      do_compare_and_jump (op0, op1, UNGE, UNGE, if_false_label, if_true_label,
			   prob);
      break;

    case UNEQ_EXPR:
      do_compare_and_jump (op0, op1, UNEQ, UNEQ, if_false_label, if_true_label,
			   prob);
      break;

    case LTGT_EXPR:
      do_compare_and_jump (op0, op1, LTGT, LTGT, if_false_label, if_true_label,
			   prob);
      break;

    case TRUTH_ANDIF_EXPR:
      {
	/* Spread the probability that the expression is false evenly between
	   the two conditions. So the first condition is false half the total
	   probability of being false. The second condition is false the other
	   half of the total probability of being false, so its jump has a false
	   probability of half the total, relative to the probability we
	   reached it (i.e. the first condition was true).  */
	int op0_prob = -1;
	int op1_prob = -1;
	if (prob != -1)
	  {
	    int false_prob = inv (prob);
	    int op0_false_prob = false_prob / 2;
	    /* Rescale the second half relative to the probability that the
	       first jump was not taken.  */
	    int op1_false_prob = GCOV_COMPUTE_SCALE ((false_prob / 2),
						     inv (op0_false_prob));
	    /* Get the probability that each jump below is true.  */
	    op0_prob = inv (op0_false_prob);
	    op1_prob = inv (op1_false_prob);
	  }
	if (if_false_label == NULL)
	  {
	    /* Caller wants fall-through on false: use a local label for
	       the short-circuit exit and emit it below.  */
	    drop_through_label = gen_label_rtx ();
	    do_jump (op0, drop_through_label, NULL, op0_prob);
	    do_jump (op1, NULL, if_true_label, op1_prob);
	  }
	else
	  {
	    do_jump (op0, if_false_label, NULL, op0_prob);
	    do_jump (op1, if_false_label, if_true_label, op1_prob);
	  }
	break;
      }

    case TRUTH_ORIF_EXPR:
      {
	/* Spread the probability evenly between the two conditions. So
	   the first condition has half the total probability of being true.
	   The second condition has the other half of the total probability,
	   so its jump has a probability of half the total, relative to
	   the probability we reached it (i.e. the first condition was false).  */
	int op0_prob = -1;
	int op1_prob = -1;
	if (prob != -1)
	  {
	    op0_prob = prob / 2;
	    op1_prob = GCOV_COMPUTE_SCALE ((prob / 2), inv (op0_prob));
	  }
	if (if_true_label == NULL)
	  {
	    drop_through_label = gen_label_rtx ();
	    do_jump (op0, NULL, drop_through_label, op0_prob);
	    do_jump (op1, if_false_label, NULL, op1_prob);
	  }
	else
	  {
	    do_jump (op0, NULL, if_true_label, op0_prob);
	    do_jump (op1, if_false_label, if_true_label, op1_prob);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Emit the local fall-through label created for the short-circuit
     cases, flushing any pending stack adjustment first since control
     can reach this point by a jump.  */
  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
424
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   PROB is probability of jump to if_true_label, or -1 if unknown.  */

void
do_jump (tree exp, rtx_code_label *if_false_label,
	 rtx_code_label *if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  machine_mode mode;
  rtx_code_label *drop_through_label = NULL;

  switch (code)
    {
    case ERROR_MARK:
      /* Errored trees generate no code.  */
      break;

    case INTEGER_CST:
      {
	/* Constant condition: emit an unconditional jump to whichever
	   label applies, or nothing if that label is a fall-through.  */
	rtx_code_label *lab = integer_zerop (exp) ? if_false_label
						  : if_true_label;
	if (lab)
	  emit_jump (lab);
	break;
      }

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
	emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      /* Conversions wrapping a memory reference must be evaluated
	 normally so the load actually happens.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
	goto normal;
      /* FALLTHRU */
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	goto normal;
      /* FALLTHRU */
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      /* Logical negation: jump on the operand with labels and PROB
	 swapped.  */
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
	       inv (prob));
      break;

    case COND_EXPR:
      {
	rtx_code_label *label1 = gen_label_rtx ();
	/* Both arms must end up at a real label; substitute a local
	   drop-through label for any missing one.  */
	if (!if_true_label || !if_false_label)
	  {
	    drop_through_label = gen_label_rtx ();
	    if (!if_true_label)
	      if_true_label = drop_through_label;
	    if (!if_false_label)
	      if_false_label = drop_through_label;
	  }

	do_pending_stack_adjust ();
	/* Test the condition; fall through to the THEN arm, jump to
	   label1 for the ELSE arm.  No probability info for the inner
	   test (-1).  */
	do_jump (TREE_OPERAND (exp, 0), label1, NULL, -1);
	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
	emit_label (label1);
	do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
	break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      /* All binary comparisons and short-circuit operators are handled
	 by do_jump_1 on the exploded operands.  */
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
	 See if the former is preferred for jump tests and restore it
	 if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
	{
	  tree exp0 = TREE_OPERAND (exp, 0);
	  rtx_code_label *set_label, *clr_label;
	  int setclr_prob = prob;

	  /* Strip narrowing integral type conversions.  */
	  while (CONVERT_EXPR_P (exp0)
		 && TREE_OPERAND (exp0, 0) != error_mark_node
		 && TYPE_PRECISION (TREE_TYPE (exp0))
		    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
	    exp0 = TREE_OPERAND (exp0, 0);

	  /* "exp0 ^ 1" inverts the sense of the single bit test.  */
	  if (TREE_CODE (exp0) == BIT_XOR_EXPR
	      && integer_onep (TREE_OPERAND (exp0, 1)))
	    {
	      exp0 = TREE_OPERAND (exp0, 0);
	      clr_label = if_true_label;
	      set_label = if_false_label;
	      setclr_prob = inv (prob);
	    }
	  else
	    {
	      clr_label = if_false_label;
	      set_label = if_true_label;
	    }

	  if (TREE_CODE (exp0) == RSHIFT_EXPR)
	    {
	      tree arg = TREE_OPERAND (exp0, 0);
	      tree shift = TREE_OPERAND (exp0, 1);
	      tree argtype = TREE_TYPE (arg);
	      if (TREE_CODE (shift) == INTEGER_CST
		  && compare_tree_int (shift, 0) >= 0
		  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
		  && prefer_and_bit_test (TYPE_MODE (argtype),
					  TREE_INT_CST_LOW (shift)))
		{
		  /* Rebuild the original (ARG & (1 << SHIFT)) form and
		     jump on that instead.  */
		  unsigned HOST_WIDE_INT mask
		    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
		  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
				   build_int_cstu (argtype, mask)),
			   clr_label, set_label, setclr_prob);
		  break;
		}
	    }
	}

      /* If we are AND'ing with a small constant, do this comparison in the
	 smallest type that fits.  If the machine doesn't have comparisons
	 that small, it will be converted back to the wider comparison.
	 This helps if we are testing the sign bit of a narrower object.
	 combine can't do this for us because it can't know whether a
	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
	  && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
	  && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	  && have_insn_for (COMPARE, TYPE_MODE (type)))
	{
	  do_jump (fold_convert (type, exp), if_false_label, if_true_label,
		   prob);
	  break;
	}

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
	  || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */
      /* FALLTHRU */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (),
		       false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      /* Generic path: evaluate EXP to an rtx and compare it against
	 zero.  */
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
	{
	  /* Compare promoted variables in their promoted mode.  */
	  if (SUBREG_PROMOTED_VAR_P (temp)
	      && REG_P (XEXP (temp, 0)))
	    temp = XEXP (temp, 0);
	  else
	    temp = copy_to_reg (temp);
	}
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
			       NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
			       GET_MODE (temp), NULL_RTX,
			       if_false_label, if_true_label, prob);
    }

  /* Emit the local drop-through label created by the COND_EXPR case.  */
  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
675 \f
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.
   Either label may be NULL to mean fall through; PROB is the
   probability of the jump to IF_TRUE_LABEL, or -1 if unknown.  */

static void
do_jump_by_parts_greater_rtx (machine_mode mode, int unsignedp, rtx op0,
			      rtx op1, rtx_code_label *if_false_label,
			      rtx_code_label *if_true_label,
			      int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx_code_label *drop_through_label = 0;
  bool drop_through_if_true = false, drop_through_if_false = false;
  enum rtx_code code = GT;
  int i;

  /* Substitute a shared local label for whichever real label is
     missing, remembering which outcome falls through.  */
  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    {
      if_true_label = drop_through_label;
      drop_through_if_true = true;
    }
  if (! if_false_label)
    {
      if_false_label = drop_through_label;
      drop_through_if_false = true;
    }

  /* Deal with the special case 0 > x: only one comparison is necessary and
     we reverse it to avoid jumping to the drop-through label.  */
  if (op0 == const0_rtx && drop_through_if_true && !drop_through_if_false)
    {
      code = LE;
      if_true_label = if_false_label;
      if_false_label = drop_through_label;
      drop_through_if_true = false;
      drop_through_if_false = true;
    }

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      /* Word index of the most significant word depends on endianness.  */
      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, code, (unsignedp || i > 0),
			       word_mode, NULL_RTX, NULL, if_true_label,
			       prob);

      /* Emit only one comparison for 0.  Do not emit the last cond jump.  */
      if (op0 == const0_rtx || i == nwords - 1)
	break;

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, NULL, if_false_label, inv (prob));
    }

  /* Falling out of the loop means the comparison is false.  */
  if (!drop_through_if_false)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
751
752 /* Given a comparison expression EXP for values too wide to be compared
753 with one insn, test the comparison and jump to the appropriate label.
754 The code of EXP is ignored; we always test GT if SWAP is 0,
755 and LT if SWAP is 1. */
756
757 static void
758 do_jump_by_parts_greater (tree treeop0, tree treeop1, int swap,
759 rtx_code_label *if_false_label,
760 rtx_code_label *if_true_label, int prob)
761 {
762 rtx op0 = expand_normal (swap ? treeop1 : treeop0);
763 rtx op1 = expand_normal (swap ? treeop0 : treeop1);
764 machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
765 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0));
766
767 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
768 if_true_label, prob);
769 }
770 \f
/* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
   mode, MODE, that is too wide for the available compare insns.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL
   to indicate drop through.  */

static void
do_jump_by_parts_zero_rtx (machine_mode mode, rtx op0,
			   rtx_code_label *if_false_label,
			   rtx_code_label *if_true_label, int prob)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx_code_label *drop_through_label = NULL;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, mode));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, mode),
			 part, 1, OPTAB_WIDEN);

  /* expand_binop can return 0 on failure; only use the OR'd result
     when it succeeded for every word.  */
  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, if_false_label, if_true_label, prob);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    if_false_label = drop_through_label = gen_label_rtx ();

  /* Any nonzero word sends control to IF_FALSE_LABEL; reaching the end
     means every word was zero.  */
  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
			     if_false_label, NULL, prob);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
820
/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_equality_rtx (machine_mode mode, rtx op0, rtx op1,
			       rtx_code_label *if_false_label,
			       rtx_code_label *if_true_label, int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx_code_label *drop_through_label = NULL;
  int i;

  /* Comparison against zero has a cheaper specialized form (OR all the
     words together); use it when either operand is literal zero.  */
  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label,
				 prob);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label,
				 prob);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  /* Compare word by word; any mismatching pair sends control to
     IF_FALSE_LABEL, so reaching the end means the operands are equal.  */
  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, 0, word_mode, NULL_RTX,
			     if_false_label, NULL, prob);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
862
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   TREEOP0 and TREEOP1 are the two operands; labels and PROB as in
   do_jump.  */

static void
do_jump_by_parts_equality (tree treeop0, tree treeop1,
			   rtx_code_label *if_false_label,
			   rtx_code_label *if_true_label, int prob)
{
  rtx op0 = expand_normal (treeop0);
  rtx op1 = expand_normal (treeop1);
  /* Both operands of an EQ_EXPR share a mode; take it from the first.  */
  machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
				 if_true_label, prob);
}
877 \f
878 /* Split a comparison into two others, the second of which has the other
879 "orderedness". The first is always ORDERED or UNORDERED if MODE
880 does not honor NaNs (which means that it can be skipped in that case;
881 see do_compare_rtx_and_jump).
882
883 The two conditions are written in *CODE1 and *CODE2. Return true if
884 the conditions must be ANDed, false if they must be ORed. */
885
886 bool
887 split_comparison (enum rtx_code code, machine_mode mode,
888 enum rtx_code *code1, enum rtx_code *code2)
889 {
890 switch (code)
891 {
892 case LT:
893 *code1 = ORDERED;
894 *code2 = UNLT;
895 return true;
896 case LE:
897 *code1 = ORDERED;
898 *code2 = UNLE;
899 return true;
900 case GT:
901 *code1 = ORDERED;
902 *code2 = UNGT;
903 return true;
904 case GE:
905 *code1 = ORDERED;
906 *code2 = UNGE;
907 return true;
908 case EQ:
909 *code1 = ORDERED;
910 *code2 = UNEQ;
911 return true;
912 case NE:
913 *code1 = UNORDERED;
914 *code2 = LTGT;
915 return false;
916 case UNLT:
917 *code1 = UNORDERED;
918 *code2 = LT;
919 return false;
920 case UNLE:
921 *code1 = UNORDERED;
922 *code2 = LE;
923 return false;
924 case UNGT:
925 *code1 = UNORDERED;
926 *code2 = GT;
927 return false;
928 case UNGE:
929 *code1 = UNORDERED;
930 *code2 = GE;
931 return false;
932 case UNEQ:
933 *code1 = UNORDERED;
934 *code2 = EQ;
935 return false;
936 case LTGT:
937 /* Do not turn a trapping comparison into a non-trapping one. */
938 if (HONOR_SNANS (mode))
939 {
940 *code1 = LT;
941 *code2 = GT;
942 return false;
943 }
944 else
945 {
946 *code1 = ORDERED;
947 *code2 = NE;
948 return true;
949 }
950 default:
951 gcc_unreachable ();
952 }
953 }
954
955
956 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
957 The decision as to signed or unsigned comparison must be made by the caller.
958
959 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
960 compared. */
961
962 void
963 do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
964 machine_mode mode, rtx size,
965 rtx_code_label *if_false_label,
966 rtx_code_label *if_true_label, int prob)
967 {
968 rtx tem;
969 rtx_code_label *dummy_label = NULL;
970
971 /* Reverse the comparison if that is safe and we want to jump if it is
972 false. Also convert to the reverse comparison if the target can
973 implement it. */
974 if ((! if_true_label
975 || ! can_compare_p (code, mode, ccp_jump))
976 && (! FLOAT_MODE_P (mode)
977 || code == ORDERED || code == UNORDERED
978 || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
979 || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
980 {
981 enum rtx_code rcode;
982 if (FLOAT_MODE_P (mode))
983 rcode = reverse_condition_maybe_unordered (code);
984 else
985 rcode = reverse_condition (code);
986
987 /* Canonicalize to UNORDERED for the libcall. */
988 if (can_compare_p (rcode, mode, ccp_jump)
989 || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
990 {
991 std::swap (if_true_label, if_false_label);
992 code = rcode;
993 prob = inv (prob);
994 }
995 }
996
997 /* If one operand is constant, make it the second one. Only do this
998 if the other operand is not constant as well. */
999
1000 if (swap_commutative_operands_p (op0, op1))
1001 {
1002 std::swap (op0, op1);
1003 code = swap_condition (code);
1004 }
1005
1006 do_pending_stack_adjust ();
1007
1008 code = unsignedp ? unsigned_condition (code) : code;
1009 if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
1010 op0, op1)))
1011 {
1012 if (CONSTANT_P (tem))
1013 {
1014 rtx_code_label *label = (tem == const0_rtx
1015 || tem == CONST0_RTX (mode))
1016 ? if_false_label : if_true_label;
1017 if (label)
1018 emit_jump (label);
1019 return;
1020 }
1021
1022 code = GET_CODE (tem);
1023 mode = GET_MODE (tem);
1024 op0 = XEXP (tem, 0);
1025 op1 = XEXP (tem, 1);
1026 unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
1027 }
1028
1029 if (! if_true_label)
1030 dummy_label = if_true_label = gen_label_rtx ();
1031
1032 if (GET_MODE_CLASS (mode) == MODE_INT
1033 && ! can_compare_p (code, mode, ccp_jump))
1034 {
1035 switch (code)
1036 {
1037 case LTU:
1038 do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
1039 if_false_label, if_true_label, prob);
1040 break;
1041
1042 case LEU:
1043 do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
1044 if_true_label, if_false_label,
1045 inv (prob));
1046 break;
1047
1048 case GTU:
1049 do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
1050 if_false_label, if_true_label, prob);
1051 break;
1052
1053 case GEU:
1054 do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
1055 if_true_label, if_false_label,
1056 inv (prob));
1057 break;
1058
1059 case LT:
1060 do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
1061 if_false_label, if_true_label, prob);
1062 break;
1063
1064 case LE:
1065 do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
1066 if_true_label, if_false_label,
1067 inv (prob));
1068 break;
1069
1070 case GT:
1071 do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
1072 if_false_label, if_true_label, prob);
1073 break;
1074
1075 case GE:
1076 do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
1077 if_true_label, if_false_label,
1078 inv (prob));
1079 break;
1080
1081 case EQ:
1082 do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
1083 if_true_label, prob);
1084 break;
1085
1086 case NE:
1087 do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
1088 if_false_label, inv (prob));
1089 break;
1090
1091 default:
1092 gcc_unreachable ();
1093 }
1094 }
1095 else
1096 {
1097 if (SCALAR_FLOAT_MODE_P (mode)
1098 && ! can_compare_p (code, mode, ccp_jump)
1099 && can_compare_p (swap_condition (code), mode, ccp_jump))
1100 {
1101 code = swap_condition (code);
1102 std::swap (op0, op1);
1103 }
1104 else if (SCALAR_FLOAT_MODE_P (mode)
1105 && ! can_compare_p (code, mode, ccp_jump)
1106 /* Never split ORDERED and UNORDERED.
1107 These must be implemented. */
1108 && (code != ORDERED && code != UNORDERED)
1109 /* Split a floating-point comparison if
1110 we can jump on other conditions... */
1111 && (have_insn_for (COMPARE, mode)
1112 /* ... or if there is no libcall for it. */
1113 || code_to_optab (code) == unknown_optab))
1114 {
1115 enum rtx_code first_code;
1116 bool and_them = split_comparison (code, mode, &first_code, &code);
1117
1118 /* If there are no NaNs, the first comparison should always fall
1119 through. */
1120 if (!HONOR_NANS (mode))
1121 gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));
1122
1123 else
1124 {
1125 int first_prob = prob;
1126 if (first_code == UNORDERED)
1127 first_prob = REG_BR_PROB_BASE / 100;
1128 else if (first_code == ORDERED)
1129 first_prob = REG_BR_PROB_BASE - REG_BR_PROB_BASE / 100;
1130 if (and_them)
1131 {
1132 rtx_code_label *dest_label;
1133 /* If we only jump if true, just bypass the second jump. */
1134 if (! if_false_label)
1135 {
1136 if (! dummy_label)
1137 dummy_label = gen_label_rtx ();
1138 dest_label = dummy_label;
1139 }
1140 else
1141 dest_label = if_false_label;
1142 do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
1143 size, dest_label, NULL, first_prob);
1144 }
1145 else
1146 do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
1147 size, NULL, if_true_label, first_prob);
1148 }
1149 }
1150
1151 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
1152 if_true_label, prob);
1153 }
1154
1155 if (if_false_label)
1156 emit_jump (if_false_label);
1157 if (dummy_label)
1158 emit_label (dummy_label);
1159 }
1160
1161 /* Generate code for a comparison expression EXP (including code to compute
1162 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
 1163   IF_TRUE_LABEL.  One of the labels can be NULL, in which case the
1164 generated code will drop through.
1165 SIGNED_CODE should be the rtx operation for this comparison for
1166 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
1167
1168 We force a stack adjustment unless there are currently
1169 things pushed on the stack that aren't yet used. */
1170
1171 static void
1172 do_compare_and_jump (tree treeop0, tree treeop1, enum rtx_code signed_code,
1173 enum rtx_code unsigned_code,
1174 rtx_code_label *if_false_label,
1175 rtx_code_label *if_true_label, int prob)
1176 {
1177 rtx op0, op1;
1178 tree type;
1179 machine_mode mode;
1180 int unsignedp;
1181 enum rtx_code code;
1182
1183 /* Don't crash if the comparison was erroneous. */
1184 op0 = expand_normal (treeop0);
1185 if (TREE_CODE (treeop0) == ERROR_MARK)
1186 return;
1187
1188 op1 = expand_normal (treeop1);
1189 if (TREE_CODE (treeop1) == ERROR_MARK)
1190 return;
1191
1192 type = TREE_TYPE (treeop0);
1193 mode = TYPE_MODE (type);
1194 if (TREE_CODE (treeop0) == INTEGER_CST
1195 && (TREE_CODE (treeop1) != INTEGER_CST
1196 || (GET_MODE_BITSIZE (mode)
1197 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (treeop1))))))
1198 {
1199 /* op0 might have been replaced by promoted constant, in which
1200 case the type of second argument should be used. */
1201 type = TREE_TYPE (treeop1);
1202 mode = TYPE_MODE (type);
1203 }
1204 unsignedp = TYPE_UNSIGNED (type);
1205 code = unsignedp ? unsigned_code : signed_code;
1206
1207 #ifdef HAVE_canonicalize_funcptr_for_compare
1208 /* If function pointers need to be "canonicalized" before they can
1209 be reliably compared, then canonicalize them.
1210 Only do this if *both* sides of the comparison are function pointers.
1211 If one side isn't, we want a noncanonicalized comparison. See PR
1212 middle-end/17564. */
1213 if (HAVE_canonicalize_funcptr_for_compare
1214 && TREE_CODE (TREE_TYPE (treeop0)) == POINTER_TYPE
1215 && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop0)))
1216 == FUNCTION_TYPE
1217 && TREE_CODE (TREE_TYPE (treeop1)) == POINTER_TYPE
1218 && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop1)))
1219 == FUNCTION_TYPE)
1220 {
1221 rtx new_op0 = gen_reg_rtx (mode);
1222 rtx new_op1 = gen_reg_rtx (mode);
1223
1224 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
1225 op0 = new_op0;
1226
1227 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
1228 op1 = new_op1;
1229 }
1230 #endif
1231
1232 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
1233 ((mode == BLKmode)
1234 ? expr_size (treeop0) : NULL_RTX),
1235 if_false_label, if_true_label, prob);
1236 }
1237
1238 #include "gt-dojump.h"