/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"
#include "basic-block.h"

static bool prefer_and_bit_test (enum machine_mode, int);
static void do_jump_by_parts_greater (tree, int, rtx, rtx);
static void do_jump_by_parts_equality (tree, rtx, rtx);
static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
                                 rtx);

/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}

/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */
void
discard_pending_stack_adjust (void)
{
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || cfun->calls_alloca)
      && EXIT_IGNORE_STACK)
    discard_pending_stack_adjust ();
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
\f
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label)
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label)
{
  do_jump (exp, NULL_RTX, label);
}

/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */
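/* For example, with BITNUM == 3 this weighs "x & 8" against
   "(x >> 3) & 1"; which form wins is a target question, decided here
   purely by rtx_cost.  */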

static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1)
    = immed_double_const ((unsigned HOST_WIDE_INT) 1 << bitnum, 0, mode);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  return (rtx_cost (and_test, IF_THEN_ELSE, optimize_insn_for_speed_p ())
          <= rtx_cost (shift_test, IF_THEN_ELSE, optimize_insn_for_speed_p ()));
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.  */
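/* For example, "jumpif (a && b, label)" arrives here as a
   TRUTH_ANDIF_EXPR: the first operand branches past LABEL (to a fresh
   drop-through label) when false, and the second operand branches to
   LABEL when true.  */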

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
        break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && have_insn_for (COMPARE, TYPE_MODE (type)))
          {
            do_jump (fold_convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      exp = build2 (NE_EXPR, TREE_TYPE (exp),
                    TREE_OPERAND (exp, 0),
                    TREE_OPERAND (exp, 1));
      /* FALLTHRU */
    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode1, tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode1 = UNORDERED_EXPR;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;
      case LTGT_EXPR:
        /* It is ok for LTGT_EXPR to trap when the result is unordered,
           so expand to (a < b) || (a > b).  */
        rcode1 = LTGT;
        tcode1 = LT_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into two comparisons.  */
            if (if_true_label == 0)
              drop_through_label = if_true_label = gen_label_rtx ();

            cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
            cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
            do_jump (cmp0, 0, if_true_label);
            do_jump (cmp1, if_false_label, if_true_label);
          }
        break;
    }

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
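      /* E.g. "(x >> 2) & 1" is rebuilt as "x & 4" when
         prefer_and_bit_test says the mask form is cheaper.  */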
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;

          /* Strip narrowing integral type conversions.  */
          while (CONVERT_EXPR_P (exp0)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  unsigned HOST_WIDE_INT mask
                    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cst_wide_type (argtype, mask, 0)),
                           clr_label, set_label);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
          || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
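      /* E.g. with a branch cost of 4, "if (a && b)" is evaluated as a
         bitwise AND of the two conditions followed by a single
         compare-and-branch, trading one branch for an AND.  */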
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), drop_through_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), NULL_RTX, if_true_label);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
          || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, drop_through_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, NULL_RTX);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
\f
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */
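/* For example, a DImode comparison on a 32-bit target tests the high
   words first: branch to IF_TRUE_LABEL if they compare GT, to
   IF_FALSE_LABEL if they compare NE, and only then consider the low
   words, which are always compared unsigned.  */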

static void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */
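/* Callers encode the other orderings by swapping operands or labels;
   e.g. do_jump handles LE_EXPR by testing GT with the two labels
   exchanged.  */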

static void
do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
                          rtx if_true_label)
{
  rtx op0 = expand_normal (TREE_OPERAND (exp, swap));
  rtx op1 = expand_normal (TREE_OPERAND (exp, !swap));
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label);
}
\f
/* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
   mode, MODE, that is too wide for the available compare insns.  Either
   (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_zero_rtx (enum machine_mode mode, rtx op0,
                           rtx if_false_label, rtx if_true_label)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
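  /* E.g. a DImode test against zero on a 32-bit target becomes
     "(low | high) == 0": one IOR plus one compare.  */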

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, mode));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, mode),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */
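/* Each corresponding pair of words is tested in turn; e.g. a mismatch
   in either word of a TImode pair on a 64-bit target branches straight
   to the false label.  */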

static void
do_jump_by_parts_equality_rtx (enum machine_mode mode, rtx op0, rtx op1,
                               rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, 0, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
{
  rtx op0 = expand_normal (TREE_OPERAND (exp, 0));
  rtx op1 = expand_normal (TREE_OPERAND (exp, 1));
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                 if_true_label);
}
\f
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */
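/* When only IF_FALSE_LABEL is given, the condition is reversed so that
   a single branch suffices; this is skipped for floating-point modes,
   where reversing a condition is unsafe in the presence of NaNs.  */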

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label)
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label);
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label);
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label);
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label);
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label);
          break;

        case NE:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label);
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                             if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */
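/* E.g. for LT_EXPR do_jump passes LT and LTU; an unsigned operand
   type selects LTU below.  */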

static void
do_compare_and_jump (tree exp, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_normal (TREE_OPERAND (exp, 0));
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_normal (TREE_OPERAND (exp, 1));
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
         == FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
         == FUNCTION_TYPE)
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}

#include "gt-dojump.h"