re PR middle-end/37014 (internal compiler error: in expand_expr_real_1, at expr.c...
[gcc.git] / gcc / dojump.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
33 #include "expr.h"
34 #include "optabs.h"
35 #include "langhooks.h"
36 #include "ggc.h"
37
38 static bool prefer_and_bit_test (enum machine_mode, int);
39 static void do_jump_by_parts_greater (tree, int, rtx, rtx);
40 static void do_jump_by_parts_equality (tree, rtx, rtx);
41 static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
42 rtx);
43
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  pending_stack_adjust = 0;
}
52
/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */
void
discard_pending_stack_adjust (void)
{
  /* Back the never-to-be-emitted adjustment out of the running
     stack-pointer offset so the two stay consistent.  */
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}
62
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  /* Only safe when optimizing, when a frame pointer will exist
     (either not omitted, or forced by alloca), and when the target
     says the exit code ignores the stack pointer.  */
  if (optimize > 0
      && (! flag_omit_frame_pointer || cfun->calls_alloca)
      && EXIT_IGNORE_STACK)
    discard_pending_stack_adjust ();
}
77
78 /* Pop any previously-pushed arguments that have not been popped yet. */
79
80 void
81 do_pending_stack_adjust (void)
82 {
83 if (inhibit_defer_pop == 0)
84 {
85 if (pending_stack_adjust != 0)
86 adjust_stack (GEN_INT (pending_stack_adjust));
87 pending_stack_adjust = 0;
88 }
89 }
90 \f
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label)
{
  /* A null true-label means "fall through when nonzero".  */
  do_jump (exp, label, NULL_RTX);
}
102
/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label)
{
  /* A null false-label means "fall through when zero".  */
  do_jump (exp, NULL_RTX, label);
}
110
/* Used internally by prefer_and_bit_test.  These rtxes are built once
   and then recycled (mode and constants patched in place) on later
   calls; GTY keeps them alive across garbage collections.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers: the AND mask (1 << BITNUM) and the shift count.  */
  XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  /* Cost both forms as the condition of a conditional jump.  */
  return (rtx_cost (and_test, IF_THEN_ELSE)
          <= rtx_cost (shift_test, IF_THEN_ELSE));
}
149
150 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
151 the result is zero, or IF_TRUE_LABEL if the result is one.
152 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
153 meaning fall through in that case.
154
155 do_jump always does any pending stack adjust except when it does not
156 actually perform a jump. An example where there is no jump
157 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null. */
158
159 void
160 do_jump (tree exp, rtx if_false_label, rtx if_true_label)
161 {
162 enum tree_code code = TREE_CODE (exp);
163 rtx temp;
164 int i;
165 tree type;
166 enum machine_mode mode;
167 rtx drop_through_label = 0;
168
169 switch (code)
170 {
171 case ERROR_MARK:
172 break;
173
174 case INTEGER_CST:
175 temp = integer_zerop (exp) ? if_false_label : if_true_label;
176 if (temp)
177 emit_jump (temp);
178 break;
179
180 #if 0
181 /* This is not true with #pragma weak */
182 case ADDR_EXPR:
183 /* The address of something can never be zero. */
184 if (if_true_label)
185 emit_jump (if_true_label);
186 break;
187 #endif
188
189 case NOP_EXPR:
190 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
191 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
192 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
193 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
194 goto normal;
195 case CONVERT_EXPR:
196 /* If we are narrowing the operand, we have to do the compare in the
197 narrower mode. */
198 if ((TYPE_PRECISION (TREE_TYPE (exp))
199 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
200 goto normal;
201 case NON_LVALUE_EXPR:
202 case ABS_EXPR:
203 case NEGATE_EXPR:
204 case LROTATE_EXPR:
205 case RROTATE_EXPR:
206 /* These cannot change zero->nonzero or vice versa. */
207 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
208 break;
209
210 case BIT_AND_EXPR:
211 /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
212 See if the former is preferred for jump tests and restore it
213 if so. */
214 if (integer_onep (TREE_OPERAND (exp, 1)))
215 {
216 tree exp0 = TREE_OPERAND (exp, 0);
217 rtx set_label, clr_label;
218
219 /* Strip narrowing integral type conversions. */
220 while (CONVERT_EXPR_P (exp0)
221 && TREE_OPERAND (exp0, 0) != error_mark_node
222 && TYPE_PRECISION (TREE_TYPE (exp0))
223 <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
224 exp0 = TREE_OPERAND (exp0, 0);
225
226 /* "exp0 ^ 1" inverts the sense of the single bit test. */
227 if (TREE_CODE (exp0) == BIT_XOR_EXPR
228 && integer_onep (TREE_OPERAND (exp0, 1)))
229 {
230 exp0 = TREE_OPERAND (exp0, 0);
231 clr_label = if_true_label;
232 set_label = if_false_label;
233 }
234 else
235 {
236 clr_label = if_false_label;
237 set_label = if_true_label;
238 }
239
240 if (TREE_CODE (exp0) == RSHIFT_EXPR)
241 {
242 tree arg = TREE_OPERAND (exp0, 0);
243 tree shift = TREE_OPERAND (exp0, 1);
244 tree argtype = TREE_TYPE (arg);
245 if (TREE_CODE (shift) == INTEGER_CST
246 && compare_tree_int (shift, 0) >= 0
247 && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
248 && prefer_and_bit_test (TYPE_MODE (argtype),
249 TREE_INT_CST_LOW (shift)))
250 {
251 HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
252 << TREE_INT_CST_LOW (shift);
253 do_jump (build2 (BIT_AND_EXPR, argtype, arg,
254 build_int_cst_type (argtype, mask)),
255 clr_label, set_label);
256 break;
257 }
258 }
259 }
260
261 /* If we are AND'ing with a small constant, do this comparison in the
262 smallest type that fits. If the machine doesn't have comparisons
263 that small, it will be converted back to the wider comparison.
264 This helps if we are testing the sign bit of a narrower object.
265 combine can't do this for us because it can't know whether a
266 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
267
268 if (! SLOW_BYTE_ACCESS
269 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
270 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
271 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
272 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
273 && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
274 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
275 && (optab_handler (cmp_optab, TYPE_MODE (type))->insn_code
276 != CODE_FOR_nothing))
277 {
278 do_jump (fold_convert (type, exp), if_false_label, if_true_label);
279 break;
280 }
281 goto normal;
282
283 case TRUTH_NOT_EXPR:
284 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
285 break;
286
287 case COND_EXPR:
288 {
289 rtx label1 = gen_label_rtx ();
290 if (!if_true_label || !if_false_label)
291 {
292 drop_through_label = gen_label_rtx ();
293 if (!if_true_label)
294 if_true_label = drop_through_label;
295 if (!if_false_label)
296 if_false_label = drop_through_label;
297 }
298
299 do_pending_stack_adjust ();
300 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
301 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
302 emit_label (label1);
303 do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
304 break;
305 }
306
307 case COMPOUND_EXPR:
308 /* Lowered by gimplify.c. */
309 gcc_unreachable ();
310
311 case COMPONENT_REF:
312 case BIT_FIELD_REF:
313 case ARRAY_REF:
314 case ARRAY_RANGE_REF:
315 {
316 HOST_WIDE_INT bitsize, bitpos;
317 int unsignedp;
318 enum machine_mode mode;
319 tree type;
320 tree offset;
321 int volatilep = 0;
322
323 /* Get description of this reference. We don't actually care
324 about the underlying object here. */
325 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
326 &unsignedp, &volatilep, false);
327
328 type = lang_hooks.types.type_for_size (bitsize, unsignedp);
329 if (! SLOW_BYTE_ACCESS
330 && type != 0 && bitsize >= 0
331 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
332 && (optab_handler (cmp_optab, TYPE_MODE (type))->insn_code
333 != CODE_FOR_nothing))
334 {
335 do_jump (fold_convert (type, exp), if_false_label, if_true_label);
336 break;
337 }
338 goto normal;
339 }
340
341 case EQ_EXPR:
342 {
343 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
344
345 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
346 != MODE_COMPLEX_FLOAT);
347 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
348 != MODE_COMPLEX_INT);
349
350 if (integer_zerop (TREE_OPERAND (exp, 1)))
351 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
352 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
353 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
354 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
355 else
356 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
357 break;
358 }
359
360 case MINUS_EXPR:
361 /* Nonzero iff operands of minus differ. */
362 exp = build2 (NE_EXPR, TREE_TYPE (exp),
363 TREE_OPERAND (exp, 0),
364 TREE_OPERAND (exp, 1));
365 /* FALLTHRU */
366 case NE_EXPR:
367 {
368 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
369
370 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
371 != MODE_COMPLEX_FLOAT);
372 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
373 != MODE_COMPLEX_INT);
374
375 if (integer_zerop (TREE_OPERAND (exp, 1)))
376 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
377 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
378 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
379 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
380 else
381 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
382 break;
383 }
384
385 case LT_EXPR:
386 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
387 if (GET_MODE_CLASS (mode) == MODE_INT
388 && ! can_compare_p (LT, mode, ccp_jump))
389 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
390 else
391 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
392 break;
393
394 case LE_EXPR:
395 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
396 if (GET_MODE_CLASS (mode) == MODE_INT
397 && ! can_compare_p (LE, mode, ccp_jump))
398 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
399 else
400 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
401 break;
402
403 case GT_EXPR:
404 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
405 if (GET_MODE_CLASS (mode) == MODE_INT
406 && ! can_compare_p (GT, mode, ccp_jump))
407 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
408 else
409 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
410 break;
411
412 case GE_EXPR:
413 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
414 if (GET_MODE_CLASS (mode) == MODE_INT
415 && ! can_compare_p (GE, mode, ccp_jump))
416 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
417 else
418 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
419 break;
420
421 case UNORDERED_EXPR:
422 case ORDERED_EXPR:
423 {
424 enum rtx_code cmp, rcmp;
425 int do_rev;
426
427 if (code == UNORDERED_EXPR)
428 cmp = UNORDERED, rcmp = ORDERED;
429 else
430 cmp = ORDERED, rcmp = UNORDERED;
431 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
432
433 do_rev = 0;
434 if (! can_compare_p (cmp, mode, ccp_jump)
435 && (can_compare_p (rcmp, mode, ccp_jump)
436 /* If the target doesn't provide either UNORDERED or ORDERED
437 comparisons, canonicalize on UNORDERED for the library. */
438 || rcmp == UNORDERED))
439 do_rev = 1;
440
441 if (! do_rev)
442 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
443 else
444 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
445 }
446 break;
447
448 {
449 enum rtx_code rcode1;
450 enum tree_code tcode1, tcode2;
451
452 case UNLT_EXPR:
453 rcode1 = UNLT;
454 tcode1 = UNORDERED_EXPR;
455 tcode2 = LT_EXPR;
456 goto unordered_bcc;
457 case UNLE_EXPR:
458 rcode1 = UNLE;
459 tcode1 = UNORDERED_EXPR;
460 tcode2 = LE_EXPR;
461 goto unordered_bcc;
462 case UNGT_EXPR:
463 rcode1 = UNGT;
464 tcode1 = UNORDERED_EXPR;
465 tcode2 = GT_EXPR;
466 goto unordered_bcc;
467 case UNGE_EXPR:
468 rcode1 = UNGE;
469 tcode1 = UNORDERED_EXPR;
470 tcode2 = GE_EXPR;
471 goto unordered_bcc;
472 case UNEQ_EXPR:
473 rcode1 = UNEQ;
474 tcode1 = UNORDERED_EXPR;
475 tcode2 = EQ_EXPR;
476 goto unordered_bcc;
477 case LTGT_EXPR:
478 /* It is ok for LTGT_EXPR to trap when the result is unordered,
479 so expand to (a < b) || (a > b). */
480 rcode1 = LTGT;
481 tcode1 = LT_EXPR;
482 tcode2 = GT_EXPR;
483 goto unordered_bcc;
484
485 unordered_bcc:
486 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
487 if (can_compare_p (rcode1, mode, ccp_jump))
488 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
489 if_true_label);
490 else
491 {
492 tree op0 = save_expr (TREE_OPERAND (exp, 0));
493 tree op1 = save_expr (TREE_OPERAND (exp, 1));
494 tree cmp0, cmp1;
495
496 /* If the target doesn't support combined unordered
497 compares, decompose into two comparisons. */
498 if (if_true_label == 0)
499 drop_through_label = if_true_label = gen_label_rtx ();
500
501 cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
502 cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
503 do_jump (cmp0, 0, if_true_label);
504 do_jump (cmp1, if_false_label, if_true_label);
505 }
506 }
507 break;
508
509 case TRUTH_AND_EXPR:
510 /* High branch cost, expand as the bitwise AND of the conditions.
511 Do the same if the RHS has side effects, because we're effectively
512 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR. */
513 if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
514 goto normal;
515
516 case TRUTH_ANDIF_EXPR:
517 if (if_false_label == NULL_RTX)
518 {
519 drop_through_label = gen_label_rtx ();
520 do_jump (TREE_OPERAND (exp, 0), drop_through_label, NULL_RTX);
521 do_jump (TREE_OPERAND (exp, 1), NULL_RTX, if_true_label);
522 }
523 else
524 {
525 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
526 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
527 }
528 break;
529
530 case TRUTH_OR_EXPR:
531 /* High branch cost, expand as the bitwise OR of the conditions.
532 Do the same if the RHS has side effects, because we're effectively
533 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR. */
534 if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
535 goto normal;
536
537 case TRUTH_ORIF_EXPR:
538 if (if_true_label == NULL_RTX)
539 {
540 drop_through_label = gen_label_rtx ();
541 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, drop_through_label);
542 do_jump (TREE_OPERAND (exp, 1), if_false_label, NULL_RTX);
543 }
544 else
545 {
546 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
547 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
548 }
549 break;
550
551 /* Fall through and generate the normal code. */
552 default:
553 normal:
554 temp = expand_normal (exp);
555 do_pending_stack_adjust ();
556 /* The RTL optimizers prefer comparisons against pseudos. */
557 if (GET_CODE (temp) == SUBREG)
558 {
559 /* Compare promoted variables in their promoted mode. */
560 if (SUBREG_PROMOTED_VAR_P (temp)
561 && REG_P (XEXP (temp, 0)))
562 temp = XEXP (temp, 0);
563 else
564 temp = copy_to_reg (temp);
565 }
566 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
567 NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
568 GET_MODE (temp), NULL_RTX,
569 if_false_label, if_true_label);
570 }
571
572 if (drop_through_label)
573 {
574 do_pending_stack_adjust ();
575 emit_label (drop_through_label);
576 }
577 }
578 \f
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  /* Substitute a local label for any target the caller left null.  */
  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      /* Word 0 is the high-order word on big-endian targets; the
         loop index is flipped for little-endian.  */
      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  /* All words equal: OP0 is not greater.  */
  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
629
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
                          rtx if_true_label)
{
  /* SWAP selects operand order: with SWAP == 1 the operands are
     exchanged, turning the GT test into an effective LT test.  */
  rtx op0 = expand_normal (TREE_OPERAND (exp, swap));
  rtx op1 = expand_normal (TREE_OPERAND (exp, !swap));
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label);
}
647 \f
/* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
   mode, MODE, that is too wide for the available compare insns.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_zero_rtx (enum machine_mode mode, rtx op0,
                           rtx if_false_label, rtx if_true_label)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  /* expand_binop may fail and return 0; only use the OR'd result if
     every step succeeded.  */
  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  /* Any nonzero word means OP0 != 0, i.e. the false branch.  */
  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
697
/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_equality_rtx (enum machine_mode mode, rtx op0, rtx op1,
                               rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  /* Comparison against zero has a cheaper specialized path.  */
  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  /* Any unequal pair of words means the operands differ.  */
  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, 0, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
736
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
{
  rtx op0 = expand_normal (TREE_OPERAND (exp, 0));
  rtx op1 = expand_normal (TREE_OPERAND (exp, 1));
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                 if_true_label);
}
749 \f
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared), setting CC0
   according to the result.  MODE is the machine mode of the comparison,
   not of the result.  The decision as to signed or unsigned comparison
   must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   Returns a comparison rtx suitable for use as a jump condition.  */

rtx
compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                  enum machine_mode mode, rtx size)
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  /* Try to simplify the comparison at compile time.  */
  tem = simplify_relational_operation (code, VOIDmode, mode, op0, op1);
  if (tem)
    {
      /* Folded to a constant: return it directly.  */
      if (CONSTANT_P (tem))
        return tem;

      /* Folded to another comparison: adopt its code and operands.  */
      if (COMPARISON_P (tem))
        {
          code = GET_CODE (tem);
          op0 = XEXP (tem, 0);
          op1 = XEXP (tem, 1);
          mode = GET_MODE (op0);
          unsignedp = (code == GTU || code == LTU
                       || code == GEU || code == LEU);
        }
    }

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}
807
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label)
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  (Not safe for floats: reversing may change NaN behavior.)  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  /* Try to fold the comparison at compile time.  */
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          /* Known result: emit at most one unconditional jump.  */
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      /* Folded to another comparison: adopt its code and operands.  */
      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }


  /* The by-parts helpers and emit_cmp_and_jump_insns need a real
     true-label; synthesize one we will emit at the end.  */
  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      /* Mode too wide for a single compare: decompose into word-sized
         compares.  Each case maps onto the GT- or EQ-based helper by
         swapping operands and/or labels.  */
      switch (code)
        {
        case LTU:
          /* a <u b  ==  b >u a.  */
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label);
          break;

        case LEU:
          /* a <=u b  ==  !(a >u b): labels swapped.  */
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label);
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label);
          break;

        case GEU:
          /* a >=u b  ==  !(b >u a): operands and labels swapped.  */
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label);
          break;

        case LT:
          /* a < b  ==  b > a.  */
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label);
          break;

        case LE:
          /* a <= b  ==  !(a > b): labels swapped.  */
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label);
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label);
          break;

        case GE:
          /* a >= b  ==  !(b > a): operands and labels swapped.  */
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label);
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label);
          break;

        case NE:
          /* a != b  ==  !(a == b): labels swapped.  */
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label);
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                             if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
939
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree exp, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_normal (TREE_OPERAND (exp, 0));
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_normal (TREE_OPERAND (exp, 1));
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  /* Pick the signed or unsigned rtx comparison code from the type.  */
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
         == FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
         == FUNCTION_TYPE)
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* BLKmode comparisons need the object size; others pass NULL_RTX.  */
  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}
1016
1017 #include "gt-dojump.h"