darwin-20040809-2.c: Don't expect default debugging to be STABS.
[gcc.git] / gcc / dojump.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
33 #include "expr.h"
34 #include "optabs.h"
35 #include "langhooks.h"
36 #include "ggc.h"
37
38 static bool prefer_and_bit_test (enum machine_mode, int);
39 static void do_jump_by_parts_greater (tree, int, rtx, rtx);
40 static void do_jump_by_parts_equality (tree, rtx, rtx);
41 static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
42 rtx);
43
44 /* At the start of a function, record that we have no previously-pushed
45 arguments waiting to be popped. */
46
47 void
48 init_pending_stack_adjust (void)
49 {
50 pending_stack_adjust = 0;
51 }
52
53 /* Discard any pending stack adjustment. This avoid relying on the
54 RTL optimizers to remove useless adjustments when we know the
55 stack pointer value is dead. */
56 void discard_pending_stack_adjust (void)
57 {
58 stack_pointer_delta -= pending_stack_adjust;
59 pending_stack_adjust = 0;
60 }
61
62 /* When exiting from function, if safe, clear out any pending stack adjust
63 so the adjustment won't get done.
64
65 Note, if the current function calls alloca, then it must have a
66 frame pointer regardless of the value of flag_omit_frame_pointer. */
67
68 void
69 clear_pending_stack_adjust (void)
70 {
71 if (optimize > 0
72 && (! flag_omit_frame_pointer || current_function_calls_alloca)
73 && EXIT_IGNORE_STACK
74 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline))
75 discard_pending_stack_adjust ();
76 }
77
78 /* Pop any previously-pushed arguments that have not been popped yet. */
79
80 void
81 do_pending_stack_adjust (void)
82 {
83 if (inhibit_defer_pop == 0)
84 {
85 if (pending_stack_adjust != 0)
86 adjust_stack (GEN_INT (pending_stack_adjust));
87 pending_stack_adjust = 0;
88 }
89 }
90 \f
91 /* Expand conditional expressions. */
92
93 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
94 LABEL is an rtx of code CODE_LABEL, in this function and all the
95 functions here. */
96
97 void
98 jumpifnot (tree exp, rtx label)
99 {
100 do_jump (exp, label, NULL_RTX);
101 }
102
103 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
104
105 void
106 jumpif (tree exp, rtx label)
107 {
108 do_jump (exp, NULL_RTX, label);
109 }
110
/* Used internally by prefer_and_bit_test.  */

/* Cached rtxes for costing the two test shapes.  Allocated once on the
   first call and then reused with their mode and constant operands
   overwritten; GTY(()) roots them for the garbage collector.  */
static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
         for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
                                const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  /* Prefer the AND form when, in the context of a conditional jump,
     it is no more expensive than the shift-and-mask form.  */
  return (rtx_cost (and_test, IF_THEN_ELSE)
          <= rtx_cost (shift_test, IF_THEN_ELSE));
}
149
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.  */

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  /* Label emitted at the end when a missing IF_*_LABEL had to be
     synthesized so an inner jump had somewhere to go.  */
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      /* Erroneous input: emit nothing.  */
      break;

    case INTEGER_CST:
      /* Constant condition: at most one unconditional jump.  */
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      /* Conversions of memory references must be compared normally.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
      /* Fall through.  */
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
      /* Fall through.  */
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
         See if the former is preferred for jump tests and restore it
         if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
        {
          tree exp0 = TREE_OPERAND (exp, 0);
          rtx set_label, clr_label;

          /* Strip narrowing integral type conversions.  */
          while ((TREE_CODE (exp0) == NOP_EXPR
                  || TREE_CODE (exp0) == CONVERT_EXPR
                  || TREE_CODE (exp0) == NON_LVALUE_EXPR)
                 && TREE_OPERAND (exp0, 0) != error_mark_node
                 && TYPE_PRECISION (TREE_TYPE (exp0))
                    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
            exp0 = TREE_OPERAND (exp0, 0);

          /* "exp0 ^ 1" inverts the sense of the single bit test.  */
          if (TREE_CODE (exp0) == BIT_XOR_EXPR
              && integer_onep (TREE_OPERAND (exp0, 1)))
            {
              exp0 = TREE_OPERAND (exp0, 0);
              clr_label = if_true_label;
              set_label = if_false_label;
            }
          else
            {
              clr_label = if_false_label;
              set_label = if_true_label;
            }

          if (TREE_CODE (exp0) == RSHIFT_EXPR)
            {
              tree arg = TREE_OPERAND (exp0, 0);
              tree shift = TREE_OPERAND (exp0, 1);
              tree argtype = TREE_TYPE (arg);
              /* Only rebuild the AND form for a constant, in-range shift
                 count when the cost model says the AND form is cheaper.  */
              if (TREE_CODE (shift) == INTEGER_CST
                  && compare_tree_int (shift, 0) >= 0
                  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
                  && prefer_and_bit_test (TYPE_MODE (argtype),
                                          TREE_INT_CST_LOW (shift)))
                {
                  HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
                                       << TREE_INT_CST_LOW (shift);
                  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
                                   build_int_cst_type (argtype, mask)),
                           clr_label, set_label);
                  break;
                }
            }
        }

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      /* Logical negation: swap the two targets.  */
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case COND_EXPR:
      {
        rtx label1 = gen_label_rtx ();
        if (!if_true_label || !if_false_label)
          {
            drop_through_label = gen_label_rtx ();
            if (!if_true_label)
              if_true_label = drop_through_label;
            if (!if_false_label)
              if_false_label = drop_through_label;
          }

        do_pending_stack_adjust ();
        /* Test the condition; on false, jump over the then-arm to LABEL1.  */
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
        break;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        /* If a narrower comparison exists for the field's width, do the
           test in that narrower type.  */
        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        /* Complex operands should have been lowered before we get here.  */
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          /* x == 0 is just the inverted truth test of x.  */
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      exp = build2 (NE_EXPR, TREE_TYPE (exp),
                    TREE_OPERAND (exp, 0),
                    TREE_OPERAND (exp, 1));
      /* FALLTHRU */
    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_FLOAT);
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
                    != MODE_COMPLEX_INT);

        if (integer_zerop (TREE_OPERAND (exp, 1)))
          /* x != 0 is just the truth test of x.  */
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          /* Note the swapped labels: parts-equality tests EQ.  */
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    /* For the ordering comparisons, fall back to a word-by-word
       greater-than test when the mode has no direct compare-and-branch;
       LE/GE are implemented by swapping the labels of the inverse test.  */
    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          /* Reversed comparison: also reverse the jump targets.  */
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    /* The unordered comparisons share one tail (unordered_bcc); each case
       records the direct rtx code plus the two-tree decomposition used
       when the target lacks the combined comparison.  */
    {
      enum rtx_code rcode1;
      enum tree_code tcode1, tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode1 = UNORDERED_EXPR;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;
      case LTGT_EXPR:
        /* It is ok for LTGT_EXPR to trap when the result is unordered,
           so expand to (a < b) || (a > b).  */
        rcode1 = LTGT;
        tcode1 = LT_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            /* save_expr so each operand is evaluated only once even
               though it appears in both decomposed comparisons.  */
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into two comparisons.  */
            if (if_true_label == 0)
              drop_through_label = if_true_label = gen_label_rtx ();

            cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
            cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
            do_jump (cmp0, 0, if_true_label);
            do_jump (cmp1, if_false_label, if_true_label);
          }
    }
    break;

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_false_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), drop_through_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), NULL_RTX, if_true_label);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
         Do the same if the RHS has side effects, because we're effectively
         turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        goto normal;

      if (if_true_label == NULL_RTX)
        {
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, drop_through_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, NULL_RTX);
        }
      else
        {
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        }
      break;

    /* Special case:
       __builtin_expect (<test>, 0) and
       __builtin_expect (<test>, 1)

       We need to do this here, so that <test> is not converted to a SCC
       operation on machines that use condition code registers and COMPARE
       like the PowerPC, and then the jump is done based on whether the SCC
       operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
        tree fndecl = get_callee_fndecl (exp);
        tree arglist = TREE_OPERAND (exp, 1);

        if (fndecl
            && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
            && arglist != NULL_TREE
            && TREE_CHAIN (arglist) != NULL_TREE)
          {
            rtx seq = expand_builtin_expect_jump (exp, if_false_label,
                                                  if_true_label);

            if (seq != NULL_RTX)
              {
                emit_insn (seq);
                return;
              }
          }
      }

      /* Fall through and generate the normal code.  */
    default:
    normal:
      /* Evaluate EXP to a value and compare it against zero.  */
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
        {
          /* Compare promoted variables in their promoted mode.  */
          if (SUBREG_PROMOTED_VAR_P (temp)
              && REG_P (XEXP (temp, 0)))
            temp = XEXP (temp, 0);
          else
            temp = copy_to_reg (temp);
        }
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                               NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
                               GET_MODE (temp), NULL_RTX,
                               if_false_label, if_true_label);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
611 \f
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
                              rtx op1, rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  /* Synthesize any missing label so every inner jump has a target;
     the synthesized label is emitted at the end (fall through).  */
  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      /* Pick subword I counting from the most significant end; the
         endianness determines which subword index that is.  */
      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  /* All words equal: OP0 is not greater than OP1.  */
  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
662
663 /* Given a comparison expression EXP for values too wide to be compared
664 with one insn, test the comparison and jump to the appropriate label.
665 The code of EXP is ignored; we always test GT if SWAP is 0,
666 and LT if SWAP is 1. */
667
668 static void
669 do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
670 rtx if_true_label)
671 {
672 rtx op0 = expand_normal (TREE_OPERAND (exp, swap));
673 rtx op1 = expand_normal (TREE_OPERAND (exp, !swap));
674 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
675 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
676
677 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
678 if_true_label);
679 }
680 \f
/* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
   mode, MODE, that is too wide for the available compare insns.  Either
   (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_zero_rtx (enum machine_mode mode, rtx op0,
                           rtx if_false_label, rtx if_true_label)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  /* PART becomes 0 if expand_binop fails; the loop also stops then.  */
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      /* Single compare of the OR of all words against zero.  */
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  /* Any nonzero word sends us to the "false" (nonzero) target.  */
  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
730
/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_equality_rtx (enum machine_mode mode, rtx op0, rtx op1,
                               rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  /* Comparison against zero has a cheaper specialized path.  */
  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  /* Any unequal word pair sends us to the "false" target.  */
  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, 0, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
769
770 /* Given an EQ_EXPR expression EXP for values too wide to be compared
771 with one insn, test the comparison and jump to the appropriate label. */
772
773 static void
774 do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
775 {
776 rtx op0 = expand_normal (TREE_OPERAND (exp, 0));
777 rtx op1 = expand_normal (TREE_OPERAND (exp, 1));
778 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
779 do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
780 if_true_label);
781 }
782 \f
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE,
   returning an rtx of the comparison.  MODE is the machine mode of the
   comparison, not of the result.  The decision as to signed or unsigned
   comparison must be made by the caller (via UNSIGNEDP).

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                  enum machine_mode mode, rtx size)
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  /* Try to fold the comparison at compile time.  */
  tem = simplify_relational_operation (code, VOIDmode, mode, op0, op1);
  if (tem)
    {
      /* Folded to a constant: no compare insn is needed.  */
      if (CONSTANT_P (tem))
        return tem;

      /* Simplified to another comparison: retarget our operands to it.  */
      if (COMPARISON_P (tem))
        {
          code = GET_CODE (tem);
          op0 = XEXP (tem, 0);
          op1 = XEXP (tem, 1);
          mode = GET_MODE (op0);
          unsignedp = (code == GTU || code == LTU
                       || code == GEU || code == LEU);
        }
    }

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  /* cc0 targets: the comparison result lives in cc0.  */
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}
840
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
                         enum machine_mode mode, rtx size, rtx if_false_label,
                         rtx if_true_label)
{
  rtx tem;
  /* Nonzero when we had to invent IF_TRUE_LABEL ourselves; it is then
     emitted at the end so the "true" case falls through.  */
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  (Not safe for floating point, where reversal can change the
     outcome in the presence of NaNs.)  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  /* Try to fold the comparison at compile time.  */
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
                                                 op0, op1)))
    {
      if (CONSTANT_P (tem))
        {
          /* Known outcome: a single unconditional jump (or fall through).  */
          rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
                      ? if_false_label : if_true_label;
          if (label)
            emit_jump (label);
          return;
        }

      /* Simplified to another comparison: retarget our operands to it.  */
      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      /* No direct compare-and-branch for this mode: do it word by word.
         LT/LE/GE are reduced to GT by swapping operands and/or labels.  */
      switch (code)
        {
        case LTU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_false_label, if_true_label);
          break;

        case LEU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_true_label, if_false_label);
          break;

        case GTU:
          do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
                                        if_false_label, if_true_label);
          break;

        case GEU:
          do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
                                        if_true_label, if_false_label);
          break;

        case LT:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_false_label, if_true_label);
          break;

        case LE:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_true_label, if_false_label);
          break;

        case GT:
          do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
                                        if_false_label, if_true_label);
          break;

        case GE:
          do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
                                        if_true_label, if_false_label);
          break;

        case EQ:
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
                                         if_true_label);
          break;

        case NE:
          /* NE is EQ with the labels swapped.  */
          do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
                                         if_false_label);
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
                             if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
972
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree exp, enum rtx_code signed_code,
                     enum rtx_code unsigned_code, rtx if_false_label,
                     rtx if_true_label)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_normal (TREE_OPERAND (exp, 0));
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_normal (TREE_OPERAND (exp, 1));
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
         == FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
         == FUNCTION_TYPE)
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* BLKmode comparisons also need the size of the compared objects.  */
  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}
1049
1050 #include "gt-dojump.h"