1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "langhooks.h"
/* Forward declarations of file-local helpers.
   NOTE(review): this extracted copy is lossy — the do_compare_and_jump
   prototype below is truncated mid-parameter-list (its trailing
   parameters and closing paren are missing from this view).  */
37 static void do_jump_by_parts_greater (tree
, int, rtx
, rtx
);
38 static void do_jump_by_parts_equality (tree
, rtx
, rtx
);
39 static void do_compare_and_jump (tree
, enum rtx_code
, enum rtx_code
, rtx
,
42 /* At the start of a function, record that we have no previously-pushed
43 arguments waiting to be popped. */
46 init_pending_stack_adjust (void)
48 pending_stack_adjust
= 0;
51 /* When exiting from function, if safe, clear out any pending stack adjust
52 so the adjustment won't get done.
54 Note, if the current function calls alloca, then it must have a
55 frame pointer regardless of the value of flag_omit_frame_pointer. */
58 clear_pending_stack_adjust (void)
/* NOTE(review): this extracted copy is missing the return type, the
   opening brace, the head of the `if' condition (residual line numbers
   jump 58->60->62->64), and the closing #endif/brace.  The surviving
   fragments are kept byte-for-byte below.  */
60 #ifdef EXIT_IGNORE_STACK
62 && (! flag_omit_frame_pointer
|| current_function_calls_alloca
)
64 && ! (DECL_INLINE (current_function_decl
) && ! flag_no_inline
)
65 && ! flag_inline_functions
)
/* When the conditions hold, cancel the pending adjustment instead of
   emitting it: fold it into stack_pointer_delta and zero the counter.  */
67 stack_pointer_delta
-= pending_stack_adjust
,
68 pending_stack_adjust
= 0;
73 /* Pop any previously-pushed arguments that have not been popped yet. */
76 do_pending_stack_adjust (void)
78 if (inhibit_defer_pop
== 0)
80 if (pending_stack_adjust
!= 0)
81 adjust_stack (GEN_INT (pending_stack_adjust
));
82 pending_stack_adjust
= 0;
86 /* Expand conditional expressions. */
88 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
89 LABEL is an rtx of code CODE_LABEL, in this function and all the
93 jumpifnot (tree exp
, rtx label
)
95 do_jump (exp
, label
, NULL_RTX
);
98 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
101 jumpif (tree exp
, rtx label
)
103 do_jump (exp
, NULL_RTX
, label
);
106 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
107 the result is zero, or IF_TRUE_LABEL if the result is one.
108 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
109 meaning fall through in that case.
111 do_jump always does any pending stack adjust except when it does not
112 actually perform a jump. An example where there is no jump
113 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
115 This function is responsible for optimizing cases such as
116 &&, || and comparison operators in EXP. */
119 do_jump (tree exp
, rtx if_false_label
, rtx if_true_label
)
/* NOTE(review): this is the central dispatch on TREE_CODE (exp).  In this
   extracted copy every `switch'/`case' label, most braces, and many
   declarations are missing (residual line numbers skip); the surviving
   fragments are preserved byte-for-byte with hedged section markers.  */
121 enum tree_code code
= TREE_CODE (exp
);
122 /* Some cases need to create a label to jump to
123 in order to properly fall through.
124 These cases set DROP_THROUGH_LABEL nonzero. */
125 rtx drop_through_label
= 0;
129 enum machine_mode mode
;
131 #ifdef MAX_INTEGER_COMPUTATION_MODE
132 check_max_integer_computation_mode (exp
);
/* [review] Constant case: the label to take is known at compile time —
   presumably under a missing INTEGER_CST case label.  */
143 temp
= integer_zerop (exp
) ? if_false_label
: if_true_label
;
149 /* This is not true with #pragma weak */
151 /* The address of something can never be zero. */
153 emit_jump (if_true_label
);
/* [review] Recurse through the operand, then replace it with its
   unsaved form via the language hook — looks like an UNSAVE_EXPR case.  */
158 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
159 TREE_OPERAND (exp
, 0)
160 = (*lang_hooks
.unsave_expr_now
) (TREE_OPERAND (exp
, 0));
/* [review] Conversion cases: bit-field-like operands and narrowing
   conversions must be compared the normal way (target of the missing
   `goto' is not visible here).  */
164 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == COMPONENT_REF
165 || TREE_CODE (TREE_OPERAND (exp
, 0)) == BIT_FIELD_REF
166 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_REF
167 || TREE_CODE (TREE_OPERAND (exp
, 0)) == ARRAY_RANGE_REF
)
170 /* If we are narrowing the operand, we have to do the compare in the
172 if ((TYPE_PRECISION (TREE_TYPE (exp
))
173 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
175 case NON_LVALUE_EXPR
:
181 /* These cannot change zero->nonzero or vice versa. */
182 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
185 case WITH_RECORD_EXPR
:
186 /* Put the object on the placeholder list, recurse through our first
187 operand, and pop the list. */
188 placeholder_list
= tree_cons (TREE_OPERAND (exp
, 1), NULL_TREE
,
190 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
191 placeholder_list
= TREE_CHAIN (placeholder_list
);
/* [review] Rewrite PLUS as MINUS so a zero test becomes an operand
   inequality test (fed to the NE comparison just below).  */
195 /* This is never less insns than evaluating the PLUS_EXPR followed by
196 a test and can be longer if the test is eliminated. */
198 /* Reduce to minus. */
199 exp
= build (MINUS_EXPR
, TREE_TYPE (exp
),
200 TREE_OPERAND (exp
, 0),
201 fold (build1 (NEGATE_EXPR
, TREE_TYPE (TREE_OPERAND (exp
, 1)),
202 TREE_OPERAND (exp
, 1))));
203 /* Process as MINUS. */
207 /* Nonzero iff operands of minus differ. */
208 do_compare_and_jump (build (NE_EXPR
, TREE_TYPE (exp
),
209 TREE_OPERAND (exp
, 0),
210 TREE_OPERAND (exp
, 1)),
211 NE
, NE
, if_false_label
, if_true_label
);
/* [review] Bit-AND with a small constant mask: test in the narrowest
   integer mode that holds the mask, if the target can compare there.  */
215 /* If we are AND'ing with a small constant, do this comparison in the
216 smallest type that fits. If the machine doesn't have comparisons
217 that small, it will be converted back to the wider comparison.
218 This helps if we are testing the sign bit of a narrower object.
219 combine can't do this for us because it can't know whether a
220 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
222 if (! SLOW_BYTE_ACCESS
223 && TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
224 && TYPE_PRECISION (TREE_TYPE (exp
)) <= HOST_BITS_PER_WIDE_INT
225 && (i
= tree_floor_log2 (TREE_OPERAND (exp
, 1))) >= 0
226 && (mode
= mode_for_size (i
+ 1, MODE_INT
, 0)) != BLKmode
227 && (type
= (*lang_hooks
.types
.type_for_mode
) (mode
, 1)) != 0
228 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
229 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
230 != CODE_FOR_nothing
))
232 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
/* [review] Logical NOT: recurse with the two labels swapped.  */
238 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
241 case TRUTH_ANDIF_EXPR
:
242 if (if_false_label
== 0)
243 if_false_label
= drop_through_label
= gen_label_rtx ();
244 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, NULL_RTX
);
245 start_cleanup_deferral ();
246 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
247 end_cleanup_deferral ();
250 case TRUTH_ORIF_EXPR
:
251 if (if_true_label
== 0)
252 if_true_label
= drop_through_label
= gen_label_rtx ();
253 do_jump (TREE_OPERAND (exp
, 0), NULL_RTX
, if_true_label
);
254 start_cleanup_deferral ();
255 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
256 end_cleanup_deferral ();
/* [review] Comma expression: evaluate operand 0 for side effects only,
   then jump on operand 1.  */
261 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
, 0);
262 preserve_temp_slots (NULL_RTX
);
266 do_pending_stack_adjust ();
267 do_jump (TREE_OPERAND (exp
, 1), if_false_label
, if_true_label
);
273 case ARRAY_RANGE_REF
:
275 HOST_WIDE_INT bitsize
, bitpos
;
277 enum machine_mode mode
;
282 /* Get description of this reference. We don't actually care
283 about the underlying object here. */
284 get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
285 &unsignedp
, &volatilep
);
287 type
= (*lang_hooks
.types
.type_for_size
) (bitsize
, unsignedp
);
288 if (! SLOW_BYTE_ACCESS
289 && type
!= 0 && bitsize
>= 0
290 && TYPE_PRECISION (type
) < TYPE_PRECISION (TREE_TYPE (exp
))
291 && (cmp_optab
->handlers
[(int) TYPE_MODE (type
)].insn_code
292 != CODE_FOR_nothing
))
294 do_jump (convert (type
, exp
), if_false_label
, if_true_label
);
/* [review] COND_EXPR (a ? b : c), starting with the 0/1 shortcuts.  */
301 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
302 if (integer_onep (TREE_OPERAND (exp
, 1))
303 && integer_zerop (TREE_OPERAND (exp
, 2)))
304 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
306 else if (integer_zerop (TREE_OPERAND (exp
, 1))
307 && integer_onep (TREE_OPERAND (exp
, 2)))
308 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
312 rtx label1
= gen_label_rtx ();
313 drop_through_label
= gen_label_rtx ();
315 do_jump (TREE_OPERAND (exp
, 0), label1
, NULL_RTX
);
317 start_cleanup_deferral ();
318 /* Now the THEN-expression. */
319 do_jump (TREE_OPERAND (exp
, 1),
320 if_false_label
? if_false_label
: drop_through_label
,
321 if_true_label
? if_true_label
: drop_through_label
);
322 /* In case the do_jump just above never jumps. */
323 do_pending_stack_adjust ();
326 /* Now the ELSE-expression. */
327 do_jump (TREE_OPERAND (exp
, 2),
328 if_false_label
? if_false_label
: drop_through_label
,
329 if_true_label
? if_true_label
: drop_through_label
);
330 end_cleanup_deferral ();
/* [review] EQ comparison: complex operands are split into REALPART and
   IMAGPART equality joined by ANDIF; wide integers go word-by-word.  */
336 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
338 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
339 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
341 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
342 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
345 (build (TRUTH_ANDIF_EXPR
, TREE_TYPE (exp
),
346 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
347 fold (build1 (REALPART_EXPR
,
348 TREE_TYPE (inner_type
),
350 fold (build1 (REALPART_EXPR
,
351 TREE_TYPE (inner_type
),
353 fold (build (EQ_EXPR
, TREE_TYPE (exp
),
354 fold (build1 (IMAGPART_EXPR
,
355 TREE_TYPE (inner_type
),
357 fold (build1 (IMAGPART_EXPR
,
358 TREE_TYPE (inner_type
),
360 if_false_label
, if_true_label
);
363 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
364 do_jump (TREE_OPERAND (exp
, 0), if_true_label
, if_false_label
);
366 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
367 && !can_compare_p (EQ
, TYPE_MODE (inner_type
), ccp_jump
))
368 do_jump_by_parts_equality (exp
, if_false_label
, if_true_label
);
370 do_compare_and_jump (exp
, EQ
, EQ
, if_false_label
, if_true_label
);
/* [review] NE comparison: mirror of the EQ case, using ORIF over the
   real and imaginary parts for complex operands.  */
376 tree inner_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
378 if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_FLOAT
379 || GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_COMPLEX_INT
)
381 tree exp0
= save_expr (TREE_OPERAND (exp
, 0));
382 tree exp1
= save_expr (TREE_OPERAND (exp
, 1));
385 (build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
),
386 fold (build (NE_EXPR
, TREE_TYPE (exp
),
387 fold (build1 (REALPART_EXPR
,
388 TREE_TYPE (inner_type
),
390 fold (build1 (REALPART_EXPR
,
391 TREE_TYPE (inner_type
),
393 fold (build (NE_EXPR
, TREE_TYPE (exp
),
394 fold (build1 (IMAGPART_EXPR
,
395 TREE_TYPE (inner_type
),
397 fold (build1 (IMAGPART_EXPR
,
398 TREE_TYPE (inner_type
),
400 if_false_label
, if_true_label
);
403 else if (integer_zerop (TREE_OPERAND (exp
, 1)))
404 do_jump (TREE_OPERAND (exp
, 0), if_false_label
, if_true_label
);
406 else if (GET_MODE_CLASS (TYPE_MODE (inner_type
)) == MODE_INT
407 && !can_compare_p (NE
, TYPE_MODE (inner_type
), ccp_jump
))
408 do_jump_by_parts_equality (exp
, if_true_label
, if_false_label
);
410 do_compare_and_jump (exp
, NE
, NE
, if_false_label
, if_true_label
);
/* [review] Ordered comparisons (LT/LE/GT/GE): each falls back to a
   word-by-word comparison when the target cannot compare in MODE.  */
415 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
416 if (GET_MODE_CLASS (mode
) == MODE_INT
417 && ! can_compare_p (LT
, mode
, ccp_jump
))
418 do_jump_by_parts_greater (exp
, 1, if_false_label
, if_true_label
);
420 do_compare_and_jump (exp
, LT
, LTU
, if_false_label
, if_true_label
);
424 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
425 if (GET_MODE_CLASS (mode
) == MODE_INT
426 && ! can_compare_p (LE
, mode
, ccp_jump
))
427 do_jump_by_parts_greater (exp
, 0, if_true_label
, if_false_label
);
429 do_compare_and_jump (exp
, LE
, LEU
, if_false_label
, if_true_label
);
433 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
434 if (GET_MODE_CLASS (mode
) == MODE_INT
435 && ! can_compare_p (GT
, mode
, ccp_jump
))
436 do_jump_by_parts_greater (exp
, 0, if_false_label
, if_true_label
);
438 do_compare_and_jump (exp
, GT
, GTU
, if_false_label
, if_true_label
);
442 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
443 if (GET_MODE_CLASS (mode
) == MODE_INT
444 && ! can_compare_p (GE
, mode
, ccp_jump
))
445 do_jump_by_parts_greater (exp
, 1, if_true_label
, if_false_label
);
447 do_compare_and_jump (exp
, GE
, GEU
, if_false_label
, if_true_label
);
/* [review] UNORDERED/ORDERED: try the requested code, else use the
   reverse with swapped labels.  */
453 enum rtx_code cmp
, rcmp
;
456 if (code
== UNORDERED_EXPR
)
457 cmp
= UNORDERED
, rcmp
= ORDERED
;
459 cmp
= ORDERED
, rcmp
= UNORDERED
;
460 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
463 if (! can_compare_p (cmp
, mode
, ccp_jump
)
464 && (can_compare_p (rcmp
, mode
, ccp_jump
)
465 /* If the target doesn't provide either UNORDERED or ORDERED
466 comparisons, canonicalize on UNORDERED for the library. */
467 || rcmp
== UNORDERED
))
471 do_compare_and_jump (exp
, cmp
, cmp
, if_false_label
, if_true_label
);
473 do_compare_and_jump (exp
, rcmp
, rcmp
, if_true_label
, if_false_label
);
/* [review] Combined unordered comparisons (UNLT etc.): use the rtx code
   directly when supported, else decompose as UNORDERED || <cmp>.  */
478 enum rtx_code rcode1
;
479 enum tree_code tcode2
;
503 mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
504 if (can_compare_p (rcode1
, mode
, ccp_jump
))
505 do_compare_and_jump (exp
, rcode1
, rcode1
, if_false_label
,
509 tree op0
= save_expr (TREE_OPERAND (exp
, 0));
510 tree op1
= save_expr (TREE_OPERAND (exp
, 1));
513 /* If the target doesn't support combined unordered
514 compares, decompose into UNORDERED + comparison. */
515 cmp0
= fold (build (UNORDERED_EXPR
, TREE_TYPE (exp
), op0
, op1
));
516 cmp1
= fold (build (tcode2
, TREE_TYPE (exp
), op0
, op1
));
517 exp
= build (TRUTH_ORIF_EXPR
, TREE_TYPE (exp
), cmp0
, cmp1
);
518 do_jump (exp
, if_false_label
, if_true_label
);
/* [review] CALL_EXPR: special-case __builtin_expect so the hint reaches
   the jump rather than being lowered to an SCC first.  */
524 __builtin_expect (<test>, 0) and
525 __builtin_expect (<test>, 1)
527 We need to do this here, so that <test> is not converted to a SCC
528 operation on machines that use condition code registers and COMPARE
529 like the PowerPC, and then the jump is done based on whether the SCC
530 operation produced a 1 or 0. */
532 /* Check for a built-in function. */
534 tree fndecl
= get_callee_fndecl (exp
);
535 tree arglist
= TREE_OPERAND (exp
, 1);
538 && DECL_BUILT_IN (fndecl
)
539 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
540 && arglist
!= NULL_TREE
541 && TREE_CHAIN (arglist
) != NULL_TREE
)
543 rtx seq
= expand_builtin_expect_jump (exp
, if_false_label
,
553 /* Fall through and generate the normal code. */
/* [review] Default case: expand EXP and compare its rtx against zero.  */
557 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, 0);
559 /* This is not needed any more and causes poor code since it causes
560 comparisons and tests from non-SI objects to have different code
562 /* Copy to register to avoid generating bad insns by cse
563 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
564 if (!cse_not_expected
&& GET_CODE (temp
) == MEM
)
565 temp
= copy_to_reg (temp
);
567 do_pending_stack_adjust ();
568 /* Do any postincrements in the expression that was tested. */
571 if (GET_CODE (temp
) == CONST_INT
572 || (GET_CODE (temp
) == CONST_DOUBLE
&& GET_MODE (temp
) == VOIDmode
)
573 || GET_CODE (temp
) == LABEL_REF
)
575 rtx target
= temp
== const0_rtx
? if_false_label
: if_true_label
;
579 else if (GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
580 && ! can_compare_p (NE
, GET_MODE (temp
), ccp_jump
))
581 /* Note swapping the labels gives us not-equal. */
582 do_jump_by_parts_equality_rtx (temp
, if_true_label
, if_false_label
);
583 else if (GET_MODE (temp
) != VOIDmode
)
585 /* The RTL optimizers prefer comparisons against pseudos. */
586 if (GET_CODE (temp
) == SUBREG
)
588 /* Compare promoted variables in their promoted mode. */
589 if (SUBREG_PROMOTED_VAR_P (temp
)
590 && GET_CODE (XEXP (temp
, 0)) == REG
)
591 temp
= XEXP (temp
, 0);
593 temp
= copy_to_reg (temp
);
595 do_compare_rtx_and_jump (temp
, CONST0_RTX (GET_MODE (temp
)),
596 NE
, TREE_UNSIGNED (TREE_TYPE (exp
)),
597 GET_MODE (temp
), NULL_RTX
,
598 if_false_label
, if_true_label
);
/* [review] Epilogue shared by all cases that created a fall-through
   label.  */
604 if (drop_through_label
)
606 /* If do_jump produces code that might be jumped around,
607 do any stack adjusts from that code, before the place
608 where control merges in. */
609 do_pending_stack_adjust ();
610 emit_label (drop_through_label
);
614 /* Given a comparison expression EXP for values too wide to be compared
615 with one insn, test the comparison and jump to the appropriate label.
616 The code of EXP is ignored; we always test GT if SWAP is 0,
617 and LT if SWAP is 1. */
/* NOTE(review): the signature below is truncated in this copy (its last
   parameter, rtx if_true_label, and the opening brace are missing).  */
620 do_jump_by_parts_greater (tree exp
, int swap
, rtx if_false_label
,
/* Expand both operands; SWAP selects which operand comes first, which
   turns a GT-style test into an LT-style one.  */
623 rtx op0
= expand_expr (TREE_OPERAND (exp
, swap
), NULL_RTX
, VOIDmode
, 0);
624 rtx op1
= expand_expr (TREE_OPERAND (exp
, !swap
), NULL_RTX
, VOIDmode
, 0);
625 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
626 int unsignedp
= TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
628 do_jump_by_parts_greater_rtx (mode
, unsignedp
, op0
, op1
, if_false_label
, if_true_label
);
631 /* Compare OP0 with OP1, word at a time, in mode MODE.
632 UNSIGNEDP says to do unsigned comparison.
633 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
636 do_jump_by_parts_greater_rtx (enum machine_mode mode
, int unsignedp
, rtx op0
,
637 rtx op1
, rtx if_false_label
, rtx if_true_label
)
/* NOTE(review): the declaration of loop index `i', the opening brace,
   and several interior lines are missing from this extracted copy.  */
639 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
640 rtx drop_through_label
= 0;
/* A missing label means "fall through"; materialize one label to serve
   for whichever of the two is absent.  */
643 if (! if_true_label
|| ! if_false_label
)
644 drop_through_label
= gen_label_rtx ();
646 if_true_label
= drop_through_label
;
647 if (! if_false_label
)
648 if_false_label
= drop_through_label
;
650 /* Compare a word at a time, high order first. */
651 for (i
= 0; i
< nwords
; i
++)
653 rtx op0_word
, op1_word
;
655 if (WORDS_BIG_ENDIAN
)
657 op0_word
= operand_subword_force (op0
, i
, mode
);
658 op1_word
= operand_subword_force (op1
, i
, mode
);
662 op0_word
= operand_subword_force (op0
, nwords
- 1 - i
, mode
);
663 op1_word
= operand_subword_force (op1
, nwords
- 1 - i
, mode
);
666 /* All but high-order word must be compared as unsigned. */
667 do_compare_rtx_and_jump (op0_word
, op1_word
, GT
,
668 (unsignedp
|| i
> 0), word_mode
, NULL_RTX
,
669 NULL_RTX
, if_true_label
);
671 /* Consider lower words only if these are equal. */
672 do_compare_rtx_and_jump (op0_word
, op1_word
, NE
, unsignedp
, word_mode
,
673 NULL_RTX
, NULL_RTX
, if_false_label
);
/* All words compared equal: OP0 is not greater than OP1.  */
677 emit_jump (if_false_label
);
678 if (drop_through_label
)
679 emit_label (drop_through_label
);
682 /* Given an EQ_EXPR expression EXP for values too wide to be compared
683 with one insn, test the comparison and jump to the appropriate label. */
686 do_jump_by_parts_equality (tree exp
, rtx if_false_label
, rtx if_true_label
)
/* NOTE(review): declarations of loop index `i', braces, and the guard
   before the final emit_jump are missing from this extracted copy.  */
688 rtx op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
689 rtx op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
690 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
691 int nwords
= (GET_MODE_SIZE (mode
) / UNITS_PER_WORD
);
693 rtx drop_through_label
= 0;
695 if (! if_false_label
)
696 drop_through_label
= if_false_label
= gen_label_rtx ();
/* Any unequal word proves inequality; jump straight to the false label.  */
698 for (i
= 0; i
< nwords
; i
++)
699 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, mode
),
700 operand_subword_force (op1
, i
, mode
),
701 EQ
, TREE_UNSIGNED (TREE_TYPE (exp
)),
702 word_mode
, NULL_RTX
, if_false_label
, NULL_RTX
);
/* All words equal.  */
705 emit_jump (if_true_label
);
706 if (drop_through_label
)
707 emit_label (drop_through_label
);
710 /* Jump according to whether OP0 is 0.
711 We assume that OP0 has an integer mode that is too wide
712 for the available compare insns. */
715 do_jump_by_parts_equality_rtx (rtx op0
, rtx if_false_label
, rtx if_true_label
)
/* NOTE(review): declarations of `i' and `part', braces, and the branch
   structure between the "or" strategy and the per-word fallback are
   missing from this extracted copy.  */
717 int nwords
= GET_MODE_SIZE (GET_MODE (op0
)) / UNITS_PER_WORD
;
720 rtx drop_through_label
= 0;
722 /* The fastest way of doing this comparison on almost any machine is to
723 "or" all the words and compare the result. If all have to be loaded
724 from memory and this is a very wide item, it's possible this may
725 be slower, but that's highly unlikely. */
727 part
= gen_reg_rtx (word_mode
);
728 emit_move_insn (part
, operand_subword_force (op0
, 0, GET_MODE (op0
)));
729 for (i
= 1; i
< nwords
&& part
!= 0; i
++)
730 part
= expand_binop (word_mode
, ior_optab
, part
,
731 operand_subword_force (op0
, i
, GET_MODE (op0
)),
732 part
, 1, OPTAB_WIDEN
);
/* Single compare of the OR'ed words against zero.  */
736 do_compare_rtx_and_jump (part
, const0_rtx
, EQ
, 1, word_mode
,
737 NULL_RTX
, if_false_label
, if_true_label
);
742 /* If we couldn't do the "or" simply, do this with a series of compares. */
743 if (! if_false_label
)
744 drop_through_label
= if_false_label
= gen_label_rtx ();
746 for (i
= 0; i
< nwords
; i
++)
747 do_compare_rtx_and_jump (operand_subword_force (op0
, i
, GET_MODE (op0
)),
748 const0_rtx
, EQ
, 1, word_mode
, NULL_RTX
,
749 if_false_label
, NULL_RTX
);
752 emit_jump (if_true_label
);
754 if (drop_through_label
)
755 emit_label (drop_through_label
);
758 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
759 (including code to compute the values to be compared)
760 and set (CC0) according to the result.
761 The decision as to signed or unsigned comparison must be made by the caller.
763 We force a stack adjustment unless there are currently
764 things pushed on the stack that aren't yet used.
766 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
770 compare_from_rtx (rtx op0
, rtx op1
, enum rtx_code code
, int unsignedp
,
771 enum machine_mode mode
, rtx size
)
/* NOTE(review): the return type, declarations of `tem'/`ucode', the
   operand-swap body, and parts of the cc0/non-cc0 return paths are
   missing from this extracted copy.  */
776 /* If one operand is constant, make it the second one. Only do this
777 if the other operand is not constant as well. */
779 if (swap_commutative_operands_p (op0
, op1
))
784 code
= swap_condition (code
);
/* Pull operands out of memory when forced (guard lines missing here).  */
789 op0
= force_not_mem (op0
);
790 op1
= force_not_mem (op1
);
793 do_pending_stack_adjust ();
/* Try constant-folding the comparison before emitting anything.  */
795 ucode
= unsignedp
? unsigned_condition (code
) : code
;
796 if ((tem
= simplify_relational_operation (ucode
, mode
, op0
, op1
)) != 0)
800 /* There's no need to do this now that combine.c can eliminate lots of
801 sign extensions. This can be less efficient in certain cases on other
804 /* If this is a signed equality comparison, we can do it as an
805 unsigned comparison since zero-extension is cheaper than sign
806 extension and comparisons with zero are done as unsigned. This is
807 the case even on machines that can do fast sign extension, since
808 zero-extension is easier to combine with other operations than
809 sign-extension is. If we are comparing against a constant, we must
810 convert it to what it would look like unsigned. */
811 if ((code
== EQ
|| code
== NE
) && ! unsignedp
812 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
814 if (GET_CODE (op1
) == CONST_INT
815 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
816 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
821 emit_cmp_insn (op0
, op1
, code
, size
, mode
, unsignedp
);
/* Return a comparison rtx: against cc0 on cc0 targets, else directly
   on the operands.  */
824 return gen_rtx_fmt_ee (code
, VOIDmode
, cc0_rtx
, const0_rtx
);
826 return gen_rtx_fmt_ee (code
, VOIDmode
, op0
, op1
);
830 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
831 The decision as to signed or unsigned comparison must be made by the caller.
833 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
837 do_compare_rtx_and_jump (rtx op0
, rtx op1
, enum rtx_code code
, int unsignedp
,
838 enum machine_mode mode
, rtx size
, rtx if_false_label
,
/* NOTE(review): the final parameter (rtx if_true_label), local
   declarations of `tem'/`ucode', and several guard lines are missing
   from this extracted copy.  */
843 int dummy_true_label
= 0;
845 /* Reverse the comparison if that is safe and we want to jump if it is
847 if (! if_true_label
&& ! FLOAT_MODE_P (mode
))
849 if_true_label
= if_false_label
;
851 code
= reverse_condition (code
);
854 /* If one operand is constant, make it the second one. Only do this
855 if the other operand is not constant as well. */
857 if (swap_commutative_operands_p (op0
, op1
))
862 code
= swap_condition (code
);
867 op0
= force_not_mem (op0
);
868 op1
= force_not_mem (op1
);
871 do_pending_stack_adjust ();
/* If the comparison folds to a constant, emit an unconditional jump
   (or nothing) instead of a compare-and-branch.  */
873 ucode
= unsignedp
? unsigned_condition (code
) : code
;
874 if ((tem
= simplify_relational_operation (ucode
, mode
, op0
, op1
)) != 0)
876 if (tem
== const_true_rtx
)
879 emit_jump (if_true_label
);
884 emit_jump (if_false_label
);
890 /* There's no need to do this now that combine.c can eliminate lots of
891 sign extensions. This can be less efficient in certain cases on other
894 /* If this is a signed equality comparison, we can do it as an
895 unsigned comparison since zero-extension is cheaper than sign
896 extension and comparisons with zero are done as unsigned. This is
897 the case even on machines that can do fast sign extension, since
898 zero-extension is easier to combine with other operations than
899 sign-extension is. If we are comparing against a constant, we must
900 convert it to what it would look like unsigned. */
901 if ((code
== EQ
|| code
== NE
) && ! unsignedp
902 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
904 if (GET_CODE (op1
) == CONST_INT
905 && (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
))) != INTVAL (op1
))
906 op1
= GEN_INT (INTVAL (op1
) & GET_MODE_MASK (GET_MODE (op0
)));
/* The emit helper needs a real true label; synthesize one if the
   caller asked to fall through on true.  */
913 dummy_true_label
= 1;
914 if_true_label
= gen_label_rtx ();
917 emit_cmp_and_jump_insns (op0
, op1
, code
, size
, mode
, unsignedp
,
921 emit_jump (if_false_label
);
922 if (dummy_true_label
)
923 emit_label (if_true_label
);
926 /* Generate code for a comparison expression EXP (including code to compute
927 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
928 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
929 generated code will drop through.
930 SIGNED_CODE should be the rtx operation for this comparison for
931 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
933 We force a stack adjustment unless there are currently
934 things pushed on the stack that aren't yet used. */
937 do_compare_and_jump (tree exp
, enum rtx_code signed_code
,
938 enum rtx_code unsigned_code
, rtx if_false_label
,
/* NOTE(review): the final parameter (rtx if_true_label), several local
   declarations (op0/op1/type/code/unsignedp), and early-return lines
   after the ERROR_MARK checks are missing from this extracted copy.  */
943 enum machine_mode mode
;
947 /* Don't crash if the comparison was erroneous. */
948 op0
= expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
949 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ERROR_MARK
)
952 op1
= expand_expr (TREE_OPERAND (exp
, 1), NULL_RTX
, VOIDmode
, 0);
953 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == ERROR_MARK
)
956 type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
957 mode
= TYPE_MODE (type
);
958 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == INTEGER_CST
959 && (TREE_CODE (TREE_OPERAND (exp
, 1)) != INTEGER_CST
960 || (GET_MODE_BITSIZE (mode
)
961 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
,
964 /* op0 might have been replaced by promoted constant, in which
965 case the type of second argument should be used. */
966 type
= TREE_TYPE (TREE_OPERAND (exp
, 1));
967 mode
= TYPE_MODE (type
);
/* Select the signed or unsigned rtx code based on the operand type.  */
969 unsignedp
= TREE_UNSIGNED (type
);
970 code
= unsignedp
? unsigned_code
: signed_code
;
972 #ifdef HAVE_canonicalize_funcptr_for_compare
973 /* If function pointers need to be "canonicalized" before they can
974 be reliably compared, then canonicalize them. */
975 if (HAVE_canonicalize_funcptr_for_compare
976 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == POINTER_TYPE
977 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))))
980 rtx new_op0
= gen_reg_rtx (mode
);
982 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0
, op0
));
986 if (HAVE_canonicalize_funcptr_for_compare
987 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 1))) == POINTER_TYPE
988 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 1))))
991 rtx new_op1
= gen_reg_rtx (mode
);
993 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1
, op1
));
998 /* Do any postincrements in the expression that was tested. */
1001 do_compare_rtx_and_jump (op0
, op1
, code
, unsignedp
, mode
,
1003 ? expr_size (TREE_OPERAND (exp
, 0)) : NULL_RTX
),
1004 if_false_label
, if_true_label
);